diff --git a/owl-bot-staging/google-cloud-asset/v1/.coveragerc b/owl-bot-staging/google-cloud-asset/v1/.coveragerc deleted file mode 100644 index 801f6d8a1da7..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/asset/__init__.py - google/cloud/asset/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/google-cloud-asset/v1/.flake8 b/owl-bot-staging/google-cloud-asset/v1/.flake8 deleted file mode 100644 index 29227d4cf419..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. - **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/google-cloud-asset/v1/MANIFEST.in b/owl-bot-staging/google-cloud-asset/v1/MANIFEST.in deleted file mode 100644 index 5c97e27612a3..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/asset *.py -recursive-include google/cloud/asset_v1 *.py diff --git a/owl-bot-staging/google-cloud-asset/v1/README.rst b/owl-bot-staging/google-cloud-asset/v1/README.rst deleted file mode 100644 index a10b3ef1e958..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/README.rst +++ /dev/null @@ -1,143 +0,0 @@ -Python Client for Google Cloud Asset API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Asset API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. 
code-block:: console - - python3 -m venv <your-env> - source <your-env>/bin/activate - <your-env>/bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. code-block:: console - - python3 -m venv <your-env> - <your-env>\Scripts\activate - <your-env>\Scripts\pip.exe install \path\to\library - - -Logging ------- - -This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes. -Note the following: - -#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging. -#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**. -#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below. - - -Simple, environment-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google -logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged -messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging -event. - -A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log. - -- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc. -- Invalid logging scopes: :code:`foo`, :code:`123`, etc. - -**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers. - - -Examples -^^^^^^^^ - -- Enabling the default handler for all Google-based loggers - -.. code-block:: console - - export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google - -- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`): - -.. code-block:: console - - export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1 - - -Advanced, code-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -You can also configure a valid logging scope using Python's standard `logging` mechanism. - - -Examples -^^^^^^^^ - -- Configuring a handler for all Google-based loggers - -.. code-block:: python - - import logging - - from google.cloud.translate_v3 import translate - - base_logger = logging.getLogger("google") - base_logger.addHandler(logging.StreamHandler()) - base_logger.setLevel(logging.DEBUG) - -- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`): - -.. code-block:: python - - import logging - - from google.cloud.translate_v3 import translate - - base_logger = logging.getLogger("google.cloud.library_v1") - base_logger.addHandler(logging.StreamHandler()) - base_logger.setLevel(logging.DEBUG) - - -Logging details -~~~~~~~~~~~~~~~ - -#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root - logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set - :code:`logging.getLogger("google").propagate = True` in your code. -#. 
You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for - one library, but decide you need to also set up environment-based logging configuration for another library. - - #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual - if the code-based configuration gets applied first. - -#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get - executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured. - (This is the reason for 2.i. above.) diff --git a/owl-bot-staging/google-cloud-asset/v1/docs/_static/custom.css b/owl-bot-staging/google-cloud-asset/v1/docs/_static/custom.css deleted file mode 100644 index 06423be0b592..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/docs/_static/custom.css +++ /dev/null @@ -1,3 +0,0 @@ -dl.field-list > dt { - min-width: 100px -} diff --git a/owl-bot-staging/google-cloud-asset/v1/docs/asset_v1/asset_service.rst b/owl-bot-staging/google-cloud-asset/v1/docs/asset_v1/asset_service.rst deleted file mode 100644 index b2f80a4bd4ba..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/docs/asset_v1/asset_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -AssetService ------------------------------- - -.. automodule:: google.cloud.asset_v1.services.asset_service - :members: - :inherited-members: - -.. automodule:: google.cloud.asset_v1.services.asset_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-asset/v1/docs/asset_v1/services_.rst b/owl-bot-staging/google-cloud-asset/v1/docs/asset_v1/services_.rst deleted file mode 100644 index a5ddb91fe4be..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/docs/asset_v1/services_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Asset v1 API -====================================== -.. toctree:: - :maxdepth: 2 - - asset_service diff --git a/owl-bot-staging/google-cloud-asset/v1/docs/asset_v1/types_.rst b/owl-bot-staging/google-cloud-asset/v1/docs/asset_v1/types_.rst deleted file mode 100644 index 750d9c160357..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/docs/asset_v1/types_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Asset v1 API -=================================== - -.. automodule:: google.cloud.asset_v1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/google-cloud-asset/v1/docs/conf.py b/owl-bot-staging/google-cloud-asset/v1/docs/conf.py deleted file mode 100644 index fa7647914fb5..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -# -# google-cloud-asset documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGELOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffixes as a list of strings: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = u"google-cloud-asset" -copyright = u"2023, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = 'en' - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. 
-# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# "<project> v<release> documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a <link> tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. 
-# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-asset-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-asset.tex", - u"google-cloud-asset Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-asset", - u"Google Cloud Asset Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-asset", - u"google-cloud-asset Documentation", - author, - "google-cloud-asset", - "GAPIC library for Google Cloud Asset API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. 
-# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-asset/v1/docs/index.rst b/owl-bot-staging/google-cloud-asset/v1/docs/index.rst deleted file mode 100644 index df4eb53564ad..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - asset_v1/services_ - asset_v1/types_ diff --git a/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset/__init__.py b/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset/__init__.py deleted file mode 100644 index 7248a9576ba0..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset/__init__.py +++ /dev/null @@ -1,185 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.asset import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.asset_v1.services.asset_service.client import AssetServiceClient -from google.cloud.asset_v1.services.asset_service.async_client import AssetServiceAsyncClient - -from google.cloud.asset_v1.types.asset_service import AnalyzeIamPolicyLongrunningMetadata -from google.cloud.asset_v1.types.asset_service import AnalyzeIamPolicyLongrunningRequest -from google.cloud.asset_v1.types.asset_service import AnalyzeIamPolicyLongrunningResponse -from google.cloud.asset_v1.types.asset_service import AnalyzeIamPolicyRequest -from google.cloud.asset_v1.types.asset_service import AnalyzeIamPolicyResponse -from google.cloud.asset_v1.types.asset_service import AnalyzeMoveRequest -from google.cloud.asset_v1.types.asset_service import AnalyzeMoveResponse -from google.cloud.asset_v1.types.asset_service import AnalyzeOrgPoliciesRequest -from google.cloud.asset_v1.types.asset_service import AnalyzeOrgPoliciesResponse -from google.cloud.asset_v1.types.asset_service import AnalyzeOrgPolicyGovernedAssetsRequest -from google.cloud.asset_v1.types.asset_service import AnalyzeOrgPolicyGovernedAssetsResponse -from google.cloud.asset_v1.types.asset_service import AnalyzeOrgPolicyGovernedContainersRequest -from google.cloud.asset_v1.types.asset_service import AnalyzeOrgPolicyGovernedContainersResponse -from google.cloud.asset_v1.types.asset_service import AnalyzerOrgPolicy -from google.cloud.asset_v1.types.asset_service import AnalyzerOrgPolicyConstraint -from google.cloud.asset_v1.types.asset_service import BatchGetAssetsHistoryRequest -from google.cloud.asset_v1.types.asset_service import BatchGetAssetsHistoryResponse -from google.cloud.asset_v1.types.asset_service import BatchGetEffectiveIamPoliciesRequest -from google.cloud.asset_v1.types.asset_service import BatchGetEffectiveIamPoliciesResponse -from google.cloud.asset_v1.types.asset_service import BigQueryDestination -from google.cloud.asset_v1.types.asset_service import CreateFeedRequest -from google.cloud.asset_v1.types.asset_service import CreateSavedQueryRequest -from google.cloud.asset_v1.types.asset_service import DeleteFeedRequest -from google.cloud.asset_v1.types.asset_service import DeleteSavedQueryRequest -from google.cloud.asset_v1.types.asset_service import ExportAssetsRequest -from google.cloud.asset_v1.types.asset_service import ExportAssetsResponse -from google.cloud.asset_v1.types.asset_service import Feed -from google.cloud.asset_v1.types.asset_service import FeedOutputConfig -from google.cloud.asset_v1.types.asset_service import GcsDestination -from google.cloud.asset_v1.types.asset_service import GcsOutputResult -from google.cloud.asset_v1.types.asset_service import GetFeedRequest -from google.cloud.asset_v1.types.asset_service import GetSavedQueryRequest -from google.cloud.asset_v1.types.asset_service import IamPolicyAnalysisOutputConfig -from google.cloud.asset_v1.types.asset_service import IamPolicyAnalysisQuery -from google.cloud.asset_v1.types.asset_service import ListAssetsRequest -from google.cloud.asset_v1.types.asset_service import ListAssetsResponse -from google.cloud.asset_v1.types.asset_service import ListFeedsRequest -from google.cloud.asset_v1.types.asset_service import ListFeedsResponse -from google.cloud.asset_v1.types.asset_service import ListSavedQueriesRequest -from google.cloud.asset_v1.types.asset_service import ListSavedQueriesResponse -from google.cloud.asset_v1.types.asset_service import 
MoveAnalysis -from google.cloud.asset_v1.types.asset_service import MoveAnalysisResult -from google.cloud.asset_v1.types.asset_service import MoveImpact -from google.cloud.asset_v1.types.asset_service import OutputConfig -from google.cloud.asset_v1.types.asset_service import OutputResult -from google.cloud.asset_v1.types.asset_service import PartitionSpec -from google.cloud.asset_v1.types.asset_service import PubsubDestination -from google.cloud.asset_v1.types.asset_service import QueryAssetsOutputConfig -from google.cloud.asset_v1.types.asset_service import QueryAssetsRequest -from google.cloud.asset_v1.types.asset_service import QueryAssetsResponse -from google.cloud.asset_v1.types.asset_service import QueryResult -from google.cloud.asset_v1.types.asset_service import SavedQuery -from google.cloud.asset_v1.types.asset_service import SearchAllIamPoliciesRequest -from google.cloud.asset_v1.types.asset_service import SearchAllIamPoliciesResponse -from google.cloud.asset_v1.types.asset_service import SearchAllResourcesRequest -from google.cloud.asset_v1.types.asset_service import SearchAllResourcesResponse -from google.cloud.asset_v1.types.asset_service import TableFieldSchema -from google.cloud.asset_v1.types.asset_service import TableSchema -from google.cloud.asset_v1.types.asset_service import UpdateFeedRequest -from google.cloud.asset_v1.types.asset_service import UpdateSavedQueryRequest -from google.cloud.asset_v1.types.asset_service import ContentType -from google.cloud.asset_v1.types.assets import Asset -from google.cloud.asset_v1.types.assets import AttachedResource -from google.cloud.asset_v1.types.assets import ConditionEvaluation -from google.cloud.asset_v1.types.assets import EffectiveTagDetails -from google.cloud.asset_v1.types.assets import IamPolicyAnalysisResult -from google.cloud.asset_v1.types.assets import IamPolicyAnalysisState -from google.cloud.asset_v1.types.assets import IamPolicySearchResult -from google.cloud.asset_v1.types.assets import RelatedAsset -from google.cloud.asset_v1.types.assets import RelatedAssets -from google.cloud.asset_v1.types.assets import RelatedResource -from google.cloud.asset_v1.types.assets import RelatedResources -from google.cloud.asset_v1.types.assets import RelationshipAttributes -from google.cloud.asset_v1.types.assets import Resource -from google.cloud.asset_v1.types.assets import ResourceSearchResult -from google.cloud.asset_v1.types.assets import Tag -from google.cloud.asset_v1.types.assets import TemporalAsset -from google.cloud.asset_v1.types.assets import TimeWindow -from google.cloud.asset_v1.types.assets import VersionedResource - -__all__ = ('AssetServiceClient', - 'AssetServiceAsyncClient', - 'AnalyzeIamPolicyLongrunningMetadata', - 'AnalyzeIamPolicyLongrunningRequest', - 'AnalyzeIamPolicyLongrunningResponse', - 'AnalyzeIamPolicyRequest', - 'AnalyzeIamPolicyResponse', - 'AnalyzeMoveRequest', - 'AnalyzeMoveResponse', - 'AnalyzeOrgPoliciesRequest', - 'AnalyzeOrgPoliciesResponse', - 'AnalyzeOrgPolicyGovernedAssetsRequest', - 'AnalyzeOrgPolicyGovernedAssetsResponse', - 'AnalyzeOrgPolicyGovernedContainersRequest', - 'AnalyzeOrgPolicyGovernedContainersResponse', - 'AnalyzerOrgPolicy', - 'AnalyzerOrgPolicyConstraint', - 'BatchGetAssetsHistoryRequest', - 'BatchGetAssetsHistoryResponse', - 'BatchGetEffectiveIamPoliciesRequest', - 'BatchGetEffectiveIamPoliciesResponse', - 'BigQueryDestination', - 'CreateFeedRequest', - 'CreateSavedQueryRequest', - 'DeleteFeedRequest', - 'DeleteSavedQueryRequest', - 'ExportAssetsRequest', - 
'ExportAssetsResponse', - 'Feed', - 'FeedOutputConfig', - 'GcsDestination', - 'GcsOutputResult', - 'GetFeedRequest', - 'GetSavedQueryRequest', - 'IamPolicyAnalysisOutputConfig', - 'IamPolicyAnalysisQuery', - 'ListAssetsRequest', - 'ListAssetsResponse', - 'ListFeedsRequest', - 'ListFeedsResponse', - 'ListSavedQueriesRequest', - 'ListSavedQueriesResponse', - 'MoveAnalysis', - 'MoveAnalysisResult', - 'MoveImpact', - 'OutputConfig', - 'OutputResult', - 'PartitionSpec', - 'PubsubDestination', - 'QueryAssetsOutputConfig', - 'QueryAssetsRequest', - 'QueryAssetsResponse', - 'QueryResult', - 'SavedQuery', - 'SearchAllIamPoliciesRequest', - 'SearchAllIamPoliciesResponse', - 'SearchAllResourcesRequest', - 'SearchAllResourcesResponse', - 'TableFieldSchema', - 'TableSchema', - 'UpdateFeedRequest', - 'UpdateSavedQueryRequest', - 'ContentType', - 'Asset', - 'AttachedResource', - 'ConditionEvaluation', - 'EffectiveTagDetails', - 'IamPolicyAnalysisResult', - 'IamPolicyAnalysisState', - 'IamPolicySearchResult', - 'RelatedAsset', - 'RelatedAssets', - 'RelatedResource', - 'RelatedResources', - 'RelationshipAttributes', - 'Resource', - 'ResourceSearchResult', - 'Tag', - 'TemporalAsset', - 'TimeWindow', - 'VersionedResource', -) diff --git a/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset/gapic_version.py b/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset/py.typed b/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset/py.typed deleted file mode 100644 index 3dbb09a39130..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-asset package uses inline types. diff --git a/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/__init__.py b/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/__init__.py deleted file mode 100644 index f8787d310b9f..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/__init__.py +++ /dev/null @@ -1,186 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.asset_v1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.asset_service import AssetServiceClient -from .services.asset_service import AssetServiceAsyncClient - -from .types.asset_service import AnalyzeIamPolicyLongrunningMetadata -from .types.asset_service import AnalyzeIamPolicyLongrunningRequest -from .types.asset_service import AnalyzeIamPolicyLongrunningResponse -from .types.asset_service import AnalyzeIamPolicyRequest -from .types.asset_service import AnalyzeIamPolicyResponse -from .types.asset_service import AnalyzeMoveRequest -from .types.asset_service import AnalyzeMoveResponse -from .types.asset_service import AnalyzeOrgPoliciesRequest -from .types.asset_service import AnalyzeOrgPoliciesResponse -from .types.asset_service import AnalyzeOrgPolicyGovernedAssetsRequest -from .types.asset_service import AnalyzeOrgPolicyGovernedAssetsResponse -from .types.asset_service import AnalyzeOrgPolicyGovernedContainersRequest -from .types.asset_service import AnalyzeOrgPolicyGovernedContainersResponse -from .types.asset_service import AnalyzerOrgPolicy -from .types.asset_service import AnalyzerOrgPolicyConstraint -from .types.asset_service import BatchGetAssetsHistoryRequest -from .types.asset_service import BatchGetAssetsHistoryResponse -from .types.asset_service import BatchGetEffectiveIamPoliciesRequest -from .types.asset_service import BatchGetEffectiveIamPoliciesResponse -from .types.asset_service import BigQueryDestination -from .types.asset_service import CreateFeedRequest -from .types.asset_service import CreateSavedQueryRequest -from .types.asset_service import DeleteFeedRequest -from .types.asset_service import DeleteSavedQueryRequest -from .types.asset_service import ExportAssetsRequest -from .types.asset_service import ExportAssetsResponse -from .types.asset_service import Feed -from .types.asset_service import FeedOutputConfig -from .types.asset_service import GcsDestination -from .types.asset_service import GcsOutputResult -from .types.asset_service import GetFeedRequest -from .types.asset_service import GetSavedQueryRequest -from .types.asset_service import IamPolicyAnalysisOutputConfig -from .types.asset_service import IamPolicyAnalysisQuery -from .types.asset_service import ListAssetsRequest -from .types.asset_service import ListAssetsResponse -from .types.asset_service import ListFeedsRequest -from .types.asset_service import ListFeedsResponse -from .types.asset_service import ListSavedQueriesRequest -from .types.asset_service import ListSavedQueriesResponse -from .types.asset_service import MoveAnalysis -from .types.asset_service import MoveAnalysisResult -from .types.asset_service import MoveImpact -from .types.asset_service import OutputConfig -from .types.asset_service import OutputResult -from .types.asset_service import PartitionSpec -from .types.asset_service import PubsubDestination -from .types.asset_service import QueryAssetsOutputConfig -from .types.asset_service import QueryAssetsRequest -from .types.asset_service import QueryAssetsResponse -from .types.asset_service import QueryResult -from .types.asset_service import SavedQuery -from .types.asset_service import SearchAllIamPoliciesRequest -from .types.asset_service import SearchAllIamPoliciesResponse -from .types.asset_service import SearchAllResourcesRequest -from .types.asset_service import SearchAllResourcesResponse -from .types.asset_service import TableFieldSchema -from .types.asset_service import TableSchema -from 
.types.asset_service import UpdateFeedRequest -from .types.asset_service import UpdateSavedQueryRequest -from .types.asset_service import ContentType -from .types.assets import Asset -from .types.assets import AttachedResource -from .types.assets import ConditionEvaluation -from .types.assets import EffectiveTagDetails -from .types.assets import IamPolicyAnalysisResult -from .types.assets import IamPolicyAnalysisState -from .types.assets import IamPolicySearchResult -from .types.assets import RelatedAsset -from .types.assets import RelatedAssets -from .types.assets import RelatedResource -from .types.assets import RelatedResources -from .types.assets import RelationshipAttributes -from .types.assets import Resource -from .types.assets import ResourceSearchResult -from .types.assets import Tag -from .types.assets import TemporalAsset -from .types.assets import TimeWindow -from .types.assets import VersionedResource - -__all__ = ( - 'AssetServiceAsyncClient', -'AnalyzeIamPolicyLongrunningMetadata', -'AnalyzeIamPolicyLongrunningRequest', -'AnalyzeIamPolicyLongrunningResponse', -'AnalyzeIamPolicyRequest', -'AnalyzeIamPolicyResponse', -'AnalyzeMoveRequest', -'AnalyzeMoveResponse', -'AnalyzeOrgPoliciesRequest', -'AnalyzeOrgPoliciesResponse', -'AnalyzeOrgPolicyGovernedAssetsRequest', -'AnalyzeOrgPolicyGovernedAssetsResponse', -'AnalyzeOrgPolicyGovernedContainersRequest', -'AnalyzeOrgPolicyGovernedContainersResponse', -'AnalyzerOrgPolicy', -'AnalyzerOrgPolicyConstraint', -'Asset', -'AssetServiceClient', -'AttachedResource', -'BatchGetAssetsHistoryRequest', -'BatchGetAssetsHistoryResponse', -'BatchGetEffectiveIamPoliciesRequest', -'BatchGetEffectiveIamPoliciesResponse', -'BigQueryDestination', -'ConditionEvaluation', -'ContentType', -'CreateFeedRequest', -'CreateSavedQueryRequest', -'DeleteFeedRequest', -'DeleteSavedQueryRequest', -'EffectiveTagDetails', -'ExportAssetsRequest', -'ExportAssetsResponse', -'Feed', -'FeedOutputConfig', -'GcsDestination', -'GcsOutputResult', -'GetFeedRequest', -'GetSavedQueryRequest', -'IamPolicyAnalysisOutputConfig', -'IamPolicyAnalysisQuery', -'IamPolicyAnalysisResult', -'IamPolicyAnalysisState', -'IamPolicySearchResult', -'ListAssetsRequest', -'ListAssetsResponse', -'ListFeedsRequest', -'ListFeedsResponse', -'ListSavedQueriesRequest', -'ListSavedQueriesResponse', -'MoveAnalysis', -'MoveAnalysisResult', -'MoveImpact', -'OutputConfig', -'OutputResult', -'PartitionSpec', -'PubsubDestination', -'QueryAssetsOutputConfig', -'QueryAssetsRequest', -'QueryAssetsResponse', -'QueryResult', -'RelatedAsset', -'RelatedAssets', -'RelatedResource', -'RelatedResources', -'RelationshipAttributes', -'Resource', -'ResourceSearchResult', -'SavedQuery', -'SearchAllIamPoliciesRequest', -'SearchAllIamPoliciesResponse', -'SearchAllResourcesRequest', -'SearchAllResourcesResponse', -'TableFieldSchema', -'TableSchema', -'Tag', -'TemporalAsset', -'TimeWindow', -'UpdateFeedRequest', -'UpdateSavedQueryRequest', -'VersionedResource', -) diff --git a/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/gapic_metadata.json b/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/gapic_metadata.json deleted file mode 100644 index e39e5043139f..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/gapic_metadata.json +++ /dev/null @@ -1,373 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.asset_v1", - "protoPackage": "google.cloud.asset.v1", - 
"schema": "1.0", - "services": { - "AssetService": { - "clients": { - "grpc": { - "libraryClient": "AssetServiceClient", - "rpcs": { - "AnalyzeIamPolicy": { - "methods": [ - "analyze_iam_policy" - ] - }, - "AnalyzeIamPolicyLongrunning": { - "methods": [ - "analyze_iam_policy_longrunning" - ] - }, - "AnalyzeMove": { - "methods": [ - "analyze_move" - ] - }, - "AnalyzeOrgPolicies": { - "methods": [ - "analyze_org_policies" - ] - }, - "AnalyzeOrgPolicyGovernedAssets": { - "methods": [ - "analyze_org_policy_governed_assets" - ] - }, - "AnalyzeOrgPolicyGovernedContainers": { - "methods": [ - "analyze_org_policy_governed_containers" - ] - }, - "BatchGetAssetsHistory": { - "methods": [ - "batch_get_assets_history" - ] - }, - "BatchGetEffectiveIamPolicies": { - "methods": [ - "batch_get_effective_iam_policies" - ] - }, - "CreateFeed": { - "methods": [ - "create_feed" - ] - }, - "CreateSavedQuery": { - "methods": [ - "create_saved_query" - ] - }, - "DeleteFeed": { - "methods": [ - "delete_feed" - ] - }, - "DeleteSavedQuery": { - "methods": [ - "delete_saved_query" - ] - }, - "ExportAssets": { - "methods": [ - "export_assets" - ] - }, - "GetFeed": { - "methods": [ - "get_feed" - ] - }, - "GetSavedQuery": { - "methods": [ - "get_saved_query" - ] - }, - "ListAssets": { - "methods": [ - "list_assets" - ] - }, - "ListFeeds": { - "methods": [ - "list_feeds" - ] - }, - "ListSavedQueries": { - "methods": [ - "list_saved_queries" - ] - }, - "QueryAssets": { - "methods": [ - "query_assets" - ] - }, - "SearchAllIamPolicies": { - "methods": [ - "search_all_iam_policies" - ] - }, - "SearchAllResources": { - "methods": [ - "search_all_resources" - ] - }, - "UpdateFeed": { - "methods": [ - "update_feed" - ] - }, - "UpdateSavedQuery": { - "methods": [ - "update_saved_query" - ] - } - } - }, - "grpc-async": { - "libraryClient": "AssetServiceAsyncClient", - "rpcs": { - "AnalyzeIamPolicy": { - "methods": [ - "analyze_iam_policy" - ] - }, - "AnalyzeIamPolicyLongrunning": { - "methods": [ - "analyze_iam_policy_longrunning" - ] - }, - "AnalyzeMove": { - "methods": [ - "analyze_move" - ] - }, - "AnalyzeOrgPolicies": { - "methods": [ - "analyze_org_policies" - ] - }, - "AnalyzeOrgPolicyGovernedAssets": { - "methods": [ - "analyze_org_policy_governed_assets" - ] - }, - "AnalyzeOrgPolicyGovernedContainers": { - "methods": [ - "analyze_org_policy_governed_containers" - ] - }, - "BatchGetAssetsHistory": { - "methods": [ - "batch_get_assets_history" - ] - }, - "BatchGetEffectiveIamPolicies": { - "methods": [ - "batch_get_effective_iam_policies" - ] - }, - "CreateFeed": { - "methods": [ - "create_feed" - ] - }, - "CreateSavedQuery": { - "methods": [ - "create_saved_query" - ] - }, - "DeleteFeed": { - "methods": [ - "delete_feed" - ] - }, - "DeleteSavedQuery": { - "methods": [ - "delete_saved_query" - ] - }, - "ExportAssets": { - "methods": [ - "export_assets" - ] - }, - "GetFeed": { - "methods": [ - "get_feed" - ] - }, - "GetSavedQuery": { - "methods": [ - "get_saved_query" - ] - }, - "ListAssets": { - "methods": [ - "list_assets" - ] - }, - "ListFeeds": { - "methods": [ - "list_feeds" - ] - }, - "ListSavedQueries": { - "methods": [ - "list_saved_queries" - ] - }, - "QueryAssets": { - "methods": [ - "query_assets" - ] - }, - "SearchAllIamPolicies": { - "methods": [ - "search_all_iam_policies" - ] - }, - "SearchAllResources": { - "methods": [ - "search_all_resources" - ] - }, - "UpdateFeed": { - "methods": [ - "update_feed" - ] - }, - "UpdateSavedQuery": { - "methods": [ - "update_saved_query" - ] - } - } - }, - "rest": { - 
"libraryClient": "AssetServiceClient", - "rpcs": { - "AnalyzeIamPolicy": { - "methods": [ - "analyze_iam_policy" - ] - }, - "AnalyzeIamPolicyLongrunning": { - "methods": [ - "analyze_iam_policy_longrunning" - ] - }, - "AnalyzeMove": { - "methods": [ - "analyze_move" - ] - }, - "AnalyzeOrgPolicies": { - "methods": [ - "analyze_org_policies" - ] - }, - "AnalyzeOrgPolicyGovernedAssets": { - "methods": [ - "analyze_org_policy_governed_assets" - ] - }, - "AnalyzeOrgPolicyGovernedContainers": { - "methods": [ - "analyze_org_policy_governed_containers" - ] - }, - "BatchGetAssetsHistory": { - "methods": [ - "batch_get_assets_history" - ] - }, - "BatchGetEffectiveIamPolicies": { - "methods": [ - "batch_get_effective_iam_policies" - ] - }, - "CreateFeed": { - "methods": [ - "create_feed" - ] - }, - "CreateSavedQuery": { - "methods": [ - "create_saved_query" - ] - }, - "DeleteFeed": { - "methods": [ - "delete_feed" - ] - }, - "DeleteSavedQuery": { - "methods": [ - "delete_saved_query" - ] - }, - "ExportAssets": { - "methods": [ - "export_assets" - ] - }, - "GetFeed": { - "methods": [ - "get_feed" - ] - }, - "GetSavedQuery": { - "methods": [ - "get_saved_query" - ] - }, - "ListAssets": { - "methods": [ - "list_assets" - ] - }, - "ListFeeds": { - "methods": [ - "list_feeds" - ] - }, - "ListSavedQueries": { - "methods": [ - "list_saved_queries" - ] - }, - "QueryAssets": { - "methods": [ - "query_assets" - ] - }, - "SearchAllIamPolicies": { - "methods": [ - "search_all_iam_policies" - ] - }, - "SearchAllResources": { - "methods": [ - "search_all_resources" - ] - }, - "UpdateFeed": { - "methods": [ - "update_feed" - ] - }, - "UpdateSavedQuery": { - "methods": [ - "update_saved_query" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/gapic_version.py b/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/py.typed b/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/py.typed deleted file mode 100644 index 3dbb09a39130..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-asset package uses inline types. 
diff --git a/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/services/__init__.py b/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/services/__init__.py deleted file mode 100644 index 8f6cf068242c..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/services/asset_service/__init__.py b/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/services/asset_service/__init__.py deleted file mode 100644 index 1ad75a011889..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/services/asset_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import AssetServiceClient -from .async_client import AssetServiceAsyncClient - -__all__ = ( - 'AssetServiceClient', - 'AssetServiceAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/services/asset_service/async_client.py b/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/services/asset_service/async_client.py deleted file mode 100644 index 033af42dc288..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/services/asset_service/async_client.py +++ /dev/null @@ -1,3307 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.asset_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.asset_v1.services.asset_service import pagers -from google.cloud.asset_v1.types import asset_service -from google.cloud.asset_v1.types import assets -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -from google.type import expr_pb2 # type: ignore -from .transports.base import AssetServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import AssetServiceGrpcAsyncIOTransport -from .client import AssetServiceClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -class AssetServiceAsyncClient: - """Asset service definition.""" - - _client: AssetServiceClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
- DEFAULT_ENDPOINT = AssetServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = AssetServiceClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = AssetServiceClient._DEFAULT_UNIVERSE - - access_level_path = staticmethod(AssetServiceClient.access_level_path) - parse_access_level_path = staticmethod(AssetServiceClient.parse_access_level_path) - access_policy_path = staticmethod(AssetServiceClient.access_policy_path) - parse_access_policy_path = staticmethod(AssetServiceClient.parse_access_policy_path) - asset_path = staticmethod(AssetServiceClient.asset_path) - parse_asset_path = staticmethod(AssetServiceClient.parse_asset_path) - feed_path = staticmethod(AssetServiceClient.feed_path) - parse_feed_path = staticmethod(AssetServiceClient.parse_feed_path) - inventory_path = staticmethod(AssetServiceClient.inventory_path) - parse_inventory_path = staticmethod(AssetServiceClient.parse_inventory_path) - saved_query_path = staticmethod(AssetServiceClient.saved_query_path) - parse_saved_query_path = staticmethod(AssetServiceClient.parse_saved_query_path) - service_perimeter_path = staticmethod(AssetServiceClient.service_perimeter_path) - parse_service_perimeter_path = staticmethod(AssetServiceClient.parse_service_perimeter_path) - common_billing_account_path = staticmethod(AssetServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(AssetServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(AssetServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(AssetServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(AssetServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(AssetServiceClient.parse_common_organization_path) - common_project_path = staticmethod(AssetServiceClient.common_project_path) - parse_common_project_path = staticmethod(AssetServiceClient.parse_common_project_path) - common_location_path = staticmethod(AssetServiceClient.common_location_path) - parse_common_location_path = staticmethod(AssetServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - AssetServiceAsyncClient: The constructed client. - """ - return AssetServiceClient.from_service_account_info.__func__(AssetServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - AssetServiceAsyncClient: The constructed client. - """ - return AssetServiceClient.from_service_account_file.__func__(AssetServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. 
- - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` is provided, use the provided one. - (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return AssetServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> AssetServiceTransport: - """Returns the transport used by the client instance. - - Returns: - AssetServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = AssetServiceClient.get_transport_class - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, AssetServiceTransport, Callable[..., AssetServiceTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the asset service async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,AssetServiceTransport,Callable[..., AssetServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the AssetServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. 
-                Only if this property is not set and ``transport`` was not
-                explicitly provided, the endpoint is determined by the
-                GOOGLE_API_USE_MTLS_ENDPOINT environment variable, which can
-                have one of the following values: "always" (always use the
-                default mTLS endpoint), "never" (always use the default
-                regular endpoint) and "auto" (auto-switch to the default mTLS
-                endpoint if client certificate is present; this is the
-                default value).
-
-                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
-                is "true", then the ``client_cert_source`` property can be used
-                to provide a client certificate for mTLS transport. If
-                not provided, the default SSL client certificate will be used if
-                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
-                set, no client certificate will be used.
-
-                3. The ``universe_domain`` property can be used to override the
-                default "googleapis.com" universe. Note that the ``api_endpoint``
-                property still takes precedence, and ``universe_domain`` is
-                currently not supported for mTLS.
-
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
-        """
-        self._client = AssetServiceClient(
-            credentials=credentials,
-            transport=transport,
-            client_options=client_options,
-            client_info=client_info,
-        )
-
-        if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG):  # pragma: NO COVER
-            _LOGGER.debug(
-                "Created client `google.cloud.asset_v1.AssetServiceAsyncClient`.",
-                extra = {
-                    "serviceName": "google.cloud.asset.v1.AssetService",
-                    "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""),
-                    "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}",
-                    "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(),
-                } if hasattr(self._client._transport, "_credentials") else {
-                    "serviceName": "google.cloud.asset.v1.AssetService",
-                    "credentialsType": None,
-                }
-            )
-
-    async def export_assets(self,
-            request: Optional[Union[asset_service.ExportAssetsRequest, dict]] = None,
-            *,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> operation_async.AsyncOperation:
-        r"""Exports assets with time and resource types to a given Cloud
-        Storage location/BigQuery table. For Cloud Storage location
-        destinations, the output format is newline-delimited JSON. Each
-        line represents a
-        [google.cloud.asset.v1.Asset][google.cloud.asset.v1.Asset] in
-        the JSON format; for BigQuery table destinations, the output
-        table stores the fields in asset Protobuf as columns. This API
-        implements the
-        [google.longrunning.Operation][google.longrunning.Operation]
-        API, which allows you to keep track of the export. We recommend
-        intervals of at least 2 seconds with exponential retry to poll
-        the export operation result. For a regular-size resource parent,
-        the export operation usually finishes within 5 minutes.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1 - - async def sample_export_assets(): - # Create a client - client = asset_v1.AssetServiceAsyncClient() - - # Initialize request argument(s) - output_config = asset_v1.OutputConfig() - output_config.gcs_destination.uri = "uri_value" - - request = asset_v1.ExportAssetsRequest( - parent="parent_value", - output_config=output_config, - ) - - # Make the request - operation = client.export_assets(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.asset_v1.types.ExportAssetsRequest, dict]]): - The request object. Export asset request. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.asset_v1.types.ExportAssetsResponse` The export asset response. This message is returned by the - [google.longrunning.Operations.GetOperation][google.longrunning.Operations.GetOperation] - method in the returned - [google.longrunning.Operation.response][google.longrunning.Operation.response] - field. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.ExportAssetsRequest): - request = asset_service.ExportAssetsRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.export_assets] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - asset_service.ExportAssetsResponse, - metadata_type=asset_service.ExportAssetsRequest, - ) - - # Done; return the response. 
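-        # The returned AsyncOperation wraps the long-running export: awaiting
-        # its result blocks until the exported data has been written to the
-        # configured Cloud Storage or BigQuery destination.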
- return response - - async def list_assets(self, - request: Optional[Union[asset_service.ListAssetsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListAssetsAsyncPager: - r"""Lists assets with time and resource types and returns - paged results in response. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1 - - async def sample_list_assets(): - # Create a client - client = asset_v1.AssetServiceAsyncClient() - - # Initialize request argument(s) - request = asset_v1.ListAssetsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_assets(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.asset_v1.types.ListAssetsRequest, dict]]): - The request object. ListAssets request. - parent (:class:`str`): - Required. Name of the organization, folder, or project - the assets belong to. Format: - "organizations/[organization-number]" (such as - "organizations/123"), "projects/[project-id]" (such as - "projects/my-project-id"), "projects/[project-number]" - (such as "projects/12345"), or "folders/[folder-number]" - (such as "folders/12345"). - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.asset_v1.services.asset_service.pagers.ListAssetsAsyncPager: - ListAssets response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.ListAssetsRequest): - request = asset_service.ListAssetsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
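-        # (``_wrapped_methods`` maps each transport method to a callable
-        # pre-wrapped with the client's default retry/timeout settings; the
-        # per-call ``retry`` and ``timeout`` passed below override those
-        # defaults for this request.)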
-        rpc = self._client._transport._wrapped_methods[self._client._transport.list_assets]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # This method is paged; wrap the response in a pager, which provides
-        # an `__aiter__` convenience method.
-        response = pagers.ListAssetsAsyncPager(
-            method=rpc,
-            request=request,
-            response=response,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def batch_get_assets_history(self,
-            request: Optional[Union[asset_service.BatchGetAssetsHistoryRequest, dict]] = None,
-            *,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> asset_service.BatchGetAssetsHistoryResponse:
-        r"""Batch gets the update history of assets that overlap a time
-        window. For IAM_POLICY content, this API outputs history when
-        the asset and its attached IAM POLICY both exist. This can
-        create gaps in the output history. Otherwise, this API outputs
-        history with assets in both non-deleted and deleted states. If a
-        specified asset does not exist, this API returns an
-        INVALID_ARGUMENT error.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import asset_v1
-
-            async def sample_batch_get_assets_history():
-                # Create a client
-                client = asset_v1.AssetServiceAsyncClient()
-
-                # Initialize request argument(s)
-                request = asset_v1.BatchGetAssetsHistoryRequest(
-                    parent="parent_value",
-                )
-
-                # Make the request
-                response = await client.batch_get_assets_history(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.asset_v1.types.BatchGetAssetsHistoryRequest, dict]]):
-                The request object. Batch get assets history request.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.asset_v1.types.BatchGetAssetsHistoryResponse:
-                Batch get assets history response.
-        """
-        # Create or coerce a protobuf request object.
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, asset_service.BatchGetAssetsHistoryRequest):
-            request = asset_service.BatchGetAssetsHistoryRequest(request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
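-        # (BatchGetAssetsHistory is a plain unary call, so unlike
-        # ``export_assets`` there is no operation future to wrap; the response
-        # message is returned directly once the RPC completes.)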
-        rpc = self._client._transport._wrapped_methods[self._client._transport.batch_get_assets_history]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def create_feed(self,
-            request: Optional[Union[asset_service.CreateFeedRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> asset_service.Feed:
-        r"""Creates a feed in a parent
-        project/folder/organization to listen to its asset
-        updates.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import asset_v1
-
-            async def sample_create_feed():
-                # Create a client
-                client = asset_v1.AssetServiceAsyncClient()
-
-                # Initialize request argument(s)
-                feed = asset_v1.Feed()
-                feed.name = "name_value"
-
-                request = asset_v1.CreateFeedRequest(
-                    parent="parent_value",
-                    feed_id="feed_id_value",
-                    feed=feed,
-                )
-
-                # Make the request
-                response = await client.create_feed(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.asset_v1.types.CreateFeedRequest, dict]]):
-                The request object. Create asset feed request.
-            parent (:class:`str`):
-                Required. The name of the
-                project/folder/organization in which
-                this feed should be created. It can only
-                be an organization number (such as
-                "organizations/123"), a folder number
-                (such as "folders/123"), a project ID
-                (such as "projects/my-project-id"), or a
-                project number (such as
-                "projects/12345").
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.asset_v1.types.Feed:
-                An asset feed used to export asset
-                updates to a destination. An asset feed
-                filter controls what updates are
-                exported. The asset feed must be created
-                within a project, organization, or
-                folder. Supported destinations are:
-
-                -  Pub/Sub topics.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
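-        #   (Supplying both a ``request`` object and a flattened field such
-        #   as ``parent`` would be ambiguous, so that combination is rejected
-        #   with a ``ValueError`` below.)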
- flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.CreateFeedRequest): - request = asset_service.CreateFeedRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_feed] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_feed(self, - request: Optional[Union[asset_service.GetFeedRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.Feed: - r"""Gets details about an asset feed. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1 - - async def sample_get_feed(): - # Create a client - client = asset_v1.AssetServiceAsyncClient() - - # Initialize request argument(s) - request = asset_v1.GetFeedRequest( - name="name_value", - ) - - # Make the request - response = await client.get_feed(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.asset_v1.types.GetFeedRequest, dict]]): - The request object. Get asset feed request. - name (:class:`str`): - Required. The name of the Feed and it must be in the - format of: projects/project_number/feeds/feed_id - folders/folder_number/feeds/feed_id - organizations/organization_number/feeds/feed_id - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.asset_v1.types.Feed: - An asset feed used to export asset - updates to a destinations. 
An asset feed - filter controls what updates are - exported. The asset feed must be created - within a project, organization, or - folder. Supported destinations are: - - Pub/Sub topics. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.GetFeedRequest): - request = asset_service.GetFeedRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_feed] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_feeds(self, - request: Optional[Union[asset_service.ListFeedsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.ListFeedsResponse: - r"""Lists all asset feeds in a parent - project/folder/organization. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1 - - async def sample_list_feeds(): - # Create a client - client = asset_v1.AssetServiceAsyncClient() - - # Initialize request argument(s) - request = asset_v1.ListFeedsRequest( - parent="parent_value", - ) - - # Make the request - response = await client.list_feeds(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.asset_v1.types.ListFeedsRequest, dict]]): - The request object. List asset feeds request. - parent (:class:`str`): - Required. The parent - project/folder/organization whose feeds - are to be listed. It can only be using - project/folder/organization number (such - as "folders/12345")", or a project ID - (such as "projects/my-project-id"). - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.asset_v1.types.ListFeedsResponse: - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.ListFeedsRequest): - request = asset_service.ListFeedsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_feeds] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_feed(self, - request: Optional[Union[asset_service.UpdateFeedRequest, dict]] = None, - *, - feed: Optional[asset_service.Feed] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.Feed: - r"""Updates an asset feed configuration. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1 - - async def sample_update_feed(): - # Create a client - client = asset_v1.AssetServiceAsyncClient() - - # Initialize request argument(s) - feed = asset_v1.Feed() - feed.name = "name_value" - - request = asset_v1.UpdateFeedRequest( - feed=feed, - ) - - # Make the request - response = await client.update_feed(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.asset_v1.types.UpdateFeedRequest, dict]]): - The request object. Update asset feed request. - feed (:class:`google.cloud.asset_v1.types.Feed`): - Required. The new values of feed details. 
It must match - an existing feed and the field ``name`` must be in the - format of: projects/project_number/feeds/feed_id or - folders/folder_number/feeds/feed_id or - organizations/organization_number/feeds/feed_id. - - This corresponds to the ``feed`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.asset_v1.types.Feed: - An asset feed used to export asset - updates to a destinations. An asset feed - filter controls what updates are - exported. The asset feed must be created - within a project, organization, or - folder. Supported destinations are: - - Pub/Sub topics. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [feed] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.UpdateFeedRequest): - request = asset_service.UpdateFeedRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if feed is not None: - request.feed = feed - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_feed] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("feed.name", request.feed.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_feed(self, - request: Optional[Union[asset_service.DeleteFeedRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes an asset feed. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1 - - async def sample_delete_feed(): - # Create a client - client = asset_v1.AssetServiceAsyncClient() - - # Initialize request argument(s) - request = asset_v1.DeleteFeedRequest( - name="name_value", - ) - - # Make the request - await client.delete_feed(request=request) - - Args: - request (Optional[Union[google.cloud.asset_v1.types.DeleteFeedRequest, dict]]): - The request object. - name (:class:`str`): - Required. The name of the feed and it must be in the - format of: projects/project_number/feeds/feed_id - folders/folder_number/feeds/feed_id - organizations/organization_number/feeds/feed_id - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.DeleteFeedRequest): - request = asset_service.DeleteFeedRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_feed] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def search_all_resources(self, - request: Optional[Union[asset_service.SearchAllResourcesRequest, dict]] = None, - *, - scope: Optional[str] = None, - query: Optional[str] = None, - asset_types: Optional[MutableSequence[str]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.SearchAllResourcesAsyncPager: - r"""Searches all Google Cloud resources within the specified scope, - such as a project, folder, or organization. The caller must be - granted the ``cloudasset.assets.searchAllResources`` permission - on the desired scope, otherwise the request will be rejected. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1 - - async def sample_search_all_resources(): - # Create a client - client = asset_v1.AssetServiceAsyncClient() - - # Initialize request argument(s) - request = asset_v1.SearchAllResourcesRequest( - scope="scope_value", - ) - - # Make the request - page_result = client.search_all_resources(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.asset_v1.types.SearchAllResourcesRequest, dict]]): - The request object. Search all resources request. - scope (:class:`str`): - Required. A scope can be a project, a folder, or an - organization. The search is limited to the resources - within the ``scope``. The caller must be granted the - ```cloudasset.assets.searchAllResources`` `__ - permission on the desired scope. - - The allowed values are: - - - projects/{PROJECT_ID} (e.g., "projects/foo-bar") - - projects/{PROJECT_NUMBER} (e.g., "projects/12345678") - - folders/{FOLDER_NUMBER} (e.g., "folders/1234567") - - organizations/{ORGANIZATION_NUMBER} (e.g., - "organizations/123456") - - This corresponds to the ``scope`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - query (:class:`str`): - Optional. The query statement. See `how to construct a - query `__ - for more information. If not specified or empty, it will - search all the resources within the specified ``scope``. - - Examples: - - - ``name:Important`` to find Google Cloud resources - whose name contains ``Important`` as a word. - - ``name=Important`` to find the Google Cloud resource - whose name is exactly ``Important``. - - ``displayName:Impor*`` to find Google Cloud resources - whose display name contains ``Impor`` as a prefix of - any word in the field. - - ``location:us-west*`` to find Google Cloud resources - whose location contains both ``us`` and ``west`` as - prefixes. - - ``labels:prod`` to find Google Cloud resources whose - labels contain ``prod`` as a key or value. - - ``labels.env:prod`` to find Google Cloud resources - that have a label ``env`` and its value is ``prod``. - - ``labels.env:*`` to find Google Cloud resources that - have a label ``env``. - - ``tagKeys:env`` to find Google Cloud resources that - have directly attached tags where the - ```TagKey.namespacedName`` `__ - contains ``env``. - - ``tagValues:prod*`` to find Google Cloud resources - that have directly attached tags where the - ```TagValue.namespacedName`` `__ - contains a word prefixed by ``prod``. - - ``tagValueIds=tagValues/123`` to find Google Cloud - resources that have directly attached tags where the - ```TagValue.name`` `__ - is exactly ``tagValues/123``. - - ``effectiveTagKeys:env`` to find Google Cloud - resources that have directly attached or inherited - tags where the - ```TagKey.namespacedName`` `__ - contains ``env``. - - ``effectiveTagValues:prod*`` to find Google Cloud - resources that have directly attached or inherited - tags where the - ```TagValue.namespacedName`` `__ - contains a word prefixed by ``prod``. 
- - ``effectiveTagValueIds=tagValues/123`` to find Google - Cloud resources that have directly attached or - inherited tags where the - ```TagValue.name`` `__ - is exactly ``tagValues/123``. - - ``kmsKey:key`` to find Google Cloud resources - encrypted with a customer-managed encryption key - whose name contains ``key`` as a word. This field is - deprecated. Use the ``kmsKeys`` field to retrieve - Cloud KMS key information. - - ``kmsKeys:key`` to find Google Cloud resources - encrypted with customer-managed encryption keys whose - name contains the word ``key``. - - ``relationships:instance-group-1`` to find Google - Cloud resources that have relationships with - ``instance-group-1`` in the related resource name. - - ``relationships:INSTANCE_TO_INSTANCEGROUP`` to find - Compute Engine instances that have relationships of - type ``INSTANCE_TO_INSTANCEGROUP``. - - ``relationships.INSTANCE_TO_INSTANCEGROUP:instance-group-1`` - to find Compute Engine instances that have - relationships with ``instance-group-1`` in the - Compute Engine instance group resource name, for - relationship type ``INSTANCE_TO_INSTANCEGROUP``. - - ``sccSecurityMarks.key=value`` to find Cloud - resources that are attached with security marks whose - key is ``key`` and value is ``value``. - - ``sccSecurityMarks.key:*`` to find Cloud resources - that are attached with security marks whose key is - ``key``. - - ``state:ACTIVE`` to find Google Cloud resources whose - state contains ``ACTIVE`` as a word. - - ``NOT state:ACTIVE`` to find Google Cloud resources - whose state doesn't contain ``ACTIVE`` as a word. - - ``createTime<1609459200`` to find Google Cloud - resources that were created before - ``2021-01-01 00:00:00 UTC``. ``1609459200`` is the - epoch timestamp of ``2021-01-01 00:00:00 UTC`` in - seconds. - - ``updateTime>1609459200`` to find Google Cloud - resources that were updated after - ``2021-01-01 00:00:00 UTC``. ``1609459200`` is the - epoch timestamp of ``2021-01-01 00:00:00 UTC`` in - seconds. - - ``Important`` to find Google Cloud resources that - contain ``Important`` as a word in any of the - searchable fields. - - ``Impor*`` to find Google Cloud resources that - contain ``Impor`` as a prefix of any word in any of - the searchable fields. - - ``Important location:(us-west1 OR global)`` to find - Google Cloud resources that contain ``Important`` as - a word in any of the searchable fields and are also - located in the ``us-west1`` region or the ``global`` - location. - - This corresponds to the ``query`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - asset_types (:class:`MutableSequence[str]`): - Optional. A list of asset types that this request - searches for. If empty, it will search all the asset - types `supported by search - APIs `__. - - Regular expressions are also supported. For example: - - - "compute.googleapis.com.*" snapshots resources whose - asset type starts with "compute.googleapis.com". - - ".*Instance" snapshots resources whose asset type - ends with "Instance". - - ".*Instance.*" snapshots resources whose asset type - contains "Instance". - - See `RE2 `__ - for all supported regular expression syntax. If the - regular expression does not match any supported asset - type, an INVALID_ARGUMENT error will be returned. - - This corresponds to the ``asset_types`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.asset_v1.services.asset_service.pagers.SearchAllResourcesAsyncPager: - Search all resources response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [scope, query, asset_types] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.SearchAllResourcesRequest): - request = asset_service.SearchAllResourcesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if scope is not None: - request.scope = scope - if query is not None: - request.query = query - if asset_types: - request.asset_types.extend(asset_types) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.search_all_resources] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("scope", request.scope), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.SearchAllResourcesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def search_all_iam_policies(self, - request: Optional[Union[asset_service.SearchAllIamPoliciesRequest, dict]] = None, - *, - scope: Optional[str] = None, - query: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.SearchAllIamPoliciesAsyncPager: - r"""Searches all IAM policies within the specified scope, such as a - project, folder, or organization. The caller must be granted the - ``cloudasset.assets.searchAllIamPolicies`` permission on the - desired scope, otherwise the request will be rejected. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1 - - async def sample_search_all_iam_policies(): - # Create a client - client = asset_v1.AssetServiceAsyncClient() - - # Initialize request argument(s) - request = asset_v1.SearchAllIamPoliciesRequest( - scope="scope_value", - ) - - # Make the request - page_result = client.search_all_iam_policies(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.asset_v1.types.SearchAllIamPoliciesRequest, dict]]): - The request object. Search all IAM policies request. - scope (:class:`str`): - Required. A scope can be a project, a folder, or an - organization. The search is limited to the IAM policies - within the ``scope``. The caller must be granted the - ```cloudasset.assets.searchAllIamPolicies`` `__ - permission on the desired scope. - - The allowed values are: - - - projects/{PROJECT_ID} (e.g., "projects/foo-bar") - - projects/{PROJECT_NUMBER} (e.g., "projects/12345678") - - folders/{FOLDER_NUMBER} (e.g., "folders/1234567") - - organizations/{ORGANIZATION_NUMBER} (e.g., - "organizations/123456") - - This corresponds to the ``scope`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - query (:class:`str`): - Optional. The query statement. See `how to construct a - query `__ - for more information. If not specified or empty, it will - search all the IAM policies within the specified - ``scope``. Note that the query string is compared - against each IAM policy binding, including its - principals, roles, and IAM conditions. The returned IAM - policies will only contain the bindings that match your - query. To learn more about the IAM policy structure, see - the `IAM policy - documentation `__. - - Examples: - - - ``policy:amy@gmail.com`` to find IAM policy bindings - that specify user "amy@gmail.com". - - ``policy:roles/compute.admin`` to find IAM policy - bindings that specify the Compute Admin role. - - ``policy:comp*`` to find IAM policy bindings that - contain "comp" as a prefix of any word in the - binding. - - ``policy.role.permissions:storage.buckets.update`` to - find IAM policy bindings that specify a role - containing "storage.buckets.update" permission. Note - that if callers don't have ``iam.roles.get`` access - to a role's included permissions, policy bindings - that specify this role will be dropped from the - search results. - - ``policy.role.permissions:upd*`` to find IAM policy - bindings that specify a role containing "upd" as a - prefix of any word in the role permission. Note that - if callers don't have ``iam.roles.get`` access to a - role's included permissions, policy bindings that - specify this role will be dropped from the search - results. - - ``resource:organizations/123456`` to find IAM policy - bindings that are set on "organizations/123456". - - ``resource=//cloudresourcemanager.googleapis.com/projects/myproject`` - to find IAM policy bindings that are set on the - project named "myproject". - - ``Important`` to find IAM policy bindings that - contain "Important" as a word in any of the - searchable fields (except for the included - permissions). - - ``resource:(instance1 OR instance2) policy:amy`` to - find IAM policy bindings that are set on resources - "instance1" or "instance2" and also specify user - "amy". 
- - ``roles:roles/compute.admin`` to find IAM policy - bindings that specify the Compute Admin role. - - ``memberTypes:user`` to find IAM policy bindings that - contain the principal type "user". - - This corresponds to the ``query`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.asset_v1.services.asset_service.pagers.SearchAllIamPoliciesAsyncPager: - Search all IAM policies response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [scope, query] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.SearchAllIamPoliciesRequest): - request = asset_service.SearchAllIamPoliciesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if scope is not None: - request.scope = scope - if query is not None: - request.query = query - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.search_all_iam_policies] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("scope", request.scope), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.SearchAllIamPoliciesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def analyze_iam_policy(self, - request: Optional[Union[asset_service.AnalyzeIamPolicyRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.AnalyzeIamPolicyResponse: - r"""Analyzes IAM policies to answer which identities have - what accesses on which resources. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1 - - async def sample_analyze_iam_policy(): - # Create a client - client = asset_v1.AssetServiceAsyncClient() - - # Initialize request argument(s) - analysis_query = asset_v1.IamPolicyAnalysisQuery() - analysis_query.scope = "scope_value" - - request = asset_v1.AnalyzeIamPolicyRequest( - analysis_query=analysis_query, - ) - - # Make the request - response = await client.analyze_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.asset_v1.types.AnalyzeIamPolicyRequest, dict]]): - The request object. A request message for - [AssetService.AnalyzeIamPolicy][google.cloud.asset.v1.AssetService.AnalyzeIamPolicy]. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.asset_v1.types.AnalyzeIamPolicyResponse: - A response message for - [AssetService.AnalyzeIamPolicy][google.cloud.asset.v1.AssetService.AnalyzeIamPolicy]. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.AnalyzeIamPolicyRequest): - request = asset_service.AnalyzeIamPolicyRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.analyze_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("analysis_query.scope", request.analysis_query.scope), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def analyze_iam_policy_longrunning(self, - request: Optional[Union[asset_service.AnalyzeIamPolicyLongrunningRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Analyzes IAM policies asynchronously to answer which identities - have what accesses on which resources, and writes the analysis - results to a Google Cloud Storage or a BigQuery destination. For - Cloud Storage destination, the output format is the JSON format - that represents a - [AnalyzeIamPolicyResponse][google.cloud.asset.v1.AnalyzeIamPolicyResponse]. - This method implements the - [google.longrunning.Operation][google.longrunning.Operation], - which allows you to track the operation status. We recommend - intervals of at least 2 seconds with exponential backoff retry - to poll the operation result. The metadata contains the metadata - for the long-running operation. 
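-
-        In addition to the Cloud Storage destination shown in the sample
-        below, results can be written to BigQuery instead (a sketch; the
-        project, dataset, and table prefix names are illustrative):
-
-        .. code-block:: python
-
-            output_config = asset_v1.IamPolicyAnalysisOutputConfig()
-            output_config.bigquery_destination.dataset = "projects/my-project/datasets/my_dataset"
-            output_config.bigquery_destination.table_prefix = "analysis"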
- - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1 - - async def sample_analyze_iam_policy_longrunning(): - # Create a client - client = asset_v1.AssetServiceAsyncClient() - - # Initialize request argument(s) - analysis_query = asset_v1.IamPolicyAnalysisQuery() - analysis_query.scope = "scope_value" - - output_config = asset_v1.IamPolicyAnalysisOutputConfig() - output_config.gcs_destination.uri = "uri_value" - - request = asset_v1.AnalyzeIamPolicyLongrunningRequest( - analysis_query=analysis_query, - output_config=output_config, - ) - - # Make the request - operation = client.analyze_iam_policy_longrunning(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.asset_v1.types.AnalyzeIamPolicyLongrunningRequest, dict]]): - The request object. A request message for - [AssetService.AnalyzeIamPolicyLongrunning][google.cloud.asset.v1.AssetService.AnalyzeIamPolicyLongrunning]. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.asset_v1.types.AnalyzeIamPolicyLongrunningResponse` A response message for - [AssetService.AnalyzeIamPolicyLongrunning][google.cloud.asset.v1.AssetService.AnalyzeIamPolicyLongrunning]. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.AnalyzeIamPolicyLongrunningRequest): - request = asset_service.AnalyzeIamPolicyLongrunningRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.analyze_iam_policy_longrunning] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("analysis_query.scope", request.analysis_query.scope), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - asset_service.AnalyzeIamPolicyLongrunningResponse, - metadata_type=asset_service.AnalyzeIamPolicyLongrunningMetadata, - ) - - # Done; return the response. 
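-        # ``response`` is now an AsyncOperation whose eventual result is an
-        # ``AnalyzeIamPolicyLongrunningResponse`` and whose metadata type is
-        # ``AnalyzeIamPolicyLongrunningMetadata``, matching the types passed
-        # to ``operation_async.from_gapic`` above.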
- return response - - async def analyze_move(self, - request: Optional[Union[asset_service.AnalyzeMoveRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.AnalyzeMoveResponse: - r"""Analyze moving a resource to a specified destination - without kicking off the actual move. The analysis is - best effort depending on the user's permissions of - viewing different hierarchical policies and - configurations. The policies and configuration are - subject to change before the actual resource migration - takes place. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1 - - async def sample_analyze_move(): - # Create a client - client = asset_v1.AssetServiceAsyncClient() - - # Initialize request argument(s) - request = asset_v1.AnalyzeMoveRequest( - resource="resource_value", - destination_parent="destination_parent_value", - ) - - # Make the request - response = await client.analyze_move(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.asset_v1.types.AnalyzeMoveRequest, dict]]): - The request object. The request message for performing - resource move analysis. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.asset_v1.types.AnalyzeMoveResponse: - The response message for resource - move analysis. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.AnalyzeMoveRequest): - request = asset_service.AnalyzeMoveRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.analyze_move] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
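- # Note: the routing tuple above becomes the `x-goog-request-params`
- # header with URL-encoded values, e.g. a hypothetical
- # resource="projects/123" is sent as "resource=projects%2F123".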
- return response
-
- async def query_assets(self,
- request: Optional[Union[asset_service.QueryAssetsRequest, dict]] = None,
- *,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> asset_service.QueryAssetsResponse:
- r"""Issue a job that queries assets using a SQL statement compatible
- with BigQuery SQL.
-
- If the query execution finishes within the timeout and there's no
- pagination, the full query results will be returned in the
- ``QueryAssetsResponse``.
-
- Otherwise, full query results can be obtained by issuing extra
- requests with the ``job_reference`` from a previous
- ``QueryAssets`` call.
-
- Note that the query result has an approximately 10 GB size limit
- enforced by BigQuery; queries that return larger results will
- result in errors.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import asset_v1
-
- async def sample_query_assets():
- # Create a client
- client = asset_v1.AssetServiceAsyncClient()
-
- # Initialize request argument(s)
- request = asset_v1.QueryAssetsRequest(
- statement="statement_value",
- parent="parent_value",
- )
-
- # Make the request
- response = await client.query_assets(request=request)
-
- # Handle the response
- print(response)
-
- Args:
- request (Optional[Union[google.cloud.asset_v1.types.QueryAssetsRequest, dict]]):
- The request object. QueryAssets request.
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.asset_v1.types.QueryAssetsResponse:
- QueryAssets response.
- """
- # Create or coerce a protobuf request object.
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, asset_service.QueryAssetsRequest):
- request = asset_service.QueryAssetsRequest(request)
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._client._transport._wrapped_methods[self._client._transport.query_assets]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("parent", request.parent),
- )),
- )
-
- # Validate the universe domain.
- self._client._validate_universe_domain()
-
- # Send the request.
- response = await rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
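- # Note: when the returned response is not yet `done`, the remaining
- # results can be fetched by reissuing the request with the response's
- # `job_reference`, e.g. (a sketch):
- #   follow_up = asset_service.QueryAssetsRequest(
- #       parent=request.parent, job_reference=response.job_reference)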
- return response - - async def create_saved_query(self, - request: Optional[Union[asset_service.CreateSavedQueryRequest, dict]] = None, - *, - parent: Optional[str] = None, - saved_query: Optional[asset_service.SavedQuery] = None, - saved_query_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.SavedQuery: - r"""Creates a saved query in a parent - project/folder/organization. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1 - - async def sample_create_saved_query(): - # Create a client - client = asset_v1.AssetServiceAsyncClient() - - # Initialize request argument(s) - request = asset_v1.CreateSavedQueryRequest( - parent="parent_value", - saved_query_id="saved_query_id_value", - ) - - # Make the request - response = await client.create_saved_query(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.asset_v1.types.CreateSavedQueryRequest, dict]]): - The request object. Request to create a saved query. - parent (:class:`str`): - Required. The name of the project/folder/organization - where this saved_query should be created in. It can only - be an organization number (such as "organizations/123"), - a folder number (such as "folders/123"), a project ID - (such as "projects/my-project-id"), or a project number - (such as "projects/12345"). - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - saved_query (:class:`google.cloud.asset_v1.types.SavedQuery`): - Required. The saved_query details. The ``name`` field - must be empty as it will be generated based on the - parent and saved_query_id. - - This corresponds to the ``saved_query`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - saved_query_id (:class:`str`): - Required. The ID to use for the saved query, which must - be unique in the specified parent. It will become the - final component of the saved query's resource name. - - This value should be 4-63 characters, and valid - characters are ``[a-z][0-9]-``. - - Notice that this field is required in the saved query - creation, and the ``name`` field of the ``saved_query`` - will be ignored. - - This corresponds to the ``saved_query_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.asset_v1.types.SavedQuery: - A saved query which can be shared - with others or used later. - - """ - # Create or coerce a protobuf request object. 
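- # - Two calling styles are supported: a full request object, or the
- # flattened parent/saved_query/saved_query_id keywords; mixing both
- # raises ValueError below.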
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, saved_query, saved_query_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.CreateSavedQueryRequest): - request = asset_service.CreateSavedQueryRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if saved_query is not None: - request.saved_query = saved_query - if saved_query_id is not None: - request.saved_query_id = saved_query_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_saved_query] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_saved_query(self, - request: Optional[Union[asset_service.GetSavedQueryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.SavedQuery: - r"""Gets details about a saved query. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1 - - async def sample_get_saved_query(): - # Create a client - client = asset_v1.AssetServiceAsyncClient() - - # Initialize request argument(s) - request = asset_v1.GetSavedQueryRequest( - name="name_value", - ) - - # Make the request - response = await client.get_saved_query(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.asset_v1.types.GetSavedQueryRequest, dict]]): - The request object. Request to get a saved query. - name (:class:`str`): - Required. The name of the saved query and it must be in - the format of: - - - projects/project_number/savedQueries/saved_query_id - - folders/folder_number/savedQueries/saved_query_id - - organizations/organization_number/savedQueries/saved_query_id - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.asset_v1.types.SavedQuery: - A saved query which can be shared - with others or used later. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.GetSavedQueryRequest): - request = asset_service.GetSavedQueryRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_saved_query] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_saved_queries(self, - request: Optional[Union[asset_service.ListSavedQueriesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListSavedQueriesAsyncPager: - r"""Lists all saved queries in a parent - project/folder/organization. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1 - - async def sample_list_saved_queries(): - # Create a client - client = asset_v1.AssetServiceAsyncClient() - - # Initialize request argument(s) - request = asset_v1.ListSavedQueriesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_saved_queries(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.asset_v1.types.ListSavedQueriesRequest, dict]]): - The request object. Request to list saved queries. - parent (:class:`str`): - Required. The parent - project/folder/organization whose - savedQueries are to be listed. 
It can only be a
- project/folder/organization number (such
- as "folders/12345"), or a project ID
- (such as "projects/my-project-id").
-
- This corresponds to the ``parent`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.asset_v1.services.asset_service.pagers.ListSavedQueriesAsyncPager:
- Response of listing saved queries.
-
- Iterating over this object will yield
- results and resolve additional pages
- automatically.
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [parent]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError("If the `request` argument is set, then none of "
- "the individual field arguments should be set.")
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, asset_service.ListSavedQueriesRequest):
- request = asset_service.ListSavedQueriesRequest(request)
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if parent is not None:
- request.parent = parent
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._client._transport._wrapped_methods[self._client._transport.list_saved_queries]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("parent", request.parent),
- )),
- )
-
- # Validate the universe domain.
- self._client._validate_universe_domain()
-
- # Send the request.
- response = await rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # This method is paged; wrap the response in a pager, which provides
- # an `__aiter__` convenience method.
- response = pagers.ListSavedQueriesAsyncPager(
- method=rpc,
- request=request,
- response=response,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- async def update_saved_query(self,
- request: Optional[Union[asset_service.UpdateSavedQueryRequest, dict]] = None,
- *,
- saved_query: Optional[asset_service.SavedQuery] = None,
- update_mask: Optional[field_mask_pb2.FieldMask] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> asset_service.SavedQuery:
- r"""Updates a saved query.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1 - - async def sample_update_saved_query(): - # Create a client - client = asset_v1.AssetServiceAsyncClient() - - # Initialize request argument(s) - request = asset_v1.UpdateSavedQueryRequest( - ) - - # Make the request - response = await client.update_saved_query(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.asset_v1.types.UpdateSavedQueryRequest, dict]]): - The request object. Request to update a saved query. - saved_query (:class:`google.cloud.asset_v1.types.SavedQuery`): - Required. The saved query to update. - - The saved query's ``name`` field is used to identify the - one to update, which has format as below: - - - projects/project_number/savedQueries/saved_query_id - - folders/folder_number/savedQueries/saved_query_id - - organizations/organization_number/savedQueries/saved_query_id - - This corresponds to the ``saved_query`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. The list of fields to - update. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.asset_v1.types.SavedQuery: - A saved query which can be shared - with others or used later. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [saved_query, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.UpdateSavedQueryRequest): - request = asset_service.UpdateSavedQueryRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if saved_query is not None: - request.saved_query = saved_query - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_saved_query] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("saved_query.name", request.saved_query.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
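- # Note: only fields named in `update_mask` are changed server-side; a
- # hypothetical field_mask_pb2.FieldMask(paths=["description"]) would
- # update the description and leave all other fields untouched.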
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_saved_query(self, - request: Optional[Union[asset_service.DeleteSavedQueryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a saved query. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1 - - async def sample_delete_saved_query(): - # Create a client - client = asset_v1.AssetServiceAsyncClient() - - # Initialize request argument(s) - request = asset_v1.DeleteSavedQueryRequest( - name="name_value", - ) - - # Make the request - await client.delete_saved_query(request=request) - - Args: - request (Optional[Union[google.cloud.asset_v1.types.DeleteSavedQueryRequest, dict]]): - The request object. Request to delete a saved query. - name (:class:`str`): - Required. The name of the saved query to delete. It must - be in the format of: - - - projects/project_number/savedQueries/saved_query_id - - folders/folder_number/savedQueries/saved_query_id - - organizations/organization_number/savedQueries/saved_query_id - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.DeleteSavedQueryRequest): - request = asset_service.DeleteSavedQueryRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_saved_query] - - # Certain fields should be provided within the metadata header; - # add these here. 
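- # (For deletes that is the saved query's `name`, sent via the
- # x-goog-request-params routing header.)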
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def batch_get_effective_iam_policies(self, - request: Optional[Union[asset_service.BatchGetEffectiveIamPoliciesRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.BatchGetEffectiveIamPoliciesResponse: - r"""Gets effective IAM policies for a batch of resources. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1 - - async def sample_batch_get_effective_iam_policies(): - # Create a client - client = asset_v1.AssetServiceAsyncClient() - - # Initialize request argument(s) - request = asset_v1.BatchGetEffectiveIamPoliciesRequest( - scope="scope_value", - names=['names_value1', 'names_value2'], - ) - - # Make the request - response = await client.batch_get_effective_iam_policies(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.asset_v1.types.BatchGetEffectiveIamPoliciesRequest, dict]]): - The request object. A request message for - [AssetService.BatchGetEffectiveIamPolicies][google.cloud.asset.v1.AssetService.BatchGetEffectiveIamPolicies]. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.asset_v1.types.BatchGetEffectiveIamPoliciesResponse: - A response message for - [AssetService.BatchGetEffectiveIamPolicies][google.cloud.asset.v1.AssetService.BatchGetEffectiveIamPolicies]. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.BatchGetEffectiveIamPoliciesRequest): - request = asset_service.BatchGetEffectiveIamPoliciesRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.batch_get_effective_iam_policies] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("scope", request.scope), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
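- # Note: per the API contract, `policy_results` in the response are in
- # the same order as the requested `names`.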
- return response - - async def analyze_org_policies(self, - request: Optional[Union[asset_service.AnalyzeOrgPoliciesRequest, dict]] = None, - *, - scope: Optional[str] = None, - constraint: Optional[str] = None, - filter: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.AnalyzeOrgPoliciesAsyncPager: - r"""Analyzes organization policies under a scope. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1 - - async def sample_analyze_org_policies(): - # Create a client - client = asset_v1.AssetServiceAsyncClient() - - # Initialize request argument(s) - request = asset_v1.AnalyzeOrgPoliciesRequest( - scope="scope_value", - constraint="constraint_value", - ) - - # Make the request - page_result = client.analyze_org_policies(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.asset_v1.types.AnalyzeOrgPoliciesRequest, dict]]): - The request object. A request message for - [AssetService.AnalyzeOrgPolicies][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicies]. - scope (:class:`str`): - Required. The organization to scope the request. Only - organization policies within the scope will be analyzed. - - - organizations/{ORGANIZATION_NUMBER} (e.g., - "organizations/123456") - - This corresponds to the ``scope`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - constraint (:class:`str`): - Required. The name of the constraint - to analyze organization policies for. - The response only contains analyzed - organization policies for the provided - constraint. - - This corresponds to the ``constraint`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - filter (:class:`str`): - The expression to filter - [AnalyzeOrgPoliciesResponse.org_policy_results][google.cloud.asset.v1.AnalyzeOrgPoliciesResponse.org_policy_results]. - Filtering is currently available for bare literal values - and the following fields: - - - consolidated_policy.attached_resource - - consolidated_policy.rules.enforce - - When filtering by a specific field, the only supported - operator is ``=``. For example, filtering by - consolidated_policy.attached_resource="//cloudresourcemanager.googleapis.com/folders/001" - will return all the Organization Policy results attached - to "folders/001". - - This corresponds to the ``filter`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- - Returns: - google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPoliciesAsyncPager: - The response message for - [AssetService.AnalyzeOrgPolicies][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicies]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [scope, constraint, filter] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.AnalyzeOrgPoliciesRequest): - request = asset_service.AnalyzeOrgPoliciesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if scope is not None: - request.scope = scope - if constraint is not None: - request.constraint = constraint - if filter is not None: - request.filter = filter - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.analyze_org_policies] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("scope", request.scope), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.AnalyzeOrgPoliciesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def analyze_org_policy_governed_containers(self, - request: Optional[Union[asset_service.AnalyzeOrgPolicyGovernedContainersRequest, dict]] = None, - *, - scope: Optional[str] = None, - constraint: Optional[str] = None, - filter: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.AnalyzeOrgPolicyGovernedContainersAsyncPager: - r"""Analyzes organization policies governed containers - (projects, folders or organization) under a scope. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1 - - async def sample_analyze_org_policy_governed_containers(): - # Create a client - client = asset_v1.AssetServiceAsyncClient() - - # Initialize request argument(s) - request = asset_v1.AnalyzeOrgPolicyGovernedContainersRequest( - scope="scope_value", - constraint="constraint_value", - ) - - # Make the request - page_result = client.analyze_org_policy_governed_containers(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedContainersRequest, dict]]): - The request object. A request message for - [AssetService.AnalyzeOrgPolicyGovernedContainers][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedContainers]. - scope (:class:`str`): - Required. The organization to scope the request. Only - organization policies within the scope will be analyzed. - The output containers will also be limited to the ones - governed by those in-scope organization policies. - - - organizations/{ORGANIZATION_NUMBER} (e.g., - "organizations/123456") - - This corresponds to the ``scope`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - constraint (:class:`str`): - Required. The name of the constraint - to analyze governed containers for. The - analysis only contains organization - policies for the provided constraint. - - This corresponds to the ``constraint`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - filter (:class:`str`): - The expression to filter - [AnalyzeOrgPolicyGovernedContainersResponse.governed_containers][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedContainersResponse.governed_containers]. - Filtering is currently available for bare literal values - and the following fields: - - - parent - - consolidated_policy.rules.enforce - - When filtering by a specific field, the only supported - operator is ``=``. For example, filtering by - parent="//cloudresourcemanager.googleapis.com/folders/001" - will return all the containers under "folders/001". - - This corresponds to the ``filter`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPolicyGovernedContainersAsyncPager: - The response message for - [AssetService.AnalyzeOrgPolicyGovernedContainers][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedContainers]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
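- # (For this method the flattened keywords are scope, constraint and
- # filter, mirroring the request fields of the same names.)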
- flattened_params = [scope, constraint, filter] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.AnalyzeOrgPolicyGovernedContainersRequest): - request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if scope is not None: - request.scope = scope - if constraint is not None: - request.constraint = constraint - if filter is not None: - request.filter = filter - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.analyze_org_policy_governed_containers] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("scope", request.scope), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.AnalyzeOrgPolicyGovernedContainersAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def analyze_org_policy_governed_assets(self, - request: Optional[Union[asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, dict]] = None, - *, - scope: Optional[str] = None, - constraint: Optional[str] = None, - filter: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.AnalyzeOrgPolicyGovernedAssetsAsyncPager: - r"""Analyzes organization policies governed assets (Google Cloud - resources or policies) under a scope. 
This RPC supports custom
- constraints and the following canned constraints:
-
- - constraints/ainotebooks.accessMode
- - constraints/ainotebooks.disableFileDownloads
- - constraints/ainotebooks.disableRootAccess
- - constraints/ainotebooks.disableTerminal
- - constraints/ainotebooks.environmentOptions
- - constraints/ainotebooks.requireAutoUpgradeSchedule
- - constraints/ainotebooks.restrictVpcNetworks
- - constraints/compute.disableGuestAttributesAccess
- - constraints/compute.disableInstanceDataAccessApis
- - constraints/compute.disableNestedVirtualization
- - constraints/compute.disableSerialPortAccess
- - constraints/compute.disableSerialPortLogging
- - constraints/compute.disableVpcExternalIpv6
- - constraints/compute.requireOsLogin
- - constraints/compute.requireShieldedVm
- - constraints/compute.restrictLoadBalancerCreationForTypes
- - constraints/compute.restrictProtocolForwardingCreationForTypes
- - constraints/compute.restrictXpnProjectLienRemoval
- - constraints/compute.setNewProjectDefaultToZonalDNSOnly
- - constraints/compute.skipDefaultNetworkCreation
- - constraints/compute.trustedImageProjects
- - constraints/compute.vmCanIpForward
- - constraints/compute.vmExternalIpAccess
- - constraints/gcp.detailedAuditLoggingMode
- - constraints/gcp.resourceLocations
- - constraints/iam.allowedPolicyMemberDomains
- - constraints/iam.automaticIamGrantsForDefaultServiceAccounts
- - constraints/iam.disableServiceAccountCreation
- - constraints/iam.disableServiceAccountKeyCreation
- - constraints/iam.disableServiceAccountKeyUpload
- - constraints/iam.restrictCrossProjectServiceAccountLienRemoval
- - constraints/iam.serviceAccountKeyExpiryHours
- - constraints/resourcemanager.accessBoundaries
- - constraints/resourcemanager.allowedExportDestinations
- - constraints/sql.restrictAuthorizedNetworks
- - constraints/sql.restrictNoncompliantDiagnosticDataAccess
- - constraints/sql.restrictNoncompliantResourceCreation
- - constraints/sql.restrictPublicIp
- - constraints/storage.publicAccessPrevention
- - constraints/storage.restrictAuthTypes
- - constraints/storage.uniformBucketLevelAccess
-
- This RPC only returns either resources of types supported by
- search APIs, or IAM policies.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import asset_v1
-
- async def sample_analyze_org_policy_governed_assets():
- # Create a client
- client = asset_v1.AssetServiceAsyncClient()
-
- # Initialize request argument(s)
- request = asset_v1.AnalyzeOrgPolicyGovernedAssetsRequest(
- scope="scope_value",
- constraint="constraint_value",
- )
-
- # Make the request
- page_result = client.analyze_org_policy_governed_assets(request=request)
-
- # Handle the response
- async for response in page_result:
- print(response)
-
- Args:
- request (Optional[Union[google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsRequest, dict]]):
- The request object. A request message for
- [AssetService.AnalyzeOrgPolicyGovernedAssets][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedAssets].
- scope (:class:`str`):
- Required. The organization to scope the request.
Only
- organization policies within the scope will be analyzed.
- The output assets will also be limited to the ones
- governed by those in-scope organization policies.
-
- - organizations/{ORGANIZATION_NUMBER} (e.g.,
- "organizations/123456")
-
- This corresponds to the ``scope`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- constraint (:class:`str`):
- Required. The name of the constraint
- to analyze governed assets for. The
- analysis only contains analyzed
- organization policies for the provided
- constraint.
-
- This corresponds to the ``constraint`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- filter (:class:`str`):
- The expression to filter
- [AnalyzeOrgPolicyGovernedAssetsResponse.governed_assets][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsResponse.governed_assets].
-
- For governed resources, filtering is currently available
- for bare literal values and the following fields:
-
- - governed_resource.project
- - governed_resource.folders
- - consolidated_policy.rules.enforce
-
- When filtering by ``governed_resource.project`` or
- ``consolidated_policy.rules.enforce``, the only
- supported operator is ``=``. When filtering by
- ``governed_resource.folders``, the supported
- operators are ``=`` and ``:``. For example, filtering
- by ``governed_resource.project="projects/12345678"``
- will return all the governed resources under
- "projects/12345678", including the project itself if
- applicable.
-
- For governed IAM policies, filtering is currently
- available for bare literal values and the following
- fields:
-
- - governed_iam_policy.project
- - governed_iam_policy.folders
- - consolidated_policy.rules.enforce
-
- When filtering by ``governed_iam_policy.project`` or
- ``consolidated_policy.rules.enforce``, the only
- supported operator is ``=``. When filtering by
- ``governed_iam_policy.folders``, the supported
- operators are ``=`` and ``:``. For example, filtering
- by ``governed_iam_policy.folders:"folders/12345678"``
- will return all the governed IAM policies under
- "folders/12345678".
-
- This corresponds to the ``filter`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPolicyGovernedAssetsAsyncPager:
- The response message for
- [AssetService.AnalyzeOrgPolicyGovernedAssets][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedAssets].
-
- Iterating over this object will yield results and
- resolve additional pages automatically.
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [scope, constraint, filter] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.AnalyzeOrgPolicyGovernedAssetsRequest): - request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if scope is not None: - request.scope = scope - if constraint is not None: - request.constraint = constraint - if filter is not None: - request.filter = filter - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.analyze_org_policy_governed_assets] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("scope", request.scope), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.AnalyzeOrgPolicyGovernedAssetsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
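- # Note: this generic method polls any operation by resource name, e.g.
- # one started by analyze_iam_policy_longrunning, when the typed
- # AsyncOperation future is not at hand.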
- response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def __aenter__(self) -> "AssetServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "AssetServiceAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/services/asset_service/client.py b/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/services/asset_service/client.py deleted file mode 100644 index 9cea21d4cfb5..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/services/asset_service/client.py +++ /dev/null @@ -1,3713 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.asset_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.asset_v1.services.asset_service import pagers -from google.cloud.asset_v1.types import asset_service -from google.cloud.asset_v1.types import assets -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -from google.type import expr_pb2 # type: ignore -from .transports.base import AssetServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import AssetServiceGrpcTransport -from .transports.grpc_asyncio import AssetServiceGrpcAsyncIOTransport -from .transports.rest import 
import AssetServiceRestTransport
-
-
-class AssetServiceClientMeta(type):
-    """Metaclass for the AssetService client.
-
-    This provides class-level methods for building and retrieving
-    support objects (e.g. transport) without polluting the client instance
-    objects.
-    """
-    _transport_registry = OrderedDict()  # type: Dict[str, Type[AssetServiceTransport]]
-    _transport_registry["grpc"] = AssetServiceGrpcTransport
-    _transport_registry["grpc_asyncio"] = AssetServiceGrpcAsyncIOTransport
-    _transport_registry["rest"] = AssetServiceRestTransport
-
-    def get_transport_class(cls,
-            label: Optional[str] = None,
-        ) -> Type[AssetServiceTransport]:
-        """Returns an appropriate transport class.
-
-        Args:
-            label: The name of the desired transport. If none is
-                provided, then the first transport in the registry is used.
-
-        Returns:
-            The transport class to use.
-        """
-        # If a specific transport is requested, return that one.
-        if label:
-            return cls._transport_registry[label]
-
-        # No transport is requested; return the default (that is, the first one
-        # in the dictionary).
-        return next(iter(cls._transport_registry.values()))
-
-
-class AssetServiceClient(metaclass=AssetServiceClientMeta):
-    """Asset service definition."""
-
-    @staticmethod
-    def _get_default_mtls_endpoint(api_endpoint):
-        """Converts api endpoint to mTLS endpoint.
-
-        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
-        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
-        Args:
-            api_endpoint (Optional[str]): the api endpoint to convert.
-        Returns:
-            str: converted mTLS api endpoint.
-        """
-        if not api_endpoint:
-            return api_endpoint
-
-        mtls_endpoint_re = re.compile(
-            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
-        )
-
-        m = mtls_endpoint_re.match(api_endpoint)
-        name, mtls, sandbox, googledomain = m.groups()
-        if mtls or not googledomain:
-            return api_endpoint
-
-        if sandbox:
-            return api_endpoint.replace(
-                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
-            )
-
-        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
-
-    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
-    DEFAULT_ENDPOINT = "cloudasset.googleapis.com"
-    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
-        DEFAULT_ENDPOINT
-    )
-
-    _DEFAULT_ENDPOINT_TEMPLATE = "cloudasset.{UNIVERSE_DOMAIN}"
-    _DEFAULT_UNIVERSE = "googleapis.com"
-
-    @classmethod
-    def from_service_account_info(cls, info: dict, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            info.
-
-        Args:
-            info (dict): The service account private key info.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            AssetServiceClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_info(info)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    @classmethod
-    def from_service_account_file(cls, filename: str, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            file.
-
-        Args:
-            filename (str): The path to the service account private key json
-                file.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            AssetServiceClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_file(
-            filename)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    from_service_account_json = from_service_account_file
-
-    @property
-    def transport(self) -> AssetServiceTransport:
-        """Returns the transport used by the client instance.
-
-        Returns:
-            AssetServiceTransport: The transport used by the client
-                instance.
-        """
-        return self._transport
-
-    @staticmethod
-    def access_level_path(access_policy: str,access_level: str,) -> str:
-        """Returns a fully-qualified access_level string."""
-        return "accessPolicies/{access_policy}/accessLevels/{access_level}".format(access_policy=access_policy, access_level=access_level, )
-
-    @staticmethod
-    def parse_access_level_path(path: str) -> Dict[str,str]:
-        """Parses an access_level path into its component segments."""
-        m = re.match(r"^accessPolicies/(?P<access_policy>.+?)/accessLevels/(?P<access_level>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def access_policy_path(access_policy: str,) -> str:
-        """Returns a fully-qualified access_policy string."""
-        return "accessPolicies/{access_policy}".format(access_policy=access_policy, )
-
-    @staticmethod
-    def parse_access_policy_path(path: str) -> Dict[str,str]:
-        """Parses an access_policy path into its component segments."""
-        m = re.match(r"^accessPolicies/(?P<access_policy>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def asset_path() -> str:
-        """Returns a fully-qualified asset string."""
-        return "*".format()
-
-    @staticmethod
-    def parse_asset_path(path: str) -> Dict[str,str]:
-        """Parses an asset path into its component segments."""
-        m = re.match(r"^.*$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def feed_path(project: str,feed: str,) -> str:
-        """Returns a fully-qualified feed string."""
-        return "projects/{project}/feeds/{feed}".format(project=project, feed=feed, )
-
-    @staticmethod
-    def parse_feed_path(path: str) -> Dict[str,str]:
-        """Parses a feed path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/feeds/(?P<feed>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def inventory_path(project: str,location: str,instance: str,) -> str:
-        """Returns a fully-qualified inventory string."""
-        return "projects/{project}/locations/{location}/instances/{instance}/inventory".format(project=project, location=location, instance=instance, )
-
-    @staticmethod
-    def parse_inventory_path(path: str) -> Dict[str,str]:
-        """Parses an inventory path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/instances/(?P<instance>.+?)/inventory$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def saved_query_path(project: str,saved_query: str,) -> str:
-        """Returns a fully-qualified saved_query string."""
-        return "projects/{project}/savedQueries/{saved_query}".format(project=project, saved_query=saved_query, )
-
-    @staticmethod
-    def parse_saved_query_path(path: str) -> Dict[str,str]:
-        """Parses a saved_query path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/savedQueries/(?P<saved_query>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def service_perimeter_path(access_policy: str,service_perimeter: str,) -> str:
-        """Returns a fully-qualified service_perimeter string."""
-        return "accessPolicies/{access_policy}/servicePerimeters/{service_perimeter}".format(access_policy=access_policy, service_perimeter=service_perimeter, )
-
-    @staticmethod
-    def parse_service_perimeter_path(path: str) -> Dict[str,str]:
-        """Parses a service_perimeter path into its component segments."""
-        m = re.match(r"^accessPolicies/(?P<access_policy>.+?)/servicePerimeters/(?P<service_perimeter>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_billing_account_path(billing_account: str, ) -> str:
-        """Returns a fully-qualified billing_account string."""
-        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
-
-    @staticmethod
-    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
-        """Parse a billing_account path into its component segments."""
-        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_folder_path(folder: str, ) -> str:
-        """Returns a fully-qualified folder string."""
-        return "folders/{folder}".format(folder=folder, )
-
-    @staticmethod
-    def parse_common_folder_path(path: str) -> Dict[str,str]:
-        """Parse a folder path into its component segments."""
-        m = re.match(r"^folders/(?P<folder>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_organization_path(organization: str, ) -> str:
-        """Returns a fully-qualified organization string."""
-        return "organizations/{organization}".format(organization=organization, )
-
-    @staticmethod
-    def parse_common_organization_path(path: str) -> Dict[str,str]:
-        """Parse an organization path into its component segments."""
-        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_project_path(project: str, ) -> str:
-        """Returns a fully-qualified project string."""
-        return "projects/{project}".format(project=project, )
-
-    @staticmethod
-    def parse_common_project_path(path: str) -> Dict[str,str]:
-        """Parse a project path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_location_path(project: str, location: str, ) -> str:
-        """Returns a fully-qualified location string."""
-        return "projects/{project}/locations/{location}".format(project=project, location=location, )
-
-    @staticmethod
-    def parse_common_location_path(path: str) -> Dict[str,str]:
-        """Parse a location path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @classmethod
-    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
-        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
-
-        The client cert source is determined in the following order:
-        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
-        client cert source is None.
-        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
-        default client cert source exists, use the default one; otherwise the client cert
-        source is None.
-
-        The API endpoint is determined in the following order:
-        (1) if `client_options.api_endpoint` is provided, use the provided one.
-        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
-        default mTLS endpoint; if the environment variable is "never", use the default API
-        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
-        use the default API endpoint.
-
-        More details can be found at https://google.aip.dev/auth/4114.
-
-        Args:
-            client_options (google.api_core.client_options.ClientOptions): Custom options for the
-                client.
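The resource-path helpers above are plain string formatting and parsing, so they can be exercised without credentials or a network connection. A minimal sketch (the project and feed IDs below are made-up placeholders):

.. code-block:: python

    from google.cloud import asset_v1

    # Build a fully-qualified feed name from its components.
    feed_name = asset_v1.AssetServiceClient.feed_path("my-project", "my-feed")
    assert feed_name == "projects/my-project/feeds/my-feed"

    # Parse it back into its segments; a non-matching path yields {}.
    assert asset_v1.AssetServiceClient.parse_feed_path(feed_name) == {
        "project": "my-project",
        "feed": "my-feed",
    }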
Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. - - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"]. - """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. 
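The two environment variables validated above accept only a fixed set of values. A small sketch of that validation, poking at the private helper purely for illustration (private API, so this may change without notice):

.. code-block:: python

    import os

    from google.cloud.asset_v1.services.asset_service.client import AssetServiceClient

    # With none of the variables set, the defaults come back.
    for var in ("GOOGLE_API_USE_CLIENT_CERTIFICATE",
                "GOOGLE_API_USE_MTLS_ENDPOINT",
                "GOOGLE_CLOUD_UNIVERSE_DOMAIN"):
        os.environ.pop(var, None)
    assert AssetServiceClient._read_environment_variables() == (False, "auto", None)

    # An out-of-range value is rejected with MutualTLSChannelError.
    os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "sometimes"
    try:
        AssetServiceClient._read_environment_variables()
    except Exception as exc:
        print(type(exc).__name__)  # MutualTLSChannelError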
- """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = AssetServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = AssetServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. - """ - universe_domain = AssetServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
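The precedence implemented by ``_get_api_endpoint`` above is easiest to see with concrete inputs. An illustrative sketch against the private helper (again, not public API; the localhost endpoint is a placeholder):

.. code-block:: python

    from google.cloud.asset_v1.services.asset_service.client import AssetServiceClient

    # 1. An explicit api_endpoint override always wins.
    assert AssetServiceClient._get_api_endpoint(
        "localhost:7469", None, "googleapis.com", "never") == "localhost:7469"

    # 2. Otherwise the universe domain is substituted into _DEFAULT_ENDPOINT_TEMPLATE.
    assert AssetServiceClient._get_api_endpoint(
        None, None, "googleapis.com", "auto") == "cloudasset.googleapis.com"
    assert AssetServiceClient._get_api_endpoint(
        None, None, "example-universe.dev", "never") == "cloudasset.example-universe.dev"

    # 3. "always" (or "auto" with a client certificate) selects the mTLS
    #    endpoint, which is only supported in the default universe.
    assert AssetServiceClient._get_api_endpoint(
        None, None, "googleapis.com", "always") == "cloudasset.mtls.googleapis.com"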
- """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, AssetServiceTransport, Callable[..., AssetServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the asset service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,AssetServiceTransport,Callable[..., AssetServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the AssetServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) - - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = AssetServiceClient._read_environment_variables() - self._client_cert_source = AssetServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = AssetServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER - # Setup logging. - client_logging.initialize_logging() - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, AssetServiceTransport) - if transport_provided: - # transport is a AssetServiceTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = cast(AssetServiceTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - AssetServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[AssetServiceTransport], Callable[..., AssetServiceTransport]] = ( - AssetServiceClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., AssetServiceTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.asset_v1.AssetServiceClient`.", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.cloud.asset.v1.AssetService", - "credentialsType": None, - } - ) - - def export_assets(self, - request: Optional[Union[asset_service.ExportAssetsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Exports assets with time and resource types to a given Cloud - Storage location/BigQuery table. For Cloud Storage location - destinations, the output format is newline-delimited JSON. Each - line represents a - [google.cloud.asset.v1.Asset][google.cloud.asset.v1.Asset] in - the JSON format; for BigQuery table destinations, the output - table stores the fields in asset Protobuf as columns. This API - implements the - [google.longrunning.Operation][google.longrunning.Operation] - API, which allows you to keep track of the export. We recommend - intervals of at least 2 seconds with exponential retry to poll - the export operation result. For regular-size resource parent, - the export operation usually finishes within 5 minutes. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1 - - def sample_export_assets(): - # Create a client - client = asset_v1.AssetServiceClient() - - # Initialize request argument(s) - output_config = asset_v1.OutputConfig() - output_config.gcs_destination.uri = "uri_value" - - request = asset_v1.ExportAssetsRequest( - parent="parent_value", - output_config=output_config, - ) - - # Make the request - operation = client.export_assets(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.asset_v1.types.ExportAssetsRequest, dict]): - The request object. Export asset request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.asset_v1.types.ExportAssetsResponse` The export asset response. This message is returned by the - [google.longrunning.Operations.GetOperation][google.longrunning.Operations.GetOperation] - method in the returned - [google.longrunning.Operation.response][google.longrunning.Operation.response] - field. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.ExportAssetsRequest): - request = asset_service.ExportAssetsRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.export_assets] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - asset_service.ExportAssetsResponse, - metadata_type=asset_service.ExportAssetsRequest, - ) - - # Done; return the response. - return response - - def list_assets(self, - request: Optional[Union[asset_service.ListAssetsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListAssetsPager: - r"""Lists assets with time and resource types and returns - paged results in response. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1 - - def sample_list_assets(): - # Create a client - client = asset_v1.AssetServiceClient() - - # Initialize request argument(s) - request = asset_v1.ListAssetsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_assets(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.asset_v1.types.ListAssetsRequest, dict]): - The request object. ListAssets request. - parent (str): - Required. Name of the organization, folder, or project - the assets belong to. Format: - "organizations/[organization-number]" (such as - "organizations/123"), "projects/[project-id]" (such as - "projects/my-project-id"), "projects/[project-number]" - (such as "projects/12345"), or "folders/[folder-number]" - (such as "folders/12345"). - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.asset_v1.services.asset_service.pagers.ListAssetsPager: - ListAssets response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.ListAssetsRequest): - request = asset_service.ListAssetsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_assets] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. 
- response = pagers.ListAssetsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def batch_get_assets_history(self, - request: Optional[Union[asset_service.BatchGetAssetsHistoryRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.BatchGetAssetsHistoryResponse: - r"""Batch gets the update history of assets that overlap a time - window. For IAM_POLICY content, this API outputs history when - the asset and its attached IAM POLICY both exist. This can - create gaps in the output history. Otherwise, this API outputs - history with asset in both non-delete or deleted status. If a - specified asset does not exist, this API returns an - INVALID_ARGUMENT error. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1 - - def sample_batch_get_assets_history(): - # Create a client - client = asset_v1.AssetServiceClient() - - # Initialize request argument(s) - request = asset_v1.BatchGetAssetsHistoryRequest( - parent="parent_value", - ) - - # Make the request - response = client.batch_get_assets_history(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.asset_v1.types.BatchGetAssetsHistoryRequest, dict]): - The request object. Batch get assets history request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.asset_v1.types.BatchGetAssetsHistoryResponse: - Batch get assets history response. - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.BatchGetAssetsHistoryRequest): - request = asset_service.BatchGetAssetsHistoryRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.batch_get_assets_history] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def create_feed(self, - request: Optional[Union[asset_service.CreateFeedRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.Feed: - r"""Creates a feed in a parent - project/folder/organization to listen to its asset - updates. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1 - - def sample_create_feed(): - # Create a client - client = asset_v1.AssetServiceClient() - - # Initialize request argument(s) - feed = asset_v1.Feed() - feed.name = "name_value" - - request = asset_v1.CreateFeedRequest( - parent="parent_value", - feed_id="feed_id_value", - feed=feed, - ) - - # Make the request - response = client.create_feed(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.asset_v1.types.CreateFeedRequest, dict]): - The request object. Create asset feed request. - parent (str): - Required. The name of the - project/folder/organization where this - feed should be created in. It can only - be an organization number (such as - "organizations/123"), a folder number - (such as "folders/123"), a project ID - (such as "projects/my-project-id"), or a - project number (such as - "projects/12345"). - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.asset_v1.types.Feed: - An asset feed used to export asset - updates to a destinations. An asset feed - filter controls what updates are - exported. The asset feed must be created - within a project, organization, or - folder. Supported destinations are: - - Pub/Sub topics. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.CreateFeedRequest): - request = asset_service.CreateFeedRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_feed] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_feed(self, - request: Optional[Union[asset_service.GetFeedRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.Feed: - r"""Gets details about an asset feed. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1 - - def sample_get_feed(): - # Create a client - client = asset_v1.AssetServiceClient() - - # Initialize request argument(s) - request = asset_v1.GetFeedRequest( - name="name_value", - ) - - # Make the request - response = client.get_feed(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.asset_v1.types.GetFeedRequest, dict]): - The request object. Get asset feed request. - name (str): - Required. The name of the Feed and it must be in the - format of: projects/project_number/feeds/feed_id - folders/folder_number/feeds/feed_id - organizations/organization_number/feeds/feed_id - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.asset_v1.types.Feed: - An asset feed used to export asset - updates to a destinations. An asset feed - filter controls what updates are - exported. The asset feed must be created - within a project, organization, or - folder. Supported destinations are: - - Pub/Sub topics. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.GetFeedRequest): - request = asset_service.GetFeedRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_feed] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_feeds(self, - request: Optional[Union[asset_service.ListFeedsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.ListFeedsResponse: - r"""Lists all asset feeds in a parent - project/folder/organization. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1 - - def sample_list_feeds(): - # Create a client - client = asset_v1.AssetServiceClient() - - # Initialize request argument(s) - request = asset_v1.ListFeedsRequest( - parent="parent_value", - ) - - # Make the request - response = client.list_feeds(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.asset_v1.types.ListFeedsRequest, dict]): - The request object. List asset feeds request. - parent (str): - Required. The parent - project/folder/organization whose feeds - are to be listed. It can only be using - project/folder/organization number (such - as "folders/12345")", or a project ID - (such as "projects/my-project-id"). - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.asset_v1.types.ListFeedsResponse: - - """ - # Create or coerce a protobuf request object. 
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.ListFeedsRequest): - request = asset_service.ListFeedsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_feeds] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_feed(self, - request: Optional[Union[asset_service.UpdateFeedRequest, dict]] = None, - *, - feed: Optional[asset_service.Feed] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.Feed: - r"""Updates an asset feed configuration. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1 - - def sample_update_feed(): - # Create a client - client = asset_v1.AssetServiceClient() - - # Initialize request argument(s) - feed = asset_v1.Feed() - feed.name = "name_value" - - request = asset_v1.UpdateFeedRequest( - feed=feed, - ) - - # Make the request - response = client.update_feed(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.asset_v1.types.UpdateFeedRequest, dict]): - The request object. Update asset feed request. - feed (google.cloud.asset_v1.types.Feed): - Required. The new values of feed details. It must match - an existing feed and the field ``name`` must be in the - format of: projects/project_number/feeds/feed_id or - folders/folder_number/feeds/feed_id or - organizations/organization_number/feeds/feed_id. - - This corresponds to the ``feed`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.asset_v1.types.Feed: - An asset feed used to export asset - updates to a destinations. An asset feed - filter controls what updates are - exported. The asset feed must be created - within a project, organization, or - folder. Supported destinations are: - - Pub/Sub topics. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [feed] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.UpdateFeedRequest): - request = asset_service.UpdateFeedRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if feed is not None: - request.feed = feed - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_feed] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("feed.name", request.feed.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_feed(self, - request: Optional[Union[asset_service.DeleteFeedRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes an asset feed. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1 - - def sample_delete_feed(): - # Create a client - client = asset_v1.AssetServiceClient() - - # Initialize request argument(s) - request = asset_v1.DeleteFeedRequest( - name="name_value", - ) - - # Make the request - client.delete_feed(request=request) - - Args: - request (Union[google.cloud.asset_v1.types.DeleteFeedRequest, dict]): - The request object. - name (str): - Required. The name of the feed and it must be in the - format of: projects/project_number/feeds/feed_id - folders/folder_number/feeds/feed_id - organizations/organization_number/feeds/feed_id - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.DeleteFeedRequest): - request = asset_service.DeleteFeedRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_feed] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def search_all_resources(self, - request: Optional[Union[asset_service.SearchAllResourcesRequest, dict]] = None, - *, - scope: Optional[str] = None, - query: Optional[str] = None, - asset_types: Optional[MutableSequence[str]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.SearchAllResourcesPager: - r"""Searches all Google Cloud resources within the specified scope, - such as a project, folder, or organization. The caller must be - granted the ``cloudasset.assets.searchAllResources`` permission - on the desired scope, otherwise the request will be rejected. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1 - - def sample_search_all_resources(): - # Create a client - client = asset_v1.AssetServiceClient() - - # Initialize request argument(s) - request = asset_v1.SearchAllResourcesRequest( - scope="scope_value", - ) - - # Make the request - page_result = client.search_all_resources(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.asset_v1.types.SearchAllResourcesRequest, dict]): - The request object. Search all resources request. - scope (str): - Required. A scope can be a project, a folder, or an - organization. 
The search is limited to the resources - within the ``scope``. The caller must be granted the - ```cloudasset.assets.searchAllResources`` `__ - permission on the desired scope. - - The allowed values are: - - - projects/{PROJECT_ID} (e.g., "projects/foo-bar") - - projects/{PROJECT_NUMBER} (e.g., "projects/12345678") - - folders/{FOLDER_NUMBER} (e.g., "folders/1234567") - - organizations/{ORGANIZATION_NUMBER} (e.g., - "organizations/123456") - - This corresponds to the ``scope`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - query (str): - Optional. The query statement. See `how to construct a - query `__ - for more information. If not specified or empty, it will - search all the resources within the specified ``scope``. - - Examples: - - - ``name:Important`` to find Google Cloud resources - whose name contains ``Important`` as a word. - - ``name=Important`` to find the Google Cloud resource - whose name is exactly ``Important``. - - ``displayName:Impor*`` to find Google Cloud resources - whose display name contains ``Impor`` as a prefix of - any word in the field. - - ``location:us-west*`` to find Google Cloud resources - whose location contains both ``us`` and ``west`` as - prefixes. - - ``labels:prod`` to find Google Cloud resources whose - labels contain ``prod`` as a key or value. - - ``labels.env:prod`` to find Google Cloud resources - that have a label ``env`` and its value is ``prod``. - - ``labels.env:*`` to find Google Cloud resources that - have a label ``env``. - - ``tagKeys:env`` to find Google Cloud resources that - have directly attached tags where the - ```TagKey.namespacedName`` `__ - contains ``env``. - - ``tagValues:prod*`` to find Google Cloud resources - that have directly attached tags where the - ```TagValue.namespacedName`` `__ - contains a word prefixed by ``prod``. - - ``tagValueIds=tagValues/123`` to find Google Cloud - resources that have directly attached tags where the - ```TagValue.name`` `__ - is exactly ``tagValues/123``. - - ``effectiveTagKeys:env`` to find Google Cloud - resources that have directly attached or inherited - tags where the - ```TagKey.namespacedName`` `__ - contains ``env``. - - ``effectiveTagValues:prod*`` to find Google Cloud - resources that have directly attached or inherited - tags where the - ```TagValue.namespacedName`` `__ - contains a word prefixed by ``prod``. - - ``effectiveTagValueIds=tagValues/123`` to find Google - Cloud resources that have directly attached or - inherited tags where the - ```TagValue.name`` `__ - is exactly ``tagValues/123``. - - ``kmsKey:key`` to find Google Cloud resources - encrypted with a customer-managed encryption key - whose name contains ``key`` as a word. This field is - deprecated. Use the ``kmsKeys`` field to retrieve - Cloud KMS key information. - - ``kmsKeys:key`` to find Google Cloud resources - encrypted with customer-managed encryption keys whose - name contains the word ``key``. - - ``relationships:instance-group-1`` to find Google - Cloud resources that have relationships with - ``instance-group-1`` in the related resource name. - - ``relationships:INSTANCE_TO_INSTANCEGROUP`` to find - Compute Engine instances that have relationships of - type ``INSTANCE_TO_INSTANCEGROUP``. - - ``relationships.INSTANCE_TO_INSTANCEGROUP:instance-group-1`` - to find Compute Engine instances that have - relationships with ``instance-group-1`` in the - Compute Engine instance group resource name, for - relationship type ``INSTANCE_TO_INSTANCEGROUP``. 
- - ``sccSecurityMarks.key=value`` to find Cloud
- resources that are attached with security marks whose
- key is ``key`` and value is ``value``.
- - ``sccSecurityMarks.key:*`` to find Cloud resources
- that are attached with security marks whose key is
- ``key``.
- - ``state:ACTIVE`` to find Google Cloud resources whose
- state contains ``ACTIVE`` as a word.
- - ``NOT state:ACTIVE`` to find Google Cloud resources
- whose state doesn't contain ``ACTIVE`` as a word.
- - ``createTime<1609459200`` to find Google Cloud
- resources that were created before
- ``2021-01-01 00:00:00 UTC``. ``1609459200`` is the
- epoch timestamp of ``2021-01-01 00:00:00 UTC`` in
- seconds.
- - ``updateTime>1609459200`` to find Google Cloud
- resources that were updated after
- ``2021-01-01 00:00:00 UTC``. ``1609459200`` is the
- epoch timestamp of ``2021-01-01 00:00:00 UTC`` in
- seconds.
- - ``Important`` to find Google Cloud resources that
- contain ``Important`` as a word in any of the
- searchable fields.
- - ``Impor*`` to find Google Cloud resources that
- contain ``Impor`` as a prefix of any word in any of
- the searchable fields.
- - ``Important location:(us-west1 OR global)`` to find
- Google Cloud resources that contain ``Important`` as
- a word in any of the searchable fields and are also
- located in the ``us-west1`` region or the ``global``
- location.
-
- This corresponds to the ``query`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- asset_types (MutableSequence[str]):
- Optional. A list of asset types that this request
- searches for. If empty, it will search all the asset
- types supported by search
- APIs.
-
- Regular expressions are also supported. For example:
-
- - "compute.googleapis.com.*" snapshots resources whose
- asset type starts with "compute.googleapis.com".
- - ".*Instance" snapshots resources whose asset type
- ends with "Instance".
- - ".*Instance.*" snapshots resources whose asset type
- contains "Instance".
-
- See RE2
- for all supported regular expression syntax. If the
- regular expression does not match any supported asset
- type, an INVALID_ARGUMENT error will be returned.
-
- This corresponds to the ``asset_types`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.asset_v1.services.asset_service.pagers.SearchAllResourcesPager:
- Search all resources response.
-
- Iterating over this object will yield
- results and resolve additional pages
- automatically.
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
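- # (The check below enforces mutual exclusion: pass either a fully-formed
- # request object or the flattened conveniences ``scope``, ``query``, and
- # ``asset_types``, which mirror fields of the same name, but never both.)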
- flattened_params = [scope, query, asset_types] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.SearchAllResourcesRequest): - request = asset_service.SearchAllResourcesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if scope is not None: - request.scope = scope - if query is not None: - request.query = query - if asset_types is not None: - request.asset_types = asset_types - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.search_all_resources] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("scope", request.scope), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.SearchAllResourcesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def search_all_iam_policies(self, - request: Optional[Union[asset_service.SearchAllIamPoliciesRequest, dict]] = None, - *, - scope: Optional[str] = None, - query: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.SearchAllIamPoliciesPager: - r"""Searches all IAM policies within the specified scope, such as a - project, folder, or organization. The caller must be granted the - ``cloudasset.assets.searchAllIamPolicies`` permission on the - desired scope, otherwise the request will be rejected. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1 - - def sample_search_all_iam_policies(): - # Create a client - client = asset_v1.AssetServiceClient() - - # Initialize request argument(s) - request = asset_v1.SearchAllIamPoliciesRequest( - scope="scope_value", - ) - - # Make the request - page_result = client.search_all_iam_policies(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.asset_v1.types.SearchAllIamPoliciesRequest, dict]): - The request object. Search all IAM policies request. - scope (str): - Required. A scope can be a project, a folder, or an - organization. The search is limited to the IAM policies - within the ``scope``. 
The caller must be granted the
- ``cloudasset.assets.searchAllIamPolicies``
- permission on the desired scope.
-
- The allowed values are:
-
- - projects/{PROJECT_ID} (e.g., "projects/foo-bar")
- - projects/{PROJECT_NUMBER} (e.g., "projects/12345678")
- - folders/{FOLDER_NUMBER} (e.g., "folders/1234567")
- - organizations/{ORGANIZATION_NUMBER} (e.g.,
- "organizations/123456")
-
- This corresponds to the ``scope`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- query (str):
- Optional. The query statement. See how to construct a
- query
- for more information. If not specified or empty, it will
- search all the IAM policies within the specified
- ``scope``. Note that the query string is compared
- against each IAM policy binding, including its
- principals, roles, and IAM conditions. The returned IAM
- policies will only contain the bindings that match your
- query. To learn more about the IAM policy structure, see
- the IAM policy
- documentation.
-
- Examples:
-
- - ``policy:amy@gmail.com`` to find IAM policy bindings
- that specify user "amy@gmail.com".
- - ``policy:roles/compute.admin`` to find IAM policy
- bindings that specify the Compute Admin role.
- - ``policy:comp*`` to find IAM policy bindings that
- contain "comp" as a prefix of any word in the
- binding.
- - ``policy.role.permissions:storage.buckets.update`` to
- find IAM policy bindings that specify a role
- containing "storage.buckets.update" permission. Note
- that if callers don't have ``iam.roles.get`` access
- to a role's included permissions, policy bindings
- that specify this role will be dropped from the
- search results.
- - ``policy.role.permissions:upd*`` to find IAM policy
- bindings that specify a role containing "upd" as a
- prefix of any word in the role permission. Note that
- if callers don't have ``iam.roles.get`` access to a
- role's included permissions, policy bindings that
- specify this role will be dropped from the search
- results.
- - ``resource:organizations/123456`` to find IAM policy
- bindings that are set on "organizations/123456".
- - ``resource=//cloudresourcemanager.googleapis.com/projects/myproject``
- to find IAM policy bindings that are set on the
- project named "myproject".
- - ``Important`` to find IAM policy bindings that
- contain "Important" as a word in any of the
- searchable fields (except for the included
- permissions).
- - ``resource:(instance1 OR instance2) policy:amy`` to
- find IAM policy bindings that are set on resources
- "instance1" or "instance2" and also specify user
- "amy".
- - ``roles:roles/compute.admin`` to find IAM policy
- bindings that specify the Compute Admin role.
- - ``memberTypes:user`` to find IAM policy bindings that
- contain the principal type "user".
-
- This corresponds to the ``query`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.asset_v1.services.asset_service.pagers.SearchAllIamPoliciesPager:
- Search all IAM policies response.
- - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [scope, query] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.SearchAllIamPoliciesRequest): - request = asset_service.SearchAllIamPoliciesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if scope is not None: - request.scope = scope - if query is not None: - request.query = query - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.search_all_iam_policies] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("scope", request.scope), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.SearchAllIamPoliciesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def analyze_iam_policy(self, - request: Optional[Union[asset_service.AnalyzeIamPolicyRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.AnalyzeIamPolicyResponse: - r"""Analyzes IAM policies to answer which identities have - what accesses on which resources. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1 - - def sample_analyze_iam_policy(): - # Create a client - client = asset_v1.AssetServiceClient() - - # Initialize request argument(s) - analysis_query = asset_v1.IamPolicyAnalysisQuery() - analysis_query.scope = "scope_value" - - request = asset_v1.AnalyzeIamPolicyRequest( - analysis_query=analysis_query, - ) - - # Make the request - response = client.analyze_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.asset_v1.types.AnalyzeIamPolicyRequest, dict]): - The request object. A request message for - [AssetService.AnalyzeIamPolicy][google.cloud.asset.v1.AssetService.AnalyzeIamPolicy]. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.asset_v1.types.AnalyzeIamPolicyResponse:
- A response message for
- [AssetService.AnalyzeIamPolicy][google.cloud.asset.v1.AssetService.AnalyzeIamPolicy].
-
- """
- # Create or coerce a protobuf request object.
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, asset_service.AnalyzeIamPolicyRequest):
- request = asset_service.AnalyzeIamPolicyRequest(request)
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.analyze_iam_policy]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("analysis_query.scope", request.analysis_query.scope),
- )),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- def analyze_iam_policy_longrunning(self,
- request: Optional[Union[asset_service.AnalyzeIamPolicyLongrunningRequest, dict]] = None,
- *,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> operation.Operation:
- r"""Analyzes IAM policies asynchronously to answer which identities
- have what accesses on which resources, and writes the analysis
- results to a Google Cloud Storage or a BigQuery destination. For
- a Cloud Storage destination, the output format is the JSON format
- that represents an
- [AnalyzeIamPolicyResponse][google.cloud.asset.v1.AnalyzeIamPolicyResponse].
- This method implements the
- [google.longrunning.Operation][google.longrunning.Operation],
- which allows you to track the operation status. We recommend
- polling the operation result at intervals of at least 2 seconds,
- with exponential backoff. The operation's metadata field contains
- the metadata for the long-running operation.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1 - - def sample_analyze_iam_policy_longrunning(): - # Create a client - client = asset_v1.AssetServiceClient() - - # Initialize request argument(s) - analysis_query = asset_v1.IamPolicyAnalysisQuery() - analysis_query.scope = "scope_value" - - output_config = asset_v1.IamPolicyAnalysisOutputConfig() - output_config.gcs_destination.uri = "uri_value" - - request = asset_v1.AnalyzeIamPolicyLongrunningRequest( - analysis_query=analysis_query, - output_config=output_config, - ) - - # Make the request - operation = client.analyze_iam_policy_longrunning(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.asset_v1.types.AnalyzeIamPolicyLongrunningRequest, dict]): - The request object. A request message for - [AssetService.AnalyzeIamPolicyLongrunning][google.cloud.asset.v1.AssetService.AnalyzeIamPolicyLongrunning]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.asset_v1.types.AnalyzeIamPolicyLongrunningResponse` A response message for - [AssetService.AnalyzeIamPolicyLongrunning][google.cloud.asset.v1.AssetService.AnalyzeIamPolicyLongrunning]. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.AnalyzeIamPolicyLongrunningRequest): - request = asset_service.AnalyzeIamPolicyLongrunningRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.analyze_iam_policy_longrunning] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("analysis_query.scope", request.analysis_query.scope), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - asset_service.AnalyzeIamPolicyLongrunningResponse, - metadata_type=asset_service.AnalyzeIamPolicyLongrunningMetadata, - ) - - # Done; return the response. 
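- # (Calling ``response.result()`` on the returned operation blocks until
- # the long-running analysis finishes; per the docstring above, poll at
- # intervals of at least 2 seconds with exponential backoff.)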
- return response
-
- def analyze_move(self,
- request: Optional[Union[asset_service.AnalyzeMoveRequest, dict]] = None,
- *,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> asset_service.AnalyzeMoveResponse:
- r"""Analyzes moving a resource to a specified destination
- without kicking off the actual move. The analysis is
- best effort, depending on the user's permissions to
- view the different hierarchical policies and
- configurations. The policies and configuration are
- subject to change before the actual resource migration
- takes place.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import asset_v1
-
- def sample_analyze_move():
- # Create a client
- client = asset_v1.AssetServiceClient()
-
- # Initialize request argument(s)
- request = asset_v1.AnalyzeMoveRequest(
- resource="resource_value",
- destination_parent="destination_parent_value",
- )
-
- # Make the request
- response = client.analyze_move(request=request)
-
- # Handle the response
- print(response)
-
- Args:
- request (Union[google.cloud.asset_v1.types.AnalyzeMoveRequest, dict]):
- The request object. The request message for performing
- resource move analysis.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.asset_v1.types.AnalyzeMoveResponse:
- The response message for resource
- move analysis.
-
- """
- # Create or coerce a protobuf request object.
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, asset_service.AnalyzeMoveRequest):
- request = asset_service.AnalyzeMoveRequest(request)
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.analyze_move]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("resource", request.resource),
- )),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- def query_assets(self,
- request: Optional[Union[asset_service.QueryAssetsRequest, dict]] = None,
- *,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> asset_service.QueryAssetsResponse:
- r"""Issues a job that queries assets using a SQL statement compatible
- with BigQuery
- SQL.
-
- If the query execution finishes within timeout and there's no
- pagination, the full query results will be returned in the
- ``QueryAssetsResponse``.
-
- Otherwise, full query results can be obtained by issuing extra
- requests with the ``job_reference`` from a previous
- ``QueryAssets`` call.
-
- Note that the query result has an approximately 10 GB limit
- enforced by
- BigQuery.
- Queries that return larger results will result in errors.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import asset_v1
-
- def sample_query_assets():
- # Create a client
- client = asset_v1.AssetServiceClient()
-
- # Initialize request argument(s)
- request = asset_v1.QueryAssetsRequest(
- statement="statement_value",
- parent="parent_value",
- )
-
- # Make the request
- response = client.query_assets(request=request)
-
- # Handle the response
- print(response)
-
- Args:
- request (Union[google.cloud.asset_v1.types.QueryAssetsRequest, dict]):
- The request object. QueryAssets request.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.asset_v1.types.QueryAssetsResponse:
- QueryAssets response.
- """
- # Create or coerce a protobuf request object.
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, asset_service.QueryAssetsRequest):
- request = asset_service.QueryAssetsRequest(request)
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.query_assets]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("parent", request.parent),
- )),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- def create_saved_query(self,
- request: Optional[Union[asset_service.CreateSavedQueryRequest, dict]] = None,
- *,
- parent: Optional[str] = None,
- saved_query: Optional[asset_service.SavedQuery] = None,
- saved_query_id: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> asset_service.SavedQuery:
- r"""Creates a saved query in a parent
- project/folder/organization.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import asset_v1
-
- def sample_create_saved_query():
- # Create a client
- client = asset_v1.AssetServiceClient()
-
- # Initialize request argument(s)
- request = asset_v1.CreateSavedQueryRequest(
- parent="parent_value",
- saved_query_id="saved_query_id_value",
- )
-
- # Make the request
- response = client.create_saved_query(request=request)
-
- # Handle the response
- print(response)
-
- Args:
- request (Union[google.cloud.asset_v1.types.CreateSavedQueryRequest, dict]):
- The request object. Request to create a saved query.
- parent (str):
- Required. The name of the project/folder/organization
- where this saved_query should be created. It can only
- be an organization number (such as "organizations/123"),
- a folder number (such as "folders/123"), a project ID
- (such as "projects/my-project-id"), or a project number
- (such as "projects/12345").
-
- This corresponds to the ``parent`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- saved_query (google.cloud.asset_v1.types.SavedQuery):
- Required. The saved_query details. The ``name`` field
- must be empty as it will be generated based on the
- parent and saved_query_id.
-
- This corresponds to the ``saved_query`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- saved_query_id (str):
- Required. The ID to use for the saved query, which must
- be unique in the specified parent. It will become the
- final component of the saved query's resource name.
-
- This value should be 4-63 characters, and valid
- characters are ``[a-z][0-9]-``.
-
- Note that this field is required at saved query
- creation, and the ``name`` field of the ``saved_query``
- will be ignored.
-
- This corresponds to the ``saved_query_id`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.asset_v1.types.SavedQuery:
- A saved query which can be shared
- with others or used later.
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [parent, saved_query, saved_query_id]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError('If the `request` argument is set, then none of '
- 'the individual field arguments should be set.')
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
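- # (A plain ``dict`` is accepted here as well; the proto-plus message
- # constructor coerces it into a ``CreateSavedQueryRequest``.)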
- if not isinstance(request, asset_service.CreateSavedQueryRequest):
- request = asset_service.CreateSavedQueryRequest(request)
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if parent is not None:
- request.parent = parent
- if saved_query is not None:
- request.saved_query = saved_query
- if saved_query_id is not None:
- request.saved_query_id = saved_query_id
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.create_saved_query]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("parent", request.parent),
- )),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- def get_saved_query(self,
- request: Optional[Union[asset_service.GetSavedQueryRequest, dict]] = None,
- *,
- name: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> asset_service.SavedQuery:
- r"""Gets details about a saved query.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import asset_v1
-
- def sample_get_saved_query():
- # Create a client
- client = asset_v1.AssetServiceClient()
-
- # Initialize request argument(s)
- request = asset_v1.GetSavedQueryRequest(
- name="name_value",
- )
-
- # Make the request
- response = client.get_saved_query(request=request)
-
- # Handle the response
- print(response)
-
- Args:
- request (Union[google.cloud.asset_v1.types.GetSavedQueryRequest, dict]):
- The request object. Request to get a saved query.
- name (str):
- Required. The name of the saved query. It must be in
- the format of:
-
- - projects/project_number/savedQueries/saved_query_id
- - folders/folder_number/savedQueries/saved_query_id
- - organizations/organization_number/savedQueries/saved_query_id
-
- This corresponds to the ``name`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.asset_v1.types.SavedQuery:
- A saved query which can be shared
- with others or used later.
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [name]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError('If the `request` argument is set, then none of '
- 'the individual field arguments should be set.')
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, asset_service.GetSavedQueryRequest):
- request = asset_service.GetSavedQueryRequest(request)
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if name is not None:
- request.name = name
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.get_saved_query]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("name", request.name),
- )),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- def list_saved_queries(self,
- request: Optional[Union[asset_service.ListSavedQueriesRequest, dict]] = None,
- *,
- parent: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> pagers.ListSavedQueriesPager:
- r"""Lists all saved queries in a parent
- project/folder/organization.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import asset_v1
-
- def sample_list_saved_queries():
- # Create a client
- client = asset_v1.AssetServiceClient()
-
- # Initialize request argument(s)
- request = asset_v1.ListSavedQueriesRequest(
- parent="parent_value",
- )
-
- # Make the request
- page_result = client.list_saved_queries(request=request)
-
- # Handle the response
- for response in page_result:
- print(response)
-
- Args:
- request (Union[google.cloud.asset_v1.types.ListSavedQueriesRequest, dict]):
- The request object. Request to list saved queries.
- parent (str):
- Required. The parent
- project/folder/organization whose
- savedQueries are to be listed. It can
- only be a project, folder, or
- organization number (such as
- "folders/12345"), or a project ID (such
- as "projects/my-project-id").
-
- This corresponds to the ``parent`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
- - Returns: - google.cloud.asset_v1.services.asset_service.pagers.ListSavedQueriesPager: - Response of listing saved queries. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.ListSavedQueriesRequest): - request = asset_service.ListSavedQueriesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_saved_queries] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListSavedQueriesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_saved_query(self, - request: Optional[Union[asset_service.UpdateSavedQueryRequest, dict]] = None, - *, - saved_query: Optional[asset_service.SavedQuery] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.SavedQuery: - r"""Updates a saved query. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1 - - def sample_update_saved_query(): - # Create a client - client = asset_v1.AssetServiceClient() - - # Initialize request argument(s) - request = asset_v1.UpdateSavedQueryRequest( - ) - - # Make the request - response = client.update_saved_query(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.asset_v1.types.UpdateSavedQueryRequest, dict]): - The request object. Request to update a saved query. - saved_query (google.cloud.asset_v1.types.SavedQuery): - Required. The saved query to update. 
-
- The saved query's ``name`` field is used to identify the
- one to update, which has the format below:
-
- - projects/project_number/savedQueries/saved_query_id
- - folders/folder_number/savedQueries/saved_query_id
- - organizations/organization_number/savedQueries/saved_query_id
-
- This corresponds to the ``saved_query`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- update_mask (google.protobuf.field_mask_pb2.FieldMask):
- Required. The list of fields to
- update.
-
- This corresponds to the ``update_mask`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.asset_v1.types.SavedQuery:
- A saved query which can be shared
- with others or used later.
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [saved_query, update_mask]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError('If the `request` argument is set, then none of '
- 'the individual field arguments should be set.')
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, asset_service.UpdateSavedQueryRequest):
- request = asset_service.UpdateSavedQueryRequest(request)
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if saved_query is not None:
- request.saved_query = saved_query
- if update_mask is not None:
- request.update_mask = update_mask
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.update_saved_query]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("saved_query.name", request.saved_query.name),
- )),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- def delete_saved_query(self,
- request: Optional[Union[asset_service.DeleteSavedQueryRequest, dict]] = None,
- *,
- name: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> None:
- r"""Deletes a saved query.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1 - - def sample_delete_saved_query(): - # Create a client - client = asset_v1.AssetServiceClient() - - # Initialize request argument(s) - request = asset_v1.DeleteSavedQueryRequest( - name="name_value", - ) - - # Make the request - client.delete_saved_query(request=request) - - Args: - request (Union[google.cloud.asset_v1.types.DeleteSavedQueryRequest, dict]): - The request object. Request to delete a saved query. - name (str): - Required. The name of the saved query to delete. It must - be in the format of: - - - projects/project_number/savedQueries/saved_query_id - - folders/folder_number/savedQueries/saved_query_id - - organizations/organization_number/savedQueries/saved_query_id - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.DeleteSavedQueryRequest): - request = asset_service.DeleteSavedQueryRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_saved_query] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def batch_get_effective_iam_policies(self, - request: Optional[Union[asset_service.BatchGetEffectiveIamPoliciesRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.BatchGetEffectiveIamPoliciesResponse: - r"""Gets effective IAM policies for a batch of resources. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1 - - def sample_batch_get_effective_iam_policies(): - # Create a client - client = asset_v1.AssetServiceClient() - - # Initialize request argument(s) - request = asset_v1.BatchGetEffectiveIamPoliciesRequest( - scope="scope_value", - names=['names_value1', 'names_value2'], - ) - - # Make the request - response = client.batch_get_effective_iam_policies(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.asset_v1.types.BatchGetEffectiveIamPoliciesRequest, dict]): - The request object. A request message for - [AssetService.BatchGetEffectiveIamPolicies][google.cloud.asset.v1.AssetService.BatchGetEffectiveIamPolicies]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.asset_v1.types.BatchGetEffectiveIamPoliciesResponse: - A response message for - [AssetService.BatchGetEffectiveIamPolicies][google.cloud.asset.v1.AssetService.BatchGetEffectiveIamPolicies]. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.BatchGetEffectiveIamPoliciesRequest): - request = asset_service.BatchGetEffectiveIamPoliciesRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.batch_get_effective_iam_policies] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("scope", request.scope), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def analyze_org_policies(self, - request: Optional[Union[asset_service.AnalyzeOrgPoliciesRequest, dict]] = None, - *, - scope: Optional[str] = None, - constraint: Optional[str] = None, - filter: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.AnalyzeOrgPoliciesPager: - r"""Analyzes organization policies under a scope. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1 - - def sample_analyze_org_policies(): - # Create a client - client = asset_v1.AssetServiceClient() - - # Initialize request argument(s) - request = asset_v1.AnalyzeOrgPoliciesRequest( - scope="scope_value", - constraint="constraint_value", - ) - - # Make the request - page_result = client.analyze_org_policies(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.asset_v1.types.AnalyzeOrgPoliciesRequest, dict]): - The request object. A request message for - [AssetService.AnalyzeOrgPolicies][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicies]. - scope (str): - Required. The organization to scope the request. Only - organization policies within the scope will be analyzed. - - - organizations/{ORGANIZATION_NUMBER} (e.g., - "organizations/123456") - - This corresponds to the ``scope`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - constraint (str): - Required. The name of the constraint - to analyze organization policies for. - The response only contains analyzed - organization policies for the provided - constraint. - - This corresponds to the ``constraint`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - filter (str): - The expression to filter - [AnalyzeOrgPoliciesResponse.org_policy_results][google.cloud.asset.v1.AnalyzeOrgPoliciesResponse.org_policy_results]. - Filtering is currently available for bare literal values - and the following fields: - - - consolidated_policy.attached_resource - - consolidated_policy.rules.enforce - - When filtering by a specific field, the only supported - operator is ``=``. For example, filtering by - consolidated_policy.attached_resource="//cloudresourcemanager.googleapis.com/folders/001" - will return all the Organization Policy results attached - to "folders/001". - - This corresponds to the ``filter`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPoliciesPager: - The response message for - [AssetService.AnalyzeOrgPolicies][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicies]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
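- # (Note: the ``filter`` keyword intentionally shadows the built-in
- # ``filter`` within this method; it maps to the request's ``filter`` field.)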
- flattened_params = [scope, constraint, filter]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError('If the `request` argument is set, then none of '
- 'the individual field arguments should be set.')
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, asset_service.AnalyzeOrgPoliciesRequest):
- request = asset_service.AnalyzeOrgPoliciesRequest(request)
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if scope is not None:
- request.scope = scope
- if constraint is not None:
- request.constraint = constraint
- if filter is not None:
- request.filter = filter
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.analyze_org_policies]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("scope", request.scope),
- )),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # This method is paged; wrap the response in a pager, which provides
- # an `__iter__` convenience method.
- response = pagers.AnalyzeOrgPoliciesPager(
- method=rpc,
- request=request,
- response=response,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- def analyze_org_policy_governed_containers(self,
- request: Optional[Union[asset_service.AnalyzeOrgPolicyGovernedContainersRequest, dict]] = None,
- *,
- scope: Optional[str] = None,
- constraint: Optional[str] = None,
- filter: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> pagers.AnalyzeOrgPolicyGovernedContainersPager:
- r"""Analyzes the containers (projects, folders, or
- organizations) governed by organization policies under
- a scope.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import asset_v1
-
- def sample_analyze_org_policy_governed_containers():
- # Create a client
- client = asset_v1.AssetServiceClient()
-
- # Initialize request argument(s)
- request = asset_v1.AnalyzeOrgPolicyGovernedContainersRequest(
- scope="scope_value",
- constraint="constraint_value",
- )
-
- # Make the request
- page_result = client.analyze_org_policy_governed_containers(request=request)
-
- # Handle the response
- for response in page_result:
- print(response)
-
- Args:
- request (Union[google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedContainersRequest, dict]):
- The request object. A request message for
- [AssetService.AnalyzeOrgPolicyGovernedContainers][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedContainers].
- scope (str):
- Required.
The organization to scope the request. Only - organization policies within the scope will be analyzed. - The output containers will also be limited to the ones - governed by those in-scope organization policies. - - - organizations/{ORGANIZATION_NUMBER} (e.g., - "organizations/123456") - - This corresponds to the ``scope`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - constraint (str): - Required. The name of the constraint - to analyze governed containers for. The - analysis only contains organization - policies for the provided constraint. - - This corresponds to the ``constraint`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - filter (str): - The expression to filter - [AnalyzeOrgPolicyGovernedContainersResponse.governed_containers][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedContainersResponse.governed_containers]. - Filtering is currently available for bare literal values - and the following fields: - - - parent - - consolidated_policy.rules.enforce - - When filtering by a specific field, the only supported - operator is ``=``. For example, filtering by - parent="//cloudresourcemanager.googleapis.com/folders/001" - will return all the containers under "folders/001". - - This corresponds to the ``filter`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPolicyGovernedContainersPager: - The response message for - [AssetService.AnalyzeOrgPolicyGovernedContainers][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedContainers]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [scope, constraint, filter] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.AnalyzeOrgPolicyGovernedContainersRequest): - request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if scope is not None: - request.scope = scope - if constraint is not None: - request.constraint = constraint - if filter is not None: - request.filter = filter - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.analyze_org_policy_governed_containers] - - # Certain fields should be provided within the metadata header; - # add these here. 
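-        # (Illustrative) ``to_grpc_metadata`` emits the routing header
-        # ``("x-goog-request-params", "scope=<request.scope>")``, which lets
-        # the backend route the request by scope.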
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("scope", request.scope), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.AnalyzeOrgPolicyGovernedContainersPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def analyze_org_policy_governed_assets(self, - request: Optional[Union[asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, dict]] = None, - *, - scope: Optional[str] = None, - constraint: Optional[str] = None, - filter: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.AnalyzeOrgPolicyGovernedAssetsPager: - r"""Analyzes organization policies governed assets (Google Cloud - resources or policies) under a scope. This RPC supports custom - constraints and the following canned constraints: - - - constraints/ainotebooks.accessMode - - constraints/ainotebooks.disableFileDownloads - - constraints/ainotebooks.disableRootAccess - - constraints/ainotebooks.disableTerminal - - constraints/ainotebooks.environmentOptions - - constraints/ainotebooks.requireAutoUpgradeSchedule - - constraints/ainotebooks.restrictVpcNetworks - - constraints/compute.disableGuestAttributesAccess - - constraints/compute.disableInstanceDataAccessApis - - constraints/compute.disableNestedVirtualization - - constraints/compute.disableSerialPortAccess - - constraints/compute.disableSerialPortLogging - - constraints/compute.disableVpcExternalIpv6 - - constraints/compute.requireOsLogin - - constraints/compute.requireShieldedVm - - constraints/compute.restrictLoadBalancerCreationForTypes - - constraints/compute.restrictProtocolForwardingCreationForTypes - - constraints/compute.restrictXpnProjectLienRemoval - - constraints/compute.setNewProjectDefaultToZonalDNSOnly - - constraints/compute.skipDefaultNetworkCreation - - constraints/compute.trustedImageProjects - - constraints/compute.vmCanIpForward - - constraints/compute.vmExternalIpAccess - - constraints/gcp.detailedAuditLoggingMode - - constraints/gcp.resourceLocations - - constraints/iam.allowedPolicyMemberDomains - - constraints/iam.automaticIamGrantsForDefaultServiceAccounts - - constraints/iam.disableServiceAccountCreation - - constraints/iam.disableServiceAccountKeyCreation - - constraints/iam.disableServiceAccountKeyUpload - - constraints/iam.restrictCrossProjectServiceAccountLienRemoval - - constraints/iam.serviceAccountKeyExpiryHours - - constraints/resourcemanager.accessBoundaries - - constraints/resourcemanager.allowedExportDestinations - - constraints/sql.restrictAuthorizedNetworks - - constraints/sql.restrictNoncompliantDiagnosticDataAccess - - constraints/sql.restrictNoncompliantResourceCreation - - constraints/sql.restrictPublicIp - - constraints/storage.publicAccessPrevention - - constraints/storage.restrictAuthTypes - - constraints/storage.uniformBucketLevelAccess - - This RPC only returns either resources of types `supported by - search - APIs `__ - or IAM policies. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1 - - def sample_analyze_org_policy_governed_assets(): - # Create a client - client = asset_v1.AssetServiceClient() - - # Initialize request argument(s) - request = asset_v1.AnalyzeOrgPolicyGovernedAssetsRequest( - scope="scope_value", - constraint="constraint_value", - ) - - # Make the request - page_result = client.analyze_org_policy_governed_assets(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsRequest, dict]): - The request object. A request message for - [AssetService.AnalyzeOrgPolicyGovernedAssets][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedAssets]. - scope (str): - Required. The organization to scope the request. Only - organization policies within the scope will be analyzed. - The output assets will also be limited to the ones - governed by those in-scope organization policies. - - - organizations/{ORGANIZATION_NUMBER} (e.g., - "organizations/123456") - - This corresponds to the ``scope`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - constraint (str): - Required. The name of the constraint - to analyze governed assets for. The - analysis only contains analyzed - organization policies for the provided - constraint. - - This corresponds to the ``constraint`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - filter (str): - The expression to filter - [AnalyzeOrgPolicyGovernedAssetsResponse.governed_assets][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsResponse.governed_assets]. - - For governed resources, filtering is currently available - for bare literal values and the following fields: - - - governed_resource.project - - governed_resource.folders - - consolidated_policy.rules.enforce - - When filtering by - ``governed_resource.project`` or - ``consolidated_policy.rules.enforce``, the only - supported operator is ``=``. When filtering by - ``governed_resource.folders``, the supported - operators are ``=`` and ``:``. For example, filtering - by ``governed_resource.project="projects/12345678"`` - will return all the governed resources under - "projects/12345678", including the project itself if - applicable. - - For governed IAM policies, filtering is currently - available for bare literal values and the following - fields: - - - governed_iam_policy.project - - governed_iam_policy.folders - - consolidated_policy.rules.enforce - - When filtering by - ``governed_iam_policy.project`` or - ``consolidated_policy.rules.enforce``, the only - supported operator is ``=``. When filtering by - ``governed_iam_policy.folders``, the supported - operators are ``=`` and ``:``. For example, filtering - by ``governed_iam_policy.folders:"folders/12345678"`` - will return all the governed IAM policies under - "folders/12345678". - - This corresponds to the ``filter`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPolicyGovernedAssetsPager: - The response message for - [AssetService.AnalyzeOrgPolicyGovernedAssets][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedAssets]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [scope, constraint, filter] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.AnalyzeOrgPolicyGovernedAssetsRequest): - request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if scope is not None: - request.scope = scope - if constraint is not None: - request.constraint = constraint - if filter is not None: - request.filter = filter - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.analyze_org_policy_governed_assets] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("scope", request.scope), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.AnalyzeOrgPolicyGovernedAssetsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "AssetServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. 
Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "AssetServiceClient", -) diff --git a/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/services/asset_service/pagers.py b/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/services/asset_service/pagers.py deleted file mode 100644 index 5620c82f3151..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/services/asset_service/pagers.py +++ /dev/null @@ -1,1001 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.asset_v1.types import asset_service -from google.cloud.asset_v1.types import assets - - -class ListAssetsPager: - """A pager for iterating through ``list_assets`` requests. 
- - This class thinly wraps an initial - :class:`google.cloud.asset_v1.types.ListAssetsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``assets`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListAssets`` requests and continue to iterate - through the ``assets`` field on the - corresponding responses. - - All the usual :class:`google.cloud.asset_v1.types.ListAssetsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., asset_service.ListAssetsResponse], - request: asset_service.ListAssetsRequest, - response: asset_service.ListAssetsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.asset_v1.types.ListAssetsRequest): - The initial request object. - response (google.cloud.asset_v1.types.ListAssetsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = asset_service.ListAssetsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[asset_service.ListAssetsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[assets.Asset]: - for page in self.pages: - yield from page.assets - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListAssetsAsyncPager: - """A pager for iterating through ``list_assets`` requests. - - This class thinly wraps an initial - :class:`google.cloud.asset_v1.types.ListAssetsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``assets`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListAssets`` requests and continue to iterate - through the ``assets`` field on the - corresponding responses. - - All the usual :class:`google.cloud.asset_v1.types.ListAssetsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
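-
-    A minimal async usage sketch (illustrative only; assumes an
-    ``AssetServiceAsyncClient`` with read access to the parent resource):
-
-    .. code-block:: python
-
-        # Illustrative template, not generated sample code.
-        pager = await client.list_assets(request={"parent": "projects/my-project"})
-        async for asset in pager:   # further pages are fetched on demand
-            print(asset.name)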
- """ - def __init__(self, - method: Callable[..., Awaitable[asset_service.ListAssetsResponse]], - request: asset_service.ListAssetsRequest, - response: asset_service.ListAssetsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.asset_v1.types.ListAssetsRequest): - The initial request object. - response (google.cloud.asset_v1.types.ListAssetsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = asset_service.ListAssetsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[asset_service.ListAssetsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[assets.Asset]: - async def async_generator(): - async for page in self.pages: - for response in page.assets: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class SearchAllResourcesPager: - """A pager for iterating through ``search_all_resources`` requests. - - This class thinly wraps an initial - :class:`google.cloud.asset_v1.types.SearchAllResourcesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``results`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``SearchAllResources`` requests and continue to iterate - through the ``results`` field on the - corresponding responses. - - All the usual :class:`google.cloud.asset_v1.types.SearchAllResourcesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., asset_service.SearchAllResourcesResponse], - request: asset_service.SearchAllResourcesRequest, - response: asset_service.SearchAllResourcesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.asset_v1.types.SearchAllResourcesRequest): - The initial request object. - response (google.cloud.asset_v1.types.SearchAllResourcesResponse): - The initial response object. 
- retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = asset_service.SearchAllResourcesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[asset_service.SearchAllResourcesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[assets.ResourceSearchResult]: - for page in self.pages: - yield from page.results - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class SearchAllResourcesAsyncPager: - """A pager for iterating through ``search_all_resources`` requests. - - This class thinly wraps an initial - :class:`google.cloud.asset_v1.types.SearchAllResourcesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``results`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``SearchAllResources`` requests and continue to iterate - through the ``results`` field on the - corresponding responses. - - All the usual :class:`google.cloud.asset_v1.types.SearchAllResourcesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[asset_service.SearchAllResourcesResponse]], - request: asset_service.SearchAllResourcesRequest, - response: asset_service.SearchAllResourcesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.asset_v1.types.SearchAllResourcesRequest): - The initial request object. - response (google.cloud.asset_v1.types.SearchAllResourcesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
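-
-        Note: the ``retry``, ``timeout`` and ``metadata`` provided here are
-        re-applied by :attr:`pages` to every follow-up page request.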
- """ - self._method = method - self._request = asset_service.SearchAllResourcesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[asset_service.SearchAllResourcesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[assets.ResourceSearchResult]: - async def async_generator(): - async for page in self.pages: - for response in page.results: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class SearchAllIamPoliciesPager: - """A pager for iterating through ``search_all_iam_policies`` requests. - - This class thinly wraps an initial - :class:`google.cloud.asset_v1.types.SearchAllIamPoliciesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``results`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``SearchAllIamPolicies`` requests and continue to iterate - through the ``results`` field on the - corresponding responses. - - All the usual :class:`google.cloud.asset_v1.types.SearchAllIamPoliciesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., asset_service.SearchAllIamPoliciesResponse], - request: asset_service.SearchAllIamPoliciesRequest, - response: asset_service.SearchAllIamPoliciesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.asset_v1.types.SearchAllIamPoliciesRequest): - The initial request object. - response (google.cloud.asset_v1.types.SearchAllIamPoliciesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = asset_service.SearchAllIamPoliciesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[asset_service.SearchAllIamPoliciesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[assets.IamPolicySearchResult]: - for page in self.pages: - yield from page.results - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class SearchAllIamPoliciesAsyncPager: - """A pager for iterating through ``search_all_iam_policies`` requests. - - This class thinly wraps an initial - :class:`google.cloud.asset_v1.types.SearchAllIamPoliciesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``results`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``SearchAllIamPolicies`` requests and continue to iterate - through the ``results`` field on the - corresponding responses. - - All the usual :class:`google.cloud.asset_v1.types.SearchAllIamPoliciesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[asset_service.SearchAllIamPoliciesResponse]], - request: asset_service.SearchAllIamPoliciesRequest, - response: asset_service.SearchAllIamPoliciesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.asset_v1.types.SearchAllIamPoliciesRequest): - The initial request object. - response (google.cloud.asset_v1.types.SearchAllIamPoliciesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = asset_service.SearchAllIamPoliciesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[asset_service.SearchAllIamPoliciesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[assets.IamPolicySearchResult]: - async def async_generator(): - async for page in self.pages: - for response in page.results: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListSavedQueriesPager: - """A pager for iterating through ``list_saved_queries`` requests. - - This class thinly wraps an initial - :class:`google.cloud.asset_v1.types.ListSavedQueriesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``saved_queries`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListSavedQueries`` requests and continue to iterate - through the ``saved_queries`` field on the - corresponding responses. - - All the usual :class:`google.cloud.asset_v1.types.ListSavedQueriesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., asset_service.ListSavedQueriesResponse], - request: asset_service.ListSavedQueriesRequest, - response: asset_service.ListSavedQueriesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.asset_v1.types.ListSavedQueriesRequest): - The initial request object. - response (google.cloud.asset_v1.types.ListSavedQueriesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = asset_service.ListSavedQueriesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[asset_service.ListSavedQueriesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[asset_service.SavedQuery]: - for page in self.pages: - yield from page.saved_queries - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListSavedQueriesAsyncPager: - """A pager for iterating through ``list_saved_queries`` requests. - - This class thinly wraps an initial - :class:`google.cloud.asset_v1.types.ListSavedQueriesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``saved_queries`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListSavedQueries`` requests and continue to iterate - through the ``saved_queries`` field on the - corresponding responses. - - All the usual :class:`google.cloud.asset_v1.types.ListSavedQueriesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[asset_service.ListSavedQueriesResponse]], - request: asset_service.ListSavedQueriesRequest, - response: asset_service.ListSavedQueriesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.asset_v1.types.ListSavedQueriesRequest): - The initial request object. - response (google.cloud.asset_v1.types.ListSavedQueriesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = asset_service.ListSavedQueriesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[asset_service.ListSavedQueriesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[asset_service.SavedQuery]: - async def async_generator(): - async for page in self.pages: - for response in page.saved_queries: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class AnalyzeOrgPoliciesPager: - """A pager for iterating through ``analyze_org_policies`` requests. - - This class thinly wraps an initial - :class:`google.cloud.asset_v1.types.AnalyzeOrgPoliciesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``org_policy_results`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``AnalyzeOrgPolicies`` requests and continue to iterate - through the ``org_policy_results`` field on the - corresponding responses. - - All the usual :class:`google.cloud.asset_v1.types.AnalyzeOrgPoliciesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., asset_service.AnalyzeOrgPoliciesResponse], - request: asset_service.AnalyzeOrgPoliciesRequest, - response: asset_service.AnalyzeOrgPoliciesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.asset_v1.types.AnalyzeOrgPoliciesRequest): - The initial request object. - response (google.cloud.asset_v1.types.AnalyzeOrgPoliciesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = asset_service.AnalyzeOrgPoliciesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[asset_service.AnalyzeOrgPoliciesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult]: - for page in self.pages: - yield from page.org_policy_results - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class AnalyzeOrgPoliciesAsyncPager: - """A pager for iterating through ``analyze_org_policies`` requests. - - This class thinly wraps an initial - :class:`google.cloud.asset_v1.types.AnalyzeOrgPoliciesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``org_policy_results`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``AnalyzeOrgPolicies`` requests and continue to iterate - through the ``org_policy_results`` field on the - corresponding responses. - - All the usual :class:`google.cloud.asset_v1.types.AnalyzeOrgPoliciesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[asset_service.AnalyzeOrgPoliciesResponse]], - request: asset_service.AnalyzeOrgPoliciesRequest, - response: asset_service.AnalyzeOrgPoliciesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.asset_v1.types.AnalyzeOrgPoliciesRequest): - The initial request object. - response (google.cloud.asset_v1.types.AnalyzeOrgPoliciesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = asset_service.AnalyzeOrgPoliciesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[asset_service.AnalyzeOrgPoliciesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult]: - async def async_generator(): - async for page in self.pages: - for response in page.org_policy_results: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class AnalyzeOrgPolicyGovernedContainersPager: - """A pager for iterating through ``analyze_org_policy_governed_containers`` requests. - - This class thinly wraps an initial - :class:`google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedContainersResponse` object, and - provides an ``__iter__`` method to iterate through its - ``governed_containers`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``AnalyzeOrgPolicyGovernedContainers`` requests and continue to iterate - through the ``governed_containers`` field on the - corresponding responses. - - All the usual :class:`google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedContainersResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., asset_service.AnalyzeOrgPolicyGovernedContainersResponse], - request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, - response: asset_service.AnalyzeOrgPolicyGovernedContainersResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedContainersRequest): - The initial request object. - response (google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedContainersResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[asset_service.AnalyzeOrgPolicyGovernedContainersResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer]: - for page in self.pages: - yield from page.governed_containers - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class AnalyzeOrgPolicyGovernedContainersAsyncPager: - """A pager for iterating through ``analyze_org_policy_governed_containers`` requests. - - This class thinly wraps an initial - :class:`google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedContainersResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``governed_containers`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``AnalyzeOrgPolicyGovernedContainers`` requests and continue to iterate - through the ``governed_containers`` field on the - corresponding responses. - - All the usual :class:`google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedContainersResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[asset_service.AnalyzeOrgPolicyGovernedContainersResponse]], - request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, - response: asset_service.AnalyzeOrgPolicyGovernedContainersResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedContainersRequest): - The initial request object. - response (google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedContainersResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[asset_service.AnalyzeOrgPolicyGovernedContainersResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer]: - async def async_generator(): - async for page in self.pages: - for response in page.governed_containers: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class AnalyzeOrgPolicyGovernedAssetsPager: - """A pager for iterating through ``analyze_org_policy_governed_assets`` requests. - - This class thinly wraps an initial - :class:`google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``governed_assets`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``AnalyzeOrgPolicyGovernedAssets`` requests and continue to iterate - through the ``governed_assets`` field on the - corresponding responses. - - All the usual :class:`google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., asset_service.AnalyzeOrgPolicyGovernedAssetsResponse], - request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, - response: asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsRequest): - The initial request object. - response (google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset]: - for page in self.pages: - yield from page.governed_assets - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class AnalyzeOrgPolicyGovernedAssetsAsyncPager: - """A pager for iterating through ``analyze_org_policy_governed_assets`` requests. - - This class thinly wraps an initial - :class:`google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``governed_assets`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``AnalyzeOrgPolicyGovernedAssets`` requests and continue to iterate - through the ``governed_assets`` field on the - corresponding responses. - - All the usual :class:`google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse]], - request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, - response: asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsRequest): - The initial request object. - response (google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset]: - async def async_generator(): - async for page in self.pages: - for response in page.governed_assets: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/services/asset_service/transports/README.rst b/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/services/asset_service/transports/README.rst deleted file mode 100644 index f0467812ea79..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/services/asset_service/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`AssetServiceTransport` is the ABC for all transports. -- public child `AssetServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `AssetServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseAssetServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `AssetServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/services/asset_service/transports/__init__.py b/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/services/asset_service/transports/__init__.py deleted file mode 100644 index 315eb22bd6cb..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/services/asset_service/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import AssetServiceTransport -from .grpc import AssetServiceGrpcTransport -from .grpc_asyncio import AssetServiceGrpcAsyncIOTransport -from .rest import AssetServiceRestTransport -from .rest import AssetServiceRestInterceptor - - -# Compile a registry of transports. 
-_transport_registry = OrderedDict() # type: Dict[str, Type[AssetServiceTransport]] -_transport_registry['grpc'] = AssetServiceGrpcTransport -_transport_registry['grpc_asyncio'] = AssetServiceGrpcAsyncIOTransport -_transport_registry['rest'] = AssetServiceRestTransport - -__all__ = ( - 'AssetServiceTransport', - 'AssetServiceGrpcTransport', - 'AssetServiceGrpcAsyncIOTransport', - 'AssetServiceRestTransport', - 'AssetServiceRestInterceptor', -) diff --git a/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/services/asset_service/transports/base.py b/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/services/asset_service/transports/base.py deleted file mode 100644 index 7c6967f8978b..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/services/asset_service/transports/base.py +++ /dev/null @@ -1,639 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.asset_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.asset_v1.types import asset_service -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class AssetServiceTransport(abc.ABC): - """Abstract transport class for AssetService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'cloudasset.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'cloudasset.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. 
- scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. 
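# [Editor's note] The table assembled just below wraps every RPC with default
# retry and timeout settings. As a standalone sketch, the policy applied to
# list_assets (values copied from the table) is equivalent to:
from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries

list_assets_retry = retries.Retry(
    initial=0.1,      # first backoff sleeps 100 ms
    maximum=60.0,     # each individual backoff is capped at 60 s
    multiplier=1.3,   # backoff grows geometrically between attempts
    predicate=retries.if_exception_type(
        core_exceptions.DeadlineExceeded,
        core_exceptions.ServiceUnavailable,
    ),
    deadline=60.0,    # total retry budget of 60 s
)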
- self._wrapped_methods = { - self.export_assets: gapic_v1.method.wrap_method( - self.export_assets, - default_timeout=60.0, - client_info=client_info, - ), - self.list_assets: gapic_v1.method.wrap_method( - self.list_assets, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.batch_get_assets_history: gapic_v1.method.wrap_method( - self.batch_get_assets_history, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_feed: gapic_v1.method.wrap_method( - self.create_feed, - default_timeout=60.0, - client_info=client_info, - ), - self.get_feed: gapic_v1.method.wrap_method( - self.get_feed, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_feeds: gapic_v1.method.wrap_method( - self.list_feeds, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.update_feed: gapic_v1.method.wrap_method( - self.update_feed, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_feed: gapic_v1.method.wrap_method( - self.delete_feed, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.search_all_resources: gapic_v1.method.wrap_method( - self.search_all_resources, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.search_all_iam_policies: gapic_v1.method.wrap_method( - self.search_all_iam_policies, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.analyze_iam_policy: gapic_v1.method.wrap_method( - self.analyze_iam_policy, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.analyze_iam_policy_longrunning: gapic_v1.method.wrap_method( - self.analyze_iam_policy_longrunning, - default_timeout=60.0, - client_info=client_info, - ), - self.analyze_move: gapic_v1.method.wrap_method( - self.analyze_move, - default_timeout=None, - client_info=client_info, - ), - self.query_assets: gapic_v1.method.wrap_method( - self.query_assets, - default_retry=retries.Retry( - initial=0.1, - 
maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=200.0, - ), - default_timeout=200.0, - client_info=client_info, - ), - self.create_saved_query: gapic_v1.method.wrap_method( - self.create_saved_query, - default_timeout=60.0, - client_info=client_info, - ), - self.get_saved_query: gapic_v1.method.wrap_method( - self.get_saved_query, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_saved_queries: gapic_v1.method.wrap_method( - self.list_saved_queries, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.update_saved_query: gapic_v1.method.wrap_method( - self.update_saved_query, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_saved_query: gapic_v1.method.wrap_method( - self.delete_saved_query, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.batch_get_effective_iam_policies: gapic_v1.method.wrap_method( - self.batch_get_effective_iam_policies, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.analyze_org_policies: gapic_v1.method.wrap_method( - self.analyze_org_policies, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.analyze_org_policy_governed_containers: gapic_v1.method.wrap_method( - self.analyze_org_policy_governed_containers, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.analyze_org_policy_governed_assets: gapic_v1.method.wrap_method( - self.analyze_org_policy_governed_assets, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_operation: gapic_v1.method.wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
- """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def export_assets(self) -> Callable[ - [asset_service.ExportAssetsRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_assets(self) -> Callable[ - [asset_service.ListAssetsRequest], - Union[ - asset_service.ListAssetsResponse, - Awaitable[asset_service.ListAssetsResponse] - ]]: - raise NotImplementedError() - - @property - def batch_get_assets_history(self) -> Callable[ - [asset_service.BatchGetAssetsHistoryRequest], - Union[ - asset_service.BatchGetAssetsHistoryResponse, - Awaitable[asset_service.BatchGetAssetsHistoryResponse] - ]]: - raise NotImplementedError() - - @property - def create_feed(self) -> Callable[ - [asset_service.CreateFeedRequest], - Union[ - asset_service.Feed, - Awaitable[asset_service.Feed] - ]]: - raise NotImplementedError() - - @property - def get_feed(self) -> Callable[ - [asset_service.GetFeedRequest], - Union[ - asset_service.Feed, - Awaitable[asset_service.Feed] - ]]: - raise NotImplementedError() - - @property - def list_feeds(self) -> Callable[ - [asset_service.ListFeedsRequest], - Union[ - asset_service.ListFeedsResponse, - Awaitable[asset_service.ListFeedsResponse] - ]]: - raise NotImplementedError() - - @property - def update_feed(self) -> Callable[ - [asset_service.UpdateFeedRequest], - Union[ - asset_service.Feed, - Awaitable[asset_service.Feed] - ]]: - raise NotImplementedError() - - @property - def delete_feed(self) -> Callable[ - [asset_service.DeleteFeedRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def search_all_resources(self) -> Callable[ - [asset_service.SearchAllResourcesRequest], - Union[ - asset_service.SearchAllResourcesResponse, - Awaitable[asset_service.SearchAllResourcesResponse] - ]]: - raise NotImplementedError() - - @property - def search_all_iam_policies(self) -> Callable[ - [asset_service.SearchAllIamPoliciesRequest], - Union[ - asset_service.SearchAllIamPoliciesResponse, - Awaitable[asset_service.SearchAllIamPoliciesResponse] - ]]: - raise NotImplementedError() - - @property - def analyze_iam_policy(self) -> Callable[ - [asset_service.AnalyzeIamPolicyRequest], - Union[ - asset_service.AnalyzeIamPolicyResponse, - Awaitable[asset_service.AnalyzeIamPolicyResponse] - ]]: - raise NotImplementedError() - - @property - def analyze_iam_policy_longrunning(self) -> Callable[ - [asset_service.AnalyzeIamPolicyLongrunningRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def analyze_move(self) -> Callable[ - [asset_service.AnalyzeMoveRequest], - Union[ - asset_service.AnalyzeMoveResponse, - Awaitable[asset_service.AnalyzeMoveResponse] - ]]: - raise NotImplementedError() - - @property - def query_assets(self) -> Callable[ - [asset_service.QueryAssetsRequest], - Union[ - asset_service.QueryAssetsResponse, - Awaitable[asset_service.QueryAssetsResponse] - ]]: - raise NotImplementedError() - - @property - def create_saved_query(self) -> Callable[ - [asset_service.CreateSavedQueryRequest], - Union[ - asset_service.SavedQuery, - Awaitable[asset_service.SavedQuery] - ]]: - raise NotImplementedError() - - @property - def get_saved_query(self) -> Callable[ - [asset_service.GetSavedQueryRequest], - Union[ - 
asset_service.SavedQuery, - Awaitable[asset_service.SavedQuery] - ]]: - raise NotImplementedError() - - @property - def list_saved_queries(self) -> Callable[ - [asset_service.ListSavedQueriesRequest], - Union[ - asset_service.ListSavedQueriesResponse, - Awaitable[asset_service.ListSavedQueriesResponse] - ]]: - raise NotImplementedError() - - @property - def update_saved_query(self) -> Callable[ - [asset_service.UpdateSavedQueryRequest], - Union[ - asset_service.SavedQuery, - Awaitable[asset_service.SavedQuery] - ]]: - raise NotImplementedError() - - @property - def delete_saved_query(self) -> Callable[ - [asset_service.DeleteSavedQueryRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def batch_get_effective_iam_policies(self) -> Callable[ - [asset_service.BatchGetEffectiveIamPoliciesRequest], - Union[ - asset_service.BatchGetEffectiveIamPoliciesResponse, - Awaitable[asset_service.BatchGetEffectiveIamPoliciesResponse] - ]]: - raise NotImplementedError() - - @property - def analyze_org_policies(self) -> Callable[ - [asset_service.AnalyzeOrgPoliciesRequest], - Union[ - asset_service.AnalyzeOrgPoliciesResponse, - Awaitable[asset_service.AnalyzeOrgPoliciesResponse] - ]]: - raise NotImplementedError() - - @property - def analyze_org_policy_governed_containers(self) -> Callable[ - [asset_service.AnalyzeOrgPolicyGovernedContainersRequest], - Union[ - asset_service.AnalyzeOrgPolicyGovernedContainersResponse, - Awaitable[asset_service.AnalyzeOrgPolicyGovernedContainersResponse] - ]]: - raise NotImplementedError() - - @property - def analyze_org_policy_governed_assets(self) -> Callable[ - [asset_service.AnalyzeOrgPolicyGovernedAssetsRequest], - Union[ - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, - Awaitable[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse] - ]]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'AssetServiceTransport', -) diff --git a/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/services/asset_service/transports/grpc.py b/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/services/asset_service/transports/grpc.py deleted file mode 100644 index 782cb9469c47..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/services/asset_service/transports/grpc.py +++ /dev/null @@ -1,1073 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
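# [Editor's note] grpc.py (beginning below) installs a client-side logging
# interceptor. A distilled sketch of its payload-serialization branch, shown
# here because the same three-way dispatch recurs in the async transport:
import pickle

import google.protobuf.message
import proto
from google.protobuf.json_format import MessageToJson

def serialize_for_log(request) -> str:
    # proto-plus messages know how to render themselves as JSON.
    if isinstance(request, proto.Message):
        return type(request).to_json(request)
    # Raw protobuf messages go through json_format.
    if isinstance(request, google.protobuf.message.Message):
        return MessageToJson(request)
    # Anything else is logged as an opaque pickled payload.
    return f"{type(request).__name__}: {pickle.dumps(request)}"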
-#
-import json
-import logging as std_logging
-import pickle
-import warnings
-from typing import Callable, Dict, Optional, Sequence, Tuple, Union
-
-from google.api_core import grpc_helpers
-from google.api_core import operations_v1
-from google.api_core import gapic_v1
-import google.auth  # type: ignore
-from google.auth import credentials as ga_credentials  # type: ignore
-from google.auth.transport.grpc import SslCredentials  # type: ignore
-from google.protobuf.json_format import MessageToJson
-import google.protobuf.message
-
-import grpc  # type: ignore
-import proto  # type: ignore
-
-from google.cloud.asset_v1.types import asset_service
-from google.longrunning import operations_pb2  # type: ignore
-from google.protobuf import empty_pb2  # type: ignore
-from .base import AssetServiceTransport, DEFAULT_CLIENT_INFO
-
-try:
-    from google.api_core import client_logging  # type: ignore
-    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
-except ImportError:  # pragma: NO COVER
-    CLIENT_LOGGING_SUPPORTED = False
-
-_LOGGER = std_logging.getLogger(__name__)
-
-
-class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor):  # pragma: NO COVER
-    def intercept_unary_unary(self, continuation, client_call_details, request):
-        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG)
-        if logging_enabled:  # pragma: NO COVER
-            request_metadata = client_call_details.metadata
-            if isinstance(request, proto.Message):
-                request_payload = type(request).to_json(request)
-            elif isinstance(request, google.protobuf.message.Message):
-                request_payload = MessageToJson(request)
-            else:
-                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
-
-            request_metadata = {
-                key: value.decode("utf-8") if isinstance(value, bytes) else value
-                for key, value in request_metadata
-            }
-            grpc_request = {
-                "payload": request_payload,
-                "requestMethod": "grpc",
-                "metadata": dict(request_metadata),
-            }
-            _LOGGER.debug(
-                f"Sending request for {client_call_details.method}",
-                extra = {
-                    "serviceName": "google.cloud.asset.v1.AssetService",
-                    "rpcName": client_call_details.method,
-                    "request": grpc_request,
-                    "metadata": grpc_request["metadata"],
-                },
-            )
-
-        response = continuation(client_call_details, request)
-        if logging_enabled:  # pragma: NO COVER
-            response_metadata = response.trailing_metadata()
-            # Convert gRPC metadata `<class 'grpc.aio._metadata.Metadata'>` to list of tuples
-            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
-            result = response.result()
-            if isinstance(result, proto.Message):
-                response_payload = type(result).to_json(result)
-            elif isinstance(result, google.protobuf.message.Message):
-                response_payload = MessageToJson(result)
-            else:
-                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
-            grpc_response = {
-                "payload": response_payload,
-                "metadata": metadata,
-                "status": "OK",
-            }
-            _LOGGER.debug(
-                f"Received response for {client_call_details.method}.",
-                extra = {
-                    "serviceName": "google.cloud.asset.v1.AssetService",
-                    "rpcName": client_call_details.method,
-                    "response": grpc_response,
-                    "metadata": grpc_response["metadata"],
-                },
-            )
-        return response
-
-
-class AssetServiceGrpcTransport(AssetServiceTransport):
-    """gRPC backend transport for AssetService.
-
-    Asset service definition.
-
-    This class defines the same methods as the primary client, so the
-    primary client can load the underlying transport implementation
-    and call it.
-
-    It sends protocol buffers over the wire using gRPC (which is built on
-    top of HTTP/2); the ``grpcio`` package must be installed.
-    """
-    _stubs: Dict[str, Callable]
-
-    def __init__(self, *,
-            host: str = 'cloudasset.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None,
-            api_mtls_endpoint: Optional[str] = None,
-            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
-            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            api_audience: Optional[str] = None,
-            ) -> None:
-        """Instantiate the transport.
-
-        Args:
-            host (Optional[str]):
-                The hostname to connect to (default: 'cloudasset.googleapis.com').
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-                This argument is ignored if a ``channel`` instance is provided.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if a ``channel`` instance is provided.
-            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
-                ignored if a ``channel`` instance is provided.
-            channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]):
-                A ``Channel`` instance through which to make calls, or a Callable
-                that constructs and returns one. If set to None, ``self.create_channel``
-                is used to create the channel. If a Callable is given, it will be called
-                with the same arguments as used in ``self.create_channel``.
-            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
-                If provided, it overrides the ``host`` argument and tries to create
-                a mutual TLS channel with client SSL credentials from
-                ``client_cert_source`` or application default SSL credentials.
-            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                Deprecated. A callback to provide client SSL certificate bytes and
-                private key bytes, both in PEM format. It is ignored if
-                ``api_mtls_endpoint`` is None.
-            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
-                for the grpc channel. It is ignored if a ``channel`` instance is provided.
-            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                A callback to provide client certificate bytes and private key bytes,
-                both in PEM format. It is used to configure a mutual TLS channel. It is
-                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-            always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
-                be used for service account credentials.
- - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) - - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'cloudasset.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. 
-                This argument is mutually exclusive with credentials.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            kwargs (Optional[dict]): Keyword arguments, which are passed to the
-                channel creation.
-        Returns:
-            grpc.Channel: A gRPC channel object.
-
-        Raises:
-            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
-                and ``credentials_file`` are passed.
-        """
-
-        return grpc_helpers.create_channel(
-            host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            quota_project_id=quota_project_id,
-            default_scopes=cls.AUTH_SCOPES,
-            scopes=scopes,
-            default_host=cls.DEFAULT_HOST,
-            **kwargs
-        )
-
-    @property
-    def grpc_channel(self) -> grpc.Channel:
-        """Return the channel designed to connect to this service.
-        """
-        return self._grpc_channel
-
-    @property
-    def operations_client(self) -> operations_v1.OperationsClient:
-        """Create the client designed to process long-running operations.
-
-        This property caches on the instance; repeated calls return the same
-        client.
-        """
-        # Quick check: Only create a new client if we do not already have one.
-        if self._operations_client is None:
-            self._operations_client = operations_v1.OperationsClient(
-                self._logged_channel
-            )
-
-        # Return the client from cache.
-        return self._operations_client
-
-    @property
-    def export_assets(self) -> Callable[
-            [asset_service.ExportAssetsRequest],
-            operations_pb2.Operation]:
-        r"""Return a callable for the export assets method over gRPC.
-
-        Exports assets with time and resource types to a given Cloud
-        Storage location/BigQuery table. For Cloud Storage location
-        destinations, the output format is newline-delimited JSON. Each
-        line represents a
-        [google.cloud.asset.v1.Asset][google.cloud.asset.v1.Asset] in
-        the JSON format; for BigQuery table destinations, the output
-        table stores the fields in asset Protobuf as columns. This API
-        implements the
-        [google.longrunning.Operation][google.longrunning.Operation]
-        API, which allows you to keep track of the export. We recommend
-        intervals of at least 2 seconds with exponential retry to poll
-        the export operation result. For a regular-size resource parent,
-        the export operation usually finishes within 5 minutes.
-
-        Returns:
-            Callable[[~.ExportAssetsRequest],
-                    ~.Operation]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'export_assets' not in self._stubs:
-            self._stubs['export_assets'] = self._logged_channel.unary_unary(
-                '/google.cloud.asset.v1.AssetService/ExportAssets',
-                request_serializer=asset_service.ExportAssetsRequest.serialize,
-                response_deserializer=operations_pb2.Operation.FromString,
-            )
-        return self._stubs['export_assets']
-
-    @property
-    def list_assets(self) -> Callable[
-            [asset_service.ListAssetsRequest],
-            asset_service.ListAssetsResponse]:
-        r"""Return a callable for the list assets method over gRPC.
-
-        Lists assets with time and resource types and returns
-        paged results in response.
-
-        Returns:
-            Callable[[~.ListAssetsRequest],
-                    ~.ListAssetsResponse]:
-                A function that, when called, will call the underlying RPC
-                on the server.
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_assets' not in self._stubs: - self._stubs['list_assets'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/ListAssets', - request_serializer=asset_service.ListAssetsRequest.serialize, - response_deserializer=asset_service.ListAssetsResponse.deserialize, - ) - return self._stubs['list_assets'] - - @property - def batch_get_assets_history(self) -> Callable[ - [asset_service.BatchGetAssetsHistoryRequest], - asset_service.BatchGetAssetsHistoryResponse]: - r"""Return a callable for the batch get assets history method over gRPC. - - Batch gets the update history of assets that overlap a time - window. For IAM_POLICY content, this API outputs history when - the asset and its attached IAM POLICY both exist. This can - create gaps in the output history. Otherwise, this API outputs - history with asset in both non-delete or deleted status. If a - specified asset does not exist, this API returns an - INVALID_ARGUMENT error. - - Returns: - Callable[[~.BatchGetAssetsHistoryRequest], - ~.BatchGetAssetsHistoryResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'batch_get_assets_history' not in self._stubs: - self._stubs['batch_get_assets_history'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/BatchGetAssetsHistory', - request_serializer=asset_service.BatchGetAssetsHistoryRequest.serialize, - response_deserializer=asset_service.BatchGetAssetsHistoryResponse.deserialize, - ) - return self._stubs['batch_get_assets_history'] - - @property - def create_feed(self) -> Callable[ - [asset_service.CreateFeedRequest], - asset_service.Feed]: - r"""Return a callable for the create feed method over gRPC. - - Creates a feed in a parent - project/folder/organization to listen to its asset - updates. - - Returns: - Callable[[~.CreateFeedRequest], - ~.Feed]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_feed' not in self._stubs: - self._stubs['create_feed'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/CreateFeed', - request_serializer=asset_service.CreateFeedRequest.serialize, - response_deserializer=asset_service.Feed.deserialize, - ) - return self._stubs['create_feed'] - - @property - def get_feed(self) -> Callable[ - [asset_service.GetFeedRequest], - asset_service.Feed]: - r"""Return a callable for the get feed method over gRPC. - - Gets details about an asset feed. - - Returns: - Callable[[~.GetFeedRequest], - ~.Feed]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_feed' not in self._stubs: - self._stubs['get_feed'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/GetFeed', - request_serializer=asset_service.GetFeedRequest.serialize, - response_deserializer=asset_service.Feed.deserialize, - ) - return self._stubs['get_feed'] - - @property - def list_feeds(self) -> Callable[ - [asset_service.ListFeedsRequest], - asset_service.ListFeedsResponse]: - r"""Return a callable for the list feeds method over gRPC. - - Lists all asset feeds in a parent - project/folder/organization. - - Returns: - Callable[[~.ListFeedsRequest], - ~.ListFeedsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_feeds' not in self._stubs: - self._stubs['list_feeds'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/ListFeeds', - request_serializer=asset_service.ListFeedsRequest.serialize, - response_deserializer=asset_service.ListFeedsResponse.deserialize, - ) - return self._stubs['list_feeds'] - - @property - def update_feed(self) -> Callable[ - [asset_service.UpdateFeedRequest], - asset_service.Feed]: - r"""Return a callable for the update feed method over gRPC. - - Updates an asset feed configuration. - - Returns: - Callable[[~.UpdateFeedRequest], - ~.Feed]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_feed' not in self._stubs: - self._stubs['update_feed'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/UpdateFeed', - request_serializer=asset_service.UpdateFeedRequest.serialize, - response_deserializer=asset_service.Feed.deserialize, - ) - return self._stubs['update_feed'] - - @property - def delete_feed(self) -> Callable[ - [asset_service.DeleteFeedRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete feed method over gRPC. - - Deletes an asset feed. - - Returns: - Callable[[~.DeleteFeedRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_feed' not in self._stubs: - self._stubs['delete_feed'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/DeleteFeed', - request_serializer=asset_service.DeleteFeedRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_feed'] - - @property - def search_all_resources(self) -> Callable[ - [asset_service.SearchAllResourcesRequest], - asset_service.SearchAllResourcesResponse]: - r"""Return a callable for the search all resources method over gRPC. - - Searches all Google Cloud resources within the specified scope, - such as a project, folder, or organization. The caller must be - granted the ``cloudasset.assets.searchAllResources`` permission - on the desired scope, otherwise the request will be rejected. 
- - Returns: - Callable[[~.SearchAllResourcesRequest], - ~.SearchAllResourcesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'search_all_resources' not in self._stubs: - self._stubs['search_all_resources'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/SearchAllResources', - request_serializer=asset_service.SearchAllResourcesRequest.serialize, - response_deserializer=asset_service.SearchAllResourcesResponse.deserialize, - ) - return self._stubs['search_all_resources'] - - @property - def search_all_iam_policies(self) -> Callable[ - [asset_service.SearchAllIamPoliciesRequest], - asset_service.SearchAllIamPoliciesResponse]: - r"""Return a callable for the search all iam policies method over gRPC. - - Searches all IAM policies within the specified scope, such as a - project, folder, or organization. The caller must be granted the - ``cloudasset.assets.searchAllIamPolicies`` permission on the - desired scope, otherwise the request will be rejected. - - Returns: - Callable[[~.SearchAllIamPoliciesRequest], - ~.SearchAllIamPoliciesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'search_all_iam_policies' not in self._stubs: - self._stubs['search_all_iam_policies'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/SearchAllIamPolicies', - request_serializer=asset_service.SearchAllIamPoliciesRequest.serialize, - response_deserializer=asset_service.SearchAllIamPoliciesResponse.deserialize, - ) - return self._stubs['search_all_iam_policies'] - - @property - def analyze_iam_policy(self) -> Callable[ - [asset_service.AnalyzeIamPolicyRequest], - asset_service.AnalyzeIamPolicyResponse]: - r"""Return a callable for the analyze iam policy method over gRPC. - - Analyzes IAM policies to answer which identities have - what accesses on which resources. - - Returns: - Callable[[~.AnalyzeIamPolicyRequest], - ~.AnalyzeIamPolicyResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'analyze_iam_policy' not in self._stubs: - self._stubs['analyze_iam_policy'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/AnalyzeIamPolicy', - request_serializer=asset_service.AnalyzeIamPolicyRequest.serialize, - response_deserializer=asset_service.AnalyzeIamPolicyResponse.deserialize, - ) - return self._stubs['analyze_iam_policy'] - - @property - def analyze_iam_policy_longrunning(self) -> Callable[ - [asset_service.AnalyzeIamPolicyLongrunningRequest], - operations_pb2.Operation]: - r"""Return a callable for the analyze iam policy longrunning method over gRPC. - - Analyzes IAM policies asynchronously to answer which identities - have what accesses on which resources, and writes the analysis - results to a Google Cloud Storage or a BigQuery destination. 
For
-        a Cloud Storage destination, the output format is the JSON format
-        that represents an
-        [AnalyzeIamPolicyResponse][google.cloud.asset.v1.AnalyzeIamPolicyResponse].
-        This method implements the
-        [google.longrunning.Operation][google.longrunning.Operation],
-        which allows you to track the operation status. We recommend
-        intervals of at least 2 seconds with exponential backoff retry
-        to poll the operation result. The metadata contains the metadata
-        for the long-running operation.
-
-        Returns:
-            Callable[[~.AnalyzeIamPolicyLongrunningRequest],
-                    ~.Operation]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'analyze_iam_policy_longrunning' not in self._stubs:
-            self._stubs['analyze_iam_policy_longrunning'] = self._logged_channel.unary_unary(
-                '/google.cloud.asset.v1.AssetService/AnalyzeIamPolicyLongrunning',
-                request_serializer=asset_service.AnalyzeIamPolicyLongrunningRequest.serialize,
-                response_deserializer=operations_pb2.Operation.FromString,
-            )
-        return self._stubs['analyze_iam_policy_longrunning']
-
-    @property
-    def analyze_move(self) -> Callable[
-            [asset_service.AnalyzeMoveRequest],
-            asset_service.AnalyzeMoveResponse]:
-        r"""Return a callable for the analyze move method over gRPC.
-
-        Analyze moving a resource to a specified destination
-        without kicking off the actual move. The analysis is
-        best effort depending on the user's permissions of
-        viewing different hierarchical policies and
-        configurations. The policies and configuration are
-        subject to change before the actual resource migration
-        takes place.
-
-        Returns:
-            Callable[[~.AnalyzeMoveRequest],
-                    ~.AnalyzeMoveResponse]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'analyze_move' not in self._stubs:
-            self._stubs['analyze_move'] = self._logged_channel.unary_unary(
-                '/google.cloud.asset.v1.AssetService/AnalyzeMove',
-                request_serializer=asset_service.AnalyzeMoveRequest.serialize,
-                response_deserializer=asset_service.AnalyzeMoveResponse.deserialize,
-            )
-        return self._stubs['analyze_move']
-
-    @property
-    def query_assets(self) -> Callable[
-            [asset_service.QueryAssetsRequest],
-            asset_service.QueryAssetsResponse]:
-        r"""Return a callable for the query assets method over gRPC.
-
-        Issue a job that queries assets using a SQL statement compatible
-        with BigQuery SQL.
-
-        If the query execution finishes within timeout and there's no
-        pagination, the full query results will be returned in the
-        ``QueryAssetsResponse``.
-
-        Otherwise, full query results can be obtained by issuing extra
-        requests with the ``job_reference`` from a previous
-        ``QueryAssets`` call.
-
-        Note, the query result has an approximately 10 GB limitation
-        enforced by BigQuery.
-        Queries that return larger results will result in errors.
-
-        Returns:
-            Callable[[~.QueryAssetsRequest],
-                    ~.QueryAssetsResponse]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'query_assets' not in self._stubs: - self._stubs['query_assets'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/QueryAssets', - request_serializer=asset_service.QueryAssetsRequest.serialize, - response_deserializer=asset_service.QueryAssetsResponse.deserialize, - ) - return self._stubs['query_assets'] - - @property - def create_saved_query(self) -> Callable[ - [asset_service.CreateSavedQueryRequest], - asset_service.SavedQuery]: - r"""Return a callable for the create saved query method over gRPC. - - Creates a saved query in a parent - project/folder/organization. - - Returns: - Callable[[~.CreateSavedQueryRequest], - ~.SavedQuery]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_saved_query' not in self._stubs: - self._stubs['create_saved_query'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/CreateSavedQuery', - request_serializer=asset_service.CreateSavedQueryRequest.serialize, - response_deserializer=asset_service.SavedQuery.deserialize, - ) - return self._stubs['create_saved_query'] - - @property - def get_saved_query(self) -> Callable[ - [asset_service.GetSavedQueryRequest], - asset_service.SavedQuery]: - r"""Return a callable for the get saved query method over gRPC. - - Gets details about a saved query. - - Returns: - Callable[[~.GetSavedQueryRequest], - ~.SavedQuery]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_saved_query' not in self._stubs: - self._stubs['get_saved_query'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/GetSavedQuery', - request_serializer=asset_service.GetSavedQueryRequest.serialize, - response_deserializer=asset_service.SavedQuery.deserialize, - ) - return self._stubs['get_saved_query'] - - @property - def list_saved_queries(self) -> Callable[ - [asset_service.ListSavedQueriesRequest], - asset_service.ListSavedQueriesResponse]: - r"""Return a callable for the list saved queries method over gRPC. - - Lists all saved queries in a parent - project/folder/organization. - - Returns: - Callable[[~.ListSavedQueriesRequest], - ~.ListSavedQueriesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_saved_queries' not in self._stubs: - self._stubs['list_saved_queries'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/ListSavedQueries', - request_serializer=asset_service.ListSavedQueriesRequest.serialize, - response_deserializer=asset_service.ListSavedQueriesResponse.deserialize, - ) - return self._stubs['list_saved_queries'] - - @property - def update_saved_query(self) -> Callable[ - [asset_service.UpdateSavedQueryRequest], - asset_service.SavedQuery]: - r"""Return a callable for the update saved query method over gRPC. 
- - Updates a saved query. - - Returns: - Callable[[~.UpdateSavedQueryRequest], - ~.SavedQuery]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_saved_query' not in self._stubs: - self._stubs['update_saved_query'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/UpdateSavedQuery', - request_serializer=asset_service.UpdateSavedQueryRequest.serialize, - response_deserializer=asset_service.SavedQuery.deserialize, - ) - return self._stubs['update_saved_query'] - - @property - def delete_saved_query(self) -> Callable[ - [asset_service.DeleteSavedQueryRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete saved query method over gRPC. - - Deletes a saved query. - - Returns: - Callable[[~.DeleteSavedQueryRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_saved_query' not in self._stubs: - self._stubs['delete_saved_query'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/DeleteSavedQuery', - request_serializer=asset_service.DeleteSavedQueryRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_saved_query'] - - @property - def batch_get_effective_iam_policies(self) -> Callable[ - [asset_service.BatchGetEffectiveIamPoliciesRequest], - asset_service.BatchGetEffectiveIamPoliciesResponse]: - r"""Return a callable for the batch get effective iam - policies method over gRPC. - - Gets effective IAM policies for a batch of resources. - - Returns: - Callable[[~.BatchGetEffectiveIamPoliciesRequest], - ~.BatchGetEffectiveIamPoliciesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'batch_get_effective_iam_policies' not in self._stubs: - self._stubs['batch_get_effective_iam_policies'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/BatchGetEffectiveIamPolicies', - request_serializer=asset_service.BatchGetEffectiveIamPoliciesRequest.serialize, - response_deserializer=asset_service.BatchGetEffectiveIamPoliciesResponse.deserialize, - ) - return self._stubs['batch_get_effective_iam_policies'] - - @property - def analyze_org_policies(self) -> Callable[ - [asset_service.AnalyzeOrgPoliciesRequest], - asset_service.AnalyzeOrgPoliciesResponse]: - r"""Return a callable for the analyze org policies method over gRPC. - - Analyzes organization policies under a scope. - - Returns: - Callable[[~.AnalyzeOrgPoliciesRequest], - ~.AnalyzeOrgPoliciesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
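# [Editor's note] A hedged usage sketch for the analyze_org_policies RPC wired
# up just below, going through the public client; scope and constraint values
# are placeholders, and default application credentials are assumed.
from google.cloud import asset_v1

client = asset_v1.AssetServiceClient()
pager = client.analyze_org_policies(
    request=asset_v1.AnalyzeOrgPoliciesRequest(
        scope="organizations/123456789",                  # placeholder
        constraint="constraints/compute.requireOsLogin",  # placeholder
    )
)
for result in pager:
    # Each result carries the consolidated policy evaluated for a resource.
    print(result.consolidated_policy)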
- if 'analyze_org_policies' not in self._stubs: - self._stubs['analyze_org_policies'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicies', - request_serializer=asset_service.AnalyzeOrgPoliciesRequest.serialize, - response_deserializer=asset_service.AnalyzeOrgPoliciesResponse.deserialize, - ) - return self._stubs['analyze_org_policies'] - - @property - def analyze_org_policy_governed_containers(self) -> Callable[ - [asset_service.AnalyzeOrgPolicyGovernedContainersRequest], - asset_service.AnalyzeOrgPolicyGovernedContainersResponse]: - r"""Return a callable for the analyze org policy governed - containers method over gRPC. - - Analyzes organization policies governed containers - (projects, folders or organization) under a scope. - - Returns: - Callable[[~.AnalyzeOrgPolicyGovernedContainersRequest], - ~.AnalyzeOrgPolicyGovernedContainersResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'analyze_org_policy_governed_containers' not in self._stubs: - self._stubs['analyze_org_policy_governed_containers'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicyGovernedContainers', - request_serializer=asset_service.AnalyzeOrgPolicyGovernedContainersRequest.serialize, - response_deserializer=asset_service.AnalyzeOrgPolicyGovernedContainersResponse.deserialize, - ) - return self._stubs['analyze_org_policy_governed_containers'] - - @property - def analyze_org_policy_governed_assets(self) -> Callable[ - [asset_service.AnalyzeOrgPolicyGovernedAssetsRequest], - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse]: - r"""Return a callable for the analyze org policy governed - assets method over gRPC. - - Analyzes organization policies governed assets (Google Cloud - resources or policies) under a scope. 
This RPC supports custom
-        constraints and the following canned constraints:
-
-        -  constraints/ainotebooks.accessMode
-        -  constraints/ainotebooks.disableFileDownloads
-        -  constraints/ainotebooks.disableRootAccess
-        -  constraints/ainotebooks.disableTerminal
-        -  constraints/ainotebooks.environmentOptions
-        -  constraints/ainotebooks.requireAutoUpgradeSchedule
-        -  constraints/ainotebooks.restrictVpcNetworks
-        -  constraints/compute.disableGuestAttributesAccess
-        -  constraints/compute.disableInstanceDataAccessApis
-        -  constraints/compute.disableNestedVirtualization
-        -  constraints/compute.disableSerialPortAccess
-        -  constraints/compute.disableSerialPortLogging
-        -  constraints/compute.disableVpcExternalIpv6
-        -  constraints/compute.requireOsLogin
-        -  constraints/compute.requireShieldedVm
-        -  constraints/compute.restrictLoadBalancerCreationForTypes
-        -  constraints/compute.restrictProtocolForwardingCreationForTypes
-        -  constraints/compute.restrictXpnProjectLienRemoval
-        -  constraints/compute.setNewProjectDefaultToZonalDNSOnly
-        -  constraints/compute.skipDefaultNetworkCreation
-        -  constraints/compute.trustedImageProjects
-        -  constraints/compute.vmCanIpForward
-        -  constraints/compute.vmExternalIpAccess
-        -  constraints/gcp.detailedAuditLoggingMode
-        -  constraints/gcp.resourceLocations
-        -  constraints/iam.allowedPolicyMemberDomains
-        -  constraints/iam.automaticIamGrantsForDefaultServiceAccounts
-        -  constraints/iam.disableServiceAccountCreation
-        -  constraints/iam.disableServiceAccountKeyCreation
-        -  constraints/iam.disableServiceAccountKeyUpload
-        -  constraints/iam.restrictCrossProjectServiceAccountLienRemoval
-        -  constraints/iam.serviceAccountKeyExpiryHours
-        -  constraints/resourcemanager.accessBoundaries
-        -  constraints/resourcemanager.allowedExportDestinations
-        -  constraints/sql.restrictAuthorizedNetworks
-        -  constraints/sql.restrictNoncompliantDiagnosticDataAccess
-        -  constraints/sql.restrictNoncompliantResourceCreation
-        -  constraints/sql.restrictPublicIp
-        -  constraints/storage.publicAccessPrevention
-        -  constraints/storage.restrictAuthTypes
-        -  constraints/storage.uniformBucketLevelAccess
-
-        This RPC only returns either resources of types `supported by
-        search
-        APIs <https://cloud.google.com/asset-inventory/docs/supported-asset-types>`__
-        or IAM policies.
-
-        Returns:
-            Callable[[~.AnalyzeOrgPolicyGovernedAssetsRequest],
-                    ~.AnalyzeOrgPolicyGovernedAssetsResponse]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'analyze_org_policy_governed_assets' not in self._stubs:
-            self._stubs['analyze_org_policy_governed_assets'] = self._logged_channel.unary_unary(
-                '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicyGovernedAssets',
-                request_serializer=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest.serialize,
-                response_deserializer=asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.deserialize,
-            )
-        return self._stubs['analyze_org_policy_governed_assets']
-
-    def close(self):
-        self._logged_channel.close()
-
-    @property
-    def get_operation(
-        self,
-    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
-        r"""Return a callable for the get_operation method over gRPC.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
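# [Editor's note] A sketch of polling a long-running export through the
# get_operation RPC defined just below; the operation name is a placeholder of
# the kind returned by export_assets.
from google.cloud import asset_v1
from google.longrunning import operations_pb2

client = asset_v1.AssetServiceClient()
operation = client.get_operation(
    request=operations_pb2.GetOperationRequest(
        name="projects/my-project/operations/ExportAssets/123"  # placeholder
    )
)
print(operation.done)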
- if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'AssetServiceGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py deleted file mode 100644 index a4de81966854..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,1363 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import inspect -import json -import pickle -import logging as std_logging -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import exceptions as core_exceptions -from google.api_core import retry_async as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.asset_v1.types import asset_service -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import AssetServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import AssetServiceGrpcTransport - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER - async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request 
= { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": str(client_call_details.method), - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - response = await continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = await response.trailing_metadata() - # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = await response - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response to rpc {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": str(client_call_details.method), - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class AssetServiceGrpcAsyncIOTransport(AssetServiceTransport): - """gRPC AsyncIO backend transport for AssetService. - - Asset service definition. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'cloudasset.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. 
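Example (a minimal sketch, assuming application default credentials are available in the environment; most callers never invoke this helper directly and instead let the client construct the transport):

.. code-block:: python

    # Sketch only: build an AsyncIO channel against the default host and
    # hand it to this transport; credentials resolve from the environment.
    channel = AssetServiceGrpcAsyncIOTransport.create_channel(
        'cloudasset.googleapis.com',
    )
    transport = AssetServiceGrpcAsyncIOTransport(channel=channel)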
- """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'cloudasset.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'cloudasset.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, aio.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. 
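As a rough sketch (the operation name below is a placeholder, not a real resource), the cached client can poll any operation started through this transport:

.. code-block:: python

    # Sketch only: fetch the current state of a long-running operation
    # by its (placeholder) resource name.
    op = await transport.operations_client.get_operation(
        'projects/my-project/operations/export-123'
    )
    print(op.done)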
- - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def export_assets(self) -> Callable[ - [asset_service.ExportAssetsRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the export assets method over gRPC. - - Exports assets with time and resource types to a given Cloud - Storage location/BigQuery table. For Cloud Storage location - destinations, the output format is newline-delimited JSON. Each - line represents a - [google.cloud.asset.v1.Asset][google.cloud.asset.v1.Asset] in - the JSON format; for BigQuery table destinations, the output - table stores the fields in asset Protobuf as columns. This API - implements the - [google.longrunning.Operation][google.longrunning.Operation] - API, which allows you to keep track of the export. We recommend - intervals of at least 2 seconds with exponential retry to poll - the export operation result. For regular-size resource parent, - the export operation usually finishes within 5 minutes. - - Returns: - Callable[[~.ExportAssetsRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'export_assets' not in self._stubs: - self._stubs['export_assets'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/ExportAssets', - request_serializer=asset_service.ExportAssetsRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['export_assets'] - - @property - def list_assets(self) -> Callable[ - [asset_service.ListAssetsRequest], - Awaitable[asset_service.ListAssetsResponse]]: - r"""Return a callable for the list assets method over gRPC. - - Lists assets with time and resource types and returns - paged results in response. - - Returns: - Callable[[~.ListAssetsRequest], - Awaitable[~.ListAssetsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_assets' not in self._stubs: - self._stubs['list_assets'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/ListAssets', - request_serializer=asset_service.ListAssetsRequest.serialize, - response_deserializer=asset_service.ListAssetsResponse.deserialize, - ) - return self._stubs['list_assets'] - - @property - def batch_get_assets_history(self) -> Callable[ - [asset_service.BatchGetAssetsHistoryRequest], - Awaitable[asset_service.BatchGetAssetsHistoryResponse]]: - r"""Return a callable for the batch get assets history method over gRPC. - - Batch gets the update history of assets that overlap a time - window. For IAM_POLICY content, this API outputs history when - the asset and its attached IAM POLICY both exist. This can - create gaps in the output history. Otherwise, this API outputs - history with asset in both non-delete or deleted status. 
If a - specified asset does not exist, this API returns an - INVALID_ARGUMENT error. - - Returns: - Callable[[~.BatchGetAssetsHistoryRequest], - Awaitable[~.BatchGetAssetsHistoryResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'batch_get_assets_history' not in self._stubs: - self._stubs['batch_get_assets_history'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/BatchGetAssetsHistory', - request_serializer=asset_service.BatchGetAssetsHistoryRequest.serialize, - response_deserializer=asset_service.BatchGetAssetsHistoryResponse.deserialize, - ) - return self._stubs['batch_get_assets_history'] - - @property - def create_feed(self) -> Callable[ - [asset_service.CreateFeedRequest], - Awaitable[asset_service.Feed]]: - r"""Return a callable for the create feed method over gRPC. - - Creates a feed in a parent - project/folder/organization to listen to its asset - updates. - - Returns: - Callable[[~.CreateFeedRequest], - Awaitable[~.Feed]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_feed' not in self._stubs: - self._stubs['create_feed'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/CreateFeed', - request_serializer=asset_service.CreateFeedRequest.serialize, - response_deserializer=asset_service.Feed.deserialize, - ) - return self._stubs['create_feed'] - - @property - def get_feed(self) -> Callable[ - [asset_service.GetFeedRequest], - Awaitable[asset_service.Feed]]: - r"""Return a callable for the get feed method over gRPC. - - Gets details about an asset feed. - - Returns: - Callable[[~.GetFeedRequest], - Awaitable[~.Feed]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_feed' not in self._stubs: - self._stubs['get_feed'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/GetFeed', - request_serializer=asset_service.GetFeedRequest.serialize, - response_deserializer=asset_service.Feed.deserialize, - ) - return self._stubs['get_feed'] - - @property - def list_feeds(self) -> Callable[ - [asset_service.ListFeedsRequest], - Awaitable[asset_service.ListFeedsResponse]]: - r"""Return a callable for the list feeds method over gRPC. - - Lists all asset feeds in a parent - project/folder/organization. - - Returns: - Callable[[~.ListFeedsRequest], - Awaitable[~.ListFeedsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_feeds' not in self._stubs: - self._stubs['list_feeds'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/ListFeeds', - request_serializer=asset_service.ListFeedsRequest.serialize, - response_deserializer=asset_service.ListFeedsResponse.deserialize, - ) - return self._stubs['list_feeds'] - - @property - def update_feed(self) -> Callable[ - [asset_service.UpdateFeedRequest], - Awaitable[asset_service.Feed]]: - r"""Return a callable for the update feed method over gRPC. - - Updates an asset feed configuration. - - Returns: - Callable[[~.UpdateFeedRequest], - Awaitable[~.Feed]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_feed' not in self._stubs: - self._stubs['update_feed'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/UpdateFeed', - request_serializer=asset_service.UpdateFeedRequest.serialize, - response_deserializer=asset_service.Feed.deserialize, - ) - return self._stubs['update_feed'] - - @property - def delete_feed(self) -> Callable[ - [asset_service.DeleteFeedRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete feed method over gRPC. - - Deletes an asset feed. - - Returns: - Callable[[~.DeleteFeedRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_feed' not in self._stubs: - self._stubs['delete_feed'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/DeleteFeed', - request_serializer=asset_service.DeleteFeedRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_feed'] - - @property - def search_all_resources(self) -> Callable[ - [asset_service.SearchAllResourcesRequest], - Awaitable[asset_service.SearchAllResourcesResponse]]: - r"""Return a callable for the search all resources method over gRPC. - - Searches all Google Cloud resources within the specified scope, - such as a project, folder, or organization. The caller must be - granted the ``cloudasset.assets.searchAllResources`` permission - on the desired scope, otherwise the request will be rejected. - - Returns: - Callable[[~.SearchAllResourcesRequest], - Awaitable[~.SearchAllResourcesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'search_all_resources' not in self._stubs: - self._stubs['search_all_resources'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/SearchAllResources', - request_serializer=asset_service.SearchAllResourcesRequest.serialize, - response_deserializer=asset_service.SearchAllResourcesResponse.deserialize, - ) - return self._stubs['search_all_resources'] - - @property - def search_all_iam_policies(self) -> Callable[ - [asset_service.SearchAllIamPoliciesRequest], - Awaitable[asset_service.SearchAllIamPoliciesResponse]]: - r"""Return a callable for the search all iam policies method over gRPC. - - Searches all IAM policies within the specified scope, such as a - project, folder, or organization. The caller must be granted the - ``cloudasset.assets.searchAllIamPolicies`` permission on the - desired scope, otherwise the request will be rejected. - - Returns: - Callable[[~.SearchAllIamPoliciesRequest], - Awaitable[~.SearchAllIamPoliciesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'search_all_iam_policies' not in self._stubs: - self._stubs['search_all_iam_policies'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/SearchAllIamPolicies', - request_serializer=asset_service.SearchAllIamPoliciesRequest.serialize, - response_deserializer=asset_service.SearchAllIamPoliciesResponse.deserialize, - ) - return self._stubs['search_all_iam_policies'] - - @property - def analyze_iam_policy(self) -> Callable[ - [asset_service.AnalyzeIamPolicyRequest], - Awaitable[asset_service.AnalyzeIamPolicyResponse]]: - r"""Return a callable for the analyze iam policy method over gRPC. - - Analyzes IAM policies to answer which identities have - what accesses on which resources. - - Returns: - Callable[[~.AnalyzeIamPolicyRequest], - Awaitable[~.AnalyzeIamPolicyResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'analyze_iam_policy' not in self._stubs: - self._stubs['analyze_iam_policy'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/AnalyzeIamPolicy', - request_serializer=asset_service.AnalyzeIamPolicyRequest.serialize, - response_deserializer=asset_service.AnalyzeIamPolicyResponse.deserialize, - ) - return self._stubs['analyze_iam_policy'] - - @property - def analyze_iam_policy_longrunning(self) -> Callable[ - [asset_service.AnalyzeIamPolicyLongrunningRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the analyze iam policy longrunning method over gRPC. - - Analyzes IAM policies asynchronously to answer which identities - have what accesses on which resources, and writes the analysis - results to a Google Cloud Storage or a BigQuery destination. For - Cloud Storage destination, the output format is the JSON format - that represents a - [AnalyzeIamPolicyResponse][google.cloud.asset.v1.AnalyzeIamPolicyResponse]. - This method implements the - [google.longrunning.Operation][google.longrunning.Operation], - which allows you to track the operation status. 
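For example, a caller might drive this RPC through the public async client, which wraps it in an operation future (a hedged sketch: the scope, bucket, and ``client`` variable are placeholders, not part of this module):

.. code-block:: python

    # Sketch only: start the analysis and await the final response;
    # api_core polls the operation with backoff under the hood.
    request = asset_service.AnalyzeIamPolicyLongrunningRequest(
        analysis_query=asset_service.IamPolicyAnalysisQuery(
            scope='projects/my-project',  # placeholder scope
        ),
        output_config=asset_service.IamPolicyAnalysisOutputConfig(
            gcs_destination=asset_service.IamPolicyAnalysisOutputConfig.GcsDestination(
                uri='gs://my-bucket/analysis.json',  # placeholder destination
            ),
        ),
    )
    operation = await client.analyze_iam_policy_longrunning(request=request)
    response = await operation.result()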
We recommend polling the operation result at intervals of at least
-        2 seconds, with exponential backoff. The ``metadata`` field of the
-        returned operation contains metadata for the long-running
-        operation.
-
-        Returns:
-            Callable[[~.AnalyzeIamPolicyLongrunningRequest],
-                    Awaitable[~.Operation]]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'analyze_iam_policy_longrunning' not in self._stubs:
-            self._stubs['analyze_iam_policy_longrunning'] = self._logged_channel.unary_unary(
-                '/google.cloud.asset.v1.AssetService/AnalyzeIamPolicyLongrunning',
-                request_serializer=asset_service.AnalyzeIamPolicyLongrunningRequest.serialize,
-                response_deserializer=operations_pb2.Operation.FromString,
-            )
-        return self._stubs['analyze_iam_policy_longrunning']
-
-    @property
-    def analyze_move(self) -> Callable[
-            [asset_service.AnalyzeMoveRequest],
-            Awaitable[asset_service.AnalyzeMoveResponse]]:
-        r"""Return a callable for the analyze move method over gRPC.
-
-        Analyzes moving a resource to a specified destination
-        without kicking off the actual move. The analysis is
-        best-effort and depends on the user's permissions to
-        view the relevant hierarchical policies and
-        configurations. The policies and configurations are
-        subject to change before the actual resource migration
-        takes place.
-
-        Returns:
-            Callable[[~.AnalyzeMoveRequest],
-                    Awaitable[~.AnalyzeMoveResponse]]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'analyze_move' not in self._stubs:
-            self._stubs['analyze_move'] = self._logged_channel.unary_unary(
-                '/google.cloud.asset.v1.AssetService/AnalyzeMove',
-                request_serializer=asset_service.AnalyzeMoveRequest.serialize,
-                response_deserializer=asset_service.AnalyzeMoveResponse.deserialize,
-            )
-        return self._stubs['analyze_move']
-
-    @property
-    def query_assets(self) -> Callable[
-            [asset_service.QueryAssetsRequest],
-            Awaitable[asset_service.QueryAssetsResponse]]:
-        r"""Return a callable for the query assets method over gRPC.
-
-        Issues a job that queries assets using a SQL statement compatible
-        with `BigQuery SQL `__.
-
-        If the query execution finishes within the timeout and there is no
-        pagination, the full query results will be returned in the
-        ``QueryAssetsResponse``.
-
-        Otherwise, full query results can be obtained by issuing extra
-        requests with the ``job_reference`` from a previous
-        ``QueryAssets`` call.
-
-        Note that the query result has an approximately 10 GB size limit
-        enforced by `BigQuery `__. Queries that return larger results
-        will result in errors.
-
-        Returns:
-            Callable[[~.QueryAssetsRequest],
-                    Awaitable[~.QueryAssetsResponse]]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
- if 'query_assets' not in self._stubs: - self._stubs['query_assets'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/QueryAssets', - request_serializer=asset_service.QueryAssetsRequest.serialize, - response_deserializer=asset_service.QueryAssetsResponse.deserialize, - ) - return self._stubs['query_assets'] - - @property - def create_saved_query(self) -> Callable[ - [asset_service.CreateSavedQueryRequest], - Awaitable[asset_service.SavedQuery]]: - r"""Return a callable for the create saved query method over gRPC. - - Creates a saved query in a parent - project/folder/organization. - - Returns: - Callable[[~.CreateSavedQueryRequest], - Awaitable[~.SavedQuery]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_saved_query' not in self._stubs: - self._stubs['create_saved_query'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/CreateSavedQuery', - request_serializer=asset_service.CreateSavedQueryRequest.serialize, - response_deserializer=asset_service.SavedQuery.deserialize, - ) - return self._stubs['create_saved_query'] - - @property - def get_saved_query(self) -> Callable[ - [asset_service.GetSavedQueryRequest], - Awaitable[asset_service.SavedQuery]]: - r"""Return a callable for the get saved query method over gRPC. - - Gets details about a saved query. - - Returns: - Callable[[~.GetSavedQueryRequest], - Awaitable[~.SavedQuery]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_saved_query' not in self._stubs: - self._stubs['get_saved_query'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/GetSavedQuery', - request_serializer=asset_service.GetSavedQueryRequest.serialize, - response_deserializer=asset_service.SavedQuery.deserialize, - ) - return self._stubs['get_saved_query'] - - @property - def list_saved_queries(self) -> Callable[ - [asset_service.ListSavedQueriesRequest], - Awaitable[asset_service.ListSavedQueriesResponse]]: - r"""Return a callable for the list saved queries method over gRPC. - - Lists all saved queries in a parent - project/folder/organization. - - Returns: - Callable[[~.ListSavedQueriesRequest], - Awaitable[~.ListSavedQueriesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_saved_queries' not in self._stubs: - self._stubs['list_saved_queries'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/ListSavedQueries', - request_serializer=asset_service.ListSavedQueriesRequest.serialize, - response_deserializer=asset_service.ListSavedQueriesResponse.deserialize, - ) - return self._stubs['list_saved_queries'] - - @property - def update_saved_query(self) -> Callable[ - [asset_service.UpdateSavedQueryRequest], - Awaitable[asset_service.SavedQuery]]: - r"""Return a callable for the update saved query method over gRPC. - - Updates a saved query. 
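For example (a hedged sketch; the resource names are placeholders, and the field mask restricts which fields change):

.. code-block:: python

    # Sketch only: update just the description of an existing saved query.
    from google.protobuf import field_mask_pb2

    request = asset_service.UpdateSavedQueryRequest(
        saved_query=asset_service.SavedQuery(
            name='projects/my-project/savedQueries/my-query',  # placeholder
            description='updated description',
        ),
        update_mask=field_mask_pb2.FieldMask(paths=['description']),
    )
    updated = await client.update_saved_query(request=request)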
- - Returns: - Callable[[~.UpdateSavedQueryRequest], - Awaitable[~.SavedQuery]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_saved_query' not in self._stubs: - self._stubs['update_saved_query'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/UpdateSavedQuery', - request_serializer=asset_service.UpdateSavedQueryRequest.serialize, - response_deserializer=asset_service.SavedQuery.deserialize, - ) - return self._stubs['update_saved_query'] - - @property - def delete_saved_query(self) -> Callable[ - [asset_service.DeleteSavedQueryRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete saved query method over gRPC. - - Deletes a saved query. - - Returns: - Callable[[~.DeleteSavedQueryRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_saved_query' not in self._stubs: - self._stubs['delete_saved_query'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/DeleteSavedQuery', - request_serializer=asset_service.DeleteSavedQueryRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_saved_query'] - - @property - def batch_get_effective_iam_policies(self) -> Callable[ - [asset_service.BatchGetEffectiveIamPoliciesRequest], - Awaitable[asset_service.BatchGetEffectiveIamPoliciesResponse]]: - r"""Return a callable for the batch get effective iam - policies method over gRPC. - - Gets effective IAM policies for a batch of resources. - - Returns: - Callable[[~.BatchGetEffectiveIamPoliciesRequest], - Awaitable[~.BatchGetEffectiveIamPoliciesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'batch_get_effective_iam_policies' not in self._stubs: - self._stubs['batch_get_effective_iam_policies'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/BatchGetEffectiveIamPolicies', - request_serializer=asset_service.BatchGetEffectiveIamPoliciesRequest.serialize, - response_deserializer=asset_service.BatchGetEffectiveIamPoliciesResponse.deserialize, - ) - return self._stubs['batch_get_effective_iam_policies'] - - @property - def analyze_org_policies(self) -> Callable[ - [asset_service.AnalyzeOrgPoliciesRequest], - Awaitable[asset_service.AnalyzeOrgPoliciesResponse]]: - r"""Return a callable for the analyze org policies method over gRPC. - - Analyzes organization policies under a scope. - - Returns: - Callable[[~.AnalyzeOrgPoliciesRequest], - Awaitable[~.AnalyzeOrgPoliciesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
-        if 'analyze_org_policies' not in self._stubs:
-            self._stubs['analyze_org_policies'] = self._logged_channel.unary_unary(
-                '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicies',
-                request_serializer=asset_service.AnalyzeOrgPoliciesRequest.serialize,
-                response_deserializer=asset_service.AnalyzeOrgPoliciesResponse.deserialize,
-            )
-        return self._stubs['analyze_org_policies']
-
-    @property
-    def analyze_org_policy_governed_containers(self) -> Callable[
-            [asset_service.AnalyzeOrgPolicyGovernedContainersRequest],
-            Awaitable[asset_service.AnalyzeOrgPolicyGovernedContainersResponse]]:
-        r"""Return a callable for the analyze org policy governed
-        containers method over gRPC.
-
-        Analyzes the containers (projects, folders, or organizations)
-        governed by organization policies under a scope.
-
-        Returns:
-            Callable[[~.AnalyzeOrgPolicyGovernedContainersRequest],
-                    Awaitable[~.AnalyzeOrgPolicyGovernedContainersResponse]]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'analyze_org_policy_governed_containers' not in self._stubs:
-            self._stubs['analyze_org_policy_governed_containers'] = self._logged_channel.unary_unary(
-                '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicyGovernedContainers',
-                request_serializer=asset_service.AnalyzeOrgPolicyGovernedContainersRequest.serialize,
-                response_deserializer=asset_service.AnalyzeOrgPolicyGovernedContainersResponse.deserialize,
-            )
-        return self._stubs['analyze_org_policy_governed_containers']
-
-    @property
-    def analyze_org_policy_governed_assets(self) -> Callable[
-            [asset_service.AnalyzeOrgPolicyGovernedAssetsRequest],
-            Awaitable[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse]]:
-        r"""Return a callable for the analyze org policy governed
-        assets method over gRPC.
-
-        Analyzes the assets (Google Cloud resources or policies)
-        governed by organization policies under a scope.
This RPC supports custom - constraints and the following canned constraints: - - - constraints/ainotebooks.accessMode - - constraints/ainotebooks.disableFileDownloads - - constraints/ainotebooks.disableRootAccess - - constraints/ainotebooks.disableTerminal - - constraints/ainotebooks.environmentOptions - - constraints/ainotebooks.requireAutoUpgradeSchedule - - constraints/ainotebooks.restrictVpcNetworks - - constraints/compute.disableGuestAttributesAccess - - constraints/compute.disableInstanceDataAccessApis - - constraints/compute.disableNestedVirtualization - - constraints/compute.disableSerialPortAccess - - constraints/compute.disableSerialPortLogging - - constraints/compute.disableVpcExternalIpv6 - - constraints/compute.requireOsLogin - - constraints/compute.requireShieldedVm - - constraints/compute.restrictLoadBalancerCreationForTypes - - constraints/compute.restrictProtocolForwardingCreationForTypes - - constraints/compute.restrictXpnProjectLienRemoval - - constraints/compute.setNewProjectDefaultToZonalDNSOnly - - constraints/compute.skipDefaultNetworkCreation - - constraints/compute.trustedImageProjects - - constraints/compute.vmCanIpForward - - constraints/compute.vmExternalIpAccess - - constraints/gcp.detailedAuditLoggingMode - - constraints/gcp.resourceLocations - - constraints/iam.allowedPolicyMemberDomains - - constraints/iam.automaticIamGrantsForDefaultServiceAccounts - - constraints/iam.disableServiceAccountCreation - - constraints/iam.disableServiceAccountKeyCreation - - constraints/iam.disableServiceAccountKeyUpload - - constraints/iam.restrictCrossProjectServiceAccountLienRemoval - - constraints/iam.serviceAccountKeyExpiryHours - - constraints/resourcemanager.accessBoundaries - - constraints/resourcemanager.allowedExportDestinations - - constraints/sql.restrictAuthorizedNetworks - - constraints/sql.restrictNoncompliantDiagnosticDataAccess - - constraints/sql.restrictNoncompliantResourceCreation - - constraints/sql.restrictPublicIp - - constraints/storage.publicAccessPrevention - - constraints/storage.restrictAuthTypes - - constraints/storage.uniformBucketLevelAccess - - This RPC only returns either resources of types `supported by - search - APIs `__ - or IAM policies. - - Returns: - Callable[[~.AnalyzeOrgPolicyGovernedAssetsRequest], - Awaitable[~.AnalyzeOrgPolicyGovernedAssetsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'analyze_org_policy_governed_assets' not in self._stubs: - self._stubs['analyze_org_policy_governed_assets'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicyGovernedAssets', - request_serializer=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest.serialize, - response_deserializer=asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.deserialize, - ) - return self._stubs['analyze_org_policy_governed_assets'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.export_assets: self._wrap_method( - self.export_assets, - default_timeout=60.0, - client_info=client_info, - ), - self.list_assets: self._wrap_method( - self.list_assets, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.batch_get_assets_history: self._wrap_method( - self.batch_get_assets_history, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_feed: self._wrap_method( - self.create_feed, - default_timeout=60.0, - client_info=client_info, - ), - self.get_feed: self._wrap_method( - self.get_feed, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_feeds: self._wrap_method( - self.list_feeds, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.update_feed: self._wrap_method( - self.update_feed, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_feed: self._wrap_method( - self.delete_feed, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.search_all_resources: self._wrap_method( - self.search_all_resources, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.search_all_iam_policies: self._wrap_method( - self.search_all_iam_policies, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=30.0, - ), - default_timeout=30.0, - client_info=client_info, - ), - self.analyze_iam_policy: self._wrap_method( - self.analyze_iam_policy, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - 
core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.analyze_iam_policy_longrunning: self._wrap_method( - self.analyze_iam_policy_longrunning, - default_timeout=60.0, - client_info=client_info, - ), - self.analyze_move: self._wrap_method( - self.analyze_move, - default_timeout=None, - client_info=client_info, - ), - self.query_assets: self._wrap_method( - self.query_assets, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=200.0, - ), - default_timeout=200.0, - client_info=client_info, - ), - self.create_saved_query: self._wrap_method( - self.create_saved_query, - default_timeout=60.0, - client_info=client_info, - ), - self.get_saved_query: self._wrap_method( - self.get_saved_query, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_saved_queries: self._wrap_method( - self.list_saved_queries, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.update_saved_query: self._wrap_method( - self.update_saved_query, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_saved_query: self._wrap_method( - self.delete_saved_query, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.batch_get_effective_iam_policies: self._wrap_method( - self.batch_get_effective_iam_policies, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.analyze_org_policies: self._wrap_method( - self.analyze_org_policies, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.analyze_org_policy_governed_containers: self._wrap_method( - self.analyze_org_policy_governed_containers, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.analyze_org_policy_governed_assets: self._wrap_method( - self.analyze_org_policy_governed_assets, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_operation: self._wrap_method( - 
self.get_operation, - default_timeout=None, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - -__all__ = ( - 'AssetServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/services/asset_service/transports/rest.py b/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/services/asset_service/transports/rest.py deleted file mode 100644 index 8e5f49bae43f..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/services/asset_service/transports/rest.py +++ /dev/null @@ -1,4227 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
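The REST transport defined in the file below carries the same AssetService methods over JSON/HTTP instead of gRPC. As a hedged sketch (the parent resource is a placeholder), a caller opts into it through the client's ``transport`` argument:

.. code-block:: python

    # Sketch only: the same client surface, served over REST.
    from google.cloud import asset_v1

    client = asset_v1.AssetServiceClient(transport='rest')
    for feed in client.list_feeds(parent='projects/my-project').feeds:
        print(feed.name)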
-# -import logging -import json # type: ignore - -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.api_core import operations_v1 - -from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - - -from google.cloud.asset_v1.types import asset_service -from google.protobuf import empty_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - - -from .rest_base import _BaseAssetServiceRestTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = logging.getLogger(__name__) - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=f"requests@{requests_version}", -) - - -class AssetServiceRestInterceptor: - """Interceptor for AssetService. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the AssetServiceRestTransport. - - .. 
code-block:: python - class MyCustomAssetServiceInterceptor(AssetServiceRestInterceptor): - def pre_analyze_iam_policy(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_analyze_iam_policy(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_analyze_iam_policy_longrunning(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_analyze_iam_policy_longrunning(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_analyze_move(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_analyze_move(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_analyze_org_policies(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_analyze_org_policies(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_analyze_org_policy_governed_assets(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_analyze_org_policy_governed_assets(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_analyze_org_policy_governed_containers(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_analyze_org_policy_governed_containers(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_batch_get_assets_history(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_batch_get_assets_history(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_batch_get_effective_iam_policies(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_batch_get_effective_iam_policies(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_feed(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_feed(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_saved_query(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_saved_query(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_feed(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_delete_saved_query(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_export_assets(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_export_assets(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_feed(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_feed(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_saved_query(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_saved_query(self, response): - 
logging.log(f"Received response: {response}") - return response - - def pre_list_assets(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_assets(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_feeds(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_feeds(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_saved_queries(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_saved_queries(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_query_assets(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_query_assets(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_search_all_iam_policies(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_search_all_iam_policies(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_search_all_resources(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_search_all_resources(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_feed(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_feed(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_saved_query(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_saved_query(self, response): - logging.log(f"Received response: {response}") - return response - - transport = AssetServiceRestTransport(interceptor=MyCustomAssetServiceInterceptor()) - client = AssetServiceClient(transport=transport) - - - """ - def pre_analyze_iam_policy(self, request: asset_service.AnalyzeIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for analyze_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssetService server. - """ - return request, metadata - - def post_analyze_iam_policy(self, response: asset_service.AnalyzeIamPolicyResponse) -> asset_service.AnalyzeIamPolicyResponse: - """Post-rpc interceptor for analyze_iam_policy - - DEPRECATED. Please use the `post_analyze_iam_policy_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AssetService server but before - it is returned to user code. This `post_analyze_iam_policy` interceptor runs - before the `post_analyze_iam_policy_with_metadata` interceptor. 
- """ - return response - - def post_analyze_iam_policy_with_metadata(self, response: asset_service.AnalyzeIamPolicyResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeIamPolicyResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for analyze_iam_policy - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AssetService server but before it is returned to user code. - - We recommend only using this `post_analyze_iam_policy_with_metadata` - interceptor in new development instead of the `post_analyze_iam_policy` interceptor. - When both interceptors are used, this `post_analyze_iam_policy_with_metadata` interceptor runs after the - `post_analyze_iam_policy` interceptor. The (possibly modified) response returned by - `post_analyze_iam_policy` will be passed to - `post_analyze_iam_policy_with_metadata`. - """ - return response, metadata - - def pre_analyze_iam_policy_longrunning(self, request: asset_service.AnalyzeIamPolicyLongrunningRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeIamPolicyLongrunningRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for analyze_iam_policy_longrunning - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssetService server. - """ - return request, metadata - - def post_analyze_iam_policy_longrunning(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for analyze_iam_policy_longrunning - - DEPRECATED. Please use the `post_analyze_iam_policy_longrunning_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AssetService server but before - it is returned to user code. This `post_analyze_iam_policy_longrunning` interceptor runs - before the `post_analyze_iam_policy_longrunning_with_metadata` interceptor. - """ - return response - - def post_analyze_iam_policy_longrunning_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for analyze_iam_policy_longrunning - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AssetService server but before it is returned to user code. - - We recommend only using this `post_analyze_iam_policy_longrunning_with_metadata` - interceptor in new development instead of the `post_analyze_iam_policy_longrunning` interceptor. - When both interceptors are used, this `post_analyze_iam_policy_longrunning_with_metadata` interceptor runs after the - `post_analyze_iam_policy_longrunning` interceptor. The (possibly modified) response returned by - `post_analyze_iam_policy_longrunning` will be passed to - `post_analyze_iam_policy_longrunning_with_metadata`. - """ - return response, metadata - - def pre_analyze_move(self, request: asset_service.AnalyzeMoveRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeMoveRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for analyze_move - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssetService server. 
- """ - return request, metadata - - def post_analyze_move(self, response: asset_service.AnalyzeMoveResponse) -> asset_service.AnalyzeMoveResponse: - """Post-rpc interceptor for analyze_move - - DEPRECATED. Please use the `post_analyze_move_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AssetService server but before - it is returned to user code. This `post_analyze_move` interceptor runs - before the `post_analyze_move_with_metadata` interceptor. - """ - return response - - def post_analyze_move_with_metadata(self, response: asset_service.AnalyzeMoveResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeMoveResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for analyze_move - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AssetService server but before it is returned to user code. - - We recommend only using this `post_analyze_move_with_metadata` - interceptor in new development instead of the `post_analyze_move` interceptor. - When both interceptors are used, this `post_analyze_move_with_metadata` interceptor runs after the - `post_analyze_move` interceptor. The (possibly modified) response returned by - `post_analyze_move` will be passed to - `post_analyze_move_with_metadata`. - """ - return response, metadata - - def pre_analyze_org_policies(self, request: asset_service.AnalyzeOrgPoliciesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeOrgPoliciesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for analyze_org_policies - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssetService server. - """ - return request, metadata - - def post_analyze_org_policies(self, response: asset_service.AnalyzeOrgPoliciesResponse) -> asset_service.AnalyzeOrgPoliciesResponse: - """Post-rpc interceptor for analyze_org_policies - - DEPRECATED. Please use the `post_analyze_org_policies_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AssetService server but before - it is returned to user code. This `post_analyze_org_policies` interceptor runs - before the `post_analyze_org_policies_with_metadata` interceptor. - """ - return response - - def post_analyze_org_policies_with_metadata(self, response: asset_service.AnalyzeOrgPoliciesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeOrgPoliciesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for analyze_org_policies - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AssetService server but before it is returned to user code. - - We recommend only using this `post_analyze_org_policies_with_metadata` - interceptor in new development instead of the `post_analyze_org_policies` interceptor. - When both interceptors are used, this `post_analyze_org_policies_with_metadata` interceptor runs after the - `post_analyze_org_policies` interceptor. The (possibly modified) response returned by - `post_analyze_org_policies` will be passed to - `post_analyze_org_policies_with_metadata`. 
- """ - return response, metadata - - def pre_analyze_org_policy_governed_assets(self, request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for analyze_org_policy_governed_assets - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssetService server. - """ - return request, metadata - - def post_analyze_org_policy_governed_assets(self, response: asset_service.AnalyzeOrgPolicyGovernedAssetsResponse) -> asset_service.AnalyzeOrgPolicyGovernedAssetsResponse: - """Post-rpc interceptor for analyze_org_policy_governed_assets - - DEPRECATED. Please use the `post_analyze_org_policy_governed_assets_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AssetService server but before - it is returned to user code. This `post_analyze_org_policy_governed_assets` interceptor runs - before the `post_analyze_org_policy_governed_assets_with_metadata` interceptor. - """ - return response - - def post_analyze_org_policy_governed_assets_with_metadata(self, response: asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for analyze_org_policy_governed_assets - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AssetService server but before it is returned to user code. - - We recommend only using this `post_analyze_org_policy_governed_assets_with_metadata` - interceptor in new development instead of the `post_analyze_org_policy_governed_assets` interceptor. - When both interceptors are used, this `post_analyze_org_policy_governed_assets_with_metadata` interceptor runs after the - `post_analyze_org_policy_governed_assets` interceptor. The (possibly modified) response returned by - `post_analyze_org_policy_governed_assets` will be passed to - `post_analyze_org_policy_governed_assets_with_metadata`. - """ - return response, metadata - - def pre_analyze_org_policy_governed_containers(self, request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeOrgPolicyGovernedContainersRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for analyze_org_policy_governed_containers - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssetService server. - """ - return request, metadata - - def post_analyze_org_policy_governed_containers(self, response: asset_service.AnalyzeOrgPolicyGovernedContainersResponse) -> asset_service.AnalyzeOrgPolicyGovernedContainersResponse: - """Post-rpc interceptor for analyze_org_policy_governed_containers - - DEPRECATED. Please use the `post_analyze_org_policy_governed_containers_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AssetService server but before - it is returned to user code. This `post_analyze_org_policy_governed_containers` interceptor runs - before the `post_analyze_org_policy_governed_containers_with_metadata` interceptor. 
- """ - return response - - def post_analyze_org_policy_governed_containers_with_metadata(self, response: asset_service.AnalyzeOrgPolicyGovernedContainersResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeOrgPolicyGovernedContainersResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for analyze_org_policy_governed_containers - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AssetService server but before it is returned to user code. - - We recommend only using this `post_analyze_org_policy_governed_containers_with_metadata` - interceptor in new development instead of the `post_analyze_org_policy_governed_containers` interceptor. - When both interceptors are used, this `post_analyze_org_policy_governed_containers_with_metadata` interceptor runs after the - `post_analyze_org_policy_governed_containers` interceptor. The (possibly modified) response returned by - `post_analyze_org_policy_governed_containers` will be passed to - `post_analyze_org_policy_governed_containers_with_metadata`. - """ - return response, metadata - - def pre_batch_get_assets_history(self, request: asset_service.BatchGetAssetsHistoryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.BatchGetAssetsHistoryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for batch_get_assets_history - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssetService server. - """ - return request, metadata - - def post_batch_get_assets_history(self, response: asset_service.BatchGetAssetsHistoryResponse) -> asset_service.BatchGetAssetsHistoryResponse: - """Post-rpc interceptor for batch_get_assets_history - - DEPRECATED. Please use the `post_batch_get_assets_history_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AssetService server but before - it is returned to user code. This `post_batch_get_assets_history` interceptor runs - before the `post_batch_get_assets_history_with_metadata` interceptor. - """ - return response - - def post_batch_get_assets_history_with_metadata(self, response: asset_service.BatchGetAssetsHistoryResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.BatchGetAssetsHistoryResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for batch_get_assets_history - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AssetService server but before it is returned to user code. - - We recommend only using this `post_batch_get_assets_history_with_metadata` - interceptor in new development instead of the `post_batch_get_assets_history` interceptor. - When both interceptors are used, this `post_batch_get_assets_history_with_metadata` interceptor runs after the - `post_batch_get_assets_history` interceptor. The (possibly modified) response returned by - `post_batch_get_assets_history` will be passed to - `post_batch_get_assets_history_with_metadata`. 
- """ - return response, metadata - - def pre_batch_get_effective_iam_policies(self, request: asset_service.BatchGetEffectiveIamPoliciesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.BatchGetEffectiveIamPoliciesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for batch_get_effective_iam_policies - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssetService server. - """ - return request, metadata - - def post_batch_get_effective_iam_policies(self, response: asset_service.BatchGetEffectiveIamPoliciesResponse) -> asset_service.BatchGetEffectiveIamPoliciesResponse: - """Post-rpc interceptor for batch_get_effective_iam_policies - - DEPRECATED. Please use the `post_batch_get_effective_iam_policies_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AssetService server but before - it is returned to user code. This `post_batch_get_effective_iam_policies` interceptor runs - before the `post_batch_get_effective_iam_policies_with_metadata` interceptor. - """ - return response - - def post_batch_get_effective_iam_policies_with_metadata(self, response: asset_service.BatchGetEffectiveIamPoliciesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.BatchGetEffectiveIamPoliciesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for batch_get_effective_iam_policies - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AssetService server but before it is returned to user code. - - We recommend only using this `post_batch_get_effective_iam_policies_with_metadata` - interceptor in new development instead of the `post_batch_get_effective_iam_policies` interceptor. - When both interceptors are used, this `post_batch_get_effective_iam_policies_with_metadata` interceptor runs after the - `post_batch_get_effective_iam_policies` interceptor. The (possibly modified) response returned by - `post_batch_get_effective_iam_policies` will be passed to - `post_batch_get_effective_iam_policies_with_metadata`. - """ - return response, metadata - - def pre_create_feed(self, request: asset_service.CreateFeedRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.CreateFeedRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_feed - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssetService server. - """ - return request, metadata - - def post_create_feed(self, response: asset_service.Feed) -> asset_service.Feed: - """Post-rpc interceptor for create_feed - - DEPRECATED. Please use the `post_create_feed_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AssetService server but before - it is returned to user code. This `post_create_feed` interceptor runs - before the `post_create_feed_with_metadata` interceptor. - """ - return response - - def post_create_feed_with_metadata(self, response: asset_service.Feed, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.Feed, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_feed - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AssetService server but before it is returned to user code. 
- - We recommend only using this `post_create_feed_with_metadata` - interceptor in new development instead of the `post_create_feed` interceptor. - When both interceptors are used, this `post_create_feed_with_metadata` interceptor runs after the - `post_create_feed` interceptor. The (possibly modified) response returned by - `post_create_feed` will be passed to - `post_create_feed_with_metadata`. - """ - return response, metadata - - def pre_create_saved_query(self, request: asset_service.CreateSavedQueryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.CreateSavedQueryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_saved_query - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssetService server. - """ - return request, metadata - - def post_create_saved_query(self, response: asset_service.SavedQuery) -> asset_service.SavedQuery: - """Post-rpc interceptor for create_saved_query - - DEPRECATED. Please use the `post_create_saved_query_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AssetService server but before - it is returned to user code. This `post_create_saved_query` interceptor runs - before the `post_create_saved_query_with_metadata` interceptor. - """ - return response - - def post_create_saved_query_with_metadata(self, response: asset_service.SavedQuery, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SavedQuery, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_saved_query - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AssetService server but before it is returned to user code. - - We recommend only using this `post_create_saved_query_with_metadata` - interceptor in new development instead of the `post_create_saved_query` interceptor. - When both interceptors are used, this `post_create_saved_query_with_metadata` interceptor runs after the - `post_create_saved_query` interceptor. The (possibly modified) response returned by - `post_create_saved_query` will be passed to - `post_create_saved_query_with_metadata`. - """ - return response, metadata - - def pre_delete_feed(self, request: asset_service.DeleteFeedRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.DeleteFeedRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_feed - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssetService server. - """ - return request, metadata - - def pre_delete_saved_query(self, request: asset_service.DeleteSavedQueryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.DeleteSavedQueryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_saved_query - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssetService server. - """ - return request, metadata - - def pre_export_assets(self, request: asset_service.ExportAssetsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ExportAssetsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for export_assets - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssetService server. 
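-
-        For example, a minimal sketch (the bucket name is illustrative, and it
-        assumes a Cloud Storage output config) that refuses to export anywhere
-        but an approved destination:
-
-        .. code-block:: python
-
-            class GuardedExportInterceptor(AssetServiceRestInterceptor):
-                def pre_export_assets(self, request, metadata):
-                    uri = request.output_config.gcs_destination.uri
-                    if not uri.startswith("gs://approved-bucket/"):
-                        raise ValueError(f"unexpected export destination: {uri}")
-                    return request, metadata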
- """ - return request, metadata - - def post_export_assets(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for export_assets - - DEPRECATED. Please use the `post_export_assets_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AssetService server but before - it is returned to user code. This `post_export_assets` interceptor runs - before the `post_export_assets_with_metadata` interceptor. - """ - return response - - def post_export_assets_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for export_assets - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AssetService server but before it is returned to user code. - - We recommend only using this `post_export_assets_with_metadata` - interceptor in new development instead of the `post_export_assets` interceptor. - When both interceptors are used, this `post_export_assets_with_metadata` interceptor runs after the - `post_export_assets` interceptor. The (possibly modified) response returned by - `post_export_assets` will be passed to - `post_export_assets_with_metadata`. - """ - return response, metadata - - def pre_get_feed(self, request: asset_service.GetFeedRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.GetFeedRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_feed - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssetService server. - """ - return request, metadata - - def post_get_feed(self, response: asset_service.Feed) -> asset_service.Feed: - """Post-rpc interceptor for get_feed - - DEPRECATED. Please use the `post_get_feed_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AssetService server but before - it is returned to user code. This `post_get_feed` interceptor runs - before the `post_get_feed_with_metadata` interceptor. - """ - return response - - def post_get_feed_with_metadata(self, response: asset_service.Feed, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.Feed, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_feed - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AssetService server but before it is returned to user code. - - We recommend only using this `post_get_feed_with_metadata` - interceptor in new development instead of the `post_get_feed` interceptor. - When both interceptors are used, this `post_get_feed_with_metadata` interceptor runs after the - `post_get_feed` interceptor. The (possibly modified) response returned by - `post_get_feed` will be passed to - `post_get_feed_with_metadata`. - """ - return response, metadata - - def pre_get_saved_query(self, request: asset_service.GetSavedQueryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.GetSavedQueryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_saved_query - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssetService server. 
- """ - return request, metadata - - def post_get_saved_query(self, response: asset_service.SavedQuery) -> asset_service.SavedQuery: - """Post-rpc interceptor for get_saved_query - - DEPRECATED. Please use the `post_get_saved_query_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AssetService server but before - it is returned to user code. This `post_get_saved_query` interceptor runs - before the `post_get_saved_query_with_metadata` interceptor. - """ - return response - - def post_get_saved_query_with_metadata(self, response: asset_service.SavedQuery, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SavedQuery, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_saved_query - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AssetService server but before it is returned to user code. - - We recommend only using this `post_get_saved_query_with_metadata` - interceptor in new development instead of the `post_get_saved_query` interceptor. - When both interceptors are used, this `post_get_saved_query_with_metadata` interceptor runs after the - `post_get_saved_query` interceptor. The (possibly modified) response returned by - `post_get_saved_query` will be passed to - `post_get_saved_query_with_metadata`. - """ - return response, metadata - - def pre_list_assets(self, request: asset_service.ListAssetsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ListAssetsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_assets - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssetService server. - """ - return request, metadata - - def post_list_assets(self, response: asset_service.ListAssetsResponse) -> asset_service.ListAssetsResponse: - """Post-rpc interceptor for list_assets - - DEPRECATED. Please use the `post_list_assets_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AssetService server but before - it is returned to user code. This `post_list_assets` interceptor runs - before the `post_list_assets_with_metadata` interceptor. - """ - return response - - def post_list_assets_with_metadata(self, response: asset_service.ListAssetsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ListAssetsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_assets - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AssetService server but before it is returned to user code. - - We recommend only using this `post_list_assets_with_metadata` - interceptor in new development instead of the `post_list_assets` interceptor. - When both interceptors are used, this `post_list_assets_with_metadata` interceptor runs after the - `post_list_assets` interceptor. The (possibly modified) response returned by - `post_list_assets` will be passed to - `post_list_assets_with_metadata`. 
- """ - return response, metadata - - def pre_list_feeds(self, request: asset_service.ListFeedsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ListFeedsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_feeds - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssetService server. - """ - return request, metadata - - def post_list_feeds(self, response: asset_service.ListFeedsResponse) -> asset_service.ListFeedsResponse: - """Post-rpc interceptor for list_feeds - - DEPRECATED. Please use the `post_list_feeds_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AssetService server but before - it is returned to user code. This `post_list_feeds` interceptor runs - before the `post_list_feeds_with_metadata` interceptor. - """ - return response - - def post_list_feeds_with_metadata(self, response: asset_service.ListFeedsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ListFeedsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_feeds - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AssetService server but before it is returned to user code. - - We recommend only using this `post_list_feeds_with_metadata` - interceptor in new development instead of the `post_list_feeds` interceptor. - When both interceptors are used, this `post_list_feeds_with_metadata` interceptor runs after the - `post_list_feeds` interceptor. The (possibly modified) response returned by - `post_list_feeds` will be passed to - `post_list_feeds_with_metadata`. - """ - return response, metadata - - def pre_list_saved_queries(self, request: asset_service.ListSavedQueriesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ListSavedQueriesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_saved_queries - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssetService server. - """ - return request, metadata - - def post_list_saved_queries(self, response: asset_service.ListSavedQueriesResponse) -> asset_service.ListSavedQueriesResponse: - """Post-rpc interceptor for list_saved_queries - - DEPRECATED. Please use the `post_list_saved_queries_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AssetService server but before - it is returned to user code. This `post_list_saved_queries` interceptor runs - before the `post_list_saved_queries_with_metadata` interceptor. - """ - return response - - def post_list_saved_queries_with_metadata(self, response: asset_service.ListSavedQueriesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ListSavedQueriesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_saved_queries - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AssetService server but before it is returned to user code. - - We recommend only using this `post_list_saved_queries_with_metadata` - interceptor in new development instead of the `post_list_saved_queries` interceptor. - When both interceptors are used, this `post_list_saved_queries_with_metadata` interceptor runs after the - `post_list_saved_queries` interceptor. 
The (possibly modified) response returned by - `post_list_saved_queries` will be passed to - `post_list_saved_queries_with_metadata`. - """ - return response, metadata - - def pre_query_assets(self, request: asset_service.QueryAssetsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.QueryAssetsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for query_assets - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssetService server. - """ - return request, metadata - - def post_query_assets(self, response: asset_service.QueryAssetsResponse) -> asset_service.QueryAssetsResponse: - """Post-rpc interceptor for query_assets - - DEPRECATED. Please use the `post_query_assets_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AssetService server but before - it is returned to user code. This `post_query_assets` interceptor runs - before the `post_query_assets_with_metadata` interceptor. - """ - return response - - def post_query_assets_with_metadata(self, response: asset_service.QueryAssetsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.QueryAssetsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for query_assets - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AssetService server but before it is returned to user code. - - We recommend only using this `post_query_assets_with_metadata` - interceptor in new development instead of the `post_query_assets` interceptor. - When both interceptors are used, this `post_query_assets_with_metadata` interceptor runs after the - `post_query_assets` interceptor. The (possibly modified) response returned by - `post_query_assets` will be passed to - `post_query_assets_with_metadata`. - """ - return response, metadata - - def pre_search_all_iam_policies(self, request: asset_service.SearchAllIamPoliciesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SearchAllIamPoliciesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for search_all_iam_policies - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssetService server. - """ - return request, metadata - - def post_search_all_iam_policies(self, response: asset_service.SearchAllIamPoliciesResponse) -> asset_service.SearchAllIamPoliciesResponse: - """Post-rpc interceptor for search_all_iam_policies - - DEPRECATED. Please use the `post_search_all_iam_policies_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AssetService server but before - it is returned to user code. This `post_search_all_iam_policies` interceptor runs - before the `post_search_all_iam_policies_with_metadata` interceptor. - """ - return response - - def post_search_all_iam_policies_with_metadata(self, response: asset_service.SearchAllIamPoliciesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SearchAllIamPoliciesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for search_all_iam_policies - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AssetService server but before it is returned to user code. 
- - We recommend only using this `post_search_all_iam_policies_with_metadata` - interceptor in new development instead of the `post_search_all_iam_policies` interceptor. - When both interceptors are used, this `post_search_all_iam_policies_with_metadata` interceptor runs after the - `post_search_all_iam_policies` interceptor. The (possibly modified) response returned by - `post_search_all_iam_policies` will be passed to - `post_search_all_iam_policies_with_metadata`. - """ - return response, metadata - - def pre_search_all_resources(self, request: asset_service.SearchAllResourcesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SearchAllResourcesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for search_all_resources - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssetService server. - """ - return request, metadata - - def post_search_all_resources(self, response: asset_service.SearchAllResourcesResponse) -> asset_service.SearchAllResourcesResponse: - """Post-rpc interceptor for search_all_resources - - DEPRECATED. Please use the `post_search_all_resources_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AssetService server but before - it is returned to user code. This `post_search_all_resources` interceptor runs - before the `post_search_all_resources_with_metadata` interceptor. - """ - return response - - def post_search_all_resources_with_metadata(self, response: asset_service.SearchAllResourcesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SearchAllResourcesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for search_all_resources - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AssetService server but before it is returned to user code. - - We recommend only using this `post_search_all_resources_with_metadata` - interceptor in new development instead of the `post_search_all_resources` interceptor. - When both interceptors are used, this `post_search_all_resources_with_metadata` interceptor runs after the - `post_search_all_resources` interceptor. The (possibly modified) response returned by - `post_search_all_resources` will be passed to - `post_search_all_resources_with_metadata`. - """ - return response, metadata - - def pre_update_feed(self, request: asset_service.UpdateFeedRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.UpdateFeedRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_feed - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssetService server. - """ - return request, metadata - - def post_update_feed(self, response: asset_service.Feed) -> asset_service.Feed: - """Post-rpc interceptor for update_feed - - DEPRECATED. Please use the `post_update_feed_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AssetService server but before - it is returned to user code. This `post_update_feed` interceptor runs - before the `post_update_feed_with_metadata` interceptor. 
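-
-        If only the parsed ``Feed`` is needed and not the response metadata,
-        a minimal sketch (illustrative) of this legacy hook looks like:
-
-        .. code-block:: python
-
-            class FeedAuditInterceptor(AssetServiceRestInterceptor):
-                def post_update_feed(self, response):
-                    logging.info("feed %s updated", response.name)
-                    return response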
- """ - return response - - def post_update_feed_with_metadata(self, response: asset_service.Feed, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.Feed, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_feed - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AssetService server but before it is returned to user code. - - We recommend only using this `post_update_feed_with_metadata` - interceptor in new development instead of the `post_update_feed` interceptor. - When both interceptors are used, this `post_update_feed_with_metadata` interceptor runs after the - `post_update_feed` interceptor. The (possibly modified) response returned by - `post_update_feed` will be passed to - `post_update_feed_with_metadata`. - """ - return response, metadata - - def pre_update_saved_query(self, request: asset_service.UpdateSavedQueryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.UpdateSavedQueryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_saved_query - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssetService server. - """ - return request, metadata - - def post_update_saved_query(self, response: asset_service.SavedQuery) -> asset_service.SavedQuery: - """Post-rpc interceptor for update_saved_query - - DEPRECATED. Please use the `post_update_saved_query_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AssetService server but before - it is returned to user code. This `post_update_saved_query` interceptor runs - before the `post_update_saved_query_with_metadata` interceptor. - """ - return response - - def post_update_saved_query_with_metadata(self, response: asset_service.SavedQuery, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SavedQuery, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_saved_query - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AssetService server but before it is returned to user code. - - We recommend only using this `post_update_saved_query_with_metadata` - interceptor in new development instead of the `post_update_saved_query` interceptor. - When both interceptors are used, this `post_update_saved_query_with_metadata` interceptor runs after the - `post_update_saved_query` interceptor. The (possibly modified) response returned by - `post_update_saved_query` will be passed to - `post_update_saved_query_with_metadata`. - """ - return response, metadata - - def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssetService server. - """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the AssetService server but before - it is returned to user code. 
- """ - return response - - -@dataclasses.dataclass -class AssetServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: AssetServiceRestInterceptor - - -class AssetServiceRestTransport(_BaseAssetServiceRestTransport): - """REST backend synchronous transport for AssetService. - - Asset service definition. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'cloudasset.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[AssetServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'cloudasset.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
- # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - url_scheme=url_scheme, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or AssetServiceRestInterceptor() - self._prep_wrapped_messages(client_info) - - @property - def operations_client(self) -> operations_v1.AbstractOperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Only create a new client if we do not already have one. - if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.GetOperation': [ - { - 'method': 'get', - 'uri': '/v1/{name=*/*/operations/*/**}', - }, - ], - } - - rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1") - - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) - - # Return the client from cache. - return self._operations_client - - class _AnalyzeIamPolicy(_BaseAssetServiceRestTransport._BaseAnalyzeIamPolicy, AssetServiceRestStub): - def __hash__(self): - return hash("AssetServiceRestTransport.AnalyzeIamPolicy") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: asset_service.AnalyzeIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.AnalyzeIamPolicyResponse: - r"""Call the analyze iam policy method over HTTP. - - Args: - request (~.asset_service.AnalyzeIamPolicyRequest): - The request object. A request message for - [AssetService.AnalyzeIamPolicy][google.cloud.asset.v1.AssetService.AnalyzeIamPolicy]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.asset_service.AnalyzeIamPolicyResponse: - A response message for - [AssetService.AnalyzeIamPolicy][google.cloud.asset.v1.AssetService.AnalyzeIamPolicy]. 
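-
-            In normal use this stub is reached through
-            ``AssetServiceClient.analyze_iam_policy`` rather than invoked
-            directly; a minimal sketch (the scope value is illustrative):
-
-            .. code-block:: python
-
-                from google.cloud import asset_v1
-
-                client = asset_v1.AssetServiceClient()
-                response = client.analyze_iam_policy(
-                    request=asset_v1.AnalyzeIamPolicyRequest(
-                        analysis_query=asset_v1.IamPolicyAnalysisQuery(
-                            scope="projects/my-project",
-                        ),
-                    )
-                )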
- - """ - - http_options = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicy._get_http_options() - - request, metadata = self._interceptor.pre_analyze_iam_policy(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicy._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicy._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeIamPolicy", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "AnalyzeIamPolicy", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssetServiceRestTransport._AnalyzeIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = asset_service.AnalyzeIamPolicyResponse() - pb_resp = asset_service.AnalyzeIamPolicyResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_analyze_iam_policy(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_analyze_iam_policy_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = asset_service.AnalyzeIamPolicyResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.asset_v1.AssetServiceClient.analyze_iam_policy", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "AnalyzeIamPolicy", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _AnalyzeIamPolicyLongrunning(_BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning, AssetServiceRestStub): - def __hash__(self): - return hash("AssetServiceRestTransport.AnalyzeIamPolicyLongrunning") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: asset_service.AnalyzeIamPolicyLongrunningRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, 
Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the analyze iam policy - longrunning method over HTTP. - - Args: - request (~.asset_service.AnalyzeIamPolicyLongrunningRequest): - The request object. A request message for - [AssetService.AnalyzeIamPolicyLongrunning][google.cloud.asset.v1.AssetService.AnalyzeIamPolicyLongrunning]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning._get_http_options() - - request, metadata = self._interceptor.pre_analyze_iam_policy_longrunning(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning._get_transcoded_request(http_options, request) - - body = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeIamPolicyLongrunning", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "AnalyzeIamPolicyLongrunning", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssetServiceRestTransport._AnalyzeIamPolicyLongrunning._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
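-            # (Illustrative note: from_http_response maps the status code and JSON
-            # error body to a concrete subclass, e.g. NotFound for 404 or
-            # PermissionDenied for 403, so callers can catch
-            # google.api_core.exceptions.GoogleAPICallError.)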
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_analyze_iam_policy_longrunning(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_analyze_iam_policy_longrunning_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.asset_v1.AssetServiceClient.analyze_iam_policy_longrunning", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "AnalyzeIamPolicyLongrunning", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _AnalyzeMove(_BaseAssetServiceRestTransport._BaseAnalyzeMove, AssetServiceRestStub): - def __hash__(self): - return hash("AssetServiceRestTransport.AnalyzeMove") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: asset_service.AnalyzeMoveRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.AnalyzeMoveResponse: - r"""Call the analyze move method over HTTP. - - Args: - request (~.asset_service.AnalyzeMoveRequest): - The request object. The request message for performing - resource move analysis. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.asset_service.AnalyzeMoveResponse: - The response message for resource - move analysis. 
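-
-            A minimal sketch of the client-level call that ultimately reaches
-            this stub, assuming an ``asset_v1.AssetServiceClient`` named
-            ``client`` (resource and destination names are illustrative):
-
-            .. code-block:: python
-
-                response = client.analyze_move(
-                    request=asset_v1.AnalyzeMoveRequest(
-                        resource="projects/my-project",
-                        destination_parent="folders/123456789",
-                    )
-                )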
- - """ - - http_options = _BaseAssetServiceRestTransport._BaseAnalyzeMove._get_http_options() - - request, metadata = self._interceptor.pre_analyze_move(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseAnalyzeMove._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseAnalyzeMove._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeMove", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "AnalyzeMove", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssetServiceRestTransport._AnalyzeMove._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = asset_service.AnalyzeMoveResponse() - pb_resp = asset_service.AnalyzeMoveResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_analyze_move(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_analyze_move_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = asset_service.AnalyzeMoveResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.asset_v1.AssetServiceClient.analyze_move", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "AnalyzeMove", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _AnalyzeOrgPolicies(_BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicies, AssetServiceRestStub): - def __hash__(self): - return hash("AssetServiceRestTransport.AnalyzeOrgPolicies") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: asset_service.AnalyzeOrgPoliciesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.AnalyzeOrgPoliciesResponse: - r"""Call the analyze org policies method over HTTP. 
- - Args: - request (~.asset_service.AnalyzeOrgPoliciesRequest): - The request object. A request message for - [AssetService.AnalyzeOrgPolicies][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicies]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.asset_service.AnalyzeOrgPoliciesResponse: - The response message for - [AssetService.AnalyzeOrgPolicies][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicies]. - - """ - - http_options = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicies._get_http_options() - - request, metadata = self._interceptor.pre_analyze_org_policies(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicies._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicies._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeOrgPolicies", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "AnalyzeOrgPolicies", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssetServiceRestTransport._AnalyzeOrgPolicies._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
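# Note: the status check below is the generic REST error-mapping step used by
# every stub in this file. google.api_core.exceptions.from_http_response()
# picks the GoogleAPICallError subclass matching the HTTP status and carries
# over the message parsed from the JSON error body. A minimal, self-contained
# sketch of the same pattern, using only the public google-api-core and
# requests packages:

import requests
from google.api_core import exceptions as core_exceptions

def raise_for_status(response: requests.Response) -> None:
    # 404 -> NotFound, 403 -> PermissionDenied, 500 -> InternalServerError, ...
    if response.status_code >= 400:
        raise core_exceptions.from_http_response(response)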
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = asset_service.AnalyzeOrgPoliciesResponse() - pb_resp = asset_service.AnalyzeOrgPoliciesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_analyze_org_policies(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_analyze_org_policies_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = asset_service.AnalyzeOrgPoliciesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.asset_v1.AssetServiceClient.analyze_org_policies", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "AnalyzeOrgPolicies", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _AnalyzeOrgPolicyGovernedAssets(_BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedAssets, AssetServiceRestStub): - def __hash__(self): - return hash("AssetServiceRestTransport.AnalyzeOrgPolicyGovernedAssets") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.AnalyzeOrgPolicyGovernedAssetsResponse: - r"""Call the analyze org policy - governed assets method over HTTP. - - Args: - request (~.asset_service.AnalyzeOrgPolicyGovernedAssetsRequest): - The request object. A request message for - [AssetService.AnalyzeOrgPolicyGovernedAssets][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedAssets]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.asset_service.AnalyzeOrgPolicyGovernedAssetsResponse: - The response message for - [AssetService.AnalyzeOrgPolicyGovernedAssets][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedAssets]. 
- - """ - - http_options = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedAssets._get_http_options() - - request, metadata = self._interceptor.pre_analyze_org_policy_governed_assets(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedAssets._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedAssets._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeOrgPolicyGovernedAssets", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "AnalyzeOrgPolicyGovernedAssets", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssetServiceRestTransport._AnalyzeOrgPolicyGovernedAssets._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse() - pb_resp = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_analyze_org_policy_governed_assets(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_analyze_org_policy_governed_assets_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.asset_v1.AssetServiceClient.analyze_org_policy_governed_assets", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "AnalyzeOrgPolicyGovernedAssets", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _AnalyzeOrgPolicyGovernedContainers(_BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedContainers, AssetServiceRestStub): - def __hash__(self): - return hash("AssetServiceRestTransport.AnalyzeOrgPolicyGovernedContainers") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return 
response - - def __call__(self, - request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.AnalyzeOrgPolicyGovernedContainersResponse: - r"""Call the analyze org policy - governed containers method over HTTP. - - Args: - request (~.asset_service.AnalyzeOrgPolicyGovernedContainersRequest): - The request object. A request message for - [AssetService.AnalyzeOrgPolicyGovernedContainers][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedContainers]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.asset_service.AnalyzeOrgPolicyGovernedContainersResponse: - The response message for - [AssetService.AnalyzeOrgPolicyGovernedContainers][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedContainers]. - - """ - - http_options = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedContainers._get_http_options() - - request, metadata = self._interceptor.pre_analyze_org_policy_governed_containers(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedContainers._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedContainers._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeOrgPolicyGovernedContainers", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "AnalyzeOrgPolicyGovernedContainers", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssetServiceRestTransport._AnalyzeOrgPolicyGovernedContainers._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
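# Note: every stub gates payload logging on _LOGGER.isEnabledFor(logging.DEBUG),
# so serializing the request or response to JSON is skipped entirely unless
# DEBUG logging was explicitly enabled. The "extra" mapping becomes attributes
# on the emitted LogRecord, where structured handlers can read it. A
# stand-alone sketch of the same guard; the logger name is illustrative:

import logging
from typing import Optional

_LOGGER = logging.getLogger("google.cloud.asset_v1")

def log_http_request(method: str, url: str, payload: Optional[str]) -> None:
    # The isEnabledFor() check keeps the serialization cost off the hot path.
    if _LOGGER.isEnabledFor(logging.DEBUG):
        _LOGGER.debug(
            "Sending request",
            extra={"requestMethod": method, "requestUrl": url, "payload": payload},
        )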
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() - pb_resp = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_analyze_org_policy_governed_containers(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_analyze_org_policy_governed_containers_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.asset_v1.AssetServiceClient.analyze_org_policy_governed_containers", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "AnalyzeOrgPolicyGovernedContainers", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _BatchGetAssetsHistory(_BaseAssetServiceRestTransport._BaseBatchGetAssetsHistory, AssetServiceRestStub): - def __hash__(self): - return hash("AssetServiceRestTransport.BatchGetAssetsHistory") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: asset_service.BatchGetAssetsHistoryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.BatchGetAssetsHistoryResponse: - r"""Call the batch get assets history method over HTTP. - - Args: - request (~.asset_service.BatchGetAssetsHistoryRequest): - The request object. Batch get assets history request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.asset_service.BatchGetAssetsHistoryResponse: - Batch get assets history response. 
- """ - - http_options = _BaseAssetServiceRestTransport._BaseBatchGetAssetsHistory._get_http_options() - - request, metadata = self._interceptor.pre_batch_get_assets_history(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseBatchGetAssetsHistory._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseBatchGetAssetsHistory._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.BatchGetAssetsHistory", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "BatchGetAssetsHistory", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssetServiceRestTransport._BatchGetAssetsHistory._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = asset_service.BatchGetAssetsHistoryResponse() - pb_resp = asset_service.BatchGetAssetsHistoryResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_batch_get_assets_history(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_batch_get_assets_history_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = asset_service.BatchGetAssetsHistoryResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.asset_v1.AssetServiceClient.batch_get_assets_history", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "BatchGetAssetsHistory", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _BatchGetEffectiveIamPolicies(_BaseAssetServiceRestTransport._BaseBatchGetEffectiveIamPolicies, AssetServiceRestStub): - def __hash__(self): - return hash("AssetServiceRestTransport.BatchGetEffectiveIamPolicies") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: asset_service.BatchGetEffectiveIamPoliciesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - 
timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.BatchGetEffectiveIamPoliciesResponse: - r"""Call the batch get effective iam - policies method over HTTP. - - Args: - request (~.asset_service.BatchGetEffectiveIamPoliciesRequest): - The request object. A request message for - [AssetService.BatchGetEffectiveIamPolicies][google.cloud.asset.v1.AssetService.BatchGetEffectiveIamPolicies]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.asset_service.BatchGetEffectiveIamPoliciesResponse: - A response message for - [AssetService.BatchGetEffectiveIamPolicies][google.cloud.asset.v1.AssetService.BatchGetEffectiveIamPolicies]. - - """ - - http_options = _BaseAssetServiceRestTransport._BaseBatchGetEffectiveIamPolicies._get_http_options() - - request, metadata = self._interceptor.pre_batch_get_effective_iam_policies(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseBatchGetEffectiveIamPolicies._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseBatchGetEffectiveIamPolicies._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.BatchGetEffectiveIamPolicies", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "BatchGetEffectiveIamPolicies", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssetServiceRestTransport._BatchGetEffectiveIamPolicies._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
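# Note: each stub brackets the HTTP exchange with interceptor hooks. The
# pre_<rpc> hook runs before the request is transcoded to HTTP and may rewrite
# the request or metadata; the post_<rpc> hook (and its _with_metadata variant)
# sees the parsed response message and may replace it. A hypothetical
# interceptor sketching the shape of these hooks; a real one would subclass the
# generated REST interceptor base class rather than stand alone:

class AuditingInterceptor:
    def pre_batch_get_effective_iam_policies(self, request, metadata):
        # May rewrite the request or append metadata pairs before the call.
        return request, tuple(metadata) + (("x-request-origin", "audit-job"),)

    def post_batch_get_effective_iam_policies(self, response):
        # Receives the parsed BatchGetEffectiveIamPoliciesResponse and may
        # replace it before the caller sees it.
        return response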
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = asset_service.BatchGetEffectiveIamPoliciesResponse() - pb_resp = asset_service.BatchGetEffectiveIamPoliciesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_batch_get_effective_iam_policies(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_batch_get_effective_iam_policies_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = asset_service.BatchGetEffectiveIamPoliciesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.asset_v1.AssetServiceClient.batch_get_effective_iam_policies", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "BatchGetEffectiveIamPolicies", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateFeed(_BaseAssetServiceRestTransport._BaseCreateFeed, AssetServiceRestStub): - def __hash__(self): - return hash("AssetServiceRestTransport.CreateFeed") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: asset_service.CreateFeedRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.Feed: - r"""Call the create feed method over HTTP. - - Args: - request (~.asset_service.CreateFeedRequest): - The request object. Create asset feed request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.asset_service.Feed: - An asset feed used to export asset - updates to a destinations. An asset feed - filter controls what updates are - exported. The asset feed must be created - within a project, organization, or - folder. Supported destinations are: - - Pub/Sub topics. 
- - """ - - http_options = _BaseAssetServiceRestTransport._BaseCreateFeed._get_http_options() - - request, metadata = self._interceptor.pre_create_feed(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseCreateFeed._get_transcoded_request(http_options, request) - - body = _BaseAssetServiceRestTransport._BaseCreateFeed._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseCreateFeed._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.CreateFeed", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "CreateFeed", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssetServiceRestTransport._CreateFeed._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = asset_service.Feed() - pb_resp = asset_service.Feed.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_feed(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_feed_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = asset_service.Feed.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.asset_v1.AssetServiceClient.create_feed", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "CreateFeed", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateSavedQuery(_BaseAssetServiceRestTransport._BaseCreateSavedQuery, AssetServiceRestStub): - def __hash__(self): - return hash("AssetServiceRestTransport.CreateSavedQuery") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: asset_service.CreateSavedQueryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.SavedQuery: - r"""Call the 
create saved query method over HTTP. - - Args: - request (~.asset_service.CreateSavedQueryRequest): - The request object. Request to create a saved query. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.asset_service.SavedQuery: - A saved query which can be shared - with others or used later. - - """ - - http_options = _BaseAssetServiceRestTransport._BaseCreateSavedQuery._get_http_options() - - request, metadata = self._interceptor.pre_create_saved_query(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseCreateSavedQuery._get_transcoded_request(http_options, request) - - body = _BaseAssetServiceRestTransport._BaseCreateSavedQuery._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseCreateSavedQuery._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.CreateSavedQuery", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "CreateSavedQuery", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssetServiceRestTransport._CreateSavedQuery._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
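# Note: at the public client surface all of this plumbing collapses into one
# method call. A usage sketch for the saved-query RPC handled here, over the
# REST transport; the parent resource and query id are placeholders:

from google.cloud import asset_v1

client = asset_v1.AssetServiceClient(transport="rest")
created = client.create_saved_query(
    parent="projects/my-project",             # placeholder parent resource
    saved_query=asset_v1.SavedQuery(description="Queries I reuse"),
    saved_query_id="my-saved-query",          # placeholder id
)
print(created.name)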
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = asset_service.SavedQuery() - pb_resp = asset_service.SavedQuery.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_saved_query(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_saved_query_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = asset_service.SavedQuery.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.asset_v1.AssetServiceClient.create_saved_query", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "CreateSavedQuery", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteFeed(_BaseAssetServiceRestTransport._BaseDeleteFeed, AssetServiceRestStub): - def __hash__(self): - return hash("AssetServiceRestTransport.DeleteFeed") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: asset_service.DeleteFeedRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the delete feed method over HTTP. - - Args: - request (~.asset_service.DeleteFeedRequest): - The request object. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = _BaseAssetServiceRestTransport._BaseDeleteFeed._get_http_options() - - request, metadata = self._interceptor.pre_delete_feed(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseDeleteFeed._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseDeleteFeed._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.DeleteFeed", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "DeleteFeed", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssetServiceRestTransport._DeleteFeed._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _DeleteSavedQuery(_BaseAssetServiceRestTransport._BaseDeleteSavedQuery, AssetServiceRestStub): - def __hash__(self): - return hash("AssetServiceRestTransport.DeleteSavedQuery") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: asset_service.DeleteSavedQueryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the delete saved query method over HTTP. - - Args: - request (~.asset_service.DeleteSavedQueryRequest): - The request object. Request to delete a saved query. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = _BaseAssetServiceRestTransport._BaseDeleteSavedQuery._get_http_options() - - request, metadata = self._interceptor.pre_delete_saved_query(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseDeleteSavedQuery._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseDeleteSavedQuery._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.DeleteSavedQuery", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "DeleteSavedQuery", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssetServiceRestTransport._DeleteSavedQuery._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _ExportAssets(_BaseAssetServiceRestTransport._BaseExportAssets, AssetServiceRestStub): - def __hash__(self): - return hash("AssetServiceRestTransport.ExportAssets") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: asset_service.ExportAssetsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the export assets method over HTTP. - - Args: - request (~.asset_service.ExportAssetsRequest): - The request object. Export asset request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = _BaseAssetServiceRestTransport._BaseExportAssets._get_http_options() - - request, metadata = self._interceptor.pre_export_assets(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseExportAssets._get_transcoded_request(http_options, request) - - body = _BaseAssetServiceRestTransport._BaseExportAssets._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseExportAssets._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.ExportAssets", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "ExportAssets", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssetServiceRestTransport._ExportAssets._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_export_assets(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_export_assets_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.asset_v1.AssetServiceClient.export_assets", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "ExportAssets", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetFeed(_BaseAssetServiceRestTransport._BaseGetFeed, AssetServiceRestStub): - def __hash__(self): - return hash("AssetServiceRestTransport.GetFeed") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: asset_service.GetFeedRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.Feed: - r"""Call the get feed method over HTTP. 
- - Args: - request (~.asset_service.GetFeedRequest): - The request object. Get asset feed request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.asset_service.Feed: - An asset feed used to export asset - updates to a destinations. An asset feed - filter controls what updates are - exported. The asset feed must be created - within a project, organization, or - folder. Supported destinations are: - - Pub/Sub topics. - - """ - - http_options = _BaseAssetServiceRestTransport._BaseGetFeed._get_http_options() - - request, metadata = self._interceptor.pre_get_feed(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseGetFeed._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseGetFeed._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.GetFeed", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "GetFeed", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssetServiceRestTransport._GetFeed._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
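# Note: the parsing step below decodes the HTTP body into the protobuf with
# json_format.Parse(..., ignore_unknown_fields=True), so a server that has
# grown new response fields does not break an older client. The same call in
# isolation, using a well-known protobuf type as a stand-in message:

from google.protobuf import json_format
from google.protobuf import type_pb2

msg = type_pb2.Type()
# Without ignore_unknown_fields=True, the unexpected "futureField" key would
# raise json_format.ParseError.
json_format.Parse('{"name": "example", "futureField": 1}', msg,
                  ignore_unknown_fields=True)
assert msg.name == "example"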
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = asset_service.Feed() - pb_resp = asset_service.Feed.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_feed(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_feed_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = asset_service.Feed.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.asset_v1.AssetServiceClient.get_feed", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "GetFeed", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetSavedQuery(_BaseAssetServiceRestTransport._BaseGetSavedQuery, AssetServiceRestStub): - def __hash__(self): - return hash("AssetServiceRestTransport.GetSavedQuery") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: asset_service.GetSavedQueryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.SavedQuery: - r"""Call the get saved query method over HTTP. - - Args: - request (~.asset_service.GetSavedQueryRequest): - The request object. Request to get a saved query. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.asset_service.SavedQuery: - A saved query which can be shared - with others or used later. 
- - """ - - http_options = _BaseAssetServiceRestTransport._BaseGetSavedQuery._get_http_options() - - request, metadata = self._interceptor.pre_get_saved_query(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseGetSavedQuery._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseGetSavedQuery._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.GetSavedQuery", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "GetSavedQuery", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssetServiceRestTransport._GetSavedQuery._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = asset_service.SavedQuery() - pb_resp = asset_service.SavedQuery.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_saved_query(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_saved_query_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = asset_service.SavedQuery.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.asset_v1.AssetServiceClient.get_saved_query", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "GetSavedQuery", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListAssets(_BaseAssetServiceRestTransport._BaseListAssets, AssetServiceRestStub): - def __hash__(self): - return hash("AssetServiceRestTransport.ListAssets") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: asset_service.ListAssetsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.ListAssetsResponse: - r"""Call the list assets method over HTTP. 
- - Args: - request (~.asset_service.ListAssetsRequest): - The request object. ListAssets request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.asset_service.ListAssetsResponse: - ListAssets response. - """ - - http_options = _BaseAssetServiceRestTransport._BaseListAssets._get_http_options() - - request, metadata = self._interceptor.pre_list_assets(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseListAssets._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseListAssets._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.ListAssets", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "ListAssets", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssetServiceRestTransport._ListAssets._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
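# Note: callers never deal with this per-page handling directly. The public
# client wraps ListAssets in a pager that keeps issuing requests while the
# response carries a next_page_token. Usage sketch; the parent is a placeholder:

from google.cloud import asset_v1

client = asset_v1.AssetServiceClient(transport="rest")
for asset in client.list_assets(request={"parent": "projects/my-project"}):
    # Each iteration may transparently trigger another ListAssets call.
    print(asset.name)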
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = asset_service.ListAssetsResponse() - pb_resp = asset_service.ListAssetsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_assets(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_assets_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = asset_service.ListAssetsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.asset_v1.AssetServiceClient.list_assets", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "ListAssets", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListFeeds(_BaseAssetServiceRestTransport._BaseListFeeds, AssetServiceRestStub): - def __hash__(self): - return hash("AssetServiceRestTransport.ListFeeds") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: asset_service.ListFeedsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.ListFeedsResponse: - r"""Call the list feeds method over HTTP. - - Args: - request (~.asset_service.ListFeedsRequest): - The request object. List asset feeds request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- - Returns: - ~.asset_service.ListFeedsResponse: - - """ - - http_options = _BaseAssetServiceRestTransport._BaseListFeeds._get_http_options() - - request, metadata = self._interceptor.pre_list_feeds(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseListFeeds._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseListFeeds._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.ListFeeds", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "ListFeeds", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssetServiceRestTransport._ListFeeds._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = asset_service.ListFeedsResponse() - pb_resp = asset_service.ListFeedsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_feeds(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_feeds_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = asset_service.ListFeedsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.asset_v1.AssetServiceClient.list_feeds", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "ListFeeds", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListSavedQueries(_BaseAssetServiceRestTransport._BaseListSavedQueries, AssetServiceRestStub): - def __hash__(self): - return hash("AssetServiceRestTransport.ListSavedQueries") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: asset_service.ListSavedQueriesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.ListSavedQueriesResponse: - r"""Call the list saved queries method 
over HTTP. - - Args: - request (~.asset_service.ListSavedQueriesRequest): - The request object. Request to list saved queries. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.asset_service.ListSavedQueriesResponse: - Response of listing saved queries. - """ - - http_options = _BaseAssetServiceRestTransport._BaseListSavedQueries._get_http_options() - - request, metadata = self._interceptor.pre_list_saved_queries(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseListSavedQueries._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseListSavedQueries._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.ListSavedQueries", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "ListSavedQueries", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssetServiceRestTransport._ListSavedQueries._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = asset_service.ListSavedQueriesResponse() - pb_resp = asset_service.ListSavedQueriesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_saved_queries(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_saved_queries_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = asset_service.ListSavedQueriesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.asset_v1.AssetServiceClient.list_saved_queries", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "ListSavedQueries", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _QueryAssets(_BaseAssetServiceRestTransport._BaseQueryAssets, AssetServiceRestStub): - def __hash__(self): - return hash("AssetServiceRestTransport.QueryAssets") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: asset_service.QueryAssetsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.QueryAssetsResponse: - r"""Call the query assets method over HTTP. - - Args: - request (~.asset_service.QueryAssetsRequest): - The request object. QueryAssets request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.asset_service.QueryAssetsResponse: - QueryAssets response. 
- """ - - http_options = _BaseAssetServiceRestTransport._BaseQueryAssets._get_http_options() - - request, metadata = self._interceptor.pre_query_assets(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseQueryAssets._get_transcoded_request(http_options, request) - - body = _BaseAssetServiceRestTransport._BaseQueryAssets._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseQueryAssets._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.QueryAssets", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "QueryAssets", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssetServiceRestTransport._QueryAssets._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = asset_service.QueryAssetsResponse() - pb_resp = asset_service.QueryAssetsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_query_assets(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_query_assets_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = asset_service.QueryAssetsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.asset_v1.AssetServiceClient.query_assets", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "QueryAssets", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _SearchAllIamPolicies(_BaseAssetServiceRestTransport._BaseSearchAllIamPolicies, AssetServiceRestStub): - def __hash__(self): - return hash("AssetServiceRestTransport.SearchAllIamPolicies") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: asset_service.SearchAllIamPoliciesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, 
bytes]]]=(), - ) -> asset_service.SearchAllIamPoliciesResponse: - r"""Call the search all iam policies method over HTTP. - - Args: - request (~.asset_service.SearchAllIamPoliciesRequest): - The request object. Search all IAM policies request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.asset_service.SearchAllIamPoliciesResponse: - Search all IAM policies response. - """ - - http_options = _BaseAssetServiceRestTransport._BaseSearchAllIamPolicies._get_http_options() - - request, metadata = self._interceptor.pre_search_all_iam_policies(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseSearchAllIamPolicies._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseSearchAllIamPolicies._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.SearchAllIamPolicies", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "SearchAllIamPolicies", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssetServiceRestTransport._SearchAllIamPolicies._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = asset_service.SearchAllIamPoliciesResponse() - pb_resp = asset_service.SearchAllIamPoliciesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_search_all_iam_policies(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_search_all_iam_policies_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = asset_service.SearchAllIamPoliciesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.asset_v1.AssetServiceClient.search_all_iam_policies", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "SearchAllIamPolicies", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _SearchAllResources(_BaseAssetServiceRestTransport._BaseSearchAllResources, AssetServiceRestStub): - def __hash__(self): - return hash("AssetServiceRestTransport.SearchAllResources") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: asset_service.SearchAllResourcesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.SearchAllResourcesResponse: - r"""Call the search all resources method over HTTP. - - Args: - request (~.asset_service.SearchAllResourcesRequest): - The request object. Search all resources request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.asset_service.SearchAllResourcesResponse: - Search all resources response. 
- """ - - http_options = _BaseAssetServiceRestTransport._BaseSearchAllResources._get_http_options() - - request, metadata = self._interceptor.pre_search_all_resources(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseSearchAllResources._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseSearchAllResources._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.SearchAllResources", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "SearchAllResources", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssetServiceRestTransport._SearchAllResources._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = asset_service.SearchAllResourcesResponse() - pb_resp = asset_service.SearchAllResourcesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_search_all_resources(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_search_all_resources_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = asset_service.SearchAllResourcesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.asset_v1.AssetServiceClient.search_all_resources", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "SearchAllResources", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateFeed(_BaseAssetServiceRestTransport._BaseUpdateFeed, AssetServiceRestStub): - def __hash__(self): - return hash("AssetServiceRestTransport.UpdateFeed") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: asset_service.UpdateFeedRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> 
asset_service.Feed: - r"""Call the update feed method over HTTP. - - Args: - request (~.asset_service.UpdateFeedRequest): - The request object. Update asset feed request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.asset_service.Feed: - An asset feed used to export asset - updates to destinations. An asset feed - filter controls what updates are - exported. The asset feed must be created - within a project, organization, or - folder. Supported destinations are: - - Pub/Sub topics. - - """ - - http_options = _BaseAssetServiceRestTransport._BaseUpdateFeed._get_http_options() - - request, metadata = self._interceptor.pre_update_feed(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseUpdateFeed._get_transcoded_request(http_options, request) - - body = _BaseAssetServiceRestTransport._BaseUpdateFeed._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseUpdateFeed._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.UpdateFeed", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "UpdateFeed", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssetServiceRestTransport._UpdateFeed._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = asset_service.Feed() - pb_resp = asset_service.Feed.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_feed(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_feed_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = asset_service.Feed.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.asset_v1.AssetServiceClient.update_feed", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "UpdateFeed", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateSavedQuery(_BaseAssetServiceRestTransport._BaseUpdateSavedQuery, AssetServiceRestStub): - def __hash__(self): - return hash("AssetServiceRestTransport.UpdateSavedQuery") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: asset_service.UpdateSavedQueryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.SavedQuery: - r"""Call the update saved query method over HTTP. - - Args: - request (~.asset_service.UpdateSavedQueryRequest): - The request object. Request to update a saved query. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.asset_service.SavedQuery: - A saved query which can be shared - with others or used later. 
- - """ - - http_options = _BaseAssetServiceRestTransport._BaseUpdateSavedQuery._get_http_options() - - request, metadata = self._interceptor.pre_update_saved_query(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseUpdateSavedQuery._get_transcoded_request(http_options, request) - - body = _BaseAssetServiceRestTransport._BaseUpdateSavedQuery._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseUpdateSavedQuery._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.UpdateSavedQuery", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "UpdateSavedQuery", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssetServiceRestTransport._UpdateSavedQuery._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = asset_service.SavedQuery() - pb_resp = asset_service.SavedQuery.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_saved_query(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_saved_query_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = asset_service.SavedQuery.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.asset_v1.AssetServiceClient.update_saved_query", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "UpdateSavedQuery", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - @property - def analyze_iam_policy(self) -> Callable[ - [asset_service.AnalyzeIamPolicyRequest], - asset_service.AnalyzeIamPolicyResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._AnalyzeIamPolicy(self._session, self._host, self._interceptor) # type: ignore - - @property - def analyze_iam_policy_longrunning(self) -> Callable[ - [asset_service.AnalyzeIamPolicyLongrunningRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._AnalyzeIamPolicyLongrunning(self._session, self._host, self._interceptor) # type: ignore - - @property - def analyze_move(self) -> Callable[ - [asset_service.AnalyzeMoveRequest], - asset_service.AnalyzeMoveResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._AnalyzeMove(self._session, self._host, self._interceptor) # type: ignore - - @property - def analyze_org_policies(self) -> Callable[ - [asset_service.AnalyzeOrgPoliciesRequest], - asset_service.AnalyzeOrgPoliciesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._AnalyzeOrgPolicies(self._session, self._host, self._interceptor) # type: ignore - - @property - def analyze_org_policy_governed_assets(self) -> Callable[ - [asset_service.AnalyzeOrgPolicyGovernedAssetsRequest], - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._AnalyzeOrgPolicyGovernedAssets(self._session, self._host, self._interceptor) # type: ignore - - @property - def analyze_org_policy_governed_containers(self) -> Callable[ - [asset_service.AnalyzeOrgPolicyGovernedContainersRequest], - asset_service.AnalyzeOrgPolicyGovernedContainersResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._AnalyzeOrgPolicyGovernedContainers(self._session, self._host, self._interceptor) # type: ignore - - @property - def batch_get_assets_history(self) -> Callable[ - [asset_service.BatchGetAssetsHistoryRequest], - asset_service.BatchGetAssetsHistoryResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._BatchGetAssetsHistory(self._session, self._host, self._interceptor) # type: ignore - - @property - def batch_get_effective_iam_policies(self) -> Callable[ - [asset_service.BatchGetEffectiveIamPoliciesRequest], - asset_service.BatchGetEffectiveIamPoliciesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._BatchGetEffectiveIamPolicies(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_feed(self) -> Callable[ - [asset_service.CreateFeedRequest], - asset_service.Feed]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateFeed(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_saved_query(self) -> Callable[ - [asset_service.CreateSavedQueryRequest], - asset_service.SavedQuery]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateSavedQuery(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_feed(self) -> Callable[ - [asset_service.DeleteFeedRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._DeleteFeed(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_saved_query(self) -> Callable[ - [asset_service.DeleteSavedQueryRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteSavedQuery(self._session, self._host, self._interceptor) # type: ignore - - @property - def export_assets(self) -> Callable[ - [asset_service.ExportAssetsRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ExportAssets(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_feed(self) -> Callable[ - [asset_service.GetFeedRequest], - asset_service.Feed]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetFeed(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_saved_query(self) -> Callable[ - [asset_service.GetSavedQueryRequest], - asset_service.SavedQuery]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetSavedQuery(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_assets(self) -> Callable[ - [asset_service.ListAssetsRequest], - asset_service.ListAssetsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListAssets(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_feeds(self) -> Callable[ - [asset_service.ListFeedsRequest], - asset_service.ListFeedsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListFeeds(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_saved_queries(self) -> Callable[ - [asset_service.ListSavedQueriesRequest], - asset_service.ListSavedQueriesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListSavedQueries(self._session, self._host, self._interceptor) # type: ignore - - @property - def query_assets(self) -> Callable[ - [asset_service.QueryAssetsRequest], - asset_service.QueryAssetsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._QueryAssets(self._session, self._host, self._interceptor) # type: ignore - - @property - def search_all_iam_policies(self) -> Callable[ - [asset_service.SearchAllIamPoliciesRequest], - asset_service.SearchAllIamPoliciesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._SearchAllIamPolicies(self._session, self._host, self._interceptor) # type: ignore - - @property - def search_all_resources(self) -> Callable[ - [asset_service.SearchAllResourcesRequest], - asset_service.SearchAllResourcesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._SearchAllResources(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_feed(self) -> Callable[ - [asset_service.UpdateFeedRequest], - asset_service.Feed]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateFeed(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_saved_query(self) -> Callable[ - [asset_service.UpdateSavedQueryRequest], - asset_service.SavedQuery]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateSavedQuery(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(_BaseAssetServiceRestTransport._BaseGetOperation, AssetServiceRestStub): - def __hash__(self): - return hash("AssetServiceRestTransport.GetOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.Operation: Response from GetOperation method. 
- """ - - http_options = _BaseAssetServiceRestTransport._BaseGetOperation._get_http_options() - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.GetOperation", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "GetOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssetServiceRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.Operation() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.asset_v1.AssetServiceAsyncClient.GetOperation", - extra = { - "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": "GetOperation", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'AssetServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/services/asset_service/transports/rest_base.py b/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/services/asset_service/transports/rest_base.py deleted file mode 100644 index f84545087131..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/services/asset_service/transports/rest_base.py +++ /dev/null @@ -1,1039 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from .base import AssetServiceTransport, DEFAULT_CLIENT_INFO - -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - - -from google.cloud.asset_v1.types import asset_service -from google.protobuf import empty_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - - -class _BaseAssetServiceRestTransport(AssetServiceTransport): - """Base REST backend transport for AssetService. - - Note: This class is not meant to be used directly. Use its sync and - async sub-classes instead. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'cloudasset.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - Args: - host (Optional[str]): - The hostname to connect to (default: 'cloudasset.googleapis.com'). - credentials (Optional[Any]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. 
- """ - # Run the base constructor - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - - class _BaseAnalyzeIamPolicy: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "analysisQuery" : {}, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{analysis_query.scope=*/*}:analyzeIamPolicy', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = asset_service.AnalyzeIamPolicyRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseAnalyzeIamPolicy._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseAnalyzeIamPolicyLongrunning: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{analysis_query.scope=*/*}:analyzeIamPolicyLongrunning', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = asset_service.AnalyzeIamPolicyLongrunningRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseAnalyzeMove: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "destinationParent" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in 
message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{resource=*/*}:analyzeMove', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = asset_service.AnalyzeMoveRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseAnalyzeMove._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseAnalyzeOrgPolicies: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "constraint" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{scope=*/*}:analyzeOrgPolicies', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = asset_service.AnalyzeOrgPoliciesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicies._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseAnalyzeOrgPolicyGovernedAssets: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "constraint" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{scope=*/*}:analyzeOrgPolicyGovernedAssets', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedAssets._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseAnalyzeOrgPolicyGovernedContainers: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "constraint" : "", } - - @classmethod - def _get_unset_required_fields(cls, 
message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{scope=*/*}:analyzeOrgPolicyGovernedContainers', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedContainers._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseBatchGetAssetsHistory: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=*/*}:batchGetAssetsHistory', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = asset_service.BatchGetAssetsHistoryRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseBatchGetAssetsHistory._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseBatchGetEffectiveIamPolicies: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "names" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{scope=*/*}/effectiveIamPolicies:batchGet', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = asset_service.BatchGetEffectiveIamPoliciesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseBatchGetEffectiveIamPolicies._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateFeed: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - 
__REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=*/*}/feeds', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = asset_service.CreateFeedRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseCreateFeed._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateSavedQuery: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "savedQueryId" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=*/*}/savedQueries', - 'body': 'saved_query', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = asset_service.CreateSavedQueryRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseCreateSavedQuery._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteFeed: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=*/*/feeds/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = asset_service.DeleteFeedRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( 
- transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseDeleteFeed._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteSavedQuery: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=*/*/savedQueries/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = asset_service.DeleteSavedQueryRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseDeleteSavedQuery._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseExportAssets: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=*/*}:exportAssets', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = asset_service.ExportAssetsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseExportAssets._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetFeed: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=*/*/feeds/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = asset_service.GetFeedRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return 
transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseGetFeed._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetSavedQuery: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=*/*/savedQueries/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = asset_service.GetSavedQueryRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseGetSavedQuery._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListAssets: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=*/*}/assets', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = asset_service.ListAssetsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseListAssets._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListFeeds: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=*/*}/feeds', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = asset_service.ListFeedsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = 
json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseListFeeds._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListSavedQueries: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=*/*}/savedQueries', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = asset_service.ListSavedQueriesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseListSavedQueries._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseQueryAssets: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=*/*}:queryAssets', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = asset_service.QueryAssetsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseQueryAssets._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseSearchAllIamPolicies: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{scope=*/*}:searchAllIamPolicies', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = asset_service.SearchAllIamPoliciesRequest.pb(request) - 
transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseSearchAllIamPolicies._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseSearchAllResources: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{scope=*/*}:searchAllResources', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = asset_service.SearchAllResourcesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseSearchAllResources._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateFeed: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{feed.name=*/*/feeds/*}', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = asset_service.UpdateFeedRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseUpdateFeed._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateSavedQuery: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': 
'/v1/{saved_query.name=*/*/savedQueries/*}', - 'body': 'saved_query', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = asset_service.UpdateSavedQueryRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseUpdateSavedQuery._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=*/*/operations/*/**}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - -__all__=( - '_BaseAssetServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/types/__init__.py b/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/types/__init__.py deleted file mode 100644 index ba9f7e03710b..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/types/__init__.py +++ /dev/null @@ -1,180 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
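As context for the ``_Base*`` helper classes above: each one exposes the same three-step recipe (HTTP rule, transcoded request, query params) that the concrete REST transport composes into a call. A minimal sketch of that flow, assuming the google-cloud-asset package is installed and that the deleted module lived at the conventional ``transports/rest_base`` path (an inference from this diff's layout, not something the diff states; these are private helpers, exercised here only for illustration):

.. code-block:: python

    # Sketch only: drive the deleted _BaseGetFeed helpers by hand.
    from google.cloud.asset_v1.services.asset_service.transports.rest_base import (
        _BaseAssetServiceRestTransport,
    )
    from google.cloud.asset_v1.types import asset_service

    base = _BaseAssetServiceRestTransport._BaseGetFeed
    request = asset_service.GetFeedRequest(name="projects/123/feeds/my-feed")

    http_options = base._get_http_options()
    # transcode() matches the request onto '/v1/{name=*/*/feeds/*}' and
    # splits it into method, uri, and leftover query_params.
    transcoded = base._get_transcoded_request(http_options, request)
    # Leftover fields become query parameters, plus the '$alt' marker that
    # requests JSON with integer enum encoding.
    query_params = base._get_query_params_json(transcoded)

    print(transcoded["method"], transcoded["uri"])  # get /v1/projects/123/feeds/my-feed
    print(query_params)                             # {'$alt': 'json;enum-encoding=int'}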
-# -from .asset_service import ( - AnalyzeIamPolicyLongrunningMetadata, - AnalyzeIamPolicyLongrunningRequest, - AnalyzeIamPolicyLongrunningResponse, - AnalyzeIamPolicyRequest, - AnalyzeIamPolicyResponse, - AnalyzeMoveRequest, - AnalyzeMoveResponse, - AnalyzeOrgPoliciesRequest, - AnalyzeOrgPoliciesResponse, - AnalyzeOrgPolicyGovernedAssetsRequest, - AnalyzeOrgPolicyGovernedAssetsResponse, - AnalyzeOrgPolicyGovernedContainersRequest, - AnalyzeOrgPolicyGovernedContainersResponse, - AnalyzerOrgPolicy, - AnalyzerOrgPolicyConstraint, - BatchGetAssetsHistoryRequest, - BatchGetAssetsHistoryResponse, - BatchGetEffectiveIamPoliciesRequest, - BatchGetEffectiveIamPoliciesResponse, - BigQueryDestination, - CreateFeedRequest, - CreateSavedQueryRequest, - DeleteFeedRequest, - DeleteSavedQueryRequest, - ExportAssetsRequest, - ExportAssetsResponse, - Feed, - FeedOutputConfig, - GcsDestination, - GcsOutputResult, - GetFeedRequest, - GetSavedQueryRequest, - IamPolicyAnalysisOutputConfig, - IamPolicyAnalysisQuery, - ListAssetsRequest, - ListAssetsResponse, - ListFeedsRequest, - ListFeedsResponse, - ListSavedQueriesRequest, - ListSavedQueriesResponse, - MoveAnalysis, - MoveAnalysisResult, - MoveImpact, - OutputConfig, - OutputResult, - PartitionSpec, - PubsubDestination, - QueryAssetsOutputConfig, - QueryAssetsRequest, - QueryAssetsResponse, - QueryResult, - SavedQuery, - SearchAllIamPoliciesRequest, - SearchAllIamPoliciesResponse, - SearchAllResourcesRequest, - SearchAllResourcesResponse, - TableFieldSchema, - TableSchema, - UpdateFeedRequest, - UpdateSavedQueryRequest, - ContentType, -) -from .assets import ( - Asset, - AttachedResource, - ConditionEvaluation, - EffectiveTagDetails, - IamPolicyAnalysisResult, - IamPolicyAnalysisState, - IamPolicySearchResult, - RelatedAsset, - RelatedAssets, - RelatedResource, - RelatedResources, - RelationshipAttributes, - Resource, - ResourceSearchResult, - Tag, - TemporalAsset, - TimeWindow, - VersionedResource, -) - -__all__ = ( - 'AnalyzeIamPolicyLongrunningMetadata', - 'AnalyzeIamPolicyLongrunningRequest', - 'AnalyzeIamPolicyLongrunningResponse', - 'AnalyzeIamPolicyRequest', - 'AnalyzeIamPolicyResponse', - 'AnalyzeMoveRequest', - 'AnalyzeMoveResponse', - 'AnalyzeOrgPoliciesRequest', - 'AnalyzeOrgPoliciesResponse', - 'AnalyzeOrgPolicyGovernedAssetsRequest', - 'AnalyzeOrgPolicyGovernedAssetsResponse', - 'AnalyzeOrgPolicyGovernedContainersRequest', - 'AnalyzeOrgPolicyGovernedContainersResponse', - 'AnalyzerOrgPolicy', - 'AnalyzerOrgPolicyConstraint', - 'BatchGetAssetsHistoryRequest', - 'BatchGetAssetsHistoryResponse', - 'BatchGetEffectiveIamPoliciesRequest', - 'BatchGetEffectiveIamPoliciesResponse', - 'BigQueryDestination', - 'CreateFeedRequest', - 'CreateSavedQueryRequest', - 'DeleteFeedRequest', - 'DeleteSavedQueryRequest', - 'ExportAssetsRequest', - 'ExportAssetsResponse', - 'Feed', - 'FeedOutputConfig', - 'GcsDestination', - 'GcsOutputResult', - 'GetFeedRequest', - 'GetSavedQueryRequest', - 'IamPolicyAnalysisOutputConfig', - 'IamPolicyAnalysisQuery', - 'ListAssetsRequest', - 'ListAssetsResponse', - 'ListFeedsRequest', - 'ListFeedsResponse', - 'ListSavedQueriesRequest', - 'ListSavedQueriesResponse', - 'MoveAnalysis', - 'MoveAnalysisResult', - 'MoveImpact', - 'OutputConfig', - 'OutputResult', - 'PartitionSpec', - 'PubsubDestination', - 'QueryAssetsOutputConfig', - 'QueryAssetsRequest', - 'QueryAssetsResponse', - 'QueryResult', - 'SavedQuery', - 'SearchAllIamPoliciesRequest', - 'SearchAllIamPoliciesResponse', - 'SearchAllResourcesRequest', - 'SearchAllResourcesResponse', 
- 'TableFieldSchema', - 'TableSchema', - 'UpdateFeedRequest', - 'UpdateSavedQueryRequest', - 'ContentType', - 'Asset', - 'AttachedResource', - 'ConditionEvaluation', - 'EffectiveTagDetails', - 'IamPolicyAnalysisResult', - 'IamPolicyAnalysisState', - 'IamPolicySearchResult', - 'RelatedAsset', - 'RelatedAssets', - 'RelatedResource', - 'RelatedResources', - 'RelationshipAttributes', - 'Resource', - 'ResourceSearchResult', - 'Tag', - 'TemporalAsset', - 'TimeWindow', - 'VersionedResource', -) diff --git a/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/types/asset_service.py b/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/types/asset_service.py deleted file mode 100644 index 1a1331b53084..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/types/asset_service.py +++ /dev/null @@ -1,4175 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.asset_v1.types import assets as gca_assets -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import struct_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -from google.type import expr_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.asset.v1', - manifest={ - 'ContentType', - 'AnalyzeIamPolicyLongrunningMetadata', - 'ExportAssetsRequest', - 'ExportAssetsResponse', - 'ListAssetsRequest', - 'ListAssetsResponse', - 'BatchGetAssetsHistoryRequest', - 'BatchGetAssetsHistoryResponse', - 'CreateFeedRequest', - 'GetFeedRequest', - 'ListFeedsRequest', - 'ListFeedsResponse', - 'UpdateFeedRequest', - 'DeleteFeedRequest', - 'OutputConfig', - 'OutputResult', - 'GcsOutputResult', - 'GcsDestination', - 'BigQueryDestination', - 'PartitionSpec', - 'PubsubDestination', - 'FeedOutputConfig', - 'Feed', - 'SearchAllResourcesRequest', - 'SearchAllResourcesResponse', - 'SearchAllIamPoliciesRequest', - 'SearchAllIamPoliciesResponse', - 'IamPolicyAnalysisQuery', - 'AnalyzeIamPolicyRequest', - 'AnalyzeIamPolicyResponse', - 'IamPolicyAnalysisOutputConfig', - 'AnalyzeIamPolicyLongrunningRequest', - 'AnalyzeIamPolicyLongrunningResponse', - 'SavedQuery', - 'CreateSavedQueryRequest', - 'GetSavedQueryRequest', - 'ListSavedQueriesRequest', - 'ListSavedQueriesResponse', - 'UpdateSavedQueryRequest', - 'DeleteSavedQueryRequest', - 'AnalyzeMoveRequest', - 'AnalyzeMoveResponse', - 'MoveAnalysis', - 'MoveAnalysisResult', - 'MoveImpact', - 'QueryAssetsOutputConfig', - 'QueryAssetsRequest', - 'QueryAssetsResponse', - 'QueryResult', - 'TableSchema', - 'TableFieldSchema', - 'BatchGetEffectiveIamPoliciesRequest', - 'BatchGetEffectiveIamPoliciesResponse', - 'AnalyzerOrgPolicy', - 'AnalyzerOrgPolicyConstraint', - 
'AnalyzeOrgPoliciesRequest', - 'AnalyzeOrgPoliciesResponse', - 'AnalyzeOrgPolicyGovernedContainersRequest', - 'AnalyzeOrgPolicyGovernedContainersResponse', - 'AnalyzeOrgPolicyGovernedAssetsRequest', - 'AnalyzeOrgPolicyGovernedAssetsResponse', - }, -) - - -class ContentType(proto.Enum): - r"""Asset content type. - - Values: - CONTENT_TYPE_UNSPECIFIED (0): - Unspecified content type. - RESOURCE (1): - Resource metadata. - IAM_POLICY (2): - The actual IAM policy set on a resource. - ORG_POLICY (4): - The organization policy set on an asset. - ACCESS_POLICY (5): - The Access Context Manager policy set on an - asset. - OS_INVENTORY (6): - The runtime OS Inventory information. - RELATIONSHIP (7): - The related resources. - """ - CONTENT_TYPE_UNSPECIFIED = 0 - RESOURCE = 1 - IAM_POLICY = 2 - ORG_POLICY = 4 - ACCESS_POLICY = 5 - OS_INVENTORY = 6 - RELATIONSHIP = 7 - - -class AnalyzeIamPolicyLongrunningMetadata(proto.Message): - r"""Represents the metadata of the longrunning operation for the - AnalyzeIamPolicyLongrunning RPC. - - Attributes: - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the operation was - created. - """ - - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - - -class ExportAssetsRequest(proto.Message): - r"""Export asset request. - - Attributes: - parent (str): - Required. The relative name of the root - asset. This can only be an organization number - (such as "organizations/123"), a project ID - (such as "projects/my-project-id"), or a project - number (such as "projects/12345"), or a folder - number (such as "folders/123"). - read_time (google.protobuf.timestamp_pb2.Timestamp): - Timestamp to take an asset snapshot. This can - only be set to a timestamp between the current - time and the current time minus 35 days - (inclusive). If not specified, the current time - will be used. Due to delays in resource data - collection and indexing, there is a volatile - window during which running the same query may - get different results. - asset_types (MutableSequence[str]): - A list of asset types to take a snapshot for. For example: - "compute.googleapis.com/Disk". - - Regular expressions are also supported. For example: - - - "compute.googleapis.com.*" snapshots resources whose - asset type starts with "compute.googleapis.com". - - ".*Instance" snapshots resources whose asset type ends - with "Instance". - - ".*Instance.*" snapshots resources whose asset type - contains "Instance". - - See `RE2 `__ for - all supported regular expression syntax. If the regular - expression does not match any supported asset type, an - INVALID_ARGUMENT error will be returned. - - If specified, only matching assets will be returned, - otherwise, it will snapshot all asset types. See - `Introduction to Cloud Asset - Inventory `__ - for all supported asset types. - content_type (google.cloud.asset_v1.types.ContentType): - Asset content type. If not specified, no - content but the asset name will be returned. - output_config (google.cloud.asset_v1.types.OutputConfig): - Required. Output configuration indicating - where the results will be output to. - relationship_types (MutableSequence[str]): - A list of relationship types to export, for example: - ``INSTANCE_TO_INSTANCEGROUP``. This field should only be - specified if content_type=RELATIONSHIP. - - - If specified: it snapshots specified relationships. 
It - returns an error if any of the [relationship_types] - doesn't belong to the supported relationship types of the - [asset_types] or if any of the [asset_types] doesn't - belong to the source types of the [relationship_types]. - - Otherwise: it snapshots the supported relationships for - all [asset_types] or returns an error if any of the - [asset_types] has no relationship support. An unspecified - asset types field means all supported asset_types. See - `Introduction to Cloud Asset - Inventory `__ - for all supported asset types and relationship types. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - asset_types: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - content_type: 'ContentType' = proto.Field( - proto.ENUM, - number=4, - enum='ContentType', - ) - output_config: 'OutputConfig' = proto.Field( - proto.MESSAGE, - number=5, - message='OutputConfig', - ) - relationship_types: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=6, - ) - - -class ExportAssetsResponse(proto.Message): - r"""The export asset response. This message is returned by the - [google.longrunning.Operations.GetOperation][google.longrunning.Operations.GetOperation] - method in the returned - [google.longrunning.Operation.response][google.longrunning.Operation.response] - field. - - Attributes: - read_time (google.protobuf.timestamp_pb2.Timestamp): - Time the snapshot was taken. - output_config (google.cloud.asset_v1.types.OutputConfig): - Output configuration indicating where the - results were output to. - output_result (google.cloud.asset_v1.types.OutputResult): - Output result indicating where the assets were exported to. - For example, a set of actual Cloud Storage object URIs where - the assets are exported to. The URIs can be different from - what [output_config] has specified, as the service will - split the output object into multiple ones once it exceeds a - single Cloud Storage object limit. - """ - - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - output_config: 'OutputConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='OutputConfig', - ) - output_result: 'OutputResult' = proto.Field( - proto.MESSAGE, - number=3, - message='OutputResult', - ) - - -class ListAssetsRequest(proto.Message): - r"""ListAssets request. - - Attributes: - parent (str): - Required. Name of the organization, folder, or project the - assets belong to. Format: - "organizations/[organization-number]" (such as - "organizations/123"), "projects/[project-id]" (such as - "projects/my-project-id"), "projects/[project-number]" (such - as "projects/12345"), or "folders/[folder-number]" (such as - "folders/12345"). - read_time (google.protobuf.timestamp_pb2.Timestamp): - Timestamp to take an asset snapshot. This can - only be set to a timestamp between the current - time and the current time minus 35 days - (inclusive). If not specified, the current time - will be used. Due to delays in resource data - collection and indexing, there is a volatile - window during which running the same query may - get different results. - asset_types (MutableSequence[str]): - A list of asset types to take a snapshot for. For example: - "compute.googleapis.com/Disk". - - Regular expression is also supported. 
For example: - - - "compute.googleapis.com.*" snapshots resources whose - asset type starts with "compute.googleapis.com". - - ".*Instance" snapshots resources whose asset type ends - with "Instance". - - ".*Instance.*" snapshots resources whose asset type - contains "Instance". - - See `RE2 `__ for - all supported regular expression syntax. If the regular - expression does not match any supported asset type, an - INVALID_ARGUMENT error will be returned. - - If specified, only matching assets will be returned, - otherwise, it will snapshot all asset types. See - `Introduction to Cloud Asset - Inventory `__ - for all supported asset types. - content_type (google.cloud.asset_v1.types.ContentType): - Asset content type. If not specified, no - content but the asset name will be returned. - page_size (int): - The maximum number of assets to be returned - in a single response. Default is 100, minimum is - 1, and maximum is 1000. - page_token (str): - The ``next_page_token`` returned from the previous - ``ListAssetsResponse``, or unspecified for the first - ``ListAssetsRequest``. It is a continuation of a prior - ``ListAssets`` call, and the API should return the next page - of assets. - relationship_types (MutableSequence[str]): - A list of relationship types to output, for example: - ``INSTANCE_TO_INSTANCEGROUP``. This field should only be - specified if content_type=RELATIONSHIP. - - - If specified: it snapshots specified relationships. It - returns an error if any of the [relationship_types] - doesn't belong to the supported relationship types of the - [asset_types] or if any of the [asset_types] doesn't - belong to the source types of the [relationship_types]. - - Otherwise: it snapshots the supported relationships for - all [asset_types] or returns an error if any of the - [asset_types] has no relationship support. An unspecified - asset types field means all supported asset_types. See - `Introduction to Cloud Asset - Inventory `__ - for all supported asset types and relationship types. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - asset_types: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - content_type: 'ContentType' = proto.Field( - proto.ENUM, - number=4, - enum='ContentType', - ) - page_size: int = proto.Field( - proto.INT32, - number=5, - ) - page_token: str = proto.Field( - proto.STRING, - number=6, - ) - relationship_types: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=7, - ) - - -class ListAssetsResponse(proto.Message): - r"""ListAssets response. - - Attributes: - read_time (google.protobuf.timestamp_pb2.Timestamp): - Time the snapshot was taken. - assets (MutableSequence[google.cloud.asset_v1.types.Asset]): - Assets. - next_page_token (str): - Token to retrieve the next page of results. - It expires 72 hours after the page token for the - first page is generated. Set to empty if there - are no remaining results. - """ - - @property - def raw_page(self): - return self - - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - assets: MutableSequence[gca_assets.Asset] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=gca_assets.Asset, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class BatchGetAssetsHistoryRequest(proto.Message): - r"""Batch get assets history request. 
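Since the ``ListAssetsRequest``/``ListAssetsResponse`` pair above is a standard paginated API, a short usage sketch may help (assuming the google-cloud-asset package is installed and credentials are configured; ``projects/my-project-id`` is a placeholder):

.. code-block:: python

    from google.cloud import asset_v1

    client = asset_v1.AssetServiceClient()
    request = asset_v1.ListAssetsRequest(
        parent="projects/my-project-id",
        asset_types=["compute.googleapis.com/Disk"],  # regexes like ".*Instance" also work
        content_type=asset_v1.ContentType.RESOURCE,
        page_size=100,
    )
    # The returned pager follows next_page_token transparently.
    for asset in client.list_assets(request=request):
        print(asset.name)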
- - Attributes: - parent (str): - Required. The relative name of the root - asset. It can only be an organization number - (such as "organizations/123"), a project ID - (such as "projects/my-project-id"), or a - project number (such as "projects/12345"). - asset_names (MutableSequence[str]): - A list of the full names of the assets. See: - https://cloud.google.com/asset-inventory/docs/resource-name-format - Example: - - ``//compute.googleapis.com/projects/my_project_123/zones/zone1/instances/instance1``. - - The request becomes a no-op if the asset name list is empty, - and the max size of the asset name list is 100 in one - request. - content_type (google.cloud.asset_v1.types.ContentType): - Optional. The content type. - read_time_window (google.cloud.asset_v1.types.TimeWindow): - Optional. The time window for the asset history. Both - start_time and end_time are optional and if set, they must be - after the current time minus 35 days. If end_time is not - set, it defaults to the current timestamp. If start_time is - not set, the snapshot of the assets at end_time will be - returned. The returned results contain all temporal assets - whose time window overlaps with read_time_window. - relationship_types (MutableSequence[str]): - Optional. A list of relationship types to output, for - example: ``INSTANCE_TO_INSTANCEGROUP``. This field should - only be specified if content_type=RELATIONSHIP. - - - If specified: it outputs specified relationships' history - on the [asset_names]. It returns an error if any of the - [relationship_types] doesn't belong to the supported - relationship types of the [asset_names] or if any of the - [asset_names]'s types doesn't belong to the source types - of the [relationship_types]. - - Otherwise: it outputs the supported relationships' - history on the [asset_names] or returns an error if any - of the [asset_names]'s types has no relationship support. - See `Introduction to Cloud Asset - Inventory <https://cloud.google.com/asset-inventory/docs/overview>`__ - for all supported asset types and relationship types. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - asset_names: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - content_type: 'ContentType' = proto.Field( - proto.ENUM, - number=3, - enum='ContentType', - ) - read_time_window: gca_assets.TimeWindow = proto.Field( - proto.MESSAGE, - number=4, - message=gca_assets.TimeWindow, - ) - relationship_types: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=5, - ) - - -class BatchGetAssetsHistoryResponse(proto.Message): - r"""Batch get assets history response. - - Attributes: - assets (MutableSequence[google.cloud.asset_v1.types.TemporalAsset]): - A list of assets with valid time windows. - """ - - assets: MutableSequence[gca_assets.TemporalAsset] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_assets.TemporalAsset, - ) - - -class CreateFeedRequest(proto.Message): - r"""Create asset feed request. - - Attributes: - parent (str): - Required. The name of the - project/folder/organization where this feed - should be created. It can only be an - organization number (such as - "organizations/123"), a folder number (such as - "folders/123"), a project ID (such as - "projects/my-project-id"), or a project number - (such as "projects/12345"). - feed_id (str): - Required. This is the client-assigned asset - feed identifier and it needs to be unique under - a specific parent project/folder/organization. - feed (google.cloud.asset_v1.types.Feed): - Required. The feed details.
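The history lookup described by ``BatchGetAssetsHistoryRequest`` above can be exercised like this (a sketch; the parent and asset name are placeholders, and omitting ``read_time_window`` falls back to the defaults described in the docstring):

.. code-block:: python

    from google.cloud import asset_v1

    client = asset_v1.AssetServiceClient()
    response = client.batch_get_assets_history(
        request=asset_v1.BatchGetAssetsHistoryRequest(
            parent="projects/12345",
            asset_names=[
                "//compute.googleapis.com/projects/my_project_123"
                "/zones/zone1/instances/instance1",
            ],
            content_type=asset_v1.ContentType.RESOURCE,
        )
    )
    for temporal_asset in response.assets:
        # Each result carries the time window during which that asset state held.
        print(temporal_asset.window)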
The field ``name`` must be empty - and it will be generated in the format of: - projects/project_number/feeds/feed_id - folders/folder_number/feeds/feed_id - organizations/organization_number/feeds/feed_id - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - feed_id: str = proto.Field( - proto.STRING, - number=2, - ) - feed: 'Feed' = proto.Field( - proto.MESSAGE, - number=3, - message='Feed', - ) - - -class GetFeedRequest(proto.Message): - r"""Get asset feed request. - - Attributes: - name (str): - Required. The name of the Feed and it must be in the format - of: projects/project_number/feeds/feed_id - folders/folder_number/feeds/feed_id - organizations/organization_number/feeds/feed_id - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListFeedsRequest(proto.Message): - r"""List asset feeds request. - - Attributes: - parent (str): - Required. The parent - project/folder/organization whose feeds are to - be listed. It can only be a - project/folder/organization number (such as - "folders/12345"), or a project ID (such as - "projects/my-project-id"). - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListFeedsResponse(proto.Message): - r""" - - Attributes: - feeds (MutableSequence[google.cloud.asset_v1.types.Feed]): - A list of feeds. - """ - - feeds: MutableSequence['Feed'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Feed', - ) - - -class UpdateFeedRequest(proto.Message): - r"""Update asset feed request. - - Attributes: - feed (google.cloud.asset_v1.types.Feed): - Required. The new values of feed details. It must match an - existing feed and the field ``name`` must be in the format - of: projects/project_number/feeds/feed_id or - folders/folder_number/feeds/feed_id or - organizations/organization_number/feeds/feed_id. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Only updates the ``feed`` fields indicated by this - mask. The field mask must not be empty, and it must not - contain fields that are immutable or only set by the server. - """ - - feed: 'Feed' = proto.Field( - proto.MESSAGE, - number=1, - message='Feed', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class DeleteFeedRequest(proto.Message): - r""" - - Attributes: - name (str): - Required. The name of the feed and it must be in the format - of: projects/project_number/feeds/feed_id - folders/folder_number/feeds/feed_id - organizations/organization_number/feeds/feed_id - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class OutputConfig(proto.Message): - r"""Output configuration for export assets destination. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - gcs_destination (google.cloud.asset_v1.types.GcsDestination): - Destination on Cloud Storage. - - This field is a member of `oneof`_ ``destination``. - bigquery_destination (google.cloud.asset_v1.types.BigQueryDestination): - Destination on BigQuery. The output table - stores the fields in asset Protobuf as columns - in BigQuery. - - This field is a member of `oneof`_ ``destination``.
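``UpdateFeedRequest`` above pairs the new feed values with a ``FieldMask`` naming exactly the fields to change; everything outside the mask is left untouched. A sketch (the feed name is a placeholder):

.. code-block:: python

    from google.cloud import asset_v1
    from google.protobuf import field_mask_pb2

    client = asset_v1.AssetServiceClient()
    updated = client.update_feed(
        request=asset_v1.UpdateFeedRequest(
            feed=asset_v1.Feed(
                name="projects/12345/feeds/my-feed",
                content_type=asset_v1.ContentType.IAM_POLICY,
            ),
            # Only content_type is updated; other feed fields stay as they were.
            update_mask=field_mask_pb2.FieldMask(paths=["content_type"]),
        )
    )
    print(updated.content_type)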
- """ - - gcs_destination: 'GcsDestination' = proto.Field( - proto.MESSAGE, - number=1, - oneof='destination', - message='GcsDestination', - ) - bigquery_destination: 'BigQueryDestination' = proto.Field( - proto.MESSAGE, - number=2, - oneof='destination', - message='BigQueryDestination', - ) - - -class OutputResult(proto.Message): - r"""Output result of export assets. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - gcs_result (google.cloud.asset_v1.types.GcsOutputResult): - Export result on Cloud Storage. - - This field is a member of `oneof`_ ``result``. - """ - - gcs_result: 'GcsOutputResult' = proto.Field( - proto.MESSAGE, - number=1, - oneof='result', - message='GcsOutputResult', - ) - - -class GcsOutputResult(proto.Message): - r"""A Cloud Storage output result. - - Attributes: - uris (MutableSequence[str]): - List of URIs of the Cloud Storage objects. Example: - "gs://bucket_name/object_name". - """ - - uris: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - - -class GcsDestination(proto.Message): - r"""A Cloud Storage location. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - uri (str): - The URI of the Cloud Storage object. It's the same URI that - is used by gsutil. Example: "gs://bucket_name/object_name". - See `Viewing and Editing Object - Metadata `__ - for more information. - - If the specified Cloud Storage object already exists and - there is no - `hold `__, - it will be overwritten with the exported result. - - This field is a member of `oneof`_ ``object_uri``. - uri_prefix (str): - The URI prefix of all generated Cloud Storage objects. - Example: "gs://bucket_name/object_name_prefix". Each object - URI is in format: "gs://bucket_name/object_name_prefix// and - only contains assets for that type. starts from 0. Example: - "gs://bucket_name/object_name_prefix/compute.googleapis.com/Disk/0" - is the first shard of output objects containing all - compute.googleapis.com/Disk assets. An INVALID_ARGUMENT - error will be returned if file with the same name - "gs://bucket_name/object_name_prefix" already exists. - - This field is a member of `oneof`_ ``object_uri``. - """ - - uri: str = proto.Field( - proto.STRING, - number=1, - oneof='object_uri', - ) - uri_prefix: str = proto.Field( - proto.STRING, - number=2, - oneof='object_uri', - ) - - -class BigQueryDestination(proto.Message): - r"""A BigQuery destination for exporting assets to. - - Attributes: - dataset (str): - Required. The BigQuery dataset in format - "projects/projectId/datasets/datasetId", to which the - snapshot result should be exported. If this dataset does not - exist, the export call returns an INVALID_ARGUMENT error. - Setting the ``contentType`` for ``exportAssets`` determines - the - `schema `__ - of the BigQuery table. Setting - ``separateTablesPerAssetType`` to ``TRUE`` also influences - the schema. - table (str): - Required. The BigQuery table to which the - snapshot result should be written. If this table - does not exist, a new table with the given name - will be created. 
- force (bool): - If the destination table already exists and this flag is - ``TRUE``, the table will be overwritten by the contents of - the assets snapshot. If the flag is ``FALSE`` or unset and the - destination table already exists, the export call returns an - INVALID_ARGUMENT error. - partition_spec (google.cloud.asset_v1.types.PartitionSpec): - [partition_spec] determines whether to export to partitioned - table(s) and how to partition the data. - - If [partition_spec] is unset or - [partition_spec.partition_key] is unset or - ``PARTITION_KEY_UNSPECIFIED``, the snapshot results will be - exported to non-partitioned table(s). [force] will decide - whether to overwrite existing table(s). - - If [partition_spec] is specified: first, the snapshot - results will be written to partitioned table(s) with two - additional timestamp columns, readTime and requestTime, one - of which will be the partition key. Second, in the case - when any destination table already exists, it will first try - to update the existing table's schema as necessary by appending - additional columns. Then, if [force] is ``TRUE``, the - corresponding partition will be overwritten by the snapshot - results (data in different partitions will remain intact); - if [force] is unset or ``FALSE``, it will append the data. - An error will be returned if the schema update or data - appending fails. - separate_tables_per_asset_type (bool): - If this flag is ``TRUE``, the snapshot results will be - written to one or multiple tables, each of which contains - results of one asset type. The [force] and [partition_spec] - fields will apply to each of them. - - Field [table] will be concatenated with "_" and the asset - type names (see - https://cloud.google.com/asset-inventory/docs/supported-asset-types - for supported asset types) to construct per-asset-type table - names, in which all non-alphanumeric characters like "." and - "/" will be substituted by "_". Example: if field [table] is - "mytable" and snapshot results contain - "storage.googleapis.com/Bucket" assets, the corresponding - table name will be "mytable_storage_googleapis_com_Bucket". - If any of these tables does not exist, a new table with the - concatenated name will be created. - - When [content_type] in the ExportAssetsRequest is - ``RESOURCE``, the schema of each table will include - RECORD-type columns mapped to the nested fields in the - Asset.resource.data field of that asset type (up to the 15 - nested levels BigQuery supports - (https://cloud.google.com/bigquery/docs/nested-repeated#limitations)). - The fields in >15 nested levels will be stored in a - JSON-format string as a child column of its parent RECORD - column. - - If an error occurs when exporting to any table, the whole - export call will return an error but the export results that - already succeeded will persist. Example: if exporting to - table_type_A succeeds while exporting to table_type_B fails - during one export call, the results in table_type_A will - persist and there will not be partial results persisting in - a table.
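Putting the ``BigQueryDestination`` options above together: a hedged sketch of an export that partitions by read time and writes one table per asset type (project, dataset, and table names are placeholders; ``export_assets`` returns a long-running operation):

.. code-block:: python

    from google.cloud import asset_v1

    client = asset_v1.AssetServiceClient()
    operation = client.export_assets(
        request=asset_v1.ExportAssetsRequest(
            parent="projects/my-project-id",
            content_type=asset_v1.ContentType.RESOURCE,
            output_config=asset_v1.OutputConfig(
                bigquery_destination=asset_v1.BigQueryDestination(
                    dataset="projects/my-project-id/datasets/asset_export",
                    table="snapshot",
                    force=True,  # overwrite the matching partition; others stay intact
                    partition_spec=asset_v1.PartitionSpec(
                        partition_key=asset_v1.PartitionSpec.PartitionKey.READ_TIME,
                    ),
                    separate_tables_per_asset_type=True,
                ),
            ),
        )
    )
    response = operation.result()  # blocks until the ExportAssetsResponse is ready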
- """ - - dataset: str = proto.Field( - proto.STRING, - number=1, - ) - table: str = proto.Field( - proto.STRING, - number=2, - ) - force: bool = proto.Field( - proto.BOOL, - number=3, - ) - partition_spec: 'PartitionSpec' = proto.Field( - proto.MESSAGE, - number=4, - message='PartitionSpec', - ) - separate_tables_per_asset_type: bool = proto.Field( - proto.BOOL, - number=5, - ) - - -class PartitionSpec(proto.Message): - r"""Specifications of BigQuery partitioned table as export - destination. - - Attributes: - partition_key (google.cloud.asset_v1.types.PartitionSpec.PartitionKey): - The partition key for BigQuery partitioned - table. - """ - class PartitionKey(proto.Enum): - r"""This enum is used to determine the partition key column when - exporting assets to BigQuery partitioned table(s). Note that, if the - partition key is a timestamp column, the actual partition is based - on its date value (expressed in UTC. see details in - https://cloud.google.com/bigquery/docs/partitioned-tables#date_timestamp_partitioned_tables). - - Values: - PARTITION_KEY_UNSPECIFIED (0): - Unspecified partition key. If used, it means - using non-partitioned table. - READ_TIME (1): - The time when the snapshot is taken. If specified as - partition key, the result table(s) is partitoned by the - additional timestamp column, readTime. If [read_time] in - ExportAssetsRequest is specified, the readTime column's - value will be the same as it. Otherwise, its value will be - the current time that is used to take the snapshot. - REQUEST_TIME (2): - The time when the request is received and - started to be processed. If specified as - partition key, the result table(s) is partitoned - by the requestTime column, an additional - timestamp column representing when the request - was received. - """ - PARTITION_KEY_UNSPECIFIED = 0 - READ_TIME = 1 - REQUEST_TIME = 2 - - partition_key: PartitionKey = proto.Field( - proto.ENUM, - number=1, - enum=PartitionKey, - ) - - -class PubsubDestination(proto.Message): - r"""A Pub/Sub destination. - - Attributes: - topic (str): - The name of the Pub/Sub topic to publish to. Example: - ``projects/PROJECT_ID/topics/TOPIC_ID``. - """ - - topic: str = proto.Field( - proto.STRING, - number=1, - ) - - -class FeedOutputConfig(proto.Message): - r"""Output configuration for asset feed destination. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - pubsub_destination (google.cloud.asset_v1.types.PubsubDestination): - Destination on Pub/Sub. - - This field is a member of `oneof`_ ``destination``. - """ - - pubsub_destination: 'PubsubDestination' = proto.Field( - proto.MESSAGE, - number=1, - oneof='destination', - message='PubsubDestination', - ) - - -class Feed(proto.Message): - r"""An asset feed used to export asset updates to a destinations. - An asset feed filter controls what updates are exported. The - asset feed must be created within a project, organization, or - folder. Supported destinations are: - - Pub/Sub topics. - - Attributes: - name (str): - Required. The format will be - projects/{project_number}/feeds/{client-assigned_feed_identifier} - or - folders/{folder_number}/feeds/{client-assigned_feed_identifier} - or - organizations/{organization_number}/feeds/{client-assigned_feed_identifier} - - The client-assigned feed identifier must be unique within - the parent project/folder/organization. - asset_names (MutableSequence[str]): - A list of the full names of the assets to receive updates. 
- You must specify either or both of asset_names and - asset_types. Only asset updates matching specified - asset_names or asset_types are exported to the feed. - Example: - ``//compute.googleapis.com/projects/my_project_123/zones/zone1/instances/instance1``. - For a list of the full names for supported asset types, see - `Resource name - format <https://cloud.google.com/asset-inventory/docs/resource-name-format>`__. - asset_types (MutableSequence[str]): - A list of types of the assets to receive updates. You must - specify either or both of asset_names and asset_types. Only - asset updates matching specified asset_names or asset_types - are exported to the feed. Example: - ``"compute.googleapis.com/Disk"`` - - For a list of all supported asset types, see `Supported - asset - types <https://cloud.google.com/asset-inventory/docs/supported-asset-types>`__. - content_type (google.cloud.asset_v1.types.ContentType): - Asset content type. If not specified, no - content but the asset name and type will be - returned. - feed_output_config (google.cloud.asset_v1.types.FeedOutputConfig): - Required. Feed output configuration defining - where the asset updates are published to. - condition (google.type.expr_pb2.Expr): - A condition which determines whether an asset update should - be published. If specified, an asset will be returned only - when the expression evaluates to true. When set, the - ``expression`` field in the ``Expr`` must be a valid [CEL - expression] (https://github.com/google/cel-spec) on a - TemporalAsset with name ``temporal_asset``. Example: a Feed - with expression ("temporal_asset.deleted == true") will only - publish Asset deletions. Other fields of ``Expr`` are - optional. - - See our `user - guide <https://cloud.google.com/asset-inventory/docs/monitoring-asset-changes#feed_with_condition>`__ - for detailed instructions. - relationship_types (MutableSequence[str]): - A list of relationship types to output, for example: - ``INSTANCE_TO_INSTANCEGROUP``. This field should only be - specified if content_type=RELATIONSHIP. - - - If specified: it outputs specified relationship updates - on the [asset_names] or the [asset_types]. It returns an - error if any of the [relationship_types] doesn't belong - to the supported relationship types of the [asset_names] - or [asset_types], or any of the [asset_names] or the - [asset_types] doesn't belong to the source types of the - [relationship_types]. - - Otherwise: it outputs the supported relationships of the - types of [asset_names] and [asset_types] or returns an - error if any of the [asset_names] or the [asset_types] - has no relationship support. See `Introduction to Cloud - Asset - Inventory <https://cloud.google.com/asset-inventory/docs/overview>`__ - for all supported asset types and relationship types. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - asset_names: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - asset_types: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - content_type: 'ContentType' = proto.Field( - proto.ENUM, - number=4, - enum='ContentType', - ) - feed_output_config: 'FeedOutputConfig' = proto.Field( - proto.MESSAGE, - number=5, - message='FeedOutputConfig', - ) - condition: expr_pb2.Expr = proto.Field( - proto.MESSAGE, - number=6, - message=expr_pb2.Expr, - ) - relationship_types: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=7, - ) - - -class SearchAllResourcesRequest(proto.Message): - r"""Search all resources request. - - Attributes: - scope (str): - Required. A scope can be a project, a folder, or an - organization. The search is limited to the resources within - the ``scope``. The caller must be granted the - ```cloudasset.assets.searchAllResources`` <https://cloud.google.com/asset-inventory/docs/access-control#required_permissions>`__ - permission on the desired scope.
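The ``Feed`` message and its CEL ``condition`` described above combine with ``CreateFeedRequest`` roughly as follows (topic, parent, and feed_id are placeholders; the expression publishes only deletions, per the example in the docstring):

.. code-block:: python

    from google.cloud import asset_v1
    from google.type import expr_pb2

    client = asset_v1.AssetServiceClient()
    feed = client.create_feed(
        request=asset_v1.CreateFeedRequest(
            parent="projects/my-project-id",
            feed_id="disk-deletions",
            feed=asset_v1.Feed(
                asset_types=["compute.googleapis.com/Disk"],
                content_type=asset_v1.ContentType.RESOURCE,
                feed_output_config=asset_v1.FeedOutputConfig(
                    pubsub_destination=asset_v1.PubsubDestination(
                        topic="projects/my-project-id/topics/asset-updates",
                    ),
                ),
                # Publish only deletion events.
                condition=expr_pb2.Expr(expression="temporal_asset.deleted == true"),
            ),
        )
    )
    print(feed.name)  # server-generated feed resource name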
- - The allowed values are: - - - projects/{PROJECT_ID} (e.g., "projects/foo-bar") - - projects/{PROJECT_NUMBER} (e.g., "projects/12345678") - - folders/{FOLDER_NUMBER} (e.g., "folders/1234567") - - organizations/{ORGANIZATION_NUMBER} (e.g., - "organizations/123456") - query (str): - Optional. The query statement. See `how to construct a - query `__ - for more information. If not specified or empty, it will - search all the resources within the specified ``scope``. - - Examples: - - - ``name:Important`` to find Google Cloud resources whose - name contains ``Important`` as a word. - - ``name=Important`` to find the Google Cloud resource - whose name is exactly ``Important``. - - ``displayName:Impor*`` to find Google Cloud resources - whose display name contains ``Impor`` as a prefix of any - word in the field. - - ``location:us-west*`` to find Google Cloud resources - whose location contains both ``us`` and ``west`` as - prefixes. - - ``labels:prod`` to find Google Cloud resources whose - labels contain ``prod`` as a key or value. - - ``labels.env:prod`` to find Google Cloud resources that - have a label ``env`` and its value is ``prod``. - - ``labels.env:*`` to find Google Cloud resources that have - a label ``env``. - - ``tagKeys:env`` to find Google Cloud resources that have - directly attached tags where the - ```TagKey.namespacedName`` `__ - contains ``env``. - - ``tagValues:prod*`` to find Google Cloud resources that - have directly attached tags where the - ```TagValue.namespacedName`` `__ - contains a word prefixed by ``prod``. - - ``tagValueIds=tagValues/123`` to find Google Cloud - resources that have directly attached tags where the - ```TagValue.name`` `__ - is exactly ``tagValues/123``. - - ``effectiveTagKeys:env`` to find Google Cloud resources - that have directly attached or inherited tags where the - ```TagKey.namespacedName`` `__ - contains ``env``. - - ``effectiveTagValues:prod*`` to find Google Cloud - resources that have directly attached or inherited tags - where the - ```TagValue.namespacedName`` `__ - contains a word prefixed by ``prod``. - - ``effectiveTagValueIds=tagValues/123`` to find Google - Cloud resources that have directly attached or inherited - tags where the - ```TagValue.name`` `__ - is exactly ``tagValues/123``. - - ``kmsKey:key`` to find Google Cloud resources encrypted - with a customer-managed encryption key whose name - contains ``key`` as a word. This field is deprecated. Use - the ``kmsKeys`` field to retrieve Cloud KMS key - information. - - ``kmsKeys:key`` to find Google Cloud resources encrypted - with customer-managed encryption keys whose name contains - the word ``key``. - - ``relationships:instance-group-1`` to find Google Cloud - resources that have relationships with - ``instance-group-1`` in the related resource name. - - ``relationships:INSTANCE_TO_INSTANCEGROUP`` to find - Compute Engine instances that have relationships of type - ``INSTANCE_TO_INSTANCEGROUP``. - - ``relationships.INSTANCE_TO_INSTANCEGROUP:instance-group-1`` - to find Compute Engine instances that have relationships - with ``instance-group-1`` in the Compute Engine instance - group resource name, for relationship type - ``INSTANCE_TO_INSTANCEGROUP``. - - ``sccSecurityMarks.key=value`` to find Cloud resources - that are attached with security marks whose key is - ``key`` and value is ``value``. - - ``sccSecurityMarks.key:*`` to find Cloud resources that - are attached with security marks whose key is ``key``. 
- - ``state:ACTIVE`` to find Google Cloud resources whose - state contains ``ACTIVE`` as a word. - - ``NOT state:ACTIVE`` to find Google Cloud resources whose - state doesn't contain ``ACTIVE`` as a word. - - ``createTime<1609459200`` to find Google Cloud resources - that were created before ``2021-01-01 00:00:00 UTC``. - ``1609459200`` is the epoch timestamp of - ``2021-01-01 00:00:00 UTC`` in seconds. - - ``updateTime>1609459200`` to find Google Cloud resources - that were updated after ``2021-01-01 00:00:00 UTC``. - ``1609459200`` is the epoch timestamp of - ``2021-01-01 00:00:00 UTC`` in seconds. - - ``Important`` to find Google Cloud resources that contain - ``Important`` as a word in any of the searchable fields. - - ``Impor*`` to find Google Cloud resources that contain - ``Impor`` as a prefix of any word in any of the - searchable fields. - - ``Important location:(us-west1 OR global)`` to find - Google Cloud resources that contain ``Important`` as a - word in any of the searchable fields and are also located - in the ``us-west1`` region or the ``global`` location. - asset_types (MutableSequence[str]): - Optional. A list of asset types that this request searches - for. If empty, it will search all the asset types `supported - by search - APIs `__. - - Regular expressions are also supported. For example: - - - "compute.googleapis.com.*" snapshots resources whose - asset type starts with "compute.googleapis.com". - - ".*Instance" snapshots resources whose asset type ends - with "Instance". - - ".*Instance.*" snapshots resources whose asset type - contains "Instance". - - See `RE2 `__ for - all supported regular expression syntax. If the regular - expression does not match any supported asset type, an - INVALID_ARGUMENT error will be returned. - page_size (int): - Optional. The page size for search result pagination. Page - size is capped at 500 even if a larger value is given. If - set to zero or a negative value, server will pick an - appropriate default. Returned results may be fewer than - requested. When this happens, there could be more results as - long as ``next_page_token`` is returned. - page_token (str): - Optional. If present, then retrieve the next batch of - results from the preceding call to this method. - ``page_token`` must be the value of ``next_page_token`` from - the previous response. The values of all other method - parameters, must be identical to those in the previous call. - order_by (str): - Optional. A comma-separated list of fields specifying the - sorting order of the results. The default order is - ascending. Add " DESC" after the field name to indicate - descending order. Redundant space characters are ignored. - Example: "location DESC, name". Only the following fields in - the response are sortable: - - - name - - assetType - - project - - displayName - - description - - location - - createTime - - updateTime - - state - - parentFullResourceName - - parentAssetType - read_mask (google.protobuf.field_mask_pb2.FieldMask): - Optional. A comma-separated list of fields that you want - returned in the results. 
The following fields are returned - by default if not specified: - - - ``name`` - - ``assetType`` - - ``project`` - - ``folders`` - - ``organization`` - - ``displayName`` - - ``description`` - - ``location`` - - ``labels`` - - ``tags`` - - ``effectiveTags`` - - ``networkTags`` - - ``kmsKeys`` - - ``createTime`` - - ``updateTime`` - - ``state`` - - ``additionalAttributes`` - - ``parentFullResourceName`` - - ``parentAssetType`` - - Some fields of large size, such as ``versionedResources``, - ``attachedResources``, ``effectiveTags``, etc., are not - returned by default, but you can specify them in the - ``read_mask`` parameter if you want to include them. If - ``"*"`` is specified, all `available - fields `__ - are returned. Examples: ``"name,location"``, - ``"name,versionedResources"``, ``"*"``. Any invalid field - path will trigger an INVALID_ARGUMENT error. - """ - - scope: str = proto.Field( - proto.STRING, - number=1, - ) - query: str = proto.Field( - proto.STRING, - number=2, - ) - asset_types: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - page_size: int = proto.Field( - proto.INT32, - number=4, - ) - page_token: str = proto.Field( - proto.STRING, - number=5, - ) - order_by: str = proto.Field( - proto.STRING, - number=6, - ) - read_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=8, - message=field_mask_pb2.FieldMask, - ) - - - class SearchAllResourcesResponse(proto.Message): - r"""Search all resources response. - - Attributes: - results (MutableSequence[google.cloud.asset_v1.types.ResourceSearchResult]): - A list of Resources that match the search - query. It contains the standard resource - metadata information. - next_page_token (str): - If there are more results than those appearing in this - response, then ``next_page_token`` is included. To get the - next set of results, call this method again using the value - of ``next_page_token`` as ``page_token``. - """ - - @property - def raw_page(self): - return self - - results: MutableSequence[gca_assets.ResourceSearchResult] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_assets.ResourceSearchResult, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - - class SearchAllIamPoliciesRequest(proto.Message): - r"""Search all IAM policies request. - - Attributes: - scope (str): - Required. A scope can be a project, a folder, or an - organization. The search is limited to the IAM policies - within the ``scope``. The caller must be granted the - ```cloudasset.assets.searchAllIamPolicies`` `__ - permission on the desired scope. - - The allowed values are: - - - projects/{PROJECT_ID} (e.g., "projects/foo-bar") - - projects/{PROJECT_NUMBER} (e.g., "projects/12345678") - - folders/{FOLDER_NUMBER} (e.g., "folders/1234567") - - organizations/{ORGANIZATION_NUMBER} (e.g., - "organizations/123456") - query (str): - Optional. The query statement. See `how to construct a - query `__ - for more information. If not specified or empty, it will - search all the IAM policies within the specified ``scope``. - Note that the query string is compared against each IAM - policy binding, including its principals, roles, and IAM - conditions. The returned IAM policies will only contain the - bindings that match your query. To learn more about the IAM - policy structure, see the `IAM policy - documentation `__. - - Examples: - - - ``policy:amy@gmail.com`` to find IAM policy bindings that - specify user "amy@gmail.com".
- - ``policy:roles/compute.admin`` to find IAM policy - bindings that specify the Compute Admin role. - - ``policy:comp*`` to find IAM policy bindings that contain - "comp" as a prefix of any word in the binding. - - ``policy.role.permissions:storage.buckets.update`` to - find IAM policy bindings that specify a role containing - the "storage.buckets.update" permission. Note that if callers - don't have ``iam.roles.get`` access to a role's included - permissions, policy bindings that specify this role will - be dropped from the search results. - - ``policy.role.permissions:upd*`` to find IAM policy - bindings that specify a role containing "upd" as a prefix - of any word in the role permission. Note that if callers - don't have ``iam.roles.get`` access to a role's included - permissions, policy bindings that specify this role will - be dropped from the search results. - - ``resource:organizations/123456`` to find IAM policy - bindings that are set on "organizations/123456". - - ``resource=//cloudresourcemanager.googleapis.com/projects/myproject`` - to find IAM policy bindings that are set on the project - named "myproject". - - ``Important`` to find IAM policy bindings that contain - "Important" as a word in any of the searchable fields - (except for the included permissions). - - ``resource:(instance1 OR instance2) policy:amy`` to find - IAM policy bindings that are set on resources "instance1" - or "instance2" and also specify user "amy". - - ``roles:roles/compute.admin`` to find IAM policy bindings - that specify the Compute Admin role. - - ``memberTypes:user`` to find IAM policy bindings that - contain the principal type "user". - page_size (int): - Optional. The page size for search result pagination. Page - size is capped at 500 even if a larger value is given. If - set to zero or a negative value, the server will pick an - appropriate default. Returned results may be fewer than - requested. When this happens, there could be more results as - long as ``next_page_token`` is returned. - page_token (str): - Optional. If present, retrieve the next batch of results - from the preceding call to this method. ``page_token`` must - be the value of ``next_page_token`` from the previous - response. The values of all other method parameters must be - identical to those in the previous call. - asset_types (MutableSequence[str]): - Optional. A list of asset types that the IAM policies are - attached to. If empty, it will search the IAM policies that - are attached to all the asset types `supported by search - APIs `__. - - Regular expressions are also supported. For example: - - - "compute.googleapis.com.*" snapshots IAM policies - attached to asset types starting with - "compute.googleapis.com". - - ".*Instance" snapshots IAM policies attached to asset - types ending with "Instance". - - ".*Instance.*" snapshots IAM policies attached to asset - types containing "Instance". - - See `RE2 `__ for - all supported regular expression syntax. If the regular - expression does not match any supported asset type, an - INVALID_ARGUMENT error will be returned. - order_by (str): - Optional. A comma-separated list of fields specifying the - sorting order of the results. The default order is - ascending. Add " DESC" after the field name to indicate - descending order. Redundant space characters are ignored. - Example: "assetType DESC, resource".
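A comparable sketch for ``SearchAllIamPoliciesRequest``, again with placeholder scope and query values:

.. code-block:: python

    # Hedged sketch: search IAM policy bindings mentioning a principal
    # within a project scope; the values are placeholders.
    from google.cloud import asset_v1

    client = asset_v1.AssetServiceClient()
    pager = client.search_all_iam_policies(
        request={
            "scope": "projects/my-project-id",
            "query": "policy:amy@gmail.com",
            "order_by": "assetType DESC, resource",
        }
    )
    for result in pager:
        print(result.resource, result.policy)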
Only singular primitive - fields in the response are sortable: - - - resource - - assetType - - project - - All the other fields such as repeated fields (e.g., - ``folders``) and non-primitive fields (e.g., - ``policy``) are not supported. - """ - - scope: str = proto.Field( - proto.STRING, - number=1, - ) - query: str = proto.Field( - proto.STRING, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - page_token: str = proto.Field( - proto.STRING, - number=4, - ) - asset_types: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=5, - ) - order_by: str = proto.Field( - proto.STRING, - number=7, - ) - - - class SearchAllIamPoliciesResponse(proto.Message): - r"""Search all IAM policies response. - - Attributes: - results (MutableSequence[google.cloud.asset_v1.types.IamPolicySearchResult]): - A list of IAM policies that match the search - query. Related information such as the - associated resource is returned along with the - policy. - next_page_token (str): - Set if there are more results than those appearing in this - response; to get the next set of results, call this method - again, using this value as the ``page_token``. - """ - - @property - def raw_page(self): - return self - - results: MutableSequence[gca_assets.IamPolicySearchResult] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_assets.IamPolicySearchResult, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - - class IamPolicyAnalysisQuery(proto.Message): - r"""IAM policy analysis query message. - - Attributes: - scope (str): - Required. The relative name of the root asset. Only - resources and IAM policies within the scope will be - analyzed. - - This can only be an organization number (such as - "organizations/123"), a folder number (such as - "folders/123"), a project ID (such as - "projects/my-project-id"), or a project number (such as - "projects/12345"). - - To learn how to get an organization ID, visit - `here `__. - - To learn how to get a folder or project ID, visit - `here `__. - resource_selector (google.cloud.asset_v1.types.IamPolicyAnalysisQuery.ResourceSelector): - Optional. Specifies a resource for analysis. - identity_selector (google.cloud.asset_v1.types.IamPolicyAnalysisQuery.IdentitySelector): - Optional. Specifies an identity for analysis. - access_selector (google.cloud.asset_v1.types.IamPolicyAnalysisQuery.AccessSelector): - Optional. Specifies roles or permissions for - analysis. - options (google.cloud.asset_v1.types.IamPolicyAnalysisQuery.Options): - Optional. The query options. - condition_context (google.cloud.asset_v1.types.IamPolicyAnalysisQuery.ConditionContext): - Optional. The hypothetical context for IAM - conditions evaluation. - """ - - class ResourceSelector(proto.Message): - r"""Specifies the resource to analyze for access policies, which - may be set directly on the resource, or on ancestors such as - organizations, folders or projects. - - Attributes: - full_resource_name (str): - Required. The [full resource name] - (https://cloud.google.com/asset-inventory/docs/resource-name-format) - of a resource of `supported resource - types `__. - """ - - full_resource_name: str = proto.Field( - proto.STRING, - number=1, - ) - - class IdentitySelector(proto.Message): - r"""Specifies an identity for which to determine resource access, - based on roles assigned either directly to them or to the groups - they belong to, directly or indirectly. - - Attributes: - identity (str): - Required.
The identity appears in the form of principals in - `IAM policy - bindings `__. - - Examples of supported forms are: - "user:mike@example.com", "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com". - - Notice that wildcard characters (such as \* and ?) are not - supported. You must give a specific identity. - """ - - identity: str = proto.Field( - proto.STRING, - number=1, - ) - - class AccessSelector(proto.Message): - r"""Specifies roles and/or permissions to analyze, to determine - both the identities possessing them and the resources they - control. If multiple values are specified, results will include - roles or permissions matching any of them. The total number of - roles and permissions should be equal to or less than 10. - - Attributes: - roles (MutableSequence[str]): - Optional. The roles to appear in the result. - permissions (MutableSequence[str]): - Optional. The permissions to appear in - the result. - """ - - roles: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - permissions: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - - class Options(proto.Message): - r"""Contains query options. - - Attributes: - expand_groups (bool): - Optional. If true, the identities section of the result will - expand any Google groups appearing in an IAM policy binding. - - If - [IamPolicyAnalysisQuery.identity_selector][google.cloud.asset.v1.IamPolicyAnalysisQuery.identity_selector] - is specified, the identity in the result will be determined - by the selector, and this flag cannot be set. - - If true, the default max expansion per group is 1000 for - [AssetService.AnalyzeIamPolicy][]. - - Default is false. - expand_roles (bool): - Optional. If true, the access section of the result will expand - any roles appearing in IAM policy bindings to include their - permissions. - - If - [IamPolicyAnalysisQuery.access_selector][google.cloud.asset.v1.IamPolicyAnalysisQuery.access_selector] - is specified, the access section of the result will be - determined by the selector, and this flag cannot be - set. - - Default is false. - expand_resources (bool): - Optional. If true and - [IamPolicyAnalysisQuery.resource_selector][google.cloud.asset.v1.IamPolicyAnalysisQuery.resource_selector] - is not specified, the resource section of the result will - expand any resource attached to an IAM policy to include - resources lower in the resource hierarchy. - - For example, if the request analyzes for which resources - user A has permission P, and the results include an IAM - policy with P on a Google Cloud folder, the results will - also include resources in that folder with permission P. - - If true and - [IamPolicyAnalysisQuery.resource_selector][google.cloud.asset.v1.IamPolicyAnalysisQuery.resource_selector] - is specified, the resource section of the result will expand - the specified resource to include resources lower in the - resource hierarchy. Only project or lower resources are - supported. Folder and organization resources cannot be used - together with this option. - - For example, if the request analyzes for which users have - permission P on a Google Cloud project with this option - enabled, the results will include all users who have - permission P on that project or any lower resource. - - If true, the default max expansion per resource is 1000 for - [AssetService.AnalyzeIamPolicy][] and 100000 for - [AssetService.AnalyzeIamPolicyLongrunning][]. - - Default is false.
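To make the selector/option interplay concrete, a sketch of building an ``IamPolicyAnalysisQuery``; per the notes above, ``expand_groups`` is left unset because an ``identity_selector`` is given. The organization number and identity are placeholders:

.. code-block:: python

    # Hedged sketch: which resources can this identity reach?
    from google.cloud import asset_v1

    query = asset_v1.IamPolicyAnalysisQuery(
        scope="organizations/123",  # placeholder organization number
        identity_selector=asset_v1.IamPolicyAnalysisQuery.IdentitySelector(
            identity="user:mike@example.com",
        ),
        options=asset_v1.IamPolicyAnalysisQuery.Options(
            expand_roles=True,      # expand bindings' roles into permissions
            expand_resources=True,  # walk down the resource hierarchy
        ),
    )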
- output_resource_edges (bool): - Optional. If true, the result will output the - relevant parent/child relationships between - resources. Default is false. - output_group_edges (bool): - Optional. If true, the result will output the - relevant membership relationships between groups - and other groups, and between groups and - principals. Default is false. - analyze_service_account_impersonation (bool): - Optional. If true, the response will include access analysis - from identities to resources via service account - impersonation. This is a very expensive operation, because - many derived queries will be executed. We highly recommend - you use the - [AssetService.AnalyzeIamPolicyLongrunning][google.cloud.asset.v1.AssetService.AnalyzeIamPolicyLongrunning] - RPC instead. - - For example, if the request analyzes for which resources - user A has permission P, and there's an IAM policy stating - user A has iam.serviceAccounts.getAccessToken permission to - a service account SA, and there's another IAM policy stating - service account SA has permission P to a Google Cloud folder - F, then user A potentially has access to the Google Cloud - folder F. Those advanced analysis results will be - included in - [AnalyzeIamPolicyResponse.service_account_impersonation_analysis][google.cloud.asset.v1.AnalyzeIamPolicyResponse.service_account_impersonation_analysis]. - - As another example, if the request analyzes who has - permission P to a Google Cloud folder F, and there's an IAM - policy stating user A has iam.serviceAccounts.actAs - permission to a service account SA, and there's another IAM - policy stating service account SA has permission P to the - Google Cloud folder F, then user A potentially has access to - the Google Cloud folder F. Those advanced analysis - results will be included in - [AnalyzeIamPolicyResponse.service_account_impersonation_analysis][google.cloud.asset.v1.AnalyzeIamPolicyResponse.service_account_impersonation_analysis]. - - Only the following permissions are considered in this - analysis: - - - ``iam.serviceAccounts.actAs`` - - ``iam.serviceAccounts.signBlob`` - - ``iam.serviceAccounts.signJwt`` - - ``iam.serviceAccounts.getAccessToken`` - - ``iam.serviceAccounts.getOpenIdToken`` - - ``iam.serviceAccounts.implicitDelegation`` - - Default is false. - """ - - expand_groups: bool = proto.Field( - proto.BOOL, - number=1, - ) - expand_roles: bool = proto.Field( - proto.BOOL, - number=2, - ) - expand_resources: bool = proto.Field( - proto.BOOL, - number=3, - ) - output_resource_edges: bool = proto.Field( - proto.BOOL, - number=4, - ) - output_group_edges: bool = proto.Field( - proto.BOOL, - number=5, - ) - analyze_service_account_impersonation: bool = proto.Field( - proto.BOOL, - number=6, - ) - - class ConditionContext(proto.Message): - r"""The IAM conditions context. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - access_time (google.protobuf.timestamp_pb2.Timestamp): - The hypothetical access timestamp to evaluate IAM - conditions. Note that this value must not be earlier than - the current time; otherwise, an INVALID_ARGUMENT error will - be returned. - - This field is a member of `oneof`_ ``TimeContext``.
- """ - - access_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - oneof='TimeContext', - message=timestamp_pb2.Timestamp, - ) - - scope: str = proto.Field( - proto.STRING, - number=1, - ) - resource_selector: ResourceSelector = proto.Field( - proto.MESSAGE, - number=2, - message=ResourceSelector, - ) - identity_selector: IdentitySelector = proto.Field( - proto.MESSAGE, - number=3, - message=IdentitySelector, - ) - access_selector: AccessSelector = proto.Field( - proto.MESSAGE, - number=4, - message=AccessSelector, - ) - options: Options = proto.Field( - proto.MESSAGE, - number=5, - message=Options, - ) - condition_context: ConditionContext = proto.Field( - proto.MESSAGE, - number=6, - message=ConditionContext, - ) - - -class AnalyzeIamPolicyRequest(proto.Message): - r"""A request message for - [AssetService.AnalyzeIamPolicy][google.cloud.asset.v1.AssetService.AnalyzeIamPolicy]. - - Attributes: - analysis_query (google.cloud.asset_v1.types.IamPolicyAnalysisQuery): - Required. The request query. - saved_analysis_query (str): - Optional. The name of a saved query, which must be in the - format of: - - - projects/project_number/savedQueries/saved_query_id - - folders/folder_number/savedQueries/saved_query_id - - organizations/organization_number/savedQueries/saved_query_id - - If both ``analysis_query`` and ``saved_analysis_query`` are - provided, they will be merged together with the - ``saved_analysis_query`` as base and the ``analysis_query`` - as overrides. For more details of the merge behavior, refer - to the - `MergeFrom `__ - page. - - Note that you cannot override primitive fields with default - value, such as 0 or empty string, etc., because we use - proto3, which doesn't support field presence yet. - execution_timeout (google.protobuf.duration_pb2.Duration): - Optional. Amount of time executable has to complete. See - JSON representation of - `Duration `__. - - If this field is set with a value less than the RPC - deadline, and the execution of your query hasn't finished in - the specified execution timeout, you will get a response - with partial result. Otherwise, your query's execution will - continue until the RPC deadline. If it's not finished until - then, you will get a DEADLINE_EXCEEDED error. - - Default is empty. - """ - - analysis_query: 'IamPolicyAnalysisQuery' = proto.Field( - proto.MESSAGE, - number=1, - message='IamPolicyAnalysisQuery', - ) - saved_analysis_query: str = proto.Field( - proto.STRING, - number=3, - ) - execution_timeout: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=2, - message=duration_pb2.Duration, - ) - - -class AnalyzeIamPolicyResponse(proto.Message): - r"""A response message for - [AssetService.AnalyzeIamPolicy][google.cloud.asset.v1.AssetService.AnalyzeIamPolicy]. - - Attributes: - main_analysis (google.cloud.asset_v1.types.AnalyzeIamPolicyResponse.IamPolicyAnalysis): - The main analysis that matches the original - request. - service_account_impersonation_analysis (MutableSequence[google.cloud.asset_v1.types.AnalyzeIamPolicyResponse.IamPolicyAnalysis]): - The service account impersonation analysis if - [AnalyzeIamPolicyRequest.analyze_service_account_impersonation][] - is enabled. 
- fully_explored (bool): - Represents whether all entries in the - [main_analysis][google.cloud.asset.v1.AnalyzeIamPolicyResponse.main_analysis] - and - [service_account_impersonation_analysis][google.cloud.asset.v1.AnalyzeIamPolicyResponse.service_account_impersonation_analysis] - have been fully explored to answer the query in the request. - """ - - class IamPolicyAnalysis(proto.Message): - r"""An analysis message to group the query and results. - - Attributes: - analysis_query (google.cloud.asset_v1.types.IamPolicyAnalysisQuery): - The analysis query. - analysis_results (MutableSequence[google.cloud.asset_v1.types.IamPolicyAnalysisResult]): - A list of - [IamPolicyAnalysisResult][google.cloud.asset.v1.IamPolicyAnalysisResult] - that matches the analysis query, or empty if no result is - found. - fully_explored (bool): - Represents whether all entries in the - [analysis_results][google.cloud.asset.v1.AnalyzeIamPolicyResponse.IamPolicyAnalysis.analysis_results] - have been fully explored to answer the query. - non_critical_errors (MutableSequence[google.cloud.asset_v1.types.IamPolicyAnalysisState]): - A list of non-critical errors that happened during - the query handling. - """ - - analysis_query: 'IamPolicyAnalysisQuery' = proto.Field( - proto.MESSAGE, - number=1, - message='IamPolicyAnalysisQuery', - ) - analysis_results: MutableSequence[gca_assets.IamPolicyAnalysisResult] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=gca_assets.IamPolicyAnalysisResult, - ) - fully_explored: bool = proto.Field( - proto.BOOL, - number=3, - ) - non_critical_errors: MutableSequence[gca_assets.IamPolicyAnalysisState] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message=gca_assets.IamPolicyAnalysisState, - ) - - main_analysis: IamPolicyAnalysis = proto.Field( - proto.MESSAGE, - number=1, - message=IamPolicyAnalysis, - ) - service_account_impersonation_analysis: MutableSequence[IamPolicyAnalysis] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=IamPolicyAnalysis, - ) - fully_explored: bool = proto.Field( - proto.BOOL, - number=3, - ) - - - class IamPolicyAnalysisOutputConfig(proto.Message): - r"""Output configuration for the IAM policy analysis export - destination. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - gcs_destination (google.cloud.asset_v1.types.IamPolicyAnalysisOutputConfig.GcsDestination): - Destination on Cloud Storage. - - This field is a member of `oneof`_ ``destination``. - bigquery_destination (google.cloud.asset_v1.types.IamPolicyAnalysisOutputConfig.BigQueryDestination): - Destination on BigQuery. - - This field is a member of `oneof`_ ``destination``. - """ - - class GcsDestination(proto.Message): - r"""A Cloud Storage location. - - Attributes: - uri (str): - Required. The URI of the Cloud Storage object. It's the same - URI that is used by gsutil. Example: - "gs://bucket_name/object_name". See `Viewing and Editing - Object - Metadata `__ - for more information. - - If the specified Cloud Storage object already exists and - there is no - `hold `__, - it will be overwritten with the analysis result. - """ - - uri: str = proto.Field( - proto.STRING, - number=1, - ) - - class BigQueryDestination(proto.Message): - r"""A BigQuery destination.
- - Attributes: - dataset (str): - Required. The BigQuery dataset in the format - "projects/projectId/datasets/datasetId", to which the - analysis results should be exported. If this dataset does - not exist, the export call will return an INVALID_ARGUMENT - error. - table_prefix (str): - Required. The prefix of the BigQuery tables to which the - analysis results will be written. Tables will be created - based on this table_prefix if they do not exist: - - - <table_prefix>_analysis table will contain the export - operation's metadata. - - <table_prefix>_analysis_result will contain all the - [IamPolicyAnalysisResult][google.cloud.asset.v1.IamPolicyAnalysisResult]. - When [partition_key] is specified, both tables will be - partitioned based on the [partition_key]. - partition_key (google.cloud.asset_v1.types.IamPolicyAnalysisOutputConfig.BigQueryDestination.PartitionKey): - The partition key for BigQuery partitioned - tables. - write_disposition (str): - Optional. Specifies the action that occurs if the - destination table or partition already exists. The following - values are supported: - - - WRITE_TRUNCATE: If the table or partition already exists, - BigQuery overwrites the entire table or all the - partitions' data. - - WRITE_APPEND: If the table or partition already exists, - BigQuery appends the data to the table or the latest - partition. - - WRITE_EMPTY: If the table already exists and contains - data, an error is returned. - - The default value is WRITE_APPEND. Each action is atomic and - only occurs if BigQuery is able to complete the job - successfully. Details are at - https://cloud.google.com/bigquery/docs/loading-data-local#appending_to_or_overwriting_a_table_using_a_local_file. - """ - class PartitionKey(proto.Enum): - r"""This enum determines the partition key column for the - BigQuery tables. Partitioning can improve query performance and - reduce query cost by filtering partitions. Refer to - https://cloud.google.com/bigquery/docs/partitioned-tables for - details. - - Values: - PARTITION_KEY_UNSPECIFIED (0): - Unspecified partition key. Tables won't be - partitioned using this option. - REQUEST_TIME (1): - The time when the request is received. If - specified as the partition key, the result table(s) - are partitioned by the RequestTime column, an - additional timestamp column representing when - the request was received. - """ - PARTITION_KEY_UNSPECIFIED = 0 - REQUEST_TIME = 1 - - dataset: str = proto.Field( - proto.STRING, - number=1, - ) - table_prefix: str = proto.Field( - proto.STRING, - number=2, - ) - partition_key: 'IamPolicyAnalysisOutputConfig.BigQueryDestination.PartitionKey' = proto.Field( - proto.ENUM, - number=3, - enum='IamPolicyAnalysisOutputConfig.BigQueryDestination.PartitionKey', - ) - write_disposition: str = proto.Field( - proto.STRING, - number=4, - ) - - gcs_destination: GcsDestination = proto.Field( - proto.MESSAGE, - number=1, - oneof='destination', - message=GcsDestination, - ) - bigquery_destination: BigQueryDestination = proto.Field( - proto.MESSAGE, - number=2, - oneof='destination', - message=BigQueryDestination, - ) - - - class AnalyzeIamPolicyLongrunningRequest(proto.Message): - r"""A request message for - [AssetService.AnalyzeIamPolicyLongrunning][google.cloud.asset.v1.AssetService.AnalyzeIamPolicyLongrunning]. - - Attributes: - analysis_query (google.cloud.asset_v1.types.IamPolicyAnalysisQuery): - Required. The request query. - saved_analysis_query (str): - Optional.
The name of a saved query, which must be in the - format of: - - - projects/project_number/savedQueries/saved_query_id - - folders/folder_number/savedQueries/saved_query_id - - organizations/organization_number/savedQueries/saved_query_id - - If both ``analysis_query`` and ``saved_analysis_query`` are - provided, they will be merged together with the - ``saved_analysis_query`` as the base and the ``analysis_query`` - as overrides. For more details of the merge behavior, refer - to the - `MergeFrom `__ - doc. - - Note that you cannot override primitive fields with a default - value, such as 0 or an empty string, etc., because we use - proto3, which doesn't support field presence yet. - output_config (google.cloud.asset_v1.types.IamPolicyAnalysisOutputConfig): - Required. Output configuration indicating - where the results will be written. - """ - - analysis_query: 'IamPolicyAnalysisQuery' = proto.Field( - proto.MESSAGE, - number=1, - message='IamPolicyAnalysisQuery', - ) - saved_analysis_query: str = proto.Field( - proto.STRING, - number=3, - ) - output_config: 'IamPolicyAnalysisOutputConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='IamPolicyAnalysisOutputConfig', - ) - - - class AnalyzeIamPolicyLongrunningResponse(proto.Message): - r"""A response message for - [AssetService.AnalyzeIamPolicyLongrunning][google.cloud.asset.v1.AssetService.AnalyzeIamPolicyLongrunning]. - - """ - - - class SavedQuery(proto.Message): - r"""A saved query which can be shared with others or used later. - - Attributes: - name (str): - The resource name of the saved query. The format must be: - - - projects/project_number/savedQueries/saved_query_id - - folders/folder_number/savedQueries/saved_query_id - - organizations/organization_number/savedQueries/saved_query_id - description (str): - The description of this saved query. This - value should be fewer than 255 characters. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The create time of this saved - query. - creator (str): - Output only. The email address of the account - that created this saved query. - last_update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The last update time of this - saved query. - last_updater (str): - Output only. The email address of the account - that most recently updated this saved query. - labels (MutableMapping[str, str]): - Labels applied on the resource. - This value should not contain more than 10 - entries. The key and value of each entry must be - non-empty and fewer than 64 characters. - content (google.cloud.asset_v1.types.SavedQuery.QueryContent): - The query content. - """ - - class QueryContent(proto.Message): - r"""The query content. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - iam_policy_analysis_query (google.cloud.asset_v1.types.IamPolicyAnalysisQuery): - An IAM Policy Analysis query, which could be used in the - [AssetService.AnalyzeIamPolicy][google.cloud.asset.v1.AssetService.AnalyzeIamPolicy] - RPC or the - [AssetService.AnalyzeIamPolicyLongrunning][google.cloud.asset.v1.AssetService.AnalyzeIamPolicyLongrunning] - RPC. - - This field is a member of `oneof`_ ``query_content``.
- """ - - iam_policy_analysis_query: 'IamPolicyAnalysisQuery' = proto.Field( - proto.MESSAGE, - number=1, - oneof='query_content', - message='IamPolicyAnalysisQuery', - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - description: str = proto.Field( - proto.STRING, - number=2, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - creator: str = proto.Field( - proto.STRING, - number=4, - ) - last_update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - last_updater: str = proto.Field( - proto.STRING, - number=6, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=7, - ) - content: QueryContent = proto.Field( - proto.MESSAGE, - number=8, - message=QueryContent, - ) - - -class CreateSavedQueryRequest(proto.Message): - r"""Request to create a saved query. - - Attributes: - parent (str): - Required. The name of the project/folder/organization where - this saved_query should be created in. It can only be an - organization number (such as "organizations/123"), a folder - number (such as "folders/123"), a project ID (such as - "projects/my-project-id"), or a project number (such as - "projects/12345"). - saved_query (google.cloud.asset_v1.types.SavedQuery): - Required. The saved_query details. The ``name`` field must - be empty as it will be generated based on the parent and - saved_query_id. - saved_query_id (str): - Required. The ID to use for the saved query, which must be - unique in the specified parent. It will become the final - component of the saved query's resource name. - - This value should be 4-63 characters, and valid characters - are ``[a-z][0-9]-``. - - Notice that this field is required in the saved query - creation, and the ``name`` field of the ``saved_query`` will - be ignored. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - saved_query: 'SavedQuery' = proto.Field( - proto.MESSAGE, - number=2, - message='SavedQuery', - ) - saved_query_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class GetSavedQueryRequest(proto.Message): - r"""Request to get a saved query. - - Attributes: - name (str): - Required. The name of the saved query and it must be in the - format of: - - - projects/project_number/savedQueries/saved_query_id - - folders/folder_number/savedQueries/saved_query_id - - organizations/organization_number/savedQueries/saved_query_id - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListSavedQueriesRequest(proto.Message): - r"""Request to list saved queries. - - Attributes: - parent (str): - Required. The parent - project/folder/organization whose savedQueries - are to be listed. It can only be using - project/folder/organization number (such as - "folders/12345")", or a project ID (such as - "projects/my-project-id"). - filter (str): - Optional. The expression to filter resources. The expression - is a list of zero or more restrictions combined via logical - operators ``AND`` and ``OR``. When ``AND`` and ``OR`` are - both used in the expression, parentheses must be - appropriately used to group the combinations. The expression - may also contain regular expressions. - - See https://google.aip.dev/160 for more information on the - grammar. - page_size (int): - Optional. The maximum number of saved queries - to return per page. The service may return fewer - than this value. 
If unspecified, at most 50 will - be returned. The maximum value is 1000; values - above 1000 will be coerced to 1000. - page_token (str): - Optional. A page token, received from a previous - ``ListSavedQueries`` call. Provide this to retrieve the - subsequent page. - - When paginating, all other parameters provided to - ``ListSavedQueries`` must match the call that provided the - page token. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - - class ListSavedQueriesResponse(proto.Message): - r"""Response of listing saved queries. - - Attributes: - saved_queries (MutableSequence[google.cloud.asset_v1.types.SavedQuery]): - A list of savedQueries. - next_page_token (str): - A token, which can be sent as ``page_token`` to retrieve the - next page. If this field is omitted, there are no subsequent - pages. - """ - - @property - def raw_page(self): - return self - - saved_queries: MutableSequence['SavedQuery'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='SavedQuery', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - - class UpdateSavedQueryRequest(proto.Message): - r"""Request to update a saved query. - - Attributes: - saved_query (google.cloud.asset_v1.types.SavedQuery): - Required. The saved query to update. - - The saved query's ``name`` field is used to identify the one - to update, which has the format below: - - - projects/project_number/savedQueries/saved_query_id - - folders/folder_number/savedQueries/saved_query_id - - organizations/organization_number/savedQueries/saved_query_id - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. The list of fields to update. - """ - - saved_query: 'SavedQuery' = proto.Field( - proto.MESSAGE, - number=1, - message='SavedQuery', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - - class DeleteSavedQueryRequest(proto.Message): - r"""Request to delete a saved query. - - Attributes: - name (str): - Required. The name of the saved query to delete. It must be - in the format of: - - - projects/project_number/savedQueries/saved_query_id - - folders/folder_number/savedQueries/saved_query_id - - organizations/organization_number/savedQueries/saved_query_id - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - - class AnalyzeMoveRequest(proto.Message): - r"""The request message for performing resource move analysis. - - Attributes: - resource (str): - Required. Name of the resource to perform the - analysis against. Only Google Cloud projects are - supported as of today. Hence, this can only be a - project ID (such as "projects/my-project-id") or - a project number (such as "projects/12345"). - destination_parent (str): - Required. Name of the Google Cloud folder or - organization to reparent the target resource. - The analysis will be performed against - hypothetically moving the resource to the - specified destination parent. This can only be - a folder number (such as "folders/123") or an - organization number (such as - "organizations/123"). - view (google.cloud.asset_v1.types.AnalyzeMoveRequest.AnalysisView): - Analysis view indicating what information - should be included in the analysis response. If - unspecified, the default view is FULL.
- """ - class AnalysisView(proto.Enum): - r"""View enum for supporting partial analysis responses. - - Values: - ANALYSIS_VIEW_UNSPECIFIED (0): - The default/unset value. - The API will default to the FULL view. - FULL (1): - Full analysis including all level of impacts - of the specified resource move. - BASIC (2): - Basic analysis only including blockers which - will prevent the specified resource move at - runtime. - """ - ANALYSIS_VIEW_UNSPECIFIED = 0 - FULL = 1 - BASIC = 2 - - resource: str = proto.Field( - proto.STRING, - number=1, - ) - destination_parent: str = proto.Field( - proto.STRING, - number=2, - ) - view: AnalysisView = proto.Field( - proto.ENUM, - number=3, - enum=AnalysisView, - ) - - -class AnalyzeMoveResponse(proto.Message): - r"""The response message for resource move analysis. - - Attributes: - move_analysis (MutableSequence[google.cloud.asset_v1.types.MoveAnalysis]): - The list of analyses returned from performing - the intended resource move analysis. The - analysis is grouped by different Google Cloud - services. - """ - - move_analysis: MutableSequence['MoveAnalysis'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='MoveAnalysis', - ) - - -class MoveAnalysis(proto.Message): - r"""A message to group the analysis information. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - display_name (str): - The user friendly display name of the - analysis. E.g. IAM, organization policy etc. - analysis (google.cloud.asset_v1.types.MoveAnalysisResult): - Analysis result of moving the target - resource. - - This field is a member of `oneof`_ ``result``. - error (google.rpc.status_pb2.Status): - Description of error encountered when - performing the analysis. - - This field is a member of `oneof`_ ``result``. - """ - - display_name: str = proto.Field( - proto.STRING, - number=1, - ) - analysis: 'MoveAnalysisResult' = proto.Field( - proto.MESSAGE, - number=2, - oneof='result', - message='MoveAnalysisResult', - ) - error: status_pb2.Status = proto.Field( - proto.MESSAGE, - number=3, - oneof='result', - message=status_pb2.Status, - ) - - -class MoveAnalysisResult(proto.Message): - r"""An analysis result including blockers and warnings. - - Attributes: - blockers (MutableSequence[google.cloud.asset_v1.types.MoveImpact]): - Blocking information that would prevent the - target resource from moving to the specified - destination at runtime. - warnings (MutableSequence[google.cloud.asset_v1.types.MoveImpact]): - Warning information indicating that moving - the target resource to the specified destination - might be unsafe. This can include important - policy information and configuration changes, - but will not block moves at runtime. - """ - - blockers: MutableSequence['MoveImpact'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='MoveImpact', - ) - warnings: MutableSequence['MoveImpact'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='MoveImpact', - ) - - -class MoveImpact(proto.Message): - r"""A message to group impacts of moving the target resource. - - Attributes: - detail (str): - User friendly impact detail in a free form - message. 
- """ - - detail: str = proto.Field( - proto.STRING, - number=1, - ) - - -class QueryAssetsOutputConfig(proto.Message): - r"""Output configuration query assets. - - Attributes: - bigquery_destination (google.cloud.asset_v1.types.QueryAssetsOutputConfig.BigQueryDestination): - BigQuery destination where the query results - will be saved. - """ - - class BigQueryDestination(proto.Message): - r"""BigQuery destination. - - Attributes: - dataset (str): - Required. The BigQuery dataset where the - query results will be saved. It has the format - of "projects/{projectId}/datasets/{datasetId}". - table (str): - Required. The BigQuery table where the query - results will be saved. If this table does not - exist, a new table with the given name will be - created. - write_disposition (str): - Specifies the action that occurs if the destination table or - partition already exists. The following values are - supported: - - - WRITE_TRUNCATE: If the table or partition already exists, - BigQuery overwrites the entire table or all the - partitions data. - - WRITE_APPEND: If the table or partition already exists, - BigQuery appends the data to the table or the latest - partition. - - WRITE_EMPTY: If the table already exists and contains - data, a 'duplicate' error is returned in the job result. - - The default value is WRITE_EMPTY. - """ - - dataset: str = proto.Field( - proto.STRING, - number=1, - ) - table: str = proto.Field( - proto.STRING, - number=2, - ) - write_disposition: str = proto.Field( - proto.STRING, - number=3, - ) - - bigquery_destination: BigQueryDestination = proto.Field( - proto.MESSAGE, - number=1, - message=BigQueryDestination, - ) - - -class QueryAssetsRequest(proto.Message): - r"""QueryAssets request. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - parent (str): - Required. The relative name of the root asset. This can only - be an organization number (such as "organizations/123"), a - project ID (such as "projects/my-project-id"), or a project - number (such as "projects/12345"), or a folder number (such - as "folders/123"). - - Only assets belonging to the ``parent`` will be returned. - statement (str): - Optional. A SQL statement that's compatible with `BigQuery - SQL `__. - - This field is a member of `oneof`_ ``query``. - job_reference (str): - Optional. Reference to the query job, which is from the - ``QueryAssetsResponse`` of previous ``QueryAssets`` call. - - This field is a member of `oneof`_ ``query``. - page_size (int): - Optional. The maximum number of rows to return in the - results. Responses are limited to 10 MB and 1000 rows. - - By default, the maximum row count is 1000. When the byte or - row count limit is reached, the rest of the query results - will be paginated. - - The field will be ignored when [output_config] is specified. - page_token (str): - Optional. A page token received from previous - ``QueryAssets``. - - The field will be ignored when [output_config] is specified. - timeout (google.protobuf.duration_pb2.Duration): - Optional. Specifies the maximum amount of time that the - client is willing to wait for the query to complete. By - default, this limit is 5 min for the first query, and 1 - minute for the following queries. 
If the query is complete, - the ``done`` field in the ``QueryAssetsResponse`` is true, - otherwise false. - - Like the BigQuery `jobs.query - API `__, - the call is not guaranteed to wait for the specified - timeout; it typically returns after around 200 seconds - (200,000 milliseconds), even if the query is not complete. - - The field will be ignored when [output_config] is specified. - read_time_window (google.cloud.asset_v1.types.TimeWindow): - Optional. [start_time] is required and must be - less than [end_time]. [end_time] defaults to now if - [start_time] is set and [end_time] isn't. The maximum permitted - time range is 7 days. - - This field is a member of `oneof`_ ``time``. - read_time (google.protobuf.timestamp_pb2.Timestamp): - Optional. Queries cloud assets as they - appeared at the specified point in time. - - This field is a member of `oneof`_ ``time``. - output_config (google.cloud.asset_v1.types.QueryAssetsOutputConfig): - Optional. Destination where the query results will be saved. - - When this field is specified, the query results won't be - saved in the [QueryAssetsResponse.query_result]. Instead, - [QueryAssetsResponse.output_config] will be set. - - Meanwhile, [QueryAssetsResponse.job_reference] will be set - and can be used to check the status of the query job when - passed to a following [QueryAssets] API call. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - statement: str = proto.Field( - proto.STRING, - number=2, - oneof='query', - ) - job_reference: str = proto.Field( - proto.STRING, - number=3, - oneof='query', - ) - page_size: int = proto.Field( - proto.INT32, - number=4, - ) - page_token: str = proto.Field( - proto.STRING, - number=5, - ) - timeout: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=6, - message=duration_pb2.Duration, - ) - read_time_window: gca_assets.TimeWindow = proto.Field( - proto.MESSAGE, - number=7, - oneof='time', - message=gca_assets.TimeWindow, - ) - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=8, - oneof='time', - message=timestamp_pb2.Timestamp, - ) - output_config: 'QueryAssetsOutputConfig' = proto.Field( - proto.MESSAGE, - number=9, - message='QueryAssetsOutputConfig', - ) - - - class QueryAssetsResponse(proto.Message): - r"""QueryAssets response. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - job_reference (str): - Reference to a query job. - done (bool): - The query response, which can be either an ``error`` or a - valid ``response``. - - If ``done`` == ``false`` and the query result is being saved - in an output, the output_config field will be set. If - ``done`` == ``true``, exactly one of ``error``, - ``query_result`` or ``output_config`` will be set. [done] is - unset unless the [QueryAssetsResponse] contains a - [QueryAssetsResponse.job_reference]. - error (google.rpc.status_pb2.Status): - Error status. - - This field is a member of `oneof`_ ``response``. - query_result (google.cloud.asset_v1.types.QueryResult): - Result of the query. - - This field is a member of `oneof`_ ``response``.
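If ``done`` comes back false, the ``job_reference`` can be fed into a follow-up call, per the response fields documented above. A hedged continuation of the previous sketch (real code would add a backoff between polls):

.. code-block:: python

    # Hedged sketch: poll the same query job until done, then read rows.
    while not response.done:
        response = client.query_assets(
            request={
                "parent": "projects/my-project-id",       # same placeholder
                "job_reference": response.job_reference,  # resume the job
            }
        )
    for row in response.query_result.rows:  # each row is a Struct
        print(row)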
- output_config (google.cloud.asset_v1.types.QueryAssetsOutputConfig): - Output configuration, which indicates that - instead of being returned in an API response on - the fly, the query result will be saved in a - specific output. - - This field is a member of `oneof`_ ``response``. - """ - - job_reference: str = proto.Field( - proto.STRING, - number=1, - ) - done: bool = proto.Field( - proto.BOOL, - number=2, - ) - error: status_pb2.Status = proto.Field( - proto.MESSAGE, - number=3, - oneof='response', - message=status_pb2.Status, - ) - query_result: 'QueryResult' = proto.Field( - proto.MESSAGE, - number=4, - oneof='response', - message='QueryResult', - ) - output_config: 'QueryAssetsOutputConfig' = proto.Field( - proto.MESSAGE, - number=5, - oneof='response', - message='QueryAssetsOutputConfig', - ) - - - class QueryResult(proto.Message): - r"""Execution results of the query. - - The result is formatted as rows represented by a BigQuery-compatible - [schema]. When pagination is necessary, it will contain the page - token to retrieve the results of the following pages. - - Attributes: - rows (MutableSequence[google.protobuf.struct_pb2.Struct]): - Each row holds a query result in the format of ``Struct``. - schema (google.cloud.asset_v1.types.TableSchema): - Describes the format of the [rows]. - next_page_token (str): - Token to retrieve the next page of the - results. - total_rows (int): - Total rows of the whole query results. - """ - - @property - def raw_page(self): - return self - - rows: MutableSequence[struct_pb2.Struct] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=struct_pb2.Struct, - ) - schema: 'TableSchema' = proto.Field( - proto.MESSAGE, - number=2, - message='TableSchema', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=3, - ) - total_rows: int = proto.Field( - proto.INT64, - number=4, - ) - - - class TableSchema(proto.Message): - r"""BigQuery-compatible table schema. - - Attributes: - fields (MutableSequence[google.cloud.asset_v1.types.TableFieldSchema]): - Describes the fields in a table. - """ - - fields: MutableSequence['TableFieldSchema'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='TableFieldSchema', - ) - - - class TableFieldSchema(proto.Message): - r"""A field in TableSchema. - - Attributes: - field (str): - The field name. The name must contain only letters (a-z, - A-Z), numbers (0-9), or underscores (_), and must start with - a letter or underscore. The maximum length is 128 - characters. - type_ (str): - The field data type. Possible values include: - - - STRING - - BYTES - - INTEGER - - FLOAT - - BOOLEAN - - TIMESTAMP - - DATE - - TIME - - DATETIME - - GEOGRAPHY - - NUMERIC - - BIGNUMERIC - - RECORD (where RECORD indicates that the field contains a - nested schema). - mode (str): - The field mode. Possible values include - NULLABLE, REQUIRED, and REPEATED. The default - value is NULLABLE. - fields (MutableSequence[google.cloud.asset_v1.types.TableFieldSchema]): - Describes the nested schema fields if the - type property is set to RECORD.
- """ - - field: str = proto.Field( - proto.STRING, - number=1, - ) - type_: str = proto.Field( - proto.STRING, - number=2, - ) - mode: str = proto.Field( - proto.STRING, - number=3, - ) - fields: MutableSequence['TableFieldSchema'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='TableFieldSchema', - ) - - -class BatchGetEffectiveIamPoliciesRequest(proto.Message): - r"""A request message for - [AssetService.BatchGetEffectiveIamPolicies][google.cloud.asset.v1.AssetService.BatchGetEffectiveIamPolicies]. - - Attributes: - scope (str): - Required. Only IAM policies on or below the scope will be - returned. - - This can only be an organization number (such as - "organizations/123"), a folder number (such as - "folders/123"), a project ID (such as - "projects/my-project-id"), or a project number (such as - "projects/12345"). - - To know how to get organization ID, visit - `here `__. - - To know how to get folder or project ID, visit - `here `__. - names (MutableSequence[str]): - Required. The names refer to the [full_resource_names] - (https://cloud.google.com/asset-inventory/docs/resource-name-format) - of the asset types `supported by search - APIs `__. - A maximum of 20 resources' effective policies can be - retrieved in a batch. - """ - - scope: str = proto.Field( - proto.STRING, - number=1, - ) - names: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class BatchGetEffectiveIamPoliciesResponse(proto.Message): - r"""A response message for - [AssetService.BatchGetEffectiveIamPolicies][google.cloud.asset.v1.AssetService.BatchGetEffectiveIamPolicies]. - - Attributes: - policy_results (MutableSequence[google.cloud.asset_v1.types.BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy]): - The effective policies for a batch of resources. Note that - the results order is the same as the order of - [BatchGetEffectiveIamPoliciesRequest.names][google.cloud.asset.v1.BatchGetEffectiveIamPoliciesRequest.names]. - When a resource does not have any effective IAM policies, - its corresponding policy_result will contain empty - [EffectiveIamPolicy.policies][google.cloud.asset.v1.BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.policies]. - """ - - class EffectiveIamPolicy(proto.Message): - r"""The effective IAM policies on one resource. - - Attributes: - full_resource_name (str): - The [full_resource_name] - (https://cloud.google.com/asset-inventory/docs/resource-name-format) - for which the - [policies][google.cloud.asset.v1.BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.policies] - are computed. This is one of the - [BatchGetEffectiveIamPoliciesRequest.names][google.cloud.asset.v1.BatchGetEffectiveIamPoliciesRequest.names] - the caller provides in the request. - policies (MutableSequence[google.cloud.asset_v1.types.BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.PolicyInfo]): - The effective policies for the - [full_resource_name][google.cloud.asset.v1.BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.full_resource_name]. - - These policies include the policy set on the - [full_resource_name][google.cloud.asset.v1.BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.full_resource_name] - and those set on its parents and ancestors up to the - [BatchGetEffectiveIamPoliciesRequest.scope][google.cloud.asset.v1.BatchGetEffectiveIamPoliciesRequest.scope]. 
- Note that these policies are not filtered according to the - resource type of the - [full_resource_name][google.cloud.asset.v1.BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.full_resource_name]. - - These policies are hierarchically ordered by - [PolicyInfo.attached_resource][google.cloud.asset.v1.BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.PolicyInfo.attached_resource] - starting from - [full_resource_name][google.cloud.asset.v1.BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.full_resource_name] - itself to its parents and ancestors, such that policies[i]'s - [PolicyInfo.attached_resource][google.cloud.asset.v1.BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.PolicyInfo.attached_resource] - is the child of policies[i+1]'s - [PolicyInfo.attached_resource][google.cloud.asset.v1.BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.PolicyInfo.attached_resource], - if policies[i+1] exists. - """ - - class PolicyInfo(proto.Message): - r"""The IAM policy and its attached resource. - - Attributes: - attached_resource (str): - The full resource name that the - [policy][google.cloud.asset.v1.BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.PolicyInfo.policy] - is directly attached to. - policy (google.iam.v1.policy_pb2.Policy): - The IAM policy that's directly attached to the - [attached_resource][google.cloud.asset.v1.BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.PolicyInfo.attached_resource]. - """ - - attached_resource: str = proto.Field( - proto.STRING, - number=1, - ) - policy: policy_pb2.Policy = proto.Field( - proto.MESSAGE, - number=2, - message=policy_pb2.Policy, - ) - - full_resource_name: str = proto.Field( - proto.STRING, - number=1, - ) - policies: MutableSequence['BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.PolicyInfo'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.PolicyInfo', - ) - - policy_results: MutableSequence[EffectiveIamPolicy] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=EffectiveIamPolicy, - ) - - - class AnalyzerOrgPolicy(proto.Message): - r"""This organization policy message is a modified version of the - one defined in the Organization Policy system. This message - contains several fields defined in the original organization - policy with some new fields for analysis purposes. - - Attributes: - attached_resource (str): - The [full resource name] - (https://cloud.google.com/asset-inventory/docs/resource-name-format) - of an organization/folder/project resource where this - organization policy is set. - - Notice that some types of constraints are defined with - a default policy. This field will be empty for them. - applied_resource (str): - The [full resource name] - (https://cloud.google.com/asset-inventory/docs/resource-name-format) - of an organization/folder/project resource to which this - organization policy applies. - - For any user-defined org policies, this field has the same - value as the [attached_resource] field. Only for the default - policy does this field have a different value. - rules (MutableSequence[google.cloud.asset_v1.types.AnalyzerOrgPolicy.Rule]): - List of rules for this organization policy. - inherit_from_parent (bool): - If ``inherit_from_parent`` is true, Rules set higher up in - the hierarchy (up to the closest root) are inherited and - present in the effective policy. If it is false, then no - rules are inherited, and this policy becomes the effective - root for evaluation.
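``AnalyzerOrgPolicy`` messages surface in org-policy analysis responses rather than requests; a sketch of reading them back via the ``AnalyzeOrgPolicies`` RPC defined elsewhere in this service, with placeholder scope and constraint values:

.. code-block:: python

    # Hedged sketch: walk the consolidated org policies for one constraint.
    from google.cloud import asset_v1

    client = asset_v1.AssetServiceClient()
    pager = client.analyze_org_policies(
        request={
            "scope": "organizations/123",  # placeholder scope
            "constraint": "constraints/compute.disableSerialPortAccess",
        }
    )
    for result in pager:
        policy = result.consolidated_policy
        print(policy.attached_resource, policy.inherit_from_parent)
        for rule in policy.rules:
            print("  enforce:", rule.enforce)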
- reset (bool): - Ignores policies set above this resource and restores the - default behavior of the constraint at this resource. This - field can be set in policies for either list or boolean - constraints. If set, ``rules`` must be empty and - ``inherit_from_parent`` must be set to false. - """ - - class Rule(proto.Message): - r"""This rule message is a customized version of the one defined - in the Organization Policy system. In addition to the fields - defined in the original organization policy, it contains - additional field(s) under specific circumstances to support - analysis results. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - values (google.cloud.asset_v1.types.AnalyzerOrgPolicy.Rule.StringValues): - List of values to be used for this policy - rule. This field can be set only in policies for - list constraints. - - This field is a member of `oneof`_ ``kind``. - allow_all (bool): - Setting this to true means that all values - are allowed. This field can be set only in - Policies for list constraints. - - This field is a member of `oneof`_ ``kind``. - deny_all (bool): - Setting this to true means that all values - are denied. This field can be set only in - Policies for list constraints. - - This field is a member of `oneof`_ ``kind``. - enforce (bool): - If ``true``, then the ``Policy`` is enforced. If ``false``, - then any configuration is acceptable. This field can be set - only in Policies for boolean constraints. - - This field is a member of `oneof`_ ``kind``. - condition (google.type.expr_pb2.Expr): - The evaluating condition for this rule. - condition_evaluation (google.cloud.asset_v1.types.ConditionEvaluation): - The condition evaluation result for this rule. Only - populated if it meets all the following criteria: - - - There is a - [condition][google.cloud.asset.v1.AnalyzerOrgPolicy.Rule.condition] - defined for this rule. - - This rule is within - [AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer.consolidated_policy][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer.consolidated_policy], - or - [AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset.consolidated_policy][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset.consolidated_policy] - when the - [AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset] - has - [AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset.governed_resource][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset.governed_resource]. - """ - - class StringValues(proto.Message): - r"""The string values for the list constraints. - - Attributes: - allowed_values (MutableSequence[str]): - List of values allowed at this resource. - denied_values (MutableSequence[str]): - List of values denied at this resource. 
- """ - - allowed_values: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - denied_values: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - - values: 'AnalyzerOrgPolicy.Rule.StringValues' = proto.Field( - proto.MESSAGE, - number=3, - oneof='kind', - message='AnalyzerOrgPolicy.Rule.StringValues', - ) - allow_all: bool = proto.Field( - proto.BOOL, - number=4, - oneof='kind', - ) - deny_all: bool = proto.Field( - proto.BOOL, - number=5, - oneof='kind', - ) - enforce: bool = proto.Field( - proto.BOOL, - number=6, - oneof='kind', - ) - condition: expr_pb2.Expr = proto.Field( - proto.MESSAGE, - number=7, - message=expr_pb2.Expr, - ) - condition_evaluation: gca_assets.ConditionEvaluation = proto.Field( - proto.MESSAGE, - number=8, - message=gca_assets.ConditionEvaluation, - ) - - attached_resource: str = proto.Field( - proto.STRING, - number=1, - ) - applied_resource: str = proto.Field( - proto.STRING, - number=5, - ) - rules: MutableSequence[Rule] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=Rule, - ) - inherit_from_parent: bool = proto.Field( - proto.BOOL, - number=3, - ) - reset: bool = proto.Field( - proto.BOOL, - number=4, - ) - - -class AnalyzerOrgPolicyConstraint(proto.Message): - r"""The organization policy constraint definition. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - google_defined_constraint (google.cloud.asset_v1.types.AnalyzerOrgPolicyConstraint.Constraint): - The definition of the canned constraint - defined by Google. - - This field is a member of `oneof`_ ``constraint_definition``. - custom_constraint (google.cloud.asset_v1.types.AnalyzerOrgPolicyConstraint.CustomConstraint): - The definition of the custom constraint. - - This field is a member of `oneof`_ ``constraint_definition``. - """ - - class Constraint(proto.Message): - r"""The definition of a constraint. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - The unique name of the constraint. Format of the name should - be - - - ``constraints/{constraint_name}`` - - For example, - ``constraints/compute.disableSerialPortAccess``. - display_name (str): - The human readable name of the constraint. - description (str): - Detailed description of what this ``Constraint`` controls as - well as how and where it is enforced. - constraint_default (google.cloud.asset_v1.types.AnalyzerOrgPolicyConstraint.Constraint.ConstraintDefault): - The evaluation behavior of this constraint in - the absence of 'Policy'. - list_constraint (google.cloud.asset_v1.types.AnalyzerOrgPolicyConstraint.Constraint.ListConstraint): - Defines this constraint as being a - ListConstraint. - - This field is a member of `oneof`_ ``constraint_type``. - boolean_constraint (google.cloud.asset_v1.types.AnalyzerOrgPolicyConstraint.Constraint.BooleanConstraint): - Defines this constraint as being a - BooleanConstraint. - - This field is a member of `oneof`_ ``constraint_type``. 
- """ - class ConstraintDefault(proto.Enum): - r"""Specifies the default behavior in the absence of any ``Policy`` for - the ``Constraint``. This must not be - ``CONSTRAINT_DEFAULT_UNSPECIFIED``. - - Values: - CONSTRAINT_DEFAULT_UNSPECIFIED (0): - This is only used for distinguishing unset - values and should never be used. - ALLOW (1): - Indicate that all values are allowed for list - constraints. Indicate that enforcement is off - for boolean constraints. - DENY (2): - Indicate that all values are denied for list - constraints. Indicate that enforcement is on for - boolean constraints. - """ - CONSTRAINT_DEFAULT_UNSPECIFIED = 0 - ALLOW = 1 - DENY = 2 - - class ListConstraint(proto.Message): - r"""A ``Constraint`` that allows or disallows a list of string values, - which are configured by an organization's policy administrator with - a ``Policy``. - - Attributes: - supports_in (bool): - Indicates whether values grouped into categories can be used - in ``Policy.allowed_values`` and ``Policy.denied_values``. - For example, ``"in:Python"`` would match any value in the - 'Python' group. - supports_under (bool): - Indicates whether subtrees of Cloud Resource Manager - resource hierarchy can be used in ``Policy.allowed_values`` - and ``Policy.denied_values``. For example, - ``"under:folders/123"`` would match any resource under the - 'folders/123' folder. - """ - - supports_in: bool = proto.Field( - proto.BOOL, - number=1, - ) - supports_under: bool = proto.Field( - proto.BOOL, - number=2, - ) - - class BooleanConstraint(proto.Message): - r"""A ``Constraint`` that is either enforced or not. - - For example a constraint - ``constraints/compute.disableSerialPortAccess``. If it is enforced - on a VM instance, serial port connections will not be opened to that - instance. - - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - description: str = proto.Field( - proto.STRING, - number=3, - ) - constraint_default: 'AnalyzerOrgPolicyConstraint.Constraint.ConstraintDefault' = proto.Field( - proto.ENUM, - number=4, - enum='AnalyzerOrgPolicyConstraint.Constraint.ConstraintDefault', - ) - list_constraint: 'AnalyzerOrgPolicyConstraint.Constraint.ListConstraint' = proto.Field( - proto.MESSAGE, - number=5, - oneof='constraint_type', - message='AnalyzerOrgPolicyConstraint.Constraint.ListConstraint', - ) - boolean_constraint: 'AnalyzerOrgPolicyConstraint.Constraint.BooleanConstraint' = proto.Field( - proto.MESSAGE, - number=6, - oneof='constraint_type', - message='AnalyzerOrgPolicyConstraint.Constraint.BooleanConstraint', - ) - - class CustomConstraint(proto.Message): - r"""The definition of a custom constraint. - - Attributes: - name (str): - Name of the constraint. This is unique within the - organization. Format of the name should be - - - ``organizations/{organization_id}/customConstraints/{custom_constraint_id}`` - - Example : - "organizations/123/customConstraints/custom.createOnlyE2TypeVms". - resource_types (MutableSequence[str]): - The Resource Instance type on which this policy applies to. - Format will be of the form : "/" Example: - - - ``compute.googleapis.com/Instance``. - method_types (MutableSequence[google.cloud.asset_v1.types.AnalyzerOrgPolicyConstraint.CustomConstraint.MethodType]): - All the operations being applied for this - constraint. - condition (str): - Organization Policy condition/expression. 
For example:
-                ``resource.instanceName.matches("[production|test]_.*_(\d)+")``
-                or, ``resource.management.auto_upgrade == true``
-            action_type (google.cloud.asset_v1.types.AnalyzerOrgPolicyConstraint.CustomConstraint.ActionType):
-                Allow or deny type.
-            display_name (str):
-                One-line display name for the UI.
-            description (str):
-                Detailed information about this custom
-                policy constraint.
-        """
-        class MethodType(proto.Enum):
-            r"""The operation to which this constraint will be applied. For
-            example: if the constraint applies only when creating VMs, the
-            method_types will be "CREATE" only. If the constraint applies when
-            creating or deleting VMs, the method_types will be "CREATE" and
-            "DELETE".
-
-            Values:
-                METHOD_TYPE_UNSPECIFIED (0):
-                    Unspecified. Will result in a user error.
-                CREATE (1):
-                    Constraint applied when creating the
-                    resource.
-                UPDATE (2):
-                    Constraint applied when updating the
-                    resource.
-                DELETE (3):
-                    Constraint applied when deleting the
-                    resource.
-            """
-            METHOD_TYPE_UNSPECIFIED = 0
-            CREATE = 1
-            UPDATE = 2
-            DELETE = 3
-
-        class ActionType(proto.Enum):
-            r"""Allow or deny type.
-
-            Values:
-                ACTION_TYPE_UNSPECIFIED (0):
-                    Unspecified. Will result in a user error.
-                ALLOW (1):
-                    Allowed action type.
-                DENY (2):
-                    Deny action type.
-            """
-            ACTION_TYPE_UNSPECIFIED = 0
-            ALLOW = 1
-            DENY = 2
-
-        name: str = proto.Field(
-            proto.STRING,
-            number=1,
-        )
-        resource_types: MutableSequence[str] = proto.RepeatedField(
-            proto.STRING,
-            number=2,
-        )
-        method_types: MutableSequence['AnalyzerOrgPolicyConstraint.CustomConstraint.MethodType'] = proto.RepeatedField(
-            proto.ENUM,
-            number=3,
-            enum='AnalyzerOrgPolicyConstraint.CustomConstraint.MethodType',
-        )
-        condition: str = proto.Field(
-            proto.STRING,
-            number=4,
-        )
-        action_type: 'AnalyzerOrgPolicyConstraint.CustomConstraint.ActionType' = proto.Field(
-            proto.ENUM,
-            number=5,
-            enum='AnalyzerOrgPolicyConstraint.CustomConstraint.ActionType',
-        )
-        display_name: str = proto.Field(
-            proto.STRING,
-            number=6,
-        )
-        description: str = proto.Field(
-            proto.STRING,
-            number=7,
-        )
-
-    google_defined_constraint: Constraint = proto.Field(
-        proto.MESSAGE,
-        number=1,
-        oneof='constraint_definition',
-        message=Constraint,
-    )
-    custom_constraint: CustomConstraint = proto.Field(
-        proto.MESSAGE,
-        number=2,
-        oneof='constraint_definition',
-        message=CustomConstraint,
-    )
-
-
-class AnalyzeOrgPoliciesRequest(proto.Message):
-    r"""A request message for
-    [AssetService.AnalyzeOrgPolicies][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicies].
-
-
-    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
-
-    Attributes:
-        scope (str):
-            Required. The organization to scope the request. Only
-            organization policies within the scope will be analyzed.
-
-            -  organizations/{ORGANIZATION_NUMBER} (e.g.,
-               "organizations/123456")
-        constraint (str):
-            Required. The name of the constraint to
-            analyze organization policies for. The response
-            only contains analyzed organization policies for
-            the provided constraint.
-        filter (str):
-            The expression to filter
-            [AnalyzeOrgPoliciesResponse.org_policy_results][google.cloud.asset.v1.AnalyzeOrgPoliciesResponse.org_policy_results].
-            Filtering is currently available for bare literal values and
-            the following fields:
-
-            -  consolidated_policy.attached_resource
-            -  consolidated_policy.rules.enforce
-
-            When filtering by a specific field, the only supported
-            operator is ``=``.
For example, filtering by
-            consolidated_policy.attached_resource="//cloudresourcemanager.googleapis.com/folders/001"
-            will return all the Organization Policy results attached to
-            "folders/001".
-        page_size (int):
-            The maximum number of items to return per page. If
-            unspecified,
-            [AnalyzeOrgPoliciesResponse.org_policy_results][google.cloud.asset.v1.AnalyzeOrgPoliciesResponse.org_policy_results]
-            will contain 20 items with a maximum of 200.
-
-            This field is a member of `oneof`_ ``_page_size``.
-        page_token (str):
-            The pagination token to retrieve the next
-            page.
-    """
-
-    scope: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    constraint: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-    filter: str = proto.Field(
-        proto.STRING,
-        number=3,
-    )
-    page_size: int = proto.Field(
-        proto.INT32,
-        number=4,
-        optional=True,
-    )
-    page_token: str = proto.Field(
-        proto.STRING,
-        number=5,
-    )
-
-
-class AnalyzeOrgPoliciesResponse(proto.Message):
-    r"""The response message for
-    [AssetService.AnalyzeOrgPolicies][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicies].
-
-    Attributes:
-        org_policy_results (MutableSequence[google.cloud.asset_v1.types.AnalyzeOrgPoliciesResponse.OrgPolicyResult]):
-            The organization policies under the
-            [AnalyzeOrgPoliciesRequest.scope][google.cloud.asset.v1.AnalyzeOrgPoliciesRequest.scope]
-            with the
-            [AnalyzeOrgPoliciesRequest.constraint][google.cloud.asset.v1.AnalyzeOrgPoliciesRequest.constraint].
-        constraint (google.cloud.asset_v1.types.AnalyzerOrgPolicyConstraint):
-            The definition of the constraint in the
-            request.
-        next_page_token (str):
-            The page token to fetch the next page for
-            [AnalyzeOrgPoliciesResponse.org_policy_results][google.cloud.asset.v1.AnalyzeOrgPoliciesResponse.org_policy_results].
-    """
-
-    class OrgPolicyResult(proto.Message):
-        r"""The organization policy result for the query.
-
-        Attributes:
-            consolidated_policy (google.cloud.asset_v1.types.AnalyzerOrgPolicy):
-                The consolidated organization policy for the analyzed
-                resource. The consolidated organization policy is computed
-                by merging and evaluating
-                [AnalyzeOrgPoliciesResponse.policy_bundle][]. The evaluation
-                will respect the organization policy `hierarchy
-                rules `__.
-            policy_bundle (MutableSequence[google.cloud.asset_v1.types.AnalyzerOrgPolicy]):
-                The ordered list of all organization policies from the
-                [AnalyzeOrgPoliciesResponse.OrgPolicyResult.consolidated_policy.attached_resource][]
-                to the scope specified in the request.
-
-                If the constraint is defined with a default policy, it will
-                also appear in the list.
-            project (str):
-                The project that this consolidated policy belongs to, in the
-                format of projects/{PROJECT_NUMBER}. This field is available
-                when the consolidated policy belongs to a project.
-            folders (MutableSequence[str]):
-                The folder(s) that this consolidated policy belongs to, in
-                the format of folders/{FOLDER_NUMBER}. This field is
-                available when the consolidated policy belongs (directly or
-                cascadingly) to one or more folders.
-            organization (str):
-                The organization that this consolidated policy belongs to,
-                in the format of organizations/{ORGANIZATION_NUMBER}. This
-                field is available when the consolidated policy belongs
-                (directly or cascadingly) to an organization.
- """ - - consolidated_policy: 'AnalyzerOrgPolicy' = proto.Field( - proto.MESSAGE, - number=1, - message='AnalyzerOrgPolicy', - ) - policy_bundle: MutableSequence['AnalyzerOrgPolicy'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='AnalyzerOrgPolicy', - ) - project: str = proto.Field( - proto.STRING, - number=3, - ) - folders: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=4, - ) - organization: str = proto.Field( - proto.STRING, - number=5, - ) - - @property - def raw_page(self): - return self - - org_policy_results: MutableSequence[OrgPolicyResult] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=OrgPolicyResult, - ) - constraint: 'AnalyzerOrgPolicyConstraint' = proto.Field( - proto.MESSAGE, - number=2, - message='AnalyzerOrgPolicyConstraint', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class AnalyzeOrgPolicyGovernedContainersRequest(proto.Message): - r"""A request message for - [AssetService.AnalyzeOrgPolicyGovernedContainers][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedContainers]. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - scope (str): - Required. The organization to scope the request. Only - organization policies within the scope will be analyzed. The - output containers will also be limited to the ones governed - by those in-scope organization policies. - - - organizations/{ORGANIZATION_NUMBER} (e.g., - "organizations/123456") - constraint (str): - Required. The name of the constraint to - analyze governed containers for. The analysis - only contains organization policies for the - provided constraint. - filter (str): - The expression to filter - [AnalyzeOrgPolicyGovernedContainersResponse.governed_containers][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedContainersResponse.governed_containers]. - Filtering is currently available for bare literal values and - the following fields: - - - parent - - consolidated_policy.rules.enforce - - When filtering by a specific field, the only supported - operator is ``=``. For example, filtering by - parent="//cloudresourcemanager.googleapis.com/folders/001" - will return all the containers under "folders/001". - page_size (int): - The maximum number of items to return per page. If - unspecified, - [AnalyzeOrgPolicyGovernedContainersResponse.governed_containers][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedContainersResponse.governed_containers] - will contain 100 items with a maximum of 200. - - This field is a member of `oneof`_ ``_page_size``. - page_token (str): - The pagination token to retrieve the next - page. - """ - - scope: str = proto.Field( - proto.STRING, - number=1, - ) - constraint: str = proto.Field( - proto.STRING, - number=2, - ) - filter: str = proto.Field( - proto.STRING, - number=3, - ) - page_size: int = proto.Field( - proto.INT32, - number=4, - optional=True, - ) - page_token: str = proto.Field( - proto.STRING, - number=5, - ) - - -class AnalyzeOrgPolicyGovernedContainersResponse(proto.Message): - r"""The response message for - [AssetService.AnalyzeOrgPolicyGovernedContainers][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedContainers]. - - Attributes: - governed_containers (MutableSequence[google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer]): - The list of the analyzed governed containers. 
-        constraint (google.cloud.asset_v1.types.AnalyzerOrgPolicyConstraint):
-            The definition of the constraint in the
-            request.
-        next_page_token (str):
-            The page token to fetch the next page for
-            [AnalyzeOrgPolicyGovernedContainersResponse.governed_containers][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedContainersResponse.governed_containers].
-    """
-
-    class GovernedContainer(proto.Message):
-        r"""The organization/folder/project resource governed by organization
-        policies of
-        [AnalyzeOrgPolicyGovernedContainersRequest.constraint][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedContainersRequest.constraint].
-
-        Attributes:
-            full_resource_name (str):
-                The [full resource name]
-                (https://cloud.google.com/asset-inventory/docs/resource-name-format)
-                of an organization/folder/project resource.
-            parent (str):
-                The [full resource name]
-                (https://cloud.google.com/asset-inventory/docs/resource-name-format)
-                of the parent of
-                [AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer.full_resource_name][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer.full_resource_name].
-            consolidated_policy (google.cloud.asset_v1.types.AnalyzerOrgPolicy):
-                The consolidated organization policy for the analyzed
-                resource. The consolidated organization policy is computed
-                by merging and evaluating
-                [AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer.policy_bundle][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer.policy_bundle].
-                The evaluation will respect the organization policy
-                `hierarchy
-                rules `__.
-            policy_bundle (MutableSequence[google.cloud.asset_v1.types.AnalyzerOrgPolicy]):
-                The ordered list of all organization policies from the
-                [AnalyzeOrgPoliciesResponse.OrgPolicyResult.consolidated_policy.attached_resource][]
-                to the scope specified in the request.
-
-                If the constraint is defined with a default policy, it will
-                also appear in the list.
-            project (str):
-                The project that this resource belongs to, in the format of
-                projects/{PROJECT_NUMBER}. This field is available when the
-                resource belongs to a project.
-            folders (MutableSequence[str]):
-                The folder(s) that this resource belongs to, in the format
-                of folders/{FOLDER_NUMBER}. This field is available when the
-                resource belongs (directly or cascadingly) to one or more
-                folders.
-            organization (str):
-                The organization that this resource belongs to, in the
-                format of organizations/{ORGANIZATION_NUMBER}. This field is
-                available when the resource belongs (directly or
-                cascadingly) to an organization.
-            effective_tags (MutableSequence[google.cloud.asset_v1.types.EffectiveTagDetails]):
-                The effective tags on this resource.
- """ - - full_resource_name: str = proto.Field( - proto.STRING, - number=1, - ) - parent: str = proto.Field( - proto.STRING, - number=2, - ) - consolidated_policy: 'AnalyzerOrgPolicy' = proto.Field( - proto.MESSAGE, - number=3, - message='AnalyzerOrgPolicy', - ) - policy_bundle: MutableSequence['AnalyzerOrgPolicy'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='AnalyzerOrgPolicy', - ) - project: str = proto.Field( - proto.STRING, - number=5, - ) - folders: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=6, - ) - organization: str = proto.Field( - proto.STRING, - number=7, - ) - effective_tags: MutableSequence[gca_assets.EffectiveTagDetails] = proto.RepeatedField( - proto.MESSAGE, - number=8, - message=gca_assets.EffectiveTagDetails, - ) - - @property - def raw_page(self): - return self - - governed_containers: MutableSequence[GovernedContainer] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=GovernedContainer, - ) - constraint: 'AnalyzerOrgPolicyConstraint' = proto.Field( - proto.MESSAGE, - number=2, - message='AnalyzerOrgPolicyConstraint', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class AnalyzeOrgPolicyGovernedAssetsRequest(proto.Message): - r"""A request message for - [AssetService.AnalyzeOrgPolicyGovernedAssets][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedAssets]. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - scope (str): - Required. The organization to scope the request. Only - organization policies within the scope will be analyzed. The - output assets will also be limited to the ones governed by - those in-scope organization policies. - - - organizations/{ORGANIZATION_NUMBER} (e.g., - "organizations/123456") - constraint (str): - Required. The name of the constraint to - analyze governed assets for. The analysis only - contains analyzed organization policies for the - provided constraint. - filter (str): - The expression to filter - [AnalyzeOrgPolicyGovernedAssetsResponse.governed_assets][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsResponse.governed_assets]. - - For governed resources, filtering is currently available for - bare literal values and the following fields: - - - governed_resource.project - - governed_resource.folders - - consolidated_policy.rules.enforce When filtering by - ``governed_resource.project`` or - ``consolidated_policy.rules.enforce``, the only supported - operator is ``=``. When filtering by - ``governed_resource.folders``, the supported operators - are ``=`` and ``:``. For example, filtering by - ``governed_resource.project="projects/12345678"`` will - return all the governed resources under - "projects/12345678", including the project itself if - applicable. - - For governed IAM policies, filtering is currently available - for bare literal values and the following fields: - - - governed_iam_policy.project - - governed_iam_policy.folders - - consolidated_policy.rules.enforce When filtering by - ``governed_iam_policy.project`` or - ``consolidated_policy.rules.enforce``, the only supported - operator is ``=``. When filtering by - ``governed_iam_policy.folders``, the supported operators - are ``=`` and ``:``. For example, filtering by - ``governed_iam_policy.folders:"folders/12345678"`` will - return all the governed IAM policies under "folders/001". - page_size (int): - The maximum number of items to return per page. 
If - unspecified, - [AnalyzeOrgPolicyGovernedAssetsResponse.governed_assets][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsResponse.governed_assets] - will contain 100 items with a maximum of 200. - - This field is a member of `oneof`_ ``_page_size``. - page_token (str): - The pagination token to retrieve the next - page. - """ - - scope: str = proto.Field( - proto.STRING, - number=1, - ) - constraint: str = proto.Field( - proto.STRING, - number=2, - ) - filter: str = proto.Field( - proto.STRING, - number=3, - ) - page_size: int = proto.Field( - proto.INT32, - number=4, - optional=True, - ) - page_token: str = proto.Field( - proto.STRING, - number=5, - ) - - -class AnalyzeOrgPolicyGovernedAssetsResponse(proto.Message): - r"""The response message for - [AssetService.AnalyzeOrgPolicyGovernedAssets][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedAssets]. - - Attributes: - governed_assets (MutableSequence[google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset]): - The list of the analyzed governed assets. - constraint (google.cloud.asset_v1.types.AnalyzerOrgPolicyConstraint): - The definition of the constraint in the - request. - next_page_token (str): - The page token to fetch the next page for - [AnalyzeOrgPolicyGovernedAssetsResponse.governed_assets][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsResponse.governed_assets]. - """ - - class GovernedResource(proto.Message): - r"""The Google Cloud resources governed by the organization policies of - the - [AnalyzeOrgPolicyGovernedAssetsRequest.constraint][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsRequest.constraint]. - - Attributes: - full_resource_name (str): - The [full resource name] - (https://cloud.google.com/asset-inventory/docs/resource-name-format) - of the Google Cloud resource. - parent (str): - The [full resource name] - (https://cloud.google.com/asset-inventory/docs/resource-name-format) - of the parent of - [AnalyzeOrgPolicyGovernedAssetsResponse.GovernedResource.full_resource_name][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedResource.full_resource_name]. - project (str): - The project that this resource belongs to, in the format of - projects/{PROJECT_NUMBER}. This field is available when the - resource belongs to a project. - folders (MutableSequence[str]): - The folder(s) that this resource belongs to, in the format - of folders/{FOLDER_NUMBER}. This field is available when the - resource belongs (directly or cascadingly) to one or more - folders. - organization (str): - The organization that this resource belongs to, in the - format of organizations/{ORGANIZATION_NUMBER}. This field is - available when the resource belongs (directly or - cascadingly) to an organization. - asset_type (str): - The asset type of the - [AnalyzeOrgPolicyGovernedAssetsResponse.GovernedResource.full_resource_name][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedResource.full_resource_name] - Example: ``cloudresourcemanager.googleapis.com/Project`` See - `Cloud Asset Inventory Supported Asset - Types `__ - for all supported asset types. - effective_tags (MutableSequence[google.cloud.asset_v1.types.EffectiveTagDetails]): - The effective tags on this resource. 
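The request message above drives ``AssetServiceClient.analyze_org_policy_governed_assets``, whose paged results each set exactly one member of the ``governed_asset`` oneof described below. A sketch with placeholder identifiers; the ``in`` check relies on proto-plus field-presence semantics for oneof members:

.. code-block:: python

    from google.cloud import asset_v1

    client = asset_v1.AssetServiceClient()
    for asset in client.analyze_org_policy_governed_assets(
        request=asset_v1.AnalyzeOrgPolicyGovernedAssetsRequest(
            scope="organizations/123456",  # placeholder organization
            constraint="constraints/iam.allowedPolicyMemberDomains",
        )
    ):
        # exactly one member of the ``governed_asset`` oneof is populated
        if "governed_resource" in asset:
            print("resource:", asset.governed_resource.full_resource_name)
        else:
            print("IAM policy on:", asset.governed_iam_policy.attached_resource)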
- """ - - full_resource_name: str = proto.Field( - proto.STRING, - number=1, - ) - parent: str = proto.Field( - proto.STRING, - number=2, - ) - project: str = proto.Field( - proto.STRING, - number=5, - ) - folders: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=6, - ) - organization: str = proto.Field( - proto.STRING, - number=7, - ) - asset_type: str = proto.Field( - proto.STRING, - number=8, - ) - effective_tags: MutableSequence[gca_assets.EffectiveTagDetails] = proto.RepeatedField( - proto.MESSAGE, - number=9, - message=gca_assets.EffectiveTagDetails, - ) - - class GovernedIamPolicy(proto.Message): - r"""The IAM policies governed by the organization policies of the - [AnalyzeOrgPolicyGovernedAssetsRequest.constraint][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsRequest.constraint]. - - Attributes: - attached_resource (str): - The full resource name of the resource on which this IAM - policy is set. Example: - ``//compute.googleapis.com/projects/my_project_123/zones/zone1/instances/instance1``. - See `Cloud Asset Inventory Resource Name - Format `__ - for more information. - policy (google.iam.v1.policy_pb2.Policy): - The IAM policy directly set on the given - resource. - project (str): - The project that this IAM policy belongs to, in the format - of projects/{PROJECT_NUMBER}. This field is available when - the IAM policy belongs to a project. - folders (MutableSequence[str]): - The folder(s) that this IAM policy belongs to, in the format - of folders/{FOLDER_NUMBER}. This field is available when the - IAM policy belongs (directly or cascadingly) to one or more - folders. - organization (str): - The organization that this IAM policy belongs to, in the - format of organizations/{ORGANIZATION_NUMBER}. This field is - available when the IAM policy belongs (directly or - cascadingly) to an organization. - asset_type (str): - The asset type of the - [AnalyzeOrgPolicyGovernedAssetsResponse.GovernedIamPolicy.attached_resource][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedIamPolicy.attached_resource]. - Example: ``cloudresourcemanager.googleapis.com/Project`` See - `Cloud Asset Inventory Supported Asset - Types `__ - for all supported asset types. - """ - - attached_resource: str = proto.Field( - proto.STRING, - number=1, - ) - policy: policy_pb2.Policy = proto.Field( - proto.MESSAGE, - number=2, - message=policy_pb2.Policy, - ) - project: str = proto.Field( - proto.STRING, - number=5, - ) - folders: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=6, - ) - organization: str = proto.Field( - proto.STRING, - number=7, - ) - asset_type: str = proto.Field( - proto.STRING, - number=8, - ) - - class GovernedAsset(proto.Message): - r"""Represents a Google Cloud asset(resource or IAM policy) governed by - the organization policies of the - [AnalyzeOrgPolicyGovernedAssetsRequest.constraint][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsRequest.constraint]. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - governed_resource (google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedResource): - A Google Cloud resource governed by the organization - policies of the - [AnalyzeOrgPolicyGovernedAssetsRequest.constraint][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsRequest.constraint]. - - This field is a member of `oneof`_ ``governed_asset``. - governed_iam_policy (google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedIamPolicy): - An IAM policy governed by the organization policies of the - [AnalyzeOrgPolicyGovernedAssetsRequest.constraint][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsRequest.constraint]. - - This field is a member of `oneof`_ ``governed_asset``. - consolidated_policy (google.cloud.asset_v1.types.AnalyzerOrgPolicy): - The consolidated policy for the analyzed asset. The - consolidated policy is computed by merging and evaluating - [AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset.policy_bundle][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset.policy_bundle]. - The evaluation will respect the organization policy - `hierarchy - rules `__. - policy_bundle (MutableSequence[google.cloud.asset_v1.types.AnalyzerOrgPolicy]): - The ordered list of all organization policies from the - [AnalyzeOrgPoliciesResponse.OrgPolicyResult.consolidated_policy.attached_resource][] - to the scope specified in the request. - - If the constraint is defined with default policy, it will - also appear in the list. - """ - - governed_resource: 'AnalyzeOrgPolicyGovernedAssetsResponse.GovernedResource' = proto.Field( - proto.MESSAGE, - number=1, - oneof='governed_asset', - message='AnalyzeOrgPolicyGovernedAssetsResponse.GovernedResource', - ) - governed_iam_policy: 'AnalyzeOrgPolicyGovernedAssetsResponse.GovernedIamPolicy' = proto.Field( - proto.MESSAGE, - number=2, - oneof='governed_asset', - message='AnalyzeOrgPolicyGovernedAssetsResponse.GovernedIamPolicy', - ) - consolidated_policy: 'AnalyzerOrgPolicy' = proto.Field( - proto.MESSAGE, - number=3, - message='AnalyzerOrgPolicy', - ) - policy_bundle: MutableSequence['AnalyzerOrgPolicy'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='AnalyzerOrgPolicy', - ) - - @property - def raw_page(self): - return self - - governed_assets: MutableSequence[GovernedAsset] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=GovernedAsset, - ) - constraint: 'AnalyzerOrgPolicyConstraint' = proto.Field( - proto.MESSAGE, - number=2, - message='AnalyzerOrgPolicyConstraint', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/types/assets.py b/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/types/assets.py deleted file mode 100644 index 0fa2e5dea580..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/google/cloud/asset_v1/types/assets.py +++ /dev/null @@ -1,1687 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.orgpolicy.v1 import orgpolicy_pb2 # type: ignore -from google.cloud.osconfig_v1.types import inventory -from google.iam.v1 import policy_pb2 # type: ignore -from google.identity.accesscontextmanager.v1 import access_level_pb2 # type: ignore -from google.identity.accesscontextmanager.v1 import access_policy_pb2 # type: ignore -from google.identity.accesscontextmanager.v1 import service_perimeter_pb2 # type: ignore -from google.protobuf import struct_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import code_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.asset.v1', - manifest={ - 'TemporalAsset', - 'TimeWindow', - 'Asset', - 'Resource', - 'RelatedAssets', - 'RelationshipAttributes', - 'RelatedAsset', - 'Tag', - 'EffectiveTagDetails', - 'ResourceSearchResult', - 'VersionedResource', - 'AttachedResource', - 'RelatedResources', - 'RelatedResource', - 'IamPolicySearchResult', - 'IamPolicyAnalysisState', - 'ConditionEvaluation', - 'IamPolicyAnalysisResult', - }, -) - - -class TemporalAsset(proto.Message): - r"""An asset in Google Cloud and its temporal metadata, including - the time window when it was observed and its status during that - window. - - Attributes: - window (google.cloud.asset_v1.types.TimeWindow): - The time window when the asset data and state - was observed. - deleted (bool): - Whether the asset has been deleted or not. - asset (google.cloud.asset_v1.types.Asset): - An asset in Google Cloud. - prior_asset_state (google.cloud.asset_v1.types.TemporalAsset.PriorAssetState): - State of prior_asset. - prior_asset (google.cloud.asset_v1.types.Asset): - Prior copy of the asset. Populated if prior_asset_state is - PRESENT. Currently this is only set for responses in - Real-Time Feed. - """ - class PriorAssetState(proto.Enum): - r"""State of prior asset. - - Values: - PRIOR_ASSET_STATE_UNSPECIFIED (0): - prior_asset is not applicable for the current asset. - PRESENT (1): - prior_asset is populated correctly. - INVALID (2): - Failed to set prior_asset. - DOES_NOT_EXIST (3): - Current asset is the first known state. - DELETED (4): - prior_asset is a deletion. - """ - PRIOR_ASSET_STATE_UNSPECIFIED = 0 - PRESENT = 1 - INVALID = 2 - DOES_NOT_EXIST = 3 - DELETED = 4 - - window: 'TimeWindow' = proto.Field( - proto.MESSAGE, - number=1, - message='TimeWindow', - ) - deleted: bool = proto.Field( - proto.BOOL, - number=2, - ) - asset: 'Asset' = proto.Field( - proto.MESSAGE, - number=3, - message='Asset', - ) - prior_asset_state: PriorAssetState = proto.Field( - proto.ENUM, - number=4, - enum=PriorAssetState, - ) - prior_asset: 'Asset' = proto.Field( - proto.MESSAGE, - number=5, - message='Asset', - ) - - -class TimeWindow(proto.Message): - r"""A time window specified by its ``start_time`` and ``end_time``. - - Attributes: - start_time (google.protobuf.timestamp_pb2.Timestamp): - Start time of the time window (exclusive). 
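``TemporalAsset`` and ``TimeWindow`` above are the shapes returned by ``AssetServiceClient.batch_get_assets_history``. A minimal sketch, assuming placeholder project and asset names; proto-plus converts an aware ``datetime`` into the ``Timestamp`` field:

.. code-block:: python

    import datetime

    from google.cloud import asset_v1

    client = asset_v1.AssetServiceClient()
    response = client.batch_get_assets_history(
        request=asset_v1.BatchGetAssetsHistoryRequest(
            parent="projects/my-project-id",  # placeholder parent
            asset_names=[
                # placeholder asset name
                "//compute.googleapis.com/projects/my-project-id/zones/us-central1-a/instances/instance1",
            ],
            content_type=asset_v1.ContentType.RESOURCE,
            read_time_window=asset_v1.TimeWindow(
                # start of the observation window; end defaults to "now"
                start_time=datetime.datetime(2024, 1, 1, tzinfo=datetime.timezone.utc),
            ),
        )
    )
    for temporal_asset in response.assets:
        print(temporal_asset.window.start_time, temporal_asset.deleted)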
- end_time (google.protobuf.timestamp_pb2.Timestamp): - End time of the time window (inclusive). If - not specified, the current timestamp is used - instead. - """ - - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - - -class Asset(proto.Message): - r"""An asset in Google Cloud. An asset can be any resource in the Google - Cloud `resource - hierarchy `__, - a resource outside the Google Cloud resource hierarchy (such as - Google Kubernetes Engine clusters and objects), or a policy (e.g. - IAM policy), or a relationship (e.g. an INSTANCE_TO_INSTANCEGROUP - relationship). See `Supported asset - types `__ - for more information. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - update_time (google.protobuf.timestamp_pb2.Timestamp): - The last update timestamp of an asset. update_time is - updated when create/update/delete operation is performed. - name (str): - The full name of the asset. Example: - ``//compute.googleapis.com/projects/my_project_123/zones/zone1/instances/instance1`` - - See `Resource - names `__ - for more information. - asset_type (str): - The type of the asset. Example: - ``compute.googleapis.com/Disk`` - - See `Supported asset - types `__ - for more information. - resource (google.cloud.asset_v1.types.Resource): - A representation of the resource. - iam_policy (google.iam.v1.policy_pb2.Policy): - A representation of the IAM policy set on a Google Cloud - resource. There can be a maximum of one IAM policy set on - any given resource. In addition, IAM policies inherit their - granted access scope from any policies set on parent - resources in the resource hierarchy. Therefore, the - effectively policy is the union of both the policy set on - this resource and each policy set on all of the resource's - ancestry resource levels in the hierarchy. See `this - topic `__ - for more information. - org_policy (MutableSequence[google.cloud.orgpolicy.v1.orgpolicy_pb2.Policy]): - A representation of an `organization - policy `__. - There can be more than one organization policy with - different constraints set on a given resource. - access_policy (google.identity.accesscontextmanager.v1.access_policy_pb2.AccessPolicy): - Also refer to the `access policy user - guide `__. - - This field is a member of `oneof`_ ``access_context_policy``. - access_level (google.identity.accesscontextmanager.v1.access_level_pb2.AccessLevel): - Also refer to the `access level user - guide `__. - - This field is a member of `oneof`_ ``access_context_policy``. - service_perimeter (google.identity.accesscontextmanager.v1.service_perimeter_pb2.ServicePerimeter): - Also refer to the `service perimeter user - guide `__. - - This field is a member of `oneof`_ ``access_context_policy``. - os_inventory (google.cloud.osconfig_v1.types.Inventory): - A representation of runtime OS Inventory information. See - `this - topic `__ - for more information. - related_assets (google.cloud.asset_v1.types.RelatedAssets): - DEPRECATED. This field only presents for the - purpose of backward-compatibility. 
The server - will never generate responses with this field. - The related assets of the asset of one - relationship type. One asset only represents one - type of relationship. - related_asset (google.cloud.asset_v1.types.RelatedAsset): - One related asset of the current asset. - ancestors (MutableSequence[str]): - The ancestry path of an asset in Google Cloud `resource - hierarchy `__, - represented as a list of relative resource names. An - ancestry path starts with the closest ancestor in the - hierarchy and ends at root. If the asset is a project, - folder, or organization, the ancestry path starts from the - asset itself. - - Example: - ``["projects/123456789", "folders/5432", "organizations/1234"]`` - """ - - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=11, - message=timestamp_pb2.Timestamp, - ) - name: str = proto.Field( - proto.STRING, - number=1, - ) - asset_type: str = proto.Field( - proto.STRING, - number=2, - ) - resource: 'Resource' = proto.Field( - proto.MESSAGE, - number=3, - message='Resource', - ) - iam_policy: policy_pb2.Policy = proto.Field( - proto.MESSAGE, - number=4, - message=policy_pb2.Policy, - ) - org_policy: MutableSequence[orgpolicy_pb2.Policy] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message=orgpolicy_pb2.Policy, - ) - access_policy: access_policy_pb2.AccessPolicy = proto.Field( - proto.MESSAGE, - number=7, - oneof='access_context_policy', - message=access_policy_pb2.AccessPolicy, - ) - access_level: access_level_pb2.AccessLevel = proto.Field( - proto.MESSAGE, - number=8, - oneof='access_context_policy', - message=access_level_pb2.AccessLevel, - ) - service_perimeter: service_perimeter_pb2.ServicePerimeter = proto.Field( - proto.MESSAGE, - number=9, - oneof='access_context_policy', - message=service_perimeter_pb2.ServicePerimeter, - ) - os_inventory: inventory.Inventory = proto.Field( - proto.MESSAGE, - number=12, - message=inventory.Inventory, - ) - related_assets: 'RelatedAssets' = proto.Field( - proto.MESSAGE, - number=13, - message='RelatedAssets', - ) - related_asset: 'RelatedAsset' = proto.Field( - proto.MESSAGE, - number=15, - message='RelatedAsset', - ) - ancestors: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=10, - ) - - -class Resource(proto.Message): - r"""A representation of a Google Cloud resource. - - Attributes: - version (str): - The API version. Example: ``v1`` - discovery_document_uri (str): - The URL of the discovery document containing the resource's - JSON schema. Example: - ``https://www.googleapis.com/discovery/v1/apis/compute/v1/rest`` - - This value is unspecified for resources that do not have an - API based on a discovery document, such as Cloud Bigtable. - discovery_name (str): - The JSON schema name listed in the discovery document. - Example: ``Project`` - - This value is unspecified for resources that do not have an - API based on a discovery document, such as Cloud Bigtable. - resource_url (str): - The REST URL for accessing the resource. An HTTP ``GET`` - request using this URL returns the resource itself. Example: - ``https://cloudresourcemanager.googleapis.com/v1/projects/my-project-123`` - - This value is unspecified for resources without a REST API. - parent (str): - The full name of the immediate parent of this resource. See - `Resource - Names `__ - for more information. - - For Google Cloud assets, this value is the parent resource - defined in the `IAM policy - hierarchy `__. 
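The ``Asset`` and ``Resource`` messages above are what ``AssetServiceClient.list_assets`` yields page by page. A sketch with a placeholder parent:

.. code-block:: python

    from google.cloud import asset_v1

    client = asset_v1.AssetServiceClient()
    for asset in client.list_assets(
        request=asset_v1.ListAssetsRequest(
            parent="projects/my-project-id",  # placeholder parent
            asset_types=["compute.googleapis.com/Instance"],
            content_type=asset_v1.ContentType.RESOURCE,
        )
    ):
        # ``asset.resource`` is populated because RESOURCE content was requested
        print(asset.name, asset.asset_type, asset.resource.location)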
- Example: - ``//cloudresourcemanager.googleapis.com/projects/my_project_123`` - data (google.protobuf.struct_pb2.Struct): - The content of the resource, in which some - sensitive fields are removed and may not be - present. - location (str): - The location of the resource in Google Cloud, - such as its zone and region. For more - information, see - https://cloud.google.com/about/locations/. - """ - - version: str = proto.Field( - proto.STRING, - number=1, - ) - discovery_document_uri: str = proto.Field( - proto.STRING, - number=2, - ) - discovery_name: str = proto.Field( - proto.STRING, - number=3, - ) - resource_url: str = proto.Field( - proto.STRING, - number=4, - ) - parent: str = proto.Field( - proto.STRING, - number=5, - ) - data: struct_pb2.Struct = proto.Field( - proto.MESSAGE, - number=6, - message=struct_pb2.Struct, - ) - location: str = proto.Field( - proto.STRING, - number=8, - ) - - -class RelatedAssets(proto.Message): - r"""DEPRECATED. This message only presents for the purpose of - backward-compatibility. The server will never populate this message - in responses. The detailed related assets with the - ``relationship_type``. - - Attributes: - relationship_attributes (google.cloud.asset_v1.types.RelationshipAttributes): - The detailed relationship attributes. - assets (MutableSequence[google.cloud.asset_v1.types.RelatedAsset]): - The peer resources of the relationship. - """ - - relationship_attributes: 'RelationshipAttributes' = proto.Field( - proto.MESSAGE, - number=1, - message='RelationshipAttributes', - ) - assets: MutableSequence['RelatedAsset'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='RelatedAsset', - ) - - -class RelationshipAttributes(proto.Message): - r"""DEPRECATED. This message only presents for the purpose of - backward-compatibility. The server will never populate this message - in responses. The relationship attributes which include ``type``, - ``source_resource_type``, ``target_resource_type`` and ``action``. - - Attributes: - type_ (str): - The unique identifier of the relationship type. Example: - ``INSTANCE_TO_INSTANCEGROUP`` - source_resource_type (str): - The source asset type. Example: - ``compute.googleapis.com/Instance`` - target_resource_type (str): - The target asset type. Example: - ``compute.googleapis.com/Disk`` - action (str): - The detail of the relationship, e.g. ``contains``, - ``attaches`` - """ - - type_: str = proto.Field( - proto.STRING, - number=4, - ) - source_resource_type: str = proto.Field( - proto.STRING, - number=1, - ) - target_resource_type: str = proto.Field( - proto.STRING, - number=2, - ) - action: str = proto.Field( - proto.STRING, - number=3, - ) - - -class RelatedAsset(proto.Message): - r"""An asset identifier in Google Cloud which contains its name, type - and ancestors. An asset can be any resource in the Google Cloud - `resource - hierarchy `__, - a resource outside the Google Cloud resource hierarchy (such as - Google Kubernetes Engine clusters and objects), or a policy (e.g. - IAM policy). See `Supported asset - types `__ - for more information. - - Attributes: - asset (str): - The full name of the asset. Example: - ``//compute.googleapis.com/projects/my_project_123/zones/zone1/instances/instance1`` - - See `Resource - names `__ - for more information. - asset_type (str): - The type of the asset. Example: - ``compute.googleapis.com/Disk`` - - See `Supported asset - types `__ - for more information. 
- ancestors (MutableSequence[str]): - The ancestors of an asset in Google Cloud `resource - hierarchy `__, - represented as a list of relative resource names. An - ancestry path starts with the closest ancestor in the - hierarchy and ends at root. - - Example: - ``["projects/123456789", "folders/5432", "organizations/1234"]`` - relationship_type (str): - The unique identifier of the relationship type. Example: - ``INSTANCE_TO_INSTANCEGROUP`` - """ - - asset: str = proto.Field( - proto.STRING, - number=1, - ) - asset_type: str = proto.Field( - proto.STRING, - number=2, - ) - ancestors: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - relationship_type: str = proto.Field( - proto.STRING, - number=4, - ) - - -class Tag(proto.Message): - r"""The key and value for a - `tag `__. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - tag_key (str): - TagKey namespaced name, in the format of - {ORG_ID}/{TAG_KEY_SHORT_NAME}. - - This field is a member of `oneof`_ ``_tag_key``. - tag_key_id (str): - TagKey ID, in the format of tagKeys/{TAG_KEY_ID}. - - This field is a member of `oneof`_ ``_tag_key_id``. - tag_value (str): - TagValue namespaced name, in the format of - {ORG_ID}/{TAG_KEY_SHORT_NAME}/{TAG_VALUE_SHORT_NAME}. - - This field is a member of `oneof`_ ``_tag_value``. - tag_value_id (str): - TagValue ID, in the format of tagValues/{TAG_VALUE_ID}. - - This field is a member of `oneof`_ ``_tag_value_id``. - """ - - tag_key: str = proto.Field( - proto.STRING, - number=1, - optional=True, - ) - tag_key_id: str = proto.Field( - proto.STRING, - number=2, - optional=True, - ) - tag_value: str = proto.Field( - proto.STRING, - number=3, - optional=True, - ) - tag_value_id: str = proto.Field( - proto.STRING, - number=4, - optional=True, - ) - - -class EffectiveTagDetails(proto.Message): - r"""The effective tags and the ancestor resources from which they - were inherited. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - attached_resource (str): - The `full resource - name `__ - of the ancestor from which an [effective_tag][] is - inherited, according to `tag - inheritance `__. - - This field is a member of `oneof`_ ``_attached_resource``. - effective_tags (MutableSequence[google.cloud.asset_v1.types.Tag]): - The effective tags inherited from the - [attached_resource][google.cloud.asset.v1.EffectiveTagDetails.attached_resource]. - Note that tags with the same key but different values may - attach to resources at a different hierarchy levels. The - lower hierarchy tag value will overwrite the higher - hierarchy tag value of the same tag key. In this case, the - tag value at the higher hierarchy level will be removed. For - more information, see `tag - inheritance `__. - """ - - attached_resource: str = proto.Field( - proto.STRING, - number=1, - optional=True, - ) - effective_tags: MutableSequence['Tag'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='Tag', - ) - - -class ResourceSearchResult(proto.Message): - r"""A result of Resource Search, containing information of a - cloud resource. - - Attributes: - name (str): - The full resource name of this resource. Example: - ``//compute.googleapis.com/projects/my_project_123/zones/zone1/instances/instance1``. - See `Cloud Asset Inventory Resource Name - Format `__ - for more information. - - To search against the ``name``: - - - Use a field query. 
Example: ``name:instance1`` - - Use a free text query. Example: ``instance1`` - asset_type (str): - The type of this resource. Example: - ``compute.googleapis.com/Disk``. - - To search against the ``asset_type``: - - - Specify the ``asset_type`` field in your search request. - project (str): - The project that this resource belongs to, in the form of - projects/{PROJECT_NUMBER}. This field is available when the - resource belongs to a project. - - To search against ``project``: - - - Use a field query. Example: ``project:12345`` - - Use a free text query. Example: ``12345`` - - Specify the ``scope`` field as this project in your - search request. - folders (MutableSequence[str]): - The folder(s) that this resource belongs to, in the form of - folders/{FOLDER_NUMBER}. This field is available when the - resource belongs to one or more folders. - - To search against ``folders``: - - - Use a field query. Example: ``folders:(123 OR 456)`` - - Use a free text query. Example: ``123`` - - Specify the ``scope`` field as this folder in your search - request. - organization (str): - The organization that this resource belongs to, in the form - of organizations/{ORGANIZATION_NUMBER}. This field is - available when the resource belongs to an organization. - - To search against ``organization``: - - - Use a field query. Example: ``organization:123`` - - Use a free text query. Example: ``123`` - - Specify the ``scope`` field as this organization in your - search request. - display_name (str): - The display name of this resource. This field is available - only when the resource's Protobuf contains it. - - To search against the ``display_name``: - - - Use a field query. Example: ``displayName:"My Instance"`` - - Use a free text query. Example: ``"My Instance"`` - description (str): - One or more paragraphs of text description of this resource. - Maximum length could be up to 1M bytes. This field is - available only when the resource's Protobuf contains it. - - To search against the ``description``: - - - Use a field query. Example: - ``description:"important instance"`` - - Use a free text query. Example: ``"important instance"`` - location (str): - Location can be ``global``, regional like ``us-east1``, or - zonal like ``us-west1-b``. This field is available only when - the resource's Protobuf contains it. - - To search against the ``location``: - - - Use a field query. Example: ``location:us-west*`` - - Use a free text query. Example: ``us-west*`` - labels (MutableMapping[str, str]): - User labels associated with this resource. See `Labelling - and grouping Google Cloud - resources `__ - for more information. This field is available only when the - resource's Protobuf contains it. - - To search against the ``labels``: - - - Use a field query: - - - query on any label's key or value. Example: - ``labels:prod`` - - query by a given label. Example: ``labels.env:prod`` - - query by a given label's existence. Example: - ``labels.env:*`` - - - Use a free text query. Example: ``prod`` - network_tags (MutableSequence[str]): - Network tags associated with this resource. Like labels, - network tags are a type of annotations used to group Google - Cloud resources. See `Labelling Google Cloud - resources `__ - for more information. This field is available only when the - resource's Protobuf contains it. - - To search against the ``network_tags``: - - - Use a field query. Example: ``networkTags:internal`` - - Use a free text query. 
Example: ``internal`` - kms_key (str): - The Cloud KMS - `CryptoKey `__ - name or - `CryptoKeyVersion `__ - name. - - This field is present only for backward - compatibility. Use the ``kms_keys`` field to retrieve Cloud - KMS key information. This field is available only when the - resource's Protobuf contains it and will only be populated - for `these resource - types `__ - for backward compatibility. - - To search against the ``kms_key``: - - - Use a field query. Example: ``kmsKey:key`` - - Use a free text query. Example: ``key`` - kms_keys (MutableSequence[str]): - The Cloud KMS - `CryptoKey `__ - names or - `CryptoKeyVersion `__ - names. This field is available only when the resource's - Protobuf contains it. - - To search against the ``kms_keys``: - - - Use a field query. Example: ``kmsKeys:key`` - - Use a free text query. Example: ``key`` - create_time (google.protobuf.timestamp_pb2.Timestamp): - The create timestamp of this resource, at which the resource - was created. The granularity is in seconds. Timestamp.nanos - will always be 0. This field is available only when the - resource's Protobuf contains it. - - To search against ``create_time``: - - - Use a field query. - - - value in seconds since unix epoch. Example: - ``createTime > 1609459200`` - - value in date string. Example: - ``createTime > 2021-01-01`` - - value in date-time string (must be quoted). Example: - ``createTime > "2021-01-01T00:00:00"`` - update_time (google.protobuf.timestamp_pb2.Timestamp): - The last update timestamp of this resource, at which the - resource was last modified or deleted. The granularity is in - seconds. Timestamp.nanos will always be 0. This field is - available only when the resource's Protobuf contains it. - - To search against ``update_time``: - - - Use a field query. - - - value in seconds since unix epoch. Example: - ``updateTime < 1609459200`` - - value in date string. Example: - ``updateTime < 2021-01-01`` - - value in date-time string (must be quoted). Example: - ``updateTime < "2021-01-01T00:00:00"`` - state (str): - The state of this resource. Different resource types have - different state definitions that are mapped from various - fields of different resource types. This field is available - only when the resource's Protobuf contains it. - - Example: If the resource is an instance provided by Compute - Engine, its state will include PROVISIONING, STAGING, - RUNNING, STOPPING, SUSPENDING, SUSPENDED, REPAIRING, and - TERMINATED. See ``status`` definition in `API - Reference `__. - If the resource is a project provided by Resource Manager, - its state will include LIFECYCLE_STATE_UNSPECIFIED, ACTIVE, - DELETE_REQUESTED and DELETE_IN_PROGRESS. See - ``lifecycleState`` definition in `API - Reference `__. - - To search against the ``state``: - - - Use a field query. Example: ``state:RUNNING`` - - Use a free text query. Example: ``RUNNING`` - additional_attributes (google.protobuf.struct_pb2.Struct): - The additional searchable attributes of this resource. The - attributes may vary from one resource type to another. - Examples: ``projectId`` for Project, ``dnsName`` for DNS - ManagedZone. This field contains a subset of the resource - metadata fields that are returned by the List or Get APIs - provided by the corresponding Google Cloud service (e.g., - Compute Engine). See `API references and supported - searchable - attributes `__ - for which fields are included. - - You can search values of these fields through free text - search. 
However, you should not consume the field - programmatically, as the field names and values may change as - the Google Cloud service updates to a new incompatible API - version. - - To search against the ``additional_attributes``: - - - Use a free text query to match the attributes values. - Example: to search - ``additional_attributes = { dnsName: "foobar" }``, you - can issue a query ``foobar``. - parent_full_resource_name (str): - The full resource name of this resource's parent, if it has - one. To search against the ``parent_full_resource_name``: - - - Use a field query. Example: - ``parentFullResourceName:"project-name"`` - - Use a free text query. Example: ``project-name`` - versioned_resources (MutableSequence[google.cloud.asset_v1.types.VersionedResource]): - Versioned resource representations of this resource. This is - repeated because there could be multiple versions of - resource representations during version migration. - - This ``versioned_resources`` field is not searchable. Some - attributes of the resource representations are exposed in - the ``additional_attributes`` field, so as to allow users to - search on them. - attached_resources (MutableSequence[google.cloud.asset_v1.types.AttachedResource]): - Attached resources of this resource. For example, an - OSConfig Inventory is an attached resource of a Compute - Instance. This field is repeated because a resource could - have multiple attached resources. - - This ``attached_resources`` field is not searchable. Some - attributes of the attached resources are exposed in - the ``additional_attributes`` field, so as to allow users to - search on them. - relationships (MutableMapping[str, google.cloud.asset_v1.types.RelatedResources]): - A map of related resources of this resource, keyed by the - relationship type. A relationship type is in the format of - {SourceType}*{ACTION}*\ {DestType}. Example: - ``DISK_TO_INSTANCE``, ``DISK_TO_NETWORK``, - ``INSTANCE_TO_INSTANCEGROUP``. See `supported relationship - types `__. - tag_keys (MutableSequence[str]): - This field is only present for the purpose of backward - compatibility. Use the ``tags`` field instead. - - TagKey namespaced names, in the format of - {ORG_ID}/{TAG_KEY_SHORT_NAME}. To search against the - ``tagKeys``: - - - Use a field query. Example: - - - ``tagKeys:"123456789/env*"`` - - ``tagKeys="123456789/env"`` - - ``tagKeys:"env"`` - - - Use a free text query. Example: - - - ``env`` - tag_values (MutableSequence[str]): - This field is only present for the purpose of backward - compatibility. Use the ``tags`` field instead. - - TagValue namespaced names, in the format of - {ORG_ID}/{TAG_KEY_SHORT_NAME}/{TAG_VALUE_SHORT_NAME}. To - search against the ``tagValues``: - - - Use a field query. Example: - - - ``tagValues:"env"`` - - ``tagValues:"env/prod"`` - - ``tagValues:"123456789/env/prod*"`` - - ``tagValues="123456789/env/prod"`` - - - Use a free text query. Example: - - - ``prod`` - tag_value_ids (MutableSequence[str]): - This field is only present for the purpose of backward - compatibility. Use the ``tags`` field instead. - - TagValue IDs, in the format of tagValues/{TAG_VALUE_ID}. To - search against the ``tagValueIds``: - - - Use a field query. Example: - - - ``tagValueIds="tagValues/456"`` - - - Use a free text query. Example: - - - ``456`` - tags (MutableSequence[google.cloud.asset_v1.types.Tag]): - The tags directly attached to this resource. - - To search against the ``tags``: - - - Use a field query. 
Example: - - - ``tagKeys:"123456789/env*"`` - - ``tagKeys="123456789/env"`` - - ``tagKeys:"env"`` - - ``tagKeyIds="tagKeys/123"`` - - ``tagValues:"env"`` - - ``tagValues:"env/prod"`` - - ``tagValues:"123456789/env/prod*"`` - - ``tagValues="123456789/env/prod"`` - - ``tagValueIds="tagValues/456"`` - - - Use a free text query. Example: - - - ``env/prod`` - effective_tags (MutableSequence[google.cloud.asset_v1.types.EffectiveTagDetails]): - The effective tags on this resource. All of the tags that - are both attached to and inherited by a resource are - collectively called the effective tags. For more - information, see `tag - inheritance `__. - - To search against the ``effective_tags``: - - - Use a field query. Example: - - - ``effectiveTagKeys:"123456789/env*"`` - - ``effectiveTagKeys="123456789/env"`` - - ``effectiveTagKeys:"env"`` - - ``effectiveTagKeyIds="tagKeys/123"`` - - ``effectiveTagValues:"env"`` - - ``effectiveTagValues:"env/prod"`` - - ``effectiveTagValues:"123456789/env/prod*"`` - - ``effectiveTagValues="123456789/env/prod"`` - - ``effectiveTagValueIds="tagValues/456"`` - parent_asset_type (str): - The type of this resource's immediate parent, if there is - one. - - To search against the ``parent_asset_type``: - - - Use a field query. Example: - ``parentAssetType:"cloudresourcemanager.googleapis.com/Project"`` - - Use a free text query. Example: - ``cloudresourcemanager.googleapis.com/Project`` - scc_security_marks (MutableMapping[str, str]): - The actual content of Security Command Center security marks - associated with the asset. - - To search against SCC SecurityMarks field: - - - Use a field query: - - - query by a given key value pair. Example: - ``sccSecurityMarks.foo=bar`` - - query by a given key's existence. Example: - ``sccSecurityMarks.foo:*`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - asset_type: str = proto.Field( - proto.STRING, - number=2, - ) - project: str = proto.Field( - proto.STRING, - number=3, - ) - folders: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=17, - ) - organization: str = proto.Field( - proto.STRING, - number=18, - ) - display_name: str = proto.Field( - proto.STRING, - number=4, - ) - description: str = proto.Field( - proto.STRING, - number=5, - ) - location: str = proto.Field( - proto.STRING, - number=6, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=7, - ) - network_tags: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=8, - ) - kms_key: str = proto.Field( - proto.STRING, - number=10, - ) - kms_keys: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=28, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=11, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=12, - message=timestamp_pb2.Timestamp, - ) - state: str = proto.Field( - proto.STRING, - number=13, - ) - additional_attributes: struct_pb2.Struct = proto.Field( - proto.MESSAGE, - number=9, - message=struct_pb2.Struct, - ) - parent_full_resource_name: str = proto.Field( - proto.STRING, - number=19, - ) - versioned_resources: MutableSequence['VersionedResource'] = proto.RepeatedField( - proto.MESSAGE, - number=16, - message='VersionedResource', - ) - attached_resources: MutableSequence['AttachedResource'] = proto.RepeatedField( - proto.MESSAGE, - number=20, - message='AttachedResource', - ) - relationships: MutableMapping[str, 'RelatedResources'] = 
proto.MapField( - proto.STRING, - proto.MESSAGE, - number=21, - message='RelatedResources', - ) - tag_keys: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=23, - ) - tag_values: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=25, - ) - tag_value_ids: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=26, - ) - tags: MutableSequence['Tag'] = proto.RepeatedField( - proto.MESSAGE, - number=29, - message='Tag', - ) - effective_tags: MutableSequence['EffectiveTagDetails'] = proto.RepeatedField( - proto.MESSAGE, - number=30, - message='EffectiveTagDetails', - ) - parent_asset_type: str = proto.Field( - proto.STRING, - number=103, - ) - scc_security_marks: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=32, - ) - - -class VersionedResource(proto.Message): - r"""Resource representation as defined by the corresponding - service providing the resource for a given API version. - - Attributes: - version (str): - API version of the resource. - - Example: If the resource is an instance provided by Compute - Engine v1 API as defined in - ``https://cloud.google.com/compute/docs/reference/rest/v1/instances``, - version will be "v1". - resource (google.protobuf.struct_pb2.Struct): - JSON representation of the resource as defined by the - corresponding service providing this resource. - - Example: If the resource is an instance provided by Compute - Engine, this field will contain the JSON representation of - the instance as defined by Compute Engine: - ``https://cloud.google.com/compute/docs/reference/rest/v1/instances``. - - You can find the resource definition for each supported - resource type in this table: - ``https://cloud.google.com/asset-inventory/docs/supported-asset-types`` - """ - - version: str = proto.Field( - proto.STRING, - number=1, - ) - resource: struct_pb2.Struct = proto.Field( - proto.MESSAGE, - number=2, - message=struct_pb2.Struct, - ) - - -class AttachedResource(proto.Message): - r"""Attached resource representation, which is defined by the - corresponding service provider. It represents an attached - resource's payload. - - Attributes: - asset_type (str): - The type of this attached resource. - - Example: ``osconfig.googleapis.com/Inventory`` - - You can find the supported attached asset types of each - resource in this table: - ``https://cloud.google.com/asset-inventory/docs/supported-asset-types`` - versioned_resources (MutableSequence[google.cloud.asset_v1.types.VersionedResource]): - Versioned resource representations of this - attached resource. This is repeated because - there could be multiple versions of the attached - resource representations during version - migration. - """ - - asset_type: str = proto.Field( - proto.STRING, - number=1, - ) - versioned_resources: MutableSequence['VersionedResource'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='VersionedResource', - ) - - -class RelatedResources(proto.Message): - r"""The related resources of the primary resource. - - Attributes: - related_resources (MutableSequence[google.cloud.asset_v1.types.RelatedResource]): - The detailed related resources of the primary - resource. - """ - - related_resources: MutableSequence['RelatedResource'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='RelatedResource', - ) - - -class RelatedResource(proto.Message): - r"""The detailed related resource. - - Attributes: - asset_type (str): - The type of the asset. 
Example: - ``compute.googleapis.com/Instance`` - full_resource_name (str): - The full resource name of the related resource. Example: - ``//compute.googleapis.com/projects/my_proj_123/zones/instance/instance123`` - """ - - asset_type: str = proto.Field( - proto.STRING, - number=1, - ) - full_resource_name: str = proto.Field( - proto.STRING, - number=2, - ) - - -class IamPolicySearchResult(proto.Message): - r"""A result of IAM Policy search, containing information of an - IAM policy. - - Attributes: - resource (str): - The full resource name of the resource associated with this - IAM policy. Example: - ``//compute.googleapis.com/projects/my_project_123/zones/zone1/instances/instance1``. - See `Cloud Asset Inventory Resource Name - Format `__ - for more information. - - To search against the ``resource``: - - - use a field query. Example: - ``resource:organizations/123`` - asset_type (str): - The type of the resource associated with this IAM policy. - Example: ``compute.googleapis.com/Disk``. - - To search against the ``asset_type``: - - - specify the ``asset_types`` field in your search request. - project (str): - The project that the associated Google Cloud resource - belongs to, in the form of projects/{PROJECT_NUMBER}. If an - IAM policy is set on a resource (such as a VM instance or a Cloud - Storage bucket), the project field will indicate the project - that contains the resource. If an IAM policy is set on a - folder or organization, this field will be empty. - - To search against the ``project``: - - - specify the ``scope`` field as this project in your - search request. - folders (MutableSequence[str]): - The folder(s) that the IAM policy belongs to, in the form of - folders/{FOLDER_NUMBER}. This field is available when the - IAM policy belongs to one or more folders. - - To search against ``folders``: - - - use a field query. Example: ``folders:(123 OR 456)`` - - use a free text query. Example: ``123`` - - specify the ``scope`` field as this folder in your search - request. - organization (str): - The organization that the IAM policy belongs to, in the form - of organizations/{ORGANIZATION_NUMBER}. This field is - available when the IAM policy belongs to an organization. - - To search against ``organization``: - - - use a field query. Example: ``organization:123`` - - use a free text query. Example: ``123`` - - specify the ``scope`` field as this organization in your - search request. - policy (google.iam.v1.policy_pb2.Policy): - The IAM policy directly set on the given resource. Note that - the original IAM policy can contain multiple bindings. This - only contains the bindings that match the given query. For - queries that don't contain a constraint on policies (e.g., an - empty query), this contains all the bindings. - - To search against the ``policy`` bindings: - - - use a field query: - - - query by the policy contained members. Example: - ``policy:amy@gmail.com`` - - query by the policy contained roles. Example: - ``policy:roles/compute.admin`` - - query by the policy contained roles' included - permissions. Example: - ``policy.role.permissions:compute.instances.create`` - explanation (google.cloud.asset_v1.types.IamPolicySearchResult.Explanation): - Explanation about the IAM policy search - result. It contains additional information to - explain why the search result matches the query. - """ - - class Explanation(proto.Message): - r"""Explanation about the IAM policy search result. 
- - Attributes: - matched_permissions (MutableMapping[str, google.cloud.asset_v1.types.IamPolicySearchResult.Explanation.Permissions]): - The map from roles to their included permissions that match - the permission query (i.e., a query containing - ``policy.role.permissions:``). Example: if query - ``policy.role.permissions:compute.disk.get`` matches a - policy binding that contains the owner role, the - matched_permissions will be - ``{"roles/owner": ["compute.disk.get"]}``. The roles can - also be found in the returned ``policy`` bindings. Note that - the map is populated only for requests with permission - queries. - """ - - class Permissions(proto.Message): - r"""IAM permissions - - Attributes: - permissions (MutableSequence[str]): - A list of permissions. A sample permission string: - ``compute.disk.get``. - """ - - permissions: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - - matched_permissions: MutableMapping[str, 'IamPolicySearchResult.Explanation.Permissions'] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=1, - message='IamPolicySearchResult.Explanation.Permissions', - ) - - resource: str = proto.Field( - proto.STRING, - number=1, - ) - asset_type: str = proto.Field( - proto.STRING, - number=5, - ) - project: str = proto.Field( - proto.STRING, - number=2, - ) - folders: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=6, - ) - organization: str = proto.Field( - proto.STRING, - number=7, - ) - policy: policy_pb2.Policy = proto.Field( - proto.MESSAGE, - number=3, - message=policy_pb2.Policy, - ) - explanation: Explanation = proto.Field( - proto.MESSAGE, - number=4, - message=Explanation, - ) - - -class IamPolicyAnalysisState(proto.Message): - r"""Represents the detailed state of an entity under analysis, - such as a resource, an identity or an access. - - Attributes: - code (google.rpc.code_pb2.Code): - The Google standard error code that best describes the - state. For example: - - - OK means the analysis on this entity has been - successfully finished; - - PERMISSION_DENIED means an access denied error is - encountered; - - DEADLINE_EXCEEDED means the analysis on this entity - hasn't been started in time; - cause (str): - The human-readable description of the cause - of failure. - """ - - code: code_pb2.Code = proto.Field( - proto.ENUM, - number=1, - enum=code_pb2.Code, - ) - cause: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ConditionEvaluation(proto.Message): - r"""The condition evaluation. - - Attributes: - evaluation_value (google.cloud.asset_v1.types.ConditionEvaluation.EvaluationValue): - The evaluation result. - """ - class EvaluationValue(proto.Enum): - r"""Value of this expression. - - Values: - EVALUATION_VALUE_UNSPECIFIED (0): - Reserved for future use. - TRUE (1): - The evaluation result is ``true``. - FALSE (2): - The evaluation result is ``false``. - CONDITIONAL (3): - The evaluation result is ``conditional`` when the condition - expression contains variables that are either missing input - values or not yet supported by Policy Analyzer. - """ - EVALUATION_VALUE_UNSPECIFIED = 0 - TRUE = 1 - FALSE = 2 - CONDITIONAL = 3 - - evaluation_value: EvaluationValue = proto.Field( - proto.ENUM, - number=1, - enum=EvaluationValue, - ) - -
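``IamPolicySearchResult`` above is what ``AssetServiceClient.search_all_iam_policies()`` yields, one result per matching binding. A minimal sketch of a ``policy:`` field query as documented on the message; the scope and query values are illustrative:

.. code-block:: python

    from google.cloud import asset_v1

    client = asset_v1.AssetServiceClient()

    # Match bindings that include a given member; the query syntax is the
    # one documented on IamPolicySearchResult.policy above.
    results = client.search_all_iam_policies(
        request=asset_v1.SearchAllIamPoliciesRequest(
            scope="projects/my-project",
            query="policy:amy@gmail.com",
        )
    )
    for result in results:  # each item is an IamPolicySearchResult
        print(result.resource, result.policy)

-class IamPolicyAnalysisResult(proto.Message): - r"""IAM Policy analysis result, consisting of one IAM policy - binding and derived access control lists. 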
- - Attributes: - attached_resource_full_name (str): - The `full resource - name `__ - of the resource to which the - [iam_binding][google.cloud.asset.v1.IamPolicyAnalysisResult.iam_binding] - policy attaches. - iam_binding (google.iam.v1.policy_pb2.Binding): - The IAM policy binding under analysis. - access_control_lists (MutableSequence[google.cloud.asset_v1.types.IamPolicyAnalysisResult.AccessControlList]): - The access control lists derived from the - [iam_binding][google.cloud.asset.v1.IamPolicyAnalysisResult.iam_binding] - that match or potentially match resource and access - selectors specified in the request. - identity_list (google.cloud.asset_v1.types.IamPolicyAnalysisResult.IdentityList): - The identity list derived from members of the - [iam_binding][google.cloud.asset.v1.IamPolicyAnalysisResult.iam_binding] - that match or potentially match identity selector specified - in the request. - fully_explored (bool): - Represents whether all analyses on the - [iam_binding][google.cloud.asset.v1.IamPolicyAnalysisResult.iam_binding] - have successfully finished. - """ - - class Resource(proto.Message): - r"""A Google Cloud resource under analysis. - - Attributes: - full_resource_name (str): - The `full resource - name `__ - analysis_state (google.cloud.asset_v1.types.IamPolicyAnalysisState): - The analysis state of this resource. - """ - - full_resource_name: str = proto.Field( - proto.STRING, - number=1, - ) - analysis_state: 'IamPolicyAnalysisState' = proto.Field( - proto.MESSAGE, - number=2, - message='IamPolicyAnalysisState', - ) - - class Access(proto.Message): - r"""An IAM role or permission under analysis. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - role (str): - The role. - - This field is a member of `oneof`_ ``oneof_access``. - permission (str): - The permission. - - This field is a member of `oneof`_ ``oneof_access``. - analysis_state (google.cloud.asset_v1.types.IamPolicyAnalysisState): - The analysis state of this access. - """ - - role: str = proto.Field( - proto.STRING, - number=1, - oneof='oneof_access', - ) - permission: str = proto.Field( - proto.STRING, - number=2, - oneof='oneof_access', - ) - analysis_state: 'IamPolicyAnalysisState' = proto.Field( - proto.MESSAGE, - number=3, - message='IamPolicyAnalysisState', - ) - - class Identity(proto.Message): - r"""An identity under analysis. - - Attributes: - name (str): - The identity of members, formatted as they appear in an `IAM - policy - binding `__. - For example, they might be formatted like the following: - - - user:foo@google.com - - group:group1@google.com - - serviceAccount:s1@prj1.iam.gserviceaccount.com - - projectOwner:some_project_id - - domain:google.com - - allUsers - analysis_state (google.cloud.asset_v1.types.IamPolicyAnalysisState): - The analysis state of this identity. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - analysis_state: 'IamPolicyAnalysisState' = proto.Field( - proto.MESSAGE, - number=2, - message='IamPolicyAnalysisState', - ) - - class Edge(proto.Message): - r"""A directional edge. - - Attributes: - source_node (str): - The source node of the edge. For example, it - could be a full resource name for a resource - node or an email of an identity. 
- target_node (str): - The target node of the edge. For example, it - could be a full resource name for a resource - node or an email of an identity. - """ - - source_node: str = proto.Field( - proto.STRING, - number=1, - ) - target_node: str = proto.Field( - proto.STRING, - number=2, - ) - - class AccessControlList(proto.Message): - r"""An access control list, derived from the above IAM policy binding, - which contains a set of resources and accesses. May include one item - from each set to compose an access control entry. - - NOTICE that there could be multiple access control lists for one IAM - policy binding. The access control lists are created based on - resource and access combinations. - - For example, assume we have the following cases in one IAM policy - binding: - - - Permissions P1 and P2 apply to resources R1 and R2; - - Permission P3 applies to resources R2 and R3; - - This will result in the following access control lists: - - - AccessControlList 1: [R1, R2], [P1, P2] - - AccessControlList 2: [R2, R3], [P3] - - Attributes: - resources (MutableSequence[google.cloud.asset_v1.types.IamPolicyAnalysisResult.Resource]): - The resources that match one of the following conditions: - - - The resource_selector, if it is specified in the request; - - Otherwise, resources reachable from the policy attached - resource. - accesses (MutableSequence[google.cloud.asset_v1.types.IamPolicyAnalysisResult.Access]): - The accesses that match one of the following conditions: - - - The access_selector, if it is specified in the request; - - Otherwise, access specifiers reachable from the policy - binding's role. - resource_edges (MutableSequence[google.cloud.asset_v1.types.IamPolicyAnalysisResult.Edge]): - Resource edges of the graph starting from the policy - attached resource to any descendant resources. The - [Edge.source_node][google.cloud.asset.v1.IamPolicyAnalysisResult.Edge.source_node] - contains the full resource name of a parent resource and - [Edge.target_node][google.cloud.asset.v1.IamPolicyAnalysisResult.Edge.target_node] - contains the full resource name of a child resource. This - field is present only if the output_resource_edges option is - enabled in the request. - condition_evaluation (google.cloud.asset_v1.types.ConditionEvaluation): - Condition evaluation for this - AccessControlList, if there is a condition - defined in the above IAM policy binding. - """ - - resources: MutableSequence['IamPolicyAnalysisResult.Resource'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='IamPolicyAnalysisResult.Resource', - ) - accesses: MutableSequence['IamPolicyAnalysisResult.Access'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='IamPolicyAnalysisResult.Access', - ) - resource_edges: MutableSequence['IamPolicyAnalysisResult.Edge'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='IamPolicyAnalysisResult.Edge', - ) - condition_evaluation: 'ConditionEvaluation' = proto.Field( - proto.MESSAGE, - number=4, - message='ConditionEvaluation', - ) - - class IdentityList(proto.Message): - r"""The identities and group edges. - - Attributes: - identities (MutableSequence[google.cloud.asset_v1.types.IamPolicyAnalysisResult.Identity]): - Only the identities that match one of the following - conditions will be presented: - - - The identity_selector, if it is specified in the request; - - Otherwise, identities reachable from the policy binding's - members. 
- group_edges (MutableSequence[google.cloud.asset_v1.types.IamPolicyAnalysisResult.Edge]): - Group identity edges of the graph starting from the - binding's group members to any node of the - [identities][google.cloud.asset.v1.IamPolicyAnalysisResult.IdentityList.identities]. - The - [Edge.source_node][google.cloud.asset.v1.IamPolicyAnalysisResult.Edge.source_node] - contains a group, such as ``group:parent@google.com``. The - [Edge.target_node][google.cloud.asset.v1.IamPolicyAnalysisResult.Edge.target_node] - contains a member of the group, such as - ``group:child@google.com`` or ``user:foo@google.com``. This - field is present only if the output_group_edges option is - enabled in the request. - """ - - identities: MutableSequence['IamPolicyAnalysisResult.Identity'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='IamPolicyAnalysisResult.Identity', - ) - group_edges: MutableSequence['IamPolicyAnalysisResult.Edge'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='IamPolicyAnalysisResult.Edge', - ) - - attached_resource_full_name: str = proto.Field( - proto.STRING, - number=1, - ) - iam_binding: policy_pb2.Binding = proto.Field( - proto.MESSAGE, - number=2, - message=policy_pb2.Binding, - ) - access_control_lists: MutableSequence[AccessControlList] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=AccessControlList, - ) - identity_list: IdentityList = proto.Field( - proto.MESSAGE, - number=4, - message=IdentityList, - ) - fully_explored: bool = proto.Field( - proto.BOOL, - number=5, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest))
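The search result messages in this file come back from ``AssetServiceClient.search_all_resources()``. A minimal sketch using the field-query syntax documented on ``ResourceSearchResult``; the scope, query, and asset type values are illustrative:

.. code-block:: python

    from google.cloud import asset_v1

    client = asset_v1.AssetServiceClient()

    # Field queries (labels.env:prod, location:us-west*) combine exactly as
    # described in the ResourceSearchResult attribute documentation.
    pager = client.search_all_resources(
        request=asset_v1.SearchAllResourcesRequest(
            scope="projects/my-project",
            query="labels.env:prod location:us-west*",
            asset_types=["compute.googleapis.com/Instance"],
        )
    )
    for resource in pager:  # each item is a ResourceSearchResult
        print(resource.name, resource.location, dict(resource.labels))

diff --git a/owl-bot-staging/google-cloud-asset/v1/mypy.ini b/owl-bot-staging/google-cloud-asset/v1/mypy.ini deleted file mode 100644 index 574c5aed394b..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/google-cloud-asset/v1/noxfile.py b/owl-bot-staging/google-cloud-asset/v1/noxfile.py deleted file mode 100644 index 2ca01db6293c..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/noxfile.py +++ /dev/null @@ -1,280 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 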
-# -import os -import pathlib -import re -import shutil -import subprocess -import sys - - -import nox  # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", - "3.12", - "3.13", -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = 'google-cloud-asset' - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.13" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds", - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "prerelease_deps", -] - -@nox.session(python=ALL_PYTHON) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def unit(session, protobuf_implementation): - """Run the unit test suite.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") - - # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. - # The 'cpp' implementation requires Protobuf<4. - if protobuf_implementation == "cpp": - session.install("protobuf<4") - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/asset_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - -@nox.session(python=ALL_PYTHON[-1]) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def prerelease_deps(session, protobuf_implementation): - """Run the unit test suite against pre-release versions of dependencies.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - # Install test environment dependencies - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - - # Install the package without dependencies - session.install('-e', '.', '--no-deps') - - # We test the minimum dependency versions using the minimum Python - # version so the lowest python runtime that we test has a corresponding constraints - # file, located at `testing/constraints-<lowest python version>.txt`, which contains all of the - # dependencies and extras. - with open( - CURRENT_DIRECTORY - / "testing" - / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. 
- constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - session.install(*constraints_deps) - - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 - "grpcio!=1.67.0rc1", - "grpcio-status", - "protobuf", - "proto-plus", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) - - # Remaining dependencies - other_deps = [ - "requests", - ] - session.install(*other_deps) - - # Print out prerelease package versions - - session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") - session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("python", "-c", "import grpc; print(grpc.__version__)") - session.run( - "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" - ) - session.run( - "python", "-c", "import proto; print(proto.__version__)" - ) - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/asset_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. - """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. 
- """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_async.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_async.py deleted file mode 100644 index 8369402d5e70..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AnalyzeIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-asset - - -# [START cloudasset_v1_generated_AssetService_AnalyzeIamPolicy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import asset_v1 - - -async def sample_analyze_iam_policy(): - # Create a client - client = asset_v1.AssetServiceAsyncClient() - - # Initialize request argument(s) - analysis_query = asset_v1.IamPolicyAnalysisQuery() - analysis_query.scope = "scope_value" - - request = asset_v1.AnalyzeIamPolicyRequest( - analysis_query=analysis_query, - ) - - # Make the request - response = await client.analyze_iam_policy(request=request) - - # Handle the response - print(response) - -# [END cloudasset_v1_generated_AssetService_AnalyzeIamPolicy_async] diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py deleted file mode 100644 index 90a5878ac05a..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py +++ /dev/null @@ -1,63 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AnalyzeIamPolicyLongrunning -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-asset - - -# [START cloudasset_v1_generated_AssetService_AnalyzeIamPolicyLongrunning_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import asset_v1 - - -async def sample_analyze_iam_policy_longrunning(): - # Create a client - client = asset_v1.AssetServiceAsyncClient() - - # Initialize request argument(s) - analysis_query = asset_v1.IamPolicyAnalysisQuery() - analysis_query.scope = "scope_value" - - output_config = asset_v1.IamPolicyAnalysisOutputConfig() - output_config.gcs_destination.uri = "uri_value" - - request = asset_v1.AnalyzeIamPolicyLongrunningRequest( - analysis_query=analysis_query, - output_config=output_config, - ) - - # Make the request - operation = client.analyze_iam_policy_longrunning(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END cloudasset_v1_generated_AssetService_AnalyzeIamPolicyLongrunning_async] diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_sync.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_sync.py deleted file mode 100644 index 1f246a28937e..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_sync.py +++ /dev/null @@ -1,63 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AnalyzeIamPolicyLongrunning -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-asset - - -# [START cloudasset_v1_generated_AssetService_AnalyzeIamPolicyLongrunning_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import asset_v1 - - -def sample_analyze_iam_policy_longrunning(): - # Create a client - client = asset_v1.AssetServiceClient() - - # Initialize request argument(s) - analysis_query = asset_v1.IamPolicyAnalysisQuery() - analysis_query.scope = "scope_value" - - output_config = asset_v1.IamPolicyAnalysisOutputConfig() - output_config.gcs_destination.uri = "uri_value" - - request = asset_v1.AnalyzeIamPolicyLongrunningRequest( - analysis_query=analysis_query, - output_config=output_config, - ) - - # Make the request - operation = client.analyze_iam_policy_longrunning(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END cloudasset_v1_generated_AssetService_AnalyzeIamPolicyLongrunning_sync] diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_sync.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_sync.py deleted file mode 100644 index 73dd37f5cab0..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AnalyzeIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-asset - - -# [START cloudasset_v1_generated_AssetService_AnalyzeIamPolicy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import asset_v1 - - -def sample_analyze_iam_policy(): - # Create a client - client = asset_v1.AssetServiceClient() - - # Initialize request argument(s) - analysis_query = asset_v1.IamPolicyAnalysisQuery() - analysis_query.scope = "scope_value" - - request = asset_v1.AnalyzeIamPolicyRequest( - analysis_query=analysis_query, - ) - - # Make the request - response = client.analyze_iam_policy(request=request) - - # Handle the response - print(response) - -# [END cloudasset_v1_generated_AssetService_AnalyzeIamPolicy_sync] diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_async.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_async.py deleted file mode 100644 index ab5d6f2a7560..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AnalyzeMove -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-asset - - -# [START cloudasset_v1_generated_AssetService_AnalyzeMove_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import asset_v1 - - -async def sample_analyze_move(): - # Create a client - client = asset_v1.AssetServiceAsyncClient() - - # Initialize request argument(s) - request = asset_v1.AnalyzeMoveRequest( - resource="resource_value", - destination_parent="destination_parent_value", - ) - - # Make the request - response = await client.analyze_move(request=request) - - # Handle the response - print(response) - -# [END cloudasset_v1_generated_AssetService_AnalyzeMove_async] diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_sync.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_sync.py deleted file mode 100644 index e886b920c49a..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AnalyzeMove -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-asset - - -# [START cloudasset_v1_generated_AssetService_AnalyzeMove_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import asset_v1 - - -def sample_analyze_move(): - # Create a client - client = asset_v1.AssetServiceClient() - - # Initialize request argument(s) - request = asset_v1.AnalyzeMoveRequest( - resource="resource_value", - destination_parent="destination_parent_value", - ) - - # Make the request - response = client.analyze_move(request=request) - - # Handle the response - print(response) - -# [END cloudasset_v1_generated_AssetService_AnalyzeMove_sync] diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_async.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_async.py deleted file mode 100644 index 552d1ecc6eb2..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_async.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AnalyzeOrgPolicies -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-asset - - -# [START cloudasset_v1_generated_AssetService_AnalyzeOrgPolicies_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import asset_v1 - - -async def sample_analyze_org_policies(): - # Create a client - client = asset_v1.AssetServiceAsyncClient() - - # Initialize request argument(s) - request = asset_v1.AnalyzeOrgPoliciesRequest( - scope="scope_value", - constraint="constraint_value", - ) - - # Make the request - page_result = client.analyze_org_policies(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END cloudasset_v1_generated_AssetService_AnalyzeOrgPolicies_async] diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_sync.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_sync.py deleted file mode 100644 index b56d918846fb..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_sync.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AnalyzeOrgPolicies -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-asset - - -# [START cloudasset_v1_generated_AssetService_AnalyzeOrgPolicies_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import asset_v1 - - -def sample_analyze_org_policies(): - # Create a client - client = asset_v1.AssetServiceClient() - - # Initialize request argument(s) - request = asset_v1.AnalyzeOrgPoliciesRequest( - scope="scope_value", - constraint="constraint_value", - ) - - # Make the request - page_result = client.analyze_org_policies(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END cloudasset_v1_generated_AssetService_AnalyzeOrgPolicies_sync] diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_async.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_async.py deleted file mode 100644 index adb76c21c6a3..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_async.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AnalyzeOrgPolicyGovernedAssets -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-asset - - -# [START cloudasset_v1_generated_AssetService_AnalyzeOrgPolicyGovernedAssets_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import asset_v1
-
-
-async def sample_analyze_org_policy_governed_assets():
-    # Create a client
-    client = asset_v1.AssetServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = asset_v1.AnalyzeOrgPolicyGovernedAssetsRequest(
-        scope="scope_value",
-        constraint="constraint_value",
-    )
-
-    # Make the request
-    page_result = await client.analyze_org_policy_governed_assets(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END cloudasset_v1_generated_AssetService_AnalyzeOrgPolicyGovernedAssets_async]
diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_sync.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_sync.py
deleted file mode 100644
index 63efeb54057e..000000000000
--- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_sync.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for AnalyzeOrgPolicyGovernedAssets
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-asset
-
-
-# [START cloudasset_v1_generated_AssetService_AnalyzeOrgPolicyGovernedAssets_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import asset_v1
-
-
-def sample_analyze_org_policy_governed_assets():
-    # Create a client
-    client = asset_v1.AssetServiceClient()
-
-    # Initialize request argument(s)
-    request = asset_v1.AnalyzeOrgPolicyGovernedAssetsRequest(
-        scope="scope_value",
-        constraint="constraint_value",
-    )
-
-    # Make the request
-    page_result = client.analyze_org_policy_governed_assets(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END cloudasset_v1_generated_AssetService_AnalyzeOrgPolicyGovernedAssets_sync]
diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_async.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_async.py
deleted file mode 100644
index 66dc1dab9063..000000000000
--- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_async.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for AnalyzeOrgPolicyGovernedContainers
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-asset
-
-
-# [START cloudasset_v1_generated_AssetService_AnalyzeOrgPolicyGovernedContainers_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import asset_v1
-
-
-async def sample_analyze_org_policy_governed_containers():
-    # Create a client
-    client = asset_v1.AssetServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = asset_v1.AnalyzeOrgPolicyGovernedContainersRequest(
-        scope="scope_value",
-        constraint="constraint_value",
-    )
-
-    # Make the request
-    page_result = await client.analyze_org_policy_governed_containers(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END cloudasset_v1_generated_AssetService_AnalyzeOrgPolicyGovernedContainers_async]
diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_sync.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_sync.py
deleted file mode 100644
index edd19ed69fbb..000000000000
--- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_sync.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for AnalyzeOrgPolicyGovernedContainers
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-asset
-
-
-# [START cloudasset_v1_generated_AssetService_AnalyzeOrgPolicyGovernedContainers_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import asset_v1
-
-
-def sample_analyze_org_policy_governed_containers():
-    # Create a client
-    client = asset_v1.AssetServiceClient()
-
-    # Initialize request argument(s)
-    request = asset_v1.AnalyzeOrgPolicyGovernedContainersRequest(
-        scope="scope_value",
-        constraint="constraint_value",
-    )
-
-    # Make the request
-    page_result = client.analyze_org_policy_governed_containers(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END cloudasset_v1_generated_AssetService_AnalyzeOrgPolicyGovernedContainers_sync]
diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py
deleted file mode 100644
index 9655b814eb65..000000000000
--- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for BatchGetAssetsHistory
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-asset
-
-
-# [START cloudasset_v1_generated_AssetService_BatchGetAssetsHistory_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import asset_v1
-
-
-async def sample_batch_get_assets_history():
-    # Create a client
-    client = asset_v1.AssetServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = asset_v1.BatchGetAssetsHistoryRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    response = await client.batch_get_assets_history(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END cloudasset_v1_generated_AssetService_BatchGetAssetsHistory_async]
diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py
deleted file mode 100644
index 954d23bf7dce..000000000000
--- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for BatchGetAssetsHistory
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-asset
-
-
-# [START cloudasset_v1_generated_AssetService_BatchGetAssetsHistory_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import asset_v1
-
-
-def sample_batch_get_assets_history():
-    # Create a client
-    client = asset_v1.AssetServiceClient()
-
-    # Initialize request argument(s)
-    request = asset_v1.BatchGetAssetsHistoryRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    response = client.batch_get_assets_history(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END cloudasset_v1_generated_AssetService_BatchGetAssetsHistory_sync]
diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_async.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_async.py
deleted file mode 100644
index 7bf645687401..000000000000
--- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for BatchGetEffectiveIamPolicies
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-asset
-
-
-# [START cloudasset_v1_generated_AssetService_BatchGetEffectiveIamPolicies_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import asset_v1
-
-
-async def sample_batch_get_effective_iam_policies():
-    # Create a client
-    client = asset_v1.AssetServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = asset_v1.BatchGetEffectiveIamPoliciesRequest(
-        scope="scope_value",
-        names=['names_value1', 'names_value2'],
-    )
-
-    # Make the request
-    response = await client.batch_get_effective_iam_policies(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END cloudasset_v1_generated_AssetService_BatchGetEffectiveIamPolicies_async]
diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_sync.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_sync.py
deleted file mode 100644
index 8e173d62bda9..000000000000
--- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for BatchGetEffectiveIamPolicies
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-asset
-
-
-# [START cloudasset_v1_generated_AssetService_BatchGetEffectiveIamPolicies_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import asset_v1
-
-
-def sample_batch_get_effective_iam_policies():
-    # Create a client
-    client = asset_v1.AssetServiceClient()
-
-    # Initialize request argument(s)
-    request = asset_v1.BatchGetEffectiveIamPoliciesRequest(
-        scope="scope_value",
-        names=['names_value1', 'names_value2'],
-    )
-
-    # Make the request
-    response = client.batch_get_effective_iam_policies(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END cloudasset_v1_generated_AssetService_BatchGetEffectiveIamPolicies_sync]
diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_async.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_async.py
deleted file mode 100644
index 6cc4b2f9521a..000000000000
--- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_async.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CreateFeed
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-asset
-
-
-# [START cloudasset_v1_generated_AssetService_CreateFeed_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import asset_v1
-
-
-async def sample_create_feed():
-    # Create a client
-    client = asset_v1.AssetServiceAsyncClient()
-
-    # Initialize request argument(s)
-    feed = asset_v1.Feed()
-    feed.name = "name_value"
-
-    request = asset_v1.CreateFeedRequest(
-        parent="parent_value",
-        feed_id="feed_id_value",
-        feed=feed,
-    )
-
-    # Make the request
-    response = await client.create_feed(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END cloudasset_v1_generated_AssetService_CreateFeed_async]
diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_sync.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_sync.py
deleted file mode 100644
index f5237ca5e998..000000000000
--- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_sync.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CreateFeed
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-asset
-
-
-# [START cloudasset_v1_generated_AssetService_CreateFeed_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import asset_v1
-
-
-def sample_create_feed():
-    # Create a client
-    client = asset_v1.AssetServiceClient()
-
-    # Initialize request argument(s)
-    feed = asset_v1.Feed()
-    feed.name = "name_value"
-
-    request = asset_v1.CreateFeedRequest(
-        parent="parent_value",
-        feed_id="feed_id_value",
-        feed=feed,
-    )
-
-    # Make the request
-    response = client.create_feed(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END cloudasset_v1_generated_AssetService_CreateFeed_sync]
diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_async.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_async.py
deleted file mode 100644
index 7c9bf95a48d1..000000000000
--- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CreateSavedQuery
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-asset
-
-
-# [START cloudasset_v1_generated_AssetService_CreateSavedQuery_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import asset_v1
-
-
-async def sample_create_saved_query():
-    # Create a client
-    client = asset_v1.AssetServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = asset_v1.CreateSavedQueryRequest(
-        parent="parent_value",
-        saved_query_id="saved_query_id_value",
-    )
-
-    # Make the request
-    response = await client.create_saved_query(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END cloudasset_v1_generated_AssetService_CreateSavedQuery_async]
diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_sync.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_sync.py
deleted file mode 100644
index 746fcbda3656..000000000000
--- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CreateSavedQuery
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-asset
-
-
-# [START cloudasset_v1_generated_AssetService_CreateSavedQuery_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import asset_v1
-
-
-def sample_create_saved_query():
-    # Create a client
-    client = asset_v1.AssetServiceClient()
-
-    # Initialize request argument(s)
-    request = asset_v1.CreateSavedQueryRequest(
-        parent="parent_value",
-        saved_query_id="saved_query_id_value",
-    )
-
-    # Make the request
-    response = client.create_saved_query(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END cloudasset_v1_generated_AssetService_CreateSavedQuery_sync]
diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_async.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_async.py
deleted file mode 100644
index 67a09ebd009a..000000000000
--- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_async.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteFeed
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-asset
-
-
-# [START cloudasset_v1_generated_AssetService_DeleteFeed_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import asset_v1
-
-
-async def sample_delete_feed():
-    # Create a client
-    client = asset_v1.AssetServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = asset_v1.DeleteFeedRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    await client.delete_feed(request=request)
-
-
-# [END cloudasset_v1_generated_AssetService_DeleteFeed_async]
diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_sync.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_sync.py
deleted file mode 100644
index f86f14507e30..000000000000
--- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_sync.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteFeed
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-asset
-
-
-# [START cloudasset_v1_generated_AssetService_DeleteFeed_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import asset_v1
-
-
-def sample_delete_feed():
-    # Create a client
-    client = asset_v1.AssetServiceClient()
-
-    # Initialize request argument(s)
-    request = asset_v1.DeleteFeedRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    client.delete_feed(request=request)
-
-
-# [END cloudasset_v1_generated_AssetService_DeleteFeed_sync]
diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_saved_query_async.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_saved_query_async.py
deleted file mode 100644
index daf5c52ac19c..000000000000
--- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_saved_query_async.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteSavedQuery
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-asset
-
-
-# [START cloudasset_v1_generated_AssetService_DeleteSavedQuery_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import asset_v1
-
-
-async def sample_delete_saved_query():
-    # Create a client
-    client = asset_v1.AssetServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = asset_v1.DeleteSavedQueryRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    await client.delete_saved_query(request=request)
-
-
-# [END cloudasset_v1_generated_AssetService_DeleteSavedQuery_async]
diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_saved_query_sync.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_saved_query_sync.py
deleted file mode 100644
index f556d9389662..000000000000
--- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_saved_query_sync.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteSavedQuery
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-asset
-
-
-# [START cloudasset_v1_generated_AssetService_DeleteSavedQuery_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import asset_v1
-
-
-def sample_delete_saved_query():
-    # Create a client
-    client = asset_v1.AssetServiceClient()
-
-    # Initialize request argument(s)
-    request = asset_v1.DeleteSavedQueryRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    client.delete_saved_query(request=request)
-
-
-# [END cloudasset_v1_generated_AssetService_DeleteSavedQuery_sync]
diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_async.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_async.py
deleted file mode 100644
index d6ce3ef29c0a..000000000000
--- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_async.py
+++ /dev/null
@@ -1,60 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ExportAssets
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-asset
-
-
-# [START cloudasset_v1_generated_AssetService_ExportAssets_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import asset_v1
-
-
-async def sample_export_assets():
-    # Create a client
-    client = asset_v1.AssetServiceAsyncClient()
-
-    # Initialize request argument(s)
-    output_config = asset_v1.OutputConfig()
-    output_config.gcs_destination.uri = "uri_value"
-
-    request = asset_v1.ExportAssetsRequest(
-        parent="parent_value",
-        output_config=output_config,
-    )
-
-    # Make the request
-    operation = await client.export_assets(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = await operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END cloudasset_v1_generated_AssetService_ExportAssets_async]
diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_sync.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_sync.py
deleted file mode 100644
index 3d1227c3717a..000000000000
--- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_sync.py
+++ /dev/null
@@ -1,60 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ExportAssets
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-asset
-
-
-# [START cloudasset_v1_generated_AssetService_ExportAssets_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import asset_v1
-
-
-def sample_export_assets():
-    # Create a client
-    client = asset_v1.AssetServiceClient()
-
-    # Initialize request argument(s)
-    output_config = asset_v1.OutputConfig()
-    output_config.gcs_destination.uri = "uri_value"
-
-    request = asset_v1.ExportAssetsRequest(
-        parent="parent_value",
-        output_config=output_config,
-    )
-
-    # Make the request
-    operation = client.export_assets(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END cloudasset_v1_generated_AssetService_ExportAssets_sync]
diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_async.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_async.py
deleted file mode 100644
index 8f28ebec4b74..000000000000
--- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetFeed
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-asset
-
-
-# [START cloudasset_v1_generated_AssetService_GetFeed_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import asset_v1
-
-
-async def sample_get_feed():
-    # Create a client
-    client = asset_v1.AssetServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = asset_v1.GetFeedRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = await client.get_feed(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END cloudasset_v1_generated_AssetService_GetFeed_async]
diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_sync.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_sync.py
deleted file mode 100644
index 64a8156dd3e3..000000000000
--- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetFeed
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-asset
-
-
-# [START cloudasset_v1_generated_AssetService_GetFeed_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import asset_v1
-
-
-def sample_get_feed():
-    # Create a client
-    client = asset_v1.AssetServiceClient()
-
-    # Initialize request argument(s)
-    request = asset_v1.GetFeedRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = client.get_feed(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END cloudasset_v1_generated_AssetService_GetFeed_sync]
diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_async.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_async.py
deleted file mode 100644
index bd5835b75eba..000000000000
--- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetSavedQuery
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-asset
-
-
-# [START cloudasset_v1_generated_AssetService_GetSavedQuery_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import asset_v1
-
-
-async def sample_get_saved_query():
-    # Create a client
-    client = asset_v1.AssetServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = asset_v1.GetSavedQueryRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = await client.get_saved_query(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END cloudasset_v1_generated_AssetService_GetSavedQuery_async]
diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_sync.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_sync.py
deleted file mode 100644
index 68c585047a7f..000000000000
--- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetSavedQuery
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-asset
-
-
-# [START cloudasset_v1_generated_AssetService_GetSavedQuery_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import asset_v1
-
-
-def sample_get_saved_query():
-    # Create a client
-    client = asset_v1.AssetServiceClient()
-
-    # Initialize request argument(s)
-    request = asset_v1.GetSavedQueryRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = client.get_saved_query(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END cloudasset_v1_generated_AssetService_GetSavedQuery_sync]
diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_async.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_async.py
deleted file mode 100644
index 36af8779f4ea..000000000000
--- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListAssets
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-asset
-
-
-# [START cloudasset_v1_generated_AssetService_ListAssets_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import asset_v1
-
-
-async def sample_list_assets():
-    # Create a client
-    client = asset_v1.AssetServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = asset_v1.ListAssetsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = await client.list_assets(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END cloudasset_v1_generated_AssetService_ListAssets_async]
diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_sync.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_sync.py
deleted file mode 100644
index 19e076231458..000000000000
--- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListAssets
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-asset
-
-
-# [START cloudasset_v1_generated_AssetService_ListAssets_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import asset_v1
-
-
-def sample_list_assets():
-    # Create a client
-    client = asset_v1.AssetServiceClient()
-
-    # Initialize request argument(s)
-    request = asset_v1.ListAssetsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_assets(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END cloudasset_v1_generated_AssetService_ListAssets_sync]
diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_async.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_async.py
deleted file mode 100644
index b0c43ba61881..000000000000
--- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListFeeds
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-asset
-
-
-# [START cloudasset_v1_generated_AssetService_ListFeeds_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import asset_v1 - - -async def sample_list_feeds(): - # Create a client - client = asset_v1.AssetServiceAsyncClient() - - # Initialize request argument(s) - request = asset_v1.ListFeedsRequest( - parent="parent_value", - ) - - # Make the request - response = await client.list_feeds(request=request) - - # Handle the response - print(response) - -# [END cloudasset_v1_generated_AssetService_ListFeeds_async] diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_sync.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_sync.py deleted file mode 100644 index cb5da42cf621..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListFeeds -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-asset - - -# [START cloudasset_v1_generated_AssetService_ListFeeds_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import asset_v1 - - -def sample_list_feeds(): - # Create a client - client = asset_v1.AssetServiceClient() - - # Initialize request argument(s) - request = asset_v1.ListFeedsRequest( - parent="parent_value", - ) - - # Make the request - response = client.list_feeds(request=request) - - # Handle the response - print(response) - -# [END cloudasset_v1_generated_AssetService_ListFeeds_sync] diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_async.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_async.py deleted file mode 100644 index 98079450807b..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListSavedQueries -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-asset - - -# [START cloudasset_v1_generated_AssetService_ListSavedQueries_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
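Unlike the asset listings, ``list_feeds`` is not paginated, which is why the feed samples print the whole response rather than iterating a pager. The feeds themselves sit on a repeated field; a short sketch with a placeholder parent:

.. code-block:: python

    from google.cloud import asset_v1

    client = asset_v1.AssetServiceClient()
    response = client.list_feeds(
        request=asset_v1.ListFeedsRequest(parent="projects/my-project-id"),
    )

    # ListFeedsResponse.feeds holds every feed configured on the parent.
    for feed in response.feeds:
        print(feed.name, feed.content_type)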
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import asset_v1 - - -async def sample_list_saved_queries(): - # Create a client - client = asset_v1.AssetServiceAsyncClient() - - # Initialize request argument(s) - request = asset_v1.ListSavedQueriesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_saved_queries(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END cloudasset_v1_generated_AssetService_ListSavedQueries_async] diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_sync.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_sync.py deleted file mode 100644 index 09b57384c4da..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListSavedQueries -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-asset - - -# [START cloudasset_v1_generated_AssetService_ListSavedQueries_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import asset_v1 - - -def sample_list_saved_queries(): - # Create a client - client = asset_v1.AssetServiceClient() - - # Initialize request argument(s) - request = asset_v1.ListSavedQueriesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_saved_queries(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END cloudasset_v1_generated_AssetService_ListSavedQueries_sync] diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_async.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_async.py deleted file mode 100644 index 7616115c304b..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for QueryAssets -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-asset - - -# [START cloudasset_v1_generated_AssetService_QueryAssets_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
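The paginated samples iterate item by item, but GAPIC pagers also expose the underlying responses page by page. A sketch against ``list_saved_queries``, with placeholder values:

.. code-block:: python

    from google.cloud import asset_v1

    client = asset_v1.AssetServiceClient()
    page_result = client.list_saved_queries(
        request=asset_v1.ListSavedQueriesRequest(
            parent="projects/my-project-id",
            page_size=50,
        )
    )

    # ``pages`` yields each ListSavedQueriesResponse as a unit, which is
    # handy when page boundaries matter more than single items.
    for page in page_result.pages:
        print(f"fetched {len(page.saved_queries)} saved queries")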
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import asset_v1 - - -async def sample_query_assets(): - # Create a client - client = asset_v1.AssetServiceAsyncClient() - - # Initialize request argument(s) - request = asset_v1.QueryAssetsRequest( - statement="statement_value", - parent="parent_value", - ) - - # Make the request - response = await client.query_assets(request=request) - - # Handle the response - print(response) - -# [END cloudasset_v1_generated_AssetService_QueryAssets_async] diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_sync.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_sync.py deleted file mode 100644 index 177f021b5382..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for QueryAssets -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-asset - - -# [START cloudasset_v1_generated_AssetService_QueryAssets_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import asset_v1 - - -def sample_query_assets(): - # Create a client - client = asset_v1.AssetServiceClient() - - # Initialize request argument(s) - request = asset_v1.QueryAssetsRequest( - statement="statement_value", - parent="parent_value", - ) - - # Make the request - response = client.query_assets(request=request) - - # Handle the response - print(response) - -# [END cloudasset_v1_generated_AssetService_QueryAssets_sync] diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_async.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_async.py deleted file mode 100644 index 4417191b56ad..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SearchAllIamPolicies -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-asset - - -# [START cloudasset_v1_generated_AssetService_SearchAllIamPolicies_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
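In the QueryAssets templates, ``statement_value`` stands in for a SQL query over asset metadata. The sketch below shows the general shape; the table name follows the asset-type naming used in the QueryAssets documentation but should be treated as illustrative, and the response handling assumes the documented ``done``/``query_result`` fields:

.. code-block:: python

    from google.cloud import asset_v1

    client = asset_v1.AssetServiceClient()
    request = asset_v1.QueryAssetsRequest(
        parent="projects/my-project-id",
        statement="SELECT name FROM compute_googleapis_com_Instance LIMIT 10",
    )

    response = client.query_assets(request=request)
    # Long-running queries can come back unfinished; ``done`` separates an
    # inline result from a job you would poll again by job reference.
    if response.done:
        print(response.query_result)
    else:
        print("query still running:", response.job_reference)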
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import asset_v1 - - -async def sample_search_all_iam_policies(): - # Create a client - client = asset_v1.AssetServiceAsyncClient() - - # Initialize request argument(s) - request = asset_v1.SearchAllIamPoliciesRequest( - scope="scope_value", - ) - - # Make the request - page_result = client.search_all_iam_policies(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END cloudasset_v1_generated_AssetService_SearchAllIamPolicies_async] diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_sync.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_sync.py deleted file mode 100644 index 23e8b1f9fecc..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SearchAllIamPolicies -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-asset - - -# [START cloudasset_v1_generated_AssetService_SearchAllIamPolicies_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import asset_v1 - - -def sample_search_all_iam_policies(): - # Create a client - client = asset_v1.AssetServiceClient() - - # Initialize request argument(s) - request = asset_v1.SearchAllIamPoliciesRequest( - scope="scope_value", - ) - - # Make the request - page_result = client.search_all_iam_policies(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END cloudasset_v1_generated_AssetService_SearchAllIamPolicies_sync] diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_async.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_async.py deleted file mode 100644 index 6ff7c119df12..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SearchAllResources -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-asset - - -# [START cloudasset_v1_generated_AssetService_SearchAllResources_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
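For the search methods, ``scope_value`` is the project, folder, or organization to search, and ``query`` takes the asset-search syntax. A sketch with an illustrative query string; the exact grammar is defined by the service documentation:

.. code-block:: python

    from google.cloud import asset_v1

    client = asset_v1.AssetServiceClient()
    request = asset_v1.SearchAllIamPoliciesRequest(
        scope="projects/my-project-id",
        query="policy:roles/storage.admin",  # illustrative search expression
    )

    # The pager yields IamPolicySearchResult items.
    for result in client.search_all_iam_policies(request=request):
        print(result.resource)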
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import asset_v1 - - -async def sample_search_all_resources(): - # Create a client - client = asset_v1.AssetServiceAsyncClient() - - # Initialize request argument(s) - request = asset_v1.SearchAllResourcesRequest( - scope="scope_value", - ) - - # Make the request - page_result = client.search_all_resources(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END cloudasset_v1_generated_AssetService_SearchAllResources_async] diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_sync.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_sync.py deleted file mode 100644 index c6ef7ed258ac..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SearchAllResources -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-asset - - -# [START cloudasset_v1_generated_AssetService_SearchAllResources_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import asset_v1 - - -def sample_search_all_resources(): - # Create a client - client = asset_v1.AssetServiceClient() - - # Initialize request argument(s) - request = asset_v1.SearchAllResourcesRequest( - scope="scope_value", - ) - - # Make the request - page_result = client.search_all_resources(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END cloudasset_v1_generated_AssetService_SearchAllResources_sync] diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_async.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_async.py deleted file mode 100644 index 2c15208140d6..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateFeed -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-asset - - -# [START cloudasset_v1_generated_AssetService_UpdateFeed_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
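SearchAllResources takes the same scope plus optional narrowing fields. A sketch, again with placeholder values and an illustrative query expression:

.. code-block:: python

    from google.cloud import asset_v1

    client = asset_v1.AssetServiceClient()
    request = asset_v1.SearchAllResourcesRequest(
        scope="projects/my-project-id",
        query="state:RUNNING",                           # illustrative
        asset_types=["compute.googleapis.com/Instance"],
        order_by="name",
    )
    for resource in client.search_all_resources(request=request):
        print(resource.display_name, resource.asset_type)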
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import asset_v1 - - -async def sample_update_feed(): - # Create a client - client = asset_v1.AssetServiceAsyncClient() - - # Initialize request argument(s) - feed = asset_v1.Feed() - feed.name = "name_value" - - request = asset_v1.UpdateFeedRequest( - feed=feed, - ) - - # Make the request - response = await client.update_feed(request=request) - - # Handle the response - print(response) - -# [END cloudasset_v1_generated_AssetService_UpdateFeed_async] diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_sync.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_sync.py deleted file mode 100644 index f867fd26d768..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateFeed -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-asset - - -# [START cloudasset_v1_generated_AssetService_UpdateFeed_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import asset_v1 - - -def sample_update_feed(): - # Create a client - client = asset_v1.AssetServiceClient() - - # Initialize request argument(s) - feed = asset_v1.Feed() - feed.name = "name_value" - - request = asset_v1.UpdateFeedRequest( - feed=feed, - ) - - # Make the request - response = client.update_feed(request=request) - - # Handle the response - print(response) - -# [END cloudasset_v1_generated_AssetService_UpdateFeed_sync] diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_async.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_async.py deleted file mode 100644 index b55c69cba951..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateSavedQuery -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-asset - - -# [START cloudasset_v1_generated_AssetService_UpdateSavedQuery_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
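The UpdateFeed templates set only ``feed.name``. A realistic update also names the fields being changed through ``update_mask``; the feed resource-name format below is assumed from the API's conventions:

.. code-block:: python

    from google.cloud import asset_v1
    from google.protobuf import field_mask_pb2

    client = asset_v1.AssetServiceClient()

    feed = asset_v1.Feed(
        name="projects/my-project-id/feeds/my-feed",  # placeholder feed name
        asset_types=["compute.googleapis.com/Instance"],
    )
    request = asset_v1.UpdateFeedRequest(
        feed=feed,
        # Only the masked fields are written; everything else is untouched.
        update_mask=field_mask_pb2.FieldMask(paths=["asset_types"]),
    )
    print(client.update_feed(request=request))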
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import asset_v1 - - -async def sample_update_saved_query(): - # Create a client - client = asset_v1.AssetServiceAsyncClient() - - # Initialize request argument(s) - request = asset_v1.UpdateSavedQueryRequest( - ) - - # Make the request - response = await client.update_saved_query(request=request) - - # Handle the response - print(response) - -# [END cloudasset_v1_generated_AssetService_UpdateSavedQuery_async] diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_sync.py b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_sync.py deleted file mode 100644 index f187066a055a..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateSavedQuery -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-asset - - -# [START cloudasset_v1_generated_AssetService_UpdateSavedQuery_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
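The UpdateSavedQuery templates construct an empty request, which the service is unlikely to accept as-is. A usable request names the saved query and masks the changed fields; the resource-name format here is assumed:

.. code-block:: python

    from google.cloud import asset_v1
    from google.protobuf import field_mask_pb2

    client = asset_v1.AssetServiceClient()

    saved_query = asset_v1.SavedQuery(
        name="projects/my-project-id/savedQueries/my-query",  # placeholder
        description="updated description",
    )
    request = asset_v1.UpdateSavedQueryRequest(
        saved_query=saved_query,
        update_mask=field_mask_pb2.FieldMask(paths=["description"]),
    )
    print(client.update_saved_query(request=request))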
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import asset_v1 - - -def sample_update_saved_query(): - # Create a client - client = asset_v1.AssetServiceClient() - - # Initialize request argument(s) - request = asset_v1.UpdateSavedQueryRequest( - ) - - # Make the request - response = client.update_saved_query(request=request) - - # Handle the response - print(response) - -# [END cloudasset_v1_generated_AssetService_UpdateSavedQuery_sync] diff --git a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json b/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json deleted file mode 100644 index 211efb19c2ad..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json +++ /dev/null @@ -1,3746 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.asset.v1", - "version": "v1" - } - ], - "language": "PYTHON", - "name": "google-cloud-asset", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", - "shortName": "AssetServiceAsyncClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.analyze_iam_policy_longrunning", - "method": { - "fullName": "google.cloud.asset.v1.AssetService.AnalyzeIamPolicyLongrunning", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "AnalyzeIamPolicyLongrunning" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1.types.AnalyzeIamPolicyLongrunningRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "analyze_iam_policy_longrunning" - }, - "description": "Sample for AnalyzeIamPolicyLongrunning", - "file": "cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_AnalyzeIamPolicyLongrunning_async", - "segments": [ - { - "end": 62, - "start": 27, - "type": "FULL" - }, - { - "end": 62, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 52, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 59, - "start": 53, - "type": "REQUEST_EXECUTION" - }, - { - "end": 63, - "start": 60, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceClient", - "shortName": "AssetServiceClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceClient.analyze_iam_policy_longrunning", - "method": { - "fullName": "google.cloud.asset.v1.AssetService.AnalyzeIamPolicyLongrunning", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "AnalyzeIamPolicyLongrunning" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.asset_v1.types.AnalyzeIamPolicyLongrunningRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "analyze_iam_policy_longrunning" - }, - "description": "Sample for AnalyzeIamPolicyLongrunning", - "file": "cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_AnalyzeIamPolicyLongrunning_sync", - "segments": [ - { - "end": 62, - "start": 27, - "type": "FULL" - }, - { - "end": 62, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 52, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 59, - "start": 53, - "type": "REQUEST_EXECUTION" - }, - { - "end": 63, - "start": 60, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", - "shortName": "AssetServiceAsyncClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.analyze_iam_policy", - "method": { - "fullName": "google.cloud.asset.v1.AssetService.AnalyzeIamPolicy", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "AnalyzeIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1.types.AnalyzeIamPolicyRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1.types.AnalyzeIamPolicyResponse", - "shortName": "analyze_iam_policy" - }, - "description": "Sample for AnalyzeIamPolicy", - "file": "cloudasset_v1_generated_asset_service_analyze_iam_policy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_AnalyzeIamPolicy_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_analyze_iam_policy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceClient", - "shortName": "AssetServiceClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceClient.analyze_iam_policy", - "method": { - "fullName": "google.cloud.asset.v1.AssetService.AnalyzeIamPolicy", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "AnalyzeIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1.types.AnalyzeIamPolicyRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", 
- "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1.types.AnalyzeIamPolicyResponse", - "shortName": "analyze_iam_policy" - }, - "description": "Sample for AnalyzeIamPolicy", - "file": "cloudasset_v1_generated_asset_service_analyze_iam_policy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_AnalyzeIamPolicy_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_analyze_iam_policy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", - "shortName": "AssetServiceAsyncClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.analyze_move", - "method": { - "fullName": "google.cloud.asset.v1.AssetService.AnalyzeMove", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "AnalyzeMove" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1.types.AnalyzeMoveRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1.types.AnalyzeMoveResponse", - "shortName": "analyze_move" - }, - "description": "Sample for AnalyzeMove", - "file": "cloudasset_v1_generated_asset_service_analyze_move_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_AnalyzeMove_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_analyze_move_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceClient", - "shortName": "AssetServiceClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceClient.analyze_move", - "method": { - "fullName": "google.cloud.asset.v1.AssetService.AnalyzeMove", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "AnalyzeMove" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1.types.AnalyzeMoveRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1.types.AnalyzeMoveResponse", - "shortName": "analyze_move" - }, - "description": "Sample for AnalyzeMove", - "file": "cloudasset_v1_generated_asset_service_analyze_move_sync.py", - "language": "PYTHON", - "origin": 
"API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_AnalyzeMove_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_analyze_move_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", - "shortName": "AssetServiceAsyncClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.analyze_org_policies", - "method": { - "fullName": "google.cloud.asset.v1.AssetService.AnalyzeOrgPolicies", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "AnalyzeOrgPolicies" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1.types.AnalyzeOrgPoliciesRequest" - }, - { - "name": "scope", - "type": "str" - }, - { - "name": "constraint", - "type": "str" - }, - { - "name": "filter", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPoliciesAsyncPager", - "shortName": "analyze_org_policies" - }, - "description": "Sample for AnalyzeOrgPolicies", - "file": "cloudasset_v1_generated_asset_service_analyze_org_policies_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_AnalyzeOrgPolicies_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_analyze_org_policies_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceClient", - "shortName": "AssetServiceClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceClient.analyze_org_policies", - "method": { - "fullName": "google.cloud.asset.v1.AssetService.AnalyzeOrgPolicies", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "AnalyzeOrgPolicies" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1.types.AnalyzeOrgPoliciesRequest" - }, - { - "name": "scope", - "type": "str" - }, - { - "name": "constraint", - "type": "str" - }, - { - "name": "filter", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPoliciesPager", - "shortName": "analyze_org_policies" - }, - "description": "Sample for AnalyzeOrgPolicies", - "file": 
"cloudasset_v1_generated_asset_service_analyze_org_policies_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_AnalyzeOrgPolicies_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_analyze_org_policies_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", - "shortName": "AssetServiceAsyncClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.analyze_org_policy_governed_assets", - "method": { - "fullName": "google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedAssets", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "AnalyzeOrgPolicyGovernedAssets" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsRequest" - }, - { - "name": "scope", - "type": "str" - }, - { - "name": "constraint", - "type": "str" - }, - { - "name": "filter", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPolicyGovernedAssetsAsyncPager", - "shortName": "analyze_org_policy_governed_assets" - }, - "description": "Sample for AnalyzeOrgPolicyGovernedAssets", - "file": "cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_AnalyzeOrgPolicyGovernedAssets_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceClient", - "shortName": "AssetServiceClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceClient.analyze_org_policy_governed_assets", - "method": { - "fullName": "google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedAssets", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "AnalyzeOrgPolicyGovernedAssets" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsRequest" - }, - { - "name": "scope", - "type": "str" - }, - { - "name": "constraint", - "type": "str" - }, - { - "name": "filter", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - 
}, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPolicyGovernedAssetsPager", - "shortName": "analyze_org_policy_governed_assets" - }, - "description": "Sample for AnalyzeOrgPolicyGovernedAssets", - "file": "cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_AnalyzeOrgPolicyGovernedAssets_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", - "shortName": "AssetServiceAsyncClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.analyze_org_policy_governed_containers", - "method": { - "fullName": "google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedContainers", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "AnalyzeOrgPolicyGovernedContainers" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedContainersRequest" - }, - { - "name": "scope", - "type": "str" - }, - { - "name": "constraint", - "type": "str" - }, - { - "name": "filter", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPolicyGovernedContainersAsyncPager", - "shortName": "analyze_org_policy_governed_containers" - }, - "description": "Sample for AnalyzeOrgPolicyGovernedContainers", - "file": "cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_AnalyzeOrgPolicyGovernedContainers_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceClient", - "shortName": "AssetServiceClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceClient.analyze_org_policy_governed_containers", - "method": { - "fullName": "google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedContainers", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": 
"AnalyzeOrgPolicyGovernedContainers" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedContainersRequest" - }, - { - "name": "scope", - "type": "str" - }, - { - "name": "constraint", - "type": "str" - }, - { - "name": "filter", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPolicyGovernedContainersPager", - "shortName": "analyze_org_policy_governed_containers" - }, - "description": "Sample for AnalyzeOrgPolicyGovernedContainers", - "file": "cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_AnalyzeOrgPolicyGovernedContainers_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", - "shortName": "AssetServiceAsyncClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.batch_get_assets_history", - "method": { - "fullName": "google.cloud.asset.v1.AssetService.BatchGetAssetsHistory", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "BatchGetAssetsHistory" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1.types.BatchGetAssetsHistoryRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1.types.BatchGetAssetsHistoryResponse", - "shortName": "batch_get_assets_history" - }, - "description": "Sample for BatchGetAssetsHistory", - "file": "cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_BatchGetAssetsHistory_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceClient", - "shortName": "AssetServiceClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceClient.batch_get_assets_history", - "method": { - "fullName": 
"google.cloud.asset.v1.AssetService.BatchGetAssetsHistory", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "BatchGetAssetsHistory" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1.types.BatchGetAssetsHistoryRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1.types.BatchGetAssetsHistoryResponse", - "shortName": "batch_get_assets_history" - }, - "description": "Sample for BatchGetAssetsHistory", - "file": "cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_BatchGetAssetsHistory_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", - "shortName": "AssetServiceAsyncClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.batch_get_effective_iam_policies", - "method": { - "fullName": "google.cloud.asset.v1.AssetService.BatchGetEffectiveIamPolicies", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "BatchGetEffectiveIamPolicies" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1.types.BatchGetEffectiveIamPoliciesRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1.types.BatchGetEffectiveIamPoliciesResponse", - "shortName": "batch_get_effective_iam_policies" - }, - "description": "Sample for BatchGetEffectiveIamPolicies", - "file": "cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_BatchGetEffectiveIamPolicies_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceClient", - "shortName": "AssetServiceClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceClient.batch_get_effective_iam_policies", - "method": { - "fullName": 
"google.cloud.asset.v1.AssetService.BatchGetEffectiveIamPolicies", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "BatchGetEffectiveIamPolicies" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1.types.BatchGetEffectiveIamPoliciesRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1.types.BatchGetEffectiveIamPoliciesResponse", - "shortName": "batch_get_effective_iam_policies" - }, - "description": "Sample for BatchGetEffectiveIamPolicies", - "file": "cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_BatchGetEffectiveIamPolicies_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", - "shortName": "AssetServiceAsyncClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.create_feed", - "method": { - "fullName": "google.cloud.asset.v1.AssetService.CreateFeed", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "CreateFeed" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1.types.CreateFeedRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1.types.Feed", - "shortName": "create_feed" - }, - "description": "Sample for CreateFeed", - "file": "cloudasset_v1_generated_asset_service_create_feed_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_CreateFeed_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_create_feed_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceClient", - "shortName": "AssetServiceClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceClient.create_feed", - "method": { - "fullName": "google.cloud.asset.v1.AssetService.CreateFeed", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "CreateFeed" 
- }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1.types.CreateFeedRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1.types.Feed", - "shortName": "create_feed" - }, - "description": "Sample for CreateFeed", - "file": "cloudasset_v1_generated_asset_service_create_feed_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_CreateFeed_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_create_feed_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", - "shortName": "AssetServiceAsyncClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.create_saved_query", - "method": { - "fullName": "google.cloud.asset.v1.AssetService.CreateSavedQuery", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "CreateSavedQuery" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1.types.CreateSavedQueryRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "saved_query", - "type": "google.cloud.asset_v1.types.SavedQuery" - }, - { - "name": "saved_query_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1.types.SavedQuery", - "shortName": "create_saved_query" - }, - "description": "Sample for CreateSavedQuery", - "file": "cloudasset_v1_generated_asset_service_create_saved_query_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_CreateSavedQuery_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_create_saved_query_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceClient", - "shortName": "AssetServiceClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceClient.create_saved_query", - "method": { - "fullName": "google.cloud.asset.v1.AssetService.CreateSavedQuery", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "CreateSavedQuery" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.asset_v1.types.CreateSavedQueryRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "saved_query", - "type": "google.cloud.asset_v1.types.SavedQuery" - }, - { - "name": "saved_query_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1.types.SavedQuery", - "shortName": "create_saved_query" - }, - "description": "Sample for CreateSavedQuery", - "file": "cloudasset_v1_generated_asset_service_create_saved_query_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_CreateSavedQuery_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_create_saved_query_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", - "shortName": "AssetServiceAsyncClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.delete_feed", - "method": { - "fullName": "google.cloud.asset.v1.AssetService.DeleteFeed", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "DeleteFeed" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1.types.DeleteFeedRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_feed" - }, - "description": "Sample for DeleteFeed", - "file": "cloudasset_v1_generated_asset_service_delete_feed_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_DeleteFeed_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_delete_feed_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceClient", - "shortName": "AssetServiceClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceClient.delete_feed", - "method": { - "fullName": "google.cloud.asset.v1.AssetService.DeleteFeed", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "DeleteFeed" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1.types.DeleteFeedRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - 
"name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_feed" - }, - "description": "Sample for DeleteFeed", - "file": "cloudasset_v1_generated_asset_service_delete_feed_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_DeleteFeed_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_delete_feed_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", - "shortName": "AssetServiceAsyncClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.delete_saved_query", - "method": { - "fullName": "google.cloud.asset.v1.AssetService.DeleteSavedQuery", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "DeleteSavedQuery" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1.types.DeleteSavedQueryRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_saved_query" - }, - "description": "Sample for DeleteSavedQuery", - "file": "cloudasset_v1_generated_asset_service_delete_saved_query_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_DeleteSavedQuery_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_delete_saved_query_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceClient", - "shortName": "AssetServiceClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceClient.delete_saved_query", - "method": { - "fullName": "google.cloud.asset.v1.AssetService.DeleteSavedQuery", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "DeleteSavedQuery" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1.types.DeleteSavedQueryRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_saved_query" - }, - "description": "Sample for DeleteSavedQuery", - "file": "cloudasset_v1_generated_asset_service_delete_saved_query_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_DeleteSavedQuery_sync", - "segments": [ - { - 
"end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_delete_saved_query_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", - "shortName": "AssetServiceAsyncClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.export_assets", - "method": { - "fullName": "google.cloud.asset.v1.AssetService.ExportAssets", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "ExportAssets" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1.types.ExportAssetsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "export_assets" - }, - "description": "Sample for ExportAssets", - "file": "cloudasset_v1_generated_asset_service_export_assets_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_ExportAssets_async", - "segments": [ - { - "end": 59, - "start": 27, - "type": "FULL" - }, - { - "end": 59, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 56, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 60, - "start": 57, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_export_assets_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceClient", - "shortName": "AssetServiceClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceClient.export_assets", - "method": { - "fullName": "google.cloud.asset.v1.AssetService.ExportAssets", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "ExportAssets" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1.types.ExportAssetsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "export_assets" - }, - "description": "Sample for ExportAssets", - "file": "cloudasset_v1_generated_asset_service_export_assets_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_ExportAssets_sync", - "segments": [ - { - "end": 59, - "start": 27, - "type": "FULL" - }, - { - "end": 59, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 56, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 60, - "start": 57, - "type": "RESPONSE_HANDLING" - } 
- ], - "title": "cloudasset_v1_generated_asset_service_export_assets_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", - "shortName": "AssetServiceAsyncClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.get_feed", - "method": { - "fullName": "google.cloud.asset.v1.AssetService.GetFeed", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "GetFeed" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1.types.GetFeedRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1.types.Feed", - "shortName": "get_feed" - }, - "description": "Sample for GetFeed", - "file": "cloudasset_v1_generated_asset_service_get_feed_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_GetFeed_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_get_feed_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceClient", - "shortName": "AssetServiceClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceClient.get_feed", - "method": { - "fullName": "google.cloud.asset.v1.AssetService.GetFeed", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "GetFeed" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1.types.GetFeedRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1.types.Feed", - "shortName": "get_feed" - }, - "description": "Sample for GetFeed", - "file": "cloudasset_v1_generated_asset_service_get_feed_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_GetFeed_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_get_feed_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", - "shortName": "AssetServiceAsyncClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.get_saved_query", - "method": { - "fullName": 
"google.cloud.asset.v1.AssetService.GetSavedQuery", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "GetSavedQuery" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1.types.GetSavedQueryRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1.types.SavedQuery", - "shortName": "get_saved_query" - }, - "description": "Sample for GetSavedQuery", - "file": "cloudasset_v1_generated_asset_service_get_saved_query_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_GetSavedQuery_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_get_saved_query_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceClient", - "shortName": "AssetServiceClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceClient.get_saved_query", - "method": { - "fullName": "google.cloud.asset.v1.AssetService.GetSavedQuery", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "GetSavedQuery" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1.types.GetSavedQueryRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1.types.SavedQuery", - "shortName": "get_saved_query" - }, - "description": "Sample for GetSavedQuery", - "file": "cloudasset_v1_generated_asset_service_get_saved_query_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_GetSavedQuery_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_get_saved_query_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", - "shortName": "AssetServiceAsyncClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.list_assets", - "method": { - "fullName": "google.cloud.asset.v1.AssetService.ListAssets", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "ListAssets" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.asset_v1.types.ListAssetsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1.services.asset_service.pagers.ListAssetsAsyncPager", - "shortName": "list_assets" - }, - "description": "Sample for ListAssets", - "file": "cloudasset_v1_generated_asset_service_list_assets_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_ListAssets_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_list_assets_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceClient", - "shortName": "AssetServiceClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceClient.list_assets", - "method": { - "fullName": "google.cloud.asset.v1.AssetService.ListAssets", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "ListAssets" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1.types.ListAssetsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1.services.asset_service.pagers.ListAssetsPager", - "shortName": "list_assets" - }, - "description": "Sample for ListAssets", - "file": "cloudasset_v1_generated_asset_service_list_assets_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_ListAssets_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_list_assets_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", - "shortName": "AssetServiceAsyncClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.list_feeds", - "method": { - "fullName": "google.cloud.asset.v1.AssetService.ListFeeds", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "ListFeeds" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1.types.ListFeedsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - 
"type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1.types.ListFeedsResponse", - "shortName": "list_feeds" - }, - "description": "Sample for ListFeeds", - "file": "cloudasset_v1_generated_asset_service_list_feeds_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_ListFeeds_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_list_feeds_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceClient", - "shortName": "AssetServiceClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceClient.list_feeds", - "method": { - "fullName": "google.cloud.asset.v1.AssetService.ListFeeds", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "ListFeeds" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1.types.ListFeedsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1.types.ListFeedsResponse", - "shortName": "list_feeds" - }, - "description": "Sample for ListFeeds", - "file": "cloudasset_v1_generated_asset_service_list_feeds_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_ListFeeds_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_list_feeds_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", - "shortName": "AssetServiceAsyncClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.list_saved_queries", - "method": { - "fullName": "google.cloud.asset.v1.AssetService.ListSavedQueries", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "ListSavedQueries" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1.types.ListSavedQueriesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1.services.asset_service.pagers.ListSavedQueriesAsyncPager", - "shortName": "list_saved_queries" - }, - "description": "Sample for ListSavedQueries", - "file": 
"cloudasset_v1_generated_asset_service_list_saved_queries_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_ListSavedQueries_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_list_saved_queries_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceClient", - "shortName": "AssetServiceClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceClient.list_saved_queries", - "method": { - "fullName": "google.cloud.asset.v1.AssetService.ListSavedQueries", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "ListSavedQueries" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1.types.ListSavedQueriesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1.services.asset_service.pagers.ListSavedQueriesPager", - "shortName": "list_saved_queries" - }, - "description": "Sample for ListSavedQueries", - "file": "cloudasset_v1_generated_asset_service_list_saved_queries_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_ListSavedQueries_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_list_saved_queries_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", - "shortName": "AssetServiceAsyncClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.query_assets", - "method": { - "fullName": "google.cloud.asset.v1.AssetService.QueryAssets", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "QueryAssets" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1.types.QueryAssetsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1.types.QueryAssetsResponse", - "shortName": "query_assets" - }, - "description": "Sample for QueryAssets", - "file": "cloudasset_v1_generated_asset_service_query_assets_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_QueryAssets_async", - "segments": [ - { - "end": 52, - 
"start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_query_assets_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceClient", - "shortName": "AssetServiceClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceClient.query_assets", - "method": { - "fullName": "google.cloud.asset.v1.AssetService.QueryAssets", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "QueryAssets" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1.types.QueryAssetsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1.types.QueryAssetsResponse", - "shortName": "query_assets" - }, - "description": "Sample for QueryAssets", - "file": "cloudasset_v1_generated_asset_service_query_assets_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_QueryAssets_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_query_assets_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", - "shortName": "AssetServiceAsyncClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.search_all_iam_policies", - "method": { - "fullName": "google.cloud.asset.v1.AssetService.SearchAllIamPolicies", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "SearchAllIamPolicies" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1.types.SearchAllIamPoliciesRequest" - }, - { - "name": "scope", - "type": "str" - }, - { - "name": "query", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1.services.asset_service.pagers.SearchAllIamPoliciesAsyncPager", - "shortName": "search_all_iam_policies" - }, - "description": "Sample for SearchAllIamPolicies", - "file": "cloudasset_v1_generated_asset_service_search_all_iam_policies_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_SearchAllIamPolicies_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, 
- { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_search_all_iam_policies_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceClient", - "shortName": "AssetServiceClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceClient.search_all_iam_policies", - "method": { - "fullName": "google.cloud.asset.v1.AssetService.SearchAllIamPolicies", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "SearchAllIamPolicies" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1.types.SearchAllIamPoliciesRequest" - }, - { - "name": "scope", - "type": "str" - }, - { - "name": "query", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1.services.asset_service.pagers.SearchAllIamPoliciesPager", - "shortName": "search_all_iam_policies" - }, - "description": "Sample for SearchAllIamPolicies", - "file": "cloudasset_v1_generated_asset_service_search_all_iam_policies_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_SearchAllIamPolicies_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_search_all_iam_policies_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", - "shortName": "AssetServiceAsyncClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.search_all_resources", - "method": { - "fullName": "google.cloud.asset.v1.AssetService.SearchAllResources", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "SearchAllResources" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1.types.SearchAllResourcesRequest" - }, - { - "name": "scope", - "type": "str" - }, - { - "name": "query", - "type": "str" - }, - { - "name": "asset_types", - "type": "MutableSequence[str]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1.services.asset_service.pagers.SearchAllResourcesAsyncPager", - "shortName": "search_all_resources" - }, - "description": "Sample for SearchAllResources", - "file": "cloudasset_v1_generated_asset_service_search_all_resources_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_SearchAllResources_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - 
"start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_search_all_resources_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceClient", - "shortName": "AssetServiceClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceClient.search_all_resources", - "method": { - "fullName": "google.cloud.asset.v1.AssetService.SearchAllResources", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "SearchAllResources" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1.types.SearchAllResourcesRequest" - }, - { - "name": "scope", - "type": "str" - }, - { - "name": "query", - "type": "str" - }, - { - "name": "asset_types", - "type": "MutableSequence[str]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1.services.asset_service.pagers.SearchAllResourcesPager", - "shortName": "search_all_resources" - }, - "description": "Sample for SearchAllResources", - "file": "cloudasset_v1_generated_asset_service_search_all_resources_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_SearchAllResources_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_search_all_resources_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", - "shortName": "AssetServiceAsyncClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.update_feed", - "method": { - "fullName": "google.cloud.asset.v1.AssetService.UpdateFeed", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "UpdateFeed" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1.types.UpdateFeedRequest" - }, - { - "name": "feed", - "type": "google.cloud.asset_v1.types.Feed" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1.types.Feed", - "shortName": "update_feed" - }, - "description": "Sample for UpdateFeed", - "file": "cloudasset_v1_generated_asset_service_update_feed_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_UpdateFeed_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - 
"start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_update_feed_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceClient", - "shortName": "AssetServiceClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceClient.update_feed", - "method": { - "fullName": "google.cloud.asset.v1.AssetService.UpdateFeed", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "UpdateFeed" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1.types.UpdateFeedRequest" - }, - { - "name": "feed", - "type": "google.cloud.asset_v1.types.Feed" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1.types.Feed", - "shortName": "update_feed" - }, - "description": "Sample for UpdateFeed", - "file": "cloudasset_v1_generated_asset_service_update_feed_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_UpdateFeed_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_update_feed_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", - "shortName": "AssetServiceAsyncClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.update_saved_query", - "method": { - "fullName": "google.cloud.asset.v1.AssetService.UpdateSavedQuery", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "UpdateSavedQuery" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1.types.UpdateSavedQueryRequest" - }, - { - "name": "saved_query", - "type": "google.cloud.asset_v1.types.SavedQuery" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1.types.SavedQuery", - "shortName": "update_saved_query" - }, - "description": "Sample for UpdateSavedQuery", - "file": "cloudasset_v1_generated_asset_service_update_saved_query_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_UpdateSavedQuery_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": 
"REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_update_saved_query_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.asset_v1.AssetServiceClient", - "shortName": "AssetServiceClient" - }, - "fullName": "google.cloud.asset_v1.AssetServiceClient.update_saved_query", - "method": { - "fullName": "google.cloud.asset.v1.AssetService.UpdateSavedQuery", - "service": { - "fullName": "google.cloud.asset.v1.AssetService", - "shortName": "AssetService" - }, - "shortName": "UpdateSavedQuery" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1.types.UpdateSavedQueryRequest" - }, - { - "name": "saved_query", - "type": "google.cloud.asset_v1.types.SavedQuery" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1.types.SavedQuery", - "shortName": "update_saved_query" - }, - "description": "Sample for UpdateSavedQuery", - "file": "cloudasset_v1_generated_asset_service_update_saved_query_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_UpdateSavedQuery_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1_generated_asset_service_update_saved_query_sync.py" - } - ] -} diff --git a/owl-bot-staging/google-cloud-asset/v1/scripts/fixup_asset_v1_keywords.py b/owl-bot-staging/google-cloud-asset/v1/scripts/fixup_asset_v1_keywords.py deleted file mode 100644 index 113392c7d9b5..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/scripts/fixup_asset_v1_keywords.py +++ /dev/null @@ -1,198 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#
-import argparse
-import os
-import libcst as cst
-import pathlib
-import sys
-from typing import (Any, Callable, Dict, List, Sequence, Tuple)
-
-
-def partition(
-    predicate: Callable[[Any], bool],
-    iterator: Sequence[Any]
-) -> Tuple[List[Any], List[Any]]:
-    """A stable, out-of-place partition."""
-    results = ([], [])
-
-    for i in iterator:
-        results[int(predicate(i))].append(i)
-
-    # Returns trueList, falseList
-    return results[1], results[0]
-
-
-class assetCallTransformer(cst.CSTTransformer):
-    CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata')
-    METHOD_TO_PARAMS: Dict[str, Tuple[str]] = {
-        'analyze_iam_policy': ('analysis_query', 'saved_analysis_query', 'execution_timeout', ),
-        'analyze_iam_policy_longrunning': ('analysis_query', 'output_config', 'saved_analysis_query', ),
-        'analyze_move': ('resource', 'destination_parent', 'view', ),
-        'analyze_org_policies': ('scope', 'constraint', 'filter', 'page_size', 'page_token', ),
-        'analyze_org_policy_governed_assets': ('scope', 'constraint', 'filter', 'page_size', 'page_token', ),
-        'analyze_org_policy_governed_containers': ('scope', 'constraint', 'filter', 'page_size', 'page_token', ),
-        'batch_get_assets_history': ('parent', 'asset_names', 'content_type', 'read_time_window', 'relationship_types', ),
-        'batch_get_effective_iam_policies': ('scope', 'names', ),
-        'create_feed': ('parent', 'feed_id', 'feed', ),
-        'create_saved_query': ('parent', 'saved_query', 'saved_query_id', ),
-        'delete_feed': ('name', ),
-        'delete_saved_query': ('name', ),
-        'export_assets': ('parent', 'output_config', 'read_time', 'asset_types', 'content_type', 'relationship_types', ),
-        'get_feed': ('name', ),
-        'get_saved_query': ('name', ),
-        'list_assets': ('parent', 'read_time', 'asset_types', 'content_type', 'page_size', 'page_token', 'relationship_types', ),
-        'list_feeds': ('parent', ),
-        'list_saved_queries': ('parent', 'filter', 'page_size', 'page_token', ),
-        'query_assets': ('parent', 'statement', 'job_reference', 'page_size', 'page_token', 'timeout', 'read_time_window', 'read_time', 'output_config', ),
-        'search_all_iam_policies': ('scope', 'query', 'page_size', 'page_token', 'asset_types', 'order_by', ),
-        'search_all_resources': ('scope', 'query', 'asset_types', 'page_size', 'page_token', 'order_by', 'read_mask', ),
-        'update_feed': ('feed', 'update_mask', ),
-        'update_saved_query': ('saved_query', 'update_mask', ),
-    }
-
-    def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
-        try:
-            key = original.func.attr.value
-            kword_params = self.METHOD_TO_PARAMS[key]
-        except (AttributeError, KeyError):
-            # Either not a method from the API or too convoluted to be sure.
-            return updated
-
-        # If the existing code is valid, keyword args come after positional args.
-        # Therefore, all positional args must map to the first parameters.
-        args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
-        if any(k.keyword.value == "request" for k in kwargs):
-            # We've already fixed this file, don't fix it again.
-            return updated
-
-        kwargs, ctrl_kwargs = partition(
-            lambda a: a.keyword.value not in self.CTRL_PARAMS,
-            kwargs
-        )
-
-        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
-        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
-                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
-
-        request_arg = cst.Arg(
-            value=cst.Dict([
-                cst.DictElement(
-                    cst.SimpleString("'{}'".format(name)),
-                    cst.Element(value=arg.value)
-                )
-                # Note: the args + kwargs looks silly, but keep in mind that
-                # the control parameters had to be stripped out, and that
-                # those could have been passed positionally or by keyword.
-                for name, arg in zip(kword_params, args + kwargs)]),
-            keyword=cst.Name("request")
-        )
-
-        return updated.with_changes(
-            args=[request_arg] + ctrl_kwargs
-        )
-
-
-def fix_files(
-    in_dir: pathlib.Path,
-    out_dir: pathlib.Path,
-    *,
-    transformer=assetCallTransformer(),
-):
-    """Duplicate the input dir to the output dir, fixing file method calls.
-
-    Preconditions:
-    * in_dir is a real directory
-    * out_dir is a real, empty directory
-    """
-    pyfile_gen = (
-        pathlib.Path(os.path.join(root, f))
-        for root, _, files in os.walk(in_dir)
-        for f in files if os.path.splitext(f)[1] == ".py"
-    )
-
-    for fpath in pyfile_gen:
-        with open(fpath, 'r') as f:
-            src = f.read()
-
-        # Parse the code and insert method call fixes.
-        tree = cst.parse_module(src)
-        updated = tree.visit(transformer)
-
-        # Create the path and directory structure for the new file.
-        updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
-        updated_path.parent.mkdir(parents=True, exist_ok=True)
-
-        # Generate the updated source file at the corresponding path.
-        with open(updated_path, 'w') as f:
-            f.write(updated.code)
-
-
-if __name__ == '__main__':
-    parser = argparse.ArgumentParser(
-        description="""Fix up source that uses the asset client library.
-
-The existing sources are NOT overwritten but are copied to output_dir with changes made.
-
-Note: This tool operates at a best-effort level at converting positional
-      parameters in client method calls to keyword based parameters.
-      Cases where it WILL FAIL include
-      A) * or ** expansion in a method call.
-      B) Calls via function or method alias (includes free function calls)
-      C) Indirect or dispatched calls (e.g. the method is looked up dynamically)
-
-      These all constitute false negatives. The tool will also detect false
-      positives when an API method shares a name with another method.
-""")
-    parser.add_argument(
-        '-d',
-        '--input-directory',
-        required=True,
-        dest='input_dir',
-        help='the input directory to walk for python files to fix up',
-    )
-    parser.add_argument(
-        '-o',
-        '--output-directory',
-        required=True,
-        dest='output_dir',
-        help='the directory to output files fixed via un-flattening',
-    )
-    args = parser.parse_args()
-    input_dir = pathlib.Path(args.input_dir)
-    output_dir = pathlib.Path(args.output_dir)
-    if not input_dir.is_dir():
-        print(
-            f"input directory '{input_dir}' does not exist or is not a directory",
-            file=sys.stderr,
-        )
-        sys.exit(-1)
-
-    if not output_dir.is_dir():
-        print(
-            f"output directory '{output_dir}' does not exist or is not a directory",
-            file=sys.stderr,
-        )
-        sys.exit(-1)
-
-    if os.listdir(output_dir):
-        print(
-            f"output directory '{output_dir}' is not empty",
-            file=sys.stderr,
-        )
-        sys.exit(-1)
-
-    fix_files(input_dir, output_dir)
diff --git a/owl-bot-staging/google-cloud-asset/v1/setup.py b/owl-bot-staging/google-cloud-asset/v1/setup.py
deleted file mode 100644
index 46140c197e4c..000000000000
--- a/owl-bot-staging/google-cloud-asset/v1/setup.py
+++ /dev/null
@@ -1,101 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import io
-import os
-import re
-
-import setuptools # type: ignore
-
-package_root = os.path.abspath(os.path.dirname(__file__))
-
-name = 'google-cloud-asset'
-
-
-description = "Google Cloud Asset API client library"
-
-version = None
-
-with open(os.path.join(package_root, 'google/cloud/asset/gapic_version.py')) as fp:
-    version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read())
-    assert (len(version_candidates) == 1)
-    version = version_candidates[0]
-
-if version[0] == "0":
-    release_status = "Development Status :: 4 - Beta"
-else:
-    release_status = "Development Status :: 5 - Production/Stable"
-
-dependencies = [
-    "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*",
-    # Exclude incompatible versions of `google-auth`
-    # See https://github.com/googleapis/google-cloud-python/issues/12364
-    "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0",
-    "proto-plus >= 1.22.3, <2.0.0dev",
-    "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'",
-    "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5",
-    "google-cloud-access-context-manager >= 0.1.2, <1.0.0dev",
-    "google-cloud-os-config >= 1.0.0, <2.0.0dev",
-    "grpc-google-iam-v1 >= 0.14.0, <1.0.0dev",
-]
-extras = {
-}
-url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-asset"
-
-package_root = os.path.abspath(os.path.dirname(__file__))
-
-readme_filename = os.path.join(package_root, "README.rst")
-with io.open(readme_filename, encoding="utf-8") as readme_file:
-    readme = readme_file.read()
-
-packages = [
-    package
-    for package in setuptools.find_namespace_packages()
-    if package.startswith("google")
-]
-
-setuptools.setup(
-    name=name,
-    version=version,
-    description=description,
-    long_description=readme,
-    author="Google LLC",
-    author_email="googleapis-packages@google.com",
-    license="Apache 2.0",
-    url=url,
-    classifiers=[
-        release_status,
-        "Intended Audience :: Developers",
-        "License :: OSI Approved :: Apache Software License",
-        "Programming Language :: Python",
-        "Programming Language :: Python :: 3",
-        "Programming Language :: Python :: 3.7",
-        "Programming Language :: Python :: 3.8",
-        "Programming Language :: Python :: 3.9",
-        "Programming Language :: Python :: 3.10",
-        "Programming Language :: Python :: 3.11",
-        "Programming Language :: Python :: 3.12",
-        "Programming Language :: Python :: 3.13",
-        "Operating System :: OS Independent",
-        "Topic :: Internet",
-    ],
-    platforms="Posix; MacOS X; Windows",
-    packages=packages,
-    python_requires=">=3.7",
-    install_requires=dependencies,
-    extras_require=extras,
-    include_package_data=True,
-    zip_safe=False,
-)
diff --git a/owl-bot-staging/google-cloud-asset/v1/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-asset/v1/testing/constraints-3.10.txt
deleted file mode 100644
index 70744e58974a..000000000000
--- a/owl-bot-staging/google-cloud-asset/v1/testing/constraints-3.10.txt
+++ /dev/null
@@ -1,9 +0,0 @@
-# -*- coding: utf-8 -*-
-# This constraints file is required for unit tests.
-# List all library dependencies and extras in this file.
-google-api-core
-proto-plus
-protobuf
-google-cloud-access-context-manager
-google-cloud-os-config
-grpc-google-iam-v1
diff --git a/owl-bot-staging/google-cloud-asset/v1/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-asset/v1/testing/constraints-3.11.txt
deleted file mode 100644
index 70744e58974a..000000000000
--- a/owl-bot-staging/google-cloud-asset/v1/testing/constraints-3.11.txt
+++ /dev/null
@@ -1,9 +0,0 @@
-# -*- coding: utf-8 -*-
-# This constraints file is required for unit tests.
-# List all library dependencies and extras in this file.
-google-api-core
-proto-plus
-protobuf
-google-cloud-access-context-manager
-google-cloud-os-config
-grpc-google-iam-v1
diff --git a/owl-bot-staging/google-cloud-asset/v1/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-asset/v1/testing/constraints-3.12.txt
deleted file mode 100644
index 70744e58974a..000000000000
--- a/owl-bot-staging/google-cloud-asset/v1/testing/constraints-3.12.txt
+++ /dev/null
@@ -1,9 +0,0 @@
-# -*- coding: utf-8 -*-
-# This constraints file is required for unit tests.
-# List all library dependencies and extras in this file.
-google-api-core
-proto-plus
-protobuf
-google-cloud-access-context-manager
-google-cloud-os-config
-grpc-google-iam-v1
diff --git a/owl-bot-staging/google-cloud-asset/v1/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-asset/v1/testing/constraints-3.13.txt
deleted file mode 100644
index 70744e58974a..000000000000
--- a/owl-bot-staging/google-cloud-asset/v1/testing/constraints-3.13.txt
+++ /dev/null
@@ -1,9 +0,0 @@
-# -*- coding: utf-8 -*-
-# This constraints file is required for unit tests.
-# List all library dependencies and extras in this file.
-google-api-core -proto-plus -protobuf -google-cloud-access-context-manager -google-cloud-os-config -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-asset/v1/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-asset/v1/testing/constraints-3.7.txt deleted file mode 100644 index 231036469dff..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/testing/constraints-3.7.txt +++ /dev/null @@ -1,13 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -proto-plus==1.22.3 -protobuf==3.20.2 -google-cloud-access-context-manager==0.1.2 -google-cloud-os-config==1.0.0 -grpc-google-iam-v1==0.14.0 diff --git a/owl-bot-staging/google-cloud-asset/v1/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-asset/v1/testing/constraints-3.8.txt deleted file mode 100644 index 70744e58974a..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/testing/constraints-3.8.txt +++ /dev/null @@ -1,9 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -google-cloud-access-context-manager -google-cloud-os-config -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-asset/v1/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-asset/v1/testing/constraints-3.9.txt deleted file mode 100644 index 70744e58974a..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/testing/constraints-3.9.txt +++ /dev/null @@ -1,9 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -google-cloud-access-context-manager -google-cloud-os-config -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-asset/v1/tests/__init__.py b/owl-bot-staging/google-cloud-asset/v1/tests/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-asset/v1/tests/unit/__init__.py b/owl-bot-staging/google-cloud-asset/v1/tests/unit/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-asset/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-asset/v1/tests/unit/gapic/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-asset/v1/tests/unit/gapic/asset_v1/__init__.py b/owl-bot-staging/google-cloud-asset/v1/tests/unit/gapic/asset_v1/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/tests/unit/gapic/asset_v1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-asset/v1/tests/unit/gapic/asset_v1/test_asset_service.py b/owl-bot-staging/google-cloud-asset/v1/tests/unit/gapic/asset_v1/test_asset_service.py deleted file mode 100644 index e129f86b9b2d..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1/tests/unit/gapic/asset_v1/test_asset_service.py +++ /dev/null @@ -1,18295 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation -from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.asset_v1.services.asset_service import AssetServiceAsyncClient -from google.cloud.asset_v1.services.asset_service import AssetServiceClient -from google.cloud.asset_v1.services.asset_service import pagers -from google.cloud.asset_v1.services.asset_service import transports -from google.cloud.asset_v1.types import asset_service -from google.cloud.asset_v1.types import assets -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -from google.type import expr_pb2 # type: ignore -import google.auth - - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": "service account credentials", - "principal": "service-account@example.com", -} -CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. 
-# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert AssetServiceClient._get_default_mtls_endpoint(None) is None - assert AssetServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert AssetServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert AssetServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert AssetServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert AssetServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert AssetServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert AssetServiceClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert AssetServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - AssetServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert AssetServiceClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert AssetServiceClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert AssetServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - AssetServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert AssetServiceClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert AssetServiceClient._get_client_cert_source(None, False) is None - assert AssetServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert AssetServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert 
AssetServiceClient._get_client_cert_source(None, True) is mock_default_cert_source - assert AssetServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(AssetServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceClient)) -@mock.patch.object(AssetServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = AssetServiceClient._DEFAULT_UNIVERSE - default_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert AssetServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert AssetServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == AssetServiceClient.DEFAULT_MTLS_ENDPOINT - assert AssetServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert AssetServiceClient._get_api_endpoint(None, None, default_universe, "always") == AssetServiceClient.DEFAULT_MTLS_ENDPOINT - assert AssetServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == AssetServiceClient.DEFAULT_MTLS_ENDPOINT - assert AssetServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert AssetServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - AssetServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." - - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert AssetServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert AssetServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert AssetServiceClient._get_universe_domain(None, None) == AssetServiceClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - AssetServiceClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
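-
-# Editor's note: the `test__get_universe_domain` assertions above pin down a simple
-# precedence rule: an explicitly configured universe domain wins over the
-# GOOGLE_CLOUD_UNIVERSE_DOMAIN environment value, which in turn wins over the
-# `googleapis.com` default, and an empty string is rejected outright. A minimal
-# standalone sketch of that rule (hypothetical helper name, not the client's
-# actual implementation):
-#
-#     _DEFAULT_UNIVERSE = "googleapis.com"
-#
-#     def resolve_universe_domain(client_setting, env_setting):
-#         # An explicit client option takes priority, but may not be empty.
-#         if client_setting is not None:
-#             if not client_setting:
-#                 raise ValueError("Universe Domain cannot be an empty string.")
-#             return client_setting
-#         # Fall back to the environment variable, then to the default universe.
-#         return env_setting if env_setting is not None else _DEFAULT_UNIVERSE
-#
-#     assert resolve_universe_domain("foo.com", "bar.com") == "foo.com"
-#     assert resolve_universe_domain(None, "bar.com") == "bar.com"
-#     assert resolve_universe_domain(None, None) == "googleapis.com"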
- -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = AssetServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - -@pytest.mark.parametrize("error_code", [401,403,404,500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = AssetServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - -@pytest.mark.parametrize("client_class,transport_name", [ - (AssetServiceClient, "grpc"), - (AssetServiceAsyncClient, "grpc_asyncio"), - (AssetServiceClient, "rest"), -]) -def test_asset_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'cloudasset.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://cloudasset.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.AssetServiceGrpcTransport, "grpc"), - (transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.AssetServiceRestTransport, "rest"), -]) -def test_asset_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (AssetServiceClient, "grpc"), - (AssetServiceAsyncClient, "grpc_asyncio"), - (AssetServiceClient, "rest"), -]) -def test_asset_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert 
client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'cloudasset.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://cloudasset.googleapis.com' - ) - - -def test_asset_service_client_get_transport_class(): - transport = AssetServiceClient.get_transport_class() - available_transports = [ - transports.AssetServiceGrpcTransport, - transports.AssetServiceRestTransport, - ] - assert transport in available_transports - - transport = AssetServiceClient.get_transport_class("grpc") - assert transport == transports.AssetServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc"), - (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (AssetServiceClient, transports.AssetServiceRestTransport, "rest"), -]) -@mock.patch.object(AssetServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceClient)) -@mock.patch.object(AssetServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceAsyncClient)) -def test_asset_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(AssetServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(AssetServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc", "true"), - (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc", "false"), - (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (AssetServiceClient, transports.AssetServiceRestTransport, "rest", "true"), - (AssetServiceClient, transports.AssetServiceRestTransport, "rest", "false"), -]) -@mock.patch.object(AssetServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceClient)) -@mock.patch.object(AssetServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceAsyncClient)) 
-@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_asset_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - AssetServiceClient, AssetServiceAsyncClient -]) -@mock.patch.object(AssetServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AssetServiceClient)) -@mock.patch.object(AssetServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AssetServiceAsyncClient)) -def test_asset_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - AssetServiceClient, AssetServiceAsyncClient -]) -@mock.patch.object(AssetServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceClient)) -@mock.patch.object(AssetServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceAsyncClient)) -def test_asset_service_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = AssetServiceClient._DEFAULT_UNIVERSE - default_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc"), - (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (AssetServiceClient, transports.AssetServiceRestTransport, "rest"), -]) -def test_asset_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc", grpc_helpers), - (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (AssetServiceClient, transports.AssetServiceRestTransport, "rest", None), -]) -def test_asset_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_asset_service_client_client_options_from_dict(): - with mock.patch('google.cloud.asset_v1.services.asset_service.transports.AssetServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = AssetServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc", grpc_helpers), - (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_asset_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "cloudasset.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="cloudasset.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - asset_service.ExportAssetsRequest, - dict, -]) -def test_export_assets(request_type, transport: str = 'grpc'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_assets), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.export_assets(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = asset_service.ExportAssetsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_export_assets_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = asset_service.ExportAssetsRequest( - parent='parent_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_assets), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.export_assets(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.ExportAssetsRequest( - parent='parent_value', - ) - -def test_export_assets_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.export_assets in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.export_assets] = mock_rpc - request = {} - client.export_assets(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.export_assets(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_export_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.export_assets in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.export_assets] = mock_rpc - - request = {} - await client.export_assets(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.export_assets(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_export_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.ExportAssetsRequest): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.export_assets), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.export_assets(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = asset_service.ExportAssetsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_export_assets_async_from_dict(): - await test_export_assets_async(request_type=dict) - -def test_export_assets_field_headers(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = asset_service.ExportAssetsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_assets), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.export_assets(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_export_assets_field_headers_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = asset_service.ExportAssetsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_assets), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.export_assets(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - asset_service.ListAssetsRequest, - dict, -]) -def test_list_assets(request_type, transport: str = 'grpc'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = asset_service.ListAssetsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_assets(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = asset_service.ListAssetsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAssetsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_assets_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = asset_service.ListAssetsRequest( - parent='parent_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_assets(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.ListAssetsRequest( - parent='parent_value', - page_token='page_token_value', - ) - -def test_list_assets_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_assets in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_assets] = mock_rpc - request = {} - client.list_assets(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_assets(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_assets in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_assets] = mock_rpc - - request = {} - await client.list_assets(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- await client.list_assets(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.ListAssetsRequest):
- client = AssetServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_assets),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListAssetsResponse(
- next_page_token='next_page_token_value',
- ))
- response = await client.list_assets(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = asset_service.ListAssetsRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListAssetsAsyncPager)
- assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_assets_async_from_dict():
- await test_list_assets_async(request_type=dict)
-
-def test_list_assets_field_headers():
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = asset_service.ListAssetsRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_assets),
- '__call__') as call:
- call.return_value = asset_service.ListAssetsResponse()
- client.list_assets(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_assets_field_headers_async():
- client = AssetServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = asset_service.ListAssetsRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_assets),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListAssetsResponse())
- await client.list_assets(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-def test_list_assets_flattened():
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_assets),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = asset_service.ListAssetsResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.list_assets(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-
-def test_list_assets_flattened_error():
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.list_assets(
- asset_service.ListAssetsRequest(),
- parent='parent_value',
- )
-
-@pytest.mark.asyncio
-async def test_list_assets_flattened_async():
- client = AssetServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_assets),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListAssetsResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.list_assets(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_assets_flattened_error_async():
- client = AssetServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.list_assets(
- asset_service.ListAssetsRequest(),
- parent='parent_value',
- )
-
-
-def test_list_assets_pager(transport_name: str = "grpc"):
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_assets),
- '__call__') as call:
- # Set the response to a series of pages.
- call.side_effect = (
- asset_service.ListAssetsResponse(
- assets=[
- assets.Asset(),
- assets.Asset(),
- assets.Asset(),
- ],
- next_page_token='abc',
- ),
- asset_service.ListAssetsResponse(
- assets=[],
- next_page_token='def',
- ),
- asset_service.ListAssetsResponse(
- assets=[
- assets.Asset(),
- ],
- next_page_token='ghi',
- ),
- asset_service.ListAssetsResponse(
- assets=[
- assets.Asset(),
- assets.Asset(),
- ],
- ),
- RuntimeError,
- )
-
- expected_metadata = ()
- retry = retries.Retry()
- timeout = 5
- expected_metadata = tuple(expected_metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ('parent', ''),
- )),
- )
- pager = client.list_assets(request={}, retry=retry, timeout=timeout)
-
- assert pager._metadata == expected_metadata
- assert pager._retry == retry
- assert pager._timeout == timeout
-
- results = list(pager)
- assert len(results) == 6
- assert all(isinstance(i, assets.Asset)
- for i in results)
-
-def test_list_assets_pages(transport_name: str = "grpc"):
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_assets),
- '__call__') as call:
- # Set the response to a series of pages.
- call.side_effect = (
- asset_service.ListAssetsResponse(
- assets=[
- assets.Asset(),
- assets.Asset(),
- assets.Asset(),
- ],
- next_page_token='abc',
- ),
- asset_service.ListAssetsResponse(
- assets=[],
- next_page_token='def',
- ),
- asset_service.ListAssetsResponse(
- assets=[
- assets.Asset(),
- ],
- next_page_token='ghi',
- ),
- asset_service.ListAssetsResponse(
- assets=[
- assets.Asset(),
- assets.Asset(),
- ],
- ),
- RuntimeError,
- )
- pages = list(client.list_assets(request={}).pages)
- for page_, token in zip(pages, ['abc','def','ghi', '']):
- assert page_.raw_page.next_page_token == token
-
-@pytest.mark.asyncio
-async def test_list_assets_async_pager():
- client = AssetServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_assets),
- '__call__', new_callable=mock.AsyncMock) as call:
- # Set the response to a series of pages.
- call.side_effect = (
- asset_service.ListAssetsResponse(
- assets=[
- assets.Asset(),
- assets.Asset(),
- assets.Asset(),
- ],
- next_page_token='abc',
- ),
- asset_service.ListAssetsResponse(
- assets=[],
- next_page_token='def',
- ),
- asset_service.ListAssetsResponse(
- assets=[
- assets.Asset(),
- ],
- next_page_token='ghi',
- ),
- asset_service.ListAssetsResponse(
- assets=[
- assets.Asset(),
- assets.Asset(),
- ],
- ),
- RuntimeError,
- )
- async_pager = await client.list_assets(request={},)
- assert async_pager.next_page_token == 'abc'
- responses = []
- async for response in async_pager: # pragma: no branch
- responses.append(response)
-
- assert len(responses) == 6
- assert all(isinstance(i, assets.Asset)
- for i in responses)
-
-
-@pytest.mark.asyncio
-async def test_list_assets_async_pages():
- client = AssetServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_assets),
- '__call__', new_callable=mock.AsyncMock) as call:
- # Set the response to a series of pages.
- call.side_effect = ( - asset_service.ListAssetsResponse( - assets=[ - assets.Asset(), - assets.Asset(), - assets.Asset(), - ], - next_page_token='abc', - ), - asset_service.ListAssetsResponse( - assets=[], - next_page_token='def', - ), - asset_service.ListAssetsResponse( - assets=[ - assets.Asset(), - ], - next_page_token='ghi', - ), - asset_service.ListAssetsResponse( - assets=[ - assets.Asset(), - assets.Asset(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_assets(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - asset_service.BatchGetAssetsHistoryRequest, - dict, -]) -def test_batch_get_assets_history(request_type, transport: str = 'grpc'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_get_assets_history), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = asset_service.BatchGetAssetsHistoryResponse( - ) - response = client.batch_get_assets_history(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = asset_service.BatchGetAssetsHistoryRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.BatchGetAssetsHistoryResponse) - - -def test_batch_get_assets_history_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = asset_service.BatchGetAssetsHistoryRequest( - parent='parent_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_get_assets_history), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.batch_get_assets_history(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.BatchGetAssetsHistoryRequest( - parent='parent_value', - ) - -def test_batch_get_assets_history_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.batch_get_assets_history in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.batch_get_assets_history] = mock_rpc - request = {} - client.batch_get_assets_history(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.batch_get_assets_history(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_batch_get_assets_history_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.batch_get_assets_history in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.batch_get_assets_history] = mock_rpc - - request = {} - await client.batch_get_assets_history(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.batch_get_assets_history(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_batch_get_assets_history_async(transport: str = 'grpc_asyncio', request_type=asset_service.BatchGetAssetsHistoryRequest): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_get_assets_history), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.BatchGetAssetsHistoryResponse(
- ))
- response = await client.batch_get_assets_history(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = asset_service.BatchGetAssetsHistoryRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, asset_service.BatchGetAssetsHistoryResponse)
-
-
-@pytest.mark.asyncio
-async def test_batch_get_assets_history_async_from_dict():
- await test_batch_get_assets_history_async(request_type=dict)
-
-def test_batch_get_assets_history_field_headers():
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = asset_service.BatchGetAssetsHistoryRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.batch_get_assets_history),
- '__call__') as call:
- call.return_value = asset_service.BatchGetAssetsHistoryResponse()
- client.batch_get_assets_history(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_batch_get_assets_history_field_headers_async():
- client = AssetServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = asset_service.BatchGetAssetsHistoryRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.batch_get_assets_history),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.BatchGetAssetsHistoryResponse())
- await client.batch_get_assets_history(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-@pytest.mark.parametrize("request_type", [
- asset_service.CreateFeedRequest,
- dict,
-])
-def test_create_feed(request_type, transport: str = 'grpc'):
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_feed),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], - ) - response = client.create_feed(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = asset_service.CreateFeedRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] - assert response.content_type == asset_service.ContentType.RESOURCE - assert response.relationship_types == ['relationship_types_value'] - - -def test_create_feed_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = asset_service.CreateFeedRequest( - parent='parent_value', - feed_id='feed_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_feed), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_feed(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.CreateFeedRequest( - parent='parent_value', - feed_id='feed_id_value', - ) - -def test_create_feed_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_feed in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_feed] = mock_rpc - request = {} - client.create_feed(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.create_feed(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_create_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = AssetServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.create_feed in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.create_feed] = mock_rpc
-
- request = {}
- await client.create_feed(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.create_feed(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_create_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.CreateFeedRequest):
- client = AssetServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_feed),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed(
- name='name_value',
- asset_names=['asset_names_value'],
- asset_types=['asset_types_value'],
- content_type=asset_service.ContentType.RESOURCE,
- relationship_types=['relationship_types_value'],
- ))
- response = await client.create_feed(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = asset_service.CreateFeedRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, asset_service.Feed)
- assert response.name == 'name_value'
- assert response.asset_names == ['asset_names_value']
- assert response.asset_types == ['asset_types_value']
- assert response.content_type == asset_service.ContentType.RESOURCE
- assert response.relationship_types == ['relationship_types_value']
-
-
-@pytest.mark.asyncio
-async def test_create_feed_async_from_dict():
- await test_create_feed_async(request_type=dict)
-
-def test_create_feed_field_headers():
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = asset_service.CreateFeedRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_feed),
- '__call__') as call:
- call.return_value = asset_service.Feed()
- client.create_feed(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_create_feed_field_headers_async():
- client = AssetServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = asset_service.CreateFeedRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_feed),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed())
- await client.create_feed(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-def test_create_feed_flattened():
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_feed),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = asset_service.Feed()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.create_feed(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-
-def test_create_feed_flattened_error():
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.create_feed(
- asset_service.CreateFeedRequest(),
- parent='parent_value',
- )
-
-@pytest.mark.asyncio
-async def test_create_feed_flattened_async():
- client = AssetServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_feed),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.create_feed(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_feed_flattened_error_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_feed( - asset_service.CreateFeedRequest(), - parent='parent_value', - ) - - -@pytest.mark.parametrize("request_type", [ - asset_service.GetFeedRequest, - dict, -]) -def test_get_feed(request_type, transport: str = 'grpc'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_feed), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], - ) - response = client.get_feed(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = asset_service.GetFeedRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] - assert response.content_type == asset_service.ContentType.RESOURCE - assert response.relationship_types == ['relationship_types_value'] - - -def test_get_feed_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = asset_service.GetFeedRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_feed), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.get_feed(request=request)
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == asset_service.GetFeedRequest(
- name='name_value',
- )
-
-def test_get_feed_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.get_feed in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.get_feed] = mock_rpc
- request = {}
- client.get_feed(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- client.get_feed(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = AssetServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.get_feed in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.get_feed] = mock_rpc
-
- request = {}
- await client.get_feed(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.get_feed(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.GetFeedRequest):
- client = AssetServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_feed),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed(
- name='name_value',
- asset_names=['asset_names_value'],
- asset_types=['asset_types_value'],
- content_type=asset_service.ContentType.RESOURCE,
- relationship_types=['relationship_types_value'],
- ))
- response = await client.get_feed(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = asset_service.GetFeedRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] - assert response.content_type == asset_service.ContentType.RESOURCE - assert response.relationship_types == ['relationship_types_value'] - - -@pytest.mark.asyncio -async def test_get_feed_async_from_dict(): - await test_get_feed_async(request_type=dict) - -def test_get_feed_field_headers(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = asset_service.GetFeedRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_feed), - '__call__') as call: - call.return_value = asset_service.Feed() - client.get_feed(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_feed_field_headers_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = asset_service.GetFeedRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_feed), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed()) - await client.get_feed(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_feed_flattened(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_feed), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = asset_service.Feed() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_feed( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_feed_flattened_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError):
- client.get_feed(
- asset_service.GetFeedRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_get_feed_flattened_async():
- client = AssetServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_feed),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.get_feed(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_feed_flattened_error_async():
- client = AssetServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.get_feed(
- asset_service.GetFeedRequest(),
- name='name_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- asset_service.ListFeedsRequest,
- dict,
-])
-def test_list_feeds(request_type, transport: str = 'grpc'):
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_feeds),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = asset_service.ListFeedsResponse(
- )
- response = client.list_feeds(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = asset_service.ListFeedsRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, asset_service.ListFeedsResponse)
-
-
-def test_list_feeds_non_empty_request_with_auto_populated_field():
- # This test is a coverage failsafe to make sure that UUID4 fields are
- # automatically populated, according to AIP-4235, with non-empty requests.
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Populate all string fields in the request which are not UUID4
- # since we want to check that UUID4 are populated automatically
- # if they meet the requirements of AIP 4235.
- request = asset_service.ListFeedsRequest(
- parent='parent_value',
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_feeds),
- '__call__') as call:
- call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client.list_feeds(request=request)
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == asset_service.ListFeedsRequest(
- parent='parent_value',
- )
-
-def test_list_feeds_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.list_feeds in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.list_feeds] = mock_rpc
- request = {}
- client.list_feeds(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- client.list_feeds(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_feeds_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = AssetServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.list_feeds in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.list_feeds] = mock_rpc
-
- request = {}
- await client.list_feeds(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.list_feeds(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_feeds_async(transport: str = 'grpc_asyncio', request_type=asset_service.ListFeedsRequest):
- client = AssetServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_feeds),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListFeedsResponse(
- ))
- response = await client.list_feeds(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = asset_service.ListFeedsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.ListFeedsResponse) - - -@pytest.mark.asyncio -async def test_list_feeds_async_from_dict(): - await test_list_feeds_async(request_type=dict) - -def test_list_feeds_field_headers(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = asset_service.ListFeedsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_feeds), - '__call__') as call: - call.return_value = asset_service.ListFeedsResponse() - client.list_feeds(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_feeds_field_headers_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = asset_service.ListFeedsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_feeds), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListFeedsResponse()) - await client.list_feeds(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_feeds_flattened(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_feeds), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = asset_service.ListFeedsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_feeds( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_feeds_flattened_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError):
- client.list_feeds(
- asset_service.ListFeedsRequest(),
- parent='parent_value',
- )
-
-@pytest.mark.asyncio
-async def test_list_feeds_flattened_async():
- client = AssetServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_feeds),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListFeedsResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.list_feeds(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_feeds_flattened_error_async():
- client = AssetServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.list_feeds(
- asset_service.ListFeedsRequest(),
- parent='parent_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- asset_service.UpdateFeedRequest,
- dict,
-])
-def test_update_feed(request_type, transport: str = 'grpc'):
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_feed),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = asset_service.Feed(
- name='name_value',
- asset_names=['asset_names_value'],
- asset_types=['asset_types_value'],
- content_type=asset_service.ContentType.RESOURCE,
- relationship_types=['relationship_types_value'],
- )
- response = client.update_feed(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = asset_service.UpdateFeedRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, asset_service.Feed)
- assert response.name == 'name_value'
- assert response.asset_names == ['asset_names_value']
- assert response.asset_types == ['asset_types_value']
- assert response.content_type == asset_service.ContentType.RESOURCE
- assert response.relationship_types == ['relationship_types_value']
-
-
-def test_update_feed_non_empty_request_with_auto_populated_field():
- # This test is a coverage failsafe to make sure that UUID4 fields are
- # automatically populated, according to AIP-4235, with non-empty requests.
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Populate all string fields in the request which are not UUID4
- # since we want to check that UUID4 are populated automatically
- # if they meet the requirements of AIP 4235.
- request = asset_service.UpdateFeedRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_feed), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_feed(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.UpdateFeedRequest( - ) - -def test_update_feed_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_feed in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_feed] = mock_rpc - request = {} - client.update_feed(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_feed(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_feed in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_feed] = mock_rpc - - request = {} - await client.update_feed(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.update_feed(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.UpdateFeedRequest): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_feed), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed(
- name='name_value',
- asset_names=['asset_names_value'],
- asset_types=['asset_types_value'],
- content_type=asset_service.ContentType.RESOURCE,
- relationship_types=['relationship_types_value'],
- ))
- response = await client.update_feed(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = asset_service.UpdateFeedRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, asset_service.Feed)
- assert response.name == 'name_value'
- assert response.asset_names == ['asset_names_value']
- assert response.asset_types == ['asset_types_value']
- assert response.content_type == asset_service.ContentType.RESOURCE
- assert response.relationship_types == ['relationship_types_value']
-
-
-@pytest.mark.asyncio
-async def test_update_feed_async_from_dict():
- await test_update_feed_async(request_type=dict)
-
-def test_update_feed_field_headers():
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = asset_service.UpdateFeedRequest()
-
- request.feed.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_feed),
- '__call__') as call:
- call.return_value = asset_service.Feed()
- client.update_feed(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'feed.name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_update_feed_field_headers_async():
- client = AssetServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = asset_service.UpdateFeedRequest()
-
- request.feed.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_feed),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed())
- await client.update_feed(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'feed.name=name_value',
- ) in kw['metadata']
-
-
-def test_update_feed_flattened():
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_feed),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = asset_service.Feed()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
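- # The flattened keyword argument is copied onto a fresh
- # UpdateFeedRequest by the client, and that request is what the mocked
- # transport receives as args[0] in the assertions below.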
- client.update_feed( - feed=asset_service.Feed(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].feed - mock_val = asset_service.Feed(name='name_value') - assert arg == mock_val - - -def test_update_feed_flattened_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_feed( - asset_service.UpdateFeedRequest(), - feed=asset_service.Feed(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_update_feed_flattened_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_feed), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = asset_service.Feed() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_feed( - feed=asset_service.Feed(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].feed - mock_val = asset_service.Feed(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_feed_flattened_error_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_feed( - asset_service.UpdateFeedRequest(), - feed=asset_service.Feed(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - asset_service.DeleteFeedRequest, - dict, -]) -def test_delete_feed(request_type, transport: str = 'grpc'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_feed), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_feed(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = asset_service.DeleteFeedRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_feed_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
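- # Unlike UpdateFeedRequest, DeleteFeedRequest.name is a plain string,
- # so it is populated explicitly below and expected to be echoed back.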
- client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = asset_service.DeleteFeedRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_feed), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_feed(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.DeleteFeedRequest( - name='name_value', - ) - -def test_delete_feed_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_feed in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_feed] = mock_rpc - request = {} - client.delete_feed(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_feed(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_feed in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_feed] = mock_rpc - - request = {} - await client.delete_feed(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_feed(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.DeleteFeedRequest): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
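- # request_type is either asset_service.DeleteFeedRequest or dict (see
- # test_delete_feed_async_from_dict); the client accepts both forms.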
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_feed), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_feed(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = asset_service.DeleteFeedRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_feed_async_from_dict(): - await test_delete_feed_async(request_type=dict) - -def test_delete_feed_field_headers(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = asset_service.DeleteFeedRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_feed), - '__call__') as call: - call.return_value = None - client.delete_feed(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_feed_field_headers_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = asset_service.DeleteFeedRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_feed), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_feed(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_feed_flattened(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_feed), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_feed( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
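- # args[0] is the DeleteFeedRequest the transport received; its name
- # field should mirror the flattened keyword argument passed above.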
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_feed_flattened_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_feed( - asset_service.DeleteFeedRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_feed_flattened_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_feed), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_feed( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_feed_flattened_error_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_feed( - asset_service.DeleteFeedRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - asset_service.SearchAllResourcesRequest, - dict, -]) -def test_search_all_resources(request_type, transport: str = 'grpc'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_all_resources), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = asset_service.SearchAllResourcesResponse( - next_page_token='next_page_token_value', - ) - response = client.search_all_resources(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = asset_service.SearchAllResourcesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.SearchAllResourcesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_search_all_resources_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
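- # SearchAllResourcesRequest exposes several plain string fields; each
- # is set below so the echoed request can be compared field-for-field.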
- request = asset_service.SearchAllResourcesRequest( - scope='scope_value', - query='query_value', - page_token='page_token_value', - order_by='order_by_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_all_resources), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.search_all_resources(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.SearchAllResourcesRequest( - scope='scope_value', - query='query_value', - page_token='page_token_value', - order_by='order_by_value', - ) - -def test_search_all_resources_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.search_all_resources in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.search_all_resources] = mock_rpc - request = {} - client.search_all_resources(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.search_all_resources(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_search_all_resources_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.search_all_resources in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.search_all_resources] = mock_rpc - - request = {} - await client.search_all_resources(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- await client.search_all_resources(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_search_all_resources_async(transport: str = 'grpc_asyncio', request_type=asset_service.SearchAllResourcesRequest):
- client = AssetServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.search_all_resources),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllResourcesResponse(
- next_page_token='next_page_token_value',
- ))
- response = await client.search_all_resources(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = asset_service.SearchAllResourcesRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.SearchAllResourcesAsyncPager)
- assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_search_all_resources_async_from_dict():
- await test_search_all_resources_async(request_type=dict)
-
-def test_search_all_resources_field_headers():
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = asset_service.SearchAllResourcesRequest()
-
- request.scope = 'scope_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.search_all_resources),
- '__call__') as call:
- call.return_value = asset_service.SearchAllResourcesResponse()
- client.search_all_resources(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'scope=scope_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_search_all_resources_field_headers_async():
- client = AssetServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = asset_service.SearchAllResourcesRequest()
-
- request.scope = 'scope_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.search_all_resources),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllResourcesResponse())
- await client.search_all_resources(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
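- # The routing header travels as gRPC metadata: the client serializes
- # the routed field into 'x-goog-request-params' (here 'scope=scope_value').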
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'scope=scope_value', - ) in kw['metadata'] - - -def test_search_all_resources_flattened(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_all_resources), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = asset_service.SearchAllResourcesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.search_all_resources( - scope='scope_value', - query='query_value', - asset_types=['asset_types_value'], - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].scope - mock_val = 'scope_value' - assert arg == mock_val - arg = args[0].query - mock_val = 'query_value' - assert arg == mock_val - arg = args[0].asset_types - mock_val = ['asset_types_value'] - assert arg == mock_val - - -def test_search_all_resources_flattened_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.search_all_resources( - asset_service.SearchAllResourcesRequest(), - scope='scope_value', - query='query_value', - asset_types=['asset_types_value'], - ) - -@pytest.mark.asyncio -async def test_search_all_resources_flattened_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_all_resources), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = asset_service.SearchAllResourcesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllResourcesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.search_all_resources( - scope='scope_value', - query='query_value', - asset_types=['asset_types_value'], - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].scope - mock_val = 'scope_value' - assert arg == mock_val - arg = args[0].query - mock_val = 'query_value' - assert arg == mock_val - arg = args[0].asset_types - mock_val = ['asset_types_value'] - assert arg == mock_val - -@pytest.mark.asyncio -async def test_search_all_resources_flattened_error_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.search_all_resources( - asset_service.SearchAllResourcesRequest(), - scope='scope_value', - query='query_value', - asset_types=['asset_types_value'], - ) - - -def test_search_all_resources_pager(transport_name: str = "grpc"): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
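- # The page series below ends with a RuntimeError sentinel: if the pager
- # ever requests more pages than the test supplies, the mock raises and
- # the test fails loudly instead of hanging.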
- with mock.patch.object( - type(client.transport.search_all_resources), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - asset_service.SearchAllResourcesResponse( - results=[ - assets.ResourceSearchResult(), - assets.ResourceSearchResult(), - assets.ResourceSearchResult(), - ], - next_page_token='abc', - ), - asset_service.SearchAllResourcesResponse( - results=[], - next_page_token='def', - ), - asset_service.SearchAllResourcesResponse( - results=[ - assets.ResourceSearchResult(), - ], - next_page_token='ghi', - ), - asset_service.SearchAllResourcesResponse( - results=[ - assets.ResourceSearchResult(), - assets.ResourceSearchResult(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('scope', ''), - )), - ) - pager = client.search_all_resources(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, assets.ResourceSearchResult) - for i in results) -def test_search_all_resources_pages(transport_name: str = "grpc"): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_all_resources), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - asset_service.SearchAllResourcesResponse( - results=[ - assets.ResourceSearchResult(), - assets.ResourceSearchResult(), - assets.ResourceSearchResult(), - ], - next_page_token='abc', - ), - asset_service.SearchAllResourcesResponse( - results=[], - next_page_token='def', - ), - asset_service.SearchAllResourcesResponse( - results=[ - assets.ResourceSearchResult(), - ], - next_page_token='ghi', - ), - asset_service.SearchAllResourcesResponse( - results=[ - assets.ResourceSearchResult(), - assets.ResourceSearchResult(), - ], - ), - RuntimeError, - ) - pages = list(client.search_all_resources(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_search_all_resources_async_pager(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_all_resources), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
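- # new_callable=mock.AsyncMock makes the patched '__call__' awaitable,
- # so successive awaited page fetches consume side_effect in order.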
- call.side_effect = ( - asset_service.SearchAllResourcesResponse( - results=[ - assets.ResourceSearchResult(), - assets.ResourceSearchResult(), - assets.ResourceSearchResult(), - ], - next_page_token='abc', - ), - asset_service.SearchAllResourcesResponse( - results=[], - next_page_token='def', - ), - asset_service.SearchAllResourcesResponse( - results=[ - assets.ResourceSearchResult(), - ], - next_page_token='ghi', - ), - asset_service.SearchAllResourcesResponse( - results=[ - assets.ResourceSearchResult(), - assets.ResourceSearchResult(), - ], - ), - RuntimeError, - ) - async_pager = await client.search_all_resources(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, assets.ResourceSearchResult) - for i in responses) - - -@pytest.mark.asyncio -async def test_search_all_resources_async_pages(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_all_resources), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - asset_service.SearchAllResourcesResponse( - results=[ - assets.ResourceSearchResult(), - assets.ResourceSearchResult(), - assets.ResourceSearchResult(), - ], - next_page_token='abc', - ), - asset_service.SearchAllResourcesResponse( - results=[], - next_page_token='def', - ), - asset_service.SearchAllResourcesResponse( - results=[ - assets.ResourceSearchResult(), - ], - next_page_token='ghi', - ), - asset_service.SearchAllResourcesResponse( - results=[ - assets.ResourceSearchResult(), - assets.ResourceSearchResult(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.search_all_resources(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - asset_service.SearchAllIamPoliciesRequest, - dict, -]) -def test_search_all_iam_policies(request_type, transport: str = 'grpc'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = asset_service.SearchAllIamPoliciesResponse( - next_page_token='next_page_token_value', - ) - response = client.search_all_iam_policies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = asset_service.SearchAllIamPoliciesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.SearchAllIamPoliciesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_search_all_iam_policies_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = asset_service.SearchAllIamPoliciesRequest( - scope='scope_value', - query='query_value', - page_token='page_token_value', - order_by='order_by_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.search_all_iam_policies(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.SearchAllIamPoliciesRequest( - scope='scope_value', - query='query_value', - page_token='page_token_value', - order_by='order_by_value', - ) - -def test_search_all_iam_policies_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.search_all_iam_policies in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.search_all_iam_policies] = mock_rpc - request = {} - client.search_all_iam_policies(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.search_all_iam_policies(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_search_all_iam_policies_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = AssetServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.search_all_iam_policies in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.search_all_iam_policies] = mock_rpc
-
- request = {}
- await client.search_all_iam_policies(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.search_all_iam_policies(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_search_all_iam_policies_async(transport: str = 'grpc_asyncio', request_type=asset_service.SearchAllIamPoliciesRequest):
- client = AssetServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.search_all_iam_policies),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllIamPoliciesResponse(
- next_page_token='next_page_token_value',
- ))
- response = await client.search_all_iam_policies(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = asset_service.SearchAllIamPoliciesRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.SearchAllIamPoliciesAsyncPager)
- assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_search_all_iam_policies_async_from_dict():
- await test_search_all_iam_policies_async(request_type=dict)
-
-def test_search_all_iam_policies_field_headers():
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = asset_service.SearchAllIamPoliciesRequest()
-
- request.scope = 'scope_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: - call.return_value = asset_service.SearchAllIamPoliciesResponse() - client.search_all_iam_policies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'scope=scope_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_search_all_iam_policies_field_headers_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = asset_service.SearchAllIamPoliciesRequest() - - request.scope = 'scope_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllIamPoliciesResponse()) - await client.search_all_iam_policies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'scope=scope_value', - ) in kw['metadata'] - - -def test_search_all_iam_policies_flattened(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = asset_service.SearchAllIamPoliciesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.search_all_iam_policies( - scope='scope_value', - query='query_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].scope - mock_val = 'scope_value' - assert arg == mock_val - arg = args[0].query - mock_val = 'query_value' - assert arg == mock_val - - -def test_search_all_iam_policies_flattened_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.search_all_iam_policies( - asset_service.SearchAllIamPoliciesRequest(), - scope='scope_value', - query='query_value', - ) - -@pytest.mark.asyncio -async def test_search_all_iam_policies_flattened_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: - # Designate an appropriate return value for the call. 
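- # The first assignment below is immediately superseded: only the value
- # wrapped in FakeUnaryUnaryCall is awaitable and reaches the async client.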
- call.return_value = asset_service.SearchAllIamPoliciesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllIamPoliciesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.search_all_iam_policies( - scope='scope_value', - query='query_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].scope - mock_val = 'scope_value' - assert arg == mock_val - arg = args[0].query - mock_val = 'query_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_search_all_iam_policies_flattened_error_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.search_all_iam_policies( - asset_service.SearchAllIamPoliciesRequest(), - scope='scope_value', - query='query_value', - ) - - -def test_search_all_iam_policies_pager(transport_name: str = "grpc"): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - asset_service.SearchAllIamPoliciesResponse( - results=[ - assets.IamPolicySearchResult(), - assets.IamPolicySearchResult(), - assets.IamPolicySearchResult(), - ], - next_page_token='abc', - ), - asset_service.SearchAllIamPoliciesResponse( - results=[], - next_page_token='def', - ), - asset_service.SearchAllIamPoliciesResponse( - results=[ - assets.IamPolicySearchResult(), - ], - next_page_token='ghi', - ), - asset_service.SearchAllIamPoliciesResponse( - results=[ - assets.IamPolicySearchResult(), - assets.IamPolicySearchResult(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('scope', ''), - )), - ) - pager = client.search_all_iam_policies(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, assets.IamPolicySearchResult) - for i in results) -def test_search_all_iam_policies_pages(transport_name: str = "grpc"): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - asset_service.SearchAllIamPoliciesResponse( - results=[ - assets.IamPolicySearchResult(), - assets.IamPolicySearchResult(), - assets.IamPolicySearchResult(), - ], - next_page_token='abc', - ), - asset_service.SearchAllIamPoliciesResponse( - results=[], - next_page_token='def', - ), - asset_service.SearchAllIamPoliciesResponse( - results=[ - assets.IamPolicySearchResult(), - ], - next_page_token='ghi', - ), - asset_service.SearchAllIamPoliciesResponse( - results=[ - assets.IamPolicySearchResult(), - assets.IamPolicySearchResult(), - ], - ), - RuntimeError, - ) - pages = list(client.search_all_iam_policies(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_search_all_iam_policies_async_pager(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - asset_service.SearchAllIamPoliciesResponse( - results=[ - assets.IamPolicySearchResult(), - assets.IamPolicySearchResult(), - assets.IamPolicySearchResult(), - ], - next_page_token='abc', - ), - asset_service.SearchAllIamPoliciesResponse( - results=[], - next_page_token='def', - ), - asset_service.SearchAllIamPoliciesResponse( - results=[ - assets.IamPolicySearchResult(), - ], - next_page_token='ghi', - ), - asset_service.SearchAllIamPoliciesResponse( - results=[ - assets.IamPolicySearchResult(), - assets.IamPolicySearchResult(), - ], - ), - RuntimeError, - ) - async_pager = await client.search_all_iam_policies(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, assets.IamPolicySearchResult) - for i in responses) - - -@pytest.mark.asyncio -async def test_search_all_iam_policies_async_pages(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - asset_service.SearchAllIamPoliciesResponse( - results=[ - assets.IamPolicySearchResult(), - assets.IamPolicySearchResult(), - assets.IamPolicySearchResult(), - ], - next_page_token='abc', - ), - asset_service.SearchAllIamPoliciesResponse( - results=[], - next_page_token='def', - ), - asset_service.SearchAllIamPoliciesResponse( - results=[ - assets.IamPolicySearchResult(), - ], - next_page_token='ghi', - ), - asset_service.SearchAllIamPoliciesResponse( - results=[ - assets.IamPolicySearchResult(), - assets.IamPolicySearchResult(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.search_all_iam_policies(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeIamPolicyRequest, - dict, -]) -def test_analyze_iam_policy(request_type, transport: str = 'grpc'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = asset_service.AnalyzeIamPolicyResponse( - fully_explored=True, - ) - response = client.analyze_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = asset_service.AnalyzeIamPolicyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.AnalyzeIamPolicyResponse) - assert response.fully_explored is True - - -def test_analyze_iam_policy_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = asset_service.AnalyzeIamPolicyRequest( - saved_analysis_query='saved_analysis_query_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_iam_policy), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.analyze_iam_policy(request=request)
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == asset_service.AnalyzeIamPolicyRequest(
- saved_analysis_query='saved_analysis_query_value',
- )
-
-def test_analyze_iam_policy_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.analyze_iam_policy in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.analyze_iam_policy] = mock_rpc
- request = {}
- client.analyze_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- client.analyze_iam_policy(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_analyze_iam_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = AssetServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.analyze_iam_policy in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.analyze_iam_policy] = mock_rpc
-
- request = {}
- await client.analyze_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.analyze_iam_policy(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_analyze_iam_policy_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeIamPolicyRequest):
- client = AssetServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.analyze_iam_policy),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeIamPolicyResponse(
- fully_explored=True,
- ))
- response = await client.analyze_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = asset_service.AnalyzeIamPolicyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.AnalyzeIamPolicyResponse) - assert response.fully_explored is True - - -@pytest.mark.asyncio -async def test_analyze_iam_policy_async_from_dict(): - await test_analyze_iam_policy_async(request_type=dict) - -def test_analyze_iam_policy_field_headers(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = asset_service.AnalyzeIamPolicyRequest() - - request.analysis_query.scope = 'scope_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_iam_policy), - '__call__') as call: - call.return_value = asset_service.AnalyzeIamPolicyResponse() - client.analyze_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'analysis_query.scope=scope_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_analyze_iam_policy_field_headers_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = asset_service.AnalyzeIamPolicyRequest() - - request.analysis_query.scope = 'scope_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_iam_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeIamPolicyResponse()) - await client.analyze_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'analysis_query.scope=scope_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeIamPolicyLongrunningRequest, - dict, -]) -def test_analyze_iam_policy_longrunning(request_type, transport: str = 'grpc'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_iam_policy_longrunning), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.analyze_iam_policy_longrunning(request) - - # Establish that the underlying gRPC stub method was called. 
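- # For long-running methods the client wraps the raw
- # operations_pb2.Operation in a google.api_core future, which is what
- # the type assertion below verifies.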
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = asset_service.AnalyzeIamPolicyLongrunningRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_analyze_iam_policy_longrunning_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = asset_service.AnalyzeIamPolicyLongrunningRequest( - saved_analysis_query='saved_analysis_query_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_iam_policy_longrunning), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.analyze_iam_policy_longrunning(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeIamPolicyLongrunningRequest( - saved_analysis_query='saved_analysis_query_value', - ) - -def test_analyze_iam_policy_longrunning_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.analyze_iam_policy_longrunning in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.analyze_iam_policy_longrunning] = mock_rpc - request = {} - client.analyze_iam_policy_longrunning(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.analyze_iam_policy_longrunning(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_analyze_iam_policy_longrunning_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.analyze_iam_policy_longrunning in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.analyze_iam_policy_longrunning] = mock_rpc - - request = {} - await client.analyze_iam_policy_longrunning(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.analyze_iam_policy_longrunning(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_analyze_iam_policy_longrunning_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeIamPolicyLongrunningRequest): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_iam_policy_longrunning), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.analyze_iam_policy_longrunning(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = asset_service.AnalyzeIamPolicyLongrunningRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_analyze_iam_policy_longrunning_async_from_dict(): - await test_analyze_iam_policy_longrunning_async(request_type=dict) - -def test_analyze_iam_policy_longrunning_field_headers(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = asset_service.AnalyzeIamPolicyLongrunningRequest() - - request.analysis_query.scope = 'scope_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_iam_policy_longrunning), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.analyze_iam_policy_longrunning(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'analysis_query.scope=scope_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_analyze_iam_policy_longrunning_field_headers_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = asset_service.AnalyzeIamPolicyLongrunningRequest() - - request.analysis_query.scope = 'scope_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_iam_policy_longrunning), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.analyze_iam_policy_longrunning(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'analysis_query.scope=scope_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeMoveRequest, - dict, -]) -def test_analyze_move(request_type, transport: str = 'grpc'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_move), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = asset_service.AnalyzeMoveResponse( - ) - response = client.analyze_move(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = asset_service.AnalyzeMoveRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.AnalyzeMoveResponse) - - -def test_analyze_move_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = asset_service.AnalyzeMoveRequest( - resource='resource_value', - destination_parent='destination_parent_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_move), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.analyze_move(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeMoveRequest( - resource='resource_value', - destination_parent='destination_parent_value', - ) - -def test_analyze_move_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.analyze_move in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.analyze_move] = mock_rpc - request = {} - client.analyze_move(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.analyze_move(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_analyze_move_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.analyze_move in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.analyze_move] = mock_rpc - - request = {} - await client.analyze_move(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.analyze_move(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_analyze_move_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeMoveRequest): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(
- type(client.transport.analyze_move),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeMoveResponse(
- ))
- response = await client.analyze_move(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = asset_service.AnalyzeMoveRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, asset_service.AnalyzeMoveResponse)
-
-
-@pytest.mark.asyncio
-async def test_analyze_move_async_from_dict():
- await test_analyze_move_async(request_type=dict)
-
-def test_analyze_move_field_headers():
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = asset_service.AnalyzeMoveRequest()
-
- request.resource = 'resource_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.analyze_move),
- '__call__') as call:
- call.return_value = asset_service.AnalyzeMoveResponse()
- client.analyze_move(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'resource=resource_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_analyze_move_field_headers_async():
- client = AssetServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = asset_service.AnalyzeMoveRequest()
-
- request.resource = 'resource_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.analyze_move),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeMoveResponse())
- await client.analyze_move(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'resource=resource_value',
- ) in kw['metadata']
-
-
-@pytest.mark.parametrize("request_type", [
- asset_service.QueryAssetsRequest,
- dict,
-])
-def test_query_assets(request_type, transport: str = 'grpc'):
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.query_assets),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = asset_service.QueryAssetsResponse(
- job_reference='job_reference_value',
- done=True,
- )
- response = client.query_assets(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = asset_service.QueryAssetsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.QueryAssetsResponse) - assert response.job_reference == 'job_reference_value' - assert response.done is True - - -def test_query_assets_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = asset_service.QueryAssetsRequest( - parent='parent_value', - statement='statement_value', - job_reference='job_reference_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.query_assets), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.query_assets(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.QueryAssetsRequest( - parent='parent_value', - statement='statement_value', - job_reference='job_reference_value', - page_token='page_token_value', - ) - -def test_query_assets_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.query_assets in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.query_assets] = mock_rpc - request = {} - client.query_assets(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.query_assets(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_query_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = AssetServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.query_assets in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.query_assets] = mock_rpc
-
- request = {}
- await client.query_assets(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.query_assets(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_query_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.QueryAssetsRequest):
- client = AssetServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.query_assets),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.QueryAssetsResponse(
- job_reference='job_reference_value',
- done=True,
- ))
- response = await client.query_assets(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = asset_service.QueryAssetsRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, asset_service.QueryAssetsResponse)
- assert response.job_reference == 'job_reference_value'
- assert response.done is True
-
-
-@pytest.mark.asyncio
-async def test_query_assets_async_from_dict():
- await test_query_assets_async(request_type=dict)
-
-def test_query_assets_field_headers():
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = asset_service.QueryAssetsRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.query_assets),
- '__call__') as call:
- call.return_value = asset_service.QueryAssetsResponse()
- client.query_assets(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_query_assets_field_headers_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = asset_service.QueryAssetsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.query_assets), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.QueryAssetsResponse()) - await client.query_assets(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - asset_service.CreateSavedQueryRequest, - dict, -]) -def test_create_saved_query(request_type, transport: str = 'grpc'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_saved_query), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', - ) - response = client.create_saved_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = asset_service.CreateSavedQueryRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.SavedQuery) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.creator == 'creator_value' - assert response.last_updater == 'last_updater_value' - - -def test_create_saved_query_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = asset_service.CreateSavedQueryRequest( - parent='parent_value', - saved_query_id='saved_query_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_saved_query), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_saved_query(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.CreateSavedQueryRequest( - parent='parent_value', - saved_query_id='saved_query_id_value', - ) - -def test_create_saved_query_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_saved_query in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_saved_query] = mock_rpc - request = {} - client.create_saved_query(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_saved_query(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_saved_query_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_saved_query in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_saved_query] = mock_rpc - - request = {} - await client.create_saved_query(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_saved_query(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_saved_query_async(transport: str = 'grpc_asyncio', request_type=asset_service.CreateSavedQueryRequest): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_saved_query), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery(
- name='name_value',
- description='description_value',
- creator='creator_value',
- last_updater='last_updater_value',
- ))
- response = await client.create_saved_query(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = asset_service.CreateSavedQueryRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, asset_service.SavedQuery)
- assert response.name == 'name_value'
- assert response.description == 'description_value'
- assert response.creator == 'creator_value'
- assert response.last_updater == 'last_updater_value'
-
-
-@pytest.mark.asyncio
-async def test_create_saved_query_async_from_dict():
- await test_create_saved_query_async(request_type=dict)
-
-def test_create_saved_query_field_headers():
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = asset_service.CreateSavedQueryRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_saved_query),
- '__call__') as call:
- call.return_value = asset_service.SavedQuery()
- client.create_saved_query(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_create_saved_query_field_headers_async():
- client = AssetServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = asset_service.CreateSavedQueryRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_saved_query),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery())
- await client.create_saved_query(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-def test_create_saved_query_flattened():
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_saved_query),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = asset_service.SavedQuery()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.create_saved_query( - parent='parent_value', - saved_query=asset_service.SavedQuery(name='name_value'), - saved_query_id='saved_query_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].saved_query - mock_val = asset_service.SavedQuery(name='name_value') - assert arg == mock_val - arg = args[0].saved_query_id - mock_val = 'saved_query_id_value' - assert arg == mock_val - - -def test_create_saved_query_flattened_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_saved_query( - asset_service.CreateSavedQueryRequest(), - parent='parent_value', - saved_query=asset_service.SavedQuery(name='name_value'), - saved_query_id='saved_query_id_value', - ) - -@pytest.mark.asyncio -async def test_create_saved_query_flattened_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_saved_query), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = asset_service.SavedQuery() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_saved_query( - parent='parent_value', - saved_query=asset_service.SavedQuery(name='name_value'), - saved_query_id='saved_query_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].saved_query - mock_val = asset_service.SavedQuery(name='name_value') - assert arg == mock_val - arg = args[0].saved_query_id - mock_val = 'saved_query_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_saved_query_flattened_error_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_saved_query( - asset_service.CreateSavedQueryRequest(), - parent='parent_value', - saved_query=asset_service.SavedQuery(name='name_value'), - saved_query_id='saved_query_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - asset_service.GetSavedQueryRequest, - dict, -]) -def test_get_saved_query(request_type, transport: str = 'grpc'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_saved_query), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', - ) - response = client.get_saved_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = asset_service.GetSavedQueryRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.SavedQuery) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.creator == 'creator_value' - assert response.last_updater == 'last_updater_value' - - -def test_get_saved_query_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = asset_service.GetSavedQueryRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_saved_query), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_saved_query(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.GetSavedQueryRequest( - name='name_value', - ) - -def test_get_saved_query_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_saved_query in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_saved_query] = mock_rpc - request = {} - client.get_saved_query(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.get_saved_query(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_saved_query_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = AssetServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.get_saved_query in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.get_saved_query] = mock_rpc
-
- request = {}
- await client.get_saved_query(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.get_saved_query(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_saved_query_async(transport: str = 'grpc_asyncio', request_type=asset_service.GetSavedQueryRequest):
- client = AssetServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_saved_query),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery(
- name='name_value',
- description='description_value',
- creator='creator_value',
- last_updater='last_updater_value',
- ))
- response = await client.get_saved_query(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = asset_service.GetSavedQueryRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, asset_service.SavedQuery)
- assert response.name == 'name_value'
- assert response.description == 'description_value'
- assert response.creator == 'creator_value'
- assert response.last_updater == 'last_updater_value'
-
-
-@pytest.mark.asyncio
-async def test_get_saved_query_async_from_dict():
- await test_get_saved_query_async(request_type=dict)
-
-def test_get_saved_query_field_headers():
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = asset_service.GetSavedQueryRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.get_saved_query), - '__call__') as call: - call.return_value = asset_service.SavedQuery() - client.get_saved_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_saved_query_field_headers_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = asset_service.GetSavedQueryRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_saved_query), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery()) - await client.get_saved_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_saved_query_flattened(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_saved_query), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = asset_service.SavedQuery() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_saved_query( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_saved_query_flattened_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_saved_query( - asset_service.GetSavedQueryRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_saved_query_flattened_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_saved_query), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = asset_service.SavedQuery() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_saved_query( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_saved_query_flattened_error_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_saved_query( - asset_service.GetSavedQueryRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - asset_service.ListSavedQueriesRequest, - dict, -]) -def test_list_saved_queries(request_type, transport: str = 'grpc'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_saved_queries), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = asset_service.ListSavedQueriesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_saved_queries(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = asset_service.ListSavedQueriesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListSavedQueriesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_saved_queries_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = asset_service.ListSavedQueriesRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_saved_queries), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.list_saved_queries(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.ListSavedQueriesRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', - ) - -def test_list_saved_queries_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_saved_queries in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_saved_queries] = mock_rpc - request = {} - client.list_saved_queries(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_saved_queries(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_saved_queries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_saved_queries in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_saved_queries] = mock_rpc - - request = {} - await client.list_saved_queries(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_saved_queries(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_saved_queries_async(transport: str = 'grpc_asyncio', request_type=asset_service.ListSavedQueriesRequest): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_saved_queries), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListSavedQueriesResponse(
- next_page_token='next_page_token_value',
- ))
- response = await client.list_saved_queries(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = asset_service.ListSavedQueriesRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListSavedQueriesAsyncPager)
- assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_saved_queries_async_from_dict():
- await test_list_saved_queries_async(request_type=dict)
-
-def test_list_saved_queries_field_headers():
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = asset_service.ListSavedQueriesRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_saved_queries),
- '__call__') as call:
- call.return_value = asset_service.ListSavedQueriesResponse()
- client.list_saved_queries(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_saved_queries_field_headers_async():
- client = AssetServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = asset_service.ListSavedQueriesRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_saved_queries),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListSavedQueriesResponse())
- await client.list_saved_queries(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-def test_list_saved_queries_flattened():
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_saved_queries),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = asset_service.ListSavedQueriesResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.list_saved_queries(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_saved_queries_flattened_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_saved_queries( - asset_service.ListSavedQueriesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_saved_queries_flattened_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_saved_queries), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = asset_service.ListSavedQueriesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListSavedQueriesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_saved_queries( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_saved_queries_flattened_error_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_saved_queries( - asset_service.ListSavedQueriesRequest(), - parent='parent_value', - ) - - -def test_list_saved_queries_pager(transport_name: str = "grpc"): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_saved_queries), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - asset_service.ListSavedQueriesResponse( - saved_queries=[ - asset_service.SavedQuery(), - asset_service.SavedQuery(), - asset_service.SavedQuery(), - ], - next_page_token='abc', - ), - asset_service.ListSavedQueriesResponse( - saved_queries=[], - next_page_token='def', - ), - asset_service.ListSavedQueriesResponse( - saved_queries=[ - asset_service.SavedQuery(), - ], - next_page_token='ghi', - ), - asset_service.ListSavedQueriesResponse( - saved_queries=[ - asset_service.SavedQuery(), - asset_service.SavedQuery(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_saved_queries(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, asset_service.SavedQuery) - for i in results) -def test_list_saved_queries_pages(transport_name: str = "grpc"): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_saved_queries), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - asset_service.ListSavedQueriesResponse( - saved_queries=[ - asset_service.SavedQuery(), - asset_service.SavedQuery(), - asset_service.SavedQuery(), - ], - next_page_token='abc', - ), - asset_service.ListSavedQueriesResponse( - saved_queries=[], - next_page_token='def', - ), - asset_service.ListSavedQueriesResponse( - saved_queries=[ - asset_service.SavedQuery(), - ], - next_page_token='ghi', - ), - asset_service.ListSavedQueriesResponse( - saved_queries=[ - asset_service.SavedQuery(), - asset_service.SavedQuery(), - ], - ), - RuntimeError, - ) - pages = list(client.list_saved_queries(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_saved_queries_async_pager(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_saved_queries), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
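-        # Note (illustrative gloss): `new_callable=mock.AsyncMock` makes the
-        # patched `__call__` awaitable, which the async pager requires; the
-        # page fixtures below mirror the synchronous pager test above.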
- call.side_effect = ( - asset_service.ListSavedQueriesResponse( - saved_queries=[ - asset_service.SavedQuery(), - asset_service.SavedQuery(), - asset_service.SavedQuery(), - ], - next_page_token='abc', - ), - asset_service.ListSavedQueriesResponse( - saved_queries=[], - next_page_token='def', - ), - asset_service.ListSavedQueriesResponse( - saved_queries=[ - asset_service.SavedQuery(), - ], - next_page_token='ghi', - ), - asset_service.ListSavedQueriesResponse( - saved_queries=[ - asset_service.SavedQuery(), - asset_service.SavedQuery(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_saved_queries(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, asset_service.SavedQuery) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_saved_queries_async_pages(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_saved_queries), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - asset_service.ListSavedQueriesResponse( - saved_queries=[ - asset_service.SavedQuery(), - asset_service.SavedQuery(), - asset_service.SavedQuery(), - ], - next_page_token='abc', - ), - asset_service.ListSavedQueriesResponse( - saved_queries=[], - next_page_token='def', - ), - asset_service.ListSavedQueriesResponse( - saved_queries=[ - asset_service.SavedQuery(), - ], - next_page_token='ghi', - ), - asset_service.ListSavedQueriesResponse( - saved_queries=[ - asset_service.SavedQuery(), - asset_service.SavedQuery(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_saved_queries(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - asset_service.UpdateSavedQueryRequest, - dict, -]) -def test_update_saved_query(request_type, transport: str = 'grpc'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_saved_query), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', - ) - response = client.update_saved_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = asset_service.UpdateSavedQueryRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, asset_service.SavedQuery) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.creator == 'creator_value' - assert response.last_updater == 'last_updater_value' - - -def test_update_saved_query_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = asset_service.UpdateSavedQueryRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_saved_query), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_saved_query(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.UpdateSavedQueryRequest( - ) - -def test_update_saved_query_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_saved_query in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_saved_query] = mock_rpc - request = {} - client.update_saved_query(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_saved_query(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_saved_query_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_saved_query in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_saved_query] = mock_rpc - - request = {} - await client.update_saved_query(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        await client.update_saved_query(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_update_saved_query_async(transport: str = 'grpc_asyncio', request_type=asset_service.UpdateSavedQueryRequest):
-    client = AssetServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_saved_query),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery(
-            name='name_value',
-            description='description_value',
-            creator='creator_value',
-            last_updater='last_updater_value',
-        ))
-        response = await client.update_saved_query(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = asset_service.UpdateSavedQueryRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, asset_service.SavedQuery)
-    assert response.name == 'name_value'
-    assert response.description == 'description_value'
-    assert response.creator == 'creator_value'
-    assert response.last_updater == 'last_updater_value'
-
-
-@pytest.mark.asyncio
-async def test_update_saved_query_async_from_dict():
-    await test_update_saved_query_async(request_type=dict)
-
-def test_update_saved_query_field_headers():
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = asset_service.UpdateSavedQueryRequest()
-
-    request.saved_query.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_saved_query),
-            '__call__') as call:
-        call.return_value = asset_service.SavedQuery()
-        client.update_saved_query(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'saved_query.name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_update_saved_query_field_headers_async():
-    client = AssetServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = asset_service.UpdateSavedQueryRequest()
-
-    request.saved_query.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_saved_query),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery())
-        await client.update_saved_query(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'saved_query.name=name_value',
-    ) in kw['metadata']
-
-
-def test_update_saved_query_flattened():
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_saved_query),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = asset_service.SavedQuery()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.update_saved_query(
-            saved_query=asset_service.SavedQuery(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].saved_query
-        mock_val = asset_service.SavedQuery(name='name_value')
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-
-def test_update_saved_query_flattened_error():
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.update_saved_query(
-            asset_service.UpdateSavedQueryRequest(),
-            saved_query=asset_service.SavedQuery(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-@pytest.mark.asyncio
-async def test_update_saved_query_flattened_async():
-    client = AssetServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_saved_query),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.update_saved_query(
-            saved_query=asset_service.SavedQuery(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].saved_query
-        mock_val = asset_service.SavedQuery(name='name_value')
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_saved_query_flattened_error_async():
-    client = AssetServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - await client.update_saved_query( - asset_service.UpdateSavedQueryRequest(), - saved_query=asset_service.SavedQuery(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - asset_service.DeleteSavedQueryRequest, - dict, -]) -def test_delete_saved_query(request_type, transport: str = 'grpc'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_saved_query), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_saved_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = asset_service.DeleteSavedQueryRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_saved_query_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = asset_service.DeleteSavedQueryRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_saved_query), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_saved_query(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.DeleteSavedQueryRequest( - name='name_value', - ) - -def test_delete_saved_query_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_saved_query in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_saved_query] = mock_rpc - request = {} - client.delete_saved_query(request) - - # Establish that the underlying gRPC stub method was called. 
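-        # Note (illustrative gloss): the client resolves the RPC through
-        # `_wrapped_methods` on every call, so installing `mock_rpc` above
-        # means both invocations below hit the mock and `wrap_method` never
-        # runs a second time.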
- assert mock_rpc.call_count == 1 - - client.delete_saved_query(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_saved_query_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_saved_query in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_saved_query] = mock_rpc - - request = {} - await client.delete_saved_query(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_saved_query(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_saved_query_async(transport: str = 'grpc_asyncio', request_type=asset_service.DeleteSavedQueryRequest): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_saved_query), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_saved_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = asset_service.DeleteSavedQueryRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_saved_query_async_from_dict(): - await test_delete_saved_query_async(request_type=dict) - -def test_delete_saved_query_field_headers(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = asset_service.DeleteSavedQueryRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_saved_query), - '__call__') as call: - call.return_value = None - client.delete_saved_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_delete_saved_query_field_headers_async():
-    client = AssetServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = asset_service.DeleteSavedQueryRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_saved_query),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        await client.delete_saved_query(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_delete_saved_query_flattened():
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_saved_query),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.delete_saved_query(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_delete_saved_query_flattened_error():
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_saved_query(
-            asset_service.DeleteSavedQueryRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_delete_saved_query_flattened_async():
-    client = AssetServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_saved_query),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.delete_saved_query(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_saved_query_flattened_error_async():
-    client = AssetServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - await client.delete_saved_query( - asset_service.DeleteSavedQueryRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - asset_service.BatchGetEffectiveIamPoliciesRequest, - dict, -]) -def test_batch_get_effective_iam_policies(request_type, transport: str = 'grpc'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_get_effective_iam_policies), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = asset_service.BatchGetEffectiveIamPoliciesResponse( - ) - response = client.batch_get_effective_iam_policies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = asset_service.BatchGetEffectiveIamPoliciesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.BatchGetEffectiveIamPoliciesResponse) - - -def test_batch_get_effective_iam_policies_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = asset_service.BatchGetEffectiveIamPoliciesRequest( - scope='scope_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_get_effective_iam_policies), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.batch_get_effective_iam_policies(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.BatchGetEffectiveIamPoliciesRequest( - scope='scope_value', - ) - -def test_batch_get_effective_iam_policies_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.batch_get_effective_iam_policies in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.batch_get_effective_iam_policies] = mock_rpc - request = {} - client.batch_get_effective_iam_policies(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.batch_get_effective_iam_policies(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_batch_get_effective_iam_policies_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = AssetServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.batch_get_effective_iam_policies in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.batch_get_effective_iam_policies] = mock_rpc
-
-        request = {}
-        await client.batch_get_effective_iam_policies(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.batch_get_effective_iam_policies(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_batch_get_effective_iam_policies_async(transport: str = 'grpc_asyncio', request_type=asset_service.BatchGetEffectiveIamPoliciesRequest):
-    client = AssetServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.batch_get_effective_iam_policies),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.BatchGetEffectiveIamPoliciesResponse(
-        ))
-        response = await client.batch_get_effective_iam_policies(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = asset_service.BatchGetEffectiveIamPoliciesRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, asset_service.BatchGetEffectiveIamPoliciesResponse)
-
-
-@pytest.mark.asyncio
-async def test_batch_get_effective_iam_policies_async_from_dict():
-    await test_batch_get_effective_iam_policies_async(request_type=dict)
-
-def test_batch_get_effective_iam_policies_field_headers():
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = asset_service.BatchGetEffectiveIamPoliciesRequest()
-
-    request.scope = 'scope_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
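-    # Note (illustrative gloss): patching `__call__` on the type of the
-    # transport's stub attribute intercepts the callable the client ultimately
-    # invokes, so the mock observes the final request object and its metadata.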
- with mock.patch.object( - type(client.transport.batch_get_effective_iam_policies), - '__call__') as call: - call.return_value = asset_service.BatchGetEffectiveIamPoliciesResponse() - client.batch_get_effective_iam_policies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'scope=scope_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_batch_get_effective_iam_policies_field_headers_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = asset_service.BatchGetEffectiveIamPoliciesRequest() - - request.scope = 'scope_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_get_effective_iam_policies), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.BatchGetEffectiveIamPoliciesResponse()) - await client.batch_get_effective_iam_policies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'scope=scope_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeOrgPoliciesRequest, - dict, -]) -def test_analyze_org_policies(request_type, transport: str = 'grpc'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_org_policies), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = asset_service.AnalyzeOrgPoliciesResponse( - next_page_token='next_page_token_value', - ) - response = client.analyze_org_policies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = asset_service.AnalyzeOrgPoliciesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.AnalyzeOrgPoliciesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_analyze_org_policies_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = asset_service.AnalyzeOrgPoliciesRequest( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_org_policies), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.analyze_org_policies(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeOrgPoliciesRequest( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', - page_token='page_token_value', - ) - -def test_analyze_org_policies_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.analyze_org_policies in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.analyze_org_policies] = mock_rpc - request = {} - client.analyze_org_policies(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.analyze_org_policies(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_analyze_org_policies_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.analyze_org_policies in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.analyze_org_policies] = mock_rpc - - request = {} - await client.analyze_org_policies(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        await client.analyze_org_policies(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_analyze_org_policies_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeOrgPoliciesRequest):
-    client = AssetServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.analyze_org_policies),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPoliciesResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.analyze_org_policies(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = asset_service.AnalyzeOrgPoliciesRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.AnalyzeOrgPoliciesAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_analyze_org_policies_async_from_dict():
-    await test_analyze_org_policies_async(request_type=dict)
-
-def test_analyze_org_policies_field_headers():
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = asset_service.AnalyzeOrgPoliciesRequest()
-
-    request.scope = 'scope_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.analyze_org_policies),
-            '__call__') as call:
-        call.return_value = asset_service.AnalyzeOrgPoliciesResponse()
-        client.analyze_org_policies(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'scope=scope_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_analyze_org_policies_field_headers_async():
-    client = AssetServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = asset_service.AnalyzeOrgPoliciesRequest()
-
-    request.scope = 'scope_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.analyze_org_policies),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPoliciesResponse())
-        await client.analyze_org_policies(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
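-    # Note (illustrative gloss): the routing header travels in the call's
-    # keyword metadata as the pair ('x-goog-request-params', 'scope=scope_value'),
-    # which the membership check below asserts verbatim.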
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'scope=scope_value',
-    ) in kw['metadata']
-
-
-def test_analyze_org_policies_flattened():
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.analyze_org_policies),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = asset_service.AnalyzeOrgPoliciesResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.analyze_org_policies(
-            scope='scope_value',
-            constraint='constraint_value',
-            filter='filter_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].scope
-        mock_val = 'scope_value'
-        assert arg == mock_val
-        arg = args[0].constraint
-        mock_val = 'constraint_value'
-        assert arg == mock_val
-        arg = args[0].filter
-        mock_val = 'filter_value'
-        assert arg == mock_val
-
-
-def test_analyze_org_policies_flattened_error():
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.analyze_org_policies(
-            asset_service.AnalyzeOrgPoliciesRequest(),
-            scope='scope_value',
-            constraint='constraint_value',
-            filter='filter_value',
-        )
-
-@pytest.mark.asyncio
-async def test_analyze_org_policies_flattened_async():
-    client = AssetServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.analyze_org_policies),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPoliciesResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.analyze_org_policies(
-            scope='scope_value',
-            constraint='constraint_value',
-            filter='filter_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].scope
-        mock_val = 'scope_value'
-        assert arg == mock_val
-        arg = args[0].constraint
-        mock_val = 'constraint_value'
-        assert arg == mock_val
-        arg = args[0].filter
-        mock_val = 'filter_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_analyze_org_policies_flattened_error_async():
-    client = AssetServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.analyze_org_policies(
-            asset_service.AnalyzeOrgPoliciesRequest(),
-            scope='scope_value',
-            constraint='constraint_value',
-            filter='filter_value',
-        )
-
-
-def test_analyze_org_policies_pager(transport_name: str = "grpc"):
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.analyze_org_policies), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - asset_service.AnalyzeOrgPoliciesResponse( - org_policy_results=[ - asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), - asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), - asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), - ], - next_page_token='abc', - ), - asset_service.AnalyzeOrgPoliciesResponse( - org_policy_results=[], - next_page_token='def', - ), - asset_service.AnalyzeOrgPoliciesResponse( - org_policy_results=[ - asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), - ], - next_page_token='ghi', - ), - asset_service.AnalyzeOrgPoliciesResponse( - org_policy_results=[ - asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), - asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('scope', ''), - )), - ) - pager = client.analyze_org_policies(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult) - for i in results) -def test_analyze_org_policies_pages(transport_name: str = "grpc"): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_org_policies), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - asset_service.AnalyzeOrgPoliciesResponse( - org_policy_results=[ - asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), - asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), - asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), - ], - next_page_token='abc', - ), - asset_service.AnalyzeOrgPoliciesResponse( - org_policy_results=[], - next_page_token='def', - ), - asset_service.AnalyzeOrgPoliciesResponse( - org_policy_results=[ - asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), - ], - next_page_token='ghi', - ), - asset_service.AnalyzeOrgPoliciesResponse( - org_policy_results=[ - asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), - asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), - ], - ), - RuntimeError, - ) - pages = list(client.analyze_org_policies(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_analyze_org_policies_async_pager(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_org_policies), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - asset_service.AnalyzeOrgPoliciesResponse( - org_policy_results=[ - asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), - asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), - asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), - ], - next_page_token='abc', - ), - asset_service.AnalyzeOrgPoliciesResponse( - org_policy_results=[], - next_page_token='def', - ), - asset_service.AnalyzeOrgPoliciesResponse( - org_policy_results=[ - asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), - ], - next_page_token='ghi', - ), - asset_service.AnalyzeOrgPoliciesResponse( - org_policy_results=[ - asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), - asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), - ], - ), - RuntimeError, - ) - async_pager = await client.analyze_org_policies(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult) - for i in responses) - - -@pytest.mark.asyncio -async def test_analyze_org_policies_async_pages(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_org_policies), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - asset_service.AnalyzeOrgPoliciesResponse( - org_policy_results=[ - asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), - asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), - asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), - ], - next_page_token='abc', - ), - asset_service.AnalyzeOrgPoliciesResponse( - org_policy_results=[], - next_page_token='def', - ), - asset_service.AnalyzeOrgPoliciesResponse( - org_policy_results=[ - asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), - ], - next_page_token='ghi', - ), - asset_service.AnalyzeOrgPoliciesResponse( - org_policy_results=[ - asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), - asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.analyze_org_policies(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeOrgPolicyGovernedContainersRequest, - dict, -]) -def test_analyze_org_policy_governed_containers(request_type, transport: str = 'grpc'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_org_policy_governed_containers), - '__call__') as call: - # Designate an appropriate return value for the call. 
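-        # Note (illustrative gloss): a plain response proto suffices for the
-        # synchronous stub mock; the async variants instead wrap theirs in
-        # grpc_helpers_async.FakeUnaryUnaryCall so the client can await them.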
- call.return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse( - next_page_token='next_page_token_value', - ) - response = client.analyze_org_policy_governed_containers(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedContainersPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_analyze_org_policy_governed_containers_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_org_policy_governed_containers), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.analyze_org_policy_governed_containers(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeOrgPolicyGovernedContainersRequest( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', - page_token='page_token_value', - ) - -def test_analyze_org_policy_governed_containers_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.analyze_org_policy_governed_containers in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.analyze_org_policy_governed_containers] = mock_rpc - request = {} - client.analyze_org_policy_governed_containers(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.analyze_org_policy_governed_containers(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_analyze_org_policy_governed_containers_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = AssetServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.analyze_org_policy_governed_containers in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.analyze_org_policy_governed_containers] = mock_rpc
-
-        request = {}
-        await client.analyze_org_policy_governed_containers(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.analyze_org_policy_governed_containers(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_analyze_org_policy_governed_containers_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeOrgPolicyGovernedContainersRequest):
-    client = AssetServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.analyze_org_policy_governed_containers),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedContainersResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.analyze_org_policy_governed_containers(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedContainersAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_analyze_org_policy_governed_containers_async_from_dict():
-    await test_analyze_org_policy_governed_containers_async(request_type=dict)
-
-def test_analyze_org_policy_governed_containers_field_headers():
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
- request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest() - - request.scope = 'scope_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_org_policy_governed_containers), - '__call__') as call: - call.return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() - client.analyze_org_policy_governed_containers(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'scope=scope_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_analyze_org_policy_governed_containers_field_headers_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest() - - request.scope = 'scope_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_org_policy_governed_containers), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedContainersResponse()) - await client.analyze_org_policy_governed_containers(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'scope=scope_value', - ) in kw['metadata'] - - -def test_analyze_org_policy_governed_containers_flattened(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_org_policy_governed_containers), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.analyze_org_policy_governed_containers( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].scope - mock_val = 'scope_value' - assert arg == mock_val - arg = args[0].constraint - mock_val = 'constraint_value' - assert arg == mock_val - arg = args[0].filter - mock_val = 'filter_value' - assert arg == mock_val - - -def test_analyze_org_policy_governed_containers_flattened_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
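The ``flattened_error`` test that begins above (its ``pytest.raises`` block resumes just below) exercises a client-side guard: a generated method accepts either a fully formed request object or the individual flattened fields, never both, and mixing them raises ``ValueError`` before any RPC is attempted. A minimal sketch of the two calling conventions, assuming the published ``google-cloud-asset`` package is installed (no network traffic occurs, so anonymous credentials suffice):

.. code-block:: python

    from google.auth import credentials as ga_credentials
    from google.cloud import asset_v1

    client = asset_v1.AssetServiceClient(
        credentials=ga_credentials.AnonymousCredentials())

    # Either pass a fully formed request object...
    request = asset_v1.AnalyzeOrgPolicyGovernedContainersRequest(
        scope='organizations/123', constraint='constraints/example')
    # ...or pass the flattened fields as keyword arguments, but never both.
    try:
        client.analyze_org_policy_governed_containers(
            request, scope='organizations/123')
    except ValueError:
        print('request objects and flattened fields are mutually exclusive')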
-    with pytest.raises(ValueError):
-        client.analyze_org_policy_governed_containers(
-            asset_service.AnalyzeOrgPolicyGovernedContainersRequest(),
-            scope='scope_value',
-            constraint='constraint_value',
-            filter='filter_value',
-        )
-
-@pytest.mark.asyncio
-async def test_analyze_org_policy_governed_containers_flattened_async():
-    client = AssetServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.analyze_org_policy_governed_containers),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedContainersResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.analyze_org_policy_governed_containers(
-            scope='scope_value',
-            constraint='constraint_value',
-            filter='filter_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].scope
-        mock_val = 'scope_value'
-        assert arg == mock_val
-        arg = args[0].constraint
-        mock_val = 'constraint_value'
-        assert arg == mock_val
-        arg = args[0].filter
-        mock_val = 'filter_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_analyze_org_policy_governed_containers_flattened_error_async():
-    client = AssetServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.analyze_org_policy_governed_containers(
-            asset_service.AnalyzeOrgPolicyGovernedContainersRequest(),
-            scope='scope_value',
-            constraint='constraint_value',
-            filter='filter_value',
-        )
-
-
-def test_analyze_org_policy_governed_containers_pager(transport_name: str = "grpc"):
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.analyze_org_policy_governed_containers),
-            '__call__') as call:
-        # Set the response to a series of pages.
- call.side_effect = ( - asset_service.AnalyzeOrgPolicyGovernedContainersResponse( - governed_containers=[ - asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), - asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), - asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), - ], - next_page_token='abc', - ), - asset_service.AnalyzeOrgPolicyGovernedContainersResponse( - governed_containers=[], - next_page_token='def', - ), - asset_service.AnalyzeOrgPolicyGovernedContainersResponse( - governed_containers=[ - asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), - ], - next_page_token='ghi', - ), - asset_service.AnalyzeOrgPolicyGovernedContainersResponse( - governed_containers=[ - asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), - asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('scope', ''), - )), - ) - pager = client.analyze_org_policy_governed_containers(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer) - for i in results) -def test_analyze_org_policy_governed_containers_pages(transport_name: str = "grpc"): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_org_policy_governed_containers), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - asset_service.AnalyzeOrgPolicyGovernedContainersResponse( - governed_containers=[ - asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), - asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), - asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), - ], - next_page_token='abc', - ), - asset_service.AnalyzeOrgPolicyGovernedContainersResponse( - governed_containers=[], - next_page_token='def', - ), - asset_service.AnalyzeOrgPolicyGovernedContainersResponse( - governed_containers=[ - asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), - ], - next_page_token='ghi', - ), - asset_service.AnalyzeOrgPolicyGovernedContainersResponse( - governed_containers=[ - asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), - asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), - ], - ), - RuntimeError, - ) - pages = list(client.analyze_org_policy_governed_containers(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_analyze_org_policy_governed_containers_async_pager(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
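These pager tests drive the client through ``call.side_effect``: each invocation of the mocked stub returns the next page, and the trailing ``RuntimeError`` would only fire if the pager fetched past the final page (the async variant continues below). The loop being exercised can be sketched with nothing but ``unittest.mock``; the names here are illustrative stand-ins, not part of the library surface:

.. code-block:: python

    from unittest import mock

    def make_page(items, token):
        # Stand-in for a response message with items and a next_page_token.
        page = mock.Mock()
        page.items, page.next_page_token = items, token
        return page

    # Each call yields the next page; an empty token ends the iteration.
    rpc = mock.Mock(side_effect=[
        make_page(['a', 'b'], 'abc'),
        make_page([], 'def'),
        make_page(['c'], ''),
        RuntimeError,  # only raised if a caller over-fetches
    ])

    results, token = [], ''
    while True:
        page = rpc(page_token=token)
        results.extend(page.items)
        token = page.next_page_token
        if not token:
            break

    assert results == ['a', 'b', 'c']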
- with mock.patch.object( - type(client.transport.analyze_org_policy_governed_containers), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - asset_service.AnalyzeOrgPolicyGovernedContainersResponse( - governed_containers=[ - asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), - asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), - asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), - ], - next_page_token='abc', - ), - asset_service.AnalyzeOrgPolicyGovernedContainersResponse( - governed_containers=[], - next_page_token='def', - ), - asset_service.AnalyzeOrgPolicyGovernedContainersResponse( - governed_containers=[ - asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), - ], - next_page_token='ghi', - ), - asset_service.AnalyzeOrgPolicyGovernedContainersResponse( - governed_containers=[ - asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), - asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), - ], - ), - RuntimeError, - ) - async_pager = await client.analyze_org_policy_governed_containers(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer) - for i in responses) - - -@pytest.mark.asyncio -async def test_analyze_org_policy_governed_containers_async_pages(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_org_policy_governed_containers), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - asset_service.AnalyzeOrgPolicyGovernedContainersResponse( - governed_containers=[ - asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), - asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), - asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), - ], - next_page_token='abc', - ), - asset_service.AnalyzeOrgPolicyGovernedContainersResponse( - governed_containers=[], - next_page_token='def', - ), - asset_service.AnalyzeOrgPolicyGovernedContainersResponse( - governed_containers=[ - asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), - ], - next_page_token='ghi', - ), - asset_service.AnalyzeOrgPolicyGovernedContainersResponse( - governed_containers=[ - asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), - asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.analyze_org_policy_governed_containers(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, - dict, -]) -def test_analyze_org_policy_governed_assets(request_type, transport: str = 'grpc'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_org_policy_governed_assets), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( - next_page_token='next_page_token_value', - ) - response = client.analyze_org_policy_governed_assets(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedAssetsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_analyze_org_policy_governed_assets_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.analyze_org_policy_governed_assets), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.analyze_org_policy_governed_assets(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeOrgPolicyGovernedAssetsRequest( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', - page_token='page_token_value', - ) - -def test_analyze_org_policy_governed_assets_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.analyze_org_policy_governed_assets in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.analyze_org_policy_governed_assets] = mock_rpc - request = {} - client.analyze_org_policy_governed_assets(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.analyze_org_policy_governed_assets(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_analyze_org_policy_governed_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.analyze_org_policy_governed_assets in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.analyze_org_policy_governed_assets] = mock_rpc - - request = {} - await client.analyze_org_policy_governed_assets(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.analyze_org_policy_governed_assets(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_analyze_org_policy_governed_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.analyze_org_policy_governed_assets),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedAssetsResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.analyze_org_policy_governed_assets(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedAssetsAsyncPager)
-        assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_analyze_org_policy_governed_assets_async_from_dict():
-    await test_analyze_org_policy_governed_assets_async(request_type=dict)
-
-def test_analyze_org_policy_governed_assets_field_headers():
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest()
-
-    request.scope = 'scope_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.analyze_org_policy_governed_assets),
-            '__call__') as call:
-        call.return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse()
-        client.analyze_org_policy_governed_assets(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'scope=scope_value',
-        ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_analyze_org_policy_governed_assets_field_headers_async():
-    client = AssetServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest()
-
-    request.scope = 'scope_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.analyze_org_policy_governed_assets),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedAssetsResponse())
-        await client.analyze_org_policy_governed_assets(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'scope=scope_value',
-        ) in kw['metadata']
-
-
-def test_analyze_org_policy_governed_assets_flattened():
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
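The ``field_headers`` tests above assert that the routing information a server needs is carried in the ``x-goog-request-params`` metadata entry. That header is built by the same ``google.api_core`` helper the pager tests use for their expected metadata; a two-line sketch of its behaviour (field/value pairs are URL-encoded into a single header):

.. code-block:: python

    from google.api_core.gapic_v1 import routing_header

    metadata = routing_header.to_grpc_metadata((('scope', 'scope_value'),))
    assert metadata == ('x-goog-request-params', 'scope=scope_value')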
-    with mock.patch.object(
-            type(client.transport.analyze_org_policy_governed_assets),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.analyze_org_policy_governed_assets(
-            scope='scope_value',
-            constraint='constraint_value',
-            filter='filter_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].scope
-        mock_val = 'scope_value'
-        assert arg == mock_val
-        arg = args[0].constraint
-        mock_val = 'constraint_value'
-        assert arg == mock_val
-        arg = args[0].filter
-        mock_val = 'filter_value'
-        assert arg == mock_val
-
-
-def test_analyze_org_policy_governed_assets_flattened_error():
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.analyze_org_policy_governed_assets(
-            asset_service.AnalyzeOrgPolicyGovernedAssetsRequest(),
-            scope='scope_value',
-            constraint='constraint_value',
-            filter='filter_value',
-        )
-
-@pytest.mark.asyncio
-async def test_analyze_org_policy_governed_assets_flattened_async():
-    client = AssetServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.analyze_org_policy_governed_assets),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedAssetsResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.analyze_org_policy_governed_assets(
-            scope='scope_value',
-            constraint='constraint_value',
-            filter='filter_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].scope
-        mock_val = 'scope_value'
-        assert arg == mock_val
-        arg = args[0].constraint
-        mock_val = 'constraint_value'
-        assert arg == mock_val
-        arg = args[0].filter
-        mock_val = 'filter_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_analyze_org_policy_governed_assets_flattened_error_async():
-    client = AssetServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.analyze_org_policy_governed_assets(
-            asset_service.AnalyzeOrgPolicyGovernedAssetsRequest(),
-            scope='scope_value',
-            constraint='constraint_value',
-            filter='filter_value',
-        )
-
-
-def test_analyze_org_policy_governed_assets_pager(transport_name: str = "grpc"):
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.analyze_org_policy_governed_assets), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( - governed_assets=[ - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), - ], - next_page_token='abc', - ), - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( - governed_assets=[], - next_page_token='def', - ), - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( - governed_assets=[ - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), - ], - next_page_token='ghi', - ), - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( - governed_assets=[ - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('scope', ''), - )), - ) - pager = client.analyze_org_policy_governed_assets(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset) - for i in results) -def test_analyze_org_policy_governed_assets_pages(transport_name: str = "grpc"): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_org_policy_governed_assets), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( - governed_assets=[ - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), - ], - next_page_token='abc', - ), - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( - governed_assets=[], - next_page_token='def', - ), - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( - governed_assets=[ - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), - ], - next_page_token='ghi', - ), - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( - governed_assets=[ - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), - ], - ), - RuntimeError, - ) - pages = list(client.analyze_org_policy_governed_assets(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_analyze_org_policy_governed_assets_async_pager(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_org_policy_governed_assets), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( - governed_assets=[ - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), - ], - next_page_token='abc', - ), - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( - governed_assets=[], - next_page_token='def', - ), - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( - governed_assets=[ - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), - ], - next_page_token='ghi', - ), - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( - governed_assets=[ - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), - ], - ), - RuntimeError, - ) - async_pager = await client.analyze_org_policy_governed_assets(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset) - for i in responses) - - -@pytest.mark.asyncio -async def test_analyze_org_policy_governed_assets_async_pages(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_org_policy_governed_assets), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( - governed_assets=[ - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), - ], - next_page_token='abc', - ), - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( - governed_assets=[], - next_page_token='def', - ), - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( - governed_assets=[ - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), - ], - next_page_token='ghi', - ), - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( - governed_assets=[ - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.analyze_org_policy_governed_assets(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_export_assets_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert 
client._transport.export_assets in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.export_assets] = mock_rpc - - request = {} - client.export_assets(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.export_assets(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_export_assets_rest_required_fields(request_type=asset_service.ExportAssetsRequest): - transport_class = transports.AssetServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_assets._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_assets._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
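The ``transcode_result`` dict faked just below mirrors what the real ``google.api_core.path_template.transcode`` returns: given the http rule for a method, it splits a request into URI, HTTP verb, body, and query parameters. A small sketch with an illustrative rule (the actual ExportAssets rule lives in the generated REST transport):

.. code-block:: python

    from google.api_core import path_template

    # An illustrative http rule; transcode() raises if no rule matches.
    http_options = [
        {'method': 'post', 'uri': '/v1/{parent=*/*}:exportAssets', 'body': '*'},
    ]
    result = path_template.transcode(http_options, parent='projects/sample1')
    assert result['uri'] == '/v1/projects/sample1:exportAssets'
    assert result['method'] == 'post'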
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.export_assets(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_export_assets_rest_unset_required_fields():
-    transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.export_assets._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("parent", "outputConfig", )))
-
-
-def test_list_assets_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = AssetServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.list_assets in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.list_assets] = mock_rpc
-
-        request = {}
-        client.list_assets(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.list_assets(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_list_assets_rest_required_fields(request_type=asset_service.ListAssetsRequest):
-    transport_class = transports.AssetServiceRestTransport
-
-    request_init = {}
-    request_init["parent"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_assets._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["parent"] = 'parent_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_assets._get_unset_required_fields(jsonified_request)
-    # Check that path parameters and body parameters are not mixing in.
-    assert not set(unset_fields) - set(("asset_types", "content_type", "page_size", "page_token", "read_time", "relationship_types", ))
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "parent" in jsonified_request
-    assert jsonified_request["parent"] == 'parent_value'
-
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = asset_service.ListAssetsResponse()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = asset_service.ListAssetsResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.list_assets(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_list_assets_rest_unset_required_fields():
-    transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.list_assets._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("assetTypes", "contentType", "pageSize", "pageToken", "readTime", "relationshipTypes", )) & set(("parent", )))
-
-
-def test_list_assets_rest_flattened():
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = asset_service.ListAssetsResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'parent': 'sample1/sample2'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            parent='parent_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = asset_service.ListAssetsResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.list_assets(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{parent=*/*}/assets" % client.transport._host, args[1])
-
-
-def test_list_assets_rest_flattened_error(transport: str = 'rest'):
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_assets(
-            asset_service.ListAssetsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_assets_rest_pager(transport: str = 'rest'):
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # Set the response as a series of pages
-        response = (
-            asset_service.ListAssetsResponse(
-                assets=[
-                    assets.Asset(),
-                    assets.Asset(),
-                    assets.Asset(),
-                ],
-                next_page_token='abc',
-            ),
-            asset_service.ListAssetsResponse(
-                assets=[],
-                next_page_token='def',
-            ),
-            asset_service.ListAssetsResponse(
-                assets=[
-                    assets.Asset(),
-                ],
-                next_page_token='ghi',
-            ),
-            asset_service.ListAssetsResponse(
-                assets=[
-                    assets.Asset(),
-                    assets.Asset(),
-                ],
-            ),
-        )
-        # Two responses for two calls
-        response = response + response
-
-        # Wrap the values into proper Response objs
-        response = tuple(asset_service.ListAssetsResponse.to_json(x) for x in response)
-        return_values = tuple(Response() for i in response)
-        for return_val, response_val in zip(return_values, response):
-            return_val._content = response_val.encode('UTF-8')
-            return_val.status_code = 200
-        req.side_effect = return_values
-
-        sample_request = {'parent': 'sample1/sample2'}
-
-        pager = client.list_assets(request=sample_request)
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, assets.Asset)
-                   for i in results)
-
-        pages = list(client.list_assets(request=sample_request).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-
-def test_batch_get_assets_history_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = AssetServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.batch_get_assets_history in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.batch_get_assets_history] = mock_rpc
-
-        request = {}
-        client.batch_get_assets_history(request)
-
-        # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1 - - client.batch_get_assets_history(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_batch_get_assets_history_rest_required_fields(request_type=asset_service.BatchGetAssetsHistoryRequest): - transport_class = transports.AssetServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_get_assets_history._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_get_assets_history._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("asset_names", "content_type", "read_time_window", "relationship_types", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = asset_service.BatchGetAssetsHistoryResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
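The ``required_fields`` tests in this section lean on a quirk of proto JSON serialization: ``json_format.MessageToJson`` renders field names in camelCase (e.g. ``readTimeWindow``) and omits any field still at its default value, which is why each test must re-populate required fields such as ``parent`` before asserting on the jsonified request. A short sketch, assuming the ``google-cloud-asset`` package is installed:

.. code-block:: python

    import json

    from google.protobuf import json_format
    from google.cloud.asset_v1.types import asset_service

    request = asset_service.BatchGetAssetsHistoryRequest(parent='')
    pb = asset_service.BatchGetAssetsHistoryRequest.pb(request)
    assert 'parent' not in json.loads(json_format.MessageToJson(pb))

    request.parent = 'projects/sample1'
    pb = asset_service.BatchGetAssetsHistoryRequest.pb(request)
    assert json.loads(json_format.MessageToJson(pb)) == {'parent': 'projects/sample1'}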
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = asset_service.BatchGetAssetsHistoryResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.batch_get_assets_history(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_batch_get_assets_history_rest_unset_required_fields():
-    transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.batch_get_assets_history._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("assetNames", "contentType", "readTimeWindow", "relationshipTypes", )) & set(("parent", )))
-
-
-def test_create_feed_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = AssetServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.create_feed in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.create_feed] = mock_rpc
-
-        request = {}
-        client.create_feed(request)
-
-        # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1 - - client.create_feed(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_feed_rest_required_fields(request_type=asset_service.CreateFeedRequest): - transport_class = transports.AssetServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["feed_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_feed._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - jsonified_request["feedId"] = 'feed_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_feed._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "feedId" in jsonified_request - assert jsonified_request["feedId"] == 'feed_id_value' - - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = asset_service.Feed() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = asset_service.Feed.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.create_feed(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_create_feed_rest_unset_required_fields():
-    transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.create_feed._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("parent", "feedId", "feed", )))
-
-
-def test_create_feed_rest_flattened():
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = asset_service.Feed()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'parent': 'sample1/sample2'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            parent='parent_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = asset_service.Feed.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.create_feed(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{parent=*/*}/feeds" % client.transport._host, args[1])
-
-
-def test_create_feed_rest_flattened_error(transport: str = 'rest'):
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - client.create_feed( - asset_service.CreateFeedRequest(), - parent='parent_value', - ) - - -def test_get_feed_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_feed in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_feed] = mock_rpc - - request = {} - client.get_feed(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_feed(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_feed_rest_required_fields(request_type=asset_service.GetFeedRequest): - transport_class = transports.AssetServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_feed._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_feed._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = asset_service.Feed() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = asset_service.Feed.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_feed(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_feed_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_feed._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_feed_rest_flattened(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.Feed() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'sample1/sample2/feeds/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.Feed.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_feed(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=*/*/feeds/*}" % client.transport._host, args[1]) - - -def test_get_feed_rest_flattened_error(transport: str = 'rest'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_feed( - asset_service.GetFeedRequest(), - name='name_value', - ) - - -def test_list_feeds_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_feeds in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_feeds] = mock_rpc - - request = {} - client.list_feeds(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_feeds(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_feeds_rest_required_fields(request_type=asset_service.ListFeedsRequest): - transport_class = transports.AssetServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_feeds._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_feeds._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = asset_service.ListFeedsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = asset_service.ListFeedsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_feeds(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_feeds_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_feeds._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", ))) - - -def test_list_feeds_rest_flattened(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.ListFeedsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'sample1/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.ListFeedsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_feeds(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=*/*}/feeds" % client.transport._host, args[1]) - - -def test_list_feeds_rest_flattened_error(transport: str = 'rest'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
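- # Either form alone is valid; only combining them is rejected, e.g.:
- #
- #     client.list_feeds(asset_service.ListFeedsRequest(parent='parent_value'))
- #     client.list_feeds(parent='parent_value')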
- with pytest.raises(ValueError): - client.list_feeds( - asset_service.ListFeedsRequest(), - parent='parent_value', - ) - - -def test_update_feed_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_feed in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_feed] = mock_rpc - - request = {} - client.update_feed(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_feed(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_feed_rest_required_fields(request_type=asset_service.UpdateFeedRequest): - transport_class = transports.AssetServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_feed._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_feed._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = asset_service.Feed() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
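- # UpdateFeed maps to an HTTP PATCH that carries the request in the
- # body, so the stub below also sets transcode_result['body']; the
- # GET-style stubs above leave the body unset, and everything surfaces
- # in query_params instead.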
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = asset_service.Feed.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_feed(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_feed_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_feed._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("feed", "updateMask", ))) - - -def test_update_feed_rest_flattened(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.Feed() - - # get arguments that satisfy an http rule for this method - sample_request = {'feed': {'name': 'sample1/sample2/feeds/sample3'}} - - # get truthy value for each flattened field - mock_args = dict( - feed=asset_service.Feed(name='name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.Feed.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_feed(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{feed.name=*/*/feeds/*}" % client.transport._host, args[1]) - - -def test_update_feed_rest_flattened_error(transport: str = 'rest'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_feed( - asset_service.UpdateFeedRequest(), - feed=asset_service.Feed(name='name_value'), - ) - - -def test_delete_feed_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_feed in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_feed] = mock_rpc - - request = {} - client.delete_feed(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_feed(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_feed_rest_required_fields(request_type=asset_service.DeleteFeedRequest): - transport_class = transports.AssetServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_feed._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_feed._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
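- # DeleteFeed returns google.protobuf.Empty, so the faked HTTP body in
- # this test is an empty string rather than a serialized message.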
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_feed(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_feed_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_feed._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_delete_feed_rest_flattened(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'sample1/sample2/feeds/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_feed(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=*/*/feeds/*}" % client.transport._host, args[1]) - - -def test_delete_feed_rest_flattened_error(transport: str = 'rest'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_feed( - asset_service.DeleteFeedRequest(), - name='name_value', - ) - - -def test_search_all_resources_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.search_all_resources in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
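- # _wrapped_methods maps each transport method to the callable produced
- # by gapic_v1.method.wrap_method(), which layers retry/timeout/metadata
- # handling over the bare transport call; swapping in a plain Mock below
- # lets the test count invocations without re-wrapping.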
- client._transport._wrapped_methods[client._transport.search_all_resources] = mock_rpc - - request = {} - client.search_all_resources(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.search_all_resources(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_search_all_resources_rest_required_fields(request_type=asset_service.SearchAllResourcesRequest): - transport_class = transports.AssetServiceRestTransport - - request_init = {} - request_init["scope"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_resources._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["scope"] = 'scope_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_resources._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("asset_types", "order_by", "page_size", "page_token", "query", "read_mask", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "scope" in jsonified_request - assert jsonified_request["scope"] == 'scope_value' - - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = asset_service.SearchAllResourcesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = asset_service.SearchAllResourcesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.search_all_resources(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_search_all_resources_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.search_all_resources._get_unset_required_fields({}) - assert set(unset_fields) == (set(("assetTypes", "orderBy", "pageSize", "pageToken", "query", "readMask", )) & set(("scope", ))) - - -def test_search_all_resources_rest_flattened(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.SearchAllResourcesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'scope': 'sample1/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - scope='scope_value', - query='query_value', - asset_types=['asset_types_value'], - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.SearchAllResourcesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.search_all_resources(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{scope=*/*}:searchAllResources" % client.transport._host, args[1]) - - -def test_search_all_resources_rest_flattened_error(transport: str = 'rest'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.search_all_resources( - asset_service.SearchAllResourcesRequest(), - scope='scope_value', - query='query_value', - asset_types=['asset_types_value'], - ) - - -def test_search_all_resources_rest_pager(transport: str = 'rest'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. 
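- # REST pagers issue one Session.request per page, so the test queues
- # one fake Response per page via req.side_effect; iteration stops at
- # the page whose next_page_token is empty.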
- with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - asset_service.SearchAllResourcesResponse( - results=[ - assets.ResourceSearchResult(), - assets.ResourceSearchResult(), - assets.ResourceSearchResult(), - ], - next_page_token='abc', - ), - asset_service.SearchAllResourcesResponse( - results=[], - next_page_token='def', - ), - asset_service.SearchAllResourcesResponse( - results=[ - assets.ResourceSearchResult(), - ], - next_page_token='ghi', - ), - asset_service.SearchAllResourcesResponse( - results=[ - assets.ResourceSearchResult(), - assets.ResourceSearchResult(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(asset_service.SearchAllResourcesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'scope': 'sample1/sample2'} - - pager = client.search_all_resources(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, assets.ResourceSearchResult) - for i in results) - - pages = list(client.search_all_resources(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_search_all_iam_policies_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.search_all_iam_policies in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.search_all_iam_policies] = mock_rpc - - request = {} - client.search_all_iam_policies(request) - - # Establish that the underlying gRPC stub method was called. 
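- # The second client call below leaves wrapper_fn.call_count at zero,
- # showing the wrapper was built once at client construction and
- # cached, not rebuilt per call.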
- assert mock_rpc.call_count == 1 - - client.search_all_iam_policies(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_search_all_iam_policies_rest_required_fields(request_type=asset_service.SearchAllIamPoliciesRequest): - transport_class = transports.AssetServiceRestTransport - - request_init = {} - request_init["scope"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_iam_policies._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["scope"] = 'scope_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_iam_policies._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("asset_types", "order_by", "page_size", "page_token", "query", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "scope" in jsonified_request - assert jsonified_request["scope"] == 'scope_value' - - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = asset_service.SearchAllIamPoliciesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = asset_service.SearchAllIamPoliciesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.search_all_iam_policies(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_search_all_iam_policies_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.search_all_iam_policies._get_unset_required_fields({}) - assert set(unset_fields) == (set(("assetTypes", "orderBy", "pageSize", "pageToken", "query", )) & set(("scope", ))) - - -def test_search_all_iam_policies_rest_flattened(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.SearchAllIamPoliciesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'scope': 'sample1/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - scope='scope_value', - query='query_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.SearchAllIamPoliciesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.search_all_iam_policies(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{scope=*/*}:searchAllIamPolicies" % client.transport._host, args[1]) - - -def test_search_all_iam_policies_rest_flattened_error(transport: str = 'rest'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.search_all_iam_policies( - asset_service.SearchAllIamPoliciesRequest(), - scope='scope_value', - query='query_value', - ) - - -def test_search_all_iam_policies_rest_pager(transport: str = 'rest'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
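- # (The commented-out transcode mock below is leftover generator
- # scaffolding, as the TODO notes; this pager test exercises the real
- # transcode path against sample_request.)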
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - asset_service.SearchAllIamPoliciesResponse( - results=[ - assets.IamPolicySearchResult(), - assets.IamPolicySearchResult(), - assets.IamPolicySearchResult(), - ], - next_page_token='abc', - ), - asset_service.SearchAllIamPoliciesResponse( - results=[], - next_page_token='def', - ), - asset_service.SearchAllIamPoliciesResponse( - results=[ - assets.IamPolicySearchResult(), - ], - next_page_token='ghi', - ), - asset_service.SearchAllIamPoliciesResponse( - results=[ - assets.IamPolicySearchResult(), - assets.IamPolicySearchResult(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(asset_service.SearchAllIamPoliciesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'scope': 'sample1/sample2'} - - pager = client.search_all_iam_policies(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, assets.IamPolicySearchResult) - for i in results) - - pages = list(client.search_all_iam_policies(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_analyze_iam_policy_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.analyze_iam_policy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.analyze_iam_policy] = mock_rpc - - request = {} - client.analyze_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.analyze_iam_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_analyze_iam_policy_rest_required_fields(request_type=asset_service.AnalyzeIamPolicyRequest): - transport_class = transports.AssetServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_iam_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_iam_policy._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("analysis_query", "execution_timeout", "saved_analysis_query", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = asset_service.AnalyzeIamPolicyResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
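- # AnalyzeIamPolicy maps to an HTTP GET, so even the nested
- # analysis_query message travels in query_params; the stub below
- # therefore sets no body.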
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = asset_service.AnalyzeIamPolicyResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.analyze_iam_policy(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_analyze_iam_policy_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.analyze_iam_policy._get_unset_required_fields({}) - assert set(unset_fields) == (set(("analysisQuery", "executionTimeout", "savedAnalysisQuery", )) & set(("analysisQuery", ))) - - -def test_analyze_iam_policy_longrunning_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.analyze_iam_policy_longrunning in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.analyze_iam_policy_longrunning] = mock_rpc - - request = {} - client.analyze_iam_policy_longrunning(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.analyze_iam_policy_longrunning(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_analyze_iam_policy_longrunning_rest_required_fields(request_type=asset_service.AnalyzeIamPolicyLongrunningRequest): - transport_class = transports.AssetServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_iam_policy_longrunning._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_iam_policy_longrunning._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
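- # For long-running methods the faked HTTP body is a serialized
- # google.longrunning.Operation (note return_value above), which the
- # client wraps into an operation future rather than returning directly.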
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.analyze_iam_policy_longrunning(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_analyze_iam_policy_longrunning_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.analyze_iam_policy_longrunning._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("analysisQuery", "outputConfig", ))) - - -def test_analyze_move_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.analyze_move in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.analyze_move] = mock_rpc - - request = {} - client.analyze_move(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.analyze_move(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_analyze_move_rest_required_fields(request_type=asset_service.AnalyzeMoveRequest): - transport_class = transports.AssetServiceRestTransport - - request_init = {} - request_init["resource"] = "" - request_init["destination_parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "destinationParent" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_move._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "destinationParent" in jsonified_request - assert jsonified_request["destinationParent"] == request_init["destination_parent"] - - jsonified_request["resource"] = 'resource_value' - jsonified_request["destinationParent"] = 'destination_parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_move._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
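- # i.e. every field still unset at this point must be a legitimate
- # query parameter for AnalyzeMove; anything outside this allow-list
- # would indicate a field leaking between the URI path, body, and
- # query string.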
- assert not set(unset_fields) - set(("destination_parent", "view", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "resource" in jsonified_request - assert jsonified_request["resource"] == 'resource_value' - assert "destinationParent" in jsonified_request - assert jsonified_request["destinationParent"] == 'destination_parent_value' - - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = asset_service.AnalyzeMoveResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = asset_service.AnalyzeMoveResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.analyze_move(request) - - expected_params = [ - ( - "destinationParent", - "", - ), - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_analyze_move_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.analyze_move._get_unset_required_fields({}) - assert set(unset_fields) == (set(("destinationParent", "view", )) & set(("resource", "destinationParent", ))) - - -def test_query_assets_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.query_assets in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.query_assets] = mock_rpc - - request = {} - client.query_assets(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.query_assets(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_query_assets_rest_required_fields(request_type=asset_service.QueryAssetsRequest): - transport_class = transports.AssetServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).query_assets._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).query_assets._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = asset_service.QueryAssetsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
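- # The only query parameter the client adds on its own is
- # '$alt': 'json;enum-encoding=int', which pins the REST transport to
- # JSON responses with integer enum encoding; the expected_params
- # assertion below relies on that.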
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = asset_service.QueryAssetsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.query_assets(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_query_assets_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.query_assets._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", ))) - - -def test_create_saved_query_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_saved_query in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_saved_query] = mock_rpc - - request = {} - client.create_saved_query(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_saved_query(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_saved_query_rest_required_fields(request_type=asset_service.CreateSavedQueryRequest): - transport_class = transports.AssetServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["saved_query_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "savedQueryId" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_saved_query._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "savedQueryId" in jsonified_request - assert jsonified_request["savedQueryId"] == request_init["saved_query_id"] - - jsonified_request["parent"] = 'parent_value' - jsonified_request["savedQueryId"] = 'saved_query_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_saved_query._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("saved_query_id", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "savedQueryId" in jsonified_request - assert jsonified_request["savedQueryId"] == 'saved_query_id_value' - - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = asset_service.SavedQuery() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
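- # saved_query_id is both required and sent as a query parameter, so
- # unlike most methods above the expected_params checked below include
- # a ('savedQueryId', '') pair alongside the standard '$alt' entry.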
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = asset_service.SavedQuery.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_saved_query(request) - - expected_params = [ - ( - "savedQueryId", - "", - ), - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_saved_query_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_saved_query._get_unset_required_fields({}) - assert set(unset_fields) == (set(("savedQueryId", )) & set(("parent", "savedQuery", "savedQueryId", ))) - - -def test_create_saved_query_rest_flattened(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.SavedQuery() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'sample1/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - saved_query=asset_service.SavedQuery(name='name_value'), - saved_query_id='saved_query_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.SavedQuery.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_saved_query(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=*/*}/savedQueries" % client.transport._host, args[1]) - - -def test_create_saved_query_rest_flattened_error(transport: str = 'rest'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
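-    # Mixing the two calling conventions is ambiguous, so the client raises
-    # ValueError rather than guessing which request should win.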
- with pytest.raises(ValueError): - client.create_saved_query( - asset_service.CreateSavedQueryRequest(), - parent='parent_value', - saved_query=asset_service.SavedQuery(name='name_value'), - saved_query_id='saved_query_id_value', - ) - - -def test_get_saved_query_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_saved_query in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_saved_query] = mock_rpc - - request = {} - client.get_saved_query(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_saved_query(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_saved_query_rest_required_fields(request_type=asset_service.GetSavedQueryRequest): - transport_class = transports.AssetServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_saved_query._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_saved_query._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = asset_service.SavedQuery() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
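-            # GetSavedQuery maps to an HTTP GET, so the transcode stub below
-            # carries no body and the whole request travels in query_params.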
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = asset_service.SavedQuery.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_saved_query(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_saved_query_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_saved_query._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_saved_query_rest_flattened(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.SavedQuery() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'sample1/sample2/savedQueries/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.SavedQuery.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_saved_query(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=*/*/savedQueries/*}" % client.transport._host, args[1]) - - -def test_get_saved_query_rest_flattened_error(transport: str = 'rest'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_saved_query( - asset_service.GetSavedQueryRequest(), - name='name_value', - ) - - -def test_list_saved_queries_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_saved_queries in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_saved_queries] = mock_rpc - - request = {} - client.list_saved_queries(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_saved_queries(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_saved_queries_rest_required_fields(request_type=asset_service.ListSavedQueriesRequest): - transport_class = transports.AssetServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_saved_queries._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_saved_queries._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = asset_service.ListSavedQueriesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = asset_service.ListSavedQueriesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_saved_queries(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_saved_queries_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_saved_queries._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_saved_queries_rest_flattened(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.ListSavedQueriesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'sample1/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.ListSavedQueriesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_saved_queries(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=*/*}/savedQueries" % client.transport._host, args[1]) - - -def test_list_saved_queries_rest_flattened_error(transport: str = 'rest'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_saved_queries( - asset_service.ListSavedQueriesRequest(), - parent='parent_value', - ) - - -def test_list_saved_queries_rest_pager(transport: str = 'rest'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - asset_service.ListSavedQueriesResponse( - saved_queries=[ - asset_service.SavedQuery(), - asset_service.SavedQuery(), - asset_service.SavedQuery(), - ], - next_page_token='abc', - ), - asset_service.ListSavedQueriesResponse( - saved_queries=[], - next_page_token='def', - ), - asset_service.ListSavedQueriesResponse( - saved_queries=[ - asset_service.SavedQuery(), - ], - next_page_token='ghi', - ), - asset_service.ListSavedQueriesResponse( - saved_queries=[ - asset_service.SavedQuery(), - asset_service.SavedQuery(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(asset_service.ListSavedQueriesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'sample1/sample2'} - - pager = client.list_saved_queries(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, asset_service.SavedQuery) - for i in results) - - pages = list(client.list_saved_queries(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_update_saved_query_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_saved_query in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_saved_query] = mock_rpc - - request = {} - client.update_saved_query(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.update_saved_query(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_saved_query_rest_required_fields(request_type=asset_service.UpdateSavedQueryRequest): - transport_class = transports.AssetServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_saved_query._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_saved_query._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = asset_service.SavedQuery() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = asset_service.SavedQuery.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_saved_query(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_saved_query_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_saved_query._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", )) & set(("savedQuery", "updateMask", ))) - - -def test_update_saved_query_rest_flattened(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
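-    # Flattened-call tests patch the transport session, invoke the method
-    # with keyword arguments instead of a request object, and then use
-    # path_template.validate to confirm those arguments were expanded into
-    # the expected REST URI.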
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.SavedQuery() - - # get arguments that satisfy an http rule for this method - sample_request = {'saved_query': {'name': 'sample1/sample2/savedQueries/sample3'}} - - # get truthy value for each flattened field - mock_args = dict( - saved_query=asset_service.SavedQuery(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.SavedQuery.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_saved_query(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{saved_query.name=*/*/savedQueries/*}" % client.transport._host, args[1]) - - -def test_update_saved_query_rest_flattened_error(transport: str = 'rest'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_saved_query( - asset_service.UpdateSavedQueryRequest(), - saved_query=asset_service.SavedQuery(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_delete_saved_query_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_saved_query in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_saved_query] = mock_rpc - - request = {} - client.delete_saved_query(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_saved_query(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_saved_query_rest_required_fields(request_type=asset_service.DeleteSavedQueryRequest): - transport_class = transports.AssetServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_saved_query._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_saved_query._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_saved_query(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_saved_query_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_saved_query._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_delete_saved_query_rest_flattened(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
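-        # DeleteSavedQuery returns google.protobuf.Empty, so the faked HTTP
-        # response body is an empty string and the client call yields None.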
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'sample1/sample2/savedQueries/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_saved_query(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=*/*/savedQueries/*}" % client.transport._host, args[1]) - - -def test_delete_saved_query_rest_flattened_error(transport: str = 'rest'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_saved_query( - asset_service.DeleteSavedQueryRequest(), - name='name_value', - ) - - -def test_batch_get_effective_iam_policies_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.batch_get_effective_iam_policies in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.batch_get_effective_iam_policies] = mock_rpc - - request = {} - client.batch_get_effective_iam_policies(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.batch_get_effective_iam_policies(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_batch_get_effective_iam_policies_rest_required_fields(request_type=asset_service.BatchGetEffectiveIamPoliciesRequest): - transport_class = transports.AssetServiceRestTransport - - request_init = {} - request_init["scope"] = "" - request_init["names"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "names" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_get_effective_iam_policies._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "names" in jsonified_request - assert jsonified_request["names"] == request_init["names"] - - jsonified_request["scope"] = 'scope_value' - jsonified_request["names"] = 'names_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_get_effective_iam_policies._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("names", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "scope" in jsonified_request - assert jsonified_request["scope"] == 'scope_value' - assert "names" in jsonified_request - assert jsonified_request["names"] == 'names_value' - - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = asset_service.BatchGetEffectiveIamPoliciesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
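-            # "names" is a required repeated field: dropped from the initial
-            # JSON as an empty default, it must reappear in expected_params
-            # as ("names", "") next to the $alt system parameter.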
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = asset_service.BatchGetEffectiveIamPoliciesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.batch_get_effective_iam_policies(request) - - expected_params = [ - ( - "names", - "", - ), - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_batch_get_effective_iam_policies_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.batch_get_effective_iam_policies._get_unset_required_fields({}) - assert set(unset_fields) == (set(("names", )) & set(("scope", "names", ))) - - -def test_analyze_org_policies_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.analyze_org_policies in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.analyze_org_policies] = mock_rpc - - request = {} - client.analyze_org_policies(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.analyze_org_policies(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_analyze_org_policies_rest_required_fields(request_type=asset_service.AnalyzeOrgPoliciesRequest): - transport_class = transports.AssetServiceRestTransport - - request_init = {} - request_init["scope"] = "" - request_init["constraint"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "constraint" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policies._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "constraint" in jsonified_request - assert jsonified_request["constraint"] == request_init["constraint"] - - jsonified_request["scope"] = 'scope_value' - jsonified_request["constraint"] = 'constraint_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policies._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("constraint", "filter", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "scope" in jsonified_request - assert jsonified_request["scope"] == 'scope_value' - assert "constraint" in jsonified_request - assert jsonified_request["constraint"] == 'constraint_value' - - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = asset_service.AnalyzeOrgPoliciesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = asset_service.AnalyzeOrgPoliciesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.analyze_org_policies(request) - - expected_params = [ - ( - "constraint", - "", - ), - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_analyze_org_policies_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.analyze_org_policies._get_unset_required_fields({}) - assert set(unset_fields) == (set(("constraint", "filter", "pageSize", "pageToken", )) & set(("scope", "constraint", ))) - - -def test_analyze_org_policies_rest_flattened(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.AnalyzeOrgPoliciesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'scope': 'sample1/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.AnalyzeOrgPoliciesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.analyze_org_policies(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{scope=*/*}:analyzeOrgPolicies" % client.transport._host, args[1]) - - -def test_analyze_org_policies_rest_flattened_error(transport: str = 'rest'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.analyze_org_policies( - asset_service.AnalyzeOrgPoliciesRequest(), - scope='scope_value', - constraint='constraint_value', - filter='filter_value', - ) - - -def test_analyze_org_policies_rest_pager(transport: str = 'rest'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. 
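-    # The pager test queues four pages (3 + 0 + 1 + 2 items) twice: once
-    # consumed item-by-item via list(pager) and once page-by-page via .pages,
-    # checking result flattening and next_page_token propagation respectively.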
- with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - asset_service.AnalyzeOrgPoliciesResponse( - org_policy_results=[ - asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), - asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), - asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), - ], - next_page_token='abc', - ), - asset_service.AnalyzeOrgPoliciesResponse( - org_policy_results=[], - next_page_token='def', - ), - asset_service.AnalyzeOrgPoliciesResponse( - org_policy_results=[ - asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), - ], - next_page_token='ghi', - ), - asset_service.AnalyzeOrgPoliciesResponse( - org_policy_results=[ - asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), - asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(asset_service.AnalyzeOrgPoliciesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'scope': 'sample1/sample2'} - - pager = client.analyze_org_policies(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult) - for i in results) - - pages = list(client.analyze_org_policies(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_analyze_org_policy_governed_containers_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.analyze_org_policy_governed_containers in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.analyze_org_policy_governed_containers] = mock_rpc - - request = {} - client.analyze_org_policy_governed_containers(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.analyze_org_policy_governed_containers(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_analyze_org_policy_governed_containers_rest_required_fields(request_type=asset_service.AnalyzeOrgPolicyGovernedContainersRequest): - transport_class = transports.AssetServiceRestTransport - - request_init = {} - request_init["scope"] = "" - request_init["constraint"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "constraint" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policy_governed_containers._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "constraint" in jsonified_request - assert jsonified_request["constraint"] == request_init["constraint"] - - jsonified_request["scope"] = 'scope_value' - jsonified_request["constraint"] = 'constraint_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policy_governed_containers._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("constraint", "filter", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "scope" in jsonified_request - assert jsonified_request["scope"] == 'scope_value' - assert "constraint" in jsonified_request - assert jsonified_request["constraint"] == 'constraint_value' - - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.analyze_org_policy_governed_containers(request) - - expected_params = [ - ( - "constraint", - "", - ), - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_analyze_org_policy_governed_containers_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.analyze_org_policy_governed_containers._get_unset_required_fields({}) - assert set(unset_fields) == (set(("constraint", "filter", "pageSize", "pageToken", )) & set(("scope", "constraint", ))) - - -def test_analyze_org_policy_governed_containers_rest_flattened(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'scope': 'sample1/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.analyze_org_policy_governed_containers(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{scope=*/*}:analyzeOrgPolicyGovernedContainers" % client.transport._host, args[1]) - - -def test_analyze_org_policy_governed_containers_rest_flattened_error(transport: str = 'rest'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.analyze_org_policy_governed_containers( - asset_service.AnalyzeOrgPolicyGovernedContainersRequest(), - scope='scope_value', - constraint='constraint_value', - filter='filter_value', - ) - - -def test_analyze_org_policy_governed_containers_rest_pager(transport: str = 'rest'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - asset_service.AnalyzeOrgPolicyGovernedContainersResponse( - governed_containers=[ - asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), - asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), - asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), - ], - next_page_token='abc', - ), - asset_service.AnalyzeOrgPolicyGovernedContainersResponse( - governed_containers=[], - next_page_token='def', - ), - asset_service.AnalyzeOrgPolicyGovernedContainersResponse( - governed_containers=[ - asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), - ], - next_page_token='ghi', - ), - asset_service.AnalyzeOrgPolicyGovernedContainersResponse( - governed_containers=[ - asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), - asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(asset_service.AnalyzeOrgPolicyGovernedContainersResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'scope': 'sample1/sample2'} - - pager = client.analyze_org_policy_governed_containers(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer) - for i in results) - - pages = list(client.analyze_org_policy_governed_containers(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_analyze_org_policy_governed_assets_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.analyze_org_policy_governed_assets in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.analyze_org_policy_governed_assets] = mock_rpc - - request = {} - client.analyze_org_policy_governed_assets(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.analyze_org_policy_governed_assets(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_analyze_org_policy_governed_assets_rest_required_fields(request_type=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest): - transport_class = transports.AssetServiceRestTransport - - request_init = {} - request_init["scope"] = "" - request_init["constraint"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "constraint" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policy_governed_assets._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "constraint" in jsonified_request - assert jsonified_request["constraint"] == request_init["constraint"] - - jsonified_request["scope"] = 'scope_value' - jsonified_request["constraint"] = 'constraint_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policy_governed_assets._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("constraint", "filter", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "scope" in jsonified_request - assert jsonified_request["scope"] == 'scope_value' - assert "constraint" in jsonified_request - assert jsonified_request["constraint"] == 'constraint_value' - - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.analyze_org_policy_governed_assets(request) - - expected_params = [ - ( - "constraint", - "", - ), - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_analyze_org_policy_governed_assets_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.analyze_org_policy_governed_assets._get_unset_required_fields({}) - assert set(unset_fields) == (set(("constraint", "filter", "pageSize", "pageToken", )) & set(("scope", "constraint", ))) - - -def test_analyze_org_policy_governed_assets_rest_flattened(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'scope': 'sample1/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.analyze_org_policy_governed_assets(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{scope=*/*}:analyzeOrgPolicyGovernedAssets" % client.transport._host, args[1]) - - -def test_analyze_org_policy_governed_assets_rest_flattened_error(transport: str = 'rest'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.analyze_org_policy_governed_assets( - asset_service.AnalyzeOrgPolicyGovernedAssetsRequest(), - scope='scope_value', - constraint='constraint_value', - filter='filter_value', - ) - - -def test_analyze_org_policy_governed_assets_rest_pager(transport: str = 'rest'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( - governed_assets=[ - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), - ], - next_page_token='abc', - ), - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( - governed_assets=[], - next_page_token='def', - ), - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( - governed_assets=[ - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), - ], - next_page_token='ghi', - ), - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( - governed_assets=[ - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'scope': 'sample1/sample2'} - - pager = client.analyze_org_policy_governed_assets(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset) - for i in results) - - pages = list(client.analyze_org_policy_governed_assets(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.AssetServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.AssetServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = AssetServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. 
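
# A minimal sketch of consuming the pager that
# test_analyze_org_policy_governed_assets_rest_pager (above) drives through
# mocked pages, assuming this module's AssetServiceClient import; the scope
# value is a placeholder.
def sketch_consume_pager(client: AssetServiceClient):
    pager = client.analyze_org_policy_governed_assets(request={'scope': 'sample1/sample2'})
    # Iterating the pager transparently requests follow-up pages until the
    # service returns an empty next_page_token; pager.pages yields the raw
    # per-page responses instead.
    return list(pager)
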
- transport = transports.AssetServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = AssetServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = AssetServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.AssetServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = AssetServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.AssetServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = AssetServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.AssetServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.AssetServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.AssetServiceGrpcTransport, - transports.AssetServiceGrpcAsyncIOTransport, - transports.AssetServiceRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = AssetServiceClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_export_assets_empty_call_grpc(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.export_assets), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.export_assets(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.ExportAssetsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_assets_empty_call_grpc(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
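
# A condensed sketch of the "empty call" pattern the following tests repeat
# once per RPC, assuming this module's imports (mock, ga_credentials,
# AssetServiceClient, asset_service): patch the bound transport method, call
# the client with request=None, and check a default request was synthesized.
def assert_empty_call(method_name, response, request_cls):
    client = AssetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )
    with mock.patch.object(
            type(getattr(client.transport, method_name)), '__call__') as call:
        call.return_value = response
        getattr(client, method_name)(request=None)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request_cls()

# For example:
#   assert_empty_call('get_feed', asset_service.Feed(), asset_service.GetFeedRequest)
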
- with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: - call.return_value = asset_service.ListAssetsResponse() - client.list_assets(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.ListAssetsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_batch_get_assets_history_empty_call_grpc(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.batch_get_assets_history), - '__call__') as call: - call.return_value = asset_service.BatchGetAssetsHistoryResponse() - client.batch_get_assets_history(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.BatchGetAssetsHistoryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_feed_empty_call_grpc(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_feed), - '__call__') as call: - call.return_value = asset_service.Feed() - client.create_feed(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.CreateFeedRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_feed_empty_call_grpc(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_feed), - '__call__') as call: - call.return_value = asset_service.Feed() - client.get_feed(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.GetFeedRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_feeds_empty_call_grpc(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_feeds), - '__call__') as call: - call.return_value = asset_service.ListFeedsResponse() - client.list_feeds(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.ListFeedsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_update_feed_empty_call_grpc(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_feed), - '__call__') as call: - call.return_value = asset_service.Feed() - client.update_feed(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.UpdateFeedRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_feed_empty_call_grpc(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_feed), - '__call__') as call: - call.return_value = None - client.delete_feed(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.DeleteFeedRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_search_all_resources_empty_call_grpc(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.search_all_resources), - '__call__') as call: - call.return_value = asset_service.SearchAllResourcesResponse() - client.search_all_resources(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.SearchAllResourcesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_search_all_iam_policies_empty_call_grpc(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: - call.return_value = asset_service.SearchAllIamPoliciesResponse() - client.search_all_iam_policies(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.SearchAllIamPoliciesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_analyze_iam_policy_empty_call_grpc(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.analyze_iam_policy), - '__call__') as call: - call.return_value = asset_service.AnalyzeIamPolicyResponse() - client.analyze_iam_policy(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.AnalyzeIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_analyze_iam_policy_longrunning_empty_call_grpc(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.analyze_iam_policy_longrunning), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.analyze_iam_policy_longrunning(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.AnalyzeIamPolicyLongrunningRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_analyze_move_empty_call_grpc(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.analyze_move), - '__call__') as call: - call.return_value = asset_service.AnalyzeMoveResponse() - client.analyze_move(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.AnalyzeMoveRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_query_assets_empty_call_grpc(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.query_assets), - '__call__') as call: - call.return_value = asset_service.QueryAssetsResponse() - client.query_assets(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.QueryAssetsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_saved_query_empty_call_grpc(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_saved_query), - '__call__') as call: - call.return_value = asset_service.SavedQuery() - client.create_saved_query(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.CreateSavedQueryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_saved_query_empty_call_grpc(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.get_saved_query), - '__call__') as call: - call.return_value = asset_service.SavedQuery() - client.get_saved_query(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.GetSavedQueryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_saved_queries_empty_call_grpc(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_saved_queries), - '__call__') as call: - call.return_value = asset_service.ListSavedQueriesResponse() - client.list_saved_queries(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.ListSavedQueriesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_saved_query_empty_call_grpc(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_saved_query), - '__call__') as call: - call.return_value = asset_service.SavedQuery() - client.update_saved_query(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.UpdateSavedQueryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_saved_query_empty_call_grpc(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_saved_query), - '__call__') as call: - call.return_value = None - client.delete_saved_query(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.DeleteSavedQueryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_batch_get_effective_iam_policies_empty_call_grpc(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.batch_get_effective_iam_policies), - '__call__') as call: - call.return_value = asset_service.BatchGetEffectiveIamPoliciesResponse() - client.batch_get_effective_iam_policies(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.BatchGetEffectiveIamPoliciesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_analyze_org_policies_empty_call_grpc(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.analyze_org_policies), - '__call__') as call: - call.return_value = asset_service.AnalyzeOrgPoliciesResponse() - client.analyze_org_policies(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.AnalyzeOrgPoliciesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_analyze_org_policy_governed_containers_empty_call_grpc(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.analyze_org_policy_governed_containers), - '__call__') as call: - call.return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() - client.analyze_org_policy_governed_containers(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.AnalyzeOrgPolicyGovernedContainersRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_analyze_org_policy_governed_assets_empty_call_grpc(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.analyze_org_policy_governed_assets), - '__call__') as call: - call.return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse() - client.analyze_org_policy_governed_assets(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = AssetServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_export_assets_empty_call_grpc_asyncio(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.export_assets), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.export_assets(request=None) - - # Establish that the underlying stub method was called. 
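
# The asyncio variant of the same empty-call check, condensed;
# grpc_helpers_async.FakeUnaryUnaryCall wraps a plain response so the patched
# stub can be awaited like a real unary-unary call. Assumes this module's
# imports (pytest, mock, AssetServiceAsyncClient, async_anonymous_credentials,
# grpc_helpers_async, asset_service).
@pytest.mark.asyncio
async def sketch_empty_call_grpc_asyncio():
    client = AssetServiceAsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )
    with mock.patch.object(type(client.transport.get_feed), '__call__') as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed())
        await client.get_feed(request=None)
        _, args, _ = call.mock_calls[0]
        assert args[0] == asset_service.GetFeedRequest()
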
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.ExportAssetsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_assets_empty_call_grpc_asyncio(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListAssetsResponse( - next_page_token='next_page_token_value', - )) - await client.list_assets(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.ListAssetsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_batch_get_assets_history_empty_call_grpc_asyncio(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.batch_get_assets_history), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.BatchGetAssetsHistoryResponse( - )) - await client.batch_get_assets_history(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.BatchGetAssetsHistoryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_feed_empty_call_grpc_asyncio(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_feed), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], - )) - await client.create_feed(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.CreateFeedRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_feed_empty_call_grpc_asyncio(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_feed), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], - )) - await client.get_feed(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.GetFeedRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_feeds_empty_call_grpc_asyncio(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_feeds), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListFeedsResponse( - )) - await client.list_feeds(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.ListFeedsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_feed_empty_call_grpc_asyncio(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_feed), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], - )) - await client.update_feed(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.UpdateFeedRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_feed_empty_call_grpc_asyncio(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_feed), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_feed(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.DeleteFeedRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_search_all_resources_empty_call_grpc_asyncio(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.search_all_resources), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllResourcesResponse( - next_page_token='next_page_token_value', - )) - await client.search_all_resources(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.SearchAllResourcesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_search_all_iam_policies_empty_call_grpc_asyncio(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllIamPoliciesResponse( - next_page_token='next_page_token_value', - )) - await client.search_all_iam_policies(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.SearchAllIamPoliciesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_analyze_iam_policy_empty_call_grpc_asyncio(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.analyze_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeIamPolicyResponse( - fully_explored=True, - )) - await client.analyze_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.AnalyzeIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_analyze_iam_policy_longrunning_empty_call_grpc_asyncio(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.analyze_iam_policy_longrunning), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.analyze_iam_policy_longrunning(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.AnalyzeIamPolicyLongrunningRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_analyze_move_empty_call_grpc_asyncio(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.analyze_move), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeMoveResponse( - )) - await client.analyze_move(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.AnalyzeMoveRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_query_assets_empty_call_grpc_asyncio(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.query_assets), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.QueryAssetsResponse( - job_reference='job_reference_value', - done=True, - )) - await client.query_assets(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.QueryAssetsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_saved_query_empty_call_grpc_asyncio(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_saved_query), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', - )) - await client.create_saved_query(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.CreateSavedQueryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_saved_query_empty_call_grpc_asyncio(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_saved_query), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', - )) - await client.get_saved_query(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.GetSavedQueryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_saved_queries_empty_call_grpc_asyncio(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_saved_queries), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListSavedQueriesResponse( - next_page_token='next_page_token_value', - )) - await client.list_saved_queries(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.ListSavedQueriesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_saved_query_empty_call_grpc_asyncio(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_saved_query), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', - )) - await client.update_saved_query(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.UpdateSavedQueryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_saved_query_empty_call_grpc_asyncio(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_saved_query), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_saved_query(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.DeleteSavedQueryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_batch_get_effective_iam_policies_empty_call_grpc_asyncio(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.batch_get_effective_iam_policies), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.BatchGetEffectiveIamPoliciesResponse( - )) - await client.batch_get_effective_iam_policies(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.BatchGetEffectiveIamPoliciesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_analyze_org_policies_empty_call_grpc_asyncio(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.analyze_org_policies), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPoliciesResponse( - next_page_token='next_page_token_value', - )) - await client.analyze_org_policies(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.AnalyzeOrgPoliciesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_analyze_org_policy_governed_containers_empty_call_grpc_asyncio(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.analyze_org_policy_governed_containers), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedContainersResponse( - next_page_token='next_page_token_value', - )) - await client.analyze_org_policy_governed_containers(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.AnalyzeOrgPolicyGovernedContainersRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_analyze_org_policy_governed_assets_empty_call_grpc_asyncio(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.analyze_org_policy_governed_assets), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( - next_page_token='next_page_token_value', - )) - await client.analyze_org_policy_governed_assets(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() - - assert args[0] == request_msg - - -def test_transport_kind_rest(): - transport = AssetServiceClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" - - -def test_export_assets_rest_bad_request(request_type=asset_service.ExportAssetsRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.export_assets(request) - - -@pytest.mark.parametrize("request_type", [ - asset_service.ExportAssetsRequest, - dict, -]) -def test_export_assets_rest_call_success(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.export_assets(request) - - # Establish that the response is the type that we expect. 
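
# A small helper equivalent to the response plumbing these REST "call success"
# tests repeat: the mocked session returns the response message serialized as
# protobuf JSON on an HTTP 200. Assumes this module's mock import; the helper
# name is illustrative.
def fake_rest_response(message_json):
    response_value = mock.Mock()
    response_value.status_code = 200
    response_value.content = message_json.encode('UTF-8')
    response_value.headers = {"header-1": "value-1", "header-2": "value-2"}
    return response_value

# For example:
#   req.return_value = fake_rest_response(json_format.MessageToJson(return_value))
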
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_export_assets_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_export_assets") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_export_assets_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_export_assets") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = asset_service.ExportAssetsRequest.pb(asset_service.ExportAssetsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = asset_service.ExportAssetsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.export_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_assets_rest_bad_request(request_type=asset_service.ListAssetsRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_assets(request) - - -@pytest.mark.parametrize("request_type", [ - asset_service.ListAssetsRequest, - dict, -]) -def test_list_assets_rest_call_success(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = asset_service.ListAssetsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = asset_service.ListAssetsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_assets(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAssetsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_assets_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_assets") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_assets_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_list_assets") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = asset_service.ListAssetsRequest.pb(asset_service.ListAssetsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.ListAssetsResponse.to_json(asset_service.ListAssetsResponse()) - req.return_value.content = return_value - - request = asset_service.ListAssetsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.ListAssetsResponse() - post_with_metadata.return_value = asset_service.ListAssetsResponse(), metadata - - client.list_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_batch_get_assets_history_rest_bad_request(request_type=asset_service.BatchGetAssetsHistoryRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
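
# The matching 400-path helper, condensed from the bad-request tests: an HTTP
# error status returned by the mocked session is raised as the corresponding
# google.api_core exception (core_exceptions.BadRequest for 400). Assumes this
# module's mock import; the helper name is illustrative.
def fake_bad_request_response():
    response_value = mock.Mock()
    response_value.status_code = 400
    response_value.json = mock.Mock(return_value={})
    response_value.request = mock.Mock()
    response_value.headers = {"header-1": "value-1", "header-2": "value-2"}
    return response_value
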
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.batch_get_assets_history(request)
-
-
-@pytest.mark.parametrize("request_type", [
-  asset_service.BatchGetAssetsHistoryRequest,
-  dict,
-])
-def test_batch_get_assets_history_rest_call_success(request_type):
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'sample1/sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = asset_service.BatchGetAssetsHistoryResponse(
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-
-        # Convert return value to protobuf type
-        return_value = asset_service.BatchGetAssetsHistoryResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.batch_get_assets_history(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, asset_service.BatchGetAssetsHistoryResponse)
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_batch_get_assets_history_rest_interceptors(null_interceptor):
-    transport = transports.AssetServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(),
-        )
-    client = AssetServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.AssetServiceRestInterceptor, "post_batch_get_assets_history") as post, \
-        mock.patch.object(transports.AssetServiceRestInterceptor, "post_batch_get_assets_history_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.AssetServiceRestInterceptor, "pre_batch_get_assets_history") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = asset_service.BatchGetAssetsHistoryRequest.pb(asset_service.BatchGetAssetsHistoryRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = asset_service.BatchGetAssetsHistoryResponse.to_json(asset_service.BatchGetAssetsHistoryResponse())
-        req.return_value.content = return_value
-
-        request = asset_service.BatchGetAssetsHistoryRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = asset_service.BatchGetAssetsHistoryResponse()
-        post_with_metadata.return_value = asset_service.BatchGetAssetsHistoryResponse(), metadata
-
-        client.batch_get_assets_history(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_create_feed_rest_bad_request(request_type=asset_service.CreateFeedRequest):
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'sample1/sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.create_feed(request)
-
-
-@pytest.mark.parametrize("request_type", [
-  asset_service.CreateFeedRequest,
-  dict,
-])
-def test_create_feed_rest_call_success(request_type):
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'sample1/sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = asset_service.Feed(
-              name='name_value',
-              asset_names=['asset_names_value'],
-              asset_types=['asset_types_value'],
-              content_type=asset_service.ContentType.RESOURCE,
-              relationship_types=['relationship_types_value'],
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-
-        # Convert return value to protobuf type
-        return_value = asset_service.Feed.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.create_feed(request)
-
-    # Establish that the response is the type that we expect.
- assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] - assert response.content_type == asset_service.ContentType.RESOURCE - assert response.relationship_types == ['relationship_types_value'] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_feed_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_create_feed") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_create_feed_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_create_feed") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = asset_service.CreateFeedRequest.pb(asset_service.CreateFeedRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.Feed.to_json(asset_service.Feed()) - req.return_value.content = return_value - - request = asset_service.CreateFeedRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.Feed() - post_with_metadata.return_value = asset_service.Feed(), metadata - - client.create_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_feed_rest_bad_request(request_type=asset_service.GetFeedRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/feeds/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_feed(request) - - -@pytest.mark.parametrize("request_type", [ - asset_service.GetFeedRequest, - dict, -]) -def test_get_feed_rest_call_success(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/feeds/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
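- # Sketch of the serialization round-trip these success tests fake
- # (standard proto-plus behavior; values are illustrative):
- #
- #   feed = asset_service.Feed(name='name_value')
- #   feed_pb = asset_service.Feed.pb(feed)            # proto-plus -> raw protobuf
- #   payload = json_format.MessageToJson(feed_pb)     # the JSON a REST server would return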
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = asset_service.Feed.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_feed(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] - assert response.content_type == asset_service.ContentType.RESOURCE - assert response.relationship_types == ['relationship_types_value'] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_feed_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_get_feed") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_get_feed_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_get_feed") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = asset_service.GetFeedRequest.pb(asset_service.GetFeedRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.Feed.to_json(asset_service.Feed()) - req.return_value.content = return_value - - request = asset_service.GetFeedRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.Feed() - post_with_metadata.return_value = asset_service.Feed(), metadata - - client.get_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_feeds_rest_bad_request(request_type=asset_service.ListFeedsRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- json_return_value = ''
- response_value.json = mock.Mock(return_value={})
- response_value.status_code = 400
- response_value.request = mock.Mock()
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- client.list_feeds(request)
-
-
-@pytest.mark.parametrize("request_type", [
- asset_service.ListFeedsRequest,
- dict,
-])
-def test_list_feeds_rest_call_success(request_type):
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
-
- # send a request that will satisfy transcoding
- request_init = {'parent': 'sample1/sample2'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = asset_service.ListFeedsResponse(
- )
-
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- response_value.status_code = 200
-
- # Convert return value to protobuf type
- return_value = asset_service.ListFeedsResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
- response_value.content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- response = client.list_feeds(request)
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, asset_service.ListFeedsResponse)
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_list_feeds_rest_interceptors(null_interceptor):
- transport = transports.AssetServiceRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(),
- )
- client = AssetServiceClient(transport=transport)
-
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_feeds") as post, \
- mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_feeds_with_metadata") as post_with_metadata, \
- mock.patch.object(transports.AssetServiceRestInterceptor, "pre_list_feeds") as pre:
- pre.assert_not_called()
- post.assert_not_called()
- post_with_metadata.assert_not_called()
- pb_message = asset_service.ListFeedsRequest.pb(asset_service.ListFeedsRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = mock.Mock()
- req.return_value.status_code = 200
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- return_value = asset_service.ListFeedsResponse.to_json(asset_service.ListFeedsResponse())
- req.return_value.content = return_value
-
- request = asset_service.ListFeedsRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = asset_service.ListFeedsResponse()
- post_with_metadata.return_value = asset_service.ListFeedsResponse(), metadata
-
- client.list_feeds(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
- pre.assert_called_once()
- post.assert_called_once()
- post_with_metadata.assert_called_once()
-
-
-def test_update_feed_rest_bad_request(request_type=asset_service.UpdateFeedRequest):
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
- # send a request that will satisfy transcoding
- request_init = {'feed': {'name': 'sample1/sample2/feeds/sample3'}}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- json_return_value = ''
- response_value.json = mock.Mock(return_value={})
- response_value.status_code = 400
- response_value.request = mock.Mock()
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- client.update_feed(request)
-
-
-@pytest.mark.parametrize("request_type", [
- asset_service.UpdateFeedRequest,
- dict,
-])
-def test_update_feed_rest_call_success(request_type):
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
-
- # send a request that will satisfy transcoding
- request_init = {'feed': {'name': 'sample1/sample2/feeds/sample3'}}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = asset_service.Feed(
- name='name_value',
- asset_names=['asset_names_value'],
- asset_types=['asset_types_value'],
- content_type=asset_service.ContentType.RESOURCE,
- relationship_types=['relationship_types_value'],
- )
-
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- response_value.status_code = 200
-
- # Convert return value to protobuf type
- return_value = asset_service.Feed.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
- response_value.content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- response = client.update_feed(request)
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] - assert response.content_type == asset_service.ContentType.RESOURCE - assert response.relationship_types == ['relationship_types_value'] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_feed_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_update_feed") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_update_feed_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_update_feed") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = asset_service.UpdateFeedRequest.pb(asset_service.UpdateFeedRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.Feed.to_json(asset_service.Feed()) - req.return_value.content = return_value - - request = asset_service.UpdateFeedRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.Feed() - post_with_metadata.return_value = asset_service.Feed(), metadata - - client.update_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_feed_rest_bad_request(request_type=asset_service.DeleteFeedRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/feeds/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_feed(request) - - -@pytest.mark.parametrize("request_type", [ - asset_service.DeleteFeedRequest, - dict, -]) -def test_delete_feed_rest_call_success(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/feeds/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
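- # DeleteFeed returns google.protobuf.Empty over the wire, so this test
- # fakes an empty body and expects the client to surface None; in effect
- # (a sketch under that assumption):
- #
- #   assert client.delete_feed(request) is None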
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_feed(request) - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_feed_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_delete_feed") as pre: - pre.assert_not_called() - pb_message = asset_service.DeleteFeedRequest.pb(asset_service.DeleteFeedRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = asset_service.DeleteFeedRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_search_all_resources_rest_bad_request(request_type=asset_service.SearchAllResourcesRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.search_all_resources(request) - - -@pytest.mark.parametrize("request_type", [ - asset_service.SearchAllResourcesRequest, - dict, -]) -def test_search_all_resources_rest_call_success(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
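- # The next_page_token designated below is what drives the returned
- # pagers.SearchAllResourcesPager; a hypothetical usage sketch (process()
- # is a placeholder, not part of this suite):
- #
- #   pager = client.search_all_resources(request)
- #   for resource in pager:      # lazily fetches pages via next_page_token
- #       process(resource)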
- return_value = asset_service.SearchAllResourcesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = asset_service.SearchAllResourcesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.search_all_resources(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.SearchAllResourcesPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_search_all_resources_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_search_all_resources") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_search_all_resources_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_search_all_resources") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = asset_service.SearchAllResourcesRequest.pb(asset_service.SearchAllResourcesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.SearchAllResourcesResponse.to_json(asset_service.SearchAllResourcesResponse()) - req.return_value.content = return_value - - request = asset_service.SearchAllResourcesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.SearchAllResourcesResponse() - post_with_metadata.return_value = asset_service.SearchAllResourcesResponse(), metadata - - client.search_all_resources(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_search_all_iam_policies_rest_bad_request(request_type=asset_service.SearchAllIamPoliciesRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.search_all_iam_policies(request) - - -@pytest.mark.parametrize("request_type", [ - asset_service.SearchAllIamPoliciesRequest, - dict, -]) -def test_search_all_iam_policies_rest_call_success(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.SearchAllIamPoliciesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = asset_service.SearchAllIamPoliciesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.search_all_iam_policies(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.SearchAllIamPoliciesPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_search_all_iam_policies_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_search_all_iam_policies") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_search_all_iam_policies_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_search_all_iam_policies") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = asset_service.SearchAllIamPoliciesRequest.pb(asset_service.SearchAllIamPoliciesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.SearchAllIamPoliciesResponse.to_json(asset_service.SearchAllIamPoliciesResponse()) - req.return_value.content = return_value - - request = asset_service.SearchAllIamPoliciesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.SearchAllIamPoliciesResponse() - post_with_metadata.return_value = asset_service.SearchAllIamPoliciesResponse(), metadata - - client.search_all_iam_policies(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_analyze_iam_policy_rest_bad_request(request_type=asset_service.AnalyzeIamPolicyRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'analysis_query': {'scope': 'sample1/sample2'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.analyze_iam_policy(request) - - -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeIamPolicyRequest, - dict, -]) -def test_analyze_iam_policy_rest_call_success(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'analysis_query': {'scope': 'sample1/sample2'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
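- # Context for the fully_explored flag faked below: per the Cloud Asset
- # API, AnalyzeIamPolicyResponse.fully_explored reports whether the
- # analysis ran to completion, which this test then checks via:
- #
- #   assert response.fully_explored is True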
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.AnalyzeIamPolicyResponse( - fully_explored=True, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = asset_service.AnalyzeIamPolicyResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.analyze_iam_policy(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.AnalyzeIamPolicyResponse) - assert response.fully_explored is True - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_analyze_iam_policy_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_iam_policy") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_iam_policy_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_iam_policy") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = asset_service.AnalyzeIamPolicyRequest.pb(asset_service.AnalyzeIamPolicyRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.AnalyzeIamPolicyResponse.to_json(asset_service.AnalyzeIamPolicyResponse()) - req.return_value.content = return_value - - request = asset_service.AnalyzeIamPolicyRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.AnalyzeIamPolicyResponse() - post_with_metadata.return_value = asset_service.AnalyzeIamPolicyResponse(), metadata - - client.analyze_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_analyze_iam_policy_longrunning_rest_bad_request(request_type=asset_service.AnalyzeIamPolicyLongrunningRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'analysis_query': {'scope': 'sample1/sample2'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- json_return_value = ''
- response_value.json = mock.Mock(return_value={})
- response_value.status_code = 400
- response_value.request = mock.Mock()
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- client.analyze_iam_policy_longrunning(request)
-
-
-@pytest.mark.parametrize("request_type", [
- asset_service.AnalyzeIamPolicyLongrunningRequest,
- dict,
-])
-def test_analyze_iam_policy_longrunning_rest_call_success(request_type):
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
-
- # send a request that will satisfy transcoding
- request_init = {'analysis_query': {'scope': 'sample1/sample2'}}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = operations_pb2.Operation(name='operations/spam')
-
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
- response_value.content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- response = client.analyze_iam_policy_longrunning(request)
-
- # Establish that the response is the type that we expect.
- assert response.operation.name == 'operations/spam'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_analyze_iam_policy_longrunning_rest_interceptors(null_interceptor):
- transport = transports.AssetServiceRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(),
- )
- client = AssetServiceClient(transport=transport)
-
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(operation.Operation, "_set_result_from_operation"), \
- mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_iam_policy_longrunning") as post, \
- mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_iam_policy_longrunning_with_metadata") as post_with_metadata, \
- mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_iam_policy_longrunning") as pre:
- pre.assert_not_called()
- post.assert_not_called()
- post_with_metadata.assert_not_called()
- pb_message = asset_service.AnalyzeIamPolicyLongrunningRequest.pb(asset_service.AnalyzeIamPolicyLongrunningRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = mock.Mock()
- req.return_value.status_code = 200
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- return_value = json_format.MessageToJson(operations_pb2.Operation())
- req.return_value.content = return_value
-
- request = asset_service.AnalyzeIamPolicyLongrunningRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = operations_pb2.Operation()
- post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
- client.analyze_iam_policy_longrunning(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
- pre.assert_called_once()
- post.assert_called_once()
- post_with_metadata.assert_called_once()
-
-
-def test_analyze_move_rest_bad_request(request_type=asset_service.AnalyzeMoveRequest):
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
- # send a request that will satisfy transcoding
- request_init = {'resource': 'sample1/sample2'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- json_return_value = ''
- response_value.json = mock.Mock(return_value={})
- response_value.status_code = 400
- response_value.request = mock.Mock()
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- client.analyze_move(request)
-
-
-@pytest.mark.parametrize("request_type", [
- asset_service.AnalyzeMoveRequest,
- dict,
-])
-def test_analyze_move_rest_call_success(request_type):
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
-
- # send a request that will satisfy transcoding
- request_init = {'resource': 'sample1/sample2'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = asset_service.AnalyzeMoveResponse(
- )
-
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- response_value.status_code = 200
-
- # Convert return value to protobuf type
- return_value = asset_service.AnalyzeMoveResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
- response_value.content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- response = client.analyze_move(request)
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, asset_service.AnalyzeMoveResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_analyze_move_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_move") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_move_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_move") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = asset_service.AnalyzeMoveRequest.pb(asset_service.AnalyzeMoveRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.AnalyzeMoveResponse.to_json(asset_service.AnalyzeMoveResponse()) - req.return_value.content = return_value - - request = asset_service.AnalyzeMoveRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.AnalyzeMoveResponse() - post_with_metadata.return_value = asset_service.AnalyzeMoveResponse(), metadata - - client.analyze_move(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_query_assets_rest_bad_request(request_type=asset_service.QueryAssetsRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.query_assets(request) - - -@pytest.mark.parametrize("request_type", [ - asset_service.QueryAssetsRequest, - dict, -]) -def test_query_assets_rest_call_success(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
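- # Context for the values designated below (QueryAssets semantics as
- # documented for the Cloud Asset API): the call may finish inline or hand
- # back a job; 'done' signals completion and 'job_reference' lets a caller
- # fetch results later, e.g. (hypothetical follow-up):
- #
- #   if not response.done:
- #       follow_up = asset_service.QueryAssetsRequest(
- #           parent=request.parent, job_reference=response.job_reference)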
- return_value = asset_service.QueryAssetsResponse( - job_reference='job_reference_value', - done=True, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = asset_service.QueryAssetsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.query_assets(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.QueryAssetsResponse) - assert response.job_reference == 'job_reference_value' - assert response.done is True - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_query_assets_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_query_assets") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_query_assets_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_query_assets") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = asset_service.QueryAssetsRequest.pb(asset_service.QueryAssetsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.QueryAssetsResponse.to_json(asset_service.QueryAssetsResponse()) - req.return_value.content = return_value - - request = asset_service.QueryAssetsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.QueryAssetsResponse() - post_with_metadata.return_value = asset_service.QueryAssetsResponse(), metadata - - client.query_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_saved_query_rest_bad_request(request_type=asset_service.CreateSavedQueryRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_saved_query(request) - - -@pytest.mark.parametrize("request_type", [ - asset_service.CreateSavedQueryRequest, - dict, -]) -def test_create_saved_query_rest_call_success(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request_init["saved_query"] = {'name': 'name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'creator': 'creator_value', 'last_update_time': {}, 'last_updater': 'last_updater_value', 'labels': {}, 'content': {'iam_policy_analysis_query': {'scope': 'scope_value', 'resource_selector': {'full_resource_name': 'full_resource_name_value'}, 'identity_selector': {'identity': 'identity_value'}, 'access_selector': {'roles': ['roles_value1', 'roles_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}, 'options': {'expand_groups': True, 'expand_roles': True, 'expand_resources': True, 'output_resource_edges': True, 'output_group_edges': True, 'analyze_service_account_impersonation': True}, 'condition_context': {'access_time': {}}}}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = asset_service.CreateSavedQueryRequest.meta.fields["saved_query"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
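- # (Sketch of the distinction this helper relies on, assuming the usual
- # imports: proto-plus wrapper types expose their schema as
- # asset_service.SavedQuery.meta.fields, while raw *_pb2 types expose
- # timestamp_pb2.Timestamp.DESCRIPTOR.fields; the hasattr check below
- # tells the two apart.)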
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["saved_query"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["saved_query"][field])): - del request_init["saved_query"][field][i][subfield] - else: - del request_init["saved_query"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = asset_service.SavedQuery.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_saved_query(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, asset_service.SavedQuery) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.creator == 'creator_value' - assert response.last_updater == 'last_updater_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_saved_query_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_create_saved_query") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_create_saved_query_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_create_saved_query") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = asset_service.CreateSavedQueryRequest.pb(asset_service.CreateSavedQueryRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.SavedQuery.to_json(asset_service.SavedQuery()) - req.return_value.content = return_value - - request = asset_service.CreateSavedQueryRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.SavedQuery() - post_with_metadata.return_value = asset_service.SavedQuery(), metadata - - client.create_saved_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_saved_query_rest_bad_request(request_type=asset_service.GetSavedQueryRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/savedQueries/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_saved_query(request) - - -@pytest.mark.parametrize("request_type", [ - asset_service.GetSavedQueryRequest, - dict, -]) -def test_get_saved_query_rest_call_success(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/savedQueries/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
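- # The sample name above is shaped to satisfy GetSavedQuery's REST path
- # template (assumed from the request_init used here), roughly:
- #
- #   '{name=*/*/savedQueries/*}'  ->  'sample1/sample2/savedQueries/sample3'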
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = asset_service.SavedQuery.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_saved_query(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.SavedQuery) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.creator == 'creator_value' - assert response.last_updater == 'last_updater_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_saved_query_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_get_saved_query") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_get_saved_query_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_get_saved_query") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = asset_service.GetSavedQueryRequest.pb(asset_service.GetSavedQueryRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.SavedQuery.to_json(asset_service.SavedQuery()) - req.return_value.content = return_value - - request = asset_service.GetSavedQueryRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.SavedQuery() - post_with_metadata.return_value = asset_service.SavedQuery(), metadata - - client.get_saved_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_saved_queries_rest_bad_request(request_type=asset_service.ListSavedQueriesRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.list_saved_queries(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    asset_service.ListSavedQueriesRequest,
-    dict,
-])
-def test_list_saved_queries_rest_call_success(request_type):
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'sample1/sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = asset_service.ListSavedQueriesResponse(
-            next_page_token='next_page_token_value',
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-
-        # Convert return value to protobuf type
-        return_value = asset_service.ListSavedQueriesResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.list_saved_queries(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListSavedQueriesPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_list_saved_queries_rest_interceptors(null_interceptor):
-    transport = transports.AssetServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(),
-    )
-    client = AssetServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_saved_queries") as post, \
-        mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_saved_queries_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.AssetServiceRestInterceptor, "pre_list_saved_queries") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = asset_service.ListSavedQueriesRequest.pb(asset_service.ListSavedQueriesRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = asset_service.ListSavedQueriesResponse.to_json(asset_service.ListSavedQueriesResponse())
-        req.return_value.content = return_value
-
-        request = asset_service.ListSavedQueriesRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = asset_service.ListSavedQueriesResponse()
-        post_with_metadata.return_value = asset_service.ListSavedQueriesResponse(), metadata
-
-        client.list_saved_queries(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_update_saved_query_rest_bad_request(request_type=asset_service.UpdateSavedQueryRequest):
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'saved_query': {'name': 'sample1/sample2/savedQueries/sample3'}}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.update_saved_query(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    asset_service.UpdateSavedQueryRequest,
-    dict,
-])
-def test_update_saved_query_rest_call_success(request_type):
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'saved_query': {'name': 'sample1/sample2/savedQueries/sample3'}}
-    request_init["saved_query"] = {'name': 'sample1/sample2/savedQueries/sample3', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'creator': 'creator_value', 'last_update_time': {}, 'last_updater': 'last_updater_value', 'labels': {}, 'content': {'iam_policy_analysis_query': {'scope': 'scope_value', 'resource_selector': {'full_resource_name': 'full_resource_name_value'}, 'identity_selector': {'identity': 'identity_value'}, 'access_selector': {'roles': ['roles_value1', 'roles_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}, 'options': {'expand_groups': True, 'expand_roles': True, 'expand_resources': True, 'output_resource_edges': True, 'output_group_edges': True, 'analyze_service_account_impersonation': True}, 'condition_context': {'access_time': {}}}}}
-    # The version of a generated dependency at test runtime may differ from the version used during generation.
-    # Delete any fields which are not present in the current runtime dependency
-    # See https://github.com/googleapis/gapic-generator-python/issues/1748
-
-    # Determine if the message type is proto-plus or protobuf
-    test_field = asset_service.UpdateSavedQueryRequest.meta.fields["saved_query"]
-
-    def get_message_fields(field):
-        # Given a field which is a message (composite type), return a list with
-        # all the fields of the message.
-        # If the field is not a composite type, return an empty list.
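-        # (Background for the branch below: a proto-plus message class carries
-        # its schema on `.meta.fields`, while a vanilla protobuf class carries
-        # it on `.DESCRIPTOR.fields`, so `hasattr(field.message, "DESCRIPTOR")`
-        # is enough to tell the two apart.)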
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["saved_query"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["saved_query"][field])): - del request_init["saved_query"][field][i][subfield] - else: - del request_init["saved_query"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = asset_service.SavedQuery.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_saved_query(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, asset_service.SavedQuery) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.creator == 'creator_value' - assert response.last_updater == 'last_updater_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_saved_query_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_update_saved_query") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_update_saved_query_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_update_saved_query") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = asset_service.UpdateSavedQueryRequest.pb(asset_service.UpdateSavedQueryRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.SavedQuery.to_json(asset_service.SavedQuery()) - req.return_value.content = return_value - - request = asset_service.UpdateSavedQueryRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.SavedQuery() - post_with_metadata.return_value = asset_service.SavedQuery(), metadata - - client.update_saved_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_saved_query_rest_bad_request(request_type=asset_service.DeleteSavedQueryRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/savedQueries/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_saved_query(request) - - -@pytest.mark.parametrize("request_type", [ - asset_service.DeleteSavedQueryRequest, - dict, -]) -def test_delete_saved_query_rest_call_success(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/savedQueries/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_saved_query(request) - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_saved_query_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_delete_saved_query") as pre: - pre.assert_not_called() - pb_message = asset_service.DeleteSavedQueryRequest.pb(asset_service.DeleteSavedQueryRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = asset_service.DeleteSavedQueryRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_saved_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_batch_get_effective_iam_policies_rest_bad_request(request_type=asset_service.BatchGetEffectiveIamPoliciesRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.batch_get_effective_iam_policies(request) - - -@pytest.mark.parametrize("request_type", [ - asset_service.BatchGetEffectiveIamPoliciesRequest, - dict, -]) -def test_batch_get_effective_iam_policies_rest_call_success(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.BatchGetEffectiveIamPoliciesResponse( - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = asset_service.BatchGetEffectiveIamPoliciesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.batch_get_effective_iam_policies(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, asset_service.BatchGetEffectiveIamPoliciesResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_batch_get_effective_iam_policies_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_batch_get_effective_iam_policies") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_batch_get_effective_iam_policies_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_batch_get_effective_iam_policies") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = asset_service.BatchGetEffectiveIamPoliciesRequest.pb(asset_service.BatchGetEffectiveIamPoliciesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.BatchGetEffectiveIamPoliciesResponse.to_json(asset_service.BatchGetEffectiveIamPoliciesResponse()) - req.return_value.content = return_value - - request = asset_service.BatchGetEffectiveIamPoliciesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.BatchGetEffectiveIamPoliciesResponse() - post_with_metadata.return_value = asset_service.BatchGetEffectiveIamPoliciesResponse(), metadata - - client.batch_get_effective_iam_policies(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_analyze_org_policies_rest_bad_request(request_type=asset_service.AnalyzeOrgPoliciesRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.analyze_org_policies(request) - - -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeOrgPoliciesRequest, - dict, -]) -def test_analyze_org_policies_rest_call_success(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.AnalyzeOrgPoliciesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = asset_service.AnalyzeOrgPoliciesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.analyze_org_policies(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.AnalyzeOrgPoliciesPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_analyze_org_policies_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_org_policies") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_org_policies_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_org_policies") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = asset_service.AnalyzeOrgPoliciesRequest.pb(asset_service.AnalyzeOrgPoliciesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.AnalyzeOrgPoliciesResponse.to_json(asset_service.AnalyzeOrgPoliciesResponse()) - req.return_value.content = return_value - - request = asset_service.AnalyzeOrgPoliciesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.AnalyzeOrgPoliciesResponse() - post_with_metadata.return_value = asset_service.AnalyzeOrgPoliciesResponse(), metadata - - client.analyze_org_policies(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_analyze_org_policy_governed_containers_rest_bad_request(request_type=asset_service.AnalyzeOrgPolicyGovernedContainersRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.analyze_org_policy_governed_containers(request) - - -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeOrgPolicyGovernedContainersRequest, - dict, -]) -def test_analyze_org_policy_governed_containers_rest_call_success(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.analyze_org_policy_governed_containers(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedContainersPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_analyze_org_policy_governed_containers_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_org_policy_governed_containers") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_org_policy_governed_containers_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_org_policy_governed_containers") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = asset_service.AnalyzeOrgPolicyGovernedContainersRequest.pb(asset_service.AnalyzeOrgPolicyGovernedContainersRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.to_json(asset_service.AnalyzeOrgPolicyGovernedContainersResponse()) - req.return_value.content = return_value - - request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() - post_with_metadata.return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse(), metadata - - client.analyze_org_policy_governed_containers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_analyze_org_policy_governed_assets_rest_bad_request(request_type=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.analyze_org_policy_governed_assets(request) - - -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, - dict, -]) -def test_analyze_org_policy_governed_assets_rest_call_success(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.analyze_org_policy_governed_assets(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedAssetsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_analyze_org_policy_governed_assets_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_org_policy_governed_assets") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_org_policy_governed_assets_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_org_policy_governed_assets") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest.pb(asset_service.AnalyzeOrgPolicyGovernedAssetsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.to_json(asset_service.AnalyzeOrgPolicyGovernedAssetsResponse()) - req.return_value.content = return_value - - request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse() - post_with_metadata.return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse(), metadata - - client.analyze_org_policy_governed_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'sample1/sample2/operations/sample3/sample4'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
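-    # (Unlike the GAPIC methods above, get_operation takes a raw
-    # operations_pb2 message, which is why the request above was populated via
-    # json_format.ParseDict. An equivalent keyword construction, same names:
-    #
-    #   request = operations_pb2.GetOperationRequest(
-    #       name='sample1/sample2/operations/sample3/sample4',
-    #   )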
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) -def test_get_operation_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'sample1/sample2/operations/sample3/sample4'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_initialize_client_w_rest(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_export_assets_empty_call_rest(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.export_assets), - '__call__') as call: - client.export_assets(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.ExportAssetsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_assets_empty_call_rest(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: - client.list_assets(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.ListAssetsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_batch_get_assets_history_empty_call_rest(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. 
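-    # The *_empty_call_rest tests all follow one pattern: patch the bound
-    # transport method, call the client with request=None, and check that the
-    # stub saw a default-constructed request. Spelled out for this method (no
-    # new names; this is exactly what the test drives):
-    #
-    #   client.batch_get_assets_history(request=None)
-    #   # expected to forward asset_service.BatchGetAssetsHistoryRequest()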
- with mock.patch.object( - type(client.transport.batch_get_assets_history), - '__call__') as call: - client.batch_get_assets_history(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.BatchGetAssetsHistoryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_feed_empty_call_rest(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_feed), - '__call__') as call: - client.create_feed(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.CreateFeedRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_feed_empty_call_rest(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_feed), - '__call__') as call: - client.get_feed(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.GetFeedRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_feeds_empty_call_rest(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_feeds), - '__call__') as call: - client.list_feeds(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.ListFeedsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_feed_empty_call_rest(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_feed), - '__call__') as call: - client.update_feed(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.UpdateFeedRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_feed_empty_call_rest(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_feed), - '__call__') as call: - client.delete_feed(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.DeleteFeedRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_search_all_resources_empty_call_rest(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.search_all_resources), - '__call__') as call: - client.search_all_resources(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.SearchAllResourcesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_search_all_iam_policies_empty_call_rest(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: - client.search_all_iam_policies(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.SearchAllIamPoliciesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_analyze_iam_policy_empty_call_rest(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.analyze_iam_policy), - '__call__') as call: - client.analyze_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.AnalyzeIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_analyze_iam_policy_longrunning_empty_call_rest(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.analyze_iam_policy_longrunning), - '__call__') as call: - client.analyze_iam_policy_longrunning(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.AnalyzeIamPolicyLongrunningRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_analyze_move_empty_call_rest(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.analyze_move), - '__call__') as call: - client.analyze_move(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.AnalyzeMoveRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_query_assets_empty_call_rest(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.query_assets), - '__call__') as call: - client.query_assets(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.QueryAssetsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_saved_query_empty_call_rest(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_saved_query), - '__call__') as call: - client.create_saved_query(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.CreateSavedQueryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_saved_query_empty_call_rest(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_saved_query), - '__call__') as call: - client.get_saved_query(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.GetSavedQueryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_saved_queries_empty_call_rest(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_saved_queries), - '__call__') as call: - client.list_saved_queries(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.ListSavedQueriesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_saved_query_empty_call_rest(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_saved_query), - '__call__') as call: - client.update_saved_query(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.UpdateSavedQueryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_saved_query_empty_call_rest(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_saved_query), - '__call__') as call: - client.delete_saved_query(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.DeleteSavedQueryRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_batch_get_effective_iam_policies_empty_call_rest(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.batch_get_effective_iam_policies), - '__call__') as call: - client.batch_get_effective_iam_policies(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.BatchGetEffectiveIamPoliciesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_analyze_org_policies_empty_call_rest(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.analyze_org_policies), - '__call__') as call: - client.analyze_org_policies(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.AnalyzeOrgPoliciesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_analyze_org_policy_governed_containers_empty_call_rest(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.analyze_org_policy_governed_containers), - '__call__') as call: - client.analyze_org_policy_governed_containers(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.AnalyzeOrgPolicyGovernedContainersRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_analyze_org_policy_governed_assets_empty_call_rest(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.analyze_org_policy_governed_assets), - '__call__') as call: - client.analyze_org_policy_governed_assets(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() - - assert args[0] == request_msg - - -def test_asset_service_rest_lro_client(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - transport = client.transport - - # Ensure that we have an api-core operations client. - assert isinstance( - transport.operations_client, -operations_v1.AbstractOperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.AssetServiceGrpcTransport, - ) - -def test_asset_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.AssetServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_asset_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.asset_v1.services.asset_service.transports.AssetServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.AssetServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
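-    # (The base transport is abstract by convention rather than via abc: each
-    # RPC stub is an ordinary method that raises NotImplementedError until a
-    # concrete subclass such as the gRPC or REST transport overrides it.)
-    # A minimal sketch of the pattern, with a hypothetical class name:
-    #
-    #   class _StubTransport:
-    #       def export_assets(self, request):
-    #           raise NotImplementedError()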
- methods = ( - 'export_assets', - 'list_assets', - 'batch_get_assets_history', - 'create_feed', - 'get_feed', - 'list_feeds', - 'update_feed', - 'delete_feed', - 'search_all_resources', - 'search_all_iam_policies', - 'analyze_iam_policy', - 'analyze_iam_policy_longrunning', - 'analyze_move', - 'query_assets', - 'create_saved_query', - 'get_saved_query', - 'list_saved_queries', - 'update_saved_query', - 'delete_saved_query', - 'batch_get_effective_iam_policies', - 'analyze_org_policies', - 'analyze_org_policy_governed_containers', - 'analyze_org_policy_governed_assets', - 'get_operation', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_asset_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.asset_v1.services.asset_service.transports.AssetServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.AssetServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_asset_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.asset_v1.services.asset_service.transports.AssetServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.AssetServiceTransport() - adc.assert_called_once() - - -def test_asset_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - AssetServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.AssetServiceGrpcTransport, - transports.AssetServiceGrpcAsyncIOTransport, - ], -) -def test_asset_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
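-    # "ADC" is Application Default Credentials: google.auth.default() resolves
-    # credentials from the environment (GOOGLE_APPLICATION_CREDENTIALS, gcloud
-    # user credentials, or the GCE metadata server). Outside of tests the
-    # equivalent lookup is roughly:
-    #
-    #   credentials, project_id = google.auth.default(
-    #       scopes=['https://www.googleapis.com/auth/cloud-platform'])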
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.AssetServiceGrpcTransport, - transports.AssetServiceGrpcAsyncIOTransport, - transports.AssetServiceRestTransport, - ], -) -def test_asset_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.AssetServiceGrpcTransport, grpc_helpers), - (transports.AssetServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_asset_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "cloudasset.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="cloudasset.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.AssetServiceGrpcTransport, transports.AssetServiceGrpcAsyncIOTransport]) -def test_asset_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
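-    # client_cert_source_for_mtls is a zero-argument callback returning a
-    # (cert_bytes, key_bytes) pair, which the transport hands to
-    # grpc.ssl_channel_credentials to build a mutual-TLS channel. The callback
-    # shape, as the client_cert_source_callback helper defined earlier in this
-    # file models it:
-    #
-    #   def client_cert_source_callback():
-    #       return b"cert bytes", b"key bytes"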
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_asset_service_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.AssetServiceRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_asset_service_host_no_port(transport_name): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='cloudasset.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'cloudasset.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://cloudasset.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_asset_service_host_with_port(transport_name): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='cloudasset.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'cloudasset.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://cloudasset.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_asset_service_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = AssetServiceClient( - credentials=creds1, - transport=transport_name, - ) - client2 = AssetServiceClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.export_assets._session - session2 = client2.transport.export_assets._session - assert session1 != session2 - session1 = client1.transport.list_assets._session - session2 = client2.transport.list_assets._session - assert session1 != session2 - session1 = client1.transport.batch_get_assets_history._session - session2 = client2.transport.batch_get_assets_history._session - assert session1 != session2 - session1 = client1.transport.create_feed._session - session2 = client2.transport.create_feed._session - assert session1 != session2 - session1 = client1.transport.get_feed._session - session2 = client2.transport.get_feed._session - assert session1 != session2 - session1 = client1.transport.list_feeds._session - session2 = client2.transport.list_feeds._session - assert session1 != session2 - session1 = client1.transport.update_feed._session - session2 = client2.transport.update_feed._session - assert session1 != session2 - session1 = client1.transport.delete_feed._session - session2 = client2.transport.delete_feed._session - assert session1 != session2 - session1 = client1.transport.search_all_resources._session - session2 = client2.transport.search_all_resources._session - assert 
session1 != session2 - session1 = client1.transport.search_all_iam_policies._session - session2 = client2.transport.search_all_iam_policies._session - assert session1 != session2 - session1 = client1.transport.analyze_iam_policy._session - session2 = client2.transport.analyze_iam_policy._session - assert session1 != session2 - session1 = client1.transport.analyze_iam_policy_longrunning._session - session2 = client2.transport.analyze_iam_policy_longrunning._session - assert session1 != session2 - session1 = client1.transport.analyze_move._session - session2 = client2.transport.analyze_move._session - assert session1 != session2 - session1 = client1.transport.query_assets._session - session2 = client2.transport.query_assets._session - assert session1 != session2 - session1 = client1.transport.create_saved_query._session - session2 = client2.transport.create_saved_query._session - assert session1 != session2 - session1 = client1.transport.get_saved_query._session - session2 = client2.transport.get_saved_query._session - assert session1 != session2 - session1 = client1.transport.list_saved_queries._session - session2 = client2.transport.list_saved_queries._session - assert session1 != session2 - session1 = client1.transport.update_saved_query._session - session2 = client2.transport.update_saved_query._session - assert session1 != session2 - session1 = client1.transport.delete_saved_query._session - session2 = client2.transport.delete_saved_query._session - assert session1 != session2 - session1 = client1.transport.batch_get_effective_iam_policies._session - session2 = client2.transport.batch_get_effective_iam_policies._session - assert session1 != session2 - session1 = client1.transport.analyze_org_policies._session - session2 = client2.transport.analyze_org_policies._session - assert session1 != session2 - session1 = client1.transport.analyze_org_policy_governed_containers._session - session2 = client2.transport.analyze_org_policy_governed_containers._session - assert session1 != session2 - session1 = client1.transport.analyze_org_policy_governed_assets._session - session2 = client2.transport.analyze_org_policy_governed_assets._session - assert session1 != session2 -def test_asset_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.AssetServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_asset_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.AssetServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
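The two channel tests above verify that a caller-supplied channel is adopted verbatim and that no SSL credentials are recorded for it. A condensed sketch of the same injection, assuming ``google-cloud-asset`` is installed (nothing is dialed, so the target can be arbitrary):

.. code-block:: python

    import grpc

    from google.cloud.asset_v1.services.asset_service import transports

    channel = grpc.secure_channel("localhost:50051", grpc.local_channel_credentials())
    transport = transports.AssetServiceGrpcTransport(
        host="squid.clam.whelk",
        channel=channel,
    )
    # The provided channel is used as-is.
    assert transport.grpc_channel is channel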
-@pytest.mark.parametrize("transport_class", [transports.AssetServiceGrpcTransport, transports.AssetServiceGrpcAsyncIOTransport]) -def test_asset_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.AssetServiceGrpcTransport, transports.AssetServiceGrpcAsyncIOTransport]) -def test_asset_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_asset_service_grpc_lro_client(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_asset_service_grpc_lro_async_client(): - client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. 
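Both LRO tests (the sync one above and the async one concluding below) pin down the same behavior: ``operations_client`` is built lazily on first access and the identical object is returned afterwards. A rough sketch of that property pattern (illustrative names only, not the generated transport's code):

.. code-block:: python

    class _TransportSketch:
        """Illustrative stand-in for the lazily cached LRO client property."""

        def __init__(self):
            self._operations_client = None

        @property
        def operations_client(self):
            if self._operations_client is None:
                # The real transport constructs an operations_v1 client
                # over its gRPC channel here.
                self._operations_client = object()
            return self._operations_client

    transport = _TransportSketch()
    assert transport.operations_client is transport.operations_client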
- assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_access_level_path(): - access_policy = "squid" - access_level = "clam" - expected = "accessPolicies/{access_policy}/accessLevels/{access_level}".format(access_policy=access_policy, access_level=access_level, ) - actual = AssetServiceClient.access_level_path(access_policy, access_level) - assert expected == actual - - -def test_parse_access_level_path(): - expected = { - "access_policy": "whelk", - "access_level": "octopus", - } - path = AssetServiceClient.access_level_path(**expected) - - # Check that the path construction is reversible. - actual = AssetServiceClient.parse_access_level_path(path) - assert expected == actual - -def test_access_policy_path(): - access_policy = "oyster" - expected = "accessPolicies/{access_policy}".format(access_policy=access_policy, ) - actual = AssetServiceClient.access_policy_path(access_policy) - assert expected == actual - - -def test_parse_access_policy_path(): - expected = { - "access_policy": "nudibranch", - } - path = AssetServiceClient.access_policy_path(**expected) - - # Check that the path construction is reversible. - actual = AssetServiceClient.parse_access_policy_path(path) - assert expected == actual - -def test_asset_path(): - expected = "*".format() - actual = AssetServiceClient.asset_path() - assert expected == actual - - -def test_parse_asset_path(): - expected = { - } - path = AssetServiceClient.asset_path(**expected) - - # Check that the path construction is reversible. - actual = AssetServiceClient.parse_asset_path(path) - assert expected == actual - -def test_feed_path(): - project = "cuttlefish" - feed = "mussel" - expected = "projects/{project}/feeds/{feed}".format(project=project, feed=feed, ) - actual = AssetServiceClient.feed_path(project, feed) - assert expected == actual - - -def test_parse_feed_path(): - expected = { - "project": "winkle", - "feed": "nautilus", - } - path = AssetServiceClient.feed_path(**expected) - - # Check that the path construction is reversible. - actual = AssetServiceClient.parse_feed_path(path) - assert expected == actual - -def test_inventory_path(): - project = "scallop" - location = "abalone" - instance = "squid" - expected = "projects/{project}/locations/{location}/instances/{instance}/inventory".format(project=project, location=location, instance=instance, ) - actual = AssetServiceClient.inventory_path(project, location, instance) - assert expected == actual - - -def test_parse_inventory_path(): - expected = { - "project": "clam", - "location": "whelk", - "instance": "octopus", - } - path = AssetServiceClient.inventory_path(**expected) - - # Check that the path construction is reversible. - actual = AssetServiceClient.parse_inventory_path(path) - assert expected == actual - -def test_saved_query_path(): - project = "oyster" - saved_query = "nudibranch" - expected = "projects/{project}/savedQueries/{saved_query}".format(project=project, saved_query=saved_query, ) - actual = AssetServiceClient.saved_query_path(project, saved_query) - assert expected == actual - - -def test_parse_saved_query_path(): - expected = { - "project": "cuttlefish", - "saved_query": "mussel", - } - path = AssetServiceClient.saved_query_path(**expected) - - # Check that the path construction is reversible. 
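Each pair of path tests here, like the remaining ``parse_*`` checks that continue below, exercises a build/parse round trip. A minimal sketch of how such helpers typically work, using the feed path as the example (simplified relative to the generated client):

.. code-block:: python

    import re

    def feed_path(project: str, feed: str) -> str:
        return "projects/{project}/feeds/{feed}".format(project=project, feed=feed)

    def parse_feed_path(path: str) -> dict:
        m = re.match(r"^projects/(?P<project>.+?)/feeds/(?P<feed>.+?)$", path)
        return m.groupdict() if m else {}

    # The parse helper inverts the build helper.
    assert parse_feed_path(feed_path("cuttlefish", "mussel")) == {
        "project": "cuttlefish",
        "feed": "mussel",
    }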
- actual = AssetServiceClient.parse_saved_query_path(path) - assert expected == actual - -def test_service_perimeter_path(): - access_policy = "winkle" - service_perimeter = "nautilus" - expected = "accessPolicies/{access_policy}/servicePerimeters/{service_perimeter}".format(access_policy=access_policy, service_perimeter=service_perimeter, ) - actual = AssetServiceClient.service_perimeter_path(access_policy, service_perimeter) - assert expected == actual - - -def test_parse_service_perimeter_path(): - expected = { - "access_policy": "scallop", - "service_perimeter": "abalone", - } - path = AssetServiceClient.service_perimeter_path(**expected) - - # Check that the path construction is reversible. - actual = AssetServiceClient.parse_service_perimeter_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = AssetServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = AssetServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = AssetServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format(folder=folder, ) - actual = AssetServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = AssetServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = AssetServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format(organization=organization, ) - actual = AssetServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = AssetServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = AssetServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format(project=project, ) - actual = AssetServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = AssetServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = AssetServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = AssetServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = AssetServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = AssetServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.AssetServiceTransport, '_prep_wrapped_messages') as prep: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.AssetServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = AssetServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_get_operation(transport: str = "grpc"): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
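The field-header assertions here and just below check the request-routing metadata GAPIC clients attach to each call. A rough approximation of how that header value is formed (the real helper lives in ``google.api_core.gapic_v1.routing_header``; this sketch may differ in edge cases):

.. code-block:: python

    from urllib.parse import quote

    def routing_metadata(params: dict) -> tuple:
        # key=value pairs, URL-encoded and joined with '&'.
        value = "&".join(
            "{}={}".format(k, quote(str(v), safe="")) for k, v in params.items()
        )
        return ("x-goog-request-params", value)

    assert routing_metadata({"name": "locations"}) == (
        "x-goog-request-params",
        "name=locations",
    )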
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_transport_close_grpc(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close_rest(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
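The transport-close tests above and the loop concluding below all pin the same contract: entering the client context is a no-op, and leaving it closes the transport exactly once. A minimal stand-in showing that contract (illustrative only):

.. code-block:: python

    class _ClientSketch:
        """Illustrative stand-in for the generated client's context manager."""

        def __init__(self, transport):
            self.transport = transport

        def __enter__(self):
            # Entering must not touch the transport.
            return self

        def __exit__(self, *exc_info):
            # Leaving the block releases the transport's channel/session.
            self.transport.close()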
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (AssetServiceClient, transports.AssetServiceGrpcTransport), - (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/.coveragerc b/owl-bot-staging/google-cloud-asset/v1p1beta1/.coveragerc deleted file mode 100644 index 801f6d8a1da7..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/asset/__init__.py - google/cloud/asset/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/.flake8 b/owl-bot-staging/google-cloud-asset/v1p1beta1/.flake8 deleted file mode 100644 index 29227d4cf419..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. 
- **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/MANIFEST.in b/owl-bot-staging/google-cloud-asset/v1p1beta1/MANIFEST.in deleted file mode 100644 index 165ea47251e5..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/asset *.py -recursive-include google/cloud/asset_v1p1beta1 *.py diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/README.rst b/owl-bot-staging/google-cloud-asset/v1p1beta1/README.rst deleted file mode 100644 index a10b3ef1e958..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/README.rst +++ /dev/null @@ -1,143 +0,0 @@ -Python Client for Google Cloud Asset API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Asset API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv - source /bin/activate - /bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. code-block:: console - - python3 -m venv - \Scripts\activate - \Scripts\pip.exe install \path\to\library - - -Logging -------- - -This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes. -Note the following: - -#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging. -#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**. -#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below. - - -Simple, environment-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google -logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged -messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging -event. - -A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log. 
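A rough sketch of the scope rule just stated, ahead of the valid/invalid examples that follow (the library's actual validation may differ in detail):

.. code-block:: python

    import re

    def is_valid_logging_scope(scope: str) -> bool:
        # "google" itself, or a dotted namespace underneath it.
        return re.fullmatch(r"google(\.\w+)*", scope) is not None

    assert is_valid_logging_scope("google.cloud.asset.v1")
    assert not is_valid_logging_scope("foo")
    assert not is_valid_logging_scope("123")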
- -- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc. -- Invalid logging scopes: :code:`foo`, :code:`123`, etc. - -**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers. - - -Examples -^^^^^^^^ - -- Enabling the default handler for all Google-based loggers - -.. code-block:: console - - export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google - -- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`): - -.. code-block:: console - - export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1 - - -Advanced, code-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -You can also configure a valid logging scope using Python's standard `logging` mechanism. - - -Examples -^^^^^^^^ - -- Configuring a handler for all Google-based loggers - -.. code-block:: python - - import logging - - from google.cloud.translate_v3 import translate - - base_logger = logging.getLogger("google") - base_logger.addHandler(logging.StreamHandler()) - base_logger.setLevel(logging.DEBUG) - -- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`): - -.. code-block:: python - - import logging - - from google.cloud.translate_v3 import translate - - base_logger = logging.getLogger("google.cloud.library_v1") - base_logger.addHandler(logging.StreamHandler()) - base_logger.setLevel(logging.DEBUG) - - -Logging details -~~~~~~~~~~~~~~~ - -#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root - logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set - :code:`logging.getLogger("google").propagate = True` in your code. -#. You can mix the different logging configurations above for different Google modules. For example, you may want use a code-based logging configuration for - one library, but decide you need to also set up environment-based logging configuration for another library. - - #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual - if the code -based configuration gets applied first. - -#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get - executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured. - (This is the reason for 2.i. above.) diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/docs/_static/custom.css b/owl-bot-staging/google-cloud-asset/v1p1beta1/docs/_static/custom.css deleted file mode 100644 index 06423be0b592..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/docs/_static/custom.css +++ /dev/null @@ -1,3 +0,0 @@ -dl.field-list > dt { - min-width: 100px -} diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/docs/asset_v1p1beta1/asset_service.rst b/owl-bot-staging/google-cloud-asset/v1p1beta1/docs/asset_v1p1beta1/asset_service.rst deleted file mode 100644 index c034bc69c874..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/docs/asset_v1p1beta1/asset_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -AssetService ------------------------------- - -.. 
automodule:: google.cloud.asset_v1p1beta1.services.asset_service - :members: - :inherited-members: - -.. automodule:: google.cloud.asset_v1p1beta1.services.asset_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/docs/asset_v1p1beta1/services_.rst b/owl-bot-staging/google-cloud-asset/v1p1beta1/docs/asset_v1p1beta1/services_.rst deleted file mode 100644 index 2839e3548f54..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/docs/asset_v1p1beta1/services_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Asset v1p1beta1 API -============================================= -.. toctree:: - :maxdepth: 2 - - asset_service diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/docs/asset_v1p1beta1/types_.rst b/owl-bot-staging/google-cloud-asset/v1p1beta1/docs/asset_v1p1beta1/types_.rst deleted file mode 100644 index 0df9177454f6..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/docs/asset_v1p1beta1/types_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Asset v1p1beta1 API -========================================== - -.. automodule:: google.cloud.asset_v1p1beta1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/docs/conf.py b/owl-bot-staging/google-cloud-asset/v1p1beta1/docs/conf.py deleted file mode 100644 index fa7647914fb5..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-asset documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. 
-templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = u"google-cloud-asset" -copyright = u"2023, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = 'en' - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. 
-# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-asset-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). 
- # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-asset.tex", - u"google-cloud-asset Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-asset", - u"Google Cloud Asset Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-asset", - u"google-cloud-asset Documentation", - author, - "google-cloud-asset", - "GAPIC library for Google Cloud Asset API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. 
-intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/docs/index.rst b/owl-bot-staging/google-cloud-asset/v1p1beta1/docs/index.rst deleted file mode 100644 index e1c526b36445..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - asset_v1p1beta1/services_ - asset_v1p1beta1/types_ diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset/__init__.py b/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset/__init__.py deleted file mode 100644 index 6e43b5af463b..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset/__init__.py +++ /dev/null @@ -1,41 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.asset import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.asset_v1p1beta1.services.asset_service.client import AssetServiceClient -from google.cloud.asset_v1p1beta1.services.asset_service.async_client import AssetServiceAsyncClient - -from google.cloud.asset_v1p1beta1.types.asset_service import SearchAllIamPoliciesRequest -from google.cloud.asset_v1p1beta1.types.asset_service import SearchAllIamPoliciesResponse -from google.cloud.asset_v1p1beta1.types.asset_service import SearchAllResourcesRequest -from google.cloud.asset_v1p1beta1.types.asset_service import SearchAllResourcesResponse -from google.cloud.asset_v1p1beta1.types.assets import IamPolicySearchResult -from google.cloud.asset_v1p1beta1.types.assets import Permissions -from google.cloud.asset_v1p1beta1.types.assets import StandardResourceMetadata - -__all__ = ('AssetServiceClient', - 'AssetServiceAsyncClient', - 'SearchAllIamPoliciesRequest', - 'SearchAllIamPoliciesResponse', - 'SearchAllResourcesRequest', - 'SearchAllResourcesResponse', - 'IamPolicySearchResult', - 'Permissions', - 'StandardResourceMetadata', -) diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset/gapic_version.py b/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset/py.typed b/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset/py.typed deleted file mode 100644 index 3dbb09a39130..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-asset package uses inline types. diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/__init__.py b/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/__init__.py deleted file mode 100644 index 3faa8d758e77..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/__init__.py +++ /dev/null @@ -1,42 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.asset_v1p1beta1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.asset_service import AssetServiceClient -from .services.asset_service import AssetServiceAsyncClient - -from .types.asset_service import SearchAllIamPoliciesRequest -from .types.asset_service import SearchAllIamPoliciesResponse -from .types.asset_service import SearchAllResourcesRequest -from .types.asset_service import SearchAllResourcesResponse -from .types.assets import IamPolicySearchResult -from .types.assets import Permissions -from .types.assets import StandardResourceMetadata - -__all__ = ( - 'AssetServiceAsyncClient', -'AssetServiceClient', -'IamPolicySearchResult', -'Permissions', -'SearchAllIamPoliciesRequest', -'SearchAllIamPoliciesResponse', -'SearchAllResourcesRequest', -'SearchAllResourcesResponse', -'StandardResourceMetadata', -) diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/gapic_metadata.json b/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/gapic_metadata.json deleted file mode 100644 index c8f50c010843..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/gapic_metadata.json +++ /dev/null @@ -1,58 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.asset_v1p1beta1", - "protoPackage": "google.cloud.asset.v1p1beta1", - "schema": "1.0", - "services": { - "AssetService": { - "clients": { - "grpc": { - "libraryClient": "AssetServiceClient", - "rpcs": { - "SearchAllIamPolicies": { - "methods": [ - "search_all_iam_policies" - ] - }, - "SearchAllResources": { - "methods": [ - "search_all_resources" - ] - } - } - }, - "grpc-async": { - "libraryClient": "AssetServiceAsyncClient", - "rpcs": { - "SearchAllIamPolicies": { - "methods": [ - "search_all_iam_policies" - ] - }, - "SearchAllResources": { - "methods": [ - "search_all_resources" - ] - } - } - }, - "rest": { - "libraryClient": "AssetServiceClient", - "rpcs": { - "SearchAllIamPolicies": { - "methods": [ - "search_all_iam_policies" - ] - }, - "SearchAllResources": { - "methods": [ - "search_all_resources" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/gapic_version.py b/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
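The ``gapic_metadata.json`` above is machine-readable: it maps each proto RPC to the client methods that implement it, per transport. A short sketch of consuming it (assumes the file from the diff above has been saved locally):

.. code-block:: python

    import json

    with open("gapic_metadata.json") as f:
        metadata = json.load(f)

    grpc_rpcs = metadata["services"]["AssetService"]["clients"]["grpc"]["rpcs"]
    assert grpc_rpcs["SearchAllResources"]["methods"] == ["search_all_resources"]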
-# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/py.typed b/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/py.typed deleted file mode 100644 index 3dbb09a39130..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-asset package uses inline types. diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/services/__init__.py b/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/services/__init__.py deleted file mode 100644 index 8f6cf068242c..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/services/asset_service/__init__.py b/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/services/asset_service/__init__.py deleted file mode 100644 index 1ad75a011889..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/services/asset_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import AssetServiceClient -from .async_client import AssetServiceAsyncClient - -__all__ = ( - 'AssetServiceClient', - 'AssetServiceAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/services/asset_service/async_client.py b/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/services/asset_service/async_client.py deleted file mode 100644 index f9179911ef81..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/services/asset_service/async_client.py +++ /dev/null @@ -1,557 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.asset_v1p1beta1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.cloud.asset_v1p1beta1.services.asset_service import pagers -from google.cloud.asset_v1p1beta1.types import asset_service -from google.cloud.asset_v1p1beta1.types import assets -from google.longrunning import operations_pb2 # type: ignore -from .transports.base import AssetServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import AssetServiceGrpcAsyncIOTransport -from .client import AssetServiceClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -class AssetServiceAsyncClient: - """Asset service definition.""" - - _client: AssetServiceClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = AssetServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = AssetServiceClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = AssetServiceClient._DEFAULT_UNIVERSE - - common_billing_account_path = staticmethod(AssetServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(AssetServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(AssetServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(AssetServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(AssetServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(AssetServiceClient.parse_common_organization_path) - common_project_path = staticmethod(AssetServiceClient.common_project_path) - parse_common_project_path = staticmethod(AssetServiceClient.parse_common_project_path) - common_location_path = staticmethod(AssetServiceClient.common_location_path) - parse_common_location_path = staticmethod(AssetServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. 
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            AssetServiceAsyncClient: The constructed client.
-        """
-        return AssetServiceClient.from_service_account_info.__func__(AssetServiceAsyncClient, info, *args, **kwargs)  # type: ignore
-
-    @classmethod
-    def from_service_account_file(cls, filename: str, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            file.
-
-        Args:
-            filename (str): The path to the service account private key json
-                file.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            AssetServiceAsyncClient: The constructed client.
-        """
-        return AssetServiceClient.from_service_account_file.__func__(AssetServiceAsyncClient, filename, *args, **kwargs)  # type: ignore
-
-    from_service_account_json = from_service_account_file
-
-    @classmethod
-    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None):
-        """Return the API endpoint and client cert source for mutual TLS.
-
-        The client cert source is determined in the following order:
-        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
-        client cert source is None.
-        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
-        default client cert source exists, use the default one; otherwise the client cert
-        source is None.
-
-        The API endpoint is determined in the following order:
-        (1) if `client_options.api_endpoint` is provided, use the provided one.
-        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
-        default mTLS endpoint; if the environment variable is "never", use the default API
-        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
-        use the default API endpoint.
-
-        More details can be found at https://google.aip.dev/auth/4114.
-
-        Args:
-            client_options (google.api_core.client_options.ClientOptions): Custom options for the
-                client. Only the `api_endpoint` and `client_cert_source` properties may be used
-                in this method.
-
-        Returns:
-            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
-                client cert source to use.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
-        """
-        return AssetServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
-
-    @property
-    def transport(self) -> AssetServiceTransport:
-        """Returns the transport used by the client instance.
-
-        Returns:
-            AssetServiceTransport: The transport used by the client instance.
-        """
-        return self._client.transport
-
-    @property
-    def api_endpoint(self):
-        """Return the API endpoint used by the client instance.
-
-        Returns:
-            str: The API endpoint used by the client instance.
-        """
-        return self._client._api_endpoint
-
-    @property
-    def universe_domain(self) -> str:
-        """Return the universe domain used by the client instance.
-
-        Returns:
-            str: The universe domain used
-                by the client instance.
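The ``from_service_account_*`` helpers above delegate to the synchronous client's implementation via ``__func__``. A minimal usage sketch, assuming a hypothetical key file path:

.. code-block:: python

    from google.cloud import asset_v1p1beta1

    # "sa-key.json" is a hypothetical path to a service-account key file.
    client = asset_v1p1beta1.AssetServiceAsyncClient.from_service_account_file("sa-key.json")

    # from_service_account_json is an alias of from_service_account_file;
    # from_service_account_info takes the already-parsed key as a dict.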
- """ - return self._client._universe_domain - - get_transport_class = AssetServiceClient.get_transport_class - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, AssetServiceTransport, Callable[..., AssetServiceTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the asset service async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,AssetServiceTransport,Callable[..., AssetServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the AssetServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. 
- """ - self._client = AssetServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.asset_v1p1beta1.AssetServiceAsyncClient`.", - extra = { - "serviceName": "google.cloud.asset.v1p1beta1.AssetService", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { - "serviceName": "google.cloud.asset.v1p1beta1.AssetService", - "credentialsType": None, - } - ) - - async def search_all_resources(self, - request: Optional[Union[asset_service.SearchAllResourcesRequest, dict]] = None, - *, - scope: Optional[str] = None, - query: Optional[str] = None, - asset_types: Optional[MutableSequence[str]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.SearchAllResourcesAsyncPager: - r"""Searches all the resources within a given accessible Resource - Manager scope (project/folder/organization). This RPC gives - callers especially administrators the ability to search all the - resources within a scope, even if they don't have ``.get`` - permission of all the resources. Callers should have - ``cloudasset.assets.searchAllResources`` permission on the - requested scope, otherwise the request will be rejected. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1p1beta1 - - async def sample_search_all_resources(): - # Create a client - client = asset_v1p1beta1.AssetServiceAsyncClient() - - # Initialize request argument(s) - request = asset_v1p1beta1.SearchAllResourcesRequest( - scope="scope_value", - ) - - # Make the request - page_result = client.search_all_resources(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.asset_v1p1beta1.types.SearchAllResourcesRequest, dict]]): - The request object. Search all resources request. - scope (:class:`str`): - Required. The relative name of an asset. The search is - limited to the resources within the ``scope``. The - allowed value must be: - - - Organization number (such as "organizations/123") - - Folder number (such as "folders/1234") - - Project number (such as "projects/12345") - - Project ID (such as "projects/abc") - - This corresponds to the ``scope`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - query (:class:`str`): - Optional. The query statement. - This corresponds to the ``query`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - asset_types (:class:`MutableSequence[str]`): - Optional. 
A list of asset types that - this request searches for. If empty, it - will search all the supported asset - types. - - This corresponds to the ``asset_types`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.asset_v1p1beta1.services.asset_service.pagers.SearchAllResourcesAsyncPager: - Search all resources response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [scope, query, asset_types] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.SearchAllResourcesRequest): - request = asset_service.SearchAllResourcesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if scope is not None: - request.scope = scope - if query is not None: - request.query = query - if asset_types: - request.asset_types.extend(asset_types) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.search_all_resources] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("scope", request.scope), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.SearchAllResourcesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def search_all_iam_policies(self, - request: Optional[Union[asset_service.SearchAllIamPoliciesRequest, dict]] = None, - *, - scope: Optional[str] = None, - query: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.SearchAllIamPoliciesAsyncPager: - r"""Searches all the IAM policies within a given accessible Resource - Manager scope (project/folder/organization). 
This RPC gives - callers especially administrators the ability to search all the - IAM policies within a scope, even if they don't have - ``.getIamPolicy`` permission of all the IAM policies. Callers - should have ``cloudasset.assets.searchAllIamPolicies`` - permission on the requested scope, otherwise the request will be - rejected. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1p1beta1 - - async def sample_search_all_iam_policies(): - # Create a client - client = asset_v1p1beta1.AssetServiceAsyncClient() - - # Initialize request argument(s) - request = asset_v1p1beta1.SearchAllIamPoliciesRequest( - scope="scope_value", - ) - - # Make the request - page_result = client.search_all_iam_policies(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.asset_v1p1beta1.types.SearchAllIamPoliciesRequest, dict]]): - The request object. Search all IAM policies request. - scope (:class:`str`): - Required. The relative name of an asset. The search is - limited to the resources within the ``scope``. The - allowed value must be: - - - Organization number (such as "organizations/123") - - Folder number (such as "folders/1234") - - Project number (such as "projects/12345") - - Project ID (such as "projects/abc") - - This corresponds to the ``scope`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - query (:class:`str`): - Optional. The query statement. Examples: - - - "policy:myuser@mydomain.com" - - "policy:(myuser@mydomain.com viewer)" - - This corresponds to the ``query`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.asset_v1p1beta1.services.asset_service.pagers.SearchAllIamPoliciesAsyncPager: - Search all IAM policies response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [scope, query] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
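Both async search methods accept flattened ``scope``/``query`` arguments in place of a request object; supplying both raises ``ValueError``, as the surrounding coercion logic enforces. A sketch with a placeholder scope:

.. code-block:: python

    import asyncio

    from google.cloud import asset_v1p1beta1

    async def main():
        client = asset_v1p1beta1.AssetServiceAsyncClient()
        # "projects/123" is a placeholder; the query follows the docstring examples.
        pager = await client.search_all_iam_policies(
            scope="projects/123",
            query="policy:myuser@mydomain.com",
        )
        async for result in pager:
            print(result.resource)

    asyncio.run(main())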
- if not isinstance(request, asset_service.SearchAllIamPoliciesRequest): - request = asset_service.SearchAllIamPoliciesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if scope is not None: - request.scope = scope - if query is not None: - request.query = query - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.search_all_iam_policies] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("scope", request.scope), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.SearchAllIamPoliciesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def __aenter__(self) -> "AssetServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "AssetServiceAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/services/asset_service/client.py b/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/services/asset_service/client.py deleted file mode 100644 index b0453a2c7b99..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/services/asset_service/client.py +++ /dev/null @@ -1,912 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#
-from collections import OrderedDict
-from http import HTTPStatus
-import json
-import logging as std_logging
-import os
-import re
-from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast
-import warnings
-
-from google.cloud.asset_v1p1beta1 import gapic_version as package_version
-
-from google.api_core import client_options as client_options_lib
-from google.api_core import exceptions as core_exceptions
-from google.api_core import gapic_v1
-from google.api_core import retry as retries
-from google.auth import credentials as ga_credentials  # type: ignore
-from google.auth.transport import mtls  # type: ignore
-from google.auth.transport.grpc import SslCredentials  # type: ignore
-from google.auth.exceptions import MutualTLSChannelError  # type: ignore
-from google.oauth2 import service_account  # type: ignore
-
-try:
-    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
-except AttributeError:  # pragma: NO COVER
-    OptionalRetry = Union[retries.Retry, object, None]  # type: ignore
-
-try:
-    from google.api_core import client_logging  # type: ignore
-    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
-except ImportError:  # pragma: NO COVER
-    CLIENT_LOGGING_SUPPORTED = False
-
-_LOGGER = std_logging.getLogger(__name__)
-
-from google.cloud.asset_v1p1beta1.services.asset_service import pagers
-from google.cloud.asset_v1p1beta1.types import asset_service
-from google.cloud.asset_v1p1beta1.types import assets
-from google.longrunning import operations_pb2 # type: ignore
-from .transports.base import AssetServiceTransport, DEFAULT_CLIENT_INFO
-from .transports.grpc import AssetServiceGrpcTransport
-from .transports.grpc_asyncio import AssetServiceGrpcAsyncIOTransport
-from .transports.rest import AssetServiceRestTransport
-
-
-class AssetServiceClientMeta(type):
-    """Metaclass for the AssetService client.
-
-    This provides class-level methods for building and retrieving
-    support objects (e.g. transport) without polluting the client instance
-    objects.
-    """
-    _transport_registry = OrderedDict()  # type: Dict[str, Type[AssetServiceTransport]]
-    _transport_registry["grpc"] = AssetServiceGrpcTransport
-    _transport_registry["grpc_asyncio"] = AssetServiceGrpcAsyncIOTransport
-    _transport_registry["rest"] = AssetServiceRestTransport
-
-    def get_transport_class(cls,
-            label: Optional[str] = None,
-        ) -> Type[AssetServiceTransport]:
-        """Returns an appropriate transport class.
-
-        Args:
-            label: The name of the desired transport. If none is
-                provided, then the first transport in the registry is used.
-
-        Returns:
-            The transport class to use.
-        """
-        # If a specific transport is requested, return that one.
-        if label:
-            return cls._transport_registry[label]
-
-        # No transport is requested; return the default (that is, the first one
-        # in the dictionary).
-        return next(iter(cls._transport_registry.values()))
-
-
-class AssetServiceClient(metaclass=AssetServiceClientMeta):
-    """Asset service definition."""
-
-    @staticmethod
-    def _get_default_mtls_endpoint(api_endpoint):
-        """Converts an API endpoint to its mTLS endpoint.
-
-        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
-        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
-        Args:
-            api_endpoint (Optional[str]): the api endpoint to convert.
-        Returns:
-            str: converted mTLS api endpoint.
-        """
-        if not api_endpoint:
-            return api_endpoint
-
-        mtls_endpoint_re = re.compile(
-            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
- ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = "cloudasset.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - _DEFAULT_ENDPOINT_TEMPLATE = "cloudasset.{UNIVERSE_DOMAIN}" - _DEFAULT_UNIVERSE = "googleapis.com" - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - AssetServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - AssetServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> AssetServiceTransport: - """Returns the transport used by the client instance. - - Returns: - AssetServiceTransport: The transport used by the client - instance. 
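The rewrite performed by ``_get_default_mtls_endpoint`` inserts the ``mtls`` label after the service name; endpoints that are already mTLS or that are not on ``googleapis.com`` pass through unchanged. A sketch of the effect (the helper is private, so this is illustrative only):

.. code-block:: python

    from google.cloud.asset_v1p1beta1 import AssetServiceClient

    convert = AssetServiceClient._get_default_mtls_endpoint  # private helper
    print(convert("cloudasset.googleapis.com"))          # cloudasset.mtls.googleapis.com
    print(convert("cloudasset.sandbox.googleapis.com"))  # cloudasset.mtls.sandbox.googleapis.com
    print(convert("cloudasset.mtls.googleapis.com"))     # already mTLS; unchanged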
- """ - return self._transport - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Deprecated. Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. 
- - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. - - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"]. - """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. 
- - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = AssetServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = AssetServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. - """ - universe_domain = AssetServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. - """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. 
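Taken together, ``_get_universe_domain`` and ``_get_api_endpoint`` mean that, absent an explicit override, the endpoint is the template resolved against the effective universe domain. A sketch of the resolution; the custom domain is a placeholder:

.. code-block:: python

    template = "cloudasset.{UNIVERSE_DOMAIN}"

    # Default universe:
    print(template.format(UNIVERSE_DOMAIN="googleapis.com"))        # cloudasset.googleapis.com

    # With ClientOptions(universe_domain=...) or GOOGLE_CLOUD_UNIVERSE_DOMAIN
    # set to a placeholder domain:
    print(template.format(UNIVERSE_DOMAIN="example-universe.net"))  # cloudasset.example-universe.net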
- """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, AssetServiceTransport, Callable[..., AssetServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the asset service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,AssetServiceTransport,Callable[..., AssetServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the AssetServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. 
- """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) - - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = AssetServiceClient._read_environment_variables() - self._client_cert_source = AssetServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = AssetServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER - # Setup logging. - client_logging.initialize_logging() - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, AssetServiceTransport) - if transport_provided: - # transport is a AssetServiceTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = cast(AssetServiceTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - AssetServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[AssetServiceTransport], Callable[..., AssetServiceTransport]] = ( - AssetServiceClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., AssetServiceTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.asset_v1p1beta1.AssetServiceClient`.", - extra = { - "serviceName": "google.cloud.asset.v1p1beta1.AssetService", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.cloud.asset.v1p1beta1.AssetService", - "credentialsType": None, - } - ) - - def search_all_resources(self, - request: Optional[Union[asset_service.SearchAllResourcesRequest, dict]] = None, - *, - scope: Optional[str] = None, - query: Optional[str] = None, - asset_types: Optional[MutableSequence[str]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.SearchAllResourcesPager: - r"""Searches all the resources within a given accessible Resource - Manager scope (project/folder/organization). This RPC gives - callers especially administrators the ability to search all the - resources within a scope, even if they don't have ``.get`` - permission of all the resources. Callers should have - ``cloudasset.assets.searchAllResources`` permission on the - requested scope, otherwise the request will be rejected. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1p1beta1 - - def sample_search_all_resources(): - # Create a client - client = asset_v1p1beta1.AssetServiceClient() - - # Initialize request argument(s) - request = asset_v1p1beta1.SearchAllResourcesRequest( - scope="scope_value", - ) - - # Make the request - page_result = client.search_all_resources(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.asset_v1p1beta1.types.SearchAllResourcesRequest, dict]): - The request object. Search all resources request. - scope (str): - Required. The relative name of an asset. The search is - limited to the resources within the ``scope``. The - allowed value must be: - - - Organization number (such as "organizations/123") - - Folder number (such as "folders/1234") - - Project number (such as "projects/12345") - - Project ID (such as "projects/abc") - - This corresponds to the ``scope`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - query (str): - Optional. The query statement. - This corresponds to the ``query`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - asset_types (MutableSequence[str]): - Optional. A list of asset types that - this request searches for. If empty, it - will search all the supported asset - types. - - This corresponds to the ``asset_types`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.asset_v1p1beta1.services.asset_service.pagers.SearchAllResourcesPager: - Search all resources response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [scope, query, asset_types] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.SearchAllResourcesRequest): - request = asset_service.SearchAllResourcesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if scope is not None: - request.scope = scope - if query is not None: - request.query = query - if asset_types is not None: - request.asset_types = asset_types - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
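Each RPC accepts per-call ``retry`` and ``timeout`` arguments as documented above, and the synchronous client can serve as a context manager so its transport is closed deterministically (see the ``__exit__`` warning later in this file). A sketch with placeholder values:

.. code-block:: python

    from google.api_core import retry as retries
    from google.cloud import asset_v1p1beta1

    # Scope and retry/timeout values below are illustrative.
    with asset_v1p1beta1.AssetServiceClient() as client:
        pager = client.search_all_resources(
            scope="projects/123",
            retry=retries.Retry(initial=0.1, maximum=10.0, multiplier=1.3),
            timeout=60.0,
        )
        for resource in pager:
            print(resource.name)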
- rpc = self._transport._wrapped_methods[self._transport.search_all_resources] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("scope", request.scope), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.SearchAllResourcesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def search_all_iam_policies(self, - request: Optional[Union[asset_service.SearchAllIamPoliciesRequest, dict]] = None, - *, - scope: Optional[str] = None, - query: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.SearchAllIamPoliciesPager: - r"""Searches all the IAM policies within a given accessible Resource - Manager scope (project/folder/organization). This RPC gives - callers especially administrators the ability to search all the - IAM policies within a scope, even if they don't have - ``.getIamPolicy`` permission of all the IAM policies. Callers - should have ``cloudasset.assets.searchAllIamPolicies`` - permission on the requested scope, otherwise the request will be - rejected. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1p1beta1 - - def sample_search_all_iam_policies(): - # Create a client - client = asset_v1p1beta1.AssetServiceClient() - - # Initialize request argument(s) - request = asset_v1p1beta1.SearchAllIamPoliciesRequest( - scope="scope_value", - ) - - # Make the request - page_result = client.search_all_iam_policies(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.asset_v1p1beta1.types.SearchAllIamPoliciesRequest, dict]): - The request object. Search all IAM policies request. - scope (str): - Required. The relative name of an asset. The search is - limited to the resources within the ``scope``. The - allowed value must be: - - - Organization number (such as "organizations/123") - - Folder number (such as "folders/1234") - - Project number (such as "projects/12345") - - Project ID (such as "projects/abc") - - This corresponds to the ``scope`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - query (str): - Optional. The query statement. Examples: - - - "policy:myuser@mydomain.com" - - "policy:(myuser@mydomain.com viewer)" - - This corresponds to the ``query`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.asset_v1p1beta1.services.asset_service.pagers.SearchAllIamPoliciesPager: - Search all IAM policies response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [scope, query] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.SearchAllIamPoliciesRequest): - request = asset_service.SearchAllIamPoliciesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if scope is not None: - request.scope = scope - if query is not None: - request.query = query - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.search_all_iam_policies] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("scope", request.scope), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.SearchAllIamPoliciesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "AssetServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - - - - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "AssetServiceClient", -) diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/services/asset_service/pagers.py b/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/services/asset_service/pagers.py deleted file mode 100644 index 45706772eb83..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/services/asset_service/pagers.py +++ /dev/null @@ -1,306 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.asset_v1p1beta1.types import asset_service -from google.cloud.asset_v1p1beta1.types import assets - - -class SearchAllResourcesPager: - """A pager for iterating through ``search_all_resources`` requests. - - This class thinly wraps an initial - :class:`google.cloud.asset_v1p1beta1.types.SearchAllResourcesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``results`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``SearchAllResources`` requests and continue to iterate - through the ``results`` field on the - corresponding responses. - - All the usual :class:`google.cloud.asset_v1p1beta1.types.SearchAllResourcesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., asset_service.SearchAllResourcesResponse], - request: asset_service.SearchAllResourcesRequest, - response: asset_service.SearchAllResourcesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.asset_v1p1beta1.types.SearchAllResourcesRequest): - The initial request object. - response (google.cloud.asset_v1p1beta1.types.SearchAllResourcesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
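-
-        An illustrative consumption sketch; pagers are normally obtained
-        from the client method rather than constructed directly, and the
-        ``client`` name and scope below are assumptions, not part of this
-        module:
-
-        .. code-block:: python
-
-            pager = client.search_all_resources(request={"scope": "projects/my-project"})
-            # Additional pages are fetched transparently during iteration.
-            for resource in pager:
-                print(resource.name)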
- """ - self._method = method - self._request = asset_service.SearchAllResourcesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[asset_service.SearchAllResourcesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[assets.StandardResourceMetadata]: - for page in self.pages: - yield from page.results - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class SearchAllResourcesAsyncPager: - """A pager for iterating through ``search_all_resources`` requests. - - This class thinly wraps an initial - :class:`google.cloud.asset_v1p1beta1.types.SearchAllResourcesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``results`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``SearchAllResources`` requests and continue to iterate - through the ``results`` field on the - corresponding responses. - - All the usual :class:`google.cloud.asset_v1p1beta1.types.SearchAllResourcesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[asset_service.SearchAllResourcesResponse]], - request: asset_service.SearchAllResourcesRequest, - response: asset_service.SearchAllResourcesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.asset_v1p1beta1.types.SearchAllResourcesRequest): - The initial request object. - response (google.cloud.asset_v1p1beta1.types.SearchAllResourcesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = asset_service.SearchAllResourcesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[asset_service.SearchAllResourcesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[assets.StandardResourceMetadata]: - async def async_generator(): - async for page in self.pages: - for response in page.results: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class SearchAllIamPoliciesPager: - """A pager for iterating through ``search_all_iam_policies`` requests. - - This class thinly wraps an initial - :class:`google.cloud.asset_v1p1beta1.types.SearchAllIamPoliciesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``results`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``SearchAllIamPolicies`` requests and continue to iterate - through the ``results`` field on the - corresponding responses. - - All the usual :class:`google.cloud.asset_v1p1beta1.types.SearchAllIamPoliciesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., asset_service.SearchAllIamPoliciesResponse], - request: asset_service.SearchAllIamPoliciesRequest, - response: asset_service.SearchAllIamPoliciesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.asset_v1p1beta1.types.SearchAllIamPoliciesRequest): - The initial request object. - response (google.cloud.asset_v1p1beta1.types.SearchAllIamPoliciesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = asset_service.SearchAllIamPoliciesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[asset_service.SearchAllIamPoliciesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[assets.IamPolicySearchResult]: - for page in self.pages: - yield from page.results - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class SearchAllIamPoliciesAsyncPager: - """A pager for iterating through ``search_all_iam_policies`` requests. - - This class thinly wraps an initial - :class:`google.cloud.asset_v1p1beta1.types.SearchAllIamPoliciesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``results`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``SearchAllIamPolicies`` requests and continue to iterate - through the ``results`` field on the - corresponding responses. - - All the usual :class:`google.cloud.asset_v1p1beta1.types.SearchAllIamPoliciesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[asset_service.SearchAllIamPoliciesResponse]], - request: asset_service.SearchAllIamPoliciesRequest, - response: asset_service.SearchAllIamPoliciesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.asset_v1p1beta1.types.SearchAllIamPoliciesRequest): - The initial request object. - response (google.cloud.asset_v1p1beta1.types.SearchAllIamPoliciesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = asset_service.SearchAllIamPoliciesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[asset_service.SearchAllIamPoliciesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[assets.IamPolicySearchResult]: - async def async_generator(): - async for page in self.pages: - for response in page.results: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/services/asset_service/transports/README.rst b/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/services/asset_service/transports/README.rst deleted file mode 100644 index f0467812ea79..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/services/asset_service/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`AssetServiceTransport` is the ABC for all transports. -- public child `AssetServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `AssetServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseAssetServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `AssetServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/services/asset_service/transports/__init__.py b/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/services/asset_service/transports/__init__.py deleted file mode 100644 index 315eb22bd6cb..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/services/asset_service/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import AssetServiceTransport -from .grpc import AssetServiceGrpcTransport -from .grpc_asyncio import AssetServiceGrpcAsyncIOTransport -from .rest import AssetServiceRestTransport -from .rest import AssetServiceRestInterceptor - - -# Compile a registry of transports. 
-_transport_registry = OrderedDict() # type: Dict[str, Type[AssetServiceTransport]] -_transport_registry['grpc'] = AssetServiceGrpcTransport -_transport_registry['grpc_asyncio'] = AssetServiceGrpcAsyncIOTransport -_transport_registry['rest'] = AssetServiceRestTransport - -__all__ = ( - 'AssetServiceTransport', - 'AssetServiceGrpcTransport', - 'AssetServiceGrpcAsyncIOTransport', - 'AssetServiceRestTransport', - 'AssetServiceRestInterceptor', -) diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/services/asset_service/transports/base.py b/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/services/asset_service/transports/base.py deleted file mode 100644 index b4ab4ec8ac92..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/services/asset_service/transports/base.py +++ /dev/null @@ -1,189 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.asset_v1p1beta1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.asset_v1p1beta1.types import asset_service -from google.longrunning import operations_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class AssetServiceTransport(abc.ABC): - """Abstract transport class for AssetService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'cloudasset.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'cloudasset.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. 
- quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.search_all_resources: gapic_v1.method.wrap_method( - self.search_all_resources, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=15.0, - ), - default_timeout=15.0, - client_info=client_info, - ), - self.search_all_iam_policies: gapic_v1.method.wrap_method( - self.search_all_iam_policies, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=15.0, - ), - default_timeout=15.0, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
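-
-        A safer pattern is the client's context-manager protocol, which
-        closes the transport on exit (an illustrative sketch; the
-        ``request`` contents are assumptions):
-
-        .. code-block:: python
-
-            with AssetServiceClient() as client:
-                client.search_all_resources(request={"scope": "projects/my-project"})
-            # The transport is closed here.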
- """ - raise NotImplementedError() - - @property - def search_all_resources(self) -> Callable[ - [asset_service.SearchAllResourcesRequest], - Union[ - asset_service.SearchAllResourcesResponse, - Awaitable[asset_service.SearchAllResourcesResponse] - ]]: - raise NotImplementedError() - - @property - def search_all_iam_policies(self) -> Callable[ - [asset_service.SearchAllIamPoliciesRequest], - Union[ - asset_service.SearchAllIamPoliciesResponse, - Awaitable[asset_service.SearchAllIamPoliciesResponse] - ]]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'AssetServiceTransport', -) diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/services/asset_service/transports/grpc.py b/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/services/asset_service/transports/grpc.py deleted file mode 100644 index 8b4ed0a9b338..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/services/asset_service/transports/grpc.py +++ /dev/null @@ -1,387 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json -import logging as std_logging -import pickle -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore - -from google.cloud.asset_v1p1beta1.types import asset_service -from google.longrunning import operations_pb2 # type: ignore -from .base import AssetServiceTransport, DEFAULT_CLIENT_INFO - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER - def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": 
dict(request_metadata),
-            }
-            _LOGGER.debug(
-                f"Sending request for {client_call_details.method}",
-                extra = {
-                    "serviceName": "google.cloud.asset.v1p1beta1.AssetService",
-                    "rpcName": client_call_details.method,
-                    "request": grpc_request,
-                    "metadata": grpc_request["metadata"],
-                },
-            )
-
-        response = continuation(client_call_details, request)
-        if logging_enabled:  # pragma: NO COVER
-            response_metadata = response.trailing_metadata()
-            # Convert the gRPC trailing metadata into a dict of stringified key/value pairs.
-            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
-            result = response.result()
-            if isinstance(result, proto.Message):
-                response_payload = type(result).to_json(result)
-            elif isinstance(result, google.protobuf.message.Message):
-                response_payload = MessageToJson(result)
-            else:
-                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
-            grpc_response = {
-                "payload": response_payload,
-                "metadata": metadata,
-                "status": "OK",
-            }
-            _LOGGER.debug(
-                f"Received response for {client_call_details.method}.",
-                extra = {
-                    "serviceName": "google.cloud.asset.v1p1beta1.AssetService",
-                    "rpcName": client_call_details.method,
-                    "response": grpc_response,
-                    "metadata": grpc_response["metadata"],
-                },
-            )
-        return response
-
-
-class AssetServiceGrpcTransport(AssetServiceTransport):
-    """gRPC backend transport for AssetService.
-
-    Asset service definition.
-
-    This class defines the same methods as the primary client, so the
-    primary client can load the underlying transport implementation
-    and call it.
-
-    It sends protocol buffers over the wire using gRPC (which is built on
-    top of HTTP/2); the ``grpcio`` package must be installed.
-    """
-    _stubs: Dict[str, Callable]
-
-    def __init__(self, *,
-            host: str = 'cloudasset.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None,
-            api_mtls_endpoint: Optional[str] = None,
-            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
-            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            api_audience: Optional[str] = None,
-            ) -> None:
-        """Instantiate the transport.
-
-        Args:
-            host (Optional[str]):
-                The hostname to connect to (default: 'cloudasset.googleapis.com').
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-                This argument is ignored if a ``channel`` instance is provided.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if a ``channel`` instance is provided.
-            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
-                ignored if a ``channel`` instance is provided.
-            channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]):
-                A ``Channel`` instance through which to make calls, or a Callable
-                that constructs and returns one. If set to None, ``self.create_channel``
-                is used to create the channel.
If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
-                if client_cert_source:
-                    cert, key = client_cert_source()
-                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
-                        certificate_chain=cert, private_key=key
-                    )
-                else:
-                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
-
-            else:
-                if client_cert_source_for_mtls and not ssl_channel_credentials:
-                    cert, key = client_cert_source_for_mtls()
-                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
-                        certificate_chain=cert, private_key=key
-                    )
-
-        # The base transport sets the host, credentials and scopes
-        super().__init__(
-            host=host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            scopes=scopes,
-            quota_project_id=quota_project_id,
-            client_info=client_info,
-            always_use_jwt_access=always_use_jwt_access,
-            api_audience=api_audience,
-        )
-
-        if not self._grpc_channel:
-            # initialize with the provided callable or the default channel
-            channel_init = channel or type(self).create_channel
-            self._grpc_channel = channel_init(
-                self._host,
-                # use the credentials which are saved
-                credentials=self._credentials,
-                # Set ``credentials_file`` to ``None`` here as
-                # the credentials that we saved earlier should be used.
-                credentials_file=None,
-                scopes=self._scopes,
-                ssl_credentials=self._ssl_channel_credentials,
-                quota_project_id=quota_project_id,
-                options=[
-                    ("grpc.max_send_message_length", -1),
-                    ("grpc.max_receive_message_length", -1),
-                ],
-            )
-
-        self._interceptor = _LoggingClientInterceptor()
-        self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor)
-
-        # Wrap messages. This must be done after self._logged_channel exists
-        self._prep_wrapped_messages(client_info)
-
-    @classmethod
-    def create_channel(cls,
-            host: str = 'cloudasset.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            quota_project_id: Optional[str] = None,
-            **kwargs) -> grpc.Channel:
-        """Create and return a gRPC channel object.
-        Args:
-            host (Optional[str]): The host for the channel to use.
-            credentials (Optional[~.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify this application to the service. If
-                none are specified, the client will attempt to ascertain
-                the credentials from the environment.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is mutually exclusive with credentials.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            kwargs (Optional[dict]): Keyword arguments, which are passed to the
-                channel creation.
-        Returns:
-            grpc.Channel: A gRPC channel object.
-
-        Raises:
-            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
-                and ``credentials_file`` are passed.
-        """
-
-        return grpc_helpers.create_channel(
-            host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            quota_project_id=quota_project_id,
-            default_scopes=cls.AUTH_SCOPES,
-            scopes=scopes,
-            default_host=cls.DEFAULT_HOST,
-            **kwargs
-        )
-
-    @property
-    def grpc_channel(self) -> grpc.Channel:
-        """Return the channel designed to connect to this service.
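-
-        The channel is normally created from the constructor arguments,
-        but a pre-built channel may also be injected via the documented
-        ``channel`` argument (an illustrative sketch):
-
-        .. code-block:: python
-
-            channel = AssetServiceGrpcTransport.create_channel(
-                'cloudasset.googleapis.com',
-            )
-            transport = AssetServiceGrpcTransport(channel=channel)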
- """ - return self._grpc_channel - - @property - def search_all_resources(self) -> Callable[ - [asset_service.SearchAllResourcesRequest], - asset_service.SearchAllResourcesResponse]: - r"""Return a callable for the search all resources method over gRPC. - - Searches all the resources within a given accessible Resource - Manager scope (project/folder/organization). This RPC gives - callers especially administrators the ability to search all the - resources within a scope, even if they don't have ``.get`` - permission of all the resources. Callers should have - ``cloudasset.assets.searchAllResources`` permission on the - requested scope, otherwise the request will be rejected. - - Returns: - Callable[[~.SearchAllResourcesRequest], - ~.SearchAllResourcesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'search_all_resources' not in self._stubs: - self._stubs['search_all_resources'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1p1beta1.AssetService/SearchAllResources', - request_serializer=asset_service.SearchAllResourcesRequest.serialize, - response_deserializer=asset_service.SearchAllResourcesResponse.deserialize, - ) - return self._stubs['search_all_resources'] - - @property - def search_all_iam_policies(self) -> Callable[ - [asset_service.SearchAllIamPoliciesRequest], - asset_service.SearchAllIamPoliciesResponse]: - r"""Return a callable for the search all iam policies method over gRPC. - - Searches all the IAM policies within a given accessible Resource - Manager scope (project/folder/organization). This RPC gives - callers especially administrators the ability to search all the - IAM policies within a scope, even if they don't have - ``.getIamPolicy`` permission of all the IAM policies. Callers - should have ``cloudasset.assets.searchAllIamPolicies`` - permission on the requested scope, otherwise the request will be - rejected. - - Returns: - Callable[[~.SearchAllIamPoliciesRequest], - ~.SearchAllIamPoliciesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'search_all_iam_policies' not in self._stubs: - self._stubs['search_all_iam_policies'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1p1beta1.AssetService/SearchAllIamPolicies', - request_serializer=asset_service.SearchAllIamPoliciesRequest.serialize, - response_deserializer=asset_service.SearchAllIamPoliciesResponse.deserialize, - ) - return self._stubs['search_all_iam_policies'] - - def close(self): - self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'AssetServiceGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/services/asset_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/services/asset_service/transports/grpc_asyncio.py deleted file mode 100644 index d2cc50244ef9..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/services/asset_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,432 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import inspect -import json -import pickle -import logging as std_logging -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import exceptions as core_exceptions -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.asset_v1p1beta1.types import asset_service -from google.longrunning import operations_pb2 # type: ignore -from .base import AssetServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import AssetServiceGrpcTransport - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER - async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, 
bytes) else value
-                for key, value in request_metadata
-            }
-            grpc_request = {
-                "payload": request_payload,
-                "requestMethod": "grpc",
-                "metadata": dict(request_metadata),
-            }
-            _LOGGER.debug(
-                f"Sending request for {client_call_details.method}",
-                extra = {
-                    "serviceName": "google.cloud.asset.v1p1beta1.AssetService",
-                    "rpcName": str(client_call_details.method),
-                    "request": grpc_request,
-                    "metadata": grpc_request["metadata"],
-                },
-            )
-        response = await continuation(client_call_details, request)
-        if logging_enabled:  # pragma: NO COVER
-            response_metadata = await response.trailing_metadata()
-            # Convert the gRPC trailing metadata into a dict of stringified key/value pairs.
-            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
-            result = await response
-            if isinstance(result, proto.Message):
-                response_payload = type(result).to_json(result)
-            elif isinstance(result, google.protobuf.message.Message):
-                response_payload = MessageToJson(result)
-            else:
-                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
-            grpc_response = {
-                "payload": response_payload,
-                "metadata": metadata,
-                "status": "OK",
-            }
-            _LOGGER.debug(
-                f"Received response to rpc {client_call_details.method}.",
-                extra = {
-                    "serviceName": "google.cloud.asset.v1p1beta1.AssetService",
-                    "rpcName": str(client_call_details.method),
-                    "response": grpc_response,
-                    "metadata": grpc_response["metadata"],
-                },
-            )
-        return response
-
-
-class AssetServiceGrpcAsyncIOTransport(AssetServiceTransport):
-    """gRPC AsyncIO backend transport for AssetService.
-
-    Asset service definition.
-
-    This class defines the same methods as the primary client, so the
-    primary client can load the underlying transport implementation
-    and call it.
-
-    It sends protocol buffers over the wire using gRPC (which is built on
-    top of HTTP/2); the ``grpcio`` package must be installed.
-    """
-
-    _grpc_channel: aio.Channel
-    _stubs: Dict[str, Callable] = {}
-
-    @classmethod
-    def create_channel(cls,
-            host: str = 'cloudasset.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            quota_project_id: Optional[str] = None,
-            **kwargs) -> aio.Channel:
-        """Create and return a gRPC AsyncIO channel object.
-        Args:
-            host (Optional[str]): The host for the channel to use.
-            credentials (Optional[~.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify this application to the service. If
-                none are specified, the client will attempt to ascertain
-                the credentials from the environment.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            kwargs (Optional[dict]): Keyword arguments, which are passed to the
-                channel creation.
-        Returns:
-            aio.Channel: A gRPC AsyncIO channel object.
- """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'cloudasset.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'cloudasset.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
-                Generally, you only need to set this if you're developing
-                your own client library.
-            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
-                be used for service account credentials.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
-            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
-                and ``credentials_file`` are passed.
-        """
-        self._grpc_channel = None
-        self._ssl_channel_credentials = ssl_channel_credentials
-        self._stubs: Dict[str, Callable] = {}
-
-        if api_mtls_endpoint:
-            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
-        if client_cert_source:
-            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
-
-        if isinstance(channel, aio.Channel):
-            # Ignore credentials if a channel was passed.
-            credentials = None
-            self._ignore_credentials = True
-            # If a channel was explicitly provided, set it.
-            self._grpc_channel = channel
-            self._ssl_channel_credentials = None
-        else:
-            if api_mtls_endpoint:
-                host = api_mtls_endpoint
-
-                # Create SSL credentials with client_cert_source or application
-                # default SSL credentials.
-                if client_cert_source:
-                    cert, key = client_cert_source()
-                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
-                        certificate_chain=cert, private_key=key
-                    )
-                else:
-                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
-
-            else:
-                if client_cert_source_for_mtls and not ssl_channel_credentials:
-                    cert, key = client_cert_source_for_mtls()
-                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
-                        certificate_chain=cert, private_key=key
-                    )
-
-        # The base transport sets the host, credentials and scopes
-        super().__init__(
-            host=host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            scopes=scopes,
-            quota_project_id=quota_project_id,
-            client_info=client_info,
-            always_use_jwt_access=always_use_jwt_access,
-            api_audience=api_audience,
-        )
-
-        if not self._grpc_channel:
-            # initialize with the provided callable or the default channel
-            channel_init = channel or type(self).create_channel
-            self._grpc_channel = channel_init(
-                self._host,
-                # use the credentials which are saved
-                credentials=self._credentials,
-                # Set ``credentials_file`` to ``None`` here as
-                # the credentials that we saved earlier should be used.
-                credentials_file=None,
-                scopes=self._scopes,
-                ssl_credentials=self._ssl_channel_credentials,
-                quota_project_id=quota_project_id,
-                options=[
-                    ("grpc.max_send_message_length", -1),
-                    ("grpc.max_receive_message_length", -1),
-                ],
-            )
-
-        self._interceptor = _LoggingClientAIOInterceptor()
-        self._grpc_channel._unary_unary_interceptors.append(self._interceptor)
-        self._logged_channel = self._grpc_channel
-        self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters
-        # Wrap messages. This must be done after self._logged_channel exists
-        self._prep_wrapped_messages(client_info)
-
-    @property
-    def grpc_channel(self) -> aio.Channel:
-        """Create the channel designed to connect to this service.
-
-        This property caches on the instance; repeated calls return
-        the same channel.
-        """
-        # Return the channel from cache.
-        return self._grpc_channel
-
-    @property
-    def search_all_resources(self) -> Callable[
-            [asset_service.SearchAllResourcesRequest],
-            Awaitable[asset_service.SearchAllResourcesResponse]]:
-        r"""Return a callable for the search all resources method over gRPC.
-
-        Searches all the resources within a given accessible Resource
-        Manager scope (project/folder/organization). This RPC gives
-        callers, especially administrators, the ability to search all
-        the resources within a scope, even if they don't have ``.get``
-        permission on all of the resources. Callers should have
-        ``cloudasset.assets.searchAllResources`` permission on the
-        requested scope; otherwise the request will be rejected.
-
-        Returns:
-            Callable[[~.SearchAllResourcesRequest],
-                    Awaitable[~.SearchAllResourcesResponse]]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'search_all_resources' not in self._stubs:
-            self._stubs['search_all_resources'] = self._logged_channel.unary_unary(
-                '/google.cloud.asset.v1p1beta1.AssetService/SearchAllResources',
-                request_serializer=asset_service.SearchAllResourcesRequest.serialize,
-                response_deserializer=asset_service.SearchAllResourcesResponse.deserialize,
-            )
-        return self._stubs['search_all_resources']
-
-    @property
-    def search_all_iam_policies(self) -> Callable[
-            [asset_service.SearchAllIamPoliciesRequest],
-            Awaitable[asset_service.SearchAllIamPoliciesResponse]]:
-        r"""Return a callable for the search all iam policies method over gRPC.
-
-        Searches all the IAM policies within a given accessible Resource
-        Manager scope (project/folder/organization). This RPC gives
-        callers, especially administrators, the ability to search all
-        the IAM policies within a scope, even if they don't have
-        ``.getIamPolicy`` permission on all of the IAM policies.
-        Callers should have ``cloudasset.assets.searchAllIamPolicies``
-        permission on the requested scope; otherwise the request will
-        be rejected.
-
-        Returns:
-            Callable[[~.SearchAllIamPoliciesRequest],
-                    Awaitable[~.SearchAllIamPoliciesResponse]]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
- if 'search_all_iam_policies' not in self._stubs: - self._stubs['search_all_iam_policies'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1p1beta1.AssetService/SearchAllIamPolicies', - request_serializer=asset_service.SearchAllIamPoliciesRequest.serialize, - response_deserializer=asset_service.SearchAllIamPoliciesResponse.deserialize, - ) - return self._stubs['search_all_iam_policies'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.search_all_resources: self._wrap_method( - self.search_all_resources, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=15.0, - ), - default_timeout=15.0, - client_info=client_info, - ), - self.search_all_iam_policies: self._wrap_method( - self.search_all_iam_policies, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=15.0, - ), - default_timeout=15.0, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - -__all__ = ( - 'AssetServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/services/asset_service/transports/rest.py b/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/services/asset_service/transports/rest.py deleted file mode 100644 index 35a37053908a..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/services/asset_service/transports/rest.py +++ /dev/null @@ -1,519 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import logging -import json # type: ignore - -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 - -from google.protobuf import json_format - -from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - - -from google.cloud.asset_v1p1beta1.types import asset_service -from google.longrunning import operations_pb2 # type: ignore - - -from .rest_base import _BaseAssetServiceRestTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = logging.getLogger(__name__) - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=f"requests@{requests_version}", -) - - -class AssetServiceRestInterceptor: - """Interceptor for AssetService. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the AssetServiceRestTransport. - - .. code-block:: python - class MyCustomAssetServiceInterceptor(AssetServiceRestInterceptor): - def pre_search_all_iam_policies(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_search_all_iam_policies(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_search_all_resources(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_search_all_resources(self, response): - logging.log(f"Received response: {response}") - return response - - transport = AssetServiceRestTransport(interceptor=MyCustomAssetServiceInterceptor()) - client = AssetServiceClient(transport=transport) - - - """ - def pre_search_all_iam_policies(self, request: asset_service.SearchAllIamPoliciesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SearchAllIamPoliciesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for search_all_iam_policies - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssetService server. - """ - return request, metadata - - def post_search_all_iam_policies(self, response: asset_service.SearchAllIamPoliciesResponse) -> asset_service.SearchAllIamPoliciesResponse: - """Post-rpc interceptor for search_all_iam_policies - - DEPRECATED. Please use the `post_search_all_iam_policies_with_metadata` - interceptor instead. 
- - Override in a subclass to read or manipulate the response - after it is returned by the AssetService server but before - it is returned to user code. This `post_search_all_iam_policies` interceptor runs - before the `post_search_all_iam_policies_with_metadata` interceptor. - """ - return response - - def post_search_all_iam_policies_with_metadata(self, response: asset_service.SearchAllIamPoliciesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SearchAllIamPoliciesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for search_all_iam_policies - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AssetService server but before it is returned to user code. - - We recommend only using this `post_search_all_iam_policies_with_metadata` - interceptor in new development instead of the `post_search_all_iam_policies` interceptor. - When both interceptors are used, this `post_search_all_iam_policies_with_metadata` interceptor runs after the - `post_search_all_iam_policies` interceptor. The (possibly modified) response returned by - `post_search_all_iam_policies` will be passed to - `post_search_all_iam_policies_with_metadata`. - """ - return response, metadata - - def pre_search_all_resources(self, request: asset_service.SearchAllResourcesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SearchAllResourcesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for search_all_resources - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssetService server. - """ - return request, metadata - - def post_search_all_resources(self, response: asset_service.SearchAllResourcesResponse) -> asset_service.SearchAllResourcesResponse: - """Post-rpc interceptor for search_all_resources - - DEPRECATED. Please use the `post_search_all_resources_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AssetService server but before - it is returned to user code. This `post_search_all_resources` interceptor runs - before the `post_search_all_resources_with_metadata` interceptor. - """ - return response - - def post_search_all_resources_with_metadata(self, response: asset_service.SearchAllResourcesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SearchAllResourcesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for search_all_resources - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AssetService server but before it is returned to user code. - - We recommend only using this `post_search_all_resources_with_metadata` - interceptor in new development instead of the `post_search_all_resources` interceptor. - When both interceptors are used, this `post_search_all_resources_with_metadata` interceptor runs after the - `post_search_all_resources` interceptor. The (possibly modified) response returned by - `post_search_all_resources` will be passed to - `post_search_all_resources_with_metadata`. - """ - return response, metadata - - -@dataclasses.dataclass -class AssetServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: AssetServiceRestInterceptor - - -class AssetServiceRestTransport(_BaseAssetServiceRestTransport): - """REST backend synchronous transport for AssetService. - - Asset service definition. 
- - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'cloudasset.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[AssetServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'cloudasset.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
- # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - url_scheme=url_scheme, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or AssetServiceRestInterceptor() - self._prep_wrapped_messages(client_info) - - class _SearchAllIamPolicies(_BaseAssetServiceRestTransport._BaseSearchAllIamPolicies, AssetServiceRestStub): - def __hash__(self): - return hash("AssetServiceRestTransport.SearchAllIamPolicies") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: asset_service.SearchAllIamPoliciesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.SearchAllIamPoliciesResponse: - r"""Call the search all iam policies method over HTTP. - - Args: - request (~.asset_service.SearchAllIamPoliciesRequest): - The request object. Search all IAM policies request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.asset_service.SearchAllIamPoliciesResponse: - Search all IAM policies response. 
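Taken together with the interceptor hooks defined earlier in this file, the call path above can be exercised end to end through the public client. A short sketch, assuming application-default credentials; the interceptor subclass, scope, and query are illustrative only:

.. code-block:: python

    import logging

    from google.cloud import asset_v1p1beta1
    from google.cloud.asset_v1p1beta1.services.asset_service.transports.rest import (
        AssetServiceRestInterceptor,
        AssetServiceRestTransport,
    )

    logging.basicConfig(level=logging.INFO)


    class AuditingInterceptor(AssetServiceRestInterceptor):
        # pre_* hooks may inspect or rewrite the request and metadata
        # before the HTTP call is made.
        def pre_search_all_iam_policies(self, request, metadata):
            logging.info("searching scope=%s query=%s", request.scope, request.query)
            return request, metadata


    transport = AssetServiceRestTransport(interceptor=AuditingInterceptor())
    client = asset_v1p1beta1.AssetServiceClient(transport=transport)

    pager = client.search_all_iam_policies(
        request=asset_v1p1beta1.SearchAllIamPoliciesRequest(
            scope="projects/my-project",       # hypothetical scope
            query="policy:admin@example.com",  # hypothetical query
        )
    )
    for result in pager:
        print(result.resource)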
- """ - - http_options = _BaseAssetServiceRestTransport._BaseSearchAllIamPolicies._get_http_options() - - request, metadata = self._interceptor.pre_search_all_iam_policies(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseSearchAllIamPolicies._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseSearchAllIamPolicies._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.asset_v1p1beta1.AssetServiceClient.SearchAllIamPolicies", - extra = { - "serviceName": "google.cloud.asset.v1p1beta1.AssetService", - "rpcName": "SearchAllIamPolicies", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssetServiceRestTransport._SearchAllIamPolicies._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = asset_service.SearchAllIamPoliciesResponse() - pb_resp = asset_service.SearchAllIamPoliciesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_search_all_iam_policies(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_search_all_iam_policies_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = asset_service.SearchAllIamPoliciesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.asset_v1p1beta1.AssetServiceClient.search_all_iam_policies", - extra = { - "serviceName": "google.cloud.asset.v1p1beta1.AssetService", - "rpcName": "SearchAllIamPolicies", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _SearchAllResources(_BaseAssetServiceRestTransport._BaseSearchAllResources, AssetServiceRestStub): - def __hash__(self): - return hash("AssetServiceRestTransport.SearchAllResources") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: asset_service.SearchAllResourcesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, 
- metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.SearchAllResourcesResponse: - r"""Call the search all resources method over HTTP. - - Args: - request (~.asset_service.SearchAllResourcesRequest): - The request object. Search all resources request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.asset_service.SearchAllResourcesResponse: - Search all resources response. - """ - - http_options = _BaseAssetServiceRestTransport._BaseSearchAllResources._get_http_options() - - request, metadata = self._interceptor.pre_search_all_resources(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseSearchAllResources._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseSearchAllResources._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.asset_v1p1beta1.AssetServiceClient.SearchAllResources", - extra = { - "serviceName": "google.cloud.asset.v1p1beta1.AssetService", - "rpcName": "SearchAllResources", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssetServiceRestTransport._SearchAllResources._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = asset_service.SearchAllResourcesResponse() - pb_resp = asset_service.SearchAllResourcesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_search_all_resources(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_search_all_resources_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = asset_service.SearchAllResourcesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.asset_v1p1beta1.AssetServiceClient.search_all_resources", - extra = { - "serviceName": "google.cloud.asset.v1p1beta1.AssetService", - "rpcName": "SearchAllResources", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - @property - def search_all_iam_policies(self) -> Callable[ - [asset_service.SearchAllIamPoliciesRequest], - asset_service.SearchAllIamPoliciesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._SearchAllIamPolicies(self._session, self._host, self._interceptor) # type: ignore - - @property - def search_all_resources(self) -> Callable[ - [asset_service.SearchAllResourcesRequest], - asset_service.SearchAllResourcesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._SearchAllResources(self._session, self._host, self._interceptor) # type: ignore - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'AssetServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/services/asset_service/transports/rest_base.py b/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/services/asset_service/transports/rest_base.py deleted file mode 100644 index af3a96308006..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/services/asset_service/transports/rest_base.py +++ /dev/null @@ -1,166 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
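The `__call__` implementations above lean on `google.api_core.path_template.transcode` together with the per-method `http_options` declared in the `rest_base` module that follows: path-bound fields are substituted into the URI template and anything left over becomes query parameters. A small standalone sketch of that mechanism (the scope and query values are made up, and the generated code passes the protobuf request rather than keyword arguments):

.. code-block:: python

    from google.api_core import path_template

    # Same shape as _BaseSearchAllIamPolicies._get_http_options() below.
    http_options = [{
        'method': 'get',
        'uri': '/v1p1beta1/{scope=*/*}/iamPolicies:searchAll',
    }]

    transcoded = path_template.transcode(
        http_options,
        scope='projects/my-project',  # matches the {scope=*/*} URI variable
        query='policy:admin',         # unmatched, so it becomes a query param
    )

    print(transcoded['method'])        # get
    print(transcoded['uri'])           # /v1p1beta1/projects/my-project/iamPolicies:searchAll
    print(transcoded['query_params'])  # {'query': 'policy:admin'}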
-# -import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from .base import AssetServiceTransport, DEFAULT_CLIENT_INFO - -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - - -from google.cloud.asset_v1p1beta1.types import asset_service -from google.longrunning import operations_pb2 # type: ignore - - -class _BaseAssetServiceRestTransport(AssetServiceTransport): - """Base REST backend transport for AssetService. - - Note: This class is not meant to be used directly. Use its sync and - async sub-classes instead. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'cloudasset.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - Args: - host (Optional[str]): - The hostname to connect to (default: 'cloudasset.googleapis.com'). - credentials (Optional[Any]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. 
- """ - # Run the base constructor - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - - class _BaseSearchAllIamPolicies: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1p1beta1/{scope=*/*}/iamPolicies:searchAll', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = asset_service.SearchAllIamPoliciesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseSearchAllIamPolicies._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseSearchAllResources: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1p1beta1/{scope=*/*}/resources:searchAll', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = asset_service.SearchAllResourcesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseSearchAllResources._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - -__all__=( - '_BaseAssetServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/types/__init__.py b/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/types/__init__.py deleted file mode 100644 index 90a6d1622a1c..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/types/__init__.py +++ /dev/null @@ -1,36 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .asset_service import ( - SearchAllIamPoliciesRequest, - SearchAllIamPoliciesResponse, - SearchAllResourcesRequest, - SearchAllResourcesResponse, -) -from .assets import ( - IamPolicySearchResult, - Permissions, - StandardResourceMetadata, -) - -__all__ = ( - 'SearchAllIamPoliciesRequest', - 'SearchAllIamPoliciesResponse', - 'SearchAllResourcesRequest', - 'SearchAllResourcesResponse', - 'IamPolicySearchResult', - 'Permissions', - 'StandardResourceMetadata', -) diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/types/asset_service.py b/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/types/asset_service.py deleted file mode 100644 index 513388965869..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/types/asset_service.py +++ /dev/null @@ -1,212 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.asset_v1p1beta1.types import assets - - -__protobuf__ = proto.module( - package='google.cloud.asset.v1p1beta1', - manifest={ - 'SearchAllResourcesRequest', - 'SearchAllResourcesResponse', - 'SearchAllIamPoliciesRequest', - 'SearchAllIamPoliciesResponse', - }, -) - - -class SearchAllResourcesRequest(proto.Message): - r"""Search all resources request. - - Attributes: - scope (str): - Required. The relative name of an asset. The search is - limited to the resources within the ``scope``. The allowed - value must be: - - - Organization number (such as "organizations/123") - - Folder number (such as "folders/1234") - - Project number (such as "projects/12345") - - Project ID (such as "projects/abc") - query (str): - Optional. The query statement. - asset_types (MutableSequence[str]): - Optional. A list of asset types that this - request searches for. If empty, it will search - all the supported asset types. - page_size (int): - Optional. The page size for search result pagination. Page - size is capped at 500 even if a larger value is given. If - set to zero, server will pick an appropriate default. - Returned results may be fewer than requested. When this - happens, there could be more results as long as - ``next_page_token`` is returned. - page_token (str): - Optional. If present, then retrieve the next batch of - results from the preceding call to this method. - ``page_token`` must be the value of ``next_page_token`` from - the previous response. 
The values of all other method
- parameters must be identical to those in the previous call.
- order_by (str):
- Optional. A comma separated list of fields specifying the
- sorting order of the results. The default order is
- ascending. Add ``DESC`` after the field name to indicate
- descending order. Redundant space characters are ignored.
- For example, ``location DESC , name``.
- """
-
- scope: str = proto.Field(
- proto.STRING,
- number=1,
- )
- query: str = proto.Field(
- proto.STRING,
- number=2,
- )
- asset_types: MutableSequence[str] = proto.RepeatedField(
- proto.STRING,
- number=3,
- )
- page_size: int = proto.Field(
- proto.INT32,
- number=4,
- )
- page_token: str = proto.Field(
- proto.STRING,
- number=5,
- )
- order_by: str = proto.Field(
- proto.STRING,
- number=10,
- )
-
-
-class SearchAllResourcesResponse(proto.Message):
- r"""Search all resources response.
-
- Attributes:
- results (MutableSequence[google.cloud.asset_v1p1beta1.types.StandardResourceMetadata]):
- A list of resources that match the search
- query.
- next_page_token (str):
- If there are more results than those appearing in this
- response, then ``next_page_token`` is included. To get the
- next set of results, call this method again using the value
- of ``next_page_token`` as ``page_token``.
- """
-
- @property
- def raw_page(self):
- return self
-
- results: MutableSequence[assets.StandardResourceMetadata] = proto.RepeatedField(
- proto.MESSAGE,
- number=1,
- message=assets.StandardResourceMetadata,
- )
- next_page_token: str = proto.Field(
- proto.STRING,
- number=2,
- )
-
-
-class SearchAllIamPoliciesRequest(proto.Message):
- r"""Search all IAM policies request.
-
- Attributes:
- scope (str):
- Required. The relative name of an asset. The search is
- limited to the resources within the ``scope``. The allowed
- value must be:
-
- - Organization number (such as "organizations/123")
- - Folder number (such as "folders/1234")
- - Project number (such as "projects/12345")
- - Project ID (such as "projects/abc")
- query (str):
- Optional. The query statement. Examples:
-
- - "policy:myuser@mydomain.com"
- - "policy:(myuser@mydomain.com viewer)".
- page_size (int):
- Optional. The page size for search result pagination. Page
- size is capped at 500 even if a larger value is given. If
- set to zero, server will pick an appropriate default.
- Returned results may be fewer than requested. When this
- happens, there could be more results as long as
- ``next_page_token`` is returned.
- page_token (str):
- Optional. If present, retrieve the next batch of results
- from the preceding call to this method. ``page_token`` must
- be the value of ``next_page_token`` from the previous
- response. The values of all other method parameters must be
- identical to those in the previous call.
- """
-
- scope: str = proto.Field(
- proto.STRING,
- number=1,
- )
- query: str = proto.Field(
- proto.STRING,
- number=2,
- )
- page_size: int = proto.Field(
- proto.INT32,
- number=3,
- )
- page_token: str = proto.Field(
- proto.STRING,
- number=4,
- )
-
-
-class SearchAllIamPoliciesResponse(proto.Message):
- r"""Search all IAM policies response.
-
- Attributes:
- results (MutableSequence[google.cloud.asset_v1p1beta1.types.IamPolicySearchResult]):
- A list of IAM policies that match the search
- query. Related information such as the
- associated resource is returned along with the
- policy.
- next_page_token (str):
- Set if there are more results than those appearing in this
- response; to get the next set of results, call this method
- again, using this value as the ``page_token``.
- """
-
- @property
- def raw_page(self):
- return self
-
- results: MutableSequence[assets.IamPolicySearchResult] = proto.RepeatedField(
- proto.MESSAGE,
- number=1,
- message=assets.IamPolicySearchResult,
- )
- next_page_token: str = proto.Field(
- proto.STRING,
- number=2,
- )
-
-
-__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/types/assets.py b/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/types/assets.py deleted file mode 100644 index a0ee1c8baf90..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/google/cloud/asset_v1p1beta1/types/assets.py +++ /dev/null @@ -1,203 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations -
-from typing import MutableMapping, MutableSequence
-
-import proto # type: ignore
-
-from google.iam.v1 import policy_pb2 # type: ignore
-
-
-__protobuf__ = proto.module(
- package='google.cloud.asset.v1p1beta1',
- manifest={
- 'StandardResourceMetadata',
- 'IamPolicySearchResult',
- 'Permissions',
- },
-)
-
-
-class StandardResourceMetadata(proto.Message):
- r"""The standard metadata of a cloud resource.
-
- Attributes:
- name (str):
- The full resource name. For example:
- ``//compute.googleapis.com/projects/my_project_123/zones/zone1/instances/instance1``.
- See `Resource
- Names <https://cloud.google.com/apis/design/resource_names#full_resource_name>`__
- for more information.
- asset_type (str):
- The type of this resource.
- For example: "compute.googleapis.com/Disk".
- project (str):
- The project that this resource belongs to, in the form of
- ``projects/{project_number}``.
- display_name (str):
- The display name of this resource.
- description (str):
- One or more paragraphs of text description of
- this resource. Maximum length could be up to 1M
- bytes.
- additional_attributes (MutableSequence[str]):
- Additional searchable attributes of this
- resource. Informational only. The exact set of
- attributes is subject to change. For example:
- project id, DNS name etc.
- location (str):
- Location can be "global", regional like
- "us-east1", or zonal like "us-west1-b".
- labels (MutableMapping[str, str]):
- Labels associated with this resource. See `Labelling and
- grouping Google Cloud
- resources <https://cloud.google.com/blog/products/gcp/labelling-and-grouping-your-google-cloud-platform-resources>`__
- for more information.
- network_tags (MutableSequence[str]):
- Network tags associated with this resource. Like labels,
- network tags are a type of annotations used to group Google
- Cloud resources. See `Labelling Google Cloud
- resources <https://cloud.google.com/blog/products/gcp/labelling-and-grouping-your-google-cloud-platform-resources>`__
- for more information.
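The attribute descriptions above map directly onto what a search returns; a brief sketch of reading them off `SearchAllResources` results (scope and asset type are placeholders, and the pager resubmits `next_page_token` as `page_token` behind the scenes, as the request docstring earlier describes):

.. code-block:: python

    from google.cloud import asset_v1p1beta1

    client = asset_v1p1beta1.AssetServiceClient()

    request = asset_v1p1beta1.SearchAllResourcesRequest(
        scope="organizations/123",                        # hypothetical scope
        asset_types=["compute.googleapis.com/Instance"],  # hypothetical filter
        page_size=50,
    )

    for metadata in client.search_all_resources(request=request):
        # Each item is a StandardResourceMetadata message.
        print(metadata.name, metadata.asset_type, metadata.location)
        for key, value in metadata.labels.items():
            print(f"  label {key}={value}")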
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - asset_type: str = proto.Field( - proto.STRING, - number=2, - ) - project: str = proto.Field( - proto.STRING, - number=3, - ) - display_name: str = proto.Field( - proto.STRING, - number=4, - ) - description: str = proto.Field( - proto.STRING, - number=5, - ) - additional_attributes: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=10, - ) - location: str = proto.Field( - proto.STRING, - number=11, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=12, - ) - network_tags: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=13, - ) - - -class IamPolicySearchResult(proto.Message): - r"""The result for an IAM policy search. - - Attributes: - resource (str): - The `full resource - name `__ - of the resource associated with this IAM policy. - project (str): - The project that the associated Google Cloud resource - belongs to, in the form of ``projects/{project_number}``. If - an IAM policy is set on a resource -- such as a Compute - Engine instance or a Cloud Storage bucket -- the project - field will indicate the project that contains the resource. - If an IAM policy is set on a folder or orgnization, the - project field will be empty. - policy (google.iam.v1.policy_pb2.Policy): - The IAM policy attached to the specified - resource. Note that the original IAM policy can - contain multiple bindings. This only contains - the bindings that match the given query. For - queries that don't contain a constraint on - policies (e.g. an empty query), this contains - all the bindings. - explanation (google.cloud.asset_v1p1beta1.types.IamPolicySearchResult.Explanation): - Explanation about the IAM policy search - result. It contains additional information that - explains why the search result matches the - query. - """ - - class Explanation(proto.Message): - r"""Explanation about the IAM policy search result. - - Attributes: - matched_permissions (MutableMapping[str, google.cloud.asset_v1p1beta1.types.Permissions]): - The map from roles to their included permission matching the - permission query (e.g. containing - ``policy.role.permissions:``). Example role string: - "roles/compute.instanceAdmin". The roles can also be found - in the returned ``policy`` bindings. Note that the map is - populated only if requesting with a permission query. - """ - - matched_permissions: MutableMapping[str, 'Permissions'] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=1, - message='Permissions', - ) - - resource: str = proto.Field( - proto.STRING, - number=1, - ) - project: str = proto.Field( - proto.STRING, - number=3, - ) - policy: policy_pb2.Policy = proto.Field( - proto.MESSAGE, - number=4, - message=policy_pb2.Policy, - ) - explanation: Explanation = proto.Field( - proto.MESSAGE, - number=5, - message=Explanation, - ) - - -class Permissions(proto.Message): - r"""IAM permissions. - - Attributes: - permissions (MutableSequence[str]): - A list of permissions. Example permission - string: "compute.disk.get". 
- """ - - permissions: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/mypy.ini b/owl-bot-staging/google-cloud-asset/v1p1beta1/mypy.ini deleted file mode 100644 index 574c5aed394b..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/noxfile.py b/owl-bot-staging/google-cloud-asset/v1p1beta1/noxfile.py deleted file mode 100644 index 8adcf2625b48..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/noxfile.py +++ /dev/null @@ -1,280 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import pathlib -import re -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", - "3.12", - "3.13", -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = 'google-cloud-asset' - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.13" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "prerelease_deps", -] - -@nox.session(python=ALL_PYTHON) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def unit(session, protobuf_implementation): - """Run the unit test suite.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") - - # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. - # The 'cpp' implementation requires Protobuf<4. 
- if protobuf_implementation == "cpp":
- session.install("protobuf<4")
-
- session.run(
- 'py.test',
- '--quiet',
- '--cov=google/cloud/asset_v1p1beta1/',
- '--cov=tests/',
- '--cov-config=.coveragerc',
- '--cov-report=term',
- '--cov-report=html',
- os.path.join('tests', 'unit', ''.join(session.posargs)),
- env={
- "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
- },
- )
-
-@nox.session(python=ALL_PYTHON[-1])
-@nox.parametrize(
- "protobuf_implementation",
- [ "python", "upb", "cpp" ],
-)
-def prerelease_deps(session, protobuf_implementation):
- """Run the unit test suite against pre-release versions of dependencies."""
-
- if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
- session.skip("cpp implementation is not supported in python 3.11+")
-
- # Install test environment dependencies
- session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"')
-
- # Install the package without dependencies
- session.install('-e', '.', '--no-deps')
-
- # We test the minimum dependency versions using the minimum Python
- # version so the lowest python runtime that we test has a corresponding constraints
- # file, located at `testing/constraints-<python-version>.txt`, which contains all of the
- # dependencies and extras.
- with open(
- CURRENT_DIRECTORY
- / "testing"
- / f"constraints-{ALL_PYTHON[0]}.txt",
- encoding="utf-8",
- ) as constraints_file:
- constraints_text = constraints_file.read()
-
- # Ignore leading whitespace and comment lines.
- constraints_deps = [
- match.group(1)
- for match in re.finditer(
- r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE
- )
- ]
-
- session.install(*constraints_deps)
-
- prerel_deps = [
- "googleapis-common-protos",
- "google-api-core",
- "google-auth",
- # Exclude grpcio==1.67.0rc1 which does not support python 3.13
- "grpcio!=1.67.0rc1",
- "grpcio-status",
- "protobuf",
- "proto-plus",
- ]
-
- for dep in prerel_deps:
- session.install("--pre", "--no-deps", "--upgrade", dep)
-
- # Remaining dependencies
- other_deps = [
- "requests",
- ]
- session.install(*other_deps)
-
- # Print out prerelease package versions
-
- session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)")
- session.run("python", "-c", "import google.auth; print(google.auth.__version__)")
- session.run("python", "-c", "import grpc; print(grpc.__version__)")
- session.run(
- "python", "-c", "import google.protobuf; print(google.protobuf.__version__)"
- )
- session.run(
- "python", "-c", "import proto; print(proto.__version__)"
- )
-
- session.run(
- 'py.test',
- '--quiet',
- '--cov=google/cloud/asset_v1p1beta1/',
- '--cov=tests/',
- '--cov-config=.coveragerc',
- '--cov-report=term',
- '--cov-report=html',
- os.path.join('tests', 'unit', ''.join(session.posargs)),
- env={
- "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
- },
- )
-
-
-@nox.session(python=DEFAULT_PYTHON_VERSION)
-def cover(session):
- """Run the final coverage report.
- This outputs the coverage report aggregating coverage from the unit
- test runs (not system test runs), and then erases coverage data.
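For local runs, the parametrized sessions above are addressed by their full nox session IDs, and `cover` then aggregates the data those runs leave behind before erasing it. A possible invocation, assuming nox is installed and using nox's standard `name-python(param=...)` session naming:

.. code-block:: console

    nox -s "unit-3.12(protobuf_implementation='upb')"
    nox -s cover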
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/samples/generated_samples/cloudasset_v1p1beta1_generated_asset_service_search_all_iam_policies_async.py b/owl-bot-staging/google-cloud-asset/v1p1beta1/samples/generated_samples/cloudasset_v1p1beta1_generated_asset_service_search_all_iam_policies_async.py deleted file mode 100644 index 4b0114a688e9..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/samples/generated_samples/cloudasset_v1p1beta1_generated_asset_service_search_all_iam_policies_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SearchAllIamPolicies -# NOTE: This snippet has been automatically generated for illustrative purposes only. 
-# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-asset - - -# [START cloudasset_v1p1beta1_generated_AssetService_SearchAllIamPolicies_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import asset_v1p1beta1 - - -async def sample_search_all_iam_policies(): - # Create a client - client = asset_v1p1beta1.AssetServiceAsyncClient() - - # Initialize request argument(s) - request = asset_v1p1beta1.SearchAllIamPoliciesRequest( - scope="scope_value", - ) - - # Make the request - page_result = client.search_all_iam_policies(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END cloudasset_v1p1beta1_generated_AssetService_SearchAllIamPolicies_async] diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/samples/generated_samples/cloudasset_v1p1beta1_generated_asset_service_search_all_iam_policies_sync.py b/owl-bot-staging/google-cloud-asset/v1p1beta1/samples/generated_samples/cloudasset_v1p1beta1_generated_asset_service_search_all_iam_policies_sync.py deleted file mode 100644 index 1c171c73e7b6..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/samples/generated_samples/cloudasset_v1p1beta1_generated_asset_service_search_all_iam_policies_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SearchAllIamPolicies -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-asset - - -# [START cloudasset_v1p1beta1_generated_AssetService_SearchAllIamPolicies_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import asset_v1p1beta1 - - -def sample_search_all_iam_policies(): - # Create a client - client = asset_v1p1beta1.AssetServiceClient() - - # Initialize request argument(s) - request = asset_v1p1beta1.SearchAllIamPoliciesRequest( - scope="scope_value", - ) - - # Make the request - page_result = client.search_all_iam_policies(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END cloudasset_v1p1beta1_generated_AssetService_SearchAllIamPolicies_sync] diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/samples/generated_samples/cloudasset_v1p1beta1_generated_asset_service_search_all_resources_async.py b/owl-bot-staging/google-cloud-asset/v1p1beta1/samples/generated_samples/cloudasset_v1p1beta1_generated_asset_service_search_all_resources_async.py deleted file mode 100644 index 91a298e22809..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/samples/generated_samples/cloudasset_v1p1beta1_generated_asset_service_search_all_resources_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SearchAllResources -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-asset - - -# [START cloudasset_v1p1beta1_generated_AssetService_SearchAllResources_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import asset_v1p1beta1 - - -async def sample_search_all_resources(): - # Create a client - client = asset_v1p1beta1.AssetServiceAsyncClient() - - # Initialize request argument(s) - request = asset_v1p1beta1.SearchAllResourcesRequest( - scope="scope_value", - ) - - # Make the request - page_result = client.search_all_resources(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END cloudasset_v1p1beta1_generated_AssetService_SearchAllResources_async] diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/samples/generated_samples/cloudasset_v1p1beta1_generated_asset_service_search_all_resources_sync.py b/owl-bot-staging/google-cloud-asset/v1p1beta1/samples/generated_samples/cloudasset_v1p1beta1_generated_asset_service_search_all_resources_sync.py deleted file mode 100644 index 0014b2c80291..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/samples/generated_samples/cloudasset_v1p1beta1_generated_asset_service_search_all_resources_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SearchAllResources -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-asset - - -# [START cloudasset_v1p1beta1_generated_AssetService_SearchAllResources_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import asset_v1p1beta1 - - -def sample_search_all_resources(): - # Create a client - client = asset_v1p1beta1.AssetServiceClient() - - # Initialize request argument(s) - request = asset_v1p1beta1.SearchAllResourcesRequest( - scope="scope_value", - ) - - # Make the request - page_result = client.search_all_resources(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END cloudasset_v1p1beta1_generated_AssetService_SearchAllResources_sync] diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p1beta1.json b/owl-bot-staging/google-cloud-asset/v1p1beta1/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p1beta1.json deleted file mode 100644 index 13d603d43770..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p1beta1.json +++ /dev/null @@ -1,361 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.asset.v1p1beta1", - "version": "v1p1beta1" - } - ], - "language": "PYTHON", - "name": "google-cloud-asset", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.asset_v1p1beta1.AssetServiceAsyncClient", - "shortName": "AssetServiceAsyncClient" - }, - "fullName": "google.cloud.asset_v1p1beta1.AssetServiceAsyncClient.search_all_iam_policies", - "method": { - "fullName": "google.cloud.asset.v1p1beta1.AssetService.SearchAllIamPolicies", - "service": { - "fullName": "google.cloud.asset.v1p1beta1.AssetService", - "shortName": "AssetService" - }, - "shortName": "SearchAllIamPolicies" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1p1beta1.types.SearchAllIamPoliciesRequest" - }, - { - "name": "scope", - "type": "str" - }, - { - "name": "query", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1p1beta1.services.asset_service.pagers.SearchAllIamPoliciesAsyncPager", - "shortName": "search_all_iam_policies" - }, - "description": "Sample for SearchAllIamPolicies", - "file": "cloudasset_v1p1beta1_generated_asset_service_search_all_iam_policies_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1p1beta1_generated_AssetService_SearchAllIamPolicies_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1p1beta1_generated_asset_service_search_all_iam_policies_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.asset_v1p1beta1.AssetServiceClient", - "shortName": "AssetServiceClient" - }, - "fullName": "google.cloud.asset_v1p1beta1.AssetServiceClient.search_all_iam_policies", - "method": { - "fullName": 
"google.cloud.asset.v1p1beta1.AssetService.SearchAllIamPolicies", - "service": { - "fullName": "google.cloud.asset.v1p1beta1.AssetService", - "shortName": "AssetService" - }, - "shortName": "SearchAllIamPolicies" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1p1beta1.types.SearchAllIamPoliciesRequest" - }, - { - "name": "scope", - "type": "str" - }, - { - "name": "query", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1p1beta1.services.asset_service.pagers.SearchAllIamPoliciesPager", - "shortName": "search_all_iam_policies" - }, - "description": "Sample for SearchAllIamPolicies", - "file": "cloudasset_v1p1beta1_generated_asset_service_search_all_iam_policies_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1p1beta1_generated_AssetService_SearchAllIamPolicies_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1p1beta1_generated_asset_service_search_all_iam_policies_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.asset_v1p1beta1.AssetServiceAsyncClient", - "shortName": "AssetServiceAsyncClient" - }, - "fullName": "google.cloud.asset_v1p1beta1.AssetServiceAsyncClient.search_all_resources", - "method": { - "fullName": "google.cloud.asset.v1p1beta1.AssetService.SearchAllResources", - "service": { - "fullName": "google.cloud.asset.v1p1beta1.AssetService", - "shortName": "AssetService" - }, - "shortName": "SearchAllResources" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1p1beta1.types.SearchAllResourcesRequest" - }, - { - "name": "scope", - "type": "str" - }, - { - "name": "query", - "type": "str" - }, - { - "name": "asset_types", - "type": "MutableSequence[str]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1p1beta1.services.asset_service.pagers.SearchAllResourcesAsyncPager", - "shortName": "search_all_resources" - }, - "description": "Sample for SearchAllResources", - "file": "cloudasset_v1p1beta1_generated_asset_service_search_all_resources_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1p1beta1_generated_AssetService_SearchAllResources_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1p1beta1_generated_asset_service_search_all_resources_async.py" - }, - { - "canonical": true, - "clientMethod": { - 
"client": { - "fullName": "google.cloud.asset_v1p1beta1.AssetServiceClient", - "shortName": "AssetServiceClient" - }, - "fullName": "google.cloud.asset_v1p1beta1.AssetServiceClient.search_all_resources", - "method": { - "fullName": "google.cloud.asset.v1p1beta1.AssetService.SearchAllResources", - "service": { - "fullName": "google.cloud.asset.v1p1beta1.AssetService", - "shortName": "AssetService" - }, - "shortName": "SearchAllResources" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1p1beta1.types.SearchAllResourcesRequest" - }, - { - "name": "scope", - "type": "str" - }, - { - "name": "query", - "type": "str" - }, - { - "name": "asset_types", - "type": "MutableSequence[str]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1p1beta1.services.asset_service.pagers.SearchAllResourcesPager", - "shortName": "search_all_resources" - }, - "description": "Sample for SearchAllResources", - "file": "cloudasset_v1p1beta1_generated_asset_service_search_all_resources_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1p1beta1_generated_AssetService_SearchAllResources_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1p1beta1_generated_asset_service_search_all_resources_sync.py" - } - ] -} diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/scripts/fixup_asset_v1p1beta1_keywords.py b/owl-bot-staging/google-cloud-asset/v1p1beta1/scripts/fixup_asset_v1p1beta1_keywords.py deleted file mode 100644 index 28463824c83c..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/scripts/fixup_asset_v1p1beta1_keywords.py +++ /dev/null @@ -1,177 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class assetCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'search_all_iam_policies': ('scope', 'query', 'page_size', 'page_token', ), - 'search_all_resources': ('scope', 'query', 'asset_types', 'page_size', 'page_token', 'order_by', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=assetCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the asset client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. 
- Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/setup.py b/owl-bot-staging/google-cloud-asset/v1p1beta1/setup.py deleted file mode 100644 index f74a55618d95..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/setup.py +++ /dev/null @@ -1,99 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
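-#
-# (Editorial note on scripts/fixup_asset_v1p1beta1_keywords.py above: it is a
-# command-line tool; a hypothetical invocation, using the flags defined by its
-# argparse parser and assuming an existing, empty output directory:
-#
-#     python3 scripts/fixup_asset_v1p1beta1_keywords.py \
-#         --input-directory ./my-src --output-directory ./my-src-fixed
-# )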
-# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-asset' - - -description = "Google Cloud Asset API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/asset/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "grpc-google-iam-v1 >= 0.14.0, <1.0.0dev", -] -extras = { -} -url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-asset" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - extras_require=extras, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-asset/v1p1beta1/testing/constraints-3.10.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/testing/constraints-3.10.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-asset/v1p1beta1/testing/constraints-3.11.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/testing/constraints-3.11.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-asset/v1p1beta1/testing/constraints-3.12.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/testing/constraints-3.12.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-asset/v1p1beta1/testing/constraints-3.13.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/testing/constraints-3.13.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-asset/v1p1beta1/testing/constraints-3.7.txt deleted file mode 100644 index fb7e93a1b473..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/testing/constraints-3.7.txt +++ /dev/null @@ -1,11 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -proto-plus==1.22.3 -protobuf==3.20.2 -grpc-google-iam-v1==0.14.0 diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-asset/v1p1beta1/testing/constraints-3.8.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/testing/constraints-3.8.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-asset/v1p1beta1/testing/constraints-3.9.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/testing/constraints-3.9.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/tests/__init__.py b/owl-bot-staging/google-cloud-asset/v1p1beta1/tests/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/tests/unit/__init__.py b/owl-bot-staging/google-cloud-asset/v1p1beta1/tests/unit/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-asset/v1p1beta1/tests/unit/gapic/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/tests/unit/gapic/asset_v1p1beta1/__init__.py b/owl-bot-staging/google-cloud-asset/v1p1beta1/tests/unit/gapic/asset_v1p1beta1/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/tests/unit/gapic/asset_v1p1beta1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-asset/v1p1beta1/tests/unit/gapic/asset_v1p1beta1/test_asset_service.py b/owl-bot-staging/google-cloud-asset/v1p1beta1/tests/unit/gapic/asset_v1p1beta1/test_asset_service.py deleted file mode 100644 index 98c0876c04dd..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p1beta1/tests/unit/gapic/asset_v1p1beta1/test_asset_service.py +++ /dev/null @@ -1,3342 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.asset_v1p1beta1.services.asset_service import AssetServiceAsyncClient -from google.cloud.asset_v1p1beta1.services.asset_service import AssetServiceClient -from google.cloud.asset_v1p1beta1.services.asset_service import pagers -from google.cloud.asset_v1p1beta1.services.asset_service import transports -from google.cloud.asset_v1p1beta1.types import asset_service -from google.cloud.asset_v1p1beta1.types import assets -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -import google.auth - - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": "service account credentials", - "principal": "service-account@example.com", -} -CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def 
mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert AssetServiceClient._get_default_mtls_endpoint(None) is None - assert AssetServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert AssetServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert AssetServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert AssetServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert AssetServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert AssetServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert AssetServiceClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert AssetServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - AssetServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert AssetServiceClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert AssetServiceClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert AssetServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with 
pytest.raises(MutualTLSChannelError) as excinfo: - AssetServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert AssetServiceClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert AssetServiceClient._get_client_cert_source(None, False) is None - assert AssetServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert AssetServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert AssetServiceClient._get_client_cert_source(None, True) is mock_default_cert_source - assert AssetServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(AssetServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceClient)) -@mock.patch.object(AssetServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = AssetServiceClient._DEFAULT_UNIVERSE - default_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert AssetServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert AssetServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == AssetServiceClient.DEFAULT_MTLS_ENDPOINT - assert AssetServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert AssetServiceClient._get_api_endpoint(None, None, default_universe, "always") == AssetServiceClient.DEFAULT_MTLS_ENDPOINT - assert AssetServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == AssetServiceClient.DEFAULT_MTLS_ENDPOINT - assert AssetServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert AssetServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - AssetServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." 
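-
-# (Editorial sketch: the endpoint resolution exercised above amounts to
-# formatting an endpoint template with a universe domain. A minimal
-# illustration, assuming the template is "cloudasset.{UNIVERSE_DOMAIN}":
-#
-#     template = "cloudasset.{UNIVERSE_DOMAIN}"
-#     assert template.format(UNIVERSE_DOMAIN="googleapis.com") == "cloudasset.googleapis.com"
-# )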
- - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert AssetServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert AssetServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert AssetServiceClient._get_universe_domain(None, None) == AssetServiceClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - AssetServiceClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." - -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = AssetServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - -@pytest.mark.parametrize("error_code", [401,403,404,500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = AssetServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - -@pytest.mark.parametrize("client_class,transport_name", [ - (AssetServiceClient, "grpc"), - (AssetServiceAsyncClient, "grpc_asyncio"), - (AssetServiceClient, "rest"), -]) -def test_asset_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'cloudasset.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://cloudasset.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.AssetServiceGrpcTransport, "grpc"), - (transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.AssetServiceRestTransport, "rest"), -]) -def test_asset_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, 
always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (AssetServiceClient, "grpc"), - (AssetServiceAsyncClient, "grpc_asyncio"), - (AssetServiceClient, "rest"), -]) -def test_asset_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'cloudasset.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://cloudasset.googleapis.com' - ) - - -def test_asset_service_client_get_transport_class(): - transport = AssetServiceClient.get_transport_class() - available_transports = [ - transports.AssetServiceGrpcTransport, - transports.AssetServiceRestTransport, - ] - assert transport in available_transports - - transport = AssetServiceClient.get_transport_class("grpc") - assert transport == transports.AssetServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc"), - (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (AssetServiceClient, transports.AssetServiceRestTransport, "rest"), -]) -@mock.patch.object(AssetServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceClient)) -@mock.patch.object(AssetServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceAsyncClient)) -def test_asset_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(AssetServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(AssetServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc", "true"), - (AssetServiceAsyncClient, 
transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc", "false"), - (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (AssetServiceClient, transports.AssetServiceRestTransport, "rest", "true"), - (AssetServiceClient, transports.AssetServiceRestTransport, "rest", "false"), -]) -@mock.patch.object(AssetServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceClient)) -@mock.patch.object(AssetServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_asset_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - AssetServiceClient, AssetServiceAsyncClient -]) -@mock.patch.object(AssetServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AssetServiceClient)) -@mock.patch.object(AssetServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AssetServiceAsyncClient)) -def test_asset_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - AssetServiceClient, AssetServiceAsyncClient -]) -@mock.patch.object(AssetServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceClient)) -@mock.patch.object(AssetServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceAsyncClient)) -def test_asset_service_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = AssetServiceClient._DEFAULT_UNIVERSE - default_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc"), - (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (AssetServiceClient, transports.AssetServiceRestTransport, "rest"), -]) -def test_asset_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc", grpc_helpers), - (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (AssetServiceClient, transports.AssetServiceRestTransport, "rest", None), -]) -def test_asset_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_asset_service_client_client_options_from_dict(): - with mock.patch('google.cloud.asset_v1p1beta1.services.asset_service.transports.AssetServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = AssetServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc", grpc_helpers), - (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_asset_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "cloudasset.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="cloudasset.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - asset_service.SearchAllResourcesRequest, - dict, -]) -def test_search_all_resources(request_type, transport: str = 'grpc'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_all_resources), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = asset_service.SearchAllResourcesResponse( - next_page_token='next_page_token_value', - ) - response = client.search_all_resources(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = asset_service.SearchAllResourcesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.SearchAllResourcesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_search_all_resources_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = asset_service.SearchAllResourcesRequest( - scope='scope_value', - query='query_value', - page_token='page_token_value', - order_by='order_by_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_all_resources), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.search_all_resources(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.SearchAllResourcesRequest( - scope='scope_value', - query='query_value', - page_token='page_token_value', - order_by='order_by_value', - ) - -def test_search_all_resources_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.search_all_resources in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.search_all_resources] = mock_rpc - request = {} - client.search_all_resources(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.search_all_resources(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_search_all_resources_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.search_all_resources in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.search_all_resources] = mock_rpc - - request = {} - await client.search_all_resources(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.search_all_resources(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_search_all_resources_async(transport: str = 'grpc_asyncio', request_type=asset_service.SearchAllResourcesRequest): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_all_resources), - '__call__') as call: - # Designate an appropriate return value for the call. 
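- # FakeUnaryUnaryCall wraps the response message so the mocked async stub
- # returns an awaitable, mirroring a real grpc.aio unary-unary call.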
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllResourcesResponse(
- next_page_token='next_page_token_value',
- ))
- response = await client.search_all_resources(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = asset_service.SearchAllResourcesRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.SearchAllResourcesAsyncPager)
- assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_search_all_resources_async_from_dict():
- await test_search_all_resources_async(request_type=dict)
-
-def test_search_all_resources_field_headers():
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = asset_service.SearchAllResourcesRequest()
-
- request.scope = 'scope_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.search_all_resources),
- '__call__') as call:
- call.return_value = asset_service.SearchAllResourcesResponse()
- client.search_all_resources(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'scope=scope_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_search_all_resources_field_headers_async():
- client = AssetServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = asset_service.SearchAllResourcesRequest()
-
- request.scope = 'scope_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.search_all_resources),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllResourcesResponse())
- await client.search_all_resources(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'scope=scope_value',
- ) in kw['metadata']
-
-
-def test_search_all_resources_flattened():
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.search_all_resources),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = asset_service.SearchAllResourcesResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.search_all_resources(
- scope='scope_value',
- query='query_value',
- asset_types=['asset_types_value'],
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].scope
- mock_val = 'scope_value'
- assert arg == mock_val
- arg = args[0].query
- mock_val = 'query_value'
- assert arg == mock_val
- arg = args[0].asset_types
- mock_val = ['asset_types_value']
- assert arg == mock_val
-
-
-def test_search_all_resources_flattened_error():
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.search_all_resources(
- asset_service.SearchAllResourcesRequest(),
- scope='scope_value',
- query='query_value',
- asset_types=['asset_types_value'],
- )
-
-@pytest.mark.asyncio
-async def test_search_all_resources_flattened_async():
- client = AssetServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.search_all_resources),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllResourcesResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.search_all_resources(
- scope='scope_value',
- query='query_value',
- asset_types=['asset_types_value'],
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].scope
- mock_val = 'scope_value'
- assert arg == mock_val
- arg = args[0].query
- mock_val = 'query_value'
- assert arg == mock_val
- arg = args[0].asset_types
- mock_val = ['asset_types_value']
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_search_all_resources_flattened_error_async():
- client = AssetServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.search_all_resources(
- asset_service.SearchAllResourcesRequest(),
- scope='scope_value',
- query='query_value',
- asset_types=['asset_types_value'],
- )
-
-
-def test_search_all_resources_pager(transport_name: str = "grpc"):
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.search_all_resources),
- '__call__') as call:
- # Set the response to a series of pages.
- call.side_effect = ( - asset_service.SearchAllResourcesResponse( - results=[ - assets.StandardResourceMetadata(), - assets.StandardResourceMetadata(), - assets.StandardResourceMetadata(), - ], - next_page_token='abc', - ), - asset_service.SearchAllResourcesResponse( - results=[], - next_page_token='def', - ), - asset_service.SearchAllResourcesResponse( - results=[ - assets.StandardResourceMetadata(), - ], - next_page_token='ghi', - ), - asset_service.SearchAllResourcesResponse( - results=[ - assets.StandardResourceMetadata(), - assets.StandardResourceMetadata(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('scope', ''), - )), - ) - pager = client.search_all_resources(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, assets.StandardResourceMetadata) - for i in results) -def test_search_all_resources_pages(transport_name: str = "grpc"): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_all_resources), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - asset_service.SearchAllResourcesResponse( - results=[ - assets.StandardResourceMetadata(), - assets.StandardResourceMetadata(), - assets.StandardResourceMetadata(), - ], - next_page_token='abc', - ), - asset_service.SearchAllResourcesResponse( - results=[], - next_page_token='def', - ), - asset_service.SearchAllResourcesResponse( - results=[ - assets.StandardResourceMetadata(), - ], - next_page_token='ghi', - ), - asset_service.SearchAllResourcesResponse( - results=[ - assets.StandardResourceMetadata(), - assets.StandardResourceMetadata(), - ], - ), - RuntimeError, - ) - pages = list(client.search_all_resources(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_search_all_resources_async_pager(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_all_resources), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
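- # Each response in side_effect is consumed by one RPC; the trailing
- # RuntimeError would only surface if the pager requested a page past the
- # final response, which carries no next_page_token.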
- call.side_effect = ( - asset_service.SearchAllResourcesResponse( - results=[ - assets.StandardResourceMetadata(), - assets.StandardResourceMetadata(), - assets.StandardResourceMetadata(), - ], - next_page_token='abc', - ), - asset_service.SearchAllResourcesResponse( - results=[], - next_page_token='def', - ), - asset_service.SearchAllResourcesResponse( - results=[ - assets.StandardResourceMetadata(), - ], - next_page_token='ghi', - ), - asset_service.SearchAllResourcesResponse( - results=[ - assets.StandardResourceMetadata(), - assets.StandardResourceMetadata(), - ], - ), - RuntimeError, - ) - async_pager = await client.search_all_resources(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, assets.StandardResourceMetadata) - for i in responses) - - -@pytest.mark.asyncio -async def test_search_all_resources_async_pages(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_all_resources), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - asset_service.SearchAllResourcesResponse( - results=[ - assets.StandardResourceMetadata(), - assets.StandardResourceMetadata(), - assets.StandardResourceMetadata(), - ], - next_page_token='abc', - ), - asset_service.SearchAllResourcesResponse( - results=[], - next_page_token='def', - ), - asset_service.SearchAllResourcesResponse( - results=[ - assets.StandardResourceMetadata(), - ], - next_page_token='ghi', - ), - asset_service.SearchAllResourcesResponse( - results=[ - assets.StandardResourceMetadata(), - assets.StandardResourceMetadata(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.search_all_resources(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - asset_service.SearchAllIamPoliciesRequest, - dict, -]) -def test_search_all_iam_policies(request_type, transport: str = 'grpc'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = asset_service.SearchAllIamPoliciesResponse( - next_page_token='next_page_token_value', - ) - response = client.search_all_iam_policies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = asset_service.SearchAllIamPoliciesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.SearchAllIamPoliciesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_search_all_iam_policies_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = asset_service.SearchAllIamPoliciesRequest( - scope='scope_value', - query='query_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.search_all_iam_policies(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.SearchAllIamPoliciesRequest( - scope='scope_value', - query='query_value', - page_token='page_token_value', - ) - -def test_search_all_iam_policies_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.search_all_iam_policies in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.search_all_iam_policies] = mock_rpc - request = {} - client.search_all_iam_policies(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.search_all_iam_policies(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_search_all_iam_policies_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = AssetServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.search_all_iam_policies in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.search_all_iam_policies] = mock_rpc
-
- request = {}
- await client.search_all_iam_policies(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.search_all_iam_policies(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_search_all_iam_policies_async(transport: str = 'grpc_asyncio', request_type=asset_service.SearchAllIamPoliciesRequest):
- client = AssetServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.search_all_iam_policies),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllIamPoliciesResponse(
- next_page_token='next_page_token_value',
- ))
- response = await client.search_all_iam_policies(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = asset_service.SearchAllIamPoliciesRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.SearchAllIamPoliciesAsyncPager)
- assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_search_all_iam_policies_async_from_dict():
- await test_search_all_iam_policies_async(request_type=dict)
-
-def test_search_all_iam_policies_field_headers():
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = asset_service.SearchAllIamPoliciesRequest()
-
- request.scope = 'scope_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: - call.return_value = asset_service.SearchAllIamPoliciesResponse() - client.search_all_iam_policies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'scope=scope_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_search_all_iam_policies_field_headers_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = asset_service.SearchAllIamPoliciesRequest() - - request.scope = 'scope_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllIamPoliciesResponse()) - await client.search_all_iam_policies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'scope=scope_value', - ) in kw['metadata'] - - -def test_search_all_iam_policies_flattened(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = asset_service.SearchAllIamPoliciesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.search_all_iam_policies( - scope='scope_value', - query='query_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].scope - mock_val = 'scope_value' - assert arg == mock_val - arg = args[0].query - mock_val = 'query_value' - assert arg == mock_val - - -def test_search_all_iam_policies_flattened_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.search_all_iam_policies( - asset_service.SearchAllIamPoliciesRequest(), - scope='scope_value', - query='query_value', - ) - -@pytest.mark.asyncio -async def test_search_all_iam_policies_flattened_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllIamPoliciesResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.search_all_iam_policies(
- scope='scope_value',
- query='query_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].scope
- mock_val = 'scope_value'
- assert arg == mock_val
- arg = args[0].query
- mock_val = 'query_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_search_all_iam_policies_flattened_error_async():
- client = AssetServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.search_all_iam_policies(
- asset_service.SearchAllIamPoliciesRequest(),
- scope='scope_value',
- query='query_value',
- )
-
-
-def test_search_all_iam_policies_pager(transport_name: str = "grpc"):
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.search_all_iam_policies),
- '__call__') as call:
- # Set the response to a series of pages.
- call.side_effect = (
- asset_service.SearchAllIamPoliciesResponse(
- results=[
- assets.IamPolicySearchResult(),
- assets.IamPolicySearchResult(),
- assets.IamPolicySearchResult(),
- ],
- next_page_token='abc',
- ),
- asset_service.SearchAllIamPoliciesResponse(
- results=[],
- next_page_token='def',
- ),
- asset_service.SearchAllIamPoliciesResponse(
- results=[
- assets.IamPolicySearchResult(),
- ],
- next_page_token='ghi',
- ),
- asset_service.SearchAllIamPoliciesResponse(
- results=[
- assets.IamPolicySearchResult(),
- assets.IamPolicySearchResult(),
- ],
- ),
- RuntimeError,
- )
-
- expected_metadata = ()
- retry = retries.Retry()
- timeout = 5
- expected_metadata = tuple(expected_metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ('scope', ''),
- )),
- )
- pager = client.search_all_iam_policies(request={}, retry=retry, timeout=timeout)
-
- assert pager._metadata == expected_metadata
- assert pager._retry == retry
- assert pager._timeout == timeout
-
- results = list(pager)
- assert len(results) == 6
- assert all(isinstance(i, assets.IamPolicySearchResult)
- for i in results)
-def test_search_all_iam_policies_pages(transport_name: str = "grpc"):
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.search_all_iam_policies),
- '__call__') as call:
- # Set the response to a series of pages.
- call.side_effect = ( - asset_service.SearchAllIamPoliciesResponse( - results=[ - assets.IamPolicySearchResult(), - assets.IamPolicySearchResult(), - assets.IamPolicySearchResult(), - ], - next_page_token='abc', - ), - asset_service.SearchAllIamPoliciesResponse( - results=[], - next_page_token='def', - ), - asset_service.SearchAllIamPoliciesResponse( - results=[ - assets.IamPolicySearchResult(), - ], - next_page_token='ghi', - ), - asset_service.SearchAllIamPoliciesResponse( - results=[ - assets.IamPolicySearchResult(), - assets.IamPolicySearchResult(), - ], - ), - RuntimeError, - ) - pages = list(client.search_all_iam_policies(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_search_all_iam_policies_async_pager(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - asset_service.SearchAllIamPoliciesResponse( - results=[ - assets.IamPolicySearchResult(), - assets.IamPolicySearchResult(), - assets.IamPolicySearchResult(), - ], - next_page_token='abc', - ), - asset_service.SearchAllIamPoliciesResponse( - results=[], - next_page_token='def', - ), - asset_service.SearchAllIamPoliciesResponse( - results=[ - assets.IamPolicySearchResult(), - ], - next_page_token='ghi', - ), - asset_service.SearchAllIamPoliciesResponse( - results=[ - assets.IamPolicySearchResult(), - assets.IamPolicySearchResult(), - ], - ), - RuntimeError, - ) - async_pager = await client.search_all_iam_policies(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, assets.IamPolicySearchResult) - for i in responses) - - -@pytest.mark.asyncio -async def test_search_all_iam_policies_async_pages(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - asset_service.SearchAllIamPoliciesResponse( - results=[ - assets.IamPolicySearchResult(), - assets.IamPolicySearchResult(), - assets.IamPolicySearchResult(), - ], - next_page_token='abc', - ), - asset_service.SearchAllIamPoliciesResponse( - results=[], - next_page_token='def', - ), - asset_service.SearchAllIamPoliciesResponse( - results=[ - assets.IamPolicySearchResult(), - ], - next_page_token='ghi', - ), - asset_service.SearchAllIamPoliciesResponse( - results=[ - assets.IamPolicySearchResult(), - assets.IamPolicySearchResult(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.search_all_iam_policies(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_search_all_resources_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.search_all_resources in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.search_all_resources] = mock_rpc - - request = {} - client.search_all_resources(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.search_all_resources(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_search_all_resources_rest_required_fields(request_type=asset_service.SearchAllResourcesRequest): - transport_class = transports.AssetServiceRestTransport - - request_init = {} - request_init["scope"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_resources._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["scope"] = 'scope_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_resources._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("asset_types", "order_by", "page_size", "page_token", "query", ))
- jsonified_request.update(unset_fields)
-
- # verify required fields with non-default values are left alone
- assert "scope" in jsonified_request
- assert jsonified_request["scope"] == 'scope_value'
-
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='rest',
- )
- request = request_type(**request_init)
-
- # Designate an appropriate value for the returned response.
- return_value = asset_service.SearchAllResourcesResponse()
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, 'request') as req:
- # We need to mock transcode() because providing default values
- # for required fields will fail the real version if the http_options
- # expect actual values for those fields.
- with mock.patch.object(path_template, 'transcode') as transcode:
- # A uri without fields and an empty body will force all the
- # request fields to show up in the query_params.
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "get",
- 'query_params': pb_request,
- }
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- # Convert return value to protobuf type
- return_value = asset_service.SearchAllResourcesResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.search_all_resources(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_search_all_resources_rest_unset_required_fields():
- transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.search_all_resources._get_unset_required_fields({})
- assert set(unset_fields) == (set(("assetTypes", "orderBy", "pageSize", "pageToken", "query", )) & set(("scope", )))
-
-
-def test_search_all_resources_rest_flattened():
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = asset_service.SearchAllResourcesResponse()
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'scope': 'sample1/sample2'}
-
- # get truthy value for each flattened field
- mock_args = dict(
- scope='scope_value',
- query='query_value',
- asset_types=['asset_types_value'],
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = asset_service.SearchAllResourcesResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- client.search_all_resources(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1p1beta1/{scope=*/*}/resources:searchAll" % client.transport._host, args[1])
-
-
-def test_search_all_resources_rest_flattened_error(transport: str = 'rest'):
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.search_all_resources(
- asset_service.SearchAllResourcesRequest(),
- scope='scope_value',
- query='query_value',
- asset_types=['asset_types_value'],
- )
-
-
-def test_search_all_resources_rest_pager(transport: str = 'rest'):
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, 'request') as req:
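- # Each page below is serialized to JSON and returned by one mocked HTTP
- # call, so iterating the pager drives a sequence of requests.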
- # Set the response as a series of pages
- response = (
- asset_service.SearchAllResourcesResponse(
- results=[
- assets.StandardResourceMetadata(),
- assets.StandardResourceMetadata(),
- assets.StandardResourceMetadata(),
- ],
- next_page_token='abc',
- ),
- asset_service.SearchAllResourcesResponse(
- results=[],
- next_page_token='def',
- ),
- asset_service.SearchAllResourcesResponse(
- results=[
- assets.StandardResourceMetadata(),
- ],
- next_page_token='ghi',
- ),
- asset_service.SearchAllResourcesResponse(
- results=[
- assets.StandardResourceMetadata(),
- assets.StandardResourceMetadata(),
- ],
- ),
- )
- # Two responses for two calls
- response = response + response
-
- # Wrap the values into proper Response objs
- response = tuple(asset_service.SearchAllResourcesResponse.to_json(x) for x in response)
- return_values = tuple(Response() for i in response)
- for return_val, response_val in zip(return_values, response):
- return_val._content = response_val.encode('UTF-8')
- return_val.status_code = 200
- req.side_effect = return_values
-
- sample_request = {'scope': 'sample1/sample2'}
-
- pager = client.search_all_resources(request=sample_request)
-
- results = list(pager)
- assert len(results) == 6
- assert all(isinstance(i, assets.StandardResourceMetadata)
- for i in results)
-
- pages = list(client.search_all_resources(request=sample_request).pages)
- for page_, token in zip(pages, ['abc','def','ghi', '']):
- assert page_.raw_page.next_page_token == token
-
-
-def test_search_all_iam_policies_rest_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.search_all_iam_policies in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.search_all_iam_policies] = mock_rpc
-
- request = {}
- client.search_all_iam_policies(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1 - - client.search_all_iam_policies(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_search_all_iam_policies_rest_required_fields(request_type=asset_service.SearchAllIamPoliciesRequest): - transport_class = transports.AssetServiceRestTransport - - request_init = {} - request_init["scope"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_iam_policies._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["scope"] = 'scope_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_iam_policies._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", "query", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "scope" in jsonified_request - assert jsonified_request["scope"] == 'scope_value' - - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = asset_service.SearchAllIamPoliciesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "get",
- 'query_params': pb_request,
- }
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- # Convert return value to protobuf type
- return_value = asset_service.SearchAllIamPoliciesResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.search_all_iam_policies(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_search_all_iam_policies_rest_unset_required_fields():
- transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.search_all_iam_policies._get_unset_required_fields({})
- assert set(unset_fields) == (set(("pageSize", "pageToken", "query", )) & set(("scope", )))
-
-
-def test_search_all_iam_policies_rest_flattened():
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = asset_service.SearchAllIamPoliciesResponse()
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'scope': 'sample1/sample2'}
-
- # get truthy value for each flattened field
- mock_args = dict(
- scope='scope_value',
- query='query_value',
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = asset_service.SearchAllIamPoliciesResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- client.search_all_iam_policies(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1p1beta1/{scope=*/*}/iamPolicies:searchAll" % client.transport._host, args[1])
-
-
-def test_search_all_iam_policies_rest_flattened_error(transport: str = 'rest'):
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.search_all_iam_policies(
- asset_service.SearchAllIamPoliciesRequest(),
- scope='scope_value',
- query='query_value',
- )
-
-
-def test_search_all_iam_policies_rest_pager(transport: str = 'rest'):
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, 'request') as req:
- # Set the response as a series of pages
- response = (
- asset_service.SearchAllIamPoliciesResponse(
- results=[
- assets.IamPolicySearchResult(),
- assets.IamPolicySearchResult(),
- assets.IamPolicySearchResult(),
- ],
- next_page_token='abc',
- ),
- asset_service.SearchAllIamPoliciesResponse(
- results=[],
- next_page_token='def',
- ),
- asset_service.SearchAllIamPoliciesResponse(
- results=[
- assets.IamPolicySearchResult(),
- ],
- next_page_token='ghi',
- ),
- asset_service.SearchAllIamPoliciesResponse(
- results=[
- assets.IamPolicySearchResult(),
- assets.IamPolicySearchResult(),
- ],
- ),
- )
- # Two responses for two calls
- response = response + response
-
- # Wrap the values into proper Response objs
- response = tuple(asset_service.SearchAllIamPoliciesResponse.to_json(x) for x in response)
- return_values = tuple(Response() for i in response)
- for return_val, response_val in zip(return_values, response):
- return_val._content = response_val.encode('UTF-8')
- return_val.status_code = 200
- req.side_effect = return_values
-
- sample_request = {'scope': 'sample1/sample2'}
-
- pager = client.search_all_iam_policies(request=sample_request)
-
- results = list(pager)
- assert len(results) == 6
- assert all(isinstance(i, assets.IamPolicySearchResult)
- for i in results)
-
- pages = list(client.search_all_iam_policies(request=sample_request).pages)
- for page_, token in zip(pages, ['abc','def','ghi', '']):
- assert page_.raw_page.next_page_token == token
-
-
-def test_credentials_transport_error():
- # It is an error to provide credentials and a transport instance.
- transport = transports.AssetServiceGrpcTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- with pytest.raises(ValueError):
- client = AssetServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # It is an error to provide a credentials file and a transport instance.
- transport = transports.AssetServiceGrpcTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- with pytest.raises(ValueError):
- client = AssetServiceClient(
- client_options={"credentials_file": "credentials.json"},
- transport=transport,
- )
-
- # It is an error to provide an api_key and a transport instance.
- transport = transports.AssetServiceGrpcTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- options = client_options.ClientOptions()
- options.api_key = "api_key"
- with pytest.raises(ValueError):
- client = AssetServiceClient(
- client_options=options,
- transport=transport,
- )
-
- # It is an error to provide an api_key and a credential.
- options = client_options.ClientOptions()
- options.api_key = "api_key"
- with pytest.raises(ValueError):
- client = AssetServiceClient(
- client_options=options,
- credentials=ga_credentials.AnonymousCredentials()
- )
-
- # It is an error to provide scopes and a transport instance.
- transport = transports.AssetServiceGrpcTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- with pytest.raises(ValueError):
- client = AssetServiceClient(
- client_options={"scopes": ["1", "2"]},
- transport=transport,
- )
-
-
-def test_transport_instance():
- # A client may be instantiated with a custom transport instance.
- transport = transports.AssetServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = AssetServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.AssetServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.AssetServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.AssetServiceGrpcTransport, - transports.AssetServiceGrpcAsyncIOTransport, - transports.AssetServiceRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = AssetServiceClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_search_all_resources_empty_call_grpc(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.search_all_resources), - '__call__') as call: - call.return_value = asset_service.SearchAllResourcesResponse() - client.search_all_resources(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.SearchAllResourcesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_search_all_iam_policies_empty_call_grpc(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: - call.return_value = asset_service.SearchAllIamPoliciesResponse() - client.search_all_iam_policies(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.SearchAllIamPoliciesRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = AssetServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_search_all_resources_empty_call_grpc_asyncio(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.search_all_resources), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllResourcesResponse( - next_page_token='next_page_token_value', - )) - await client.search_all_resources(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.SearchAllResourcesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_search_all_iam_policies_empty_call_grpc_asyncio(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllIamPoliciesResponse( - next_page_token='next_page_token_value', - )) - await client.search_all_iam_policies(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.SearchAllIamPoliciesRequest() - - assert args[0] == request_msg - - -def test_transport_kind_rest(): - transport = AssetServiceClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" - - -def test_search_all_resources_rest_bad_request(request_type=asset_service.SearchAllResourcesRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.search_all_resources(request) - - -@pytest.mark.parametrize("request_type", [ - asset_service.SearchAllResourcesRequest, - dict, -]) -def test_search_all_resources_rest_call_success(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = asset_service.SearchAllResourcesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = asset_service.SearchAllResourcesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.search_all_resources(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.SearchAllResourcesPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_search_all_resources_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_search_all_resources") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_search_all_resources_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_search_all_resources") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = asset_service.SearchAllResourcesRequest.pb(asset_service.SearchAllResourcesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.SearchAllResourcesResponse.to_json(asset_service.SearchAllResourcesResponse()) - req.return_value.content = return_value - - request = asset_service.SearchAllResourcesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.SearchAllResourcesResponse() - post_with_metadata.return_value = asset_service.SearchAllResourcesResponse(), metadata - - client.search_all_resources(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_search_all_iam_policies_rest_bad_request(request_type=asset_service.SearchAllIamPoliciesRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
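The interceptor test above drives the hooks that the generated REST transport exposes to user code. As an illustrative sketch of how such an interceptor is written in practice (hook names follow the ``pre_<rpc>``/``post_<rpc>`` convention exercised above; exact signatures can vary across generator versions, so treat this as a sketch rather than a canonical API reference):

.. code-block:: python

    import logging

    from google.auth import credentials as ga_credentials
    from google.cloud.asset_v1p1beta1.services.asset_service import (
        AssetServiceClient,
        transports,
    )

    class LoggingInterceptor(transports.AssetServiceRestInterceptor):
        def pre_search_all_resources(self, request, metadata):
            # Inspect or rewrite the request before it is sent.
            logging.debug("SearchAllResources scope=%s", request.scope)
            return request, metadata

        def post_search_all_resources(self, response):
            # Inspect or rewrite the response before the caller sees it.
            logging.debug("got %d results", len(response.results))
            return response

    transport = transports.AssetServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=LoggingInterceptor(),
    )
    client = AssetServiceClient(transport=transport)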
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.search_all_iam_policies(request) - - -@pytest.mark.parametrize("request_type", [ - asset_service.SearchAllIamPoliciesRequest, - dict, -]) -def test_search_all_iam_policies_rest_call_success(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.SearchAllIamPoliciesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = asset_service.SearchAllIamPoliciesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.search_all_iam_policies(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.SearchAllIamPoliciesPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_search_all_iam_policies_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_search_all_iam_policies") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_search_all_iam_policies_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_search_all_iam_policies") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = asset_service.SearchAllIamPoliciesRequest.pb(asset_service.SearchAllIamPoliciesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.SearchAllIamPoliciesResponse.to_json(asset_service.SearchAllIamPoliciesResponse()) - req.return_value.content = return_value - - request = asset_service.SearchAllIamPoliciesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.SearchAllIamPoliciesResponse() - post_with_metadata.return_value = asset_service.SearchAllIamPoliciesResponse(), metadata - - client.search_all_iam_policies(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - -def test_initialize_client_w_rest(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_search_all_resources_empty_call_rest(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.search_all_resources), - '__call__') as call: - client.search_all_resources(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.SearchAllResourcesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_search_all_iam_policies_empty_call_rest(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. 
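Both Search RPCs return pager objects (asserted via ``pagers.SearchAllResourcesPager`` and ``pagers.SearchAllIamPoliciesPager`` above) rather than raw response messages. A hedged usage sketch against the live service, which requires Application Default Credentials; the scope and query values are placeholders:

.. code-block:: python

    from google.cloud import asset_v1p1beta1

    client = asset_v1p1beta1.AssetServiceClient()

    pager = client.search_all_resources(
        request={"scope": "projects/my-project", "query": "name:instance"}
    )

    # Iterating the pager transparently issues follow-up requests using
    # next_page_token until the results are exhausted.
    for resource in pager:
        print(resource.name)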
- with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: - client.search_all_iam_policies(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.SearchAllIamPoliciesRequest() - - assert args[0] == request_msg - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.AssetServiceGrpcTransport, - ) - -def test_asset_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.AssetServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_asset_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.asset_v1p1beta1.services.asset_service.transports.AssetServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.AssetServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'search_all_resources', - 'search_all_iam_policies', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_asset_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.asset_v1p1beta1.services.asset_service.transports.AssetServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.AssetServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_asset_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.asset_v1p1beta1.services.asset_service.transports.AssetServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.AssetServiceTransport() - adc.assert_called_once() - - -def test_asset_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. 
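These ADC tests assert that, absent explicit credentials, the client and transport fall back to ``google.auth.default()``. A short sketch of what that resolution does outside the test harness; it consults ``GOOGLE_APPLICATION_CREDENTIALS``, gcloud user credentials, and the metadata server, in that order:

.. code-block:: python

    import google.auth

    # Returns (credentials, project_id); raises DefaultCredentialsError
    # if no credential source can be found in the environment.
    credentials, project_id = google.auth.default(
        scopes=["https://www.googleapis.com/auth/cloud-platform"],
    )
    print(project_id, type(credentials).__name__)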
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - AssetServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.AssetServiceGrpcTransport, - transports.AssetServiceGrpcAsyncIOTransport, - ], -) -def test_asset_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.AssetServiceGrpcTransport, - transports.AssetServiceGrpcAsyncIOTransport, - transports.AssetServiceRestTransport, - ], -) -def test_asset_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.AssetServiceGrpcTransport, grpc_helpers), - (transports.AssetServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_asset_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "cloudasset.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="cloudasset.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.AssetServiceGrpcTransport, transports.AssetServiceGrpcAsyncIOTransport]) -def test_asset_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. 
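The ``quota_project_id="octopus"`` plumbing asserted in the tests above is normally supplied through client options in application code. A brief sketch, where ``"my-quota-project"`` is a placeholder; client construction still resolves credentials via ADC:

.. code-block:: python

    from google.api_core import client_options
    from google.cloud import asset_v1p1beta1

    # Bill quota against a project other than the credentials' own.
    options = client_options.ClientOptions(quota_project_id="my-quota-project")
    client = asset_v1p1beta1.AssetServiceClient(client_options=options)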
- with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_asset_service_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.AssetServiceRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_asset_service_host_no_port(transport_name): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='cloudasset.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'cloudasset.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://cloudasset.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_asset_service_host_with_port(transport_name): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='cloudasset.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'cloudasset.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://cloudasset.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_asset_service_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = AssetServiceClient( - credentials=creds1, - transport=transport_name, - ) - client2 = AssetServiceClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.search_all_resources._session - session2 = client2.transport.search_all_resources._session - assert session1 != session2 - session1 = client1.transport.search_all_iam_policies._session - session2 = client2.transport.search_all_iam_policies._session - assert session1 != session2 -def test_asset_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
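The host tests above encode the endpoint defaulting rules: ``:443`` is appended for gRPC transports and ``https://`` is prefixed for REST. Overriding the endpoint works the same way in application code; a sketch with a placeholder port:

.. code-block:: python

    from google.api_core import client_options
    from google.auth import credentials as ga_credentials
    from google.cloud import asset_v1p1beta1

    options = client_options.ClientOptions(
        api_endpoint="cloudasset.googleapis.com:8000"
    )
    client = asset_v1p1beta1.AssetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=options,
    )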
- transport = transports.AssetServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_asset_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.AssetServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.AssetServiceGrpcTransport, transports.AssetServiceGrpcAsyncIOTransport]) -def test_asset_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
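As the channel tests above show, a pre-built gRPC channel can be handed directly to the transport, in which case the transport uses it as-is and attaches no credentials or TLS settings of its own. This is useful for pointing a client at a local emulator; the address below is a placeholder:

.. code-block:: python

    import grpc

    from google.cloud.asset_v1p1beta1.services.asset_service import (
        AssetServiceClient,
        transports,
    )

    # The channel is used verbatim; no auth handshake is performed.
    channel = grpc.insecure_channel("localhost:8500")
    transport = transports.AssetServiceGrpcTransport(channel=channel)
    client = AssetServiceClient(transport=transport)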
-@pytest.mark.parametrize("transport_class", [transports.AssetServiceGrpcTransport, transports.AssetServiceGrpcAsyncIOTransport]) -def test_asset_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = AssetServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = AssetServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = AssetServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format(folder=folder, ) - actual = AssetServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = AssetServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = AssetServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format(organization=organization, ) - actual = AssetServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = AssetServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = AssetServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format(project=project, ) - actual = AssetServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = AssetServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. 
- actual = AssetServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = AssetServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = AssetServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = AssetServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.AssetServiceTransport, '_prep_wrapped_messages') as prep: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.AssetServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = AssetServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_transport_close_grpc(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close_rest(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
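The transport-close tests above rely on the clients being context managers: exiting the ``with`` block closes the underlying gRPC channel or REST session deterministically. A short sketch of that usage:

.. code-block:: python

    from google.auth import credentials as ga_credentials
    from google.cloud import asset_v1p1beta1

    with asset_v1p1beta1.AssetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    ) as client:
        ...  # issue RPCs here
    # On exit, client.transport.close() has been called.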
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (AssetServiceClient, transports.AssetServiceGrpcTransport), - (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/.coveragerc b/owl-bot-staging/google-cloud-asset/v1p2beta1/.coveragerc deleted file mode 100644 index 801f6d8a1da7..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/asset/__init__.py - google/cloud/asset/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/.flake8 b/owl-bot-staging/google-cloud-asset/v1p2beta1/.flake8 deleted file mode 100644 index 29227d4cf419..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. 
- **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/MANIFEST.in b/owl-bot-staging/google-cloud-asset/v1p2beta1/MANIFEST.in deleted file mode 100644 index ff48182d1920..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/asset *.py -recursive-include google/cloud/asset_v1p2beta1 *.py diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/README.rst b/owl-bot-staging/google-cloud-asset/v1p2beta1/README.rst deleted file mode 100644 index a10b3ef1e958..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/README.rst +++ /dev/null @@ -1,143 +0,0 @@ -Python Client for Google Cloud Asset API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Asset API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv - source /bin/activate - /bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. code-block:: console - - python3 -m venv - \Scripts\activate - \Scripts\pip.exe install \path\to\library - - -Logging -------- - -This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes. -Note the following: - -#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging. -#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**. -#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below. - - -Simple, environment-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google -logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged -messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging -event. - -A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log. 
-
-- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc.
-- Invalid logging scopes: :code:`foo`, :code:`123`, etc.
-
-**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers.
-
-
-Examples
-^^^^^^^^
-
-- Enabling the default handler for all Google-based loggers
-
-.. code-block:: console
-
-    export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google
-
-- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`):
-
-.. code-block:: console
-
-    export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1
-
-
-Advanced, code-based configuration
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-You can also configure a valid logging scope using Python's standard `logging` mechanism.
-
-
-Examples
-^^^^^^^^
-
-- Configuring a handler for all Google-based loggers
-
-.. code-block:: python
-
-    import logging
-
-    from google.cloud.translate_v3 import translate
-
-    base_logger = logging.getLogger("google")
-    base_logger.addHandler(logging.StreamHandler())
-    base_logger.setLevel(logging.DEBUG)
-
-- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`):
-
-.. code-block:: python
-
-    import logging
-
-    from google.cloud.translate_v3 import translate
-
-    base_logger = logging.getLogger("google.cloud.library_v1")
-    base_logger.addHandler(logging.StreamHandler())
-    base_logger.setLevel(logging.DEBUG)
-
-
-Logging details
-~~~~~~~~~~~~~~~
-
-#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root
-   logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set
-   :code:`logging.getLogger("google").propagate = True` in your code.
-#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for
-   one library, but decide you need to also set up environment-based logging configuration for another library.
-
-   #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual
-      if the code-based configuration gets applied first.
-
-#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get
-   executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured.
-   (This is the reason for 2.i. above.)
diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/docs/_static/custom.css b/owl-bot-staging/google-cloud-asset/v1p2beta1/docs/_static/custom.css
deleted file mode 100644
index 06423be0b592..000000000000
--- a/owl-bot-staging/google-cloud-asset/v1p2beta1/docs/_static/custom.css
+++ /dev/null
@@ -1,3 +0,0 @@
-dl.field-list > dt {
-    min-width: 100px
-}
diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/docs/asset_v1p2beta1/asset_service.rst b/owl-bot-staging/google-cloud-asset/v1p2beta1/docs/asset_v1p2beta1/asset_service.rst
deleted file mode 100644
index 21bc405ac5c2..000000000000
--- a/owl-bot-staging/google-cloud-asset/v1p2beta1/docs/asset_v1p2beta1/asset_service.rst
+++ /dev/null
@@ -1,6 +0,0 @@
-AssetService
-------------------------------
-
-.. 
automodule:: google.cloud.asset_v1p2beta1.services.asset_service - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/docs/asset_v1p2beta1/services_.rst b/owl-bot-staging/google-cloud-asset/v1p2beta1/docs/asset_v1p2beta1/services_.rst deleted file mode 100644 index f8ff126c0693..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/docs/asset_v1p2beta1/services_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Asset v1p2beta1 API -============================================= -.. toctree:: - :maxdepth: 2 - - asset_service diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/docs/asset_v1p2beta1/types_.rst b/owl-bot-staging/google-cloud-asset/v1p2beta1/docs/asset_v1p2beta1/types_.rst deleted file mode 100644 index cfbf146ef62f..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/docs/asset_v1p2beta1/types_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Asset v1p2beta1 API -========================================== - -.. automodule:: google.cloud.asset_v1p2beta1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/docs/conf.py b/owl-bot-staging/google-cloud-asset/v1p2beta1/docs/conf.py deleted file mode 100644 index fa7647914fb5..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-asset documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. 
-templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = u"google-cloud-asset" -copyright = u"2023, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = 'en' - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. 
-# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-asset-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). 
- # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-asset.tex", - u"google-cloud-asset Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-asset", - u"Google Cloud Asset Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-asset", - u"google-cloud-asset Documentation", - author, - "google-cloud-asset", - "GAPIC library for Google Cloud Asset API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. 
-intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/docs/index.rst b/owl-bot-staging/google-cloud-asset/v1p2beta1/docs/index.rst deleted file mode 100644 index be6edf55dcb7..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - asset_v1p2beta1/services_ - asset_v1p2beta1/types_ diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset/__init__.py b/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset/__init__.py deleted file mode 100644 index f631565d3376..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset/__init__.py +++ /dev/null @@ -1,63 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.asset import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.asset_v1p2beta1.services.asset_service.client import AssetServiceClient -from google.cloud.asset_v1p2beta1.services.asset_service.async_client import AssetServiceAsyncClient - -from google.cloud.asset_v1p2beta1.types.asset_service import BatchGetAssetsHistoryResponse -from google.cloud.asset_v1p2beta1.types.asset_service import CreateFeedRequest -from google.cloud.asset_v1p2beta1.types.asset_service import DeleteFeedRequest -from google.cloud.asset_v1p2beta1.types.asset_service import ExportAssetsResponse -from google.cloud.asset_v1p2beta1.types.asset_service import Feed -from google.cloud.asset_v1p2beta1.types.asset_service import FeedOutputConfig -from google.cloud.asset_v1p2beta1.types.asset_service import GcsDestination -from google.cloud.asset_v1p2beta1.types.asset_service import GetFeedRequest -from google.cloud.asset_v1p2beta1.types.asset_service import ListFeedsRequest -from google.cloud.asset_v1p2beta1.types.asset_service import ListFeedsResponse -from google.cloud.asset_v1p2beta1.types.asset_service import OutputConfig -from google.cloud.asset_v1p2beta1.types.asset_service import PubsubDestination -from google.cloud.asset_v1p2beta1.types.asset_service import UpdateFeedRequest -from google.cloud.asset_v1p2beta1.types.asset_service import ContentType -from google.cloud.asset_v1p2beta1.types.assets import Asset -from google.cloud.asset_v1p2beta1.types.assets import Resource -from google.cloud.asset_v1p2beta1.types.assets import TemporalAsset -from google.cloud.asset_v1p2beta1.types.assets import TimeWindow - -__all__ = ('AssetServiceClient', - 'AssetServiceAsyncClient', - 'BatchGetAssetsHistoryResponse', - 'CreateFeedRequest', - 'DeleteFeedRequest', - 'ExportAssetsResponse', - 'Feed', - 'FeedOutputConfig', - 'GcsDestination', - 'GetFeedRequest', - 'ListFeedsRequest', - 'ListFeedsResponse', - 'OutputConfig', - 'PubsubDestination', - 'UpdateFeedRequest', - 'ContentType', - 'Asset', - 'Resource', - 'TemporalAsset', - 'TimeWindow', -) diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset/gapic_version.py b/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset/py.typed b/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset/py.typed deleted file mode 100644 index 3dbb09a39130..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-asset package uses inline types. 
diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/__init__.py b/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/__init__.py deleted file mode 100644 index 894c1db70b6c..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/__init__.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.asset_v1p2beta1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.asset_service import AssetServiceClient -from .services.asset_service import AssetServiceAsyncClient - -from .types.asset_service import BatchGetAssetsHistoryResponse -from .types.asset_service import CreateFeedRequest -from .types.asset_service import DeleteFeedRequest -from .types.asset_service import ExportAssetsResponse -from .types.asset_service import Feed -from .types.asset_service import FeedOutputConfig -from .types.asset_service import GcsDestination -from .types.asset_service import GetFeedRequest -from .types.asset_service import ListFeedsRequest -from .types.asset_service import ListFeedsResponse -from .types.asset_service import OutputConfig -from .types.asset_service import PubsubDestination -from .types.asset_service import UpdateFeedRequest -from .types.asset_service import ContentType -from .types.assets import Asset -from .types.assets import Resource -from .types.assets import TemporalAsset -from .types.assets import TimeWindow - -__all__ = ( - 'AssetServiceAsyncClient', -'Asset', -'AssetServiceClient', -'BatchGetAssetsHistoryResponse', -'ContentType', -'CreateFeedRequest', -'DeleteFeedRequest', -'ExportAssetsResponse', -'Feed', -'FeedOutputConfig', -'GcsDestination', -'GetFeedRequest', -'ListFeedsRequest', -'ListFeedsResponse', -'OutputConfig', -'PubsubDestination', -'Resource', -'TemporalAsset', -'TimeWindow', -'UpdateFeedRequest', -) diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/gapic_metadata.json b/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/gapic_metadata.json deleted file mode 100644 index 792efcd73906..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/gapic_metadata.json +++ /dev/null @@ -1,103 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.asset_v1p2beta1", - "protoPackage": "google.cloud.asset.v1p2beta1", - "schema": "1.0", - "services": { - "AssetService": { - "clients": { - "grpc": { - "libraryClient": "AssetServiceClient", - "rpcs": { - "CreateFeed": { - "methods": [ - "create_feed" - ] - }, - "DeleteFeed": { - "methods": [ - "delete_feed" - ] - }, - "GetFeed": { - "methods": [ - "get_feed" - ] - }, - "ListFeeds": { - "methods": [ - "list_feeds" - ] - }, - "UpdateFeed": { - "methods": [ - "update_feed" - ] - } - } - }, - 
"grpc-async": { - "libraryClient": "AssetServiceAsyncClient", - "rpcs": { - "CreateFeed": { - "methods": [ - "create_feed" - ] - }, - "DeleteFeed": { - "methods": [ - "delete_feed" - ] - }, - "GetFeed": { - "methods": [ - "get_feed" - ] - }, - "ListFeeds": { - "methods": [ - "list_feeds" - ] - }, - "UpdateFeed": { - "methods": [ - "update_feed" - ] - } - } - }, - "rest": { - "libraryClient": "AssetServiceClient", - "rpcs": { - "CreateFeed": { - "methods": [ - "create_feed" - ] - }, - "DeleteFeed": { - "methods": [ - "delete_feed" - ] - }, - "GetFeed": { - "methods": [ - "get_feed" - ] - }, - "ListFeeds": { - "methods": [ - "list_feeds" - ] - }, - "UpdateFeed": { - "methods": [ - "update_feed" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/gapic_version.py b/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/py.typed b/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/py.typed deleted file mode 100644 index 3dbb09a39130..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-asset package uses inline types. diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/services/__init__.py b/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/services/__init__.py deleted file mode 100644 index 8f6cf068242c..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/services/asset_service/__init__.py b/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/services/asset_service/__init__.py deleted file mode 100644 index 1ad75a011889..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/services/asset_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import AssetServiceClient -from .async_client import AssetServiceAsyncClient - -__all__ = ( - 'AssetServiceClient', - 'AssetServiceAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/services/asset_service/async_client.py b/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/services/asset_service/async_client.py deleted file mode 100644 index 15741222b340..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/services/asset_service/async_client.py +++ /dev/null @@ -1,870 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.asset_v1p2beta1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.cloud.asset_v1p2beta1.types import asset_service -from google.longrunning import operations_pb2 # type: ignore -from .transports.base import AssetServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import AssetServiceGrpcAsyncIOTransport -from .client import AssetServiceClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -class AssetServiceAsyncClient: - """Asset service definition.""" - - _client: AssetServiceClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = AssetServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = AssetServiceClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = AssetServiceClient._DEFAULT_UNIVERSE - - feed_path = staticmethod(AssetServiceClient.feed_path) - parse_feed_path = staticmethod(AssetServiceClient.parse_feed_path) - common_billing_account_path = staticmethod(AssetServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(AssetServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(AssetServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(AssetServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(AssetServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(AssetServiceClient.parse_common_organization_path) - common_project_path = staticmethod(AssetServiceClient.common_project_path) - parse_common_project_path = staticmethod(AssetServiceClient.parse_common_project_path) - common_location_path = staticmethod(AssetServiceClient.common_location_path) - parse_common_location_path = staticmethod(AssetServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - AssetServiceAsyncClient: The constructed client. 
- """ - return AssetServiceClient.from_service_account_info.__func__(AssetServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - AssetServiceAsyncClient: The constructed client. - """ - return AssetServiceClient.from_service_account_file.__func__(AssetServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return AssetServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> AssetServiceTransport: - """Returns the transport used by the client instance. - - Returns: - AssetServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = AssetServiceClient.get_transport_class - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, AssetServiceTransport, Callable[..., AssetServiceTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the asset service async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. 
These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,AssetServiceTransport,Callable[..., AssetServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the AssetServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which can have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason.
- """ - self._client = AssetServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.asset_v1p2beta1.AssetServiceAsyncClient`.", - extra = { - "serviceName": "google.cloud.asset.v1p2beta1.AssetService", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { - "serviceName": "google.cloud.asset.v1p2beta1.AssetService", - "credentialsType": None, - } - ) - - async def create_feed(self, - request: Optional[Union[asset_service.CreateFeedRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.Feed: - r"""Creates a feed in a parent - project/folder/organization to listen to its asset - updates. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1p2beta1 - - async def sample_create_feed(): - # Create a client - client = asset_v1p2beta1.AssetServiceAsyncClient() - - # Initialize request argument(s) - feed = asset_v1p2beta1.Feed() - feed.name = "name_value" - - request = asset_v1p2beta1.CreateFeedRequest( - parent="parent_value", - feed_id="feed_id_value", - feed=feed, - ) - - # Make the request - response = await client.create_feed(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.asset_v1p2beta1.types.CreateFeedRequest, dict]]): - The request object. Create asset feed request. - parent (:class:`str`): - Required. The name of the - project/folder/organization where this - feed should be created in. It can only - be an organization number (such as - "organizations/123"), a folder number - (such as "folders/123"), a project ID - (such as "projects/my-project-id"), or a - project number (such as - "projects/12345"). - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.asset_v1p2beta1.types.Feed: - An asset feed used to export asset - updates to a destinations. An asset feed - filter controls what updates are - exported. 
The asset feed must be created - within a project, organization, or - folder. Supported destinations are: - - Cloud Pub/Sub topics. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.CreateFeedRequest): - request = asset_service.CreateFeedRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_feed] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_feed(self, - request: Optional[Union[asset_service.GetFeedRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.Feed: - r"""Gets details about an asset feed. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1p2beta1 - - async def sample_get_feed(): - # Create a client - client = asset_v1p2beta1.AssetServiceAsyncClient() - - # Initialize request argument(s) - request = asset_v1p2beta1.GetFeedRequest( - name="name_value", - ) - - # Make the request - response = await client.get_feed(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.asset_v1p2beta1.types.GetFeedRequest, dict]]): - The request object. Get asset feed request. - name (:class:`str`): - Required. The name of the Feed. It must be in one of the - following formats: projects/project_number/feeds/feed_id, - folders/folder_number/feeds/feed_id, or - organizations/organization_number/feeds/feed_id. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.asset_v1p2beta1.types.Feed: - An asset feed used to export asset - updates to a destination. An asset feed - filter controls what updates are - exported. The asset feed must be created - within a project, organization, or - folder. Supported destinations are: - - Cloud Pub/Sub topics. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.GetFeedRequest): - request = asset_service.GetFeedRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_feed] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_feeds(self, - request: Optional[Union[asset_service.ListFeedsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.ListFeedsResponse: - r"""Lists all asset feeds in a parent - project/folder/organization. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1p2beta1 - - async def sample_list_feeds(): - # Create a client - client = asset_v1p2beta1.AssetServiceAsyncClient() - - # Initialize request argument(s) - request = asset_v1p2beta1.ListFeedsRequest( - parent="parent_value", - ) - - # Make the request - response = await client.list_feeds(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.asset_v1p2beta1.types.ListFeedsRequest, dict]]): - The request object. List asset feeds request. - parent (:class:`str`): - Required.
The parent - project/folder/organization whose feeds - are to be listed. It can only be a - project/folder/organization number (such - as "folders/12345"), or a project ID - (such as "projects/my-project-id"). - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.asset_v1p2beta1.types.ListFeedsResponse: - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.ListFeedsRequest): - request = asset_service.ListFeedsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_feeds] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_feed(self, - request: Optional[Union[asset_service.UpdateFeedRequest, dict]] = None, - *, - feed: Optional[asset_service.Feed] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.Feed: - r"""Updates an asset feed configuration. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1p2beta1 - - async def sample_update_feed(): - # Create a client - client = asset_v1p2beta1.AssetServiceAsyncClient() - - # Initialize request argument(s) - feed = asset_v1p2beta1.Feed() - feed.name = "name_value" - - request = asset_v1p2beta1.UpdateFeedRequest( - feed=feed, - ) - - # Make the request - response = await client.update_feed(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.asset_v1p2beta1.types.UpdateFeedRequest, dict]]): - The request object. Update asset feed request. - feed (:class:`google.cloud.asset_v1p2beta1.types.Feed`): - Required. The new values of feed details. It must match - an existing feed and the field ``name`` must be in the - format of: projects/project_number/feeds/feed_id or - folders/folder_number/feeds/feed_id or - organizations/organization_number/feeds/feed_id. - - This corresponds to the ``feed`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.asset_v1p2beta1.types.Feed: - An asset feed used to export asset - updates to a destination. An asset feed - filter controls what updates are - exported. The asset feed must be created - within a project, organization, or - folder. Supported destinations are: - - Cloud Pub/Sub topics. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [feed] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.UpdateFeedRequest): - request = asset_service.UpdateFeedRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if feed is not None: - request.feed = feed - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_feed] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("feed.name", request.feed.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response.
- return response - - async def delete_feed(self, - request: Optional[Union[asset_service.DeleteFeedRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes an asset feed. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1p2beta1 - - async def sample_delete_feed(): - # Create a client - client = asset_v1p2beta1.AssetServiceAsyncClient() - - # Initialize request argument(s) - request = asset_v1p2beta1.DeleteFeedRequest( - name="name_value", - ) - - # Make the request - await client.delete_feed(request=request) - - Args: - request (Optional[Union[google.cloud.asset_v1p2beta1.types.DeleteFeedRequest, dict]]): - The request object. - name (:class:`str`): - Required. The name of the feed. It must be in one of the - following formats: projects/project_number/feeds/feed_id, - folders/folder_number/feeds/feed_id, or - organizations/organization_number/feeds/feed_id. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.DeleteFeedRequest): - request = asset_service.DeleteFeedRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_feed] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request.
- await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def __aenter__(self) -> "AssetServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "AssetServiceAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/services/asset_service/client.py b/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/services/asset_service/client.py deleted file mode 100644 index 1186077909cc..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/services/asset_service/client.py +++ /dev/null @@ -1,1233 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
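Illustrative sketch (not part of the deleted files themselves): minimal use of the ``AssetServiceAsyncClient`` whose definition ends above. The async context manager closes the transport on exit; the project ID below is a placeholder, and application default credentials are assumed.

.. code-block:: python

    import asyncio

    from google.cloud import asset_v1p2beta1

    async def main():
        # Entering the client as an async context manager ensures the
        # underlying transport is closed when the block exits.
        async with asset_v1p2beta1.AssetServiceAsyncClient() as client:
            response = await client.list_feeds(
                request=asset_v1p2beta1.ListFeedsRequest(parent="projects/my-project-id"),
            )
            for feed in response.feeds:
                print(feed.name)

    asyncio.run(main())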
-# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.asset_v1p2beta1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.cloud.asset_v1p2beta1.types import asset_service -from google.longrunning import operations_pb2 # type: ignore -from .transports.base import AssetServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import AssetServiceGrpcTransport -from .transports.grpc_asyncio import AssetServiceGrpcAsyncIOTransport -from .transports.rest import AssetServiceRestTransport - - -class AssetServiceClientMeta(type): - """Metaclass for the AssetService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[AssetServiceTransport]] - _transport_registry["grpc"] = AssetServiceGrpcTransport - _transport_registry["grpc_asyncio"] = AssetServiceGrpcAsyncIOTransport - _transport_registry["rest"] = AssetServiceRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[AssetServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class AssetServiceClient(metaclass=AssetServiceClientMeta): - """Asset service definition.""" - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
- ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = "cloudasset.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - _DEFAULT_ENDPOINT_TEMPLATE = "cloudasset.{UNIVERSE_DOMAIN}" - _DEFAULT_UNIVERSE = "googleapis.com" - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - AssetServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - AssetServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> AssetServiceTransport: - """Returns the transport used by the client instance. - - Returns: - AssetServiceTransport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def feed_path(project: str,feed: str,) -> str: - """Returns a fully-qualified feed string.""" - return "projects/{project}/feeds/{feed}".format(project=project, feed=feed, ) - - @staticmethod - def parse_feed_path(path: str) -> Dict[str,str]: - """Parses a feed path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/feeds/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Deprecated. Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. 
- (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. - - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"]. - """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided.
- use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which also depends on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = AssetServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = AssetServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. - """ - universe_domain = AssetServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the credential info to.
- """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, AssetServiceTransport, Callable[..., AssetServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the asset service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,AssetServiceTransport,Callable[..., AssetServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the AssetServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) - - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = AssetServiceClient._read_environment_variables() - self._client_cert_source = AssetServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = AssetServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER - # Setup logging. - client_logging.initialize_logging() - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, AssetServiceTransport) - if transport_provided: - # transport is a AssetServiceTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = cast(AssetServiceTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - AssetServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[AssetServiceTransport], Callable[..., AssetServiceTransport]] = ( - AssetServiceClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., AssetServiceTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.asset_v1p2beta1.AssetServiceClient`.", - extra = { - "serviceName": "google.cloud.asset.v1p2beta1.AssetService", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.cloud.asset.v1p2beta1.AssetService", - "credentialsType": None, - } - ) - - def create_feed(self, - request: Optional[Union[asset_service.CreateFeedRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.Feed: - r"""Creates a feed in a parent - project/folder/organization to listen to its asset - updates. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1p2beta1 - - def sample_create_feed(): - # Create a client - client = asset_v1p2beta1.AssetServiceClient() - - # Initialize request argument(s) - feed = asset_v1p2beta1.Feed() - feed.name = "name_value" - - request = asset_v1p2beta1.CreateFeedRequest( - parent="parent_value", - feed_id="feed_id_value", - feed=feed, - ) - - # Make the request - response = client.create_feed(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.asset_v1p2beta1.types.CreateFeedRequest, dict]): - The request object. Create asset feed request. - parent (str): - Required. 
The name of the
- project/folder/organization in which this
- feed should be created. It can only
- be an organization number (such as
- "organizations/123"), a folder number
- (such as "folders/123"), a project ID
- (such as "projects/my-project-id"), or a
- project number (such as
- "projects/12345").
-
- This corresponds to the ``parent`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.asset_v1p2beta1.types.Feed:
- An asset feed used to export asset
- updates to a destination. An asset feed
- filter controls what updates are
- exported. The asset feed must be created
- within a project, organization, or
- folder. Supported destinations are:
-
- Cloud Pub/Sub topics.
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [parent]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError('If the `request` argument is set, then none of '
- 'the individual field arguments should be set.')
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, asset_service.CreateFeedRequest):
- request = asset_service.CreateFeedRequest(request)
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if parent is not None:
- request.parent = parent
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.create_feed]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("parent", request.parent),
- )),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- def get_feed(self,
- request: Optional[Union[asset_service.GetFeedRequest, dict]] = None,
- *,
- name: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> asset_service.Feed:
- r"""Gets details about an asset feed.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import asset_v1p2beta1
-
- def sample_get_feed():
- # Create a client
- client = asset_v1p2beta1.AssetServiceClient()
-
- # Initialize request argument(s)
- request = asset_v1p2beta1.GetFeedRequest(
- name="name_value",
- )
-
- # Make the request
- response = client.get_feed(request=request)
-
- # Handle the response
- print(response)
-
- Args:
- request (Union[google.cloud.asset_v1p2beta1.types.GetFeedRequest, dict]):
- The request object. Get asset feed request.
- name (str):
- Required. The name of the Feed, which must be in one of
- the following formats: projects/project_number/feeds/feed_id,
- folders/folder_number/feeds/feed_id, or
- organizations/organization_number/feeds/feed_id
-
- This corresponds to the ``name`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.asset_v1p2beta1.types.Feed:
- An asset feed used to export asset
- updates to a destination. An asset feed
- filter controls what updates are
- exported. The asset feed must be created
- within a project, organization, or
- folder. Supported destinations are:
-
- Cloud Pub/Sub topics.
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [name]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError('If the `request` argument is set, then none of '
- 'the individual field arguments should be set.')
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, asset_service.GetFeedRequest):
- request = asset_service.GetFeedRequest(request)
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if name is not None:
- request.name = name
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.get_feed]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("name", request.name),
- )),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- def list_feeds(self,
- request: Optional[Union[asset_service.ListFeedsRequest, dict]] = None,
- *,
- parent: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> asset_service.ListFeedsResponse:
- r"""Lists all asset feeds in a parent
- project/folder/organization.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import asset_v1p2beta1
-
- def sample_list_feeds():
- # Create a client
- client = asset_v1p2beta1.AssetServiceClient()
-
- # Initialize request argument(s)
- request = asset_v1p2beta1.ListFeedsRequest(
- parent="parent_value",
- )
-
- # Make the request
- response = client.list_feeds(request=request)
-
- # Handle the response
- print(response)
-
- Args:
- request (Union[google.cloud.asset_v1p2beta1.types.ListFeedsRequest, dict]):
- The request object. List asset feeds request.
- parent (str):
- Required. The parent
- project/folder/organization whose feeds
- are to be listed. It can only be a
- project/folder/organization number (such
- as "folders/12345") or a project ID
- (such as "projects/my-project-id").
-
- This corresponds to the ``parent`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.asset_v1p2beta1.types.ListFeedsResponse:
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [parent]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError('If the `request` argument is set, then none of '
- 'the individual field arguments should be set.')
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, asset_service.ListFeedsRequest):
- request = asset_service.ListFeedsRequest(request)
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if parent is not None:
- request.parent = parent
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.list_feeds]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("parent", request.parent),
- )),
- )
-
- # Validate the universe domain.
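- # (Currently a no-op, since universe validation is disabled in
- # _validate_universe_domain above, but kept in the call path so the
- # check can be re-enabled centrally without touching each RPC method.)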
- self._validate_universe_domain()
-
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- def update_feed(self,
- request: Optional[Union[asset_service.UpdateFeedRequest, dict]] = None,
- *,
- feed: Optional[asset_service.Feed] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> asset_service.Feed:
- r"""Updates an asset feed configuration.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import asset_v1p2beta1
-
- def sample_update_feed():
- # Create a client
- client = asset_v1p2beta1.AssetServiceClient()
-
- # Initialize request argument(s)
- feed = asset_v1p2beta1.Feed()
- feed.name = "name_value"
-
- request = asset_v1p2beta1.UpdateFeedRequest(
- feed=feed,
- )
-
- # Make the request
- response = client.update_feed(request=request)
-
- # Handle the response
- print(response)
-
- Args:
- request (Union[google.cloud.asset_v1p2beta1.types.UpdateFeedRequest, dict]):
- The request object. Update asset feed request.
- feed (google.cloud.asset_v1p2beta1.types.Feed):
- Required. The new values of feed details. It must match
- an existing feed and the field ``name`` must be in the
- format of: projects/project_number/feeds/feed_id or
- folders/folder_number/feeds/feed_id or
- organizations/organization_number/feeds/feed_id.
-
- This corresponds to the ``feed`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.asset_v1p2beta1.types.Feed:
- An asset feed used to export asset
- updates to a destination. An asset feed
- filter controls what updates are
- exported. The asset feed must be created
- within a project, organization, or
- folder. Supported destinations are:
-
- Cloud Pub/Sub topics.
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [feed]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError('If the `request` argument is set, then none of '
- 'the individual field arguments should be set.')
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
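- # (proto-plus request types accept either an existing message or a
- # dict here; a dict such as {"feed": feed} is converted field by
- # field, so it is equivalent to passing a constructed
- # UpdateFeedRequest.)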
- if not isinstance(request, asset_service.UpdateFeedRequest):
- request = asset_service.UpdateFeedRequest(request)
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if feed is not None:
- request.feed = feed
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.update_feed]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("feed.name", request.feed.name),
- )),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- def delete_feed(self,
- request: Optional[Union[asset_service.DeleteFeedRequest, dict]] = None,
- *,
- name: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> None:
- r"""Deletes an asset feed.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import asset_v1p2beta1
-
- def sample_delete_feed():
- # Create a client
- client = asset_v1p2beta1.AssetServiceClient()
-
- # Initialize request argument(s)
- request = asset_v1p2beta1.DeleteFeedRequest(
- name="name_value",
- )
-
- # Make the request
- client.delete_feed(request=request)
-
- Args:
- request (Union[google.cloud.asset_v1p2beta1.types.DeleteFeedRequest, dict]):
- The request object.
- name (str):
- Required. The name of the feed, which must be in one of
- the following formats: projects/project_number/feeds/feed_id,
- folders/folder_number/feeds/feed_id, or
- organizations/organization_number/feeds/feed_id
-
- This corresponds to the ``name`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [name]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError('If the `request` argument is set, then none of '
- 'the individual field arguments should be set.')
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
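- # (The flattened ``name`` argument is the only field this request
- # needs; it is copied onto the request object below.)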
- if not isinstance(request, asset_service.DeleteFeedRequest): - request = asset_service.DeleteFeedRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_feed] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def __enter__(self) -> "AssetServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "AssetServiceClient", -) diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/services/asset_service/transports/README.rst b/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/services/asset_service/transports/README.rst deleted file mode 100644 index f0467812ea79..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/services/asset_service/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`AssetServiceTransport` is the ABC for all transports. -- public child `AssetServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `AssetServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseAssetServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `AssetServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/services/asset_service/transports/__init__.py b/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/services/asset_service/transports/__init__.py deleted file mode 100644 index 315eb22bd6cb..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/services/asset_service/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import AssetServiceTransport -from .grpc import AssetServiceGrpcTransport -from .grpc_asyncio import AssetServiceGrpcAsyncIOTransport -from .rest import AssetServiceRestTransport -from .rest import AssetServiceRestInterceptor - - -# Compile a registry of transports. 
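- # The registry maps the string names accepted by the client's
- # ``transport`` argument to concrete transport classes, e.g.
- # AssetServiceClient(transport="rest") resolves to
- # AssetServiceRestTransport via get_transport_class().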
-_transport_registry = OrderedDict() # type: Dict[str, Type[AssetServiceTransport]] -_transport_registry['grpc'] = AssetServiceGrpcTransport -_transport_registry['grpc_asyncio'] = AssetServiceGrpcAsyncIOTransport -_transport_registry['rest'] = AssetServiceRestTransport - -__all__ = ( - 'AssetServiceTransport', - 'AssetServiceGrpcTransport', - 'AssetServiceGrpcAsyncIOTransport', - 'AssetServiceRestTransport', - 'AssetServiceRestInterceptor', -) diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/services/asset_service/transports/base.py b/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/services/asset_service/transports/base.py deleted file mode 100644 index 2323afe2c427..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/services/asset_service/transports/base.py +++ /dev/null @@ -1,256 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.asset_v1p2beta1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.asset_v1p2beta1.types import asset_service -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class AssetServiceTransport(abc.ABC): - """Abstract transport class for AssetService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'cloudasset.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'cloudasset.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. 
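- (Passing both raises
- google.api_core.exceptions.DuplicateCredentialArgs, as enforced
- below.)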
- scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. 
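- # Each entry pairs an RPC with its default retry/timeout policy;
- # per-call overrides, e.g. client.get_feed(request=request, timeout=30.0),
- # take precedence over the defaults declared here.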
- self._wrapped_methods = { - self.create_feed: gapic_v1.method.wrap_method( - self.create_feed, - default_timeout=60.0, - client_info=client_info, - ), - self.get_feed: gapic_v1.method.wrap_method( - self.get_feed, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_feeds: gapic_v1.method.wrap_method( - self.list_feeds, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.update_feed: gapic_v1.method.wrap_method( - self.update_feed, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_feed: gapic_v1.method.wrap_method( - self.delete_feed, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_operation: gapic_v1.method.wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! - """ - raise NotImplementedError() - - @property - def create_feed(self) -> Callable[ - [asset_service.CreateFeedRequest], - Union[ - asset_service.Feed, - Awaitable[asset_service.Feed] - ]]: - raise NotImplementedError() - - @property - def get_feed(self) -> Callable[ - [asset_service.GetFeedRequest], - Union[ - asset_service.Feed, - Awaitable[asset_service.Feed] - ]]: - raise NotImplementedError() - - @property - def list_feeds(self) -> Callable[ - [asset_service.ListFeedsRequest], - Union[ - asset_service.ListFeedsResponse, - Awaitable[asset_service.ListFeedsResponse] - ]]: - raise NotImplementedError() - - @property - def update_feed(self) -> Callable[ - [asset_service.UpdateFeedRequest], - Union[ - asset_service.Feed, - Awaitable[asset_service.Feed] - ]]: - raise NotImplementedError() - - @property - def delete_feed(self) -> Callable[ - [asset_service.DeleteFeedRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'AssetServiceTransport', -) diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/services/asset_service/transports/grpc.py b/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/services/asset_service/transports/grpc.py deleted file mode 100644 index 8d27f73af156..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/services/asset_service/transports/grpc.py +++ /dev/null @@ -1,474 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use 
this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json -import logging as std_logging -import pickle -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore - -from google.cloud.asset_v1p2beta1.types import asset_service -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import AssetServiceTransport, DEFAULT_CLIENT_INFO - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER - def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.asset.v1p2beta1.AssetService", - "rpcName": client_call_details.method, - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - - response = continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = response.trailing_metadata() - # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = response.result() - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response for {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.asset.v1p2beta1.AssetService", - "rpcName": client_call_details.method, - "response": grpc_response, - 
"metadata": grpc_response["metadata"], - }, - ) - return response - - -class AssetServiceGrpcTransport(AssetServiceTransport): - """gRPC backend transport for AssetService. - - Asset service definition. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'cloudasset.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'cloudasset.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if a ``channel`` instance is provided. - channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. 
- client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) - - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'cloudasset.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. 
These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def create_feed(self) -> Callable[ - [asset_service.CreateFeedRequest], - asset_service.Feed]: - r"""Return a callable for the create feed method over gRPC. - - Creates a feed in a parent - project/folder/organization to listen to its asset - updates. - - Returns: - Callable[[~.CreateFeedRequest], - ~.Feed]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_feed' not in self._stubs: - self._stubs['create_feed'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1p2beta1.AssetService/CreateFeed', - request_serializer=asset_service.CreateFeedRequest.serialize, - response_deserializer=asset_service.Feed.deserialize, - ) - return self._stubs['create_feed'] - - @property - def get_feed(self) -> Callable[ - [asset_service.GetFeedRequest], - asset_service.Feed]: - r"""Return a callable for the get feed method over gRPC. - - Gets details about an asset feed. - - Returns: - Callable[[~.GetFeedRequest], - ~.Feed]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_feed' not in self._stubs: - self._stubs['get_feed'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1p2beta1.AssetService/GetFeed', - request_serializer=asset_service.GetFeedRequest.serialize, - response_deserializer=asset_service.Feed.deserialize, - ) - return self._stubs['get_feed'] - - @property - def list_feeds(self) -> Callable[ - [asset_service.ListFeedsRequest], - asset_service.ListFeedsResponse]: - r"""Return a callable for the list feeds method over gRPC. - - Lists all asset feeds in a parent - project/folder/organization. - - Returns: - Callable[[~.ListFeedsRequest], - ~.ListFeedsResponse]: - A function that, when called, will call the underlying RPC - on the server. 
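-
- (The stub is created on first access and cached in ``self._stubs``,
- so repeated reads of this property reuse the same callable.)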
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_feeds' not in self._stubs: - self._stubs['list_feeds'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1p2beta1.AssetService/ListFeeds', - request_serializer=asset_service.ListFeedsRequest.serialize, - response_deserializer=asset_service.ListFeedsResponse.deserialize, - ) - return self._stubs['list_feeds'] - - @property - def update_feed(self) -> Callable[ - [asset_service.UpdateFeedRequest], - asset_service.Feed]: - r"""Return a callable for the update feed method over gRPC. - - Updates an asset feed configuration. - - Returns: - Callable[[~.UpdateFeedRequest], - ~.Feed]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_feed' not in self._stubs: - self._stubs['update_feed'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1p2beta1.AssetService/UpdateFeed', - request_serializer=asset_service.UpdateFeedRequest.serialize, - response_deserializer=asset_service.Feed.deserialize, - ) - return self._stubs['update_feed'] - - @property - def delete_feed(self) -> Callable[ - [asset_service.DeleteFeedRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete feed method over gRPC. - - Deletes an asset feed. - - Returns: - Callable[[~.DeleteFeedRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_feed' not in self._stubs: - self._stubs['delete_feed'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1p2beta1.AssetService/DeleteFeed', - request_serializer=asset_service.DeleteFeedRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_feed'] - - def close(self): - self._logged_channel.close() - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'AssetServiceGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/services/asset_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/services/asset_service/transports/grpc_asyncio.py deleted file mode 100644 index 5aea57e41720..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/services/asset_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,549 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import inspect -import json -import pickle -import logging as std_logging -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import exceptions as core_exceptions -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.asset_v1p2beta1.types import asset_service -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import AssetServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import AssetServiceGrpcTransport - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER - async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - 
grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.asset.v1p2beta1.AssetService", - "rpcName": str(client_call_details.method), - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - response = await continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = await response.trailing_metadata() - # Convert the gRPC trailing metadata into a dict of str keys and values - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = await response - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response to rpc {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.asset.v1p2beta1.AssetService", - "rpcName": str(client_call_details.method), - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class AssetServiceGrpcAsyncIOTransport(AssetServiceTransport): - """gRPC AsyncIO backend transport for AssetService. - - Asset service definition. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'cloudasset.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - scopes (Optional[Sequence[str]]): An optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object.
- """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'cloudasset.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'cloudasset.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, aio.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def create_feed(self) -> Callable[ - [asset_service.CreateFeedRequest], - Awaitable[asset_service.Feed]]: - r"""Return a callable for the create feed method over gRPC. - - Creates a feed in a parent - project/folder/organization to listen to its asset - updates. 
- - Returns: - Callable[[~.CreateFeedRequest], - Awaitable[~.Feed]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_feed' not in self._stubs: - self._stubs['create_feed'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1p2beta1.AssetService/CreateFeed', - request_serializer=asset_service.CreateFeedRequest.serialize, - response_deserializer=asset_service.Feed.deserialize, - ) - return self._stubs['create_feed'] - - @property - def get_feed(self) -> Callable[ - [asset_service.GetFeedRequest], - Awaitable[asset_service.Feed]]: - r"""Return a callable for the get feed method over gRPC. - - Gets details about an asset feed. - - Returns: - Callable[[~.GetFeedRequest], - Awaitable[~.Feed]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_feed' not in self._stubs: - self._stubs['get_feed'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1p2beta1.AssetService/GetFeed', - request_serializer=asset_service.GetFeedRequest.serialize, - response_deserializer=asset_service.Feed.deserialize, - ) - return self._stubs['get_feed'] - - @property - def list_feeds(self) -> Callable[ - [asset_service.ListFeedsRequest], - Awaitable[asset_service.ListFeedsResponse]]: - r"""Return a callable for the list feeds method over gRPC. - - Lists all asset feeds in a parent - project/folder/organization. - - Returns: - Callable[[~.ListFeedsRequest], - Awaitable[~.ListFeedsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_feeds' not in self._stubs: - self._stubs['list_feeds'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1p2beta1.AssetService/ListFeeds', - request_serializer=asset_service.ListFeedsRequest.serialize, - response_deserializer=asset_service.ListFeedsResponse.deserialize, - ) - return self._stubs['list_feeds'] - - @property - def update_feed(self) -> Callable[ - [asset_service.UpdateFeedRequest], - Awaitable[asset_service.Feed]]: - r"""Return a callable for the update feed method over gRPC. - - Updates an asset feed configuration. - - Returns: - Callable[[~.UpdateFeedRequest], - Awaitable[~.Feed]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_feed' not in self._stubs: - self._stubs['update_feed'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1p2beta1.AssetService/UpdateFeed', - request_serializer=asset_service.UpdateFeedRequest.serialize, - response_deserializer=asset_service.Feed.deserialize, - ) - return self._stubs['update_feed'] - - @property - def delete_feed(self) -> Callable[ - [asset_service.DeleteFeedRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete feed method over gRPC. 
- - Deletes an asset feed. - - Returns: - Callable[[~.DeleteFeedRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_feed' not in self._stubs: - self._stubs['delete_feed'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1p2beta1.AssetService/DeleteFeed', - request_serializer=asset_service.DeleteFeedRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_feed'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.create_feed: self._wrap_method( - self.create_feed, - default_timeout=60.0, - client_info=client_info, - ), - self.get_feed: self._wrap_method( - self.get_feed, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_feeds: self._wrap_method( - self.list_feeds, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.update_feed: self._wrap_method( - self.update_feed, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_feed: self._wrap_method( - self.delete_feed, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_operation: self._wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
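# Editor's illustrative sketch (assumed names, not part of the deleted file):
# in the asyncio transport the returned callable produces an awaitable call,
# e.g.
#
#   op = await transport.get_operation(
#       operations_pb2.GetOperationRequest(name="operations/123")  # hypothetical name
#   )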
- if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - -__all__ = ( - 'AssetServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/services/asset_service/transports/rest.py b/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/services/asset_service/transports/rest.py deleted file mode 100644 index 6943efdf9c73..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/services/asset_service/transports/rest.py +++ /dev/null @@ -1,1136 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import logging -import json # type: ignore - -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 - -from google.protobuf import json_format - -from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - - -from google.cloud.asset_v1p2beta1.types import asset_service -from google.protobuf import empty_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - - -from .rest_base import _BaseAssetServiceRestTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = logging.getLogger(__name__) - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=f"requests@{requests_version}", -) - - -class AssetServiceRestInterceptor: - """Interceptor for AssetService. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the AssetServiceRestTransport. - - .. 
code-block:: python - class MyCustomAssetServiceInterceptor(AssetServiceRestInterceptor): - def pre_create_feed(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_feed(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_feed(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_get_feed(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_feed(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_feeds(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_feeds(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_feed(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_feed(self, response): - logging.log(f"Received response: {response}") - return response - - transport = AssetServiceRestTransport(interceptor=MyCustomAssetServiceInterceptor()) - client = AssetServiceClient(transport=transport) - - - """ - def pre_create_feed(self, request: asset_service.CreateFeedRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.CreateFeedRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_feed - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssetService server. - """ - return request, metadata - - def post_create_feed(self, response: asset_service.Feed) -> asset_service.Feed: - """Post-rpc interceptor for create_feed - - DEPRECATED. Please use the `post_create_feed_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AssetService server but before - it is returned to user code. This `post_create_feed` interceptor runs - before the `post_create_feed_with_metadata` interceptor. - """ - return response - - def post_create_feed_with_metadata(self, response: asset_service.Feed, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.Feed, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_feed - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AssetService server but before it is returned to user code. - - We recommend only using this `post_create_feed_with_metadata` - interceptor in new development instead of the `post_create_feed` interceptor. - When both interceptors are used, this `post_create_feed_with_metadata` interceptor runs after the - `post_create_feed` interceptor. The (possibly modified) response returned by - `post_create_feed` will be passed to - `post_create_feed_with_metadata`. - """ - return response, metadata - - def pre_delete_feed(self, request: asset_service.DeleteFeedRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.DeleteFeedRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_feed - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssetService server. 
- """ - return request, metadata - - def pre_get_feed(self, request: asset_service.GetFeedRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.GetFeedRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_feed - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssetService server. - """ - return request, metadata - - def post_get_feed(self, response: asset_service.Feed) -> asset_service.Feed: - """Post-rpc interceptor for get_feed - - DEPRECATED. Please use the `post_get_feed_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AssetService server but before - it is returned to user code. This `post_get_feed` interceptor runs - before the `post_get_feed_with_metadata` interceptor. - """ - return response - - def post_get_feed_with_metadata(self, response: asset_service.Feed, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.Feed, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_feed - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AssetService server but before it is returned to user code. - - We recommend only using this `post_get_feed_with_metadata` - interceptor in new development instead of the `post_get_feed` interceptor. - When both interceptors are used, this `post_get_feed_with_metadata` interceptor runs after the - `post_get_feed` interceptor. The (possibly modified) response returned by - `post_get_feed` will be passed to - `post_get_feed_with_metadata`. - """ - return response, metadata - - def pre_list_feeds(self, request: asset_service.ListFeedsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ListFeedsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_feeds - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssetService server. - """ - return request, metadata - - def post_list_feeds(self, response: asset_service.ListFeedsResponse) -> asset_service.ListFeedsResponse: - """Post-rpc interceptor for list_feeds - - DEPRECATED. Please use the `post_list_feeds_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AssetService server but before - it is returned to user code. This `post_list_feeds` interceptor runs - before the `post_list_feeds_with_metadata` interceptor. - """ - return response - - def post_list_feeds_with_metadata(self, response: asset_service.ListFeedsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ListFeedsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_feeds - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AssetService server but before it is returned to user code. - - We recommend only using this `post_list_feeds_with_metadata` - interceptor in new development instead of the `post_list_feeds` interceptor. - When both interceptors are used, this `post_list_feeds_with_metadata` interceptor runs after the - `post_list_feeds` interceptor. The (possibly modified) response returned by - `post_list_feeds` will be passed to - `post_list_feeds_with_metadata`. 
- """ - return response, metadata - - def pre_update_feed(self, request: asset_service.UpdateFeedRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.UpdateFeedRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_feed - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssetService server. - """ - return request, metadata - - def post_update_feed(self, response: asset_service.Feed) -> asset_service.Feed: - """Post-rpc interceptor for update_feed - - DEPRECATED. Please use the `post_update_feed_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AssetService server but before - it is returned to user code. This `post_update_feed` interceptor runs - before the `post_update_feed_with_metadata` interceptor. - """ - return response - - def post_update_feed_with_metadata(self, response: asset_service.Feed, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.Feed, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_feed - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AssetService server but before it is returned to user code. - - We recommend only using this `post_update_feed_with_metadata` - interceptor in new development instead of the `post_update_feed` interceptor. - When both interceptors are used, this `post_update_feed_with_metadata` interceptor runs after the - `post_update_feed` interceptor. The (possibly modified) response returned by - `post_update_feed` will be passed to - `post_update_feed_with_metadata`. - """ - return response, metadata - - def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssetService server. - """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the AssetService server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class AssetServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: AssetServiceRestInterceptor - - -class AssetServiceRestTransport(_BaseAssetServiceRestTransport): - """REST backend synchronous transport for AssetService. - - Asset service definition. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'cloudasset.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[AssetServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'cloudasset.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
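# Editor's illustrative sketch (hypothetical values, not part of the deleted
# file): the `url_scheme` documented above exists mainly for local testing,
# e.g.
#
#   transport = AssetServiceRestTransport(
#       host="localhost:8080",                        # assumed local test server
#       url_scheme="http",
#       credentials=ga_credentials.AnonymousCredentials(),
#   )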
- # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - url_scheme=url_scheme, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or AssetServiceRestInterceptor() - self._prep_wrapped_messages(client_info) - - class _CreateFeed(_BaseAssetServiceRestTransport._BaseCreateFeed, AssetServiceRestStub): - def __hash__(self): - return hash("AssetServiceRestTransport.CreateFeed") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: asset_service.CreateFeedRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.Feed: - r"""Call the create feed method over HTTP. - - Args: - request (~.asset_service.CreateFeedRequest): - The request object. Create asset feed request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.asset_service.Feed: - An asset feed used to export asset - updates to a destinations. An asset feed - filter controls what updates are - exported. The asset feed must be created - within a project, organization, or - folder. Supported destinations are: - - Cloud Pub/Sub topics. 
- - """ - - http_options = _BaseAssetServiceRestTransport._BaseCreateFeed._get_http_options() - - request, metadata = self._interceptor.pre_create_feed(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseCreateFeed._get_transcoded_request(http_options, request) - - body = _BaseAssetServiceRestTransport._BaseCreateFeed._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseCreateFeed._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.asset_v1p2beta1.AssetServiceClient.CreateFeed", - extra = { - "serviceName": "google.cloud.asset.v1p2beta1.AssetService", - "rpcName": "CreateFeed", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssetServiceRestTransport._CreateFeed._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = asset_service.Feed() - pb_resp = asset_service.Feed.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_feed(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_feed_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = asset_service.Feed.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.asset_v1p2beta1.AssetServiceClient.create_feed", - extra = { - "serviceName": "google.cloud.asset.v1p2beta1.AssetService", - "rpcName": "CreateFeed", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteFeed(_BaseAssetServiceRestTransport._BaseDeleteFeed, AssetServiceRestStub): - def __hash__(self): - return hash("AssetServiceRestTransport.DeleteFeed") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: asset_service.DeleteFeedRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the delete feed method over HTTP. 
- - Args: - request (~.asset_service.DeleteFeedRequest): - The request object. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - - http_options = _BaseAssetServiceRestTransport._BaseDeleteFeed._get_http_options() - - request, metadata = self._interceptor.pre_delete_feed(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseDeleteFeed._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseDeleteFeed._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.asset_v1p2beta1.AssetServiceClient.DeleteFeed", - extra = { - "serviceName": "google.cloud.asset.v1p2beta1.AssetService", - "rpcName": "DeleteFeed", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssetServiceRestTransport._DeleteFeed._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _GetFeed(_BaseAssetServiceRestTransport._BaseGetFeed, AssetServiceRestStub): - def __hash__(self): - return hash("AssetServiceRestTransport.GetFeed") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: asset_service.GetFeedRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.Feed: - r"""Call the get feed method over HTTP. - - Args: - request (~.asset_service.GetFeedRequest): - The request object. Get asset feed request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- - Returns: - ~.asset_service.Feed: - An asset feed used to export asset - updates to a destinations. An asset feed - filter controls what updates are - exported. The asset feed must be created - within a project, organization, or - folder. Supported destinations are: - - Cloud Pub/Sub topics. - - """ - - http_options = _BaseAssetServiceRestTransport._BaseGetFeed._get_http_options() - - request, metadata = self._interceptor.pre_get_feed(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseGetFeed._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseGetFeed._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.asset_v1p2beta1.AssetServiceClient.GetFeed", - extra = { - "serviceName": "google.cloud.asset.v1p2beta1.AssetService", - "rpcName": "GetFeed", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssetServiceRestTransport._GetFeed._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = asset_service.Feed() - pb_resp = asset_service.Feed.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_feed(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_feed_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = asset_service.Feed.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.asset_v1p2beta1.AssetServiceClient.get_feed", - extra = { - "serviceName": "google.cloud.asset.v1p2beta1.AssetService", - "rpcName": "GetFeed", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListFeeds(_BaseAssetServiceRestTransport._BaseListFeeds, AssetServiceRestStub): - def __hash__(self): - return hash("AssetServiceRestTransport.ListFeeds") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: asset_service.ListFeedsRequest, *, - retry: 
OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.ListFeedsResponse: - r"""Call the list feeds method over HTTP. - - Args: - request (~.asset_service.ListFeedsRequest): - The request object. List asset feeds request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.asset_service.ListFeedsResponse: - - """ - - http_options = _BaseAssetServiceRestTransport._BaseListFeeds._get_http_options() - - request, metadata = self._interceptor.pre_list_feeds(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseListFeeds._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseListFeeds._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.asset_v1p2beta1.AssetServiceClient.ListFeeds", - extra = { - "serviceName": "google.cloud.asset.v1p2beta1.AssetService", - "rpcName": "ListFeeds", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssetServiceRestTransport._ListFeeds._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
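# Editor's illustrative sketch (assumed `client`/`request` names, not part of
# the deleted file): from_http_response maps the HTTP status code to a specific
# GoogleAPICallError subclass, so callers can catch errors selectively, e.g.
#
#   try:
#       resp = client.list_feeds(request=request)
#   except core_exceptions.NotFound:
#       ...  # parent resource does not exist
#   except core_exceptions.PermissionDenied:
#       ...  # caller lacks permission on the parent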
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = asset_service.ListFeedsResponse() - pb_resp = asset_service.ListFeedsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_feeds(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_feeds_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = asset_service.ListFeedsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.asset_v1p2beta1.AssetServiceClient.list_feeds", - extra = { - "serviceName": "google.cloud.asset.v1p2beta1.AssetService", - "rpcName": "ListFeeds", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateFeed(_BaseAssetServiceRestTransport._BaseUpdateFeed, AssetServiceRestStub): - def __hash__(self): - return hash("AssetServiceRestTransport.UpdateFeed") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: asset_service.UpdateFeedRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.Feed: - r"""Call the update feed method over HTTP. - - Args: - request (~.asset_service.UpdateFeedRequest): - The request object. Update asset feed request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.asset_service.Feed: - An asset feed used to export asset - updates to a destinations. An asset feed - filter controls what updates are - exported. The asset feed must be created - within a project, organization, or - folder. Supported destinations are: - - Cloud Pub/Sub topics. 
- - """ - - http_options = _BaseAssetServiceRestTransport._BaseUpdateFeed._get_http_options() - - request, metadata = self._interceptor.pre_update_feed(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseUpdateFeed._get_transcoded_request(http_options, request) - - body = _BaseAssetServiceRestTransport._BaseUpdateFeed._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseUpdateFeed._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.asset_v1p2beta1.AssetServiceClient.UpdateFeed", - extra = { - "serviceName": "google.cloud.asset.v1p2beta1.AssetService", - "rpcName": "UpdateFeed", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssetServiceRestTransport._UpdateFeed._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = asset_service.Feed() - pb_resp = asset_service.Feed.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_feed(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_feed_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = asset_service.Feed.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.asset_v1p2beta1.AssetServiceClient.update_feed", - extra = { - "serviceName": "google.cloud.asset.v1p2beta1.AssetService", - "rpcName": "UpdateFeed", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - @property - def create_feed(self) -> Callable[ - [asset_service.CreateFeedRequest], - asset_service.Feed]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateFeed(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_feed(self) -> Callable[ - [asset_service.DeleteFeedRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteFeed(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_feed(self) -> Callable[ - [asset_service.GetFeedRequest], - asset_service.Feed]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._GetFeed(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_feeds(self) -> Callable[ - [asset_service.ListFeedsRequest], - asset_service.ListFeedsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListFeeds(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_feed(self) -> Callable[ - [asset_service.UpdateFeedRequest], - asset_service.Feed]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateFeed(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(_BaseAssetServiceRestTransport._BaseGetOperation, AssetServiceRestStub): - def __hash__(self): - return hash("AssetServiceRestTransport.GetOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.Operation: Response from GetOperation method. 
- """ - - http_options = _BaseAssetServiceRestTransport._BaseGetOperation._get_http_options() - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - transcoded_request = _BaseAssetServiceRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAssetServiceRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.asset_v1p2beta1.AssetServiceClient.GetOperation", - extra = { - "serviceName": "google.cloud.asset.v1p2beta1.AssetService", - "rpcName": "GetOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssetServiceRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.Operation() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.asset_v1p2beta1.AssetServiceAsyncClient.GetOperation", - extra = { - "serviceName": "google.cloud.asset.v1p2beta1.AssetService", - "rpcName": "GetOperation", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'AssetServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/services/asset_service/transports/rest_base.py b/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/services/asset_service/transports/rest_base.py deleted file mode 100644 index 78d10b90c40c..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/services/asset_service/transports/rest_base.py +++ /dev/null @@ -1,323 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from .base import AssetServiceTransport, DEFAULT_CLIENT_INFO - -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - - -from google.cloud.asset_v1p2beta1.types import asset_service -from google.protobuf import empty_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - - -class _BaseAssetServiceRestTransport(AssetServiceTransport): - """Base REST backend transport for AssetService. - - Note: This class is not meant to be used directly. Use its sync and - async sub-classes instead. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'cloudasset.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - Args: - host (Optional[str]): - The hostname to connect to (default: 'cloudasset.googleapis.com'). - credentials (Optional[Any]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. 
- """ - # Run the base constructor - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - - class _BaseCreateFeed: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1p2beta1/{parent=*/*}/feeds', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = asset_service.CreateFeedRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseCreateFeed._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteFeed: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1p2beta1/{name=*/*/feeds/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = asset_service.DeleteFeedRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAssetServiceRestTransport._BaseDeleteFeed._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetFeed: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1p2beta1/{name=*/*/feeds/*}', - }, - ] 
-            return http_options
-
-        @staticmethod
-        def _get_transcoded_request(http_options, request):
-            pb_request = asset_service.GetFeedRequest.pb(request)
-            transcoded_request = path_template.transcode(http_options, pb_request)
-            return transcoded_request
-
-        @staticmethod
-        def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json_format.MessageToJson(
-                transcoded_request['query_params'],
-                use_integers_for_enums=True,
-            ))
-            query_params.update(_BaseAssetServiceRestTransport._BaseGetFeed._get_unset_required_fields(query_params))
-
-            query_params["$alt"] = "json;enum-encoding=int"
-            return query_params
-
-    class _BaseListFeeds:
-        def __hash__(self):  # pragma: NO COVER
-            raise NotImplementedError("__hash__ must be implemented.")
-
-        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
-        }
-
-        @classmethod
-        def _get_unset_required_fields(cls, message_dict):
-            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
-
-        @staticmethod
-        def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'get',
-                'uri': '/v1p2beta1/{parent=*/*}/feeds',
-            },
-            ]
-            return http_options
-
-        @staticmethod
-        def _get_transcoded_request(http_options, request):
-            pb_request = asset_service.ListFeedsRequest.pb(request)
-            transcoded_request = path_template.transcode(http_options, pb_request)
-            return transcoded_request
-
-        @staticmethod
-        def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json_format.MessageToJson(
-                transcoded_request['query_params'],
-                use_integers_for_enums=True,
-            ))
-            query_params.update(_BaseAssetServiceRestTransport._BaseListFeeds._get_unset_required_fields(query_params))
-
-            query_params["$alt"] = "json;enum-encoding=int"
-            return query_params
-
-    class _BaseUpdateFeed:
-        def __hash__(self):  # pragma: NO COVER
-            raise NotImplementedError("__hash__ must be implemented.")
-
-        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
-        }
-
-        @classmethod
-        def _get_unset_required_fields(cls, message_dict):
-            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
-
-        @staticmethod
-        def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'patch',
-                'uri': '/v1p2beta1/{feed.name=*/*/feeds/*}',
-                'body': '*',
-            },
-            ]
-            return http_options
-
-        @staticmethod
-        def _get_transcoded_request(http_options, request):
-            pb_request = asset_service.UpdateFeedRequest.pb(request)
-            transcoded_request = path_template.transcode(http_options, pb_request)
-            return transcoded_request
-
-        @staticmethod
-        def _get_request_body_json(transcoded_request):
-            # Jsonify the request body
-
-            body = json_format.MessageToJson(
-                transcoded_request['body'],
-                use_integers_for_enums=True
-            )
-            return body
-
-        @staticmethod
-        def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json_format.MessageToJson(
-                transcoded_request['query_params'],
-                use_integers_for_enums=True,
-            ))
-            query_params.update(_BaseAssetServiceRestTransport._BaseUpdateFeed._get_unset_required_fields(query_params))
-
-            query_params["$alt"] = "json;enum-encoding=int"
-            return query_params
-
-    class _BaseGetOperation:
-        def __hash__(self):  # pragma: NO COVER
-            raise NotImplementedError("__hash__ must be implemented.")
-
-        @staticmethod
-        def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'get',
-                'uri': '/v1p2beta1/{name=*/*/operations/*/**}',
-            },
-            ]
-            return http_options
-
-        @staticmethod
-        def _get_transcoded_request(http_options, request):
- request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - -__all__=( - '_BaseAssetServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/types/__init__.py b/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/types/__init__.py deleted file mode 100644 index 65dc1f542300..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/types/__init__.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .asset_service import ( - BatchGetAssetsHistoryResponse, - CreateFeedRequest, - DeleteFeedRequest, - ExportAssetsResponse, - Feed, - FeedOutputConfig, - GcsDestination, - GetFeedRequest, - ListFeedsRequest, - ListFeedsResponse, - OutputConfig, - PubsubDestination, - UpdateFeedRequest, - ContentType, -) -from .assets import ( - Asset, - Resource, - TemporalAsset, - TimeWindow, -) - -__all__ = ( - 'BatchGetAssetsHistoryResponse', - 'CreateFeedRequest', - 'DeleteFeedRequest', - 'ExportAssetsResponse', - 'Feed', - 'FeedOutputConfig', - 'GcsDestination', - 'GetFeedRequest', - 'ListFeedsRequest', - 'ListFeedsResponse', - 'OutputConfig', - 'PubsubDestination', - 'UpdateFeedRequest', - 'ContentType', - 'Asset', - 'Resource', - 'TemporalAsset', - 'TimeWindow', -) diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/types/asset_service.py b/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/types/asset_service.py deleted file mode 100644 index 415c642a0903..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/types/asset_service.py +++ /dev/null @@ -1,392 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
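Each ``_Base*`` helper class in ``rest_base.py`` above factors an RPC into three static pieces: the HTTP bindings, the transcoded request, and the JSON query parameters. A minimal sketch of how those pieces compose for ``GetFeed``, assuming the staging module paths shown in this diff and an illustrative feed name:

.. code-block:: python

    from google.cloud.asset_v1p2beta1.types import asset_service
    from google.cloud.asset_v1p2beta1.services.asset_service.transports.rest_base import (
        _BaseAssetServiceRestTransport,
    )

    request = asset_service.GetFeedRequest(name="projects/123/feeds/my-feed")
    helper = _BaseAssetServiceRestTransport._BaseGetFeed

    # Static HTTP bindings: GET /v1p2beta1/{name=*/*/feeds/*}
    http_options = helper._get_http_options()

    # path_template.transcode matches ``name`` against the URI template,
    # yielding the method, the expanded URI, and the leftover fields.
    transcoded = helper._get_transcoded_request(http_options, request)

    # Leftover fields become query parameters; "$alt" forces JSON output
    # with integer enum encoding.
    query_params = helper._get_query_params_json(transcoded)

    print(transcoded["method"], transcoded["uri"])
    # get /v1p2beta1/projects/123/feeds/my-feed
    print(query_params)
    # {'$alt': 'json;enum-encoding=int'}

The concrete ``rest.py`` transport deleted earlier chains exactly these helpers before handing the result to ``_get_response`` and the interceptor hooks.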
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.asset_v1p2beta1.types import assets as gca_assets -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.asset.v1p2beta1', - manifest={ - 'ContentType', - 'ExportAssetsResponse', - 'BatchGetAssetsHistoryResponse', - 'CreateFeedRequest', - 'GetFeedRequest', - 'ListFeedsRequest', - 'ListFeedsResponse', - 'UpdateFeedRequest', - 'DeleteFeedRequest', - 'OutputConfig', - 'GcsDestination', - 'PubsubDestination', - 'FeedOutputConfig', - 'Feed', - }, -) - - -class ContentType(proto.Enum): - r"""Asset content type. - - Values: - CONTENT_TYPE_UNSPECIFIED (0): - Unspecified content type. - RESOURCE (1): - Resource metadata. - IAM_POLICY (2): - The actual IAM policy set on a resource. - """ - CONTENT_TYPE_UNSPECIFIED = 0 - RESOURCE = 1 - IAM_POLICY = 2 - - -class ExportAssetsResponse(proto.Message): - r"""The export asset response. This message is returned by the - [google.longrunning.Operations.GetOperation][google.longrunning.Operations.GetOperation] - method in the returned - [google.longrunning.Operation.response][google.longrunning.Operation.response] - field. - - Attributes: - read_time (google.protobuf.timestamp_pb2.Timestamp): - Time the snapshot was taken. - output_config (google.cloud.asset_v1p2beta1.types.OutputConfig): - Output configuration indicating where the - results were output to. - """ - - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - output_config: 'OutputConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='OutputConfig', - ) - - -class BatchGetAssetsHistoryResponse(proto.Message): - r"""Batch get assets history response. - - Attributes: - assets (MutableSequence[google.cloud.asset_v1p2beta1.types.TemporalAsset]): - A list of assets with valid time windows. - """ - - assets: MutableSequence[gca_assets.TemporalAsset] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_assets.TemporalAsset, - ) - - -class CreateFeedRequest(proto.Message): - r"""Create asset feed request. - - Attributes: - parent (str): - Required. The name of the - project/folder/organization where this feed - should be created in. It can only be an - organization number (such as - "organizations/123"), a folder number (such as - "folders/123"), a project ID (such as - "projects/my-project-id"), or a project number - (such as "projects/12345"). - feed_id (str): - Required. This is the client-assigned asset - feed identifier and it needs to be unique under - a specific parent project/folder/organization. - feed (google.cloud.asset_v1p2beta1.types.Feed): - Required. The feed details. The field ``name`` must be empty - and it will be generated in the format of: - projects/project_number/feeds/feed_id - folders/folder_number/feeds/feed_id - organizations/organization_number/feeds/feed_id - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - feed_id: str = proto.Field( - proto.STRING, - number=2, - ) - feed: 'Feed' = proto.Field( - proto.MESSAGE, - number=3, - message='Feed', - ) - - -class GetFeedRequest(proto.Message): - r"""Get asset feed request. - - Attributes: - name (str): - Required. 
The name of the Feed. It must be in the format of:
-            projects/project_number/feeds/feed_id
-            folders/folder_number/feeds/feed_id
-            organizations/organization_number/feeds/feed_id
-    """
-
-    name: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-
-
-class ListFeedsRequest(proto.Message):
-    r"""List asset feeds request.
-
-    Attributes:
-        parent (str):
-            Required. The parent
-            project/folder/organization whose feeds are to
-            be listed. It can only be a
-            project/folder/organization number (such as
-            "folders/12345"), or a project ID (such as
-            "projects/my-project-id").
-    """
-
-    parent: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-
-
-class ListFeedsResponse(proto.Message):
-    r"""
-
-    Attributes:
-        feeds (MutableSequence[google.cloud.asset_v1p2beta1.types.Feed]):
-            A list of feeds.
-    """
-
-    feeds: MutableSequence['Feed'] = proto.RepeatedField(
-        proto.MESSAGE,
-        number=1,
-        message='Feed',
-    )
-
-
-class UpdateFeedRequest(proto.Message):
-    r"""Update asset feed request.
-
-    Attributes:
-        feed (google.cloud.asset_v1p2beta1.types.Feed):
-            Required. The new values of feed details. It must match an
-            existing feed and the field ``name`` must be in the format
-            of: projects/project_number/feeds/feed_id or
-            folders/folder_number/feeds/feed_id or
-            organizations/organization_number/feeds/feed_id.
-        update_mask (google.protobuf.field_mask_pb2.FieldMask):
-            Required. Only updates the ``feed`` fields indicated by this
-            mask. The field mask must not be empty, and it must not
-            contain fields that are immutable or only set by the server.
-    """
-
-    feed: 'Feed' = proto.Field(
-        proto.MESSAGE,
-        number=1,
-        message='Feed',
-    )
-    update_mask: field_mask_pb2.FieldMask = proto.Field(
-        proto.MESSAGE,
-        number=2,
-        message=field_mask_pb2.FieldMask,
-    )
-
-
-class DeleteFeedRequest(proto.Message):
-    r"""
-
-    Attributes:
-        name (str):
-            Required. The name of the feed. It must be in the format of:
-            projects/project_number/feeds/feed_id
-            folders/folder_number/feeds/feed_id
-            organizations/organization_number/feeds/feed_id
-    """
-
-    name: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-
-
-class OutputConfig(proto.Message):
-    r"""Output configuration for export assets destination.
-
-    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
-
-    Attributes:
-        gcs_destination (google.cloud.asset_v1p2beta1.types.GcsDestination):
-            Destination on Cloud Storage.
-
-            This field is a member of `oneof`_ ``destination``.
-    """
-
-    gcs_destination: 'GcsDestination' = proto.Field(
-        proto.MESSAGE,
-        number=1,
-        oneof='destination',
-        message='GcsDestination',
-    )
-
-
-class GcsDestination(proto.Message):
-    r"""A Cloud Storage location.
-
-    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
-
-    Attributes:
-        uri (str):
-            The URI of the Cloud Storage object. It's the same URI that
-            is used by gsutil. For example:
-            "gs://bucket_name/object_name". See `Viewing and Editing
-            Object
-            Metadata `__
-            for more information.
-
-            This field is a member of `oneof`_ ``object_uri``.
-    """
-
-    uri: str = proto.Field(
-        proto.STRING,
-        number=1,
-        oneof='object_uri',
-    )
-
-
-class PubsubDestination(proto.Message):
-    r"""A Pub/Sub destination.
-
-    Attributes:
-        topic (str):
-            The name of the Pub/Sub topic to publish to. For example:
-            ``projects/PROJECT_ID/topics/TOPIC_ID``.
- """ - - topic: str = proto.Field( - proto.STRING, - number=1, - ) - - -class FeedOutputConfig(proto.Message): - r"""Output configuration for asset feed destination. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - pubsub_destination (google.cloud.asset_v1p2beta1.types.PubsubDestination): - Destination on Pub/Sub. - - This field is a member of `oneof`_ ``destination``. - """ - - pubsub_destination: 'PubsubDestination' = proto.Field( - proto.MESSAGE, - number=1, - oneof='destination', - message='PubsubDestination', - ) - - -class Feed(proto.Message): - r"""An asset feed used to export asset updates to a destinations. - An asset feed filter controls what updates are exported. The - asset feed must be created within a project, organization, or - folder. Supported destinations are: - - Cloud Pub/Sub topics. - - Attributes: - name (str): - Required. The format will be - projects/{project_number}/feeds/{client-assigned_feed_identifier} - or - folders/{folder_number}/feeds/{client-assigned_feed_identifier} - or - organizations/{organization_number}/feeds/{client-assigned_feed_identifier} - - The client-assigned feed identifier must be unique within - the parent project/folder/organization. - asset_names (MutableSequence[str]): - A list of the full names of the assets to receive updates. - You must specify either or both of asset_names and - asset_types. Only asset updates matching specified - asset_names or asset_types are exported to the feed. For - example: - ``//compute.googleapis.com/projects/my_project_123/zones/zone1/instances/instance1``. - See `Resource - Names `__ - for more info. - asset_types (MutableSequence[str]): - A list of types of the assets to receive updates. You must - specify either or both of asset_names and asset_types. Only - asset updates matching specified asset_names or asset_types - are exported to the feed. For example: - "compute.googleapis.com/Disk" See `Introduction to Cloud - Asset - Inventory `__ - for all supported asset types. - content_type (google.cloud.asset_v1p2beta1.types.ContentType): - Asset content type. If not specified, no - content but the asset name and type will be - returned. - feed_output_config (google.cloud.asset_v1p2beta1.types.FeedOutputConfig): - Required. Feed output configuration defining - where the asset updates are published to. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - asset_names: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - asset_types: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - content_type: 'ContentType' = proto.Field( - proto.ENUM, - number=4, - enum='ContentType', - ) - feed_output_config: 'FeedOutputConfig' = proto.Field( - proto.MESSAGE, - number=5, - message='FeedOutputConfig', - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/types/assets.py b/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/types/assets.py deleted file mode 100644 index 2e31dced6469..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/google/cloud/asset_v1p2beta1/types/assets.py +++ /dev/null @@ -1,293 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.orgpolicy.v1 import orgpolicy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.identity.accesscontextmanager.v1 import access_level_pb2 # type: ignore -from google.identity.accesscontextmanager.v1 import access_policy_pb2 # type: ignore -from google.identity.accesscontextmanager.v1 import service_perimeter_pb2 # type: ignore -from google.protobuf import struct_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.asset.v1p2beta1', - manifest={ - 'TemporalAsset', - 'TimeWindow', - 'Asset', - 'Resource', - }, -) - - -class TemporalAsset(proto.Message): - r"""An asset in Google Cloud and its temporal metadata, including - the time window when it was observed and its status during that - window. - - Attributes: - window (google.cloud.asset_v1p2beta1.types.TimeWindow): - The time window when the asset data and state - was observed. - deleted (bool): - Whether the asset has been deleted or not. - asset (google.cloud.asset_v1p2beta1.types.Asset): - An asset in Google Cloud. - """ - - window: 'TimeWindow' = proto.Field( - proto.MESSAGE, - number=1, - message='TimeWindow', - ) - deleted: bool = proto.Field( - proto.BOOL, - number=2, - ) - asset: 'Asset' = proto.Field( - proto.MESSAGE, - number=3, - message='Asset', - ) - - -class TimeWindow(proto.Message): - r"""A time window specified by its ``start_time`` and ``end_time``. - - Attributes: - start_time (google.protobuf.timestamp_pb2.Timestamp): - Start time of the time window (exclusive). - end_time (google.protobuf.timestamp_pb2.Timestamp): - End time of the time window (inclusive). If - not specified, the current timestamp is used - instead. - """ - - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - - -class Asset(proto.Message): - r"""An asset in Google Cloud. An asset can be any resource in the Google - Cloud `resource - hierarchy `__, - a resource outside the Google Cloud resource hierarchy (such as - Google Kubernetes Engine clusters and objects), or a policy (e.g. - IAM policy). See `Supported asset - types `__ - for more information. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - The full name of the asset. Example: - ``//compute.googleapis.com/projects/my_project_123/zones/zone1/instances/instance1`` - - See `Resource - names `__ - for more information. - asset_type (str): - The type of the asset. 
Example:
-            ``compute.googleapis.com/Disk``
-
-            See `Supported asset
-            types `__
-            for more information.
-        resource (google.cloud.asset_v1p2beta1.types.Resource):
-            A representation of the resource.
-        iam_policy (google.iam.v1.policy_pb2.Policy):
-            A representation of the IAM policy set on a Google Cloud
-            resource. There can be a maximum of one IAM policy set on
-            any given resource. In addition, IAM policies inherit their
-            granted access scope from any policies set on parent
-            resources in the resource hierarchy. Therefore, the
-            effective policy is the union of both the policy set on
-            this resource and each policy set on all of the resource's
-            ancestry resource levels in the hierarchy. See `this
-            topic `__
-            for more information.
-        ancestors (MutableSequence[str]):
-            The ancestry path of an asset in Google Cloud `resource
-            hierarchy `__,
-            represented as a list of relative resource names. An
-            ancestry path starts with the closest ancestor in the
-            hierarchy and ends at root. If the asset is a project,
-            folder, or organization, the ancestry path starts from the
-            asset itself.
-
-            Example:
-            ``["projects/123456789", "folders/5432", "organizations/1234"]``
-        access_policy (google.identity.accesscontextmanager.v1.access_policy_pb2.AccessPolicy):
-            Please also refer to the `access policy user
-            guide `__.
-
-            This field is a member of `oneof`_ ``access_context_policy``.
-        access_level (google.identity.accesscontextmanager.v1.access_level_pb2.AccessLevel):
-            Please also refer to the `access level user
-            guide `__.
-
-            This field is a member of `oneof`_ ``access_context_policy``.
-        service_perimeter (google.identity.accesscontextmanager.v1.service_perimeter_pb2.ServicePerimeter):
-            Please also refer to the `service perimeter user
-            guide `__.
-
-            This field is a member of `oneof`_ ``access_context_policy``.
-        org_policy (MutableSequence[google.cloud.orgpolicy.v1.orgpolicy_pb2.Policy]):
-            A representation of an `organization
-            policy `__.
-            There can be more than one organization policy with
-            different constraints set on a given resource.
-    """
-
-    name: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    asset_type: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-    resource: 'Resource' = proto.Field(
-        proto.MESSAGE,
-        number=3,
-        message='Resource',
-    )
-    iam_policy: policy_pb2.Policy = proto.Field(
-        proto.MESSAGE,
-        number=4,
-        message=policy_pb2.Policy,
-    )
-    ancestors: MutableSequence[str] = proto.RepeatedField(
-        proto.STRING,
-        number=6,
-    )
-    access_policy: access_policy_pb2.AccessPolicy = proto.Field(
-        proto.MESSAGE,
-        number=7,
-        oneof='access_context_policy',
-        message=access_policy_pb2.AccessPolicy,
-    )
-    access_level: access_level_pb2.AccessLevel = proto.Field(
-        proto.MESSAGE,
-        number=8,
-        oneof='access_context_policy',
-        message=access_level_pb2.AccessLevel,
-    )
-    service_perimeter: service_perimeter_pb2.ServicePerimeter = proto.Field(
-        proto.MESSAGE,
-        number=9,
-        oneof='access_context_policy',
-        message=service_perimeter_pb2.ServicePerimeter,
-    )
-    org_policy: MutableSequence[orgpolicy_pb2.Policy] = proto.RepeatedField(
-        proto.MESSAGE,
-        number=10,
-        message=orgpolicy_pb2.Policy,
-    )
-
-
-class Resource(proto.Message):
-    r"""A representation of a Google Cloud resource.
-
-    Attributes:
-        version (str):
-            The API version. Example: ``v1``
-        discovery_document_uri (str):
-            The URL of the discovery document containing the resource's
-            JSON schema.
Example: - ``https://www.googleapis.com/discovery/v1/apis/compute/v1/rest`` - - This value is unspecified for resources that do not have an - API based on a discovery document, such as Cloud Bigtable. - discovery_name (str): - The JSON schema name listed in the discovery document. - Example: ``Project`` - - This value is unspecified for resources that do not have an - API based on a discovery document, such as Cloud Bigtable. - resource_url (str): - The REST URL for accessing the resource. An HTTP ``GET`` - request using this URL returns the resource itself. Example: - ``https://cloudresourcemanager.googleapis.com/v1/projects/my-project-123`` - - This value is unspecified for resources without a REST API. - parent (str): - The full name of the immediate parent of this resource. See - `Resource - Names `__ - for more information. - - For Google Cloud assets, this value is the parent resource - defined in the `IAM policy - hierarchy `__. - Example: - ``//cloudresourcemanager.googleapis.com/projects/my_project_123`` - - For third-party assets, this field may be set differently. - data (google.protobuf.struct_pb2.Struct): - The content of the resource, in which some - sensitive fields are removed and may not be - present. - """ - - version: str = proto.Field( - proto.STRING, - number=1, - ) - discovery_document_uri: str = proto.Field( - proto.STRING, - number=2, - ) - discovery_name: str = proto.Field( - proto.STRING, - number=3, - ) - resource_url: str = proto.Field( - proto.STRING, - number=4, - ) - parent: str = proto.Field( - proto.STRING, - number=5, - ) - data: struct_pb2.Struct = proto.Field( - proto.MESSAGE, - number=6, - message=struct_pb2.Struct, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/mypy.ini b/owl-bot-staging/google-cloud-asset/v1p2beta1/mypy.ini deleted file mode 100644 index 574c5aed394b..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/noxfile.py b/owl-bot-staging/google-cloud-asset/v1p2beta1/noxfile.py deleted file mode 100644 index 7210accb4196..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/noxfile.py +++ /dev/null @@ -1,280 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
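Taken together, the ``asset_service`` types above say a ``Feed`` needs a ``name``, at least one of ``asset_names``/``asset_types``, and a required ``feed_output_config`` whose destination sits behind a proto-plus oneof. A hedged construction sketch (the project, feed, and topic identifiers are placeholders):

.. code-block:: python

    from google.cloud.asset_v1p2beta1.types import asset_service

    feed = asset_service.Feed(
        name="projects/123/feeds/my-feed",
        asset_types=["compute.googleapis.com/Disk"],
        content_type=asset_service.ContentType.RESOURCE,
        feed_output_config=asset_service.FeedOutputConfig(
            pubsub_destination=asset_service.PubsubDestination(
                topic="projects/my-project/topics/asset-updates",
            ),
        ),
    )

    # Setting pubsub_destination selects the ``destination`` oneof; the
    # underlying protobuf message reports which member is populated.
    pb = asset_service.FeedOutputConfig.pb(feed.feed_output_config)
    assert pb.WhichOneof("destination") == "pubsub_destination"

This is the same shape the generated ``CreateFeed`` samples below pass as the ``feed`` field of ``CreateFeedRequest``.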
-#
-import os
-import pathlib
-import re
-import shutil
-import subprocess
-import sys
-
-
-import nox  # type: ignore
-
-ALL_PYTHON = [
-    "3.7",
-    "3.8",
-    "3.9",
-    "3.10",
-    "3.11",
-    "3.12",
-    "3.13",
-]
-
-CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
-
-LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt"
-PACKAGE_NAME = 'google-cloud-asset'
-
-BLACK_VERSION = "black==22.3.0"
-BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"]
-DEFAULT_PYTHON_VERSION = "3.13"
-
-nox.sessions = [
-    "unit",
-    "cover",
-    "mypy",
-    "check_lower_bounds",
-    # exclude update_lower_bounds from default
-    "docs",
-    "blacken",
-    "lint",
-    "prerelease_deps",
-]
-
-@nox.session(python=ALL_PYTHON)
-@nox.parametrize(
-    "protobuf_implementation",
-    [ "python", "upb", "cpp" ],
-)
-def unit(session, protobuf_implementation):
-    """Run the unit test suite."""
-
-    if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
-        session.skip("cpp implementation is not supported in python 3.11+")
-
-    session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"')
-    session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt")
-
-    # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped.
-    # The 'cpp' implementation requires Protobuf<4.
-    if protobuf_implementation == "cpp":
-        session.install("protobuf<4")
-
-    session.run(
-        'py.test',
-        '--quiet',
-        '--cov=google/cloud/asset_v1p2beta1/',
-        '--cov=tests/',
-        '--cov-config=.coveragerc',
-        '--cov-report=term',
-        '--cov-report=html',
-        os.path.join('tests', 'unit', ''.join(session.posargs)),
-        env={
-            "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
-        },
-    )
-
-@nox.session(python=ALL_PYTHON[-1])
-@nox.parametrize(
-    "protobuf_implementation",
-    [ "python", "upb", "cpp" ],
-)
-def prerelease_deps(session, protobuf_implementation):
-    """Run the unit test suite against pre-release versions of dependencies."""
-
-    if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
-        session.skip("cpp implementation is not supported in python 3.11+")
-
-    # Install test environment dependencies
-    session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"')
-
-    # Install the package without dependencies
-    session.install('-e', '.', '--no-deps')
-
-    # We test the minimum dependency versions using the minimum Python
-    # version so the lowest python runtime that we test has a corresponding constraints
-    # file, located at `testing/constraints-<python-version>.txt`, which contains all of the
-    # dependencies and extras.
-    with open(
-        CURRENT_DIRECTORY
-        / "testing"
-        / f"constraints-{ALL_PYTHON[0]}.txt",
-        encoding="utf-8",
-    ) as constraints_file:
-        constraints_text = constraints_file.read()
-
-    # Ignore leading whitespace and comment lines.
- constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - session.install(*constraints_deps) - - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 - "grpcio!=1.67.0rc1", - "grpcio-status", - "protobuf", - "proto-plus", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) - - # Remaining dependencies - other_deps = [ - "requests", - ] - session.install(*other_deps) - - # Print out prerelease package versions - - session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") - session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("python", "-c", "import grpc; print(grpc.__version__)") - session.run( - "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" - ) - session.run( - "python", "-c", "import proto; print(proto.__version__)" - ) - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/asset_v1p2beta1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. - """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. 
- """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/samples/generated_samples/cloudasset_v1p2beta1_generated_asset_service_create_feed_async.py b/owl-bot-staging/google-cloud-asset/v1p2beta1/samples/generated_samples/cloudasset_v1p2beta1_generated_asset_service_create_feed_async.py deleted file mode 100644 index bafba2297b6d..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/samples/generated_samples/cloudasset_v1p2beta1_generated_asset_service_create_feed_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateFeed -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-asset - - -# [START cloudasset_v1p2beta1_generated_AssetService_CreateFeed_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import asset_v1p2beta1 - - -async def sample_create_feed(): - # Create a client - client = asset_v1p2beta1.AssetServiceAsyncClient() - - # Initialize request argument(s) - feed = asset_v1p2beta1.Feed() - feed.name = "name_value" - - request = asset_v1p2beta1.CreateFeedRequest( - parent="parent_value", - feed_id="feed_id_value", - feed=feed, - ) - - # Make the request - response = await client.create_feed(request=request) - - # Handle the response - print(response) - -# [END cloudasset_v1p2beta1_generated_AssetService_CreateFeed_async] diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/samples/generated_samples/cloudasset_v1p2beta1_generated_asset_service_create_feed_sync.py b/owl-bot-staging/google-cloud-asset/v1p2beta1/samples/generated_samples/cloudasset_v1p2beta1_generated_asset_service_create_feed_sync.py deleted file mode 100644 index c52adc79a285..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/samples/generated_samples/cloudasset_v1p2beta1_generated_asset_service_create_feed_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateFeed -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-asset - - -# [START cloudasset_v1p2beta1_generated_AssetService_CreateFeed_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import asset_v1p2beta1 - - -def sample_create_feed(): - # Create a client - client = asset_v1p2beta1.AssetServiceClient() - - # Initialize request argument(s) - feed = asset_v1p2beta1.Feed() - feed.name = "name_value" - - request = asset_v1p2beta1.CreateFeedRequest( - parent="parent_value", - feed_id="feed_id_value", - feed=feed, - ) - - # Make the request - response = client.create_feed(request=request) - - # Handle the response - print(response) - -# [END cloudasset_v1p2beta1_generated_AssetService_CreateFeed_sync] diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/samples/generated_samples/cloudasset_v1p2beta1_generated_asset_service_delete_feed_async.py b/owl-bot-staging/google-cloud-asset/v1p2beta1/samples/generated_samples/cloudasset_v1p2beta1_generated_asset_service_delete_feed_async.py deleted file mode 100644 index 7b106af74fdb..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/samples/generated_samples/cloudasset_v1p2beta1_generated_asset_service_delete_feed_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteFeed -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-asset - - -# [START cloudasset_v1p2beta1_generated_AssetService_DeleteFeed_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import asset_v1p2beta1 - - -async def sample_delete_feed(): - # Create a client - client = asset_v1p2beta1.AssetServiceAsyncClient() - - # Initialize request argument(s) - request = asset_v1p2beta1.DeleteFeedRequest( - name="name_value", - ) - - # Make the request - await client.delete_feed(request=request) - - -# [END cloudasset_v1p2beta1_generated_AssetService_DeleteFeed_async] diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/samples/generated_samples/cloudasset_v1p2beta1_generated_asset_service_delete_feed_sync.py b/owl-bot-staging/google-cloud-asset/v1p2beta1/samples/generated_samples/cloudasset_v1p2beta1_generated_asset_service_delete_feed_sync.py deleted file mode 100644 index ca5d979e867a..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/samples/generated_samples/cloudasset_v1p2beta1_generated_asset_service_delete_feed_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteFeed -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-asset - - -# [START cloudasset_v1p2beta1_generated_AssetService_DeleteFeed_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import asset_v1p2beta1 - - -def sample_delete_feed(): - # Create a client - client = asset_v1p2beta1.AssetServiceClient() - - # Initialize request argument(s) - request = asset_v1p2beta1.DeleteFeedRequest( - name="name_value", - ) - - # Make the request - client.delete_feed(request=request) - - -# [END cloudasset_v1p2beta1_generated_AssetService_DeleteFeed_sync] diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/samples/generated_samples/cloudasset_v1p2beta1_generated_asset_service_get_feed_async.py b/owl-bot-staging/google-cloud-asset/v1p2beta1/samples/generated_samples/cloudasset_v1p2beta1_generated_asset_service_get_feed_async.py deleted file mode 100644 index e228bcbbb2bf..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/samples/generated_samples/cloudasset_v1p2beta1_generated_asset_service_get_feed_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetFeed -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-asset - - -# [START cloudasset_v1p2beta1_generated_AssetService_GetFeed_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import asset_v1p2beta1 - - -async def sample_get_feed(): - # Create a client - client = asset_v1p2beta1.AssetServiceAsyncClient() - - # Initialize request argument(s) - request = asset_v1p2beta1.GetFeedRequest( - name="name_value", - ) - - # Make the request - response = await client.get_feed(request=request) - - # Handle the response - print(response) - -# [END cloudasset_v1p2beta1_generated_AssetService_GetFeed_async] diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/samples/generated_samples/cloudasset_v1p2beta1_generated_asset_service_get_feed_sync.py b/owl-bot-staging/google-cloud-asset/v1p2beta1/samples/generated_samples/cloudasset_v1p2beta1_generated_asset_service_get_feed_sync.py deleted file mode 100644 index 6fad51f45628..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/samples/generated_samples/cloudasset_v1p2beta1_generated_asset_service_get_feed_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetFeed -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-asset - - -# [START cloudasset_v1p2beta1_generated_AssetService_GetFeed_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import asset_v1p2beta1 - - -def sample_get_feed(): - # Create a client - client = asset_v1p2beta1.AssetServiceClient() - - # Initialize request argument(s) - request = asset_v1p2beta1.GetFeedRequest( - name="name_value", - ) - - # Make the request - response = client.get_feed(request=request) - - # Handle the response - print(response) - -# [END cloudasset_v1p2beta1_generated_AssetService_GetFeed_sync] diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/samples/generated_samples/cloudasset_v1p2beta1_generated_asset_service_list_feeds_async.py b/owl-bot-staging/google-cloud-asset/v1p2beta1/samples/generated_samples/cloudasset_v1p2beta1_generated_asset_service_list_feeds_async.py deleted file mode 100644 index 8b8fee649b2e..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/samples/generated_samples/cloudasset_v1p2beta1_generated_asset_service_list_feeds_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListFeeds -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-asset - - -# [START cloudasset_v1p2beta1_generated_AssetService_ListFeeds_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import asset_v1p2beta1 - - -async def sample_list_feeds(): - # Create a client - client = asset_v1p2beta1.AssetServiceAsyncClient() - - # Initialize request argument(s) - request = asset_v1p2beta1.ListFeedsRequest( - parent="parent_value", - ) - - # Make the request - response = await client.list_feeds(request=request) - - # Handle the response - print(response) - -# [END cloudasset_v1p2beta1_generated_AssetService_ListFeeds_async] diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/samples/generated_samples/cloudasset_v1p2beta1_generated_asset_service_list_feeds_sync.py b/owl-bot-staging/google-cloud-asset/v1p2beta1/samples/generated_samples/cloudasset_v1p2beta1_generated_asset_service_list_feeds_sync.py deleted file mode 100644 index da3cfad5bc57..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/samples/generated_samples/cloudasset_v1p2beta1_generated_asset_service_list_feeds_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListFeeds -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-asset - - -# [START cloudasset_v1p2beta1_generated_AssetService_ListFeeds_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import asset_v1p2beta1 - - -def sample_list_feeds(): - # Create a client - client = asset_v1p2beta1.AssetServiceClient() - - # Initialize request argument(s) - request = asset_v1p2beta1.ListFeedsRequest( - parent="parent_value", - ) - - # Make the request - response = client.list_feeds(request=request) - - # Handle the response - print(response) - -# [END cloudasset_v1p2beta1_generated_AssetService_ListFeeds_sync] diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/samples/generated_samples/cloudasset_v1p2beta1_generated_asset_service_update_feed_async.py b/owl-bot-staging/google-cloud-asset/v1p2beta1/samples/generated_samples/cloudasset_v1p2beta1_generated_asset_service_update_feed_async.py deleted file mode 100644 index c12e104345dd..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/samples/generated_samples/cloudasset_v1p2beta1_generated_asset_service_update_feed_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateFeed -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-asset - - -# [START cloudasset_v1p2beta1_generated_AssetService_UpdateFeed_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import asset_v1p2beta1 - - -async def sample_update_feed(): - # Create a client - client = asset_v1p2beta1.AssetServiceAsyncClient() - - # Initialize request argument(s) - feed = asset_v1p2beta1.Feed() - feed.name = "name_value" - - request = asset_v1p2beta1.UpdateFeedRequest( - feed=feed, - ) - - # Make the request - response = await client.update_feed(request=request) - - # Handle the response - print(response) - -# [END cloudasset_v1p2beta1_generated_AssetService_UpdateFeed_async] diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/samples/generated_samples/cloudasset_v1p2beta1_generated_asset_service_update_feed_sync.py b/owl-bot-staging/google-cloud-asset/v1p2beta1/samples/generated_samples/cloudasset_v1p2beta1_generated_asset_service_update_feed_sync.py deleted file mode 100644 index 2630e88839f1..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/samples/generated_samples/cloudasset_v1p2beta1_generated_asset_service_update_feed_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateFeed -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-asset - - -# [START cloudasset_v1p2beta1_generated_AssetService_UpdateFeed_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import asset_v1p2beta1 - - -def sample_update_feed(): - # Create a client - client = asset_v1p2beta1.AssetServiceClient() - - # Initialize request argument(s) - feed = asset_v1p2beta1.Feed() - feed.name = "name_value" - - request = asset_v1p2beta1.UpdateFeedRequest( - feed=feed, - ) - - # Make the request - response = client.update_feed(request=request) - - # Handle the response - print(response) - -# [END cloudasset_v1p2beta1_generated_AssetService_UpdateFeed_sync] diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p2beta1.json b/owl-bot-staging/google-cloud-asset/v1p2beta1/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p2beta1.json deleted file mode 100644 index abb0badfbf70..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p2beta1.json +++ /dev/null @@ -1,814 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.asset.v1p2beta1", - "version": "v1p2beta1" - } - ], - "language": "PYTHON", - "name": "google-cloud-asset", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.asset_v1p2beta1.AssetServiceAsyncClient", - "shortName": "AssetServiceAsyncClient" - }, - "fullName": "google.cloud.asset_v1p2beta1.AssetServiceAsyncClient.create_feed", - "method": { - "fullName": "google.cloud.asset.v1p2beta1.AssetService.CreateFeed", - "service": { - "fullName": "google.cloud.asset.v1p2beta1.AssetService", - "shortName": "AssetService" - }, - "shortName": "CreateFeed" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1p2beta1.types.CreateFeedRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1p2beta1.types.Feed", - "shortName": "create_feed" - }, - "description": "Sample for CreateFeed", - "file": "cloudasset_v1p2beta1_generated_asset_service_create_feed_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1p2beta1_generated_AssetService_CreateFeed_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1p2beta1_generated_asset_service_create_feed_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.asset_v1p2beta1.AssetServiceClient", - "shortName": "AssetServiceClient" - }, - "fullName": "google.cloud.asset_v1p2beta1.AssetServiceClient.create_feed", - "method": { - "fullName": "google.cloud.asset.v1p2beta1.AssetService.CreateFeed", - "service": { - "fullName": "google.cloud.asset.v1p2beta1.AssetService", - "shortName": "AssetService" - }, - "shortName": "CreateFeed" - }, - "parameters": [ - { - "name": 
"request", - "type": "google.cloud.asset_v1p2beta1.types.CreateFeedRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1p2beta1.types.Feed", - "shortName": "create_feed" - }, - "description": "Sample for CreateFeed", - "file": "cloudasset_v1p2beta1_generated_asset_service_create_feed_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1p2beta1_generated_AssetService_CreateFeed_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1p2beta1_generated_asset_service_create_feed_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.asset_v1p2beta1.AssetServiceAsyncClient", - "shortName": "AssetServiceAsyncClient" - }, - "fullName": "google.cloud.asset_v1p2beta1.AssetServiceAsyncClient.delete_feed", - "method": { - "fullName": "google.cloud.asset.v1p2beta1.AssetService.DeleteFeed", - "service": { - "fullName": "google.cloud.asset.v1p2beta1.AssetService", - "shortName": "AssetService" - }, - "shortName": "DeleteFeed" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1p2beta1.types.DeleteFeedRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_feed" - }, - "description": "Sample for DeleteFeed", - "file": "cloudasset_v1p2beta1_generated_asset_service_delete_feed_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1p2beta1_generated_AssetService_DeleteFeed_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1p2beta1_generated_asset_service_delete_feed_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.asset_v1p2beta1.AssetServiceClient", - "shortName": "AssetServiceClient" - }, - "fullName": "google.cloud.asset_v1p2beta1.AssetServiceClient.delete_feed", - "method": { - "fullName": "google.cloud.asset.v1p2beta1.AssetService.DeleteFeed", - "service": { - "fullName": "google.cloud.asset.v1p2beta1.AssetService", - "shortName": "AssetService" - }, - "shortName": "DeleteFeed" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1p2beta1.types.DeleteFeedRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": 
"Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_feed" - }, - "description": "Sample for DeleteFeed", - "file": "cloudasset_v1p2beta1_generated_asset_service_delete_feed_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1p2beta1_generated_AssetService_DeleteFeed_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1p2beta1_generated_asset_service_delete_feed_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.asset_v1p2beta1.AssetServiceAsyncClient", - "shortName": "AssetServiceAsyncClient" - }, - "fullName": "google.cloud.asset_v1p2beta1.AssetServiceAsyncClient.get_feed", - "method": { - "fullName": "google.cloud.asset.v1p2beta1.AssetService.GetFeed", - "service": { - "fullName": "google.cloud.asset.v1p2beta1.AssetService", - "shortName": "AssetService" - }, - "shortName": "GetFeed" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1p2beta1.types.GetFeedRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1p2beta1.types.Feed", - "shortName": "get_feed" - }, - "description": "Sample for GetFeed", - "file": "cloudasset_v1p2beta1_generated_asset_service_get_feed_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1p2beta1_generated_AssetService_GetFeed_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1p2beta1_generated_asset_service_get_feed_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.asset_v1p2beta1.AssetServiceClient", - "shortName": "AssetServiceClient" - }, - "fullName": "google.cloud.asset_v1p2beta1.AssetServiceClient.get_feed", - "method": { - "fullName": "google.cloud.asset.v1p2beta1.AssetService.GetFeed", - "service": { - "fullName": "google.cloud.asset.v1p2beta1.AssetService", - "shortName": "AssetService" - }, - "shortName": "GetFeed" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1p2beta1.types.GetFeedRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1p2beta1.types.Feed", - "shortName": "get_feed" - }, - "description": "Sample for GetFeed", - "file": "cloudasset_v1p2beta1_generated_asset_service_get_feed_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"cloudasset_v1p2beta1_generated_AssetService_GetFeed_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1p2beta1_generated_asset_service_get_feed_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.asset_v1p2beta1.AssetServiceAsyncClient", - "shortName": "AssetServiceAsyncClient" - }, - "fullName": "google.cloud.asset_v1p2beta1.AssetServiceAsyncClient.list_feeds", - "method": { - "fullName": "google.cloud.asset.v1p2beta1.AssetService.ListFeeds", - "service": { - "fullName": "google.cloud.asset.v1p2beta1.AssetService", - "shortName": "AssetService" - }, - "shortName": "ListFeeds" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1p2beta1.types.ListFeedsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1p2beta1.types.ListFeedsResponse", - "shortName": "list_feeds" - }, - "description": "Sample for ListFeeds", - "file": "cloudasset_v1p2beta1_generated_asset_service_list_feeds_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1p2beta1_generated_AssetService_ListFeeds_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1p2beta1_generated_asset_service_list_feeds_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.asset_v1p2beta1.AssetServiceClient", - "shortName": "AssetServiceClient" - }, - "fullName": "google.cloud.asset_v1p2beta1.AssetServiceClient.list_feeds", - "method": { - "fullName": "google.cloud.asset.v1p2beta1.AssetService.ListFeeds", - "service": { - "fullName": "google.cloud.asset.v1p2beta1.AssetService", - "shortName": "AssetService" - }, - "shortName": "ListFeeds" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1p2beta1.types.ListFeedsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1p2beta1.types.ListFeedsResponse", - "shortName": "list_feeds" - }, - "description": "Sample for ListFeeds", - "file": "cloudasset_v1p2beta1_generated_asset_service_list_feeds_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1p2beta1_generated_AssetService_ListFeeds_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - 
{ - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1p2beta1_generated_asset_service_list_feeds_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.asset_v1p2beta1.AssetServiceAsyncClient", - "shortName": "AssetServiceAsyncClient" - }, - "fullName": "google.cloud.asset_v1p2beta1.AssetServiceAsyncClient.update_feed", - "method": { - "fullName": "google.cloud.asset.v1p2beta1.AssetService.UpdateFeed", - "service": { - "fullName": "google.cloud.asset.v1p2beta1.AssetService", - "shortName": "AssetService" - }, - "shortName": "UpdateFeed" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1p2beta1.types.UpdateFeedRequest" - }, - { - "name": "feed", - "type": "google.cloud.asset_v1p2beta1.types.Feed" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1p2beta1.types.Feed", - "shortName": "update_feed" - }, - "description": "Sample for UpdateFeed", - "file": "cloudasset_v1p2beta1_generated_asset_service_update_feed_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1p2beta1_generated_AssetService_UpdateFeed_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1p2beta1_generated_asset_service_update_feed_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.asset_v1p2beta1.AssetServiceClient", - "shortName": "AssetServiceClient" - }, - "fullName": "google.cloud.asset_v1p2beta1.AssetServiceClient.update_feed", - "method": { - "fullName": "google.cloud.asset.v1p2beta1.AssetService.UpdateFeed", - "service": { - "fullName": "google.cloud.asset.v1p2beta1.AssetService", - "shortName": "AssetService" - }, - "shortName": "UpdateFeed" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1p2beta1.types.UpdateFeedRequest" - }, - { - "name": "feed", - "type": "google.cloud.asset_v1p2beta1.types.Feed" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1p2beta1.types.Feed", - "shortName": "update_feed" - }, - "description": "Sample for UpdateFeed", - "file": "cloudasset_v1p2beta1_generated_asset_service_update_feed_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1p2beta1_generated_AssetService_UpdateFeed_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": 
"REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1p2beta1_generated_asset_service_update_feed_sync.py" - } - ] -} diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/scripts/fixup_asset_v1p2beta1_keywords.py b/owl-bot-staging/google-cloud-asset/v1p2beta1/scripts/fixup_asset_v1p2beta1_keywords.py deleted file mode 100644 index 9b99ac07fc5d..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/scripts/fixup_asset_v1p2beta1_keywords.py +++ /dev/null @@ -1,180 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class assetCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_feed': ('parent', 'feed_id', 'feed', ), - 'delete_feed': ('name', ), - 'get_feed': ('name', ), - 'list_feeds': ('parent', ), - 'update_feed': ('feed', 'update_mask', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. 
- for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=assetCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the asset client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/setup.py b/owl-bot-staging/google-cloud-asset/v1p2beta1/setup.py deleted file mode 100644 index 481fb82feff0..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/setup.py +++ /dev/null @@ -1,100 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-asset' - - -description = "Google Cloud Asset API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/asset/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "google-cloud-access-context-manager >= 0.1.2, <1.0.0dev", - "grpc-google-iam-v1 >= 0.14.0, <1.0.0dev", -] -extras = { -} -url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-asset" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - extras_require=extras, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-asset/v1p2beta1/testing/constraints-3.10.txt deleted file mode 100644 index 2b8305f11bf0..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/testing/constraints-3.10.txt +++ /dev/null @@ -1,8 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf -google-cloud-access-context-manager -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-asset/v1p2beta1/testing/constraints-3.11.txt deleted file mode 100644 index 2b8305f11bf0..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/testing/constraints-3.11.txt +++ /dev/null @@ -1,8 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -google-cloud-access-context-manager -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-asset/v1p2beta1/testing/constraints-3.12.txt deleted file mode 100644 index 2b8305f11bf0..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/testing/constraints-3.12.txt +++ /dev/null @@ -1,8 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -google-cloud-access-context-manager -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-asset/v1p2beta1/testing/constraints-3.13.txt deleted file mode 100644 index 2b8305f11bf0..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/testing/constraints-3.13.txt +++ /dev/null @@ -1,8 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -google-cloud-access-context-manager -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-asset/v1p2beta1/testing/constraints-3.7.txt deleted file mode 100644 index 4bd37f5230a0..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/testing/constraints-3.7.txt +++ /dev/null @@ -1,12 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -proto-plus==1.22.3 -protobuf==3.20.2 -google-cloud-access-context-manager==0.1.2 -grpc-google-iam-v1==0.14.0 diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-asset/v1p2beta1/testing/constraints-3.8.txt deleted file mode 100644 index 2b8305f11bf0..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/testing/constraints-3.8.txt +++ /dev/null @@ -1,8 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -google-cloud-access-context-manager -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-asset/v1p2beta1/testing/constraints-3.9.txt deleted file mode 100644 index 2b8305f11bf0..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/testing/constraints-3.9.txt +++ /dev/null @@ -1,8 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. 
-# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -google-cloud-access-context-manager -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/tests/__init__.py b/owl-bot-staging/google-cloud-asset/v1p2beta1/tests/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/tests/unit/__init__.py b/owl-bot-staging/google-cloud-asset/v1p2beta1/tests/unit/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-asset/v1p2beta1/tests/unit/gapic/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/tests/unit/gapic/asset_v1p2beta1/__init__.py b/owl-bot-staging/google-cloud-asset/v1p2beta1/tests/unit/gapic/asset_v1p2beta1/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/tests/unit/gapic/asset_v1p2beta1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-asset/v1p2beta1/tests/unit/gapic/asset_v1p2beta1/test_asset_service.py b/owl-bot-staging/google-cloud-asset/v1p2beta1/tests/unit/gapic/asset_v1p2beta1/test_asset_service.py deleted file mode 100644 index 7409005ce5b1..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p2beta1/tests/unit/gapic/asset_v1p2beta1/test_asset_service.py +++ /dev/null @@ -1,4978 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.asset_v1p2beta1.services.asset_service import AssetServiceAsyncClient -from google.cloud.asset_v1p2beta1.services.asset_service import AssetServiceClient -from google.cloud.asset_v1p2beta1.services.asset_service import transports -from google.cloud.asset_v1p2beta1.types import asset_service -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import field_mask_pb2 # type: ignore -import google.auth - - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": "service account credentials", - "principal": "service-account@example.com", -} -CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # 
pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert AssetServiceClient._get_default_mtls_endpoint(None) is None - assert AssetServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert AssetServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert AssetServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert AssetServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert AssetServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert AssetServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert AssetServiceClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert AssetServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - AssetServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert AssetServiceClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert AssetServiceClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert AssetServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - 
AssetServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert AssetServiceClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert AssetServiceClient._get_client_cert_source(None, False) is None - assert AssetServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert AssetServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert AssetServiceClient._get_client_cert_source(None, True) is mock_default_cert_source - assert AssetServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(AssetServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceClient)) -@mock.patch.object(AssetServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = AssetServiceClient._DEFAULT_UNIVERSE - default_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert AssetServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert AssetServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == AssetServiceClient.DEFAULT_MTLS_ENDPOINT - assert AssetServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert AssetServiceClient._get_api_endpoint(None, None, default_universe, "always") == AssetServiceClient.DEFAULT_MTLS_ENDPOINT - assert AssetServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == AssetServiceClient.DEFAULT_MTLS_ENDPOINT - assert AssetServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert AssetServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - AssetServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." - - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert AssetServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert AssetServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert AssetServiceClient._get_universe_domain(None, None) == AssetServiceClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - AssetServiceClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
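# --- Illustrative sketch (editorial addition, not part of the deleted
# generated file): the tests above exercise the precedence rules the client
# uses when resolving its universe domain. `resolve_universe_domain` below is
# a hypothetical stand-in for the private
# `AssetServiceClient._get_universe_domain` helper; it reproduces only the
# behavior asserted in `test__get_universe_domain`.
from typing import Optional

def resolve_universe_domain(client_option: Optional[str], env_value: Optional[str]) -> str:
    # An explicitly configured universe domain wins, but may not be empty.
    if client_option is not None:
        if not client_option:
            raise ValueError("Universe Domain cannot be an empty string.")
        return client_option
    # Otherwise fall back to the GOOGLE_CLOUD_UNIVERSE_DOMAIN environment value.
    if env_value is not None:
        return env_value
    # Finally, default to the public Google universe.
    return "googleapis.com"

# Mirrors the assertions in the test above.
assert resolve_universe_domain("foo.com", "bar.com") == "foo.com"
assert resolve_universe_domain(None, "bar.com") == "bar.com"
assert resolve_universe_domain(None, None) == "googleapis.com"
# --- end editorial sketch ---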
- -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = AssetServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - -@pytest.mark.parametrize("error_code", [401,403,404,500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = AssetServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - -@pytest.mark.parametrize("client_class,transport_name", [ - (AssetServiceClient, "grpc"), - (AssetServiceAsyncClient, "grpc_asyncio"), - (AssetServiceClient, "rest"), -]) -def test_asset_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'cloudasset.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://cloudasset.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.AssetServiceGrpcTransport, "grpc"), - (transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.AssetServiceRestTransport, "rest"), -]) -def test_asset_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (AssetServiceClient, "grpc"), - (AssetServiceAsyncClient, "grpc_asyncio"), - (AssetServiceClient, "rest"), -]) -def test_asset_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert 
client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'cloudasset.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://cloudasset.googleapis.com' - ) - - -def test_asset_service_client_get_transport_class(): - transport = AssetServiceClient.get_transport_class() - available_transports = [ - transports.AssetServiceGrpcTransport, - transports.AssetServiceRestTransport, - ] - assert transport in available_transports - - transport = AssetServiceClient.get_transport_class("grpc") - assert transport == transports.AssetServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc"), - (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (AssetServiceClient, transports.AssetServiceRestTransport, "rest"), -]) -@mock.patch.object(AssetServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceClient)) -@mock.patch.object(AssetServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceAsyncClient)) -def test_asset_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(AssetServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(AssetServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc", "true"), - (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc", "false"), - (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (AssetServiceClient, transports.AssetServiceRestTransport, "rest", "true"), - (AssetServiceClient, transports.AssetServiceRestTransport, "rest", "false"), -]) -@mock.patch.object(AssetServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceClient)) -@mock.patch.object(AssetServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceAsyncClient)) 
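-# The two patches above replace _DEFAULT_ENDPOINT_TEMPLATE (via the
-# modify_default_endpoint_template helper defined earlier in this module) so
-# that a template-derived host is distinguishable from the production default,
-# while the patch below pins GOOGLE_API_USE_MTLS_ENDPOINT to "auto", the mode
-# under test.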
-@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_asset_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
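- # With no explicit client_cert_source and no discoverable ADC certificate,
- # the client must fall back to the plain endpoint and pass
- # client_cert_source_for_mtls=None, whatever GOOGLE_API_USE_CLIENT_CERTIFICATE
- # says.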
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - AssetServiceClient, AssetServiceAsyncClient -]) -@mock.patch.object(AssetServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AssetServiceClient)) -@mock.patch.object(AssetServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AssetServiceAsyncClient)) -def test_asset_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
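- # In "auto" mode the endpoint only switches to DEFAULT_MTLS_ENDPOINT when a
- # default client certificate can actually be sourced, hence
- # has_default_client_cert_source is patched to return True below.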
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - AssetServiceClient, AssetServiceAsyncClient -]) -@mock.patch.object(AssetServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceClient)) -@mock.patch.object(AssetServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceAsyncClient)) -def test_asset_service_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = AssetServiceClient._DEFAULT_UNIVERSE - default_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
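- # Taken together, these cases pin down the endpoint precedence: an explicit
- # ClientOptions.api_endpoint always wins, then the GOOGLE_API_USE_MTLS_ENDPOINT
- # setting, and finally the default template populated with the universe domain.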
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc"), - (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (AssetServiceClient, transports.AssetServiceRestTransport, "rest"), -]) -def test_asset_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc", grpc_helpers), - (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (AssetServiceClient, transports.AssetServiceRestTransport, "rest", None), -]) -def test_asset_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
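- # The options object merely carries the path; the (patched) transport is what
- # would load it, via google.auth.load_credentials_from_file (exercised in the
- # create_channel test further below). A minimal usage sketch, assuming a
- # hypothetical local file "creds.json":
- #
- #     options = client_options.ClientOptions(credentials_file="creds.json")
- #     client = AssetServiceClient(client_options=options)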
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_asset_service_client_client_options_from_dict(): - with mock.patch('google.cloud.asset_v1p2beta1.services.asset_service.transports.AssetServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = AssetServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc", grpc_helpers), - (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_asset_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
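- # Besides verifying which credentials are used, the create_channel assertion
- # below documents the expected channel defaults: the cloud-platform default
- # scope, the cloudasset.googleapis.com:443 target, and unbounded gRPC message
- # sizes (max_send/max_receive_message_length of -1).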
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "cloudasset.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="cloudasset.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - asset_service.CreateFeedRequest, - dict, -]) -def test_create_feed(request_type, transport: str = 'grpc'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_feed), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - ) - response = client.create_feed(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = asset_service.CreateFeedRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] - assert response.content_type == asset_service.ContentType.RESOURCE - - -def test_create_feed_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = asset_service.CreateFeedRequest( - parent='parent_value', - feed_id='feed_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_feed), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
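- # CreateFeedRequest carries no UUID4-autopopulated fields, so the assertion
- # that follows simply checks the request passes through unchanged with the
- # explicitly populated parent and feed_id.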
- client.create_feed(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.CreateFeedRequest( - parent='parent_value', - feed_id='feed_id_value', - ) - -def test_create_feed_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_feed in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_feed] = mock_rpc - request = {} - client.create_feed(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_feed(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_feed in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_feed] = mock_rpc - - request = {} - await client.create_feed(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_feed(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.CreateFeedRequest): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_feed), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - )) - response = await client.create_feed(request) - - # Establish that the underlying gRPC stub method was called. 
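- # FakeUnaryUnaryCall wraps the designated Feed in an awaitable that mimics
- # the grpc.aio unary-unary call object, so awaiting client.create_feed()
- # resolves to the message itself.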
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = asset_service.CreateFeedRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] - assert response.content_type == asset_service.ContentType.RESOURCE - - -@pytest.mark.asyncio -async def test_create_feed_async_from_dict(): - await test_create_feed_async(request_type=dict) - -def test_create_feed_field_headers(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = asset_service.CreateFeedRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_feed), - '__call__') as call: - call.return_value = asset_service.Feed() - client.create_feed(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_feed_field_headers_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = asset_service.CreateFeedRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_feed), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed()) - await client.create_feed(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_feed_flattened(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_feed), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = asset_service.Feed() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_feed( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_create_feed_flattened_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
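- # Flattened keyword arguments are a convenience layer that builds the request
- # message for the caller; combining them with a non-empty request object
- # would be ambiguous, so the generated clients raise ValueError instead.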
- with pytest.raises(ValueError): - client.create_feed( - asset_service.CreateFeedRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_create_feed_flattened_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_feed), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = asset_service.Feed() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_feed( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_feed_flattened_error_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_feed( - asset_service.CreateFeedRequest(), - parent='parent_value', - ) - - -@pytest.mark.parametrize("request_type", [ - asset_service.GetFeedRequest, - dict, -]) -def test_get_feed(request_type, transport: str = 'grpc'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_feed), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - ) - response = client.get_feed(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = asset_service.GetFeedRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] - assert response.content_type == asset_service.ContentType.RESOURCE - - -def test_get_feed_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = asset_service.GetFeedRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
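- # Note the patching pattern used throughout these tests: '__call__' is
- # patched on type(client.transport.get_feed), i.e. on the multicallable's
- # class, so the fake is hit regardless of how the wrapped RPC invokes it.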
- with mock.patch.object( - type(client.transport.get_feed), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_feed(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.GetFeedRequest( - name='name_value', - ) - -def test_get_feed_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_feed in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_feed] = mock_rpc - request = {} - client.get_feed(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_feed(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_feed in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_feed] = mock_rpc - - request = {} - await client.get_feed(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_feed(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.GetFeedRequest): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_feed), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - )) - response = await client.get_feed(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = asset_service.GetFeedRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] - assert response.content_type == asset_service.ContentType.RESOURCE - - -@pytest.mark.asyncio -async def test_get_feed_async_from_dict(): - await test_get_feed_async(request_type=dict) - -def test_get_feed_field_headers(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = asset_service.GetFeedRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_feed), - '__call__') as call: - call.return_value = asset_service.Feed() - client.get_feed(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_feed_field_headers_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = asset_service.GetFeedRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_feed), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed()) - await client.get_feed(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_feed_flattened(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_feed), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = asset_service.Feed() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_feed( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
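- # The flattened `name` keyword should have been copied into a fresh
- # GetFeedRequest before the RPC was issued; the assertions below read it
- # back off the captured request object.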
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_feed_flattened_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_feed( - asset_service.GetFeedRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_feed_flattened_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_feed), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = asset_service.Feed() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_feed( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_feed_flattened_error_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_feed( - asset_service.GetFeedRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - asset_service.ListFeedsRequest, - dict, -]) -def test_list_feeds(request_type, transport: str = 'grpc'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_feeds), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = asset_service.ListFeedsResponse( - ) - response = client.list_feeds(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = asset_service.ListFeedsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.ListFeedsResponse) - - -def test_list_feeds_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = asset_service.ListFeedsRequest( - parent='parent_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_feeds), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_feeds(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.ListFeedsRequest( - parent='parent_value', - ) - -def test_list_feeds_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_feeds in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_feeds] = mock_rpc - request = {} - client.list_feeds(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_feeds(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_feeds_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_feeds in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_feeds] = mock_rpc - - request = {} - await client.list_feeds(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_feeds(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_feeds_async(transport: str = 'grpc_asyncio', request_type=asset_service.ListFeedsRequest): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_feeds), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListFeedsResponse( - )) - response = await client.list_feeds(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = asset_service.ListFeedsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.ListFeedsResponse) - - -@pytest.mark.asyncio -async def test_list_feeds_async_from_dict(): - await test_list_feeds_async(request_type=dict) - -def test_list_feeds_field_headers(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = asset_service.ListFeedsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_feeds), - '__call__') as call: - call.return_value = asset_service.ListFeedsResponse() - client.list_feeds(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_feeds_field_headers_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = asset_service.ListFeedsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_feeds), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListFeedsResponse()) - await client.list_feeds(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_feeds_flattened(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_feeds), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = asset_service.ListFeedsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_feeds( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_feeds_flattened_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_feeds( - asset_service.ListFeedsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_feeds_flattened_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_feeds), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = asset_service.ListFeedsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListFeedsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_feeds( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_feeds_flattened_error_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_feeds( - asset_service.ListFeedsRequest(), - parent='parent_value', - ) - - -@pytest.mark.parametrize("request_type", [ - asset_service.UpdateFeedRequest, - dict, -]) -def test_update_feed(request_type, transport: str = 'grpc'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_feed), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - ) - response = client.update_feed(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = asset_service.UpdateFeedRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] - assert response.content_type == asset_service.ContentType.RESOURCE - - -def test_update_feed_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = asset_service.UpdateFeedRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_feed), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_feed(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.UpdateFeedRequest( - ) - -def test_update_feed_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_feed in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_feed] = mock_rpc - request = {} - client.update_feed(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_feed(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_feed in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_feed] = mock_rpc - - request = {} - await client.update_feed(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.update_feed(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.UpdateFeedRequest): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_feed), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - )) - response = await client.update_feed(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = asset_service.UpdateFeedRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] - assert response.content_type == asset_service.ContentType.RESOURCE - - -@pytest.mark.asyncio -async def test_update_feed_async_from_dict(): - await test_update_feed_async(request_type=dict) - -def test_update_feed_field_headers(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = asset_service.UpdateFeedRequest() - - request.feed.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_feed), - '__call__') as call: - call.return_value = asset_service.Feed() - client.update_feed(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'feed.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_feed_field_headers_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = asset_service.UpdateFeedRequest() - - request.feed.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_feed), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed()) - await client.update_feed(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'feed.name=name_value', - ) in kw['metadata'] - - -def test_update_feed_flattened(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_feed), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = asset_service.Feed() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_feed( - feed=asset_service.Feed(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
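- # Unlike the name/parent cases above, the flattened argument here is a whole
- # Feed message; the equality check below compares the nested message, just as
- # the routing header in the field-header tests is derived from the nested
- # feed.name field ('feed.name=name_value').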
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].feed
-        mock_val = asset_service.Feed(name='name_value')
-        assert arg == mock_val
-
-
-def test_update_feed_flattened_error():
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.update_feed(
-            asset_service.UpdateFeedRequest(),
-            feed=asset_service.Feed(name='name_value'),
-        )
-
-@pytest.mark.asyncio
-async def test_update_feed_flattened_async():
-    client = AssetServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_feed),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.update_feed(
-            feed=asset_service.Feed(name='name_value'),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].feed
-        mock_val = asset_service.Feed(name='name_value')
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_feed_flattened_error_async():
-    client = AssetServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.update_feed(
-            asset_service.UpdateFeedRequest(),
-            feed=asset_service.Feed(name='name_value'),
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    asset_service.DeleteFeedRequest,
-    dict,
-])
-def test_delete_feed(request_type, transport: str = 'grpc'):
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_feed),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        response = client.delete_feed(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = asset_service.DeleteFeedRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-def test_delete_feed_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
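-    # Editor's note (my reading of AIP-4235): the generated client fills
-    # annotated request-ID fields with a fresh UUID4 when the caller leaves
-    # them unset; the equality assertion below also checks that explicitly
-    # set fields survive that auto-population untouched.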
- request = asset_service.DeleteFeedRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_feed), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_feed(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.DeleteFeedRequest( - name='name_value', - ) - -def test_delete_feed_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_feed in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_feed] = mock_rpc - request = {} - client.delete_feed(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_feed(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_feed in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_feed] = mock_rpc - - request = {} - await client.delete_feed(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_feed(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.DeleteFeedRequest): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_feed), - '__call__') as call: - # Designate an appropriate return value for the call. 
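-        # Editor's note: DeleteFeed returns google.protobuf.Empty, which the
-        # Python client surfaces as None, so the fake awaitable call wraps
-        # None rather than a response message.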
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_feed(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = asset_service.DeleteFeedRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_feed_async_from_dict(): - await test_delete_feed_async(request_type=dict) - -def test_delete_feed_field_headers(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = asset_service.DeleteFeedRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_feed), - '__call__') as call: - call.return_value = None - client.delete_feed(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_feed_field_headers_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = asset_service.DeleteFeedRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_feed), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_feed(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_feed_flattened(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_feed), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_feed( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_feed_flattened_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
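-    # Editor's note: flattened keyword arguments are sugar that the generated
-    # client uses to build the request message itself, so combining them with
-    # an explicit request object would be ambiguous; the client rejects the
-    # mix up front with a ValueError rather than guessing.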
-    with pytest.raises(ValueError):
-        client.delete_feed(
-            asset_service.DeleteFeedRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_delete_feed_flattened_async():
-    client = AssetServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_feed),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.delete_feed(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_feed_flattened_error_async():
-    client = AssetServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.delete_feed(
-            asset_service.DeleteFeedRequest(),
-            name='name_value',
-        )
-
-
-def test_create_feed_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = AssetServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.create_feed in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.create_feed] = mock_rpc
-
-        request = {}
-        client.create_feed(request)
-
-        # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1 - - client.create_feed(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_feed_rest_required_fields(request_type=asset_service.CreateFeedRequest): - transport_class = transports.AssetServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["feed_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_feed._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - jsonified_request["feedId"] = 'feed_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_feed._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "feedId" in jsonified_request - assert jsonified_request["feedId"] == 'feed_id_value' - - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = asset_service.Feed() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
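-            # Editor's note (assumption about google-api-core behavior):
-            # path_template.transcode normally maps the request onto the http
-            # rule's URI, verb, body, and query params; the canned result below
-            # short-circuits that mapping so the test can pin down exactly what
-            # reaches the session-level HTTP request.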
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = asset_service.Feed.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.create_feed(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_create_feed_rest_unset_required_fields():
-    transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.create_feed._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("parent", "feedId", "feed", )))
-
-
-def test_create_feed_rest_flattened():
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = asset_service.Feed()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'parent': 'sample1/sample2'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            parent='parent_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = asset_service.Feed.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.create_feed(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1p2beta1/{parent=*/*}/feeds" % client.transport._host, args[1])
-
-
-def test_create_feed_rest_flattened_error(transport: str = 'rest'):
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - client.create_feed( - asset_service.CreateFeedRequest(), - parent='parent_value', - ) - - -def test_get_feed_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_feed in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_feed] = mock_rpc - - request = {} - client.get_feed(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_feed(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_feed_rest_required_fields(request_type=asset_service.GetFeedRequest): - transport_class = transports.AssetServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_feed._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_feed._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = asset_service.Feed() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
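-            # Editor's note: unlike the create_feed variant above, this canned
-            # transcode result carries no 'body' entry, matching GetFeed's GET
-            # binding, where every request field has to travel as a query
-            # parameter.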
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = asset_service.Feed.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.get_feed(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_get_feed_rest_unset_required_fields():
-    transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.get_feed._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-def test_get_feed_rest_flattened():
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = asset_service.Feed()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'name': 'sample1/sample2/feeds/sample3'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            name='name_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = asset_service.Feed.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.get_feed(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1p2beta1/{name=*/*/feeds/*}" % client.transport._host, args[1])
-
-
-def test_get_feed_rest_flattened_error(transport: str = 'rest'):
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - client.get_feed( - asset_service.GetFeedRequest(), - name='name_value', - ) - - -def test_list_feeds_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_feeds in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_feeds] = mock_rpc - - request = {} - client.list_feeds(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_feeds(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_feeds_rest_required_fields(request_type=asset_service.ListFeedsRequest): - transport_class = transports.AssetServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_feeds._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_feeds._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = asset_service.ListFeedsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = asset_service.ListFeedsResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.list_feeds(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_list_feeds_rest_unset_required_fields():
-    transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.list_feeds._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("parent", )))
-
-
-def test_list_feeds_rest_flattened():
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = asset_service.ListFeedsResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'parent': 'sample1/sample2'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            parent='parent_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = asset_service.ListFeedsResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.list_feeds(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1p2beta1/{parent=*/*}/feeds" % client.transport._host, args[1])
-
-
-def test_list_feeds_rest_flattened_error(transport: str = 'rest'):
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - client.list_feeds( - asset_service.ListFeedsRequest(), - parent='parent_value', - ) - - -def test_update_feed_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_feed in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_feed] = mock_rpc - - request = {} - client.update_feed(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_feed(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_feed_rest_required_fields(request_type=asset_service.UpdateFeedRequest): - transport_class = transports.AssetServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_feed._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_feed._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = asset_service.Feed() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
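-            # Editor's note: UpdateFeed transcodes to PATCH with the message as
-            # the request body; it has no required URL-path string field, which
-            # is why request_init above stays empty.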
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "patch",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = asset_service.Feed.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.update_feed(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_update_feed_rest_unset_required_fields():
-    transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.update_feed._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("feed", "updateMask", )))
-
-
-def test_update_feed_rest_flattened():
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = asset_service.Feed()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'feed': {'name': 'sample1/sample2/feeds/sample3'}}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            feed=asset_service.Feed(name='name_value'),
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = asset_service.Feed.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.update_feed(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1p2beta1/{feed.name=*/*/feeds/*}" % client.transport._host, args[1])
-
-
-def test_update_feed_rest_flattened_error(transport: str = 'rest'):
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - client.update_feed( - asset_service.UpdateFeedRequest(), - feed=asset_service.Feed(name='name_value'), - ) - - -def test_delete_feed_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_feed in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_feed] = mock_rpc - - request = {} - client.delete_feed(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_feed(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_feed_rest_required_fields(request_type=asset_service.DeleteFeedRequest): - transport_class = transports.AssetServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_feed._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_feed._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "delete",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-            json_return_value = ''
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.delete_feed(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_delete_feed_rest_unset_required_fields():
-    transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.delete_feed._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-def test_delete_feed_rest_flattened():
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = None
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'name': 'sample1/sample2/feeds/sample3'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            name='name_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        json_return_value = ''
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.delete_feed(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1p2beta1/{name=*/*/feeds/*}" % client.transport._host, args[1])
-
-
-def test_delete_feed_rest_flattened_error(transport: str = 'rest'):
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_feed(
-            asset_service.DeleteFeedRequest(),
-            name='name_value',
-        )
-
-
-def test_credentials_transport_error():
-    # It is an error to provide credentials and a transport instance.
-    transport = transports.AssetServiceGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    with pytest.raises(ValueError):
-        client = AssetServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport=transport,
-        )
-
-    # It is an error to provide a credentials file and a transport instance.
-    transport = transports.AssetServiceGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    with pytest.raises(ValueError):
-        client = AssetServiceClient(
-            client_options={"credentials_file": "credentials.json"},
-            transport=transport,
-        )
-
-    # It is an error to provide an api_key and a transport instance.
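-    # Editor's note (assumption): these mutual-exclusivity checks live in the
-    # client constructor itself, before any transport wiring happens, which is
-    # why a plain ValueError (not a transport-level error) is expected.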
- transport = transports.AssetServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = AssetServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = AssetServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.AssetServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = AssetServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.AssetServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = AssetServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.AssetServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.AssetServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.AssetServiceGrpcTransport, - transports.AssetServiceGrpcAsyncIOTransport, - transports.AssetServiceRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = AssetServiceClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_feed_empty_call_grpc(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_feed), - '__call__') as call: - call.return_value = asset_service.Feed() - client.create_feed(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.CreateFeedRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_feed_empty_call_grpc(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.get_feed), - '__call__') as call: - call.return_value = asset_service.Feed() - client.get_feed(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.GetFeedRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_feeds_empty_call_grpc(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_feeds), - '__call__') as call: - call.return_value = asset_service.ListFeedsResponse() - client.list_feeds(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.ListFeedsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_feed_empty_call_grpc(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_feed), - '__call__') as call: - call.return_value = asset_service.Feed() - client.update_feed(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.UpdateFeedRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_feed_empty_call_grpc(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_feed), - '__call__') as call: - call.return_value = None - client.delete_feed(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.DeleteFeedRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = AssetServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_feed_empty_call_grpc_asyncio(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_feed), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - )) - await client.create_feed(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.CreateFeedRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_feed_empty_call_grpc_asyncio(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_feed), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - )) - await client.get_feed(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.GetFeedRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_feeds_empty_call_grpc_asyncio(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_feeds), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListFeedsResponse( - )) - await client.list_feeds(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.ListFeedsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_feed_empty_call_grpc_asyncio(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_feed), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - )) - await client.update_feed(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.UpdateFeedRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_delete_feed_empty_call_grpc_asyncio(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_feed), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_feed(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.DeleteFeedRequest() - - assert args[0] == request_msg - - -def test_transport_kind_rest(): - transport = AssetServiceClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" - - -def test_create_feed_rest_bad_request(request_type=asset_service.CreateFeedRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_feed(request) - - -@pytest.mark.parametrize("request_type", [ - asset_service.CreateFeedRequest, - dict, -]) -def test_create_feed_rest_call_success(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = asset_service.Feed.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_feed(request) - - # Establish that the response is the type that we expect. 
-    assert isinstance(response, asset_service.Feed)
-    assert response.name == 'name_value'
-    assert response.asset_names == ['asset_names_value']
-    assert response.asset_types == ['asset_types_value']
-    assert response.content_type == asset_service.ContentType.RESOURCE
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_create_feed_rest_interceptors(null_interceptor):
-    transport = transports.AssetServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(),
-    )
-    client = AssetServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-         mock.patch.object(path_template, "transcode") as transcode, \
-         mock.patch.object(transports.AssetServiceRestInterceptor, "post_create_feed") as post, \
-         mock.patch.object(transports.AssetServiceRestInterceptor, "post_create_feed_with_metadata") as post_with_metadata, \
-         mock.patch.object(transports.AssetServiceRestInterceptor, "pre_create_feed") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = asset_service.CreateFeedRequest.pb(asset_service.CreateFeedRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = asset_service.Feed.to_json(asset_service.Feed())
-        req.return_value.content = return_value
-
-        request = asset_service.CreateFeedRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = asset_service.Feed()
-        post_with_metadata.return_value = asset_service.Feed(), metadata
-
-        client.create_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_get_feed_rest_bad_request(request_type=asset_service.GetFeedRequest):
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'sample1/sample2/feeds/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.get_feed(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    asset_service.GetFeedRequest,
-    dict,
-])
-def test_get_feed_rest_call_success(request_type):
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'sample1/sample2/feeds/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = asset_service.Feed.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_feed(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] - assert response.content_type == asset_service.ContentType.RESOURCE - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_feed_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_get_feed") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_get_feed_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_get_feed") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = asset_service.GetFeedRequest.pb(asset_service.GetFeedRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.Feed.to_json(asset_service.Feed()) - req.return_value.content = return_value - - request = asset_service.GetFeedRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.Feed() - post_with_metadata.return_value = asset_service.Feed(), metadata - - client.get_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_feeds_rest_bad_request(request_type=asset_service.ListFeedsRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_feeds(request) - - -@pytest.mark.parametrize("request_type", [ - asset_service.ListFeedsRequest, - dict, -]) -def test_list_feeds_rest_call_success(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.ListFeedsResponse( - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = asset_service.ListFeedsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_feeds(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.ListFeedsResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_feeds_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_feeds") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_feeds_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_list_feeds") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = asset_service.ListFeedsRequest.pb(asset_service.ListFeedsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.ListFeedsResponse.to_json(asset_service.ListFeedsResponse()) - req.return_value.content = return_value - - request = asset_service.ListFeedsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.ListFeedsResponse() - post_with_metadata.return_value = asset_service.ListFeedsResponse(), metadata - - client.list_feeds(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - 
post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_feed_rest_bad_request(request_type=asset_service.UpdateFeedRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'feed': {'name': 'sample1/sample2/feeds/sample3'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_feed(request) - - -@pytest.mark.parametrize("request_type", [ - asset_service.UpdateFeedRequest, - dict, -]) -def test_update_feed_rest_call_success(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'feed': {'name': 'sample1/sample2/feeds/sample3'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = asset_service.Feed.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_feed(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] - assert response.content_type == asset_service.ContentType.RESOURCE - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_feed_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_update_feed") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_update_feed_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_update_feed") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = asset_service.UpdateFeedRequest.pb(asset_service.UpdateFeedRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.Feed.to_json(asset_service.Feed()) - req.return_value.content = return_value - - request = asset_service.UpdateFeedRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.Feed() - post_with_metadata.return_value = asset_service.Feed(), metadata - - client.update_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_feed_rest_bad_request(request_type=asset_service.DeleteFeedRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/feeds/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_feed(request) - - -@pytest.mark.parametrize("request_type", [ - asset_service.DeleteFeedRequest, - dict, -]) -def test_delete_feed_rest_call_success(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/feeds/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_feed(request) - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_feed_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_delete_feed") as pre: - pre.assert_not_called() - pb_message = asset_service.DeleteFeedRequest.pb(asset_service.DeleteFeedRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = asset_service.DeleteFeedRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'sample1/sample2/operations/sample3/sample4'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) -def test_get_operation_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'sample1/sample2/operations/sample3/sample4'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_initialize_client_w_rest(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_feed_empty_call_rest(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_feed), - '__call__') as call: - client.create_feed(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.CreateFeedRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_feed_empty_call_rest(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_feed), - '__call__') as call: - client.get_feed(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.GetFeedRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_feeds_empty_call_rest(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_feeds), - '__call__') as call: - client.list_feeds(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.ListFeedsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_feed_empty_call_rest(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_feed), - '__call__') as call: - client.update_feed(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.UpdateFeedRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_delete_feed_empty_call_rest(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_feed), - '__call__') as call: - client.delete_feed(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.DeleteFeedRequest() - - assert args[0] == request_msg - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.AssetServiceGrpcTransport, - ) - -def test_asset_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.AssetServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_asset_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.asset_v1p2beta1.services.asset_service.transports.AssetServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.AssetServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'create_feed', - 'get_feed', - 'list_feeds', - 'update_feed', - 'delete_feed', - 'get_operation', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_asset_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.asset_v1p2beta1.services.asset_service.transports.AssetServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.AssetServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_asset_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.asset_v1p2beta1.services.asset_service.transports.AssetServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.AssetServiceTransport() - adc.assert_called_once() - - -def test_asset_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - AssetServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.AssetServiceGrpcTransport, - transports.AssetServiceGrpcAsyncIOTransport, - ], -) -def test_asset_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.AssetServiceGrpcTransport, - transports.AssetServiceGrpcAsyncIOTransport, - transports.AssetServiceRestTransport, - ], -) -def test_asset_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.AssetServiceGrpcTransport, grpc_helpers), - (transports.AssetServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_asset_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "cloudasset.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="cloudasset.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.AssetServiceGrpcTransport, transports.AssetServiceGrpcAsyncIOTransport]) -def test_asset_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. 
- with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_asset_service_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.AssetServiceRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_asset_service_host_no_port(transport_name): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='cloudasset.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'cloudasset.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://cloudasset.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_asset_service_host_with_port(transport_name): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='cloudasset.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'cloudasset.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://cloudasset.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_asset_service_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = AssetServiceClient( - credentials=creds1, - transport=transport_name, - ) - client2 = AssetServiceClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.create_feed._session - session2 = client2.transport.create_feed._session - assert session1 != session2 - session1 = client1.transport.get_feed._session - session2 = client2.transport.get_feed._session - assert session1 != session2 - session1 = client1.transport.list_feeds._session - session2 = client2.transport.list_feeds._session - assert session1 != session2 - session1 = client1.transport.update_feed._session - session2 = client2.transport.update_feed._session - assert session1 != session2 - session1 = 
client1.transport.delete_feed._session - session2 = client2.transport.delete_feed._session - assert session1 != session2 -def test_asset_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.AssetServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_asset_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.AssetServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.AssetServiceGrpcTransport, transports.AssetServiceGrpcAsyncIOTransport]) -def test_asset_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.AssetServiceGrpcTransport, transports.AssetServiceGrpcAsyncIOTransport]) -def test_asset_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_feed_path(): - project = "squid" - feed = "clam" - expected = "projects/{project}/feeds/{feed}".format(project=project, feed=feed, ) - actual = AssetServiceClient.feed_path(project, feed) - assert expected == actual - - -def test_parse_feed_path(): - expected = { - "project": "whelk", - "feed": "octopus", - } - path = AssetServiceClient.feed_path(**expected) - - # Check that the path construction is reversible. - actual = AssetServiceClient.parse_feed_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "oyster" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = AssetServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "nudibranch", - } - path = AssetServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = AssetServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder, ) - actual = AssetServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "mussel", - } - path = AssetServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = AssetServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "winkle" - expected = "organizations/{organization}".format(organization=organization, ) - actual = AssetServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nautilus", - } - path = AssetServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. 
- actual = AssetServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "scallop" - expected = "projects/{project}".format(project=project, ) - actual = AssetServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "abalone", - } - path = AssetServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = AssetServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "squid" - location = "clam" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = AssetServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "whelk", - "location": "octopus", - } - path = AssetServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = AssetServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.AssetServiceTransport, '_prep_wrapped_messages') as prep: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.AssetServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = AssetServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_get_operation(transport: str = "grpc"): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_transport_close_grpc(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close_rest(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. - with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (AssetServiceClient, transports.AssetServiceGrpcTransport), - (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/.coveragerc b/owl-bot-staging/google-cloud-asset/v1p5beta1/.coveragerc deleted file mode 100644 index 801f6d8a1da7..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p5beta1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/asset/__init__.py - google/cloud/asset/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/.flake8 b/owl-bot-staging/google-cloud-asset/v1p5beta1/.flake8 deleted file mode 100644 index 29227d4cf419..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p5beta1/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use 
this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Generated by synthtool. DO NOT EDIT!
-[flake8]
-ignore = E203, E266, E501, W503
-exclude =
-  # Exclude generated code.
-  **/proto/**
-  **/gapic/**
-  **/services/**
-  **/types/**
-  *_pb2.py
-
-  # Standard linting exemptions.
-  **/.nox/**
-  __pycache__,
-  .git,
-  *.pyc,
-  conf.py
diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/MANIFEST.in b/owl-bot-staging/google-cloud-asset/v1p5beta1/MANIFEST.in
deleted file mode 100644
index 7000a398c787..000000000000
--- a/owl-bot-staging/google-cloud-asset/v1p5beta1/MANIFEST.in
+++ /dev/null
@@ -1,2 +0,0 @@
-recursive-include google/cloud/asset *.py
-recursive-include google/cloud/asset_v1p5beta1 *.py
diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/README.rst b/owl-bot-staging/google-cloud-asset/v1p5beta1/README.rst
deleted file mode 100644
index a10b3ef1e958..000000000000
--- a/owl-bot-staging/google-cloud-asset/v1p5beta1/README.rst
+++ /dev/null
@@ -1,143 +0,0 @@
-Python Client for Google Cloud Asset API
-=================================================
-
-Quick Start
------------
-
-In order to use this library, you first need to go through the following steps:
-
-1. `Select or create a Cloud Platform project.`_
-2. `Enable billing for your project.`_
-3. Enable the Google Cloud Asset API.
-4. `Setup Authentication.`_
-
-.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
-.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
-.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
-
-Installation
-~~~~~~~~~~~~
-
-Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
-create isolated Python environments. The basic problem it addresses is one of
-dependencies and versions, and indirectly permissions.
-
-With `virtualenv`_, it's possible to install this library without needing system
-install permissions, and without clashing with the installed system
-dependencies.
-
-.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
-
-
-Mac/Linux
-^^^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    source <your-env>/bin/activate
-    <your-env>/bin/pip install /path/to/library
-
-
-Windows
-^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    <your-env>\Scripts\activate
-    <your-env>\Scripts\pip.exe install \path\to\library
-
-
-Logging
--------
-
-This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes.
-Note the following:
-
-#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging.
-#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**.
-#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below.
-
-
-Simple, environment-based configuration
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google
-logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged
-messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging
-event.
-
-A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log.
-
-- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc.
-- Invalid logging scopes: :code:`foo`, :code:`123`, etc.
-
-**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers.
-
-
-Examples
-^^^^^^^^
-
-- Enabling the default handler for all Google-based loggers
-
-.. code-block:: console
-
-    export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google
-
-- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`):
-
-.. code-block:: console
-
-    export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1
-
-
-Advanced, code-based configuration
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-You can also configure a valid logging scope using Python's standard `logging` mechanism.
-
-
-Examples
-^^^^^^^^
-
-- Configuring a handler for all Google-based loggers
-
-.. code-block:: python
-
-    import logging
-
-    from google.cloud.translate_v3 import translate
-
-    base_logger = logging.getLogger("google")
-    base_logger.addHandler(logging.StreamHandler())
-    base_logger.setLevel(logging.DEBUG)
-
-- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`):
-
-.. code-block:: python
-
-    import logging
-
-    from google.cloud.translate_v3 import translate
-
-    base_logger = logging.getLogger("google.cloud.library_v1")
-    base_logger.addHandler(logging.StreamHandler())
-    base_logger.setLevel(logging.DEBUG)
-
-
-Logging details
-~~~~~~~~~~~~~~~
-
-#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root
-   logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set
-   :code:`logging.getLogger("google").propagate = True` in your code.
-#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for
-   one library, but decide you need to also set up environment-based logging configuration for another library.
-
-   #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual
-      if the code-based configuration gets applied first.
-
-#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get
-   executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured.
-   (This is the reason for 2.i. above.)
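
Combining the points above: a minimal sketch, using only the standard :code:`logging` module, that both attaches a handler to the :code:`google` logger and opts back in to propagating its events to the root logger.

.. code-block:: python

    import logging

    # Handle DEBUG-and-higher events from all Google loggers by writing
    # them to stderr ("google" is the top of the logging-scope namespace).
    base_logger = logging.getLogger("google")
    base_logger.addHandler(logging.StreamHandler())
    base_logger.setLevel(logging.DEBUG)

    # By default the "google" logger does not propagate upward; re-enable
    # propagation explicitly if root-logger handlers should also see events.
    base_logger.propagate = True
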
diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/docs/_static/custom.css b/owl-bot-staging/google-cloud-asset/v1p5beta1/docs/_static/custom.css deleted file mode 100644 index 06423be0b592..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p5beta1/docs/_static/custom.css +++ /dev/null @@ -1,3 +0,0 @@ -dl.field-list > dt { - min-width: 100px -} diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/docs/asset_v1p5beta1/asset_service.rst b/owl-bot-staging/google-cloud-asset/v1p5beta1/docs/asset_v1p5beta1/asset_service.rst deleted file mode 100644 index fa5fcb62b09e..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p5beta1/docs/asset_v1p5beta1/asset_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -AssetService ------------------------------- - -.. automodule:: google.cloud.asset_v1p5beta1.services.asset_service - :members: - :inherited-members: - -.. automodule:: google.cloud.asset_v1p5beta1.services.asset_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/docs/asset_v1p5beta1/services_.rst b/owl-bot-staging/google-cloud-asset/v1p5beta1/docs/asset_v1p5beta1/services_.rst deleted file mode 100644 index 9383f20c8183..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p5beta1/docs/asset_v1p5beta1/services_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Asset v1p5beta1 API -============================================= -.. toctree:: - :maxdepth: 2 - - asset_service diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/docs/asset_v1p5beta1/types_.rst b/owl-bot-staging/google-cloud-asset/v1p5beta1/docs/asset_v1p5beta1/types_.rst deleted file mode 100644 index 0d1431220d62..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p5beta1/docs/asset_v1p5beta1/types_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Asset v1p5beta1 API -========================================== - -.. automodule:: google.cloud.asset_v1p5beta1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/docs/conf.py b/owl-bot-staging/google-cloud-asset/v1p5beta1/docs/conf.py deleted file mode 100644 index fa7647914fb5..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p5beta1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-asset documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. 
-sys.path.insert(0, os.path.abspath(".."))
-
-__version__ = "0.1.0"
-
-# -- General configuration ------------------------------------------------
-
-# If your documentation needs a minimal Sphinx version, state it here.
-needs_sphinx = "4.0.1"
-
-# Add any Sphinx extension module names here, as strings. They can be
-# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
-# ones.
-extensions = [
-    "sphinx.ext.autodoc",
-    "sphinx.ext.autosummary",
-    "sphinx.ext.intersphinx",
-    "sphinx.ext.coverage",
-    "sphinx.ext.napoleon",
-    "sphinx.ext.todo",
-    "sphinx.ext.viewcode",
-]
-
-# autodoc/autosummary flags
-autoclass_content = "both"
-autodoc_default_flags = ["members"]
-autosummary_generate = True
-
-
-# Add any paths that contain templates here, relative to this directory.
-templates_path = ["_templates"]
-
-# Allow markdown includes (so releases.md can include CHANGELOG.md)
-# http://www.sphinx-doc.org/en/master/markdown.html
-source_parsers = {".md": "recommonmark.parser.CommonMarkParser"}
-
-# The suffix(es) of source filenames.
-# You can specify multiple suffix as a list of string:
-source_suffix = [".rst", ".md"]
-
-# The encoding of source files.
-# source_encoding = 'utf-8-sig'
-
-# The root toctree document.
-root_doc = "index"
-
-# General information about the project.
-project = u"google-cloud-asset"
-copyright = u"2023, Google, LLC"
-author = u"Google APIs" # TODO: autogenerate this bit
-
-# The version info for the project you're documenting, acts as replacement for
-# |version| and |release|, also used in various other places throughout the
-# built documents.
-#
-# The full version, including alpha/beta/rc tags.
-release = __version__
-# The short X.Y version.
-version = ".".join(release.split(".")[0:2])
-
-# The language for content autogenerated by Sphinx. Refer to documentation
-# for a list of supported languages.
-#
-# This is also used if you do content translation via gettext catalogs.
-# Usually you set "language" from the command line for these cases.
-language = 'en'
-
-# There are two options for replacing |today|: either, you set today to some
-# non-false value, then it is used:
-# today = ''
-# Else, today_fmt is used as the format for a strftime call.
-# today_fmt = '%B %d, %Y'
-
-# List of patterns, relative to source directory, that match files and
-# directories to ignore when looking for source files.
-exclude_patterns = ["_build"]
-
-# The reST default role (used for this markup: `text`) to use for all
-# documents.
-# default_role = None
-
-# If true, '()' will be appended to :func: etc. cross-reference text.
-# add_function_parentheses = True
-
-# If true, the current module name will be prepended to all description
-# unit titles (such as .. function::).
-# add_module_names = True
-
-# If true, sectionauthor and moduleauthor directives will be shown in the
-# output. They are ignored by default.
-# show_authors = False
-
-# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = "sphinx"
-
-# A list of ignored prefixes for module index sorting.
-# modindex_common_prefix = []
-
-# If true, keep warnings as "system message" paragraphs in the built documents.
-# keep_warnings = False
-
-# If true, `todo` and `todoList` produce output, else they produce nothing.
-todo_include_todos = True
-
-
-# -- Options for HTML output ----------------------------------------------
-
-# The theme to use for HTML and HTML Help pages. See the documentation for
-# a list of builtin themes.
-html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. 
-# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-asset-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-asset.tex", - u"google-cloud-asset Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-asset", - u"Google Cloud Asset Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-asset", - u"google-cloud-asset Documentation", - author, - "google-cloud-asset", - "GAPIC library for Google Cloud Asset API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. 
-intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/docs/index.rst b/owl-bot-staging/google-cloud-asset/v1p5beta1/docs/index.rst deleted file mode 100644 index e0d9690fc616..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p5beta1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - asset_v1p5beta1/services_ - asset_v1p5beta1/types_ diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset/__init__.py b/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset/__init__.py deleted file mode 100644 index 7b0fc8ef5c3e..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset/__init__.py +++ /dev/null @@ -1,37 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.asset import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.asset_v1p5beta1.services.asset_service.client import AssetServiceClient -from google.cloud.asset_v1p5beta1.services.asset_service.async_client import AssetServiceAsyncClient - -from google.cloud.asset_v1p5beta1.types.asset_service import ListAssetsRequest -from google.cloud.asset_v1p5beta1.types.asset_service import ListAssetsResponse -from google.cloud.asset_v1p5beta1.types.asset_service import ContentType -from google.cloud.asset_v1p5beta1.types.assets import Asset -from google.cloud.asset_v1p5beta1.types.assets import Resource - -__all__ = ('AssetServiceClient', - 'AssetServiceAsyncClient', - 'ListAssetsRequest', - 'ListAssetsResponse', - 'ContentType', - 'Asset', - 'Resource', -) diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset/gapic_version.py b/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset/py.typed b/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset/py.typed deleted file mode 100644 index 3dbb09a39130..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-asset package uses inline types. diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/__init__.py b/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/__init__.py deleted file mode 100644 index e7f1a5104c26..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.asset_v1p5beta1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.asset_service import AssetServiceClient -from .services.asset_service import AssetServiceAsyncClient - -from .types.asset_service import ListAssetsRequest -from .types.asset_service import ListAssetsResponse -from .types.asset_service import ContentType -from .types.assets import Asset -from .types.assets import Resource - -__all__ = ( - 'AssetServiceAsyncClient', -'Asset', -'AssetServiceClient', -'ContentType', -'ListAssetsRequest', -'ListAssetsResponse', -'Resource', -) diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/gapic_metadata.json b/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/gapic_metadata.json deleted file mode 100644 index 2c3088cd0ea6..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/gapic_metadata.json +++ /dev/null @@ -1,43 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.asset_v1p5beta1", - "protoPackage": "google.cloud.asset.v1p5beta1", - "schema": "1.0", - "services": { - "AssetService": { - "clients": { - "grpc": { - "libraryClient": "AssetServiceClient", - "rpcs": { - "ListAssets": { - "methods": [ - "list_assets" - ] - } - } - }, - "grpc-async": { - "libraryClient": "AssetServiceAsyncClient", - "rpcs": { - "ListAssets": { - "methods": [ - "list_assets" - ] - } - } - }, - "rest": { - "libraryClient": "AssetServiceClient", - "rpcs": { - "ListAssets": { - "methods": [ - "list_assets" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/gapic_version.py b/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/py.typed b/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/py.typed deleted file mode 100644 index 3dbb09a39130..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-asset package uses inline types. 
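The two ``__init__.py`` files deleted above re-export the generated client surface under both the versioned and the version-less namespace. A minimal sketch of what that import surface looks like, assuming the staged v1p5beta1 package above is the one installed (so that ``google.cloud.asset`` aliases this particular version):

.. code-block:: python

    # Sketch only: assumes the staged package above is installed, so the
    # version-less namespace re-exports the v1p5beta1 client.
    from google.cloud import asset_v1p5beta1
    from google.cloud.asset import AssetServiceClient

    # Per the two __init__.py files, both names resolve to the same class.
    print(AssetServiceClient is asset_v1p5beta1.AssetServiceClient)  # True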
diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/services/__init__.py b/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/services/__init__.py deleted file mode 100644 index 8f6cf068242c..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/services/asset_service/__init__.py b/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/services/asset_service/__init__.py deleted file mode 100644 index 1ad75a011889..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/services/asset_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import AssetServiceClient -from .async_client import AssetServiceAsyncClient - -__all__ = ( - 'AssetServiceClient', - 'AssetServiceAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/services/asset_service/async_client.py b/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/services/asset_service/async_client.py deleted file mode 100644 index 56cad908492c..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/services/asset_service/async_client.py +++ /dev/null @@ -1,392 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.asset_v1p5beta1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.cloud.asset_v1p5beta1.services.asset_service import pagers -from google.cloud.asset_v1p5beta1.types import asset_service -from google.cloud.asset_v1p5beta1.types import assets -from google.longrunning import operations_pb2 # type: ignore -from .transports.base import AssetServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import AssetServiceGrpcAsyncIOTransport -from .client import AssetServiceClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -class AssetServiceAsyncClient: - """Asset service definition.""" - - _client: AssetServiceClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = AssetServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = AssetServiceClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = AssetServiceClient._DEFAULT_UNIVERSE - - access_level_path = staticmethod(AssetServiceClient.access_level_path) - parse_access_level_path = staticmethod(AssetServiceClient.parse_access_level_path) - access_policy_path = staticmethod(AssetServiceClient.access_policy_path) - parse_access_policy_path = staticmethod(AssetServiceClient.parse_access_policy_path) - asset_path = staticmethod(AssetServiceClient.asset_path) - parse_asset_path = staticmethod(AssetServiceClient.parse_asset_path) - service_perimeter_path = staticmethod(AssetServiceClient.service_perimeter_path) - parse_service_perimeter_path = staticmethod(AssetServiceClient.parse_service_perimeter_path) - common_billing_account_path = staticmethod(AssetServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(AssetServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(AssetServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(AssetServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(AssetServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(AssetServiceClient.parse_common_organization_path) - common_project_path = staticmethod(AssetServiceClient.common_project_path) - parse_common_project_path = staticmethod(AssetServiceClient.parse_common_project_path) - common_location_path = staticmethod(AssetServiceClient.common_location_path) - parse_common_location_path = staticmethod(AssetServiceClient.parse_common_location_path) - - @classmethod - def 
from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - AssetServiceAsyncClient: The constructed client. - """ - return AssetServiceClient.from_service_account_info.__func__(AssetServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - AssetServiceAsyncClient: The constructed client. - """ - return AssetServiceClient.from_service_account_file.__func__(AssetServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return AssetServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> AssetServiceTransport: - """Returns the transport used by the client instance. - - Returns: - AssetServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. 
- """ - return self._client._universe_domain - - get_transport_class = AssetServiceClient.get_transport_class - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, AssetServiceTransport, Callable[..., AssetServiceTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the asset service async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,AssetServiceTransport,Callable[..., AssetServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the AssetServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. 
- """ - self._client = AssetServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.asset_v1p5beta1.AssetServiceAsyncClient`.", - extra = { - "serviceName": "google.cloud.asset.v1p5beta1.AssetService", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { - "serviceName": "google.cloud.asset.v1p5beta1.AssetService", - "credentialsType": None, - } - ) - - async def list_assets(self, - request: Optional[Union[asset_service.ListAssetsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListAssetsAsyncPager: - r"""Lists assets with time and resource types and returns - paged results in response. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1p5beta1 - - async def sample_list_assets(): - # Create a client - client = asset_v1p5beta1.AssetServiceAsyncClient() - - # Initialize request argument(s) - request = asset_v1p5beta1.ListAssetsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_assets(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.asset_v1p5beta1.types.ListAssetsRequest, dict]]): - The request object. ListAssets request. - parent (:class:`str`): - Required. Name of the organization or project the assets - belong to. Format: "organizations/[organization-number]" - (such as "organizations/123"), "projects/[project-id]" - (such as "projects/my-project-id"), or - "projects/[project-number]" (such as "projects/12345"). - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.asset_v1p5beta1.services.asset_service.pagers.ListAssetsAsyncPager: - ListAssets response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. 
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.ListAssetsRequest): - request = asset_service.ListAssetsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_assets] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListAssetsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def __aenter__(self) -> "AssetServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "AssetServiceAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/services/asset_service/client.py b/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/services/asset_service/client.py deleted file mode 100644 index d937838bedc1..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/services/asset_service/client.py +++ /dev/null @@ -1,784 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.asset_v1p5beta1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.cloud.asset_v1p5beta1.services.asset_service import pagers -from google.cloud.asset_v1p5beta1.types import asset_service -from google.cloud.asset_v1p5beta1.types import assets -from google.longrunning import operations_pb2 # type: ignore -from .transports.base import AssetServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import AssetServiceGrpcTransport -from .transports.grpc_asyncio import AssetServiceGrpcAsyncIOTransport -from .transports.rest import AssetServiceRestTransport - - -class AssetServiceClientMeta(type): - """Metaclass for the AssetService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[AssetServiceTransport]] - _transport_registry["grpc"] = AssetServiceGrpcTransport - _transport_registry["grpc_asyncio"] = AssetServiceGrpcAsyncIOTransport - _transport_registry["rest"] = AssetServiceRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[AssetServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class AssetServiceClient(metaclass=AssetServiceClientMeta): - """Asset service definition.""" - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
- ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = "cloudasset.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - _DEFAULT_ENDPOINT_TEMPLATE = "cloudasset.{UNIVERSE_DOMAIN}" - _DEFAULT_UNIVERSE = "googleapis.com" - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - AssetServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - AssetServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> AssetServiceTransport: - """Returns the transport used by the client instance. - - Returns: - AssetServiceTransport: The transport used by the client - instance. 
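A minimal sketch of the service-account constructors defined above; the key-file path is hypothetical:

.. code-block:: python

    # Sketch only: the key file path is hypothetical.
    from google.cloud import asset_v1p5beta1

    client = asset_v1p5beta1.AssetServiceClient.from_service_account_file(
        "/path/to/service-account.json",
    )
    # from_service_account_json is an alias of the same constructor.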
- """ - return self._transport - - @staticmethod - def access_level_path(access_policy: str,access_level: str,) -> str: - """Returns a fully-qualified access_level string.""" - return "accessPolicies/{access_policy}/accessLevels/{access_level}".format(access_policy=access_policy, access_level=access_level, ) - - @staticmethod - def parse_access_level_path(path: str) -> Dict[str,str]: - """Parses a access_level path into its component segments.""" - m = re.match(r"^accessPolicies/(?P.+?)/accessLevels/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def access_policy_path(access_policy: str,) -> str: - """Returns a fully-qualified access_policy string.""" - return "accessPolicies/{access_policy}".format(access_policy=access_policy, ) - - @staticmethod - def parse_access_policy_path(path: str) -> Dict[str,str]: - """Parses a access_policy path into its component segments.""" - m = re.match(r"^accessPolicies/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def asset_path() -> str: - """Returns a fully-qualified asset string.""" - return "*".format() - - @staticmethod - def parse_asset_path(path: str) -> Dict[str,str]: - """Parses a asset path into its component segments.""" - m = re.match(r"^.*$", path) - return m.groupdict() if m else {} - - @staticmethod - def service_perimeter_path(access_policy: str,service_perimeter: str,) -> str: - """Returns a fully-qualified service_perimeter string.""" - return "accessPolicies/{access_policy}/servicePerimeters/{service_perimeter}".format(access_policy=access_policy, service_perimeter=service_perimeter, ) - - @staticmethod - def parse_service_perimeter_path(path: str) -> Dict[str,str]: - """Parses a service_perimeter path into its component segments.""" - m = re.match(r"^accessPolicies/(?P.+?)/servicePerimeters/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m 
else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Deprecated. Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. 
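The resource-path helpers defined earlier in this class build and parse the string formats the API expects in request fields. A minimal sketch, with illustrative identifiers:

.. code-block:: python

    # Sketch only: identifiers are illustrative.
    from google.cloud import asset_v1p5beta1

    path = asset_v1p5beta1.AssetServiceClient.common_project_path("my-project")
    print(path)  # projects/my-project

    print(asset_v1p5beta1.AssetServiceClient.parse_common_project_path(path))
    # {'project': 'my-project'}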
- - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"]. - """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = AssetServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = AssetServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. 
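The endpoint precedence implemented by ``_get_api_endpoint`` above (explicit override first, then the mTLS rules, then the universe-domain template) can be seen directly; a sketch that calls the private helper purely for illustration:

.. code-block:: python

    # Sketch only: exercises a private helper for illustration; real code
    # should configure endpoints via client_options instead.
    from google.cloud.asset_v1p5beta1.services.asset_service.client import (
        AssetServiceClient,
    )

    # No override, no client cert, mTLS mode "auto": the endpoint template
    # is filled with the universe domain.
    print(AssetServiceClient._get_api_endpoint(None, None, "googleapis.com", "auto"))
    # cloudasset.googleapis.com

    # An explicit api_endpoint override always wins.
    print(AssetServiceClient._get_api_endpoint("my.endpoint.example", None, "googleapis.com", "auto"))
    # my.endpoint.example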
- """ - universe_domain = AssetServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. - """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, AssetServiceTransport, Callable[..., AssetServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the asset service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,AssetServiceTransport,Callable[..., AssetServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the AssetServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. 
Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) - - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = AssetServiceClient._read_environment_variables() - self._client_cert_source = AssetServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = AssetServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER - # Setup logging. - client_logging.initialize_logging() - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, AssetServiceTransport) - if transport_provided: - # transport is a AssetServiceTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = cast(AssetServiceTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - AssetServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[AssetServiceTransport], Callable[..., AssetServiceTransport]] = ( - AssetServiceClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., AssetServiceTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.asset_v1p5beta1.AssetServiceClient`.", - extra = { - "serviceName": "google.cloud.asset.v1p5beta1.AssetService", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.cloud.asset.v1p5beta1.AssetService", - "credentialsType": None, - } - ) - - def list_assets(self, - request: Optional[Union[asset_service.ListAssetsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListAssetsPager: - r"""Lists assets with time and resource types and returns - paged results in response. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import asset_v1p5beta1 - - def sample_list_assets(): - # Create a client - client = asset_v1p5beta1.AssetServiceClient() - - # Initialize request argument(s) - request = asset_v1p5beta1.ListAssetsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_assets(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.asset_v1p5beta1.types.ListAssetsRequest, dict]): - The request object. ListAssets request. - parent (str): - Required. Name of the organization or project the assets - belong to. 
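As the checks above enforce, a pre-built transport must carry its own credentials; passing ``credentials``, ``credentials_file``, or scopes alongside a transport instance raises ``ValueError``. A minimal sketch, using anonymous credentials purely for illustration:

.. code-block:: python

    from google.auth import credentials as ga_credentials
    from google.cloud import asset_v1p5beta1
    from google.cloud.asset_v1p5beta1.services.asset_service.transports import (
        AssetServiceGrpcTransport,
    )

    # Credentials are attached to the transport, not the client.
    transport = AssetServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    client = asset_v1p5beta1.AssetServiceClient(transport=transport)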
Format: "organizations/[organization-number]" - (such as "organizations/123"), "projects/[project-id]" - (such as "projects/my-project-id"), or - "projects/[project-number]" (such as "projects/12345"). - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.asset_v1p5beta1.services.asset_service.pagers.ListAssetsPager: - ListAssets response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, asset_service.ListAssetsRequest): - request = asset_service.ListAssetsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_assets] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListAssetsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "AssetServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! 
- """ - self.transport.close() - - - - - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "AssetServiceClient", -) diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/services/asset_service/pagers.py b/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/services/asset_service/pagers.py deleted file mode 100644 index 1ad7af531c25..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/services/asset_service/pagers.py +++ /dev/null @@ -1,167 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.asset_v1p5beta1.types import asset_service -from google.cloud.asset_v1p5beta1.types import assets - - -class ListAssetsPager: - """A pager for iterating through ``list_assets`` requests. - - This class thinly wraps an initial - :class:`google.cloud.asset_v1p5beta1.types.ListAssetsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``assets`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListAssets`` requests and continue to iterate - through the ``assets`` field on the - corresponding responses. - - All the usual :class:`google.cloud.asset_v1p5beta1.types.ListAssetsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., asset_service.ListAssetsResponse], - request: asset_service.ListAssetsRequest, - response: asset_service.ListAssetsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.asset_v1p5beta1.types.ListAssetsRequest): - The initial request object. - response (google.cloud.asset_v1p5beta1.types.ListAssetsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = asset_service.ListAssetsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[asset_service.ListAssetsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[assets.Asset]: - for page in self.pages: - yield from page.assets - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListAssetsAsyncPager: - """A pager for iterating through ``list_assets`` requests. - - This class thinly wraps an initial - :class:`google.cloud.asset_v1p5beta1.types.ListAssetsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``assets`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListAssets`` requests and continue to iterate - through the ``assets`` field on the - corresponding responses. - - All the usual :class:`google.cloud.asset_v1p5beta1.types.ListAssetsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[asset_service.ListAssetsResponse]], - request: asset_service.ListAssetsRequest, - response: asset_service.ListAssetsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.asset_v1p5beta1.types.ListAssetsRequest): - The initial request object. - response (google.cloud.asset_v1p5beta1.types.ListAssetsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
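A short sketch of both iteration styles the pagers above support — item-wise via ``__iter__`` and page-wise via the ``pages`` property (hypothetical project id, Application Default Credentials assumed):

.. code-block:: python

    from google.cloud import asset_v1p5beta1

    client = asset_v1p5beta1.AssetServiceClient()

    # Item-wise: the pager issues follow-up ListAssets requests lazily.
    for asset in client.list_assets(parent="projects/my-project-id"):
        print(asset.name)

    # Page-wise, on a fresh pager: each page is a full ListAssetsResponse.
    pager = client.list_assets(parent="projects/my-project-id")
    for page in pager.pages:
        print(len(page.assets), page.next_page_token)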
- """ - self._method = method - self._request = asset_service.ListAssetsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[asset_service.ListAssetsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[assets.Asset]: - async def async_generator(): - async for page in self.pages: - for response in page.assets: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/services/asset_service/transports/README.rst b/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/services/asset_service/transports/README.rst deleted file mode 100644 index f0467812ea79..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/services/asset_service/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`AssetServiceTransport` is the ABC for all transports. -- public child `AssetServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `AssetServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseAssetServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `AssetServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/services/asset_service/transports/__init__.py b/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/services/asset_service/transports/__init__.py deleted file mode 100644 index 315eb22bd6cb..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/services/asset_service/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import AssetServiceTransport -from .grpc import AssetServiceGrpcTransport -from .grpc_asyncio import AssetServiceGrpcAsyncIOTransport -from .rest import AssetServiceRestTransport -from .rest import AssetServiceRestInterceptor - - -# Compile a registry of transports. 
-_transport_registry = OrderedDict() # type: Dict[str, Type[AssetServiceTransport]] -_transport_registry['grpc'] = AssetServiceGrpcTransport -_transport_registry['grpc_asyncio'] = AssetServiceGrpcAsyncIOTransport -_transport_registry['rest'] = AssetServiceRestTransport - -__all__ = ( - 'AssetServiceTransport', - 'AssetServiceGrpcTransport', - 'AssetServiceGrpcAsyncIOTransport', - 'AssetServiceRestTransport', - 'AssetServiceRestInterceptor', -) diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/services/asset_service/transports/base.py b/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/services/asset_service/transports/base.py deleted file mode 100644 index c0d85f7a8384..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/services/asset_service/transports/base.py +++ /dev/null @@ -1,165 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.asset_v1p5beta1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.asset_v1p5beta1.types import asset_service -from google.longrunning import operations_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class AssetServiceTransport(abc.ABC): - """Abstract transport class for AssetService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'cloudasset.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'cloudasset.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. 
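The string keys in the ``_transport_registry`` above are what make name-based transport selection on the client work. A hedged sketch, assuming the client's standard ``transport`` property and Application Default Credentials:

.. code-block:: python

    from google.cloud import asset_v1p5beta1

    rest_client = asset_v1p5beta1.AssetServiceClient(transport="rest")
    grpc_client = asset_v1p5beta1.AssetServiceClient(transport="grpc")

    print(rest_client.transport.kind)  # "rest"
    print(grpc_client.transport.kind)  # "grpc"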
- quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.list_assets: gapic_v1.method.wrap_method( - self.list_assets, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
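The defaults wired in by ``_prep_wrapped_messages`` above (0.1 s initial backoff, 1.3x multiplier, 60 s deadline, retrying ``DeadlineExceeded`` and ``ServiceUnavailable``) can be overridden per call through the public ``retry`` and ``timeout`` parameters. A hedged sketch with a hypothetical project id:

.. code-block:: python

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries
    from google.cloud import asset_v1p5beta1

    client = asset_v1p5beta1.AssetServiceClient()

    # Wider backoff than the defaults; retry only on ServiceUnavailable.
    custom_retry = retries.Retry(
        initial=0.5,
        maximum=30.0,
        multiplier=2.0,
        deadline=120.0,
        predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
    )

    for asset in client.list_assets(
        parent="projects/my-project-id", retry=custom_retry, timeout=120.0
    ):
        print(asset.name)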
- """ - raise NotImplementedError() - - @property - def list_assets(self) -> Callable[ - [asset_service.ListAssetsRequest], - Union[ - asset_service.ListAssetsResponse, - Awaitable[asset_service.ListAssetsResponse] - ]]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'AssetServiceTransport', -) diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/services/asset_service/transports/grpc.py b/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/services/asset_service/transports/grpc.py deleted file mode 100644 index 8c69579f1fff..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/services/asset_service/transports/grpc.py +++ /dev/null @@ -1,349 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json -import logging as std_logging -import pickle -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore - -from google.cloud.asset_v1p5beta1.types import asset_service -from google.longrunning import operations_pb2 # type: ignore -from .base import AssetServiceTransport, DEFAULT_CLIENT_INFO - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER - def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.asset.v1p5beta1.AssetService", - "rpcName": client_call_details.method, - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - 
) - - response = continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = response.trailing_metadata() - # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = response.result() - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response for {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.asset.v1p5beta1.AssetService", - "rpcName": client_call_details.method, - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class AssetServiceGrpcTransport(AssetServiceTransport): - """gRPC backend transport for AssetService. - - Asset service definition. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'cloudasset.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'cloudasset.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if a ``channel`` instance is provided. - channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
- If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. 
- credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) - - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'cloudasset.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def list_assets(self) -> Callable[ - [asset_service.ListAssetsRequest], - asset_service.ListAssetsResponse]: - r"""Return a callable for the list assets method over gRPC. - - Lists assets with time and resource types and returns - paged results in response. - - Returns: - Callable[[~.ListAssetsRequest], - ~.ListAssetsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
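A hedged sketch of the explicit-channel path handled above: ``create_channel`` applies the service's default scopes and host, and a transport constructed around an existing channel ignores any credentials arguments. Anonymous credentials are used purely for illustration:

.. code-block:: python

    from google.auth import credentials as ga_credentials
    from google.cloud.asset_v1p5beta1.services.asset_service.transports import (
        AssetServiceGrpcTransport,
    )

    channel = AssetServiceGrpcTransport.create_channel(
        "cloudasset.googleapis.com:443",
        credentials=ga_credentials.AnonymousCredentials(),
    )
    transport = AssetServiceGrpcTransport(channel=channel)
    assert transport.kind == "grpc"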
- if 'list_assets' not in self._stubs: - self._stubs['list_assets'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1p5beta1.AssetService/ListAssets', - request_serializer=asset_service.ListAssetsRequest.serialize, - response_deserializer=asset_service.ListAssetsResponse.deserialize, - ) - return self._stubs['list_assets'] - - def close(self): - self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'AssetServiceGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/services/asset_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/services/asset_service/transports/grpc_asyncio.py deleted file mode 100644 index 2c5078a6ab86..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/services/asset_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,379 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import inspect -import json -import pickle -import logging as std_logging -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import exceptions as core_exceptions -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.asset_v1p5beta1.types import asset_service -from google.longrunning import operations_pb2 # type: ignore -from .base import AssetServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import AssetServiceGrpcTransport - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER - async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - 
grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.asset.v1p5beta1.AssetService", - "rpcName": str(client_call_details.method), - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - response = await continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = await response.trailing_metadata() - # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = await response - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response to rpc {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.asset.v1p5beta1.AssetService", - "rpcName": str(client_call_details.method), - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class AssetServiceGrpcAsyncIOTransport(AssetServiceTransport): - """gRPC AsyncIO backend transport for AssetService. - - Asset service definition. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'cloudasset.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. 
- """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'cloudasset.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'cloudasset.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, aio.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def list_assets(self) -> Callable[ - [asset_service.ListAssetsRequest], - Awaitable[asset_service.ListAssetsResponse]]: - r"""Return a callable for the list assets method over gRPC. - - Lists assets with time and resource types and returns - paged results in response. 
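For the asyncio transport, the awaitable stub surfaces through the package's async client and the ``ListAssetsAsyncPager`` shown earlier. A hedged sketch, assuming the package's ``AssetServiceAsyncClient`` surface, a hypothetical project id, and Application Default Credentials:

.. code-block:: python

    import asyncio

    from google.cloud import asset_v1p5beta1


    async def main() -> None:
        client = asset_v1p5beta1.AssetServiceAsyncClient()
        pager = await client.list_assets(parent="projects/my-project-id")
        async for asset in pager:  # fetches follow-up pages lazily
            print(asset.name)

    asyncio.run(main())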
- - Returns: - Callable[[~.ListAssetsRequest], - Awaitable[~.ListAssetsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_assets' not in self._stubs: - self._stubs['list_assets'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1p5beta1.AssetService/ListAssets', - request_serializer=asset_service.ListAssetsRequest.serialize, - response_deserializer=asset_service.ListAssetsResponse.deserialize, - ) - return self._stubs['list_assets'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.list_assets: self._wrap_method( - self.list_assets, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - -__all__ = ( - 'AssetServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/services/asset_service/transports/rest.py b/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/services/asset_service/transports/rest.py deleted file mode 100644 index 91e1f28c224c..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/services/asset_service/transports/rest.py +++ /dev/null @@ -1,348 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import logging -import json # type: ignore - -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 - -from google.protobuf import json_format - -from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - - -from google.cloud.asset_v1p5beta1.types import asset_service -from google.longrunning import operations_pb2 # type: ignore - - -from .rest_base import _BaseAssetServiceRestTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = logging.getLogger(__name__) - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=f"requests@{requests_version}", -) - - -class AssetServiceRestInterceptor: - """Interceptor for AssetService. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the AssetServiceRestTransport. - - .. code-block:: python - class MyCustomAssetServiceInterceptor(AssetServiceRestInterceptor): - def pre_list_assets(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_assets(self, response): - logging.log(f"Received response: {response}") - return response - - transport = AssetServiceRestTransport(interceptor=MyCustomAssetServiceInterceptor()) - client = AssetServiceClient(transport=transport) - - - """ - def pre_list_assets(self, request: asset_service.ListAssetsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ListAssetsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_assets - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssetService server. - """ - return request, metadata - - def post_list_assets(self, response: asset_service.ListAssetsResponse) -> asset_service.ListAssetsResponse: - """Post-rpc interceptor for list_assets - - DEPRECATED. Please use the `post_list_assets_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AssetService server but before - it is returned to user code. This `post_list_assets` interceptor runs - before the `post_list_assets_with_metadata` interceptor. 
- """ - return response - - def post_list_assets_with_metadata(self, response: asset_service.ListAssetsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ListAssetsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_assets - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AssetService server but before it is returned to user code. - - We recommend only using this `post_list_assets_with_metadata` - interceptor in new development instead of the `post_list_assets` interceptor. - When both interceptors are used, this `post_list_assets_with_metadata` interceptor runs after the - `post_list_assets` interceptor. The (possibly modified) response returned by - `post_list_assets` will be passed to - `post_list_assets_with_metadata`. - """ - return response, metadata - - -@dataclasses.dataclass -class AssetServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: AssetServiceRestInterceptor - - -class AssetServiceRestTransport(_BaseAssetServiceRestTransport): - """REST backend synchronous transport for AssetService. - - Asset service definition. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'cloudasset.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[AssetServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'cloudasset.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. 
- """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. - # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - url_scheme=url_scheme, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or AssetServiceRestInterceptor() - self._prep_wrapped_messages(client_info) - - class _ListAssets(_BaseAssetServiceRestTransport._BaseListAssets, AssetServiceRestStub): - def __hash__(self): - return hash("AssetServiceRestTransport.ListAssets") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: asset_service.ListAssetsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.ListAssetsResponse: - r"""Call the list assets method over HTTP. - - Args: - request (~.asset_service.ListAssetsRequest): - The request object. ListAssets request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.asset_service.ListAssetsResponse: - ListAssets response. 
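-
-            Raises:
-                google.api_core.exceptions.GoogleAPICallError: If the server
-                    responds with an HTTP status code of 400 or above.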
-            """
-
-            http_options = _BaseAssetServiceRestTransport._BaseListAssets._get_http_options()
-
-            request, metadata = self._interceptor.pre_list_assets(request, metadata)
-            transcoded_request = _BaseAssetServiceRestTransport._BaseListAssets._get_transcoded_request(http_options, request)
-
-            # Jsonify the query params
-            query_params = _BaseAssetServiceRestTransport._BaseListAssets._get_query_params_json(transcoded_request)
-
-            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER
-                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
-                method = transcoded_request['method']
-                try:
-                    request_payload = type(request).to_json(request)
-                except Exception:
-                    request_payload = None
-                http_request = {
-                    "payload": request_payload,
-                    "requestMethod": method,
-                    "requestUrl": request_url,
-                    "headers": dict(metadata),
-                }
-                _LOGGER.debug(
-                    "Sending request for google.cloud.asset_v1p5beta1.AssetServiceClient.ListAssets",
-                    extra = {
-                        "serviceName": "google.cloud.asset.v1p5beta1.AssetService",
-                        "rpcName": "ListAssets",
-                        "httpRequest": http_request,
-                        "metadata": http_request["headers"],
-                    },
-                )
-
-            # Send the request
-            response = AssetServiceRestTransport._ListAssets._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
-
-            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
-            # subclass.
-            if response.status_code >= 400:
-                raise core_exceptions.from_http_response(response)
-
-            # Return the response
-            resp = asset_service.ListAssetsResponse()
-            pb_resp = asset_service.ListAssetsResponse.pb(resp)
-
-            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
-
-            resp = self._interceptor.post_list_assets(resp)
-            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
-            resp, _ = self._interceptor.post_list_assets_with_metadata(resp, response_metadata)
-            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER
-                try:
-                    # Serialize the parsed proto (not the raw HTTP response) for logging.
-                    response_payload = asset_service.ListAssetsResponse.to_json(resp)
-                except Exception:
-                    response_payload = None
-                http_response = {
-                    "payload": response_payload,
-                    "headers": dict(response.headers),
-                    "status": response.status_code,
-                }
-                _LOGGER.debug(
-                    "Received response for google.cloud.asset_v1p5beta1.AssetServiceClient.list_assets",
-                    extra = {
-                        "serviceName": "google.cloud.asset.v1p5beta1.AssetService",
-                        "rpcName": "ListAssets",
-                        "metadata": http_response["headers"],
-                        "httpResponse": http_response,
-                    },
-                )
-            return resp
-
-    @property
-    def list_assets(self) -> Callable[
-            [asset_service.ListAssetsRequest],
-            asset_service.ListAssetsResponse]:
-        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
- # In C++ this would require a dynamic_cast - return self._ListAssets(self._session, self._host, self._interceptor) # type: ignore - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'AssetServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/services/asset_service/transports/rest_base.py b/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/services/asset_service/transports/rest_base.py deleted file mode 100644 index 79eda73f2894..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/services/asset_service/transports/rest_base.py +++ /dev/null @@ -1,129 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from .base import AssetServiceTransport, DEFAULT_CLIENT_INFO - -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - - -from google.cloud.asset_v1p5beta1.types import asset_service -from google.longrunning import operations_pb2 # type: ignore - - -class _BaseAssetServiceRestTransport(AssetServiceTransport): - """Base REST backend transport for AssetService. - - Note: This class is not meant to be used directly. Use its sync and - async sub-classes instead. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'cloudasset.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - Args: - host (Optional[str]): - The hostname to connect to (default: 'cloudasset.googleapis.com'). - credentials (Optional[Any]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. 
-        """
-        # Run the base constructor
-        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
-        if maybe_url_match is None:
-            raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER
-
-        url_match_items = maybe_url_match.groupdict()
-
-        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
-
-        super().__init__(
-            host=host,
-            credentials=credentials,
-            client_info=client_info,
-            always_use_jwt_access=always_use_jwt_access,
-            api_audience=api_audience
-        )
-
-    class _BaseListAssets:
-        def __hash__(self): # pragma: NO COVER
-            raise NotImplementedError("__hash__ must be implemented.")
-
-        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
-        }
-
-        @classmethod
-        def _get_unset_required_fields(cls, message_dict):
-            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
-
-        @staticmethod
-        def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'get',
-                'uri': '/v1p5beta1/{parent=*/*}/assets',
-            },
-            ]
-            return http_options
-
-        @staticmethod
-        def _get_transcoded_request(http_options, request):
-            pb_request = asset_service.ListAssetsRequest.pb(request)
-            transcoded_request = path_template.transcode(http_options, pb_request)
-            return transcoded_request
-
-        @staticmethod
-        def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json_format.MessageToJson(
-                transcoded_request['query_params'],
-                use_integers_for_enums=True,
-            ))
-            query_params.update(_BaseAssetServiceRestTransport._BaseListAssets._get_unset_required_fields(query_params))
-
-            query_params["$alt"] = "json;enum-encoding=int"
-            return query_params
-
-
-__all__=(
-    '_BaseAssetServiceRestTransport',
-)
diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/types/__init__.py b/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/types/__init__.py
deleted file mode 100644
index db3e42eb5085..000000000000
--- a/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/types/__init__.py
+++ /dev/null
@@ -1,32 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-from .asset_service import (
-    ListAssetsRequest,
-    ListAssetsResponse,
-    ContentType,
-)
-from .assets import (
-    Asset,
-    Resource,
-)
-
-__all__ = (
-    'ListAssetsRequest',
-    'ListAssetsResponse',
-    'ContentType',
-    'Asset',
-    'Resource',
-)
diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/types/asset_service.py b/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/types/asset_service.py
deleted file mode 100644
index 12665e292a94..000000000000
--- a/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/types/asset_service.py
+++ /dev/null
@@ -1,179 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.asset_v1p5beta1.types import assets as gca_assets -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.asset.v1p5beta1', - manifest={ - 'ContentType', - 'ListAssetsRequest', - 'ListAssetsResponse', - }, -) - - -class ContentType(proto.Enum): - r"""Asset content type. - - Values: - CONTENT_TYPE_UNSPECIFIED (0): - Unspecified content type. - RESOURCE (1): - Resource metadata. - IAM_POLICY (2): - The actual IAM policy set on a resource. - ORG_POLICY (4): - The organization policy set on an asset. - ACCESS_POLICY (5): - The Access Context Manager policy set on an - asset. - """ - CONTENT_TYPE_UNSPECIFIED = 0 - RESOURCE = 1 - IAM_POLICY = 2 - ORG_POLICY = 4 - ACCESS_POLICY = 5 - - -class ListAssetsRequest(proto.Message): - r"""ListAssets request. - - Attributes: - parent (str): - Required. Name of the organization or project the assets - belong to. Format: "organizations/[organization-number]" - (such as "organizations/123"), "projects/[project-id]" (such - as "projects/my-project-id"), or "projects/[project-number]" - (such as "projects/12345"). - read_time (google.protobuf.timestamp_pb2.Timestamp): - Timestamp to take an asset snapshot. This can - only be set to a timestamp between the current - time and the current time minus 35 days - (inclusive). If not specified, the current time - will be used. Due to delays in resource data - collection and indexing, there is a volatile - window during which running the same query may - get different results. - asset_types (MutableSequence[str]): - A list of asset types to take a snapshot for. For example: - "compute.googleapis.com/Disk". - - Regular expression is also supported. For example: - - - "compute.googleapis.com.*" snapshots resources whose - asset type starts with "compute.googleapis.com". - - ".*Instance" snapshots resources whose asset type ends - with "Instance". - - ".*Instance.*" snapshots resources whose asset type - contains "Instance". - - See `RE2 `__ for - all supported regular expression syntax. If the regular - expression does not match any supported asset type, an - INVALID_ARGUMENT error will be returned. - - If specified, only matching assets will be returned, - otherwise, it will snapshot all asset types. See - `Introduction to Cloud Asset - Inventory `__ - for all supported asset types. - content_type (google.cloud.asset_v1p5beta1.types.ContentType): - Asset content type. If not specified, no - content but the asset name will be returned. - page_size (int): - The maximum number of assets to be returned - in a single response. Default is 100, minimum is - 1, and maximum is 1000. - page_token (str): - The ``next_page_token`` returned from the previous - ``ListAssetsResponse``, or unspecified for the first - ``ListAssetsRequest``. It is a continuation of a prior - ``ListAssets`` call, and the API should return the next page - of assets. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - asset_types: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - content_type: 'ContentType' = proto.Field( - proto.ENUM, - number=4, - enum='ContentType', - ) - page_size: int = proto.Field( - proto.INT32, - number=5, - ) - page_token: str = proto.Field( - proto.STRING, - number=6, - ) - - -class ListAssetsResponse(proto.Message): - r"""ListAssets response. - - Attributes: - read_time (google.protobuf.timestamp_pb2.Timestamp): - Time the snapshot was taken. - assets (MutableSequence[google.cloud.asset_v1p5beta1.types.Asset]): - Assets. - next_page_token (str): - Token to retrieve the next page of results. - It expires 72 hours after the page token for the - first page is generated. Set to empty if there - are no remaining results. - """ - - @property - def raw_page(self): - return self - - read_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - assets: MutableSequence[gca_assets.Asset] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=gca_assets.Asset, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/types/assets.py b/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/types/assets.py deleted file mode 100644 index 79ca19b9c21d..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p5beta1/google/cloud/asset_v1p5beta1/types/assets.py +++ /dev/null @@ -1,235 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.orgpolicy.v1 import orgpolicy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.identity.accesscontextmanager.v1 import access_level_pb2 # type: ignore -from google.identity.accesscontextmanager.v1 import access_policy_pb2 # type: ignore -from google.identity.accesscontextmanager.v1 import service_perimeter_pb2 # type: ignore -from google.protobuf import struct_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.asset.v1p5beta1', - manifest={ - 'Asset', - 'Resource', - }, -) - - -class Asset(proto.Message): - r"""An asset in Google Cloud. An asset can be any resource in the Google - Cloud `resource - hierarchy `__, - a resource outside the Google Cloud resource hierarchy (such as - Google Kubernetes Engine clusters and objects), or a policy (e.g. - IAM policy). See `Supported asset - types `__ - for more information. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. 
-    Setting any member of the oneof automatically clears all other
-    members.
-
-    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
-
-    Attributes:
-        name (str):
-            The full name of the asset. Example:
-            ``//compute.googleapis.com/projects/my_project_123/zones/zone1/instances/instance1``
-
-            See `Resource
-            names `__
-            for more information.
-        asset_type (str):
-            The type of the asset. Example:
-            ``compute.googleapis.com/Disk``
-
-            See `Supported asset
-            types `__
-            for more information.
-        resource (google.cloud.asset_v1p5beta1.types.Resource):
-            A representation of the resource.
-        iam_policy (google.iam.v1.policy_pb2.Policy):
-            A representation of the IAM policy set on a Google Cloud
-            resource. There can be a maximum of one IAM policy set on
-            any given resource. In addition, IAM policies inherit their
-            granted access scope from any policies set on parent
-            resources in the resource hierarchy. Therefore, the
-            effective policy is the union of both the policy set on
-            this resource and each policy set on all of the resource's
-            ancestry resource levels in the hierarchy. See `this
-            topic `__
-            for more information.
-        org_policy (MutableSequence[google.cloud.orgpolicy.v1.orgpolicy_pb2.Policy]):
-            A representation of an `organization
-            policy `__.
-            There can be more than one organization policy with
-            different constraints set on a given resource.
-        access_policy (google.identity.accesscontextmanager.v1.access_policy_pb2.AccessPolicy):
-            Please also refer to the `access policy user
-            guide `__.
-
-            This field is a member of `oneof`_ ``access_context_policy``.
-        access_level (google.identity.accesscontextmanager.v1.access_level_pb2.AccessLevel):
-            Please also refer to the `access level user
-            guide `__.
-
-            This field is a member of `oneof`_ ``access_context_policy``.
-        service_perimeter (google.identity.accesscontextmanager.v1.service_perimeter_pb2.ServicePerimeter):
-            Please also refer to the `service perimeter user
-            guide `__.
-
-            This field is a member of `oneof`_ ``access_context_policy``.
-        ancestors (MutableSequence[str]):
-            The ancestry path of an asset in Google Cloud `resource
-            hierarchy `__,
-            represented as a list of relative resource names. An
-            ancestry path starts with the closest ancestor in the
-            hierarchy and ends at root. If the asset is a project,
-            folder, or organization, the ancestry path starts from the
-            asset itself.
- - Example: - ``["projects/123456789", "folders/5432", "organizations/1234"]`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - asset_type: str = proto.Field( - proto.STRING, - number=2, - ) - resource: 'Resource' = proto.Field( - proto.MESSAGE, - number=3, - message='Resource', - ) - iam_policy: policy_pb2.Policy = proto.Field( - proto.MESSAGE, - number=4, - message=policy_pb2.Policy, - ) - org_policy: MutableSequence[orgpolicy_pb2.Policy] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message=orgpolicy_pb2.Policy, - ) - access_policy: access_policy_pb2.AccessPolicy = proto.Field( - proto.MESSAGE, - number=7, - oneof='access_context_policy', - message=access_policy_pb2.AccessPolicy, - ) - access_level: access_level_pb2.AccessLevel = proto.Field( - proto.MESSAGE, - number=8, - oneof='access_context_policy', - message=access_level_pb2.AccessLevel, - ) - service_perimeter: service_perimeter_pb2.ServicePerimeter = proto.Field( - proto.MESSAGE, - number=9, - oneof='access_context_policy', - message=service_perimeter_pb2.ServicePerimeter, - ) - ancestors: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=10, - ) - - -class Resource(proto.Message): - r"""A representation of a Google Cloud resource. - - Attributes: - version (str): - The API version. Example: "v1". - discovery_document_uri (str): - The URL of the discovery document containing the resource's - JSON schema. Example: - ``https://www.googleapis.com/discovery/v1/apis/compute/v1/rest`` - - This value is unspecified for resources that do not have an - API based on a discovery document, such as Cloud Bigtable. - discovery_name (str): - The JSON schema name listed in the discovery document. - Example: ``Project`` - - This value is unspecified for resources that do not have an - API based on a discovery document, such as Cloud Bigtable. - resource_url (str): - The REST URL for accessing the resource. An HTTP ``GET`` - request using this URL returns the resource itself. Example: - ``https://cloudresourcemanager.googleapis.com/v1/projects/my-project-123`` - - This value is unspecified for resources without a REST API. - parent (str): - The full name of the immediate parent of this resource. See - `Resource - Names `__ - for more information. - - For Google Cloud assets, this value is the parent resource - defined in the `IAM policy - hierarchy `__. - Example: - ``//cloudresourcemanager.googleapis.com/projects/my_project_123`` - - For third-party assets, this field may be set differently. - data (google.protobuf.struct_pb2.Struct): - The content of the resource, in which some - sensitive fields are removed and may not be - present. 
-    """
-
-    version: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    discovery_document_uri: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-    discovery_name: str = proto.Field(
-        proto.STRING,
-        number=3,
-    )
-    resource_url: str = proto.Field(
-        proto.STRING,
-        number=4,
-    )
-    parent: str = proto.Field(
-        proto.STRING,
-        number=5,
-    )
-    data: struct_pb2.Struct = proto.Field(
-        proto.MESSAGE,
-        number=6,
-        message=struct_pb2.Struct,
-    )
-
-
-__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/mypy.ini b/owl-bot-staging/google-cloud-asset/v1p5beta1/mypy.ini
deleted file mode 100644
index 574c5aed394b..000000000000
--- a/owl-bot-staging/google-cloud-asset/v1p5beta1/mypy.ini
+++ /dev/null
@@ -1,3 +0,0 @@
-[mypy]
-python_version = 3.7
-namespace_packages = True
diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/noxfile.py b/owl-bot-staging/google-cloud-asset/v1p5beta1/noxfile.py
deleted file mode 100644
index 5069b9a84df3..000000000000
--- a/owl-bot-staging/google-cloud-asset/v1p5beta1/noxfile.py
+++ /dev/null
@@ -1,280 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import os
-import pathlib
-import re
-import shutil
-import subprocess
-import sys
-
-
-import nox # type: ignore
-
-ALL_PYTHON = [
-    "3.7",
-    "3.8",
-    "3.9",
-    "3.10",
-    "3.11",
-    "3.12",
-    "3.13",
-]
-
-CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
-
-LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt"
-PACKAGE_NAME = 'google-cloud-asset'
-
-BLACK_VERSION = "black==22.3.0"
-BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"]
-DEFAULT_PYTHON_VERSION = "3.13"
-
-nox.sessions = [
-    "unit",
-    "cover",
-    "mypy",
-    "check_lower_bounds",
-    # exclude update_lower_bounds from default
-    "docs",
-    "blacken",
-    "lint",
-    "prerelease_deps",
-]
-
-@nox.session(python=ALL_PYTHON)
-@nox.parametrize(
-    "protobuf_implementation",
-    [ "python", "upb", "cpp" ],
-)
-def unit(session, protobuf_implementation):
-    """Run the unit test suite."""
-
-    if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
-        session.skip("cpp implementation is not supported in python 3.11+")
-
-    session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"')
-    session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt")
-
-    # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped.
-    # The 'cpp' implementation requires Protobuf<4.
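-    # Illustrative check (not part of the generated file): the protobuf
-    # implementation actually selected at runtime can be confirmed with
-    #     from google.protobuf.internal import api_implementation
-    #     print(api_implementation.Type())  # 'python', 'upb', or 'cpp'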
-    if protobuf_implementation == "cpp":
-        session.install("protobuf<4")
-
-    session.run(
-        'py.test',
-        '--quiet',
-        '--cov=google/cloud/asset_v1p5beta1/',
-        '--cov=tests/',
-        '--cov-config=.coveragerc',
-        '--cov-report=term',
-        '--cov-report=html',
-        os.path.join('tests', 'unit', ''.join(session.posargs)),
-        env={
-            "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
-        },
-    )
-
-@nox.session(python=ALL_PYTHON[-1])
-@nox.parametrize(
-    "protobuf_implementation",
-    [ "python", "upb", "cpp" ],
-)
-def prerelease_deps(session, protobuf_implementation):
-    """Run the unit test suite against pre-release versions of dependencies."""
-
-    if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
-        session.skip("cpp implementation is not supported in python 3.11+")
-
-    # Install test environment dependencies
-    session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"')
-
-    # Install the package without dependencies
-    session.install('-e', '.', '--no-deps')
-
-    # We test the minimum dependency versions using the minimum Python
-    # version, so the lowest Python runtime that we test has a corresponding
-    # constraints file, located at `testing/constraints-<minimum python version>.txt`,
-    # which contains all of the dependencies and extras.
-    with open(
-        CURRENT_DIRECTORY
-        / "testing"
-        / f"constraints-{ALL_PYTHON[0]}.txt",
-        encoding="utf-8",
-    ) as constraints_file:
-        constraints_text = constraints_file.read()
-
-    # Ignore leading whitespace and comment lines.
-    constraints_deps = [
-        match.group(1)
-        for match in re.finditer(
-            r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE
-        )
-    ]
-
-    session.install(*constraints_deps)
-
-    prerel_deps = [
-        "googleapis-common-protos",
-        "google-api-core",
-        "google-auth",
-        # Exclude grpcio 1.67.0rc1, which does not support Python 3.13
-        "grpcio!=1.67.0rc1",
-        "grpcio-status",
-        "protobuf",
-        "proto-plus",
-    ]
-
-    for dep in prerel_deps:
-        session.install("--pre", "--no-deps", "--upgrade", dep)
-
-    # Remaining dependencies
-    other_deps = [
-        "requests",
-    ]
-    session.install(*other_deps)
-
-    # Print out prerelease package versions
-
-    session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)")
-    session.run("python", "-c", "import google.auth; print(google.auth.__version__)")
-    session.run("python", "-c", "import grpc; print(grpc.__version__)")
-    session.run(
-        "python", "-c", "import google.protobuf; print(google.protobuf.__version__)"
-    )
-    session.run(
-        "python", "-c", "import proto; print(proto.__version__)"
-    )
-
-    session.run(
-        'py.test',
-        '--quiet',
-        '--cov=google/cloud/asset_v1p5beta1/',
-        '--cov=tests/',
-        '--cov-config=.coveragerc',
-        '--cov-report=term',
-        '--cov-report=html',
-        os.path.join('tests', 'unit', ''.join(session.posargs)),
-        env={
-            "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
-        },
-    )
-
-
-@nox.session(python=DEFAULT_PYTHON_VERSION)
-def cover(session):
-    """Run the final coverage report.
-    This outputs the coverage report aggregating coverage from the unit
-    test runs (not system test runs), and then erases coverage data.
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/samples/generated_samples/cloudasset_v1p5beta1_generated_asset_service_list_assets_async.py b/owl-bot-staging/google-cloud-asset/v1p5beta1/samples/generated_samples/cloudasset_v1p5beta1_generated_asset_service_list_assets_async.py deleted file mode 100644 index c569bc1474bb..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p5beta1/samples/generated_samples/cloudasset_v1p5beta1_generated_asset_service_list_assets_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListAssets -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
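-# It assumes Application Default Credentials are available in the environment
-# (for example via `gcloud auth application-default login`).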
-
-# To install the latest published package dependency, execute the following:
-# python3 -m pip install google-cloud-asset
-
-
-# [START cloudasset_v1p5beta1_generated_AssetService_ListAssets_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import asset_v1p5beta1
-
-
-async def sample_list_assets():
-    # Create a client
-    client = asset_v1p5beta1.AssetServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = asset_v1p5beta1.ListAssetsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request (the coroutine must be awaited to obtain the async pager)
-    page_result = await client.list_assets(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END cloudasset_v1p5beta1_generated_AssetService_ListAssets_async]
diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/samples/generated_samples/cloudasset_v1p5beta1_generated_asset_service_list_assets_sync.py b/owl-bot-staging/google-cloud-asset/v1p5beta1/samples/generated_samples/cloudasset_v1p5beta1_generated_asset_service_list_assets_sync.py
deleted file mode 100644
index abcca11cee64..000000000000
--- a/owl-bot-staging/google-cloud-asset/v1p5beta1/samples/generated_samples/cloudasset_v1p5beta1_generated_asset_service_list_assets_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListAssets
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-# python3 -m pip install google-cloud-asset
-
-
-# [START cloudasset_v1p5beta1_generated_AssetService_ListAssets_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import asset_v1p5beta1 - - -def sample_list_assets(): - # Create a client - client = asset_v1p5beta1.AssetServiceClient() - - # Initialize request argument(s) - request = asset_v1p5beta1.ListAssetsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_assets(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END cloudasset_v1p5beta1_generated_AssetService_ListAssets_sync] diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p5beta1.json b/owl-bot-staging/google-cloud-asset/v1p5beta1/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p5beta1.json deleted file mode 100644 index 2b27b5681d7c..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p5beta1/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p5beta1.json +++ /dev/null @@ -1,176 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.asset.v1p5beta1", - "version": "v1p5beta1" - } - ], - "language": "PYTHON", - "name": "google-cloud-asset", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.asset_v1p5beta1.AssetServiceAsyncClient", - "shortName": "AssetServiceAsyncClient" - }, - "fullName": "google.cloud.asset_v1p5beta1.AssetServiceAsyncClient.list_assets", - "method": { - "fullName": "google.cloud.asset.v1p5beta1.AssetService.ListAssets", - "service": { - "fullName": "google.cloud.asset.v1p5beta1.AssetService", - "shortName": "AssetService" - }, - "shortName": "ListAssets" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1p5beta1.types.ListAssetsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1p5beta1.services.asset_service.pagers.ListAssetsAsyncPager", - "shortName": "list_assets" - }, - "description": "Sample for ListAssets", - "file": "cloudasset_v1p5beta1_generated_asset_service_list_assets_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1p5beta1_generated_AssetService_ListAssets_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1p5beta1_generated_asset_service_list_assets_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.asset_v1p5beta1.AssetServiceClient", - "shortName": "AssetServiceClient" - }, - "fullName": "google.cloud.asset_v1p5beta1.AssetServiceClient.list_assets", - "method": { - "fullName": "google.cloud.asset.v1p5beta1.AssetService.ListAssets", - "service": { - "fullName": "google.cloud.asset.v1p5beta1.AssetService", - "shortName": "AssetService" - }, - "shortName": "ListAssets" - }, - 
"parameters": [ - { - "name": "request", - "type": "google.cloud.asset_v1p5beta1.types.ListAssetsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.asset_v1p5beta1.services.asset_service.pagers.ListAssetsPager", - "shortName": "list_assets" - }, - "description": "Sample for ListAssets", - "file": "cloudasset_v1p5beta1_generated_asset_service_list_assets_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1p5beta1_generated_AssetService_ListAssets_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "cloudasset_v1p5beta1_generated_asset_service_list_assets_sync.py" - } - ] -} diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/scripts/fixup_asset_v1p5beta1_keywords.py b/owl-bot-staging/google-cloud-asset/v1p5beta1/scripts/fixup_asset_v1p5beta1_keywords.py deleted file mode 100644 index ddc57733a57c..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p5beta1/scripts/fixup_asset_v1p5beta1_keywords.py +++ /dev/null @@ -1,176 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class assetCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'list_assets': ('parent', 'read_time', 'asset_types', 'content_type', 'page_size', 'page_token', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. 
-        args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
-        if any(k.keyword.value == "request" for k in kwargs):
-            # We've already fixed this file, don't fix it again.
-            return updated
-
-        kwargs, ctrl_kwargs = partition(
-            lambda a: a.keyword.value not in self.CTRL_PARAMS,
-            kwargs
-        )
-
-        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
-        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
-                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
-
-        request_arg = cst.Arg(
-            value=cst.Dict([
-                cst.DictElement(
-                    cst.SimpleString("'{}'".format(name)),
-                    cst.Element(value=arg.value)
-                )
-                # Note: the args + kwargs looks silly, but keep in mind that
-                # the control parameters had to be stripped out, and that
-                # those could have been passed positionally or by keyword.
-                for name, arg in zip(kword_params, args + kwargs)]),
-            keyword=cst.Name("request")
-        )
-
-        return updated.with_changes(
-            args=[request_arg] + ctrl_kwargs
-        )
-
-
-def fix_files(
-    in_dir: pathlib.Path,
-    out_dir: pathlib.Path,
-    *,
-    transformer=assetCallTransformer(),
-):
-    """Duplicate the input dir to the output dir, fixing file method calls.
-
-    Preconditions:
-    * in_dir is a real directory
-    * out_dir is a real, empty directory
-    """
-    pyfile_gen = (
-        pathlib.Path(os.path.join(root, f))
-        for root, _, files in os.walk(in_dir)
-        for f in files if os.path.splitext(f)[1] == ".py"
-    )
-
-    for fpath in pyfile_gen:
-        with open(fpath, 'r') as f:
-            src = f.read()
-
-        # Parse the code and insert method call fixes.
-        tree = cst.parse_module(src)
-        updated = tree.visit(transformer)
-
-        # Create the path and directory structure for the new file.
-        updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
-        updated_path.parent.mkdir(parents=True, exist_ok=True)
-
-        # Generate the updated source file at the corresponding path.
-        with open(updated_path, 'w') as f:
-            f.write(updated.code)
-
-
-if __name__ == '__main__':
-    parser = argparse.ArgumentParser(
-        description="""Fix up source that uses the asset client library.
-
-The existing sources are NOT overwritten but are copied to output_dir with changes made.
-
-Note: This tool operates at a best-effort level at converting positional
-      parameters in client method calls to keyword based parameters.
-      Cases where it WILL FAIL include
-      A) * or ** expansion in a method call.
-      B) Calls via function or method alias (includes free function calls)
-      C) Indirect or dispatched calls (e.g. the method is looked up dynamically)
-
-      These all constitute false negatives. The tool will also detect false
-      positives when an API method shares a name with another method.
-""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/setup.py b/owl-bot-staging/google-cloud-asset/v1p5beta1/setup.py deleted file mode 100644 index 481fb82feff0..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p5beta1/setup.py +++ /dev/null @@ -1,100 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-asset' - - -description = "Google Cloud Asset API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/asset/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "google-cloud-access-context-manager >= 0.1.2, <1.0.0dev", - "grpc-google-iam-v1 >= 0.14.0, <1.0.0dev", -] -extras = { -} -url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-asset" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - extras_require=extras, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-asset/v1p5beta1/testing/constraints-3.10.txt deleted file mode 100644 index 2b8305f11bf0..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p5beta1/testing/constraints-3.10.txt +++ /dev/null @@ -1,8 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
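-# (Dependencies here are intentionally unpinned; only the oldest supported
-# runtime, Python 3.7, pins each dependency to its lower bound.)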
-google-api-core -proto-plus -protobuf -google-cloud-access-context-manager -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-asset/v1p5beta1/testing/constraints-3.11.txt deleted file mode 100644 index 2b8305f11bf0..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p5beta1/testing/constraints-3.11.txt +++ /dev/null @@ -1,8 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -google-cloud-access-context-manager -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-asset/v1p5beta1/testing/constraints-3.12.txt deleted file mode 100644 index 2b8305f11bf0..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p5beta1/testing/constraints-3.12.txt +++ /dev/null @@ -1,8 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -google-cloud-access-context-manager -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-asset/v1p5beta1/testing/constraints-3.13.txt deleted file mode 100644 index 2b8305f11bf0..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p5beta1/testing/constraints-3.13.txt +++ /dev/null @@ -1,8 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -google-cloud-access-context-manager -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-asset/v1p5beta1/testing/constraints-3.7.txt deleted file mode 100644 index 4bd37f5230a0..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p5beta1/testing/constraints-3.7.txt +++ /dev/null @@ -1,12 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -proto-plus==1.22.3 -protobuf==3.20.2 -google-cloud-access-context-manager==0.1.2 -grpc-google-iam-v1==0.14.0 diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-asset/v1p5beta1/testing/constraints-3.8.txt deleted file mode 100644 index 2b8305f11bf0..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p5beta1/testing/constraints-3.8.txt +++ /dev/null @@ -1,8 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -google-cloud-access-context-manager -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-asset/v1p5beta1/testing/constraints-3.9.txt deleted file mode 100644 index 2b8305f11bf0..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p5beta1/testing/constraints-3.9.txt +++ /dev/null @@ -1,8 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. 
-# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -google-cloud-access-context-manager -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/tests/__init__.py b/owl-bot-staging/google-cloud-asset/v1p5beta1/tests/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p5beta1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/tests/unit/__init__.py b/owl-bot-staging/google-cloud-asset/v1p5beta1/tests/unit/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p5beta1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-asset/v1p5beta1/tests/unit/gapic/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p5beta1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/tests/unit/gapic/asset_v1p5beta1/__init__.py b/owl-bot-staging/google-cloud-asset/v1p5beta1/tests/unit/gapic/asset_v1p5beta1/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p5beta1/tests/unit/gapic/asset_v1p5beta1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-asset/v1p5beta1/tests/unit/gapic/asset_v1p5beta1/test_asset_service.py b/owl-bot-staging/google-cloud-asset/v1p5beta1/tests/unit/gapic/asset_v1p5beta1/test_asset_service.py deleted file mode 100644 index 4ad2efb9fe19..000000000000 --- a/owl-bot-staging/google-cloud-asset/v1p5beta1/tests/unit/gapic/asset_v1p5beta1/test_asset_service.py +++ /dev/null @@ -1,2451 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.asset_v1p5beta1.services.asset_service import AssetServiceAsyncClient -from google.cloud.asset_v1p5beta1.services.asset_service import AssetServiceClient -from google.cloud.asset_v1p5beta1.services.asset_service import pagers -from google.cloud.asset_v1p5beta1.services.asset_service import transports -from google.cloud.asset_v1p5beta1.types import asset_service -from google.cloud.asset_v1p5beta1.types import assets -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import timestamp_pb2 # type: ignore -import google.auth - - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": "service account credentials", - "principal": "service-account@example.com", -} 
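# The CRED_INFO_JSON fixture above feeds the credential-info tests further
# down. A minimal sketch of the behavior those tests pin down, assuming a
# credentials object that may expose get_cred_info() (the helper name below is
# illustrative, not the library's private API):
def add_cred_info_sketch(error, credentials):
    """Append serialized credential info to auth-related errors (401/403/404)."""
    cred_info = getattr(credentials, "get_cred_info", lambda: None)()
    if cred_info and error.code in (401, 403, 404):
        error.details.append(json.dumps(cred_info))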
-CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert AssetServiceClient._get_default_mtls_endpoint(None) is None - assert AssetServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert AssetServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert AssetServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert AssetServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert AssetServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert AssetServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert AssetServiceClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert AssetServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - AssetServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert AssetServiceClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert AssetServiceClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert AssetServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, 
{"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - AssetServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert AssetServiceClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert AssetServiceClient._get_client_cert_source(None, False) is None - assert AssetServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert AssetServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert AssetServiceClient._get_client_cert_source(None, True) is mock_default_cert_source - assert AssetServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(AssetServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceClient)) -@mock.patch.object(AssetServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = AssetServiceClient._DEFAULT_UNIVERSE - default_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert AssetServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert AssetServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == AssetServiceClient.DEFAULT_MTLS_ENDPOINT - assert AssetServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert AssetServiceClient._get_api_endpoint(None, None, default_universe, "always") == AssetServiceClient.DEFAULT_MTLS_ENDPOINT - assert AssetServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == AssetServiceClient.DEFAULT_MTLS_ENDPOINT - assert AssetServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert AssetServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - AssetServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." 
- - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert AssetServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert AssetServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert AssetServiceClient._get_universe_domain(None, None) == AssetServiceClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - AssetServiceClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." - -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = AssetServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - -@pytest.mark.parametrize("error_code", [401,403,404,500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = AssetServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - -@pytest.mark.parametrize("client_class,transport_name", [ - (AssetServiceClient, "grpc"), - (AssetServiceAsyncClient, "grpc_asyncio"), - (AssetServiceClient, "rest"), -]) -def test_asset_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'cloudasset.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://cloudasset.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.AssetServiceGrpcTransport, "grpc"), - (transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.AssetServiceRestTransport, "rest"), -]) -def test_asset_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, 
always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (AssetServiceClient, "grpc"), - (AssetServiceAsyncClient, "grpc_asyncio"), - (AssetServiceClient, "rest"), -]) -def test_asset_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'cloudasset.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://cloudasset.googleapis.com' - ) - - -def test_asset_service_client_get_transport_class(): - transport = AssetServiceClient.get_transport_class() - available_transports = [ - transports.AssetServiceGrpcTransport, - transports.AssetServiceRestTransport, - ] - assert transport in available_transports - - transport = AssetServiceClient.get_transport_class("grpc") - assert transport == transports.AssetServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc"), - (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (AssetServiceClient, transports.AssetServiceRestTransport, "rest"), -]) -@mock.patch.object(AssetServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceClient)) -@mock.patch.object(AssetServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceAsyncClient)) -def test_asset_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(AssetServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(AssetServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". 
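# (Context for the endpoint cases that follow: GOOGLE_API_USE_MTLS_ENDPOINT
# accepts "never", "auto", or "always" -- "never" forces the regular endpoint,
# "always" forces the mTLS endpoint, and "auto" selects the mTLS endpoint only
# when a client certificate is available. Any other value raises
# MutualTLSChannelError, as the error-message assertions below confirm.)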
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc", "true"), - (AssetServiceAsyncClient, 
transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc", "false"), - (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (AssetServiceClient, transports.AssetServiceRestTransport, "rest", "true"), - (AssetServiceClient, transports.AssetServiceRestTransport, "rest", "false"), -]) -@mock.patch.object(AssetServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceClient)) -@mock.patch.object(AssetServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_asset_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
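# (With has_default_client_cert_source patched to False below, no certificate
# is available from either an explicit client_cert_source or ADC, so the plain
# endpoint and a null client_cert_source_for_mtls are expected regardless of
# the GOOGLE_API_USE_CLIENT_CERTIFICATE setting.)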
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - AssetServiceClient, AssetServiceAsyncClient -]) -@mock.patch.object(AssetServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AssetServiceClient)) -@mock.patch.object(AssetServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AssetServiceAsyncClient)) -def test_asset_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - AssetServiceClient, AssetServiceAsyncClient -]) -@mock.patch.object(AssetServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceClient)) -@mock.patch.object(AssetServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceAsyncClient)) -def test_asset_service_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = AssetServiceClient._DEFAULT_UNIVERSE - default_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
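# (Taken together, these cases pin down the endpoint-resolution precedence: an
# explicit ClientOptions.api_endpoint always wins; otherwise
# GOOGLE_API_USE_MTLS_ENDPOINT="always", or "auto" with a usable client
# certificate, selects the mTLS endpoint; otherwise _DEFAULT_ENDPOINT_TEMPLATE
# is filled with the configured universe domain.)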
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc"), - (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (AssetServiceClient, transports.AssetServiceRestTransport, "rest"), -]) -def test_asset_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc", grpc_helpers), - (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (AssetServiceClient, transports.AssetServiceRestTransport, "rest", None), -]) -def test_asset_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_asset_service_client_client_options_from_dict(): - with mock.patch('google.cloud.asset_v1p5beta1.services.asset_service.transports.AssetServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = AssetServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc", grpc_helpers), - (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_asset_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
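# (Verified by patching google.auth.load_credentials_from_file together with
# grpc_helpers.create_channel: the channel must be created with the file-based
# credentials rather than whatever application default credentials return.)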
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "cloudasset.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="cloudasset.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - asset_service.ListAssetsRequest, - dict, -]) -def test_list_assets(request_type, transport: str = 'grpc'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = asset_service.ListAssetsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_assets(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = asset_service.ListAssetsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAssetsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_assets_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = asset_service.ListAssetsRequest( - parent='parent_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.list_assets(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.ListAssetsRequest( - parent='parent_value', - page_token='page_token_value', - ) - -def test_list_assets_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_assets in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_assets] = mock_rpc - request = {} - client.list_assets(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_assets(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_assets in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_assets] = mock_rpc - - request = {} - await client.list_assets(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_assets(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.ListAssetsRequest): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListAssetsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_assets(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = asset_service.ListAssetsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAssetsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_assets_async_from_dict(): - await test_list_assets_async(request_type=dict) - -def test_list_assets_field_headers(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = asset_service.ListAssetsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: - call.return_value = asset_service.ListAssetsResponse() - client.list_assets(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_assets_field_headers_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = asset_service.ListAssetsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListAssetsResponse()) - await client.list_assets(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_assets_flattened(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = asset_service.ListAssetsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_assets( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_assets_flattened_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
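# (The flattened-argument convention under test: callers may pass either a
# request object or individual keyword fields, never both. Illustratively:
#     client.list_assets(parent='parent_value')                          # ok
#     client.list_assets(asset_service.ListAssetsRequest())              # ok
#     client.list_assets(asset_service.ListAssetsRequest(), parent='x')  # raises ValueError
# )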
- with pytest.raises(ValueError): - client.list_assets( - asset_service.ListAssetsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_assets_flattened_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = asset_service.ListAssetsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListAssetsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_assets( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_assets_flattened_error_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_assets( - asset_service.ListAssetsRequest(), - parent='parent_value', - ) - - -def test_list_assets_pager(transport_name: str = "grpc"): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - asset_service.ListAssetsResponse( - assets=[ - assets.Asset(), - assets.Asset(), - assets.Asset(), - ], - next_page_token='abc', - ), - asset_service.ListAssetsResponse( - assets=[], - next_page_token='def', - ), - asset_service.ListAssetsResponse( - assets=[ - assets.Asset(), - ], - next_page_token='ghi', - ), - asset_service.ListAssetsResponse( - assets=[ - assets.Asset(), - assets.Asset(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_assets(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, assets.Asset) - for i in results) -def test_list_assets_pages(transport_name: str = "grpc"): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: - # Set the response to a series of pages. 
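# (Pager tests queue one fake ListAssetsResponse per page via call.side_effect;
# the pager keeps issuing RPCs while next_page_token is non-empty, so the
# trailing RuntimeError only fires if it over-fetches. Pages of 3, 0, 1, and 2
# assets therefore yield the six results asserted in these tests.)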
- call.side_effect = ( - asset_service.ListAssetsResponse( - assets=[ - assets.Asset(), - assets.Asset(), - assets.Asset(), - ], - next_page_token='abc', - ), - asset_service.ListAssetsResponse( - assets=[], - next_page_token='def', - ), - asset_service.ListAssetsResponse( - assets=[ - assets.Asset(), - ], - next_page_token='ghi', - ), - asset_service.ListAssetsResponse( - assets=[ - assets.Asset(), - assets.Asset(), - ], - ), - RuntimeError, - ) - pages = list(client.list_assets(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_assets_async_pager(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - asset_service.ListAssetsResponse( - assets=[ - assets.Asset(), - assets.Asset(), - assets.Asset(), - ], - next_page_token='abc', - ), - asset_service.ListAssetsResponse( - assets=[], - next_page_token='def', - ), - asset_service.ListAssetsResponse( - assets=[ - assets.Asset(), - ], - next_page_token='ghi', - ), - asset_service.ListAssetsResponse( - assets=[ - assets.Asset(), - assets.Asset(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_assets(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, assets.Asset) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_assets_async_pages(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - asset_service.ListAssetsResponse( - assets=[ - assets.Asset(), - assets.Asset(), - assets.Asset(), - ], - next_page_token='abc', - ), - asset_service.ListAssetsResponse( - assets=[], - next_page_token='def', - ), - asset_service.ListAssetsResponse( - assets=[ - assets.Asset(), - ], - next_page_token='ghi', - ), - asset_service.ListAssetsResponse( - assets=[ - assets.Asset(), - assets.Asset(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_assets(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_list_assets_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_assets in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_assets] = mock_rpc - - request = {} - client.list_assets(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_assets(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_assets_rest_required_fields(request_type=asset_service.ListAssetsRequest): - transport_class = transports.AssetServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_assets._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_assets._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("asset_types", "content_type", "page_size", "page_token", "read_time", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. 
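# (The REST tests stub path_template.transcode(), which normally maps a proto
# request onto the method's http rule -- a URI, an HTTP verb, and query
# parameters -- so the test can assert on the serialized query string without
# contacting a real server.)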
- return_value = asset_service.ListAssetsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = asset_service.ListAssetsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_assets(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_assets_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_assets._get_unset_required_fields({}) - assert set(unset_fields) == (set(("assetTypes", "contentType", "pageSize", "pageToken", "readTime", )) & set(("parent", ))) - - -def test_list_assets_rest_flattened(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.ListAssetsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'sample1/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.ListAssetsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_assets(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1p5beta1/{parent=*/*}/assets" % client.transport._host, args[1]) - - -def test_list_assets_rest_flattened_error(transport: str = 'rest'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
-    with pytest.raises(ValueError):
-        client.list_assets(
-            asset_service.ListAssetsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_assets_rest_pager(transport: str = 'rest'):
-    client = AssetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # Set the response as a series of pages
-        response = (
-            asset_service.ListAssetsResponse(
-                assets=[
-                    assets.Asset(),
-                    assets.Asset(),
-                    assets.Asset(),
-                ],
-                next_page_token='abc',
-            ),
-            asset_service.ListAssetsResponse(
-                assets=[],
-                next_page_token='def',
-            ),
-            asset_service.ListAssetsResponse(
-                assets=[
-                    assets.Asset(),
-                ],
-                next_page_token='ghi',
-            ),
-            asset_service.ListAssetsResponse(
-                assets=[
-                    assets.Asset(),
-                    assets.Asset(),
-                ],
-            ),
-        )
-        # Two responses for two calls
-        response = response + response
-
-        # Wrap the values into proper Response objs
-        response = tuple(asset_service.ListAssetsResponse.to_json(x) for x in response)
-        return_values = tuple(Response() for _ in response)
-        for return_val, response_val in zip(return_values, response):
-            return_val._content = response_val.encode('UTF-8')
-            return_val.status_code = 200
-        req.side_effect = return_values
-
-        sample_request = {'parent': 'sample1/sample2'}
-
-        pager = client.list_assets(request=sample_request)
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, assets.Asset)
-                   for i in results)
-
-        pages = list(client.list_assets(request=sample_request).pages)
-        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-
-def test_credentials_transport_error():
-    # It is an error to provide credentials and a transport instance.
-    transport = transports.AssetServiceGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    with pytest.raises(ValueError):
-        client = AssetServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport=transport,
-        )
-
-    # It is an error to provide a credentials file and a transport instance.
-    transport = transports.AssetServiceGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    with pytest.raises(ValueError):
-        client = AssetServiceClient(
-            client_options={"credentials_file": "credentials.json"},
-            transport=transport,
-        )
-
-    # It is an error to provide an api_key and a transport instance.
-    transport = transports.AssetServiceGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    options = client_options.ClientOptions()
-    options.api_key = "api_key"
-    with pytest.raises(ValueError):
-        client = AssetServiceClient(
-            client_options=options,
-            transport=transport,
-        )
-
-    # It is an error to provide an api_key and a credential.
-    options = client_options.ClientOptions()
-    options.api_key = "api_key"
-    with pytest.raises(ValueError):
-        client = AssetServiceClient(
-            client_options=options,
-            credentials=ga_credentials.AnonymousCredentials()
-        )
-
-    # It is an error to provide scopes and a transport instance.
- transport = transports.AssetServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = AssetServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.AssetServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = AssetServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.AssetServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.AssetServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.AssetServiceGrpcTransport, - transports.AssetServiceGrpcAsyncIOTransport, - transports.AssetServiceRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = AssetServiceClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_assets_empty_call_grpc(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: - call.return_value = asset_service.ListAssetsResponse() - client.list_assets(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.ListAssetsRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = AssetServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_assets_empty_call_grpc_asyncio(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: - # Designate an appropriate return value for the call. 
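-        # grpc_helpers_async.FakeUnaryUnaryCall wraps the response in an
-        # awaitable, mimicking the call object an async gRPC stub returns.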
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListAssetsResponse( - next_page_token='next_page_token_value', - )) - await client.list_assets(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.ListAssetsRequest() - - assert args[0] == request_msg - - -def test_transport_kind_rest(): - transport = AssetServiceClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" - - -def test_list_assets_rest_bad_request(request_type=asset_service.ListAssetsRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_assets(request) - - -@pytest.mark.parametrize("request_type", [ - asset_service.ListAssetsRequest, - dict, -]) -def test_list_assets_rest_call_success(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.ListAssetsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = asset_service.ListAssetsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_assets(request) - - # Establish that the response is the type that we expect. 
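-    # list_assets returns a pager rather than the raw response; attribute
-    # access such as next_page_token is delegated to the underlying message.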
- assert isinstance(response, pagers.ListAssetsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_assets_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_assets") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_assets_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_list_assets") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = asset_service.ListAssetsRequest.pb(asset_service.ListAssetsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = asset_service.ListAssetsResponse.to_json(asset_service.ListAssetsResponse()) - req.return_value.content = return_value - - request = asset_service.ListAssetsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.ListAssetsResponse() - post_with_metadata.return_value = asset_service.ListAssetsResponse(), metadata - - client.list_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - -def test_initialize_client_w_rest(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_assets_empty_call_rest(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: - client.list_assets(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = asset_service.ListAssetsRequest() - - assert args[0] == request_msg - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.AssetServiceGrpcTransport, - ) - -def test_asset_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.AssetServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_asset_service_base_transport(): - # Instantiate the base transport. 
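-    # __init__ is patched to a no-op so the abstract base transport can be
-    # constructed directly; the test then checks that every method stub
-    # raises NotImplementedError.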
- with mock.patch('google.cloud.asset_v1p5beta1.services.asset_service.transports.AssetServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.AssetServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'list_assets', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_asset_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.asset_v1p5beta1.services.asset_service.transports.AssetServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.AssetServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_asset_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.asset_v1p5beta1.services.asset_service.transports.AssetServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.AssetServiceTransport() - adc.assert_called_once() - - -def test_asset_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - AssetServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.AssetServiceGrpcTransport, - transports.AssetServiceGrpcAsyncIOTransport, - ], -) -def test_asset_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
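-    # Patching google.auth.default avoids a real ADC lookup; the test only
-    # verifies the arguments the transport forwards to it.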
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.AssetServiceGrpcTransport, - transports.AssetServiceGrpcAsyncIOTransport, - transports.AssetServiceRestTransport, - ], -) -def test_asset_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.AssetServiceGrpcTransport, grpc_helpers), - (transports.AssetServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_asset_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "cloudasset.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="cloudasset.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.AssetServiceGrpcTransport, transports.AssetServiceGrpcAsyncIOTransport]) -def test_asset_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
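-    # grpc.ssl_channel_credentials is patched so the test can assert that the
-    # certificate and key produced by the callback are forwarded verbatim.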
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_asset_service_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.AssetServiceRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_asset_service_host_no_port(transport_name): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='cloudasset.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'cloudasset.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://cloudasset.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_asset_service_host_with_port(transport_name): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='cloudasset.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'cloudasset.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://cloudasset.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_asset_service_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = AssetServiceClient( - credentials=creds1, - transport=transport_name, - ) - client2 = AssetServiceClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.list_assets._session - session2 = client2.transport.list_assets._session - assert session1 != session2 -def test_asset_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.AssetServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_asset_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.AssetServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.AssetServiceGrpcTransport, transports.AssetServiceGrpcAsyncIOTransport]) -def test_asset_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.AssetServiceGrpcTransport, transports.AssetServiceGrpcAsyncIOTransport]) -def test_asset_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_access_level_path(): - access_policy = "squid" - access_level = "clam" - expected = "accessPolicies/{access_policy}/accessLevels/{access_level}".format(access_policy=access_policy, access_level=access_level, ) - actual = AssetServiceClient.access_level_path(access_policy, access_level) - assert expected == actual - - -def test_parse_access_level_path(): - expected = { - "access_policy": "whelk", - "access_level": "octopus", - } - path = AssetServiceClient.access_level_path(**expected) - - # Check that the path construction is reversible. 
- actual = AssetServiceClient.parse_access_level_path(path) - assert expected == actual - -def test_access_policy_path(): - access_policy = "oyster" - expected = "accessPolicies/{access_policy}".format(access_policy=access_policy, ) - actual = AssetServiceClient.access_policy_path(access_policy) - assert expected == actual - - -def test_parse_access_policy_path(): - expected = { - "access_policy": "nudibranch", - } - path = AssetServiceClient.access_policy_path(**expected) - - # Check that the path construction is reversible. - actual = AssetServiceClient.parse_access_policy_path(path) - assert expected == actual - -def test_asset_path(): - expected = "*".format() - actual = AssetServiceClient.asset_path() - assert expected == actual - - -def test_parse_asset_path(): - expected = { - } - path = AssetServiceClient.asset_path(**expected) - - # Check that the path construction is reversible. - actual = AssetServiceClient.parse_asset_path(path) - assert expected == actual - -def test_service_perimeter_path(): - access_policy = "cuttlefish" - service_perimeter = "mussel" - expected = "accessPolicies/{access_policy}/servicePerimeters/{service_perimeter}".format(access_policy=access_policy, service_perimeter=service_perimeter, ) - actual = AssetServiceClient.service_perimeter_path(access_policy, service_perimeter) - assert expected == actual - - -def test_parse_service_perimeter_path(): - expected = { - "access_policy": "winkle", - "service_perimeter": "nautilus", - } - path = AssetServiceClient.service_perimeter_path(**expected) - - # Check that the path construction is reversible. - actual = AssetServiceClient.parse_service_perimeter_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "scallop" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = AssetServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "abalone", - } - path = AssetServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = AssetServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "squid" - expected = "folders/{folder}".format(folder=folder, ) - actual = AssetServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "clam", - } - path = AssetServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = AssetServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "whelk" - expected = "organizations/{organization}".format(organization=organization, ) - actual = AssetServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "octopus", - } - path = AssetServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. 
- actual = AssetServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "oyster" - expected = "projects/{project}".format(project=project, ) - actual = AssetServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "nudibranch", - } - path = AssetServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = AssetServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "cuttlefish" - location = "mussel" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = AssetServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "winkle", - "location": "nautilus", - } - path = AssetServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = AssetServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.AssetServiceTransport, '_prep_wrapped_messages') as prep: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.AssetServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = AssetServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_transport_close_grpc(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close_rest(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (AssetServiceClient, transports.AssetServiceGrpcTransport), - (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/.coveragerc b/owl-bot-staging/google-cloud-assured-workloads/v1/.coveragerc deleted file mode 100644 index 74d92878ed60..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/assuredworkloads/__init__.py - google/cloud/assuredworkloads/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/.flake8 b/owl-bot-staging/google-cloud-assured-workloads/v1/.flake8 deleted file mode 100644 index 29227d4cf419..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. 
-    **/.nox/**
-    __pycache__,
-    .git,
-    *.pyc,
-    conf.py
diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/MANIFEST.in b/owl-bot-staging/google-cloud-assured-workloads/v1/MANIFEST.in
deleted file mode 100644
index d24cce81d29a..000000000000
--- a/owl-bot-staging/google-cloud-assured-workloads/v1/MANIFEST.in
+++ /dev/null
@@ -1,2 +0,0 @@
-recursive-include google/cloud/assuredworkloads *.py
-recursive-include google/cloud/assuredworkloads_v1 *.py
diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/README.rst b/owl-bot-staging/google-cloud-assured-workloads/v1/README.rst
deleted file mode 100644
index db9473b13ad5..000000000000
--- a/owl-bot-staging/google-cloud-assured-workloads/v1/README.rst
+++ /dev/null
@@ -1,154 +0,0 @@
-Python Client for Google Cloud Assuredworkloads API
-===================================================
-
-Quick Start
------------
-
-In order to use this library, you first need to go through the following steps:
-
-1. `Select or create a Cloud Platform project.`_
-2. `Enable billing for your project.`_
-3. Enable the Google Cloud Assuredworkloads API.
-4. `Setup Authentication.`_
-
-.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
-.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
-.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
-
-Installation
-~~~~~~~~~~~~
-
-Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
-create isolated Python environments. The basic problem it addresses is one of
-dependencies and versions, and indirectly permissions.
-
-With `virtualenv`_, it's possible to install this library without needing system
-install permissions, and without clashing with the installed system
-dependencies.
-
-.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
-
-
-Mac/Linux
-^^^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    source <your-env>/bin/activate
-    <your-env>/bin/pip install /path/to/library
-
-
-Windows
-^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    <your-env>\Scripts\activate
-    <your-env>\Scripts\pip.exe install \path\to\library
-
-
-Logging
--------
-
-This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes.
-Note the following:
-
-#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging.
-#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**.
-#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below.
-
-
-Simple, environment-based configuration
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google
-logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged
-messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging
-event.
-
-A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log.
-
-- Valid logging scopes: :code:`google`, :code:`google.cloud.assuredworkloads.v1`, :code:`google.api`, :code:`google.auth`, etc.
-- Invalid logging scopes: :code:`foo`, :code:`123`, etc.
-
-**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers.
-
-
-Examples
-^^^^^^^^
-
-- Enabling the default handler for all Google-based loggers
-
-.. code-block:: console
-
-    export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google
-
-- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`):
-
-.. code-block:: console
-
-    export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1
-
-
-Advanced, code-based configuration
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-You can also configure a valid logging scope using Python's standard `logging` mechanism.
-
-
-Examples
-^^^^^^^^
-
-- Configuring a handler for all Google-based loggers
-
-.. code-block:: python
-
-    import logging
-
-    base_logger = logging.getLogger("google")
-    base_logger.addHandler(logging.StreamHandler())
-    base_logger.setLevel(logging.DEBUG)
-
-- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`):
-
-.. code-block:: python
-
-    import logging
-
-    base_logger = logging.getLogger("google.cloud.library_v1")
-    base_logger.addHandler(logging.StreamHandler())
-    base_logger.setLevel(logging.DEBUG)
-
-
-Logging details
-~~~~~~~~~~~~~~~
-
-#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root
-   logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set
-   :code:`logging.getLogger("google").propagate = True` in your code.
-#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for
-   one library, but decide you need to also set up environment-based logging configuration for another library.
-
-   #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual
-      if the code-based configuration gets applied first.
-
-#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get
-   executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured.
-   (This is the reason for 2.i. above.)
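-
-For example, a minimal sketch combining the two pieces above (a handler attached in code, plus propagation to the root logger; the default :code:`google` scope is assumed) could look like this:
-
-.. code-block:: python
-
-    import logging
-
-    # Attach a handler and a verbosity level to the Google-level logger.
-    base_logger = logging.getLogger("google")
-    base_logger.addHandler(logging.StreamHandler())
-    base_logger.setLevel(logging.DEBUG)
-
-    # Opt in to propagation so records also reach any handlers that are
-    # configured on the root logger.
-    base_logger.propagate = True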
diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/docs/_static/custom.css b/owl-bot-staging/google-cloud-assured-workloads/v1/docs/_static/custom.css deleted file mode 100644 index 06423be0b592..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/docs/_static/custom.css +++ /dev/null @@ -1,3 +0,0 @@ -dl.field-list > dt { - min-width: 100px -} diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/docs/assuredworkloads_v1/assured_workloads_service.rst b/owl-bot-staging/google-cloud-assured-workloads/v1/docs/assuredworkloads_v1/assured_workloads_service.rst deleted file mode 100644 index dd32bca5e24d..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/docs/assuredworkloads_v1/assured_workloads_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -AssuredWorkloadsService ------------------------------------------ - -.. automodule:: google.cloud.assuredworkloads_v1.services.assured_workloads_service - :members: - :inherited-members: - -.. automodule:: google.cloud.assuredworkloads_v1.services.assured_workloads_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/docs/assuredworkloads_v1/services_.rst b/owl-bot-staging/google-cloud-assured-workloads/v1/docs/assuredworkloads_v1/services_.rst deleted file mode 100644 index 18a2c7c84d8e..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/docs/assuredworkloads_v1/services_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Assuredworkloads v1 API -================================================= -.. toctree:: - :maxdepth: 2 - - assured_workloads_service diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/docs/assuredworkloads_v1/types_.rst b/owl-bot-staging/google-cloud-assured-workloads/v1/docs/assuredworkloads_v1/types_.rst deleted file mode 100644 index 5ecad0d4d824..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/docs/assuredworkloads_v1/types_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Assuredworkloads v1 API -============================================== - -.. automodule:: google.cloud.assuredworkloads_v1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/docs/conf.py b/owl-bot-staging/google-cloud-assured-workloads/v1/docs/conf.py deleted file mode 100644 index 470937920a65..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-assured-workloads documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. 
-
-import sys
-import os
-import shlex
-
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
-sys.path.insert(0, os.path.abspath(".."))
-
-__version__ = "0.1.0"
-
-# -- General configuration ------------------------------------------------
-
-# If your documentation needs a minimal Sphinx version, state it here.
-needs_sphinx = "4.0.1"
-
-# Add any Sphinx extension module names here, as strings. They can be
-# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
-# ones.
-extensions = [
-    "sphinx.ext.autodoc",
-    "sphinx.ext.autosummary",
-    "sphinx.ext.intersphinx",
-    "sphinx.ext.coverage",
-    "sphinx.ext.napoleon",
-    "sphinx.ext.todo",
-    "sphinx.ext.viewcode",
-]
-
-# autodoc/autosummary flags
-autoclass_content = "both"
-autodoc_default_flags = ["members"]
-autosummary_generate = True
-
-
-# Add any paths that contain templates here, relative to this directory.
-templates_path = ["_templates"]
-
-# Allow markdown includes (so releases.md can include CHANGELOG.md)
-# http://www.sphinx-doc.org/en/master/markdown.html
-source_parsers = {".md": "recommonmark.parser.CommonMarkParser"}
-
-# The suffix(es) of source filenames.
-# You can specify multiple suffixes as a list of strings:
-source_suffix = [".rst", ".md"]
-
-# The encoding of source files.
-# source_encoding = 'utf-8-sig'
-
-# The root toctree document.
-root_doc = "index"
-
-# General information about the project.
-project = u"google-cloud-assured-workloads"
-copyright = u"2023, Google, LLC"
-author = u"Google APIs"  # TODO: autogenerate this bit
-
-# The version info for the project you're documenting, acts as replacement for
-# |version| and |release|, also used in various other places throughout the
-# built documents.
-#
-# The full version, including alpha/beta/rc tags.
-release = __version__
-# The short X.Y version.
-version = ".".join(release.split(".")[0:2])
-
-# The language for content autogenerated by Sphinx. Refer to documentation
-# for a list of supported languages.
-#
-# This is also used if you do content translation via gettext catalogs.
-# Usually you set "language" from the command line for these cases.
-language = 'en'
-
-# There are two options for replacing |today|: either, you set today to some
-# non-false value, then it is used:
-# today = ''
-# Else, today_fmt is used as the format for a strftime call.
-# today_fmt = '%B %d, %Y'
-
-# List of patterns, relative to source directory, that match files and
-# directories to ignore when looking for source files.
-exclude_patterns = ["_build"]
-
-# The reST default role (used for this markup: `text`) to use for all
-# documents.
-# default_role = None
-
-# If true, '()' will be appended to :func: etc. cross-reference text.
-# add_function_parentheses = True
-
-# If true, the current module name will be prepended to all description
-# unit titles (such as .. function::).
-# add_module_names = True
-
-# If true, sectionauthor and moduleauthor directives will be shown in the
-# output. They are ignored by default.
-# show_authors = False
-
-# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = "sphinx"
-
-# A list of ignored prefixes for module index sorting.
-# modindex_common_prefix = []
-
-# If true, keep warnings as "system message" paragraphs in the built documents.
-# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. 
-# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-assured-workloads-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-assured-workloads.tex", - u"google-cloud-assured-workloads Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-assured-workloads", - u"Google Cloud Assuredworkloads Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-assured-workloads", - u"google-cloud-assured-workloads Documentation", - author, - "google-cloud-assured-workloads", - "GAPIC library for Google Cloud Assuredworkloads API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. 
-# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/docs/index.rst b/owl-bot-staging/google-cloud-assured-workloads/v1/docs/index.rst deleted file mode 100644 index 342ebcf1eace..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - assuredworkloads_v1/services_ - assuredworkloads_v1/types_ diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads/__init__.py b/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads/__init__.py deleted file mode 100644 index 2dab5c973b0b..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads/__init__.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.assuredworkloads import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.assuredworkloads_v1.services.assured_workloads_service.client import AssuredWorkloadsServiceClient -from google.cloud.assuredworkloads_v1.services.assured_workloads_service.async_client import AssuredWorkloadsServiceAsyncClient - -from google.cloud.assuredworkloads_v1.types.assuredworkloads import AcknowledgeViolationRequest -from google.cloud.assuredworkloads_v1.types.assuredworkloads import AcknowledgeViolationResponse -from google.cloud.assuredworkloads_v1.types.assuredworkloads import CreateWorkloadOperationMetadata -from google.cloud.assuredworkloads_v1.types.assuredworkloads import CreateWorkloadRequest -from google.cloud.assuredworkloads_v1.types.assuredworkloads import DeleteWorkloadRequest -from google.cloud.assuredworkloads_v1.types.assuredworkloads import GetViolationRequest -from google.cloud.assuredworkloads_v1.types.assuredworkloads import GetWorkloadRequest -from google.cloud.assuredworkloads_v1.types.assuredworkloads import ListViolationsRequest -from google.cloud.assuredworkloads_v1.types.assuredworkloads import ListViolationsResponse -from google.cloud.assuredworkloads_v1.types.assuredworkloads import ListWorkloadsRequest -from google.cloud.assuredworkloads_v1.types.assuredworkloads import ListWorkloadsResponse -from google.cloud.assuredworkloads_v1.types.assuredworkloads import RestrictAllowedResourcesRequest -from google.cloud.assuredworkloads_v1.types.assuredworkloads import RestrictAllowedResourcesResponse -from google.cloud.assuredworkloads_v1.types.assuredworkloads import TimeWindow -from google.cloud.assuredworkloads_v1.types.assuredworkloads import UpdateWorkloadRequest -from google.cloud.assuredworkloads_v1.types.assuredworkloads import Violation -from google.cloud.assuredworkloads_v1.types.assuredworkloads import Workload - -__all__ = ('AssuredWorkloadsServiceClient', - 'AssuredWorkloadsServiceAsyncClient', - 'AcknowledgeViolationRequest', - 'AcknowledgeViolationResponse', - 'CreateWorkloadOperationMetadata', - 'CreateWorkloadRequest', - 'DeleteWorkloadRequest', - 'GetViolationRequest', - 'GetWorkloadRequest', - 'ListViolationsRequest', - 'ListViolationsResponse', - 'ListWorkloadsRequest', - 'ListWorkloadsResponse', - 'RestrictAllowedResourcesRequest', - 'RestrictAllowedResourcesResponse', - 'TimeWindow', - 'UpdateWorkloadRequest', - 'Violation', - 'Workload', -) diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads/gapic_version.py b/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads/py.typed b/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads/py.typed deleted file mode 100644 index 3762b50eeb3d..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-assured-workloads package uses inline types. diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/__init__.py b/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/__init__.py deleted file mode 100644 index bf6d7bec0987..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/__init__.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.assuredworkloads_v1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.assured_workloads_service import AssuredWorkloadsServiceClient -from .services.assured_workloads_service import AssuredWorkloadsServiceAsyncClient - -from .types.assuredworkloads import AcknowledgeViolationRequest -from .types.assuredworkloads import AcknowledgeViolationResponse -from .types.assuredworkloads import CreateWorkloadOperationMetadata -from .types.assuredworkloads import CreateWorkloadRequest -from .types.assuredworkloads import DeleteWorkloadRequest -from .types.assuredworkloads import GetViolationRequest -from .types.assuredworkloads import GetWorkloadRequest -from .types.assuredworkloads import ListViolationsRequest -from .types.assuredworkloads import ListViolationsResponse -from .types.assuredworkloads import ListWorkloadsRequest -from .types.assuredworkloads import ListWorkloadsResponse -from .types.assuredworkloads import RestrictAllowedResourcesRequest -from .types.assuredworkloads import RestrictAllowedResourcesResponse -from .types.assuredworkloads import TimeWindow -from .types.assuredworkloads import UpdateWorkloadRequest -from .types.assuredworkloads import Violation -from .types.assuredworkloads import Workload - -__all__ = ( - 'AssuredWorkloadsServiceAsyncClient', -'AcknowledgeViolationRequest', -'AcknowledgeViolationResponse', -'AssuredWorkloadsServiceClient', -'CreateWorkloadOperationMetadata', -'CreateWorkloadRequest', -'DeleteWorkloadRequest', -'GetViolationRequest', -'GetWorkloadRequest', -'ListViolationsRequest', -'ListViolationsResponse', -'ListWorkloadsRequest', -'ListWorkloadsResponse', -'RestrictAllowedResourcesRequest', -'RestrictAllowedResourcesResponse', -'TimeWindow', -'UpdateWorkloadRequest', -'Violation', -'Workload', -) diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/gapic_metadata.json 
b/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/gapic_metadata.json deleted file mode 100644 index 6cd64bb258dd..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/gapic_metadata.json +++ /dev/null @@ -1,163 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.assuredworkloads_v1", - "protoPackage": "google.cloud.assuredworkloads.v1", - "schema": "1.0", - "services": { - "AssuredWorkloadsService": { - "clients": { - "grpc": { - "libraryClient": "AssuredWorkloadsServiceClient", - "rpcs": { - "AcknowledgeViolation": { - "methods": [ - "acknowledge_violation" - ] - }, - "CreateWorkload": { - "methods": [ - "create_workload" - ] - }, - "DeleteWorkload": { - "methods": [ - "delete_workload" - ] - }, - "GetViolation": { - "methods": [ - "get_violation" - ] - }, - "GetWorkload": { - "methods": [ - "get_workload" - ] - }, - "ListViolations": { - "methods": [ - "list_violations" - ] - }, - "ListWorkloads": { - "methods": [ - "list_workloads" - ] - }, - "RestrictAllowedResources": { - "methods": [ - "restrict_allowed_resources" - ] - }, - "UpdateWorkload": { - "methods": [ - "update_workload" - ] - } - } - }, - "grpc-async": { - "libraryClient": "AssuredWorkloadsServiceAsyncClient", - "rpcs": { - "AcknowledgeViolation": { - "methods": [ - "acknowledge_violation" - ] - }, - "CreateWorkload": { - "methods": [ - "create_workload" - ] - }, - "DeleteWorkload": { - "methods": [ - "delete_workload" - ] - }, - "GetViolation": { - "methods": [ - "get_violation" - ] - }, - "GetWorkload": { - "methods": [ - "get_workload" - ] - }, - "ListViolations": { - "methods": [ - "list_violations" - ] - }, - "ListWorkloads": { - "methods": [ - "list_workloads" - ] - }, - "RestrictAllowedResources": { - "methods": [ - "restrict_allowed_resources" - ] - }, - "UpdateWorkload": { - "methods": [ - "update_workload" - ] - } - } - }, - "rest": { - "libraryClient": "AssuredWorkloadsServiceClient", - "rpcs": { - "AcknowledgeViolation": { - "methods": [ - "acknowledge_violation" - ] - }, - "CreateWorkload": { - "methods": [ - "create_workload" - ] - }, - "DeleteWorkload": { - "methods": [ - "delete_workload" - ] - }, - "GetViolation": { - "methods": [ - "get_violation" - ] - }, - "GetWorkload": { - "methods": [ - "get_workload" - ] - }, - "ListViolations": { - "methods": [ - "list_violations" - ] - }, - "ListWorkloads": { - "methods": [ - "list_workloads" - ] - }, - "RestrictAllowedResources": { - "methods": [ - "restrict_allowed_resources" - ] - }, - "UpdateWorkload": { - "methods": [ - "update_workload" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/gapic_version.py b/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/py.typed b/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/py.typed deleted file mode 100644 index 3762b50eeb3d..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-assured-workloads package uses inline types. diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/services/__init__.py b/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/services/__init__.py deleted file mode 100644 index 8f6cf068242c..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/services/assured_workloads_service/__init__.py b/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/services/assured_workloads_service/__init__.py deleted file mode 100644 index ba20e950ce7c..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/services/assured_workloads_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .client import AssuredWorkloadsServiceClient -from .async_client import AssuredWorkloadsServiceAsyncClient - -__all__ = ( - 'AssuredWorkloadsServiceClient', - 'AssuredWorkloadsServiceAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/services/assured_workloads_service/async_client.py b/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/services/assured_workloads_service/async_client.py deleted file mode 100644 index 07f65372d2cf..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/services/assured_workloads_service/async_client.py +++ /dev/null @@ -1,1349 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.assuredworkloads_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.assuredworkloads_v1.services.assured_workloads_service import pagers -from google.cloud.assuredworkloads_v1.types import assuredworkloads -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import AssuredWorkloadsServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import AssuredWorkloadsServiceGrpcAsyncIOTransport -from .client import AssuredWorkloadsServiceClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -class AssuredWorkloadsServiceAsyncClient: - """Service to manage AssuredWorkloads.""" - - _client: AssuredWorkloadsServiceClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
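    # Editor's sketch (illustrative, not part of the generated file): the
    # deprecated DEFAULT_ENDPOINT is a fixed host, while the endpoint template
    # is resolved against the configured universe domain, roughly:
    #
    #   _DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN="googleapis.com")
    #   # e.g. "assuredworkloads.googleapis.com" (host name assumed from the
    #   # service; the actual template lives on the synchronous client below)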
- DEFAULT_ENDPOINT = AssuredWorkloadsServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = AssuredWorkloadsServiceClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = AssuredWorkloadsServiceClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = AssuredWorkloadsServiceClient._DEFAULT_UNIVERSE - - violation_path = staticmethod(AssuredWorkloadsServiceClient.violation_path) - parse_violation_path = staticmethod(AssuredWorkloadsServiceClient.parse_violation_path) - workload_path = staticmethod(AssuredWorkloadsServiceClient.workload_path) - parse_workload_path = staticmethod(AssuredWorkloadsServiceClient.parse_workload_path) - common_billing_account_path = staticmethod(AssuredWorkloadsServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(AssuredWorkloadsServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(AssuredWorkloadsServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(AssuredWorkloadsServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(AssuredWorkloadsServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(AssuredWorkloadsServiceClient.parse_common_organization_path) - common_project_path = staticmethod(AssuredWorkloadsServiceClient.common_project_path) - parse_common_project_path = staticmethod(AssuredWorkloadsServiceClient.parse_common_project_path) - common_location_path = staticmethod(AssuredWorkloadsServiceClient.common_location_path) - parse_common_location_path = staticmethod(AssuredWorkloadsServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - AssuredWorkloadsServiceAsyncClient: The constructed client. - """ - return AssuredWorkloadsServiceClient.from_service_account_info.__func__(AssuredWorkloadsServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - AssuredWorkloadsServiceAsyncClient: The constructed client. - """ - return AssuredWorkloadsServiceClient.from_service_account_file.__func__(AssuredWorkloadsServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` is provided, use the provided one.
- (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return AssuredWorkloadsServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> AssuredWorkloadsServiceTransport: - """Returns the transport used by the client instance. - - Returns: - AssuredWorkloadsServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = AssuredWorkloadsServiceClient.get_transport_class - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, AssuredWorkloadsServiceTransport, Callable[..., AssuredWorkloadsServiceTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the assured workloads service async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,AssuredWorkloadsServiceTransport,Callable[..., AssuredWorkloadsServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the AssuredWorkloadsServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which can have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2.
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = AssuredWorkloadsServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient`.", - extra = { - "serviceName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { - "serviceName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService", - "credentialsType": None, - } - ) - - async def create_workload(self, - request: Optional[Union[assuredworkloads.CreateWorkloadRequest, dict]] = None, - *, - parent: Optional[str] = None, - workload: Optional[assuredworkloads.Workload] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates an Assured Workload. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import assuredworkloads_v1 - - async def sample_create_workload(): - # Create a client - client = assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient() - - # Initialize request argument(s) - workload = assuredworkloads_v1.Workload() - workload.display_name = "display_name_value" - workload.compliance_regime = "ASSURED_WORKLOADS_FOR_PARTNERS" - - request = assuredworkloads_v1.CreateWorkloadRequest( - parent="parent_value", - workload=workload, - ) - - # Make the request - operation = client.create_workload(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.assuredworkloads_v1.types.CreateWorkloadRequest, dict]]): - The request object. Request for creating a workload. - parent (:class:`str`): - Required. The resource name of the new Workload's - parent. Must be of the form - ``organizations/{org_id}/locations/{location_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - workload (:class:`google.cloud.assuredworkloads_v1.types.Workload`): - Required. Assured Workload to create - This corresponds to the ``workload`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.assuredworkloads_v1.types.Workload` A Workload object for managing highly regulated workloads of cloud - customers. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, workload] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, assuredworkloads.CreateWorkloadRequest): - request = assuredworkloads.CreateWorkloadRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if workload is not None: - request.workload = workload - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_workload] - - # Certain fields should be provided within the metadata header; - # add these here. 
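        # Editor's sketch (illustrative, not generated code): with
        # request.parent = "organizations/123/locations/us-central1", the
        # routing header built below is equivalent to appending
        #
        #   ("x-goog-request-params", "parent=organizations/123/locations/us-central1")
        #
        # to the call metadata, which the backend uses to route the request.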
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - assuredworkloads.Workload, - metadata_type=assuredworkloads.CreateWorkloadOperationMetadata, - ) - - # Done; return the response. - return response - - async def update_workload(self, - request: Optional[Union[assuredworkloads.UpdateWorkloadRequest, dict]] = None, - *, - workload: Optional[assuredworkloads.Workload] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> assuredworkloads.Workload: - r"""Updates an existing workload. Currently allows updating of - workload display_name and labels. For force updates don't set - etag field in the Workload. Only one update operation per - workload can be in progress. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import assuredworkloads_v1 - - async def sample_update_workload(): - # Create a client - client = assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient() - - # Initialize request argument(s) - workload = assuredworkloads_v1.Workload() - workload.display_name = "display_name_value" - workload.compliance_regime = "ASSURED_WORKLOADS_FOR_PARTNERS" - - request = assuredworkloads_v1.UpdateWorkloadRequest( - workload=workload, - ) - - # Make the request - response = await client.update_workload(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.assuredworkloads_v1.types.UpdateWorkloadRequest, dict]]): - The request object. Request for Updating a workload. - workload (:class:`google.cloud.assuredworkloads_v1.types.Workload`): - Required. The workload to update. The workload's - ``name`` field is used to identify the workload to be - updated. Format: - organizations/{org_id}/locations/{location_id}/workloads/{workload_id} - - This corresponds to the ``workload`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. The list of fields to be - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
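
        For example (an illustrative sketch, not from the original file), a
        caller could update only the display name by sending a single-path
        field mask::

            from google.protobuf import field_mask_pb2

            update_mask = field_mask_pb2.FieldMask(paths=["display_name"])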
- - Returns: - google.cloud.assuredworkloads_v1.types.Workload: - A Workload object for managing highly - regulated workloads of cloud customers. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [workload, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, assuredworkloads.UpdateWorkloadRequest): - request = assuredworkloads.UpdateWorkloadRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if workload is not None: - request.workload = workload - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_workload] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("workload.name", request.workload.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def restrict_allowed_resources(self, - request: Optional[Union[assuredworkloads.RestrictAllowedResourcesRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> assuredworkloads.RestrictAllowedResourcesResponse: - r"""Restrict the list of resources allowed in the - Workload environment. The current list of allowed - products can be found at - https://cloud.google.com/assured-workloads/docs/supported-products - In addition to assuredworkloads.workload.update - permission, the user should also have - orgpolicy.policy.set permission on the folder resource - to use this functionality. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import assuredworkloads_v1 - - async def sample_restrict_allowed_resources(): - # Create a client - client = assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient() - - # Initialize request argument(s) - request = assuredworkloads_v1.RestrictAllowedResourcesRequest( - name="name_value", - restriction_type="ALLOW_COMPLIANT_RESOURCES", - ) - - # Make the request - response = await client.restrict_allowed_resources(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.assuredworkloads_v1.types.RestrictAllowedResourcesRequest, dict]]): - The request object. Request for restricting list of - available resources in Workload - environment. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.assuredworkloads_v1.types.RestrictAllowedResourcesResponse: - Response for restricting the list of - allowed resources. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, assuredworkloads.RestrictAllowedResourcesRequest): - request = assuredworkloads.RestrictAllowedResourcesRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.restrict_allowed_resources] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_workload(self, - request: Optional[Union[assuredworkloads.DeleteWorkloadRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes the workload. Make sure that workload's direct children - are already in a deleted state, otherwise the request will fail - with a FAILED_PRECONDITION error. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import assuredworkloads_v1 - - async def sample_delete_workload(): - # Create a client - client = assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient() - - # Initialize request argument(s) - request = assuredworkloads_v1.DeleteWorkloadRequest( - name="name_value", - ) - - # Make the request - await client.delete_workload(request=request) - - Args: - request (Optional[Union[google.cloud.assuredworkloads_v1.types.DeleteWorkloadRequest, dict]]): - The request object. Request for deleting a Workload. - name (:class:`str`): - Required. The ``name`` field is used to identify the - workload. Format: - organizations/{org_id}/locations/{location_id}/workloads/{workload_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, assuredworkloads.DeleteWorkloadRequest): - request = assuredworkloads.DeleteWorkloadRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_workload] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def get_workload(self, - request: Optional[Union[assuredworkloads.GetWorkloadRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> assuredworkloads.Workload: - r"""Gets Assured Workload associated with a CRM Node - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import assuredworkloads_v1 - - async def sample_get_workload(): - # Create a client - client = assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient() - - # Initialize request argument(s) - request = assuredworkloads_v1.GetWorkloadRequest( - name="name_value", - ) - - # Make the request - response = await client.get_workload(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.assuredworkloads_v1.types.GetWorkloadRequest, dict]]): - The request object. Request for fetching a workload. - name (:class:`str`): - Required. The resource name of the Workload to fetch. - This is the workload's relative path in the API, - formatted as - "organizations/{organization_id}/locations/{location_id}/workloads/{workload_id}". - For example, - "organizations/123/locations/us-east1/workloads/assured-workload-1". - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.assuredworkloads_v1.types.Workload: - A Workload object for managing highly - regulated workloads of cloud customers. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, assuredworkloads.GetWorkloadRequest): - request = assuredworkloads.GetWorkloadRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_workload] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def list_workloads(self, - request: Optional[Union[assuredworkloads.ListWorkloadsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListWorkloadsAsyncPager: - r"""Lists Assured Workloads under a CRM Node. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import assuredworkloads_v1 - - async def sample_list_workloads(): - # Create a client - client = assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient() - - # Initialize request argument(s) - request = assuredworkloads_v1.ListWorkloadsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_workloads(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.assuredworkloads_v1.types.ListWorkloadsRequest, dict]]): - The request object. Request for fetching workloads in an - organization. - parent (:class:`str`): - Required. Parent Resource to list workloads from. Must - be of the form - ``organizations/{org_id}/locations/{location}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.assuredworkloads_v1.services.assured_workloads_service.pagers.ListWorkloadsAsyncPager: - Response of ListWorkloads endpoint. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, assuredworkloads.ListWorkloadsRequest): - request = assuredworkloads.ListWorkloadsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
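        # Editor's sketch (assumed behavior, not generated code): the wrapped
        # method is roughly equivalent to
        #
        #   gapic_v1.method_async.wrap_method(
        #       self._client._transport.list_workloads,
        #       default_timeout=60.0,  # illustrative default only
        #       client_info=DEFAULT_CLIENT_INFO,
        #   )
        #
        # so passing gapic_v1.method.DEFAULT for retry/timeout picks up these
        # transport-level defaults.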
- rpc = self._client._transport._wrapped_methods[self._client._transport.list_workloads] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListWorkloadsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_violations(self, - request: Optional[Union[assuredworkloads.ListViolationsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListViolationsAsyncPager: - r"""Lists the Violations in the AssuredWorkload Environment. Callers - may also choose to read across multiple Workloads as per - `AIP-159 <https://google.aip.dev/159>`__ by using '-' (the - hyphen or dash character) as a wildcard character instead of - workload-id in the parent. Format - ``organizations/{org_id}/locations/{location}/workloads/-`` - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import assuredworkloads_v1 - - async def sample_list_violations(): - # Create a client - client = assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient() - - # Initialize request argument(s) - request = assuredworkloads_v1.ListViolationsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_violations(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.assuredworkloads_v1.types.ListViolationsRequest, dict]]): - The request object. Request for fetching violations in an - organization. - parent (:class:`str`): - Required. The Workload name. Format - ``organizations/{org_id}/locations/{location}/workloads/{workload}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.assuredworkloads_v1.services.assured_workloads_service.pagers.ListViolationsAsyncPager: - Response of ListViolations endpoint. - - Iterating over this object will yield - results and resolve additional pages - automatically.
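
        For example (illustrative), a caller can read violations across every
        workload in a location with a wildcard parent::

            request = assuredworkloads_v1.ListViolationsRequest(
                parent="organizations/123/locations/us-central1/workloads/-",
            )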
- - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, assuredworkloads.ListViolationsRequest): - request = assuredworkloads.ListViolationsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_violations] - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListViolationsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_violation(self, - request: Optional[Union[assuredworkloads.GetViolationRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> assuredworkloads.Violation: - r"""Retrieves Assured Workload Violation based on ID. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import assuredworkloads_v1 - - async def sample_get_violation(): - # Create a client - client = assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient() - - # Initialize request argument(s) - request = assuredworkloads_v1.GetViolationRequest( - name="name_value", - ) - - # Make the request - response = await client.get_violation(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.assuredworkloads_v1.types.GetViolationRequest, dict]]): - The request object. Request for fetching a Workload - Violation. - name (:class:`str`): - Required. The resource name of the - Violation to fetch (ie. Violation.name). - Format: - - organizations/{organization}/locations/{location}/workloads/{workload}/violations/{violation} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.assuredworkloads_v1.types.Violation: - Workload monitoring Violation. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, assuredworkloads.GetViolationRequest): - request = assuredworkloads.GetViolationRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_violation] - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def acknowledge_violation(self, - request: Optional[Union[assuredworkloads.AcknowledgeViolationRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> assuredworkloads.AcknowledgeViolationResponse: - r"""Acknowledges an existing violation. By acknowledging - a violation, users acknowledge the existence of a - compliance violation in their workload and decide to - ignore it due to a valid business justification. - Acknowledgement is a permanent operation and it cannot - be reverted. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import assuredworkloads_v1 - - async def sample_acknowledge_violation(): - # Create a client - client = assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient() - - # Initialize request argument(s) - request = assuredworkloads_v1.AcknowledgeViolationRequest( - name="name_value", - comment="comment_value", - ) - - # Make the request - response = await client.acknowledge_violation(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.assuredworkloads_v1.types.AcknowledgeViolationRequest, dict]]): - The request object. Request for acknowledging the - violation. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried.
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.assuredworkloads_v1.types.AcknowledgeViolationResponse: - Response for violation - acknowledgement - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, assuredworkloads.AcknowledgeViolationRequest): - request = assuredworkloads.AcknowledgeViolationRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.acknowledge_violation] - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. 
- - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def __aenter__(self) -> "AssuredWorkloadsServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "AssuredWorkloadsServiceAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/services/assured_workloads_service/client.py b/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/services/assured_workloads_service/client.py deleted file mode 100644 index f6cafd36355e..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/services/assured_workloads_service/client.py +++ /dev/null @@ -1,1723 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
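Illustrative sketch (editorial addition, not part of the generated file): the async client defined above is typically consumed as an async context manager, which closes the underlying transport on exit (see ``__aenter__``/``__aexit__``). The parent value below is a placeholder:

.. code-block:: python

    import asyncio

    from google.cloud import assuredworkloads_v1

    async def main():
        async with assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient() as client:
            # The ListViolationsAsyncPager returned here resolves further
            # pages transparently during `async for` iteration.
            pager = await client.list_violations(
                parent="organizations/123/locations/us-central1/workloads/wl-1"
            )
            async for violation in pager:
                print(violation.name)

    asyncio.run(main())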
-# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.assuredworkloads_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.assuredworkloads_v1.services.assured_workloads_service import pagers -from google.cloud.assuredworkloads_v1.types import assuredworkloads -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import AssuredWorkloadsServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import AssuredWorkloadsServiceGrpcTransport -from .transports.grpc_asyncio import AssuredWorkloadsServiceGrpcAsyncIOTransport -from .transports.rest import AssuredWorkloadsServiceRestTransport - - -class AssuredWorkloadsServiceClientMeta(type): - """Metaclass for the AssuredWorkloadsService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[AssuredWorkloadsServiceTransport]] - _transport_registry["grpc"] = AssuredWorkloadsServiceGrpcTransport - _transport_registry["grpc_asyncio"] = AssuredWorkloadsServiceGrpcAsyncIOTransport - _transport_registry["rest"] = AssuredWorkloadsServiceRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[AssuredWorkloadsServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class AssuredWorkloadsServiceClient(metaclass=AssuredWorkloadsServiceClientMeta): - """Service to manage AssuredWorkloads.""" - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. 
-
-        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
-        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
-        Args:
-            api_endpoint (Optional[str]): the api endpoint to convert.
-        Returns:
-            str: converted mTLS api endpoint.
-        """
-        if not api_endpoint:
-            return api_endpoint
-
-        mtls_endpoint_re = re.compile(
-            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
-        )
-
-        m = mtls_endpoint_re.match(api_endpoint)
-        name, mtls, sandbox, googledomain = m.groups()
-        if mtls or not googledomain:
-            return api_endpoint
-
-        if sandbox:
-            return api_endpoint.replace(
-                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
-            )
-
-        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
-
-    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
-    DEFAULT_ENDPOINT = "assuredworkloads.googleapis.com"
-    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
-        DEFAULT_ENDPOINT
-    )
-
-    _DEFAULT_ENDPOINT_TEMPLATE = "assuredworkloads.{UNIVERSE_DOMAIN}"
-    _DEFAULT_UNIVERSE = "googleapis.com"
-
-    @classmethod
-    def from_service_account_info(cls, info: dict, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            info.
-
-        Args:
-            info (dict): The service account private key info.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            AssuredWorkloadsServiceClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_info(info)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    @classmethod
-    def from_service_account_file(cls, filename: str, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            file.
-
-        Args:
-            filename (str): The path to the service account private key json
-                file.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            AssuredWorkloadsServiceClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_file(
-            filename)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    from_service_account_json = from_service_account_file
-
-    @property
-    def transport(self) -> AssuredWorkloadsServiceTransport:
-        """Returns the transport used by the client instance.
-
-        Returns:
-            AssuredWorkloadsServiceTransport: The transport used by the client
-                instance.
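        For example (illustrative sketch, not generated code; the printed
        class reflects the default transport choice, gRPC, per the
        metaclass registry above):

        .. code-block:: python

            client = AssuredWorkloadsServiceClient()
            print(type(client.transport).__name__)
            # AssuredWorkloadsServiceGrpcTransport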
-        """
-        return self._transport
-
-    @staticmethod
-    def violation_path(organization: str,location: str,workload: str,violation: str,) -> str:
-        """Returns a fully-qualified violation string."""
-        return "organizations/{organization}/locations/{location}/workloads/{workload}/violations/{violation}".format(organization=organization, location=location, workload=workload, violation=violation, )
-
-    @staticmethod
-    def parse_violation_path(path: str) -> Dict[str,str]:
-        """Parses a violation path into its component segments."""
-        m = re.match(r"^organizations/(?P<organization>.+?)/locations/(?P<location>.+?)/workloads/(?P<workload>.+?)/violations/(?P<violation>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def workload_path(organization: str,location: str,workload: str,) -> str:
-        """Returns a fully-qualified workload string."""
-        return "organizations/{organization}/locations/{location}/workloads/{workload}".format(organization=organization, location=location, workload=workload, )
-
-    @staticmethod
-    def parse_workload_path(path: str) -> Dict[str,str]:
-        """Parses a workload path into its component segments."""
-        m = re.match(r"^organizations/(?P<organization>.+?)/locations/(?P<location>.+?)/workloads/(?P<workload>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_billing_account_path(billing_account: str, ) -> str:
-        """Returns a fully-qualified billing_account string."""
-        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
-
-    @staticmethod
-    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
-        """Parse a billing_account path into its component segments."""
-        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_folder_path(folder: str, ) -> str:
-        """Returns a fully-qualified folder string."""
-        return "folders/{folder}".format(folder=folder, )
-
-    @staticmethod
-    def parse_common_folder_path(path: str) -> Dict[str,str]:
-        """Parse a folder path into its component segments."""
-        m = re.match(r"^folders/(?P<folder>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_organization_path(organization: str, ) -> str:
-        """Returns a fully-qualified organization string."""
-        return "organizations/{organization}".format(organization=organization, )
-
-    @staticmethod
-    def parse_common_organization_path(path: str) -> Dict[str,str]:
-        """Parse an organization path into its component segments."""
-        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_project_path(project: str, ) -> str:
-        """Returns a fully-qualified project string."""
-        return "projects/{project}".format(project=project, )
-
-    @staticmethod
-    def parse_common_project_path(path: str) -> Dict[str,str]:
-        """Parse a project path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_location_path(project: str, location: str, ) -> str:
-        """Returns a fully-qualified location string."""
-        return "projects/{project}/locations/{location}".format(project=project, location=location, )
-
-    @staticmethod
-    def parse_common_location_path(path: str) -> Dict[str,str]:
-        """Parse a location path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @classmethod
-    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
-        """Deprecated.
-        Return the API endpoint and client cert source for mutual TLS.
-
-        The client cert source is determined in the following order:
-        (1) if the `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
-        client cert source is None.
-        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
-        default client cert source exists, use the default one; otherwise the client cert
-        source is None.
-
-        The API endpoint is determined in the following order:
-        (1) if `client_options.api_endpoint` is provided, use the provided one.
-        (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
-        default mTLS endpoint; if the environment variable is "never", use the default API
-        endpoint; otherwise, if the client cert source exists, use the default mTLS endpoint;
-        otherwise use the default API endpoint.
-
-        More details can be found at https://google.aip.dev/auth/4114.
-
-        Args:
-            client_options (google.api_core.client_options.ClientOptions): Custom options for the
-                client. Only the `api_endpoint` and `client_cert_source` properties may be used
-                in this method.
-
-        Returns:
-            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
-                client cert source to use.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
-        """
-
-        warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
-            DeprecationWarning)
-        if client_options is None:
-            client_options = client_options_lib.ClientOptions()
-        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
-        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
-        if use_client_cert not in ("true", "false"):
-            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
-        if use_mtls_endpoint not in ("auto", "never", "always"):
-            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
-
-        # Figure out the client cert source to use.
-        client_cert_source = None
-        if use_client_cert == "true":
-            if client_options.client_cert_source:
-                client_cert_source = client_options.client_cert_source
-            elif mtls.has_default_client_cert_source():
-                client_cert_source = mtls.default_client_cert_source()
-
-        # Figure out which api endpoint to use.
-        if client_options.api_endpoint is not None:
-            api_endpoint = client_options.api_endpoint
-        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
-            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
-        else:
-            api_endpoint = cls.DEFAULT_ENDPOINT
-
-        return api_endpoint, client_cert_source
-
-    @staticmethod
-    def _read_environment_variables():
-        """Returns the environment variables used by the client.
-
-        Returns:
-            Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
-            GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.
-
-        Raises:
-            ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
-                any of ["true", "false"].
-            google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
-                is not any of ["auto", "never", "always"].
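        For example (illustrative sketch, not generated code; values are
        placeholders):

        .. code-block:: python

            import os

            os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "true"
            os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "auto"
            use_cert, use_mtls, universe = (
                AssuredWorkloadsServiceClient._read_environment_variables())
            # use_cert is True, use_mtls is "auto"; universe is None unless
            # GOOGLE_CLOUD_UNIVERSE_DOMAIN is also set.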
- """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = AssuredWorkloadsServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = AssuredWorkloadsServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = AssuredWorkloadsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. 
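        For example (illustrative sketch of the resolution order, not
        generated code):

        .. code-block:: python

            # An explicit client option wins over the environment variable.
            AssuredWorkloadsServiceClient._get_universe_domain(
                "example.com", "ignored.example.org")  # -> "example.com"

            # With neither configured, the default universe applies.
            AssuredWorkloadsServiceClient._get_universe_domain(None, None)
            # -> "googleapis.com"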
- """ - universe_domain = AssuredWorkloadsServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. - """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, AssuredWorkloadsServiceTransport, Callable[..., AssuredWorkloadsServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the assured workloads service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,AssuredWorkloadsServiceTransport,Callable[..., AssuredWorkloadsServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the AssuredWorkloadsServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. 
Only if this property is not set and
-                ``transport`` was not explicitly provided, the endpoint is
-                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
-                variable, which can have one of the following values:
-                "always" (always use the default mTLS endpoint), "never" (always
-                use the default regular endpoint) and "auto" (auto-switch to the
-                default mTLS endpoint if a client certificate is present; this is
-                the default value).
-
-                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
-                is "true", then the ``client_cert_source`` property can be used
-                to provide a client certificate for mTLS transport. If
-                not provided, the default SSL client certificate will be used if
-                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
-                set, no client certificate will be used.
-
-                3. The ``universe_domain`` property can be used to override the
-                default "googleapis.com" universe. Note that the ``api_endpoint``
-                property still takes precedence; and ``universe_domain`` is
-                currently not supported for mTLS.
-
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
-        """
-        self._client_options = client_options
-        if isinstance(self._client_options, dict):
-            self._client_options = client_options_lib.from_dict(self._client_options)
-        if self._client_options is None:
-            self._client_options = client_options_lib.ClientOptions()
-        self._client_options = cast(client_options_lib.ClientOptions, self._client_options)
-
-        universe_domain_opt = getattr(self._client_options, 'universe_domain', None)
-
-        self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = AssuredWorkloadsServiceClient._read_environment_variables()
-        self._client_cert_source = AssuredWorkloadsServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert)
-        self._universe_domain = AssuredWorkloadsServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env)
-        self._api_endpoint = None # updated below, depending on `transport`
-
-        # Initialize the universe domain validation.
-        self._is_universe_domain_valid = False
-
-        if CLIENT_LOGGING_SUPPORTED:  # pragma: NO COVER
-            # Setup logging.
-            client_logging.initialize_logging()
-
-        api_key_value = getattr(self._client_options, "api_key", None)
-        if api_key_value and credentials:
-            raise ValueError("client_options.api_key and credentials are mutually exclusive")
-
-        # Save or instantiate the transport.
-        # Ordinarily, we provide the transport, but allowing a custom transport
-        # instance provides an extensibility point for unusual situations.
-        transport_provided = isinstance(transport, AssuredWorkloadsServiceTransport)
-        if transport_provided:
-            # transport is an AssuredWorkloadsServiceTransport instance.
-            if credentials or self._client_options.credentials_file or api_key_value:
-                raise ValueError("When providing a transport instance, "
-                                 "provide its credentials directly.")
-            if self._client_options.scopes:
-                raise ValueError(
-                    "When providing a transport instance, provide its scopes "
-                    "directly."
- ) - self._transport = cast(AssuredWorkloadsServiceTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - AssuredWorkloadsServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[AssuredWorkloadsServiceTransport], Callable[..., AssuredWorkloadsServiceTransport]] = ( - AssuredWorkloadsServiceClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., AssuredWorkloadsServiceTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceClient`.", - extra = { - "serviceName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService", - "credentialsType": None, - } - ) - - def create_workload(self, - request: Optional[Union[assuredworkloads.CreateWorkloadRequest, dict]] = None, - *, - parent: Optional[str] = None, - workload: Optional[assuredworkloads.Workload] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Creates Assured Workload. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import assuredworkloads_v1 - - def sample_create_workload(): - # Create a client - client = assuredworkloads_v1.AssuredWorkloadsServiceClient() - - # Initialize request argument(s) - workload = assuredworkloads_v1.Workload() - workload.display_name = "display_name_value" - workload.compliance_regime = "ASSURED_WORKLOADS_FOR_PARTNERS" - - request = assuredworkloads_v1.CreateWorkloadRequest( - parent="parent_value", - workload=workload, - ) - - # Make the request - operation = client.create_workload(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.assuredworkloads_v1.types.CreateWorkloadRequest, dict]): - The request object. Request for creating a workload. - parent (str): - Required. The resource name of the new Workload's - parent. Must be of the form - ``organizations/{org_id}/locations/{location_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - workload (google.cloud.assuredworkloads_v1.types.Workload): - Required. Assured Workload to create - This corresponds to the ``workload`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.assuredworkloads_v1.types.Workload` A Workload object for managing highly regulated workloads of cloud - customers. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, workload] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, assuredworkloads.CreateWorkloadRequest): - request = assuredworkloads.CreateWorkloadRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if workload is not None: - request.workload = workload - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_workload] - - # Certain fields should be provided within the metadata header; - # add these here. 
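        # Illustrative aside (editorial comment, not generated code):
        # to_grpc_metadata folds the listed fields into the standard routing
        # header, so the metadata tuple built below gains an entry roughly like
        #     ("x-goog-request-params",
        #      "parent=organizations%2F123%2Flocations%2Fus-central1")
        # which lets the backend route the request by resource.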
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - assuredworkloads.Workload, - metadata_type=assuredworkloads.CreateWorkloadOperationMetadata, - ) - - # Done; return the response. - return response - - def update_workload(self, - request: Optional[Union[assuredworkloads.UpdateWorkloadRequest, dict]] = None, - *, - workload: Optional[assuredworkloads.Workload] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> assuredworkloads.Workload: - r"""Updates an existing workload. Currently allows updating of - workload display_name and labels. For force updates don't set - etag field in the Workload. Only one update operation per - workload can be in progress. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import assuredworkloads_v1 - - def sample_update_workload(): - # Create a client - client = assuredworkloads_v1.AssuredWorkloadsServiceClient() - - # Initialize request argument(s) - workload = assuredworkloads_v1.Workload() - workload.display_name = "display_name_value" - workload.compliance_regime = "ASSURED_WORKLOADS_FOR_PARTNERS" - - request = assuredworkloads_v1.UpdateWorkloadRequest( - workload=workload, - ) - - # Make the request - response = client.update_workload(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.assuredworkloads_v1.types.UpdateWorkloadRequest, dict]): - The request object. Request for Updating a workload. - workload (google.cloud.assuredworkloads_v1.types.Workload): - Required. The workload to update. The workload's - ``name`` field is used to identify the workload to be - updated. Format: - organizations/{org_id}/locations/{location_id}/workloads/{workload_id} - - This corresponds to the ``workload`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. The list of fields to be - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
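        For example (illustrative sketch, not generated code; the workload
        name and field values are placeholders):

        .. code-block:: python

            from google.protobuf import field_mask_pb2

            workload = assuredworkloads_v1.Workload(
                name="organizations/123/locations/us-central1/workloads/wl-1",
                display_name="renamed-workload",
            )
            # Limit the update to display_name via the required update_mask.
            response = client.update_workload(
                workload=workload,
                update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
            )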
- - Returns: - google.cloud.assuredworkloads_v1.types.Workload: - A Workload object for managing highly - regulated workloads of cloud customers. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [workload, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, assuredworkloads.UpdateWorkloadRequest): - request = assuredworkloads.UpdateWorkloadRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if workload is not None: - request.workload = workload - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_workload] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("workload.name", request.workload.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def restrict_allowed_resources(self, - request: Optional[Union[assuredworkloads.RestrictAllowedResourcesRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> assuredworkloads.RestrictAllowedResourcesResponse: - r"""Restrict the list of resources allowed in the - Workload environment. The current list of allowed - products can be found at - https://cloud.google.com/assured-workloads/docs/supported-products - In addition to assuredworkloads.workload.update - permission, the user should also have - orgpolicy.policy.set permission on the folder resource - to use this functionality. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import assuredworkloads_v1 - - def sample_restrict_allowed_resources(): - # Create a client - client = assuredworkloads_v1.AssuredWorkloadsServiceClient() - - # Initialize request argument(s) - request = assuredworkloads_v1.RestrictAllowedResourcesRequest( - name="name_value", - restriction_type="ALLOW_COMPLIANT_RESOURCES", - ) - - # Make the request - response = client.restrict_allowed_resources(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.assuredworkloads_v1.types.RestrictAllowedResourcesRequest, dict]): - The request object. 
Request for restricting list of - available resources in Workload - environment. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.assuredworkloads_v1.types.RestrictAllowedResourcesResponse: - Response for restricting the list of - allowed resources. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, assuredworkloads.RestrictAllowedResourcesRequest): - request = assuredworkloads.RestrictAllowedResourcesRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.restrict_allowed_resources] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_workload(self, - request: Optional[Union[assuredworkloads.DeleteWorkloadRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes the workload. Make sure that workload's direct children - are already in a deleted state, otherwise the request will fail - with a FAILED_PRECONDITION error. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import assuredworkloads_v1 - - def sample_delete_workload(): - # Create a client - client = assuredworkloads_v1.AssuredWorkloadsServiceClient() - - # Initialize request argument(s) - request = assuredworkloads_v1.DeleteWorkloadRequest( - name="name_value", - ) - - # Make the request - client.delete_workload(request=request) - - Args: - request (Union[google.cloud.assuredworkloads_v1.types.DeleteWorkloadRequest, dict]): - The request object. Request for deleting a Workload. - name (str): - Required. The ``name`` field is used to identify the - workload. Format: - organizations/{org_id}/locations/{location_id}/workloads/{workload_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, assuredworkloads.DeleteWorkloadRequest): - request = assuredworkloads.DeleteWorkloadRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_workload] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def get_workload(self, - request: Optional[Union[assuredworkloads.GetWorkloadRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> assuredworkloads.Workload: - r"""Gets Assured Workload associated with a CRM Node - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import assuredworkloads_v1 - - def sample_get_workload(): - # Create a client - client = assuredworkloads_v1.AssuredWorkloadsServiceClient() - - # Initialize request argument(s) - request = assuredworkloads_v1.GetWorkloadRequest( - name="name_value", - ) - - # Make the request - response = client.get_workload(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.assuredworkloads_v1.types.GetWorkloadRequest, dict]): - The request object. Request for fetching a workload. - name (str): - Required. The resource name of the Workload to fetch. - This is the workload's relative path in the API, - formatted as - "organizations/{organization_id}/locations/{location_id}/workloads/{workload_id}". - For example, - "organizations/123/locations/us-east1/workloads/assured-workload-1". - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.assuredworkloads_v1.types.Workload: - A Workload object for managing highly - regulated workloads of cloud customers. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, assuredworkloads.GetWorkloadRequest): - request = assuredworkloads.GetWorkloadRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_workload] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_workloads(self, - request: Optional[Union[assuredworkloads.ListWorkloadsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListWorkloadsPager: - r"""Lists Assured Workloads under a CRM Node. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import assuredworkloads_v1 - - def sample_list_workloads(): - # Create a client - client = assuredworkloads_v1.AssuredWorkloadsServiceClient() - - # Initialize request argument(s) - request = assuredworkloads_v1.ListWorkloadsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_workloads(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.assuredworkloads_v1.types.ListWorkloadsRequest, dict]): - The request object. Request for fetching workloads in an - organization. - parent (str): - Required. Parent Resource to list workloads from. 
Must be of the form ``organizations/{org_id}/locations/{location}``.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.assuredworkloads_v1.services.assured_workloads_service.pagers.ListWorkloadsPager:
-                Response of ListWorkloads endpoint.
-
-                Iterating over this object will yield
-                results and resolve additional pages
-                automatically.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [parent]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, assuredworkloads.ListWorkloadsRequest):
-            request = assuredworkloads.ListWorkloadsRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if parent is not None:
-                request.parent = parent
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.list_workloads]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # This method is paged; wrap the response in a pager, which provides
-        # an `__iter__` convenience method.
-        response = pagers.ListWorkloadsPager(
-            method=rpc,
-            request=request,
-            response=response,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def list_violations(self,
-            request: Optional[Union[assuredworkloads.ListViolationsRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> pagers.ListViolationsPager:
-        r"""Lists the Violations in the AssuredWorkload Environment. Callers
-        may also choose to read across multiple Workloads as per
-        `AIP-159 <https://google.aip.dev/159>`__ by using '-' (the
-        hyphen or dash character) as a wildcard character instead of
-        workload-id in the parent. Format
-        ``organizations/{org_id}/locations/{location}/workloads/-``
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import assuredworkloads_v1 - - def sample_list_violations(): - # Create a client - client = assuredworkloads_v1.AssuredWorkloadsServiceClient() - - # Initialize request argument(s) - request = assuredworkloads_v1.ListViolationsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_violations(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.assuredworkloads_v1.types.ListViolationsRequest, dict]): - The request object. Request for fetching violations in an - organization. - parent (str): - Required. The Workload name. Format - ``organizations/{org_id}/locations/{location}/workloads/{workload}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.assuredworkloads_v1.services.assured_workloads_service.pagers.ListViolationsPager: - Response of ListViolations endpoint. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, assuredworkloads.ListViolationsRequest): - request = assuredworkloads.ListViolationsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_violations] - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListViolationsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
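        # Illustrative aside (editorial comment, not generated code): callers
        # may also page explicitly rather than iterating items, e.g.:
        #     for page in client.list_violations(parent=parent).pages:
        #         process(page.violations)   # `process` is a placeholder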
- return response
-
- def get_violation(self,
- request: Optional[Union[assuredworkloads.GetViolationRequest, dict]] = None,
- *,
- name: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> assuredworkloads.Violation:
- r"""Retrieves Assured Workload Violation based on ID.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import assuredworkloads_v1
-
- def sample_get_violation():
- # Create a client
- client = assuredworkloads_v1.AssuredWorkloadsServiceClient()
-
- # Initialize request argument(s)
- request = assuredworkloads_v1.GetViolationRequest(
- name="name_value",
- )
-
- # Make the request
- response = client.get_violation(request=request)
-
- # Handle the response
- print(response)
-
- Args:
- request (Union[google.cloud.assuredworkloads_v1.types.GetViolationRequest, dict]):
- The request object. Request for fetching a Workload
- Violation.
- name (str):
- Required. The resource name of the
- Violation to fetch (i.e., Violation.name).
- Format:
-
- organizations/{organization}/locations/{location}/workloads/{workload}/violations/{violation}
-
- This corresponds to the ``name`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.assuredworkloads_v1.types.Violation:
- Workload monitoring Violation.
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [name]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError('If the `request` argument is set, then none of '
- 'the individual field arguments should be set.')
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, assuredworkloads.GetViolationRequest):
- request = assuredworkloads.GetViolationRequest(request)
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if name is not None:
- request.name = name
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.get_violation]
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
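# Editorial note, not part of the generated module: the flattened-parameter
# check above makes `request` and `name` mutually exclusive; e.g.
# (hypothetical resource name)
#
#     client.get_violation(request={"name": n}, name=n)
#
# raises ValueError before any RPC is attempted.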
- return response
-
- def acknowledge_violation(self,
- request: Optional[Union[assuredworkloads.AcknowledgeViolationRequest, dict]] = None,
- *,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> assuredworkloads.AcknowledgeViolationResponse:
- r"""Acknowledges an existing violation. By acknowledging
- a violation, users acknowledge the existence of a
- compliance violation in their workload and decide to
- ignore it due to a valid business justification.
- Acknowledgement is a permanent operation and it cannot
- be reverted.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import assuredworkloads_v1
-
- def sample_acknowledge_violation():
- # Create a client
- client = assuredworkloads_v1.AssuredWorkloadsServiceClient()
-
- # Initialize request argument(s)
- request = assuredworkloads_v1.AcknowledgeViolationRequest(
- name="name_value",
- comment="comment_value",
- )
-
- # Make the request
- response = client.acknowledge_violation(request=request)
-
- # Handle the response
- print(response)
-
- Args:
- request (Union[google.cloud.assuredworkloads_v1.types.AcknowledgeViolationRequest, dict]):
- The request object. Request for acknowledging the
- violation.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.assuredworkloads_v1.types.AcknowledgeViolationResponse:
- Response for violation
- acknowledgement.
-
- """
- # Create or coerce a protobuf request object.
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, assuredworkloads.AcknowledgeViolationRequest):
- request = assuredworkloads.AcknowledgeViolationRequest(request)
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.acknowledge_violation]
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- def __enter__(self) -> "AssuredWorkloadsServiceClient":
- return self
-
- def __exit__(self, type, value, traceback):
- """Releases underlying transport's resources.
-
- .. warning::
- ONLY use as a context manager if the transport is NOT shared
- with other clients! Exiting the with block will CLOSE the transport
- and may cause errors in other clients!
- """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "AssuredWorkloadsServiceClient", -) diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/services/assured_workloads_service/pagers.py b/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/services/assured_workloads_service/pagers.py deleted file mode 100644 index aca1261891e7..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/services/assured_workloads_service/pagers.py +++ /dev/null @@ -1,305 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.assuredworkloads_v1.types import assuredworkloads - - -class ListWorkloadsPager: - """A pager for iterating through ``list_workloads`` requests. - - This class thinly wraps an initial - :class:`google.cloud.assuredworkloads_v1.types.ListWorkloadsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``workloads`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListWorkloads`` requests and continue to iterate - through the ``workloads`` field on the - corresponding responses. - - All the usual :class:`google.cloud.assuredworkloads_v1.types.ListWorkloadsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., assuredworkloads.ListWorkloadsResponse], - request: assuredworkloads.ListWorkloadsRequest, - response: assuredworkloads.ListWorkloadsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. 
- - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.assuredworkloads_v1.types.ListWorkloadsRequest): - The initial request object. - response (google.cloud.assuredworkloads_v1.types.ListWorkloadsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = assuredworkloads.ListWorkloadsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[assuredworkloads.ListWorkloadsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[assuredworkloads.Workload]: - for page in self.pages: - yield from page.workloads - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListWorkloadsAsyncPager: - """A pager for iterating through ``list_workloads`` requests. - - This class thinly wraps an initial - :class:`google.cloud.assuredworkloads_v1.types.ListWorkloadsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``workloads`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListWorkloads`` requests and continue to iterate - through the ``workloads`` field on the - corresponding responses. - - All the usual :class:`google.cloud.assuredworkloads_v1.types.ListWorkloadsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[assuredworkloads.ListWorkloadsResponse]], - request: assuredworkloads.ListWorkloadsRequest, - response: assuredworkloads.ListWorkloadsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.assuredworkloads_v1.types.ListWorkloadsRequest): - The initial request object. - response (google.cloud.assuredworkloads_v1.types.ListWorkloadsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = assuredworkloads.ListWorkloadsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[assuredworkloads.ListWorkloadsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[assuredworkloads.Workload]: - async def async_generator(): - async for page in self.pages: - for response in page.workloads: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListViolationsPager: - """A pager for iterating through ``list_violations`` requests. - - This class thinly wraps an initial - :class:`google.cloud.assuredworkloads_v1.types.ListViolationsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``violations`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListViolations`` requests and continue to iterate - through the ``violations`` field on the - corresponding responses. - - All the usual :class:`google.cloud.assuredworkloads_v1.types.ListViolationsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., assuredworkloads.ListViolationsResponse], - request: assuredworkloads.ListViolationsRequest, - response: assuredworkloads.ListViolationsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.assuredworkloads_v1.types.ListViolationsRequest): - The initial request object. - response (google.cloud.assuredworkloads_v1.types.ListViolationsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = assuredworkloads.ListViolationsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[assuredworkloads.ListViolationsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[assuredworkloads.Violation]: - for page in self.pages: - yield from page.violations - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListViolationsAsyncPager: - """A pager for iterating through ``list_violations`` requests. - - This class thinly wraps an initial - :class:`google.cloud.assuredworkloads_v1.types.ListViolationsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``violations`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListViolations`` requests and continue to iterate - through the ``violations`` field on the - corresponding responses. - - All the usual :class:`google.cloud.assuredworkloads_v1.types.ListViolationsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[assuredworkloads.ListViolationsResponse]], - request: assuredworkloads.ListViolationsRequest, - response: assuredworkloads.ListViolationsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.assuredworkloads_v1.types.ListViolationsRequest): - The initial request object. - response (google.cloud.assuredworkloads_v1.types.ListViolationsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = assuredworkloads.ListViolationsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[assuredworkloads.ListViolationsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[assuredworkloads.Violation]: - async def async_generator(): - async for page in self.pages: - for response in page.violations: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/services/assured_workloads_service/transports/README.rst b/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/services/assured_workloads_service/transports/README.rst deleted file mode 100644 index 30f56618bd12..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/services/assured_workloads_service/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`AssuredWorkloadsServiceTransport` is the ABC for all transports. -- public child `AssuredWorkloadsServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `AssuredWorkloadsServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseAssuredWorkloadsServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `AssuredWorkloadsServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/services/assured_workloads_service/transports/__init__.py b/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/services/assured_workloads_service/transports/__init__.py deleted file mode 100644 index 175c162f0257..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/services/assured_workloads_service/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -from typing import Dict, Type - -from .base import AssuredWorkloadsServiceTransport -from .grpc import AssuredWorkloadsServiceGrpcTransport -from .grpc_asyncio import AssuredWorkloadsServiceGrpcAsyncIOTransport -from .rest import AssuredWorkloadsServiceRestTransport -from .rest import AssuredWorkloadsServiceRestInterceptor - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[AssuredWorkloadsServiceTransport]] -_transport_registry['grpc'] = AssuredWorkloadsServiceGrpcTransport -_transport_registry['grpc_asyncio'] = AssuredWorkloadsServiceGrpcAsyncIOTransport -_transport_registry['rest'] = AssuredWorkloadsServiceRestTransport - -__all__ = ( - 'AssuredWorkloadsServiceTransport', - 'AssuredWorkloadsServiceGrpcTransport', - 'AssuredWorkloadsServiceGrpcAsyncIOTransport', - 'AssuredWorkloadsServiceRestTransport', - 'AssuredWorkloadsServiceRestInterceptor', -) diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/services/assured_workloads_service/transports/base.py b/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/services/assured_workloads_service/transports/base.py deleted file mode 100644 index c7ce79d1bdc9..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/services/assured_workloads_service/transports/base.py +++ /dev/null @@ -1,302 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
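Editorial aside: a minimal sketch of how the transport registry defined in ``transports/__init__.py`` above is consumed (illustrative only; ``_transport_registry`` is a private module attribute, so production code should pass the transport name to the client instead):

.. code-block:: python

    from google.cloud.assuredworkloads_v1.services.assured_workloads_service import transports

    # Resolve a transport class by its registered name.
    transport_cls = transports._transport_registry["grpc_asyncio"]
    assert transport_cls is transports.AssuredWorkloadsServiceGrpcAsyncIOTransport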
-#
-import abc
-from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
-
-from google.cloud.assuredworkloads_v1 import gapic_version as package_version
-
-import google.auth # type: ignore
-import google.api_core
-from google.api_core import exceptions as core_exceptions
-from google.api_core import gapic_v1
-from google.api_core import retry as retries
-from google.api_core import operations_v1
-from google.auth import credentials as ga_credentials # type: ignore
-from google.oauth2 import service_account # type: ignore
-
-from google.cloud.assuredworkloads_v1.types import assuredworkloads
-from google.longrunning import operations_pb2 # type: ignore
-from google.protobuf import empty_pb2 # type: ignore
-
-DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)
-
-
-class AssuredWorkloadsServiceTransport(abc.ABC):
- """Abstract transport class for AssuredWorkloadsService."""
-
- AUTH_SCOPES = (
- 'https://www.googleapis.com/auth/cloud-platform',
- )
-
- DEFAULT_HOST: str = 'assuredworkloads.googleapis.com'
- def __init__(
- self, *,
- host: str = DEFAULT_HOST,
- credentials: Optional[ga_credentials.Credentials] = None,
- credentials_file: Optional[str] = None,
- scopes: Optional[Sequence[str]] = None,
- quota_project_id: Optional[str] = None,
- client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
- always_use_jwt_access: Optional[bool] = False,
- api_audience: Optional[str] = None,
- **kwargs,
- ) -> None:
- """Instantiate the transport.
-
- Args:
- host (Optional[str]):
- The hostname to connect to (default: 'assuredworkloads.googleapis.com').
- credentials (Optional[google.auth.credentials.Credentials]): The
- authorization credentials to attach to requests. These
- credentials identify the application to the service; if none
- are specified, the client will attempt to ascertain the
- credentials from the environment.
- credentials_file (Optional[str]): A file with credentials that can
- be loaded with :func:`google.auth.load_credentials_from_file`.
- This argument is mutually exclusive with credentials.
- scopes (Optional[Sequence[str]]): A list of scopes.
- quota_project_id (Optional[str]): An optional project to use for billing
- and quota.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
- your own client library.
- always_use_jwt_access (Optional[bool]): Whether self signed JWT should
- be used for service account credentials.
- """
-
- scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
-
- # Save the scopes.
- self._scopes = scopes
- if not hasattr(self, "_ignore_credentials"):
- self._ignore_credentials: bool = False
-
- # If no credentials are provided, then determine the appropriate
- # defaults.
- if credentials and credentials_file:
- raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")
-
- if credentials_file is not None:
- credentials, _ = google.auth.load_credentials_from_file(
- credentials_file,
- **scopes_kwargs,
- quota_project_id=quota_project_id
- )
- elif credentials is None and not self._ignore_credentials:
- credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
- # Don't apply an audience if the credentials were provided by the user.
- if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.create_workload: gapic_v1.method.wrap_method( - self.create_workload, - default_timeout=None, - client_info=client_info, - ), - self.update_workload: gapic_v1.method.wrap_method( - self.update_workload, - default_timeout=None, - client_info=client_info, - ), - self.restrict_allowed_resources: gapic_v1.method.wrap_method( - self.restrict_allowed_resources, - default_timeout=None, - client_info=client_info, - ), - self.delete_workload: gapic_v1.method.wrap_method( - self.delete_workload, - default_timeout=None, - client_info=client_info, - ), - self.get_workload: gapic_v1.method.wrap_method( - self.get_workload, - default_timeout=None, - client_info=client_info, - ), - self.list_workloads: gapic_v1.method.wrap_method( - self.list_workloads, - default_timeout=None, - client_info=client_info, - ), - self.list_violations: gapic_v1.method.wrap_method( - self.list_violations, - default_timeout=None, - client_info=client_info, - ), - self.get_violation: gapic_v1.method.wrap_method( - self.get_violation, - default_timeout=None, - client_info=client_info, - ), - self.acknowledge_violation: gapic_v1.method.wrap_method( - self.acknowledge_violation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: gapic_v1.method.wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: gapic_v1.method.wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
- """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def create_workload(self) -> Callable[ - [assuredworkloads.CreateWorkloadRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def update_workload(self) -> Callable[ - [assuredworkloads.UpdateWorkloadRequest], - Union[ - assuredworkloads.Workload, - Awaitable[assuredworkloads.Workload] - ]]: - raise NotImplementedError() - - @property - def restrict_allowed_resources(self) -> Callable[ - [assuredworkloads.RestrictAllowedResourcesRequest], - Union[ - assuredworkloads.RestrictAllowedResourcesResponse, - Awaitable[assuredworkloads.RestrictAllowedResourcesResponse] - ]]: - raise NotImplementedError() - - @property - def delete_workload(self) -> Callable[ - [assuredworkloads.DeleteWorkloadRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def get_workload(self) -> Callable[ - [assuredworkloads.GetWorkloadRequest], - Union[ - assuredworkloads.Workload, - Awaitable[assuredworkloads.Workload] - ]]: - raise NotImplementedError() - - @property - def list_workloads(self) -> Callable[ - [assuredworkloads.ListWorkloadsRequest], - Union[ - assuredworkloads.ListWorkloadsResponse, - Awaitable[assuredworkloads.ListWorkloadsResponse] - ]]: - raise NotImplementedError() - - @property - def list_violations(self) -> Callable[ - [assuredworkloads.ListViolationsRequest], - Union[ - assuredworkloads.ListViolationsResponse, - Awaitable[assuredworkloads.ListViolationsResponse] - ]]: - raise NotImplementedError() - - @property - def get_violation(self) -> Callable[ - [assuredworkloads.GetViolationRequest], - Union[ - assuredworkloads.Violation, - Awaitable[assuredworkloads.Violation] - ]]: - raise NotImplementedError() - - @property - def acknowledge_violation(self) -> Callable[ - [assuredworkloads.AcknowledgeViolationRequest], - Union[ - assuredworkloads.AcknowledgeViolationResponse, - Awaitable[assuredworkloads.AcknowledgeViolationResponse] - ]]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'AssuredWorkloadsServiceTransport', -) diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/services/assured_workloads_service/transports/grpc.py b/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/services/assured_workloads_service/transports/grpc.py deleted file mode 100644 index 3534e0b1e170..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/services/assured_workloads_service/transports/grpc.py +++ /dev/null @@ -1,633 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import json
-import logging as std_logging
-import pickle
-import warnings
-from typing import Callable, Dict, Optional, Sequence, Tuple, Union
-
-from google.api_core import grpc_helpers
-from google.api_core import operations_v1
-from google.api_core import gapic_v1
-import google.auth # type: ignore
-from google.auth import credentials as ga_credentials # type: ignore
-from google.auth.transport.grpc import SslCredentials # type: ignore
-from google.protobuf.json_format import MessageToJson
-import google.protobuf.message
-
-import grpc # type: ignore
-import proto # type: ignore
-
-from google.cloud.assuredworkloads_v1.types import assuredworkloads
-from google.longrunning import operations_pb2 # type: ignore
-from google.protobuf import empty_pb2 # type: ignore
-from .base import AssuredWorkloadsServiceTransport, DEFAULT_CLIENT_INFO
-
-try:
- from google.api_core import client_logging # type: ignore
- CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER
-except ImportError: # pragma: NO COVER
- CLIENT_LOGGING_SUPPORTED = False
-
-_LOGGER = std_logging.getLogger(__name__)
-
-
-class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER
- def intercept_unary_unary(self, continuation, client_call_details, request):
- logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG)
- if logging_enabled: # pragma: NO COVER
- request_metadata = client_call_details.metadata
- if isinstance(request, proto.Message):
- request_payload = type(request).to_json(request)
- elif isinstance(request, google.protobuf.message.Message):
- request_payload = MessageToJson(request)
- else:
- request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
-
- request_metadata = {
- key: value.decode("utf-8") if isinstance(value, bytes) else value
- for key, value in request_metadata
- }
- grpc_request = {
- "payload": request_payload,
- "requestMethod": "grpc",
- "metadata": dict(request_metadata),
- }
- _LOGGER.debug(
- f"Sending request for {client_call_details.method}",
- extra = {
- "serviceName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService",
- "rpcName": client_call_details.method,
- "request": grpc_request,
- "metadata": grpc_request["metadata"],
- },
- )
-
- response = continuation(client_call_details, request)
- if logging_enabled: # pragma: NO COVER
- response_metadata = response.trailing_metadata()
- # Convert the gRPC trailing metadata into a plain dict of strings.
- metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
- result = response.result()
- if isinstance(result, proto.Message):
- response_payload = type(result).to_json(result)
- elif isinstance(result, google.protobuf.message.Message):
- response_payload = MessageToJson(result)
- else:
- response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
- grpc_response = {
- "payload": response_payload,
- "metadata": metadata,
- "status": "OK",
- }
- _LOGGER.debug(
- f"Received response for {client_call_details.method}.",
- extra = {
- "serviceName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService",
- "rpcName":
client_call_details.method, - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class AssuredWorkloadsServiceGrpcTransport(AssuredWorkloadsServiceTransport): - """gRPC backend transport for AssuredWorkloadsService. - - Service to manage AssuredWorkloads. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'assuredworkloads.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'assuredworkloads.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if a ``channel`` instance is provided. - channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. 
- quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) - - # Wrap messages. 
This must be done after self._logged_channel exists
- self._prep_wrapped_messages(client_info)
-
- @classmethod
- def create_channel(cls,
- host: str = 'assuredworkloads.googleapis.com',
- credentials: Optional[ga_credentials.Credentials] = None,
- credentials_file: Optional[str] = None,
- scopes: Optional[Sequence[str]] = None,
- quota_project_id: Optional[str] = None,
- **kwargs) -> grpc.Channel:
- """Create and return a gRPC channel object.
- Args:
- host (Optional[str]): The host for the channel to use.
- credentials (Optional[~.Credentials]): The
- authorization credentials to attach to requests. These
- credentials identify this application to the service. If
- none are specified, the client will attempt to ascertain
- the credentials from the environment.
- credentials_file (Optional[str]): A file with credentials that can
- be loaded with :func:`google.auth.load_credentials_from_file`.
- This argument is mutually exclusive with credentials.
- scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
- service. These are only used when credentials are not specified and
- are passed to :func:`google.auth.default`.
- quota_project_id (Optional[str]): An optional project to use for billing
- and quota.
- kwargs (Optional[dict]): Keyword arguments, which are passed to the
- channel creation.
- Returns:
- grpc.Channel: A gRPC channel object.
-
- Raises:
- google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
- and ``credentials_file`` are passed.
- """
-
- return grpc_helpers.create_channel(
- host,
- credentials=credentials,
- credentials_file=credentials_file,
- quota_project_id=quota_project_id,
- default_scopes=cls.AUTH_SCOPES,
- scopes=scopes,
- default_host=cls.DEFAULT_HOST,
- **kwargs
- )
-
- @property
- def grpc_channel(self) -> grpc.Channel:
- """Return the channel designed to connect to this service.
- """
- return self._grpc_channel
-
- @property
- def operations_client(self) -> operations_v1.OperationsClient:
- """Create the client designed to process long-running operations.
-
- This property caches on the instance; repeated calls return the same
- client.
- """
- # Quick check: Only create a new client if we do not already have one.
- if self._operations_client is None:
- self._operations_client = operations_v1.OperationsClient(
- self._logged_channel
- )
-
- # Return the client from cache.
- return self._operations_client
-
- @property
- def create_workload(self) -> Callable[
- [assuredworkloads.CreateWorkloadRequest],
- operations_pb2.Operation]:
- r"""Return a callable for the create workload method over gRPC.
-
- Creates Assured Workload.
-
- Returns:
- Callable[[~.CreateWorkloadRequest],
- ~.Operation]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'create_workload' not in self._stubs:
- self._stubs['create_workload'] = self._logged_channel.unary_unary(
- '/google.cloud.assuredworkloads.v1.AssuredWorkloadsService/CreateWorkload',
- request_serializer=assuredworkloads.CreateWorkloadRequest.serialize,
- response_deserializer=operations_pb2.Operation.FromString,
- )
- return self._stubs['create_workload']
-
- @property
- def update_workload(self) -> Callable[
- [assuredworkloads.UpdateWorkloadRequest],
- assuredworkloads.Workload]:
- r"""Return a callable for the update workload method over gRPC.
-
- Updates an existing workload. Currently allows updating of
- workload display_name and labels. For force updates, don't set
- the etag field in the Workload. Only one update operation per
- workload can be in progress.
-
- Returns:
- Callable[[~.UpdateWorkloadRequest],
- ~.Workload]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'update_workload' not in self._stubs:
- self._stubs['update_workload'] = self._logged_channel.unary_unary(
- '/google.cloud.assuredworkloads.v1.AssuredWorkloadsService/UpdateWorkload',
- request_serializer=assuredworkloads.UpdateWorkloadRequest.serialize,
- response_deserializer=assuredworkloads.Workload.deserialize,
- )
- return self._stubs['update_workload']
-
- @property
- def restrict_allowed_resources(self) -> Callable[
- [assuredworkloads.RestrictAllowedResourcesRequest],
- assuredworkloads.RestrictAllowedResourcesResponse]:
- r"""Return a callable for the restrict allowed resources method over gRPC.
-
- Restrict the list of resources allowed in the
- Workload environment. The current list of allowed
- products can be found at
- https://cloud.google.com/assured-workloads/docs/supported-products.
- In addition to the assuredworkloads.workload.update
- permission, the user should also have the
- orgpolicy.policy.set permission on the folder resource
- to use this functionality.
-
- Returns:
- Callable[[~.RestrictAllowedResourcesRequest],
- ~.RestrictAllowedResourcesResponse]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'restrict_allowed_resources' not in self._stubs:
- self._stubs['restrict_allowed_resources'] = self._logged_channel.unary_unary(
- '/google.cloud.assuredworkloads.v1.AssuredWorkloadsService/RestrictAllowedResources',
- request_serializer=assuredworkloads.RestrictAllowedResourcesRequest.serialize,
- response_deserializer=assuredworkloads.RestrictAllowedResourcesResponse.deserialize,
- )
- return self._stubs['restrict_allowed_resources']
-
- @property
- def delete_workload(self) -> Callable[
- [assuredworkloads.DeleteWorkloadRequest],
- empty_pb2.Empty]:
- r"""Return a callable for the delete workload method over gRPC.
-
- Deletes the workload. Make sure that workload's direct children
- are already in a deleted state, otherwise the request will fail
- with a FAILED_PRECONDITION error.
-
- Returns:
- Callable[[~.DeleteWorkloadRequest],
- ~.Empty]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
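# Editorial note, not part of the generated module: the multicallable
# created below is cached in self._stubs, so repeated property accesses
# reuse a single stub per RPC instead of re-registering it on the channel.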
- if 'delete_workload' not in self._stubs:
- self._stubs['delete_workload'] = self._logged_channel.unary_unary(
- '/google.cloud.assuredworkloads.v1.AssuredWorkloadsService/DeleteWorkload',
- request_serializer=assuredworkloads.DeleteWorkloadRequest.serialize,
- response_deserializer=empty_pb2.Empty.FromString,
- )
- return self._stubs['delete_workload']
-
- @property
- def get_workload(self) -> Callable[
- [assuredworkloads.GetWorkloadRequest],
- assuredworkloads.Workload]:
- r"""Return a callable for the get workload method over gRPC.
-
- Gets Assured Workload associated with a CRM Node
-
- Returns:
- Callable[[~.GetWorkloadRequest],
- ~.Workload]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'get_workload' not in self._stubs:
- self._stubs['get_workload'] = self._logged_channel.unary_unary(
- '/google.cloud.assuredworkloads.v1.AssuredWorkloadsService/GetWorkload',
- request_serializer=assuredworkloads.GetWorkloadRequest.serialize,
- response_deserializer=assuredworkloads.Workload.deserialize,
- )
- return self._stubs['get_workload']
-
- @property
- def list_workloads(self) -> Callable[
- [assuredworkloads.ListWorkloadsRequest],
- assuredworkloads.ListWorkloadsResponse]:
- r"""Return a callable for the list workloads method over gRPC.
-
- Lists Assured Workloads under a CRM Node.
-
- Returns:
- Callable[[~.ListWorkloadsRequest],
- ~.ListWorkloadsResponse]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'list_workloads' not in self._stubs:
- self._stubs['list_workloads'] = self._logged_channel.unary_unary(
- '/google.cloud.assuredworkloads.v1.AssuredWorkloadsService/ListWorkloads',
- request_serializer=assuredworkloads.ListWorkloadsRequest.serialize,
- response_deserializer=assuredworkloads.ListWorkloadsResponse.deserialize,
- )
- return self._stubs['list_workloads']
-
- @property
- def list_violations(self) -> Callable[
- [assuredworkloads.ListViolationsRequest],
- assuredworkloads.ListViolationsResponse]:
- r"""Return a callable for the list violations method over gRPC.
-
- Lists the Violations in the AssuredWorkload Environment. Callers
- may also choose to read across multiple Workloads as per
- `AIP-159 <https://google.aip.dev/159>`__ by using '-' (the
- hyphen or dash character) as a wildcard character instead of
- workload-id in the parent. Format
- ``organizations/{org_id}/locations/{location}/workloads/-``
-
- Returns:
- Callable[[~.ListViolationsRequest],
- ~.ListViolationsResponse]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'list_violations' not in self._stubs: - self._stubs['list_violations'] = self._logged_channel.unary_unary( - '/google.cloud.assuredworkloads.v1.AssuredWorkloadsService/ListViolations', - request_serializer=assuredworkloads.ListViolationsRequest.serialize, - response_deserializer=assuredworkloads.ListViolationsResponse.deserialize, - ) - return self._stubs['list_violations'] - - @property - def get_violation(self) -> Callable[ - [assuredworkloads.GetViolationRequest], - assuredworkloads.Violation]: - r"""Return a callable for the get violation method over gRPC. - - Retrieves Assured Workload Violation based on ID. - - Returns: - Callable[[~.GetViolationRequest], - ~.Violation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_violation' not in self._stubs: - self._stubs['get_violation'] = self._logged_channel.unary_unary( - '/google.cloud.assuredworkloads.v1.AssuredWorkloadsService/GetViolation', - request_serializer=assuredworkloads.GetViolationRequest.serialize, - response_deserializer=assuredworkloads.Violation.deserialize, - ) - return self._stubs['get_violation'] - - @property - def acknowledge_violation(self) -> Callable[ - [assuredworkloads.AcknowledgeViolationRequest], - assuredworkloads.AcknowledgeViolationResponse]: - r"""Return a callable for the acknowledge violation method over gRPC. - - Acknowledges an existing violation. By acknowledging - a violation, users acknowledge the existence of a - compliance violation in their workload and decide to - ignore it due to a valid business justification. - Acknowledgement is a permanent operation and it cannot - be reverted. - - Returns: - Callable[[~.AcknowledgeViolationRequest], - ~.AcknowledgeViolationResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'acknowledge_violation' not in self._stubs: - self._stubs['acknowledge_violation'] = self._logged_channel.unary_unary( - '/google.cloud.assuredworkloads.v1.AssuredWorkloadsService/AcknowledgeViolation', - request_serializer=assuredworkloads.AcknowledgeViolationRequest.serialize, - response_deserializer=assuredworkloads.AcknowledgeViolationResponse.deserialize, - ) - return self._stubs['acknowledge_violation'] - - def close(self): - self._logged_channel.close() - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'AssuredWorkloadsServiceGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/services/assured_workloads_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/services/assured_workloads_service/transports/grpc_asyncio.py deleted file mode 100644 index 0dfd638735b7..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/services/assured_workloads_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,703 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#
-import inspect
-import json
-import pickle
-import logging as std_logging
-import warnings
-from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
-
-from google.api_core import gapic_v1
-from google.api_core import grpc_helpers_async
-from google.api_core import exceptions as core_exceptions
-from google.api_core import retry_async as retries
-from google.api_core import operations_v1
-from google.auth import credentials as ga_credentials # type: ignore
-from google.auth.transport.grpc import SslCredentials # type: ignore
-from google.protobuf.json_format import MessageToJson
-import google.protobuf.message
-
-import grpc # type: ignore
-import proto # type: ignore
-from grpc.experimental import aio # type: ignore
-
-from google.cloud.assuredworkloads_v1.types import assuredworkloads
-from google.longrunning import operations_pb2 # type: ignore
-from google.protobuf import empty_pb2 # type: ignore
-from .base import AssuredWorkloadsServiceTransport, DEFAULT_CLIENT_INFO
-from .grpc import AssuredWorkloadsServiceGrpcTransport
-
-try:
- from google.api_core import client_logging # type: ignore
- CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER
-except ImportError: # pragma: NO COVER
- CLIENT_LOGGING_SUPPORTED = False
-
-_LOGGER = std_logging.getLogger(__name__)
-
-
-class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER
- async def intercept_unary_unary(self, continuation, client_call_details, request):
- logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG)
- if logging_enabled: # pragma: NO COVER
- request_metadata = client_call_details.metadata
- if isinstance(request, proto.Message):
- request_payload = type(request).to_json(request)
- elif isinstance(request, google.protobuf.message.Message):
- request_payload = MessageToJson(request)
- else:
- request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
-
- request_metadata = {
- key: value.decode("utf-8") if isinstance(value, bytes) else value
- for key, value in request_metadata
- }
- grpc_request = {
- "payload": request_payload,
- "requestMethod": "grpc",
- "metadata": dict(request_metadata),
- }
- _LOGGER.debug(
- f"Sending request for {client_call_details.method}",
- extra = {
- "serviceName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService",
- "rpcName": str(client_call_details.method),
- "request": grpc_request,
- "metadata": grpc_request["metadata"],
- },
- )
- response = await continuation(client_call_details, request)
- if logging_enabled: # pragma: NO COVER
- response_metadata = await response.trailing_metadata()
- # Convert the gRPC response metadata into a dict of string key/value pairs
- metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
- result = await response
- if isinstance(result, proto.Message):
- response_payload = type(result).to_json(result)
- elif isinstance(result, google.protobuf.message.Message):
- response_payload = MessageToJson(result)
- else:
- response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
- grpc_response = {
- "payload": response_payload,
- "metadata": metadata,
- "status": "OK",
- }
- _LOGGER.debug(
- f"Received response to rpc {client_call_details.method}.",
- extra = {
- "serviceName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService",
- "rpcName": str(client_call_details.method),
- "response": grpc_response,
- "metadata": grpc_response["metadata"],
- },
- )
- return response
-
-
-class 
AssuredWorkloadsServiceGrpcAsyncIOTransport(AssuredWorkloadsServiceTransport):
- """gRPC AsyncIO backend transport for AssuredWorkloadsService.
-
- Service to manage AssuredWorkloads.
-
- This class defines the same methods as the primary client, so the
- primary client can load the underlying transport implementation
- and call it.
-
- It sends protocol buffers over the wire using gRPC (which is built on
- top of HTTP/2); the ``grpcio`` package must be installed.
- """
-
- _grpc_channel: aio.Channel
- _stubs: Dict[str, Callable] = {}
-
- @classmethod
- def create_channel(cls,
- host: str = 'assuredworkloads.googleapis.com',
- credentials: Optional[ga_credentials.Credentials] = None,
- credentials_file: Optional[str] = None,
- scopes: Optional[Sequence[str]] = None,
- quota_project_id: Optional[str] = None,
- **kwargs) -> aio.Channel:
- """Create and return a gRPC AsyncIO channel object.
- Args:
- host (Optional[str]): The host for the channel to use.
- credentials (Optional[~.Credentials]): The
- authorization credentials to attach to requests. These
- credentials identify this application to the service. If
- none are specified, the client will attempt to ascertain
- the credentials from the environment.
- credentials_file (Optional[str]): A file with credentials that can
- be loaded with :func:`google.auth.load_credentials_from_file`.
- scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
- service. These are only used when credentials are not specified and
- are passed to :func:`google.auth.default`.
- quota_project_id (Optional[str]): An optional project to use for billing
- and quota.
- kwargs (Optional[dict]): Keyword arguments, which are passed to the
- channel creation.
- Returns:
- aio.Channel: A gRPC AsyncIO channel object.
- """
-
- return grpc_helpers_async.create_channel(
- host,
- credentials=credentials,
- credentials_file=credentials_file,
- quota_project_id=quota_project_id,
- default_scopes=cls.AUTH_SCOPES,
- scopes=scopes,
- default_host=cls.DEFAULT_HOST,
- **kwargs
- )
-
- def __init__(self, *,
- host: str = 'assuredworkloads.googleapis.com',
- credentials: Optional[ga_credentials.Credentials] = None,
- credentials_file: Optional[str] = None,
- scopes: Optional[Sequence[str]] = None,
- channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None,
- api_mtls_endpoint: Optional[str] = None,
- client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
- ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
- client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
- quota_project_id: Optional[str] = None,
- client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
- always_use_jwt_access: Optional[bool] = False,
- api_audience: Optional[str] = None,
- ) -> None:
- """Instantiate the transport.
-
- Args:
- host (Optional[str]):
- The hostname to connect to (default: 'assuredworkloads.googleapis.com').
- credentials (Optional[google.auth.credentials.Credentials]): The
- authorization credentials to attach to requests. These
- credentials identify the application to the service; if none
- are specified, the client will attempt to ascertain the
- credentials from the environment.
- This argument is ignored if a ``channel`` instance is provided.
- credentials_file (Optional[str]): A file with credentials that can
- be loaded with :func:`google.auth.load_credentials_from_file`.
- This argument is ignored if a ``channel`` instance is provided.
- scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
- service. These are only used when credentials are not specified and
- are passed to :func:`google.auth.default`.
- channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]):
- A ``Channel`` instance through which to make calls, or a Callable
- that constructs and returns one. If set to None, ``self.create_channel``
- is used to create the channel. If a Callable is given, it will be called
- with the same arguments as used in ``self.create_channel``.
- api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
- If provided, it overrides the ``host`` argument and tries to create
- a mutual TLS channel with client SSL credentials from
- ``client_cert_source`` or application default SSL credentials.
- client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
- Deprecated. A callback to provide client SSL certificate bytes and
- private key bytes, both in PEM format. It is ignored if
- ``api_mtls_endpoint`` is None.
- ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
- for the grpc channel. It is ignored if a ``channel`` instance is provided.
- client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
- A callback to provide client certificate bytes and private key bytes,
- both in PEM format. It is used to configure a mutual TLS channel. It is
- ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
- quota_project_id (Optional[str]): An optional project to use for billing
- and quota.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
- your own client library.
- always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
- be used for service account credentials.
-
- Raises:
- google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
- creation failed for any reason.
- google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
- and ``credentials_file`` are passed.
- """
- self._grpc_channel = None
- self._ssl_channel_credentials = ssl_channel_credentials
- self._stubs: Dict[str, Callable] = {}
- self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None
-
- if api_mtls_endpoint:
- warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
- if client_cert_source:
- warnings.warn("client_cert_source is deprecated", DeprecationWarning)
-
- if isinstance(channel, aio.Channel):
- # Ignore credentials if a channel was passed.
- credentials = None
- self._ignore_credentials = True
- # If a channel was explicitly provided, set it.
- self._grpc_channel = channel
- self._ssl_channel_credentials = None
- else:
- if api_mtls_endpoint:
- host = api_mtls_endpoint
-
- # Create SSL credentials with client_cert_source or application
- # default SSL credentials.
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def create_workload(self) -> Callable[ - [assuredworkloads.CreateWorkloadRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create workload method over gRPC. - - Creates Assured Workload. - - Returns: - Callable[[~.CreateWorkloadRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_workload' not in self._stubs:
- self._stubs['create_workload'] = self._logged_channel.unary_unary(
- '/google.cloud.assuredworkloads.v1.AssuredWorkloadsService/CreateWorkload',
- request_serializer=assuredworkloads.CreateWorkloadRequest.serialize,
- response_deserializer=operations_pb2.Operation.FromString,
- )
- return self._stubs['create_workload']
-
- @property
- def update_workload(self) -> Callable[
- [assuredworkloads.UpdateWorkloadRequest],
- Awaitable[assuredworkloads.Workload]]:
- r"""Return a callable for the update workload method over gRPC.
-
- Updates an existing workload. Currently allows updating of
- workload display_name and labels. For force updates, don't set the
- etag field in the Workload. Only one update operation per
- workload can be in progress.
-
- Returns:
- Callable[[~.UpdateWorkloadRequest],
- Awaitable[~.Workload]]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'update_workload' not in self._stubs:
- self._stubs['update_workload'] = self._logged_channel.unary_unary(
- '/google.cloud.assuredworkloads.v1.AssuredWorkloadsService/UpdateWorkload',
- request_serializer=assuredworkloads.UpdateWorkloadRequest.serialize,
- response_deserializer=assuredworkloads.Workload.deserialize,
- )
- return self._stubs['update_workload']
-
- @property
- def restrict_allowed_resources(self) -> Callable[
- [assuredworkloads.RestrictAllowedResourcesRequest],
- Awaitable[assuredworkloads.RestrictAllowedResourcesResponse]]:
- r"""Return a callable for the restrict allowed resources method over gRPC.
-
- Restrict the list of resources allowed in the
- Workload environment. The current list of allowed
- products can be found at
- https://cloud.google.com/assured-workloads/docs/supported-products
- In addition to assuredworkloads.workload.update
- permission, the user should also have
- orgpolicy.policy.set permission on the folder resource
- to use this functionality.
-
- Returns:
- Callable[[~.RestrictAllowedResourcesRequest],
- Awaitable[~.RestrictAllowedResourcesResponse]]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'restrict_allowed_resources' not in self._stubs:
- self._stubs['restrict_allowed_resources'] = self._logged_channel.unary_unary(
- '/google.cloud.assuredworkloads.v1.AssuredWorkloadsService/RestrictAllowedResources',
- request_serializer=assuredworkloads.RestrictAllowedResourcesRequest.serialize,
- response_deserializer=assuredworkloads.RestrictAllowedResourcesResponse.deserialize,
- )
- return self._stubs['restrict_allowed_resources']
-
- @property
- def delete_workload(self) -> Callable[
- [assuredworkloads.DeleteWorkloadRequest],
- Awaitable[empty_pb2.Empty]]:
- r"""Return a callable for the delete workload method over gRPC.
-
- Deletes the workload. Make sure that the workload's direct children
- are already in a deleted state, otherwise the request will fail
- with a FAILED_PRECONDITION error.
-
- Returns:
- Callable[[~.DeleteWorkloadRequest],
- Awaitable[~.Empty]]:
- A function that, when called, will call the underlying RPC
- on the server.
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_workload' not in self._stubs: - self._stubs['delete_workload'] = self._logged_channel.unary_unary( - '/google.cloud.assuredworkloads.v1.AssuredWorkloadsService/DeleteWorkload', - request_serializer=assuredworkloads.DeleteWorkloadRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_workload'] - - @property - def get_workload(self) -> Callable[ - [assuredworkloads.GetWorkloadRequest], - Awaitable[assuredworkloads.Workload]]: - r"""Return a callable for the get workload method over gRPC. - - Gets Assured Workload associated with a CRM Node - - Returns: - Callable[[~.GetWorkloadRequest], - Awaitable[~.Workload]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_workload' not in self._stubs: - self._stubs['get_workload'] = self._logged_channel.unary_unary( - '/google.cloud.assuredworkloads.v1.AssuredWorkloadsService/GetWorkload', - request_serializer=assuredworkloads.GetWorkloadRequest.serialize, - response_deserializer=assuredworkloads.Workload.deserialize, - ) - return self._stubs['get_workload'] - - @property - def list_workloads(self) -> Callable[ - [assuredworkloads.ListWorkloadsRequest], - Awaitable[assuredworkloads.ListWorkloadsResponse]]: - r"""Return a callable for the list workloads method over gRPC. - - Lists Assured Workloads under a CRM Node. - - Returns: - Callable[[~.ListWorkloadsRequest], - Awaitable[~.ListWorkloadsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_workloads' not in self._stubs: - self._stubs['list_workloads'] = self._logged_channel.unary_unary( - '/google.cloud.assuredworkloads.v1.AssuredWorkloadsService/ListWorkloads', - request_serializer=assuredworkloads.ListWorkloadsRequest.serialize, - response_deserializer=assuredworkloads.ListWorkloadsResponse.deserialize, - ) - return self._stubs['list_workloads'] - - @property - def list_violations(self) -> Callable[ - [assuredworkloads.ListViolationsRequest], - Awaitable[assuredworkloads.ListViolationsResponse]]: - r"""Return a callable for the list violations method over gRPC. - - Lists the Violations in the AssuredWorkload Environment. Callers - may also choose to read across multiple Workloads as per - `AIP-159 `__ by using '-' (the - hyphen or dash character) as a wildcard character instead of - workload-id in the parent. Format - ``organizations/{org_id}/locations/{location}/workloads/-`` - - Returns: - Callable[[~.ListViolationsRequest], - Awaitable[~.ListViolationsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_violations' not in self._stubs: - self._stubs['list_violations'] = self._logged_channel.unary_unary( - '/google.cloud.assuredworkloads.v1.AssuredWorkloadsService/ListViolations', - request_serializer=assuredworkloads.ListViolationsRequest.serialize, - response_deserializer=assuredworkloads.ListViolationsResponse.deserialize, - ) - return self._stubs['list_violations'] - - @property - def get_violation(self) -> Callable[ - [assuredworkloads.GetViolationRequest], - Awaitable[assuredworkloads.Violation]]: - r"""Return a callable for the get violation method over gRPC. - - Retrieves Assured Workload Violation based on ID. - - Returns: - Callable[[~.GetViolationRequest], - Awaitable[~.Violation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_violation' not in self._stubs: - self._stubs['get_violation'] = self._logged_channel.unary_unary( - '/google.cloud.assuredworkloads.v1.AssuredWorkloadsService/GetViolation', - request_serializer=assuredworkloads.GetViolationRequest.serialize, - response_deserializer=assuredworkloads.Violation.deserialize, - ) - return self._stubs['get_violation'] - - @property - def acknowledge_violation(self) -> Callable[ - [assuredworkloads.AcknowledgeViolationRequest], - Awaitable[assuredworkloads.AcknowledgeViolationResponse]]: - r"""Return a callable for the acknowledge violation method over gRPC. - - Acknowledges an existing violation. By acknowledging - a violation, users acknowledge the existence of a - compliance violation in their workload and decide to - ignore it due to a valid business justification. - Acknowledgement is a permanent operation and it cannot - be reverted. - - Returns: - Callable[[~.AcknowledgeViolationRequest], - Awaitable[~.AcknowledgeViolationResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'acknowledge_violation' not in self._stubs: - self._stubs['acknowledge_violation'] = self._logged_channel.unary_unary( - '/google.cloud.assuredworkloads.v1.AssuredWorkloadsService/AcknowledgeViolation', - request_serializer=assuredworkloads.AcknowledgeViolationRequest.serialize, - response_deserializer=assuredworkloads.AcknowledgeViolationResponse.deserialize, - ) - return self._stubs['acknowledge_violation'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.create_workload: self._wrap_method( - self.create_workload, - default_timeout=None, - client_info=client_info, - ), - self.update_workload: self._wrap_method( - self.update_workload, - default_timeout=None, - client_info=client_info, - ), - self.restrict_allowed_resources: self._wrap_method( - self.restrict_allowed_resources, - default_timeout=None, - client_info=client_info, - ), - self.delete_workload: self._wrap_method( - self.delete_workload, - default_timeout=None, - client_info=client_info, - ), - self.get_workload: self._wrap_method( - self.get_workload, - default_timeout=None, - client_info=client_info, - ), - self.list_workloads: self._wrap_method( - self.list_workloads, - default_timeout=None, - client_info=client_info, - ), - self.list_violations: self._wrap_method( - self.list_violations, - default_timeout=None, - client_info=client_info, - ), - self.get_violation: self._wrap_method( - self.get_violation, - default_timeout=None, - client_info=client_info, - ), - self.acknowledge_violation: self._wrap_method( - self.acknowledge_violation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: self._wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: self._wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - -__all__ = ( - 'AssuredWorkloadsServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/services/assured_workloads_service/transports/rest.py b/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/services/assured_workloads_service/transports/rest.py deleted file mode 100644 index 5a1ed2c5d685..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/services/assured_workloads_service/transports/rest.py +++ /dev/null @@ -1,1562 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import logging -import json # type: ignore - -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.api_core import operations_v1 - -from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - - -from google.cloud.assuredworkloads_v1.types import assuredworkloads -from google.protobuf import empty_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - - -from .rest_base import _BaseAssuredWorkloadsServiceRestTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = logging.getLogger(__name__) - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=f"requests@{requests_version}", -) - - -class AssuredWorkloadsServiceRestInterceptor: - """Interceptor for AssuredWorkloadsService. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. 
- Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the AssuredWorkloadsServiceRestTransport. - - .. code-block:: python - class MyCustomAssuredWorkloadsServiceInterceptor(AssuredWorkloadsServiceRestInterceptor): - def pre_acknowledge_violation(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_acknowledge_violation(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_workload(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_workload(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_workload(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_get_violation(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_violation(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_workload(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_workload(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_violations(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_violations(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_workloads(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_workloads(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_restrict_allowed_resources(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_restrict_allowed_resources(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_workload(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_workload(self, response): - logging.log(f"Received response: {response}") - return response - - transport = AssuredWorkloadsServiceRestTransport(interceptor=MyCustomAssuredWorkloadsServiceInterceptor()) - client = AssuredWorkloadsServiceClient(transport=transport) - - - """ - def pre_create_workload(self, request: assuredworkloads.CreateWorkloadRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[assuredworkloads.CreateWorkloadRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_workload - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssuredWorkloadsService server. - """ - return request, metadata - - def post_create_workload(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_workload - - DEPRECATED. Please use the `post_create_workload_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AssuredWorkloadsService server but before - it is returned to user code. 
This `post_create_workload` interceptor runs - before the `post_create_workload_with_metadata` interceptor. - """ - return response - - def post_create_workload_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_workload - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AssuredWorkloadsService server but before it is returned to user code. - - We recommend only using this `post_create_workload_with_metadata` - interceptor in new development instead of the `post_create_workload` interceptor. - When both interceptors are used, this `post_create_workload_with_metadata` interceptor runs after the - `post_create_workload` interceptor. The (possibly modified) response returned by - `post_create_workload` will be passed to - `post_create_workload_with_metadata`. - """ - return response, metadata - - def pre_delete_workload(self, request: assuredworkloads.DeleteWorkloadRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[assuredworkloads.DeleteWorkloadRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_workload - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssuredWorkloadsService server. - """ - return request, metadata - - def pre_get_workload(self, request: assuredworkloads.GetWorkloadRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[assuredworkloads.GetWorkloadRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_workload - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssuredWorkloadsService server. - """ - return request, metadata - - def post_get_workload(self, response: assuredworkloads.Workload) -> assuredworkloads.Workload: - """Post-rpc interceptor for get_workload - - DEPRECATED. Please use the `post_get_workload_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AssuredWorkloadsService server but before - it is returned to user code. This `post_get_workload` interceptor runs - before the `post_get_workload_with_metadata` interceptor. - """ - return response - - def post_get_workload_with_metadata(self, response: assuredworkloads.Workload, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[assuredworkloads.Workload, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_workload - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AssuredWorkloadsService server but before it is returned to user code. - - We recommend only using this `post_get_workload_with_metadata` - interceptor in new development instead of the `post_get_workload` interceptor. - When both interceptors are used, this `post_get_workload_with_metadata` interceptor runs after the - `post_get_workload` interceptor. The (possibly modified) response returned by - `post_get_workload` will be passed to - `post_get_workload_with_metadata`. 
- """ - return response, metadata - - def pre_list_workloads(self, request: assuredworkloads.ListWorkloadsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[assuredworkloads.ListWorkloadsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_workloads - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssuredWorkloadsService server. - """ - return request, metadata - - def post_list_workloads(self, response: assuredworkloads.ListWorkloadsResponse) -> assuredworkloads.ListWorkloadsResponse: - """Post-rpc interceptor for list_workloads - - DEPRECATED. Please use the `post_list_workloads_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AssuredWorkloadsService server but before - it is returned to user code. This `post_list_workloads` interceptor runs - before the `post_list_workloads_with_metadata` interceptor. - """ - return response - - def post_list_workloads_with_metadata(self, response: assuredworkloads.ListWorkloadsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[assuredworkloads.ListWorkloadsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_workloads - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AssuredWorkloadsService server but before it is returned to user code. - - We recommend only using this `post_list_workloads_with_metadata` - interceptor in new development instead of the `post_list_workloads` interceptor. - When both interceptors are used, this `post_list_workloads_with_metadata` interceptor runs after the - `post_list_workloads` interceptor. The (possibly modified) response returned by - `post_list_workloads` will be passed to - `post_list_workloads_with_metadata`. - """ - return response, metadata - - def pre_restrict_allowed_resources(self, request: assuredworkloads.RestrictAllowedResourcesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[assuredworkloads.RestrictAllowedResourcesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for restrict_allowed_resources - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssuredWorkloadsService server. - """ - return request, metadata - - def post_restrict_allowed_resources(self, response: assuredworkloads.RestrictAllowedResourcesResponse) -> assuredworkloads.RestrictAllowedResourcesResponse: - """Post-rpc interceptor for restrict_allowed_resources - - DEPRECATED. Please use the `post_restrict_allowed_resources_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AssuredWorkloadsService server but before - it is returned to user code. This `post_restrict_allowed_resources` interceptor runs - before the `post_restrict_allowed_resources_with_metadata` interceptor. - """ - return response - - def post_restrict_allowed_resources_with_metadata(self, response: assuredworkloads.RestrictAllowedResourcesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[assuredworkloads.RestrictAllowedResourcesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for restrict_allowed_resources - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AssuredWorkloadsService server but before it is returned to user code. 
- - We recommend only using this `post_restrict_allowed_resources_with_metadata` - interceptor in new development instead of the `post_restrict_allowed_resources` interceptor. - When both interceptors are used, this `post_restrict_allowed_resources_with_metadata` interceptor runs after the - `post_restrict_allowed_resources` interceptor. The (possibly modified) response returned by - `post_restrict_allowed_resources` will be passed to - `post_restrict_allowed_resources_with_metadata`. - """ - return response, metadata - - def pre_update_workload(self, request: assuredworkloads.UpdateWorkloadRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[assuredworkloads.UpdateWorkloadRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_workload - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssuredWorkloadsService server. - """ - return request, metadata - - def post_update_workload(self, response: assuredworkloads.Workload) -> assuredworkloads.Workload: - """Post-rpc interceptor for update_workload - - DEPRECATED. Please use the `post_update_workload_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AssuredWorkloadsService server but before - it is returned to user code. This `post_update_workload` interceptor runs - before the `post_update_workload_with_metadata` interceptor. - """ - return response - - def post_update_workload_with_metadata(self, response: assuredworkloads.Workload, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[assuredworkloads.Workload, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_workload - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AssuredWorkloadsService server but before it is returned to user code. - - We recommend only using this `post_update_workload_with_metadata` - interceptor in new development instead of the `post_update_workload` interceptor. - When both interceptors are used, this `post_update_workload_with_metadata` interceptor runs after the - `post_update_workload` interceptor. The (possibly modified) response returned by - `post_update_workload` will be passed to - `post_update_workload_with_metadata`. - """ - return response, metadata - - def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssuredWorkloadsService server. - """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the AssuredWorkloadsService server but before - it is returned to user code. - """ - return response - - def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_operations - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssuredWorkloadsService server. 
- """ - return request, metadata - - def post_list_operations( - self, response: operations_pb2.ListOperationsResponse - ) -> operations_pb2.ListOperationsResponse: - """Post-rpc interceptor for list_operations - - Override in a subclass to manipulate the response - after it is returned by the AssuredWorkloadsService server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class AssuredWorkloadsServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: AssuredWorkloadsServiceRestInterceptor - - -class AssuredWorkloadsServiceRestTransport(_BaseAssuredWorkloadsServiceRestTransport): - """REST backend synchronous transport for AssuredWorkloadsService. - - Service to manage AssuredWorkloads. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'assuredworkloads.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[AssuredWorkloadsServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'assuredworkloads.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
- # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - url_scheme=url_scheme, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or AssuredWorkloadsServiceRestInterceptor() - self._prep_wrapped_messages(client_info) - - @property - def operations_client(self) -> operations_v1.AbstractOperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Only create a new client if we do not already have one. - if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.GetOperation': [ - { - 'method': 'get', - 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', - }, - ], - 'google.longrunning.Operations.ListOperations': [ - { - 'method': 'get', - 'uri': '/v1/{name=organizations/*/locations/*}/operations', - }, - ], - } - - rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1") - - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) - - # Return the client from cache. - return self._operations_client - - class _AcknowledgeViolation(_BaseAssuredWorkloadsServiceRestTransport._BaseAcknowledgeViolation, AssuredWorkloadsServiceRestStub): - def __hash__(self): - return hash("AssuredWorkloadsServiceRestTransport.AcknowledgeViolation") - - def __call__(self, - request: assuredworkloads.AcknowledgeViolationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> assuredworkloads.AcknowledgeViolationResponse: - raise NotImplementedError( - "Method AcknowledgeViolation is not available over REST transport" - ) - class _CreateWorkload(_BaseAssuredWorkloadsServiceRestTransport._BaseCreateWorkload, AssuredWorkloadsServiceRestStub): - def __hash__(self): - return hash("AssuredWorkloadsServiceRestTransport.CreateWorkload") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: assuredworkloads.CreateWorkloadRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the create workload method over HTTP. - - Args: - request (~.assuredworkloads.CreateWorkloadRequest): - The request object. Request for creating a workload. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseAssuredWorkloadsServiceRestTransport._BaseCreateWorkload._get_http_options() - - request, metadata = self._interceptor.pre_create_workload(request, metadata) - transcoded_request = _BaseAssuredWorkloadsServiceRestTransport._BaseCreateWorkload._get_transcoded_request(http_options, request) - - body = _BaseAssuredWorkloadsServiceRestTransport._BaseCreateWorkload._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAssuredWorkloadsServiceRestTransport._BaseCreateWorkload._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceClient.CreateWorkload", - extra = { - "serviceName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService", - "rpcName": "CreateWorkload", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssuredWorkloadsServiceRestTransport._CreateWorkload._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_workload(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_workload_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceClient.create_workload", - extra = { - "serviceName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService", - "rpcName": "CreateWorkload", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteWorkload(_BaseAssuredWorkloadsServiceRestTransport._BaseDeleteWorkload, AssuredWorkloadsServiceRestStub): - def __hash__(self): - return hash("AssuredWorkloadsServiceRestTransport.DeleteWorkload") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: assuredworkloads.DeleteWorkloadRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the delete workload method over HTTP. - - Args: - request (~.assuredworkloads.DeleteWorkloadRequest): - The request object. Request for deleting a Workload. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
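The metadata contract described above applies to every method on this transport: values are strings, except for keys ending in ``-bin``, which must be ``bytes``. A sketch, assuming a ``delete_request`` built elsewhere; the header names are illustrative:

.. code-block:: python

    metadata = (
        ("x-goog-request-params",
         "name=organizations/123/locations/us-central1/workloads/wl-1"),
        # Keys ending in "-bin" carry bytes values, per the contract above.
        ("x-debug-trace-bin", b"\x0a\x0b"),
    )
    client.delete_workload(request=delete_request, metadata=metadata)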
- """ - - http_options = _BaseAssuredWorkloadsServiceRestTransport._BaseDeleteWorkload._get_http_options() - - request, metadata = self._interceptor.pre_delete_workload(request, metadata) - transcoded_request = _BaseAssuredWorkloadsServiceRestTransport._BaseDeleteWorkload._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAssuredWorkloadsServiceRestTransport._BaseDeleteWorkload._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceClient.DeleteWorkload", - extra = { - "serviceName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService", - "rpcName": "DeleteWorkload", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssuredWorkloadsServiceRestTransport._DeleteWorkload._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _GetViolation(_BaseAssuredWorkloadsServiceRestTransport._BaseGetViolation, AssuredWorkloadsServiceRestStub): - def __hash__(self): - return hash("AssuredWorkloadsServiceRestTransport.GetViolation") - - def __call__(self, - request: assuredworkloads.GetViolationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> assuredworkloads.Violation: - raise NotImplementedError( - "Method GetViolation is not available over REST transport" - ) - class _GetWorkload(_BaseAssuredWorkloadsServiceRestTransport._BaseGetWorkload, AssuredWorkloadsServiceRestStub): - def __hash__(self): - return hash("AssuredWorkloadsServiceRestTransport.GetWorkload") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: assuredworkloads.GetWorkloadRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> assuredworkloads.Workload: - r"""Call the get workload method over HTTP. - - Args: - request (~.assuredworkloads.GetWorkloadRequest): - The request object. Request for fetching a workload. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.assuredworkloads.Workload: - A Workload object for managing highly - regulated workloads of cloud customers. - - """ - - http_options = _BaseAssuredWorkloadsServiceRestTransport._BaseGetWorkload._get_http_options() - - request, metadata = self._interceptor.pre_get_workload(request, metadata) - transcoded_request = _BaseAssuredWorkloadsServiceRestTransport._BaseGetWorkload._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAssuredWorkloadsServiceRestTransport._BaseGetWorkload._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceClient.GetWorkload", - extra = { - "serviceName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService", - "rpcName": "GetWorkload", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssuredWorkloadsServiceRestTransport._GetWorkload._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
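The transcoding step referenced here is plain URI-template expansion: ``path_template.transcode`` (the same helper the ``rest_base`` classes later in this diff call) matches the request's ``name`` against the template and expands it into a concrete path. A small standalone sketch:

.. code-block:: python

    from google.api_core import path_template

    http_options = [{
        "method": "get",
        "uri": "/v1/{name=organizations/*/locations/*/workloads/*}",
    }]
    transcoded = path_template.transcode(
        http_options,
        name="organizations/123/locations/us-central1/workloads/wl-1",
    )
    assert transcoded["method"] == "get"
    assert transcoded["uri"] == (
        "/v1/organizations/123/locations/us-central1/workloads/wl-1"
    )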
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = assuredworkloads.Workload() - pb_resp = assuredworkloads.Workload.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_workload(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_workload_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = assuredworkloads.Workload.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceClient.get_workload", - extra = { - "serviceName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService", - "rpcName": "GetWorkload", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListViolations(_BaseAssuredWorkloadsServiceRestTransport._BaseListViolations, AssuredWorkloadsServiceRestStub): - def __hash__(self): - return hash("AssuredWorkloadsServiceRestTransport.ListViolations") - - def __call__(self, - request: assuredworkloads.ListViolationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> assuredworkloads.ListViolationsResponse: - raise NotImplementedError( - "Method ListViolations is not available over REST transport" - ) - class _ListWorkloads(_BaseAssuredWorkloadsServiceRestTransport._BaseListWorkloads, AssuredWorkloadsServiceRestStub): - def __hash__(self): - return hash("AssuredWorkloadsServiceRestTransport.ListWorkloads") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: assuredworkloads.ListWorkloadsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> assuredworkloads.ListWorkloadsResponse: - r"""Call the list workloads method over HTTP. - - Args: - request (~.assuredworkloads.ListWorkloadsRequest): - The request object. Request for fetching workloads in an - organization. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.assuredworkloads.ListWorkloadsResponse: - Response of ListWorkloads endpoint. 
- """ - - http_options = _BaseAssuredWorkloadsServiceRestTransport._BaseListWorkloads._get_http_options() - - request, metadata = self._interceptor.pre_list_workloads(request, metadata) - transcoded_request = _BaseAssuredWorkloadsServiceRestTransport._BaseListWorkloads._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAssuredWorkloadsServiceRestTransport._BaseListWorkloads._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceClient.ListWorkloads", - extra = { - "serviceName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService", - "rpcName": "ListWorkloads", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssuredWorkloadsServiceRestTransport._ListWorkloads._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = assuredworkloads.ListWorkloadsResponse() - pb_resp = assuredworkloads.ListWorkloadsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_workloads(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_workloads_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = assuredworkloads.ListWorkloadsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceClient.list_workloads", - extra = { - "serviceName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService", - "rpcName": "ListWorkloads", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _RestrictAllowedResources(_BaseAssuredWorkloadsServiceRestTransport._BaseRestrictAllowedResources, AssuredWorkloadsServiceRestStub): - def __hash__(self): - return hash("AssuredWorkloadsServiceRestTransport.RestrictAllowedResources") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: assuredworkloads.RestrictAllowedResourcesRequest, *, 
- retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> assuredworkloads.RestrictAllowedResourcesResponse: - r"""Call the restrict allowed - resources method over HTTP. - - Args: - request (~.assuredworkloads.RestrictAllowedResourcesRequest): - The request object. Request for restricting list of - available resources in Workload - environment. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.assuredworkloads.RestrictAllowedResourcesResponse: - Response for restricting the list of - allowed resources. - - """ - - http_options = _BaseAssuredWorkloadsServiceRestTransport._BaseRestrictAllowedResources._get_http_options() - - request, metadata = self._interceptor.pre_restrict_allowed_resources(request, metadata) - transcoded_request = _BaseAssuredWorkloadsServiceRestTransport._BaseRestrictAllowedResources._get_transcoded_request(http_options, request) - - body = _BaseAssuredWorkloadsServiceRestTransport._BaseRestrictAllowedResources._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAssuredWorkloadsServiceRestTransport._BaseRestrictAllowedResources._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceClient.RestrictAllowedResources", - extra = { - "serviceName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService", - "rpcName": "RestrictAllowedResources", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssuredWorkloadsServiceRestTransport._RestrictAllowedResources._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
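Because ``RestrictAllowedResources`` maps with ``body: '*'`` (see the ``rest_base`` section below), the entire request message is serialized as the JSON body. An invocation sketch; the ``RestrictionType`` value is an assumption about the v1 API surface and the resource name is illustrative:

.. code-block:: python

    from google.cloud.assuredworkloads_v1 import RestrictAllowedResourcesRequest

    request = RestrictAllowedResourcesRequest(
        name="organizations/123/locations/us-central1/workloads/wl-1",
        # Assumed enum value; check the generated types for the full list.
        restriction_type=(
            RestrictAllowedResourcesRequest.RestrictionType.ALLOW_COMPLIANT_RESOURCES
        ),
    )
    response = client.restrict_allowed_resources(request=request)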
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = assuredworkloads.RestrictAllowedResourcesResponse() - pb_resp = assuredworkloads.RestrictAllowedResourcesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_restrict_allowed_resources(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_restrict_allowed_resources_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = assuredworkloads.RestrictAllowedResourcesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceClient.restrict_allowed_resources", - extra = { - "serviceName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService", - "rpcName": "RestrictAllowedResources", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateWorkload(_BaseAssuredWorkloadsServiceRestTransport._BaseUpdateWorkload, AssuredWorkloadsServiceRestStub): - def __hash__(self): - return hash("AssuredWorkloadsServiceRestTransport.UpdateWorkload") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: assuredworkloads.UpdateWorkloadRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> assuredworkloads.Workload: - r"""Call the update workload method over HTTP. - - Args: - request (~.assuredworkloads.UpdateWorkloadRequest): - The request object. Request for Updating a workload. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.assuredworkloads.Workload: - A Workload object for managing highly - regulated workloads of cloud customers. 
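``UpdateWorkload`` is a ``patch`` mapping driven by ``update_mask`` (the ``rest_base`` section below even defaults the ``updateMask`` query parameter when unset). A sketch of a partial update; the names are illustrative:

.. code-block:: python

    from google.protobuf import field_mask_pb2

    from google.cloud.assuredworkloads_v1 import UpdateWorkloadRequest, Workload

    request = UpdateWorkloadRequest(
        workload=Workload(
            name="organizations/123/locations/us-central1/workloads/wl-1",
            display_name="Renamed Workload",
        ),
        # Only the fields named in the mask are written.
        update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
    )
    updated = client.update_workload(request=request)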
- - """ - - http_options = _BaseAssuredWorkloadsServiceRestTransport._BaseUpdateWorkload._get_http_options() - - request, metadata = self._interceptor.pre_update_workload(request, metadata) - transcoded_request = _BaseAssuredWorkloadsServiceRestTransport._BaseUpdateWorkload._get_transcoded_request(http_options, request) - - body = _BaseAssuredWorkloadsServiceRestTransport._BaseUpdateWorkload._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAssuredWorkloadsServiceRestTransport._BaseUpdateWorkload._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceClient.UpdateWorkload", - extra = { - "serviceName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService", - "rpcName": "UpdateWorkload", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssuredWorkloadsServiceRestTransport._UpdateWorkload._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = assuredworkloads.Workload() - pb_resp = assuredworkloads.Workload.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_workload(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_workload_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = assuredworkloads.Workload.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceClient.update_workload", - extra = { - "serviceName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService", - "rpcName": "UpdateWorkload", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - @property - def acknowledge_violation(self) -> Callable[ - [assuredworkloads.AcknowledgeViolationRequest], - assuredworkloads.AcknowledgeViolationResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._AcknowledgeViolation(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_workload(self) -> Callable[ - [assuredworkloads.CreateWorkloadRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._CreateWorkload(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_workload(self) -> Callable[ - [assuredworkloads.DeleteWorkloadRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteWorkload(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_violation(self) -> Callable[ - [assuredworkloads.GetViolationRequest], - assuredworkloads.Violation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetViolation(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_workload(self) -> Callable[ - [assuredworkloads.GetWorkloadRequest], - assuredworkloads.Workload]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetWorkload(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_violations(self) -> Callable[ - [assuredworkloads.ListViolationsRequest], - assuredworkloads.ListViolationsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListViolations(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_workloads(self) -> Callable[ - [assuredworkloads.ListWorkloadsRequest], - assuredworkloads.ListWorkloadsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListWorkloads(self._session, self._host, self._interceptor) # type: ignore - - @property - def restrict_allowed_resources(self) -> Callable[ - [assuredworkloads.RestrictAllowedResourcesRequest], - assuredworkloads.RestrictAllowedResourcesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._RestrictAllowedResources(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_workload(self) -> Callable[ - [assuredworkloads.UpdateWorkloadRequest], - assuredworkloads.Workload]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._UpdateWorkload(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(_BaseAssuredWorkloadsServiceRestTransport._BaseGetOperation, AssuredWorkloadsServiceRestStub): - def __hash__(self): - return hash("AssuredWorkloadsServiceRestTransport.GetOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.Operation: Response from GetOperation method. - """ - - http_options = _BaseAssuredWorkloadsServiceRestTransport._BaseGetOperation._get_http_options() - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - transcoded_request = _BaseAssuredWorkloadsServiceRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAssuredWorkloadsServiceRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceClient.GetOperation", - extra = { - "serviceName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService", - "rpcName": "GetOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssuredWorkloadsServiceRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
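These ``GetOperation``/``ListOperations`` stubs back the standard operations mixin on the client, which accepts raw ``operations_pb2`` requests. A sketch with illustrative operation names:

.. code-block:: python

    from google.longrunning import operations_pb2

    op = client.get_operation(
        operations_pb2.GetOperationRequest(
            name="organizations/123/locations/us-central1/operations/op-123"
        )
    )
    listing = client.list_operations(
        operations_pb2.ListOperationsRequest(
            name="organizations/123/locations/us-central1"
        )
    )
    for operation in listing.operations:
        print(operation.name, operation.done)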
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.Operation() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient.GetOperation", - extra = { - "serviceName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService", - "rpcName": "GetOperation", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - - class _ListOperations(_BaseAssuredWorkloadsServiceRestTransport._BaseListOperations, AssuredWorkloadsServiceRestStub): - def __hash__(self): - return hash("AssuredWorkloadsServiceRestTransport.ListOperations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.ListOperationsResponse: - - r"""Call the list operations method over HTTP. - - Args: - request (operations_pb2.ListOperationsRequest): - The request object for ListOperations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.ListOperationsResponse: Response from ListOperations method. 
- """ - - http_options = _BaseAssuredWorkloadsServiceRestTransport._BaseListOperations._get_http_options() - - request, metadata = self._interceptor.pre_list_operations(request, metadata) - transcoded_request = _BaseAssuredWorkloadsServiceRestTransport._BaseListOperations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAssuredWorkloadsServiceRestTransport._BaseListOperations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceClient.ListOperations", - extra = { - "serviceName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService", - "rpcName": "ListOperations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssuredWorkloadsServiceRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.ListOperationsResponse() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_list_operations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient.ListOperations", - extra = { - "serviceName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService", - "rpcName": "ListOperations", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'AssuredWorkloadsServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/services/assured_workloads_service/transports/rest_base.py b/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/services/assured_workloads_service/transports/rest_base.py deleted file mode 100644 index 541cd38070e2..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/services/assured_workloads_service/transports/rest_base.py +++ /dev/null @@ -1,407 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from .base import AssuredWorkloadsServiceTransport, DEFAULT_CLIENT_INFO - -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - - -from google.cloud.assuredworkloads_v1.types import assuredworkloads -from google.protobuf import empty_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - - -class _BaseAssuredWorkloadsServiceRestTransport(AssuredWorkloadsServiceTransport): - """Base REST backend transport for AssuredWorkloadsService. - - Note: This class is not meant to be used directly. Use its sync and - async sub-classes instead. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'assuredworkloads.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - Args: - host (Optional[str]): - The hostname to connect to (default: 'assuredworkloads.googleapis.com'). - credentials (Optional[Any]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. 
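For the ``url_scheme``/custom-host case described above, the usual entry point is ``client_options`` rather than constructing the transport directly. A sketch for pointing the REST client at a local test server; the endpoint is illustrative, and anonymous credentials are assumed here to avoid a real token fetch:

.. code-block:: python

    from google.api_core.client_options import ClientOptions
    from google.auth.credentials import AnonymousCredentials

    from google.cloud.assuredworkloads_v1 import AssuredWorkloadsServiceClient

    client = AssuredWorkloadsServiceClient(
        transport="rest",
        credentials=AnonymousCredentials(),
        client_options=ClientOptions(api_endpoint="http://localhost:8080"),
    )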
- """ - # Run the base constructor - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - - class _BaseAcknowledgeViolation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - class _BaseCreateWorkload: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=organizations/*/locations/*}/workloads', - 'body': 'workload', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = assuredworkloads.CreateWorkloadRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAssuredWorkloadsServiceRestTransport._BaseCreateWorkload._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteWorkload: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=organizations/*/locations/*/workloads/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = assuredworkloads.DeleteWorkloadRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAssuredWorkloadsServiceRestTransport._BaseDeleteWorkload._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetViolation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - class _BaseGetWorkload: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - 
__REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=organizations/*/locations/*/workloads/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = assuredworkloads.GetWorkloadRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAssuredWorkloadsServiceRestTransport._BaseGetWorkload._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListViolations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - class _BaseListWorkloads: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=organizations/*/locations/*}/workloads', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = assuredworkloads.ListWorkloadsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAssuredWorkloadsServiceRestTransport._BaseListWorkloads._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseRestrictAllowedResources: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=organizations/*/locations/*/workloads/*}:restrictAllowedResources', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = assuredworkloads.RestrictAllowedResourcesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = 
json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAssuredWorkloadsServiceRestTransport._BaseRestrictAllowedResources._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateWorkload: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{workload.name=organizations/*/locations/*/workloads/*}', - 'body': 'workload', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = assuredworkloads.UpdateWorkloadRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAssuredWorkloadsServiceRestTransport._BaseUpdateWorkload._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=organizations/*/locations/*/operations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseListOperations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=organizations/*/locations/*}/operations', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - -__all__=( - '_BaseAssuredWorkloadsServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/types/__init__.py b/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/types/__init__.py deleted file mode 
100644 index 5185b3a44053..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/types/__init__.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .assuredworkloads import ( - AcknowledgeViolationRequest, - AcknowledgeViolationResponse, - CreateWorkloadOperationMetadata, - CreateWorkloadRequest, - DeleteWorkloadRequest, - GetViolationRequest, - GetWorkloadRequest, - ListViolationsRequest, - ListViolationsResponse, - ListWorkloadsRequest, - ListWorkloadsResponse, - RestrictAllowedResourcesRequest, - RestrictAllowedResourcesResponse, - TimeWindow, - UpdateWorkloadRequest, - Violation, - Workload, -) - -__all__ = ( - 'AcknowledgeViolationRequest', - 'AcknowledgeViolationResponse', - 'CreateWorkloadOperationMetadata', - 'CreateWorkloadRequest', - 'DeleteWorkloadRequest', - 'GetViolationRequest', - 'GetWorkloadRequest', - 'ListViolationsRequest', - 'ListViolationsResponse', - 'ListWorkloadsRequest', - 'ListWorkloadsResponse', - 'RestrictAllowedResourcesRequest', - 'RestrictAllowedResourcesResponse', - 'TimeWindow', - 'UpdateWorkloadRequest', - 'Violation', - 'Workload', -) diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/types/assuredworkloads.py b/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/types/assuredworkloads.py deleted file mode 100644 index 73394fefda1a..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/google/cloud/assuredworkloads_v1/types/assuredworkloads.py +++ /dev/null @@ -1,1161 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
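The message classes defined below are proto-plus wrappers, which is why the transports above can call ``.pb(request)`` and ``to_json(...)`` on them. A small round-trip sketch; the resource name is illustrative:

.. code-block:: python

    from google.cloud.assuredworkloads_v1.types import assuredworkloads

    request = assuredworkloads.GetWorkloadRequest(
        name="organizations/123/locations/us-central1/workloads/wl-1"
    )
    pb = assuredworkloads.GetWorkloadRequest.pb(request)  # raw protobuf message
    payload = assuredworkloads.GetWorkloadRequest.to_json(request)
    roundtrip = assuredworkloads.GetWorkloadRequest.from_json(payload)
    assert roundtrip.name == request.name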
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.assuredworkloads.v1', - manifest={ - 'CreateWorkloadRequest', - 'UpdateWorkloadRequest', - 'DeleteWorkloadRequest', - 'GetWorkloadRequest', - 'ListWorkloadsRequest', - 'ListWorkloadsResponse', - 'Workload', - 'CreateWorkloadOperationMetadata', - 'RestrictAllowedResourcesRequest', - 'RestrictAllowedResourcesResponse', - 'AcknowledgeViolationRequest', - 'AcknowledgeViolationResponse', - 'TimeWindow', - 'ListViolationsRequest', - 'ListViolationsResponse', - 'GetViolationRequest', - 'Violation', - }, -) - - -class CreateWorkloadRequest(proto.Message): - r"""Request for creating a workload. - - Attributes: - parent (str): - Required. The resource name of the new Workload's parent. - Must be of the form - ``organizations/{org_id}/locations/{location_id}``. - workload (google.cloud.assuredworkloads_v1.types.Workload): - Required. Assured Workload to create - external_id (str): - Optional. A identifier associated with the - workload and underlying projects which allows - for the break down of billing costs for a - workload. The value provided for the identifier - will add a label to the workload and contained - projects with the identifier as the value. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - workload: 'Workload' = proto.Field( - proto.MESSAGE, - number=2, - message='Workload', - ) - external_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class UpdateWorkloadRequest(proto.Message): - r"""Request for Updating a workload. - - Attributes: - workload (google.cloud.assuredworkloads_v1.types.Workload): - Required. The workload to update. The workload's ``name`` - field is used to identify the workload to be updated. - Format: - organizations/{org_id}/locations/{location_id}/workloads/{workload_id} - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. The list of fields to be updated. - """ - - workload: 'Workload' = proto.Field( - proto.MESSAGE, - number=1, - message='Workload', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class DeleteWorkloadRequest(proto.Message): - r"""Request for deleting a Workload. - - Attributes: - name (str): - Required. The ``name`` field is used to identify the - workload. Format: - organizations/{org_id}/locations/{location_id}/workloads/{workload_id} - etag (str): - Optional. The etag of the workload. - If this is provided, it must match the server's - etag. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - etag: str = proto.Field( - proto.STRING, - number=2, - ) - - -class GetWorkloadRequest(proto.Message): - r"""Request for fetching a workload. - - Attributes: - name (str): - Required. The resource name of the Workload to fetch. This - is the workload's relative path in the API, formatted as - "organizations/{organization_id}/locations/{location_id}/workloads/{workload_id}". - For example, - "organizations/123/locations/us-east1/workloads/assured-workload-1". - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListWorkloadsRequest(proto.Message): - r"""Request for fetching workloads in an organization. 
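The page tokens described below rarely need manual handling: the client-level ``list_workloads`` wraps this request/response pair in a pager that fetches subsequent pages on iteration. A sketch, with ``client`` as in the earlier examples and an illustrative parent:

.. code-block:: python

    for workload in client.list_workloads(
        parent="organizations/123/locations/us-central1"
    ):
        print(workload.name, workload.compliance_regime)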
- - Attributes: - parent (str): - Required. Parent Resource to list workloads from. Must be of - the form ``organizations/{org_id}/locations/{location}``. - page_size (int): - Page size. - page_token (str): - Page token returned from previous request. - Page token contains context from previous - request. Page token needs to be passed in the - second and following requests. - filter (str): - A custom filter for filtering by properties - of a workload. At this time, only filtering by - labels is supported. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListWorkloadsResponse(proto.Message): - r"""Response of ListWorkloads endpoint. - - Attributes: - workloads (MutableSequence[google.cloud.assuredworkloads_v1.types.Workload]): - List of Workloads under a given parent. - next_page_token (str): - The next page token. Return empty if reached - the last page. - """ - - @property - def raw_page(self): - return self - - workloads: MutableSequence['Workload'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Workload', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class Workload(proto.Message): - r"""A Workload object for managing highly regulated workloads of - cloud customers. - - Attributes: - name (str): - Optional. The resource name of the workload. - Format: - - organizations/{organization}/locations/{location}/workloads/{workload} - - Read-only. - display_name (str): - Required. The user-assigned display name of - the Workload. When present it must be between 4 - to 30 characters. Allowed characters are: - lowercase and uppercase letters, numbers, - hyphen, and spaces. - - Example: My Workload - resources (MutableSequence[google.cloud.assuredworkloads_v1.types.Workload.ResourceInfo]): - Output only. The resources associated with - this workload. These resources will be created - when creating the workload. If any of the - projects already exist, the workload creation - will fail. Always read only. - compliance_regime (google.cloud.assuredworkloads_v1.types.Workload.ComplianceRegime): - Required. Immutable. Compliance Regime - associated with this workload. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Immutable. The Workload creation - timestamp. - billing_account (str): - Optional. The billing account used for the resources which - are direct children of workload. This billing account is - initially associated with the resources created as part of - Workload creation. After the initial creation of these - resources, the customer can change the assigned billing - account. The resource name has the form - ``billingAccounts/{billing_account_id}``. For example, - ``billingAccounts/012345-567890-ABCDEF``. - etag (str): - Optional. ETag of the workload, it is - calculated on the basis of the Workload - contents. It will be used in Update & Delete - operations. - labels (MutableMapping[str, str]): - Optional. Labels applied to the workload. - provisioned_resources_parent (str): - Input only. The parent resource for the resources managed by - this Assured Workload. May be either empty or a folder - resource which is a child of the Workload parent. If not - specified all resources are created under the parent - organization. 
-
-
-class Workload(proto.Message):
-    r"""A Workload object for managing highly regulated workloads of
-    cloud customers.
-
-    Attributes:
-        name (str):
-            Optional. The resource name of the workload.
-            Format:
-
-            organizations/{organization}/locations/{location}/workloads/{workload}
-
-            Read-only.
-        display_name (str):
-            Required. The user-assigned display name of
-            the Workload. When present it must be between 4
-            and 30 characters. Allowed characters are:
-            lowercase and uppercase letters, numbers,
-            hyphen, and spaces.
-
-            Example: My Workload
-        resources (MutableSequence[google.cloud.assuredworkloads_v1.types.Workload.ResourceInfo]):
-            Output only. The resources associated with
-            this workload. These resources will be created
-            when creating the workload. If any of the
-            projects already exist, the workload creation
-            will fail. Always read only.
-        compliance_regime (google.cloud.assuredworkloads_v1.types.Workload.ComplianceRegime):
-            Required. Immutable. Compliance Regime
-            associated with this workload.
-        create_time (google.protobuf.timestamp_pb2.Timestamp):
-            Output only. Immutable. The Workload creation
-            timestamp.
-        billing_account (str):
-            Optional. The billing account used for the resources which
-            are direct children of workload. This billing account is
-            initially associated with the resources created as part of
-            Workload creation. After the initial creation of these
-            resources, the customer can change the assigned billing
-            account. The resource name has the form
-            ``billingAccounts/{billing_account_id}``. For example,
-            ``billingAccounts/012345-567890-ABCDEF``.
-        etag (str):
-            Optional. ETag of the workload. It is
-            calculated on the basis of the Workload
-            contents and is used in Update & Delete
-            operations.
-        labels (MutableMapping[str, str]):
-            Optional. Labels applied to the workload.
-        provisioned_resources_parent (str):
-            Input only. The parent resource for the resources managed
-            by this Assured Workload. May be either empty or a folder
-            resource which is a child of the Workload parent. If not
-            specified all resources are created under the parent
-            organization. Format: folders/{folder_id}
-        kms_settings (google.cloud.assuredworkloads_v1.types.Workload.KMSSettings):
-            Input only. Settings used to create a CMEK crypto key. When
-            set, a project with a KMS CMEK key is provisioned. This
-            field is deprecated as of Feb 28, 2022. In order to create
-            a Keyring, callers should specify ENCRYPTION_KEYS_PROJECT
-            or KEYRING in the ResourceSettings.resource_type field.
-        resource_settings (MutableSequence[google.cloud.assuredworkloads_v1.types.Workload.ResourceSettings]):
-            Input only. Resource properties that are used
-            to customize workload resources. These
-            properties (such as custom project id) will be
-            used to create workload resources if possible.
-            This field is optional.
-        kaj_enrollment_state (google.cloud.assuredworkloads_v1.types.Workload.KajEnrollmentState):
-            Output only. Represents the KAJ enrollment
-            state of the given workload.
-        enable_sovereign_controls (bool):
-            Optional. Indicates the sovereignty status of
-            the given workload. Currently meant to be used
-            by Europe/Canada customers.
-        saa_enrollment_response (google.cloud.assuredworkloads_v1.types.Workload.SaaEnrollmentResponse):
-            Output only. Represents the SAA enrollment
-            response of the given workload. The SAA
-            enrollment response is queried during the
-            GetWorkload call. In failure cases, a
-            user-friendly error message is shown on the SAA
-            details page.
-        compliant_but_disallowed_services (MutableSequence[str]):
-            Output only. URLs for services which are
-            compliant for this Assured Workload, but which
-            are currently disallowed by the
-            ResourceUsageRestriction org policy. Invoke the
-            RestrictAllowedResources endpoint to allow your
-            project developers to use these services in
-            their environment.
-        partner (google.cloud.assuredworkloads_v1.types.Workload.Partner):
-            Optional. Partner regime associated with
-            this workload.
-    """
-    class ComplianceRegime(proto.Enum):
-        r"""Supported Compliance Regimes.
-
-        Values:
-            COMPLIANCE_REGIME_UNSPECIFIED (0):
-                Unknown compliance regime.
-            IL4 (1):
-                Information protection as per DoD IL4
-                requirements.
-            CJIS (2):
-                Criminal Justice Information Services (CJIS)
-                Security policies.
-            FEDRAMP_HIGH (3):
-                FedRAMP High data protection controls.
-            FEDRAMP_MODERATE (4):
-                FedRAMP Moderate data protection controls.
-            US_REGIONAL_ACCESS (5):
-                Assured Workloads for US Regions data
-                protection controls.
-            HIPAA (6):
-                Health Insurance Portability and
-                Accountability Act controls.
-            HITRUST (7):
-                Health Information Trust Alliance controls.
-            EU_REGIONS_AND_SUPPORT (8):
-                Assured Workloads for EU Regions and Support
-                controls.
-            CA_REGIONS_AND_SUPPORT (9):
-                Assured Workloads for Canada Regions and
-                Support controls.
-            ITAR (10):
-                International Traffic in Arms Regulations.
-            AU_REGIONS_AND_US_SUPPORT (11):
-                Assured Workloads for Australia Regions and
-                Support controls. Available for public preview
-                consumption. Don't create production workloads.
-            ASSURED_WORKLOADS_FOR_PARTNERS (12):
-                Assured Workloads for Partners.
-        """
-        COMPLIANCE_REGIME_UNSPECIFIED = 0
-        IL4 = 1
-        CJIS = 2
-        FEDRAMP_HIGH = 3
-        FEDRAMP_MODERATE = 4
-        US_REGIONAL_ACCESS = 5
-        HIPAA = 6
-        HITRUST = 7
-        EU_REGIONS_AND_SUPPORT = 8
-        CA_REGIONS_AND_SUPPORT = 9
-        ITAR = 10
-        AU_REGIONS_AND_US_SUPPORT = 11
-        ASSURED_WORKLOADS_FOR_PARTNERS = 12
-
-    class KajEnrollmentState(proto.Enum):
-        r"""Key Access Justifications (KAJ) Enrollment State.
-
-        Values:
-            KAJ_ENROLLMENT_STATE_UNSPECIFIED (0):
-                Default State for KAJ Enrollment.
-            KAJ_ENROLLMENT_STATE_PENDING (1):
-                Pending State for KAJ Enrollment.
-            KAJ_ENROLLMENT_STATE_COMPLETE (2):
-                Complete State for KAJ Enrollment.
-        """
-        KAJ_ENROLLMENT_STATE_UNSPECIFIED = 0
-        KAJ_ENROLLMENT_STATE_PENDING = 1
-        KAJ_ENROLLMENT_STATE_COMPLETE = 2
-
-    class Partner(proto.Enum):
-        r"""Supported Assured Workloads Partners.
-
-        Values:
-            PARTNER_UNSPECIFIED (0):
-                Unknown partner regime/controls.
-            LOCAL_CONTROLS_BY_S3NS (1):
-                S3NS regime/controls.
-        """
-        PARTNER_UNSPECIFIED = 0
-        LOCAL_CONTROLS_BY_S3NS = 1
-
-    class ResourceInfo(proto.Message):
-        r"""Represents the resources that are children of this Workload.
-
-        Attributes:
-            resource_id (int):
-                Resource identifier. For a project this represents
-                project_number.
-            resource_type (google.cloud.assuredworkloads_v1.types.Workload.ResourceInfo.ResourceType):
-                Indicates the type of resource.
-        """
-        class ResourceType(proto.Enum):
-            r"""The type of resource.
-
-            Values:
-                RESOURCE_TYPE_UNSPECIFIED (0):
-                    Unknown resource type.
-                CONSUMER_PROJECT (1):
-                    Consumer project. AssuredWorkloads Projects are no longer
-                    supported. This field will be ignored only in CreateWorkload
-                    requests. ListWorkloads and GetWorkload will continue to
-                    provide projects information. Use CONSUMER_FOLDER instead.
-                CONSUMER_FOLDER (4):
-                    Consumer Folder.
-                ENCRYPTION_KEYS_PROJECT (2):
-                    Consumer project containing encryption keys.
-                KEYRING (3):
-                    Keyring resource that hosts encryption keys.
-            """
-            RESOURCE_TYPE_UNSPECIFIED = 0
-            CONSUMER_PROJECT = 1
-            CONSUMER_FOLDER = 4
-            ENCRYPTION_KEYS_PROJECT = 2
-            KEYRING = 3
-
-        resource_id: int = proto.Field(
-            proto.INT64,
-            number=1,
-        )
-        resource_type: 'Workload.ResourceInfo.ResourceType' = proto.Field(
-            proto.ENUM,
-            number=2,
-            enum='Workload.ResourceInfo.ResourceType',
-        )
-
-    class KMSSettings(proto.Message):
-        r"""Settings specific to the Key Management Service. This message is
-        deprecated. In order to create a Keyring, callers should specify
-        ENCRYPTION_KEYS_PROJECT or KEYRING in the
-        ResourceSettings.resource_type field.
-
-        Attributes:
-            next_rotation_time (google.protobuf.timestamp_pb2.Timestamp):
-                Required. Input only. Immutable. The time at
-                which the Key Management Service will
-                automatically create a new version of the crypto
-                key and mark it as the primary.
-            rotation_period (google.protobuf.duration_pb2.Duration):
-                Required. Input only. Immutable. [next_rotation_time] will
-                be advanced by this period when the Key Management Service
-                automatically rotates a key. Must be at least 24 hours and
-                at most 876,000 hours.
-        """
-
-        next_rotation_time: timestamp_pb2.Timestamp = proto.Field(
-            proto.MESSAGE,
-            number=1,
-            message=timestamp_pb2.Timestamp,
-        )
-        rotation_period: duration_pb2.Duration = proto.Field(
-            proto.MESSAGE,
-            number=2,
-            message=duration_pb2.Duration,
-        )
-
-    class ResourceSettings(proto.Message):
-        r"""Represents the custom settings for the resources to be
-        created.
-
-        Attributes:
-            resource_id (str):
-                Resource identifier. For a project this represents
-                project_id. If the project is already taken, the workload
-                creation will fail. For a KeyRing, this represents the
-                keyring_id. For a folder, don't set this value as folder_id
-                is assigned by Google.
-            resource_type (google.cloud.assuredworkloads_v1.types.Workload.ResourceInfo.ResourceType):
-                Indicates the type of resource. This field should be
-                specified so that the id corresponds to the right resource
-                type (CONSUMER_FOLDER or ENCRYPTION_KEYS_PROJECT).
-            display_name (str):
-                User-assigned resource display name.
-                If not empty it will be used to create a
-                resource with the specified name.
-        """
-
-        resource_id: str = proto.Field(
-            proto.STRING,
-            number=1,
-        )
-        resource_type: 'Workload.ResourceInfo.ResourceType' = proto.Field(
-            proto.ENUM,
-            number=2,
-            enum='Workload.ResourceInfo.ResourceType',
-        )
-        display_name: str = proto.Field(
-            proto.STRING,
-            number=3,
-        )
- """ - - resource_id: str = proto.Field( - proto.STRING, - number=1, - ) - resource_type: 'Workload.ResourceInfo.ResourceType' = proto.Field( - proto.ENUM, - number=2, - enum='Workload.ResourceInfo.ResourceType', - ) - display_name: str = proto.Field( - proto.STRING, - number=3, - ) - - class SaaEnrollmentResponse(proto.Message): - r"""Signed Access Approvals (SAA) enrollment response. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - setup_status (google.cloud.assuredworkloads_v1.types.Workload.SaaEnrollmentResponse.SetupState): - Indicates SAA enrollment status of a given - workload. - - This field is a member of `oneof`_ ``_setup_status``. - setup_errors (MutableSequence[google.cloud.assuredworkloads_v1.types.Workload.SaaEnrollmentResponse.SetupError]): - Indicates SAA enrollment setup error if any. - """ - class SetupState(proto.Enum): - r"""Setup state of SAA enrollment. - - Values: - SETUP_STATE_UNSPECIFIED (0): - Unspecified. - STATUS_PENDING (1): - SAA enrollment pending. - STATUS_COMPLETE (2): - SAA enrollment comopleted. - """ - SETUP_STATE_UNSPECIFIED = 0 - STATUS_PENDING = 1 - STATUS_COMPLETE = 2 - - class SetupError(proto.Enum): - r"""Setup error of SAA enrollment. - - Values: - SETUP_ERROR_UNSPECIFIED (0): - Unspecified. - ERROR_INVALID_BASE_SETUP (1): - Invalid states for all customers, to be - redirected to AA UI for additional details. - ERROR_MISSING_EXTERNAL_SIGNING_KEY (2): - Returned when there is not an EKM key - configured. - ERROR_NOT_ALL_SERVICES_ENROLLED (3): - Returned when there are no enrolled services - or the customer is enrolled in CAA only for a - subset of services. - ERROR_SETUP_CHECK_FAILED (4): - Returned when exception was encountered - during evaluation of other criteria. 
- """ - SETUP_ERROR_UNSPECIFIED = 0 - ERROR_INVALID_BASE_SETUP = 1 - ERROR_MISSING_EXTERNAL_SIGNING_KEY = 2 - ERROR_NOT_ALL_SERVICES_ENROLLED = 3 - ERROR_SETUP_CHECK_FAILED = 4 - - setup_status: 'Workload.SaaEnrollmentResponse.SetupState' = proto.Field( - proto.ENUM, - number=1, - optional=True, - enum='Workload.SaaEnrollmentResponse.SetupState', - ) - setup_errors: MutableSequence['Workload.SaaEnrollmentResponse.SetupError'] = proto.RepeatedField( - proto.ENUM, - number=2, - enum='Workload.SaaEnrollmentResponse.SetupError', - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - resources: MutableSequence[ResourceInfo] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=ResourceInfo, - ) - compliance_regime: ComplianceRegime = proto.Field( - proto.ENUM, - number=4, - enum=ComplianceRegime, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - billing_account: str = proto.Field( - proto.STRING, - number=6, - ) - etag: str = proto.Field( - proto.STRING, - number=9, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=10, - ) - provisioned_resources_parent: str = proto.Field( - proto.STRING, - number=13, - ) - kms_settings: KMSSettings = proto.Field( - proto.MESSAGE, - number=14, - message=KMSSettings, - ) - resource_settings: MutableSequence[ResourceSettings] = proto.RepeatedField( - proto.MESSAGE, - number=15, - message=ResourceSettings, - ) - kaj_enrollment_state: KajEnrollmentState = proto.Field( - proto.ENUM, - number=17, - enum=KajEnrollmentState, - ) - enable_sovereign_controls: bool = proto.Field( - proto.BOOL, - number=18, - ) - saa_enrollment_response: SaaEnrollmentResponse = proto.Field( - proto.MESSAGE, - number=20, - message=SaaEnrollmentResponse, - ) - compliant_but_disallowed_services: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=24, - ) - partner: Partner = proto.Field( - proto.ENUM, - number=25, - enum=Partner, - ) - - -class CreateWorkloadOperationMetadata(proto.Message): - r"""Operation metadata to give request details of CreateWorkload. - - Attributes: - create_time (google.protobuf.timestamp_pb2.Timestamp): - Optional. Time when the operation was - created. - display_name (str): - Optional. The display name of the workload. - parent (str): - Optional. The parent of the workload. - compliance_regime (google.cloud.assuredworkloads_v1.types.Workload.ComplianceRegime): - Optional. Compliance controls that should be - applied to the resources managed by the - workload. - """ - - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - parent: str = proto.Field( - proto.STRING, - number=3, - ) - compliance_regime: 'Workload.ComplianceRegime' = proto.Field( - proto.ENUM, - number=4, - enum='Workload.ComplianceRegime', - ) - - -class RestrictAllowedResourcesRequest(proto.Message): - r"""Request for restricting list of available resources in - Workload environment. - - Attributes: - name (str): - Required. The resource name of the Workload. This is the - workloads's relative path in the API, formatted as - "organizations/{organization_id}/locations/{location_id}/workloads/{workload_id}". - For example, - "organizations/123/locations/us-east1/workloads/assured-workload-1". 
-
-
-class RestrictAllowedResourcesResponse(proto.Message):
-    r"""Response for restricting the list of allowed resources.
-    """
-
-
-class AcknowledgeViolationRequest(proto.Message):
-    r"""Request for acknowledging the violation.
-    Next Id: 4
-
-    Attributes:
-        name (str):
-            Required. The resource name of the Violation
-            to acknowledge. Format:
-
-            organizations/{organization}/locations/{location}/workloads/{workload}/violations/{violation}
-        comment (str):
-            Required. Business justification explaining
-            the need for violation acknowledgement.
-        non_compliant_org_policy (str):
-            Optional. This field is deprecated and will be removed in a
-            future version of the API. Name of the OrgPolicy which was
-            modified with a non-compliant change and resulted in this
-            violation. Format:
-            projects/{project_number}/policies/{constraint_name}
-            folders/{folder_id}/policies/{constraint_name}
-            organizations/{organization_id}/policies/{constraint_name}
-    """
-
-    name: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    comment: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-    non_compliant_org_policy: str = proto.Field(
-        proto.STRING,
-        number=3,
-    )
-
-
-class AcknowledgeViolationResponse(proto.Message):
-    r"""Response for violation acknowledgement.
-    """
-
-
-class TimeWindow(proto.Message):
-    r"""Interval defining a time window.
-
-    Attributes:
-        start_time (google.protobuf.timestamp_pb2.Timestamp):
-            The start of the time window.
-        end_time (google.protobuf.timestamp_pb2.Timestamp):
-            The end of the time window.
-    """
-
-    start_time: timestamp_pb2.Timestamp = proto.Field(
-        proto.MESSAGE,
-        number=1,
-        message=timestamp_pb2.Timestamp,
-    )
-    end_time: timestamp_pb2.Timestamp = proto.Field(
-        proto.MESSAGE,
-        number=2,
-        message=timestamp_pb2.Timestamp,
-    )
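``TimeWindow`` bounds are protobuf ``Timestamp`` values. A small sketch of building a 30-day window for the ``ListViolations`` interval below, assuming a protobuf runtime whose ``Timestamp.FromDatetime`` accepts timezone-aware datetimes:

.. code-block:: python

    import datetime

    from google.cloud import assuredworkloads_v1
    from google.protobuf import timestamp_pb2

    now = datetime.datetime.now(datetime.timezone.utc)

    # Convert datetimes to protobuf Timestamps for the window bounds.
    start = timestamp_pb2.Timestamp()
    start.FromDatetime(now - datetime.timedelta(days=30))
    end = timestamp_pb2.Timestamp()
    end.FromDatetime(now)

    window = assuredworkloads_v1.TimeWindow(start_time=start, end_time=end)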
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - interval: 'TimeWindow' = proto.Field( - proto.MESSAGE, - number=2, - message='TimeWindow', - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - page_token: str = proto.Field( - proto.STRING, - number=4, - ) - filter: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListViolationsResponse(proto.Message): - r"""Response of ListViolations endpoint. - - Attributes: - violations (MutableSequence[google.cloud.assuredworkloads_v1.types.Violation]): - List of Violations under a Workload. - next_page_token (str): - The next page token. Returns empty if reached - the last page. - """ - - @property - def raw_page(self): - return self - - violations: MutableSequence['Violation'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Violation', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class GetViolationRequest(proto.Message): - r"""Request for fetching a Workload Violation. - - Attributes: - name (str): - Required. The resource name of the Violation - to fetch (ie. Violation.name). Format: - - organizations/{organization}/locations/{location}/workloads/{workload}/violations/{violation} - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class Violation(proto.Message): - r"""Workload monitoring Violation. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Output only. Immutable. Name of the Violation. Format: - organizations/{organization}/locations/{location}/workloads/{workload_id}/violations/{violations_id} - description (str): - Output only. Description for the Violation. - e.g. OrgPolicy gcp.resourceLocations has non - compliant value. - begin_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Time of the event which - triggered the Violation. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The last time when the Violation - record was updated. - resolve_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Time of the event which fixed - the Violation. If the violation is ACTIVE this - will be empty. - category (str): - Output only. Category under which this - violation is mapped. e.g. Location, Service - Usage, Access, Encryption, etc. - state (google.cloud.assuredworkloads_v1.types.Violation.State): - Output only. State of the violation - org_policy_constraint (str): - Output only. Immutable. The - org-policy-constraint that was incorrectly - changed, which resulted in this violation. - audit_log_link (str): - Output only. Immutable. Audit Log Link for - violated resource Format: - - https://console.cloud.google.com/logs/query;query={logName}{protoPayload.resourceName}{timeRange}{folder} - non_compliant_org_policy (str): - Output only. Immutable. Name of the OrgPolicy which was - modified with non-compliant change and resulted this - violation. Format: - projects/{project_number}/policies/{constraint_name} - folders/{folder_id}/policies/{constraint_name} - organizations/{organization_id}/policies/{constraint_name} - remediation (google.cloud.assuredworkloads_v1.types.Violation.Remediation): - Output only. Compliance violation remediation - acknowledged (bool): - Output only. A boolean that indicates if the - violation is acknowledged - acknowledgement_time (google.protobuf.timestamp_pb2.Timestamp): - Optional. Timestamp when this violation was - acknowledged last. 
-
-
-class Violation(proto.Message):
-    r"""Workload monitoring Violation.
-
-    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
-
-    Attributes:
-        name (str):
-            Output only. Immutable. Name of the Violation. Format:
-            organizations/{organization}/locations/{location}/workloads/{workload_id}/violations/{violations_id}
-        description (str):
-            Output only. Description for the Violation.
-            e.g. OrgPolicy gcp.resourceLocations has a
-            non-compliant value.
-        begin_time (google.protobuf.timestamp_pb2.Timestamp):
-            Output only. Time of the event which
-            triggered the Violation.
-        update_time (google.protobuf.timestamp_pb2.Timestamp):
-            Output only. The last time when the Violation
-            record was updated.
-        resolve_time (google.protobuf.timestamp_pb2.Timestamp):
-            Output only. Time of the event which fixed
-            the Violation. If the violation is ACTIVE this
-            will be empty.
-        category (str):
-            Output only. Category under which this
-            violation is mapped. e.g. Location, Service
-            Usage, Access, Encryption, etc.
-        state (google.cloud.assuredworkloads_v1.types.Violation.State):
-            Output only. State of the violation.
-        org_policy_constraint (str):
-            Output only. Immutable. The
-            org-policy-constraint that was incorrectly
-            changed, which resulted in this violation.
-        audit_log_link (str):
-            Output only. Immutable. Audit Log Link for the
-            violated resource. Format:
-
-            https://console.cloud.google.com/logs/query;query={logName}{protoPayload.resourceName}{timeRange}{folder}
-        non_compliant_org_policy (str):
-            Output only. Immutable. Name of the OrgPolicy which was
-            modified with a non-compliant change and resulted in this
-            violation. Format:
-            projects/{project_number}/policies/{constraint_name}
-            folders/{folder_id}/policies/{constraint_name}
-            organizations/{organization_id}/policies/{constraint_name}
-        remediation (google.cloud.assuredworkloads_v1.types.Violation.Remediation):
-            Output only. Compliance violation remediation.
-        acknowledged (bool):
-            Output only. A boolean that indicates if the
-            violation is acknowledged.
-        acknowledgement_time (google.protobuf.timestamp_pb2.Timestamp):
-            Optional. Timestamp when this violation was
-            last acknowledged. This will be absent when the
-            acknowledged field is marked as false.
-
-            This field is a member of `oneof`_ ``_acknowledgement_time``.
-        exception_audit_log_link (str):
-            Output only. Immutable. Audit Log link to
-            find the business justification provided for the
-            violation exception. Format:
-
-            https://console.cloud.google.com/logs/query;query={logName}{protoPayload.resourceName}{protoPayload.methodName}{timeRange}{organization}
-    """
-    class State(proto.Enum):
-        r"""Violation State Values.
-
-        Values:
-            STATE_UNSPECIFIED (0):
-                Unspecified state.
-            RESOLVED (2):
-                Violation is resolved.
-            UNRESOLVED (3):
-                Violation is unresolved.
-            EXCEPTION (4):
-                Violation has an exception.
-        """
-        STATE_UNSPECIFIED = 0
-        RESOLVED = 2
-        UNRESOLVED = 3
-        EXCEPTION = 4
-
-    class Remediation(proto.Message):
-        r"""Represents remediation guidance to resolve a compliance
-        violation for an AssuredWorkload.
-
-        Attributes:
-            instructions (google.cloud.assuredworkloads_v1.types.Violation.Remediation.Instructions):
-                Required. Remediation instructions to resolve
-                violations.
-            compliant_values (MutableSequence[str]):
-                Values that can resolve the violation.
-                For example: for list org policy violations,
-                this will either be the list of allowed or
-                denied values.
-            remediation_type (google.cloud.assuredworkloads_v1.types.Violation.Remediation.RemediationType):
-                Output only. Remediation type based on the
-                type of org policy values violated.
-        """
-        class RemediationType(proto.Enum):
-            r"""Classifies remediation into various types based on the kind
-            of violation. For example, violations caused by changes in a
-            boolean org policy require different remediation instructions
-            than violations caused by changes in the allowed values of a
-            list org policy.
-
-            Values:
-                REMEDIATION_TYPE_UNSPECIFIED (0):
-                    Unspecified remediation type.
-                REMEDIATION_BOOLEAN_ORG_POLICY_VIOLATION (1):
-                    Remediation type for boolean org policy.
-                REMEDIATION_LIST_ALLOWED_VALUES_ORG_POLICY_VIOLATION (2):
-                    Remediation type for list org policies which
-                    have allowed values in the monitoring rule.
-                REMEDIATION_LIST_DENIED_VALUES_ORG_POLICY_VIOLATION (3):
-                    Remediation type for list org policies which
-                    have denied values in the monitoring rule.
-                REMEDIATION_RESTRICT_CMEK_CRYPTO_KEY_PROJECTS_ORG_POLICY_VIOLATION (4):
-                    Remediation type for
-                    gcp.restrictCmekCryptoKeyProjects.
-            """
-            REMEDIATION_TYPE_UNSPECIFIED = 0
-            REMEDIATION_BOOLEAN_ORG_POLICY_VIOLATION = 1
-            REMEDIATION_LIST_ALLOWED_VALUES_ORG_POLICY_VIOLATION = 2
-            REMEDIATION_LIST_DENIED_VALUES_ORG_POLICY_VIOLATION = 3
-            REMEDIATION_RESTRICT_CMEK_CRYPTO_KEY_PROJECTS_ORG_POLICY_VIOLATION = 4
-
-        class Instructions(proto.Message):
-            r"""Instructions to remediate a violation.
-
-            Attributes:
-                gcloud_instructions (google.cloud.assuredworkloads_v1.types.Violation.Remediation.Instructions.Gcloud):
-                    Remediation instructions to resolve the
-                    violation via the gcloud cli.
-                console_instructions (google.cloud.assuredworkloads_v1.types.Violation.Remediation.Instructions.Console):
-                    Remediation instructions to resolve the
-                    violation via the cloud console.
-            """
-
-            class Gcloud(proto.Message):
-                r"""Remediation instructions to resolve a violation via the
-                gcloud cli.
-
-                Attributes:
-                    gcloud_commands (MutableSequence[str]):
-                        Gcloud commands to resolve the violation.
-                    steps (MutableSequence[str]):
-                        Steps to resolve the violation via the gcloud
-                        cli.
-                    additional_links (MutableSequence[str]):
-                        Additional URLs for more information about
-                        the steps.
-                """
-
-                gcloud_commands: MutableSequence[str] = proto.RepeatedField(
-                    proto.STRING,
-                    number=1,
-                )
-
steps: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - additional_links: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - class Console(proto.Message): - r"""Remediation instructions to resolve violation via cloud - console - - Attributes: - console_uris (MutableSequence[str]): - Link to console page where violations can be - resolved - steps (MutableSequence[str]): - Steps to resolve violation via cloud console - additional_links (MutableSequence[str]): - Additional urls for more information about - steps - """ - - console_uris: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - steps: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - additional_links: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - gcloud_instructions: 'Violation.Remediation.Instructions.Gcloud' = proto.Field( - proto.MESSAGE, - number=1, - message='Violation.Remediation.Instructions.Gcloud', - ) - console_instructions: 'Violation.Remediation.Instructions.Console' = proto.Field( - proto.MESSAGE, - number=2, - message='Violation.Remediation.Instructions.Console', - ) - - instructions: 'Violation.Remediation.Instructions' = proto.Field( - proto.MESSAGE, - number=1, - message='Violation.Remediation.Instructions', - ) - compliant_values: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - remediation_type: 'Violation.Remediation.RemediationType' = proto.Field( - proto.ENUM, - number=3, - enum='Violation.Remediation.RemediationType', - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - description: str = proto.Field( - proto.STRING, - number=2, - ) - begin_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - resolve_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - category: str = proto.Field( - proto.STRING, - number=6, - ) - state: State = proto.Field( - proto.ENUM, - number=7, - enum=State, - ) - org_policy_constraint: str = proto.Field( - proto.STRING, - number=8, - ) - audit_log_link: str = proto.Field( - proto.STRING, - number=11, - ) - non_compliant_org_policy: str = proto.Field( - proto.STRING, - number=12, - ) - remediation: Remediation = proto.Field( - proto.MESSAGE, - number=13, - message=Remediation, - ) - acknowledged: bool = proto.Field( - proto.BOOL, - number=14, - ) - acknowledgement_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=15, - optional=True, - message=timestamp_pb2.Timestamp, - ) - exception_audit_log_link: str = proto.Field( - proto.STRING, - number=16, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/mypy.ini b/owl-bot-staging/google-cloud-assured-workloads/v1/mypy.ini deleted file mode 100644 index 574c5aed394b..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/noxfile.py b/owl-bot-staging/google-cloud-assured-workloads/v1/noxfile.py deleted file mode 100644 index eb6f5f93bda4..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/noxfile.py +++ /dev/null @@ -1,280 +0,0 @@ -# -*- coding: 
utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import os
-import pathlib
-import re
-import shutil
-import subprocess
-import sys
-
-
-import nox  # type: ignore
-
-ALL_PYTHON = [
-    "3.7",
-    "3.8",
-    "3.9",
-    "3.10",
-    "3.11",
-    "3.12",
-    "3.13",
-]
-
-CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
-
-LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt"
-PACKAGE_NAME = 'google-cloud-assured-workloads'
-
-BLACK_VERSION = "black==22.3.0"
-BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"]
-DEFAULT_PYTHON_VERSION = "3.13"
-
-nox.sessions = [
-    "unit",
-    "cover",
-    "mypy",
-    "check_lower_bounds",
-    # exclude update_lower_bounds from default
-    "docs",
-    "blacken",
-    "lint",
-    "prerelease_deps",
-]
-
-@nox.session(python=ALL_PYTHON)
-@nox.parametrize(
-    "protobuf_implementation",
-    [ "python", "upb", "cpp" ],
-)
-def unit(session, protobuf_implementation):
-    """Run the unit test suite."""
-
-    if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
-        session.skip("cpp implementation is not supported in python 3.11+")
-
-    session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"')
-    session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt")
-
-    # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped.
-    # The 'cpp' implementation requires Protobuf<4.
-    if protobuf_implementation == "cpp":
-        session.install("protobuf<4")
-
-    session.run(
-        'py.test',
-        '--quiet',
-        '--cov=google/cloud/assuredworkloads_v1/',
-        '--cov=tests/',
-        '--cov-config=.coveragerc',
-        '--cov-report=term',
-        '--cov-report=html',
-        os.path.join('tests', 'unit', ''.join(session.posargs)),
-        env={
-            "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
-        },
-    )
-
-@nox.session(python=ALL_PYTHON[-1])
-@nox.parametrize(
-    "protobuf_implementation",
-    [ "python", "upb", "cpp" ],
-)
-def prerelease_deps(session, protobuf_implementation):
-    """Run the unit test suite against pre-release versions of dependencies."""
-
-    if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
-        session.skip("cpp implementation is not supported in python 3.11+")
-
-    # Install test environment dependencies
-    session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"')
-
-    # Install the package without dependencies
-    session.install('-e', '.', '--no-deps')
-
-    # We test the minimum dependency versions using the minimum Python
-    # version so the lowest python runtime that we test has a corresponding constraints
-    # file, located at `testing/constraints--.txt`, which contains all of the
-    # dependencies and extras.
-    with open(
-        CURRENT_DIRECTORY
-        / "testing"
-        / f"constraints-{ALL_PYTHON[0]}.txt",
-        encoding="utf-8",
-    ) as constraints_file:
-        constraints_text = constraints_file.read()
-
-    # Ignore leading whitespace and comment lines.
- constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - session.install(*constraints_deps) - - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 - "grpcio!=1.67.0rc1", - "grpcio-status", - "protobuf", - "proto-plus", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) - - # Remaining dependencies - other_deps = [ - "requests", - ] - session.install(*other_deps) - - # Print out prerelease package versions - - session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") - session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("python", "-c", "import grpc; print(grpc.__version__)") - session.run( - "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" - ) - session.run( - "python", "-c", "import proto; print(proto.__version__)" - ) - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/assuredworkloads_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. - """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. 
- """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_acknowledge_violation_async.py b/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_acknowledge_violation_async.py deleted file mode 100644 index d35782fbe72d..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_acknowledge_violation_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AcknowledgeViolation -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-assured-workloads - - -# [START assuredworkloads_v1_generated_AssuredWorkloadsService_AcknowledgeViolation_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import assuredworkloads_v1 - - -async def sample_acknowledge_violation(): - # Create a client - client = assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient() - - # Initialize request argument(s) - request = assuredworkloads_v1.AcknowledgeViolationRequest( - name="name_value", - comment="comment_value", - ) - - # Make the request - response = await client.acknowledge_violation(request=request) - - # Handle the response - print(response) - -# [END assuredworkloads_v1_generated_AssuredWorkloadsService_AcknowledgeViolation_async] diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_acknowledge_violation_sync.py b/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_acknowledge_violation_sync.py deleted file mode 100644 index ac58e034ffa2..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_acknowledge_violation_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AcknowledgeViolation -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-assured-workloads - - -# [START assuredworkloads_v1_generated_AssuredWorkloadsService_AcknowledgeViolation_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import assuredworkloads_v1 - - -def sample_acknowledge_violation(): - # Create a client - client = assuredworkloads_v1.AssuredWorkloadsServiceClient() - - # Initialize request argument(s) - request = assuredworkloads_v1.AcknowledgeViolationRequest( - name="name_value", - comment="comment_value", - ) - - # Make the request - response = client.acknowledge_violation(request=request) - - # Handle the response - print(response) - -# [END assuredworkloads_v1_generated_AssuredWorkloadsService_AcknowledgeViolation_sync] diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_create_workload_async.py b/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_create_workload_async.py deleted file mode 100644 index bc5c295b66d2..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_create_workload_async.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateWorkload -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-assured-workloads - - -# [START assuredworkloads_v1_generated_AssuredWorkloadsService_CreateWorkload_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import assuredworkloads_v1 - - -async def sample_create_workload(): - # Create a client - client = assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient() - - # Initialize request argument(s) - workload = assuredworkloads_v1.Workload() - workload.display_name = "display_name_value" - workload.compliance_regime = "ASSURED_WORKLOADS_FOR_PARTNERS" - - request = assuredworkloads_v1.CreateWorkloadRequest( - parent="parent_value", - workload=workload, - ) - - # Make the request - operation = client.create_workload(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END assuredworkloads_v1_generated_AssuredWorkloadsService_CreateWorkload_async] diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_create_workload_sync.py b/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_create_workload_sync.py deleted file mode 100644 index 7888daf7d668..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_create_workload_sync.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateWorkload -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-assured-workloads - - -# [START assuredworkloads_v1_generated_AssuredWorkloadsService_CreateWorkload_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import assuredworkloads_v1 - - -def sample_create_workload(): - # Create a client - client = assuredworkloads_v1.AssuredWorkloadsServiceClient() - - # Initialize request argument(s) - workload = assuredworkloads_v1.Workload() - workload.display_name = "display_name_value" - workload.compliance_regime = "ASSURED_WORKLOADS_FOR_PARTNERS" - - request = assuredworkloads_v1.CreateWorkloadRequest( - parent="parent_value", - workload=workload, - ) - - # Make the request - operation = client.create_workload(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END assuredworkloads_v1_generated_AssuredWorkloadsService_CreateWorkload_sync] diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_delete_workload_async.py b/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_delete_workload_async.py deleted file mode 100644 index 1e64f18aed87..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_delete_workload_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteWorkload -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-assured-workloads - - -# [START assuredworkloads_v1_generated_AssuredWorkloadsService_DeleteWorkload_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import assuredworkloads_v1 - - -async def sample_delete_workload(): - # Create a client - client = assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient() - - # Initialize request argument(s) - request = assuredworkloads_v1.DeleteWorkloadRequest( - name="name_value", - ) - - # Make the request - await client.delete_workload(request=request) - - -# [END assuredworkloads_v1_generated_AssuredWorkloadsService_DeleteWorkload_async] diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_delete_workload_sync.py b/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_delete_workload_sync.py deleted file mode 100644 index f7c33e4d14fe..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_delete_workload_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteWorkload -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-assured-workloads - - -# [START assuredworkloads_v1_generated_AssuredWorkloadsService_DeleteWorkload_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import assuredworkloads_v1 - - -def sample_delete_workload(): - # Create a client - client = assuredworkloads_v1.AssuredWorkloadsServiceClient() - - # Initialize request argument(s) - request = assuredworkloads_v1.DeleteWorkloadRequest( - name="name_value", - ) - - # Make the request - client.delete_workload(request=request) - - -# [END assuredworkloads_v1_generated_AssuredWorkloadsService_DeleteWorkload_sync] diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_get_violation_async.py b/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_get_violation_async.py deleted file mode 100644 index b5c50d4f5c28..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_get_violation_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetViolation -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-assured-workloads - - -# [START assuredworkloads_v1_generated_AssuredWorkloadsService_GetViolation_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import assuredworkloads_v1 - - -async def sample_get_violation(): - # Create a client - client = assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient() - - # Initialize request argument(s) - request = assuredworkloads_v1.GetViolationRequest( - name="name_value", - ) - - # Make the request - response = await client.get_violation(request=request) - - # Handle the response - print(response) - -# [END assuredworkloads_v1_generated_AssuredWorkloadsService_GetViolation_async] diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_get_violation_sync.py b/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_get_violation_sync.py deleted file mode 100644 index 9ea1790a5ec7..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_get_violation_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetViolation -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-assured-workloads - - -# [START assuredworkloads_v1_generated_AssuredWorkloadsService_GetViolation_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import assuredworkloads_v1 - - -def sample_get_violation(): - # Create a client - client = assuredworkloads_v1.AssuredWorkloadsServiceClient() - - # Initialize request argument(s) - request = assuredworkloads_v1.GetViolationRequest( - name="name_value", - ) - - # Make the request - response = client.get_violation(request=request) - - # Handle the response - print(response) - -# [END assuredworkloads_v1_generated_AssuredWorkloadsService_GetViolation_sync] diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_get_workload_async.py b/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_get_workload_async.py deleted file mode 100644 index ea81052c8dd8..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_get_workload_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetWorkload -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-assured-workloads - - -# [START assuredworkloads_v1_generated_AssuredWorkloadsService_GetWorkload_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import assuredworkloads_v1 - - -async def sample_get_workload(): - # Create a client - client = assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient() - - # Initialize request argument(s) - request = assuredworkloads_v1.GetWorkloadRequest( - name="name_value", - ) - - # Make the request - response = await client.get_workload(request=request) - - # Handle the response - print(response) - -# [END assuredworkloads_v1_generated_AssuredWorkloadsService_GetWorkload_async] diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_get_workload_sync.py b/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_get_workload_sync.py deleted file mode 100644 index 75ef7b3b45d9..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_get_workload_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetWorkload -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-assured-workloads - - -# [START assuredworkloads_v1_generated_AssuredWorkloadsService_GetWorkload_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import assuredworkloads_v1 - - -def sample_get_workload(): - # Create a client - client = assuredworkloads_v1.AssuredWorkloadsServiceClient() - - # Initialize request argument(s) - request = assuredworkloads_v1.GetWorkloadRequest( - name="name_value", - ) - - # Make the request - response = client.get_workload(request=request) - - # Handle the response - print(response) - -# [END assuredworkloads_v1_generated_AssuredWorkloadsService_GetWorkload_sync] diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_list_violations_async.py b/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_list_violations_async.py deleted file mode 100644 index d6500b89a6a0..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_list_violations_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListViolations -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-assured-workloads - - -# [START assuredworkloads_v1_generated_AssuredWorkloadsService_ListViolations_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
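
The ``name="name_value"`` placeholders in these samples will fail against the live API. A hedged sketch of the usual failure handling, using the standard ``google.api_core`` exception hierarchy and an invented resource name:

.. code-block:: python

    from google.api_core import exceptions
    from google.cloud import assuredworkloads_v1

    client = assuredworkloads_v1.AssuredWorkloadsServiceClient()

    try:
        workload = client.get_workload(
            request=assuredworkloads_v1.GetWorkloadRequest(
                # Hypothetical resource name.
                name="organizations/123/locations/us-central1/workloads/456",
            )
        )
    except exceptions.NotFound:
        workload = None  # No workload with that name is visible to the caller.
    except exceptions.PermissionDenied:
        raise  # Credentials lack access to the workload.
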
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import assuredworkloads_v1 - - -async def sample_list_violations(): - # Create a client - client = assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient() - - # Initialize request argument(s) - request = assuredworkloads_v1.ListViolationsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_violations(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END assuredworkloads_v1_generated_AssuredWorkloadsService_ListViolations_async] diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_list_violations_sync.py b/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_list_violations_sync.py deleted file mode 100644 index d065de414b69..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_list_violations_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListViolations -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-assured-workloads - - -# [START assuredworkloads_v1_generated_AssuredWorkloadsService_ListViolations_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import assuredworkloads_v1 - - -def sample_list_violations(): - # Create a client - client = assuredworkloads_v1.AssuredWorkloadsServiceClient() - - # Initialize request argument(s) - request = assuredworkloads_v1.ListViolationsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_violations(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END assuredworkloads_v1_generated_AssuredWorkloadsService_ListViolations_sync] diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_list_workloads_async.py b/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_list_workloads_async.py deleted file mode 100644 index fcb53e6af444..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_list_workloads_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListWorkloads -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-assured-workloads - - -# [START assuredworkloads_v1_generated_AssuredWorkloadsService_ListWorkloads_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
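
The pager returned by ``list_violations`` hides the page protocol: iterating it, as the sync sample above does, yields ``Violation`` messages across all pages. When page boundaries matter (for checkpointing, say), the standard GAPIC pager surface also exposes the raw pages. A sketch, assuming that surface and an invented parent workload:

.. code-block:: python

    from google.cloud import assuredworkloads_v1

    client = assuredworkloads_v1.AssuredWorkloadsServiceClient()
    request = assuredworkloads_v1.ListViolationsRequest(
        # Hypothetical parent workload.
        parent="organizations/123/locations/us-central1/workloads/456",
        page_size=50,
    )

    pager = client.list_violations(request=request)

    # Walk page by page; each page is a ListViolationsResponse.
    for page in pager.pages:
        for violation in page.violations:
            print(violation.name)
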
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import assuredworkloads_v1 - - -async def sample_list_workloads(): - # Create a client - client = assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient() - - # Initialize request argument(s) - request = assuredworkloads_v1.ListWorkloadsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_workloads(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END assuredworkloads_v1_generated_AssuredWorkloadsService_ListWorkloads_async] diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_list_workloads_sync.py b/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_list_workloads_sync.py deleted file mode 100644 index 218313b45026..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_list_workloads_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListWorkloads -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-assured-workloads - - -# [START assuredworkloads_v1_generated_AssuredWorkloadsService_ListWorkloads_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import assuredworkloads_v1 - - -def sample_list_workloads(): - # Create a client - client = assuredworkloads_v1.AssuredWorkloadsServiceClient() - - # Initialize request argument(s) - request = assuredworkloads_v1.ListWorkloadsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_workloads(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END assuredworkloads_v1_generated_AssuredWorkloadsService_ListWorkloads_sync] diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_restrict_allowed_resources_async.py b/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_restrict_allowed_resources_async.py deleted file mode 100644 index c250b4464cf0..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_restrict_allowed_resources_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RestrictAllowedResources -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-assured-workloads - - -# [START assuredworkloads_v1_generated_AssuredWorkloadsService_RestrictAllowedResources_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
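
The async list samples above iterate the paged call's result directly with ``async for``. Depending on the client version, ``AssuredWorkloadsServiceAsyncClient.list_workloads`` may be a coroutine that must be awaited before the pager can be iterated; the defensive sketch below works either way.

.. code-block:: python

    import asyncio
    import inspect

    from google.cloud import assuredworkloads_v1


    async def main():
        client = assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient()
        request = assuredworkloads_v1.ListWorkloadsRequest(
            # Hypothetical parent.
            parent="organizations/123/locations/us-central1",
        )

        page_result = client.list_workloads(request=request)
        if inspect.isawaitable(page_result):
            # Some client versions define the method as a coroutine
            # that resolves to the async pager.
            page_result = await page_result

        async for workload in page_result:
            print(workload.name)


    asyncio.run(main())
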
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import assuredworkloads_v1 - - -async def sample_restrict_allowed_resources(): - # Create a client - client = assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient() - - # Initialize request argument(s) - request = assuredworkloads_v1.RestrictAllowedResourcesRequest( - name="name_value", - restriction_type="ALLOW_COMPLIANT_RESOURCES", - ) - - # Make the request - response = await client.restrict_allowed_resources(request=request) - - # Handle the response - print(response) - -# [END assuredworkloads_v1_generated_AssuredWorkloadsService_RestrictAllowedResources_async] diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_restrict_allowed_resources_sync.py b/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_restrict_allowed_resources_sync.py deleted file mode 100644 index 383f634370a4..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_restrict_allowed_resources_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RestrictAllowedResources -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-assured-workloads - - -# [START assuredworkloads_v1_generated_AssuredWorkloadsService_RestrictAllowedResources_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import assuredworkloads_v1 - - -def sample_restrict_allowed_resources(): - # Create a client - client = assuredworkloads_v1.AssuredWorkloadsServiceClient() - - # Initialize request argument(s) - request = assuredworkloads_v1.RestrictAllowedResourcesRequest( - name="name_value", - restriction_type="ALLOW_COMPLIANT_RESOURCES", - ) - - # Make the request - response = client.restrict_allowed_resources(request=request) - - # Handle the response - print(response) - -# [END assuredworkloads_v1_generated_AssuredWorkloadsService_RestrictAllowedResources_sync] diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_update_workload_async.py b/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_update_workload_async.py deleted file mode 100644 index 9e1f9f5a549b..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_update_workload_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateWorkload -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-assured-workloads - - -# [START assuredworkloads_v1_generated_AssuredWorkloadsService_UpdateWorkload_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
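
The samples set ``restriction_type`` from a string; proto-plus request types also accept the enum member, which fails at attribute lookup rather than at serialization if misspelled. A sketch, where the nested-enum spelling follows the usual GAPIC layout and is an assumption here:

.. code-block:: python

    from google.cloud import assuredworkloads_v1

    request = assuredworkloads_v1.RestrictAllowedResourcesRequest(
        # Hypothetical workload name.
        name="organizations/123/locations/us-central1/workloads/456",
        # Assumed nested-enum spelling; equivalent to the string form above.
        restriction_type=assuredworkloads_v1.RestrictAllowedResourcesRequest.RestrictionType.ALLOW_COMPLIANT_RESOURCES,
    )
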
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import assuredworkloads_v1 - - -async def sample_update_workload(): - # Create a client - client = assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient() - - # Initialize request argument(s) - workload = assuredworkloads_v1.Workload() - workload.display_name = "display_name_value" - workload.compliance_regime = "ASSURED_WORKLOADS_FOR_PARTNERS" - - request = assuredworkloads_v1.UpdateWorkloadRequest( - workload=workload, - ) - - # Make the request - response = await client.update_workload(request=request) - - # Handle the response - print(response) - -# [END assuredworkloads_v1_generated_AssuredWorkloadsService_UpdateWorkload_async] diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_update_workload_sync.py b/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_update_workload_sync.py deleted file mode 100644 index 0eb7ada129fd..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_update_workload_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateWorkload -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-assured-workloads - - -# [START assuredworkloads_v1_generated_AssuredWorkloadsService_UpdateWorkload_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import assuredworkloads_v1 - - -def sample_update_workload(): - # Create a client - client = assuredworkloads_v1.AssuredWorkloadsServiceClient() - - # Initialize request argument(s) - workload = assuredworkloads_v1.Workload() - workload.display_name = "display_name_value" - workload.compliance_regime = "ASSURED_WORKLOADS_FOR_PARTNERS" - - request = assuredworkloads_v1.UpdateWorkloadRequest( - workload=workload, - ) - - # Make the request - response = client.update_workload(request=request) - - # Handle the response - print(response) - -# [END assuredworkloads_v1_generated_AssuredWorkloadsService_UpdateWorkload_sync] diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1.json b/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1.json deleted file mode 100644 index 15061dde8248..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1.json +++ /dev/null @@ -1,1458 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.assuredworkloads.v1", - "version": "v1" - } - ], - "language": "PYTHON", - "name": "google-cloud-assured-workloads", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient", - "shortName": "AssuredWorkloadsServiceAsyncClient" - }, - "fullName": "google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient.acknowledge_violation", - "method": { - "fullName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService.AcknowledgeViolation", - "service": { - "fullName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService", - "shortName": "AssuredWorkloadsService" - }, - "shortName": "AcknowledgeViolation" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.assuredworkloads_v1.types.AcknowledgeViolationRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.assuredworkloads_v1.types.AcknowledgeViolationResponse", - "shortName": "acknowledge_violation" - }, - "description": "Sample for AcknowledgeViolation", - "file": "assuredworkloads_v1_generated_assured_workloads_service_acknowledge_violation_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "assuredworkloads_v1_generated_AssuredWorkloadsService_AcknowledgeViolation_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "assuredworkloads_v1_generated_assured_workloads_service_acknowledge_violation_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": 
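
The snippet metadata below records an ``update_mask`` parameter of type ``google.protobuf.field_mask_pb2.FieldMask`` for ``update_workload``, which the samples above leave unset. A sketch that limits the update to a single field, with an invented workload name:

.. code-block:: python

    from google.protobuf import field_mask_pb2

    from google.cloud import assuredworkloads_v1

    client = assuredworkloads_v1.AssuredWorkloadsServiceClient()

    workload = assuredworkloads_v1.Workload()
    # Hypothetical resource name identifying the workload to patch.
    workload.name = "organizations/123/locations/us-central1/workloads/456"
    workload.display_name = "renamed-workload"

    request = assuredworkloads_v1.UpdateWorkloadRequest(
        workload=workload,
        # Only display_name is read from `workload`; other fields are ignored.
        update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
    )

    response = client.update_workload(request=request)
    print(response)
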
"google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceClient", - "shortName": "AssuredWorkloadsServiceClient" - }, - "fullName": "google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceClient.acknowledge_violation", - "method": { - "fullName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService.AcknowledgeViolation", - "service": { - "fullName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService", - "shortName": "AssuredWorkloadsService" - }, - "shortName": "AcknowledgeViolation" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.assuredworkloads_v1.types.AcknowledgeViolationRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.assuredworkloads_v1.types.AcknowledgeViolationResponse", - "shortName": "acknowledge_violation" - }, - "description": "Sample for AcknowledgeViolation", - "file": "assuredworkloads_v1_generated_assured_workloads_service_acknowledge_violation_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "assuredworkloads_v1_generated_AssuredWorkloadsService_AcknowledgeViolation_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "assuredworkloads_v1_generated_assured_workloads_service_acknowledge_violation_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient", - "shortName": "AssuredWorkloadsServiceAsyncClient" - }, - "fullName": "google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient.create_workload", - "method": { - "fullName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService.CreateWorkload", - "service": { - "fullName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService", - "shortName": "AssuredWorkloadsService" - }, - "shortName": "CreateWorkload" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.assuredworkloads_v1.types.CreateWorkloadRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "workload", - "type": "google.cloud.assuredworkloads_v1.types.Workload" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_workload" - }, - "description": "Sample for CreateWorkload", - "file": "assuredworkloads_v1_generated_assured_workloads_service_create_workload_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "assuredworkloads_v1_generated_AssuredWorkloadsService_CreateWorkload_async", - "segments": [ - { - "end": 60, - "start": 27, - "type": "FULL" - }, - { - "end": 60, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 57, - "start": 51, - "type": 
"REQUEST_EXECUTION" - }, - { - "end": 61, - "start": 58, - "type": "RESPONSE_HANDLING" - } - ], - "title": "assuredworkloads_v1_generated_assured_workloads_service_create_workload_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceClient", - "shortName": "AssuredWorkloadsServiceClient" - }, - "fullName": "google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceClient.create_workload", - "method": { - "fullName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService.CreateWorkload", - "service": { - "fullName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService", - "shortName": "AssuredWorkloadsService" - }, - "shortName": "CreateWorkload" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.assuredworkloads_v1.types.CreateWorkloadRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "workload", - "type": "google.cloud.assuredworkloads_v1.types.Workload" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_workload" - }, - "description": "Sample for CreateWorkload", - "file": "assuredworkloads_v1_generated_assured_workloads_service_create_workload_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "assuredworkloads_v1_generated_AssuredWorkloadsService_CreateWorkload_sync", - "segments": [ - { - "end": 60, - "start": 27, - "type": "FULL" - }, - { - "end": 60, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 57, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 61, - "start": 58, - "type": "RESPONSE_HANDLING" - } - ], - "title": "assuredworkloads_v1_generated_assured_workloads_service_create_workload_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient", - "shortName": "AssuredWorkloadsServiceAsyncClient" - }, - "fullName": "google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient.delete_workload", - "method": { - "fullName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService.DeleteWorkload", - "service": { - "fullName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService", - "shortName": "AssuredWorkloadsService" - }, - "shortName": "DeleteWorkload" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.assuredworkloads_v1.types.DeleteWorkloadRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_workload" - }, - "description": "Sample for DeleteWorkload", - "file": "assuredworkloads_v1_generated_assured_workloads_service_delete_workload_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "assuredworkloads_v1_generated_AssuredWorkloadsService_DeleteWorkload_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": 
"CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "assuredworkloads_v1_generated_assured_workloads_service_delete_workload_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceClient", - "shortName": "AssuredWorkloadsServiceClient" - }, - "fullName": "google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceClient.delete_workload", - "method": { - "fullName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService.DeleteWorkload", - "service": { - "fullName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService", - "shortName": "AssuredWorkloadsService" - }, - "shortName": "DeleteWorkload" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.assuredworkloads_v1.types.DeleteWorkloadRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_workload" - }, - "description": "Sample for DeleteWorkload", - "file": "assuredworkloads_v1_generated_assured_workloads_service_delete_workload_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "assuredworkloads_v1_generated_AssuredWorkloadsService_DeleteWorkload_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "assuredworkloads_v1_generated_assured_workloads_service_delete_workload_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient", - "shortName": "AssuredWorkloadsServiceAsyncClient" - }, - "fullName": "google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient.get_violation", - "method": { - "fullName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService.GetViolation", - "service": { - "fullName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService", - "shortName": "AssuredWorkloadsService" - }, - "shortName": "GetViolation" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.assuredworkloads_v1.types.GetViolationRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.assuredworkloads_v1.types.Violation", - "shortName": "get_violation" - }, - "description": "Sample for GetViolation", - "file": "assuredworkloads_v1_generated_assured_workloads_service_get_violation_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "assuredworkloads_v1_generated_AssuredWorkloadsService_GetViolation_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": 
"CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "assuredworkloads_v1_generated_assured_workloads_service_get_violation_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceClient", - "shortName": "AssuredWorkloadsServiceClient" - }, - "fullName": "google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceClient.get_violation", - "method": { - "fullName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService.GetViolation", - "service": { - "fullName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService", - "shortName": "AssuredWorkloadsService" - }, - "shortName": "GetViolation" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.assuredworkloads_v1.types.GetViolationRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.assuredworkloads_v1.types.Violation", - "shortName": "get_violation" - }, - "description": "Sample for GetViolation", - "file": "assuredworkloads_v1_generated_assured_workloads_service_get_violation_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "assuredworkloads_v1_generated_AssuredWorkloadsService_GetViolation_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "assuredworkloads_v1_generated_assured_workloads_service_get_violation_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient", - "shortName": "AssuredWorkloadsServiceAsyncClient" - }, - "fullName": "google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient.get_workload", - "method": { - "fullName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService.GetWorkload", - "service": { - "fullName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService", - "shortName": "AssuredWorkloadsService" - }, - "shortName": "GetWorkload" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.assuredworkloads_v1.types.GetWorkloadRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.assuredworkloads_v1.types.Workload", - "shortName": "get_workload" - }, - "description": "Sample for GetWorkload", - "file": "assuredworkloads_v1_generated_assured_workloads_service_get_workload_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "assuredworkloads_v1_generated_AssuredWorkloadsService_GetWorkload_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 
51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "assuredworkloads_v1_generated_assured_workloads_service_get_workload_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceClient", - "shortName": "AssuredWorkloadsServiceClient" - }, - "fullName": "google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceClient.get_workload", - "method": { - "fullName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService.GetWorkload", - "service": { - "fullName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService", - "shortName": "AssuredWorkloadsService" - }, - "shortName": "GetWorkload" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.assuredworkloads_v1.types.GetWorkloadRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.assuredworkloads_v1.types.Workload", - "shortName": "get_workload" - }, - "description": "Sample for GetWorkload", - "file": "assuredworkloads_v1_generated_assured_workloads_service_get_workload_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "assuredworkloads_v1_generated_AssuredWorkloadsService_GetWorkload_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "assuredworkloads_v1_generated_assured_workloads_service_get_workload_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient", - "shortName": "AssuredWorkloadsServiceAsyncClient" - }, - "fullName": "google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient.list_violations", - "method": { - "fullName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService.ListViolations", - "service": { - "fullName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService", - "shortName": "AssuredWorkloadsService" - }, - "shortName": "ListViolations" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.assuredworkloads_v1.types.ListViolationsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.assuredworkloads_v1.services.assured_workloads_service.pagers.ListViolationsAsyncPager", - "shortName": "list_violations" - }, - "description": "Sample for ListViolations", - "file": "assuredworkloads_v1_generated_assured_workloads_service_list_violations_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"assuredworkloads_v1_generated_AssuredWorkloadsService_ListViolations_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "assuredworkloads_v1_generated_assured_workloads_service_list_violations_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceClient", - "shortName": "AssuredWorkloadsServiceClient" - }, - "fullName": "google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceClient.list_violations", - "method": { - "fullName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService.ListViolations", - "service": { - "fullName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService", - "shortName": "AssuredWorkloadsService" - }, - "shortName": "ListViolations" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.assuredworkloads_v1.types.ListViolationsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.assuredworkloads_v1.services.assured_workloads_service.pagers.ListViolationsPager", - "shortName": "list_violations" - }, - "description": "Sample for ListViolations", - "file": "assuredworkloads_v1_generated_assured_workloads_service_list_violations_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "assuredworkloads_v1_generated_AssuredWorkloadsService_ListViolations_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "assuredworkloads_v1_generated_assured_workloads_service_list_violations_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient", - "shortName": "AssuredWorkloadsServiceAsyncClient" - }, - "fullName": "google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient.list_workloads", - "method": { - "fullName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService.ListWorkloads", - "service": { - "fullName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService", - "shortName": "AssuredWorkloadsService" - }, - "shortName": "ListWorkloads" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.assuredworkloads_v1.types.ListWorkloadsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.assuredworkloads_v1.services.assured_workloads_service.pagers.ListWorkloadsAsyncPager", - "shortName": 
"list_workloads" - }, - "description": "Sample for ListWorkloads", - "file": "assuredworkloads_v1_generated_assured_workloads_service_list_workloads_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "assuredworkloads_v1_generated_AssuredWorkloadsService_ListWorkloads_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "assuredworkloads_v1_generated_assured_workloads_service_list_workloads_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceClient", - "shortName": "AssuredWorkloadsServiceClient" - }, - "fullName": "google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceClient.list_workloads", - "method": { - "fullName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService.ListWorkloads", - "service": { - "fullName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService", - "shortName": "AssuredWorkloadsService" - }, - "shortName": "ListWorkloads" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.assuredworkloads_v1.types.ListWorkloadsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.assuredworkloads_v1.services.assured_workloads_service.pagers.ListWorkloadsPager", - "shortName": "list_workloads" - }, - "description": "Sample for ListWorkloads", - "file": "assuredworkloads_v1_generated_assured_workloads_service_list_workloads_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "assuredworkloads_v1_generated_AssuredWorkloadsService_ListWorkloads_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "assuredworkloads_v1_generated_assured_workloads_service_list_workloads_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient", - "shortName": "AssuredWorkloadsServiceAsyncClient" - }, - "fullName": "google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient.restrict_allowed_resources", - "method": { - "fullName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService.RestrictAllowedResources", - "service": { - "fullName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService", - "shortName": "AssuredWorkloadsService" - }, - "shortName": "RestrictAllowedResources" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.assuredworkloads_v1.types.RestrictAllowedResourcesRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - 
"name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.assuredworkloads_v1.types.RestrictAllowedResourcesResponse", - "shortName": "restrict_allowed_resources" - }, - "description": "Sample for RestrictAllowedResources", - "file": "assuredworkloads_v1_generated_assured_workloads_service_restrict_allowed_resources_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "assuredworkloads_v1_generated_AssuredWorkloadsService_RestrictAllowedResources_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "assuredworkloads_v1_generated_assured_workloads_service_restrict_allowed_resources_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceClient", - "shortName": "AssuredWorkloadsServiceClient" - }, - "fullName": "google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceClient.restrict_allowed_resources", - "method": { - "fullName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService.RestrictAllowedResources", - "service": { - "fullName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService", - "shortName": "AssuredWorkloadsService" - }, - "shortName": "RestrictAllowedResources" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.assuredworkloads_v1.types.RestrictAllowedResourcesRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.assuredworkloads_v1.types.RestrictAllowedResourcesResponse", - "shortName": "restrict_allowed_resources" - }, - "description": "Sample for RestrictAllowedResources", - "file": "assuredworkloads_v1_generated_assured_workloads_service_restrict_allowed_resources_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "assuredworkloads_v1_generated_AssuredWorkloadsService_RestrictAllowedResources_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "assuredworkloads_v1_generated_assured_workloads_service_restrict_allowed_resources_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient", - "shortName": "AssuredWorkloadsServiceAsyncClient" - }, - "fullName": "google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient.update_workload", - "method": { - "fullName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService.UpdateWorkload", - "service": { - "fullName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService", - "shortName": "AssuredWorkloadsService" - }, - "shortName": "UpdateWorkload" - }, - 
"parameters": [ - { - "name": "request", - "type": "google.cloud.assuredworkloads_v1.types.UpdateWorkloadRequest" - }, - { - "name": "workload", - "type": "google.cloud.assuredworkloads_v1.types.Workload" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.assuredworkloads_v1.types.Workload", - "shortName": "update_workload" - }, - "description": "Sample for UpdateWorkload", - "file": "assuredworkloads_v1_generated_assured_workloads_service_update_workload_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "assuredworkloads_v1_generated_AssuredWorkloadsService_UpdateWorkload_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "assuredworkloads_v1_generated_assured_workloads_service_update_workload_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceClient", - "shortName": "AssuredWorkloadsServiceClient" - }, - "fullName": "google.cloud.assuredworkloads_v1.AssuredWorkloadsServiceClient.update_workload", - "method": { - "fullName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService.UpdateWorkload", - "service": { - "fullName": "google.cloud.assuredworkloads.v1.AssuredWorkloadsService", - "shortName": "AssuredWorkloadsService" - }, - "shortName": "UpdateWorkload" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.assuredworkloads_v1.types.UpdateWorkloadRequest" - }, - { - "name": "workload", - "type": "google.cloud.assuredworkloads_v1.types.Workload" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.assuredworkloads_v1.types.Workload", - "shortName": "update_workload" - }, - "description": "Sample for UpdateWorkload", - "file": "assuredworkloads_v1_generated_assured_workloads_service_update_workload_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "assuredworkloads_v1_generated_AssuredWorkloadsService_UpdateWorkload_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "assuredworkloads_v1_generated_assured_workloads_service_update_workload_sync.py" - } - ] -} diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/scripts/fixup_assuredworkloads_v1_keywords.py 
b/owl-bot-staging/google-cloud-assured-workloads/v1/scripts/fixup_assuredworkloads_v1_keywords.py deleted file mode 100644 index bba2ef251532..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/scripts/fixup_assuredworkloads_v1_keywords.py +++ /dev/null @@ -1,184 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class assuredworkloadsCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'acknowledge_violation': ('name', 'comment', 'non_compliant_org_policy', ), - 'create_workload': ('parent', 'workload', 'external_id', ), - 'delete_workload': ('name', 'etag', ), - 'get_violation': ('name', ), - 'get_workload': ('name', ), - 'list_violations': ('parent', 'interval', 'page_size', 'page_token', 'filter', ), - 'list_workloads': ('parent', 'page_size', 'page_token', 'filter', ), - 'restrict_allowed_resources': ('name', 'restriction_type', ), - 'update_workload': ('workload', 'update_mask', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. 
- for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=assuredworkloadsCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the assuredworkloads client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/setup.py b/owl-bot-staging/google-cloud-assured-workloads/v1/setup.py deleted file mode 100644 index d6c9dac04ce9..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/setup.py +++ /dev/null @@ -1,98 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
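The call-transformer in the fixup script above folds flattened positional arguments into a single request dict via libcst. A minimal, self-contained sketch of that transformation, assuming only the libcst package; the method table here is a hypothetical one-entry stand-in, and the re-attachment of retry/timeout/metadata control arguments that the real script performs is omitted:

import libcst as cst

class RequestDictSketch(cst.CSTTransformer):
    # Hypothetical one-entry table; the real script maps every API method.
    METHOD_TO_PARAMS = {'update_workload': ('workload', 'update_mask')}

    def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
        try:
            params = self.METHOD_TO_PARAMS[original.func.attr.value]
        except (AttributeError, KeyError):
            return updated  # not a known API method call; leave it alone
        # Fold each positional argument into a "'param': value" dict entry.
        elements = [
            cst.DictElement(cst.SimpleString(repr(name)), arg.value)
            for name, arg in zip(params, updated.args)
        ]
        request = cst.Arg(value=cst.Dict(elements), keyword=cst.Name("request"))
        return updated.with_changes(args=[request])

module = cst.parse_module("client.update_workload(my_workload, my_mask)\n")
print(module.visit(RequestDictSketch()).code)
# -> client.update_workload(request = {'workload': my_workload, 'update_mask': my_mask})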
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-assured-workloads' - - -description = "Google Cloud Assured Workloads API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/assuredworkloads/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", -] -extras = { -} -url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-assured-workloads" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - extras_require=extras, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-assured-workloads/v1/testing/constraints-3.10.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/testing/constraints-3.10.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
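The setup.py above recovers the package version by regex-scanning gapic_version.py rather than importing it. A small illustration of that extraction, assuming a gapic_version.py line of the usual generated shape; note the unescaped dots in the generated pattern match any character between the digit groups, not only literal dots:

import re

# Assumed shape of the version line in gapic_version.py.
sample = '__version__ = "0.1.2"  # {x-release-please-version}'
candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", sample)
assert candidates == ["0.1.2"]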
-google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-assured-workloads/v1/testing/constraints-3.11.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/testing/constraints-3.11.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-assured-workloads/v1/testing/constraints-3.12.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/testing/constraints-3.12.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-assured-workloads/v1/testing/constraints-3.13.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/testing/constraints-3.13.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-assured-workloads/v1/testing/constraints-3.7.txt deleted file mode 100644 index fc812592b0ee..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/testing/constraints-3.7.txt +++ /dev/null @@ -1,10 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -proto-plus==1.22.3 -protobuf==3.20.2 diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-assured-workloads/v1/testing/constraints-3.8.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/testing/constraints-3.8.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-assured-workloads/v1/testing/constraints-3.9.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/testing/constraints-3.9.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
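The constraints-3.7.txt file above pins each dependency to the lower bound declared in setup.py, so unit tests exercise the oldest supported versions. A hedged sketch of deriving such a pin, assuming the third-party packaging library is available:

from packaging.requirements import Requirement

def lower_bound_pin(req_string: str) -> str:
    """Turn 'pkg >= 1.14.0, < 2.0.0' into the constraints pin 'pkg==1.14.0'."""
    req = Requirement(req_string)
    for spec in req.specifier:
        if spec.operator == ">=":
            return f"{req.name}=={spec.version}"
    raise ValueError(f"no lower bound declared in {req_string!r}")

print(lower_bound_pin("google-auth >= 2.14.1, < 3.0.0"))  # google-auth==2.14.1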
-google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/tests/__init__.py b/owl-bot-staging/google-cloud-assured-workloads/v1/tests/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/tests/unit/__init__.py b/owl-bot-staging/google-cloud-assured-workloads/v1/tests/unit/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-assured-workloads/v1/tests/unit/gapic/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/tests/unit/gapic/assuredworkloads_v1/__init__.py b/owl-bot-staging/google-cloud-assured-workloads/v1/tests/unit/gapic/assuredworkloads_v1/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/tests/unit/gapic/assuredworkloads_v1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1/tests/unit/gapic/assuredworkloads_v1/test_assured_workloads_service.py b/owl-bot-staging/google-cloud-assured-workloads/v1/tests/unit/gapic/assuredworkloads_v1/test_assured_workloads_service.py deleted file mode 100644 index a479ea7a6e91..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1/tests/unit/gapic/assuredworkloads_v1/test_assured_workloads_service.py +++ /dev/null @@ -1,7497 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation -from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.assuredworkloads_v1.services.assured_workloads_service import AssuredWorkloadsServiceAsyncClient -from google.cloud.assuredworkloads_v1.services.assured_workloads_service import AssuredWorkloadsServiceClient -from google.cloud.assuredworkloads_v1.services.assured_workloads_service import pagers -from google.cloud.assuredworkloads_v1.services.assured_workloads_service import transports -from google.cloud.assuredworkloads_v1.types import assuredworkloads -from google.longrunning import operations_pb2 # type: ignore -from 
google.oauth2 import service_account -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -import google.auth - - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": "service account credentials", - "principal": "service-account@example.com", -} -CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert AssuredWorkloadsServiceClient._get_default_mtls_endpoint(None) is None - assert AssuredWorkloadsServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert AssuredWorkloadsServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert AssuredWorkloadsServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert AssuredWorkloadsServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert AssuredWorkloadsServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert AssuredWorkloadsServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert AssuredWorkloadsServiceClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert AssuredWorkloadsServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - AssuredWorkloadsServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with 
mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert AssuredWorkloadsServiceClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert AssuredWorkloadsServiceClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert AssuredWorkloadsServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - AssuredWorkloadsServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert AssuredWorkloadsServiceClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert AssuredWorkloadsServiceClient._get_client_cert_source(None, False) is None - assert AssuredWorkloadsServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert AssuredWorkloadsServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert AssuredWorkloadsServiceClient._get_client_cert_source(None, True) is mock_default_cert_source - assert AssuredWorkloadsServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(AssuredWorkloadsServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssuredWorkloadsServiceClient)) -@mock.patch.object(AssuredWorkloadsServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssuredWorkloadsServiceAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = AssuredWorkloadsServiceClient._DEFAULT_UNIVERSE - default_endpoint = AssuredWorkloadsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = AssuredWorkloadsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert AssuredWorkloadsServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert AssuredWorkloadsServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == AssuredWorkloadsServiceClient.DEFAULT_MTLS_ENDPOINT - assert AssuredWorkloadsServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert AssuredWorkloadsServiceClient._get_api_endpoint(None, None, default_universe, "always") == AssuredWorkloadsServiceClient.DEFAULT_MTLS_ENDPOINT - assert AssuredWorkloadsServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == AssuredWorkloadsServiceClient.DEFAULT_MTLS_ENDPOINT - assert AssuredWorkloadsServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert 
AssuredWorkloadsServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - AssuredWorkloadsServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." - - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert AssuredWorkloadsServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert AssuredWorkloadsServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert AssuredWorkloadsServiceClient._get_universe_domain(None, None) == AssuredWorkloadsServiceClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - AssuredWorkloadsServiceClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." - -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = AssuredWorkloadsServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - -@pytest.mark.parametrize("error_code", [401,403,404,500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = AssuredWorkloadsServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - -@pytest.mark.parametrize("client_class,transport_name", [ - (AssuredWorkloadsServiceClient, "grpc"), - (AssuredWorkloadsServiceAsyncClient, "grpc_asyncio"), - (AssuredWorkloadsServiceClient, "rest"), -]) -def test_assured_workloads_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'assuredworkloads.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://assuredworkloads.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.AssuredWorkloadsServiceGrpcTransport, "grpc"), - (transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.AssuredWorkloadsServiceRestTransport, "rest"), -]) -def 
test_assured_workloads_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (AssuredWorkloadsServiceClient, "grpc"), - (AssuredWorkloadsServiceAsyncClient, "grpc_asyncio"), - (AssuredWorkloadsServiceClient, "rest"), -]) -def test_assured_workloads_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'assuredworkloads.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://assuredworkloads.googleapis.com' - ) - - -def test_assured_workloads_service_client_get_transport_class(): - transport = AssuredWorkloadsServiceClient.get_transport_class() - available_transports = [ - transports.AssuredWorkloadsServiceGrpcTransport, - transports.AssuredWorkloadsServiceRestTransport, - ] - assert transport in available_transports - - transport = AssuredWorkloadsServiceClient.get_transport_class("grpc") - assert transport == transports.AssuredWorkloadsServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (AssuredWorkloadsServiceClient, transports.AssuredWorkloadsServiceGrpcTransport, "grpc"), - (AssuredWorkloadsServiceAsyncClient, transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (AssuredWorkloadsServiceClient, transports.AssuredWorkloadsServiceRestTransport, "rest"), -]) -@mock.patch.object(AssuredWorkloadsServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssuredWorkloadsServiceClient)) -@mock.patch.object(AssuredWorkloadsServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssuredWorkloadsServiceAsyncClient)) -def test_assured_workloads_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(AssuredWorkloadsServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(AssuredWorkloadsServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. 
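The from_service_account_* tests above all assert the same transport-to-host mapping. Restated as a standalone helper (the names are illustrative, not library API):

def expected_host(transport_name: str,
                  dns: str = "assuredworkloads.googleapis.com") -> str:
    # gRPC transports dial host:port; the REST transport uses an https URL.
    if transport_name in ("grpc", "grpc_asyncio"):
        return f"{dns}:443"
    return f"https://{dns}"

assert expected_host("grpc") == "assuredworkloads.googleapis.com:443"
assert expected_host("rest") == "https://assuredworkloads.googleapis.com"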
- options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
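The three GOOGLE_API_USE_MTLS_ENDPOINT cases exercised above reduce to a small decision table. An illustrative restatement of the behavior these tests pin down (not the library's actual code; the mTLS hostname follows the pattern shown in test__get_default_mtls_endpoint):

def pick_endpoint(use_mtls_env: str, have_client_cert: bool,
                  default: str = "assuredworkloads.googleapis.com",
                  mtls: str = "assuredworkloads.mtls.googleapis.com") -> str:
    if use_mtls_env == "never":
        return default
    if use_mtls_env == "always":
        return mtls
    if use_mtls_env == "auto":
        return mtls if have_client_cert else default
    raise ValueError(
        "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be "
        "`never`, `auto` or `always`"
    )

assert pick_endpoint("never", have_client_cert=True) == "assuredworkloads.googleapis.com"
assert pick_endpoint("auto", have_client_cert=True) == "assuredworkloads.mtls.googleapis.com"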
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (AssuredWorkloadsServiceClient, transports.AssuredWorkloadsServiceGrpcTransport, "grpc", "true"), - (AssuredWorkloadsServiceAsyncClient, transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (AssuredWorkloadsServiceClient, transports.AssuredWorkloadsServiceGrpcTransport, "grpc", "false"), - (AssuredWorkloadsServiceAsyncClient, transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (AssuredWorkloadsServiceClient, transports.AssuredWorkloadsServiceRestTransport, "rest", "true"), - (AssuredWorkloadsServiceClient, transports.AssuredWorkloadsServiceRestTransport, "rest", "false"), -]) -@mock.patch.object(AssuredWorkloadsServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssuredWorkloadsServiceClient)) -@mock.patch.object(AssuredWorkloadsServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssuredWorkloadsServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_assured_workloads_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - AssuredWorkloadsServiceClient, AssuredWorkloadsServiceAsyncClient -]) -@mock.patch.object(AssuredWorkloadsServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AssuredWorkloadsServiceClient)) -@mock.patch.object(AssuredWorkloadsServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AssuredWorkloadsServiceAsyncClient)) -def test_assured_workloads_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - AssuredWorkloadsServiceClient, AssuredWorkloadsServiceAsyncClient -]) -@mock.patch.object(AssuredWorkloadsServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssuredWorkloadsServiceClient)) -@mock.patch.object(AssuredWorkloadsServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssuredWorkloadsServiceAsyncClient)) -def test_assured_workloads_service_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = AssuredWorkloadsServiceClient._DEFAULT_UNIVERSE - default_endpoint = AssuredWorkloadsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = AssuredWorkloadsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
- options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (AssuredWorkloadsServiceClient, transports.AssuredWorkloadsServiceGrpcTransport, "grpc"), - (AssuredWorkloadsServiceAsyncClient, transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (AssuredWorkloadsServiceClient, transports.AssuredWorkloadsServiceRestTransport, "rest"), -]) -def test_assured_workloads_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (AssuredWorkloadsServiceClient, transports.AssuredWorkloadsServiceGrpcTransport, "grpc", grpc_helpers), - (AssuredWorkloadsServiceAsyncClient, transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (AssuredWorkloadsServiceClient, transports.AssuredWorkloadsServiceRestTransport, "rest", None), -]) -def test_assured_workloads_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_assured_workloads_service_client_client_options_from_dict(): - with mock.patch('google.cloud.assuredworkloads_v1.services.assured_workloads_service.transports.AssuredWorkloadsServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = AssuredWorkloadsServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (AssuredWorkloadsServiceClient, transports.AssuredWorkloadsServiceGrpcTransport, "grpc", grpc_helpers), - (AssuredWorkloadsServiceAsyncClient, transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_assured_workloads_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "assuredworkloads.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="assuredworkloads.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - assuredworkloads.CreateWorkloadRequest, - dict, -]) -def test_create_workload(request_type, transport: str = 'grpc'): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_workload), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_workload(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = assuredworkloads.CreateWorkloadRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_workload_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = assuredworkloads.CreateWorkloadRequest( - parent='parent_value', - external_id='external_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_workload), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.create_workload(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == assuredworkloads.CreateWorkloadRequest( - parent='parent_value', - external_id='external_id_value', - ) - -def test_create_workload_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_workload in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_workload] = mock_rpc - request = {} - client.create_workload(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_workload(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_workload_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_workload in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_workload] = mock_rpc - - request = {} - await client.create_workload(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_workload(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_workload_async(transport: str = 'grpc_asyncio', request_type=assuredworkloads.CreateWorkloadRequest): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
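The use_cached_wrapped_rpc tests above pin down a construction-time caching scheme: every transport method is wrapped exactly once and later calls reuse the cached wrapper. A toy model of that scheme, with illustrative class names and no retry/timeout logic:

class _SketchTransport:
    def create_workload(self, request):
        return "response"

class _SketchClient:
    def __init__(self, transport):
        self._transport = transport
        # Stand-in for _prep_wrapped_messages: wrap each RPC once at
        # construction time, keyed by the bound transport method.
        self._wrapped_methods = {
            transport.create_workload: self._wrap(transport.create_workload),
        }

    @staticmethod
    def _wrap(rpc):
        def wrapped(request, **kwargs):
            # Real wrappers layer retry/timeout/metadata handling here.
            return rpc(request)
        return wrapped

    def create_workload(self, request):
        # Every call reuses the cached wrapper instead of re-wrapping.
        return self._wrapped_methods[self._transport.create_workload](request)

client = _SketchClient(_SketchTransport())
assert client.create_workload({}) == "response"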
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_workload), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_workload(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = assuredworkloads.CreateWorkloadRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_workload_async_from_dict(): - await test_create_workload_async(request_type=dict) - -def test_create_workload_field_headers(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = assuredworkloads.CreateWorkloadRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_workload), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_workload(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_workload_field_headers_async(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = assuredworkloads.CreateWorkloadRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_workload), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_workload(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_workload_flattened(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_workload), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.create_workload(
- parent='parent_value',
- workload=assuredworkloads.Workload(name='name_value'),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
- arg = args[0].workload
- mock_val = assuredworkloads.Workload(name='name_value')
- assert arg == mock_val
-
-
-def test_create_workload_flattened_error():
- client = AssuredWorkloadsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.create_workload(
- assuredworkloads.CreateWorkloadRequest(),
- parent='parent_value',
- workload=assuredworkloads.Workload(name='name_value'),
- )
-
-@pytest.mark.asyncio
-async def test_create_workload_flattened_async():
- client = AssuredWorkloadsServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_workload),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name='operations/spam')
- )
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.create_workload(
- parent='parent_value',
- workload=assuredworkloads.Workload(name='name_value'),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
- arg = args[0].workload
- mock_val = assuredworkloads.Workload(name='name_value')
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_workload_flattened_error_async():
- client = AssuredWorkloadsServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.create_workload(
- assuredworkloads.CreateWorkloadRequest(),
- parent='parent_value',
- workload=assuredworkloads.Workload(name='name_value'),
- )
-
-
-@pytest.mark.parametrize("request_type", [
- assuredworkloads.UpdateWorkloadRequest,
- dict,
-])
-def test_update_workload(request_type, transport: str = 'grpc'):
- client = AssuredWorkloadsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_workload),
- '__call__') as call:
- # Designate an appropriate return value for the call.
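- # Populate every scalar field so the response assertions below check non-default values.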
- call.return_value = assuredworkloads.Workload( - name='name_value', - display_name='display_name_value', - compliance_regime=assuredworkloads.Workload.ComplianceRegime.IL4, - billing_account='billing_account_value', - etag='etag_value', - provisioned_resources_parent='provisioned_resources_parent_value', - kaj_enrollment_state=assuredworkloads.Workload.KajEnrollmentState.KAJ_ENROLLMENT_STATE_PENDING, - enable_sovereign_controls=True, - compliant_but_disallowed_services=['compliant_but_disallowed_services_value'], - partner=assuredworkloads.Workload.Partner.LOCAL_CONTROLS_BY_S3NS, - ) - response = client.update_workload(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = assuredworkloads.UpdateWorkloadRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, assuredworkloads.Workload) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.compliance_regime == assuredworkloads.Workload.ComplianceRegime.IL4 - assert response.billing_account == 'billing_account_value' - assert response.etag == 'etag_value' - assert response.provisioned_resources_parent == 'provisioned_resources_parent_value' - assert response.kaj_enrollment_state == assuredworkloads.Workload.KajEnrollmentState.KAJ_ENROLLMENT_STATE_PENDING - assert response.enable_sovereign_controls is True - assert response.compliant_but_disallowed_services == ['compliant_but_disallowed_services_value'] - assert response.partner == assuredworkloads.Workload.Partner.LOCAL_CONTROLS_BY_S3NS - - -def test_update_workload_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = assuredworkloads.UpdateWorkloadRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_workload), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_workload(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == assuredworkloads.UpdateWorkloadRequest( - ) - -def test_update_workload_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_workload in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
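- # Swap the cached wrapped method for the mock so call counts can be asserted directly.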
- client._transport._wrapped_methods[client._transport.update_workload] = mock_rpc
- request = {}
- client.update_workload(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- client.update_workload(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_update_workload_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = AssuredWorkloadsServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.update_workload in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.update_workload] = mock_rpc
-
- request = {}
- await client.update_workload(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.update_workload(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_update_workload_async(transport: str = 'grpc_asyncio', request_type=assuredworkloads.UpdateWorkloadRequest):
- client = AssuredWorkloadsServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_workload),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(assuredworkloads.Workload(
- name='name_value',
- display_name='display_name_value',
- compliance_regime=assuredworkloads.Workload.ComplianceRegime.IL4,
- billing_account='billing_account_value',
- etag='etag_value',
- provisioned_resources_parent='provisioned_resources_parent_value',
- kaj_enrollment_state=assuredworkloads.Workload.KajEnrollmentState.KAJ_ENROLLMENT_STATE_PENDING,
- enable_sovereign_controls=True,
- compliant_but_disallowed_services=['compliant_but_disallowed_services_value'],
- partner=assuredworkloads.Workload.Partner.LOCAL_CONTROLS_BY_S3NS,
- ))
- response = await client.update_workload(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = assuredworkloads.UpdateWorkloadRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, assuredworkloads.Workload) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.compliance_regime == assuredworkloads.Workload.ComplianceRegime.IL4 - assert response.billing_account == 'billing_account_value' - assert response.etag == 'etag_value' - assert response.provisioned_resources_parent == 'provisioned_resources_parent_value' - assert response.kaj_enrollment_state == assuredworkloads.Workload.KajEnrollmentState.KAJ_ENROLLMENT_STATE_PENDING - assert response.enable_sovereign_controls is True - assert response.compliant_but_disallowed_services == ['compliant_but_disallowed_services_value'] - assert response.partner == assuredworkloads.Workload.Partner.LOCAL_CONTROLS_BY_S3NS - - -@pytest.mark.asyncio -async def test_update_workload_async_from_dict(): - await test_update_workload_async(request_type=dict) - -def test_update_workload_field_headers(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = assuredworkloads.UpdateWorkloadRequest() - - request.workload.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_workload), - '__call__') as call: - call.return_value = assuredworkloads.Workload() - client.update_workload(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'workload.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_workload_field_headers_async(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = assuredworkloads.UpdateWorkloadRequest() - - request.workload.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_workload), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(assuredworkloads.Workload()) - await client.update_workload(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'workload.name=name_value', - ) in kw['metadata'] - - -def test_update_workload_flattened(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_workload), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = assuredworkloads.Workload() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.update_workload(
- workload=assuredworkloads.Workload(name='name_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].workload
- mock_val = assuredworkloads.Workload(name='name_value')
- assert arg == mock_val
- arg = args[0].update_mask
- mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
- assert arg == mock_val
-
-
-def test_update_workload_flattened_error():
- client = AssuredWorkloadsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.update_workload(
- assuredworkloads.UpdateWorkloadRequest(),
- workload=assuredworkloads.Workload(name='name_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
-@pytest.mark.asyncio
-async def test_update_workload_flattened_async():
- client = AssuredWorkloadsServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_workload),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(assuredworkloads.Workload())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.update_workload(
- workload=assuredworkloads.Workload(name='name_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].workload
- mock_val = assuredworkloads.Workload(name='name_value')
- assert arg == mock_val
- arg = args[0].update_mask
- mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_workload_flattened_error_async():
- client = AssuredWorkloadsServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.update_workload(
- assuredworkloads.UpdateWorkloadRequest(),
- workload=assuredworkloads.Workload(name='name_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
-
-@pytest.mark.parametrize("request_type", [
- assuredworkloads.RestrictAllowedResourcesRequest,
- dict,
-])
-def test_restrict_allowed_resources(request_type, transport: str = 'grpc'):
- client = AssuredWorkloadsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.restrict_allowed_resources),
- '__call__') as call:
- # Designate an appropriate return value for the call.
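- # An empty response message is enough here; only its type is asserted below.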
- call.return_value = assuredworkloads.RestrictAllowedResourcesResponse( - ) - response = client.restrict_allowed_resources(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = assuredworkloads.RestrictAllowedResourcesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, assuredworkloads.RestrictAllowedResourcesResponse) - - -def test_restrict_allowed_resources_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = assuredworkloads.RestrictAllowedResourcesRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restrict_allowed_resources), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.restrict_allowed_resources(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == assuredworkloads.RestrictAllowedResourcesRequest( - name='name_value', - ) - -def test_restrict_allowed_resources_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.restrict_allowed_resources in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.restrict_allowed_resources] = mock_rpc - request = {} - client.restrict_allowed_resources(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.restrict_allowed_resources(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_restrict_allowed_resources_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = AssuredWorkloadsServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.restrict_allowed_resources in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.restrict_allowed_resources] = mock_rpc
-
- request = {}
- await client.restrict_allowed_resources(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.restrict_allowed_resources(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_restrict_allowed_resources_async(transport: str = 'grpc_asyncio', request_type=assuredworkloads.RestrictAllowedResourcesRequest):
- client = AssuredWorkloadsServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.restrict_allowed_resources),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(assuredworkloads.RestrictAllowedResourcesResponse(
- ))
- response = await client.restrict_allowed_resources(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = assuredworkloads.RestrictAllowedResourcesRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, assuredworkloads.RestrictAllowedResourcesResponse)
-
-
-@pytest.mark.asyncio
-async def test_restrict_allowed_resources_async_from_dict():
- await test_restrict_allowed_resources_async(request_type=dict)
-
-def test_restrict_allowed_resources_field_headers():
- client = AssuredWorkloadsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = assuredworkloads.RestrictAllowedResourcesRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.restrict_allowed_resources), - '__call__') as call: - call.return_value = assuredworkloads.RestrictAllowedResourcesResponse() - client.restrict_allowed_resources(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_restrict_allowed_resources_field_headers_async(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = assuredworkloads.RestrictAllowedResourcesRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restrict_allowed_resources), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(assuredworkloads.RestrictAllowedResourcesResponse()) - await client.restrict_allowed_resources(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - assuredworkloads.DeleteWorkloadRequest, - dict, -]) -def test_delete_workload(request_type, transport: str = 'grpc'): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_workload), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_workload(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = assuredworkloads.DeleteWorkloadRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_workload_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = assuredworkloads.DeleteWorkloadRequest( - name='name_value', - etag='etag_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_workload), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.delete_workload(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == assuredworkloads.DeleteWorkloadRequest( - name='name_value', - etag='etag_value', - ) - -def test_delete_workload_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_workload in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_workload] = mock_rpc - request = {} - client.delete_workload(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_workload(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_workload_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_workload in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_workload] = mock_rpc - - request = {} - await client.delete_workload(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_workload(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_workload_async(transport: str = 'grpc_asyncio', request_type=assuredworkloads.DeleteWorkloadRequest): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_workload), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_workload(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = assuredworkloads.DeleteWorkloadRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_workload_async_from_dict(): - await test_delete_workload_async(request_type=dict) - -def test_delete_workload_field_headers(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = assuredworkloads.DeleteWorkloadRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_workload), - '__call__') as call: - call.return_value = None - client.delete_workload(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_workload_field_headers_async(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = assuredworkloads.DeleteWorkloadRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_workload), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_workload(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_workload_flattened(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_workload), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_workload( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_workload_flattened_error(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError):
- client.delete_workload(
- assuredworkloads.DeleteWorkloadRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_delete_workload_flattened_async():
- client = AssuredWorkloadsServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_workload),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.delete_workload(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_workload_flattened_error_async():
- client = AssuredWorkloadsServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.delete_workload(
- assuredworkloads.DeleteWorkloadRequest(),
- name='name_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- assuredworkloads.GetWorkloadRequest,
- dict,
-])
-def test_get_workload(request_type, transport: str = 'grpc'):
- client = AssuredWorkloadsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_workload),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = assuredworkloads.Workload(
- name='name_value',
- display_name='display_name_value',
- compliance_regime=assuredworkloads.Workload.ComplianceRegime.IL4,
- billing_account='billing_account_value',
- etag='etag_value',
- provisioned_resources_parent='provisioned_resources_parent_value',
- kaj_enrollment_state=assuredworkloads.Workload.KajEnrollmentState.KAJ_ENROLLMENT_STATE_PENDING,
- enable_sovereign_controls=True,
- compliant_but_disallowed_services=['compliant_but_disallowed_services_value'],
- partner=assuredworkloads.Workload.Partner.LOCAL_CONTROLS_BY_S3NS,
- )
- response = client.get_workload(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = assuredworkloads.GetWorkloadRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, assuredworkloads.Workload) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.compliance_regime == assuredworkloads.Workload.ComplianceRegime.IL4 - assert response.billing_account == 'billing_account_value' - assert response.etag == 'etag_value' - assert response.provisioned_resources_parent == 'provisioned_resources_parent_value' - assert response.kaj_enrollment_state == assuredworkloads.Workload.KajEnrollmentState.KAJ_ENROLLMENT_STATE_PENDING - assert response.enable_sovereign_controls is True - assert response.compliant_but_disallowed_services == ['compliant_but_disallowed_services_value'] - assert response.partner == assuredworkloads.Workload.Partner.LOCAL_CONTROLS_BY_S3NS - - -def test_get_workload_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = assuredworkloads.GetWorkloadRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_workload), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_workload(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == assuredworkloads.GetWorkloadRequest( - name='name_value', - ) - -def test_get_workload_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_workload in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_workload] = mock_rpc - request = {} - client.get_workload(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.get_workload(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_workload_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = AssuredWorkloadsServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.get_workload in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.get_workload] = mock_rpc
-
- request = {}
- await client.get_workload(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.get_workload(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_workload_async(transport: str = 'grpc_asyncio', request_type=assuredworkloads.GetWorkloadRequest):
- client = AssuredWorkloadsServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_workload),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(assuredworkloads.Workload(
- name='name_value',
- display_name='display_name_value',
- compliance_regime=assuredworkloads.Workload.ComplianceRegime.IL4,
- billing_account='billing_account_value',
- etag='etag_value',
- provisioned_resources_parent='provisioned_resources_parent_value',
- kaj_enrollment_state=assuredworkloads.Workload.KajEnrollmentState.KAJ_ENROLLMENT_STATE_PENDING,
- enable_sovereign_controls=True,
- compliant_but_disallowed_services=['compliant_but_disallowed_services_value'],
- partner=assuredworkloads.Workload.Partner.LOCAL_CONTROLS_BY_S3NS,
- ))
- response = await client.get_workload(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = assuredworkloads.GetWorkloadRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, assuredworkloads.Workload) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.compliance_regime == assuredworkloads.Workload.ComplianceRegime.IL4 - assert response.billing_account == 'billing_account_value' - assert response.etag == 'etag_value' - assert response.provisioned_resources_parent == 'provisioned_resources_parent_value' - assert response.kaj_enrollment_state == assuredworkloads.Workload.KajEnrollmentState.KAJ_ENROLLMENT_STATE_PENDING - assert response.enable_sovereign_controls is True - assert response.compliant_but_disallowed_services == ['compliant_but_disallowed_services_value'] - assert response.partner == assuredworkloads.Workload.Partner.LOCAL_CONTROLS_BY_S3NS - - -@pytest.mark.asyncio -async def test_get_workload_async_from_dict(): - await test_get_workload_async(request_type=dict) - -def test_get_workload_field_headers(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = assuredworkloads.GetWorkloadRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_workload), - '__call__') as call: - call.return_value = assuredworkloads.Workload() - client.get_workload(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_workload_field_headers_async(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = assuredworkloads.GetWorkloadRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_workload), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(assuredworkloads.Workload()) - await client.get_workload(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_workload_flattened(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_workload), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = assuredworkloads.Workload() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_workload( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_get_workload_flattened_error():
- client = AssuredWorkloadsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.get_workload(
- assuredworkloads.GetWorkloadRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_get_workload_flattened_async():
- client = AssuredWorkloadsServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_workload),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(assuredworkloads.Workload())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.get_workload(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_workload_flattened_error_async():
- client = AssuredWorkloadsServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.get_workload(
- assuredworkloads.GetWorkloadRequest(),
- name='name_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- assuredworkloads.ListWorkloadsRequest,
- dict,
-])
-def test_list_workloads(request_type, transport: str = 'grpc'):
- client = AssuredWorkloadsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_workloads),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = assuredworkloads.ListWorkloadsResponse(
- next_page_token='next_page_token_value',
- )
- response = client.list_workloads(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = assuredworkloads.ListWorkloadsRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListWorkloadsPager)
- assert response.next_page_token == 'next_page_token_value'
-
-
-def test_list_workloads_non_empty_request_with_auto_populated_field():
- # This test is a coverage failsafe to make sure that UUID4 fields are
- # automatically populated, according to AIP-4235, with non-empty requests.
- client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = assuredworkloads.ListWorkloadsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_workloads), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_workloads(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == assuredworkloads.ListWorkloadsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - ) - -def test_list_workloads_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_workloads in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_workloads] = mock_rpc - request = {} - client.list_workloads(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_workloads(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_workloads_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_workloads in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_workloads] = mock_rpc - - request = {} - await client.list_workloads(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- await client.list_workloads(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_workloads_async(transport: str = 'grpc_asyncio', request_type=assuredworkloads.ListWorkloadsRequest):
- client = AssuredWorkloadsServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_workloads),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(assuredworkloads.ListWorkloadsResponse(
- next_page_token='next_page_token_value',
- ))
- response = await client.list_workloads(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = assuredworkloads.ListWorkloadsRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListWorkloadsAsyncPager)
- assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_workloads_async_from_dict():
- await test_list_workloads_async(request_type=dict)
-
-def test_list_workloads_field_headers():
- client = AssuredWorkloadsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = assuredworkloads.ListWorkloadsRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_workloads),
- '__call__') as call:
- call.return_value = assuredworkloads.ListWorkloadsResponse()
- client.list_workloads(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_workloads_field_headers_async():
- client = AssuredWorkloadsServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = assuredworkloads.ListWorkloadsRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_workloads),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(assuredworkloads.ListWorkloadsResponse())
- await client.list_workloads(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
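- # The routing header is passed in the 'metadata' keyword argument of the stub call.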
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_list_workloads_flattened():
-    client = AssuredWorkloadsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_workloads),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = assuredworkloads.ListWorkloadsResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_workloads(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_workloads_flattened_error():
-    client = AssuredWorkloadsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_workloads(
-            assuredworkloads.ListWorkloadsRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_workloads_flattened_async():
-    client = AssuredWorkloadsServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_workloads),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(assuredworkloads.ListWorkloadsResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_workloads(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_workloads_flattened_error_async():
-    client = AssuredWorkloadsServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_workloads(
-            assuredworkloads.ListWorkloadsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_workloads_pager(transport_name: str = "grpc"):
-    client = AssuredWorkloadsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_workloads),
-            '__call__') as call:
-        # Set the response to a series of pages.
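-        # (mock's side_effect yields one element per successive page request;
-        # the final response omits next_page_token so iteration stops there,
-        # and the trailing RuntimeError would surface only if the pager
-        # over-fetched beyond the pages provided.)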
- call.side_effect = ( - assuredworkloads.ListWorkloadsResponse( - workloads=[ - assuredworkloads.Workload(), - assuredworkloads.Workload(), - assuredworkloads.Workload(), - ], - next_page_token='abc', - ), - assuredworkloads.ListWorkloadsResponse( - workloads=[], - next_page_token='def', - ), - assuredworkloads.ListWorkloadsResponse( - workloads=[ - assuredworkloads.Workload(), - ], - next_page_token='ghi', - ), - assuredworkloads.ListWorkloadsResponse( - workloads=[ - assuredworkloads.Workload(), - assuredworkloads.Workload(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_workloads(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, assuredworkloads.Workload) - for i in results) -def test_list_workloads_pages(transport_name: str = "grpc"): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_workloads), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - assuredworkloads.ListWorkloadsResponse( - workloads=[ - assuredworkloads.Workload(), - assuredworkloads.Workload(), - assuredworkloads.Workload(), - ], - next_page_token='abc', - ), - assuredworkloads.ListWorkloadsResponse( - workloads=[], - next_page_token='def', - ), - assuredworkloads.ListWorkloadsResponse( - workloads=[ - assuredworkloads.Workload(), - ], - next_page_token='ghi', - ), - assuredworkloads.ListWorkloadsResponse( - workloads=[ - assuredworkloads.Workload(), - assuredworkloads.Workload(), - ], - ), - RuntimeError, - ) - pages = list(client.list_workloads(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_workloads_async_pager(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_workloads), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - assuredworkloads.ListWorkloadsResponse( - workloads=[ - assuredworkloads.Workload(), - assuredworkloads.Workload(), - assuredworkloads.Workload(), - ], - next_page_token='abc', - ), - assuredworkloads.ListWorkloadsResponse( - workloads=[], - next_page_token='def', - ), - assuredworkloads.ListWorkloadsResponse( - workloads=[ - assuredworkloads.Workload(), - ], - next_page_token='ghi', - ), - assuredworkloads.ListWorkloadsResponse( - workloads=[ - assuredworkloads.Workload(), - assuredworkloads.Workload(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_workloads(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, assuredworkloads.Workload) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_workloads_async_pages(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_workloads), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - assuredworkloads.ListWorkloadsResponse( - workloads=[ - assuredworkloads.Workload(), - assuredworkloads.Workload(), - assuredworkloads.Workload(), - ], - next_page_token='abc', - ), - assuredworkloads.ListWorkloadsResponse( - workloads=[], - next_page_token='def', - ), - assuredworkloads.ListWorkloadsResponse( - workloads=[ - assuredworkloads.Workload(), - ], - next_page_token='ghi', - ), - assuredworkloads.ListWorkloadsResponse( - workloads=[ - assuredworkloads.Workload(), - assuredworkloads.Workload(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_workloads(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - assuredworkloads.ListViolationsRequest, - dict, -]) -def test_list_violations(request_type, transport: str = 'grpc'): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_violations), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = assuredworkloads.ListViolationsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_violations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = assuredworkloads.ListViolationsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
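-    # (list_violations wraps the raw ListViolationsResponse in a pager;
-    # attributes such as next_page_token are proxied through to the
-    # underlying response, which is what the equality check below relies on.)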
- assert isinstance(response, pagers.ListViolationsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_violations_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = assuredworkloads.ListViolationsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_violations), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_violations(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == assuredworkloads.ListViolationsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - ) - -def test_list_violations_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_violations in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_violations] = mock_rpc - request = {} - client.list_violations(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_violations(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_violations_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_violations in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_violations] = mock_rpc - - request = {} - await client.list_violations(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        await client.list_violations(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_violations_async(transport: str = 'grpc_asyncio', request_type=assuredworkloads.ListViolationsRequest):
-    client = AssuredWorkloadsServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_violations),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(assuredworkloads.ListViolationsResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.list_violations(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = assuredworkloads.ListViolationsRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListViolationsAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_violations_async_from_dict():
-    await test_list_violations_async(request_type=dict)
-
-
-def test_list_violations_flattened():
-    client = AssuredWorkloadsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_violations),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = assuredworkloads.ListViolationsResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_violations(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_violations_flattened_error():
-    client = AssuredWorkloadsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_violations(
-            assuredworkloads.ListViolationsRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_violations_flattened_async():
-    client = AssuredWorkloadsServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_violations),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(assuredworkloads.ListViolationsResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
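-        # (The flattened 'parent' keyword is copied by the client into a new
-        # ListViolationsRequest; the assertions that follow therefore read
-        # the captured request's fields rather than the kwargs themselves.)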
-        response = await client.list_violations(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_violations_flattened_error_async():
-    client = AssuredWorkloadsServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_violations(
-            assuredworkloads.ListViolationsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_violations_pager(transport_name: str = "grpc"):
-    client = AssuredWorkloadsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_violations),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            assuredworkloads.ListViolationsResponse(
-                violations=[
-                    assuredworkloads.Violation(),
-                    assuredworkloads.Violation(),
-                    assuredworkloads.Violation(),
-                ],
-                next_page_token='abc',
-            ),
-            assuredworkloads.ListViolationsResponse(
-                violations=[],
-                next_page_token='def',
-            ),
-            assuredworkloads.ListViolationsResponse(
-                violations=[
-                    assuredworkloads.Violation(),
-                ],
-                next_page_token='ghi',
-            ),
-            assuredworkloads.ListViolationsResponse(
-                violations=[
-                    assuredworkloads.Violation(),
-                    assuredworkloads.Violation(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        expected_metadata = ()
-        retry = retries.Retry()
-        timeout = 5
-        expected_metadata = tuple(expected_metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('parent', ''),
-            )),
-        )
-        pager = client.list_violations(request={}, retry=retry, timeout=timeout)
-
-        assert pager._metadata == expected_metadata
-        assert pager._retry == retry
-        assert pager._timeout == timeout
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, assuredworkloads.Violation)
-                   for i in results)
-def test_list_violations_pages(transport_name: str = "grpc"):
-    client = AssuredWorkloadsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_violations),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            assuredworkloads.ListViolationsResponse(
-                violations=[
-                    assuredworkloads.Violation(),
-                    assuredworkloads.Violation(),
-                    assuredworkloads.Violation(),
-                ],
-                next_page_token='abc',
-            ),
-            assuredworkloads.ListViolationsResponse(
-                violations=[],
-                next_page_token='def',
-            ),
-            assuredworkloads.ListViolationsResponse(
-                violations=[
-                    assuredworkloads.Violation(),
-                ],
-                next_page_token='ghi',
-            ),
-            assuredworkloads.ListViolationsResponse(
-                violations=[
-                    assuredworkloads.Violation(),
-                    assuredworkloads.Violation(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = list(client.list_violations(request={}).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.asyncio
-async def test_list_violations_async_pager():
-    client = AssuredWorkloadsServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.list_violations), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - assuredworkloads.ListViolationsResponse( - violations=[ - assuredworkloads.Violation(), - assuredworkloads.Violation(), - assuredworkloads.Violation(), - ], - next_page_token='abc', - ), - assuredworkloads.ListViolationsResponse( - violations=[], - next_page_token='def', - ), - assuredworkloads.ListViolationsResponse( - violations=[ - assuredworkloads.Violation(), - ], - next_page_token='ghi', - ), - assuredworkloads.ListViolationsResponse( - violations=[ - assuredworkloads.Violation(), - assuredworkloads.Violation(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_violations(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, assuredworkloads.Violation) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_violations_async_pages(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_violations), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - assuredworkloads.ListViolationsResponse( - violations=[ - assuredworkloads.Violation(), - assuredworkloads.Violation(), - assuredworkloads.Violation(), - ], - next_page_token='abc', - ), - assuredworkloads.ListViolationsResponse( - violations=[], - next_page_token='def', - ), - assuredworkloads.ListViolationsResponse( - violations=[ - assuredworkloads.Violation(), - ], - next_page_token='ghi', - ), - assuredworkloads.ListViolationsResponse( - violations=[ - assuredworkloads.Violation(), - assuredworkloads.Violation(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_violations(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - assuredworkloads.GetViolationRequest, - dict, -]) -def test_get_violation(request_type, transport: str = 'grpc'): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_violation), - '__call__') as call: - # Designate an appropriate return value for the call. 
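-        # (Only scalar fields are populated here; message-typed fields are
-        # left unset, which suffices for the type and field-equality
-        # assertions this test makes.)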
- call.return_value = assuredworkloads.Violation( - name='name_value', - description='description_value', - category='category_value', - state=assuredworkloads.Violation.State.RESOLVED, - org_policy_constraint='org_policy_constraint_value', - audit_log_link='audit_log_link_value', - non_compliant_org_policy='non_compliant_org_policy_value', - acknowledged=True, - exception_audit_log_link='exception_audit_log_link_value', - ) - response = client.get_violation(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = assuredworkloads.GetViolationRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, assuredworkloads.Violation) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.category == 'category_value' - assert response.state == assuredworkloads.Violation.State.RESOLVED - assert response.org_policy_constraint == 'org_policy_constraint_value' - assert response.audit_log_link == 'audit_log_link_value' - assert response.non_compliant_org_policy == 'non_compliant_org_policy_value' - assert response.acknowledged is True - assert response.exception_audit_log_link == 'exception_audit_log_link_value' - - -def test_get_violation_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = assuredworkloads.GetViolationRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_violation), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_violation(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == assuredworkloads.GetViolationRequest( - name='name_value', - ) - -def test_get_violation_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_violation in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_violation] = mock_rpc - request = {} - client.get_violation(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.get_violation(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_violation_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = AssuredWorkloadsServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.get_violation in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.get_violation] = mock_rpc
-
-        request = {}
-        await client.get_violation(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.get_violation(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_violation_async(transport: str = 'grpc_asyncio', request_type=assuredworkloads.GetViolationRequest):
-    client = AssuredWorkloadsServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_violation),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(assuredworkloads.Violation(
-            name='name_value',
-            description='description_value',
-            category='category_value',
-            state=assuredworkloads.Violation.State.RESOLVED,
-            org_policy_constraint='org_policy_constraint_value',
-            audit_log_link='audit_log_link_value',
-            non_compliant_org_policy='non_compliant_org_policy_value',
-            acknowledged=True,
-            exception_audit_log_link='exception_audit_log_link_value',
-        ))
-        response = await client.get_violation(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = assuredworkloads.GetViolationRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, assuredworkloads.Violation)
-    assert response.name == 'name_value'
-    assert response.description == 'description_value'
-    assert response.category == 'category_value'
-    assert response.state == assuredworkloads.Violation.State.RESOLVED
-    assert response.org_policy_constraint == 'org_policy_constraint_value'
-    assert response.audit_log_link == 'audit_log_link_value'
-    assert response.non_compliant_org_policy == 'non_compliant_org_policy_value'
-    assert response.acknowledged is True
-    assert response.exception_audit_log_link == 'exception_audit_log_link_value'
-
-
-@pytest.mark.asyncio
-async def test_get_violation_async_from_dict():
-    await test_get_violation_async(request_type=dict)
-
-
-def test_get_violation_flattened():
-    client = AssuredWorkloadsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_violation),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = assuredworkloads.Violation()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.get_violation(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_get_violation_flattened_error():
-    client = AssuredWorkloadsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_violation(
-            assuredworkloads.GetViolationRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_get_violation_flattened_async():
-    client = AssuredWorkloadsServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_violation),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(assuredworkloads.Violation())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.get_violation(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_violation_flattened_error_async():
-    client = AssuredWorkloadsServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - await client.get_violation( - assuredworkloads.GetViolationRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - assuredworkloads.AcknowledgeViolationRequest, - dict, -]) -def test_acknowledge_violation(request_type, transport: str = 'grpc'): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.acknowledge_violation), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = assuredworkloads.AcknowledgeViolationResponse( - ) - response = client.acknowledge_violation(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = assuredworkloads.AcknowledgeViolationRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, assuredworkloads.AcknowledgeViolationResponse) - - -def test_acknowledge_violation_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = assuredworkloads.AcknowledgeViolationRequest( - name='name_value', - comment='comment_value', - non_compliant_org_policy='non_compliant_org_policy_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.acknowledge_violation), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.acknowledge_violation(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == assuredworkloads.AcknowledgeViolationRequest( - name='name_value', - comment='comment_value', - non_compliant_org_policy='non_compliant_org_policy_value', - ) - -def test_acknowledge_violation_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.acknowledge_violation in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
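-        # (Setting a string .name on the mock's return value keeps any code
-        # path that inspects an operation name, as compute LRO helpers do,
-        # working even though this RPC returns a plain response message.)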
-        client._transport._wrapped_methods[client._transport.acknowledge_violation] = mock_rpc
-        request = {}
-        client.acknowledge_violation(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.acknowledge_violation(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_acknowledge_violation_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = AssuredWorkloadsServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.acknowledge_violation in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.acknowledge_violation] = mock_rpc
-
-        request = {}
-        await client.acknowledge_violation(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.acknowledge_violation(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_acknowledge_violation_async(transport: str = 'grpc_asyncio', request_type=assuredworkloads.AcknowledgeViolationRequest):
-    client = AssuredWorkloadsServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.acknowledge_violation),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(assuredworkloads.AcknowledgeViolationResponse(
-        ))
-        response = await client.acknowledge_violation(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = assuredworkloads.AcknowledgeViolationRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
- assert isinstance(response, assuredworkloads.AcknowledgeViolationResponse) - - -@pytest.mark.asyncio -async def test_acknowledge_violation_async_from_dict(): - await test_acknowledge_violation_async(request_type=dict) - - -def test_create_workload_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_workload in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_workload] = mock_rpc - - request = {} - client.create_workload(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_workload(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_workload_rest_required_fields(request_type=assuredworkloads.CreateWorkloadRequest): - transport_class = transports.AssuredWorkloadsServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_workload._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_workload._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("external_id", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
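-        # (path_template.transcode normally maps the request onto the http
-        # rule, yielding a dict with 'uri', 'method', 'query_params' and
-        # optionally 'body'; stubbing it with a fixed result keeps this test
-        # independent of the method's real URI template.)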
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.create_workload(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_create_workload_rest_unset_required_fields():
-    transport = transports.AssuredWorkloadsServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.create_workload._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("externalId", )) & set(("parent", "workload", )))
-
-
-def test_create_workload_rest_flattened():
-    client = AssuredWorkloadsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'parent': 'organizations/sample1/locations/sample2'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            parent='parent_value',
-            workload=assuredworkloads.Workload(name='name_value'),
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.create_workload(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{parent=organizations/*/locations/*}/workloads" % client.transport._host, args[1])
-
-
-def test_create_workload_rest_flattened_error(transport: str = 'rest'):
-    client = AssuredWorkloadsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - client.create_workload( - assuredworkloads.CreateWorkloadRequest(), - parent='parent_value', - workload=assuredworkloads.Workload(name='name_value'), - ) - - -def test_update_workload_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_workload in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_workload] = mock_rpc - - request = {} - client.update_workload(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_workload(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_workload_rest_required_fields(request_type=assuredworkloads.UpdateWorkloadRequest): - transport_class = transports.AssuredWorkloadsServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_workload._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_workload._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = assuredworkloads.Workload() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "patch",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = assuredworkloads.Workload.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.update_workload(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_update_workload_rest_unset_required_fields():
-    transport = transports.AssuredWorkloadsServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.update_workload._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("updateMask", )) & set(("workload", "updateMask", )))
-
-
-def test_update_workload_rest_flattened():
-    client = AssuredWorkloadsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = assuredworkloads.Workload()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'workload': {'name': 'organizations/sample1/locations/sample2/workloads/sample3'}}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            workload=assuredworkloads.Workload(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = assuredworkloads.Workload.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.update_workload(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{workload.name=organizations/*/locations/*/workloads/*}" % client.transport._host, args[1])
-
-
-def test_update_workload_rest_flattened_error(transport: str = 'rest'):
-    client = AssuredWorkloadsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - client.update_workload( - assuredworkloads.UpdateWorkloadRequest(), - workload=assuredworkloads.Workload(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_restrict_allowed_resources_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.restrict_allowed_resources in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.restrict_allowed_resources] = mock_rpc - - request = {} - client.restrict_allowed_resources(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.restrict_allowed_resources(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_restrict_allowed_resources_rest_required_fields(request_type=assuredworkloads.RestrictAllowedResourcesRequest): - transport_class = transports.AssuredWorkloadsServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).restrict_allowed_resources._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).restrict_allowed_resources._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = assuredworkloads.RestrictAllowedResourcesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = assuredworkloads.RestrictAllowedResourcesResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.restrict_allowed_resources(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_restrict_allowed_resources_rest_unset_required_fields():
-    transport = transports.AssuredWorkloadsServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.restrict_allowed_resources._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("name", "restrictionType", )))
-
-
-def test_delete_workload_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = AssuredWorkloadsServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.delete_workload in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.delete_workload] = mock_rpc
-
-        request = {}
-        client.delete_workload(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.delete_workload(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_delete_workload_rest_required_fields(request_type=assuredworkloads.DeleteWorkloadRequest):
-    transport_class = transports.AssuredWorkloadsServiceRestTransport
-
-    request_init = {}
-    request_init["name"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_workload._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["name"] = 'name_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_workload._get_unset_required_fields(jsonified_request)
-    # Check that path parameters and body parameters are not mixing in.
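-    # (Anything still unset after the update must be a query- or
-    # body-eligible parameter such as 'etag'; path-bound fields like 'name'
-    # were filled in above and so must not reappear here.)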
- assert not set(unset_fields) - set(("etag", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_workload(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_workload_rest_unset_required_fields(): - transport = transports.AssuredWorkloadsServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_workload._get_unset_required_fields({}) - assert set(unset_fields) == (set(("etag", )) & set(("name", ))) - - -def test_delete_workload_rest_flattened(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'organizations/sample1/locations/sample2/workloads/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_workload(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
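-    # path_template.validate() confirms the URL the session actually received
-    # matches the method's URI template, i.e. the flattened `name` argument was
-    # expanded into the request path.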
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=organizations/*/locations/*/workloads/*}" % client.transport._host, args[1]) - - -def test_delete_workload_rest_flattened_error(transport: str = 'rest'): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_workload( - assuredworkloads.DeleteWorkloadRequest(), - name='name_value', - ) - - -def test_get_workload_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_workload in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_workload] = mock_rpc - - request = {} - client.get_workload(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_workload(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_workload_rest_required_fields(request_type=assuredworkloads.GetWorkloadRequest): - transport_class = transports.AssuredWorkloadsServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_workload._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_workload._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = assuredworkloads.Workload() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = assuredworkloads.Workload.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_workload(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_workload_rest_unset_required_fields(): - transport = transports.AssuredWorkloadsServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_workload._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_workload_rest_flattened(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = assuredworkloads.Workload() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'organizations/sample1/locations/sample2/workloads/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = assuredworkloads.Workload.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_workload(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=organizations/*/locations/*/workloads/*}" % client.transport._host, args[1]) - - -def test_get_workload_rest_flattened_error(transport: str = 'rest'): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
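-    # Callers must pick one style or the other, e.g.
-    #     client.get_workload(request=assuredworkloads.GetWorkloadRequest(name='name_value'))
-    # or
-    #     client.get_workload(name='name_value')
-    # but never both in the same call.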
-    with pytest.raises(ValueError):
-        client.get_workload(
-            assuredworkloads.GetWorkloadRequest(),
-            name='name_value',
-        )
-
-
-def test_list_workloads_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = AssuredWorkloadsServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.list_workloads in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.list_workloads] = mock_rpc
-
-        request = {}
-        client.list_workloads(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.list_workloads(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_list_workloads_rest_required_fields(request_type=assuredworkloads.ListWorkloadsRequest):
-    transport_class = transports.AssuredWorkloadsServiceRestTransport
-
-    request_init = {}
-    request_init["parent"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_workloads._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["parent"] = 'parent_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_workloads._get_unset_required_fields(jsonified_request)
-    # Check that path parameters and body parameters are not mixing in.
-    assert not set(unset_fields) - set(("filter", "pageSize", "pageToken", ))
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "parent" in jsonified_request
-    assert jsonified_request["parent"] == 'parent_value'
-
-    client = AssuredWorkloadsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = assuredworkloads.ListWorkloadsResponse()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = assuredworkloads.ListWorkloadsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_workloads(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_workloads_rest_unset_required_fields(): - transport = transports.AssuredWorkloadsServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_workloads._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_workloads_rest_flattened(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = assuredworkloads.ListWorkloadsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'organizations/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = assuredworkloads.ListWorkloadsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_workloads(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=organizations/*/locations/*}/workloads" % client.transport._host, args[1]) - - -def test_list_workloads_rest_flattened_error(transport: str = 'rest'): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_workloads( - assuredworkloads.ListWorkloadsRequest(), - parent='parent_value', - ) - - -def test_list_workloads_rest_pager(transport: str = 'rest'): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
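-        # The pager below is fed a fixed sequence of pages via req.side_effect;
-        # the client keeps requesting while next_page_token is non-empty, so
-        # iterating the pager should consume all four pages in order.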
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - assuredworkloads.ListWorkloadsResponse( - workloads=[ - assuredworkloads.Workload(), - assuredworkloads.Workload(), - assuredworkloads.Workload(), - ], - next_page_token='abc', - ), - assuredworkloads.ListWorkloadsResponse( - workloads=[], - next_page_token='def', - ), - assuredworkloads.ListWorkloadsResponse( - workloads=[ - assuredworkloads.Workload(), - ], - next_page_token='ghi', - ), - assuredworkloads.ListWorkloadsResponse( - workloads=[ - assuredworkloads.Workload(), - assuredworkloads.Workload(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(assuredworkloads.ListWorkloadsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'organizations/sample1/locations/sample2'} - - pager = client.list_workloads(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, assuredworkloads.Workload) - for i in results) - - pages = list(client.list_workloads(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_list_violations_rest_no_http_options(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = assuredworkloads.ListViolationsRequest() - with pytest.raises(RuntimeError): - client.list_violations(request) - - -def test_get_violation_rest_no_http_options(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = assuredworkloads.GetViolationRequest() - with pytest.raises(RuntimeError): - client.get_violation(request) - - -def test_acknowledge_violation_rest_no_http_options(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = assuredworkloads.AcknowledgeViolationRequest() - with pytest.raises(RuntimeError): - client.acknowledge_violation(request) - - -def test_list_violations_rest_error(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - # Since a `google.api.http` annotation is required for using a rest transport - # method, this should error. - with pytest.raises(NotImplementedError) as not_implemented_error: - client.list_violations({}) - assert ( - "Method ListViolations is not available over REST transport" - in str(not_implemented_error.value) - ) - - -def test_get_violation_rest_error(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - # Since a `google.api.http` annotation is required for using a rest transport - # method, this should error. 
- with pytest.raises(NotImplementedError) as not_implemented_error: - client.get_violation({}) - assert ( - "Method GetViolation is not available over REST transport" - in str(not_implemented_error.value) - ) - - -def test_acknowledge_violation_rest_error(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - # Since a `google.api.http` annotation is required for using a rest transport - # method, this should error. - with pytest.raises(NotImplementedError) as not_implemented_error: - client.acknowledge_violation({}) - assert ( - "Method AcknowledgeViolation is not available over REST transport" - in str(not_implemented_error.value) - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.AssuredWorkloadsServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.AssuredWorkloadsServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = AssuredWorkloadsServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.AssuredWorkloadsServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = AssuredWorkloadsServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = AssuredWorkloadsServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.AssuredWorkloadsServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = AssuredWorkloadsServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.AssuredWorkloadsServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = AssuredWorkloadsServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.AssuredWorkloadsServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.AssuredWorkloadsServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.AssuredWorkloadsServiceGrpcTransport, - transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, - transports.AssuredWorkloadsServiceRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = AssuredWorkloadsServiceClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_workload_empty_call_grpc(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_workload), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_workload(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.CreateWorkloadRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_workload_empty_call_grpc(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_workload), - '__call__') as call: - call.return_value = assuredworkloads.Workload() - client.update_workload(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.UpdateWorkloadRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_restrict_allowed_resources_empty_call_grpc(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.restrict_allowed_resources), - '__call__') as call: - call.return_value = assuredworkloads.RestrictAllowedResourcesResponse() - client.restrict_allowed_resources(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.RestrictAllowedResourcesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_workload_empty_call_grpc(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_workload), - '__call__') as call: - call.return_value = None - client.delete_workload(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.DeleteWorkloadRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_workload_empty_call_grpc(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_workload), - '__call__') as call: - call.return_value = assuredworkloads.Workload() - client.get_workload(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.GetWorkloadRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_workloads_empty_call_grpc(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_workloads), - '__call__') as call: - call.return_value = assuredworkloads.ListWorkloadsResponse() - client.list_workloads(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.ListWorkloadsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_violations_empty_call_grpc(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_violations), - '__call__') as call: - call.return_value = assuredworkloads.ListViolationsResponse() - client.list_violations(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.ListViolationsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_violation_empty_call_grpc(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
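-    # Patching __call__ on the transport's bound stub method intercepts the RPC
-    # at the lowest client-side layer, so the request message the client built
-    # can be recovered from call.mock_calls.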
- with mock.patch.object( - type(client.transport.get_violation), - '__call__') as call: - call.return_value = assuredworkloads.Violation() - client.get_violation(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.GetViolationRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_acknowledge_violation_empty_call_grpc(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.acknowledge_violation), - '__call__') as call: - call.return_value = assuredworkloads.AcknowledgeViolationResponse() - client.acknowledge_violation(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.AcknowledgeViolationRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = AssuredWorkloadsServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_workload_empty_call_grpc_asyncio(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_workload), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_workload(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.CreateWorkloadRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_workload_empty_call_grpc_asyncio(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_workload), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(assuredworkloads.Workload( - name='name_value', - display_name='display_name_value', - compliance_regime=assuredworkloads.Workload.ComplianceRegime.IL4, - billing_account='billing_account_value', - etag='etag_value', - provisioned_resources_parent='provisioned_resources_parent_value', - kaj_enrollment_state=assuredworkloads.Workload.KajEnrollmentState.KAJ_ENROLLMENT_STATE_PENDING, - enable_sovereign_controls=True, - compliant_but_disallowed_services=['compliant_but_disallowed_services_value'], - partner=assuredworkloads.Workload.Partner.LOCAL_CONTROLS_BY_S3NS, - )) - await client.update_workload(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.UpdateWorkloadRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_restrict_allowed_resources_empty_call_grpc_asyncio(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.restrict_allowed_resources), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(assuredworkloads.RestrictAllowedResourcesResponse( - )) - await client.restrict_allowed_resources(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.RestrictAllowedResourcesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_workload_empty_call_grpc_asyncio(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_workload), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_workload(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.DeleteWorkloadRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_workload_empty_call_grpc_asyncio(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_workload), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(assuredworkloads.Workload( - name='name_value', - display_name='display_name_value', - compliance_regime=assuredworkloads.Workload.ComplianceRegime.IL4, - billing_account='billing_account_value', - etag='etag_value', - provisioned_resources_parent='provisioned_resources_parent_value', - kaj_enrollment_state=assuredworkloads.Workload.KajEnrollmentState.KAJ_ENROLLMENT_STATE_PENDING, - enable_sovereign_controls=True, - compliant_but_disallowed_services=['compliant_but_disallowed_services_value'], - partner=assuredworkloads.Workload.Partner.LOCAL_CONTROLS_BY_S3NS, - )) - await client.get_workload(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.GetWorkloadRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_workloads_empty_call_grpc_asyncio(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_workloads), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(assuredworkloads.ListWorkloadsResponse( - next_page_token='next_page_token_value', - )) - await client.list_workloads(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.ListWorkloadsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_violations_empty_call_grpc_asyncio(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_violations), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(assuredworkloads.ListViolationsResponse( - next_page_token='next_page_token_value', - )) - await client.list_violations(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.ListViolationsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_violation_empty_call_grpc_asyncio(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_violation), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(assuredworkloads.Violation( - name='name_value', - description='description_value', - category='category_value', - state=assuredworkloads.Violation.State.RESOLVED, - org_policy_constraint='org_policy_constraint_value', - audit_log_link='audit_log_link_value', - non_compliant_org_policy='non_compliant_org_policy_value', - acknowledged=True, - exception_audit_log_link='exception_audit_log_link_value', - )) - await client.get_violation(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.GetViolationRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_acknowledge_violation_empty_call_grpc_asyncio(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.acknowledge_violation), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(assuredworkloads.AcknowledgeViolationResponse( - )) - await client.acknowledge_violation(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.AcknowledgeViolationRequest() - - assert args[0] == request_msg - - -def test_transport_kind_rest(): - transport = AssuredWorkloadsServiceClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" - - -def test_create_workload_rest_bad_request(request_type=assuredworkloads.CreateWorkloadRequest): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
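-    # A 400 status from the mocked session is mapped by api_core's exception
-    # translation (exceptions.from_http_response) to core_exceptions.BadRequest,
-    # which is what the client is expected to surface here.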
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_workload(request) - - -@pytest.mark.parametrize("request_type", [ - assuredworkloads.CreateWorkloadRequest, - dict, -]) -def test_create_workload_rest_call_success(request_type): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1/locations/sample2'} - request_init["workload"] = {'name': 'name_value', 'display_name': 'display_name_value', 'resources': [{'resource_id': 1172, 'resource_type': 1}], 'compliance_regime': 1, 'create_time': {'seconds': 751, 'nanos': 543}, 'billing_account': 'billing_account_value', 'etag': 'etag_value', 'labels': {}, 'provisioned_resources_parent': 'provisioned_resources_parent_value', 'kms_settings': {'next_rotation_time': {}, 'rotation_period': {'seconds': 751, 'nanos': 543}}, 'resource_settings': [{'resource_id': 'resource_id_value', 'resource_type': 1, 'display_name': 'display_name_value'}], 'kaj_enrollment_state': 1, 'enable_sovereign_controls': True, 'saa_enrollment_response': {'setup_status': 1, 'setup_errors': [1]}, 'compliant_but_disallowed_services': ['compliant_but_disallowed_services_value1', 'compliant_but_disallowed_services_value2'], 'partner': 1} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = assuredworkloads.CreateWorkloadRequest.meta.fields["workload"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
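-        # (Used below to prune request_init of nested keys that the installed
-        # proto runtime does not define, so the test tolerates version skew in
-        # the generated dependency.)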
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["workload"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["workload"][field])): - del request_init["workload"][field][i][subfield] - else: - del request_init["workload"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_workload(request) - - # Establish that the response is the type that we expect. 
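-    # (create_workload returns a long-running operation future rather than a
-    # plain message, so there are no per-field assertions here; re-serializing
-    # return_value simply keeps the mocked payload exercised.)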
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_workload_rest_interceptors(null_interceptor): - transport = transports.AssuredWorkloadsServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssuredWorkloadsServiceRestInterceptor(), - ) - client = AssuredWorkloadsServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.AssuredWorkloadsServiceRestInterceptor, "post_create_workload") as post, \ - mock.patch.object(transports.AssuredWorkloadsServiceRestInterceptor, "post_create_workload_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssuredWorkloadsServiceRestInterceptor, "pre_create_workload") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = assuredworkloads.CreateWorkloadRequest.pb(assuredworkloads.CreateWorkloadRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = assuredworkloads.CreateWorkloadRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.create_workload(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_workload_rest_bad_request(request_type=assuredworkloads.UpdateWorkloadRequest): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'workload': {'name': 'organizations/sample1/locations/sample2/workloads/sample3'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_workload(request) - - -@pytest.mark.parametrize("request_type", [ - assuredworkloads.UpdateWorkloadRequest, - dict, -]) -def test_update_workload_rest_call_success(request_type): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'workload': {'name': 'organizations/sample1/locations/sample2/workloads/sample3'}} - request_init["workload"] = {'name': 'organizations/sample1/locations/sample2/workloads/sample3', 'display_name': 'display_name_value', 'resources': [{'resource_id': 1172, 'resource_type': 1}], 'compliance_regime': 1, 'create_time': {'seconds': 751, 'nanos': 543}, 'billing_account': 'billing_account_value', 'etag': 'etag_value', 'labels': {}, 'provisioned_resources_parent': 'provisioned_resources_parent_value', 'kms_settings': {'next_rotation_time': {}, 'rotation_period': {'seconds': 751, 'nanos': 543}}, 'resource_settings': [{'resource_id': 'resource_id_value', 'resource_type': 1, 'display_name': 'display_name_value'}], 'kaj_enrollment_state': 1, 'enable_sovereign_controls': True, 'saa_enrollment_response': {'setup_status': 1, 'setup_errors': [1]}, 'compliant_but_disallowed_services': ['compliant_but_disallowed_services_value1', 'compliant_but_disallowed_services_value2'], 'partner': 1} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = assuredworkloads.UpdateWorkloadRequest.meta.fields["workload"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["workload"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["workload"][field])): - del request_init["workload"][field][i][subfield] - else: - del request_init["workload"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = assuredworkloads.Workload( - name='name_value', - display_name='display_name_value', - compliance_regime=assuredworkloads.Workload.ComplianceRegime.IL4, - billing_account='billing_account_value', - etag='etag_value', - provisioned_resources_parent='provisioned_resources_parent_value', - kaj_enrollment_state=assuredworkloads.Workload.KajEnrollmentState.KAJ_ENROLLMENT_STATE_PENDING, - enable_sovereign_controls=True, - compliant_but_disallowed_services=['compliant_but_disallowed_services_value'], - partner=assuredworkloads.Workload.Partner.LOCAL_CONTROLS_BY_S3NS, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = assuredworkloads.Workload.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_workload(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, assuredworkloads.Workload) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.compliance_regime == assuredworkloads.Workload.ComplianceRegime.IL4 - assert response.billing_account == 'billing_account_value' - assert response.etag == 'etag_value' - assert response.provisioned_resources_parent == 'provisioned_resources_parent_value' - assert response.kaj_enrollment_state == assuredworkloads.Workload.KajEnrollmentState.KAJ_ENROLLMENT_STATE_PENDING - assert response.enable_sovereign_controls is True - assert response.compliant_but_disallowed_services == ['compliant_but_disallowed_services_value'] - assert response.partner == assuredworkloads.Workload.Partner.LOCAL_CONTROLS_BY_S3NS - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_workload_rest_interceptors(null_interceptor): - transport = transports.AssuredWorkloadsServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssuredWorkloadsServiceRestInterceptor(), - ) - client = AssuredWorkloadsServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssuredWorkloadsServiceRestInterceptor, "post_update_workload") as post, \ - mock.patch.object(transports.AssuredWorkloadsServiceRestInterceptor, "post_update_workload_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssuredWorkloadsServiceRestInterceptor, "pre_update_workload") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = assuredworkloads.UpdateWorkloadRequest.pb(assuredworkloads.UpdateWorkloadRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = assuredworkloads.Workload.to_json(assuredworkloads.Workload()) - req.return_value.content = return_value - - request = assuredworkloads.UpdateWorkloadRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = assuredworkloads.Workload() - post_with_metadata.return_value = assuredworkloads.Workload(), metadata - - client.update_workload(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_restrict_allowed_resources_rest_bad_request(request_type=assuredworkloads.RestrictAllowedResourcesRequest): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/locations/sample2/workloads/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.restrict_allowed_resources(request) - - -@pytest.mark.parametrize("request_type", [ - assuredworkloads.RestrictAllowedResourcesRequest, - dict, -]) -def test_restrict_allowed_resources_rest_call_success(request_type): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/locations/sample2/workloads/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = assuredworkloads.RestrictAllowedResourcesResponse( - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = assuredworkloads.RestrictAllowedResourcesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.restrict_allowed_resources(request) - - # Establish that the response is the type that we expect. 
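-    # No fields were designated on the mocked response, so its type is the only
-    # property to verify here.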
-    assert isinstance(response, assuredworkloads.RestrictAllowedResourcesResponse)
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_restrict_allowed_resources_rest_interceptors(null_interceptor):
-    transport = transports.AssuredWorkloadsServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.AssuredWorkloadsServiceRestInterceptor(),
-        )
-    client = AssuredWorkloadsServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.AssuredWorkloadsServiceRestInterceptor, "post_restrict_allowed_resources") as post, \
-        mock.patch.object(transports.AssuredWorkloadsServiceRestInterceptor, "post_restrict_allowed_resources_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.AssuredWorkloadsServiceRestInterceptor, "pre_restrict_allowed_resources") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = assuredworkloads.RestrictAllowedResourcesRequest.pb(assuredworkloads.RestrictAllowedResourcesRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = assuredworkloads.RestrictAllowedResourcesResponse.to_json(assuredworkloads.RestrictAllowedResourcesResponse())
-        req.return_value.content = return_value
-
-        request = assuredworkloads.RestrictAllowedResourcesRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = assuredworkloads.RestrictAllowedResourcesResponse()
-        post_with_metadata.return_value = assuredworkloads.RestrictAllowedResourcesResponse(), metadata
-
-        client.restrict_allowed_resources(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_delete_workload_rest_bad_request(request_type=assuredworkloads.DeleteWorkloadRequest):
-    client = AssuredWorkloadsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'organizations/sample1/locations/sample2/workloads/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.delete_workload(request)
-
-
-@pytest.mark.parametrize("request_type", [
-  assuredworkloads.DeleteWorkloadRequest,
-  dict,
-])
-def test_delete_workload_rest_call_success(request_type):
-    client = AssuredWorkloadsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'organizations/sample1/locations/sample2/workloads/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = None
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-        json_return_value = ''
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.delete_workload(request)
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_delete_workload_rest_interceptors(null_interceptor):
-    transport = transports.AssuredWorkloadsServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.AssuredWorkloadsServiceRestInterceptor(),
-        )
-    client = AssuredWorkloadsServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.AssuredWorkloadsServiceRestInterceptor, "pre_delete_workload") as pre:
-        pre.assert_not_called()
-        pb_message = assuredworkloads.DeleteWorkloadRequest.pb(assuredworkloads.DeleteWorkloadRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        request = assuredworkloads.DeleteWorkloadRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-
-        client.delete_workload(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-
-
-def test_get_workload_rest_bad_request(request_type=assuredworkloads.GetWorkloadRequest):
-    client = AssuredWorkloadsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'organizations/sample1/locations/sample2/workloads/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_workload(request) - - -@pytest.mark.parametrize("request_type", [ - assuredworkloads.GetWorkloadRequest, - dict, -]) -def test_get_workload_rest_call_success(request_type): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/locations/sample2/workloads/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = assuredworkloads.Workload( - name='name_value', - display_name='display_name_value', - compliance_regime=assuredworkloads.Workload.ComplianceRegime.IL4, - billing_account='billing_account_value', - etag='etag_value', - provisioned_resources_parent='provisioned_resources_parent_value', - kaj_enrollment_state=assuredworkloads.Workload.KajEnrollmentState.KAJ_ENROLLMENT_STATE_PENDING, - enable_sovereign_controls=True, - compliant_but_disallowed_services=['compliant_but_disallowed_services_value'], - partner=assuredworkloads.Workload.Partner.LOCAL_CONTROLS_BY_S3NS, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = assuredworkloads.Workload.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_workload(request) - - # Establish that the response is the type that we expect. 
-    assert isinstance(response, assuredworkloads.Workload)
-    assert response.name == 'name_value'
-    assert response.display_name == 'display_name_value'
-    assert response.compliance_regime == assuredworkloads.Workload.ComplianceRegime.IL4
-    assert response.billing_account == 'billing_account_value'
-    assert response.etag == 'etag_value'
-    assert response.provisioned_resources_parent == 'provisioned_resources_parent_value'
-    assert response.kaj_enrollment_state == assuredworkloads.Workload.KajEnrollmentState.KAJ_ENROLLMENT_STATE_PENDING
-    assert response.enable_sovereign_controls is True
-    assert response.compliant_but_disallowed_services == ['compliant_but_disallowed_services_value']
-    assert response.partner == assuredworkloads.Workload.Partner.LOCAL_CONTROLS_BY_S3NS
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_get_workload_rest_interceptors(null_interceptor):
-    transport = transports.AssuredWorkloadsServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.AssuredWorkloadsServiceRestInterceptor(),
-        )
-    client = AssuredWorkloadsServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.AssuredWorkloadsServiceRestInterceptor, "post_get_workload") as post, \
-        mock.patch.object(transports.AssuredWorkloadsServiceRestInterceptor, "post_get_workload_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.AssuredWorkloadsServiceRestInterceptor, "pre_get_workload") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = assuredworkloads.GetWorkloadRequest.pb(assuredworkloads.GetWorkloadRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = assuredworkloads.Workload.to_json(assuredworkloads.Workload())
-        req.return_value.content = return_value
-
-        request = assuredworkloads.GetWorkloadRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = assuredworkloads.Workload()
-        post_with_metadata.return_value = assuredworkloads.Workload(), metadata
-
-        client.get_workload(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_list_workloads_rest_bad_request(request_type=assuredworkloads.ListWorkloadsRequest):
-    client = AssuredWorkloadsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'organizations/sample1/locations/sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_workloads(request) - - -@pytest.mark.parametrize("request_type", [ - assuredworkloads.ListWorkloadsRequest, - dict, -]) -def test_list_workloads_rest_call_success(request_type): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = assuredworkloads.ListWorkloadsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = assuredworkloads.ListWorkloadsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_workloads(request) - - # Establish that the response is the type that we expect. 
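-    # The client wraps the raw ListWorkloadsResponse in a pager, which forwards
-    # attribute access (e.g. next_page_token) to the underlying response.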
-    assert isinstance(response, pagers.ListWorkloadsPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_list_workloads_rest_interceptors(null_interceptor):
-    transport = transports.AssuredWorkloadsServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.AssuredWorkloadsServiceRestInterceptor(),
-        )
-    client = AssuredWorkloadsServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.AssuredWorkloadsServiceRestInterceptor, "post_list_workloads") as post, \
-        mock.patch.object(transports.AssuredWorkloadsServiceRestInterceptor, "post_list_workloads_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.AssuredWorkloadsServiceRestInterceptor, "pre_list_workloads") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = assuredworkloads.ListWorkloadsRequest.pb(assuredworkloads.ListWorkloadsRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = assuredworkloads.ListWorkloadsResponse.to_json(assuredworkloads.ListWorkloadsResponse())
-        req.return_value.content = return_value
-
-        request = assuredworkloads.ListWorkloadsRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = assuredworkloads.ListWorkloadsResponse()
-        post_with_metadata.return_value = assuredworkloads.ListWorkloadsResponse(), metadata
-
-        client.list_workloads(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_list_violations_rest_error():
-    client = AssuredWorkloadsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    with pytest.raises(NotImplementedError) as not_implemented_error:
-        client.list_violations({})
-    assert (
-        "Method ListViolations is not available over REST transport"
-        in str(not_implemented_error.value)
-    )
-
-
-def test_get_violation_rest_error():
-    client = AssuredWorkloadsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    with pytest.raises(NotImplementedError) as not_implemented_error:
-        client.get_violation({})
-    assert (
-        "Method GetViolation is not available over REST transport"
-        in str(not_implemented_error.value)
-    )
-
-
-def test_acknowledge_violation_rest_error():
-    client = AssuredWorkloadsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    with pytest.raises(NotImplementedError) as not_implemented_error:
-        client.acknowledge_violation({})
-    assert (
-        "Method AcknowledgeViolation is not available over REST transport"
-        in str(not_implemented_error.value)
-    )
-
-
-def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest):
-    client = AssuredWorkloadsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-    request = request_type()
-    request = json_format.ParseDict({'name': 
'organizations/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) -def test_get_operation_rest(request_type): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'organizations/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'organizations/sample1/locations/sample2'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_operations(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) -def test_list_operations_rest(request_type): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'organizations/sample1/locations/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. 
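-        # An empty ListOperationsResponse suffices; only the response type is checked below.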
- return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_operations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_initialize_client_w_rest(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_workload_empty_call_rest(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_workload), - '__call__') as call: - client.create_workload(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.CreateWorkloadRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_workload_empty_call_rest(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_workload), - '__call__') as call: - client.update_workload(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.UpdateWorkloadRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_restrict_allowed_resources_empty_call_rest(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.restrict_allowed_resources), - '__call__') as call: - client.restrict_allowed_resources(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.RestrictAllowedResourcesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_workload_empty_call_rest(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_workload), - '__call__') as call: - client.delete_workload(request=None) - - # Establish that the underlying stub method was called. 
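-        # Passing request=None should still result in a default-constructed request message.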
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.DeleteWorkloadRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_workload_empty_call_rest(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_workload), - '__call__') as call: - client.get_workload(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.GetWorkloadRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_workloads_empty_call_rest(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_workloads), - '__call__') as call: - client.list_workloads(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.ListWorkloadsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_violations_empty_call_rest(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_violations), - '__call__') as call: - client.list_violations(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.ListViolationsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_violation_empty_call_rest(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_violation), - '__call__') as call: - client.get_violation(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.GetViolationRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_acknowledge_violation_empty_call_rest(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.acknowledge_violation), - '__call__') as call: - client.acknowledge_violation(request=None) - - # Establish that the underlying stub method was called. 
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = assuredworkloads.AcknowledgeViolationRequest()
-
-        assert args[0] == request_msg
-
-
-def test_assured_workloads_service_rest_lro_client():
-    client = AssuredWorkloadsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-    transport = client.transport
-
-    # Ensure that we have an api-core operations client.
-    assert isinstance(
-        transport.operations_client,
-        operations_v1.AbstractOperationsClient,
-    )
-
-    # Ensure that subsequent calls to the property send the exact same object.
-    assert transport.operations_client is transport.operations_client
-
-def test_transport_grpc_default():
-    # A client should use the gRPC transport by default.
-    client = AssuredWorkloadsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    assert isinstance(
-        client.transport,
-        transports.AssuredWorkloadsServiceGrpcTransport,
-    )
-
-def test_assured_workloads_service_base_transport_error():
-    # Passing both a credentials object and credentials_file should raise an error
-    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
-        transport = transports.AssuredWorkloadsServiceTransport(
-            credentials=ga_credentials.AnonymousCredentials(),
-            credentials_file="credentials.json"
-        )
-
-
-def test_assured_workloads_service_base_transport():
-    # Instantiate the base transport.
-    with mock.patch('google.cloud.assuredworkloads_v1.services.assured_workloads_service.transports.AssuredWorkloadsServiceTransport.__init__') as Transport:
-        Transport.return_value = None
-        transport = transports.AssuredWorkloadsServiceTransport(
-            credentials=ga_credentials.AnonymousCredentials(),
-        )
-
-    # Every method on the transport should just blindly
-    # raise NotImplementedError.
-    methods = (
-        'create_workload',
-        'update_workload',
-        'restrict_allowed_resources',
-        'delete_workload',
-        'get_workload',
-        'list_workloads',
-        'list_violations',
-        'get_violation',
-        'acknowledge_violation',
-        'get_operation',
-        'list_operations',
-    )
-    for method in methods:
-        with pytest.raises(NotImplementedError):
-            getattr(transport, method)(request=object())
-
-    with pytest.raises(NotImplementedError):
-        transport.close()
-
-    # Additionally, the LRO client (a property) should
-    # also raise NotImplementedError
-    with pytest.raises(NotImplementedError):
-        transport.operations_client
-
-    # Catch all for all remaining methods and properties
-    remainder = [
-        'kind',
-    ]
-    for r in remainder:
-        with pytest.raises(NotImplementedError):
-            getattr(transport, r)()
-
-
-def test_assured_workloads_service_base_transport_with_credentials_file():
-    # Instantiate the base transport with a credentials file
-    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.assuredworkloads_v1.services.assured_workloads_service.transports.AssuredWorkloadsServiceTransport._prep_wrapped_messages') as Transport:
-        Transport.return_value = None
-        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
-        transport = transports.AssuredWorkloadsServiceTransport(
-            credentials_file="credentials.json",
-            quota_project_id="octopus",
-        )
-        load_creds.assert_called_once_with("credentials.json",
-            scopes=None,
-            default_scopes=(
-                'https://www.googleapis.com/auth/cloud-platform',
-            ),
-            quota_project_id="octopus",
-        )
-
-
-def test_assured_workloads_service_base_transport_with_adc():
-    # Test the default credentials are used if credentials and credentials_file are None.
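-    # Patching google.auth.default keeps the test from touching a real ADC lookup.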
-    with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.assuredworkloads_v1.services.assured_workloads_service.transports.AssuredWorkloadsServiceTransport._prep_wrapped_messages') as Transport:
-        Transport.return_value = None
-        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
-        transport = transports.AssuredWorkloadsServiceTransport()
-        adc.assert_called_once()
-
-
-def test_assured_workloads_service_auth_adc():
-    # If no credentials are provided, we should use ADC credentials.
-    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
-        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
-        AssuredWorkloadsServiceClient()
-        adc.assert_called_once_with(
-            scopes=None,
-            default_scopes=(
-                'https://www.googleapis.com/auth/cloud-platform',
-            ),
-            quota_project_id=None,
-        )
-
-
-@pytest.mark.parametrize(
-    "transport_class",
-    [
-        transports.AssuredWorkloadsServiceGrpcTransport,
-        transports.AssuredWorkloadsServiceGrpcAsyncIOTransport,
-    ],
-)
-def test_assured_workloads_service_transport_auth_adc(transport_class):
-    # If credentials and host are not provided, the transport class should use
-    # ADC credentials.
-    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
-        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
-        transport_class(quota_project_id="octopus", scopes=["1", "2"])
-        adc.assert_called_once_with(
-            scopes=["1", "2"],
-            default_scopes=('https://www.googleapis.com/auth/cloud-platform',),
-            quota_project_id="octopus",
-        )
-
-
-@pytest.mark.parametrize(
-    "transport_class",
-    [
-        transports.AssuredWorkloadsServiceGrpcTransport,
-        transports.AssuredWorkloadsServiceGrpcAsyncIOTransport,
-        transports.AssuredWorkloadsServiceRestTransport,
-    ],
-)
-def test_assured_workloads_service_transport_auth_gdch_credentials(transport_class):
-    host = 'https://language.com'
-    api_audience_tests = [None, 'https://language2.com']
-    api_audience_expect = [host, 'https://language2.com']
-    for t, e in zip(api_audience_tests, api_audience_expect):
-        with mock.patch.object(google.auth, 'default', autospec=True) as adc:
-            gdch_mock = mock.MagicMock()
-            type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock)
-            adc.return_value = (gdch_mock, None)
-            transport_class(host=host, api_audience=t)
-            gdch_mock.with_gdch_audience.assert_called_once_with(
-                e
-            )
-
-
-@pytest.mark.parametrize(
-    "transport_class,grpc_helpers",
-    [
-        (transports.AssuredWorkloadsServiceGrpcTransport, grpc_helpers),
-        (transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, grpc_helpers_async)
-    ],
-)
-def test_assured_workloads_service_transport_create_channel(transport_class, grpc_helpers):
-    # If credentials and host are not provided, the transport class should use
-    # ADC credentials.
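-    # create_channel is patched as well so the expected channel arguments can be inspected.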
-    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object(
-        grpc_helpers, "create_channel", autospec=True
-    ) as create_channel:
-        creds = ga_credentials.AnonymousCredentials()
-        adc.return_value = (creds, None)
-        transport_class(
-            quota_project_id="octopus",
-            scopes=["1", "2"]
-        )
-
-        create_channel.assert_called_with(
-            "assuredworkloads.googleapis.com:443",
-            credentials=creds,
-            credentials_file=None,
-            quota_project_id="octopus",
-            default_scopes=(
-                'https://www.googleapis.com/auth/cloud-platform',
-            ),
-            scopes=["1", "2"],
-            default_host="assuredworkloads.googleapis.com",
-            ssl_credentials=None,
-            options=[
-                ("grpc.max_send_message_length", -1),
-                ("grpc.max_receive_message_length", -1),
-            ],
-        )
-
-
-@pytest.mark.parametrize("transport_class", [transports.AssuredWorkloadsServiceGrpcTransport, transports.AssuredWorkloadsServiceGrpcAsyncIOTransport])
-def test_assured_workloads_service_grpc_transport_client_cert_source_for_mtls(
-    transport_class
-):
-    cred = ga_credentials.AnonymousCredentials()
-
-    # Check ssl_channel_credentials is used if provided.
-    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
-        mock_ssl_channel_creds = mock.Mock()
-        transport_class(
-            host="squid.clam.whelk",
-            credentials=cred,
-            ssl_channel_credentials=mock_ssl_channel_creds
-        )
-        mock_create_channel.assert_called_once_with(
-            "squid.clam.whelk:443",
-            credentials=cred,
-            credentials_file=None,
-            scopes=None,
-            ssl_credentials=mock_ssl_channel_creds,
-            quota_project_id=None,
-            options=[
-                ("grpc.max_send_message_length", -1),
-                ("grpc.max_receive_message_length", -1),
-            ],
-        )
-
-    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
-    # is used.
-    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
-        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
-            transport_class(
-                credentials=cred,
-                client_cert_source_for_mtls=client_cert_source_callback
-            )
-            expected_cert, expected_key = client_cert_source_callback()
-            mock_ssl_cred.assert_called_once_with(
-                certificate_chain=expected_cert,
-                private_key=expected_key
-            )
-
-def test_assured_workloads_service_http_transport_client_cert_source_for_mtls():
-    cred = ga_credentials.AnonymousCredentials()
-    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
-        transports.AssuredWorkloadsServiceRestTransport(
-            credentials=cred,
-            client_cert_source_for_mtls=client_cert_source_callback
-        )
-        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
-
-
-@pytest.mark.parametrize("transport_name", [
-    "grpc",
-    "grpc_asyncio",
-    "rest",
-])
-def test_assured_workloads_service_host_no_port(transport_name):
-    client = AssuredWorkloadsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        client_options=client_options.ClientOptions(api_endpoint='assuredworkloads.googleapis.com'),
-        transport=transport_name,
-    )
-    assert client.transport._host == (
-        'assuredworkloads.googleapis.com:443'
-        if transport_name in ['grpc', 'grpc_asyncio']
-        else 'https://assuredworkloads.googleapis.com'
-    )
-
-@pytest.mark.parametrize("transport_name", [
-    "grpc",
-    "grpc_asyncio",
-    "rest",
-])
-def test_assured_workloads_service_host_with_port(transport_name):
-    client = AssuredWorkloadsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        client_options=client_options.ClientOptions(api_endpoint='assuredworkloads.googleapis.com:8000'),
-        transport=transport_name,
-    )
-    assert client.transport._host == (
-        'assuredworkloads.googleapis.com:8000'
-        if transport_name in ['grpc', 'grpc_asyncio']
-        else 'https://assuredworkloads.googleapis.com:8000'
-    )
-
-@pytest.mark.parametrize("transport_name", [
-    "rest",
-])
-def test_assured_workloads_service_client_transport_session_collision(transport_name):
-    creds1 = ga_credentials.AnonymousCredentials()
-    creds2 = ga_credentials.AnonymousCredentials()
-    client1 = AssuredWorkloadsServiceClient(
-        credentials=creds1,
-        transport=transport_name,
-    )
-    client2 = AssuredWorkloadsServiceClient(
-        credentials=creds2,
-        transport=transport_name,
-    )
-    session1 = client1.transport.create_workload._session
-    session2 = client2.transport.create_workload._session
-    assert session1 != session2
-    session1 = client1.transport.update_workload._session
-    session2 = client2.transport.update_workload._session
-    assert session1 != session2
-    session1 = client1.transport.restrict_allowed_resources._session
-    session2 = client2.transport.restrict_allowed_resources._session
-    assert session1 != session2
-    session1 = client1.transport.delete_workload._session
-    session2 = client2.transport.delete_workload._session
-    assert session1 != session2
-    session1 = client1.transport.get_workload._session
-    session2 = client2.transport.get_workload._session
-    assert session1 != session2
-    session1 = client1.transport.list_workloads._session
-    session2 = client2.transport.list_workloads._session
-    assert session1 != session2
-    session1 = client1.transport.list_violations._session
-    session2 = client2.transport.list_violations._session
-    assert session1 != session2
-    session1 = client1.transport.get_violation._session
-    session2 = client2.transport.get_violation._session
-    assert session1 != session2
-    session1 = client1.transport.acknowledge_violation._session
-    session2 = client2.transport.acknowledge_violation._session
-    assert session1 != session2
-
-def test_assured_workloads_service_grpc_transport_channel():
-    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
-    # Check that channel is used if provided.
-    transport = transports.AssuredWorkloadsServiceGrpcTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials is None
-
-
-def test_assured_workloads_service_grpc_asyncio_transport_channel():
-    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
-    # Check that channel is used if provided.
-    transport = transports.AssuredWorkloadsServiceGrpcAsyncIOTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials is None
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.AssuredWorkloadsServiceGrpcTransport, transports.AssuredWorkloadsServiceGrpcAsyncIOTransport])
-def test_assured_workloads_service_transport_channel_mtls_with_client_cert_source(
-    transport_class
-):
-    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
-        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
-            mock_ssl_cred = mock.Mock()
-            grpc_ssl_channel_cred.return_value = mock_ssl_cred
-
-            mock_grpc_channel = mock.Mock()
-            grpc_create_channel.return_value = mock_grpc_channel
-
-            cred = ga_credentials.AnonymousCredentials()
-            with pytest.warns(DeprecationWarning):
-                with mock.patch.object(google.auth, 'default') as adc:
-                    adc.return_value = (cred, None)
-                    transport = transport_class(
-                        host="squid.clam.whelk",
-                        api_mtls_endpoint="mtls.squid.clam.whelk",
-                        client_cert_source=client_cert_source_callback,
-                    )
-                    adc.assert_called_once()
-
-            grpc_ssl_channel_cred.assert_called_once_with(
-                certificate_chain=b"cert bytes", private_key=b"key bytes"
-            )
-            grpc_create_channel.assert_called_once_with(
-                "mtls.squid.clam.whelk:443",
-                credentials=cred,
-                credentials_file=None,
-                scopes=None,
-                ssl_credentials=mock_ssl_cred,
-                quota_project_id=None,
-                options=[
-                    ("grpc.max_send_message_length", -1),
-                    ("grpc.max_receive_message_length", -1),
-                ],
-            )
-            assert transport.grpc_channel == mock_grpc_channel
-            assert transport._ssl_channel_credentials == mock_ssl_cred
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.AssuredWorkloadsServiceGrpcTransport, transports.AssuredWorkloadsServiceGrpcAsyncIOTransport])
-def test_assured_workloads_service_transport_channel_mtls_with_adc(
-    transport_class
-):
-    mock_ssl_cred = mock.Mock()
-    with mock.patch.multiple(
-        "google.auth.transport.grpc.SslCredentials",
-        __init__=mock.Mock(return_value=None),
-        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
-    ):
-        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
-            mock_grpc_channel = mock.Mock()
-            grpc_create_channel.return_value = mock_grpc_channel
-            mock_cred = mock.Mock()
-
-            with pytest.warns(DeprecationWarning):
-                transport = transport_class(
-                    host="squid.clam.whelk",
-                    credentials=mock_cred,
-                    api_mtls_endpoint="mtls.squid.clam.whelk",
-                    client_cert_source=None,
-                )
-
-            grpc_create_channel.assert_called_once_with(
-                "mtls.squid.clam.whelk:443",
-                credentials=mock_cred,
-                credentials_file=None,
-                scopes=None,
-                ssl_credentials=mock_ssl_cred,
-                quota_project_id=None,
-                options=[
-                    ("grpc.max_send_message_length", -1),
-                    ("grpc.max_receive_message_length", -1),
-                ],
-            )
-            assert transport.grpc_channel == mock_grpc_channel
-
-
-def test_assured_workloads_service_grpc_lro_client():
-    client = AssuredWorkloadsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-    transport = client.transport
-
-    # Ensure that we have an api-core operations client.
-    assert isinstance(
-        transport.operations_client,
-        operations_v1.OperationsClient,
-    )
-
-    # Ensure that subsequent calls to the property send the exact same object.
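-    # The transport caches the operations client, so repeated access returns the same object.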
-    assert transport.operations_client is transport.operations_client
-
-
-def test_assured_workloads_service_grpc_lro_async_client():
-    client = AssuredWorkloadsServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc_asyncio',
-    )
-    transport = client.transport
-
-    # Ensure that we have an api-core operations client.
-    assert isinstance(
-        transport.operations_client,
-        operations_v1.OperationsAsyncClient,
-    )
-
-    # Ensure that subsequent calls to the property send the exact same object.
-    assert transport.operations_client is transport.operations_client
-
-
-def test_violation_path():
-    organization = "squid"
-    location = "clam"
-    workload = "whelk"
-    violation = "octopus"
-    expected = "organizations/{organization}/locations/{location}/workloads/{workload}/violations/{violation}".format(organization=organization, location=location, workload=workload, violation=violation, )
-    actual = AssuredWorkloadsServiceClient.violation_path(organization, location, workload, violation)
-    assert expected == actual
-
-
-def test_parse_violation_path():
-    expected = {
-        "organization": "oyster",
-        "location": "nudibranch",
-        "workload": "cuttlefish",
-        "violation": "mussel",
-    }
-    path = AssuredWorkloadsServiceClient.violation_path(**expected)
-
-    # Check that the path construction is reversible.
-    actual = AssuredWorkloadsServiceClient.parse_violation_path(path)
-    assert expected == actual
-
-def test_workload_path():
-    organization = "winkle"
-    location = "nautilus"
-    workload = "scallop"
-    expected = "organizations/{organization}/locations/{location}/workloads/{workload}".format(organization=organization, location=location, workload=workload, )
-    actual = AssuredWorkloadsServiceClient.workload_path(organization, location, workload)
-    assert expected == actual
-
-
-def test_parse_workload_path():
-    expected = {
-        "organization": "abalone",
-        "location": "squid",
-        "workload": "clam",
-    }
-    path = AssuredWorkloadsServiceClient.workload_path(**expected)
-
-    # Check that the path construction is reversible.
-    actual = AssuredWorkloadsServiceClient.parse_workload_path(path)
-    assert expected == actual
-
-def test_common_billing_account_path():
-    billing_account = "whelk"
-    expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, )
-    actual = AssuredWorkloadsServiceClient.common_billing_account_path(billing_account)
-    assert expected == actual
-
-
-def test_parse_common_billing_account_path():
-    expected = {
-        "billing_account": "octopus",
-    }
-    path = AssuredWorkloadsServiceClient.common_billing_account_path(**expected)
-
-    # Check that the path construction is reversible.
-    actual = AssuredWorkloadsServiceClient.parse_common_billing_account_path(path)
-    assert expected == actual
-
-def test_common_folder_path():
-    folder = "oyster"
-    expected = "folders/{folder}".format(folder=folder, )
-    actual = AssuredWorkloadsServiceClient.common_folder_path(folder)
-    assert expected == actual
-
-
-def test_parse_common_folder_path():
-    expected = {
-        "folder": "nudibranch",
-    }
-    path = AssuredWorkloadsServiceClient.common_folder_path(**expected)
-
-    # Check that the path construction is reversible.
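-    # parse_* must recover exactly the components used to build the path.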
- actual = AssuredWorkloadsServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "cuttlefish" - expected = "organizations/{organization}".format(organization=organization, ) - actual = AssuredWorkloadsServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "mussel", - } - path = AssuredWorkloadsServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = AssuredWorkloadsServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "winkle" - expected = "projects/{project}".format(project=project, ) - actual = AssuredWorkloadsServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "nautilus", - } - path = AssuredWorkloadsServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = AssuredWorkloadsServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "scallop" - location = "abalone" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = AssuredWorkloadsServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "squid", - "location": "clam", - } - path = AssuredWorkloadsServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = AssuredWorkloadsServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.AssuredWorkloadsServiceTransport, '_prep_wrapped_messages') as prep: - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.AssuredWorkloadsServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = AssuredWorkloadsServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_get_operation(transport: str = "grpc"): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_list_operations_field_headers(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_operations_from_dict(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_transport_close_grpc(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close_rest(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. - with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (AssuredWorkloadsServiceClient, transports.AssuredWorkloadsServiceGrpcTransport), - (AssuredWorkloadsServiceAsyncClient, transports.AssuredWorkloadsServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/.coveragerc b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/.coveragerc deleted file mode 100644 index 74d92878ed60..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/assuredworkloads/__init__.py - google/cloud/assuredworkloads/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/.flake8 b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/.flake8 deleted file mode 100644 index 29227d4cf419..000000000000 --- 
a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/.flake8
+++ /dev/null
@@ -1,33 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Generated by synthtool. DO NOT EDIT!
-[flake8]
-ignore = E203, E266, E501, W503
-exclude =
-  # Exclude generated code.
-  **/proto/**
-  **/gapic/**
-  **/services/**
-  **/types/**
-  *_pb2.py
-
-  # Standard linting exemptions.
-  **/.nox/**
-  __pycache__,
-  .git,
-  *.pyc,
-  conf.py
diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/MANIFEST.in b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/MANIFEST.in
deleted file mode 100644
index 28b53face399..000000000000
--- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/MANIFEST.in
+++ /dev/null
@@ -1,2 +0,0 @@
-recursive-include google/cloud/assuredworkloads *.py
-recursive-include google/cloud/assuredworkloads_v1beta1 *.py
diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/README.rst b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/README.rst
deleted file mode 100644
index db9473b13ad5..000000000000
--- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/README.rst
+++ /dev/null
@@ -1,143 +0,0 @@
-Python Client for Google Cloud Assuredworkloads API
-===================================================
-
-Quick Start
------------
-
-In order to use this library, you first need to go through the following steps:
-
-1. `Select or create a Cloud Platform project.`_
-2. `Enable billing for your project.`_
-3. Enable the Google Cloud Assuredworkloads API.
-4. `Setup Authentication.`_
-
-.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
-.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
-.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
-
-Installation
-~~~~~~~~~~~~
-
-Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
-create isolated Python environments. The basic problem it addresses is one of
-dependencies and versions, and indirectly permissions.
-
-With `virtualenv`_, it's possible to install this library without needing system
-install permissions, and without clashing with the installed system
-dependencies.
-
-.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
-
-
-Mac/Linux
-^^^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    source <your-env>/bin/activate
-    <your-env>/bin/pip install /path/to/library
-
-
-Windows
-^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    <your-env>\Scripts\activate
-    <your-env>\Scripts\pip.exe install \path\to\library
-
-
-Logging
--------
-
-This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes.
-Note the following:
-
-#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging.
-#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**.
-#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below.
-
-
-Simple, environment-based configuration
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google
-logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged
-messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging
-event.
-
-A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log.
-
-- Valid logging scopes: :code:`google`, :code:`google.cloud.assuredworkloads_v1beta1`, :code:`google.api`, :code:`google.auth`, etc.
-- Invalid logging scopes: :code:`foo`, :code:`123`, etc.
-
-**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers.
-
-
-Examples
-^^^^^^^^
-
-- Enabling the default handler for all Google-based loggers
-
-.. code-block:: console
-
-    export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google
-
-- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`):
-
-.. code-block:: console
-
-    export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1
-
-
-Advanced, code-based configuration
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-You can also configure a valid logging scope using Python's standard `logging` mechanism.
-
-
-Examples
-^^^^^^^^
-
-- Configuring a handler for all Google-based loggers
-
-.. code-block:: python
-
-    import logging
-
-    base_logger = logging.getLogger("google")
-    base_logger.addHandler(logging.StreamHandler())
-    base_logger.setLevel(logging.DEBUG)
-
-- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`):
-
-.. code-block:: python
-
-    import logging
-
-    base_logger = logging.getLogger("google.cloud.library_v1")
-    base_logger.addHandler(logging.StreamHandler())
-    base_logger.setLevel(logging.DEBUG)
-
-
-Logging details
-~~~~~~~~~~~~~~~
-
-#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root
-   logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set
-   :code:`logging.getLogger("google").propagate = True` in your code.
-#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for
-   one library, but decide you need to also set up environment-based logging configuration for another library.
-
-   #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual
-      if the code-based configuration gets applied first.
-
-#. 
The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get - executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured. - (This is the reason for 2.i. above.) diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/docs/_static/custom.css b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/docs/_static/custom.css deleted file mode 100644 index 06423be0b592..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/docs/_static/custom.css +++ /dev/null @@ -1,3 +0,0 @@ -dl.field-list > dt { - min-width: 100px -} diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/docs/assuredworkloads_v1beta1/assured_workloads_service.rst b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/docs/assuredworkloads_v1beta1/assured_workloads_service.rst deleted file mode 100644 index bd798cb3fa32..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/docs/assuredworkloads_v1beta1/assured_workloads_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -AssuredWorkloadsService ------------------------------------------ - -.. automodule:: google.cloud.assuredworkloads_v1beta1.services.assured_workloads_service - :members: - :inherited-members: - -.. automodule:: google.cloud.assuredworkloads_v1beta1.services.assured_workloads_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/docs/assuredworkloads_v1beta1/services_.rst b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/docs/assuredworkloads_v1beta1/services_.rst deleted file mode 100644 index abfac62c50d5..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/docs/assuredworkloads_v1beta1/services_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Assuredworkloads v1beta1 API -====================================================== -.. toctree:: - :maxdepth: 2 - - assured_workloads_service diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/docs/assuredworkloads_v1beta1/types_.rst b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/docs/assuredworkloads_v1beta1/types_.rst deleted file mode 100644 index 070395b22dfb..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/docs/assuredworkloads_v1beta1/types_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Assuredworkloads v1beta1 API -=================================================== - -.. automodule:: google.cloud.assuredworkloads_v1beta1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/docs/conf.py b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/docs/conf.py deleted file mode 100644 index 470937920a65..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-#
-# google-cloud-assured-workloads documentation build configuration file
-#
-# This file is execfile()d with the current directory set to its
-# containing dir.
-#
-# Note that not all possible configuration values are present in this
-# autogenerated file.
-#
-# All configuration values have a default; values that are commented out
-# serve to show the default.
-
-import sys
-import os
-import shlex
-
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
-sys.path.insert(0, os.path.abspath(".."))
-
-__version__ = "0.1.0"
-
-# -- General configuration ------------------------------------------------
-
-# If your documentation needs a minimal Sphinx version, state it here.
-needs_sphinx = "4.0.1"
-
-# Add any Sphinx extension module names here, as strings. They can be
-# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
-# ones.
-extensions = [
-    "sphinx.ext.autodoc",
-    "sphinx.ext.autosummary",
-    "sphinx.ext.intersphinx",
-    "sphinx.ext.coverage",
-    "sphinx.ext.napoleon",
-    "sphinx.ext.todo",
-    "sphinx.ext.viewcode",
-]
-
-# autodoc/autosummary flags
-autoclass_content = "both"
-autodoc_default_flags = ["members"]
-autosummary_generate = True
-
-
-# Add any paths that contain templates here, relative to this directory.
-templates_path = ["_templates"]
-
-# Allow markdown includes (so releases.md can include CHANGELOG.md)
-# http://www.sphinx-doc.org/en/master/markdown.html
-source_parsers = {".md": "recommonmark.parser.CommonMarkParser"}
-
-# The suffix(es) of source filenames.
-# You can specify multiple suffix as a list of string:
-source_suffix = [".rst", ".md"]
-
-# The encoding of source files.
-# source_encoding = 'utf-8-sig'
-
-# The root toctree document.
-root_doc = "index"
-
-# General information about the project.
-project = u"google-cloud-assured-workloads"
-copyright = u"2023, Google, LLC"
-author = u"Google APIs"  # TODO: autogenerate this bit
-
-# The version info for the project you're documenting, acts as replacement for
-# |version| and |release|, also used in various other places throughout the
-# built documents.
-#
-# The full version, including alpha/beta/rc tags.
-release = __version__
-# The short X.Y version.
-version = ".".join(release.split(".")[0:2])
-
-# The language for content autogenerated by Sphinx. Refer to documentation
-# for a list of supported languages.
-#
-# This is also used if you do content translation via gettext catalogs.
-# Usually you set "language" from the command line for these cases.
-language = 'en'
-
-# There are two options for replacing |today|: either, you set today to some
-# non-false value, then it is used:
-# today = ''
-# Else, today_fmt is used as the format for a strftime call.
-# today_fmt = '%B %d, %Y'
-
-# List of patterns, relative to source directory, that match files and
-# directories to ignore when looking for source files.
-exclude_patterns = ["_build"]
-
-# The reST default role (used for this markup: `text`) to use for all
-# documents.
-# default_role = None
-
-# If true, '()' will be appended to :func: etc. cross-reference text.
-# add_function_parentheses = True
-
-# If true, the current module name will be prepended to all description
-# unit titles (such as .. function::).
-# add_module_names = True
-
-# If true, sectionauthor and moduleauthor directives will be shown in the
-# output. They are ignored by default.
-# show_authors = False
-
-# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = "sphinx"
-
-# A list of ignored prefixes for module index sorting.
-# modindex_common_prefix = []
-
-# If true, keep warnings as "system message" paragraphs in the built documents.
-# keep_warnings = False
-
-# If true, `todo` and `todoList` produce output, else they produce nothing.
-todo_include_todos = True
-
-
-# -- Options for HTML output ----------------------------------------------
-
-# The theme to use for HTML and HTML Help pages. See the documentation for
-# a list of builtin themes.
-html_theme = "alabaster"
-
-# Theme options are theme-specific and customize the look and feel of a theme
-# further. For a list of options available for each theme, see the
-# documentation.
-html_theme_options = {
-    "description": "Google Cloud Client Libraries for Python",
-    "github_user": "googleapis",
-    "github_repo": "google-cloud-python",
-    "github_banner": True,
-    "font_family": "'Roboto', Georgia, sans",
-    "head_font_family": "'Roboto', Georgia, serif",
-    "code_font_family": "'Roboto Mono', 'Consolas', monospace",
-}
-
-# Add any paths that contain custom themes here, relative to this directory.
-# html_theme_path = []
-
-# The name for this set of Sphinx documents. If None, it defaults to
-# "<project> v<release> documentation".
-# html_title = None
-
-# A shorter title for the navigation bar. Default is the same as html_title.
-# html_short_title = None
-
-# The name of an image file (relative to this directory) to place at the top
-# of the sidebar.
-# html_logo = None
-
-# The name of an image file (within the static path) to use as favicon of the
-# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
-# pixels large.
-# html_favicon = None
-
-# Add any paths that contain custom static files (such as style sheets) here,
-# relative to this directory. They are copied after the builtin static files,
-# so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ["_static"]
-
-# Add any extra paths that contain custom files (such as robots.txt or
-# .htaccess) here, relative to this directory. These files are copied
-# directly to the root of the documentation.
-# html_extra_path = []
-
-# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
-# using the given strftime format.
-# html_last_updated_fmt = '%b %d, %Y'
-
-# If true, SmartyPants will be used to convert quotes and dashes to
-# typographically correct entities.
-# html_use_smartypants = True
-
-# Custom sidebar templates, maps document names to template names.
-# html_sidebars = {}
-
-# Additional templates that should be rendered to pages, maps page names to
-# template names.
-# html_additional_pages = {}
-
-# If false, no module index is generated.
-# html_domain_indices = True
-
-# If false, no index is generated.
-# html_use_index = True
-
-# If true, the index is split into individual pages for each letter.
-# html_split_index = False
-
-# If true, links to the reST sources are added to the pages.
-# html_show_sourcelink = True
-
-# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
-# html_show_sphinx = True
-
-# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
-# html_show_copyright = True
-
-# If true, an OpenSearch description file will be output, and all pages will
-# contain a <link> tag referring to it. The value of this option must be the
-# base URL from which the finished HTML is served.
-# html_use_opensearch = ''
-
-# This is the file name suffix for HTML files (e.g. ".xhtml").
-# html_file_suffix = None
-
-# Language to be used for generating the HTML full-text search index.
-# Sphinx supports the following languages:
-# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
-# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
-# html_search_language = 'en'
-
-# A dictionary with options for the search language support, empty by default.
-# Now only 'ja' uses this config value
-# html_search_options = {'type': 'default'}
-
-# The name of a javascript file (relative to the configuration directory) that
-# implements a search results scorer. If empty, the default will be used.
-# html_search_scorer = 'scorer.js'
-
-# Output file base name for HTML help builder.
-htmlhelp_basename = "google-cloud-assured-workloads-doc"
-
-# -- Options for warnings ------------------------------------------------------
-
-
-suppress_warnings = [
-    # Temporarily suppress this to avoid "more than one target found for
-    # cross-reference" warnings, which are intractable for us to avoid while in
-    # a mono-repo.
-    # See https://github.com/sphinx-doc/sphinx/blob
-    # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843
-    "ref.python"
-]
-
-# -- Options for LaTeX output ---------------------------------------------
-
-latex_elements = {
-    # The paper size ('letterpaper' or 'a4paper').
-    # 'papersize': 'letterpaper',
-    # The font size ('10pt', '11pt' or '12pt').
-    # 'pointsize': '10pt',
-    # Additional stuff for the LaTeX preamble.
-    # 'preamble': '',
-    # Latex figure (float) alignment
-    # 'figure_align': 'htbp',
-}
-
-# Grouping the document tree into LaTeX files. List of tuples
-# (source start file, target name, title,
-# author, documentclass [howto, manual, or own class]).
-latex_documents = [
-    (
-        root_doc,
-        "google-cloud-assured-workloads.tex",
-        u"google-cloud-assured-workloads Documentation",
-        author,
-        "manual",
-    )
-]
-
-# The name of an image file (relative to this directory) to place at the top of
-# the title page.
-# latex_logo = None
-
-# For "manual" documents, if this is true, then toplevel headings are parts,
-# not chapters.
-# latex_use_parts = False
-
-# If true, show page references after internal links.
-# latex_show_pagerefs = False
-
-# If true, show URL addresses after external links.
-# latex_show_urls = False
-
-# Documents to append as an appendix to all manuals.
-# latex_appendices = []
-
-# If false, no module index is generated.
-# latex_domain_indices = True
-
-
-# -- Options for manual page output ---------------------------------------
-
-# One entry per manual page. List of tuples
-# (source start file, name, description, authors, manual section).
-man_pages = [
-    (
-        root_doc,
-        "google-cloud-assured-workloads",
-        u"Google Cloud Assuredworkloads Documentation",
-        [author],
-        1,
-    )
-]
-
-# If true, show URL addresses after external links.
-# man_show_urls = False
-
-
-# -- Options for Texinfo output -------------------------------------------
-
-# Grouping the document tree into Texinfo files.
List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-assured-workloads", - u"google-cloud-assured-workloads Documentation", - author, - "google-cloud-assured-workloads", - "GAPIC library for Google Cloud Assuredworkloads API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/docs/index.rst b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/docs/index.rst deleted file mode 100644 index 48367d704b91..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - assuredworkloads_v1beta1/services_ - assuredworkloads_v1beta1/types_ diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads/__init__.py b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads/__init__.py deleted file mode 100644 index 49916ae8da71..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads/__init__.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.assuredworkloads import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.assuredworkloads_v1beta1.services.assured_workloads_service.client import AssuredWorkloadsServiceClient -from google.cloud.assuredworkloads_v1beta1.services.assured_workloads_service.async_client import AssuredWorkloadsServiceAsyncClient - -from google.cloud.assuredworkloads_v1beta1.types.assuredworkloads import AnalyzeWorkloadMoveRequest -from google.cloud.assuredworkloads_v1beta1.types.assuredworkloads import AnalyzeWorkloadMoveResponse -from google.cloud.assuredworkloads_v1beta1.types.assuredworkloads import CreateWorkloadOperationMetadata -from google.cloud.assuredworkloads_v1beta1.types.assuredworkloads import CreateWorkloadRequest -from google.cloud.assuredworkloads_v1beta1.types.assuredworkloads import DeleteWorkloadRequest -from google.cloud.assuredworkloads_v1beta1.types.assuredworkloads import GetWorkloadRequest -from google.cloud.assuredworkloads_v1beta1.types.assuredworkloads import ListWorkloadsRequest -from google.cloud.assuredworkloads_v1beta1.types.assuredworkloads import ListWorkloadsResponse -from google.cloud.assuredworkloads_v1beta1.types.assuredworkloads import RestrictAllowedResourcesRequest -from google.cloud.assuredworkloads_v1beta1.types.assuredworkloads import RestrictAllowedResourcesResponse -from google.cloud.assuredworkloads_v1beta1.types.assuredworkloads import UpdateWorkloadRequest -from google.cloud.assuredworkloads_v1beta1.types.assuredworkloads import Workload - -__all__ = ('AssuredWorkloadsServiceClient', - 'AssuredWorkloadsServiceAsyncClient', - 'AnalyzeWorkloadMoveRequest', - 'AnalyzeWorkloadMoveResponse', - 'CreateWorkloadOperationMetadata', - 'CreateWorkloadRequest', - 'DeleteWorkloadRequest', - 'GetWorkloadRequest', - 'ListWorkloadsRequest', - 'ListWorkloadsResponse', - 'RestrictAllowedResourcesRequest', - 'RestrictAllowedResourcesResponse', - 'UpdateWorkloadRequest', - 'Workload', -) diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads/gapic_version.py b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads/py.typed b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads/py.typed deleted file mode 100644 index 3762b50eeb3d..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. 
-# The google-cloud-assured-workloads package uses inline types. diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/__init__.py b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/__init__.py deleted file mode 100644 index b64a442b35b1..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/__init__.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.assuredworkloads_v1beta1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.assured_workloads_service import AssuredWorkloadsServiceClient -from .services.assured_workloads_service import AssuredWorkloadsServiceAsyncClient - -from .types.assuredworkloads import AnalyzeWorkloadMoveRequest -from .types.assuredworkloads import AnalyzeWorkloadMoveResponse -from .types.assuredworkloads import CreateWorkloadOperationMetadata -from .types.assuredworkloads import CreateWorkloadRequest -from .types.assuredworkloads import DeleteWorkloadRequest -from .types.assuredworkloads import GetWorkloadRequest -from .types.assuredworkloads import ListWorkloadsRequest -from .types.assuredworkloads import ListWorkloadsResponse -from .types.assuredworkloads import RestrictAllowedResourcesRequest -from .types.assuredworkloads import RestrictAllowedResourcesResponse -from .types.assuredworkloads import UpdateWorkloadRequest -from .types.assuredworkloads import Workload - -__all__ = ( - 'AssuredWorkloadsServiceAsyncClient', -'AnalyzeWorkloadMoveRequest', -'AnalyzeWorkloadMoveResponse', -'AssuredWorkloadsServiceClient', -'CreateWorkloadOperationMetadata', -'CreateWorkloadRequest', -'DeleteWorkloadRequest', -'GetWorkloadRequest', -'ListWorkloadsRequest', -'ListWorkloadsResponse', -'RestrictAllowedResourcesRequest', -'RestrictAllowedResourcesResponse', -'UpdateWorkloadRequest', -'Workload', -) diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/gapic_metadata.json b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/gapic_metadata.json deleted file mode 100644 index 68c3be657cf7..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/gapic_metadata.json +++ /dev/null @@ -1,133 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.assuredworkloads_v1beta1", - "protoPackage": "google.cloud.assuredworkloads.v1beta1", - "schema": "1.0", - "services": { - "AssuredWorkloadsService": { - "clients": { - "grpc": { - "libraryClient": "AssuredWorkloadsServiceClient", - "rpcs": { - "AnalyzeWorkloadMove": { - "methods": [ - "analyze_workload_move" - ] - }, - "CreateWorkload": { - "methods": [ - "create_workload" - ] - }, 
- "DeleteWorkload": { - "methods": [ - "delete_workload" - ] - }, - "GetWorkload": { - "methods": [ - "get_workload" - ] - }, - "ListWorkloads": { - "methods": [ - "list_workloads" - ] - }, - "RestrictAllowedResources": { - "methods": [ - "restrict_allowed_resources" - ] - }, - "UpdateWorkload": { - "methods": [ - "update_workload" - ] - } - } - }, - "grpc-async": { - "libraryClient": "AssuredWorkloadsServiceAsyncClient", - "rpcs": { - "AnalyzeWorkloadMove": { - "methods": [ - "analyze_workload_move" - ] - }, - "CreateWorkload": { - "methods": [ - "create_workload" - ] - }, - "DeleteWorkload": { - "methods": [ - "delete_workload" - ] - }, - "GetWorkload": { - "methods": [ - "get_workload" - ] - }, - "ListWorkloads": { - "methods": [ - "list_workloads" - ] - }, - "RestrictAllowedResources": { - "methods": [ - "restrict_allowed_resources" - ] - }, - "UpdateWorkload": { - "methods": [ - "update_workload" - ] - } - } - }, - "rest": { - "libraryClient": "AssuredWorkloadsServiceClient", - "rpcs": { - "AnalyzeWorkloadMove": { - "methods": [ - "analyze_workload_move" - ] - }, - "CreateWorkload": { - "methods": [ - "create_workload" - ] - }, - "DeleteWorkload": { - "methods": [ - "delete_workload" - ] - }, - "GetWorkload": { - "methods": [ - "get_workload" - ] - }, - "ListWorkloads": { - "methods": [ - "list_workloads" - ] - }, - "RestrictAllowedResources": { - "methods": [ - "restrict_allowed_resources" - ] - }, - "UpdateWorkload": { - "methods": [ - "update_workload" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/gapic_version.py b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/py.typed b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/py.typed deleted file mode 100644 index 3762b50eeb3d..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-assured-workloads package uses inline types. 
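The ``gapic_metadata.json`` mapping above associates each proto RPC with the client method name exposed by each transport. A minimal sketch of consuming that mapping (the helper name and the file path are hypothetical, not part of the library):

.. code-block:: python

    import json

    def resolve_rpc_method(metadata_path: str, service: str, transport: str, rpc: str) -> str:
        """Return the client method name for an RPC on a given transport."""
        with open(metadata_path) as f:
            meta = json.load(f)
        # Structure: services -> <Service> -> clients -> <transport> -> rpcs -> <RPC> -> methods
        return meta["services"][service]["clients"][transport]["rpcs"][rpc]["methods"][0]

    # For the metadata above:
    # resolve_rpc_method("gapic_metadata.json", "AssuredWorkloadsService", "grpc", "CreateWorkload")
    # returns "create_workload".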
diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/services/__init__.py b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/services/__init__.py deleted file mode 100644 index 8f6cf068242c..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/__init__.py b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/__init__.py deleted file mode 100644 index ba20e950ce7c..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import AssuredWorkloadsServiceClient -from .async_client import AssuredWorkloadsServiceAsyncClient - -__all__ = ( - 'AssuredWorkloadsServiceClient', - 'AssuredWorkloadsServiceAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/async_client.py b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/async_client.py deleted file mode 100644 index 89417cfc850f..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/async_client.py +++ /dev/null @@ -1,1150 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.assuredworkloads_v1beta1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.assuredworkloads_v1beta1.services.assured_workloads_service import pagers -from google.cloud.assuredworkloads_v1beta1.types import assuredworkloads -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import AssuredWorkloadsServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import AssuredWorkloadsServiceGrpcAsyncIOTransport -from .client import AssuredWorkloadsServiceClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -class AssuredWorkloadsServiceAsyncClient: - """Service to manage AssuredWorkloads.""" - - _client: AssuredWorkloadsServiceClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
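    # The endpoint template below is formatted with a universe domain (default
    # "googleapis.com") to produce the service endpoint; the
    # test_api_key_credentials case earlier in this diff exercises this via
    # _DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE).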
-    DEFAULT_ENDPOINT = AssuredWorkloadsServiceClient.DEFAULT_ENDPOINT
-    DEFAULT_MTLS_ENDPOINT = AssuredWorkloadsServiceClient.DEFAULT_MTLS_ENDPOINT
-    _DEFAULT_ENDPOINT_TEMPLATE = AssuredWorkloadsServiceClient._DEFAULT_ENDPOINT_TEMPLATE
-    _DEFAULT_UNIVERSE = AssuredWorkloadsServiceClient._DEFAULT_UNIVERSE
-
-    workload_path = staticmethod(AssuredWorkloadsServiceClient.workload_path)
-    parse_workload_path = staticmethod(AssuredWorkloadsServiceClient.parse_workload_path)
-    common_billing_account_path = staticmethod(AssuredWorkloadsServiceClient.common_billing_account_path)
-    parse_common_billing_account_path = staticmethod(AssuredWorkloadsServiceClient.parse_common_billing_account_path)
-    common_folder_path = staticmethod(AssuredWorkloadsServiceClient.common_folder_path)
-    parse_common_folder_path = staticmethod(AssuredWorkloadsServiceClient.parse_common_folder_path)
-    common_organization_path = staticmethod(AssuredWorkloadsServiceClient.common_organization_path)
-    parse_common_organization_path = staticmethod(AssuredWorkloadsServiceClient.parse_common_organization_path)
-    common_project_path = staticmethod(AssuredWorkloadsServiceClient.common_project_path)
-    parse_common_project_path = staticmethod(AssuredWorkloadsServiceClient.parse_common_project_path)
-    common_location_path = staticmethod(AssuredWorkloadsServiceClient.common_location_path)
-    parse_common_location_path = staticmethod(AssuredWorkloadsServiceClient.parse_common_location_path)
-
-    @classmethod
-    def from_service_account_info(cls, info: dict, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            info.
-
-        Args:
-            info (dict): The service account private key info.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            AssuredWorkloadsServiceAsyncClient: The constructed client.
-        """
-        return AssuredWorkloadsServiceClient.from_service_account_info.__func__(AssuredWorkloadsServiceAsyncClient, info, *args, **kwargs)  # type: ignore
-
-    @classmethod
-    def from_service_account_file(cls, filename: str, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            file.
-
-        Args:
-            filename (str): The path to the service account private key json
-                file.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            AssuredWorkloadsServiceAsyncClient: The constructed client.
-        """
-        return AssuredWorkloadsServiceClient.from_service_account_file.__func__(AssuredWorkloadsServiceAsyncClient, filename, *args, **kwargs)  # type: ignore
-
-    from_service_account_json = from_service_account_file
-
-    @classmethod
-    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None):
-        """Return the API endpoint and client cert source for mutual TLS.
-
-        The client cert source is determined in the following order:
-        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
-        client cert source is None.
-        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
-        default client cert source exists, use the default one; otherwise the client cert
-        source is None.
-
-        The API endpoint is determined in the following order:
-        (1) if `client_options.api_endpoint` is provided, use the provided one.
-        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
-        default mTLS endpoint; if the environment variable is "never", use the default API
-        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
-        use the default API endpoint.
-
-        More details can be found at https://google.aip.dev/auth/4114.
-
-        Args:
-            client_options (google.api_core.client_options.ClientOptions): Custom options for the
-                client. Only the `api_endpoint` and `client_cert_source` properties may be used
-                in this method.
-
-        Returns:
-            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
-                client cert source to use.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
-        """
-        return AssuredWorkloadsServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
-
-    @property
-    def transport(self) -> AssuredWorkloadsServiceTransport:
-        """Returns the transport used by the client instance.
-
-        Returns:
-            AssuredWorkloadsServiceTransport: The transport used by the client instance.
-        """
-        return self._client.transport
-
-    @property
-    def api_endpoint(self):
-        """Return the API endpoint used by the client instance.
-
-        Returns:
-            str: The API endpoint used by the client instance.
-        """
-        return self._client._api_endpoint
-
-    @property
-    def universe_domain(self) -> str:
-        """Return the universe domain used by the client instance.
-
-        Returns:
-            str: The universe domain used
-                by the client instance.
-        """
-        return self._client._universe_domain
-
-    get_transport_class = AssuredWorkloadsServiceClient.get_transport_class
-
-    def __init__(self, *,
-            credentials: Optional[ga_credentials.Credentials] = None,
-            transport: Optional[Union[str, AssuredWorkloadsServiceTransport, Callable[..., AssuredWorkloadsServiceTransport]]] = "grpc_asyncio",
-            client_options: Optional[ClientOptions] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            ) -> None:
-        """Instantiates the assured workloads service async client.
-
-        Args:
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-            transport (Optional[Union[str,AssuredWorkloadsServiceTransport,Callable[..., AssuredWorkloadsServiceTransport]]]):
-                The transport to use, or a Callable that constructs and returns a new transport to use.
-                If a Callable is given, it will be called with the same set of initialization
-                arguments as used in the AssuredWorkloadsServiceTransport constructor.
-                If set to None, a transport is chosen automatically.
-            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
-                Custom options for the client.
-
-                1. The ``api_endpoint`` property can be used to override the
-                default endpoint provided by the client when ``transport`` is
-                not explicitly provided. Only if this property is not set and
-                ``transport`` was not explicitly provided, the endpoint is
-                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
-                variable, which can have one of the following values:
-                "always" (always use the default mTLS endpoint), "never" (always
-                use the default regular endpoint) and "auto" (auto-switch to the
-                default mTLS endpoint if client certificate is present; this is
-                the default value).
-
-                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
-                is "true", then the ``client_cert_source`` property can be used
-                to provide a client certificate for mTLS transport. If
-                not provided, the default SSL client certificate will be used if
-                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
-                set, no client certificate will be used.
-
-                3. The ``universe_domain`` property can be used to override the
-                default "googleapis.com" universe. Note that ``api_endpoint``
-                property still takes precedence; and ``universe_domain`` is
-                currently not supported for mTLS.
-
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
-        """
-        self._client = AssuredWorkloadsServiceClient(
-            credentials=credentials,
-            transport=transport,
-            client_options=client_options,
-            client_info=client_info,
-
-        )
-
-        if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG):  # pragma: NO COVER
-            _LOGGER.debug(
-                "Created client `google.cloud.assuredworkloads_v1beta1.AssuredWorkloadsServiceAsyncClient`.",
-                extra = {
-                    "serviceName": "google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService",
-                    "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""),
-                    "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}",
-                    "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(),
-                } if hasattr(self._client._transport, "_credentials") else {
-                    "serviceName": "google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService",
-                    "credentialsType": None,
-                }
-            )
-
-    async def create_workload(self,
-            request: Optional[Union[assuredworkloads.CreateWorkloadRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            workload: Optional[assuredworkloads.Workload] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> operation_async.AsyncOperation:
-        r"""Creates Assured Workload.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import assuredworkloads_v1beta1
-
-            async def sample_create_workload():
-                # Create a client
-                client = assuredworkloads_v1beta1.AssuredWorkloadsServiceAsyncClient()
-
-                # Initialize request argument(s)
-                workload = assuredworkloads_v1beta1.Workload()
-                workload.display_name = "display_name_value"
-                workload.compliance_regime = "AU_REGIONS_AND_US_SUPPORT"
-
-                request = assuredworkloads_v1beta1.CreateWorkloadRequest(
-                    parent="parent_value",
-                    workload=workload,
-                )
-
-                # Make the request
-                operation = client.create_workload(request=request)
-
-                print("Waiting for operation to complete...")
-
-                response = (await operation).result()
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.assuredworkloads_v1beta1.types.CreateWorkloadRequest, dict]]):
-                The request object. Request for creating a workload.
-            parent (:class:`str`):
-                Required. The resource name of the new Workload's
-                parent. Must be of the form
-                ``organizations/{org_id}/locations/{location_id}``.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            workload (:class:`google.cloud.assuredworkloads_v1beta1.types.Workload`):
-                Required. Assured Workload to create
-                This corresponds to the ``workload`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.api_core.operation_async.AsyncOperation:
-                An object representing a long-running operation.
-
-                The result type for the operation will be :class:`google.cloud.assuredworkloads_v1beta1.types.Workload` A Workload object for managing highly regulated workloads of cloud
-                customers.
-
-        """
-        # Create or coerce a protobuf request object.
-        #  - Quick check: If we got a request object, we should *not* have
-        #    gotten any keyword arguments that map to the request.
-        flattened_params = [parent, workload]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        #  - Use the request object if provided (there's no risk of modifying the input as
-        #    there are no flattened fields), or create one.
-        if not isinstance(request, assuredworkloads.CreateWorkloadRequest):
-            request = assuredworkloads.CreateWorkloadRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if parent is not None:
-            request.parent = parent
-        if workload is not None:
-            request.workload = workload
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.create_workload]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
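        # gapic_v1.routing_header.to_grpc_metadata() serializes these key/value
        # pairs into a single "x-goog-request-params" metadata header (for
        # example, "parent=organizations/123/locations/us-central1"), which the
        # backend uses to route the request; the *_field_headers tests earlier
        # in this diff assert on exactly this header.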
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - assuredworkloads.Workload, - metadata_type=assuredworkloads.CreateWorkloadOperationMetadata, - ) - - # Done; return the response. - return response - - async def update_workload(self, - request: Optional[Union[assuredworkloads.UpdateWorkloadRequest, dict]] = None, - *, - workload: Optional[assuredworkloads.Workload] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> assuredworkloads.Workload: - r"""Updates an existing workload. Currently allows updating of - workload display_name and labels. For force updates don't set - etag field in the Workload. Only one update operation per - workload can be in progress. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import assuredworkloads_v1beta1 - - async def sample_update_workload(): - # Create a client - client = assuredworkloads_v1beta1.AssuredWorkloadsServiceAsyncClient() - - # Initialize request argument(s) - workload = assuredworkloads_v1beta1.Workload() - workload.display_name = "display_name_value" - workload.compliance_regime = "AU_REGIONS_AND_US_SUPPORT" - - request = assuredworkloads_v1beta1.UpdateWorkloadRequest( - workload=workload, - ) - - # Make the request - response = await client.update_workload(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.assuredworkloads_v1beta1.types.UpdateWorkloadRequest, dict]]): - The request object. Request for Updating a workload. - workload (:class:`google.cloud.assuredworkloads_v1beta1.types.Workload`): - Required. The workload to update. The workload's - ``name`` field is used to identify the workload to be - updated. Format: - organizations/{org_id}/locations/{location_id}/workloads/{workload_id} - - This corresponds to the ``workload`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. The list of fields to be - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- - Returns: - google.cloud.assuredworkloads_v1beta1.types.Workload: - An Workload object for managing - highly regulated workloads of cloud - customers. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [workload, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, assuredworkloads.UpdateWorkloadRequest): - request = assuredworkloads.UpdateWorkloadRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if workload is not None: - request.workload = workload - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_workload] - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def restrict_allowed_resources(self, - request: Optional[Union[assuredworkloads.RestrictAllowedResourcesRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> assuredworkloads.RestrictAllowedResourcesResponse: - r"""Restrict the list of resources allowed in the - Workload environment. The current list of allowed - products can be found at - https://cloud.google.com/assured-workloads/docs/supported-products - In addition to assuredworkloads.workload.update - permission, the user should also have - orgpolicy.policy.set permission on the folder resource - to use this functionality. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import assuredworkloads_v1beta1 - - async def sample_restrict_allowed_resources(): - # Create a client - client = assuredworkloads_v1beta1.AssuredWorkloadsServiceAsyncClient() - - # Initialize request argument(s) - request = assuredworkloads_v1beta1.RestrictAllowedResourcesRequest( - name="name_value", - restriction_type="ALLOW_COMPLIANT_RESOURCES", - ) - - # Make the request - response = await client.restrict_allowed_resources(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.assuredworkloads_v1beta1.types.RestrictAllowedResourcesRequest, dict]]): - The request object. Request for restricting list of - available resources in Workload - environment. 
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.assuredworkloads_v1beta1.types.RestrictAllowedResourcesResponse: - Response for restricting the list of - allowed resources. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, assuredworkloads.RestrictAllowedResourcesRequest): - request = assuredworkloads.RestrictAllowedResourcesRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.restrict_allowed_resources] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_workload(self, - request: Optional[Union[assuredworkloads.DeleteWorkloadRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes the workload. Make sure that workload's direct children - are already in a deleted state, otherwise the request will fail - with a FAILED_PRECONDITION error. In addition to - assuredworkloads.workload.delete permission, the user should - also have orgpolicy.policy.set permission on the deleted folder - to remove Assured Workloads OrgPolicies. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import assuredworkloads_v1beta1 - - async def sample_delete_workload(): - # Create a client - client = assuredworkloads_v1beta1.AssuredWorkloadsServiceAsyncClient() - - # Initialize request argument(s) - request = assuredworkloads_v1beta1.DeleteWorkloadRequest( - name="name_value", - ) - - # Make the request - await client.delete_workload(request=request) - - Args: - request (Optional[Union[google.cloud.assuredworkloads_v1beta1.types.DeleteWorkloadRequest, dict]]): - The request object. Request for deleting a Workload. - name (:class:`str`): - Required. The ``name`` field is used to identify the - workload. Format: - organizations/{org_id}/locations/{location_id}/workloads/{workload_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, assuredworkloads.DeleteWorkloadRequest): - request = assuredworkloads.DeleteWorkloadRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_workload] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def get_workload(self, - request: Optional[Union[assuredworkloads.GetWorkloadRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> assuredworkloads.Workload: - r"""Gets Assured Workload associated with a CRM Node - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import assuredworkloads_v1beta1 - - async def sample_get_workload(): - # Create a client - client = assuredworkloads_v1beta1.AssuredWorkloadsServiceAsyncClient() - - # Initialize request argument(s) - request = assuredworkloads_v1beta1.GetWorkloadRequest( - name="name_value", - ) - - # Make the request - response = await client.get_workload(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.assuredworkloads_v1beta1.types.GetWorkloadRequest, dict]]): - The request object. Request for fetching a workload. - name (:class:`str`): - Required. The resource name of the Workload to fetch. - This is the workloads's relative path in the API, - formatted as - "organizations/{organization_id}/locations/{location_id}/workloads/{workload_id}". 
- For example, - "organizations/123/locations/us-east1/workloads/assured-workload-1". - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.assuredworkloads_v1beta1.types.Workload: - An Workload object for managing - highly regulated workloads of cloud - customers. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, assuredworkloads.GetWorkloadRequest): - request = assuredworkloads.GetWorkloadRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_workload] - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def analyze_workload_move(self, - request: Optional[Union[assuredworkloads.AnalyzeWorkloadMoveRequest, dict]] = None, - *, - project: Optional[str] = None, - target: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> assuredworkloads.AnalyzeWorkloadMoveResponse: - r"""Analyze if the source Assured Workloads can be moved - to the target Assured Workload - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import assuredworkloads_v1beta1 - - async def sample_analyze_workload_move(): - # Create a client - client = assuredworkloads_v1beta1.AssuredWorkloadsServiceAsyncClient() - - # Initialize request argument(s) - request = assuredworkloads_v1beta1.AnalyzeWorkloadMoveRequest( - source="source_value", - target="target_value", - ) - - # Make the request - response = await client.analyze_workload_move(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.assuredworkloads_v1beta1.types.AnalyzeWorkloadMoveRequest, dict]]): - The request object. A request to analyze a hypothetical - move of a source project or - project-based workload to a target - (destination) folder-based workload. - project (:class:`str`): - The source type is a project. Specify the project's - relative resource name, formatted as either a project - number or a project ID: "projects/{PROJECT_NUMBER}" or - "projects/{PROJECT_ID}" For example: - "projects/951040570662" when specifying a project - number, or "projects/my-project-123" when specifying a - project ID. - - This corresponds to the ``project`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - target (:class:`str`): - Required. The resource ID of the folder-based - destination workload. This workload is where the source - project will hypothetically be moved to. Specify the - workload's relative resource name, formatted as: - "organizations/{ORGANIZATION_ID}/locations/{LOCATION_ID}/workloads/{WORKLOAD_ID}" - For example: - "organizations/123/locations/us-east1/workloads/assured-workload-2" - - This corresponds to the ``target`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.assuredworkloads_v1beta1.types.AnalyzeWorkloadMoveResponse: - A response that includes the analysis - of the hypothetical resource move. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [project, target] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, assuredworkloads.AnalyzeWorkloadMoveRequest): - request = assuredworkloads.AnalyzeWorkloadMoveRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
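
The ``isinstance`` coercion a few lines above is what lets every method accept either a typed request or a plain dict: proto-plus request types validate and convert a mapping field by field. A small sketch of the equivalence (field values are illustrative):

.. code-block:: python

    from google.cloud.assuredworkloads_v1beta1.types import assuredworkloads

    as_dict = assuredworkloads.AnalyzeWorkloadMoveRequest(
        {"project": "projects/951040570662",
         "target": "organizations/123/locations/us-east1/workloads/assured-workload-2"}
    )
    as_kwargs = assuredworkloads.AnalyzeWorkloadMoveRequest(
        project="projects/951040570662",
        target="organizations/123/locations/us-east1/workloads/assured-workload-2",
    )
    assert as_dict == as_kwargs  # proto-plus messages compare by value
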
- if project is not None: - request.project = project - if target is not None: - request.target = target - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.analyze_workload_move] - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_workloads(self, - request: Optional[Union[assuredworkloads.ListWorkloadsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListWorkloadsAsyncPager: - r"""Lists Assured Workloads under a CRM Node. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import assuredworkloads_v1beta1 - - async def sample_list_workloads(): - # Create a client - client = assuredworkloads_v1beta1.AssuredWorkloadsServiceAsyncClient() - - # Initialize request argument(s) - request = assuredworkloads_v1beta1.ListWorkloadsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_workloads(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.assuredworkloads_v1beta1.types.ListWorkloadsRequest, dict]]): - The request object. Request for fetching workloads in an - organization. - parent (:class:`str`): - Required. Parent Resource to list workloads from. Must - be of the form - ``organizations/{org_id}/locations/{location}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.assuredworkloads_v1beta1.services.assured_workloads_service.pagers.ListWorkloadsAsyncPager: - Response of ListWorkloads endpoint. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
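
The check that follows enforces that ``request`` and the flattened convenience arguments are mutually exclusive; callers pick one style or the other. A usage sketch for the async pager, assuming an existing ``client`` (resource names are illustrative):

.. code-block:: python

    async def demo(client):
        # Flattened-argument style; a request object or dict would work equally well.
        pager = await client.list_workloads(parent="organizations/123/locations/us-east1")
        async for workload in pager:  # additional pages are fetched lazily
            print(workload.name)
        # Supplying both styles at once raises ValueError before any RPC is sent:
        # await client.list_workloads(request={...}, parent="...")
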
- flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, assuredworkloads.ListWorkloadsRequest): - request = assuredworkloads.ListWorkloadsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_workloads] - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListWorkloadsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
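
Unlike the service RPCs above, the operations mixins take raw ``operations_pb2`` messages rather than proto-plus types, which is why a dict is expanded with ``**`` instead of being passed to the constructor positionally. A sketch of building such a request directly (the name and filter expression are illustrative assumptions):

.. code-block:: python

    from google.longrunning import operations_pb2

    request = operations_pb2.ListOperationsRequest(
        name="organizations/123/locations/us-east1",
        filter="done:true",  # assumed LRO filter for completed operations
        page_size=50,
    )
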
- return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def __aenter__(self) -> "AssuredWorkloadsServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "AssuredWorkloadsServiceAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/client.py b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/client.py deleted file mode 100644 index 43c01f1eb46b..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/client.py +++ /dev/null @@ -1,1516 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.assuredworkloads_v1beta1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.assuredworkloads_v1beta1.services.assured_workloads_service import pagers -from google.cloud.assuredworkloads_v1beta1.types import assuredworkloads -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import AssuredWorkloadsServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import AssuredWorkloadsServiceGrpcTransport -from .transports.grpc_asyncio import AssuredWorkloadsServiceGrpcAsyncIOTransport -from .transports.rest import AssuredWorkloadsServiceRestTransport - - -class AssuredWorkloadsServiceClientMeta(type): - """Metaclass for the AssuredWorkloadsService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[AssuredWorkloadsServiceTransport]] - _transport_registry["grpc"] = AssuredWorkloadsServiceGrpcTransport - _transport_registry["grpc_asyncio"] = AssuredWorkloadsServiceGrpcAsyncIOTransport - _transport_registry["rest"] = AssuredWorkloadsServiceRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[AssuredWorkloadsServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class AssuredWorkloadsServiceClient(metaclass=AssuredWorkloadsServiceClientMeta): - """Service to manage AssuredWorkloads.""" - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. 
-
-        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
-        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
-        Args:
-            api_endpoint (Optional[str]): the api endpoint to convert.
-        Returns:
-            str: converted mTLS api endpoint.
-        """
-        if not api_endpoint:
-            return api_endpoint
-
-        mtls_endpoint_re = re.compile(
-            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
-        )
-
-        m = mtls_endpoint_re.match(api_endpoint)
-        name, mtls, sandbox, googledomain = m.groups()
-        if mtls or not googledomain:
-            return api_endpoint
-
-        if sandbox:
-            return api_endpoint.replace(
-                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
-            )
-
-        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
-
-    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
-    DEFAULT_ENDPOINT = "assuredworkloads.googleapis.com"
-    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
-        DEFAULT_ENDPOINT
-    )
-
-    _DEFAULT_ENDPOINT_TEMPLATE = "assuredworkloads.{UNIVERSE_DOMAIN}"
-    _DEFAULT_UNIVERSE = "googleapis.com"
-
-    @classmethod
-    def from_service_account_info(cls, info: dict, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            info.
-
-        Args:
-            info (dict): The service account private key info.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            AssuredWorkloadsServiceClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_info(info)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    @classmethod
-    def from_service_account_file(cls, filename: str, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            file.
-
-        Args:
-            filename (str): The path to the service account private key json
-                file.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            AssuredWorkloadsServiceClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_file(
-            filename)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    from_service_account_json = from_service_account_file
-
-    @property
-    def transport(self) -> AssuredWorkloadsServiceTransport:
-        """Returns the transport used by the client instance.
-
-        Returns:
-            AssuredWorkloadsServiceTransport: The transport used by the client
-                instance.
- """ - return self._transport - - @staticmethod - def workload_path(organization: str,location: str,workload: str,) -> str: - """Returns a fully-qualified workload string.""" - return "organizations/{organization}/locations/{location}/workloads/{workload}".format(organization=organization, location=location, workload=workload, ) - - @staticmethod - def parse_workload_path(path: str) -> Dict[str,str]: - """Parses a workload path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)/locations/(?P.+?)/workloads/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Deprecated. Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. 
-        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
-        default mTLS endpoint; if the environment variable is "never", use the default API
-        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
-        use the default API endpoint.
-
-        More details can be found at https://google.aip.dev/auth/4114.
-
-        Args:
-            client_options (google.api_core.client_options.ClientOptions): Custom options for the
-                client. Only the `api_endpoint` and `client_cert_source` properties may be used
-                in this method.
-
-        Returns:
-            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
-                client cert source to use.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
-        """
-
-        warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
-            DeprecationWarning)
-        if client_options is None:
-            client_options = client_options_lib.ClientOptions()
-        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
-        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
-        if use_client_cert not in ("true", "false"):
-            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
-        if use_mtls_endpoint not in ("auto", "never", "always"):
-            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
-
-        # Figure out the client cert source to use.
-        client_cert_source = None
-        if use_client_cert == "true":
-            if client_options.client_cert_source:
-                client_cert_source = client_options.client_cert_source
-            elif mtls.has_default_client_cert_source():
-                client_cert_source = mtls.default_client_cert_source()
-
-        # Figure out which api endpoint to use.
-        if client_options.api_endpoint is not None:
-            api_endpoint = client_options.api_endpoint
-        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
-            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
-        else:
-            api_endpoint = cls.DEFAULT_ENDPOINT
-
-        return api_endpoint, client_cert_source
-
-    @staticmethod
-    def _read_environment_variables():
-        """Returns the environment variables used by the client.
-
-        Returns:
-            Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
-            GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.
-
-        Raises:
-            ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
-                any of ["true", "false"].
-            google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
-                is not any of ["auto", "never", "always"].
-        """
-        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower()
-        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower()
-        universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN")
-        if use_client_cert not in ("true", "false"):
-            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
-        if use_mtls_endpoint not in ("auto", "never", "always"):
-            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
-        return use_client_cert == "true", use_mtls_endpoint, universe_domain_env
-
-    @staticmethod
-    def _get_client_cert_source(provided_cert_source, use_cert_flag):
-        """Return the client cert source to be used by the client.
-
-        Args:
-            provided_cert_source (bytes): The client certificate source provided.
- use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = AssuredWorkloadsServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = AssuredWorkloadsServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = AssuredWorkloadsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. - """ - universe_domain = AssuredWorkloadsServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
- """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, AssuredWorkloadsServiceTransport, Callable[..., AssuredWorkloadsServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the assured workloads service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,AssuredWorkloadsServiceTransport,Callable[..., AssuredWorkloadsServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the AssuredWorkloadsServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) - - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = AssuredWorkloadsServiceClient._read_environment_variables() - self._client_cert_source = AssuredWorkloadsServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = AssuredWorkloadsServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER - # Setup logging. - client_logging.initialize_logging() - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, AssuredWorkloadsServiceTransport) - if transport_provided: - # transport is a AssuredWorkloadsServiceTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = cast(AssuredWorkloadsServiceTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - AssuredWorkloadsServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[AssuredWorkloadsServiceTransport], Callable[..., AssuredWorkloadsServiceTransport]] = ( - AssuredWorkloadsServiceClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., AssuredWorkloadsServiceTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.assuredworkloads_v1beta1.AssuredWorkloadsServiceClient`.", - extra = { - "serviceName": "google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService", - "credentialsType": None, - } - ) - - def create_workload(self, - request: Optional[Union[assuredworkloads.CreateWorkloadRequest, dict]] = None, - *, - parent: Optional[str] = None, - workload: Optional[assuredworkloads.Workload] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Creates Assured Workload. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import assuredworkloads_v1beta1 - - def sample_create_workload(): - # Create a client - client = assuredworkloads_v1beta1.AssuredWorkloadsServiceClient() - - # Initialize request argument(s) - workload = assuredworkloads_v1beta1.Workload() - workload.display_name = "display_name_value" - workload.compliance_regime = "AU_REGIONS_AND_US_SUPPORT" - - request = assuredworkloads_v1beta1.CreateWorkloadRequest( - parent="parent_value", - workload=workload, - ) - - # Make the request - operation = client.create_workload(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.assuredworkloads_v1beta1.types.CreateWorkloadRequest, dict]): - The request object. Request for creating a workload. - parent (str): - Required. The resource name of the new Workload's - parent. Must be of the form - ``organizations/{org_id}/locations/{location_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - workload (google.cloud.assuredworkloads_v1beta1.types.Workload): - Required. Assured Workload to create - This corresponds to the ``workload`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.assuredworkloads_v1beta1.types.Workload` An Workload object for managing highly regulated workloads of cloud - customers. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, workload] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, assuredworkloads.CreateWorkloadRequest): - request = assuredworkloads.CreateWorkloadRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if workload is not None: - request.workload = workload - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_workload] - - # Certain fields should be provided within the metadata header; - # add these here. 
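
Since the synchronous ``create_workload`` ultimately returns a ``google.api_core.operation.Operation`` (wrapped just below), callers poll the future rather than receiving a ``Workload`` directly. A minimal consumption sketch, assuming ``client`` and ``request`` are already constructed:

.. code-block:: python

    op = client.create_workload(request=request)
    workload = op.result(timeout=300)  # blocks until the LRO finishes; re-raises failures
    print(workload.name)
    # While the operation is running, op.metadata carries
    # CreateWorkloadOperationMetadata.
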
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - assuredworkloads.Workload, - metadata_type=assuredworkloads.CreateWorkloadOperationMetadata, - ) - - # Done; return the response. - return response - - def update_workload(self, - request: Optional[Union[assuredworkloads.UpdateWorkloadRequest, dict]] = None, - *, - workload: Optional[assuredworkloads.Workload] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> assuredworkloads.Workload: - r"""Updates an existing workload. Currently allows updating of - workload display_name and labels. For force updates don't set - etag field in the Workload. Only one update operation per - workload can be in progress. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import assuredworkloads_v1beta1 - - def sample_update_workload(): - # Create a client - client = assuredworkloads_v1beta1.AssuredWorkloadsServiceClient() - - # Initialize request argument(s) - workload = assuredworkloads_v1beta1.Workload() - workload.display_name = "display_name_value" - workload.compliance_regime = "AU_REGIONS_AND_US_SUPPORT" - - request = assuredworkloads_v1beta1.UpdateWorkloadRequest( - workload=workload, - ) - - # Make the request - response = client.update_workload(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.assuredworkloads_v1beta1.types.UpdateWorkloadRequest, dict]): - The request object. Request for Updating a workload. - workload (google.cloud.assuredworkloads_v1beta1.types.Workload): - Required. The workload to update. The workload's - ``name`` field is used to identify the workload to be - updated. Format: - organizations/{org_id}/locations/{location_id}/workloads/{workload_id} - - This corresponds to the ``workload`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. The list of fields to be - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- - Returns: - google.cloud.assuredworkloads_v1beta1.types.Workload: - An Workload object for managing - highly regulated workloads of cloud - customers. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [workload, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, assuredworkloads.UpdateWorkloadRequest): - request = assuredworkloads.UpdateWorkloadRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if workload is not None: - request.workload = workload - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_workload] - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def restrict_allowed_resources(self, - request: Optional[Union[assuredworkloads.RestrictAllowedResourcesRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> assuredworkloads.RestrictAllowedResourcesResponse: - r"""Restrict the list of resources allowed in the - Workload environment. The current list of allowed - products can be found at - https://cloud.google.com/assured-workloads/docs/supported-products - In addition to assuredworkloads.workload.update - permission, the user should also have - orgpolicy.policy.set permission on the folder resource - to use this functionality. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import assuredworkloads_v1beta1 - - def sample_restrict_allowed_resources(): - # Create a client - client = assuredworkloads_v1beta1.AssuredWorkloadsServiceClient() - - # Initialize request argument(s) - request = assuredworkloads_v1beta1.RestrictAllowedResourcesRequest( - name="name_value", - restriction_type="ALLOW_COMPLIANT_RESOURCES", - ) - - # Make the request - response = client.restrict_allowed_resources(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.assuredworkloads_v1beta1.types.RestrictAllowedResourcesRequest, dict]): - The request object. Request for restricting list of - available resources in Workload - environment. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.assuredworkloads_v1beta1.types.RestrictAllowedResourcesResponse: - Response for restricting the list of - allowed resources. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, assuredworkloads.RestrictAllowedResourcesRequest): - request = assuredworkloads.RestrictAllowedResourcesRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.restrict_allowed_resources] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_workload(self, - request: Optional[Union[assuredworkloads.DeleteWorkloadRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes the workload. Make sure that workload's direct children - are already in a deleted state, otherwise the request will fail - with a FAILED_PRECONDITION error. In addition to - assuredworkloads.workload.delete permission, the user should - also have orgpolicy.policy.set permission on the deleted folder - to remove Assured Workloads OrgPolicies. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import assuredworkloads_v1beta1 - - def sample_delete_workload(): - # Create a client - client = assuredworkloads_v1beta1.AssuredWorkloadsServiceClient() - - # Initialize request argument(s) - request = assuredworkloads_v1beta1.DeleteWorkloadRequest( - name="name_value", - ) - - # Make the request - client.delete_workload(request=request) - - Args: - request (Union[google.cloud.assuredworkloads_v1beta1.types.DeleteWorkloadRequest, dict]): - The request object. Request for deleting a Workload. - name (str): - Required. The ``name`` field is used to identify the - workload. Format: - organizations/{org_id}/locations/{location_id}/workloads/{workload_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, assuredworkloads.DeleteWorkloadRequest): - request = assuredworkloads.DeleteWorkloadRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_workload] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def get_workload(self, - request: Optional[Union[assuredworkloads.GetWorkloadRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> assuredworkloads.Workload: - r"""Gets Assured Workload associated with a CRM Node - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import assuredworkloads_v1beta1 - - def sample_get_workload(): - # Create a client - client = assuredworkloads_v1beta1.AssuredWorkloadsServiceClient() - - # Initialize request argument(s) - request = assuredworkloads_v1beta1.GetWorkloadRequest( - name="name_value", - ) - - # Make the request - response = client.get_workload(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.assuredworkloads_v1beta1.types.GetWorkloadRequest, dict]): - The request object. Request for fetching a workload. - name (str): - Required. The resource name of the Workload to fetch. - This is the workloads's relative path in the API, - formatted as - "organizations/{organization_id}/locations/{location_id}/workloads/{workload_id}". - For example, - "organizations/123/locations/us-east1/workloads/assured-workload-1". - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.assuredworkloads_v1beta1.types.Workload: - An Workload object for managing - highly regulated workloads of cloud - customers. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, assuredworkloads.GetWorkloadRequest): - request = assuredworkloads.GetWorkloadRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_workload] - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def analyze_workload_move(self, - request: Optional[Union[assuredworkloads.AnalyzeWorkloadMoveRequest, dict]] = None, - *, - project: Optional[str] = None, - target: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> assuredworkloads.AnalyzeWorkloadMoveResponse: - r"""Analyze if the source Assured Workloads can be moved - to the target Assured Workload - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import assuredworkloads_v1beta1 - - def sample_analyze_workload_move(): - # Create a client - client = assuredworkloads_v1beta1.AssuredWorkloadsServiceClient() - - # Initialize request argument(s) - request = assuredworkloads_v1beta1.AnalyzeWorkloadMoveRequest( - source="source_value", - target="target_value", - ) - - # Make the request - response = client.analyze_workload_move(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.assuredworkloads_v1beta1.types.AnalyzeWorkloadMoveRequest, dict]): - The request object. A request to analyze a hypothetical - move of a source project or - project-based workload to a target - (destination) folder-based workload. 
- project (str): - The source type is a project. Specify the project's - relative resource name, formatted as either a project - number or a project ID: "projects/{PROJECT_NUMBER}" or - "projects/{PROJECT_ID}" For example: - "projects/951040570662" when specifying a project - number, or "projects/my-project-123" when specifying a - project ID. - - This corresponds to the ``project`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - target (str): - Required. The resource ID of the folder-based - destination workload. This workload is where the source - project will hypothetically be moved to. Specify the - workload's relative resource name, formatted as: - "organizations/{ORGANIZATION_ID}/locations/{LOCATION_ID}/workloads/{WORKLOAD_ID}" - For example: - "organizations/123/locations/us-east1/workloads/assured-workload-2" - - This corresponds to the ``target`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.assuredworkloads_v1beta1.types.AnalyzeWorkloadMoveResponse: - A response that includes the analysis - of the hypothetical resource move. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [project, target] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, assuredworkloads.AnalyzeWorkloadMoveRequest): - request = assuredworkloads.AnalyzeWorkloadMoveRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project is not None: - request.project = project - if target is not None: - request.target = target - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.analyze_workload_move] - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_workloads(self, - request: Optional[Union[assuredworkloads.ListWorkloadsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListWorkloadsPager: - r"""Lists Assured Workloads under a CRM Node. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import assuredworkloads_v1beta1 - - def sample_list_workloads(): - # Create a client - client = assuredworkloads_v1beta1.AssuredWorkloadsServiceClient() - - # Initialize request argument(s) - request = assuredworkloads_v1beta1.ListWorkloadsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_workloads(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.assuredworkloads_v1beta1.types.ListWorkloadsRequest, dict]): - The request object. Request for fetching workloads in an - organization. - parent (str): - Required. Parent Resource to list workloads from. Must - be of the form - ``organizations/{org_id}/locations/{location}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.assuredworkloads_v1beta1.services.assured_workloads_service.pagers.ListWorkloadsPager: - Response of ListWorkloads endpoint. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, assuredworkloads.ListWorkloadsRequest): - request = assuredworkloads.ListWorkloadsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_workloads] - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListWorkloadsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def __enter__(self) -> "AssuredWorkloadsServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. 
- # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "AssuredWorkloadsServiceClient", -) diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/pagers.py b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/pagers.py deleted file mode 100644 index 000927d13716..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/pagers.py +++ /dev/null @@ -1,166 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.assuredworkloads_v1beta1.types import assuredworkloads - - -class ListWorkloadsPager: - """A pager for iterating through ``list_workloads`` requests. - - This class thinly wraps an initial - :class:`google.cloud.assuredworkloads_v1beta1.types.ListWorkloadsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``workloads`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListWorkloads`` requests and continue to iterate - through the ``workloads`` field on the - corresponding responses. - - All the usual :class:`google.cloud.assuredworkloads_v1beta1.types.ListWorkloadsResponse` - attributes are available on the pager. 
If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., assuredworkloads.ListWorkloadsResponse], - request: assuredworkloads.ListWorkloadsRequest, - response: assuredworkloads.ListWorkloadsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.assuredworkloads_v1beta1.types.ListWorkloadsRequest): - The initial request object. - response (google.cloud.assuredworkloads_v1beta1.types.ListWorkloadsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = assuredworkloads.ListWorkloadsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[assuredworkloads.ListWorkloadsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[assuredworkloads.Workload]: - for page in self.pages: - yield from page.workloads - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListWorkloadsAsyncPager: - """A pager for iterating through ``list_workloads`` requests. - - This class thinly wraps an initial - :class:`google.cloud.assuredworkloads_v1beta1.types.ListWorkloadsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``workloads`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListWorkloads`` requests and continue to iterate - through the ``workloads`` field on the - corresponding responses. - - All the usual :class:`google.cloud.assuredworkloads_v1beta1.types.ListWorkloadsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[assuredworkloads.ListWorkloadsResponse]], - request: assuredworkloads.ListWorkloadsRequest, - response: assuredworkloads.ListWorkloadsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.assuredworkloads_v1beta1.types.ListWorkloadsRequest): - The initial request object. 
- response (google.cloud.assuredworkloads_v1beta1.types.ListWorkloadsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = assuredworkloads.ListWorkloadsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[assuredworkloads.ListWorkloadsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[assuredworkloads.Workload]: - async def async_generator(): - async for page in self.pages: - for response in page.workloads: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/README.rst b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/README.rst deleted file mode 100644 index 30f56618bd12..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`AssuredWorkloadsServiceTransport` is the ABC for all transports. -- public child `AssuredWorkloadsServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `AssuredWorkloadsServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseAssuredWorkloadsServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `AssuredWorkloadsServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/__init__.py b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/__init__.py deleted file mode 100644 index 175c162f0257..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import AssuredWorkloadsServiceTransport -from .grpc import AssuredWorkloadsServiceGrpcTransport -from .grpc_asyncio import AssuredWorkloadsServiceGrpcAsyncIOTransport -from .rest import AssuredWorkloadsServiceRestTransport -from .rest import AssuredWorkloadsServiceRestInterceptor - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[AssuredWorkloadsServiceTransport]] -_transport_registry['grpc'] = AssuredWorkloadsServiceGrpcTransport -_transport_registry['grpc_asyncio'] = AssuredWorkloadsServiceGrpcAsyncIOTransport -_transport_registry['rest'] = AssuredWorkloadsServiceRestTransport - -__all__ = ( - 'AssuredWorkloadsServiceTransport', - 'AssuredWorkloadsServiceGrpcTransport', - 'AssuredWorkloadsServiceGrpcAsyncIOTransport', - 'AssuredWorkloadsServiceRestTransport', - 'AssuredWorkloadsServiceRestInterceptor', -) diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/base.py b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/base.py deleted file mode 100644 index 2f8d36ace412..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/base.py +++ /dev/null @@ -1,310 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.assuredworkloads_v1beta1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.assuredworkloads_v1beta1.types import assuredworkloads -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class AssuredWorkloadsServiceTransport(abc.ABC): - """Abstract transport class for AssuredWorkloadsService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'assuredworkloads.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'assuredworkloads.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. 
- if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.create_workload: gapic_v1.method.wrap_method( - self.create_workload, - default_timeout=60.0, - client_info=client_info, - ), - self.update_workload: gapic_v1.method.wrap_method( - self.update_workload, - default_timeout=60.0, - client_info=client_info, - ), - self.restrict_allowed_resources: gapic_v1.method.wrap_method( - self.restrict_allowed_resources, - default_timeout=None, - client_info=client_info, - ), - self.delete_workload: gapic_v1.method.wrap_method( - self.delete_workload, - default_retry=retries.Retry( - initial=0.2, - maximum=30.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_workload: gapic_v1.method.wrap_method( - self.get_workload, - default_retry=retries.Retry( - initial=0.2, - maximum=30.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.analyze_workload_move: gapic_v1.method.wrap_method( - self.analyze_workload_move, - default_retry=retries.Retry( - initial=0.2, - maximum=30.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_workloads: gapic_v1.method.wrap_method( - self.list_workloads, - default_retry=retries.Retry( - initial=0.2, - maximum=30.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_operation: gapic_v1.method.wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: gapic_v1.method.wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
- """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def create_workload(self) -> Callable[ - [assuredworkloads.CreateWorkloadRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def update_workload(self) -> Callable[ - [assuredworkloads.UpdateWorkloadRequest], - Union[ - assuredworkloads.Workload, - Awaitable[assuredworkloads.Workload] - ]]: - raise NotImplementedError() - - @property - def restrict_allowed_resources(self) -> Callable[ - [assuredworkloads.RestrictAllowedResourcesRequest], - Union[ - assuredworkloads.RestrictAllowedResourcesResponse, - Awaitable[assuredworkloads.RestrictAllowedResourcesResponse] - ]]: - raise NotImplementedError() - - @property - def delete_workload(self) -> Callable[ - [assuredworkloads.DeleteWorkloadRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def get_workload(self) -> Callable[ - [assuredworkloads.GetWorkloadRequest], - Union[ - assuredworkloads.Workload, - Awaitable[assuredworkloads.Workload] - ]]: - raise NotImplementedError() - - @property - def analyze_workload_move(self) -> Callable[ - [assuredworkloads.AnalyzeWorkloadMoveRequest], - Union[ - assuredworkloads.AnalyzeWorkloadMoveResponse, - Awaitable[assuredworkloads.AnalyzeWorkloadMoveResponse] - ]]: - raise NotImplementedError() - - @property - def list_workloads(self) -> Callable[ - [assuredworkloads.ListWorkloadsRequest], - Union[ - assuredworkloads.ListWorkloadsResponse, - Awaitable[assuredworkloads.ListWorkloadsResponse] - ]]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'AssuredWorkloadsServiceTransport', -) diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/grpc.py b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/grpc.py deleted file mode 100644 index 78f45bd1505c..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/grpc.py +++ /dev/null @@ -1,575 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import json -import logging as std_logging -import pickle -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore - -from google.cloud.assuredworkloads_v1beta1.types import assuredworkloads -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import AssuredWorkloadsServiceTransport, DEFAULT_CLIENT_INFO - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER - def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService", - "rpcName": client_call_details.method, - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - - response = continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = response.trailing_metadata() - # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = response.result() - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response for {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService", - "rpcName": client_call_details.method, - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class AssuredWorkloadsServiceGrpcTransport(AssuredWorkloadsServiceTransport): - """gRPC backend transport for AssuredWorkloadsService. - - Service to manage AssuredWorkloads. 
- - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'assuredworkloads.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'assuredworkloads.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if a ``channel`` instance is provided. - channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) - - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'assuredworkloads.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. 
If
-                none are specified, the client will attempt to ascertain
-                the credentials from the environment.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is mutually exclusive with credentials.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            kwargs (Optional[dict]): Keyword arguments, which are passed to the
-                channel creation.
-        Returns:
-            grpc.Channel: A gRPC channel object.
-
-        Raises:
-            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
-                and ``credentials_file`` are passed.
-        """
-
-        return grpc_helpers.create_channel(
-            host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            quota_project_id=quota_project_id,
-            default_scopes=cls.AUTH_SCOPES,
-            scopes=scopes,
-            default_host=cls.DEFAULT_HOST,
-            **kwargs
-        )
-
-    @property
-    def grpc_channel(self) -> grpc.Channel:
-        """Return the channel designed to connect to this service.
-        """
-        return self._grpc_channel
-
-    @property
-    def operations_client(self) -> operations_v1.OperationsClient:
-        """Create the client designed to process long-running operations.
-
-        This property caches on the instance; repeated calls return the same
-        client.
-        """
-        # Quick check: Only create a new client if we do not already have one.
-        if self._operations_client is None:
-            self._operations_client = operations_v1.OperationsClient(
-                self._logged_channel
-            )
-
-        # Return the client from cache.
-        return self._operations_client
-
-    @property
-    def create_workload(self) -> Callable[
-            [assuredworkloads.CreateWorkloadRequest],
-            operations_pb2.Operation]:
-        r"""Return a callable for the create workload method over gRPC.
-
-        Creates Assured Workload.
-
-        Returns:
-            Callable[[~.CreateWorkloadRequest],
-                    ~.Operation]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'create_workload' not in self._stubs:
-            self._stubs['create_workload'] = self._logged_channel.unary_unary(
-                '/google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService/CreateWorkload',
-                request_serializer=assuredworkloads.CreateWorkloadRequest.serialize,
-                response_deserializer=operations_pb2.Operation.FromString,
-            )
-        return self._stubs['create_workload']
-
-    @property
-    def update_workload(self) -> Callable[
-            [assuredworkloads.UpdateWorkloadRequest],
-            assuredworkloads.Workload]:
-        r"""Return a callable for the update workload method over gRPC.
-
-        Updates an existing workload. Currently allows updating of
-        workload display_name and labels. For force updates don't set
-        etag field in the Workload. Only one update operation per
-        workload can be in progress.
-
-        Returns:
-            Callable[[~.UpdateWorkloadRequest],
-                    ~.Workload]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
- if 'update_workload' not in self._stubs: - self._stubs['update_workload'] = self._logged_channel.unary_unary( - '/google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService/UpdateWorkload', - request_serializer=assuredworkloads.UpdateWorkloadRequest.serialize, - response_deserializer=assuredworkloads.Workload.deserialize, - ) - return self._stubs['update_workload'] - - @property - def restrict_allowed_resources(self) -> Callable[ - [assuredworkloads.RestrictAllowedResourcesRequest], - assuredworkloads.RestrictAllowedResourcesResponse]: - r"""Return a callable for the restrict allowed resources method over gRPC. - - Restrict the list of resources allowed in the - Workload environment. The current list of allowed - products can be found at - https://cloud.google.com/assured-workloads/docs/supported-products - In addition to assuredworkloads.workload.update - permission, the user should also have - orgpolicy.policy.set permission on the folder resource - to use this functionality. - - Returns: - Callable[[~.RestrictAllowedResourcesRequest], - ~.RestrictAllowedResourcesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'restrict_allowed_resources' not in self._stubs: - self._stubs['restrict_allowed_resources'] = self._logged_channel.unary_unary( - '/google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService/RestrictAllowedResources', - request_serializer=assuredworkloads.RestrictAllowedResourcesRequest.serialize, - response_deserializer=assuredworkloads.RestrictAllowedResourcesResponse.deserialize, - ) - return self._stubs['restrict_allowed_resources'] - - @property - def delete_workload(self) -> Callable[ - [assuredworkloads.DeleteWorkloadRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete workload method over gRPC. - - Deletes the workload. Make sure that workload's direct children - are already in a deleted state, otherwise the request will fail - with a FAILED_PRECONDITION error. In addition to - assuredworkloads.workload.delete permission, the user should - also have orgpolicy.policy.set permission on the deleted folder - to remove Assured Workloads OrgPolicies. - - Returns: - Callable[[~.DeleteWorkloadRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_workload' not in self._stubs: - self._stubs['delete_workload'] = self._logged_channel.unary_unary( - '/google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService/DeleteWorkload', - request_serializer=assuredworkloads.DeleteWorkloadRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_workload'] - - @property - def get_workload(self) -> Callable[ - [assuredworkloads.GetWorkloadRequest], - assuredworkloads.Workload]: - r"""Return a callable for the get workload method over gRPC. - - Gets Assured Workload associated with a CRM Node - - Returns: - Callable[[~.GetWorkloadRequest], - ~.Workload]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_workload' not in self._stubs: - self._stubs['get_workload'] = self._logged_channel.unary_unary( - '/google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService/GetWorkload', - request_serializer=assuredworkloads.GetWorkloadRequest.serialize, - response_deserializer=assuredworkloads.Workload.deserialize, - ) - return self._stubs['get_workload'] - - @property - def analyze_workload_move(self) -> Callable[ - [assuredworkloads.AnalyzeWorkloadMoveRequest], - assuredworkloads.AnalyzeWorkloadMoveResponse]: - r"""Return a callable for the analyze workload move method over gRPC. - - Analyze if the source Assured Workloads can be moved - to the target Assured Workload - - Returns: - Callable[[~.AnalyzeWorkloadMoveRequest], - ~.AnalyzeWorkloadMoveResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'analyze_workload_move' not in self._stubs: - self._stubs['analyze_workload_move'] = self._logged_channel.unary_unary( - '/google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService/AnalyzeWorkloadMove', - request_serializer=assuredworkloads.AnalyzeWorkloadMoveRequest.serialize, - response_deserializer=assuredworkloads.AnalyzeWorkloadMoveResponse.deserialize, - ) - return self._stubs['analyze_workload_move'] - - @property - def list_workloads(self) -> Callable[ - [assuredworkloads.ListWorkloadsRequest], - assuredworkloads.ListWorkloadsResponse]: - r"""Return a callable for the list workloads method over gRPC. - - Lists Assured Workloads under a CRM Node. - - Returns: - Callable[[~.ListWorkloadsRequest], - ~.ListWorkloadsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_workloads' not in self._stubs: - self._stubs['list_workloads'] = self._logged_channel.unary_unary( - '/google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService/ListWorkloads', - request_serializer=assuredworkloads.ListWorkloadsRequest.serialize, - response_deserializer=assuredworkloads.ListWorkloadsResponse.deserialize, - ) - return self._stubs['list_workloads'] - - def close(self): - self._logged_channel.close() - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'AssuredWorkloadsServiceGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/grpc_asyncio.py deleted file mode 100644 index d2560317826c..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,671 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#
-import inspect
-import json
-import pickle
-import logging as std_logging
-import warnings
-from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
-
-from google.api_core import gapic_v1
-from google.api_core import grpc_helpers_async
-from google.api_core import exceptions as core_exceptions
-from google.api_core import retry_async as retries
-from google.api_core import operations_v1
-from google.auth import credentials as ga_credentials  # type: ignore
-from google.auth.transport.grpc import SslCredentials  # type: ignore
-from google.protobuf.json_format import MessageToJson
-import google.protobuf.message
-
-import grpc  # type: ignore
-import proto  # type: ignore
-from grpc.experimental import aio  # type: ignore
-
-from google.cloud.assuredworkloads_v1beta1.types import assuredworkloads
-from google.longrunning import operations_pb2  # type: ignore
-from google.protobuf import empty_pb2  # type: ignore
-from .base import AssuredWorkloadsServiceTransport, DEFAULT_CLIENT_INFO
-from .grpc import AssuredWorkloadsServiceGrpcTransport
-
-try:
-    from google.api_core import client_logging  # type: ignore
-    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
-except ImportError:  # pragma: NO COVER
-    CLIENT_LOGGING_SUPPORTED = False
-
-_LOGGER = std_logging.getLogger(__name__)
-
-
-class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor):  # pragma: NO COVER
-    async def intercept_unary_unary(self, continuation, client_call_details, request):
-        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG)
-        if logging_enabled:  # pragma: NO COVER
-            request_metadata = client_call_details.metadata
-            if isinstance(request, proto.Message):
-                request_payload = type(request).to_json(request)
-            elif isinstance(request, google.protobuf.message.Message):
-                request_payload = MessageToJson(request)
-            else:
-                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
-
-            request_metadata = {
-                key: value.decode("utf-8") if isinstance(value, bytes) else value
-                for key, value in request_metadata
-            }
-            grpc_request = {
-                "payload": request_payload,
-                "requestMethod": "grpc",
-                "metadata": dict(request_metadata),
-            }
-            _LOGGER.debug(
-                f"Sending request for {client_call_details.method}",
-                extra = {
-                    "serviceName": "google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService",
-                    "rpcName": str(client_call_details.method),
-                    "request": grpc_request,
-                    "metadata": grpc_request["metadata"],
-                },
-            )
-        response = await continuation(client_call_details, request)
-        if logging_enabled:  # pragma: NO COVER
-            response_metadata = await response.trailing_metadata()
-            # Convert the gRPC trailing metadata into a dict of string key/value pairs
-            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
-            result = await response
-            if isinstance(result, proto.Message):
-                response_payload = type(result).to_json(result)
-            elif isinstance(result, google.protobuf.message.Message):
-                response_payload = MessageToJson(result)
-            else:
-                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
-            grpc_response = {
-                "payload": response_payload,
-                "metadata": metadata,
-                "status": "OK",
-            }
-            _LOGGER.debug(
-                f"Received response to rpc {client_call_details.method}.",
-                extra = {
-                    "serviceName": "google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService",
-                    "rpcName": str(client_call_details.method),
-                    "response": grpc_response,
-                    "metadata": grpc_response["metadata"],
-                },
-            )
-        return response
-
-
-class AssuredWorkloadsServiceGrpcAsyncIOTransport(AssuredWorkloadsServiceTransport):
-    """gRPC AsyncIO backend transport for AssuredWorkloadsService.
-
-    Service to manage AssuredWorkloads.
-
-    This class defines the same methods as the primary client, so the
-    primary client can load the underlying transport implementation
-    and call it.
-
-    It sends protocol buffers over the wire using gRPC (which is built on
-    top of HTTP/2); the ``grpcio`` package must be installed.
-    """
-
-    _grpc_channel: aio.Channel
-    _stubs: Dict[str, Callable] = {}
-
-    @classmethod
-    def create_channel(cls,
-            host: str = 'assuredworkloads.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            quota_project_id: Optional[str] = None,
-            **kwargs) -> aio.Channel:
-        """Create and return a gRPC AsyncIO channel object.
-        Args:
-            host (Optional[str]): The host for the channel to use.
-            credentials (Optional[~.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify this application to the service. If
-                none are specified, the client will attempt to ascertain
-                the credentials from the environment.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            kwargs (Optional[dict]): Keyword arguments, which are passed to the
-                channel creation.
-        Returns:
-            aio.Channel: A gRPC AsyncIO channel object.
-        """
-
-        return grpc_helpers_async.create_channel(
-            host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            quota_project_id=quota_project_id,
-            default_scopes=cls.AUTH_SCOPES,
-            scopes=scopes,
-            default_host=cls.DEFAULT_HOST,
-            **kwargs
-        )
-
-    def __init__(self, *,
-            host: str = 'assuredworkloads.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None,
-            api_mtls_endpoint: Optional[str] = None,
-            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
-            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            api_audience: Optional[str] = None,
-            ) -> None:
-        """Instantiate the transport.
-
-        Args:
-            host (Optional[str]):
-                 The hostname to connect to (default: 'assuredworkloads.googleapis.com').
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-                This argument is ignored if a ``channel`` instance is provided.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if a ``channel`` instance is provided.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]):
-                A ``Channel`` instance through which to make calls, or a Callable
-                that constructs and returns one. If set to None, ``self.create_channel``
-                is used to create the channel. If a Callable is given, it will be called
-                with the same arguments as used in ``self.create_channel``.
-            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
-                If provided, it overrides the ``host`` argument and tries to create
-                a mutual TLS channel with client SSL credentials from
-                ``client_cert_source`` or application default SSL credentials.
-            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                Deprecated. A callback to provide client SSL certificate bytes and
-                private key bytes, both in PEM format. It is ignored if
-                ``api_mtls_endpoint`` is None.
-            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
-                for the grpc channel. It is ignored if a ``channel`` instance is provided.
-            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                A callback to provide client certificate bytes and private key bytes,
-                both in PEM format. It is used to configure a mutual TLS channel. It is
-                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
-                be used for service account credentials.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-              creation failed for any reason.
-            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
-              and ``credentials_file`` are passed.
-        """
-        self._grpc_channel = None
-        self._ssl_channel_credentials = ssl_channel_credentials
-        self._stubs: Dict[str, Callable] = {}
-        self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None
-
-        if api_mtls_endpoint:
-            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
-        if client_cert_source:
-            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
-
-        if isinstance(channel, aio.Channel):
-            # Ignore credentials if a channel was passed.
-            credentials = None
-            self._ignore_credentials = True
-            # If a channel was explicitly provided, set it.
-            self._grpc_channel = channel
-            self._ssl_channel_credentials = None
-        else:
-            if api_mtls_endpoint:
-                host = api_mtls_endpoint
-
-                # Create SSL credentials with client_cert_source or application
-                # default SSL credentials.
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def create_workload(self) -> Callable[ - [assuredworkloads.CreateWorkloadRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create workload method over gRPC. - - Creates Assured Workload. - - Returns: - Callable[[~.CreateWorkloadRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_workload' not in self._stubs: - self._stubs['create_workload'] = self._logged_channel.unary_unary( - '/google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService/CreateWorkload', - request_serializer=assuredworkloads.CreateWorkloadRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_workload'] - - @property - def update_workload(self) -> Callable[ - [assuredworkloads.UpdateWorkloadRequest], - Awaitable[assuredworkloads.Workload]]: - r"""Return a callable for the update workload method over gRPC. - - Updates an existing workload. Currently allows updating of - workload display_name and labels. For force updates don't set - etag field in the Workload. Only one update operation per - workload can be in progress. - - Returns: - Callable[[~.UpdateWorkloadRequest], - Awaitable[~.Workload]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_workload' not in self._stubs: - self._stubs['update_workload'] = self._logged_channel.unary_unary( - '/google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService/UpdateWorkload', - request_serializer=assuredworkloads.UpdateWorkloadRequest.serialize, - response_deserializer=assuredworkloads.Workload.deserialize, - ) - return self._stubs['update_workload'] - - @property - def restrict_allowed_resources(self) -> Callable[ - [assuredworkloads.RestrictAllowedResourcesRequest], - Awaitable[assuredworkloads.RestrictAllowedResourcesResponse]]: - r"""Return a callable for the restrict allowed resources method over gRPC. - - Restrict the list of resources allowed in the - Workload environment. The current list of allowed - products can be found at - https://cloud.google.com/assured-workloads/docs/supported-products - In addition to assuredworkloads.workload.update - permission, the user should also have - orgpolicy.policy.set permission on the folder resource - to use this functionality. - - Returns: - Callable[[~.RestrictAllowedResourcesRequest], - Awaitable[~.RestrictAllowedResourcesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'restrict_allowed_resources' not in self._stubs: - self._stubs['restrict_allowed_resources'] = self._logged_channel.unary_unary( - '/google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService/RestrictAllowedResources', - request_serializer=assuredworkloads.RestrictAllowedResourcesRequest.serialize, - response_deserializer=assuredworkloads.RestrictAllowedResourcesResponse.deserialize, - ) - return self._stubs['restrict_allowed_resources'] - - @property - def delete_workload(self) -> Callable[ - [assuredworkloads.DeleteWorkloadRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete workload method over gRPC. - - Deletes the workload. Make sure that workload's direct children - are already in a deleted state, otherwise the request will fail - with a FAILED_PRECONDITION error. In addition to - assuredworkloads.workload.delete permission, the user should - also have orgpolicy.policy.set permission on the deleted folder - to remove Assured Workloads OrgPolicies. 
- - Returns: - Callable[[~.DeleteWorkloadRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_workload' not in self._stubs: - self._stubs['delete_workload'] = self._logged_channel.unary_unary( - '/google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService/DeleteWorkload', - request_serializer=assuredworkloads.DeleteWorkloadRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_workload'] - - @property - def get_workload(self) -> Callable[ - [assuredworkloads.GetWorkloadRequest], - Awaitable[assuredworkloads.Workload]]: - r"""Return a callable for the get workload method over gRPC. - - Gets Assured Workload associated with a CRM Node - - Returns: - Callable[[~.GetWorkloadRequest], - Awaitable[~.Workload]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_workload' not in self._stubs: - self._stubs['get_workload'] = self._logged_channel.unary_unary( - '/google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService/GetWorkload', - request_serializer=assuredworkloads.GetWorkloadRequest.serialize, - response_deserializer=assuredworkloads.Workload.deserialize, - ) - return self._stubs['get_workload'] - - @property - def analyze_workload_move(self) -> Callable[ - [assuredworkloads.AnalyzeWorkloadMoveRequest], - Awaitable[assuredworkloads.AnalyzeWorkloadMoveResponse]]: - r"""Return a callable for the analyze workload move method over gRPC. - - Analyze if the source Assured Workloads can be moved - to the target Assured Workload - - Returns: - Callable[[~.AnalyzeWorkloadMoveRequest], - Awaitable[~.AnalyzeWorkloadMoveResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'analyze_workload_move' not in self._stubs: - self._stubs['analyze_workload_move'] = self._logged_channel.unary_unary( - '/google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService/AnalyzeWorkloadMove', - request_serializer=assuredworkloads.AnalyzeWorkloadMoveRequest.serialize, - response_deserializer=assuredworkloads.AnalyzeWorkloadMoveResponse.deserialize, - ) - return self._stubs['analyze_workload_move'] - - @property - def list_workloads(self) -> Callable[ - [assuredworkloads.ListWorkloadsRequest], - Awaitable[assuredworkloads.ListWorkloadsResponse]]: - r"""Return a callable for the list workloads method over gRPC. - - Lists Assured Workloads under a CRM Node. - - Returns: - Callable[[~.ListWorkloadsRequest], - Awaitable[~.ListWorkloadsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_workloads' not in self._stubs: - self._stubs['list_workloads'] = self._logged_channel.unary_unary( - '/google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService/ListWorkloads', - request_serializer=assuredworkloads.ListWorkloadsRequest.serialize, - response_deserializer=assuredworkloads.ListWorkloadsResponse.deserialize, - ) - return self._stubs['list_workloads'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.create_workload: self._wrap_method( - self.create_workload, - default_timeout=60.0, - client_info=client_info, - ), - self.update_workload: self._wrap_method( - self.update_workload, - default_timeout=60.0, - client_info=client_info, - ), - self.restrict_allowed_resources: self._wrap_method( - self.restrict_allowed_resources, - default_timeout=None, - client_info=client_info, - ), - self.delete_workload: self._wrap_method( - self.delete_workload, - default_retry=retries.AsyncRetry( - initial=0.2, - maximum=30.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_workload: self._wrap_method( - self.get_workload, - default_retry=retries.AsyncRetry( - initial=0.2, - maximum=30.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.analyze_workload_move: self._wrap_method( - self.analyze_workload_move, - default_retry=retries.AsyncRetry( - initial=0.2, - maximum=30.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_workloads: self._wrap_method( - self.list_workloads, - default_retry=retries.AsyncRetry( - initial=0.2, - maximum=30.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_operation: self._wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: self._wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - -__all__ = ( - 'AssuredWorkloadsServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/rest.py b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/rest.py deleted file mode 100644 index 028a8afe5c26..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/rest.py +++ /dev/null @@ -1,1071 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import logging -import json # type: ignore - -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.api_core import operations_v1 - -from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - - -from google.cloud.assuredworkloads_v1beta1.types import assuredworkloads -from google.protobuf import empty_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - - -from .rest_base import _BaseAssuredWorkloadsServiceRestTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = logging.getLogger(__name__) - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=f"requests@{requests_version}", -) - - -class AssuredWorkloadsServiceRestInterceptor: - """Interceptor for AssuredWorkloadsService. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the AssuredWorkloadsServiceRestTransport. - - .. 
code-block:: python
-        class MyCustomAssuredWorkloadsServiceInterceptor(AssuredWorkloadsServiceRestInterceptor):
-            def pre_analyze_workload_move(self, request, metadata):
-                logging.info(f"Received request: {request}")
-                return request, metadata
-
-            def post_analyze_workload_move(self, response):
-                logging.info(f"Received response: {response}")
-                return response
-
-            def pre_create_workload(self, request, metadata):
-                logging.info(f"Received request: {request}")
-                return request, metadata
-
-            def post_create_workload(self, response):
-                logging.info(f"Received response: {response}")
-                return response
-
-            def pre_delete_workload(self, request, metadata):
-                logging.info(f"Received request: {request}")
-                return request, metadata
-
-            def pre_get_workload(self, request, metadata):
-                logging.info(f"Received request: {request}")
-                return request, metadata
-
-            def post_get_workload(self, response):
-                logging.info(f"Received response: {response}")
-                return response
-
-            def pre_list_workloads(self, request, metadata):
-                logging.info(f"Received request: {request}")
-                return request, metadata
-
-            def post_list_workloads(self, response):
-                logging.info(f"Received response: {response}")
-                return response
-
-            def pre_restrict_allowed_resources(self, request, metadata):
-                logging.info(f"Received request: {request}")
-                return request, metadata
-
-            def post_restrict_allowed_resources(self, response):
-                logging.info(f"Received response: {response}")
-                return response
-
-            def pre_update_workload(self, request, metadata):
-                logging.info(f"Received request: {request}")
-                return request, metadata
-
-            def post_update_workload(self, response):
-                logging.info(f"Received response: {response}")
-                return response
-
-        transport = AssuredWorkloadsServiceRestTransport(interceptor=MyCustomAssuredWorkloadsServiceInterceptor())
-        client = AssuredWorkloadsServiceClient(transport=transport)
-
-
-    """
-    def pre_create_workload(self, request: assuredworkloads.CreateWorkloadRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[assuredworkloads.CreateWorkloadRequest, Sequence[Tuple[str, Union[str, bytes]]]]:
-        """Pre-rpc interceptor for create_workload
-
-        Override in a subclass to manipulate the request or metadata
-        before they are sent to the AssuredWorkloadsService server.
-        """
-        return request, metadata
-
-    def post_create_workload(self, response: operations_pb2.Operation) -> operations_pb2.Operation:
-        """Post-rpc interceptor for create_workload
-
-        DEPRECATED. Please use the `post_create_workload_with_metadata`
-        interceptor instead.
-
-        Override in a subclass to read or manipulate the response
-        after it is returned by the AssuredWorkloadsService server but before
-        it is returned to user code. This `post_create_workload` interceptor runs
-        before the `post_create_workload_with_metadata` interceptor.
-        """
-        return response
-
-    def post_create_workload_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]:
-        """Post-rpc interceptor for create_workload
-
-        Override in a subclass to read or manipulate the response or metadata after it
-        is returned by the AssuredWorkloadsService server but before it is returned to user code.
-
-        We recommend only using this `post_create_workload_with_metadata`
-        interceptor in new development instead of the `post_create_workload` interceptor.
- When both interceptors are used, this `post_create_workload_with_metadata` interceptor runs after the - `post_create_workload` interceptor. The (possibly modified) response returned by - `post_create_workload` will be passed to - `post_create_workload_with_metadata`. - """ - return response, metadata - - def pre_delete_workload(self, request: assuredworkloads.DeleteWorkloadRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[assuredworkloads.DeleteWorkloadRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_workload - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssuredWorkloadsService server. - """ - return request, metadata - - def pre_restrict_allowed_resources(self, request: assuredworkloads.RestrictAllowedResourcesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[assuredworkloads.RestrictAllowedResourcesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for restrict_allowed_resources - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssuredWorkloadsService server. - """ - return request, metadata - - def post_restrict_allowed_resources(self, response: assuredworkloads.RestrictAllowedResourcesResponse) -> assuredworkloads.RestrictAllowedResourcesResponse: - """Post-rpc interceptor for restrict_allowed_resources - - DEPRECATED. Please use the `post_restrict_allowed_resources_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AssuredWorkloadsService server but before - it is returned to user code. This `post_restrict_allowed_resources` interceptor runs - before the `post_restrict_allowed_resources_with_metadata` interceptor. - """ - return response - - def post_restrict_allowed_resources_with_metadata(self, response: assuredworkloads.RestrictAllowedResourcesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[assuredworkloads.RestrictAllowedResourcesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for restrict_allowed_resources - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AssuredWorkloadsService server but before it is returned to user code. - - We recommend only using this `post_restrict_allowed_resources_with_metadata` - interceptor in new development instead of the `post_restrict_allowed_resources` interceptor. - When both interceptors are used, this `post_restrict_allowed_resources_with_metadata` interceptor runs after the - `post_restrict_allowed_resources` interceptor. The (possibly modified) response returned by - `post_restrict_allowed_resources` will be passed to - `post_restrict_allowed_resources_with_metadata`. - """ - return response, metadata - - def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssuredWorkloadsService server. 
- """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the AssuredWorkloadsService server but before - it is returned to user code. - """ - return response - - def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_operations - - Override in a subclass to manipulate the request or metadata - before they are sent to the AssuredWorkloadsService server. - """ - return request, metadata - - def post_list_operations( - self, response: operations_pb2.ListOperationsResponse - ) -> operations_pb2.ListOperationsResponse: - """Post-rpc interceptor for list_operations - - Override in a subclass to manipulate the response - after it is returned by the AssuredWorkloadsService server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class AssuredWorkloadsServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: AssuredWorkloadsServiceRestInterceptor - - -class AssuredWorkloadsServiceRestTransport(_BaseAssuredWorkloadsServiceRestTransport): - """REST backend synchronous transport for AssuredWorkloadsService. - - Service to manage AssuredWorkloads. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'assuredworkloads.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[AssuredWorkloadsServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'assuredworkloads.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. - # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - url_scheme=url_scheme, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or AssuredWorkloadsServiceRestInterceptor() - self._prep_wrapped_messages(client_info) - - @property - def operations_client(self) -> operations_v1.AbstractOperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Only create a new client if we do not already have one. - if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.GetOperation': [ - { - 'method': 'get', - 'uri': '/v1beta1/{name=organizations/*/locations/*/operations/*}', - }, - ], - 'google.longrunning.Operations.ListOperations': [ - { - 'method': 'get', - 'uri': '/v1beta1/{name=organizations/*/locations/*}/operations', - }, - ], - } - - rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1beta1") - - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) - - # Return the client from cache. 
- return self._operations_client - - class _AnalyzeWorkloadMove(_BaseAssuredWorkloadsServiceRestTransport._BaseAnalyzeWorkloadMove, AssuredWorkloadsServiceRestStub): - def __hash__(self): - return hash("AssuredWorkloadsServiceRestTransport.AnalyzeWorkloadMove") - - def __call__(self, - request: assuredworkloads.AnalyzeWorkloadMoveRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> assuredworkloads.AnalyzeWorkloadMoveResponse: - raise NotImplementedError( - "Method AnalyzeWorkloadMove is not available over REST transport" - ) - class _CreateWorkload(_BaseAssuredWorkloadsServiceRestTransport._BaseCreateWorkload, AssuredWorkloadsServiceRestStub): - def __hash__(self): - return hash("AssuredWorkloadsServiceRestTransport.CreateWorkload") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: assuredworkloads.CreateWorkloadRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the create workload method over HTTP. - - Args: - request (~.assuredworkloads.CreateWorkloadRequest): - The request object. Request for creating a workload. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = _BaseAssuredWorkloadsServiceRestTransport._BaseCreateWorkload._get_http_options() - - request, metadata = self._interceptor.pre_create_workload(request, metadata) - transcoded_request = _BaseAssuredWorkloadsServiceRestTransport._BaseCreateWorkload._get_transcoded_request(http_options, request) - - body = _BaseAssuredWorkloadsServiceRestTransport._BaseCreateWorkload._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAssuredWorkloadsServiceRestTransport._BaseCreateWorkload._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.assuredworkloads_v1beta1.AssuredWorkloadsServiceClient.CreateWorkload", - extra = { - "serviceName": "google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService", - "rpcName": "CreateWorkload", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssuredWorkloadsServiceRestTransport._CreateWorkload._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_workload(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_workload_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.assuredworkloads_v1beta1.AssuredWorkloadsServiceClient.create_workload", - extra = { - "serviceName": "google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService", - "rpcName": "CreateWorkload", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteWorkload(_BaseAssuredWorkloadsServiceRestTransport._BaseDeleteWorkload, AssuredWorkloadsServiceRestStub): - def __hash__(self): - return hash("AssuredWorkloadsServiceRestTransport.DeleteWorkload") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: assuredworkloads.DeleteWorkloadRequest, 
*, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the delete workload method over HTTP. - - Args: - request (~.assuredworkloads.DeleteWorkloadRequest): - The request object. Request for deleting a Workload. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - - http_options = _BaseAssuredWorkloadsServiceRestTransport._BaseDeleteWorkload._get_http_options() - - request, metadata = self._interceptor.pre_delete_workload(request, metadata) - transcoded_request = _BaseAssuredWorkloadsServiceRestTransport._BaseDeleteWorkload._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAssuredWorkloadsServiceRestTransport._BaseDeleteWorkload._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.assuredworkloads_v1beta1.AssuredWorkloadsServiceClient.DeleteWorkload", - extra = { - "serviceName": "google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService", - "rpcName": "DeleteWorkload", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssuredWorkloadsServiceRestTransport._DeleteWorkload._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
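-        # DeleteWorkload responds with google.protobuf.Empty, so on success the
-        # response body is not parsed and this stub simply returns None.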
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _GetWorkload(_BaseAssuredWorkloadsServiceRestTransport._BaseGetWorkload, AssuredWorkloadsServiceRestStub): - def __hash__(self): - return hash("AssuredWorkloadsServiceRestTransport.GetWorkload") - - def __call__(self, - request: assuredworkloads.GetWorkloadRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> assuredworkloads.Workload: - raise NotImplementedError( - "Method GetWorkload is not available over REST transport" - ) - class _ListWorkloads(_BaseAssuredWorkloadsServiceRestTransport._BaseListWorkloads, AssuredWorkloadsServiceRestStub): - def __hash__(self): - return hash("AssuredWorkloadsServiceRestTransport.ListWorkloads") - - def __call__(self, - request: assuredworkloads.ListWorkloadsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> assuredworkloads.ListWorkloadsResponse: - raise NotImplementedError( - "Method ListWorkloads is not available over REST transport" - ) - class _RestrictAllowedResources(_BaseAssuredWorkloadsServiceRestTransport._BaseRestrictAllowedResources, AssuredWorkloadsServiceRestStub): - def __hash__(self): - return hash("AssuredWorkloadsServiceRestTransport.RestrictAllowedResources") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: assuredworkloads.RestrictAllowedResourcesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> assuredworkloads.RestrictAllowedResourcesResponse: - r"""Call the restrict allowed - resources method over HTTP. - - Args: - request (~.assuredworkloads.RestrictAllowedResourcesRequest): - The request object. Request for restricting list of - available resources in Workload - environment. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.assuredworkloads.RestrictAllowedResourcesResponse: - Response for restricting the list of - allowed resources. 
- - """ - - http_options = _BaseAssuredWorkloadsServiceRestTransport._BaseRestrictAllowedResources._get_http_options() - - request, metadata = self._interceptor.pre_restrict_allowed_resources(request, metadata) - transcoded_request = _BaseAssuredWorkloadsServiceRestTransport._BaseRestrictAllowedResources._get_transcoded_request(http_options, request) - - body = _BaseAssuredWorkloadsServiceRestTransport._BaseRestrictAllowedResources._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAssuredWorkloadsServiceRestTransport._BaseRestrictAllowedResources._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.assuredworkloads_v1beta1.AssuredWorkloadsServiceClient.RestrictAllowedResources", - extra = { - "serviceName": "google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService", - "rpcName": "RestrictAllowedResources", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssuredWorkloadsServiceRestTransport._RestrictAllowedResources._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = assuredworkloads.RestrictAllowedResourcesResponse() - pb_resp = assuredworkloads.RestrictAllowedResourcesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_restrict_allowed_resources(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_restrict_allowed_resources_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = assuredworkloads.RestrictAllowedResourcesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.assuredworkloads_v1beta1.AssuredWorkloadsServiceClient.restrict_allowed_resources", - extra = { - "serviceName": "google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService", - "rpcName": "RestrictAllowedResources", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateWorkload(_BaseAssuredWorkloadsServiceRestTransport._BaseUpdateWorkload, AssuredWorkloadsServiceRestStub): - def __hash__(self): - return hash("AssuredWorkloadsServiceRestTransport.UpdateWorkload") - - def __call__(self, - request: assuredworkloads.UpdateWorkloadRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> assuredworkloads.Workload: - raise NotImplementedError( - "Method UpdateWorkload is not 
available over REST transport" - ) - - @property - def analyze_workload_move(self) -> Callable[ - [assuredworkloads.AnalyzeWorkloadMoveRequest], - assuredworkloads.AnalyzeWorkloadMoveResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._AnalyzeWorkloadMove(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_workload(self) -> Callable[ - [assuredworkloads.CreateWorkloadRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateWorkload(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_workload(self) -> Callable[ - [assuredworkloads.DeleteWorkloadRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteWorkload(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_workload(self) -> Callable[ - [assuredworkloads.GetWorkloadRequest], - assuredworkloads.Workload]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetWorkload(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_workloads(self) -> Callable[ - [assuredworkloads.ListWorkloadsRequest], - assuredworkloads.ListWorkloadsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListWorkloads(self._session, self._host, self._interceptor) # type: ignore - - @property - def restrict_allowed_resources(self) -> Callable[ - [assuredworkloads.RestrictAllowedResourcesRequest], - assuredworkloads.RestrictAllowedResourcesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._RestrictAllowedResources(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_workload(self) -> Callable[ - [assuredworkloads.UpdateWorkloadRequest], - assuredworkloads.Workload]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._UpdateWorkload(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(_BaseAssuredWorkloadsServiceRestTransport._BaseGetOperation, AssuredWorkloadsServiceRestStub): - def __hash__(self): - return hash("AssuredWorkloadsServiceRestTransport.GetOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.Operation: Response from GetOperation method. - """ - - http_options = _BaseAssuredWorkloadsServiceRestTransport._BaseGetOperation._get_http_options() - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - transcoded_request = _BaseAssuredWorkloadsServiceRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAssuredWorkloadsServiceRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.assuredworkloads_v1beta1.AssuredWorkloadsServiceClient.GetOperation", - extra = { - "serviceName": "google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService", - "rpcName": "GetOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssuredWorkloadsServiceRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
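-        # A successful body is parsed below with json_format.Parse, which is
-        # strict: unknown fields in the payload raise ParseError.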
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.Operation() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.assuredworkloads_v1beta1.AssuredWorkloadsServiceAsyncClient.GetOperation", - extra = { - "serviceName": "google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService", - "rpcName": "GetOperation", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - - class _ListOperations(_BaseAssuredWorkloadsServiceRestTransport._BaseListOperations, AssuredWorkloadsServiceRestStub): - def __hash__(self): - return hash("AssuredWorkloadsServiceRestTransport.ListOperations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.ListOperationsResponse: - - r"""Call the list operations method over HTTP. - - Args: - request (operations_pb2.ListOperationsRequest): - The request object for ListOperations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.ListOperationsResponse: Response from ListOperations method. 
- """ - - http_options = _BaseAssuredWorkloadsServiceRestTransport._BaseListOperations._get_http_options() - - request, metadata = self._interceptor.pre_list_operations(request, metadata) - transcoded_request = _BaseAssuredWorkloadsServiceRestTransport._BaseListOperations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAssuredWorkloadsServiceRestTransport._BaseListOperations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.assuredworkloads_v1beta1.AssuredWorkloadsServiceClient.ListOperations", - extra = { - "serviceName": "google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService", - "rpcName": "ListOperations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AssuredWorkloadsServiceRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.ListOperationsResponse() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_list_operations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.assuredworkloads_v1beta1.AssuredWorkloadsServiceAsyncClient.ListOperations", - extra = { - "serviceName": "google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService", - "rpcName": "ListOperations", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'AssuredWorkloadsServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/rest_base.py b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/rest_base.py deleted file mode 100644 index 56fd83d274e1..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/rest_base.py +++ /dev/null @@ -1,290 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from .base import AssuredWorkloadsServiceTransport, DEFAULT_CLIENT_INFO - -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - - -from google.cloud.assuredworkloads_v1beta1.types import assuredworkloads -from google.protobuf import empty_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - - -class _BaseAssuredWorkloadsServiceRestTransport(AssuredWorkloadsServiceTransport): - """Base REST backend transport for AssuredWorkloadsService. - - Note: This class is not meant to be used directly. Use its sync and - async sub-classes instead. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'assuredworkloads.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - Args: - host (Optional[str]): - The hostname to connect to (default: 'assuredworkloads.googleapis.com'). - credentials (Optional[Any]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. 
- """ - # Run the base constructor - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - - class _BaseAnalyzeWorkloadMove: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - class _BaseCreateWorkload: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1beta1/{parent=organizations/*/locations/*}/workloads', - 'body': 'workload', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = assuredworkloads.CreateWorkloadRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAssuredWorkloadsServiceRestTransport._BaseCreateWorkload._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteWorkload: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1beta1/{name=organizations/*/locations/*/workloads/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = assuredworkloads.DeleteWorkloadRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAssuredWorkloadsServiceRestTransport._BaseDeleteWorkload._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetWorkload: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - class _BaseListWorkloads: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") 
- - class _BaseRestrictAllowedResources: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1beta1/{name=organizations/*/locations/*/workloads/*}:restrictAllowedResources', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = assuredworkloads.RestrictAllowedResourcesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAssuredWorkloadsServiceRestTransport._BaseRestrictAllowedResources._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateWorkload: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - class _BaseGetOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1beta1/{name=organizations/*/locations/*/operations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseListOperations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1beta1/{name=organizations/*/locations/*}/operations', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - -__all__=( - '_BaseAssuredWorkloadsServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/types/__init__.py b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/types/__init__.py deleted file mode 100644 index effa8d75c648..000000000000 --- 
a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/types/__init__.py +++ /dev/null @@ -1,44 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .assuredworkloads import ( - AnalyzeWorkloadMoveRequest, - AnalyzeWorkloadMoveResponse, - CreateWorkloadOperationMetadata, - CreateWorkloadRequest, - DeleteWorkloadRequest, - GetWorkloadRequest, - ListWorkloadsRequest, - ListWorkloadsResponse, - RestrictAllowedResourcesRequest, - RestrictAllowedResourcesResponse, - UpdateWorkloadRequest, - Workload, -) - -__all__ = ( - 'AnalyzeWorkloadMoveRequest', - 'AnalyzeWorkloadMoveResponse', - 'CreateWorkloadOperationMetadata', - 'CreateWorkloadRequest', - 'DeleteWorkloadRequest', - 'GetWorkloadRequest', - 'ListWorkloadsRequest', - 'ListWorkloadsResponse', - 'RestrictAllowedResourcesRequest', - 'RestrictAllowedResourcesResponse', - 'UpdateWorkloadRequest', - 'Workload', -) diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/types/assuredworkloads.py b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/types/assuredworkloads.py deleted file mode 100644 index 6a6377754d8e..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/types/assuredworkloads.py +++ /dev/null @@ -1,888 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.assuredworkloads.v1beta1', - manifest={ - 'CreateWorkloadRequest', - 'UpdateWorkloadRequest', - 'RestrictAllowedResourcesRequest', - 'RestrictAllowedResourcesResponse', - 'DeleteWorkloadRequest', - 'GetWorkloadRequest', - 'AnalyzeWorkloadMoveRequest', - 'AnalyzeWorkloadMoveResponse', - 'ListWorkloadsRequest', - 'ListWorkloadsResponse', - 'Workload', - 'CreateWorkloadOperationMetadata', - }, -) - - -class CreateWorkloadRequest(proto.Message): - r"""Request for creating a workload. - - Attributes: - parent (str): - Required. The resource name of the new Workload's parent. 
-            Must be of the form
-            ``organizations/{org_id}/locations/{location_id}``.
-        workload (google.cloud.assuredworkloads_v1beta1.types.Workload):
-            Required. The Assured Workload to create.
-        external_id (str):
-            Optional. An identifier associated with the
-            workload and underlying projects which allows
-            for the breakdown of billing costs for a
-            workload. The value provided for the identifier
-            will add a label to the workload and contained
-            projects with the identifier as the value.
-    """
-
-    parent: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    workload: 'Workload' = proto.Field(
-        proto.MESSAGE,
-        number=2,
-        message='Workload',
-    )
-    external_id: str = proto.Field(
-        proto.STRING,
-        number=3,
-    )
-
-
-class UpdateWorkloadRequest(proto.Message):
-    r"""Request for updating a workload.
-
-    Attributes:
-        workload (google.cloud.assuredworkloads_v1beta1.types.Workload):
-            Required. The workload to update. The workload's ``name``
-            field is used to identify the workload to be updated.
-            Format:
-            organizations/{org_id}/locations/{location_id}/workloads/{workload_id}
-        update_mask (google.protobuf.field_mask_pb2.FieldMask):
-            Required. The list of fields to be updated.
-    """
-
-    workload: 'Workload' = proto.Field(
-        proto.MESSAGE,
-        number=1,
-        message='Workload',
-    )
-    update_mask: field_mask_pb2.FieldMask = proto.Field(
-        proto.MESSAGE,
-        number=2,
-        message=field_mask_pb2.FieldMask,
-    )
-
-
-class RestrictAllowedResourcesRequest(proto.Message):
-    r"""Request for restricting the list of available resources in
-    the Workload environment.
-
-    Attributes:
-        name (str):
-            Required. The resource name of the Workload. This is the
-            workload's relative path in the API, formatted as
-            "organizations/{organization_id}/locations/{location_id}/workloads/{workload_id}".
-            For example,
-            "organizations/123/locations/us-east1/workloads/assured-workload-1".
-        restriction_type (google.cloud.assuredworkloads_v1beta1.types.RestrictAllowedResourcesRequest.RestrictionType):
-            Required. The type of restriction for using
-            GCP products in the Workload environment.
-    """
-    class RestrictionType(proto.Enum):
-        r"""The type of restriction.
-
-        Values:
-            RESTRICTION_TYPE_UNSPECIFIED (0):
-                Unknown restriction type.
-            ALLOW_ALL_GCP_RESOURCES (1):
-                Allow the use of all GCP products,
-                irrespective of the compliance posture. This
-                effectively removes the gcp.restrictServiceUsage
-                OrgPolicy on the AssuredWorkloads Folder.
-            ALLOW_COMPLIANT_RESOURCES (2):
-                The allowed list changes based on the Workload's
-                compliance regime. See
-                https://cloud.google.com/assured-workloads/docs/supported-products
-                for the list of supported resources.
-        """
-        RESTRICTION_TYPE_UNSPECIFIED = 0
-        ALLOW_ALL_GCP_RESOURCES = 1
-        ALLOW_COMPLIANT_RESOURCES = 2
-
-    name: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    restriction_type: RestrictionType = proto.Field(
-        proto.ENUM,
-        number=2,
-        enum=RestrictionType,
-    )
-
-
-class RestrictAllowedResourcesResponse(proto.Message):
-    r"""Response for restricting the list of allowed resources.
-    """
-
-
-class DeleteWorkloadRequest(proto.Message):
-    r"""Request for deleting a Workload.
-
-    Attributes:
-        name (str):
-            Required. The ``name`` field is used to identify the
-            workload. Format:
-            organizations/{org_id}/locations/{location_id}/workloads/{workload_id}
-        etag (str):
-            Optional. The etag of the workload.
-            If this is provided, it must match the server's
-            etag.
-    """
-
-    name: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    etag: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-
-
-class GetWorkloadRequest(proto.Message):
-    r"""Request for fetching a workload.
-
-    Attributes:
-        name (str):
-            Required. The resource name of the Workload to fetch. This
-            is the workload's relative path in the API, formatted as
-            "organizations/{organization_id}/locations/{location_id}/workloads/{workload_id}".
-            For example,
-            "organizations/123/locations/us-east1/workloads/assured-workload-1".
-    """
-
-    name: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-
-
-class AnalyzeWorkloadMoveRequest(proto.Message):
-    r"""A request to analyze a hypothetical move of a source project
-    or project-based workload to a target (destination) folder-based
-    workload.
-
-    This message has `oneof`_ fields (mutually exclusive fields).
-    For each oneof, at most one member field can be set at the same time.
-    Setting any member of the oneof automatically clears all other
-    members.
-
-    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
-
-    Attributes:
-        source (str):
-            The source type is a project-based workload. Specify the
-            workload's relative resource name, formatted as:
-            "organizations/{ORGANIZATION_ID}/locations/{LOCATION_ID}/workloads/{WORKLOAD_ID}"
-            For example:
-            "organizations/123/locations/us-east1/workloads/assured-workload-1".
-
-            This field is a member of `oneof`_ ``projectOrWorkloadResource``.
-        project (str):
-            The source type is a project. Specify the project's relative
-            resource name, formatted as either a project number or a
-            project ID: "projects/{PROJECT_NUMBER}" or
-            "projects/{PROJECT_ID}". For example: "projects/951040570662"
-            when specifying a project number, or
-            "projects/my-project-123" when specifying a project ID.
-
-            This field is a member of `oneof`_ ``projectOrWorkloadResource``.
-        target (str):
-            Required. The resource ID of the folder-based destination
-            workload. This workload is where the source project will
-            hypothetically be moved to. Specify the workload's relative
-            resource name, formatted as:
-            "organizations/{ORGANIZATION_ID}/locations/{LOCATION_ID}/workloads/{WORKLOAD_ID}"
-            For example:
-            "organizations/123/locations/us-east1/workloads/assured-workload-2".
-    """
-
-    source: str = proto.Field(
-        proto.STRING,
-        number=1,
-        oneof='projectOrWorkloadResource',
-    )
-    project: str = proto.Field(
-        proto.STRING,
-        number=3,
-        oneof='projectOrWorkloadResource',
-    )
-    target: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-
-
-class AnalyzeWorkloadMoveResponse(proto.Message):
-    r"""A response that includes the analysis of the hypothetical
-    resource move.
-
-    Attributes:
-        blockers (MutableSequence[str]):
-            A list of blockers that should be addressed
-            before moving the source project or
-            project-based workload to the destination
-            folder-based workload.
-    """
-
-    blockers: MutableSequence[str] = proto.RepeatedField(
-        proto.STRING,
-        number=1,
-    )
-
-
-class ListWorkloadsRequest(proto.Message):
-    r"""Request for fetching workloads in an organization.
-
-    Attributes:
-        parent (str):
-            Required. Parent Resource to list workloads from. Must be of
-            the form ``organizations/{org_id}/locations/{location}``.
-        page_size (int):
-            Page size.
-        page_token (str):
-            Page token returned from the previous request.
-            The page token carries context from the previous
-            request and needs to be passed in the
-            second and following requests.
-        filter (str):
-            A custom filter for filtering by properties
-            of a workload. At this time, only filtering by
-            labels is supported.
-    """
-
-    parent: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    page_size: int = proto.Field(
-        proto.INT32,
-        number=2,
-    )
-    page_token: str = proto.Field(
-        proto.STRING,
-        number=3,
-    )
-    filter: str = proto.Field(
-        proto.STRING,
-        number=4,
-    )
-
-
-class ListWorkloadsResponse(proto.Message):
-    r"""Response of the ListWorkloads endpoint.
-
-    Attributes:
-        workloads (MutableSequence[google.cloud.assuredworkloads_v1beta1.types.Workload]):
-            List of Workloads under a given parent.
-        next_page_token (str):
-            The next page token. Empty if the last
-            page has been reached.
-    """
-
-    @property
-    def raw_page(self):
-        return self
-
-    workloads: MutableSequence['Workload'] = proto.RepeatedField(
-        proto.MESSAGE,
-        number=1,
-        message='Workload',
-    )
-    next_page_token: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-
-
-class Workload(proto.Message):
-    r"""A Workload object for managing highly regulated workloads of
-    cloud customers.
-
-    This message has `oneof`_ fields (mutually exclusive fields).
-    For each oneof, at most one member field can be set at the same time.
-    Setting any member of the oneof automatically clears all other
-    members.
-
-    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
-
-    Attributes:
-        name (str):
-            Optional. The resource name of the workload.
-            Format:
-
-            organizations/{organization}/locations/{location}/workloads/{workload}
-
-            Read-only.
-        display_name (str):
-            Required. The user-assigned display name of
-            the Workload. When present it must be between 4
-            to 30 characters. Allowed characters are:
-            lowercase and uppercase letters, numbers,
-            hyphen, and spaces.
-
-            Example: My Workload
-        resources (MutableSequence[google.cloud.assuredworkloads_v1beta1.types.Workload.ResourceInfo]):
-            Output only. The resources associated with
-            this workload. These resources will be created
-            when creating the workload. If any of the
-            projects already exist, the workload creation
-            will fail. Always read only.
-        compliance_regime (google.cloud.assuredworkloads_v1beta1.types.Workload.ComplianceRegime):
-            Required. Immutable. Compliance Regime
-            associated with this workload.
-        create_time (google.protobuf.timestamp_pb2.Timestamp):
-            Output only. Immutable. The Workload creation
-            timestamp.
-        billing_account (str):
-            Output only. The billing account used for the resources
-            which are direct children of workload. This billing account
-            is initially associated with the resources created as part
-            of Workload creation. After the initial creation of these
-            resources, the customer can change the assigned billing
-            account. The resource name has the form
-            ``billingAccounts/{billing_account_id}``. For example,
-            ``billingAccounts/012345-567890-ABCDEF``.
-        il4_settings (google.cloud.assuredworkloads_v1beta1.types.Workload.IL4Settings):
-            Input only. Immutable. Settings specific to
-            resources needed for IL4.
-
-            This field is a member of `oneof`_ ``compliance_regime_settings``.
-        cjis_settings (google.cloud.assuredworkloads_v1beta1.types.Workload.CJISSettings):
-            Input only. Immutable. Settings specific to
-            resources needed for CJIS.
-
-            This field is a member of `oneof`_ ``compliance_regime_settings``.
-        fedramp_high_settings (google.cloud.assuredworkloads_v1beta1.types.Workload.FedrampHighSettings):
-            Input only. Immutable. Settings specific to
-            resources needed for FedRAMP High.
-
-            This field is a member of `oneof`_ ``compliance_regime_settings``.
-        fedramp_moderate_settings (google.cloud.assuredworkloads_v1beta1.types.Workload.FedrampModerateSettings):
-            Input only. Immutable. Settings specific to
-            resources needed for FedRAMP Moderate.
-
-            This field is a member of `oneof`_ ``compliance_regime_settings``.
-        etag (str):
-            Optional. ETag of the workload; it is
-            calculated on the basis of the Workload
-            contents. It will be used in Update & Delete
-            operations.
-        labels (MutableMapping[str, str]):
-            Optional. Labels applied to the workload.
-        provisioned_resources_parent (str):
-            Input only. The parent resource for the resources managed by
-            this Assured Workload. May be either empty or a folder
-            resource which is a child of the Workload parent. If not
-            specified, all resources are created under the parent
-            organization. Format: folders/{folder_id}
-        kms_settings (google.cloud.assuredworkloads_v1beta1.types.Workload.KMSSettings):
-            Input only. Settings used to create a CMEK crypto key. When
-            set, a project with a KMS CMEK key is provisioned. This
-            field is deprecated as of Feb 28, 2022. In order to create a
-            Keyring, callers should specify ENCRYPTION_KEYS_PROJECT or
-            KEYRING in the ResourceSettings.resource_type field.
-        resource_settings (MutableSequence[google.cloud.assuredworkloads_v1beta1.types.Workload.ResourceSettings]):
-            Input only. Resource properties that are used
-            to customize workload resources. These
-            properties (such as a custom project id) will be
-            used to create workload resources if possible.
-            This field is optional.
-        kaj_enrollment_state (google.cloud.assuredworkloads_v1beta1.types.Workload.KajEnrollmentState):
-            Output only. Represents the KAJ enrollment
-            state of the given workload.
-        enable_sovereign_controls (bool):
-            Optional. Indicates the sovereignty status of
-            the given workload. Currently meant to be used
-            by Europe/Canada customers.
-        saa_enrollment_response (google.cloud.assuredworkloads_v1beta1.types.Workload.SaaEnrollmentResponse):
-            Output only. Represents the SAA enrollment
-            response of the given workload. The SAA enrollment
-            response is queried during the GetWorkload call. In
-            failure cases, a user-friendly error message is
-            shown on the SAA details page.
-        compliant_but_disallowed_services (MutableSequence[str]):
-            Output only. URLs for services which are
-            compliant for this Assured Workload, but which
-            are currently disallowed by the
-            ResourceUsageRestriction org policy. Invoke the
-            RestrictAllowedResources endpoint to allow your
-            project developers to use these services in
-            their environment.
-    """
-    class ComplianceRegime(proto.Enum):
-        r"""Supported Compliance Regimes.
-
-        Values:
-            COMPLIANCE_REGIME_UNSPECIFIED (0):
-                Unknown compliance regime.
-            IL4 (1):
-                Information protection as per DoD IL4
-                requirements.
-            CJIS (2):
-                Criminal Justice Information Services (CJIS)
-                Security policies.
-            FEDRAMP_HIGH (3):
-                FedRAMP High data protection controls
-            FEDRAMP_MODERATE (4):
-                FedRAMP Moderate data protection controls
-            US_REGIONAL_ACCESS (5):
-                Assured Workloads for US Regions data
-                protection controls
-            HIPAA (6):
-                Health Insurance Portability and
-                Accountability Act controls
-            HITRUST (7):
-                Health Information Trust Alliance controls
-            EU_REGIONS_AND_SUPPORT (8):
-                Assured Workloads for EU Regions and Support
-                controls
-            CA_REGIONS_AND_SUPPORT (9):
-                Assured Workloads for Canada Regions and
-                Support controls
-            ITAR (10):
-                International Traffic in Arms Regulations
-            AU_REGIONS_AND_US_SUPPORT (11):
-                Assured Workloads for Australia Regions and
-                Support controls
-        """
-        COMPLIANCE_REGIME_UNSPECIFIED = 0
-        IL4 = 1
-        CJIS = 2
-        FEDRAMP_HIGH = 3
-        FEDRAMP_MODERATE = 4
-        US_REGIONAL_ACCESS = 5
-        HIPAA = 6
-        HITRUST = 7
-        EU_REGIONS_AND_SUPPORT = 8
-        CA_REGIONS_AND_SUPPORT = 9
-        ITAR = 10
-        AU_REGIONS_AND_US_SUPPORT = 11
-
-    class KajEnrollmentState(proto.Enum):
-        r"""Key Access Justifications (KAJ) Enrollment State.
-
-        Values:
-            KAJ_ENROLLMENT_STATE_UNSPECIFIED (0):
-                Default State for KAJ Enrollment.
-            KAJ_ENROLLMENT_STATE_PENDING (1):
-                Pending State for KAJ Enrollment.
-            KAJ_ENROLLMENT_STATE_COMPLETE (2):
-                Complete State for KAJ Enrollment.
-        """
-        KAJ_ENROLLMENT_STATE_UNSPECIFIED = 0
-        KAJ_ENROLLMENT_STATE_PENDING = 1
-        KAJ_ENROLLMENT_STATE_COMPLETE = 2
-
-    class ResourceInfo(proto.Message):
-        r"""Represents the resources that are children of this Workload.
-
-        Attributes:
-            resource_id (int):
-                Resource identifier. For a project this represents
-                project_number.
-            resource_type (google.cloud.assuredworkloads_v1beta1.types.Workload.ResourceInfo.ResourceType):
-                Indicates the type of resource.
-        """
-        class ResourceType(proto.Enum):
-            r"""The type of resource.
-
-            Values:
-                RESOURCE_TYPE_UNSPECIFIED (0):
-                    Unknown resource type.
-                CONSUMER_PROJECT (1):
-                    Deprecated. Existing workloads will continue
-                    to support this, but new CreateWorkloadRequests
-                    should not specify this as an input value.
-                CONSUMER_FOLDER (4):
-                    Consumer Folder.
-                ENCRYPTION_KEYS_PROJECT (2):
-                    Consumer project containing encryption keys.
-                KEYRING (3):
-                    Keyring resource that hosts encryption keys.
-            """
-            RESOURCE_TYPE_UNSPECIFIED = 0
-            CONSUMER_PROJECT = 1
-            CONSUMER_FOLDER = 4
-            ENCRYPTION_KEYS_PROJECT = 2
-            KEYRING = 3
-
-        resource_id: int = proto.Field(
-            proto.INT64,
-            number=1,
-        )
-        resource_type: 'Workload.ResourceInfo.ResourceType' = proto.Field(
-            proto.ENUM,
-            number=2,
-            enum='Workload.ResourceInfo.ResourceType',
-        )
-
-    class KMSSettings(proto.Message):
-        r"""Settings specific to the Key Management Service.
-
-        Attributes:
-            next_rotation_time (google.protobuf.timestamp_pb2.Timestamp):
-                Required. Input only. Immutable. The time at
-                which the Key Management Service will
-                automatically create a new version of the crypto
-                key and mark it as the primary.
-            rotation_period (google.protobuf.duration_pb2.Duration):
-                Required. Input only. Immutable. [next_rotation_time] will
-                be advanced by this period when the Key Management Service
-                automatically rotates a key. Must be at least 24 hours and
-                at most 876,000 hours.
-        """
-
-        next_rotation_time: timestamp_pb2.Timestamp = proto.Field(
-            proto.MESSAGE,
-            number=1,
-            message=timestamp_pb2.Timestamp,
-        )
-        rotation_period: duration_pb2.Duration = proto.Field(
-            proto.MESSAGE,
-            number=2,
-            message=duration_pb2.Duration,
-        )
-
-    class IL4Settings(proto.Message):
-        r"""Settings specific to resources needed for IL4.
-
-        Attributes:
-            kms_settings (google.cloud.assuredworkloads_v1beta1.types.Workload.KMSSettings):
-                Input only. Immutable. Settings used to
-                create a CMEK crypto key.
-        """
-
-        kms_settings: 'Workload.KMSSettings' = proto.Field(
-            proto.MESSAGE,
-            number=1,
-            message='Workload.KMSSettings',
-        )
-
-    class CJISSettings(proto.Message):
-        r"""Settings specific to resources needed for CJIS.
-
-        Attributes:
-            kms_settings (google.cloud.assuredworkloads_v1beta1.types.Workload.KMSSettings):
-                Input only. Immutable. Settings used to
-                create a CMEK crypto key.
-        """
-
-        kms_settings: 'Workload.KMSSettings' = proto.Field(
-            proto.MESSAGE,
-            number=1,
-            message='Workload.KMSSettings',
-        )
-
-    class FedrampHighSettings(proto.Message):
-        r"""Settings specific to resources needed for FedRAMP High.
-
-        Attributes:
-            kms_settings (google.cloud.assuredworkloads_v1beta1.types.Workload.KMSSettings):
-                Input only. Immutable. Settings used to
-                create a CMEK crypto key.
-        """
-
-        kms_settings: 'Workload.KMSSettings' = proto.Field(
-            proto.MESSAGE,
-            number=1,
-            message='Workload.KMSSettings',
-        )
-
-    class FedrampModerateSettings(proto.Message):
-        r"""Settings specific to resources needed for FedRAMP Moderate.
-
-        Attributes:
-            kms_settings (google.cloud.assuredworkloads_v1beta1.types.Workload.KMSSettings):
-                Input only. Immutable. Settings used to
-                create a CMEK crypto key.
-        """
-
-        kms_settings: 'Workload.KMSSettings' = proto.Field(
-            proto.MESSAGE,
-            number=1,
-            message='Workload.KMSSettings',
-        )
-
-    class ResourceSettings(proto.Message):
-        r"""Represents the custom settings for the resources to be
-        created.
-
-        Attributes:
-            resource_id (str):
-                Resource identifier. For a project this represents
-                project_id. If the project is already taken, the workload
-                creation will fail. For a KeyRing, this represents the
-                keyring_id. For a folder, don't set this value as folder_id
-                is assigned by Google.
-            resource_type (google.cloud.assuredworkloads_v1beta1.types.Workload.ResourceInfo.ResourceType):
-                Indicates the type of resource. This field should be
-                specified so the id corresponds to the right project type
-                (CONSUMER_PROJECT or ENCRYPTION_KEYS_PROJECT).
-            display_name (str):
-                User-assigned resource display name.
-                If not empty it will be used to create a
-                resource with the specified name.
-        """
-
-        resource_id: str = proto.Field(
-            proto.STRING,
-            number=1,
-        )
-        resource_type: 'Workload.ResourceInfo.ResourceType' = proto.Field(
-            proto.ENUM,
-            number=2,
-            enum='Workload.ResourceInfo.ResourceType',
-        )
-        display_name: str = proto.Field(
-            proto.STRING,
-            number=3,
-        )
-
-    class SaaEnrollmentResponse(proto.Message):
-        r"""Signed Access Approvals (SAA) enrollment response.
-
-        .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
-
-        Attributes:
-            setup_status (google.cloud.assuredworkloads_v1beta1.types.Workload.SaaEnrollmentResponse.SetupState):
-                Indicates SAA enrollment status of a given
-                workload.
-
-                This field is a member of `oneof`_ ``_setup_status``.
-            setup_errors (MutableSequence[google.cloud.assuredworkloads_v1beta1.types.Workload.SaaEnrollmentResponse.SetupError]):
-                Indicates SAA enrollment setup error if any.
-        """
-        class SetupState(proto.Enum):
-            r"""Setup state of SAA enrollment.
-
-            Values:
-                SETUP_STATE_UNSPECIFIED (0):
-                    Unspecified.
-                STATUS_PENDING (1):
-                    SAA enrollment pending.
-                STATUS_COMPLETE (2):
-                    SAA enrollment completed.
- """ - SETUP_STATE_UNSPECIFIED = 0 - STATUS_PENDING = 1 - STATUS_COMPLETE = 2 - - class SetupError(proto.Enum): - r"""Setup error of SAA enrollment. - - Values: - SETUP_ERROR_UNSPECIFIED (0): - Unspecified. - ERROR_INVALID_BASE_SETUP (1): - Invalid states for all customers, to be - redirected to AA UI for additional details. - ERROR_MISSING_EXTERNAL_SIGNING_KEY (2): - Returned when there is not an EKM key - configured. - ERROR_NOT_ALL_SERVICES_ENROLLED (3): - Returned when there are no enrolled services - or the customer is enrolled in CAA only for a - subset of services. - ERROR_SETUP_CHECK_FAILED (4): - Returned when exception was encountered - during evaluation of other criteria. - """ - SETUP_ERROR_UNSPECIFIED = 0 - ERROR_INVALID_BASE_SETUP = 1 - ERROR_MISSING_EXTERNAL_SIGNING_KEY = 2 - ERROR_NOT_ALL_SERVICES_ENROLLED = 3 - ERROR_SETUP_CHECK_FAILED = 4 - - setup_status: 'Workload.SaaEnrollmentResponse.SetupState' = proto.Field( - proto.ENUM, - number=1, - optional=True, - enum='Workload.SaaEnrollmentResponse.SetupState', - ) - setup_errors: MutableSequence['Workload.SaaEnrollmentResponse.SetupError'] = proto.RepeatedField( - proto.ENUM, - number=2, - enum='Workload.SaaEnrollmentResponse.SetupError', - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - resources: MutableSequence[ResourceInfo] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=ResourceInfo, - ) - compliance_regime: ComplianceRegime = proto.Field( - proto.ENUM, - number=4, - enum=ComplianceRegime, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - billing_account: str = proto.Field( - proto.STRING, - number=6, - ) - il4_settings: IL4Settings = proto.Field( - proto.MESSAGE, - number=7, - oneof='compliance_regime_settings', - message=IL4Settings, - ) - cjis_settings: CJISSettings = proto.Field( - proto.MESSAGE, - number=8, - oneof='compliance_regime_settings', - message=CJISSettings, - ) - fedramp_high_settings: FedrampHighSettings = proto.Field( - proto.MESSAGE, - number=11, - oneof='compliance_regime_settings', - message=FedrampHighSettings, - ) - fedramp_moderate_settings: FedrampModerateSettings = proto.Field( - proto.MESSAGE, - number=12, - oneof='compliance_regime_settings', - message=FedrampModerateSettings, - ) - etag: str = proto.Field( - proto.STRING, - number=9, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=10, - ) - provisioned_resources_parent: str = proto.Field( - proto.STRING, - number=13, - ) - kms_settings: KMSSettings = proto.Field( - proto.MESSAGE, - number=14, - message=KMSSettings, - ) - resource_settings: MutableSequence[ResourceSettings] = proto.RepeatedField( - proto.MESSAGE, - number=15, - message=ResourceSettings, - ) - kaj_enrollment_state: KajEnrollmentState = proto.Field( - proto.ENUM, - number=17, - enum=KajEnrollmentState, - ) - enable_sovereign_controls: bool = proto.Field( - proto.BOOL, - number=18, - ) - saa_enrollment_response: SaaEnrollmentResponse = proto.Field( - proto.MESSAGE, - number=20, - message=SaaEnrollmentResponse, - ) - compliant_but_disallowed_services: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=24, - ) - - -class CreateWorkloadOperationMetadata(proto.Message): - r"""Operation metadata to give request details of CreateWorkload. - - Attributes: - create_time (google.protobuf.timestamp_pb2.Timestamp): - Optional. 
Time when the operation was - created. - display_name (str): - Optional. The display name of the workload. - parent (str): - Optional. The parent of the workload. - compliance_regime (google.cloud.assuredworkloads_v1beta1.types.Workload.ComplianceRegime): - Optional. Compliance controls that should be - applied to the resources managed by the - workload. - resource_settings (MutableSequence[google.cloud.assuredworkloads_v1beta1.types.Workload.ResourceSettings]): - Optional. Resource properties in the input - that are used for creating/customizing workload - resources. - """ - - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - parent: str = proto.Field( - proto.STRING, - number=3, - ) - compliance_regime: 'Workload.ComplianceRegime' = proto.Field( - proto.ENUM, - number=4, - enum='Workload.ComplianceRegime', - ) - resource_settings: MutableSequence['Workload.ResourceSettings'] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='Workload.ResourceSettings', - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/types/assuredworkloads_service.py b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/types/assuredworkloads_service.py deleted file mode 100644 index c96387143c1d..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/google/cloud/assuredworkloads_v1beta1/types/assuredworkloads_service.py +++ /dev/null @@ -1,26 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.assuredworkloads.v1beta1', - manifest={ - }, -) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/mypy.ini b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/mypy.ini deleted file mode 100644 index 574c5aed394b..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/noxfile.py b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/noxfile.py deleted file mode 100644 index ec3fe96b96bd..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/noxfile.py +++ /dev/null @@ -1,280 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import os
-import pathlib
-import re
-import shutil
-import subprocess
-import sys
-
-
-import nox  # type: ignore
-
-ALL_PYTHON = [
-    "3.7",
-    "3.8",
-    "3.9",
-    "3.10",
-    "3.11",
-    "3.12",
-    "3.13",
-]
-
-CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
-
-LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt"
-PACKAGE_NAME = 'google-cloud-assured-workloads'
-
-BLACK_VERSION = "black==22.3.0"
-BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"]
-DEFAULT_PYTHON_VERSION = "3.13"
-
-nox.sessions = [
-    "unit",
-    "cover",
-    "mypy",
-    "check_lower_bounds",
-    # exclude update_lower_bounds from default
-    "docs",
-    "blacken",
-    "lint",
-    "prerelease_deps",
-]
-
-@nox.session(python=ALL_PYTHON)
-@nox.parametrize(
-    "protobuf_implementation",
-    [ "python", "upb", "cpp" ],
-)
-def unit(session, protobuf_implementation):
-    """Run the unit test suite."""
-
-    if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
-        session.skip("cpp implementation is not supported in python 3.11+")
-
-    session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"')
-    session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt")
-
-    # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped.
-    # The 'cpp' implementation requires Protobuf<4.
-    if protobuf_implementation == "cpp":
-        session.install("protobuf<4")
-
-    session.run(
-        'py.test',
-        '--quiet',
-        '--cov=google/cloud/assuredworkloads_v1beta1/',
-        '--cov=tests/',
-        '--cov-config=.coveragerc',
-        '--cov-report=term',
-        '--cov-report=html',
-        os.path.join('tests', 'unit', ''.join(session.posargs)),
-        env={
-            "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
-        },
-    )
-
-@nox.session(python=ALL_PYTHON[-1])
-@nox.parametrize(
-    "protobuf_implementation",
-    [ "python", "upb", "cpp" ],
-)
-def prerelease_deps(session, protobuf_implementation):
-    """Run the unit test suite against pre-release versions of dependencies."""
-
-    if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
-        session.skip("cpp implementation is not supported in python 3.11+")
-
-    # Install test environment dependencies
-    session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"')
-
-    # Install the package without dependencies
-    session.install('-e', '.', '--no-deps')
-
-    # We test the minimum dependency versions using the minimum Python
-    # version, so the lowest python runtime that we test has a corresponding
-    # constraints file, located at `testing/constraints-<minimum-python-version>.txt`,
-    # which contains all of the dependencies and extras.
-    with open(
-        CURRENT_DIRECTORY
-        / "testing"
-        / f"constraints-{ALL_PYTHON[0]}.txt",
-        encoding="utf-8",
-    ) as constraints_file:
-        constraints_text = constraints_file.read()
-
-    # Ignore leading whitespace and comment lines.
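# A hedged illustration of what the extraction below yields (assuming a
# constraints file of exact pins such as "google-auth==2.14.1"; the
# lookahead keeps only the package name and drops the "==<version>" pin,
# and comment lines never match):
#
#     re.findall(r"^\s*(\S+)(?===\S+)",
#                "google-auth==2.14.1\n# a comment\n",
#                flags=re.MULTILINE)
#     # -> ["google-auth"]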
-    constraints_deps = [
-        match.group(1)
-        for match in re.finditer(
-            r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE
-        )
-    ]
-
-    session.install(*constraints_deps)
-
-    prerel_deps = [
-        "googleapis-common-protos",
-        "google-api-core",
-        "google-auth",
-        # Exclude grpcio 1.67.0rc1, which does not support python 3.13
-        "grpcio!=1.67.0rc1",
-        "grpcio-status",
-        "protobuf",
-        "proto-plus",
-    ]
-
-    for dep in prerel_deps:
-        session.install("--pre", "--no-deps", "--upgrade", dep)
-
-    # Remaining dependencies
-    other_deps = [
-        "requests",
-    ]
-    session.install(*other_deps)
-
-    # Print out prerelease package versions
-
-    session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)")
-    session.run("python", "-c", "import google.auth; print(google.auth.__version__)")
-    session.run("python", "-c", "import grpc; print(grpc.__version__)")
-    session.run(
-        "python", "-c", "import google.protobuf; print(google.protobuf.__version__)"
-    )
-    session.run(
-        "python", "-c", "import proto; print(proto.__version__)"
-    )
-
-    session.run(
-        'py.test',
-        '--quiet',
-        '--cov=google/cloud/assuredworkloads_v1beta1/',
-        '--cov=tests/',
-        '--cov-config=.coveragerc',
-        '--cov-report=term',
-        '--cov-report=html',
-        os.path.join('tests', 'unit', ''.join(session.posargs)),
-        env={
-            "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
-        },
-    )
-
-
-@nox.session(python=DEFAULT_PYTHON_VERSION)
-def cover(session):
-    """Run the final coverage report.
-    This outputs the coverage report aggregating coverage from the unit
-    test runs (not system test runs), and then erases coverage data.
-    """
-    session.install("coverage", "pytest-cov")
-    session.run("coverage", "report", "--show-missing", "--fail-under=100")
-
-    session.run("coverage", "erase")
-
-
-@nox.session(python=ALL_PYTHON)
-def mypy(session):
-    """Run the type checker."""
-    session.install(
-        'mypy',
-        'types-requests',
-        'types-protobuf'
-    )
-    session.install('.')
-    session.run(
-        'mypy',
-        '-p',
-        'google',
-    )
-
-
-@nox.session
-def update_lower_bounds(session):
-    """Update lower bounds in constraints.txt to match setup.py"""
-    session.install('google-cloud-testutils')
-    session.install('.')
-
-    session.run(
-        'lower-bound-checker',
-        'update',
-        '--package-name',
-        PACKAGE_NAME,
-        '--constraints-file',
-        str(LOWER_BOUND_CONSTRAINTS_FILE),
-    )
-
-
-@nox.session
-def check_lower_bounds(session):
-    """Check lower bounds in setup.py are reflected in constraints file"""
-    session.install('google-cloud-testutils')
-    session.install('.')
-
-    session.run(
-        'lower-bound-checker',
-        'check',
-        '--package-name',
-        PACKAGE_NAME,
-        '--constraints-file',
-        str(LOWER_BOUND_CONSTRAINTS_FILE),
-    )
-
-@nox.session(python=DEFAULT_PYTHON_VERSION)
-def docs(session):
-    """Build the docs for this library."""
-
-    session.install("-e", ".")
-    session.install("sphinx==7.0.1", "alabaster", "recommonmark")
-
-    shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
-    session.run(
-        "sphinx-build",
-        "-W",  # warnings as errors
-        "-T",  # show full traceback on exception
-        "-N",  # no colors
-        "-b",
-        "html",
-        "-d",
-        os.path.join("docs", "_build", "doctrees", ""),
-        os.path.join("docs", ""),
-        os.path.join("docs", "_build", "html", ""),
-    )
-
-
-@nox.session(python=DEFAULT_PYTHON_VERSION)
-def lint(session):
-    """Run linters.
-
-    Returns a failure if the linters find linting errors or sufficiently
-    serious code quality issues.
- """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/assuredworkloads_v1beta1_generated_assured_workloads_service_analyze_workload_move_async.py b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/assuredworkloads_v1beta1_generated_assured_workloads_service_analyze_workload_move_async.py deleted file mode 100644 index 4389b3e64730..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/assuredworkloads_v1beta1_generated_assured_workloads_service_analyze_workload_move_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AnalyzeWorkloadMove -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-assured-workloads - - -# [START assuredworkloads_v1beta1_generated_AssuredWorkloadsService_AnalyzeWorkloadMove_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import assuredworkloads_v1beta1 - - -async def sample_analyze_workload_move(): - # Create a client - client = assuredworkloads_v1beta1.AssuredWorkloadsServiceAsyncClient() - - # Initialize request argument(s) - request = assuredworkloads_v1beta1.AnalyzeWorkloadMoveRequest( - source="source_value", - target="target_value", - ) - - # Make the request - response = await client.analyze_workload_move(request=request) - - # Handle the response - print(response) - -# [END assuredworkloads_v1beta1_generated_AssuredWorkloadsService_AnalyzeWorkloadMove_async] diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/assuredworkloads_v1beta1_generated_assured_workloads_service_analyze_workload_move_sync.py b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/assuredworkloads_v1beta1_generated_assured_workloads_service_analyze_workload_move_sync.py deleted file mode 100644 index d0d7b5189865..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/assuredworkloads_v1beta1_generated_assured_workloads_service_analyze_workload_move_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AnalyzeWorkloadMove -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-assured-workloads - - -# [START assuredworkloads_v1beta1_generated_AssuredWorkloadsService_AnalyzeWorkloadMove_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import assuredworkloads_v1beta1 - - -def sample_analyze_workload_move(): - # Create a client - client = assuredworkloads_v1beta1.AssuredWorkloadsServiceClient() - - # Initialize request argument(s) - request = assuredworkloads_v1beta1.AnalyzeWorkloadMoveRequest( - source="source_value", - target="target_value", - ) - - # Make the request - response = client.analyze_workload_move(request=request) - - # Handle the response - print(response) - -# [END assuredworkloads_v1beta1_generated_AssuredWorkloadsService_AnalyzeWorkloadMove_sync] diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/assuredworkloads_v1beta1_generated_assured_workloads_service_create_workload_async.py b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/assuredworkloads_v1beta1_generated_assured_workloads_service_create_workload_async.py deleted file mode 100644 index 4ff69a0c2003..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/assuredworkloads_v1beta1_generated_assured_workloads_service_create_workload_async.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateWorkload -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-assured-workloads - - -# [START assuredworkloads_v1beta1_generated_AssuredWorkloadsService_CreateWorkload_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
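# A hedged note on the fields set in this snippet: compliance_regime
# accepts the enum by name, and regime-specific CMEK settings can be
# attached through the matching member of the compliance_regime_settings
# oneof on Workload (setting one member clears the others). Sketch only;
# the KMSSettings payload is elided here:
#
#     workload.fedramp_moderate_settings = assuredworkloads_v1beta1.Workload.FedrampModerateSettings(
#         kms_settings=assuredworkloads_v1beta1.Workload.KMSSettings(...),
#     )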
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import assuredworkloads_v1beta1 - - -async def sample_create_workload(): - # Create a client - client = assuredworkloads_v1beta1.AssuredWorkloadsServiceAsyncClient() - - # Initialize request argument(s) - workload = assuredworkloads_v1beta1.Workload() - workload.display_name = "display_name_value" - workload.compliance_regime = "AU_REGIONS_AND_US_SUPPORT" - - request = assuredworkloads_v1beta1.CreateWorkloadRequest( - parent="parent_value", - workload=workload, - ) - - # Make the request - operation = client.create_workload(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END assuredworkloads_v1beta1_generated_AssuredWorkloadsService_CreateWorkload_async] diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/assuredworkloads_v1beta1_generated_assured_workloads_service_create_workload_sync.py b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/assuredworkloads_v1beta1_generated_assured_workloads_service_create_workload_sync.py deleted file mode 100644 index c09df3f942e7..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/assuredworkloads_v1beta1_generated_assured_workloads_service_create_workload_sync.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateWorkload -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-assured-workloads - - -# [START assuredworkloads_v1beta1_generated_AssuredWorkloadsService_CreateWorkload_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import assuredworkloads_v1beta1 - - -def sample_create_workload(): - # Create a client - client = assuredworkloads_v1beta1.AssuredWorkloadsServiceClient() - - # Initialize request argument(s) - workload = assuredworkloads_v1beta1.Workload() - workload.display_name = "display_name_value" - workload.compliance_regime = "AU_REGIONS_AND_US_SUPPORT" - - request = assuredworkloads_v1beta1.CreateWorkloadRequest( - parent="parent_value", - workload=workload, - ) - - # Make the request - operation = client.create_workload(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END assuredworkloads_v1beta1_generated_AssuredWorkloadsService_CreateWorkload_sync] diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/assuredworkloads_v1beta1_generated_assured_workloads_service_delete_workload_async.py b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/assuredworkloads_v1beta1_generated_assured_workloads_service_delete_workload_async.py deleted file mode 100644 index 226041be159a..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/assuredworkloads_v1beta1_generated_assured_workloads_service_delete_workload_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteWorkload -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-assured-workloads - - -# [START assuredworkloads_v1beta1_generated_AssuredWorkloadsService_DeleteWorkload_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
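# A hedged sketch of a concrete request for this snippet (the resource
# name is a hypothetical placeholder; etag, when set, guards the delete
# against concurrent updates per DeleteWorkloadRequest):
#
#     request = assuredworkloads_v1beta1.DeleteWorkloadRequest(
#         name="organizations/123/locations/us-central1/workloads/wl-1",
#         etag="etag_value",
#     )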
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import assuredworkloads_v1beta1 - - -async def sample_delete_workload(): - # Create a client - client = assuredworkloads_v1beta1.AssuredWorkloadsServiceAsyncClient() - - # Initialize request argument(s) - request = assuredworkloads_v1beta1.DeleteWorkloadRequest( - name="name_value", - ) - - # Make the request - await client.delete_workload(request=request) - - -# [END assuredworkloads_v1beta1_generated_AssuredWorkloadsService_DeleteWorkload_async] diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/assuredworkloads_v1beta1_generated_assured_workloads_service_delete_workload_sync.py b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/assuredworkloads_v1beta1_generated_assured_workloads_service_delete_workload_sync.py deleted file mode 100644 index 18ae35194713..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/assuredworkloads_v1beta1_generated_assured_workloads_service_delete_workload_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteWorkload -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-assured-workloads - - -# [START assuredworkloads_v1beta1_generated_AssuredWorkloadsService_DeleteWorkload_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import assuredworkloads_v1beta1 - - -def sample_delete_workload(): - # Create a client - client = assuredworkloads_v1beta1.AssuredWorkloadsServiceClient() - - # Initialize request argument(s) - request = assuredworkloads_v1beta1.DeleteWorkloadRequest( - name="name_value", - ) - - # Make the request - client.delete_workload(request=request) - - -# [END assuredworkloads_v1beta1_generated_AssuredWorkloadsService_DeleteWorkload_sync] diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/assuredworkloads_v1beta1_generated_assured_workloads_service_get_workload_async.py b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/assuredworkloads_v1beta1_generated_assured_workloads_service_get_workload_async.py deleted file mode 100644 index 4b4f57100cfb..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/assuredworkloads_v1beta1_generated_assured_workloads_service_get_workload_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetWorkload -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-assured-workloads - - -# [START assuredworkloads_v1beta1_generated_AssuredWorkloadsService_GetWorkload_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import assuredworkloads_v1beta1 - - -async def sample_get_workload(): - # Create a client - client = assuredworkloads_v1beta1.AssuredWorkloadsServiceAsyncClient() - - # Initialize request argument(s) - request = assuredworkloads_v1beta1.GetWorkloadRequest( - name="name_value", - ) - - # Make the request - response = await client.get_workload(request=request) - - # Handle the response - print(response) - -# [END assuredworkloads_v1beta1_generated_AssuredWorkloadsService_GetWorkload_async] diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/assuredworkloads_v1beta1_generated_assured_workloads_service_get_workload_sync.py b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/assuredworkloads_v1beta1_generated_assured_workloads_service_get_workload_sync.py deleted file mode 100644 index 61145ed0156d..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/assuredworkloads_v1beta1_generated_assured_workloads_service_get_workload_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetWorkload -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-assured-workloads - - -# [START assuredworkloads_v1beta1_generated_AssuredWorkloadsService_GetWorkload_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import assuredworkloads_v1beta1 - - -def sample_get_workload(): - # Create a client - client = assuredworkloads_v1beta1.AssuredWorkloadsServiceClient() - - # Initialize request argument(s) - request = assuredworkloads_v1beta1.GetWorkloadRequest( - name="name_value", - ) - - # Make the request - response = client.get_workload(request=request) - - # Handle the response - print(response) - -# [END assuredworkloads_v1beta1_generated_AssuredWorkloadsService_GetWorkload_sync] diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/assuredworkloads_v1beta1_generated_assured_workloads_service_list_workloads_async.py b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/assuredworkloads_v1beta1_generated_assured_workloads_service_list_workloads_async.py deleted file mode 100644 index 31fa39362f8d..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/assuredworkloads_v1beta1_generated_assured_workloads_service_list_workloads_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListWorkloads -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-assured-workloads - - -# [START assuredworkloads_v1beta1_generated_AssuredWorkloadsService_ListWorkloads_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
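# A hedged note: the pager returned below can also be consumed
# page-by-page rather than item-by-item (``pages`` is part of the
# standard google-api-core async pager surface):
#
#     page_result = client.list_workloads(request=request)
#     async for page in page_result.pages:
#         for workload in page.workloads:
#             print(workload.name)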
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import assuredworkloads_v1beta1 - - -async def sample_list_workloads(): - # Create a client - client = assuredworkloads_v1beta1.AssuredWorkloadsServiceAsyncClient() - - # Initialize request argument(s) - request = assuredworkloads_v1beta1.ListWorkloadsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_workloads(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END assuredworkloads_v1beta1_generated_AssuredWorkloadsService_ListWorkloads_async] diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/assuredworkloads_v1beta1_generated_assured_workloads_service_list_workloads_sync.py b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/assuredworkloads_v1beta1_generated_assured_workloads_service_list_workloads_sync.py deleted file mode 100644 index 79dfe6626195..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/assuredworkloads_v1beta1_generated_assured_workloads_service_list_workloads_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListWorkloads -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-assured-workloads - - -# [START assuredworkloads_v1beta1_generated_AssuredWorkloadsService_ListWorkloads_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import assuredworkloads_v1beta1 - - -def sample_list_workloads(): - # Create a client - client = assuredworkloads_v1beta1.AssuredWorkloadsServiceClient() - - # Initialize request argument(s) - request = assuredworkloads_v1beta1.ListWorkloadsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_workloads(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END assuredworkloads_v1beta1_generated_AssuredWorkloadsService_ListWorkloads_sync] diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/assuredworkloads_v1beta1_generated_assured_workloads_service_restrict_allowed_resources_async.py b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/assuredworkloads_v1beta1_generated_assured_workloads_service_restrict_allowed_resources_async.py deleted file mode 100644 index 8d62ebff4d61..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/assuredworkloads_v1beta1_generated_assured_workloads_service_restrict_allowed_resources_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RestrictAllowedResources -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-assured-workloads - - -# [START assuredworkloads_v1beta1_generated_AssuredWorkloadsService_RestrictAllowedResources_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
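# A hedged note: restriction_type may also be passed as the enum value
# itself rather than its string name (proto-plus accepts both forms):
#
#     RT = assuredworkloads_v1beta1.RestrictAllowedResourcesRequest.RestrictionType
#     request = assuredworkloads_v1beta1.RestrictAllowedResourcesRequest(
#         name="name_value",
#         restriction_type=RT.ALLOW_COMPLIANT_RESOURCES,
#     )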
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import assuredworkloads_v1beta1 - - -async def sample_restrict_allowed_resources(): - # Create a client - client = assuredworkloads_v1beta1.AssuredWorkloadsServiceAsyncClient() - - # Initialize request argument(s) - request = assuredworkloads_v1beta1.RestrictAllowedResourcesRequest( - name="name_value", - restriction_type="ALLOW_COMPLIANT_RESOURCES", - ) - - # Make the request - response = await client.restrict_allowed_resources(request=request) - - # Handle the response - print(response) - -# [END assuredworkloads_v1beta1_generated_AssuredWorkloadsService_RestrictAllowedResources_async] diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/assuredworkloads_v1beta1_generated_assured_workloads_service_restrict_allowed_resources_sync.py b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/assuredworkloads_v1beta1_generated_assured_workloads_service_restrict_allowed_resources_sync.py deleted file mode 100644 index ad7363e985bb..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/assuredworkloads_v1beta1_generated_assured_workloads_service_restrict_allowed_resources_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RestrictAllowedResources -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-assured-workloads - - -# [START assuredworkloads_v1beta1_generated_AssuredWorkloadsService_RestrictAllowedResources_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import assuredworkloads_v1beta1 - - -def sample_restrict_allowed_resources(): - # Create a client - client = assuredworkloads_v1beta1.AssuredWorkloadsServiceClient() - - # Initialize request argument(s) - request = assuredworkloads_v1beta1.RestrictAllowedResourcesRequest( - name="name_value", - restriction_type="ALLOW_COMPLIANT_RESOURCES", - ) - - # Make the request - response = client.restrict_allowed_resources(request=request) - - # Handle the response - print(response) - -# [END assuredworkloads_v1beta1_generated_AssuredWorkloadsService_RestrictAllowedResources_sync] diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/assuredworkloads_v1beta1_generated_assured_workloads_service_update_workload_async.py b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/assuredworkloads_v1beta1_generated_assured_workloads_service_update_workload_async.py deleted file mode 100644 index 7d37ecbc5215..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/assuredworkloads_v1beta1_generated_assured_workloads_service_update_workload_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateWorkload -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-assured-workloads - - -# [START assuredworkloads_v1beta1_generated_AssuredWorkloadsService_UpdateWorkload_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
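# A hedged note: UpdateWorkloadRequest also carries an update_mask that
# limits which fields are written (the standard FieldMask pattern,
# assuming only display_name should change):
#
#     from google.protobuf import field_mask_pb2
#     request = assuredworkloads_v1beta1.UpdateWorkloadRequest(
#         workload=workload,
#         update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
#     )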
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import assuredworkloads_v1beta1 - - -async def sample_update_workload(): - # Create a client - client = assuredworkloads_v1beta1.AssuredWorkloadsServiceAsyncClient() - - # Initialize request argument(s) - workload = assuredworkloads_v1beta1.Workload() - workload.display_name = "display_name_value" - workload.compliance_regime = "AU_REGIONS_AND_US_SUPPORT" - - request = assuredworkloads_v1beta1.UpdateWorkloadRequest( - workload=workload, - ) - - # Make the request - response = await client.update_workload(request=request) - - # Handle the response - print(response) - -# [END assuredworkloads_v1beta1_generated_AssuredWorkloadsService_UpdateWorkload_async] diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/assuredworkloads_v1beta1_generated_assured_workloads_service_update_workload_sync.py b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/assuredworkloads_v1beta1_generated_assured_workloads_service_update_workload_sync.py deleted file mode 100644 index 993042487402..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/assuredworkloads_v1beta1_generated_assured_workloads_service_update_workload_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateWorkload -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-assured-workloads - - -# [START assuredworkloads_v1beta1_generated_AssuredWorkloadsService_UpdateWorkload_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import assuredworkloads_v1beta1 - - -def sample_update_workload(): - # Create a client - client = assuredworkloads_v1beta1.AssuredWorkloadsServiceClient() - - # Initialize request argument(s) - workload = assuredworkloads_v1beta1.Workload() - workload.display_name = "display_name_value" - workload.compliance_regime = "AU_REGIONS_AND_US_SUPPORT" - - request = assuredworkloads_v1beta1.UpdateWorkloadRequest( - workload=workload, - ) - - # Make the request - response = client.update_workload(request=request) - - # Handle the response - print(response) - -# [END assuredworkloads_v1beta1_generated_AssuredWorkloadsService_UpdateWorkload_sync] diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1beta1.json b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1beta1.json deleted file mode 100644 index d4f3e3793a9e..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1beta1.json +++ /dev/null @@ -1,1152 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.assuredworkloads.v1beta1", - "version": "v1beta1" - } - ], - "language": "PYTHON", - "name": "google-cloud-assured-workloads", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.assuredworkloads_v1beta1.AssuredWorkloadsServiceAsyncClient", - "shortName": "AssuredWorkloadsServiceAsyncClient" - }, - "fullName": "google.cloud.assuredworkloads_v1beta1.AssuredWorkloadsServiceAsyncClient.analyze_workload_move", - "method": { - "fullName": "google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService.AnalyzeWorkloadMove", - "service": { - "fullName": "google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService", - "shortName": "AssuredWorkloadsService" - }, - "shortName": "AnalyzeWorkloadMove" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.assuredworkloads_v1beta1.types.AnalyzeWorkloadMoveRequest" - }, - { - "name": "project", - "type": "str" - }, - { - "name": "target", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.assuredworkloads_v1beta1.types.AnalyzeWorkloadMoveResponse", - "shortName": "analyze_workload_move" - }, - "description": "Sample for AnalyzeWorkloadMove", - "file": "assuredworkloads_v1beta1_generated_assured_workloads_service_analyze_workload_move_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "assuredworkloads_v1beta1_generated_AssuredWorkloadsService_AnalyzeWorkloadMove_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"assuredworkloads_v1beta1_generated_assured_workloads_service_analyze_workload_move_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.assuredworkloads_v1beta1.AssuredWorkloadsServiceClient", - "shortName": "AssuredWorkloadsServiceClient" - }, - "fullName": "google.cloud.assuredworkloads_v1beta1.AssuredWorkloadsServiceClient.analyze_workload_move", - "method": { - "fullName": "google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService.AnalyzeWorkloadMove", - "service": { - "fullName": "google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService", - "shortName": "AssuredWorkloadsService" - }, - "shortName": "AnalyzeWorkloadMove" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.assuredworkloads_v1beta1.types.AnalyzeWorkloadMoveRequest" - }, - { - "name": "project", - "type": "str" - }, - { - "name": "target", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.assuredworkloads_v1beta1.types.AnalyzeWorkloadMoveResponse", - "shortName": "analyze_workload_move" - }, - "description": "Sample for AnalyzeWorkloadMove", - "file": "assuredworkloads_v1beta1_generated_assured_workloads_service_analyze_workload_move_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "assuredworkloads_v1beta1_generated_AssuredWorkloadsService_AnalyzeWorkloadMove_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "assuredworkloads_v1beta1_generated_assured_workloads_service_analyze_workload_move_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.assuredworkloads_v1beta1.AssuredWorkloadsServiceAsyncClient", - "shortName": "AssuredWorkloadsServiceAsyncClient" - }, - "fullName": "google.cloud.assuredworkloads_v1beta1.AssuredWorkloadsServiceAsyncClient.create_workload", - "method": { - "fullName": "google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService.CreateWorkload", - "service": { - "fullName": "google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService", - "shortName": "AssuredWorkloadsService" - }, - "shortName": "CreateWorkload" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.assuredworkloads_v1beta1.types.CreateWorkloadRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "workload", - "type": "google.cloud.assuredworkloads_v1beta1.types.Workload" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_workload" - }, - "description": "Sample for CreateWorkload", - "file": "assuredworkloads_v1beta1_generated_assured_workloads_service_create_workload_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"assuredworkloads_v1beta1_generated_AssuredWorkloadsService_CreateWorkload_async", - "segments": [ - { - "end": 60, - "start": 27, - "type": "FULL" - }, - { - "end": 60, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 57, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 61, - "start": 58, - "type": "RESPONSE_HANDLING" - } - ], - "title": "assuredworkloads_v1beta1_generated_assured_workloads_service_create_workload_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.assuredworkloads_v1beta1.AssuredWorkloadsServiceClient", - "shortName": "AssuredWorkloadsServiceClient" - }, - "fullName": "google.cloud.assuredworkloads_v1beta1.AssuredWorkloadsServiceClient.create_workload", - "method": { - "fullName": "google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService.CreateWorkload", - "service": { - "fullName": "google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService", - "shortName": "AssuredWorkloadsService" - }, - "shortName": "CreateWorkload" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.assuredworkloads_v1beta1.types.CreateWorkloadRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "workload", - "type": "google.cloud.assuredworkloads_v1beta1.types.Workload" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_workload" - }, - "description": "Sample for CreateWorkload", - "file": "assuredworkloads_v1beta1_generated_assured_workloads_service_create_workload_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "assuredworkloads_v1beta1_generated_AssuredWorkloadsService_CreateWorkload_sync", - "segments": [ - { - "end": 60, - "start": 27, - "type": "FULL" - }, - { - "end": 60, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 57, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 61, - "start": 58, - "type": "RESPONSE_HANDLING" - } - ], - "title": "assuredworkloads_v1beta1_generated_assured_workloads_service_create_workload_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.assuredworkloads_v1beta1.AssuredWorkloadsServiceAsyncClient", - "shortName": "AssuredWorkloadsServiceAsyncClient" - }, - "fullName": "google.cloud.assuredworkloads_v1beta1.AssuredWorkloadsServiceAsyncClient.delete_workload", - "method": { - "fullName": "google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService.DeleteWorkload", - "service": { - "fullName": "google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService", - "shortName": "AssuredWorkloadsService" - }, - "shortName": "DeleteWorkload" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.assuredworkloads_v1beta1.types.DeleteWorkloadRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": 
"delete_workload" - }, - "description": "Sample for DeleteWorkload", - "file": "assuredworkloads_v1beta1_generated_assured_workloads_service_delete_workload_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "assuredworkloads_v1beta1_generated_AssuredWorkloadsService_DeleteWorkload_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "assuredworkloads_v1beta1_generated_assured_workloads_service_delete_workload_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.assuredworkloads_v1beta1.AssuredWorkloadsServiceClient", - "shortName": "AssuredWorkloadsServiceClient" - }, - "fullName": "google.cloud.assuredworkloads_v1beta1.AssuredWorkloadsServiceClient.delete_workload", - "method": { - "fullName": "google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService.DeleteWorkload", - "service": { - "fullName": "google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService", - "shortName": "AssuredWorkloadsService" - }, - "shortName": "DeleteWorkload" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.assuredworkloads_v1beta1.types.DeleteWorkloadRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_workload" - }, - "description": "Sample for DeleteWorkload", - "file": "assuredworkloads_v1beta1_generated_assured_workloads_service_delete_workload_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "assuredworkloads_v1beta1_generated_AssuredWorkloadsService_DeleteWorkload_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "assuredworkloads_v1beta1_generated_assured_workloads_service_delete_workload_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.assuredworkloads_v1beta1.AssuredWorkloadsServiceAsyncClient", - "shortName": "AssuredWorkloadsServiceAsyncClient" - }, - "fullName": "google.cloud.assuredworkloads_v1beta1.AssuredWorkloadsServiceAsyncClient.get_workload", - "method": { - "fullName": "google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService.GetWorkload", - "service": { - "fullName": "google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService", - "shortName": "AssuredWorkloadsService" - }, - "shortName": "GetWorkload" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.assuredworkloads_v1beta1.types.GetWorkloadRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - 
"resultType": "google.cloud.assuredworkloads_v1beta1.types.Workload", - "shortName": "get_workload" - }, - "description": "Sample for GetWorkload", - "file": "assuredworkloads_v1beta1_generated_assured_workloads_service_get_workload_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "assuredworkloads_v1beta1_generated_AssuredWorkloadsService_GetWorkload_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "assuredworkloads_v1beta1_generated_assured_workloads_service_get_workload_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.assuredworkloads_v1beta1.AssuredWorkloadsServiceClient", - "shortName": "AssuredWorkloadsServiceClient" - }, - "fullName": "google.cloud.assuredworkloads_v1beta1.AssuredWorkloadsServiceClient.get_workload", - "method": { - "fullName": "google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService.GetWorkload", - "service": { - "fullName": "google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService", - "shortName": "AssuredWorkloadsService" - }, - "shortName": "GetWorkload" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.assuredworkloads_v1beta1.types.GetWorkloadRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.assuredworkloads_v1beta1.types.Workload", - "shortName": "get_workload" - }, - "description": "Sample for GetWorkload", - "file": "assuredworkloads_v1beta1_generated_assured_workloads_service_get_workload_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "assuredworkloads_v1beta1_generated_AssuredWorkloadsService_GetWorkload_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "assuredworkloads_v1beta1_generated_assured_workloads_service_get_workload_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.assuredworkloads_v1beta1.AssuredWorkloadsServiceAsyncClient", - "shortName": "AssuredWorkloadsServiceAsyncClient" - }, - "fullName": "google.cloud.assuredworkloads_v1beta1.AssuredWorkloadsServiceAsyncClient.list_workloads", - "method": { - "fullName": "google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService.ListWorkloads", - "service": { - "fullName": "google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService", - "shortName": "AssuredWorkloadsService" - }, - "shortName": "ListWorkloads" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.assuredworkloads_v1beta1.types.ListWorkloadsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": 
"google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.assuredworkloads_v1beta1.services.assured_workloads_service.pagers.ListWorkloadsAsyncPager", - "shortName": "list_workloads" - }, - "description": "Sample for ListWorkloads", - "file": "assuredworkloads_v1beta1_generated_assured_workloads_service_list_workloads_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "assuredworkloads_v1beta1_generated_AssuredWorkloadsService_ListWorkloads_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "assuredworkloads_v1beta1_generated_assured_workloads_service_list_workloads_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.assuredworkloads_v1beta1.AssuredWorkloadsServiceClient", - "shortName": "AssuredWorkloadsServiceClient" - }, - "fullName": "google.cloud.assuredworkloads_v1beta1.AssuredWorkloadsServiceClient.list_workloads", - "method": { - "fullName": "google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService.ListWorkloads", - "service": { - "fullName": "google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService", - "shortName": "AssuredWorkloadsService" - }, - "shortName": "ListWorkloads" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.assuredworkloads_v1beta1.types.ListWorkloadsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.assuredworkloads_v1beta1.services.assured_workloads_service.pagers.ListWorkloadsPager", - "shortName": "list_workloads" - }, - "description": "Sample for ListWorkloads", - "file": "assuredworkloads_v1beta1_generated_assured_workloads_service_list_workloads_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "assuredworkloads_v1beta1_generated_AssuredWorkloadsService_ListWorkloads_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "assuredworkloads_v1beta1_generated_assured_workloads_service_list_workloads_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.assuredworkloads_v1beta1.AssuredWorkloadsServiceAsyncClient", - "shortName": "AssuredWorkloadsServiceAsyncClient" - }, - "fullName": "google.cloud.assuredworkloads_v1beta1.AssuredWorkloadsServiceAsyncClient.restrict_allowed_resources", - "method": { - "fullName": "google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService.RestrictAllowedResources", - "service": { - "fullName": 
"google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService", - "shortName": "AssuredWorkloadsService" - }, - "shortName": "RestrictAllowedResources" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.assuredworkloads_v1beta1.types.RestrictAllowedResourcesRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.assuredworkloads_v1beta1.types.RestrictAllowedResourcesResponse", - "shortName": "restrict_allowed_resources" - }, - "description": "Sample for RestrictAllowedResources", - "file": "assuredworkloads_v1beta1_generated_assured_workloads_service_restrict_allowed_resources_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "assuredworkloads_v1beta1_generated_AssuredWorkloadsService_RestrictAllowedResources_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "assuredworkloads_v1beta1_generated_assured_workloads_service_restrict_allowed_resources_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.assuredworkloads_v1beta1.AssuredWorkloadsServiceClient", - "shortName": "AssuredWorkloadsServiceClient" - }, - "fullName": "google.cloud.assuredworkloads_v1beta1.AssuredWorkloadsServiceClient.restrict_allowed_resources", - "method": { - "fullName": "google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService.RestrictAllowedResources", - "service": { - "fullName": "google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService", - "shortName": "AssuredWorkloadsService" - }, - "shortName": "RestrictAllowedResources" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.assuredworkloads_v1beta1.types.RestrictAllowedResourcesRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.assuredworkloads_v1beta1.types.RestrictAllowedResourcesResponse", - "shortName": "restrict_allowed_resources" - }, - "description": "Sample for RestrictAllowedResources", - "file": "assuredworkloads_v1beta1_generated_assured_workloads_service_restrict_allowed_resources_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "assuredworkloads_v1beta1_generated_AssuredWorkloadsService_RestrictAllowedResources_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "assuredworkloads_v1beta1_generated_assured_workloads_service_restrict_allowed_resources_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": 
"google.cloud.assuredworkloads_v1beta1.AssuredWorkloadsServiceAsyncClient", - "shortName": "AssuredWorkloadsServiceAsyncClient" - }, - "fullName": "google.cloud.assuredworkloads_v1beta1.AssuredWorkloadsServiceAsyncClient.update_workload", - "method": { - "fullName": "google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService.UpdateWorkload", - "service": { - "fullName": "google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService", - "shortName": "AssuredWorkloadsService" - }, - "shortName": "UpdateWorkload" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.assuredworkloads_v1beta1.types.UpdateWorkloadRequest" - }, - { - "name": "workload", - "type": "google.cloud.assuredworkloads_v1beta1.types.Workload" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.assuredworkloads_v1beta1.types.Workload", - "shortName": "update_workload" - }, - "description": "Sample for UpdateWorkload", - "file": "assuredworkloads_v1beta1_generated_assured_workloads_service_update_workload_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "assuredworkloads_v1beta1_generated_AssuredWorkloadsService_UpdateWorkload_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "assuredworkloads_v1beta1_generated_assured_workloads_service_update_workload_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.assuredworkloads_v1beta1.AssuredWorkloadsServiceClient", - "shortName": "AssuredWorkloadsServiceClient" - }, - "fullName": "google.cloud.assuredworkloads_v1beta1.AssuredWorkloadsServiceClient.update_workload", - "method": { - "fullName": "google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService.UpdateWorkload", - "service": { - "fullName": "google.cloud.assuredworkloads.v1beta1.AssuredWorkloadsService", - "shortName": "AssuredWorkloadsService" - }, - "shortName": "UpdateWorkload" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.assuredworkloads_v1beta1.types.UpdateWorkloadRequest" - }, - { - "name": "workload", - "type": "google.cloud.assuredworkloads_v1beta1.types.Workload" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.assuredworkloads_v1beta1.types.Workload", - "shortName": "update_workload" - }, - "description": "Sample for UpdateWorkload", - "file": "assuredworkloads_v1beta1_generated_assured_workloads_service_update_workload_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "assuredworkloads_v1beta1_generated_AssuredWorkloadsService_UpdateWorkload_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, 
- "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "assuredworkloads_v1beta1_generated_assured_workloads_service_update_workload_sync.py" - } - ] -} diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/scripts/fixup_assuredworkloads_v1beta1_keywords.py b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/scripts/fixup_assuredworkloads_v1beta1_keywords.py deleted file mode 100644 index 2d5d6ffca5d0..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/scripts/fixup_assuredworkloads_v1beta1_keywords.py +++ /dev/null @@ -1,182 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class assuredworkloadsCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'analyze_workload_move': ('target', 'source', 'project', ), - 'create_workload': ('parent', 'workload', 'external_id', ), - 'delete_workload': ('name', 'etag', ), - 'get_workload': ('name', ), - 'list_workloads': ('parent', 'page_size', 'page_token', 'filter', ), - 'restrict_allowed_resources': ('name', 'restriction_type', ), - 'update_workload': ('workload', 'update_mask', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. 
- return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=assuredworkloadsCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the assuredworkloads client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. 
-""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/setup.py b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/setup.py deleted file mode 100644 index d6c9dac04ce9..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/setup.py +++ /dev/null @@ -1,98 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-assured-workloads' - - -description = "Google Cloud Assured Workloads API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/assuredworkloads/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", -] -extras = { -} -url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-assured-workloads" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - extras_require=extras, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/testing/constraints-3.10.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/testing/constraints-3.10.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/testing/constraints-3.11.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/testing/constraints-3.11.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/testing/constraints-3.12.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/testing/constraints-3.12.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/testing/constraints-3.13.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/testing/constraints-3.13.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/testing/constraints-3.7.txt deleted file mode 100644 index fc812592b0ee..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/testing/constraints-3.7.txt +++ /dev/null @@ -1,10 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -proto-plus==1.22.3 -protobuf==3.20.2 diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/testing/constraints-3.8.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/testing/constraints-3.8.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/testing/constraints-3.9.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/testing/constraints-3.9.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
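# --- Editor's note (annotation; not part of the deleted constraints files) ---
# Of the constraints files removed here, only constraints-3.7.txt pins exact
# versions: per its own header, it pins each dependency to the lower bound
# declared in setup.py (e.g. google-api-core==1.34.1 against ">= 1.34.1") so
# tests verify the declared minimums actually work. The 3.8-3.13 files leave
# google-api-core, proto-plus and protobuf unpinned, so unit tests run against
# whatever current releases the resolver selects.
# -----------------------------------------------------------------------------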
-google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/tests/__init__.py b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/tests/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/tests/unit/__init__.py b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/tests/unit/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/tests/unit/gapic/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/tests/unit/gapic/assuredworkloads_v1beta1/__init__.py b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/tests/unit/gapic/assuredworkloads_v1beta1/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/tests/unit/gapic/assuredworkloads_v1beta1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/tests/unit/gapic/assuredworkloads_v1beta1/test_assured_workloads_service.py b/owl-bot-staging/google-cloud-assured-workloads/v1beta1/tests/unit/gapic/assuredworkloads_v1beta1/test_assured_workloads_service.py deleted file mode 100644 index 89361fa09448..000000000000 --- a/owl-bot-staging/google-cloud-assured-workloads/v1beta1/tests/unit/gapic/assuredworkloads_v1beta1/test_assured_workloads_service.py +++ /dev/null @@ -1,5543 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation -from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.assuredworkloads_v1beta1.services.assured_workloads_service import AssuredWorkloadsServiceAsyncClient -from google.cloud.assuredworkloads_v1beta1.services.assured_workloads_service import AssuredWorkloadsServiceClient -from google.cloud.assuredworkloads_v1beta1.services.assured_workloads_service import pagers -from google.cloud.assuredworkloads_v1beta1.services.assured_workloads_service import transports -from google.cloud.assuredworkloads_v1beta1.types import assuredworkloads -from 
google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -import google.auth - - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": "service account credentials", - "principal": "service-account@example.com", -} -CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert AssuredWorkloadsServiceClient._get_default_mtls_endpoint(None) is None - assert AssuredWorkloadsServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert AssuredWorkloadsServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert AssuredWorkloadsServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert AssuredWorkloadsServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert AssuredWorkloadsServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert AssuredWorkloadsServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert AssuredWorkloadsServiceClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert AssuredWorkloadsServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - AssuredWorkloadsServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` 
must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert AssuredWorkloadsServiceClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert AssuredWorkloadsServiceClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert AssuredWorkloadsServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - AssuredWorkloadsServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert AssuredWorkloadsServiceClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert AssuredWorkloadsServiceClient._get_client_cert_source(None, False) is None - assert AssuredWorkloadsServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert AssuredWorkloadsServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert AssuredWorkloadsServiceClient._get_client_cert_source(None, True) is mock_default_cert_source - assert AssuredWorkloadsServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(AssuredWorkloadsServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssuredWorkloadsServiceClient)) -@mock.patch.object(AssuredWorkloadsServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssuredWorkloadsServiceAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = AssuredWorkloadsServiceClient._DEFAULT_UNIVERSE - default_endpoint = AssuredWorkloadsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = AssuredWorkloadsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert AssuredWorkloadsServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert AssuredWorkloadsServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == AssuredWorkloadsServiceClient.DEFAULT_MTLS_ENDPOINT - assert AssuredWorkloadsServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert AssuredWorkloadsServiceClient._get_api_endpoint(None, None, default_universe, "always") == AssuredWorkloadsServiceClient.DEFAULT_MTLS_ENDPOINT - assert AssuredWorkloadsServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == AssuredWorkloadsServiceClient.DEFAULT_MTLS_ENDPOINT - assert AssuredWorkloadsServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert 
AssuredWorkloadsServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - AssuredWorkloadsServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." - - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert AssuredWorkloadsServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert AssuredWorkloadsServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert AssuredWorkloadsServiceClient._get_universe_domain(None, None) == AssuredWorkloadsServiceClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - AssuredWorkloadsServiceClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." - -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = AssuredWorkloadsServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - -@pytest.mark.parametrize("error_code", [401,403,404,500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = AssuredWorkloadsServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - -@pytest.mark.parametrize("client_class,transport_name", [ - (AssuredWorkloadsServiceClient, "grpc"), - (AssuredWorkloadsServiceAsyncClient, "grpc_asyncio"), - (AssuredWorkloadsServiceClient, "rest"), -]) -def test_assured_workloads_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'assuredworkloads.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://assuredworkloads.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.AssuredWorkloadsServiceGrpcTransport, "grpc"), - (transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.AssuredWorkloadsServiceRestTransport, "rest"), -]) -def 
test_assured_workloads_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (AssuredWorkloadsServiceClient, "grpc"), - (AssuredWorkloadsServiceAsyncClient, "grpc_asyncio"), - (AssuredWorkloadsServiceClient, "rest"), -]) -def test_assured_workloads_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'assuredworkloads.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://assuredworkloads.googleapis.com' - ) - - -def test_assured_workloads_service_client_get_transport_class(): - transport = AssuredWorkloadsServiceClient.get_transport_class() - available_transports = [ - transports.AssuredWorkloadsServiceGrpcTransport, - transports.AssuredWorkloadsServiceRestTransport, - ] - assert transport in available_transports - - transport = AssuredWorkloadsServiceClient.get_transport_class("grpc") - assert transport == transports.AssuredWorkloadsServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (AssuredWorkloadsServiceClient, transports.AssuredWorkloadsServiceGrpcTransport, "grpc"), - (AssuredWorkloadsServiceAsyncClient, transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (AssuredWorkloadsServiceClient, transports.AssuredWorkloadsServiceRestTransport, "rest"), -]) -@mock.patch.object(AssuredWorkloadsServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssuredWorkloadsServiceClient)) -@mock.patch.object(AssuredWorkloadsServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssuredWorkloadsServiceAsyncClient)) -def test_assured_workloads_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(AssuredWorkloadsServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(AssuredWorkloadsServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. 
- options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
-        with pytest.raises(ValueError) as excinfo:
-            client = client_class(transport=transport_name)
-    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
-
-    # Check the case quota_project_id is provided
-    options = client_options.ClientOptions(quota_project_id="octopus")
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(client_options=options, transport=transport_name)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file=None,
-            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id="octopus",
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience=None,
-        )
-    # Check the case api_audience is provided
-    options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(client_options=options, transport=transport_name)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file=None,
-            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id=None,
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience="https://language.googleapis.com"
-        )
-
-@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
-    (AssuredWorkloadsServiceClient, transports.AssuredWorkloadsServiceGrpcTransport, "grpc", "true"),
-    (AssuredWorkloadsServiceAsyncClient, transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"),
-    (AssuredWorkloadsServiceClient, transports.AssuredWorkloadsServiceGrpcTransport, "grpc", "false"),
-    (AssuredWorkloadsServiceAsyncClient, transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"),
-    (AssuredWorkloadsServiceClient, transports.AssuredWorkloadsServiceRestTransport, "rest", "true"),
-    (AssuredWorkloadsServiceClient, transports.AssuredWorkloadsServiceRestTransport, "rest", "false"),
-])
-@mock.patch.object(AssuredWorkloadsServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssuredWorkloadsServiceClient))
-@mock.patch.object(AssuredWorkloadsServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssuredWorkloadsServiceAsyncClient))
-@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
-def test_assured_workloads_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
-    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
-    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
-
-    # Check the case client_cert_source is provided. Whether client cert is used depends on
-    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - AssuredWorkloadsServiceClient, AssuredWorkloadsServiceAsyncClient -]) -@mock.patch.object(AssuredWorkloadsServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AssuredWorkloadsServiceClient)) -@mock.patch.object(AssuredWorkloadsServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AssuredWorkloadsServiceAsyncClient)) -def test_assured_workloads_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - AssuredWorkloadsServiceClient, AssuredWorkloadsServiceAsyncClient -]) -@mock.patch.object(AssuredWorkloadsServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssuredWorkloadsServiceClient)) -@mock.patch.object(AssuredWorkloadsServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssuredWorkloadsServiceAsyncClient)) -def test_assured_workloads_service_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = AssuredWorkloadsServiceClient._DEFAULT_UNIVERSE - default_endpoint = AssuredWorkloadsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = AssuredWorkloadsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
- options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (AssuredWorkloadsServiceClient, transports.AssuredWorkloadsServiceGrpcTransport, "grpc"), - (AssuredWorkloadsServiceAsyncClient, transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (AssuredWorkloadsServiceClient, transports.AssuredWorkloadsServiceRestTransport, "rest"), -]) -def test_assured_workloads_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (AssuredWorkloadsServiceClient, transports.AssuredWorkloadsServiceGrpcTransport, "grpc", grpc_helpers), - (AssuredWorkloadsServiceAsyncClient, transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (AssuredWorkloadsServiceClient, transports.AssuredWorkloadsServiceRestTransport, "rest", None), -]) -def test_assured_workloads_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_assured_workloads_service_client_client_options_from_dict(): - with mock.patch('google.cloud.assuredworkloads_v1beta1.services.assured_workloads_service.transports.AssuredWorkloadsServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = AssuredWorkloadsServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (AssuredWorkloadsServiceClient, transports.AssuredWorkloadsServiceGrpcTransport, "grpc", grpc_helpers), - (AssuredWorkloadsServiceAsyncClient, transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_assured_workloads_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
-    with mock.patch.object(
-        google.auth, "load_credentials_from_file", autospec=True
-    ) as load_creds, mock.patch.object(
-        google.auth, "default", autospec=True
-    ) as adc, mock.patch.object(
-        grpc_helpers, "create_channel"
-    ) as create_channel:
-        creds = ga_credentials.AnonymousCredentials()
-        file_creds = ga_credentials.AnonymousCredentials()
-        load_creds.return_value = (file_creds, None)
-        adc.return_value = (creds, None)
-        client = client_class(client_options=options, transport=transport_name)
-        create_channel.assert_called_with(
-            "assuredworkloads.googleapis.com:443",
-            credentials=file_creds,
-            credentials_file=None,
-            quota_project_id=None,
-            default_scopes=(
-                'https://www.googleapis.com/auth/cloud-platform',
-            ),
-            scopes=None,
-            default_host="assuredworkloads.googleapis.com",
-            ssl_credentials=None,
-            options=[
-                ("grpc.max_send_message_length", -1),
-                ("grpc.max_receive_message_length", -1),
-            ],
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    assuredworkloads.CreateWorkloadRequest,
-    dict,
-])
-def test_create_workload(request_type, transport: str = 'grpc'):
-    client = AssuredWorkloadsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_workload),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/spam')
-        response = client.create_workload(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = assuredworkloads.CreateWorkloadRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, future.Future)
-
-
-def test_create_workload_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = AssuredWorkloadsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = assuredworkloads.CreateWorkloadRequest(
-        parent='parent_value',
-        external_id='external_id_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_workload),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client.create_workload(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == assuredworkloads.CreateWorkloadRequest( - parent='parent_value', - external_id='external_id_value', - ) - -def test_create_workload_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_workload in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_workload] = mock_rpc - request = {} - client.create_workload(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_workload(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_workload_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_workload in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_workload] = mock_rpc - - request = {} - await client.create_workload(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_workload(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_workload_async(transport: str = 'grpc_asyncio', request_type=assuredworkloads.CreateWorkloadRequest): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_workload), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_workload(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = assuredworkloads.CreateWorkloadRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_workload_async_from_dict(): - await test_create_workload_async(request_type=dict) - -def test_create_workload_field_headers(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = assuredworkloads.CreateWorkloadRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_workload), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_workload(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_workload_field_headers_async(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = assuredworkloads.CreateWorkloadRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_workload), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_workload(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_workload_flattened(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_workload), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
-        client.create_workload(
-            parent='parent_value',
-            workload=assuredworkloads.Workload(name='name_value'),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].workload
-        mock_val = assuredworkloads.Workload(name='name_value')
-        assert arg == mock_val
-
-
-def test_create_workload_flattened_error():
-    client = AssuredWorkloadsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.create_workload(
-            assuredworkloads.CreateWorkloadRequest(),
-            parent='parent_value',
-            workload=assuredworkloads.Workload(name='name_value'),
-        )
-
-@pytest.mark.asyncio
-async def test_create_workload_flattened_async():
-    client = AssuredWorkloadsServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_workload),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.create_workload(
-            parent='parent_value',
-            workload=assuredworkloads.Workload(name='name_value'),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].workload
-        mock_val = assuredworkloads.Workload(name='name_value')
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_workload_flattened_error_async():
-    client = AssuredWorkloadsServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.create_workload(
-            assuredworkloads.CreateWorkloadRequest(),
-            parent='parent_value',
-            workload=assuredworkloads.Workload(name='name_value'),
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    assuredworkloads.UpdateWorkloadRequest,
-    dict,
-])
-def test_update_workload(request_type, transport: str = 'grpc'):
-    client = AssuredWorkloadsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_workload),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
- call.return_value = assuredworkloads.Workload( - name='name_value', - display_name='display_name_value', - compliance_regime=assuredworkloads.Workload.ComplianceRegime.IL4, - billing_account='billing_account_value', - etag='etag_value', - provisioned_resources_parent='provisioned_resources_parent_value', - kaj_enrollment_state=assuredworkloads.Workload.KajEnrollmentState.KAJ_ENROLLMENT_STATE_PENDING, - enable_sovereign_controls=True, - compliant_but_disallowed_services=['compliant_but_disallowed_services_value'], - ) - response = client.update_workload(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = assuredworkloads.UpdateWorkloadRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, assuredworkloads.Workload) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.compliance_regime == assuredworkloads.Workload.ComplianceRegime.IL4 - assert response.billing_account == 'billing_account_value' - assert response.etag == 'etag_value' - assert response.provisioned_resources_parent == 'provisioned_resources_parent_value' - assert response.kaj_enrollment_state == assuredworkloads.Workload.KajEnrollmentState.KAJ_ENROLLMENT_STATE_PENDING - assert response.enable_sovereign_controls is True - assert response.compliant_but_disallowed_services == ['compliant_but_disallowed_services_value'] - - -def test_update_workload_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = assuredworkloads.UpdateWorkloadRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_workload), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_workload(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == assuredworkloads.UpdateWorkloadRequest( - ) - -def test_update_workload_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_workload in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_workload] = mock_rpc - request = {} - client.update_workload(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.update_workload(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_update_workload_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = AssuredWorkloadsServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.update_workload in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.update_workload] = mock_rpc
-
-        request = {}
-        await client.update_workload(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.update_workload(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_update_workload_async(transport: str = 'grpc_asyncio', request_type=assuredworkloads.UpdateWorkloadRequest):
-    client = AssuredWorkloadsServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_workload),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(assuredworkloads.Workload(
-            name='name_value',
-            display_name='display_name_value',
-            compliance_regime=assuredworkloads.Workload.ComplianceRegime.IL4,
-            billing_account='billing_account_value',
-            etag='etag_value',
-            provisioned_resources_parent='provisioned_resources_parent_value',
-            kaj_enrollment_state=assuredworkloads.Workload.KajEnrollmentState.KAJ_ENROLLMENT_STATE_PENDING,
-            enable_sovereign_controls=True,
-            compliant_but_disallowed_services=['compliant_but_disallowed_services_value'],
-        ))
-        response = await client.update_workload(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = assuredworkloads.UpdateWorkloadRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, assuredworkloads.Workload)
-    assert response.name == 'name_value'
-    assert response.display_name == 'display_name_value'
-    assert response.compliance_regime == assuredworkloads.Workload.ComplianceRegime.IL4
-    assert response.billing_account == 'billing_account_value'
-    assert response.etag == 'etag_value'
-    assert response.provisioned_resources_parent == 'provisioned_resources_parent_value'
-    assert response.kaj_enrollment_state == assuredworkloads.Workload.KajEnrollmentState.KAJ_ENROLLMENT_STATE_PENDING
-    assert response.enable_sovereign_controls is True
-    assert response.compliant_but_disallowed_services == ['compliant_but_disallowed_services_value']
-
-
-@pytest.mark.asyncio
-async def test_update_workload_async_from_dict():
-    await test_update_workload_async(request_type=dict)
-
-
-def test_update_workload_flattened():
-    client = AssuredWorkloadsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_workload),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = assuredworkloads.Workload()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.update_workload(
-            workload=assuredworkloads.Workload(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].workload
-        mock_val = assuredworkloads.Workload(name='name_value')
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-
-def test_update_workload_flattened_error():
-    client = AssuredWorkloadsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.update_workload(
-            assuredworkloads.UpdateWorkloadRequest(),
-            workload=assuredworkloads.Workload(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-@pytest.mark.asyncio
-async def test_update_workload_flattened_async():
-    client = AssuredWorkloadsServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_workload),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(assuredworkloads.Workload())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.update_workload(
-            workload=assuredworkloads.Workload(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].workload - mock_val = assuredworkloads.Workload(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_workload_flattened_error_async(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_workload( - assuredworkloads.UpdateWorkloadRequest(), - workload=assuredworkloads.Workload(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - assuredworkloads.RestrictAllowedResourcesRequest, - dict, -]) -def test_restrict_allowed_resources(request_type, transport: str = 'grpc'): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restrict_allowed_resources), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = assuredworkloads.RestrictAllowedResourcesResponse( - ) - response = client.restrict_allowed_resources(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = assuredworkloads.RestrictAllowedResourcesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, assuredworkloads.RestrictAllowedResourcesResponse) - - -def test_restrict_allowed_resources_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = assuredworkloads.RestrictAllowedResourcesRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restrict_allowed_resources), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.restrict_allowed_resources(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == assuredworkloads.RestrictAllowedResourcesRequest( - name='name_value', - ) - -def test_restrict_allowed_resources_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.restrict_allowed_resources in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.restrict_allowed_resources] = mock_rpc - request = {} - client.restrict_allowed_resources(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.restrict_allowed_resources(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_restrict_allowed_resources_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.restrict_allowed_resources in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.restrict_allowed_resources] = mock_rpc - - request = {} - await client.restrict_allowed_resources(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.restrict_allowed_resources(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_restrict_allowed_resources_async(transport: str = 'grpc_asyncio', request_type=assuredworkloads.RestrictAllowedResourcesRequest): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restrict_allowed_resources), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(assuredworkloads.RestrictAllowedResourcesResponse(
-        ))
-        response = await client.restrict_allowed_resources(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = assuredworkloads.RestrictAllowedResourcesRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, assuredworkloads.RestrictAllowedResourcesResponse)
-
-
-@pytest.mark.asyncio
-async def test_restrict_allowed_resources_async_from_dict():
-    await test_restrict_allowed_resources_async(request_type=dict)
-
-def test_restrict_allowed_resources_field_headers():
-    client = AssuredWorkloadsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = assuredworkloads.RestrictAllowedResourcesRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.restrict_allowed_resources),
-            '__call__') as call:
-        call.return_value = assuredworkloads.RestrictAllowedResourcesResponse()
-        client.restrict_allowed_resources(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_restrict_allowed_resources_field_headers_async():
-    client = AssuredWorkloadsServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = assuredworkloads.RestrictAllowedResourcesRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.restrict_allowed_resources),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(assuredworkloads.RestrictAllowedResourcesResponse())
-        await client.restrict_allowed_resources(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.parametrize("request_type", [
-    assuredworkloads.DeleteWorkloadRequest,
-    dict,
-])
-def test_delete_workload(request_type, transport: str = 'grpc'):
-    client = AssuredWorkloadsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_workload),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        response = client.delete_workload(request)
-
-        # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = assuredworkloads.DeleteWorkloadRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_workload_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = assuredworkloads.DeleteWorkloadRequest( - name='name_value', - etag='etag_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_workload), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_workload(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == assuredworkloads.DeleteWorkloadRequest( - name='name_value', - etag='etag_value', - ) - -def test_delete_workload_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_workload in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_workload] = mock_rpc - request = {} - client.delete_workload(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_workload(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_workload_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_workload in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_workload] = mock_rpc - - request = {} - await client.delete_workload(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.delete_workload(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_workload_async(transport: str = 'grpc_asyncio', request_type=assuredworkloads.DeleteWorkloadRequest): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_workload), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_workload(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = assuredworkloads.DeleteWorkloadRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_workload_async_from_dict(): - await test_delete_workload_async(request_type=dict) - -def test_delete_workload_field_headers(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = assuredworkloads.DeleteWorkloadRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_workload), - '__call__') as call: - call.return_value = None - client.delete_workload(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_workload_field_headers_async(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = assuredworkloads.DeleteWorkloadRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_workload), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_workload(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_workload_flattened(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_workload), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_workload( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_workload_flattened_error(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_workload( - assuredworkloads.DeleteWorkloadRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_workload_flattened_async(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_workload), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_workload( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_workload_flattened_error_async(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_workload( - assuredworkloads.DeleteWorkloadRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - assuredworkloads.GetWorkloadRequest, - dict, -]) -def test_get_workload(request_type, transport: str = 'grpc'): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_workload), - '__call__') as call: - # Designate an appropriate return value for the call. 
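# The ValueError contract checked by the *_flattened_error tests above, in
# miniature (hypothetical helper; the real check lives inside the generated
# client methods):
def check_mutually_exclusive(request, **flattened):
    # Either a request object or individual flattened fields may be given, never both.
    if request is not None and any(v is not None for v in flattened.values()):
        raise ValueError('If the `request` argument is set, then none of '
                         'the individual field arguments should be set.')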
- call.return_value = assuredworkloads.Workload( - name='name_value', - display_name='display_name_value', - compliance_regime=assuredworkloads.Workload.ComplianceRegime.IL4, - billing_account='billing_account_value', - etag='etag_value', - provisioned_resources_parent='provisioned_resources_parent_value', - kaj_enrollment_state=assuredworkloads.Workload.KajEnrollmentState.KAJ_ENROLLMENT_STATE_PENDING, - enable_sovereign_controls=True, - compliant_but_disallowed_services=['compliant_but_disallowed_services_value'], - ) - response = client.get_workload(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = assuredworkloads.GetWorkloadRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, assuredworkloads.Workload) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.compliance_regime == assuredworkloads.Workload.ComplianceRegime.IL4 - assert response.billing_account == 'billing_account_value' - assert response.etag == 'etag_value' - assert response.provisioned_resources_parent == 'provisioned_resources_parent_value' - assert response.kaj_enrollment_state == assuredworkloads.Workload.KajEnrollmentState.KAJ_ENROLLMENT_STATE_PENDING - assert response.enable_sovereign_controls is True - assert response.compliant_but_disallowed_services == ['compliant_but_disallowed_services_value'] - - -def test_get_workload_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = assuredworkloads.GetWorkloadRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_workload), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_workload(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == assuredworkloads.GetWorkloadRequest( - name='name_value', - ) - -def test_get_workload_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_workload in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_workload] = mock_rpc - request = {} - client.get_workload(request) - - # Establish that the underlying gRPC stub method was called. 
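# Hedged sketch of the AIP-4235 behavior the *_auto_populated_field tests rely
# on: an empty UUID4-annotated request field is filled in automatically, while
# a caller-supplied value passes through untouched (illustration only; the real
# population happens inside the GAPIC layer).
import uuid

def autopopulate_uuid4_field(value):
    return value if value else str(uuid.uuid4())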
- assert mock_rpc.call_count == 1 - - client.get_workload(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_workload_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_workload in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_workload] = mock_rpc - - request = {} - await client.get_workload(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_workload(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_workload_async(transport: str = 'grpc_asyncio', request_type=assuredworkloads.GetWorkloadRequest): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_workload), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(assuredworkloads.Workload( - name='name_value', - display_name='display_name_value', - compliance_regime=assuredworkloads.Workload.ComplianceRegime.IL4, - billing_account='billing_account_value', - etag='etag_value', - provisioned_resources_parent='provisioned_resources_parent_value', - kaj_enrollment_state=assuredworkloads.Workload.KajEnrollmentState.KAJ_ENROLLMENT_STATE_PENDING, - enable_sovereign_controls=True, - compliant_but_disallowed_services=['compliant_but_disallowed_services_value'], - )) - response = await client.get_workload(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = assuredworkloads.GetWorkloadRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, assuredworkloads.Workload) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.compliance_regime == assuredworkloads.Workload.ComplianceRegime.IL4 - assert response.billing_account == 'billing_account_value' - assert response.etag == 'etag_value' - assert response.provisioned_resources_parent == 'provisioned_resources_parent_value' - assert response.kaj_enrollment_state == assuredworkloads.Workload.KajEnrollmentState.KAJ_ENROLLMENT_STATE_PENDING - assert response.enable_sovereign_controls is True - assert response.compliant_but_disallowed_services == ['compliant_but_disallowed_services_value'] - - -@pytest.mark.asyncio -async def test_get_workload_async_from_dict(): - await test_get_workload_async(request_type=dict) - - -def test_get_workload_flattened(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_workload), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = assuredworkloads.Workload() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_workload( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_workload_flattened_error(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_workload( - assuredworkloads.GetWorkloadRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_workload_flattened_async(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_workload), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = assuredworkloads.Workload() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(assuredworkloads.Workload()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_workload( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_workload_flattened_error_async(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_workload( - assuredworkloads.GetWorkloadRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - assuredworkloads.AnalyzeWorkloadMoveRequest, - dict, -]) -def test_analyze_workload_move(request_type, transport: str = 'grpc'): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_workload_move), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = assuredworkloads.AnalyzeWorkloadMoveResponse( - blockers=['blockers_value'], - ) - response = client.analyze_workload_move(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = assuredworkloads.AnalyzeWorkloadMoveRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, assuredworkloads.AnalyzeWorkloadMoveResponse) - assert response.blockers == ['blockers_value'] - - -def test_analyze_workload_move_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = assuredworkloads.AnalyzeWorkloadMoveRequest( - source='source_value', - project='project_value', - target='target_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_workload_move), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.analyze_workload_move(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == assuredworkloads.AnalyzeWorkloadMoveRequest( - source='source_value', - project='project_value', - target='target_value', - ) - -def test_analyze_workload_move_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.analyze_workload_move in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
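# (The "foo" sentinel assigned to mock_rpc.return_value.name in these tests
# only exists so that clients which read an operation name off the response,
# e.g. compute-style surfaces, receive a string; none of the assertions
# inspect its value.)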
- client._transport._wrapped_methods[client._transport.analyze_workload_move] = mock_rpc - request = {} - client.analyze_workload_move(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.analyze_workload_move(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_analyze_workload_move_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.analyze_workload_move in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.analyze_workload_move] = mock_rpc - - request = {} - await client.analyze_workload_move(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.analyze_workload_move(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_analyze_workload_move_async(transport: str = 'grpc_asyncio', request_type=assuredworkloads.AnalyzeWorkloadMoveRequest): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_workload_move), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(assuredworkloads.AnalyzeWorkloadMoveResponse( - blockers=['blockers_value'], - )) - response = await client.analyze_workload_move(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = assuredworkloads.AnalyzeWorkloadMoveRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, assuredworkloads.AnalyzeWorkloadMoveResponse) - assert response.blockers == ['blockers_value'] - - -@pytest.mark.asyncio -async def test_analyze_workload_move_async_from_dict(): - await test_analyze_workload_move_async(request_type=dict) - - -def test_analyze_workload_move_flattened(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_workload_move), - '__call__') as call: - # Designate an appropriate return value for the call. 
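# (grpc_helpers_async.FakeUnaryUnaryCall, used throughout the async tests,
# wraps a plain value so that awaiting the mocked call yields it, mimicking a
# real unary-unary gRPC invocation.)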
- call.return_value = assuredworkloads.AnalyzeWorkloadMoveResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.analyze_workload_move( - project='project_value', - target='target_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].target - mock_val = 'target_value' - assert arg == mock_val - assert args[0].project == 'project_value' - - -def test_analyze_workload_move_flattened_error(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.analyze_workload_move( - assuredworkloads.AnalyzeWorkloadMoveRequest(), - project='project_value', - target='target_value', - ) - -@pytest.mark.asyncio -async def test_analyze_workload_move_flattened_async(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_workload_move), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = assuredworkloads.AnalyzeWorkloadMoveResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(assuredworkloads.AnalyzeWorkloadMoveResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.analyze_workload_move( - project='project_value', - target='target_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].target - mock_val = 'target_value' - assert arg == mock_val - assert args[0].project == 'project_value' - -@pytest.mark.asyncio -async def test_analyze_workload_move_flattened_error_async(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.analyze_workload_move( - assuredworkloads.AnalyzeWorkloadMoveRequest(), - project='project_value', - target='target_value', - ) - - -@pytest.mark.parametrize("request_type", [ - assuredworkloads.ListWorkloadsRequest, - dict, -]) -def test_list_workloads(request_type, transport: str = 'grpc'): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_workloads), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = assuredworkloads.ListWorkloadsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_workloads(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = assuredworkloads.ListWorkloadsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListWorkloadsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_workloads_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = assuredworkloads.ListWorkloadsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_workloads), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_workloads(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == assuredworkloads.ListWorkloadsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - ) - -def test_list_workloads_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_workloads in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_workloads] = mock_rpc - request = {} - client.list_workloads(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_workloads(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_workloads_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_workloads in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_workloads] = mock_rpc - - request = {} - await client.list_workloads(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_workloads(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_workloads_async(transport: str = 'grpc_asyncio', request_type=assuredworkloads.ListWorkloadsRequest): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_workloads), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(assuredworkloads.ListWorkloadsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_workloads(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = assuredworkloads.ListWorkloadsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListWorkloadsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_workloads_async_from_dict(): - await test_list_workloads_async(request_type=dict) - - -def test_list_workloads_flattened(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_workloads), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = assuredworkloads.ListWorkloadsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_workloads( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_workloads_flattened_error(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_workloads( - assuredworkloads.ListWorkloadsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_workloads_flattened_async(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_workloads), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = assuredworkloads.ListWorkloadsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(assuredworkloads.ListWorkloadsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_workloads( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_workloads_flattened_error_async(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_workloads( - assuredworkloads.ListWorkloadsRequest(), - parent='parent_value', - ) - - -def test_list_workloads_pager(transport_name: str = "grpc"): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_workloads), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - assuredworkloads.ListWorkloadsResponse( - workloads=[ - assuredworkloads.Workload(), - assuredworkloads.Workload(), - assuredworkloads.Workload(), - ], - next_page_token='abc', - ), - assuredworkloads.ListWorkloadsResponse( - workloads=[], - next_page_token='def', - ), - assuredworkloads.ListWorkloadsResponse( - workloads=[ - assuredworkloads.Workload(), - ], - next_page_token='ghi', - ), - assuredworkloads.ListWorkloadsResponse( - workloads=[ - assuredworkloads.Workload(), - assuredworkloads.Workload(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - pager = client.list_workloads(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, assuredworkloads.Workload) - for i in results) -def test_list_workloads_pages(transport_name: str = "grpc"): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
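# A token-driven iteration sketch of what the pager tests above drive
# (hypothetical helper working over plain dicts, not the pagers module): keep
# issuing the RPC, threading next_page_token through, until the token is empty.
def iterate_pages(call, request):
    token = ''
    while True:
        page = call(dict(request, page_token=token))
        yield page
        token = page.get('next_page_token', '')
        if not token:
            break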
- with mock.patch.object( - type(client.transport.list_workloads), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - assuredworkloads.ListWorkloadsResponse( - workloads=[ - assuredworkloads.Workload(), - assuredworkloads.Workload(), - assuredworkloads.Workload(), - ], - next_page_token='abc', - ), - assuredworkloads.ListWorkloadsResponse( - workloads=[], - next_page_token='def', - ), - assuredworkloads.ListWorkloadsResponse( - workloads=[ - assuredworkloads.Workload(), - ], - next_page_token='ghi', - ), - assuredworkloads.ListWorkloadsResponse( - workloads=[ - assuredworkloads.Workload(), - assuredworkloads.Workload(), - ], - ), - RuntimeError, - ) - pages = list(client.list_workloads(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_workloads_async_pager(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_workloads), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - assuredworkloads.ListWorkloadsResponse( - workloads=[ - assuredworkloads.Workload(), - assuredworkloads.Workload(), - assuredworkloads.Workload(), - ], - next_page_token='abc', - ), - assuredworkloads.ListWorkloadsResponse( - workloads=[], - next_page_token='def', - ), - assuredworkloads.ListWorkloadsResponse( - workloads=[ - assuredworkloads.Workload(), - ], - next_page_token='ghi', - ), - assuredworkloads.ListWorkloadsResponse( - workloads=[ - assuredworkloads.Workload(), - assuredworkloads.Workload(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_workloads(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, assuredworkloads.Workload) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_workloads_async_pages(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_workloads), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - assuredworkloads.ListWorkloadsResponse( - workloads=[ - assuredworkloads.Workload(), - assuredworkloads.Workload(), - assuredworkloads.Workload(), - ], - next_page_token='abc', - ), - assuredworkloads.ListWorkloadsResponse( - workloads=[], - next_page_token='def', - ), - assuredworkloads.ListWorkloadsResponse( - workloads=[ - assuredworkloads.Workload(), - ], - next_page_token='ghi', - ), - assuredworkloads.ListWorkloadsResponse( - workloads=[ - assuredworkloads.Workload(), - assuredworkloads.Workload(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_workloads(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_create_workload_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_workload in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_workload] = mock_rpc - - request = {} - client.create_workload(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_workload(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_workload_rest_required_fields(request_type=assuredworkloads.CreateWorkloadRequest): - transport_class = transports.AssuredWorkloadsServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_workload._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_workload._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("external_id", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_workload(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_workload_rest_unset_required_fields(): - transport = transports.AssuredWorkloadsServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_workload._get_unset_required_fields({}) - assert set(unset_fields) == (set(("externalId", )) & set(("parent", "workload", ))) - - -def test_create_workload_rest_flattened(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'organizations/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - workload=assuredworkloads.Workload(name='name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_workload(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
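# path_template.validate (from google.api_core) matches a URL against a
# resource-path template; a self-contained example of the check performed in
# the flattened REST tests below (example URLs are hypothetical):
from google.api_core import path_template

assert path_template.validate(
    'https://example.com/v1beta1/{parent=organizations/*/locations/*}/workloads',
    'https://example.com/v1beta1/organizations/org1/locations/us-central1/workloads')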
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1beta1/{parent=organizations/*/locations/*}/workloads" % client.transport._host, args[1]) - - -def test_create_workload_rest_flattened_error(transport: str = 'rest'): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_workload( - assuredworkloads.CreateWorkloadRequest(), - parent='parent_value', - workload=assuredworkloads.Workload(name='name_value'), - ) - - -def test_update_workload_rest_no_http_options(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = assuredworkloads.UpdateWorkloadRequest() - with pytest.raises(RuntimeError): - client.update_workload(request) - - -def test_restrict_allowed_resources_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.restrict_allowed_resources in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.restrict_allowed_resources] = mock_rpc - - request = {} - client.restrict_allowed_resources(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.restrict_allowed_resources(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_restrict_allowed_resources_rest_required_fields(request_type=assuredworkloads.RestrictAllowedResourcesRequest): - transport_class = transports.AssuredWorkloadsServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).restrict_allowed_resources._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).restrict_allowed_resources._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = assuredworkloads.RestrictAllowedResourcesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
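# (transcode(), mocked in these REST required-fields tests, normally maps a
# proto request onto an HTTP call: it picks the verb and URI from the matching
# http rule and splits the remaining fields between the body and the query
# string; mocking it lets the tests use placeholder URIs.)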
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = assuredworkloads.RestrictAllowedResourcesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.restrict_allowed_resources(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_restrict_allowed_resources_rest_unset_required_fields(): - transport = transports.AssuredWorkloadsServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.restrict_allowed_resources._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", "restrictionType", ))) - - -def test_delete_workload_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_workload in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_workload] = mock_rpc - - request = {} - client.delete_workload(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_workload(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_workload_rest_required_fields(request_type=assuredworkloads.DeleteWorkloadRequest): - transport_class = transports.AssuredWorkloadsServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_workload._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_workload._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("etag", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_workload(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_workload_rest_unset_required_fields(): - transport = transports.AssuredWorkloadsServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_workload._get_unset_required_fields({}) - assert set(unset_fields) == (set(("etag", )) & set(("name", ))) - - -def test_delete_workload_rest_flattened(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'organizations/sample1/locations/sample2/workloads/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_workload(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
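# The set identity asserted in *_rest_unset_required_fields above, worked out:
# "etag" is the only optional query-string field and "name" the only required
# field, so their intersection, i.e. required fields sent as query params, is empty.
assert set(('etag',)) & set(('name',)) == set()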
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1beta1/{name=organizations/*/locations/*/workloads/*}" % client.transport._host, args[1]) - - -def test_delete_workload_rest_flattened_error(transport: str = 'rest'): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_workload( - assuredworkloads.DeleteWorkloadRequest(), - name='name_value', - ) - - -def test_get_workload_rest_no_http_options(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = assuredworkloads.GetWorkloadRequest() - with pytest.raises(RuntimeError): - client.get_workload(request) - - -def test_analyze_workload_move_rest_no_http_options(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = assuredworkloads.AnalyzeWorkloadMoveRequest() - with pytest.raises(RuntimeError): - client.analyze_workload_move(request) - - -def test_list_workloads_rest_no_http_options(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = assuredworkloads.ListWorkloadsRequest() - with pytest.raises(RuntimeError): - client.list_workloads(request) - - -def test_update_workload_rest_error(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - # Since a `google.api.http` annotation is required for using a rest transport - # method, this should error. - with pytest.raises(NotImplementedError) as not_implemented_error: - client.update_workload({}) - assert ( - "Method UpdateWorkload is not available over REST transport" - in str(not_implemented_error.value) - ) - - -def test_get_workload_rest_error(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - # Since a `google.api.http` annotation is required for using a rest transport - # method, this should error. - with pytest.raises(NotImplementedError) as not_implemented_error: - client.get_workload({}) - assert ( - "Method GetWorkload is not available over REST transport" - in str(not_implemented_error.value) - ) - - -def test_analyze_workload_move_rest_error(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - # Since a `google.api.http` annotation is required for using a rest transport - # method, this should error. - with pytest.raises(NotImplementedError) as not_implemented_error: - client.analyze_workload_move({}) - assert ( - "Method AnalyzeWorkloadMove is not available over REST transport" - in str(not_implemented_error.value) - ) - - -def test_list_workloads_rest_error(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - # Since a `google.api.http` annotation is required for using a rest transport - # method, this should error. 
- with pytest.raises(NotImplementedError) as not_implemented_error: - client.list_workloads({}) - assert ( - "Method ListWorkloads is not available over REST transport" - in str(not_implemented_error.value) - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.AssuredWorkloadsServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.AssuredWorkloadsServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = AssuredWorkloadsServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.AssuredWorkloadsServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = AssuredWorkloadsServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = AssuredWorkloadsServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.AssuredWorkloadsServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = AssuredWorkloadsServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.AssuredWorkloadsServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = AssuredWorkloadsServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.AssuredWorkloadsServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.AssuredWorkloadsServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.AssuredWorkloadsServiceGrpcTransport, - transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, - transports.AssuredWorkloadsServiceRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
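# google.auth.default(), mocked in the ADC test just below, performs the
# Application Default Credentials lookup and returns a (credentials,
# project_id) pair; a minimal direct use looks like this (calling it requires
# ADC to be configured in the environment):
import google.auth

def load_adc():
    credentials, project_id = google.auth.default()
    return credentials, project_id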
- with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = AssuredWorkloadsServiceClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_workload_empty_call_grpc(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_workload), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_workload(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.CreateWorkloadRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_workload_empty_call_grpc(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_workload), - '__call__') as call: - call.return_value = assuredworkloads.Workload() - client.update_workload(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.UpdateWorkloadRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_restrict_allowed_resources_empty_call_grpc(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.restrict_allowed_resources), - '__call__') as call: - call.return_value = assuredworkloads.RestrictAllowedResourcesResponse() - client.restrict_allowed_resources(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.RestrictAllowedResourcesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_workload_empty_call_grpc(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_workload), - '__call__') as call: - call.return_value = None - client.delete_workload(request=None) - - # Establish that the underlying stub method was called. 
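-    # Each entry in mock_calls unpacks as a (name, args, kwargs) triple,
-    # which is what the tuple assignment below relies on.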
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.DeleteWorkloadRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_workload_empty_call_grpc(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_workload), - '__call__') as call: - call.return_value = assuredworkloads.Workload() - client.get_workload(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.GetWorkloadRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_analyze_workload_move_empty_call_grpc(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.analyze_workload_move), - '__call__') as call: - call.return_value = assuredworkloads.AnalyzeWorkloadMoveResponse() - client.analyze_workload_move(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.AnalyzeWorkloadMoveRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_workloads_empty_call_grpc(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_workloads), - '__call__') as call: - call.return_value = assuredworkloads.ListWorkloadsResponse() - client.list_workloads(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.ListWorkloadsRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = AssuredWorkloadsServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_workload_empty_call_grpc_asyncio(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_workload), - '__call__') as call: - # Designate an appropriate return value for the call. 
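-        # grpc_helpers_async.FakeUnaryUnaryCall wraps the message in an
-        # awaitable, standing in for the UnaryUnaryCall object a real async
-        # gRPC stub would return.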
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_workload(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.CreateWorkloadRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_workload_empty_call_grpc_asyncio(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_workload), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(assuredworkloads.Workload( - name='name_value', - display_name='display_name_value', - compliance_regime=assuredworkloads.Workload.ComplianceRegime.IL4, - billing_account='billing_account_value', - etag='etag_value', - provisioned_resources_parent='provisioned_resources_parent_value', - kaj_enrollment_state=assuredworkloads.Workload.KajEnrollmentState.KAJ_ENROLLMENT_STATE_PENDING, - enable_sovereign_controls=True, - compliant_but_disallowed_services=['compliant_but_disallowed_services_value'], - )) - await client.update_workload(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.UpdateWorkloadRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_restrict_allowed_resources_empty_call_grpc_asyncio(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.restrict_allowed_resources), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(assuredworkloads.RestrictAllowedResourcesResponse( - )) - await client.restrict_allowed_resources(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.RestrictAllowedResourcesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_workload_empty_call_grpc_asyncio(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_workload), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_workload(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.DeleteWorkloadRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_workload_empty_call_grpc_asyncio(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_workload), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(assuredworkloads.Workload( - name='name_value', - display_name='display_name_value', - compliance_regime=assuredworkloads.Workload.ComplianceRegime.IL4, - billing_account='billing_account_value', - etag='etag_value', - provisioned_resources_parent='provisioned_resources_parent_value', - kaj_enrollment_state=assuredworkloads.Workload.KajEnrollmentState.KAJ_ENROLLMENT_STATE_PENDING, - enable_sovereign_controls=True, - compliant_but_disallowed_services=['compliant_but_disallowed_services_value'], - )) - await client.get_workload(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.GetWorkloadRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_analyze_workload_move_empty_call_grpc_asyncio(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.analyze_workload_move), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(assuredworkloads.AnalyzeWorkloadMoveResponse( - blockers=['blockers_value'], - )) - await client.analyze_workload_move(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.AnalyzeWorkloadMoveRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_workloads_empty_call_grpc_asyncio(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_workloads), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(assuredworkloads.ListWorkloadsResponse( - next_page_token='next_page_token_value', - )) - await client.list_workloads(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.ListWorkloadsRequest() - - assert args[0] == request_msg - - -def test_transport_kind_rest(): - transport = AssuredWorkloadsServiceClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" - - -def test_create_workload_rest_bad_request(request_type=assuredworkloads.CreateWorkloadRequest): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_workload(request) - - -@pytest.mark.parametrize("request_type", [ - assuredworkloads.CreateWorkloadRequest, - dict, -]) -def test_create_workload_rest_call_success(request_type): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'organizations/sample1/locations/sample2'} - request_init["workload"] = {'name': 'name_value', 'display_name': 'display_name_value', 'resources': [{'resource_id': 1172, 'resource_type': 1}], 'compliance_regime': 1, 'create_time': {'seconds': 751, 'nanos': 543}, 'billing_account': 'billing_account_value', 'il4_settings': {'kms_settings': {'next_rotation_time': {}, 'rotation_period': {'seconds': 751, 'nanos': 543}}}, 'cjis_settings': {'kms_settings': {}}, 'fedramp_high_settings': {'kms_settings': {}}, 'fedramp_moderate_settings': {'kms_settings': {}}, 'etag': 'etag_value', 'labels': {}, 'provisioned_resources_parent': 'provisioned_resources_parent_value', 'kms_settings': {}, 'resource_settings': [{'resource_id': 'resource_id_value', 'resource_type': 1, 'display_name': 'display_name_value'}], 'kaj_enrollment_state': 1, 'enable_sovereign_controls': True, 'saa_enrollment_response': {'setup_status': 1, 'setup_errors': [1]}, 'compliant_but_disallowed_services': ['compliant_but_disallowed_services_value1', 'compliant_but_disallowed_services_value2']} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = assuredworkloads.CreateWorkloadRequest.meta.fields["workload"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
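-        # Proto-plus message types expose their fields via `meta.fields`,
-        # while vanilla protobuf types expose them via `DESCRIPTOR.fields`;
-        # the branch below tells the two apart by probing for DESCRIPTOR.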
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["workload"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["workload"][field])): - del request_init["workload"][field][i][subfield] - else: - del request_init["workload"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_workload(request) - - # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_workload_rest_interceptors(null_interceptor): - transport = transports.AssuredWorkloadsServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssuredWorkloadsServiceRestInterceptor(), - ) - client = AssuredWorkloadsServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.AssuredWorkloadsServiceRestInterceptor, "post_create_workload") as post, \ - mock.patch.object(transports.AssuredWorkloadsServiceRestInterceptor, "post_create_workload_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssuredWorkloadsServiceRestInterceptor, "pre_create_workload") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = assuredworkloads.CreateWorkloadRequest.pb(assuredworkloads.CreateWorkloadRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = assuredworkloads.CreateWorkloadRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.create_workload(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_workload_rest_error(): - - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - with pytest.raises(NotImplementedError) as not_implemented_error: - client.update_workload({}) - assert ( - "Method UpdateWorkload is not available over REST transport" - in str(not_implemented_error.value) - ) - - -def test_restrict_allowed_resources_rest_bad_request(request_type=assuredworkloads.RestrictAllowedResourcesRequest): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/locations/sample2/workloads/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
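-    # An HTTP 400 response is surfaced by api-core as
-    # google.api_core.exceptions.BadRequest, which the assertion expects.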
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.restrict_allowed_resources(request) - - -@pytest.mark.parametrize("request_type", [ - assuredworkloads.RestrictAllowedResourcesRequest, - dict, -]) -def test_restrict_allowed_resources_rest_call_success(request_type): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/locations/sample2/workloads/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = assuredworkloads.RestrictAllowedResourcesResponse( - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = assuredworkloads.RestrictAllowedResourcesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.restrict_allowed_resources(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, assuredworkloads.RestrictAllowedResourcesResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_restrict_allowed_resources_rest_interceptors(null_interceptor): - transport = transports.AssuredWorkloadsServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssuredWorkloadsServiceRestInterceptor(), - ) - client = AssuredWorkloadsServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssuredWorkloadsServiceRestInterceptor, "post_restrict_allowed_resources") as post, \ - mock.patch.object(transports.AssuredWorkloadsServiceRestInterceptor, "post_restrict_allowed_resources_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AssuredWorkloadsServiceRestInterceptor, "pre_restrict_allowed_resources") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = assuredworkloads.RestrictAllowedResourcesRequest.pb(assuredworkloads.RestrictAllowedResourcesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = assuredworkloads.RestrictAllowedResourcesResponse.to_json(assuredworkloads.RestrictAllowedResourcesResponse()) - req.return_value.content = return_value - - request = assuredworkloads.RestrictAllowedResourcesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = assuredworkloads.RestrictAllowedResourcesResponse() - post_with_metadata.return_value = assuredworkloads.RestrictAllowedResourcesResponse(), metadata - - client.restrict_allowed_resources(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_workload_rest_bad_request(request_type=assuredworkloads.DeleteWorkloadRequest): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/locations/sample2/workloads/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_workload(request) - - -@pytest.mark.parametrize("request_type", [ - assuredworkloads.DeleteWorkloadRequest, - dict, -]) -def test_delete_workload_rest_call_success(request_type): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'organizations/sample1/locations/sample2/workloads/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_workload(request) - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_workload_rest_interceptors(null_interceptor): - transport = transports.AssuredWorkloadsServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssuredWorkloadsServiceRestInterceptor(), - ) - client = AssuredWorkloadsServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssuredWorkloadsServiceRestInterceptor, "pre_delete_workload") as pre: - pre.assert_not_called() - pb_message = assuredworkloads.DeleteWorkloadRequest.pb(assuredworkloads.DeleteWorkloadRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = assuredworkloads.DeleteWorkloadRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_workload(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_get_workload_rest_error(): - - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - with pytest.raises(NotImplementedError) as not_implemented_error: - client.get_workload({}) - assert ( - "Method GetWorkload is not available over REST transport" - in str(not_implemented_error.value) - ) - - -def test_analyze_workload_move_rest_error(): - - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - with pytest.raises(NotImplementedError) as not_implemented_error: - 
client.analyze_workload_move({}) - assert ( - "Method AnalyzeWorkloadMove is not available over REST transport" - in str(not_implemented_error.value) - ) - - -def test_list_workloads_rest_error(): - - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - with pytest.raises(NotImplementedError) as not_implemented_error: - client.list_workloads({}) - assert ( - "Method ListWorkloads is not available over REST transport" - in str(not_implemented_error.value) - ) - - -def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'organizations/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) -def test_get_operation_rest(request_type): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'organizations/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'organizations/sample1/locations/sample2'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_operations(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) -def test_list_operations_rest(request_type): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'organizations/sample1/locations/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_operations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_initialize_client_w_rest(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_workload_empty_call_rest(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_workload), - '__call__') as call: - client.create_workload(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.CreateWorkloadRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_workload_empty_call_rest(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_workload), - '__call__') as call: - client.update_workload(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.UpdateWorkloadRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_restrict_allowed_resources_empty_call_rest(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.restrict_allowed_resources), - '__call__') as call: - client.restrict_allowed_resources(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.RestrictAllowedResourcesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_workload_empty_call_rest(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_workload), - '__call__') as call: - client.delete_workload(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.DeleteWorkloadRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_workload_empty_call_rest(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_workload), - '__call__') as call: - client.get_workload(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.GetWorkloadRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_analyze_workload_move_empty_call_rest(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.analyze_workload_move), - '__call__') as call: - client.analyze_workload_move(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.AnalyzeWorkloadMoveRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_workloads_empty_call_rest(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_workloads), - '__call__') as call: - client.list_workloads(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = assuredworkloads.ListWorkloadsRequest() - - assert args[0] == request_msg - - -def test_assured_workloads_service_rest_lro_client(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - transport = client.transport - - # Ensure that we have an api-core operations client. 
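-    # Over REST, long-running operations are polled through the api-core
-    # AbstractOperationsClient rather than the gRPC OperationsClient used
-    # by the grpc transports.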
- assert isinstance( - transport.operations_client, -operations_v1.AbstractOperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.AssuredWorkloadsServiceGrpcTransport, - ) - -def test_assured_workloads_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.AssuredWorkloadsServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_assured_workloads_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.assuredworkloads_v1beta1.services.assured_workloads_service.transports.AssuredWorkloadsServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.AssuredWorkloadsServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'create_workload', - 'update_workload', - 'restrict_allowed_resources', - 'delete_workload', - 'get_workload', - 'analyze_workload_move', - 'list_workloads', - 'get_operation', - 'list_operations', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_assured_workloads_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.assuredworkloads_v1beta1.services.assured_workloads_service.transports.AssuredWorkloadsServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.AssuredWorkloadsServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_assured_workloads_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.assuredworkloads_v1beta1.services.assured_workloads_service.transports.AssuredWorkloadsServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.AssuredWorkloadsServiceTransport() - adc.assert_called_once() - - -def test_assured_workloads_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - AssuredWorkloadsServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.AssuredWorkloadsServiceGrpcTransport, - transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, - ], -) -def test_assured_workloads_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.AssuredWorkloadsServiceGrpcTransport, - transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, - transports.AssuredWorkloadsServiceRestTransport, - ], -) -def test_assured_workloads_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.AssuredWorkloadsServiceGrpcTransport, grpc_helpers), - (transports.AssuredWorkloadsServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_assured_workloads_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
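-    # The assertion below pins the expected defaults: the service endpoint
-    # "assuredworkloads.googleapis.com:443" and the cloud-platform scope.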
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "assuredworkloads.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="assuredworkloads.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.AssuredWorkloadsServiceGrpcTransport, transports.AssuredWorkloadsServiceGrpcAsyncIOTransport]) -def test_assured_workloads_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_assured_workloads_service_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.AssuredWorkloadsServiceRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_assured_workloads_service_host_no_port(transport_name): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='assuredworkloads.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'assuredworkloads.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://assuredworkloads.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_assured_workloads_service_host_with_port(transport_name): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - 
client_options=client_options.ClientOptions(api_endpoint='assuredworkloads.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'assuredworkloads.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://assuredworkloads.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_assured_workloads_service_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = AssuredWorkloadsServiceClient( - credentials=creds1, - transport=transport_name, - ) - client2 = AssuredWorkloadsServiceClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.create_workload._session - session2 = client2.transport.create_workload._session - assert session1 != session2 - session1 = client1.transport.update_workload._session - session2 = client2.transport.update_workload._session - assert session1 != session2 - session1 = client1.transport.restrict_allowed_resources._session - session2 = client2.transport.restrict_allowed_resources._session - assert session1 != session2 - session1 = client1.transport.delete_workload._session - session2 = client2.transport.delete_workload._session - assert session1 != session2 - session1 = client1.transport.get_workload._session - session2 = client2.transport.get_workload._session - assert session1 != session2 - session1 = client1.transport.analyze_workload_move._session - session2 = client2.transport.analyze_workload_move._session - assert session1 != session2 - session1 = client1.transport.list_workloads._session - session2 = client2.transport.list_workloads._session - assert session1 != session2 -def test_assured_workloads_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.AssuredWorkloadsServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials is None - - -def test_assured_workloads_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.AssuredWorkloadsServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials is None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor.
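-# The supported replacement for these arguments is ssl_channel_credentials /
-# client_cert_source_for_mtls, exercised in
-# test_assured_workloads_service_grpc_transport_client_cert_source_for_mtls above.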
-@pytest.mark.parametrize("transport_class", [transports.AssuredWorkloadsServiceGrpcTransport, transports.AssuredWorkloadsServiceGrpcAsyncIOTransport]) -def test_assured_workloads_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.AssuredWorkloadsServiceGrpcTransport, transports.AssuredWorkloadsServiceGrpcAsyncIOTransport]) -def test_assured_workloads_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_assured_workloads_service_grpc_lro_client(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. 
- assert transport.operations_client is transport.operations_client - - -def test_assured_workloads_service_grpc_lro_async_client(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - transport = client.transport - - # Ensure that we have an api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_workload_path(): - organization = "squid" - location = "clam" - workload = "whelk" - expected = "organizations/{organization}/locations/{location}/workloads/{workload}".format(organization=organization, location=location, workload=workload, ) - actual = AssuredWorkloadsServiceClient.workload_path(organization, location, workload) - assert expected == actual - - -def test_parse_workload_path(): - expected = { - "organization": "octopus", - "location": "oyster", - "workload": "nudibranch", - } - path = AssuredWorkloadsServiceClient.workload_path(**expected) - - # Check that the path construction is reversible. - actual = AssuredWorkloadsServiceClient.parse_workload_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = AssuredWorkloadsServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "mussel", - } - path = AssuredWorkloadsServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = AssuredWorkloadsServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "winkle" - expected = "folders/{folder}".format(folder=folder, ) - actual = AssuredWorkloadsServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nautilus", - } - path = AssuredWorkloadsServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = AssuredWorkloadsServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "scallop" - expected = "organizations/{organization}".format(organization=organization, ) - actual = AssuredWorkloadsServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "abalone", - } - path = AssuredWorkloadsServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = AssuredWorkloadsServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "squid" - expected = "projects/{project}".format(project=project, ) - actual = AssuredWorkloadsServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "clam", - } - path = AssuredWorkloadsServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible.
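-    # The parse_* helpers invert their formatting counterparts, e.g.
-    # parse_common_project_path("projects/clam") -> {"project": "clam"}.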
- actual = AssuredWorkloadsServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "whelk" - location = "octopus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = AssuredWorkloadsServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - } - path = AssuredWorkloadsServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = AssuredWorkloadsServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.AssuredWorkloadsServiceTransport, '_prep_wrapped_messages') as prep: - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.AssuredWorkloadsServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = AssuredWorkloadsServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_get_operation(transport: str = "grpc"): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
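The path tests above assert that the generated resource-name helpers are mutually inverse. A minimal sketch of that round trip, with placeholder values and `AssuredWorkloadsServiceClient` assumed imported as in the test module:

```python
# workload_path builds a resource name; parse_workload_path inverts it.
path = AssuredWorkloadsServiceClient.workload_path("org-123", "us-central1", "wl-1")
# -> "organizations/org-123/locations/us-central1/workloads/wl-1"
assert AssuredWorkloadsServiceClient.parse_workload_path(path) == {
    "organization": "org-123",
    "location": "us-central1",
    "workload": "wl-1",
}
```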
- assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_list_operations_field_headers(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
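For reference, a sketch of the shape of the operations-mixin requests these tests exercise. Both methods also accept plain dicts, as the `*_from_dict` tests show; the resource names here are placeholders and the RPC lines are commented out because a real call needs a live endpoint.

```python
# Request shapes for the get_operation / list_operations mixin methods.
from google.longrunning import operations_pb2

get_req = operations_pb2.GetOperationRequest(name="operations/sample-op")
list_req = operations_pb2.ListOperationsRequest(name="operations")
# op = client.get_operation(request=get_req)        # -> operations_pb2.Operation
# resp = client.list_operations(request=list_req)   # -> operations_pb2.ListOperationsResponse
```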
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_operations_from_dict(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_transport_close_grpc(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = AssuredWorkloadsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close_rest(): - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = AssuredWorkloadsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
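The `transport_close`/`client_ctx` tests around here pin the client's context-manager lifecycle: the underlying channel or session is closed exactly once, on exit. A self-contained sketch of that usage pattern (no RPC is issued, so anonymous credentials suffice):

```python
# Exiting the `with` block closes the client's transport exactly once.
from google.auth.credentials import AnonymousCredentials

with AssuredWorkloadsServiceClient(
    credentials=AnonymousCredentials(), transport="grpc"
) as client:
    pass  # RPCs would go here
# client.transport is closed at this point
```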
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (AssuredWorkloadsServiceClient, transports.AssuredWorkloadsServiceGrpcTransport), - (AssuredWorkloadsServiceAsyncClient, transports.AssuredWorkloadsServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-automl/v1/.coveragerc b/owl-bot-staging/google-cloud-automl/v1/.coveragerc deleted file mode 100644 index 8705cefded5c..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/automl/__init__.py - google/cloud/automl/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/google-cloud-automl/v1/.flake8 b/owl-bot-staging/google-cloud-automl/v1/.flake8 deleted file mode 100644 index 29227d4cf419..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. 
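The `test_api_key_credentials` case above asserts that setting `ClientOptions.api_key` swaps in API-key credentials instead of ADC. A hedged sketch of the corresponding user-facing configuration, with a placeholder key value:

```python
# An API key set on ClientOptions replaces application-default credentials.
from google.api_core.client_options import ClientOptions

options = ClientOptions(api_key="YOUR_API_KEY")
# client = AssuredWorkloadsServiceClient(client_options=options)
```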
- **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/google-cloud-automl/v1/MANIFEST.in b/owl-bot-staging/google-cloud-automl/v1/MANIFEST.in deleted file mode 100644 index f376b2aa2804..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/automl *.py -recursive-include google/cloud/automl_v1 *.py diff --git a/owl-bot-staging/google-cloud-automl/v1/README.rst b/owl-bot-staging/google-cloud-automl/v1/README.rst deleted file mode 100644 index 69413aa1fb0f..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/README.rst +++ /dev/null @@ -1,143 +0,0 @@ -Python Client for Google Cloud Automl API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Automl API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv - source /bin/activate - /bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. code-block:: console - - python3 -m venv - \Scripts\activate - \Scripts\pip.exe install \path\to\library - - -Logging -------- - -This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes. -Note the following: - -#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging. -#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**. -#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below. - - -Simple, environment-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google -logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged -messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging -event. - -A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log. 
- -- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc. -- Invalid logging scopes: :code:`foo`, :code:`123`, etc. - -**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers. - - -Examples -^^^^^^^^ - -- Enabling the default handler for all Google-based loggers - -.. code-block:: console - - export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google - -- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`): - -.. code-block:: console - - export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1 - - -Advanced, code-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -You can also configure a valid logging scope using Python's standard `logging` mechanism. - - -Examples -^^^^^^^^ - -- Configuring a handler for all Google-based loggers - -.. code-block:: python - - import logging - - from google.cloud.translate_v3 import translate - - base_logger = logging.getLogger("google") - base_logger.addHandler(logging.StreamHandler()) - base_logger.setLevel(logging.DEBUG) - -- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`): - -.. code-block:: python - - import logging - - from google.cloud.translate_v3 import translate - - base_logger = logging.getLogger("google.cloud.library_v1") - base_logger.addHandler(logging.StreamHandler()) - base_logger.setLevel(logging.DEBUG) - - -Logging details -~~~~~~~~~~~~~~~ - -#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root - logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set - :code:`logging.getLogger("google").propagate = True` in your code. -#. You can mix the different logging configurations above for different Google modules. For example, you may want use a code-based logging configuration for - one library, but decide you need to also set up environment-based logging configuration for another library. - - #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual - if the code -based configuration gets applied first. - -#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get - executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured. - (This is the reason for 2.i. above.) diff --git a/owl-bot-staging/google-cloud-automl/v1/docs/_static/custom.css b/owl-bot-staging/google-cloud-automl/v1/docs/_static/custom.css deleted file mode 100644 index 06423be0b592..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/docs/_static/custom.css +++ /dev/null @@ -1,3 +0,0 @@ -dl.field-list > dt { - min-width: 100px -} diff --git a/owl-bot-staging/google-cloud-automl/v1/docs/automl_v1/auto_ml.rst b/owl-bot-staging/google-cloud-automl/v1/docs/automl_v1/auto_ml.rst deleted file mode 100644 index c8994a59cc1e..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/docs/automl_v1/auto_ml.rst +++ /dev/null @@ -1,10 +0,0 @@ -AutoMl ------------------------- - -.. automodule:: google.cloud.automl_v1.services.auto_ml - :members: - :inherited-members: - -.. 
automodule:: google.cloud.automl_v1.services.auto_ml.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-automl/v1/docs/automl_v1/prediction_service.rst b/owl-bot-staging/google-cloud-automl/v1/docs/automl_v1/prediction_service.rst deleted file mode 100644 index d8f6da92bfce..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/docs/automl_v1/prediction_service.rst +++ /dev/null @@ -1,6 +0,0 @@ -PredictionService ------------------------------------ - -.. automodule:: google.cloud.automl_v1.services.prediction_service - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-automl/v1/docs/automl_v1/services_.rst b/owl-bot-staging/google-cloud-automl/v1/docs/automl_v1/services_.rst deleted file mode 100644 index ce8e2c3d0519..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/docs/automl_v1/services_.rst +++ /dev/null @@ -1,7 +0,0 @@ -Services for Google Cloud Automl v1 API -======================================= -.. toctree:: - :maxdepth: 2 - - auto_ml - prediction_service diff --git a/owl-bot-staging/google-cloud-automl/v1/docs/automl_v1/types_.rst b/owl-bot-staging/google-cloud-automl/v1/docs/automl_v1/types_.rst deleted file mode 100644 index 14a31a9ebd14..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/docs/automl_v1/types_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Automl v1 API -==================================== - -.. automodule:: google.cloud.automl_v1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/google-cloud-automl/v1/docs/conf.py b/owl-bot-staging/google-cloud-automl/v1/docs/conf.py deleted file mode 100644 index 92c859a77f79..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-automl documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. 
-extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = u"google-cloud-automl" -copyright = u"2023, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = 'en' - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. 
-html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. 
-htmlhelp_basename = "google-cloud-automl-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-automl.tex", - u"google-cloud-automl Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-automl", - u"Google Cloud Automl Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-automl", - u"google-cloud-automl Documentation", - author, - "google-cloud-automl", - "GAPIC library for Google Cloud Automl API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. 
-intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-automl/v1/docs/index.rst b/owl-bot-staging/google-cloud-automl/v1/docs/index.rst deleted file mode 100644 index 2a4f4ab1ae15..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - automl_v1/services_ - automl_v1/types_ diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl/__init__.py b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl/__init__.py deleted file mode 100644 index 2335563d3513..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl/__init__.py +++ /dev/null @@ -1,195 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.automl import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.automl_v1.services.auto_ml.client import AutoMlClient -from google.cloud.automl_v1.services.auto_ml.async_client import AutoMlAsyncClient -from google.cloud.automl_v1.services.prediction_service.client import PredictionServiceClient -from google.cloud.automl_v1.services.prediction_service.async_client import PredictionServiceAsyncClient - -from google.cloud.automl_v1.types.annotation_payload import AnnotationPayload -from google.cloud.automl_v1.types.annotation_spec import AnnotationSpec -from google.cloud.automl_v1.types.classification import ClassificationAnnotation -from google.cloud.automl_v1.types.classification import ClassificationEvaluationMetrics -from google.cloud.automl_v1.types.classification import ClassificationType -from google.cloud.automl_v1.types.data_items import Document -from google.cloud.automl_v1.types.data_items import DocumentDimensions -from google.cloud.automl_v1.types.data_items import ExamplePayload -from google.cloud.automl_v1.types.data_items import Image -from google.cloud.automl_v1.types.data_items import TextSnippet -from google.cloud.automl_v1.types.dataset import Dataset -from google.cloud.automl_v1.types.detection import BoundingBoxMetricsEntry -from google.cloud.automl_v1.types.detection import ImageObjectDetectionAnnotation -from google.cloud.automl_v1.types.detection import ImageObjectDetectionEvaluationMetrics -from google.cloud.automl_v1.types.geometry import BoundingPoly -from google.cloud.automl_v1.types.geometry import NormalizedVertex -from google.cloud.automl_v1.types.image import ImageClassificationDatasetMetadata -from google.cloud.automl_v1.types.image import ImageClassificationModelDeploymentMetadata -from google.cloud.automl_v1.types.image import ImageClassificationModelMetadata -from google.cloud.automl_v1.types.image import ImageObjectDetectionDatasetMetadata -from google.cloud.automl_v1.types.image import ImageObjectDetectionModelDeploymentMetadata -from google.cloud.automl_v1.types.image import ImageObjectDetectionModelMetadata -from google.cloud.automl_v1.types.io import BatchPredictInputConfig -from google.cloud.automl_v1.types.io import BatchPredictOutputConfig -from google.cloud.automl_v1.types.io import DocumentInputConfig -from google.cloud.automl_v1.types.io import GcsDestination -from google.cloud.automl_v1.types.io import GcsSource -from google.cloud.automl_v1.types.io import InputConfig -from google.cloud.automl_v1.types.io import ModelExportOutputConfig -from google.cloud.automl_v1.types.io import OutputConfig -from google.cloud.automl_v1.types.model import Model -from google.cloud.automl_v1.types.model_evaluation import ModelEvaluation -from google.cloud.automl_v1.types.operations import BatchPredictOperationMetadata -from google.cloud.automl_v1.types.operations import CreateDatasetOperationMetadata -from google.cloud.automl_v1.types.operations import CreateModelOperationMetadata -from google.cloud.automl_v1.types.operations import DeleteOperationMetadata -from google.cloud.automl_v1.types.operations import DeployModelOperationMetadata -from google.cloud.automl_v1.types.operations import ExportDataOperationMetadata -from google.cloud.automl_v1.types.operations import ExportModelOperationMetadata -from google.cloud.automl_v1.types.operations import ImportDataOperationMetadata -from google.cloud.automl_v1.types.operations import OperationMetadata -from 
google.cloud.automl_v1.types.operations import UndeployModelOperationMetadata -from google.cloud.automl_v1.types.prediction_service import BatchPredictRequest -from google.cloud.automl_v1.types.prediction_service import BatchPredictResult -from google.cloud.automl_v1.types.prediction_service import PredictRequest -from google.cloud.automl_v1.types.prediction_service import PredictResponse -from google.cloud.automl_v1.types.service import CreateDatasetRequest -from google.cloud.automl_v1.types.service import CreateModelRequest -from google.cloud.automl_v1.types.service import DeleteDatasetRequest -from google.cloud.automl_v1.types.service import DeleteModelRequest -from google.cloud.automl_v1.types.service import DeployModelRequest -from google.cloud.automl_v1.types.service import ExportDataRequest -from google.cloud.automl_v1.types.service import ExportModelRequest -from google.cloud.automl_v1.types.service import GetAnnotationSpecRequest -from google.cloud.automl_v1.types.service import GetDatasetRequest -from google.cloud.automl_v1.types.service import GetModelEvaluationRequest -from google.cloud.automl_v1.types.service import GetModelRequest -from google.cloud.automl_v1.types.service import ImportDataRequest -from google.cloud.automl_v1.types.service import ListDatasetsRequest -from google.cloud.automl_v1.types.service import ListDatasetsResponse -from google.cloud.automl_v1.types.service import ListModelEvaluationsRequest -from google.cloud.automl_v1.types.service import ListModelEvaluationsResponse -from google.cloud.automl_v1.types.service import ListModelsRequest -from google.cloud.automl_v1.types.service import ListModelsResponse -from google.cloud.automl_v1.types.service import UndeployModelRequest -from google.cloud.automl_v1.types.service import UpdateDatasetRequest -from google.cloud.automl_v1.types.service import UpdateModelRequest -from google.cloud.automl_v1.types.text import TextClassificationDatasetMetadata -from google.cloud.automl_v1.types.text import TextClassificationModelMetadata -from google.cloud.automl_v1.types.text import TextExtractionDatasetMetadata -from google.cloud.automl_v1.types.text import TextExtractionModelMetadata -from google.cloud.automl_v1.types.text import TextSentimentDatasetMetadata -from google.cloud.automl_v1.types.text import TextSentimentModelMetadata -from google.cloud.automl_v1.types.text_extraction import TextExtractionAnnotation -from google.cloud.automl_v1.types.text_extraction import TextExtractionEvaluationMetrics -from google.cloud.automl_v1.types.text_segment import TextSegment -from google.cloud.automl_v1.types.text_sentiment import TextSentimentAnnotation -from google.cloud.automl_v1.types.text_sentiment import TextSentimentEvaluationMetrics -from google.cloud.automl_v1.types.translation import TranslationAnnotation -from google.cloud.automl_v1.types.translation import TranslationDatasetMetadata -from google.cloud.automl_v1.types.translation import TranslationEvaluationMetrics -from google.cloud.automl_v1.types.translation import TranslationModelMetadata - -__all__ = ('AutoMlClient', - 'AutoMlAsyncClient', - 'PredictionServiceClient', - 'PredictionServiceAsyncClient', - 'AnnotationPayload', - 'AnnotationSpec', - 'ClassificationAnnotation', - 'ClassificationEvaluationMetrics', - 'ClassificationType', - 'Document', - 'DocumentDimensions', - 'ExamplePayload', - 'Image', - 'TextSnippet', - 'Dataset', - 'BoundingBoxMetricsEntry', - 'ImageObjectDetectionAnnotation', - 'ImageObjectDetectionEvaluationMetrics', - 'BoundingPoly', - 
'NormalizedVertex', - 'ImageClassificationDatasetMetadata', - 'ImageClassificationModelDeploymentMetadata', - 'ImageClassificationModelMetadata', - 'ImageObjectDetectionDatasetMetadata', - 'ImageObjectDetectionModelDeploymentMetadata', - 'ImageObjectDetectionModelMetadata', - 'BatchPredictInputConfig', - 'BatchPredictOutputConfig', - 'DocumentInputConfig', - 'GcsDestination', - 'GcsSource', - 'InputConfig', - 'ModelExportOutputConfig', - 'OutputConfig', - 'Model', - 'ModelEvaluation', - 'BatchPredictOperationMetadata', - 'CreateDatasetOperationMetadata', - 'CreateModelOperationMetadata', - 'DeleteOperationMetadata', - 'DeployModelOperationMetadata', - 'ExportDataOperationMetadata', - 'ExportModelOperationMetadata', - 'ImportDataOperationMetadata', - 'OperationMetadata', - 'UndeployModelOperationMetadata', - 'BatchPredictRequest', - 'BatchPredictResult', - 'PredictRequest', - 'PredictResponse', - 'CreateDatasetRequest', - 'CreateModelRequest', - 'DeleteDatasetRequest', - 'DeleteModelRequest', - 'DeployModelRequest', - 'ExportDataRequest', - 'ExportModelRequest', - 'GetAnnotationSpecRequest', - 'GetDatasetRequest', - 'GetModelEvaluationRequest', - 'GetModelRequest', - 'ImportDataRequest', - 'ListDatasetsRequest', - 'ListDatasetsResponse', - 'ListModelEvaluationsRequest', - 'ListModelEvaluationsResponse', - 'ListModelsRequest', - 'ListModelsResponse', - 'UndeployModelRequest', - 'UpdateDatasetRequest', - 'UpdateModelRequest', - 'TextClassificationDatasetMetadata', - 'TextClassificationModelMetadata', - 'TextExtractionDatasetMetadata', - 'TextExtractionModelMetadata', - 'TextSentimentDatasetMetadata', - 'TextSentimentModelMetadata', - 'TextExtractionAnnotation', - 'TextExtractionEvaluationMetrics', - 'TextSegment', - 'TextSentimentAnnotation', - 'TextSentimentEvaluationMetrics', - 'TranslationAnnotation', - 'TranslationDatasetMetadata', - 'TranslationEvaluationMetrics', - 'TranslationModelMetadata', -) diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl/gapic_version.py b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl/py.typed b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl/py.typed deleted file mode 100644 index 0560ba18db71..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-automl package uses inline types. 
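The `google/cloud/automl/__init__.py` deleted above is a thin re-export of the versioned `google.cloud.automl_v1` surface (whose own `__init__.py` is deleted next), so both import spellings resolve to the same objects. A quick sketch of the equivalence this layout guarantees:

```python
# The versionless package re-exports the versioned clients and types.
from google.cloud import automl
from google.cloud import automl_v1

assert automl.AutoMlClient is automl_v1.AutoMlClient
assert automl.PredictionServiceClient is automl_v1.PredictionServiceClient
```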
diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/__init__.py b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/__init__.py deleted file mode 100644 index 18b9c754c3ed..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/__init__.py +++ /dev/null @@ -1,196 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.automl_v1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.auto_ml import AutoMlClient -from .services.auto_ml import AutoMlAsyncClient -from .services.prediction_service import PredictionServiceClient -from .services.prediction_service import PredictionServiceAsyncClient - -from .types.annotation_payload import AnnotationPayload -from .types.annotation_spec import AnnotationSpec -from .types.classification import ClassificationAnnotation -from .types.classification import ClassificationEvaluationMetrics -from .types.classification import ClassificationType -from .types.data_items import Document -from .types.data_items import DocumentDimensions -from .types.data_items import ExamplePayload -from .types.data_items import Image -from .types.data_items import TextSnippet -from .types.dataset import Dataset -from .types.detection import BoundingBoxMetricsEntry -from .types.detection import ImageObjectDetectionAnnotation -from .types.detection import ImageObjectDetectionEvaluationMetrics -from .types.geometry import BoundingPoly -from .types.geometry import NormalizedVertex -from .types.image import ImageClassificationDatasetMetadata -from .types.image import ImageClassificationModelDeploymentMetadata -from .types.image import ImageClassificationModelMetadata -from .types.image import ImageObjectDetectionDatasetMetadata -from .types.image import ImageObjectDetectionModelDeploymentMetadata -from .types.image import ImageObjectDetectionModelMetadata -from .types.io import BatchPredictInputConfig -from .types.io import BatchPredictOutputConfig -from .types.io import DocumentInputConfig -from .types.io import GcsDestination -from .types.io import GcsSource -from .types.io import InputConfig -from .types.io import ModelExportOutputConfig -from .types.io import OutputConfig -from .types.model import Model -from .types.model_evaluation import ModelEvaluation -from .types.operations import BatchPredictOperationMetadata -from .types.operations import CreateDatasetOperationMetadata -from .types.operations import CreateModelOperationMetadata -from .types.operations import DeleteOperationMetadata -from .types.operations import DeployModelOperationMetadata -from .types.operations import ExportDataOperationMetadata -from .types.operations import ExportModelOperationMetadata -from .types.operations import ImportDataOperationMetadata -from .types.operations import OperationMetadata -from .types.operations import UndeployModelOperationMetadata -from .types.prediction_service import BatchPredictRequest -from 
.types.prediction_service import BatchPredictResult -from .types.prediction_service import PredictRequest -from .types.prediction_service import PredictResponse -from .types.service import CreateDatasetRequest -from .types.service import CreateModelRequest -from .types.service import DeleteDatasetRequest -from .types.service import DeleteModelRequest -from .types.service import DeployModelRequest -from .types.service import ExportDataRequest -from .types.service import ExportModelRequest -from .types.service import GetAnnotationSpecRequest -from .types.service import GetDatasetRequest -from .types.service import GetModelEvaluationRequest -from .types.service import GetModelRequest -from .types.service import ImportDataRequest -from .types.service import ListDatasetsRequest -from .types.service import ListDatasetsResponse -from .types.service import ListModelEvaluationsRequest -from .types.service import ListModelEvaluationsResponse -from .types.service import ListModelsRequest -from .types.service import ListModelsResponse -from .types.service import UndeployModelRequest -from .types.service import UpdateDatasetRequest -from .types.service import UpdateModelRequest -from .types.text import TextClassificationDatasetMetadata -from .types.text import TextClassificationModelMetadata -from .types.text import TextExtractionDatasetMetadata -from .types.text import TextExtractionModelMetadata -from .types.text import TextSentimentDatasetMetadata -from .types.text import TextSentimentModelMetadata -from .types.text_extraction import TextExtractionAnnotation -from .types.text_extraction import TextExtractionEvaluationMetrics -from .types.text_segment import TextSegment -from .types.text_sentiment import TextSentimentAnnotation -from .types.text_sentiment import TextSentimentEvaluationMetrics -from .types.translation import TranslationAnnotation -from .types.translation import TranslationDatasetMetadata -from .types.translation import TranslationEvaluationMetrics -from .types.translation import TranslationModelMetadata - -__all__ = ( - 'AutoMlAsyncClient', - 'PredictionServiceAsyncClient', -'AnnotationPayload', -'AnnotationSpec', -'AutoMlClient', -'BatchPredictInputConfig', -'BatchPredictOperationMetadata', -'BatchPredictOutputConfig', -'BatchPredictRequest', -'BatchPredictResult', -'BoundingBoxMetricsEntry', -'BoundingPoly', -'ClassificationAnnotation', -'ClassificationEvaluationMetrics', -'ClassificationType', -'CreateDatasetOperationMetadata', -'CreateDatasetRequest', -'CreateModelOperationMetadata', -'CreateModelRequest', -'Dataset', -'DeleteDatasetRequest', -'DeleteModelRequest', -'DeleteOperationMetadata', -'DeployModelOperationMetadata', -'DeployModelRequest', -'Document', -'DocumentDimensions', -'DocumentInputConfig', -'ExamplePayload', -'ExportDataOperationMetadata', -'ExportDataRequest', -'ExportModelOperationMetadata', -'ExportModelRequest', -'GcsDestination', -'GcsSource', -'GetAnnotationSpecRequest', -'GetDatasetRequest', -'GetModelEvaluationRequest', -'GetModelRequest', -'Image', -'ImageClassificationDatasetMetadata', -'ImageClassificationModelDeploymentMetadata', -'ImageClassificationModelMetadata', -'ImageObjectDetectionAnnotation', -'ImageObjectDetectionDatasetMetadata', -'ImageObjectDetectionEvaluationMetrics', -'ImageObjectDetectionModelDeploymentMetadata', -'ImageObjectDetectionModelMetadata', -'ImportDataOperationMetadata', -'ImportDataRequest', -'InputConfig', -'ListDatasetsRequest', -'ListDatasetsResponse', -'ListModelEvaluationsRequest', -'ListModelEvaluationsResponse', 
-'ListModelsRequest', -'ListModelsResponse', -'Model', -'ModelEvaluation', -'ModelExportOutputConfig', -'NormalizedVertex', -'OperationMetadata', -'OutputConfig', -'PredictRequest', -'PredictResponse', -'PredictionServiceClient', -'TextClassificationDatasetMetadata', -'TextClassificationModelMetadata', -'TextExtractionAnnotation', -'TextExtractionDatasetMetadata', -'TextExtractionEvaluationMetrics', -'TextExtractionModelMetadata', -'TextSegment', -'TextSentimentAnnotation', -'TextSentimentDatasetMetadata', -'TextSentimentEvaluationMetrics', -'TextSentimentModelMetadata', -'TextSnippet', -'TranslationAnnotation', -'TranslationDatasetMetadata', -'TranslationEvaluationMetrics', -'TranslationModelMetadata', -'UndeployModelOperationMetadata', -'UndeployModelRequest', -'UpdateDatasetRequest', -'UpdateModelRequest', -) diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/gapic_metadata.json b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/gapic_metadata.json deleted file mode 100644 index 7d017052b295..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/gapic_metadata.json +++ /dev/null @@ -1,347 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.automl_v1", - "protoPackage": "google.cloud.automl.v1", - "schema": "1.0", - "services": { - "AutoMl": { - "clients": { - "grpc": { - "libraryClient": "AutoMlClient", - "rpcs": { - "CreateDataset": { - "methods": [ - "create_dataset" - ] - }, - "CreateModel": { - "methods": [ - "create_model" - ] - }, - "DeleteDataset": { - "methods": [ - "delete_dataset" - ] - }, - "DeleteModel": { - "methods": [ - "delete_model" - ] - }, - "DeployModel": { - "methods": [ - "deploy_model" - ] - }, - "ExportData": { - "methods": [ - "export_data" - ] - }, - "ExportModel": { - "methods": [ - "export_model" - ] - }, - "GetAnnotationSpec": { - "methods": [ - "get_annotation_spec" - ] - }, - "GetDataset": { - "methods": [ - "get_dataset" - ] - }, - "GetModel": { - "methods": [ - "get_model" - ] - }, - "GetModelEvaluation": { - "methods": [ - "get_model_evaluation" - ] - }, - "ImportData": { - "methods": [ - "import_data" - ] - }, - "ListDatasets": { - "methods": [ - "list_datasets" - ] - }, - "ListModelEvaluations": { - "methods": [ - "list_model_evaluations" - ] - }, - "ListModels": { - "methods": [ - "list_models" - ] - }, - "UndeployModel": { - "methods": [ - "undeploy_model" - ] - }, - "UpdateDataset": { - "methods": [ - "update_dataset" - ] - }, - "UpdateModel": { - "methods": [ - "update_model" - ] - } - } - }, - "grpc-async": { - "libraryClient": "AutoMlAsyncClient", - "rpcs": { - "CreateDataset": { - "methods": [ - "create_dataset" - ] - }, - "CreateModel": { - "methods": [ - "create_model" - ] - }, - "DeleteDataset": { - "methods": [ - "delete_dataset" - ] - }, - "DeleteModel": { - "methods": [ - "delete_model" - ] - }, - "DeployModel": { - "methods": [ - "deploy_model" - ] - }, - "ExportData": { - "methods": [ - "export_data" - ] - }, - "ExportModel": { - "methods": [ - "export_model" - ] - }, - "GetAnnotationSpec": { - "methods": [ - "get_annotation_spec" - ] - }, - "GetDataset": { - "methods": [ - "get_dataset" - ] - }, - "GetModel": { - "methods": [ - "get_model" - ] - }, - "GetModelEvaluation": { - "methods": [ - "get_model_evaluation" - ] - }, - "ImportData": { - "methods": [ - "import_data" - ] - }, - "ListDatasets": { - "methods": [ - "list_datasets" - ] - }, - 
"ListModelEvaluations": { - "methods": [ - "list_model_evaluations" - ] - }, - "ListModels": { - "methods": [ - "list_models" - ] - }, - "UndeployModel": { - "methods": [ - "undeploy_model" - ] - }, - "UpdateDataset": { - "methods": [ - "update_dataset" - ] - }, - "UpdateModel": { - "methods": [ - "update_model" - ] - } - } - }, - "rest": { - "libraryClient": "AutoMlClient", - "rpcs": { - "CreateDataset": { - "methods": [ - "create_dataset" - ] - }, - "CreateModel": { - "methods": [ - "create_model" - ] - }, - "DeleteDataset": { - "methods": [ - "delete_dataset" - ] - }, - "DeleteModel": { - "methods": [ - "delete_model" - ] - }, - "DeployModel": { - "methods": [ - "deploy_model" - ] - }, - "ExportData": { - "methods": [ - "export_data" - ] - }, - "ExportModel": { - "methods": [ - "export_model" - ] - }, - "GetAnnotationSpec": { - "methods": [ - "get_annotation_spec" - ] - }, - "GetDataset": { - "methods": [ - "get_dataset" - ] - }, - "GetModel": { - "methods": [ - "get_model" - ] - }, - "GetModelEvaluation": { - "methods": [ - "get_model_evaluation" - ] - }, - "ImportData": { - "methods": [ - "import_data" - ] - }, - "ListDatasets": { - "methods": [ - "list_datasets" - ] - }, - "ListModelEvaluations": { - "methods": [ - "list_model_evaluations" - ] - }, - "ListModels": { - "methods": [ - "list_models" - ] - }, - "UndeployModel": { - "methods": [ - "undeploy_model" - ] - }, - "UpdateDataset": { - "methods": [ - "update_dataset" - ] - }, - "UpdateModel": { - "methods": [ - "update_model" - ] - } - } - } - } - }, - "PredictionService": { - "clients": { - "grpc": { - "libraryClient": "PredictionServiceClient", - "rpcs": { - "BatchPredict": { - "methods": [ - "batch_predict" - ] - }, - "Predict": { - "methods": [ - "predict" - ] - } - } - }, - "grpc-async": { - "libraryClient": "PredictionServiceAsyncClient", - "rpcs": { - "BatchPredict": { - "methods": [ - "batch_predict" - ] - }, - "Predict": { - "methods": [ - "predict" - ] - } - } - }, - "rest": { - "libraryClient": "PredictionServiceClient", - "rpcs": { - "BatchPredict": { - "methods": [ - "batch_predict" - ] - }, - "Predict": { - "methods": [ - "predict" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/gapic_version.py b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/py.typed b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/py.typed deleted file mode 100644 index 0560ba18db71..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. 
-# The google-cloud-automl package uses inline types. diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/__init__.py b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/__init__.py deleted file mode 100644 index 8f6cf068242c..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/auto_ml/__init__.py b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/auto_ml/__init__.py deleted file mode 100644 index cbde42be45c8..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/auto_ml/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import AutoMlClient -from .async_client import AutoMlAsyncClient - -__all__ = ( - 'AutoMlClient', - 'AutoMlAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/auto_ml/async_client.py b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/auto_ml/async_client.py deleted file mode 100644 index f64d78dcab95..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/auto_ml/async_client.py +++ /dev/null @@ -1,2602 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
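For reference, the ``gapic_metadata.json`` hunk above maps each proto RPC of the ``AutoMl`` and ``PredictionService`` services to the corresponding client method for every transport ("grpc", "grpc-async", "rest"). A minimal sketch of resolving a method name from that mapping; the local file path is hypothetical:

.. code-block:: python

    import json

    # Hypothetical local copy of the gapic_metadata.json shown above.
    with open("gapic_metadata.json") as f:
        metadata = json.load(f)

    # Which client method backs the AutoMl.CreateDataset RPC over gRPC?
    rpcs = metadata["services"]["AutoMl"]["clients"]["grpc"]["rpcs"]
    print(rpcs["CreateDataset"]["methods"])  # ['create_dataset']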
-# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.automl_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.automl_v1.services.auto_ml import pagers -from google.cloud.automl_v1.types import annotation_spec -from google.cloud.automl_v1.types import classification -from google.cloud.automl_v1.types import dataset -from google.cloud.automl_v1.types import dataset as gca_dataset -from google.cloud.automl_v1.types import detection -from google.cloud.automl_v1.types import image -from google.cloud.automl_v1.types import io -from google.cloud.automl_v1.types import model -from google.cloud.automl_v1.types import model as gca_model -from google.cloud.automl_v1.types import model_evaluation -from google.cloud.automl_v1.types import operations -from google.cloud.automl_v1.types import service -from google.cloud.automl_v1.types import text -from google.cloud.automl_v1.types import text_extraction -from google.cloud.automl_v1.types import text_sentiment -from google.cloud.automl_v1.types import translation -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import AutoMlTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import AutoMlGrpcAsyncIOTransport -from .client import AutoMlClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -class AutoMlAsyncClient: - """AutoML Server API. - - The resource names are assigned by the server. The server never - reuses names that it has created after the resources with those - names are deleted. - - An ID of a resource is the last element of the item's resource name. - For - ``projects/{project_id}/locations/{location_id}/datasets/{dataset_id}``, - then the id for the item is ``{dataset_id}``. - - Currently the only supported ``location_id`` is "us-central1". - - On any input that is documented to expect a string parameter in - snake_case or dash-case, either of those cases is accepted. - """ - - _client: AutoMlClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
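The class docstring above pins resource names under ``projects/{project_id}/locations/{location_id}``, with "us-central1" as the only supported location. A minimal instantiation sketch, assuming Application Default Credentials are configured; the project and dataset IDs are placeholders:

.. code-block:: python

    import asyncio

    from google.cloud import automl_v1

    async def main():
        # With no arguments, credentials and the endpoint are resolved
        # from the environment, per the constructor docstring below.
        client = automl_v1.AutoMlAsyncClient()
        dataset = await client.get_dataset(
            name="projects/my-project/locations/us-central1/datasets/my-dataset",
        )
        print(dataset.display_name)

    asyncio.run(main())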
- DEFAULT_ENDPOINT = AutoMlClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = AutoMlClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = AutoMlClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = AutoMlClient._DEFAULT_UNIVERSE - - annotation_spec_path = staticmethod(AutoMlClient.annotation_spec_path) - parse_annotation_spec_path = staticmethod(AutoMlClient.parse_annotation_spec_path) - dataset_path = staticmethod(AutoMlClient.dataset_path) - parse_dataset_path = staticmethod(AutoMlClient.parse_dataset_path) - model_path = staticmethod(AutoMlClient.model_path) - parse_model_path = staticmethod(AutoMlClient.parse_model_path) - model_evaluation_path = staticmethod(AutoMlClient.model_evaluation_path) - parse_model_evaluation_path = staticmethod(AutoMlClient.parse_model_evaluation_path) - common_billing_account_path = staticmethod(AutoMlClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(AutoMlClient.parse_common_billing_account_path) - common_folder_path = staticmethod(AutoMlClient.common_folder_path) - parse_common_folder_path = staticmethod(AutoMlClient.parse_common_folder_path) - common_organization_path = staticmethod(AutoMlClient.common_organization_path) - parse_common_organization_path = staticmethod(AutoMlClient.parse_common_organization_path) - common_project_path = staticmethod(AutoMlClient.common_project_path) - parse_common_project_path = staticmethod(AutoMlClient.parse_common_project_path) - common_location_path = staticmethod(AutoMlClient.common_location_path) - parse_common_location_path = staticmethod(AutoMlClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - AutoMlAsyncClient: The constructed client. - """ - return AutoMlClient.from_service_account_info.__func__(AutoMlAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - AutoMlAsyncClient: The constructed client. - """ - return AutoMlClient.from_service_account_file.__func__(AutoMlAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` is provided, use the provided one.
- (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return AutoMlClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> AutoMlTransport: - """Returns the transport used by the client instance. - - Returns: - AutoMlTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = AutoMlClient.get_transport_class - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, AutoMlTransport, Callable[..., AutoMlTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the auto ml async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,AutoMlTransport,Callable[..., AutoMlTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the AutoMlTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which can have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport.
If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = AutoMlClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.automl_v1.AutoMlAsyncClient`.", - extra = { - "serviceName": "google.cloud.automl.v1.AutoMl", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { - "serviceName": "google.cloud.automl.v1.AutoMl", - "credentialsType": None, - } - ) - - async def create_dataset(self, - request: Optional[Union[service.CreateDatasetRequest, dict]] = None, - *, - parent: Optional[str] = None, - dataset: Optional[gca_dataset.Dataset] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates a dataset. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1 - - async def sample_create_dataset(): - # Create a client - client = automl_v1.AutoMlAsyncClient() - - # Initialize request argument(s) - dataset = automl_v1.Dataset() - dataset.translation_dataset_metadata.source_language_code = "source_language_code_value" - dataset.translation_dataset_metadata.target_language_code = "target_language_code_value" - - request = automl_v1.CreateDatasetRequest( - parent="parent_value", - dataset=dataset, - ) - - # Make the request - operation = client.create_dataset(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.automl_v1.types.CreateDatasetRequest, dict]]): - The request object. Request message for - [AutoMl.CreateDataset][google.cloud.automl.v1.AutoMl.CreateDataset]. - parent (:class:`str`): - Required. The resource name of the - project to create the dataset for. 
- - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - dataset (:class:`google.cloud.automl_v1.types.Dataset`): - Required. The dataset to create. - This corresponds to the ``dataset`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.automl_v1.types.Dataset` A workspace for solving a single, particular machine learning (ML) problem. - A workspace contains examples that may be annotated. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, dataset] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.CreateDatasetRequest): - request = service.CreateDatasetRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if dataset is not None: - request.dataset = dataset - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_dataset] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - gca_dataset.Dataset, - metadata_type=operations.OperationMetadata, - ) - - # Done; return the response. - return response - - async def get_dataset(self, - request: Optional[Union[service.GetDatasetRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dataset.Dataset: - r"""Gets a dataset. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1 - - async def sample_get_dataset(): - # Create a client - client = automl_v1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1.GetDatasetRequest( - name="name_value", - ) - - # Make the request - response = await client.get_dataset(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.automl_v1.types.GetDatasetRequest, dict]]): - The request object. Request message for - [AutoMl.GetDataset][google.cloud.automl.v1.AutoMl.GetDataset]. - name (:class:`str`): - Required. The resource name of the - dataset to retrieve. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.automl_v1.types.Dataset: - A workspace for solving a single, - particular machine learning (ML) - problem. A workspace contains examples - that may be annotated. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.GetDatasetRequest): - request = service.GetDatasetRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_dataset] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_datasets(self, - request: Optional[Union[service.ListDatasetsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListDatasetsAsyncPager: - r"""Lists datasets in a project. - - .. code-block:: python
- - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1 - - async def sample_list_datasets(): - # Create a client - client = automl_v1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1.ListDatasetsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_datasets(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.automl_v1.types.ListDatasetsRequest, dict]]): - The request object. Request message for - [AutoMl.ListDatasets][google.cloud.automl.v1.AutoMl.ListDatasets]. - parent (:class:`str`): - Required. The resource name of the - project from which to list datasets. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.automl_v1.services.auto_ml.pagers.ListDatasetsAsyncPager: - Response message for - [AutoMl.ListDatasets][google.cloud.automl.v1.AutoMl.ListDatasets]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListDatasetsRequest): - request = service.ListDatasetsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_datasets] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method.
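A usage sketch for the pager being constructed here; the parent value is a placeholder:

.. code-block:: python

    from google.cloud import automl_v1

    async def print_dataset_names(client: automl_v1.AutoMlAsyncClient) -> None:
        # Awaiting list_datasets yields the ListDatasetsAsyncPager below;
        # iterating it fetches additional pages transparently.
        pager = await client.list_datasets(
            request=automl_v1.ListDatasetsRequest(
                parent="projects/my-project/locations/us-central1",
            )
        )
        async for dataset in pager:
            print(dataset.name)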
- response = pagers.ListDatasetsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_dataset(self, - request: Optional[Union[service.UpdateDatasetRequest, dict]] = None, - *, - dataset: Optional[gca_dataset.Dataset] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gca_dataset.Dataset: - r"""Updates a dataset. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1 - - async def sample_update_dataset(): - # Create a client - client = automl_v1.AutoMlAsyncClient() - - # Initialize request argument(s) - dataset = automl_v1.Dataset() - dataset.translation_dataset_metadata.source_language_code = "source_language_code_value" - dataset.translation_dataset_metadata.target_language_code = "target_language_code_value" - - request = automl_v1.UpdateDatasetRequest( - dataset=dataset, - ) - - # Make the request - response = await client.update_dataset(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.automl_v1.types.UpdateDatasetRequest, dict]]): - The request object. Request message for - [AutoMl.UpdateDataset][google.cloud.automl.v1.AutoMl.UpdateDataset] - dataset (:class:`google.cloud.automl_v1.types.Dataset`): - Required. The dataset which replaces - the resource on the server. - - This corresponds to the ``dataset`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. The update mask applies to - the resource. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.automl_v1.types.Dataset: - A workspace for solving a single, - particular machine learning (ML) - problem. A workspace contains examples - that may be annotated. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
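The quick check noted above makes ``request`` and the flattened fields mutually exclusive. A sketch of the flattened calling style for ``update_dataset``; the ``display_name`` value is illustrative:

.. code-block:: python

    from google.cloud import automl_v1
    from google.protobuf import field_mask_pb2

    async def rename_dataset(client: automl_v1.AutoMlAsyncClient,
                             dataset: automl_v1.Dataset) -> automl_v1.Dataset:
        dataset.display_name = "renamed_dataset"
        # Flattened fields; passing request= together with these would
        # raise ValueError per the check that follows.
        return await client.update_dataset(
            dataset=dataset,
            update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
        )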
- flattened_params = [dataset, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.UpdateDatasetRequest): - request = service.UpdateDatasetRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if dataset is not None: - request.dataset = dataset - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_dataset] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("dataset.name", request.dataset.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_dataset(self, - request: Optional[Union[service.DeleteDatasetRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes a dataset and all of its contents. Returns empty - response in the - [response][google.longrunning.Operation.response] field when it - completes, and ``delete_details`` in the - [metadata][google.longrunning.Operation.metadata] field. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1 - - async def sample_delete_dataset(): - # Create a client - client = automl_v1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1.DeleteDatasetRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_dataset(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.automl_v1.types.DeleteDatasetRequest, dict]]): - The request object. Request message for - [AutoMl.DeleteDataset][google.cloud.automl.v1.AutoMl.DeleteDataset]. - name (:class:`str`): - Required. The resource name of the - dataset to delete. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.DeleteDatasetRequest): - request = service.DeleteDatasetRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_dataset] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=operations.OperationMetadata, - ) - - # Done; return the response. - return response - - async def import_data(self, - request: Optional[Union[service.ImportDataRequest, dict]] = None, - *, - name: Optional[str] = None, - input_config: Optional[io.InputConfig] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Imports data into a dataset. For Tables this method can only be - called on an empty Dataset. - - For Tables: - - - A - [schema_inference_version][google.cloud.automl.v1.InputConfig.params] - parameter must be explicitly set. Returns an empty response - in the [response][google.longrunning.Operation.response] - field when it completes. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1 - - async def sample_import_data(): - # Create a client - client = automl_v1.AutoMlAsyncClient() - - # Initialize request argument(s) - input_config = automl_v1.InputConfig() - input_config.gcs_source.input_uris = ['input_uris_value1', 'input_uris_value2'] - - request = automl_v1.ImportDataRequest( - name="name_value", - input_config=input_config, - ) - - # Make the request - operation = client.import_data(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.automl_v1.types.ImportDataRequest, dict]]): - The request object. Request message for - [AutoMl.ImportData][google.cloud.automl.v1.AutoMl.ImportData]. - name (:class:`str`): - Required. Dataset name. Dataset must - already exist. All imported annotations - and examples will be added. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - input_config (:class:`google.cloud.automl_v1.types.InputConfig`): - Required. The desired input location - and its domain specific semantics, if - any. - - This corresponds to the ``input_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name, input_config] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ImportDataRequest): - request = service.ImportDataRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if input_config is not None: - request.input_config = input_config - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.import_data] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=operations.OperationMetadata, - ) - - # Done; return the response. - return response - - async def export_data(self, - request: Optional[Union[service.ExportDataRequest, dict]] = None, - *, - name: Optional[str] = None, - output_config: Optional[io.OutputConfig] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Exports dataset's data to the provided output location. Returns - an empty response in the - [response][google.longrunning.Operation.response] field when it - completes. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1 - - async def sample_export_data(): - # Create a client - client = automl_v1.AutoMlAsyncClient() - - # Initialize request argument(s) - output_config = automl_v1.OutputConfig() - output_config.gcs_destination.output_uri_prefix = "output_uri_prefix_value" - - request = automl_v1.ExportDataRequest( - name="name_value", - output_config=output_config, - ) - - # Make the request - operation = client.export_data(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.automl_v1.types.ExportDataRequest, dict]]): - The request object. Request message for - [AutoMl.ExportData][google.cloud.automl.v1.AutoMl.ExportData]. - name (:class:`str`): - Required. The resource name of the - dataset. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - output_config (:class:`google.cloud.automl_v1.types.OutputConfig`): - Required. The desired output - location. - - This corresponds to the ``output_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. 
- - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name, output_config] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ExportDataRequest): - request = service.ExportDataRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if output_config is not None: - request.output_config = output_config - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.export_data] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=operations.OperationMetadata, - ) - - # Done; return the response. - return response - - async def get_annotation_spec(self, - request: Optional[Union[service.GetAnnotationSpecRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> annotation_spec.AnnotationSpec: - r"""Gets an annotation spec. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1 - - async def sample_get_annotation_spec(): - # Create a client - client = automl_v1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1.GetAnnotationSpecRequest( - name="name_value", - ) - - # Make the request - response = await client.get_annotation_spec(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.automl_v1.types.GetAnnotationSpecRequest, dict]]): - The request object. 
Request message for - [AutoMl.GetAnnotationSpec][google.cloud.automl.v1.AutoMl.GetAnnotationSpec]. - name (:class:`str`): - Required. The resource name of the - annotation spec to retrieve. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.automl_v1.types.AnnotationSpec: - A definition of an annotation spec. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.GetAnnotationSpecRequest): - request = service.GetAnnotationSpecRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_annotation_spec] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_model(self, - request: Optional[Union[service.CreateModelRequest, dict]] = None, - *, - parent: Optional[str] = None, - model: Optional[gca_model.Model] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates a model. Returns a Model in the - [response][google.longrunning.Operation.response] field when it - completes. When you create a model, several model evaluations - are created for it: a global evaluation, and one evaluation for - each annotation spec. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1 - - async def sample_create_model(): - # Create a client - client = automl_v1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1.CreateModelRequest( - parent="parent_value", - ) - - # Make the request - operation = client.create_model(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.automl_v1.types.CreateModelRequest, dict]]): - The request object. Request message for - [AutoMl.CreateModel][google.cloud.automl.v1.AutoMl.CreateModel]. - parent (:class:`str`): - Required. Resource name of the parent - project where the model is being - created. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - model (:class:`google.cloud.automl_v1.types.Model`): - Required. The model to create. - This corresponds to the ``model`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.automl_v1.types.Model` API proto - representing a trained machine learning model. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, model] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.CreateModelRequest): - request = service.CreateModelRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if model is not None: - request.model = model - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_model] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. 
- response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - gca_model.Model, - metadata_type=operations.OperationMetadata, - ) - - # Done; return the response. - return response - - async def get_model(self, - request: Optional[Union[service.GetModelRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> model.Model: - r"""Gets a model. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1 - - async def sample_get_model(): - # Create a client - client = automl_v1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1.GetModelRequest( - name="name_value", - ) - - # Make the request - response = await client.get_model(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.automl_v1.types.GetModelRequest, dict]]): - The request object. Request message for - [AutoMl.GetModel][google.cloud.automl.v1.AutoMl.GetModel]. - name (:class:`str`): - Required. Resource name of the model. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.automl_v1.types.Model: - API proto representing a trained - machine learning model. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.GetModelRequest): - request = service.GetModelRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_model] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. 
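-        # [Editor's note] Universe-domain validation checks the client's
-        # configured universe (default "googleapis.com") against its
-        # credentials. A sketch of targeting a different universe via client
-        # options (the domain below is hypothetical):
-        #
-        #     from google.api_core.client_options import ClientOptions
-        #     client = automl_v1.AutoMlAsyncClient(
-        #         client_options=ClientOptions(universe_domain="example-universe.net"))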
- self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_models(self, - request: Optional[Union[service.ListModelsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListModelsAsyncPager: - r"""Lists models. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1 - - async def sample_list_models(): - # Create a client - client = automl_v1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1.ListModelsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_models(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.automl_v1.types.ListModelsRequest, dict]]): - The request object. Request message for - [AutoMl.ListModels][google.cloud.automl.v1.AutoMl.ListModels]. - parent (:class:`str`): - Required. Resource name of the - project, from which to list the models. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.automl_v1.services.auto_ml.pagers.ListModelsAsyncPager: - Response message for - [AutoMl.ListModels][google.cloud.automl.v1.AutoMl.ListModels]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListModelsRequest): - request = service.ListModelsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
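-        # [Editor's note] The wrapped method's retry/timeout defaults can be
-        # overridden per call. A sketch of supplying a custom retry policy
-        # (the numeric values are illustrative, not recommendations):
-        #
-        #     from google.api_core import exceptions
-        #     from google.api_core.retry_async import AsyncRetry, if_exception_type
-        #
-        #     pager = await client.list_models(
-        #         parent="projects/my-project/locations/us-central1",
-        #         retry=AsyncRetry(
-        #             initial=1.0, maximum=10.0, multiplier=2.0,
-        #             predicate=if_exception_type(exceptions.ServiceUnavailable)),
-        #         timeout=30.0)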
- rpc = self._client._transport._wrapped_methods[self._client._transport.list_models] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListModelsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_model(self, - request: Optional[Union[service.DeleteModelRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes a model. Returns ``google.protobuf.Empty`` in the - [response][google.longrunning.Operation.response] field when it - completes, and ``delete_details`` in the - [metadata][google.longrunning.Operation.metadata] field. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1 - - async def sample_delete_model(): - # Create a client - client = automl_v1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1.DeleteModelRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_model(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.automl_v1.types.DeleteModelRequest, dict]]): - The request object. Request message for - [AutoMl.DeleteModel][google.cloud.automl.v1.AutoMl.DeleteModel]. - name (:class:`str`): - Required. Resource name of the model - being deleted. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. 
For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.DeleteModelRequest): - request = service.DeleteModelRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_model] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=operations.OperationMetadata, - ) - - # Done; return the response. - return response - - async def update_model(self, - request: Optional[Union[service.UpdateModelRequest, dict]] = None, - *, - model: Optional[gca_model.Model] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gca_model.Model: - r"""Updates a model. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1 - - async def sample_update_model(): - # Create a client - client = automl_v1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1.UpdateModelRequest( - ) - - # Make the request - response = await client.update_model(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.automl_v1.types.UpdateModelRequest, dict]]): - The request object. Request message for - [AutoMl.UpdateModel][google.cloud.automl.v1.AutoMl.UpdateModel] - model (:class:`google.cloud.automl_v1.types.Model`): - Required. The model which replaces - the resource on the server. - - This corresponds to the ``model`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. 
The update mask applies to
-                the resource.
-
-                This corresponds to the ``update_mask`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.automl_v1.types.Model:
-                API proto representing a trained
-                machine learning model.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [model, update_mask]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, service.UpdateModelRequest):
-            request = service.UpdateModelRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if model is not None:
-            request.model = model
-        if update_mask is not None:
-            request.update_mask = update_mask
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.update_model]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("model.name", request.model.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def deploy_model(self,
-            request: Optional[Union[service.DeployModelRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> operation_async.AsyncOperation:
-        r"""Deploys a model. If a model is already deployed, deploying it
-        with the same parameters has no effect. Deploying with different
-        parameters (e.g. changing
-        [node_count][google.cloud.automl.v1.ImageObjectDetectionModelDeploymentMetadata.node_count])
-        will reset the deployment state without pausing the model's
-        availability.
-
-        Only applicable for Text Classification, Image Object Detection,
-        Tables, and Image Segmentation; all other domains manage
-        deployment automatically.
-
-        Returns an empty response in the
-        [response][google.longrunning.Operation.response] field when it
-        completes.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1 - - async def sample_deploy_model(): - # Create a client - client = automl_v1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1.DeployModelRequest( - name="name_value", - ) - - # Make the request - operation = client.deploy_model(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.automl_v1.types.DeployModelRequest, dict]]): - The request object. Request message for - [AutoMl.DeployModel][google.cloud.automl.v1.AutoMl.DeployModel]. - name (:class:`str`): - Required. Resource name of the model - to deploy. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.DeployModelRequest): - request = service.DeployModelRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.deploy_model] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. 
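-        # [Editor's note] A sketch of consuming the deploy operation from
-        # caller code (the model name is a placeholder):
-        #
-        #     operation = await client.deploy_model(
-        #         name="projects/my-project/locations/us-central1/models/MODEL_ID")
-        #     await operation.result()    # completes with empty_pb2.Empty
-        #     operation.metadata          # operations.OperationMetadata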
- response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=operations.OperationMetadata, - ) - - # Done; return the response. - return response - - async def undeploy_model(self, - request: Optional[Union[service.UndeployModelRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Undeploys a model. If the model is not deployed this method has - no effect. - - Only applicable for Text Classification, Image Object Detection - and Tables; all other domains manage deployment automatically. - - Returns an empty response in the - [response][google.longrunning.Operation.response] field when it - completes. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1 - - async def sample_undeploy_model(): - # Create a client - client = automl_v1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1.UndeployModelRequest( - name="name_value", - ) - - # Make the request - operation = client.undeploy_model(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.automl_v1.types.UndeployModelRequest, dict]]): - The request object. Request message for - [AutoMl.UndeployModel][google.cloud.automl.v1.AutoMl.UndeployModel]. - name (:class:`str`): - Required. Resource name of the model - to undeploy. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
-        flattened_params = [name]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, service.UndeployModelRequest):
-            request = service.UndeployModelRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if name is not None:
-            request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.undeploy_model]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Wrap the response in an operation future.
-        response = operation_async.from_gapic(
-            response,
-            self._client._transport.operations_client,
-            empty_pb2.Empty,
-            metadata_type=operations.OperationMetadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def export_model(self,
-            request: Optional[Union[service.ExportModelRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            output_config: Optional[io.ModelExportOutputConfig] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> operation_async.AsyncOperation:
-        r"""Exports a trained, "export-able" model to a user-specified
-        Google Cloud Storage location. A model is considered export-able
-        if and only if it has an export format defined for it in
-        [ModelExportOutputConfig][google.cloud.automl.v1.ModelExportOutputConfig].
-
-        Returns an empty response in the
-        [response][google.longrunning.Operation.response] field when it
-        completes.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import automl_v1
-
-            async def sample_export_model():
-                # Create a client
-                client = automl_v1.AutoMlAsyncClient()
-
-                # Initialize request argument(s)
-                output_config = automl_v1.ModelExportOutputConfig()
-                output_config.gcs_destination.output_uri_prefix = "output_uri_prefix_value"
-
-                request = automl_v1.ExportModelRequest(
-                    name="name_value",
-                    output_config=output_config,
-                )
-
-                # Make the request
-                operation = client.export_model(request=request)
-
-                print("Waiting for operation to complete...")
-
-                response = (await operation).result()
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.automl_v1.types.ExportModelRequest, dict]]):
-                The request object.
Request message for - [AutoMl.ExportModel][google.cloud.automl.v1.AutoMl.ExportModel]. - Models need to be enabled for exporting, otherwise an - error code will be returned. - name (:class:`str`): - Required. The resource name of the - model to export. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - output_config (:class:`google.cloud.automl_v1.types.ModelExportOutputConfig`): - Required. The desired output location - and configuration. - - This corresponds to the ``output_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name, output_config] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ExportModelRequest): - request = service.ExportModelRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if output_config is not None: - request.output_config = output_config - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.export_model] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=operations.OperationMetadata, - ) - - # Done; return the response. 
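-        # [Editor's note] On the wire, the routing header assembled earlier is
-        # carried as an "x-goog-request-params" metadata entry, e.g.
-        # (illustrative value):
-        #
-        #     ("x-goog-request-params", "name=projects/p/locations/l/models/m")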
- return response - - async def get_model_evaluation(self, - request: Optional[Union[service.GetModelEvaluationRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> model_evaluation.ModelEvaluation: - r"""Gets a model evaluation. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1 - - async def sample_get_model_evaluation(): - # Create a client - client = automl_v1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1.GetModelEvaluationRequest( - name="name_value", - ) - - # Make the request - response = await client.get_model_evaluation(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.automl_v1.types.GetModelEvaluationRequest, dict]]): - The request object. Request message for - [AutoMl.GetModelEvaluation][google.cloud.automl.v1.AutoMl.GetModelEvaluation]. - name (:class:`str`): - Required. Resource name for the model - evaluation. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.automl_v1.types.ModelEvaluation: - Evaluation results of a model. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.GetModelEvaluationRequest): - request = service.GetModelEvaluationRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_model_evaluation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
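-        # [Editor's note] The metadata passed below merges any caller-supplied
-        # pairs with the routing header added above. A sketch of supplying
-        # custom metadata, including a binary `-bin` key (header names are
-        # hypothetical):
-        #
-        #     evaluation = await client.get_model_evaluation(
-        #         name="projects/p/locations/l/models/m/modelEvaluations/e",
-        #         metadata=(("x-goog-custom", "value"),
-        #                   ("x-debug-trace-bin", b"\x01\x02")))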
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_model_evaluations(self, - request: Optional[Union[service.ListModelEvaluationsRequest, dict]] = None, - *, - parent: Optional[str] = None, - filter: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListModelEvaluationsAsyncPager: - r"""Lists model evaluations. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1 - - async def sample_list_model_evaluations(): - # Create a client - client = automl_v1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1.ListModelEvaluationsRequest( - parent="parent_value", - filter="filter_value", - ) - - # Make the request - page_result = client.list_model_evaluations(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.automl_v1.types.ListModelEvaluationsRequest, dict]]): - The request object. Request message for - [AutoMl.ListModelEvaluations][google.cloud.automl.v1.AutoMl.ListModelEvaluations]. - parent (:class:`str`): - Required. Resource name of the model - to list the model evaluations for. If - modelId is set as "-", this will list - model evaluations from across all models - of the parent location. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - filter (:class:`str`): - Required. An expression for filtering the results of the - request. - - - ``annotation_spec_id`` - for =, != or existence. See - example below for the last. - - Some examples of using the filter are: - - - ``annotation_spec_id!=4`` --> The model evaluation - was done for annotation spec with ID different than - 4. - - ``NOT annotation_spec_id:*`` --> The model evaluation - was done for aggregate of all annotation specs. - - This corresponds to the ``filter`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.automl_v1.services.auto_ml.pagers.ListModelEvaluationsAsyncPager: - Response message for - [AutoMl.ListModelEvaluations][google.cloud.automl.v1.AutoMl.ListModelEvaluations]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
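-        # [Editor's note] The two calling conventions are mutually exclusive,
-        # as the check below enforces. A sketch (paths are placeholders):
-        #
-        #     # Either flattened fields...
-        #     pager = await client.list_model_evaluations(
-        #         parent="projects/p/locations/l/models/m",
-        #         filter="annotation_spec_id!=4")
-        #
-        #     # ...or a request object, but not both at once:
-        #     request = automl_v1.ListModelEvaluationsRequest(
-        #         parent="projects/p/locations/l/models/m",
-        #         filter="annotation_spec_id!=4")
-        #     pager = await client.list_model_evaluations(request=request)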
- flattened_params = [parent, filter] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListModelEvaluationsRequest): - request = service.ListModelEvaluationsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if filter is not None: - request.filter = filter - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_model_evaluations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListModelEvaluationsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def __aenter__(self) -> "AutoMlAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "AutoMlAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/auto_ml/client.py b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/auto_ml/client.py deleted file mode 100644 index 4ad9af10bfb8..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/auto_ml/client.py +++ /dev/null @@ -1,2973 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
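-#
-# [Editor's note] This module defines the synchronous client; its surface
-# mirrors the async client above minus ``await``. A minimal usage sketch
-# (the resource name is a placeholder):
-#
-#     from google.cloud import automl_v1
-#
-#     client = automl_v1.AutoMlClient()
-#     model = client.get_model(name="projects/p/locations/l/models/m")
-#     print(model.display_name)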
-# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.automl_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.automl_v1.services.auto_ml import pagers -from google.cloud.automl_v1.types import annotation_spec -from google.cloud.automl_v1.types import classification -from google.cloud.automl_v1.types import dataset -from google.cloud.automl_v1.types import dataset as gca_dataset -from google.cloud.automl_v1.types import detection -from google.cloud.automl_v1.types import image -from google.cloud.automl_v1.types import io -from google.cloud.automl_v1.types import model -from google.cloud.automl_v1.types import model as gca_model -from google.cloud.automl_v1.types import model_evaluation -from google.cloud.automl_v1.types import operations -from google.cloud.automl_v1.types import service -from google.cloud.automl_v1.types import text -from google.cloud.automl_v1.types import text_extraction -from google.cloud.automl_v1.types import text_sentiment -from google.cloud.automl_v1.types import translation -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import AutoMlTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import AutoMlGrpcTransport -from .transports.grpc_asyncio import AutoMlGrpcAsyncIOTransport -from .transports.rest import AutoMlRestTransport - - -class AutoMlClientMeta(type): - """Metaclass for the AutoMl client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[AutoMlTransport]] - _transport_registry["grpc"] = AutoMlGrpcTransport - _transport_registry["grpc_asyncio"] = AutoMlGrpcAsyncIOTransport - _transport_registry["rest"] = AutoMlRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[AutoMlTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. 
- """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class AutoMlClient(metaclass=AutoMlClientMeta): - """AutoML Server API. - - The resource names are assigned by the server. The server never - reuses names that it has created after the resources with those - names are deleted. - - An ID of a resource is the last element of the item's resource name. - For - ``projects/{project_id}/locations/{location_id}/datasets/{dataset_id}``, - then the id for the item is ``{dataset_id}``. - - Currently the only supported ``location_id`` is "us-central1". - - On any input that is documented to expect a string parameter in - snake_case or dash-case, either of those cases is accepted. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = "automl.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - _DEFAULT_ENDPOINT_TEMPLATE = "automl.{UNIVERSE_DOMAIN}" - _DEFAULT_UNIVERSE = "googleapis.com" - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - AutoMlClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - AutoMlClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> AutoMlTransport: - """Returns the transport used by the client instance. - - Returns: - AutoMlTransport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def annotation_spec_path(project: str,location: str,dataset: str,annotation_spec: str,) -> str: - """Returns a fully-qualified annotation_spec string.""" - return "projects/{project}/locations/{location}/datasets/{dataset}/annotationSpecs/{annotation_spec}".format(project=project, location=location, dataset=dataset, annotation_spec=annotation_spec, ) - - @staticmethod - def parse_annotation_spec_path(path: str) -> Dict[str,str]: - """Parses a annotation_spec path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/datasets/(?P.+?)/annotationSpecs/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def dataset_path(project: str,location: str,dataset: str,) -> str: - """Returns a fully-qualified dataset string.""" - return "projects/{project}/locations/{location}/datasets/{dataset}".format(project=project, location=location, dataset=dataset, ) - - @staticmethod - def parse_dataset_path(path: str) -> Dict[str,str]: - """Parses a dataset path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/datasets/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def model_path(project: str,location: str,model: str,) -> str: - """Returns a fully-qualified model string.""" - return "projects/{project}/locations/{location}/models/{model}".format(project=project, location=location, model=model, ) - - @staticmethod - def parse_model_path(path: str) -> Dict[str,str]: - """Parses a model path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/models/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def model_evaluation_path(project: str,location: str,model: str,model_evaluation: str,) -> str: - """Returns a fully-qualified model_evaluation string.""" - return "projects/{project}/locations/{location}/models/{model}/modelEvaluations/{model_evaluation}".format(project=project, location=location, model=model, model_evaluation=model_evaluation, ) - - @staticmethod - def parse_model_evaluation_path(path: str) -> Dict[str,str]: - """Parses a model_evaluation path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/models/(?P.+?)/modelEvaluations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component 
segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Deprecated. Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. 
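-        # [Editor's note] Selection in brief: an explicit api_endpoint always
-        # wins; otherwise the mTLS endpoint is chosen when
-        # GOOGLE_API_USE_MTLS_ENDPOINT is "always", or when it is "auto" and a
-        # client certificate is available. Illustrative conversion performed
-        # by _get_default_mtls_endpoint:
-        #
-        #     "automl.googleapis.com" -> "automl.mtls.googleapis.com"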
- if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. - - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"]. - """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = AutoMlClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = AutoMlClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = AutoMlClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. 
- - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. - """ - universe_domain = AutoMlClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. - """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, AutoMlTransport, Callable[..., AutoMlTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the auto ml client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,AutoMlTransport,Callable[..., AutoMlTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the AutoMlTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. 
The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. If this property is not set and - ``transport`` is not explicitly provided either, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which has one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if a client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence, and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) - - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = AutoMlClient._read_environment_variables() - self._client_cert_source = AutoMlClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = AutoMlClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER - # Setup logging. - client_logging.initialize_logging() - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, AutoMlTransport) - if transport_provided: - # transport is an AutoMlTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly."
- ) - self._transport = cast(AutoMlTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - AutoMlClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[AutoMlTransport], Callable[..., AutoMlTransport]] = ( - AutoMlClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., AutoMlTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.automl_v1.AutoMlClient`.", - extra = { - "serviceName": "google.cloud.automl.v1.AutoMl", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.cloud.automl.v1.AutoMl", - "credentialsType": None, - } - ) - - def create_dataset(self, - request: Optional[Union[service.CreateDatasetRequest, dict]] = None, - *, - parent: Optional[str] = None, - dataset: Optional[gca_dataset.Dataset] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Creates a dataset. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1 - - def sample_create_dataset(): - # Create a client - client = automl_v1.AutoMlClient() - - # Initialize request argument(s) - dataset = automl_v1.Dataset() - dataset.translation_dataset_metadata.source_language_code = "source_language_code_value" - dataset.translation_dataset_metadata.target_language_code = "target_language_code_value" - - request = automl_v1.CreateDatasetRequest( - parent="parent_value", - dataset=dataset, - ) - - # Make the request - operation = client.create_dataset(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.automl_v1.types.CreateDatasetRequest, dict]): - The request object. Request message for - [AutoMl.CreateDataset][google.cloud.automl.v1.AutoMl.CreateDataset]. - parent (str): - Required. The resource name of the - project to create the dataset for. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - dataset (google.cloud.automl_v1.types.Dataset): - Required. The dataset to create. - This corresponds to the ``dataset`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.automl_v1.types.Dataset` A workspace for solving a single, particular machine learning (ML) problem. - A workspace contains examples that may be annotated. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, dataset] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.CreateDatasetRequest): - request = service.CreateDatasetRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if dataset is not None: - request.dataset = dataset - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_dataset] - - # Certain fields should be provided within the metadata header; - # add these here. 
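- # Illustrative sketch (hypothetical resource name): for - # parent="projects/my-project/locations/us-central1", the call below produces - # the gRPC metadata pair ("x-goog-request-params", - # "parent=projects/my-project/locations/us-central1"), URL-encoded as needed, - # so the backend can route the request without inspecting the payload.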
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - gca_dataset.Dataset, - metadata_type=operations.OperationMetadata, - ) - - # Done; return the response. - return response - - def get_dataset(self, - request: Optional[Union[service.GetDatasetRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dataset.Dataset: - r"""Gets a dataset. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1 - - def sample_get_dataset(): - # Create a client - client = automl_v1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1.GetDatasetRequest( - name="name_value", - ) - - # Make the request - response = client.get_dataset(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.automl_v1.types.GetDatasetRequest, dict]): - The request object. Request message for - [AutoMl.GetDataset][google.cloud.automl.v1.AutoMl.GetDataset]. - name (str): - Required. The resource name of the - dataset to retrieve. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.automl_v1.types.Dataset: - A workspace for solving a single, - particular machine learning (ML) - problem. A workspace contains examples - that may be annotated. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.GetDatasetRequest): - request = service.GetDatasetRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_dataset] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_datasets(self, - request: Optional[Union[service.ListDatasetsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListDatasetsPager: - r"""Lists datasets in a project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1 - - def sample_list_datasets(): - # Create a client - client = automl_v1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1.ListDatasetsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_datasets(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.automl_v1.types.ListDatasetsRequest, dict]): - The request object. Request message for - [AutoMl.ListDatasets][google.cloud.automl.v1.AutoMl.ListDatasets]. - parent (str): - Required. The resource name of the - project from which to list datasets. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.automl_v1.services.auto_ml.pagers.ListDatasetsPager: - Response message for - [AutoMl.ListDatasets][google.cloud.automl.v1.AutoMl.ListDatasets]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListDatasetsRequest): - request = service.ListDatasetsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_datasets] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListDatasetsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_dataset(self, - request: Optional[Union[service.UpdateDatasetRequest, dict]] = None, - *, - dataset: Optional[gca_dataset.Dataset] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gca_dataset.Dataset: - r"""Updates a dataset. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1 - - def sample_update_dataset(): - # Create a client - client = automl_v1.AutoMlClient() - - # Initialize request argument(s) - dataset = automl_v1.Dataset() - dataset.translation_dataset_metadata.source_language_code = "source_language_code_value" - dataset.translation_dataset_metadata.target_language_code = "target_language_code_value" - - request = automl_v1.UpdateDatasetRequest( - dataset=dataset, - ) - - # Make the request - response = client.update_dataset(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.automl_v1.types.UpdateDatasetRequest, dict]): - The request object. Request message for - [AutoMl.UpdateDataset][google.cloud.automl.v1.AutoMl.UpdateDataset] - dataset (google.cloud.automl_v1.types.Dataset): - Required. The dataset which replaces - the resource on the server. - - This corresponds to the ``dataset`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. The update mask applies to - the resource. 
- - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.automl_v1.types.Dataset: - A workspace for solving a single, - particular machine learning (ML) - problem. A workspace contains examples - that may be annotated. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [dataset, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.UpdateDatasetRequest): - request = service.UpdateDatasetRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if dataset is not None: - request.dataset = dataset - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_dataset] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("dataset.name", request.dataset.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_dataset(self, - request: Optional[Union[service.DeleteDatasetRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Deletes a dataset and all of its contents. Returns empty - response in the - [response][google.longrunning.Operation.response] field when it - completes, and ``delete_details`` in the - [metadata][google.longrunning.Operation.metadata] field. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1 - - def sample_delete_dataset(): - # Create a client - client = automl_v1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1.DeleteDatasetRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_dataset(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.automl_v1.types.DeleteDatasetRequest, dict]): - The request object. Request message for - [AutoMl.DeleteDataset][google.cloud.automl.v1.AutoMl.DeleteDataset]. - name (str): - Required. The resource name of the - dataset to delete. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.DeleteDatasetRequest): - request = service.DeleteDatasetRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_dataset] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. 
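- # Illustrative sketch (hypothetical resource name): the future returned below - # is typically consumed as - #   op = client.delete_dataset(name="projects/p/locations/l/datasets/d") - #   result = op.result()   # empty_pb2.Empty once the delete completes - # with op.metadata exposing operations.OperationMetadata while polling.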
- response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=operations.OperationMetadata, - ) - - # Done; return the response. - return response - - def import_data(self, - request: Optional[Union[service.ImportDataRequest, dict]] = None, - *, - name: Optional[str] = None, - input_config: Optional[io.InputConfig] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Imports data into a dataset. For Tables this method can only be - called on an empty Dataset. - - For Tables: - - - A - [schema_inference_version][google.cloud.automl.v1.InputConfig.params] - parameter must be explicitly set. Returns an empty response - in the [response][google.longrunning.Operation.response] - field when it completes. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1 - - def sample_import_data(): - # Create a client - client = automl_v1.AutoMlClient() - - # Initialize request argument(s) - input_config = automl_v1.InputConfig() - input_config.gcs_source.input_uris = ['input_uris_value1', 'input_uris_value2'] - - request = automl_v1.ImportDataRequest( - name="name_value", - input_config=input_config, - ) - - # Make the request - operation = client.import_data(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.automl_v1.types.ImportDataRequest, dict]): - The request object. Request message for - [AutoMl.ImportData][google.cloud.automl.v1.AutoMl.ImportData]. - name (str): - Required. Dataset name. Dataset must - already exist. All imported annotations - and examples will be added. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - input_config (google.cloud.automl_v1.types.InputConfig): - Required. The desired input location - and its domain specific semantics, if - any. - - This corresponds to the ``input_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. 
For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name, input_config] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ImportDataRequest): - request = service.ImportDataRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if input_config is not None: - request.input_config = input_config - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.import_data] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=operations.OperationMetadata, - ) - - # Done; return the response. - return response - - def export_data(self, - request: Optional[Union[service.ExportDataRequest, dict]] = None, - *, - name: Optional[str] = None, - output_config: Optional[io.OutputConfig] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Exports dataset's data to the provided output location. Returns - an empty response in the - [response][google.longrunning.Operation.response] field when it - completes. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1 - - def sample_export_data(): - # Create a client - client = automl_v1.AutoMlClient() - - # Initialize request argument(s) - output_config = automl_v1.OutputConfig() - output_config.gcs_destination.output_uri_prefix = "output_uri_prefix_value" - - request = automl_v1.ExportDataRequest( - name="name_value", - output_config=output_config, - ) - - # Make the request - operation = client.export_data(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.automl_v1.types.ExportDataRequest, dict]): - The request object. 
Request message for - [AutoMl.ExportData][google.cloud.automl.v1.AutoMl.ExportData]. - name (str): - Required. The resource name of the - dataset. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - output_config (google.cloud.automl_v1.types.OutputConfig): - Required. The desired output - location. - - This corresponds to the ``output_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name, output_config] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ExportDataRequest): - request = service.ExportDataRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if output_config is not None: - request.output_config = output_config - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.export_data] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=operations.OperationMetadata, - ) - - # Done; return the response. 
- return response - - def get_annotation_spec(self, - request: Optional[Union[service.GetAnnotationSpecRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> annotation_spec.AnnotationSpec: - r"""Gets an annotation spec. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1 - - def sample_get_annotation_spec(): - # Create a client - client = automl_v1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1.GetAnnotationSpecRequest( - name="name_value", - ) - - # Make the request - response = client.get_annotation_spec(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.automl_v1.types.GetAnnotationSpecRequest, dict]): - The request object. Request message for - [AutoMl.GetAnnotationSpec][google.cloud.automl.v1.AutoMl.GetAnnotationSpec]. - name (str): - Required. The resource name of the - annotation spec to retrieve. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.automl_v1.types.AnnotationSpec: - A definition of an annotation spec. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.GetAnnotationSpecRequest): - request = service.GetAnnotationSpecRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_annotation_spec] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_model(self, - request: Optional[Union[service.CreateModelRequest, dict]] = None, - *, - parent: Optional[str] = None, - model: Optional[gca_model.Model] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Creates a model. Returns a Model in the - [response][google.longrunning.Operation.response] field when it - completes. When you create a model, several model evaluations - are created for it: a global evaluation, and one evaluation for - each annotation spec. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1 - - def sample_create_model(): - # Create a client - client = automl_v1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1.CreateModelRequest( - parent="parent_value", - ) - - # Make the request - operation = client.create_model(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.automl_v1.types.CreateModelRequest, dict]): - The request object. Request message for - [AutoMl.CreateModel][google.cloud.automl.v1.AutoMl.CreateModel]. - parent (str): - Required. Resource name of the parent - project where the model is being - created. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - model (google.cloud.automl_v1.types.Model): - Required. The model to create. - This corresponds to the ``model`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.automl_v1.types.Model` API proto - representing a trained machine learning model. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
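- # Illustrative sketch (hypothetical resource names): passing both styles, e.g. - #   client.create_model(request=automl_v1.CreateModelRequest(parent="projects/p/locations/l"), - #                       parent="projects/p/locations/l") - # raises ValueError; supply either the request object or the flattened - # parent/model arguments, never both.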
- flattened_params = [parent, model] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.CreateModelRequest): - request = service.CreateModelRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if model is not None: - request.model = model - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_model] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - gca_model.Model, - metadata_type=operations.OperationMetadata, - ) - - # Done; return the response. - return response - - def get_model(self, - request: Optional[Union[service.GetModelRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> model.Model: - r"""Gets a model. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1 - - def sample_get_model(): - # Create a client - client = automl_v1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1.GetModelRequest( - name="name_value", - ) - - # Make the request - response = client.get_model(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.automl_v1.types.GetModelRequest, dict]): - The request object. Request message for - [AutoMl.GetModel][google.cloud.automl.v1.AutoMl.GetModel]. - name (str): - Required. Resource name of the model. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.automl_v1.types.Model: - API proto representing a trained - machine learning model. 
- - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.GetModelRequest): - request = service.GetModelRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_model] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_models(self, - request: Optional[Union[service.ListModelsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListModelsPager: - r"""Lists models. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1 - - def sample_list_models(): - # Create a client - client = automl_v1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1.ListModelsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_models(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.automl_v1.types.ListModelsRequest, dict]): - The request object. Request message for - [AutoMl.ListModels][google.cloud.automl.v1.AutoMl.ListModels]. - parent (str): - Required. Resource name of the - project, from which to list the models. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- - Returns: - google.cloud.automl_v1.services.auto_ml.pagers.ListModelsPager: - Response message for - [AutoMl.ListModels][google.cloud.automl.v1.AutoMl.ListModels]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListModelsRequest): - request = service.ListModelsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_models] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListModelsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_model(self, - request: Optional[Union[service.DeleteModelRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Deletes a model. Returns ``google.protobuf.Empty`` in the - [response][google.longrunning.Operation.response] field when it - completes, and ``delete_details`` in the - [metadata][google.longrunning.Operation.metadata] field. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1 - - def sample_delete_model(): - # Create a client - client = automl_v1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1.DeleteModelRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_model(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.automl_v1.types.DeleteModelRequest, dict]): - The request object. 
Request message for - [AutoMl.DeleteModel][google.cloud.automl.v1.AutoMl.DeleteModel]. - name (str): - Required. Resource name of the model - being deleted. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.DeleteModelRequest): - request = service.DeleteModelRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_model] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=operations.OperationMetadata, - ) - - # Done; return the response. - return response - - def update_model(self, - request: Optional[Union[service.UpdateModelRequest, dict]] = None, - *, - model: Optional[gca_model.Model] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gca_model.Model: - r"""Updates a model. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1 - - def sample_update_model(): - # Create a client - client = automl_v1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1.UpdateModelRequest( - ) - - # Make the request - response = client.update_model(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.automl_v1.types.UpdateModelRequest, dict]): - The request object. Request message for - [AutoMl.UpdateModel][google.cloud.automl.v1.AutoMl.UpdateModel] - model (google.cloud.automl_v1.types.Model): - Required. The model which replaces - the resource on the server. - - This corresponds to the ``model`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. The update mask applies to - the resource. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.automl_v1.types.Model: - API proto representing a trained - machine learning model. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [model, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.UpdateModelRequest): - request = service.UpdateModelRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if model is not None: - request.model = model - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_model] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("model.name", request.model.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- - return response - - def deploy_model(self, - request: Optional[Union[service.DeployModelRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Deploys a model. If a model is already deployed, deploying it - with the same parameters has no effect. Deploying with different - parameters (e.g., changing - [node_number][google.cloud.automl.v1p1beta.ImageObjectDetectionModelDeploymentMetadata.node_number]) - will reset the deployment state without pausing the model's - availability. - - Only applicable for Text Classification, Image Object Detection, - Tables, and Image Segmentation; all other domains manage - deployment automatically. - - Returns an empty response in the - [response][google.longrunning.Operation.response] field when it - completes. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1 - - def sample_deploy_model(): - # Create a client - client = automl_v1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1.DeployModelRequest( - name="name_value", - ) - - # Make the request - operation = client.deploy_model(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.automl_v1.types.DeployModelRequest, dict]): - The request object. Request message for - [AutoMl.DeployModel][google.cloud.automl.v1.AutoMl.DeployModel]. - name (str): - Required. Resource name of the model - to deploy. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty`. A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request.
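- # Editor's note (illustrative, not part of the generated file): this guard - # rejects mixed call styles. For example, client.deploy_model(request=request, name="n") - # would raise ValueError, while client.deploy_model(name="n") alone is accepted.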
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.DeployModelRequest): - request = service.DeployModelRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.deploy_model] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=operations.OperationMetadata, - ) - - # Done; return the response. - return response - - def undeploy_model(self, - request: Optional[Union[service.UndeployModelRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Undeploys a model. If the model is not deployed, this method has - no effect. - - Only applicable for Text Classification, Image Object Detection - and Tables; all other domains manage deployment automatically. - - Returns an empty response in the - [response][google.longrunning.Operation.response] field when it - completes. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1 - - def sample_undeploy_model(): - # Create a client - client = automl_v1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1.UndeployModelRequest( - name="name_value", - ) - - # Make the request - operation = client.undeploy_model(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.automl_v1.types.UndeployModelRequest, dict]): - The request object. Request message for - [AutoMl.UndeployModel][google.cloud.automl.v1.AutoMl.UndeployModel]. - name (str): - Required. Resource name of the model - to undeploy. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty`. A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.UndeployModelRequest): - request = service.UndeployModelRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.undeploy_model] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=operations.OperationMetadata, - ) - - # Done; return the response. - return response - - def export_model(self, - request: Optional[Union[service.ExportModelRequest, dict]] = None, - *, - name: Optional[str] = None, - output_config: Optional[io.ModelExportOutputConfig] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Exports a trained, "export-able" model to a user-specified - Google Cloud Storage location. A model is considered export-able - if and only if it has an export format defined for it in - [ModelExportOutputConfig][google.cloud.automl.v1.ModelExportOutputConfig]. - - Returns an empty response in the - [response][google.longrunning.Operation.response] field when it - completes. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1 - - def sample_export_model(): - # Create a client - client = automl_v1.AutoMlClient() - - # Initialize request argument(s) - output_config = automl_v1.ModelExportOutputConfig() - output_config.gcs_destination.output_uri_prefix = "output_uri_prefix_value" - - request = automl_v1.ExportModelRequest( - name="name_value", - output_config=output_config, - ) - - # Make the request - operation = client.export_model(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.automl_v1.types.ExportModelRequest, dict]): - The request object. Request message for - [AutoMl.ExportModel][google.cloud.automl.v1.AutoMl.ExportModel]. - Models need to be enabled for exporting; otherwise, an - error code will be returned. - name (str): - Required. The resource name of the - model to export. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - output_config (google.cloud.automl_v1.types.ModelExportOutputConfig): - Required. The desired output location - and configuration. - - This corresponds to the ``output_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty`. A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name, output_config] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ExportModelRequest): - request = service.ExportModelRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if output_config is not None: - request.output_config = output_config - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling.
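- # Editor's note (illustrative, not part of the generated file): the lookup below - # returns export_model already wrapped with the transport defaults registered in - # _prep_wrapped_messages (for export_model, a 5.0 second default timeout; see - # transports/base.py later in this diff).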
- rpc = self._transport._wrapped_methods[self._transport.export_model] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=operations.OperationMetadata, - ) - - # Done; return the response. - return response - - def get_model_evaluation(self, - request: Optional[Union[service.GetModelEvaluationRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> model_evaluation.ModelEvaluation: - r"""Gets a model evaluation. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1 - - def sample_get_model_evaluation(): - # Create a client - client = automl_v1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1.GetModelEvaluationRequest( - name="name_value", - ) - - # Make the request - response = client.get_model_evaluation(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.automl_v1.types.GetModelEvaluationRequest, dict]): - The request object. Request message for - [AutoMl.GetModelEvaluation][google.cloud.automl.v1.AutoMl.GetModelEvaluation]. - name (str): - Required. Resource name for the model - evaluation. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.automl_v1.types.ModelEvaluation: - Evaluation results of a model. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
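- # Editor's note (illustrative, not part of the generated file): a plain dict is - # also accepted here; for example, request={"name": "..."} (a hypothetical resource - # name) is coerced into a GetModelEvaluationRequest by the proto-plus constructor.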
- if not isinstance(request, service.GetModelEvaluationRequest): - request = service.GetModelEvaluationRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_model_evaluation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_model_evaluations(self, - request: Optional[Union[service.ListModelEvaluationsRequest, dict]] = None, - *, - parent: Optional[str] = None, - filter: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListModelEvaluationsPager: - r"""Lists model evaluations. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1 - - def sample_list_model_evaluations(): - # Create a client - client = automl_v1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1.ListModelEvaluationsRequest( - parent="parent_value", - filter="filter_value", - ) - - # Make the request - page_result = client.list_model_evaluations(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.automl_v1.types.ListModelEvaluationsRequest, dict]): - The request object. Request message for - [AutoMl.ListModelEvaluations][google.cloud.automl.v1.AutoMl.ListModelEvaluations]. - parent (str): - Required. Resource name of the model - to list the model evaluations for. If - modelId is set as "-", this will list - model evaluations from across all models - of the parent location. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - filter (str): - Required. An expression for filtering the results of the - request. - - - ``annotation_spec_id`` - for =, != or existence. See - example below for the last. - - Some examples of using the filter are: - - - ``annotation_spec_id!=4`` --> The model evaluation - was done for annotation spec with ID different than - 4. - - ``NOT annotation_spec_id:*`` --> The model evaluation - was done for aggregate of all annotation specs. - - This corresponds to the ``filter`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.automl_v1.services.auto_ml.pagers.ListModelEvaluationsPager: - Response message for - [AutoMl.ListModelEvaluations][google.cloud.automl.v1.AutoMl.ListModelEvaluations]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, filter] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListModelEvaluationsRequest): - request = service.ListModelEvaluationsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if filter is not None: - request.filter = filter - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_model_evaluations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListModelEvaluationsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "AutoMlClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "AutoMlClient", -) diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/auto_ml/pagers.py b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/auto_ml/pagers.py deleted file mode 100644 index 2091f0d52f54..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/auto_ml/pagers.py +++ /dev/null @@ -1,447 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.automl_v1.types import dataset -from google.cloud.automl_v1.types import model -from google.cloud.automl_v1.types import model_evaluation -from google.cloud.automl_v1.types import service - - -class ListDatasetsPager: - """A pager for iterating through ``list_datasets`` requests. - - This class thinly wraps an initial - :class:`google.cloud.automl_v1.types.ListDatasetsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``datasets`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListDatasets`` requests and continue to iterate - through the ``datasets`` field on the - corresponding responses. - - All the usual :class:`google.cloud.automl_v1.types.ListDatasetsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., service.ListDatasetsResponse], - request: service.ListDatasetsRequest, - response: service.ListDatasetsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.automl_v1.types.ListDatasetsRequest): - The initial request object. - response (google.cloud.automl_v1.types.ListDatasetsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
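- - Example (editor's sketch, not part of the generated file; assumes an - ``AutoMlClient`` instance named ``client``):: - - pager = client.list_datasets(parent="projects/p/locations/l") - for ds in pager: # yields Dataset items, fetching further pages as needed - print(ds.display_name)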
- """ - self._method = method - self._request = service.ListDatasetsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[service.ListDatasetsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[dataset.Dataset]: - for page in self.pages: - yield from page.datasets - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDatasetsAsyncPager: - """A pager for iterating through ``list_datasets`` requests. - - This class thinly wraps an initial - :class:`google.cloud.automl_v1.types.ListDatasetsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``datasets`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDatasets`` requests and continue to iterate - through the ``datasets`` field on the - corresponding responses. - - All the usual :class:`google.cloud.automl_v1.types.ListDatasetsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[service.ListDatasetsResponse]], - request: service.ListDatasetsRequest, - response: service.ListDatasetsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.automl_v1.types.ListDatasetsRequest): - The initial request object. - response (google.cloud.automl_v1.types.ListDatasetsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
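- - Example (editor's sketch, not part of the generated file; assumes an - ``AutoMlAsyncClient`` instance named ``client``, inside a coroutine):: - - pager = await client.list_datasets(parent="projects/p/locations/l") - async for ds in pager: - print(ds.display_name)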
- """ - self._method = method - self._request = service.ListDatasetsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[service.ListDatasetsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[dataset.Dataset]: - async def async_generator(): - async for page in self.pages: - for response in page.datasets: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListModelsPager: - """A pager for iterating through ``list_models`` requests. - - This class thinly wraps an initial - :class:`google.cloud.automl_v1.types.ListModelsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``model`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListModels`` requests and continue to iterate - through the ``model`` field on the - corresponding responses. - - All the usual :class:`google.cloud.automl_v1.types.ListModelsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., service.ListModelsResponse], - request: service.ListModelsRequest, - response: service.ListModelsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.automl_v1.types.ListModelsRequest): - The initial request object. - response (google.cloud.automl_v1.types.ListModelsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = service.ListModelsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[service.ListModelsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[model.Model]: - for page in self.pages: - yield from page.model - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListModelsAsyncPager: - """A pager for iterating through ``list_models`` requests. - - This class thinly wraps an initial - :class:`google.cloud.automl_v1.types.ListModelsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``model`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListModels`` requests and continue to iterate - through the ``model`` field on the - corresponding responses. - - All the usual :class:`google.cloud.automl_v1.types.ListModelsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[service.ListModelsResponse]], - request: service.ListModelsRequest, - response: service.ListModelsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.automl_v1.types.ListModelsRequest): - The initial request object. - response (google.cloud.automl_v1.types.ListModelsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = service.ListModelsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[service.ListModelsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[model.Model]: - async def async_generator(): - async for page in self.pages: - for response in page.model: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListModelEvaluationsPager: - """A pager for iterating through ``list_model_evaluations`` requests. - - This class thinly wraps an initial - :class:`google.cloud.automl_v1.types.ListModelEvaluationsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``model_evaluation`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListModelEvaluations`` requests and continue to iterate - through the ``model_evaluation`` field on the - corresponding responses. - - All the usual :class:`google.cloud.automl_v1.types.ListModelEvaluationsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., service.ListModelEvaluationsResponse], - request: service.ListModelEvaluationsRequest, - response: service.ListModelEvaluationsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.automl_v1.types.ListModelEvaluationsRequest): - The initial request object. - response (google.cloud.automl_v1.types.ListModelEvaluationsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = service.ListModelEvaluationsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[service.ListModelEvaluationsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[model_evaluation.ModelEvaluation]: - for page in self.pages: - yield from page.model_evaluation - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListModelEvaluationsAsyncPager: - """A pager for iterating through ``list_model_evaluations`` requests. - - This class thinly wraps an initial - :class:`google.cloud.automl_v1.types.ListModelEvaluationsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``model_evaluation`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListModelEvaluations`` requests and continue to iterate - through the ``model_evaluation`` field on the - corresponding responses. - - All the usual :class:`google.cloud.automl_v1.types.ListModelEvaluationsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[service.ListModelEvaluationsResponse]], - request: service.ListModelEvaluationsRequest, - response: service.ListModelEvaluationsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.automl_v1.types.ListModelEvaluationsRequest): - The initial request object. - response (google.cloud.automl_v1.types.ListModelEvaluationsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = service.ListModelEvaluationsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[service.ListModelEvaluationsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[model_evaluation.ModelEvaluation]: - async def async_generator(): - async for page in self.pages: - for response in page.model_evaluation: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/auto_ml/transports/README.rst b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/auto_ml/transports/README.rst deleted file mode 100644 index 23f3112ee6dd..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/auto_ml/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`AutoMlTransport` is the ABC for all transports. -- public child `AutoMlGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `AutoMlGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseAutoMlRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `AutoMlRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/auto_ml/transports/__init__.py b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/auto_ml/transports/__init__.py deleted file mode 100644 index ad74a7909b03..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/auto_ml/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import AutoMlTransport -from .grpc import AutoMlGrpcTransport -from .grpc_asyncio import AutoMlGrpcAsyncIOTransport -from .rest import AutoMlRestTransport -from .rest import AutoMlRestInterceptor - - -# Compile a registry of transports. 
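-# Editor's note (illustrative, not part of the generated file): this registry maps a -# transport name to its class; e.g. AutoMlClient(transport="rest") resolves to -# AutoMlRestTransport via _transport_registry["rest"].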
-_transport_registry = OrderedDict() # type: Dict[str, Type[AutoMlTransport]] -_transport_registry['grpc'] = AutoMlGrpcTransport -_transport_registry['grpc_asyncio'] = AutoMlGrpcAsyncIOTransport -_transport_registry['rest'] = AutoMlRestTransport - -__all__ = ( - 'AutoMlTransport', - 'AutoMlGrpcTransport', - 'AutoMlGrpcAsyncIOTransport', - 'AutoMlRestTransport', - 'AutoMlRestInterceptor', -) diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/auto_ml/transports/base.py b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/auto_ml/transports/base.py deleted file mode 100644 index 1bf3fcfe5e91..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/auto_ml/transports/base.py +++ /dev/null @@ -1,495 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.automl_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.automl_v1.types import annotation_spec -from google.cloud.automl_v1.types import dataset -from google.cloud.automl_v1.types import dataset as gca_dataset -from google.cloud.automl_v1.types import model -from google.cloud.automl_v1.types import model as gca_model -from google.cloud.automl_v1.types import model_evaluation -from google.cloud.automl_v1.types import service -from google.longrunning import operations_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class AutoMlTransport(abc.ABC): - """Abstract transport class for AutoMl.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'automl.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'automl.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. 
- credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self-signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply the audience if a credentials file was passed by the user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self-signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods.
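- # Editor's note (illustrative, not part of the generated file): - # gapic_v1.method.wrap_method binds each RPC to its default retry/timeout. The - # Retry objects below use exponential backoff: delays start at 0.1s and grow by - # a factor of 1.3 up to 60s, retrying only DeadlineExceeded and - # ServiceUnavailable, with an overall 5.0s deadline.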
- self._wrapped_methods = { - self.create_dataset: gapic_v1.method.wrap_method( - self.create_dataset, - default_timeout=5.0, - client_info=client_info, - ), - self.get_dataset: gapic_v1.method.wrap_method( - self.get_dataset, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=5.0, - ), - default_timeout=5.0, - client_info=client_info, - ), - self.list_datasets: gapic_v1.method.wrap_method( - self.list_datasets, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=5.0, - ), - default_timeout=5.0, - client_info=client_info, - ), - self.update_dataset: gapic_v1.method.wrap_method( - self.update_dataset, - default_timeout=5.0, - client_info=client_info, - ), - self.delete_dataset: gapic_v1.method.wrap_method( - self.delete_dataset, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=5.0, - ), - default_timeout=5.0, - client_info=client_info, - ), - self.import_data: gapic_v1.method.wrap_method( - self.import_data, - default_timeout=5.0, - client_info=client_info, - ), - self.export_data: gapic_v1.method.wrap_method( - self.export_data, - default_timeout=5.0, - client_info=client_info, - ), - self.get_annotation_spec: gapic_v1.method.wrap_method( - self.get_annotation_spec, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=5.0, - ), - default_timeout=5.0, - client_info=client_info, - ), - self.create_model: gapic_v1.method.wrap_method( - self.create_model, - default_timeout=5.0, - client_info=client_info, - ), - self.get_model: gapic_v1.method.wrap_method( - self.get_model, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=5.0, - ), - default_timeout=5.0, - client_info=client_info, - ), - self.list_models: gapic_v1.method.wrap_method( - self.list_models, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=5.0, - ), - default_timeout=5.0, - client_info=client_info, - ), - self.delete_model: gapic_v1.method.wrap_method( - self.delete_model, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=5.0, - ), - default_timeout=5.0, - client_info=client_info, - ), - self.update_model: gapic_v1.method.wrap_method( - self.update_model, - default_timeout=5.0, - client_info=client_info, - ), - self.deploy_model: gapic_v1.method.wrap_method( - self.deploy_model, - default_timeout=5.0, - client_info=client_info, - ), - self.undeploy_model: gapic_v1.method.wrap_method( - self.undeploy_model, - default_timeout=5.0, - client_info=client_info, - ), - self.export_model: gapic_v1.method.wrap_method( - self.export_model, - 
default_timeout=5.0, - client_info=client_info, - ), - self.get_model_evaluation: gapic_v1.method.wrap_method( - self.get_model_evaluation, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=5.0, - ), - default_timeout=5.0, - client_info=client_info, - ), - self.list_model_evaluations: gapic_v1.method.wrap_method( - self.list_model_evaluations, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=5.0, - ), - default_timeout=5.0, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! - """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def create_dataset(self) -> Callable[ - [service.CreateDatasetRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def get_dataset(self) -> Callable[ - [service.GetDatasetRequest], - Union[ - dataset.Dataset, - Awaitable[dataset.Dataset] - ]]: - raise NotImplementedError() - - @property - def list_datasets(self) -> Callable[ - [service.ListDatasetsRequest], - Union[ - service.ListDatasetsResponse, - Awaitable[service.ListDatasetsResponse] - ]]: - raise NotImplementedError() - - @property - def update_dataset(self) -> Callable[ - [service.UpdateDatasetRequest], - Union[ - gca_dataset.Dataset, - Awaitable[gca_dataset.Dataset] - ]]: - raise NotImplementedError() - - @property - def delete_dataset(self) -> Callable[ - [service.DeleteDatasetRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def import_data(self) -> Callable[ - [service.ImportDataRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def export_data(self) -> Callable[ - [service.ExportDataRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def get_annotation_spec(self) -> Callable[ - [service.GetAnnotationSpecRequest], - Union[ - annotation_spec.AnnotationSpec, - Awaitable[annotation_spec.AnnotationSpec] - ]]: - raise NotImplementedError() - - @property - def create_model(self) -> Callable[ - [service.CreateModelRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def get_model(self) -> Callable[ - [service.GetModelRequest], - Union[ - model.Model, - Awaitable[model.Model] - ]]: - raise NotImplementedError() - - @property - def list_models(self) -> Callable[ - [service.ListModelsRequest], - Union[ - service.ListModelsResponse, - Awaitable[service.ListModelsResponse] - ]]: - raise NotImplementedError() - - @property - def delete_model(self) -> Callable[ - [service.DeleteModelRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def update_model(self) -> Callable[ - 
[service.UpdateModelRequest], - Union[ - gca_model.Model, - Awaitable[gca_model.Model] - ]]: - raise NotImplementedError() - - @property - def deploy_model(self) -> Callable[ - [service.DeployModelRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def undeploy_model(self) -> Callable[ - [service.UndeployModelRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def export_model(self) -> Callable[ - [service.ExportModelRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def get_model_evaluation(self) -> Callable[ - [service.GetModelEvaluationRequest], - Union[ - model_evaluation.ModelEvaluation, - Awaitable[model_evaluation.ModelEvaluation] - ]]: - raise NotImplementedError() - - @property - def list_model_evaluations(self) -> Callable[ - [service.ListModelEvaluationsRequest], - Union[ - service.ListModelEvaluationsResponse, - Awaitable[service.ListModelEvaluationsResponse] - ]]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'AutoMlTransport', -) diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/auto_ml/transports/grpc.py b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/auto_ml/transports/grpc.py deleted file mode 100644 index 12365491f4a0..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/auto_ml/transports/grpc.py +++ /dev/null @@ -1,879 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
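# The wrap_method calls above bake in the service's default retry policy
# (exponential backoff from 0.1s to 60s, multiplier 1.3, retrying only
# DeadlineExceeded / ServiceUnavailable) plus a 5s timeout. A minimal sketch
# of overriding those defaults on a single call; it assumes
# google-cloud-automl is installed with application default credentials, and
# the resource name below is a placeholder.
from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries
from google.cloud import automl_v1

client = automl_v1.AutoMlClient()

# Mirror the generated defaults, but with a longer overall deadline.
custom_retry = retries.Retry(
    initial=0.1,
    maximum=60.0,
    multiplier=1.3,
    predicate=retries.if_exception_type(
        core_exceptions.DeadlineExceeded,
        core_exceptions.ServiceUnavailable,
    ),
    deadline=30.0,
)

dataset = client.get_dataset(
    name="projects/my-project/locations/us-central1/datasets/my-dataset-id",
    retry=custom_retry,  # replaces the default_retry wired in above
    timeout=30.0,        # replaces default_timeout
)
print(dataset.display_name)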
-# -import json -import logging as std_logging -import pickle -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore - -from google.cloud.automl_v1.types import annotation_spec -from google.cloud.automl_v1.types import dataset -from google.cloud.automl_v1.types import dataset as gca_dataset -from google.cloud.automl_v1.types import model -from google.cloud.automl_v1.types import model as gca_model -from google.cloud.automl_v1.types import model_evaluation -from google.cloud.automl_v1.types import service -from google.longrunning import operations_pb2 # type: ignore -from .base import AutoMlTransport, DEFAULT_CLIENT_INFO - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER - def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.automl.v1.AutoMl", - "rpcName": client_call_details.method, - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - - response = continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = response.trailing_metadata() - # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = response.result() - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response for {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.automl.v1.AutoMl", - "rpcName": client_call_details.method, - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class AutoMlGrpcTransport(AutoMlTransport): - """gRPC backend transport for AutoMl. - - AutoML Server API. 
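# The _LoggingClientInterceptor above hooks every unary-unary call for debug
# logging. A minimal sketch of the same grpc interceptor mechanism, here
# timing RPCs instead; the timing logic and class are illustrative, not part
# of the generated library.
import time

import grpc


class TimingInterceptor(grpc.UnaryUnaryClientInterceptor):
    def intercept_unary_unary(self, continuation, client_call_details, request):
        start = time.monotonic()
        call = continuation(client_call_details, request)
        call.result()  # block until the RPC completes (raises if it failed)
        print(f"{client_call_details.method} took {time.monotonic() - start:.3f}s")
        return call


channel = grpc.secure_channel(
    "automl.googleapis.com:443", grpc.ssl_channel_credentials()
)
intercepted_channel = grpc.intercept_channel(channel, TimingInterceptor())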
- - The resource names are assigned by the server. The server never - reuses names that it has created after the resources with those - names are deleted. - - An ID of a resource is the last element of the item's resource name. - For - ``projects/{project_id}/locations/{location_id}/datasets/{dataset_id}``, - then the id for the item is ``{dataset_id}``. - - Currently the only supported ``location_id`` is "us-central1". - - On any input that is documented to expect a string parameter in - snake_case or dash-case, either of those cases is accepted. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'automl.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'automl.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if a ``channel`` instance is provided. - channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. 
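# A minimal sketch of the mutual-TLS path documented above: supplying
# client_cert_source_for_mtls when building the transport by hand. The mTLS
# endpoint and the PEM file paths are assumptions, not values taken from the
# generated code.
from google.cloud import automl_v1
from google.cloud.automl_v1.services.auto_ml.transports.grpc import (
    AutoMlGrpcTransport,
)


def load_client_cert():
    # Placeholder cert/key loading; swap in your own certificate source.
    with open("client-cert.pem", "rb") as f:
        cert = f.read()
    with open("client-key.pem", "rb") as f:
        key = f.read()
    return cert, key


transport = AutoMlGrpcTransport(
    host="automl.mtls.googleapis.com",  # assumed mTLS endpoint
    client_cert_source_for_mtls=load_client_cert,
)
client = automl_v1.AutoMlClient(transport=transport)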
- client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) - - # Wrap messages. 
This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'automl.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def create_dataset(self) -> Callable[ - [service.CreateDatasetRequest], - operations_pb2.Operation]: - r"""Return a callable for the create dataset method over gRPC. - - Creates a dataset. - - Returns: - Callable[[~.CreateDatasetRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_dataset' not in self._stubs: - self._stubs['create_dataset'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1.AutoMl/CreateDataset', - request_serializer=service.CreateDatasetRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_dataset'] - - @property - def get_dataset(self) -> Callable[ - [service.GetDatasetRequest], - dataset.Dataset]: - r"""Return a callable for the get dataset method over gRPC. - - Gets a dataset. 
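# The operations_client above is what turns the Operation returned by stubs
# like create_dataset into a poll-able handle. At the public client level
# the same flow looks like this minimal sketch (IDs are placeholders; a
# translation dataset is just one concrete choice).
from google.cloud import automl_v1

client = automl_v1.AutoMlClient()
operation = client.create_dataset(
    parent="projects/my-project/locations/us-central1",
    dataset=automl_v1.Dataset(
        display_name="my_dataset",
        translation_dataset_metadata=automl_v1.TranslationDatasetMetadata(
            source_language_code="en",
            target_language_code="es",
        ),
    ),
)
created = operation.result(timeout=300)  # polls the LRO until done
print(created.name)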
- - Returns: - Callable[[~.GetDatasetRequest], - ~.Dataset]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_dataset' not in self._stubs: - self._stubs['get_dataset'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1.AutoMl/GetDataset', - request_serializer=service.GetDatasetRequest.serialize, - response_deserializer=dataset.Dataset.deserialize, - ) - return self._stubs['get_dataset'] - - @property - def list_datasets(self) -> Callable[ - [service.ListDatasetsRequest], - service.ListDatasetsResponse]: - r"""Return a callable for the list datasets method over gRPC. - - Lists datasets in a project. - - Returns: - Callable[[~.ListDatasetsRequest], - ~.ListDatasetsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_datasets' not in self._stubs: - self._stubs['list_datasets'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1.AutoMl/ListDatasets', - request_serializer=service.ListDatasetsRequest.serialize, - response_deserializer=service.ListDatasetsResponse.deserialize, - ) - return self._stubs['list_datasets'] - - @property - def update_dataset(self) -> Callable[ - [service.UpdateDatasetRequest], - gca_dataset.Dataset]: - r"""Return a callable for the update dataset method over gRPC. - - Updates a dataset. - - Returns: - Callable[[~.UpdateDatasetRequest], - ~.Dataset]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_dataset' not in self._stubs: - self._stubs['update_dataset'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1.AutoMl/UpdateDataset', - request_serializer=service.UpdateDatasetRequest.serialize, - response_deserializer=gca_dataset.Dataset.deserialize, - ) - return self._stubs['update_dataset'] - - @property - def delete_dataset(self) -> Callable[ - [service.DeleteDatasetRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete dataset method over gRPC. - - Deletes a dataset and all of its contents. Returns empty - response in the - [response][google.longrunning.Operation.response] field when it - completes, and ``delete_details`` in the - [metadata][google.longrunning.Operation.metadata] field. - - Returns: - Callable[[~.DeleteDatasetRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
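# The list_datasets stub above returns a single page per RPC; the public
# client wraps it in a pager that issues follow-up page requests lazily. A
# minimal sketch (the parent value is a placeholder):
from google.cloud import automl_v1

client = automl_v1.AutoMlClient()
pager = client.list_datasets(
    parent="projects/my-project/locations/us-central1"
)
for dataset in pager:  # transparently fetches page after page
    print(dataset.name, dataset.display_name)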
- if 'delete_dataset' not in self._stubs: - self._stubs['delete_dataset'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1.AutoMl/DeleteDataset', - request_serializer=service.DeleteDatasetRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_dataset'] - - @property - def import_data(self) -> Callable[ - [service.ImportDataRequest], - operations_pb2.Operation]: - r"""Return a callable for the import data method over gRPC. - - Imports data into a dataset. For Tables this method can only be - called on an empty Dataset. - - For Tables: - - - A - [schema_inference_version][google.cloud.automl.v1.InputConfig.params] - parameter must be explicitly set. Returns an empty response - in the [response][google.longrunning.Operation.response] - field when it completes. - - Returns: - Callable[[~.ImportDataRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'import_data' not in self._stubs: - self._stubs['import_data'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1.AutoMl/ImportData', - request_serializer=service.ImportDataRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['import_data'] - - @property - def export_data(self) -> Callable[ - [service.ExportDataRequest], - operations_pb2.Operation]: - r"""Return a callable for the export data method over gRPC. - - Exports dataset's data to the provided output location. Returns - an empty response in the - [response][google.longrunning.Operation.response] field when it - completes. - - Returns: - Callable[[~.ExportDataRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'export_data' not in self._stubs: - self._stubs['export_data'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1.AutoMl/ExportData', - request_serializer=service.ExportDataRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['export_data'] - - @property - def get_annotation_spec(self) -> Callable[ - [service.GetAnnotationSpecRequest], - annotation_spec.AnnotationSpec]: - r"""Return a callable for the get annotation spec method over gRPC. - - Gets an annotation spec. - - Returns: - Callable[[~.GetAnnotationSpecRequest], - ~.AnnotationSpec]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
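# A minimal sketch of the ImportData flow described above: point the dataset
# at CSV data in Cloud Storage and wait for the empty-response LRO. Bucket,
# object, and dataset names are placeholders.
from google.cloud import automl_v1

client = automl_v1.AutoMlClient()
operation = client.import_data(
    name="projects/my-project/locations/us-central1/datasets/my-dataset-id",
    input_config=automl_v1.InputConfig(
        gcs_source=automl_v1.GcsSource(
            input_uris=["gs://my-bucket/training-data.csv"]
        ),
    ),
)
operation.result(timeout=3600)  # empty response on success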
- if 'get_annotation_spec' not in self._stubs: - self._stubs['get_annotation_spec'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1.AutoMl/GetAnnotationSpec', - request_serializer=service.GetAnnotationSpecRequest.serialize, - response_deserializer=annotation_spec.AnnotationSpec.deserialize, - ) - return self._stubs['get_annotation_spec'] - - @property - def create_model(self) -> Callable[ - [service.CreateModelRequest], - operations_pb2.Operation]: - r"""Return a callable for the create model method over gRPC. - - Creates a model. Returns a Model in the - [response][google.longrunning.Operation.response] field when it - completes. When you create a model, several model evaluations - are created for it: a global evaluation, and one evaluation for - each annotation spec. - - Returns: - Callable[[~.CreateModelRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_model' not in self._stubs: - self._stubs['create_model'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1.AutoMl/CreateModel', - request_serializer=service.CreateModelRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_model'] - - @property - def get_model(self) -> Callable[ - [service.GetModelRequest], - model.Model]: - r"""Return a callable for the get model method over gRPC. - - Gets a model. - - Returns: - Callable[[~.GetModelRequest], - ~.Model]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_model' not in self._stubs: - self._stubs['get_model'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1.AutoMl/GetModel', - request_serializer=service.GetModelRequest.serialize, - response_deserializer=model.Model.deserialize, - ) - return self._stubs['get_model'] - - @property - def list_models(self) -> Callable[ - [service.ListModelsRequest], - service.ListModelsResponse]: - r"""Return a callable for the list models method over gRPC. - - Lists models. - - Returns: - Callable[[~.ListModelsRequest], - ~.ListModelsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_models' not in self._stubs: - self._stubs['list_models'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1.AutoMl/ListModels', - request_serializer=service.ListModelsRequest.serialize, - response_deserializer=service.ListModelsResponse.deserialize, - ) - return self._stubs['list_models'] - - @property - def delete_model(self) -> Callable[ - [service.DeleteModelRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete model method over gRPC. - - Deletes a model. Returns ``google.protobuf.Empty`` in the - [response][google.longrunning.Operation.response] field when it - completes, and ``delete_details`` in the - [metadata][google.longrunning.Operation.metadata] field. 
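# A minimal sketch of the CreateModel flow described above; training can run
# for hours, so this prints the operation name before blocking. The project,
# dataset ID, and choice of translation metadata are placeholders.
from google.cloud import automl_v1

client = automl_v1.AutoMlClient()
operation = client.create_model(
    parent="projects/my-project/locations/us-central1",
    model=automl_v1.Model(
        display_name="my_model",
        dataset_id="my-dataset-id",
        translation_model_metadata=automl_v1.TranslationModelMetadata(),
    ),
)
print("Training operation:", operation.operation.name)
model = operation.result(timeout=24 * 3600)  # resolves to the trained Model
print("Model ready:", model.name)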
- - Returns: - Callable[[~.DeleteModelRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_model' not in self._stubs: - self._stubs['delete_model'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1.AutoMl/DeleteModel', - request_serializer=service.DeleteModelRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_model'] - - @property - def update_model(self) -> Callable[ - [service.UpdateModelRequest], - gca_model.Model]: - r"""Return a callable for the update model method over gRPC. - - Updates a model. - - Returns: - Callable[[~.UpdateModelRequest], - ~.Model]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_model' not in self._stubs: - self._stubs['update_model'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1.AutoMl/UpdateModel', - request_serializer=service.UpdateModelRequest.serialize, - response_deserializer=gca_model.Model.deserialize, - ) - return self._stubs['update_model'] - - @property - def deploy_model(self) -> Callable[ - [service.DeployModelRequest], - operations_pb2.Operation]: - r"""Return a callable for the deploy model method over gRPC. - - Deploys a model. If a model is already deployed, deploying it - with the same parameters has no effect. Deploying with different - parametrs (as e.g. changing - [node_number][google.cloud.automl.v1p1beta.ImageObjectDetectionModelDeploymentMetadata.node_number]) - will reset the deployment state without pausing the model's - availability. - - Only applicable for Text Classification, Image Object Detection - , Tables, and Image Segmentation; all other domains manage - deployment automatically. - - Returns an empty response in the - [response][google.longrunning.Operation.response] field when it - completes. - - Returns: - Callable[[~.DeployModelRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'deploy_model' not in self._stubs: - self._stubs['deploy_model'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1.AutoMl/DeployModel', - request_serializer=service.DeployModelRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['deploy_model'] - - @property - def undeploy_model(self) -> Callable[ - [service.UndeployModelRequest], - operations_pb2.Operation]: - r"""Return a callable for the undeploy model method over gRPC. - - Undeploys a model. If the model is not deployed this method has - no effect. - - Only applicable for Text Classification, Image Object Detection - and Tables; all other domains manage deployment automatically. - - Returns an empty response in the - [response][google.longrunning.Operation.response] field when it - completes. 
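# A minimal sketch of the deploy/undeploy pair documented above, using the
# image object detection metadata that carries the node_count knob the
# DeployModel docstring mentions. Model name and node count are
# placeholders.
from google.cloud import automl_v1

client = automl_v1.AutoMlClient()
model_name = "projects/my-project/locations/us-central1/models/my-model-id"

deploy_op = client.deploy_model(
    request=automl_v1.DeployModelRequest(
        name=model_name,
        image_object_detection_model_deployment_metadata=(
            automl_v1.ImageObjectDetectionModelDeploymentMetadata(node_count=2)
        ),
    )
)
deploy_op.result(timeout=1800)  # empty response once the model is serving

undeploy_op = client.undeploy_model(name=model_name)
undeploy_op.result(timeout=1800)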
- - Returns: - Callable[[~.UndeployModelRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'undeploy_model' not in self._stubs: - self._stubs['undeploy_model'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1.AutoMl/UndeployModel', - request_serializer=service.UndeployModelRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['undeploy_model'] - - @property - def export_model(self) -> Callable[ - [service.ExportModelRequest], - operations_pb2.Operation]: - r"""Return a callable for the export model method over gRPC. - - Exports a trained, "export-able", model to a user specified - Google Cloud Storage location. A model is considered export-able - if and only if it has an export format defined for it in - [ModelExportOutputConfig][google.cloud.automl.v1.ModelExportOutputConfig]. - - Returns an empty response in the - [response][google.longrunning.Operation.response] field when it - completes. - - Returns: - Callable[[~.ExportModelRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'export_model' not in self._stubs: - self._stubs['export_model'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1.AutoMl/ExportModel', - request_serializer=service.ExportModelRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['export_model'] - - @property - def get_model_evaluation(self) -> Callable[ - [service.GetModelEvaluationRequest], - model_evaluation.ModelEvaluation]: - r"""Return a callable for the get model evaluation method over gRPC. - - Gets a model evaluation. - - Returns: - Callable[[~.GetModelEvaluationRequest], - ~.ModelEvaluation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_model_evaluation' not in self._stubs: - self._stubs['get_model_evaluation'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1.AutoMl/GetModelEvaluation', - request_serializer=service.GetModelEvaluationRequest.serialize, - response_deserializer=model_evaluation.ModelEvaluation.deserialize, - ) - return self._stubs['get_model_evaluation'] - - @property - def list_model_evaluations(self) -> Callable[ - [service.ListModelEvaluationsRequest], - service.ListModelEvaluationsResponse]: - r"""Return a callable for the list model evaluations method over gRPC. - - Lists model evaluations. - - Returns: - Callable[[~.ListModelEvaluationsRequest], - ~.ListModelEvaluationsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
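# A minimal sketch of listing the evaluations that CreateModel produced (one
# global plus one per annotation spec, per the docstring above). An empty
# filter is assumed to list everything; the model name is a placeholder.
from google.cloud import automl_v1

client = automl_v1.AutoMlClient()
pager = client.list_model_evaluations(
    parent="projects/my-project/locations/us-central1/models/my-model-id",
    filter="",
)
for evaluation in pager:
    # annotation_spec_id is empty for the model-wide (global) evaluation.
    print(evaluation.name, evaluation.annotation_spec_id or "<global>")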
- if 'list_model_evaluations' not in self._stubs: - self._stubs['list_model_evaluations'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1.AutoMl/ListModelEvaluations', - request_serializer=service.ListModelEvaluationsRequest.serialize, - response_deserializer=service.ListModelEvaluationsResponse.deserialize, - ) - return self._stubs['list_model_evaluations'] - - def close(self): - self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'AutoMlGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/auto_ml/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/auto_ml/transports/grpc_asyncio.py deleted file mode 100644 index 9d5749090350..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/auto_ml/transports/grpc_asyncio.py +++ /dev/null @@ -1,1074 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import inspect -import json -import pickle -import logging as std_logging -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import exceptions as core_exceptions -from google.api_core import retry_async as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.automl_v1.types import annotation_spec -from google.cloud.automl_v1.types import dataset -from google.cloud.automl_v1.types import dataset as gca_dataset -from google.cloud.automl_v1.types import model -from google.cloud.automl_v1.types import model as gca_model -from google.cloud.automl_v1.types import model_evaluation -from google.cloud.automl_v1.types import service -from google.longrunning import operations_pb2 # type: ignore -from .base import AutoMlTransport, DEFAULT_CLIENT_INFO -from .grpc import AutoMlGrpcTransport - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER - async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = 
type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.automl.v1.AutoMl", - "rpcName": str(client_call_details.method), - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - response = await continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = await response.trailing_metadata() - # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = await response - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response to rpc {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.automl.v1.AutoMl", - "rpcName": str(client_call_details.method), - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class AutoMlGrpcAsyncIOTransport(AutoMlTransport): - """gRPC AsyncIO backend transport for AutoMl. - - AutoML Server API. - - The resource names are assigned by the server. The server never - reuses names that it has created after the resources with those - names are deleted. - - An ID of a resource is the last element of the item's resource name. - For - ``projects/{project_id}/locations/{location_id}/datasets/{dataset_id}``, - then the id for the item is ``{dataset_id}``. - - Currently the only supported ``location_id`` is "us-central1". - - On any input that is documented to expect a string parameter in - snake_case or dash-case, either of those cases is accepted. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'automl.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. 
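# The AsyncIO transport above backs AutoMlAsyncClient, whose methods are
# coroutines over the same stubs. A minimal sketch (the dataset name is a
# placeholder):
import asyncio

from google.cloud import automl_v1


async def main():
    client = automl_v1.AutoMlAsyncClient()
    dataset = await client.get_dataset(
        name="projects/my-project/locations/us-central1/datasets/my-dataset-id"
    )
    print(dataset.display_name)


asyncio.run(main())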
- scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'automl.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'automl.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. 
It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, aio.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. 
- - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def create_dataset(self) -> Callable[ - [service.CreateDatasetRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create dataset method over gRPC. - - Creates a dataset. - - Returns: - Callable[[~.CreateDatasetRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_dataset' not in self._stubs: - self._stubs['create_dataset'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1.AutoMl/CreateDataset', - request_serializer=service.CreateDatasetRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_dataset'] - - @property - def get_dataset(self) -> Callable[ - [service.GetDatasetRequest], - Awaitable[dataset.Dataset]]: - r"""Return a callable for the get dataset method over gRPC. - - Gets a dataset. - - Returns: - Callable[[~.GetDatasetRequest], - Awaitable[~.Dataset]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_dataset' not in self._stubs: - self._stubs['get_dataset'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1.AutoMl/GetDataset', - request_serializer=service.GetDatasetRequest.serialize, - response_deserializer=dataset.Dataset.deserialize, - ) - return self._stubs['get_dataset'] - - @property - def list_datasets(self) -> Callable[ - [service.ListDatasetsRequest], - Awaitable[service.ListDatasetsResponse]]: - r"""Return a callable for the list datasets method over gRPC. - - Lists datasets in a project. - - Returns: - Callable[[~.ListDatasetsRequest], - Awaitable[~.ListDatasetsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_datasets' not in self._stubs: - self._stubs['list_datasets'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1.AutoMl/ListDatasets', - request_serializer=service.ListDatasetsRequest.serialize, - response_deserializer=service.ListDatasetsResponse.deserialize, - ) - return self._stubs['list_datasets'] - - @property - def update_dataset(self) -> Callable[ - [service.UpdateDatasetRequest], - Awaitable[gca_dataset.Dataset]]: - r"""Return a callable for the update dataset method over gRPC. - - Updates a dataset. 
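# A minimal sketch of async pagination over the list_datasets stub above:
# awaiting the call yields an async pager, and `async for` pulls further
# pages on demand. The parent value is a placeholder.
import asyncio

from google.cloud import automl_v1


async def main():
    client = automl_v1.AutoMlAsyncClient()
    pager = await client.list_datasets(
        parent="projects/my-project/locations/us-central1"
    )
    async for dataset in pager:
        print(dataset.name)


asyncio.run(main())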
- - Returns: - Callable[[~.UpdateDatasetRequest], - Awaitable[~.Dataset]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_dataset' not in self._stubs: - self._stubs['update_dataset'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1.AutoMl/UpdateDataset', - request_serializer=service.UpdateDatasetRequest.serialize, - response_deserializer=gca_dataset.Dataset.deserialize, - ) - return self._stubs['update_dataset'] - - @property - def delete_dataset(self) -> Callable[ - [service.DeleteDatasetRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete dataset method over gRPC. - - Deletes a dataset and all of its contents. Returns empty - response in the - [response][google.longrunning.Operation.response] field when it - completes, and ``delete_details`` in the - [metadata][google.longrunning.Operation.metadata] field. - - Returns: - Callable[[~.DeleteDatasetRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_dataset' not in self._stubs: - self._stubs['delete_dataset'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1.AutoMl/DeleteDataset', - request_serializer=service.DeleteDatasetRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_dataset'] - - @property - def import_data(self) -> Callable[ - [service.ImportDataRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the import data method over gRPC. - - Imports data into a dataset. For Tables this method can only be - called on an empty Dataset. - - For Tables: - - - A - [schema_inference_version][google.cloud.automl.v1.InputConfig.params] - parameter must be explicitly set. Returns an empty response - in the [response][google.longrunning.Operation.response] - field when it completes. - - Returns: - Callable[[~.ImportDataRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'import_data' not in self._stubs: - self._stubs['import_data'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1.AutoMl/ImportData', - request_serializer=service.ImportDataRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['import_data'] - - @property - def export_data(self) -> Callable[ - [service.ExportDataRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the export data method over gRPC. - - Exports dataset's data to the provided output location. Returns - an empty response in the - [response][google.longrunning.Operation.response] field when it - completes. - - Returns: - Callable[[~.ExportDataRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'export_data' not in self._stubs: - self._stubs['export_data'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1.AutoMl/ExportData', - request_serializer=service.ExportDataRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['export_data'] - - @property - def get_annotation_spec(self) -> Callable[ - [service.GetAnnotationSpecRequest], - Awaitable[annotation_spec.AnnotationSpec]]: - r"""Return a callable for the get annotation spec method over gRPC. - - Gets an annotation spec. - - Returns: - Callable[[~.GetAnnotationSpecRequest], - Awaitable[~.AnnotationSpec]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_annotation_spec' not in self._stubs: - self._stubs['get_annotation_spec'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1.AutoMl/GetAnnotationSpec', - request_serializer=service.GetAnnotationSpecRequest.serialize, - response_deserializer=annotation_spec.AnnotationSpec.deserialize, - ) - return self._stubs['get_annotation_spec'] - - @property - def create_model(self) -> Callable[ - [service.CreateModelRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create model method over gRPC. - - Creates a model. Returns a Model in the - [response][google.longrunning.Operation.response] field when it - completes. When you create a model, several model evaluations - are created for it: a global evaluation, and one evaluation for - each annotation spec. - - Returns: - Callable[[~.CreateModelRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_model' not in self._stubs: - self._stubs['create_model'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1.AutoMl/CreateModel', - request_serializer=service.CreateModelRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_model'] - - @property - def get_model(self) -> Callable[ - [service.GetModelRequest], - Awaitable[model.Model]]: - r"""Return a callable for the get model method over gRPC. - - Gets a model. - - Returns: - Callable[[~.GetModelRequest], - Awaitable[~.Model]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_model' not in self._stubs: - self._stubs['get_model'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1.AutoMl/GetModel', - request_serializer=service.GetModelRequest.serialize, - response_deserializer=model.Model.deserialize, - ) - return self._stubs['get_model'] - - @property - def list_models(self) -> Callable[ - [service.ListModelsRequest], - Awaitable[service.ListModelsResponse]]: - r"""Return a callable for the list models method over gRPC. - - Lists models. - - Returns: - Callable[[~.ListModelsRequest], - Awaitable[~.ListModelsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_models' not in self._stubs: - self._stubs['list_models'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1.AutoMl/ListModels', - request_serializer=service.ListModelsRequest.serialize, - response_deserializer=service.ListModelsResponse.deserialize, - ) - return self._stubs['list_models'] - - @property - def delete_model(self) -> Callable[ - [service.DeleteModelRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete model method over gRPC. - - Deletes a model. Returns ``google.protobuf.Empty`` in the - [response][google.longrunning.Operation.response] field when it - completes, and ``delete_details`` in the - [metadata][google.longrunning.Operation.metadata] field. - - Returns: - Callable[[~.DeleteModelRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_model' not in self._stubs: - self._stubs['delete_model'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1.AutoMl/DeleteModel', - request_serializer=service.DeleteModelRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_model'] - - @property - def update_model(self) -> Callable[ - [service.UpdateModelRequest], - Awaitable[gca_model.Model]]: - r"""Return a callable for the update model method over gRPC. - - Updates a model. - - Returns: - Callable[[~.UpdateModelRequest], - Awaitable[~.Model]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_model' not in self._stubs: - self._stubs['update_model'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1.AutoMl/UpdateModel', - request_serializer=service.UpdateModelRequest.serialize, - response_deserializer=gca_model.Model.deserialize, - ) - return self._stubs['update_model'] - - @property - def deploy_model(self) -> Callable[ - [service.DeployModelRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the deploy model method over gRPC. - - Deploys a model. If a model is already deployed, deploying it - with the same parameters has no effect. Deploying with different - parameters (for example,
changing - [node_number][google.cloud.automl.v1p1beta.ImageObjectDetectionModelDeploymentMetadata.node_number]) - will reset the deployment state without pausing the model's - availability. - - Only applicable for Text Classification, Image Object Detection, - Tables, and Image Segmentation; all other domains manage - deployment automatically. - - Returns an empty response in the - [response][google.longrunning.Operation.response] field when it - completes. - - Returns: - Callable[[~.DeployModelRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'deploy_model' not in self._stubs: - self._stubs['deploy_model'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1.AutoMl/DeployModel', - request_serializer=service.DeployModelRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['deploy_model'] - - @property - def undeploy_model(self) -> Callable[ - [service.UndeployModelRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the undeploy model method over gRPC. - - Undeploys a model. If the model is not deployed, this method has - no effect. - - Only applicable for Text Classification, Image Object Detection - and Tables; all other domains manage deployment automatically. - - Returns an empty response in the - [response][google.longrunning.Operation.response] field when it - completes. - - Returns: - Callable[[~.UndeployModelRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'undeploy_model' not in self._stubs: - self._stubs['undeploy_model'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1.AutoMl/UndeployModel', - request_serializer=service.UndeployModelRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['undeploy_model'] - - @property - def export_model(self) -> Callable[ - [service.ExportModelRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the export model method over gRPC. - - Exports a trained, "export-able" model to a user-specified - Google Cloud Storage location. A model is considered export-able - if and only if it has an export format defined for it in - [ModelExportOutputConfig][google.cloud.automl.v1.ModelExportOutputConfig]. - - Returns an empty response in the - [response][google.longrunning.Operation.response] field when it - completes. - - Returns: - Callable[[~.ExportModelRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each.
- if 'export_model' not in self._stubs: - self._stubs['export_model'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1.AutoMl/ExportModel', - request_serializer=service.ExportModelRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['export_model'] - - @property - def get_model_evaluation(self) -> Callable[ - [service.GetModelEvaluationRequest], - Awaitable[model_evaluation.ModelEvaluation]]: - r"""Return a callable for the get model evaluation method over gRPC. - - Gets a model evaluation. - - Returns: - Callable[[~.GetModelEvaluationRequest], - Awaitable[~.ModelEvaluation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_model_evaluation' not in self._stubs: - self._stubs['get_model_evaluation'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1.AutoMl/GetModelEvaluation', - request_serializer=service.GetModelEvaluationRequest.serialize, - response_deserializer=model_evaluation.ModelEvaluation.deserialize, - ) - return self._stubs['get_model_evaluation'] - - @property - def list_model_evaluations(self) -> Callable[ - [service.ListModelEvaluationsRequest], - Awaitable[service.ListModelEvaluationsResponse]]: - r"""Return a callable for the list model evaluations method over gRPC. - - Lists model evaluations. - - Returns: - Callable[[~.ListModelEvaluationsRequest], - Awaitable[~.ListModelEvaluationsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_model_evaluations' not in self._stubs: - self._stubs['list_model_evaluations'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1.AutoMl/ListModelEvaluations', - request_serializer=service.ListModelEvaluationsRequest.serialize, - response_deserializer=service.ListModelEvaluationsResponse.deserialize, - ) - return self._stubs['list_model_evaluations'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.create_dataset: self._wrap_method( - self.create_dataset, - default_timeout=5.0, - client_info=client_info, - ), - self.get_dataset: self._wrap_method( - self.get_dataset, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=5.0, - ), - default_timeout=5.0, - client_info=client_info, - ), - self.list_datasets: self._wrap_method( - self.list_datasets, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=5.0, - ), - default_timeout=5.0, - client_info=client_info, - ), - self.update_dataset: self._wrap_method( - self.update_dataset, - default_timeout=5.0, - client_info=client_info, - ), - self.delete_dataset: self._wrap_method( - self.delete_dataset, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=5.0, - ), - default_timeout=5.0, - client_info=client_info, - ), - self.import_data: self._wrap_method( - self.import_data, - default_timeout=5.0, - client_info=client_info, - ), - self.export_data: self._wrap_method( - self.export_data, - default_timeout=5.0, - client_info=client_info, - ), - self.get_annotation_spec: self._wrap_method( - self.get_annotation_spec, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=5.0, - ), - default_timeout=5.0, - client_info=client_info, - ), - self.create_model: self._wrap_method( - self.create_model, - default_timeout=5.0, - client_info=client_info, - ), - self.get_model: self._wrap_method( - self.get_model, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=5.0, - ), - default_timeout=5.0, - client_info=client_info, - ), - self.list_models: self._wrap_method( - self.list_models, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=5.0, - ), - default_timeout=5.0, - client_info=client_info, - ), - self.delete_model: self._wrap_method( - self.delete_model, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=5.0, - ), - default_timeout=5.0, - client_info=client_info, - ), - self.update_model: 
self._wrap_method( - self.update_model, - default_timeout=5.0, - client_info=client_info, - ), - self.deploy_model: self._wrap_method( - self.deploy_model, - default_timeout=5.0, - client_info=client_info, - ), - self.undeploy_model: self._wrap_method( - self.undeploy_model, - default_timeout=5.0, - client_info=client_info, - ), - self.export_model: self._wrap_method( - self.export_model, - default_timeout=5.0, - client_info=client_info, - ), - self.get_model_evaluation: self._wrap_method( - self.get_model_evaluation, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=5.0, - ), - default_timeout=5.0, - client_info=client_info, - ), - self.list_model_evaluations: self._wrap_method( - self.list_model_evaluations, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=5.0, - ), - default_timeout=5.0, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - -__all__ = ( - 'AutoMlGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/auto_ml/transports/rest.py b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/auto_ml/transports/rest.py deleted file mode 100644 index 3011db2b33e6..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/auto_ml/transports/rest.py +++ /dev/null @@ -1,3408 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
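For reference, the retry policy baked in by ``_prep_wrapped_messages`` above can be reproduced, or overridden per call, from user code. A minimal sketch (not part of the deleted files), assuming the public ``AutoMlAsyncClient`` and a reasonably recent ``google-api-core``; the policy values mirror the defaults above:

.. code-block:: python

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry_async
    from google.api_core.retry import if_exception_type

    from google.cloud import automl_v1

    # Same shape as the wrapped defaults: exponential backoff from 0.1s up to
    # 60s (x1.3 per attempt), retrying only transient errors, with an overall
    # deadline across all attempts.
    custom_retry = retry_async.AsyncRetry(
        initial=0.1,
        maximum=60.0,
        multiplier=1.3,
        predicate=if_exception_type(
            core_exceptions.DeadlineExceeded,
            core_exceptions.ServiceUnavailable,
        ),
        deadline=30.0,
    )


    async def get_dataset(name: str) -> automl_v1.Dataset:
        client = automl_v1.AutoMlAsyncClient()
        # Per-call retry/timeout arguments override the defaults installed by
        # _prep_wrapped_messages.
        return await client.get_dataset(name=name, retry=custom_retry, timeout=10.0)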
-# -import logging -import json # type: ignore - -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.api_core import operations_v1 - -from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - - -from google.cloud.automl_v1.types import annotation_spec -from google.cloud.automl_v1.types import dataset -from google.cloud.automl_v1.types import dataset as gca_dataset -from google.cloud.automl_v1.types import model -from google.cloud.automl_v1.types import model as gca_model -from google.cloud.automl_v1.types import model_evaluation -from google.cloud.automl_v1.types import service -from google.longrunning import operations_pb2 # type: ignore - - -from .rest_base import _BaseAutoMlRestTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = logging.getLogger(__name__) - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=f"requests@{requests_version}", -) - - -class AutoMlRestInterceptor: - """Interceptor for AutoMl. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the AutoMlRestTransport. - - .. 
code-block:: python - class MyCustomAutoMlInterceptor(AutoMlRestInterceptor): - def pre_create_dataset(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_dataset(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_model(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_model(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_dataset(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_delete_dataset(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_model(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_delete_model(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_deploy_model(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_deploy_model(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_export_data(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_export_data(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_export_model(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_export_model(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_annotation_spec(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_annotation_spec(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_dataset(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_dataset(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_model(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_model(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_model_evaluation(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_model_evaluation(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_import_data(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_import_data(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_datasets(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_datasets(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_model_evaluations(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_model_evaluations(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_models(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_models(self, response): - 
logging.log(f"Received response: {response}") - return response - - def pre_undeploy_model(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_undeploy_model(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_dataset(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_dataset(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_model(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_model(self, response): - logging.log(f"Received response: {response}") - return response - - transport = AutoMlRestTransport(interceptor=MyCustomAutoMlInterceptor()) - client = AutoMlClient(transport=transport) - - - """ - def pre_create_dataset(self, request: service.CreateDatasetRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.CreateDatasetRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_dataset - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoMl server. - """ - return request, metadata - - def post_create_dataset(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_dataset - - DEPRECATED. Please use the `post_create_dataset_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AutoMl server but before - it is returned to user code. This `post_create_dataset` interceptor runs - before the `post_create_dataset_with_metadata` interceptor. - """ - return response - - def post_create_dataset_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_dataset - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AutoMl server but before it is returned to user code. - - We recommend only using this `post_create_dataset_with_metadata` - interceptor in new development instead of the `post_create_dataset` interceptor. - When both interceptors are used, this `post_create_dataset_with_metadata` interceptor runs after the - `post_create_dataset` interceptor. The (possibly modified) response returned by - `post_create_dataset` will be passed to - `post_create_dataset_with_metadata`. - """ - return response, metadata - - def pre_create_model(self, request: service.CreateModelRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.CreateModelRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_model - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoMl server. - """ - return request, metadata - - def post_create_model(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_model - - DEPRECATED. Please use the `post_create_model_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AutoMl server but before - it is returned to user code. This `post_create_model` interceptor runs - before the `post_create_model_with_metadata` interceptor. 
- """ - return response - - def post_create_model_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_model - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AutoMl server but before it is returned to user code. - - We recommend only using this `post_create_model_with_metadata` - interceptor in new development instead of the `post_create_model` interceptor. - When both interceptors are used, this `post_create_model_with_metadata` interceptor runs after the - `post_create_model` interceptor. The (possibly modified) response returned by - `post_create_model` will be passed to - `post_create_model_with_metadata`. - """ - return response, metadata - - def pre_delete_dataset(self, request: service.DeleteDatasetRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.DeleteDatasetRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_dataset - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoMl server. - """ - return request, metadata - - def post_delete_dataset(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_dataset - - DEPRECATED. Please use the `post_delete_dataset_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AutoMl server but before - it is returned to user code. This `post_delete_dataset` interceptor runs - before the `post_delete_dataset_with_metadata` interceptor. - """ - return response - - def post_delete_dataset_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for delete_dataset - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AutoMl server but before it is returned to user code. - - We recommend only using this `post_delete_dataset_with_metadata` - interceptor in new development instead of the `post_delete_dataset` interceptor. - When both interceptors are used, this `post_delete_dataset_with_metadata` interceptor runs after the - `post_delete_dataset` interceptor. The (possibly modified) response returned by - `post_delete_dataset` will be passed to - `post_delete_dataset_with_metadata`. - """ - return response, metadata - - def pre_delete_model(self, request: service.DeleteModelRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.DeleteModelRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_model - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoMl server. - """ - return request, metadata - - def post_delete_model(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_model - - DEPRECATED. Please use the `post_delete_model_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AutoMl server but before - it is returned to user code. This `post_delete_model` interceptor runs - before the `post_delete_model_with_metadata` interceptor. 
- """ - return response - - def post_delete_model_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for delete_model - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AutoMl server but before it is returned to user code. - - We recommend only using this `post_delete_model_with_metadata` - interceptor in new development instead of the `post_delete_model` interceptor. - When both interceptors are used, this `post_delete_model_with_metadata` interceptor runs after the - `post_delete_model` interceptor. The (possibly modified) response returned by - `post_delete_model` will be passed to - `post_delete_model_with_metadata`. - """ - return response, metadata - - def pre_deploy_model(self, request: service.DeployModelRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.DeployModelRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for deploy_model - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoMl server. - """ - return request, metadata - - def post_deploy_model(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for deploy_model - - DEPRECATED. Please use the `post_deploy_model_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AutoMl server but before - it is returned to user code. This `post_deploy_model` interceptor runs - before the `post_deploy_model_with_metadata` interceptor. - """ - return response - - def post_deploy_model_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for deploy_model - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AutoMl server but before it is returned to user code. - - We recommend only using this `post_deploy_model_with_metadata` - interceptor in new development instead of the `post_deploy_model` interceptor. - When both interceptors are used, this `post_deploy_model_with_metadata` interceptor runs after the - `post_deploy_model` interceptor. The (possibly modified) response returned by - `post_deploy_model` will be passed to - `post_deploy_model_with_metadata`. - """ - return response, metadata - - def pre_export_data(self, request: service.ExportDataRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ExportDataRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for export_data - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoMl server. - """ - return request, metadata - - def post_export_data(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for export_data - - DEPRECATED. Please use the `post_export_data_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AutoMl server but before - it is returned to user code. This `post_export_data` interceptor runs - before the `post_export_data_with_metadata` interceptor. 
- """ - return response - - def post_export_data_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for export_data - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AutoMl server but before it is returned to user code. - - We recommend only using this `post_export_data_with_metadata` - interceptor in new development instead of the `post_export_data` interceptor. - When both interceptors are used, this `post_export_data_with_metadata` interceptor runs after the - `post_export_data` interceptor. The (possibly modified) response returned by - `post_export_data` will be passed to - `post_export_data_with_metadata`. - """ - return response, metadata - - def pre_export_model(self, request: service.ExportModelRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ExportModelRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for export_model - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoMl server. - """ - return request, metadata - - def post_export_model(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for export_model - - DEPRECATED. Please use the `post_export_model_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AutoMl server but before - it is returned to user code. This `post_export_model` interceptor runs - before the `post_export_model_with_metadata` interceptor. - """ - return response - - def post_export_model_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for export_model - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AutoMl server but before it is returned to user code. - - We recommend only using this `post_export_model_with_metadata` - interceptor in new development instead of the `post_export_model` interceptor. - When both interceptors are used, this `post_export_model_with_metadata` interceptor runs after the - `post_export_model` interceptor. The (possibly modified) response returned by - `post_export_model` will be passed to - `post_export_model_with_metadata`. - """ - return response, metadata - - def pre_get_annotation_spec(self, request: service.GetAnnotationSpecRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.GetAnnotationSpecRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_annotation_spec - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoMl server. - """ - return request, metadata - - def post_get_annotation_spec(self, response: annotation_spec.AnnotationSpec) -> annotation_spec.AnnotationSpec: - """Post-rpc interceptor for get_annotation_spec - - DEPRECATED. Please use the `post_get_annotation_spec_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AutoMl server but before - it is returned to user code. This `post_get_annotation_spec` interceptor runs - before the `post_get_annotation_spec_with_metadata` interceptor. 
- """ - return response - - def post_get_annotation_spec_with_metadata(self, response: annotation_spec.AnnotationSpec, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[annotation_spec.AnnotationSpec, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_annotation_spec - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AutoMl server but before it is returned to user code. - - We recommend only using this `post_get_annotation_spec_with_metadata` - interceptor in new development instead of the `post_get_annotation_spec` interceptor. - When both interceptors are used, this `post_get_annotation_spec_with_metadata` interceptor runs after the - `post_get_annotation_spec` interceptor. The (possibly modified) response returned by - `post_get_annotation_spec` will be passed to - `post_get_annotation_spec_with_metadata`. - """ - return response, metadata - - def pre_get_dataset(self, request: service.GetDatasetRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.GetDatasetRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_dataset - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoMl server. - """ - return request, metadata - - def post_get_dataset(self, response: dataset.Dataset) -> dataset.Dataset: - """Post-rpc interceptor for get_dataset - - DEPRECATED. Please use the `post_get_dataset_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AutoMl server but before - it is returned to user code. This `post_get_dataset` interceptor runs - before the `post_get_dataset_with_metadata` interceptor. - """ - return response - - def post_get_dataset_with_metadata(self, response: dataset.Dataset, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dataset.Dataset, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_dataset - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AutoMl server but before it is returned to user code. - - We recommend only using this `post_get_dataset_with_metadata` - interceptor in new development instead of the `post_get_dataset` interceptor. - When both interceptors are used, this `post_get_dataset_with_metadata` interceptor runs after the - `post_get_dataset` interceptor. The (possibly modified) response returned by - `post_get_dataset` will be passed to - `post_get_dataset_with_metadata`. - """ - return response, metadata - - def pre_get_model(self, request: service.GetModelRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.GetModelRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_model - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoMl server. - """ - return request, metadata - - def post_get_model(self, response: model.Model) -> model.Model: - """Post-rpc interceptor for get_model - - DEPRECATED. Please use the `post_get_model_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AutoMl server but before - it is returned to user code. This `post_get_model` interceptor runs - before the `post_get_model_with_metadata` interceptor. 
- """ - return response - - def post_get_model_with_metadata(self, response: model.Model, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[model.Model, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_model - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AutoMl server but before it is returned to user code. - - We recommend only using this `post_get_model_with_metadata` - interceptor in new development instead of the `post_get_model` interceptor. - When both interceptors are used, this `post_get_model_with_metadata` interceptor runs after the - `post_get_model` interceptor. The (possibly modified) response returned by - `post_get_model` will be passed to - `post_get_model_with_metadata`. - """ - return response, metadata - - def pre_get_model_evaluation(self, request: service.GetModelEvaluationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.GetModelEvaluationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_model_evaluation - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoMl server. - """ - return request, metadata - - def post_get_model_evaluation(self, response: model_evaluation.ModelEvaluation) -> model_evaluation.ModelEvaluation: - """Post-rpc interceptor for get_model_evaluation - - DEPRECATED. Please use the `post_get_model_evaluation_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AutoMl server but before - it is returned to user code. This `post_get_model_evaluation` interceptor runs - before the `post_get_model_evaluation_with_metadata` interceptor. - """ - return response - - def post_get_model_evaluation_with_metadata(self, response: model_evaluation.ModelEvaluation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[model_evaluation.ModelEvaluation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_model_evaluation - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AutoMl server but before it is returned to user code. - - We recommend only using this `post_get_model_evaluation_with_metadata` - interceptor in new development instead of the `post_get_model_evaluation` interceptor. - When both interceptors are used, this `post_get_model_evaluation_with_metadata` interceptor runs after the - `post_get_model_evaluation` interceptor. The (possibly modified) response returned by - `post_get_model_evaluation` will be passed to - `post_get_model_evaluation_with_metadata`. - """ - return response, metadata - - def pre_import_data(self, request: service.ImportDataRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ImportDataRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for import_data - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoMl server. - """ - return request, metadata - - def post_import_data(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for import_data - - DEPRECATED. Please use the `post_import_data_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AutoMl server but before - it is returned to user code. 
This `post_import_data` interceptor runs - before the `post_import_data_with_metadata` interceptor. - """ - return response - - def post_import_data_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for import_data - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AutoMl server but before it is returned to user code. - - We recommend only using this `post_import_data_with_metadata` - interceptor in new development instead of the `post_import_data` interceptor. - When both interceptors are used, this `post_import_data_with_metadata` interceptor runs after the - `post_import_data` interceptor. The (possibly modified) response returned by - `post_import_data` will be passed to - `post_import_data_with_metadata`. - """ - return response, metadata - - def pre_list_datasets(self, request: service.ListDatasetsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListDatasetsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_datasets - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoMl server. - """ - return request, metadata - - def post_list_datasets(self, response: service.ListDatasetsResponse) -> service.ListDatasetsResponse: - """Post-rpc interceptor for list_datasets - - DEPRECATED. Please use the `post_list_datasets_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AutoMl server but before - it is returned to user code. This `post_list_datasets` interceptor runs - before the `post_list_datasets_with_metadata` interceptor. - """ - return response - - def post_list_datasets_with_metadata(self, response: service.ListDatasetsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListDatasetsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_datasets - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AutoMl server but before it is returned to user code. - - We recommend only using this `post_list_datasets_with_metadata` - interceptor in new development instead of the `post_list_datasets` interceptor. - When both interceptors are used, this `post_list_datasets_with_metadata` interceptor runs after the - `post_list_datasets` interceptor. The (possibly modified) response returned by - `post_list_datasets` will be passed to - `post_list_datasets_with_metadata`. - """ - return response, metadata - - def pre_list_model_evaluations(self, request: service.ListModelEvaluationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListModelEvaluationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_model_evaluations - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoMl server. - """ - return request, metadata - - def post_list_model_evaluations(self, response: service.ListModelEvaluationsResponse) -> service.ListModelEvaluationsResponse: - """Post-rpc interceptor for list_model_evaluations - - DEPRECATED. Please use the `post_list_model_evaluations_with_metadata` - interceptor instead. 
- - Override in a subclass to read or manipulate the response - after it is returned by the AutoMl server but before - it is returned to user code. This `post_list_model_evaluations` interceptor runs - before the `post_list_model_evaluations_with_metadata` interceptor. - """ - return response - - def post_list_model_evaluations_with_metadata(self, response: service.ListModelEvaluationsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListModelEvaluationsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_model_evaluations - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AutoMl server but before it is returned to user code. - - We recommend only using this `post_list_model_evaluations_with_metadata` - interceptor in new development instead of the `post_list_model_evaluations` interceptor. - When both interceptors are used, this `post_list_model_evaluations_with_metadata` interceptor runs after the - `post_list_model_evaluations` interceptor. The (possibly modified) response returned by - `post_list_model_evaluations` will be passed to - `post_list_model_evaluations_with_metadata`. - """ - return response, metadata - - def pre_list_models(self, request: service.ListModelsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListModelsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_models - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoMl server. - """ - return request, metadata - - def post_list_models(self, response: service.ListModelsResponse) -> service.ListModelsResponse: - """Post-rpc interceptor for list_models - - DEPRECATED. Please use the `post_list_models_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AutoMl server but before - it is returned to user code. This `post_list_models` interceptor runs - before the `post_list_models_with_metadata` interceptor. - """ - return response - - def post_list_models_with_metadata(self, response: service.ListModelsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListModelsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_models - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AutoMl server but before it is returned to user code. - - We recommend only using this `post_list_models_with_metadata` - interceptor in new development instead of the `post_list_models` interceptor. - When both interceptors are used, this `post_list_models_with_metadata` interceptor runs after the - `post_list_models` interceptor. The (possibly modified) response returned by - `post_list_models` will be passed to - `post_list_models_with_metadata`. - """ - return response, metadata - - def pre_undeploy_model(self, request: service.UndeployModelRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.UndeployModelRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for undeploy_model - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoMl server. - """ - return request, metadata - - def post_undeploy_model(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for undeploy_model - - DEPRECATED. 
Please use the `post_undeploy_model_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AutoMl server but before - it is returned to user code. This `post_undeploy_model` interceptor runs - before the `post_undeploy_model_with_metadata` interceptor. - """ - return response - - def post_undeploy_model_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for undeploy_model - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AutoMl server but before it is returned to user code. - - We recommend only using this `post_undeploy_model_with_metadata` - interceptor in new development instead of the `post_undeploy_model` interceptor. - When both interceptors are used, this `post_undeploy_model_with_metadata` interceptor runs after the - `post_undeploy_model` interceptor. The (possibly modified) response returned by - `post_undeploy_model` will be passed to - `post_undeploy_model_with_metadata`. - """ - return response, metadata - - def pre_update_dataset(self, request: service.UpdateDatasetRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.UpdateDatasetRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_dataset - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoMl server. - """ - return request, metadata - - def post_update_dataset(self, response: gca_dataset.Dataset) -> gca_dataset.Dataset: - """Post-rpc interceptor for update_dataset - - DEPRECATED. Please use the `post_update_dataset_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AutoMl server but before - it is returned to user code. This `post_update_dataset` interceptor runs - before the `post_update_dataset_with_metadata` interceptor. - """ - return response - - def post_update_dataset_with_metadata(self, response: gca_dataset.Dataset, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gca_dataset.Dataset, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_dataset - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AutoMl server but before it is returned to user code. - - We recommend only using this `post_update_dataset_with_metadata` - interceptor in new development instead of the `post_update_dataset` interceptor. - When both interceptors are used, this `post_update_dataset_with_metadata` interceptor runs after the - `post_update_dataset` interceptor. The (possibly modified) response returned by - `post_update_dataset` will be passed to - `post_update_dataset_with_metadata`. - """ - return response, metadata - - def pre_update_model(self, request: service.UpdateModelRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.UpdateModelRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_model - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoMl server. - """ - return request, metadata - - def post_update_model(self, response: gca_model.Model) -> gca_model.Model: - """Post-rpc interceptor for update_model - - DEPRECATED. 
Please use the `post_update_model_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AutoMl server but before - it is returned to user code. This `post_update_model` interceptor runs - before the `post_update_model_with_metadata` interceptor. - """ - return response - - def post_update_model_with_metadata(self, response: gca_model.Model, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gca_model.Model, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_model - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AutoMl server but before it is returned to user code. - - We recommend only using this `post_update_model_with_metadata` - interceptor in new development instead of the `post_update_model` interceptor. - When both interceptors are used, this `post_update_model_with_metadata` interceptor runs after the - `post_update_model` interceptor. The (possibly modified) response returned by - `post_update_model` will be passed to - `post_update_model_with_metadata`. - """ - return response, metadata - - -@dataclasses.dataclass -class AutoMlRestStub: - _session: AuthorizedSession - _host: str - _interceptor: AutoMlRestInterceptor - - -class AutoMlRestTransport(_BaseAutoMlRestTransport): - """REST backend synchronous transport for AutoMl. - - AutoML Server API. - - The resource names are assigned by the server. The server never - reuses names that it has created after the resources with those - names are deleted. - - An ID of a resource is the last element of the item's resource name. - For - ``projects/{project_id}/locations/{location_id}/datasets/{dataset_id}``, - then the id for the item is ``{dataset_id}``. - - Currently the only supported ``location_id`` is "us-central1". - - On any input that is documented to expect a string parameter in - snake_case or dash-case, either of those cases is accepted. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'automl.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[AutoMlRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'automl.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. 
- client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. - # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - url_scheme=url_scheme, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or AutoMlRestInterceptor() - self._prep_wrapped_messages(client_info) - - @property - def operations_client(self) -> operations_v1.AbstractOperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Only create a new client if we do not already have one. - if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.CancelOperation': [ - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - ], - 'google.longrunning.Operations.DeleteOperation': [ - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ], - 'google.longrunning.Operations.GetOperation': [ - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ], - 'google.longrunning.Operations.ListOperations': [ - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', - }, - ], - 'google.longrunning.Operations.WaitOperation': [ - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:wait', - 'body': '*', - }, - ], - } - - rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1") - - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) - - # Return the client from cache. 
- return self._operations_client - - class _CreateDataset(_BaseAutoMlRestTransport._BaseCreateDataset, AutoMlRestStub): - def __hash__(self): - return hash("AutoMlRestTransport.CreateDataset") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: service.CreateDatasetRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the create dataset method over HTTP. - - Args: - request (~.service.CreateDatasetRequest): - The request object. Request message for - [AutoMl.CreateDataset][google.cloud.automl.v1.AutoMl.CreateDataset]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseAutoMlRestTransport._BaseCreateDataset._get_http_options() - - request, metadata = self._interceptor.pre_create_dataset(request, metadata) - transcoded_request = _BaseAutoMlRestTransport._BaseCreateDataset._get_transcoded_request(http_options, request) - - body = _BaseAutoMlRestTransport._BaseCreateDataset._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAutoMlRestTransport._BaseCreateDataset._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.automl_v1.AutoMlClient.CreateDataset", - extra = { - "serviceName": "google.cloud.automl.v1.AutoMl", - "rpcName": "CreateDataset", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AutoMlRestTransport._CreateDataset._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
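                # (Editorial sketch.) core_exceptions.from_http_response maps the
                # HTTP status to the matching GoogleAPICallError subclass, so
                # callers can catch specific error types, e.g.:
                #
                #   try:
                #       client.create_dataset(request=request)
                #   except core_exceptions.PermissionDenied:  # HTTP 403
                #       pass  # handle a missing IAM permission
                #   except core_exceptions.NotFound:          # HTTP 404
                #       pass  # handle a bad parent resource name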
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_dataset(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_dataset_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.automl_v1.AutoMlClient.create_dataset", - extra = { - "serviceName": "google.cloud.automl.v1.AutoMl", - "rpcName": "CreateDataset", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateModel(_BaseAutoMlRestTransport._BaseCreateModel, AutoMlRestStub): - def __hash__(self): - return hash("AutoMlRestTransport.CreateModel") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: service.CreateModelRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the create model method over HTTP. - - Args: - request (~.service.CreateModelRequest): - The request object. Request message for - [AutoMl.CreateModel][google.cloud.automl.v1.AutoMl.CreateModel]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = _BaseAutoMlRestTransport._BaseCreateModel._get_http_options() - - request, metadata = self._interceptor.pre_create_model(request, metadata) - transcoded_request = _BaseAutoMlRestTransport._BaseCreateModel._get_transcoded_request(http_options, request) - - body = _BaseAutoMlRestTransport._BaseCreateModel._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAutoMlRestTransport._BaseCreateModel._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.automl_v1.AutoMlClient.CreateModel", - extra = { - "serviceName": "google.cloud.automl.v1.AutoMl", - "rpcName": "CreateModel", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AutoMlRestTransport._CreateModel._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_model(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_model_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.automl_v1.AutoMlClient.create_model", - extra = { - "serviceName": "google.cloud.automl.v1.AutoMl", - "rpcName": "CreateModel", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteDataset(_BaseAutoMlRestTransport._BaseDeleteDataset, AutoMlRestStub): - def __hash__(self): - return hash("AutoMlRestTransport.DeleteDataset") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: service.DeleteDatasetRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete dataset method over HTTP. - - Args: - request (~.service.DeleteDatasetRequest): - The request object. 
Request message for - [AutoMl.DeleteDataset][google.cloud.automl.v1.AutoMl.DeleteDataset]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseAutoMlRestTransport._BaseDeleteDataset._get_http_options() - - request, metadata = self._interceptor.pre_delete_dataset(request, metadata) - transcoded_request = _BaseAutoMlRestTransport._BaseDeleteDataset._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAutoMlRestTransport._BaseDeleteDataset._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.automl_v1.AutoMlClient.DeleteDataset", - extra = { - "serviceName": "google.cloud.automl.v1.AutoMl", - "rpcName": "DeleteDataset", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AutoMlRestTransport._DeleteDataset._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
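                # (Editorial sketch.) The json_format.Parse call below passes
                # ignore_unknown_fields=True so that newer server-side fields do
                # not break older clients; a minimal standalone illustration:
                #
                #   from google.longrunning import operations_pb2
                #   from google.protobuf import json_format
                #   op = operations_pb2.Operation()
                #   json_format.Parse('{"name": "op-1", "someNewField": true}',
                #                     op, ignore_unknown_fields=True)  # no error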
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_delete_dataset(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_dataset_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.automl_v1.AutoMlClient.delete_dataset", - extra = { - "serviceName": "google.cloud.automl.v1.AutoMl", - "rpcName": "DeleteDataset", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteModel(_BaseAutoMlRestTransport._BaseDeleteModel, AutoMlRestStub): - def __hash__(self): - return hash("AutoMlRestTransport.DeleteModel") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: service.DeleteModelRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete model method over HTTP. - - Args: - request (~.service.DeleteModelRequest): - The request object. Request message for - [AutoMl.DeleteModel][google.cloud.automl.v1.AutoMl.DeleteModel]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = _BaseAutoMlRestTransport._BaseDeleteModel._get_http_options() - - request, metadata = self._interceptor.pre_delete_model(request, metadata) - transcoded_request = _BaseAutoMlRestTransport._BaseDeleteModel._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAutoMlRestTransport._BaseDeleteModel._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.automl_v1.AutoMlClient.DeleteModel", - extra = { - "serviceName": "google.cloud.automl.v1.AutoMl", - "rpcName": "DeleteModel", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AutoMlRestTransport._DeleteModel._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_delete_model(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_model_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.automl_v1.AutoMlClient.delete_model", - extra = { - "serviceName": "google.cloud.automl.v1.AutoMl", - "rpcName": "DeleteModel", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeployModel(_BaseAutoMlRestTransport._BaseDeployModel, AutoMlRestStub): - def __hash__(self): - return hash("AutoMlRestTransport.DeployModel") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: service.DeployModelRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the deploy model method over HTTP. - - Args: - request (~.service.DeployModelRequest): - The request object. Request message for - [AutoMl.DeployModel][google.cloud.automl.v1.AutoMl.DeployModel]. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseAutoMlRestTransport._BaseDeployModel._get_http_options() - - request, metadata = self._interceptor.pre_deploy_model(request, metadata) - transcoded_request = _BaseAutoMlRestTransport._BaseDeployModel._get_transcoded_request(http_options, request) - - body = _BaseAutoMlRestTransport._BaseDeployModel._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAutoMlRestTransport._BaseDeployModel._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.automl_v1.AutoMlClient.DeployModel", - extra = { - "serviceName": "google.cloud.automl.v1.AutoMl", - "rpcName": "DeployModel", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AutoMlRestTransport._DeployModel._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
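                # (Editorial sketch.) As the docstring above notes, metadata
                # values are strings unless the key ends in "-bin", in which
                # case the value must be bytes; both keys here are hypothetical:
                #
                #   metadata = (
                #       ("x-example-text", "trace-abc123"),
                #       ("x-example-bin", b"\x00\x01"),
                #   )
                #   client.deploy_model(request=request, metadata=metadata)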
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_deploy_model(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_deploy_model_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.automl_v1.AutoMlClient.deploy_model", - extra = { - "serviceName": "google.cloud.automl.v1.AutoMl", - "rpcName": "DeployModel", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ExportData(_BaseAutoMlRestTransport._BaseExportData, AutoMlRestStub): - def __hash__(self): - return hash("AutoMlRestTransport.ExportData") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: service.ExportDataRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the export data method over HTTP. - - Args: - request (~.service.ExportDataRequest): - The request object. Request message for - [AutoMl.ExportData][google.cloud.automl.v1.AutoMl.ExportData]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = _BaseAutoMlRestTransport._BaseExportData._get_http_options() - - request, metadata = self._interceptor.pre_export_data(request, metadata) - transcoded_request = _BaseAutoMlRestTransport._BaseExportData._get_transcoded_request(http_options, request) - - body = _BaseAutoMlRestTransport._BaseExportData._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAutoMlRestTransport._BaseExportData._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.automl_v1.AutoMlClient.ExportData", - extra = { - "serviceName": "google.cloud.automl.v1.AutoMl", - "rpcName": "ExportData", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AutoMlRestTransport._ExportData._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_export_data(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_export_data_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.automl_v1.AutoMlClient.export_data", - extra = { - "serviceName": "google.cloud.automl.v1.AutoMl", - "rpcName": "ExportData", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ExportModel(_BaseAutoMlRestTransport._BaseExportModel, AutoMlRestStub): - def __hash__(self): - return hash("AutoMlRestTransport.ExportModel") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: service.ExportModelRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the export model method over HTTP. - - Args: - request (~.service.ExportModelRequest): - The request object. 
Request message for - [AutoMl.ExportModel][google.cloud.automl.v1.AutoMl.ExportModel]. - Models need to be enabled for exporting, otherwise an - error code will be returned. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseAutoMlRestTransport._BaseExportModel._get_http_options() - - request, metadata = self._interceptor.pre_export_model(request, metadata) - transcoded_request = _BaseAutoMlRestTransport._BaseExportModel._get_transcoded_request(http_options, request) - - body = _BaseAutoMlRestTransport._BaseExportModel._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAutoMlRestTransport._BaseExportModel._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.automl_v1.AutoMlClient.ExportModel", - extra = { - "serviceName": "google.cloud.automl.v1.AutoMl", - "rpcName": "ExportModel", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AutoMlRestTransport._ExportModel._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
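                # (Editorial sketch.) The *_with_metadata interceptor hooks
                # receive the decoded response plus the response headers; a
                # hypothetical subclass could inspect both:
                #
                #   class HeaderLoggingInterceptor(AutoMlRestInterceptor):
                #       def post_export_model_with_metadata(self, response, metadata):
                #           print(dict(metadata).get("x-goog-request-id"))
                #           return response, metadata
                #
                #   transport = AutoMlRestTransport(
                #       interceptor=HeaderLoggingInterceptor())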
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_export_model(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_export_model_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.automl_v1.AutoMlClient.export_model", - extra = { - "serviceName": "google.cloud.automl.v1.AutoMl", - "rpcName": "ExportModel", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetAnnotationSpec(_BaseAutoMlRestTransport._BaseGetAnnotationSpec, AutoMlRestStub): - def __hash__(self): - return hash("AutoMlRestTransport.GetAnnotationSpec") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: service.GetAnnotationSpecRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> annotation_spec.AnnotationSpec: - r"""Call the get annotation spec method over HTTP. - - Args: - request (~.service.GetAnnotationSpecRequest): - The request object. Request message for - [AutoMl.GetAnnotationSpec][google.cloud.automl.v1.AutoMl.GetAnnotationSpec]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.annotation_spec.AnnotationSpec: - A definition of an annotation spec. 
- """ - - http_options = _BaseAutoMlRestTransport._BaseGetAnnotationSpec._get_http_options() - - request, metadata = self._interceptor.pre_get_annotation_spec(request, metadata) - transcoded_request = _BaseAutoMlRestTransport._BaseGetAnnotationSpec._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAutoMlRestTransport._BaseGetAnnotationSpec._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.automl_v1.AutoMlClient.GetAnnotationSpec", - extra = { - "serviceName": "google.cloud.automl.v1.AutoMl", - "rpcName": "GetAnnotationSpec", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AutoMlRestTransport._GetAnnotationSpec._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = annotation_spec.AnnotationSpec() - pb_resp = annotation_spec.AnnotationSpec.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_annotation_spec(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_annotation_spec_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = annotation_spec.AnnotationSpec.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.automl_v1.AutoMlClient.get_annotation_spec", - extra = { - "serviceName": "google.cloud.automl.v1.AutoMl", - "rpcName": "GetAnnotationSpec", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetDataset(_BaseAutoMlRestTransport._BaseGetDataset, AutoMlRestStub): - def __hash__(self): - return hash("AutoMlRestTransport.GetDataset") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: service.GetDatasetRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> dataset.Dataset: - r"""Call the get dataset method over HTTP. - - Args: - request (~.service.GetDatasetRequest): - The request object. 
Request message for - [AutoMl.GetDataset][google.cloud.automl.v1.AutoMl.GetDataset]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dataset.Dataset: - A workspace for solving a single, - particular machine learning (ML) - problem. A workspace contains examples - that may be annotated. - - """ - - http_options = _BaseAutoMlRestTransport._BaseGetDataset._get_http_options() - - request, metadata = self._interceptor.pre_get_dataset(request, metadata) - transcoded_request = _BaseAutoMlRestTransport._BaseGetDataset._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAutoMlRestTransport._BaseGetDataset._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.automl_v1.AutoMlClient.GetDataset", - extra = { - "serviceName": "google.cloud.automl.v1.AutoMl", - "rpcName": "GetDataset", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AutoMlRestTransport._GetDataset._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
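                # (Editorial note, sketch.) dataset.Dataset is a proto-plus
                # wrapper; Dataset.pb(resp) returns the underlying protobuf
                # message, so parsing into pb_resp below populates resp itself:
                #
                #   d = dataset.Dataset()
                #   dataset.Dataset.pb(d).name = "projects/p/locations/l/datasets/d1"
                #   assert d.name == "projects/p/locations/l/datasets/d1"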
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dataset.Dataset() - pb_resp = dataset.Dataset.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_dataset(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_dataset_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dataset.Dataset.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.automl_v1.AutoMlClient.get_dataset", - extra = { - "serviceName": "google.cloud.automl.v1.AutoMl", - "rpcName": "GetDataset", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetModel(_BaseAutoMlRestTransport._BaseGetModel, AutoMlRestStub): - def __hash__(self): - return hash("AutoMlRestTransport.GetModel") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: service.GetModelRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> model.Model: - r"""Call the get model method over HTTP. - - Args: - request (~.service.GetModelRequest): - The request object. Request message for - [AutoMl.GetModel][google.cloud.automl.v1.AutoMl.GetModel]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.model.Model: - API proto representing a trained - machine learning model. 
- - """ - - http_options = _BaseAutoMlRestTransport._BaseGetModel._get_http_options() - - request, metadata = self._interceptor.pre_get_model(request, metadata) - transcoded_request = _BaseAutoMlRestTransport._BaseGetModel._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAutoMlRestTransport._BaseGetModel._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.automl_v1.AutoMlClient.GetModel", - extra = { - "serviceName": "google.cloud.automl.v1.AutoMl", - "rpcName": "GetModel", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AutoMlRestTransport._GetModel._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = model.Model() - pb_resp = model.Model.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_model(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_model_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = model.Model.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.automl_v1.AutoMlClient.get_model", - extra = { - "serviceName": "google.cloud.automl.v1.AutoMl", - "rpcName": "GetModel", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetModelEvaluation(_BaseAutoMlRestTransport._BaseGetModelEvaluation, AutoMlRestStub): - def __hash__(self): - return hash("AutoMlRestTransport.GetModelEvaluation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: service.GetModelEvaluationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> model_evaluation.ModelEvaluation: - r"""Call the get model evaluation method over HTTP. - - Args: - request (~.service.GetModelEvaluationRequest): - The request object. 
Request message for - [AutoMl.GetModelEvaluation][google.cloud.automl.v1.AutoMl.GetModelEvaluation]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.model_evaluation.ModelEvaluation: - Evaluation results of a model. - """ - - http_options = _BaseAutoMlRestTransport._BaseGetModelEvaluation._get_http_options() - - request, metadata = self._interceptor.pre_get_model_evaluation(request, metadata) - transcoded_request = _BaseAutoMlRestTransport._BaseGetModelEvaluation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAutoMlRestTransport._BaseGetModelEvaluation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.automl_v1.AutoMlClient.GetModelEvaluation", - extra = { - "serviceName": "google.cloud.automl.v1.AutoMl", - "rpcName": "GetModelEvaluation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AutoMlRestTransport._GetModelEvaluation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
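                # (Editorial sketch.) rest_helpers.flatten_query_params, used
                # when sending the request above, turns the nested query-param
                # dict into key/value pairs; "readMask" here is hypothetical:
                #
                #   from google.api_core import rest_helpers
                #   rest_helpers.flatten_query_params(
                #       {"readMask": {"paths": "name"}}, strict=True)
                #   # -> [("readMask.paths", "name")]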
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = model_evaluation.ModelEvaluation() - pb_resp = model_evaluation.ModelEvaluation.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_model_evaluation(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_model_evaluation_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = model_evaluation.ModelEvaluation.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.automl_v1.AutoMlClient.get_model_evaluation", - extra = { - "serviceName": "google.cloud.automl.v1.AutoMl", - "rpcName": "GetModelEvaluation", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ImportData(_BaseAutoMlRestTransport._BaseImportData, AutoMlRestStub): - def __hash__(self): - return hash("AutoMlRestTransport.ImportData") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: service.ImportDataRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the import data method over HTTP. - - Args: - request (~.service.ImportDataRequest): - The request object. Request message for - [AutoMl.ImportData][google.cloud.automl.v1.AutoMl.ImportData]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = _BaseAutoMlRestTransport._BaseImportData._get_http_options() - - request, metadata = self._interceptor.pre_import_data(request, metadata) - transcoded_request = _BaseAutoMlRestTransport._BaseImportData._get_transcoded_request(http_options, request) - - body = _BaseAutoMlRestTransport._BaseImportData._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAutoMlRestTransport._BaseImportData._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.automl_v1.AutoMlClient.ImportData", - extra = { - "serviceName": "google.cloud.automl.v1.AutoMl", - "rpcName": "ImportData", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AutoMlRestTransport._ImportData._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_import_data(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_import_data_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.automl_v1.AutoMlClient.import_data", - extra = { - "serviceName": "google.cloud.automl.v1.AutoMl", - "rpcName": "ImportData", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListDatasets(_BaseAutoMlRestTransport._BaseListDatasets, AutoMlRestStub): - def __hash__(self): - return hash("AutoMlRestTransport.ListDatasets") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: service.ListDatasetsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> service.ListDatasetsResponse: - r"""Call the list datasets method over HTTP. - - Args: - request (~.service.ListDatasetsRequest): - The request object. 
Request message for - [AutoMl.ListDatasets][google.cloud.automl.v1.AutoMl.ListDatasets]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.service.ListDatasetsResponse: - Response message for - [AutoMl.ListDatasets][google.cloud.automl.v1.AutoMl.ListDatasets]. - - """ - - http_options = _BaseAutoMlRestTransport._BaseListDatasets._get_http_options() - - request, metadata = self._interceptor.pre_list_datasets(request, metadata) - transcoded_request = _BaseAutoMlRestTransport._BaseListDatasets._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAutoMlRestTransport._BaseListDatasets._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.automl_v1.AutoMlClient.ListDatasets", - extra = { - "serviceName": "google.cloud.automl.v1.AutoMl", - "rpcName": "ListDatasets", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AutoMlRestTransport._ListDatasets._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
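                # (Editorial sketch.) At the client layer this raw response is
                # wrapped in a pager; conceptually the pager loops over the
                # standard List* token fields like so:
                #
                #   while True:
                #       page = ...  # a service.ListDatasetsResponse
                #       for ds in page.datasets:
                #           pass  # consume each dataset
                #       if not page.next_page_token:
                #           break
                #       request.page_token = page.next_page_token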
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = service.ListDatasetsResponse() - pb_resp = service.ListDatasetsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_datasets(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_datasets_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = service.ListDatasetsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.automl_v1.AutoMlClient.list_datasets", - extra = { - "serviceName": "google.cloud.automl.v1.AutoMl", - "rpcName": "ListDatasets", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListModelEvaluations(_BaseAutoMlRestTransport._BaseListModelEvaluations, AutoMlRestStub): - def __hash__(self): - return hash("AutoMlRestTransport.ListModelEvaluations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: service.ListModelEvaluationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> service.ListModelEvaluationsResponse: - r"""Call the list model evaluations method over HTTP. - - Args: - request (~.service.ListModelEvaluationsRequest): - The request object. Request message for - [AutoMl.ListModelEvaluations][google.cloud.automl.v1.AutoMl.ListModelEvaluations]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.service.ListModelEvaluationsResponse: - Response message for - [AutoMl.ListModelEvaluations][google.cloud.automl.v1.AutoMl.ListModelEvaluations]. 
- - """ - - http_options = _BaseAutoMlRestTransport._BaseListModelEvaluations._get_http_options() - - request, metadata = self._interceptor.pre_list_model_evaluations(request, metadata) - transcoded_request = _BaseAutoMlRestTransport._BaseListModelEvaluations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAutoMlRestTransport._BaseListModelEvaluations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.automl_v1.AutoMlClient.ListModelEvaluations", - extra = { - "serviceName": "google.cloud.automl.v1.AutoMl", - "rpcName": "ListModelEvaluations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AutoMlRestTransport._ListModelEvaluations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = service.ListModelEvaluationsResponse() - pb_resp = service.ListModelEvaluationsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_model_evaluations(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_model_evaluations_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = service.ListModelEvaluationsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.automl_v1.AutoMlClient.list_model_evaluations", - extra = { - "serviceName": "google.cloud.automl.v1.AutoMl", - "rpcName": "ListModelEvaluations", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListModels(_BaseAutoMlRestTransport._BaseListModels, AutoMlRestStub): - def __hash__(self): - return hash("AutoMlRestTransport.ListModels") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: service.ListModelsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> service.ListModelsResponse: - r"""Call the list models method over HTTP. 
- - Args: - request (~.service.ListModelsRequest): - The request object. Request message for - [AutoMl.ListModels][google.cloud.automl.v1.AutoMl.ListModels]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.service.ListModelsResponse: - Response message for - [AutoMl.ListModels][google.cloud.automl.v1.AutoMl.ListModels]. - - """ - - http_options = _BaseAutoMlRestTransport._BaseListModels._get_http_options() - - request, metadata = self._interceptor.pre_list_models(request, metadata) - transcoded_request = _BaseAutoMlRestTransport._BaseListModels._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAutoMlRestTransport._BaseListModels._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.automl_v1.AutoMlClient.ListModels", - extra = { - "serviceName": "google.cloud.automl.v1.AutoMl", - "rpcName": "ListModels", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AutoMlRestTransport._ListModels._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = service.ListModelsResponse() - pb_resp = service.ListModelsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_models(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_models_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = service.ListModelsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.automl_v1.AutoMlClient.list_models", - extra = { - "serviceName": "google.cloud.automl.v1.AutoMl", - "rpcName": "ListModels", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UndeployModel(_BaseAutoMlRestTransport._BaseUndeployModel, AutoMlRestStub): - def __hash__(self): - return hash("AutoMlRestTransport.UndeployModel") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: service.UndeployModelRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the undeploy model method over HTTP. - - Args: - request (~.service.UndeployModelRequest): - The request object. Request message for - [AutoMl.UndeployModel][google.cloud.automl.v1.AutoMl.UndeployModel]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = _BaseAutoMlRestTransport._BaseUndeployModel._get_http_options() - - request, metadata = self._interceptor.pre_undeploy_model(request, metadata) - transcoded_request = _BaseAutoMlRestTransport._BaseUndeployModel._get_transcoded_request(http_options, request) - - body = _BaseAutoMlRestTransport._BaseUndeployModel._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAutoMlRestTransport._BaseUndeployModel._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.automl_v1.AutoMlClient.UndeployModel", - extra = { - "serviceName": "google.cloud.automl.v1.AutoMl", - "rpcName": "UndeployModel", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AutoMlRestTransport._UndeployModel._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_undeploy_model(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_undeploy_model_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.automl_v1.AutoMlClient.undeploy_model", - extra = { - "serviceName": "google.cloud.automl.v1.AutoMl", - "rpcName": "UndeployModel", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateDataset(_BaseAutoMlRestTransport._BaseUpdateDataset, AutoMlRestStub): - def __hash__(self): - return hash("AutoMlRestTransport.UpdateDataset") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: service.UpdateDatasetRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> gca_dataset.Dataset: - r"""Call the update dataset method over HTTP. 
- - Args: - request (~.service.UpdateDatasetRequest): - The request object. Request message for - [AutoMl.UpdateDataset][google.cloud.automl.v1.AutoMl.UpdateDataset] - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.gca_dataset.Dataset: - A workspace for solving a single, - particular machine learning (ML) - problem. A workspace contains examples - that may be annotated. - - """ - - http_options = _BaseAutoMlRestTransport._BaseUpdateDataset._get_http_options() - - request, metadata = self._interceptor.pre_update_dataset(request, metadata) - transcoded_request = _BaseAutoMlRestTransport._BaseUpdateDataset._get_transcoded_request(http_options, request) - - body = _BaseAutoMlRestTransport._BaseUpdateDataset._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAutoMlRestTransport._BaseUpdateDataset._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.automl_v1.AutoMlClient.UpdateDataset", - extra = { - "serviceName": "google.cloud.automl.v1.AutoMl", - "rpcName": "UpdateDataset", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AutoMlRestTransport._UpdateDataset._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = gca_dataset.Dataset() - pb_resp = gca_dataset.Dataset.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_dataset(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_dataset_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = gca_dataset.Dataset.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.automl_v1.AutoMlClient.update_dataset", - extra = { - "serviceName": "google.cloud.automl.v1.AutoMl", - "rpcName": "UpdateDataset", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateModel(_BaseAutoMlRestTransport._BaseUpdateModel, AutoMlRestStub): - def __hash__(self): - return hash("AutoMlRestTransport.UpdateModel") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: service.UpdateModelRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> gca_model.Model: - r"""Call the update model method over HTTP. - - Args: - request (~.service.UpdateModelRequest): - The request object. Request message for - [AutoMl.UpdateModel][google.cloud.automl.v1.AutoMl.UpdateModel] - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.gca_model.Model: - API proto representing a trained - machine learning model. 
- - """ - - http_options = _BaseAutoMlRestTransport._BaseUpdateModel._get_http_options() - - request, metadata = self._interceptor.pre_update_model(request, metadata) - transcoded_request = _BaseAutoMlRestTransport._BaseUpdateModel._get_transcoded_request(http_options, request) - - body = _BaseAutoMlRestTransport._BaseUpdateModel._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAutoMlRestTransport._BaseUpdateModel._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.automl_v1.AutoMlClient.UpdateModel", - extra = { - "serviceName": "google.cloud.automl.v1.AutoMl", - "rpcName": "UpdateModel", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AutoMlRestTransport._UpdateModel._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = gca_model.Model() - pb_resp = gca_model.Model.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_model(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_model_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = gca_model.Model.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.automl_v1.AutoMlClient.update_model", - extra = { - "serviceName": "google.cloud.automl.v1.AutoMl", - "rpcName": "UpdateModel", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - @property - def create_dataset(self) -> Callable[ - [service.CreateDatasetRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateDataset(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_model(self) -> Callable[ - [service.CreateModelRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateModel(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_dataset(self) -> Callable[ - [service.DeleteDatasetRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._DeleteDataset(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_model(self) -> Callable[ - [service.DeleteModelRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteModel(self._session, self._host, self._interceptor) # type: ignore - - @property - def deploy_model(self) -> Callable[ - [service.DeployModelRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeployModel(self._session, self._host, self._interceptor) # type: ignore - - @property - def export_data(self) -> Callable[ - [service.ExportDataRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ExportData(self._session, self._host, self._interceptor) # type: ignore - - @property - def export_model(self) -> Callable[ - [service.ExportModelRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ExportModel(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_annotation_spec(self) -> Callable[ - [service.GetAnnotationSpecRequest], - annotation_spec.AnnotationSpec]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetAnnotationSpec(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_dataset(self) -> Callable[ - [service.GetDatasetRequest], - dataset.Dataset]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetDataset(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_model(self) -> Callable[ - [service.GetModelRequest], - model.Model]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetModel(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_model_evaluation(self) -> Callable[ - [service.GetModelEvaluationRequest], - model_evaluation.ModelEvaluation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetModelEvaluation(self._session, self._host, self._interceptor) # type: ignore - - @property - def import_data(self) -> Callable[ - [service.ImportDataRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ImportData(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_datasets(self) -> Callable[ - [service.ListDatasetsRequest], - service.ListDatasetsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._ListDatasets(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_model_evaluations(self) -> Callable[ - [service.ListModelEvaluationsRequest], - service.ListModelEvaluationsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListModelEvaluations(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_models(self) -> Callable[ - [service.ListModelsRequest], - service.ListModelsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListModels(self._session, self._host, self._interceptor) # type: ignore - - @property - def undeploy_model(self) -> Callable[ - [service.UndeployModelRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UndeployModel(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_dataset(self) -> Callable[ - [service.UpdateDatasetRequest], - gca_dataset.Dataset]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateDataset(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_model(self) -> Callable[ - [service.UpdateModelRequest], - gca_model.Model]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateModel(self._session, self._host, self._interceptor) # type: ignore - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'AutoMlRestTransport', -) diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/auto_ml/transports/rest_base.py b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/auto_ml/transports/rest_base.py deleted file mode 100644 index 77f14ea70f6b..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/auto_ml/transports/rest_base.py +++ /dev/null @@ -1,854 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
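For reference, the ``AutoMlRestTransport`` removed above is not normally instantiated directly; callers select it by name when constructing the client, and each RPC is then routed through the corresponding stub defined in the deleted file. A minimal sketch (assuming Application Default Credentials are configured; the project and location below are placeholders):

.. code-block:: python

    from google.cloud import automl_v1

    # Selecting the REST transport by name; the client then sends each RPC
    # through stubs like _ListDatasets above as JSON over HTTP/1.1.
    client = automl_v1.AutoMlClient(transport="rest")

    # list_datasets issues an HTTP GET against
    # /v1/{parent=projects/*/locations/*}/datasets and pages the results.
    for ds in client.list_datasets(parent="projects/my-project/locations/us-central1"):
        print(ds.name)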
-# -import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from .base import AutoMlTransport, DEFAULT_CLIENT_INFO - -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - - -from google.cloud.automl_v1.types import annotation_spec -from google.cloud.automl_v1.types import dataset -from google.cloud.automl_v1.types import dataset as gca_dataset -from google.cloud.automl_v1.types import model -from google.cloud.automl_v1.types import model as gca_model -from google.cloud.automl_v1.types import model_evaluation -from google.cloud.automl_v1.types import service -from google.longrunning import operations_pb2 # type: ignore - - -class _BaseAutoMlRestTransport(AutoMlTransport): - """Base REST backend transport for AutoMl. - - Note: This class is not meant to be used directly. Use its sync and - async sub-classes instead. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'automl.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - Args: - host (Optional[str]): - The hostname to connect to (default: 'automl.googleapis.com'). - credentials (Optional[Any]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. 
- """ - # Run the base constructor - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - - class _BaseCreateDataset: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/datasets', - 'body': 'dataset', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.CreateDatasetRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAutoMlRestTransport._BaseCreateDataset._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateModel: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/models', - 'body': 'model', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.CreateModelRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAutoMlRestTransport._BaseCreateModel._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteDataset: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): 
- return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/datasets/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.DeleteDatasetRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAutoMlRestTransport._BaseDeleteDataset._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteModel: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/models/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.DeleteModelRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAutoMlRestTransport._BaseDeleteModel._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeployModel: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/models/*}:deploy', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.DeployModelRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAutoMlRestTransport._BaseDeployModel._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseExportData: - def __hash__(self): # pragma: NO COVER - return 
NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/datasets/*}:exportData', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.ExportDataRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAutoMlRestTransport._BaseExportData._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseExportModel: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/models/*}:export', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.ExportModelRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAutoMlRestTransport._BaseExportModel._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetAnnotationSpec: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/datasets/*/annotationSpecs/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.GetAnnotationSpecRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def 
_get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAutoMlRestTransport._BaseGetAnnotationSpec._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetDataset: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/datasets/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.GetDatasetRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAutoMlRestTransport._BaseGetDataset._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetModel: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/models/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.GetModelRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAutoMlRestTransport._BaseGetModel._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetModelEvaluation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/models/*/modelEvaluations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.GetModelEvaluationRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - 
query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAutoMlRestTransport._BaseGetModelEvaluation._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseImportData: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/datasets/*}:importData', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.ImportDataRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAutoMlRestTransport._BaseImportData._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListDatasets: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/datasets', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.ListDatasetsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAutoMlRestTransport._BaseListDatasets._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListModelEvaluations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "filter" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*/models/*}/modelEvaluations', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = 
service.ListModelEvaluationsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAutoMlRestTransport._BaseListModelEvaluations._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListModels: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/models', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.ListModelsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAutoMlRestTransport._BaseListModels._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUndeployModel: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/models/*}:undeploy', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.UndeployModelRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAutoMlRestTransport._BaseUndeployModel._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateDataset: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ 
- 'method': 'patch', - 'uri': '/v1/{dataset.name=projects/*/locations/*/datasets/*}', - 'body': 'dataset', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.UpdateDatasetRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAutoMlRestTransport._BaseUpdateDataset._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateModel: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{model.name=projects/*/locations/*/models/*}', - 'body': 'model', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.UpdateModelRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAutoMlRestTransport._BaseUpdateModel._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - -__all__=( - '_BaseAutoMlRestTransport', -) diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/prediction_service/__init__.py b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/prediction_service/__init__.py deleted file mode 100644 index 857ae0200982..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/prediction_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
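The ``_Base*`` helper classes removed above all follow the same recipe: match the request against the HTTP rule, JSON-encode the matched ``body`` field (if any), and spill the remaining request fields into query parameters. A rough illustration of the transcoding step, using hypothetical values (the project and location are placeholders):

.. code-block:: python

    from google.api_core import path_template
    from google.cloud.automl_v1.types import service

    # The HTTP rule from _BaseListDatasets._get_http_options() above.
    http_options = [{
        'method': 'get',
        'uri': '/v1/{parent=projects/*/locations/*}/datasets',
    }]

    request = service.ListDatasetsRequest(
        parent="projects/my-project/locations/us-central1",
        page_size=10,
    )

    # transcode() substitutes the matched fields into the URI template;
    # leftover fields land in 'query_params', which the helpers above
    # serialize with json_format.MessageToJson and append to the URL.
    transcoded = path_template.transcode(http_options, service.ListDatasetsRequest.pb(request))
    print(transcoded['method'])  # get
    print(transcoded['uri'])     # /v1/projects/my-project/locations/us-central1/datasets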
-# -from .client import PredictionServiceClient -from .async_client import PredictionServiceAsyncClient - -__all__ = ( - 'PredictionServiceClient', - 'PredictionServiceAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/prediction_service/async_client.py b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/prediction_service/async_client.py deleted file mode 100644 index 22e1d0a24ef8..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/prediction_service/async_client.py +++ /dev/null @@ -1,728 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.automl_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.automl_v1.types import annotation_payload -from google.cloud.automl_v1.types import data_items -from google.cloud.automl_v1.types import io -from google.cloud.automl_v1.types import operations -from google.cloud.automl_v1.types import prediction_service -from .transports.base import PredictionServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import PredictionServiceGrpcAsyncIOTransport -from .client import PredictionServiceClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -class PredictionServiceAsyncClient: - """AutoML Prediction API. - - On any input that is documented to expect a string parameter in - snake_case or dash-case, either of those cases is accepted. - """ - - _client: PredictionServiceClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
- DEFAULT_ENDPOINT = PredictionServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = PredictionServiceClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = PredictionServiceClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = PredictionServiceClient._DEFAULT_UNIVERSE - - model_path = staticmethod(PredictionServiceClient.model_path) - parse_model_path = staticmethod(PredictionServiceClient.parse_model_path) - common_billing_account_path = staticmethod(PredictionServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(PredictionServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(PredictionServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(PredictionServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(PredictionServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(PredictionServiceClient.parse_common_organization_path) - common_project_path = staticmethod(PredictionServiceClient.common_project_path) - parse_common_project_path = staticmethod(PredictionServiceClient.parse_common_project_path) - common_location_path = staticmethod(PredictionServiceClient.common_location_path) - parse_common_location_path = staticmethod(PredictionServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - PredictionServiceAsyncClient: The constructed client. - """ - return PredictionServiceClient.from_service_account_info.__func__(PredictionServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - PredictionServiceAsyncClient: The constructed client. - """ - return PredictionServiceClient.from_service_account_file.__func__(PredictionServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. 
- - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return PredictionServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> PredictionServiceTransport: - """Returns the transport used by the client instance. - - Returns: - PredictionServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = PredictionServiceClient.get_transport_class - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, PredictionServiceTransport, Callable[..., PredictionServiceTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the prediction service async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,PredictionServiceTransport,Callable[..., PredictionServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the PredictionServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which can have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if a client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3.
The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = PredictionServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.automl_v1.PredictionServiceAsyncClient`.", - extra = { - "serviceName": "google.cloud.automl.v1.PredictionService", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { - "serviceName": "google.cloud.automl.v1.PredictionService", - "credentialsType": None, - } - ) - - async def predict(self, - request: Optional[Union[prediction_service.PredictRequest, dict]] = None, - *, - name: Optional[str] = None, - payload: Optional[data_items.ExamplePayload] = None, - params: Optional[MutableMapping[str, str]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> prediction_service.PredictResponse: - r"""Perform an online prediction. The prediction result is directly - returned in the response. Available for the following ML scenarios, - and their expected request payloads: - - AutoML Vision Classification - - - An image in .JPEG, .GIF or .PNG format, image_bytes up to - 30MB. - - AutoML Vision Object Detection - - - An image in .JPEG, .GIF or .PNG format, image_bytes up to - 30MB. - - AutoML Natural Language Classification - - - A TextSnippet up to 60,000 characters, UTF-8 encoded or a - document in .PDF, .TIF or .TIFF format with size up to 2MB. - - AutoML Natural Language Entity Extraction - - - A TextSnippet up to 10,000 characters, UTF-8 NFC encoded or a - document in .PDF, .TIF or .TIFF format with size up to 20MB. - - AutoML Natural Language Sentiment Analysis - - - A TextSnippet up to 60,000 characters, UTF-8 encoded or a - document in .PDF, .TIF or .TIFF format with size up to 2MB. - - AutoML Translation - - - A TextSnippet up to 25,000 characters, UTF-8 encoded. - - AutoML Tables - - - A row with column values matching the columns of the model, - up to 5MB. Not available for FORECASTING ``prediction_type``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1 - - async def sample_predict(): - # Create a client - client = automl_v1.PredictionServiceAsyncClient() - - # Initialize request argument(s) - payload = automl_v1.ExamplePayload() - payload.image.image_bytes = b'image_bytes_blob' - - request = automl_v1.PredictRequest( - name="name_value", - payload=payload, - ) - - # Make the request - response = await client.predict(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.automl_v1.types.PredictRequest, dict]]): - The request object. Request message for - [PredictionService.Predict][google.cloud.automl.v1.PredictionService.Predict]. - name (:class:`str`): - Required. Name of the model requested - to serve the prediction. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - payload (:class:`google.cloud.automl_v1.types.ExamplePayload`): - Required. Payload to perform a - prediction on. The payload must match - the problem type that the model was - trained to solve. - - This corresponds to the ``payload`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - params (:class:`MutableMapping[str, str]`): - Additional domain-specific parameters, any string must - be up to 25000 characters long. - - AutoML Vision Classification - - ``score_threshold`` : (float) A value from 0.0 to 1.0. - When the model makes predictions for an image, it will - only produce results that have at least this confidence - score. The default is 0.5. - - AutoML Vision Object Detection - - ``score_threshold`` : (float) When Model detects objects - on the image, it will only produce bounding boxes which - have at least this confidence score. Value in 0 to 1 - range, default is 0.5. - - ``max_bounding_box_count`` : (int64) The maximum number - of bounding boxes returned. The default is 100. The - number of returned bounding boxes might be limited by - the server. - - AutoML Tables - - ``feature_importance`` : (boolean) Whether - [feature_importance][google.cloud.automl.v1.TablesModelColumnInfo.feature_importance] - is populated in the returned list of - [TablesAnnotation][google.cloud.automl.v1.TablesAnnotation] - objects. The default is false. - - This corresponds to the ``params`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.automl_v1.types.PredictResponse: - Response message for - [PredictionService.Predict][google.cloud.automl.v1.PredictionService.Predict]. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name, payload, params] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, prediction_service.PredictRequest): - request = prediction_service.PredictRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if payload is not None: - request.payload = payload - - if params: - request.params.update(params) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.predict] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def batch_predict(self, - request: Optional[Union[prediction_service.BatchPredictRequest, dict]] = None, - *, - name: Optional[str] = None, - input_config: Optional[io.BatchPredictInputConfig] = None, - output_config: Optional[io.BatchPredictOutputConfig] = None, - params: Optional[MutableMapping[str, str]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Perform a batch prediction. Unlike the online - [Predict][google.cloud.automl.v1.PredictionService.Predict], - the batch prediction result won't be immediately available in the - response. Instead, a long-running operation object is returned. - The user can poll the operation result via the - [GetOperation][google.longrunning.Operations.GetOperation] - method. Once the operation is done, - [BatchPredictResult][google.cloud.automl.v1.BatchPredictResult] - is returned in the - [response][google.longrunning.Operation.response] field. - Available for the following ML scenarios: - - - AutoML Vision Classification - - AutoML Vision Object Detection - - AutoML Video Intelligence Classification - - AutoML Video Intelligence Object Tracking - - AutoML Natural Language Classification - - AutoML Natural Language Entity Extraction - - AutoML Natural Language Sentiment Analysis - - AutoML Tables - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1 - - async def sample_batch_predict(): - # Create a client - client = automl_v1.PredictionServiceAsyncClient() - - # Initialize request argument(s) - input_config = automl_v1.BatchPredictInputConfig() - input_config.gcs_source.input_uris = ['input_uris_value1', 'input_uris_value2'] - - output_config = automl_v1.BatchPredictOutputConfig() - output_config.gcs_destination.output_uri_prefix = "output_uri_prefix_value" - - request = automl_v1.BatchPredictRequest( - name="name_value", - input_config=input_config, - output_config=output_config, - ) - - # Make the request - operation = client.batch_predict(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.automl_v1.types.BatchPredictRequest, dict]]): - The request object. Request message for - [PredictionService.BatchPredict][google.cloud.automl.v1.PredictionService.BatchPredict]. - name (:class:`str`): - Required. Name of the model requested - to serve the batch prediction. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - input_config (:class:`google.cloud.automl_v1.types.BatchPredictInputConfig`): - Required. The input configuration for - batch prediction. - - This corresponds to the ``input_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - output_config (:class:`google.cloud.automl_v1.types.BatchPredictOutputConfig`): - Required. The configuration - specifying where output predictions - should be written. - - This corresponds to the ``output_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - params (:class:`MutableMapping[str, str]`): - Additional domain-specific parameters for the - predictions, any string must be up to 25000 characters - long. - - AutoML Natural Language Classification - - ``score_threshold`` : (float) A value from 0.0 to 1.0. - When the model makes predictions for a text snippet, it - will only produce results that have at least this - confidence score. The default is 0.5. - - AutoML Vision Classification - - ``score_threshold`` : (float) A value from 0.0 to 1.0. - When the model makes predictions for an image, it will - only produce results that have at least this confidence - score. The default is 0.5. - - AutoML Vision Object Detection - - ``score_threshold`` : (float) When Model detects objects - on the image, it will only produce bounding boxes which - have at least this confidence score. Value in 0 to 1 - range, default is 0.5. - - ``max_bounding_box_count`` : (int64) The maximum number - of bounding boxes returned per image. The default is - 100; the number of bounding boxes returned might be - limited by the server. - - AutoML Video Intelligence Classification - - ``score_threshold`` : (float) A value from 0.0 to 1.0. - When the model makes predictions for a video, it will - only produce results that have at least this confidence - score. The default is 0.5. - - ``segment_classification`` : (boolean) Set to true to - request segment-level classification.
AutoML Video - Intelligence returns labels and their confidence scores - for the entire segment of the video that the user - specified in the request configuration. The default is true. - - ``shot_classification`` : (boolean) Set to true to - request shot-level classification. AutoML Video - Intelligence determines the boundaries for each camera - shot in the entire segment of the video that the user - specified in the request configuration. AutoML Video - Intelligence then returns labels and their confidence - scores for each detected shot, along with the start and - end time of the shot. The default is false. - - WARNING: Model evaluation is not done for this - classification type; its quality depends on the - training data, but there are no metrics provided to - describe that quality. - - ``1s_interval_classification`` : (boolean) Set to true - to request classification for a video at one-second - intervals. AutoML Video Intelligence returns labels and - their confidence scores for each second of the entire - segment of the video that the user specified in the request - configuration. The default is false. - - WARNING: Model evaluation is not done for this - classification type; its quality depends on the - training data, but there are no metrics provided to - describe that quality. - - AutoML Video Intelligence Object Tracking - - ``score_threshold`` : (float) When Model detects objects - on video frames, it will only produce bounding boxes - which have at least this confidence score. Value in 0 to - 1 range, default is 0.5. - - ``max_bounding_box_count`` : (int64) The maximum number - of bounding boxes returned per image. The default is - 100; the number of bounding boxes returned might be - limited by the server. - - ``min_bounding_box_size`` : (float) Only bounding boxes - whose shortest edge is at least this long, as a relative - value of the video frame size, are returned. Value in 0 to 1 - range. Default is 0. - - This corresponds to the ``params`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.automl_v1.types.BatchPredictResult` Result of the Batch Predict. This message is returned in - [response][google.longrunning.Operation.response] of - the operation returned by the - [PredictionService.BatchPredict][google.cloud.automl.v1.PredictionService.BatchPredict]. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request.
- flattened_params = [name, input_config, output_config, params] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, prediction_service.BatchPredictRequest): - request = prediction_service.BatchPredictRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if input_config is not None: - request.input_config = input_config - if output_config is not None: - request.output_config = output_config - - if params: - request.params.update(params) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.batch_predict] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - prediction_service.BatchPredictResult, - metadata_type=operations.OperationMetadata, - ) - - # Done; return the response. - return response - - async def __aenter__(self) -> "PredictionServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "PredictionServiceAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/prediction_service/client.py b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/prediction_service/client.py deleted file mode 100644 index 1ce0e1f6d4bf..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/prediction_service/client.py +++ /dev/null @@ -1,1086 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.automl_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.automl_v1.types import annotation_payload -from google.cloud.automl_v1.types import data_items -from google.cloud.automl_v1.types import io -from google.cloud.automl_v1.types import operations -from google.cloud.automl_v1.types import prediction_service -from .transports.base import PredictionServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import PredictionServiceGrpcTransport -from .transports.grpc_asyncio import PredictionServiceGrpcAsyncIOTransport -from .transports.rest import PredictionServiceRestTransport - - -class PredictionServiceClientMeta(type): - """Metaclass for the PredictionService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[PredictionServiceTransport]] - _transport_registry["grpc"] = PredictionServiceGrpcTransport - _transport_registry["grpc_asyncio"] = PredictionServiceGrpcAsyncIOTransport - _transport_registry["rest"] = PredictionServiceRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[PredictionServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class PredictionServiceClient(metaclass=PredictionServiceClientMeta): - """AutoML Prediction API. - - On any input that is documented to expect a string parameter in - snake_case or dash-case, either of those cases is accepted. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. 
- - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = "automl.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - _DEFAULT_ENDPOINT_TEMPLATE = "automl.{UNIVERSE_DOMAIN}" - _DEFAULT_UNIVERSE = "googleapis.com" - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - PredictionServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - PredictionServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> PredictionServiceTransport: - """Returns the transport used by the client instance. - - Returns: - PredictionServiceTransport: The transport used by the client - instance.
- """ - return self._transport - - @staticmethod - def model_path(project: str,location: str,model: str,) -> str: - """Returns a fully-qualified model string.""" - return "projects/{project}/locations/{location}/models/{model}".format(project=project, location=location, model=model, ) - - @staticmethod - def parse_model_path(path: str) -> Dict[str,str]: - """Parses a model path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/models/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Deprecated. Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. 
- (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise, if a client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. - - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"]. - """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided.
- use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = PredictionServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = PredictionServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = PredictionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. - """ - universe_domain = PredictionServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
- """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, PredictionServiceTransport, Callable[..., PredictionServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the prediction service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,PredictionServiceTransport,Callable[..., PredictionServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the PredictionServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) - - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = PredictionServiceClient._read_environment_variables() - self._client_cert_source = PredictionServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = PredictionServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER - # Setup logging. - client_logging.initialize_logging() - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, PredictionServiceTransport) - if transport_provided: - # transport is a PredictionServiceTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = cast(PredictionServiceTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - PredictionServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[PredictionServiceTransport], Callable[..., PredictionServiceTransport]] = ( - PredictionServiceClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., PredictionServiceTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.automl_v1.PredictionServiceClient`.", - extra = { - "serviceName": "google.cloud.automl.v1.PredictionService", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.cloud.automl.v1.PredictionService", - "credentialsType": None, - } - ) - - def predict(self, - request: Optional[Union[prediction_service.PredictRequest, dict]] = None, - *, - name: Optional[str] = None, - payload: Optional[data_items.ExamplePayload] = None, - params: Optional[MutableMapping[str, str]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> prediction_service.PredictResponse: - r"""Perform an online prediction. The prediction result is directly - returned in the response. Available for the following ML scenarios, - and their expected request payloads: - - AutoML Vision Classification - - - An image in .JPEG, .GIF or .PNG format, image_bytes up to - 30MB. - - AutoML Vision Object Detection - - - An image in .JPEG, .GIF or .PNG format, image_bytes up to - 30MB. - - AutoML Natural Language Classification - - - A TextSnippet up to 60,000 characters, UTF-8 encoded or a - document in .PDF, .TIF or .TIFF format with size up to 2MB. - - AutoML Natural Language Entity Extraction - - - A TextSnippet up to 10,000 characters, UTF-8 NFC encoded or a - document in .PDF, .TIF or .TIFF format with size up to 20MB. - - AutoML Natural Language Sentiment Analysis - - - A TextSnippet up to 60,000 characters, UTF-8 encoded or a - document in .PDF, .TIF or .TIFF format with size up to 2MB. - - AutoML Translation - - - A TextSnippet up to 25,000 characters, UTF-8 encoded.
- - AutoML Tables - - - A row with column values matching the columns of the model, - up to 5MB. Not available for FORECASTING ``prediction_type``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1 - - def sample_predict(): - # Create a client - client = automl_v1.PredictionServiceClient() - - # Initialize request argument(s) - payload = automl_v1.ExamplePayload() - payload.image.image_bytes = b'image_bytes_blob' - - request = automl_v1.PredictRequest( - name="name_value", - payload=payload, - ) - - # Make the request - response = client.predict(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.automl_v1.types.PredictRequest, dict]): - The request object. Request message for - [PredictionService.Predict][google.cloud.automl.v1.PredictionService.Predict]. - name (str): - Required. Name of the model requested - to serve the prediction. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - payload (google.cloud.automl_v1.types.ExamplePayload): - Required. Payload to perform a - prediction on. The payload must match - the problem type that the model was - trained to solve. - - This corresponds to the ``payload`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - params (MutableMapping[str, str]): - Additional domain-specific parameters, any string must - be up to 25000 characters long. - - AutoML Vision Classification - - ``score_threshold`` : (float) A value from 0.0 to 1.0. - When the model makes predictions for an image, it will - only produce results that have at least this confidence - score. The default is 0.5. - - AutoML Vision Object Detection - - ``score_threshold`` : (float) When Model detects objects - on the image, it will only produce bounding boxes which - have at least this confidence score. Value in 0 to 1 - range, default is 0.5. - - ``max_bounding_box_count`` : (int64) The maximum number - of bounding boxes returned. The default is 100. The - number of returned bounding boxes might be limited by - the server. - - AutoML Tables - - ``feature_importance`` : (boolean) Whether - [feature_importance][google.cloud.automl.v1.TablesModelColumnInfo.feature_importance] - is populated in the returned list of - [TablesAnnotation][google.cloud.automl.v1.TablesAnnotation] - objects. The default is false. - - This corresponds to the ``params`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.automl_v1.types.PredictResponse: - Response message for - [PredictionService.Predict][google.cloud.automl.v1.PredictionService.Predict]. 
- - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name, payload, params] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, prediction_service.PredictRequest): - request = prediction_service.PredictRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if payload is not None: - request.payload = payload - if params is not None: - request.params = params - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.predict] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def batch_predict(self, - request: Optional[Union[prediction_service.BatchPredictRequest, dict]] = None, - *, - name: Optional[str] = None, - input_config: Optional[io.BatchPredictInputConfig] = None, - output_config: Optional[io.BatchPredictOutputConfig] = None, - params: Optional[MutableMapping[str, str]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Perform a batch prediction. Unlike the online - [Predict][google.cloud.automl.v1.PredictionService.Predict], - batch prediction result won't be immediately available in the - response. Instead, a long running operation object is returned. - User can poll the operation result via - [GetOperation][google.longrunning.Operations.GetOperation] - method. Once the operation is done, - [BatchPredictResult][google.cloud.automl.v1.BatchPredictResult] - is returned in the - [response][google.longrunning.Operation.response] field. - Available for following ML scenarios: - - - AutoML Vision Classification - - AutoML Vision Object Detection - - AutoML Video Intelligence Classification - - AutoML Video Intelligence Object Tracking \* AutoML Natural - Language Classification - - AutoML Natural Language Entity Extraction - - AutoML Natural Language Sentiment Analysis - - AutoML Tables - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1 - - def sample_batch_predict(): - # Create a client - client = automl_v1.PredictionServiceClient() - - # Initialize request argument(s) - input_config = automl_v1.BatchPredictInputConfig() - input_config.gcs_source.input_uris = ['input_uris_value1', 'input_uris_value2'] - - output_config = automl_v1.BatchPredictOutputConfig() - output_config.gcs_destination.output_uri_prefix = "output_uri_prefix_value" - - request = automl_v1.BatchPredictRequest( - name="name_value", - input_config=input_config, - output_config=output_config, - ) - - # Make the request - operation = client.batch_predict(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.automl_v1.types.BatchPredictRequest, dict]): - The request object. Request message for - [PredictionService.BatchPredict][google.cloud.automl.v1.PredictionService.BatchPredict]. - name (str): - Required. Name of the model requested - to serve the batch prediction. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - input_config (google.cloud.automl_v1.types.BatchPredictInputConfig): - Required. The input configuration for - batch prediction. - - This corresponds to the ``input_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - output_config (google.cloud.automl_v1.types.BatchPredictOutputConfig): - Required. The configuration - specifying where output predictions - should be written. - - This corresponds to the ``output_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - params (MutableMapping[str, str]): - Additional domain-specific parameters for the - predictions, any string must be up to 25000 characters - long. - - AutoML Natural Language Classification - - ``score_threshold`` : (float) A value from 0.0 to 1.0. - When the model makes predictions for a text snippet, it - will only produce results that have at least this - confidence score. The default is 0.5. - - AutoML Vision Classification - - ``score_threshold`` : (float) A value from 0.0 to 1.0. - When the model makes predictions for an image, it will - only produce results that have at least this confidence - score. The default is 0.5. - - AutoML Vision Object Detection - - ``score_threshold`` : (float) When Model detects objects - on the image, it will only produce bounding boxes which - have at least this confidence score. Value in 0 to 1 - range, default is 0.5. - - ``max_bounding_box_count`` : (int64) The maximum number - of bounding boxes returned per image. The default is - 100; the number of bounding boxes returned might be - limited by the server. - - AutoML Video Intelligence Classification - - ``score_threshold`` : (float) A value from 0.0 to 1.0. - When the model makes predictions for a video, it will - only produce results that have at least this confidence - score. The default is 0.5. - - ``segment_classification`` : (boolean) Set to true to - request segment-level classification. AutoML Video - Intelligence returns labels and their confidence scores - for the entire segment of the video that the user - specified in the request configuration. The default is true.
- - ``shot_classification`` : (boolean) Set to true to - request shot-level classification. AutoML Video - Intelligence determines the boundaries for each camera - shot in the entire segment of the video that user - specified in the request configuration. AutoML Video - Intelligence then returns labels and their confidence - scores for each detected shot, along with the start and - end time of the shot. The default is false. - - WARNING: Model evaluation is not done for this - classification type, the quality of it depends on - training data, but there are no metrics provided to - describe that quality. - - ``1s_interval_classification`` : (boolean) Set to true - to request classification for a video at one-second - intervals. AutoML Video Intelligence returns labels and - their confidence scores for each second of the entire - segment of the video that user specified in the request - configuration. The default is false. - - WARNING: Model evaluation is not done for this - classification type, the quality of it depends on - training data, but there are no metrics provided to - describe that quality. - - AutoML Video Intelligence Object Tracking - - ``score_threshold`` : (float) When Model detects objects - on video frames, it will only produce bounding boxes - which have at least this confidence score. Value in 0 to - 1 range, default is 0.5. - - ``max_bounding_box_count`` : (int64) The maximum number - of bounding boxes returned per image. The default is - 100, the number of bounding boxes returned might be - limited by the server. - - ``min_bounding_box_size`` : (float) Only bounding boxes - with shortest edge at least that long as a relative - value of video frame size are returned. Value in 0 to 1 - range. Default is 0. - - This corresponds to the ``params`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.automl_v1.types.BatchPredictResult` Result of the Batch Predict. This message is returned in - [response][google.longrunning.Operation.response] of - the operation returned by the - [PredictionService.BatchPredict][google.cloud.automl.v1.PredictionService.BatchPredict]. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name, input_config, output_config, params] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
- if not isinstance(request, prediction_service.BatchPredictRequest): - request = prediction_service.BatchPredictRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if input_config is not None: - request.input_config = input_config - if output_config is not None: - request.output_config = output_config - if params is not None: - request.params = params - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.batch_predict] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - prediction_service.BatchPredictResult, - metadata_type=operations.OperationMetadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "PredictionServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "PredictionServiceClient", -) diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/prediction_service/transports/README.rst b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/prediction_service/transports/README.rst deleted file mode 100644 index 504aaca0a144..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/prediction_service/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`PredictionServiceTransport` is the ABC for all transports. -- public child `PredictionServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `PredictionServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BasePredictionServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `PredictionServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). 
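The README above lists the transport classes by role. As orientation only, here is a minimal sketch (not part of the deleted sources) of how a caller selects one of these transports through the public client; the string keys mirror the registry compiled in ``transports/__init__.py`` below:

.. code-block:: python

    from google.cloud import automl_v1
    from google.cloud.automl_v1.services.prediction_service.transports import (
        PredictionServiceGrpcTransport,
    )

    # Select the synchronous REST transport by its registry name.
    rest_client = automl_v1.PredictionServiceClient(transport="rest")

    # Or hand the client a transport class; the client instantiates it
    # with the credentials and endpoint it resolves itself.
    grpc_client = automl_v1.PredictionServiceClient(
        transport=PredictionServiceGrpcTransport,
    )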
diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/prediction_service/transports/__init__.py b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/prediction_service/transports/__init__.py deleted file mode 100644 index 5202ac1c1205..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/prediction_service/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import PredictionServiceTransport -from .grpc import PredictionServiceGrpcTransport -from .grpc_asyncio import PredictionServiceGrpcAsyncIOTransport -from .rest import PredictionServiceRestTransport -from .rest import PredictionServiceRestInterceptor - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[PredictionServiceTransport]] -_transport_registry['grpc'] = PredictionServiceGrpcTransport -_transport_registry['grpc_asyncio'] = PredictionServiceGrpcAsyncIOTransport -_transport_registry['rest'] = PredictionServiceRestTransport - -__all__ = ( - 'PredictionServiceTransport', - 'PredictionServiceGrpcTransport', - 'PredictionServiceGrpcAsyncIOTransport', - 'PredictionServiceRestTransport', - 'PredictionServiceRestInterceptor', -) diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/prediction_service/transports/base.py b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/prediction_service/transports/base.py deleted file mode 100644 index e977c351c2e1..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/prediction_service/transports/base.py +++ /dev/null @@ -1,175 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
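The ``__init__.py`` above compiles an ``OrderedDict`` registry keyed by transport name. A standalone sketch of the same lookup pattern (the ``registry`` variable here is illustrative, mirroring the module's private ``_transport_registry``):

.. code-block:: python

    from collections import OrderedDict
    from typing import Dict, Type

    from google.cloud.automl_v1.services.prediction_service.transports import (
        PredictionServiceTransport,
        PredictionServiceGrpcTransport,
        PredictionServiceGrpcAsyncIOTransport,
        PredictionServiceRestTransport,
    )

    # Rebuild the same name -> class mapping the module exposes privately.
    registry: Dict[str, Type[PredictionServiceTransport]] = OrderedDict()
    registry['grpc'] = PredictionServiceGrpcTransport
    registry['grpc_asyncio'] = PredictionServiceGrpcAsyncIOTransport
    registry['rest'] = PredictionServiceRestTransport

    # Resolve a transport class by name, as the client factory does.
    transport_cls = registry['grpc']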
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.automl_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.automl_v1.types import prediction_service -from google.longrunning import operations_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class PredictionServiceTransport(abc.ABC): - """Abstract transport class for PredictionService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'automl.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'automl.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. 
- if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.predict: gapic_v1.method.wrap_method( - self.predict, - default_timeout=60.0, - client_info=client_info, - ), - self.batch_predict: gapic_v1.method.wrap_method( - self.batch_predict, - default_timeout=60.0, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! - """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def predict(self) -> Callable[ - [prediction_service.PredictRequest], - Union[ - prediction_service.PredictResponse, - Awaitable[prediction_service.PredictResponse] - ]]: - raise NotImplementedError() - - @property - def batch_predict(self) -> Callable[ - [prediction_service.BatchPredictRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'PredictionServiceTransport', -) diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/prediction_service/transports/grpc.py b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/prediction_service/transports/grpc.py deleted file mode 100644 index 71dfd7e5b441..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/prediction_service/transports/grpc.py +++ /dev/null @@ -1,450 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
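The ``base.py`` constructor above resolves credentials in a fixed order: explicit ``credentials`` and ``credentials_file`` are mutually exclusive, a file is loaded via ``google.auth.load_credentials_from_file``, and otherwise application default credentials are used. A condensed sketch of that decision order (simplified; it omits the quota project and audience handling shown above):

.. code-block:: python

    import google.auth
    from google.api_core import exceptions as core_exceptions

    def resolve_credentials(credentials=None, credentials_file=None,
                            default_scopes=('https://www.googleapis.com/auth/cloud-platform',)):
        # Mirrors the base transport: both arguments at once is an error.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs(
                "'credentials_file' and 'credentials' are mutually exclusive")
        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file, default_scopes=default_scopes)
        elif credentials is None:
            # Fall back to application default credentials (ADC).
            credentials, _ = google.auth.default(default_scopes=default_scopes)
        return credentials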
-#
-import json
-import logging as std_logging
-import pickle
-import warnings
-from typing import Callable, Dict, Optional, Sequence, Tuple, Union
-
-from google.api_core import grpc_helpers
-from google.api_core import operations_v1
-from google.api_core import gapic_v1
-import google.auth # type: ignore
-from google.auth import credentials as ga_credentials # type: ignore
-from google.auth.transport.grpc import SslCredentials # type: ignore
-from google.protobuf.json_format import MessageToJson
-import google.protobuf.message
-
-import grpc # type: ignore
-import proto # type: ignore
-
-from google.cloud.automl_v1.types import prediction_service
-from google.longrunning import operations_pb2 # type: ignore
-from .base import PredictionServiceTransport, DEFAULT_CLIENT_INFO
-
-try:
- from google.api_core import client_logging # type: ignore
- CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER
-except ImportError: # pragma: NO COVER
- CLIENT_LOGGING_SUPPORTED = False
-
-_LOGGER = std_logging.getLogger(__name__)
-
-
-class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER
- def intercept_unary_unary(self, continuation, client_call_details, request):
- logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG)
- if logging_enabled: # pragma: NO COVER
- request_metadata = client_call_details.metadata
- if isinstance(request, proto.Message):
- request_payload = type(request).to_json(request)
- elif isinstance(request, google.protobuf.message.Message):
- request_payload = MessageToJson(request)
- else:
- request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
-
- request_metadata = {
- key: value.decode("utf-8") if isinstance(value, bytes) else value
- for key, value in request_metadata
- }
- grpc_request = {
- "payload": request_payload,
- "requestMethod": "grpc",
- "metadata": dict(request_metadata),
- }
- _LOGGER.debug(
- f"Sending request for {client_call_details.method}",
- extra = {
- "serviceName": "google.cloud.automl.v1.PredictionService",
- "rpcName": client_call_details.method,
- "request": grpc_request,
- "metadata": grpc_request["metadata"],
- },
- )
-
- response = continuation(client_call_details, request)
- if logging_enabled: # pragma: NO COVER
- response_metadata = response.trailing_metadata()
- # Convert the gRPC response metadata into a dict of str -> str.
- metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
- result = response.result()
- if isinstance(result, proto.Message):
- response_payload = type(result).to_json(result)
- elif isinstance(result, google.protobuf.message.Message):
- response_payload = MessageToJson(result)
- else:
- response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
- grpc_response = {
- "payload": response_payload,
- "metadata": metadata,
- "status": "OK",
- }
- _LOGGER.debug(
- f"Received response for {client_call_details.method}.",
- extra = {
- "serviceName": "google.cloud.automl.v1.PredictionService",
- "rpcName": client_call_details.method,
- "response": grpc_response,
- "metadata": grpc_response["metadata"],
- },
- )
- return response
-
-
-class PredictionServiceGrpcTransport(PredictionServiceTransport):
- """gRPC backend transport for PredictionService.
-
- AutoML Prediction API.
-
- On any input that is documented to expect a string parameter in
- snake_case or dash-case, either of those cases is accepted.
- - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'automl.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'automl.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if a ``channel`` instance is provided. - channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. 
- always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) - - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'automl.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. 
- credentials_file (Optional[str]): A file with credentials that can
- be loaded with :func:`google.auth.load_credentials_from_file`.
- This argument is mutually exclusive with credentials.
- scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
- service. These are only used when credentials are not specified and
- are passed to :func:`google.auth.default`.
- quota_project_id (Optional[str]): An optional project to use for billing
- and quota.
- kwargs (Optional[dict]): Keyword arguments, which are passed to the
- channel creation.
- Returns:
- grpc.Channel: A gRPC channel object.
-
- Raises:
- google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
- and ``credentials_file`` are passed.
- """
-
- return grpc_helpers.create_channel(
- host,
- credentials=credentials,
- credentials_file=credentials_file,
- quota_project_id=quota_project_id,
- default_scopes=cls.AUTH_SCOPES,
- scopes=scopes,
- default_host=cls.DEFAULT_HOST,
- **kwargs
- )
-
- @property
- def grpc_channel(self) -> grpc.Channel:
- """Return the channel designed to connect to this service.
- """
- return self._grpc_channel
-
- @property
- def operations_client(self) -> operations_v1.OperationsClient:
- """Create the client designed to process long-running operations.
-
- This property caches on the instance; repeated calls return the same
- client.
- """
- # Quick check: Only create a new client if we do not already have one.
- if self._operations_client is None:
- self._operations_client = operations_v1.OperationsClient(
- self._logged_channel
- )
-
- # Return the client from cache.
- return self._operations_client
-
- @property
- def predict(self) -> Callable[
- [prediction_service.PredictRequest],
- prediction_service.PredictResponse]:
- r"""Return a callable for the predict method over gRPC.
-
- Perform an online prediction. The prediction result is directly
- returned in the response. Available for the following ML scenarios,
- and their expected request payloads:
-
- AutoML Vision Classification
-
- -  An image in .JPEG, .GIF or .PNG format, image_bytes up to
-    30MB.
-
- AutoML Vision Object Detection
-
- -  An image in .JPEG, .GIF or .PNG format, image_bytes up to
-    30MB.
-
- AutoML Natural Language Classification
-
- -  A TextSnippet up to 60,000 characters, UTF-8 encoded or a
-    document in .PDF, .TIF or .TIFF format with size up to 2MB.
-
- AutoML Natural Language Entity Extraction
-
- -  A TextSnippet up to 10,000 characters, UTF-8 NFC encoded or a
-    document in .PDF, .TIF or .TIFF format with size up to 20MB.
-
- AutoML Natural Language Sentiment Analysis
-
- -  A TextSnippet up to 60,000 characters, UTF-8 encoded or a
-    document in .PDF, .TIF or .TIFF format with size up to 2MB.
-
- AutoML Translation
-
- -  A TextSnippet up to 25,000 characters, UTF-8 encoded.
-
- AutoML Tables
-
- -  A row with column values matching the columns of the model,
-    up to 5MB. Not available for FORECASTING ``prediction_type``.
-
- Returns:
- Callable[[~.PredictRequest],
- ~.PredictResponse]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'predict' not in self._stubs:
- self._stubs['predict'] = self._logged_channel.unary_unary(
- '/google.cloud.automl.v1.PredictionService/Predict',
- request_serializer=prediction_service.PredictRequest.serialize,
- response_deserializer=prediction_service.PredictResponse.deserialize,
- )
- return self._stubs['predict']
-
- @property
- def batch_predict(self) -> Callable[
- [prediction_service.BatchPredictRequest],
- operations_pb2.Operation]:
- r"""Return a callable for the batch predict method over gRPC.
-
- Perform a batch prediction. Unlike the online
- [Predict][google.cloud.automl.v1.PredictionService.Predict],
- the batch prediction result won't be immediately available in the
- response. Instead, a long-running operation object is returned.
- The user can poll the operation result via the
- [GetOperation][google.longrunning.Operations.GetOperation]
- method. Once the operation is done,
- [BatchPredictResult][google.cloud.automl.v1.BatchPredictResult]
- is returned in the
- [response][google.longrunning.Operation.response] field.
- Available for the following ML scenarios:
-
- -  AutoML Vision Classification
- -  AutoML Vision Object Detection
- -  AutoML Video Intelligence Classification
- -  AutoML Video Intelligence Object Tracking
- -  AutoML Natural Language Classification
- -  AutoML Natural Language Entity Extraction
- -  AutoML Natural Language Sentiment Analysis
- -  AutoML Tables
-
- Returns:
- Callable[[~.BatchPredictRequest],
- ~.Operation]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'batch_predict' not in self._stubs:
- self._stubs['batch_predict'] = self._logged_channel.unary_unary(
- '/google.cloud.automl.v1.PredictionService/BatchPredict',
- request_serializer=prediction_service.BatchPredictRequest.serialize,
- response_deserializer=operations_pb2.Operation.FromString,
- )
- return self._stubs['batch_predict']
-
- def close(self):
- self._logged_channel.close()
-
- @property
- def kind(self) -> str:
- return "grpc"
-
-
-__all__ = (
- 'PredictionServiceGrpcTransport',
-)
diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/prediction_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/prediction_service/transports/grpc_asyncio.py
deleted file mode 100644
index f3d38f33eef6..000000000000
--- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/prediction_service/transports/grpc_asyncio.py
+++ /dev/null
@@ -1,475 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
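The gRPC transport above accepts a pre-built channel; as its constructor documents, ``credentials`` is ignored when a ``channel`` instance is provided. A minimal sketch of injecting a channel created by the transport's own ``create_channel`` helper (which resolves default credentials and scopes itself):

.. code-block:: python

    from google.cloud import automl_v1
    from google.cloud.automl_v1.services.prediction_service.transports import (
        PredictionServiceGrpcTransport,
    )

    # create_channel applies AUTH_SCOPES and ADC when no credentials are given.
    channel = PredictionServiceGrpcTransport.create_channel('automl.googleapis.com')

    # The transport wraps the channel with its logging interceptor.
    transport = PredictionServiceGrpcTransport(channel=channel)
    client = automl_v1.PredictionServiceClient(transport=transport)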
-#
-import inspect
-import json
-import pickle
-import logging as std_logging
-import warnings
-from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
-
-from google.api_core import gapic_v1
-from google.api_core import grpc_helpers_async
-from google.api_core import exceptions as core_exceptions
-from google.api_core import retry_async as retries
-from google.api_core import operations_v1
-from google.auth import credentials as ga_credentials # type: ignore
-from google.auth.transport.grpc import SslCredentials # type: ignore
-from google.protobuf.json_format import MessageToJson
-import google.protobuf.message
-
-import grpc # type: ignore
-import proto # type: ignore
-from grpc.experimental import aio # type: ignore
-
-from google.cloud.automl_v1.types import prediction_service
-from google.longrunning import operations_pb2 # type: ignore
-from .base import PredictionServiceTransport, DEFAULT_CLIENT_INFO
-from .grpc import PredictionServiceGrpcTransport
-
-try:
- from google.api_core import client_logging # type: ignore
- CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER
-except ImportError: # pragma: NO COVER
- CLIENT_LOGGING_SUPPORTED = False
-
-_LOGGER = std_logging.getLogger(__name__)
-
-
-class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER
- async def intercept_unary_unary(self, continuation, client_call_details, request):
- logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG)
- if logging_enabled: # pragma: NO COVER
- request_metadata = client_call_details.metadata
- if isinstance(request, proto.Message):
- request_payload = type(request).to_json(request)
- elif isinstance(request, google.protobuf.message.Message):
- request_payload = MessageToJson(request)
- else:
- request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
-
- request_metadata = {
- key: value.decode("utf-8") if isinstance(value, bytes) else value
- for key, value in request_metadata
- }
- grpc_request = {
- "payload": request_payload,
- "requestMethod": "grpc",
- "metadata": dict(request_metadata),
- }
- _LOGGER.debug(
- f"Sending request for {client_call_details.method}",
- extra = {
- "serviceName": "google.cloud.automl.v1.PredictionService",
- "rpcName": str(client_call_details.method),
- "request": grpc_request,
- "metadata": grpc_request["metadata"],
- },
- )
- response = await continuation(client_call_details, request)
- if logging_enabled: # pragma: NO COVER
- response_metadata = await response.trailing_metadata()
- # Convert the gRPC response metadata into a dict of str -> str.
- metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
- result = await response
- if isinstance(result, proto.Message):
- response_payload = type(result).to_json(result)
- elif isinstance(result, google.protobuf.message.Message):
- response_payload = MessageToJson(result)
- else:
- response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
- grpc_response = {
- "payload": response_payload,
- "metadata": metadata,
- "status": "OK",
- }
- _LOGGER.debug(
- f"Received response to rpc {client_call_details.method}.",
- extra = {
- "serviceName": "google.cloud.automl.v1.PredictionService",
- "rpcName": str(client_call_details.method),
- "response": grpc_response,
- "metadata": grpc_response["metadata"],
- },
- )
- return response
-
-
-class PredictionServiceGrpcAsyncIOTransport(PredictionServiceTransport):
- """gRPC AsyncIO backend transport for PredictionService.
-
- AutoML Prediction API.
-
- On any input that is documented to expect a string parameter in
- snake_case or dash-case, either of those cases is accepted.
-
- This class defines the same methods as the primary client, so the
- primary client can load the underlying transport implementation
- and call it.
-
- It sends protocol buffers over the wire using gRPC (which is built on
- top of HTTP/2); the ``grpcio`` package must be installed.
- """
-
- _grpc_channel: aio.Channel
- _stubs: Dict[str, Callable] = {}
-
- @classmethod
- def create_channel(cls,
- host: str = 'automl.googleapis.com',
- credentials: Optional[ga_credentials.Credentials] = None,
- credentials_file: Optional[str] = None,
- scopes: Optional[Sequence[str]] = None,
- quota_project_id: Optional[str] = None,
- **kwargs) -> aio.Channel:
- """Create and return a gRPC AsyncIO channel object.
- Args:
- host (Optional[str]): The host for the channel to use.
- credentials (Optional[~.Credentials]): The
- authorization credentials to attach to requests. These
- credentials identify this application to the service. If
- none are specified, the client will attempt to ascertain
- the credentials from the environment.
- credentials_file (Optional[str]): A file with credentials that can
- be loaded with :func:`google.auth.load_credentials_from_file`.
- scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
- service. These are only used when credentials are not specified and
- are passed to :func:`google.auth.default`.
- quota_project_id (Optional[str]): An optional project to use for billing
- and quota.
- kwargs (Optional[dict]): Keyword arguments, which are passed to the
- channel creation.
- Returns:
- aio.Channel: A gRPC AsyncIO channel object.
- """
-
- return grpc_helpers_async.create_channel(
- host,
- credentials=credentials,
- credentials_file=credentials_file,
- quota_project_id=quota_project_id,
- default_scopes=cls.AUTH_SCOPES,
- scopes=scopes,
- default_host=cls.DEFAULT_HOST,
- **kwargs
- )
-
- def __init__(self, *,
- host: str = 'automl.googleapis.com',
- credentials: Optional[ga_credentials.Credentials] = None,
- credentials_file: Optional[str] = None,
- scopes: Optional[Sequence[str]] = None,
- channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None,
- api_mtls_endpoint: Optional[str] = None,
- client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
- ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
- client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
- quota_project_id: Optional[str] = None,
- client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
- always_use_jwt_access: Optional[bool] = False,
- api_audience: Optional[str] = None,
- ) -> None:
- """Instantiate the transport.
-
- Args:
- host (Optional[str]):
- The hostname to connect to (default: 'automl.googleapis.com').
- credentials (Optional[google.auth.credentials.Credentials]): The
- authorization credentials to attach to requests. These
- credentials identify the application to the service; if none
- are specified, the client will attempt to ascertain the
- credentials from the environment.
- This argument is ignored if a ``channel`` instance is provided.
- credentials_file (Optional[str]): A file with credentials that can
- be loaded with :func:`google.auth.load_credentials_from_file`.
- This argument is ignored if a ``channel`` instance is provided.
- scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
- service. These are only used when credentials are not specified and
- are passed to :func:`google.auth.default`.
- channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]):
- A ``Channel`` instance through which to make calls, or a Callable
- that constructs and returns one. If set to None, ``self.create_channel``
- is used to create the channel. If a Callable is given, it will be called
- with the same arguments as used in ``self.create_channel``.
- api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
- If provided, it overrides the ``host`` argument and tries to create
- a mutual TLS channel with client SSL credentials from
- ``client_cert_source`` or application default SSL credentials.
- client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
- Deprecated. A callback to provide client SSL certificate bytes and
- private key bytes, both in PEM format. It is ignored if
- ``api_mtls_endpoint`` is None.
- ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
- for the grpc channel. It is ignored if a ``channel`` instance is provided.
- client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
- A callback to provide client certificate bytes and private key bytes,
- both in PEM format. It is used to configure a mutual TLS channel. It is
- ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
- quota_project_id (Optional[str]): An optional project to use for billing
- and quota.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
- your own client library.
- always_use_jwt_access (Optional[bool]): Whether self signed JWT should
- be used for service account credentials.
-
- Raises:
- google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
- creation failed for any reason.
- google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
- and ``credentials_file`` are passed.
- """
- self._grpc_channel = None
- self._ssl_channel_credentials = ssl_channel_credentials
- self._stubs: Dict[str, Callable] = {}
- self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None
-
- if api_mtls_endpoint:
- warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
- if client_cert_source:
- warnings.warn("client_cert_source is deprecated", DeprecationWarning)
-
- if isinstance(channel, aio.Channel):
- # Ignore credentials if a channel was passed.
- credentials = None
- self._ignore_credentials = True
- # If a channel was explicitly provided, set it.
- self._grpc_channel = channel
- self._ssl_channel_credentials = None
- else:
- if api_mtls_endpoint:
- host = api_mtls_endpoint
-
- # Create SSL credentials with client_cert_source or application
- # default SSL credentials.
- if client_cert_source:
- cert, key = client_cert_source()
- self._ssl_channel_credentials = grpc.ssl_channel_credentials(
- certificate_chain=cert, private_key=key
- )
- else:
- self._ssl_channel_credentials = SslCredentials().ssl_credentials
-
- else:
- if client_cert_source_for_mtls and not ssl_channel_credentials:
- cert, key = client_cert_source_for_mtls()
- self._ssl_channel_credentials = grpc.ssl_channel_credentials(
- certificate_chain=cert, private_key=key
- )
-
- # The base transport sets the host, credentials and scopes
- super().__init__(
- host=host,
- credentials=credentials,
- credentials_file=credentials_file,
- scopes=scopes,
- quota_project_id=quota_project_id,
- client_info=client_info,
- always_use_jwt_access=always_use_jwt_access,
- api_audience=api_audience,
- )
-
- if not self._grpc_channel:
- # initialize with the provided callable or the default channel
- channel_init = channel or type(self).create_channel
- self._grpc_channel = channel_init(
- self._host,
- # use the credentials which are saved
- credentials=self._credentials,
- # Set ``credentials_file`` to ``None`` here as
- # the credentials that we saved earlier should be used.
- credentials_file=None,
- scopes=self._scopes,
- ssl_credentials=self._ssl_channel_credentials,
- quota_project_id=quota_project_id,
- options=[
- ("grpc.max_send_message_length", -1),
- ("grpc.max_receive_message_length", -1),
- ],
- )
-
- self._interceptor = _LoggingClientAIOInterceptor()
- self._grpc_channel._unary_unary_interceptors.append(self._interceptor)
- self._logged_channel = self._grpc_channel
- self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters
- # Wrap messages. This must be done after self._logged_channel exists
- self._prep_wrapped_messages(client_info)
-
- @property
- def grpc_channel(self) -> aio.Channel:
- """Create the channel designed to connect to this service.
-
- This property caches on the instance; repeated calls return
- the same channel.
- """
- # Return the channel from cache.
- return self._grpc_channel
-
- @property
- def operations_client(self) -> operations_v1.OperationsAsyncClient:
- """Create the client designed to process long-running operations.
-
- This property caches on the instance; repeated calls return the same
- client.
- """
- # Quick check: Only create a new client if we do not already have one.
- if self._operations_client is None:
- self._operations_client = operations_v1.OperationsAsyncClient(
- self._logged_channel
- )
-
- # Return the client from cache.
- return self._operations_client
-
- @property
- def predict(self) -> Callable[
- [prediction_service.PredictRequest],
- Awaitable[prediction_service.PredictResponse]]:
- r"""Return a callable for the predict method over gRPC.
-
- Perform an online prediction. The prediction result is directly
- returned in the response. Available for the following ML scenarios,
- and their expected request payloads:
-
- AutoML Vision Classification
-
- -  An image in .JPEG, .GIF or .PNG format, image_bytes up to
-    30MB.
-
- AutoML Vision Object Detection
-
- -  An image in .JPEG, .GIF or .PNG format, image_bytes up to
-    30MB.
-
- AutoML Natural Language Classification
-
- -  A TextSnippet up to 60,000 characters, UTF-8 encoded or a
-    document in .PDF, .TIF or .TIFF format with size up to 2MB.
-
- AutoML Natural Language Entity Extraction
-
- -  A TextSnippet up to 10,000 characters, UTF-8 NFC encoded or a
-    document in .PDF, .TIF or .TIFF format with size up to 20MB.
-
- AutoML Natural Language Sentiment Analysis
-
- -  A TextSnippet up to 60,000 characters, UTF-8 encoded or a
-    document in .PDF, .TIF or .TIFF format with size up to 2MB.
-
- AutoML Translation
-
- -  A TextSnippet up to 25,000 characters, UTF-8 encoded.
-
- AutoML Tables
-
- -  A row with column values matching the columns of the model,
-    up to 5MB. Not available for FORECASTING ``prediction_type``.
-
- Returns:
- Callable[[~.PredictRequest],
- Awaitable[~.PredictResponse]]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'predict' not in self._stubs:
- self._stubs['predict'] = self._logged_channel.unary_unary(
- '/google.cloud.automl.v1.PredictionService/Predict',
- request_serializer=prediction_service.PredictRequest.serialize,
- response_deserializer=prediction_service.PredictResponse.deserialize,
- )
- return self._stubs['predict']
-
- @property
- def batch_predict(self) -> Callable[
- [prediction_service.BatchPredictRequest],
- Awaitable[operations_pb2.Operation]]:
- r"""Return a callable for the batch predict method over gRPC.
-
- Perform a batch prediction. Unlike the online
- [Predict][google.cloud.automl.v1.PredictionService.Predict],
- the batch prediction result won't be immediately available in the
- response. Instead, a long-running operation object is returned.
- The user can poll the operation result via the
- [GetOperation][google.longrunning.Operations.GetOperation]
- method. Once the operation is done,
- [BatchPredictResult][google.cloud.automl.v1.BatchPredictResult]
- is returned in the
- [response][google.longrunning.Operation.response] field.
- Available for the following ML scenarios:
-
- -  AutoML Vision Classification
- -  AutoML Vision Object Detection
- -  AutoML Video Intelligence Classification
- -  AutoML Video Intelligence Object Tracking
- -  AutoML Natural Language Classification
- -  AutoML Natural Language Entity Extraction
- -  AutoML Natural Language Sentiment Analysis
- -  AutoML Tables
-
- Returns:
- Callable[[~.BatchPredictRequest],
- Awaitable[~.Operation]]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'batch_predict' not in self._stubs: - self._stubs['batch_predict'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1.PredictionService/BatchPredict', - request_serializer=prediction_service.BatchPredictRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['batch_predict'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.predict: self._wrap_method( - self.predict, - default_timeout=60.0, - client_info=client_info, - ), - self.batch_predict: self._wrap_method( - self.batch_predict, - default_timeout=60.0, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - -__all__ = ( - 'PredictionServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/prediction_service/transports/rest.py b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/prediction_service/transports/rest.py deleted file mode 100644 index a385c0e60633..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/prediction_service/transports/rest.py +++ /dev/null @@ -1,592 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
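The AsyncIO transport above is what ``PredictionServiceAsyncClient`` uses by default. A minimal usage sketch modeled on the generated samples; the model name and image bytes are placeholders that must be replaced with real values:

.. code-block:: python

    import asyncio

    from google.cloud import automl_v1

    async def sample_predict():
        # Uses PredictionServiceGrpcAsyncIOTransport under the hood.
        client = automl_v1.PredictionServiceAsyncClient()

        payload = automl_v1.ExamplePayload()
        payload.image.image_bytes = b'...'  # placeholder image bytes

        request = automl_v1.PredictRequest(
            name="name_value",  # placeholder model resource name
            payload=payload,
        )

        # The predict coroutine must be awaited.
        response = await client.predict(request=request)
        print(response)

    asyncio.run(sample_predict())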
-#
-import logging
-import json # type: ignore
-
-from google.auth.transport.requests import AuthorizedSession # type: ignore
-from google.auth import credentials as ga_credentials # type: ignore
-from google.api_core import exceptions as core_exceptions
-from google.api_core import retry as retries
-from google.api_core import rest_helpers
-from google.api_core import rest_streaming
-from google.api_core import gapic_v1
-
-from google.protobuf import json_format
-from google.api_core import operations_v1
-
-from requests import __version__ as requests_version
-import dataclasses
-from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
-import warnings
-
-
-from google.cloud.automl_v1.types import prediction_service
-from google.longrunning import operations_pb2 # type: ignore
-
-
-from .rest_base import _BasePredictionServiceRestTransport
-from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO
-
-try:
- OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
-except AttributeError: # pragma: NO COVER
- OptionalRetry = Union[retries.Retry, object, None] # type: ignore
-
-try:
- from google.api_core import client_logging # type: ignore
- CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER
-except ImportError: # pragma: NO COVER
- CLIENT_LOGGING_SUPPORTED = False
-
-_LOGGER = logging.getLogger(__name__)
-
-DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
- gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
- grpc_version=None,
- rest_version=f"requests@{requests_version}",
-)
-
-
-class PredictionServiceRestInterceptor:
- """Interceptor for PredictionService.
-
- Interceptors are used to manipulate requests, request metadata, and responses
- in arbitrary ways.
- Example use cases include:
- * Logging
- * Verifying requests according to service or custom semantics
- * Stripping extraneous information from responses
-
- These use cases and more can be enabled by injecting an
- instance of a custom subclass when constructing the PredictionServiceRestTransport.
-
- .. code-block:: python
-
- class MyCustomPredictionServiceInterceptor(PredictionServiceRestInterceptor):
- def pre_batch_predict(self, request, metadata):
- logging.log(logging.INFO, f"Received request: {request}")
- return request, metadata
-
- def post_batch_predict(self, response):
- logging.log(logging.INFO, f"Received response: {response}")
- return response
-
- def pre_predict(self, request, metadata):
- logging.log(logging.INFO, f"Received request: {request}")
- return request, metadata
-
- def post_predict(self, response):
- logging.log(logging.INFO, f"Received response: {response}")
- return response
-
- transport = PredictionServiceRestTransport(interceptor=MyCustomPredictionServiceInterceptor())
- client = PredictionServiceClient(transport=transport)
-
-
- """
- def pre_batch_predict(self, request: prediction_service.BatchPredictRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[prediction_service.BatchPredictRequest, Sequence[Tuple[str, Union[str, bytes]]]]:
- """Pre-rpc interceptor for batch_predict
-
- Override in a subclass to manipulate the request or metadata
- before they are sent to the PredictionService server.
- """
- return request, metadata
-
- def post_batch_predict(self, response: operations_pb2.Operation) -> operations_pb2.Operation:
- """Post-rpc interceptor for batch_predict
-
- DEPRECATED. Please use the `post_batch_predict_with_metadata`
- interceptor instead.
- - Override in a subclass to read or manipulate the response - after it is returned by the PredictionService server but before - it is returned to user code. This `post_batch_predict` interceptor runs - before the `post_batch_predict_with_metadata` interceptor. - """ - return response - - def post_batch_predict_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for batch_predict - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the PredictionService server but before it is returned to user code. - - We recommend only using this `post_batch_predict_with_metadata` - interceptor in new development instead of the `post_batch_predict` interceptor. - When both interceptors are used, this `post_batch_predict_with_metadata` interceptor runs after the - `post_batch_predict` interceptor. The (possibly modified) response returned by - `post_batch_predict` will be passed to - `post_batch_predict_with_metadata`. - """ - return response, metadata - - def pre_predict(self, request: prediction_service.PredictRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[prediction_service.PredictRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for predict - - Override in a subclass to manipulate the request or metadata - before they are sent to the PredictionService server. - """ - return request, metadata - - def post_predict(self, response: prediction_service.PredictResponse) -> prediction_service.PredictResponse: - """Post-rpc interceptor for predict - - DEPRECATED. Please use the `post_predict_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the PredictionService server but before - it is returned to user code. This `post_predict` interceptor runs - before the `post_predict_with_metadata` interceptor. - """ - return response - - def post_predict_with_metadata(self, response: prediction_service.PredictResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[prediction_service.PredictResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for predict - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the PredictionService server but before it is returned to user code. - - We recommend only using this `post_predict_with_metadata` - interceptor in new development instead of the `post_predict` interceptor. - When both interceptors are used, this `post_predict_with_metadata` interceptor runs after the - `post_predict` interceptor. The (possibly modified) response returned by - `post_predict` will be passed to - `post_predict_with_metadata`. - """ - return response, metadata - - -@dataclasses.dataclass -class PredictionServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: PredictionServiceRestInterceptor - - -class PredictionServiceRestTransport(_BasePredictionServiceRestTransport): - """REST backend synchronous transport for PredictionService. - - AutoML Prediction API. - - On any input that is documented to expect a string parameter in - snake_case or dash-case, either of those cases is accepted. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'automl.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[PredictionServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'automl.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. - # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - url_scheme=url_scheme, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or PredictionServiceRestInterceptor() - self._prep_wrapped_messages(client_info) - - @property - def operations_client(self) -> operations_v1.AbstractOperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Only create a new client if we do not already have one. 
- if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.CancelOperation': [ - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - ], - 'google.longrunning.Operations.DeleteOperation': [ - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ], - 'google.longrunning.Operations.GetOperation': [ - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ], - 'google.longrunning.Operations.ListOperations': [ - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', - }, - ], - 'google.longrunning.Operations.WaitOperation': [ - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:wait', - 'body': '*', - }, - ], - } - - rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1") - - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) - - # Return the client from cache. - return self._operations_client - - class _BatchPredict(_BasePredictionServiceRestTransport._BaseBatchPredict, PredictionServiceRestStub): - def __hash__(self): - return hash("PredictionServiceRestTransport.BatchPredict") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: prediction_service.BatchPredictRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the batch predict method over HTTP. - - Args: - request (~.prediction_service.BatchPredictRequest): - The request object. Request message for - [PredictionService.BatchPredict][google.cloud.automl.v1.PredictionService.BatchPredict]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = _BasePredictionServiceRestTransport._BaseBatchPredict._get_http_options() - - request, metadata = self._interceptor.pre_batch_predict(request, metadata) - transcoded_request = _BasePredictionServiceRestTransport._BaseBatchPredict._get_transcoded_request(http_options, request) - - body = _BasePredictionServiceRestTransport._BaseBatchPredict._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BasePredictionServiceRestTransport._BaseBatchPredict._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.automl_v1.PredictionServiceClient.BatchPredict", - extra = { - "serviceName": "google.cloud.automl.v1.PredictionService", - "rpcName": "BatchPredict", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = PredictionServiceRestTransport._BatchPredict._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_batch_predict(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_batch_predict_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.automl_v1.PredictionServiceClient.batch_predict", - extra = { - "serviceName": "google.cloud.automl.v1.PredictionService", - "rpcName": "BatchPredict", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _Predict(_BasePredictionServiceRestTransport._BasePredict, PredictionServiceRestStub): - def __hash__(self): - return hash("PredictionServiceRestTransport.Predict") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: prediction_service.PredictRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> 
prediction_service.PredictResponse: - r"""Call the predict method over HTTP. - - Args: - request (~.prediction_service.PredictRequest): - The request object. Request message for - [PredictionService.Predict][google.cloud.automl.v1.PredictionService.Predict]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.prediction_service.PredictResponse: - Response message for - [PredictionService.Predict][google.cloud.automl.v1.PredictionService.Predict]. - - """ - - http_options = _BasePredictionServiceRestTransport._BasePredict._get_http_options() - - request, metadata = self._interceptor.pre_predict(request, metadata) - transcoded_request = _BasePredictionServiceRestTransport._BasePredict._get_transcoded_request(http_options, request) - - body = _BasePredictionServiceRestTransport._BasePredict._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BasePredictionServiceRestTransport._BasePredict._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.automl_v1.PredictionServiceClient.Predict", - extra = { - "serviceName": "google.cloud.automl.v1.PredictionService", - "rpcName": "Predict", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = PredictionServiceRestTransport._Predict._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = prediction_service.PredictResponse() - pb_resp = prediction_service.PredictResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_predict(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_predict_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = prediction_service.PredictResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.automl_v1.PredictionServiceClient.predict", - extra = { - "serviceName": "google.cloud.automl.v1.PredictionService", - "rpcName": "Predict", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - @property - def batch_predict(self) -> Callable[ - [prediction_service.BatchPredictRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._BatchPredict(self._session, self._host, self._interceptor) # type: ignore - - @property - def predict(self) -> Callable[ - [prediction_service.PredictRequest], - prediction_service.PredictResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._Predict(self._session, self._host, self._interceptor) # type: ignore - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'PredictionServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/prediction_service/transports/rest_base.py b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/prediction_service/transports/rest_base.py deleted file mode 100644 index 8ff44ca2d571..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/services/prediction_service/transports/rest_base.py +++ /dev/null @@ -1,186 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
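Before moving on to the base class: the ``Sending request ...`` / ``Received response ...`` debug records in the transport above are emitted on the module's own logger (``_LOGGER = logging.getLogger(__name__)``) and only fire when ``CLIENT_LOGGING_SUPPORTED`` is true and the logger has ``DEBUG`` enabled. A minimal sketch of switching them on in code (the handler choice is illustrative):

.. code-block:: python

    import logging

    # __name__ in the generated rest.py resolves to the module path below;
    # attach a handler and lower the level to see the structured
    # httpRequest/httpResponse extras.
    rest_logger = logging.getLogger(
        "google.cloud.automl_v1.services.prediction_service.transports.rest"
    )
    rest_logger.addHandler(logging.StreamHandler())
    rest_logger.setLevel(logging.DEBUG)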
-# -import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from .base import PredictionServiceTransport, DEFAULT_CLIENT_INFO - -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - - -from google.cloud.automl_v1.types import prediction_service -from google.longrunning import operations_pb2 # type: ignore - - -class _BasePredictionServiceRestTransport(PredictionServiceTransport): - """Base REST backend transport for PredictionService. - - Note: This class is not meant to be used directly. Use its sync and - async sub-classes instead. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'automl.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - Args: - host (Optional[str]): - The hostname to connect to (default: 'automl.googleapis.com'). - credentials (Optional[Any]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. 
- """ - # Run the base constructor - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - - class _BaseBatchPredict: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/models/*}:batchPredict', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = prediction_service.BatchPredictRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BasePredictionServiceRestTransport._BaseBatchPredict._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BasePredict: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/models/*}:predict', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = prediction_service.PredictRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BasePredictionServiceRestTransport._BasePredict._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - -__all__=( - '_BasePredictionServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/__init__.py 
b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/__init__.py deleted file mode 100644 index b0a3f302c37e..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/__init__.py +++ /dev/null @@ -1,220 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .annotation_payload import ( - AnnotationPayload, -) -from .annotation_spec import ( - AnnotationSpec, -) -from .classification import ( - ClassificationAnnotation, - ClassificationEvaluationMetrics, - ClassificationType, -) -from .data_items import ( - Document, - DocumentDimensions, - ExamplePayload, - Image, - TextSnippet, -) -from .dataset import ( - Dataset, -) -from .detection import ( - BoundingBoxMetricsEntry, - ImageObjectDetectionAnnotation, - ImageObjectDetectionEvaluationMetrics, -) -from .geometry import ( - BoundingPoly, - NormalizedVertex, -) -from .image import ( - ImageClassificationDatasetMetadata, - ImageClassificationModelDeploymentMetadata, - ImageClassificationModelMetadata, - ImageObjectDetectionDatasetMetadata, - ImageObjectDetectionModelDeploymentMetadata, - ImageObjectDetectionModelMetadata, -) -from .io import ( - BatchPredictInputConfig, - BatchPredictOutputConfig, - DocumentInputConfig, - GcsDestination, - GcsSource, - InputConfig, - ModelExportOutputConfig, - OutputConfig, -) -from .model import ( - Model, -) -from .model_evaluation import ( - ModelEvaluation, -) -from .operations import ( - BatchPredictOperationMetadata, - CreateDatasetOperationMetadata, - CreateModelOperationMetadata, - DeleteOperationMetadata, - DeployModelOperationMetadata, - ExportDataOperationMetadata, - ExportModelOperationMetadata, - ImportDataOperationMetadata, - OperationMetadata, - UndeployModelOperationMetadata, -) -from .prediction_service import ( - BatchPredictRequest, - BatchPredictResult, - PredictRequest, - PredictResponse, -) -from .service import ( - CreateDatasetRequest, - CreateModelRequest, - DeleteDatasetRequest, - DeleteModelRequest, - DeployModelRequest, - ExportDataRequest, - ExportModelRequest, - GetAnnotationSpecRequest, - GetDatasetRequest, - GetModelEvaluationRequest, - GetModelRequest, - ImportDataRequest, - ListDatasetsRequest, - ListDatasetsResponse, - ListModelEvaluationsRequest, - ListModelEvaluationsResponse, - ListModelsRequest, - ListModelsResponse, - UndeployModelRequest, - UpdateDatasetRequest, - UpdateModelRequest, -) -from .text import ( - TextClassificationDatasetMetadata, - TextClassificationModelMetadata, - TextExtractionDatasetMetadata, - TextExtractionModelMetadata, - TextSentimentDatasetMetadata, - TextSentimentModelMetadata, -) -from .text_extraction import ( - TextExtractionAnnotation, - TextExtractionEvaluationMetrics, -) -from .text_segment import ( - TextSegment, -) -from .text_sentiment import ( - TextSentimentAnnotation, - TextSentimentEvaluationMetrics, -) -from .translation import ( - TranslationAnnotation, - TranslationDatasetMetadata, - TranslationEvaluationMetrics, - 
TranslationModelMetadata, -) - -__all__ = ( - 'AnnotationPayload', - 'AnnotationSpec', - 'ClassificationAnnotation', - 'ClassificationEvaluationMetrics', - 'ClassificationType', - 'Document', - 'DocumentDimensions', - 'ExamplePayload', - 'Image', - 'TextSnippet', - 'Dataset', - 'BoundingBoxMetricsEntry', - 'ImageObjectDetectionAnnotation', - 'ImageObjectDetectionEvaluationMetrics', - 'BoundingPoly', - 'NormalizedVertex', - 'ImageClassificationDatasetMetadata', - 'ImageClassificationModelDeploymentMetadata', - 'ImageClassificationModelMetadata', - 'ImageObjectDetectionDatasetMetadata', - 'ImageObjectDetectionModelDeploymentMetadata', - 'ImageObjectDetectionModelMetadata', - 'BatchPredictInputConfig', - 'BatchPredictOutputConfig', - 'DocumentInputConfig', - 'GcsDestination', - 'GcsSource', - 'InputConfig', - 'ModelExportOutputConfig', - 'OutputConfig', - 'Model', - 'ModelEvaluation', - 'BatchPredictOperationMetadata', - 'CreateDatasetOperationMetadata', - 'CreateModelOperationMetadata', - 'DeleteOperationMetadata', - 'DeployModelOperationMetadata', - 'ExportDataOperationMetadata', - 'ExportModelOperationMetadata', - 'ImportDataOperationMetadata', - 'OperationMetadata', - 'UndeployModelOperationMetadata', - 'BatchPredictRequest', - 'BatchPredictResult', - 'PredictRequest', - 'PredictResponse', - 'CreateDatasetRequest', - 'CreateModelRequest', - 'DeleteDatasetRequest', - 'DeleteModelRequest', - 'DeployModelRequest', - 'ExportDataRequest', - 'ExportModelRequest', - 'GetAnnotationSpecRequest', - 'GetDatasetRequest', - 'GetModelEvaluationRequest', - 'GetModelRequest', - 'ImportDataRequest', - 'ListDatasetsRequest', - 'ListDatasetsResponse', - 'ListModelEvaluationsRequest', - 'ListModelEvaluationsResponse', - 'ListModelsRequest', - 'ListModelsResponse', - 'UndeployModelRequest', - 'UpdateDatasetRequest', - 'UpdateModelRequest', - 'TextClassificationDatasetMetadata', - 'TextClassificationModelMetadata', - 'TextExtractionDatasetMetadata', - 'TextExtractionModelMetadata', - 'TextSentimentDatasetMetadata', - 'TextSentimentModelMetadata', - 'TextExtractionAnnotation', - 'TextExtractionEvaluationMetrics', - 'TextSegment', - 'TextSentimentAnnotation', - 'TextSentimentEvaluationMetrics', - 'TranslationAnnotation', - 'TranslationDatasetMetadata', - 'TranslationEvaluationMetrics', - 'TranslationModelMetadata', -) diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/annotation_payload.py b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/annotation_payload.py deleted file mode 100644 index 19311cfac5d5..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/annotation_payload.py +++ /dev/null @@ -1,126 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
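Every name in the ``__all__`` tuple above is re-exported from ``google.cloud.automl_v1.types``, so callers import from the package rather than from the private submodules that follow. A quick sketch (the resource name is a placeholder):

.. code-block:: python

    from google.cloud.automl_v1.types import (
        ClassificationType,
        PredictRequest,
        TextSnippet,
    )

    # Proto enums behave as ints; MULTICLASS is defined as 1 further below.
    assert ClassificationType.MULTICLASS == 1
    request = PredictRequest(name="projects/.../locations/.../models/...")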
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.automl_v1.types import classification as gca_classification -from google.cloud.automl_v1.types import detection -from google.cloud.automl_v1.types import text_extraction as gca_text_extraction -from google.cloud.automl_v1.types import text_sentiment as gca_text_sentiment -from google.cloud.automl_v1.types import translation as gca_translation - - -__protobuf__ = proto.module( - package='google.cloud.automl.v1', - manifest={ - 'AnnotationPayload', - }, -) - - -class AnnotationPayload(proto.Message): - r"""Contains annotation information that is relevant to AutoML. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - translation (google.cloud.automl_v1.types.TranslationAnnotation): - Annotation details for translation. - - This field is a member of `oneof`_ ``detail``. - classification (google.cloud.automl_v1.types.ClassificationAnnotation): - Annotation details for content or image - classification. - - This field is a member of `oneof`_ ``detail``. - image_object_detection (google.cloud.automl_v1.types.ImageObjectDetectionAnnotation): - Annotation details for image object - detection. - - This field is a member of `oneof`_ ``detail``. - text_extraction (google.cloud.automl_v1.types.TextExtractionAnnotation): - Annotation details for text extraction. - - This field is a member of `oneof`_ ``detail``. - text_sentiment (google.cloud.automl_v1.types.TextSentimentAnnotation): - Annotation details for text sentiment. - - This field is a member of `oneof`_ ``detail``. - annotation_spec_id (str): - Output only . The resource ID of the - annotation spec that this annotation pertains - to. The annotation spec comes from either an - ancestor dataset, or the dataset that was used - to train the model in use. - display_name (str): - Output only. The value of - [display_name][google.cloud.automl.v1.AnnotationSpec.display_name] - when the model was trained. Because this field returns a - value at model training time, for different models trained - using the same dataset, the returned value could be - different as model owner could update the ``display_name`` - between any two model training. 
- """ - - translation: gca_translation.TranslationAnnotation = proto.Field( - proto.MESSAGE, - number=2, - oneof='detail', - message=gca_translation.TranslationAnnotation, - ) - classification: gca_classification.ClassificationAnnotation = proto.Field( - proto.MESSAGE, - number=3, - oneof='detail', - message=gca_classification.ClassificationAnnotation, - ) - image_object_detection: detection.ImageObjectDetectionAnnotation = proto.Field( - proto.MESSAGE, - number=4, - oneof='detail', - message=detection.ImageObjectDetectionAnnotation, - ) - text_extraction: gca_text_extraction.TextExtractionAnnotation = proto.Field( - proto.MESSAGE, - number=6, - oneof='detail', - message=gca_text_extraction.TextExtractionAnnotation, - ) - text_sentiment: gca_text_sentiment.TextSentimentAnnotation = proto.Field( - proto.MESSAGE, - number=7, - oneof='detail', - message=gca_text_sentiment.TextSentimentAnnotation, - ) - annotation_spec_id: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=5, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/annotation_spec.py b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/annotation_spec.py deleted file mode 100644 index 3491461dec35..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/annotation_spec.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.automl.v1', - manifest={ - 'AnnotationSpec', - }, -) - - -class AnnotationSpec(proto.Message): - r"""A definition of an annotation spec. - - Attributes: - name (str): - Output only. Resource name of the annotation spec. Form: - 'projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/annotationSpecs/{annotation_spec_id}' - display_name (str): - Required. The name of the annotation spec to show in the - interface. The name can be up to 32 characters long and must - match the regexp ``[a-zA-Z0-9_]+``. - example_count (int): - Output only. The number of examples in the - parent dataset labeled by the annotation spec. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - example_count: int = proto.Field( - proto.INT32, - number=9, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/classification.py b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/classification.py deleted file mode 100644 index 039be9083425..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/classification.py +++ /dev/null @@ -1,310 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.automl.v1', - manifest={ - 'ClassificationType', - 'ClassificationAnnotation', - 'ClassificationEvaluationMetrics', - }, -) - - -class ClassificationType(proto.Enum): - r"""Type of the classification problem. - - Values: - CLASSIFICATION_TYPE_UNSPECIFIED (0): - An un-set value of this enum. - MULTICLASS (1): - At most one label is allowed per example. - MULTILABEL (2): - Multiple labels are allowed for one example. - """ - CLASSIFICATION_TYPE_UNSPECIFIED = 0 - MULTICLASS = 1 - MULTILABEL = 2 - - -class ClassificationAnnotation(proto.Message): - r"""Contains annotation details specific to classification. - - Attributes: - score (float): - Output only. A confidence estimate between - 0.0 and 1.0. A higher value means greater - confidence that the annotation is positive. If a - user approves an annotation as negative or - positive, the score value remains unchanged. If - a user creates an annotation, the score is 0 for - negative or 1 for positive. - """ - - score: float = proto.Field( - proto.FLOAT, - number=1, - ) - - -class ClassificationEvaluationMetrics(proto.Message): - r"""Model evaluation metrics for classification problems. Note: For - Video Classification this metrics only describe quality of the Video - Classification predictions of "segment_classification" type. - - Attributes: - au_prc (float): - Output only. The Area Under Precision-Recall - Curve metric. Micro-averaged for the overall - evaluation. - au_roc (float): - Output only. The Area Under Receiver - Operating Characteristic curve metric. - Micro-averaged for the overall evaluation. - log_loss (float): - Output only. The Log Loss metric. - confidence_metrics_entry (MutableSequence[google.cloud.automl_v1.types.ClassificationEvaluationMetrics.ConfidenceMetricsEntry]): - Output only. Metrics for each confidence_threshold in - 0.00,0.05,0.10,...,0.95,0.96,0.97,0.98,0.99 and - position_threshold = INT32_MAX_VALUE. ROC and - precision-recall curves, and other aggregated metrics are - derived from them. 
The confidence metrics entries may also - be supplied for additional values of position_threshold, but - from these no aggregated metrics are computed. - confusion_matrix (google.cloud.automl_v1.types.ClassificationEvaluationMetrics.ConfusionMatrix): - Output only. Confusion matrix of the - evaluation. Only set for MULTICLASS - classification problems where number of labels - is no more than 10. - Only set for model level evaluation, not for - evaluation per label. - annotation_spec_id (MutableSequence[str]): - Output only. The annotation spec ids used for - this evaluation. - """ - - class ConfidenceMetricsEntry(proto.Message): - r"""Metrics for a single confidence threshold. - - Attributes: - confidence_threshold (float): - Output only. Metrics are computed with an - assumption that the model never returns - predictions with score lower than this value. - position_threshold (int): - Output only. Metrics are computed with an assumption that - the model always returns at most this many predictions - (ordered by their score, descendingly), but they all still - need to meet the confidence_threshold. - recall (float): - Output only. Recall (True Positive Rate) for - the given confidence threshold. - precision (float): - Output only. Precision for the given - confidence threshold. - false_positive_rate (float): - Output only. False Positive Rate for the - given confidence threshold. - f1_score (float): - Output only. The harmonic mean of recall and - precision. - recall_at1 (float): - Output only. The Recall (True Positive Rate) - when only considering the label that has the - highest prediction score and not below the - confidence threshold for each example. - precision_at1 (float): - Output only. The precision when only - considering the label that has the highest - prediction score and not below the confidence - threshold for each example. - false_positive_rate_at1 (float): - Output only. The False Positive Rate when - only considering the label that has the highest - prediction score and not below the confidence - threshold for each example. - f1_score_at1 (float): - Output only. The harmonic mean of - [recall_at1][google.cloud.automl.v1.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.recall_at1] - and - [precision_at1][google.cloud.automl.v1.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.precision_at1]. - true_positive_count (int): - Output only. The number of model created - labels that match a ground truth label. - false_positive_count (int): - Output only. The number of model created - labels that do not match a ground truth label. - false_negative_count (int): - Output only. The number of ground truth - labels that are not matched by a model created - label. - true_negative_count (int): - Output only. The number of labels that were - not created by the model, but if they would, - they would not match a ground truth label. 
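The ``f1_score`` and ``f1_score_at1`` fields above are documented as the harmonic mean of precision and recall; spelled out, that is the computation below (pure illustration, not library code):

.. code-block:: python

    def harmonic_mean(precision: float, recall: float) -> float:
        # F1 as documented for ConfidenceMetricsEntry.f1_score.
        if precision + recall == 0.0:
            return 0.0
        return 2 * precision * recall / (precision + recall)

    assert abs(harmonic_mean(0.8, 0.5) - 8 / 13) < 1e-9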
- """ - - confidence_threshold: float = proto.Field( - proto.FLOAT, - number=1, - ) - position_threshold: int = proto.Field( - proto.INT32, - number=14, - ) - recall: float = proto.Field( - proto.FLOAT, - number=2, - ) - precision: float = proto.Field( - proto.FLOAT, - number=3, - ) - false_positive_rate: float = proto.Field( - proto.FLOAT, - number=8, - ) - f1_score: float = proto.Field( - proto.FLOAT, - number=4, - ) - recall_at1: float = proto.Field( - proto.FLOAT, - number=5, - ) - precision_at1: float = proto.Field( - proto.FLOAT, - number=6, - ) - false_positive_rate_at1: float = proto.Field( - proto.FLOAT, - number=9, - ) - f1_score_at1: float = proto.Field( - proto.FLOAT, - number=7, - ) - true_positive_count: int = proto.Field( - proto.INT64, - number=10, - ) - false_positive_count: int = proto.Field( - proto.INT64, - number=11, - ) - false_negative_count: int = proto.Field( - proto.INT64, - number=12, - ) - true_negative_count: int = proto.Field( - proto.INT64, - number=13, - ) - - class ConfusionMatrix(proto.Message): - r"""Confusion matrix of the model running the classification. - - Attributes: - annotation_spec_id (MutableSequence[str]): - Output only. IDs of the annotation specs used in the - confusion matrix. For Tables CLASSIFICATION - [prediction_type][google.cloud.automl.v1p1beta.TablesModelMetadata.prediction_type] - only list of [annotation_spec_display_name-s][] is - populated. - display_name (MutableSequence[str]): - Output only. Display name of the annotation specs used in - the confusion matrix, as they were at the moment of the - evaluation. For Tables CLASSIFICATION - [prediction_type-s][google.cloud.automl.v1p1beta.TablesModelMetadata.prediction_type], - distinct values of the target column at the moment of the - model evaluation are populated here. - row (MutableSequence[google.cloud.automl_v1.types.ClassificationEvaluationMetrics.ConfusionMatrix.Row]): - Output only. Rows in the confusion matrix. The number of - rows is equal to the size of ``annotation_spec_id``. - ``row[i].example_count[j]`` is the number of examples that - have ground truth of the ``annotation_spec_id[i]`` and are - predicted as ``annotation_spec_id[j]`` by the model being - evaluated. - """ - - class Row(proto.Message): - r"""Output only. A row in the confusion matrix. - - Attributes: - example_count (MutableSequence[int]): - Output only. Value of the specific cell in the confusion - matrix. The number of values each row has (i.e. the length - of the row) is equal to the length of the - ``annotation_spec_id`` field or, if that one is not - populated, length of the - [display_name][google.cloud.automl.v1.ClassificationEvaluationMetrics.ConfusionMatrix.display_name] - field. 
- """ - - example_count: MutableSequence[int] = proto.RepeatedField( - proto.INT32, - number=1, - ) - - annotation_spec_id: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - display_name: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - row: MutableSequence['ClassificationEvaluationMetrics.ConfusionMatrix.Row'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='ClassificationEvaluationMetrics.ConfusionMatrix.Row', - ) - - au_prc: float = proto.Field( - proto.FLOAT, - number=1, - ) - au_roc: float = proto.Field( - proto.FLOAT, - number=6, - ) - log_loss: float = proto.Field( - proto.FLOAT, - number=7, - ) - confidence_metrics_entry: MutableSequence[ConfidenceMetricsEntry] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=ConfidenceMetricsEntry, - ) - confusion_matrix: ConfusionMatrix = proto.Field( - proto.MESSAGE, - number=4, - message=ConfusionMatrix, - ) - annotation_spec_id: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=5, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/data_items.py b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/data_items.py deleted file mode 100644 index 6544f085fe52..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/data_items.py +++ /dev/null @@ -1,337 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.automl_v1.types import geometry -from google.cloud.automl_v1.types import io -from google.cloud.automl_v1.types import text_segment as gca_text_segment - - -__protobuf__ = proto.module( - package='google.cloud.automl.v1', - manifest={ - 'Image', - 'TextSnippet', - 'DocumentDimensions', - 'Document', - 'ExamplePayload', - }, -) - - -class Image(proto.Message): - r"""A representation of an image. - Only images up to 30MB in size are supported. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - image_bytes (bytes): - Image content represented as a stream of bytes. Note: As - with all ``bytes`` fields, protobuffers use a pure binary - representation, whereas JSON representations use base64. - - This field is a member of `oneof`_ ``data``. - thumbnail_uri (str): - Output only. HTTP URI to the thumbnail image. - """ - - image_bytes: bytes = proto.Field( - proto.BYTES, - number=1, - oneof='data', - ) - thumbnail_uri: str = proto.Field( - proto.STRING, - number=4, - ) - - -class TextSnippet(proto.Message): - r"""A representation of a text snippet. - - Attributes: - content (str): - Required. The content of the text snippet as - a string. Up to 250000 characters long. - mime_type (str): - Optional. 
The format of - [content][google.cloud.automl.v1.TextSnippet.content]. - Currently the only two allowed values are "text/html" and - "text/plain". If left blank, the format is automatically - determined from the type of the uploaded - [content][google.cloud.automl.v1.TextSnippet.content]. - content_uri (str): - Output only. HTTP URI where you can download - the content. - """ - - content: str = proto.Field( - proto.STRING, - number=1, - ) - mime_type: str = proto.Field( - proto.STRING, - number=2, - ) - content_uri: str = proto.Field( - proto.STRING, - number=4, - ) - - -class DocumentDimensions(proto.Message): - r"""Message that describes dimension of a document. - - Attributes: - unit (google.cloud.automl_v1.types.DocumentDimensions.DocumentDimensionUnit): - Unit of the dimension. - width (float): - Width value of the document, works together - with the unit. - height (float): - Height value of the document, works together - with the unit. - """ - class DocumentDimensionUnit(proto.Enum): - r"""Unit of the document dimension. - - Values: - DOCUMENT_DIMENSION_UNIT_UNSPECIFIED (0): - Should not be used. - INCH (1): - Document dimension is measured in inches. - CENTIMETER (2): - Document dimension is measured in - centimeters. - POINT (3): - Document dimension is measured in points. 72 - points = 1 inch. - """ - DOCUMENT_DIMENSION_UNIT_UNSPECIFIED = 0 - INCH = 1 - CENTIMETER = 2 - POINT = 3 - - unit: DocumentDimensionUnit = proto.Field( - proto.ENUM, - number=1, - enum=DocumentDimensionUnit, - ) - width: float = proto.Field( - proto.FLOAT, - number=2, - ) - height: float = proto.Field( - proto.FLOAT, - number=3, - ) - - -class Document(proto.Message): - r"""A structured text document e.g. a PDF. - - Attributes: - input_config (google.cloud.automl_v1.types.DocumentInputConfig): - An input config specifying the content of the - document. - document_text (google.cloud.automl_v1.types.TextSnippet): - The plain text version of this document. - layout (MutableSequence[google.cloud.automl_v1.types.Document.Layout]): - Describes the layout of the document. Sorted by - [page_number][]. - document_dimensions (google.cloud.automl_v1.types.DocumentDimensions): - The dimensions of the page in the document. - page_count (int): - Number of pages in the document. - """ - - class Layout(proto.Message): - r"""Describes the layout information of a - [text_segment][google.cloud.automl.v1.Document.Layout.text_segment] - in the document. - - Attributes: - text_segment (google.cloud.automl_v1.types.TextSegment): - Text Segment that represents a segment in - [document_text][google.cloud.automl.v1p1beta.Document.document_text]. - page_number (int): - Page number of the - [text_segment][google.cloud.automl.v1.Document.Layout.text_segment] - in the original document, starts from 1. - bounding_poly (google.cloud.automl_v1.types.BoundingPoly): - The position of the - [text_segment][google.cloud.automl.v1.Document.Layout.text_segment] - in the page. Contains exactly 4 - [normalized_vertices][google.cloud.automl.v1p1beta.BoundingPoly.normalized_vertices] - and they are connected by edges in the order provided, which - will represent a rectangle parallel to the frame. The - [NormalizedVertex-s][google.cloud.automl.v1p1beta.NormalizedVertex] - are relative to the page. Coordinates are based on top-left - as point (0,0). - text_segment_type (google.cloud.automl_v1.types.Document.Layout.TextSegmentType): - The type of the - [text_segment][google.cloud.automl.v1.Document.Layout.text_segment] - in document. 
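One concrete reading of the ``DocumentDimensions`` unit enum above: ``POINT`` is defined as 72 points to the inch, so converting a page dimension is a single division (the helper below is illustrative, not part of the library):

.. code-block:: python

    POINTS_PER_INCH = 72  # per the DocumentDimensionUnit.POINT docstring

    def points_to_inches(points: float) -> float:
        return points / POINTS_PER_INCH

    assert points_to_inches(612.0) == 8.5  # a US Letter page width in points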
- """ - class TextSegmentType(proto.Enum): - r"""The type of TextSegment in the context of the original - document. - - Values: - TEXT_SEGMENT_TYPE_UNSPECIFIED (0): - Should not be used. - TOKEN (1): - The text segment is a token. e.g. word. - PARAGRAPH (2): - The text segment is a paragraph. - FORM_FIELD (3): - The text segment is a form field. - FORM_FIELD_NAME (4): - The text segment is the name part of a form field. It will - be treated as child of another FORM_FIELD TextSegment if its - span is subspan of another TextSegment with type FORM_FIELD. - FORM_FIELD_CONTENTS (5): - The text segment is the text content part of a form field. - It will be treated as child of another FORM_FIELD - TextSegment if its span is subspan of another TextSegment - with type FORM_FIELD. - TABLE (6): - The text segment is a whole table, including - headers, and all rows. - TABLE_HEADER (7): - The text segment is a table's headers. It - will be treated as child of another TABLE - TextSegment if its span is subspan of another - TextSegment with type TABLE. - TABLE_ROW (8): - The text segment is a row in table. It will - be treated as child of another TABLE TextSegment - if its span is subspan of another TextSegment - with type TABLE. - TABLE_CELL (9): - The text segment is a cell in table. It will be treated as - child of another TABLE_ROW TextSegment if its span is - subspan of another TextSegment with type TABLE_ROW. - """ - TEXT_SEGMENT_TYPE_UNSPECIFIED = 0 - TOKEN = 1 - PARAGRAPH = 2 - FORM_FIELD = 3 - FORM_FIELD_NAME = 4 - FORM_FIELD_CONTENTS = 5 - TABLE = 6 - TABLE_HEADER = 7 - TABLE_ROW = 8 - TABLE_CELL = 9 - - text_segment: gca_text_segment.TextSegment = proto.Field( - proto.MESSAGE, - number=1, - message=gca_text_segment.TextSegment, - ) - page_number: int = proto.Field( - proto.INT32, - number=2, - ) - bounding_poly: geometry.BoundingPoly = proto.Field( - proto.MESSAGE, - number=3, - message=geometry.BoundingPoly, - ) - text_segment_type: 'Document.Layout.TextSegmentType' = proto.Field( - proto.ENUM, - number=4, - enum='Document.Layout.TextSegmentType', - ) - - input_config: io.DocumentInputConfig = proto.Field( - proto.MESSAGE, - number=1, - message=io.DocumentInputConfig, - ) - document_text: 'TextSnippet' = proto.Field( - proto.MESSAGE, - number=2, - message='TextSnippet', - ) - layout: MutableSequence[Layout] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=Layout, - ) - document_dimensions: 'DocumentDimensions' = proto.Field( - proto.MESSAGE, - number=4, - message='DocumentDimensions', - ) - page_count: int = proto.Field( - proto.INT32, - number=5, - ) - - -class ExamplePayload(proto.Message): - r"""Example data used for training or prediction. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - image (google.cloud.automl_v1.types.Image): - Example image. - - This field is a member of `oneof`_ ``payload``. - text_snippet (google.cloud.automl_v1.types.TextSnippet): - Example text. - - This field is a member of `oneof`_ ``payload``. - document (google.cloud.automl_v1.types.Document): - Example document. - - This field is a member of `oneof`_ ``payload``. 
- """ - - image: 'Image' = proto.Field( - proto.MESSAGE, - number=1, - oneof='payload', - message='Image', - ) - text_snippet: 'TextSnippet' = proto.Field( - proto.MESSAGE, - number=2, - oneof='payload', - message='TextSnippet', - ) - document: 'Document' = proto.Field( - proto.MESSAGE, - number=4, - oneof='payload', - message='Document', - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/dataset.py b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/dataset.py deleted file mode 100644 index b655e0aa4925..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/dataset.py +++ /dev/null @@ -1,181 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.automl_v1.types import image -from google.cloud.automl_v1.types import text -from google.cloud.automl_v1.types import translation -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.automl.v1', - manifest={ - 'Dataset', - }, -) - - -class Dataset(proto.Message): - r"""A workspace for solving a single, particular machine learning - (ML) problem. A workspace contains examples that may be - annotated. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - translation_dataset_metadata (google.cloud.automl_v1.types.TranslationDatasetMetadata): - Metadata for a dataset used for translation. - - This field is a member of `oneof`_ ``dataset_metadata``. - image_classification_dataset_metadata (google.cloud.automl_v1.types.ImageClassificationDatasetMetadata): - Metadata for a dataset used for image - classification. - - This field is a member of `oneof`_ ``dataset_metadata``. - text_classification_dataset_metadata (google.cloud.automl_v1.types.TextClassificationDatasetMetadata): - Metadata for a dataset used for text - classification. - - This field is a member of `oneof`_ ``dataset_metadata``. - image_object_detection_dataset_metadata (google.cloud.automl_v1.types.ImageObjectDetectionDatasetMetadata): - Metadata for a dataset used for image object - detection. - - This field is a member of `oneof`_ ``dataset_metadata``. - text_extraction_dataset_metadata (google.cloud.automl_v1.types.TextExtractionDatasetMetadata): - Metadata for a dataset used for text - extraction. - - This field is a member of `oneof`_ ``dataset_metadata``. - text_sentiment_dataset_metadata (google.cloud.automl_v1.types.TextSentimentDatasetMetadata): - Metadata for a dataset used for text - sentiment. 
- - This field is a member of `oneof`_ ``dataset_metadata``. - name (str): - Output only. The resource name of the dataset. Form: - ``projects/{project_id}/locations/{location_id}/datasets/{dataset_id}`` - display_name (str): - Required. The name of the dataset to show in the interface. - The name can be up to 32 characters long and can consist - only of ASCII Latin letters A-Z and a-z, underscores (_), - and ASCII digits 0-9. - description (str): - User-provided description of the dataset. The - description can be up to 25000 characters long. - example_count (int): - Output only. The number of examples in the - dataset. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Timestamp when this dataset was - created. - etag (str): - Used to perform consistent read-modify-write - updates. If not set, a blind "overwrite" update - happens. - labels (MutableMapping[str, str]): - Optional. The labels with user-defined - metadata to organize your dataset. - Label keys and values can be no longer than 64 - characters (Unicode codepoints), can only - contain lowercase letters, numeric characters, - underscores and dashes. International characters - are allowed. Label values are optional. Label - keys must start with a letter. - - See https://goo.gl/xmQnxf for more information - on and examples of labels. - """ - - translation_dataset_metadata: translation.TranslationDatasetMetadata = proto.Field( - proto.MESSAGE, - number=23, - oneof='dataset_metadata', - message=translation.TranslationDatasetMetadata, - ) - image_classification_dataset_metadata: image.ImageClassificationDatasetMetadata = proto.Field( - proto.MESSAGE, - number=24, - oneof='dataset_metadata', - message=image.ImageClassificationDatasetMetadata, - ) - text_classification_dataset_metadata: text.TextClassificationDatasetMetadata = proto.Field( - proto.MESSAGE, - number=25, - oneof='dataset_metadata', - message=text.TextClassificationDatasetMetadata, - ) - image_object_detection_dataset_metadata: image.ImageObjectDetectionDatasetMetadata = proto.Field( - proto.MESSAGE, - number=26, - oneof='dataset_metadata', - message=image.ImageObjectDetectionDatasetMetadata, - ) - text_extraction_dataset_metadata: text.TextExtractionDatasetMetadata = proto.Field( - proto.MESSAGE, - number=28, - oneof='dataset_metadata', - message=text.TextExtractionDatasetMetadata, - ) - text_sentiment_dataset_metadata: text.TextSentimentDatasetMetadata = proto.Field( - proto.MESSAGE, - number=30, - oneof='dataset_metadata', - message=text.TextSentimentDatasetMetadata, - ) - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - description: str = proto.Field( - proto.STRING, - number=3, - ) - example_count: int = proto.Field( - proto.INT32, - number=21, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=14, - message=timestamp_pb2.Timestamp, - ) - etag: str = proto.Field( - proto.STRING, - number=17, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=39, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/detection.py b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/detection.py deleted file mode 100644 index c15b57a0e55f..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/detection.py +++ /dev/null @@ -1,165 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 
Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.automl_v1.types import geometry - - -__protobuf__ = proto.module( - package='google.cloud.automl.v1', - manifest={ - 'ImageObjectDetectionAnnotation', - 'BoundingBoxMetricsEntry', - 'ImageObjectDetectionEvaluationMetrics', - }, -) - - -class ImageObjectDetectionAnnotation(proto.Message): - r"""Annotation details for image object detection. - - Attributes: - bounding_box (google.cloud.automl_v1.types.BoundingPoly): - Output only. The rectangle representing the - object location. - score (float): - Output only. The confidence that this annotation is positive - for the parent example, value in [0, 1], higher means higher - positivity confidence. - """ - - bounding_box: geometry.BoundingPoly = proto.Field( - proto.MESSAGE, - number=1, - message=geometry.BoundingPoly, - ) - score: float = proto.Field( - proto.FLOAT, - number=2, - ) - - -class BoundingBoxMetricsEntry(proto.Message): - r"""Bounding box matching model metrics for a single - intersection-over-union threshold and multiple label match - confidence thresholds. - - Attributes: - iou_threshold (float): - Output only. The intersection-over-union - threshold value used to compute this metrics - entry. - mean_average_precision (float): - Output only. The mean average precision, most often close to - au_prc. - confidence_metrics_entries (MutableSequence[google.cloud.automl_v1.types.BoundingBoxMetricsEntry.ConfidenceMetricsEntry]): - Output only. Metrics for each label-match - confidence_threshold from - 0.05,0.10,...,0.95,0.96,0.97,0.98,0.99. Precision-recall - curve is derived from them. - """ - - class ConfidenceMetricsEntry(proto.Message): - r"""Metrics for a single confidence threshold. - - Attributes: - confidence_threshold (float): - Output only. The confidence threshold value - used to compute the metrics. - recall (float): - Output only. Recall under the given - confidence threshold. - precision (float): - Output only. Precision under the given - confidence threshold. - f1_score (float): - Output only. The harmonic mean of recall and - precision. - """ - - confidence_threshold: float = proto.Field( - proto.FLOAT, - number=1, - ) - recall: float = proto.Field( - proto.FLOAT, - number=2, - ) - precision: float = proto.Field( - proto.FLOAT, - number=3, - ) - f1_score: float = proto.Field( - proto.FLOAT, - number=4, - ) - - iou_threshold: float = proto.Field( - proto.FLOAT, - number=1, - ) - mean_average_precision: float = proto.Field( - proto.FLOAT, - number=2, - ) - confidence_metrics_entries: MutableSequence[ConfidenceMetricsEntry] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=ConfidenceMetricsEntry, - ) - - -class ImageObjectDetectionEvaluationMetrics(proto.Message): - r"""Model evaluation metrics for image object detection problems. - Evaluates prediction quality of labeled bounding boxes. 
- - Attributes: - evaluated_bounding_box_count (int): - Output only. The total number of bounding - boxes (i.e. summed over all images) the ground - truth used to create this evaluation had. - bounding_box_metrics_entries (MutableSequence[google.cloud.automl_v1.types.BoundingBoxMetricsEntry]): - Output only. The bounding boxes match metrics - for each Intersection-over-union threshold - 0.05,0.10,...,0.95,0.96,0.97,0.98,0.99 and each - label confidence threshold - 0.05,0.10,...,0.95,0.96,0.97,0.98,0.99 pair. - bounding_box_mean_average_precision (float): - Output only. The single metric for bounding boxes - evaluation: the mean_average_precision averaged over all - bounding_box_metrics_entries. - """ - - evaluated_bounding_box_count: int = proto.Field( - proto.INT32, - number=1, - ) - bounding_box_metrics_entries: MutableSequence['BoundingBoxMetricsEntry'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='BoundingBoxMetricsEntry', - ) - bounding_box_mean_average_precision: float = proto.Field( - proto.FLOAT, - number=3, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/geometry.py b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/geometry.py deleted file mode 100644 index 5df888288dbc..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/geometry.py +++ /dev/null @@ -1,75 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.automl.v1', - manifest={ - 'NormalizedVertex', - 'BoundingPoly', - }, -) - - -class NormalizedVertex(proto.Message): - r"""A vertex represents a 2D point in the image. - The normalized vertex coordinates are between 0 and 1, as fractions - relative to the original plane (image, video). E.g. if the plane - (e.g. the whole image) had size 10 x 20, then a point with - normalized coordinates (0.1, 0.3) would be at the position (1, - 6) on that plane. - - Attributes: - x (float): - Required. Horizontal coordinate. - y (float): - Required. Vertical coordinate. - """ - - x: float = proto.Field( - proto.FLOAT, - number=1, - ) - y: float = proto.Field( - proto.FLOAT, - number=2, - ) - - -class BoundingPoly(proto.Message): - r"""A bounding polygon of a detected object on a plane. On output both - vertices and normalized_vertices are provided. The polygon is formed - by connecting vertices in the order they are listed. - - Attributes: - normalized_vertices (MutableSequence[google.cloud.automl_v1.types.NormalizedVertex]): - Output only. The bounding polygon normalized - vertices.
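For orientation, a minimal construction sketch for the geometry types above (not part of the deleted files; ``google.cloud.automl_v1`` re-exports these types, and the coordinates are invented for illustration):

.. code-block:: python

    from google.cloud import automl_v1

    # A hypothetical box covering the center of an image; each coordinate is
    # a 0-1 fraction of the image size, per NormalizedVertex above.
    box = automl_v1.BoundingPoly(
        normalized_vertices=[
            automl_v1.NormalizedVertex(x=0.25, y=0.25),
            automl_v1.NormalizedVertex(x=0.25, y=0.75),
            automl_v1.NormalizedVertex(x=0.75, y=0.75),
            automl_v1.NormalizedVertex(x=0.75, y=0.25),
        ]
    )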
- """ - - normalized_vertices: MutableSequence['NormalizedVertex'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='NormalizedVertex', - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/image.py b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/image.py deleted file mode 100644 index ec7cb266b036..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/image.py +++ /dev/null @@ -1,318 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.automl_v1.types import classification - - -__protobuf__ = proto.module( - package='google.cloud.automl.v1', - manifest={ - 'ImageClassificationDatasetMetadata', - 'ImageObjectDetectionDatasetMetadata', - 'ImageClassificationModelMetadata', - 'ImageObjectDetectionModelMetadata', - 'ImageClassificationModelDeploymentMetadata', - 'ImageObjectDetectionModelDeploymentMetadata', - }, -) - - -class ImageClassificationDatasetMetadata(proto.Message): - r"""Dataset metadata that is specific to image classification. - - Attributes: - classification_type (google.cloud.automl_v1.types.ClassificationType): - Required. Type of the classification problem. - """ - - classification_type: classification.ClassificationType = proto.Field( - proto.ENUM, - number=1, - enum=classification.ClassificationType, - ) - - -class ImageObjectDetectionDatasetMetadata(proto.Message): - r"""Dataset metadata specific to image object detection. - """ - - -class ImageClassificationModelMetadata(proto.Message): - r"""Model metadata for image classification. - - Attributes: - base_model_id (str): - Optional. The ID of the ``base`` model. If it is specified, - the new model will be created based on the ``base`` model. - Otherwise, the new model will be created from scratch. The - ``base`` model must be in the same ``project`` and - ``location`` as the new model to create, and have the same - ``model_type``. - train_budget_milli_node_hours (int): - Optional. The train budget of creating this model, expressed - in milli node hours i.e. 1,000 value in this field means 1 - node hour. The actual ``train_cost`` will be equal or less - than this value. If further model training ceases to provide - any improvements, it will stop without using full budget and - the stop_reason will be ``MODEL_CONVERGED``. Note, node_hour - = actual_hour \* number_of_nodes_invovled. For model type - ``cloud``\ (default), the train budget must be between 8,000 - and 800,000 milli node hours, inclusive. The default value - is 192, 000 which represents one day in wall time. 
For model - type ``mobile-low-latency-1``, ``mobile-versatile-1``, - ``mobile-high-accuracy-1``, - ``mobile-core-ml-low-latency-1``, - ``mobile-core-ml-versatile-1``, - ``mobile-core-ml-high-accuracy-1``, the train budget must be - between 1,000 and 100,000 milli node hours, inclusive. The - default value is 24,000, which represents one day in wall - time. - train_cost_milli_node_hours (int): - Output only. The actual train cost of - creating this model, expressed in milli node - hours, i.e. a value of 1,000 in this field means 1 - node hour. Guaranteed to not exceed the train - budget. - stop_reason (str): - Output only. The reason that this create model operation - stopped, e.g. ``BUDGET_REACHED``, ``MODEL_CONVERGED``. - model_type (str): - Optional. Type of the model. The available values are: - - - ``cloud`` - Model to be used via prediction calls to - AutoML API. This is the default value. - - ``mobile-low-latency-1`` - A model that, in addition to - providing prediction via AutoML API, can also be exported - (see - [AutoMl.ExportModel][google.cloud.automl.v1.AutoMl.ExportModel]) - and used on a mobile or edge device with TensorFlow - afterwards. Expected to have low latency, but may have - lower prediction quality than other models. - - ``mobile-versatile-1`` - A model that, in addition to - providing prediction via AutoML API, can also be exported - (see - [AutoMl.ExportModel][google.cloud.automl.v1.AutoMl.ExportModel]) - and used on a mobile or edge device with TensorFlow - afterwards. - - ``mobile-high-accuracy-1`` - A model that, in addition to - providing prediction via AutoML API, can also be exported - (see - [AutoMl.ExportModel][google.cloud.automl.v1.AutoMl.ExportModel]) - and used on a mobile or edge device with TensorFlow - afterwards. Expected to have a higher latency, but should - also have a higher prediction quality than other models. - - ``mobile-core-ml-low-latency-1`` - A model that, in - addition to providing prediction via AutoML API, can also - be exported (see - [AutoMl.ExportModel][google.cloud.automl.v1.AutoMl.ExportModel]) - and used on a mobile device with Core ML afterwards. - Expected to have low latency, but may have lower - prediction quality than other models. - - ``mobile-core-ml-versatile-1`` - A model that, in - addition to providing prediction via AutoML API, can also - be exported (see - [AutoMl.ExportModel][google.cloud.automl.v1.AutoMl.ExportModel]) - and used on a mobile device with Core ML afterwards. - - ``mobile-core-ml-high-accuracy-1`` - A model that, in - addition to providing prediction via AutoML API, can also - be exported (see - [AutoMl.ExportModel][google.cloud.automl.v1.AutoMl.ExportModel]) - and used on a mobile device with Core ML afterwards. - Expected to have a higher latency, but should also have a - higher prediction quality than other models. - node_qps (float): - Output only. An approximate number of online - prediction QPS that can be supported by this - model per node on which it is deployed. - node_count (int): - Output only. The number of nodes this model is deployed on. - A node is an abstraction of a machine resource, which can - handle online prediction QPS as given in the node_qps field.
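To make the budget fields above concrete, a minimal model-creation sketch (assuming the published ``google.cloud.automl_v1`` client; the project, location, dataset ID, and display name are hypothetical):

.. code-block:: python

    from google.cloud import automl_v1

    client = automl_v1.AutoMlClient()
    parent = client.common_location_path("my-project", "us-central1")  # hypothetical
    model = automl_v1.Model(
        display_name="flowers_classifier",  # hypothetical
        dataset_id="ICN1234567890",         # hypothetical dataset ID
        image_classification_model_metadata=automl_v1.ImageClassificationModelMetadata(
            # One node-day on the default ``cloud`` model type.
            train_budget_milli_node_hours=192_000,
        ),
    )
    # create_model returns a long-running operation; result() blocks until done.
    operation = client.create_model(parent=parent, model=model)
    response = operation.result()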
- """ - - base_model_id: str = proto.Field( - proto.STRING, - number=1, - ) - train_budget_milli_node_hours: int = proto.Field( - proto.INT64, - number=16, - ) - train_cost_milli_node_hours: int = proto.Field( - proto.INT64, - number=17, - ) - stop_reason: str = proto.Field( - proto.STRING, - number=5, - ) - model_type: str = proto.Field( - proto.STRING, - number=7, - ) - node_qps: float = proto.Field( - proto.DOUBLE, - number=13, - ) - node_count: int = proto.Field( - proto.INT64, - number=14, - ) - - -class ImageObjectDetectionModelMetadata(proto.Message): - r"""Model metadata specific to image object detection. - - Attributes: - model_type (str): - Optional. Type of the model. The available values are: - - - ``cloud-high-accuracy-1`` - (default) A model to be used - via prediction calls to AutoML API. Expected to have a - higher latency, but should also have a higher prediction - quality than other models. - - ``cloud-low-latency-1`` - A model to be used via - prediction calls to AutoML API. Expected to have low - latency, but may have lower prediction quality than other - models. - - ``mobile-low-latency-1`` - A model that, in addition to - providing prediction via AutoML API, can also be exported - (see - [AutoMl.ExportModel][google.cloud.automl.v1.AutoMl.ExportModel]) - and used on a mobile or edge device with TensorFlow - afterwards. Expected to have low latency, but may have - lower prediction quality than other models. - - ``mobile-versatile-1`` - A model that, in addition to - providing prediction via AutoML API, can also be exported - (see - [AutoMl.ExportModel][google.cloud.automl.v1.AutoMl.ExportModel]) - and used on a mobile or edge device with TensorFlow - afterwards. - - ``mobile-high-accuracy-1`` - A model that, in addition to - providing prediction via AutoML API, can also be exported - (see - [AutoMl.ExportModel][google.cloud.automl.v1.AutoMl.ExportModel]) - and used on a mobile or edge device with TensorFlow - afterwards. Expected to have a higher latency, but should - also have a higher prediction quality than other models. - node_count (int): - Output only. The number of nodes this model is deployed on. - A node is an abstraction of a machine resource, which can - handle online prediction QPS as given in the qps_per_node - field. - node_qps (float): - Output only. An approximate number of online - prediction QPS that can be supported by this - model per each node on which it is deployed. - stop_reason (str): - Output only. The reason that this create model operation - stopped, e.g. ``BUDGET_REACHED``, ``MODEL_CONVERGED``. - train_budget_milli_node_hours (int): - Optional. The train budget of creating this model, expressed - in milli node hours i.e. 1,000 value in this field means 1 - node hour. The actual ``train_cost`` will be equal or less - than this value. If further model training ceases to provide - any improvements, it will stop without using full budget and - the stop_reason will be ``MODEL_CONVERGED``. Note, node_hour - = actual_hour \* number_of_nodes_invovled. For model type - ``cloud-high-accuracy-1``\ (default) and - ``cloud-low-latency-1``, the train budget must be between - 20,000 and 900,000 milli node hours, inclusive. The default - value is 216, 000 which represents one day in wall time. 
For - model type ``mobile-low-latency-1``, ``mobile-versatile-1``, - ``mobile-high-accuracy-1``, - ``mobile-core-ml-low-latency-1``, - ``mobile-core-ml-versatile-1``, - ``mobile-core-ml-high-accuracy-1``, the train budget must be - between 1,000 and 100,000 milli node hours, inclusive. The - default value is 24,000, which represents one day in wall - time. - train_cost_milli_node_hours (int): - Output only. The actual train cost of - creating this model, expressed in milli node - hours, i.e. a value of 1,000 in this field means 1 - node hour. Guaranteed to not exceed the train - budget. - """ - - model_type: str = proto.Field( - proto.STRING, - number=1, - ) - node_count: int = proto.Field( - proto.INT64, - number=3, - ) - node_qps: float = proto.Field( - proto.DOUBLE, - number=4, - ) - stop_reason: str = proto.Field( - proto.STRING, - number=5, - ) - train_budget_milli_node_hours: int = proto.Field( - proto.INT64, - number=6, - ) - train_cost_milli_node_hours: int = proto.Field( - proto.INT64, - number=7, - ) - - -class ImageClassificationModelDeploymentMetadata(proto.Message): - r"""Model deployment metadata specific to Image Classification. - - Attributes: - node_count (int): - Input only. The number of nodes to deploy the model on. A - node is an abstraction of a machine resource, which can - handle online prediction QPS as given in the model's - [node_qps][google.cloud.automl.v1.ImageClassificationModelMetadata.node_qps]. - Must be between 1 and 100, inclusive on both ends. - """ - - node_count: int = proto.Field( - proto.INT64, - number=1, - ) - - -class ImageObjectDetectionModelDeploymentMetadata(proto.Message): - r"""Model deployment metadata specific to Image Object Detection. - - Attributes: - node_count (int): - Input only. The number of nodes to deploy the model on. A - node is an abstraction of a machine resource, which can - handle online prediction QPS as given in the model's - [qps_per_node][google.cloud.automl.v1.ImageObjectDetectionModelMetadata.qps_per_node]. - Must be between 1 and 100, inclusive on both ends. - """ - - node_count: int = proto.Field( - proto.INT64, - number=1, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/io.py b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/io.py deleted file mode 100644 index 185cae251c8c..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/io.py +++ /dev/null @@ -1,1523 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
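Before the I/O definitions that follow, a short deployment sketch tying together the metadata just described (again assuming the published ``google.cloud.automl_v1`` client; the resource IDs are placeholders):

.. code-block:: python

    from google.cloud import automl_v1

    client = automl_v1.AutoMlClient()
    model_name = client.model_path("my-project", "us-central1", "ICN123")  # hypothetical
    operation = client.deploy_model(
        request=automl_v1.DeployModelRequest(
            name=model_name,
            image_classification_model_deployment_metadata=automl_v1.ImageClassificationModelDeploymentMetadata(
                node_count=2,  # must be between 1 and 100, inclusive
            ),
        )
    )
    operation.result()  # deploy_model is a long-running operation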
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.automl.v1', - manifest={ - 'InputConfig', - 'BatchPredictInputConfig', - 'DocumentInputConfig', - 'OutputConfig', - 'BatchPredictOutputConfig', - 'ModelExportOutputConfig', - 'GcsSource', - 'GcsDestination', - }, -) - - -class InputConfig(proto.Message): - r"""Input configuration for - [AutoMl.ImportData][google.cloud.automl.v1.AutoMl.ImportData] - action. - - The format of the input depends on the dataset_metadata of the - Dataset into which the import is happening. As input source the - [gcs_source][google.cloud.automl.v1.InputConfig.gcs_source] is - expected, unless specified otherwise. Additionally, any input .CSV - file by itself must be 100MB or smaller, unless specified otherwise. - If an "example" file (that is, image, video etc.) with identical - content (even if it had different ``GCS_FILE_PATH``) is mentioned - multiple times, then its label, bounding boxes etc. are appended. - The same file should always be provided with the same ``ML_USE`` and - ``GCS_FILE_PATH``; if it is not, then these values are - nondeterministically selected from the given ones. - - The formats are represented in EBNF with commas being literal and - with non-terminal symbols defined near the end of this comment. The - formats are: - - AutoML Vision - ^^^^^^^^^^^^^ - - Classification - '''''''''''''' - - See `Preparing your training - data `__ for - more information. - - CSV file(s) with each line in format: - - :: - - ML_USE,GCS_FILE_PATH,LABEL,LABEL,... - - - ``ML_USE`` - Identifies the data set that the current row (file) - applies to. This value can be one of the following: - - - ``TRAIN`` - Rows in this file are used to train the model. - - ``TEST`` - Rows in this file are used to test the model during - training. - - ``UNASSIGNED`` - Rows in this file are not categorized. They - are automatically divided into train and test data: 80% for - training and 20% for testing. - - - ``GCS_FILE_PATH`` - The Google Cloud Storage location of an image - of up to 30MB in size. Supported extensions: .JPEG, .GIF, .PNG, - .WEBP, .BMP, .TIFF, .ICO. - - - ``LABEL`` - A label that identifies the object in the image. - - For the ``MULTICLASS`` classification type, at most one ``LABEL`` is - allowed per image. If an image has not yet been labeled, then it - should be mentioned just once with no ``LABEL``. - - Some sample rows: - - :: - - TRAIN,gs://folder/image1.jpg,daisy - TEST,gs://folder/image2.jpg,dandelion,tulip,rose - UNASSIGNED,gs://folder/image3.jpg,daisy - UNASSIGNED,gs://folder/image4.jpg - - Object Detection - '''''''''''''''' - - See `Preparing your training - data `__ - for more information. - - CSV file(s) with each line in format: - - :: - - ML_USE,GCS_FILE_PATH,[LABEL],(BOUNDING_BOX | ,,,,,,,) - - - ``ML_USE`` - Identifies the data set that the current row (file) - applies to. This value can be one of the following: - - - ``TRAIN`` - Rows in this file are used to train the model. - - ``TEST`` - Rows in this file are used to test the model during - training. - - ``UNASSIGNED`` - Rows in this file are not categorized. They - are automatically divided into train and test data: 80% for - training and 20% for testing. - - - ``GCS_FILE_PATH`` - The Google Cloud Storage location of an image - of up to 30MB in size. Supported extensions: .JPEG, .GIF, .PNG. - Each image is assumed to be exhaustively labeled.
- - - ``LABEL`` - A label that identifies the object in the image - specified by the ``BOUNDING_BOX``. - - - ``BOUNDING_BOX`` - The vertices of an object in the example - image. The minimum allowed ``BOUNDING_BOX`` edge length is 0.01, - and no more than 500 ``BOUNDING_BOX`` instances per image are - allowed (one ``BOUNDING_BOX`` per line). If an image has no - objects of interest, then it should be mentioned just once, with no - LABEL and with ",,,,,,," in place of the ``BOUNDING_BOX``. - - **Four sample rows:** - - :: - - TRAIN,gs://folder/image1.png,car,0.1,0.1,,,0.3,0.3,, - TRAIN,gs://folder/image1.png,bike,.7,.6,,,.8,.9,, - UNASSIGNED,gs://folder/im2.png,car,0.1,0.1,0.2,0.1,0.2,0.3,0.1,0.3 - TEST,gs://folder/im3.png,,,,,,,,, - - AutoML Video Intelligence - ^^^^^^^^^^^^^^^^^^^^^^^^^ - - Classification - '''''''''''''' - - See `Preparing your training - data `__ - for more information. - - CSV file(s) with each line in format: - - :: - - ML_USE,GCS_FILE_PATH - - For ``ML_USE``, do not use ``VALIDATE``. - - ``GCS_FILE_PATH`` is the path to another .csv file that describes - training examples for a given ``ML_USE``, using the following row - format: - - :: - - GCS_FILE_PATH,(LABEL,TIME_SEGMENT_START,TIME_SEGMENT_END | ,,) - - Here ``GCS_FILE_PATH`` leads to a video of up to 50GB in size and up - to 3h duration. Supported extensions: .MOV, .MPEG4, .MP4, .AVI. - - ``TIME_SEGMENT_START`` and ``TIME_SEGMENT_END`` must be within the - length of the video, and the end time must be after the start time. - Any segment of a video which has one or more labels on it is - considered a hard negative for all other labels. Any segment with no - labels on it is considered to be unknown. If a whole video is - unknown, then it should be mentioned just once with ",," in place of - ``LABEL, TIME_SEGMENT_START,TIME_SEGMENT_END``. - - Sample top level CSV file: - - :: - - TRAIN,gs://folder/train_videos.csv - TEST,gs://folder/test_videos.csv - UNASSIGNED,gs://folder/other_videos.csv - - Sample rows of a CSV file for a particular ML_USE: - - :: - - gs://folder/video1.avi,car,120,180.000021 - gs://folder/video1.avi,bike,150,180.000021 - gs://folder/vid2.avi,car,0,60.5 - gs://folder/vid3.avi,,, - - Object Tracking - ''''''''''''''' - - See `Preparing your training - data `__ - for more information. - - CSV file(s) with each line in format: - - :: - - ML_USE,GCS_FILE_PATH - - For ``ML_USE``, do not use ``VALIDATE``. - - ``GCS_FILE_PATH`` is the path to another .csv file that describes - training examples for a given ``ML_USE``, using the following row - format: - - :: - - GCS_FILE_PATH,LABEL,[INSTANCE_ID],TIMESTAMP,BOUNDING_BOX - - or - - :: - - GCS_FILE_PATH,,,,,,,,,, - - Here ``GCS_FILE_PATH`` leads to a video of up to 50GB in size and up - to 3h duration. Supported extensions: .MOV, .MPEG4, .MP4, .AVI. - Providing ``INSTANCE_ID``\ s can help to obtain a better model. When - a specific labeled entity leaves the video frame and shows up again - later, it is not required, albeit preferable, that the same - ``INSTANCE_ID`` be given to it. - - ``TIMESTAMP`` must be within the length of the video, and the - ``BOUNDING_BOX`` is assumed to be drawn on the video frame closest - to the ``TIMESTAMP``. Any frame mentioned by a ``TIMESTAMP`` is - expected to be exhaustively labeled, and no more than 500 - ``BOUNDING_BOX``-es per frame are allowed. If a whole video is - unknown, then it should be mentioned just once with ",,,,,,,,,," in - place of ``LABEL, [INSTANCE_ID],TIMESTAMP,BOUNDING_BOX``.
- - Sample top level CSV file: - - :: - - TRAIN,gs://folder/train_videos.csv - TEST,gs://folder/test_videos.csv - UNASSIGNED,gs://folder/other_videos.csv - - Seven sample rows of a CSV file for a particular ML_USE: - - :: - - gs://folder/video1.avi,car,1,12.10,0.8,0.8,0.9,0.8,0.9,0.9,0.8,0.9 - gs://folder/video1.avi,car,1,12.90,0.4,0.8,0.5,0.8,0.5,0.9,0.4,0.9 - gs://folder/video1.avi,car,2,12.10,.4,.2,.5,.2,.5,.3,.4,.3 - gs://folder/video1.avi,car,2,12.90,.8,.2,,,.9,.3,, - gs://folder/video1.avi,bike,,12.50,.45,.45,,,.55,.55,, - gs://folder/video2.avi,car,1,0,.1,.9,,,.9,.1,, - gs://folder/video2.avi,,,,,,,,,,, - - AutoML Natural Language - ^^^^^^^^^^^^^^^^^^^^^^^ - - Entity Extraction - ''''''''''''''''' - - See `Preparing your training - data `__ for - more information. - - One or more CSV file(s) with each line in the following format: - - :: - - ML_USE,GCS_FILE_PATH - - - ``ML_USE`` - Identifies the data set that the current row (file) - applies to. This value can be one of the following: - - - ``TRAIN`` - Rows in this file are used to train the model. - - ``TEST`` - Rows in this file are used to test the model during - training. - - ``UNASSIGNED`` - Rows in this file are not categorized. They - are automatically divided into train and test data: 80% for - training and 20% for testing. - - - ``GCS_FILE_PATH`` - Identifies a JSON Lines (.JSONL) file stored - in Google Cloud Storage that contains in-line text as - documents for model training. - - After the training data set has been determined from the ``TRAIN`` - and ``UNASSIGNED`` CSV files, the training data is divided into - train and validation data sets: 70% for training and 30% for - validation. - - For example: - - :: - - TRAIN,gs://folder/file1.jsonl - VALIDATE,gs://folder/file2.jsonl - TEST,gs://folder/file3.jsonl - - **In-line JSONL files** - - In-line .JSONL files contain, per line, a JSON document that wraps a - [``text_snippet``][google.cloud.automl.v1.TextSnippet] field - followed by one or more - [``annotations``][google.cloud.automl.v1.AnnotationPayload] fields, - which have ``display_name`` and ``text_extraction`` fields to - describe the entity from the text snippet. Multiple JSON documents - can be separated using line breaks (\n). - - The supplied text must be annotated exhaustively. For example, if - you include the text "horse", but do not label it as "animal", then - "horse" is assumed to not be an "animal". - - Any given text snippet content must have 30,000 characters or less, - and also be UTF-8 NFC encoded. ASCII is accepted as it is UTF-8 NFC - encoded. - - For example: - - :: - - { - "text_snippet": { - "content": "dog car cat" - }, - "annotations": [ - { - "display_name": "animal", - "text_extraction": { - "text_segment": {"start_offset": 0, "end_offset": 2} - } - }, - { - "display_name": "vehicle", - "text_extraction": { - "text_segment": {"start_offset": 4, "end_offset": 6} - } - }, - { - "display_name": "animal", - "text_extraction": { - "text_segment": {"start_offset": 8, "end_offset": 10} - } - } - ] - }\n - { - "text_snippet": { - "content": "This dog is good." - }, - "annotations": [ - { - "display_name": "animal", - "text_extraction": { - "text_segment": {"start_offset": 5, "end_offset": 7} - } - } - ] - } - - **JSONL files that reference documents** - - .JSONL files contain, per line, a JSON document that wraps an - ``input_config`` that contains the path to a source document. - Multiple JSON documents can be separated using line breaks (\n).
- - Supported document extensions: .PDF, .TIF, .TIFF - - For example: - - :: - - { - "document": { - "input_config": { - "gcs_source": { "input_uris": [ "gs://folder/document1.pdf" ] - } - } - } - }\n - { - "document": { - "input_config": { - "gcs_source": { "input_uris": [ "gs://folder/document2.tif" ] - } - } - } - } - - **In-line JSONL files with document layout information** - - **Note:** You can only annotate documents using the UI. The format - described below applies to annotated documents exported using the UI - or ``exportData``. - - In-line .JSONL files for documents contain, per line, a JSON - document that wraps a ``document`` field that provides the textual - content of the document and the layout information. - - For example: - - :: - - { - "document": { - "document_text": { - "content": "dog car cat" - } - "layout": [ - { - "text_segment": { - "start_offset": 0, - "end_offset": 11, - }, - "page_number": 1, - "bounding_poly": { - "normalized_vertices": [ - {"x": 0.1, "y": 0.1}, - {"x": 0.1, "y": 0.3}, - {"x": 0.3, "y": 0.3}, - {"x": 0.3, "y": 0.1}, - ], - }, - "text_segment_type": TOKEN, - } - ], - "document_dimensions": { - "width": 8.27, - "height": 11.69, - "unit": INCH, - } - "page_count": 3, - }, - "annotations": [ - { - "display_name": "animal", - "text_extraction": { - "text_segment": {"start_offset": 0, "end_offset": 3} - } - }, - { - "display_name": "vehicle", - "text_extraction": { - "text_segment": {"start_offset": 4, "end_offset": 7} - } - }, - { - "display_name": "animal", - "text_extraction": { - "text_segment": {"start_offset": 8, "end_offset": 11} - } - }, - ], - - Classification - '''''''''''''' - - See `Preparing your training - data `__ - for more information. - - One or more CSV file(s) with each line in the following format: - - :: - - ML_USE,(TEXT_SNIPPET | GCS_FILE_PATH),LABEL,LABEL,... - - - ``ML_USE`` - Identifies the data set that the current row (file) - applies to. This value can be one of the following: - - - ``TRAIN`` - Rows in this file are used to train the model. - - ``TEST`` - Rows in this file are used to test the model during - training. - - ``UNASSIGNED`` - Rows in this file are not categorized. They - are automatically divided into train and test data: 80% for - training and 20% for testing. - - - ``TEXT_SNIPPET`` and ``GCS_FILE_PATH`` are distinguished by a - pattern. If the column content is a valid Google Cloud Storage - file path, that is, prefixed by "gs://", it is treated as a - ``GCS_FILE_PATH``. Otherwise, if the content is enclosed in - double quotes (""), it is treated as a ``TEXT_SNIPPET``. For - ``GCS_FILE_PATH``, the path must lead to a file with supported - extension and UTF-8 encoding, for example, - "gs://folder/content.txt"; AutoML imports the file content as a - text snippet. For ``TEXT_SNIPPET``, AutoML imports the column - content excluding quotes. In both cases, the content must - be 10MB or smaller. For zip files, each file - inside the zip must be 10MB or smaller. - - For the ``MULTICLASS`` classification type, at most one ``LABEL`` - is allowed. - - The ``ML_USE`` and ``LABEL`` columns are optional. Supported file - extensions: .TXT, .PDF, .TIF, .TIFF, .ZIP - - A maximum of 100 unique labels is allowed per CSV row.
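As a quick illustration of this row format, a hypothetical sketch that writes such an import CSV with Python's standard library (the paths and labels are invented; ``csv.writer`` adds the double quotes around snippets containing commas):

.. code-block:: python

    import csv

    # Rows follow ML_USE,(TEXT_SNIPPET | GCS_FILE_PATH),LABEL,... as above.
    rows = [
        ["TRAIN", "They have bad food and very rude", "RudeService", "BadFood"],
        ["UNASSIGNED", "gs://folder/content.txt", "SlowService"],
        ["TEST", "gs://folder/document.pdf"],
    ]
    with open("import.csv", "w", newline="") as handle:
        csv.writer(handle).writerows(rows)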
- - Sample rows: - - :: - - TRAIN,"They have bad food and very rude",RudeService,BadFood - gs://folder/content.txt,SlowService - TEST,gs://folder/document.pdf - VALIDATE,gs://folder/text_files.zip,BadFood - - Sentiment Analysis - '''''''''''''''''' - - See `Preparing your training - data `__ - for more information. - - CSV file(s) with each line in format: - - :: - - ML_USE,(TEXT_SNIPPET | GCS_FILE_PATH),SENTIMENT - - - ``ML_USE`` - Identifies the data set that the current row (file) - applies to. This value can be one of the following: - - - ``TRAIN`` - Rows in this file are used to train the model. - - ``TEST`` - Rows in this file are used to test the model during - training. - - ``UNASSIGNED`` - Rows in this file are not categorized. They - are automatically divided into train and test data: 80% for - training and 20% for testing. - - - ``TEXT_SNIPPET`` and ``GCS_FILE_PATH`` are distinguished by a - pattern. If the column content is a valid Google Cloud Storage - file path, that is, prefixed by "gs://", it is treated as a - ``GCS_FILE_PATH``. Otherwise, if the content is enclosed in - double quotes (""), it is treated as a ``TEXT_SNIPPET``. For - ``GCS_FILE_PATH``, the path must lead to a file with supported - extension and UTF-8 encoding, for example, - "gs://folder/content.txt"; AutoML imports the file content as a - text snippet. For ``TEXT_SNIPPET``, AutoML imports the column - content excluding quotes. In both cases, the content must - be 128kB or smaller. For zip files, each file - inside the zip must be 128kB or smaller. - - The ``ML_USE`` and ``SENTIMENT`` columns are optional. Supported - file extensions: .TXT, .PDF, .TIF, .TIFF, .ZIP - - - ``SENTIMENT`` - An integer between 0 and - Dataset.text_sentiment_dataset_metadata.sentiment_max - (inclusive). Describes the ordinal of the sentiment - higher - value means a more positive sentiment. All the values are - completely relative, i.e. neither 0 needs to mean a negative or - neutral sentiment nor sentiment_max needs to mean a positive one - - it is just required that 0 is the least positive sentiment in - the data, and sentiment_max is the most positive one. The - SENTIMENT shouldn't be confused with "score" or "magnitude" from - the previous Natural Language Sentiment Analysis API. All - SENTIMENT values between 0 and sentiment_max must be represented - in the imported data. On prediction the same 0 to sentiment_max - range will be used. The difference between neighboring sentiment - values need not be uniform, e.g. 1 and 2 may be similar - whereas the difference between 2 and 3 may be large. - - Sample rows: - - :: - - TRAIN,"@freewrytin this is way too good for your product",2 - gs://folder/content.txt,3 - TEST,gs://folder/document.pdf - VALIDATE,gs://folder/text_files.zip,2 - - AutoML Tables - ^^^^^^^^^^^^^ - - See `Preparing your training - data `__ for - more information. - - You can use either - [gcs_source][google.cloud.automl.v1.InputConfig.gcs_source] or - [bigquery_source][google.cloud.automl.v1.InputConfig.bigquery_source]. - All input is concatenated into a single - [primary_table_spec_id][google.cloud.automl.v1.TablesDatasetMetadata.primary_table_spec_id] - - **For gcs_source:** - - CSV file(s), where the first row of the first file is the header, - containing unique column names. If the first row of a subsequent - file is the same as the header, then it is also treated as a header. - All other rows contain values for the corresponding columns.
- - Each .CSV file by itself must be 10GB or smaller, and their - total size must be 100GB or smaller. - - First three sample rows of a CSV file: - - .. raw:: html - - <pre>
-        "Id","First Name","Last Name","Dob","Addresses"
-        "1","John","Doe","1968-01-22","[{"status":"current","address":"123_First_Avenue","city":"Seattle","state":"WA","zip":"11111","numberOfYears":"1"},{"status":"previous","address":"456_Main_Street","city":"Portland","state":"OR","zip":"22222","numberOfYears":"5"}]"
-        "2","Jane","Doe","1980-10-16","[{"status":"current","address":"789_Any_Avenue","city":"Albany","state":"NY","zip":"33333","numberOfYears":"2"},{"status":"previous","address":"321_Main_Street","city":"Hoboken","state":"NJ","zip":"44444","numberOfYears":"3"}]"
-        </pre>
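A minimal import sketch for an ``InputConfig`` like the ones described in this section (assuming the published ``google.cloud.automl_v1`` client; the dataset ID and CSV path are hypothetical):

.. code-block:: python

    from google.cloud import automl_v1

    client = automl_v1.AutoMlClient()
    dataset_name = client.dataset_path("my-project", "us-central1", "TBL123")  # hypothetical
    input_config = automl_v1.InputConfig(
        gcs_source=automl_v1.GcsSource(input_uris=["gs://folder/import.csv"])  # hypothetical
    )
    operation = client.import_data(name=dataset_name, input_config=input_config)
    operation.result()  # import_data is a long-running operation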
- - **For bigquery_source:** - - A URI of a BigQuery table. The user data size of the BigQuery table - must be 100GB or smaller. - - An imported table must have between 2 and 1,000 columns, inclusive, - and between 1,000 and 100,000,000 rows, inclusive. At most - 5 import data operations can run in parallel. - - **Input field definitions:** - - ``ML_USE`` : ("TRAIN" \| "VALIDATE" \| "TEST" \| "UNASSIGNED") - Describes how the given example (file) should be used for model - training. "UNASSIGNED" can be used when the user has no preference. - - ``GCS_FILE_PATH`` : The path to a file on Google Cloud Storage. For - example, "gs://folder/image1.png". - - ``LABEL`` : A display name of an object on an image, video etc., - e.g. "dog". Must be up to 32 characters long and can consist only of - ASCII Latin letters A-Z and a-z, underscores (_), and ASCII digits - 0-9. For each label, an AnnotationSpec is created whose display_name - becomes the label; AnnotationSpecs are given back in predictions. - - ``INSTANCE_ID`` : A positive integer that identifies a specific - instance of a labeled entity on an example. Used e.g. to track two - cars on a video while being able to tell apart which one is which. - - ``BOUNDING_BOX`` : (``VERTEX,VERTEX,VERTEX,VERTEX`` \| - ``VERTEX,,,VERTEX,,``) A rectangle parallel to the frame of the - example (image, video). If 4 vertices are given they are connected - by edges in the order provided; if 2 are given they are recognized - as diagonally opposite vertices of the rectangle. - - ``VERTEX`` : (``COORDINATE,COORDINATE``) First coordinate is - horizontal (x), the second is vertical (y). - - ``COORDINATE`` : A float in 0 to 1 range, relative to total length - of image or video in given dimension. For fractions the leading - non-decimal 0 can be omitted (i.e. 0.3 = .3). Point 0,0 is in top - left. - - ``TIME_SEGMENT_START`` : (``TIME_OFFSET``) Expresses a beginning, - inclusive, of a time segment within an example that has a time - dimension (e.g. video). - - ``TIME_SEGMENT_END`` : (``TIME_OFFSET``) Expresses an end, - exclusive, of a time segment within an example that has a time - dimension (e.g. video). - - ``TIME_OFFSET`` : A number of seconds as measured from the start of - an example (e.g. video). Fractions are allowed, up to a microsecond - precision. "inf" is allowed, and it means the end of the example. - - ``TEXT_SNIPPET`` : The content of a text snippet, UTF-8 encoded, - enclosed within double quotes (""). - - ``DOCUMENT`` : A field that provides the textual content of a - document and the layout information. - - **Errors:** - - If any of the provided CSV files can't be parsed or if more than - a certain percent of CSV rows cannot be processed then the operation - fails and nothing is imported. Regardless of overall success or - failure, the per-row failures, up to a certain count cap, are listed - in Operation.metadata.partial_failures. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - gcs_source (google.cloud.automl_v1.types.GcsSource): - The Google Cloud Storage location for the input content. For - [AutoMl.ImportData][google.cloud.automl.v1.AutoMl.ImportData], - ``gcs_source`` points to a CSV file with a structure - described in - [InputConfig][google.cloud.automl.v1.InputConfig]. - - This field is a member of `oneof`_ ``source``.
- params (MutableMapping[str, str]): - Additional domain-specific parameters describing the - semantics of the imported data, any string must be up to - 25000 characters long. - - AutoML Tables - ^^^^^^^^^^^^^ - - ``schema_inference_version`` : (integer) This value must be - supplied. The version of the algorithm to use for the - initial inference of the column data types of the imported - table. Allowed values: "1". - """ - - gcs_source: 'GcsSource' = proto.Field( - proto.MESSAGE, - number=1, - oneof='source', - message='GcsSource', - ) - params: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=2, - ) - - -class BatchPredictInputConfig(proto.Message): - r"""Input configuration for BatchPredict Action. - - The format of input depends on the ML problem of the model used for - prediction. As input source the - [gcs_source][google.cloud.automl.v1.InputConfig.gcs_source] is - expected, unless specified otherwise. - - The formats are represented in EBNF with commas being literal and - with non-terminal symbols defined near the end of this comment. The - formats are: - - AutoML Vision - ^^^^^^^^^^^^^ - - Classification - '''''''''''''' - - One or more CSV files where each line is a single column: - - :: - - GCS_FILE_PATH - - The Google Cloud Storage location of an image of up to 30MB in size. - Supported extensions: .JPEG, .GIF, .PNG. This path is treated as the - ID in the batch predict output. - - Sample rows: - - :: - - gs://folder/image1.jpeg - gs://folder/image2.gif - gs://folder/image3.png - - Object Detection - '''''''''''''''' - - One or more CSV files where each line is a single column: - - :: - - GCS_FILE_PATH - - The Google Cloud Storage location of an image of up to 30MB in size. - Supported extensions: .JPEG, .GIF, .PNG. This path is treated as the - ID in the batch predict output. - - Sample rows: - - :: - - gs://folder/image1.jpeg - gs://folder/image2.gif - gs://folder/image3.png - - AutoML Video Intelligence - ^^^^^^^^^^^^^^^^^^^^^^^^^ - - Classification - '''''''''''''' - - One or more CSV files where each line is a single column: - - :: - - GCS_FILE_PATH,TIME_SEGMENT_START,TIME_SEGMENT_END - - ``GCS_FILE_PATH`` is the Google Cloud Storage location of a video up - to 50GB in size and up to 3h in duration. Supported - extensions: .MOV, .MPEG4, .MP4, .AVI. - - ``TIME_SEGMENT_START`` and ``TIME_SEGMENT_END`` must be within the - length of the video, and the end time must be after the start time. - - Sample rows: - - :: - - gs://folder/video1.mp4,10,40 - gs://folder/video1.mp4,20,60 - gs://folder/vid2.mov,0,inf - - Object Tracking - ''''''''''''''' - - One or more CSV files where each line is a single column: - - :: - - GCS_FILE_PATH,TIME_SEGMENT_START,TIME_SEGMENT_END - - ``GCS_FILE_PATH`` is the Google Cloud Storage location of a video up - to 50GB in size and up to 3h in duration. Supported - extensions: .MOV, .MPEG4, .MP4, .AVI. - - ``TIME_SEGMENT_START`` and ``TIME_SEGMENT_END`` must be within the - length of the video, and the end time must be after the start time. - - Sample rows: - - :: - - gs://folder/video1.mp4,10,40 - gs://folder/video1.mp4,20,60 - gs://folder/vid2.mov,0,inf - - AutoML Natural Language - ^^^^^^^^^^^^^^^^^^^^^^^ - - Classification - '''''''''''''' - - One or more CSV files where each line is a single column: - - :: - - GCS_FILE_PATH - - ``GCS_FILE_PATH`` is the Google Cloud Storage location of a text - file. Supported file extensions: .TXT, .PDF, .TIF, .TIFF - - Text files can be no larger than 10MB in size.
- - Sample rows: - - :: - - gs://folder/text1.txt - gs://folder/text2.pdf - gs://folder/text3.tif - - Sentiment Analysis - '''''''''''''''''' - - One or more CSV files where each line is a single column: - - :: - - GCS_FILE_PATH - - ``GCS_FILE_PATH`` is the Google Cloud Storage location of a text - file. Supported file extensions: .TXT, .PDF, .TIF, .TIFF - - Text files can be no larger than 128kB in size. - - Sample rows: - - :: - - gs://folder/text1.txt - gs://folder/text2.pdf - gs://folder/text3.tif - - Entity Extraction - ''''''''''''''''' - - One or more JSONL (JSON Lines) files that either provide inline text - or documents. You can only use one format, either inline text or - documents, for a single call to [AutoMl.BatchPredict]. - - Each JSONL file contains, per line, a proto that wraps a temporary - user-assigned TextSnippet ID (string up to 2000 characters long) - called "id", a TextSnippet proto (in JSON representation) and zero - or more TextFeature protos. Any given text snippet content must have - 30,000 characters or less, and also be UTF-8 NFC encoded (ASCII - already is). The IDs provided should be unique. - - Each document JSONL file contains, per line, a proto that wraps a - Document proto with ``input_config`` set. Each document cannot - exceed 2MB in size. - - Supported document extensions: .PDF, .TIF, .TIFF - - Each JSONL file must not exceed 100MB in size, and no more than 20 - JSONL files may be passed. - - Sample inline JSONL file (Shown with artificial line breaks. Actual - line breaks are denoted by "\n".): - - :: - - { - "id": "my_first_id", - "text_snippet": { "content": "dog car cat"}, - "text_features": [ - { - "text_segment": {"start_offset": 4, "end_offset": 6}, - "structural_type": PARAGRAPH, - "bounding_poly": { - "normalized_vertices": [ - {"x": 0.1, "y": 0.1}, - {"x": 0.1, "y": 0.3}, - {"x": 0.3, "y": 0.3}, - {"x": 0.3, "y": 0.1}, - ] - }, - } - ], - }\n - { - "id": "2", - "text_snippet": { - "content": "Extended sample content", - "mime_type": "text/plain" - } - } - - Sample document JSONL file (Shown with artificial line breaks. - Actual line breaks are denoted by "\n".): - - :: - - { - "document": { - "input_config": { - "gcs_source": { "input_uris": [ "gs://folder/document1.pdf" ] - } - } - } - }\n - { - "document": { - "input_config": { - "gcs_source": { "input_uris": [ "gs://folder/document2.tif" ] - } - } - } - } - - AutoML Tables - ^^^^^^^^^^^^^ - - See `Preparing your training - data `__ - for more information. - - You can use either - [gcs_source][google.cloud.automl.v1.BatchPredictInputConfig.gcs_source] - or [bigquery_source][BatchPredictInputConfig.bigquery_source]. - - **For gcs_source:** - - CSV file(s), each by itself 10GB or smaller and total size must be - 100GB or smaller, where the first file must have a header containing - column names. If the first row of a subsequent file is the same as - the header, then it is also treated as a header. All other rows - contain values for the corresponding columns. - - The column names must contain the model's - [input_feature_column_specs'][google.cloud.automl.v1.TablesModelMetadata.input_feature_column_specs] - [display_name-s][google.cloud.automl.v1.ColumnSpec.display_name] - (order doesn't matter). The columns corresponding to the model's - input feature column specs must contain values compatible with the - column spec's data types. Prediction on all the rows, i.e. the CSV - lines, will be attempted. - - Sample rows from a CSV file: - - .. raw:: html - - <pre>
-        "First Name","Last Name","Dob","Addresses"
-        "John","Doe","1968-01-22","[{"status":"current","address":"123_First_Avenue","city":"Seattle","state":"WA","zip":"11111","numberOfYears":"1"},{"status":"previous","address":"456_Main_Street","city":"Portland","state":"OR","zip":"22222","numberOfYears":"5"}]"
-        "Jane","Doe","1980-10-16","[{"status":"current","address":"789_Any_Avenue","city":"Albany","state":"NY","zip":"33333","numberOfYears":"2"},{"status":"previous","address":"321_Main_Street","city":"Hoboken","state":"NJ","zip":"44444","numberOfYears":"3"}]"
-        </pre>
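A minimal batch prediction sketch using this input configuration together with the ``BatchPredictOutputConfig`` defined later in this file (assuming the published ``google.cloud.automl_v1`` client; the resource names and URIs are placeholders):

.. code-block:: python

    from google.cloud import automl_v1

    prediction_client = automl_v1.PredictionServiceClient()
    model_name = prediction_client.model_path("my-project", "us-central1", "TBL123")  # hypothetical
    operation = prediction_client.batch_predict(
        name=model_name,
        input_config=automl_v1.BatchPredictInputConfig(
            gcs_source=automl_v1.GcsSource(input_uris=["gs://folder/batch.csv"])
        ),
        output_config=automl_v1.BatchPredictOutputConfig(
            gcs_destination=automl_v1.GcsDestination(
                output_uri_prefix="gs://folder/output/"
            )
        ),
    )
    operation.result()  # batch_predict is a long-running operation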
- - **For bigquery_source:** - - The URI of a BigQuery table. The user data size of the BigQuery - table must be 100GB or smaller. - - The column names must contain the model's - [input_feature_column_specs'][google.cloud.automl.v1.TablesModelMetadata.input_feature_column_specs] - [display_name-s][google.cloud.automl.v1.ColumnSpec.display_name] - (order doesn't matter). The columns corresponding to the model's - input feature column specs must contain values compatible with the - column spec's data types. Prediction on all the rows of the table - will be attempted. - - **Input field definitions:** - - ``GCS_FILE_PATH`` : The path to a file on Google Cloud Storage. For - example, "gs://folder/video.avi". - - ``TIME_SEGMENT_START`` : (``TIME_OFFSET``) Expresses a beginning, - inclusive, of a time segment within an example that has a time - dimension (e.g. video). - - ``TIME_SEGMENT_END`` : (``TIME_OFFSET``) Expresses an end, - exclusive, of a time segment within an example that has a time - dimension (e.g. video). - - ``TIME_OFFSET`` : A number of seconds as measured from the start of - an example (e.g. video). Fractions are allowed, up to a microsecond - precision. "inf" is allowed, and it means the end of the example. - - **Errors:** - - If any of the provided CSV files can't be parsed or if more than - a certain percent of CSV rows cannot be processed then the operation - fails and prediction does not happen. Regardless of overall success - or failure, the per-row failures, up to a certain count cap, will be - listed in Operation.metadata.partial_failures. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - gcs_source (google.cloud.automl_v1.types.GcsSource): - Required. The Google Cloud Storage location - for the input content. - - This field is a member of `oneof`_ ``source``. - """ - - gcs_source: 'GcsSource' = proto.Field( - proto.MESSAGE, - number=1, - oneof='source', - message='GcsSource', - ) - - -class DocumentInputConfig(proto.Message): - r"""Input configuration of a - [Document][google.cloud.automl.v1.Document]. - - Attributes: - gcs_source (google.cloud.automl_v1.types.GcsSource): - The Google Cloud Storage location of the - document file. Only a single path should be - given. - - Max supported size: 512MB. - - Supported extensions: .PDF. - """ - - gcs_source: 'GcsSource' = proto.Field( - proto.MESSAGE, - number=1, - message='GcsSource', - ) - - -class OutputConfig(proto.Message): - r"""- For Translation: CSV file ``translation.csv``, with each line in - format: ML_USE,GCS_FILE_PATH GCS_FILE_PATH leads to a .TSV file - which describes examples that have given ML_USE, using the - following row format per line: TEXT_SNIPPET (in source language) - \\t TEXT_SNIPPET (in target language) - - - For Tables: Output depends on whether the dataset was imported - from Google Cloud Storage or BigQuery. Google Cloud Storage - case: - [gcs_destination][google.cloud.automl.v1p1beta.OutputConfig.gcs_destination] - must be set. Exported are CSV file(s) ``tables_1.csv``, - ``tables_2.csv``,...,\ ``tables_N.csv`` with each having as - header line the table's column names, and all other lines - contain values for the header columns. BigQuery case: - [bigquery_destination][google.cloud.automl.v1p1beta.OutputConfig.bigquery_destination] - pointing to a BigQuery project must be set. In the given - project a new dataset will be created with name - ``export_data_<automl-dataset-display-name>_<timestamp-of-export-call>``, - where <automl-dataset-display-name> will be made BigQuery-dataset-name compatible (e.g.
most - special characters will become underscores), and the timestamp - will be in YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" - format. In that dataset a new table called ``primary_table`` - will be created, and filled with precisely the same data as - that obtained on import. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - gcs_destination (google.cloud.automl_v1.types.GcsDestination): - Required. The Google Cloud Storage location where the output - is to be written. For Image Object Detection, Text - Extraction, Video Classification and Tables, in the given - directory a new directory will be created with name: - export_data-<automl-dataset-display-name>-<timestamp-of-export-call>, - where the timestamp is in YYYY-MM-DDThh:mm:ss.sssZ - ISO-8601 format. All export output will be written into that - directory. - - This field is a member of `oneof`_ ``destination``. - """ - - gcs_destination: 'GcsDestination' = proto.Field( - proto.MESSAGE, - number=1, - oneof='destination', - message='GcsDestination', - ) - - -class BatchPredictOutputConfig(proto.Message): - r"""Output configuration for BatchPredict Action. - - As destination the - [gcs_destination][google.cloud.automl.v1.BatchPredictOutputConfig.gcs_destination] - must be set unless specified otherwise for a domain. If - gcs_destination is set then in the given directory a new directory - is created. Its name will be - "prediction-<model-display-name>-<timestamp-of-prediction-call>", - where the timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format. - Its contents depend on the ML problem the predictions are made for. - - - For Image Classification: In the created directory files - ``image_classification_1.jsonl``, - ``image_classification_2.jsonl``,...,\ ``image_classification_N.jsonl`` - will be created, where N may be 1, and depends on the total - number of the successfully predicted images and annotations. A - single image will be listed only once with all its annotations, - and its annotations will never be split across files. Each .JSONL - file will contain, per line, a JSON representation of a proto - that wraps the image's "ID" : "<id_value>" followed by a list of zero - or more AnnotationPayload protos (called annotations), which have - classification detail populated. If prediction for any image - failed (partially or completely), then additional - ``errors_1.jsonl``, ``errors_2.jsonl``,..., ``errors_N.jsonl`` - files will be created (N depends on total number of failed - predictions). These files will have a JSON representation of a - proto that wraps the same "ID" : "<id_value>" but here followed - by exactly one - ```google.rpc.Status`` <https://github.com/googleapis/googleapis/blob/master/google/rpc/status.proto>`__ - containing only ``code`` and ``message``\ fields. - - - For Image Object Detection: In the created directory files - ``image_object_detection_1.jsonl``, - ``image_object_detection_2.jsonl``,...,\ ``image_object_detection_N.jsonl`` - will be created, where N may be 1, and depends on the total - number of the successfully predicted images and annotations. Each - .JSONL file will contain, per line, a JSON representation of a - proto that wraps the image's "ID" : "<id_value>" followed by a list - of zero or more AnnotationPayload protos (called annotations), - which have image_object_detection detail populated. A single - image will be listed only once with all its annotations, and its - annotations will never be split across files. If prediction for - any image failed (partially or completely), then additional - ``errors_1.jsonl``, ``errors_2.jsonl``,..., ``errors_N.jsonl`` - files will be created (N depends on total number of failed - predictions).
These files will have a JSON representation of a - proto that wraps the same "ID" : "<id_value>" but here followed - by exactly one - ```google.rpc.Status`` <https://github.com/googleapis/googleapis/blob/master/google/rpc/status.proto>`__ - containing only ``code`` and ``message``\ fields. - - - For Video Classification: In the created directory a - video_classification.csv file, and a .JSON file for each video - classification requested in the input (i.e. each line in given - CSV(s)), will be created. - - :: - - The format of video_classification.csv is: - GCS_FILE_PATH,TIME_SEGMENT_START,TIME_SEGMENT_END,JSON_FILE_NAME,STATUS - where: - GCS_FILE_PATH,TIME_SEGMENT_START,TIME_SEGMENT_END = matches 1 to 1 - the prediction input lines (i.e. video_classification.csv has - precisely the same number of lines as the prediction input had.) - JSON_FILE_NAME = Name of .JSON file in the output directory, which - contains prediction responses for the video time segment. - STATUS = "OK" if prediction completed successfully, or an error code - with message otherwise. If STATUS is not "OK" then the .JSON file - for that line may not exist or be empty. - - Each .JSON file, assuming STATUS is "OK", will contain a list of - AnnotationPayload protos in JSON format, which are the predictions - for the video time segment the file is assigned to in the - video_classification.csv. All AnnotationPayload protos will have - video_classification field set, and will be sorted by - video_classification.type field (note that the returned types are - governed by the `classification_types` parameter in - [PredictionService.BatchPredictRequest.params][]). - - - For Video Object Tracking: In the created directory a - video_object_tracking.csv file will be created, and multiple - files video_object_tracking_1.json, - video_object_tracking_2.json,..., video_object_tracking_N.json, - where N is the number of requests in the input (i.e. the number - of lines in given CSV(s)). - - :: - - The format of video_object_tracking.csv is: - GCS_FILE_PATH,TIME_SEGMENT_START,TIME_SEGMENT_END,JSON_FILE_NAME,STATUS - where: - GCS_FILE_PATH,TIME_SEGMENT_START,TIME_SEGMENT_END = matches 1 to 1 - the prediction input lines (i.e. video_object_tracking.csv has - precisely the same number of lines as the prediction input had.) - JSON_FILE_NAME = Name of .JSON file in the output directory, which - contains prediction responses for the video time segment. - STATUS = "OK" if prediction completed successfully, or an error - code with message otherwise. If STATUS is not "OK" then the .JSON - file for that line may not exist or be empty. - - Each .JSON file, assuming STATUS is "OK", will contain a list of - AnnotationPayload protos in JSON format, which are the predictions - for each frame of the video time segment the file is assigned to in - video_object_tracking.csv. All AnnotationPayload protos will have - video_object_tracking field set. - - - For Text Classification: In the created directory files - ``text_classification_1.jsonl``, - ``text_classification_2.jsonl``,...,\ ``text_classification_N.jsonl`` - will be created, where N may be 1, and depends on the total - number of inputs and annotations found. - - :: - - Each .JSONL file will contain, per line, a JSON representation of a - proto that wraps the input text file (or document) in - a text snippet (or document) proto and a list of - zero or more AnnotationPayload protos (called annotations), which - have classification detail populated. A single text file (or - document) will be listed only once with all its annotations, and its - annotations will never be split across files.
- - If prediction for any input file (or document) failed (partially or - completely), then additional `errors_1.jsonl`, `errors_2.jsonl`,..., - `errors_N.jsonl` files will be created (N depends on total number of - failed predictions). These files will have a JSON representation of a - proto that wraps the input file followed by exactly one - [`google.rpc.Status`](https://github.com/googleapis/googleapis/blob/master/google/rpc/status.proto) - containing only `code` and `message`. - - - For Text Sentiment: In the created directory files - ``text_sentiment_1.jsonl``, - ``text_sentiment_2.jsonl``,...,\ ``text_sentiment_N.jsonl`` will - be created, where N may be 1, and depends on the total number of - inputs and annotations found. - - :: - - Each .JSONL file will contain, per line, a JSON representation of a - proto that wraps input text file (or document) in - the text snippet (or document) proto and a list of - zero or more AnnotationPayload protos (called annotations), which - have text_sentiment detail populated. A single text file (or - document) will be listed only once with all its annotations, and its - annotations will never be split across files. - - If prediction for any input file (or document) failed (partially or - completely), then additional `errors_1.jsonl`, `errors_2.jsonl`,..., - `errors_N.jsonl` files will be created (N depends on total number of - failed predictions). These files will have a JSON representation of a - proto that wraps the input file followed by exactly one - [`google.rpc.Status`](https://github.com/googleapis/googleapis/blob/master/google/rpc/status.proto) - containing only `code` and `message`. - - - For Text Extraction: In the created directory files - ``text_extraction_1.jsonl``, - ``text_extraction_2.jsonl``,...,\ ``text_extraction_N.jsonl`` - will be created, where N may be 1, and depends on the total - number of inputs and annotations found. The contents of these - .JSONL file(s) depend on whether the input used inline text, or - documents. If input was inline, then each .JSONL file will - contain, per line, a JSON representation of a proto that wraps - the text snippet's "id" given in the request (if specified), followed by - the input text snippet, and a list of zero or more AnnotationPayload - protos (called annotations), which have text_extraction detail - populated. A single text snippet will be listed only once with - all its annotations, and its annotations will never be split - across files. If input used documents, then each .JSONL file will - contain, per line, a JSON representation of a proto that wraps - the document proto given in the request, followed by its OCR-ed - representation in the form of a text snippet, finally followed by - a list of zero or more AnnotationPayload protos (called - annotations), which have text_extraction detail populated and - refer, via their indices, to the OCR-ed text snippet. A single - document (and its text snippet) will be listed only once with all - its annotations, and its annotations will never be split across - files. If prediction for any text snippet failed (partially or - completely), then additional ``errors_1.jsonl``, - ``errors_2.jsonl``,..., ``errors_N.jsonl`` files will be created - (N depends on total number of failed predictions). These files - will have a JSON representation of a proto that wraps either the - "id" : "<id_value>" (in case of inline) or the document proto (in - case of document) but here followed by exactly one - ```google.rpc.Status`` <https://github.com/googleapis/googleapis/blob/master/google/rpc/status.proto>`__ - containing only ``code`` and ``message``. (A minimal sketch of reading these JSONL outputs follows.)
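A minimal sketch of reading these JSONL outputs, as referenced above; the bucket name and prefix are illustrative assumptions, and the ``google-cloud-storage`` package is assumed to be installed:

.. code-block:: python

    import json

    from google.cloud import storage

    client = storage.Client()
    # The prefix below stands in for the "prediction-..." directory that the
    # service creates under the configured output_uri_prefix.
    for blob in client.list_blobs("my-output-bucket", prefix="prediction-"):
        if not blob.name.endswith(".jsonl"):
            continue
        for line in blob.download_as_text().splitlines():
            record = json.loads(line)  # one wrapped proto per line
            print(record)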
- - For Tables: Output depends on whether - [gcs_destination][google.cloud.automl.v1p1beta.BatchPredictOutputConfig.gcs_destination] - or - [bigquery_destination][google.cloud.automl.v1p1beta.BatchPredictOutputConfig.bigquery_destination] - is set (either is allowed). Google Cloud Storage case: In the - created directory files ``tables_1.csv``, ``tables_2.csv``,..., - ``tables_N.csv`` will be created, where N may be 1, and depends - on the total number of the successfully predicted rows. For all - CLASSIFICATION - [prediction_type-s][google.cloud.automl.v1p1beta.TablesModelMetadata.prediction_type]: - Each .csv file will contain a header, listing all columns' - [display_name-s][google.cloud.automl.v1p1beta.ColumnSpec.display_name] - given on input followed by M target column names in the format of - "<[target_column_specs][google.cloud.automl.v1p1beta.TablesModelMetadata.target_column_spec] - [display_name][google.cloud.automl.v1p1beta.ColumnSpec.display_name]>_<target value>_score" - where M is the number of distinct target values, i.e. the number of - distinct values in the target column of the table used to train - the model. Subsequent lines will contain the respective values of - successfully predicted rows, with the last, i.e. the target, - columns having the corresponding prediction - [scores][google.cloud.automl.v1p1beta.TablesAnnotation.score]. - For REGRESSION and FORECASTING - [prediction_type-s][google.cloud.automl.v1p1beta.TablesModelMetadata.prediction_type]: - Each .csv file will contain a header, listing all columns' - [display_name-s][google.cloud.automl.v1p1beta.display_name] given - on input followed by the predicted target column with name in the - format of - "predicted_<[target_column_specs][google.cloud.automl.v1p1beta.TablesModelMetadata.target_column_spec] - [display_name][google.cloud.automl.v1p1beta.ColumnSpec.display_name]>". - Subsequent lines will contain the respective values of - successfully predicted rows, with the last, i.e. the target, - column having the predicted target value. If prediction for any - rows failed, then additional ``errors_1.csv``, - ``errors_2.csv``,..., ``errors_N.csv`` files will be created (N depends - on total number of failed rows). These files will have an analogous - format to ``tables_*.csv``, but always with a single target - column having ```google.rpc.Status`` <https://github.com/googleapis/googleapis/blob/master/google/rpc/status.proto>`__ represented - as a JSON string, and containing only ``code`` and ``message``. - BigQuery case: - [bigquery_destination][google.cloud.automl.v1p1beta.OutputConfig.bigquery_destination] - pointing to a BigQuery project must be set. In the given project - a new dataset will be created with name - ``prediction_<model-display-name>_<timestamp-of-prediction-call>`` - where <model-display-name> will be made BigQuery-dataset-name compatible (e.g. most - special characters will become underscores), and timestamp will - be in YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the - dataset two tables will be created, ``predictions``, and - ``errors``.
The ``predictions`` table's column names will be the - input columns' - [display_name-s][google.cloud.automl.v1p1beta.ColumnSpec.display_name] - followed by the target column with name in the format of - "predicted_<[target_column_specs][google.cloud.automl.v1p1beta.TablesModelMetadata.target_column_spec] - [display_name][google.cloud.automl.v1p1beta.ColumnSpec.display_name]>". - The input feature columns will contain the respective values of - successfully predicted rows, with the target column having an - ARRAY of - [AnnotationPayloads][google.cloud.automl.v1p1beta.AnnotationPayload], - represented as STRUCT-s, containing - [TablesAnnotation][google.cloud.automl.v1p1beta.TablesAnnotation]. - The ``errors`` table contains rows for which the prediction has - failed; it has analogous input columns, while the target column - name is in the format of - "errors_<[target_column_specs][google.cloud.automl.v1p1beta.TablesModelMetadata.target_column_spec] - [display_name][google.cloud.automl.v1p1beta.ColumnSpec.display_name]>", - and as a value has - ```google.rpc.Status`` <https://github.com/googleapis/googleapis/blob/master/google/rpc/status.proto>`__ - represented as a STRUCT, and containing only ``code`` and - ``message``. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - gcs_destination (google.cloud.automl_v1.types.GcsDestination): - Required. The Google Cloud Storage location - of the directory where the output is to be - written to. - - This field is a member of `oneof`_ ``destination``. - """ - - gcs_destination: 'GcsDestination' = proto.Field( - proto.MESSAGE, - number=1, - oneof='destination', - message='GcsDestination', - ) - - -class ModelExportOutputConfig(proto.Message): - r"""Output configuration for ModelExport Action. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - gcs_destination (google.cloud.automl_v1.types.GcsDestination): - Required. The Google Cloud Storage location where the model - is to be written to. This location may only be set for the - following model formats: "tflite", "edgetpu_tflite", - "tf_saved_model", "tf_js", "core_ml". - - Under the directory given as the destination a new one with - name "model-export-<model-display-name>-<timestamp-of-export-call>", where timestamp is in - YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format, will be created. - Inside it, the model and any of its supporting files will be - written. - - This field is a member of `oneof`_ ``destination``. - model_format (str): - The format in which the model must be exported. The - available, and default, formats depend on the problem and - model type (if a given problem and type combination doesn't - have a format listed, it means its models are not - exportable): - - - For Image Classification mobile-low-latency-1, - mobile-versatile-1, mobile-high-accuracy-1: "tflite" - (default), "edgetpu_tflite", "tf_saved_model", "tf_js", - "docker". - - - For Image Classification mobile-core-ml-low-latency-1, - mobile-core-ml-versatile-1, - mobile-core-ml-high-accuracy-1: "core_ml" (default). - - - For Image Object Detection mobile-low-latency-1, - mobile-versatile-1, mobile-high-accuracy-1: "tflite", - "tf_saved_model", "tf_js". Formats description: - - - tflite - Used for Android mobile devices. - - - edgetpu_tflite - Used for `Edge TPU <https://cloud.google.com/edge-tpu/>`__ devices. - - - tf_saved_model - A TensorFlow model in SavedModel format. - - - tf_js - A - `TensorFlow.js <https://www.tensorflow.org/js>`__ model - that can be used in the browser and in Node.js using - JavaScript. - - - docker - Used for Docker containers.
Use the params field - to customize the container. The container is verified to - work correctly on the Ubuntu 16.04 operating system. See more - at `containers quickstart <https://cloud.google.com/vision/automl/docs/containers-gcs-quickstart>`__ - - - core_ml - Used for iOS mobile devices. - params (MutableMapping[str, str]): - Additional model-type and format specific parameters - describing the requirements for the model files to be - exported; any string must be up to 25000 characters long. - - - For ``docker`` format: ``cpu_architecture`` - (string) - "x86_64" (default). ``gpu_architecture`` - (string) - "none" (default), "nvidia". - """ - - gcs_destination: 'GcsDestination' = proto.Field( - proto.MESSAGE, - number=1, - oneof='destination', - message='GcsDestination', - ) - model_format: str = proto.Field( - proto.STRING, - number=4, - ) - params: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=2, - ) - - -class GcsSource(proto.Message): - r"""The Google Cloud Storage location for the input content. - - Attributes: - input_uris (MutableSequence[str]): - Required. Google Cloud Storage URIs to input files, up to - 2000 characters long. Accepted forms: - - - Full object path, e.g. gs://bucket/directory/object.csv - """ - - input_uris: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - - -class GcsDestination(proto.Message): - r"""The Google Cloud Storage location where the output is to be - written to. - - Attributes: - output_uri_prefix (str): - Required. Google Cloud Storage URI to output directory, up - to 2000 characters long. Accepted forms: - - - Prefix path: gs://bucket/directory. The requesting user - must have write permission to the bucket. The directory - is created if it doesn't exist. - """ - - output_uri_prefix: str = proto.Field( - proto.STRING, - number=1, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/model.py b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/model.py deleted file mode 100644 index f9af99a09eea..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/model.py +++ /dev/null @@ -1,201 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.automl_v1.types import image -from google.cloud.automl_v1.types import text -from google.cloud.automl_v1.types import translation -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.automl.v1', - manifest={ - 'Model', - }, -) - - -class Model(proto.Message): - r"""API proto representing a trained machine learning model. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - ..
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - translation_model_metadata (google.cloud.automl_v1.types.TranslationModelMetadata): - Metadata for translation models. - - This field is a member of `oneof`_ ``model_metadata``. - image_classification_model_metadata (google.cloud.automl_v1.types.ImageClassificationModelMetadata): - Metadata for image classification models. - - This field is a member of `oneof`_ ``model_metadata``. - text_classification_model_metadata (google.cloud.automl_v1.types.TextClassificationModelMetadata): - Metadata for text classification models. - - This field is a member of `oneof`_ ``model_metadata``. - image_object_detection_model_metadata (google.cloud.automl_v1.types.ImageObjectDetectionModelMetadata): - Metadata for image object detection models. - - This field is a member of `oneof`_ ``model_metadata``. - text_extraction_model_metadata (google.cloud.automl_v1.types.TextExtractionModelMetadata): - Metadata for text extraction models. - - This field is a member of `oneof`_ ``model_metadata``. - text_sentiment_model_metadata (google.cloud.automl_v1.types.TextSentimentModelMetadata): - Metadata for text sentiment models. - - This field is a member of `oneof`_ ``model_metadata``. - name (str): - Output only. Resource name of the model. Format: - ``projects/{project_id}/locations/{location_id}/models/{model_id}`` - display_name (str): - Required. The name of the model to show in the interface. - The name can be up to 32 characters long and can consist - only of ASCII Latin letters A-Z and a-z, underscores (_), - and ASCII digits 0-9. It must start with a letter. - dataset_id (str): - Required. The resource ID of the dataset used - to create the model. The dataset must come from - the same ancestor project and location. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Timestamp when the model - training finished and can be used for - prediction. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Timestamp when this model was - last updated. - deployment_state (google.cloud.automl_v1.types.Model.DeploymentState): - Output only. Deployment state of the model. A - model can only serve prediction requests after - it gets deployed. - etag (str): - Used to perform consistent - read-modify-write updates. If not set, a blind - "overwrite" update happens. - labels (MutableMapping[str, str]): - Optional. The labels with user-defined - metadata to organize your model. - Label keys and values can be no longer than 64 - characters (Unicode codepoints), can only - contain lowercase letters, numeric characters, - underscores and dashes. International characters - are allowed. Label values are optional. Label - keys must start with a letter. - - See https://goo.gl/xmQnxf for more information - on and examples of labels. - """ - class DeploymentState(proto.Enum): - r"""Deployment state of the model. - - Values: - DEPLOYMENT_STATE_UNSPECIFIED (0): - Should not be used; an unset enum has this - value by default. - DEPLOYED (1): - Model is deployed. - UNDEPLOYED (2): - Model is not deployed.
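A minimal usage sketch (the resource name is an illustrative assumption) for checking a model's deployment state with the generated client:

.. code-block:: python

    from google.cloud import automl_v1

    client = automl_v1.AutoMlClient()
    model = client.get_model(
        name="projects/123/locations/us-central1/models/MOD123")
    if model.deployment_state == automl_v1.Model.DeploymentState.DEPLOYED:
        print("model can serve prediction requests")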
- """ - DEPLOYMENT_STATE_UNSPECIFIED = 0 - DEPLOYED = 1 - UNDEPLOYED = 2 - - translation_model_metadata: translation.TranslationModelMetadata = proto.Field( - proto.MESSAGE, - number=15, - oneof='model_metadata', - message=translation.TranslationModelMetadata, - ) - image_classification_model_metadata: image.ImageClassificationModelMetadata = proto.Field( - proto.MESSAGE, - number=13, - oneof='model_metadata', - message=image.ImageClassificationModelMetadata, - ) - text_classification_model_metadata: text.TextClassificationModelMetadata = proto.Field( - proto.MESSAGE, - number=14, - oneof='model_metadata', - message=text.TextClassificationModelMetadata, - ) - image_object_detection_model_metadata: image.ImageObjectDetectionModelMetadata = proto.Field( - proto.MESSAGE, - number=20, - oneof='model_metadata', - message=image.ImageObjectDetectionModelMetadata, - ) - text_extraction_model_metadata: text.TextExtractionModelMetadata = proto.Field( - proto.MESSAGE, - number=19, - oneof='model_metadata', - message=text.TextExtractionModelMetadata, - ) - text_sentiment_model_metadata: text.TextSentimentModelMetadata = proto.Field( - proto.MESSAGE, - number=22, - oneof='model_metadata', - message=text.TextSentimentModelMetadata, - ) - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - dataset_id: str = proto.Field( - proto.STRING, - number=3, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=7, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=11, - message=timestamp_pb2.Timestamp, - ) - deployment_state: DeploymentState = proto.Field( - proto.ENUM, - number=8, - enum=DeploymentState, - ) - etag: str = proto.Field( - proto.STRING, - number=10, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=34, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/model_evaluation.py b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/model_evaluation.py deleted file mode 100644 index 7f6cd62d42ef..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/model_evaluation.py +++ /dev/null @@ -1,167 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.automl_v1.types import classification -from google.cloud.automl_v1.types import detection -from google.cloud.automl_v1.types import text_extraction -from google.cloud.automl_v1.types import text_sentiment -from google.cloud.automl_v1.types import translation -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.automl.v1', - manifest={ - 'ModelEvaluation', - }, -) - - -class ModelEvaluation(proto.Message): - r"""Evaluation results of a model. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - classification_evaluation_metrics (google.cloud.automl_v1.types.ClassificationEvaluationMetrics): - Model evaluation metrics for image, text, - video and tables classification. - A Tables problem is considered classification - when the target column has the CATEGORY DataType. - - This field is a member of `oneof`_ ``metrics``. - translation_evaluation_metrics (google.cloud.automl_v1.types.TranslationEvaluationMetrics): - Model evaluation metrics for translation. - - This field is a member of `oneof`_ ``metrics``. - image_object_detection_evaluation_metrics (google.cloud.automl_v1.types.ImageObjectDetectionEvaluationMetrics): - Model evaluation metrics for image object - detection. - - This field is a member of `oneof`_ ``metrics``. - text_sentiment_evaluation_metrics (google.cloud.automl_v1.types.TextSentimentEvaluationMetrics): - Evaluation metrics for text sentiment models. - - This field is a member of `oneof`_ ``metrics``. - text_extraction_evaluation_metrics (google.cloud.automl_v1.types.TextExtractionEvaluationMetrics): - Evaluation metrics for text extraction - models. - - This field is a member of `oneof`_ ``metrics``. - name (str): - Output only. Resource name of the model evaluation. Format: - ``projects/{project_id}/locations/{location_id}/models/{model_id}/modelEvaluations/{model_evaluation_id}`` - annotation_spec_id (str): - Output only. The ID of the annotation spec that the model - evaluation applies to. The ID is empty for the overall - model evaluation. For Tables, annotation specs do not exist - in the dataset and this ID is never set, but for - CLASSIFICATION - [prediction_type-s][google.cloud.automl.v1.TablesModelMetadata.prediction_type] - the - [display_name][google.cloud.automl.v1.ModelEvaluation.display_name] - field is used. - display_name (str): - Output only. The value of - [display_name][google.cloud.automl.v1.AnnotationSpec.display_name] - at the moment when the model was trained. Because this field - returns a value at model training time, for different models - trained from the same dataset, the values may differ, since - display names could have been changed between the two models' - trainings. For Tables CLASSIFICATION - [prediction_type-s][google.cloud.automl.v1.TablesModelMetadata.prediction_type] - distinct values of the target column at the moment of the - model evaluation are populated here. The display_name is - empty for the overall model evaluation. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only.
Timestamp when this model - evaluation was created. - evaluated_example_count (int): - Output only. The number of examples used for model - evaluation, i.e. for which ground truth from time of model - creation is compared against the predicted annotations - created by the model. For overall ModelEvaluation (i.e. with - annotation_spec_id not set) this is the total number of all - examples used for evaluation. Otherwise, this is the count - of examples that according to the ground truth were - annotated by the - [annotation_spec_id][google.cloud.automl.v1.ModelEvaluation.annotation_spec_id]. - """ - - classification_evaluation_metrics: classification.ClassificationEvaluationMetrics = proto.Field( - proto.MESSAGE, - number=8, - oneof='metrics', - message=classification.ClassificationEvaluationMetrics, - ) - translation_evaluation_metrics: translation.TranslationEvaluationMetrics = proto.Field( - proto.MESSAGE, - number=9, - oneof='metrics', - message=translation.TranslationEvaluationMetrics, - ) - image_object_detection_evaluation_metrics: detection.ImageObjectDetectionEvaluationMetrics = proto.Field( - proto.MESSAGE, - number=12, - oneof='metrics', - message=detection.ImageObjectDetectionEvaluationMetrics, - ) - text_sentiment_evaluation_metrics: text_sentiment.TextSentimentEvaluationMetrics = proto.Field( - proto.MESSAGE, - number=11, - oneof='metrics', - message=text_sentiment.TextSentimentEvaluationMetrics, - ) - text_extraction_evaluation_metrics: text_extraction.TextExtractionEvaluationMetrics = proto.Field( - proto.MESSAGE, - number=13, - oneof='metrics', - message=text_extraction.TextExtractionEvaluationMetrics, - ) - name: str = proto.Field( - proto.STRING, - number=1, - ) - annotation_spec_id: str = proto.Field( - proto.STRING, - number=2, - ) - display_name: str = proto.Field( - proto.STRING, - number=15, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - evaluated_example_count: int = proto.Field( - proto.INT32, - number=6, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/operations.py b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/operations.py deleted file mode 100644 index 49fabd8e4da7..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/operations.py +++ /dev/null @@ -1,330 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.automl_v1.types import io -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.automl.v1', - manifest={ - 'OperationMetadata', - 'DeleteOperationMetadata', - 'DeployModelOperationMetadata', - 'UndeployModelOperationMetadata', - 'CreateDatasetOperationMetadata', - 'CreateModelOperationMetadata', - 'ImportDataOperationMetadata', - 'ExportDataOperationMetadata', - 'BatchPredictOperationMetadata', - 'ExportModelOperationMetadata', - }, -) - - -class OperationMetadata(proto.Message): - r"""Metadata used across all long-running operations returned by - the AutoML API. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - delete_details (google.cloud.automl_v1.types.DeleteOperationMetadata): - Details of a Delete operation. - - This field is a member of `oneof`_ ``details``. - deploy_model_details (google.cloud.automl_v1.types.DeployModelOperationMetadata): - Details of a DeployModel operation. - - This field is a member of `oneof`_ ``details``. - undeploy_model_details (google.cloud.automl_v1.types.UndeployModelOperationMetadata): - Details of an UndeployModel operation. - - This field is a member of `oneof`_ ``details``. - create_model_details (google.cloud.automl_v1.types.CreateModelOperationMetadata): - Details of CreateModel operation. - - This field is a member of `oneof`_ ``details``. - create_dataset_details (google.cloud.automl_v1.types.CreateDatasetOperationMetadata): - Details of CreateDataset operation. - - This field is a member of `oneof`_ ``details``. - import_data_details (google.cloud.automl_v1.types.ImportDataOperationMetadata): - Details of ImportData operation. - - This field is a member of `oneof`_ ``details``. - batch_predict_details (google.cloud.automl_v1.types.BatchPredictOperationMetadata): - Details of BatchPredict operation. - - This field is a member of `oneof`_ ``details``. - export_data_details (google.cloud.automl_v1.types.ExportDataOperationMetadata): - Details of ExportData operation. - - This field is a member of `oneof`_ ``details``. - export_model_details (google.cloud.automl_v1.types.ExportModelOperationMetadata): - Details of ExportModel operation. - - This field is a member of `oneof`_ ``details``. - progress_percent (int): - Output only. Progress of operation. Range: [0, 100]. Not - used currently. - partial_failures (MutableSequence[google.rpc.status_pb2.Status]): - Output only. Partial failures encountered. - E.g. single files that couldn't be read. - This field should never exceed 20 entries. - The Status details field will contain standard - GCP error details. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Time when the operation was - created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Time when the operation was - updated for the last time.
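A short sketch (the model name is an illustrative assumption) of inspecting this metadata on a returned long-running operation:

.. code-block:: python

    from google.cloud import automl_v1

    client = automl_v1.AutoMlClient()
    operation = client.deploy_model(
        name="projects/123/locations/us-central1/models/MOD123")
    metadata = operation.metadata  # an OperationMetadata message
    print(metadata.progress_percent, metadata.create_time)
    operation.result()  # block until the operation completes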
- """ - - delete_details: 'DeleteOperationMetadata' = proto.Field( - proto.MESSAGE, - number=8, - oneof='details', - message='DeleteOperationMetadata', - ) - deploy_model_details: 'DeployModelOperationMetadata' = proto.Field( - proto.MESSAGE, - number=24, - oneof='details', - message='DeployModelOperationMetadata', - ) - undeploy_model_details: 'UndeployModelOperationMetadata' = proto.Field( - proto.MESSAGE, - number=25, - oneof='details', - message='UndeployModelOperationMetadata', - ) - create_model_details: 'CreateModelOperationMetadata' = proto.Field( - proto.MESSAGE, - number=10, - oneof='details', - message='CreateModelOperationMetadata', - ) - create_dataset_details: 'CreateDatasetOperationMetadata' = proto.Field( - proto.MESSAGE, - number=30, - oneof='details', - message='CreateDatasetOperationMetadata', - ) - import_data_details: 'ImportDataOperationMetadata' = proto.Field( - proto.MESSAGE, - number=15, - oneof='details', - message='ImportDataOperationMetadata', - ) - batch_predict_details: 'BatchPredictOperationMetadata' = proto.Field( - proto.MESSAGE, - number=16, - oneof='details', - message='BatchPredictOperationMetadata', - ) - export_data_details: 'ExportDataOperationMetadata' = proto.Field( - proto.MESSAGE, - number=21, - oneof='details', - message='ExportDataOperationMetadata', - ) - export_model_details: 'ExportModelOperationMetadata' = proto.Field( - proto.MESSAGE, - number=22, - oneof='details', - message='ExportModelOperationMetadata', - ) - progress_percent: int = proto.Field( - proto.INT32, - number=13, - ) - partial_failures: MutableSequence[status_pb2.Status] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=status_pb2.Status, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - - -class DeleteOperationMetadata(proto.Message): - r"""Details of operations that perform deletes of any entities. - """ - - -class DeployModelOperationMetadata(proto.Message): - r"""Details of DeployModel operation. - """ - - -class UndeployModelOperationMetadata(proto.Message): - r"""Details of UndeployModel operation. - """ - - -class CreateDatasetOperationMetadata(proto.Message): - r"""Details of CreateDataset operation. - """ - - -class CreateModelOperationMetadata(proto.Message): - r"""Details of CreateModel operation. - """ - - -class ImportDataOperationMetadata(proto.Message): - r"""Details of ImportData operation. - """ - - -class ExportDataOperationMetadata(proto.Message): - r"""Details of ExportData operation. - - Attributes: - output_info (google.cloud.automl_v1.types.ExportDataOperationMetadata.ExportDataOutputInfo): - Output only. Information further describing - this export data's output. - """ - - class ExportDataOutputInfo(proto.Message): - r"""Further describes this export data's output. Supplements - [OutputConfig][google.cloud.automl.v1.OutputConfig]. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - gcs_output_directory (str): - The full path of the Google Cloud Storage - directory created, into which the exported data - is written. - - This field is a member of `oneof`_ ``output_location``. 
- """ - - gcs_output_directory: str = proto.Field( - proto.STRING, - number=1, - oneof='output_location', - ) - - output_info: ExportDataOutputInfo = proto.Field( - proto.MESSAGE, - number=1, - message=ExportDataOutputInfo, - ) - - -class BatchPredictOperationMetadata(proto.Message): - r"""Details of BatchPredict operation. - - Attributes: - input_config (google.cloud.automl_v1.types.BatchPredictInputConfig): - Output only. The input config that was given - upon starting this batch predict operation. - output_info (google.cloud.automl_v1.types.BatchPredictOperationMetadata.BatchPredictOutputInfo): - Output only. Information further describing - this batch predict's output. - """ - - class BatchPredictOutputInfo(proto.Message): - r"""Further describes this batch predict's output. Supplements - [BatchPredictOutputConfig][google.cloud.automl.v1.BatchPredictOutputConfig]. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - gcs_output_directory (str): - The full path of the Google Cloud Storage - directory created, into which the prediction - output is written. - - This field is a member of `oneof`_ ``output_location``. - """ - - gcs_output_directory: str = proto.Field( - proto.STRING, - number=1, - oneof='output_location', - ) - - input_config: io.BatchPredictInputConfig = proto.Field( - proto.MESSAGE, - number=1, - message=io.BatchPredictInputConfig, - ) - output_info: BatchPredictOutputInfo = proto.Field( - proto.MESSAGE, - number=2, - message=BatchPredictOutputInfo, - ) - - -class ExportModelOperationMetadata(proto.Message): - r"""Details of ExportModel operation. - - Attributes: - output_info (google.cloud.automl_v1.types.ExportModelOperationMetadata.ExportModelOutputInfo): - Output only. Information further describing - the output of this model export. - """ - - class ExportModelOutputInfo(proto.Message): - r"""Further describes the output of model export. Supplements - [ModelExportOutputConfig][google.cloud.automl.v1.ModelExportOutputConfig]. - - Attributes: - gcs_output_directory (str): - The full path of the Google Cloud Storage - directory created, into which the model will be - exported. - """ - - gcs_output_directory: str = proto.Field( - proto.STRING, - number=1, - ) - - output_info: ExportModelOutputInfo = proto.Field( - proto.MESSAGE, - number=2, - message=ExportModelOutputInfo, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/prediction_service.py b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/prediction_service.py deleted file mode 100644 index 5e0a1fe314f9..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/prediction_service.py +++ /dev/null @@ -1,302 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.automl_v1.types import annotation_payload -from google.cloud.automl_v1.types import data_items -from google.cloud.automl_v1.types import io - - -__protobuf__ = proto.module( - package='google.cloud.automl.v1', - manifest={ - 'PredictRequest', - 'PredictResponse', - 'BatchPredictRequest', - 'BatchPredictResult', - }, -) - - -class PredictRequest(proto.Message): - r"""Request message for - [PredictionService.Predict][google.cloud.automl.v1.PredictionService.Predict]. - - Attributes: - name (str): - Required. Name of the model requested to - serve the prediction. - payload (google.cloud.automl_v1.types.ExamplePayload): - Required. Payload to perform a prediction on. - The payload must match the problem type that the - model was trained to solve. - params (MutableMapping[str, str]): - Additional domain-specific parameters; any string must be up - to 25000 characters long. - - AutoML Vision Classification - - ``score_threshold`` : (float) A value from 0.0 to 1.0. When - the model makes predictions for an image, it will only - produce results that have at least this confidence score. - The default is 0.5. - - AutoML Vision Object Detection - - ``score_threshold`` : (float) When Model detects objects on - the image, it will only produce bounding boxes which have at - least this confidence score. Value in 0 to 1 range, default - is 0.5. - - ``max_bounding_box_count`` : (int64) The maximum number of - bounding boxes returned. The default is 100. The number of - returned bounding boxes might be limited by the server. - - AutoML Tables - - ``feature_importance`` : (boolean) Whether - [feature_importance][google.cloud.automl.v1.TablesModelColumnInfo.feature_importance] - is populated in the returned list of - [TablesAnnotation][google.cloud.automl.v1.TablesAnnotation] - objects. The default is false. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - payload: data_items.ExamplePayload = proto.Field( - proto.MESSAGE, - number=2, - message=data_items.ExamplePayload, - ) - params: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=3, - ) - - -class PredictResponse(proto.Message): - r"""Response message for - [PredictionService.Predict][google.cloud.automl.v1.PredictionService.Predict]. - - Attributes: - payload (MutableSequence[google.cloud.automl_v1.types.AnnotationPayload]): - Prediction result. - AutoML Translation and AutoML Natural Language - Sentiment Analysis return precisely one payload. - preprocessed_input (google.cloud.automl_v1.types.ExamplePayload): - The preprocessed example that AutoML actually makes the - prediction on. Empty if AutoML does not preprocess the input - example. - - For AutoML Natural Language (Classification, Entity - Extraction, and Sentiment Analysis), if the input is a - document, the recognized text is returned in the - [document_text][google.cloud.automl.v1.Document.document_text] - property. - metadata (MutableMapping[str, str]): - Additional domain-specific prediction response metadata. - - AutoML Vision Object Detection - - ``max_bounding_box_count`` : (int64) The maximum number of - bounding boxes to return per image.
- - AutoML Natural Language Sentiment Analysis - - ``sentiment_score`` : (float, deprecated) A value between -1 - and 1; -1 maps to the least positive sentiment, while 1 maps to - the most positive one. The higher the score, the more - positive the sentiment in the document is. Yet these values - are relative to the training data, so e.g. if all data was - positive then -1 is also positive (though the least). - ``sentiment_score`` is not the same as "score" and - "magnitude" from Sentiment Analysis in the Natural Language - API. - """ - - payload: MutableSequence[annotation_payload.AnnotationPayload] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=annotation_payload.AnnotationPayload, - ) - preprocessed_input: data_items.ExamplePayload = proto.Field( - proto.MESSAGE, - number=3, - message=data_items.ExamplePayload, - ) - metadata: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=2, - ) - - -class BatchPredictRequest(proto.Message): - r"""Request message for - [PredictionService.BatchPredict][google.cloud.automl.v1.PredictionService.BatchPredict]. - - Attributes: - name (str): - Required. Name of the model requested to - serve the batch prediction. - input_config (google.cloud.automl_v1.types.BatchPredictInputConfig): - Required. The input configuration for batch - prediction. - output_config (google.cloud.automl_v1.types.BatchPredictOutputConfig): - Required. The configuration specifying where - output predictions should be written. - params (MutableMapping[str, str]): - Additional domain-specific parameters for the predictions; - any string must be up to 25000 characters long. - - AutoML Natural Language Classification - - ``score_threshold`` : (float) A value from 0.0 to 1.0. When - the model makes predictions for a text snippet, it will only - produce results that have at least this confidence score. - The default is 0.5. - - AutoML Vision Classification - - ``score_threshold`` : (float) A value from 0.0 to 1.0. When - the model makes predictions for an image, it will only - produce results that have at least this confidence score. - The default is 0.5. - - AutoML Vision Object Detection - - ``score_threshold`` : (float) When Model detects objects on - the image, it will only produce bounding boxes which have at - least this confidence score. Value in 0 to 1 range, default - is 0.5. - - ``max_bounding_box_count`` : (int64) The maximum number of - bounding boxes returned per image. The default is 100; the - number of bounding boxes returned might be limited by the - server. - - AutoML Video Intelligence Classification - - ``score_threshold`` : (float) A value from 0.0 to 1.0. When - the model makes predictions for a video, it will only - produce results that have at least this confidence score. - The default is 0.5. - - ``segment_classification`` : (boolean) Set to true to - request segment-level classification. AutoML Video - Intelligence returns labels and their confidence scores for - the entire segment of the video that the user specified in the - request configuration. The default is true. - - ``shot_classification`` : (boolean) Set to true to request - shot-level classification. AutoML Video Intelligence - determines the boundaries for each camera shot in the entire - segment of the video that the user specified in the request - configuration. AutoML Video Intelligence then returns labels - and their confidence scores for each detected shot, along - with the start and end time of the shot. The default is - false.
- - WARNING: Model evaluation is not done for this - classification type; the quality of it depends on training - data, but there are no metrics provided to describe that - quality. - - ``1s_interval_classification`` : (boolean) Set to true to - request classification for a video at one-second intervals. - AutoML Video Intelligence returns labels and their - confidence scores for each second of the entire segment of - the video that the user specified in the request configuration. - The default is false. - - WARNING: Model evaluation is not done for this - classification type; the quality of it depends on training - data, but there are no metrics provided to describe that - quality. - - AutoML Video Intelligence Object Tracking - - ``score_threshold`` : (float) When Model detects objects on - video frames, it will only produce bounding boxes which have - at least this confidence score. Value in 0 to 1 range, - default is 0.5. - - ``max_bounding_box_count`` : (int64) The maximum number of - bounding boxes returned per image. The default is 100; the - number of bounding boxes returned might be limited by the - server. - - ``min_bounding_box_size`` : (float) Only bounding boxes whose - shortest edge is at least this long, as a relative value of the - video frame size, are returned. Value in 0 to 1 range. - Default is 0. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - input_config: io.BatchPredictInputConfig = proto.Field( - proto.MESSAGE, - number=3, - message=io.BatchPredictInputConfig, - ) - output_config: io.BatchPredictOutputConfig = proto.Field( - proto.MESSAGE, - number=4, - message=io.BatchPredictOutputConfig, - ) - params: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=5, - ) - - -class BatchPredictResult(proto.Message): - r"""Result of the Batch Predict. This message is returned in - [response][google.longrunning.Operation.response] of the operation - returned by the - [PredictionService.BatchPredict][google.cloud.automl.v1.PredictionService.BatchPredict]. - - Attributes: - metadata (MutableMapping[str, str]): - Additional domain-specific prediction response metadata. - - AutoML Vision Object Detection - - ``max_bounding_box_count`` : (int64) The maximum number of - bounding boxes returned per image. - - AutoML Video Intelligence Object Tracking - - ``max_bounding_box_count`` : (int64) The maximum number of - bounding boxes returned per frame. - """ - - metadata: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=1, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/service.py b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/service.py deleted file mode 100644 index 3b371fe7689a..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/service.py +++ /dev/null @@ -1,621 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
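# A minimal, illustrative sketch (hypothetical project and bucket names) of the
# dataset workflow these request messages drive: create a dataset, then import
# training data from Google Cloud Storage:
#
#     from google.cloud import automl_v1
#
#     client = automl_v1.AutoMlClient()
#     dataset = automl_v1.Dataset(
#         display_name="my_dataset",
#         text_classification_dataset_metadata=automl_v1.TextClassificationDatasetMetadata(
#             classification_type=automl_v1.ClassificationType.MULTICLASS))
#     created = client.create_dataset(
#         parent="projects/123/locations/us-central1", dataset=dataset).result()
#     gcs_source = automl_v1.GcsSource(input_uris=["gs://my-bucket/train.csv"])
#     client.import_data(
#         name=created.name,
#         input_config=automl_v1.InputConfig(gcs_source=gcs_source)).result()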
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.automl_v1.types import dataset as gca_dataset -from google.cloud.automl_v1.types import image -from google.cloud.automl_v1.types import io -from google.cloud.automl_v1.types import model as gca_model -from google.cloud.automl_v1.types import model_evaluation as gca_model_evaluation -from google.protobuf import field_mask_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.automl.v1', - manifest={ - 'CreateDatasetRequest', - 'GetDatasetRequest', - 'ListDatasetsRequest', - 'ListDatasetsResponse', - 'UpdateDatasetRequest', - 'DeleteDatasetRequest', - 'ImportDataRequest', - 'ExportDataRequest', - 'GetAnnotationSpecRequest', - 'CreateModelRequest', - 'GetModelRequest', - 'ListModelsRequest', - 'ListModelsResponse', - 'DeleteModelRequest', - 'UpdateModelRequest', - 'DeployModelRequest', - 'UndeployModelRequest', - 'ExportModelRequest', - 'GetModelEvaluationRequest', - 'ListModelEvaluationsRequest', - 'ListModelEvaluationsResponse', - }, -) - - -class CreateDatasetRequest(proto.Message): - r"""Request message for - [AutoMl.CreateDataset][google.cloud.automl.v1.AutoMl.CreateDataset]. - - Attributes: - parent (str): - Required. The resource name of the project to - create the dataset for. - dataset (google.cloud.automl_v1.types.Dataset): - Required. The dataset to create. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - dataset: gca_dataset.Dataset = proto.Field( - proto.MESSAGE, - number=2, - message=gca_dataset.Dataset, - ) - - -class GetDatasetRequest(proto.Message): - r"""Request message for - [AutoMl.GetDataset][google.cloud.automl.v1.AutoMl.GetDataset]. - - Attributes: - name (str): - Required. The resource name of the dataset to - retrieve. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListDatasetsRequest(proto.Message): - r"""Request message for - [AutoMl.ListDatasets][google.cloud.automl.v1.AutoMl.ListDatasets]. - - Attributes: - parent (str): - Required. The resource name of the project - from which to list datasets. - filter (str): - An expression for filtering the results of the request. - - - ``dataset_metadata`` - for existence of the case (e.g. - ``image_classification_dataset_metadata:*``). Some - examples of using the filter are: - - - ``translation_dataset_metadata:*`` --> The dataset has - ``translation_dataset_metadata``. - page_size (int): - Requested page size. Server may return fewer - results than requested. If unspecified, server - will pick a default size. - page_token (str): - A token identifying a page of results for the server to - return. Typically obtained via - [ListDatasetsResponse.next_page_token][google.cloud.automl.v1.ListDatasetsResponse.next_page_token] - of the previous - [AutoMl.ListDatasets][google.cloud.automl.v1.AutoMl.ListDatasets] - call. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - filter: str = proto.Field( - proto.STRING, - number=3, - ) - page_size: int = proto.Field( - proto.INT32, - number=4, - ) - page_token: str = proto.Field( - proto.STRING, - number=6, - ) - - -class ListDatasetsResponse(proto.Message): - r"""Response message for - [AutoMl.ListDatasets][google.cloud.automl.v1.AutoMl.ListDatasets]. - - Attributes: - datasets (MutableSequence[google.cloud.automl_v1.types.Dataset]): - The datasets read. - next_page_token (str): - A token to retrieve next page of results.
Pass to - [ListDatasetsRequest.page_token][google.cloud.automl.v1.ListDatasetsRequest.page_token] - to obtain that page. - """ - - @property - def raw_page(self): - return self - - datasets: MutableSequence[gca_dataset.Dataset] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_dataset.Dataset, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class UpdateDatasetRequest(proto.Message): - r"""Request message for - [AutoMl.UpdateDataset][google.cloud.automl.v1.AutoMl.UpdateDataset] - - Attributes: - dataset (google.cloud.automl_v1.types.Dataset): - Required. The dataset which replaces the - resource on the server. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. The update mask applies to the - resource. - """ - - dataset: gca_dataset.Dataset = proto.Field( - proto.MESSAGE, - number=1, - message=gca_dataset.Dataset, - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class DeleteDatasetRequest(proto.Message): - r"""Request message for - [AutoMl.DeleteDataset][google.cloud.automl.v1.AutoMl.DeleteDataset]. - - Attributes: - name (str): - Required. The resource name of the dataset to - delete. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ImportDataRequest(proto.Message): - r"""Request message for - [AutoMl.ImportData][google.cloud.automl.v1.AutoMl.ImportData]. - - Attributes: - name (str): - Required. Dataset name. Dataset must already - exist. All imported annotations and examples - will be added. - input_config (google.cloud.automl_v1.types.InputConfig): - Required. The desired input location and its - domain specific semantics, if any. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - input_config: io.InputConfig = proto.Field( - proto.MESSAGE, - number=3, - message=io.InputConfig, - ) - - -class ExportDataRequest(proto.Message): - r"""Request message for - [AutoMl.ExportData][google.cloud.automl.v1.AutoMl.ExportData]. - - Attributes: - name (str): - Required. The resource name of the dataset. - output_config (google.cloud.automl_v1.types.OutputConfig): - Required. The desired output location. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - output_config: io.OutputConfig = proto.Field( - proto.MESSAGE, - number=3, - message=io.OutputConfig, - ) - - -class GetAnnotationSpecRequest(proto.Message): - r"""Request message for - [AutoMl.GetAnnotationSpec][google.cloud.automl.v1.AutoMl.GetAnnotationSpec]. - - Attributes: - name (str): - Required. The resource name of the annotation - spec to retrieve. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateModelRequest(proto.Message): - r"""Request message for - [AutoMl.CreateModel][google.cloud.automl.v1.AutoMl.CreateModel]. - - Attributes: - parent (str): - Required. Resource name of the parent project - where the model is being created. - model (google.cloud.automl_v1.types.Model): - Required. The model to create. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - model: gca_model.Model = proto.Field( - proto.MESSAGE, - number=4, - message=gca_model.Model, - ) - - -class GetModelRequest(proto.Message): - r"""Request message for - [AutoMl.GetModel][google.cloud.automl.v1.AutoMl.GetModel]. - - Attributes: - name (str): - Required. Resource name of the model. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListModelsRequest(proto.Message): - r"""Request message for - [AutoMl.ListModels][google.cloud.automl.v1.AutoMl.ListModels]. - - Attributes: - parent (str): - Required. Resource name of the project, from - which to list the models. - filter (str): - An expression for filtering the results of the request. - - - ``model_metadata`` - for existence of the case (e.g. - ``video_classification_model_metadata:*``). - - - ``dataset_id`` - for = or !=. Some examples of using the - filter are: - - - ``image_classification_model_metadata:*`` --> The model - has ``image_classification_model_metadata``. - - - ``dataset_id=5`` --> The model was created from a dataset - with ID 5. - page_size (int): - Requested page size. - page_token (str): - A token identifying a page of results for the server to - return Typically obtained via - [ListModelsResponse.next_page_token][google.cloud.automl.v1.ListModelsResponse.next_page_token] - of the previous - [AutoMl.ListModels][google.cloud.automl.v1.AutoMl.ListModels] - call. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - filter: str = proto.Field( - proto.STRING, - number=3, - ) - page_size: int = proto.Field( - proto.INT32, - number=4, - ) - page_token: str = proto.Field( - proto.STRING, - number=6, - ) - - -class ListModelsResponse(proto.Message): - r"""Response message for - [AutoMl.ListModels][google.cloud.automl.v1.AutoMl.ListModels]. - - Attributes: - model (MutableSequence[google.cloud.automl_v1.types.Model]): - List of models in the requested page. - next_page_token (str): - A token to retrieve next page of results. Pass to - [ListModelsRequest.page_token][google.cloud.automl.v1.ListModelsRequest.page_token] - to obtain that page. - """ - - @property - def raw_page(self): - return self - - model: MutableSequence[gca_model.Model] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_model.Model, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteModelRequest(proto.Message): - r"""Request message for - [AutoMl.DeleteModel][google.cloud.automl.v1.AutoMl.DeleteModel]. - - Attributes: - name (str): - Required. Resource name of the model being - deleted. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class UpdateModelRequest(proto.Message): - r"""Request message for - [AutoMl.UpdateModel][google.cloud.automl.v1.AutoMl.UpdateModel] - - Attributes: - model (google.cloud.automl_v1.types.Model): - Required. The model which replaces the - resource on the server. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. The update mask applies to the - resource. - """ - - model: gca_model.Model = proto.Field( - proto.MESSAGE, - number=1, - message=gca_model.Model, - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class DeployModelRequest(proto.Message): - r"""Request message for - [AutoMl.DeployModel][google.cloud.automl.v1.AutoMl.DeployModel]. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - image_object_detection_model_deployment_metadata (google.cloud.automl_v1.types.ImageObjectDetectionModelDeploymentMetadata): - Model deployment metadata specific to Image - Object Detection. - - This field is a member of `oneof`_ ``model_deployment_metadata``. - image_classification_model_deployment_metadata (google.cloud.automl_v1.types.ImageClassificationModelDeploymentMetadata): - Model deployment metadata specific to Image - Classification. - - This field is a member of `oneof`_ ``model_deployment_metadata``. - name (str): - Required. Resource name of the model to - deploy. - """ - - image_object_detection_model_deployment_metadata: image.ImageObjectDetectionModelDeploymentMetadata = proto.Field( - proto.MESSAGE, - number=2, - oneof='model_deployment_metadata', - message=image.ImageObjectDetectionModelDeploymentMetadata, - ) - image_classification_model_deployment_metadata: image.ImageClassificationModelDeploymentMetadata = proto.Field( - proto.MESSAGE, - number=4, - oneof='model_deployment_metadata', - message=image.ImageClassificationModelDeploymentMetadata, - ) - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class UndeployModelRequest(proto.Message): - r"""Request message for - [AutoMl.UndeployModel][google.cloud.automl.v1.AutoMl.UndeployModel]. - - Attributes: - name (str): - Required. Resource name of the model to - undeploy. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ExportModelRequest(proto.Message): - r"""Request message for - [AutoMl.ExportModel][google.cloud.automl.v1.AutoMl.ExportModel]. - Models need to be enabled for exporting, otherwise an error code - will be returned. - - Attributes: - name (str): - Required. The resource name of the model to - export. - output_config (google.cloud.automl_v1.types.ModelExportOutputConfig): - Required. The desired output location and - configuration. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - output_config: io.ModelExportOutputConfig = proto.Field( - proto.MESSAGE, - number=3, - message=io.ModelExportOutputConfig, - ) - - -class GetModelEvaluationRequest(proto.Message): - r"""Request message for - [AutoMl.GetModelEvaluation][google.cloud.automl.v1.AutoMl.GetModelEvaluation]. - - Attributes: - name (str): - Required. Resource name for the model - evaluation. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListModelEvaluationsRequest(proto.Message): - r"""Request message for - [AutoMl.ListModelEvaluations][google.cloud.automl.v1.AutoMl.ListModelEvaluations]. - - Attributes: - parent (str): - Required. Resource name of the model to list - the model evaluations for. If modelId is set as - "-", this will list model evaluations from - across all models of the parent location. - filter (str): - Required. An expression for filtering the results of the - request. - - - ``annotation_spec_id`` - for =, != or existence. See - example below for the last. - - Some examples of using the filter are: - - - ``annotation_spec_id!=4`` --> The model evaluation was - done for annotation spec with ID different than 4. - - ``NOT annotation_spec_id:*`` --> The model evaluation was - done for aggregate of all annotation specs. - page_size (int): - Requested page size. - page_token (str): - A token identifying a page of results for the server to - return. 
Typically obtained via - [ListModelEvaluationsResponse.next_page_token][google.cloud.automl.v1.ListModelEvaluationsResponse.next_page_token] - of the previous - [AutoMl.ListModelEvaluations][google.cloud.automl.v1.AutoMl.ListModelEvaluations] - call. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - filter: str = proto.Field( - proto.STRING, - number=3, - ) - page_size: int = proto.Field( - proto.INT32, - number=4, - ) - page_token: str = proto.Field( - proto.STRING, - number=6, - ) - - -class ListModelEvaluationsResponse(proto.Message): - r"""Response message for - [AutoMl.ListModelEvaluations][google.cloud.automl.v1.AutoMl.ListModelEvaluations]. - - Attributes: - model_evaluation (MutableSequence[google.cloud.automl_v1.types.ModelEvaluation]): - List of model evaluations in the requested - page. - next_page_token (str): - A token to retrieve the next page of results. Pass to the - [ListModelEvaluationsRequest.page_token][google.cloud.automl.v1.ListModelEvaluationsRequest.page_token] - field of a new - [AutoMl.ListModelEvaluations][google.cloud.automl.v1.AutoMl.ListModelEvaluations] - request to obtain that page. - """ - - @property - def raw_page(self): - return self - - model_evaluation: MutableSequence[gca_model_evaluation.ModelEvaluation] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_model_evaluation.ModelEvaluation, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/text.py b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/text.py deleted file mode 100644 index 9549adcc4f0b..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/text.py +++ /dev/null @@ -1,104 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.automl_v1.types import classification - - -__protobuf__ = proto.module( - package='google.cloud.automl.v1', - manifest={ - 'TextClassificationDatasetMetadata', - 'TextClassificationModelMetadata', - 'TextExtractionDatasetMetadata', - 'TextExtractionModelMetadata', - 'TextSentimentDatasetMetadata', - 'TextSentimentModelMetadata', - }, -) - - -class TextClassificationDatasetMetadata(proto.Message): - r"""Dataset metadata for classification. - - Attributes: - classification_type (google.cloud.automl_v1.types.ClassificationType): - Required. Type of the classification problem. - """ - - classification_type: classification.ClassificationType = proto.Field( - proto.ENUM, - number=1, - enum=classification.ClassificationType, - ) - - -class TextClassificationModelMetadata(proto.Message): - r"""Model metadata that is specific to text classification. 
- - Attributes: - classification_type (google.cloud.automl_v1.types.ClassificationType): - Output only. Classification type of the - dataset used to train this model. - """ - - classification_type: classification.ClassificationType = proto.Field( - proto.ENUM, - number=3, - enum=classification.ClassificationType, - ) - - -class TextExtractionDatasetMetadata(proto.Message): - r"""Dataset metadata that is specific to text extraction - """ - - -class TextExtractionModelMetadata(proto.Message): - r"""Model metadata that is specific to text extraction. - """ - - -class TextSentimentDatasetMetadata(proto.Message): - r"""Dataset metadata for text sentiment. - - Attributes: - sentiment_max (int): - Required. A sentiment is expressed as an integer ordinal, - where higher value means a more positive sentiment. The - range of sentiments that will be used is between 0 and - sentiment_max (inclusive on both ends), and all the values - in the range must be represented in the dataset before a - model can be created. sentiment_max value must be between 1 - and 10 (inclusive). - """ - - sentiment_max: int = proto.Field( - proto.INT32, - number=1, - ) - - -class TextSentimentModelMetadata(proto.Message): - r"""Model metadata that is specific to text sentiment. - """ - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/text_extraction.py b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/text_extraction.py deleted file mode 100644 index 63750fad5c78..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/text_extraction.py +++ /dev/null @@ -1,125 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.automl_v1.types import text_segment as gca_text_segment - - -__protobuf__ = proto.module( - package='google.cloud.automl.v1', - manifest={ - 'TextExtractionAnnotation', - 'TextExtractionEvaluationMetrics', - }, -) - - -class TextExtractionAnnotation(proto.Message): - r"""Annotation for identifying spans of text. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - text_segment (google.cloud.automl_v1.types.TextSegment): - An entity annotation will set this, which is - the part of the original text to which the - annotation pertains. - - This field is a member of `oneof`_ ``annotation``. - score (float): - Output only. A confidence estimate between - 0.0 and 1.0. A higher value means greater - confidence in correctness of the annotation. 
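- (See ``TextExtractionEvaluationMetrics.ConfidenceMetricsEntry`` - below: only annotations whose score is at least a given - confidence threshold are treated as ones the model would - return.)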
- """ - - text_segment: gca_text_segment.TextSegment = proto.Field( - proto.MESSAGE, - number=3, - oneof='annotation', - message=gca_text_segment.TextSegment, - ) - score: float = proto.Field( - proto.FLOAT, - number=1, - ) - - -class TextExtractionEvaluationMetrics(proto.Message): - r"""Model evaluation metrics for text extraction problems. - - Attributes: - au_prc (float): - Output only. The Area under precision recall - curve metric. - confidence_metrics_entries (MutableSequence[google.cloud.automl_v1.types.TextExtractionEvaluationMetrics.ConfidenceMetricsEntry]): - Output only. Metrics that have confidence - thresholds. Precision-recall curve can be - derived from it. - """ - - class ConfidenceMetricsEntry(proto.Message): - r"""Metrics for a single confidence threshold. - - Attributes: - confidence_threshold (float): - Output only. The confidence threshold value - used to compute the metrics. Only annotations - with score of at least this threshold are - considered to be ones the model would return. - recall (float): - Output only. Recall under the given - confidence threshold. - precision (float): - Output only. Precision under the given - confidence threshold. - f1_score (float): - Output only. The harmonic mean of recall and - precision. - """ - - confidence_threshold: float = proto.Field( - proto.FLOAT, - number=1, - ) - recall: float = proto.Field( - proto.FLOAT, - number=3, - ) - precision: float = proto.Field( - proto.FLOAT, - number=4, - ) - f1_score: float = proto.Field( - proto.FLOAT, - number=5, - ) - - au_prc: float = proto.Field( - proto.FLOAT, - number=1, - ) - confidence_metrics_entries: MutableSequence[ConfidenceMetricsEntry] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=ConfidenceMetricsEntry, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/text_segment.py b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/text_segment.py deleted file mode 100644 index 7542cf875a80..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/text_segment.py +++ /dev/null @@ -1,63 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.automl.v1', - manifest={ - 'TextSegment', - }, -) - - -class TextSegment(proto.Message): - r"""A contiguous part of a text (string), assuming it has an - UTF-8 NFC encoding. - - Attributes: - content (str): - Output only. The content of the TextSegment. - start_offset (int): - Required. Zero-based character index of the - first character of the text segment (counting - characters from the beginning of the text). - end_offset (int): - Required. 
Zero-based character index of the first character - past the end of the text segment (counting characters from - the beginning of the text). The character at the end_offset - is NOT included in the text segment. - """ - - content: str = proto.Field( - proto.STRING, - number=3, - ) - start_offset: int = proto.Field( - proto.INT64, - number=1, - ) - end_offset: int = proto.Field( - proto.INT64, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/text_sentiment.py b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/text_sentiment.py deleted file mode 100644 index a164577af9fa..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/text_sentiment.py +++ /dev/null @@ -1,132 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.automl_v1.types import classification - - -__protobuf__ = proto.module( - package='google.cloud.automl.v1', - manifest={ - 'TextSentimentAnnotation', - 'TextSentimentEvaluationMetrics', - }, -) - - -class TextSentimentAnnotation(proto.Message): - r"""Contains annotation details specific to text sentiment. - - Attributes: - sentiment (int): - Output only. The sentiment, with the same semantics as given - to - [AutoMl.ImportData][google.cloud.automl.v1.AutoMl.ImportData] - when populating the dataset from which the model used for - the prediction had been trained. The sentiment values are - between 0 and - Dataset.text_sentiment_dataset_metadata.sentiment_max - (inclusive), with a higher value meaning a more positive - sentiment. They are completely relative, i.e. 0 means least - positive sentiment and sentiment_max means the most positive - from the sentiments present in the train data. Therefore - e.g. if train data had only negative sentiment, then - sentiment_max would still be negative (although least - negative). The sentiment shouldn't be confused with "score" - or "magnitude" from the previous Natural Language Sentiment - Analysis API. - """ - - sentiment: int = proto.Field( - proto.INT32, - number=1, - ) - - -class TextSentimentEvaluationMetrics(proto.Message): - r"""Model evaluation metrics for text sentiment problems. - - Attributes: - precision (float): - Output only. Precision. - recall (float): - Output only. Recall. - f1_score (float): - Output only. The harmonic mean of recall and - precision. - mean_absolute_error (float): - Output only. Mean absolute error. Only set - for the overall model evaluation, not for - evaluation of a single annotation spec. - mean_squared_error (float): - Output only. Mean squared error. Only set for - the overall model evaluation, not for evaluation - of a single annotation spec. - linear_kappa (float): - Output only. Linear weighted kappa. 
Only set - for the overall model evaluation, not for - evaluation of a single annotation spec. - quadratic_kappa (float): - Output only. Quadratic weighted kappa. Only - set for the overall model evaluation, not for - evaluation of a single annotation spec. - confusion_matrix (google.cloud.automl_v1.types.ClassificationEvaluationMetrics.ConfusionMatrix): - Output only. Confusion matrix of the - evaluation. Only set for the overall model - evaluation, not for evaluation of a single - annotation spec. - """ - - precision: float = proto.Field( - proto.FLOAT, - number=1, - ) - recall: float = proto.Field( - proto.FLOAT, - number=2, - ) - f1_score: float = proto.Field( - proto.FLOAT, - number=3, - ) - mean_absolute_error: float = proto.Field( - proto.FLOAT, - number=4, - ) - mean_squared_error: float = proto.Field( - proto.FLOAT, - number=5, - ) - linear_kappa: float = proto.Field( - proto.FLOAT, - number=6, - ) - quadratic_kappa: float = proto.Field( - proto.FLOAT, - number=7, - ) - confusion_matrix: classification.ClassificationEvaluationMetrics.ConfusionMatrix = proto.Field( - proto.MESSAGE, - number=8, - message=classification.ClassificationEvaluationMetrics.ConfusionMatrix, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/translation.py b/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/translation.py deleted file mode 100644 index 162230927731..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/google/cloud/automl_v1/types/translation.py +++ /dev/null @@ -1,125 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.automl_v1.types import data_items - - -__protobuf__ = proto.module( - package='google.cloud.automl.v1', - manifest={ - 'TranslationDatasetMetadata', - 'TranslationEvaluationMetrics', - 'TranslationModelMetadata', - 'TranslationAnnotation', - }, -) - - -class TranslationDatasetMetadata(proto.Message): - r"""Dataset metadata that is specific to translation. - - Attributes: - source_language_code (str): - Required. The BCP-47 language code of the - source language. - target_language_code (str): - Required. The BCP-47 language code of the - target language. - """ - - source_language_code: str = proto.Field( - proto.STRING, - number=1, - ) - target_language_code: str = proto.Field( - proto.STRING, - number=2, - ) - - -class TranslationEvaluationMetrics(proto.Message): - r"""Evaluation metrics for the dataset. - - Attributes: - bleu_score (float): - Output only. BLEU score. - base_bleu_score (float): - Output only. BLEU score for base model. 
- """ - - bleu_score: float = proto.Field( - proto.DOUBLE, - number=1, - ) - base_bleu_score: float = proto.Field( - proto.DOUBLE, - number=2, - ) - - -class TranslationModelMetadata(proto.Message): - r"""Model metadata that is specific to translation. - - Attributes: - base_model (str): - The resource name of the model to use as a baseline to train - the custom model. If unset, we use the default base model - provided by Google Translate. Format: - ``projects/{project_id}/locations/{location_id}/models/{model_id}`` - source_language_code (str): - Output only. Inferred from the dataset. - The source language (The BCP-47 language code) - that is used for training. - target_language_code (str): - Output only. The target language (The BCP-47 - language code) that is used for training. - """ - - base_model: str = proto.Field( - proto.STRING, - number=1, - ) - source_language_code: str = proto.Field( - proto.STRING, - number=2, - ) - target_language_code: str = proto.Field( - proto.STRING, - number=3, - ) - - -class TranslationAnnotation(proto.Message): - r"""Annotation details specific to translation. - - Attributes: - translated_content (google.cloud.automl_v1.types.TextSnippet): - Output only . The translated content. - """ - - translated_content: data_items.TextSnippet = proto.Field( - proto.MESSAGE, - number=1, - message=data_items.TextSnippet, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1/mypy.ini b/owl-bot-staging/google-cloud-automl/v1/mypy.ini deleted file mode 100644 index 574c5aed394b..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/google-cloud-automl/v1/noxfile.py b/owl-bot-staging/google-cloud-automl/v1/noxfile.py deleted file mode 100644 index 429785851b92..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/noxfile.py +++ /dev/null @@ -1,280 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -import pathlib -import re -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", - "3.12", - "3.13", -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = 'google-cloud-automl' - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.13" - -# Default sessions (registered via nox.options so nox picks them up; note the -# comma after "check_lower_bounds" -- without it the adjacent string literals -# would silently concatenate into one bogus session name). -nox.options.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds", - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "prerelease_deps", -] - -@nox.session(python=ALL_PYTHON) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def unit(session, protobuf_implementation): - """Run the unit test suite.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") - - # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. - # The 'cpp' implementation requires Protobuf<4. - if protobuf_implementation == "cpp": - session.install("protobuf<4") - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/automl_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - -@nox.session(python=ALL_PYTHON[-1]) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def prerelease_deps(session, protobuf_implementation): - """Run the unit test suite against pre-release versions of dependencies.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - # Install test environment dependencies - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - - # Install the package without dependencies - session.install('-e', '.', '--no-deps') - - # We test the minimum dependency versions using the minimum Python - # version so the lowest python runtime that we test has a corresponding constraints - # file, located at `testing/constraints--.txt`, which contains all of the - # dependencies and extras. - with open( - CURRENT_DIRECTORY - / "testing" - / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. 
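- # (The regex below captures the bare package name from each "pkg==version" - # pin, so the dependencies can be installed by name rather than at their - # pinned versions.)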
- constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - session.install(*constraints_deps) - - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 - "grpcio!=1.67.0rc1", - "grpcio-status", - "protobuf", - "proto-plus", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) - - # Remaining dependencies - other_deps = [ - "requests", - ] - session.install(*other_deps) - - # Print out prerelease package versions - - session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") - session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("python", "-c", "import grpc; print(grpc.__version__)") - session.run( - "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" - ) - session.run( - "python", "-c", "import proto; print(proto.__version__)" - ) - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/automl_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. - """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. 
- """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) diff --git a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_create_dataset_async.py b/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_create_dataset_async.py deleted file mode 100644 index 0294235a59c1..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_create_dataset_async.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDataset -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1_generated_AutoMl_CreateDataset_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1 - - -async def sample_create_dataset(): - # Create a client - client = automl_v1.AutoMlAsyncClient() - - # Initialize request argument(s) - dataset = automl_v1.Dataset() - dataset.translation_dataset_metadata.source_language_code = "source_language_code_value" - dataset.translation_dataset_metadata.target_language_code = "target_language_code_value" - - request = automl_v1.CreateDatasetRequest( - parent="parent_value", - dataset=dataset, - ) - - # Make the request - operation = client.create_dataset(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END automl_v1_generated_AutoMl_CreateDataset_async] diff --git a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_create_dataset_sync.py b/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_create_dataset_sync.py deleted file mode 100644 index d41ffe2d0e21..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_create_dataset_sync.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDataset -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1_generated_AutoMl_CreateDataset_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1 - - -def sample_create_dataset(): - # Create a client - client = automl_v1.AutoMlClient() - - # Initialize request argument(s) - dataset = automl_v1.Dataset() - dataset.translation_dataset_metadata.source_language_code = "source_language_code_value" - dataset.translation_dataset_metadata.target_language_code = "target_language_code_value" - - request = automl_v1.CreateDatasetRequest( - parent="parent_value", - dataset=dataset, - ) - - # Make the request - operation = client.create_dataset(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END automl_v1_generated_AutoMl_CreateDataset_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_create_model_async.py b/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_create_model_async.py deleted file mode 100644 index 7f5ccdacf7f0..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_create_model_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateModel -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1_generated_AutoMl_CreateModel_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1 - - -async def sample_create_model(): - # Create a client - client = automl_v1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1.CreateModelRequest( - parent="parent_value", - ) - - # Make the request - operation = client.create_model(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END automl_v1_generated_AutoMl_CreateModel_async] diff --git a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_create_model_sync.py b/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_create_model_sync.py deleted file mode 100644 index 2de803ba5337..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_create_model_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateModel -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1_generated_AutoMl_CreateModel_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1 - - -def sample_create_model(): - # Create a client - client = automl_v1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1.CreateModelRequest( - parent="parent_value", - ) - - # Make the request - operation = client.create_model(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END automl_v1_generated_AutoMl_CreateModel_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_delete_dataset_async.py b/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_delete_dataset_async.py deleted file mode 100644 index 7fc82eab02e9..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_delete_dataset_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDataset -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1_generated_AutoMl_DeleteDataset_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1 - - -async def sample_delete_dataset(): - # Create a client - client = automl_v1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1.DeleteDatasetRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_dataset(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END automl_v1_generated_AutoMl_DeleteDataset_async] diff --git a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_delete_dataset_sync.py b/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_delete_dataset_sync.py deleted file mode 100644 index 97b9b69a2edf..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_delete_dataset_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDataset -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1_generated_AutoMl_DeleteDataset_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1 - - -def sample_delete_dataset(): - # Create a client - client = automl_v1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1.DeleteDatasetRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_dataset(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END automl_v1_generated_AutoMl_DeleteDataset_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_delete_model_async.py b/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_delete_model_async.py deleted file mode 100644 index f47caeda554a..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_delete_model_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteModel -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1_generated_AutoMl_DeleteModel_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1 - - -async def sample_delete_model(): - # Create a client - client = automl_v1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1.DeleteModelRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_model(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END automl_v1_generated_AutoMl_DeleteModel_async] diff --git a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_delete_model_sync.py b/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_delete_model_sync.py deleted file mode 100644 index cb98869ec57d..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_delete_model_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteModel -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1_generated_AutoMl_DeleteModel_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1 - - -def sample_delete_model(): - # Create a client - client = automl_v1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1.DeleteModelRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_model(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END automl_v1_generated_AutoMl_DeleteModel_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_deploy_model_async.py b/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_deploy_model_async.py deleted file mode 100644 index 0a2221cfdc4a..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_deploy_model_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeployModel -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1_generated_AutoMl_DeployModel_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1 - - -async def sample_deploy_model(): - # Create a client - client = automl_v1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1.DeployModelRequest( - name="name_value", - ) - - # Make the request - operation = client.deploy_model(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END automl_v1_generated_AutoMl_DeployModel_async] diff --git a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_deploy_model_sync.py b/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_deploy_model_sync.py deleted file mode 100644 index 496838c550b9..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_deploy_model_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeployModel -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1_generated_AutoMl_DeployModel_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1 - - -def sample_deploy_model(): - # Create a client - client = automl_v1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1.DeployModelRequest( - name="name_value", - ) - - # Make the request - operation = client.deploy_model(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END automl_v1_generated_AutoMl_DeployModel_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_export_data_async.py b/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_export_data_async.py deleted file mode 100644 index a6efe54850f8..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_export_data_async.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ExportData -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1_generated_AutoMl_ExportData_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1 - - -async def sample_export_data(): - # Create a client - client = automl_v1.AutoMlAsyncClient() - - # Initialize request argument(s) - output_config = automl_v1.OutputConfig() - output_config.gcs_destination.output_uri_prefix = "output_uri_prefix_value" - - request = automl_v1.ExportDataRequest( - name="name_value", - output_config=output_config, - ) - - # Make the request - operation = client.export_data(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END automl_v1_generated_AutoMl_ExportData_async] diff --git a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_export_data_sync.py b/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_export_data_sync.py deleted file mode 100644 index e67dab03a8cc..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_export_data_sync.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ExportData -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1_generated_AutoMl_ExportData_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1 - - -def sample_export_data(): - # Create a client - client = automl_v1.AutoMlClient() - - # Initialize request argument(s) - output_config = automl_v1.OutputConfig() - output_config.gcs_destination.output_uri_prefix = "output_uri_prefix_value" - - request = automl_v1.ExportDataRequest( - name="name_value", - output_config=output_config, - ) - - # Make the request - operation = client.export_data(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END automl_v1_generated_AutoMl_ExportData_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_export_model_async.py b/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_export_model_async.py deleted file mode 100644 index fda0ba543b50..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_export_model_async.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ExportModel -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1_generated_AutoMl_ExportModel_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1 - - -async def sample_export_model(): - # Create a client - client = automl_v1.AutoMlAsyncClient() - - # Initialize request argument(s) - output_config = automl_v1.ModelExportOutputConfig() - output_config.gcs_destination.output_uri_prefix = "output_uri_prefix_value" - - request = automl_v1.ExportModelRequest( - name="name_value", - output_config=output_config, - ) - - # Make the request - operation = client.export_model(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END automl_v1_generated_AutoMl_ExportModel_async] diff --git a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_export_model_sync.py b/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_export_model_sync.py deleted file mode 100644 index 9b4e3fc5be1f..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_export_model_sync.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ExportModel -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1_generated_AutoMl_ExportModel_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1 - - -def sample_export_model(): - # Create a client - client = automl_v1.AutoMlClient() - - # Initialize request argument(s) - output_config = automl_v1.ModelExportOutputConfig() - output_config.gcs_destination.output_uri_prefix = "output_uri_prefix_value" - - request = automl_v1.ExportModelRequest( - name="name_value", - output_config=output_config, - ) - - # Make the request - operation = client.export_model(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END automl_v1_generated_AutoMl_ExportModel_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_get_annotation_spec_async.py b/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_get_annotation_spec_async.py deleted file mode 100644 index 17a83791d8af..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_get_annotation_spec_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetAnnotationSpec -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1_generated_AutoMl_GetAnnotationSpec_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1 - - -async def sample_get_annotation_spec(): - # Create a client - client = automl_v1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1.GetAnnotationSpecRequest( - name="name_value", - ) - - # Make the request - response = await client.get_annotation_spec(request=request) - - # Handle the response - print(response) - -# [END automl_v1_generated_AutoMl_GetAnnotationSpec_async] diff --git a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_get_annotation_spec_sync.py b/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_get_annotation_spec_sync.py deleted file mode 100644 index 92c6825521bb..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_get_annotation_spec_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetAnnotationSpec -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1_generated_AutoMl_GetAnnotationSpec_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1 - - -def sample_get_annotation_spec(): - # Create a client - client = automl_v1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1.GetAnnotationSpecRequest( - name="name_value", - ) - - # Make the request - response = client.get_annotation_spec(request=request) - - # Handle the response - print(response) - -# [END automl_v1_generated_AutoMl_GetAnnotationSpec_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_get_dataset_async.py b/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_get_dataset_async.py deleted file mode 100644 index 73680f489068..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_get_dataset_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDataset -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1_generated_AutoMl_GetDataset_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1 - - -async def sample_get_dataset(): - # Create a client - client = automl_v1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1.GetDatasetRequest( - name="name_value", - ) - - # Make the request - response = await client.get_dataset(request=request) - - # Handle the response - print(response) - -# [END automl_v1_generated_AutoMl_GetDataset_async] diff --git a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_get_dataset_sync.py b/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_get_dataset_sync.py deleted file mode 100644 index c126ced200f8..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_get_dataset_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDataset -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1_generated_AutoMl_GetDataset_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1 - - -def sample_get_dataset(): - # Create a client - client = automl_v1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1.GetDatasetRequest( - name="name_value", - ) - - # Make the request - response = client.get_dataset(request=request) - - # Handle the response - print(response) - -# [END automl_v1_generated_AutoMl_GetDataset_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_get_model_async.py b/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_get_model_async.py deleted file mode 100644 index 21be39739a06..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_get_model_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetModel -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1_generated_AutoMl_GetModel_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1 - - -async def sample_get_model(): - # Create a client - client = automl_v1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1.GetModelRequest( - name="name_value", - ) - - # Make the request - response = await client.get_model(request=request) - - # Handle the response - print(response) - -# [END automl_v1_generated_AutoMl_GetModel_async] diff --git a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_get_model_evaluation_async.py b/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_get_model_evaluation_async.py deleted file mode 100644 index 66ec5a9ff8bd..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_get_model_evaluation_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetModelEvaluation -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1_generated_AutoMl_GetModelEvaluation_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1 - - -async def sample_get_model_evaluation(): - # Create a client - client = automl_v1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1.GetModelEvaluationRequest( - name="name_value", - ) - - # Make the request - response = await client.get_model_evaluation(request=request) - - # Handle the response - print(response) - -# [END automl_v1_generated_AutoMl_GetModelEvaluation_async] diff --git a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_get_model_evaluation_sync.py b/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_get_model_evaluation_sync.py deleted file mode 100644 index 17ea116a2c3e..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_get_model_evaluation_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetModelEvaluation -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1_generated_AutoMl_GetModelEvaluation_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1 - - -def sample_get_model_evaluation(): - # Create a client - client = automl_v1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1.GetModelEvaluationRequest( - name="name_value", - ) - - # Make the request - response = client.get_model_evaluation(request=request) - - # Handle the response - print(response) - -# [END automl_v1_generated_AutoMl_GetModelEvaluation_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_get_model_sync.py b/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_get_model_sync.py deleted file mode 100644 index a6df05ff4460..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_get_model_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetModel -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1_generated_AutoMl_GetModel_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1 - - -def sample_get_model(): - # Create a client - client = automl_v1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1.GetModelRequest( - name="name_value", - ) - - # Make the request - response = client.get_model(request=request) - - # Handle the response - print(response) - -# [END automl_v1_generated_AutoMl_GetModel_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_import_data_async.py b/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_import_data_async.py deleted file mode 100644 index 099aa892836e..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_import_data_async.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ImportData -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1_generated_AutoMl_ImportData_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1 - - -async def sample_import_data(): - # Create a client - client = automl_v1.AutoMlAsyncClient() - - # Initialize request argument(s) - input_config = automl_v1.InputConfig() - input_config.gcs_source.input_uris = ['input_uris_value1', 'input_uris_value2'] - - request = automl_v1.ImportDataRequest( - name="name_value", - input_config=input_config, - ) - - # Make the request - operation = client.import_data(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END automl_v1_generated_AutoMl_ImportData_async] diff --git a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_import_data_sync.py b/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_import_data_sync.py deleted file mode 100644 index 6e6314e51224..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_import_data_sync.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ImportData -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1_generated_AutoMl_ImportData_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1 - - -def sample_import_data(): - # Create a client - client = automl_v1.AutoMlClient() - - # Initialize request argument(s) - input_config = automl_v1.InputConfig() - input_config.gcs_source.input_uris = ['input_uris_value1', 'input_uris_value2'] - - request = automl_v1.ImportDataRequest( - name="name_value", - input_config=input_config, - ) - - # Make the request - operation = client.import_data(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END automl_v1_generated_AutoMl_ImportData_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_list_datasets_async.py b/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_list_datasets_async.py deleted file mode 100644 index d21a26b8e62b..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_list_datasets_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDatasets -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1_generated_AutoMl_ListDatasets_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1 - - -async def sample_list_datasets(): - # Create a client - client = automl_v1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1.ListDatasetsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_datasets(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END automl_v1_generated_AutoMl_ListDatasets_async] diff --git a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_list_datasets_sync.py b/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_list_datasets_sync.py deleted file mode 100644 index 485f55f903f1..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_list_datasets_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDatasets -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1_generated_AutoMl_ListDatasets_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1 - - -def sample_list_datasets(): - # Create a client - client = automl_v1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1.ListDatasetsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_datasets(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END automl_v1_generated_AutoMl_ListDatasets_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_list_model_evaluations_async.py b/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_list_model_evaluations_async.py deleted file mode 100644 index 019b515fab9c..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_list_model_evaluations_async.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListModelEvaluations -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1_generated_AutoMl_ListModelEvaluations_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1 - - -async def sample_list_model_evaluations(): - # Create a client - client = automl_v1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1.ListModelEvaluationsRequest( - parent="parent_value", - filter="filter_value", - ) - - # Make the request - page_result = client.list_model_evaluations(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END automl_v1_generated_AutoMl_ListModelEvaluations_async] diff --git a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_list_model_evaluations_sync.py b/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_list_model_evaluations_sync.py deleted file mode 100644 index 1b81b3573380..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_list_model_evaluations_sync.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListModelEvaluations -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1_generated_AutoMl_ListModelEvaluations_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1 - - -def sample_list_model_evaluations(): - # Create a client - client = automl_v1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1.ListModelEvaluationsRequest( - parent="parent_value", - filter="filter_value", - ) - - # Make the request - page_result = client.list_model_evaluations(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END automl_v1_generated_AutoMl_ListModelEvaluations_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_list_models_async.py b/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_list_models_async.py deleted file mode 100644 index f73ecd109b1e..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_list_models_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListModels -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1_generated_AutoMl_ListModels_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1 - - -async def sample_list_models(): - # Create a client - client = automl_v1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1.ListModelsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_models(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END automl_v1_generated_AutoMl_ListModels_async] diff --git a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_list_models_sync.py b/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_list_models_sync.py deleted file mode 100644 index a2b8c5f12dbe..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_list_models_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListModels -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1_generated_AutoMl_ListModels_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1 - - -def sample_list_models(): - # Create a client - client = automl_v1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1.ListModelsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_models(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END automl_v1_generated_AutoMl_ListModels_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_undeploy_model_async.py b/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_undeploy_model_async.py deleted file mode 100644 index 49fc20a20a97..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_undeploy_model_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UndeployModel -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1_generated_AutoMl_UndeployModel_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1 - - -async def sample_undeploy_model(): - # Create a client - client = automl_v1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1.UndeployModelRequest( - name="name_value", - ) - - # Make the request - operation = client.undeploy_model(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END automl_v1_generated_AutoMl_UndeployModel_async] diff --git a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_undeploy_model_sync.py b/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_undeploy_model_sync.py deleted file mode 100644 index 837d59da671a..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_undeploy_model_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UndeployModel -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1_generated_AutoMl_UndeployModel_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1 - - -def sample_undeploy_model(): - # Create a client - client = automl_v1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1.UndeployModelRequest( - name="name_value", - ) - - # Make the request - operation = client.undeploy_model(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END automl_v1_generated_AutoMl_UndeployModel_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_update_dataset_async.py b/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_update_dataset_async.py deleted file mode 100644 index 60eec7c38fc1..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_update_dataset_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDataset -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1_generated_AutoMl_UpdateDataset_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1 - - -async def sample_update_dataset(): - # Create a client - client = automl_v1.AutoMlAsyncClient() - - # Initialize request argument(s) - dataset = automl_v1.Dataset() - dataset.translation_dataset_metadata.source_language_code = "source_language_code_value" - dataset.translation_dataset_metadata.target_language_code = "target_language_code_value" - - request = automl_v1.UpdateDatasetRequest( - dataset=dataset, - ) - - # Make the request - response = await client.update_dataset(request=request) - - # Handle the response - print(response) - -# [END automl_v1_generated_AutoMl_UpdateDataset_async] diff --git a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_update_dataset_sync.py b/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_update_dataset_sync.py deleted file mode 100644 index 6743151513f4..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_update_dataset_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDataset -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1_generated_AutoMl_UpdateDataset_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1 - - -def sample_update_dataset(): - # Create a client - client = automl_v1.AutoMlClient() - - # Initialize request argument(s) - dataset = automl_v1.Dataset() - dataset.translation_dataset_metadata.source_language_code = "source_language_code_value" - dataset.translation_dataset_metadata.target_language_code = "target_language_code_value" - - request = automl_v1.UpdateDatasetRequest( - dataset=dataset, - ) - - # Make the request - response = client.update_dataset(request=request) - - # Handle the response - print(response) - -# [END automl_v1_generated_AutoMl_UpdateDataset_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_update_model_async.py b/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_update_model_async.py deleted file mode 100644 index da9b5067a29b..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_update_model_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateModel -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1_generated_AutoMl_UpdateModel_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1 - - -async def sample_update_model(): - # Create a client - client = automl_v1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1.UpdateModelRequest( - ) - - # Make the request - response = await client.update_model(request=request) - - # Handle the response - print(response) - -# [END automl_v1_generated_AutoMl_UpdateModel_async] diff --git a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_update_model_sync.py b/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_update_model_sync.py deleted file mode 100644 index 0b209f872778..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_auto_ml_update_model_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateModel -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1_generated_AutoMl_UpdateModel_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1 - - -def sample_update_model(): - # Create a client - client = automl_v1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1.UpdateModelRequest( - ) - - # Make the request - response = client.update_model(request=request) - - # Handle the response - print(response) - -# [END automl_v1_generated_AutoMl_UpdateModel_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_prediction_service_batch_predict_async.py b/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_prediction_service_batch_predict_async.py deleted file mode 100644 index f77ef80fce26..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_prediction_service_batch_predict_async.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for BatchPredict -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1_generated_PredictionService_BatchPredict_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1 - - -async def sample_batch_predict(): - # Create a client - client = automl_v1.PredictionServiceAsyncClient() - - # Initialize request argument(s) - input_config = automl_v1.BatchPredictInputConfig() - input_config.gcs_source.input_uris = ['input_uris_value1', 'input_uris_value2'] - - output_config = automl_v1.BatchPredictOutputConfig() - output_config.gcs_destination.output_uri_prefix = "output_uri_prefix_value" - - request = automl_v1.BatchPredictRequest( - name="name_value", - input_config=input_config, - output_config=output_config, - ) - - # Make the request - operation = client.batch_predict(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END automl_v1_generated_PredictionService_BatchPredict_async] diff --git a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_prediction_service_batch_predict_sync.py b/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_prediction_service_batch_predict_sync.py deleted file mode 100644 index 99ebec690b53..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_prediction_service_batch_predict_sync.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for BatchPredict -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
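Editor's note on the BatchPredict samples: the ``'input_uris_value1'``-style placeholders are not usable inputs; these request fields expect Cloud Storage URIs. A sketch with a hypothetical bucket:

.. code-block:: python

    from google.cloud import automl_v1

    # Hypothetical gs:// locations; BatchPredict reads its inputs from and
    # writes its results to Cloud Storage.
    input_config = automl_v1.BatchPredictInputConfig()
    input_config.gcs_source.input_uris = ["gs://my-bucket/batch/items.csv"]

    output_config = automl_v1.BatchPredictOutputConfig()
    output_config.gcs_destination.output_uri_prefix = "gs://my-bucket/batch-results/"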
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1_generated_PredictionService_BatchPredict_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1 - - -def sample_batch_predict(): - # Create a client - client = automl_v1.PredictionServiceClient() - - # Initialize request argument(s) - input_config = automl_v1.BatchPredictInputConfig() - input_config.gcs_source.input_uris = ['input_uris_value1', 'input_uris_value2'] - - output_config = automl_v1.BatchPredictOutputConfig() - output_config.gcs_destination.output_uri_prefix = "output_uri_prefix_value" - - request = automl_v1.BatchPredictRequest( - name="name_value", - input_config=input_config, - output_config=output_config, - ) - - # Make the request - operation = client.batch_predict(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END automl_v1_generated_PredictionService_BatchPredict_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_prediction_service_predict_async.py b/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_prediction_service_predict_async.py deleted file mode 100644 index f5be967d01ca..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_prediction_service_predict_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for Predict -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1_generated_PredictionService_Predict_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
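Editor's note on "correct/in-range values": in the Predict samples, ``name="name_value"`` stands in for a full model resource name, which the generated path helper can build without a live client. Project, location, and model IDs below are hypothetical:

.. code-block:: python

    from google.cloud import automl_v1

    # model_path is a classmethod, so no credentials are needed to build
    # the resource-name string.
    name = automl_v1.PredictionServiceClient.model_path(
        "my-project", "us-central1", "ICN0000000000000000000"
    )
    # -> "projects/my-project/locations/us-central1/models/ICN0000000000000000000"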
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1 - - -async def sample_predict(): - # Create a client - client = automl_v1.PredictionServiceAsyncClient() - - # Initialize request argument(s) - payload = automl_v1.ExamplePayload() - payload.image.image_bytes = b'image_bytes_blob' - - request = automl_v1.PredictRequest( - name="name_value", - payload=payload, - ) - - # Make the request - response = await client.predict(request=request) - - # Handle the response - print(response) - -# [END automl_v1_generated_PredictionService_Predict_async] diff --git a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_prediction_service_predict_sync.py b/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_prediction_service_predict_sync.py deleted file mode 100644 index c5ca4ccefac5..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/automl_v1_generated_prediction_service_predict_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for Predict -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1_generated_PredictionService_Predict_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
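Editor's note: in both Predict samples, ``b'image_bytes_blob'`` is a placeholder; an image prediction request carries the raw bytes of an actual image. A sketch assuming a hypothetical local file:

.. code-block:: python

    from google.cloud import automl_v1

    # "local_image.jpg" is a hypothetical path; read the file in binary
    # mode so image_bytes receives raw bytes.
    payload = automl_v1.ExamplePayload()
    with open("local_image.jpg", "rb") as f:
        payload.image.image_bytes = f.read()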
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1 - - -def sample_predict(): - # Create a client - client = automl_v1.PredictionServiceClient() - - # Initialize request argument(s) - payload = automl_v1.ExamplePayload() - payload.image.image_bytes = b'image_bytes_blob' - - request = automl_v1.PredictRequest( - name="name_value", - payload=payload, - ) - - # Make the request - response = client.predict(request=request) - - # Handle the response - print(response) - -# [END automl_v1_generated_PredictionService_Predict_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/snippet_metadata_google.cloud.automl.v1.json b/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/snippet_metadata_google.cloud.automl.v1.json deleted file mode 100644 index 0d8816baffc0..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/samples/generated_samples/snippet_metadata_google.cloud.automl.v1.json +++ /dev/null @@ -1,3339 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.automl.v1", - "version": "v1" - } - ], - "language": "PYTHON", - "name": "google-cloud-automl", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.automl_v1.AutoMlAsyncClient", - "shortName": "AutoMlAsyncClient" - }, - "fullName": "google.cloud.automl_v1.AutoMlAsyncClient.create_dataset", - "method": { - "fullName": "google.cloud.automl.v1.AutoMl.CreateDataset", - "service": { - "fullName": "google.cloud.automl.v1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "CreateDataset" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1.types.CreateDatasetRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "dataset", - "type": "google.cloud.automl_v1.types.Dataset" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_dataset" - }, - "description": "Sample for CreateDataset", - "file": "automl_v1_generated_auto_ml_create_dataset_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1_generated_AutoMl_CreateDataset_async", - "segments": [ - { - "end": 60, - "start": 27, - "type": "FULL" - }, - { - "end": 60, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 57, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 61, - "start": 58, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1_generated_auto_ml_create_dataset_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1.AutoMlClient", - "shortName": "AutoMlClient" - }, - "fullName": "google.cloud.automl_v1.AutoMlClient.create_dataset", - "method": { - "fullName": "google.cloud.automl.v1.AutoMl.CreateDataset", - "service": { - "fullName": "google.cloud.automl.v1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "CreateDataset" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1.types.CreateDatasetRequest" - }, - 
{ - "name": "parent", - "type": "str" - }, - { - "name": "dataset", - "type": "google.cloud.automl_v1.types.Dataset" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_dataset" - }, - "description": "Sample for CreateDataset", - "file": "automl_v1_generated_auto_ml_create_dataset_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1_generated_AutoMl_CreateDataset_sync", - "segments": [ - { - "end": 60, - "start": 27, - "type": "FULL" - }, - { - "end": 60, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 57, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 61, - "start": 58, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1_generated_auto_ml_create_dataset_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.automl_v1.AutoMlAsyncClient", - "shortName": "AutoMlAsyncClient" - }, - "fullName": "google.cloud.automl_v1.AutoMlAsyncClient.create_model", - "method": { - "fullName": "google.cloud.automl.v1.AutoMl.CreateModel", - "service": { - "fullName": "google.cloud.automl.v1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "CreateModel" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1.types.CreateModelRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "model", - "type": "google.cloud.automl_v1.types.Model" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_model" - }, - "description": "Sample for CreateModel", - "file": "automl_v1_generated_auto_ml_create_model_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1_generated_AutoMl_CreateModel_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1_generated_auto_ml_create_model_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1.AutoMlClient", - "shortName": "AutoMlClient" - }, - "fullName": "google.cloud.automl_v1.AutoMlClient.create_model", - "method": { - "fullName": "google.cloud.automl.v1.AutoMl.CreateModel", - "service": { - "fullName": "google.cloud.automl.v1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "CreateModel" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1.types.CreateModelRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "model", - "type": "google.cloud.automl_v1.types.Model" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - 
"name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_model" - }, - "description": "Sample for CreateModel", - "file": "automl_v1_generated_auto_ml_create_model_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1_generated_AutoMl_CreateModel_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1_generated_auto_ml_create_model_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.automl_v1.AutoMlAsyncClient", - "shortName": "AutoMlAsyncClient" - }, - "fullName": "google.cloud.automl_v1.AutoMlAsyncClient.delete_dataset", - "method": { - "fullName": "google.cloud.automl.v1.AutoMl.DeleteDataset", - "service": { - "fullName": "google.cloud.automl.v1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "DeleteDataset" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1.types.DeleteDatasetRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_dataset" - }, - "description": "Sample for DeleteDataset", - "file": "automl_v1_generated_auto_ml_delete_dataset_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1_generated_AutoMl_DeleteDataset_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1_generated_auto_ml_delete_dataset_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1.AutoMlClient", - "shortName": "AutoMlClient" - }, - "fullName": "google.cloud.automl_v1.AutoMlClient.delete_dataset", - "method": { - "fullName": "google.cloud.automl.v1.AutoMl.DeleteDataset", - "service": { - "fullName": "google.cloud.automl.v1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "DeleteDataset" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1.types.DeleteDatasetRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_dataset" - }, - "description": "Sample for DeleteDataset", - "file": "automl_v1_generated_auto_ml_delete_dataset_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"automl_v1_generated_AutoMl_DeleteDataset_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1_generated_auto_ml_delete_dataset_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.automl_v1.AutoMlAsyncClient", - "shortName": "AutoMlAsyncClient" - }, - "fullName": "google.cloud.automl_v1.AutoMlAsyncClient.delete_model", - "method": { - "fullName": "google.cloud.automl.v1.AutoMl.DeleteModel", - "service": { - "fullName": "google.cloud.automl.v1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "DeleteModel" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1.types.DeleteModelRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_model" - }, - "description": "Sample for DeleteModel", - "file": "automl_v1_generated_auto_ml_delete_model_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1_generated_AutoMl_DeleteModel_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1_generated_auto_ml_delete_model_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1.AutoMlClient", - "shortName": "AutoMlClient" - }, - "fullName": "google.cloud.automl_v1.AutoMlClient.delete_model", - "method": { - "fullName": "google.cloud.automl.v1.AutoMl.DeleteModel", - "service": { - "fullName": "google.cloud.automl.v1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "DeleteModel" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1.types.DeleteModelRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_model" - }, - "description": "Sample for DeleteModel", - "file": "automl_v1_generated_auto_ml_delete_model_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1_generated_AutoMl_DeleteModel_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - 
"start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1_generated_auto_ml_delete_model_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.automl_v1.AutoMlAsyncClient", - "shortName": "AutoMlAsyncClient" - }, - "fullName": "google.cloud.automl_v1.AutoMlAsyncClient.deploy_model", - "method": { - "fullName": "google.cloud.automl.v1.AutoMl.DeployModel", - "service": { - "fullName": "google.cloud.automl.v1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "DeployModel" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1.types.DeployModelRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "deploy_model" - }, - "description": "Sample for DeployModel", - "file": "automl_v1_generated_auto_ml_deploy_model_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1_generated_AutoMl_DeployModel_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1_generated_auto_ml_deploy_model_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1.AutoMlClient", - "shortName": "AutoMlClient" - }, - "fullName": "google.cloud.automl_v1.AutoMlClient.deploy_model", - "method": { - "fullName": "google.cloud.automl.v1.AutoMl.DeployModel", - "service": { - "fullName": "google.cloud.automl.v1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "DeployModel" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1.types.DeployModelRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "deploy_model" - }, - "description": "Sample for DeployModel", - "file": "automl_v1_generated_auto_ml_deploy_model_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1_generated_AutoMl_DeployModel_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1_generated_auto_ml_deploy_model_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.automl_v1.AutoMlAsyncClient", - "shortName": "AutoMlAsyncClient" - }, - "fullName": "google.cloud.automl_v1.AutoMlAsyncClient.export_data", - "method": { - "fullName": 
"google.cloud.automl.v1.AutoMl.ExportData", - "service": { - "fullName": "google.cloud.automl.v1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "ExportData" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1.types.ExportDataRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "output_config", - "type": "google.cloud.automl_v1.types.OutputConfig" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "export_data" - }, - "description": "Sample for ExportData", - "file": "automl_v1_generated_auto_ml_export_data_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1_generated_AutoMl_ExportData_async", - "segments": [ - { - "end": 59, - "start": 27, - "type": "FULL" - }, - { - "end": 59, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 56, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 60, - "start": 57, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1_generated_auto_ml_export_data_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1.AutoMlClient", - "shortName": "AutoMlClient" - }, - "fullName": "google.cloud.automl_v1.AutoMlClient.export_data", - "method": { - "fullName": "google.cloud.automl.v1.AutoMl.ExportData", - "service": { - "fullName": "google.cloud.automl.v1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "ExportData" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1.types.ExportDataRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "output_config", - "type": "google.cloud.automl_v1.types.OutputConfig" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "export_data" - }, - "description": "Sample for ExportData", - "file": "automl_v1_generated_auto_ml_export_data_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1_generated_AutoMl_ExportData_sync", - "segments": [ - { - "end": 59, - "start": 27, - "type": "FULL" - }, - { - "end": 59, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 56, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 60, - "start": 57, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1_generated_auto_ml_export_data_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.automl_v1.AutoMlAsyncClient", - "shortName": "AutoMlAsyncClient" - }, - "fullName": "google.cloud.automl_v1.AutoMlAsyncClient.export_model", - "method": { - "fullName": "google.cloud.automl.v1.AutoMl.ExportModel", - "service": { - "fullName": "google.cloud.automl.v1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "ExportModel" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.automl_v1.types.ExportModelRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "output_config", - "type": "google.cloud.automl_v1.types.ModelExportOutputConfig" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "export_model" - }, - "description": "Sample for ExportModel", - "file": "automl_v1_generated_auto_ml_export_model_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1_generated_AutoMl_ExportModel_async", - "segments": [ - { - "end": 59, - "start": 27, - "type": "FULL" - }, - { - "end": 59, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 56, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 60, - "start": 57, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1_generated_auto_ml_export_model_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1.AutoMlClient", - "shortName": "AutoMlClient" - }, - "fullName": "google.cloud.automl_v1.AutoMlClient.export_model", - "method": { - "fullName": "google.cloud.automl.v1.AutoMl.ExportModel", - "service": { - "fullName": "google.cloud.automl.v1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "ExportModel" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1.types.ExportModelRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "output_config", - "type": "google.cloud.automl_v1.types.ModelExportOutputConfig" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "export_model" - }, - "description": "Sample for ExportModel", - "file": "automl_v1_generated_auto_ml_export_model_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1_generated_AutoMl_ExportModel_sync", - "segments": [ - { - "end": 59, - "start": 27, - "type": "FULL" - }, - { - "end": 59, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 56, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 60, - "start": 57, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1_generated_auto_ml_export_model_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.automl_v1.AutoMlAsyncClient", - "shortName": "AutoMlAsyncClient" - }, - "fullName": "google.cloud.automl_v1.AutoMlAsyncClient.get_annotation_spec", - "method": { - "fullName": "google.cloud.automl.v1.AutoMl.GetAnnotationSpec", - "service": { - "fullName": "google.cloud.automl.v1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "GetAnnotationSpec" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1.types.GetAnnotationSpecRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": 
"timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1.types.AnnotationSpec", - "shortName": "get_annotation_spec" - }, - "description": "Sample for GetAnnotationSpec", - "file": "automl_v1_generated_auto_ml_get_annotation_spec_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1_generated_AutoMl_GetAnnotationSpec_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1_generated_auto_ml_get_annotation_spec_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1.AutoMlClient", - "shortName": "AutoMlClient" - }, - "fullName": "google.cloud.automl_v1.AutoMlClient.get_annotation_spec", - "method": { - "fullName": "google.cloud.automl.v1.AutoMl.GetAnnotationSpec", - "service": { - "fullName": "google.cloud.automl.v1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "GetAnnotationSpec" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1.types.GetAnnotationSpecRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1.types.AnnotationSpec", - "shortName": "get_annotation_spec" - }, - "description": "Sample for GetAnnotationSpec", - "file": "automl_v1_generated_auto_ml_get_annotation_spec_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1_generated_AutoMl_GetAnnotationSpec_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1_generated_auto_ml_get_annotation_spec_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.automl_v1.AutoMlAsyncClient", - "shortName": "AutoMlAsyncClient" - }, - "fullName": "google.cloud.automl_v1.AutoMlAsyncClient.get_dataset", - "method": { - "fullName": "google.cloud.automl.v1.AutoMl.GetDataset", - "service": { - "fullName": "google.cloud.automl.v1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "GetDataset" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1.types.GetDatasetRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1.types.Dataset", - "shortName": "get_dataset" - }, - "description": "Sample for GetDataset", - "file": "automl_v1_generated_auto_ml_get_dataset_async.py", - 
"language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1_generated_AutoMl_GetDataset_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1_generated_auto_ml_get_dataset_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1.AutoMlClient", - "shortName": "AutoMlClient" - }, - "fullName": "google.cloud.automl_v1.AutoMlClient.get_dataset", - "method": { - "fullName": "google.cloud.automl.v1.AutoMl.GetDataset", - "service": { - "fullName": "google.cloud.automl.v1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "GetDataset" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1.types.GetDatasetRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1.types.Dataset", - "shortName": "get_dataset" - }, - "description": "Sample for GetDataset", - "file": "automl_v1_generated_auto_ml_get_dataset_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1_generated_AutoMl_GetDataset_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1_generated_auto_ml_get_dataset_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.automl_v1.AutoMlAsyncClient", - "shortName": "AutoMlAsyncClient" - }, - "fullName": "google.cloud.automl_v1.AutoMlAsyncClient.get_model_evaluation", - "method": { - "fullName": "google.cloud.automl.v1.AutoMl.GetModelEvaluation", - "service": { - "fullName": "google.cloud.automl.v1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "GetModelEvaluation" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1.types.GetModelEvaluationRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1.types.ModelEvaluation", - "shortName": "get_model_evaluation" - }, - "description": "Sample for GetModelEvaluation", - "file": "automl_v1_generated_auto_ml_get_model_evaluation_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1_generated_AutoMl_GetModelEvaluation_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": 
"REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1_generated_auto_ml_get_model_evaluation_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1.AutoMlClient", - "shortName": "AutoMlClient" - }, - "fullName": "google.cloud.automl_v1.AutoMlClient.get_model_evaluation", - "method": { - "fullName": "google.cloud.automl.v1.AutoMl.GetModelEvaluation", - "service": { - "fullName": "google.cloud.automl.v1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "GetModelEvaluation" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1.types.GetModelEvaluationRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1.types.ModelEvaluation", - "shortName": "get_model_evaluation" - }, - "description": "Sample for GetModelEvaluation", - "file": "automl_v1_generated_auto_ml_get_model_evaluation_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1_generated_AutoMl_GetModelEvaluation_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1_generated_auto_ml_get_model_evaluation_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.automl_v1.AutoMlAsyncClient", - "shortName": "AutoMlAsyncClient" - }, - "fullName": "google.cloud.automl_v1.AutoMlAsyncClient.get_model", - "method": { - "fullName": "google.cloud.automl.v1.AutoMl.GetModel", - "service": { - "fullName": "google.cloud.automl.v1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "GetModel" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1.types.GetModelRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1.types.Model", - "shortName": "get_model" - }, - "description": "Sample for GetModel", - "file": "automl_v1_generated_auto_ml_get_model_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1_generated_AutoMl_GetModel_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1_generated_auto_ml_get_model_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1.AutoMlClient", - "shortName": 
"AutoMlClient" - }, - "fullName": "google.cloud.automl_v1.AutoMlClient.get_model", - "method": { - "fullName": "google.cloud.automl.v1.AutoMl.GetModel", - "service": { - "fullName": "google.cloud.automl.v1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "GetModel" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1.types.GetModelRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1.types.Model", - "shortName": "get_model" - }, - "description": "Sample for GetModel", - "file": "automl_v1_generated_auto_ml_get_model_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1_generated_AutoMl_GetModel_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1_generated_auto_ml_get_model_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.automl_v1.AutoMlAsyncClient", - "shortName": "AutoMlAsyncClient" - }, - "fullName": "google.cloud.automl_v1.AutoMlAsyncClient.import_data", - "method": { - "fullName": "google.cloud.automl.v1.AutoMl.ImportData", - "service": { - "fullName": "google.cloud.automl.v1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "ImportData" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1.types.ImportDataRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "input_config", - "type": "google.cloud.automl_v1.types.InputConfig" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "import_data" - }, - "description": "Sample for ImportData", - "file": "automl_v1_generated_auto_ml_import_data_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1_generated_AutoMl_ImportData_async", - "segments": [ - { - "end": 59, - "start": 27, - "type": "FULL" - }, - { - "end": 59, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 56, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 60, - "start": 57, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1_generated_auto_ml_import_data_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1.AutoMlClient", - "shortName": "AutoMlClient" - }, - "fullName": "google.cloud.automl_v1.AutoMlClient.import_data", - "method": { - "fullName": "google.cloud.automl.v1.AutoMl.ImportData", - "service": { - "fullName": "google.cloud.automl.v1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "ImportData" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.automl_v1.types.ImportDataRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "input_config", - "type": "google.cloud.automl_v1.types.InputConfig" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "import_data" - }, - "description": "Sample for ImportData", - "file": "automl_v1_generated_auto_ml_import_data_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1_generated_AutoMl_ImportData_sync", - "segments": [ - { - "end": 59, - "start": 27, - "type": "FULL" - }, - { - "end": 59, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 56, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 60, - "start": 57, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1_generated_auto_ml_import_data_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.automl_v1.AutoMlAsyncClient", - "shortName": "AutoMlAsyncClient" - }, - "fullName": "google.cloud.automl_v1.AutoMlAsyncClient.list_datasets", - "method": { - "fullName": "google.cloud.automl.v1.AutoMl.ListDatasets", - "service": { - "fullName": "google.cloud.automl.v1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "ListDatasets" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1.types.ListDatasetsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1.services.auto_ml.pagers.ListDatasetsAsyncPager", - "shortName": "list_datasets" - }, - "description": "Sample for ListDatasets", - "file": "automl_v1_generated_auto_ml_list_datasets_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1_generated_AutoMl_ListDatasets_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1_generated_auto_ml_list_datasets_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1.AutoMlClient", - "shortName": "AutoMlClient" - }, - "fullName": "google.cloud.automl_v1.AutoMlClient.list_datasets", - "method": { - "fullName": "google.cloud.automl.v1.AutoMl.ListDatasets", - "service": { - "fullName": "google.cloud.automl.v1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "ListDatasets" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1.types.ListDatasetsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, 
bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1.services.auto_ml.pagers.ListDatasetsPager", - "shortName": "list_datasets" - }, - "description": "Sample for ListDatasets", - "file": "automl_v1_generated_auto_ml_list_datasets_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1_generated_AutoMl_ListDatasets_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1_generated_auto_ml_list_datasets_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.automl_v1.AutoMlAsyncClient", - "shortName": "AutoMlAsyncClient" - }, - "fullName": "google.cloud.automl_v1.AutoMlAsyncClient.list_model_evaluations", - "method": { - "fullName": "google.cloud.automl.v1.AutoMl.ListModelEvaluations", - "service": { - "fullName": "google.cloud.automl.v1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "ListModelEvaluations" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1.types.ListModelEvaluationsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "filter", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1.services.auto_ml.pagers.ListModelEvaluationsAsyncPager", - "shortName": "list_model_evaluations" - }, - "description": "Sample for ListModelEvaluations", - "file": "automl_v1_generated_auto_ml_list_model_evaluations_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1_generated_AutoMl_ListModelEvaluations_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1_generated_auto_ml_list_model_evaluations_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1.AutoMlClient", - "shortName": "AutoMlClient" - }, - "fullName": "google.cloud.automl_v1.AutoMlClient.list_model_evaluations", - "method": { - "fullName": "google.cloud.automl.v1.AutoMl.ListModelEvaluations", - "service": { - "fullName": "google.cloud.automl.v1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "ListModelEvaluations" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1.types.ListModelEvaluationsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "filter", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1.services.auto_ml.pagers.ListModelEvaluationsPager", - "shortName": 
"list_model_evaluations" - }, - "description": "Sample for ListModelEvaluations", - "file": "automl_v1_generated_auto_ml_list_model_evaluations_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1_generated_AutoMl_ListModelEvaluations_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1_generated_auto_ml_list_model_evaluations_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.automl_v1.AutoMlAsyncClient", - "shortName": "AutoMlAsyncClient" - }, - "fullName": "google.cloud.automl_v1.AutoMlAsyncClient.list_models", - "method": { - "fullName": "google.cloud.automl.v1.AutoMl.ListModels", - "service": { - "fullName": "google.cloud.automl.v1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "ListModels" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1.types.ListModelsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1.services.auto_ml.pagers.ListModelsAsyncPager", - "shortName": "list_models" - }, - "description": "Sample for ListModels", - "file": "automl_v1_generated_auto_ml_list_models_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1_generated_AutoMl_ListModels_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1_generated_auto_ml_list_models_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1.AutoMlClient", - "shortName": "AutoMlClient" - }, - "fullName": "google.cloud.automl_v1.AutoMlClient.list_models", - "method": { - "fullName": "google.cloud.automl.v1.AutoMl.ListModels", - "service": { - "fullName": "google.cloud.automl.v1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "ListModels" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1.types.ListModelsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1.services.auto_ml.pagers.ListModelsPager", - "shortName": "list_models" - }, - "description": "Sample for ListModels", - "file": "automl_v1_generated_auto_ml_list_models_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1_generated_AutoMl_ListModels_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, 
- "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1_generated_auto_ml_list_models_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.automl_v1.AutoMlAsyncClient", - "shortName": "AutoMlAsyncClient" - }, - "fullName": "google.cloud.automl_v1.AutoMlAsyncClient.undeploy_model", - "method": { - "fullName": "google.cloud.automl.v1.AutoMl.UndeployModel", - "service": { - "fullName": "google.cloud.automl.v1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "UndeployModel" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1.types.UndeployModelRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "undeploy_model" - }, - "description": "Sample for UndeployModel", - "file": "automl_v1_generated_auto_ml_undeploy_model_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1_generated_AutoMl_UndeployModel_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1_generated_auto_ml_undeploy_model_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1.AutoMlClient", - "shortName": "AutoMlClient" - }, - "fullName": "google.cloud.automl_v1.AutoMlClient.undeploy_model", - "method": { - "fullName": "google.cloud.automl.v1.AutoMl.UndeployModel", - "service": { - "fullName": "google.cloud.automl.v1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "UndeployModel" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1.types.UndeployModelRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "undeploy_model" - }, - "description": "Sample for UndeployModel", - "file": "automl_v1_generated_auto_ml_undeploy_model_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1_generated_AutoMl_UndeployModel_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"automl_v1_generated_auto_ml_undeploy_model_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.automl_v1.AutoMlAsyncClient", - "shortName": "AutoMlAsyncClient" - }, - "fullName": "google.cloud.automl_v1.AutoMlAsyncClient.update_dataset", - "method": { - "fullName": "google.cloud.automl.v1.AutoMl.UpdateDataset", - "service": { - "fullName": "google.cloud.automl.v1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "UpdateDataset" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1.types.UpdateDatasetRequest" - }, - { - "name": "dataset", - "type": "google.cloud.automl_v1.types.Dataset" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1.types.Dataset", - "shortName": "update_dataset" - }, - "description": "Sample for UpdateDataset", - "file": "automl_v1_generated_auto_ml_update_dataset_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1_generated_AutoMl_UpdateDataset_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1_generated_auto_ml_update_dataset_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1.AutoMlClient", - "shortName": "AutoMlClient" - }, - "fullName": "google.cloud.automl_v1.AutoMlClient.update_dataset", - "method": { - "fullName": "google.cloud.automl.v1.AutoMl.UpdateDataset", - "service": { - "fullName": "google.cloud.automl.v1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "UpdateDataset" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1.types.UpdateDatasetRequest" - }, - { - "name": "dataset", - "type": "google.cloud.automl_v1.types.Dataset" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1.types.Dataset", - "shortName": "update_dataset" - }, - "description": "Sample for UpdateDataset", - "file": "automl_v1_generated_auto_ml_update_dataset_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1_generated_AutoMl_UpdateDataset_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1_generated_auto_ml_update_dataset_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - 
"fullName": "google.cloud.automl_v1.AutoMlAsyncClient", - "shortName": "AutoMlAsyncClient" - }, - "fullName": "google.cloud.automl_v1.AutoMlAsyncClient.update_model", - "method": { - "fullName": "google.cloud.automl.v1.AutoMl.UpdateModel", - "service": { - "fullName": "google.cloud.automl.v1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "UpdateModel" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1.types.UpdateModelRequest" - }, - { - "name": "model", - "type": "google.cloud.automl_v1.types.Model" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1.types.Model", - "shortName": "update_model" - }, - "description": "Sample for UpdateModel", - "file": "automl_v1_generated_auto_ml_update_model_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1_generated_AutoMl_UpdateModel_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1_generated_auto_ml_update_model_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1.AutoMlClient", - "shortName": "AutoMlClient" - }, - "fullName": "google.cloud.automl_v1.AutoMlClient.update_model", - "method": { - "fullName": "google.cloud.automl.v1.AutoMl.UpdateModel", - "service": { - "fullName": "google.cloud.automl.v1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "UpdateModel" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1.types.UpdateModelRequest" - }, - { - "name": "model", - "type": "google.cloud.automl_v1.types.Model" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1.types.Model", - "shortName": "update_model" - }, - "description": "Sample for UpdateModel", - "file": "automl_v1_generated_auto_ml_update_model_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1_generated_AutoMl_UpdateModel_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1_generated_auto_ml_update_model_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.automl_v1.PredictionServiceAsyncClient", - "shortName": "PredictionServiceAsyncClient" - }, - "fullName": 
"google.cloud.automl_v1.PredictionServiceAsyncClient.batch_predict", - "method": { - "fullName": "google.cloud.automl.v1.PredictionService.BatchPredict", - "service": { - "fullName": "google.cloud.automl.v1.PredictionService", - "shortName": "PredictionService" - }, - "shortName": "BatchPredict" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1.types.BatchPredictRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "input_config", - "type": "google.cloud.automl_v1.types.BatchPredictInputConfig" - }, - { - "name": "output_config", - "type": "google.cloud.automl_v1.types.BatchPredictOutputConfig" - }, - { - "name": "params", - "type": "MutableMapping[str, str]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "batch_predict" - }, - "description": "Sample for BatchPredict", - "file": "automl_v1_generated_prediction_service_batch_predict_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1_generated_PredictionService_BatchPredict_async", - "segments": [ - { - "end": 63, - "start": 27, - "type": "FULL" - }, - { - "end": 63, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 53, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 60, - "start": 54, - "type": "REQUEST_EXECUTION" - }, - { - "end": 64, - "start": 61, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1_generated_prediction_service_batch_predict_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1.PredictionServiceClient", - "shortName": "PredictionServiceClient" - }, - "fullName": "google.cloud.automl_v1.PredictionServiceClient.batch_predict", - "method": { - "fullName": "google.cloud.automl.v1.PredictionService.BatchPredict", - "service": { - "fullName": "google.cloud.automl.v1.PredictionService", - "shortName": "PredictionService" - }, - "shortName": "BatchPredict" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1.types.BatchPredictRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "input_config", - "type": "google.cloud.automl_v1.types.BatchPredictInputConfig" - }, - { - "name": "output_config", - "type": "google.cloud.automl_v1.types.BatchPredictOutputConfig" - }, - { - "name": "params", - "type": "MutableMapping[str, str]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "batch_predict" - }, - "description": "Sample for BatchPredict", - "file": "automl_v1_generated_prediction_service_batch_predict_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1_generated_PredictionService_BatchPredict_sync", - "segments": [ - { - "end": 63, - "start": 27, - "type": "FULL" - }, - { - "end": 63, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 53, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 60, - "start": 54, - "type": "REQUEST_EXECUTION" - }, - { - "end": 64, - 
"start": 61, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1_generated_prediction_service_batch_predict_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.automl_v1.PredictionServiceAsyncClient", - "shortName": "PredictionServiceAsyncClient" - }, - "fullName": "google.cloud.automl_v1.PredictionServiceAsyncClient.predict", - "method": { - "fullName": "google.cloud.automl.v1.PredictionService.Predict", - "service": { - "fullName": "google.cloud.automl.v1.PredictionService", - "shortName": "PredictionService" - }, - "shortName": "Predict" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1.types.PredictRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "payload", - "type": "google.cloud.automl_v1.types.ExamplePayload" - }, - { - "name": "params", - "type": "MutableMapping[str, str]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1.types.PredictResponse", - "shortName": "predict" - }, - "description": "Sample for Predict", - "file": "automl_v1_generated_prediction_service_predict_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1_generated_PredictionService_Predict_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1_generated_prediction_service_predict_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1.PredictionServiceClient", - "shortName": "PredictionServiceClient" - }, - "fullName": "google.cloud.automl_v1.PredictionServiceClient.predict", - "method": { - "fullName": "google.cloud.automl.v1.PredictionService.Predict", - "service": { - "fullName": "google.cloud.automl.v1.PredictionService", - "shortName": "PredictionService" - }, - "shortName": "Predict" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1.types.PredictRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "payload", - "type": "google.cloud.automl_v1.types.ExamplePayload" - }, - { - "name": "params", - "type": "MutableMapping[str, str]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1.types.PredictResponse", - "shortName": "predict" - }, - "description": "Sample for Predict", - "file": "automl_v1_generated_prediction_service_predict_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1_generated_PredictionService_Predict_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" 
- }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1_generated_prediction_service_predict_sync.py" - } - ] -} diff --git a/owl-bot-staging/google-cloud-automl/v1/scripts/fixup_automl_v1_keywords.py b/owl-bot-staging/google-cloud-automl/v1/scripts/fixup_automl_v1_keywords.py deleted file mode 100644 index 9aae11692344..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/scripts/fixup_automl_v1_keywords.py +++ /dev/null @@ -1,195 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class automlCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'batch_predict': ('name', 'input_config', 'output_config', 'params', ), - 'create_dataset': ('parent', 'dataset', ), - 'create_model': ('parent', 'model', ), - 'delete_dataset': ('name', ), - 'delete_model': ('name', ), - 'deploy_model': ('name', 'image_object_detection_model_deployment_metadata', 'image_classification_model_deployment_metadata', ), - 'export_data': ('name', 'output_config', ), - 'export_model': ('name', 'output_config', ), - 'get_annotation_spec': ('name', ), - 'get_dataset': ('name', ), - 'get_model': ('name', ), - 'get_model_evaluation': ('name', ), - 'import_data': ('name', 'input_config', ), - 'list_datasets': ('parent', 'filter', 'page_size', 'page_token', ), - 'list_model_evaluations': ('parent', 'filter', 'page_size', 'page_token', ), - 'list_models': ('parent', 'filter', 'page_size', 'page_token', ), - 'predict': ('name', 'payload', 'params', ), - 'undeploy_model': ('name', ), - 'update_dataset': ('dataset', 'update_mask', ), - 'update_model': ('model', 'update_mask', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. 
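-            # (The rewrite below folds every method parameter into a single
-            # request= dict, so an existing request= keyword can only mean the
-            # call was already converted on a previous run.)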
- return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=automlCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the automl client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. 
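-
-  As an illustration (the variable names here are hypothetical), a flattened
-  call such as
-
-      client.predict(name, payload, params, retry=retry)
-
-  is rewritten to the request-object form
-
-      client.predict(request={'name': name, 'payload': payload, 'params': params}, retry=retry)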
-""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-automl/v1/setup.py b/owl-bot-staging/google-cloud-automl/v1/setup.py deleted file mode 100644 index 96aac7a718a2..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/setup.py +++ /dev/null @@ -1,98 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-automl' - - -description = "Google Cloud Automl API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/automl/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", -] -extras = { -} -url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-automl" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", 
- license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - extras_require=extras, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/google-cloud-automl/v1/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-automl/v1/testing/constraints-3.10.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/testing/constraints-3.10.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-automl/v1/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-automl/v1/testing/constraints-3.11.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/testing/constraints-3.11.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-automl/v1/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-automl/v1/testing/constraints-3.12.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/testing/constraints-3.12.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-automl/v1/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-automl/v1/testing/constraints-3.13.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/testing/constraints-3.13.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-automl/v1/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-automl/v1/testing/constraints-3.7.txt deleted file mode 100644 index fc812592b0ee..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/testing/constraints-3.7.txt +++ /dev/null @@ -1,10 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. 
-# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -proto-plus==1.22.3 -protobuf==3.20.2 diff --git a/owl-bot-staging/google-cloud-automl/v1/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-automl/v1/testing/constraints-3.8.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/testing/constraints-3.8.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-automl/v1/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-automl/v1/testing/constraints-3.9.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/testing/constraints-3.9.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-automl/v1/tests/__init__.py b/owl-bot-staging/google-cloud-automl/v1/tests/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-automl/v1/tests/unit/__init__.py b/owl-bot-staging/google-cloud-automl/v1/tests/unit/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-automl/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-automl/v1/tests/unit/gapic/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-automl/v1/tests/unit/gapic/automl_v1/__init__.py b/owl-bot-staging/google-cloud-automl/v1/tests/unit/gapic/automl_v1/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/tests/unit/gapic/automl_v1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-automl/v1/tests/unit/gapic/automl_v1/test_auto_ml.py b/owl-bot-staging/google-cloud-automl/v1/tests/unit/gapic/automl_v1/test_auto_ml.py deleted file mode 100644 index 43f67860d9d9..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/tests/unit/gapic/automl_v1/test_auto_ml.py +++ /dev/null @@ -1,14787 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation -from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.automl_v1.services.auto_ml import AutoMlAsyncClient -from google.cloud.automl_v1.services.auto_ml import AutoMlClient -from google.cloud.automl_v1.services.auto_ml import pagers -from google.cloud.automl_v1.services.auto_ml import transports -from google.cloud.automl_v1.types import annotation_spec -from google.cloud.automl_v1.types import classification -from google.cloud.automl_v1.types import dataset -from google.cloud.automl_v1.types import dataset as gca_dataset -from google.cloud.automl_v1.types import detection -from google.cloud.automl_v1.types import image -from google.cloud.automl_v1.types import io -from google.cloud.automl_v1.types import model -from google.cloud.automl_v1.types import model as gca_model -from google.cloud.automl_v1.types import model_evaluation -from google.cloud.automl_v1.types import operations -from google.cloud.automl_v1.types import service -from google.cloud.automl_v1.types import text -from google.cloud.automl_v1.types import text_extraction -from google.cloud.automl_v1.types import text_sentiment -from google.cloud.automl_v1.types import translation -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -import google.auth - - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": "service account credentials", - "principal": "service-account@example.com", -} -CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. 
-# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert AutoMlClient._get_default_mtls_endpoint(None) is None - assert AutoMlClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert AutoMlClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert AutoMlClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert AutoMlClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert AutoMlClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert AutoMlClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert AutoMlClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert AutoMlClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - AutoMlClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert AutoMlClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert AutoMlClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert AutoMlClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - AutoMlClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert AutoMlClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - 
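-    # A provided cert source is only honored when client certificates are
-    # enabled, and it then takes precedence over the ADC default source.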
mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert AutoMlClient._get_client_cert_source(None, False) is None - assert AutoMlClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert AutoMlClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert AutoMlClient._get_client_cert_source(None, True) is mock_default_cert_source - assert AutoMlClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(AutoMlClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AutoMlClient)) -@mock.patch.object(AutoMlAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AutoMlAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = AutoMlClient._DEFAULT_UNIVERSE - default_endpoint = AutoMlClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = AutoMlClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert AutoMlClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert AutoMlClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == AutoMlClient.DEFAULT_MTLS_ENDPOINT - assert AutoMlClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert AutoMlClient._get_api_endpoint(None, None, default_universe, "always") == AutoMlClient.DEFAULT_MTLS_ENDPOINT - assert AutoMlClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == AutoMlClient.DEFAULT_MTLS_ENDPOINT - assert AutoMlClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert AutoMlClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - AutoMlClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." - - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert AutoMlClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert AutoMlClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert AutoMlClient._get_universe_domain(None, None) == AutoMlClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - AutoMlClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
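-
-# The assertions above pin down the resolution order for the universe domain:
-# an explicit client option wins over the GOOGLE_CLOUD_UNIVERSE_DOMAIN
-# environment variable, which in turn wins over the library default.
-# A minimal usage sketch (hypothetical domain value, assuming the
-# credential/universe validation passes; client_options, AutoMlClient, and
-# ga_credentials are the names already imported in this module):
-#
-#     options = client_options.ClientOptions(universe_domain="foo.com")
-#     client = AutoMlClient(
-#         credentials=ga_credentials.AnonymousCredentials(),
-#         client_options=options,
-#     )
-#     assert client.universe_domain == "foo.com"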
- -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = AutoMlClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - -@pytest.mark.parametrize("error_code", [401,403,404,500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = AutoMlClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - -@pytest.mark.parametrize("client_class,transport_name", [ - (AutoMlClient, "grpc"), - (AutoMlAsyncClient, "grpc_asyncio"), - (AutoMlClient, "rest"), -]) -def test_auto_ml_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'automl.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://automl.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.AutoMlGrpcTransport, "grpc"), - (transports.AutoMlGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.AutoMlRestTransport, "rest"), -]) -def test_auto_ml_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (AutoMlClient, "grpc"), - (AutoMlAsyncClient, "grpc_asyncio"), - (AutoMlClient, "rest"), -]) -def test_auto_ml_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = 
client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'automl.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://automl.googleapis.com' - ) - - -def test_auto_ml_client_get_transport_class(): - transport = AutoMlClient.get_transport_class() - available_transports = [ - transports.AutoMlGrpcTransport, - transports.AutoMlRestTransport, - ] - assert transport in available_transports - - transport = AutoMlClient.get_transport_class("grpc") - assert transport == transports.AutoMlGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (AutoMlClient, transports.AutoMlGrpcTransport, "grpc"), - (AutoMlAsyncClient, transports.AutoMlGrpcAsyncIOTransport, "grpc_asyncio"), - (AutoMlClient, transports.AutoMlRestTransport, "rest"), -]) -@mock.patch.object(AutoMlClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AutoMlClient)) -@mock.patch.object(AutoMlAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AutoMlAsyncClient)) -def test_auto_ml_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(AutoMlClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(AutoMlClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (AutoMlClient, transports.AutoMlGrpcTransport, "grpc", "true"), - (AutoMlAsyncClient, transports.AutoMlGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (AutoMlClient, transports.AutoMlGrpcTransport, "grpc", "false"), - (AutoMlAsyncClient, transports.AutoMlGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (AutoMlClient, transports.AutoMlRestTransport, "rest", "true"), - (AutoMlClient, transports.AutoMlRestTransport, "rest", "false"), -]) -@mock.patch.object(AutoMlClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AutoMlClient)) -@mock.patch.object(AutoMlAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AutoMlAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def 
test_auto_ml_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - AutoMlClient, AutoMlAsyncClient -]) -@mock.patch.object(AutoMlClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AutoMlClient)) -@mock.patch.object(AutoMlAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AutoMlAsyncClient)) -def test_auto_ml_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - AutoMlClient, AutoMlAsyncClient -]) -@mock.patch.object(AutoMlClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AutoMlClient)) -@mock.patch.object(AutoMlAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AutoMlAsyncClient)) -def test_auto_ml_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = AutoMlClient._DEFAULT_UNIVERSE - default_endpoint = AutoMlClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = AutoMlClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
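# A minimal sketch of the endpoint-selection rule exercised above and below,
# assuming only the env var and cert availability matter. resolve_endpoint is
# illustrative, not a gapic internal; the real client raises
# MutualTLSChannelError for unsupported values rather than ValueError.
def resolve_endpoint(use_mtls_env, cert_available, default_endpoint, mtls_endpoint):
    if use_mtls_env == "never":    # pin the plain endpoint
        return default_endpoint
    if use_mtls_env == "always":   # pin the mTLS endpoint
        return mtls_endpoint
    if use_mtls_env == "auto":     # switch only when a client cert is available
        return mtls_endpoint if cert_available else default_endpoint
    raise ValueError("must be `never`, `auto` or `always`")

assert resolve_endpoint("always", False, "automl.googleapis.com", "automl.mtls.googleapis.com") == "automl.mtls.googleapis.com"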
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (AutoMlClient, transports.AutoMlGrpcTransport, "grpc"), - (AutoMlAsyncClient, transports.AutoMlGrpcAsyncIOTransport, "grpc_asyncio"), - (AutoMlClient, transports.AutoMlRestTransport, "rest"), -]) -def test_auto_ml_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (AutoMlClient, transports.AutoMlGrpcTransport, "grpc", grpc_helpers), - (AutoMlAsyncClient, transports.AutoMlGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (AutoMlClient, transports.AutoMlRestTransport, "rest", None), -]) -def test_auto_ml_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
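# For orientation, the caller-side shape of the credentials-file case below,
# using only public ClientOptions fields; "credentials.json" is a placeholder
# path, exactly as in the test itself.
from google.api_core.client_options import ClientOptions

example_options = ClientOptions(credentials_file="credentials.json")  # placeholder
assert example_options.credentials_file == "credentials.json"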
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_auto_ml_client_client_options_from_dict(): - with mock.patch('google.cloud.automl_v1.services.auto_ml.transports.AutoMlGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = AutoMlClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (AutoMlClient, transports.AutoMlGrpcTransport, "grpc", grpc_helpers), - (AutoMlAsyncClient, transports.AutoMlGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_auto_ml_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
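# The block below patches three seams at once so nothing real happens:
# google.auth.load_credentials_from_file (no disk read), google.auth.default
# (no real ADC lookup), and grpc_helpers.create_channel (no network). The
# assertion then checks that the file-based credentials, not the ADC ones,
# are what reach create_channel.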
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "automl.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="automl.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - service.CreateDatasetRequest, - dict, -]) -def test_create_dataset(request_type, transport: str = 'grpc'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dataset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_dataset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.CreateDatasetRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_dataset_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.CreateDatasetRequest( - parent='parent_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dataset), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.create_dataset(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.CreateDatasetRequest( - parent='parent_value', - ) - -def test_create_dataset_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_dataset in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_dataset] = mock_rpc - request = {} - client.create_dataset(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_dataset(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_dataset_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_dataset in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_dataset] = mock_rpc - - request = {} - await client.create_dataset(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_dataset(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_dataset_async(transport: str = 'grpc_asyncio', request_type=service.CreateDatasetRequest): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
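# The patching idiom used throughout this file, shown in isolation as a hedged
# sketch: gRPC stubs are callables, so patching __call__ on the stub's type
# intercepts the RPC. FakeStub is a stand-in, not a gapic class.
from unittest import mock

class FakeStub:
    def __call__(self, request, metadata=None):
        raise AssertionError("would hit the network")

stub = FakeStub()
with mock.patch.object(type(stub), "__call__") as call:
    call.return_value = "canned response"
    assert stub("request") == "canned response"  # intercepted; no network I/O
    call.assert_called_once()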
- with mock.patch.object( - type(client.transport.create_dataset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_dataset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.CreateDatasetRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_dataset_async_from_dict(): - await test_create_dataset_async(request_type=dict) - -def test_create_dataset_field_headers(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.CreateDatasetRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dataset), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_dataset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_dataset_field_headers_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.CreateDatasetRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dataset), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_dataset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_dataset_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dataset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_dataset( - parent='parent_value', - dataset=gca_dataset.Dataset(translation_dataset_metadata=translation.TranslationDatasetMetadata(source_language_code='source_language_code_value')), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
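# Hedged reading of the assertions below: the flattened keyword arguments
# (parent=, dataset=) are copied by the client into a single
# CreateDatasetRequest before the transport is invoked, which is why the test
# inspects fields of args[0] rather than separate keyword arguments.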
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].dataset - mock_val = gca_dataset.Dataset(translation_dataset_metadata=translation.TranslationDatasetMetadata(source_language_code='source_language_code_value')) - assert arg == mock_val - - -def test_create_dataset_flattened_error(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_dataset( - service.CreateDatasetRequest(), - parent='parent_value', - dataset=gca_dataset.Dataset(translation_dataset_metadata=translation.TranslationDatasetMetadata(source_language_code='source_language_code_value')), - ) - -@pytest.mark.asyncio -async def test_create_dataset_flattened_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dataset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_dataset( - parent='parent_value', - dataset=gca_dataset.Dataset(translation_dataset_metadata=translation.TranslationDatasetMetadata(source_language_code='source_language_code_value')), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].dataset - mock_val = gca_dataset.Dataset(translation_dataset_metadata=translation.TranslationDatasetMetadata(source_language_code='source_language_code_value')) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_dataset_flattened_error_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_dataset( - service.CreateDatasetRequest(), - parent='parent_value', - dataset=gca_dataset.Dataset(translation_dataset_metadata=translation.TranslationDatasetMetadata(source_language_code='source_language_code_value')), - ) - - -@pytest.mark.parametrize("request_type", [ - service.GetDatasetRequest, - dict, -]) -def test_get_dataset(request_type, transport: str = 'grpc'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_dataset), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = dataset.Dataset( - name='name_value', - display_name='display_name_value', - description='description_value', - example_count=1396, - etag='etag_value', - ) - response = client.get_dataset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.GetDatasetRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dataset.Dataset) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.example_count == 1396 - assert response.etag == 'etag_value' - - -def test_get_dataset_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.GetDatasetRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_dataset), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_dataset(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.GetDatasetRequest( - name='name_value', - ) - -def test_get_dataset_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_dataset in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_dataset] = mock_rpc - request = {} - client.get_dataset(request) - - # Establish that the underlying gRPC stub method was called. 
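# A minimal, hedged model of the caching behavior asserted here: wrap every
# RPC once at client construction and reuse the wrapper afterwards.
# wrap_method below is a counting stand-in, not google.api_core's wrapper.
wrap_calls = {"count": 0}

def wrap_method(fn):
    wrap_calls["count"] += 1
    return fn

_wrapped_methods = {"get_dataset": wrap_method(lambda request: request)}  # built once

_wrapped_methods["get_dataset"]({})  # first call: reuses the cached wrapper
_wrapped_methods["get_dataset"]({})  # second call: still no new wrapper
assert wrap_calls["count"] == 1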
- assert mock_rpc.call_count == 1 - - client.get_dataset(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_dataset_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_dataset in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_dataset] = mock_rpc - - request = {} - await client.get_dataset(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_dataset(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_dataset_async(transport: str = 'grpc_asyncio', request_type=service.GetDatasetRequest): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_dataset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataset.Dataset( - name='name_value', - display_name='display_name_value', - description='description_value', - example_count=1396, - etag='etag_value', - )) - response = await client.get_dataset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.GetDatasetRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dataset.Dataset) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.example_count == 1396 - assert response.etag == 'etag_value' - - -@pytest.mark.asyncio -async def test_get_dataset_async_from_dict(): - await test_get_dataset_async(request_type=dict) - -def test_get_dataset_field_headers(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.GetDatasetRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_dataset), - '__call__') as call: - call.return_value = dataset.Dataset() - client.get_dataset(request) - - # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_dataset_field_headers_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.GetDatasetRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_dataset), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataset.Dataset()) - await client.get_dataset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_dataset_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_dataset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dataset.Dataset() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_dataset( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_dataset_flattened_error(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_dataset( - service.GetDatasetRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_dataset_flattened_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_dataset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dataset.Dataset() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataset.Dataset()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_dataset( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_dataset_flattened_error_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
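# A hedged sketch of the rule the *_flattened_error tests pin down: a request
# object and flattened fields are mutually exclusive. validate_flattened_call
# mirrors the generated clients in spirit only; it is not the actual code path.
def validate_flattened_call(request, **flattened):
    if request is not None and any(v is not None for v in flattened.values()):
        raise ValueError("If the `request` argument is set, then none of "
                         "the individual field arguments should be set.")

validate_flattened_call(None, name="name_value")   # flattened only: allowed
validate_flattened_call({"name": "n"})             # request only: allowed
# validate_flattened_call({"name": "n"}, name="n") # both: raises ValueError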
- with pytest.raises(ValueError): - await client.get_dataset( - service.GetDatasetRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - service.ListDatasetsRequest, - dict, -]) -def test_list_datasets(request_type, transport: str = 'grpc'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_datasets), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListDatasetsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_datasets(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.ListDatasetsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDatasetsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_datasets_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.ListDatasetsRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_datasets), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_datasets(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.ListDatasetsRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', - ) - -def test_list_datasets_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_datasets in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_datasets] = mock_rpc - request = {} - client.list_datasets(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_datasets(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_datasets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_datasets in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_datasets] = mock_rpc - - request = {} - await client.list_datasets(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_datasets(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_datasets_async(transport: str = 'grpc_asyncio', request_type=service.ListDatasetsRequest): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_datasets), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListDatasetsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_datasets(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.ListDatasetsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDatasetsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_datasets_async_from_dict(): - await test_list_datasets_async(request_type=dict) - -def test_list_datasets_field_headers(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.ListDatasetsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_datasets), - '__call__') as call: - call.return_value = service.ListDatasetsResponse() - client.list_datasets(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent.
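# For context, a small sketch of how the header asserted below is built.
# google.api_core.gapic_v1.routing_header is the real helper the generated
# clients use; the value shown assumes a simple string needing no URL-encoding.
from google.api_core import gapic_v1

key, value = gapic_v1.routing_header.to_grpc_metadata((("parent", "parent_value"),))
assert (key, value) == ("x-goog-request-params", "parent=parent_value")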
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_datasets_field_headers_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.ListDatasetsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_datasets), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListDatasetsResponse()) - await client.list_datasets(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_datasets_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_datasets), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListDatasetsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_datasets( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_datasets_flattened_error(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_datasets( - service.ListDatasetsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_datasets_flattened_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_datasets), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListDatasetsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListDatasetsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_datasets( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_datasets_flattened_error_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.list_datasets( - service.ListDatasetsRequest(), - parent='parent_value', - ) - - -def test_list_datasets_pager(transport_name: str = "grpc"): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_datasets), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - dataset.Dataset(), - ], - next_page_token='abc', - ), - service.ListDatasetsResponse( - datasets=[], - next_page_token='def', - ), - service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - ], - next_page_token='ghi', - ), - service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_datasets(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dataset.Dataset) - for i in results) -def test_list_datasets_pages(transport_name: str = "grpc"): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_datasets), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - dataset.Dataset(), - ], - next_page_token='abc', - ), - service.ListDatasetsResponse( - datasets=[], - next_page_token='def', - ), - service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - ], - next_page_token='ghi', - ), - service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - ], - ), - RuntimeError, - ) - pages = list(client.list_datasets(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_datasets_async_pager(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_datasets), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
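# A minimal, hedged model of what the pager under test does with the
# side_effect pages configured below: call the RPC, yield each page, and
# re-issue the request with next_page_token until the token is empty.
def iterate_pages(rpc, request):
    while True:
        page = rpc(request)
        yield page
        if not page["next_page_token"]:
            break
        request = dict(request, page_token=page["next_page_token"])

fake_pages = iter([
    {"datasets": [1, 2, 3], "next_page_token": "abc"},
    {"datasets": [], "next_page_token": ""},
])
items = [d for page in iterate_pages(lambda _: next(fake_pages), {}) for d in page["datasets"]]
assert items == [1, 2, 3]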
- call.side_effect = ( - service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - dataset.Dataset(), - ], - next_page_token='abc', - ), - service.ListDatasetsResponse( - datasets=[], - next_page_token='def', - ), - service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - ], - next_page_token='ghi', - ), - service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_datasets(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, dataset.Dataset) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_datasets_async_pages(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_datasets), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - dataset.Dataset(), - ], - next_page_token='abc', - ), - service.ListDatasetsResponse( - datasets=[], - next_page_token='def', - ), - service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - ], - next_page_token='ghi', - ), - service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_datasets(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - service.UpdateDatasetRequest, - dict, -]) -def test_update_dataset(request_type, transport: str = 'grpc'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_dataset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gca_dataset.Dataset( - name='name_value', - display_name='display_name_value', - description='description_value', - example_count=1396, - etag='etag_value', - ) - response = client.update_dataset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.UpdateDatasetRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, gca_dataset.Dataset) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.example_count == 1396 - assert response.etag == 'etag_value' - - -def test_update_dataset_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.UpdateDatasetRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_dataset), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_dataset(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.UpdateDatasetRequest( - ) - -def test_update_dataset_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_dataset in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_dataset] = mock_rpc - request = {} - client.update_dataset(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_dataset(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_dataset_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_dataset in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_dataset] = mock_rpc - - request = {} - await client.update_dataset(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.update_dataset(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_dataset_async(transport: str = 'grpc_asyncio', request_type=service.UpdateDatasetRequest): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_dataset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gca_dataset.Dataset( - name='name_value', - display_name='display_name_value', - description='description_value', - example_count=1396, - etag='etag_value', - )) - response = await client.update_dataset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.UpdateDatasetRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, gca_dataset.Dataset) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.example_count == 1396 - assert response.etag == 'etag_value' - - -@pytest.mark.asyncio -async def test_update_dataset_async_from_dict(): - await test_update_dataset_async(request_type=dict) - -def test_update_dataset_field_headers(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.UpdateDatasetRequest() - - request.dataset.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_dataset), - '__call__') as call: - call.return_value = gca_dataset.Dataset() - client.update_dataset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'dataset.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_dataset_field_headers_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.UpdateDatasetRequest() - - request.dataset.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_dataset), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gca_dataset.Dataset()) - await client.update_dataset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent.
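# Hedged observation on the assertion below: for UpdateDataset the routing
# parameter is derived from the nested field path, so the header value is
# "dataset.name=name_value" rather than a top-level "name=...".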
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'dataset.name=name_value',
-    ) in kw['metadata']
-
-
-def test_update_dataset_flattened():
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_dataset),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = gca_dataset.Dataset()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.update_dataset(
-            dataset=gca_dataset.Dataset(translation_dataset_metadata=translation.TranslationDatasetMetadata(source_language_code='source_language_code_value')),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].dataset
-        mock_val = gca_dataset.Dataset(translation_dataset_metadata=translation.TranslationDatasetMetadata(source_language_code='source_language_code_value'))
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-
-def test_update_dataset_flattened_error():
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.update_dataset(
-            service.UpdateDatasetRequest(),
-            dataset=gca_dataset.Dataset(translation_dataset_metadata=translation.TranslationDatasetMetadata(source_language_code='source_language_code_value')),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-@pytest.mark.asyncio
-async def test_update_dataset_flattened_async():
-    client = AutoMlAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_dataset),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gca_dataset.Dataset())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.update_dataset(
-            dataset=gca_dataset.Dataset(translation_dataset_metadata=translation.TranslationDatasetMetadata(source_language_code='source_language_code_value')),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].dataset
-        mock_val = gca_dataset.Dataset(translation_dataset_metadata=translation.TranslationDatasetMetadata(source_language_code='source_language_code_value'))
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_dataset_flattened_error_async():
-    client = AutoMlAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - await client.update_dataset( - service.UpdateDatasetRequest(), - dataset=gca_dataset.Dataset(translation_dataset_metadata=translation.TranslationDatasetMetadata(source_language_code='source_language_code_value')), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - service.DeleteDatasetRequest, - dict, -]) -def test_delete_dataset(request_type, transport: str = 'grpc'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dataset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.delete_dataset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.DeleteDatasetRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_dataset_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.DeleteDatasetRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dataset), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_dataset(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.DeleteDatasetRequest( - name='name_value', - ) - -def test_delete_dataset_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_dataset in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_dataset] = mock_rpc - request = {} - client.delete_dataset(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_dataset(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_dataset_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_dataset in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_dataset] = mock_rpc - - request = {} - await client.delete_dataset(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_dataset(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_dataset_async(transport: str = 'grpc_asyncio', request_type=service.DeleteDatasetRequest): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dataset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_dataset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.DeleteDatasetRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_dataset_async_from_dict(): - await test_delete_dataset_async(request_type=dict) - -def test_delete_dataset_field_headers(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.DeleteDatasetRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dataset), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_dataset(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_delete_dataset_field_headers_async():
-    client = AutoMlAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = service.DeleteDatasetRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_dataset),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
-        await client.delete_dataset(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_delete_dataset_flattened():
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_dataset),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.delete_dataset(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_delete_dataset_flattened_error():
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_dataset(
-            service.DeleteDatasetRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_delete_dataset_flattened_async():
-    client = AutoMlAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_dataset),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.delete_dataset(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_dataset_flattened_error_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_dataset( - service.DeleteDatasetRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - service.ImportDataRequest, - dict, -]) -def test_import_data(request_type, transport: str = 'grpc'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_data), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.import_data(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.ImportDataRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_import_data_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.ImportDataRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_data), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.import_data(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.ImportDataRequest( - name='name_value', - ) - -def test_import_data_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.import_data in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.import_data] = mock_rpc - request = {} - client.import_data(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.import_data(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_import_data_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.import_data in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.import_data] = mock_rpc - - request = {} - await client.import_data(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.import_data(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_import_data_async(transport: str = 'grpc_asyncio', request_type=service.ImportDataRequest): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_data), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.import_data(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.ImportDataRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_import_data_async_from_dict(): - await test_import_data_async(request_type=dict) - -def test_import_data_field_headers(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.ImportDataRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
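-    # (Patching __call__ on the type of the bound stub method intercepts the
-    # unary-unary RPC at the transport layer, so nothing leaves the process.)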
- with mock.patch.object( - type(client.transport.import_data), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.import_data(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_import_data_field_headers_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.ImportDataRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_data), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.import_data(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_import_data_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_data), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.import_data( - name='name_value', - input_config=io.InputConfig(gcs_source=io.GcsSource(input_uris=['input_uris_value'])), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].input_config - mock_val = io.InputConfig(gcs_source=io.GcsSource(input_uris=['input_uris_value'])) - assert arg == mock_val - - -def test_import_data_flattened_error(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.import_data( - service.ImportDataRequest(), - name='name_value', - input_config=io.InputConfig(gcs_source=io.GcsSource(input_uris=['input_uris_value'])), - ) - -@pytest.mark.asyncio -async def test_import_data_flattened_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_data), - '__call__') as call: - # Designate an appropriate return value for the call. 
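-        # import_data is a long-running method, so the stub is faked with an
-        # operations_pb2.Operation wrapped to be awaitable, not a final response.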
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.import_data(
-            name='name_value',
-            input_config=io.InputConfig(gcs_source=io.GcsSource(input_uris=['input_uris_value'])),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-        arg = args[0].input_config
-        mock_val = io.InputConfig(gcs_source=io.GcsSource(input_uris=['input_uris_value']))
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_import_data_flattened_error_async():
-    client = AutoMlAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.import_data(
-            service.ImportDataRequest(),
-            name='name_value',
-            input_config=io.InputConfig(gcs_source=io.GcsSource(input_uris=['input_uris_value'])),
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    service.ExportDataRequest,
-    dict,
-])
-def test_export_data(request_type, transport: str = 'grpc'):
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.export_data),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/spam')
-        response = client.export_data(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = service.ExportDataRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, future.Future)
-
-
-def test_export_data_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = service.ExportDataRequest(
-        name='name_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.export_data),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client.export_data(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.ExportDataRequest( - name='name_value', - ) - -def test_export_data_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.export_data in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.export_data] = mock_rpc - request = {} - client.export_data(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.export_data(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_export_data_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.export_data in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.export_data] = mock_rpc - - request = {} - await client.export_data(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.export_data(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_export_data_async(transport: str = 'grpc_asyncio', request_type=service.ExportDataRequest): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_data), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.export_data(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.ExportDataRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_export_data_async_from_dict(): - await test_export_data_async(request_type=dict) - -def test_export_data_field_headers(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.ExportDataRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_data), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.export_data(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_export_data_field_headers_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.ExportDataRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_data), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.export_data(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_export_data_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_data), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.export_data( - name='name_value', - output_config=io.OutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value')), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-        arg = args[0].output_config
-        mock_val = io.OutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value'))
-        assert arg == mock_val
-
-
-def test_export_data_flattened_error():
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.export_data(
-            service.ExportDataRequest(),
-            name='name_value',
-            output_config=io.OutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value')),
-        )
-
-@pytest.mark.asyncio
-async def test_export_data_flattened_async():
-    client = AutoMlAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.export_data),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.export_data(
-            name='name_value',
-            output_config=io.OutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value')),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-        arg = args[0].output_config
-        mock_val = io.OutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value'))
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_export_data_flattened_error_async():
-    client = AutoMlAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.export_data(
-            service.ExportDataRequest(),
-            name='name_value',
-            output_config=io.OutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value')),
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    service.GetAnnotationSpecRequest,
-    dict,
-])
-def test_get_annotation_spec(request_type, transport: str = 'grpc'):
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_annotation_spec),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = annotation_spec.AnnotationSpec(
-            name='name_value',
-            display_name='display_name_value',
-            example_count=1396,
-        )
-        response = client.get_annotation_spec(request)
-
-        # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.GetAnnotationSpecRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, annotation_spec.AnnotationSpec) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.example_count == 1396 - - -def test_get_annotation_spec_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.GetAnnotationSpecRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_annotation_spec), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_annotation_spec(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.GetAnnotationSpecRequest( - name='name_value', - ) - -def test_get_annotation_spec_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_annotation_spec in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_annotation_spec] = mock_rpc - request = {} - client.get_annotation_spec(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.get_annotation_spec(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_annotation_spec_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = AutoMlAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.get_annotation_spec in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.get_annotation_spec] = mock_rpc
-
-        request = {}
-        await client.get_annotation_spec(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.get_annotation_spec(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_annotation_spec_async(transport: str = 'grpc_asyncio', request_type=service.GetAnnotationSpecRequest):
-    client = AutoMlAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_annotation_spec),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(annotation_spec.AnnotationSpec(
-            name='name_value',
-            display_name='display_name_value',
-            example_count=1396,
-        ))
-        response = await client.get_annotation_spec(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = service.GetAnnotationSpecRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, annotation_spec.AnnotationSpec)
-    assert response.name == 'name_value'
-    assert response.display_name == 'display_name_value'
-    assert response.example_count == 1396
-
-
-@pytest.mark.asyncio
-async def test_get_annotation_spec_async_from_dict():
-    await test_get_annotation_spec_async(request_type=dict)
-
-def test_get_annotation_spec_field_headers():
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = service.GetAnnotationSpecRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
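-    # (The client is expected to copy request.name into the
-    # x-goog-request-params routing header, which is asserted below.)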
-    with mock.patch.object(
-            type(client.transport.get_annotation_spec),
-            '__call__') as call:
-        call.return_value = annotation_spec.AnnotationSpec()
-        client.get_annotation_spec(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_annotation_spec_field_headers_async():
-    client = AutoMlAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = service.GetAnnotationSpecRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_annotation_spec),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(annotation_spec.AnnotationSpec())
-        await client.get_annotation_spec(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_get_annotation_spec_flattened():
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_annotation_spec),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = annotation_spec.AnnotationSpec()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.get_annotation_spec(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_get_annotation_spec_flattened_error():
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_annotation_spec(
-            service.GetAnnotationSpecRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_get_annotation_spec_flattened_async():
-    client = AutoMlAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_annotation_spec),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(annotation_spec.AnnotationSpec())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.get_annotation_spec(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_annotation_spec_flattened_error_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_annotation_spec( - service.GetAnnotationSpecRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - service.CreateModelRequest, - dict, -]) -def test_create_model(request_type, transport: str = 'grpc'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_model(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.CreateModelRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_model_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.CreateModelRequest( - parent='parent_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_model), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_model(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.CreateModelRequest( - parent='parent_value', - ) - -def test_create_model_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_model in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
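-        # The _wrapped_methods cache is keyed by the bound transport method, so
-        # swapping the entry reroutes every later create_model call to the mock.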
- client._transport._wrapped_methods[client._transport.create_model] = mock_rpc - request = {} - client.create_model(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_model(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_model_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_model in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_model] = mock_rpc - - request = {} - await client.create_model(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_model(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_model_async(transport: str = 'grpc_asyncio', request_type=service.CreateModelRequest): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_model(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.CreateModelRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_model_async_from_dict(): - await test_create_model_async(request_type=dict) - -def test_create_model_field_headers(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = service.CreateModelRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_model), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_model(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_model_field_headers_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.CreateModelRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_model), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_model(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_model_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_model( - parent='parent_value', - model=gca_model.Model(translation_model_metadata=translation.TranslationModelMetadata(base_model='base_model_value')), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].model - mock_val = gca_model.Model(translation_model_metadata=translation.TranslationModelMetadata(base_model='base_model_value')) - assert arg == mock_val - - -def test_create_model_flattened_error(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_model( - service.CreateModelRequest(), - parent='parent_value', - model=gca_model.Model(translation_model_metadata=translation.TranslationModelMetadata(base_model='base_model_value')), - ) - -@pytest.mark.asyncio -async def test_create_model_flattened_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
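-    # (The flattened parent/model keyword arguments are merged into a single
-    # CreateModelRequest by the client before the transport method is invoked.)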
-    with mock.patch.object(
-            type(client.transport.create_model),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.create_model(
-            parent='parent_value',
-            model=gca_model.Model(translation_model_metadata=translation.TranslationModelMetadata(base_model='base_model_value')),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].model
-        mock_val = gca_model.Model(translation_model_metadata=translation.TranslationModelMetadata(base_model='base_model_value'))
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_model_flattened_error_async():
-    client = AutoMlAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.create_model(
-            service.CreateModelRequest(),
-            parent='parent_value',
-            model=gca_model.Model(translation_model_metadata=translation.TranslationModelMetadata(base_model='base_model_value')),
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    service.GetModelRequest,
-    dict,
-])
-def test_get_model(request_type, transport: str = 'grpc'):
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_model),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = model.Model(
-            name='name_value',
-            display_name='display_name_value',
-            dataset_id='dataset_id_value',
-            deployment_state=model.Model.DeploymentState.DEPLOYED,
-            etag='etag_value',
-        )
-        response = client.get_model(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = service.GetModelRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, model.Model)
-    assert response.name == 'name_value'
-    assert response.display_name == 'display_name_value'
-    assert response.dataset_id == 'dataset_id_value'
-    assert response.deployment_state == model.Model.DeploymentState.DEPLOYED
-    assert response.etag == 'etag_value'
-
-
-def test_get_model_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
- request = service.GetModelRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_model), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_model(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.GetModelRequest( - name='name_value', - ) - -def test_get_model_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_model in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_model] = mock_rpc - request = {} - client.get_model(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_model(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_model_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_model in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_model] = mock_rpc - - request = {} - await client.get_model(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_model(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_model_async(transport: str = 'grpc_asyncio', request_type=service.GetModelRequest): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_model), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(model.Model( - name='name_value', - display_name='display_name_value', - dataset_id='dataset_id_value', - deployment_state=model.Model.DeploymentState.DEPLOYED, - etag='etag_value', - )) - response = await client.get_model(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.GetModelRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, model.Model) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.dataset_id == 'dataset_id_value' - assert response.deployment_state == model.Model.DeploymentState.DEPLOYED - assert response.etag == 'etag_value' - - -@pytest.mark.asyncio -async def test_get_model_async_from_dict(): - await test_get_model_async(request_type=dict) - -def test_get_model_field_headers(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.GetModelRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_model), - '__call__') as call: - call.return_value = model.Model() - client.get_model(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_model_field_headers_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.GetModelRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_model), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(model.Model()) - await client.get_model(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_model_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = model.Model() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_model( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
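# The client copies flattened keyword arguments into a freshly
# constructed service.GetModelRequest, so each value can be read back
# off the request object captured in args[0].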
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_model_flattened_error(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_model( - service.GetModelRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_model_flattened_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = model.Model() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(model.Model()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_model( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_model_flattened_error_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_model( - service.GetModelRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - service.ListModelsRequest, - dict, -]) -def test_list_models(request_type, transport: str = 'grpc'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_models), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListModelsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_models(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.ListModelsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListModelsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_models_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = service.ListModelsRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_models), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_models(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.ListModelsRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', - ) - -def test_list_models_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_models in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_models] = mock_rpc - request = {} - client.list_models(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_models(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_models_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_models in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_models] = mock_rpc - - request = {} - await client.list_models(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_models(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_models_async(transport: str = 'grpc_asyncio', request_type=service.ListModelsRequest): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_models), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(service.ListModelsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_models(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.ListModelsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListModelsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_models_async_from_dict(): - await test_list_models_async(request_type=dict) - -def test_list_models_field_headers(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.ListModelsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_models), - '__call__') as call: - call.return_value = service.ListModelsResponse() - client.list_models(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_models_field_headers_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.ListModelsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_models), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListModelsResponse()) - await client.list_models(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_models_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_models), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListModelsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_models( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_models_flattened_error(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_models( - service.ListModelsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_models_flattened_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_models), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListModelsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListModelsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_models( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_models_flattened_error_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_models( - service.ListModelsRequest(), - parent='parent_value', - ) - - -def test_list_models_pager(transport_name: str = "grpc"): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_models), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListModelsResponse( - model=[ - model.Model(), - model.Model(), - model.Model(), - ], - next_page_token='abc', - ), - service.ListModelsResponse( - model=[], - next_page_token='def', - ), - service.ListModelsResponse( - model=[ - model.Model(), - ], - next_page_token='ghi', - ), - service.ListModelsResponse( - model=[ - model.Model(), - model.Model(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_models(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, model.Model) - for i in results) -def test_list_models_pages(transport_name: str = "grpc"): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_models), - '__call__') as call: - # Set the response to a series of pages. 
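# mock's side_effect yields one entry per invocation, so every page
# fetch returns the next ListModelsResponse; the trailing RuntimeError
# would only surface if the pager asked for a page past the final
# response, whose next_page_token is empty.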
- call.side_effect = ( - service.ListModelsResponse( - model=[ - model.Model(), - model.Model(), - model.Model(), - ], - next_page_token='abc', - ), - service.ListModelsResponse( - model=[], - next_page_token='def', - ), - service.ListModelsResponse( - model=[ - model.Model(), - ], - next_page_token='ghi', - ), - service.ListModelsResponse( - model=[ - model.Model(), - model.Model(), - ], - ), - RuntimeError, - ) - pages = list(client.list_models(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_models_async_pager(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_models), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListModelsResponse( - model=[ - model.Model(), - model.Model(), - model.Model(), - ], - next_page_token='abc', - ), - service.ListModelsResponse( - model=[], - next_page_token='def', - ), - service.ListModelsResponse( - model=[ - model.Model(), - ], - next_page_token='ghi', - ), - service.ListModelsResponse( - model=[ - model.Model(), - model.Model(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_models(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, model.Model) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_models_async_pages(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_models), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListModelsResponse( - model=[ - model.Model(), - model.Model(), - model.Model(), - ], - next_page_token='abc', - ), - service.ListModelsResponse( - model=[], - next_page_token='def', - ), - service.ListModelsResponse( - model=[ - model.Model(), - ], - next_page_token='ghi', - ), - service.ListModelsResponse( - model=[ - model.Model(), - model.Model(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_models(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - service.DeleteModelRequest, - dict, -]) -def test_delete_model(request_type, transport: str = 'grpc'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_model), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.delete_model(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.DeleteModelRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_model_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.DeleteModelRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_model), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_model(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.DeleteModelRequest( - name='name_value', - ) - -def test_delete_model_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_model in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_model] = mock_rpc - request = {} - client.delete_model(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
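# (That lazy construction runs wrapper_fn again, which is presumably
# why the mock is reset below before asserting that the second
# delete_model call creates no new wrappers.)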
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_model(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_model_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_model in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_model] = mock_rpc - - request = {} - await client.delete_model(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_model(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_model_async(transport: str = 'grpc_asyncio', request_type=service.DeleteModelRequest): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_model(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.DeleteModelRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_model_async_from_dict(): - await test_delete_model_async(request_type=dict) - -def test_delete_model_field_headers(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.DeleteModelRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_model), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_model(request) - - # Establish that the underlying gRPC stub method was called. 
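# For reference, the metadata tuple asserted a few lines below is what
# gapic_v1.routing_header.to_grpc_metadata((('name', 'name_value'),))
# would produce, i.e. ('x-goog-request-params', 'name=name_value') --
# the same helper the list_models pager test above uses.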
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_model_field_headers_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.DeleteModelRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_model), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_model(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_model_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_model( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_model_flattened_error(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_model( - service.DeleteModelRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_model_flattened_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_model( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_model_flattened_error_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_model( - service.DeleteModelRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - service.UpdateModelRequest, - dict, -]) -def test_update_model(request_type, transport: str = 'grpc'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gca_model.Model( - name='name_value', - display_name='display_name_value', - dataset_id='dataset_id_value', - deployment_state=gca_model.Model.DeploymentState.DEPLOYED, - etag='etag_value', - ) - response = client.update_model(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.UpdateModelRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, gca_model.Model) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.dataset_id == 'dataset_id_value' - assert response.deployment_state == gca_model.Model.DeploymentState.DEPLOYED - assert response.etag == 'etag_value' - - -def test_update_model_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.UpdateModelRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_model), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
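# (Assigning a string to .name keeps any code path that inspects the
# mocked response's name attribute, as the compute clients' LRO
# handling does per the comment above, from tripping over a bare Mock.)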
- client.update_model(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.UpdateModelRequest( - ) - -def test_update_model_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_model in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_model] = mock_rpc - request = {} - client.update_model(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_model(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_model_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_model in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_model] = mock_rpc - - request = {} - await client.update_model(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.update_model(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_model_async(transport: str = 'grpc_asyncio', request_type=service.UpdateModelRequest): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(gca_model.Model( - name='name_value', - display_name='display_name_value', - dataset_id='dataset_id_value', - deployment_state=gca_model.Model.DeploymentState.DEPLOYED, - etag='etag_value', - )) - response = await client.update_model(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.UpdateModelRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, gca_model.Model) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.dataset_id == 'dataset_id_value' - assert response.deployment_state == gca_model.Model.DeploymentState.DEPLOYED - assert response.etag == 'etag_value' - - -@pytest.mark.asyncio -async def test_update_model_async_from_dict(): - await test_update_model_async(request_type=dict) - -def test_update_model_field_headers(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.UpdateModelRequest() - - request.model.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_model), - '__call__') as call: - call.return_value = gca_model.Model() - client.update_model(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'model.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_model_field_headers_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.UpdateModelRequest() - - request.model.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_model), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gca_model.Model()) - await client.update_model(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'model.name=name_value', - ) in kw['metadata'] - - -def test_update_model_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gca_model.Model() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_model( - model=gca_model.Model(translation_model_metadata=translation.TranslationModelMetadata(base_model='base_model_value')), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].model - mock_val = gca_model.Model(translation_model_metadata=translation.TranslationModelMetadata(base_model='base_model_value')) - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_model_flattened_error(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_model( - service.UpdateModelRequest(), - model=gca_model.Model(translation_model_metadata=translation.TranslationModelMetadata(base_model='base_model_value')), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_model_flattened_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gca_model.Model() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gca_model.Model()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_model( - model=gca_model.Model(translation_model_metadata=translation.TranslationModelMetadata(base_model='base_model_value')), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].model - mock_val = gca_model.Model(translation_model_metadata=translation.TranslationModelMetadata(base_model='base_model_value')) - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_model_flattened_error_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_model( - service.UpdateModelRequest(), - model=gca_model.Model(translation_model_metadata=translation.TranslationModelMetadata(base_model='base_model_value')), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - service.DeployModelRequest, - dict, -]) -def test_deploy_model(request_type, transport: str = 'grpc'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.deploy_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.deploy_model(request) - - # Establish that the underlying gRPC stub method was called. 
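# (Each mock_calls entry unpacks to a (name, args, kwargs) triple,
# which is what the `_, args, _` destructuring below relies on.)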
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.DeployModelRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_deploy_model_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.DeployModelRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.deploy_model), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.deploy_model(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.DeployModelRequest( - name='name_value', - ) - -def test_deploy_model_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.deploy_model in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.deploy_model] = mock_rpc - request = {} - client.deploy_model(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.deploy_model(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_deploy_model_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.deploy_model in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.deploy_model] = mock_rpc - - request = {} - await client.deploy_model(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.deploy_model(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_deploy_model_async(transport: str = 'grpc_asyncio', request_type=service.DeployModelRequest): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.deploy_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.deploy_model(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.DeployModelRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_deploy_model_async_from_dict(): - await test_deploy_model_async(request_type=dict) - -def test_deploy_model_field_headers(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.DeployModelRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.deploy_model), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.deploy_model(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_deploy_model_field_headers_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.DeployModelRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.deploy_model), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.deploy_model(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_deploy_model_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.deploy_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.deploy_model( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_deploy_model_flattened_error(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.deploy_model( - service.DeployModelRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_deploy_model_flattened_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.deploy_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.deploy_model( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_deploy_model_flattened_error_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.deploy_model( - service.DeployModelRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - service.UndeployModelRequest, - dict, -]) -def test_undeploy_model(request_type, transport: str = 'grpc'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.undeploy_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.undeploy_model(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.UndeployModelRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_undeploy_model_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.UndeployModelRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.undeploy_model), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.undeploy_model(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.UndeployModelRequest( - name='name_value', - ) - -def test_undeploy_model_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.undeploy_model in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.undeploy_model] = mock_rpc - request = {} - client.undeploy_model(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.undeploy_model(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_undeploy_model_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.undeploy_model in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.undeploy_model] = mock_rpc - - request = {} - await client.undeploy_model(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.undeploy_model(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_undeploy_model_async(transport: str = 'grpc_asyncio', request_type=service.UndeployModelRequest): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.undeploy_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.undeploy_model(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.UndeployModelRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_undeploy_model_async_from_dict(): - await test_undeploy_model_async(request_type=dict) - -def test_undeploy_model_field_headers(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = service.UndeployModelRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.undeploy_model), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.undeploy_model(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_undeploy_model_field_headers_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.UndeployModelRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.undeploy_model), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.undeploy_model(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_undeploy_model_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.undeploy_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.undeploy_model( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_undeploy_model_flattened_error(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.undeploy_model( - service.UndeployModelRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_undeploy_model_flattened_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.undeploy_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method.
- response = await client.undeploy_model( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_undeploy_model_flattened_error_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.undeploy_model( - service.UndeployModelRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - service.ExportModelRequest, - dict, -]) -def test_export_model(request_type, transport: str = 'grpc'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.export_model(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.ExportModelRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_export_model_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.ExportModelRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_model), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.export_model(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.ExportModelRequest( - name='name_value', - ) - -def test_export_model_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.export_model in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
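
The use_cached_wrapped_rpc tests being set up here pin down a construction-time contract: wrap_method runs once per RPC inside _prep_wrapped_messages, and every later invocation goes through the cached wrapper instead of re-wrapping. A simplified sketch of that scheme (the stub attribute and timeout are illustrative):

    from google.api_core.gapic_v1 import method

    class SketchTransport:
        def __init__(self, stub):
            # Wrap each raw gRPC stub method exactly once at construction...
            self._wrapped_methods = {
                stub.export_model: method.wrap_method(
                    stub.export_model, default_timeout=5.0),
            }

        def call(self, rpc, request):
            # ...and dispatch through the cache on every call.
            return self._wrapped_methods[rpc](request)
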
- client._transport._wrapped_methods[client._transport.export_model] = mock_rpc - request = {} - client.export_model(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.export_model(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_export_model_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.export_model in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.export_model] = mock_rpc - - request = {} - await client.export_model(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.export_model(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_export_model_async(transport: str = 'grpc_asyncio', request_type=service.ExportModelRequest): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.export_model(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.ExportModelRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_export_model_async_from_dict(): - await test_export_model_async(request_type=dict) - -def test_export_model_field_headers(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
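
Note how every async variant wraps its canned response in grpc_helpers_async.FakeUnaryUnaryCall: the mocked stub must hand back something awaitable, the way a real grpc.aio unary-unary call would be. A stand-in with the same shape (assumption: the real helper in google-api-core does more bookkeeping):

    class FakeCall:
        # Awaitable wrapper: `await stub(request)` resolves to the response.
        def __init__(self, response):
            self._response = response

        def __await__(self):
            async def _coro():
                return self._response
            return _coro().__await__()
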
- request = service.ExportModelRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_model), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.export_model(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_export_model_field_headers_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.ExportModelRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_model), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.export_model(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_export_model_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.export_model( - name='name_value', - output_config=io.ModelExportOutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value')), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].output_config - mock_val = io.ModelExportOutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value')) - assert arg == mock_val - - -def test_export_model_flattened_error(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.export_model( - service.ExportModelRequest(), - name='name_value', - output_config=io.ModelExportOutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value')), - ) - -@pytest.mark.asyncio -async def test_export_model_flattened_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.export_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.export_model( - name='name_value', - output_config=io.ModelExportOutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value')), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].output_config - mock_val = io.ModelExportOutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value')) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_export_model_flattened_error_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.export_model( - service.ExportModelRequest(), - name='name_value', - output_config=io.ModelExportOutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value')), - ) - - -@pytest.mark.parametrize("request_type", [ - service.GetModelEvaluationRequest, - dict, -]) -def test_get_model_evaluation(request_type, transport: str = 'grpc'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_model_evaluation), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = model_evaluation.ModelEvaluation( - name='name_value', - annotation_spec_id='annotation_spec_id_value', - display_name='display_name_value', - evaluated_example_count=2446, - ) - response = client.get_model_evaluation(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.GetModelEvaluationRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, model_evaluation.ModelEvaluation) - assert response.name == 'name_value' - assert response.annotation_spec_id == 'annotation_spec_id_value' - assert response.display_name == 'display_name_value' - assert response.evaluated_example_count == 2446 - - -def test_get_model_evaluation_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235.
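
For orientation, the export surface exercised above is called roughly like this outside the tests; the resource name and bucket are placeholders, and the top-level re-exports assume the google-cloud-automl v1 package these tests belong to:

    from google.cloud import automl_v1

    client = automl_v1.AutoMlClient()
    operation = client.export_model(
        name='projects/my-project/locations/us-central1/models/my-model',
        output_config=automl_v1.ModelExportOutputConfig(
            gcs_destination=automl_v1.GcsDestination(
                output_uri_prefix='gs://my-bucket/export/'),
        ),
    )
    operation.result()  # export_model returns an LRO; block until it finishes
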
- request = service.GetModelEvaluationRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_model_evaluation), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_model_evaluation(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.GetModelEvaluationRequest( - name='name_value', - ) - -def test_get_model_evaluation_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_model_evaluation in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_model_evaluation] = mock_rpc - request = {} - client.get_model_evaluation(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_model_evaluation(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_model_evaluation_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_model_evaluation in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_model_evaluation] = mock_rpc - - request = {} - await client.get_model_evaluation(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_model_evaluation(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_model_evaluation_async(transport: str = 'grpc_asyncio', request_type=service.GetModelEvaluationRequest): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
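
The async cached-RPC test above swaps the wrapped method for mock.AsyncMock rather than mock.Mock, because the client awaits the wrapper. The difference in one self-contained snippet (standard library only):

    import asyncio
    from unittest import mock

    async def demo():
        rpc = mock.AsyncMock(return_value='response')
        assert await rpc({}) == 'response'  # calls are awaitable
        rpc.assert_awaited_once_with({})    # awaits are tracked separately

    asyncio.run(demo())
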
- with mock.patch.object( - type(client.transport.get_model_evaluation), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(model_evaluation.ModelEvaluation( - name='name_value', - annotation_spec_id='annotation_spec_id_value', - display_name='display_name_value', - evaluated_example_count=2446, - )) - response = await client.get_model_evaluation(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.GetModelEvaluationRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, model_evaluation.ModelEvaluation) - assert response.name == 'name_value' - assert response.annotation_spec_id == 'annotation_spec_id_value' - assert response.display_name == 'display_name_value' - assert response.evaluated_example_count == 2446 - - -@pytest.mark.asyncio -async def test_get_model_evaluation_async_from_dict(): - await test_get_model_evaluation_async(request_type=dict) - -def test_get_model_evaluation_field_headers(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.GetModelEvaluationRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_model_evaluation), - '__call__') as call: - call.return_value = model_evaluation.ModelEvaluation() - client.get_model_evaluation(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_model_evaluation_field_headers_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.GetModelEvaluationRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_model_evaluation), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(model_evaluation.ModelEvaluation()) - await client.get_model_evaluation(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_model_evaluation_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_model_evaluation), - '__call__') as call: - # Designate an appropriate return value for the call.
- call.return_value = model_evaluation.ModelEvaluation() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_model_evaluation( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_model_evaluation_flattened_error(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_model_evaluation( - service.GetModelEvaluationRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_model_evaluation_flattened_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_model_evaluation), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(model_evaluation.ModelEvaluation()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_model_evaluation( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_model_evaluation_flattened_error_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_model_evaluation( - service.GetModelEvaluationRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - service.ListModelEvaluationsRequest, - dict, -]) -def test_list_model_evaluations(request_type, transport: str = 'grpc'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_model_evaluations), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListModelEvaluationsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_model_evaluations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.ListModelEvaluationsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect.
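
The flattened_error tests above assert the generated guard against mixing a full request message with flattened keyword arguments. A minimal sketch of that guard, simplified from what GAPIC generators typically emit:

    def get_model_evaluation(request=None, *, name=None):
        # A request object and flattened fields would have to be merged,
        # so the generated surface rejects the ambiguous combination.
        if request is not None and any([name]):
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')
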
- assert isinstance(response, pagers.ListModelEvaluationsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_model_evaluations_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.ListModelEvaluationsRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_model_evaluations), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_model_evaluations(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.ListModelEvaluationsRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', - ) - -def test_list_model_evaluations_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_model_evaluations in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_model_evaluations] = mock_rpc - request = {} - client.list_model_evaluations(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_model_evaluations(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_model_evaluations_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_model_evaluations in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_model_evaluations] = mock_rpc - - request = {} - await client.list_model_evaluations(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.list_model_evaluations(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_model_evaluations_async(transport: str = 'grpc_asyncio', request_type=service.ListModelEvaluationsRequest): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_model_evaluations), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListModelEvaluationsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_model_evaluations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.ListModelEvaluationsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListModelEvaluationsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_model_evaluations_async_from_dict(): - await test_list_model_evaluations_async(request_type=dict) - -def test_list_model_evaluations_field_headers(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.ListModelEvaluationsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_model_evaluations), - '__call__') as call: - call.return_value = service.ListModelEvaluationsResponse() - client.list_model_evaluations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_model_evaluations_field_headers_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.ListModelEvaluationsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_model_evaluations), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListModelEvaluationsResponse()) - await client.list_model_evaluations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_model_evaluations_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_model_evaluations), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListModelEvaluationsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_model_evaluations( - parent='parent_value', - filter='filter_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].filter - mock_val = 'filter_value' - assert arg == mock_val - - -def test_list_model_evaluations_flattened_error(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_model_evaluations( - service.ListModelEvaluationsRequest(), - parent='parent_value', - filter='filter_value', - ) - -@pytest.mark.asyncio -async def test_list_model_evaluations_flattened_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_model_evaluations), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListModelEvaluationsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_model_evaluations( - parent='parent_value', - filter='filter_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].filter - mock_val = 'filter_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_model_evaluations_flattened_error_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_model_evaluations( - service.ListModelEvaluationsRequest(), - parent='parent_value', - filter='filter_value', - ) - - -def test_list_model_evaluations_pager(transport_name: str = "grpc"): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_model_evaluations), - '__call__') as call: - # Set the response to a series of pages.
- call.side_effect = ( - service.ListModelEvaluationsResponse( - model_evaluation=[ - model_evaluation.ModelEvaluation(), - model_evaluation.ModelEvaluation(), - model_evaluation.ModelEvaluation(), - ], - next_page_token='abc', - ), - service.ListModelEvaluationsResponse( - model_evaluation=[], - next_page_token='def', - ), - service.ListModelEvaluationsResponse( - model_evaluation=[ - model_evaluation.ModelEvaluation(), - ], - next_page_token='ghi', - ), - service.ListModelEvaluationsResponse( - model_evaluation=[ - model_evaluation.ModelEvaluation(), - model_evaluation.ModelEvaluation(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_model_evaluations(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, model_evaluation.ModelEvaluation) - for i in results) -def test_list_model_evaluations_pages(transport_name: str = "grpc"): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_model_evaluations), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListModelEvaluationsResponse( - model_evaluation=[ - model_evaluation.ModelEvaluation(), - model_evaluation.ModelEvaluation(), - model_evaluation.ModelEvaluation(), - ], - next_page_token='abc', - ), - service.ListModelEvaluationsResponse( - model_evaluation=[], - next_page_token='def', - ), - service.ListModelEvaluationsResponse( - model_evaluation=[ - model_evaluation.ModelEvaluation(), - ], - next_page_token='ghi', - ), - service.ListModelEvaluationsResponse( - model_evaluation=[ - model_evaluation.ModelEvaluation(), - model_evaluation.ModelEvaluation(), - ], - ), - RuntimeError, - ) - pages = list(client.list_model_evaluations(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_model_evaluations_async_pager(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_model_evaluations), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
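
The pager tests above chain four fake responses and verify that iteration walks the next_page_token boundaries transparently. Against a live service the same pager is consumed roughly like this (the parent value is a placeholder):

    pager = client.list_model_evaluations(
        parent='projects/p/locations/l/models/m', filter='')
    for evaluation in pager:
        # The pager re-issues the RPC with each page token behind the scenes.
        print(evaluation.name)
    # Page-level access mirrors the assertions above:
    # for page in pager.pages: page.raw_page.next_page_token
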
- call.side_effect = ( - service.ListModelEvaluationsResponse( - model_evaluation=[ - model_evaluation.ModelEvaluation(), - model_evaluation.ModelEvaluation(), - model_evaluation.ModelEvaluation(), - ], - next_page_token='abc', - ), - service.ListModelEvaluationsResponse( - model_evaluation=[], - next_page_token='def', - ), - service.ListModelEvaluationsResponse( - model_evaluation=[ - model_evaluation.ModelEvaluation(), - ], - next_page_token='ghi', - ), - service.ListModelEvaluationsResponse( - model_evaluation=[ - model_evaluation.ModelEvaluation(), - model_evaluation.ModelEvaluation(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_model_evaluations(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, model_evaluation.ModelEvaluation) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_model_evaluations_async_pages(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_model_evaluations), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListModelEvaluationsResponse( - model_evaluation=[ - model_evaluation.ModelEvaluation(), - model_evaluation.ModelEvaluation(), - model_evaluation.ModelEvaluation(), - ], - next_page_token='abc', - ), - service.ListModelEvaluationsResponse( - model_evaluation=[], - next_page_token='def', - ), - service.ListModelEvaluationsResponse( - model_evaluation=[ - model_evaluation.ModelEvaluation(), - ], - next_page_token='ghi', - ), - service.ListModelEvaluationsResponse( - model_evaluation=[ - model_evaluation.ModelEvaluation(), - model_evaluation.ModelEvaluation(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_model_evaluations(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_create_dataset_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_dataset in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_dataset] = mock_rpc - - request = {} - client.create_dataset(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_dataset(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_dataset_rest_required_fields(request_type=service.CreateDatasetRequest): - transport_class = transports.AutoMlRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_dataset._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_dataset._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_dataset(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_dataset_rest_unset_required_fields(): - transport = transports.AutoMlRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_dataset._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "dataset", ))) - - -def test_create_dataset_rest_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
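
The REST required-fields test above leans on one proto3/JSON detail: json_format.MessageToJson omits fields that still hold their default value, which is how the transport can tell which required fields were never set. The behavior in isolation, assuming the same service types module the tests import:

    import json
    from google.protobuf import json_format

    pb = service.CreateDatasetRequest.pb(service.CreateDatasetRequest(parent=''))
    jsonified = json.loads(json_format.MessageToJson(pb))
    assert 'parent' not in jsonified  # default-valued proto3 fields drop out
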
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - dataset=gca_dataset.Dataset(translation_dataset_metadata=translation.TranslationDatasetMetadata(source_language_code='source_language_code_value')), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_dataset(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/datasets" % client.transport._host, args[1]) - - -def test_create_dataset_rest_flattened_error(transport: str = 'rest'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_dataset( - service.CreateDatasetRequest(), - parent='parent_value', - dataset=gca_dataset.Dataset(translation_dataset_metadata=translation.TranslationDatasetMetadata(source_language_code='source_language_code_value')), - ) - - -def test_get_dataset_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_dataset in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_dataset] = mock_rpc - - request = {} - client.get_dataset(request) - - # Establish that the underlying gRPC stub method was called. 
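
The flattened REST tests check the final URL against the method's HTTP rule with path_template.validate, which matches a concrete path against a {var=pattern} template. For example (the host is a placeholder):

    from google.api_core import path_template

    assert path_template.validate(
        'https://example.googleapis.com/v1/{parent=projects/*/locations/*}/datasets',
        'https://example.googleapis.com/v1/projects/sample1/locations/sample2/datasets',
    )
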
- assert mock_rpc.call_count == 1 - - client.get_dataset(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_dataset_rest_required_fields(request_type=service.GetDatasetRequest): - transport_class = transports.AutoMlRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_dataset._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_dataset._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dataset.Dataset() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dataset.Dataset.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_dataset(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_dataset_rest_unset_required_fields(): - transport = transports.AutoMlRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_dataset._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_dataset_rest_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
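
Throughout these REST tests, proto-plus wrappers are lowered to raw protobuf messages with .pb() before JSON serialization; the client performs the inverse when it parses the mocked Response body. A round trip sketch using the same dataset module (import path assumed):

    from google.protobuf import json_format
    from google.cloud.automl_v1.types import dataset  # assumption

    msg = dataset.Dataset(display_name='weather')  # proto-plus wrapper
    payload = json_format.MessageToJson(dataset.Dataset.pb(msg))
    parsed = json_format.Parse(payload, dataset.Dataset.pb(dataset.Dataset()))
    assert dataset.Dataset.wrap(parsed).display_name == 'weather'
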
- return_value = dataset.Dataset() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/datasets/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dataset.Dataset.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_dataset(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/datasets/*}" % client.transport._host, args[1]) - - -def test_get_dataset_rest_flattened_error(transport: str = 'rest'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_dataset( - service.GetDatasetRequest(), - name='name_value', - ) - - -def test_list_datasets_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_datasets in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_datasets] = mock_rpc - - request = {} - client.list_datasets(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_datasets(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_datasets_rest_required_fields(request_type=service.ListDatasetsRequest): - transport_class = transports.AutoMlRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_datasets._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_datasets._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = service.ListDatasetsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = service.ListDatasetsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_datasets(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_datasets_rest_unset_required_fields(): - transport = transports.AutoMlRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_datasets._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_datasets_rest_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
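- # Patching the session's `request` method intercepts the call at the
- # HTTP layer, so the assertions below can inspect the final expanded
- # URL without any network traffic.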
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = service.ListDatasetsResponse()
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
- # get truthy value for each flattened field
- mock_args = dict(
- parent='parent_value',
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = service.ListDatasetsResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- client.list_datasets(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/datasets" % client.transport._host, args[1])
-
- def test_list_datasets_rest_flattened_error(transport: str = 'rest'):
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.list_datasets(
- service.ListDatasetsRequest(),
- parent='parent_value',
- )
-
- def test_list_datasets_rest_pager(transport: str = 'rest'):
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, 'request') as req:
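- # The pager follows next_page_token across successive HTTP responses,
- # fetching each page lazily and stopping at the first page whose
- # token is empty.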
- # Set the response as a series of pages
- response = (
- service.ListDatasetsResponse(
- datasets=[
- dataset.Dataset(),
- dataset.Dataset(),
- dataset.Dataset(),
- ],
- next_page_token='abc',
- ),
- service.ListDatasetsResponse(
- datasets=[],
- next_page_token='def',
- ),
- service.ListDatasetsResponse(
- datasets=[
- dataset.Dataset(),
- ],
- next_page_token='ghi',
- ),
- service.ListDatasetsResponse(
- datasets=[
- dataset.Dataset(),
- dataset.Dataset(),
- ],
- ),
- )
- # Two responses for two calls
- response = response + response
-
- # Wrap the values into proper Response objs
- response = tuple(service.ListDatasetsResponse.to_json(x) for x in response)
- return_values = tuple(Response() for i in response)
- for return_val, response_val in zip(return_values, response):
- return_val._content = response_val.encode('UTF-8')
- return_val.status_code = 200
- req.side_effect = return_values
-
- sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
- pager = client.list_datasets(request=sample_request)
-
- results = list(pager)
- assert len(results) == 6
- assert all(isinstance(i, dataset.Dataset)
- for i in results)
-
- pages = list(client.list_datasets(request=sample_request).pages)
- for page_, token in zip(pages, ['abc','def','ghi', '']):
- assert page_.raw_page.next_page_token == token
-
- def test_update_dataset_rest_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.update_dataset in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.update_dataset] = mock_rpc
-
- request = {}
- client.update_dataset(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- client.update_dataset(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
- def test_update_dataset_rest_required_fields(request_type=service.UpdateDatasetRequest):
- transport_class = transports.AutoMlRestTransport
-
- request_init = {}
- request = request_type(**request_init)
- pb_request = request_type.pb(request)
- jsonified_request = json.loads(json_format.MessageToJson(
- pb_request,
- use_integers_for_enums=False
- ))
-
- # verify fields with default values are dropped
-
- unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_dataset._get_unset_required_fields(jsonified_request)
- jsonified_request.update(unset_fields)
-
- # verify required fields with default values are now present
-
- unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_dataset._get_unset_required_fields(jsonified_request)
- # Check that path parameters and body parameters are not mixing in.
- assert not set(unset_fields) - set(("update_mask", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = gca_dataset.Dataset() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gca_dataset.Dataset.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_dataset(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_dataset_rest_unset_required_fields(): - transport = transports.AutoMlRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_dataset._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", )) & set(("dataset", "updateMask", ))) - - -def test_update_dataset_rest_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = gca_dataset.Dataset() - - # get arguments that satisfy an http rule for this method - sample_request = {'dataset': {'name': 'projects/sample1/locations/sample2/datasets/sample3'}} - - # get truthy value for each flattened field - mock_args = dict( - dataset=gca_dataset.Dataset(translation_dataset_metadata=translation.TranslationDatasetMetadata(source_language_code='source_language_code_value')), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gca_dataset.Dataset.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_dataset(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
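- # path_template.validate checks the captured URI against the http rule
- # pattern, confirming that the flattened `dataset.name` field was
- # expanded into the request path.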
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{dataset.name=projects/*/locations/*/datasets/*}" % client.transport._host, args[1]) - - -def test_update_dataset_rest_flattened_error(transport: str = 'rest'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_dataset( - service.UpdateDatasetRequest(), - dataset=gca_dataset.Dataset(translation_dataset_metadata=translation.TranslationDatasetMetadata(source_language_code='source_language_code_value')), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_delete_dataset_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_dataset in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_dataset] = mock_rpc - - request = {} - client.delete_dataset(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_dataset(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_dataset_rest_required_fields(request_type=service.DeleteDatasetRequest): - transport_class = transports.AutoMlRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_dataset._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_dataset._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. 
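- # Two layers get patched below: Session.request, so nothing leaves the
- # test process, and path_template.transcode, so the placeholder values
- # for required fields survive URI expansion.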
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_dataset(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_dataset_rest_unset_required_fields(): - transport = transports.AutoMlRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_dataset._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_delete_dataset_rest_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/datasets/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_dataset(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/datasets/*}" % client.transport._host, args[1]) - - -def test_delete_dataset_rest_flattened_error(transport: str = 'rest'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_dataset( - service.DeleteDatasetRequest(), - name='name_value', - ) - - -def test_import_data_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.import_data in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.import_data] = mock_rpc - - request = {} - client.import_data(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.import_data(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_import_data_rest_required_fields(request_type=service.ImportDataRequest): - transport_class = transports.AutoMlRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).import_data._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).import_data._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
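- # (For this POST method the transcode result does carry a body,
- # unlike the GET methods above.)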
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.import_data(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_import_data_rest_unset_required_fields(): - transport = transports.AutoMlRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.import_data._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", "inputConfig", ))) - - -def test_import_data_rest_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/datasets/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - input_config=io.InputConfig(gcs_source=io.GcsSource(input_uris=['input_uris_value'])), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.import_data(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/datasets/*}:importData" % client.transport._host, args[1]) - - -def test_import_data_rest_flattened_error(transport: str = 'rest'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.import_data( - service.ImportDataRequest(), - name='name_value', - input_config=io.InputConfig(gcs_source=io.GcsSource(input_uris=['input_uris_value'])), - ) - - -def test_export_data_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.export_data in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.export_data] = mock_rpc - - request = {} - client.export_data(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.export_data(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_export_data_rest_required_fields(request_type=service.ExportDataRequest): - transport_class = transports.AutoMlRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_data._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_data._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.export_data(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_export_data_rest_unset_required_fields(): - transport = transports.AutoMlRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.export_data._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", "outputConfig", ))) - - -def test_export_data_rest_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/datasets/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - output_config=io.OutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value')), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.export_data(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/datasets/*}:exportData" % client.transport._host, args[1]) - - -def test_export_data_rest_flattened_error(transport: str = 'rest'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.export_data( - service.ExportDataRequest(), - name='name_value', - output_config=io.OutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value')), - ) - - -def test_get_annotation_spec_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_annotation_spec in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_annotation_spec] = mock_rpc - - request = {} - client.get_annotation_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_annotation_spec(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_annotation_spec_rest_required_fields(request_type=service.GetAnnotationSpecRequest): - transport_class = transports.AutoMlRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_annotation_spec._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_annotation_spec._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = annotation_spec.AnnotationSpec() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = annotation_spec.AnnotationSpec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_annotation_spec(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_annotation_spec_rest_unset_required_fields(): - transport = transports.AutoMlRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_annotation_spec._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_annotation_spec_rest_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = annotation_spec.AnnotationSpec() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/datasets/sample3/annotationSpecs/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = annotation_spec.AnnotationSpec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_annotation_spec(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/datasets/*/annotationSpecs/*}" % client.transport._host, args[1]) - - -def test_get_annotation_spec_rest_flattened_error(transport: str = 'rest'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_annotation_spec( - service.GetAnnotationSpecRequest(), - name='name_value', - ) - - -def test_create_model_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_model in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_model] = mock_rpc - - request = {} - client.create_model(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_model(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_model_rest_required_fields(request_type=service.CreateModelRequest): - transport_class = transports.AutoMlRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_model._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_model._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_model(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_model_rest_unset_required_fields(): - transport = transports.AutoMlRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_model._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "model", ))) - - -def test_create_model_rest_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - model=gca_model.Model(translation_model_metadata=translation.TranslationModelMetadata(base_model='base_model_value')), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_model(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/models" % client.transport._host, args[1]) - - -def test_create_model_rest_flattened_error(transport: str = 'rest'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_model( - service.CreateModelRequest(), - parent='parent_value', - model=gca_model.Model(translation_model_metadata=translation.TranslationModelMetadata(base_model='base_model_value')), - ) - - -def test_get_model_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_model in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_model] = mock_rpc - - request = {} - client.get_model(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_model(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_model_rest_required_fields(request_type=service.GetModelRequest): - transport_class = transports.AutoMlRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_model._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_model._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = model.Model() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = model.Model.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_model(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_model_rest_unset_required_fields(): - transport = transports.AutoMlRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_model._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_model_rest_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = model.Model() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/models/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = model.Model.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_model(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/models/*}" % client.transport._host, args[1]) - - -def test_get_model_rest_flattened_error(transport: str = 'rest'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_model( - service.GetModelRequest(), - name='name_value', - ) - - -def test_list_models_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_models in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_models] = mock_rpc - - request = {} - client.list_models(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_models(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_models_rest_required_fields(request_type=service.ListModelsRequest): - transport_class = transports.AutoMlRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_models._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_models._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = service.ListModelsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "get",
- 'query_params': pb_request,
- }
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- # Convert return value to protobuf type
- return_value = service.ListModelsResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.list_models(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
- def test_list_models_rest_unset_required_fields():
- transport = transports.AutoMlRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
- unset_fields = transport.list_models._get_unset_required_fields({})
- assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", )))
-
- def test_list_models_rest_flattened():
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = service.ListModelsResponse()
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
- # get truthy value for each flattened field
- mock_args = dict(
- parent='parent_value',
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = service.ListModelsResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- client.list_models(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/models" % client.transport._host, args[1])
-
- def test_list_models_rest_flattened_error(transport: str = 'rest'):
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.list_models(
- service.ListModelsRequest(),
- parent='parent_value',
- )
-
- def test_list_models_rest_pager(transport: str = 'rest'):
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, 'request') as req:
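- # Same paging contract as test_list_datasets_rest_pager above, this
- # time over the repeated `model` field of ListModelsResponse.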
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - service.ListModelsResponse( - model=[ - model.Model(), - model.Model(), - model.Model(), - ], - next_page_token='abc', - ), - service.ListModelsResponse( - model=[], - next_page_token='def', - ), - service.ListModelsResponse( - model=[ - model.Model(), - ], - next_page_token='ghi', - ), - service.ListModelsResponse( - model=[ - model.Model(), - model.Model(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(service.ListModelsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - pager = client.list_models(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, model.Model) - for i in results) - - pages = list(client.list_models(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_delete_model_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_model in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_model] = mock_rpc - - request = {} - client.delete_model(request) - - # Establish that the underlying gRPC stub method was called. 
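The pager test above feeds four pages (twice over) and expects the iterator to surface all six models and to stop on the empty ``next_page_token``. The token-driven loop it exercises can be sketched in isolation; the fetch function below is a hypothetical stand-in for one ``list_models`` HTTP call, not the generated pager:

.. code-block:: python

    # Each response carries the token for the next page; an empty token
    # means the listing is complete.
    PAGES = {
        "": {"model": ["m1", "m2", "m3"], "next_page_token": "abc"},
        "abc": {"model": [], "next_page_token": "def"},
        "def": {"model": ["m4"], "next_page_token": "ghi"},
        "ghi": {"model": ["m5", "m6"], "next_page_token": ""},
    }

    def iterate_models():
        token = ""
        while True:
            page = PAGES[token]  # stand-in for one list_models call
            yield from page["model"]
            token = page["next_page_token"]
            if not token:  # empty token terminates the pager
                return

    assert list(iterate_models()) == ["m1", "m2", "m3", "m4", "m5", "m6"]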
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_model(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_model_rest_required_fields(request_type=service.DeleteModelRequest): - transport_class = transports.AutoMlRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_model._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_model._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_model(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_model_rest_unset_required_fields(): - transport = transports.AutoMlRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_model._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_delete_model_rest_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
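The ``required_fields`` tests round-trip the request through JSON and ask the transport which required keys are still missing or left at their defaults. A simplified sketch of that check, with a hypothetical helper (the generated code derives the required set from the proto definition):

.. code-block:: python

    import json

    REQUIRED_FIELDS = {"name"}  # assumed required set for DeleteModelRequest

    def get_unset_required_fields(jsonified_request):
        # A required field counts as unset when absent or at its default.
        return {k for k in REQUIRED_FIELDS if not jsonified_request.get(k)}

    request = json.loads("{}")
    assert get_unset_required_fields(request) == {"name"}

    request["name"] = "name_value"
    assert get_unset_required_fields(request) == set()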
- return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/models/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_model(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/models/*}" % client.transport._host, args[1]) - - -def test_delete_model_rest_flattened_error(transport: str = 'rest'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_model( - service.DeleteModelRequest(), - name='name_value', - ) - - -def test_update_model_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_model in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_model] = mock_rpc - - request = {} - client.update_model(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_model(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_model_rest_required_fields(request_type=service.UpdateModelRequest): - transport_class = transports.AutoMlRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_model._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_model._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = gca_model.Model() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gca_model.Model.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_model(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_model_rest_unset_required_fields(): - transport = transports.AutoMlRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_model._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", )) & set(("model", "updateMask", ))) - - -def test_update_model_rest_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = gca_model.Model() - - # get arguments that satisfy an http rule for this method - sample_request = {'model': {'name': 'projects/sample1/locations/sample2/models/sample3'}} - - # get truthy value for each flattened field - mock_args = dict( - model=gca_model.Model(translation_model_metadata=translation.TranslationModelMetadata(base_model='base_model_value')), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gca_model.Model.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_model(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
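The ``update_mask`` passed in the flattened ``update_model`` call above tells the server which model fields the request intends to change; fields outside the mask keep their stored values. A toy illustration with flat dicts, not the real ``FieldMask`` machinery:

.. code-block:: python

    def apply_update_mask(stored, patch, paths):
        # Only paths named in the mask are copied from the patch.
        for path in paths:
            stored[path] = patch[path]
        return stored

    stored = {"display_name": "old", "description": "unchanged"}
    patch = {"display_name": "new", "description": "would be lost"}
    assert apply_update_mask(stored, patch, ["display_name"]) == {
        "display_name": "new",
        "description": "unchanged",
    }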
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{model.name=projects/*/locations/*/models/*}" % client.transport._host, args[1]) - - -def test_update_model_rest_flattened_error(transport: str = 'rest'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_model( - service.UpdateModelRequest(), - model=gca_model.Model(translation_model_metadata=translation.TranslationModelMetadata(base_model='base_model_value')), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_deploy_model_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.deploy_model in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.deploy_model] = mock_rpc - - request = {} - client.deploy_model(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.deploy_model(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_deploy_model_rest_required_fields(request_type=service.DeployModelRequest): - transport_class = transports.AutoMlRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).deploy_model._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).deploy_model._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.deploy_model(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_deploy_model_rest_unset_required_fields(): - transport = transports.AutoMlRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.deploy_model._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_deploy_model_rest_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/models/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.deploy_model(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/models/*}:deploy" % client.transport._host, args[1]) - - -def test_deploy_model_rest_flattened_error(transport: str = 'rest'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.deploy_model( - service.DeployModelRequest(), - name='name_value', - ) - - -def test_undeploy_model_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.undeploy_model in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.undeploy_model] = mock_rpc - - request = {} - client.undeploy_model(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.undeploy_model(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_undeploy_model_rest_required_fields(request_type=service.UndeployModelRequest): - transport_class = transports.AutoMlRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).undeploy_model._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).undeploy_model._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
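The ``transcode`` mock in these tests replaces the helper that matches a request against the method's HTTP bindings and splits it into uri, method, query params, and body. A much-simplified sketch of that mapping follows; the real ``path_template.transcode`` also understands patterns like ``{name=projects/*/locations/*/models/*}``:

.. code-block:: python

    def transcode(http_options, request):
        # Use the first binding whose path variables the request can fill.
        for rule in http_options:
            try:
                uri = rule["uri"].format(**request)
            except KeyError:
                continue
            # Fields consumed by the path do not repeat as query parameters.
            query = {k: v for k, v in request.items()
                     if "{%s}" % k not in rule["uri"]}
            return {"uri": uri, "method": rule["method"], "query_params": query}
        raise ValueError("request does not match any URI pattern")

    result = transcode(
        [{"uri": "/v1/{name}:deploy", "method": "post"}],
        {"name": "projects/p/locations/l/models/m"},
    )
    assert result == {
        "uri": "/v1/projects/p/locations/l/models/m:deploy",
        "method": "post",
        "query_params": {},
    }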
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.undeploy_model(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_undeploy_model_rest_unset_required_fields(): - transport = transports.AutoMlRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.undeploy_model._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_undeploy_model_rest_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/models/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.undeploy_model(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/models/*}:undeploy" % client.transport._host, args[1]) - - -def test_undeploy_model_rest_flattened_error(transport: str = 'rest'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.undeploy_model( - service.UndeployModelRequest(), - name='name_value', - ) - - -def test_export_model_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.export_model in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.export_model] = mock_rpc - - request = {} - client.export_model(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.export_model(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_export_model_rest_required_fields(request_type=service.ExportModelRequest): - transport_class = transports.AutoMlRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_model._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_model._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.export_model(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_export_model_rest_unset_required_fields(): - transport = transports.AutoMlRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.export_model._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", "outputConfig", ))) - - -def test_export_model_rest_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/models/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - output_config=io.ModelExportOutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value')), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.export_model(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/models/*}:export" % client.transport._host, args[1]) - - -def test_export_model_rest_flattened_error(transport: str = 'rest'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.export_model( - service.ExportModelRequest(), - name='name_value', - output_config=io.ModelExportOutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value')), - ) - - -def test_get_model_evaluation_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_model_evaluation in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_model_evaluation] = mock_rpc - - request = {} - client.get_model_evaluation(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_model_evaluation(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_model_evaluation_rest_required_fields(request_type=service.GetModelEvaluationRequest): - transport_class = transports.AutoMlRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_model_evaluation._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_model_evaluation._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = model_evaluation.ModelEvaluation() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
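Each ``*_flattened_error`` test in this section pins down the same client contract: flattened keyword arguments are a convenience for building the request, so supplying them alongside an explicit request object is ambiguous and rejected. A minimal, hypothetical sketch of the guard (error wording illustrative):

.. code-block:: python

    def export_model(request=None, *, name=None, output_config=None):
        flattened = [f for f in (name, output_config) if f is not None]
        if request is not None and flattened:
            raise ValueError("If the `request` argument is set, none of "
                             "the individual field arguments may be set.")
        return request or {"name": name, "output_config": output_config}

    try:
        export_model({"name": "n"}, name="name_value")
    except ValueError:
        print("rejected, as the tests expect")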
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = model_evaluation.ModelEvaluation.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_model_evaluation(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_model_evaluation_rest_unset_required_fields(): - transport = transports.AutoMlRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_model_evaluation._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_model_evaluation_rest_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = model_evaluation.ModelEvaluation() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/models/sample3/modelEvaluations/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = model_evaluation.ModelEvaluation.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_model_evaluation(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/models/*/modelEvaluations/*}" % client.transport._host, args[1]) - - -def test_get_model_evaluation_rest_flattened_error(transport: str = 'rest'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_model_evaluation( - service.GetModelEvaluationRequest(), - name='name_value', - ) - - -def test_list_model_evaluations_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_model_evaluations in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_model_evaluations] = mock_rpc - - request = {} - client.list_model_evaluations(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_model_evaluations(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_model_evaluations_rest_required_fields(request_type=service.ListModelEvaluationsRequest): - transport_class = transports.AutoMlRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["filter"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "filter" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_model_evaluations._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "filter" in jsonified_request - assert jsonified_request["filter"] == request_init["filter"] - - jsonified_request["parent"] = 'parent_value' - jsonified_request["filter"] = 'filter_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_model_evaluations._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "filter" in jsonified_request - assert jsonified_request["filter"] == 'filter_value' - - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = service.ListModelEvaluationsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
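Note that ``ListModelEvaluationsRequest`` is the one method here with a required query-string field: the test below checks that ``filter`` must survive the required-fields round trip alongside ``parent``, and later asserts that an empty ``("filter", "")`` pair still appears in the request params.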
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = service.ListModelEvaluationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_model_evaluations(request) - - expected_params = [ - ( - "filter", - "", - ), - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_model_evaluations_rest_unset_required_fields(): - transport = transports.AutoMlRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_model_evaluations._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", "filter", ))) - - -def test_list_model_evaluations_rest_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = service.ListModelEvaluationsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/models/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - filter='filter_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = service.ListModelEvaluationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_model_evaluations(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*/models/*}/modelEvaluations" % client.transport._host, args[1]) - - -def test_list_model_evaluations_rest_flattened_error(transport: str = 'rest'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
-    with pytest.raises(ValueError):
-        client.list_model_evaluations(
-            service.ListModelEvaluationsRequest(),
-            parent='parent_value',
-            filter='filter_value',
-        )
-
-
-def test_list_model_evaluations_rest_pager(transport: str = 'rest'):
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # Set the response as a series of pages
-        response = (
-            service.ListModelEvaluationsResponse(
-                model_evaluation=[
-                    model_evaluation.ModelEvaluation(),
-                    model_evaluation.ModelEvaluation(),
-                    model_evaluation.ModelEvaluation(),
-                ],
-                next_page_token='abc',
-            ),
-            service.ListModelEvaluationsResponse(
-                model_evaluation=[],
-                next_page_token='def',
-            ),
-            service.ListModelEvaluationsResponse(
-                model_evaluation=[
-                    model_evaluation.ModelEvaluation(),
-                ],
-                next_page_token='ghi',
-            ),
-            service.ListModelEvaluationsResponse(
-                model_evaluation=[
-                    model_evaluation.ModelEvaluation(),
-                    model_evaluation.ModelEvaluation(),
-                ],
-            ),
-        )
-        # Two responses for two calls
-        response = response + response
-
-        # Wrap the values into proper Response objs
-        response = tuple(service.ListModelEvaluationsResponse.to_json(x) for x in response)
-        return_values = tuple(Response() for i in response)
-        for return_val, response_val in zip(return_values, response):
-            return_val._content = response_val.encode('UTF-8')
-            return_val.status_code = 200
-        req.side_effect = return_values
-
-        sample_request = {'parent': 'projects/sample1/locations/sample2/models/sample3'}
-
-        pager = client.list_model_evaluations(request=sample_request)
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, model_evaluation.ModelEvaluation)
-                   for i in results)
-
-        pages = list(client.list_model_evaluations(request=sample_request).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-
-def test_credentials_transport_error():
-    # It is an error to provide credentials and a transport instance.
-    transport = transports.AutoMlGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    with pytest.raises(ValueError):
-        client = AutoMlClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport=transport,
-        )
-
-    # It is an error to provide a credentials file and a transport instance.
-    transport = transports.AutoMlGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    with pytest.raises(ValueError):
-        client = AutoMlClient(
-            client_options={"credentials_file": "credentials.json"},
-            transport=transport,
-        )
-
-    # It is an error to provide an api_key and a transport instance.
-    transport = transports.AutoMlGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    options = client_options.ClientOptions()
-    options.api_key = "api_key"
-    with pytest.raises(ValueError):
-        client = AutoMlClient(
-            client_options=options,
-            transport=transport,
-        )
-
-    # It is an error to provide an api_key and a credential.
-    options = client_options.ClientOptions()
-    options.api_key = "api_key"
-    with pytest.raises(ValueError):
-        client = AutoMlClient(
-            client_options=options,
-            credentials=ga_credentials.AnonymousCredentials()
-        )
-
-    # It is an error to provide scopes and a transport instance.
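``test_credentials_transport_error`` asserts that a ready-made transport is mutually exclusive with every other credential source (credentials object, credentials file, api_key, scopes), since the transport was already built with its own channel and credentials. A condensed, hypothetical sketch of that constructor guard:

.. code-block:: python

    class FakeClient:
        def __init__(self, credentials=None, transport=None, client_options=None):
            options = dict(client_options or {})
            other_sources = (credentials, options.get("credentials_file"),
                             options.get("api_key"), options.get("scopes"))
            if transport is not None and any(s is not None for s in other_sources):
                # The transport already owns its credentials; accepting a
                # second source would be ambiguous.
                raise ValueError("transport is mutually exclusive with "
                                 "other credential sources")
            self.transport = transport

    try:
        FakeClient(credentials=object(), transport=object())
    except ValueError as exc:
        print("rejected:", exc)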
- transport = transports.AutoMlGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = AutoMlClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.AutoMlGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = AutoMlClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.AutoMlGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.AutoMlGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.AutoMlGrpcTransport, - transports.AutoMlGrpcAsyncIOTransport, - transports.AutoMlRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = AutoMlClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_dataset_empty_call_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_dataset), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_dataset(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.CreateDatasetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_dataset_empty_call_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_dataset), - '__call__') as call: - call.return_value = dataset.Dataset() - client.get_dataset(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetDatasetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_datasets_empty_call_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
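``test_transport_adc`` above patches ``google.auth.default`` to confirm that a transport built without explicit credentials falls back to Application Default Credentials. The same probe in isolation (assumes ``google-auth`` is installed; the faked tuple mirrors the ``(credentials, project)`` return shape):

.. code-block:: python

    from unittest import mock

    import google.auth

    with mock.patch.object(google.auth, "default") as adc:
        adc.return_value = (object(), None)  # (credentials, project_id)
        credentials, project = google.auth.default()
    adc.assert_called_once()
    assert project is None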
- with mock.patch.object( - type(client.transport.list_datasets), - '__call__') as call: - call.return_value = service.ListDatasetsResponse() - client.list_datasets(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListDatasetsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_dataset_empty_call_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_dataset), - '__call__') as call: - call.return_value = gca_dataset.Dataset() - client.update_dataset(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.UpdateDatasetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_dataset_empty_call_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_dataset), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_dataset(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.DeleteDatasetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_import_data_empty_call_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.import_data), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.import_data(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ImportDataRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_export_data_empty_call_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.export_data), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.export_data(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ExportDataRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_get_annotation_spec_empty_call_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_annotation_spec), - '__call__') as call: - call.return_value = annotation_spec.AnnotationSpec() - client.get_annotation_spec(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetAnnotationSpecRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_model_empty_call_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_model), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_model(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.CreateModelRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_model_empty_call_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_model), - '__call__') as call: - call.return_value = model.Model() - client.get_model(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetModelRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_models_empty_call_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_models), - '__call__') as call: - call.return_value = service.ListModelsResponse() - client.list_models(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListModelsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_model_empty_call_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_model), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_model(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.DeleteModelRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. 
request == None and no flattened fields passed, work. -def test_update_model_empty_call_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_model), - '__call__') as call: - call.return_value = gca_model.Model() - client.update_model(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.UpdateModelRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_deploy_model_empty_call_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.deploy_model), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.deploy_model(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.DeployModelRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_undeploy_model_empty_call_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.undeploy_model), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.undeploy_model(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.UndeployModelRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_export_model_empty_call_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.export_model), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.export_model(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ExportModelRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_model_evaluation_empty_call_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_model_evaluation), - '__call__') as call: - call.return_value = model_evaluation.ModelEvaluation() - client.get_model_evaluation(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetModelEvaluationRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_model_evaluations_empty_call_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_model_evaluations), - '__call__') as call: - call.return_value = service.ListModelEvaluationsResponse() - client.list_model_evaluations(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListModelEvaluationsRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = AutoMlAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_dataset_empty_call_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_dataset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_dataset(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.CreateDatasetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_dataset_empty_call_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_dataset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataset.Dataset( - name='name_value', - display_name='display_name_value', - description='description_value', - example_count=1396, - etag='etag_value', - )) - await client.get_dataset(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetDatasetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_datasets_empty_call_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.list_datasets), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListDatasetsResponse( - next_page_token='next_page_token_value', - )) - await client.list_datasets(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListDatasetsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_dataset_empty_call_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_dataset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gca_dataset.Dataset( - name='name_value', - display_name='display_name_value', - description='description_value', - example_count=1396, - etag='etag_value', - )) - await client.update_dataset(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.UpdateDatasetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_dataset_empty_call_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_dataset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_dataset(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.DeleteDatasetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_import_data_empty_call_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.import_data), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.import_data(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ImportDataRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_export_data_empty_call_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.export_data), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.export_data(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ExportDataRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_annotation_spec_empty_call_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_annotation_spec), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(annotation_spec.AnnotationSpec( - name='name_value', - display_name='display_name_value', - example_count=1396, - )) - await client.get_annotation_spec(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetAnnotationSpecRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_model_empty_call_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_model(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.CreateModelRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_model_empty_call_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(model.Model( - name='name_value', - display_name='display_name_value', - dataset_id='dataset_id_value', - deployment_state=model.Model.DeploymentState.DEPLOYED, - etag='etag_value', - )) - await client.get_model(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetModelRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_models_empty_call_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_models), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListModelsResponse( - next_page_token='next_page_token_value', - )) - await client.list_models(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListModelsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_model_empty_call_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_model(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.DeleteModelRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_model_empty_call_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gca_model.Model( - name='name_value', - display_name='display_name_value', - dataset_id='dataset_id_value', - deployment_state=gca_model.Model.DeploymentState.DEPLOYED, - etag='etag_value', - )) - await client.update_model(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.UpdateModelRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_deploy_model_empty_call_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.deploy_model), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.deploy_model(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.DeployModelRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_undeploy_model_empty_call_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.undeploy_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.undeploy_model(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.UndeployModelRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_export_model_empty_call_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.export_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.export_model(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ExportModelRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_model_evaluation_empty_call_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_model_evaluation), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(model_evaluation.ModelEvaluation( - name='name_value', - annotation_spec_id='annotation_spec_id_value', - display_name='display_name_value', - evaluated_example_count=2446, - )) - await client.get_model_evaluation(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetModelEvaluationRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_model_evaluations_empty_call_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.list_model_evaluations), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListModelEvaluationsResponse( - next_page_token='next_page_token_value', - )) - await client.list_model_evaluations(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListModelEvaluationsRequest() - - assert args[0] == request_msg - - -def test_transport_kind_rest(): - transport = AutoMlClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" - - -def test_create_dataset_rest_bad_request(request_type=service.CreateDatasetRequest): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_dataset(request) - - -@pytest.mark.parametrize("request_type", [ - service.CreateDatasetRequest, - dict, -]) -def test_create_dataset_rest_call_success(request_type): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["dataset"] = {'translation_dataset_metadata': {'source_language_code': 'source_language_code_value', 'target_language_code': 'target_language_code_value'}, 'image_classification_dataset_metadata': {'classification_type': 1}, 'text_classification_dataset_metadata': {'classification_type': 1}, 'image_object_detection_dataset_metadata': {}, 'text_extraction_dataset_metadata': {}, 'text_sentiment_dataset_metadata': {'sentiment_max': 1404}, 'name': 'name_value', 'display_name': 'display_name_value', 'description': 'description_value', 'example_count': 1396, 'create_time': {'seconds': 751, 'nanos': 543}, 'etag': 'etag_value', 'labels': {}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = service.CreateDatasetRequest.meta.fields["dataset"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["dataset"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["dataset"][field])): - del request_init["dataset"][field][i][subfield] - else: - del request_init["dataset"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_dataset(request) - - # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_dataset_rest_interceptors(null_interceptor): - transport = transports.AutoMlRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AutoMlRestInterceptor(), - ) - client = AutoMlClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_create_dataset") as post, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_create_dataset_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AutoMlRestInterceptor, "pre_create_dataset") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.CreateDatasetRequest.pb(service.CreateDatasetRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = service.CreateDatasetRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.create_dataset(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_dataset_rest_bad_request(request_type=service.GetDatasetRequest): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/datasets/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_dataset(request) - - -@pytest.mark.parametrize("request_type", [ - service.GetDatasetRequest, - dict, -]) -def test_get_dataset_rest_call_success(request_type): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/datasets/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = dataset.Dataset( - name='name_value', - display_name='display_name_value', - description='description_value', - example_count=1396, - etag='etag_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dataset.Dataset.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_dataset(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dataset.Dataset) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.example_count == 1396 - assert response.etag == 'etag_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_dataset_rest_interceptors(null_interceptor): - transport = transports.AutoMlRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AutoMlRestInterceptor(), - ) - client = AutoMlClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_get_dataset") as post, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_get_dataset_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AutoMlRestInterceptor, "pre_get_dataset") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.GetDatasetRequest.pb(service.GetDatasetRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dataset.Dataset.to_json(dataset.Dataset()) - req.return_value.content = return_value - - request = service.GetDatasetRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dataset.Dataset() - post_with_metadata.return_value = dataset.Dataset(), metadata - - client.get_dataset(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_datasets_rest_bad_request(request_type=service.ListDatasetsRequest): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_datasets(request) - - -@pytest.mark.parametrize("request_type", [ - service.ListDatasetsRequest, - dict, -]) -def test_list_datasets_rest_call_success(request_type): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = service.ListDatasetsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = service.ListDatasetsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_datasets(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDatasetsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_datasets_rest_interceptors(null_interceptor): - transport = transports.AutoMlRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AutoMlRestInterceptor(), - ) - client = AutoMlClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_list_datasets") as post, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_list_datasets_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AutoMlRestInterceptor, "pre_list_datasets") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.ListDatasetsRequest.pb(service.ListDatasetsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = service.ListDatasetsResponse.to_json(service.ListDatasetsResponse()) - req.return_value.content = return_value - - request = service.ListDatasetsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = service.ListDatasetsResponse() - post_with_metadata.return_value = service.ListDatasetsResponse(), metadata - - client.list_datasets(request, metadata=[("key", "val"), 
("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_dataset_rest_bad_request(request_type=service.UpdateDatasetRequest): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'dataset': {'name': 'projects/sample1/locations/sample2/datasets/sample3'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_dataset(request) - - -@pytest.mark.parametrize("request_type", [ - service.UpdateDatasetRequest, - dict, -]) -def test_update_dataset_rest_call_success(request_type): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'dataset': {'name': 'projects/sample1/locations/sample2/datasets/sample3'}} - request_init["dataset"] = {'translation_dataset_metadata': {'source_language_code': 'source_language_code_value', 'target_language_code': 'target_language_code_value'}, 'image_classification_dataset_metadata': {'classification_type': 1}, 'text_classification_dataset_metadata': {'classification_type': 1}, 'image_object_detection_dataset_metadata': {}, 'text_extraction_dataset_metadata': {}, 'text_sentiment_dataset_metadata': {'sentiment_max': 1404}, 'name': 'projects/sample1/locations/sample2/datasets/sample3', 'display_name': 'display_name_value', 'description': 'description_value', 'example_count': 1396, 'create_time': {'seconds': 751, 'nanos': 543}, 'etag': 'etag_value', 'labels': {}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = service.UpdateDatasetRequest.meta.fields["dataset"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["dataset"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["dataset"][field])): - del request_init["dataset"][field][i][subfield] - else: - del request_init["dataset"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = gca_dataset.Dataset( - name='name_value', - display_name='display_name_value', - description='description_value', - example_count=1396, - etag='etag_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gca_dataset.Dataset.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_dataset(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, gca_dataset.Dataset) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.example_count == 1396 - assert response.etag == 'etag_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_dataset_rest_interceptors(null_interceptor): - transport = transports.AutoMlRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AutoMlRestInterceptor(), - ) - client = AutoMlClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_update_dataset") as post, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_update_dataset_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AutoMlRestInterceptor, "pre_update_dataset") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.UpdateDatasetRequest.pb(service.UpdateDatasetRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = gca_dataset.Dataset.to_json(gca_dataset.Dataset()) - req.return_value.content = return_value - - request = service.UpdateDatasetRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = gca_dataset.Dataset() - post_with_metadata.return_value = gca_dataset.Dataset(), metadata - - client.update_dataset(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_dataset_rest_bad_request(request_type=service.DeleteDatasetRequest): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/datasets/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_dataset(request) - - -@pytest.mark.parametrize("request_type", [ - service.DeleteDatasetRequest, - dict, -]) -def test_delete_dataset_rest_call_success(request_type): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/datasets/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_dataset(request) - - # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_dataset_rest_interceptors(null_interceptor): - transport = transports.AutoMlRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AutoMlRestInterceptor(), - ) - client = AutoMlClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_delete_dataset") as post, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_delete_dataset_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AutoMlRestInterceptor, "pre_delete_dataset") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.DeleteDatasetRequest.pb(service.DeleteDatasetRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = service.DeleteDatasetRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.delete_dataset(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_import_data_rest_bad_request(request_type=service.ImportDataRequest): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/datasets/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.import_data(request) - - -@pytest.mark.parametrize("request_type", [ - service.ImportDataRequest, - dict, -]) -def test_import_data_rest_call_success(request_type): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/datasets/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.import_data(request) - - # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_import_data_rest_interceptors(null_interceptor): - transport = transports.AutoMlRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AutoMlRestInterceptor(), - ) - client = AutoMlClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_import_data") as post, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_import_data_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AutoMlRestInterceptor, "pre_import_data") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.ImportDataRequest.pb(service.ImportDataRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = service.ImportDataRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.import_data(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def 
test_export_data_rest_bad_request(request_type=service.ExportDataRequest): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/datasets/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.export_data(request) - - -@pytest.mark.parametrize("request_type", [ - service.ExportDataRequest, - dict, -]) -def test_export_data_rest_call_success(request_type): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/datasets/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.export_data(request) - - # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_export_data_rest_interceptors(null_interceptor): - transport = transports.AutoMlRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AutoMlRestInterceptor(), - ) - client = AutoMlClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_export_data") as post, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_export_data_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AutoMlRestInterceptor, "pre_export_data") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.ExportDataRequest.pb(service.ExportDataRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = service.ExportDataRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.export_data(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_annotation_spec_rest_bad_request(request_type=service.GetAnnotationSpecRequest): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/datasets/sample3/annotationSpecs/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_annotation_spec(request) - - -@pytest.mark.parametrize("request_type", [ - service.GetAnnotationSpecRequest, - dict, -]) -def test_get_annotation_spec_rest_call_success(request_type): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/datasets/sample3/annotationSpecs/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = annotation_spec.AnnotationSpec( - name='name_value', - display_name='display_name_value', - example_count=1396, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = annotation_spec.AnnotationSpec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_annotation_spec(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, annotation_spec.AnnotationSpec) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.example_count == 1396 - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_annotation_spec_rest_interceptors(null_interceptor): - transport = transports.AutoMlRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AutoMlRestInterceptor(), - ) - client = AutoMlClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_get_annotation_spec") as post, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_get_annotation_spec_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AutoMlRestInterceptor, "pre_get_annotation_spec") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.GetAnnotationSpecRequest.pb(service.GetAnnotationSpecRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = annotation_spec.AnnotationSpec.to_json(annotation_spec.AnnotationSpec()) - req.return_value.content = return_value - - request = service.GetAnnotationSpecRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = annotation_spec.AnnotationSpec() - post_with_metadata.return_value = annotation_spec.AnnotationSpec(), metadata - - client.get_annotation_spec(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_model_rest_bad_request(request_type=service.CreateModelRequest): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
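The 200-path mocks build their payloads the way a real server would: the proto-plus wrapper is unwrapped to its raw protobuf with `.pb()`, serialized with `MessageToJson`, and attached to the fake response as UTF-8 bytes. The round trip in isolation, as a sketch using a type this file already imports:

```python
from google.protobuf import json_format

from google.cloud.automl_v1.types import annotation_spec

msg = annotation_spec.AnnotationSpec(name='name_value', example_count=1396)

# proto-plus wraps a generated protobuf class; json_format only understands
# the raw message, hence the .pb() unwrapping before serialization.
payload = json_format.MessageToJson(annotation_spec.AnnotationSpec.pb(msg))
content = payload.encode('UTF-8')   # what the fake response carries in .content

# Parsing the payload back restores an equal raw message, which is what the
# client does with the mocked response body.
empty_pb = annotation_spec.AnnotationSpec.pb(annotation_spec.AnnotationSpec())
restored = json_format.Parse(payload, empty_pb)
assert restored == annotation_spec.AnnotationSpec.pb(msg)
```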
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_model(request) - - -@pytest.mark.parametrize("request_type", [ - service.CreateModelRequest, - dict, -]) -def test_create_model_rest_call_success(request_type): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["model"] = {'translation_model_metadata': {'base_model': 'base_model_value', 'source_language_code': 'source_language_code_value', 'target_language_code': 'target_language_code_value'}, 'image_classification_model_metadata': {'base_model_id': 'base_model_id_value', 'train_budget_milli_node_hours': 3075, 'train_cost_milli_node_hours': 2881, 'stop_reason': 'stop_reason_value', 'model_type': 'model_type_value', 'node_qps': 0.857, 'node_count': 1070}, 'text_classification_model_metadata': {'classification_type': 1}, 'image_object_detection_model_metadata': {'model_type': 'model_type_value', 'node_count': 1070, 'node_qps': 0.857, 'stop_reason': 'stop_reason_value', 'train_budget_milli_node_hours': 3075, 'train_cost_milli_node_hours': 2881}, 'text_extraction_model_metadata': {}, 'text_sentiment_model_metadata': {}, 'name': 'name_value', 'display_name': 'display_name_value', 'dataset_id': 'dataset_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'deployment_state': 1, 'etag': 'etag_value', 'labels': {}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = service.CreateModelRequest.meta.fields["model"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["model"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["model"][field])): - del request_init["model"][field][i][subfield] - else: - del request_init["model"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_model(request) - - # Establish that the response is the type that we expect. 
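The field-pruning dance above exists because the sample `request_init` dict was frozen at generation time, while the protobuf definitions installed at test time may be older. Stripped of the proto introspection, the pruning step reduces to the following (a simplified, hypothetical helper; `known` stands in for the `runtime_nested_fields` list built above):

```python
def prune_unknown_subfields(payload, known):
    # Drop any (field, subfield) pair the runtime message definition lacks,
    # handling both singular message fields (dict) and repeated ones (list).
    for field, value in payload.items():
        items = value if isinstance(value, list) else [value]
        for item in items:
            if isinstance(item, dict):
                for subfield in list(item):
                    if (field, subfield) not in known:
                        del item[subfield]
    return payload


sample = {"translation_model_metadata": {"base_model": "m", "added_in_v2": 1}}
known = {("translation_model_metadata", "base_model")}
assert prune_unknown_subfields(sample, known) == {
    "translation_model_metadata": {"base_model": "m"}
}
```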
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_model_rest_interceptors(null_interceptor): - transport = transports.AutoMlRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AutoMlRestInterceptor(), - ) - client = AutoMlClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_create_model") as post, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_create_model_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AutoMlRestInterceptor, "pre_create_model") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.CreateModelRequest.pb(service.CreateModelRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = service.CreateModelRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.create_model(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_model_rest_bad_request(request_type=service.GetModelRequest): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/models/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_model(request) - - -@pytest.mark.parametrize("request_type", [ - service.GetModelRequest, - dict, -]) -def test_get_model_rest_call_success(request_type): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/models/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = model.Model( - name='name_value', - display_name='display_name_value', - dataset_id='dataset_id_value', - deployment_state=model.Model.DeploymentState.DEPLOYED, - etag='etag_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = model.Model.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_model(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, model.Model) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.dataset_id == 'dataset_id_value' - assert response.deployment_state == model.Model.DeploymentState.DEPLOYED - assert response.etag == 'etag_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_model_rest_interceptors(null_interceptor): - transport = transports.AutoMlRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AutoMlRestInterceptor(), - ) - client = AutoMlClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_get_model") as post, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_get_model_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AutoMlRestInterceptor, "pre_get_model") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.GetModelRequest.pb(service.GetModelRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = model.Model.to_json(model.Model()) - req.return_value.content = return_value - - request = service.GetModelRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = model.Model() - post_with_metadata.return_value = model.Model(), metadata - - client.get_model(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_models_rest_bad_request(request_type=service.ListModelsRequest): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
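The `null_interceptor` parametrization checks that the REST transport behaves the same with and without hooks installed. A user-supplied interceptor is just a subclass overriding the `pre_*`/`post_*` methods these tests patch; a sketch (the tracing behaviour is our invention, not anything the generated code does):

```python
from google.auth import credentials as ga_credentials

from google.cloud.automl_v1.services.auto_ml import AutoMlClient, transports


class TracingInterceptor(transports.AutoMlRestInterceptor):
    """Hypothetical interceptor that tags requests and inspects responses."""

    def pre_get_model(self, request, metadata):
        # pre_* hooks may rewrite the request and metadata before the HTTP call.
        return request, list(metadata) + [("x-trace-id", "demo")]

    def post_get_model(self, response):
        # post_* hooks receive the deserialized response message.
        print("got model:", response.name)
        return response


transport = transports.AutoMlRestTransport(
    credentials=ga_credentials.AnonymousCredentials(),
    interceptor=TracingInterceptor(),
)
client = AutoMlClient(transport=transport)
```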
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_models(request) - - -@pytest.mark.parametrize("request_type", [ - service.ListModelsRequest, - dict, -]) -def test_list_models_rest_call_success(request_type): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = service.ListModelsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = service.ListModelsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_models(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListModelsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_models_rest_interceptors(null_interceptor): - transport = transports.AutoMlRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AutoMlRestInterceptor(), - ) - client = AutoMlClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_list_models") as post, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_list_models_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AutoMlRestInterceptor, "pre_list_models") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.ListModelsRequest.pb(service.ListModelsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = service.ListModelsResponse.to_json(service.ListModelsResponse()) - req.return_value.content = return_value - - request = service.ListModelsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = service.ListModelsResponse() - post_with_metadata.return_value = service.ListModelsResponse(), metadata - - client.list_models(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - 
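`client.list_models` returns a `pagers.ListModelsPager` rather than the raw response: the pager holds the bound RPC and re-issues it with each `next_page_token` as iteration crosses a page boundary. The mechanics with a mocked method (a sketch; the pager is constructed here the same way the client does internally):

```python
from unittest import mock

from google.cloud.automl_v1.services.auto_ml import pagers
from google.cloud.automl_v1.types import model, service

first_page = service.ListModelsResponse(
    model=[model.Model(), model.Model()],
    next_page_token='token-1',
)
last_page = service.ListModelsResponse(model=[model.Model()])

# The pager calls the bound method again whenever the current page carries a
# non-empty next_page_token.
method = mock.Mock(return_value=last_page)
pager = pagers.ListModelsPager(method, service.ListModelsRequest(), first_page)

assert len(list(pager)) == 3   # two items from page 1, one from page 2
method.assert_called_once()
```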
pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_model_rest_bad_request(request_type=service.DeleteModelRequest): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/models/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_model(request) - - -@pytest.mark.parametrize("request_type", [ - service.DeleteModelRequest, - dict, -]) -def test_delete_model_rest_call_success(request_type): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/models/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_model(request) - - # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_model_rest_interceptors(null_interceptor): - transport = transports.AutoMlRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AutoMlRestInterceptor(), - ) - client = AutoMlClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_delete_model") as post, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_delete_model_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AutoMlRestInterceptor, "pre_delete_model") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.DeleteModelRequest.pb(service.DeleteModelRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = service.DeleteModelRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.delete_model(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_model_rest_bad_request(request_type=service.UpdateModelRequest): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'model': {'name': 'projects/sample1/locations/sample2/models/sample3'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
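Every interceptor test stubs out `path_template.transcode`, which is the piece that turns a request message into a concrete HTTP call. Against the real function, transcoding looks like this (a sketch; the http rule shown follows the one published for AutoML v1 DeleteModel, which is an assumption here, and it matches the sample path used above):

```python
from google.api_core import path_template

http_options = [{
    'method': 'delete',
    'uri': '/v1/{name=projects/*/locations/*/models/*}',
}]
request = {'name': 'projects/sample1/locations/sample2/models/sample3'}

# transcode() matches the request against the rule and splits it into the
# method / uri / body / query_params dict the REST transport then sends.
transcoded = path_template.transcode(http_options, **request)
assert transcoded['method'] == 'delete'
assert transcoded['uri'] == '/v1/projects/sample1/locations/sample2/models/sample3'
assert transcoded['query_params'] == {}
```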
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_model(request) - - -@pytest.mark.parametrize("request_type", [ - service.UpdateModelRequest, - dict, -]) -def test_update_model_rest_call_success(request_type): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'model': {'name': 'projects/sample1/locations/sample2/models/sample3'}} - request_init["model"] = {'translation_model_metadata': {'base_model': 'base_model_value', 'source_language_code': 'source_language_code_value', 'target_language_code': 'target_language_code_value'}, 'image_classification_model_metadata': {'base_model_id': 'base_model_id_value', 'train_budget_milli_node_hours': 3075, 'train_cost_milli_node_hours': 2881, 'stop_reason': 'stop_reason_value', 'model_type': 'model_type_value', 'node_qps': 0.857, 'node_count': 1070}, 'text_classification_model_metadata': {'classification_type': 1}, 'image_object_detection_model_metadata': {'model_type': 'model_type_value', 'node_count': 1070, 'node_qps': 0.857, 'stop_reason': 'stop_reason_value', 'train_budget_milli_node_hours': 3075, 'train_cost_milli_node_hours': 2881}, 'text_extraction_model_metadata': {}, 'text_sentiment_model_metadata': {}, 'name': 'projects/sample1/locations/sample2/models/sample3', 'display_name': 'display_name_value', 'dataset_id': 'dataset_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'deployment_state': 1, 'etag': 'etag_value', 'labels': {}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = service.UpdateModelRequest.meta.fields["model"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["model"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["model"][field])): - del request_init["model"][field][i][subfield] - else: - del request_init["model"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = gca_model.Model( - name='name_value', - display_name='display_name_value', - dataset_id='dataset_id_value', - deployment_state=gca_model.Model.DeploymentState.DEPLOYED, - etag='etag_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gca_model.Model.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_model(request) - - # Establish that the response is the type that we expect. 
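For context on what a non-mocked update looks like: the public `UpdateModelRequest` pairs the partial `model` with a `FieldMask` naming the fields to change. A sketch (the mask usage follows the v1 API surface as we understand it, not anything asserted in this file):

```python
from google.protobuf import field_mask_pb2

from google.cloud.automl_v1.types import model as gca_model, service

request = service.UpdateModelRequest(
    model=gca_model.Model(
        name='projects/sample1/locations/sample2/models/sample3',
        display_name='new display name',
    ),
    # Only the masked fields are written; everything else is left untouched.
    update_mask=field_mask_pb2.FieldMask(paths=['display_name']),
)
```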
- assert isinstance(response, gca_model.Model) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.dataset_id == 'dataset_id_value' - assert response.deployment_state == gca_model.Model.DeploymentState.DEPLOYED - assert response.etag == 'etag_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_model_rest_interceptors(null_interceptor): - transport = transports.AutoMlRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AutoMlRestInterceptor(), - ) - client = AutoMlClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_update_model") as post, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_update_model_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AutoMlRestInterceptor, "pre_update_model") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.UpdateModelRequest.pb(service.UpdateModelRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = gca_model.Model.to_json(gca_model.Model()) - req.return_value.content = return_value - - request = service.UpdateModelRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = gca_model.Model() - post_with_metadata.return_value = gca_model.Model(), metadata - - client.update_model(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_deploy_model_rest_bad_request(request_type=service.DeployModelRequest): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/models/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.deploy_model(request) - - -@pytest.mark.parametrize("request_type", [ - service.DeployModelRequest, - dict, -]) -def test_deploy_model_rest_call_success(request_type): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/models/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.deploy_model(request) - - # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_deploy_model_rest_interceptors(null_interceptor): - transport = transports.AutoMlRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AutoMlRestInterceptor(), - ) - client = AutoMlClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_deploy_model") as post, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_deploy_model_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AutoMlRestInterceptor, "pre_deploy_model") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.DeployModelRequest.pb(service.DeployModelRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = service.DeployModelRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.deploy_model(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_undeploy_model_rest_bad_request(request_type=service.UndeployModelRequest): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/models/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.undeploy_model(request) - - -@pytest.mark.parametrize("request_type", [ - service.UndeployModelRequest, - dict, -]) -def test_undeploy_model_rest_call_success(request_type): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/models/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.undeploy_model(request) - - # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_undeploy_model_rest_interceptors(null_interceptor): - transport = transports.AutoMlRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AutoMlRestInterceptor(), - ) - client = AutoMlClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_undeploy_model") as post, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_undeploy_model_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AutoMlRestInterceptor, "pre_undeploy_model") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.UndeployModelRequest.pb(service.UndeployModelRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = service.UndeployModelRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.undeploy_model(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - 
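Deploy, undeploy and export return long-running operations, which is why these interceptor tests also patch `operation.Operation._set_result_from_operation`: without that patch the returned future would try to resolve itself against the mocked session. How such a future resolves once the operation is done, as a standalone sketch:

```python
from unittest import mock

from google.api_core import operation
from google.longrunning import operations_pb2
from google.protobuf import empty_pb2

done_op = operations_pb2.Operation(name='operations/spam', done=True)
done_op.response.Pack(empty_pb2.Empty())

# from_gapic wires refresh/cancel against an operations client; because the
# operation is already done, the mock is never actually consulted.
future = operation.from_gapic(
    done_op,
    mock.Mock(),
    empty_pb2.Empty,
)
assert future.result() == empty_pb2.Empty()
```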
post_with_metadata.assert_called_once() - - -def test_export_model_rest_bad_request(request_type=service.ExportModelRequest): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/models/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.export_model(request) - - -@pytest.mark.parametrize("request_type", [ - service.ExportModelRequest, - dict, -]) -def test_export_model_rest_call_success(request_type): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/models/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.export_model(request) - - # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_export_model_rest_interceptors(null_interceptor): - transport = transports.AutoMlRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AutoMlRestInterceptor(), - ) - client = AutoMlClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_export_model") as post, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_export_model_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AutoMlRestInterceptor, "pre_export_model") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.ExportModelRequest.pb(service.ExportModelRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = service.ExportModelRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.export_model(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_model_evaluation_rest_bad_request(request_type=service.GetModelEvaluationRequest): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/models/sample3/modelEvaluations/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_model_evaluation(request) - - -@pytest.mark.parametrize("request_type", [ - service.GetModelEvaluationRequest, - dict, -]) -def test_get_model_evaluation_rest_call_success(request_type): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/models/sample3/modelEvaluations/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
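Two different patch targets appear throughout this file: the bad-request tests patch `requests.Session.request` at the class level, intercepting any session, while the success tests patch `type(client.transport._session)` so only the client's own session is faked. A sketch of the class-level variant:

```python
from unittest import mock

import requests

session = requests.Session()

# Patching the class intercepts requests made through every Session instance,
# so no real network traffic can escape the test.
with mock.patch.object(requests.Session, 'request') as req:
    session.request('GET', 'https://example.com')
    req.assert_called_once_with('GET', 'https://example.com')
```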
- return_value = model_evaluation.ModelEvaluation( - name='name_value', - annotation_spec_id='annotation_spec_id_value', - display_name='display_name_value', - evaluated_example_count=2446, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = model_evaluation.ModelEvaluation.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_model_evaluation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, model_evaluation.ModelEvaluation) - assert response.name == 'name_value' - assert response.annotation_spec_id == 'annotation_spec_id_value' - assert response.display_name == 'display_name_value' - assert response.evaluated_example_count == 2446 - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_model_evaluation_rest_interceptors(null_interceptor): - transport = transports.AutoMlRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AutoMlRestInterceptor(), - ) - client = AutoMlClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_get_model_evaluation") as post, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_get_model_evaluation_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AutoMlRestInterceptor, "pre_get_model_evaluation") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.GetModelEvaluationRequest.pb(service.GetModelEvaluationRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = model_evaluation.ModelEvaluation.to_json(model_evaluation.ModelEvaluation()) - req.return_value.content = return_value - - request = service.GetModelEvaluationRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = model_evaluation.ModelEvaluation() - post_with_metadata.return_value = model_evaluation.ModelEvaluation(), metadata - - client.get_model_evaluation(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_model_evaluations_rest_bad_request(request_type=service.ListModelEvaluationsRequest): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/models/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_model_evaluations(request) - - -@pytest.mark.parametrize("request_type", [ - service.ListModelEvaluationsRequest, - dict, -]) -def test_list_model_evaluations_rest_call_success(request_type): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/models/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = service.ListModelEvaluationsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = service.ListModelEvaluationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_model_evaluations(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListModelEvaluationsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_model_evaluations_rest_interceptors(null_interceptor): - transport = transports.AutoMlRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AutoMlRestInterceptor(), - ) - client = AutoMlClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_list_model_evaluations") as post, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_list_model_evaluations_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AutoMlRestInterceptor, "pre_list_model_evaluations") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.ListModelEvaluationsRequest.pb(service.ListModelEvaluationsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = service.ListModelEvaluationsResponse.to_json(service.ListModelEvaluationsResponse()) - req.return_value.content = return_value - - request = service.ListModelEvaluationsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = service.ListModelEvaluationsResponse() - post_with_metadata.return_value = service.ListModelEvaluationsResponse(), metadata - - client.list_model_evaluations(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - -def test_initialize_client_w_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_dataset_empty_call_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_dataset), - '__call__') as call: - client.create_dataset(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.CreateDatasetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_dataset_empty_call_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_dataset), - '__call__') as call: - client.get_dataset(request=None) - - # Establish that the underlying stub method was called. 
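The empty-call tests patch `type(client.transport.create_dataset)` and its `__call__` rather than an attribute on the client: the transport exposes each RPC as a callable object, so intercepting the call means replacing the slot on its class. The same trick on a toy callable (all names here are ours):

```python
from unittest import mock


class FakeRpc:
    def __call__(self, request):
        raise RuntimeError("would hit the network")


rpc = FakeRpc()
with mock.patch.object(type(rpc), '__call__') as call:
    rpc("some-request")              # dispatches through the patched slot

call.assert_called()
name, args, kwargs = call.mock_calls[0]   # mock_calls entries unpack this way
assert args[0] == "some-request"
```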
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetDatasetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_datasets_empty_call_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_datasets), - '__call__') as call: - client.list_datasets(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListDatasetsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_dataset_empty_call_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_dataset), - '__call__') as call: - client.update_dataset(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.UpdateDatasetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_dataset_empty_call_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_dataset), - '__call__') as call: - client.delete_dataset(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.DeleteDatasetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_import_data_empty_call_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.import_data), - '__call__') as call: - client.import_data(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ImportDataRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_export_data_empty_call_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.export_data), - '__call__') as call: - client.export_data(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ExportDataRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. 
request == None and no flattened fields passed, work. -def test_get_annotation_spec_empty_call_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_annotation_spec), - '__call__') as call: - client.get_annotation_spec(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetAnnotationSpecRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_model_empty_call_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_model), - '__call__') as call: - client.create_model(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.CreateModelRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_model_empty_call_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_model), - '__call__') as call: - client.get_model(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetModelRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_models_empty_call_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_models), - '__call__') as call: - client.list_models(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListModelsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_model_empty_call_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_model), - '__call__') as call: - client.delete_model(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.DeleteModelRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_model_empty_call_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.update_model), - '__call__') as call: - client.update_model(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.UpdateModelRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_deploy_model_empty_call_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.deploy_model), - '__call__') as call: - client.deploy_model(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.DeployModelRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_undeploy_model_empty_call_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.undeploy_model), - '__call__') as call: - client.undeploy_model(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.UndeployModelRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_export_model_empty_call_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.export_model), - '__call__') as call: - client.export_model(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ExportModelRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_model_evaluation_empty_call_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_model_evaluation), - '__call__') as call: - client.get_model_evaluation(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetModelEvaluationRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_model_evaluations_empty_call_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_model_evaluations), - '__call__') as call: - client.list_model_evaluations(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListModelEvaluationsRequest() - - assert args[0] == request_msg - - -def test_auto_ml_rest_lro_client(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - transport = client.transport - - # Ensure that we have an api-core operations client. - assert isinstance( - transport.operations_client, -operations_v1.AbstractOperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.AutoMlGrpcTransport, - ) - -def test_auto_ml_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.AutoMlTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_auto_ml_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.automl_v1.services.auto_ml.transports.AutoMlTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.AutoMlTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'create_dataset', - 'get_dataset', - 'list_datasets', - 'update_dataset', - 'delete_dataset', - 'import_data', - 'export_data', - 'get_annotation_spec', - 'create_model', - 'get_model', - 'list_models', - 'delete_model', - 'update_model', - 'deploy_model', - 'undeploy_model', - 'export_model', - 'get_model_evaluation', - 'list_model_evaluations', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_auto_ml_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.automl_v1.services.auto_ml.transports.AutoMlTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.AutoMlTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_auto_ml_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.automl_v1.services.auto_ml.transports.AutoMlTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.AutoMlTransport() - adc.assert_called_once() - - -def test_auto_ml_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - AutoMlClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.AutoMlGrpcTransport, - transports.AutoMlGrpcAsyncIOTransport, - ], -) -def test_auto_ml_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.AutoMlGrpcTransport, - transports.AutoMlGrpcAsyncIOTransport, - transports.AutoMlRestTransport, - ], -) -def test_auto_ml_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.AutoMlGrpcTransport, grpc_helpers), - (transports.AutoMlGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_auto_ml_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "automl.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="automl.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.AutoMlGrpcTransport, transports.AutoMlGrpcAsyncIOTransport]) -def test_auto_ml_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_auto_ml_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.AutoMlRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_auto_ml_host_no_port(transport_name): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='automl.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'automl.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://automl.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_auto_ml_host_with_port(transport_name): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='automl.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'automl.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://automl.googleapis.com:8000' - ) - 
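The two host tests above pin down the endpoint convention these clients follow: gRPC transports address the service as host:port (appending the default port 443 when none is given), while the REST transport uses a full https:// URL. The standalone sketch below mirrors that resolution rule for illustration only; the helper name resolve_host is an assumption, not part of the generated client:

def resolve_host(api_endpoint: str, transport_name: str) -> str:
    # Hypothetical helper: reproduces the host convention asserted by the
    # test_auto_ml_host_no_port / test_auto_ml_host_with_port cases above.
    if transport_name in ("grpc", "grpc_asyncio"):
        # gRPC channels are addressed as host:port; default to 443.
        return api_endpoint if ":" in api_endpoint else api_endpoint + ":443"
    # The REST transport expects a URL; prefix the scheme when missing.
    if api_endpoint.startswith(("http://", "https://")):
        return api_endpoint
    return "https://" + api_endpoint

assert resolve_host("automl.googleapis.com", "grpc") == "automl.googleapis.com:443"
assert resolve_host("automl.googleapis.com:8000", "grpc_asyncio") == "automl.googleapis.com:8000"
assert resolve_host("automl.googleapis.com", "rest") == "https://automl.googleapis.com"
assert resolve_host("automl.googleapis.com:8000", "rest") == "https://automl.googleapis.com:8000"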
-@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_auto_ml_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = AutoMlClient( - credentials=creds1, - transport=transport_name, - ) - client2 = AutoMlClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.create_dataset._session - session2 = client2.transport.create_dataset._session - assert session1 != session2 - session1 = client1.transport.get_dataset._session - session2 = client2.transport.get_dataset._session - assert session1 != session2 - session1 = client1.transport.list_datasets._session - session2 = client2.transport.list_datasets._session - assert session1 != session2 - session1 = client1.transport.update_dataset._session - session2 = client2.transport.update_dataset._session - assert session1 != session2 - session1 = client1.transport.delete_dataset._session - session2 = client2.transport.delete_dataset._session - assert session1 != session2 - session1 = client1.transport.import_data._session - session2 = client2.transport.import_data._session - assert session1 != session2 - session1 = client1.transport.export_data._session - session2 = client2.transport.export_data._session - assert session1 != session2 - session1 = client1.transport.get_annotation_spec._session - session2 = client2.transport.get_annotation_spec._session - assert session1 != session2 - session1 = client1.transport.create_model._session - session2 = client2.transport.create_model._session - assert session1 != session2 - session1 = client1.transport.get_model._session - session2 = client2.transport.get_model._session - assert session1 != session2 - session1 = client1.transport.list_models._session - session2 = client2.transport.list_models._session - assert session1 != session2 - session1 = client1.transport.delete_model._session - session2 = client2.transport.delete_model._session - assert session1 != session2 - session1 = client1.transport.update_model._session - session2 = client2.transport.update_model._session - assert session1 != session2 - session1 = client1.transport.deploy_model._session - session2 = client2.transport.deploy_model._session - assert session1 != session2 - session1 = client1.transport.undeploy_model._session - session2 = client2.transport.undeploy_model._session - assert session1 != session2 - session1 = client1.transport.export_model._session - session2 = client2.transport.export_model._session - assert session1 != session2 - session1 = client1.transport.get_model_evaluation._session - session2 = client2.transport.get_model_evaluation._session - assert session1 != session2 - session1 = client1.transport.list_model_evaluations._session - session2 = client2.transport.list_model_evaluations._session - assert session1 != session2 -def test_auto_ml_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.AutoMlGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_auto_ml_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.AutoMlGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.AutoMlGrpcTransport, transports.AutoMlGrpcAsyncIOTransport]) -def test_auto_ml_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.AutoMlGrpcTransport, transports.AutoMlGrpcAsyncIOTransport]) -def test_auto_ml_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_auto_ml_grpc_lro_client(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - transport = client.transport - - # Ensure that we have an api-core operations client.
- assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_auto_ml_grpc_lro_async_client(): - client = AutoMlAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - transport = client.transport - - # Ensure that we have an api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_annotation_spec_path(): - project = "squid" - location = "clam" - dataset = "whelk" - annotation_spec = "octopus" - expected = "projects/{project}/locations/{location}/datasets/{dataset}/annotationSpecs/{annotation_spec}".format(project=project, location=location, dataset=dataset, annotation_spec=annotation_spec, ) - actual = AutoMlClient.annotation_spec_path(project, location, dataset, annotation_spec) - assert expected == actual - - -def test_parse_annotation_spec_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - "dataset": "cuttlefish", - "annotation_spec": "mussel", - } - path = AutoMlClient.annotation_spec_path(**expected) - - # Check that the path construction is reversible. - actual = AutoMlClient.parse_annotation_spec_path(path) - assert expected == actual - -def test_dataset_path(): - project = "winkle" - location = "nautilus" - dataset = "scallop" - expected = "projects/{project}/locations/{location}/datasets/{dataset}".format(project=project, location=location, dataset=dataset, ) - actual = AutoMlClient.dataset_path(project, location, dataset) - assert expected == actual - - -def test_parse_dataset_path(): - expected = { - "project": "abalone", - "location": "squid", - "dataset": "clam", - } - path = AutoMlClient.dataset_path(**expected) - - # Check that the path construction is reversible. - actual = AutoMlClient.parse_dataset_path(path) - assert expected == actual - -def test_model_path(): - project = "whelk" - location = "octopus" - model = "oyster" - expected = "projects/{project}/locations/{location}/models/{model}".format(project=project, location=location, model=model, ) - actual = AutoMlClient.model_path(project, location, model) - assert expected == actual - - -def test_parse_model_path(): - expected = { - "project": "nudibranch", - "location": "cuttlefish", - "model": "mussel", - } - path = AutoMlClient.model_path(**expected) - - # Check that the path construction is reversible. - actual = AutoMlClient.parse_model_path(path) - assert expected == actual - -def test_model_evaluation_path(): - project = "winkle" - location = "nautilus" - model = "scallop" - model_evaluation = "abalone" - expected = "projects/{project}/locations/{location}/models/{model}/modelEvaluations/{model_evaluation}".format(project=project, location=location, model=model, model_evaluation=model_evaluation, ) - actual = AutoMlClient.model_evaluation_path(project, location, model, model_evaluation) - assert expected == actual - - -def test_parse_model_evaluation_path(): - expected = { - "project": "squid", - "location": "clam", - "model": "whelk", - "model_evaluation": "octopus", - } - path = AutoMlClient.model_evaluation_path(**expected) - - # Check that the path construction is reversible.
- actual = AutoMlClient.parse_model_evaluation_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "oyster" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = AutoMlClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "nudibranch", - } - path = AutoMlClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = AutoMlClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder, ) - actual = AutoMlClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "mussel", - } - path = AutoMlClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = AutoMlClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "winkle" - expected = "organizations/{organization}".format(organization=organization, ) - actual = AutoMlClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nautilus", - } - path = AutoMlClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = AutoMlClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "scallop" - expected = "projects/{project}".format(project=project, ) - actual = AutoMlClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "abalone", - } - path = AutoMlClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = AutoMlClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "squid" - location = "clam" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = AutoMlClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "whelk", - "location": "octopus", - } - path = AutoMlClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = AutoMlClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.AutoMlTransport, '_prep_wrapped_messages') as prep: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.AutoMlTransport, '_prep_wrapped_messages') as prep: - transport_class = AutoMlClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_transport_close_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (AutoMlClient, transports.AutoMlGrpcTransport), - (AutoMlAsyncClient, transports.AutoMlGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-automl/v1/tests/unit/gapic/automl_v1/test_prediction_service.py b/owl-bot-staging/google-cloud-automl/v1/tests/unit/gapic/automl_v1/test_prediction_service.py deleted file mode 100644 index d5b7cfb2c49a..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1/tests/unit/gapic/automl_v1/test_prediction_service.py +++ /dev/null @@ -1,2916 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation -from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.automl_v1.services.prediction_service import PredictionServiceAsyncClient -from google.cloud.automl_v1.services.prediction_service import PredictionServiceClient -from google.cloud.automl_v1.services.prediction_service import transports -from google.cloud.automl_v1.types import annotation_payload -from google.cloud.automl_v1.types import data_items -from google.cloud.automl_v1.types import geometry -from google.cloud.automl_v1.types import io -from google.cloud.automl_v1.types import operations -from google.cloud.automl_v1.types import prediction_service -from google.cloud.automl_v1.types import text_segment -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -import google.auth - - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": "service account credentials", - "principal": "service-account@example.com", -} -CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same.
-# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert PredictionServiceClient._get_default_mtls_endpoint(None) is None - assert PredictionServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert PredictionServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert PredictionServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert PredictionServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert PredictionServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert PredictionServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert PredictionServiceClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert PredictionServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - PredictionServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert PredictionServiceClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert PredictionServiceClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert PredictionServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - PredictionServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert PredictionServiceClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert PredictionServiceClient._get_client_cert_source(None, False) is None - assert PredictionServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert PredictionServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with 
mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert PredictionServiceClient._get_client_cert_source(None, True) is mock_default_cert_source - assert PredictionServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(PredictionServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(PredictionServiceClient)) -@mock.patch.object(PredictionServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(PredictionServiceAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = PredictionServiceClient._DEFAULT_UNIVERSE - default_endpoint = PredictionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = PredictionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert PredictionServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert PredictionServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == PredictionServiceClient.DEFAULT_MTLS_ENDPOINT - assert PredictionServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert PredictionServiceClient._get_api_endpoint(None, None, default_universe, "always") == PredictionServiceClient.DEFAULT_MTLS_ENDPOINT - assert PredictionServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == PredictionServiceClient.DEFAULT_MTLS_ENDPOINT - assert PredictionServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert PredictionServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - PredictionServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." - - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert PredictionServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert PredictionServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert PredictionServiceClient._get_universe_domain(None, None) == PredictionServiceClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - PredictionServiceClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
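The universe-domain test above fixes a simple precedence chain: an explicit client option beats the GOOGLE_CLOUD_UNIVERSE_DOMAIN environment value, which beats the built-in default, and an empty string is rejected outright. A minimal sketch of that rule, assuming the default universe is googleapis.com (the function name get_universe_domain and the module-level constant are illustrative, not the client's internals):

from typing import Optional

_DEFAULT_UNIVERSE = "googleapis.com"  # assumed default, mirroring the expectations above

def get_universe_domain(client_option: Optional[str], env_value: Optional[str]) -> str:
    # Hypothetical re-implementation of the precedence the test asserts.
    if client_option is not None:
        # An explicitly configured universe domain always wins, but it
        # must be non-empty.
        if client_option == "":
            raise ValueError("Universe Domain cannot be an empty string.")
        return client_option
    if env_value is not None:
        # Next in line: the GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variable.
        return env_value
    return _DEFAULT_UNIVERSE

assert get_universe_domain("foo.com", "bar.com") == "foo.com"
assert get_universe_domain(None, "bar.com") == "bar.com"
assert get_universe_domain(None, None) == "googleapis.com"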
- -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = PredictionServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - -@pytest.mark.parametrize("error_code", [401,403,404,500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = PredictionServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - -@pytest.mark.parametrize("client_class,transport_name", [ - (PredictionServiceClient, "grpc"), - (PredictionServiceAsyncClient, "grpc_asyncio"), - (PredictionServiceClient, "rest"), -]) -def test_prediction_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'automl.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://automl.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.PredictionServiceGrpcTransport, "grpc"), - (transports.PredictionServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.PredictionServiceRestTransport, "rest"), -]) -def test_prediction_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (PredictionServiceClient, "grpc"), - (PredictionServiceAsyncClient, "grpc_asyncio"), - (PredictionServiceClient, "rest"), -]) -def test_prediction_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", 
transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'automl.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://automl.googleapis.com' - ) - - -def test_prediction_service_client_get_transport_class(): - transport = PredictionServiceClient.get_transport_class() - available_transports = [ - transports.PredictionServiceGrpcTransport, - transports.PredictionServiceRestTransport, - ] - assert transport in available_transports - - transport = PredictionServiceClient.get_transport_class("grpc") - assert transport == transports.PredictionServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (PredictionServiceClient, transports.PredictionServiceGrpcTransport, "grpc"), - (PredictionServiceAsyncClient, transports.PredictionServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (PredictionServiceClient, transports.PredictionServiceRestTransport, "rest"), -]) -@mock.patch.object(PredictionServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(PredictionServiceClient)) -@mock.patch.object(PredictionServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(PredictionServiceAsyncClient)) -def test_prediction_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(PredictionServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(PredictionServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (PredictionServiceClient, transports.PredictionServiceGrpcTransport, "grpc", "true"), - (PredictionServiceAsyncClient, transports.PredictionServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (PredictionServiceClient, transports.PredictionServiceGrpcTransport, "grpc", "false"), - (PredictionServiceAsyncClient, transports.PredictionServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (PredictionServiceClient, transports.PredictionServiceRestTransport, "rest", "true"), - (PredictionServiceClient, transports.PredictionServiceRestTransport, "rest", "false"), -]) -@mock.patch.object(PredictionServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(PredictionServiceClient)) -@mock.patch.object(PredictionServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", 
modify_default_endpoint_template(PredictionServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_prediction_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - PredictionServiceClient, PredictionServiceAsyncClient -]) -@mock.patch.object(PredictionServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PredictionServiceClient)) -@mock.patch.object(PredictionServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PredictionServiceAsyncClient)) -def test_prediction_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - PredictionServiceClient, PredictionServiceAsyncClient -]) -@mock.patch.object(PredictionServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(PredictionServiceClient)) -@mock.patch.object(PredictionServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(PredictionServiceAsyncClient)) -def test_prediction_service_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = PredictionServiceClient._DEFAULT_UNIVERSE - default_endpoint = PredictionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = PredictionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (PredictionServiceClient, transports.PredictionServiceGrpcTransport, "grpc"), - (PredictionServiceAsyncClient, transports.PredictionServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (PredictionServiceClient, transports.PredictionServiceRestTransport, "rest"), -]) -def test_prediction_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (PredictionServiceClient, transports.PredictionServiceGrpcTransport, "grpc", grpc_helpers), - (PredictionServiceAsyncClient, transports.PredictionServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (PredictionServiceClient, transports.PredictionServiceRestTransport, "rest", None), -]) -def test_prediction_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_prediction_service_client_client_options_from_dict(): - with mock.patch('google.cloud.automl_v1.services.prediction_service.transports.PredictionServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = PredictionServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (PredictionServiceClient, transports.PredictionServiceGrpcTransport, "grpc", grpc_helpers), - (PredictionServiceAsyncClient, transports.PredictionServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_prediction_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
-    with mock.patch.object(
-        google.auth, "load_credentials_from_file", autospec=True
-    ) as load_creds, mock.patch.object(
-        google.auth, "default", autospec=True
-    ) as adc, mock.patch.object(
-        grpc_helpers, "create_channel"
-    ) as create_channel:
-        creds = ga_credentials.AnonymousCredentials()
-        file_creds = ga_credentials.AnonymousCredentials()
-        load_creds.return_value = (file_creds, None)
-        adc.return_value = (creds, None)
-        client = client_class(client_options=options, transport=transport_name)
-        create_channel.assert_called_with(
-            "automl.googleapis.com:443",
-            credentials=file_creds,
-            credentials_file=None,
-            quota_project_id=None,
-            default_scopes=(
-                'https://www.googleapis.com/auth/cloud-platform',
-            ),
-            scopes=None,
-            default_host="automl.googleapis.com",
-            ssl_credentials=None,
-            options=[
-                ("grpc.max_send_message_length", -1),
-                ("grpc.max_receive_message_length", -1),
-            ],
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    prediction_service.PredictRequest,
-    dict,
-])
-def test_predict(request_type, transport: str = 'grpc'):
-    client = PredictionServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client.transport.predict),
-        '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = prediction_service.PredictResponse()
-        response = client.predict(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = prediction_service.PredictRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, prediction_service.PredictResponse)
-
-
-def test_predict_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = PredictionServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = prediction_service.PredictRequest(
-        name='name_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client.transport.predict),
-        '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.predict(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == prediction_service.PredictRequest(
-            name='name_value',
-        )
-
-def test_predict_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = PredictionServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.predict in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.predict] = mock_rpc
-        request = {}
-        client.predict(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.predict(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_predict_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = PredictionServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.predict in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.predict] = mock_rpc
-
-        request = {}
-        await client.predict(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.predict(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_predict_async(transport: str = 'grpc_asyncio', request_type=prediction_service.PredictRequest):
-    client = PredictionServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client.transport.predict),
-        '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(prediction_service.PredictResponse())
-        response = await client.predict(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = prediction_service.PredictRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
- assert isinstance(response, prediction_service.PredictResponse) - - -@pytest.mark.asyncio -async def test_predict_async_from_dict(): - await test_predict_async(request_type=dict) - -def test_predict_field_headers(): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = prediction_service.PredictRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.predict), - '__call__') as call: - call.return_value = prediction_service.PredictResponse() - client.predict(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_predict_field_headers_async(): - client = PredictionServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = prediction_service.PredictRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.predict), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(prediction_service.PredictResponse()) - await client.predict(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_predict_flattened(): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.predict), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = prediction_service.PredictResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.predict( - name='name_value', - payload=data_items.ExamplePayload(image=data_items.Image(image_bytes=b'image_bytes_blob')), - params={'key_value': 'value_value'}, - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].payload - mock_val = data_items.ExamplePayload(image=data_items.Image(image_bytes=b'image_bytes_blob')) - assert arg == mock_val - arg = args[0].params - mock_val = {'key_value': 'value_value'} - assert arg == mock_val - - -def test_predict_flattened_error(): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
-    with pytest.raises(ValueError):
-        client.predict(
-            prediction_service.PredictRequest(),
-            name='name_value',
-            payload=data_items.ExamplePayload(image=data_items.Image(image_bytes=b'image_bytes_blob')),
-            params={'key_value': 'value_value'},
-        )
-
-@pytest.mark.asyncio
-async def test_predict_flattened_async():
-    client = PredictionServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client.transport.predict),
-        '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(prediction_service.PredictResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.predict(
-            name='name_value',
-            payload=data_items.ExamplePayload(image=data_items.Image(image_bytes=b'image_bytes_blob')),
-            params={'key_value': 'value_value'},
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-        arg = args[0].payload
-        mock_val = data_items.ExamplePayload(image=data_items.Image(image_bytes=b'image_bytes_blob'))
-        assert arg == mock_val
-        arg = args[0].params
-        mock_val = {'key_value': 'value_value'}
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_predict_flattened_error_async():
-    client = PredictionServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.predict(
-            prediction_service.PredictRequest(),
-            name='name_value',
-            payload=data_items.ExamplePayload(image=data_items.Image(image_bytes=b'image_bytes_blob')),
-            params={'key_value': 'value_value'},
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    prediction_service.BatchPredictRequest,
-    dict,
-])
-def test_batch_predict(request_type, transport: str = 'grpc'):
-    client = PredictionServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client.transport.batch_predict),
-        '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/spam')
-        response = client.batch_predict(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = prediction_service.BatchPredictRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, future.Future)
-
-
-def test_batch_predict_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
- client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = prediction_service.BatchPredictRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_predict), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.batch_predict(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == prediction_service.BatchPredictRequest( - name='name_value', - ) - -def test_batch_predict_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.batch_predict in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.batch_predict] = mock_rpc - request = {} - client.batch_predict(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.batch_predict(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_batch_predict_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = PredictionServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.batch_predict in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.batch_predict] = mock_rpc - - request = {} - await client.batch_predict(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.batch_predict(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_batch_predict_async(transport: str = 'grpc_asyncio', request_type=prediction_service.BatchPredictRequest): - client = PredictionServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_predict), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.batch_predict(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = prediction_service.BatchPredictRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_batch_predict_async_from_dict(): - await test_batch_predict_async(request_type=dict) - -def test_batch_predict_field_headers(): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = prediction_service.BatchPredictRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_predict), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.batch_predict(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_batch_predict_field_headers_async(): - client = PredictionServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = prediction_service.BatchPredictRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_predict), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.batch_predict(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'name=name_value',
-        ) in kw['metadata']
-
-
-def test_batch_predict_flattened():
-    client = PredictionServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client.transport.batch_predict),
-        '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.batch_predict(
-            name='name_value',
-            input_config=io.BatchPredictInputConfig(gcs_source=io.GcsSource(input_uris=['input_uris_value'])),
-            output_config=io.BatchPredictOutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value')),
-            params={'key_value': 'value_value'},
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-        arg = args[0].input_config
-        mock_val = io.BatchPredictInputConfig(gcs_source=io.GcsSource(input_uris=['input_uris_value']))
-        assert arg == mock_val
-        arg = args[0].output_config
-        mock_val = io.BatchPredictOutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value'))
-        assert arg == mock_val
-        arg = args[0].params
-        mock_val = {'key_value': 'value_value'}
-        assert arg == mock_val
-
-
-def test_batch_predict_flattened_error():
-    client = PredictionServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.batch_predict(
-            prediction_service.BatchPredictRequest(),
-            name='name_value',
-            input_config=io.BatchPredictInputConfig(gcs_source=io.GcsSource(input_uris=['input_uris_value'])),
-            output_config=io.BatchPredictOutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value')),
-            params={'key_value': 'value_value'},
-        )
-
-@pytest.mark.asyncio
-async def test_batch_predict_flattened_async():
-    client = PredictionServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client.transport.batch_predict),
-        '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.batch_predict(
-            name='name_value',
-            input_config=io.BatchPredictInputConfig(gcs_source=io.GcsSource(input_uris=['input_uris_value'])),
-            output_config=io.BatchPredictOutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value')),
-            params={'key_value': 'value_value'},
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].input_config - mock_val = io.BatchPredictInputConfig(gcs_source=io.GcsSource(input_uris=['input_uris_value'])) - assert arg == mock_val - arg = args[0].output_config - mock_val = io.BatchPredictOutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value')) - assert arg == mock_val - arg = args[0].params - mock_val = {'key_value': 'value_value'} - assert arg == mock_val - -@pytest.mark.asyncio -async def test_batch_predict_flattened_error_async(): - client = PredictionServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.batch_predict( - prediction_service.BatchPredictRequest(), - name='name_value', - input_config=io.BatchPredictInputConfig(gcs_source=io.GcsSource(input_uris=['input_uris_value'])), - output_config=io.BatchPredictOutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value')), - params={'key_value': 'value_value'}, - ) - - -def test_predict_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.predict in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.predict] = mock_rpc - - request = {} - client.predict(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.predict(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_predict_rest_required_fields(request_type=prediction_service.PredictRequest):
-    transport_class = transports.PredictionServiceRestTransport
-
-    request_init = {}
-    request_init["name"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).predict._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["name"] = 'name_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).predict._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
-
-    client = PredictionServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = prediction_service.PredictResponse()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = prediction_service.PredictResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.predict(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_predict_rest_unset_required_fields():
-    transport = transports.PredictionServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.predict._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("name", "payload", )))
-
-
-def test_predict_rest_flattened():
-    client = PredictionServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
- return_value = prediction_service.PredictResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/models/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - payload=data_items.ExamplePayload(image=data_items.Image(image_bytes=b'image_bytes_blob')), - params={'key_value': 'value_value'}, - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = prediction_service.PredictResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.predict(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/models/*}:predict" % client.transport._host, args[1]) - - -def test_predict_rest_flattened_error(transport: str = 'rest'): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.predict( - prediction_service.PredictRequest(), - name='name_value', - payload=data_items.ExamplePayload(image=data_items.Image(image_bytes=b'image_bytes_blob')), - params={'key_value': 'value_value'}, - ) - - -def test_batch_predict_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.batch_predict in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.batch_predict] = mock_rpc - - request = {} - client.batch_predict(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        # Operation methods build a cached wrapper on first rpc call
-        # subsequent calls should use the cached wrapper
-        wrapper_fn.reset_mock()
-
-        client.batch_predict(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_batch_predict_rest_required_fields(request_type=prediction_service.BatchPredictRequest):
-    transport_class = transports.PredictionServiceRestTransport
-
-    request_init = {}
-    request_init["name"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_predict._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["name"] = 'name_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_predict._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
-
-    client = PredictionServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = operations_pb2.Operation(name='operations/spam')
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.batch_predict(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_batch_predict_rest_unset_required_fields():
-    transport = transports.PredictionServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.batch_predict._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("name", "inputConfig", "outputConfig", )))
-
-
-def test_batch_predict_rest_flattened():
-    client = PredictionServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/models/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - input_config=io.BatchPredictInputConfig(gcs_source=io.GcsSource(input_uris=['input_uris_value'])), - output_config=io.BatchPredictOutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value')), - params={'key_value': 'value_value'}, - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.batch_predict(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/models/*}:batchPredict" % client.transport._host, args[1]) - - -def test_batch_predict_rest_flattened_error(transport: str = 'rest'): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.batch_predict( - prediction_service.BatchPredictRequest(), - name='name_value', - input_config=io.BatchPredictInputConfig(gcs_source=io.GcsSource(input_uris=['input_uris_value'])), - output_config=io.BatchPredictOutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value')), - params={'key_value': 'value_value'}, - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.PredictionServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.PredictionServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = PredictionServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.PredictionServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = PredictionServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = PredictionServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. 
- transport = transports.PredictionServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = PredictionServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.PredictionServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = PredictionServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.PredictionServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.PredictionServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.PredictionServiceGrpcTransport, - transports.PredictionServiceGrpcAsyncIOTransport, - transports.PredictionServiceRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = PredictionServiceClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_predict_empty_call_grpc(): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.predict), - '__call__') as call: - call.return_value = prediction_service.PredictResponse() - client.predict(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = prediction_service.PredictRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_batch_predict_empty_call_grpc(): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.batch_predict), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.batch_predict(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = prediction_service.BatchPredictRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = PredictionServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = PredictionServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_predict_empty_call_grpc_asyncio(): - client = PredictionServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.predict), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(prediction_service.PredictResponse( - )) - await client.predict(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = prediction_service.PredictRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_batch_predict_empty_call_grpc_asyncio(): - client = PredictionServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.batch_predict), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.batch_predict(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = prediction_service.BatchPredictRequest() - - assert args[0] == request_msg - - -def test_transport_kind_rest(): - transport = PredictionServiceClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" - - -def test_predict_rest_bad_request(request_type=prediction_service.PredictRequest): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/models/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.predict(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    prediction_service.PredictRequest,
-    dict,
-])
-def test_predict_rest_call_success(request_type):
-    client = PredictionServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/models/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = prediction_service.PredictResponse()
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-
-        # Convert return value to protobuf type
-        return_value = prediction_service.PredictResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.predict(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, prediction_service.PredictResponse)
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_predict_rest_interceptors(null_interceptor):
-    transport = transports.PredictionServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.PredictionServiceRestInterceptor(),
-    )
-    client = PredictionServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.PredictionServiceRestInterceptor, "post_predict") as post, \
-        mock.patch.object(transports.PredictionServiceRestInterceptor, "post_predict_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.PredictionServiceRestInterceptor, "pre_predict") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = prediction_service.PredictRequest.pb(prediction_service.PredictRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = prediction_service.PredictResponse.to_json(prediction_service.PredictResponse())
-        req.return_value.content = return_value
-
-        request = prediction_service.PredictRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = prediction_service.PredictResponse()
-        post_with_metadata.return_value = prediction_service.PredictResponse(), metadata
-
-        client.predict(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
"val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_batch_predict_rest_bad_request(request_type=prediction_service.BatchPredictRequest): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/models/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.batch_predict(request) - - -@pytest.mark.parametrize("request_type", [ - prediction_service.BatchPredictRequest, - dict, -]) -def test_batch_predict_rest_call_success(request_type): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/models/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.batch_predict(request) - - # Establish that the response is the type that we expect. 
-    assert isinstance(response, future.Future)
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_batch_predict_rest_interceptors(null_interceptor):
-    transport = transports.PredictionServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.PredictionServiceRestInterceptor(),
-    )
-    client = PredictionServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
-        mock.patch.object(transports.PredictionServiceRestInterceptor, "post_batch_predict") as post, \
-        mock.patch.object(transports.PredictionServiceRestInterceptor, "post_batch_predict_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.PredictionServiceRestInterceptor, "pre_batch_predict") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = prediction_service.BatchPredictRequest.pb(prediction_service.BatchPredictRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = json_format.MessageToJson(operations_pb2.Operation())
-        req.return_value.content = return_value
-
-        request = prediction_service.BatchPredictRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = operations_pb2.Operation()
-        post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
-        client.batch_predict(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-def test_initialize_client_w_rest():
-    client = PredictionServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    assert client is not None
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_predict_empty_call_rest():
-    client = PredictionServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-        type(client.transport.predict),
-        '__call__') as call:
-        client.predict(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = prediction_service.PredictRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_batch_predict_empty_call_rest():
-    client = PredictionServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-        type(client.transport.batch_predict),
-        '__call__') as call:
-        client.batch_predict(request=None)
-
-        # Establish that the underlying stub method was called.
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = prediction_service.BatchPredictRequest() - - assert args[0] == request_msg - - -def test_prediction_service_rest_lro_client(): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - transport = client.transport - - # Ensure that we have an api-core operations client. - assert isinstance( - transport.operations_client, -operations_v1.AbstractOperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.PredictionServiceGrpcTransport, - ) - -def test_prediction_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.PredictionServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_prediction_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.automl_v1.services.prediction_service.transports.PredictionServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.PredictionServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'predict', - 'batch_predict', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_prediction_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.automl_v1.services.prediction_service.transports.PredictionServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.PredictionServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_prediction_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. 
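-    # google.auth.default() returns a (credentials, project_id) tuple, which
-    # is why the mock below is set up to return (AnonymousCredentials(), None).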
- with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.automl_v1.services.prediction_service.transports.PredictionServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.PredictionServiceTransport() - adc.assert_called_once() - - -def test_prediction_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - PredictionServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.PredictionServiceGrpcTransport, - transports.PredictionServiceGrpcAsyncIOTransport, - ], -) -def test_prediction_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.PredictionServiceGrpcTransport, - transports.PredictionServiceGrpcAsyncIOTransport, - transports.PredictionServiceRestTransport, - ], -) -def test_prediction_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.PredictionServiceGrpcTransport, grpc_helpers), - (transports.PredictionServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_prediction_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
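-    # The channel is then expected to target the service's default endpoint,
-    # automl.googleapis.com:443, as asserted on create_channel below.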
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "automl.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="automl.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.PredictionServiceGrpcTransport, transports.PredictionServiceGrpcAsyncIOTransport]) -def test_prediction_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_prediction_service_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.PredictionServiceRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_prediction_service_host_no_port(transport_name): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='automl.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'automl.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://automl.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_prediction_service_host_with_port(transport_name): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='automl.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'automl.googleapis.com:8000' - if 
transport_name in ['grpc', 'grpc_asyncio'] - else 'https://automl.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_prediction_service_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = PredictionServiceClient( - credentials=creds1, - transport=transport_name, - ) - client2 = PredictionServiceClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.predict._session - session2 = client2.transport.predict._session - assert session1 != session2 - session1 = client1.transport.batch_predict._session - session2 = client2.transport.batch_predict._session - assert session1 != session2 -def test_prediction_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.PredictionServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_prediction_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.PredictionServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.PredictionServiceGrpcTransport, transports.PredictionServiceGrpcAsyncIOTransport]) -def test_prediction_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
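-# In this variant no explicit client_cert_source callback is supplied, so the
-# SSL credentials are expected to come from ADC via
-# google.auth.transport.grpc.SslCredentials, which is mocked below.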
-@pytest.mark.parametrize("transport_class", [transports.PredictionServiceGrpcTransport, transports.PredictionServiceGrpcAsyncIOTransport]) -def test_prediction_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_prediction_service_grpc_lro_client(): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - transport = client.transport - - # Ensure that we have an api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_prediction_service_grpc_lro_async_client(): - client = PredictionServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - transport = client.transport - - # Ensure that we have an api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_model_path(): - project = "squid" - location = "clam" - model = "whelk" - expected = "projects/{project}/locations/{location}/models/{model}".format(project=project, location=location, model=model, ) - actual = PredictionServiceClient.model_path(project, location, model) - assert expected == actual - - -def test_parse_model_path(): - expected = { - "project": "octopus", - "location": "oyster", - "model": "nudibranch", - } - path = PredictionServiceClient.model_path(**expected) - - # Check that the path construction is reversible. - actual = PredictionServiceClient.parse_model_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = PredictionServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "mussel", - } - path = PredictionServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible.
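-    # i.e. "billingAccounts/mussel" should parse back to
-    # {"billing_account": "mussel"}.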
- actual = PredictionServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "winkle" - expected = "folders/{folder}".format(folder=folder, ) - actual = PredictionServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nautilus", - } - path = PredictionServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = PredictionServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "scallop" - expected = "organizations/{organization}".format(organization=organization, ) - actual = PredictionServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "abalone", - } - path = PredictionServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = PredictionServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "squid" - expected = "projects/{project}".format(project=project, ) - actual = PredictionServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "clam", - } - path = PredictionServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = PredictionServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "whelk" - location = "octopus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = PredictionServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - } - path = PredictionServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = PredictionServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.PredictionServiceTransport, '_prep_wrapped_messages') as prep: - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.PredictionServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = PredictionServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_transport_close_grpc(): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = PredictionServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close_rest(): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
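-    # Exiting the `with client:` block below is what should trigger the
-    # transport's close() call.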
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (PredictionServiceClient, transports.PredictionServiceGrpcTransport), - (PredictionServiceAsyncClient, transports.PredictionServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/.coveragerc b/owl-bot-staging/google-cloud-automl/v1beta1/.coveragerc deleted file mode 100644 index 8705cefded5c..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/automl/__init__.py - google/cloud/automl/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/.flake8 b/owl-bot-staging/google-cloud-automl/v1beta1/.flake8 deleted file mode 100644 index 29227d4cf419..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. 
- **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/MANIFEST.in b/owl-bot-staging/google-cloud-automl/v1beta1/MANIFEST.in deleted file mode 100644 index ba1872217709..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/automl *.py -recursive-include google/cloud/automl_v1beta1 *.py diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/README.rst b/owl-bot-staging/google-cloud-automl/v1beta1/README.rst deleted file mode 100644 index 69413aa1fb0f..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/README.rst +++ /dev/null @@ -1,143 +0,0 @@ -Python Client for Google Cloud Automl API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Automl API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv <your-env> - source <your-env>/bin/activate - <your-env>/bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. code-block:: console - - python3 -m venv <your-env> - <your-env>\Scripts\activate - <your-env>\Scripts\pip.exe install \path\to\library - - -Logging ------- - -This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes. -Note the following: - -#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging. -#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**. -#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below. - - -Simple, environment-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google -logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged -messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging -event. - -A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log.
- -- Valid logging scopes: :code:`google`, :code:`google.cloud.automl_v1beta1`, :code:`google.api`, :code:`google.auth`, etc. - -- Invalid logging scopes: :code:`foo`, :code:`123`, etc. - -**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers. - - -Examples -^^^^^^^^ - -- Enabling the default handler for all Google-based loggers - -.. code-block:: console - - export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google - -- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`): - -.. code-block:: console - - export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1 - - -Advanced, code-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -You can also configure a valid logging scope using Python's standard `logging` mechanism. - - -Examples -^^^^^^^^ - -- Configuring a handler for all Google-based loggers - -.. code-block:: python - - import logging - - from google.cloud.translate_v3 import translate - - base_logger = logging.getLogger("google") - base_logger.addHandler(logging.StreamHandler()) - base_logger.setLevel(logging.DEBUG) - -- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`): - -.. code-block:: python - - import logging - - from google.cloud.translate_v3 import translate - - base_logger = logging.getLogger("google.cloud.library_v1") - base_logger.addHandler(logging.StreamHandler()) - base_logger.setLevel(logging.DEBUG) - - -Logging details -~~~~~~~~~~~~~~~ - -#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root - logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set - :code:`logging.getLogger("google").propagate = True` in your code. -#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for - one library, but decide you need to also set up environment-based logging configuration for another library. - - #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual - if the code-based configuration gets applied first. - -#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get - executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured. - (This is the reason for 2.i. above.) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/docs/_static/custom.css b/owl-bot-staging/google-cloud-automl/v1beta1/docs/_static/custom.css deleted file mode 100644 index 06423be0b592..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/docs/_static/custom.css +++ /dev/null @@ -1,3 +0,0 @@ -dl.field-list > dt { - min-width: 100px -} diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/docs/automl_v1beta1/auto_ml.rst b/owl-bot-staging/google-cloud-automl/v1beta1/docs/automl_v1beta1/auto_ml.rst deleted file mode 100644 index ddb02f63aeba..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/docs/automl_v1beta1/auto_ml.rst +++ /dev/null @@ -1,10 +0,0 @@ -AutoMl ------------------------- - -.. automodule:: google.cloud.automl_v1beta1.services.auto_ml - :members: - :inherited-members: - -.. 
automodule:: google.cloud.automl_v1beta1.services.auto_ml.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/docs/automl_v1beta1/prediction_service.rst b/owl-bot-staging/google-cloud-automl/v1beta1/docs/automl_v1beta1/prediction_service.rst deleted file mode 100644 index e234e69fb639..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/docs/automl_v1beta1/prediction_service.rst +++ /dev/null @@ -1,6 +0,0 @@ -PredictionService ------------------------------------ - -.. automodule:: google.cloud.automl_v1beta1.services.prediction_service - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/docs/automl_v1beta1/services_.rst b/owl-bot-staging/google-cloud-automl/v1beta1/docs/automl_v1beta1/services_.rst deleted file mode 100644 index ebd9c7c83d00..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/docs/automl_v1beta1/services_.rst +++ /dev/null @@ -1,7 +0,0 @@ -Services for Google Cloud Automl v1beta1 API -============================================ -.. toctree:: - :maxdepth: 2 - - auto_ml - prediction_service diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/docs/automl_v1beta1/types_.rst b/owl-bot-staging/google-cloud-automl/v1beta1/docs/automl_v1beta1/types_.rst deleted file mode 100644 index b50b55f69395..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/docs/automl_v1beta1/types_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Automl v1beta1 API -========================================= - -.. automodule:: google.cloud.automl_v1beta1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/docs/conf.py b/owl-bot-staging/google-cloud-automl/v1beta1/docs/conf.py deleted file mode 100644 index 92c859a77f79..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-automl documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" - -# Add any Sphinx extension module names here, as strings. 
They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGELOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffixes as a list of strings: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = u"google-cloud-automl" -copyright = u"2023, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as a replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = 'en' - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation.
-html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. 
-htmlhelp_basename = "google-cloud-automl-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-automl.tex", - u"google-cloud-automl Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-automl", - u"Google Cloud Automl Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-automl", - u"google-cloud-automl Documentation", - author, - "google-cloud-automl", - "GAPIC library for Google Cloud Automl API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. 
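-# With these inventories, cross-references such as :py:class:`logging.Logger`
-# can resolve against the external documentation sets listed below.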
-intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/docs/index.rst b/owl-bot-staging/google-cloud-automl/v1beta1/docs/index.rst deleted file mode 100644 index 3489c9d5c273..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - automl_v1beta1/services_ - automl_v1beta1/types_ diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl/__init__.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl/__init__.py deleted file mode 100644 index c04292ba5804..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl/__init__.py +++ /dev/null @@ -1,275 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.automl import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.automl_v1beta1.services.auto_ml.client import AutoMlClient -from google.cloud.automl_v1beta1.services.auto_ml.async_client import AutoMlAsyncClient -from google.cloud.automl_v1beta1.services.prediction_service.client import PredictionServiceClient -from google.cloud.automl_v1beta1.services.prediction_service.async_client import PredictionServiceAsyncClient - -from google.cloud.automl_v1beta1.types.annotation_payload import AnnotationPayload -from google.cloud.automl_v1beta1.types.annotation_spec import AnnotationSpec -from google.cloud.automl_v1beta1.types.classification import ClassificationAnnotation -from google.cloud.automl_v1beta1.types.classification import ClassificationEvaluationMetrics -from google.cloud.automl_v1beta1.types.classification import VideoClassificationAnnotation -from google.cloud.automl_v1beta1.types.classification import ClassificationType -from google.cloud.automl_v1beta1.types.column_spec import ColumnSpec -from google.cloud.automl_v1beta1.types.data_items import Document -from google.cloud.automl_v1beta1.types.data_items import DocumentDimensions -from google.cloud.automl_v1beta1.types.data_items import ExamplePayload -from google.cloud.automl_v1beta1.types.data_items import Image -from google.cloud.automl_v1beta1.types.data_items import Row -from google.cloud.automl_v1beta1.types.data_items import TextSnippet -from google.cloud.automl_v1beta1.types.data_stats import ArrayStats -from google.cloud.automl_v1beta1.types.data_stats import CategoryStats -from google.cloud.automl_v1beta1.types.data_stats import CorrelationStats -from google.cloud.automl_v1beta1.types.data_stats import DataStats -from google.cloud.automl_v1beta1.types.data_stats import Float64Stats -from google.cloud.automl_v1beta1.types.data_stats import StringStats -from google.cloud.automl_v1beta1.types.data_stats import StructStats -from google.cloud.automl_v1beta1.types.data_stats import TimestampStats -from google.cloud.automl_v1beta1.types.data_types import DataType -from google.cloud.automl_v1beta1.types.data_types import StructType -from google.cloud.automl_v1beta1.types.data_types import TypeCode -from google.cloud.automl_v1beta1.types.dataset import Dataset -from google.cloud.automl_v1beta1.types.detection import BoundingBoxMetricsEntry -from google.cloud.automl_v1beta1.types.detection import ImageObjectDetectionAnnotation -from google.cloud.automl_v1beta1.types.detection import ImageObjectDetectionEvaluationMetrics -from google.cloud.automl_v1beta1.types.detection import VideoObjectTrackingAnnotation -from google.cloud.automl_v1beta1.types.detection import VideoObjectTrackingEvaluationMetrics -from google.cloud.automl_v1beta1.types.geometry import BoundingPoly -from google.cloud.automl_v1beta1.types.geometry import NormalizedVertex -from google.cloud.automl_v1beta1.types.image import ImageClassificationDatasetMetadata -from google.cloud.automl_v1beta1.types.image import ImageClassificationModelDeploymentMetadata -from google.cloud.automl_v1beta1.types.image import ImageClassificationModelMetadata -from google.cloud.automl_v1beta1.types.image import ImageObjectDetectionDatasetMetadata -from google.cloud.automl_v1beta1.types.image import ImageObjectDetectionModelDeploymentMetadata -from google.cloud.automl_v1beta1.types.image import ImageObjectDetectionModelMetadata -from google.cloud.automl_v1beta1.types.io import BatchPredictInputConfig -from 
google.cloud.automl_v1beta1.types.io import BatchPredictOutputConfig -from google.cloud.automl_v1beta1.types.io import BigQueryDestination -from google.cloud.automl_v1beta1.types.io import BigQuerySource -from google.cloud.automl_v1beta1.types.io import DocumentInputConfig -from google.cloud.automl_v1beta1.types.io import ExportEvaluatedExamplesOutputConfig -from google.cloud.automl_v1beta1.types.io import GcrDestination -from google.cloud.automl_v1beta1.types.io import GcsDestination -from google.cloud.automl_v1beta1.types.io import GcsSource -from google.cloud.automl_v1beta1.types.io import InputConfig -from google.cloud.automl_v1beta1.types.io import ModelExportOutputConfig -from google.cloud.automl_v1beta1.types.io import OutputConfig -from google.cloud.automl_v1beta1.types.model import Model -from google.cloud.automl_v1beta1.types.model_evaluation import ModelEvaluation -from google.cloud.automl_v1beta1.types.operations import BatchPredictOperationMetadata -from google.cloud.automl_v1beta1.types.operations import CreateModelOperationMetadata -from google.cloud.automl_v1beta1.types.operations import DeleteOperationMetadata -from google.cloud.automl_v1beta1.types.operations import DeployModelOperationMetadata -from google.cloud.automl_v1beta1.types.operations import ExportDataOperationMetadata -from google.cloud.automl_v1beta1.types.operations import ExportEvaluatedExamplesOperationMetadata -from google.cloud.automl_v1beta1.types.operations import ExportModelOperationMetadata -from google.cloud.automl_v1beta1.types.operations import ImportDataOperationMetadata -from google.cloud.automl_v1beta1.types.operations import OperationMetadata -from google.cloud.automl_v1beta1.types.operations import UndeployModelOperationMetadata -from google.cloud.automl_v1beta1.types.prediction_service import BatchPredictRequest -from google.cloud.automl_v1beta1.types.prediction_service import BatchPredictResult -from google.cloud.automl_v1beta1.types.prediction_service import PredictRequest -from google.cloud.automl_v1beta1.types.prediction_service import PredictResponse -from google.cloud.automl_v1beta1.types.ranges import DoubleRange -from google.cloud.automl_v1beta1.types.regression import RegressionEvaluationMetrics -from google.cloud.automl_v1beta1.types.service import CreateDatasetRequest -from google.cloud.automl_v1beta1.types.service import CreateModelRequest -from google.cloud.automl_v1beta1.types.service import DeleteDatasetRequest -from google.cloud.automl_v1beta1.types.service import DeleteModelRequest -from google.cloud.automl_v1beta1.types.service import DeployModelRequest -from google.cloud.automl_v1beta1.types.service import ExportDataRequest -from google.cloud.automl_v1beta1.types.service import ExportEvaluatedExamplesRequest -from google.cloud.automl_v1beta1.types.service import ExportModelRequest -from google.cloud.automl_v1beta1.types.service import GetAnnotationSpecRequest -from google.cloud.automl_v1beta1.types.service import GetColumnSpecRequest -from google.cloud.automl_v1beta1.types.service import GetDatasetRequest -from google.cloud.automl_v1beta1.types.service import GetModelEvaluationRequest -from google.cloud.automl_v1beta1.types.service import GetModelRequest -from google.cloud.automl_v1beta1.types.service import GetTableSpecRequest -from google.cloud.automl_v1beta1.types.service import ImportDataRequest -from google.cloud.automl_v1beta1.types.service import ListColumnSpecsRequest -from google.cloud.automl_v1beta1.types.service import ListColumnSpecsResponse -from 
google.cloud.automl_v1beta1.types.service import ListDatasetsRequest -from google.cloud.automl_v1beta1.types.service import ListDatasetsResponse -from google.cloud.automl_v1beta1.types.service import ListModelEvaluationsRequest -from google.cloud.automl_v1beta1.types.service import ListModelEvaluationsResponse -from google.cloud.automl_v1beta1.types.service import ListModelsRequest -from google.cloud.automl_v1beta1.types.service import ListModelsResponse -from google.cloud.automl_v1beta1.types.service import ListTableSpecsRequest -from google.cloud.automl_v1beta1.types.service import ListTableSpecsResponse -from google.cloud.automl_v1beta1.types.service import UndeployModelRequest -from google.cloud.automl_v1beta1.types.service import UpdateColumnSpecRequest -from google.cloud.automl_v1beta1.types.service import UpdateDatasetRequest -from google.cloud.automl_v1beta1.types.service import UpdateTableSpecRequest -from google.cloud.automl_v1beta1.types.table_spec import TableSpec -from google.cloud.automl_v1beta1.types.tables import TablesAnnotation -from google.cloud.automl_v1beta1.types.tables import TablesDatasetMetadata -from google.cloud.automl_v1beta1.types.tables import TablesModelColumnInfo -from google.cloud.automl_v1beta1.types.tables import TablesModelMetadata -from google.cloud.automl_v1beta1.types.temporal import TimeSegment -from google.cloud.automl_v1beta1.types.text import TextClassificationDatasetMetadata -from google.cloud.automl_v1beta1.types.text import TextClassificationModelMetadata -from google.cloud.automl_v1beta1.types.text import TextExtractionDatasetMetadata -from google.cloud.automl_v1beta1.types.text import TextExtractionModelMetadata -from google.cloud.automl_v1beta1.types.text import TextSentimentDatasetMetadata -from google.cloud.automl_v1beta1.types.text import TextSentimentModelMetadata -from google.cloud.automl_v1beta1.types.text_extraction import TextExtractionAnnotation -from google.cloud.automl_v1beta1.types.text_extraction import TextExtractionEvaluationMetrics -from google.cloud.automl_v1beta1.types.text_segment import TextSegment -from google.cloud.automl_v1beta1.types.text_sentiment import TextSentimentAnnotation -from google.cloud.automl_v1beta1.types.text_sentiment import TextSentimentEvaluationMetrics -from google.cloud.automl_v1beta1.types.translation import TranslationAnnotation -from google.cloud.automl_v1beta1.types.translation import TranslationDatasetMetadata -from google.cloud.automl_v1beta1.types.translation import TranslationEvaluationMetrics -from google.cloud.automl_v1beta1.types.translation import TranslationModelMetadata -from google.cloud.automl_v1beta1.types.video import VideoClassificationDatasetMetadata -from google.cloud.automl_v1beta1.types.video import VideoClassificationModelMetadata -from google.cloud.automl_v1beta1.types.video import VideoObjectTrackingDatasetMetadata -from google.cloud.automl_v1beta1.types.video import VideoObjectTrackingModelMetadata - -__all__ = ('AutoMlClient', - 'AutoMlAsyncClient', - 'PredictionServiceClient', - 'PredictionServiceAsyncClient', - 'AnnotationPayload', - 'AnnotationSpec', - 'ClassificationAnnotation', - 'ClassificationEvaluationMetrics', - 'VideoClassificationAnnotation', - 'ClassificationType', - 'ColumnSpec', - 'Document', - 'DocumentDimensions', - 'ExamplePayload', - 'Image', - 'Row', - 'TextSnippet', - 'ArrayStats', - 'CategoryStats', - 'CorrelationStats', - 'DataStats', - 'Float64Stats', - 'StringStats', - 'StructStats', - 'TimestampStats', - 'DataType', - 'StructType', - 'TypeCode', - 
'Dataset', - 'BoundingBoxMetricsEntry', - 'ImageObjectDetectionAnnotation', - 'ImageObjectDetectionEvaluationMetrics', - 'VideoObjectTrackingAnnotation', - 'VideoObjectTrackingEvaluationMetrics', - 'BoundingPoly', - 'NormalizedVertex', - 'ImageClassificationDatasetMetadata', - 'ImageClassificationModelDeploymentMetadata', - 'ImageClassificationModelMetadata', - 'ImageObjectDetectionDatasetMetadata', - 'ImageObjectDetectionModelDeploymentMetadata', - 'ImageObjectDetectionModelMetadata', - 'BatchPredictInputConfig', - 'BatchPredictOutputConfig', - 'BigQueryDestination', - 'BigQuerySource', - 'DocumentInputConfig', - 'ExportEvaluatedExamplesOutputConfig', - 'GcrDestination', - 'GcsDestination', - 'GcsSource', - 'InputConfig', - 'ModelExportOutputConfig', - 'OutputConfig', - 'Model', - 'ModelEvaluation', - 'BatchPredictOperationMetadata', - 'CreateModelOperationMetadata', - 'DeleteOperationMetadata', - 'DeployModelOperationMetadata', - 'ExportDataOperationMetadata', - 'ExportEvaluatedExamplesOperationMetadata', - 'ExportModelOperationMetadata', - 'ImportDataOperationMetadata', - 'OperationMetadata', - 'UndeployModelOperationMetadata', - 'BatchPredictRequest', - 'BatchPredictResult', - 'PredictRequest', - 'PredictResponse', - 'DoubleRange', - 'RegressionEvaluationMetrics', - 'CreateDatasetRequest', - 'CreateModelRequest', - 'DeleteDatasetRequest', - 'DeleteModelRequest', - 'DeployModelRequest', - 'ExportDataRequest', - 'ExportEvaluatedExamplesRequest', - 'ExportModelRequest', - 'GetAnnotationSpecRequest', - 'GetColumnSpecRequest', - 'GetDatasetRequest', - 'GetModelEvaluationRequest', - 'GetModelRequest', - 'GetTableSpecRequest', - 'ImportDataRequest', - 'ListColumnSpecsRequest', - 'ListColumnSpecsResponse', - 'ListDatasetsRequest', - 'ListDatasetsResponse', - 'ListModelEvaluationsRequest', - 'ListModelEvaluationsResponse', - 'ListModelsRequest', - 'ListModelsResponse', - 'ListTableSpecsRequest', - 'ListTableSpecsResponse', - 'UndeployModelRequest', - 'UpdateColumnSpecRequest', - 'UpdateDatasetRequest', - 'UpdateTableSpecRequest', - 'TableSpec', - 'TablesAnnotation', - 'TablesDatasetMetadata', - 'TablesModelColumnInfo', - 'TablesModelMetadata', - 'TimeSegment', - 'TextClassificationDatasetMetadata', - 'TextClassificationModelMetadata', - 'TextExtractionDatasetMetadata', - 'TextExtractionModelMetadata', - 'TextSentimentDatasetMetadata', - 'TextSentimentModelMetadata', - 'TextExtractionAnnotation', - 'TextExtractionEvaluationMetrics', - 'TextSegment', - 'TextSentimentAnnotation', - 'TextSentimentEvaluationMetrics', - 'TranslationAnnotation', - 'TranslationDatasetMetadata', - 'TranslationEvaluationMetrics', - 'TranslationModelMetadata', - 'VideoClassificationDatasetMetadata', - 'VideoClassificationModelMetadata', - 'VideoObjectTrackingDatasetMetadata', - 'VideoObjectTrackingModelMetadata', -) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl/gapic_version.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl/py.typed b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl/py.typed deleted file mode 100644 index 0560ba18db71..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-automl package uses inline types. diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/__init__.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/__init__.py deleted file mode 100644 index 92ff5dcfb300..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/__init__.py +++ /dev/null @@ -1,276 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.automl_v1beta1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.auto_ml import AutoMlClient -from .services.auto_ml import AutoMlAsyncClient -from .services.prediction_service import PredictionServiceClient -from .services.prediction_service import PredictionServiceAsyncClient - -from .types.annotation_payload import AnnotationPayload -from .types.annotation_spec import AnnotationSpec -from .types.classification import ClassificationAnnotation -from .types.classification import ClassificationEvaluationMetrics -from .types.classification import VideoClassificationAnnotation -from .types.classification import ClassificationType -from .types.column_spec import ColumnSpec -from .types.data_items import Document -from .types.data_items import DocumentDimensions -from .types.data_items import ExamplePayload -from .types.data_items import Image -from .types.data_items import Row -from .types.data_items import TextSnippet -from .types.data_stats import ArrayStats -from .types.data_stats import CategoryStats -from .types.data_stats import CorrelationStats -from .types.data_stats import DataStats -from .types.data_stats import Float64Stats -from .types.data_stats import StringStats -from .types.data_stats import StructStats -from .types.data_stats import TimestampStats -from .types.data_types import DataType -from .types.data_types import StructType -from .types.data_types import TypeCode -from .types.dataset import Dataset -from .types.detection import BoundingBoxMetricsEntry -from .types.detection import ImageObjectDetectionAnnotation -from .types.detection import ImageObjectDetectionEvaluationMetrics -from .types.detection import VideoObjectTrackingAnnotation -from .types.detection import VideoObjectTrackingEvaluationMetrics -from .types.geometry import BoundingPoly -from .types.geometry import NormalizedVertex -from .types.image import ImageClassificationDatasetMetadata -from .types.image import ImageClassificationModelDeploymentMetadata -from .types.image import ImageClassificationModelMetadata -from .types.image import ImageObjectDetectionDatasetMetadata -from .types.image import ImageObjectDetectionModelDeploymentMetadata -from .types.image import ImageObjectDetectionModelMetadata -from .types.io import BatchPredictInputConfig -from .types.io import BatchPredictOutputConfig -from .types.io import BigQueryDestination -from .types.io import BigQuerySource -from .types.io import DocumentInputConfig -from .types.io import ExportEvaluatedExamplesOutputConfig -from .types.io import GcrDestination -from .types.io import GcsDestination -from .types.io import GcsSource -from .types.io import InputConfig -from .types.io import ModelExportOutputConfig -from .types.io import OutputConfig -from .types.model import Model -from .types.model_evaluation import ModelEvaluation -from .types.operations import BatchPredictOperationMetadata -from .types.operations import CreateModelOperationMetadata -from .types.operations import DeleteOperationMetadata -from .types.operations import DeployModelOperationMetadata -from .types.operations import ExportDataOperationMetadata -from .types.operations import ExportEvaluatedExamplesOperationMetadata -from .types.operations import ExportModelOperationMetadata -from .types.operations import ImportDataOperationMetadata -from .types.operations import OperationMetadata -from .types.operations import UndeployModelOperationMetadata -from .types.prediction_service import BatchPredictRequest -from 
.types.prediction_service import BatchPredictResult -from .types.prediction_service import PredictRequest -from .types.prediction_service import PredictResponse -from .types.ranges import DoubleRange -from .types.regression import RegressionEvaluationMetrics -from .types.service import CreateDatasetRequest -from .types.service import CreateModelRequest -from .types.service import DeleteDatasetRequest -from .types.service import DeleteModelRequest -from .types.service import DeployModelRequest -from .types.service import ExportDataRequest -from .types.service import ExportEvaluatedExamplesRequest -from .types.service import ExportModelRequest -from .types.service import GetAnnotationSpecRequest -from .types.service import GetColumnSpecRequest -from .types.service import GetDatasetRequest -from .types.service import GetModelEvaluationRequest -from .types.service import GetModelRequest -from .types.service import GetTableSpecRequest -from .types.service import ImportDataRequest -from .types.service import ListColumnSpecsRequest -from .types.service import ListColumnSpecsResponse -from .types.service import ListDatasetsRequest -from .types.service import ListDatasetsResponse -from .types.service import ListModelEvaluationsRequest -from .types.service import ListModelEvaluationsResponse -from .types.service import ListModelsRequest -from .types.service import ListModelsResponse -from .types.service import ListTableSpecsRequest -from .types.service import ListTableSpecsResponse -from .types.service import UndeployModelRequest -from .types.service import UpdateColumnSpecRequest -from .types.service import UpdateDatasetRequest -from .types.service import UpdateTableSpecRequest -from .types.table_spec import TableSpec -from .types.tables import TablesAnnotation -from .types.tables import TablesDatasetMetadata -from .types.tables import TablesModelColumnInfo -from .types.tables import TablesModelMetadata -from .types.temporal import TimeSegment -from .types.text import TextClassificationDatasetMetadata -from .types.text import TextClassificationModelMetadata -from .types.text import TextExtractionDatasetMetadata -from .types.text import TextExtractionModelMetadata -from .types.text import TextSentimentDatasetMetadata -from .types.text import TextSentimentModelMetadata -from .types.text_extraction import TextExtractionAnnotation -from .types.text_extraction import TextExtractionEvaluationMetrics -from .types.text_segment import TextSegment -from .types.text_sentiment import TextSentimentAnnotation -from .types.text_sentiment import TextSentimentEvaluationMetrics -from .types.translation import TranslationAnnotation -from .types.translation import TranslationDatasetMetadata -from .types.translation import TranslationEvaluationMetrics -from .types.translation import TranslationModelMetadata -from .types.video import VideoClassificationDatasetMetadata -from .types.video import VideoClassificationModelMetadata -from .types.video import VideoObjectTrackingDatasetMetadata -from .types.video import VideoObjectTrackingModelMetadata - -__all__ = ( - 'AutoMlAsyncClient', - 'PredictionServiceAsyncClient', -'AnnotationPayload', -'AnnotationSpec', -'ArrayStats', -'AutoMlClient', -'BatchPredictInputConfig', -'BatchPredictOperationMetadata', -'BatchPredictOutputConfig', -'BatchPredictRequest', -'BatchPredictResult', -'BigQueryDestination', -'BigQuerySource', -'BoundingBoxMetricsEntry', -'BoundingPoly', -'CategoryStats', -'ClassificationAnnotation', -'ClassificationEvaluationMetrics', -'ClassificationType', 
-'ColumnSpec', -'CorrelationStats', -'CreateDatasetRequest', -'CreateModelOperationMetadata', -'CreateModelRequest', -'DataStats', -'DataType', -'Dataset', -'DeleteDatasetRequest', -'DeleteModelRequest', -'DeleteOperationMetadata', -'DeployModelOperationMetadata', -'DeployModelRequest', -'Document', -'DocumentDimensions', -'DocumentInputConfig', -'DoubleRange', -'ExamplePayload', -'ExportDataOperationMetadata', -'ExportDataRequest', -'ExportEvaluatedExamplesOperationMetadata', -'ExportEvaluatedExamplesOutputConfig', -'ExportEvaluatedExamplesRequest', -'ExportModelOperationMetadata', -'ExportModelRequest', -'Float64Stats', -'GcrDestination', -'GcsDestination', -'GcsSource', -'GetAnnotationSpecRequest', -'GetColumnSpecRequest', -'GetDatasetRequest', -'GetModelEvaluationRequest', -'GetModelRequest', -'GetTableSpecRequest', -'Image', -'ImageClassificationDatasetMetadata', -'ImageClassificationModelDeploymentMetadata', -'ImageClassificationModelMetadata', -'ImageObjectDetectionAnnotation', -'ImageObjectDetectionDatasetMetadata', -'ImageObjectDetectionEvaluationMetrics', -'ImageObjectDetectionModelDeploymentMetadata', -'ImageObjectDetectionModelMetadata', -'ImportDataOperationMetadata', -'ImportDataRequest', -'InputConfig', -'ListColumnSpecsRequest', -'ListColumnSpecsResponse', -'ListDatasetsRequest', -'ListDatasetsResponse', -'ListModelEvaluationsRequest', -'ListModelEvaluationsResponse', -'ListModelsRequest', -'ListModelsResponse', -'ListTableSpecsRequest', -'ListTableSpecsResponse', -'Model', -'ModelEvaluation', -'ModelExportOutputConfig', -'NormalizedVertex', -'OperationMetadata', -'OutputConfig', -'PredictRequest', -'PredictResponse', -'PredictionServiceClient', -'RegressionEvaluationMetrics', -'Row', -'StringStats', -'StructStats', -'StructType', -'TableSpec', -'TablesAnnotation', -'TablesDatasetMetadata', -'TablesModelColumnInfo', -'TablesModelMetadata', -'TextClassificationDatasetMetadata', -'TextClassificationModelMetadata', -'TextExtractionAnnotation', -'TextExtractionDatasetMetadata', -'TextExtractionEvaluationMetrics', -'TextExtractionModelMetadata', -'TextSegment', -'TextSentimentAnnotation', -'TextSentimentDatasetMetadata', -'TextSentimentEvaluationMetrics', -'TextSentimentModelMetadata', -'TextSnippet', -'TimeSegment', -'TimestampStats', -'TranslationAnnotation', -'TranslationDatasetMetadata', -'TranslationEvaluationMetrics', -'TranslationModelMetadata', -'TypeCode', -'UndeployModelOperationMetadata', -'UndeployModelRequest', -'UpdateColumnSpecRequest', -'UpdateDatasetRequest', -'UpdateTableSpecRequest', -'VideoClassificationAnnotation', -'VideoClassificationDatasetMetadata', -'VideoClassificationModelMetadata', -'VideoObjectTrackingAnnotation', -'VideoObjectTrackingDatasetMetadata', -'VideoObjectTrackingEvaluationMetrics', -'VideoObjectTrackingModelMetadata', -) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/gapic_metadata.json b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/gapic_metadata.json deleted file mode 100644 index 74e852891e93..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/gapic_metadata.json +++ /dev/null @@ -1,437 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.automl_v1beta1", - "protoPackage": "google.cloud.automl.v1beta1", - "schema": "1.0", - "services": { - "AutoMl": { - "clients": { - "grpc": { - "libraryClient": "AutoMlClient", - "rpcs": 
{ - "CreateDataset": { - "methods": [ - "create_dataset" - ] - }, - "CreateModel": { - "methods": [ - "create_model" - ] - }, - "DeleteDataset": { - "methods": [ - "delete_dataset" - ] - }, - "DeleteModel": { - "methods": [ - "delete_model" - ] - }, - "DeployModel": { - "methods": [ - "deploy_model" - ] - }, - "ExportData": { - "methods": [ - "export_data" - ] - }, - "ExportEvaluatedExamples": { - "methods": [ - "export_evaluated_examples" - ] - }, - "ExportModel": { - "methods": [ - "export_model" - ] - }, - "GetAnnotationSpec": { - "methods": [ - "get_annotation_spec" - ] - }, - "GetColumnSpec": { - "methods": [ - "get_column_spec" - ] - }, - "GetDataset": { - "methods": [ - "get_dataset" - ] - }, - "GetModel": { - "methods": [ - "get_model" - ] - }, - "GetModelEvaluation": { - "methods": [ - "get_model_evaluation" - ] - }, - "GetTableSpec": { - "methods": [ - "get_table_spec" - ] - }, - "ImportData": { - "methods": [ - "import_data" - ] - }, - "ListColumnSpecs": { - "methods": [ - "list_column_specs" - ] - }, - "ListDatasets": { - "methods": [ - "list_datasets" - ] - }, - "ListModelEvaluations": { - "methods": [ - "list_model_evaluations" - ] - }, - "ListModels": { - "methods": [ - "list_models" - ] - }, - "ListTableSpecs": { - "methods": [ - "list_table_specs" - ] - }, - "UndeployModel": { - "methods": [ - "undeploy_model" - ] - }, - "UpdateColumnSpec": { - "methods": [ - "update_column_spec" - ] - }, - "UpdateDataset": { - "methods": [ - "update_dataset" - ] - }, - "UpdateTableSpec": { - "methods": [ - "update_table_spec" - ] - } - } - }, - "grpc-async": { - "libraryClient": "AutoMlAsyncClient", - "rpcs": { - "CreateDataset": { - "methods": [ - "create_dataset" - ] - }, - "CreateModel": { - "methods": [ - "create_model" - ] - }, - "DeleteDataset": { - "methods": [ - "delete_dataset" - ] - }, - "DeleteModel": { - "methods": [ - "delete_model" - ] - }, - "DeployModel": { - "methods": [ - "deploy_model" - ] - }, - "ExportData": { - "methods": [ - "export_data" - ] - }, - "ExportEvaluatedExamples": { - "methods": [ - "export_evaluated_examples" - ] - }, - "ExportModel": { - "methods": [ - "export_model" - ] - }, - "GetAnnotationSpec": { - "methods": [ - "get_annotation_spec" - ] - }, - "GetColumnSpec": { - "methods": [ - "get_column_spec" - ] - }, - "GetDataset": { - "methods": [ - "get_dataset" - ] - }, - "GetModel": { - "methods": [ - "get_model" - ] - }, - "GetModelEvaluation": { - "methods": [ - "get_model_evaluation" - ] - }, - "GetTableSpec": { - "methods": [ - "get_table_spec" - ] - }, - "ImportData": { - "methods": [ - "import_data" - ] - }, - "ListColumnSpecs": { - "methods": [ - "list_column_specs" - ] - }, - "ListDatasets": { - "methods": [ - "list_datasets" - ] - }, - "ListModelEvaluations": { - "methods": [ - "list_model_evaluations" - ] - }, - "ListModels": { - "methods": [ - "list_models" - ] - }, - "ListTableSpecs": { - "methods": [ - "list_table_specs" - ] - }, - "UndeployModel": { - "methods": [ - "undeploy_model" - ] - }, - "UpdateColumnSpec": { - "methods": [ - "update_column_spec" - ] - }, - "UpdateDataset": { - "methods": [ - "update_dataset" - ] - }, - "UpdateTableSpec": { - "methods": [ - "update_table_spec" - ] - } - } - }, - "rest": { - "libraryClient": "AutoMlClient", - "rpcs": { - "CreateDataset": { - "methods": [ - "create_dataset" - ] - }, - "CreateModel": { - "methods": [ - "create_model" - ] - }, - "DeleteDataset": { - "methods": [ - "delete_dataset" - ] - }, - "DeleteModel": { - "methods": [ - "delete_model" - ] - }, - "DeployModel": { - "methods": [ - 
"deploy_model" - ] - }, - "ExportData": { - "methods": [ - "export_data" - ] - }, - "ExportEvaluatedExamples": { - "methods": [ - "export_evaluated_examples" - ] - }, - "ExportModel": { - "methods": [ - "export_model" - ] - }, - "GetAnnotationSpec": { - "methods": [ - "get_annotation_spec" - ] - }, - "GetColumnSpec": { - "methods": [ - "get_column_spec" - ] - }, - "GetDataset": { - "methods": [ - "get_dataset" - ] - }, - "GetModel": { - "methods": [ - "get_model" - ] - }, - "GetModelEvaluation": { - "methods": [ - "get_model_evaluation" - ] - }, - "GetTableSpec": { - "methods": [ - "get_table_spec" - ] - }, - "ImportData": { - "methods": [ - "import_data" - ] - }, - "ListColumnSpecs": { - "methods": [ - "list_column_specs" - ] - }, - "ListDatasets": { - "methods": [ - "list_datasets" - ] - }, - "ListModelEvaluations": { - "methods": [ - "list_model_evaluations" - ] - }, - "ListModels": { - "methods": [ - "list_models" - ] - }, - "ListTableSpecs": { - "methods": [ - "list_table_specs" - ] - }, - "UndeployModel": { - "methods": [ - "undeploy_model" - ] - }, - "UpdateColumnSpec": { - "methods": [ - "update_column_spec" - ] - }, - "UpdateDataset": { - "methods": [ - "update_dataset" - ] - }, - "UpdateTableSpec": { - "methods": [ - "update_table_spec" - ] - } - } - } - } - }, - "PredictionService": { - "clients": { - "grpc": { - "libraryClient": "PredictionServiceClient", - "rpcs": { - "BatchPredict": { - "methods": [ - "batch_predict" - ] - }, - "Predict": { - "methods": [ - "predict" - ] - } - } - }, - "grpc-async": { - "libraryClient": "PredictionServiceAsyncClient", - "rpcs": { - "BatchPredict": { - "methods": [ - "batch_predict" - ] - }, - "Predict": { - "methods": [ - "predict" - ] - } - } - }, - "rest": { - "libraryClient": "PredictionServiceClient", - "rpcs": { - "BatchPredict": { - "methods": [ - "batch_predict" - ] - }, - "Predict": { - "methods": [ - "predict" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/gapic_version.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/py.typed b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/py.typed deleted file mode 100644 index 0560ba18db71..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-automl package uses inline types. 
diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/__init__.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/__init__.py deleted file mode 100644 index 8f6cf068242c..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/auto_ml/__init__.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/auto_ml/__init__.py deleted file mode 100644 index cbde42be45c8..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/auto_ml/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import AutoMlClient -from .async_client import AutoMlAsyncClient - -__all__ = ( - 'AutoMlClient', - 'AutoMlAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/auto_ml/async_client.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/auto_ml/async_client.py deleted file mode 100644 index ab90139f0b5e..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/auto_ml/async_client.py +++ /dev/null @@ -1,3278 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.automl_v1beta1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.automl_v1beta1.services.auto_ml import pagers -from google.cloud.automl_v1beta1.types import annotation_spec -from google.cloud.automl_v1beta1.types import classification -from google.cloud.automl_v1beta1.types import column_spec -from google.cloud.automl_v1beta1.types import column_spec as gca_column_spec -from google.cloud.automl_v1beta1.types import data_stats -from google.cloud.automl_v1beta1.types import data_types -from google.cloud.automl_v1beta1.types import dataset -from google.cloud.automl_v1beta1.types import dataset as gca_dataset -from google.cloud.automl_v1beta1.types import detection -from google.cloud.automl_v1beta1.types import image -from google.cloud.automl_v1beta1.types import io -from google.cloud.automl_v1beta1.types import model -from google.cloud.automl_v1beta1.types import model as gca_model -from google.cloud.automl_v1beta1.types import model_evaluation -from google.cloud.automl_v1beta1.types import operations -from google.cloud.automl_v1beta1.types import regression -from google.cloud.automl_v1beta1.types import service -from google.cloud.automl_v1beta1.types import table_spec -from google.cloud.automl_v1beta1.types import table_spec as gca_table_spec -from google.cloud.automl_v1beta1.types import tables -from google.cloud.automl_v1beta1.types import text -from google.cloud.automl_v1beta1.types import text_extraction -from google.cloud.automl_v1beta1.types import text_sentiment -from google.cloud.automl_v1beta1.types import translation -from google.cloud.automl_v1beta1.types import video -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import AutoMlTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import AutoMlGrpcAsyncIOTransport -from .client import AutoMlClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -class AutoMlAsyncClient: - """AutoML Server API. - - The resource names are assigned by the server. The server never - reuses names that it has created after the resources with those - names are deleted. - - An ID of a resource is the last element of the item's resource name. - For - ``projects/{project_id}/locations/{location_id}/datasets/{dataset_id}``, - the ID of the item is ``{dataset_id}``. - - Currently the only supported ``location_id`` is "us-central1".
- - On any input that is documented to expect a string parameter in - snake_case or kebab-case, either of those cases is accepted. - """ - - _client: AutoMlClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = AutoMlClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = AutoMlClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = AutoMlClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = AutoMlClient._DEFAULT_UNIVERSE - - annotation_spec_path = staticmethod(AutoMlClient.annotation_spec_path) - parse_annotation_spec_path = staticmethod(AutoMlClient.parse_annotation_spec_path) - column_spec_path = staticmethod(AutoMlClient.column_spec_path) - parse_column_spec_path = staticmethod(AutoMlClient.parse_column_spec_path) - dataset_path = staticmethod(AutoMlClient.dataset_path) - parse_dataset_path = staticmethod(AutoMlClient.parse_dataset_path) - model_path = staticmethod(AutoMlClient.model_path) - parse_model_path = staticmethod(AutoMlClient.parse_model_path) - model_evaluation_path = staticmethod(AutoMlClient.model_evaluation_path) - parse_model_evaluation_path = staticmethod(AutoMlClient.parse_model_evaluation_path) - table_spec_path = staticmethod(AutoMlClient.table_spec_path) - parse_table_spec_path = staticmethod(AutoMlClient.parse_table_spec_path) - common_billing_account_path = staticmethod(AutoMlClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(AutoMlClient.parse_common_billing_account_path) - common_folder_path = staticmethod(AutoMlClient.common_folder_path) - parse_common_folder_path = staticmethod(AutoMlClient.parse_common_folder_path) - common_organization_path = staticmethod(AutoMlClient.common_organization_path) - parse_common_organization_path = staticmethod(AutoMlClient.parse_common_organization_path) - common_project_path = staticmethod(AutoMlClient.common_project_path) - parse_common_project_path = staticmethod(AutoMlClient.parse_common_project_path) - common_location_path = staticmethod(AutoMlClient.common_location_path) - parse_common_location_path = staticmethod(AutoMlClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - AutoMlAsyncClient: The constructed client. - """ - return AutoMlClient.from_service_account_info.__func__(AutoMlAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - AutoMlAsyncClient: The constructed client. - """ - return AutoMlClient.from_service_account_file.__func__(AutoMlAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. 
- - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` is provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return AutoMlClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> AutoMlTransport: - """Returns the transport used by the client instance. - - Returns: - AutoMlTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = AutoMlClient.get_transport_class - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, AutoMlTransport, Callable[..., AutoMlTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the AutoML async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,AutoMlTransport,Callable[..., AutoMlTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the AutoMlTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided.
Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which can have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = AutoMlClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.automl_v1beta1.AutoMlAsyncClient`.", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "credentialsType": None, - } - ) - - async def create_dataset(self, - request: Optional[Union[service.CreateDatasetRequest, dict]] = None, - *, - parent: Optional[str] = None, - dataset: Optional[gca_dataset.Dataset] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gca_dataset.Dataset: - r"""Creates a dataset. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1beta1 - - async def sample_create_dataset(): - # Create a client - client = automl_v1beta1.AutoMlAsyncClient() - - # Initialize request argument(s) - dataset = automl_v1beta1.Dataset() - dataset.translation_dataset_metadata.source_language_code = "source_language_code_value" - dataset.translation_dataset_metadata.target_language_code = "target_language_code_value" - - request = automl_v1beta1.CreateDatasetRequest( - parent="parent_value", - dataset=dataset, - ) - - # Make the request - response = await client.create_dataset(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.automl_v1beta1.types.CreateDatasetRequest, dict]]): - The request object. Request message for - [AutoMl.CreateDataset][google.cloud.automl.v1beta1.AutoMl.CreateDataset]. - parent (:class:`str`): - Required. The resource name of the - project to create the dataset for. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - dataset (:class:`google.cloud.automl_v1beta1.types.Dataset`): - Required. The dataset to create. - This corresponds to the ``dataset`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.automl_v1beta1.types.Dataset: - A workspace for solving a single, - particular machine learning (ML) - problem. A workspace contains examples - that may be annotated. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, dataset] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.CreateDatasetRequest): - request = service.CreateDatasetRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if dataset is not None: - request.dataset = dataset - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_dataset] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_dataset(self, - request: Optional[Union[service.GetDatasetRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dataset.Dataset: - r"""Gets a dataset. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1beta1 - - async def sample_get_dataset(): - # Create a client - client = automl_v1beta1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1beta1.GetDatasetRequest( - name="name_value", - ) - - # Make the request - response = await client.get_dataset(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.automl_v1beta1.types.GetDatasetRequest, dict]]): - The request object. Request message for - [AutoMl.GetDataset][google.cloud.automl.v1beta1.AutoMl.GetDataset]. - name (:class:`str`): - Required. The resource name of the - dataset to retrieve. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.automl_v1beta1.types.Dataset: - A workspace for solving a single, - particular machine learning (ML) - problem. A workspace contains examples - that may be annotated. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.GetDatasetRequest): - request = service.GetDatasetRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_dataset] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_datasets(self, - request: Optional[Union[service.ListDatasetsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListDatasetsAsyncPager: - r"""Lists datasets in a project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1beta1 - - async def sample_list_datasets(): - # Create a client - client = automl_v1beta1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1beta1.ListDatasetsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_datasets(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.automl_v1beta1.types.ListDatasetsRequest, dict]]): - The request object. Request message for - [AutoMl.ListDatasets][google.cloud.automl.v1beta1.AutoMl.ListDatasets]. - parent (:class:`str`): - Required. The resource name of the - project from which to list datasets. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.automl_v1beta1.services.auto_ml.pagers.ListDatasetsAsyncPager: - Response message for - [AutoMl.ListDatasets][google.cloud.automl.v1beta1.AutoMl.ListDatasets]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
- if not isinstance(request, service.ListDatasetsRequest): - request = service.ListDatasetsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_datasets] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListDatasetsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_dataset(self, - request: Optional[Union[service.UpdateDatasetRequest, dict]] = None, - *, - dataset: Optional[gca_dataset.Dataset] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gca_dataset.Dataset: - r"""Updates a dataset. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1beta1 - - async def sample_update_dataset(): - # Create a client - client = automl_v1beta1.AutoMlAsyncClient() - - # Initialize request argument(s) - dataset = automl_v1beta1.Dataset() - dataset.translation_dataset_metadata.source_language_code = "source_language_code_value" - dataset.translation_dataset_metadata.target_language_code = "target_language_code_value" - - request = automl_v1beta1.UpdateDatasetRequest( - dataset=dataset, - ) - - # Make the request - response = await client.update_dataset(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.automl_v1beta1.types.UpdateDatasetRequest, dict]]): - The request object. Request message for - [AutoMl.UpdateDataset][google.cloud.automl.v1beta1.AutoMl.UpdateDataset] - dataset (:class:`google.cloud.automl_v1beta1.types.Dataset`): - Required. The dataset which replaces - the resource on the server. - - This corresponds to the ``dataset`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- - Returns: - google.cloud.automl_v1beta1.types.Dataset: - A workspace for solving a single, - particular machine learning (ML) - problem. A workspace contains examples - that may be annotated. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [dataset] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.UpdateDatasetRequest): - request = service.UpdateDatasetRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if dataset is not None: - request.dataset = dataset - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_dataset] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("dataset.name", request.dataset.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_dataset(self, - request: Optional[Union[service.DeleteDatasetRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes a dataset and all of its contents. Returns empty - response in the - [response][google.longrunning.Operation.response] field when it - completes, and ``delete_details`` in the - [metadata][google.longrunning.Operation.metadata] field. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1beta1 - - async def sample_delete_dataset(): - # Create a client - client = automl_v1beta1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1beta1.DeleteDatasetRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_dataset(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.automl_v1beta1.types.DeleteDatasetRequest, dict]]): - The request object. Request message for - [AutoMl.DeleteDataset][google.cloud.automl.v1beta1.AutoMl.DeleteDataset]. - name (:class:`str`): - Required. The resource name of the - dataset to delete. 
- - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.DeleteDatasetRequest): - request = service.DeleteDatasetRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_dataset] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=operations.OperationMetadata, - ) - - # Done; return the response. - return response - - async def import_data(self, - request: Optional[Union[service.ImportDataRequest, dict]] = None, - *, - name: Optional[str] = None, - input_config: Optional[io.InputConfig] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Imports data into a dataset. For Tables this method can only be - called on an empty Dataset. - - For Tables: - - - A - [schema_inference_version][google.cloud.automl.v1beta1.InputConfig.params] - parameter must be explicitly set. Returns an empty response - in the [response][google.longrunning.Operation.response] - field when it completes. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1beta1 - - async def sample_import_data(): - # Create a client - client = automl_v1beta1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1beta1.ImportDataRequest( - name="name_value", - ) - - # Make the request - operation = client.import_data(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.automl_v1beta1.types.ImportDataRequest, dict]]): - The request object. Request message for - [AutoMl.ImportData][google.cloud.automl.v1beta1.AutoMl.ImportData]. - name (:class:`str`): - Required. Dataset name. Dataset must - already exist. All imported annotations - and examples will be added. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - input_config (:class:`google.cloud.automl_v1beta1.types.InputConfig`): - Required. The desired input location - and its domain specific semantics, if - any. - - This corresponds to the ``input_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name, input_config] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ImportDataRequest): - request = service.ImportDataRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
-        if name is not None:
-            request.name = name
-        if input_config is not None:
-            request.input_config = input_config
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.import_data]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Wrap the response in an operation future.
-        response = operation_async.from_gapic(
-            response,
-            self._client._transport.operations_client,
-            empty_pb2.Empty,
-            metadata_type=operations.OperationMetadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def export_data(self,
-            request: Optional[Union[service.ExportDataRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            output_config: Optional[io.OutputConfig] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> operation_async.AsyncOperation:
-        r"""Exports a dataset's data to the provided output location. Returns
-        an empty response in the
-        [response][google.longrunning.Operation.response] field when it
-        completes.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import automl_v1beta1
-
-            async def sample_export_data():
-                # Create a client
-                client = automl_v1beta1.AutoMlAsyncClient()
-
-                # Initialize request argument(s)
-                request = automl_v1beta1.ExportDataRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                operation = client.export_data(request=request)
-
-                print("Waiting for operation to complete...")
-
-                response = (await operation).result()
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.automl_v1beta1.types.ExportDataRequest, dict]]):
-                The request object. Request message for
-                [AutoMl.ExportData][google.cloud.automl.v1beta1.AutoMl.ExportData].
-            name (:class:`str`):
-                Required. The resource name of the
-                dataset.
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            output_config (:class:`google.cloud.automl_v1beta1.types.OutputConfig`):
-                Required. The desired output
-                location.
-
-                This corresponds to the ``output_config`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
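The ``metadata`` tuples documented in these Args sections are forwarded verbatim as gRPC metadata: each value must be a ``str`` unless its key ends in ``-bin``, in which case it must be ``bytes``. A minimal sketch of both cases, using header names that are purely illustrative rather than defined by this API:

.. code-block:: python

    # Hypothetical metadata pairs for illustration; neither key is defined
    # by the AutoML API itself.
    extra_metadata = (
        ("x-example-header", "a plain string value"),  # ordinary key -> str value
        ("x-example-trace-bin", b"\x0a\x0b\x0c"),      # `-bin` key -> bytes value
    )

    # The pairs would be passed through unchanged, e.g.:
    #     response = await client.export_data(request=request, metadata=extra_metadata)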
- - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name, output_config] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ExportDataRequest): - request = service.ExportDataRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if output_config is not None: - request.output_config = output_config - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.export_data] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=operations.OperationMetadata, - ) - - # Done; return the response. - return response - - async def get_annotation_spec(self, - request: Optional[Union[service.GetAnnotationSpecRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> annotation_spec.AnnotationSpec: - r"""Gets an annotation spec. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1beta1 - - async def sample_get_annotation_spec(): - # Create a client - client = automl_v1beta1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1beta1.GetAnnotationSpecRequest( - name="name_value", - ) - - # Make the request - response = await client.get_annotation_spec(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.automl_v1beta1.types.GetAnnotationSpecRequest, dict]]): - The request object. Request message for - [AutoMl.GetAnnotationSpec][google.cloud.automl.v1beta1.AutoMl.GetAnnotationSpec]. - name (:class:`str`): - Required. The resource name of the - annotation spec to retrieve. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.automl_v1beta1.types.AnnotationSpec: - A definition of an annotation spec. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.GetAnnotationSpecRequest): - request = service.GetAnnotationSpecRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_annotation_spec] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_table_spec(self, - request: Optional[Union[service.GetTableSpecRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> table_spec.TableSpec: - r"""Gets a table spec. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1beta1 - - async def sample_get_table_spec(): - # Create a client - client = automl_v1beta1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1beta1.GetTableSpecRequest( - name="name_value", - ) - - # Make the request - response = await client.get_table_spec(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.automl_v1beta1.types.GetTableSpecRequest, dict]]): - The request object. Request message for - [AutoMl.GetTableSpec][google.cloud.automl.v1beta1.AutoMl.GetTableSpec]. - name (:class:`str`): - Required. The resource name of the - table spec to retrieve. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.automl_v1beta1.types.TableSpec: - A specification of a relational table. - The table's schema is represented via its child - column specs. It is pre-populated as part of - ImportData by schema inference algorithm, the version - of which is a required parameter of ImportData - InputConfig. Note: While working with a table, at - times the schema may be inconsistent with the data in - the table (e.g. string in a FLOAT64 column). The - consistency validation is done upon creation of a - model. Used by: \* Tables - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.GetTableSpecRequest): - request = service.GetTableSpecRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_table_spec] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. 
- self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_table_specs(self, - request: Optional[Union[service.ListTableSpecsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListTableSpecsAsyncPager: - r"""Lists table specs in a dataset. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1beta1 - - async def sample_list_table_specs(): - # Create a client - client = automl_v1beta1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1beta1.ListTableSpecsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_table_specs(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.automl_v1beta1.types.ListTableSpecsRequest, dict]]): - The request object. Request message for - [AutoMl.ListTableSpecs][google.cloud.automl.v1beta1.AutoMl.ListTableSpecs]. - parent (:class:`str`): - Required. The resource name of the - dataset to list table specs from. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.automl_v1beta1.services.auto_ml.pagers.ListTableSpecsAsyncPager: - Response message for - [AutoMl.ListTableSpecs][google.cloud.automl.v1beta1.AutoMl.ListTableSpecs]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListTableSpecsRequest): - request = service.ListTableSpecsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_table_specs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListTableSpecsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_table_spec(self, - request: Optional[Union[service.UpdateTableSpecRequest, dict]] = None, - *, - table_spec: Optional[gca_table_spec.TableSpec] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gca_table_spec.TableSpec: - r"""Updates a table spec. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1beta1 - - async def sample_update_table_spec(): - # Create a client - client = automl_v1beta1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1beta1.UpdateTableSpecRequest( - ) - - # Make the request - response = await client.update_table_spec(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.automl_v1beta1.types.UpdateTableSpecRequest, dict]]): - The request object. Request message for - [AutoMl.UpdateTableSpec][google.cloud.automl.v1beta1.AutoMl.UpdateTableSpec] - table_spec (:class:`google.cloud.automl_v1beta1.types.TableSpec`): - Required. The table spec which - replaces the resource on the server. - - This corresponds to the ``table_spec`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.automl_v1beta1.types.TableSpec: - A specification of a relational table. - The table's schema is represented via its child - column specs. It is pre-populated as part of - ImportData by schema inference algorithm, the version - of which is a required parameter of ImportData - InputConfig. Note: While working with a table, at - times the schema may be inconsistent with the data in - the table (e.g. 
string in a FLOAT64 column). The - consistency validation is done upon creation of a - model. Used by: \* Tables - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [table_spec] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.UpdateTableSpecRequest): - request = service.UpdateTableSpecRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if table_spec is not None: - request.table_spec = table_spec - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_table_spec] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("table_spec.name", request.table_spec.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_column_spec(self, - request: Optional[Union[service.GetColumnSpecRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> column_spec.ColumnSpec: - r"""Gets a column spec. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1beta1 - - async def sample_get_column_spec(): - # Create a client - client = automl_v1beta1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1beta1.GetColumnSpecRequest( - name="name_value", - ) - - # Make the request - response = await client.get_column_spec(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.automl_v1beta1.types.GetColumnSpecRequest, dict]]): - The request object. Request message for - [AutoMl.GetColumnSpec][google.cloud.automl.v1beta1.AutoMl.GetColumnSpec]. - name (:class:`str`): - Required. The resource name of the - column spec to retrieve. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.automl_v1beta1.types.ColumnSpec:
-                A representation of a column in a relational table. When listing them, column specs are returned in the same order in which they were
-                given on import. Used by: \* Tables
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [name]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, service.GetColumnSpecRequest):
-            request = service.GetColumnSpecRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if name is not None:
-            request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.get_column_spec]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def list_column_specs(self,
-            request: Optional[Union[service.ListColumnSpecsRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> pagers.ListColumnSpecsAsyncPager:
-        r"""Lists column specs in a table spec.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import automl_v1beta1
-
-            async def sample_list_column_specs():
-                # Create a client
-                client = automl_v1beta1.AutoMlAsyncClient()
-
-                # Initialize request argument(s)
-                request = automl_v1beta1.ListColumnSpecsRequest(
-                    parent="parent_value",
-                )
-
-                # Make the request
-                page_result = client.list_column_specs(request=request)
-
-                # Handle the response
-                async for response in page_result:
-                    print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.automl_v1beta1.types.ListColumnSpecsRequest, dict]]):
-                The request object. Request message for
-                [AutoMl.ListColumnSpecs][google.cloud.automl.v1beta1.AutoMl.ListColumnSpecs].
-            parent (:class:`str`):
-                Required.
The resource name of the - table spec to list column specs from. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.automl_v1beta1.services.auto_ml.pagers.ListColumnSpecsAsyncPager: - Response message for - [AutoMl.ListColumnSpecs][google.cloud.automl.v1beta1.AutoMl.ListColumnSpecs]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListColumnSpecsRequest): - request = service.ListColumnSpecsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_column_specs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListColumnSpecsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_column_spec(self, - request: Optional[Union[service.UpdateColumnSpecRequest, dict]] = None, - *, - column_spec: Optional[gca_column_spec.ColumnSpec] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gca_column_spec.ColumnSpec: - r"""Updates a column spec. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import automl_v1beta1
-
-            async def sample_update_column_spec():
-                # Create a client
-                client = automl_v1beta1.AutoMlAsyncClient()
-
-                # Initialize request argument(s)
-                request = automl_v1beta1.UpdateColumnSpecRequest(
-                )
-
-                # Make the request
-                response = await client.update_column_spec(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.automl_v1beta1.types.UpdateColumnSpecRequest, dict]]):
-                The request object. Request message for
-                [AutoMl.UpdateColumnSpec][google.cloud.automl.v1beta1.AutoMl.UpdateColumnSpec]
-            column_spec (:class:`google.cloud.automl_v1beta1.types.ColumnSpec`):
-                Required. The column spec which
-                replaces the resource on the server.
-
-                This corresponds to the ``column_spec`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.automl_v1beta1.types.ColumnSpec:
-                A representation of a column in a relational table. When listing them, column specs are returned in the same order in which they were
-                given on import. Used by: \* Tables
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [column_spec]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, service.UpdateColumnSpecRequest):
-            request = service.UpdateColumnSpecRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if column_spec is not None:
-            request.column_spec = column_spec
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.update_column_spec]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("column_spec.name", request.column_spec.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
- return response - - async def create_model(self, - request: Optional[Union[service.CreateModelRequest, dict]] = None, - *, - parent: Optional[str] = None, - model: Optional[gca_model.Model] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates a model. Returns a Model in the - [response][google.longrunning.Operation.response] field when it - completes. When you create a model, several model evaluations - are created for it: a global evaluation, and one evaluation for - each annotation spec. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1beta1 - - async def sample_create_model(): - # Create a client - client = automl_v1beta1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1beta1.CreateModelRequest( - parent="parent_value", - ) - - # Make the request - operation = client.create_model(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.automl_v1beta1.types.CreateModelRequest, dict]]): - The request object. Request message for - [AutoMl.CreateModel][google.cloud.automl.v1beta1.AutoMl.CreateModel]. - parent (:class:`str`): - Required. Resource name of the parent - project where the model is being - created. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - model (:class:`google.cloud.automl_v1beta1.types.Model`): - Required. The model to create. - This corresponds to the ``model`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.automl_v1beta1.types.Model` API - proto representing a trained machine learning model. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [parent, model] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.CreateModelRequest): - request = service.CreateModelRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if model is not None: - request.model = model - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_model] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - gca_model.Model, - metadata_type=operations.OperationMetadata, - ) - - # Done; return the response. - return response - - async def get_model(self, - request: Optional[Union[service.GetModelRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> model.Model: - r"""Gets a model. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1beta1 - - async def sample_get_model(): - # Create a client - client = automl_v1beta1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1beta1.GetModelRequest( - name="name_value", - ) - - # Make the request - response = await client.get_model(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.automl_v1beta1.types.GetModelRequest, dict]]): - The request object. Request message for - [AutoMl.GetModel][google.cloud.automl.v1beta1.AutoMl.GetModel]. - name (:class:`str`): - Required. Resource name of the model. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
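As the field descriptions above spell out, the flattened arguments and the ``request`` object are mutually exclusive; the quick-check at the top of each method raises ``ValueError`` when both are supplied. A sketch of the two equivalent calling styles for ``get_model``, assuming a placeholder resource name:

.. code-block:: python

    from google.cloud import automl_v1beta1

    async def fetch_model():
        client = automl_v1beta1.AutoMlAsyncClient()
        name = "projects/my-project/locations/us-central1/models/my-model"

        # Style 1: pass a fully-formed request object.
        request = automl_v1beta1.GetModelRequest(name=name)
        model = await client.get_model(request=request)

        # Style 2: pass the flattened field and let the client build the request.
        model = await client.get_model(name=name)

        # Mixing the styles raises ValueError:
        #     await client.get_model(request=request, name=name)
        return model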
- - Returns: - google.cloud.automl_v1beta1.types.Model: - API proto representing a trained - machine learning model. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.GetModelRequest): - request = service.GetModelRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_model] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_models(self, - request: Optional[Union[service.ListModelsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListModelsAsyncPager: - r"""Lists models. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1beta1 - - async def sample_list_models(): - # Create a client - client = automl_v1beta1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1beta1.ListModelsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_models(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.automl_v1beta1.types.ListModelsRequest, dict]]): - The request object. Request message for - [AutoMl.ListModels][google.cloud.automl.v1beta1.AutoMl.ListModels]. - parent (:class:`str`): - Required. Resource name of the - project, from which to list the models. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.automl_v1beta1.services.auto_ml.pagers.ListModelsAsyncPager: - Response message for - [AutoMl.ListModels][google.cloud.automl.v1beta1.AutoMl.ListModels]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListModelsRequest): - request = service.ListModelsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_models] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListModelsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_model(self, - request: Optional[Union[service.DeleteModelRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes a model. Returns ``google.protobuf.Empty`` in the - [response][google.longrunning.Operation.response] field when it - completes, and ``delete_details`` in the - [metadata][google.longrunning.Operation.metadata] field. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1beta1 - - async def sample_delete_model(): - # Create a client - client = automl_v1beta1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1beta1.DeleteModelRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_model(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.automl_v1beta1.types.DeleteModelRequest, dict]]): - The request object. Request message for - [AutoMl.DeleteModel][google.cloud.automl.v1beta1.AutoMl.DeleteModel]. - name (:class:`str`): - Required. Resource name of the model - being deleted. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.DeleteModelRequest): - request = service.DeleteModelRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_model] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. 
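The ``operation_async.from_gapic`` call that resumes below wraps the raw long-running ``Operation`` into an ``AsyncOperation`` future, which the caller must then resolve. A sketch of driving such a future to completion, assuming a placeholder model name and the usual ``google.api_core`` async-future semantics, where ``result()`` is itself awaitable:

.. code-block:: python

    import asyncio

    from google.cloud import automl_v1beta1

    async def delete_and_wait():
        client = automl_v1beta1.AutoMlAsyncClient()

        # Awaiting the RPC yields an AsyncOperation future, not the result.
        operation = await client.delete_model(
            name="projects/my-project/locations/us-central1/models/my-model",
        )

        print("Waiting for operation to complete...")

        # result() polls until the operation finishes; for delete_model the
        # payload is google.protobuf.empty_pb2.Empty, and progress details
        # live in operation.metadata (operations.OperationMetadata).
        response = await operation.result()
        print(response)

    asyncio.run(delete_and_wait())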
-        response = operation_async.from_gapic(
-            response,
-            self._client._transport.operations_client,
-            empty_pb2.Empty,
-            metadata_type=operations.OperationMetadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def deploy_model(self,
-            request: Optional[Union[service.DeployModelRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> operation_async.AsyncOperation:
-        r"""Deploys a model. If a model is already deployed, deploying it
-        with the same parameters has no effect. Deploying with different
-        parameters (e.g. changing
-
-        [node_number][google.cloud.automl.v1beta1.ImageObjectDetectionModelDeploymentMetadata.node_number])
-        will reset the deployment state without pausing the model's
-        availability.
-
-        Only applicable for Text Classification, Image Object Detection,
-        Tables, and Image Segmentation; all other domains manage
-        deployment automatically.
-
-        Returns an empty response in the
-        [response][google.longrunning.Operation.response] field when it
-        completes.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import automl_v1beta1
-
-            async def sample_deploy_model():
-                # Create a client
-                client = automl_v1beta1.AutoMlAsyncClient()
-
-                # Initialize request argument(s)
-                request = automl_v1beta1.DeployModelRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                operation = client.deploy_model(request=request)
-
-                print("Waiting for operation to complete...")
-
-                response = (await operation).result()
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.automl_v1beta1.types.DeployModelRequest, dict]]):
-                The request object. Request message for
-                [AutoMl.DeployModel][google.cloud.automl.v1beta1.AutoMl.DeployModel].
-            name (:class:`str`):
-                Required. Resource name of the model
-                to deploy.
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.api_core.operation_async.AsyncOperation:
-                An object representing a long-running operation.
-
-                The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
-                empty messages in your APIs. A typical example is to
-                use it as the request or the response type of an API
-                method. For instance:
-
-                   service Foo {
-                     rpc Bar(google.protobuf.Empty) returns
-                     (google.protobuf.Empty);
-
-                   }
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [name]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, service.DeployModelRequest):
-            request = service.DeployModelRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if name is not None:
-            request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.deploy_model]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Wrap the response in an operation future.
-        response = operation_async.from_gapic(
-            response,
-            self._client._transport.operations_client,
-            empty_pb2.Empty,
-            metadata_type=operations.OperationMetadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def undeploy_model(self,
-            request: Optional[Union[service.UndeployModelRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> operation_async.AsyncOperation:
-        r"""Undeploys a model. If the model is not deployed, this method has
-        no effect.
-
-        Only applicable for Text Classification, Image Object Detection
-        and Tables; all other domains manage deployment automatically.
-
-        Returns an empty response in the
-        [response][google.longrunning.Operation.response] field when it
-        completes.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import automl_v1beta1
-
-            async def sample_undeploy_model():
-                # Create a client
-                client = automl_v1beta1.AutoMlAsyncClient()
-
-                # Initialize request argument(s)
-                request = automl_v1beta1.UndeployModelRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                operation = client.undeploy_model(request=request)
-
-                print("Waiting for operation to complete...")
-
-                # ``AsyncOperation.result()`` is a coroutine, so it must be awaited.
-                response = await (await operation).result()
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.automl_v1beta1.types.UndeployModelRequest, dict]]):
-                The request object. Request message for
-                [AutoMl.UndeployModel][google.cloud.automl.v1beta1.AutoMl.UndeployModel].
-            name (:class:`str`):
-                Required. Resource name of the model
-                to undeploy.
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.api_core.operation_async.AsyncOperation:
-                An object representing a long-running operation.
-
-                The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty`,
-                a generic empty message that you can re-use to avoid defining duplicated
-                empty messages in your APIs. A typical example is to
-                use it as the request or the response type of an API
-                method. For instance:
-
-                service Foo {
-                    rpc Bar(google.protobuf.Empty) returns
-                    (google.protobuf.Empty);
-
-                }
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [name]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, service.UndeployModelRequest):
-            request = service.UndeployModelRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if name is not None:
-            request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.undeploy_model]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Wrap the response in an operation future.
-        response = operation_async.from_gapic(
-            response,
-            self._client._transport.operations_client,
-            empty_pb2.Empty,
-            metadata_type=operations.OperationMetadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def export_model(self,
-            request: Optional[Union[service.ExportModelRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            output_config: Optional[io.ModelExportOutputConfig] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> operation_async.AsyncOperation:
-        r"""Exports a trained, "export-able" model to a user-specified
-        Google Cloud Storage location. A model is considered export-able
-        if and only if it has an export format defined for it in
-
-        [ModelExportOutputConfig][google.cloud.automl.v1beta1.ModelExportOutputConfig].
-
-        Returns an empty response in the
-        [response][google.longrunning.Operation.response] field when it
-        completes.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import automl_v1beta1
-
-            async def sample_export_model():
-                # Create a client
-                client = automl_v1beta1.AutoMlAsyncClient()
-
-                # Initialize request argument(s)
-                request = automl_v1beta1.ExportModelRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                operation = client.export_model(request=request)
-
-                print("Waiting for operation to complete...")
-
-                # ``AsyncOperation.result()`` is a coroutine, so it must be awaited.
-                response = await (await operation).result()
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.automl_v1beta1.types.ExportModelRequest, dict]]):
-                The request object. Request message for
-                [AutoMl.ExportModel][google.cloud.automl.v1beta1.AutoMl.ExportModel].
-                Models need to be enabled for exporting, otherwise an
-                error code will be returned.
-            name (:class:`str`):
-                Required. The resource name of the
-                model to export.
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            output_config (:class:`google.cloud.automl_v1beta1.types.ModelExportOutputConfig`):
-                Required. The desired output location
-                and configuration.
-
-                This corresponds to the ``output_config`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.api_core.operation_async.AsyncOperation:
-                An object representing a long-running operation.
-
-                The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty`,
-                a generic empty message that you can re-use to avoid defining duplicated
-                empty messages in your APIs. A typical example is to
-                use it as the request or the response type of an API
-                method. For instance:
-
-                service Foo {
-                    rpc Bar(google.protobuf.Empty) returns
-                    (google.protobuf.Empty);
-
-                }
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [name, output_config]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
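-        # (A plain dict is coerced through the proto-plus message
-        # constructor, which accepts a mapping of field names to values.)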
-        if not isinstance(request, service.ExportModelRequest):
-            request = service.ExportModelRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if name is not None:
-            request.name = name
-        if output_config is not None:
-            request.output_config = output_config
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.export_model]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Wrap the response in an operation future.
-        response = operation_async.from_gapic(
-            response,
-            self._client._transport.operations_client,
-            empty_pb2.Empty,
-            metadata_type=operations.OperationMetadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def export_evaluated_examples(self,
-            request: Optional[Union[service.ExportEvaluatedExamplesRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            output_config: Optional[io.ExportEvaluatedExamplesOutputConfig] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> operation_async.AsyncOperation:
-        r"""Exports examples on which the model was evaluated (i.e. which
-        were in the TEST set of the dataset the model was created from),
-        together with their ground truth annotations and the annotations
-        created (predicted) by the model. The examples, ground truth and
-        predictions are exported in the state they were in at the moment
-        the model was evaluated.
-
-        This export is available only for 30 days after the model
-        evaluation is created.
-
-        Currently only available for Tables.
-
-        Returns an empty response in the
-        [response][google.longrunning.Operation.response] field when it
-        completes.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import automl_v1beta1
-
-            async def sample_export_evaluated_examples():
-                # Create a client
-                client = automl_v1beta1.AutoMlAsyncClient()
-
-                # Initialize request argument(s)
-                request = automl_v1beta1.ExportEvaluatedExamplesRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                operation = client.export_evaluated_examples(request=request)
-
-                print("Waiting for operation to complete...")
-
-                # ``AsyncOperation.result()`` is a coroutine, so it must be awaited.
-                response = await (await operation).result()
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.automl_v1beta1.types.ExportEvaluatedExamplesRequest, dict]]):
-                The request object. Request message for
-                [AutoMl.ExportEvaluatedExamples][google.cloud.automl.v1beta1.AutoMl.ExportEvaluatedExamples].
-            name (:class:`str`):
-                Required. The resource name of the
-                model whose evaluated examples are to be
-                exported.
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            output_config (:class:`google.cloud.automl_v1beta1.types.ExportEvaluatedExamplesOutputConfig`):
-                Required. The desired output location
-                and configuration.
-
-                This corresponds to the ``output_config`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.api_core.operation_async.AsyncOperation:
-                An object representing a long-running operation.
-
-                The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty`,
-                a generic empty message that you can re-use to avoid defining duplicated
-                empty messages in your APIs. A typical example is to
-                use it as the request or the response type of an API
-                method. For instance:
-
-                service Foo {
-                    rpc Bar(google.protobuf.Empty) returns
-                    (google.protobuf.Empty);
-
-                }
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [name, output_config]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, service.ExportEvaluatedExamplesRequest):
-            request = service.ExportEvaluatedExamplesRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if name is not None:
-            request.name = name
-        if output_config is not None:
-            request.output_config = output_config
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.export_evaluated_examples]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Wrap the response in an operation future.
-        response = operation_async.from_gapic(
-            response,
-            self._client._transport.operations_client,
-            empty_pb2.Empty,
-            metadata_type=operations.OperationMetadata,
-        )
-
-        # Done; return the response.
- return response - - async def get_model_evaluation(self, - request: Optional[Union[service.GetModelEvaluationRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> model_evaluation.ModelEvaluation: - r"""Gets a model evaluation. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1beta1 - - async def sample_get_model_evaluation(): - # Create a client - client = automl_v1beta1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1beta1.GetModelEvaluationRequest( - name="name_value", - ) - - # Make the request - response = await client.get_model_evaluation(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.automl_v1beta1.types.GetModelEvaluationRequest, dict]]): - The request object. Request message for - [AutoMl.GetModelEvaluation][google.cloud.automl.v1beta1.AutoMl.GetModelEvaluation]. - name (:class:`str`): - Required. Resource name for the model - evaluation. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.automl_v1beta1.types.ModelEvaluation: - Evaluation results of a model. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.GetModelEvaluationRequest): - request = service.GetModelEvaluationRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_model_evaluation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. 
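-        # (Delegates to the sync client's check; universe validation is
-        # currently disabled and the call returns True unconditionally, per
-        # the b/349488459 note in ``AutoMlClient._validate_universe_domain``.)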
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def list_model_evaluations(self,
-            request: Optional[Union[service.ListModelEvaluationsRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> pagers.ListModelEvaluationsAsyncPager:
-        r"""Lists model evaluations.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import automl_v1beta1
-
-            async def sample_list_model_evaluations():
-                # Create a client
-                client = automl_v1beta1.AutoMlAsyncClient()
-
-                # Initialize request argument(s)
-                request = automl_v1beta1.ListModelEvaluationsRequest(
-                    parent="parent_value",
-                )
-
-                # Make the request; the coroutine resolves to an async pager.
-                page_result = await client.list_model_evaluations(request=request)
-
-                # Handle the response
-                async for response in page_result:
-                    print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.automl_v1beta1.types.ListModelEvaluationsRequest, dict]]):
-                The request object. Request message for
-                [AutoMl.ListModelEvaluations][google.cloud.automl.v1beta1.AutoMl.ListModelEvaluations].
-            parent (:class:`str`):
-                Required. Resource name of the model
-                to list the model evaluations for. If
-                modelId is set as "-", this will list
-                model evaluations across all models of
-                the parent location.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.automl_v1beta1.services.auto_ml.pagers.ListModelEvaluationsAsyncPager:
-                Response message for
-                [AutoMl.ListModelEvaluations][google.cloud.automl.v1beta1.AutoMl.ListModelEvaluations].
-
-                Iterating over this object will yield results and
-                resolve additional pages automatically.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [parent]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
- if not isinstance(request, service.ListModelEvaluationsRequest): - request = service.ListModelEvaluationsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_model_evaluations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListModelEvaluationsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def __aenter__(self) -> "AutoMlAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "AutoMlAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/auto_ml/client.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/auto_ml/client.py deleted file mode 100644 index efcd499384a4..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/auto_ml/client.py +++ /dev/null @@ -1,3661 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
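-#
-# This module defines the synchronous ``AutoMlClient``; it exposes the same
-# RPC surface as the ``AutoMlAsyncClient`` above, without the
-# ``async``/``await`` plumbing.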
-# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.automl_v1beta1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.automl_v1beta1.services.auto_ml import pagers -from google.cloud.automl_v1beta1.types import annotation_spec -from google.cloud.automl_v1beta1.types import classification -from google.cloud.automl_v1beta1.types import column_spec -from google.cloud.automl_v1beta1.types import column_spec as gca_column_spec -from google.cloud.automl_v1beta1.types import data_stats -from google.cloud.automl_v1beta1.types import data_types -from google.cloud.automl_v1beta1.types import dataset -from google.cloud.automl_v1beta1.types import dataset as gca_dataset -from google.cloud.automl_v1beta1.types import detection -from google.cloud.automl_v1beta1.types import image -from google.cloud.automl_v1beta1.types import io -from google.cloud.automl_v1beta1.types import model -from google.cloud.automl_v1beta1.types import model as gca_model -from google.cloud.automl_v1beta1.types import model_evaluation -from google.cloud.automl_v1beta1.types import operations -from google.cloud.automl_v1beta1.types import regression -from google.cloud.automl_v1beta1.types import service -from google.cloud.automl_v1beta1.types import table_spec -from google.cloud.automl_v1beta1.types import table_spec as gca_table_spec -from google.cloud.automl_v1beta1.types import tables -from google.cloud.automl_v1beta1.types import text -from google.cloud.automl_v1beta1.types import text_extraction -from google.cloud.automl_v1beta1.types import text_sentiment -from google.cloud.automl_v1beta1.types import translation -from google.cloud.automl_v1beta1.types import video -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import AutoMlTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import AutoMlGrpcTransport -from .transports.grpc_asyncio import AutoMlGrpcAsyncIOTransport -from .transports.rest import AutoMlRestTransport - - -class AutoMlClientMeta(type): - """Metaclass for the AutoMl client. - - This provides class-level methods for building and retrieving - support objects (e.g. 
transport) without polluting the client instance
-    objects.
-    """
-    _transport_registry = OrderedDict()  # type: Dict[str, Type[AutoMlTransport]]
-    _transport_registry["grpc"] = AutoMlGrpcTransport
-    _transport_registry["grpc_asyncio"] = AutoMlGrpcAsyncIOTransport
-    _transport_registry["rest"] = AutoMlRestTransport
-
-    def get_transport_class(cls,
-            label: Optional[str] = None,
-        ) -> Type[AutoMlTransport]:
-        """Returns an appropriate transport class.
-
-        Args:
-            label: The name of the desired transport. If none is
-                provided, then the first transport in the registry is used.
-
-        Returns:
-            The transport class to use.
-        """
-        # If a specific transport is requested, return that one.
-        if label:
-            return cls._transport_registry[label]
-
-        # No transport is requested; return the default (that is, the first one
-        # in the dictionary).
-        return next(iter(cls._transport_registry.values()))
-
-
-class AutoMlClient(metaclass=AutoMlClientMeta):
-    """AutoML Server API.
-
-    The resource names are assigned by the server. The server never
-    reuses names that it has created after the resources with those
-    names are deleted.
-
-    An ID of a resource is the last element of the item's resource name.
-    For
-    ``projects/{project_id}/locations/{location_id}/datasets/{dataset_id}``,
-    the ID of the item is ``{dataset_id}``.
-
-    Currently the only supported ``location_id`` is "us-central1".
-
-    On any input that is documented to expect a string parameter in
-    snake_case or kebab-case, either of those cases is accepted.
-    """
-
-    @staticmethod
-    def _get_default_mtls_endpoint(api_endpoint):
-        """Converts an API endpoint to its mTLS endpoint.
-
-        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
-        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
-        Args:
-            api_endpoint (Optional[str]): the api endpoint to convert.
-        Returns:
-            str: converted mTLS api endpoint.
-        """
-        if not api_endpoint:
-            return api_endpoint
-
-        mtls_endpoint_re = re.compile(
-            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
-        )
-
-        m = mtls_endpoint_re.match(api_endpoint)
-        name, mtls, sandbox, googledomain = m.groups()
-        if mtls or not googledomain:
-            return api_endpoint
-
-        if sandbox:
-            return api_endpoint.replace(
-                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
-            )
-
-        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
-
-    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
-    DEFAULT_ENDPOINT = "automl.googleapis.com"
-    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
-        DEFAULT_ENDPOINT
-    )
-
-    _DEFAULT_ENDPOINT_TEMPLATE = "automl.{UNIVERSE_DOMAIN}"
-    _DEFAULT_UNIVERSE = "googleapis.com"
-
-    @classmethod
-    def from_service_account_info(cls, info: dict, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            info.
-
-        Args:
-            info (dict): The service account private key info.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            AutoMlClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_info(info)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    @classmethod
-    def from_service_account_file(cls, filename: str, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            file.
-
-        Args:
-            filename (str): The path to the service account private key JSON
-                file.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            AutoMlClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_file(
-            filename)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    from_service_account_json = from_service_account_file
-
-    @property
-    def transport(self) -> AutoMlTransport:
-        """Returns the transport used by the client instance.
-
-        Returns:
-            AutoMlTransport: The transport used by the client
-                instance.
-        """
-        return self._transport
-
-    @staticmethod
-    def annotation_spec_path(project: str,location: str,dataset: str,annotation_spec: str,) -> str:
-        """Returns a fully-qualified annotation_spec string."""
-        return "projects/{project}/locations/{location}/datasets/{dataset}/annotationSpecs/{annotation_spec}".format(project=project, location=location, dataset=dataset, annotation_spec=annotation_spec, )
-
-    @staticmethod
-    def parse_annotation_spec_path(path: str) -> Dict[str,str]:
-        """Parses an annotation_spec path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)/annotationSpecs/(?P<annotation_spec>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def column_spec_path(project: str,location: str,dataset: str,table_spec: str,column_spec: str,) -> str:
-        """Returns a fully-qualified column_spec string."""
-        return "projects/{project}/locations/{location}/datasets/{dataset}/tableSpecs/{table_spec}/columnSpecs/{column_spec}".format(project=project, location=location, dataset=dataset, table_spec=table_spec, column_spec=column_spec, )
-
-    @staticmethod
-    def parse_column_spec_path(path: str) -> Dict[str,str]:
-        """Parses a column_spec path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)/tableSpecs/(?P<table_spec>.+?)/columnSpecs/(?P<column_spec>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def dataset_path(project: str,location: str,dataset: str,) -> str:
-        """Returns a fully-qualified dataset string."""
-        return "projects/{project}/locations/{location}/datasets/{dataset}".format(project=project, location=location, dataset=dataset, )
-
-    @staticmethod
-    def parse_dataset_path(path: str) -> Dict[str,str]:
-        """Parses a dataset path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def model_path(project: str,location: str,model: str,) -> str:
-        """Returns a fully-qualified model string."""
-        return "projects/{project}/locations/{location}/models/{model}".format(project=project, location=location, model=model, )
-
-    @staticmethod
-    def parse_model_path(path: str) -> Dict[str,str]:
-        """Parses a model path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/models/(?P<model>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def model_evaluation_path(project: str,location: str,model: str,model_evaluation: str,) -> str:
-        """Returns a fully-qualified model_evaluation string."""
-        return "projects/{project}/locations/{location}/models/{model}/modelEvaluations/{model_evaluation}".format(project=project, location=location, model=model, model_evaluation=model_evaluation, )
-
-    @staticmethod
-    def parse_model_evaluation_path(path: str) -> Dict[str,str]:
-        """Parses a model_evaluation path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/models/(?P<model>.+?)/modelEvaluations/(?P<model_evaluation>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def table_spec_path(project: str,location: str,dataset: str,table_spec: str,) -> str:
-        """Returns a fully-qualified table_spec string."""
-        return "projects/{project}/locations/{location}/datasets/{dataset}/tableSpecs/{table_spec}".format(project=project, location=location, dataset=dataset, table_spec=table_spec, )
-
-    @staticmethod
-    def parse_table_spec_path(path: str) -> Dict[str,str]:
-        """Parses a table_spec path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/datasets/(?P<dataset>.+?)/tableSpecs/(?P<table_spec>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_billing_account_path(billing_account: str, ) -> str:
-        """Returns a fully-qualified billing_account string."""
-        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
-
-    @staticmethod
-    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
-        """Parse a billing_account path into its component segments."""
-        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_folder_path(folder: str, ) -> str:
-        """Returns a fully-qualified folder string."""
-        return "folders/{folder}".format(folder=folder, )
-
-    @staticmethod
-    def parse_common_folder_path(path: str) -> Dict[str,str]:
-        """Parse a folder path into its component segments."""
-        m = re.match(r"^folders/(?P<folder>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_organization_path(organization: str, ) -> str:
-        """Returns a fully-qualified organization string."""
-        return "organizations/{organization}".format(organization=organization, )
-
-    @staticmethod
-    def parse_common_organization_path(path: str) -> Dict[str,str]:
-        """Parse an organization path into its component segments."""
-        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_project_path(project: str, ) -> str:
-        """Returns a fully-qualified project string."""
-        return "projects/{project}".format(project=project, )
-
-    @staticmethod
-    def parse_common_project_path(path: str) -> Dict[str,str]:
-        """Parse a project path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_location_path(project: str, location: str, ) -> str:
-        """Returns a fully-qualified location string."""
-        return "projects/{project}/locations/{location}".format(project=project, location=location, )
-
-    @staticmethod
-    def parse_common_location_path(path: str) -> Dict[str,str]:
-        """Parse a location path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @classmethod
-    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
-        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
-
-        The client cert source is determined in the following order:
-        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
-        client cert source is None.
-        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
-        default client cert source exists, use the default one; otherwise the client cert
-        source is None.
-
-        The API endpoint is determined in the following order:
-        (1) if `client_options.api_endpoint` is provided, use the provided one.
-        (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
-        default mTLS endpoint; if the environment variable is "never", use the default API
-        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
-        use the default API endpoint.
-
-        More details can be found at https://google.aip.dev/auth/4114.
-
-        Args:
-            client_options (google.api_core.client_options.ClientOptions): Custom options for the
-                client. Only the `api_endpoint` and `client_cert_source` properties may be used
-                in this method.
-
-        Returns:
-            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
-                client cert source to use.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
-        """
-
-        warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
-            DeprecationWarning)
-        if client_options is None:
-            client_options = client_options_lib.ClientOptions()
-        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
-        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
-        if use_client_cert not in ("true", "false"):
-            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
-        if use_mtls_endpoint not in ("auto", "never", "always"):
-            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
-
-        # Figure out the client cert source to use.
-        client_cert_source = None
-        if use_client_cert == "true":
-            if client_options.client_cert_source:
-                client_cert_source = client_options.client_cert_source
-            elif mtls.has_default_client_cert_source():
-                client_cert_source = mtls.default_client_cert_source()
-
-        # Figure out which api endpoint to use.
-        if client_options.api_endpoint is not None:
-            api_endpoint = client_options.api_endpoint
-        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
-            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
-        else:
-            api_endpoint = cls.DEFAULT_ENDPOINT
-
-        return api_endpoint, client_cert_source
-
-    @staticmethod
-    def _read_environment_variables():
-        """Returns the environment variables used by the client.
-
-        Returns:
-            Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
-            GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.
-
-        Raises:
-            ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
-                any of ["true", "false"].
-            google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
-                is not any of ["auto", "never", "always"].
- """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = AutoMlClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = AutoMlClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = AutoMlClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. - """ - universe_domain = AutoMlClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. 
-
-        Returns:
-            bool: True iff the configured universe domain is valid.
-
-        Raises:
-            ValueError: If the configured universe domain is not valid.
-        """
-
-        # NOTE (b/349488459): universe validation is disabled until further notice.
-        return True
-
-    def _add_cred_info_for_auth_errors(
-        self,
-        error: core_exceptions.GoogleAPICallError
-    ) -> None:
-        """Adds a credential info string to the error details for 401/403/404 errors.
-
-        Args:
-            error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info to.
-        """
-        if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]:
-            return
-
-        cred = self._transport._credentials
-
-        # get_cred_info is only available in google-auth>=2.35.0
-        if not hasattr(cred, "get_cred_info"):
-            return
-
-        # ignore the type check since pypy test fails when get_cred_info
-        # is not available
-        cred_info = cred.get_cred_info()  # type: ignore
-        if cred_info and hasattr(error._details, "append"):
-            error._details.append(json.dumps(cred_info))
-
-    @property
-    def api_endpoint(self):
-        """Return the API endpoint used by the client instance.
-
-        Returns:
-            str: The API endpoint used by the client instance.
-        """
-        return self._api_endpoint
-
-    @property
-    def universe_domain(self) -> str:
-        """Return the universe domain used by the client instance.
-
-        Returns:
-            str: The universe domain used by the client instance.
-        """
-        return self._universe_domain
-
-    def __init__(self, *,
-            credentials: Optional[ga_credentials.Credentials] = None,
-            transport: Optional[Union[str, AutoMlTransport, Callable[..., AutoMlTransport]]] = None,
-            client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            ) -> None:
-        """Instantiates the AutoMl client.
-
-        Args:
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-            transport (Optional[Union[str,AutoMlTransport,Callable[..., AutoMlTransport]]]):
-                The transport to use, or a Callable that constructs and returns a new transport.
-                If a Callable is given, it will be called with the same set of initialization
-                arguments as used in the AutoMlTransport constructor.
-                If set to None, a transport is chosen automatically.
-            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
-                Custom options for the client.
-
-                1. The ``api_endpoint`` property can be used to override the
-                default endpoint provided by the client when ``transport`` is
-                not explicitly provided. Only if this property is not set and
-                ``transport`` was not explicitly provided, the endpoint is
-                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
-                variable, which has one of the following values:
-                "always" (always use the default mTLS endpoint), "never" (always
-                use the default regular endpoint) and "auto" (auto-switch to the
-                default mTLS endpoint if a client certificate is present; this is
-                the default value).
-
-                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
-                is "true", then the ``client_cert_source`` property can be used
-                to provide a client certificate for mTLS transport. If
-                not provided, the default SSL client certificate will be used if
-                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
-                set, no client certificate will be used.
-
-                3.
The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) - - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = AutoMlClient._read_environment_variables() - self._client_cert_source = AutoMlClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = AutoMlClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER - # Setup logging. - client_logging.initialize_logging() - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, AutoMlTransport) - if transport_provided: - # transport is a AutoMlTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = cast(AutoMlTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - AutoMlClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[AutoMlTransport], Callable[..., AutoMlTransport]] = ( - AutoMlClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., AutoMlTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.automl_v1beta1.AutoMlClient`.", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "credentialsType": None, - } - ) - - def create_dataset(self, - request: Optional[Union[service.CreateDatasetRequest, dict]] = None, - *, - parent: Optional[str] = None, - dataset: Optional[gca_dataset.Dataset] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gca_dataset.Dataset: - r"""Creates a dataset. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1beta1 - - def sample_create_dataset(): - # Create a client - client = automl_v1beta1.AutoMlClient() - - # Initialize request argument(s) - dataset = automl_v1beta1.Dataset() - dataset.translation_dataset_metadata.source_language_code = "source_language_code_value" - dataset.translation_dataset_metadata.target_language_code = "target_language_code_value" - - request = automl_v1beta1.CreateDatasetRequest( - parent="parent_value", - dataset=dataset, - ) - - # Make the request - response = client.create_dataset(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.automl_v1beta1.types.CreateDatasetRequest, dict]): - The request object. 
Request message for - [AutoMl.CreateDataset][google.cloud.automl.v1beta1.AutoMl.CreateDataset]. - parent (str): - Required. The resource name of the - project to create the dataset for. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - dataset (google.cloud.automl_v1beta1.types.Dataset): - Required. The dataset to create. - This corresponds to the ``dataset`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.automl_v1beta1.types.Dataset: - A workspace for solving a single, - particular machine learning (ML) - problem. A workspace contains examples - that may be annotated. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, dataset] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.CreateDatasetRequest): - request = service.CreateDatasetRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if dataset is not None: - request.dataset = dataset - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_dataset] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_dataset(self, - request: Optional[Union[service.GetDatasetRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dataset.Dataset: - r"""Gets a dataset. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
-    def get_dataset(self,
-            request: Optional[Union[service.GetDatasetRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> dataset.Dataset:
-        r"""Gets a dataset.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import automl_v1beta1
-
-            def sample_get_dataset():
-                # Create a client
-                client = automl_v1beta1.AutoMlClient()
-
-                # Initialize request argument(s)
-                request = automl_v1beta1.GetDatasetRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                response = client.get_dataset(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.automl_v1beta1.types.GetDatasetRequest, dict]):
-                The request object. Request message for
-                [AutoMl.GetDataset][google.cloud.automl.v1beta1.AutoMl.GetDataset].
-            name (str):
-                Required. The resource name of the
-                dataset to retrieve.
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.automl_v1beta1.types.Dataset:
-                A workspace for solving a single,
-                particular machine learning (ML)
-                problem. A workspace contains examples
-                that may be annotated.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [name]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, service.GetDatasetRequest):
-            request = service.GetDatasetRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if name is not None:
-                request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.get_dataset]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def list_datasets(self,
-            request: Optional[Union[service.ListDatasetsRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> pagers.ListDatasetsPager:
-        r"""Lists datasets in a project.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import automl_v1beta1
-
-            def sample_list_datasets():
-                # Create a client
-                client = automl_v1beta1.AutoMlClient()
-
-                # Initialize request argument(s)
-                request = automl_v1beta1.ListDatasetsRequest(
-                    parent="parent_value",
-                )
-
-                # Make the request
-                page_result = client.list_datasets(request=request)
-
-                # Handle the response
-                for response in page_result:
-                    print(response)
-
-        Args:
-            request (Union[google.cloud.automl_v1beta1.types.ListDatasetsRequest, dict]):
-                The request object. Request message for
-                [AutoMl.ListDatasets][google.cloud.automl.v1beta1.AutoMl.ListDatasets].
-            parent (str):
-                Required. The resource name of the
-                project from which to list datasets.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.automl_v1beta1.services.auto_ml.pagers.ListDatasetsPager:
-                Response message for
-                [AutoMl.ListDatasets][google.cloud.automl.v1beta1.AutoMl.ListDatasets].
-
-                Iterating over this object will yield results and
-                resolve additional pages automatically.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [parent]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, service.ListDatasetsRequest):
-            request = service.ListDatasetsRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if parent is not None:
-                request.parent = parent
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.list_datasets]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # This method is paged; wrap the response in a pager, which provides
-        # an `__iter__` convenience method.
-        response = pagers.ListDatasetsPager(
-            method=rpc,
-            request=request,
-            response=response,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
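The pager returned above fetches follow-up pages lazily during iteration; the standard GAPIC pager surface also exposes page-level iteration. A sketch (the parent value is a placeholder):

.. code-block:: python

    from google.cloud import automl_v1beta1

    client = automl_v1beta1.AutoMlClient()
    pager = client.list_datasets(parent="projects/my-project/locations/us-central1")  # placeholder

    # Plain iteration yields Dataset messages across all pages.
    for ds in pager:
        print(ds.name)

    # Page-by-page iteration; each page is a ListDatasetsResponse.
    for page in pager.pages:
        print(len(page.datasets))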
-    def update_dataset(self,
-            request: Optional[Union[service.UpdateDatasetRequest, dict]] = None,
-            *,
-            dataset: Optional[gca_dataset.Dataset] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> gca_dataset.Dataset:
-        r"""Updates a dataset.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import automl_v1beta1
-
-            def sample_update_dataset():
-                # Create a client
-                client = automl_v1beta1.AutoMlClient()
-
-                # Initialize request argument(s)
-                dataset = automl_v1beta1.Dataset()
-                dataset.translation_dataset_metadata.source_language_code = "source_language_code_value"
-                dataset.translation_dataset_metadata.target_language_code = "target_language_code_value"
-
-                request = automl_v1beta1.UpdateDatasetRequest(
-                    dataset=dataset,
-                )
-
-                # Make the request
-                response = client.update_dataset(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.automl_v1beta1.types.UpdateDatasetRequest, dict]):
-                The request object. Request message for
-                [AutoMl.UpdateDataset][google.cloud.automl.v1beta1.AutoMl.UpdateDataset]
-            dataset (google.cloud.automl_v1beta1.types.Dataset):
-                Required. The dataset which replaces
-                the resource on the server.
-
-                This corresponds to the ``dataset`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.automl_v1beta1.types.Dataset:
-                A workspace for solving a single,
-                particular machine learning (ML)
-                problem. A workspace contains examples
-                that may be annotated.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [dataset]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, service.UpdateDatasetRequest):
-            request = service.UpdateDatasetRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if dataset is not None:
-                request.dataset = dataset
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.update_dataset]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("dataset.name", request.dataset.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def delete_dataset(self,
-            request: Optional[Union[service.DeleteDatasetRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> operation.Operation:
-        r"""Deletes a dataset and all of its contents. Returns empty
-        response in the
-        [response][google.longrunning.Operation.response] field when it
-        completes, and ``delete_details`` in the
-        [metadata][google.longrunning.Operation.metadata] field.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import automl_v1beta1
-
-            def sample_delete_dataset():
-                # Create a client
-                client = automl_v1beta1.AutoMlClient()
-
-                # Initialize request argument(s)
-                request = automl_v1beta1.DeleteDatasetRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                operation = client.delete_dataset(request=request)
-
-                print("Waiting for operation to complete...")
-
-                response = operation.result()
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.automl_v1beta1.types.DeleteDatasetRequest, dict]):
-                The request object. Request message for
-                [AutoMl.DeleteDataset][google.cloud.automl.v1beta1.AutoMl.DeleteDataset].
-            name (str):
-                Required. The resource name of the
-                dataset to delete.
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.api_core.operation.Operation:
-                An object representing a long-running operation.
-
-                The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
-                empty messages in your APIs. A typical example is to
-                use it as the request or the response type of an API
-                method. For instance:
-
-                service Foo {
-                    rpc Bar(google.protobuf.Empty) returns
-                    (google.protobuf.Empty);
-
-                }
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [name]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, service.DeleteDatasetRequest):
-            request = service.DeleteDatasetRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if name is not None:
-                request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.delete_dataset]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Wrap the response in an operation future.
-        response = operation.from_gapic(
-            response,
-            self._transport.operations_client,
-            empty_pb2.Empty,
-            metadata_type=operations.OperationMetadata,
-        )
-
-        # Done; return the response.
-        return response
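The ``operation.from_gapic`` wrapper above yields a future; a sketch of waiting on the delete and inspecting its typed metadata (the resource name is a placeholder):

.. code-block:: python

    from google.cloud import automl_v1beta1

    client = automl_v1beta1.AutoMlClient()
    op = client.delete_dataset(
        name="projects/my-project/locations/us-central1/datasets/TBL123",  # placeholder
    )

    # metadata_type=operations.OperationMetadata above means this is a typed
    # OperationMetadata message (it carries delete_details on completion).
    print(op.metadata)

    # result() blocks until the server finishes; the result type is Empty.
    op.result(timeout=300)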
-    def import_data(self,
-            request: Optional[Union[service.ImportDataRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            input_config: Optional[io.InputConfig] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> operation.Operation:
-        r"""Imports data into a dataset. For Tables this method can only be
-        called on an empty Dataset.
-
-        For Tables:
-
-        -  A
-           [schema_inference_version][google.cloud.automl.v1beta1.InputConfig.params]
-           parameter must be explicitly set. Returns an empty response
-           in the [response][google.longrunning.Operation.response]
-           field when it completes.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import automl_v1beta1
-
-            def sample_import_data():
-                # Create a client
-                client = automl_v1beta1.AutoMlClient()
-
-                # Initialize request argument(s)
-                request = automl_v1beta1.ImportDataRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                operation = client.import_data(request=request)
-
-                print("Waiting for operation to complete...")
-
-                response = operation.result()
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.automl_v1beta1.types.ImportDataRequest, dict]):
-                The request object. Request message for
-                [AutoMl.ImportData][google.cloud.automl.v1beta1.AutoMl.ImportData].
-            name (str):
-                Required. Dataset name. Dataset must
-                already exist. All imported annotations
-                and examples will be added.
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            input_config (google.cloud.automl_v1beta1.types.InputConfig):
-                Required. The desired input location
-                and its domain specific semantics, if
-                any.
-
-                This corresponds to the ``input_config`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.api_core.operation.Operation:
-                An object representing a long-running operation.
-
-                The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
-                empty messages in your APIs. A typical example is to
-                use it as the request or the response type of an API
-                method. For instance:
-
-                service Foo {
-                    rpc Bar(google.protobuf.Empty) returns
-                    (google.protobuf.Empty);
-
-                }
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [name, input_config]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, service.ImportDataRequest):
-            request = service.ImportDataRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if name is not None:
-                request.name = name
-            if input_config is not None:
-                request.input_config = input_config
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.import_data]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Wrap the response in an operation future.
-        response = operation.from_gapic(
-            response,
-            self._transport.operations_client,
-            empty_pb2.Empty,
-            metadata_type=operations.OperationMetadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def export_data(self,
-            request: Optional[Union[service.ExportDataRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            output_config: Optional[io.OutputConfig] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> operation.Operation:
-        r"""Exports dataset's data to the provided output location. Returns
-        an empty response in the
-        [response][google.longrunning.Operation.response] field when it
-        completes.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import automl_v1beta1
-
-            def sample_export_data():
-                # Create a client
-                client = automl_v1beta1.AutoMlClient()
-
-                # Initialize request argument(s)
-                request = automl_v1beta1.ExportDataRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                operation = client.export_data(request=request)
-
-                print("Waiting for operation to complete...")
-
-                response = operation.result()
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.automl_v1beta1.types.ExportDataRequest, dict]):
-                The request object. Request message for
-                [AutoMl.ExportData][google.cloud.automl.v1beta1.AutoMl.ExportData].
-            name (str):
-                Required. The resource name of the
-                dataset.
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            output_config (google.cloud.automl_v1beta1.types.OutputConfig):
-                Required. The desired output
-                location.
-
-                This corresponds to the ``output_config`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.api_core.operation.Operation:
-                An object representing a long-running operation.
-
-                The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
-                empty messages in your APIs. A typical example is to
-                use it as the request or the response type of an API
-                method. For instance:
-
-                service Foo {
-                    rpc Bar(google.protobuf.Empty) returns
-                    (google.protobuf.Empty);
-
-                }
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [name, output_config]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, service.ExportDataRequest):
-            request = service.ExportDataRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if name is not None:
-                request.name = name
-            if output_config is not None:
-                request.output_config = output_config
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.export_data]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Wrap the response in an operation future.
-        response = operation.from_gapic(
-            response,
-            self._transport.operations_client,
-            empty_pb2.Empty,
-            metadata_type=operations.OperationMetadata,
-        )
-
-        # Done; return the response.
-        return response
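Because ``retry`` and ``timeout`` pass straight through to the wrapped RPC, callers can override the generated defaults per call; a sketch using ``google.api_core.retry`` (the choice of retryable exception and the numbers are illustrative):

.. code-block:: python

    from google.api_core import exceptions
    from google.api_core import retry as retries
    from google.cloud import automl_v1beta1

    client = automl_v1beta1.AutoMlClient()

    # Retry only on transient unavailability, with exponential backoff.
    custom_retry = retries.Retry(
        predicate=retries.if_exception_type(exceptions.ServiceUnavailable),
        initial=1.0,
        maximum=30.0,
        multiplier=2.0,
        deadline=120.0,
    )

    op = client.export_data(
        request=automl_v1beta1.ExportDataRequest(name="name_value"),
        retry=custom_retry,
        timeout=60.0,
    )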
-    def get_annotation_spec(self,
-            request: Optional[Union[service.GetAnnotationSpecRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> annotation_spec.AnnotationSpec:
-        r"""Gets an annotation spec.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import automl_v1beta1
-
-            def sample_get_annotation_spec():
-                # Create a client
-                client = automl_v1beta1.AutoMlClient()
-
-                # Initialize request argument(s)
-                request = automl_v1beta1.GetAnnotationSpecRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                response = client.get_annotation_spec(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.automl_v1beta1.types.GetAnnotationSpecRequest, dict]):
-                The request object. Request message for
-                [AutoMl.GetAnnotationSpec][google.cloud.automl.v1beta1.AutoMl.GetAnnotationSpec].
-            name (str):
-                Required. The resource name of the
-                annotation spec to retrieve.
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.automl_v1beta1.types.AnnotationSpec:
-                A definition of an annotation spec.
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [name]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, service.GetAnnotationSpecRequest):
-            request = service.GetAnnotationSpecRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if name is not None:
-                request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.get_annotation_spec]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def get_table_spec(self,
-            request: Optional[Union[service.GetTableSpecRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> table_spec.TableSpec:
-        r"""Gets a table spec.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import automl_v1beta1
-
-            def sample_get_table_spec():
-                # Create a client
-                client = automl_v1beta1.AutoMlClient()
-
-                # Initialize request argument(s)
-                request = automl_v1beta1.GetTableSpecRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                response = client.get_table_spec(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.automl_v1beta1.types.GetTableSpecRequest, dict]):
-                The request object. Request message for
-                [AutoMl.GetTableSpec][google.cloud.automl.v1beta1.AutoMl.GetTableSpec].
-            name (str):
-                Required. The resource name of the
-                table spec to retrieve.
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.automl_v1beta1.types.TableSpec:
-                A specification of a relational table.
-                The table's schema is represented via its child
-                column specs. It is pre-populated as part of
-                ImportData by schema inference algorithm, the version
-                of which is a required parameter of ImportData
-                InputConfig. Note: While working with a table, at
-                times the schema may be inconsistent with the data in
-                the table (e.g. string in a FLOAT64 column). The
-                consistency validation is done upon creation of a
-                model. Used by: \* Tables
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [name]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, service.GetTableSpecRequest):
-            request = service.GetTableSpecRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if name is not None:
-                request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.get_table_spec]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def list_table_specs(self,
-            request: Optional[Union[service.ListTableSpecsRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> pagers.ListTableSpecsPager:
-        r"""Lists table specs in a dataset.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import automl_v1beta1
-
-            def sample_list_table_specs():
-                # Create a client
-                client = automl_v1beta1.AutoMlClient()
-
-                # Initialize request argument(s)
-                request = automl_v1beta1.ListTableSpecsRequest(
-                    parent="parent_value",
-                )
-
-                # Make the request
-                page_result = client.list_table_specs(request=request)
-
-                # Handle the response
-                for response in page_result:
-                    print(response)
-
-        Args:
-            request (Union[google.cloud.automl_v1beta1.types.ListTableSpecsRequest, dict]):
-                The request object. Request message for
-                [AutoMl.ListTableSpecs][google.cloud.automl.v1beta1.AutoMl.ListTableSpecs].
-            parent (str):
-                Required. The resource name of the
-                dataset to list table specs from.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.automl_v1beta1.services.auto_ml.pagers.ListTableSpecsPager:
-                Response message for
-                [AutoMl.ListTableSpecs][google.cloud.automl.v1beta1.AutoMl.ListTableSpecs].
-
-                Iterating over this object will yield results and
-                resolve additional pages automatically.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [parent]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, service.ListTableSpecsRequest):
-            request = service.ListTableSpecsRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if parent is not None:
-                request.parent = parent
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.list_table_specs]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # This method is paged; wrap the response in a pager, which provides
-        # an `__iter__` convenience method.
-        response = pagers.ListTableSpecsPager(
-            method=rpc,
-            request=request,
-            response=response,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def update_table_spec(self,
-            request: Optional[Union[service.UpdateTableSpecRequest, dict]] = None,
-            *,
-            table_spec: Optional[gca_table_spec.TableSpec] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> gca_table_spec.TableSpec:
-        r"""Updates a table spec.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import automl_v1beta1
-
-            def sample_update_table_spec():
-                # Create a client
-                client = automl_v1beta1.AutoMlClient()
-
-                # Initialize request argument(s)
-                request = automl_v1beta1.UpdateTableSpecRequest(
-                )
-
-                # Make the request
-                response = client.update_table_spec(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.automl_v1beta1.types.UpdateTableSpecRequest, dict]):
-                The request object. Request message for
-                [AutoMl.UpdateTableSpec][google.cloud.automl.v1beta1.AutoMl.UpdateTableSpec]
-            table_spec (google.cloud.automl_v1beta1.types.TableSpec):
-                Required. The table spec which
-                replaces the resource on the server.
-
-                This corresponds to the ``table_spec`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.automl_v1beta1.types.TableSpec:
-                A specification of a relational table.
-                The table's schema is represented via its child
-                column specs. It is pre-populated as part of
-                ImportData by schema inference algorithm, the version
-                of which is a required parameter of ImportData
-                InputConfig. Note: While working with a table, at
-                times the schema may be inconsistent with the data in
-                the table (e.g. string in a FLOAT64 column). The
-                consistency validation is done upon creation of a
-                model. Used by: \* Tables
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [table_spec]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, service.UpdateTableSpecRequest):
-            request = service.UpdateTableSpecRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if table_spec is not None:
-                request.table_spec = table_spec
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.update_table_spec]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("table_spec.name", request.table_spec.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
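The routing-header block that recurs in each method above compiles the named fields into a single ``x-goog-request-params`` metadata entry; a sketch of the helper in isolation (the resource name is illustrative):

.. code-block:: python

    from google.api_core import gapic_v1

    params = gapic_v1.routing_header.to_grpc_metadata((
        ("table_spec.name", "projects/p/locations/l/datasets/d/tableSpecs/t"),
    ))
    # Produces an ("x-goog-request-params", "table_spec.name=...") pair, with
    # the value URL-encoded, so the backend can route on the resource name.
    print(params)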
-    def get_column_spec(self,
-            request: Optional[Union[service.GetColumnSpecRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> column_spec.ColumnSpec:
-        r"""Gets a column spec.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import automl_v1beta1
-
-            def sample_get_column_spec():
-                # Create a client
-                client = automl_v1beta1.AutoMlClient()
-
-                # Initialize request argument(s)
-                request = automl_v1beta1.GetColumnSpecRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                response = client.get_column_spec(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.automl_v1beta1.types.GetColumnSpecRequest, dict]):
-                The request object. Request message for
-                [AutoMl.GetColumnSpec][google.cloud.automl.v1beta1.AutoMl.GetColumnSpec].
-            name (str):
-                Required. The resource name of the
-                column spec to retrieve.
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.automl_v1beta1.types.ColumnSpec:
-                A representation of a column in a relational table. When listing them, column specs are returned in the same order in which they were
-                given on import . Used by: \* Tables
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [name]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, service.GetColumnSpecRequest):
-            request = service.GetColumnSpecRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if name is not None:
-                request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.get_column_spec]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def list_column_specs(self,
-            request: Optional[Union[service.ListColumnSpecsRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> pagers.ListColumnSpecsPager:
-        r"""Lists column specs in a table spec.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import automl_v1beta1
-
-            def sample_list_column_specs():
-                # Create a client
-                client = automl_v1beta1.AutoMlClient()
-
-                # Initialize request argument(s)
-                request = automl_v1beta1.ListColumnSpecsRequest(
-                    parent="parent_value",
-                )
-
-                # Make the request
-                page_result = client.list_column_specs(request=request)
-
-                # Handle the response
-                for response in page_result:
-                    print(response)
-
-        Args:
-            request (Union[google.cloud.automl_v1beta1.types.ListColumnSpecsRequest, dict]):
-                The request object. Request message for
-                [AutoMl.ListColumnSpecs][google.cloud.automl.v1beta1.AutoMl.ListColumnSpecs].
-            parent (str):
-                Required. The resource name of the
-                table spec to list column specs from.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.automl_v1beta1.services.auto_ml.pagers.ListColumnSpecsPager:
-                Response message for
-                [AutoMl.ListColumnSpecs][google.cloud.automl.v1beta1.AutoMl.ListColumnSpecs].
-
-                Iterating over this object will yield results and
-                resolve additional pages automatically.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [parent]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, service.ListColumnSpecsRequest):
-            request = service.ListColumnSpecsRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if parent is not None:
-                request.parent = parent
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.list_column_specs]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # This method is paged; wrap the response in a pager, which provides
-        # an `__iter__` convenience method.
-        response = pagers.ListColumnSpecsPager(
-            method=rpc,
-            request=request,
-            response=response,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
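As the ``metadata`` parameter docs above note, values are strings except for ``-bin`` suffixed keys, which take bytes; a sketch of attaching custom metadata to a call (the header names are illustrative):

.. code-block:: python

    from google.cloud import automl_v1beta1

    client = automl_v1beta1.AutoMlClient()

    response = client.get_column_spec(
        name="name_value",
        metadata=(
            ("x-custom-header", "trace-42"),        # str value for a normal key
            ("x-custom-payload-bin", b"\x00\x01"),  # bytes value for a `-bin` key
        ),
    )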
-    def update_column_spec(self,
-            request: Optional[Union[service.UpdateColumnSpecRequest, dict]] = None,
-            *,
-            column_spec: Optional[gca_column_spec.ColumnSpec] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> gca_column_spec.ColumnSpec:
-        r"""Updates a column spec.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import automl_v1beta1
-
-            def sample_update_column_spec():
-                # Create a client
-                client = automl_v1beta1.AutoMlClient()
-
-                # Initialize request argument(s)
-                request = automl_v1beta1.UpdateColumnSpecRequest(
-                )
-
-                # Make the request
-                response = client.update_column_spec(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.automl_v1beta1.types.UpdateColumnSpecRequest, dict]):
-                The request object. Request message for
-                [AutoMl.UpdateColumnSpec][google.cloud.automl.v1beta1.AutoMl.UpdateColumnSpec]
-            column_spec (google.cloud.automl_v1beta1.types.ColumnSpec):
-                Required. The column spec which
-                replaces the resource on the server.
-
-                This corresponds to the ``column_spec`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.automl_v1beta1.types.ColumnSpec:
-                A representation of a column in a relational table. When listing them, column specs are returned in the same order in which they were
-                given on import . Used by: \* Tables
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [column_spec]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, service.UpdateColumnSpecRequest):
-            request = service.UpdateColumnSpecRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if column_spec is not None:
-                request.column_spec = column_spec
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.update_column_spec]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("column_spec.name", request.column_spec.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def create_model(self,
-            request: Optional[Union[service.CreateModelRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            model: Optional[gca_model.Model] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> operation.Operation:
-        r"""Creates a model. Returns a Model in the
-        [response][google.longrunning.Operation.response] field when it
-        completes. When you create a model, several model evaluations
-        are created for it: a global evaluation, and one evaluation for
-        each annotation spec.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import automl_v1beta1
-
-            def sample_create_model():
-                # Create a client
-                client = automl_v1beta1.AutoMlClient()
-
-                # Initialize request argument(s)
-                request = automl_v1beta1.CreateModelRequest(
-                    parent="parent_value",
-                )
-
-                # Make the request
-                operation = client.create_model(request=request)
-
-                print("Waiting for operation to complete...")
-
-                response = operation.result()
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.automl_v1beta1.types.CreateModelRequest, dict]):
-                The request object. Request message for
-                [AutoMl.CreateModel][google.cloud.automl.v1beta1.AutoMl.CreateModel].
-            parent (str):
-                Required. Resource name of the parent
-                project where the model is being
-                created.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            model (google.cloud.automl_v1beta1.types.Model):
-                Required. The model to create.
-                This corresponds to the ``model`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.api_core.operation.Operation:
-                An object representing a long-running operation.
-
-                The result type for the operation will be
-                :class:`google.cloud.automl_v1beta1.types.Model` API
-                proto representing a trained machine learning model.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [parent, model]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, service.CreateModelRequest):
-            request = service.CreateModelRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if parent is not None:
-                request.parent = parent
-            if model is not None:
-                request.model = model
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.create_model]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Wrap the response in an operation future.
-        response = operation.from_gapic(
-            response,
-            self._transport.operations_client,
-            gca_model.Model,
-            metadata_type=operations.OperationMetadata,
-        )
-
-        # Done; return the response.
-        return response
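Unlike the dataset operations above, this future resolves to a Model (note ``gca_model.Model`` passed to ``operation.from_gapic``); a sketch of blocking on training (the timeout value is illustrative):

.. code-block:: python

    from google.cloud import automl_v1beta1

    client = automl_v1beta1.AutoMlClient()

    op = client.create_model(
        request=automl_v1beta1.CreateModelRequest(parent="parent_value"),
    )

    # result() returns the trained automl_v1beta1.Model once training finishes;
    # training can take hours, so a generous timeout (or polling) is advisable.
    model = op.result(timeout=3600)
    print(model.name)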
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.automl_v1beta1.types.Model: - API proto representing a trained - machine learning model. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.GetModelRequest): - request = service.GetModelRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_model] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_models(self, - request: Optional[Union[service.ListModelsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListModelsPager: - r"""Lists models. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1beta1 - - def sample_list_models(): - # Create a client - client = automl_v1beta1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1beta1.ListModelsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_models(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.automl_v1beta1.types.ListModelsRequest, dict]): - The request object. Request message for - [AutoMl.ListModels][google.cloud.automl.v1beta1.AutoMl.ListModels]. - parent (str): - Required. Resource name of the - project, from which to list the models. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.automl_v1beta1.services.auto_ml.pagers.ListModelsPager: - Response message for - [AutoMl.ListModels][google.cloud.automl.v1beta1.AutoMl.ListModels]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListModelsRequest): - request = service.ListModelsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_models] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListModelsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_model(self, - request: Optional[Union[service.DeleteModelRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Deletes a model. Returns ``google.protobuf.Empty`` in the - [response][google.longrunning.Operation.response] field when it - completes, and ``delete_details`` in the - [metadata][google.longrunning.Operation.metadata] field. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1beta1 - - def sample_delete_model(): - # Create a client - client = automl_v1beta1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1beta1.DeleteModelRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_model(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.automl_v1beta1.types.DeleteModelRequest, dict]): - The request object. Request message for - [AutoMl.DeleteModel][google.cloud.automl.v1beta1.AutoMl.DeleteModel]. - name (str): - Required. Resource name of the model - being deleted. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.DeleteModelRequest): - request = service.DeleteModelRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_model] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. 
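- # ``operation.from_gapic`` wraps the raw long-running operation in a
- # future-like object whose ``result()`` blocks until the server
- # reports completion. A minimal polling sketch (the resource name and
- # timeout are placeholders):
- #
- #   op = client.delete_model(name="projects/p/locations/l/models/m")
- #   op.result(timeout=300)  # returns empty_pb2.Empty on success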
- response = operation.from_gapic(
- response,
- self._transport.operations_client,
- empty_pb2.Empty,
- metadata_type=operations.OperationMetadata,
- )
-
- # Done; return the response.
- return response
-
- def deploy_model(self,
- request: Optional[Union[service.DeployModelRequest, dict]] = None,
- *,
- name: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> operation.Operation:
- r"""Deploys a model. If a model is already deployed, deploying it
- with the same parameters has no effect. Deploying with different
- parameters (e.g. changing
-
- [node_number][google.cloud.automl.v1beta1.ImageObjectDetectionModelDeploymentMetadata.node_number])
- will reset the deployment state without pausing the model's
- availability.
-
- Only applicable for Text Classification, Image Object Detection,
- Tables, and Image Segmentation; all other domains manage
- deployment automatically.
-
- Returns an empty response in the
- [response][google.longrunning.Operation.response] field when it
- completes.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import automl_v1beta1
-
- def sample_deploy_model():
- # Create a client
- client = automl_v1beta1.AutoMlClient()
-
- # Initialize request argument(s)
- request = automl_v1beta1.DeployModelRequest(
- name="name_value",
- )
-
- # Make the request
- operation = client.deploy_model(request=request)
-
- print("Waiting for operation to complete...")
-
- response = operation.result()
-
- # Handle the response
- print(response)
-
- Args:
- request (Union[google.cloud.automl_v1beta1.types.DeployModelRequest, dict]):
- The request object. Request message for
- [AutoMl.DeployModel][google.cloud.automl.v1beta1.AutoMl.DeployModel].
- name (str):
- Required. Resource name of the model
- to deploy.
-
- This corresponds to the ``name`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.api_core.operation.Operation:
- An object representing a long-running operation.
-
- The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
- empty messages in your APIs. A typical example is to
- use it as the request or the response type of an API
- method. For instance:
-
- service Foo {
- rpc Bar(google.protobuf.Empty) returns
- (google.protobuf.Empty);
-
- }
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
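- # Mixing both calling styles at once trips the check below; a
- # hypothetical example that raises ValueError (placeholder name):
- #
- #   client.deploy_model(
- #       request=automl_v1beta1.DeployModelRequest(name="projects/p/locations/l/models/m"),
- #       name="projects/p/locations/l/models/m",  # redundant flattened arg
- #   )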
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.DeployModelRequest): - request = service.DeployModelRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.deploy_model] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=operations.OperationMetadata, - ) - - # Done; return the response. - return response - - def undeploy_model(self, - request: Optional[Union[service.UndeployModelRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Undeploys a model. If the model is not deployed this method has - no effect. - - Only applicable for Text Classification, Image Object Detection - and Tables; all other domains manage deployment automatically. - - Returns an empty response in the - [response][google.longrunning.Operation.response] field when it - completes. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1beta1 - - def sample_undeploy_model(): - # Create a client - client = automl_v1beta1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1beta1.UndeployModelRequest( - name="name_value", - ) - - # Make the request - operation = client.undeploy_model(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.automl_v1beta1.types.UndeployModelRequest, dict]): - The request object. Request message for - [AutoMl.UndeployModel][google.cloud.automl.v1beta1.AutoMl.UndeployModel]. - name (str): - Required. Resource name of the model - to undeploy. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.api_core.operation.Operation:
- An object representing a long-running operation.
-
- The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
- empty messages in your APIs. A typical example is to
- use it as the request or the response type of an API
- method. For instance:
-
- service Foo {
- rpc Bar(google.protobuf.Empty) returns
- (google.protobuf.Empty);
-
- }
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [name]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError('If the `request` argument is set, then none of '
- 'the individual field arguments should be set.')
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, service.UndeployModelRequest):
- request = service.UndeployModelRequest(request)
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if name is not None:
- request.name = name
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.undeploy_model]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("name", request.name),
- )),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Wrap the response in an operation future.
- response = operation.from_gapic(
- response,
- self._transport.operations_client,
- empty_pb2.Empty,
- metadata_type=operations.OperationMetadata,
- )
-
- # Done; return the response.
- return response
-
- def export_model(self,
- request: Optional[Union[service.ExportModelRequest, dict]] = None,
- *,
- name: Optional[str] = None,
- output_config: Optional[io.ModelExportOutputConfig] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> operation.Operation:
- r"""Exports a trained, "export-able" model to a user-specified
- Google Cloud Storage location. A model is considered export-able
- if and only if it has an export format defined for it in
-
- [ModelExportOutputConfig][google.cloud.automl.v1beta1.ModelExportOutputConfig].
-
- Returns an empty response in the
- [response][google.longrunning.Operation.response] field when it
- completes.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1beta1 - - def sample_export_model(): - # Create a client - client = automl_v1beta1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1beta1.ExportModelRequest( - name="name_value", - ) - - # Make the request - operation = client.export_model(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.automl_v1beta1.types.ExportModelRequest, dict]): - The request object. Request message for - [AutoMl.ExportModel][google.cloud.automl.v1beta1.AutoMl.ExportModel]. - Models need to be enabled for exporting, otherwise an - error code will be returned. - name (str): - Required. The resource name of the - model to export. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - output_config (google.cloud.automl_v1beta1.types.ModelExportOutputConfig): - Required. The desired output location - and configuration. - - This corresponds to the ``output_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name, output_config] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ExportModelRequest): - request = service.ExportModelRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if output_config is not None: - request.output_config = output_config - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.export_model] - - # Certain fields should be provided within the metadata header; - # add these here. 
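- # ``to_grpc_metadata`` renders the routing fields as the implicit
- # ``x-goog-request-params`` request header. A rough sketch of what is
- # appended (placeholder, URL-encoded resource name):
- #
- #   routing = gapic_v1.routing_header.to_grpc_metadata(
- #       (("name", "projects/p/locations/l/models/m"),)
- #   )
- #   # routing == ("x-goog-request-params", "name=projects%2Fp%2F...")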
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("name", request.name),
- )),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Wrap the response in an operation future.
- response = operation.from_gapic(
- response,
- self._transport.operations_client,
- empty_pb2.Empty,
- metadata_type=operations.OperationMetadata,
- )
-
- # Done; return the response.
- return response
-
- def export_evaluated_examples(self,
- request: Optional[Union[service.ExportEvaluatedExamplesRequest, dict]] = None,
- *,
- name: Optional[str] = None,
- output_config: Optional[io.ExportEvaluatedExamplesOutputConfig] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> operation.Operation:
- r"""Exports examples on which the model was evaluated (i.e. which
- were in the TEST set of the dataset the model was created from),
- together with their ground truth annotations and the annotations
- created (predicted) by the model. The examples, ground truth and
- predictions are exported as they were at the moment
- the model was evaluated.
-
- This export is available only for 30 days after the model
- evaluation is created.
-
- Currently only available for Tables.
-
- Returns an empty response in the
- [response][google.longrunning.Operation.response] field when it
- completes.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import automl_v1beta1
-
- def sample_export_evaluated_examples():
- # Create a client
- client = automl_v1beta1.AutoMlClient()
-
- # Initialize request argument(s)
- request = automl_v1beta1.ExportEvaluatedExamplesRequest(
- name="name_value",
- )
-
- # Make the request
- operation = client.export_evaluated_examples(request=request)
-
- print("Waiting for operation to complete...")
-
- response = operation.result()
-
- # Handle the response
- print(response)
-
- Args:
- request (Union[google.cloud.automl_v1beta1.types.ExportEvaluatedExamplesRequest, dict]):
- The request object. Request message for
- [AutoMl.ExportEvaluatedExamples][google.cloud.automl.v1beta1.AutoMl.ExportEvaluatedExamples].
- name (str):
- Required. The resource name of the
- model whose evaluated examples are to be
- exported.
-
- This corresponds to the ``name`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- output_config (google.cloud.automl_v1beta1.types.ExportEvaluatedExamplesOutputConfig):
- Required. The desired output location
- and configuration.
-
- This corresponds to the ``output_config`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata.
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name, output_config] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ExportEvaluatedExamplesRequest): - request = service.ExportEvaluatedExamplesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if output_config is not None: - request.output_config = output_config - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.export_evaluated_examples] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=operations.OperationMetadata, - ) - - # Done; return the response. - return response - - def get_model_evaluation(self, - request: Optional[Union[service.GetModelEvaluationRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> model_evaluation.ModelEvaluation: - r"""Gets a model evaluation. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1beta1 - - def sample_get_model_evaluation(): - # Create a client - client = automl_v1beta1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1beta1.GetModelEvaluationRequest( - name="name_value", - ) - - # Make the request - response = client.get_model_evaluation(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.automl_v1beta1.types.GetModelEvaluationRequest, dict]): - The request object. Request message for - [AutoMl.GetModelEvaluation][google.cloud.automl.v1beta1.AutoMl.GetModelEvaluation]. - name (str): - Required. Resource name for the model - evaluation. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.automl_v1beta1.types.ModelEvaluation: - Evaluation results of a model. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.GetModelEvaluationRequest): - request = service.GetModelEvaluationRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_model_evaluation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_model_evaluations(self, - request: Optional[Union[service.ListModelEvaluationsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListModelEvaluationsPager: - r"""Lists model evaluations. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1beta1 - - def sample_list_model_evaluations(): - # Create a client - client = automl_v1beta1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1beta1.ListModelEvaluationsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_model_evaluations(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.automl_v1beta1.types.ListModelEvaluationsRequest, dict]): - The request object. Request message for - [AutoMl.ListModelEvaluations][google.cloud.automl.v1beta1.AutoMl.ListModelEvaluations]. - parent (str): - Required. Resource name of the model - to list the model evaluations for. If - modelId is set as "-", this will list - model evaluations from across all models - of the parent location. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.automl_v1beta1.services.auto_ml.pagers.ListModelEvaluationsPager: - Response message for - [AutoMl.ListModelEvaluations][google.cloud.automl.v1beta1.AutoMl.ListModelEvaluations]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, service.ListModelEvaluationsRequest): - request = service.ListModelEvaluationsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_model_evaluations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
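- # ``rpc`` below fetches only the first page; the pager wrapper that
- # follows lazily drives the page_token round trips. A usage sketch
- # (placeholder parent; a model id of "-" spans all models in the
- # location, per the docstring above):
- #
- #   parent = "projects/p/locations/l/models/-"
- #   for evaluation in client.list_model_evaluations(parent=parent):
- #       print(evaluation.name)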
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListModelEvaluationsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "AutoMlClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "AutoMlClient", -) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/auto_ml/pagers.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/auto_ml/pagers.py deleted file mode 100644 index 7cc233919bbd..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/auto_ml/pagers.py +++ /dev/null @@ -1,727 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.automl_v1beta1.types import column_spec -from google.cloud.automl_v1beta1.types import dataset -from google.cloud.automl_v1beta1.types import model -from google.cloud.automl_v1beta1.types import model_evaluation -from google.cloud.automl_v1beta1.types import service -from google.cloud.automl_v1beta1.types import table_spec - - -class ListDatasetsPager: - """A pager for iterating through ``list_datasets`` requests. - - This class thinly wraps an initial - :class:`google.cloud.automl_v1beta1.types.ListDatasetsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``datasets`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListDatasets`` requests and continue to iterate - through the ``datasets`` field on the - corresponding responses. - - All the usual :class:`google.cloud.automl_v1beta1.types.ListDatasetsResponse` - attributes are available on the pager. 
If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., service.ListDatasetsResponse], - request: service.ListDatasetsRequest, - response: service.ListDatasetsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.automl_v1beta1.types.ListDatasetsRequest): - The initial request object. - response (google.cloud.automl_v1beta1.types.ListDatasetsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = service.ListDatasetsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[service.ListDatasetsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[dataset.Dataset]: - for page in self.pages: - yield from page.datasets - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDatasetsAsyncPager: - """A pager for iterating through ``list_datasets`` requests. - - This class thinly wraps an initial - :class:`google.cloud.automl_v1beta1.types.ListDatasetsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``datasets`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDatasets`` requests and continue to iterate - through the ``datasets`` field on the - corresponding responses. - - All the usual :class:`google.cloud.automl_v1beta1.types.ListDatasetsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[service.ListDatasetsResponse]], - request: service.ListDatasetsRequest, - response: service.ListDatasetsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.automl_v1beta1.types.ListDatasetsRequest): - The initial request object. - response (google.cloud.automl_v1beta1.types.ListDatasetsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = service.ListDatasetsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[service.ListDatasetsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[dataset.Dataset]: - async def async_generator(): - async for page in self.pages: - for response in page.datasets: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListTableSpecsPager: - """A pager for iterating through ``list_table_specs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.automl_v1beta1.types.ListTableSpecsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``table_specs`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListTableSpecs`` requests and continue to iterate - through the ``table_specs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.automl_v1beta1.types.ListTableSpecsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., service.ListTableSpecsResponse], - request: service.ListTableSpecsRequest, - response: service.ListTableSpecsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.automl_v1beta1.types.ListTableSpecsRequest): - The initial request object. - response (google.cloud.automl_v1beta1.types.ListTableSpecsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
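-
- A usage sketch (the pager is normally obtained from
- ``AutoMlClient.list_table_specs`` rather than constructed
- directly; the parent below is a placeholder resource name):
-
- .. code-block:: python
-
- pager = client.list_table_specs(parent="projects/p/locations/l/datasets/d")
- for page in pager.pages:
- print(len(page.table_specs))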
- """ - self._method = method - self._request = service.ListTableSpecsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[service.ListTableSpecsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[table_spec.TableSpec]: - for page in self.pages: - yield from page.table_specs - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListTableSpecsAsyncPager: - """A pager for iterating through ``list_table_specs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.automl_v1beta1.types.ListTableSpecsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``table_specs`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListTableSpecs`` requests and continue to iterate - through the ``table_specs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.automl_v1beta1.types.ListTableSpecsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[service.ListTableSpecsResponse]], - request: service.ListTableSpecsRequest, - response: service.ListTableSpecsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.automl_v1beta1.types.ListTableSpecsRequest): - The initial request object. - response (google.cloud.automl_v1beta1.types.ListTableSpecsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = service.ListTableSpecsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[service.ListTableSpecsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[table_spec.TableSpec]: - async def async_generator(): - async for page in self.pages: - for response in page.table_specs: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListColumnSpecsPager: - """A pager for iterating through ``list_column_specs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.automl_v1beta1.types.ListColumnSpecsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``column_specs`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListColumnSpecs`` requests and continue to iterate - through the ``column_specs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.automl_v1beta1.types.ListColumnSpecsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., service.ListColumnSpecsResponse], - request: service.ListColumnSpecsRequest, - response: service.ListColumnSpecsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.automl_v1beta1.types.ListColumnSpecsRequest): - The initial request object. - response (google.cloud.automl_v1beta1.types.ListColumnSpecsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = service.ListColumnSpecsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[service.ListColumnSpecsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[column_spec.ColumnSpec]: - for page in self.pages: - yield from page.column_specs - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListColumnSpecsAsyncPager: - """A pager for iterating through ``list_column_specs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.automl_v1beta1.types.ListColumnSpecsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``column_specs`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListColumnSpecs`` requests and continue to iterate - through the ``column_specs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.automl_v1beta1.types.ListColumnSpecsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[service.ListColumnSpecsResponse]], - request: service.ListColumnSpecsRequest, - response: service.ListColumnSpecsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.automl_v1beta1.types.ListColumnSpecsRequest): - The initial request object. - response (google.cloud.automl_v1beta1.types.ListColumnSpecsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = service.ListColumnSpecsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[service.ListColumnSpecsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[column_spec.ColumnSpec]: - async def async_generator(): - async for page in self.pages: - for response in page.column_specs: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListModelsPager: - """A pager for iterating through ``list_models`` requests. - - This class thinly wraps an initial - :class:`google.cloud.automl_v1beta1.types.ListModelsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``model`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListModels`` requests and continue to iterate - through the ``model`` field on the - corresponding responses. - - All the usual :class:`google.cloud.automl_v1beta1.types.ListModelsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., service.ListModelsResponse], - request: service.ListModelsRequest, - response: service.ListModelsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.automl_v1beta1.types.ListModelsRequest): - The initial request object. - response (google.cloud.automl_v1beta1.types.ListModelsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = service.ListModelsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[service.ListModelsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[model.Model]: - for page in self.pages: - yield from page.model - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListModelsAsyncPager: - """A pager for iterating through ``list_models`` requests. - - This class thinly wraps an initial - :class:`google.cloud.automl_v1beta1.types.ListModelsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``model`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListModels`` requests and continue to iterate - through the ``model`` field on the - corresponding responses. - - All the usual :class:`google.cloud.automl_v1beta1.types.ListModelsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[service.ListModelsResponse]], - request: service.ListModelsRequest, - response: service.ListModelsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.automl_v1beta1.types.ListModelsRequest): - The initial request object. - response (google.cloud.automl_v1beta1.types.ListModelsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = service.ListModelsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[service.ListModelsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[model.Model]: - async def async_generator(): - async for page in self.pages: - for response in page.model: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListModelEvaluationsPager: - """A pager for iterating through ``list_model_evaluations`` requests. - - This class thinly wraps an initial - :class:`google.cloud.automl_v1beta1.types.ListModelEvaluationsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``model_evaluation`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListModelEvaluations`` requests and continue to iterate - through the ``model_evaluation`` field on the - corresponding responses. - - All the usual :class:`google.cloud.automl_v1beta1.types.ListModelEvaluationsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., service.ListModelEvaluationsResponse], - request: service.ListModelEvaluationsRequest, - response: service.ListModelEvaluationsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.automl_v1beta1.types.ListModelEvaluationsRequest): - The initial request object. - response (google.cloud.automl_v1beta1.types.ListModelEvaluationsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = service.ListModelEvaluationsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[service.ListModelEvaluationsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[model_evaluation.ModelEvaluation]: - for page in self.pages: - yield from page.model_evaluation - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListModelEvaluationsAsyncPager: - """A pager for iterating through ``list_model_evaluations`` requests. - - This class thinly wraps an initial - :class:`google.cloud.automl_v1beta1.types.ListModelEvaluationsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``model_evaluation`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListModelEvaluations`` requests and continue to iterate - through the ``model_evaluation`` field on the - corresponding responses. - - All the usual :class:`google.cloud.automl_v1beta1.types.ListModelEvaluationsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[service.ListModelEvaluationsResponse]], - request: service.ListModelEvaluationsRequest, - response: service.ListModelEvaluationsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.automl_v1beta1.types.ListModelEvaluationsRequest): - The initial request object. - response (google.cloud.automl_v1beta1.types.ListModelEvaluationsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = service.ListModelEvaluationsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[service.ListModelEvaluationsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[model_evaluation.ModelEvaluation]: - async def async_generator(): - async for page in self.pages: - for response in page.model_evaluation: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/auto_ml/transports/README.rst b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/auto_ml/transports/README.rst deleted file mode 100644 index 23f3112ee6dd..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/auto_ml/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`AutoMlTransport` is the ABC for all transports. -- public child `AutoMlGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `AutoMlGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseAutoMlRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `AutoMlRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/auto_ml/transports/__init__.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/auto_ml/transports/__init__.py deleted file mode 100644 index ad74a7909b03..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/auto_ml/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import AutoMlTransport -from .grpc import AutoMlGrpcTransport -from .grpc_asyncio import AutoMlGrpcAsyncIOTransport -from .rest import AutoMlRestTransport -from .rest import AutoMlRestInterceptor - - -# Compile a registry of transports. 
-_transport_registry = OrderedDict() # type: Dict[str, Type[AutoMlTransport]] -_transport_registry['grpc'] = AutoMlGrpcTransport -_transport_registry['grpc_asyncio'] = AutoMlGrpcAsyncIOTransport -_transport_registry['rest'] = AutoMlRestTransport - -__all__ = ( - 'AutoMlTransport', - 'AutoMlGrpcTransport', - 'AutoMlGrpcAsyncIOTransport', - 'AutoMlRestTransport', - 'AutoMlRestInterceptor', -) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/auto_ml/transports/base.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/auto_ml/transports/base.py deleted file mode 100644 index 11d9ff75fe59..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/auto_ml/transports/base.py +++ /dev/null @@ -1,612 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.automl_v1beta1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.automl_v1beta1.types import annotation_spec -from google.cloud.automl_v1beta1.types import column_spec -from google.cloud.automl_v1beta1.types import column_spec as gca_column_spec -from google.cloud.automl_v1beta1.types import dataset -from google.cloud.automl_v1beta1.types import dataset as gca_dataset -from google.cloud.automl_v1beta1.types import model -from google.cloud.automl_v1beta1.types import model_evaluation -from google.cloud.automl_v1beta1.types import service -from google.cloud.automl_v1beta1.types import table_spec -from google.cloud.automl_v1beta1.types import table_spec as gca_table_spec -from google.longrunning import operations_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class AutoMlTransport(abc.ABC): - """Abstract transport class for AutoMl.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'automl.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'automl.googleapis.com'). 
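One detail worth calling out in the constructor below: when the supplied ``host`` carries no explicit port, ``:443`` (HTTPS) is appended before the value is saved. The normalization, restated as a standalone sketch that mirrors the ``if ':' not in host`` logic further below:

.. code-block:: python

    def normalize_host(host: str, default_port: int = 443) -> str:
        # Append the default HTTPS port only when no port is present.
        return host if ":" in host else f"{host}:{default_port}"

    assert normalize_host("automl.googleapis.com") == "automl.googleapis.com:443"
    assert normalize_host("localhost:8080") == "localhost:8080"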
- credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether a self-signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply an audience if a credentials file was passed by the user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use a self-signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods.
- self._wrapped_methods = { - self.create_dataset: gapic_v1.method.wrap_method( - self.create_dataset, - default_timeout=5.0, - client_info=client_info, - ), - self.get_dataset: gapic_v1.method.wrap_method( - self.get_dataset, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=5.0, - ), - default_timeout=5.0, - client_info=client_info, - ), - self.list_datasets: gapic_v1.method.wrap_method( - self.list_datasets, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=5.0, - ), - default_timeout=5.0, - client_info=client_info, - ), - self.update_dataset: gapic_v1.method.wrap_method( - self.update_dataset, - default_timeout=5.0, - client_info=client_info, - ), - self.delete_dataset: gapic_v1.method.wrap_method( - self.delete_dataset, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=5.0, - ), - default_timeout=5.0, - client_info=client_info, - ), - self.import_data: gapic_v1.method.wrap_method( - self.import_data, - default_timeout=5.0, - client_info=client_info, - ), - self.export_data: gapic_v1.method.wrap_method( - self.export_data, - default_timeout=5.0, - client_info=client_info, - ), - self.get_annotation_spec: gapic_v1.method.wrap_method( - self.get_annotation_spec, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=5.0, - ), - default_timeout=5.0, - client_info=client_info, - ), - self.get_table_spec: gapic_v1.method.wrap_method( - self.get_table_spec, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=5.0, - ), - default_timeout=5.0, - client_info=client_info, - ), - self.list_table_specs: gapic_v1.method.wrap_method( - self.list_table_specs, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=5.0, - ), - default_timeout=5.0, - client_info=client_info, - ), - self.update_table_spec: gapic_v1.method.wrap_method( - self.update_table_spec, - default_timeout=5.0, - client_info=client_info, - ), - self.get_column_spec: gapic_v1.method.wrap_method( - self.get_column_spec, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=5.0, - ), - default_timeout=5.0, - client_info=client_info, - ), - self.list_column_specs: gapic_v1.method.wrap_method( - self.list_column_specs, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=5.0, - ), - default_timeout=5.0, - client_info=client_info, - ), - self.update_column_spec: gapic_v1.method.wrap_method( - 
self.update_column_spec, - default_timeout=5.0, - client_info=client_info, - ), - self.create_model: gapic_v1.method.wrap_method( - self.create_model, - default_timeout=5.0, - client_info=client_info, - ), - self.get_model: gapic_v1.method.wrap_method( - self.get_model, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=5.0, - ), - default_timeout=5.0, - client_info=client_info, - ), - self.list_models: gapic_v1.method.wrap_method( - self.list_models, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=5.0, - ), - default_timeout=5.0, - client_info=client_info, - ), - self.delete_model: gapic_v1.method.wrap_method( - self.delete_model, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=5.0, - ), - default_timeout=5.0, - client_info=client_info, - ), - self.deploy_model: gapic_v1.method.wrap_method( - self.deploy_model, - default_timeout=5.0, - client_info=client_info, - ), - self.undeploy_model: gapic_v1.method.wrap_method( - self.undeploy_model, - default_timeout=5.0, - client_info=client_info, - ), - self.export_model: gapic_v1.method.wrap_method( - self.export_model, - default_timeout=5.0, - client_info=client_info, - ), - self.export_evaluated_examples: gapic_v1.method.wrap_method( - self.export_evaluated_examples, - default_timeout=5.0, - client_info=client_info, - ), - self.get_model_evaluation: gapic_v1.method.wrap_method( - self.get_model_evaluation, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=5.0, - ), - default_timeout=5.0, - client_info=client_info, - ), - self.list_model_evaluations: gapic_v1.method.wrap_method( - self.list_model_evaluations, - default_timeout=5.0, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
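Stepping back to the ``_prep_wrapped_messages`` table above: read-only RPCs such as ``get_dataset`` and ``list_models`` retry ``DeadlineExceeded`` and ``ServiceUnavailable`` with exponential backoff (initial 0.1 s, multiplier 1.3, capped at 60 s, overall deadline 5 s). The nominal delay schedule, ignoring the jitter that ``google.api_core`` adds, looks like this sketch:

.. code-block:: python

    def nominal_delays(initial: float = 0.1, multiplier: float = 1.3,
                       maximum: float = 60.0, attempts: int = 6):
        """Yield the nominal sleep before each retry attempt."""
        delay = initial
        for _ in range(attempts):
            yield min(delay, maximum)
            delay *= multiplier

    print([round(d, 3) for d in nominal_delays()])
    # [0.1, 0.13, 0.169, 0.22, 0.286, 0.371]
    # The 5 s deadline cuts retries off long before the 60 s cap matters.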
- """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def create_dataset(self) -> Callable[ - [service.CreateDatasetRequest], - Union[ - gca_dataset.Dataset, - Awaitable[gca_dataset.Dataset] - ]]: - raise NotImplementedError() - - @property - def get_dataset(self) -> Callable[ - [service.GetDatasetRequest], - Union[ - dataset.Dataset, - Awaitable[dataset.Dataset] - ]]: - raise NotImplementedError() - - @property - def list_datasets(self) -> Callable[ - [service.ListDatasetsRequest], - Union[ - service.ListDatasetsResponse, - Awaitable[service.ListDatasetsResponse] - ]]: - raise NotImplementedError() - - @property - def update_dataset(self) -> Callable[ - [service.UpdateDatasetRequest], - Union[ - gca_dataset.Dataset, - Awaitable[gca_dataset.Dataset] - ]]: - raise NotImplementedError() - - @property - def delete_dataset(self) -> Callable[ - [service.DeleteDatasetRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def import_data(self) -> Callable[ - [service.ImportDataRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def export_data(self) -> Callable[ - [service.ExportDataRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def get_annotation_spec(self) -> Callable[ - [service.GetAnnotationSpecRequest], - Union[ - annotation_spec.AnnotationSpec, - Awaitable[annotation_spec.AnnotationSpec] - ]]: - raise NotImplementedError() - - @property - def get_table_spec(self) -> Callable[ - [service.GetTableSpecRequest], - Union[ - table_spec.TableSpec, - Awaitable[table_spec.TableSpec] - ]]: - raise NotImplementedError() - - @property - def list_table_specs(self) -> Callable[ - [service.ListTableSpecsRequest], - Union[ - service.ListTableSpecsResponse, - Awaitable[service.ListTableSpecsResponse] - ]]: - raise NotImplementedError() - - @property - def update_table_spec(self) -> Callable[ - [service.UpdateTableSpecRequest], - Union[ - gca_table_spec.TableSpec, - Awaitable[gca_table_spec.TableSpec] - ]]: - raise NotImplementedError() - - @property - def get_column_spec(self) -> Callable[ - [service.GetColumnSpecRequest], - Union[ - column_spec.ColumnSpec, - Awaitable[column_spec.ColumnSpec] - ]]: - raise NotImplementedError() - - @property - def list_column_specs(self) -> Callable[ - [service.ListColumnSpecsRequest], - Union[ - service.ListColumnSpecsResponse, - Awaitable[service.ListColumnSpecsResponse] - ]]: - raise NotImplementedError() - - @property - def update_column_spec(self) -> Callable[ - [service.UpdateColumnSpecRequest], - Union[ - gca_column_spec.ColumnSpec, - Awaitable[gca_column_spec.ColumnSpec] - ]]: - raise NotImplementedError() - - @property - def create_model(self) -> Callable[ - [service.CreateModelRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def get_model(self) -> Callable[ - [service.GetModelRequest], - Union[ - model.Model, - Awaitable[model.Model] - ]]: - raise NotImplementedError() - - @property - def list_models(self) -> Callable[ - [service.ListModelsRequest], - Union[ - service.ListModelsResponse, - Awaitable[service.ListModelsResponse] - ]]: - raise NotImplementedError() - - @property - def 
delete_model(self) -> Callable[ - [service.DeleteModelRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def deploy_model(self) -> Callable[ - [service.DeployModelRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def undeploy_model(self) -> Callable[ - [service.UndeployModelRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def export_model(self) -> Callable[ - [service.ExportModelRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def export_evaluated_examples(self) -> Callable[ - [service.ExportEvaluatedExamplesRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def get_model_evaluation(self) -> Callable[ - [service.GetModelEvaluationRequest], - Union[ - model_evaluation.ModelEvaluation, - Awaitable[model_evaluation.ModelEvaluation] - ]]: - raise NotImplementedError() - - @property - def list_model_evaluations(self) -> Callable[ - [service.ListModelEvaluationsRequest], - Union[ - service.ListModelEvaluationsResponse, - Awaitable[service.ListModelEvaluationsResponse] - ]]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'AutoMlTransport', -) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/auto_ml/transports/grpc.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/auto_ml/transports/grpc.py deleted file mode 100644 index 37ff68978074..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/auto_ml/transports/grpc.py +++ /dev/null @@ -1,1054 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import json -import logging as std_logging -import pickle -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore - -from google.cloud.automl_v1beta1.types import annotation_spec -from google.cloud.automl_v1beta1.types import column_spec -from google.cloud.automl_v1beta1.types import column_spec as gca_column_spec -from google.cloud.automl_v1beta1.types import dataset -from google.cloud.automl_v1beta1.types import dataset as gca_dataset -from google.cloud.automl_v1beta1.types import model -from google.cloud.automl_v1beta1.types import model_evaluation -from google.cloud.automl_v1beta1.types import service -from google.cloud.automl_v1beta1.types import table_spec -from google.cloud.automl_v1beta1.types import table_spec as gca_table_spec -from google.longrunning import operations_pb2 # type: ignore -from .base import AutoMlTransport, DEFAULT_CLIENT_INFO - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER - def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "rpcName": client_call_details.method, - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - - response = continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = response.trailing_metadata() - # Convert the gRPC trailing metadata into a dict of plain string values for logging. - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = response.result() - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response for {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", -
"rpcName": client_call_details.method, - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class AutoMlGrpcTransport(AutoMlTransport): - """gRPC backend transport for AutoMl. - - AutoML Server API. - - The resource names are assigned by the server. The server never - reuses names that it has created after the resources with those - names are deleted. - - An ID of a resource is the last element of the item's resource name. - For - ``projects/{project_id}/locations/{location_id}/datasets/{dataset_id}``, - then the id for the item is ``{dataset_id}``. - - Currently the only supported ``location_id`` is "us-central1". - - On any input that is documented to expect a string parameter in - snake_case or kebab-case, either of those cases is accepted. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'automl.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'automl.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if a ``channel`` instance is provided. - channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. 
- ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. 
- credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) - - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'automl.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): An optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def create_dataset(self) -> Callable[ - [service.CreateDatasetRequest], - gca_dataset.Dataset]: - r"""Return a callable for the create dataset method over gRPC. - - Creates a dataset. - - Returns: - Callable[[~.CreateDatasetRequest], - ~.Dataset]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each.
- if 'create_dataset' not in self._stubs: - self._stubs['create_dataset'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1beta1.AutoMl/CreateDataset', - request_serializer=service.CreateDatasetRequest.serialize, - response_deserializer=gca_dataset.Dataset.deserialize, - ) - return self._stubs['create_dataset'] - - @property - def get_dataset(self) -> Callable[ - [service.GetDatasetRequest], - dataset.Dataset]: - r"""Return a callable for the get dataset method over gRPC. - - Gets a dataset. - - Returns: - Callable[[~.GetDatasetRequest], - ~.Dataset]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_dataset' not in self._stubs: - self._stubs['get_dataset'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1beta1.AutoMl/GetDataset', - request_serializer=service.GetDatasetRequest.serialize, - response_deserializer=dataset.Dataset.deserialize, - ) - return self._stubs['get_dataset'] - - @property - def list_datasets(self) -> Callable[ - [service.ListDatasetsRequest], - service.ListDatasetsResponse]: - r"""Return a callable for the list datasets method over gRPC. - - Lists datasets in a project. - - Returns: - Callable[[~.ListDatasetsRequest], - ~.ListDatasetsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_datasets' not in self._stubs: - self._stubs['list_datasets'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1beta1.AutoMl/ListDatasets', - request_serializer=service.ListDatasetsRequest.serialize, - response_deserializer=service.ListDatasetsResponse.deserialize, - ) - return self._stubs['list_datasets'] - - @property - def update_dataset(self) -> Callable[ - [service.UpdateDatasetRequest], - gca_dataset.Dataset]: - r"""Return a callable for the update dataset method over gRPC. - - Updates a dataset. - - Returns: - Callable[[~.UpdateDatasetRequest], - ~.Dataset]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_dataset' not in self._stubs: - self._stubs['update_dataset'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1beta1.AutoMl/UpdateDataset', - request_serializer=service.UpdateDatasetRequest.serialize, - response_deserializer=gca_dataset.Dataset.deserialize, - ) - return self._stubs['update_dataset'] - - @property - def delete_dataset(self) -> Callable[ - [service.DeleteDatasetRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete dataset method over gRPC. - - Deletes a dataset and all of its contents. Returns empty - response in the - [response][google.longrunning.Operation.response] field when it - completes, and ``delete_details`` in the - [metadata][google.longrunning.Operation.metadata] field. - - Returns: - Callable[[~.DeleteDatasetRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. 
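``delete_dataset`` is the first of several RPCs here that return a long-running operation: at this transport layer the result is a raw ``operations_pb2.Operation``, which the client layer wraps so callers can block on completion. A hedged client-level sketch (the resource name is a placeholder):

.. code-block:: python

    # Returns a google.api_core.operation.Operation wrapper.
    operation = client.delete_dataset(
        name="projects/my-project/locations/us-central1/datasets/my-dataset"
    )
    operation.result()  # Polls via the operations client; empty response on success.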
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_dataset' not in self._stubs: - self._stubs['delete_dataset'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1beta1.AutoMl/DeleteDataset', - request_serializer=service.DeleteDatasetRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_dataset'] - - @property - def import_data(self) -> Callable[ - [service.ImportDataRequest], - operations_pb2.Operation]: - r"""Return a callable for the import data method over gRPC. - - Imports data into a dataset. For Tables this method can only be - called on an empty Dataset. - - For Tables: - - - A - [schema_inference_version][google.cloud.automl.v1beta1.InputConfig.params] - parameter must be explicitly set. Returns an empty response - in the [response][google.longrunning.Operation.response] - field when it completes. - - Returns: - Callable[[~.ImportDataRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'import_data' not in self._stubs: - self._stubs['import_data'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1beta1.AutoMl/ImportData', - request_serializer=service.ImportDataRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['import_data'] - - @property - def export_data(self) -> Callable[ - [service.ExportDataRequest], - operations_pb2.Operation]: - r"""Return a callable for the export data method over gRPC. - - Exports dataset's data to the provided output location. Returns - an empty response in the - [response][google.longrunning.Operation.response] field when it - completes. - - Returns: - Callable[[~.ExportDataRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'export_data' not in self._stubs: - self._stubs['export_data'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1beta1.AutoMl/ExportData', - request_serializer=service.ExportDataRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['export_data'] - - @property - def get_annotation_spec(self) -> Callable[ - [service.GetAnnotationSpecRequest], - annotation_spec.AnnotationSpec]: - r"""Return a callable for the get annotation spec method over gRPC. - - Gets an annotation spec. - - Returns: - Callable[[~.GetAnnotationSpecRequest], - ~.AnnotationSpec]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_annotation_spec' not in self._stubs: - self._stubs['get_annotation_spec'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1beta1.AutoMl/GetAnnotationSpec', - request_serializer=service.GetAnnotationSpecRequest.serialize, - response_deserializer=annotation_spec.AnnotationSpec.deserialize, - ) - return self._stubs['get_annotation_spec'] - - @property - def get_table_spec(self) -> Callable[ - [service.GetTableSpecRequest], - table_spec.TableSpec]: - r"""Return a callable for the get table spec method over gRPC. - - Gets a table spec. - - Returns: - Callable[[~.GetTableSpecRequest], - ~.TableSpec]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_table_spec' not in self._stubs: - self._stubs['get_table_spec'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1beta1.AutoMl/GetTableSpec', - request_serializer=service.GetTableSpecRequest.serialize, - response_deserializer=table_spec.TableSpec.deserialize, - ) - return self._stubs['get_table_spec'] - - @property - def list_table_specs(self) -> Callable[ - [service.ListTableSpecsRequest], - service.ListTableSpecsResponse]: - r"""Return a callable for the list table specs method over gRPC. - - Lists table specs in a dataset. - - Returns: - Callable[[~.ListTableSpecsRequest], - ~.ListTableSpecsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_table_specs' not in self._stubs: - self._stubs['list_table_specs'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1beta1.AutoMl/ListTableSpecs', - request_serializer=service.ListTableSpecsRequest.serialize, - response_deserializer=service.ListTableSpecsResponse.deserialize, - ) - return self._stubs['list_table_specs'] - - @property - def update_table_spec(self) -> Callable[ - [service.UpdateTableSpecRequest], - gca_table_spec.TableSpec]: - r"""Return a callable for the update table spec method over gRPC. - - Updates a table spec. - - Returns: - Callable[[~.UpdateTableSpecRequest], - ~.TableSpec]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_table_spec' not in self._stubs: - self._stubs['update_table_spec'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1beta1.AutoMl/UpdateTableSpec', - request_serializer=service.UpdateTableSpecRequest.serialize, - response_deserializer=gca_table_spec.TableSpec.deserialize, - ) - return self._stubs['update_table_spec'] - - @property - def get_column_spec(self) -> Callable[ - [service.GetColumnSpecRequest], - column_spec.ColumnSpec]: - r"""Return a callable for the get column spec method over gRPC. - - Gets a column spec. - - Returns: - Callable[[~.GetColumnSpecRequest], - ~.ColumnSpec]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_column_spec' not in self._stubs: - self._stubs['get_column_spec'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1beta1.AutoMl/GetColumnSpec', - request_serializer=service.GetColumnSpecRequest.serialize, - response_deserializer=column_spec.ColumnSpec.deserialize, - ) - return self._stubs['get_column_spec'] - - @property - def list_column_specs(self) -> Callable[ - [service.ListColumnSpecsRequest], - service.ListColumnSpecsResponse]: - r"""Return a callable for the list column specs method over gRPC. - - Lists column specs in a table spec. - - Returns: - Callable[[~.ListColumnSpecsRequest], - ~.ListColumnSpecsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_column_specs' not in self._stubs: - self._stubs['list_column_specs'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1beta1.AutoMl/ListColumnSpecs', - request_serializer=service.ListColumnSpecsRequest.serialize, - response_deserializer=service.ListColumnSpecsResponse.deserialize, - ) - return self._stubs['list_column_specs'] - - @property - def update_column_spec(self) -> Callable[ - [service.UpdateColumnSpecRequest], - gca_column_spec.ColumnSpec]: - r"""Return a callable for the update column spec method over gRPC. - - Updates a column spec. - - Returns: - Callable[[~.UpdateColumnSpecRequest], - ~.ColumnSpec]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_column_spec' not in self._stubs: - self._stubs['update_column_spec'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1beta1.AutoMl/UpdateColumnSpec', - request_serializer=service.UpdateColumnSpecRequest.serialize, - response_deserializer=gca_column_spec.ColumnSpec.deserialize, - ) - return self._stubs['update_column_spec'] - - @property - def create_model(self) -> Callable[ - [service.CreateModelRequest], - operations_pb2.Operation]: - r"""Return a callable for the create model method over gRPC. - - Creates a model. Returns a Model in the - [response][google.longrunning.Operation.response] field when it - completes. When you create a model, several model evaluations - are created for it: a global evaluation, and one evaluation for - each annotation spec. - - Returns: - Callable[[~.CreateModelRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_model' not in self._stubs: - self._stubs['create_model'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1beta1.AutoMl/CreateModel', - request_serializer=service.CreateModelRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_model'] - - @property - def get_model(self) -> Callable[ - [service.GetModelRequest], - model.Model]: - r"""Return a callable for the get model method over gRPC. 
-
- Gets a model.
-
- Returns:
- Callable[[~.GetModelRequest],
- ~.Model]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'get_model' not in self._stubs:
- self._stubs['get_model'] = self._logged_channel.unary_unary(
- '/google.cloud.automl.v1beta1.AutoMl/GetModel',
- request_serializer=service.GetModelRequest.serialize,
- response_deserializer=model.Model.deserialize,
- )
- return self._stubs['get_model']
-
- @property
- def list_models(self) -> Callable[
- [service.ListModelsRequest],
- service.ListModelsResponse]:
- r"""Return a callable for the list models method over gRPC.
-
- Lists models.
-
- Returns:
- Callable[[~.ListModelsRequest],
- ~.ListModelsResponse]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'list_models' not in self._stubs:
- self._stubs['list_models'] = self._logged_channel.unary_unary(
- '/google.cloud.automl.v1beta1.AutoMl/ListModels',
- request_serializer=service.ListModelsRequest.serialize,
- response_deserializer=service.ListModelsResponse.deserialize,
- )
- return self._stubs['list_models']
-
- @property
- def delete_model(self) -> Callable[
- [service.DeleteModelRequest],
- operations_pb2.Operation]:
- r"""Return a callable for the delete model method over gRPC.
-
- Deletes a model. Returns ``google.protobuf.Empty`` in the
- [response][google.longrunning.Operation.response] field when it
- completes, and ``delete_details`` in the
- [metadata][google.longrunning.Operation.metadata] field.
-
- Returns:
- Callable[[~.DeleteModelRequest],
- ~.Operation]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'delete_model' not in self._stubs:
- self._stubs['delete_model'] = self._logged_channel.unary_unary(
- '/google.cloud.automl.v1beta1.AutoMl/DeleteModel',
- request_serializer=service.DeleteModelRequest.serialize,
- response_deserializer=operations_pb2.Operation.FromString,
- )
- return self._stubs['delete_model']
-
- @property
- def deploy_model(self) -> Callable[
- [service.DeployModelRequest],
- operations_pb2.Operation]:
- r"""Return a callable for the deploy model method over gRPC.
-
- Deploys a model. If a model is already deployed, deploying it
- with the same parameters has no effect. Deploying with different
- parameters (e.g. changing
-
- [node_number][google.cloud.automl.v1beta1.ImageObjectDetectionModelDeploymentMetadata.node_number])
- will reset the deployment state without pausing the model's
- availability.
-
- Only applicable for Text Classification, Image Object Detection,
- Tables, and Image Segmentation; all other domains manage
- deployment automatically.
-
- Returns an empty response in the
- [response][google.longrunning.Operation.response] field when it
- completes.
-
- Returns:
- Callable[[~.DeployModelRequest],
- ~.Operation]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'deploy_model' not in self._stubs:
- self._stubs['deploy_model'] = self._logged_channel.unary_unary(
- '/google.cloud.automl.v1beta1.AutoMl/DeployModel',
- request_serializer=service.DeployModelRequest.serialize,
- response_deserializer=operations_pb2.Operation.FromString,
- )
- return self._stubs['deploy_model']
-
- @property
- def undeploy_model(self) -> Callable[
- [service.UndeployModelRequest],
- operations_pb2.Operation]:
- r"""Return a callable for the undeploy model method over gRPC.
-
- Undeploys a model. If the model is not deployed, this method has
- no effect.
-
- Only applicable for Text Classification, Image Object Detection
- and Tables; all other domains manage deployment automatically.
-
- Returns an empty response in the
- [response][google.longrunning.Operation.response] field when it
- completes.
-
- Returns:
- Callable[[~.UndeployModelRequest],
- ~.Operation]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'undeploy_model' not in self._stubs:
- self._stubs['undeploy_model'] = self._logged_channel.unary_unary(
- '/google.cloud.automl.v1beta1.AutoMl/UndeployModel',
- request_serializer=service.UndeployModelRequest.serialize,
- response_deserializer=operations_pb2.Operation.FromString,
- )
- return self._stubs['undeploy_model']
-
- @property
- def export_model(self) -> Callable[
- [service.ExportModelRequest],
- operations_pb2.Operation]:
- r"""Return a callable for the export model method over gRPC.
-
- Exports a trained, "export-able" model to a user-specified
- Google Cloud Storage location. A model is considered export-able
- if and only if it has an export format defined for it in
-
- [ModelExportOutputConfig][google.cloud.automl.v1beta1.ModelExportOutputConfig].
-
- Returns an empty response in the
- [response][google.longrunning.Operation.response] field when it
- completes.
-
- Returns:
- Callable[[~.ExportModelRequest],
- ~.Operation]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'export_model' not in self._stubs:
- self._stubs['export_model'] = self._logged_channel.unary_unary(
- '/google.cloud.automl.v1beta1.AutoMl/ExportModel',
- request_serializer=service.ExportModelRequest.serialize,
- response_deserializer=operations_pb2.Operation.FromString,
- )
- return self._stubs['export_model']
-
- @property
- def export_evaluated_examples(self) -> Callable[
- [service.ExportEvaluatedExamplesRequest],
- operations_pb2.Operation]:
- r"""Return a callable for the export evaluated examples method over gRPC.
-
- Exports examples on which the model was evaluated (i.e. which
- were in the TEST set of the dataset the model was created from),
- together with their ground truth annotations and the annotations
- created (predicted) by the model. The examples, ground truth and
- predictions are exported in the state they were in at the moment
- the model was evaluated.
-
- This export is available only for 30 days after the model
- evaluation is created.
-
- Currently only available for Tables.
-
- Returns an empty response in the
- [response][google.longrunning.Operation.response] field when it
- completes.
-
- Returns:
- Callable[[~.ExportEvaluatedExamplesRequest],
- ~.Operation]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'export_evaluated_examples' not in self._stubs:
- self._stubs['export_evaluated_examples'] = self._logged_channel.unary_unary(
- '/google.cloud.automl.v1beta1.AutoMl/ExportEvaluatedExamples',
- request_serializer=service.ExportEvaluatedExamplesRequest.serialize,
- response_deserializer=operations_pb2.Operation.FromString,
- )
- return self._stubs['export_evaluated_examples']
-
- @property
- def get_model_evaluation(self) -> Callable[
- [service.GetModelEvaluationRequest],
- model_evaluation.ModelEvaluation]:
- r"""Return a callable for the get model evaluation method over gRPC.
-
- Gets a model evaluation.
-
- Returns:
- Callable[[~.GetModelEvaluationRequest],
- ~.ModelEvaluation]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'get_model_evaluation' not in self._stubs:
- self._stubs['get_model_evaluation'] = self._logged_channel.unary_unary(
- '/google.cloud.automl.v1beta1.AutoMl/GetModelEvaluation',
- request_serializer=service.GetModelEvaluationRequest.serialize,
- response_deserializer=model_evaluation.ModelEvaluation.deserialize,
- )
- return self._stubs['get_model_evaluation']
-
- @property
- def list_model_evaluations(self) -> Callable[
- [service.ListModelEvaluationsRequest],
- service.ListModelEvaluationsResponse]:
- r"""Return a callable for the list model evaluations method over gRPC.
-
- Lists model evaluations.
-
- Returns:
- Callable[[~.ListModelEvaluationsRequest],
- ~.ListModelEvaluationsResponse]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
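- # A note on the pattern used throughout this transport: each stub is
- # created lazily on first property access and cached in self._stubs,
- # so repeated reads of the same property reuse one bound RPC callable.
- # A minimal usage sketch (illustrative only; it assumes default
- # application credentials in the environment, and the resource names
- # below are placeholders):
- #
- #   transport = AutoMlGrpcTransport(host='automl.googleapis.com')
- #   rpc = transport.list_model_evaluations
- #   response = rpc(service.ListModelEvaluationsRequest(
- #       parent='projects/my-project/locations/us-central1/models/my-model',
- #   ))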
- if 'list_model_evaluations' not in self._stubs: - self._stubs['list_model_evaluations'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1beta1.AutoMl/ListModelEvaluations', - request_serializer=service.ListModelEvaluationsRequest.serialize, - response_deserializer=service.ListModelEvaluationsResponse.deserialize, - ) - return self._stubs['list_model_evaluations'] - - def close(self): - self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'AutoMlGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/auto_ml/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/auto_ml/transports/grpc_asyncio.py deleted file mode 100644 index c42a42ae7a98..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/auto_ml/transports/grpc_asyncio.py +++ /dev/null @@ -1,1309 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import inspect -import json -import pickle -import logging as std_logging -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import exceptions as core_exceptions -from google.api_core import retry_async as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.automl_v1beta1.types import annotation_spec -from google.cloud.automl_v1beta1.types import column_spec -from google.cloud.automl_v1beta1.types import column_spec as gca_column_spec -from google.cloud.automl_v1beta1.types import dataset -from google.cloud.automl_v1beta1.types import dataset as gca_dataset -from google.cloud.automl_v1beta1.types import model -from google.cloud.automl_v1beta1.types import model_evaluation -from google.cloud.automl_v1beta1.types import service -from google.cloud.automl_v1beta1.types import table_spec -from google.cloud.automl_v1beta1.types import table_spec as gca_table_spec -from google.longrunning import operations_pb2 # type: ignore -from .base import AutoMlTransport, DEFAULT_CLIENT_INFO -from .grpc import AutoMlGrpcTransport - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER - async def intercept_unary_unary(self, continuation, 
client_call_details, request):
- logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG)
- if logging_enabled: # pragma: NO COVER
- request_metadata = client_call_details.metadata
- if isinstance(request, proto.Message):
- request_payload = type(request).to_json(request)
- elif isinstance(request, google.protobuf.message.Message):
- request_payload = MessageToJson(request)
- else:
- request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
-
- request_metadata = {
- key: value.decode("utf-8") if isinstance(value, bytes) else value
- for key, value in request_metadata
- }
- grpc_request = {
- "payload": request_payload,
- "requestMethod": "grpc",
- "metadata": dict(request_metadata),
- }
- _LOGGER.debug(
- f"Sending request for {client_call_details.method}",
- extra = {
- "serviceName": "google.cloud.automl.v1beta1.AutoMl",
- "rpcName": str(client_call_details.method),
- "request": grpc_request,
- "metadata": grpc_request["metadata"],
- },
- )
- response = await continuation(client_call_details, request)
- if logging_enabled: # pragma: NO COVER
- response_metadata = await response.trailing_metadata()
- # Convert the gRPC trailing metadata (key, value) pairs into a
- # dict of str -> str for structured logging.
- metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
- result = await response
- if isinstance(result, proto.Message):
- response_payload = type(result).to_json(result)
- elif isinstance(result, google.protobuf.message.Message):
- response_payload = MessageToJson(result)
- else:
- response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
- grpc_response = {
- "payload": response_payload,
- "metadata": metadata,
- "status": "OK",
- }
- _LOGGER.debug(
- f"Received response to rpc {client_call_details.method}.",
- extra = {
- "serviceName": "google.cloud.automl.v1beta1.AutoMl",
- "rpcName": str(client_call_details.method),
- "response": grpc_response,
- "metadata": grpc_response["metadata"],
- },
- )
- return response
-
-
-class AutoMlGrpcAsyncIOTransport(AutoMlTransport):
- """gRPC AsyncIO backend transport for AutoMl.
-
- AutoML Server API.
-
- The resource names are assigned by the server. The server never
- reuses names that it has created after the resources with those
- names are deleted.
-
- An ID of a resource is the last element of the item's resource name.
- For example, for
- ``projects/{project_id}/locations/{location_id}/datasets/{dataset_id}``,
- the ID of the item is ``{dataset_id}``.
-
- Currently the only supported ``location_id`` is "us-central1".
-
- On any input that is documented to expect a string parameter in
- snake_case or kebab-case, either of those cases is accepted.
-
- This class defines the same methods as the primary client, so the
- primary client can load the underlying transport implementation
- and call it.
-
- It sends protocol buffers over the wire using gRPC (which is built on
- top of HTTP/2); the ``grpcio`` package must be installed.
- """
-
- _grpc_channel: aio.Channel
- _stubs: Dict[str, Callable] = {}
-
- @classmethod
- def create_channel(cls,
- host: str = 'automl.googleapis.com',
- credentials: Optional[ga_credentials.Credentials] = None,
- credentials_file: Optional[str] = None,
- scopes: Optional[Sequence[str]] = None,
- quota_project_id: Optional[str] = None,
- **kwargs) -> aio.Channel:
- """Create and return a gRPC AsyncIO channel object.
- Args:
- host (Optional[str]): The host for the channel to use.
- credentials (Optional[~.Credentials]): The
- authorization credentials to attach to requests.
These
- credentials identify this application to the service. If
- none are specified, the client will attempt to ascertain
- the credentials from the environment.
- credentials_file (Optional[str]): A file with credentials that can
- be loaded with :func:`google.auth.load_credentials_from_file`.
- scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
- service. These are only used when credentials are not specified and
- are passed to :func:`google.auth.default`.
- quota_project_id (Optional[str]): An optional project to use for billing
- and quota.
- kwargs (Optional[dict]): Keyword arguments, which are passed to the
- channel creation.
- Returns:
- aio.Channel: A gRPC AsyncIO channel object.
- """
-
- return grpc_helpers_async.create_channel(
- host,
- credentials=credentials,
- credentials_file=credentials_file,
- quota_project_id=quota_project_id,
- default_scopes=cls.AUTH_SCOPES,
- scopes=scopes,
- default_host=cls.DEFAULT_HOST,
- **kwargs
- )
-
- def __init__(self, *,
- host: str = 'automl.googleapis.com',
- credentials: Optional[ga_credentials.Credentials] = None,
- credentials_file: Optional[str] = None,
- scopes: Optional[Sequence[str]] = None,
- channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None,
- api_mtls_endpoint: Optional[str] = None,
- client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
- ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
- client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
- quota_project_id: Optional[str] = None,
- client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
- always_use_jwt_access: Optional[bool] = False,
- api_audience: Optional[str] = None,
- ) -> None:
- """Instantiate the transport.
-
- Args:
- host (Optional[str]):
- The hostname to connect to (default: 'automl.googleapis.com').
- credentials (Optional[google.auth.credentials.Credentials]): The
- authorization credentials to attach to requests. These
- credentials identify the application to the service; if none
- are specified, the client will attempt to ascertain the
- credentials from the environment.
- This argument is ignored if a ``channel`` instance is provided.
- credentials_file (Optional[str]): A file with credentials that can
- be loaded with :func:`google.auth.load_credentials_from_file`.
- This argument is ignored if a ``channel`` instance is provided.
- scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
- service. These are only used when credentials are not specified and
- are passed to :func:`google.auth.default`.
- channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]):
- A ``Channel`` instance through which to make calls, or a Callable
- that constructs and returns one. If set to None, ``self.create_channel``
- is used to create the channel. If a Callable is given, it will be called
- with the same arguments as used in ``self.create_channel``.
- api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
- If provided, it overrides the ``host`` argument and tries to create
- a mutual TLS channel with client SSL credentials from
- ``client_cert_source`` or application default SSL credentials.
- client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
- Deprecated. A callback to provide client SSL certificate bytes and
- private key bytes, both in PEM format. It is ignored if
- ``api_mtls_endpoint`` is None.
- ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, aio.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. 
- credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def create_dataset(self) -> Callable[ - [service.CreateDatasetRequest], - Awaitable[gca_dataset.Dataset]]: - r"""Return a callable for the create dataset method over gRPC. - - Creates a dataset. - - Returns: - Callable[[~.CreateDatasetRequest], - Awaitable[~.Dataset]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_dataset' not in self._stubs: - self._stubs['create_dataset'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1beta1.AutoMl/CreateDataset', - request_serializer=service.CreateDatasetRequest.serialize, - response_deserializer=gca_dataset.Dataset.deserialize, - ) - return self._stubs['create_dataset'] - - @property - def get_dataset(self) -> Callable[ - [service.GetDatasetRequest], - Awaitable[dataset.Dataset]]: - r"""Return a callable for the get dataset method over gRPC. - - Gets a dataset. - - Returns: - Callable[[~.GetDatasetRequest], - Awaitable[~.Dataset]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_dataset' not in self._stubs: - self._stubs['get_dataset'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1beta1.AutoMl/GetDataset', - request_serializer=service.GetDatasetRequest.serialize, - response_deserializer=dataset.Dataset.deserialize, - ) - return self._stubs['get_dataset'] - - @property - def list_datasets(self) -> Callable[ - [service.ListDatasetsRequest], - Awaitable[service.ListDatasetsResponse]]: - r"""Return a callable for the list datasets method over gRPC. - - Lists datasets in a project. - - Returns: - Callable[[~.ListDatasetsRequest], - Awaitable[~.ListDatasetsResponse]]: - A function that, when called, will call the underlying RPC - on the server. 
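-
- Example (an illustrative sketch only: ``my-project`` is a
- placeholder, ``transport`` is assumed to be an instance of this
- class, and an asyncio event loop with default application
- credentials is assumed):
-
- .. code-block:: python
-
- request = service.ListDatasetsRequest(
- parent='projects/my-project/locations/us-central1',
- )
- response = await transport.list_datasets(request)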
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'list_datasets' not in self._stubs:
- self._stubs['list_datasets'] = self._logged_channel.unary_unary(
- '/google.cloud.automl.v1beta1.AutoMl/ListDatasets',
- request_serializer=service.ListDatasetsRequest.serialize,
- response_deserializer=service.ListDatasetsResponse.deserialize,
- )
- return self._stubs['list_datasets']
-
- @property
- def update_dataset(self) -> Callable[
- [service.UpdateDatasetRequest],
- Awaitable[gca_dataset.Dataset]]:
- r"""Return a callable for the update dataset method over gRPC.
-
- Updates a dataset.
-
- Returns:
- Callable[[~.UpdateDatasetRequest],
- Awaitable[~.Dataset]]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'update_dataset' not in self._stubs:
- self._stubs['update_dataset'] = self._logged_channel.unary_unary(
- '/google.cloud.automl.v1beta1.AutoMl/UpdateDataset',
- request_serializer=service.UpdateDatasetRequest.serialize,
- response_deserializer=gca_dataset.Dataset.deserialize,
- )
- return self._stubs['update_dataset']
-
- @property
- def delete_dataset(self) -> Callable[
- [service.DeleteDatasetRequest],
- Awaitable[operations_pb2.Operation]]:
- r"""Return a callable for the delete dataset method over gRPC.
-
- Deletes a dataset and all of its contents. Returns an empty
- response in the
- [response][google.longrunning.Operation.response] field when it
- completes, and ``delete_details`` in the
- [metadata][google.longrunning.Operation.metadata] field.
-
- Returns:
- Callable[[~.DeleteDatasetRequest],
- Awaitable[~.Operation]]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'delete_dataset' not in self._stubs:
- self._stubs['delete_dataset'] = self._logged_channel.unary_unary(
- '/google.cloud.automl.v1beta1.AutoMl/DeleteDataset',
- request_serializer=service.DeleteDatasetRequest.serialize,
- response_deserializer=operations_pb2.Operation.FromString,
- )
- return self._stubs['delete_dataset']
-
- @property
- def import_data(self) -> Callable[
- [service.ImportDataRequest],
- Awaitable[operations_pb2.Operation]]:
- r"""Return a callable for the import data method over gRPC.
-
- Imports data into a dataset. For Tables, this method can only be
- called on an empty Dataset.
-
- For Tables:
-
- - A
- [schema_inference_version][google.cloud.automl.v1beta1.InputConfig.params]
- parameter must be explicitly set. Returns an empty response
- in the [response][google.longrunning.Operation.response]
- field when it completes.
-
- Returns:
- Callable[[~.ImportDataRequest],
- Awaitable[~.Operation]]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
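- # On this asyncio transport the returned callable yields an Awaitable,
- # so it must be driven from an event loop. A sketch (illustrative
- # only; the dataset path is a placeholder and a real request would
- # also carry an input_config):
- #
- #   import asyncio
- #
- #   async def main():
- #       transport = AutoMlGrpcAsyncIOTransport()
- #       operation = await transport.import_data(service.ImportDataRequest(
- #           name='projects/my-project/locations/us-central1/datasets/my-dataset',
- #       ))
- #
- #   asyncio.run(main())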
- if 'import_data' not in self._stubs:
- self._stubs['import_data'] = self._logged_channel.unary_unary(
- '/google.cloud.automl.v1beta1.AutoMl/ImportData',
- request_serializer=service.ImportDataRequest.serialize,
- response_deserializer=operations_pb2.Operation.FromString,
- )
- return self._stubs['import_data']
-
- @property
- def export_data(self) -> Callable[
- [service.ExportDataRequest],
- Awaitable[operations_pb2.Operation]]:
- r"""Return a callable for the export data method over gRPC.
-
- Exports a dataset's data to the provided output location. Returns
- an empty response in the
- [response][google.longrunning.Operation.response] field when it
- completes.
-
- Returns:
- Callable[[~.ExportDataRequest],
- Awaitable[~.Operation]]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'export_data' not in self._stubs:
- self._stubs['export_data'] = self._logged_channel.unary_unary(
- '/google.cloud.automl.v1beta1.AutoMl/ExportData',
- request_serializer=service.ExportDataRequest.serialize,
- response_deserializer=operations_pb2.Operation.FromString,
- )
- return self._stubs['export_data']
-
- @property
- def get_annotation_spec(self) -> Callable[
- [service.GetAnnotationSpecRequest],
- Awaitable[annotation_spec.AnnotationSpec]]:
- r"""Return a callable for the get annotation spec method over gRPC.
-
- Gets an annotation spec.
-
- Returns:
- Callable[[~.GetAnnotationSpecRequest],
- Awaitable[~.AnnotationSpec]]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'get_annotation_spec' not in self._stubs:
- self._stubs['get_annotation_spec'] = self._logged_channel.unary_unary(
- '/google.cloud.automl.v1beta1.AutoMl/GetAnnotationSpec',
- request_serializer=service.GetAnnotationSpecRequest.serialize,
- response_deserializer=annotation_spec.AnnotationSpec.deserialize,
- )
- return self._stubs['get_annotation_spec']
-
- @property
- def get_table_spec(self) -> Callable[
- [service.GetTableSpecRequest],
- Awaitable[table_spec.TableSpec]]:
- r"""Return a callable for the get table spec method over gRPC.
-
- Gets a table spec.
-
- Returns:
- Callable[[~.GetTableSpecRequest],
- Awaitable[~.TableSpec]]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'get_table_spec' not in self._stubs:
- self._stubs['get_table_spec'] = self._logged_channel.unary_unary(
- '/google.cloud.automl.v1beta1.AutoMl/GetTableSpec',
- request_serializer=service.GetTableSpecRequest.serialize,
- response_deserializer=table_spec.TableSpec.deserialize,
- )
- return self._stubs['get_table_spec']
-
- @property
- def list_table_specs(self) -> Callable[
- [service.ListTableSpecsRequest],
- Awaitable[service.ListTableSpecsResponse]]:
- r"""Return a callable for the list table specs method over gRPC.
-
- Lists table specs in a dataset.
- - Returns: - Callable[[~.ListTableSpecsRequest], - Awaitable[~.ListTableSpecsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_table_specs' not in self._stubs: - self._stubs['list_table_specs'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1beta1.AutoMl/ListTableSpecs', - request_serializer=service.ListTableSpecsRequest.serialize, - response_deserializer=service.ListTableSpecsResponse.deserialize, - ) - return self._stubs['list_table_specs'] - - @property - def update_table_spec(self) -> Callable[ - [service.UpdateTableSpecRequest], - Awaitable[gca_table_spec.TableSpec]]: - r"""Return a callable for the update table spec method over gRPC. - - Updates a table spec. - - Returns: - Callable[[~.UpdateTableSpecRequest], - Awaitable[~.TableSpec]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_table_spec' not in self._stubs: - self._stubs['update_table_spec'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1beta1.AutoMl/UpdateTableSpec', - request_serializer=service.UpdateTableSpecRequest.serialize, - response_deserializer=gca_table_spec.TableSpec.deserialize, - ) - return self._stubs['update_table_spec'] - - @property - def get_column_spec(self) -> Callable[ - [service.GetColumnSpecRequest], - Awaitable[column_spec.ColumnSpec]]: - r"""Return a callable for the get column spec method over gRPC. - - Gets a column spec. - - Returns: - Callable[[~.GetColumnSpecRequest], - Awaitable[~.ColumnSpec]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_column_spec' not in self._stubs: - self._stubs['get_column_spec'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1beta1.AutoMl/GetColumnSpec', - request_serializer=service.GetColumnSpecRequest.serialize, - response_deserializer=column_spec.ColumnSpec.deserialize, - ) - return self._stubs['get_column_spec'] - - @property - def list_column_specs(self) -> Callable[ - [service.ListColumnSpecsRequest], - Awaitable[service.ListColumnSpecsResponse]]: - r"""Return a callable for the list column specs method over gRPC. - - Lists column specs in a table spec. - - Returns: - Callable[[~.ListColumnSpecsRequest], - Awaitable[~.ListColumnSpecsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_column_specs' not in self._stubs: - self._stubs['list_column_specs'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1beta1.AutoMl/ListColumnSpecs', - request_serializer=service.ListColumnSpecsRequest.serialize, - response_deserializer=service.ListColumnSpecsResponse.deserialize, - ) - return self._stubs['list_column_specs'] - - @property - def update_column_spec(self) -> Callable[ - [service.UpdateColumnSpecRequest], - Awaitable[gca_column_spec.ColumnSpec]]: - r"""Return a callable for the update column spec method over gRPC. - - Updates a column spec. - - Returns: - Callable[[~.UpdateColumnSpecRequest], - Awaitable[~.ColumnSpec]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_column_spec' not in self._stubs: - self._stubs['update_column_spec'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1beta1.AutoMl/UpdateColumnSpec', - request_serializer=service.UpdateColumnSpecRequest.serialize, - response_deserializer=gca_column_spec.ColumnSpec.deserialize, - ) - return self._stubs['update_column_spec'] - - @property - def create_model(self) -> Callable[ - [service.CreateModelRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create model method over gRPC. - - Creates a model. Returns a Model in the - [response][google.longrunning.Operation.response] field when it - completes. When you create a model, several model evaluations - are created for it: a global evaluation, and one evaluation for - each annotation spec. - - Returns: - Callable[[~.CreateModelRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_model' not in self._stubs: - self._stubs['create_model'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1beta1.AutoMl/CreateModel', - request_serializer=service.CreateModelRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_model'] - - @property - def get_model(self) -> Callable[ - [service.GetModelRequest], - Awaitable[model.Model]]: - r"""Return a callable for the get model method over gRPC. - - Gets a model. - - Returns: - Callable[[~.GetModelRequest], - Awaitable[~.Model]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_model' not in self._stubs: - self._stubs['get_model'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1beta1.AutoMl/GetModel', - request_serializer=service.GetModelRequest.serialize, - response_deserializer=model.Model.deserialize, - ) - return self._stubs['get_model'] - - @property - def list_models(self) -> Callable[ - [service.ListModelsRequest], - Awaitable[service.ListModelsResponse]]: - r"""Return a callable for the list models method over gRPC. - - Lists models. 
-
- Returns:
- Callable[[~.ListModelsRequest],
- Awaitable[~.ListModelsResponse]]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'list_models' not in self._stubs:
- self._stubs['list_models'] = self._logged_channel.unary_unary(
- '/google.cloud.automl.v1beta1.AutoMl/ListModels',
- request_serializer=service.ListModelsRequest.serialize,
- response_deserializer=service.ListModelsResponse.deserialize,
- )
- return self._stubs['list_models']
-
- @property
- def delete_model(self) -> Callable[
- [service.DeleteModelRequest],
- Awaitable[operations_pb2.Operation]]:
- r"""Return a callable for the delete model method over gRPC.
-
- Deletes a model. Returns ``google.protobuf.Empty`` in the
- [response][google.longrunning.Operation.response] field when it
- completes, and ``delete_details`` in the
- [metadata][google.longrunning.Operation.metadata] field.
-
- Returns:
- Callable[[~.DeleteModelRequest],
- Awaitable[~.Operation]]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'delete_model' not in self._stubs:
- self._stubs['delete_model'] = self._logged_channel.unary_unary(
- '/google.cloud.automl.v1beta1.AutoMl/DeleteModel',
- request_serializer=service.DeleteModelRequest.serialize,
- response_deserializer=operations_pb2.Operation.FromString,
- )
- return self._stubs['delete_model']
-
- @property
- def deploy_model(self) -> Callable[
- [service.DeployModelRequest],
- Awaitable[operations_pb2.Operation]]:
- r"""Return a callable for the deploy model method over gRPC.
-
- Deploys a model. If a model is already deployed, deploying it
- with the same parameters has no effect. Deploying with different
- parameters (e.g. changing
-
- [node_number][google.cloud.automl.v1beta1.ImageObjectDetectionModelDeploymentMetadata.node_number])
- will reset the deployment state without pausing the model's
- availability.
-
- Only applicable for Text Classification, Image Object Detection,
- Tables, and Image Segmentation; all other domains manage
- deployment automatically.
-
- Returns an empty response in the
- [response][google.longrunning.Operation.response] field when it
- completes.
-
- Returns:
- Callable[[~.DeployModelRequest],
- Awaitable[~.Operation]]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'deploy_model' not in self._stubs:
- self._stubs['deploy_model'] = self._logged_channel.unary_unary(
- '/google.cloud.automl.v1beta1.AutoMl/DeployModel',
- request_serializer=service.DeployModelRequest.serialize,
- response_deserializer=operations_pb2.Operation.FromString,
- )
- return self._stubs['deploy_model']
-
- @property
- def undeploy_model(self) -> Callable[
- [service.UndeployModelRequest],
- Awaitable[operations_pb2.Operation]]:
- r"""Return a callable for the undeploy model method over gRPC.
-
- Undeploys a model. If the model is not deployed, this method has
- no effect.
-
- Only applicable for Text Classification, Image Object Detection
- and Tables; all other domains manage deployment automatically.
-
- Returns an empty response in the
- [response][google.longrunning.Operation.response] field when it
- completes.
-
- Returns:
- Callable[[~.UndeployModelRequest],
- Awaitable[~.Operation]]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'undeploy_model' not in self._stubs:
- self._stubs['undeploy_model'] = self._logged_channel.unary_unary(
- '/google.cloud.automl.v1beta1.AutoMl/UndeployModel',
- request_serializer=service.UndeployModelRequest.serialize,
- response_deserializer=operations_pb2.Operation.FromString,
- )
- return self._stubs['undeploy_model']
-
- @property
- def export_model(self) -> Callable[
- [service.ExportModelRequest],
- Awaitable[operations_pb2.Operation]]:
- r"""Return a callable for the export model method over gRPC.
-
- Exports a trained, "export-able" model to a user-specified
- Google Cloud Storage location. A model is considered export-able
- if and only if it has an export format defined for it in
-
- [ModelExportOutputConfig][google.cloud.automl.v1beta1.ModelExportOutputConfig].
-
- Returns an empty response in the
- [response][google.longrunning.Operation.response] field when it
- completes.
-
- Returns:
- Callable[[~.ExportModelRequest],
- Awaitable[~.Operation]]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'export_model' not in self._stubs:
- self._stubs['export_model'] = self._logged_channel.unary_unary(
- '/google.cloud.automl.v1beta1.AutoMl/ExportModel',
- request_serializer=service.ExportModelRequest.serialize,
- response_deserializer=operations_pb2.Operation.FromString,
- )
- return self._stubs['export_model']
-
- @property
- def export_evaluated_examples(self) -> Callable[
- [service.ExportEvaluatedExamplesRequest],
- Awaitable[operations_pb2.Operation]]:
- r"""Return a callable for the export evaluated examples method over gRPC.
-
- Exports examples on which the model was evaluated (i.e. which
- were in the TEST set of the dataset the model was created from),
- together with their ground truth annotations and the annotations
- created (predicted) by the model. The examples, ground truth and
- predictions are exported in the state they were in at the moment
- the model was evaluated.
-
- This export is available only for 30 days after the model
- evaluation is created.
-
- Currently only available for Tables.
-
- Returns an empty response in the
- [response][google.longrunning.Operation.response] field when it
- completes.
-
- Returns:
- Callable[[~.ExportEvaluatedExamplesRequest],
- Awaitable[~.Operation]]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'export_evaluated_examples' not in self._stubs: - self._stubs['export_evaluated_examples'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1beta1.AutoMl/ExportEvaluatedExamples', - request_serializer=service.ExportEvaluatedExamplesRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['export_evaluated_examples'] - - @property - def get_model_evaluation(self) -> Callable[ - [service.GetModelEvaluationRequest], - Awaitable[model_evaluation.ModelEvaluation]]: - r"""Return a callable for the get model evaluation method over gRPC. - - Gets a model evaluation. - - Returns: - Callable[[~.GetModelEvaluationRequest], - Awaitable[~.ModelEvaluation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_model_evaluation' not in self._stubs: - self._stubs['get_model_evaluation'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1beta1.AutoMl/GetModelEvaluation', - request_serializer=service.GetModelEvaluationRequest.serialize, - response_deserializer=model_evaluation.ModelEvaluation.deserialize, - ) - return self._stubs['get_model_evaluation'] - - @property - def list_model_evaluations(self) -> Callable[ - [service.ListModelEvaluationsRequest], - Awaitable[service.ListModelEvaluationsResponse]]: - r"""Return a callable for the list model evaluations method over gRPC. - - Lists model evaluations. - - Returns: - Callable[[~.ListModelEvaluationsRequest], - Awaitable[~.ListModelEvaluationsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
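- # Unlike the synchronous transport, the raw stubs returned by these
- # properties are additionally wrapped with the AsyncRetry and timeout
- # policies configured in _prep_wrapped_messages() below, so calls made
- # through the client inherit that retry behaviour.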
- if 'list_model_evaluations' not in self._stubs: - self._stubs['list_model_evaluations'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1beta1.AutoMl/ListModelEvaluations', - request_serializer=service.ListModelEvaluationsRequest.serialize, - response_deserializer=service.ListModelEvaluationsResponse.deserialize, - ) - return self._stubs['list_model_evaluations'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.create_dataset: self._wrap_method( - self.create_dataset, - default_timeout=5.0, - client_info=client_info, - ), - self.get_dataset: self._wrap_method( - self.get_dataset, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=5.0, - ), - default_timeout=5.0, - client_info=client_info, - ), - self.list_datasets: self._wrap_method( - self.list_datasets, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=5.0, - ), - default_timeout=5.0, - client_info=client_info, - ), - self.update_dataset: self._wrap_method( - self.update_dataset, - default_timeout=5.0, - client_info=client_info, - ), - self.delete_dataset: self._wrap_method( - self.delete_dataset, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=5.0, - ), - default_timeout=5.0, - client_info=client_info, - ), - self.import_data: self._wrap_method( - self.import_data, - default_timeout=5.0, - client_info=client_info, - ), - self.export_data: self._wrap_method( - self.export_data, - default_timeout=5.0, - client_info=client_info, - ), - self.get_annotation_spec: self._wrap_method( - self.get_annotation_spec, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=5.0, - ), - default_timeout=5.0, - client_info=client_info, - ), - self.get_table_spec: self._wrap_method( - self.get_table_spec, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=5.0, - ), - default_timeout=5.0, - client_info=client_info, - ), - self.list_table_specs: self._wrap_method( - self.list_table_specs, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=5.0, - ), - default_timeout=5.0, - client_info=client_info, - ), - self.update_table_spec: self._wrap_method( - self.update_table_spec, - default_timeout=5.0, - client_info=client_info, - ), - self.get_column_spec: self._wrap_method( - self.get_column_spec, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=5.0, - ), - default_timeout=5.0, - 
client_info=client_info, - ), - self.list_column_specs: self._wrap_method( - self.list_column_specs, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=5.0, - ), - default_timeout=5.0, - client_info=client_info, - ), - self.update_column_spec: self._wrap_method( - self.update_column_spec, - default_timeout=5.0, - client_info=client_info, - ), - self.create_model: self._wrap_method( - self.create_model, - default_timeout=5.0, - client_info=client_info, - ), - self.get_model: self._wrap_method( - self.get_model, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=5.0, - ), - default_timeout=5.0, - client_info=client_info, - ), - self.list_models: self._wrap_method( - self.list_models, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=5.0, - ), - default_timeout=5.0, - client_info=client_info, - ), - self.delete_model: self._wrap_method( - self.delete_model, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=5.0, - ), - default_timeout=5.0, - client_info=client_info, - ), - self.deploy_model: self._wrap_method( - self.deploy_model, - default_timeout=5.0, - client_info=client_info, - ), - self.undeploy_model: self._wrap_method( - self.undeploy_model, - default_timeout=5.0, - client_info=client_info, - ), - self.export_model: self._wrap_method( - self.export_model, - default_timeout=5.0, - client_info=client_info, - ), - self.export_evaluated_examples: self._wrap_method( - self.export_evaluated_examples, - default_timeout=5.0, - client_info=client_info, - ), - self.get_model_evaluation: self._wrap_method( - self.get_model_evaluation, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=5.0, - ), - default_timeout=5.0, - client_info=client_info, - ), - self.list_model_evaluations: self._wrap_method( - self.list_model_evaluations, - default_timeout=5.0, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - -__all__ = ( - 'AutoMlGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/auto_ml/transports/rest.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/auto_ml/transports/rest.py deleted file mode 100644 index 569138ccafe4..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/auto_ml/transports/rest.py +++ /dev/null @@ -1,4487 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# 
you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import logging -import json # type: ignore - -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.api_core import operations_v1 - -from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - - -from google.cloud.automl_v1beta1.types import annotation_spec -from google.cloud.automl_v1beta1.types import column_spec -from google.cloud.automl_v1beta1.types import column_spec as gca_column_spec -from google.cloud.automl_v1beta1.types import dataset -from google.cloud.automl_v1beta1.types import dataset as gca_dataset -from google.cloud.automl_v1beta1.types import model -from google.cloud.automl_v1beta1.types import model_evaluation -from google.cloud.automl_v1beta1.types import service -from google.cloud.automl_v1beta1.types import table_spec -from google.cloud.automl_v1beta1.types import table_spec as gca_table_spec -from google.longrunning import operations_pb2 # type: ignore - - -from .rest_base import _BaseAutoMlRestTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = logging.getLogger(__name__) - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=f"requests@{requests_version}", -) - - -class AutoMlRestInterceptor: - """Interceptor for AutoMl. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the AutoMlRestTransport. - - .. 
code-block:: python - class MyCustomAutoMlInterceptor(AutoMlRestInterceptor): - def pre_create_dataset(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_dataset(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_model(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_model(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_dataset(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_delete_dataset(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_model(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_delete_model(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_deploy_model(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_deploy_model(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_export_data(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_export_data(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_export_evaluated_examples(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_export_evaluated_examples(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_export_model(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_export_model(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_annotation_spec(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_annotation_spec(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_column_spec(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_column_spec(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_dataset(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_dataset(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_model(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_model(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_model_evaluation(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_model_evaluation(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_table_spec(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_table_spec(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_import_data(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def 
post_import_data(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_column_specs(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_column_specs(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_datasets(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_datasets(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_model_evaluations(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_model_evaluations(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_models(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_models(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_table_specs(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_table_specs(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_undeploy_model(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_undeploy_model(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_column_spec(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_column_spec(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_dataset(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_dataset(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_table_spec(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_table_spec(self, response): - logging.log(f"Received response: {response}") - return response - - transport = AutoMlRestTransport(interceptor=MyCustomAutoMlInterceptor()) - client = AutoMlClient(transport=transport) - - - """ - def pre_create_dataset(self, request: service.CreateDatasetRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.CreateDatasetRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_dataset - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoMl server. - """ - return request, metadata - - def post_create_dataset(self, response: gca_dataset.Dataset) -> gca_dataset.Dataset: - """Post-rpc interceptor for create_dataset - - DEPRECATED. Please use the `post_create_dataset_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AutoMl server but before - it is returned to user code. This `post_create_dataset` interceptor runs - before the `post_create_dataset_with_metadata` interceptor. 
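As a hedged sketch of the pattern the interceptor docstring above demonstrates — import paths are assumed from this staged module layout, and anonymous credentials are used only so the snippet runs offline — a custom interceptor can be attached to the REST transport like so:

```python
# Sketch only: subclass the generated interceptor and wire it into the REST
# transport. The *_with_metadata hook is used because the plain post hooks
# are marked DEPRECATED in the docstrings that follow.
import logging

from google.auth.credentials import AnonymousCredentials
from google.cloud.automl_v1beta1.services.auto_ml import AutoMlClient
from google.cloud.automl_v1beta1.services.auto_ml.transports.rest import (
    AutoMlRestInterceptor,
    AutoMlRestTransport,
)


class LoggingInterceptor(AutoMlRestInterceptor):
    def pre_create_dataset(self, request, metadata):
        # Runs before the HTTP request is built; may rewrite request/metadata.
        logging.info("CreateDataset request: %s", request)
        return request, metadata

    def post_create_dataset_with_metadata(self, response, metadata):
        # Runs after the response is decoded; metadata carries the HTTP
        # response headers as (key, value) pairs.
        logging.info("CreateDataset response headers: %s", metadata)
        return response, metadata


transport = AutoMlRestTransport(
    credentials=AnonymousCredentials(),  # offline demo only
    interceptor=LoggingInterceptor(),
)
client = AutoMlClient(transport=transport)
```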
- """ - return response - - def post_create_dataset_with_metadata(self, response: gca_dataset.Dataset, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gca_dataset.Dataset, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_dataset - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AutoMl server but before it is returned to user code. - - We recommend only using this `post_create_dataset_with_metadata` - interceptor in new development instead of the `post_create_dataset` interceptor. - When both interceptors are used, this `post_create_dataset_with_metadata` interceptor runs after the - `post_create_dataset` interceptor. The (possibly modified) response returned by - `post_create_dataset` will be passed to - `post_create_dataset_with_metadata`. - """ - return response, metadata - - def pre_create_model(self, request: service.CreateModelRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.CreateModelRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_model - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoMl server. - """ - return request, metadata - - def post_create_model(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_model - - DEPRECATED. Please use the `post_create_model_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AutoMl server but before - it is returned to user code. This `post_create_model` interceptor runs - before the `post_create_model_with_metadata` interceptor. - """ - return response - - def post_create_model_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_model - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AutoMl server but before it is returned to user code. - - We recommend only using this `post_create_model_with_metadata` - interceptor in new development instead of the `post_create_model` interceptor. - When both interceptors are used, this `post_create_model_with_metadata` interceptor runs after the - `post_create_model` interceptor. The (possibly modified) response returned by - `post_create_model` will be passed to - `post_create_model_with_metadata`. - """ - return response, metadata - - def pre_delete_dataset(self, request: service.DeleteDatasetRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.DeleteDatasetRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_dataset - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoMl server. - """ - return request, metadata - - def post_delete_dataset(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_dataset - - DEPRECATED. Please use the `post_delete_dataset_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AutoMl server but before - it is returned to user code. This `post_delete_dataset` interceptor runs - before the `post_delete_dataset_with_metadata` interceptor. 
- """ - return response - - def post_delete_dataset_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for delete_dataset - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AutoMl server but before it is returned to user code. - - We recommend only using this `post_delete_dataset_with_metadata` - interceptor in new development instead of the `post_delete_dataset` interceptor. - When both interceptors are used, this `post_delete_dataset_with_metadata` interceptor runs after the - `post_delete_dataset` interceptor. The (possibly modified) response returned by - `post_delete_dataset` will be passed to - `post_delete_dataset_with_metadata`. - """ - return response, metadata - - def pre_delete_model(self, request: service.DeleteModelRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.DeleteModelRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_model - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoMl server. - """ - return request, metadata - - def post_delete_model(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_model - - DEPRECATED. Please use the `post_delete_model_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AutoMl server but before - it is returned to user code. This `post_delete_model` interceptor runs - before the `post_delete_model_with_metadata` interceptor. - """ - return response - - def post_delete_model_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for delete_model - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AutoMl server but before it is returned to user code. - - We recommend only using this `post_delete_model_with_metadata` - interceptor in new development instead of the `post_delete_model` interceptor. - When both interceptors are used, this `post_delete_model_with_metadata` interceptor runs after the - `post_delete_model` interceptor. The (possibly modified) response returned by - `post_delete_model` will be passed to - `post_delete_model_with_metadata`. - """ - return response, metadata - - def pre_deploy_model(self, request: service.DeployModelRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.DeployModelRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for deploy_model - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoMl server. - """ - return request, metadata - - def post_deploy_model(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for deploy_model - - DEPRECATED. Please use the `post_deploy_model_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AutoMl server but before - it is returned to user code. This `post_deploy_model` interceptor runs - before the `post_deploy_model_with_metadata` interceptor. 
- """ - return response - - def post_deploy_model_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for deploy_model - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AutoMl server but before it is returned to user code. - - We recommend only using this `post_deploy_model_with_metadata` - interceptor in new development instead of the `post_deploy_model` interceptor. - When both interceptors are used, this `post_deploy_model_with_metadata` interceptor runs after the - `post_deploy_model` interceptor. The (possibly modified) response returned by - `post_deploy_model` will be passed to - `post_deploy_model_with_metadata`. - """ - return response, metadata - - def pre_export_data(self, request: service.ExportDataRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ExportDataRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for export_data - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoMl server. - """ - return request, metadata - - def post_export_data(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for export_data - - DEPRECATED. Please use the `post_export_data_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AutoMl server but before - it is returned to user code. This `post_export_data` interceptor runs - before the `post_export_data_with_metadata` interceptor. - """ - return response - - def post_export_data_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for export_data - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AutoMl server but before it is returned to user code. - - We recommend only using this `post_export_data_with_metadata` - interceptor in new development instead of the `post_export_data` interceptor. - When both interceptors are used, this `post_export_data_with_metadata` interceptor runs after the - `post_export_data` interceptor. The (possibly modified) response returned by - `post_export_data` will be passed to - `post_export_data_with_metadata`. - """ - return response, metadata - - def pre_export_evaluated_examples(self, request: service.ExportEvaluatedExamplesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ExportEvaluatedExamplesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for export_evaluated_examples - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoMl server. - """ - return request, metadata - - def post_export_evaluated_examples(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for export_evaluated_examples - - DEPRECATED. Please use the `post_export_evaluated_examples_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AutoMl server but before - it is returned to user code. 
This `post_export_evaluated_examples` interceptor runs - before the `post_export_evaluated_examples_with_metadata` interceptor. - """ - return response - - def post_export_evaluated_examples_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for export_evaluated_examples - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AutoMl server but before it is returned to user code. - - We recommend only using this `post_export_evaluated_examples_with_metadata` - interceptor in new development instead of the `post_export_evaluated_examples` interceptor. - When both interceptors are used, this `post_export_evaluated_examples_with_metadata` interceptor runs after the - `post_export_evaluated_examples` interceptor. The (possibly modified) response returned by - `post_export_evaluated_examples` will be passed to - `post_export_evaluated_examples_with_metadata`. - """ - return response, metadata - - def pre_export_model(self, request: service.ExportModelRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ExportModelRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for export_model - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoMl server. - """ - return request, metadata - - def post_export_model(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for export_model - - DEPRECATED. Please use the `post_export_model_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AutoMl server but before - it is returned to user code. This `post_export_model` interceptor runs - before the `post_export_model_with_metadata` interceptor. - """ - return response - - def post_export_model_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for export_model - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AutoMl server but before it is returned to user code. - - We recommend only using this `post_export_model_with_metadata` - interceptor in new development instead of the `post_export_model` interceptor. - When both interceptors are used, this `post_export_model_with_metadata` interceptor runs after the - `post_export_model` interceptor. The (possibly modified) response returned by - `post_export_model` will be passed to - `post_export_model_with_metadata`. - """ - return response, metadata - - def pre_get_annotation_spec(self, request: service.GetAnnotationSpecRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.GetAnnotationSpecRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_annotation_spec - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoMl server. - """ - return request, metadata - - def post_get_annotation_spec(self, response: annotation_spec.AnnotationSpec) -> annotation_spec.AnnotationSpec: - """Post-rpc interceptor for get_annotation_spec - - DEPRECATED. Please use the `post_get_annotation_spec_with_metadata` - interceptor instead. 
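The deprecation notes above pin down an ordering contract: the legacy post hook runs first, and its return value is fed to the `*_with_metadata` hook, whose `(response, metadata)` tuple is what the caller ultimately sees. A dependency-free toy (hypothetical string payloads, not AutoML types) makes the chaining concrete:

```python
# Toy illustration of the hook-chaining contract described above.
from typing import List, Tuple


class Interceptor:
    def post_get_annotation_spec(self, response: str) -> str:
        # Legacy hook: runs first.
        return response.upper()

    def post_get_annotation_spec_with_metadata(
        self, response: str, metadata: List[Tuple[str, str]]
    ) -> Tuple[str, List[Tuple[str, str]]]:
        # Newer hook: receives the legacy hook's (possibly modified) output.
        return response + "!", metadata


interceptor = Interceptor()
resp = interceptor.post_get_annotation_spec("spec")
resp, _ = interceptor.post_get_annotation_spec_with_metadata(resp, [("k", "v")])
assert resp == "SPEC!"  # uppercased by the first hook, suffixed by the second
```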
- - Override in a subclass to read or manipulate the response - after it is returned by the AutoMl server but before - it is returned to user code. This `post_get_annotation_spec` interceptor runs - before the `post_get_annotation_spec_with_metadata` interceptor. - """ - return response - - def post_get_annotation_spec_with_metadata(self, response: annotation_spec.AnnotationSpec, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[annotation_spec.AnnotationSpec, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_annotation_spec - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AutoMl server but before it is returned to user code. - - We recommend only using this `post_get_annotation_spec_with_metadata` - interceptor in new development instead of the `post_get_annotation_spec` interceptor. - When both interceptors are used, this `post_get_annotation_spec_with_metadata` interceptor runs after the - `post_get_annotation_spec` interceptor. The (possibly modified) response returned by - `post_get_annotation_spec` will be passed to - `post_get_annotation_spec_with_metadata`. - """ - return response, metadata - - def pre_get_column_spec(self, request: service.GetColumnSpecRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.GetColumnSpecRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_column_spec - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoMl server. - """ - return request, metadata - - def post_get_column_spec(self, response: column_spec.ColumnSpec) -> column_spec.ColumnSpec: - """Post-rpc interceptor for get_column_spec - - DEPRECATED. Please use the `post_get_column_spec_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AutoMl server but before - it is returned to user code. This `post_get_column_spec` interceptor runs - before the `post_get_column_spec_with_metadata` interceptor. - """ - return response - - def post_get_column_spec_with_metadata(self, response: column_spec.ColumnSpec, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[column_spec.ColumnSpec, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_column_spec - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AutoMl server but before it is returned to user code. - - We recommend only using this `post_get_column_spec_with_metadata` - interceptor in new development instead of the `post_get_column_spec` interceptor. - When both interceptors are used, this `post_get_column_spec_with_metadata` interceptor runs after the - `post_get_column_spec` interceptor. The (possibly modified) response returned by - `post_get_column_spec` will be passed to - `post_get_column_spec_with_metadata`. - """ - return response, metadata - - def pre_get_dataset(self, request: service.GetDatasetRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.GetDatasetRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_dataset - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoMl server. - """ - return request, metadata - - def post_get_dataset(self, response: dataset.Dataset) -> dataset.Dataset: - """Post-rpc interceptor for get_dataset - - DEPRECATED. 
Please use the `post_get_dataset_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AutoMl server but before - it is returned to user code. This `post_get_dataset` interceptor runs - before the `post_get_dataset_with_metadata` interceptor. - """ - return response - - def post_get_dataset_with_metadata(self, response: dataset.Dataset, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[dataset.Dataset, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_dataset - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AutoMl server but before it is returned to user code. - - We recommend only using this `post_get_dataset_with_metadata` - interceptor in new development instead of the `post_get_dataset` interceptor. - When both interceptors are used, this `post_get_dataset_with_metadata` interceptor runs after the - `post_get_dataset` interceptor. The (possibly modified) response returned by - `post_get_dataset` will be passed to - `post_get_dataset_with_metadata`. - """ - return response, metadata - - def pre_get_model(self, request: service.GetModelRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.GetModelRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_model - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoMl server. - """ - return request, metadata - - def post_get_model(self, response: model.Model) -> model.Model: - """Post-rpc interceptor for get_model - - DEPRECATED. Please use the `post_get_model_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AutoMl server but before - it is returned to user code. This `post_get_model` interceptor runs - before the `post_get_model_with_metadata` interceptor. - """ - return response - - def post_get_model_with_metadata(self, response: model.Model, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[model.Model, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_model - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AutoMl server but before it is returned to user code. - - We recommend only using this `post_get_model_with_metadata` - interceptor in new development instead of the `post_get_model` interceptor. - When both interceptors are used, this `post_get_model_with_metadata` interceptor runs after the - `post_get_model` interceptor. The (possibly modified) response returned by - `post_get_model` will be passed to - `post_get_model_with_metadata`. - """ - return response, metadata - - def pre_get_model_evaluation(self, request: service.GetModelEvaluationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.GetModelEvaluationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_model_evaluation - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoMl server. - """ - return request, metadata - - def post_get_model_evaluation(self, response: model_evaluation.ModelEvaluation) -> model_evaluation.ModelEvaluation: - """Post-rpc interceptor for get_model_evaluation - - DEPRECATED. Please use the `post_get_model_evaluation_with_metadata` - interceptor instead. 
- - Override in a subclass to read or manipulate the response - after it is returned by the AutoMl server but before - it is returned to user code. This `post_get_model_evaluation` interceptor runs - before the `post_get_model_evaluation_with_metadata` interceptor. - """ - return response - - def post_get_model_evaluation_with_metadata(self, response: model_evaluation.ModelEvaluation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[model_evaluation.ModelEvaluation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_model_evaluation - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AutoMl server but before it is returned to user code. - - We recommend only using this `post_get_model_evaluation_with_metadata` - interceptor in new development instead of the `post_get_model_evaluation` interceptor. - When both interceptors are used, this `post_get_model_evaluation_with_metadata` interceptor runs after the - `post_get_model_evaluation` interceptor. The (possibly modified) response returned by - `post_get_model_evaluation` will be passed to - `post_get_model_evaluation_with_metadata`. - """ - return response, metadata - - def pre_get_table_spec(self, request: service.GetTableSpecRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.GetTableSpecRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_table_spec - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoMl server. - """ - return request, metadata - - def post_get_table_spec(self, response: table_spec.TableSpec) -> table_spec.TableSpec: - """Post-rpc interceptor for get_table_spec - - DEPRECATED. Please use the `post_get_table_spec_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AutoMl server but before - it is returned to user code. This `post_get_table_spec` interceptor runs - before the `post_get_table_spec_with_metadata` interceptor. - """ - return response - - def post_get_table_spec_with_metadata(self, response: table_spec.TableSpec, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[table_spec.TableSpec, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_table_spec - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AutoMl server but before it is returned to user code. - - We recommend only using this `post_get_table_spec_with_metadata` - interceptor in new development instead of the `post_get_table_spec` interceptor. - When both interceptors are used, this `post_get_table_spec_with_metadata` interceptor runs after the - `post_get_table_spec` interceptor. The (possibly modified) response returned by - `post_get_table_spec` will be passed to - `post_get_table_spec_with_metadata`. - """ - return response, metadata - - def pre_import_data(self, request: service.ImportDataRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ImportDataRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for import_data - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoMl server. - """ - return request, metadata - - def post_import_data(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for import_data - - DEPRECATED. 
Please use the `post_import_data_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AutoMl server but before - it is returned to user code. This `post_import_data` interceptor runs - before the `post_import_data_with_metadata` interceptor. - """ - return response - - def post_import_data_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for import_data - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AutoMl server but before it is returned to user code. - - We recommend only using this `post_import_data_with_metadata` - interceptor in new development instead of the `post_import_data` interceptor. - When both interceptors are used, this `post_import_data_with_metadata` interceptor runs after the - `post_import_data` interceptor. The (possibly modified) response returned by - `post_import_data` will be passed to - `post_import_data_with_metadata`. - """ - return response, metadata - - def pre_list_column_specs(self, request: service.ListColumnSpecsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListColumnSpecsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_column_specs - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoMl server. - """ - return request, metadata - - def post_list_column_specs(self, response: service.ListColumnSpecsResponse) -> service.ListColumnSpecsResponse: - """Post-rpc interceptor for list_column_specs - - DEPRECATED. Please use the `post_list_column_specs_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AutoMl server but before - it is returned to user code. This `post_list_column_specs` interceptor runs - before the `post_list_column_specs_with_metadata` interceptor. - """ - return response - - def post_list_column_specs_with_metadata(self, response: service.ListColumnSpecsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListColumnSpecsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_column_specs - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AutoMl server but before it is returned to user code. - - We recommend only using this `post_list_column_specs_with_metadata` - interceptor in new development instead of the `post_list_column_specs` interceptor. - When both interceptors are used, this `post_list_column_specs_with_metadata` interceptor runs after the - `post_list_column_specs` interceptor. The (possibly modified) response returned by - `post_list_column_specs` will be passed to - `post_list_column_specs_with_metadata`. - """ - return response, metadata - - def pre_list_datasets(self, request: service.ListDatasetsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListDatasetsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_datasets - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoMl server. 
- """ - return request, metadata - - def post_list_datasets(self, response: service.ListDatasetsResponse) -> service.ListDatasetsResponse: - """Post-rpc interceptor for list_datasets - - DEPRECATED. Please use the `post_list_datasets_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AutoMl server but before - it is returned to user code. This `post_list_datasets` interceptor runs - before the `post_list_datasets_with_metadata` interceptor. - """ - return response - - def post_list_datasets_with_metadata(self, response: service.ListDatasetsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListDatasetsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_datasets - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AutoMl server but before it is returned to user code. - - We recommend only using this `post_list_datasets_with_metadata` - interceptor in new development instead of the `post_list_datasets` interceptor. - When both interceptors are used, this `post_list_datasets_with_metadata` interceptor runs after the - `post_list_datasets` interceptor. The (possibly modified) response returned by - `post_list_datasets` will be passed to - `post_list_datasets_with_metadata`. - """ - return response, metadata - - def pre_list_model_evaluations(self, request: service.ListModelEvaluationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListModelEvaluationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_model_evaluations - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoMl server. - """ - return request, metadata - - def post_list_model_evaluations(self, response: service.ListModelEvaluationsResponse) -> service.ListModelEvaluationsResponse: - """Post-rpc interceptor for list_model_evaluations - - DEPRECATED. Please use the `post_list_model_evaluations_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AutoMl server but before - it is returned to user code. This `post_list_model_evaluations` interceptor runs - before the `post_list_model_evaluations_with_metadata` interceptor. - """ - return response - - def post_list_model_evaluations_with_metadata(self, response: service.ListModelEvaluationsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListModelEvaluationsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_model_evaluations - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AutoMl server but before it is returned to user code. - - We recommend only using this `post_list_model_evaluations_with_metadata` - interceptor in new development instead of the `post_list_model_evaluations` interceptor. - When both interceptors are used, this `post_list_model_evaluations_with_metadata` interceptor runs after the - `post_list_model_evaluations` interceptor. The (possibly modified) response returned by - `post_list_model_evaluations` will be passed to - `post_list_model_evaluations_with_metadata`. 
- """ - return response, metadata - - def pre_list_models(self, request: service.ListModelsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListModelsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_models - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoMl server. - """ - return request, metadata - - def post_list_models(self, response: service.ListModelsResponse) -> service.ListModelsResponse: - """Post-rpc interceptor for list_models - - DEPRECATED. Please use the `post_list_models_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AutoMl server but before - it is returned to user code. This `post_list_models` interceptor runs - before the `post_list_models_with_metadata` interceptor. - """ - return response - - def post_list_models_with_metadata(self, response: service.ListModelsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListModelsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_models - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AutoMl server but before it is returned to user code. - - We recommend only using this `post_list_models_with_metadata` - interceptor in new development instead of the `post_list_models` interceptor. - When both interceptors are used, this `post_list_models_with_metadata` interceptor runs after the - `post_list_models` interceptor. The (possibly modified) response returned by - `post_list_models` will be passed to - `post_list_models_with_metadata`. - """ - return response, metadata - - def pre_list_table_specs(self, request: service.ListTableSpecsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListTableSpecsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_table_specs - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoMl server. - """ - return request, metadata - - def post_list_table_specs(self, response: service.ListTableSpecsResponse) -> service.ListTableSpecsResponse: - """Post-rpc interceptor for list_table_specs - - DEPRECATED. Please use the `post_list_table_specs_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AutoMl server but before - it is returned to user code. This `post_list_table_specs` interceptor runs - before the `post_list_table_specs_with_metadata` interceptor. - """ - return response - - def post_list_table_specs_with_metadata(self, response: service.ListTableSpecsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.ListTableSpecsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_table_specs - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AutoMl server but before it is returned to user code. - - We recommend only using this `post_list_table_specs_with_metadata` - interceptor in new development instead of the `post_list_table_specs` interceptor. - When both interceptors are used, this `post_list_table_specs_with_metadata` interceptor runs after the - `post_list_table_specs` interceptor. The (possibly modified) response returned by - `post_list_table_specs` will be passed to - `post_list_table_specs_with_metadata`. 
- """ - return response, metadata - - def pre_undeploy_model(self, request: service.UndeployModelRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.UndeployModelRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for undeploy_model - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoMl server. - """ - return request, metadata - - def post_undeploy_model(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for undeploy_model - - DEPRECATED. Please use the `post_undeploy_model_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AutoMl server but before - it is returned to user code. This `post_undeploy_model` interceptor runs - before the `post_undeploy_model_with_metadata` interceptor. - """ - return response - - def post_undeploy_model_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for undeploy_model - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AutoMl server but before it is returned to user code. - - We recommend only using this `post_undeploy_model_with_metadata` - interceptor in new development instead of the `post_undeploy_model` interceptor. - When both interceptors are used, this `post_undeploy_model_with_metadata` interceptor runs after the - `post_undeploy_model` interceptor. The (possibly modified) response returned by - `post_undeploy_model` will be passed to - `post_undeploy_model_with_metadata`. - """ - return response, metadata - - def pre_update_column_spec(self, request: service.UpdateColumnSpecRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.UpdateColumnSpecRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_column_spec - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoMl server. - """ - return request, metadata - - def post_update_column_spec(self, response: gca_column_spec.ColumnSpec) -> gca_column_spec.ColumnSpec: - """Post-rpc interceptor for update_column_spec - - DEPRECATED. Please use the `post_update_column_spec_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AutoMl server but before - it is returned to user code. This `post_update_column_spec` interceptor runs - before the `post_update_column_spec_with_metadata` interceptor. - """ - return response - - def post_update_column_spec_with_metadata(self, response: gca_column_spec.ColumnSpec, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gca_column_spec.ColumnSpec, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_column_spec - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AutoMl server but before it is returned to user code. - - We recommend only using this `post_update_column_spec_with_metadata` - interceptor in new development instead of the `post_update_column_spec` interceptor. - When both interceptors are used, this `post_update_column_spec_with_metadata` interceptor runs after the - `post_update_column_spec` interceptor. 
The (possibly modified) response returned by - `post_update_column_spec` will be passed to - `post_update_column_spec_with_metadata`. - """ - return response, metadata - - def pre_update_dataset(self, request: service.UpdateDatasetRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.UpdateDatasetRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_dataset - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoMl server. - """ - return request, metadata - - def post_update_dataset(self, response: gca_dataset.Dataset) -> gca_dataset.Dataset: - """Post-rpc interceptor for update_dataset - - DEPRECATED. Please use the `post_update_dataset_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AutoMl server but before - it is returned to user code. This `post_update_dataset` interceptor runs - before the `post_update_dataset_with_metadata` interceptor. - """ - return response - - def post_update_dataset_with_metadata(self, response: gca_dataset.Dataset, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gca_dataset.Dataset, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_dataset - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AutoMl server but before it is returned to user code. - - We recommend only using this `post_update_dataset_with_metadata` - interceptor in new development instead of the `post_update_dataset` interceptor. - When both interceptors are used, this `post_update_dataset_with_metadata` interceptor runs after the - `post_update_dataset` interceptor. The (possibly modified) response returned by - `post_update_dataset` will be passed to - `post_update_dataset_with_metadata`. - """ - return response, metadata - - def pre_update_table_spec(self, request: service.UpdateTableSpecRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[service.UpdateTableSpecRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_table_spec - - Override in a subclass to manipulate the request or metadata - before they are sent to the AutoMl server. - """ - return request, metadata - - def post_update_table_spec(self, response: gca_table_spec.TableSpec) -> gca_table_spec.TableSpec: - """Post-rpc interceptor for update_table_spec - - DEPRECATED. Please use the `post_update_table_spec_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AutoMl server but before - it is returned to user code. This `post_update_table_spec` interceptor runs - before the `post_update_table_spec_with_metadata` interceptor. - """ - return response - - def post_update_table_spec_with_metadata(self, response: gca_table_spec.TableSpec, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gca_table_spec.TableSpec, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_table_spec - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AutoMl server but before it is returned to user code. - - We recommend only using this `post_update_table_spec_with_metadata` - interceptor in new development instead of the `post_update_table_spec` interceptor. 
- When both interceptors are used, this `post_update_table_spec_with_metadata` interceptor runs after the - `post_update_table_spec` interceptor. The (possibly modified) response returned by - `post_update_table_spec` will be passed to - `post_update_table_spec_with_metadata`. - """ - return response, metadata - - -@dataclasses.dataclass -class AutoMlRestStub: - _session: AuthorizedSession - _host: str - _interceptor: AutoMlRestInterceptor - - -class AutoMlRestTransport(_BaseAutoMlRestTransport): - """REST backend synchronous transport for AutoMl. - - AutoML Server API. - - The resource names are assigned by the server. The server never - reuses names that it has created after the resources with those - names are deleted. - - An ID of a resource is the last element of the item's resource name. - For - ``projects/{project_id}/locations/{location_id}/datasets/{dataset_id}``, - then the id for the item is ``{dataset_id}``. - - Currently the only supported ``location_id`` is "us-central1". - - On any input that is documented to expect a string parameter in - snake_case or kebab-case, either of those cases is accepted. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'automl.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[AutoMlRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'automl.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
- # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - url_scheme=url_scheme, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or AutoMlRestInterceptor() - self._prep_wrapped_messages(client_info) - - @property - def operations_client(self) -> operations_v1.AbstractOperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Only create a new client if we do not already have one. - if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.CancelOperation': [ - { - 'method': 'post', - 'uri': '/v1beta1/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - ], - 'google.longrunning.Operations.DeleteOperation': [ - { - 'method': 'delete', - 'uri': '/v1beta1/{name=projects/*/locations/*/operations/*}', - }, - ], - 'google.longrunning.Operations.GetOperation': [ - { - 'method': 'get', - 'uri': '/v1beta1/{name=projects/*/locations/*/operations/*}', - }, - ], - 'google.longrunning.Operations.ListOperations': [ - { - 'method': 'get', - 'uri': '/v1beta1/{name=projects/*/locations/*}/operations', - }, - ], - 'google.longrunning.Operations.WaitOperation': [ - { - 'method': 'post', - 'uri': '/v1beta1/{name=projects/*/locations/*/operations/*}:wait', - 'body': '*', - }, - ], - } - - rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1beta1") - - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) - - # Return the client from cache. - return self._operations_client - - class _CreateDataset(_BaseAutoMlRestTransport._BaseCreateDataset, AutoMlRestStub): - def __hash__(self): - return hash("AutoMlRestTransport.CreateDataset") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: service.CreateDatasetRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> gca_dataset.Dataset: - r"""Call the create dataset method over HTTP. - - Args: - request (~.service.CreateDatasetRequest): - The request object. Request message for - [AutoMl.CreateDataset][google.cloud.automl.v1beta1.AutoMl.CreateDataset]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
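The `retry` parameter documented below accepts a caller-built policy. As a sketch, this mirrors the defaults this transport family wires in through `_wrap_method` (backoff starting at 0.1 s and growing by 1.3x per attempt, capped at 60 s, with a 5 s overall budget, retrying only `DeadlineExceeded` and `ServiceUnavailable`):

```python
# A caller-supplied retry matching the generated defaults shown earlier in
# this diff: waits of roughly 0.1 s, 0.13 s, 0.169 s, ... capped at 60 s,
# giving up once 5 s have elapsed in total.
from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries

retry = retries.Retry(
    predicate=retries.if_exception_type(
        core_exceptions.DeadlineExceeded,
        core_exceptions.ServiceUnavailable,
    ),
    initial=0.1,
    maximum=60.0,
    multiplier=1.3,
    timeout=5.0,
)

# Passed per call, e.g. (hypothetical request):
# client.create_dataset(request=..., retry=retry, timeout=5.0)
```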
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.gca_dataset.Dataset: - A workspace for solving a single, - particular machine learning (ML) - problem. A workspace contains examples - that may be annotated. - - """ - - http_options = _BaseAutoMlRestTransport._BaseCreateDataset._get_http_options() - - request, metadata = self._interceptor.pre_create_dataset(request, metadata) - transcoded_request = _BaseAutoMlRestTransport._BaseCreateDataset._get_transcoded_request(http_options, request) - - body = _BaseAutoMlRestTransport._BaseCreateDataset._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAutoMlRestTransport._BaseCreateDataset._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.automl_v1beta1.AutoMlClient.CreateDataset", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "rpcName": "CreateDataset", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AutoMlRestTransport._CreateDataset._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
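The error path noted in the comment above delegates to `core_exceptions.from_http_response`, which maps any HTTP status of 400 or higher to the matching `GoogleAPICallError` subclass. An offline sketch — the private `_content` attribute is poked only to fake a server response for the demo:

```python
# Offline demo of api-core's HTTP error mapping used by these REST stubs.
import requests
from google.api_core import exceptions as core_exceptions

resp = requests.Response()
resp.status_code = 404
resp._content = b'{"error": {"message": "The dataset was not found."}}'  # demo only
# from_http_response reads the originating request's method and URL, so
# attach a prepared request (hypothetical URL).
resp.request = requests.Request(
    "GET",
    "https://automl.googleapis.com/v1beta1/projects/p/locations/l/datasets/d",
).prepare()

exc = core_exceptions.from_http_response(resp)
print(type(exc).__name__)  # NotFound
print(exc.message)
```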
- if response.status_code >= 400:
- raise core_exceptions.from_http_response(response)
-
- # Return the response
- resp = gca_dataset.Dataset()
- pb_resp = gca_dataset.Dataset.pb(resp)
-
- json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
-
- resp = self._interceptor.post_create_dataset(resp)
- response_metadata = [(k, str(v)) for k, v in response.headers.items()]
- resp, _ = self._interceptor.post_create_dataset_with_metadata(resp, response_metadata)
- if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER
- try:
- # Serialize the parsed proto (`resp`), not the raw HTTP response object.
- response_payload = gca_dataset.Dataset.to_json(resp)
- except Exception:
- response_payload = None
- http_response = {
- "payload": response_payload,
- "headers": dict(response.headers),
- "status": response.status_code,
- }
- _LOGGER.debug(
- "Received response for google.cloud.automl_v1beta1.AutoMlClient.create_dataset",
- extra = {
- "serviceName": "google.cloud.automl.v1beta1.AutoMl",
- "rpcName": "CreateDataset",
- "metadata": http_response["headers"],
- "httpResponse": http_response,
- },
- )
- return resp
-
- class _CreateModel(_BaseAutoMlRestTransport._BaseCreateModel, AutoMlRestStub):
- def __hash__(self):
- return hash("AutoMlRestTransport.CreateModel")
-
- @staticmethod
- def _get_response(
- host,
- metadata,
- query_params,
- session,
- timeout,
- transcoded_request,
- body=None):
-
- uri = transcoded_request['uri']
- method = transcoded_request['method']
- headers = dict(metadata)
- headers['Content-Type'] = 'application/json'
- response = getattr(session, method)(
- "{host}{uri}".format(host=host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params, strict=True),
- data=body,
- )
- return response
-
- def __call__(self,
- request: service.CreateModelRequest, *,
- retry: OptionalRetry=gapic_v1.method.DEFAULT,
- timeout: Optional[float]=None,
- metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
- ) -> operations_pb2.Operation:
- r"""Call the create model method over HTTP.
-
- Args:
- request (~.service.CreateModelRequest):
- The request object. Request message for
- [AutoMl.CreateModel][google.cloud.automl.v1beta1.AutoMl.CreateModel].
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- ~.operations_pb2.Operation:
- This resource represents a
- long-running operation that is the
- result of a network API call.
- - """ - - http_options = _BaseAutoMlRestTransport._BaseCreateModel._get_http_options() - - request, metadata = self._interceptor.pre_create_model(request, metadata) - transcoded_request = _BaseAutoMlRestTransport._BaseCreateModel._get_transcoded_request(http_options, request) - - body = _BaseAutoMlRestTransport._BaseCreateModel._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAutoMlRestTransport._BaseCreateModel._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.automl_v1beta1.AutoMlClient.CreateModel", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "rpcName": "CreateModel", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AutoMlRestTransport._CreateModel._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_model(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_model_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.automl_v1beta1.AutoMlClient.create_model", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "rpcName": "CreateModel", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteDataset(_BaseAutoMlRestTransport._BaseDeleteDataset, AutoMlRestStub): - def __hash__(self): - return hash("AutoMlRestTransport.DeleteDataset") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: service.DeleteDatasetRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete dataset method over HTTP. 
- - Args: - request (~.service.DeleteDatasetRequest): - The request object. Request message for - [AutoMl.DeleteDataset][google.cloud.automl.v1beta1.AutoMl.DeleteDataset]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseAutoMlRestTransport._BaseDeleteDataset._get_http_options() - - request, metadata = self._interceptor.pre_delete_dataset(request, metadata) - transcoded_request = _BaseAutoMlRestTransport._BaseDeleteDataset._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAutoMlRestTransport._BaseDeleteDataset._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.automl_v1beta1.AutoMlClient.DeleteDataset", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "rpcName": "DeleteDataset", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AutoMlRestTransport._DeleteDataset._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
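- # Hedged sketch (editor's addition): the raw `operations_pb2.Operation`
- # parsed below can be polled with the `operations_client` property defined
- # near the top of this transport; the operation name is a placeholder:
- #
- #     op = transport.operations_client.get_operation(
- #         name="projects/p/locations/l/operations/123")
- #     if op.done:
- #         ...  # op.response or op.error is populated
- #
- # The higher-level AutoMlClient normally wraps this in a polling future,
- # so direct use of the operations client is rarely needed.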
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_delete_dataset(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_dataset_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.automl_v1beta1.AutoMlClient.delete_dataset", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "rpcName": "DeleteDataset", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteModel(_BaseAutoMlRestTransport._BaseDeleteModel, AutoMlRestStub): - def __hash__(self): - return hash("AutoMlRestTransport.DeleteModel") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: service.DeleteModelRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete model method over HTTP. - - Args: - request (~.service.DeleteModelRequest): - The request object. Request message for - [AutoMl.DeleteModel][google.cloud.automl.v1beta1.AutoMl.DeleteModel]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = _BaseAutoMlRestTransport._BaseDeleteModel._get_http_options() - - request, metadata = self._interceptor.pre_delete_model(request, metadata) - transcoded_request = _BaseAutoMlRestTransport._BaseDeleteModel._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAutoMlRestTransport._BaseDeleteModel._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.automl_v1beta1.AutoMlClient.DeleteModel", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "rpcName": "DeleteModel", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AutoMlRestTransport._DeleteModel._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_delete_model(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_model_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.automl_v1beta1.AutoMlClient.delete_model", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "rpcName": "DeleteModel", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeployModel(_BaseAutoMlRestTransport._BaseDeployModel, AutoMlRestStub): - def __hash__(self): - return hash("AutoMlRestTransport.DeployModel") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: service.DeployModelRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the deploy model method over HTTP. - - Args: - request (~.service.DeployModelRequest): - The request object. Request message for - [AutoMl.DeployModel][google.cloud.automl.v1beta1.AutoMl.DeployModel]. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseAutoMlRestTransport._BaseDeployModel._get_http_options() - - request, metadata = self._interceptor.pre_deploy_model(request, metadata) - transcoded_request = _BaseAutoMlRestTransport._BaseDeployModel._get_transcoded_request(http_options, request) - - body = _BaseAutoMlRestTransport._BaseDeployModel._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAutoMlRestTransport._BaseDeployModel._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.automl_v1beta1.AutoMlClient.DeployModel", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "rpcName": "DeployModel", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AutoMlRestTransport._DeployModel._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
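- # Hedged sketch (editor's addition): every RPC in this transport runs
- # through paired `pre_*`/`post_*` interceptor hooks, so request handling
- # can be customized by subclassing; the header added here is an assumed
- # example, not an API requirement:
- #
- #     class MyInterceptor(AutoMlRestInterceptor):
- #         def pre_deploy_model(self, request, metadata):
- #             metadata = list(metadata) + [("x-example-header", "1")]
- #             return request, metadata
- #
- #     transport = AutoMlRestTransport(interceptor=MyInterceptor())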
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_deploy_model(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_deploy_model_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.automl_v1beta1.AutoMlClient.deploy_model", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "rpcName": "DeployModel", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ExportData(_BaseAutoMlRestTransport._BaseExportData, AutoMlRestStub): - def __hash__(self): - return hash("AutoMlRestTransport.ExportData") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: service.ExportDataRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the export data method over HTTP. - - Args: - request (~.service.ExportDataRequest): - The request object. Request message for - [AutoMl.ExportData][google.cloud.automl.v1beta1.AutoMl.ExportData]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = _BaseAutoMlRestTransport._BaseExportData._get_http_options() - - request, metadata = self._interceptor.pre_export_data(request, metadata) - transcoded_request = _BaseAutoMlRestTransport._BaseExportData._get_transcoded_request(http_options, request) - - body = _BaseAutoMlRestTransport._BaseExportData._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAutoMlRestTransport._BaseExportData._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.automl_v1beta1.AutoMlClient.ExportData", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "rpcName": "ExportData", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AutoMlRestTransport._ExportData._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_export_data(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_export_data_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.automl_v1beta1.AutoMlClient.export_data", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "rpcName": "ExportData", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ExportEvaluatedExamples(_BaseAutoMlRestTransport._BaseExportEvaluatedExamples, AutoMlRestStub): - def __hash__(self): - return hash("AutoMlRestTransport.ExportEvaluatedExamples") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: service.ExportEvaluatedExamplesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the export evaluated examples method over HTTP. 
- - Args: - request (~.service.ExportEvaluatedExamplesRequest): - The request object. Request message for - [AutoMl.ExportEvaluatedExamples][google.cloud.automl.v1beta1.AutoMl.ExportEvaluatedExamples]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseAutoMlRestTransport._BaseExportEvaluatedExamples._get_http_options() - - request, metadata = self._interceptor.pre_export_evaluated_examples(request, metadata) - transcoded_request = _BaseAutoMlRestTransport._BaseExportEvaluatedExamples._get_transcoded_request(http_options, request) - - body = _BaseAutoMlRestTransport._BaseExportEvaluatedExamples._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAutoMlRestTransport._BaseExportEvaluatedExamples._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.automl_v1beta1.AutoMlClient.ExportEvaluatedExamples", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "rpcName": "ExportEvaluatedExamples", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AutoMlRestTransport._ExportEvaluatedExamples._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
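- # Note: `_get_response` forwards `data=body` only for RPCs that carry an
- # HTTP body, as this one does; the GET-style stubs in this file define
- # `_get_response` without `data=body`, keeping `body=None` in the
- # signature purely for a uniform call shape. Schematically:
- #
- #     response = session.post(url, data=body, ...)   # body-carrying RPCs
- #     response = session.get(url, ...)               # body-less RPCs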
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_export_evaluated_examples(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_export_evaluated_examples_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.automl_v1beta1.AutoMlClient.export_evaluated_examples", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "rpcName": "ExportEvaluatedExamples", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ExportModel(_BaseAutoMlRestTransport._BaseExportModel, AutoMlRestStub): - def __hash__(self): - return hash("AutoMlRestTransport.ExportModel") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: service.ExportModelRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the export model method over HTTP. - - Args: - request (~.service.ExportModelRequest): - The request object. Request message for - [AutoMl.ExportModel][google.cloud.automl.v1beta1.AutoMl.ExportModel]. - Models need to be enabled for exporting, otherwise an - error code will be returned. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = _BaseAutoMlRestTransport._BaseExportModel._get_http_options() - - request, metadata = self._interceptor.pre_export_model(request, metadata) - transcoded_request = _BaseAutoMlRestTransport._BaseExportModel._get_transcoded_request(http_options, request) - - body = _BaseAutoMlRestTransport._BaseExportModel._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAutoMlRestTransport._BaseExportModel._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.automl_v1beta1.AutoMlClient.ExportModel", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "rpcName": "ExportModel", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AutoMlRestTransport._ExportModel._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_export_model(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_export_model_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.automl_v1beta1.AutoMlClient.export_model", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "rpcName": "ExportModel", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetAnnotationSpec(_BaseAutoMlRestTransport._BaseGetAnnotationSpec, AutoMlRestStub): - def __hash__(self): - return hash("AutoMlRestTransport.GetAnnotationSpec") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: service.GetAnnotationSpecRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> annotation_spec.AnnotationSpec: - r"""Call the get annotation spec method over HTTP. 
- - Args: - request (~.service.GetAnnotationSpecRequest): - The request object. Request message for - [AutoMl.GetAnnotationSpec][google.cloud.automl.v1beta1.AutoMl.GetAnnotationSpec]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.annotation_spec.AnnotationSpec: - A definition of an annotation spec. - """ - - http_options = _BaseAutoMlRestTransport._BaseGetAnnotationSpec._get_http_options() - - request, metadata = self._interceptor.pre_get_annotation_spec(request, metadata) - transcoded_request = _BaseAutoMlRestTransport._BaseGetAnnotationSpec._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAutoMlRestTransport._BaseGetAnnotationSpec._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.automl_v1beta1.AutoMlClient.GetAnnotationSpec", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "rpcName": "GetAnnotationSpec", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AutoMlRestTransport._GetAnnotationSpec._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
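- # Gloss of the parse pattern used below: a proto-plus wrapper is created,
- # `.pb()` exposes its underlying protobuf message, and `json_format.Parse`
- # fills that message in place, which also populates the wrapper:
- #
- #     resp = annotation_spec.AnnotationSpec()            # proto-plus wrapper
- #     pb_resp = annotation_spec.AnnotationSpec.pb(resp)  # raw protobuf view
- #     json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)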
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = annotation_spec.AnnotationSpec() - pb_resp = annotation_spec.AnnotationSpec.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_annotation_spec(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_annotation_spec_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = annotation_spec.AnnotationSpec.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.automl_v1beta1.AutoMlClient.get_annotation_spec", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "rpcName": "GetAnnotationSpec", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetColumnSpec(_BaseAutoMlRestTransport._BaseGetColumnSpec, AutoMlRestStub): - def __hash__(self): - return hash("AutoMlRestTransport.GetColumnSpec") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: service.GetColumnSpecRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> column_spec.ColumnSpec: - r"""Call the get column spec method over HTTP. - - Args: - request (~.service.GetColumnSpecRequest): - The request object. Request message for - [AutoMl.GetColumnSpec][google.cloud.automl.v1beta1.AutoMl.GetColumnSpec]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.column_spec.ColumnSpec: - A representation of a column in a relational table. When - listing them, column specs are returned in the same - order in which they were given on import . 
Used by:
-
- -  Tables
-
- """
-
- http_options = _BaseAutoMlRestTransport._BaseGetColumnSpec._get_http_options()
-
- request, metadata = self._interceptor.pre_get_column_spec(request, metadata)
- transcoded_request = _BaseAutoMlRestTransport._BaseGetColumnSpec._get_transcoded_request(http_options, request)
-
- # Jsonify the query params
- query_params = _BaseAutoMlRestTransport._BaseGetColumnSpec._get_query_params_json(transcoded_request)
-
- if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER
- request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
- method = transcoded_request['method']
- try:
- request_payload = type(request).to_json(request)
- except Exception:
- request_payload = None
- http_request = {
- "payload": request_payload,
- "requestMethod": method,
- "requestUrl": request_url,
- "headers": dict(metadata),
- }
- _LOGGER.debug(
- "Sending request for google.cloud.automl_v1beta1.AutoMlClient.GetColumnSpec",
- extra = {
- "serviceName": "google.cloud.automl.v1beta1.AutoMl",
- "rpcName": "GetColumnSpec",
- "httpRequest": http_request,
- "metadata": http_request["headers"],
- },
- )
-
- # Send the request
- response = AutoMlRestTransport._GetColumnSpec._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
-
- # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
- # subclass.
- if response.status_code >= 400:
- raise core_exceptions.from_http_response(response)
-
- # Return the response
- resp = column_spec.ColumnSpec()
- pb_resp = column_spec.ColumnSpec.pb(resp)
-
- json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
-
- resp = self._interceptor.post_get_column_spec(resp)
- response_metadata = [(k, str(v)) for k, v in response.headers.items()]
- resp, _ = self._interceptor.post_get_column_spec_with_metadata(resp, response_metadata)
- if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER
- try:
- # Serialize the parsed proto (`resp`), not the raw HTTP response object.
- response_payload = column_spec.ColumnSpec.to_json(resp)
- except Exception:
- response_payload = None
- http_response = {
- "payload": response_payload,
- "headers": dict(response.headers),
- "status": response.status_code,
- }
- _LOGGER.debug(
- "Received response for google.cloud.automl_v1beta1.AutoMlClient.get_column_spec",
- extra = {
- "serviceName": "google.cloud.automl.v1beta1.AutoMl",
- "rpcName": "GetColumnSpec",
- "metadata": http_response["headers"],
- "httpResponse": http_response,
- },
- )
- return resp
-
- class _GetDataset(_BaseAutoMlRestTransport._BaseGetDataset, AutoMlRestStub):
- def __hash__(self):
- return hash("AutoMlRestTransport.GetDataset")
-
- @staticmethod
- def _get_response(
- host,
- metadata,
- query_params,
- session,
- timeout,
- transcoded_request,
- body=None):
-
- uri = transcoded_request['uri']
- method = transcoded_request['method']
- headers = dict(metadata)
- headers['Content-Type'] = 'application/json'
- response = getattr(session, method)(
- "{host}{uri}".format(host=host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params, strict=True),
- )
- return response
-
- def __call__(self,
- request: service.GetDatasetRequest, *,
- retry: OptionalRetry=gapic_v1.method.DEFAULT,
- timeout: Optional[float]=None,
- metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
- ) -> dataset.Dataset:
- r"""Call the get dataset method over HTTP.
-
- Args:
- request (~.service.GetDatasetRequest):
- The request object.
Request message for - [AutoMl.GetDataset][google.cloud.automl.v1beta1.AutoMl.GetDataset]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.dataset.Dataset: - A workspace for solving a single, - particular machine learning (ML) - problem. A workspace contains examples - that may be annotated. - - """ - - http_options = _BaseAutoMlRestTransport._BaseGetDataset._get_http_options() - - request, metadata = self._interceptor.pre_get_dataset(request, metadata) - transcoded_request = _BaseAutoMlRestTransport._BaseGetDataset._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAutoMlRestTransport._BaseGetDataset._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.automl_v1beta1.AutoMlClient.GetDataset", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "rpcName": "GetDataset", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AutoMlRestTransport._GetDataset._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
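- # Hedged note (editor's addition): the request sent above was produced by
- # `_get_transcoded_request`, which applies the RPC's `google.api.http`
- # binding; the result is a dict shaped roughly like (values assumed):
- #
- #     {'method': 'get',
- #      'uri': '/v1beta1/projects/p/locations/l/datasets/d',
- #      'query_params': {}}
- #
- # Path variables are taken from fields such as `request.name`; fields not
- # bound to the path are serialized into the query parameters.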
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = dataset.Dataset() - pb_resp = dataset.Dataset.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_dataset(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_dataset_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = dataset.Dataset.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.automl_v1beta1.AutoMlClient.get_dataset", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "rpcName": "GetDataset", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetModel(_BaseAutoMlRestTransport._BaseGetModel, AutoMlRestStub): - def __hash__(self): - return hash("AutoMlRestTransport.GetModel") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: service.GetModelRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> model.Model: - r"""Call the get model method over HTTP. - - Args: - request (~.service.GetModelRequest): - The request object. Request message for - [AutoMl.GetModel][google.cloud.automl.v1beta1.AutoMl.GetModel]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.model.Model: - API proto representing a trained - machine learning model. 
- - """ - - http_options = _BaseAutoMlRestTransport._BaseGetModel._get_http_options() - - request, metadata = self._interceptor.pre_get_model(request, metadata) - transcoded_request = _BaseAutoMlRestTransport._BaseGetModel._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAutoMlRestTransport._BaseGetModel._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.automl_v1beta1.AutoMlClient.GetModel", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "rpcName": "GetModel", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AutoMlRestTransport._GetModel._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = model.Model() - pb_resp = model.Model.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_model(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_model_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = model.Model.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.automl_v1beta1.AutoMlClient.get_model", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "rpcName": "GetModel", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetModelEvaluation(_BaseAutoMlRestTransport._BaseGetModelEvaluation, AutoMlRestStub): - def __hash__(self): - return hash("AutoMlRestTransport.GetModelEvaluation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: service.GetModelEvaluationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> model_evaluation.ModelEvaluation: - r"""Call the get model evaluation method over HTTP. - - Args: - request (~.service.GetModelEvaluationRequest): - The request object. 
Request message for - [AutoMl.GetModelEvaluation][google.cloud.automl.v1beta1.AutoMl.GetModelEvaluation]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.model_evaluation.ModelEvaluation: - Evaluation results of a model. - """ - - http_options = _BaseAutoMlRestTransport._BaseGetModelEvaluation._get_http_options() - - request, metadata = self._interceptor.pre_get_model_evaluation(request, metadata) - transcoded_request = _BaseAutoMlRestTransport._BaseGetModelEvaluation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAutoMlRestTransport._BaseGetModelEvaluation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.automl_v1beta1.AutoMlClient.GetModelEvaluation", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "rpcName": "GetModelEvaluation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AutoMlRestTransport._GetModelEvaluation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
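- # Note: besides the plain `post_*` hook, the interceptor below also
- # receives the response headers as `(key, value)` pairs via
- # `post_get_model_evaluation_with_metadata` and must hand back
- # `(response, metadata)`; an override in an assumed subclass could be:
- #
- #     def post_get_model_evaluation_with_metadata(self, resp, metadata):
- #         # inspect or record selected header pairs here
- #         return resp, metadata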
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = model_evaluation.ModelEvaluation() - pb_resp = model_evaluation.ModelEvaluation.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_model_evaluation(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_model_evaluation_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = model_evaluation.ModelEvaluation.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.automl_v1beta1.AutoMlClient.get_model_evaluation", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "rpcName": "GetModelEvaluation", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetTableSpec(_BaseAutoMlRestTransport._BaseGetTableSpec, AutoMlRestStub): - def __hash__(self): - return hash("AutoMlRestTransport.GetTableSpec") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: service.GetTableSpecRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> table_spec.TableSpec: - r"""Call the get table spec method over HTTP. - - Args: - request (~.service.GetTableSpecRequest): - The request object. Request message for - [AutoMl.GetTableSpec][google.cloud.automl.v1beta1.AutoMl.GetTableSpec]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.table_spec.TableSpec: - A specification of a relational table. The table's - schema is represented via its child column specs. It is - pre-populated as part of ImportData by schema inference - algorithm, the version of which is a required parameter - of ImportData InputConfig. Note: While working with a - table, at times the schema may be inconsistent with the - data in the table (e.g. string in a FLOAT64 column). The - consistency validation is done upon creation of a model. 
- Used by:
-
- -  Tables
-
- """
-
- http_options = _BaseAutoMlRestTransport._BaseGetTableSpec._get_http_options()
-
- request, metadata = self._interceptor.pre_get_table_spec(request, metadata)
- transcoded_request = _BaseAutoMlRestTransport._BaseGetTableSpec._get_transcoded_request(http_options, request)
-
- # Jsonify the query params
- query_params = _BaseAutoMlRestTransport._BaseGetTableSpec._get_query_params_json(transcoded_request)
-
- if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER
- request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
- method = transcoded_request['method']
- try:
- request_payload = type(request).to_json(request)
- except Exception:
- request_payload = None
- http_request = {
- "payload": request_payload,
- "requestMethod": method,
- "requestUrl": request_url,
- "headers": dict(metadata),
- }
- _LOGGER.debug(
- "Sending request for google.cloud.automl_v1beta1.AutoMlClient.GetTableSpec",
- extra = {
- "serviceName": "google.cloud.automl.v1beta1.AutoMl",
- "rpcName": "GetTableSpec",
- "httpRequest": http_request,
- "metadata": http_request["headers"],
- },
- )
-
- # Send the request
- response = AutoMlRestTransport._GetTableSpec._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
-
- # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
- # subclass.
- if response.status_code >= 400:
- raise core_exceptions.from_http_response(response)
-
- # Return the response
- resp = table_spec.TableSpec()
- pb_resp = table_spec.TableSpec.pb(resp)
-
- json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
-
- resp = self._interceptor.post_get_table_spec(resp)
- response_metadata = [(k, str(v)) for k, v in response.headers.items()]
- resp, _ = self._interceptor.post_get_table_spec_with_metadata(resp, response_metadata)
- if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER
- try:
- # Serialize the parsed proto (`resp`), not the raw HTTP response object.
- response_payload = table_spec.TableSpec.to_json(resp)
- except Exception:
- response_payload = None
- http_response = {
- "payload": response_payload,
- "headers": dict(response.headers),
- "status": response.status_code,
- }
- _LOGGER.debug(
- "Received response for google.cloud.automl_v1beta1.AutoMlClient.get_table_spec",
- extra = {
- "serviceName": "google.cloud.automl.v1beta1.AutoMl",
- "rpcName": "GetTableSpec",
- "metadata": http_response["headers"],
- "httpResponse": http_response,
- },
- )
- return resp
-
- class _ImportData(_BaseAutoMlRestTransport._BaseImportData, AutoMlRestStub):
- def __hash__(self):
- return hash("AutoMlRestTransport.ImportData")
-
- @staticmethod
- def _get_response(
- host,
- metadata,
- query_params,
- session,
- timeout,
- transcoded_request,
- body=None):
-
- uri = transcoded_request['uri']
- method = transcoded_request['method']
- headers = dict(metadata)
- headers['Content-Type'] = 'application/json'
- response = getattr(session, method)(
- "{host}{uri}".format(host=host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params, strict=True),
- data=body,
- )
- return response
-
- def __call__(self,
- request: service.ImportDataRequest, *,
- retry: OptionalRetry=gapic_v1.method.DEFAULT,
- timeout: Optional[float]=None,
- metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
- ) -> operations_pb2.Operation:
- r"""Call the import data method over HTTP.
-
- Args:
- request (~.service.ImportDataRequest):
- The request object.
Request message for - [AutoMl.ImportData][google.cloud.automl.v1beta1.AutoMl.ImportData]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseAutoMlRestTransport._BaseImportData._get_http_options() - - request, metadata = self._interceptor.pre_import_data(request, metadata) - transcoded_request = _BaseAutoMlRestTransport._BaseImportData._get_transcoded_request(http_options, request) - - body = _BaseAutoMlRestTransport._BaseImportData._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAutoMlRestTransport._BaseImportData._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.automl_v1beta1.AutoMlClient.ImportData", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "rpcName": "ImportData", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AutoMlRestTransport._ImportData._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
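Every handler in this transport routes non-2xx responses through ``core_exceptions.from_http_response``, which selects the matching ``GoogleAPICallError`` subclass from the status code. A minimal illustration of that mapping, using the sibling ``from_http_status`` helper from ``google-api-core`` (the status codes and messages here are made up for the sketch):

.. code-block:: python

    from google.api_core import exceptions as core_exceptions

    # 404 maps to NotFound, 409 to Conflict, etc.; unknown codes fall
    # back to the generic GoogleAPICallError.
    exc = core_exceptions.from_http_status(404, "dataset not found")
    print(type(exc).__name__, exc.code)  # NotFound 404

    exc = core_exceptions.from_http_status(409, "model already exists")
    print(type(exc).__name__, exc.code)  # Conflict 409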
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_import_data(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_import_data_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.automl_v1beta1.AutoMlClient.import_data", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "rpcName": "ImportData", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListColumnSpecs(_BaseAutoMlRestTransport._BaseListColumnSpecs, AutoMlRestStub): - def __hash__(self): - return hash("AutoMlRestTransport.ListColumnSpecs") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: service.ListColumnSpecsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> service.ListColumnSpecsResponse: - r"""Call the list column specs method over HTTP. - - Args: - request (~.service.ListColumnSpecsRequest): - The request object. Request message for - [AutoMl.ListColumnSpecs][google.cloud.automl.v1beta1.AutoMl.ListColumnSpecs]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.service.ListColumnSpecsResponse: - Response message for - [AutoMl.ListColumnSpecs][google.cloud.automl.v1beta1.AutoMl.ListColumnSpecs]. 
- - """ - - http_options = _BaseAutoMlRestTransport._BaseListColumnSpecs._get_http_options() - - request, metadata = self._interceptor.pre_list_column_specs(request, metadata) - transcoded_request = _BaseAutoMlRestTransport._BaseListColumnSpecs._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAutoMlRestTransport._BaseListColumnSpecs._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.automl_v1beta1.AutoMlClient.ListColumnSpecs", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "rpcName": "ListColumnSpecs", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AutoMlRestTransport._ListColumnSpecs._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = service.ListColumnSpecsResponse() - pb_resp = service.ListColumnSpecsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_column_specs(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_column_specs_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = service.ListColumnSpecsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.automl_v1beta1.AutoMlClient.list_column_specs", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "rpcName": "ListColumnSpecs", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListDatasets(_BaseAutoMlRestTransport._BaseListDatasets, AutoMlRestStub): - def __hash__(self): - return hash("AutoMlRestTransport.ListDatasets") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: service.ListDatasetsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> service.ListDatasetsResponse: - r"""Call the list datasets method over HTTP. 
- - Args: - request (~.service.ListDatasetsRequest): - The request object. Request message for - [AutoMl.ListDatasets][google.cloud.automl.v1beta1.AutoMl.ListDatasets]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.service.ListDatasetsResponse: - Response message for - [AutoMl.ListDatasets][google.cloud.automl.v1beta1.AutoMl.ListDatasets]. - - """ - - http_options = _BaseAutoMlRestTransport._BaseListDatasets._get_http_options() - - request, metadata = self._interceptor.pre_list_datasets(request, metadata) - transcoded_request = _BaseAutoMlRestTransport._BaseListDatasets._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAutoMlRestTransport._BaseListDatasets._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.automl_v1beta1.AutoMlClient.ListDatasets", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "rpcName": "ListDatasets", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AutoMlRestTransport._ListDatasets._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
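A single page-sized ``ListDatasetsResponse`` is all this handler returns; iteration across pages lives a layer up, in the public client's pager. A hedged usage sketch — it assumes this staged package is installed with its REST transport available, that application default credentials resolve, and that ``my-project``/``us-central1`` are placeholders:

.. code-block:: python

    from google.cloud import automl_v1beta1

    client = automl_v1beta1.AutoMlClient(transport="rest")
    parent = "projects/my-project/locations/us-central1"

    # The pager re-invokes ListDatasets with each next_page_token.
    for dataset in client.list_datasets(parent=parent):
        print(dataset.name, dataset.example_count)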
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = service.ListDatasetsResponse() - pb_resp = service.ListDatasetsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_datasets(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_datasets_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = service.ListDatasetsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.automl_v1beta1.AutoMlClient.list_datasets", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "rpcName": "ListDatasets", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListModelEvaluations(_BaseAutoMlRestTransport._BaseListModelEvaluations, AutoMlRestStub): - def __hash__(self): - return hash("AutoMlRestTransport.ListModelEvaluations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: service.ListModelEvaluationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> service.ListModelEvaluationsResponse: - r"""Call the list model evaluations method over HTTP. - - Args: - request (~.service.ListModelEvaluationsRequest): - The request object. Request message for - [AutoMl.ListModelEvaluations][google.cloud.automl.v1beta1.AutoMl.ListModelEvaluations]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.service.ListModelEvaluationsResponse: - Response message for - [AutoMl.ListModelEvaluations][google.cloud.automl.v1beta1.AutoMl.ListModelEvaluations]. 
- - """ - - http_options = _BaseAutoMlRestTransport._BaseListModelEvaluations._get_http_options() - - request, metadata = self._interceptor.pre_list_model_evaluations(request, metadata) - transcoded_request = _BaseAutoMlRestTransport._BaseListModelEvaluations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAutoMlRestTransport._BaseListModelEvaluations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.automl_v1beta1.AutoMlClient.ListModelEvaluations", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "rpcName": "ListModelEvaluations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AutoMlRestTransport._ListModelEvaluations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = service.ListModelEvaluationsResponse() - pb_resp = service.ListModelEvaluationsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_model_evaluations(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_model_evaluations_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = service.ListModelEvaluationsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.automl_v1beta1.AutoMlClient.list_model_evaluations", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "rpcName": "ListModelEvaluations", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListModels(_BaseAutoMlRestTransport._BaseListModels, AutoMlRestStub): - def __hash__(self): - return hash("AutoMlRestTransport.ListModels") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: service.ListModelsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> service.ListModelsResponse: - r"""Call the list models method 
over HTTP. - - Args: - request (~.service.ListModelsRequest): - The request object. Request message for - [AutoMl.ListModels][google.cloud.automl.v1beta1.AutoMl.ListModels]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.service.ListModelsResponse: - Response message for - [AutoMl.ListModels][google.cloud.automl.v1beta1.AutoMl.ListModels]. - - """ - - http_options = _BaseAutoMlRestTransport._BaseListModels._get_http_options() - - request, metadata = self._interceptor.pre_list_models(request, metadata) - transcoded_request = _BaseAutoMlRestTransport._BaseListModels._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAutoMlRestTransport._BaseListModels._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.automl_v1beta1.AutoMlClient.ListModels", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "rpcName": "ListModels", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AutoMlRestTransport._ListModels._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
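Each ``__call__`` above threads its request through paired ``pre_*``/``post_*`` hooks on the transport's interceptor. A hedged sketch of plugging in a custom interceptor — ``LoggingInterceptor`` is illustrative, ``AutoMlRestInterceptor`` is the hook base class defined earlier in this generated module, and constructing the transport is assumed to pick up default credentials:

.. code-block:: python

    from google.cloud.automl_v1beta1.services.auto_ml.transports.rest import (
        AutoMlRestInterceptor,
        AutoMlRestTransport,
    )

    class LoggingInterceptor(AutoMlRestInterceptor):
        def pre_list_models(self, request, metadata):
            # Runs before transcoding; may rewrite the request or metadata.
            print("ListModels parent:", request.parent)
            return request, metadata

        def post_list_models(self, response):
            # Runs after the ListModelsResponse has been parsed.
            print("models on this page:", len(response.model))
            return response

    transport = AutoMlRestTransport(interceptor=LoggingInterceptor())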
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = service.ListModelsResponse() - pb_resp = service.ListModelsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_models(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_models_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = service.ListModelsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.automl_v1beta1.AutoMlClient.list_models", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "rpcName": "ListModels", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListTableSpecs(_BaseAutoMlRestTransport._BaseListTableSpecs, AutoMlRestStub): - def __hash__(self): - return hash("AutoMlRestTransport.ListTableSpecs") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: service.ListTableSpecsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> service.ListTableSpecsResponse: - r"""Call the list table specs method over HTTP. - - Args: - request (~.service.ListTableSpecsRequest): - The request object. Request message for - [AutoMl.ListTableSpecs][google.cloud.automl.v1beta1.AutoMl.ListTableSpecs]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.service.ListTableSpecsResponse: - Response message for - [AutoMl.ListTableSpecs][google.cloud.automl.v1beta1.AutoMl.ListTableSpecs]. 
- - """ - - http_options = _BaseAutoMlRestTransport._BaseListTableSpecs._get_http_options() - - request, metadata = self._interceptor.pre_list_table_specs(request, metadata) - transcoded_request = _BaseAutoMlRestTransport._BaseListTableSpecs._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAutoMlRestTransport._BaseListTableSpecs._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.automl_v1beta1.AutoMlClient.ListTableSpecs", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "rpcName": "ListTableSpecs", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AutoMlRestTransport._ListTableSpecs._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = service.ListTableSpecsResponse() - pb_resp = service.ListTableSpecsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_table_specs(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_table_specs_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = service.ListTableSpecsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.automl_v1beta1.AutoMlClient.list_table_specs", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "rpcName": "ListTableSpecs", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UndeployModel(_BaseAutoMlRestTransport._BaseUndeployModel, AutoMlRestStub): - def __hash__(self): - return hash("AutoMlRestTransport.UndeployModel") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: service.UndeployModelRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the undeploy model method over HTTP. 
- - Args: - request (~.service.UndeployModelRequest): - The request object. Request message for - [AutoMl.UndeployModel][google.cloud.automl.v1beta1.AutoMl.UndeployModel]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseAutoMlRestTransport._BaseUndeployModel._get_http_options() - - request, metadata = self._interceptor.pre_undeploy_model(request, metadata) - transcoded_request = _BaseAutoMlRestTransport._BaseUndeployModel._get_transcoded_request(http_options, request) - - body = _BaseAutoMlRestTransport._BaseUndeployModel._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAutoMlRestTransport._BaseUndeployModel._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.automl_v1beta1.AutoMlClient.UndeployModel", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "rpcName": "UndeployModel", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AutoMlRestTransport._UndeployModel._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
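Like ``ImportData`` above, ``UndeployModel`` comes back as a raw ``operations_pb2.Operation``; the public client wraps it in an ``api_core`` future so callers can block on completion. A hedged sketch (the model resource name is a placeholder, and the same client setup as the earlier sketches is assumed):

.. code-block:: python

    from google.cloud import automl_v1beta1

    client = automl_v1beta1.AutoMlClient(transport="rest")
    operation = client.undeploy_model(
        name="projects/my-project/locations/us-central1/models/TBL123"
    )
    # Polls the operation until it completes, raising on failure.
    operation.result(timeout=600)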
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_undeploy_model(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_undeploy_model_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.automl_v1beta1.AutoMlClient.undeploy_model", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "rpcName": "UndeployModel", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateColumnSpec(_BaseAutoMlRestTransport._BaseUpdateColumnSpec, AutoMlRestStub): - def __hash__(self): - return hash("AutoMlRestTransport.UpdateColumnSpec") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: service.UpdateColumnSpecRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> gca_column_spec.ColumnSpec: - r"""Call the update column spec method over HTTP. - - Args: - request (~.service.UpdateColumnSpecRequest): - The request object. Request message for - [AutoMl.UpdateColumnSpec][google.cloud.automl.v1beta1.AutoMl.UpdateColumnSpec] - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.gca_column_spec.ColumnSpec: - A representation of a column in a relational table. When - listing them, column specs are returned in the same - order in which they were given on import . 
Used by: - - - Tables - - """ - - http_options = _BaseAutoMlRestTransport._BaseUpdateColumnSpec._get_http_options() - - request, metadata = self._interceptor.pre_update_column_spec(request, metadata) - transcoded_request = _BaseAutoMlRestTransport._BaseUpdateColumnSpec._get_transcoded_request(http_options, request) - - body = _BaseAutoMlRestTransport._BaseUpdateColumnSpec._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAutoMlRestTransport._BaseUpdateColumnSpec._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.automl_v1beta1.AutoMlClient.UpdateColumnSpec", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "rpcName": "UpdateColumnSpec", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AutoMlRestTransport._UpdateColumnSpec._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = gca_column_spec.ColumnSpec() - pb_resp = gca_column_spec.ColumnSpec.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_column_spec(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_column_spec_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = gca_column_spec.ColumnSpec.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.automl_v1beta1.AutoMlClient.update_column_spec", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "rpcName": "UpdateColumnSpec", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateDataset(_BaseAutoMlRestTransport._BaseUpdateDataset, AutoMlRestStub): - def __hash__(self): - return hash("AutoMlRestTransport.UpdateDataset") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: service.UpdateDatasetRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, 
bytes]]]=(), - ) -> gca_dataset.Dataset: - r"""Call the update dataset method over HTTP. - - Args: - request (~.service.UpdateDatasetRequest): - The request object. Request message for - [AutoMl.UpdateDataset][google.cloud.automl.v1beta1.AutoMl.UpdateDataset] - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.gca_dataset.Dataset: - A workspace for solving a single, - particular machine learning (ML) - problem. A workspace contains examples - that may be annotated. - - """ - - http_options = _BaseAutoMlRestTransport._BaseUpdateDataset._get_http_options() - - request, metadata = self._interceptor.pre_update_dataset(request, metadata) - transcoded_request = _BaseAutoMlRestTransport._BaseUpdateDataset._get_transcoded_request(http_options, request) - - body = _BaseAutoMlRestTransport._BaseUpdateDataset._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAutoMlRestTransport._BaseUpdateDataset._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.automl_v1beta1.AutoMlClient.UpdateDataset", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "rpcName": "UpdateDataset", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AutoMlRestTransport._UpdateDataset._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
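The update handlers send the mutated resource as the JSON request body. A hedged read-modify-write sketch for ``UpdateDataset`` (resource names are placeholders; assumes the same client setup as the earlier sketches):

.. code-block:: python

    from google.cloud import automl_v1beta1

    client = automl_v1beta1.AutoMlClient(transport="rest")
    dataset = client.get_dataset(
        name="projects/my-project/locations/us-central1/datasets/TBL456"
    )
    dataset.description = "refreshed nightly"
    updated = client.update_dataset(dataset=dataset)
    print(updated.description)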
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = gca_dataset.Dataset() - pb_resp = gca_dataset.Dataset.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_dataset(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_dataset_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = gca_dataset.Dataset.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.automl_v1beta1.AutoMlClient.update_dataset", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "rpcName": "UpdateDataset", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateTableSpec(_BaseAutoMlRestTransport._BaseUpdateTableSpec, AutoMlRestStub): - def __hash__(self): - return hash("AutoMlRestTransport.UpdateTableSpec") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: service.UpdateTableSpecRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> gca_table_spec.TableSpec: - r"""Call the update table spec method over HTTP. - - Args: - request (~.service.UpdateTableSpecRequest): - The request object. Request message for - [AutoMl.UpdateTableSpec][google.cloud.automl.v1beta1.AutoMl.UpdateTableSpec] - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.gca_table_spec.TableSpec: - A specification of a relational table. The table's - schema is represented via its child column specs. It is - pre-populated as part of ImportData by schema inference - algorithm, the version of which is a required parameter - of ImportData InputConfig. Note: While working with a - table, at times the schema may be inconsistent with the - data in the table (e.g. string in a FLOAT64 column). The - consistency validation is done upon creation of a model. 
- Used by: - - - Tables - - """ - - http_options = _BaseAutoMlRestTransport._BaseUpdateTableSpec._get_http_options() - - request, metadata = self._interceptor.pre_update_table_spec(request, metadata) - transcoded_request = _BaseAutoMlRestTransport._BaseUpdateTableSpec._get_transcoded_request(http_options, request) - - body = _BaseAutoMlRestTransport._BaseUpdateTableSpec._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAutoMlRestTransport._BaseUpdateTableSpec._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.automl_v1beta1.AutoMlClient.UpdateTableSpec", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "rpcName": "UpdateTableSpec", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AutoMlRestTransport._UpdateTableSpec._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = gca_table_spec.TableSpec() - pb_resp = gca_table_spec.TableSpec.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_table_spec(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_table_spec_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = gca_table_spec.TableSpec.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.automl_v1beta1.AutoMlClient.update_table_spec", - extra = { - "serviceName": "google.cloud.automl.v1beta1.AutoMl", - "rpcName": "UpdateTableSpec", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - @property - def create_dataset(self) -> Callable[ - [service.CreateDatasetRequest], - gca_dataset.Dataset]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateDataset(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_model(self) -> Callable[ - [service.CreateModelRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._CreateModel(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_dataset(self) -> Callable[ - [service.DeleteDatasetRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteDataset(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_model(self) -> Callable[ - [service.DeleteModelRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteModel(self._session, self._host, self._interceptor) # type: ignore - - @property - def deploy_model(self) -> Callable[ - [service.DeployModelRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeployModel(self._session, self._host, self._interceptor) # type: ignore - - @property - def export_data(self) -> Callable[ - [service.ExportDataRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ExportData(self._session, self._host, self._interceptor) # type: ignore - - @property - def export_evaluated_examples(self) -> Callable[ - [service.ExportEvaluatedExamplesRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ExportEvaluatedExamples(self._session, self._host, self._interceptor) # type: ignore - - @property - def export_model(self) -> Callable[ - [service.ExportModelRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ExportModel(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_annotation_spec(self) -> Callable[ - [service.GetAnnotationSpecRequest], - annotation_spec.AnnotationSpec]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetAnnotationSpec(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_column_spec(self) -> Callable[ - [service.GetColumnSpecRequest], - column_spec.ColumnSpec]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetColumnSpec(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_dataset(self) -> Callable[ - [service.GetDatasetRequest], - dataset.Dataset]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetDataset(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_model(self) -> Callable[ - [service.GetModelRequest], - model.Model]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._GetModel(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_model_evaluation(self) -> Callable[ - [service.GetModelEvaluationRequest], - model_evaluation.ModelEvaluation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetModelEvaluation(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_table_spec(self) -> Callable[ - [service.GetTableSpecRequest], - table_spec.TableSpec]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetTableSpec(self._session, self._host, self._interceptor) # type: ignore - - @property - def import_data(self) -> Callable[ - [service.ImportDataRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ImportData(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_column_specs(self) -> Callable[ - [service.ListColumnSpecsRequest], - service.ListColumnSpecsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListColumnSpecs(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_datasets(self) -> Callable[ - [service.ListDatasetsRequest], - service.ListDatasetsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListDatasets(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_model_evaluations(self) -> Callable[ - [service.ListModelEvaluationsRequest], - service.ListModelEvaluationsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListModelEvaluations(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_models(self) -> Callable[ - [service.ListModelsRequest], - service.ListModelsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListModels(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_table_specs(self) -> Callable[ - [service.ListTableSpecsRequest], - service.ListTableSpecsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListTableSpecs(self._session, self._host, self._interceptor) # type: ignore - - @property - def undeploy_model(self) -> Callable[ - [service.UndeployModelRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._UndeployModel(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_column_spec(self) -> Callable[ - [service.UpdateColumnSpecRequest], - gca_column_spec.ColumnSpec]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateColumnSpec(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_dataset(self) -> Callable[ - [service.UpdateDatasetRequest], - gca_dataset.Dataset]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateDataset(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_table_spec(self) -> Callable[ - [service.UpdateTableSpecRequest], - gca_table_spec.TableSpec]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateTableSpec(self._session, self._host, self._interceptor) # type: ignore - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'AutoMlRestTransport', -) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/auto_ml/transports/rest_base.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/auto_ml/transports/rest_base.py deleted file mode 100644 index 67d9ef978612..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/auto_ml/transports/rest_base.py +++ /dev/null @@ -1,1099 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from .base import AutoMlTransport, DEFAULT_CLIENT_INFO - -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - - -from google.cloud.automl_v1beta1.types import annotation_spec -from google.cloud.automl_v1beta1.types import column_spec -from google.cloud.automl_v1beta1.types import column_spec as gca_column_spec -from google.cloud.automl_v1beta1.types import dataset -from google.cloud.automl_v1beta1.types import dataset as gca_dataset -from google.cloud.automl_v1beta1.types import model -from google.cloud.automl_v1beta1.types import model_evaluation -from google.cloud.automl_v1beta1.types import service -from google.cloud.automl_v1beta1.types import table_spec -from google.cloud.automl_v1beta1.types import table_spec as gca_table_spec -from google.longrunning import operations_pb2 # type: ignore - - -class _BaseAutoMlRestTransport(AutoMlTransport): - """Base REST backend transport for AutoMl. 
- - Note: This class is not meant to be used directly. Use its sync and - async sub-classes instead. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'automl.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - Args: - host (Optional[str]): - The hostname to connect to (default: 'automl.googleapis.com'). - credentials (Optional[Any]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - - class _BaseCreateDataset: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1beta1/{parent=projects/*/locations/*}/datasets', - 'body': 'dataset', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.CreateDatasetRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAutoMlRestTransport._BaseCreateDataset._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateModel: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ 
must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1beta1/{parent=projects/*/locations/*}/models', - 'body': 'model', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.CreateModelRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAutoMlRestTransport._BaseCreateModel._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteDataset: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1beta1/{name=projects/*/locations/*/datasets/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.DeleteDatasetRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAutoMlRestTransport._BaseDeleteDataset._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteModel: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1beta1/{name=projects/*/locations/*/models/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.DeleteModelRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAutoMlRestTransport._BaseDeleteModel._get_unset_required_fields(query_params)) - - 
query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeployModel: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1beta1/{name=projects/*/locations/*/models/*}:deploy', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.DeployModelRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAutoMlRestTransport._BaseDeployModel._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseExportData: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1beta1/{name=projects/*/locations/*/datasets/*}:exportData', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.ExportDataRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAutoMlRestTransport._BaseExportData._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseExportEvaluatedExamples: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1beta1/{name=projects/*/locations/*/models/*}:exportEvaluatedExamples', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - 
pb_request = service.ExportEvaluatedExamplesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAutoMlRestTransport._BaseExportEvaluatedExamples._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseExportModel: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1beta1/{name=projects/*/locations/*/models/*}:export', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.ExportModelRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAutoMlRestTransport._BaseExportModel._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetAnnotationSpec: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1beta1/{name=projects/*/locations/*/datasets/*/annotationSpecs/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.GetAnnotationSpecRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAutoMlRestTransport._BaseGetAnnotationSpec._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetColumnSpec: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - 
- @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1beta1/{name=projects/*/locations/*/datasets/*/tableSpecs/*/columnSpecs/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.GetColumnSpecRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAutoMlRestTransport._BaseGetColumnSpec._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetDataset: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1beta1/{name=projects/*/locations/*/datasets/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.GetDatasetRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAutoMlRestTransport._BaseGetDataset._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetModel: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1beta1/{name=projects/*/locations/*/models/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.GetModelRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAutoMlRestTransport._BaseGetModel._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetModelEvaluation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def 
_get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1beta1/{name=projects/*/locations/*/models/*/modelEvaluations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.GetModelEvaluationRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAutoMlRestTransport._BaseGetModelEvaluation._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetTableSpec: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1beta1/{name=projects/*/locations/*/datasets/*/tableSpecs/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.GetTableSpecRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAutoMlRestTransport._BaseGetTableSpec._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseImportData: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1beta1/{name=projects/*/locations/*/datasets/*}:importData', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.ImportDataRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAutoMlRestTransport._BaseImportData._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return 
query_params - - class _BaseListColumnSpecs: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1beta1/{parent=projects/*/locations/*/datasets/*/tableSpecs/*}/columnSpecs', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.ListColumnSpecsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAutoMlRestTransport._BaseListColumnSpecs._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListDatasets: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1beta1/{parent=projects/*/locations/*}/datasets', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.ListDatasetsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAutoMlRestTransport._BaseListDatasets._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListModelEvaluations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1beta1/{parent=projects/*/locations/*/models/*}/modelEvaluations', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.ListModelEvaluationsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAutoMlRestTransport._BaseListModelEvaluations._get_unset_required_fields(query_params)) - - query_params["$alt"] = 
"json;enum-encoding=int" - return query_params - - class _BaseListModels: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1beta1/{parent=projects/*/locations/*}/models', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.ListModelsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAutoMlRestTransport._BaseListModels._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListTableSpecs: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1beta1/{parent=projects/*/locations/*/datasets/*}/tableSpecs', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.ListTableSpecsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAutoMlRestTransport._BaseListTableSpecs._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUndeployModel: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1beta1/{name=projects/*/locations/*/models/*}:undeploy', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.UndeployModelRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - 
transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAutoMlRestTransport._BaseUndeployModel._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateColumnSpec: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1beta1/{column_spec.name=projects/*/locations/*/datasets/*/tableSpecs/*/columnSpecs/*}', - 'body': 'column_spec', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.UpdateColumnSpecRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAutoMlRestTransport._BaseUpdateColumnSpec._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateDataset: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1beta1/{dataset.name=projects/*/locations/*/datasets/*}', - 'body': 'dataset', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.UpdateDatasetRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAutoMlRestTransport._BaseUpdateDataset._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateTableSpec: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: 
List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1beta1/{table_spec.name=projects/*/locations/*/datasets/*/tableSpecs/*}', - 'body': 'table_spec', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = service.UpdateTableSpecRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAutoMlRestTransport._BaseUpdateTableSpec._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - -__all__=( - '_BaseAutoMlRestTransport', -) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/prediction_service/__init__.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/prediction_service/__init__.py deleted file mode 100644 index 857ae0200982..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/prediction_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import PredictionServiceClient -from .async_client import PredictionServiceAsyncClient - -__all__ = ( - 'PredictionServiceClient', - 'PredictionServiceAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/prediction_service/async_client.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/prediction_service/async_client.py deleted file mode 100644 index 9b3ea4d87d4a..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/prediction_service/async_client.py +++ /dev/null @@ -1,693 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
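[Editorial aside, not part of the deleted files.] Each ``_Base*`` inner class above factors one RPC into the same four helpers: ``_get_http_options()`` declares the HTTP verb and URI template, ``_get_transcoded_request()`` maps the protobuf request onto that template via ``path_template.transcode``, ``_get_request_body_json()`` JSON-encodes the body for verbs that carry one, and ``_get_query_params_json()`` turns the remaining fields (plus the ``$alt`` marker) into the query string. A concrete REST transport composes them roughly as sketched below; the ``session`` and ``host`` names are assumptions standing in for the transport's real authorized HTTP session and resolved endpoint, and ``request`` is assumed to be a ``service.CreateDatasetRequest``:

.. code-block:: python

    base = _BaseAutoMlRestTransport._BaseCreateDataset

    # Which verb and URI template apply (POST .../datasets, body field 'dataset').
    http_options = base._get_http_options()

    # Substitute path variables from the request into the URI template; the
    # result is a dict with 'method', 'uri', 'body', and 'query_params' keys.
    transcoded = base._get_transcoded_request(http_options, request)

    # JSON-encode the body, and serialize whatever is left into query params.
    body = base._get_request_body_json(transcoded)
    query_params = base._get_query_params_json(transcoded)

    # Assumed requests-style session; the generated transports do the
    # equivalent with their own session object.
    response = session.post(
        f"https://{host}{transcoded['uri']}",
        params=query_params,
        data=body,
    )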
-# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.automl_v1beta1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.automl_v1beta1.types import annotation_payload -from google.cloud.automl_v1beta1.types import data_items -from google.cloud.automl_v1beta1.types import io -from google.cloud.automl_v1beta1.types import operations -from google.cloud.automl_v1beta1.types import prediction_service -from .transports.base import PredictionServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import PredictionServiceGrpcAsyncIOTransport -from .client import PredictionServiceClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -class PredictionServiceAsyncClient: - """AutoML Prediction API. - - On any input that is documented to expect a string parameter in - snake_case or kebab-case, either of those cases is accepted. - """ - - _client: PredictionServiceClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
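-    # (Editorial illustration, not generated code: with the default universe,
-    # the template "automl.{UNIVERSE_DOMAIN}" resolves to the same value as
-    # the deprecated constant, i.e.
-    #     _DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=_DEFAULT_UNIVERSE)
-    #     == "automl.googleapis.com")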
- DEFAULT_ENDPOINT = PredictionServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = PredictionServiceClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = PredictionServiceClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = PredictionServiceClient._DEFAULT_UNIVERSE - - model_path = staticmethod(PredictionServiceClient.model_path) - parse_model_path = staticmethod(PredictionServiceClient.parse_model_path) - common_billing_account_path = staticmethod(PredictionServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(PredictionServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(PredictionServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(PredictionServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(PredictionServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(PredictionServiceClient.parse_common_organization_path) - common_project_path = staticmethod(PredictionServiceClient.common_project_path) - parse_common_project_path = staticmethod(PredictionServiceClient.parse_common_project_path) - common_location_path = staticmethod(PredictionServiceClient.common_location_path) - parse_common_location_path = staticmethod(PredictionServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - PredictionServiceAsyncClient: The constructed client. - """ - return PredictionServiceClient.from_service_account_info.__func__(PredictionServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - PredictionServiceAsyncClient: The constructed client. - """ - return PredictionServiceClient.from_service_account_file.__func__(PredictionServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. 
- - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return PredictionServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> PredictionServiceTransport: - """Returns the transport used by the client instance. - - Returns: - PredictionServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = PredictionServiceClient.get_transport_class - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, PredictionServiceTransport, Callable[..., PredictionServiceTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the prediction service async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,PredictionServiceTransport,Callable[..., PredictionServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the PredictionServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. 
The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = PredictionServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.automl_v1beta1.PredictionServiceAsyncClient`.", - extra = { - "serviceName": "google.cloud.automl.v1beta1.PredictionService", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { - "serviceName": "google.cloud.automl.v1beta1.PredictionService", - "credentialsType": None, - } - ) - - async def predict(self, - request: Optional[Union[prediction_service.PredictRequest, dict]] = None, - *, - name: Optional[str] = None, - payload: Optional[data_items.ExamplePayload] = None, - params: Optional[MutableMapping[str, str]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> prediction_service.PredictResponse: - r"""Perform an online prediction. The prediction result will be - directly returned in the response. Available for following ML - problems, and their expected request payloads: - - - Image Classification - Image in .JPEG, .GIF or .PNG format, - image_bytes up to 30MB. - - Image Object Detection - Image in .JPEG, .GIF or .PNG format, - image_bytes up to 30MB. - - Text Classification - TextSnippet, content up to 60,000 - characters, UTF-8 encoded. - - Text Extraction - TextSnippet, content up to 30,000 - characters, UTF-8 NFC encoded. - - Translation - TextSnippet, content up to 25,000 characters, - UTF-8 encoded. - - Tables - Row, with column values matching the columns of the - model, up to 5MB. Not available for FORECASTING - - [prediction_type][google.cloud.automl.v1beta1.TablesModelMetadata.prediction_type]. - - - Text Sentiment - TextSnippet, content up 500 characters, - UTF-8 encoded. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1beta1 - - async def sample_predict(): - # Create a client - client = automl_v1beta1.PredictionServiceAsyncClient() - - # Initialize request argument(s) - payload = automl_v1beta1.ExamplePayload() - payload.image.image_bytes = b'image_bytes_blob' - - request = automl_v1beta1.PredictRequest( - name="name_value", - payload=payload, - ) - - # Make the request - response = await client.predict(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.automl_v1beta1.types.PredictRequest, dict]]): - The request object. Request message for - [PredictionService.Predict][google.cloud.automl.v1beta1.PredictionService.Predict]. - name (:class:`str`): - Required. Name of the model requested - to serve the prediction. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - payload (:class:`google.cloud.automl_v1beta1.types.ExamplePayload`): - Required. Payload to perform a - prediction on. The payload must match - the problem type that the model was - trained to solve. - - This corresponds to the ``payload`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - params (:class:`MutableMapping[str, str]`): - Additional domain-specific parameters, any string must - be up to 25000 characters long. - - - For Image Classification: - - ``score_threshold`` - (float) A value from 0.0 to - 1.0. When the model makes predictions for an image, - it will only produce results that have at least this - confidence score. The default is 0.5. - - - For Image Object Detection: ``score_threshold`` - - (float) When Model detects objects on the image, it - will only produce bounding boxes which have at least - this confidence score. Value in 0 to 1 range, default - is 0.5. ``max_bounding_box_count`` - (int64) No more - than this number of bounding boxes will be returned - in the response. Default is 100, the requested value - may be limited by server. - - - For Tables: feature_importance - (boolean) Whether - feature importance should be populated in the - returned TablesAnnotation. The default is false. - - This corresponds to the ``params`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.automl_v1beta1.types.PredictResponse: - Response message for - [PredictionService.Predict][google.cloud.automl.v1beta1.PredictionService.Predict]. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
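-        # (Editorial illustration, not generated code: the guard below makes
-        #     await client.predict(request={"name": n, "payload": p})
-        # and
-        #     await client.predict(name=n, payload=p)
-        # equally valid, while mixing `request=` with `name=`/`payload=`
-        # raises ValueError.)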
- flattened_params = [name, payload, params] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, prediction_service.PredictRequest): - request = prediction_service.PredictRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if payload is not None: - request.payload = payload - - if params: - request.params.update(params) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.predict] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def batch_predict(self, - request: Optional[Union[prediction_service.BatchPredictRequest, dict]] = None, - *, - name: Optional[str] = None, - input_config: Optional[io.BatchPredictInputConfig] = None, - output_config: Optional[io.BatchPredictOutputConfig] = None, - params: Optional[MutableMapping[str, str]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Perform a batch prediction. Unlike the online - [Predict][google.cloud.automl.v1beta1.PredictionService.Predict], - batch prediction result won't be immediately available in the - response. Instead, a long running operation object is returned. - User can poll the operation result via - [GetOperation][google.longrunning.Operations.GetOperation] - method. Once the operation is done, - [BatchPredictResult][google.cloud.automl.v1beta1.BatchPredictResult] - is returned in the - [response][google.longrunning.Operation.response] field. - Available for following ML problems: - - - Image Classification - - Image Object Detection - - Video Classification - - Video Object Tracking \* Text Extraction - - Tables - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1beta1 - - async def sample_batch_predict(): - # Create a client - client = automl_v1beta1.PredictionServiceAsyncClient() - - # Initialize request argument(s) - request = automl_v1beta1.BatchPredictRequest( - name="name_value", - ) - - # Make the request - operation = client.batch_predict(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.automl_v1beta1.types.BatchPredictRequest, dict]]): - The request object. Request message for - [PredictionService.BatchPredict][google.cloud.automl.v1beta1.PredictionService.BatchPredict]. - name (:class:`str`): - Required. Name of the model requested - to serve the batch prediction. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - input_config (:class:`google.cloud.automl_v1beta1.types.BatchPredictInputConfig`): - Required. The input configuration for - batch prediction. - - This corresponds to the ``input_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - output_config (:class:`google.cloud.automl_v1beta1.types.BatchPredictOutputConfig`): - Required. The Configuration - specifying where output predictions - should be written. - - This corresponds to the ``output_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - params (:class:`MutableMapping[str, str]`): - Required. Additional domain-specific parameters for the - predictions, any string must be up to 25000 characters - long. - - - For Text Classification: - - ``score_threshold`` - (float) A value from 0.0 to - 1.0. When the model makes predictions for a text - snippet, it will only produce results that have at - least this confidence score. The default is 0.5. - - - For Image Classification: - - ``score_threshold`` - (float) A value from 0.0 to - 1.0. When the model makes predictions for an image, - it will only produce results that have at least this - confidence score. The default is 0.5. - - - For Image Object Detection: - - ``score_threshold`` - (float) When Model detects - objects on the image, it will only produce bounding - boxes which have at least this confidence score. - Value in 0 to 1 range, default is 0.5. - ``max_bounding_box_count`` - (int64) No more than - this number of bounding boxes will be produced per - image. Default is 100, the requested value may be - limited by server. - - - For Video Classification : - - ``score_threshold`` - (float) A value from 0.0 to - 1.0. When the model makes predictions for a video, it - will only produce results that have at least this - confidence score. The default is 0.5. - ``segment_classification`` - (boolean) Set to true to - request segment-level classification. AutoML Video - Intelligence returns labels and their confidence - scores for the entire segment of the video that user - specified in the request configuration. The default - is "true". ``shot_classification`` - (boolean) Set to - true to request shot-level classification. AutoML - Video Intelligence determines the boundaries for each - camera shot in the entire segment of the video that - user specified in the request configuration. 
AutoML - Video Intelligence then returns labels and their - confidence scores for each detected shot, along with - the start and end time of the shot. WARNING: Model - evaluation is not done for this classification type, - the quality of it depends on training data, but there - are no metrics provided to describe that quality. The - default is "false". ``1s_interval_classification`` - - (boolean) Set to true to request classification for a - video at one-second intervals. AutoML Video - Intelligence returns labels and their confidence - scores for each second of the entire segment of the - video that user specified in the request - configuration. WARNING: Model evaluation is not done - for this classification type, the quality of it - depends on training data, but there are no metrics - provided to describe that quality. The default is - "false". - - - For Tables: - - feature_importance - (boolean) Whether feature - importance should be populated in the returned - TablesAnnotations. The default is false. - - - For Video Object Tracking: - - ``score_threshold`` - (float) When Model detects - objects on video frames, it will only produce - bounding boxes which have at least this confidence - score. Value in 0 to 1 range, default is 0.5. - ``max_bounding_box_count`` - (int64) No more than - this number of bounding boxes will be returned per - frame. Default is 100, the requested value may be - limited by server. ``min_bounding_box_size`` - - (float) Only bounding boxes with shortest edge at - least that long as a relative value of video frame - size will be returned. Value in 0 to 1 range. Default - is 0. - - This corresponds to the ``params`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.automl_v1beta1.types.BatchPredictResult` Result of the Batch Predict. This message is returned in - [response][google.longrunning.Operation.response] of - the operation returned by the - [PredictionService.BatchPredict][google.cloud.automl.v1beta1.PredictionService.BatchPredict]. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name, input_config, output_config, params] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, prediction_service.BatchPredictRequest): - request = prediction_service.BatchPredictRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - if input_config is not None: - request.input_config = input_config - if output_config is not None: - request.output_config = output_config - - if params: - request.params.update(params) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.batch_predict] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - prediction_service.BatchPredictResult, - metadata_type=operations.OperationMetadata, - ) - - # Done; return the response. - return response - - async def __aenter__(self) -> "PredictionServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "PredictionServiceAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/prediction_service/client.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/prediction_service/client.py deleted file mode 100644 index 93fbc62d1307..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/prediction_service/client.py +++ /dev/null @@ -1,1051 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
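[Editorial aside, not part of the deleted files.] The generated samples above build the async client ad hoc. Because the deleted ``async_client.py`` also defines ``__aenter__``/``__aexit__`` (the latter calls ``transport.close()``), the client can be driven as an async context manager so the transport is shut down deterministically. A minimal sketch; the project, location, and model IDs are placeholders:

.. code-block:: python

    import asyncio

    from google.cloud import automl_v1beta1

    async def main():
        # Leaving the block closes the underlying transport.
        async with automl_v1beta1.PredictionServiceAsyncClient() as client:
            payload = automl_v1beta1.ExamplePayload()
            payload.image.image_bytes = b"image_bytes_blob"
            response = await client.predict(
                name="projects/my-project/locations/us-central1/models/my-model",
                payload=payload,
            )
            print(response)

    asyncio.run(main())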
-# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.automl_v1beta1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.automl_v1beta1.types import annotation_payload -from google.cloud.automl_v1beta1.types import data_items -from google.cloud.automl_v1beta1.types import io -from google.cloud.automl_v1beta1.types import operations -from google.cloud.automl_v1beta1.types import prediction_service -from .transports.base import PredictionServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import PredictionServiceGrpcTransport -from .transports.grpc_asyncio import PredictionServiceGrpcAsyncIOTransport -from .transports.rest import PredictionServiceRestTransport - - -class PredictionServiceClientMeta(type): - """Metaclass for the PredictionService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[PredictionServiceTransport]] - _transport_registry["grpc"] = PredictionServiceGrpcTransport - _transport_registry["grpc_asyncio"] = PredictionServiceGrpcAsyncIOTransport - _transport_registry["rest"] = PredictionServiceRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[PredictionServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class PredictionServiceClient(metaclass=PredictionServiceClientMeta): - """AutoML Prediction API. - - On any input that is documented to expect a string parameter in - snake_case or kebab-case, either of those cases is accepted. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. 
- - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = "automl.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - _DEFAULT_ENDPOINT_TEMPLATE = "automl.{UNIVERSE_DOMAIN}" - _DEFAULT_UNIVERSE = "googleapis.com" - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - PredictionServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - PredictionServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> PredictionServiceTransport: - """Returns the transport used by the client instance. - - Returns: - PredictionServiceTransport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def model_path(project: str,location: str,model: str,) -> str: - """Returns a fully-qualified model string.""" - return "projects/{project}/locations/{location}/models/{model}".format(project=project, location=location, model=model, ) - - @staticmethod - def parse_model_path(path: str) -> Dict[str,str]: - """Parses a model path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/models/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Deprecated. Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. 
- (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. - - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"]. - """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided.
- use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = PredictionServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = PredictionServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = PredictionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. - """ - universe_domain = PredictionServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
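A small sketch of the endpoint resolution implemented by ``_get_api_endpoint`` above, calling the private helper directly for illustration only; the positional arguments mirror its ``(api_override, client_cert_source, universe_domain, use_mtls_endpoint)`` signature:

.. code-block:: python

    from google.cloud import automl_v1beta1

    c = automl_v1beta1.PredictionServiceClient
    # No override, no client cert, default universe, "auto" mTLS mode:
    # the endpoint template is filled with the universe domain.
    print(c._get_api_endpoint(None, None, "googleapis.com", "auto"))
    # -> automl.googleapis.com
    # A cert source in "auto" mode switches to the mTLS endpoint.
    print(c._get_api_endpoint(None, object(), "googleapis.com", "auto"))
    # -> automl.mtls.googleapis.com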
- """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, PredictionServiceTransport, Callable[..., PredictionServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the prediction service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,PredictionServiceTransport,Callable[..., PredictionServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the PredictionServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) - - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = PredictionServiceClient._read_environment_variables() - self._client_cert_source = PredictionServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = PredictionServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER - # Setup logging. - client_logging.initialize_logging() - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, PredictionServiceTransport) - if transport_provided: - # transport is a PredictionServiceTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = cast(PredictionServiceTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - PredictionServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[PredictionServiceTransport], Callable[..., PredictionServiceTransport]] = ( - PredictionServiceClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., PredictionServiceTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.automl_v1beta1.PredictionServiceClient`.", - extra = { - "serviceName": "google.cloud.automl.v1beta1.PredictionService", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.cloud.automl.v1beta1.PredictionService", - "credentialsType": None, - } - ) - - def predict(self, - request: Optional[Union[prediction_service.PredictRequest, dict]] = None, - *, - name: Optional[str] = None, - payload: Optional[data_items.ExamplePayload] = None, - params: Optional[MutableMapping[str, str]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> prediction_service.PredictResponse: - r"""Perform an online prediction. The prediction result will be - directly returned in the response. Available for the following ML - problems, and their expected request payloads: - - - Image Classification - Image in .JPEG, .GIF or .PNG format, - image_bytes up to 30MB. - - Image Object Detection - Image in .JPEG, .GIF or .PNG format, - image_bytes up to 30MB. - - Text Classification - TextSnippet, content up to 60,000 - characters, UTF-8 encoded. - - Text Extraction - TextSnippet, content up to 30,000 - characters, UTF-8 NFC encoded. - - Translation - TextSnippet, content up to 25,000 characters, - UTF-8 encoded. - - Tables - Row, with column values matching the columns of the - model, up to 5MB. Not available for FORECASTING - - [prediction_type][google.cloud.automl.v1beta1.TablesModelMetadata.prediction_type]. - - - Text Sentiment - TextSnippet, content up to 500 characters, - UTF-8 encoded. - - ..
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1beta1 - - def sample_predict(): - # Create a client - client = automl_v1beta1.PredictionServiceClient() - - # Initialize request argument(s) - payload = automl_v1beta1.ExamplePayload() - payload.image.image_bytes = b'image_bytes_blob' - - request = automl_v1beta1.PredictRequest( - name="name_value", - payload=payload, - ) - - # Make the request - response = client.predict(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.automl_v1beta1.types.PredictRequest, dict]): - The request object. Request message for - [PredictionService.Predict][google.cloud.automl.v1beta1.PredictionService.Predict]. - name (str): - Required. Name of the model requested - to serve the prediction. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - payload (google.cloud.automl_v1beta1.types.ExamplePayload): - Required. Payload to perform a - prediction on. The payload must match - the problem type that the model was - trained to solve. - - This corresponds to the ``payload`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - params (MutableMapping[str, str]): - Additional domain-specific parameters, any string must - be up to 25000 characters long. - - - For Image Classification: - - ``score_threshold`` - (float) A value from 0.0 to - 1.0. When the model makes predictions for an image, - it will only produce results that have at least this - confidence score. The default is 0.5. - - - For Image Object Detection: ``score_threshold`` - - (float) When Model detects objects on the image, it - will only produce bounding boxes which have at least - this confidence score. Value in 0 to 1 range, default - is 0.5. ``max_bounding_box_count`` - (int64) No more - than this number of bounding boxes will be returned - in the response. Default is 100, the requested value - may be limited by server. - - - For Tables: feature_importance - (boolean) Whether - feature importance should be populated in the - returned TablesAnnotation. The default is false. - - This corresponds to the ``params`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.automl_v1beta1.types.PredictResponse: - Response message for - [PredictionService.Predict][google.cloud.automl.v1beta1.PredictionService.Predict]. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
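    # The coercion step below enforces "request XOR flattened fields"; a
    # hypothetical illustration of the calling styles (not part of the
    # original file):
    #
    #     client.predict(request={"name": "projects/p/locations/l/models/m"})   # ok
    #     client.predict(name="projects/p/locations/l/models/m", payload=pl)    # ok
    #     client.predict(request={"name": "..."}, payload=pl)                   # ValueError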
- flattened_params = [name, payload, params] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, prediction_service.PredictRequest): - request = prediction_service.PredictRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if payload is not None: - request.payload = payload - if params is not None: - request.params = params - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.predict] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def batch_predict(self, - request: Optional[Union[prediction_service.BatchPredictRequest, dict]] = None, - *, - name: Optional[str] = None, - input_config: Optional[io.BatchPredictInputConfig] = None, - output_config: Optional[io.BatchPredictOutputConfig] = None, - params: Optional[MutableMapping[str, str]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Perform a batch prediction. Unlike the online - [Predict][google.cloud.automl.v1beta1.PredictionService.Predict], - the batch prediction result won't be immediately available in the - response. Instead, a long-running operation object is returned. - The user can poll the operation result via the - [GetOperation][google.longrunning.Operations.GetOperation] - method. Once the operation is done, - [BatchPredictResult][google.cloud.automl.v1beta1.BatchPredictResult] - is returned in the - [response][google.longrunning.Operation.response] field. - Available for the following ML problems: - - - Image Classification - - Image Object Detection - - Video Classification - - Video Object Tracking - - Text Extraction - - Tables - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import automl_v1beta1 - - def sample_batch_predict(): - # Create a client - client = automl_v1beta1.PredictionServiceClient() - - # Initialize request argument(s) - request = automl_v1beta1.BatchPredictRequest( - name="name_value", - ) - - # Make the request - operation = client.batch_predict(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.automl_v1beta1.types.BatchPredictRequest, dict]): - The request object. Request message for - [PredictionService.BatchPredict][google.cloud.automl.v1beta1.PredictionService.BatchPredict]. - name (str): - Required. Name of the model requested - to serve the batch prediction. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - input_config (google.cloud.automl_v1beta1.types.BatchPredictInputConfig): - Required. The input configuration for - batch prediction. - - This corresponds to the ``input_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - output_config (google.cloud.automl_v1beta1.types.BatchPredictOutputConfig): - Required. The Configuration - specifying where output predictions - should be written. - - This corresponds to the ``output_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - params (MutableMapping[str, str]): - Required. Additional domain-specific parameters for the - predictions, any string must be up to 25000 characters - long. - - - For Text Classification: - - ``score_threshold`` - (float) A value from 0.0 to - 1.0. When the model makes predictions for a text - snippet, it will only produce results that have at - least this confidence score. The default is 0.5. - - - For Image Classification: - - ``score_threshold`` - (float) A value from 0.0 to - 1.0. When the model makes predictions for an image, - it will only produce results that have at least this - confidence score. The default is 0.5. - - - For Image Object Detection: - - ``score_threshold`` - (float) When Model detects - objects on the image, it will only produce bounding - boxes which have at least this confidence score. - Value in 0 to 1 range, default is 0.5. - ``max_bounding_box_count`` - (int64) No more than - this number of bounding boxes will be produced per - image. Default is 100, the requested value may be - limited by server. - - - For Video Classification : - - ``score_threshold`` - (float) A value from 0.0 to - 1.0. When the model makes predictions for a video, it - will only produce results that have at least this - confidence score. The default is 0.5. - ``segment_classification`` - (boolean) Set to true to - request segment-level classification. AutoML Video - Intelligence returns labels and their confidence - scores for the entire segment of the video that user - specified in the request configuration. The default - is "true". ``shot_classification`` - (boolean) Set to - true to request shot-level classification. AutoML - Video Intelligence determines the boundaries for each - camera shot in the entire segment of the video that - user specified in the request configuration. 
AutoML - Video Intelligence then returns labels and their - confidence scores for each detected shot, along with - the start and end time of the shot. WARNING: Model - evaluation is not done for this classification type, - the quality of it depends on training data, but there - are no metrics provided to describe that quality. The - default is "false". ``1s_interval_classification`` - - (boolean) Set to true to request classification for a - video at one-second intervals. AutoML Video - Intelligence returns labels and their confidence - scores for each second of the entire segment of the - video that user specified in the request - configuration. WARNING: Model evaluation is not done - for this classification type, the quality of it - depends on training data, but there are no metrics - provided to describe that quality. The default is - "false". - - - For Tables: - - feature_importance - (boolean) Whether feature - importance should be populated in the returned - TablesAnnotations. The default is false. - - - For Video Object Tracking: - - ``score_threshold`` - (float) When Model detects - objects on video frames, it will only produce - bounding boxes which have at least this confidence - score. Value in 0 to 1 range, default is 0.5. - ``max_bounding_box_count`` - (int64) No more than - this number of bounding boxes will be returned per - frame. Default is 100, the requested value may be - limited by server. ``min_bounding_box_size`` - - (float) Only bounding boxes with shortest edge at - least that long as a relative value of video frame - size will be returned. Value in 0 to 1 range. Default - is 0. - - This corresponds to the ``params`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.automl_v1beta1.types.BatchPredictResult` Result of the Batch Predict. This message is returned in - [response][google.longrunning.Operation.response] of - the operation returned by the - [PredictionService.BatchPredict][google.cloud.automl.v1beta1.PredictionService.BatchPredict]. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name, input_config, output_config, params] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, prediction_service.BatchPredictRequest): - request = prediction_service.BatchPredictRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
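    # A hypothetical caller-side sketch of the long-running flow this method
    # implements (the Operation future is assembled near the end, below):
    #
    #     op = client.batch_predict(
    #         name="projects/p/locations/l/models/m",
    #         input_config=input_config,
    #         output_config=output_config,
    #         params={},
    #     )
    #     result = op.result(timeout=3600)  # blocks, polling GetOperation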
- if name is not None: - request.name = name - if input_config is not None: - request.input_config = input_config - if output_config is not None: - request.output_config = output_config - if params is not None: - request.params = params - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.batch_predict] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - prediction_service.BatchPredictResult, - metadata_type=operations.OperationMetadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "PredictionServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "PredictionServiceClient", -) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/prediction_service/transports/README.rst b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/prediction_service/transports/README.rst deleted file mode 100644 index 504aaca0a144..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/prediction_service/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`PredictionServiceTransport` is the ABC for all transports. -- public child `PredictionServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `PredictionServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BasePredictionServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `PredictionServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/prediction_service/transports/__init__.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/prediction_service/transports/__init__.py deleted file mode 100644 index 5202ac1c1205..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/prediction_service/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import PredictionServiceTransport -from .grpc import PredictionServiceGrpcTransport -from .grpc_asyncio import PredictionServiceGrpcAsyncIOTransport -from .rest import PredictionServiceRestTransport -from .rest import PredictionServiceRestInterceptor - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[PredictionServiceTransport]] -_transport_registry['grpc'] = PredictionServiceGrpcTransport -_transport_registry['grpc_asyncio'] = PredictionServiceGrpcAsyncIOTransport -_transport_registry['rest'] = PredictionServiceRestTransport - -__all__ = ( - 'PredictionServiceTransport', - 'PredictionServiceGrpcTransport', - 'PredictionServiceGrpcAsyncIOTransport', - 'PredictionServiceRestTransport', - 'PredictionServiceRestInterceptor', -) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/prediction_service/transports/base.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/prediction_service/transports/base.py deleted file mode 100644 index a581b2f0042f..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/prediction_service/transports/base.py +++ /dev/null @@ -1,175 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
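The registry assembled in the transports package above maps string labels to concrete transport classes. A small sketch of resolving one by hand; the registry is module-private, so this is illustrative only:

.. code-block:: python

    from google.cloud.automl_v1beta1.services.prediction_service import transports

    # Labels as registered above: "grpc", "grpc_asyncio", "rest".
    transport_cls = transports._transport_registry["grpc_asyncio"]
    print(transport_cls.__name__)  # PredictionServiceGrpcAsyncIOTransport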
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.automl_v1beta1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.automl_v1beta1.types import prediction_service -from google.longrunning import operations_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class PredictionServiceTransport(abc.ABC): - """Abstract transport class for PredictionService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'automl.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'automl.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. 
- if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.predict: gapic_v1.method.wrap_method( - self.predict, - default_timeout=60.0, - client_info=client_info, - ), - self.batch_predict: gapic_v1.method.wrap_method( - self.batch_predict, - default_timeout=60.0, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! - """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def predict(self) -> Callable[ - [prediction_service.PredictRequest], - Union[ - prediction_service.PredictResponse, - Awaitable[prediction_service.PredictResponse] - ]]: - raise NotImplementedError() - - @property - def batch_predict(self) -> Callable[ - [prediction_service.BatchPredictRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'PredictionServiceTransport', -) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/prediction_service/transports/grpc.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/prediction_service/transports/grpc.py deleted file mode 100644 index 0d2cea306427..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/prediction_service/transports/grpc.py +++ /dev/null @@ -1,431 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
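The ``_prep_wrapped_methods`` hook in the base transport above is what attaches per-RPC defaults such as the 60-second timeout. A minimal sketch of what ``gapic_v1.method.wrap_method`` gives callers, using a stand-in function rather than a real stub:

.. code-block:: python

    from google.api_core import gapic_v1

    def fake_predict(request, timeout=None, metadata=()):
        # Stand-in for a transport stub; shows what the wrapper injects.
        print("timeout:", timeout, "metadata entries:", len(metadata))

    wrapped = gapic_v1.method.wrap_method(fake_predict, default_timeout=60.0)
    wrapped({})                # runs with the 60s default timeout
    wrapped({}, timeout=10.0)  # per-call override takes precedence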
-# -import json -import logging as std_logging -import pickle -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore - -from google.cloud.automl_v1beta1.types import prediction_service -from google.longrunning import operations_pb2 # type: ignore -from .base import PredictionServiceTransport, DEFAULT_CLIENT_INFO - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER - def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.automl.v1beta1.PredictionService", - "rpcName": client_call_details.method, - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - - response = continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = response.trailing_metadata() - # Convert the gRPC trailing metadata into a dict of str key/value pairs - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = response.result() - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response for {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.automl.v1beta1.PredictionService", - "rpcName": client_call_details.method, - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class PredictionServiceGrpcTransport(PredictionServiceTransport): - """gRPC backend transport for PredictionService. - - AutoML Prediction API. - - On any input that is documented to expect a string parameter in - snake_case or kebab-case, either of those cases is accepted.
- - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'automl.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'automl.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional[Sequence[str]]): A list of scopes. This argument is - ignored if a ``channel`` instance is provided. - channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library.
- always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) - - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'automl.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. 
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is mutually exclusive with credentials.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            kwargs (Optional[dict]): Keyword arguments, which are passed to the
-                channel creation.
-        Returns:
-            grpc.Channel: A gRPC channel object.
-
-        Raises:
-            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
-                and ``credentials_file`` are passed.
-        """
-
-        return grpc_helpers.create_channel(
-            host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            quota_project_id=quota_project_id,
-            default_scopes=cls.AUTH_SCOPES,
-            scopes=scopes,
-            default_host=cls.DEFAULT_HOST,
-            **kwargs
-        )
-
-    @property
-    def grpc_channel(self) -> grpc.Channel:
-        """Return the channel designed to connect to this service.
-        """
-        return self._grpc_channel
-
-    @property
-    def operations_client(self) -> operations_v1.OperationsClient:
-        """Create the client designed to process long-running operations.
-
-        This property caches on the instance; repeated calls return the same
-        client.
-        """
-        # Quick check: Only create a new client if we do not already have one.
-        if self._operations_client is None:
-            self._operations_client = operations_v1.OperationsClient(
-                self._logged_channel
-            )
-
-        # Return the client from cache.
-        return self._operations_client
-
-    @property
-    def predict(self) -> Callable[
-            [prediction_service.PredictRequest],
-            prediction_service.PredictResponse]:
-        r"""Return a callable for the predict method over gRPC.
-
-        Perform an online prediction. The prediction result will be
-        directly returned in the response. Available for the following ML
-        problems, and their expected request payloads:
-
-        - Image Classification - Image in .JPEG, .GIF or .PNG format,
-          image_bytes up to 30MB.
-        - Image Object Detection - Image in .JPEG, .GIF or .PNG format,
-          image_bytes up to 30MB.
-        - Text Classification - TextSnippet, content up to 60,000
-          characters, UTF-8 encoded.
-        - Text Extraction - TextSnippet, content up to 30,000
-          characters, UTF-8 NFC encoded.
-        - Translation - TextSnippet, content up to 25,000 characters,
-          UTF-8 encoded.
-        - Tables - Row, with column values matching the columns of the
-          model, up to 5MB. Not available for FORECASTING
-
-          [prediction_type][google.cloud.automl.v1beta1.TablesModelMetadata.prediction_type].
-
-        - Text Sentiment - TextSnippet, content up to 500 characters,
-          UTF-8 encoded.
-
-        Returns:
-            Callable[[~.PredictRequest],
-                    ~.PredictResponse]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'predict' not in self._stubs:
-            self._stubs['predict'] = self._logged_channel.unary_unary(
-                '/google.cloud.automl.v1beta1.PredictionService/Predict',
-                request_serializer=prediction_service.PredictRequest.serialize,
-                response_deserializer=prediction_service.PredictResponse.deserialize,
-            )
-        return self._stubs['predict']
-
-    @property
-    def batch_predict(self) -> Callable[
-            [prediction_service.BatchPredictRequest],
-            operations_pb2.Operation]:
-        r"""Return a callable for the batch predict method over gRPC.
-
-        Perform a batch prediction. Unlike the online
-        [Predict][google.cloud.automl.v1beta1.PredictionService.Predict],
-        the batch prediction result won't be immediately available in the
-        response. Instead, a long-running operation object is returned.
-        The user can poll the operation result via the
-        [GetOperation][google.longrunning.Operations.GetOperation]
-        method. Once the operation is done,
-        [BatchPredictResult][google.cloud.automl.v1beta1.BatchPredictResult]
-        is returned in the
-        [response][google.longrunning.Operation.response] field.
-        Available for the following ML problems:
-
-        - Image Classification
-        - Image Object Detection
-        - Video Classification
-        - Video Object Tracking
-        - Text Extraction
-        - Tables
-
-        Returns:
-            Callable[[~.BatchPredictRequest],
-                    ~.Operation]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'batch_predict' not in self._stubs:
-            self._stubs['batch_predict'] = self._logged_channel.unary_unary(
-                '/google.cloud.automl.v1beta1.PredictionService/BatchPredict',
-                request_serializer=prediction_service.BatchPredictRequest.serialize,
-                response_deserializer=operations_pb2.Operation.FromString,
-            )
-        return self._stubs['batch_predict']
-
-    def close(self):
-        self._logged_channel.close()
-
-    @property
-    def kind(self) -> str:
-        return "grpc"
-
-
-__all__ = (
-    'PredictionServiceGrpcTransport',
-)
diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/prediction_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/prediction_service/transports/grpc_asyncio.py
deleted file mode 100644
index 241f8e55ca9d..000000000000
--- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/prediction_service/transports/grpc_asyncio.py
+++ /dev/null
@@ -1,456 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
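The synchronous transport above accepts either a ready-made ``grpc.Channel`` or the arguments needed to build one via ``create_channel``. A minimal sketch of constructing it by hand, assuming Application Default Credentials are configured in the environment (the client normally builds the transport for you):

.. code-block:: python

    # Sketch: constructing the sync gRPC transport directly (assumes ADC is set up).
    from google.cloud.automl_v1beta1.services.prediction_service import (
        PredictionServiceClient,
    )
    from google.cloud.automl_v1beta1.services.prediction_service.transports import (
        PredictionServiceGrpcTransport,
    )

    # With no explicit channel, __init__ falls back to type(self).create_channel,
    # which resolves credentials via google.auth.default().
    transport = PredictionServiceGrpcTransport(host="automl.googleapis.com")

    # Hand the transport to the client instead of letting it build its own.
    client = PredictionServiceClient(transport=transport)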
-# -import inspect -import json -import pickle -import logging as std_logging -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import exceptions as core_exceptions -from google.api_core import retry_async as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.automl_v1beta1.types import prediction_service -from google.longrunning import operations_pb2 # type: ignore -from .base import PredictionServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import PredictionServiceGrpcTransport - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER - async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.automl.v1beta1.PredictionService", - "rpcName": str(client_call_details.method), - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - response = await continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = await response.trailing_metadata() - # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = await response - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response to rpc {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.automl.v1beta1.PredictionService", - "rpcName": str(client_call_details.method), - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class PredictionServiceGrpcAsyncIOTransport(PredictionServiceTransport): - """gRPC AsyncIO backend transport for PredictionService. 
-
-    AutoML Prediction API.
-
-    On any input that is documented to expect a string parameter in
-    snake_case or kebab-case, either of those cases is accepted.
-
-    This class defines the same methods as the primary client, so the
-    primary client can load the underlying transport implementation
-    and call it.
-
-    It sends protocol buffers over the wire using gRPC (which is built on
-    top of HTTP/2); the ``grpcio`` package must be installed.
-    """
-
-    _grpc_channel: aio.Channel
-    _stubs: Dict[str, Callable] = {}
-
-    @classmethod
-    def create_channel(cls,
-            host: str = 'automl.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            quota_project_id: Optional[str] = None,
-            **kwargs) -> aio.Channel:
-        """Create and return a gRPC AsyncIO channel object.
-        Args:
-            host (Optional[str]): The host for the channel to use.
-            credentials (Optional[~.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify this application to the service. If
-                none are specified, the client will attempt to ascertain
-                the credentials from the environment.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            kwargs (Optional[dict]): Keyword arguments, which are passed to the
-                channel creation.
-        Returns:
-            aio.Channel: A gRPC AsyncIO channel object.
-        """
-
-        return grpc_helpers_async.create_channel(
-            host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            quota_project_id=quota_project_id,
-            default_scopes=cls.AUTH_SCOPES,
-            scopes=scopes,
-            default_host=cls.DEFAULT_HOST,
-            **kwargs
-        )
-
-    def __init__(self, *,
-            host: str = 'automl.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None,
-            api_mtls_endpoint: Optional[str] = None,
-            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
-            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            api_audience: Optional[str] = None,
-            ) -> None:
-        """Instantiate the transport.
-
-        Args:
-            host (Optional[str]):
-                The hostname to connect to (default: 'automl.googleapis.com').
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-                This argument is ignored if a ``channel`` instance is provided.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if a ``channel`` instance is provided.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]):
-                A ``Channel`` instance through which to make calls, or a Callable
-                that constructs and returns one. If set to None, ``self.create_channel``
-                is used to create the channel. If a Callable is given, it will be called
-                with the same arguments as used in ``self.create_channel``.
-            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
-                If provided, it overrides the ``host`` argument and tries to create
-                a mutual TLS channel with client SSL credentials from
-                ``client_cert_source`` or application default SSL credentials.
-            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                Deprecated. A callback to provide client SSL certificate bytes and
-                private key bytes, both in PEM format. It is ignored if
-                ``api_mtls_endpoint`` is None.
-            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
-                for the grpc channel. It is ignored if a ``channel`` instance is provided.
-            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                A callback to provide client certificate bytes and private key bytes,
-                both in PEM format. It is used to configure a mutual TLS channel. It is
-                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
-                be used for service account credentials.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
-            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
-                and ``credentials_file`` are passed.
-        """
-        self._grpc_channel = None
-        self._ssl_channel_credentials = ssl_channel_credentials
-        self._stubs: Dict[str, Callable] = {}
-        self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None
-
-        if api_mtls_endpoint:
-            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
-        if client_cert_source:
-            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
-
-        if isinstance(channel, aio.Channel):
-            # Ignore credentials if a channel was passed.
-            credentials = None
-            self._ignore_credentials = True
-            # If a channel was explicitly provided, set it.
-            self._grpc_channel = channel
-            self._ssl_channel_credentials = None
-        else:
-            if api_mtls_endpoint:
-                host = api_mtls_endpoint
-
-                # Create SSL credentials with client_cert_source or application
-                # default SSL credentials.
-                if client_cert_source:
-                    cert, key = client_cert_source()
-                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
-                        certificate_chain=cert, private_key=key
-                    )
-                else:
-                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
-
-            else:
-                if client_cert_source_for_mtls and not ssl_channel_credentials:
-                    cert, key = client_cert_source_for_mtls()
-                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
-                        certificate_chain=cert, private_key=key
-                    )
-
-        # The base transport sets the host, credentials and scopes
-        super().__init__(
-            host=host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            scopes=scopes,
-            quota_project_id=quota_project_id,
-            client_info=client_info,
-            always_use_jwt_access=always_use_jwt_access,
-            api_audience=api_audience,
-        )
-
-        if not self._grpc_channel:
-            # initialize with the provided callable or the default channel
-            channel_init = channel or type(self).create_channel
-            self._grpc_channel = channel_init(
-                self._host,
-                # use the credentials which are saved
-                credentials=self._credentials,
-                # Set ``credentials_file`` to ``None`` here as
-                # the credentials that we saved earlier should be used.
-                credentials_file=None,
-                scopes=self._scopes,
-                ssl_credentials=self._ssl_channel_credentials,
-                quota_project_id=quota_project_id,
-                options=[
-                    ("grpc.max_send_message_length", -1),
-                    ("grpc.max_receive_message_length", -1),
-                ],
-            )
-
-        self._interceptor = _LoggingClientAIOInterceptor()
-        self._grpc_channel._unary_unary_interceptors.append(self._interceptor)
-        self._logged_channel = self._grpc_channel
-        self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters
-        # Wrap messages. This must be done after self._logged_channel exists
-        self._prep_wrapped_messages(client_info)
-
-    @property
-    def grpc_channel(self) -> aio.Channel:
-        """Create the channel designed to connect to this service.
-
-        This property caches on the instance; repeated calls return
-        the same channel.
-        """
-        # Return the channel from cache.
-        return self._grpc_channel
-
-    @property
-    def operations_client(self) -> operations_v1.OperationsAsyncClient:
-        """Create the client designed to process long-running operations.
-
-        This property caches on the instance; repeated calls return the same
-        client.
-        """
-        # Quick check: Only create a new client if we do not already have one.
-        if self._operations_client is None:
-            self._operations_client = operations_v1.OperationsAsyncClient(
-                self._logged_channel
-            )
-
-        # Return the client from cache.
-        return self._operations_client
-
-    @property
-    def predict(self) -> Callable[
-            [prediction_service.PredictRequest],
-            Awaitable[prediction_service.PredictResponse]]:
-        r"""Return a callable for the predict method over gRPC.
-
-        Perform an online prediction. The prediction result will be
-        directly returned in the response. Available for the following ML
-        problems, and their expected request payloads:
-
-        - Image Classification - Image in .JPEG, .GIF or .PNG format,
-          image_bytes up to 30MB.
-        - Image Object Detection - Image in .JPEG, .GIF or .PNG format,
-          image_bytes up to 30MB.
-        - Text Classification - TextSnippet, content up to 60,000
-          characters, UTF-8 encoded.
-        - Text Extraction - TextSnippet, content up to 30,000
-          characters, UTF-8 NFC encoded.
-        - Translation - TextSnippet, content up to 25,000 characters,
-          UTF-8 encoded.
-        - Tables - Row, with column values matching the columns of the
-          model, up to 5MB. Not available for FORECASTING
-
-          [prediction_type][google.cloud.automl.v1beta1.TablesModelMetadata.prediction_type].
-
-        - Text Sentiment - TextSnippet, content up to 500 characters,
-          UTF-8 encoded.
-
-        Returns:
-            Callable[[~.PredictRequest],
-                    Awaitable[~.PredictResponse]]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'predict' not in self._stubs:
-            self._stubs['predict'] = self._logged_channel.unary_unary(
-                '/google.cloud.automl.v1beta1.PredictionService/Predict',
-                request_serializer=prediction_service.PredictRequest.serialize,
-                response_deserializer=prediction_service.PredictResponse.deserialize,
-            )
-        return self._stubs['predict']
-
-    @property
-    def batch_predict(self) -> Callable[
-            [prediction_service.BatchPredictRequest],
-            Awaitable[operations_pb2.Operation]]:
-        r"""Return a callable for the batch predict method over gRPC.
-
-        Perform a batch prediction. Unlike the online
-        [Predict][google.cloud.automl.v1beta1.PredictionService.Predict],
-        the batch prediction result won't be immediately available in the
-        response. Instead, a long-running operation object is returned.
-        The user can poll the operation result via the
-        [GetOperation][google.longrunning.Operations.GetOperation]
-        method. Once the operation is done,
-        [BatchPredictResult][google.cloud.automl.v1beta1.BatchPredictResult]
-        is returned in the
-        [response][google.longrunning.Operation.response] field.
-        Available for the following ML problems:
-
-        - Image Classification
-        - Image Object Detection
-        - Video Classification
-        - Video Object Tracking
-        - Text Extraction
-        - Tables
-
-        Returns:
-            Callable[[~.BatchPredictRequest],
-                    Awaitable[~.Operation]]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
- if 'batch_predict' not in self._stubs: - self._stubs['batch_predict'] = self._logged_channel.unary_unary( - '/google.cloud.automl.v1beta1.PredictionService/BatchPredict', - request_serializer=prediction_service.BatchPredictRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['batch_predict'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.predict: self._wrap_method( - self.predict, - default_timeout=60.0, - client_info=client_info, - ), - self.batch_predict: self._wrap_method( - self.batch_predict, - default_timeout=60.0, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - -__all__ = ( - 'PredictionServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/prediction_service/transports/rest.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/prediction_service/transports/rest.py deleted file mode 100644 index f5cb0a3ae4ad..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/prediction_service/transports/rest.py +++ /dev/null @@ -1,592 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
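In the async transport, ``_prep_wrapped_messages`` wraps ``predict`` and ``batch_predict`` with a default 60-second timeout, which individual calls can override. A short sketch of what this looks like from the async client surface (the project, location, and model IDs below are placeholders):

.. code-block:: python

    # Sketch: an online prediction through the async client (placeholder IDs).
    import asyncio

    from google.cloud import automl_v1beta1


    async def main():
        client = automl_v1beta1.PredictionServiceAsyncClient()
        payload = automl_v1beta1.ExamplePayload(
            text_snippet=automl_v1beta1.TextSnippet(
                content="hello world", mime_type="text/plain"
            )
        )
        # A per-call timeout overrides the wrapped 60s default.
        response = await client.predict(
            name="projects/my-project/locations/us-central1/models/my-model",
            payload=payload,
            timeout=30.0,
        )
        print(response.payload)


    asyncio.run(main())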
-#
-import logging
-import json  # type: ignore
-
-from google.auth.transport.requests import AuthorizedSession  # type: ignore
-from google.auth import credentials as ga_credentials  # type: ignore
-from google.api_core import exceptions as core_exceptions
-from google.api_core import retry as retries
-from google.api_core import rest_helpers
-from google.api_core import rest_streaming
-from google.api_core import gapic_v1
-
-from google.protobuf import json_format
-from google.api_core import operations_v1
-
-from requests import __version__ as requests_version
-import dataclasses
-from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
-import warnings
-
-
-from google.cloud.automl_v1beta1.types import prediction_service
-from google.longrunning import operations_pb2  # type: ignore
-
-
-from .rest_base import _BasePredictionServiceRestTransport
-from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO
-
-try:
-    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
-except AttributeError:  # pragma: NO COVER
-    OptionalRetry = Union[retries.Retry, object, None]  # type: ignore
-
-try:
-    from google.api_core import client_logging  # type: ignore
-    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
-except ImportError:  # pragma: NO COVER
-    CLIENT_LOGGING_SUPPORTED = False
-
-_LOGGER = logging.getLogger(__name__)
-
-DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
-    gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
-    grpc_version=None,
-    rest_version=f"requests@{requests_version}",
-)
-
-
-class PredictionServiceRestInterceptor:
-    """Interceptor for PredictionService.
-
-    Interceptors are used to manipulate requests, request metadata, and responses
-    in arbitrary ways.
-    Example use cases include:
-    * Logging
-    * Verifying requests according to service or custom semantics
-    * Stripping extraneous information from responses
-
-    These use cases and more can be enabled by injecting an
-    instance of a custom subclass when constructing the PredictionServiceRestTransport.
-
-    .. code-block:: python
-        class MyCustomPredictionServiceInterceptor(PredictionServiceRestInterceptor):
-            def pre_batch_predict(self, request, metadata):
-                logging.log(logging.INFO, f"Received request: {request}")
-                return request, metadata
-
-            def post_batch_predict(self, response):
-                logging.log(logging.INFO, f"Received response: {response}")
-                return response
-
-            def pre_predict(self, request, metadata):
-                logging.log(logging.INFO, f"Received request: {request}")
-                return request, metadata
-
-            def post_predict(self, response):
-                logging.log(logging.INFO, f"Received response: {response}")
-                return response
-
-        transport = PredictionServiceRestTransport(interceptor=MyCustomPredictionServiceInterceptor())
-        client = PredictionServiceClient(transport=transport)
-
-
-    """
-    def pre_batch_predict(self, request: prediction_service.BatchPredictRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[prediction_service.BatchPredictRequest, Sequence[Tuple[str, Union[str, bytes]]]]:
-        """Pre-rpc interceptor for batch_predict
-
-        Override in a subclass to manipulate the request or metadata
-        before they are sent to the PredictionService server.
-        """
-        return request, metadata
-
-    def post_batch_predict(self, response: operations_pb2.Operation) -> operations_pb2.Operation:
-        """Post-rpc interceptor for batch_predict
-
-        DEPRECATED. Please use the `post_batch_predict_with_metadata`
-        interceptor instead.
- - Override in a subclass to read or manipulate the response - after it is returned by the PredictionService server but before - it is returned to user code. This `post_batch_predict` interceptor runs - before the `post_batch_predict_with_metadata` interceptor. - """ - return response - - def post_batch_predict_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for batch_predict - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the PredictionService server but before it is returned to user code. - - We recommend only using this `post_batch_predict_with_metadata` - interceptor in new development instead of the `post_batch_predict` interceptor. - When both interceptors are used, this `post_batch_predict_with_metadata` interceptor runs after the - `post_batch_predict` interceptor. The (possibly modified) response returned by - `post_batch_predict` will be passed to - `post_batch_predict_with_metadata`. - """ - return response, metadata - - def pre_predict(self, request: prediction_service.PredictRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[prediction_service.PredictRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for predict - - Override in a subclass to manipulate the request or metadata - before they are sent to the PredictionService server. - """ - return request, metadata - - def post_predict(self, response: prediction_service.PredictResponse) -> prediction_service.PredictResponse: - """Post-rpc interceptor for predict - - DEPRECATED. Please use the `post_predict_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the PredictionService server but before - it is returned to user code. This `post_predict` interceptor runs - before the `post_predict_with_metadata` interceptor. - """ - return response - - def post_predict_with_metadata(self, response: prediction_service.PredictResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[prediction_service.PredictResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for predict - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the PredictionService server but before it is returned to user code. - - We recommend only using this `post_predict_with_metadata` - interceptor in new development instead of the `post_predict` interceptor. - When both interceptors are used, this `post_predict_with_metadata` interceptor runs after the - `post_predict` interceptor. The (possibly modified) response returned by - `post_predict` will be passed to - `post_predict_with_metadata`. - """ - return response, metadata - - -@dataclasses.dataclass -class PredictionServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: PredictionServiceRestInterceptor - - -class PredictionServiceRestTransport(_BasePredictionServiceRestTransport): - """REST backend synchronous transport for PredictionService. - - AutoML Prediction API. - - On any input that is documented to expect a string parameter in - snake_case or kebab-case, either of those cases is accepted. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
-
-    It sends JSON representations of protocol buffers over HTTP/1.1
-    """
-
-    def __init__(self, *,
-            host: str = 'automl.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            client_cert_source_for_mtls: Optional[Callable[[
-                ], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            url_scheme: str = 'https',
-            interceptor: Optional[PredictionServiceRestInterceptor] = None,
-            api_audience: Optional[str] = None,
-            ) -> None:
-        """Instantiate the transport.
-
-        Args:
-            host (Optional[str]):
-                The hostname to connect to (default: 'automl.googleapis.com').
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if ``channel`` is provided.
-            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
-                ignored if ``channel`` is provided.
-            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
-                certificate to configure mutual TLS HTTP channel. It is ignored
-                if ``channel`` is provided.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you are developing
-                your own client library.
-            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
-                be used for service account credentials.
-            url_scheme: the protocol scheme for the API endpoint. Normally
-                "https", but for testing or local servers,
-                "http" can be specified.
-        """
-        # Run the base constructor
-        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
-        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
-        # credentials object
-        super().__init__(
-            host=host,
-            credentials=credentials,
-            client_info=client_info,
-            always_use_jwt_access=always_use_jwt_access,
-            url_scheme=url_scheme,
-            api_audience=api_audience
-        )
-        self._session = AuthorizedSession(
-            self._credentials, default_host=self.DEFAULT_HOST)
-        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
-        if client_cert_source_for_mtls:
-            self._session.configure_mtls_channel(client_cert_source_for_mtls)
-        self._interceptor = interceptor or PredictionServiceRestInterceptor()
-        self._prep_wrapped_messages(client_info)
-
-    @property
-    def operations_client(self) -> operations_v1.AbstractOperationsClient:
-        """Create the client designed to process long-running operations.
-
-        This property caches on the instance; repeated calls return the same
-        client.
-        """
-        # Only create a new client if we do not already have one.
- if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.CancelOperation': [ - { - 'method': 'post', - 'uri': '/v1beta1/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - ], - 'google.longrunning.Operations.DeleteOperation': [ - { - 'method': 'delete', - 'uri': '/v1beta1/{name=projects/*/locations/*/operations/*}', - }, - ], - 'google.longrunning.Operations.GetOperation': [ - { - 'method': 'get', - 'uri': '/v1beta1/{name=projects/*/locations/*/operations/*}', - }, - ], - 'google.longrunning.Operations.ListOperations': [ - { - 'method': 'get', - 'uri': '/v1beta1/{name=projects/*/locations/*}/operations', - }, - ], - 'google.longrunning.Operations.WaitOperation': [ - { - 'method': 'post', - 'uri': '/v1beta1/{name=projects/*/locations/*/operations/*}:wait', - 'body': '*', - }, - ], - } - - rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1beta1") - - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) - - # Return the client from cache. - return self._operations_client - - class _BatchPredict(_BasePredictionServiceRestTransport._BaseBatchPredict, PredictionServiceRestStub): - def __hash__(self): - return hash("PredictionServiceRestTransport.BatchPredict") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: prediction_service.BatchPredictRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the batch predict method over HTTP. - - Args: - request (~.prediction_service.BatchPredictRequest): - The request object. Request message for - [PredictionService.BatchPredict][google.cloud.automl.v1beta1.PredictionService.BatchPredict]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
-
-        """
-
-        http_options = _BasePredictionServiceRestTransport._BaseBatchPredict._get_http_options()
-
-        request, metadata = self._interceptor.pre_batch_predict(request, metadata)
-        transcoded_request = _BasePredictionServiceRestTransport._BaseBatchPredict._get_transcoded_request(http_options, request)
-
-        body = _BasePredictionServiceRestTransport._BaseBatchPredict._get_request_body_json(transcoded_request)
-
-        # Jsonify the query params
-        query_params = _BasePredictionServiceRestTransport._BaseBatchPredict._get_query_params_json(transcoded_request)
-
-        if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
-            request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
-            method = transcoded_request['method']
-            try:
-                request_payload = json_format.MessageToJson(request)
-            except Exception:
-                request_payload = None
-            http_request = {
-                "payload": request_payload,
-                "requestMethod": method,
-                "requestUrl": request_url,
-                "headers": dict(metadata),
-            }
-            _LOGGER.debug(
-                "Sending request for google.cloud.automl_v1beta1.PredictionServiceClient.BatchPredict",
-                extra = {
-                    "serviceName": "google.cloud.automl.v1beta1.PredictionService",
-                    "rpcName": "BatchPredict",
-                    "httpRequest": http_request,
-                    "metadata": http_request["headers"],
-                },
-            )
-
-        # Send the request
-        response = PredictionServiceRestTransport._BatchPredict._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
-
-        # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
-        # subclass.
-        if response.status_code >= 400:
-            raise core_exceptions.from_http_response(response)
-
-        # Return the response
-        resp = operations_pb2.Operation()
-        json_format.Parse(response.content, resp, ignore_unknown_fields=True)
-
-        resp = self._interceptor.post_batch_predict(resp)
-        response_metadata = [(k, str(v)) for k, v in response.headers.items()]
-        resp, _ = self._interceptor.post_batch_predict_with_metadata(resp, response_metadata)
-        if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
-            try:
-                response_payload = json_format.MessageToJson(resp)
-            except Exception:
-                response_payload = None
-            http_response = {
-                "payload": response_payload,
-                "headers": dict(response.headers),
-                "status": response.status_code,
-            }
-            _LOGGER.debug(
-                "Received response for google.cloud.automl_v1beta1.PredictionServiceClient.batch_predict",
-                extra = {
-                    "serviceName": "google.cloud.automl.v1beta1.PredictionService",
-                    "rpcName": "BatchPredict",
-                    "metadata": http_response["headers"],
-                    "httpResponse": http_response,
-                },
-            )
-        return resp
-
-    class _Predict(_BasePredictionServiceRestTransport._BasePredict, PredictionServiceRestStub):
-        def __hash__(self):
-            return hash("PredictionServiceRestTransport.Predict")
-
-        @staticmethod
-        def _get_response(
-            host,
-            metadata,
-            query_params,
-            session,
-            timeout,
-            transcoded_request,
-            body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
-            headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
-            response = getattr(session, method)(
-                "{host}{uri}".format(host=host, uri=uri),
-                timeout=timeout,
-                headers=headers,
-                params=rest_helpers.flatten_query_params(query_params, strict=True),
-                data=body,
-            )
-            return response
-
-        def __call__(self,
-                request: prediction_service.PredictRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
-                ) -> prediction_service.PredictResponse:
-            r"""Call the predict method over HTTP.
-
-            Args:
-                request (~.prediction_service.PredictRequest):
-                    The request object. Request message for
-                    [PredictionService.Predict][google.cloud.automl.v1beta1.PredictionService.Predict].
-                retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                    should be retried.
-                timeout (float): The timeout for this request.
-                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                    sent along with the request as metadata. Normally, each value must be of type `str`,
-                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                    be of type `bytes`.
-
-            Returns:
-                ~.prediction_service.PredictResponse:
-                    Response message for
-                    [PredictionService.Predict][google.cloud.automl.v1beta1.PredictionService.Predict].
-
-            """
-
-            http_options = _BasePredictionServiceRestTransport._BasePredict._get_http_options()
-
-            request, metadata = self._interceptor.pre_predict(request, metadata)
-            transcoded_request = _BasePredictionServiceRestTransport._BasePredict._get_transcoded_request(http_options, request)
-
-            body = _BasePredictionServiceRestTransport._BasePredict._get_request_body_json(transcoded_request)
-
-            # Jsonify the query params
-            query_params = _BasePredictionServiceRestTransport._BasePredict._get_query_params_json(transcoded_request)
-
-            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
-                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
-                method = transcoded_request['method']
-                try:
-                    request_payload = type(request).to_json(request)
-                except Exception:
-                    request_payload = None
-                http_request = {
-                    "payload": request_payload,
-                    "requestMethod": method,
-                    "requestUrl": request_url,
-                    "headers": dict(metadata),
-                }
-                _LOGGER.debug(
-                    "Sending request for google.cloud.automl_v1beta1.PredictionServiceClient.Predict",
-                    extra = {
-                        "serviceName": "google.cloud.automl.v1beta1.PredictionService",
-                        "rpcName": "Predict",
-                        "httpRequest": http_request,
-                        "metadata": http_request["headers"],
-                    },
-                )
-
-            # Send the request
-            response = PredictionServiceRestTransport._Predict._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
-
-            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
-            # subclass.
-            if response.status_code >= 400:
-                raise core_exceptions.from_http_response(response)
-
-            # Return the response
-            resp = prediction_service.PredictResponse()
-            pb_resp = prediction_service.PredictResponse.pb(resp)
-
-            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
-
-            resp = self._interceptor.post_predict(resp)
-            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
-            resp, _ = self._interceptor.post_predict_with_metadata(resp, response_metadata)
-            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
-                try:
-                    response_payload = prediction_service.PredictResponse.to_json(resp)
-                except Exception:
-                    response_payload = None
-                http_response = {
-                    "payload": response_payload,
-                    "headers": dict(response.headers),
-                    "status": response.status_code,
-                }
-                _LOGGER.debug(
-                    "Received response for google.cloud.automl_v1beta1.PredictionServiceClient.predict",
-                    extra = {
-                        "serviceName": "google.cloud.automl.v1beta1.PredictionService",
-                        "rpcName": "Predict",
-                        "metadata": http_response["headers"],
-                        "httpResponse": http_response,
-                    },
-                )
-            return resp
-
-    @property
-    def batch_predict(self) -> Callable[
-            [prediction_service.BatchPredictRequest],
-            operations_pb2.Operation]:
-        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
-        # In C++ this would require a dynamic_cast
-        return self._BatchPredict(self._session, self._host, self._interceptor) # type: ignore
-
-    @property
-    def predict(self) -> Callable[
-            [prediction_service.PredictRequest],
-            prediction_service.PredictResponse]:
-        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
-        # In C++ this would require a dynamic_cast
-        return self._Predict(self._session, self._host, self._interceptor) # type: ignore
-
-    @property
-    def kind(self) -> str:
-        return "rest"
-
-    def close(self):
-        self._session.close()
-
-
-__all__=(
-    'PredictionServiceRestTransport',
-)
diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/prediction_service/transports/rest_base.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/prediction_service/transports/rest_base.py
deleted file mode 100644
index 78be6c163890..000000000000
--- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/services/prediction_service/transports/rest_base.py
+++ /dev/null
@@ -1,186 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
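The interceptor docstrings above steer new code toward the ``*_with_metadata`` hooks. A minimal sketch of a custom REST interceptor that inspects response headers (the header checked is purely illustrative):

.. code-block:: python

    # Sketch: wiring a custom interceptor into the REST transport.
    from google.cloud.automl_v1beta1.services.prediction_service import (
        PredictionServiceClient,
    )
    from google.cloud.automl_v1beta1.services.prediction_service.transports.rest import (
        PredictionServiceRestInterceptor,
        PredictionServiceRestTransport,
    )


    class HeaderEchoInterceptor(PredictionServiceRestInterceptor):
        def post_predict_with_metadata(self, response, metadata):
            # metadata is a sequence of (key, value) response-header pairs.
            for key, value in metadata:
                if key.lower() == "content-type":  # illustrative check
                    print(f"{key}: {value}")
            return response, metadata


    transport = PredictionServiceRestTransport(interceptor=HeaderEchoInterceptor())
    client = PredictionServiceClient(transport=transport)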
-# -import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from .base import PredictionServiceTransport, DEFAULT_CLIENT_INFO - -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - - -from google.cloud.automl_v1beta1.types import prediction_service -from google.longrunning import operations_pb2 # type: ignore - - -class _BasePredictionServiceRestTransport(PredictionServiceTransport): - """Base REST backend transport for PredictionService. - - Note: This class is not meant to be used directly. Use its sync and - async sub-classes instead. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'automl.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - Args: - host (Optional[str]): - The hostname to connect to (default: 'automl.googleapis.com'). - credentials (Optional[Any]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. 
-        """
-        # Run the base constructor
-        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
-        if maybe_url_match is None:
-            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
-
-        url_match_items = maybe_url_match.groupdict()
-
-        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
-
-        super().__init__(
-            host=host,
-            credentials=credentials,
-            client_info=client_info,
-            always_use_jwt_access=always_use_jwt_access,
-            api_audience=api_audience
-        )
-
-    class _BaseBatchPredict:
-        def __hash__(self):  # pragma: NO COVER
-            raise NotImplementedError("__hash__ must be implemented.")
-
-        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
-        }
-
-        @classmethod
-        def _get_unset_required_fields(cls, message_dict):
-            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
-
-        @staticmethod
-        def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'post',
-                'uri': '/v1beta1/{name=projects/*/locations/*/models/*}:batchPredict',
-                'body': '*',
-            },
-            ]
-            return http_options
-
-        @staticmethod
-        def _get_transcoded_request(http_options, request):
-            pb_request = prediction_service.BatchPredictRequest.pb(request)
-            transcoded_request = path_template.transcode(http_options, pb_request)
-            return transcoded_request
-
-        @staticmethod
-        def _get_request_body_json(transcoded_request):
-            # Jsonify the request body
-
-            body = json_format.MessageToJson(
-                transcoded_request['body'],
-                use_integers_for_enums=True
-            )
-            return body
-        @staticmethod
-        def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json_format.MessageToJson(
-                transcoded_request['query_params'],
-                use_integers_for_enums=True,
-            ))
-            query_params.update(_BasePredictionServiceRestTransport._BaseBatchPredict._get_unset_required_fields(query_params))
-
-            query_params["$alt"] = "json;enum-encoding=int"
-            return query_params
-
-    class _BasePredict:
-        def __hash__(self):  # pragma: NO COVER
-            raise NotImplementedError("__hash__ must be implemented.")
-
-        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
-        }
-
-        @classmethod
-        def _get_unset_required_fields(cls, message_dict):
-            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
-
-        @staticmethod
-        def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'post',
-                'uri': '/v1beta1/{name=projects/*/locations/*/models/*}:predict',
-                'body': '*',
-            },
-            ]
-            return http_options
-
-        @staticmethod
-        def _get_transcoded_request(http_options, request):
-            pb_request = prediction_service.PredictRequest.pb(request)
-            transcoded_request = path_template.transcode(http_options, pb_request)
-            return transcoded_request
-
-        @staticmethod
-        def _get_request_body_json(transcoded_request):
-            # Jsonify the request body
-
-            body = json_format.MessageToJson(
-                transcoded_request['body'],
-                use_integers_for_enums=True
-            )
-            return body
-        @staticmethod
-        def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json_format.MessageToJson(
-                transcoded_request['query_params'],
-                use_integers_for_enums=True,
-            ))
-            query_params.update(_BasePredictionServiceRestTransport._BasePredict._get_unset_required_fields(query_params))
-
-            query_params["$alt"] = "json;enum-encoding=int"
-            return query_params
-
-
-__all__=(
-    '_BasePredictionServiceRestTransport',
-)
diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/__init__.py
b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/__init__.py deleted file mode 100644 index 90bf2701b34d..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/__init__.py +++ /dev/null @@ -1,318 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .annotation_payload import ( - AnnotationPayload, -) -from .annotation_spec import ( - AnnotationSpec, -) -from .classification import ( - ClassificationAnnotation, - ClassificationEvaluationMetrics, - VideoClassificationAnnotation, - ClassificationType, -) -from .column_spec import ( - ColumnSpec, -) -from .data_items import ( - Document, - DocumentDimensions, - ExamplePayload, - Image, - Row, - TextSnippet, -) -from .data_stats import ( - ArrayStats, - CategoryStats, - CorrelationStats, - DataStats, - Float64Stats, - StringStats, - StructStats, - TimestampStats, -) -from .data_types import ( - DataType, - StructType, - TypeCode, -) -from .dataset import ( - Dataset, -) -from .detection import ( - BoundingBoxMetricsEntry, - ImageObjectDetectionAnnotation, - ImageObjectDetectionEvaluationMetrics, - VideoObjectTrackingAnnotation, - VideoObjectTrackingEvaluationMetrics, -) -from .geometry import ( - BoundingPoly, - NormalizedVertex, -) -from .image import ( - ImageClassificationDatasetMetadata, - ImageClassificationModelDeploymentMetadata, - ImageClassificationModelMetadata, - ImageObjectDetectionDatasetMetadata, - ImageObjectDetectionModelDeploymentMetadata, - ImageObjectDetectionModelMetadata, -) -from .io import ( - BatchPredictInputConfig, - BatchPredictOutputConfig, - BigQueryDestination, - BigQuerySource, - DocumentInputConfig, - ExportEvaluatedExamplesOutputConfig, - GcrDestination, - GcsDestination, - GcsSource, - InputConfig, - ModelExportOutputConfig, - OutputConfig, -) -from .model import ( - Model, -) -from .model_evaluation import ( - ModelEvaluation, -) -from .operations import ( - BatchPredictOperationMetadata, - CreateModelOperationMetadata, - DeleteOperationMetadata, - DeployModelOperationMetadata, - ExportDataOperationMetadata, - ExportEvaluatedExamplesOperationMetadata, - ExportModelOperationMetadata, - ImportDataOperationMetadata, - OperationMetadata, - UndeployModelOperationMetadata, -) -from .prediction_service import ( - BatchPredictRequest, - BatchPredictResult, - PredictRequest, - PredictResponse, -) -from .ranges import ( - DoubleRange, -) -from .regression import ( - RegressionEvaluationMetrics, -) -from .service import ( - CreateDatasetRequest, - CreateModelRequest, - DeleteDatasetRequest, - DeleteModelRequest, - DeployModelRequest, - ExportDataRequest, - ExportEvaluatedExamplesRequest, - ExportModelRequest, - GetAnnotationSpecRequest, - GetColumnSpecRequest, - GetDatasetRequest, - GetModelEvaluationRequest, - GetModelRequest, - GetTableSpecRequest, - ImportDataRequest, - ListColumnSpecsRequest, - ListColumnSpecsResponse, - ListDatasetsRequest, - ListDatasetsResponse, - 
ListModelEvaluationsRequest, - ListModelEvaluationsResponse, - ListModelsRequest, - ListModelsResponse, - ListTableSpecsRequest, - ListTableSpecsResponse, - UndeployModelRequest, - UpdateColumnSpecRequest, - UpdateDatasetRequest, - UpdateTableSpecRequest, -) -from .table_spec import ( - TableSpec, -) -from .tables import ( - TablesAnnotation, - TablesDatasetMetadata, - TablesModelColumnInfo, - TablesModelMetadata, -) -from .temporal import ( - TimeSegment, -) -from .text import ( - TextClassificationDatasetMetadata, - TextClassificationModelMetadata, - TextExtractionDatasetMetadata, - TextExtractionModelMetadata, - TextSentimentDatasetMetadata, - TextSentimentModelMetadata, -) -from .text_extraction import ( - TextExtractionAnnotation, - TextExtractionEvaluationMetrics, -) -from .text_segment import ( - TextSegment, -) -from .text_sentiment import ( - TextSentimentAnnotation, - TextSentimentEvaluationMetrics, -) -from .translation import ( - TranslationAnnotation, - TranslationDatasetMetadata, - TranslationEvaluationMetrics, - TranslationModelMetadata, -) -from .video import ( - VideoClassificationDatasetMetadata, - VideoClassificationModelMetadata, - VideoObjectTrackingDatasetMetadata, - VideoObjectTrackingModelMetadata, -) - -__all__ = ( - 'AnnotationPayload', - 'AnnotationSpec', - 'ClassificationAnnotation', - 'ClassificationEvaluationMetrics', - 'VideoClassificationAnnotation', - 'ClassificationType', - 'ColumnSpec', - 'Document', - 'DocumentDimensions', - 'ExamplePayload', - 'Image', - 'Row', - 'TextSnippet', - 'ArrayStats', - 'CategoryStats', - 'CorrelationStats', - 'DataStats', - 'Float64Stats', - 'StringStats', - 'StructStats', - 'TimestampStats', - 'DataType', - 'StructType', - 'TypeCode', - 'Dataset', - 'BoundingBoxMetricsEntry', - 'ImageObjectDetectionAnnotation', - 'ImageObjectDetectionEvaluationMetrics', - 'VideoObjectTrackingAnnotation', - 'VideoObjectTrackingEvaluationMetrics', - 'BoundingPoly', - 'NormalizedVertex', - 'ImageClassificationDatasetMetadata', - 'ImageClassificationModelDeploymentMetadata', - 'ImageClassificationModelMetadata', - 'ImageObjectDetectionDatasetMetadata', - 'ImageObjectDetectionModelDeploymentMetadata', - 'ImageObjectDetectionModelMetadata', - 'BatchPredictInputConfig', - 'BatchPredictOutputConfig', - 'BigQueryDestination', - 'BigQuerySource', - 'DocumentInputConfig', - 'ExportEvaluatedExamplesOutputConfig', - 'GcrDestination', - 'GcsDestination', - 'GcsSource', - 'InputConfig', - 'ModelExportOutputConfig', - 'OutputConfig', - 'Model', - 'ModelEvaluation', - 'BatchPredictOperationMetadata', - 'CreateModelOperationMetadata', - 'DeleteOperationMetadata', - 'DeployModelOperationMetadata', - 'ExportDataOperationMetadata', - 'ExportEvaluatedExamplesOperationMetadata', - 'ExportModelOperationMetadata', - 'ImportDataOperationMetadata', - 'OperationMetadata', - 'UndeployModelOperationMetadata', - 'BatchPredictRequest', - 'BatchPredictResult', - 'PredictRequest', - 'PredictResponse', - 'DoubleRange', - 'RegressionEvaluationMetrics', - 'CreateDatasetRequest', - 'CreateModelRequest', - 'DeleteDatasetRequest', - 'DeleteModelRequest', - 'DeployModelRequest', - 'ExportDataRequest', - 'ExportEvaluatedExamplesRequest', - 'ExportModelRequest', - 'GetAnnotationSpecRequest', - 'GetColumnSpecRequest', - 'GetDatasetRequest', - 'GetModelEvaluationRequest', - 'GetModelRequest', - 'GetTableSpecRequest', - 'ImportDataRequest', - 'ListColumnSpecsRequest', - 'ListColumnSpecsResponse', - 'ListDatasetsRequest', - 'ListDatasetsResponse', - 'ListModelEvaluationsRequest', - 
'ListModelEvaluationsResponse', - 'ListModelsRequest', - 'ListModelsResponse', - 'ListTableSpecsRequest', - 'ListTableSpecsResponse', - 'UndeployModelRequest', - 'UpdateColumnSpecRequest', - 'UpdateDatasetRequest', - 'UpdateTableSpecRequest', - 'TableSpec', - 'TablesAnnotation', - 'TablesDatasetMetadata', - 'TablesModelColumnInfo', - 'TablesModelMetadata', - 'TimeSegment', - 'TextClassificationDatasetMetadata', - 'TextClassificationModelMetadata', - 'TextExtractionDatasetMetadata', - 'TextExtractionModelMetadata', - 'TextSentimentDatasetMetadata', - 'TextSentimentModelMetadata', - 'TextExtractionAnnotation', - 'TextExtractionEvaluationMetrics', - 'TextSegment', - 'TextSentimentAnnotation', - 'TextSentimentEvaluationMetrics', - 'TranslationAnnotation', - 'TranslationDatasetMetadata', - 'TranslationEvaluationMetrics', - 'TranslationModelMetadata', - 'VideoClassificationDatasetMetadata', - 'VideoClassificationModelMetadata', - 'VideoObjectTrackingDatasetMetadata', - 'VideoObjectTrackingModelMetadata', -) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/annotation_payload.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/annotation_payload.py deleted file mode 100644 index 3d72bdf9b550..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/annotation_payload.py +++ /dev/null @@ -1,158 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.automl_v1beta1.types import classification as gca_classification -from google.cloud.automl_v1beta1.types import detection -from google.cloud.automl_v1beta1.types import tables as gca_tables -from google.cloud.automl_v1beta1.types import text_extraction as gca_text_extraction -from google.cloud.automl_v1beta1.types import text_sentiment as gca_text_sentiment -from google.cloud.automl_v1beta1.types import translation as gca_translation - - -__protobuf__ = proto.module( - package='google.cloud.automl.v1beta1', - manifest={ - 'AnnotationPayload', - }, -) - - -class AnnotationPayload(proto.Message): - r"""Contains annotation information that is relevant to AutoML. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - translation (google.cloud.automl_v1beta1.types.TranslationAnnotation): - Annotation details for translation. - - This field is a member of `oneof`_ ``detail``. - classification (google.cloud.automl_v1beta1.types.ClassificationAnnotation): - Annotation details for content or image - classification. - - This field is a member of `oneof`_ ``detail``. 
- image_object_detection (google.cloud.automl_v1beta1.types.ImageObjectDetectionAnnotation): - Annotation details for image object - detection. - - This field is a member of `oneof`_ ``detail``. - video_classification (google.cloud.automl_v1beta1.types.VideoClassificationAnnotation): - Annotation details for video classification. - Returned for Video Classification predictions. - - This field is a member of `oneof`_ ``detail``. - video_object_tracking (google.cloud.automl_v1beta1.types.VideoObjectTrackingAnnotation): - Annotation details for video object tracking. - - This field is a member of `oneof`_ ``detail``. - text_extraction (google.cloud.automl_v1beta1.types.TextExtractionAnnotation): - Annotation details for text extraction. - - This field is a member of `oneof`_ ``detail``. - text_sentiment (google.cloud.automl_v1beta1.types.TextSentimentAnnotation): - Annotation details for text sentiment. - - This field is a member of `oneof`_ ``detail``. - tables (google.cloud.automl_v1beta1.types.TablesAnnotation): - Annotation details for Tables. - - This field is a member of `oneof`_ ``detail``. - annotation_spec_id (str): - Output only . The resource ID of the - annotation spec that this annotation pertains - to. The annotation spec comes from either an - ancestor dataset, or the dataset that was used - to train the model in use. - display_name (str): - Output only. The value of - [display_name][google.cloud.automl.v1beta1.AnnotationSpec.display_name] - when the model was trained. Because this field returns a - value at model training time, for different models trained - using the same dataset, the returned value could be - different as model owner could update the ``display_name`` - between any two model training. - """ - - translation: gca_translation.TranslationAnnotation = proto.Field( - proto.MESSAGE, - number=2, - oneof='detail', - message=gca_translation.TranslationAnnotation, - ) - classification: gca_classification.ClassificationAnnotation = proto.Field( - proto.MESSAGE, - number=3, - oneof='detail', - message=gca_classification.ClassificationAnnotation, - ) - image_object_detection: detection.ImageObjectDetectionAnnotation = proto.Field( - proto.MESSAGE, - number=4, - oneof='detail', - message=detection.ImageObjectDetectionAnnotation, - ) - video_classification: gca_classification.VideoClassificationAnnotation = proto.Field( - proto.MESSAGE, - number=9, - oneof='detail', - message=gca_classification.VideoClassificationAnnotation, - ) - video_object_tracking: detection.VideoObjectTrackingAnnotation = proto.Field( - proto.MESSAGE, - number=8, - oneof='detail', - message=detection.VideoObjectTrackingAnnotation, - ) - text_extraction: gca_text_extraction.TextExtractionAnnotation = proto.Field( - proto.MESSAGE, - number=6, - oneof='detail', - message=gca_text_extraction.TextExtractionAnnotation, - ) - text_sentiment: gca_text_sentiment.TextSentimentAnnotation = proto.Field( - proto.MESSAGE, - number=7, - oneof='detail', - message=gca_text_sentiment.TextSentimentAnnotation, - ) - tables: gca_tables.TablesAnnotation = proto.Field( - proto.MESSAGE, - number=10, - oneof='detail', - message=gca_tables.TablesAnnotation, - ) - annotation_spec_id: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=5, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/annotation_spec.py 
b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/annotation_spec.py deleted file mode 100644 index d6858095bc01..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/annotation_spec.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.automl.v1beta1', - manifest={ - 'AnnotationSpec', - }, -) - - -class AnnotationSpec(proto.Message): - r"""A definition of an annotation spec. - - Attributes: - name (str): - Output only. Resource name of the annotation spec. Form: - - 'projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/annotationSpecs/{annotation_spec_id}' - display_name (str): - Required. The name of the annotation spec to show in the - interface. The name can be up to 32 characters long and must - match the regexp ``[a-zA-Z0-9_]+``. - example_count (int): - Output only. The number of examples in the - parent dataset labeled by the annotation spec. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - example_count: int = proto.Field( - proto.INT32, - number=9, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/classification.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/classification.py deleted file mode 100644 index ad7be3afe244..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/classification.py +++ /dev/null @@ -1,379 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.automl_v1beta1.types import temporal - - -__protobuf__ = proto.module( - package='google.cloud.automl.v1beta1', - manifest={ - 'ClassificationType', - 'ClassificationAnnotation', - 'VideoClassificationAnnotation', - 'ClassificationEvaluationMetrics', - }, -) - - -class ClassificationType(proto.Enum): - r"""Type of the classification problem. - - Values: - CLASSIFICATION_TYPE_UNSPECIFIED (0): - An un-set value of this enum. 
- MULTICLASS (1): - At most one label is allowed per example. - MULTILABEL (2): - Multiple labels are allowed for one example. - """ - CLASSIFICATION_TYPE_UNSPECIFIED = 0 - MULTICLASS = 1 - MULTILABEL = 2 - - -class ClassificationAnnotation(proto.Message): - r"""Contains annotation details specific to classification. - - Attributes: - score (float): - Output only. A confidence estimate between - 0.0 and 1.0. A higher value means greater - confidence that the annotation is positive. If a - user approves an annotation as negative or - positive, the score value remains unchanged. If - a user creates an annotation, the score is 0 for - negative or 1 for positive. - """ - - score: float = proto.Field( - proto.FLOAT, - number=1, - ) - - -class VideoClassificationAnnotation(proto.Message): - r"""Contains annotation details specific to video classification. - - Attributes: - type_ (str): - Output only. Expresses the type of video classification. - Possible values: - - - ``segment`` - Classification done on a specified by user - time segment of a video. AnnotationSpec is answered to be - present in that time segment, if it is present in any - part of it. The video ML model evaluations are done only - for this type of classification. - - - ``shot``- Shot-level classification. AutoML Video - Intelligence determines the boundaries for each camera - shot in the entire segment of the video that user - specified in the request configuration. AutoML Video - Intelligence then returns labels and their confidence - scores for each detected shot, along with the start and - end time of the shot. WARNING: Model evaluation is not - done for this classification type, the quality of it - depends on training data, but there are no metrics - provided to describe that quality. - - - ``1s_interval`` - AutoML Video Intelligence returns - labels and their confidence scores for each second of the - entire segment of the video that user specified in the - request configuration. WARNING: Model evaluation is not - done for this classification type, the quality of it - depends on training data, but there are no metrics - provided to describe that quality. - classification_annotation (google.cloud.automl_v1beta1.types.ClassificationAnnotation): - Output only . The classification details of - this annotation. - time_segment (google.cloud.automl_v1beta1.types.TimeSegment): - Output only . The time segment of the video - to which the annotation applies. - """ - - type_: str = proto.Field( - proto.STRING, - number=1, - ) - classification_annotation: 'ClassificationAnnotation' = proto.Field( - proto.MESSAGE, - number=2, - message='ClassificationAnnotation', - ) - time_segment: temporal.TimeSegment = proto.Field( - proto.MESSAGE, - number=3, - message=temporal.TimeSegment, - ) - - -class ClassificationEvaluationMetrics(proto.Message): - r"""Model evaluation metrics for classification problems. Note: For - Video Classification this metrics only describe quality of the Video - Classification predictions of "segment_classification" type. - - Attributes: - au_prc (float): - Output only. The Area Under Precision-Recall - Curve metric. Micro-averaged for the overall - evaluation. - base_au_prc (float): - Output only. The Area Under Precision-Recall - Curve metric based on priors. Micro-averaged for - the overall evaluation. Deprecated. - au_roc (float): - Output only. The Area Under Receiver - Operating Characteristic curve metric. - Micro-averaged for the overall evaluation. - log_loss (float): - Output only. The Log Loss metric. 
- confidence_metrics_entry (MutableSequence[google.cloud.automl_v1beta1.types.ClassificationEvaluationMetrics.ConfidenceMetricsEntry]): - Output only. Metrics for each confidence_threshold in - 0.00,0.05,0.10,...,0.95,0.96,0.97,0.98,0.99 and - position_threshold = INT32_MAX_VALUE. ROC and - precision-recall curves, and other aggregated metrics are - derived from them. The confidence metrics entries may also - be supplied for additional values of position_threshold, but - from these no aggregated metrics are computed. - confusion_matrix (google.cloud.automl_v1beta1.types.ClassificationEvaluationMetrics.ConfusionMatrix): - Output only. Confusion matrix of the - evaluation. Only set for MULTICLASS - classification problems where number of labels - is no more than 10. - Only set for model level evaluation, not for - evaluation per label. - annotation_spec_id (MutableSequence[str]): - Output only. The annotation spec ids used for - this evaluation. - """ - - class ConfidenceMetricsEntry(proto.Message): - r"""Metrics for a single confidence threshold. - - Attributes: - confidence_threshold (float): - Output only. Metrics are computed with an - assumption that the model never returns - predictions with score lower than this value. - position_threshold (int): - Output only. Metrics are computed with an assumption that - the model always returns at most this many predictions - (ordered by their score, descendingly), but they all still - need to meet the confidence_threshold. - recall (float): - Output only. Recall (True Positive Rate) for - the given confidence threshold. - precision (float): - Output only. Precision for the given - confidence threshold. - false_positive_rate (float): - Output only. False Positive Rate for the - given confidence threshold. - f1_score (float): - Output only. The harmonic mean of recall and - precision. - recall_at1 (float): - Output only. The Recall (True Positive Rate) - when only considering the label that has the - highest prediction score and not below the - confidence threshold for each example. - precision_at1 (float): - Output only. The precision when only - considering the label that has the highest - prediction score and not below the confidence - threshold for each example. - false_positive_rate_at1 (float): - Output only. The False Positive Rate when - only considering the label that has the highest - prediction score and not below the confidence - threshold for each example. - f1_score_at1 (float): - Output only. The harmonic mean of - [recall_at1][google.cloud.automl.v1beta1.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.recall_at1] - and - [precision_at1][google.cloud.automl.v1beta1.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.precision_at1]. - true_positive_count (int): - Output only. The number of model created - labels that match a ground truth label. - false_positive_count (int): - Output only. The number of model created - labels that do not match a ground truth label. - false_negative_count (int): - Output only. The number of ground truth - labels that are not matched by a model created - label. - true_negative_count (int): - Output only. The number of labels that were - not created by the model, but if they would, - they would not match a ground truth label. 
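-        Example:
-            A minimal sketch, assuming ``metrics`` is a
-            ``ClassificationEvaluationMetrics`` returned by the
-            service; the entry index is illustrative::
-
-                entry = metrics.confidence_metrics_entry[0]
-                # The four counts partition all (example, label) decisions
-                # made at this confidence threshold.
-                total = (entry.true_positive_count + entry.false_positive_count
-                         + entry.false_negative_count + entry.true_negative_count)
-                print(entry.confidence_threshold, entry.precision,
-                      entry.recall, total)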
- """ - - confidence_threshold: float = proto.Field( - proto.FLOAT, - number=1, - ) - position_threshold: int = proto.Field( - proto.INT32, - number=14, - ) - recall: float = proto.Field( - proto.FLOAT, - number=2, - ) - precision: float = proto.Field( - proto.FLOAT, - number=3, - ) - false_positive_rate: float = proto.Field( - proto.FLOAT, - number=8, - ) - f1_score: float = proto.Field( - proto.FLOAT, - number=4, - ) - recall_at1: float = proto.Field( - proto.FLOAT, - number=5, - ) - precision_at1: float = proto.Field( - proto.FLOAT, - number=6, - ) - false_positive_rate_at1: float = proto.Field( - proto.FLOAT, - number=9, - ) - f1_score_at1: float = proto.Field( - proto.FLOAT, - number=7, - ) - true_positive_count: int = proto.Field( - proto.INT64, - number=10, - ) - false_positive_count: int = proto.Field( - proto.INT64, - number=11, - ) - false_negative_count: int = proto.Field( - proto.INT64, - number=12, - ) - true_negative_count: int = proto.Field( - proto.INT64, - number=13, - ) - - class ConfusionMatrix(proto.Message): - r"""Confusion matrix of the model running the classification. - - Attributes: - annotation_spec_id (MutableSequence[str]): - Output only. IDs of the annotation specs used in the - confusion matrix. For Tables CLASSIFICATION - - [prediction_type][google.cloud.automl.v1beta1.TablesModelMetadata.prediction_type] - only list of [annotation_spec_display_name-s][] is - populated. - display_name (MutableSequence[str]): - Output only. Display name of the annotation specs used in - the confusion matrix, as they were at the moment of the - evaluation. For Tables CLASSIFICATION - - [prediction_type-s][google.cloud.automl.v1beta1.TablesModelMetadata.prediction_type], - distinct values of the target column at the moment of the - model evaluation are populated here. - row (MutableSequence[google.cloud.automl_v1beta1.types.ClassificationEvaluationMetrics.ConfusionMatrix.Row]): - Output only. Rows in the confusion matrix. The number of - rows is equal to the size of ``annotation_spec_id``. - ``row[i].example_count[j]`` is the number of examples that - have ground truth of the ``annotation_spec_id[i]`` and are - predicted as ``annotation_spec_id[j]`` by the model being - evaluated. - """ - - class Row(proto.Message): - r"""Output only. A row in the confusion matrix. - - Attributes: - example_count (MutableSequence[int]): - Output only. Value of the specific cell in the confusion - matrix. The number of values each row has (i.e. the length - of the row) is equal to the length of the - ``annotation_spec_id`` field or, if that one is not - populated, length of the - [display_name][google.cloud.automl.v1beta1.ClassificationEvaluationMetrics.ConfusionMatrix.display_name] - field. 
- """ - - example_count: MutableSequence[int] = proto.RepeatedField( - proto.INT32, - number=1, - ) - - annotation_spec_id: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - display_name: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - row: MutableSequence['ClassificationEvaluationMetrics.ConfusionMatrix.Row'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='ClassificationEvaluationMetrics.ConfusionMatrix.Row', - ) - - au_prc: float = proto.Field( - proto.FLOAT, - number=1, - ) - base_au_prc: float = proto.Field( - proto.FLOAT, - number=2, - ) - au_roc: float = proto.Field( - proto.FLOAT, - number=6, - ) - log_loss: float = proto.Field( - proto.FLOAT, - number=7, - ) - confidence_metrics_entry: MutableSequence[ConfidenceMetricsEntry] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=ConfidenceMetricsEntry, - ) - confusion_matrix: ConfusionMatrix = proto.Field( - proto.MESSAGE, - number=4, - message=ConfusionMatrix, - ) - annotation_spec_id: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=5, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/column_spec.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/column_spec.py deleted file mode 100644 index 9c1d49cc6814..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/column_spec.py +++ /dev/null @@ -1,120 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.automl_v1beta1.types import data_stats as gca_data_stats -from google.cloud.automl_v1beta1.types import data_types - - -__protobuf__ = proto.module( - package='google.cloud.automl.v1beta1', - manifest={ - 'ColumnSpec', - }, -) - - -class ColumnSpec(proto.Message): - r"""A representation of a column in a relational table. When listing - them, column specs are returned in the same order in which they were - given on import . Used by: - - - Tables - - Attributes: - name (str): - Output only. The resource name of the column specs. Form: - - ``projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/tableSpecs/{table_spec_id}/columnSpecs/{column_spec_id}`` - data_type (google.cloud.automl_v1beta1.types.DataType): - The data type of elements stored in the - column. - display_name (str): - Output only. The name of the column to show in the - interface. The name can be up to 100 characters long and can - consist only of ASCII Latin letters A-Z and a-z, ASCII - digits 0-9, underscores(_), and forward slashes(/), and must - start with a letter or a digit. - data_stats (google.cloud.automl_v1beta1.types.DataStats): - Output only. Stats of the series of values in the column. 
- This field may be stale, see the ancestor's - Dataset.tables_dataset_metadata.stats_update_time field for - the timestamp at which these stats were last updated. - top_correlated_columns (MutableSequence[google.cloud.automl_v1beta1.types.ColumnSpec.CorrelatedColumn]): - Deprecated. - etag (str): - Used to perform consistent read-modify-write - updates. If not set, a blind "overwrite" update - happens. - """ - - class CorrelatedColumn(proto.Message): - r"""Identifies the table's column, and its correlation with the - column this ColumnSpec describes. - - Attributes: - column_spec_id (str): - The column_spec_id of the correlated column, which belongs - to the same table as the in-context column. - correlation_stats (google.cloud.automl_v1beta1.types.CorrelationStats): - Correlation between this and the in-context - column. - """ - - column_spec_id: str = proto.Field( - proto.STRING, - number=1, - ) - correlation_stats: gca_data_stats.CorrelationStats = proto.Field( - proto.MESSAGE, - number=2, - message=gca_data_stats.CorrelationStats, - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - data_type: data_types.DataType = proto.Field( - proto.MESSAGE, - number=2, - message=data_types.DataType, - ) - display_name: str = proto.Field( - proto.STRING, - number=3, - ) - data_stats: gca_data_stats.DataStats = proto.Field( - proto.MESSAGE, - number=4, - message=gca_data_stats.DataStats, - ) - top_correlated_columns: MutableSequence[CorrelatedColumn] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message=CorrelatedColumn, - ) - etag: str = proto.Field( - proto.STRING, - number=6, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/data_items.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/data_items.py deleted file mode 100644 index 776490a1c2a5..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/data_items.py +++ /dev/null @@ -1,398 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.automl_v1beta1.types import geometry -from google.cloud.automl_v1beta1.types import io -from google.cloud.automl_v1beta1.types import text_segment as gca_text_segment -from google.protobuf import struct_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.automl.v1beta1', - manifest={ - 'Image', - 'TextSnippet', - 'DocumentDimensions', - 'Document', - 'Row', - 'ExamplePayload', - }, -) - - -class Image(proto.Message): - r"""A representation of an image. - Only images up to 30MB in size are supported. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. 
- Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - image_bytes (bytes): - Image content represented as a stream of bytes. Note: As - with all ``bytes`` fields, protobuffers use a pure binary - representation, whereas JSON representations use base64. - - This field is a member of `oneof`_ ``data``. - input_config (google.cloud.automl_v1beta1.types.InputConfig): - An input config specifying the content of the - image. - - This field is a member of `oneof`_ ``data``. - thumbnail_uri (str): - Output only. HTTP URI to the thumbnail image. - """ - - image_bytes: bytes = proto.Field( - proto.BYTES, - number=1, - oneof='data', - ) - input_config: io.InputConfig = proto.Field( - proto.MESSAGE, - number=6, - oneof='data', - message=io.InputConfig, - ) - thumbnail_uri: str = proto.Field( - proto.STRING, - number=4, - ) - - -class TextSnippet(proto.Message): - r"""A representation of a text snippet. - - Attributes: - content (str): - Required. The content of the text snippet as - a string. Up to 250000 characters long. - mime_type (str): - Optional. The format of - [content][google.cloud.automl.v1beta1.TextSnippet.content]. - Currently the only two allowed values are "text/html" and - "text/plain". If left blank, the format is automatically - determined from the type of the uploaded - [content][google.cloud.automl.v1beta1.TextSnippet.content]. - content_uri (str): - Output only. HTTP URI where you can download - the content. - """ - - content: str = proto.Field( - proto.STRING, - number=1, - ) - mime_type: str = proto.Field( - proto.STRING, - number=2, - ) - content_uri: str = proto.Field( - proto.STRING, - number=4, - ) - - -class DocumentDimensions(proto.Message): - r"""Message that describes dimension of a document. - - Attributes: - unit (google.cloud.automl_v1beta1.types.DocumentDimensions.DocumentDimensionUnit): - Unit of the dimension. - width (float): - Width value of the document, works together - with the unit. - height (float): - Height value of the document, works together - with the unit. - """ - class DocumentDimensionUnit(proto.Enum): - r"""Unit of the document dimension. - - Values: - DOCUMENT_DIMENSION_UNIT_UNSPECIFIED (0): - Should not be used. - INCH (1): - Document dimension is measured in inches. - CENTIMETER (2): - Document dimension is measured in - centimeters. - POINT (3): - Document dimension is measured in points. 72 - points = 1 inch. - """ - DOCUMENT_DIMENSION_UNIT_UNSPECIFIED = 0 - INCH = 1 - CENTIMETER = 2 - POINT = 3 - - unit: DocumentDimensionUnit = proto.Field( - proto.ENUM, - number=1, - enum=DocumentDimensionUnit, - ) - width: float = proto.Field( - proto.FLOAT, - number=2, - ) - height: float = proto.Field( - proto.FLOAT, - number=3, - ) - - -class Document(proto.Message): - r"""A structured text document e.g. a PDF. - - Attributes: - input_config (google.cloud.automl_v1beta1.types.DocumentInputConfig): - An input config specifying the content of the - document. - document_text (google.cloud.automl_v1beta1.types.TextSnippet): - The plain text version of this document. - layout (MutableSequence[google.cloud.automl_v1beta1.types.Document.Layout]): - Describes the layout of the document. Sorted by - [page_number][]. - document_dimensions (google.cloud.automl_v1beta1.types.DocumentDimensions): - The dimensions of the page in the document. - page_count (int): - Number of pages in the document. 
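-        Example:
-            A minimal sketch of wrapping a PDF stored in Cloud
-            Storage; the bucket and object names are placeholders::
-
-                from google.cloud import automl_v1beta1
-
-                document = automl_v1beta1.Document(
-                    input_config=automl_v1beta1.DocumentInputConfig(
-                        gcs_source=automl_v1beta1.GcsSource(
-                            input_uris=["gs://my-bucket/docs/invoice.pdf"],
-                        ),
-                    ),
-                )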
- """ - - class Layout(proto.Message): - r"""Describes the layout information of a - [text_segment][google.cloud.automl.v1beta1.Document.Layout.text_segment] - in the document. - - Attributes: - text_segment (google.cloud.automl_v1beta1.types.TextSegment): - Text Segment that represents a segment in - [document_text][google.cloud.automl.v1beta1.Document.document_text]. - page_number (int): - Page number of the - [text_segment][google.cloud.automl.v1beta1.Document.Layout.text_segment] - in the original document, starts from 1. - bounding_poly (google.cloud.automl_v1beta1.types.BoundingPoly): - The position of the - [text_segment][google.cloud.automl.v1beta1.Document.Layout.text_segment] - in the page. Contains exactly 4 - - [normalized_vertices][google.cloud.automl.v1beta1.BoundingPoly.normalized_vertices] - and they are connected by edges in the order provided, which - will represent a rectangle parallel to the frame. The - [NormalizedVertex-s][google.cloud.automl.v1beta1.NormalizedVertex] - are relative to the page. Coordinates are based on top-left - as point (0,0). - text_segment_type (google.cloud.automl_v1beta1.types.Document.Layout.TextSegmentType): - The type of the - [text_segment][google.cloud.automl.v1beta1.Document.Layout.text_segment] - in document. - """ - class TextSegmentType(proto.Enum): - r"""The type of TextSegment in the context of the original - document. - - Values: - TEXT_SEGMENT_TYPE_UNSPECIFIED (0): - Should not be used. - TOKEN (1): - The text segment is a token. e.g. word. - PARAGRAPH (2): - The text segment is a paragraph. - FORM_FIELD (3): - The text segment is a form field. - FORM_FIELD_NAME (4): - The text segment is the name part of a form field. It will - be treated as child of another FORM_FIELD TextSegment if its - span is subspan of another TextSegment with type FORM_FIELD. - FORM_FIELD_CONTENTS (5): - The text segment is the text content part of a form field. - It will be treated as child of another FORM_FIELD - TextSegment if its span is subspan of another TextSegment - with type FORM_FIELD. - TABLE (6): - The text segment is a whole table, including - headers, and all rows. - TABLE_HEADER (7): - The text segment is a table's headers. It - will be treated as child of another TABLE - TextSegment if its span is subspan of another - TextSegment with type TABLE. - TABLE_ROW (8): - The text segment is a row in table. It will - be treated as child of another TABLE TextSegment - if its span is subspan of another TextSegment - with type TABLE. - TABLE_CELL (9): - The text segment is a cell in table. It will be treated as - child of another TABLE_ROW TextSegment if its span is - subspan of another TextSegment with type TABLE_ROW. 
- """ - TEXT_SEGMENT_TYPE_UNSPECIFIED = 0 - TOKEN = 1 - PARAGRAPH = 2 - FORM_FIELD = 3 - FORM_FIELD_NAME = 4 - FORM_FIELD_CONTENTS = 5 - TABLE = 6 - TABLE_HEADER = 7 - TABLE_ROW = 8 - TABLE_CELL = 9 - - text_segment: gca_text_segment.TextSegment = proto.Field( - proto.MESSAGE, - number=1, - message=gca_text_segment.TextSegment, - ) - page_number: int = proto.Field( - proto.INT32, - number=2, - ) - bounding_poly: geometry.BoundingPoly = proto.Field( - proto.MESSAGE, - number=3, - message=geometry.BoundingPoly, - ) - text_segment_type: 'Document.Layout.TextSegmentType' = proto.Field( - proto.ENUM, - number=4, - enum='Document.Layout.TextSegmentType', - ) - - input_config: io.DocumentInputConfig = proto.Field( - proto.MESSAGE, - number=1, - message=io.DocumentInputConfig, - ) - document_text: 'TextSnippet' = proto.Field( - proto.MESSAGE, - number=2, - message='TextSnippet', - ) - layout: MutableSequence[Layout] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=Layout, - ) - document_dimensions: 'DocumentDimensions' = proto.Field( - proto.MESSAGE, - number=4, - message='DocumentDimensions', - ) - page_count: int = proto.Field( - proto.INT32, - number=5, - ) - - -class Row(proto.Message): - r"""A representation of a row in a relational table. - - Attributes: - column_spec_ids (MutableSequence[str]): - The resource IDs of the column specs describing the columns - of the row. If set must contain, but possibly in a different - order, all input feature - - [column_spec_ids][google.cloud.automl.v1beta1.TablesModelMetadata.input_feature_column_specs] - of the Model this row is being passed to. Note: The below - ``values`` field must match order of this field, if this - field is set. - values (MutableSequence[google.protobuf.struct_pb2.Value]): - Required. The values of the row cells, given in the same - order as the column_spec_ids, or, if not set, then in the - same order as input feature - - [column_specs][google.cloud.automl.v1beta1.TablesModelMetadata.input_feature_column_specs] - of the Model this row is being passed to. - """ - - column_spec_ids: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - values: MutableSequence[struct_pb2.Value] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=struct_pb2.Value, - ) - - -class ExamplePayload(proto.Message): - r"""Example data used for training or prediction. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - image (google.cloud.automl_v1beta1.types.Image): - Example image. - - This field is a member of `oneof`_ ``payload``. - text_snippet (google.cloud.automl_v1beta1.types.TextSnippet): - Example text. - - This field is a member of `oneof`_ ``payload``. - document (google.cloud.automl_v1beta1.types.Document): - Example document. - - This field is a member of `oneof`_ ``payload``. - row (google.cloud.automl_v1beta1.types.Row): - Example relational table row. - - This field is a member of `oneof`_ ``payload``. 
- """ - - image: 'Image' = proto.Field( - proto.MESSAGE, - number=1, - oneof='payload', - message='Image', - ) - text_snippet: 'TextSnippet' = proto.Field( - proto.MESSAGE, - number=2, - oneof='payload', - message='TextSnippet', - ) - document: 'Document' = proto.Field( - proto.MESSAGE, - number=4, - oneof='payload', - message='Document', - ) - row: 'Row' = proto.Field( - proto.MESSAGE, - number=3, - oneof='payload', - message='Row', - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/data_stats.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/data_stats.py deleted file mode 100644 index 2d4c9dd26de7..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/data_stats.py +++ /dev/null @@ -1,361 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.automl.v1beta1', - manifest={ - 'DataStats', - 'Float64Stats', - 'StringStats', - 'TimestampStats', - 'ArrayStats', - 'StructStats', - 'CategoryStats', - 'CorrelationStats', - }, -) - - -class DataStats(proto.Message): - r"""The data statistics of a series of values that share the same - DataType. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - float64_stats (google.cloud.automl_v1beta1.types.Float64Stats): - The statistics for FLOAT64 DataType. - - This field is a member of `oneof`_ ``stats``. - string_stats (google.cloud.automl_v1beta1.types.StringStats): - The statistics for STRING DataType. - - This field is a member of `oneof`_ ``stats``. - timestamp_stats (google.cloud.automl_v1beta1.types.TimestampStats): - The statistics for TIMESTAMP DataType. - - This field is a member of `oneof`_ ``stats``. - array_stats (google.cloud.automl_v1beta1.types.ArrayStats): - The statistics for ARRAY DataType. - - This field is a member of `oneof`_ ``stats``. - struct_stats (google.cloud.automl_v1beta1.types.StructStats): - The statistics for STRUCT DataType. - - This field is a member of `oneof`_ ``stats``. - category_stats (google.cloud.automl_v1beta1.types.CategoryStats): - The statistics for CATEGORY DataType. - - This field is a member of `oneof`_ ``stats``. - distinct_value_count (int): - The number of distinct values. - null_value_count (int): - The number of values that are null. - valid_value_count (int): - The number of values that are valid. 
- """ - - float64_stats: 'Float64Stats' = proto.Field( - proto.MESSAGE, - number=3, - oneof='stats', - message='Float64Stats', - ) - string_stats: 'StringStats' = proto.Field( - proto.MESSAGE, - number=4, - oneof='stats', - message='StringStats', - ) - timestamp_stats: 'TimestampStats' = proto.Field( - proto.MESSAGE, - number=5, - oneof='stats', - message='TimestampStats', - ) - array_stats: 'ArrayStats' = proto.Field( - proto.MESSAGE, - number=6, - oneof='stats', - message='ArrayStats', - ) - struct_stats: 'StructStats' = proto.Field( - proto.MESSAGE, - number=7, - oneof='stats', - message='StructStats', - ) - category_stats: 'CategoryStats' = proto.Field( - proto.MESSAGE, - number=8, - oneof='stats', - message='CategoryStats', - ) - distinct_value_count: int = proto.Field( - proto.INT64, - number=1, - ) - null_value_count: int = proto.Field( - proto.INT64, - number=2, - ) - valid_value_count: int = proto.Field( - proto.INT64, - number=9, - ) - - -class Float64Stats(proto.Message): - r"""The data statistics of a series of FLOAT64 values. - - Attributes: - mean (float): - The mean of the series. - standard_deviation (float): - The standard deviation of the series. - quantiles (MutableSequence[float]): - Ordered from 0 to k k-quantile values of the data series of - n values. The value at index i is, approximately, the - i*n/k-th smallest value in the series; for i = 0 and i = k - these are, respectively, the min and max values. - histogram_buckets (MutableSequence[google.cloud.automl_v1beta1.types.Float64Stats.HistogramBucket]): - Histogram buckets of the data series. Sorted by the min - value of the bucket, ascendingly, and the number of the - buckets is dynamically generated. The buckets are - non-overlapping and completely cover whole FLOAT64 range - with min of first bucket being ``"-Infinity"``, and max of - the last one being ``"Infinity"``. - """ - - class HistogramBucket(proto.Message): - r"""A bucket of a histogram. - - Attributes: - min_ (float): - The minimum value of the bucket, inclusive. - max_ (float): - The maximum value of the bucket, exclusive unless max = - ``"Infinity"``, in which case it's inclusive. - count (int): - The number of data values that are in the - bucket, i.e. are between min and max values. - """ - - min_: float = proto.Field( - proto.DOUBLE, - number=1, - ) - max_: float = proto.Field( - proto.DOUBLE, - number=2, - ) - count: int = proto.Field( - proto.INT64, - number=3, - ) - - mean: float = proto.Field( - proto.DOUBLE, - number=1, - ) - standard_deviation: float = proto.Field( - proto.DOUBLE, - number=2, - ) - quantiles: MutableSequence[float] = proto.RepeatedField( - proto.DOUBLE, - number=3, - ) - histogram_buckets: MutableSequence[HistogramBucket] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message=HistogramBucket, - ) - - -class StringStats(proto.Message): - r"""The data statistics of a series of STRING values. - - Attributes: - top_unigram_stats (MutableSequence[google.cloud.automl_v1beta1.types.StringStats.UnigramStats]): - The statistics of the top 20 unigrams, ordered by - [count][google.cloud.automl.v1beta1.StringStats.UnigramStats.count]. - """ - - class UnigramStats(proto.Message): - r"""The statistics of a unigram. - - Attributes: - value (str): - The unigram. - count (int): - The number of occurrences of this unigram in - the series. 
- """ - - value: str = proto.Field( - proto.STRING, - number=1, - ) - count: int = proto.Field( - proto.INT64, - number=2, - ) - - top_unigram_stats: MutableSequence[UnigramStats] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=UnigramStats, - ) - - -class TimestampStats(proto.Message): - r"""The data statistics of a series of TIMESTAMP values. - - Attributes: - granular_stats (MutableMapping[str, google.cloud.automl_v1beta1.types.TimestampStats.GranularStats]): - The string key is the pre-defined granularity. Currently - supported: hour_of_day, day_of_week, month_of_year. - Granularities finer that the granularity of timestamp data - are not populated (e.g. if timestamps are at day - granularity, then hour_of_day is not populated). - """ - - class GranularStats(proto.Message): - r"""Stats split by a defined in context granularity. - - Attributes: - buckets (MutableMapping[int, int]): - A map from granularity key to example count for that key. - E.g. for hour_of_day ``13`` means 1pm, or for month_of_year - ``5`` means May). - """ - - buckets: MutableMapping[int, int] = proto.MapField( - proto.INT32, - proto.INT64, - number=1, - ) - - granular_stats: MutableMapping[str, GranularStats] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=1, - message=GranularStats, - ) - - -class ArrayStats(proto.Message): - r"""The data statistics of a series of ARRAY values. - - Attributes: - member_stats (google.cloud.automl_v1beta1.types.DataStats): - Stats of all the values of all arrays, as if - they were a single long series of data. The type - depends on the element type of the array. - """ - - member_stats: 'DataStats' = proto.Field( - proto.MESSAGE, - number=2, - message='DataStats', - ) - - -class StructStats(proto.Message): - r"""The data statistics of a series of STRUCT values. - - Attributes: - field_stats (MutableMapping[str, google.cloud.automl_v1beta1.types.DataStats]): - Map from a field name of the struct to data - stats aggregated over series of all data in that - field across all the structs. - """ - - field_stats: MutableMapping[str, 'DataStats'] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=1, - message='DataStats', - ) - - -class CategoryStats(proto.Message): - r"""The data statistics of a series of CATEGORY values. - - Attributes: - top_category_stats (MutableSequence[google.cloud.automl_v1beta1.types.CategoryStats.SingleCategoryStats]): - The statistics of the top 20 CATEGORY values, ordered by - - [count][google.cloud.automl.v1beta1.CategoryStats.SingleCategoryStats.count]. - """ - - class SingleCategoryStats(proto.Message): - r"""The statistics of a single CATEGORY value. - - Attributes: - value (str): - The CATEGORY value. - count (int): - The number of occurrences of this value in - the series. - """ - - value: str = proto.Field( - proto.STRING, - number=1, - ) - count: int = proto.Field( - proto.INT64, - number=2, - ) - - top_category_stats: MutableSequence[SingleCategoryStats] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=SingleCategoryStats, - ) - - -class CorrelationStats(proto.Message): - r"""A correlation statistics between two series of DataType - values. The series may have differing DataType-s, but within a - single series the DataType must be the same. - - Attributes: - cramers_v (float): - The correlation value using the Cramer's V - measure. 
- """ - - cramers_v: float = proto.Field( - proto.DOUBLE, - number=1, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/data_types.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/data_types.py deleted file mode 100644 index f266b5f734d9..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/data_types.py +++ /dev/null @@ -1,180 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.automl.v1beta1', - manifest={ - 'TypeCode', - 'DataType', - 'StructType', - }, -) - - -class TypeCode(proto.Enum): - r"""``TypeCode`` is used as a part of - [DataType][google.cloud.automl.v1beta1.DataType]. - - Values: - TYPE_CODE_UNSPECIFIED (0): - Not specified. Should not be used. - FLOAT64 (3): - Encoded as ``number``, or the strings ``"NaN"``, - ``"Infinity"``, or ``"-Infinity"``. - TIMESTAMP (4): - Must be between 0AD and 9999AD. Encoded as ``string`` - according to - [time_format][google.cloud.automl.v1beta1.DataType.time_format], - or, if that format is not set, then in RFC 3339 - ``date-time`` format, where ``time-offset`` = ``"Z"`` (e.g. - 1985-04-12T23:20:50.52Z). - STRING (6): - Encoded as ``string``. - ARRAY (8): - Encoded as ``list``, where the list elements are represented - according to - - [list_element_type][google.cloud.automl.v1beta1.DataType.list_element_type]. - STRUCT (9): - Encoded as ``struct``, where field values are represented - according to - [struct_type][google.cloud.automl.v1beta1.DataType.struct_type]. - CATEGORY (10): - Values of this type are not further understood by AutoML, - e.g. AutoML is unable to tell the order of values (as it - could with FLOAT64), or is unable to say if one value - contains another (as it could with STRING). Encoded as - ``string`` (bytes should be base64-encoded, as described in - RFC 4648, section 4). - """ - TYPE_CODE_UNSPECIFIED = 0 - FLOAT64 = 3 - TIMESTAMP = 4 - STRING = 6 - ARRAY = 8 - STRUCT = 9 - CATEGORY = 10 - - -class DataType(proto.Message): - r"""Indicated the type of data that can be stored in a structured - data entity (e.g. a table). - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - list_element_type (google.cloud.automl_v1beta1.types.DataType): - If - [type_code][google.cloud.automl.v1beta1.DataType.type_code] - == [ARRAY][google.cloud.automl.v1beta1.TypeCode.ARRAY], then - ``list_element_type`` is the type of the elements. 
- - This field is a member of `oneof`_ ``details``. - struct_type (google.cloud.automl_v1beta1.types.StructType): - If - [type_code][google.cloud.automl.v1beta1.DataType.type_code] - == [STRUCT][google.cloud.automl.v1beta1.TypeCode.STRUCT], - then ``struct_type`` provides type information for the - struct's fields. - - This field is a member of `oneof`_ ``details``. - time_format (str): - If - [type_code][google.cloud.automl.v1beta1.DataType.type_code] - == - [TIMESTAMP][google.cloud.automl.v1beta1.TypeCode.TIMESTAMP] - then ``time_format`` provides the format in which that time - field is expressed. The time_format must either be one of: - - - ``UNIX_SECONDS`` - - ``UNIX_MILLISECONDS`` - - ``UNIX_MICROSECONDS`` - - ``UNIX_NANOSECONDS`` (for respectively number of seconds, - milliseconds, microseconds and nanoseconds since start of - the Unix epoch); or be written in ``strftime`` syntax. If - time_format is not set, then the default format as - described on the type_code is used. - - This field is a member of `oneof`_ ``details``. - type_code (google.cloud.automl_v1beta1.types.TypeCode): - Required. The - [TypeCode][google.cloud.automl.v1beta1.TypeCode] for this - type. - nullable (bool): - If true, this DataType can also be ``NULL``. In .CSV files - ``NULL`` value is expressed as an empty string. - """ - - list_element_type: 'DataType' = proto.Field( - proto.MESSAGE, - number=2, - oneof='details', - message='DataType', - ) - struct_type: 'StructType' = proto.Field( - proto.MESSAGE, - number=3, - oneof='details', - message='StructType', - ) - time_format: str = proto.Field( - proto.STRING, - number=5, - oneof='details', - ) - type_code: 'TypeCode' = proto.Field( - proto.ENUM, - number=1, - enum='TypeCode', - ) - nullable: bool = proto.Field( - proto.BOOL, - number=4, - ) - - -class StructType(proto.Message): - r"""``StructType`` defines the DataType-s of a - [STRUCT][google.cloud.automl.v1beta1.TypeCode.STRUCT] type. - - Attributes: - fields (MutableMapping[str, google.cloud.automl_v1beta1.types.DataType]): - Unordered map of struct field names to their - data types. Fields cannot be added or removed - via Update. Their names and data types are still - mutable. - """ - - fields: MutableMapping[str, 'DataType'] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=1, - message='DataType', - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/dataset.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/dataset.py deleted file mode 100644 index 9f9a99f0a8db..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/dataset.py +++ /dev/null @@ -1,198 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.automl_v1beta1.types import image -from google.cloud.automl_v1beta1.types import tables -from google.cloud.automl_v1beta1.types import text -from google.cloud.automl_v1beta1.types import translation -from google.cloud.automl_v1beta1.types import video -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.automl.v1beta1', - manifest={ - 'Dataset', - }, -) - - -class Dataset(proto.Message): - r"""A workspace for solving a single, particular machine learning - (ML) problem. A workspace contains examples that may be - annotated. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - translation_dataset_metadata (google.cloud.automl_v1beta1.types.TranslationDatasetMetadata): - Metadata for a dataset used for translation. - - This field is a member of `oneof`_ ``dataset_metadata``. - image_classification_dataset_metadata (google.cloud.automl_v1beta1.types.ImageClassificationDatasetMetadata): - Metadata for a dataset used for image - classification. - - This field is a member of `oneof`_ ``dataset_metadata``. - text_classification_dataset_metadata (google.cloud.automl_v1beta1.types.TextClassificationDatasetMetadata): - Metadata for a dataset used for text - classification. - - This field is a member of `oneof`_ ``dataset_metadata``. - image_object_detection_dataset_metadata (google.cloud.automl_v1beta1.types.ImageObjectDetectionDatasetMetadata): - Metadata for a dataset used for image object - detection. - - This field is a member of `oneof`_ ``dataset_metadata``. - video_classification_dataset_metadata (google.cloud.automl_v1beta1.types.VideoClassificationDatasetMetadata): - Metadata for a dataset used for video - classification. - - This field is a member of `oneof`_ ``dataset_metadata``. - video_object_tracking_dataset_metadata (google.cloud.automl_v1beta1.types.VideoObjectTrackingDatasetMetadata): - Metadata for a dataset used for video object - tracking. - - This field is a member of `oneof`_ ``dataset_metadata``. - text_extraction_dataset_metadata (google.cloud.automl_v1beta1.types.TextExtractionDatasetMetadata): - Metadata for a dataset used for text - extraction. - - This field is a member of `oneof`_ ``dataset_metadata``. - text_sentiment_dataset_metadata (google.cloud.automl_v1beta1.types.TextSentimentDatasetMetadata): - Metadata for a dataset used for text - sentiment. - - This field is a member of `oneof`_ ``dataset_metadata``. - tables_dataset_metadata (google.cloud.automl_v1beta1.types.TablesDatasetMetadata): - Metadata for a dataset used for Tables. - - This field is a member of `oneof`_ ``dataset_metadata``. - name (str): - Output only. The resource name of the dataset. Form: - ``projects/{project_id}/locations/{location_id}/datasets/{dataset_id}`` - display_name (str): - Required. The name of the dataset to show in the interface. - The name can be up to 32 characters long and can consist - only of ASCII Latin letters A-Z and a-z, underscores (_), - and ASCII digits 0-9. - description (str): - User-provided description of the dataset. The - description can be up to 25000 characters long. 
- example_count (int): - Output only. The number of examples in the - dataset. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Timestamp when this dataset was - created. - etag (str): - Used to perform consistent read-modify-write - updates. If not set, a blind "overwrite" update - happens. - """ - - translation_dataset_metadata: translation.TranslationDatasetMetadata = proto.Field( - proto.MESSAGE, - number=23, - oneof='dataset_metadata', - message=translation.TranslationDatasetMetadata, - ) - image_classification_dataset_metadata: image.ImageClassificationDatasetMetadata = proto.Field( - proto.MESSAGE, - number=24, - oneof='dataset_metadata', - message=image.ImageClassificationDatasetMetadata, - ) - text_classification_dataset_metadata: text.TextClassificationDatasetMetadata = proto.Field( - proto.MESSAGE, - number=25, - oneof='dataset_metadata', - message=text.TextClassificationDatasetMetadata, - ) - image_object_detection_dataset_metadata: image.ImageObjectDetectionDatasetMetadata = proto.Field( - proto.MESSAGE, - number=26, - oneof='dataset_metadata', - message=image.ImageObjectDetectionDatasetMetadata, - ) - video_classification_dataset_metadata: video.VideoClassificationDatasetMetadata = proto.Field( - proto.MESSAGE, - number=31, - oneof='dataset_metadata', - message=video.VideoClassificationDatasetMetadata, - ) - video_object_tracking_dataset_metadata: video.VideoObjectTrackingDatasetMetadata = proto.Field( - proto.MESSAGE, - number=29, - oneof='dataset_metadata', - message=video.VideoObjectTrackingDatasetMetadata, - ) - text_extraction_dataset_metadata: text.TextExtractionDatasetMetadata = proto.Field( - proto.MESSAGE, - number=28, - oneof='dataset_metadata', - message=text.TextExtractionDatasetMetadata, - ) - text_sentiment_dataset_metadata: text.TextSentimentDatasetMetadata = proto.Field( - proto.MESSAGE, - number=30, - oneof='dataset_metadata', - message=text.TextSentimentDatasetMetadata, - ) - tables_dataset_metadata: tables.TablesDatasetMetadata = proto.Field( - proto.MESSAGE, - number=33, - oneof='dataset_metadata', - message=tables.TablesDatasetMetadata, - ) - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - description: str = proto.Field( - proto.STRING, - number=3, - ) - example_count: int = proto.Field( - proto.INT32, - number=21, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=14, - message=timestamp_pb2.Timestamp, - ) - etag: str = proto.Field( - proto.STRING, - number=17, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/detection.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/detection.py deleted file mode 100644 index 1e5b8f722ad9..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/detection.py +++ /dev/null @@ -1,264 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
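To make the ``dataset_metadata`` oneof documented above concrete, a minimal, hypothetical sketch (the display name is illustrative; setting any other member of the oneof would clear this one):

.. code-block:: python

    # Hypothetical sketch only: a Dataset with exactly one dataset_metadata
    # oneof member populated.
    from google.cloud import automl_v1beta1

    dataset = automl_v1beta1.Dataset(
        display_name="flower_photos",
        image_classification_dataset_metadata=(
            automl_v1beta1.ImageClassificationDatasetMetadata(
                classification_type=automl_v1beta1.ClassificationType.MULTICLASS,
            )
        ),
    )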
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.automl_v1beta1.types import geometry -from google.protobuf import duration_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.automl.v1beta1', - manifest={ - 'ImageObjectDetectionAnnotation', - 'VideoObjectTrackingAnnotation', - 'BoundingBoxMetricsEntry', - 'ImageObjectDetectionEvaluationMetrics', - 'VideoObjectTrackingEvaluationMetrics', - }, -) - - -class ImageObjectDetectionAnnotation(proto.Message): - r"""Annotation details for image object detection. - - Attributes: - bounding_box (google.cloud.automl_v1beta1.types.BoundingPoly): - Output only. The rectangle representing the - object location. - score (float): - Output only. The confidence that this annotation is positive - for the parent example, value in [0, 1], higher means higher - positivity confidence. - """ - - bounding_box: geometry.BoundingPoly = proto.Field( - proto.MESSAGE, - number=1, - message=geometry.BoundingPoly, - ) - score: float = proto.Field( - proto.FLOAT, - number=2, - ) - - -class VideoObjectTrackingAnnotation(proto.Message): - r"""Annotation details for video object tracking. - - Attributes: - instance_id (str): - Optional. The instance of the object, - expressed as a positive integer. Used to tell - apart objects of the same type (i.e. - AnnotationSpec) when multiple are present on a - single example. - NOTE: Instance ID prediction quality is not a - part of model evaluation and is done as best - effort. Especially in cases when an entity goes - off-screen for a longer time (minutes), when it - comes back it may be given a new instance ID. - time_offset (google.protobuf.duration_pb2.Duration): - Required. A time (frame) of a video to which - this annotation pertains. Represented as the - duration since the video's start. - bounding_box (google.cloud.automl_v1beta1.types.BoundingPoly): - Required. The rectangle representing the object location on - the frame (i.e. at the time_offset of the video). - score (float): - Output only. The confidence that this annotation is positive - for the video at the time_offset, value in [0, 1], higher - means higher positivity confidence. For annotations created - by the user the score is 1. When user approves an - annotation, the original float score is kept (and not - changed to 1). - """ - - instance_id: str = proto.Field( - proto.STRING, - number=1, - ) - time_offset: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=2, - message=duration_pb2.Duration, - ) - bounding_box: geometry.BoundingPoly = proto.Field( - proto.MESSAGE, - number=3, - message=geometry.BoundingPoly, - ) - score: float = proto.Field( - proto.FLOAT, - number=4, - ) - - -class BoundingBoxMetricsEntry(proto.Message): - r"""Bounding box matching model metrics for a single - intersection-over-union threshold and multiple label match - confidence thresholds. - - Attributes: - iou_threshold (float): - Output only. The intersection-over-union - threshold value used to compute this metrics - entry. - mean_average_precision (float): - Output only. The mean average precision, most often close to - au_prc. - confidence_metrics_entries (MutableSequence[google.cloud.automl_v1beta1.types.BoundingBoxMetricsEntry.ConfidenceMetricsEntry]): - Output only. 
Metrics for each label-match - confidence_threshold from - 0.05,0.10,...,0.95,0.96,0.97,0.98,0.99. Precision-recall - curve is derived from them. - """ - - class ConfidenceMetricsEntry(proto.Message): - r"""Metrics for a single confidence threshold. - - Attributes: - confidence_threshold (float): - Output only. The confidence threshold value - used to compute the metrics. - recall (float): - Output only. Recall under the given - confidence threshold. - precision (float): - Output only. Precision under the given - confidence threshold. - f1_score (float): - Output only. The harmonic mean of recall and - precision. - """ - - confidence_threshold: float = proto.Field( - proto.FLOAT, - number=1, - ) - recall: float = proto.Field( - proto.FLOAT, - number=2, - ) - precision: float = proto.Field( - proto.FLOAT, - number=3, - ) - f1_score: float = proto.Field( - proto.FLOAT, - number=4, - ) - - iou_threshold: float = proto.Field( - proto.FLOAT, - number=1, - ) - mean_average_precision: float = proto.Field( - proto.FLOAT, - number=2, - ) - confidence_metrics_entries: MutableSequence[ConfidenceMetricsEntry] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=ConfidenceMetricsEntry, - ) - - -class ImageObjectDetectionEvaluationMetrics(proto.Message): - r"""Model evaluation metrics for image object detection problems. - Evaluates prediction quality of labeled bounding boxes. - - Attributes: - evaluated_bounding_box_count (int): - Output only. The total number of bounding - boxes (i.e. summed over all images) the ground - truth used to create this evaluation had. - bounding_box_metrics_entries (MutableSequence[google.cloud.automl_v1beta1.types.BoundingBoxMetricsEntry]): - Output only. The bounding boxes match metrics - for each Intersection-over-union threshold - 0.05,0.10,...,0.95,0.96,0.97,0.98,0.99 and each - label confidence threshold - 0.05,0.10,...,0.95,0.96,0.97,0.98,0.99 pair. - bounding_box_mean_average_precision (float): - Output only. The single metric for bounding boxes - evaluation: the mean_average_precision averaged over all - bounding_box_metrics_entries. - """ - - evaluated_bounding_box_count: int = proto.Field( - proto.INT32, - number=1, - ) - bounding_box_metrics_entries: MutableSequence['BoundingBoxMetricsEntry'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='BoundingBoxMetricsEntry', - ) - bounding_box_mean_average_precision: float = proto.Field( - proto.FLOAT, - number=3, - ) - - -class VideoObjectTrackingEvaluationMetrics(proto.Message): - r"""Model evaluation metrics for video object tracking problems. - Evaluates prediction quality of both labeled bounding boxes and - labeled tracks (i.e. series of bounding boxes sharing same label - and instance ID). - - Attributes: - evaluated_frame_count (int): - Output only. The number of video frames used - to create this evaluation. - evaluated_bounding_box_count (int): - Output only. The total number of bounding - boxes (i.e. summed over all frames) the ground - truth used to create this evaluation had. - bounding_box_metrics_entries (MutableSequence[google.cloud.automl_v1beta1.types.BoundingBoxMetricsEntry]): - Output only. The bounding boxes match metrics - for each Intersection-over-union threshold - 0.05,0.10,...,0.95,0.96,0.97,0.98,0.99 and each - label confidence threshold - 0.05,0.10,...,0.95,0.96,0.97,0.98,0.99 pair. - bounding_box_mean_average_precision (float): - Output only. The single metric for bounding boxes - evaluation: the mean_average_precision averaged over all - bounding_box_metrics_entries. 
- """ - - evaluated_frame_count: int = proto.Field( - proto.INT32, - number=1, - ) - evaluated_bounding_box_count: int = proto.Field( - proto.INT32, - number=2, - ) - bounding_box_metrics_entries: MutableSequence['BoundingBoxMetricsEntry'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='BoundingBoxMetricsEntry', - ) - bounding_box_mean_average_precision: float = proto.Field( - proto.FLOAT, - number=6, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/geometry.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/geometry.py deleted file mode 100644 index 51c0c734ba22..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/geometry.py +++ /dev/null @@ -1,75 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.automl.v1beta1', - manifest={ - 'NormalizedVertex', - 'BoundingPoly', - }, -) - - -class NormalizedVertex(proto.Message): - r"""A vertex represents a 2D point in the image. - The normalized vertex coordinates are between 0 to 1 fractions - relative to the original plane (image, video). E.g. if the plane - (e.g. whole image) would have size 10 x 20 then a point with - normalized coordinates (0.1, 0.3) would be at the position (1, - 6) on that plane. - - Attributes: - x (float): - Required. Horizontal coordinate. - y (float): - Required. Vertical coordinate. - """ - - x: float = proto.Field( - proto.FLOAT, - number=1, - ) - y: float = proto.Field( - proto.FLOAT, - number=2, - ) - - -class BoundingPoly(proto.Message): - r"""A bounding polygon of a detected object on a plane. On output both - vertices and normalized_vertices are provided. The polygon is formed - by connecting vertices in the order they are listed. - - Attributes: - normalized_vertices (MutableSequence[google.cloud.automl_v1beta1.types.NormalizedVertex]): - Output only . The bounding polygon normalized - vertices. - """ - - normalized_vertices: MutableSequence['NormalizedVertex'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='NormalizedVertex', - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/image.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/image.py deleted file mode 100644 index e21a7bf11c0b..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/image.py +++ /dev/null @@ -1,304 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.automl_v1beta1.types import classification - - -__protobuf__ = proto.module( - package='google.cloud.automl.v1beta1', - manifest={ - 'ImageClassificationDatasetMetadata', - 'ImageObjectDetectionDatasetMetadata', - 'ImageClassificationModelMetadata', - 'ImageObjectDetectionModelMetadata', - 'ImageClassificationModelDeploymentMetadata', - 'ImageObjectDetectionModelDeploymentMetadata', - }, -) - - -class ImageClassificationDatasetMetadata(proto.Message): - r"""Dataset metadata that is specific to image classification. - - Attributes: - classification_type (google.cloud.automl_v1beta1.types.ClassificationType): - Required. Type of the classification problem. - """ - - classification_type: classification.ClassificationType = proto.Field( - proto.ENUM, - number=1, - enum=classification.ClassificationType, - ) - - -class ImageObjectDetectionDatasetMetadata(proto.Message): - r"""Dataset metadata specific to image object detection. - """ - - -class ImageClassificationModelMetadata(proto.Message): - r"""Model metadata for image classification. - - Attributes: - base_model_id (str): - Optional. The ID of the ``base`` model. If it is specified, - the new model will be created based on the ``base`` model. - Otherwise, the new model will be created from scratch. The - ``base`` model must be in the same ``project`` and - ``location`` as the new model to create, and have the same - ``model_type``. - train_budget (int): - Required. The train budget of creating this model, expressed - in hours. The actual ``train_cost`` will be equal to or less - than this value. - train_cost (int): - Output only. The actual train cost of creating this model, - expressed in hours. If this model is created from a ``base`` - model, the train cost used to create the ``base`` model is - not included. - stop_reason (str): - Output only. The reason that this create model operation - stopped, e.g. ``BUDGET_REACHED``, ``MODEL_CONVERGED``. - model_type (str): - Optional. Type of the model. The available values are: - - - ``cloud`` - Model to be used via prediction calls to - AutoML API. This is the default value. - - ``mobile-low-latency-1`` - A model that, in addition to - providing prediction via AutoML API, can also be exported - (see - [AutoMl.ExportModel][google.cloud.automl.v1beta1.AutoMl.ExportModel]) - and used on a mobile or edge device with TensorFlow - afterwards. Expected to have low latency, but may have - lower prediction quality than other models. - - ``mobile-versatile-1`` - A model that, in addition to - providing prediction via AutoML API, can also be exported - (see - [AutoMl.ExportModel][google.cloud.automl.v1beta1.AutoMl.ExportModel]) - and used on a mobile or edge device with TensorFlow - afterwards.
- - ``mobile-high-accuracy-1`` - A model that, in addition to - providing prediction via AutoML API, can also be exported - (see - [AutoMl.ExportModel][google.cloud.automl.v1beta1.AutoMl.ExportModel]) - and used on a mobile or edge device with TensorFlow - afterwards. Expected to have a higher latency, but should - also have a higher prediction quality than other models. - - ``mobile-core-ml-low-latency-1`` - A model that, in - addition to providing prediction via AutoML API, can also - be exported (see - [AutoMl.ExportModel][google.cloud.automl.v1beta1.AutoMl.ExportModel]) - and used on a mobile device with Core ML afterwards. - Expected to have low latency, but may have lower - prediction quality than other models. - - ``mobile-core-ml-versatile-1`` - A model that, in - addition to providing prediction via AutoML API, can also - be exported (see - [AutoMl.ExportModel][google.cloud.automl.v1beta1.AutoMl.ExportModel]) - and used on a mobile device with Core ML afterwards. - - ``mobile-core-ml-high-accuracy-1`` - A model that, in - addition to providing prediction via AutoML API, can also - be exported (see - [AutoMl.ExportModel][google.cloud.automl.v1beta1.AutoMl.ExportModel]) - and used on a mobile device with Core ML afterwards. - Expected to have a higher latency, but should also have a - higher prediction quality than other models. - node_qps (float): - Output only. An approximate number of online - prediction QPS that can be supported by this - model per node on which it is deployed. - node_count (int): - Output only. The number of nodes this model is deployed on. - A node is an abstraction of a machine resource, which can - handle online prediction QPS as given in the node_qps field. - """ - - base_model_id: str = proto.Field( - proto.STRING, - number=1, - ) - train_budget: int = proto.Field( - proto.INT64, - number=2, - ) - train_cost: int = proto.Field( - proto.INT64, - number=3, - ) - stop_reason: str = proto.Field( - proto.STRING, - number=5, - ) - model_type: str = proto.Field( - proto.STRING, - number=7, - ) - node_qps: float = proto.Field( - proto.DOUBLE, - number=13, - ) - node_count: int = proto.Field( - proto.INT64, - number=14, - ) - - -class ImageObjectDetectionModelMetadata(proto.Message): - r"""Model metadata specific to image object detection. - - Attributes: - model_type (str): - Optional. Type of the model. The available values are: - - - ``cloud-high-accuracy-1`` - (default) A model to be used - via prediction calls to AutoML API. Expected to have a - higher latency, but should also have a higher prediction - quality than other models. - - ``cloud-low-latency-1`` - A model to be used via - prediction calls to AutoML API. Expected to have low - latency, but may have lower prediction quality than other - models. - - ``mobile-low-latency-1`` - A model that, in addition to - providing prediction via AutoML API, can also be exported - (see - [AutoMl.ExportModel][google.cloud.automl.v1beta1.AutoMl.ExportModel]) - and used on a mobile or edge device with TensorFlow - afterwards. Expected to have low latency, but may have - lower prediction quality than other models. - - ``mobile-versatile-1`` - A model that, in addition to - providing prediction via AutoML API, can also be exported - (see - [AutoMl.ExportModel][google.cloud.automl.v1beta1.AutoMl.ExportModel]) - and used on a mobile or edge device with TensorFlow - afterwards.
- - ``mobile-high-accuracy-1`` - A model that, in addition to - providing prediction via AutoML API, can also be exported - (see - [AutoMl.ExportModel][google.cloud.automl.v1beta1.AutoMl.ExportModel]) - and used on a mobile or edge device with TensorFlow - afterwards. Expected to have a higher latency, but should - also have a higher prediction quality than other models. - node_count (int): - Output only. The number of nodes this model is deployed on. - A node is an abstraction of a machine resource, which can - handle online prediction QPS as given in the qps_per_node - field. - node_qps (float): - Output only. An approximate number of online - prediction QPS that can be supported by this - model per node on which it is deployed. - stop_reason (str): - Output only. The reason that this create model operation - stopped, e.g. ``BUDGET_REACHED``, ``MODEL_CONVERGED``. - train_budget_milli_node_hours (int): - The train budget of creating this model, expressed in milli - node hours, i.e. a value of 1,000 in this field means 1 node - hour. The actual ``train_cost`` will be equal to or less than - this value. If further model training ceases to provide any - improvements, it will stop without using the full budget and - the stop_reason will be ``MODEL_CONVERGED``. Note, node_hour = - actual_hour \* number_of_nodes_involved. For model type - ``cloud-high-accuracy-1``\ (default) and - ``cloud-low-latency-1``, the train budget must be between - 20,000 and 900,000 milli node hours, inclusive. The default - value is 216,000, which represents one day in wall time. For - model type ``mobile-low-latency-1``, ``mobile-versatile-1``, - ``mobile-high-accuracy-1``, - ``mobile-core-ml-low-latency-1``, - ``mobile-core-ml-versatile-1``, - ``mobile-core-ml-high-accuracy-1``, the train budget must be - between 1,000 and 100,000 milli node hours, inclusive. The - default value is 24,000, which represents one day in wall - time. - train_cost_milli_node_hours (int): - Output only. The actual train cost of - creating this model, expressed in milli node - hours, i.e. a value of 1,000 in this field means - 1 node hour. Guaranteed to not exceed the train - budget. - """ - - model_type: str = proto.Field( - proto.STRING, - number=1, - ) - node_count: int = proto.Field( - proto.INT64, - number=3, - ) - node_qps: float = proto.Field( - proto.DOUBLE, - number=4, - ) - stop_reason: str = proto.Field( - proto.STRING, - number=5, - ) - train_budget_milli_node_hours: int = proto.Field( - proto.INT64, - number=6, - ) - train_cost_milli_node_hours: int = proto.Field( - proto.INT64, - number=7, - ) - - -class ImageClassificationModelDeploymentMetadata(proto.Message): - r"""Model deployment metadata specific to Image Classification. - - Attributes: - node_count (int): - Input only. The number of nodes to deploy the model on. A - node is an abstraction of a machine resource, which can - handle online prediction QPS as given in the model's - - [node_qps][google.cloud.automl.v1beta1.ImageClassificationModelMetadata.node_qps]. - Must be between 1 and 100, inclusive on both ends. - """ - - node_count: int = proto.Field( - proto.INT64, - number=1, - ) - - -class ImageObjectDetectionModelDeploymentMetadata(proto.Message): - r"""Model deployment metadata specific to Image Object Detection. - - Attributes: - node_count (int): - Input only. The number of nodes to deploy the model on.
A - node is an abstraction of a machine resource, which can - handle online prediction QPS as given in the model's - - [qps_per_node][google.cloud.automl.v1beta1.ImageObjectDetectionModelMetadata.qps_per_node]. - Must be between 1 and 100, inclusive on both ends. - """ - - node_count: int = proto.Field( - proto.INT64, - number=1, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/io.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/io.py deleted file mode 100644 index e1a4d798935c..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/io.py +++ /dev/null @@ -1,1253 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.automl.v1beta1', - manifest={ - 'InputConfig', - 'BatchPredictInputConfig', - 'DocumentInputConfig', - 'OutputConfig', - 'BatchPredictOutputConfig', - 'ModelExportOutputConfig', - 'ExportEvaluatedExamplesOutputConfig', - 'GcsSource', - 'BigQuerySource', - 'GcsDestination', - 'BigQueryDestination', - 'GcrDestination', - }, -) - - -class InputConfig(proto.Message): - r"""Input configuration for ImportData Action. - - The format of the input depends on the dataset_metadata of the - Dataset into which the import is happening. As input source the - [gcs_source][google.cloud.automl.v1beta1.InputConfig.gcs_source] is - expected, unless specified otherwise. Additionally, any input .CSV - file by itself must be 100MB or smaller, unless specified otherwise. - If an "example" file (that is, image, video etc.) with identical - content (even under a different GCS_FILE_PATH) is mentioned - multiple times, then its label, bounding boxes etc. are appended. - The same file should always be provided with the same ML_USE and - GCS_FILE_PATH; if it is not, then these values are - nondeterministically selected from the given ones. - - The formats are represented in EBNF with commas being literal and - with non-terminal symbols defined near the end of this comment. The - formats are: - - - For Image Classification: CSV file(s) with each line in format: - ML_USE,GCS_FILE_PATH,LABEL,LABEL,... GCS_FILE_PATH leads to image - of up to 30MB in size. Supported extensions: .JPEG, .GIF, .PNG, - .WEBP, .BMP, .TIFF, .ICO For MULTICLASS classification type, at - most one LABEL is allowed per image. If an image has not yet been - labeled, then it should be mentioned just once with no LABEL.
- Some sample rows: TRAIN,gs://folder/image1.jpg,daisy - TEST,gs://folder/image2.jpg,dandelion,tulip,rose - UNASSIGNED,gs://folder/image3.jpg,daisy - UNASSIGNED,gs://folder/image4.jpg - - - For Image Object Detection: CSV file(s) with each line in format: - ML_USE,GCS_FILE_PATH,(LABEL,BOUNDING_BOX \| ,,,,,,,) - GCS_FILE_PATH leads to image of up to 30MB in size. Supported - extensions: .JPEG, .GIF, .PNG. Each image is assumed to be - exhaustively labeled. The minimum allowed BOUNDING_BOX edge - length is 0.01, and no more than 500 BOUNDING_BOX-es per image - are allowed (one BOUNDING_BOX is defined per line). If an image - has not yet been labeled, then it should be mentioned just once - with no LABEL and the ",,,,,,," in place of the BOUNDING_BOX. - Images which are known to contain no bounding boxes - should be labeled explicitly as "NEGATIVE_IMAGE", followed by - ",,,,,,," in place of the BOUNDING_BOX. Sample rows: - TRAIN,gs://folder/image1.png,car,0.1,0.1,,,0.3,0.3,, - TRAIN,gs://folder/image1.png,bike,.7,.6,,,.8,.9,, - UNASSIGNED,gs://folder/im2.png,car,0.1,0.1,0.2,0.1,0.2,0.3,0.1,0.3 - TEST,gs://folder/im3.png,,,,,,,,, - TRAIN,gs://folder/im4.png,NEGATIVE_IMAGE,,,,,,,,, - - - For Video Classification: CSV file(s) with each line in format: - ML_USE,GCS_FILE_PATH where the ML_USE value VALIDATE should not be - used. The GCS_FILE_PATH should lead to another .csv file which - describes examples that have the given ML_USE, using the following - row format: - GCS_FILE_PATH,(LABEL,TIME_SEGMENT_START,TIME_SEGMENT_END \| ,,) - Here GCS_FILE_PATH leads to a video of up to 50GB in size and up - to 3h duration. Supported extensions: .MOV, .MPEG4, .MP4, .AVI. - TIME_SEGMENT_START and TIME_SEGMENT_END must be within the length - of the video, and end has to be after the start. Any segment of a - video which has one or more labels on it is considered a hard - negative for all other labels. Any segment with no labels on it - is considered to be unknown. If a whole video is unknown, then it - should be mentioned just once with ",," in place of LABEL, - TIME_SEGMENT_START,TIME_SEGMENT_END. Sample top level CSV file: - TRAIN,gs://folder/train_videos.csv - TEST,gs://folder/test_videos.csv - UNASSIGNED,gs://folder/other_videos.csv Sample rows of a CSV file - for a particular ML_USE: - gs://folder/video1.avi,car,120,180.000021 - gs://folder/video1.avi,bike,150,180.000021 - gs://folder/vid2.avi,car,0,60.5 gs://folder/vid3.avi,,, - - - For Video Object Tracking: CSV file(s) with each line in format: - ML_USE,GCS_FILE_PATH where the ML_USE value VALIDATE should not be - used. The GCS_FILE_PATH should lead to another .csv file which - describes examples that have the given ML_USE, using one of the - following row formats: - GCS_FILE_PATH,LABEL,[INSTANCE_ID],TIMESTAMP,BOUNDING_BOX or - GCS_FILE_PATH,,,,,,,,,, Here GCS_FILE_PATH leads to a video of up - to 50GB in size and up to 3h duration. Supported extensions: - .MOV, .MPEG4, .MP4, .AVI. Providing INSTANCE_IDs can help to - obtain a better model. When a specific labeled entity leaves the - video frame and shows up afterwards, it is not required, albeit - preferable, that the same INSTANCE_ID be given to it. TIMESTAMP - must be within the length of the video, the BOUNDING_BOX is - assumed to be drawn on the video frame closest to the - TIMESTAMP. Any frame mentioned by a TIMESTAMP is expected to be - exhaustively labeled and no more than 500 BOUNDING_BOX-es per - frame are allowed.
If a whole video is unknown, then it should be - mentioned just once with ",,,,,,,,,," in place of LABEL, - [INSTANCE_ID],TIMESTAMP,BOUNDING_BOX. Sample top level CSV file: - TRAIN,gs://folder/train_videos.csv - TEST,gs://folder/test_videos.csv - UNASSIGNED,gs://folder/other_videos.csv Seven sample rows of a - CSV file for a particular ML_USE: - gs://folder/video1.avi,car,1,12.10,0.8,0.8,0.9,0.8,0.9,0.9,0.8,0.9 - gs://folder/video1.avi,car,1,12.90,0.4,0.8,0.5,0.8,0.5,0.9,0.4,0.9 - gs://folder/video1.avi,car,2,12.10,.4,.2,.5,.2,.5,.3,.4,.3 - gs://folder/video1.avi,car,2,12.90,.8,.2,,,.9,.3,, - gs://folder/video1.avi,bike,,12.50,.45,.45,,,.55,.55,, - gs://folder/video2.avi,car,1,0,.1,.9,,,.9,.1,, - gs://folder/video2.avi,,,,,,,,,,, - - - For Text Extraction: CSV file(s) with each line in format: - ML_USE,GCS_FILE_PATH GCS_FILE_PATH leads to a .JSONL (that is, - JSON Lines) file which either imports text in-line or as - documents. Any given .JSONL file must be 100MB or smaller. The - in-line .JSONL file contains, per line, a proto that wraps a - TextSnippet proto (in json representation) followed by one or - more AnnotationPayload protos (called annotations), which have - display_name and text_extraction detail populated. The given text - is expected to be annotated exhaustively, for example, if you - look for animals and text contains "dolphin" that is not labeled, - then "dolphin" is assumed to not be an animal. Any given text - snippet content must be 10KB or smaller, and also be UTF-8 NFC - encoded (ASCII already is). The document .JSONL file contains, - per line, a proto that wraps a Document proto. The Document proto - must have either document_text or input_config set. In - document_text case, the Document proto may also contain the - spatial information of the document, including layout, document - dimension and page number. In input_config case, only PDF - documents are supported now, and each document may be up to 2MB - large. Currently, annotations on documents cannot be specified at - import. Three sample CSV rows: TRAIN,gs://folder/file1.jsonl - VALIDATE,gs://folder/file2.jsonl TEST,gs://folder/file3.jsonl - Sample in-line JSON Lines file for entity extraction (presented - here with artificial line breaks, but the only actual line break - is denoted by \\n).: { "document": { "document_text": {"content": - "dog cat"} "layout": [ { "text_segment": { "start_offset": 0, - "end_offset": 3, }, "page_number": 1, "bounding_poly": { - "normalized_vertices": [ {"x": 0.1, "y": 0.1}, {"x": 0.1, "y": - 0.3}, {"x": 0.3, "y": 0.3}, {"x": 0.3, "y": 0.1}, ], }, - "text_segment_type": TOKEN, }, { "text_segment": { - "start_offset": 4, "end_offset": 7, }, "page_number": 1, - "bounding_poly": { "normalized_vertices": [ {"x": 0.4, "y": 0.1}, - {"x": 0.4, "y": 0.3}, {"x": 0.8, "y": 0.3}, {"x": 0.8, "y": 0.1}, - ], }, "text_segment_type": TOKEN, } - - :: - - ], - "document_dimensions": { - "width": 8.27, - "height": 11.69, - "unit": INCH, - } - "page_count": 1, - }, - "annotations": [ - { - "display_name": "animal", - "text_extraction": {"text_segment": {"start_offset": 0, - "end_offset": 3}} - }, - { - "display_name": "animal", - "text_extraction": {"text_segment": {"start_offset": 4, - "end_offset": 7}} - } - ], - }\n - { - "text_snippet": { - "content": "This dog is good." 
- }, - "annotations": [ - { - "display_name": "animal", - "text_extraction": { - "text_segment": {"start_offset": 5, "end_offset": 8} - } - } - ] - } - Sample document JSON Lines file (presented here with artificial line - breaks, but the only actual line break is denoted by \n).: - { - "document": { - "input_config": { - "gcs_source": { "input_uris": [ "gs://folder/document1.pdf" ] - } - } - } - }\n - { - "document": { - "input_config": { - "gcs_source": { "input_uris": [ "gs://folder/document2.pdf" ] - } - } - } - } - - - For Text Classification: CSV file(s) with each line in format: - ML_USE,(TEXT_SNIPPET \| GCS_FILE_PATH),LABEL,LABEL,... - TEXT_SNIPPET and GCS_FILE_PATH are distinguished by a pattern. If - the column content is a valid GCS file path, i.e. prefixed by - "gs://", it will be treated as a GCS_FILE_PATH, else if the - content is enclosed within double quotes (""), it is treated as a - TEXT_SNIPPET. In the GCS_FILE_PATH case, the path must lead to a - .txt file with UTF-8 encoding, for example, - "gs://folder/content.txt", and the content in it is extracted as - a text snippet. In the TEXT_SNIPPET case, the column content - excluding quotes is treated as the text snippet to import. In - both cases, the text snippet/file size must be within 128kB. - A maximum of 100 unique labels is allowed per CSV row. Sample rows: - TRAIN,"They have bad food and very rude",RudeService,BadFood - TRAIN,gs://folder/content.txt,SlowService TEST,"Typically always - bad service there.",RudeService VALIDATE,"Stomach ache to - go.",BadFood - - - For Text Sentiment: CSV file(s) with each line in format: - ML_USE,(TEXT_SNIPPET \| GCS_FILE_PATH),SENTIMENT TEXT_SNIPPET and - GCS_FILE_PATH are distinguished by a pattern. If the column - content is a valid GCS file path, that is, prefixed by "gs://", - it is treated as a GCS_FILE_PATH, otherwise it is treated as a - TEXT_SNIPPET. In the GCS_FILE_PATH case, the path must lead to a - .txt file with UTF-8 encoding, for example, - "gs://folder/content.txt", and the content in it is extracted as - a text snippet. In the TEXT_SNIPPET case, the column content - itself is treated as the text snippet to import. In both cases, - the text snippet must be up to 500 characters long. Sample rows: - TRAIN,"@freewrytin this is way too good for your product",2 - TRAIN,"I need this product so bad",3 TEST,"Thank you for this - product.",4 VALIDATE,gs://folder/content.txt,2 - - - For Tables: Either - [gcs_source][google.cloud.automl.v1beta1.InputConfig.gcs_source] - or - - [bigquery_source][google.cloud.automl.v1beta1.InputConfig.bigquery_source] - can be used. All inputs are concatenated into a single - - [primary_table][google.cloud.automl.v1beta1.TablesDatasetMetadata.primary_table_name] - For gcs_source: CSV file(s), where the first row of the first file - is the header, containing unique column names. If the first row of a - subsequent file is the same as the header, then it is also treated - as a header. All other rows contain values for the corresponding - columns. Each .CSV file by itself must be 10GB or smaller, and their - total size must be 100GB or smaller.
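Before the sample rows below, a minimal, hypothetical sketch of the ``InputConfig`` message this docstring documents, for the Tables-on-GCS case just described (the URIs and the ``schema_inference_version`` value are illustrative; the param itself is described further down in this docstring):

.. code-block:: python

    # Hypothetical sketch only: an ImportData InputConfig reading Tables CSVs
    # from Google Cloud Storage.
    from google.cloud import automl_v1beta1

    input_config = automl_v1beta1.InputConfig(
        gcs_source=automl_v1beta1.GcsSource(
            input_uris=["gs://folder/tables_1.csv", "gs://folder/tables_2.csv"],
        ),
        params={"schema_inference_version": "1"},
    )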
First three sample rows of a - CSV file: "Id","First Name","Last Name","Dob","Addresses" - - "1","John","Doe","1968-01-22","[{"status":"current","address":"123_First_Avenue","city":"Seattle","state":"WA","zip":"11111","numberOfYears":"1"},{"status":"previous","address":"456_Main_Street","city":"Portland","state":"OR","zip":"22222","numberOfYears":"5"}]" - - "2","Jane","Doe","1980-10-16","[{"status":"current","address":"789_Any_Avenue","city":"Albany","state":"NY","zip":"33333","numberOfYears":"2"},{"status":"previous","address":"321_Main_Street","city":"Hoboken","state":"NJ","zip":"44444","numberOfYears":"3"}]} - For bigquery_source: A URI of a BigQuery table. The user data size - of the BigQuery table must be 100GB or smaller. An imported table - must have between 2 and 1,000 columns, inclusive, and between 1,000 - and 100,000,000 rows, inclusive. At most 5 import data operations - can run in parallel. Definitions: ML_USE = "TRAIN" \| "VALIDATE" \| - "TEST" \| "UNASSIGNED" Describes how the given example (file) should - be used for model training. "UNASSIGNED" can be used when the user has - no preference. GCS_FILE_PATH = A path to file on GCS, e.g. - "gs://folder/image1.png". LABEL = A display name of an object on an - image, video etc., e.g. "dog". Must be up to 32 characters long and - can consist only of ASCII Latin letters A-Z and a-z, underscores (_), - and ASCII digits 0-9. For each label an AnnotationSpec is created - whose display_name becomes the label; AnnotationSpecs are given back - in predictions. INSTANCE_ID = A positive integer that identifies a - specific instance of a labeled entity on an example. Used e.g. to - track two cars on a video while being able to tell apart which one - is which. BOUNDING_BOX = VERTEX,VERTEX,VERTEX,VERTEX \| - VERTEX,,,VERTEX,, A rectangle parallel to the frame of the example - (image, video). If 4 vertices are given they are connected by edges - in the order provided; if 2 are given they are recognized as - diagonally opposite vertices of the rectangle. VERTEX = - COORDINATE,COORDINATE First coordinate is horizontal (x), the second - is vertical (y). COORDINATE = A float in 0 to 1 range, relative to - total length of image or video in given dimension. For fractions the - leading non-decimal 0 can be omitted (e.g. 0.3 = .3). Point 0,0 is - in the top left. TIME_SEGMENT_START = TIME_OFFSET Expresses a beginning, - inclusive, of a time segment within an example that has a time - dimension (e.g. video). TIME_SEGMENT_END = TIME_OFFSET Expresses an - end, exclusive, of a time segment within an example that has a time - dimension (e.g. video). TIME_OFFSET = A number of seconds as - measured from the start of an example (e.g. video). Fractions are - allowed, up to a microsecond precision. "inf" is allowed, and it - means the end of the example. TEXT_SNIPPET = The content of a text - snippet, UTF-8 encoded, enclosed within double quotes (""). - SENTIMENT = An integer between 0 and - Dataset.text_sentiment_dataset_metadata.sentiment_max (inclusive). - Describes the ordinal of the sentiment - higher value means a more - positive sentiment. All the values are completely relative, i.e. - neither 0 needs to mean a negative or neutral sentiment nor - sentiment_max needs to mean a positive one - it is just required - that 0 is the least positive sentiment in the data, and - sentiment_max is the most positive one. The SENTIMENT shouldn't be - confused with "score" or "magnitude" from the previous Natural - Language Sentiment Analysis API.
All SENTIMENT values between 0 and - sentiment_max must be represented in the imported data. On - prediction the same 0 to sentiment_max range will be used. The - difference between neighboring sentiment values need not be - uniform, e.g. 1 and 2 may be similar whereas the difference between - 2 and 3 may be huge. - - Errors: If any of the provided CSV files can't be parsed or if more - than a certain percentage of CSV rows cannot be processed then the - operation fails and nothing is imported. Regardless of overall - success or failure, the per-row failures, up to a certain count cap, - are listed in Operation.metadata.partial_failures. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - gcs_source (google.cloud.automl_v1beta1.types.GcsSource): - The Google Cloud Storage location for the input content. In - ImportData, the gcs_source points to a CSV with the structure - described in the comment. - - This field is a member of `oneof`_ ``source``. - bigquery_source (google.cloud.automl_v1beta1.types.BigQuerySource): - The BigQuery location for the input content. - - This field is a member of `oneof`_ ``source``. - params (MutableMapping[str, str]): - Additional domain-specific parameters describing the - semantics of the imported data, any string must be up to - 25000 characters long. - - - For Tables: ``schema_inference_version`` - (integer) - Required. The version of the algorithm that should be - used for the initial inference of the schema (columns' - DataTypes) of the table the data is being imported into. - Allowed values: "1". - """ - - gcs_source: 'GcsSource' = proto.Field( - proto.MESSAGE, - number=1, - oneof='source', - message='GcsSource', - ) - bigquery_source: 'BigQuerySource' = proto.Field( - proto.MESSAGE, - number=3, - oneof='source', - message='BigQuerySource', - ) - params: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=2, - ) - - -class BatchPredictInputConfig(proto.Message): - r"""Input configuration for BatchPredict Action. - - The format of input depends on the ML problem of the model used for - prediction. As input source the - [gcs_source][google.cloud.automl.v1beta1.InputConfig.gcs_source] is - expected, unless specified otherwise. - - The formats are represented in EBNF with commas being literal and - with non-terminal symbols defined near the end of this comment. The - formats are: - - - For Image Classification: CSV file(s) with each line having just - a single column: GCS_FILE_PATH which leads to an image of up to 30MB - in size. Supported extensions: .JPEG, .GIF, .PNG. This path is - treated as the ID in the Batch predict output. Three sample rows: - gs://folder/image1.jpeg gs://folder/image2.gif - gs://folder/image3.png - - - For Image Object Detection: CSV file(s) with each line having - just a single column: GCS_FILE_PATH which leads to an image of up to - 30MB in size. Supported extensions: .JPEG, .GIF, .PNG. This path - is treated as the ID in the Batch predict output. Three sample
Three sample - rows: gs://folder/image1.jpeg gs://folder/image2.gif - gs://folder/image3.png - - - For Video Classification: CSV file(s) with each line in format: - GCS_FILE_PATH,TIME_SEGMENT_START,TIME_SEGMENT_END GCS_FILE_PATH - leads to video of up to 50GB in size and up to 3h duration. - Supported extensions: .MOV, .MPEG4, .MP4, .AVI. - TIME_SEGMENT_START and TIME_SEGMENT_END must be within the length - of the video, and end has to be after the start. Three sample - rows: gs://folder/video1.mp4,10,40 gs://folder/video1.mp4,20,60 - gs://folder/vid2.mov,0,inf - - - For Video Object Tracking: CSV file(s) with each line in format: - GCS_FILE_PATH,TIME_SEGMENT_START,TIME_SEGMENT_END GCS_FILE_PATH - leads to video of up to 50GB in size and up to 3h duration. - Supported extensions: .MOV, .MPEG4, .MP4, .AVI. - TIME_SEGMENT_START and TIME_SEGMENT_END must be within the length - of the video, and end has to be after the start. Three sample - rows: gs://folder/video1.mp4,10,240 - gs://folder/video1.mp4,300,360 gs://folder/vid2.mov,0,inf - - - For Text Classification: CSV file(s) with each line having just a - single column: GCS_FILE_PATH \| TEXT_SNIPPET Any given text file - can have size upto 128kB. Any given text snippet content must - have 60,000 characters or less. Three sample rows: - gs://folder/text1.txt "Some text content to predict" - gs://folder/text3.pdf Supported file extensions: .txt, .pdf - - - For Text Sentiment: CSV file(s) with each line having just a - single column: GCS_FILE_PATH \| TEXT_SNIPPET Any given text file - can have size upto 128kB. Any given text snippet content must - have 500 characters or less. Three sample rows: - gs://folder/text1.txt "Some text content to predict" - gs://folder/text3.pdf Supported file extensions: .txt, .pdf - - - For Text Extraction .JSONL (i.e. JSON Lines) file(s) which either - provide text in-line or as documents (for a single BatchPredict - call only one of the these formats may be used). The in-line - .JSONL file(s) contain per line a proto that wraps a temporary - user-assigned TextSnippet ID (string up to 2000 characters long) - called "id", a TextSnippet proto (in json representation) and - zero or more TextFeature protos. Any given text snippet content - must have 30,000 characters or less, and also be UTF-8 NFC - encoded (ASCII already is). The IDs provided should be unique. - The document .JSONL file(s) contain, per line, a proto that wraps - a Document proto with input_config set. Only PDF documents are - supported now, and each document must be up to 2MB large. Any - given .JSONL file must be 100MB or smaller, and no more than 20 - files may be given. 
Sample in-line JSON Lines file (presented - here with artificial line breaks, but the only actual line break - is denoted by \\n): { "id": "my_first_id", "text_snippet": { - "content": "dog car cat"}, "text_features": [ { "text_segment": - {"start_offset": 4, "end_offset": 6}, "structural_type": - PARAGRAPH, "bounding_poly": { "normalized_vertices": [ {"x": 0.1, - "y": 0.1}, {"x": 0.1, "y": 0.3}, {"x": 0.3, "y": 0.3}, {"x": 0.3, - "y": 0.1}, ] }, } ], }\n { "id": "2", "text_snippet": { - "content": "An elaborate content", "mime_type": "text/plain" } } - Sample document JSON Lines file (presented here with artificial - line breaks, but the only actual line break is denoted by \\n).: - { "document": { "input_config": { "gcs_source": { "input_uris": [ - "gs://folder/document1.pdf" ] } } } }\n { "document": { - "input_config": { "gcs_source": { "input_uris": [ - "gs://folder/document2.pdf" ] } } } } - - - For Tables: Either - [gcs_source][google.cloud.automl.v1beta1.InputConfig.gcs_source] - or - - [bigquery_source][google.cloud.automl.v1beta1.InputConfig.bigquery_source]. - GCS case: CSV file(s), each by itself 10GB or smaller and total size - must be 100GB or smaller, where the first file must have a header - containing column names. If the first row of a subsequent file is - the same as the header, then it is also treated as a header. All - other rows contain values for the corresponding columns. The column - names must contain the model's - - [input_feature_column_specs'][google.cloud.automl.v1beta1.TablesModelMetadata.input_feature_column_specs] - - [display_name-s][google.cloud.automl.v1beta1.ColumnSpec.display_name] - (order doesn't matter). The columns corresponding to the model's - input feature column specs must contain values compatible with the - column spec's data types. Prediction on all the rows, i.e. the CSV - lines, will be attempted. For FORECASTING - - [prediction_type][google.cloud.automl.v1beta1.TablesModelMetadata.prediction_type]: - all columns having - - [TIME_SERIES_AVAILABLE_PAST_ONLY][google.cloud.automl.v1beta1.ColumnSpec.ForecastingMetadata.ColumnType] - type will be ignored. First three sample rows of a CSV file: "First - Name","Last Name","Dob","Addresses" - - "John","Doe","1968-01-22","[{"status":"current","address":"123_First_Avenue","city":"Seattle","state":"WA","zip":"11111","numberOfYears":"1"},{"status":"previous","address":"456_Main_Street","city":"Portland","state":"OR","zip":"22222","numberOfYears":"5"}]" - - "Jane","Doe","1980-10-16","[{"status":"current","address":"789_Any_Avenue","city":"Albany","state":"NY","zip":"33333","numberOfYears":"2"},{"status":"previous","address":"321_Main_Street","city":"Hoboken","state":"NJ","zip":"44444","numberOfYears":"3"}]} - BigQuery case: A URI of a BigQuery table. The user data size of the - BigQuery table must be 100GB or smaller. The column names must - contain the model's - - [input_feature_column_specs'][google.cloud.automl.v1beta1.TablesModelMetadata.input_feature_column_specs] - - [display_name-s][google.cloud.automl.v1beta1.ColumnSpec.display_name] - (order doesn't matter). The columns corresponding to the model's - input feature column specs must contain values compatible with the - column spec's data types. Prediction on all the rows of the table - will be attempted.
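A minimal, hypothetical sketch of a ``BatchPredictInputConfig`` for the BigQuery case just described (the table URI is illustrative):

.. code-block:: python

    # Hypothetical sketch only: BatchPredict input read from a BigQuery table.
    from google.cloud import automl_v1beta1

    batch_input = automl_v1beta1.BatchPredictInputConfig(
        bigquery_source=automl_v1beta1.BigQuerySource(
            input_uri="bq://my-project.my_dataset.my_table",
        ),
    )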
For FORECASTING - - [prediction_type][google.cloud.automl.v1beta1.TablesModelMetadata.prediction_type]: - all columns having - - [TIME_SERIES_AVAILABLE_PAST_ONLY][google.cloud.automl.v1beta1.ColumnSpec.ForecastingMetadata.ColumnType] - type will be ignored. - - Definitions: GCS_FILE_PATH = A path to file on GCS, e.g. - "gs://folder/video.avi". TEXT_SNIPPET = The content of a text snippet, - UTF-8 encoded, enclosed within double quotes ("") TIME_SEGMENT_START - = TIME_OFFSET Expresses a beginning, inclusive, of a time segment - within an example that has a time dimension (e.g. video). - TIME_SEGMENT_END = TIME_OFFSET Expresses an end, exclusive, of a - time segment within an example that has a time dimension (e.g. - video). TIME_OFFSET = A number of seconds as measured from the start - of an example (e.g. video). Fractions are allowed, up to a - microsecond precision. "inf" is allowed and it means the end of the - example. - - Errors: If any of the provided CSV files can't be parsed or if more - than a certain percentage of CSV rows cannot be processed then the - operation fails and prediction does not happen. Regardless of - overall success or failure, the per-row failures, up to a certain - count cap, will be listed in Operation.metadata.partial_failures. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - gcs_source (google.cloud.automl_v1beta1.types.GcsSource): - The Google Cloud Storage location for the - input content. - - This field is a member of `oneof`_ ``source``. - bigquery_source (google.cloud.automl_v1beta1.types.BigQuerySource): - The BigQuery location for the input content. - - This field is a member of `oneof`_ ``source``. - """ - - gcs_source: 'GcsSource' = proto.Field( - proto.MESSAGE, - number=1, - oneof='source', - message='GcsSource', - ) - bigquery_source: 'BigQuerySource' = proto.Field( - proto.MESSAGE, - number=2, - oneof='source', - message='BigQuerySource', - ) - - -class DocumentInputConfig(proto.Message): - r"""Input configuration of a - [Document][google.cloud.automl.v1beta1.Document]. - - Attributes: - gcs_source (google.cloud.automl_v1beta1.types.GcsSource): - The Google Cloud Storage location of the - document file. Only a single path should be - given. Max supported size: 512MB. - Supported extensions: .PDF. - """ - - gcs_source: 'GcsSource' = proto.Field( - proto.MESSAGE, - number=1, - message='GcsSource', - ) - - -class OutputConfig(proto.Message): - r"""- For Translation: CSV file ``translation.csv``, with each line in - format: ML_USE,GCS_FILE_PATH GCS_FILE_PATH leads to a .TSV file - which describes examples that have the given ML_USE, using the - following row format per line: TEXT_SNIPPET (in source language) - \\t TEXT_SNIPPET (in target language) - - - For Tables: Output depends on whether the dataset was imported - from GCS or BigQuery. GCS case: - - [gcs_destination][google.cloud.automl.v1beta1.OutputConfig.gcs_destination] - must be set. Exported are CSV file(s) ``tables_1.csv``, - ``tables_2.csv``,...,\ ``tables_N.csv`` with each having the table's - column names as the header line, and all other lines containing values - for the header columns.
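A minimal, hypothetical sketch of an ``OutputConfig`` for the GCS case just described (the bucket path is illustrative):

.. code-block:: python

    # Hypothetical sketch only: ExportData output written under a GCS prefix;
    # the service creates a timestamped export_data-* directory beneath it.
    from google.cloud import automl_v1beta1

    output_config = automl_v1beta1.OutputConfig(
        gcs_destination=automl_v1beta1.GcsDestination(
            output_uri_prefix="gs://folder/export/",
        ),
    )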
BigQuery case: - - [bigquery_destination][google.cloud.automl.v1beta1.OutputConfig.bigquery_destination] - pointing to a BigQuery project must be set. In the given project a - new dataset will be created with name - - ``export_data_<automl-dataset-display-name>_<timestamp-of-export-call>`` - where <automl-dataset-display-name> will be made BigQuery-dataset-name - compatible (e.g. most special characters will become underscores), and - timestamp will be in YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" - format. In that dataset a new table called ``primary_table`` will be - created, and filled with precisely the same data as was obtained on - import. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - gcs_destination (google.cloud.automl_v1beta1.types.GcsDestination): - The Google Cloud Storage location where the output is to be - written to. For Image Object Detection, Text Extraction, - Video Classification and Tables, in the given directory a - new directory will be created with name: - export_data-<automl-dataset-display-name>-<timestamp-of-export-call>, - where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format. - All export output will be written into that directory. - - This field is a member of `oneof`_ ``destination``. - bigquery_destination (google.cloud.automl_v1beta1.types.BigQueryDestination): - The BigQuery location where the output is to - be written to. - - This field is a member of `oneof`_ ``destination``. - """ - - gcs_destination: 'GcsDestination' = proto.Field( - proto.MESSAGE, - number=1, - oneof='destination', - message='GcsDestination', - ) - bigquery_destination: 'BigQueryDestination' = proto.Field( - proto.MESSAGE, - number=2, - oneof='destination', - message='BigQueryDestination', - ) - - -class BatchPredictOutputConfig(proto.Message): - r"""Output configuration for BatchPredict Action. - - As destination the - - [gcs_destination][google.cloud.automl.v1beta1.BatchPredictOutputConfig.gcs_destination] - must be set unless specified otherwise for a domain. If - gcs_destination is set then in the given directory a new directory - is created. Its name will be - "prediction-<model-display-name>-<timestamp-of-prediction-call>", - where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format. The - contents of it depend on the ML problem the predictions are made for. - - - For Image Classification: In the created directory files - ``image_classification_1.jsonl``, - ``image_classification_2.jsonl``,...,\ ``image_classification_N.jsonl`` - will be created, where N may be 1, and depends on the total - number of the successfully predicted images and annotations. A - single image will be listed only once with all its annotations, - and its annotations will never be split across files. Each .JSONL - file will contain, per line, a JSON representation of a proto - that wraps image's "ID" : "<id_value>" followed by a list of zero - or more AnnotationPayload protos (called annotations), which have - classification detail populated. If prediction for any image - failed (partially or completely), then an additional - ``errors_1.jsonl``, ``errors_2.jsonl``,..., ``errors_N.jsonl`` - files will be created (N depends on total number of failed - predictions).
These files will have a JSON representation of a - proto that wraps the same "ID" : "<id_value>" but here followed - by exactly one - - [``google.rpc.Status``](https://github.com/googleapis/googleapis/blob/master/google/rpc/status.proto) - containing only ``code`` and ``message``\ fields. - - - For Image Object Detection: In the created directory files - ``image_object_detection_1.jsonl``, - ``image_object_detection_2.jsonl``,...,\ ``image_object_detection_N.jsonl`` - will be created, where N may be 1, and depends on the total - number of the successfully predicted images and annotations. Each - .JSONL file will contain, per line, a JSON representation of a - proto that wraps image's "ID" : "<id_value>" followed by a list - of zero or more AnnotationPayload protos (called annotations), - which have image_object_detection detail populated. A single - image will be listed only once with all its annotations, and its - annotations will never be split across files. If prediction for - any image failed (partially or completely), then additional - ``errors_1.jsonl``, ``errors_2.jsonl``,..., ``errors_N.jsonl`` - files will be created (N depends on total number of failed - predictions). These files will have a JSON representation of a - proto that wraps the same "ID" : "<id_value>" but here followed - by exactly one - - [``google.rpc.Status``](https://github.com/googleapis/googleapis/blob/master/google/rpc/status.proto) - containing only ``code`` and ``message``\ fields. - - - For Video Classification: In the created directory a - video_classification.csv file, and a .JSON file per each video - classification requested in the input (i.e. each line in given - CSV(s)), will be created. - - :: - - The format of video_classification.csv is: - - GCS_FILE_PATH,TIME_SEGMENT_START,TIME_SEGMENT_END,JSON_FILE_NAME,STATUS - where: GCS_FILE_PATH,TIME_SEGMENT_START,TIME_SEGMENT_END = matches 1 - to 1 the prediction input lines (i.e. video_classification.csv has - precisely the same number of lines as the prediction input had.) - JSON_FILE_NAME = Name of .JSON file in the output directory, which - contains prediction responses for the video time segment. STATUS = - "OK" if prediction completed successfully, or an error code with - message otherwise. If STATUS is not "OK" then the .JSON file for - that line may not exist or be empty. - - :: - - Each .JSON file, assuming STATUS is "OK", will contain a list of - AnnotationPayload protos in JSON format, which are the predictions - for the video time segment the file is assigned to in the - video_classification.csv. All AnnotationPayload protos will have - video_classification field set, and will be sorted by - video_classification.type field (note that the returned types are - governed by `classification_types` parameter in - [PredictService.BatchPredictRequest.params][]). - - - For Video Object Tracking: In the created directory a - video_object_tracking.csv file will be created, and multiple - files video_object_tracking_1.json, - video_object_tracking_2.json,..., video_object_tracking_N.json, - where N is the number of requests in the input (i.e. the number - of lines in given CSV(s)). - - :: - - The format of video_object_tracking.csv is: - - GCS_FILE_PATH,TIME_SEGMENT_START,TIME_SEGMENT_END,JSON_FILE_NAME,STATUS - where: GCS_FILE_PATH,TIME_SEGMENT_START,TIME_SEGMENT_END = matches 1 - to 1 the prediction input lines (i.e. video_object_tracking.csv has - precisely the same number of lines as the prediction input had.)
- JSON_FILE_NAME = Name of .JSON file in the output directory, which -      contains prediction responses for the video time segment. STATUS = -      "OK" if prediction completed successfully, or an error code with -      message otherwise. If STATUS is not "OK" then the .JSON file for -      that line may not exist or be empty. -
-      :: -
-         Each .JSON file, assuming STATUS is "OK", will contain a list of -         AnnotationPayload protos in JSON format, which are the predictions -         for each frame of the video time segment the file is assigned to in -         video_object_tracking.csv. All AnnotationPayload protos will have -         video_object_tracking field set. -
-   -  For Text Classification: In the created directory files -      ``text_classification_1.jsonl``, -      ``text_classification_2.jsonl``,...,\ ``text_classification_N.jsonl`` -      will be created, where N may be 1, and depends on the total -      number of inputs and annotations found. -
-      :: -
-         Each .JSONL file will contain, per line, a JSON representation of a -         proto that wraps input text snippet or input text file and a list of -         zero or more AnnotationPayload protos (called annotations), which -         have classification detail populated. A single text snippet or file -         will be listed only once with all its annotations, and its -         annotations will never be split across files. -
-         If prediction for any text snippet or file failed (partially or -         completely), then additional `errors_1.jsonl`, `errors_2.jsonl`,..., -         `errors_N.jsonl` files will be created (N depends on total number of -         failed predictions). These files will have a JSON representation of a -         proto that wraps input text snippet or input text file followed by -         exactly one -
-      [``google.rpc.Status``](https://github.com/googleapis/googleapis/blob/master/google/rpc/status.proto) -      containing only ``code`` and ``message``. -
-   -  For Text Sentiment: In the created directory files -      ``text_sentiment_1.jsonl``, -      ``text_sentiment_2.jsonl``,...,\ ``text_sentiment_N.jsonl`` will -      be created, where N may be 1, and depends on the total number of -      inputs and annotations found. -
-      :: -
-         Each .JSONL file will contain, per line, a JSON representation of a -         proto that wraps input text snippet or input text file and a list of -         zero or more AnnotationPayload protos (called annotations), which -         have text_sentiment detail populated. A single text snippet or file -         will be listed only once with all its annotations, and its -         annotations will never be split across files. -
-         If prediction for any text snippet or file failed (partially or -         completely), then additional `errors_1.jsonl`, `errors_2.jsonl`,..., -         `errors_N.jsonl` files will be created (N depends on total number of -         failed predictions). These files will have a JSON representation of a -         proto that wraps input text snippet or input text file followed by -         exactly one -
-      [``google.rpc.Status``](https://github.com/googleapis/googleapis/blob/master/google/rpc/status.proto) -      containing only ``code`` and ``message``. -
-   -  For Text Extraction: In the created directory files -      ``text_extraction_1.jsonl``, -      ``text_extraction_2.jsonl``,...,\ ``text_extraction_N.jsonl`` -      will be created, where N may be 1, and depends on the total -      number of inputs and annotations found. The contents of these -      .JSONL file(s) depend on whether the input used inline text, or -      documents.
If input was inline, then each .JSONL file will -      contain, per line, a JSON representation of a proto that wraps -      given in request text snippet's "id" (if specified), followed by -      input text snippet, and a list of zero or more AnnotationPayload -      protos (called annotations), which have text_extraction detail -      populated. A single text snippet will be listed only once with -      all its annotations, and its annotations will never be split -      across files. If input used documents, then each .JSONL file will -      contain, per line, a JSON representation of a proto that wraps -      given in request document proto, followed by its OCR-ed -      representation in the form of a text snippet, finally followed by -      a list of zero or more AnnotationPayload protos (called -      annotations), which have text_extraction detail populated and -      refer, via their indices, to the OCR-ed text snippet. A single -      document (and its text snippet) will be listed only once with all -      its annotations, and its annotations will never be split across -      files. If prediction for any text snippet failed (partially or -      completely), then additional ``errors_1.jsonl``, -      ``errors_2.jsonl``,..., ``errors_N.jsonl`` files will be created -      (N depends on total number of failed predictions). These files -      will have a JSON representation of a proto that wraps either the -      "id" : "<id_value>" (in case of inline) or the document proto (in -      case of document) but here followed by exactly one -
-      [``google.rpc.Status``](https://github.com/googleapis/googleapis/blob/master/google/rpc/status.proto) -      containing only ``code`` and ``message``. -
-   -  For Tables: Output depends on whether -
-   [gcs_destination][google.cloud.automl.v1beta1.BatchPredictOutputConfig.gcs_destination] -   or -
-   [bigquery_destination][google.cloud.automl.v1beta1.BatchPredictOutputConfig.bigquery_destination] -   is set (either is allowed). GCS case: In the created directory files -   ``tables_1.csv``, ``tables_2.csv``,..., ``tables_N.csv`` will be -   created, where N may be 1, and depends on the total number of the -   successfully predicted rows. For all CLASSIFICATION -
-   [prediction_type-s][google.cloud.automl.v1beta1.TablesModelMetadata.prediction_type]: -   Each .csv file will contain a header, listing all columns' -
-   [display_name-s][google.cloud.automl.v1beta1.ColumnSpec.display_name] -   given on input followed by M target column names in the format of -
-   "<[target_column_specs][google.cloud.automl.v1beta1.TablesModelMetadata.target_column_spec] -
-   [display_name][google.cloud.automl.v1beta1.ColumnSpec.display_name]>_<target value>_score" -   where M is the number of distinct target values, i.e. number of -   distinct values in the target column of the table used to train the -   model. Subsequent lines will contain the respective values of -   successfully predicted rows, with the last, i.e. the target, columns -   having the corresponding prediction -   [scores][google.cloud.automl.v1beta1.TablesAnnotation.score].
For -   REGRESSION and FORECASTING -
-   [prediction_type-s][google.cloud.automl.v1beta1.TablesModelMetadata.prediction_type]: -   Each .csv file will contain a header, listing all columns' -   [display_name-s][google.cloud.automl.v1beta1.ColumnSpec.display_name] given on -   input followed by the predicted target column with name in the -   format of -
-   "predicted_<[target_column_specs][google.cloud.automl.v1beta1.TablesModelMetadata.target_column_spec] -
-   [display_name][google.cloud.automl.v1beta1.ColumnSpec.display_name]>" -   Subsequent lines will contain the respective values of successfully -   predicted rows, with the last, i.e. the target, column having the -   predicted target value. If prediction for any rows failed, then -   additional ``errors_1.csv``, ``errors_2.csv``,..., ``errors_N.csv`` -   will be created (N depends on total number of failed rows). These -   files will have an analogous format to ``tables_*.csv``, but always -   with a single target column having -
-   [``google.rpc.Status``](https://github.com/googleapis/googleapis/blob/master/google/rpc/status.proto) -   represented as a JSON string, and containing only ``code`` and -   ``message``. BigQuery case: -
-   [bigquery_destination][google.cloud.automl.v1beta1.BatchPredictOutputConfig.bigquery_destination] -   pointing to a BigQuery project must be set. In the given project a -   new dataset will be created with name -   ``prediction_<model-display-name>_<timestamp-of-prediction-call>`` -   where <model-display-name> will be made BigQuery-dataset-name compatible (e.g. most -   special characters will become underscores), and timestamp will be -   in YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the -   dataset two tables will be created, ``predictions``, and ``errors``. -   The ``predictions`` table's column names will be the input columns' -
-   [display_name-s][google.cloud.automl.v1beta1.ColumnSpec.display_name] -   followed by the target column with name in the format of -
-   "predicted_<[target_column_specs][google.cloud.automl.v1beta1.TablesModelMetadata.target_column_spec] -
-   [display_name][google.cloud.automl.v1beta1.ColumnSpec.display_name]>" -   The input feature columns will contain the respective values of -   successfully predicted rows, with the target column having an ARRAY -   of -
-   [AnnotationPayloads][google.cloud.automl.v1beta1.AnnotationPayload], -   represented as STRUCT-s, containing -   [TablesAnnotation][google.cloud.automl.v1beta1.TablesAnnotation]. -   The ``errors`` table contains rows for which the prediction has -   failed; it has analogous input columns while the target column name -   is in the format of -
-   "errors_<[target_column_specs][google.cloud.automl.v1beta1.TablesModelMetadata.target_column_spec] -
-   [display_name][google.cloud.automl.v1beta1.ColumnSpec.display_name]>", -   and as a value has -
-   [``google.rpc.Status``](https://github.com/googleapis/googleapis/blob/master/google/rpc/status.proto) -   represented as a STRUCT, and containing only ``code`` and -   ``message``. -
-   This message has `oneof`_ fields (mutually exclusive fields). -   For each oneof, at most one member field can be set at the same time. -   Setting any member of the oneof automatically clears all other -   members. -
-   .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields -
-   Attributes: -       gcs_destination (google.cloud.automl_v1beta1.types.GcsDestination): -           The Google Cloud Storage location of the -           directory where the output is to be written to. -
-           This field is a member of `oneof`_ ``destination``. -       bigquery_destination (google.cloud.automl_v1beta1.types.BigQueryDestination): -           The BigQuery location where the output is to -           be written to. -
-           This field is a member of `oneof`_ ``destination``. -   """ -
-   gcs_destination: 'GcsDestination' = proto.Field( -       proto.MESSAGE, -       number=1, -       oneof='destination', -       message='GcsDestination', -   ) -   bigquery_destination: 'BigQueryDestination' = proto.Field( -       proto.MESSAGE, -       number=2, -       oneof='destination', -       message='BigQueryDestination', -   ) -
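The ``destination`` oneof above means a config can carry either a GCS or a BigQuery destination, never both. A minimal sketch of that behavior, assuming the hypothetical bucket and project names shown:

.. code-block:: python

    from google.cloud import automl_v1beta1

    # Start with the GCS member of the ``destination`` oneof set.
    output_config = automl_v1beta1.BatchPredictOutputConfig(
        gcs_destination=automl_v1beta1.GcsDestination(
            output_uri_prefix="gs://my-bucket/predictions",  # hypothetical bucket
        )
    )

    # Assigning the other oneof member clears gcs_destination automatically.
    output_config.bigquery_destination = automl_v1beta1.BigQueryDestination(
        output_uri="bq://my-project",  # hypothetical project
    )

    # Inspect the underlying protobuf to see which member survived.
    pb = automl_v1beta1.BatchPredictOutputConfig.pb(output_config)
    print(pb.WhichOneof("destination"))  # -> "bigquery_destination"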
-
-class ModelExportOutputConfig(proto.Message): -   r"""Output configuration for ModelExport Action. -
-   This message has `oneof`_ fields (mutually exclusive fields). -   For each oneof, at most one member field can be set at the same time. -   Setting any member of the oneof automatically clears all other -   members. -
-   .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields -
-   Attributes: -       gcs_destination (google.cloud.automl_v1beta1.types.GcsDestination): -           The Google Cloud Storage location where the model is to be -           written to. This location may only be set for the following -           model formats: "tflite", "edgetpu_tflite", "tf_saved_model", -           "tf_js", "core_ml". -
-           Under the directory given as the destination a new one with -           name "model-export-<model-display-name>-<timestamp-of-export-call>", where timestamp is in -           YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format, will be created. -           Inside it, the model and any of its supporting files will be -           written. -
-           This field is a member of `oneof`_ ``destination``. -       gcr_destination (google.cloud.automl_v1beta1.types.GcrDestination): -           The GCR location where the model image is to -           be pushed to. This location may only be set for the -           following model formats: -
-           "docker". -
-           The model image will be created under the given -           URI. -
-           This field is a member of `oneof`_ ``destination``. -       model_format (str): -           The format in which the model must be exported. The -           available, and default, formats depend on the problem and -           model type (if given problem and type combination doesn't -           have a format listed, it means its models are not -           exportable): -
-           -  For Image Classification mobile-low-latency-1, -              mobile-versatile-1, mobile-high-accuracy-1: "tflite" -              (default), "edgetpu_tflite", "tf_saved_model", "tf_js", -              "docker". -
-           -  For Image Classification mobile-core-ml-low-latency-1, -              mobile-core-ml-versatile-1, -              mobile-core-ml-high-accuracy-1: "core_ml" (default). -
-           -  For Image Object Detection mobile-low-latency-1, -              mobile-versatile-1, mobile-high-accuracy-1: "tflite", -              "tf_saved_model", "tf_js". -
-           -  For Video Classification cloud, "tf_saved_model". -
-           -  For Video Object Tracking cloud, "tf_saved_model". -
-           -  For Video Object Tracking mobile-versatile-1: "tflite", -              "edgetpu_tflite", "tf_saved_model", "docker". -
-           -  For Video Object Tracking mobile-coral-versatile-1: -              "tflite", "edgetpu_tflite", "docker". -
-           -  For Video Object Tracking mobile-coral-low-latency-1: -              "tflite", "edgetpu_tflite", "docker". -
-           -  For Video Object Tracking mobile-jetson-versatile-1: -              "tf_saved_model", "docker". -
-           -  For Tables: "docker". -
-           Formats description: -
-           -  tflite - Used for Android mobile devices. -           -  edgetpu_tflite - Used for `Edge -              TPU <https://cloud.google.com/edge-tpu/>`__ devices. -           -  tf_saved_model - A TensorFlow model in SavedModel format. -           -  tf_js - A -              `TensorFlow.js <https://www.tensorflow.org/js>`__ model -              that can be used in the browser and in Node.js using -              JavaScript. -           -  docker - Used for Docker containers. Use the params field -              to customize the container. The container is verified to -              work correctly on the Ubuntu 16.04 operating system. See more -              at [containers -              quickstart](https://cloud.google.com/vision/automl/docs/containers-gcs-quickstart) -
-           -  core_ml - Used for iOS mobile devices. -       params (MutableMapping[str, str]): -           Additional model-type and format specific parameters -           describing the requirements for the model files to be -           exported; any string must be up to 25000 characters long. -
-           -  For ``docker`` format: ``cpu_architecture`` - (string) -              "x86_64" (default). ``gpu_architecture`` - (string) - -              "none" (default), "nvidia". -   """ -
-   gcs_destination: 'GcsDestination' = proto.Field( -       proto.MESSAGE, -       number=1, -       oneof='destination', -       message='GcsDestination', -   ) -   gcr_destination: 'GcrDestination' = proto.Field( -       proto.MESSAGE, -       number=3, -       oneof='destination', -       message='GcrDestination', -   ) -   model_format: str = proto.Field( -       proto.STRING, -       number=4, -   ) -   params: MutableMapping[str, str] = proto.MapField( -       proto.STRING, -       proto.STRING, -       number=2, -   ) -
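A sketch of how this config might be used to export a Tables model as a Docker container; the model resource name, bucket, and ``cpu_architecture`` value are hypothetical placeholders, and ``export_model`` returns a long-running operation:

.. code-block:: python

    from google.cloud import automl_v1beta1

    client = automl_v1beta1.AutoMlClient()

    export_config = automl_v1beta1.ModelExportOutputConfig(
        gcs_destination=automl_v1beta1.GcsDestination(
            output_uri_prefix="gs://my-bucket/model-exports",  # hypothetical bucket
        ),
        model_format="docker",
        params={"cpu_architecture": "x86_64"},
    )

    operation = client.export_model(
        request={
            # Hypothetical Tables model resource name.
            "name": "projects/my-project/locations/us-central1/models/TBL123",
            "output_config": export_config,
        }
    )
    operation.result()  # block until the export completes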
-
-class ExportEvaluatedExamplesOutputConfig(proto.Message): -   r"""Output configuration for ExportEvaluatedExamples Action. Note that -   this call is available only for 30 days after the model -   was evaluated. The output depends on the domain, as follows (note -   that only examples from the TEST set are exported): -
-   -  For Tables: -
-   [bigquery_destination][google.cloud.automl.v1beta1.OutputConfig.bigquery_destination] -   pointing to a BigQuery project must be set. In the given project a -   new dataset will be created with name -
-   ``export_evaluated_examples_<model-display-name>_<timestamp-of-export-call>`` -   where <model-display-name> will be made BigQuery-dataset-name compatible (e.g. most -   special characters will become underscores), and timestamp will be -   in YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the -   dataset an ``evaluated_examples`` table will be created. It will -   have all the same columns as the -
-   [primary_table][google.cloud.automl.v1beta1.TablesDatasetMetadata.primary_table_spec_id] -   of the [dataset][google.cloud.automl.v1beta1.Model.dataset_id] from -   which the model was created, as they were at the moment of model's -   evaluation (this includes the target column with its ground truth), -   followed by a column called "predicted_<target_column_specs display_name>". That last -   column will contain the model's prediction result for each -   respective row, given as ARRAY of -   [AnnotationPayloads][google.cloud.automl.v1beta1.AnnotationPayload], -   represented as STRUCT-s, containing -   [TablesAnnotation][google.cloud.automl.v1beta1.TablesAnnotation]. -
-
-   .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields -
-   Attributes: -       bigquery_destination (google.cloud.automl_v1beta1.types.BigQueryDestination): -           The BigQuery location where the output is to -           be written to. -
-           This field is a member of `oneof`_ ``destination``. -   """ -
-   bigquery_destination: 'BigQueryDestination' = proto.Field( -       proto.MESSAGE, -       number=2, -       oneof='destination', -       message='BigQueryDestination', -   ) -
-
-class GcsSource(proto.Message): -   r"""The Google Cloud Storage location for the input content. -
-   Attributes: -       input_uris (MutableSequence[str]): -           Required. Google Cloud Storage URIs to input files, up to -           2000 characters long. Accepted forms: -
-           -  Full object path, e.g. gs://bucket/directory/object.csv -   """ -
-   input_uris: MutableSequence[str] = proto.RepeatedField( -       proto.STRING, -       number=1, -   ) -
-
-class BigQuerySource(proto.Message): -   r"""The BigQuery location for the input content.
-
-   Attributes: -       input_uri (str): -           Required. BigQuery URI to a table, up to 2000 characters -           long. Accepted forms: -
-           -  BigQuery path e.g. bq://projectId.bqDatasetId.bqTableId -   """ -
-   input_uri: str = proto.Field( -       proto.STRING, -       number=1, -   ) -
-
-class GcsDestination(proto.Message): -   r"""The Google Cloud Storage location where the output is to be -   written to. -
-   Attributes: -       output_uri_prefix (str): -           Required. Google Cloud Storage URI to output directory, up -           to 2000 characters long. Accepted forms: -
-           -  Prefix path: gs://bucket/directory. The requesting user -              must have write permission to the bucket. The directory -              is created if it doesn't exist. -   """ -
-   output_uri_prefix: str = proto.Field( -       proto.STRING, -       number=1, -   ) -
-
-class BigQueryDestination(proto.Message): -   r"""The BigQuery location for the output content. -
-   Attributes: -       output_uri (str): -           Required. BigQuery URI to a project, up to 2000 characters -           long. Accepted forms: -
-           -  BigQuery path e.g. bq://projectId -   """ -
-   output_uri: str = proto.Field( -       proto.STRING, -       number=1, -   ) -
-
-class GcrDestination(proto.Message): -   r"""The GCR location where the image must be pushed to. -
-   Attributes: -       output_uri (str): -           Required. Google Container Registry URI of the new image, up -           to 2000 characters long. See -
-           https://cloud.google.com/container-registry/docs/pushing-and-pulling#pushing_an_image_to_a_registry -           Accepted forms: -
-           -  [HOSTNAME]/[PROJECT-ID]/[IMAGE] -           -  [HOSTNAME]/[PROJECT-ID]/[IMAGE]:[TAG] -
-           The requesting user must have permission to push images to the -           project. -   """ -
-   output_uri: str = proto.Field( -       proto.STRING, -       number=1, -   ) -
-
-__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/model.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/model.py deleted file mode 100644 index 49b096690401..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/model.py +++ /dev/null @@ -1,208 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -#     http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations -
-from typing import MutableMapping, MutableSequence -
-import proto  # type: ignore -
-from google.cloud.automl_v1beta1.types import image -from google.cloud.automl_v1beta1.types import tables -from google.cloud.automl_v1beta1.types import text -from google.cloud.automl_v1beta1.types import translation -from google.cloud.automl_v1beta1.types import video -from google.protobuf import timestamp_pb2  # type: ignore -
-
-__protobuf__ = proto.module( -    package='google.cloud.automl.v1beta1', -    manifest={ -        'Model', -    }, -) -
-
-class Model(proto.Message): -   r"""API proto representing a trained machine learning model. -
-   This message has `oneof`_ fields (mutually exclusive fields). -   For each oneof, at most one member field can be set at the same time.
- Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - translation_model_metadata (google.cloud.automl_v1beta1.types.TranslationModelMetadata): - Metadata for translation models. - - This field is a member of `oneof`_ ``model_metadata``. - image_classification_model_metadata (google.cloud.automl_v1beta1.types.ImageClassificationModelMetadata): - Metadata for image classification models. - - This field is a member of `oneof`_ ``model_metadata``. - text_classification_model_metadata (google.cloud.automl_v1beta1.types.TextClassificationModelMetadata): - Metadata for text classification models. - - This field is a member of `oneof`_ ``model_metadata``. - image_object_detection_model_metadata (google.cloud.automl_v1beta1.types.ImageObjectDetectionModelMetadata): - Metadata for image object detection models. - - This field is a member of `oneof`_ ``model_metadata``. - video_classification_model_metadata (google.cloud.automl_v1beta1.types.VideoClassificationModelMetadata): - Metadata for video classification models. - - This field is a member of `oneof`_ ``model_metadata``. - video_object_tracking_model_metadata (google.cloud.automl_v1beta1.types.VideoObjectTrackingModelMetadata): - Metadata for video object tracking models. - - This field is a member of `oneof`_ ``model_metadata``. - text_extraction_model_metadata (google.cloud.automl_v1beta1.types.TextExtractionModelMetadata): - Metadata for text extraction models. - - This field is a member of `oneof`_ ``model_metadata``. - tables_model_metadata (google.cloud.automl_v1beta1.types.TablesModelMetadata): - Metadata for Tables models. - - This field is a member of `oneof`_ ``model_metadata``. - text_sentiment_model_metadata (google.cloud.automl_v1beta1.types.TextSentimentModelMetadata): - Metadata for text sentiment models. - - This field is a member of `oneof`_ ``model_metadata``. - name (str): - Output only. Resource name of the model. Format: - ``projects/{project_id}/locations/{location_id}/models/{model_id}`` - display_name (str): - Required. The name of the model to show in the interface. - The name can be up to 32 characters long and can consist - only of ASCII Latin letters A-Z and a-z, underscores (_), - and ASCII digits 0-9. It must start with a letter. - dataset_id (str): - Required. The resource ID of the dataset used - to create the model. The dataset must come from - the same ancestor project and location. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Timestamp when the model - training finished and can be used for - prediction. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Timestamp when this model was - last updated. - deployment_state (google.cloud.automl_v1beta1.types.Model.DeploymentState): - Output only. Deployment state of the model. A - model can only serve prediction requests after - it gets deployed. - """ - class DeploymentState(proto.Enum): - r"""Deployment state of the model. - - Values: - DEPLOYMENT_STATE_UNSPECIFIED (0): - Should not be used, an un-set enum has this - value by default. - DEPLOYED (1): - Model is deployed. - UNDEPLOYED (2): - Model is not deployed. 
- """ - DEPLOYMENT_STATE_UNSPECIFIED = 0 - DEPLOYED = 1 - UNDEPLOYED = 2 - - translation_model_metadata: translation.TranslationModelMetadata = proto.Field( - proto.MESSAGE, - number=15, - oneof='model_metadata', - message=translation.TranslationModelMetadata, - ) - image_classification_model_metadata: image.ImageClassificationModelMetadata = proto.Field( - proto.MESSAGE, - number=13, - oneof='model_metadata', - message=image.ImageClassificationModelMetadata, - ) - text_classification_model_metadata: text.TextClassificationModelMetadata = proto.Field( - proto.MESSAGE, - number=14, - oneof='model_metadata', - message=text.TextClassificationModelMetadata, - ) - image_object_detection_model_metadata: image.ImageObjectDetectionModelMetadata = proto.Field( - proto.MESSAGE, - number=20, - oneof='model_metadata', - message=image.ImageObjectDetectionModelMetadata, - ) - video_classification_model_metadata: video.VideoClassificationModelMetadata = proto.Field( - proto.MESSAGE, - number=23, - oneof='model_metadata', - message=video.VideoClassificationModelMetadata, - ) - video_object_tracking_model_metadata: video.VideoObjectTrackingModelMetadata = proto.Field( - proto.MESSAGE, - number=21, - oneof='model_metadata', - message=video.VideoObjectTrackingModelMetadata, - ) - text_extraction_model_metadata: text.TextExtractionModelMetadata = proto.Field( - proto.MESSAGE, - number=19, - oneof='model_metadata', - message=text.TextExtractionModelMetadata, - ) - tables_model_metadata: tables.TablesModelMetadata = proto.Field( - proto.MESSAGE, - number=24, - oneof='model_metadata', - message=tables.TablesModelMetadata, - ) - text_sentiment_model_metadata: text.TextSentimentModelMetadata = proto.Field( - proto.MESSAGE, - number=22, - oneof='model_metadata', - message=text.TextSentimentModelMetadata, - ) - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - dataset_id: str = proto.Field( - proto.STRING, - number=3, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=7, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=11, - message=timestamp_pb2.Timestamp, - ) - deployment_state: DeploymentState = proto.Field( - proto.ENUM, - number=8, - enum=DeploymentState, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/model_evaluation.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/model_evaluation.py deleted file mode 100644 index fcafcb518944..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/model_evaluation.py +++ /dev/null @@ -1,196 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations -
-from typing import MutableMapping, MutableSequence -
-import proto  # type: ignore -
-from google.cloud.automl_v1beta1.types import classification -from google.cloud.automl_v1beta1.types import detection -from google.cloud.automl_v1beta1.types import regression -from google.cloud.automl_v1beta1.types import text_extraction -from google.cloud.automl_v1beta1.types import text_sentiment -from google.cloud.automl_v1beta1.types import translation -from google.protobuf import timestamp_pb2  # type: ignore -
-
-__protobuf__ = proto.module( -    package='google.cloud.automl.v1beta1', -    manifest={ -        'ModelEvaluation', -    }, -) -
-
-class ModelEvaluation(proto.Message): -   r"""Evaluation results of a model. -
-   This message has `oneof`_ fields (mutually exclusive fields). -   For each oneof, at most one member field can be set at the same time. -   Setting any member of the oneof automatically clears all other -   members. -
-   .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields -
-   Attributes: -       classification_evaluation_metrics (google.cloud.automl_v1beta1.types.ClassificationEvaluationMetrics): -           Model evaluation metrics for image, text, -           video and tables classification. -           Tables problem is considered a classification -           when the target column is CATEGORY DataType. -
-           This field is a member of `oneof`_ ``metrics``. -       regression_evaluation_metrics (google.cloud.automl_v1beta1.types.RegressionEvaluationMetrics): -           Model evaluation metrics for Tables -           regression. Tables problem is considered a -           regression when the target column has FLOAT64 -           DataType. -
-           This field is a member of `oneof`_ ``metrics``. -       translation_evaluation_metrics (google.cloud.automl_v1beta1.types.TranslationEvaluationMetrics): -           Model evaluation metrics for translation. -
-           This field is a member of `oneof`_ ``metrics``. -       image_object_detection_evaluation_metrics (google.cloud.automl_v1beta1.types.ImageObjectDetectionEvaluationMetrics): -           Model evaluation metrics for image object -           detection. -
-           This field is a member of `oneof`_ ``metrics``. -       video_object_tracking_evaluation_metrics (google.cloud.automl_v1beta1.types.VideoObjectTrackingEvaluationMetrics): -           Model evaluation metrics for video object -           tracking. -
-           This field is a member of `oneof`_ ``metrics``. -       text_sentiment_evaluation_metrics (google.cloud.automl_v1beta1.types.TextSentimentEvaluationMetrics): -           Evaluation metrics for text sentiment models. -
-           This field is a member of `oneof`_ ``metrics``. -       text_extraction_evaluation_metrics (google.cloud.automl_v1beta1.types.TextExtractionEvaluationMetrics): -           Evaluation metrics for text extraction -           models. -
-           This field is a member of `oneof`_ ``metrics``. -       name (str): -           Output only. Resource name of the model evaluation. Format: -
-           ``projects/{project_id}/locations/{location_id}/models/{model_id}/modelEvaluations/{model_evaluation_id}`` -       annotation_spec_id (str): -           Output only. The ID of the annotation spec that the model -           evaluation applies to. The ID is empty for the overall -           model evaluation. For Tables, annotation specs in the dataset -           do not exist and this ID is always not set, but for -           CLASSIFICATION -
-           [prediction_type-s][google.cloud.automl.v1beta1.TablesModelMetadata.prediction_type] -           the -           [display_name][google.cloud.automl.v1beta1.ModelEvaluation.display_name] -           field is used. -       display_name (str): -           Output only.
The value of -           [display_name][google.cloud.automl.v1beta1.AnnotationSpec.display_name] -           at the moment when the model was trained. Because this field -           returns a value at model training time, for different models -           trained from the same dataset, the values may differ, since -           display names could have been changed between the two models' -           trainings. For Tables CLASSIFICATION -
-           [prediction_type-s][google.cloud.automl.v1beta1.TablesModelMetadata.prediction_type] -           distinct values of the target column at the moment of the -           model evaluation are populated here. The display_name is -           empty for the overall model evaluation. -       create_time (google.protobuf.timestamp_pb2.Timestamp): -           Output only. Timestamp when this model -           evaluation was created. -       evaluated_example_count (int): -           Output only. The number of examples used for model -           evaluation, i.e. for which ground truth from time of model -           creation is compared against the predicted annotations -           created by the model. For the overall ModelEvaluation (i.e. with -           annotation_spec_id not set) this is the total number of all -           examples used for evaluation. Otherwise, this is the count -           of examples that according to the ground truth were -           annotated by the -
-           [annotation_spec_id][google.cloud.automl.v1beta1.ModelEvaluation.annotation_spec_id]. -   """ -
-   classification_evaluation_metrics: classification.ClassificationEvaluationMetrics = proto.Field( -       proto.MESSAGE, -       number=8, -       oneof='metrics', -       message=classification.ClassificationEvaluationMetrics, -   ) -   regression_evaluation_metrics: regression.RegressionEvaluationMetrics = proto.Field( -       proto.MESSAGE, -       number=24, -       oneof='metrics', -       message=regression.RegressionEvaluationMetrics, -   ) -   translation_evaluation_metrics: translation.TranslationEvaluationMetrics = proto.Field( -       proto.MESSAGE, -       number=9, -       oneof='metrics', -       message=translation.TranslationEvaluationMetrics, -   ) -   image_object_detection_evaluation_metrics: detection.ImageObjectDetectionEvaluationMetrics = proto.Field( -       proto.MESSAGE, -       number=12, -       oneof='metrics', -       message=detection.ImageObjectDetectionEvaluationMetrics, -   ) -   video_object_tracking_evaluation_metrics: detection.VideoObjectTrackingEvaluationMetrics = proto.Field( -       proto.MESSAGE, -       number=14, -       oneof='metrics', -       message=detection.VideoObjectTrackingEvaluationMetrics, -   ) -   text_sentiment_evaluation_metrics: text_sentiment.TextSentimentEvaluationMetrics = proto.Field( -       proto.MESSAGE, -       number=11, -       oneof='metrics', -       message=text_sentiment.TextSentimentEvaluationMetrics, -   ) -   text_extraction_evaluation_metrics: text_extraction.TextExtractionEvaluationMetrics = proto.Field( -       proto.MESSAGE, -       number=13, -       oneof='metrics', -       message=text_extraction.TextExtractionEvaluationMetrics, -   ) -   name: str = proto.Field( -       proto.STRING, -       number=1, -   ) -   annotation_spec_id: str = proto.Field( -       proto.STRING, -       number=2, -   ) -   display_name: str = proto.Field( -       proto.STRING, -       number=15, -   ) -   create_time: timestamp_pb2.Timestamp = proto.Field( -       proto.MESSAGE, -       number=5, -       message=timestamp_pb2.Timestamp, -   ) -   evaluated_example_count: int = proto.Field( -       proto.INT32, -       number=6, -   ) -
-
-__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/operations.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/operations.py deleted file mode 100644 index 047fbd3bf7d9..000000000000 ---
a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/operations.py +++ /dev/null @@ -1,392 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.automl_v1beta1.types import io -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.automl.v1beta1', - manifest={ - 'OperationMetadata', - 'DeleteOperationMetadata', - 'DeployModelOperationMetadata', - 'UndeployModelOperationMetadata', - 'CreateModelOperationMetadata', - 'ImportDataOperationMetadata', - 'ExportDataOperationMetadata', - 'BatchPredictOperationMetadata', - 'ExportModelOperationMetadata', - 'ExportEvaluatedExamplesOperationMetadata', - }, -) - - -class OperationMetadata(proto.Message): - r"""Metadata used across all long running operations returned by - AutoML API. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - delete_details (google.cloud.automl_v1beta1.types.DeleteOperationMetadata): - Details of a Delete operation. - - This field is a member of `oneof`_ ``details``. - deploy_model_details (google.cloud.automl_v1beta1.types.DeployModelOperationMetadata): - Details of a DeployModel operation. - - This field is a member of `oneof`_ ``details``. - undeploy_model_details (google.cloud.automl_v1beta1.types.UndeployModelOperationMetadata): - Details of an UndeployModel operation. - - This field is a member of `oneof`_ ``details``. - create_model_details (google.cloud.automl_v1beta1.types.CreateModelOperationMetadata): - Details of CreateModel operation. - - This field is a member of `oneof`_ ``details``. - import_data_details (google.cloud.automl_v1beta1.types.ImportDataOperationMetadata): - Details of ImportData operation. - - This field is a member of `oneof`_ ``details``. - batch_predict_details (google.cloud.automl_v1beta1.types.BatchPredictOperationMetadata): - Details of BatchPredict operation. - - This field is a member of `oneof`_ ``details``. - export_data_details (google.cloud.automl_v1beta1.types.ExportDataOperationMetadata): - Details of ExportData operation. - - This field is a member of `oneof`_ ``details``. - export_model_details (google.cloud.automl_v1beta1.types.ExportModelOperationMetadata): - Details of ExportModel operation. - - This field is a member of `oneof`_ ``details``. - export_evaluated_examples_details (google.cloud.automl_v1beta1.types.ExportEvaluatedExamplesOperationMetadata): - Details of ExportEvaluatedExamples operation. - - This field is a member of `oneof`_ ``details``. 
- progress_percent (int): - Output only. Progress of operation. Range: [0, 100]. Not - used currently. - partial_failures (MutableSequence[google.rpc.status_pb2.Status]): - Output only. Partial failures encountered. - E.g. single files that couldn't be read. - This field should never exceed 20 entries. - Status details field will contain standard GCP - error details. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Time when the operation was - created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Time when the operation was - updated for the last time. - """ - - delete_details: 'DeleteOperationMetadata' = proto.Field( - proto.MESSAGE, - number=8, - oneof='details', - message='DeleteOperationMetadata', - ) - deploy_model_details: 'DeployModelOperationMetadata' = proto.Field( - proto.MESSAGE, - number=24, - oneof='details', - message='DeployModelOperationMetadata', - ) - undeploy_model_details: 'UndeployModelOperationMetadata' = proto.Field( - proto.MESSAGE, - number=25, - oneof='details', - message='UndeployModelOperationMetadata', - ) - create_model_details: 'CreateModelOperationMetadata' = proto.Field( - proto.MESSAGE, - number=10, - oneof='details', - message='CreateModelOperationMetadata', - ) - import_data_details: 'ImportDataOperationMetadata' = proto.Field( - proto.MESSAGE, - number=15, - oneof='details', - message='ImportDataOperationMetadata', - ) - batch_predict_details: 'BatchPredictOperationMetadata' = proto.Field( - proto.MESSAGE, - number=16, - oneof='details', - message='BatchPredictOperationMetadata', - ) - export_data_details: 'ExportDataOperationMetadata' = proto.Field( - proto.MESSAGE, - number=21, - oneof='details', - message='ExportDataOperationMetadata', - ) - export_model_details: 'ExportModelOperationMetadata' = proto.Field( - proto.MESSAGE, - number=22, - oneof='details', - message='ExportModelOperationMetadata', - ) - export_evaluated_examples_details: 'ExportEvaluatedExamplesOperationMetadata' = proto.Field( - proto.MESSAGE, - number=26, - oneof='details', - message='ExportEvaluatedExamplesOperationMetadata', - ) - progress_percent: int = proto.Field( - proto.INT32, - number=13, - ) - partial_failures: MutableSequence[status_pb2.Status] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=status_pb2.Status, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - - -class DeleteOperationMetadata(proto.Message): - r"""Details of operations that perform deletes of any entities. - """ - - -class DeployModelOperationMetadata(proto.Message): - r"""Details of DeployModel operation. - """ - - -class UndeployModelOperationMetadata(proto.Message): - r"""Details of UndeployModel operation. - """ - - -class CreateModelOperationMetadata(proto.Message): - r"""Details of CreateModel operation. - """ - - -class ImportDataOperationMetadata(proto.Message): - r"""Details of ImportData operation. - """ - - -class ExportDataOperationMetadata(proto.Message): - r"""Details of ExportData operation. - - Attributes: - output_info (google.cloud.automl_v1beta1.types.ExportDataOperationMetadata.ExportDataOutputInfo): - Output only. Information further describing - this export data's output. - """ - - class ExportDataOutputInfo(proto.Message): - r"""Further describes this export data's output. 
Supplements - [OutputConfig][google.cloud.automl.v1beta1.OutputConfig]. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - gcs_output_directory (str): - The full path of the Google Cloud Storage - directory created, into which the exported data - is written. - - This field is a member of `oneof`_ ``output_location``. - bigquery_output_dataset (str): - The path of the BigQuery dataset created, in - bq://projectId.bqDatasetId format, into which - the exported data is written. - - This field is a member of `oneof`_ ``output_location``. - """ - - gcs_output_directory: str = proto.Field( - proto.STRING, - number=1, - oneof='output_location', - ) - bigquery_output_dataset: str = proto.Field( - proto.STRING, - number=2, - oneof='output_location', - ) - - output_info: ExportDataOutputInfo = proto.Field( - proto.MESSAGE, - number=1, - message=ExportDataOutputInfo, - ) - - -class BatchPredictOperationMetadata(proto.Message): - r"""Details of BatchPredict operation. - - Attributes: - input_config (google.cloud.automl_v1beta1.types.BatchPredictInputConfig): - Output only. The input config that was given - upon starting this batch predict operation. - output_info (google.cloud.automl_v1beta1.types.BatchPredictOperationMetadata.BatchPredictOutputInfo): - Output only. Information further describing - this batch predict's output. - """ - - class BatchPredictOutputInfo(proto.Message): - r"""Further describes this batch predict's output. Supplements - - [BatchPredictOutputConfig][google.cloud.automl.v1beta1.BatchPredictOutputConfig]. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - gcs_output_directory (str): - The full path of the Google Cloud Storage - directory created, into which the prediction - output is written. - - This field is a member of `oneof`_ ``output_location``. - bigquery_output_dataset (str): - The path of the BigQuery dataset created, in - bq://projectId.bqDatasetId format, into which - the prediction output is written. - - This field is a member of `oneof`_ ``output_location``. - """ - - gcs_output_directory: str = proto.Field( - proto.STRING, - number=1, - oneof='output_location', - ) - bigquery_output_dataset: str = proto.Field( - proto.STRING, - number=2, - oneof='output_location', - ) - - input_config: io.BatchPredictInputConfig = proto.Field( - proto.MESSAGE, - number=1, - message=io.BatchPredictInputConfig, - ) - output_info: BatchPredictOutputInfo = proto.Field( - proto.MESSAGE, - number=2, - message=BatchPredictOutputInfo, - ) - - -class ExportModelOperationMetadata(proto.Message): - r"""Details of ExportModel operation. - - Attributes: - output_info (google.cloud.automl_v1beta1.types.ExportModelOperationMetadata.ExportModelOutputInfo): - Output only. Information further describing - the output of this model export. - """ - - class ExportModelOutputInfo(proto.Message): - r"""Further describes the output of model export. 
Supplements -
-       [ModelExportOutputConfig][google.cloud.automl.v1beta1.ModelExportOutputConfig]. -
-       Attributes: -           gcs_output_directory (str): -               The full path of the Google Cloud Storage -               directory created, into which the model will be -               exported. -       """ -
-       gcs_output_directory: str = proto.Field( -           proto.STRING, -           number=1, -       ) -
-   output_info: ExportModelOutputInfo = proto.Field( -       proto.MESSAGE, -       number=2, -       message=ExportModelOutputInfo, -   ) -
-
-class ExportEvaluatedExamplesOperationMetadata(proto.Message): -   r"""Details of ExportEvaluatedExamples operation. -
-   Attributes: -       output_info (google.cloud.automl_v1beta1.types.ExportEvaluatedExamplesOperationMetadata.ExportEvaluatedExamplesOutputInfo): -           Output only. Information further describing -           the output of this evaluated examples export. -   """ -
-   class ExportEvaluatedExamplesOutputInfo(proto.Message): -       r"""Further describes the output of the evaluated examples export. -       Supplements -
-       [ExportEvaluatedExamplesOutputConfig][google.cloud.automl.v1beta1.ExportEvaluatedExamplesOutputConfig]. -
-       Attributes: -           bigquery_output_dataset (str): -               The path of the BigQuery dataset created, in -               bq://projectId.bqDatasetId format, into which -               the output of export evaluated examples is -               written. -       """ -
-       bigquery_output_dataset: str = proto.Field( -           proto.STRING, -           number=2, -       ) -
-   output_info: ExportEvaluatedExamplesOutputInfo = proto.Field( -       proto.MESSAGE, -       number=2, -       message=ExportEvaluatedExamplesOutputInfo, -   ) -
-
-__all__ = tuple(sorted(__protobuf__.manifest))
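Each long-running AutoML call returns an operation whose metadata deserializes into the ``OperationMetadata`` message defined in this file. A minimal sketch of inspecting it during a data import; the dataset resource name and bucket are hypothetical:

.. code-block:: python

    from google.cloud import automl_v1beta1

    client = automl_v1beta1.AutoMlClient()
    operation = client.import_data(
        request={
            # Hypothetical dataset resource name and input CSV.
            "name": "projects/my-project/locations/us-central1/datasets/TBL456",
            "input_config": {"gcs_source": {"input_uris": ["gs://my-bucket/train.csv"]}},
        }
    )

    # operation.metadata is an OperationMetadata message; partial_failures
    # lists e.g. single files that couldn't be read.
    metadata = operation.metadata
    print(metadata.progress_percent)
    for status in metadata.partial_failures:
        print(status.code, status.message)

    operation.result()  # wait for the import to finish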
diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/prediction_service.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/prediction_service.py deleted file mode 100644 index 9a83edd65c2d..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/prediction_service.py +++ /dev/null @@ -1,285 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -#     http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations -
-from typing import MutableMapping, MutableSequence -
-import proto  # type: ignore -
-from google.cloud.automl_v1beta1.types import annotation_payload -from google.cloud.automl_v1beta1.types import data_items -from google.cloud.automl_v1beta1.types import io -
-
-__protobuf__ = proto.module( -    package='google.cloud.automl.v1beta1', -    manifest={ -        'PredictRequest', -        'PredictResponse', -        'BatchPredictRequest', -        'BatchPredictResult', -    }, -) -
-
-class PredictRequest(proto.Message): -   r"""Request message for -   [PredictionService.Predict][google.cloud.automl.v1beta1.PredictionService.Predict]. -
-   Attributes: -       name (str): -           Required. Name of the model requested to -           serve the prediction. -       payload (google.cloud.automl_v1beta1.types.ExamplePayload): -           Required. Payload to perform a prediction on. -           The payload must match the problem type that the -           model was trained to solve. -       params (MutableMapping[str, str]): -           Additional domain-specific parameters; any string must be up -           to 25000 characters long. -
-           -  For Image Classification: -
-              ``score_threshold`` - (float) A value from 0.0 to 1.0. -              When the model makes predictions for an image, it will -              only produce results that have at least this confidence -              score. The default is 0.5. -
-           -  For Image Object Detection: ``score_threshold`` - (float) -              When Model detects objects on the image, it will only -              produce bounding boxes which have at least this -              confidence score. Value in 0 to 1 range, default is 0.5. -              ``max_bounding_box_count`` - (int64) No more than this -              number of bounding boxes will be returned in the -              response. Default is 100; the requested value may be -              limited by the server. -
-           -  For Tables: feature_importance - (boolean) Whether -              feature importance should be populated in the returned -              TablesAnnotation. The default is false. -   """ -
-   name: str = proto.Field( -       proto.STRING, -       number=1, -   ) -   payload: data_items.ExamplePayload = proto.Field( -       proto.MESSAGE, -       number=2, -       message=data_items.ExamplePayload, -   ) -   params: MutableMapping[str, str] = proto.MapField( -       proto.STRING, -       proto.STRING, -       number=3, -   ) -
-
-class PredictResponse(proto.Message): -   r"""Response message for -   [PredictionService.Predict][google.cloud.automl.v1beta1.PredictionService.Predict]. -
-   Attributes: -       payload (MutableSequence[google.cloud.automl_v1beta1.types.AnnotationPayload]): -           Prediction result. -           Translation and Text Sentiment will return -           precisely one payload. -       preprocessed_input (google.cloud.automl_v1beta1.types.ExamplePayload): -           The preprocessed example that AutoML actually makes -           prediction on. Empty if AutoML does not preprocess the input -           example. -
-           -  For Text Extraction: If the input is a .pdf file, the -              OCR'ed text will be provided in -              [document_text][google.cloud.automl.v1beta1.Document.document_text]. -       metadata (MutableMapping[str, str]): -           Additional domain-specific prediction response metadata. -
-           -  For Image Object Detection: ``max_bounding_box_count`` - -              (int64) At most that many bounding boxes per image could -              have been returned. -
-           -  For Text Sentiment: ``sentiment_score`` - (float, -              deprecated) A value between -1 and 1; -1 maps to the least -              positive sentiment and 1 maps to the most positive one: -              the higher the score, the more positive the sentiment -              in the document. These values are relative to the -              training data, so e.g. if all the training data was -              positive, then -1 will still map to a positive (though -              the least positive) sentiment. The -              sentiment_score shouldn't be confused with "score" or -              "magnitude" from the previous Natural Language Sentiment -              Analysis API. -   """ -
-   payload: MutableSequence[annotation_payload.AnnotationPayload] = proto.RepeatedField( -       proto.MESSAGE, -       number=1, -       message=annotation_payload.AnnotationPayload, -   ) -   preprocessed_input: data_items.ExamplePayload = proto.Field( -       proto.MESSAGE, -       number=3, -       message=data_items.ExamplePayload, -   ) -   metadata: MutableMapping[str, str] = proto.MapField( -       proto.STRING, -       proto.STRING, -       number=2, -   ) -
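A sketch of an online prediction call using ``params``; the model name and local file are hypothetical placeholders, and the sketch assumes the generated client's flattened ``name``/``payload``/``params`` arguments:

.. code-block:: python

    from google.cloud import automl_v1beta1

    prediction_client = automl_v1beta1.PredictionServiceClient()

    with open("flower.jpg", "rb") as f:  # hypothetical local image
        image_bytes = f.read()

    response = prediction_client.predict(
        # Hypothetical image classification model.
        name="projects/my-project/locations/us-central1/models/ICN123",
        payload={"image": {"image_bytes": image_bytes}},
        # Only annotations scoring at least 0.8 are returned.
        params={"score_threshold": "0.8"},
    )
    for annotation in response.payload:
        print(annotation.display_name, annotation.classification.score)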
-
-class BatchPredictRequest(proto.Message): -   r"""Request message for -   [PredictionService.BatchPredict][google.cloud.automl.v1beta1.PredictionService.BatchPredict]. -
-   Attributes: -       name (str): -           Required. Name of the model requested to -           serve the batch prediction. -       input_config (google.cloud.automl_v1beta1.types.BatchPredictInputConfig): -           Required. The input configuration for batch -           prediction. -       output_config (google.cloud.automl_v1beta1.types.BatchPredictOutputConfig): -           Required. The configuration specifying where the -           output predictions should be written. -       params (MutableMapping[str, str]): -           Required. Additional domain-specific parameters for the -           predictions; any string must be up to 25000 characters long. -
-           -  For Text Classification: -
-              ``score_threshold`` - (float) A value from 0.0 to 1.0. -              When the model makes predictions for a text snippet, it -              will only produce results that have at least this -              confidence score. The default is 0.5. -
-           -  For Image Classification: -
-              ``score_threshold`` - (float) A value from 0.0 to 1.0. -              When the model makes predictions for an image, it will -              only produce results that have at least this confidence -              score. The default is 0.5. -
-           -  For Image Object Detection: -
-              ``score_threshold`` - (float) When Model detects objects -              on the image, it will only produce bounding boxes which -              have at least this confidence score. Value in 0 to 1 -              range, default is 0.5. ``max_bounding_box_count`` - -              (int64) No more than this number of bounding boxes will -              be produced per image. Default is 100; the requested -              value may be limited by the server. -
-           -  For Video Classification: -
-              ``score_threshold`` - (float) A value from 0.0 to 1.0. -              When the model makes predictions for a video, it will -              only produce results that have at least this confidence -              score. The default is 0.5. ``segment_classification`` - -              (boolean) Set to true to request segment-level -              classification. AutoML Video Intelligence returns labels -              and their confidence scores for the entire segment of the -              video that the user specified in the request configuration. -              The default is "true". ``shot_classification`` - -              (boolean) Set to true to request shot-level -              classification. AutoML Video Intelligence determines the -              boundaries for each camera shot in the entire segment of -              the video that the user specified in the request -              configuration. AutoML Video Intelligence then returns -              labels and their confidence scores for each detected -              shot, along with the start and end time of the shot. -              WARNING: Model evaluation is not done for this -              classification type; its quality depends on the -              training data, but there are no metrics provided to -              describe that quality. The default is "false". -              ``1s_interval_classification`` - (boolean) Set to true to -              request classification for a video at one-second -              intervals. AutoML Video Intelligence returns labels and -              their confidence scores for each second of the entire -              segment of the video that the user specified in the request -              configuration. WARNING: Model evaluation is not done for -              this classification type; its quality depends on the -              training data, but there are no metrics provided to -              describe that quality. The default is "false". -
-           -  For Tables: -
-              feature_importance - (boolean) Whether feature importance -              should be populated in the returned TablesAnnotations. -              The default is false. -
-           -  For Video Object Tracking: -
-              ``score_threshold`` - (float) When Model detects objects -              on video frames, it will only produce bounding boxes -              which have at least this confidence score. Value in 0 to -              1 range, default is 0.5. ``max_bounding_box_count`` - -              (int64) No more than this number of bounding boxes will -              be returned per frame. Default is 100; the requested -              value may be limited by the server.
``min_bounding_box_size`` - -              (float) Only bounding boxes whose shortest edge is at least -              this long, as a relative value of the video frame size, will be -              returned. Value in 0 to 1 range. Default is 0. -   """ -
-   name: str = proto.Field( -       proto.STRING, -       number=1, -   ) -   input_config: io.BatchPredictInputConfig = proto.Field( -       proto.MESSAGE, -       number=3, -       message=io.BatchPredictInputConfig, -   ) -   output_config: io.BatchPredictOutputConfig = proto.Field( -       proto.MESSAGE, -       number=4, -       message=io.BatchPredictOutputConfig, -   ) -   params: MutableMapping[str, str] = proto.MapField( -       proto.STRING, -       proto.STRING, -       number=5, -   ) -
-
-class BatchPredictResult(proto.Message): -   r"""Result of the Batch Predict. This message is returned in -   [response][google.longrunning.Operation.response] of the operation -   returned by the -   [PredictionService.BatchPredict][google.cloud.automl.v1beta1.PredictionService.BatchPredict]. -
-   Attributes: -       metadata (MutableMapping[str, str]): -           Additional domain-specific prediction response metadata. -
-           -  For Image Object Detection: ``max_bounding_box_count`` - -              (int64) At most that many bounding boxes per image could -              have been returned. -
-           -  For Video Object Tracking: ``max_bounding_box_count`` - -              (int64) At most that many bounding boxes per frame could -              have been returned. -   """ -
-   metadata: MutableMapping[str, str] = proto.MapField( -       proto.STRING, -       proto.STRING, -       number=1, -   ) -
-
-__all__ = tuple(sorted(__protobuf__.manifest))
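A sketch tying ``BatchPredictRequest`` to the output configs documented earlier in this file; the resource names and URIs are hypothetical, and the sketch assumes the generated client's flattened arguments:

.. code-block:: python

    from google.cloud import automl_v1beta1

    prediction_client = automl_v1beta1.PredictionServiceClient()

    operation = prediction_client.batch_predict(
        # Hypothetical video object tracking model.
        name="projects/my-project/locations/us-central1/models/VOT123",
        input_config={"gcs_source": {"input_uris": ["gs://my-bucket/videos.csv"]}},
        output_config={
            "gcs_destination": {"output_uri_prefix": "gs://my-bucket/predictions"}
        },
        params={"score_threshold": "0.7", "max_bounding_box_count": "50"},
    )

    result = operation.result()  # a BatchPredictResult once the LRO finishes
    print(result.metadata.get("max_bounding_box_count"))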
- """ - - start: float = proto.Field( - proto.DOUBLE, - number=1, - ) - end: float = proto.Field( - proto.DOUBLE, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/regression.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/regression.py deleted file mode 100644 index cef0fdde13fb..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/regression.py +++ /dev/null @@ -1,71 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.automl.v1beta1', - manifest={ - 'RegressionEvaluationMetrics', - }, -) - - -class RegressionEvaluationMetrics(proto.Message): - r"""Metrics for regression problems. - - Attributes: - root_mean_squared_error (float): - Output only. Root Mean Squared Error (RMSE). - mean_absolute_error (float): - Output only. Mean Absolute Error (MAE). - mean_absolute_percentage_error (float): - Output only. Mean absolute percentage error. - Only set if all ground truth values are are - positive. - r_squared (float): - Output only. R squared. - root_mean_squared_log_error (float): - Output only. Root mean squared log error. - """ - - root_mean_squared_error: float = proto.Field( - proto.FLOAT, - number=1, - ) - mean_absolute_error: float = proto.Field( - proto.FLOAT, - number=2, - ) - mean_absolute_percentage_error: float = proto.Field( - proto.FLOAT, - number=3, - ) - r_squared: float = proto.Field( - proto.FLOAT, - number=4, - ) - root_mean_squared_log_error: float = proto.Field( - proto.FLOAT, - number=5, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/service.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/service.py deleted file mode 100644 index 34f50c3f76cf..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/service.py +++ /dev/null @@ -1,874 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.automl_v1beta1.types import column_spec as gca_column_spec -from google.cloud.automl_v1beta1.types import dataset as gca_dataset -from google.cloud.automl_v1beta1.types import image -from google.cloud.automl_v1beta1.types import io -from google.cloud.automl_v1beta1.types import model as gca_model -from google.cloud.automl_v1beta1.types import model_evaluation as gca_model_evaluation -from google.cloud.automl_v1beta1.types import table_spec as gca_table_spec -from google.protobuf import field_mask_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.automl.v1beta1', - manifest={ - 'CreateDatasetRequest', - 'GetDatasetRequest', - 'ListDatasetsRequest', - 'ListDatasetsResponse', - 'UpdateDatasetRequest', - 'DeleteDatasetRequest', - 'ImportDataRequest', - 'ExportDataRequest', - 'GetAnnotationSpecRequest', - 'GetTableSpecRequest', - 'ListTableSpecsRequest', - 'ListTableSpecsResponse', - 'UpdateTableSpecRequest', - 'GetColumnSpecRequest', - 'ListColumnSpecsRequest', - 'ListColumnSpecsResponse', - 'UpdateColumnSpecRequest', - 'CreateModelRequest', - 'GetModelRequest', - 'ListModelsRequest', - 'ListModelsResponse', - 'DeleteModelRequest', - 'DeployModelRequest', - 'UndeployModelRequest', - 'ExportModelRequest', - 'ExportEvaluatedExamplesRequest', - 'GetModelEvaluationRequest', - 'ListModelEvaluationsRequest', - 'ListModelEvaluationsResponse', - }, -) - - -class CreateDatasetRequest(proto.Message): - r"""Request message for - [AutoMl.CreateDataset][google.cloud.automl.v1beta1.AutoMl.CreateDataset]. - - Attributes: - parent (str): - Required. The resource name of the project to - create the dataset for. - dataset (google.cloud.automl_v1beta1.types.Dataset): - Required. The dataset to create. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - dataset: gca_dataset.Dataset = proto.Field( - proto.MESSAGE, - number=2, - message=gca_dataset.Dataset, - ) - - -class GetDatasetRequest(proto.Message): - r"""Request message for - [AutoMl.GetDataset][google.cloud.automl.v1beta1.AutoMl.GetDataset]. - - Attributes: - name (str): - Required. The resource name of the dataset to - retrieve. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListDatasetsRequest(proto.Message): - r"""Request message for - [AutoMl.ListDatasets][google.cloud.automl.v1beta1.AutoMl.ListDatasets]. - - Attributes: - parent (str): - Required. The resource name of the project - from which to list datasets. - filter (str): - An expression for filtering the results of the request. - - - ``dataset_metadata`` - for existence of the case (e.g. - ``image_classification_dataset_metadata:*``). Some - examples of using the filter are: - - - ``translation_dataset_metadata:*`` --> The dataset has - ``translation_dataset_metadata``. - page_size (int): - Requested page size. Server may return fewer - results than requested. If unspecified, server - will pick a default size. - page_token (str): - A token identifying a page of results for the server to - return. Typically obtained via - [ListDatasetsResponse.next_page_token][google.cloud.automl.v1beta1.ListDatasetsResponse.next_page_token] - of the previous - [AutoMl.ListDatasets][google.cloud.automl.v1beta1.AutoMl.ListDatasets] - call.
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - filter: str = proto.Field( - proto.STRING, - number=3, - ) - page_size: int = proto.Field( - proto.INT32, - number=4, - ) - page_token: str = proto.Field( - proto.STRING, - number=6, - ) - - -class ListDatasetsResponse(proto.Message): - r"""Response message for - [AutoMl.ListDatasets][google.cloud.automl.v1beta1.AutoMl.ListDatasets]. - - Attributes: - datasets (MutableSequence[google.cloud.automl_v1beta1.types.Dataset]): - The datasets read. - next_page_token (str): - A token to retrieve next page of results. Pass to - [ListDatasetsRequest.page_token][google.cloud.automl.v1beta1.ListDatasetsRequest.page_token] - to obtain that page. - """ - - @property - def raw_page(self): - return self - - datasets: MutableSequence[gca_dataset.Dataset] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_dataset.Dataset, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class UpdateDatasetRequest(proto.Message): - r"""Request message for - [AutoMl.UpdateDataset][google.cloud.automl.v1beta1.AutoMl.UpdateDataset] - - Attributes: - dataset (google.cloud.automl_v1beta1.types.Dataset): - Required. The dataset which replaces the - resource on the server. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - The update mask applies to the resource. - """ - - dataset: gca_dataset.Dataset = proto.Field( - proto.MESSAGE, - number=1, - message=gca_dataset.Dataset, - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class DeleteDatasetRequest(proto.Message): - r"""Request message for - [AutoMl.DeleteDataset][google.cloud.automl.v1beta1.AutoMl.DeleteDataset]. - - Attributes: - name (str): - Required. The resource name of the dataset to - delete. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ImportDataRequest(proto.Message): - r"""Request message for - [AutoMl.ImportData][google.cloud.automl.v1beta1.AutoMl.ImportData]. - - Attributes: - name (str): - Required. Dataset name. Dataset must already - exist. All imported annotations and examples - will be added. - input_config (google.cloud.automl_v1beta1.types.InputConfig): - Required. The desired input location and its - domain specific semantics, if any. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - input_config: io.InputConfig = proto.Field( - proto.MESSAGE, - number=3, - message=io.InputConfig, - ) - - -class ExportDataRequest(proto.Message): - r"""Request message for - [AutoMl.ExportData][google.cloud.automl.v1beta1.AutoMl.ExportData]. - - Attributes: - name (str): - Required. The resource name of the dataset. - output_config (google.cloud.automl_v1beta1.types.OutputConfig): - Required. The desired output location. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - output_config: io.OutputConfig = proto.Field( - proto.MESSAGE, - number=3, - message=io.OutputConfig, - ) - - -class GetAnnotationSpecRequest(proto.Message): - r"""Request message for - [AutoMl.GetAnnotationSpec][google.cloud.automl.v1beta1.AutoMl.GetAnnotationSpec]. - - Attributes: - name (str): - Required. The resource name of the annotation - spec to retrieve. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class GetTableSpecRequest(proto.Message): - r"""Request message for - [AutoMl.GetTableSpec][google.cloud.automl.v1beta1.AutoMl.GetTableSpec]. - - Attributes: - name (str): - Required. 
The resource name of the table spec - to retrieve. - field_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask specifying which fields to read. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - field_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class ListTableSpecsRequest(proto.Message): - r"""Request message for - [AutoMl.ListTableSpecs][google.cloud.automl.v1beta1.AutoMl.ListTableSpecs]. - - Attributes: - parent (str): - Required. The resource name of the dataset to - list table specs from. - field_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask specifying which fields to read. - filter (str): - Filter expression, see go/filtering. - page_size (int): - Requested page size. The server can return - fewer results than requested. If unspecified, - the server will pick a default size. - page_token (str): - A token identifying a page of results for the server to - return. Typically obtained from the - [ListTableSpecsResponse.next_page_token][google.cloud.automl.v1beta1.ListTableSpecsResponse.next_page_token] - field of the previous - [AutoMl.ListTableSpecs][google.cloud.automl.v1beta1.AutoMl.ListTableSpecs] - call. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - field_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - filter: str = proto.Field( - proto.STRING, - number=3, - ) - page_size: int = proto.Field( - proto.INT32, - number=4, - ) - page_token: str = proto.Field( - proto.STRING, - number=6, - ) - - -class ListTableSpecsResponse(proto.Message): - r"""Response message for - [AutoMl.ListTableSpecs][google.cloud.automl.v1beta1.AutoMl.ListTableSpecs]. - - Attributes: - table_specs (MutableSequence[google.cloud.automl_v1beta1.types.TableSpec]): - The table specs read. - next_page_token (str): - A token to retrieve next page of results. Pass to - [ListTableSpecsRequest.page_token][google.cloud.automl.v1beta1.ListTableSpecsRequest.page_token] - to obtain that page. - """ - - @property - def raw_page(self): - return self - - table_specs: MutableSequence[gca_table_spec.TableSpec] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_table_spec.TableSpec, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class UpdateTableSpecRequest(proto.Message): - r"""Request message for - [AutoMl.UpdateTableSpec][google.cloud.automl.v1beta1.AutoMl.UpdateTableSpec] - - Attributes: - table_spec (google.cloud.automl_v1beta1.types.TableSpec): - Required. The table spec which replaces the - resource on the server. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - The update mask applies to the resource. - """ - - table_spec: gca_table_spec.TableSpec = proto.Field( - proto.MESSAGE, - number=1, - message=gca_table_spec.TableSpec, - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class GetColumnSpecRequest(proto.Message): - r"""Request message for - [AutoMl.GetColumnSpec][google.cloud.automl.v1beta1.AutoMl.GetColumnSpec]. - - Attributes: - name (str): - Required. The resource name of the column - spec to retrieve. - field_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask specifying which fields to read. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - field_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class ListColumnSpecsRequest(proto.Message): - r"""Request message for - [AutoMl.ListColumnSpecs][google.cloud.automl.v1beta1.AutoMl.ListColumnSpecs]. - - Attributes: - parent (str): - Required. The resource name of the table spec - to list column specs from. - field_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask specifying which fields to read. - filter (str): - Filter expression, see go/filtering. - page_size (int): - Requested page size. The server can return - fewer results than requested. If unspecified, - the server will pick a default size. - page_token (str): - A token identifying a page of results for the server to - return. Typically obtained from the - [ListColumnSpecsResponse.next_page_token][google.cloud.automl.v1beta1.ListColumnSpecsResponse.next_page_token] - field of the previous - [AutoMl.ListColumnSpecs][google.cloud.automl.v1beta1.AutoMl.ListColumnSpecs] - call. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - field_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - filter: str = proto.Field( - proto.STRING, - number=3, - ) - page_size: int = proto.Field( - proto.INT32, - number=4, - ) - page_token: str = proto.Field( - proto.STRING, - number=6, - ) - - -class ListColumnSpecsResponse(proto.Message): - r"""Response message for - [AutoMl.ListColumnSpecs][google.cloud.automl.v1beta1.AutoMl.ListColumnSpecs]. - - Attributes: - column_specs (MutableSequence[google.cloud.automl_v1beta1.types.ColumnSpec]): - The column specs read. - next_page_token (str): - A token to retrieve next page of results. Pass to - [ListColumnSpecsRequest.page_token][google.cloud.automl.v1beta1.ListColumnSpecsRequest.page_token] - to obtain that page. - """ - - @property - def raw_page(self): - return self - - column_specs: MutableSequence[gca_column_spec.ColumnSpec] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_column_spec.ColumnSpec, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class UpdateColumnSpecRequest(proto.Message): - r"""Request message for - [AutoMl.UpdateColumnSpec][google.cloud.automl.v1beta1.AutoMl.UpdateColumnSpec] - - Attributes: - column_spec (google.cloud.automl_v1beta1.types.ColumnSpec): - Required. The column spec which replaces the - resource on the server. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - The update mask applies to the resource. - """ - - column_spec: gca_column_spec.ColumnSpec = proto.Field( - proto.MESSAGE, - number=1, - message=gca_column_spec.ColumnSpec, - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class CreateModelRequest(proto.Message): - r"""Request message for - [AutoMl.CreateModel][google.cloud.automl.v1beta1.AutoMl.CreateModel]. - - Attributes: - parent (str): - Required. Resource name of the parent project - where the model is being created. - model (google.cloud.automl_v1beta1.types.Model): - Required. The model to create. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - model: gca_model.Model = proto.Field( - proto.MESSAGE, - number=4, - message=gca_model.Model, - ) - - -class GetModelRequest(proto.Message): - r"""Request message for - [AutoMl.GetModel][google.cloud.automl.v1beta1.AutoMl.GetModel]. - - Attributes: - name (str): - Required. Resource name of the model. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListModelsRequest(proto.Message): - r"""Request message for - [AutoMl.ListModels][google.cloud.automl.v1beta1.AutoMl.ListModels]. - - Attributes: - parent (str): - Required. Resource name of the project, from - which to list the models. - filter (str): - An expression for filtering the results of the request. - - - ``model_metadata`` - for existence of the case (e.g. - ``video_classification_model_metadata:*``). - - - ``dataset_id`` - for = or !=. Some examples of using the - filter are: - - - ``image_classification_model_metadata:*`` --> The model - has ``image_classification_model_metadata``. - - - ``dataset_id=5`` --> The model was created from a dataset - with ID 5. - page_size (int): - Requested page size. - page_token (str): - A token identifying a page of results for the server to - return Typically obtained via - [ListModelsResponse.next_page_token][google.cloud.automl.v1beta1.ListModelsResponse.next_page_token] - of the previous - [AutoMl.ListModels][google.cloud.automl.v1beta1.AutoMl.ListModels] - call. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - filter: str = proto.Field( - proto.STRING, - number=3, - ) - page_size: int = proto.Field( - proto.INT32, - number=4, - ) - page_token: str = proto.Field( - proto.STRING, - number=6, - ) - - -class ListModelsResponse(proto.Message): - r"""Response message for - [AutoMl.ListModels][google.cloud.automl.v1beta1.AutoMl.ListModels]. - - Attributes: - model (MutableSequence[google.cloud.automl_v1beta1.types.Model]): - List of models in the requested page. - next_page_token (str): - A token to retrieve next page of results. Pass to - [ListModelsRequest.page_token][google.cloud.automl.v1beta1.ListModelsRequest.page_token] - to obtain that page. - """ - - @property - def raw_page(self): - return self - - model: MutableSequence[gca_model.Model] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_model.Model, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteModelRequest(proto.Message): - r"""Request message for - [AutoMl.DeleteModel][google.cloud.automl.v1beta1.AutoMl.DeleteModel]. - - Attributes: - name (str): - Required. Resource name of the model being - deleted. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class DeployModelRequest(proto.Message): - r"""Request message for - [AutoMl.DeployModel][google.cloud.automl.v1beta1.AutoMl.DeployModel]. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - image_object_detection_model_deployment_metadata (google.cloud.automl_v1beta1.types.ImageObjectDetectionModelDeploymentMetadata): - Model deployment metadata specific to Image - Object Detection. - - This field is a member of `oneof`_ ``model_deployment_metadata``. 
- image_classification_model_deployment_metadata (google.cloud.automl_v1beta1.types.ImageClassificationModelDeploymentMetadata): - Model deployment metadata specific to Image - Classification. - - This field is a member of `oneof`_ ``model_deployment_metadata``. - name (str): - Required. Resource name of the model to - deploy. - """ - - image_object_detection_model_deployment_metadata: image.ImageObjectDetectionModelDeploymentMetadata = proto.Field( - proto.MESSAGE, - number=2, - oneof='model_deployment_metadata', - message=image.ImageObjectDetectionModelDeploymentMetadata, - ) - image_classification_model_deployment_metadata: image.ImageClassificationModelDeploymentMetadata = proto.Field( - proto.MESSAGE, - number=4, - oneof='model_deployment_metadata', - message=image.ImageClassificationModelDeploymentMetadata, - ) - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class UndeployModelRequest(proto.Message): - r"""Request message for - [AutoMl.UndeployModel][google.cloud.automl.v1beta1.AutoMl.UndeployModel]. - - Attributes: - name (str): - Required. Resource name of the model to - undeploy. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ExportModelRequest(proto.Message): - r"""Request message for - [AutoMl.ExportModel][google.cloud.automl.v1beta1.AutoMl.ExportModel]. - Models need to be enabled for exporting, otherwise an error code - will be returned. - - Attributes: - name (str): - Required. The resource name of the model to - export. - output_config (google.cloud.automl_v1beta1.types.ModelExportOutputConfig): - Required. The desired output location and - configuration. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - output_config: io.ModelExportOutputConfig = proto.Field( - proto.MESSAGE, - number=3, - message=io.ModelExportOutputConfig, - ) - - -class ExportEvaluatedExamplesRequest(proto.Message): - r"""Request message for - [AutoMl.ExportEvaluatedExamples][google.cloud.automl.v1beta1.AutoMl.ExportEvaluatedExamples]. - - Attributes: - name (str): - Required. The resource name of the model - whose evaluated examples are to be exported. - output_config (google.cloud.automl_v1beta1.types.ExportEvaluatedExamplesOutputConfig): - Required. The desired output location and - configuration. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - output_config: io.ExportEvaluatedExamplesOutputConfig = proto.Field( - proto.MESSAGE, - number=3, - message=io.ExportEvaluatedExamplesOutputConfig, - ) - - -class GetModelEvaluationRequest(proto.Message): - r"""Request message for - [AutoMl.GetModelEvaluation][google.cloud.automl.v1beta1.AutoMl.GetModelEvaluation]. - - Attributes: - name (str): - Required. Resource name for the model - evaluation. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListModelEvaluationsRequest(proto.Message): - r"""Request message for - [AutoMl.ListModelEvaluations][google.cloud.automl.v1beta1.AutoMl.ListModelEvaluations]. - - Attributes: - parent (str): - Required. Resource name of the model to list - the model evaluations for. If modelId is set as - "-", this will list model evaluations from - across all models of the parent location. - filter (str): - An expression for filtering the results of the request. - - - ``annotation_spec_id`` - for =, != or existence. See - example below for the last. - - Some examples of using the filter are: - - - ``annotation_spec_id!=4`` --> The model evaluation was - done for annotation spec with ID different than 4. 
- - ``NOT annotation_spec_id:*`` --> The model evaluation was - done for aggregate of all annotation specs. - page_size (int): - Requested page size. - page_token (str): - A token identifying a page of results for the server to - return. Typically obtained via - [ListModelEvaluationsResponse.next_page_token][google.cloud.automl.v1beta1.ListModelEvaluationsResponse.next_page_token] - of the previous - [AutoMl.ListModelEvaluations][google.cloud.automl.v1beta1.AutoMl.ListModelEvaluations] - call. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - filter: str = proto.Field( - proto.STRING, - number=3, - ) - page_size: int = proto.Field( - proto.INT32, - number=4, - ) - page_token: str = proto.Field( - proto.STRING, - number=6, - ) - - -class ListModelEvaluationsResponse(proto.Message): - r"""Response message for - [AutoMl.ListModelEvaluations][google.cloud.automl.v1beta1.AutoMl.ListModelEvaluations]. - - Attributes: - model_evaluation (MutableSequence[google.cloud.automl_v1beta1.types.ModelEvaluation]): - List of model evaluations in the requested - page. - next_page_token (str): - A token to retrieve next page of results. Pass to the - [ListModelEvaluationsRequest.page_token][google.cloud.automl.v1beta1.ListModelEvaluationsRequest.page_token] - field of a new - [AutoMl.ListModelEvaluations][google.cloud.automl.v1beta1.AutoMl.ListModelEvaluations] - request to obtain that page. - """ - - @property - def raw_page(self): - return self - - model_evaluation: MutableSequence[gca_model_evaluation.ModelEvaluation] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gca_model_evaluation.ModelEvaluation, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/table_spec.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/table_spec.py deleted file mode 100644 index 2b7ebe82406a..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/table_spec.py +++ /dev/null @@ -1,111 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.automl_v1beta1.types import io - - -__protobuf__ = proto.module( - package='google.cloud.automl.v1beta1', - manifest={ - 'TableSpec', - }, -) - - -class TableSpec(proto.Message): - r"""A specification of a relational table. The table's schema is - represented via its child column specs. It is pre-populated as part - of ImportData by schema inference algorithm, the version of which is - a required parameter of ImportData InputConfig. Note: While working - with a table, at times the schema may be inconsistent with the data - in the table (e.g. string in a FLOAT64 column). The consistency - validation is done upon creation of a model. 
Used by: - - - Tables - - Attributes: - name (str): - Output only. The resource name of the table spec. Form: - - ``projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/tableSpecs/{table_spec_id}`` - time_column_spec_id (str): - column_spec_id of the time column. Only used if the parent - dataset's ml_use_column_spec_id is not set. Used to split - rows into TRAIN, VALIDATE and TEST sets such that oldest - rows go to TRAIN set, newest to TEST, and those in between - to VALIDATE. Required type: TIMESTAMP. If both this column - and ml_use_column are not set, then ML use of all rows will - be assigned by AutoML. NOTE: Updates of this field will - instantly affect any other users concurrently working with - the dataset. - row_count (int): - Output only. The number of rows (i.e. - examples) in the table. - valid_row_count (int): - Output only. The number of valid rows (i.e. - without values that don't match DataType-s of - their columns). - column_count (int): - Output only. The number of columns of the - table. That is, the number of child - ColumnSpec-s. - input_configs (MutableSequence[google.cloud.automl_v1beta1.types.InputConfig]): - Output only. Input configs via which data - currently residing in the table had been - imported. - etag (str): - Used to perform consistent read-modify-write - updates. If not set, a blind "overwrite" update - happens. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - time_column_spec_id: str = proto.Field( - proto.STRING, - number=2, - ) - row_count: int = proto.Field( - proto.INT64, - number=3, - ) - valid_row_count: int = proto.Field( - proto.INT64, - number=4, - ) - column_count: int = proto.Field( - proto.INT64, - number=7, - ) - input_configs: MutableSequence[io.InputConfig] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message=io.InputConfig, - ) - etag: str = proto.Field( - proto.STRING, - number=6, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/tables.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/tables.py deleted file mode 100644 index dc16f5952493..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/tables.py +++ /dev/null @@ -1,426 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
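To make the TableSpec fields above concrete, here is a minimal sketch of listing the table specs of a Tables dataset and reading the server-computed counts. The project, location, and dataset IDs are placeholders:

.. code-block:: python

    from google.cloud import automl_v1beta1

    client = automl_v1beta1.AutoMlClient()
    # Hypothetical resource name; substitute real project/location/dataset IDs.
    dataset_name = "projects/my-project/locations/us-central1/datasets/TBL1234567890"

    for table_spec in client.list_table_specs(parent=dataset_name):
        # row_count, valid_row_count, and column_count are output-only
        # statistics maintained by the service.
        print(table_spec.name, table_spec.row_count,
              table_spec.valid_row_count, table_spec.column_count)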
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.automl_v1beta1.types import column_spec -from google.cloud.automl_v1beta1.types import data_stats -from google.cloud.automl_v1beta1.types import ranges -from google.protobuf import struct_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.automl.v1beta1', - manifest={ - 'TablesDatasetMetadata', - 'TablesModelMetadata', - 'TablesAnnotation', - 'TablesModelColumnInfo', - }, -) - - -class TablesDatasetMetadata(proto.Message): - r"""Metadata for a dataset used for AutoML Tables. - - Attributes: - primary_table_spec_id (str): - Output only. The table_spec_id of the primary table of this - dataset. - target_column_spec_id (str): - column_spec_id of the primary table's column that should be - used as the training & prediction target. This column must - be non-nullable and have one of the following data types - (otherwise model creation will error): - - - CATEGORY - - - FLOAT64 - - If the type is CATEGORY, only up to 100 unique values may - exist in that column across all rows. - - NOTE: Updates of this field will instantly affect any other - users concurrently working with the dataset. - weight_column_spec_id (str): - column_spec_id of the primary table's column that should be - used as the weight column, i.e. the higher the value the - more important the row will be during model training. - Required type: FLOAT64. Allowed values: 0 to 10000, - inclusive on both ends; 0 means the row is ignored for - training. If not set, all rows are assumed to have equal - weight of 1. NOTE: Updates of this field will instantly - affect any other users concurrently working with the - dataset. - ml_use_column_spec_id (str): - column_spec_id of the primary table column which specifies a - possible ML use of the row, i.e. the column will be used to - split the rows into TRAIN, VALIDATE and TEST sets. Required - type: STRING. This column, if set, must either have all of - ``TRAIN``, ``VALIDATE``, ``TEST`` among its values, or only - have ``TEST``, ``UNASSIGNED`` values. In the latter case the - rows with ``UNASSIGNED`` value will be assigned by AutoML. - Note that if a given ml use distribution makes it impossible - to create a "good" model, that call will error describing - the issue. If both this column_spec_id and primary table's - time_column_spec_id are not set, then all rows are treated - as ``UNASSIGNED``. NOTE: Updates of this field will - instantly affect any other users concurrently working with - the dataset. - target_column_correlations (MutableMapping[str, google.cloud.automl_v1beta1.types.CorrelationStats]): - Output only. Correlations between - - [TablesDatasetMetadata.target_column_spec_id][google.cloud.automl.v1beta1.TablesDatasetMetadata.target_column_spec_id], - and other columns of the - - [TablesDatasetMetadata.primary_table_spec_id][google.cloud.automl.v1beta1.TablesDatasetMetadata.primary_table_spec_id]. - Only set if the target column is set. Mapping from other - column spec id to its CorrelationStats with the target - column. This field may be stale; see the stats_update_time - field for the timestamp at which these stats were last - updated. - stats_update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only.
The most recent timestamp when - target_column_correlations field and all descendant - ColumnSpec.data_stats and ColumnSpec.top_correlated_columns - fields were last (re-)generated. Any changes that happened - to the dataset afterwards are not reflected in these fields - values. The regeneration happens in the background on a best - effort basis. - """ - - primary_table_spec_id: str = proto.Field( - proto.STRING, - number=1, - ) - target_column_spec_id: str = proto.Field( - proto.STRING, - number=2, - ) - weight_column_spec_id: str = proto.Field( - proto.STRING, - number=3, - ) - ml_use_column_spec_id: str = proto.Field( - proto.STRING, - number=4, - ) - target_column_correlations: MutableMapping[str, data_stats.CorrelationStats] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=6, - message=data_stats.CorrelationStats, - ) - stats_update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=7, - message=timestamp_pb2.Timestamp, - ) - - -class TablesModelMetadata(proto.Message): - r"""Model metadata specific to AutoML Tables. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - optimization_objective_recall_value (float): - Required when optimization_objective is - "MAXIMIZE_PRECISION_AT_RECALL". Must be between 0 and 1, - inclusive. - - This field is a member of `oneof`_ ``additional_optimization_objective_config``. - optimization_objective_precision_value (float): - Required when optimization_objective is - "MAXIMIZE_RECALL_AT_PRECISION". Must be between 0 and 1, - inclusive. - - This field is a member of `oneof`_ ``additional_optimization_objective_config``. - target_column_spec (google.cloud.automl_v1beta1.types.ColumnSpec): - Column spec of the dataset's primary table's column the - model is predicting. Snapshotted when model creation - started. Only 3 fields are used: name - May be set on - CreateModel, if it's not then the ColumnSpec corresponding - to the current target_column_spec_id of the dataset the - model is trained from is used. If neither is set, - CreateModel will error. display_name - Output only. - data_type - Output only. - input_feature_column_specs (MutableSequence[google.cloud.automl_v1beta1.types.ColumnSpec]): - Column specs of the dataset's primary table's columns, on - which the model is trained and which are used as the input - for predictions. The - - [target_column][google.cloud.automl.v1beta1.TablesModelMetadata.target_column_spec] - as well as, according to dataset's state upon model - creation, - - [weight_column][google.cloud.automl.v1beta1.TablesDatasetMetadata.weight_column_spec_id], - and - - [ml_use_column][google.cloud.automl.v1beta1.TablesDatasetMetadata.ml_use_column_spec_id] - must never be included here. - - Only 3 fields are used: - - - name - May be set on CreateModel, if set only the columns - specified are used, otherwise all primary table's columns - (except the ones listed above) are used for the training - and prediction input. - - - display_name - Output only. - - - data_type - Output only. - optimization_objective (str): - Objective function the model is optimizing towards. The - training process creates a model that maximizes/minimizes - the value of the objective function over the validation set. 
- - The supported optimization objectives depend on the - prediction type. If the field is not set, a default - objective function is used. - - CLASSIFICATION_BINARY: "MAXIMIZE_AU_ROC" (default) - - Maximize the area under the receiver operating - characteristic (ROC) curve. "MINIMIZE_LOG_LOSS" - Minimize - log loss. "MAXIMIZE_AU_PRC" - Maximize the area under the - precision-recall curve. "MAXIMIZE_PRECISION_AT_RECALL" - - Maximize precision for a specified recall value. - "MAXIMIZE_RECALL_AT_PRECISION" - Maximize recall for a - specified precision value. - - CLASSIFICATION_MULTI_CLASS : "MINIMIZE_LOG_LOSS" (default) - - Minimize log loss. - - REGRESSION: "MINIMIZE_RMSE" (default) - Minimize - root-mean-squared error (RMSE). "MINIMIZE_MAE" - Minimize - mean-absolute error (MAE). "MINIMIZE_RMSLE" - Minimize - root-mean-squared log error (RMSLE). - tables_model_column_info (MutableSequence[google.cloud.automl_v1beta1.types.TablesModelColumnInfo]): - Output only. Auxiliary information for each of the - input_feature_column_specs with respect to this particular - model. - train_budget_milli_node_hours (int): - Required. The train budget of creating this - model, expressed in milli node hours i.e. 1,000 - value in this field means 1 node hour. - - The training cost of the model will not exceed - this budget. The final cost will be attempted to - be close to the budget, though may end up being - (even) noticeably smaller - at the backend's - discretion. This especially may happen when - further model training ceases to provide any - improvements. - - If the budget is set to a value known to be - insufficient to train a model for the given - dataset, the training won't be attempted and - will error. - - The train budget must be between 1,000 and - 72,000 milli node hours, inclusive. - train_cost_milli_node_hours (int): - Output only. The actual training cost of the - model, expressed in milli node hours, i.e. 1,000 - value in this field means 1 node hour. - Guaranteed to not exceed the train budget. - disable_early_stopping (bool): - Use the entire training budget. This disables - the early stopping feature. By default, the - early stopping feature is enabled, which means - that AutoML Tables might stop training before - the entire training budget has been used. - """ - - optimization_objective_recall_value: float = proto.Field( - proto.FLOAT, - number=17, - oneof='additional_optimization_objective_config', - ) - optimization_objective_precision_value: float = proto.Field( - proto.FLOAT, - number=18, - oneof='additional_optimization_objective_config', - ) - target_column_spec: column_spec.ColumnSpec = proto.Field( - proto.MESSAGE, - number=2, - message=column_spec.ColumnSpec, - ) - input_feature_column_specs: MutableSequence[column_spec.ColumnSpec] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=column_spec.ColumnSpec, - ) - optimization_objective: str = proto.Field( - proto.STRING, - number=4, - ) - tables_model_column_info: MutableSequence['TablesModelColumnInfo'] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='TablesModelColumnInfo', - ) - train_budget_milli_node_hours: int = proto.Field( - proto.INT64, - number=6, - ) - train_cost_milli_node_hours: int = proto.Field( - proto.INT64, - number=7, - ) - disable_early_stopping: bool = proto.Field( - proto.BOOL, - number=12, - ) - - -class TablesAnnotation(proto.Message): - r"""Contains annotation details specific to Tables. - - Attributes: - score (float): - Output only. 
A confidence estimate between 0.0 and 1.0, - inclusive. A higher value means greater confidence in the - returned value. For - - [target_column_spec][google.cloud.automl.v1beta1.TablesModelMetadata.target_column_spec] - of FLOAT64 data type the score is not populated. - prediction_interval (google.cloud.automl_v1beta1.types.DoubleRange): - Output only. Only populated when - - [target_column_spec][google.cloud.automl.v1beta1.TablesModelMetadata.target_column_spec] - has FLOAT64 data type. An interval in which the exactly - correct target value has 95% chance to be in. - value (google.protobuf.struct_pb2.Value): - The predicted value of the row's - - [target_column][google.cloud.automl.v1beta1.TablesModelMetadata.target_column_spec]. - The value depends on the column's DataType: - - - CATEGORY - the predicted (with the above confidence - ``score``) CATEGORY value. - - - FLOAT64 - the predicted (with above - ``prediction_interval``) FLOAT64 value. - tables_model_column_info (MutableSequence[google.cloud.automl_v1beta1.types.TablesModelColumnInfo]): - Output only. Auxiliary information for each of the model's - - [input_feature_column_specs][google.cloud.automl.v1beta1.TablesModelMetadata.input_feature_column_specs] - with respect to this particular prediction. If no other - fields than - - [column_spec_name][google.cloud.automl.v1beta1.TablesModelColumnInfo.column_spec_name] - and - - [column_display_name][google.cloud.automl.v1beta1.TablesModelColumnInfo.column_display_name] - would be populated, then this whole field is not. - baseline_score (float): - Output only. Stores the prediction score for - the baseline example, which is defined as the - example with all values set to their baseline - values. This is used as part of the Sampled - Shapley explanation of the model's prediction. - This field is populated only when feature - importance is requested. For regression models, - this holds the baseline prediction for the - baseline example. For classification models, - this holds the baseline prediction for the - baseline example for the argmax class. - """ - - score: float = proto.Field( - proto.FLOAT, - number=1, - ) - prediction_interval: ranges.DoubleRange = proto.Field( - proto.MESSAGE, - number=4, - message=ranges.DoubleRange, - ) - value: struct_pb2.Value = proto.Field( - proto.MESSAGE, - number=2, - message=struct_pb2.Value, - ) - tables_model_column_info: MutableSequence['TablesModelColumnInfo'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='TablesModelColumnInfo', - ) - baseline_score: float = proto.Field( - proto.FLOAT, - number=5, - ) - - -class TablesModelColumnInfo(proto.Message): - r"""An information specific to given column and Tables Model, in - context of the Model and the predictions created by it. - - Attributes: - column_spec_name (str): - Output only. The name of the ColumnSpec - describing the column. Not populated when this - proto is outputted to BigQuery. - column_display_name (str): - Output only. The display name of the column (same as the - display_name of its ColumnSpec). - feature_importance (float): - Output only. When given as part of a Model (always - populated): Measurement of how much model predictions - correctness on the TEST data depend on values in this - column. A value between 0 and 1, higher means higher - influence. These values are normalized - for all input - feature columns of a given model they add to 1. 
- - When given back by Predict (populated iff - [feature_importance - param][google.cloud.automl.v1beta1.PredictRequest.params] is - set) or Batch Predict (populated iff - [feature_importance][google.cloud.automl.v1beta1.PredictRequest.params] - param is set): Measurement of how impactful for the - prediction returned for the given row the value in this - column was. Specifically, the feature importance specifies - the marginal contribution that the feature made to the - prediction score compared to the baseline score. These - values are computed using the Sampled Shapley method. - """ - - column_spec_name: str = proto.Field( - proto.STRING, - number=1, - ) - column_display_name: str = proto.Field( - proto.STRING, - number=2, - ) - feature_importance: float = proto.Field( - proto.FLOAT, - number=3, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/temporal.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/temporal.py deleted file mode 100644 index 2687738b2305..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/temporal.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import duration_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.automl.v1beta1', - manifest={ - 'TimeSegment', - }, -) - - -class TimeSegment(proto.Message): - r"""A time period inside of an example that has a time dimension - (e.g. video). - - Attributes: - start_time_offset (google.protobuf.duration_pb2.Duration): - Start of the time segment (inclusive), - represented as the duration since the example - start. - end_time_offset (google.protobuf.duration_pb2.Duration): - End of the time segment (exclusive), - represented as the duration since the example - start. - """ - - start_time_offset: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=1, - message=duration_pb2.Duration, - ) - end_time_offset: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=2, - message=duration_pb2.Duration, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/text.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/text.py deleted file mode 100644 index adfc43ddefc1..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/text.py +++ /dev/null @@ -1,119 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
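The per-column feature importance described for TablesModelColumnInfo above only appears when explicitly requested. A minimal, hypothetical sketch of an online Tables prediction that opts in and then reads the attributions; the model name and feature values are placeholders, and it assumes the row's value order matches the model's input feature columns:

.. code-block:: python

    from google.cloud import automl_v1beta1

    prediction_client = automl_v1beta1.PredictionServiceClient()
    # Hypothetical resource name and feature values.
    model_name = "projects/my-project/locations/us-central1/models/TBL9876543210"
    payload = {"row": {"values": ["blue", 34.0, "US"]}}

    response = prediction_client.predict(
        name=model_name,
        payload=payload,
        params={"feature_importance": "true"},  # request Sampled Shapley attributions
    )
    for annotation in response.payload:
        for column_info in annotation.tables.tables_model_column_info:
            print(column_info.column_display_name, column_info.feature_importance)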
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.automl_v1beta1.types import classification - - -__protobuf__ = proto.module( - package='google.cloud.automl.v1beta1', - manifest={ - 'TextClassificationDatasetMetadata', - 'TextClassificationModelMetadata', - 'TextExtractionDatasetMetadata', - 'TextExtractionModelMetadata', - 'TextSentimentDatasetMetadata', - 'TextSentimentModelMetadata', - }, -) - - -class TextClassificationDatasetMetadata(proto.Message): - r"""Dataset metadata for classification. - - Attributes: - classification_type (google.cloud.automl_v1beta1.types.ClassificationType): - Required. Type of the classification problem. - """ - - classification_type: classification.ClassificationType = proto.Field( - proto.ENUM, - number=1, - enum=classification.ClassificationType, - ) - - -class TextClassificationModelMetadata(proto.Message): - r"""Model metadata that is specific to text classification. - - Attributes: - classification_type (google.cloud.automl_v1beta1.types.ClassificationType): - Output only. Classification type of the - dataset used to train this model. - """ - - classification_type: classification.ClassificationType = proto.Field( - proto.ENUM, - number=3, - enum=classification.ClassificationType, - ) - - -class TextExtractionDatasetMetadata(proto.Message): - r"""Dataset metadata that is specific to text extraction - """ - - -class TextExtractionModelMetadata(proto.Message): - r"""Model metadata that is specific to text extraction. - - Attributes: - model_hint (str): - Indicates the scope of model use case. - - - ``default``: Use to train a general text extraction - model. Default value. - - - ``health_care``: Use to train a text extraction model - that is tuned for healthcare applications. - """ - - model_hint: str = proto.Field( - proto.STRING, - number=3, - ) - - -class TextSentimentDatasetMetadata(proto.Message): - r"""Dataset metadata for text sentiment. - - Attributes: - sentiment_max (int): - Required. A sentiment is expressed as an integer ordinal, - where higher value means a more positive sentiment. The - range of sentiments that will be used is between 0 and - sentiment_max (inclusive on both ends), and all the values - in the range must be represented in the dataset before a - model can be created. sentiment_max value must be between 1 - and 10 (inclusive). - """ - - sentiment_max: int = proto.Field( - proto.INT32, - number=1, - ) - - -class TextSentimentModelMetadata(proto.Message): - r"""Model metadata that is specific to text sentiment. 
- """ - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/text_extraction.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/text_extraction.py deleted file mode 100644 index 02769a259b6b..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/text_extraction.py +++ /dev/null @@ -1,125 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.automl_v1beta1.types import text_segment as gca_text_segment - - -__protobuf__ = proto.module( - package='google.cloud.automl.v1beta1', - manifest={ - 'TextExtractionAnnotation', - 'TextExtractionEvaluationMetrics', - }, -) - - -class TextExtractionAnnotation(proto.Message): - r"""Annotation for identifying spans of text. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - text_segment (google.cloud.automl_v1beta1.types.TextSegment): - An entity annotation will set this, which is - the part of the original text to which the - annotation pertains. - - This field is a member of `oneof`_ ``annotation``. - score (float): - Output only. A confidence estimate between - 0.0 and 1.0. A higher value means greater - confidence in correctness of the annotation. - """ - - text_segment: gca_text_segment.TextSegment = proto.Field( - proto.MESSAGE, - number=3, - oneof='annotation', - message=gca_text_segment.TextSegment, - ) - score: float = proto.Field( - proto.FLOAT, - number=1, - ) - - -class TextExtractionEvaluationMetrics(proto.Message): - r"""Model evaluation metrics for text extraction problems. - - Attributes: - au_prc (float): - Output only. The Area under precision recall - curve metric. - confidence_metrics_entries (MutableSequence[google.cloud.automl_v1beta1.types.TextExtractionEvaluationMetrics.ConfidenceMetricsEntry]): - Output only. Metrics that have confidence - thresholds. Precision-recall curve can be - derived from it. - """ - - class ConfidenceMetricsEntry(proto.Message): - r"""Metrics for a single confidence threshold. - - Attributes: - confidence_threshold (float): - Output only. The confidence threshold value - used to compute the metrics. Only annotations - with score of at least this threshold are - considered to be ones the model would return. - recall (float): - Output only. Recall under the given - confidence threshold. - precision (float): - Output only. Precision under the given - confidence threshold. - f1_score (float): - Output only. The harmonic mean of recall and - precision. 
- """ - - confidence_threshold: float = proto.Field( - proto.FLOAT, - number=1, - ) - recall: float = proto.Field( - proto.FLOAT, - number=3, - ) - precision: float = proto.Field( - proto.FLOAT, - number=4, - ) - f1_score: float = proto.Field( - proto.FLOAT, - number=5, - ) - - au_prc: float = proto.Field( - proto.FLOAT, - number=1, - ) - confidence_metrics_entries: MutableSequence[ConfidenceMetricsEntry] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=ConfidenceMetricsEntry, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/text_segment.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/text_segment.py deleted file mode 100644 index 6c6a50806fd4..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/text_segment.py +++ /dev/null @@ -1,63 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.automl.v1beta1', - manifest={ - 'TextSegment', - }, -) - - -class TextSegment(proto.Message): - r"""A contiguous part of a text (string), assuming it has an - UTF-8 NFC encoding. - - Attributes: - content (str): - Output only. The content of the TextSegment. - start_offset (int): - Required. Zero-based character index of the - first character of the text segment (counting - characters from the beginning of the text). - end_offset (int): - Required. Zero-based character index of the first character - past the end of the text segment (counting character from - the beginning of the text). The character at the end_offset - is NOT included in the text segment. - """ - - content: str = proto.Field( - proto.STRING, - number=3, - ) - start_offset: int = proto.Field( - proto.INT64, - number=1, - ) - end_offset: int = proto.Field( - proto.INT64, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/text_sentiment.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/text_sentiment.py deleted file mode 100644 index c6acc768d097..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/text_sentiment.py +++ /dev/null @@ -1,139 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.automl_v1beta1.types import classification - - -__protobuf__ = proto.module( - package='google.cloud.automl.v1beta1', - manifest={ - 'TextSentimentAnnotation', - 'TextSentimentEvaluationMetrics', - }, -) - - -class TextSentimentAnnotation(proto.Message): - r"""Contains annotation details specific to text sentiment. - - Attributes: - sentiment (int): - Output only. The sentiment, with the same semantics as given to - the - [AutoMl.ImportData][google.cloud.automl.v1beta1.AutoMl.ImportData] - when populating the dataset from which the model used for - the prediction had been trained. The sentiment values are - between 0 and - Dataset.text_sentiment_dataset_metadata.sentiment_max - (inclusive), with higher value meaning more positive - sentiment. They are completely relative, i.e. 0 means least - positive sentiment and sentiment_max means the most positive - from the sentiments present in the training data. Therefore - e.g. if the training data had only negative sentiment, then - sentiment_max would still be negative (although least - negative). The sentiment shouldn't be confused with "score" - or "magnitude" from the previous Natural Language Sentiment - Analysis API. - """ - - sentiment: int = proto.Field( - proto.INT32, - number=1, - ) - - -class TextSentimentEvaluationMetrics(proto.Message): - r"""Model evaluation metrics for text sentiment problems. - - Attributes: - precision (float): - Output only. Precision. - recall (float): - Output only. Recall. - f1_score (float): - Output only. The harmonic mean of recall and - precision. - mean_absolute_error (float): - Output only. Mean absolute error. Only set - for the overall model evaluation, not for - evaluation of a single annotation spec. - mean_squared_error (float): - Output only. Mean squared error. Only set for - the overall model evaluation, not for evaluation - of a single annotation spec. - linear_kappa (float): - Output only. Linear weighted kappa. Only set - for the overall model evaluation, not for - evaluation of a single annotation spec. - quadratic_kappa (float): - Output only. Quadratic weighted kappa. Only - set for the overall model evaluation, not for - evaluation of a single annotation spec. - confusion_matrix (google.cloud.automl_v1beta1.types.ClassificationEvaluationMetrics.ConfusionMatrix): - Output only. Confusion matrix of the - evaluation. Only set for the overall model - evaluation, not for evaluation of a single - annotation spec. - annotation_spec_id (MutableSequence[str]): - Output only. The annotation spec ids used for - this evaluation. Deprecated.
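Because the sentiment scale documented above is relative to the training data rather than absolute, a raw value is best read as a position between 0 and ``sentiment_max``. A toy interpretation (all values invented for illustration):

.. code-block:: python

    sentiment = 3      # TextSentimentAnnotation.sentiment, toy value
    sentiment_max = 4  # Dataset.text_sentiment_dataset_metadata.sentiment_max, toy value

    # 0.75: close to the most positive sentiment present in the training
    # data, which says nothing about absolute positivity.
    relative_position = sentiment / sentiment_max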
- """ - - precision: float = proto.Field( - proto.FLOAT, - number=1, - ) - recall: float = proto.Field( - proto.FLOAT, - number=2, - ) - f1_score: float = proto.Field( - proto.FLOAT, - number=3, - ) - mean_absolute_error: float = proto.Field( - proto.FLOAT, - number=4, - ) - mean_squared_error: float = proto.Field( - proto.FLOAT, - number=5, - ) - linear_kappa: float = proto.Field( - proto.FLOAT, - number=6, - ) - quadratic_kappa: float = proto.Field( - proto.FLOAT, - number=7, - ) - confusion_matrix: classification.ClassificationEvaluationMetrics.ConfusionMatrix = proto.Field( - proto.MESSAGE, - number=8, - message=classification.ClassificationEvaluationMetrics.ConfusionMatrix, - ) - annotation_spec_id: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=9, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/translation.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/translation.py deleted file mode 100644 index 2df83a1169bc..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/translation.py +++ /dev/null @@ -1,125 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.automl_v1beta1.types import data_items - - -__protobuf__ = proto.module( - package='google.cloud.automl.v1beta1', - manifest={ - 'TranslationDatasetMetadata', - 'TranslationEvaluationMetrics', - 'TranslationModelMetadata', - 'TranslationAnnotation', - }, -) - - -class TranslationDatasetMetadata(proto.Message): - r"""Dataset metadata that is specific to translation. - - Attributes: - source_language_code (str): - Required. The BCP-47 language code of the - source language. - target_language_code (str): - Required. The BCP-47 language code of the - target language. - """ - - source_language_code: str = proto.Field( - proto.STRING, - number=1, - ) - target_language_code: str = proto.Field( - proto.STRING, - number=2, - ) - - -class TranslationEvaluationMetrics(proto.Message): - r"""Evaluation metrics for the dataset. - - Attributes: - bleu_score (float): - Output only. BLEU score. - base_bleu_score (float): - Output only. BLEU score for base model. - """ - - bleu_score: float = proto.Field( - proto.DOUBLE, - number=1, - ) - base_bleu_score: float = proto.Field( - proto.DOUBLE, - number=2, - ) - - -class TranslationModelMetadata(proto.Message): - r"""Model metadata that is specific to translation. - - Attributes: - base_model (str): - The resource name of the model to use as a baseline to train - the custom model. If unset, we use the default base model - provided by Google Translate. Format: - ``projects/{project_id}/locations/{location_id}/models/{model_id}`` - source_language_code (str): - Output only. Inferred from the dataset. 
- The source language (the BCP-47 language code) - that is used for training. - target_language_code (str): - Output only. The target language (the BCP-47 - language code) that is used for training. - """ - - base_model: str = proto.Field( - proto.STRING, - number=1, - ) - source_language_code: str = proto.Field( - proto.STRING, - number=2, - ) - target_language_code: str = proto.Field( - proto.STRING, - number=3, - ) - - -class TranslationAnnotation(proto.Message): - r"""Annotation details specific to translation. - - Attributes: - translated_content (google.cloud.automl_v1beta1.types.TextSnippet): - Output only. The translated content. - """ - - translated_content: data_items.TextSnippet = proto.Field( - proto.MESSAGE, - number=1, - message=data_items.TextSnippet, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/video.py b/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/video.py deleted file mode 100644 index a85abc85884b..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/google/cloud/automl_v1beta1/types/video.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.automl.v1beta1', - manifest={ - 'VideoClassificationDatasetMetadata', - 'VideoObjectTrackingDatasetMetadata', - 'VideoClassificationModelMetadata', - 'VideoObjectTrackingModelMetadata', - }, -) - - -class VideoClassificationDatasetMetadata(proto.Message): - r"""Dataset metadata specific to video classification. - All Video Classification datasets are treated as multi-label. - - """ - - -class VideoObjectTrackingDatasetMetadata(proto.Message): - r"""Dataset metadata specific to video object tracking. - """ - - -class VideoClassificationModelMetadata(proto.Message): - r"""Model metadata specific to video classification. - """ - - -class VideoObjectTrackingModelMetadata(proto.Message): - r"""Model metadata specific to video object tracking.
- """ - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/mypy.ini b/owl-bot-staging/google-cloud-automl/v1beta1/mypy.ini deleted file mode 100644 index 574c5aed394b..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/noxfile.py b/owl-bot-staging/google-cloud-automl/v1beta1/noxfile.py deleted file mode 100644 index d1158f261fb9..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/noxfile.py +++ /dev/null @@ -1,280 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import pathlib -import re -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", - "3.12", - "3.13", -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = 'google-cloud-automl' - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.13" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "prerelease_deps", -] - -@nox.session(python=ALL_PYTHON) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def unit(session, protobuf_implementation): - """Run the unit test suite.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") - - # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. - # The 'cpp' implementation requires Protobuf<4. 
- if protobuf_implementation == "cpp": - session.install("protobuf<4") - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/automl_v1beta1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - -@nox.session(python=ALL_PYTHON[-1]) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def prerelease_deps(session, protobuf_implementation): - """Run the unit test suite against pre-release versions of dependencies.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - # Install test environment dependencies - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - - # Install the package without dependencies - session.install('-e', '.', '--no-deps') - - # We test the minimum dependency versions using the minimum Python - # version so the lowest python runtime that we test has a corresponding constraints - # file, located at `testing/constraints-<lowest python version>.txt`, which contains all of the - # dependencies and extras. - with open( - CURRENT_DIRECTORY - / "testing" - / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. - constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - session.install(*constraints_deps) - - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - # Exclude grpcio 1.67.0rc1, which does not support Python 3.13 - "grpcio!=1.67.0rc1", - "grpcio-status", - "protobuf", - "proto-plus", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) - - # Remaining dependencies - other_deps = [ - "requests", - ] - session.install(*other_deps) - - # Print out prerelease package versions - - session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") - session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("python", "-c", "import grpc; print(grpc.__version__)") - session.run( - "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" - ) - session.run( - "python", "-c", "import proto; print(proto.__version__)" - ) - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/automl_v1beta1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data.
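To make the constraint-parsing step in ``prerelease_deps`` above concrete: the regular expression keeps only lines that pin a package with ``==`` and captures the package name. A self-contained check (the sample constraint lines are invented for illustration):

.. code-block:: python

    import re

    constraints_text = "google-api-core==1.34.0\nproto-plus==1.22.3\nrequests\n"

    # Same pattern as in prerelease_deps: capture a token only when it is
    # immediately followed by "==" (i.e. an exact pin); unpinned lines drop out.
    constraints_deps = [
        match.group(1)
        for match in re.finditer(r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE)
    ]
    assert constraints_deps == ["google-api-core", "proto-plus"]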
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_create_dataset_async.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_create_dataset_async.py deleted file mode 100644 index 01b33b1bab43..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_create_dataset_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDataset -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_CreateDataset_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -async def sample_create_dataset(): - # Create a client - client = automl_v1beta1.AutoMlAsyncClient() - - # Initialize request argument(s) - dataset = automl_v1beta1.Dataset() - dataset.translation_dataset_metadata.source_language_code = "source_language_code_value" - dataset.translation_dataset_metadata.target_language_code = "target_language_code_value" - - request = automl_v1beta1.CreateDatasetRequest( - parent="parent_value", - dataset=dataset, - ) - - # Make the request - response = await client.create_dataset(request=request) - - # Handle the response - print(response) - -# [END automl_v1beta1_generated_AutoMl_CreateDataset_async] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_create_dataset_sync.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_create_dataset_sync.py deleted file mode 100644 index 8412f1fbb4a6..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_create_dataset_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDataset -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_CreateDataset_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -def sample_create_dataset(): - # Create a client - client = automl_v1beta1.AutoMlClient() - - # Initialize request argument(s) - dataset = automl_v1beta1.Dataset() - dataset.translation_dataset_metadata.source_language_code = "source_language_code_value" - dataset.translation_dataset_metadata.target_language_code = "target_language_code_value" - - request = automl_v1beta1.CreateDatasetRequest( - parent="parent_value", - dataset=dataset, - ) - - # Make the request - response = client.create_dataset(request=request) - - # Handle the response - print(response) - -# [END automl_v1beta1_generated_AutoMl_CreateDataset_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_create_model_async.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_create_model_async.py deleted file mode 100644 index 3ca07c688391..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_create_model_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateModel -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_CreateModel_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -async def sample_create_model(): - # Create a client - client = automl_v1beta1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1beta1.CreateModelRequest( - parent="parent_value", - ) - - # Make the request - operation = client.create_model(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END automl_v1beta1_generated_AutoMl_CreateModel_async] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_create_model_sync.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_create_model_sync.py deleted file mode 100644 index 084a33f3b487..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_create_model_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateModel -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_CreateModel_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -def sample_create_model(): - # Create a client - client = automl_v1beta1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1beta1.CreateModelRequest( - parent="parent_value", - ) - - # Make the request - operation = client.create_model(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END automl_v1beta1_generated_AutoMl_CreateModel_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_delete_dataset_async.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_delete_dataset_async.py deleted file mode 100644 index 2c975fd9736c..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_delete_dataset_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDataset -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_DeleteDataset_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -async def sample_delete_dataset(): - # Create a client - client = automl_v1beta1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1beta1.DeleteDatasetRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_dataset(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END automl_v1beta1_generated_AutoMl_DeleteDataset_async] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_delete_dataset_sync.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_delete_dataset_sync.py deleted file mode 100644 index 4b0c122ca14a..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_delete_dataset_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDataset -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_DeleteDataset_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -def sample_delete_dataset(): - # Create a client - client = automl_v1beta1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1beta1.DeleteDatasetRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_dataset(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END automl_v1beta1_generated_AutoMl_DeleteDataset_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_delete_model_async.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_delete_model_async.py deleted file mode 100644 index c644cade1189..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_delete_model_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteModel -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_DeleteModel_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -async def sample_delete_model(): - # Create a client - client = automl_v1beta1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1beta1.DeleteModelRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_model(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END automl_v1beta1_generated_AutoMl_DeleteModel_async] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_delete_model_sync.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_delete_model_sync.py deleted file mode 100644 index bca617a91c7d..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_delete_model_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteModel -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_DeleteModel_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -def sample_delete_model(): - # Create a client - client = automl_v1beta1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1beta1.DeleteModelRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_model(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END automl_v1beta1_generated_AutoMl_DeleteModel_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_deploy_model_async.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_deploy_model_async.py deleted file mode 100644 index 063dfb982bbe..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_deploy_model_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeployModel -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_DeployModel_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -async def sample_deploy_model(): - # Create a client - client = automl_v1beta1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1beta1.DeployModelRequest( - name="name_value", - ) - - # Make the request - operation = client.deploy_model(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END automl_v1beta1_generated_AutoMl_DeployModel_async] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_deploy_model_sync.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_deploy_model_sync.py deleted file mode 100644 index 334b62e5e94f..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_deploy_model_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeployModel -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_DeployModel_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -def sample_deploy_model(): - # Create a client - client = automl_v1beta1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1beta1.DeployModelRequest( - name="name_value", - ) - - # Make the request - operation = client.deploy_model(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END automl_v1beta1_generated_AutoMl_DeployModel_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_export_data_async.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_export_data_async.py deleted file mode 100644 index b687c82ec49b..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_export_data_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ExportData -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_ExportData_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -async def sample_export_data(): - # Create a client - client = automl_v1beta1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1beta1.ExportDataRequest( - name="name_value", - ) - - # Make the request - operation = client.export_data(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END automl_v1beta1_generated_AutoMl_ExportData_async] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_export_data_sync.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_export_data_sync.py deleted file mode 100644 index b3c8fe209f97..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_export_data_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ExportData -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_ExportData_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -def sample_export_data(): - # Create a client - client = automl_v1beta1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1beta1.ExportDataRequest( - name="name_value", - ) - - # Make the request - operation = client.export_data(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END automl_v1beta1_generated_AutoMl_ExportData_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_export_evaluated_examples_async.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_export_evaluated_examples_async.py deleted file mode 100644 index dcaaf6f1cb6d..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_export_evaluated_examples_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ExportEvaluatedExamples -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_ExportEvaluatedExamples_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -async def sample_export_evaluated_examples(): - # Create a client - client = automl_v1beta1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1beta1.ExportEvaluatedExamplesRequest( - name="name_value", - ) - - # Make the request - operation = client.export_evaluated_examples(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END automl_v1beta1_generated_AutoMl_ExportEvaluatedExamples_async] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_export_evaluated_examples_sync.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_export_evaluated_examples_sync.py deleted file mode 100644 index 4e7a7d1c5a54..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_export_evaluated_examples_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ExportEvaluatedExamples -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_ExportEvaluatedExamples_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -def sample_export_evaluated_examples(): - # Create a client - client = automl_v1beta1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1beta1.ExportEvaluatedExamplesRequest( - name="name_value", - ) - - # Make the request - operation = client.export_evaluated_examples(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END automl_v1beta1_generated_AutoMl_ExportEvaluatedExamples_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_export_model_async.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_export_model_async.py deleted file mode 100644 index 7f266723fa3c..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_export_model_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ExportModel -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_ExportModel_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -async def sample_export_model(): - # Create a client - client = automl_v1beta1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1beta1.ExportModelRequest( - name="name_value", - ) - - # Make the request - operation = client.export_model(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END automl_v1beta1_generated_AutoMl_ExportModel_async] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_export_model_sync.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_export_model_sync.py deleted file mode 100644 index f071389d8945..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_export_model_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ExportModel -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_ExportModel_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -def sample_export_model(): - # Create a client - client = automl_v1beta1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1beta1.ExportModelRequest( - name="name_value", - ) - - # Make the request - operation = client.export_model(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END automl_v1beta1_generated_AutoMl_ExportModel_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_get_annotation_spec_async.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_get_annotation_spec_async.py deleted file mode 100644 index 926d1e1657f2..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_get_annotation_spec_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetAnnotationSpec -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_GetAnnotationSpec_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -async def sample_get_annotation_spec(): - # Create a client - client = automl_v1beta1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1beta1.GetAnnotationSpecRequest( - name="name_value", - ) - - # Make the request - response = await client.get_annotation_spec(request=request) - - # Handle the response - print(response) - -# [END automl_v1beta1_generated_AutoMl_GetAnnotationSpec_async] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_get_annotation_spec_sync.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_get_annotation_spec_sync.py deleted file mode 100644 index 646f1ce58667..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_get_annotation_spec_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetAnnotationSpec -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_GetAnnotationSpec_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -def sample_get_annotation_spec(): - # Create a client - client = automl_v1beta1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1beta1.GetAnnotationSpecRequest( - name="name_value", - ) - - # Make the request - response = client.get_annotation_spec(request=request) - - # Handle the response - print(response) - -# [END automl_v1beta1_generated_AutoMl_GetAnnotationSpec_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_get_column_spec_async.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_get_column_spec_async.py deleted file mode 100644 index 76add81831f3..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_get_column_spec_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetColumnSpec -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_GetColumnSpec_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
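The ``name="name_value"`` placeholders in these get samples stand in for full resource names such as ``projects/{project}/locations/{location}/datasets/{dataset}/annotationSpecs/{annotation_spec}``. Standard GAPIC codegen emits a path helper per resource; assuming the usual helper exists on ``AutoMlClient`` (verify in your installed package), a name can be built like this sketch, with placeholder IDs throughout:

.. code-block:: python

    from google.cloud import automl_v1beta1

    client = automl_v1beta1.AutoMlClient()

    # Assumed generated helper; builds the canonical resource name string.
    name = client.annotation_spec_path(
        project="my-project",
        location="us-central1",
        dataset="my-dataset-id",
        annotation_spec="my-spec-id",
    )

    response = client.get_annotation_spec(
        request=automl_v1beta1.GetAnnotationSpecRequest(name=name)
    )
    print(response)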
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -async def sample_get_column_spec(): - # Create a client - client = automl_v1beta1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1beta1.GetColumnSpecRequest( - name="name_value", - ) - - # Make the request - response = await client.get_column_spec(request=request) - - # Handle the response - print(response) - -# [END automl_v1beta1_generated_AutoMl_GetColumnSpec_async] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_get_column_spec_sync.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_get_column_spec_sync.py deleted file mode 100644 index 4db4c89f8669..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_get_column_spec_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetColumnSpec -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_GetColumnSpec_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -def sample_get_column_spec(): - # Create a client - client = automl_v1beta1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1beta1.GetColumnSpecRequest( - name="name_value", - ) - - # Make the request - response = client.get_column_spec(request=request) - - # Handle the response - print(response) - -# [END automl_v1beta1_generated_AutoMl_GetColumnSpec_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_get_dataset_async.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_get_dataset_async.py deleted file mode 100644 index b480166bc2f0..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_get_dataset_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDataset -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_GetDataset_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -async def sample_get_dataset(): - # Create a client - client = automl_v1beta1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1beta1.GetDatasetRequest( - name="name_value", - ) - - # Make the request - response = await client.get_dataset(request=request) - - # Handle the response - print(response) - -# [END automl_v1beta1_generated_AutoMl_GetDataset_async] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_get_dataset_sync.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_get_dataset_sync.py deleted file mode 100644 index 0ec8f291c5dd..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_get_dataset_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDataset -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_GetDataset_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -def sample_get_dataset(): - # Create a client - client = automl_v1beta1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1beta1.GetDatasetRequest( - name="name_value", - ) - - # Make the request - response = client.get_dataset(request=request) - - # Handle the response - print(response) - -# [END automl_v1beta1_generated_AutoMl_GetDataset_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_get_model_async.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_get_model_async.py deleted file mode 100644 index 4bdfb9bed299..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_get_model_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetModel -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_GetModel_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
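Each header above notes that a regional endpoint may be required. A minimal sketch of specifying one via ``client_options``; the ``eu-automl.googleapis.com`` endpoint is an assumption drawn from AutoML's EU region documentation, not from this diff, and the resource name is a placeholder:

.. code-block:: python

    from google.api_core.client_options import ClientOptions
    from google.cloud import automl_v1beta1

    # Point the client at the (assumed) EU regional endpoint.
    client = automl_v1beta1.AutoMlClient(
        client_options=ClientOptions(api_endpoint="eu-automl.googleapis.com")
    )

    response = client.get_dataset(
        request=automl_v1beta1.GetDatasetRequest(
            name="projects/my-project/locations/eu/datasets/my-dataset-id"
        )
    )
    print(response)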
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -async def sample_get_model(): - # Create a client - client = automl_v1beta1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1beta1.GetModelRequest( - name="name_value", - ) - - # Make the request - response = await client.get_model(request=request) - - # Handle the response - print(response) - -# [END automl_v1beta1_generated_AutoMl_GetModel_async] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_get_model_evaluation_async.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_get_model_evaluation_async.py deleted file mode 100644 index f1e06cdd25bf..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_get_model_evaluation_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetModelEvaluation -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_GetModelEvaluation_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -async def sample_get_model_evaluation(): - # Create a client - client = automl_v1beta1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1beta1.GetModelEvaluationRequest( - name="name_value", - ) - - # Make the request - response = await client.get_model_evaluation(request=request) - - # Handle the response - print(response) - -# [END automl_v1beta1_generated_AutoMl_GetModelEvaluation_async] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_get_model_evaluation_sync.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_get_model_evaluation_sync.py deleted file mode 100644 index bedac73412d9..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_get_model_evaluation_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetModelEvaluation -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_GetModelEvaluation_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -def sample_get_model_evaluation(): - # Create a client - client = automl_v1beta1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1beta1.GetModelEvaluationRequest( - name="name_value", - ) - - # Make the request - response = client.get_model_evaluation(request=request) - - # Handle the response - print(response) - -# [END automl_v1beta1_generated_AutoMl_GetModelEvaluation_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_get_model_sync.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_get_model_sync.py deleted file mode 100644 index 84cd166bcf70..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_get_model_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetModel -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_GetModel_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -def sample_get_model(): - # Create a client - client = automl_v1beta1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1beta1.GetModelRequest( - name="name_value", - ) - - # Make the request - response = client.get_model(request=request) - - # Handle the response - print(response) - -# [END automl_v1beta1_generated_AutoMl_GetModel_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_get_table_spec_async.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_get_table_spec_async.py deleted file mode 100644 index bed73d9bf69e..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_get_table_spec_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetTableSpec -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_GetTableSpec_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -async def sample_get_table_spec(): - # Create a client - client = automl_v1beta1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1beta1.GetTableSpecRequest( - name="name_value", - ) - - # Make the request - response = await client.get_table_spec(request=request) - - # Handle the response - print(response) - -# [END automl_v1beta1_generated_AutoMl_GetTableSpec_async] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_get_table_spec_sync.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_get_table_spec_sync.py deleted file mode 100644 index 946ba47f7bfa..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_get_table_spec_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetTableSpec -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_GetTableSpec_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -def sample_get_table_spec(): - # Create a client - client = automl_v1beta1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1beta1.GetTableSpecRequest( - name="name_value", - ) - - # Make the request - response = client.get_table_spec(request=request) - - # Handle the response - print(response) - -# [END automl_v1beta1_generated_AutoMl_GetTableSpec_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_import_data_async.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_import_data_async.py deleted file mode 100644 index 05d7e71c47ec..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_import_data_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ImportData -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_ImportData_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -async def sample_import_data(): - # Create a client - client = automl_v1beta1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1beta1.ImportDataRequest( - name="name_value", - ) - - # Make the request - operation = client.import_data(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END automl_v1beta1_generated_AutoMl_ImportData_async] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_import_data_sync.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_import_data_sync.py deleted file mode 100644 index 8d0e34746aab..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_import_data_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ImportData -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_ImportData_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -def sample_import_data(): - # Create a client - client = automl_v1beta1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1beta1.ImportDataRequest( - name="name_value", - ) - - # Make the request - operation = client.import_data(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END automl_v1beta1_generated_AutoMl_ImportData_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_list_column_specs_async.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_list_column_specs_async.py deleted file mode 100644 index 22f41c136817..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_list_column_specs_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListColumnSpecs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_ListColumnSpecs_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
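The ImportData samples initialize only ``name``, but a real import also needs to say where the data lives. A sketch assuming the v1beta1 ``InputConfig``/``GcsSource`` message shapes; the bucket URI is a placeholder:

.. code-block:: python

    from google.cloud import automl_v1beta1


    def import_csv(dataset_name: str) -> None:
        client = automl_v1beta1.AutoMlClient()

        request = automl_v1beta1.ImportDataRequest(
            name=dataset_name,
            input_config=automl_v1beta1.InputConfig(
                gcs_source=automl_v1beta1.GcsSource(
                    input_uris=["gs://my-bucket/training_data.csv"]
                )
            ),
        )

        # ImportData is a long-running operation, like the generated sample.
        operation = client.import_data(request=request)
        print("Waiting for operation to complete...")
        print(operation.result())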
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -async def sample_list_column_specs(): - # Create a client - client = automl_v1beta1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1beta1.ListColumnSpecsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_column_specs(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END automl_v1beta1_generated_AutoMl_ListColumnSpecs_async] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_list_column_specs_sync.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_list_column_specs_sync.py deleted file mode 100644 index 4b1d29bbe20d..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_list_column_specs_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListColumnSpecs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_ListColumnSpecs_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -def sample_list_column_specs(): - # Create a client - client = automl_v1beta1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1beta1.ListColumnSpecsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_column_specs(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END automl_v1beta1_generated_AutoMl_ListColumnSpecs_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_list_datasets_async.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_list_datasets_async.py deleted file mode 100644 index 2836d150316f..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_list_datasets_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDatasets -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_ListDatasets_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
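The list samples iterate items transparently; the returned pager also exposes page-level iteration, which helps when you want per-page batching. A sketch assuming the standard generated pager surface (``pages`` on the pager, ``page_size`` on the request):

.. code-block:: python

    from google.cloud import automl_v1beta1


    def list_column_specs_by_page(parent: str) -> None:
        client = automl_v1beta1.AutoMlClient()

        request = automl_v1beta1.ListColumnSpecsRequest(
            parent=parent,
            page_size=50,  # the server may return fewer items per page
        )

        pager = client.list_column_specs(request=request)

        # Iterate page by page instead of item by item; further pages are
        # fetched lazily as iteration proceeds.
        for page in pager.pages:
            print(f"Fetched a page with {len(page.column_specs)} column specs")
            for spec in page.column_specs:
                print(spec.name)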
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -async def sample_list_datasets(): - # Create a client - client = automl_v1beta1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1beta1.ListDatasetsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_datasets(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END automl_v1beta1_generated_AutoMl_ListDatasets_async] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_list_datasets_sync.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_list_datasets_sync.py deleted file mode 100644 index 4e2c8d813649..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_list_datasets_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDatasets -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_ListDatasets_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
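The async list samples drive the pager with ``async for``; collecting everything into a list is a common wrapper around that. A minimal sketch; note the ``await`` on the list call, which reflects the async client surface in recent generator versions (an assumption, since the older generated samples iterate without it):

.. code-block:: python

    import asyncio

    from google.cloud import automl_v1beta1


    async def collect_datasets(parent: str) -> list:
        client = automl_v1beta1.AutoMlAsyncClient()

        pager = await client.list_datasets(
            request=automl_v1beta1.ListDatasetsRequest(parent=parent)
        )

        # The async pager fetches subsequent pages lazily during iteration.
        return [dataset async for dataset in pager]


    # Placeholder parent:
    # datasets = asyncio.run(collect_datasets("projects/my-project/locations/us-central1"))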
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -def sample_list_datasets(): - # Create a client - client = automl_v1beta1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1beta1.ListDatasetsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_datasets(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END automl_v1beta1_generated_AutoMl_ListDatasets_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_list_model_evaluations_async.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_list_model_evaluations_async.py deleted file mode 100644 index 5db25beab36f..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_list_model_evaluations_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListModelEvaluations -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_ListModelEvaluations_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -async def sample_list_model_evaluations(): - # Create a client - client = automl_v1beta1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1beta1.ListModelEvaluationsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_model_evaluations(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END automl_v1beta1_generated_AutoMl_ListModelEvaluations_async] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_list_model_evaluations_sync.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_list_model_evaluations_sync.py deleted file mode 100644 index e44dedba3dd8..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_list_model_evaluations_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListModelEvaluations -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_ListModelEvaluations_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -def sample_list_model_evaluations(): - # Create a client - client = automl_v1beta1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1beta1.ListModelEvaluationsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_model_evaluations(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END automl_v1beta1_generated_AutoMl_ListModelEvaluations_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_list_models_async.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_list_models_async.py deleted file mode 100644 index ed35474bd821..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_list_models_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListModels -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_ListModels_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -async def sample_list_models(): - # Create a client - client = automl_v1beta1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1beta1.ListModelsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_models(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END automl_v1beta1_generated_AutoMl_ListModels_async] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_list_models_sync.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_list_models_sync.py deleted file mode 100644 index 2b87e5513c86..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_list_models_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListModels -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_ListModels_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -def sample_list_models(): - # Create a client - client = automl_v1beta1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1beta1.ListModelsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_models(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END automl_v1beta1_generated_AutoMl_ListModels_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_list_table_specs_async.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_list_table_specs_async.py deleted file mode 100644 index e640ae9a821b..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_list_table_specs_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListTableSpecs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_ListTableSpecs_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -async def sample_list_table_specs(): - # Create a client - client = automl_v1beta1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1beta1.ListTableSpecsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_table_specs(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END automl_v1beta1_generated_AutoMl_ListTableSpecs_async] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_list_table_specs_sync.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_list_table_specs_sync.py deleted file mode 100644 index e253bc186951..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_list_table_specs_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListTableSpecs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_ListTableSpecs_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -def sample_list_table_specs(): - # Create a client - client = automl_v1beta1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1beta1.ListTableSpecsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_table_specs(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END automl_v1beta1_generated_AutoMl_ListTableSpecs_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_undeploy_model_async.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_undeploy_model_async.py deleted file mode 100644 index 6cbf573a92bc..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_undeploy_model_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UndeployModel -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_UndeployModel_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -async def sample_undeploy_model(): - # Create a client - client = automl_v1beta1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1beta1.UndeployModelRequest( - name="name_value", - ) - - # Make the request - operation = client.undeploy_model(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END automl_v1beta1_generated_AutoMl_UndeployModel_async] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_undeploy_model_sync.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_undeploy_model_sync.py deleted file mode 100644 index cdcc9fc3b5e0..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_undeploy_model_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UndeployModel -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_UndeployModel_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -def sample_undeploy_model(): - # Create a client - client = automl_v1beta1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1beta1.UndeployModelRequest( - name="name_value", - ) - - # Make the request - operation = client.undeploy_model(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END automl_v1beta1_generated_AutoMl_UndeployModel_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_update_column_spec_async.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_update_column_spec_async.py deleted file mode 100644 index bf620440bbe5..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_update_column_spec_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateColumnSpec -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_UpdateColumnSpec_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -async def sample_update_column_spec(): - # Create a client - client = automl_v1beta1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1beta1.UpdateColumnSpecRequest( - ) - - # Make the request - response = await client.update_column_spec(request=request) - - # Handle the response - print(response) - -# [END automl_v1beta1_generated_AutoMl_UpdateColumnSpec_async] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_update_column_spec_sync.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_update_column_spec_sync.py deleted file mode 100644 index f9cf22be77c1..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_update_column_spec_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateColumnSpec -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_UpdateColumnSpec_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -def sample_update_column_spec(): - # Create a client - client = automl_v1beta1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1beta1.UpdateColumnSpecRequest( - ) - - # Make the request - response = client.update_column_spec(request=request) - - # Handle the response - print(response) - -# [END automl_v1beta1_generated_AutoMl_UpdateColumnSpec_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_update_dataset_async.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_update_dataset_async.py deleted file mode 100644 index 192b95bf4573..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_update_dataset_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDataset -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_UpdateDataset_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -async def sample_update_dataset(): - # Create a client - client = automl_v1beta1.AutoMlAsyncClient() - - # Initialize request argument(s) - dataset = automl_v1beta1.Dataset() - dataset.translation_dataset_metadata.source_language_code = "source_language_code_value" - dataset.translation_dataset_metadata.target_language_code = "target_language_code_value" - - request = automl_v1beta1.UpdateDatasetRequest( - dataset=dataset, - ) - - # Make the request - response = await client.update_dataset(request=request) - - # Handle the response - print(response) - -# [END automl_v1beta1_generated_AutoMl_UpdateDataset_async] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_update_dataset_sync.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_update_dataset_sync.py deleted file mode 100644 index 41764955375f..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_update_dataset_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDataset -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_UpdateDataset_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -def sample_update_dataset(): - # Create a client - client = automl_v1beta1.AutoMlClient() - - # Initialize request argument(s) - dataset = automl_v1beta1.Dataset() - dataset.translation_dataset_metadata.source_language_code = "source_language_code_value" - dataset.translation_dataset_metadata.target_language_code = "target_language_code_value" - - request = automl_v1beta1.UpdateDatasetRequest( - dataset=dataset, - ) - - # Make the request - response = client.update_dataset(request=request) - - # Handle the response - print(response) - -# [END automl_v1beta1_generated_AutoMl_UpdateDataset_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_update_table_spec_async.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_update_table_spec_async.py deleted file mode 100644 index b36014f4fb5e..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_update_table_spec_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateTableSpec -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_UpdateTableSpec_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -async def sample_update_table_spec(): - # Create a client - client = automl_v1beta1.AutoMlAsyncClient() - - # Initialize request argument(s) - request = automl_v1beta1.UpdateTableSpecRequest( - ) - - # Make the request - response = await client.update_table_spec(request=request) - - # Handle the response - print(response) - -# [END automl_v1beta1_generated_AutoMl_UpdateTableSpec_async] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_update_table_spec_sync.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_update_table_spec_sync.py deleted file mode 100644 index d61eccd264c9..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_auto_ml_update_table_spec_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateTableSpec -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_AutoMl_UpdateTableSpec_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -def sample_update_table_spec(): - # Create a client - client = automl_v1beta1.AutoMlClient() - - # Initialize request argument(s) - request = automl_v1beta1.UpdateTableSpecRequest( - ) - - # Make the request - response = client.update_table_spec(request=request) - - # Handle the response - print(response) - -# [END automl_v1beta1_generated_AutoMl_UpdateTableSpec_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_prediction_service_batch_predict_async.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_prediction_service_batch_predict_async.py deleted file mode 100644 index 8554c4b76368..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_prediction_service_batch_predict_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for BatchPredict -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_PredictionService_BatchPredict_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -async def sample_batch_predict(): - # Create a client - client = automl_v1beta1.PredictionServiceAsyncClient() - - # Initialize request argument(s) - request = automl_v1beta1.BatchPredictRequest( - name="name_value", - ) - - # Make the request - operation = await client.batch_predict(request=request) - - print("Waiting for operation to complete...") - - response = await operation.result() - - # Handle the response - print(response) - -# [END automl_v1beta1_generated_PredictionService_BatchPredict_async] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_prediction_service_batch_predict_sync.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_prediction_service_batch_predict_sync.py deleted file mode 100644 index f6d314ce4a80..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_prediction_service_batch_predict_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for BatchPredict -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_PredictionService_BatchPredict_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -def sample_batch_predict(): - # Create a client - client = automl_v1beta1.PredictionServiceClient() - - # Initialize request argument(s) - request = automl_v1beta1.BatchPredictRequest( - name="name_value", - ) - - # Make the request - operation = client.batch_predict(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END automl_v1beta1_generated_PredictionService_BatchPredict_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_prediction_service_predict_async.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_prediction_service_predict_async.py deleted file mode 100644 index afa2b0fd37f1..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_prediction_service_predict_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for Predict -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_PredictionService_Predict_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -async def sample_predict(): - # Create a client - client = automl_v1beta1.PredictionServiceAsyncClient() - - # Initialize request argument(s) - payload = automl_v1beta1.ExamplePayload() - payload.image.image_bytes = b'image_bytes_blob' - - request = automl_v1beta1.PredictRequest( - name="name_value", - payload=payload, - ) - - # Make the request - response = await client.predict(request=request) - - # Handle the response - print(response) - -# [END automl_v1beta1_generated_PredictionService_Predict_async] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_prediction_service_predict_sync.py b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_prediction_service_predict_sync.py deleted file mode 100644 index f0bc86adc2f6..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/automl_v1beta1_generated_prediction_service_predict_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for Predict -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-automl - - -# [START automl_v1beta1_generated_PredictionService_Predict_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import automl_v1beta1 - - -def sample_predict(): - # Create a client - client = automl_v1beta1.PredictionServiceClient() - - # Initialize request argument(s) - payload = automl_v1beta1.ExamplePayload() - payload.image.image_bytes = b'image_bytes_blob' - - request = automl_v1beta1.PredictRequest( - name="name_value", - payload=payload, - ) - - # Make the request - response = client.predict(request=request) - - # Handle the response - print(response) - -# [END automl_v1beta1_generated_PredictionService_Predict_sync] diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/snippet_metadata_google.cloud.automl.v1beta1.json b/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/snippet_metadata_google.cloud.automl.v1beta1.json deleted file mode 100644 index 3957ec9b1b9f..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/samples/generated_samples/snippet_metadata_google.cloud.automl.v1beta1.json +++ /dev/null @@ -1,4289 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.automl.v1beta1", - "version": "v1beta1" - } - ], - "language": "PYTHON", - "name": "google-cloud-automl", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient", - "shortName": "AutoMlAsyncClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient.create_dataset", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.CreateDataset", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "CreateDataset" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.CreateDatasetRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "dataset", - "type": "google.cloud.automl_v1beta1.types.Dataset" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1beta1.types.Dataset", - "shortName": "create_dataset" - }, - "description": "Sample for CreateDataset", - "file": "automl_v1beta1_generated_auto_ml_create_dataset_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_CreateDataset_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_create_dataset_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlClient", - "shortName": "AutoMlClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlClient.create_dataset", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.CreateDataset", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": 
"CreateDataset" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.CreateDatasetRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "dataset", - "type": "google.cloud.automl_v1beta1.types.Dataset" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1beta1.types.Dataset", - "shortName": "create_dataset" - }, - "description": "Sample for CreateDataset", - "file": "automl_v1beta1_generated_auto_ml_create_dataset_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_CreateDataset_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_create_dataset_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient", - "shortName": "AutoMlAsyncClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient.create_model", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.CreateModel", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "CreateModel" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.CreateModelRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "model", - "type": "google.cloud.automl_v1beta1.types.Model" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_model" - }, - "description": "Sample for CreateModel", - "file": "automl_v1beta1_generated_auto_ml_create_model_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_CreateModel_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_create_model_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlClient", - "shortName": "AutoMlClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlClient.create_model", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.CreateModel", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "CreateModel" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.CreateModelRequest" - }, - { - 
"name": "parent", - "type": "str" - }, - { - "name": "model", - "type": "google.cloud.automl_v1beta1.types.Model" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_model" - }, - "description": "Sample for CreateModel", - "file": "automl_v1beta1_generated_auto_ml_create_model_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_CreateModel_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_create_model_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient", - "shortName": "AutoMlAsyncClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient.delete_dataset", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.DeleteDataset", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "DeleteDataset" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.DeleteDatasetRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_dataset" - }, - "description": "Sample for DeleteDataset", - "file": "automl_v1beta1_generated_auto_ml_delete_dataset_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_DeleteDataset_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_delete_dataset_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlClient", - "shortName": "AutoMlClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlClient.delete_dataset", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.DeleteDataset", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "DeleteDataset" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.DeleteDatasetRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, 
bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_dataset" - }, - "description": "Sample for DeleteDataset", - "file": "automl_v1beta1_generated_auto_ml_delete_dataset_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_DeleteDataset_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_delete_dataset_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient", - "shortName": "AutoMlAsyncClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient.delete_model", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.DeleteModel", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "DeleteModel" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.DeleteModelRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_model" - }, - "description": "Sample for DeleteModel", - "file": "automl_v1beta1_generated_auto_ml_delete_model_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_DeleteModel_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_delete_model_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlClient", - "shortName": "AutoMlClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlClient.delete_model", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.DeleteModel", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "DeleteModel" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.DeleteModelRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_model" - }, - "description": "Sample for DeleteModel", - "file": "automl_v1beta1_generated_auto_ml_delete_model_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"automl_v1beta1_generated_AutoMl_DeleteModel_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_delete_model_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient", - "shortName": "AutoMlAsyncClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient.deploy_model", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.DeployModel", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "DeployModel" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.DeployModelRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "deploy_model" - }, - "description": "Sample for DeployModel", - "file": "automl_v1beta1_generated_auto_ml_deploy_model_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_DeployModel_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_deploy_model_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlClient", - "shortName": "AutoMlClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlClient.deploy_model", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.DeployModel", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "DeployModel" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.DeployModelRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "deploy_model" - }, - "description": "Sample for DeployModel", - "file": "automl_v1beta1_generated_auto_ml_deploy_model_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_DeployModel_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - 
"end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_deploy_model_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient", - "shortName": "AutoMlAsyncClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient.export_data", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.ExportData", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "ExportData" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.ExportDataRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "output_config", - "type": "google.cloud.automl_v1beta1.types.OutputConfig" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "export_data" - }, - "description": "Sample for ExportData", - "file": "automl_v1beta1_generated_auto_ml_export_data_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_ExportData_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_export_data_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlClient", - "shortName": "AutoMlClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlClient.export_data", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.ExportData", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "ExportData" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.ExportDataRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "output_config", - "type": "google.cloud.automl_v1beta1.types.OutputConfig" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "export_data" - }, - "description": "Sample for ExportData", - "file": "automl_v1beta1_generated_auto_ml_export_data_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_ExportData_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"automl_v1beta1_generated_auto_ml_export_data_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient", - "shortName": "AutoMlAsyncClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient.export_evaluated_examples", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.ExportEvaluatedExamples", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "ExportEvaluatedExamples" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.ExportEvaluatedExamplesRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "output_config", - "type": "google.cloud.automl_v1beta1.types.ExportEvaluatedExamplesOutputConfig" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "export_evaluated_examples" - }, - "description": "Sample for ExportEvaluatedExamples", - "file": "automl_v1beta1_generated_auto_ml_export_evaluated_examples_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_ExportEvaluatedExamples_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_export_evaluated_examples_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlClient", - "shortName": "AutoMlClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlClient.export_evaluated_examples", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.ExportEvaluatedExamples", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "ExportEvaluatedExamples" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.ExportEvaluatedExamplesRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "output_config", - "type": "google.cloud.automl_v1beta1.types.ExportEvaluatedExamplesOutputConfig" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "export_evaluated_examples" - }, - "description": "Sample for ExportEvaluatedExamples", - "file": "automl_v1beta1_generated_auto_ml_export_evaluated_examples_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_ExportEvaluatedExamples_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - 
"type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_export_evaluated_examples_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient", - "shortName": "AutoMlAsyncClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient.export_model", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.ExportModel", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "ExportModel" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.ExportModelRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "output_config", - "type": "google.cloud.automl_v1beta1.types.ModelExportOutputConfig" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "export_model" - }, - "description": "Sample for ExportModel", - "file": "automl_v1beta1_generated_auto_ml_export_model_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_ExportModel_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_export_model_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlClient", - "shortName": "AutoMlClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlClient.export_model", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.ExportModel", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "ExportModel" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.ExportModelRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "output_config", - "type": "google.cloud.automl_v1beta1.types.ModelExportOutputConfig" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "export_model" - }, - "description": "Sample for ExportModel", - "file": "automl_v1beta1_generated_auto_ml_export_model_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_ExportModel_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - 
"title": "automl_v1beta1_generated_auto_ml_export_model_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient", - "shortName": "AutoMlAsyncClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient.get_annotation_spec", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.GetAnnotationSpec", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "GetAnnotationSpec" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.GetAnnotationSpecRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1beta1.types.AnnotationSpec", - "shortName": "get_annotation_spec" - }, - "description": "Sample for GetAnnotationSpec", - "file": "automl_v1beta1_generated_auto_ml_get_annotation_spec_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_GetAnnotationSpec_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_get_annotation_spec_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlClient", - "shortName": "AutoMlClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlClient.get_annotation_spec", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.GetAnnotationSpec", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "GetAnnotationSpec" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.GetAnnotationSpecRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1beta1.types.AnnotationSpec", - "shortName": "get_annotation_spec" - }, - "description": "Sample for GetAnnotationSpec", - "file": "automl_v1beta1_generated_auto_ml_get_annotation_spec_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_GetAnnotationSpec_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_get_annotation_spec_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": 
"google.cloud.automl_v1beta1.AutoMlAsyncClient", - "shortName": "AutoMlAsyncClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient.get_column_spec", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.GetColumnSpec", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "GetColumnSpec" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.GetColumnSpecRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1beta1.types.ColumnSpec", - "shortName": "get_column_spec" - }, - "description": "Sample for GetColumnSpec", - "file": "automl_v1beta1_generated_auto_ml_get_column_spec_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_GetColumnSpec_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_get_column_spec_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlClient", - "shortName": "AutoMlClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlClient.get_column_spec", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.GetColumnSpec", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "GetColumnSpec" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.GetColumnSpecRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1beta1.types.ColumnSpec", - "shortName": "get_column_spec" - }, - "description": "Sample for GetColumnSpec", - "file": "automl_v1beta1_generated_auto_ml_get_column_spec_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_GetColumnSpec_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_get_column_spec_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient", - "shortName": "AutoMlAsyncClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient.get_dataset", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.GetDataset", - "service": { - "fullName": 
"google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "GetDataset" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.GetDatasetRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1beta1.types.Dataset", - "shortName": "get_dataset" - }, - "description": "Sample for GetDataset", - "file": "automl_v1beta1_generated_auto_ml_get_dataset_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_GetDataset_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_get_dataset_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlClient", - "shortName": "AutoMlClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlClient.get_dataset", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.GetDataset", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "GetDataset" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.GetDatasetRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1beta1.types.Dataset", - "shortName": "get_dataset" - }, - "description": "Sample for GetDataset", - "file": "automl_v1beta1_generated_auto_ml_get_dataset_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_GetDataset_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_get_dataset_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient", - "shortName": "AutoMlAsyncClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient.get_model_evaluation", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.GetModelEvaluation", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "GetModelEvaluation" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.GetModelEvaluationRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": 
"google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1beta1.types.ModelEvaluation", - "shortName": "get_model_evaluation" - }, - "description": "Sample for GetModelEvaluation", - "file": "automl_v1beta1_generated_auto_ml_get_model_evaluation_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_GetModelEvaluation_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_get_model_evaluation_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlClient", - "shortName": "AutoMlClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlClient.get_model_evaluation", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.GetModelEvaluation", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "GetModelEvaluation" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.GetModelEvaluationRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1beta1.types.ModelEvaluation", - "shortName": "get_model_evaluation" - }, - "description": "Sample for GetModelEvaluation", - "file": "automl_v1beta1_generated_auto_ml_get_model_evaluation_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_GetModelEvaluation_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_get_model_evaluation_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient", - "shortName": "AutoMlAsyncClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient.get_model", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.GetModel", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "GetModel" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.GetModelRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": 
"google.cloud.automl_v1beta1.types.Model", - "shortName": "get_model" - }, - "description": "Sample for GetModel", - "file": "automl_v1beta1_generated_auto_ml_get_model_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_GetModel_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_get_model_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlClient", - "shortName": "AutoMlClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlClient.get_model", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.GetModel", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "GetModel" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.GetModelRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1beta1.types.Model", - "shortName": "get_model" - }, - "description": "Sample for GetModel", - "file": "automl_v1beta1_generated_auto_ml_get_model_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_GetModel_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_get_model_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient", - "shortName": "AutoMlAsyncClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient.get_table_spec", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.GetTableSpec", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "GetTableSpec" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.GetTableSpecRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1beta1.types.TableSpec", - "shortName": "get_table_spec" - }, - "description": "Sample for GetTableSpec", - "file": "automl_v1beta1_generated_auto_ml_get_table_spec_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_GetTableSpec_async", - "segments": [ - { - "end": 51, - "start": 27, - 
"type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_get_table_spec_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlClient", - "shortName": "AutoMlClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlClient.get_table_spec", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.GetTableSpec", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "GetTableSpec" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.GetTableSpecRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1beta1.types.TableSpec", - "shortName": "get_table_spec" - }, - "description": "Sample for GetTableSpec", - "file": "automl_v1beta1_generated_auto_ml_get_table_spec_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_GetTableSpec_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_get_table_spec_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient", - "shortName": "AutoMlAsyncClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient.import_data", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.ImportData", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "ImportData" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.ImportDataRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "input_config", - "type": "google.cloud.automl_v1beta1.types.InputConfig" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "import_data" - }, - "description": "Sample for ImportData", - "file": "automl_v1beta1_generated_auto_ml_import_data_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_ImportData_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { 
- "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_import_data_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlClient", - "shortName": "AutoMlClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlClient.import_data", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.ImportData", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "ImportData" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.ImportDataRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "input_config", - "type": "google.cloud.automl_v1beta1.types.InputConfig" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "import_data" - }, - "description": "Sample for ImportData", - "file": "automl_v1beta1_generated_auto_ml_import_data_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_ImportData_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_import_data_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient", - "shortName": "AutoMlAsyncClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient.list_column_specs", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.ListColumnSpecs", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "ListColumnSpecs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.ListColumnSpecsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1beta1.services.auto_ml.pagers.ListColumnSpecsAsyncPager", - "shortName": "list_column_specs" - }, - "description": "Sample for ListColumnSpecs", - "file": "automl_v1beta1_generated_auto_ml_list_column_specs_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_ListColumnSpecs_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"automl_v1beta1_generated_auto_ml_list_column_specs_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlClient", - "shortName": "AutoMlClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlClient.list_column_specs", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.ListColumnSpecs", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "ListColumnSpecs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.ListColumnSpecsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1beta1.services.auto_ml.pagers.ListColumnSpecsPager", - "shortName": "list_column_specs" - }, - "description": "Sample for ListColumnSpecs", - "file": "automl_v1beta1_generated_auto_ml_list_column_specs_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_ListColumnSpecs_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_list_column_specs_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient", - "shortName": "AutoMlAsyncClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient.list_datasets", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.ListDatasets", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "ListDatasets" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.ListDatasetsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1beta1.services.auto_ml.pagers.ListDatasetsAsyncPager", - "shortName": "list_datasets" - }, - "description": "Sample for ListDatasets", - "file": "automl_v1beta1_generated_auto_ml_list_datasets_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_ListDatasets_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_list_datasets_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlClient", - "shortName": 
"AutoMlClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlClient.list_datasets", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.ListDatasets", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "ListDatasets" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.ListDatasetsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1beta1.services.auto_ml.pagers.ListDatasetsPager", - "shortName": "list_datasets" - }, - "description": "Sample for ListDatasets", - "file": "automl_v1beta1_generated_auto_ml_list_datasets_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_ListDatasets_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_list_datasets_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient", - "shortName": "AutoMlAsyncClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient.list_model_evaluations", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.ListModelEvaluations", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "ListModelEvaluations" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.ListModelEvaluationsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1beta1.services.auto_ml.pagers.ListModelEvaluationsAsyncPager", - "shortName": "list_model_evaluations" - }, - "description": "Sample for ListModelEvaluations", - "file": "automl_v1beta1_generated_auto_ml_list_model_evaluations_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_ListModelEvaluations_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_list_model_evaluations_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlClient", - "shortName": "AutoMlClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlClient.list_model_evaluations", - "method": { - "fullName": 
"google.cloud.automl.v1beta1.AutoMl.ListModelEvaluations", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "ListModelEvaluations" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.ListModelEvaluationsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1beta1.services.auto_ml.pagers.ListModelEvaluationsPager", - "shortName": "list_model_evaluations" - }, - "description": "Sample for ListModelEvaluations", - "file": "automl_v1beta1_generated_auto_ml_list_model_evaluations_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_ListModelEvaluations_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_list_model_evaluations_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient", - "shortName": "AutoMlAsyncClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient.list_models", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.ListModels", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "ListModels" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.ListModelsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1beta1.services.auto_ml.pagers.ListModelsAsyncPager", - "shortName": "list_models" - }, - "description": "Sample for ListModels", - "file": "automl_v1beta1_generated_auto_ml_list_models_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_ListModels_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_list_models_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlClient", - "shortName": "AutoMlClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlClient.list_models", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.ListModels", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "ListModels" - }, - "parameters": 
[ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.ListModelsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1beta1.services.auto_ml.pagers.ListModelsPager", - "shortName": "list_models" - }, - "description": "Sample for ListModels", - "file": "automl_v1beta1_generated_auto_ml_list_models_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_ListModels_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_list_models_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient", - "shortName": "AutoMlAsyncClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient.list_table_specs", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.ListTableSpecs", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "ListTableSpecs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.ListTableSpecsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1beta1.services.auto_ml.pagers.ListTableSpecsAsyncPager", - "shortName": "list_table_specs" - }, - "description": "Sample for ListTableSpecs", - "file": "automl_v1beta1_generated_auto_ml_list_table_specs_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_ListTableSpecs_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_list_table_specs_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlClient", - "shortName": "AutoMlClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlClient.list_table_specs", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.ListTableSpecs", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "ListTableSpecs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.ListTableSpecsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": 
"timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1beta1.services.auto_ml.pagers.ListTableSpecsPager", - "shortName": "list_table_specs" - }, - "description": "Sample for ListTableSpecs", - "file": "automl_v1beta1_generated_auto_ml_list_table_specs_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_ListTableSpecs_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_list_table_specs_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient", - "shortName": "AutoMlAsyncClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient.undeploy_model", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.UndeployModel", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "UndeployModel" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.UndeployModelRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "undeploy_model" - }, - "description": "Sample for UndeployModel", - "file": "automl_v1beta1_generated_auto_ml_undeploy_model_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_UndeployModel_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_undeploy_model_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlClient", - "shortName": "AutoMlClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlClient.undeploy_model", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.UndeployModel", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "UndeployModel" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.UndeployModelRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "undeploy_model" - }, - "description": "Sample for 
UndeployModel", - "file": "automl_v1beta1_generated_auto_ml_undeploy_model_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_UndeployModel_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_undeploy_model_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient", - "shortName": "AutoMlAsyncClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient.update_column_spec", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.UpdateColumnSpec", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "UpdateColumnSpec" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.UpdateColumnSpecRequest" - }, - { - "name": "column_spec", - "type": "google.cloud.automl_v1beta1.types.ColumnSpec" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1beta1.types.ColumnSpec", - "shortName": "update_column_spec" - }, - "description": "Sample for UpdateColumnSpec", - "file": "automl_v1beta1_generated_auto_ml_update_column_spec_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_UpdateColumnSpec_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_update_column_spec_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlClient", - "shortName": "AutoMlClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlClient.update_column_spec", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.UpdateColumnSpec", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "UpdateColumnSpec" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.UpdateColumnSpecRequest" - }, - { - "name": "column_spec", - "type": "google.cloud.automl_v1beta1.types.ColumnSpec" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1beta1.types.ColumnSpec", - "shortName": "update_column_spec" - }, - "description": "Sample for UpdateColumnSpec", - "file": "automl_v1beta1_generated_auto_ml_update_column_spec_sync.py", - "language": "PYTHON", - "origin": 
"API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_UpdateColumnSpec_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_update_column_spec_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient", - "shortName": "AutoMlAsyncClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient.update_dataset", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.UpdateDataset", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "UpdateDataset" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.UpdateDatasetRequest" - }, - { - "name": "dataset", - "type": "google.cloud.automl_v1beta1.types.Dataset" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1beta1.types.Dataset", - "shortName": "update_dataset" - }, - "description": "Sample for UpdateDataset", - "file": "automl_v1beta1_generated_auto_ml_update_dataset_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_UpdateDataset_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_update_dataset_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlClient", - "shortName": "AutoMlClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlClient.update_dataset", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.UpdateDataset", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "UpdateDataset" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.UpdateDatasetRequest" - }, - { - "name": "dataset", - "type": "google.cloud.automl_v1beta1.types.Dataset" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1beta1.types.Dataset", - "shortName": "update_dataset" - }, - "description": "Sample for UpdateDataset", - "file": "automl_v1beta1_generated_auto_ml_update_dataset_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_UpdateDataset_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": 
"SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_update_dataset_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient", - "shortName": "AutoMlAsyncClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlAsyncClient.update_table_spec", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.UpdateTableSpec", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "UpdateTableSpec" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.UpdateTableSpecRequest" - }, - { - "name": "table_spec", - "type": "google.cloud.automl_v1beta1.types.TableSpec" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1beta1.types.TableSpec", - "shortName": "update_table_spec" - }, - "description": "Sample for UpdateTableSpec", - "file": "automl_v1beta1_generated_auto_ml_update_table_spec_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_UpdateTableSpec_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_update_table_spec_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1beta1.AutoMlClient", - "shortName": "AutoMlClient" - }, - "fullName": "google.cloud.automl_v1beta1.AutoMlClient.update_table_spec", - "method": { - "fullName": "google.cloud.automl.v1beta1.AutoMl.UpdateTableSpec", - "service": { - "fullName": "google.cloud.automl.v1beta1.AutoMl", - "shortName": "AutoMl" - }, - "shortName": "UpdateTableSpec" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.UpdateTableSpecRequest" - }, - { - "name": "table_spec", - "type": "google.cloud.automl_v1beta1.types.TableSpec" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1beta1.types.TableSpec", - "shortName": "update_table_spec" - }, - "description": "Sample for UpdateTableSpec", - "file": "automl_v1beta1_generated_auto_ml_update_table_spec_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_AutoMl_UpdateTableSpec_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, 
- { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_auto_ml_update_table_spec_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.automl_v1beta1.PredictionServiceAsyncClient", - "shortName": "PredictionServiceAsyncClient" - }, - "fullName": "google.cloud.automl_v1beta1.PredictionServiceAsyncClient.batch_predict", - "method": { - "fullName": "google.cloud.automl.v1beta1.PredictionService.BatchPredict", - "service": { - "fullName": "google.cloud.automl.v1beta1.PredictionService", - "shortName": "PredictionService" - }, - "shortName": "BatchPredict" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.BatchPredictRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "input_config", - "type": "google.cloud.automl_v1beta1.types.BatchPredictInputConfig" - }, - { - "name": "output_config", - "type": "google.cloud.automl_v1beta1.types.BatchPredictOutputConfig" - }, - { - "name": "params", - "type": "MutableMapping[str, str]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "batch_predict" - }, - "description": "Sample for BatchPredict", - "file": "automl_v1beta1_generated_prediction_service_batch_predict_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_PredictionService_BatchPredict_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_prediction_service_batch_predict_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1beta1.PredictionServiceClient", - "shortName": "PredictionServiceClient" - }, - "fullName": "google.cloud.automl_v1beta1.PredictionServiceClient.batch_predict", - "method": { - "fullName": "google.cloud.automl.v1beta1.PredictionService.BatchPredict", - "service": { - "fullName": "google.cloud.automl.v1beta1.PredictionService", - "shortName": "PredictionService" - }, - "shortName": "BatchPredict" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.BatchPredictRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "input_config", - "type": "google.cloud.automl_v1beta1.types.BatchPredictInputConfig" - }, - { - "name": "output_config", - "type": "google.cloud.automl_v1beta1.types.BatchPredictOutputConfig" - }, - { - "name": "params", - "type": "MutableMapping[str, str]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "batch_predict" - }, - "description": "Sample for BatchPredict", - "file": 
"automl_v1beta1_generated_prediction_service_batch_predict_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_PredictionService_BatchPredict_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_prediction_service_batch_predict_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.automl_v1beta1.PredictionServiceAsyncClient", - "shortName": "PredictionServiceAsyncClient" - }, - "fullName": "google.cloud.automl_v1beta1.PredictionServiceAsyncClient.predict", - "method": { - "fullName": "google.cloud.automl.v1beta1.PredictionService.Predict", - "service": { - "fullName": "google.cloud.automl.v1beta1.PredictionService", - "shortName": "PredictionService" - }, - "shortName": "Predict" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.PredictRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "payload", - "type": "google.cloud.automl_v1beta1.types.ExamplePayload" - }, - { - "name": "params", - "type": "MutableMapping[str, str]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.automl_v1beta1.types.PredictResponse", - "shortName": "predict" - }, - "description": "Sample for Predict", - "file": "automl_v1beta1_generated_prediction_service_predict_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_PredictionService_Predict_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_prediction_service_predict_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.automl_v1beta1.PredictionServiceClient", - "shortName": "PredictionServiceClient" - }, - "fullName": "google.cloud.automl_v1beta1.PredictionServiceClient.predict", - "method": { - "fullName": "google.cloud.automl.v1beta1.PredictionService.Predict", - "service": { - "fullName": "google.cloud.automl.v1beta1.PredictionService", - "shortName": "PredictionService" - }, - "shortName": "Predict" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.automl_v1beta1.types.PredictRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "payload", - "type": "google.cloud.automl_v1beta1.types.ExamplePayload" - }, - { - "name": "params", - "type": "MutableMapping[str, str]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - 
"resultType": "google.cloud.automl_v1beta1.types.PredictResponse", - "shortName": "predict" - }, - "description": "Sample for Predict", - "file": "automl_v1beta1_generated_prediction_service_predict_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "automl_v1beta1_generated_PredictionService_Predict_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "automl_v1beta1_generated_prediction_service_predict_sync.py" - } - ] -} diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/scripts/fixup_automl_v1beta1_keywords.py b/owl-bot-staging/google-cloud-automl/v1beta1/scripts/fixup_automl_v1beta1_keywords.py deleted file mode 100644 index 0137b26e0453..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/scripts/fixup_automl_v1beta1_keywords.py +++ /dev/null @@ -1,201 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class automlCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'batch_predict': ('name', 'input_config', 'output_config', 'params', ), - 'create_dataset': ('parent', 'dataset', ), - 'create_model': ('parent', 'model', ), - 'delete_dataset': ('name', ), - 'delete_model': ('name', ), - 'deploy_model': ('name', 'image_object_detection_model_deployment_metadata', 'image_classification_model_deployment_metadata', ), - 'export_data': ('name', 'output_config', ), - 'export_evaluated_examples': ('name', 'output_config', ), - 'export_model': ('name', 'output_config', ), - 'get_annotation_spec': ('name', ), - 'get_column_spec': ('name', 'field_mask', ), - 'get_dataset': ('name', ), - 'get_model': ('name', ), - 'get_model_evaluation': ('name', ), - 'get_table_spec': ('name', 'field_mask', ), - 'import_data': ('name', 'input_config', ), - 'list_column_specs': ('parent', 'field_mask', 'filter', 'page_size', 'page_token', ), - 'list_datasets': ('parent', 'filter', 'page_size', 'page_token', ), - 'list_model_evaluations': ('parent', 'filter', 'page_size', 'page_token', ), - 'list_models': ('parent', 'filter', 'page_size', 'page_token', ), - 'list_table_specs': ('parent', 'field_mask', 'filter', 'page_size', 'page_token', ), - 'predict': ('name', 'payload', 'params', ), - 'undeploy_model': ('name', ), - 'update_column_spec': ('column_spec', 'update_mask', ), - 'update_dataset': ('dataset', 'update_mask', ), - 'update_table_spec': ('table_spec', 'update_mask', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. 
- for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=automlCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the automl client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/setup.py b/owl-bot-staging/google-cloud-automl/v1beta1/setup.py deleted file mode 100644 index 96aac7a718a2..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/setup.py +++ /dev/null @@ -1,98 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-automl' - - -description = "Google Cloud Automl API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/automl/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", -] -extras = { -} -url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-automl" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - extras_require=extras, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-automl/v1beta1/testing/constraints-3.10.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/testing/constraints-3.10.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
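-# (These newer-Python constraints files deliberately leave versions unpinned;
-# constraints-3.7.txt below pins each dependency to the lower bound declared
-# in setup.py.)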
-google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-automl/v1beta1/testing/constraints-3.11.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/testing/constraints-3.11.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-automl/v1beta1/testing/constraints-3.12.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/testing/constraints-3.12.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-automl/v1beta1/testing/constraints-3.13.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/testing/constraints-3.13.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-automl/v1beta1/testing/constraints-3.7.txt deleted file mode 100644 index fc812592b0ee..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/testing/constraints-3.7.txt +++ /dev/null @@ -1,10 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -proto-plus==1.22.3 -protobuf==3.20.2 diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-automl/v1beta1/testing/constraints-3.8.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/testing/constraints-3.8.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-automl/v1beta1/testing/constraints-3.9.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/testing/constraints-3.9.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/tests/__init__.py b/owl-bot-staging/google-cloud-automl/v1beta1/tests/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/tests/unit/__init__.py b/owl-bot-staging/google-cloud-automl/v1beta1/tests/unit/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-automl/v1beta1/tests/unit/gapic/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/tests/unit/gapic/automl_v1beta1/__init__.py b/owl-bot-staging/google-cloud-automl/v1beta1/tests/unit/gapic/automl_v1beta1/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/tests/unit/gapic/automl_v1beta1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/tests/unit/gapic/automl_v1beta1/test_auto_ml.py b/owl-bot-staging/google-cloud-automl/v1beta1/tests/unit/gapic/automl_v1beta1/test_auto_ml.py deleted file mode 100644 index 07c204d8c90d..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/tests/unit/gapic/automl_v1beta1/test_auto_ml.py +++ /dev/null @@ -1,19441 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation -from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.automl_v1beta1.services.auto_ml import AutoMlAsyncClient -from google.cloud.automl_v1beta1.services.auto_ml import AutoMlClient -from google.cloud.automl_v1beta1.services.auto_ml import pagers -from google.cloud.automl_v1beta1.services.auto_ml import transports -from google.cloud.automl_v1beta1.types import annotation_spec -from google.cloud.automl_v1beta1.types import classification -from google.cloud.automl_v1beta1.types import column_spec -from google.cloud.automl_v1beta1.types import column_spec as gca_column_spec -from google.cloud.automl_v1beta1.types import data_stats -from 
google.cloud.automl_v1beta1.types import data_types -from google.cloud.automl_v1beta1.types import dataset -from google.cloud.automl_v1beta1.types import dataset as gca_dataset -from google.cloud.automl_v1beta1.types import detection -from google.cloud.automl_v1beta1.types import image -from google.cloud.automl_v1beta1.types import io -from google.cloud.automl_v1beta1.types import model -from google.cloud.automl_v1beta1.types import model as gca_model -from google.cloud.automl_v1beta1.types import model_evaluation -from google.cloud.automl_v1beta1.types import operations -from google.cloud.automl_v1beta1.types import regression -from google.cloud.automl_v1beta1.types import service -from google.cloud.automl_v1beta1.types import table_spec -from google.cloud.automl_v1beta1.types import table_spec as gca_table_spec -from google.cloud.automl_v1beta1.types import tables -from google.cloud.automl_v1beta1.types import text -from google.cloud.automl_v1beta1.types import text_extraction -from google.cloud.automl_v1beta1.types import text_sentiment -from google.cloud.automl_v1beta1.types import translation -from google.cloud.automl_v1beta1.types import video -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -import google.auth - - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": "service account credentials", - "principal": "service-account@example.com", -} -CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
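-# (It swaps in "test.{UNIVERSE_DOMAIN}" whenever the real template resolves
-# to localhost, mirroring modify_default_endpoint above.)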
-def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert AutoMlClient._get_default_mtls_endpoint(None) is None - assert AutoMlClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert AutoMlClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert AutoMlClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert AutoMlClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert AutoMlClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert AutoMlClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert AutoMlClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert AutoMlClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - AutoMlClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert AutoMlClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert AutoMlClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert AutoMlClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - AutoMlClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert AutoMlClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert AutoMlClient._get_client_cert_source(None, False) is None - assert AutoMlClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert AutoMlClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert AutoMlClient._get_client_cert_source(None, True) is mock_default_cert_source - assert AutoMlClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(AutoMlClient, "_DEFAULT_ENDPOINT_TEMPLATE", 
modify_default_endpoint_template(AutoMlClient)) -@mock.patch.object(AutoMlAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AutoMlAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = AutoMlClient._DEFAULT_UNIVERSE - default_endpoint = AutoMlClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = AutoMlClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert AutoMlClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert AutoMlClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == AutoMlClient.DEFAULT_MTLS_ENDPOINT - assert AutoMlClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert AutoMlClient._get_api_endpoint(None, None, default_universe, "always") == AutoMlClient.DEFAULT_MTLS_ENDPOINT - assert AutoMlClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == AutoMlClient.DEFAULT_MTLS_ENDPOINT - assert AutoMlClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert AutoMlClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - AutoMlClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." - - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert AutoMlClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert AutoMlClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert AutoMlClient._get_universe_domain(None, None) == AutoMlClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - AutoMlClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
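A minimal standalone sketch of the precedence those asserts pin down (illustrative only: resolve_universe_domain is a hypothetical helper, and the "googleapis.com" default stands in for AutoMlClient._DEFAULT_UNIVERSE):

def resolve_universe_domain(client_setting, env_setting, default="googleapis.com"):
    # An explicit client option wins, then the environment variable, then the default.
    if client_setting is not None:
        if not client_setting:
            raise ValueError("Universe Domain cannot be an empty string.")
        return client_setting
    return env_setting if env_setting is not None else default

assert resolve_universe_domain("foo.com", "bar.com") == "foo.com"
assert resolve_universe_domain(None, "bar.com") == "bar.com"
assert resolve_universe_domain(None, None) == "googleapis.com"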
- -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = AutoMlClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - -@pytest.mark.parametrize("error_code", [401,403,404,500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = AutoMlClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - -@pytest.mark.parametrize("client_class,transport_name", [ - (AutoMlClient, "grpc"), - (AutoMlAsyncClient, "grpc_asyncio"), - (AutoMlClient, "rest"), -]) -def test_auto_ml_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'automl.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://automl.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.AutoMlGrpcTransport, "grpc"), - (transports.AutoMlGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.AutoMlRestTransport, "rest"), -]) -def test_auto_ml_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (AutoMlClient, "grpc"), - (AutoMlAsyncClient, "grpc_asyncio"), - (AutoMlClient, "rest"), -]) -def test_auto_ml_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = 
client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'automl.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://automl.googleapis.com' - ) - - -def test_auto_ml_client_get_transport_class(): - transport = AutoMlClient.get_transport_class() - available_transports = [ - transports.AutoMlGrpcTransport, - transports.AutoMlRestTransport, - ] - assert transport in available_transports - - transport = AutoMlClient.get_transport_class("grpc") - assert transport == transports.AutoMlGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (AutoMlClient, transports.AutoMlGrpcTransport, "grpc"), - (AutoMlAsyncClient, transports.AutoMlGrpcAsyncIOTransport, "grpc_asyncio"), - (AutoMlClient, transports.AutoMlRestTransport, "rest"), -]) -@mock.patch.object(AutoMlClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AutoMlClient)) -@mock.patch.object(AutoMlAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AutoMlAsyncClient)) -def test_auto_ml_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(AutoMlClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(AutoMlClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". 
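- # ("always" must switch the host to DEFAULT_MTLS_ENDPOINT even though no
- # client certificate has been configured.)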
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (AutoMlClient, transports.AutoMlGrpcTransport, "grpc", "true"), - (AutoMlAsyncClient, transports.AutoMlGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (AutoMlClient, transports.AutoMlGrpcTransport, "grpc", "false"), - (AutoMlAsyncClient, transports.AutoMlGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (AutoMlClient, transports.AutoMlRestTransport, "rest", "true"), - (AutoMlClient, transports.AutoMlRestTransport, "rest", "false"), -]) -@mock.patch.object(AutoMlClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AutoMlClient)) -@mock.patch.object(AutoMlAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AutoMlAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def 
test_auto_ml_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
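- # With no certificate discoverable, the endpoint must stay on the plain
- # (non-mTLS) default no matter what GOOGLE_API_USE_CLIENT_CERTIFICATE says.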
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - AutoMlClient, AutoMlAsyncClient -]) -@mock.patch.object(AutoMlClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AutoMlClient)) -@mock.patch.object(AutoMlAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AutoMlAsyncClient)) -def test_auto_ml_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - AutoMlClient, AutoMlAsyncClient -]) -@mock.patch.object(AutoMlClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AutoMlClient)) -@mock.patch.object(AutoMlAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AutoMlAsyncClient)) -def test_auto_ml_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = AutoMlClient._DEFAULT_UNIVERSE - default_endpoint = AutoMlClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = AutoMlClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (AutoMlClient, transports.AutoMlGrpcTransport, "grpc"), - (AutoMlAsyncClient, transports.AutoMlGrpcAsyncIOTransport, "grpc_asyncio"), - (AutoMlClient, transports.AutoMlRestTransport, "rest"), -]) -def test_auto_ml_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (AutoMlClient, transports.AutoMlGrpcTransport, "grpc", grpc_helpers), - (AutoMlAsyncClient, transports.AutoMlGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (AutoMlClient, transports.AutoMlRestTransport, "rest", None), -]) -def test_auto_ml_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_auto_ml_client_client_options_from_dict(): - with mock.patch('google.cloud.automl_v1beta1.services.auto_ml.transports.AutoMlGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = AutoMlClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (AutoMlClient, transports.AutoMlGrpcTransport, "grpc", grpc_helpers), - (AutoMlAsyncClient, transports.AutoMlGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_auto_ml_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
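- # (load_credentials_from_file is patched, so the channel is expected to be
- # created with the file-derived credentials rather than ADC.)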
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "automl.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="automl.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - service.CreateDatasetRequest, - dict, -]) -def test_create_dataset(request_type, transport: str = 'grpc'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dataset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gca_dataset.Dataset( - name='name_value', - display_name='display_name_value', - description='description_value', - example_count=1396, - etag='etag_value', - ) - response = client.create_dataset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.CreateDatasetRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, gca_dataset.Dataset) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.example_count == 1396 - assert response.etag == 'etag_value' - - -def test_create_dataset_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.CreateDatasetRequest( - parent='parent_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dataset), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.create_dataset(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.CreateDatasetRequest( - parent='parent_value', - ) - -def test_create_dataset_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_dataset in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_dataset] = mock_rpc - request = {} - client.create_dataset(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_dataset(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_dataset_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_dataset in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_dataset] = mock_rpc - - request = {} - await client.create_dataset(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_dataset(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_dataset_async(transport: str = 'grpc_asyncio', request_type=service.CreateDatasetRequest): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dataset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(gca_dataset.Dataset( - name='name_value', - display_name='display_name_value', - description='description_value', - example_count=1396, - etag='etag_value', - )) - response = await client.create_dataset(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.CreateDatasetRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, gca_dataset.Dataset) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.example_count == 1396 - assert response.etag == 'etag_value' - - -@pytest.mark.asyncio -async def test_create_dataset_async_from_dict(): - await test_create_dataset_async(request_type=dict) - -def test_create_dataset_field_headers(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.CreateDatasetRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dataset), - '__call__') as call: - call.return_value = gca_dataset.Dataset() - client.create_dataset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_dataset_field_headers_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.CreateDatasetRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dataset), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gca_dataset.Dataset()) - await client.create_dataset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_dataset_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dataset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gca_dataset.Dataset() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_dataset( - parent='parent_value', - dataset=gca_dataset.Dataset(translation_dataset_metadata=translation.TranslationDatasetMetadata(source_language_code='source_language_code_value')), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
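- # (The flattened parent/dataset arguments are folded into a single request
- # object before the transport is invoked, so each field is read off args[0].)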
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
- arg = args[0].dataset
- mock_val = gca_dataset.Dataset(translation_dataset_metadata=translation.TranslationDatasetMetadata(source_language_code='source_language_code_value'))
- assert arg == mock_val
-
-
-def test_create_dataset_flattened_error():
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.create_dataset(
- service.CreateDatasetRequest(),
- parent='parent_value',
- dataset=gca_dataset.Dataset(translation_dataset_metadata=translation.TranslationDatasetMetadata(source_language_code='source_language_code_value')),
- )
-
-@pytest.mark.asyncio
-async def test_create_dataset_flattened_async():
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_dataset),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gca_dataset.Dataset())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.create_dataset(
- parent='parent_value',
- dataset=gca_dataset.Dataset(translation_dataset_metadata=translation.TranslationDatasetMetadata(source_language_code='source_language_code_value')),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
- arg = args[0].dataset
- mock_val = gca_dataset.Dataset(translation_dataset_metadata=translation.TranslationDatasetMetadata(source_language_code='source_language_code_value'))
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_dataset_flattened_error_async():
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.create_dataset(
- service.CreateDatasetRequest(),
- parent='parent_value',
- dataset=gca_dataset.Dataset(translation_dataset_metadata=translation.TranslationDatasetMetadata(source_language_code='source_language_code_value')),
- )
-
-
-@pytest.mark.parametrize("request_type", [
- service.GetDatasetRequest,
- dict,
-])
-def test_get_dataset(request_type, transport: str = 'grpc'):
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_dataset),
- '__call__') as call:
- # Designate an appropriate return value for the call.
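The `flattened_error` tests above encode the GAPIC calling convention: a method accepts either a fully formed request object or individual flattened fields, never both. A sketch of that guard (hypothetical helper, with the error text modeled on the generated clients):

    def check_flattened(request, **fields):
        # Accept a request object or flattened keyword fields, but not both.
        if request is not None and any(v is not None for v in fields.values()):
            raise ValueError(
                'If the `request` argument is set, then none of '
                'the individual field arguments should be set.'
            )

    check_flattened(None, parent='parent_value')   # flattened call: fine
    # check_flattened(object(), parent='parent_value') would raise ValueError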
- call.return_value = dataset.Dataset( - name='name_value', - display_name='display_name_value', - description='description_value', - example_count=1396, - etag='etag_value', - ) - response = client.get_dataset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.GetDatasetRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dataset.Dataset) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.example_count == 1396 - assert response.etag == 'etag_value' - - -def test_get_dataset_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.GetDatasetRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_dataset), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_dataset(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.GetDatasetRequest( - name='name_value', - ) - -def test_get_dataset_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_dataset in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_dataset] = mock_rpc - request = {} - client.get_dataset(request) - - # Establish that the underlying gRPC stub method was called. 
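The `non_empty_request_with_auto_populated_field` tests above are a coverage failsafe for AIP-4235: request string fields annotated as auto-populated UUID4 values (typically a `request_id`) are filled in by the client when left empty, while explicitly set fields pass through untouched. A rough, generic sketch of the rule, with a hypothetical field list:

    import uuid

    def autopopulate(request_fields, uuid4_fields=('request_id',)):
        # Fill only the annotated fields the caller left empty; never
        # overwrite a value the caller set explicitly.
        for field in uuid4_fields:
            if not request_fields.get(field):
                request_fields[field] = str(uuid.uuid4())
        return request_fields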
- assert mock_rpc.call_count == 1
-
- client.get_dataset(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_dataset_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.get_dataset in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.get_dataset] = mock_rpc
-
- request = {}
- await client.get_dataset(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.get_dataset(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_dataset_async(transport: str = 'grpc_asyncio', request_type=service.GetDatasetRequest):
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_dataset),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataset.Dataset(
- name='name_value',
- display_name='display_name_value',
- description='description_value',
- example_count=1396,
- etag='etag_value',
- ))
- response = await client.get_dataset(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = service.GetDatasetRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, dataset.Dataset)
- assert response.name == 'name_value'
- assert response.display_name == 'display_name_value'
- assert response.description == 'description_value'
- assert response.example_count == 1396
- assert response.etag == 'etag_value'
-
-
-@pytest.mark.asyncio
-async def test_get_dataset_async_from_dict():
- await test_get_dataset_async(request_type=dict)
-
-def test_get_dataset_field_headers():
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = service.GetDatasetRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_dataset),
- '__call__') as call:
- call.return_value = dataset.Dataset()
- client.get_dataset(request)
-
- # Establish that the underlying gRPC stub method was called.
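The async variants wrap their canned responses in `grpc_helpers_async.FakeUnaryUnaryCall` because the async transport awaits the stub's return value rather than using it directly. Functionally the fake only needs to be awaitable and resolve to the response; a homemade equivalent (illustrative, not the api_core implementation) is tiny:

    import asyncio

    class FakeCall:
        def __init__(self, response):
            self._response = response

        def __await__(self):
            # A generator-based awaitable that resolves immediately.
            yield from ()
            return self._response

    async def demo():
        return await FakeCall('pong')

    assert asyncio.run(demo()) == 'pong'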
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_dataset_field_headers_async():
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = service.GetDatasetRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_dataset),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataset.Dataset())
- await client.get_dataset(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_get_dataset_flattened():
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_dataset),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = dataset.Dataset()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.get_dataset(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_get_dataset_flattened_error():
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.get_dataset(
- service.GetDatasetRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_get_dataset_flattened_async():
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_dataset),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataset.Dataset())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.get_dataset(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_dataset_flattened_error_async():
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError): - await client.get_dataset( - service.GetDatasetRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - service.ListDatasetsRequest, - dict, -]) -def test_list_datasets(request_type, transport: str = 'grpc'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_datasets), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListDatasetsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_datasets(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.ListDatasetsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDatasetsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_datasets_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.ListDatasetsRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_datasets), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_datasets(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.ListDatasetsRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', - ) - -def test_list_datasets_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_datasets in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_datasets] = mock_rpc - request = {} - client.list_datasets(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.list_datasets(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_datasets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.list_datasets in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.list_datasets] = mock_rpc
-
- request = {}
- await client.list_datasets(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.list_datasets(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_datasets_async(transport: str = 'grpc_asyncio', request_type=service.ListDatasetsRequest):
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_datasets),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListDatasetsResponse(
- next_page_token='next_page_token_value',
- ))
- response = await client.list_datasets(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = service.ListDatasetsRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListDatasetsAsyncPager)
- assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_datasets_async_from_dict():
- await test_list_datasets_async(request_type=dict)
-
-def test_list_datasets_field_headers():
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = service.ListDatasetsRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_datasets),
- '__call__') as call:
- call.return_value = service.ListDatasetsResponse()
- client.list_datasets(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_datasets_field_headers_async():
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = service.ListDatasetsRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_datasets),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListDatasetsResponse())
- await client.list_datasets(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-def test_list_datasets_flattened():
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_datasets),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = service.ListDatasetsResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.list_datasets(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-
-def test_list_datasets_flattened_error():
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.list_datasets(
- service.ListDatasetsRequest(),
- parent='parent_value',
- )
-
-@pytest.mark.asyncio
-async def test_list_datasets_flattened_async():
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_datasets),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListDatasetsResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.list_datasets(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_datasets_flattened_error_async():
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError): - await client.list_datasets( - service.ListDatasetsRequest(), - parent='parent_value', - ) - - -def test_list_datasets_pager(transport_name: str = "grpc"): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_datasets), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - dataset.Dataset(), - ], - next_page_token='abc', - ), - service.ListDatasetsResponse( - datasets=[], - next_page_token='def', - ), - service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - ], - next_page_token='ghi', - ), - service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_datasets(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dataset.Dataset) - for i in results) -def test_list_datasets_pages(transport_name: str = "grpc"): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_datasets), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - dataset.Dataset(), - ], - next_page_token='abc', - ), - service.ListDatasetsResponse( - datasets=[], - next_page_token='def', - ), - service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - ], - next_page_token='ghi', - ), - service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - ], - ), - RuntimeError, - ) - pages = list(client.list_datasets(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_datasets_async_pager(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_datasets), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
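The pager tests feed a fixed series of `ListDatasetsResponse` pages through `side_effect`; the pager abstracts the page-token loop, re-issuing the RPC with the previous `next_page_token` until an empty token ends iteration (the trailing RuntimeError in the fixtures would only surface if the pager over-fetched). A sketch of that loop, with the request modeled as a plain dict:

    def iter_datasets(rpc, request):
        # rpc returns objects with `.datasets` and `.next_page_token`,
        # like service.ListDatasetsResponse above.
        while True:
            page = rpc(request)
            yield from page.datasets
            if not page.next_page_token:
                break
            request['page_token'] = page.next_page_token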
- call.side_effect = ( - service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - dataset.Dataset(), - ], - next_page_token='abc', - ), - service.ListDatasetsResponse( - datasets=[], - next_page_token='def', - ), - service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - ], - next_page_token='ghi', - ), - service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_datasets(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, dataset.Dataset) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_datasets_async_pages(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_datasets), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - dataset.Dataset(), - ], - next_page_token='abc', - ), - service.ListDatasetsResponse( - datasets=[], - next_page_token='def', - ), - service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - ], - next_page_token='ghi', - ), - service.ListDatasetsResponse( - datasets=[ - dataset.Dataset(), - dataset.Dataset(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_datasets(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - service.UpdateDatasetRequest, - dict, -]) -def test_update_dataset(request_type, transport: str = 'grpc'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_dataset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gca_dataset.Dataset( - name='name_value', - display_name='display_name_value', - description='description_value', - example_count=1396, - etag='etag_value', - ) - response = client.update_dataset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.UpdateDatasetRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
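As the async pager test shows, awaiting `list_datasets` yields the pager itself, and `async for` then drives the page fetches lazily. In user code the same pattern looks like this (sketch; `client` is an AutoMlAsyncClient):

    async def collect_datasets(client):
        pager = await client.list_datasets(request={})
        return [ds async for ds in pager]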
- assert isinstance(response, gca_dataset.Dataset) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.example_count == 1396 - assert response.etag == 'etag_value' - - -def test_update_dataset_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.UpdateDatasetRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_dataset), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_dataset(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.UpdateDatasetRequest( - ) - -def test_update_dataset_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_dataset in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_dataset] = mock_rpc - request = {} - client.update_dataset(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_dataset(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_dataset_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_dataset in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_dataset] = mock_rpc - - request = {} - await client.update_dataset(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- await client.update_dataset(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_update_dataset_async(transport: str = 'grpc_asyncio', request_type=service.UpdateDatasetRequest):
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_dataset),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gca_dataset.Dataset(
- name='name_value',
- display_name='display_name_value',
- description='description_value',
- example_count=1396,
- etag='etag_value',
- ))
- response = await client.update_dataset(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = service.UpdateDatasetRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, gca_dataset.Dataset)
- assert response.name == 'name_value'
- assert response.display_name == 'display_name_value'
- assert response.description == 'description_value'
- assert response.example_count == 1396
- assert response.etag == 'etag_value'
-
-
-@pytest.mark.asyncio
-async def test_update_dataset_async_from_dict():
- await test_update_dataset_async(request_type=dict)
-
-def test_update_dataset_field_headers():
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = service.UpdateDatasetRequest()
-
- request.dataset.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_dataset),
- '__call__') as call:
- call.return_value = gca_dataset.Dataset()
- client.update_dataset(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'dataset.name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_update_dataset_field_headers_async():
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = service.UpdateDatasetRequest()
-
- request.dataset.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_dataset),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gca_dataset.Dataset())
- await client.update_dataset(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'dataset.name=name_value',
- ) in kw['metadata']
-
-
-def test_update_dataset_flattened():
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_dataset),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = gca_dataset.Dataset()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.update_dataset(
- dataset=gca_dataset.Dataset(translation_dataset_metadata=translation.TranslationDatasetMetadata(source_language_code='source_language_code_value')),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].dataset
- mock_val = gca_dataset.Dataset(translation_dataset_metadata=translation.TranslationDatasetMetadata(source_language_code='source_language_code_value'))
- assert arg == mock_val
-
-
-def test_update_dataset_flattened_error():
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.update_dataset(
- service.UpdateDatasetRequest(),
- dataset=gca_dataset.Dataset(translation_dataset_metadata=translation.TranslationDatasetMetadata(source_language_code='source_language_code_value')),
- )
-
-@pytest.mark.asyncio
-async def test_update_dataset_flattened_async():
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_dataset),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gca_dataset.Dataset())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.update_dataset(
- dataset=gca_dataset.Dataset(translation_dataset_metadata=translation.TranslationDatasetMetadata(source_language_code='source_language_code_value')),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].dataset
- mock_val = gca_dataset.Dataset(translation_dataset_metadata=translation.TranslationDatasetMetadata(source_language_code='source_language_code_value'))
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_dataset_flattened_error_async():
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError): - await client.update_dataset( - service.UpdateDatasetRequest(), - dataset=gca_dataset.Dataset(translation_dataset_metadata=translation.TranslationDatasetMetadata(source_language_code='source_language_code_value')), - ) - - -@pytest.mark.parametrize("request_type", [ - service.DeleteDatasetRequest, - dict, -]) -def test_delete_dataset(request_type, transport: str = 'grpc'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dataset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.delete_dataset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.DeleteDatasetRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_dataset_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.DeleteDatasetRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dataset), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_dataset(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.DeleteDatasetRequest( - name='name_value', - ) - -def test_delete_dataset_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_dataset in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_dataset] = mock_rpc - request = {} - client.delete_dataset(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_dataset(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_dataset_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_dataset in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_dataset] = mock_rpc - - request = {} - await client.delete_dataset(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_dataset(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_dataset_async(transport: str = 'grpc_asyncio', request_type=service.DeleteDatasetRequest): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dataset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_dataset(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.DeleteDatasetRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_dataset_async_from_dict(): - await test_delete_dataset_async(request_type=dict) - -def test_delete_dataset_field_headers(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.DeleteDatasetRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_dataset), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_dataset(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_delete_dataset_field_headers_async():
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = service.DeleteDatasetRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_dataset),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
- await client.delete_dataset(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_delete_dataset_flattened():
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_dataset),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/op')
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.delete_dataset(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_delete_dataset_flattened_error():
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.delete_dataset(
- service.DeleteDatasetRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_delete_dataset_flattened_async():
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_dataset),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name='operations/spam')
- )
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.delete_dataset(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
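delete_dataset is a long-running method: the transport returns a raw `operations_pb2.Operation`, which the client wraps into the `future.Future` these tests assert on. A usage sketch (the resource path is a placeholder):

    def delete_and_wait(client):
        lro = client.delete_dataset(
            name='projects/my-project/locations/us-central1/datasets/my-dataset',
        )
        # Poll the server-side operation until it completes (or times out).
        lro.result(timeout=300)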
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_dataset_flattened_error_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_dataset( - service.DeleteDatasetRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - service.ImportDataRequest, - dict, -]) -def test_import_data(request_type, transport: str = 'grpc'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_data), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.import_data(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.ImportDataRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_import_data_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.ImportDataRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_data), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.import_data(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.ImportDataRequest( - name='name_value', - ) - -def test_import_data_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.import_data in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.import_data] = mock_rpc - request = {} - client.import_data(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.import_data(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_import_data_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.import_data in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.import_data] = mock_rpc - - request = {} - await client.import_data(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.import_data(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_import_data_async(transport: str = 'grpc_asyncio', request_type=service.ImportDataRequest): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_data), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.import_data(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.ImportDataRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_import_data_async_from_dict(): - await test_import_data_async(request_type=dict) - -def test_import_data_field_headers(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.ImportDataRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.import_data), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.import_data(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_import_data_field_headers_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.ImportDataRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_data), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.import_data(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_import_data_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_data), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.import_data( - name='name_value', - input_config=io.InputConfig(gcs_source=io.GcsSource(input_uris=['input_uris_value'])), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].input_config - mock_val = io.InputConfig(gcs_source=io.GcsSource(input_uris=['input_uris_value'])) - assert arg == mock_val - - -def test_import_data_flattened_error(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.import_data( - service.ImportDataRequest(), - name='name_value', - input_config=io.InputConfig(gcs_source=io.GcsSource(input_uris=['input_uris_value'])), - ) - -@pytest.mark.asyncio -async def test_import_data_flattened_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_data), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name='operations/spam')
- )
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.import_data(
- name='name_value',
- input_config=io.InputConfig(gcs_source=io.GcsSource(input_uris=['input_uris_value'])),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
- arg = args[0].input_config
- mock_val = io.InputConfig(gcs_source=io.GcsSource(input_uris=['input_uris_value']))
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_import_data_flattened_error_async():
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.import_data(
- service.ImportDataRequest(),
- name='name_value',
- input_config=io.InputConfig(gcs_source=io.GcsSource(input_uris=['input_uris_value'])),
- )
-
-
-@pytest.mark.parametrize("request_type", [
- service.ExportDataRequest,
- dict,
-])
-def test_export_data(request_type, transport: str = 'grpc'):
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.export_data),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/spam')
- response = client.export_data(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = service.ExportDataRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, future.Future)
-
-
-def test_export_data_non_empty_request_with_auto_populated_field():
- # This test is a coverage failsafe to make sure that UUID4 fields are
- # automatically populated, according to AIP-4235, with non-empty requests.
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Populate all string fields in the request which are not UUID4
- # since we want to check that UUID4 are populated automatically
- # if they meet the requirements of AIP 4235.
- request = service.ExportDataRequest(
- name='name_value',
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.export_data),
- '__call__') as call:
- call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
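- # Invoke with the pre-populated request; the asserts below verify the
- # stub received it unchanged.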
- client.export_data(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.ExportDataRequest( - name='name_value', - ) - -def test_export_data_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.export_data in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.export_data] = mock_rpc - request = {} - client.export_data(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.export_data(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_export_data_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.export_data in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.export_data] = mock_rpc - - request = {} - await client.export_data(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.export_data(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_export_data_async(transport: str = 'grpc_asyncio', request_type=service.ExportDataRequest): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_data), - '__call__') as call: - # Designate an appropriate return value for the call. 
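- # FakeUnaryUnaryCall wraps the message in an awaitable, standing in for
- # a real async gRPC call.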
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.export_data(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.ExportDataRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_export_data_async_from_dict(): - await test_export_data_async(request_type=dict) - -def test_export_data_field_headers(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.ExportDataRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_data), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.export_data(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_export_data_field_headers_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.ExportDataRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_data), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.export_data(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_export_data_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_data), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.export_data( - name='name_value', - output_config=io.OutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value')), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
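- # (The flattened keyword arguments above are merged into a single
- # ExportDataRequest before the stub is invoked.)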
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
- arg = args[0].output_config
- mock_val = io.OutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value'))
- assert arg == mock_val
-
-
-def test_export_data_flattened_error():
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.export_data(
- service.ExportDataRequest(),
- name='name_value',
- output_config=io.OutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value')),
- )
-
-@pytest.mark.asyncio
-async def test_export_data_flattened_async():
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.export_data),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name='operations/spam')
- )
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.export_data(
- name='name_value',
- output_config=io.OutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value')),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
- arg = args[0].output_config
- mock_val = io.OutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value'))
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_export_data_flattened_error_async():
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.export_data(
- service.ExportDataRequest(),
- name='name_value',
- output_config=io.OutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value')),
- )
-
-
-@pytest.mark.parametrize("request_type", [
- service.GetAnnotationSpecRequest,
- dict,
-])
-def test_get_annotation_spec(request_type, transport: str = 'grpc'):
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_annotation_spec),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = annotation_spec.AnnotationSpec(
- name='name_value',
- display_name='display_name_value',
- example_count=1396,
- )
- response = client.get_annotation_spec(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.GetAnnotationSpecRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, annotation_spec.AnnotationSpec) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.example_count == 1396 - - -def test_get_annotation_spec_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.GetAnnotationSpecRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_annotation_spec), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_annotation_spec(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.GetAnnotationSpecRequest( - name='name_value', - ) - -def test_get_annotation_spec_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_annotation_spec in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_annotation_spec] = mock_rpc - request = {} - client.get_annotation_spec(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.get_annotation_spec(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_annotation_spec_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.get_annotation_spec in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.get_annotation_spec] = mock_rpc
-
- request = {}
- await client.get_annotation_spec(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.get_annotation_spec(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_annotation_spec_async(transport: str = 'grpc_asyncio', request_type=service.GetAnnotationSpecRequest):
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_annotation_spec),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(annotation_spec.AnnotationSpec(
- name='name_value',
- display_name='display_name_value',
- example_count=1396,
- ))
- response = await client.get_annotation_spec(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = service.GetAnnotationSpecRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, annotation_spec.AnnotationSpec)
- assert response.name == 'name_value'
- assert response.display_name == 'display_name_value'
- assert response.example_count == 1396
-
-
-@pytest.mark.asyncio
-async def test_get_annotation_spec_async_from_dict():
- await test_get_annotation_spec_async(request_type=dict)
-
-def test_get_annotation_spec_field_headers():
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = service.GetAnnotationSpecRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_annotation_spec),
- '__call__') as call:
- call.return_value = annotation_spec.AnnotationSpec()
- client.get_annotation_spec(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_annotation_spec_field_headers_async():
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = service.GetAnnotationSpecRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_annotation_spec),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(annotation_spec.AnnotationSpec())
- await client.get_annotation_spec(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_get_annotation_spec_flattened():
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_annotation_spec),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = annotation_spec.AnnotationSpec()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.get_annotation_spec(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_get_annotation_spec_flattened_error():
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.get_annotation_spec(
- service.GetAnnotationSpecRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_get_annotation_spec_flattened_async():
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_annotation_spec),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(annotation_spec.AnnotationSpec())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.get_annotation_spec(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_annotation_spec_flattened_error_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_annotation_spec( - service.GetAnnotationSpecRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - service.GetTableSpecRequest, - dict, -]) -def test_get_table_spec(request_type, transport: str = 'grpc'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_table_spec), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = table_spec.TableSpec( - name='name_value', - time_column_spec_id='time_column_spec_id_value', - row_count=992, - valid_row_count=1615, - column_count=1302, - etag='etag_value', - ) - response = client.get_table_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.GetTableSpecRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, table_spec.TableSpec) - assert response.name == 'name_value' - assert response.time_column_spec_id == 'time_column_spec_id_value' - assert response.row_count == 992 - assert response.valid_row_count == 1615 - assert response.column_count == 1302 - assert response.etag == 'etag_value' - - -def test_get_table_spec_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.GetTableSpecRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_table_spec), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.get_table_spec(request=request)
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == service.GetTableSpecRequest(
- name='name_value',
- )
-
-def test_get_table_spec_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.get_table_spec in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.get_table_spec] = mock_rpc
- request = {}
- client.get_table_spec(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- client.get_table_spec(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_table_spec_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.get_table_spec in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.get_table_spec] = mock_rpc
-
- request = {}
- await client.get_table_spec(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.get_table_spec(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_table_spec_async(transport: str = 'grpc_asyncio', request_type=service.GetTableSpecRequest):
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_table_spec),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(table_spec.TableSpec(
- name='name_value',
- time_column_spec_id='time_column_spec_id_value',
- row_count=992,
- valid_row_count=1615,
- column_count=1302,
- etag='etag_value',
- ))
- response = await client.get_table_spec(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.GetTableSpecRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, table_spec.TableSpec) - assert response.name == 'name_value' - assert response.time_column_spec_id == 'time_column_spec_id_value' - assert response.row_count == 992 - assert response.valid_row_count == 1615 - assert response.column_count == 1302 - assert response.etag == 'etag_value' - - -@pytest.mark.asyncio -async def test_get_table_spec_async_from_dict(): - await test_get_table_spec_async(request_type=dict) - -def test_get_table_spec_field_headers(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.GetTableSpecRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_table_spec), - '__call__') as call: - call.return_value = table_spec.TableSpec() - client.get_table_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_table_spec_field_headers_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.GetTableSpecRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_table_spec), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(table_spec.TableSpec()) - await client.get_table_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_table_spec_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_table_spec), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = table_spec.TableSpec() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_table_spec( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_table_spec_flattened_error(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError):
- client.get_table_spec(
- service.GetTableSpecRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_get_table_spec_flattened_async():
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_table_spec),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(table_spec.TableSpec())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.get_table_spec(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_table_spec_flattened_error_async():
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.get_table_spec(
- service.GetTableSpecRequest(),
- name='name_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- service.ListTableSpecsRequest,
- dict,
-])
-def test_list_table_specs(request_type, transport: str = 'grpc'):
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_table_specs),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = service.ListTableSpecsResponse(
- next_page_token='next_page_token_value',
- )
- response = client.list_table_specs(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = service.ListTableSpecsRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListTableSpecsPager)
- assert response.next_page_token == 'next_page_token_value'
-
-
-def test_list_table_specs_non_empty_request_with_auto_populated_field():
- # This test is a coverage failsafe to make sure that UUID4 fields are
- # automatically populated, according to AIP-4235, with non-empty requests.
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Populate all string fields in the request which are not UUID4
- # since we want to check that UUID4 are populated automatically
- # if they meet the requirements of AIP 4235.
- request = service.ListTableSpecsRequest(
- parent='parent_value',
- filter='filter_value',
- page_token='page_token_value',
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_table_specs),
- '__call__') as call:
- call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client.list_table_specs(request=request)
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == service.ListTableSpecsRequest(
- parent='parent_value',
- filter='filter_value',
- page_token='page_token_value',
- )
-
-def test_list_table_specs_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.list_table_specs in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.list_table_specs] = mock_rpc
- request = {}
- client.list_table_specs(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- client.list_table_specs(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_table_specs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.list_table_specs in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.list_table_specs] = mock_rpc
-
- request = {}
- await client.list_table_specs(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.list_table_specs(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_table_specs_async(transport: str = 'grpc_asyncio', request_type=service.ListTableSpecsRequest):
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_table_specs),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListTableSpecsResponse(
- next_page_token='next_page_token_value',
- ))
- response = await client.list_table_specs(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.ListTableSpecsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTableSpecsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_table_specs_async_from_dict(): - await test_list_table_specs_async(request_type=dict) - -def test_list_table_specs_field_headers(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.ListTableSpecsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_table_specs), - '__call__') as call: - call.return_value = service.ListTableSpecsResponse() - client.list_table_specs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_table_specs_field_headers_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.ListTableSpecsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_table_specs), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListTableSpecsResponse()) - await client.list_table_specs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_table_specs_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_table_specs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListTableSpecsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_table_specs( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_table_specs_flattened_error(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError):
- client.list_table_specs(
- service.ListTableSpecsRequest(),
- parent='parent_value',
- )
-
-@pytest.mark.asyncio
-async def test_list_table_specs_flattened_async():
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_table_specs),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListTableSpecsResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.list_table_specs(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_table_specs_flattened_error_async():
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.list_table_specs(
- service.ListTableSpecsRequest(),
- parent='parent_value',
- )
-
-
-def test_list_table_specs_pager(transport_name: str = "grpc"):
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_table_specs),
- '__call__') as call:
- # Set the response to a series of pages.
- call.side_effect = (
- service.ListTableSpecsResponse(
- table_specs=[
- table_spec.TableSpec(),
- table_spec.TableSpec(),
- table_spec.TableSpec(),
- ],
- next_page_token='abc',
- ),
- service.ListTableSpecsResponse(
- table_specs=[],
- next_page_token='def',
- ),
- service.ListTableSpecsResponse(
- table_specs=[
- table_spec.TableSpec(),
- ],
- next_page_token='ghi',
- ),
- service.ListTableSpecsResponse(
- table_specs=[
- table_spec.TableSpec(),
- table_spec.TableSpec(),
- ],
- ),
- RuntimeError,
- )
-
- retry = retries.Retry()
- timeout = 5
- expected_metadata = (
- gapic_v1.routing_header.to_grpc_metadata((
- ('parent', ''),
- )),
- )
- pager = client.list_table_specs(request={}, retry=retry, timeout=timeout)
-
- assert pager._metadata == expected_metadata
- assert pager._retry == retry
- assert pager._timeout == timeout
-
- results = list(pager)
- assert len(results) == 6
- assert all(isinstance(i, table_spec.TableSpec)
- for i in results)
-
-def test_list_table_specs_pages(transport_name: str = "grpc"):
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_table_specs),
- '__call__') as call:
- # Set the response to a series of pages.
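- # The last page omits next_page_token, so iteration stops there; the
- # trailing RuntimeError would only surface if the pager over-fetched.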
- call.side_effect = ( - service.ListTableSpecsResponse( - table_specs=[ - table_spec.TableSpec(), - table_spec.TableSpec(), - table_spec.TableSpec(), - ], - next_page_token='abc', - ), - service.ListTableSpecsResponse( - table_specs=[], - next_page_token='def', - ), - service.ListTableSpecsResponse( - table_specs=[ - table_spec.TableSpec(), - ], - next_page_token='ghi', - ), - service.ListTableSpecsResponse( - table_specs=[ - table_spec.TableSpec(), - table_spec.TableSpec(), - ], - ), - RuntimeError, - ) - pages = list(client.list_table_specs(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_table_specs_async_pager(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_table_specs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListTableSpecsResponse( - table_specs=[ - table_spec.TableSpec(), - table_spec.TableSpec(), - table_spec.TableSpec(), - ], - next_page_token='abc', - ), - service.ListTableSpecsResponse( - table_specs=[], - next_page_token='def', - ), - service.ListTableSpecsResponse( - table_specs=[ - table_spec.TableSpec(), - ], - next_page_token='ghi', - ), - service.ListTableSpecsResponse( - table_specs=[ - table_spec.TableSpec(), - table_spec.TableSpec(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_table_specs(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, table_spec.TableSpec) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_table_specs_async_pages(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_table_specs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - service.ListTableSpecsResponse( - table_specs=[ - table_spec.TableSpec(), - table_spec.TableSpec(), - table_spec.TableSpec(), - ], - next_page_token='abc', - ), - service.ListTableSpecsResponse( - table_specs=[], - next_page_token='def', - ), - service.ListTableSpecsResponse( - table_specs=[ - table_spec.TableSpec(), - ], - next_page_token='ghi', - ), - service.ListTableSpecsResponse( - table_specs=[ - table_spec.TableSpec(), - table_spec.TableSpec(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_table_specs(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - service.UpdateTableSpecRequest, - dict, -]) -def test_update_table_spec(request_type, transport: str = 'grpc'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_table_spec), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gca_table_spec.TableSpec( - name='name_value', - time_column_spec_id='time_column_spec_id_value', - row_count=992, - valid_row_count=1615, - column_count=1302, - etag='etag_value', - ) - response = client.update_table_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.UpdateTableSpecRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, gca_table_spec.TableSpec) - assert response.name == 'name_value' - assert response.time_column_spec_id == 'time_column_spec_id_value' - assert response.row_count == 992 - assert response.valid_row_count == 1615 - assert response.column_count == 1302 - assert response.etag == 'etag_value' - - -def test_update_table_spec_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.UpdateTableSpecRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_table_spec), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.update_table_spec(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.UpdateTableSpecRequest( - ) - -def test_update_table_spec_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_table_spec in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_table_spec] = mock_rpc - request = {} - client.update_table_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_table_spec(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_table_spec_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_table_spec in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_table_spec] = mock_rpc - - request = {} - await client.update_table_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.update_table_spec(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_table_spec_async(transport: str = 'grpc_asyncio', request_type=service.UpdateTableSpecRequest): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_table_spec), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gca_table_spec.TableSpec(
- name='name_value',
- time_column_spec_id='time_column_spec_id_value',
- row_count=992,
- valid_row_count=1615,
- column_count=1302,
- etag='etag_value',
- ))
- response = await client.update_table_spec(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = service.UpdateTableSpecRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, gca_table_spec.TableSpec)
- assert response.name == 'name_value'
- assert response.time_column_spec_id == 'time_column_spec_id_value'
- assert response.row_count == 992
- assert response.valid_row_count == 1615
- assert response.column_count == 1302
- assert response.etag == 'etag_value'
-
-
-@pytest.mark.asyncio
-async def test_update_table_spec_async_from_dict():
- await test_update_table_spec_async(request_type=dict)
-
-def test_update_table_spec_field_headers():
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = service.UpdateTableSpecRequest()
-
- request.table_spec.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_table_spec),
- '__call__') as call:
- call.return_value = gca_table_spec.TableSpec()
- client.update_table_spec(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'table_spec.name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_update_table_spec_field_headers_async():
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = service.UpdateTableSpecRequest()
-
- request.table_spec.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_table_spec),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gca_table_spec.TableSpec())
- await client.update_table_spec(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'table_spec.name=name_value',
- ) in kw['metadata']
-
-
-def test_update_table_spec_flattened():
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_table_spec),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = gca_table_spec.TableSpec()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.update_table_spec(
- table_spec=gca_table_spec.TableSpec(name='name_value'),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].table_spec
- mock_val = gca_table_spec.TableSpec(name='name_value')
- assert arg == mock_val
-
-
-def test_update_table_spec_flattened_error():
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.update_table_spec(
- service.UpdateTableSpecRequest(),
- table_spec=gca_table_spec.TableSpec(name='name_value'),
- )
-
-@pytest.mark.asyncio
-async def test_update_table_spec_flattened_async():
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_table_spec),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gca_table_spec.TableSpec())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.update_table_spec(
- table_spec=gca_table_spec.TableSpec(name='name_value'),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].table_spec
- mock_val = gca_table_spec.TableSpec(name='name_value')
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_table_spec_flattened_error_async():
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.update_table_spec(
- service.UpdateTableSpecRequest(),
- table_spec=gca_table_spec.TableSpec(name='name_value'),
- )
-
-
-@pytest.mark.parametrize("request_type", [
- service.GetColumnSpecRequest,
- dict,
-])
-def test_get_column_spec(request_type, transport: str = 'grpc'):
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_column_spec),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = column_spec.ColumnSpec(
- name='name_value',
- display_name='display_name_value',
- etag='etag_value',
- )
- response = client.get_column_spec(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = service.GetColumnSpecRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, column_spec.ColumnSpec) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.etag == 'etag_value' - - -def test_get_column_spec_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.GetColumnSpecRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_column_spec), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_column_spec(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.GetColumnSpecRequest( - name='name_value', - ) - -def test_get_column_spec_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_column_spec in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_column_spec] = mock_rpc - request = {} - client.get_column_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_column_spec(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_column_spec_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_column_spec in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_column_spec] = mock_rpc - - request = {} - await client.get_column_spec(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- await client.get_column_spec(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_column_spec_async(transport: str = 'grpc_asyncio', request_type=service.GetColumnSpecRequest):
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_column_spec),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(column_spec.ColumnSpec(
- name='name_value',
- display_name='display_name_value',
- etag='etag_value',
- ))
- response = await client.get_column_spec(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = service.GetColumnSpecRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, column_spec.ColumnSpec)
- assert response.name == 'name_value'
- assert response.display_name == 'display_name_value'
- assert response.etag == 'etag_value'
-
-
-@pytest.mark.asyncio
-async def test_get_column_spec_async_from_dict():
- await test_get_column_spec_async(request_type=dict)
-
-def test_get_column_spec_field_headers():
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = service.GetColumnSpecRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_column_spec),
- '__call__') as call:
- call.return_value = column_spec.ColumnSpec()
- client.get_column_spec(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_column_spec_field_headers_async():
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = service.GetColumnSpecRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_column_spec),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(column_spec.ColumnSpec())
- await client.get_column_spec(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_get_column_spec_flattened():
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_column_spec),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = column_spec.ColumnSpec()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.get_column_spec(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_get_column_spec_flattened_error():
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.get_column_spec(
- service.GetColumnSpecRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_get_column_spec_flattened_async():
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_column_spec),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(column_spec.ColumnSpec())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.get_column_spec(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_column_spec_flattened_error_async():
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.get_column_spec(
- service.GetColumnSpecRequest(),
- name='name_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- service.ListColumnSpecsRequest,
- dict,
-])
-def test_list_column_specs(request_type, transport: str = 'grpc'):
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_column_specs),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = service.ListColumnSpecsResponse(
- next_page_token='next_page_token_value',
- )
- response = client.list_column_specs(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.ListColumnSpecsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListColumnSpecsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_column_specs_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.ListColumnSpecsRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_column_specs), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_column_specs(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.ListColumnSpecsRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', - ) - -def test_list_column_specs_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_column_specs in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_column_specs] = mock_rpc - request = {} - client.list_column_specs(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.list_column_specs(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_column_specs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.list_column_specs in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.list_column_specs] = mock_rpc
-
- request = {}
- await client.list_column_specs(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.list_column_specs(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_column_specs_async(transport: str = 'grpc_asyncio', request_type=service.ListColumnSpecsRequest):
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_column_specs),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListColumnSpecsResponse(
- next_page_token='next_page_token_value',
- ))
- response = await client.list_column_specs(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = service.ListColumnSpecsRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListColumnSpecsAsyncPager)
- assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_column_specs_async_from_dict():
- await test_list_column_specs_async(request_type=dict)
-
-def test_list_column_specs_field_headers():
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = service.ListColumnSpecsRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_column_specs),
- '__call__') as call:
- call.return_value = service.ListColumnSpecsResponse()
- client.list_column_specs(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_column_specs_field_headers_async():
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = service.ListColumnSpecsRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_column_specs),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListColumnSpecsResponse())
- await client.list_column_specs(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-def test_list_column_specs_flattened():
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_column_specs),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = service.ListColumnSpecsResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.list_column_specs(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-
-def test_list_column_specs_flattened_error():
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.list_column_specs(
- service.ListColumnSpecsRequest(),
- parent='parent_value',
- )
-
-@pytest.mark.asyncio
-async def test_list_column_specs_flattened_async():
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_column_specs),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListColumnSpecsResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.list_column_specs(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_column_specs_flattened_error_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_column_specs( - service.ListColumnSpecsRequest(), - parent='parent_value', - ) - - -def test_list_column_specs_pager(transport_name: str = "grpc"): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_column_specs), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListColumnSpecsResponse( - column_specs=[ - column_spec.ColumnSpec(), - column_spec.ColumnSpec(), - column_spec.ColumnSpec(), - ], - next_page_token='abc', - ), - service.ListColumnSpecsResponse( - column_specs=[], - next_page_token='def', - ), - service.ListColumnSpecsResponse( - column_specs=[ - column_spec.ColumnSpec(), - ], - next_page_token='ghi', - ), - service.ListColumnSpecsResponse( - column_specs=[ - column_spec.ColumnSpec(), - column_spec.ColumnSpec(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_column_specs(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, column_spec.ColumnSpec) - for i in results) -def test_list_column_specs_pages(transport_name: str = "grpc"): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_column_specs), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListColumnSpecsResponse( - column_specs=[ - column_spec.ColumnSpec(), - column_spec.ColumnSpec(), - column_spec.ColumnSpec(), - ], - next_page_token='abc', - ), - service.ListColumnSpecsResponse( - column_specs=[], - next_page_token='def', - ), - service.ListColumnSpecsResponse( - column_specs=[ - column_spec.ColumnSpec(), - ], - next_page_token='ghi', - ), - service.ListColumnSpecsResponse( - column_specs=[ - column_spec.ColumnSpec(), - column_spec.ColumnSpec(), - ], - ), - RuntimeError, - ) - pages = list(client.list_column_specs(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_column_specs_async_pager(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_column_specs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
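- # (The trailing RuntimeError below is a sentinel: if the pager were to
- # request a page beyond the mocked responses, the test would fail loudly
- # instead of looping.)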
- call.side_effect = ( - service.ListColumnSpecsResponse( - column_specs=[ - column_spec.ColumnSpec(), - column_spec.ColumnSpec(), - column_spec.ColumnSpec(), - ], - next_page_token='abc', - ), - service.ListColumnSpecsResponse( - column_specs=[], - next_page_token='def', - ), - service.ListColumnSpecsResponse( - column_specs=[ - column_spec.ColumnSpec(), - ], - next_page_token='ghi', - ), - service.ListColumnSpecsResponse( - column_specs=[ - column_spec.ColumnSpec(), - column_spec.ColumnSpec(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_column_specs(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, column_spec.ColumnSpec) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_column_specs_async_pages(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_column_specs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListColumnSpecsResponse( - column_specs=[ - column_spec.ColumnSpec(), - column_spec.ColumnSpec(), - column_spec.ColumnSpec(), - ], - next_page_token='abc', - ), - service.ListColumnSpecsResponse( - column_specs=[], - next_page_token='def', - ), - service.ListColumnSpecsResponse( - column_specs=[ - column_spec.ColumnSpec(), - ], - next_page_token='ghi', - ), - service.ListColumnSpecsResponse( - column_specs=[ - column_spec.ColumnSpec(), - column_spec.ColumnSpec(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_column_specs(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - service.UpdateColumnSpecRequest, - dict, -]) -def test_update_column_spec(request_type, transport: str = 'grpc'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_column_spec), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gca_column_spec.ColumnSpec( - name='name_value', - display_name='display_name_value', - etag='etag_value', - ) - response = client.update_column_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.UpdateColumnSpecRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, gca_column_spec.ColumnSpec) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.etag == 'etag_value' - - -def test_update_column_spec_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.UpdateColumnSpecRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_column_spec), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_column_spec(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.UpdateColumnSpecRequest( - ) - -def test_update_column_spec_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_column_spec in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_column_spec] = mock_rpc - request = {} - client.update_column_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_column_spec(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_column_spec_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_column_spec in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_column_spec] = mock_rpc - - request = {} - await client.update_column_spec(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- await client.update_column_spec(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_update_column_spec_async(transport: str = 'grpc_asyncio', request_type=service.UpdateColumnSpecRequest):
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_column_spec),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gca_column_spec.ColumnSpec(
- name='name_value',
- display_name='display_name_value',
- etag='etag_value',
- ))
- response = await client.update_column_spec(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = service.UpdateColumnSpecRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, gca_column_spec.ColumnSpec)
- assert response.name == 'name_value'
- assert response.display_name == 'display_name_value'
- assert response.etag == 'etag_value'
-
-
-@pytest.mark.asyncio
-async def test_update_column_spec_async_from_dict():
- await test_update_column_spec_async(request_type=dict)
-
-def test_update_column_spec_field_headers():
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = service.UpdateColumnSpecRequest()
-
- request.column_spec.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_column_spec),
- '__call__') as call:
- call.return_value = gca_column_spec.ColumnSpec()
- client.update_column_spec(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'column_spec.name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_update_column_spec_field_headers_async():
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = service.UpdateColumnSpecRequest()
-
- request.column_spec.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_column_spec),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gca_column_spec.ColumnSpec())
- await client.update_column_spec(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'column_spec.name=name_value',
- ) in kw['metadata']
-
-
-def test_update_column_spec_flattened():
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_column_spec),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = gca_column_spec.ColumnSpec()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.update_column_spec(
- column_spec=gca_column_spec.ColumnSpec(name='name_value'),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].column_spec
- mock_val = gca_column_spec.ColumnSpec(name='name_value')
- assert arg == mock_val
-
-
-def test_update_column_spec_flattened_error():
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.update_column_spec(
- service.UpdateColumnSpecRequest(),
- column_spec=gca_column_spec.ColumnSpec(name='name_value'),
- )
-
-@pytest.mark.asyncio
-async def test_update_column_spec_flattened_async():
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_column_spec),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gca_column_spec.ColumnSpec())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.update_column_spec(
- column_spec=gca_column_spec.ColumnSpec(name='name_value'),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].column_spec
- mock_val = gca_column_spec.ColumnSpec(name='name_value')
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_column_spec_flattened_error_async():
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.update_column_spec(
- service.UpdateColumnSpecRequest(),
- column_spec=gca_column_spec.ColumnSpec(name='name_value'),
- )
-
-
-@pytest.mark.parametrize("request_type", [
- service.CreateModelRequest,
- dict,
-])
-def test_create_model(request_type, transport: str = 'grpc'):
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_model),
- '__call__') as call:
- # Designate an appropriate return value for the call.
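- # (CreateModel is a long-running operation, so the stub returns a raw
- # operations_pb2.Operation and the client surfaces it as a future, which
- # the assertions below check.)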
- call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_model(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.CreateModelRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_model_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.CreateModelRequest( - parent='parent_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_model), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_model(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.CreateModelRequest( - parent='parent_value', - ) - -def test_create_model_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_model in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_model] = mock_rpc - request = {} - client.create_model(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_model(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_model_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_model in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_model] = mock_rpc - - request = {} - await client.create_model(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_model(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_model_async(transport: str = 'grpc_asyncio', request_type=service.CreateModelRequest): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_model(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.CreateModelRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_model_async_from_dict(): - await test_create_model_async(request_type=dict) - -def test_create_model_field_headers(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.CreateModelRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_model), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_model(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_create_model_field_headers_async():
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = service.CreateModelRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_model),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
- await client.create_model(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-def test_create_model_flattened():
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_model),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/op')
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.create_model(
- parent='parent_value',
- model=gca_model.Model(translation_model_metadata=translation.TranslationModelMetadata(base_model='base_model_value')),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
- arg = args[0].model
- mock_val = gca_model.Model(translation_model_metadata=translation.TranslationModelMetadata(base_model='base_model_value'))
- assert arg == mock_val
-
-
-def test_create_model_flattened_error():
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.create_model(
- service.CreateModelRequest(),
- parent='parent_value',
- model=gca_model.Model(translation_model_metadata=translation.TranslationModelMetadata(base_model='base_model_value')),
- )
-
-@pytest.mark.asyncio
-async def test_create_model_flattened_async():
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_model),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name='operations/spam')
- )
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
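- # (The flattened `parent` and `model` keyword arguments are merged into a
- # CreateModelRequest before the RPC is invoked; the assertions below
- # verify the merge.)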
- response = await client.create_model( - parent='parent_value', - model=gca_model.Model(translation_model_metadata=translation.TranslationModelMetadata(base_model='base_model_value')), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].model - mock_val = gca_model.Model(translation_model_metadata=translation.TranslationModelMetadata(base_model='base_model_value')) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_model_flattened_error_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_model( - service.CreateModelRequest(), - parent='parent_value', - model=gca_model.Model(translation_model_metadata=translation.TranslationModelMetadata(base_model='base_model_value')), - ) - - -@pytest.mark.parametrize("request_type", [ - service.GetModelRequest, - dict, -]) -def test_get_model(request_type, transport: str = 'grpc'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = model.Model( - name='name_value', - display_name='display_name_value', - dataset_id='dataset_id_value', - deployment_state=model.Model.DeploymentState.DEPLOYED, - ) - response = client.get_model(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.GetModelRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, model.Model) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.dataset_id == 'dataset_id_value' - assert response.deployment_state == model.Model.DeploymentState.DEPLOYED - - -def test_get_model_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.GetModelRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_model), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.get_model(request=request)
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == service.GetModelRequest(
- name='name_value',
- )
-
-def test_get_model_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = AutoMlClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.get_model in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.get_model] = mock_rpc
- request = {}
- client.get_model(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- client.get_model(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_model_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.get_model in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.get_model] = mock_rpc
-
- request = {}
- await client.get_model(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.get_model(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_model_async(transport: str = 'grpc_asyncio', request_type=service.GetModelRequest):
- client = AutoMlAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_model),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(model.Model(
- name='name_value',
- display_name='display_name_value',
- dataset_id='dataset_id_value',
- deployment_state=model.Model.DeploymentState.DEPLOYED,
- ))
- response = await client.get_model(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.GetModelRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, model.Model) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.dataset_id == 'dataset_id_value' - assert response.deployment_state == model.Model.DeploymentState.DEPLOYED - - -@pytest.mark.asyncio -async def test_get_model_async_from_dict(): - await test_get_model_async(request_type=dict) - -def test_get_model_field_headers(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.GetModelRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_model), - '__call__') as call: - call.return_value = model.Model() - client.get_model(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_model_field_headers_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.GetModelRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_model), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(model.Model()) - await client.get_model(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_model_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = model.Model() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_model( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_model_flattened_error(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
-    with pytest.raises(ValueError):
-        client.get_model(
-            service.GetModelRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_get_model_flattened_async():
-    client = AutoMlAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client.transport.get_model),
-        '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(model.Model())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.get_model(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_model_flattened_error_async():
-    client = AutoMlAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.get_model(
-            service.GetModelRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    service.ListModelsRequest,
-    dict,
-])
-def test_list_models(request_type, transport: str = 'grpc'):
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client.transport.list_models),
-        '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = service.ListModelsResponse(
-            next_page_token='next_page_token_value',
-        )
-        response = client.list_models(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = service.ListModelsRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListModelsPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-def test_list_models_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = service.ListModelsRequest(
-        parent='parent_value',
-        filter='filter_value',
-        page_token='page_token_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client.transport.list_models),
-        '__call__') as call:
-        call.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
-        client.list_models(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == service.ListModelsRequest(
-            parent='parent_value',
-            filter='filter_value',
-            page_token='page_token_value',
-        )
-
-def test_list_models_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = AutoMlClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.list_models in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.list_models] = mock_rpc
-        request = {}
-        client.list_models(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.list_models(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_models_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = AutoMlAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.list_models in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.list_models] = mock_rpc
-
-        request = {}
-        await client.list_models(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.list_models(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_models_async(transport: str = 'grpc_asyncio', request_type=service.ListModelsRequest):
-    client = AutoMlAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client.transport.list_models),
-        '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListModelsResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.list_models(request)
-
-        # Establish that the underlying gRPC stub method was called.
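
``grpc_helpers_async.FakeUnaryUnaryCall`` (shipped with ``google.api_core``) gives the async tests an awaitable standing in for a real unary-unary gRPC call object. A rough, illustration-only equivalent:

.. code-block:: python

    import asyncio

    class FakeUnaryUnaryCall:
        """Awaiting the fake 'call' resolves to the canned response."""
        def __init__(self, response):
            self._response = response

        def __await__(self):
            async def _resolve():
                return self._response
            return _resolve().__await__()

    async def main():
        call = FakeUnaryUnaryCall('fake response')
        assert await call == 'fake response'

    asyncio.run(main())
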
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.ListModelsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListModelsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_models_async_from_dict(): - await test_list_models_async(request_type=dict) - -def test_list_models_field_headers(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.ListModelsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_models), - '__call__') as call: - call.return_value = service.ListModelsResponse() - client.list_models(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_models_field_headers_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.ListModelsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_models), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListModelsResponse()) - await client.list_models(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_models_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_models), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListModelsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_models( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_models_flattened_error(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
-    with pytest.raises(ValueError):
-        client.list_models(
-            service.ListModelsRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_models_flattened_async():
-    client = AutoMlAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client.transport.list_models),
-        '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListModelsResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_models(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_models_flattened_error_async():
-    client = AutoMlAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_models(
-            service.ListModelsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_models_pager(transport_name: str = "grpc"):
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client.transport.list_models),
-        '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            service.ListModelsResponse(
-                model=[
-                    model.Model(),
-                    model.Model(),
-                    model.Model(),
-                ],
-                next_page_token='abc',
-            ),
-            service.ListModelsResponse(
-                model=[],
-                next_page_token='def',
-            ),
-            service.ListModelsResponse(
-                model=[
-                    model.Model(),
-                ],
-                next_page_token='ghi',
-            ),
-            service.ListModelsResponse(
-                model=[
-                    model.Model(),
-                    model.Model(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        expected_metadata = ()
-        retry = retries.Retry()
-        timeout = 5
-        expected_metadata = tuple(expected_metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('parent', ''),
-            )),
-        )
-        pager = client.list_models(request={}, retry=retry, timeout=timeout)
-
-        assert pager._metadata == expected_metadata
-        assert pager._retry == retry
-        assert pager._timeout == timeout
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, model.Model)
-                   for i in results)
-
-def test_list_models_pages(transport_name: str = "grpc"):
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client.transport.list_models),
-        '__call__') as call:
-        # Set the response to a series of pages.
- call.side_effect = ( - service.ListModelsResponse( - model=[ - model.Model(), - model.Model(), - model.Model(), - ], - next_page_token='abc', - ), - service.ListModelsResponse( - model=[], - next_page_token='def', - ), - service.ListModelsResponse( - model=[ - model.Model(), - ], - next_page_token='ghi', - ), - service.ListModelsResponse( - model=[ - model.Model(), - model.Model(), - ], - ), - RuntimeError, - ) - pages = list(client.list_models(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_models_async_pager(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_models), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListModelsResponse( - model=[ - model.Model(), - model.Model(), - model.Model(), - ], - next_page_token='abc', - ), - service.ListModelsResponse( - model=[], - next_page_token='def', - ), - service.ListModelsResponse( - model=[ - model.Model(), - ], - next_page_token='ghi', - ), - service.ListModelsResponse( - model=[ - model.Model(), - model.Model(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_models(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, model.Model) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_models_async_pages(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_models), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListModelsResponse( - model=[ - model.Model(), - model.Model(), - model.Model(), - ], - next_page_token='abc', - ), - service.ListModelsResponse( - model=[], - next_page_token='def', - ), - service.ListModelsResponse( - model=[ - model.Model(), - ], - next_page_token='ghi', - ), - service.ListModelsResponse( - model=[ - model.Model(), - model.Model(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_models(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - service.DeleteModelRequest, - dict, -]) -def test_delete_model(request_type, transport: str = 'grpc'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_model), - '__call__') as call: - # Designate an appropriate return value for the call. 
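
On the pager tests above: the pager keeps fetching pages until it sees an empty ``next_page_token``, and the trailing ``RuntimeError`` in each ``side_effect`` is a sentinel; one fetch too many raises instead of silently succeeding. The core loop, sketched with hypothetical ``_Page`` and ``iterate`` helpers:

.. code-block:: python

    class _Page:
        def __init__(self, items, token):
            self.model = items
            self.next_page_token = token

    def iterate(pages):
        # Yield items page by page; stop on an empty token.
        for page in pages:
            yield from page.model
            if not page.next_page_token:
                return

    pages = [
        _Page([1, 2, 3], 'abc'),
        _Page([], 'def'),
        _Page([4], 'ghi'),
        _Page([5, 6], ''),
    ]
    assert list(iterate(pages)) == [1, 2, 3, 4, 5, 6]  # 6 items, as asserted above
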
- call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.delete_model(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.DeleteModelRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_model_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.DeleteModelRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_model), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_model(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.DeleteModelRequest( - name='name_value', - ) - -def test_delete_model_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_model in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_model] = mock_rpc - request = {} - client.delete_model(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_model(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_model_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_model in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_model] = mock_rpc - - request = {} - await client.delete_model(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_model(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_model_async(transport: str = 'grpc_asyncio', request_type=service.DeleteModelRequest): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_model(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.DeleteModelRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_model_async_from_dict(): - await test_delete_model_async(request_type=dict) - -def test_delete_model_field_headers(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.DeleteModelRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_model), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_model(request) - - # Establish that the underlying gRPC stub method was called. 
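
``delete_model`` and the other mutating RPCs in this file are long-running: the stubbed transport hands back a raw ``google.longrunning`` ``Operation`` proto, which the client wraps in a ``google.api_core`` future, hence ``assert isinstance(response, future.Future)`` rather than a check on a result message. The raw proto side of that, for reference:

.. code-block:: python

    from google.longrunning import operations_pb2

    op = operations_pb2.Operation(name='operations/spam')
    assert op.name == 'operations/spam'
    assert not op.done  # proto3 default; the wrapping future polls until done
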
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_delete_model_field_headers_async():
-    client = AutoMlAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = service.DeleteModelRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client.transport.delete_model),
-        '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
-        await client.delete_model(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_delete_model_flattened():
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client.transport.delete_model),
-        '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.delete_model(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_delete_model_flattened_error():
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_model(
-            service.DeleteModelRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_delete_model_flattened_async():
-    client = AutoMlAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client.transport.delete_model),
-        '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.delete_model(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_model_flattened_error_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_model( - service.DeleteModelRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - service.DeployModelRequest, - dict, -]) -def test_deploy_model(request_type, transport: str = 'grpc'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.deploy_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.deploy_model(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.DeployModelRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_deploy_model_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.DeployModelRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.deploy_model), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.deploy_model(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.DeployModelRequest( - name='name_value', - ) - -def test_deploy_model_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.deploy_model in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.deploy_model] = mock_rpc - request = {} - client.deploy_model(request) - - # Establish that the underlying gRPC stub method was called. 
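
The ``flattened_error`` tests encode an invariant of every generated method: a full request object and flattened keyword fields are mutually exclusive. A sketch of the guard (names mirror the surrounding tests; this is not the generated implementation itself):

.. code-block:: python

    def get_model(request=None, *, name=None):
        if request is not None and name is not None:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')
        # ...otherwise build the request from `name`, or use `request` as-is.

    get_model(request={'name': 'name_value'})   # fine: request object only
    get_model(name='name_value')                # fine: flattened field only
    try:
        get_model(request={'name': 'name_value'}, name='name_value')
    except ValueError:
        pass  # both set: rejected, exactly what the tests assert
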
- assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.deploy_model(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_deploy_model_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.deploy_model in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.deploy_model] = mock_rpc - - request = {} - await client.deploy_model(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.deploy_model(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_deploy_model_async(transport: str = 'grpc_asyncio', request_type=service.DeployModelRequest): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.deploy_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.deploy_model(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.DeployModelRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_deploy_model_async_from_dict(): - await test_deploy_model_async(request_type=dict) - -def test_deploy_model_field_headers(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.DeployModelRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
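
A note on ``mock.patch.object(type(client.transport.deploy_model), '__call__')``: Python looks up ``__call__`` on the type, not the instance, so patching the multicallable instance itself would never intercept the call. A self-contained demonstration (``_Multicallable`` is made up):

.. code-block:: python

    from unittest import mock

    class _Multicallable:
        def __call__(self, request, timeout=None, metadata=None):
            raise RuntimeError('would hit the network')

    stub = _Multicallable()
    with mock.patch.object(_Multicallable, '__call__') as call:
        call.return_value = 'canned response'
        response = stub('fake request')

    assert response == 'canned response'
    _, args, _ = call.mock_calls[0]
    assert args[0] == 'fake request'  # same shape the tests unpack
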
-    with mock.patch.object(
-        type(client.transport.deploy_model),
-        '__call__') as call:
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        client.deploy_model(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_deploy_model_field_headers_async():
-    client = AutoMlAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = service.DeployModelRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client.transport.deploy_model),
-        '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
-        await client.deploy_model(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_deploy_model_flattened():
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client.transport.deploy_model),
-        '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.deploy_model(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_deploy_model_flattened_error():
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.deploy_model(
-            service.DeployModelRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_deploy_model_flattened_async():
-    client = AutoMlAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client.transport.deploy_model),
-        '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.deploy_model(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_deploy_model_flattened_error_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.deploy_model( - service.DeployModelRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - service.UndeployModelRequest, - dict, -]) -def test_undeploy_model(request_type, transport: str = 'grpc'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.undeploy_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.undeploy_model(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.UndeployModelRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_undeploy_model_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.UndeployModelRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.undeploy_model), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.undeploy_model(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.UndeployModelRequest( - name='name_value', - ) - -def test_undeploy_model_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.undeploy_model in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.undeploy_model] = mock_rpc - request = {} - client.undeploy_model(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.undeploy_model(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_undeploy_model_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.undeploy_model in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.undeploy_model] = mock_rpc - - request = {} - await client.undeploy_model(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.undeploy_model(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_undeploy_model_async(transport: str = 'grpc_asyncio', request_type=service.UndeployModelRequest): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.undeploy_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.undeploy_model(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.UndeployModelRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_undeploy_model_async_from_dict(): - await test_undeploy_model_async(request_type=dict) - -def test_undeploy_model_field_headers(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
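
The async variants swap in ``mock.AsyncMock()`` (Python 3.8+) rather than ``mock.Mock()`` so the replaced RPC can still be awaited like the real coroutine function. In isolation:

.. code-block:: python

    import asyncio
    from unittest import mock

    async def main():
        rpc = mock.AsyncMock(return_value='fake response')
        assert await rpc({}) == 'fake response'
        assert await rpc({}) == 'fake response'
        assert rpc.await_count == 2

    asyncio.run(main())
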
-    request = service.UndeployModelRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client.transport.undeploy_model),
-        '__call__') as call:
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        client.undeploy_model(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_undeploy_model_field_headers_async():
-    client = AutoMlAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = service.UndeployModelRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client.transport.undeploy_model),
-        '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
-        await client.undeploy_model(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_undeploy_model_flattened():
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client.transport.undeploy_model),
-        '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.undeploy_model(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_undeploy_model_flattened_error():
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.undeploy_model(
-            service.UndeployModelRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_undeploy_model_flattened_async():
-    client = AutoMlAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client.transport.undeploy_model),
-        '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
- response = await client.undeploy_model( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_undeploy_model_flattened_error_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.undeploy_model( - service.UndeployModelRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - service.ExportModelRequest, - dict, -]) -def test_export_model(request_type, transport: str = 'grpc'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.export_model(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.ExportModelRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_export_model_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.ExportModelRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_model), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.export_model(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.ExportModelRequest( - name='name_value', - ) - -def test_export_model_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.export_model in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
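
The ``non_empty_request_with_auto_populated_field`` tests pre-fill every ordinary string field, so the only fields the library may legitimately touch are the UUID4 request IDs auto-populated per AIP-4235; the final equality assertion then shows nothing else changed. The property being relied on, in isolation:

.. code-block:: python

    import uuid

    request_id = str(uuid.uuid4())  # roughly what the client would inject into an empty field
    assert uuid.UUID(request_id).version == 4
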
- client._transport._wrapped_methods[client._transport.export_model] = mock_rpc - request = {} - client.export_model(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.export_model(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_export_model_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.export_model in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.export_model] = mock_rpc - - request = {} - await client.export_model(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.export_model(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_export_model_async(transport: str = 'grpc_asyncio', request_type=service.ExportModelRequest): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.export_model(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.ExportModelRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_export_model_async_from_dict(): - await test_export_model_async(request_type=dict) - -def test_export_model_field_headers(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = service.ExportModelRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_model), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.export_model(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_export_model_field_headers_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.ExportModelRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_model), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.export_model(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_export_model_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.export_model( - name='name_value', - output_config=io.ModelExportOutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value')), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].output_config - mock_val = io.ModelExportOutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value')) - assert arg == mock_val - - -def test_export_model_flattened_error(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.export_model( - service.ExportModelRequest(), - name='name_value', - output_config=io.ModelExportOutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value')), - ) - -@pytest.mark.asyncio -async def test_export_model_flattened_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
-    with mock.patch.object(
-            type(client.transport.export_model),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.export_model(
-            name='name_value',
-            output_config=io.ModelExportOutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value')),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-        arg = args[0].output_config
-        mock_val = io.ModelExportOutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value'))
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_export_model_flattened_error_async():
-    client = AutoMlAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.export_model(
-            service.ExportModelRequest(),
-            name='name_value',
-            output_config=io.ModelExportOutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value')),
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    service.ExportEvaluatedExamplesRequest,
-    dict,
-])
-def test_export_evaluated_examples(request_type, transport: str = 'grpc'):
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.export_evaluated_examples),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/spam')
-        response = client.export_evaluated_examples(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = service.ExportEvaluatedExamplesRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, future.Future)
-
-
-def test_export_evaluated_examples_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = service.ExportEvaluatedExamplesRequest(
-        name='name_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.export_evaluated_examples),
-            '__call__') as call:
-        call.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
- client.export_evaluated_examples(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.ExportEvaluatedExamplesRequest( - name='name_value', - ) - -def test_export_evaluated_examples_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.export_evaluated_examples in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.export_evaluated_examples] = mock_rpc - request = {} - client.export_evaluated_examples(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.export_evaluated_examples(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_export_evaluated_examples_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.export_evaluated_examples in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.export_evaluated_examples] = mock_rpc - - request = {} - await client.export_evaluated_examples(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.export_evaluated_examples(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_export_evaluated_examples_async(transport: str = 'grpc_asyncio', request_type=service.ExportEvaluatedExamplesRequest): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_evaluated_examples), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.export_evaluated_examples(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = service.ExportEvaluatedExamplesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_export_evaluated_examples_async_from_dict(): - await test_export_evaluated_examples_async(request_type=dict) - -def test_export_evaluated_examples_field_headers(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.ExportEvaluatedExamplesRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_evaluated_examples), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.export_evaluated_examples(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_export_evaluated_examples_field_headers_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = service.ExportEvaluatedExamplesRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_evaluated_examples), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.export_evaluated_examples(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_export_evaluated_examples_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_evaluated_examples), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
-        client.export_evaluated_examples(
-            name='name_value',
-            output_config=io.ExportEvaluatedExamplesOutputConfig(bigquery_destination=io.BigQueryDestination(output_uri='output_uri_value')),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-        arg = args[0].output_config
-        mock_val = io.ExportEvaluatedExamplesOutputConfig(bigquery_destination=io.BigQueryDestination(output_uri='output_uri_value'))
-        assert arg == mock_val
-
-
-def test_export_evaluated_examples_flattened_error():
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.export_evaluated_examples(
-            service.ExportEvaluatedExamplesRequest(),
-            name='name_value',
-            output_config=io.ExportEvaluatedExamplesOutputConfig(bigquery_destination=io.BigQueryDestination(output_uri='output_uri_value')),
-        )
-
-@pytest.mark.asyncio
-async def test_export_evaluated_examples_flattened_async():
-    client = AutoMlAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.export_evaluated_examples),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.export_evaluated_examples(
-            name='name_value',
-            output_config=io.ExportEvaluatedExamplesOutputConfig(bigquery_destination=io.BigQueryDestination(output_uri='output_uri_value')),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-        arg = args[0].output_config
-        mock_val = io.ExportEvaluatedExamplesOutputConfig(bigquery_destination=io.BigQueryDestination(output_uri='output_uri_value'))
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_export_evaluated_examples_flattened_error_async():
-    client = AutoMlAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.export_evaluated_examples(
-            service.ExportEvaluatedExamplesRequest(),
-            name='name_value',
-            output_config=io.ExportEvaluatedExamplesOutputConfig(bigquery_destination=io.BigQueryDestination(output_uri='output_uri_value')),
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    service.GetModelEvaluationRequest,
-    dict,
-])
-def test_get_model_evaluation(request_type, transport: str = 'grpc'):
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.get_model_evaluation), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = model_evaluation.ModelEvaluation( - name='name_value', - annotation_spec_id='annotation_spec_id_value', - display_name='display_name_value', - evaluated_example_count=2446, - ) - response = client.get_model_evaluation(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.GetModelEvaluationRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, model_evaluation.ModelEvaluation) - assert response.name == 'name_value' - assert response.annotation_spec_id == 'annotation_spec_id_value' - assert response.display_name == 'display_name_value' - assert response.evaluated_example_count == 2446 - - -def test_get_model_evaluation_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.GetModelEvaluationRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_model_evaluation), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_model_evaluation(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.GetModelEvaluationRequest( - name='name_value', - ) - -def test_get_model_evaluation_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_model_evaluation in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_model_evaluation] = mock_rpc - request = {} - client.get_model_evaluation(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.get_model_evaluation(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_model_evaluation_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = AutoMlAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.get_model_evaluation in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.get_model_evaluation] = mock_rpc
-
-        request = {}
-        await client.get_model_evaluation(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.get_model_evaluation(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_model_evaluation_async(transport: str = 'grpc_asyncio', request_type=service.GetModelEvaluationRequest):
-    client = AutoMlAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_model_evaluation),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(model_evaluation.ModelEvaluation(
-            name='name_value',
-            annotation_spec_id='annotation_spec_id_value',
-            display_name='display_name_value',
-            evaluated_example_count=2446,
-        ))
-        response = await client.get_model_evaluation(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = service.GetModelEvaluationRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, model_evaluation.ModelEvaluation)
-    assert response.name == 'name_value'
-    assert response.annotation_spec_id == 'annotation_spec_id_value'
-    assert response.display_name == 'display_name_value'
-    assert response.evaluated_example_count == 2446
-
-
-@pytest.mark.asyncio
-async def test_get_model_evaluation_async_from_dict():
-    await test_get_model_evaluation_async(request_type=dict)
-
-def test_get_model_evaluation_field_headers():
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = service.GetModelEvaluationRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_model_evaluation),
-            '__call__') as call:
-        call.return_value = model_evaluation.ModelEvaluation()
-        client.get_model_evaluation(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_model_evaluation_field_headers_async():
-    client = AutoMlAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = service.GetModelEvaluationRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_model_evaluation),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(model_evaluation.ModelEvaluation())
-        await client.get_model_evaluation(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_get_model_evaluation_flattened():
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_model_evaluation),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = model_evaluation.ModelEvaluation()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.get_model_evaluation(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_get_model_evaluation_flattened_error():
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_model_evaluation(
-            service.GetModelEvaluationRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_get_model_evaluation_flattened_async():
-    client = AutoMlAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_model_evaluation),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(model_evaluation.ModelEvaluation())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.get_model_evaluation(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_model_evaluation_flattened_error_async(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_model_evaluation( - service.GetModelEvaluationRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - service.ListModelEvaluationsRequest, - dict, -]) -def test_list_model_evaluations(request_type, transport: str = 'grpc'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_model_evaluations), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = service.ListModelEvaluationsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_model_evaluations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = service.ListModelEvaluationsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListModelEvaluationsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_model_evaluations_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = service.ListModelEvaluationsRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_model_evaluations), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.list_model_evaluations(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == service.ListModelEvaluationsRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', - ) - -def test_list_model_evaluations_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_model_evaluations in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_model_evaluations] = mock_rpc - request = {} - client.list_model_evaluations(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_model_evaluations(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_model_evaluations_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_model_evaluations in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_model_evaluations] = mock_rpc - - request = {} - await client.list_model_evaluations(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_model_evaluations(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_model_evaluations_async(transport: str = 'grpc_asyncio', request_type=service.ListModelEvaluationsRequest): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_model_evaluations), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListModelEvaluationsResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.list_model_evaluations(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = service.ListModelEvaluationsRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListModelEvaluationsAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_model_evaluations_async_from_dict():
-    await test_list_model_evaluations_async(request_type=dict)
-
-def test_list_model_evaluations_field_headers():
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = service.ListModelEvaluationsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_model_evaluations),
-            '__call__') as call:
-        call.return_value = service.ListModelEvaluationsResponse()
-        client.list_model_evaluations(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_model_evaluations_field_headers_async():
-    client = AutoMlAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = service.ListModelEvaluationsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_model_evaluations),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListModelEvaluationsResponse())
-        await client.list_model_evaluations(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_list_model_evaluations_flattened():
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_model_evaluations),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = service.ListModelEvaluationsResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_model_evaluations(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_model_evaluations_flattened_error():
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_model_evaluations(
-            service.ListModelEvaluationsRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_model_evaluations_flattened_async():
-    client = AutoMlAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_model_evaluations),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListModelEvaluationsResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_model_evaluations(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_model_evaluations_flattened_error_async():
-    client = AutoMlAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_model_evaluations(
-            service.ListModelEvaluationsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_model_evaluations_pager(transport_name: str = "grpc"):
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_model_evaluations),
-            '__call__') as call:
-        # Set the response to a series of pages.
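-        # (The trailing RuntimeError below acts as a sentinel: the pager is
-        # expected to stop once a response arrives with an empty
-        # next_page_token, so the sentinel should never be raised.)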
- call.side_effect = ( - service.ListModelEvaluationsResponse( - model_evaluation=[ - model_evaluation.ModelEvaluation(), - model_evaluation.ModelEvaluation(), - model_evaluation.ModelEvaluation(), - ], - next_page_token='abc', - ), - service.ListModelEvaluationsResponse( - model_evaluation=[], - next_page_token='def', - ), - service.ListModelEvaluationsResponse( - model_evaluation=[ - model_evaluation.ModelEvaluation(), - ], - next_page_token='ghi', - ), - service.ListModelEvaluationsResponse( - model_evaluation=[ - model_evaluation.ModelEvaluation(), - model_evaluation.ModelEvaluation(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_model_evaluations(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, model_evaluation.ModelEvaluation) - for i in results) -def test_list_model_evaluations_pages(transport_name: str = "grpc"): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_model_evaluations), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListModelEvaluationsResponse( - model_evaluation=[ - model_evaluation.ModelEvaluation(), - model_evaluation.ModelEvaluation(), - model_evaluation.ModelEvaluation(), - ], - next_page_token='abc', - ), - service.ListModelEvaluationsResponse( - model_evaluation=[], - next_page_token='def', - ), - service.ListModelEvaluationsResponse( - model_evaluation=[ - model_evaluation.ModelEvaluation(), - ], - next_page_token='ghi', - ), - service.ListModelEvaluationsResponse( - model_evaluation=[ - model_evaluation.ModelEvaluation(), - model_evaluation.ModelEvaluation(), - ], - ), - RuntimeError, - ) - pages = list(client.list_model_evaluations(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_model_evaluations_async_pager(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_model_evaluations), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
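-        # (Because the stub is patched with new_callable=mock.AsyncMock, each
-        # element of side_effect below is returned from an awaited call, which
-        # is what lets the pager be consumed with `async for`.)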
- call.side_effect = ( - service.ListModelEvaluationsResponse( - model_evaluation=[ - model_evaluation.ModelEvaluation(), - model_evaluation.ModelEvaluation(), - model_evaluation.ModelEvaluation(), - ], - next_page_token='abc', - ), - service.ListModelEvaluationsResponse( - model_evaluation=[], - next_page_token='def', - ), - service.ListModelEvaluationsResponse( - model_evaluation=[ - model_evaluation.ModelEvaluation(), - ], - next_page_token='ghi', - ), - service.ListModelEvaluationsResponse( - model_evaluation=[ - model_evaluation.ModelEvaluation(), - model_evaluation.ModelEvaluation(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_model_evaluations(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, model_evaluation.ModelEvaluation) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_model_evaluations_async_pages(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_model_evaluations), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - service.ListModelEvaluationsResponse( - model_evaluation=[ - model_evaluation.ModelEvaluation(), - model_evaluation.ModelEvaluation(), - model_evaluation.ModelEvaluation(), - ], - next_page_token='abc', - ), - service.ListModelEvaluationsResponse( - model_evaluation=[], - next_page_token='def', - ), - service.ListModelEvaluationsResponse( - model_evaluation=[ - model_evaluation.ModelEvaluation(), - ], - next_page_token='ghi', - ), - service.ListModelEvaluationsResponse( - model_evaluation=[ - model_evaluation.ModelEvaluation(), - model_evaluation.ModelEvaluation(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_model_evaluations(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_create_dataset_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_dataset in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_dataset] = mock_rpc - - request = {} - client.create_dataset(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.create_dataset(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_create_dataset_rest_required_fields(request_type=service.CreateDatasetRequest):
-    transport_class = transports.AutoMlRestTransport
-
-    request_init = {}
-    request_init["parent"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_dataset._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["parent"] = 'parent_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_dataset._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "parent" in jsonified_request
-    assert jsonified_request["parent"] == 'parent_value'
-
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = gca_dataset.Dataset()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = gca_dataset.Dataset.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.create_dataset(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_create_dataset_rest_unset_required_fields():
-    transport = transports.AutoMlRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.create_dataset._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("parent", "dataset", )))
-
-
-def test_create_dataset_rest_flattened():
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
- return_value = gca_dataset.Dataset() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - dataset=gca_dataset.Dataset(translation_dataset_metadata=translation.TranslationDatasetMetadata(source_language_code='source_language_code_value')), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gca_dataset.Dataset.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_dataset(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1beta1/{parent=projects/*/locations/*}/datasets" % client.transport._host, args[1]) - - -def test_create_dataset_rest_flattened_error(transport: str = 'rest'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_dataset( - service.CreateDatasetRequest(), - parent='parent_value', - dataset=gca_dataset.Dataset(translation_dataset_metadata=translation.TranslationDatasetMetadata(source_language_code='source_language_code_value')), - ) - - -def test_get_dataset_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_dataset in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_dataset] = mock_rpc - - request = {} - client.get_dataset(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.get_dataset(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_get_dataset_rest_required_fields(request_type=service.GetDatasetRequest):
-    transport_class = transports.AutoMlRestTransport
-
-    request_init = {}
-    request_init["name"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_dataset._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["name"] = 'name_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_dataset._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
-
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = dataset.Dataset()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = dataset.Dataset.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.get_dataset(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_get_dataset_rest_unset_required_fields():
-    transport = transports.AutoMlRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.get_dataset._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-def test_get_dataset_rest_flattened():
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
- return_value = dataset.Dataset() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/datasets/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dataset.Dataset.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_dataset(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1beta1/{name=projects/*/locations/*/datasets/*}" % client.transport._host, args[1]) - - -def test_get_dataset_rest_flattened_error(transport: str = 'rest'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_dataset( - service.GetDatasetRequest(), - name='name_value', - ) - - -def test_list_datasets_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_datasets in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_datasets] = mock_rpc - - request = {} - client.list_datasets(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.list_datasets(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_list_datasets_rest_required_fields(request_type=service.ListDatasetsRequest):
-    transport_class = transports.AutoMlRestTransport
-
-    request_init = {}
-    request_init["parent"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_datasets._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["parent"] = 'parent_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_datasets._get_unset_required_fields(jsonified_request)
-    # Check that path parameters and body parameters are not mixing in.
-    assert not set(unset_fields) - set(("filter", "page_size", "page_token", ))
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "parent" in jsonified_request
-    assert jsonified_request["parent"] == 'parent_value'
-
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = service.ListDatasetsResponse()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = service.ListDatasetsResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.list_datasets(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_list_datasets_rest_unset_required_fields():
-    transport = transports.AutoMlRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.list_datasets._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", )))
-
-
-def test_list_datasets_rest_flattened():
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = service.ListDatasetsResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            parent='parent_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = service.ListDatasetsResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.list_datasets(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1beta1/{parent=projects/*/locations/*}/datasets" % client.transport._host, args[1])
-
-
-def test_list_datasets_rest_flattened_error(transport: str = 'rest'):
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_datasets(
-            service.ListDatasetsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_datasets_rest_pager(transport: str = 'rest'):
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # Set the response as a series of pages
-        response = (
-            service.ListDatasetsResponse(
-                datasets=[
-                    dataset.Dataset(),
-                    dataset.Dataset(),
-                    dataset.Dataset(),
-                ],
-                next_page_token='abc',
-            ),
-            service.ListDatasetsResponse(
-                datasets=[],
-                next_page_token='def',
-            ),
-            service.ListDatasetsResponse(
-                datasets=[
-                    dataset.Dataset(),
-                ],
-                next_page_token='ghi',
-            ),
-            service.ListDatasetsResponse(
-                datasets=[
-                    dataset.Dataset(),
-                    dataset.Dataset(),
-                ],
-            ),
-        )
-        # Two responses for two calls
-        response = response + response
-
-        # Wrap the values into proper Response objs
-        response = tuple(service.ListDatasetsResponse.to_json(x) for x in response)
-        return_values = tuple(Response() for i in response)
-        for return_val, response_val in zip(return_values, response):
-            return_val._content = response_val.encode('UTF-8')
-            return_val.status_code = 200
-        req.side_effect = return_values
-
-        sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
-        pager = client.list_datasets(request=sample_request)
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, dataset.Dataset)
-                   for i in results)
-
-        pages = list(client.list_datasets(request=sample_request).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-
-def test_update_dataset_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = AutoMlClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.update_dataset in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.update_dataset] = mock_rpc
-
-        request = {}
-        client.update_dataset(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.update_dataset(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_update_dataset_rest_required_fields(request_type=service.UpdateDatasetRequest):
-    transport_class = transports.AutoMlRestTransport
-
-    request_init = {}
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_dataset._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_dataset._get_unset_required_fields(jsonified_request)
-    # Check that path parameters and body parameters are not mixing in.
- assert not set(unset_fields) - set(("update_mask", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = gca_dataset.Dataset() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gca_dataset.Dataset.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_dataset(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_dataset_rest_unset_required_fields(): - transport = transports.AutoMlRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_dataset._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", )) & set(("dataset", ))) - - -def test_update_dataset_rest_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = gca_dataset.Dataset() - - # get arguments that satisfy an http rule for this method - sample_request = {'dataset': {'name': 'projects/sample1/locations/sample2/datasets/sample3'}} - - # get truthy value for each flattened field - mock_args = dict( - dataset=gca_dataset.Dataset(translation_dataset_metadata=translation.TranslationDatasetMetadata(source_language_code='source_language_code_value')), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gca_dataset.Dataset.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_dataset(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1beta1/{dataset.name=projects/*/locations/*/datasets/*}" % client.transport._host, args[1]) - - -def test_update_dataset_rest_flattened_error(transport: str = 'rest'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_dataset( - service.UpdateDatasetRequest(), - dataset=gca_dataset.Dataset(translation_dataset_metadata=translation.TranslationDatasetMetadata(source_language_code='source_language_code_value')), - ) - - -def test_delete_dataset_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_dataset in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_dataset] = mock_rpc - - request = {} - client.delete_dataset(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_dataset(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_dataset_rest_required_fields(request_type=service.DeleteDatasetRequest): - transport_class = transports.AutoMlRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_dataset._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_dataset._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_dataset(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_dataset_rest_unset_required_fields(): - transport = transports.AutoMlRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_dataset._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_delete_dataset_rest_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/datasets/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_dataset(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1beta1/{name=projects/*/locations/*/datasets/*}" % client.transport._host, args[1]) - - -def test_delete_dataset_rest_flattened_error(transport: str = 'rest'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_dataset( - service.DeleteDatasetRequest(), - name='name_value', - ) - - -def test_import_data_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.import_data in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.import_data] = mock_rpc - - request = {} - client.import_data(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.import_data(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_import_data_rest_required_fields(request_type=service.ImportDataRequest): - transport_class = transports.AutoMlRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).import_data._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).import_data._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.import_data(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_import_data_rest_unset_required_fields(): - transport = transports.AutoMlRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.import_data._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", "inputConfig", ))) - - -def test_import_data_rest_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/datasets/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - input_config=io.InputConfig(gcs_source=io.GcsSource(input_uris=['input_uris_value'])), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.import_data(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1beta1/{name=projects/*/locations/*/datasets/*}:importData" % client.transport._host, args[1]) - - -def test_import_data_rest_flattened_error(transport: str = 'rest'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.import_data( - service.ImportDataRequest(), - name='name_value', - input_config=io.InputConfig(gcs_source=io.GcsSource(input_uris=['input_uris_value'])), - ) - - -def test_export_data_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.export_data in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.export_data] = mock_rpc - - request = {} - client.export_data(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.export_data(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_export_data_rest_required_fields(request_type=service.ExportDataRequest): - transport_class = transports.AutoMlRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_data._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_data._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.export_data(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_export_data_rest_unset_required_fields(): - transport = transports.AutoMlRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.export_data._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", "outputConfig", ))) - - -def test_export_data_rest_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/datasets/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - output_config=io.OutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value')), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.export_data(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1beta1/{name=projects/*/locations/*/datasets/*}:exportData" % client.transport._host, args[1]) - - -def test_export_data_rest_flattened_error(transport: str = 'rest'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.export_data( - service.ExportDataRequest(), - name='name_value', - output_config=io.OutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value')), - ) - - -def test_get_annotation_spec_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_annotation_spec in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_annotation_spec] = mock_rpc - - request = {} - client.get_annotation_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_annotation_spec(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_annotation_spec_rest_required_fields(request_type=service.GetAnnotationSpecRequest): - transport_class = transports.AutoMlRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_annotation_spec._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_annotation_spec._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = annotation_spec.AnnotationSpec() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = annotation_spec.AnnotationSpec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_annotation_spec(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_annotation_spec_rest_unset_required_fields(): - transport = transports.AutoMlRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_annotation_spec._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_annotation_spec_rest_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = annotation_spec.AnnotationSpec() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/datasets/sample3/annotationSpecs/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = annotation_spec.AnnotationSpec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_annotation_spec(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1beta1/{name=projects/*/locations/*/datasets/*/annotationSpecs/*}" % client.transport._host, args[1]) - - -def test_get_annotation_spec_rest_flattened_error(transport: str = 'rest'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_annotation_spec( - service.GetAnnotationSpecRequest(), - name='name_value', - ) - - -def test_get_table_spec_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_table_spec in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_table_spec] = mock_rpc - - request = {} - client.get_table_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_table_spec(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_table_spec_rest_required_fields(request_type=service.GetTableSpecRequest): - transport_class = transports.AutoMlRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_table_spec._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_table_spec._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("field_mask", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = table_spec.TableSpec() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = table_spec.TableSpec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_table_spec(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_table_spec_rest_unset_required_fields(): - transport = transports.AutoMlRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_table_spec._get_unset_required_fields({}) - assert set(unset_fields) == (set(("fieldMask", )) & set(("name", ))) - - -def test_get_table_spec_rest_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = table_spec.TableSpec() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/datasets/sample3/tableSpecs/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = table_spec.TableSpec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_table_spec(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1beta1/{name=projects/*/locations/*/datasets/*/tableSpecs/*}" % client.transport._host, args[1]) - - -def test_get_table_spec_rest_flattened_error(transport: str = 'rest'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_table_spec( - service.GetTableSpecRequest(), - name='name_value', - ) - - -def test_list_table_specs_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_table_specs in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_table_specs] = mock_rpc - - request = {} - client.list_table_specs(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_table_specs(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_table_specs_rest_required_fields(request_type=service.ListTableSpecsRequest): - transport_class = transports.AutoMlRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_table_specs._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_table_specs._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("field_mask", "filter", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = service.ListTableSpecsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
-        pb_request = request_type.pb(request)
-        transcode_result = {
-            'uri': 'v1/sample_method',
-            'method': "get",
-            'query_params': pb_request,
-        }
-        transcode.return_value = transcode_result
-
-        response_value = Response()
-        response_value.status_code = 200
-
-        # Convert return value to protobuf type
-        return_value = service.ListTableSpecsResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        response = client.list_table_specs(request)
-
-        expected_params = [
-            ('$alt', 'json;enum-encoding=int')
-        ]
-        actual_params = req.call_args.kwargs['params']
-        assert expected_params == actual_params
-
-
-def test_list_table_specs_rest_unset_required_fields():
-    transport = transports.AutoMlRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.list_table_specs._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("fieldMask", "filter", "pageSize", "pageToken", )) & set(("parent", )))
-
-
-def test_list_table_specs_rest_flattened():
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = service.ListTableSpecsResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'parent': 'projects/sample1/locations/sample2/datasets/sample3'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            parent='parent_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = service.ListTableSpecsResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.list_table_specs(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1beta1/{parent=projects/*/locations/*/datasets/*}/tableSpecs" % client.transport._host, args[1])
-
-
-def test_list_table_specs_rest_flattened_error(transport: str = 'rest'):
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_table_specs(
-            service.ListTableSpecsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_table_specs_rest_pager(transport: str = 'rest'):
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # Set the response as a series of pages
-        response = (
-            service.ListTableSpecsResponse(
-                table_specs=[
-                    table_spec.TableSpec(),
-                    table_spec.TableSpec(),
-                    table_spec.TableSpec(),
-                ],
-                next_page_token='abc',
-            ),
-            service.ListTableSpecsResponse(
-                table_specs=[],
-                next_page_token='def',
-            ),
-            service.ListTableSpecsResponse(
-                table_specs=[
-                    table_spec.TableSpec(),
-                ],
-                next_page_token='ghi',
-            ),
-            service.ListTableSpecsResponse(
-                table_specs=[
-                    table_spec.TableSpec(),
-                    table_spec.TableSpec(),
-                ],
-            ),
-        )
-        # Two responses for two calls
-        response = response + response
-
-        # Wrap the values into proper Response objs
-        response = tuple(service.ListTableSpecsResponse.to_json(x) for x in response)
-        return_values = tuple(Response() for i in response)
-        for return_val, response_val in zip(return_values, response):
-            return_val._content = response_val.encode('UTF-8')
-            return_val.status_code = 200
-        req.side_effect = return_values
-
-        sample_request = {'parent': 'projects/sample1/locations/sample2/datasets/sample3'}
-
-        pager = client.list_table_specs(request=sample_request)
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, table_spec.TableSpec)
-                   for i in results)
-
-        pages = list(client.list_table_specs(request=sample_request).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-
-def test_update_table_spec_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = AutoMlClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.update_table_spec in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.update_table_spec] = mock_rpc
-
-        request = {}
-        client.update_table_spec(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.update_table_spec(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_update_table_spec_rest_required_fields(request_type=service.UpdateTableSpecRequest):
-    transport_class = transports.AutoMlRestTransport
-
-    request_init = {}
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_table_spec._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_table_spec._get_unset_required_fields(jsonified_request)
-    # Check that path parameters and body parameters are not mixing in.
- assert not set(unset_fields) - set(("update_mask", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = gca_table_spec.TableSpec() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gca_table_spec.TableSpec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_table_spec(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_table_spec_rest_unset_required_fields(): - transport = transports.AutoMlRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_table_spec._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", )) & set(("tableSpec", ))) - - -def test_update_table_spec_rest_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = gca_table_spec.TableSpec() - - # get arguments that satisfy an http rule for this method - sample_request = {'table_spec': {'name': 'projects/sample1/locations/sample2/datasets/sample3/tableSpecs/sample4'}} - - # get truthy value for each flattened field - mock_args = dict( - table_spec=gca_table_spec.TableSpec(name='name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gca_table_spec.TableSpec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_table_spec(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1beta1/{table_spec.name=projects/*/locations/*/datasets/*/tableSpecs/*}" % client.transport._host, args[1]) - - -def test_update_table_spec_rest_flattened_error(transport: str = 'rest'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_table_spec( - service.UpdateTableSpecRequest(), - table_spec=gca_table_spec.TableSpec(name='name_value'), - ) - - -def test_get_column_spec_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_column_spec in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_column_spec] = mock_rpc - - request = {} - client.get_column_spec(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_column_spec(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_column_spec_rest_required_fields(request_type=service.GetColumnSpecRequest): - transport_class = transports.AutoMlRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_column_spec._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_column_spec._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("field_mask", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = column_spec.ColumnSpec() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = column_spec.ColumnSpec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_column_spec(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_column_spec_rest_unset_required_fields(): - transport = transports.AutoMlRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_column_spec._get_unset_required_fields({}) - assert set(unset_fields) == (set(("fieldMask", )) & set(("name", ))) - - -def test_get_column_spec_rest_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = column_spec.ColumnSpec() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/datasets/sample3/tableSpecs/sample4/columnSpecs/sample5'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = column_spec.ColumnSpec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_column_spec(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1beta1/{name=projects/*/locations/*/datasets/*/tableSpecs/*/columnSpecs/*}" % client.transport._host, args[1]) - - -def test_get_column_spec_rest_flattened_error(transport: str = 'rest'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_column_spec( - service.GetColumnSpecRequest(), - name='name_value', - ) - - -def test_list_column_specs_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_column_specs in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_column_specs] = mock_rpc - - request = {} - client.list_column_specs(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_column_specs(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_column_specs_rest_required_fields(request_type=service.ListColumnSpecsRequest): - transport_class = transports.AutoMlRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_column_specs._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_column_specs._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("field_mask", "filter", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = service.ListColumnSpecsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
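-            # (For reference: the real http rule for this method is
-            # GET v1beta1/{parent=projects/*/locations/*/datasets/*/tableSpecs/*}/columnSpecs,
-            # so an unstubbed transcode() would pull `parent` into the URI and
-            # leave only optional fields such as page_size in query_params.)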
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = service.ListColumnSpecsResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.list_column_specs(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_list_column_specs_rest_unset_required_fields():
-    transport = transports.AutoMlRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.list_column_specs._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("fieldMask", "filter", "pageSize", "pageToken", )) & set(("parent", )))
-
-
-def test_list_column_specs_rest_flattened():
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = service.ListColumnSpecsResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'parent': 'projects/sample1/locations/sample2/datasets/sample3/tableSpecs/sample4'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            parent='parent_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = service.ListColumnSpecsResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.list_column_specs(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1beta1/{parent=projects/*/locations/*/datasets/*/tableSpecs/*}/columnSpecs" % client.transport._host, args[1])
-
-
-def test_list_column_specs_rest_flattened_error(transport: str = 'rest'):
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_column_specs(
-            service.ListColumnSpecsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_column_specs_rest_pager(transport: str = 'rest'):
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # Set the response as a series of pages
-        response = (
-            service.ListColumnSpecsResponse(
-                column_specs=[
-                    column_spec.ColumnSpec(),
-                    column_spec.ColumnSpec(),
-                    column_spec.ColumnSpec(),
-                ],
-                next_page_token='abc',
-            ),
-            service.ListColumnSpecsResponse(
-                column_specs=[],
-                next_page_token='def',
-            ),
-            service.ListColumnSpecsResponse(
-                column_specs=[
-                    column_spec.ColumnSpec(),
-                ],
-                next_page_token='ghi',
-            ),
-            service.ListColumnSpecsResponse(
-                column_specs=[
-                    column_spec.ColumnSpec(),
-                    column_spec.ColumnSpec(),
-                ],
-            ),
-        )
-        # Two sets of responses, one for each pass over the pages below
-        response = response + response
-
-        # Wrap the values into proper Response objs
-        response = tuple(service.ListColumnSpecsResponse.to_json(x) for x in response)
-        return_values = tuple(Response() for i in response)
-        for return_val, response_val in zip(return_values, response):
-            return_val._content = response_val.encode('UTF-8')
-            return_val.status_code = 200
-        req.side_effect = return_values
-
-        sample_request = {'parent': 'projects/sample1/locations/sample2/datasets/sample3/tableSpecs/sample4'}
-
-        pager = client.list_column_specs(request=sample_request)
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, column_spec.ColumnSpec)
-                   for i in results)
-
-        pages = list(client.list_column_specs(request=sample_request).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-
-def test_update_column_spec_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = AutoMlClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.update_column_spec in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.update_column_spec] = mock_rpc
-
-        request = {}
-        client.update_column_spec(request)
-
-        # Establish that the underlying gRPC stub method was called.
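-        # (On the REST transport this "stub" is the mock installed in
-        # _wrapped_methods above; the transport itself never creates gRPC stubs.)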
- assert mock_rpc.call_count == 1 - - client.update_column_spec(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_column_spec_rest_required_fields(request_type=service.UpdateColumnSpecRequest): - transport_class = transports.AutoMlRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_column_spec._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_column_spec._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = gca_column_spec.ColumnSpec() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gca_column_spec.ColumnSpec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_column_spec(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_column_spec_rest_unset_required_fields(): - transport = transports.AutoMlRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_column_spec._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", )) & set(("columnSpec", ))) - - -def test_update_column_spec_rest_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
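-        # (An empty ColumnSpec is enough here; the assertions below only check
-        # the request URI the client builds, not the response payload.)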
- return_value = gca_column_spec.ColumnSpec() - - # get arguments that satisfy an http rule for this method - sample_request = {'column_spec': {'name': 'projects/sample1/locations/sample2/datasets/sample3/tableSpecs/sample4/columnSpecs/sample5'}} - - # get truthy value for each flattened field - mock_args = dict( - column_spec=gca_column_spec.ColumnSpec(name='name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gca_column_spec.ColumnSpec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_column_spec(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1beta1/{column_spec.name=projects/*/locations/*/datasets/*/tableSpecs/*/columnSpecs/*}" % client.transport._host, args[1]) - - -def test_update_column_spec_rest_flattened_error(transport: str = 'rest'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_column_spec( - service.UpdateColumnSpecRequest(), - column_spec=gca_column_spec.ColumnSpec(name='name_value'), - ) - - -def test_create_model_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_model in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_model] = mock_rpc - - request = {} - client.create_model(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_model(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_model_rest_required_fields(request_type=service.CreateModelRequest): - transport_class = transports.AutoMlRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_model._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_model._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_model(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_model_rest_unset_required_fields(): - transport = transports.AutoMlRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_model._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "model", ))) - - -def test_create_model_rest_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
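-    # (Patching 'request' on the session's own class -- an AuthorizedSession,
-    # which subclasses requests.Session -- is equivalent to the Session.request
-    # patch used elsewhere in this file.)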
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - model=gca_model.Model(translation_model_metadata=translation.TranslationModelMetadata(base_model='base_model_value')), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_model(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1beta1/{parent=projects/*/locations/*}/models" % client.transport._host, args[1]) - - -def test_create_model_rest_flattened_error(transport: str = 'rest'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_model( - service.CreateModelRequest(), - parent='parent_value', - model=gca_model.Model(translation_model_metadata=translation.TranslationModelMetadata(base_model='base_model_value')), - ) - - -def test_get_model_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_model in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_model] = mock_rpc - - request = {} - client.get_model(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_model(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_model_rest_required_fields(request_type=service.GetModelRequest): - transport_class = transports.AutoMlRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_model._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_model._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = model.Model() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = model.Model.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_model(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_model_rest_unset_required_fields(): - transport = transports.AutoMlRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_model._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_model_rest_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = model.Model() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/models/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = model.Model.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_model(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1beta1/{name=projects/*/locations/*/models/*}" % client.transport._host, args[1]) - - -def test_get_model_rest_flattened_error(transport: str = 'rest'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_model( - service.GetModelRequest(), - name='name_value', - ) - - -def test_list_models_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_models in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_models] = mock_rpc - - request = {} - client.list_models(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_models(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_models_rest_required_fields(request_type=service.ListModelsRequest): - transport_class = transports.AutoMlRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_models._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_models._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
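-    # (i.e. every field still unset must be one of the declared query
-    # parameters; a path or body field appearing here would leave the set
-    # difference below non-empty and fail the assertion.)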
- assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = service.ListModelsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = service.ListModelsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_models(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_models_rest_unset_required_fields(): - transport = transports.AutoMlRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_models._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_models_rest_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = service.ListModelsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = service.ListModelsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_models(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
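-        # (args[1] is the URL handed to the mocked session; path_template.validate
-        # matches it against this method's v1beta1 URI template.)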
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1beta1/{parent=projects/*/locations/*}/models" % client.transport._host, args[1])
-
-
-def test_list_models_rest_flattened_error(transport: str = 'rest'):
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_models(
-            service.ListModelsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_models_rest_pager(transport: str = 'rest'):
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # Set the response as a series of pages
-        response = (
-            service.ListModelsResponse(
-                model=[
-                    model.Model(),
-                    model.Model(),
-                    model.Model(),
-                ],
-                next_page_token='abc',
-            ),
-            service.ListModelsResponse(
-                model=[],
-                next_page_token='def',
-            ),
-            service.ListModelsResponse(
-                model=[
-                    model.Model(),
-                ],
-                next_page_token='ghi',
-            ),
-            service.ListModelsResponse(
-                model=[
-                    model.Model(),
-                    model.Model(),
-                ],
-            ),
-        )
-        # Two sets of responses, one for each pass over the pages below
-        response = response + response
-
-        # Wrap the values into proper Response objs
-        response = tuple(service.ListModelsResponse.to_json(x) for x in response)
-        return_values = tuple(Response() for i in response)
-        for return_val, response_val in zip(return_values, response):
-            return_val._content = response_val.encode('UTF-8')
-            return_val.status_code = 200
-        req.side_effect = return_values
-
-        sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
-        pager = client.list_models(request=sample_request)
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, model.Model)
-                   for i in results)
-
-        pages = list(client.list_models(request=sample_request).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-
-def test_delete_model_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = AutoMlClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.delete_model in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.delete_model] = mock_rpc
-
-        request = {}
-        client.delete_model(request)
-
-        # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_model(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_model_rest_required_fields(request_type=service.DeleteModelRequest): - transport_class = transports.AutoMlRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_model._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_model._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_model(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_model_rest_unset_required_fields(): - transport = transports.AutoMlRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_model._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_delete_model_rest_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
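-        # (delete_model is a long-running method, so the faked response is a
-        # google.longrunning Operation rather than an empty body.)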
- return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/models/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_model(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1beta1/{name=projects/*/locations/*/models/*}" % client.transport._host, args[1]) - - -def test_delete_model_rest_flattened_error(transport: str = 'rest'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_model( - service.DeleteModelRequest(), - name='name_value', - ) - - -def test_deploy_model_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.deploy_model in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.deploy_model] = mock_rpc - - request = {} - client.deploy_model(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.deploy_model(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_deploy_model_rest_required_fields(request_type=service.DeployModelRequest): - transport_class = transports.AutoMlRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).deploy_model._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).deploy_model._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.deploy_model(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_deploy_model_rest_unset_required_fields(): - transport = transports.AutoMlRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.deploy_model._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_deploy_model_rest_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/models/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.deploy_model(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1beta1/{name=projects/*/locations/*/models/*}:deploy" % client.transport._host, args[1]) - - -def test_deploy_model_rest_flattened_error(transport: str = 'rest'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.deploy_model( - service.DeployModelRequest(), - name='name_value', - ) - - -def test_undeploy_model_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.undeploy_model in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.undeploy_model] = mock_rpc - - request = {} - client.undeploy_model(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.undeploy_model(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_undeploy_model_rest_required_fields(request_type=service.UndeployModelRequest): - transport_class = transports.AutoMlRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).undeploy_model._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).undeploy_model._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.undeploy_model(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_undeploy_model_rest_unset_required_fields(): - transport = transports.AutoMlRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.undeploy_model._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_undeploy_model_rest_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/models/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.undeploy_model(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1beta1/{name=projects/*/locations/*/models/*}:undeploy" % client.transport._host, args[1]) - - -def test_undeploy_model_rest_flattened_error(transport: str = 'rest'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.undeploy_model( - service.UndeployModelRequest(), - name='name_value', - ) - - -def test_export_model_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.export_model in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.export_model] = mock_rpc - - request = {} - client.export_model(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.export_model(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_export_model_rest_required_fields(request_type=service.ExportModelRequest): - transport_class = transports.AutoMlRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_model._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_model._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.export_model(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_export_model_rest_unset_required_fields(): - transport = transports.AutoMlRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.export_model._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", "outputConfig", ))) - - -def test_export_model_rest_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/models/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - output_config=io.ModelExportOutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value')), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.export_model(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1beta1/{name=projects/*/locations/*/models/*}:export" % client.transport._host, args[1]) - - -def test_export_model_rest_flattened_error(transport: str = 'rest'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.export_model( - service.ExportModelRequest(), - name='name_value', - output_config=io.ModelExportOutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value')), - ) - - -def test_export_evaluated_examples_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.export_evaluated_examples in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.export_evaluated_examples] = mock_rpc - - request = {} - client.export_evaluated_examples(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.export_evaluated_examples(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_export_evaluated_examples_rest_required_fields(request_type=service.ExportEvaluatedExamplesRequest): - transport_class = transports.AutoMlRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_evaluated_examples._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_evaluated_examples._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.export_evaluated_examples(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_export_evaluated_examples_rest_unset_required_fields(): - transport = transports.AutoMlRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.export_evaluated_examples._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", "outputConfig", ))) - - -def test_export_evaluated_examples_rest_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/models/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - output_config=io.ExportEvaluatedExamplesOutputConfig(bigquery_destination=io.BigQueryDestination(output_uri='output_uri_value')), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.export_evaluated_examples(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1beta1/{name=projects/*/locations/*/models/*}:exportEvaluatedExamples" % client.transport._host, args[1]) - - -def test_export_evaluated_examples_rest_flattened_error(transport: str = 'rest'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.export_evaluated_examples( - service.ExportEvaluatedExamplesRequest(), - name='name_value', - output_config=io.ExportEvaluatedExamplesOutputConfig(bigquery_destination=io.BigQueryDestination(output_uri='output_uri_value')), - ) - - -def test_get_model_evaluation_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_model_evaluation in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_model_evaluation] = mock_rpc - - request = {} - client.get_model_evaluation(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_model_evaluation(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_model_evaluation_rest_required_fields(request_type=service.GetModelEvaluationRequest): - transport_class = transports.AutoMlRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_model_evaluation._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_model_evaluation._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = model_evaluation.ModelEvaluation() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = model_evaluation.ModelEvaluation.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_model_evaluation(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_model_evaluation_rest_unset_required_fields(): - transport = transports.AutoMlRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_model_evaluation._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_model_evaluation_rest_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = model_evaluation.ModelEvaluation() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/models/sample3/modelEvaluations/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = model_evaluation.ModelEvaluation.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_model_evaluation(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1beta1/{name=projects/*/locations/*/models/*/modelEvaluations/*}" % client.transport._host, args[1]) - - -def test_get_model_evaluation_rest_flattened_error(transport: str = 'rest'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_model_evaluation( - service.GetModelEvaluationRequest(), - name='name_value', - ) - - -def test_list_model_evaluations_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_model_evaluations in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_model_evaluations] = mock_rpc - - request = {} - client.list_model_evaluations(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_model_evaluations(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_model_evaluations_rest_required_fields(request_type=service.ListModelEvaluationsRequest): - transport_class = transports.AutoMlRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_model_evaluations._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_model_evaluations._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = service.ListModelEvaluationsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = service.ListModelEvaluationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_model_evaluations(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_model_evaluations_rest_unset_required_fields(): - transport = transports.AutoMlRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_model_evaluations._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_model_evaluations_rest_flattened(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = service.ListModelEvaluationsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/models/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = service.ListModelEvaluationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_model_evaluations(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1beta1/{parent=projects/*/locations/*/models/*}/modelEvaluations" % client.transport._host, args[1]) - - -def test_list_model_evaluations_rest_flattened_error(transport: str = 'rest'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_model_evaluations( - service.ListModelEvaluationsRequest(), - parent='parent_value', - ) - - -def test_list_model_evaluations_rest_pager(transport: str = 'rest'): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - service.ListModelEvaluationsResponse( - model_evaluation=[ - model_evaluation.ModelEvaluation(), - model_evaluation.ModelEvaluation(), - model_evaluation.ModelEvaluation(), - ], - next_page_token='abc', - ), - service.ListModelEvaluationsResponse( - model_evaluation=[], - next_page_token='def', - ), - service.ListModelEvaluationsResponse( - model_evaluation=[ - model_evaluation.ModelEvaluation(), - ], - next_page_token='ghi', - ), - service.ListModelEvaluationsResponse( - model_evaluation=[ - model_evaluation.ModelEvaluation(), - model_evaluation.ModelEvaluation(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(service.ListModelEvaluationsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2/models/sample3'} - - pager = client.list_model_evaluations(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, model_evaluation.ModelEvaluation) - for i in results) - - pages = list(client.list_model_evaluations(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.AutoMlGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.AutoMlGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = AutoMlClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.AutoMlGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = AutoMlClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = AutoMlClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.AutoMlGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = AutoMlClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.AutoMlGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = AutoMlClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.AutoMlGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.AutoMlGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.AutoMlGrpcTransport, - transports.AutoMlGrpcAsyncIOTransport, - transports.AutoMlRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = AutoMlClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_dataset_empty_call_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_dataset), - '__call__') as call: - call.return_value = gca_dataset.Dataset() - client.create_dataset(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.CreateDatasetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_dataset_empty_call_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_dataset), - '__call__') as call: - call.return_value = dataset.Dataset() - client.get_dataset(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetDatasetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_datasets_empty_call_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_datasets), - '__call__') as call: - call.return_value = service.ListDatasetsResponse() - client.list_datasets(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListDatasetsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_dataset_empty_call_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_dataset), - '__call__') as call: - call.return_value = gca_dataset.Dataset() - client.update_dataset(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.UpdateDatasetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_dataset_empty_call_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_dataset), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_dataset(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.DeleteDatasetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_import_data_empty_call_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.import_data), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.import_data(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ImportDataRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_export_data_empty_call_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.export_data), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.export_data(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ExportDataRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_annotation_spec_empty_call_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.get_annotation_spec), - '__call__') as call: - call.return_value = annotation_spec.AnnotationSpec() - client.get_annotation_spec(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetAnnotationSpecRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_table_spec_empty_call_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_table_spec), - '__call__') as call: - call.return_value = table_spec.TableSpec() - client.get_table_spec(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetTableSpecRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_table_specs_empty_call_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_table_specs), - '__call__') as call: - call.return_value = service.ListTableSpecsResponse() - client.list_table_specs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListTableSpecsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_table_spec_empty_call_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_table_spec), - '__call__') as call: - call.return_value = gca_table_spec.TableSpec() - client.update_table_spec(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.UpdateTableSpecRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_column_spec_empty_call_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_column_spec), - '__call__') as call: - call.return_value = column_spec.ColumnSpec() - client.get_column_spec(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetColumnSpecRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_list_column_specs_empty_call_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_column_specs), - '__call__') as call: - call.return_value = service.ListColumnSpecsResponse() - client.list_column_specs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListColumnSpecsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_column_spec_empty_call_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_column_spec), - '__call__') as call: - call.return_value = gca_column_spec.ColumnSpec() - client.update_column_spec(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.UpdateColumnSpecRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_model_empty_call_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_model), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_model(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.CreateModelRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_model_empty_call_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_model), - '__call__') as call: - call.return_value = model.Model() - client.get_model(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetModelRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_models_empty_call_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_models), - '__call__') as call: - call.return_value = service.ListModelsResponse() - client.list_models(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListModelsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. 
request == None and no flattened fields passed, work. -def test_delete_model_empty_call_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_model), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_model(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.DeleteModelRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_deploy_model_empty_call_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.deploy_model), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.deploy_model(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.DeployModelRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_undeploy_model_empty_call_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.undeploy_model), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.undeploy_model(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.UndeployModelRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_export_model_empty_call_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.export_model), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.export_model(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ExportModelRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_export_evaluated_examples_empty_call_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.export_evaluated_examples), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.export_evaluated_examples(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ExportEvaluatedExamplesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_model_evaluation_empty_call_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_model_evaluation), - '__call__') as call: - call.return_value = model_evaluation.ModelEvaluation() - client.get_model_evaluation(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetModelEvaluationRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_model_evaluations_empty_call_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_model_evaluations), - '__call__') as call: - call.return_value = service.ListModelEvaluationsResponse() - client.list_model_evaluations(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListModelEvaluationsRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = AutoMlAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_dataset_empty_call_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_dataset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gca_dataset.Dataset( - name='name_value', - display_name='display_name_value', - description='description_value', - example_count=1396, - etag='etag_value', - )) - await client.create_dataset(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.CreateDatasetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_dataset_empty_call_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_dataset), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataset.Dataset( - name='name_value', - display_name='display_name_value', - description='description_value', - example_count=1396, - etag='etag_value', - )) - await client.get_dataset(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetDatasetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_datasets_empty_call_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_datasets), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListDatasetsResponse( - next_page_token='next_page_token_value', - )) - await client.list_datasets(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListDatasetsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_dataset_empty_call_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_dataset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gca_dataset.Dataset( - name='name_value', - display_name='display_name_value', - description='description_value', - example_count=1396, - etag='etag_value', - )) - await client.update_dataset(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.UpdateDatasetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_dataset_empty_call_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_dataset), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_dataset(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.DeleteDatasetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_import_data_empty_call_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.import_data), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.import_data(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ImportDataRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_export_data_empty_call_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.export_data), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.export_data(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ExportDataRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_annotation_spec_empty_call_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_annotation_spec), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(annotation_spec.AnnotationSpec( - name='name_value', - display_name='display_name_value', - example_count=1396, - )) - await client.get_annotation_spec(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetAnnotationSpecRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_table_spec_empty_call_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_table_spec), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(table_spec.TableSpec( - name='name_value', - time_column_spec_id='time_column_spec_id_value', - row_count=992, - valid_row_count=1615, - column_count=1302, - etag='etag_value', - )) - await client.get_table_spec(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetTableSpecRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_table_specs_empty_call_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_table_specs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListTableSpecsResponse( - next_page_token='next_page_token_value', - )) - await client.list_table_specs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListTableSpecsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_table_spec_empty_call_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_table_spec), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gca_table_spec.TableSpec( - name='name_value', - time_column_spec_id='time_column_spec_id_value', - row_count=992, - valid_row_count=1615, - column_count=1302, - etag='etag_value', - )) - await client.update_table_spec(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.UpdateTableSpecRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_column_spec_empty_call_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_column_spec), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(column_spec.ColumnSpec( - name='name_value', - display_name='display_name_value', - etag='etag_value', - )) - await client.get_column_spec(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetColumnSpecRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_column_specs_empty_call_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_column_specs), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListColumnSpecsResponse( - next_page_token='next_page_token_value', - )) - await client.list_column_specs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListColumnSpecsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_column_spec_empty_call_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_column_spec), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gca_column_spec.ColumnSpec( - name='name_value', - display_name='display_name_value', - etag='etag_value', - )) - await client.update_column_spec(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.UpdateColumnSpecRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_model_empty_call_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_model(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.CreateModelRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_model_empty_call_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(model.Model( - name='name_value', - display_name='display_name_value', - dataset_id='dataset_id_value', - deployment_state=model.Model.DeploymentState.DEPLOYED, - )) - await client.get_model(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetModelRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_list_models_empty_call_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_models), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListModelsResponse( - next_page_token='next_page_token_value', - )) - await client.list_models(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListModelsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_model_empty_call_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_model(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.DeleteModelRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_deploy_model_empty_call_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.deploy_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.deploy_model(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.DeployModelRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_undeploy_model_empty_call_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.undeploy_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.undeploy_model(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.UndeployModelRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_export_model_empty_call_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.export_model), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.export_model(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ExportModelRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_export_evaluated_examples_empty_call_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.export_evaluated_examples), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.export_evaluated_examples(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ExportEvaluatedExamplesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_model_evaluation_empty_call_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_model_evaluation), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(model_evaluation.ModelEvaluation( - name='name_value', - annotation_spec_id='annotation_spec_id_value', - display_name='display_name_value', - evaluated_example_count=2446, - )) - await client.get_model_evaluation(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetModelEvaluationRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_model_evaluations_empty_call_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_model_evaluations), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(service.ListModelEvaluationsResponse( - next_page_token='next_page_token_value', - )) - await client.list_model_evaluations(request=None) - - # Establish that the underlying stub method was called. 
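# (A note on the unpacking just below: each entry in mock_calls is a
# (name, args, kwargs) triple, so args[0] is the request message the stub
# actually received; comparing it with a freshly constructed request shows
# the client substituted a default message for request=None.)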
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListModelEvaluationsRequest() - - assert args[0] == request_msg - - -def test_transport_kind_rest(): - transport = AutoMlClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" - - -def test_create_dataset_rest_bad_request(request_type=service.CreateDatasetRequest): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_dataset(request) - - -@pytest.mark.parametrize("request_type", [ - service.CreateDatasetRequest, - dict, -]) -def test_create_dataset_rest_call_success(request_type): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["dataset"] = {'translation_dataset_metadata': {'source_language_code': 'source_language_code_value', 'target_language_code': 'target_language_code_value'}, 'image_classification_dataset_metadata': {'classification_type': 1}, 'text_classification_dataset_metadata': {'classification_type': 1}, 'image_object_detection_dataset_metadata': {}, 'video_classification_dataset_metadata': {}, 'video_object_tracking_dataset_metadata': {}, 'text_extraction_dataset_metadata': {}, 'text_sentiment_dataset_metadata': {'sentiment_max': 1404}, 'tables_dataset_metadata': {'primary_table_spec_id': 'primary_table_spec_id_value', 'target_column_spec_id': 'target_column_spec_id_value', 'weight_column_spec_id': 'weight_column_spec_id_value', 'ml_use_column_spec_id': 'ml_use_column_spec_id_value', 'target_column_correlations': {}, 'stats_update_time': {'seconds': 751, 'nanos': 543}}, 'name': 'name_value', 'display_name': 'display_name_value', 'description': 'description_value', 'example_count': 1396, 'create_time': {}, 'etag': 'etag_value'} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = service.CreateDatasetRequest.meta.fields["dataset"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
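# (The branch below keys off DESCRIPTOR: a plain protobuf class such as a
# *_pb2 message exposes a DESCRIPTOR attribute, while a proto-plus wrapper
# such as service.CreateDatasetRequest does not; its fields live under
# .meta.fields instead. The helper feeds runtime_nested_fields, which the
# loop further down uses to drop sample sub-fields that the installed
# dependency no longer knows about.)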
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["dataset"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["dataset"][field])): - del request_init["dataset"][field][i][subfield] - else: - del request_init["dataset"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = gca_dataset.Dataset( - name='name_value', - display_name='display_name_value', - description='description_value', - example_count=1396, - etag='etag_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gca_dataset.Dataset.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_dataset(request) - - # Establish that the response is the type that we expect. 
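# (The crux of the REST fake above: the proto-plus Dataset is lowered to its
# protobuf form with gca_dataset.Dataset.pb(), serialized via
# json_format.MessageToJson, and handed back as the HTTP body; the transport
# re-parses that JSON, which is what the assertions below verify.)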
- assert isinstance(response, gca_dataset.Dataset) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.example_count == 1396 - assert response.etag == 'etag_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_dataset_rest_interceptors(null_interceptor): - transport = transports.AutoMlRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AutoMlRestInterceptor(), - ) - client = AutoMlClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_create_dataset") as post, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_create_dataset_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AutoMlRestInterceptor, "pre_create_dataset") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.CreateDatasetRequest.pb(service.CreateDatasetRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = gca_dataset.Dataset.to_json(gca_dataset.Dataset()) - req.return_value.content = return_value - - request = service.CreateDatasetRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = gca_dataset.Dataset() - post_with_metadata.return_value = gca_dataset.Dataset(), metadata - - client.create_dataset(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_dataset_rest_bad_request(request_type=service.GetDatasetRequest): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/datasets/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_dataset(request) - - -@pytest.mark.parametrize("request_type", [ - service.GetDatasetRequest, - dict, -]) -def test_get_dataset_rest_call_success(request_type): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/datasets/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
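# (Patching type(client.transport._session).request replaces the method on
# the Session class itself, so every instance is affected and the whole REST
# stack, including URL transcoding, header handling, and response parsing,
# runs for real; only the network hop is faked.)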
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = dataset.Dataset( - name='name_value', - display_name='display_name_value', - description='description_value', - example_count=1396, - etag='etag_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dataset.Dataset.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_dataset(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, dataset.Dataset) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.example_count == 1396 - assert response.etag == 'etag_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_dataset_rest_interceptors(null_interceptor): - transport = transports.AutoMlRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AutoMlRestInterceptor(), - ) - client = AutoMlClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_get_dataset") as post, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_get_dataset_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AutoMlRestInterceptor, "pre_get_dataset") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.GetDatasetRequest.pb(service.GetDatasetRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dataset.Dataset.to_json(dataset.Dataset()) - req.return_value.content = return_value - - request = service.GetDatasetRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = dataset.Dataset() - post_with_metadata.return_value = dataset.Dataset(), metadata - - client.get_dataset(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_datasets_rest_bad_request(request_type=service.ListDatasetsRequest): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
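# (The bad-request tests patch requests' Session.request and return a bare
# 400; google.api_core maps HTTP status codes to exception classes, e.g.
# core_exceptions.from_http_status(400, "") yields a BadRequest instance,
# which is exactly what pytest.raises catches here.)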
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_datasets(request) - - -@pytest.mark.parametrize("request_type", [ - service.ListDatasetsRequest, - dict, -]) -def test_list_datasets_rest_call_success(request_type): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = service.ListDatasetsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = service.ListDatasetsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_datasets(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDatasetsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_datasets_rest_interceptors(null_interceptor): - transport = transports.AutoMlRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AutoMlRestInterceptor(), - ) - client = AutoMlClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_list_datasets") as post, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_list_datasets_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AutoMlRestInterceptor, "pre_list_datasets") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.ListDatasetsRequest.pb(service.ListDatasetsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = service.ListDatasetsResponse.to_json(service.ListDatasetsResponse()) - req.return_value.content = return_value - - request = service.ListDatasetsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = service.ListDatasetsResponse() - post_with_metadata.return_value = service.ListDatasetsResponse(), metadata - - client.list_datasets(request, metadata=[("key", "val"), 
("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_dataset_rest_bad_request(request_type=service.UpdateDatasetRequest): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'dataset': {'name': 'projects/sample1/locations/sample2/datasets/sample3'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_dataset(request) - - -@pytest.mark.parametrize("request_type", [ - service.UpdateDatasetRequest, - dict, -]) -def test_update_dataset_rest_call_success(request_type): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'dataset': {'name': 'projects/sample1/locations/sample2/datasets/sample3'}} - request_init["dataset"] = {'translation_dataset_metadata': {'source_language_code': 'source_language_code_value', 'target_language_code': 'target_language_code_value'}, 'image_classification_dataset_metadata': {'classification_type': 1}, 'text_classification_dataset_metadata': {'classification_type': 1}, 'image_object_detection_dataset_metadata': {}, 'video_classification_dataset_metadata': {}, 'video_object_tracking_dataset_metadata': {}, 'text_extraction_dataset_metadata': {}, 'text_sentiment_dataset_metadata': {'sentiment_max': 1404}, 'tables_dataset_metadata': {'primary_table_spec_id': 'primary_table_spec_id_value', 'target_column_spec_id': 'target_column_spec_id_value', 'weight_column_spec_id': 'weight_column_spec_id_value', 'ml_use_column_spec_id': 'ml_use_column_spec_id_value', 'target_column_correlations': {}, 'stats_update_time': {'seconds': 751, 'nanos': 543}}, 'name': 'projects/sample1/locations/sample2/datasets/sample3', 'display_name': 'display_name_value', 'description': 'description_value', 'example_count': 1396, 'create_time': {}, 'etag': 'etag_value'} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = service.UpdateDatasetRequest.meta.fields["dataset"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["dataset"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["dataset"][field])): - del request_init["dataset"][field][i][subfield] - else: - del request_init["dataset"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = gca_dataset.Dataset( - name='name_value', - display_name='display_name_value', - description='description_value', - example_count=1396, - etag='etag_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gca_dataset.Dataset.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_dataset(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, gca_dataset.Dataset) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.example_count == 1396 - assert response.etag == 'etag_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_dataset_rest_interceptors(null_interceptor): - transport = transports.AutoMlRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AutoMlRestInterceptor(), - ) - client = AutoMlClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_update_dataset") as post, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_update_dataset_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AutoMlRestInterceptor, "pre_update_dataset") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.UpdateDatasetRequest.pb(service.UpdateDatasetRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = gca_dataset.Dataset.to_json(gca_dataset.Dataset()) - req.return_value.content = return_value - - request = service.UpdateDatasetRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = gca_dataset.Dataset() - post_with_metadata.return_value = gca_dataset.Dataset(), metadata - - client.update_dataset(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_dataset_rest_bad_request(request_type=service.DeleteDatasetRequest): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/datasets/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_dataset(request) - - -@pytest.mark.parametrize("request_type", [ - service.DeleteDatasetRequest, - dict, -]) -def test_delete_dataset_rest_call_success(request_type): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/datasets/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
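# (delete_dataset is a long-running method: the canned body below is a raw
# operations_pb2.Operation serialized to JSON, and the client wraps it in a
# google.api_core operation future rather than a Dataset; the test exercises
# the request/response plumbing without polling the operation to completion.)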
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_dataset(request) - - # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_dataset_rest_interceptors(null_interceptor): - transport = transports.AutoMlRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AutoMlRestInterceptor(), - ) - client = AutoMlClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_delete_dataset") as post, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_delete_dataset_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AutoMlRestInterceptor, "pre_delete_dataset") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.DeleteDatasetRequest.pb(service.DeleteDatasetRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = service.DeleteDatasetRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.delete_dataset(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_import_data_rest_bad_request(request_type=service.ImportDataRequest): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/datasets/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
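# (The *_rest_interceptors tests above patch hook methods directly on
# AutoMlRestInterceptor; in application code the same hooks are supplied by
# subclassing. A hypothetical sketch, assuming this module's imports:
#
#     class AuditingInterceptor(transports.AutoMlRestInterceptor):
#         def pre_delete_dataset(self, request, metadata):
#             return request, metadata   # inspect/rewrite before sending
#         def post_delete_dataset(self, response):
#             return response            # inspect the parsed Operation
#
#     client = AutoMlClient(transport=transports.AutoMlRestTransport(
#         credentials=ga_credentials.AnonymousCredentials(),
#         interceptor=AuditingInterceptor(),
#     ))
# )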
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.import_data(request) - - -@pytest.mark.parametrize("request_type", [ - service.ImportDataRequest, - dict, -]) -def test_import_data_rest_call_success(request_type): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/datasets/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.import_data(request) - - # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_import_data_rest_interceptors(null_interceptor): - transport = transports.AutoMlRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AutoMlRestInterceptor(), - ) - client = AutoMlClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_import_data") as post, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_import_data_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AutoMlRestInterceptor, "pre_import_data") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.ImportDataRequest.pb(service.ImportDataRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = service.ImportDataRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.import_data(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def 
test_export_data_rest_bad_request(request_type=service.ExportDataRequest): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/datasets/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.export_data(request) - - -@pytest.mark.parametrize("request_type", [ - service.ExportDataRequest, - dict, -]) -def test_export_data_rest_call_success(request_type): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/datasets/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.export_data(request) - - # Establish that the response is the type that we expect. 
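# (There is no typed response message to compare field by field for a raw
# Operation, so the line below only re-serializes the canned value; the
# substantive checks for export_data live in the interceptor test that
# follows.)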
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_export_data_rest_interceptors(null_interceptor): - transport = transports.AutoMlRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AutoMlRestInterceptor(), - ) - client = AutoMlClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_export_data") as post, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_export_data_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AutoMlRestInterceptor, "pre_export_data") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.ExportDataRequest.pb(service.ExportDataRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = service.ExportDataRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.export_data(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_annotation_spec_rest_bad_request(request_type=service.GetAnnotationSpecRequest): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/datasets/sample3/annotationSpecs/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_annotation_spec(request) - - -@pytest.mark.parametrize("request_type", [ - service.GetAnnotationSpecRequest, - dict, -]) -def test_get_annotation_spec_rest_call_success(request_type): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/datasets/sample3/annotationSpecs/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = annotation_spec.AnnotationSpec( - name='name_value', - display_name='display_name_value', - example_count=1396, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = annotation_spec.AnnotationSpec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_annotation_spec(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, annotation_spec.AnnotationSpec) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.example_count == 1396 - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_annotation_spec_rest_interceptors(null_interceptor): - transport = transports.AutoMlRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AutoMlRestInterceptor(), - ) - client = AutoMlClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_get_annotation_spec") as post, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_get_annotation_spec_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AutoMlRestInterceptor, "pre_get_annotation_spec") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.GetAnnotationSpecRequest.pb(service.GetAnnotationSpecRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = annotation_spec.AnnotationSpec.to_json(annotation_spec.AnnotationSpec()) - req.return_value.content = return_value - - request = service.GetAnnotationSpecRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = annotation_spec.AnnotationSpec() - post_with_metadata.return_value = annotation_spec.AnnotationSpec(), metadata - - client.get_annotation_spec(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_table_spec_rest_bad_request(request_type=service.GetTableSpecRequest): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/datasets/sample3/tableSpecs/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_table_spec(request) - - -@pytest.mark.parametrize("request_type", [ - service.GetTableSpecRequest, - dict, -]) -def test_get_table_spec_rest_call_success(request_type): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/datasets/sample3/tableSpecs/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = table_spec.TableSpec( - name='name_value', - time_column_spec_id='time_column_spec_id_value', - row_count=992, - valid_row_count=1615, - column_count=1302, - etag='etag_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = table_spec.TableSpec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_table_spec(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, table_spec.TableSpec) - assert response.name == 'name_value' - assert response.time_column_spec_id == 'time_column_spec_id_value' - assert response.row_count == 992 - assert response.valid_row_count == 1615 - assert response.column_count == 1302 - assert response.etag == 'etag_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_table_spec_rest_interceptors(null_interceptor): - transport = transports.AutoMlRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AutoMlRestInterceptor(), - ) - client = AutoMlClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_get_table_spec") as post, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_get_table_spec_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AutoMlRestInterceptor, "pre_get_table_spec") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.GetTableSpecRequest.pb(service.GetTableSpecRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = table_spec.TableSpec.to_json(table_spec.TableSpec()) - req.return_value.content = return_value - - request = service.GetTableSpecRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = table_spec.TableSpec() - post_with_metadata.return_value = table_spec.TableSpec(), metadata - - client.get_table_spec(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_table_specs_rest_bad_request(request_type=service.ListTableSpecsRequest): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/datasets/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_table_specs(request) - - -@pytest.mark.parametrize("request_type", [ - service.ListTableSpecsRequest, - dict, -]) -def test_list_table_specs_rest_call_success(request_type): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/datasets/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
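# (As with list_datasets earlier, the client wraps ListTableSpecsResponse in
# a pager: the call below is asserted to return pagers.ListTableSpecsPager,
# and only next_page_token is checked, since iterating the pager would issue
# further HTTP requests.)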
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = service.ListTableSpecsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = service.ListTableSpecsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_table_specs(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTableSpecsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_table_specs_rest_interceptors(null_interceptor): - transport = transports.AutoMlRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AutoMlRestInterceptor(), - ) - client = AutoMlClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_list_table_specs") as post, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_list_table_specs_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AutoMlRestInterceptor, "pre_list_table_specs") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.ListTableSpecsRequest.pb(service.ListTableSpecsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = service.ListTableSpecsResponse.to_json(service.ListTableSpecsResponse()) - req.return_value.content = return_value - - request = service.ListTableSpecsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = service.ListTableSpecsResponse() - post_with_metadata.return_value = service.ListTableSpecsResponse(), metadata - - client.list_table_specs(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_table_spec_rest_bad_request(request_type=service.UpdateTableSpecRequest): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'table_spec': {'name': 'projects/sample1/locations/sample2/datasets/sample3/tableSpecs/sample4'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_table_spec(request) - - -@pytest.mark.parametrize("request_type", [ - service.UpdateTableSpecRequest, - dict, -]) -def test_update_table_spec_rest_call_success(request_type): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'table_spec': {'name': 'projects/sample1/locations/sample2/datasets/sample3/tableSpecs/sample4'}} - request_init["table_spec"] = {'name': 'projects/sample1/locations/sample2/datasets/sample3/tableSpecs/sample4', 'time_column_spec_id': 'time_column_spec_id_value', 'row_count': 992, 'valid_row_count': 1615, 'column_count': 1302, 'input_configs': [{'gcs_source': {'input_uris': ['input_uris_value1', 'input_uris_value2']}, 'bigquery_source': {'input_uri': 'input_uri_value'}, 'params': {}}], 'etag': 'etag_value'} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = service.UpdateTableSpecRequest.meta.fields["table_spec"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["table_spec"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["table_spec"][field])): - del request_init["table_spec"][field][i][subfield] - else: - del request_init["table_spec"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = gca_table_spec.TableSpec( - name='name_value', - time_column_spec_id='time_column_spec_id_value', - row_count=992, - valid_row_count=1615, - column_count=1302, - etag='etag_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gca_table_spec.TableSpec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_table_spec(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, gca_table_spec.TableSpec) - assert response.name == 'name_value' - assert response.time_column_spec_id == 'time_column_spec_id_value' - assert response.row_count == 992 - assert response.valid_row_count == 1615 - assert response.column_count == 1302 - assert response.etag == 'etag_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_table_spec_rest_interceptors(null_interceptor): - transport = transports.AutoMlRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AutoMlRestInterceptor(), - ) - client = AutoMlClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_update_table_spec") as post, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_update_table_spec_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AutoMlRestInterceptor, "pre_update_table_spec") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.UpdateTableSpecRequest.pb(service.UpdateTableSpecRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = gca_table_spec.TableSpec.to_json(gca_table_spec.TableSpec()) - req.return_value.content = return_value - - request = service.UpdateTableSpecRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = gca_table_spec.TableSpec() - post_with_metadata.return_value = gca_table_spec.TableSpec(), metadata - - client.update_table_spec(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_column_spec_rest_bad_request(request_type=service.GetColumnSpecRequest): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/datasets/sample3/tableSpecs/sample4/columnSpecs/sample5'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
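-    # Patching `Session.request` at the class level intercepts the HTTP call made
-    # by the underlying `requests` session; a 400 status with an empty JSON body
-    # is all the transport needs to raise `core_exceptions.BadRequest`.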
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_column_spec(request) - - -@pytest.mark.parametrize("request_type", [ - service.GetColumnSpecRequest, - dict, -]) -def test_get_column_spec_rest_call_success(request_type): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/datasets/sample3/tableSpecs/sample4/columnSpecs/sample5'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = column_spec.ColumnSpec( - name='name_value', - display_name='display_name_value', - etag='etag_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = column_spec.ColumnSpec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_column_spec(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, column_spec.ColumnSpec) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.etag == 'etag_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_column_spec_rest_interceptors(null_interceptor): - transport = transports.AutoMlRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AutoMlRestInterceptor(), - ) - client = AutoMlClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_get_column_spec") as post, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_get_column_spec_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AutoMlRestInterceptor, "pre_get_column_spec") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.GetColumnSpecRequest.pb(service.GetColumnSpecRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = column_spec.ColumnSpec.to_json(column_spec.ColumnSpec()) - req.return_value.content = return_value - - request = service.GetColumnSpecRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = column_spec.ColumnSpec() - post_with_metadata.return_value = column_spec.ColumnSpec(), metadata - - client.get_column_spec(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_column_specs_rest_bad_request(request_type=service.ListColumnSpecsRequest): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/datasets/sample3/tableSpecs/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_column_specs(request) - - -@pytest.mark.parametrize("request_type", [ - service.ListColumnSpecsRequest, - dict, -]) -def test_list_column_specs_rest_call_success(request_type): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/datasets/sample3/tableSpecs/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
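-    # Here `request` is patched on the session's class (`type(...)`) so that the
-    # session instance the client already holds resolves to the mock; the canned
-    # body below is the response proto serialized to JSON, which is exactly what
-    # the REST transport expects to parse.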
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = service.ListColumnSpecsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = service.ListColumnSpecsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_column_specs(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListColumnSpecsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_column_specs_rest_interceptors(null_interceptor): - transport = transports.AutoMlRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AutoMlRestInterceptor(), - ) - client = AutoMlClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_list_column_specs") as post, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_list_column_specs_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AutoMlRestInterceptor, "pre_list_column_specs") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.ListColumnSpecsRequest.pb(service.ListColumnSpecsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = service.ListColumnSpecsResponse.to_json(service.ListColumnSpecsResponse()) - req.return_value.content = return_value - - request = service.ListColumnSpecsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = service.ListColumnSpecsResponse() - post_with_metadata.return_value = service.ListColumnSpecsResponse(), metadata - - client.list_column_specs(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_column_spec_rest_bad_request(request_type=service.UpdateColumnSpecRequest): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'column_spec': {'name': 'projects/sample1/locations/sample2/datasets/sample3/tableSpecs/sample4/columnSpecs/sample5'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_column_spec(request) - - -@pytest.mark.parametrize("request_type", [ - service.UpdateColumnSpecRequest, - dict, -]) -def test_update_column_spec_rest_call_success(request_type): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'column_spec': {'name': 'projects/sample1/locations/sample2/datasets/sample3/tableSpecs/sample4/columnSpecs/sample5'}} - request_init["column_spec"] = {'name': 'projects/sample1/locations/sample2/datasets/sample3/tableSpecs/sample4/columnSpecs/sample5', 'data_type': {'list_element_type': {}, 'struct_type': {'fields': {}}, 'time_format': 'time_format_value', 'type_code': 3, 'nullable': True}, 'display_name': 'display_name_value', 'data_stats': {'float64_stats': {'mean': 0.417, 'standard_deviation': 0.1907, 'quantiles': [0.983, 0.984], 'histogram_buckets': [{'min_': 0.419, 'max_': 0.421, 'count': 553}]}, 'string_stats': {'top_unigram_stats': [{'value': 'value_value', 'count': 553}]}, 'timestamp_stats': {'granular_stats': {}}, 'array_stats': {'member_stats': {}}, 'struct_stats': {'field_stats': {}}, 'category_stats': {'top_category_stats': [{'value': 'value_value', 'count': 553}]}, 'distinct_value_count': 2150, 'null_value_count': 1727, 'valid_value_count': 1812}, 'top_correlated_columns': [{'column_spec_id': 'column_spec_id_value', 'correlation_stats': {'cramers_v': 0.962}}], 'etag': 'etag_value'} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = service.UpdateColumnSpecRequest.meta.fields["column_spec"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["column_spec"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["column_spec"][field])): - del request_init["column_spec"][field][i][subfield] - else: - del request_init["column_spec"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = gca_column_spec.ColumnSpec( - name='name_value', - display_name='display_name_value', - etag='etag_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gca_column_spec.ColumnSpec.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_column_spec(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, gca_column_spec.ColumnSpec) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.etag == 'etag_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_column_spec_rest_interceptors(null_interceptor): - transport = transports.AutoMlRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AutoMlRestInterceptor(), - ) - client = AutoMlClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_update_column_spec") as post, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_update_column_spec_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AutoMlRestInterceptor, "pre_update_column_spec") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.UpdateColumnSpecRequest.pb(service.UpdateColumnSpecRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = gca_column_spec.ColumnSpec.to_json(gca_column_spec.ColumnSpec()) - req.return_value.content = return_value - - request = service.UpdateColumnSpecRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = gca_column_spec.ColumnSpec() - post_with_metadata.return_value = gca_column_spec.ColumnSpec(), metadata - - client.update_column_spec(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_model_rest_bad_request(request_type=service.CreateModelRequest): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_model(request) - - -@pytest.mark.parametrize("request_type", [ - service.CreateModelRequest, - dict, -]) -def test_create_model_rest_call_success(request_type): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["model"] = {'translation_model_metadata': {'base_model': 'base_model_value', 'source_language_code': 'source_language_code_value', 'target_language_code': 'target_language_code_value'}, 'image_classification_model_metadata': {'base_model_id': 'base_model_id_value', 'train_budget': 1272, 'train_cost': 1078, 'stop_reason': 'stop_reason_value', 'model_type': 'model_type_value', 'node_qps': 0.857, 'node_count': 1070}, 'text_classification_model_metadata': {'classification_type': 1}, 'image_object_detection_model_metadata': {'model_type': 'model_type_value', 'node_count': 1070, 'node_qps': 0.857, 'stop_reason': 'stop_reason_value', 'train_budget_milli_node_hours': 3075, 'train_cost_milli_node_hours': 2881}, 'video_classification_model_metadata': {}, 'video_object_tracking_model_metadata': {}, 'text_extraction_model_metadata': {'model_hint': 'model_hint_value'}, 'tables_model_metadata': {'optimization_objective_recall_value': 0.37270000000000003, 'optimization_objective_precision_value': 0.4072, 'target_column_spec': {'name': 'name_value', 'data_type': {'list_element_type': {}, 'struct_type': {'fields': {}}, 'time_format': 'time_format_value', 'type_code': 3, 'nullable': True}, 'display_name': 'display_name_value', 'data_stats': {'float64_stats': {'mean': 0.417, 'standard_deviation': 0.1907, 'quantiles': [0.983, 0.984], 'histogram_buckets': [{'min_': 0.419, 'max_': 0.421, 'count': 553}]}, 'string_stats': {'top_unigram_stats': [{'value': 'value_value', 'count': 553}]}, 'timestamp_stats': {'granular_stats': {}}, 'array_stats': {'member_stats': {}}, 'struct_stats': {'field_stats': {}}, 'category_stats': {'top_category_stats': [{'value': 'value_value', 'count': 553}]}, 'distinct_value_count': 2150, 'null_value_count': 1727, 'valid_value_count': 1812}, 'top_correlated_columns': [{'column_spec_id': 'column_spec_id_value', 'correlation_stats': {'cramers_v': 0.962}}], 'etag': 'etag_value'}, 'input_feature_column_specs': {}, 'optimization_objective': 'optimization_objective_value', 'tables_model_column_info': [{'column_spec_name': 'column_spec_name_value', 'column_display_name': 'column_display_name_value', 'feature_importance': 0.1917}], 'train_budget_milli_node_hours': 3075, 'train_cost_milli_node_hours': 2881, 'disable_early_stopping': True}, 'text_sentiment_model_metadata': {}, 'name': 'name_value', 'display_name': 'display_name_value', 'dataset_id': 'dataset_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'deployment_state': 1} - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = service.CreateModelRequest.meta.fields["model"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["model"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["model"][field])): - del request_init["model"][field][i][subfield] - else: - del request_init["model"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_model(request) - - # Establish that the response is the type that we expect. 
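-    # `create_model` is a long-running method, so the REST client wraps the
-    # `Operation` proto returned by the server in a
-    # `google.api_core.operation.Operation` future; the raw proto is exposed
-    # as `response.operation`.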
-    assert response.operation.name == 'operations/spam'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_create_model_rest_interceptors(null_interceptor):
-    transport = transports.AutoMlRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.AutoMlRestInterceptor(),
-        )
-    client = AutoMlClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
-        mock.patch.object(transports.AutoMlRestInterceptor, "post_create_model") as post, \
-        mock.patch.object(transports.AutoMlRestInterceptor, "post_create_model_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.AutoMlRestInterceptor, "pre_create_model") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = service.CreateModelRequest.pb(service.CreateModelRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = json_format.MessageToJson(operations_pb2.Operation())
-        req.return_value.content = return_value
-
-        request = service.CreateModelRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = operations_pb2.Operation()
-        post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
-        client.create_model(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_get_model_rest_bad_request(request_type=service.GetModelRequest):
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/models/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.get_model(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    service.GetModelRequest,
-    dict,
-])
-def test_get_model_rest_call_success(request_type):
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/models/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
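-        # Enum fields round-trip through JSON by symbolic name, so the decoded
-        # response can be compared against `model.Model.DeploymentState.DEPLOYED`.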
- return_value = model.Model( - name='name_value', - display_name='display_name_value', - dataset_id='dataset_id_value', - deployment_state=model.Model.DeploymentState.DEPLOYED, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = model.Model.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_model(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, model.Model) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.dataset_id == 'dataset_id_value' - assert response.deployment_state == model.Model.DeploymentState.DEPLOYED - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_model_rest_interceptors(null_interceptor): - transport = transports.AutoMlRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AutoMlRestInterceptor(), - ) - client = AutoMlClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_get_model") as post, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_get_model_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AutoMlRestInterceptor, "pre_get_model") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.GetModelRequest.pb(service.GetModelRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = model.Model.to_json(model.Model()) - req.return_value.content = return_value - - request = service.GetModelRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = model.Model() - post_with_metadata.return_value = model.Model(), metadata - - client.get_model(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_models_rest_bad_request(request_type=service.ListModelsRequest): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_models(request) - - -@pytest.mark.parametrize("request_type", [ - service.ListModelsRequest, - dict, -]) -def test_list_models_rest_call_success(request_type): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = service.ListModelsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = service.ListModelsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_models(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListModelsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_models_rest_interceptors(null_interceptor): - transport = transports.AutoMlRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AutoMlRestInterceptor(), - ) - client = AutoMlClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_list_models") as post, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_list_models_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AutoMlRestInterceptor, "pre_list_models") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.ListModelsRequest.pb(service.ListModelsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = service.ListModelsResponse.to_json(service.ListModelsResponse()) - req.return_value.content = return_value - - request = service.ListModelsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = service.ListModelsResponse() - post_with_metadata.return_value = service.ListModelsResponse(), metadata - - client.list_models(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - 
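-        # The call above ran through the interceptor chain, so each hook fired
-        # exactly once: `pre` before the request was sent, `post` and
-        # `post_with_metadata` after the response was decoded.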
pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_model_rest_bad_request(request_type=service.DeleteModelRequest): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/models/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_model(request) - - -@pytest.mark.parametrize("request_type", [ - service.DeleteModelRequest, - dict, -]) -def test_delete_model_rest_call_success(request_type): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/models/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_model(request) - - # Establish that the response is the type that we expect. 
-    assert response.operation.name == 'operations/spam'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_delete_model_rest_interceptors(null_interceptor):
-    transport = transports.AutoMlRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.AutoMlRestInterceptor(),
-        )
-    client = AutoMlClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
-        mock.patch.object(transports.AutoMlRestInterceptor, "post_delete_model") as post, \
-        mock.patch.object(transports.AutoMlRestInterceptor, "post_delete_model_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.AutoMlRestInterceptor, "pre_delete_model") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = service.DeleteModelRequest.pb(service.DeleteModelRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = json_format.MessageToJson(operations_pb2.Operation())
-        req.return_value.content = return_value
-
-        request = service.DeleteModelRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = operations_pb2.Operation()
-        post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
-        client.delete_model(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_deploy_model_rest_bad_request(request_type=service.DeployModelRequest):
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/models/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.deploy_model(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    service.DeployModelRequest,
-    dict,
-])
-def test_deploy_model_rest_call_success(request_type):
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/models/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.deploy_model(request)
-
-    # Establish that the response is the type that we expect.
-    assert response.operation.name == 'operations/spam'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_deploy_model_rest_interceptors(null_interceptor):
-    transport = transports.AutoMlRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.AutoMlRestInterceptor(),
-        )
-    client = AutoMlClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
-        mock.patch.object(transports.AutoMlRestInterceptor, "post_deploy_model") as post, \
-        mock.patch.object(transports.AutoMlRestInterceptor, "post_deploy_model_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.AutoMlRestInterceptor, "pre_deploy_model") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = service.DeployModelRequest.pb(service.DeployModelRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = json_format.MessageToJson(operations_pb2.Operation())
-        req.return_value.content = return_value
-
-        request = service.DeployModelRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = operations_pb2.Operation()
-        post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
-        client.deploy_model(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_undeploy_model_rest_bad_request(request_type=service.UndeployModelRequest):
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/models/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.undeploy_model(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    service.UndeployModelRequest,
-    dict,
-])
-def test_undeploy_model_rest_call_success(request_type):
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/models/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.undeploy_model(request)
-
-    # Establish that the response is the type that we expect.
-    assert response.operation.name == 'operations/spam'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_undeploy_model_rest_interceptors(null_interceptor):
-    transport = transports.AutoMlRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.AutoMlRestInterceptor(),
-        )
-    client = AutoMlClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
-        mock.patch.object(transports.AutoMlRestInterceptor, "post_undeploy_model") as post, \
-        mock.patch.object(transports.AutoMlRestInterceptor, "post_undeploy_model_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.AutoMlRestInterceptor, "pre_undeploy_model") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = service.UndeployModelRequest.pb(service.UndeployModelRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = json_format.MessageToJson(operations_pb2.Operation())
-        req.return_value.content = return_value
-
-        request = service.UndeployModelRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = operations_pb2.Operation()
-        post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
-        client.undeploy_model(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-
post_with_metadata.assert_called_once() - - -def test_export_model_rest_bad_request(request_type=service.ExportModelRequest): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/models/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.export_model(request) - - -@pytest.mark.parametrize("request_type", [ - service.ExportModelRequest, - dict, -]) -def test_export_model_rest_call_success(request_type): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/models/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.export_model(request) - - # Establish that the response is the type that we expect. 
-    assert response.operation.name == 'operations/spam'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_export_model_rest_interceptors(null_interceptor):
-    transport = transports.AutoMlRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.AutoMlRestInterceptor(),
-        )
-    client = AutoMlClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
-        mock.patch.object(transports.AutoMlRestInterceptor, "post_export_model") as post, \
-        mock.patch.object(transports.AutoMlRestInterceptor, "post_export_model_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.AutoMlRestInterceptor, "pre_export_model") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = service.ExportModelRequest.pb(service.ExportModelRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = json_format.MessageToJson(operations_pb2.Operation())
-        req.return_value.content = return_value
-
-        request = service.ExportModelRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = operations_pb2.Operation()
-        post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
-        client.export_model(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_export_evaluated_examples_rest_bad_request(request_type=service.ExportEvaluatedExamplesRequest):
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/models/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.export_evaluated_examples(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    service.ExportEvaluatedExamplesRequest,
-    dict,
-])
-def test_export_evaluated_examples_rest_call_success(request_type):
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/models/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.export_evaluated_examples(request)
-
-    # Establish that the response is the type that we expect.
-    assert response.operation.name == 'operations/spam'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_export_evaluated_examples_rest_interceptors(null_interceptor):
-    transport = transports.AutoMlRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.AutoMlRestInterceptor(),
-        )
-    client = AutoMlClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
-        mock.patch.object(transports.AutoMlRestInterceptor, "post_export_evaluated_examples") as post, \
-        mock.patch.object(transports.AutoMlRestInterceptor, "post_export_evaluated_examples_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.AutoMlRestInterceptor, "pre_export_evaluated_examples") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = service.ExportEvaluatedExamplesRequest.pb(service.ExportEvaluatedExamplesRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = json_format.MessageToJson(operations_pb2.Operation())
-        req.return_value.content = return_value
-
-        request = service.ExportEvaluatedExamplesRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = operations_pb2.Operation()
-        post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
-        client.export_evaluated_examples(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_get_model_evaluation_rest_bad_request(request_type=service.GetModelEvaluationRequest):
-    client = AutoMlClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/models/sample3/modelEvaluations/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_model_evaluation(request) - - -@pytest.mark.parametrize("request_type", [ - service.GetModelEvaluationRequest, - dict, -]) -def test_get_model_evaluation_rest_call_success(request_type): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/models/sample3/modelEvaluations/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = model_evaluation.ModelEvaluation( - name='name_value', - annotation_spec_id='annotation_spec_id_value', - display_name='display_name_value', - evaluated_example_count=2446, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = model_evaluation.ModelEvaluation.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_model_evaluation(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, model_evaluation.ModelEvaluation) - assert response.name == 'name_value' - assert response.annotation_spec_id == 'annotation_spec_id_value' - assert response.display_name == 'display_name_value' - assert response.evaluated_example_count == 2446 - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_model_evaluation_rest_interceptors(null_interceptor): - transport = transports.AutoMlRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AutoMlRestInterceptor(), - ) - client = AutoMlClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_get_model_evaluation") as post, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_get_model_evaluation_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AutoMlRestInterceptor, "pre_get_model_evaluation") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.GetModelEvaluationRequest.pb(service.GetModelEvaluationRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = model_evaluation.ModelEvaluation.to_json(model_evaluation.ModelEvaluation()) - req.return_value.content = return_value - - request = service.GetModelEvaluationRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = model_evaluation.ModelEvaluation() - post_with_metadata.return_value = model_evaluation.ModelEvaluation(), metadata - - client.get_model_evaluation(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_model_evaluations_rest_bad_request(request_type=service.ListModelEvaluationsRequest): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/models/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
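
# Editor's sketch: the *_rest_bad_request tests follow one pattern -- patch
# requests.Session.request to return a fake 400 response and assert that the
# client surfaces google.api_core's BadRequest. A distilled, self-contained
# version of that mechanism; `call_api` is hypothetical, standing in for any
# generated client method:
from unittest import mock
import pytest
import requests
from google.api_core import exceptions as core_exceptions

def call_api(session: requests.Session) -> None:
    # Mirrors what a generated REST client does: issue the request, then map
    # non-2xx statuses onto google.api_core exceptions.
    response = session.request("GET", "https://example.invalid/v1/resource")
    if response.status_code == 400:
        raise core_exceptions.BadRequest("fake bad request")

def test_sketch_bad_request():
    with mock.patch.object(requests.Session, "request") as req, pytest.raises(core_exceptions.BadRequest):
        req.return_value = mock.Mock(status_code=400, json=mock.Mock(return_value={}))
        call_api(requests.Session())
# (The list_model_evaluations bad-request test continues below.)
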
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_model_evaluations(request) - - -@pytest.mark.parametrize("request_type", [ - service.ListModelEvaluationsRequest, - dict, -]) -def test_list_model_evaluations_rest_call_success(request_type): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/models/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = service.ListModelEvaluationsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = service.ListModelEvaluationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_model_evaluations(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListModelEvaluationsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_model_evaluations_rest_interceptors(null_interceptor): - transport = transports.AutoMlRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AutoMlRestInterceptor(), - ) - client = AutoMlClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_list_model_evaluations") as post, \ - mock.patch.object(transports.AutoMlRestInterceptor, "post_list_model_evaluations_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AutoMlRestInterceptor, "pre_list_model_evaluations") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.ListModelEvaluationsRequest.pb(service.ListModelEvaluationsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = service.ListModelEvaluationsResponse.to_json(service.ListModelEvaluationsResponse()) - req.return_value.content = return_value - - request = service.ListModelEvaluationsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = service.ListModelEvaluationsResponse() - post_with_metadata.return_value = service.ListModelEvaluationsResponse(), metadata - - client.list_model_evaluations(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - -def test_initialize_client_w_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_dataset_empty_call_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_dataset), - '__call__') as call: - client.create_dataset(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.CreateDatasetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_dataset_empty_call_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_dataset), - '__call__') as call: - client.get_dataset(request=None) - - # Establish that the underlying stub method was called. 
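
# Editor's sketch: the *_rest_interceptors tests above all verify the same
# contract -- a REST interceptor's `pre_<method>` hook sees (request, metadata)
# before transcoding, and its `post_<method>` / `post_<method>_with_metadata`
# hooks see the decoded response afterwards. A minimal stand-in (these names
# are illustrative, not the real transports.AutoMlRestInterceptor API):
class SketchInterceptor:
    def pre_call(self, request, metadata):
        # May rewrite the request or metadata before the HTTP call.
        return request, metadata

    def post_call(self, response):
        # May rewrite the decoded response before the client returns it.
        return response

    def post_call_with_metadata(self, response, metadata):
        return response, metadata

def invoke(interceptor, request, metadata, http_call):
    request, metadata = interceptor.pre_call(request, metadata)
    response = http_call(request, metadata)
    response = interceptor.post_call(response)
    response, _ = interceptor.post_call_with_metadata(response, metadata)
    return response

result = invoke(SketchInterceptor(), {"name": "n"}, [("key", "val")],
                lambda req, md: {"ok": True})
assert result == {"ok": True}
# (The get_dataset empty-call test continues below.)
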
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetDatasetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_datasets_empty_call_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_datasets), - '__call__') as call: - client.list_datasets(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListDatasetsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_dataset_empty_call_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_dataset), - '__call__') as call: - client.update_dataset(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.UpdateDatasetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_dataset_empty_call_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_dataset), - '__call__') as call: - client.delete_dataset(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.DeleteDatasetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_import_data_empty_call_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.import_data), - '__call__') as call: - client.import_data(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ImportDataRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_export_data_empty_call_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.export_data), - '__call__') as call: - client.export_data(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ExportDataRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work.
-def test_get_annotation_spec_empty_call_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_annotation_spec), - '__call__') as call: - client.get_annotation_spec(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetAnnotationSpecRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_table_spec_empty_call_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_table_spec), - '__call__') as call: - client.get_table_spec(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetTableSpecRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_table_specs_empty_call_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_table_specs), - '__call__') as call: - client.list_table_specs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListTableSpecsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_table_spec_empty_call_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_table_spec), - '__call__') as call: - client.update_table_spec(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.UpdateTableSpecRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_column_spec_empty_call_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_column_spec), - '__call__') as call: - client.get_column_spec(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetColumnSpecRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work.
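
# Editor's sketch: every *_empty_call_rest test in this file is the same
# template -- call the method with request=None, then assert the transport stub
# received the default (all-empty) request message. Distilled, with
# hypothetical `Stub`/`EmptyRequest` classes standing in for the generated
# client and request types:
from unittest import mock

class EmptyRequest:
    def __eq__(self, other):
        return isinstance(other, EmptyRequest)

class Stub:
    def do(self, request=None):
        # Mirrors the generated client: a None request must be coerced into a
        # default message instead of raising.
        self._call(request or EmptyRequest())

    def _call(self, request):
        pass

def test_sketch_empty_call():
    stub = Stub()
    with mock.patch.object(Stub, "_call") as call:
        stub.do(request=None)
        call.assert_called_once()
        args, _ = call.call_args
        assert args[0] == EmptyRequest()
# (The remaining empty-call tests, starting with list_column_specs, follow.)
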
-def test_list_column_specs_empty_call_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_column_specs), - '__call__') as call: - client.list_column_specs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListColumnSpecsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_column_spec_empty_call_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_column_spec), - '__call__') as call: - client.update_column_spec(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.UpdateColumnSpecRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_model_empty_call_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_model), - '__call__') as call: - client.create_model(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.CreateModelRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_model_empty_call_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_model), - '__call__') as call: - client.get_model(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetModelRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_models_empty_call_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_models), - '__call__') as call: - client.list_models(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListModelsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_model_empty_call_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_model), - '__call__') as call: - client.delete_model(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.DeleteModelRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_deploy_model_empty_call_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.deploy_model), - '__call__') as call: - client.deploy_model(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.DeployModelRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_undeploy_model_empty_call_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.undeploy_model), - '__call__') as call: - client.undeploy_model(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.UndeployModelRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_export_model_empty_call_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.export_model), - '__call__') as call: - client.export_model(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ExportModelRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_export_evaluated_examples_empty_call_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.export_evaluated_examples), - '__call__') as call: - client.export_evaluated_examples(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ExportEvaluatedExamplesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_model_evaluation_empty_call_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_model_evaluation), - '__call__') as call: - client.get_model_evaluation(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.GetModelEvaluationRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_model_evaluations_empty_call_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_model_evaluations), - '__call__') as call: - client.list_model_evaluations(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = service.ListModelEvaluationsRequest() - - assert args[0] == request_msg - - -def test_auto_ml_rest_lro_client(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - transport = client.transport - - # Ensure that we have an api-core operations client. - assert isinstance( - transport.operations_client, -operations_v1.AbstractOperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.AutoMlGrpcTransport, - ) - -def test_auto_ml_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.AutoMlTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_auto_ml_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.automl_v1beta1.services.auto_ml.transports.AutoMlTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.AutoMlTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - 'create_dataset', - 'get_dataset', - 'list_datasets', - 'update_dataset', - 'delete_dataset', - 'import_data', - 'export_data', - 'get_annotation_spec', - 'get_table_spec', - 'list_table_specs', - 'update_table_spec', - 'get_column_spec', - 'list_column_specs', - 'update_column_spec', - 'create_model', - 'get_model', - 'list_models', - 'delete_model', - 'deploy_model', - 'undeploy_model', - 'export_model', - 'export_evaluated_examples', - 'get_model_evaluation', - 'list_model_evaluations', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_auto_ml_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.automl_v1beta1.services.auto_ml.transports.AutoMlTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.AutoMlTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_auto_ml_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.automl_v1beta1.services.auto_ml.transports.AutoMlTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.AutoMlTransport() - adc.assert_called_once() - - -def test_auto_ml_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - AutoMlClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.AutoMlGrpcTransport, - transports.AutoMlGrpcAsyncIOTransport, - ], -) -def test_auto_ml_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
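
# Editor's sketch: several tests in this file stub Application Default
# Credentials by patching google.auth.default to return a (credentials,
# project) tuple, so no real credentials are ever resolved. The mechanism in
# isolation (`load_creds` is an illustrative helper):
from unittest import mock
import google.auth
from google.auth import credentials as ga_credentials

def load_creds():
    creds, _ = google.auth.default()
    return creds

with mock.patch.object(google.auth, "default", autospec=True) as adc:
    adc.return_value = (ga_credentials.AnonymousCredentials(), None)
    assert isinstance(load_creds(), ga_credentials.AnonymousCredentials)
    adc.assert_called_once()
# (test_auto_ml_transport_auth_adc, described above, continues below with
# exactly this stubbing.)
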
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.AutoMlGrpcTransport, - transports.AutoMlGrpcAsyncIOTransport, - transports.AutoMlRestTransport, - ], -) -def test_auto_ml_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.AutoMlGrpcTransport, grpc_helpers), - (transports.AutoMlGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_auto_ml_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "automl.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="automl.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.AutoMlGrpcTransport, transports.AutoMlGrpcAsyncIOTransport]) -def test_auto_ml_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
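
# Editor's sketch: client_cert_source_for_mtls is just a zero-argument callable
# returning (certificate_chain_bytes, private_key_bytes); the transport feeds
# those into grpc.ssl_channel_credentials, which is what the mocks below
# assert. Stand-alone, with dummy bytes as in the tests' own
# client_cert_source_callback:
import grpc

def cert_source():
    return b"cert bytes", b"key bytes"

cert, key = cert_source()
channel_creds = grpc.ssl_channel_credentials(
    certificate_chain=cert, private_key=key
)
# channel_creds would then secure a channel, e.g.
# grpc.secure_channel("squid.clam.whelk:443", channel_creds).
# (The client_cert_source_for_mtls check continues below.)
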
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_auto_ml_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.AutoMlRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_auto_ml_host_no_port(transport_name): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='automl.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'automl.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://automl.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_auto_ml_host_with_port(transport_name): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='automl.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'automl.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://automl.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_auto_ml_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = AutoMlClient( - credentials=creds1, - transport=transport_name, - ) - client2 = AutoMlClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.create_dataset._session - session2 = client2.transport.create_dataset._session - assert session1 != session2 - session1 = client1.transport.get_dataset._session - session2 = client2.transport.get_dataset._session - assert session1 != session2 - session1 = client1.transport.list_datasets._session - session2 = client2.transport.list_datasets._session - assert session1 != session2 - session1 = client1.transport.update_dataset._session - session2 = client2.transport.update_dataset._session - assert session1 != session2 - session1 = client1.transport.delete_dataset._session - session2 = client2.transport.delete_dataset._session - assert session1 != session2 - session1 = client1.transport.import_data._session - session2 = client2.transport.import_data._session - assert session1 != session2 - session1 = client1.transport.export_data._session - session2 = client2.transport.export_data._session - assert session1 != session2 - session1 = client1.transport.get_annotation_spec._session - session2 = client2.transport.get_annotation_spec._session - assert session1 != session2 - session1 = client1.transport.get_table_spec._session - session2 = client2.transport.get_table_spec._session - assert session1 != session2 - session1 = client1.transport.list_table_specs._session - session2 = client2.transport.list_table_specs._session - assert session1 != session2 - session1 = client1.transport.update_table_spec._session - session2 = client2.transport.update_table_spec._session - assert session1 != session2 - session1 = client1.transport.get_column_spec._session - session2 = client2.transport.get_column_spec._session - assert session1 != session2 - session1 = client1.transport.list_column_specs._session - session2 = client2.transport.list_column_specs._session - assert session1 != session2 - session1 = client1.transport.update_column_spec._session - session2 = client2.transport.update_column_spec._session - assert session1 != session2 - session1 = client1.transport.create_model._session - session2 = client2.transport.create_model._session - assert session1 != session2 - session1 = client1.transport.get_model._session - session2 = client2.transport.get_model._session - assert session1 != session2 - session1 = client1.transport.list_models._session - session2 = client2.transport.list_models._session - assert session1 != session2 - session1 = client1.transport.delete_model._session - session2 = client2.transport.delete_model._session - assert session1 != session2 - session1 = client1.transport.deploy_model._session - session2 = client2.transport.deploy_model._session - assert session1 != session2 - session1 = client1.transport.undeploy_model._session - session2 = client2.transport.undeploy_model._session - assert session1 != session2 - session1 = client1.transport.export_model._session - session2 = client2.transport.export_model._session - assert session1 != session2 - session1 = client1.transport.export_evaluated_examples._session - session2 = client2.transport.export_evaluated_examples._session - assert session1 != session2 - session1 = client1.transport.get_model_evaluation._session - session2 = client2.transport.get_model_evaluation._session - assert session1 != session2 - session1 = client1.transport.list_model_evaluations._session - session2 = client2.transport.list_model_evaluations._session - assert session1 != session2
-def test_auto_ml_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.AutoMlGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_auto_ml_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.AutoMlGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor.
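
# Editor's sketch: the host tests above encode one rule -- gRPC transports
# address `host:443` (port appended when missing), while the REST transport
# uses an `https://` URL. A hypothetical helper capturing that rule:
def resolve_host(api_endpoint: str, transport: str) -> str:
    if transport in ("grpc", "grpc_asyncio"):
        # Append the default gRPC port only when none was given.
        return api_endpoint if ":" in api_endpoint else f"{api_endpoint}:443"
    return f"https://{api_endpoint}"

assert resolve_host("automl.googleapis.com", "grpc") == "automl.googleapis.com:443"
assert resolve_host("automl.googleapis.com:8000", "grpc") == "automl.googleapis.com:8000"
assert resolve_host("automl.googleapis.com:8000", "rest") == "https://automl.googleapis.com:8000"
# (The deprecated-arguments mTLS test flagged in the comment above follows.)
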
-@pytest.mark.parametrize("transport_class", [transports.AutoMlGrpcTransport, transports.AutoMlGrpcAsyncIOTransport]) -def test_auto_ml_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.AutoMlGrpcTransport, transports.AutoMlGrpcAsyncIOTransport]) -def test_auto_ml_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_auto_ml_grpc_lro_client(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_auto_ml_grpc_lro_async_client(): - client = AutoMlAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. 
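
# Editor's sketch: the *_lro_client tests assert that `operations_client` is
# created once and then reused (an `is` comparison on repeated access). That
# behavior is a cached property; a minimal equivalent (SketchTransport is
# illustrative, not the real transport class):
class SketchTransport:
    def __init__(self):
        self._operations_client = None

    @property
    def operations_client(self):
        # Build lazily on first access, then hand back the same object.
        if self._operations_client is None:
            self._operations_client = object()  # stands in for an operations_v1 client
        return self._operations_client

t = SketchTransport()
assert t.operations_client is t.operations_client  # same object every time
# (The async LRO client assertion continues below.)
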
- assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_annotation_spec_path(): - project = "squid" - location = "clam" - dataset = "whelk" - annotation_spec = "octopus" - expected = "projects/{project}/locations/{location}/datasets/{dataset}/annotationSpecs/{annotation_spec}".format(project=project, location=location, dataset=dataset, annotation_spec=annotation_spec, ) - actual = AutoMlClient.annotation_spec_path(project, location, dataset, annotation_spec) - assert expected == actual - - -def test_parse_annotation_spec_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - "dataset": "cuttlefish", - "annotation_spec": "mussel", - } - path = AutoMlClient.annotation_spec_path(**expected) - - # Check that the path construction is reversible. - actual = AutoMlClient.parse_annotation_spec_path(path) - assert expected == actual - -def test_column_spec_path(): - project = "winkle" - location = "nautilus" - dataset = "scallop" - table_spec = "abalone" - column_spec = "squid" - expected = "projects/{project}/locations/{location}/datasets/{dataset}/tableSpecs/{table_spec}/columnSpecs/{column_spec}".format(project=project, location=location, dataset=dataset, table_spec=table_spec, column_spec=column_spec, ) - actual = AutoMlClient.column_spec_path(project, location, dataset, table_spec, column_spec) - assert expected == actual - - -def test_parse_column_spec_path(): - expected = { - "project": "clam", - "location": "whelk", - "dataset": "octopus", - "table_spec": "oyster", - "column_spec": "nudibranch", - } - path = AutoMlClient.column_spec_path(**expected) - - # Check that the path construction is reversible. - actual = AutoMlClient.parse_column_spec_path(path) - assert expected == actual - -def test_dataset_path(): - project = "cuttlefish" - location = "mussel" - dataset = "winkle" - expected = "projects/{project}/locations/{location}/datasets/{dataset}".format(project=project, location=location, dataset=dataset, ) - actual = AutoMlClient.dataset_path(project, location, dataset) - assert expected == actual - - -def test_parse_dataset_path(): - expected = { - "project": "nautilus", - "location": "scallop", - "dataset": "abalone", - } - path = AutoMlClient.dataset_path(**expected) - - # Check that the path construction is reversible. - actual = AutoMlClient.parse_dataset_path(path) - assert expected == actual - -def test_model_path(): - project = "squid" - location = "clam" - model = "whelk" - expected = "projects/{project}/locations/{location}/models/{model}".format(project=project, location=location, model=model, ) - actual = AutoMlClient.model_path(project, location, model) - assert expected == actual - - -def test_parse_model_path(): - expected = { - "project": "octopus", - "location": "oyster", - "model": "nudibranch", - } - path = AutoMlClient.model_path(**expected) - - # Check that the path construction is reversible. 
- actual = AutoMlClient.parse_model_path(path) - assert expected == actual - -def test_model_evaluation_path(): - project = "cuttlefish" - location = "mussel" - model = "winkle" - model_evaluation = "nautilus" - expected = "projects/{project}/locations/{location}/models/{model}/modelEvaluations/{model_evaluation}".format(project=project, location=location, model=model, model_evaluation=model_evaluation, ) - actual = AutoMlClient.model_evaluation_path(project, location, model, model_evaluation) - assert expected == actual - - -def test_parse_model_evaluation_path(): - expected = { - "project": "scallop", - "location": "abalone", - "model": "squid", - "model_evaluation": "clam", - } - path = AutoMlClient.model_evaluation_path(**expected) - - # Check that the path construction is reversible. - actual = AutoMlClient.parse_model_evaluation_path(path) - assert expected == actual - -def test_table_spec_path(): - project = "whelk" - location = "octopus" - dataset = "oyster" - table_spec = "nudibranch" - expected = "projects/{project}/locations/{location}/datasets/{dataset}/tableSpecs/{table_spec}".format(project=project, location=location, dataset=dataset, table_spec=table_spec, ) - actual = AutoMlClient.table_spec_path(project, location, dataset, table_spec) - assert expected == actual - - -def test_parse_table_spec_path(): - expected = { - "project": "cuttlefish", - "location": "mussel", - "dataset": "winkle", - "table_spec": "nautilus", - } - path = AutoMlClient.table_spec_path(**expected) - - # Check that the path construction is reversible. - actual = AutoMlClient.parse_table_spec_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "scallop" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = AutoMlClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "abalone", - } - path = AutoMlClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = AutoMlClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "squid" - expected = "folders/{folder}".format(folder=folder, ) - actual = AutoMlClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "clam", - } - path = AutoMlClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = AutoMlClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "whelk" - expected = "organizations/{organization}".format(organization=organization, ) - actual = AutoMlClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "octopus", - } - path = AutoMlClient.common_organization_path(**expected) - - # Check that the path construction is reversible. 
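
# Editor's sketch: the *_path / parse_*_path tests check that resource-name
# construction is reversible. The same round trip, written directly with a
# format string and a regular expression (the pattern here is illustrative,
# mirroring how the generated helpers work):
import re

def model_path(project, location, model):
    return f"projects/{project}/locations/{location}/models/{model}"

def parse_model_path(path):
    m = re.match(
        r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/models/(?P<model>.+?)$",
        path,
    )
    return m.groupdict() if m else {}

expected = {"project": "octopus", "location": "oyster", "model": "nudibranch"}
assert parse_model_path(model_path(**expected)) == expected
# (The common-organization path round-trip continues below.)
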
- actual = AutoMlClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "oyster" - expected = "projects/{project}".format(project=project, ) - actual = AutoMlClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "nudibranch", - } - path = AutoMlClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = AutoMlClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "cuttlefish" - location = "mussel" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = AutoMlClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "winkle", - "location": "nautilus", - } - path = AutoMlClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = AutoMlClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.AutoMlTransport, '_prep_wrapped_messages') as prep: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.AutoMlTransport, '_prep_wrapped_messages') as prep: - transport_class = AutoMlClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_transport_close_grpc(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = AutoMlAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close_rest(): - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = AutoMlClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
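
# Editor's sketch: test_client_ctx relies on the client being a context manager
# whose __exit__ closes the transport. A minimal equivalent (SketchClient is
# illustrative):
from unittest import mock

class SketchClient:
    def __init__(self, transport):
        self.transport = transport

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc, tb):
        # Leaving the `with` block releases the transport's resources.
        self.transport.close()

transport = mock.Mock()
with SketchClient(transport):
    transport.close.assert_not_called()
transport.close.assert_called_once()
# (The per-transport close check continues below.)
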
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (AutoMlClient, transports.AutoMlGrpcTransport), - (AutoMlAsyncClient, transports.AutoMlGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-automl/v1beta1/tests/unit/gapic/automl_v1beta1/test_prediction_service.py b/owl-bot-staging/google-cloud-automl/v1beta1/tests/unit/gapic/automl_v1beta1/test_prediction_service.py deleted file mode 100644 index 8d67dbf411a2..000000000000 --- a/owl-bot-staging/google-cloud-automl/v1beta1/tests/unit/gapic/automl_v1beta1/test_prediction_service.py +++ /dev/null @@ -1,2917 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
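
# Editor's sketch: test_api_key_credentials (above) checks that setting
# ClientOptions.api_key makes the client fetch API-key credentials instead of
# ADC. Setting the option itself looks like this; the credential lookup the
# test mocks (google.auth._default.get_api_key_credentials) is private API, so
# only the public option is shown:
from google.api_core import client_options

options = client_options.ClientOptions()
options.api_key = "api_key"  # dummy value, as in the test
# AutoMlClient(client_options=options) would then route through the API-key
# credential path rather than google.auth.default().
# (The deleted prediction-service test module continues below.)
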
-# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation -from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.automl_v1beta1.services.prediction_service import PredictionServiceAsyncClient -from google.cloud.automl_v1beta1.services.prediction_service import PredictionServiceClient -from google.cloud.automl_v1beta1.services.prediction_service import transports -from google.cloud.automl_v1beta1.types import annotation_payload -from google.cloud.automl_v1beta1.types import data_items -from google.cloud.automl_v1beta1.types import geometry -from google.cloud.automl_v1beta1.types import io -from google.cloud.automl_v1beta1.types import operations -from google.cloud.automl_v1beta1.types import prediction_service -from google.cloud.automl_v1beta1.types import text_segment -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import struct_pb2 # type: ignore -import google.auth - - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": "service account credentials", - "principal": "service-account@example.com", -} -CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert PredictionServiceClient._get_default_mtls_endpoint(None) is None - assert PredictionServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert PredictionServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert PredictionServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert PredictionServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert PredictionServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert PredictionServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert PredictionServiceClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert PredictionServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - PredictionServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert PredictionServiceClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert PredictionServiceClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert PredictionServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - PredictionServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert PredictionServiceClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert PredictionServiceClient._get_client_cert_source(None, False) is None - assert PredictionServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert 
PredictionServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert PredictionServiceClient._get_client_cert_source(None, True) is mock_default_cert_source - assert PredictionServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(PredictionServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(PredictionServiceClient)) -@mock.patch.object(PredictionServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(PredictionServiceAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = PredictionServiceClient._DEFAULT_UNIVERSE - default_endpoint = PredictionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = PredictionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert PredictionServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert PredictionServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == PredictionServiceClient.DEFAULT_MTLS_ENDPOINT - assert PredictionServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert PredictionServiceClient._get_api_endpoint(None, None, default_universe, "always") == PredictionServiceClient.DEFAULT_MTLS_ENDPOINT - assert PredictionServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == PredictionServiceClient.DEFAULT_MTLS_ENDPOINT - assert PredictionServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert PredictionServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - PredictionServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." - - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert PredictionServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert PredictionServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert PredictionServiceClient._get_universe_domain(None, None) == PredictionServiceClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - PredictionServiceClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
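- -# A user-level sketch of the precedence checked above (illustrative only, -# reusing names already imported in this module): an explicit client option -# wins over the GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variable, which in -# turn wins over the default universe, "googleapis.com". -# -#   options = client_options.ClientOptions(universe_domain="foo.com") -#   client = PredictionServiceClient(client_options=options, -#                                    credentials=ga_credentials.AnonymousCredentials()) -#   assert client.universe_domain == "foo.com"  # option beats env var and default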
- -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = PredictionServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - -@pytest.mark.parametrize("error_code", [401,403,404,500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = PredictionServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - -@pytest.mark.parametrize("client_class,transport_name", [ - (PredictionServiceClient, "grpc"), - (PredictionServiceAsyncClient, "grpc_asyncio"), - (PredictionServiceClient, "rest"), -]) -def test_prediction_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'automl.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://automl.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.PredictionServiceGrpcTransport, "grpc"), - (transports.PredictionServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.PredictionServiceRestTransport, "rest"), -]) -def test_prediction_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (PredictionServiceClient, "grpc"), - (PredictionServiceAsyncClient, "grpc_asyncio"), - (PredictionServiceClient, "rest"), -]) -def test_prediction_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", 
transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'automl.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://automl.googleapis.com' - ) - - -def test_prediction_service_client_get_transport_class(): - transport = PredictionServiceClient.get_transport_class() - available_transports = [ - transports.PredictionServiceGrpcTransport, - transports.PredictionServiceRestTransport, - ] - assert transport in available_transports - - transport = PredictionServiceClient.get_transport_class("grpc") - assert transport == transports.PredictionServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (PredictionServiceClient, transports.PredictionServiceGrpcTransport, "grpc"), - (PredictionServiceAsyncClient, transports.PredictionServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (PredictionServiceClient, transports.PredictionServiceRestTransport, "rest"), -]) -@mock.patch.object(PredictionServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(PredictionServiceClient)) -@mock.patch.object(PredictionServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(PredictionServiceAsyncClient)) -def test_prediction_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(PredictionServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(PredictionServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (PredictionServiceClient, transports.PredictionServiceGrpcTransport, "grpc", "true"), - (PredictionServiceAsyncClient, transports.PredictionServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (PredictionServiceClient, transports.PredictionServiceGrpcTransport, "grpc", "false"), - (PredictionServiceAsyncClient, transports.PredictionServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (PredictionServiceClient, transports.PredictionServiceRestTransport, "rest", "true"), - (PredictionServiceClient, transports.PredictionServiceRestTransport, "rest", "false"), -]) -@mock.patch.object(PredictionServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(PredictionServiceClient)) -@mock.patch.object(PredictionServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", 
modify_default_endpoint_template(PredictionServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_prediction_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
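-# (With no certificate available from either source, "auto" behaves like -# "never": the client stays on the regular endpoint, as asserted below.)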
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - PredictionServiceClient, PredictionServiceAsyncClient -]) -@mock.patch.object(PredictionServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PredictionServiceClient)) -@mock.patch.object(PredictionServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PredictionServiceAsyncClient)) -def test_prediction_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - PredictionServiceClient, PredictionServiceAsyncClient -]) -@mock.patch.object(PredictionServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(PredictionServiceClient)) -@mock.patch.object(PredictionServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(PredictionServiceAsyncClient)) -def test_prediction_service_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = PredictionServiceClient._DEFAULT_UNIVERSE - default_endpoint = PredictionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = PredictionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
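-# (For this service that would be "automl.mtls.googleapis.com"; the default -# mTLS endpoint inserts ".mtls" after the service name, per the pattern -# exercised in test__get_default_mtls_endpoint above.)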
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (PredictionServiceClient, transports.PredictionServiceGrpcTransport, "grpc"), - (PredictionServiceAsyncClient, transports.PredictionServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (PredictionServiceClient, transports.PredictionServiceRestTransport, "rest"), -]) -def test_prediction_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (PredictionServiceClient, transports.PredictionServiceGrpcTransport, "grpc", grpc_helpers), - (PredictionServiceAsyncClient, transports.PredictionServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (PredictionServiceClient, transports.PredictionServiceRestTransport, "rest", None), -]) -def test_prediction_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_prediction_service_client_client_options_from_dict(): - with mock.patch('google.cloud.automl_v1beta1.services.prediction_service.transports.PredictionServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = PredictionServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (PredictionServiceClient, transports.PredictionServiceGrpcTransport, "grpc", grpc_helpers), - (PredictionServiceAsyncClient, transports.PredictionServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_prediction_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
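-# From user code the same path looks like the sketch below (illustrative): -# google.auth loads the key file and the resulting credentials are handed to -# the gRPC channel. -# -#   options = client_options.ClientOptions(credentials_file="credentials.json") -#   client = PredictionServiceClient(client_options=options)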
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "automl.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="automl.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - prediction_service.PredictRequest, - dict, -]) -def test_predict(request_type, transport: str = 'grpc'): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.predict), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = prediction_service.PredictResponse( - ) - response = client.predict(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = prediction_service.PredictRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, prediction_service.PredictResponse) - - -def test_predict_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = prediction_service.PredictRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.predict), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.predict(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == prediction_service.PredictRequest( - name='name_value', - ) - -def test_predict_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.predict in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.predict] = mock_rpc - request = {} - client.predict(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.predict(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_predict_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = PredictionServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.predict in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.predict] = mock_rpc - - request = {} - await client.predict(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.predict(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_predict_async(transport: str = 'grpc_asyncio', request_type=prediction_service.PredictRequest): - client = PredictionServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.predict), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(prediction_service.PredictResponse()) - response = await client.predict(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = prediction_service.PredictRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, prediction_service.PredictResponse) - - -@pytest.mark.asyncio -async def test_predict_async_from_dict(): - await test_predict_async(request_type=dict) - -def test_predict_field_headers(): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = prediction_service.PredictRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.predict), - '__call__') as call: - call.return_value = prediction_service.PredictResponse() - client.predict(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_predict_field_headers_async(): - client = PredictionServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = prediction_service.PredictRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.predict), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(prediction_service.PredictResponse()) - await client.predict(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_predict_flattened(): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.predict), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = prediction_service.PredictResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.predict( - name='name_value', - payload=data_items.ExamplePayload(image=data_items.Image(image_bytes=b'image_bytes_blob')), - params={'key_value': 'value_value'}, - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].payload - mock_val = data_items.ExamplePayload(image=data_items.Image(image_bytes=b'image_bytes_blob')) - assert arg == mock_val - arg = args[0].params - mock_val = {'key_value': 'value_value'} - assert arg == mock_val - - -def test_predict_flattened_error(): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
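-# For example (illustrative), mixing the two calling styles like this raises -# ValueError before any RPC is attempted: -# -#   client.predict(prediction_service.PredictRequest(), name='name_value')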
- with pytest.raises(ValueError): - client.predict( - prediction_service.PredictRequest(), - name='name_value', - payload=data_items.ExamplePayload(image=data_items.Image(image_bytes=b'image_bytes_blob')), - params={'key_value': 'value_value'}, - ) - -@pytest.mark.asyncio -async def test_predict_flattened_async(): - client = PredictionServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.predict), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(prediction_service.PredictResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.predict( - name='name_value', - payload=data_items.ExamplePayload(image=data_items.Image(image_bytes=b'image_bytes_blob')), - params={'key_value': 'value_value'}, - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].payload - mock_val = data_items.ExamplePayload(image=data_items.Image(image_bytes=b'image_bytes_blob')) - assert arg == mock_val - arg = args[0].params - mock_val = {'key_value': 'value_value'} - assert arg == mock_val - -@pytest.mark.asyncio -async def test_predict_flattened_error_async(): - client = PredictionServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.predict( - prediction_service.PredictRequest(), - name='name_value', - payload=data_items.ExamplePayload(image=data_items.Image(image_bytes=b'image_bytes_blob')), - params={'key_value': 'value_value'}, - ) - - -@pytest.mark.parametrize("request_type", [ - prediction_service.BatchPredictRequest, - dict, -]) -def test_batch_predict(request_type, transport: str = 'grpc'): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_predict), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.batch_predict(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = prediction_service.BatchPredictRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_batch_predict_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
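-# (Per AIP-4235, string fields annotated as UUID4, typically a request_id, are -# pre-filled with a random UUID when the caller leaves them unset; explicitly -# set fields such as name must pass through unchanged, which the assertion at -# the end of this test verifies.)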
- client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = prediction_service.BatchPredictRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_predict), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.batch_predict(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == prediction_service.BatchPredictRequest( - name='name_value', - ) - -def test_batch_predict_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.batch_predict in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.batch_predict] = mock_rpc - request = {} - client.batch_predict(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.batch_predict(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_batch_predict_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = PredictionServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.batch_predict in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.batch_predict] = mock_rpc - - request = {} - await client.batch_predict(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.batch_predict(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_batch_predict_async(transport: str = 'grpc_asyncio', request_type=prediction_service.BatchPredictRequest): - client = PredictionServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_predict), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.batch_predict(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = prediction_service.BatchPredictRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_batch_predict_async_from_dict(): - await test_batch_predict_async(request_type=dict) - -def test_batch_predict_field_headers(): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = prediction_service.BatchPredictRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_predict), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.batch_predict(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_batch_predict_field_headers_async(): - client = PredictionServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = prediction_service.BatchPredictRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_predict), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.batch_predict(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
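-# (The x-goog-request-params metadata entry mirrors URI path fields such as -# name so the backend can route the request without parsing the body; here the -# expected entry is ('x-goog-request-params', 'name=name_value').)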
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_batch_predict_flattened(): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_predict), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.batch_predict( - name='name_value', - input_config=io.BatchPredictInputConfig(gcs_source=io.GcsSource(input_uris=['input_uris_value'])), - output_config=io.BatchPredictOutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value')), - params={'key_value': 'value_value'}, - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].input_config - mock_val = io.BatchPredictInputConfig(gcs_source=io.GcsSource(input_uris=['input_uris_value'])) - assert arg == mock_val - arg = args[0].output_config - mock_val = io.BatchPredictOutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value')) - assert arg == mock_val - arg = args[0].params - mock_val = {'key_value': 'value_value'} - assert arg == mock_val - - -def test_batch_predict_flattened_error(): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.batch_predict( - prediction_service.BatchPredictRequest(), - name='name_value', - input_config=io.BatchPredictInputConfig(gcs_source=io.GcsSource(input_uris=['input_uris_value'])), - output_config=io.BatchPredictOutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value')), - params={'key_value': 'value_value'}, - ) - -@pytest.mark.asyncio -async def test_batch_predict_flattened_async(): - client = PredictionServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_predict), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.batch_predict( - name='name_value', - input_config=io.BatchPredictInputConfig(gcs_source=io.GcsSource(input_uris=['input_uris_value'])), - output_config=io.BatchPredictOutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value')), - params={'key_value': 'value_value'}, - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].input_config - mock_val = io.BatchPredictInputConfig(gcs_source=io.GcsSource(input_uris=['input_uris_value'])) - assert arg == mock_val - arg = args[0].output_config - mock_val = io.BatchPredictOutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value')) - assert arg == mock_val - arg = args[0].params - mock_val = {'key_value': 'value_value'} - assert arg == mock_val - -@pytest.mark.asyncio -async def test_batch_predict_flattened_error_async(): - client = PredictionServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.batch_predict( - prediction_service.BatchPredictRequest(), - name='name_value', - input_config=io.BatchPredictInputConfig(gcs_source=io.GcsSource(input_uris=['input_uris_value'])), - output_config=io.BatchPredictOutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value')), - params={'key_value': 'value_value'}, - ) - - -def test_predict_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.predict in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.predict] = mock_rpc - - request = {} - client.predict(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.predict(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_predict_rest_required_fields(request_type=prediction_service.PredictRequest): - transport_class = transports.PredictionServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).predict._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).predict._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = prediction_service.PredictResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = prediction_service.PredictResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.predict(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_predict_rest_unset_required_fields(): - transport = transports.PredictionServiceRestTransport(credentials=ga_credentials.AnonymousCredentials()) - - unset_fields = transport.predict._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", "payload", ))) - - -def test_predict_rest_flattened(): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = prediction_service.PredictResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/models/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - payload=data_items.ExamplePayload(image=data_items.Image(image_bytes=b'image_bytes_blob')), - params={'key_value': 'value_value'}, - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = prediction_service.PredictResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.predict(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1beta1/{name=projects/*/locations/*/models/*}:predict" % client.transport._host, args[1]) - - -def test_predict_rest_flattened_error(transport: str = 'rest'): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.predict( - prediction_service.PredictRequest(), - name='name_value', - payload=data_items.ExamplePayload(image=data_items.Image(image_bytes=b'image_bytes_blob')), - params={'key_value': 'value_value'}, - ) - - -def test_batch_predict_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.batch_predict in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.batch_predict] = mock_rpc - - request = {} - client.batch_predict(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.batch_predict(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_batch_predict_rest_required_fields(request_type=prediction_service.BatchPredictRequest): - transport_class = transports.PredictionServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_predict._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_predict._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.batch_predict(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_batch_predict_rest_unset_required_fields(): - transport = transports.PredictionServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.batch_predict._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", "inputConfig", "outputConfig", "params", ))) - - -def test_batch_predict_rest_flattened(): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/models/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - input_config=io.BatchPredictInputConfig(gcs_source=io.GcsSource(input_uris=['input_uris_value'])), - output_config=io.BatchPredictOutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value')), - params={'key_value': 'value_value'}, - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.batch_predict(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1beta1/{name=projects/*/locations/*/models/*}:batchPredict" % client.transport._host, args[1]) - - -def test_batch_predict_rest_flattened_error(transport: str = 'rest'): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.batch_predict( - prediction_service.BatchPredictRequest(), - name='name_value', - input_config=io.BatchPredictInputConfig(gcs_source=io.GcsSource(input_uris=['input_uris_value'])), - output_config=io.BatchPredictOutputConfig(gcs_destination=io.GcsDestination(output_uri_prefix='output_uri_prefix_value')), - params={'key_value': 'value_value'}, - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.PredictionServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.PredictionServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = PredictionServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.PredictionServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = PredictionServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. 
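test_credentials_transport_error pins down a simple rule: a prebuilt transport already carries its own credentials and options, so combining it with any credential-like argument must fail loudly. Sketched with a hypothetical constructor (not the generated one):

.. code-block:: python

    class ToyClient:
        def __init__(self, transport=None, credentials=None, client_options=None):
            opts = dict(client_options or {})
            conflicting = (
                credentials is not None
                or opts.get("credentials_file")
                or opts.get("api_key")
                or opts.get("scopes")
            )
            if transport is not None and conflicting:
                # Ambiguous: the transport was already configured elsewhere.
                raise ValueError("transport is mutually exclusive with credential options")
            self.transport = transport

    try:
        ToyClient(transport=object(), client_options={"api_key": "api_key"})
    except ValueError as exc:
        print(exc)  # transport is mutually exclusive with credential options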
- options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = PredictionServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.PredictionServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = PredictionServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.PredictionServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = PredictionServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.PredictionServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.PredictionServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.PredictionServiceGrpcTransport, - transports.PredictionServiceGrpcAsyncIOTransport, - transports.PredictionServiceRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = PredictionServiceClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_predict_empty_call_grpc(): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.predict), - '__call__') as call: - call.return_value = prediction_service.PredictResponse() - client.predict(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = prediction_service.PredictRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_batch_predict_empty_call_grpc(): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.batch_predict), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.batch_predict(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = prediction_service.BatchPredictRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = PredictionServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = PredictionServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_predict_empty_call_grpc_asyncio(): - client = PredictionServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.predict), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(prediction_service.PredictResponse( - )) - await client.predict(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = prediction_service.PredictRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_batch_predict_empty_call_grpc_asyncio(): - client = PredictionServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.batch_predict), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.batch_predict(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = prediction_service.BatchPredictRequest() - - assert args[0] == request_msg - - -def test_transport_kind_rest(): - transport = PredictionServiceClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" - - -def test_predict_rest_bad_request(request_type=prediction_service.PredictRequest): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/models/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
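The *_empty_call_* tests assert that request=None reaches the stub as a default-constructed request message. The normalization they depend on boils down to something like this sketch (hypothetical helper; the generated clients perform this inside each method):

.. code-block:: python

    def normalize_request(request, request_class):
        # None becomes an empty message; a dict becomes a populated message;
        # an existing message passes through untouched.
        if request is None:
            return request_class()
        if isinstance(request, dict):
            return request_class(**request)
        return request

    class FakeRequest:
        def __init__(self, name=""):
            self.name = name

    msg = normalize_request(None, FakeRequest)
    assert isinstance(msg, FakeRequest) and msg.name == ""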
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.predict(request) - - -@pytest.mark.parametrize("request_type", [ - prediction_service.PredictRequest, - dict, -]) -def test_predict_rest_call_success(request_type): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/models/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = prediction_service.PredictResponse( - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = prediction_service.PredictResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.predict(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, prediction_service.PredictResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_predict_rest_interceptors(null_interceptor): - transport = transports.PredictionServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.PredictionServiceRestInterceptor(), - ) - client = PredictionServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.PredictionServiceRestInterceptor, "post_predict") as post, \ - mock.patch.object(transports.PredictionServiceRestInterceptor, "post_predict_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.PredictionServiceRestInterceptor, "pre_predict") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = prediction_service.PredictRequest.pb(prediction_service.PredictRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = prediction_service.PredictResponse.to_json(prediction_service.PredictResponse()) - req.return_value.content = return_value - - request = prediction_service.PredictRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = prediction_service.PredictResponse() - post_with_metadata.return_value = prediction_service.PredictResponse(), metadata - - client.predict(request, metadata=[("key", 
"val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_batch_predict_rest_bad_request(request_type=prediction_service.BatchPredictRequest): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/models/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.batch_predict(request) - - -@pytest.mark.parametrize("request_type", [ - prediction_service.BatchPredictRequest, - dict, -]) -def test_batch_predict_rest_call_success(request_type): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/models/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.batch_predict(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operation.Operation) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_batch_predict_rest_interceptors(null_interceptor): - transport = transports.PredictionServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.PredictionServiceRestInterceptor(), - ) - client = PredictionServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.PredictionServiceRestInterceptor, "post_batch_predict") as post, \ - mock.patch.object(transports.PredictionServiceRestInterceptor, "post_batch_predict_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.PredictionServiceRestInterceptor, "pre_batch_predict") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = prediction_service.BatchPredictRequest.pb(prediction_service.BatchPredictRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = prediction_service.BatchPredictRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.batch_predict(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - -def test_initialize_client_w_rest(): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_predict_empty_call_rest(): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.predict), - '__call__') as call: - client.predict(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = prediction_service.PredictRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_batch_predict_empty_call_rest(): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.batch_predict), - '__call__') as call: - client.batch_predict(request=None) - - # Establish that the underlying stub method was called.
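The interceptor tests above verify call ordering: pre runs before the HTTP request, post (and post-with-metadata) after it. A stripped-down version of that control flow, with a hypothetical interceptor interface mirroring the shape of the mocked hooks:

.. code-block:: python

    class ToyInterceptor:
        def pre(self, request, metadata):
            return request, metadata          # may rewrite the outgoing request

        def post(self, response):
            return response                   # may rewrite the raw response

        def post_with_metadata(self, response, metadata):
            return response, metadata         # final hook, sees metadata too

    def intercepted_call(rpc, request, metadata, interceptor):
        request, metadata = interceptor.pre(request, metadata)
        response = rpc(request)
        response = interceptor.post(response)
        response, _ = interceptor.post_with_metadata(response, metadata)
        return response

    result = intercepted_call(lambda req: {"echo": req}, {"name": "n"},
                              [("key", "val")], ToyInterceptor())
    assert result == {"echo": {"name": "n"}}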
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = prediction_service.BatchPredictRequest() - - assert args[0] == request_msg - - -def test_prediction_service_rest_lro_client(): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - transport = client.transport - - # Ensure that we have an api-core operations client. - assert isinstance( - transport.operations_client, -operations_v1.AbstractOperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.PredictionServiceGrpcTransport, - ) - -def test_prediction_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.PredictionServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_prediction_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.automl_v1beta1.services.prediction_service.transports.PredictionServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.PredictionServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'predict', - 'batch_predict', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_prediction_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.automl_v1beta1.services.prediction_service.transports.PredictionServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.PredictionServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_prediction_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.automl_v1beta1.services.prediction_service.transports.PredictionServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.PredictionServiceTransport() - adc.assert_called_once() - - -def test_prediction_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - PredictionServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.PredictionServiceGrpcTransport, - transports.PredictionServiceGrpcAsyncIOTransport, - ], -) -def test_prediction_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.PredictionServiceGrpcTransport, - transports.PredictionServiceGrpcAsyncIOTransport, - transports.PredictionServiceRestTransport, - ], -) -def test_prediction_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.PredictionServiceGrpcTransport, grpc_helpers), - (transports.PredictionServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_prediction_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "automl.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="automl.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.PredictionServiceGrpcTransport, transports.PredictionServiceGrpcAsyncIOTransport]) -def test_prediction_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_prediction_service_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.PredictionServiceRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_prediction_service_host_no_port(transport_name): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='automl.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'automl.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://automl.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_prediction_service_host_with_port(transport_name): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='automl.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'automl.googleapis.com:8000' - if 
transport_name in ['grpc', 'grpc_asyncio'] - else 'https://automl.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_prediction_service_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = PredictionServiceClient( - credentials=creds1, - transport=transport_name, - ) - client2 = PredictionServiceClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.predict._session - session2 = client2.transport.predict._session - assert session1 != session2 - session1 = client1.transport.batch_predict._session - session2 = client2.transport.batch_predict._session - assert session1 != session2 -def test_prediction_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.PredictionServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_prediction_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.PredictionServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.PredictionServiceGrpcTransport, transports.PredictionServiceGrpcAsyncIOTransport]) -def test_prediction_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.PredictionServiceGrpcTransport, transports.PredictionServiceGrpcAsyncIOTransport]) -def test_prediction_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_prediction_service_grpc_lro_client(): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_prediction_service_grpc_lro_async_client(): - client = PredictionServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_model_path(): - project = "squid" - location = "clam" - model = "whelk" - expected = "projects/{project}/locations/{location}/models/{model}".format(project=project, location=location, model=model, ) - actual = PredictionServiceClient.model_path(project, location, model) - assert expected == actual - - -def test_parse_model_path(): - expected = { - "project": "octopus", - "location": "oyster", - "model": "nudibranch", - } - path = PredictionServiceClient.model_path(**expected) - - # Check that the path construction is reversible. - actual = PredictionServiceClient.parse_model_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = PredictionServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "mussel", - } - path = PredictionServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. 
- actual = PredictionServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "winkle" - expected = "folders/{folder}".format(folder=folder, ) - actual = PredictionServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nautilus", - } - path = PredictionServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = PredictionServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "scallop" - expected = "organizations/{organization}".format(organization=organization, ) - actual = PredictionServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "abalone", - } - path = PredictionServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = PredictionServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "squid" - expected = "projects/{project}".format(project=project, ) - actual = PredictionServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "clam", - } - path = PredictionServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = PredictionServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "whelk" - location = "octopus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = PredictionServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - } - path = PredictionServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
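Each *_path helper and its parse_* partner form an inverse pair over a fixed template, which is exactly what the "construction is reversible" checks assert. A self-contained equivalent using only the standard library (hypothetical template shown):

.. code-block:: python

    import re

    TEMPLATE = "projects/{project}/locations/{location}"

    def build_path(**kwargs):
        return TEMPLATE.format(**kwargs)

    def parse_path(path):
        # Turn the template into a regex with named groups, then match.
        pattern = re.sub(r"{(\w+)}", r"(?P<\1>.+?)", TEMPLATE) + "$"
        match = re.match(pattern, path)
        return match.groupdict() if match else {}

    expected = {"project": "oyster", "location": "nudibranch"}
    assert parse_path(build_path(**expected)) == expected  # reversible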
- actual = PredictionServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.PredictionServiceTransport, '_prep_wrapped_messages') as prep: - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.PredictionServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = PredictionServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_transport_close_grpc(): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = PredictionServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close_rest(): - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = PredictionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (PredictionServiceClient, transports.PredictionServiceGrpcTransport), - (PredictionServiceAsyncClient, transports.PredictionServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-backupdr/v1/.coveragerc b/owl-bot-staging/google-cloud-backupdr/v1/.coveragerc deleted file mode 100644 index 9806deca7d3b..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/backupdr/__init__.py - google/cloud/backupdr/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/google-cloud-backupdr/v1/.flake8 b/owl-bot-staging/google-cloud-backupdr/v1/.flake8 deleted file mode 100644 index 29227d4cf419..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. 
- **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/google-cloud-backupdr/v1/MANIFEST.in b/owl-bot-staging/google-cloud-backupdr/v1/MANIFEST.in deleted file mode 100644 index 6c1844e6a4b2..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/backupdr *.py -recursive-include google/cloud/backupdr_v1 *.py diff --git a/owl-bot-staging/google-cloud-backupdr/v1/README.rst b/owl-bot-staging/google-cloud-backupdr/v1/README.rst deleted file mode 100644 index 5f47c32c6918..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/README.rst +++ /dev/null @@ -1,143 +0,0 @@ -Python Client for Google Cloud Backupdr API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Backupdr API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv - source /bin/activate - /bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. code-block:: console - - python3 -m venv - \Scripts\activate - \Scripts\pip.exe install \path\to\library - - -Logging -------- - -This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes. -Note the following: - -#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging. -#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**. -#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below. - - -Simple, environment-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google -logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged -messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging -event. - -A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log. 
- -- Valid logging scopes: :code:`google`, :code:`google.cloud.backupdr.v1`, :code:`google.api`, :code:`google.auth`, etc. - -- Invalid logging scopes: :code:`foo`, :code:`123`, etc. - -**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers. - - -Examples -^^^^^^^^ - -- Enabling the default handler for all Google-based loggers - -.. code-block:: console - - export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google - -- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`): - -.. code-block:: console - - export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1 - - -Advanced, code-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -You can also configure a valid logging scope using Python's standard `logging` mechanism. - - -Examples -^^^^^^^^ - -- Configuring a handler for all Google-based loggers - -.. code-block:: python - - import logging - - from google.cloud.translate_v3 import translate - - base_logger = logging.getLogger("google") - base_logger.addHandler(logging.StreamHandler()) - base_logger.setLevel(logging.DEBUG) - -- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`): - -.. code-block:: python - - import logging - - from google.cloud.translate_v3 import translate - - base_logger = logging.getLogger("google.cloud.library_v1") - base_logger.addHandler(logging.StreamHandler()) - base_logger.setLevel(logging.DEBUG) - - -Logging details -~~~~~~~~~~~~~~~ - -#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root - logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set - :code:`logging.getLogger("google").propagate = True` in your code. -#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for - one library, but decide you need to also set up environment-based logging configuration for another library. - - #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual - if the code-based configuration gets applied first. - -#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get - executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured. - (This is the reason for 2.i. above.) diff --git a/owl-bot-staging/google-cloud-backupdr/v1/docs/_static/custom.css b/owl-bot-staging/google-cloud-backupdr/v1/docs/_static/custom.css deleted file mode 100644 index 06423be0b592..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/docs/_static/custom.css +++ /dev/null @@ -1,3 +0,0 @@ -dl.field-list > dt { - min-width: 100px -} diff --git a/owl-bot-staging/google-cloud-backupdr/v1/docs/backupdr_v1/backup_dr.rst b/owl-bot-staging/google-cloud-backupdr/v1/docs/backupdr_v1/backup_dr.rst deleted file mode 100644 index d7f1fc1df7c9..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/docs/backupdr_v1/backup_dr.rst +++ /dev/null @@ -1,10 +0,0 @@ -BackupDR -------------------------- - -.. automodule:: google.cloud.backupdr_v1.services.backup_dr - :members: - :inherited-members: - -.. 
automodule:: google.cloud.backupdr_v1.services.backup_dr.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-backupdr/v1/docs/backupdr_v1/services_.rst b/owl-bot-staging/google-cloud-backupdr/v1/docs/backupdr_v1/services_.rst deleted file mode 100644 index 512dd68a7dda..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/docs/backupdr_v1/services_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Backupdr v1 API -========================================= -.. toctree:: - :maxdepth: 2 - - backup_dr diff --git a/owl-bot-staging/google-cloud-backupdr/v1/docs/backupdr_v1/types_.rst b/owl-bot-staging/google-cloud-backupdr/v1/docs/backupdr_v1/types_.rst deleted file mode 100644 index a796bcae4303..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/docs/backupdr_v1/types_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Backupdr v1 API -====================================== - -.. automodule:: google.cloud.backupdr_v1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/google-cloud-backupdr/v1/docs/conf.py b/owl-bot-staging/google-cloud-backupdr/v1/docs/conf.py deleted file mode 100644 index f5af857a3e92..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-backupdr documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. 
-templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = u"google-cloud-backupdr" -copyright = u"2023, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = 'en' - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. 
-# html_short_title = None
-
-# The name of an image file (relative to this directory) to place at the top
-# of the sidebar.
-# html_logo = None
-
-# The name of an image file (within the static path) to use as favicon of the
-# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
-# pixels large.
-# html_favicon = None
-
-# Add any paths that contain custom static files (such as style sheets) here,
-# relative to this directory. They are copied after the builtin static files,
-# so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ["_static"]
-
-# Add any extra paths that contain custom files (such as robots.txt or
-# .htaccess) here, relative to this directory. These files are copied
-# directly to the root of the documentation.
-# html_extra_path = []
-
-# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
-# using the given strftime format.
-# html_last_updated_fmt = '%b %d, %Y'
-
-# If true, SmartyPants will be used to convert quotes and dashes to
-# typographically correct entities.
-# html_use_smartypants = True
-
-# Custom sidebar templates, maps document names to template names.
-# html_sidebars = {}
-
-# Additional templates that should be rendered to pages, maps page names to
-# template names.
-# html_additional_pages = {}
-
-# If false, no module index is generated.
-# html_domain_indices = True
-
-# If false, no index is generated.
-# html_use_index = True
-
-# If true, the index is split into individual pages for each letter.
-# html_split_index = False
-
-# If true, links to the reST sources are added to the pages.
-# html_show_sourcelink = True
-
-# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
-# html_show_sphinx = True
-
-# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
-# html_show_copyright = True
-
-# If true, an OpenSearch description file will be output, and all pages will
-# contain a <link> tag referring to it.  The value of this option must be the
-# base URL from which the finished HTML is served.
-# html_use_opensearch = ''
-
-# This is the file name suffix for HTML files (e.g. ".xhtml").
-# html_file_suffix = None
-
-# Language to be used for generating the HTML full-text search index.
-# Sphinx supports the following languages:
-#   'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
-#   'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
-# html_search_language = 'en'
-
-# A dictionary with options for the search language support, empty by default.
-# Now only 'ja' uses this config value
-# html_search_options = {'type': 'default'}
-
-# The name of a javascript file (relative to the configuration directory) that
-# implements a search results scorer. If empty, the default will be used.
-# html_search_scorer = 'scorer.js'
-
-# Output file base name for HTML help builder.
-htmlhelp_basename = "google-cloud-backupdr-doc"
-
-# -- Options for warnings ------------------------------------------------------
-
-
-suppress_warnings = [
-    # Temporarily suppress this to avoid "more than one target found for
-    # cross-reference" warnings, which are intractable for us to avoid while in
-    # a mono-repo.
-    # See https://github.com/sphinx-doc/sphinx/blob
-    # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843
-    "ref.python"
-]
-
-# -- Options for LaTeX output ---------------------------------------------
-
-latex_elements = {
-    # The paper size ('letterpaper' or 'a4paper').
- # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-backupdr.tex", - u"google-cloud-backupdr Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-backupdr", - u"Google Cloud Backupdr Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-backupdr", - u"google-cloud-backupdr Documentation", - author, - "google-cloud-backupdr", - "GAPIC library for Google Cloud Backupdr API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. 
-intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-backupdr/v1/docs/index.rst b/owl-bot-staging/google-cloud-backupdr/v1/docs/index.rst deleted file mode 100644 index 7b5598ef28b0..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - backupdr_v1/services_ - backupdr_v1/types_ diff --git a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr/__init__.py b/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr/__init__.py deleted file mode 100644 index 4043ac616eb1..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr/__init__.py +++ /dev/null @@ -1,209 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.backupdr import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.backupdr_v1.services.backup_dr.client import BackupDRClient -from google.cloud.backupdr_v1.services.backup_dr.async_client import BackupDRAsyncClient - -from google.cloud.backupdr_v1.types.backupdr import CreateManagementServerRequest -from google.cloud.backupdr_v1.types.backupdr import DeleteManagementServerRequest -from google.cloud.backupdr_v1.types.backupdr import GetManagementServerRequest -from google.cloud.backupdr_v1.types.backupdr import InitializeServiceRequest -from google.cloud.backupdr_v1.types.backupdr import InitializeServiceResponse -from google.cloud.backupdr_v1.types.backupdr import ListManagementServersRequest -from google.cloud.backupdr_v1.types.backupdr import ListManagementServersResponse -from google.cloud.backupdr_v1.types.backupdr import ManagementServer -from google.cloud.backupdr_v1.types.backupdr import ManagementURI -from google.cloud.backupdr_v1.types.backupdr import NetworkConfig -from google.cloud.backupdr_v1.types.backupdr import OperationMetadata -from google.cloud.backupdr_v1.types.backupdr import WorkforceIdentityBasedManagementURI -from google.cloud.backupdr_v1.types.backupdr import WorkforceIdentityBasedOAuth2ClientID -from google.cloud.backupdr_v1.types.backupplan import BackupPlan -from google.cloud.backupdr_v1.types.backupplan import BackupRule -from google.cloud.backupdr_v1.types.backupplan import BackupWindow -from google.cloud.backupdr_v1.types.backupplan import CreateBackupPlanRequest -from google.cloud.backupdr_v1.types.backupplan import DeleteBackupPlanRequest -from google.cloud.backupdr_v1.types.backupplan import GetBackupPlanRequest -from google.cloud.backupdr_v1.types.backupplan import ListBackupPlansRequest -from google.cloud.backupdr_v1.types.backupplan import ListBackupPlansResponse -from google.cloud.backupdr_v1.types.backupplan import StandardSchedule -from google.cloud.backupdr_v1.types.backupplan import WeekDayOfMonth -from google.cloud.backupdr_v1.types.backupplanassociation import BackupPlanAssociation -from google.cloud.backupdr_v1.types.backupplanassociation import CreateBackupPlanAssociationRequest -from google.cloud.backupdr_v1.types.backupplanassociation import DeleteBackupPlanAssociationRequest -from google.cloud.backupdr_v1.types.backupplanassociation import GetBackupPlanAssociationRequest -from google.cloud.backupdr_v1.types.backupplanassociation import ListBackupPlanAssociationsRequest -from google.cloud.backupdr_v1.types.backupplanassociation import ListBackupPlanAssociationsResponse -from google.cloud.backupdr_v1.types.backupplanassociation import RuleConfigInfo -from google.cloud.backupdr_v1.types.backupplanassociation import TriggerBackupRequest -from google.cloud.backupdr_v1.types.backupvault import Backup -from google.cloud.backupdr_v1.types.backupvault import BackupApplianceBackupConfig -from google.cloud.backupdr_v1.types.backupvault import BackupApplianceLockInfo -from google.cloud.backupdr_v1.types.backupvault import BackupConfigInfo -from google.cloud.backupdr_v1.types.backupvault import BackupLock -from google.cloud.backupdr_v1.types.backupvault import BackupVault -from google.cloud.backupdr_v1.types.backupvault import CreateBackupVaultRequest -from google.cloud.backupdr_v1.types.backupvault import DataSource -from google.cloud.backupdr_v1.types.backupvault import DataSourceBackupApplianceApplication -from google.cloud.backupdr_v1.types.backupvault import 
DataSourceGcpResource -from google.cloud.backupdr_v1.types.backupvault import DeleteBackupRequest -from google.cloud.backupdr_v1.types.backupvault import DeleteBackupVaultRequest -from google.cloud.backupdr_v1.types.backupvault import FetchUsableBackupVaultsRequest -from google.cloud.backupdr_v1.types.backupvault import FetchUsableBackupVaultsResponse -from google.cloud.backupdr_v1.types.backupvault import GcpBackupConfig -from google.cloud.backupdr_v1.types.backupvault import GcpResource -from google.cloud.backupdr_v1.types.backupvault import GetBackupRequest -from google.cloud.backupdr_v1.types.backupvault import GetBackupVaultRequest -from google.cloud.backupdr_v1.types.backupvault import GetDataSourceRequest -from google.cloud.backupdr_v1.types.backupvault import ListBackupsRequest -from google.cloud.backupdr_v1.types.backupvault import ListBackupsResponse -from google.cloud.backupdr_v1.types.backupvault import ListBackupVaultsRequest -from google.cloud.backupdr_v1.types.backupvault import ListBackupVaultsResponse -from google.cloud.backupdr_v1.types.backupvault import ListDataSourcesRequest -from google.cloud.backupdr_v1.types.backupvault import ListDataSourcesResponse -from google.cloud.backupdr_v1.types.backupvault import RestoreBackupRequest -from google.cloud.backupdr_v1.types.backupvault import RestoreBackupResponse -from google.cloud.backupdr_v1.types.backupvault import ServiceLockInfo -from google.cloud.backupdr_v1.types.backupvault import TargetResource -from google.cloud.backupdr_v1.types.backupvault import UpdateBackupRequest -from google.cloud.backupdr_v1.types.backupvault import UpdateBackupVaultRequest -from google.cloud.backupdr_v1.types.backupvault import UpdateDataSourceRequest -from google.cloud.backupdr_v1.types.backupvault import BackupConfigState -from google.cloud.backupdr_v1.types.backupvault import BackupVaultView -from google.cloud.backupdr_v1.types.backupvault import BackupView -from google.cloud.backupdr_v1.types.backupvault_ba import BackupApplianceBackupProperties -from google.cloud.backupdr_v1.types.backupvault_gce import AcceleratorConfig -from google.cloud.backupdr_v1.types.backupvault_gce import AccessConfig -from google.cloud.backupdr_v1.types.backupvault_gce import AdvancedMachineFeatures -from google.cloud.backupdr_v1.types.backupvault_gce import AliasIpRange -from google.cloud.backupdr_v1.types.backupvault_gce import AllocationAffinity -from google.cloud.backupdr_v1.types.backupvault_gce import AttachedDisk -from google.cloud.backupdr_v1.types.backupvault_gce import ComputeInstanceBackupProperties -from google.cloud.backupdr_v1.types.backupvault_gce import ComputeInstanceDataSourceProperties -from google.cloud.backupdr_v1.types.backupvault_gce import ComputeInstanceRestoreProperties -from google.cloud.backupdr_v1.types.backupvault_gce import ComputeInstanceTargetEnvironment -from google.cloud.backupdr_v1.types.backupvault_gce import ConfidentialInstanceConfig -from google.cloud.backupdr_v1.types.backupvault_gce import CustomerEncryptionKey -from google.cloud.backupdr_v1.types.backupvault_gce import DisplayDevice -from google.cloud.backupdr_v1.types.backupvault_gce import Entry -from google.cloud.backupdr_v1.types.backupvault_gce import GuestOsFeature -from google.cloud.backupdr_v1.types.backupvault_gce import InstanceParams -from google.cloud.backupdr_v1.types.backupvault_gce import Metadata -from google.cloud.backupdr_v1.types.backupvault_gce import NetworkInterface -from google.cloud.backupdr_v1.types.backupvault_gce import 
NetworkPerformanceConfig -from google.cloud.backupdr_v1.types.backupvault_gce import Scheduling -from google.cloud.backupdr_v1.types.backupvault_gce import SchedulingDuration -from google.cloud.backupdr_v1.types.backupvault_gce import ServiceAccount -from google.cloud.backupdr_v1.types.backupvault_gce import Tags -from google.cloud.backupdr_v1.types.backupvault_gce import KeyRevocationActionType - -__all__ = ('BackupDRClient', - 'BackupDRAsyncClient', - 'CreateManagementServerRequest', - 'DeleteManagementServerRequest', - 'GetManagementServerRequest', - 'InitializeServiceRequest', - 'InitializeServiceResponse', - 'ListManagementServersRequest', - 'ListManagementServersResponse', - 'ManagementServer', - 'ManagementURI', - 'NetworkConfig', - 'OperationMetadata', - 'WorkforceIdentityBasedManagementURI', - 'WorkforceIdentityBasedOAuth2ClientID', - 'BackupPlan', - 'BackupRule', - 'BackupWindow', - 'CreateBackupPlanRequest', - 'DeleteBackupPlanRequest', - 'GetBackupPlanRequest', - 'ListBackupPlansRequest', - 'ListBackupPlansResponse', - 'StandardSchedule', - 'WeekDayOfMonth', - 'BackupPlanAssociation', - 'CreateBackupPlanAssociationRequest', - 'DeleteBackupPlanAssociationRequest', - 'GetBackupPlanAssociationRequest', - 'ListBackupPlanAssociationsRequest', - 'ListBackupPlanAssociationsResponse', - 'RuleConfigInfo', - 'TriggerBackupRequest', - 'Backup', - 'BackupApplianceBackupConfig', - 'BackupApplianceLockInfo', - 'BackupConfigInfo', - 'BackupLock', - 'BackupVault', - 'CreateBackupVaultRequest', - 'DataSource', - 'DataSourceBackupApplianceApplication', - 'DataSourceGcpResource', - 'DeleteBackupRequest', - 'DeleteBackupVaultRequest', - 'FetchUsableBackupVaultsRequest', - 'FetchUsableBackupVaultsResponse', - 'GcpBackupConfig', - 'GcpResource', - 'GetBackupRequest', - 'GetBackupVaultRequest', - 'GetDataSourceRequest', - 'ListBackupsRequest', - 'ListBackupsResponse', - 'ListBackupVaultsRequest', - 'ListBackupVaultsResponse', - 'ListDataSourcesRequest', - 'ListDataSourcesResponse', - 'RestoreBackupRequest', - 'RestoreBackupResponse', - 'ServiceLockInfo', - 'TargetResource', - 'UpdateBackupRequest', - 'UpdateBackupVaultRequest', - 'UpdateDataSourceRequest', - 'BackupConfigState', - 'BackupVaultView', - 'BackupView', - 'BackupApplianceBackupProperties', - 'AcceleratorConfig', - 'AccessConfig', - 'AdvancedMachineFeatures', - 'AliasIpRange', - 'AllocationAffinity', - 'AttachedDisk', - 'ComputeInstanceBackupProperties', - 'ComputeInstanceDataSourceProperties', - 'ComputeInstanceRestoreProperties', - 'ComputeInstanceTargetEnvironment', - 'ConfidentialInstanceConfig', - 'CustomerEncryptionKey', - 'DisplayDevice', - 'Entry', - 'GuestOsFeature', - 'InstanceParams', - 'Metadata', - 'NetworkInterface', - 'NetworkPerformanceConfig', - 'Scheduling', - 'SchedulingDuration', - 'ServiceAccount', - 'Tags', - 'KeyRevocationActionType', -) diff --git a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr/gapic_version.py b/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr/py.typed b/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr/py.typed deleted file mode 100644 index 195a9f5c16ac..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-backupdr package uses inline types. diff --git a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/__init__.py b/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/__init__.py deleted file mode 100644 index ea3c7b09aab2..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/__init__.py +++ /dev/null @@ -1,210 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.backupdr_v1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.backup_dr import BackupDRClient -from .services.backup_dr import BackupDRAsyncClient - -from .types.backupdr import CreateManagementServerRequest -from .types.backupdr import DeleteManagementServerRequest -from .types.backupdr import GetManagementServerRequest -from .types.backupdr import InitializeServiceRequest -from .types.backupdr import InitializeServiceResponse -from .types.backupdr import ListManagementServersRequest -from .types.backupdr import ListManagementServersResponse -from .types.backupdr import ManagementServer -from .types.backupdr import ManagementURI -from .types.backupdr import NetworkConfig -from .types.backupdr import OperationMetadata -from .types.backupdr import WorkforceIdentityBasedManagementURI -from .types.backupdr import WorkforceIdentityBasedOAuth2ClientID -from .types.backupplan import BackupPlan -from .types.backupplan import BackupRule -from .types.backupplan import BackupWindow -from .types.backupplan import CreateBackupPlanRequest -from .types.backupplan import DeleteBackupPlanRequest -from .types.backupplan import GetBackupPlanRequest -from .types.backupplan import ListBackupPlansRequest -from .types.backupplan import ListBackupPlansResponse -from .types.backupplan import StandardSchedule -from .types.backupplan import WeekDayOfMonth -from .types.backupplanassociation import BackupPlanAssociation -from .types.backupplanassociation import CreateBackupPlanAssociationRequest -from .types.backupplanassociation import DeleteBackupPlanAssociationRequest -from .types.backupplanassociation import GetBackupPlanAssociationRequest -from .types.backupplanassociation import ListBackupPlanAssociationsRequest -from .types.backupplanassociation import ListBackupPlanAssociationsResponse -from .types.backupplanassociation import RuleConfigInfo -from .types.backupplanassociation import TriggerBackupRequest -from .types.backupvault import Backup -from .types.backupvault import BackupApplianceBackupConfig -from .types.backupvault import BackupApplianceLockInfo -from .types.backupvault import BackupConfigInfo -from .types.backupvault import BackupLock -from .types.backupvault import BackupVault -from .types.backupvault import CreateBackupVaultRequest -from .types.backupvault import DataSource -from .types.backupvault import DataSourceBackupApplianceApplication -from .types.backupvault import DataSourceGcpResource -from .types.backupvault import DeleteBackupRequest -from .types.backupvault import DeleteBackupVaultRequest -from .types.backupvault import FetchUsableBackupVaultsRequest -from .types.backupvault import FetchUsableBackupVaultsResponse -from .types.backupvault import GcpBackupConfig -from .types.backupvault import GcpResource -from .types.backupvault import GetBackupRequest -from .types.backupvault import GetBackupVaultRequest -from .types.backupvault import GetDataSourceRequest -from .types.backupvault import ListBackupsRequest -from .types.backupvault import ListBackupsResponse -from .types.backupvault import ListBackupVaultsRequest -from .types.backupvault import ListBackupVaultsResponse -from .types.backupvault import ListDataSourcesRequest -from .types.backupvault import ListDataSourcesResponse -from .types.backupvault import RestoreBackupRequest -from .types.backupvault import RestoreBackupResponse -from .types.backupvault import ServiceLockInfo -from .types.backupvault import TargetResource -from .types.backupvault import 
UpdateBackupRequest -from .types.backupvault import UpdateBackupVaultRequest -from .types.backupvault import UpdateDataSourceRequest -from .types.backupvault import BackupConfigState -from .types.backupvault import BackupVaultView -from .types.backupvault import BackupView -from .types.backupvault_ba import BackupApplianceBackupProperties -from .types.backupvault_gce import AcceleratorConfig -from .types.backupvault_gce import AccessConfig -from .types.backupvault_gce import AdvancedMachineFeatures -from .types.backupvault_gce import AliasIpRange -from .types.backupvault_gce import AllocationAffinity -from .types.backupvault_gce import AttachedDisk -from .types.backupvault_gce import ComputeInstanceBackupProperties -from .types.backupvault_gce import ComputeInstanceDataSourceProperties -from .types.backupvault_gce import ComputeInstanceRestoreProperties -from .types.backupvault_gce import ComputeInstanceTargetEnvironment -from .types.backupvault_gce import ConfidentialInstanceConfig -from .types.backupvault_gce import CustomerEncryptionKey -from .types.backupvault_gce import DisplayDevice -from .types.backupvault_gce import Entry -from .types.backupvault_gce import GuestOsFeature -from .types.backupvault_gce import InstanceParams -from .types.backupvault_gce import Metadata -from .types.backupvault_gce import NetworkInterface -from .types.backupvault_gce import NetworkPerformanceConfig -from .types.backupvault_gce import Scheduling -from .types.backupvault_gce import SchedulingDuration -from .types.backupvault_gce import ServiceAccount -from .types.backupvault_gce import Tags -from .types.backupvault_gce import KeyRevocationActionType - -__all__ = ( - 'BackupDRAsyncClient', -'AcceleratorConfig', -'AccessConfig', -'AdvancedMachineFeatures', -'AliasIpRange', -'AllocationAffinity', -'AttachedDisk', -'Backup', -'BackupApplianceBackupConfig', -'BackupApplianceBackupProperties', -'BackupApplianceLockInfo', -'BackupConfigInfo', -'BackupConfigState', -'BackupDRClient', -'BackupLock', -'BackupPlan', -'BackupPlanAssociation', -'BackupRule', -'BackupVault', -'BackupVaultView', -'BackupView', -'BackupWindow', -'ComputeInstanceBackupProperties', -'ComputeInstanceDataSourceProperties', -'ComputeInstanceRestoreProperties', -'ComputeInstanceTargetEnvironment', -'ConfidentialInstanceConfig', -'CreateBackupPlanAssociationRequest', -'CreateBackupPlanRequest', -'CreateBackupVaultRequest', -'CreateManagementServerRequest', -'CustomerEncryptionKey', -'DataSource', -'DataSourceBackupApplianceApplication', -'DataSourceGcpResource', -'DeleteBackupPlanAssociationRequest', -'DeleteBackupPlanRequest', -'DeleteBackupRequest', -'DeleteBackupVaultRequest', -'DeleteManagementServerRequest', -'DisplayDevice', -'Entry', -'FetchUsableBackupVaultsRequest', -'FetchUsableBackupVaultsResponse', -'GcpBackupConfig', -'GcpResource', -'GetBackupPlanAssociationRequest', -'GetBackupPlanRequest', -'GetBackupRequest', -'GetBackupVaultRequest', -'GetDataSourceRequest', -'GetManagementServerRequest', -'GuestOsFeature', -'InitializeServiceRequest', -'InitializeServiceResponse', -'InstanceParams', -'KeyRevocationActionType', -'ListBackupPlanAssociationsRequest', -'ListBackupPlanAssociationsResponse', -'ListBackupPlansRequest', -'ListBackupPlansResponse', -'ListBackupVaultsRequest', -'ListBackupVaultsResponse', -'ListBackupsRequest', -'ListBackupsResponse', -'ListDataSourcesRequest', -'ListDataSourcesResponse', -'ListManagementServersRequest', -'ListManagementServersResponse', -'ManagementServer', -'ManagementURI', -'Metadata', 
-'NetworkConfig', -'NetworkInterface', -'NetworkPerformanceConfig', -'OperationMetadata', -'RestoreBackupRequest', -'RestoreBackupResponse', -'RuleConfigInfo', -'Scheduling', -'SchedulingDuration', -'ServiceAccount', -'ServiceLockInfo', -'StandardSchedule', -'Tags', -'TargetResource', -'TriggerBackupRequest', -'UpdateBackupRequest', -'UpdateBackupVaultRequest', -'UpdateDataSourceRequest', -'WeekDayOfMonth', -'WorkforceIdentityBasedManagementURI', -'WorkforceIdentityBasedOAuth2ClientID', -) diff --git a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/gapic_metadata.json b/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/gapic_metadata.json deleted file mode 100644 index 5534a346d83c..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/gapic_metadata.json +++ /dev/null @@ -1,448 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.backupdr_v1", - "protoPackage": "google.cloud.backupdr.v1", - "schema": "1.0", - "services": { - "BackupDR": { - "clients": { - "grpc": { - "libraryClient": "BackupDRClient", - "rpcs": { - "CreateBackupPlan": { - "methods": [ - "create_backup_plan" - ] - }, - "CreateBackupPlanAssociation": { - "methods": [ - "create_backup_plan_association" - ] - }, - "CreateBackupVault": { - "methods": [ - "create_backup_vault" - ] - }, - "CreateManagementServer": { - "methods": [ - "create_management_server" - ] - }, - "DeleteBackup": { - "methods": [ - "delete_backup" - ] - }, - "DeleteBackupPlan": { - "methods": [ - "delete_backup_plan" - ] - }, - "DeleteBackupPlanAssociation": { - "methods": [ - "delete_backup_plan_association" - ] - }, - "DeleteBackupVault": { - "methods": [ - "delete_backup_vault" - ] - }, - "DeleteManagementServer": { - "methods": [ - "delete_management_server" - ] - }, - "FetchUsableBackupVaults": { - "methods": [ - "fetch_usable_backup_vaults" - ] - }, - "GetBackup": { - "methods": [ - "get_backup" - ] - }, - "GetBackupPlan": { - "methods": [ - "get_backup_plan" - ] - }, - "GetBackupPlanAssociation": { - "methods": [ - "get_backup_plan_association" - ] - }, - "GetBackupVault": { - "methods": [ - "get_backup_vault" - ] - }, - "GetDataSource": { - "methods": [ - "get_data_source" - ] - }, - "GetManagementServer": { - "methods": [ - "get_management_server" - ] - }, - "InitializeService": { - "methods": [ - "initialize_service" - ] - }, - "ListBackupPlanAssociations": { - "methods": [ - "list_backup_plan_associations" - ] - }, - "ListBackupPlans": { - "methods": [ - "list_backup_plans" - ] - }, - "ListBackupVaults": { - "methods": [ - "list_backup_vaults" - ] - }, - "ListBackups": { - "methods": [ - "list_backups" - ] - }, - "ListDataSources": { - "methods": [ - "list_data_sources" - ] - }, - "ListManagementServers": { - "methods": [ - "list_management_servers" - ] - }, - "RestoreBackup": { - "methods": [ - "restore_backup" - ] - }, - "TriggerBackup": { - "methods": [ - "trigger_backup" - ] - }, - "UpdateBackup": { - "methods": [ - "update_backup" - ] - }, - "UpdateBackupVault": { - "methods": [ - "update_backup_vault" - ] - }, - "UpdateDataSource": { - "methods": [ - "update_data_source" - ] - } - } - }, - "grpc-async": { - "libraryClient": "BackupDRAsyncClient", - "rpcs": { - "CreateBackupPlan": { - "methods": [ - "create_backup_plan" - ] - }, - "CreateBackupPlanAssociation": { - "methods": [ - "create_backup_plan_association" - ] - }, - "CreateBackupVault": { - "methods": [ - 
"create_backup_vault" - ] - }, - "CreateManagementServer": { - "methods": [ - "create_management_server" - ] - }, - "DeleteBackup": { - "methods": [ - "delete_backup" - ] - }, - "DeleteBackupPlan": { - "methods": [ - "delete_backup_plan" - ] - }, - "DeleteBackupPlanAssociation": { - "methods": [ - "delete_backup_plan_association" - ] - }, - "DeleteBackupVault": { - "methods": [ - "delete_backup_vault" - ] - }, - "DeleteManagementServer": { - "methods": [ - "delete_management_server" - ] - }, - "FetchUsableBackupVaults": { - "methods": [ - "fetch_usable_backup_vaults" - ] - }, - "GetBackup": { - "methods": [ - "get_backup" - ] - }, - "GetBackupPlan": { - "methods": [ - "get_backup_plan" - ] - }, - "GetBackupPlanAssociation": { - "methods": [ - "get_backup_plan_association" - ] - }, - "GetBackupVault": { - "methods": [ - "get_backup_vault" - ] - }, - "GetDataSource": { - "methods": [ - "get_data_source" - ] - }, - "GetManagementServer": { - "methods": [ - "get_management_server" - ] - }, - "InitializeService": { - "methods": [ - "initialize_service" - ] - }, - "ListBackupPlanAssociations": { - "methods": [ - "list_backup_plan_associations" - ] - }, - "ListBackupPlans": { - "methods": [ - "list_backup_plans" - ] - }, - "ListBackupVaults": { - "methods": [ - "list_backup_vaults" - ] - }, - "ListBackups": { - "methods": [ - "list_backups" - ] - }, - "ListDataSources": { - "methods": [ - "list_data_sources" - ] - }, - "ListManagementServers": { - "methods": [ - "list_management_servers" - ] - }, - "RestoreBackup": { - "methods": [ - "restore_backup" - ] - }, - "TriggerBackup": { - "methods": [ - "trigger_backup" - ] - }, - "UpdateBackup": { - "methods": [ - "update_backup" - ] - }, - "UpdateBackupVault": { - "methods": [ - "update_backup_vault" - ] - }, - "UpdateDataSource": { - "methods": [ - "update_data_source" - ] - } - } - }, - "rest": { - "libraryClient": "BackupDRClient", - "rpcs": { - "CreateBackupPlan": { - "methods": [ - "create_backup_plan" - ] - }, - "CreateBackupPlanAssociation": { - "methods": [ - "create_backup_plan_association" - ] - }, - "CreateBackupVault": { - "methods": [ - "create_backup_vault" - ] - }, - "CreateManagementServer": { - "methods": [ - "create_management_server" - ] - }, - "DeleteBackup": { - "methods": [ - "delete_backup" - ] - }, - "DeleteBackupPlan": { - "methods": [ - "delete_backup_plan" - ] - }, - "DeleteBackupPlanAssociation": { - "methods": [ - "delete_backup_plan_association" - ] - }, - "DeleteBackupVault": { - "methods": [ - "delete_backup_vault" - ] - }, - "DeleteManagementServer": { - "methods": [ - "delete_management_server" - ] - }, - "FetchUsableBackupVaults": { - "methods": [ - "fetch_usable_backup_vaults" - ] - }, - "GetBackup": { - "methods": [ - "get_backup" - ] - }, - "GetBackupPlan": { - "methods": [ - "get_backup_plan" - ] - }, - "GetBackupPlanAssociation": { - "methods": [ - "get_backup_plan_association" - ] - }, - "GetBackupVault": { - "methods": [ - "get_backup_vault" - ] - }, - "GetDataSource": { - "methods": [ - "get_data_source" - ] - }, - "GetManagementServer": { - "methods": [ - "get_management_server" - ] - }, - "InitializeService": { - "methods": [ - "initialize_service" - ] - }, - "ListBackupPlanAssociations": { - "methods": [ - "list_backup_plan_associations" - ] - }, - "ListBackupPlans": { - "methods": [ - "list_backup_plans" - ] - }, - "ListBackupVaults": { - "methods": [ - "list_backup_vaults" - ] - }, - "ListBackups": { - "methods": [ - "list_backups" - ] - }, - "ListDataSources": { - "methods": [ - "list_data_sources" - ] 
- }, - "ListManagementServers": { - "methods": [ - "list_management_servers" - ] - }, - "RestoreBackup": { - "methods": [ - "restore_backup" - ] - }, - "TriggerBackup": { - "methods": [ - "trigger_backup" - ] - }, - "UpdateBackup": { - "methods": [ - "update_backup" - ] - }, - "UpdateBackupVault": { - "methods": [ - "update_backup_vault" - ] - }, - "UpdateDataSource": { - "methods": [ - "update_data_source" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/gapic_version.py b/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/py.typed b/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/py.typed deleted file mode 100644 index 195a9f5c16ac..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-backupdr package uses inline types. diff --git a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/services/__init__.py b/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/services/__init__.py deleted file mode 100644 index 8f6cf068242c..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/services/backup_dr/__init__.py b/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/services/backup_dr/__init__.py deleted file mode 100644 index 268dca54bb7c..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/services/backup_dr/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import BackupDRClient -from .async_client import BackupDRAsyncClient - -__all__ = ( - 'BackupDRClient', - 'BackupDRAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/services/backup_dr/async_client.py b/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/services/backup_dr/async_client.py deleted file mode 100644 index e0bfd78a8d6e..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/services/backup_dr/async_client.py +++ /dev/null @@ -1,4392 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.backupdr_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.backupdr_v1.services.backup_dr import pagers -from google.cloud.backupdr_v1.types import backupdr -from google.cloud.backupdr_v1.types import backupplan -from google.cloud.backupdr_v1.types import backupplanassociation -from google.cloud.backupdr_v1.types import backupvault -from google.cloud.backupdr_v1.types import backupvault_ba -from google.cloud.backupdr_v1.types import backupvault_gce -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.protobuf import wrappers_pb2 # type: ignore -from .transports.base import BackupDRTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio 
import BackupDRGrpcAsyncIOTransport -from .client import BackupDRClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -class BackupDRAsyncClient: - """The BackupDR Service""" - - _client: BackupDRClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = BackupDRClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = BackupDRClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = BackupDRClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = BackupDRClient._DEFAULT_UNIVERSE - - backup_path = staticmethod(BackupDRClient.backup_path) - parse_backup_path = staticmethod(BackupDRClient.parse_backup_path) - backup_plan_path = staticmethod(BackupDRClient.backup_plan_path) - parse_backup_plan_path = staticmethod(BackupDRClient.parse_backup_plan_path) - backup_plan_association_path = staticmethod(BackupDRClient.backup_plan_association_path) - parse_backup_plan_association_path = staticmethod(BackupDRClient.parse_backup_plan_association_path) - backup_vault_path = staticmethod(BackupDRClient.backup_vault_path) - parse_backup_vault_path = staticmethod(BackupDRClient.parse_backup_vault_path) - data_source_path = staticmethod(BackupDRClient.data_source_path) - parse_data_source_path = staticmethod(BackupDRClient.parse_data_source_path) - management_server_path = staticmethod(BackupDRClient.management_server_path) - parse_management_server_path = staticmethod(BackupDRClient.parse_management_server_path) - common_billing_account_path = staticmethod(BackupDRClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(BackupDRClient.parse_common_billing_account_path) - common_folder_path = staticmethod(BackupDRClient.common_folder_path) - parse_common_folder_path = staticmethod(BackupDRClient.parse_common_folder_path) - common_organization_path = staticmethod(BackupDRClient.common_organization_path) - parse_common_organization_path = staticmethod(BackupDRClient.parse_common_organization_path) - common_project_path = staticmethod(BackupDRClient.common_project_path) - parse_common_project_path = staticmethod(BackupDRClient.parse_common_project_path) - common_location_path = staticmethod(BackupDRClient.common_location_path) - parse_common_location_path = staticmethod(BackupDRClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - BackupDRAsyncClient: The constructed client. - """ - return BackupDRClient.from_service_account_info.__func__(BackupDRAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - BackupDRAsyncClient: The constructed client. 
- """ - return BackupDRClient.from_service_account_file.__func__(BackupDRAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return BackupDRClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> BackupDRTransport: - """Returns the transport used by the client instance. - - Returns: - BackupDRTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = BackupDRClient.get_transport_class - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, BackupDRTransport, Callable[..., BackupDRTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the backup dr async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,BackupDRTransport,Callable[..., BackupDRTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the BackupDRTransport constructor. - If set to None, a transport is chosen automatically. 
    @property
    def transport(self) -> BackupDRTransport:
        """Returns the transport used by the client instance.

        Returns:
            BackupDRTransport: The transport used by the client instance.
        """
        return self._client.transport

    @property
    def api_endpoint(self):
        """Return the API endpoint used by the client instance.

        Returns:
            str: The API endpoint used by the client instance.
        """
        return self._client._api_endpoint

    @property
    def universe_domain(self) -> str:
        """Return the universe domain used by the client instance.

        Returns:
            str: The universe domain used
                by the client instance.
        """
        return self._client._universe_domain

    get_transport_class = BackupDRClient.get_transport_class

    def __init__(self, *,
            credentials: Optional[ga_credentials.Credentials] = None,
            transport: Optional[Union[str, BackupDRTransport, Callable[..., BackupDRTransport]]] = "grpc_asyncio",
            client_options: Optional[ClientOptions] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            ) -> None:
        """Instantiates the backup dr async client.

        Args:
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            transport (Optional[Union[str,BackupDRTransport,Callable[..., BackupDRTransport]]]):
                The transport to use, or a Callable that constructs and returns a new transport to use.
                If a Callable is given, it will be called with the same set of initialization
                arguments as used in the BackupDRTransport constructor.
                If set to None, a transport is chosen automatically.
            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
                Custom options for the client.

                1. The ``api_endpoint`` property can be used to override the
                default endpoint provided by the client when ``transport`` is
                not explicitly provided. Only if this property is not set and
                ``transport`` was not explicitly provided, the endpoint is
                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
                variable, which has one of the following values:
                "always" (always use the default mTLS endpoint), "never" (always
                use the default regular endpoint) and "auto" (auto-switch to the
                default mTLS endpoint if client certificate is present; this is
                the default value).

                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
                is "true", then the ``client_cert_source`` property can be used
                to provide a client certificate for mTLS transport. If
                not provided, the default SSL client certificate will be used if
                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
                set, no client certificate will be used.

                3. The ``universe_domain`` property can be used to override the
                default "googleapis.com" universe. Note that ``api_endpoint``
                property still takes precedence; and ``universe_domain`` is
                currently not supported for mTLS.

            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.

        Raises:
            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
                creation failed for any reason.
        """
        self._client = BackupDRClient(
            credentials=credentials,
            transport=transport,
            client_options=client_options,
            client_info=client_info,

        )

        if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG):  # pragma: NO COVER
            _LOGGER.debug(
                "Created client `google.cloud.backupdr_v1.BackupDRAsyncClient`.",
                extra = {
                    "serviceName": "google.cloud.backupdr.v1.BackupDR",
                    "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""),
                    "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}",
                    "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(),
                } if hasattr(self._client._transport, "_credentials") else {
                    "serviceName": "google.cloud.backupdr.v1.BackupDR",
                    "credentialsType": None,
                }
            )

    async def list_management_servers(self,
            request: Optional[Union[backupdr.ListManagementServersRequest, dict]] = None,
            *,
            parent: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
            ) -> pagers.ListManagementServersAsyncPager:
        r"""Lists ManagementServers in a given project and
        location.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import backupdr_v1 - - async def sample_list_management_servers(): - # Create a client - client = backupdr_v1.BackupDRAsyncClient() - - # Initialize request argument(s) - request = backupdr_v1.ListManagementServersRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_management_servers(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.backupdr_v1.types.ListManagementServersRequest, dict]]): - The request object. Request message for listing - management servers. - parent (:class:`str`): - Required. The project and location for which to retrieve - management servers information, in the format - 'projects/{project_id}/locations/{location}'. In Cloud - BackupDR, locations map to Google Cloud regions, for - example **us-central1**. To retrieve management servers - for all locations, use "-" for the '{location}' value. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.backupdr_v1.services.backup_dr.pagers.ListManagementServersAsyncPager: - Response message for listing - management servers. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backupdr.ListManagementServersRequest): - request = backupdr.ListManagementServersRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_management_servers] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # This method is paged; wrap the response in a pager, which provides
-        # an `__aiter__` convenience method.
-        response = pagers.ListManagementServersAsyncPager(
-            method=rpc,
-            request=request,
-            response=response,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def get_management_server(self,
-            request: Optional[Union[backupdr.GetManagementServerRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> backupdr.ManagementServer:
-        r"""Gets details of a single ManagementServer.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import backupdr_v1
-
-            async def sample_get_management_server():
-                # Create a client
-                client = backupdr_v1.BackupDRAsyncClient()
-
-                # Initialize request argument(s)
-                request = backupdr_v1.GetManagementServerRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                response = await client.get_management_server(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.backupdr_v1.types.GetManagementServerRequest, dict]]):
-                The request object. Request message for getting a
-                management server instance.
-            name (:class:`str`):
-                Required. Name of the management server resource, in
-                the format
-                'projects/{project_id}/locations/{location}/managementServers/{resource_name}'
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.backupdr_v1.types.ManagementServer:
-                ManagementServer describes a single
-                BackupDR ManagementServer instance.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [name]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, backupdr.GetManagementServerRequest):
-            request = backupdr.GetManagementServerRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
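-        # (Either ``request`` or the flattened ``name`` argument is set here,
-        # never both, thanks to the mutual-exclusion check above.)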
-        if name is not None:
-            request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.get_management_server]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def create_management_server(self,
-            request: Optional[Union[backupdr.CreateManagementServerRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            management_server: Optional[backupdr.ManagementServer] = None,
-            management_server_id: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> operation_async.AsyncOperation:
-        r"""Creates a new ManagementServer in a given project and
-        location.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import backupdr_v1
-
-            async def sample_create_management_server():
-                # Create a client
-                client = backupdr_v1.BackupDRAsyncClient()
-
-                # Initialize request argument(s)
-                request = backupdr_v1.CreateManagementServerRequest(
-                    parent="parent_value",
-                    management_server_id="management_server_id_value",
-                )
-
-                # Make the request
-                operation = await client.create_management_server(request=request)
-
-                print("Waiting for operation to complete...")
-
-                response = await operation.result()
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.backupdr_v1.types.CreateManagementServerRequest, dict]]):
-                The request object. Request message for creating a
-                management server instance.
-            parent (:class:`str`):
-                Required. The management server project and location in
-                the format 'projects/{project_id}/locations/{location}'.
-                In Cloud Backup and DR, locations map to Google Cloud
-                regions, for example **us-central1**.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            management_server (:class:`google.cloud.backupdr_v1.types.ManagementServer`):
-                Required. A [management server
-                resource][google.cloud.backupdr.v1.ManagementServer]
-
-                This corresponds to the ``management_server`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            management_server_id (:class:`str`):
-                Required. The name of the management
-                server to create. The name must be
-                unique for the specified project and
-                location.
-
-                This corresponds to the ``management_server_id`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.api_core.operation_async.AsyncOperation:
-                An object representing a long-running operation.
-
-                The result type for the operation will be
-                :class:`google.cloud.backupdr_v1.types.ManagementServer`
-                ManagementServer describes a single BackupDR
-                ManagementServer instance.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [parent, management_server, management_server_id]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, backupdr.CreateManagementServerRequest):
-            request = backupdr.CreateManagementServerRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if parent is not None:
-            request.parent = parent
-        if management_server is not None:
-            request.management_server = management_server
-        if management_server_id is not None:
-            request.management_server_id = management_server_id
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.create_management_server]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Wrap the response in an operation future.
-        response = operation_async.from_gapic(
-            response,
-            self._client._transport.operations_client,
-            backupdr.ManagementServer,
-            metadata_type=backupdr.OperationMetadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def delete_management_server(self,
-            request: Optional[Union[backupdr.DeleteManagementServerRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> operation_async.AsyncOperation:
-        r"""Deletes a single ManagementServer.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import backupdr_v1
-
-            async def sample_delete_management_server():
-                # Create a client
-                client = backupdr_v1.BackupDRAsyncClient()
-
-                # Initialize request argument(s)
-                request = backupdr_v1.DeleteManagementServerRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                operation = await client.delete_management_server(request=request)
-
-                print("Waiting for operation to complete...")
-
-                response = await operation.result()
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.backupdr_v1.types.DeleteManagementServerRequest, dict]]):
-                The request object. Request message for deleting a
-                management server instance.
-            name (:class:`str`):
-                Required. Name of the resource.
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.api_core.operation_async.AsyncOperation:
-                An object representing a long-running operation.
-
-                The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
-                empty messages in your APIs. A typical example is to
-                use it as the request or the response type of an API
-                method. For instance:
-
-                    service Foo {
-                        rpc Bar(google.protobuf.Empty) returns
-                        (google.protobuf.Empty);
-
-                    }
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [name]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, backupdr.DeleteManagementServerRequest):
-            request = backupdr.DeleteManagementServerRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if name is not None:
-            request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.delete_management_server]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Wrap the response in an operation future.
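-        # ``from_gapic`` wraps the raw longrunning Operation in an
-        # AsyncOperation future whose eventual result deserializes to
-        # google.protobuf.Empty, with OperationMetadata as interim metadata.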
-        response = operation_async.from_gapic(
-            response,
-            self._client._transport.operations_client,
-            empty_pb2.Empty,
-            metadata_type=backupdr.OperationMetadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def create_backup_vault(self,
-            request: Optional[Union[backupvault.CreateBackupVaultRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            backup_vault: Optional[backupvault.BackupVault] = None,
-            backup_vault_id: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> operation_async.AsyncOperation:
-        r"""Creates a new BackupVault in a given project and
-        location.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import backupdr_v1
-
-            async def sample_create_backup_vault():
-                # Create a client
-                client = backupdr_v1.BackupDRAsyncClient()
-
-                # Initialize request argument(s)
-                request = backupdr_v1.CreateBackupVaultRequest(
-                    parent="parent_value",
-                    backup_vault_id="backup_vault_id_value",
-                )
-
-                # Make the request
-                operation = await client.create_backup_vault(request=request)
-
-                print("Waiting for operation to complete...")
-
-                response = await operation.result()
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.backupdr_v1.types.CreateBackupVaultRequest, dict]]):
-                The request object. Message for creating a BackupVault.
-            parent (:class:`str`):
-                Required. Value for parent.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            backup_vault (:class:`google.cloud.backupdr_v1.types.BackupVault`):
-                Required. The resource being created.
-
-                This corresponds to the ``backup_vault`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            backup_vault_id (:class:`str`):
-                Required. ID of the requesting object. If the ID is
-                auto-generated server-side, remove this field and
-                backup_vault_id from the method_signature of the Create
-                RPC.
-
-                This corresponds to the ``backup_vault_id`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.api_core.operation_async.AsyncOperation:
-                An object representing a long-running operation.
-
-                The result type for the operation will be
-                :class:`google.cloud.backupdr_v1.types.BackupVault`
-                Message describing a BackupVault object.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
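-        #   (Each flattened argument below corresponds to one field of
-        #   CreateBackupVaultRequest; ``request`` and the flattened form are
-        #   mutually exclusive.)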
-        flattened_params = [parent, backup_vault, backup_vault_id]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, backupvault.CreateBackupVaultRequest):
-            request = backupvault.CreateBackupVaultRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if parent is not None:
-            request.parent = parent
-        if backup_vault is not None:
-            request.backup_vault = backup_vault
-        if backup_vault_id is not None:
-            request.backup_vault_id = backup_vault_id
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.create_backup_vault]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Wrap the response in an operation future.
-        response = operation_async.from_gapic(
-            response,
-            self._client._transport.operations_client,
-            backupvault.BackupVault,
-            metadata_type=backupdr.OperationMetadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def list_backup_vaults(self,
-            request: Optional[Union[backupvault.ListBackupVaultsRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> pagers.ListBackupVaultsAsyncPager:
-        r"""Lists BackupVaults in a given project and location.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import backupdr_v1
-
-            async def sample_list_backup_vaults():
-                # Create a client
-                client = backupdr_v1.BackupDRAsyncClient()
-
-                # Initialize request argument(s)
-                request = backupdr_v1.ListBackupVaultsRequest(
-                    parent="parent_value",
-                )
-
-                # Make the request
-                page_result = await client.list_backup_vaults(request=request)
-
-                # Handle the response
-                async for response in page_result:
-                    print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.backupdr_v1.types.ListBackupVaultsRequest, dict]]):
-                The request object. Request message for listing
-                backupvault stores.
-            parent (:class:`str`):
-                Required. The project and location for which to retrieve
-                backupvault store information, in the format
-                'projects/{project_id}/locations/{location}'. In Cloud
-                Backup and DR, locations map to Google Cloud regions,
-                for example **us-central1**.
-                To retrieve backupvault stores for all locations, use
-                "-" for the '{location}' value.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupVaultsAsyncPager:
-                Response message for listing
-                BackupVaults.
-                Iterating over this object will yield
-                results and resolve additional pages
-                automatically.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [parent]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, backupvault.ListBackupVaultsRequest):
-            request = backupvault.ListBackupVaultsRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if parent is not None:
-            request.parent = parent
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.list_backup_vaults]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # This method is paged; wrap the response in a pager, which provides
-        # an `__aiter__` convenience method.
-        response = pagers.ListBackupVaultsAsyncPager(
-            method=rpc,
-            request=request,
-            response=response,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def fetch_usable_backup_vaults(self,
-            request: Optional[Union[backupvault.FetchUsableBackupVaultsRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> pagers.FetchUsableBackupVaultsAsyncPager:
-        r"""FetchUsableBackupVaults lists usable BackupVaults in
-        a given project and location. Usable BackupVaults are
-        the ones for which the user has the
-        backupdr.backupVaults.get permission.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import backupdr_v1
-
-            async def sample_fetch_usable_backup_vaults():
-                # Create a client
-                client = backupdr_v1.BackupDRAsyncClient()
-
-                # Initialize request argument(s)
-                request = backupdr_v1.FetchUsableBackupVaultsRequest(
-                    parent="parent_value",
-                )
-
-                # Make the request
-                page_result = await client.fetch_usable_backup_vaults(request=request)
-
-                # Handle the response
-                async for response in page_result:
-                    print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.backupdr_v1.types.FetchUsableBackupVaultsRequest, dict]]):
-                The request object. Request message for fetching usable
-                BackupVaults.
-            parent (:class:`str`):
-                Required. The project and location for which to retrieve
-                backupvault store information, in the format
-                'projects/{project_id}/locations/{location}'. In Cloud
-                Backup and DR, locations map to Google Cloud regions,
-                for example **us-central1**. To retrieve backupvault
-                stores for all locations, use "-" for the '{location}'
-                value.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.backupdr_v1.services.backup_dr.pagers.FetchUsableBackupVaultsAsyncPager:
-                Response message for fetching usable
-                BackupVaults.
-                Iterating over this object will yield
-                results and resolve additional pages
-                automatically.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [parent]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, backupvault.FetchUsableBackupVaultsRequest):
-            request = backupvault.FetchUsableBackupVaultsRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if parent is not None:
-            request.parent = parent
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.fetch_usable_backup_vaults]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
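-        # As with the other list methods, the awaited call yields only the
-        # first page; the pager below fetches the rest on demand.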
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # This method is paged; wrap the response in a pager, which provides
-        # an `__aiter__` convenience method.
-        response = pagers.FetchUsableBackupVaultsAsyncPager(
-            method=rpc,
-            request=request,
-            response=response,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def get_backup_vault(self,
-            request: Optional[Union[backupvault.GetBackupVaultRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> backupvault.BackupVault:
-        r"""Gets details of a BackupVault.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import backupdr_v1
-
-            async def sample_get_backup_vault():
-                # Create a client
-                client = backupdr_v1.BackupDRAsyncClient()
-
-                # Initialize request argument(s)
-                request = backupdr_v1.GetBackupVaultRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                response = await client.get_backup_vault(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.backupdr_v1.types.GetBackupVaultRequest, dict]]):
-                The request object. Request message for getting a
-                BackupVault.
-            name (:class:`str`):
-                Required. Name of the backupvault store resource, in
-                the format
-                'projects/{project_id}/locations/{location}/backupVaults/{resource_name}'
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.backupdr_v1.types.BackupVault:
-                Message describing a BackupVault
-                object.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [name]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, backupvault.GetBackupVaultRequest):
-            request = backupvault.GetBackupVaultRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if name is not None:
-            request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.get_backup_vault]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def update_backup_vault(self,
-            request: Optional[Union[backupvault.UpdateBackupVaultRequest, dict]] = None,
-            *,
-            backup_vault: Optional[backupvault.BackupVault] = None,
-            update_mask: Optional[field_mask_pb2.FieldMask] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> operation_async.AsyncOperation:
-        r"""Updates the settings of a BackupVault.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import backupdr_v1
-
-            async def sample_update_backup_vault():
-                # Create a client
-                client = backupdr_v1.BackupDRAsyncClient()
-
-                # Initialize request argument(s)
-                request = backupdr_v1.UpdateBackupVaultRequest(
-                )
-
-                # Make the request
-                operation = await client.update_backup_vault(request=request)
-
-                print("Waiting for operation to complete...")
-
-                response = await operation.result()
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.backupdr_v1.types.UpdateBackupVaultRequest, dict]]):
-                The request object. Request message for updating a
-                BackupVault.
-            backup_vault (:class:`google.cloud.backupdr_v1.types.BackupVault`):
-                Required. The resource being updated.
-
-                This corresponds to the ``backup_vault`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
-                Required. Field mask is used to specify the fields to be
-                overwritten in the BackupVault resource by the update.
-                The fields specified in the update_mask are relative to
-                the resource, not the full request. A field will be
-                overwritten if it is in the mask. If the user does not
-                provide a mask then the request will fail.
-
-                This corresponds to the ``update_mask`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.api_core.operation_async.AsyncOperation:
-                An object representing a long-running operation.
-
-                The result type for the operation will be
-                :class:`google.cloud.backupdr_v1.types.BackupVault`
-                Message describing a BackupVault object.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [backup_vault, update_mask]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, backupvault.UpdateBackupVaultRequest):
-            request = backupvault.UpdateBackupVaultRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if backup_vault is not None:
-            request.backup_vault = backup_vault
-        if update_mask is not None:
-            request.update_mask = update_mask
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.update_backup_vault]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("backup_vault.name", request.backup_vault.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Wrap the response in an operation future.
-        response = operation_async.from_gapic(
-            response,
-            self._client._transport.operations_client,
-            backupvault.BackupVault,
-            metadata_type=backupdr.OperationMetadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def delete_backup_vault(self,
-            request: Optional[Union[backupvault.DeleteBackupVaultRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> operation_async.AsyncOperation:
-        r"""Deletes a BackupVault.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import backupdr_v1
-
-            async def sample_delete_backup_vault():
-                # Create a client
-                client = backupdr_v1.BackupDRAsyncClient()
-
-                # Initialize request argument(s)
-                request = backupdr_v1.DeleteBackupVaultRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                operation = await client.delete_backup_vault(request=request)
-
-                print("Waiting for operation to complete...")
-
-                response = await operation.result()
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.backupdr_v1.types.DeleteBackupVaultRequest, dict]]):
-                The request object. Message for deleting a BackupVault.
-            name (:class:`str`):
-                Required. Name of the resource.
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.api_core.operation_async.AsyncOperation:
-                An object representing a long-running operation.
-
-                The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
-                empty messages in your APIs. A typical example is to
-                use it as the request or the response type of an API
-                method. For instance:
-
-                    service Foo {
-                        rpc Bar(google.protobuf.Empty) returns
-                        (google.protobuf.Empty);
-
-                    }
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [name]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, backupvault.DeleteBackupVaultRequest):
-            request = backupvault.DeleteBackupVaultRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if name is not None:
-            request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.delete_backup_vault]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Wrap the response in an operation future.
-        response = operation_async.from_gapic(
-            response,
-            self._client._transport.operations_client,
-            empty_pb2.Empty,
-            metadata_type=backupdr.OperationMetadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def list_data_sources(self,
-            request: Optional[Union[backupvault.ListDataSourcesRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> pagers.ListDataSourcesAsyncPager:
-        r"""Lists DataSources in a given project and location.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import backupdr_v1
-
-            async def sample_list_data_sources():
-                # Create a client
-                client = backupdr_v1.BackupDRAsyncClient()
-
-                # Initialize request argument(s)
-                request = backupdr_v1.ListDataSourcesRequest(
-                    parent="parent_value",
-                )
-
-                # Make the request
-                page_result = await client.list_data_sources(request=request)
-
-                # Handle the response
-                async for response in page_result:
-                    print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.backupdr_v1.types.ListDataSourcesRequest, dict]]):
-                The request object. Request message for listing
-                DataSources.
-            parent (:class:`str`):
-                Required. The project and location for which to retrieve
-                data source information, in the format
-                'projects/{project_id}/locations/{location}'. In Cloud
-                Backup and DR, locations map to Google Cloud regions,
-                for example **us-central1**. To retrieve data sources
-                for all locations, use "-" for the '{location}' value.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.backupdr_v1.services.backup_dr.pagers.ListDataSourcesAsyncPager:
-                Response message for listing
-                DataSources.
-                Iterating over this object will yield
-                results and resolve additional pages
-                automatically.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [parent]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, backupvault.ListDataSourcesRequest):
-            request = backupvault.ListDataSourcesRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if parent is not None:
-            request.parent = parent
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.list_data_sources]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # This method is paged; wrap the response in a pager, which provides
-        # an `__aiter__` convenience method.
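-        # The pager keeps the wrapped method, request, and first response so
-        # it can issue follow-up page requests with the same retry, timeout,
-        # and metadata settings.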
-        response = pagers.ListDataSourcesAsyncPager(
-            method=rpc,
-            request=request,
-            response=response,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def get_data_source(self,
-            request: Optional[Union[backupvault.GetDataSourceRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> backupvault.DataSource:
-        r"""Gets details of a DataSource.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import backupdr_v1
-
-            async def sample_get_data_source():
-                # Create a client
-                client = backupdr_v1.BackupDRAsyncClient()
-
-                # Initialize request argument(s)
-                request = backupdr_v1.GetDataSourceRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                response = await client.get_data_source(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.backupdr_v1.types.GetDataSourceRequest, dict]]):
-                The request object. Request message for getting a
-                DataSource instance.
-            name (:class:`str`):
-                Required. Name of the data source resource, in the
-                format
-                'projects/{project_id}/locations/{location}/backupVaults/{resource_name}/dataSource/{resource_name}'
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.backupdr_v1.types.DataSource:
-                Message describing a DataSource
-                object. Datasource object used to
-                represent Datasource details for both
-                admin and basic view.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [name]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, backupvault.GetDataSourceRequest):
-            request = backupvault.GetDataSourceRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if name is not None:
-            request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
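-        # ``_wrapped_methods`` maps each transport method to a version that
-        # already carries its default retry and timeout configuration.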
-        rpc = self._client._transport._wrapped_methods[self._client._transport.get_data_source]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def update_data_source(self,
-            request: Optional[Union[backupvault.UpdateDataSourceRequest, dict]] = None,
-            *,
-            data_source: Optional[backupvault.DataSource] = None,
-            update_mask: Optional[field_mask_pb2.FieldMask] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> operation_async.AsyncOperation:
-        r"""Updates the settings of a DataSource.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import backupdr_v1
-
-            async def sample_update_data_source():
-                # Create a client
-                client = backupdr_v1.BackupDRAsyncClient()
-
-                # Initialize request argument(s)
-                request = backupdr_v1.UpdateDataSourceRequest(
-                )
-
-                # Make the request
-                operation = await client.update_data_source(request=request)
-
-                print("Waiting for operation to complete...")
-
-                response = await operation.result()
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.backupdr_v1.types.UpdateDataSourceRequest, dict]]):
-                The request object. Request message for updating a data
-                source instance.
-            data_source (:class:`google.cloud.backupdr_v1.types.DataSource`):
-                Required. The resource being updated.
-
-                This corresponds to the ``data_source`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
-                Required. Field mask is used to specify the fields to be
-                overwritten in the DataSource resource by the update.
-                The fields specified in the update_mask are relative to
-                the resource, not the full request. A field will be
-                overwritten if it is in the mask. If the user does not
-                provide a mask then the request will fail.
-
-                This corresponds to the ``update_mask`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.api_core.operation_async.AsyncOperation:
-                An object representing a long-running operation.
-
-                The result type for the operation will be :class:`google.cloud.backupdr_v1.types.DataSource` Message describing a DataSource object.
-                Datasource object used to represent
-                Datasource details for both admin and basic view.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [data_source, update_mask]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, backupvault.UpdateDataSourceRequest):
-            request = backupvault.UpdateDataSourceRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if data_source is not None:
-            request.data_source = data_source
-        if update_mask is not None:
-            request.update_mask = update_mask
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.update_data_source]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("data_source.name", request.data_source.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Wrap the response in an operation future.
-        response = operation_async.from_gapic(
-            response,
-            self._client._transport.operations_client,
-            backupvault.DataSource,
-            metadata_type=backupdr.OperationMetadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def list_backups(self,
-            request: Optional[Union[backupvault.ListBackupsRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> pagers.ListBackupsAsyncPager:
-        r"""Lists Backups in a given project and location.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import backupdr_v1
-
-            async def sample_list_backups():
-                # Create a client
-                client = backupdr_v1.BackupDRAsyncClient()
-
-                # Initialize request argument(s)
-                request = backupdr_v1.ListBackupsRequest(
-                    parent="parent_value",
-                )
-
-                # Make the request
-                page_result = await client.list_backups(request=request)
-
-                # Handle the response
-                async for response in page_result:
-                    print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.backupdr_v1.types.ListBackupsRequest, dict]]):
-                The request object. Request message for listing Backups.
-            parent (:class:`str`):
-                Required. The project and location for which to retrieve
-                backup information, in the format
-                'projects/{project_id}/locations/{location}'.
-                In Cloud Backup and DR, locations map to Google Cloud
-                regions, for example **us-central1**. To retrieve
-                backups for all locations, use "-" for the '{location}'
-                value.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupsAsyncPager:
-                Response message for listing Backups.
-
-                Iterating over this object will yield
-                results and resolve additional pages
-                automatically.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [parent]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, backupvault.ListBackupsRequest):
-            request = backupvault.ListBackupsRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if parent is not None:
-            request.parent = parent
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.list_backups]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # This method is paged; wrap the response in a pager, which provides
-        # an `__aiter__` convenience method.
-        response = pagers.ListBackupsAsyncPager(
-            method=rpc,
-            request=request,
-            response=response,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def get_backup(self,
-            request: Optional[Union[backupvault.GetBackupRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> backupvault.Backup:
-        r"""Gets details of a Backup.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import backupdr_v1 - - async def sample_get_backup(): - # Create a client - client = backupdr_v1.BackupDRAsyncClient() - - # Initialize request argument(s) - request = backupdr_v1.GetBackupRequest( - name="name_value", - ) - - # Make the request - response = await client.get_backup(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.backupdr_v1.types.GetBackupRequest, dict]]): - The request object. Request message for getting a Backup. - name (:class:`str`): - Required. Name of the data source resource name, in the - format - 'projects/{project_id}/locations/{location}/backupVaults/{backupVault}/dataSources/{datasource}/backups/{backup}' - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.backupdr_v1.types.Backup: - Message describing a Backup object. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backupvault.GetBackupRequest): - request = backupvault.GetBackupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_backup] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_backup(self, - request: Optional[Union[backupvault.UpdateBackupRequest, dict]] = None, - *, - backup: Optional[backupvault.Backup] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates the settings of a Backup. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import backupdr_v1 - - async def sample_update_backup(): - # Create a client - client = backupdr_v1.BackupDRAsyncClient() - - # Initialize request argument(s) - request = backupdr_v1.UpdateBackupRequest( - ) - - # Make the request - operation = client.update_backup(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.backupdr_v1.types.UpdateBackupRequest, dict]]): - The request object. Request message for updating a - Backup. - backup (:class:`google.cloud.backupdr_v1.types.Backup`): - Required. The resource being updated - This corresponds to the ``backup`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Field mask is used to specify the fields to be - overwritten in the Backup resource by the update. The - fields specified in the update_mask are relative to the - resource, not the full request. A field will be - overwritten if it is in the mask. If the user does not - provide a mask then the request will fail. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.backupdr_v1.types.Backup` Message - describing a Backup object. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [backup, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backupvault.UpdateBackupRequest): - request = backupvault.UpdateBackupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if backup is not None: - request.backup = backup - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
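        # The wrapped method below carries the default retry/timeout policy from
        # the client's gapic configuration; both can be overridden per call. A
        # minimal sketch of an explicit retry policy and field mask (the
        # retryable-error set and field paths here are illustrative, not the
        # library's published defaults):
        #
        #     from google.api_core import exceptions as core_exceptions
        #     from google.api_core import retry_async
        #     from google.protobuf import field_mask_pb2
        #
        #     custom_retry = retry_async.AsyncRetry(
        #         initial=1.0,       # seconds before the first retry
        #         maximum=30.0,      # cap on the backoff delay
        #         multiplier=2.0,    # exponential backoff factor
        #         predicate=retry_async.if_exception_type(
        #             core_exceptions.ServiceUnavailable,
        #         ),
        #     )
        #     operation = await client.update_backup(
        #         backup=backup,
        #         update_mask=field_mask_pb2.FieldMask(paths=["description"]),
        #         retry=custom_retry,
        #         timeout=120.0,
        #     )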
- rpc = self._client._transport._wrapped_methods[self._client._transport.update_backup] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("backup.name", request.backup.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - backupvault.Backup, - metadata_type=backupdr.OperationMetadata, - ) - - # Done; return the response. - return response - - async def delete_backup(self, - request: Optional[Union[backupvault.DeleteBackupRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes a Backup. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import backupdr_v1 - - async def sample_delete_backup(): - # Create a client - client = backupdr_v1.BackupDRAsyncClient() - - # Initialize request argument(s) - request = backupdr_v1.DeleteBackupRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_backup(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.backupdr_v1.types.DeleteBackupRequest, dict]]): - The request object. Message for deleting a Backup. - name (:class:`str`): - Required. Name of the resource. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.backupdr_v1.types.Backup` Message - describing a Backup object. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
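        # The two calling styles are therefore mutually exclusive. A sketch
        # (the resource name is a placeholder):
        #
        #     name = "projects/p/locations/l/backupVaults/v/dataSources/d/backups/b"
        #     await client.delete_backup(name=name)               # flattened field, or
        #     await client.delete_backup(request={"name": name})  # request object/dict
        #     # ...but passing both raises ValueError before any RPC is sent.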
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backupvault.DeleteBackupRequest): - request = backupvault.DeleteBackupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_backup] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - backupvault.Backup, - metadata_type=backupdr.OperationMetadata, - ) - - # Done; return the response. - return response - - async def restore_backup(self, - request: Optional[Union[backupvault.RestoreBackupRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Restore from a Backup - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import backupdr_v1 - - async def sample_restore_backup(): - # Create a client - client = backupdr_v1.BackupDRAsyncClient() - - # Initialize request argument(s) - compute_instance_target_environment = backupdr_v1.ComputeInstanceTargetEnvironment() - compute_instance_target_environment.project = "project_value" - compute_instance_target_environment.zone = "zone_value" - - request = backupdr_v1.RestoreBackupRequest( - compute_instance_target_environment=compute_instance_target_environment, - name="name_value", - ) - - # Make the request - operation = client.restore_backup(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.backupdr_v1.types.RestoreBackupRequest, dict]]): - The request object. Request message for restoring from a - Backup. - name (:class:`str`): - Required. The resource name of the Backup instance, in - the format - 'projects/*/locations/*/backupVaults/*/dataSources/*/backups/'. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.backupdr_v1.types.RestoreBackupResponse` - Response message for restoring from a Backup. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backupvault.RestoreBackupRequest): - request = backupvault.RestoreBackupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.restore_backup] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - backupvault.RestoreBackupResponse, - metadata_type=backupdr.OperationMetadata, - ) - - # Done; return the response. - return response - - async def create_backup_plan(self, - request: Optional[Union[backupplan.CreateBackupPlanRequest, dict]] = None, - *, - parent: Optional[str] = None, - backup_plan: Optional[backupplan.BackupPlan] = None, - backup_plan_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Create a BackupPlan - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import backupdr_v1 - - async def sample_create_backup_plan(): - # Create a client - client = backupdr_v1.BackupDRAsyncClient() - - # Initialize request argument(s) - backup_plan = backupdr_v1.BackupPlan() - backup_plan.backup_rules.standard_schedule.recurrence_type = "YEARLY" - backup_plan.backup_rules.standard_schedule.backup_window.start_hour_of_day = 1820 - backup_plan.backup_rules.standard_schedule.backup_window.end_hour_of_day = 1573 - backup_plan.backup_rules.standard_schedule.time_zone = "time_zone_value" - backup_plan.backup_rules.rule_id = "rule_id_value" - backup_plan.backup_rules.backup_retention_days = 2237 - backup_plan.resource_type = "resource_type_value" - backup_plan.backup_vault = "backup_vault_value" - - request = backupdr_v1.CreateBackupPlanRequest( - parent="parent_value", - backup_plan_id="backup_plan_id_value", - backup_plan=backup_plan, - ) - - # Make the request - operation = client.create_backup_plan(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.backupdr_v1.types.CreateBackupPlanRequest, dict]]): - The request object. The request message for creating a ``BackupPlan``. - parent (:class:`str`): - Required. The ``BackupPlan`` project and location in the - format ``projects/{project}/locations/{location}``. In - Cloud BackupDR locations map to GCP regions, for example - **us-central1**. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - backup_plan (:class:`google.cloud.backupdr_v1.types.BackupPlan`): - Required. The ``BackupPlan`` resource object to create. - This corresponds to the ``backup_plan`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - backup_plan_id (:class:`str`): - Required. The name of the ``BackupPlan`` to create. The - name must be unique for the specified project and - location.The name must start with a lowercase letter - followed by up to 62 lowercase letters, numbers, or - hyphens. Pattern, /[a-z][a-z0-9-]{,62}/. - - This corresponds to the ``backup_plan_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.backupdr_v1.types.BackupPlan` A BackupPlan specifies some common fields, such as description as well - as one or more BackupRule messages. Each BackupRule - has a retention policy and defines a schedule by - which the system is to perform backup workloads. - - """ - # Create or coerce a protobuf request object. 
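        # "Coerce" covers plain dicts too: proto-plus request classes accept a
        # dict keyed by field name, so the call can be made without constructing
        # the request type explicitly. A sketch (values are placeholders):
        #
        #     operation = await client.create_backup_plan(request={
        #         "parent": "projects/my-project/locations/us-central1",
        #         "backup_plan_id": "my-plan",
        #         "backup_plan": backup_plan,  # a backupdr_v1.BackupPlan as in the sample
        #     })
        #     created = await operation.result()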
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, backup_plan, backup_plan_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backupplan.CreateBackupPlanRequest): - request = backupplan.CreateBackupPlanRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if backup_plan is not None: - request.backup_plan = backup_plan - if backup_plan_id is not None: - request.backup_plan_id = backup_plan_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_backup_plan] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - backupplan.BackupPlan, - metadata_type=backupdr.OperationMetadata, - ) - - # Done; return the response. - return response - - async def get_backup_plan(self, - request: Optional[Union[backupplan.GetBackupPlanRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> backupplan.BackupPlan: - r"""Gets details of a single BackupPlan. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import backupdr_v1 - - async def sample_get_backup_plan(): - # Create a client - client = backupdr_v1.BackupDRAsyncClient() - - # Initialize request argument(s) - request = backupdr_v1.GetBackupPlanRequest( - name="name_value", - ) - - # Make the request - response = await client.get_backup_plan(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.backupdr_v1.types.GetBackupPlanRequest, dict]]): - The request object. The request message for getting a ``BackupPlan``. - name (:class:`str`): - Required. The resource name of the ``BackupPlan`` to - retrieve. - - Format: - ``projects/{project}/locations/{location}/backupPlans/{backup_plan}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.backupdr_v1.types.BackupPlan: - A BackupPlan specifies some common fields, such as description as well - as one or more BackupRule messages. Each BackupRule - has a retention policy and defines a schedule by - which the system is to perform backup workloads. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backupplan.GetBackupPlanRequest): - request = backupplan.GetBackupPlanRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_backup_plan] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_backup_plans(self, - request: Optional[Union[backupplan.ListBackupPlansRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListBackupPlansAsyncPager: - r"""Lists BackupPlans in a given project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import backupdr_v1 - - async def sample_list_backup_plans(): - # Create a client - client = backupdr_v1.BackupDRAsyncClient() - - # Initialize request argument(s) - request = backupdr_v1.ListBackupPlansRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_backup_plans(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.backupdr_v1.types.ListBackupPlansRequest, dict]]): - The request object. The request message for getting a list ``BackupPlan``. - parent (:class:`str`): - Required. The project and location for which to retrieve - ``BackupPlans`` information. Format: - ``projects/{project}/locations/{location}``. In Cloud - BackupDR, locations map to GCP regions, for e.g. - **us-central1**. To retrieve backup plans for all - locations, use "-" for the ``{location}`` value. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlansAsyncPager: - The response message for getting a list of BackupPlan. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backupplan.ListBackupPlansRequest): - request = backupplan.ListBackupPlansRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_backup_plans] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. 
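        # The pager hides page tokens entirely: iterating the pager itself
        # yields individual BackupPlan messages, while its `pages` attribute
        # yields whole ListBackupPlansResponse pages. A sketch (the parent is a
        # placeholder):
        #
        #     pager = await client.list_backup_plans(parent="projects/p/locations/-")
        #     async for plan in pager:        # item by item
        #         print(plan.name)
        #
        #     pager = await client.list_backup_plans(parent="projects/p/locations/-")
        #     async for page in pager.pages:  # page by page
        #         print(len(page.backup_plans))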
- response = pagers.ListBackupPlansAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_backup_plan(self, - request: Optional[Union[backupplan.DeleteBackupPlanRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes a single BackupPlan. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import backupdr_v1 - - async def sample_delete_backup_plan(): - # Create a client - client = backupdr_v1.BackupDRAsyncClient() - - # Initialize request argument(s) - request = backupdr_v1.DeleteBackupPlanRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_backup_plan(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.backupdr_v1.types.DeleteBackupPlanRequest, dict]]): - The request object. The request message for deleting a ``BackupPlan``. - name (:class:`str`): - Required. The resource name of the ``BackupPlan`` to - delete. - - Format: - ``projects/{project}/locations/{location}/backupPlans/{backup_plan}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
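        # Deletion is a long-running operation whose result type is Empty, so
        # the useful outcome is simply that `result()` returns without raising.
        # A sketch (the resource name is a placeholder):
        #
        #     operation = await client.delete_backup_plan(
        #         name="projects/p/locations/us-central1/backupPlans/my-plan",
        #     )
        #     await operation.result()  # returns Empty; raises on failure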
- if not isinstance(request, backupplan.DeleteBackupPlanRequest): - request = backupplan.DeleteBackupPlanRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_backup_plan] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=backupdr.OperationMetadata, - ) - - # Done; return the response. - return response - - async def create_backup_plan_association(self, - request: Optional[Union[backupplanassociation.CreateBackupPlanAssociationRequest, dict]] = None, - *, - parent: Optional[str] = None, - backup_plan_association: Optional[backupplanassociation.BackupPlanAssociation] = None, - backup_plan_association_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Create a BackupPlanAssociation - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import backupdr_v1 - - async def sample_create_backup_plan_association(): - # Create a client - client = backupdr_v1.BackupDRAsyncClient() - - # Initialize request argument(s) - backup_plan_association = backupdr_v1.BackupPlanAssociation() - backup_plan_association.resource_type = "resource_type_value" - backup_plan_association.resource = "resource_value" - backup_plan_association.backup_plan = "backup_plan_value" - - request = backupdr_v1.CreateBackupPlanAssociationRequest( - parent="parent_value", - backup_plan_association_id="backup_plan_association_id_value", - backup_plan_association=backup_plan_association, - ) - - # Make the request - operation = client.create_backup_plan_association(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.backupdr_v1.types.CreateBackupPlanAssociationRequest, dict]]): - The request object. Request message for creating a backup - plan. - parent (:class:`str`): - Required. The backup plan association project and - location in the format - ``projects/{project_id}/locations/{location}``. In Cloud - BackupDR locations map to GCP regions, for example - **us-central1**. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- backup_plan_association (:class:`google.cloud.backupdr_v1.types.BackupPlanAssociation`): - Required. The resource being created - This corresponds to the ``backup_plan_association`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - backup_plan_association_id (:class:`str`): - Required. The name of the backup plan - association to create. The name must be - unique for the specified project and - location. - - This corresponds to the ``backup_plan_association_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.backupdr_v1.types.BackupPlanAssociation` A BackupPlanAssociation represents a single BackupPlanAssociation which - contains details like workload, backup plan etc - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, backup_plan_association, backup_plan_association_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backupplanassociation.CreateBackupPlanAssociationRequest): - request = backupplanassociation.CreateBackupPlanAssociationRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if backup_plan_association is not None: - request.backup_plan_association = backup_plan_association - if backup_plan_association_id is not None: - request.backup_plan_association_id = backup_plan_association_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_backup_plan_association] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - backupplanassociation.BackupPlanAssociation, - metadata_type=backupdr.OperationMetadata, - ) - - # Done; return the response. 
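        # The operation future assembled above also exposes the typed
        # OperationMetadata declared via `metadata_type`, which callers can
        # inspect while the LRO is still running. A sketch (the request is a
        # placeholder):
        #
        #     operation = await client.create_backup_plan_association(request=request)
        #     if operation.metadata is not None:
        #         print(operation.metadata.verb, operation.metadata.target)
        #     association = await operation.result()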
- return response - - async def get_backup_plan_association(self, - request: Optional[Union[backupplanassociation.GetBackupPlanAssociationRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> backupplanassociation.BackupPlanAssociation: - r"""Gets details of a single BackupPlanAssociation. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import backupdr_v1 - - async def sample_get_backup_plan_association(): - # Create a client - client = backupdr_v1.BackupDRAsyncClient() - - # Initialize request argument(s) - request = backupdr_v1.GetBackupPlanAssociationRequest( - name="name_value", - ) - - # Make the request - response = await client.get_backup_plan_association(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.backupdr_v1.types.GetBackupPlanAssociationRequest, dict]]): - The request object. Request message for getting a - BackupPlanAssociation resource. - name (:class:`str`): - Required. Name of the backup plan association resource, - in the format - ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.backupdr_v1.types.BackupPlanAssociation: - A BackupPlanAssociation represents a - single BackupPlanAssociation which - contains details like workload, backup - plan etc - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backupplanassociation.GetBackupPlanAssociationRequest): - request = backupplanassociation.GetBackupPlanAssociationRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
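        # A few lines below, the resource name is folded into a single
        # "x-goog-request-params" metadata entry (a URL-encoded key=value
        # string) so the backend can route the call without parsing the request
        # body. Metadata passed by the caller is preserved; the routing entry is
        # appended to it. A sketch of passing extra metadata (the header name
        # here is hypothetical):
        #
        #     response = await client.get_backup_plan_association(
        #         name="projects/p/locations/l/backupPlanAssociations/a",
        #         metadata=(("x-goog-custom-audit-tag", "example"),),
        #     )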
- rpc = self._client._transport._wrapped_methods[self._client._transport.get_backup_plan_association] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_backup_plan_associations(self, - request: Optional[Union[backupplanassociation.ListBackupPlanAssociationsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListBackupPlanAssociationsAsyncPager: - r"""Lists BackupPlanAssociations in a given project and - location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import backupdr_v1 - - async def sample_list_backup_plan_associations(): - # Create a client - client = backupdr_v1.BackupDRAsyncClient() - - # Initialize request argument(s) - request = backupdr_v1.ListBackupPlanAssociationsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_backup_plan_associations(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.backupdr_v1.types.ListBackupPlanAssociationsRequest, dict]]): - The request object. Request message for List - BackupPlanAssociation - parent (:class:`str`): - Required. The project and location for which to retrieve - backup Plan Associations information, in the format - ``projects/{project_id}/locations/{location}``. In Cloud - BackupDR, locations map to GCP regions, for example - **us-central1**. To retrieve backup plan associations - for all locations, use "-" for the ``{location}`` value. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlanAssociationsAsyncPager: - Response message for List - BackupPlanAssociation - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
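        # As the docstring above notes, "-" can replace the location to
        # aggregate results across all regions. A sketch (the project id is a
        # placeholder):
        #
        #     pager = await client.list_backup_plan_associations(
        #         parent="projects/my-project/locations/-",
        #     )
        #     async for association in pager:
        #         print(association.name, association.backup_plan)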
- flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backupplanassociation.ListBackupPlanAssociationsRequest): - request = backupplanassociation.ListBackupPlanAssociationsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_backup_plan_associations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListBackupPlanAssociationsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_backup_plan_association(self, - request: Optional[Union[backupplanassociation.DeleteBackupPlanAssociationRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes a single BackupPlanAssociation. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import backupdr_v1 - - async def sample_delete_backup_plan_association(): - # Create a client - client = backupdr_v1.BackupDRAsyncClient() - - # Initialize request argument(s) - request = backupdr_v1.DeleteBackupPlanAssociationRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_backup_plan_association(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.backupdr_v1.types.DeleteBackupPlanAssociationRequest, dict]]): - The request object. Request message for deleting a backup - plan association. - name (:class:`str`): - Required. 
Name of the backup plan association resource, - in the format - ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backupplanassociation.DeleteBackupPlanAssociationRequest): - request = backupplanassociation.DeleteBackupPlanAssociationRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_backup_plan_association] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=backupdr.OperationMetadata, - ) - - # Done; return the response. - return response - - async def trigger_backup(self, - request: Optional[Union[backupplanassociation.TriggerBackupRequest, dict]] = None, - *, - name: Optional[str] = None, - rule_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Triggers a new Backup. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import backupdr_v1 - - async def sample_trigger_backup(): - # Create a client - client = backupdr_v1.BackupDRAsyncClient() - - # Initialize request argument(s) - request = backupdr_v1.TriggerBackupRequest( - name="name_value", - rule_id="rule_id_value", - ) - - # Make the request - operation = client.trigger_backup(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.backupdr_v1.types.TriggerBackupRequest, dict]]): - The request object. Request message for triggering a - backup. - name (:class:`str`): - Required. Name of the backup plan association resource, - in the format - ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - rule_id (:class:`str`): - Required. backup rule_id for which a backup needs to be - triggered. - - This corresponds to the ``rule_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.backupdr_v1.types.BackupPlanAssociation` A BackupPlanAssociation represents a single BackupPlanAssociation which - contains details like workload, backup plan etc - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name, rule_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backupplanassociation.TriggerBackupRequest): - request = backupplanassociation.TriggerBackupRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if rule_id is not None: - request.rule_id = rule_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
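        # Equivalent call with flattened arguments; `rule_id` must name one of
        # the backup rules defined on the associated BackupPlan. A sketch
        # (values are placeholders):
        #
        #     operation = await client.trigger_backup(
        #         name="projects/p/locations/l/backupPlanAssociations/a",
        #         rule_id="rule-1",
        #     )
        #     association = await operation.result()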
- rpc = self._client._transport._wrapped_methods[self._client._transport.trigger_backup] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - backupplanassociation.BackupPlanAssociation, - metadata_type=backupdr.OperationMetadata, - ) - - # Done; return the response. - return response - - async def initialize_service(self, - request: Optional[Union[backupdr.InitializeServiceRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Initializes the service related config for a project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import backupdr_v1 - - async def sample_initialize_service(): - # Create a client - client = backupdr_v1.BackupDRAsyncClient() - - # Initialize request argument(s) - request = backupdr_v1.InitializeServiceRequest( - name="name_value", - resource_type="resource_type_value", - ) - - # Make the request - operation = client.initialize_service(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.backupdr_v1.types.InitializeServiceRequest, dict]]): - The request object. Request message for initializing the - service. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.backupdr_v1.types.InitializeServiceResponse` - Response message for initializing the service. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backupdr.InitializeServiceRequest): - request = backupdr.InitializeServiceRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
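- # The routing header assembled below is transmitted as the
- # `x-goog-request-params` request metadata key (the key used by
- # gapic_v1.routing_header.to_grpc_metadata), which lets the backend
- # route the call based on the request's `name` field.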
- rpc = self._client._transport._wrapped_methods[self._client._transport.initialize_service] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - backupdr.InitializeServiceResponse, - metadata_type=backupdr.OperationMetadata, - ) - - # Done; return the response. - return response - - async def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. 
If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def set_iam_policy( - self, - request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM access control policy on the specified function. - - Replaces any existing policy. - - Args: - request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): - The request object. Request message for `SetIamPolicy` - method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. 
-
- **JSON Example**
-
- ::
-
- {
- "bindings": [
- {
- "role": "roles/resourcemanager.organizationAdmin",
- "members": [
- "user:mike@example.com",
- "group:admins@example.com",
- "domain:google.com",
- "serviceAccount:my-project-id@appspot.gserviceaccount.com"
- ]
- },
- {
- "role": "roles/resourcemanager.organizationViewer",
- "members": ["user:eve@example.com"],
- "condition": {
- "title": "expirable access",
- "description": "Does not grant access after Sep 2020",
- "expression": "request.time <
- timestamp('2020-10-01T00:00:00.000Z')",
- }
- }
- ]
- }
-
- **YAML Example**
-
- ::
-
- bindings:
- - members:
- - user:mike@example.com
- - group:admins@example.com
- - domain:google.com
- - serviceAccount:my-project-id@appspot.gserviceaccount.com
- role: roles/resourcemanager.organizationAdmin
- - members:
- - user:eve@example.com
- role: roles/resourcemanager.organizationViewer
- condition:
- title: expirable access
- description: Does not grant access after Sep 2020
- expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
-
- For a description of IAM and its features, see the `IAM
- developer's
- guide <https://cloud.google.com/iam/docs>`__.
- """
- # Create or coerce a protobuf request object.
-
- # The request isn't a proto-plus wrapped type,
- # so it must be constructed via keyword expansion.
- if isinstance(request, dict):
- request = iam_policy_pb2.SetIamPolicyRequest(**request)
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self.transport._wrapped_methods[self._client._transport.set_iam_policy]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata(
- (("resource", request.resource),)),
- )
-
- # Validate the universe domain.
- self._client._validate_universe_domain()
-
- # Send the request.
- response = await rpc(
- request, retry=retry, timeout=timeout, metadata=metadata,)
-
- # Done; return the response.
- return response
-
- async def get_iam_policy(
- self,
- request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None,
- *,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> policy_pb2.Policy:
- r"""Gets the IAM access control policy for a function.
-
- Returns an empty policy if the function exists and does not have a
- policy set.
-
- Args:
- request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`):
- The request object. Request message for `GetIamPolicy`
- method.
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if
- any, should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
- Returns:
- ~.policy_pb2.Policy:
- Defines an Identity and Access Management (IAM) policy.
- It is used to specify access control policies for Cloud
- Platform resources.
- A ``Policy`` is a collection of ``bindings``. A
- ``binding`` binds one or more ``members`` to a single
- ``role``. Members can be user accounts, service
- accounts, Google groups, and domains (such as G Suite).
- A ``role`` is a named list of permissions (defined by
- IAM or configured by users). A ``binding`` can
- optionally specify a ``condition``, which is a logic
- expression that further constrains the role binding
- based on attributes about the request and/or target
- resource.
-
- **JSON Example**
-
- ::
-
- {
- "bindings": [
- {
- "role": "roles/resourcemanager.organizationAdmin",
- "members": [
- "user:mike@example.com",
- "group:admins@example.com",
- "domain:google.com",
- "serviceAccount:my-project-id@appspot.gserviceaccount.com"
- ]
- },
- {
- "role": "roles/resourcemanager.organizationViewer",
- "members": ["user:eve@example.com"],
- "condition": {
- "title": "expirable access",
- "description": "Does not grant access after Sep 2020",
- "expression": "request.time <
- timestamp('2020-10-01T00:00:00.000Z')",
- }
- }
- ]
- }
-
- **YAML Example**
-
- ::
-
- bindings:
- - members:
- - user:mike@example.com
- - group:admins@example.com
- - domain:google.com
- - serviceAccount:my-project-id@appspot.gserviceaccount.com
- role: roles/resourcemanager.organizationAdmin
- - members:
- - user:eve@example.com
- role: roles/resourcemanager.organizationViewer
- condition:
- title: expirable access
- description: Does not grant access after Sep 2020
- expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
-
- For a description of IAM and its features, see the `IAM
- developer's
- guide <https://cloud.google.com/iam/docs>`__.
- """
- # Create or coerce a protobuf request object.
-
- # The request isn't a proto-plus wrapped type,
- # so it must be constructed via keyword expansion.
- if isinstance(request, dict):
- request = iam_policy_pb2.GetIamPolicyRequest(**request)
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self.transport._wrapped_methods[self._client._transport.get_iam_policy]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata(
- (("resource", request.resource),)),
- )
-
- # Validate the universe domain.
- self._client._validate_universe_domain()
-
- # Send the request.
- response = await rpc(
- request, retry=retry, timeout=timeout, metadata=metadata,)
-
- # Done; return the response.
- return response
-
- async def test_iam_permissions(
- self,
- request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None,
- *,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> iam_policy_pb2.TestIamPermissionsResponse:
- r"""Tests the specified IAM permissions against the IAM access control
- policy for a function.
-
- If the function does not exist, this will return an empty set
- of permissions, not a NOT_FOUND error.
-
- Args:
- request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`):
- The request object. Request message for
- `TestIamPermissions` method.
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors,
- if any, should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
- Returns:
- ~.iam_policy_pb2.TestIamPermissionsResponse:
- Response message for ``TestIamPermissions`` method.
- """
- # Create or coerce a protobuf request object.
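- # A plain-dict request is expanded into the proto type below; for
- # example, a hypothetical call (resource and permission names are
- # illustrative only):
- #   {"resource": "projects/p/locations/l/managementServers/ms",
- #    "permissions": ["backupdr.managementServers.get"]}
- # becomes iam_policy_pb2.TestIamPermissionsRequest(resource=...,
- # permissions=[...]).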
- - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.test_iam_permissions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def __aenter__(self) -> "BackupDRAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "BackupDRAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/services/backup_dr/client.py b/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/services/backup_dr/client.py deleted file mode 100644 index 0c48fd9d1345..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/services/backup_dr/client.py +++ /dev/null @@ -1,4795 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.backupdr_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.backupdr_v1.services.backup_dr import pagers -from google.cloud.backupdr_v1.types import backupdr -from google.cloud.backupdr_v1.types import backupplan -from google.cloud.backupdr_v1.types import backupplanassociation -from google.cloud.backupdr_v1.types import backupvault -from google.cloud.backupdr_v1.types import backupvault_ba -from google.cloud.backupdr_v1.types import backupvault_gce -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.protobuf import wrappers_pb2 # type: ignore -from .transports.base import BackupDRTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import BackupDRGrpcTransport -from .transports.grpc_asyncio import BackupDRGrpcAsyncIOTransport -from .transports.rest import BackupDRRestTransport - - -class BackupDRClientMeta(type): - """Metaclass for the BackupDR client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[BackupDRTransport]] - _transport_registry["grpc"] = BackupDRGrpcTransport - _transport_registry["grpc_asyncio"] = BackupDRGrpcAsyncIOTransport - _transport_registry["rest"] = BackupDRRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[BackupDRTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. 
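- # The valid labels are the keys registered above ("grpc",
- # "grpc_asyncio", and "rest"); an unrecognized label raises KeyError
- # from the registry lookup.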
- if label:
- return cls._transport_registry[label]
-
- # No transport is requested; return the default (that is, the first one
- # in the dictionary).
- return next(iter(cls._transport_registry.values()))
-
-
-class BackupDRClient(metaclass=BackupDRClientMeta):
- """The BackupDR Service"""
-
- @staticmethod
- def _get_default_mtls_endpoint(api_endpoint):
- """Converts api endpoint to mTLS endpoint.
-
- Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
- "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
- Args:
- api_endpoint (Optional[str]): the api endpoint to convert.
- Returns:
- str: converted mTLS api endpoint.
- """
- if not api_endpoint:
- return api_endpoint
-
- mtls_endpoint_re = re.compile(
- r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
- )
-
- m = mtls_endpoint_re.match(api_endpoint)
- name, mtls, sandbox, googledomain = m.groups()
- if mtls or not googledomain:
- return api_endpoint
-
- if sandbox:
- return api_endpoint.replace(
- "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
- )
-
- return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
-
- # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
- DEFAULT_ENDPOINT = "backupdr.googleapis.com"
- DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
- DEFAULT_ENDPOINT
- )
-
- _DEFAULT_ENDPOINT_TEMPLATE = "backupdr.{UNIVERSE_DOMAIN}"
- _DEFAULT_UNIVERSE = "googleapis.com"
-
- @classmethod
- def from_service_account_info(cls, info: dict, *args, **kwargs):
- """Creates an instance of this client using the provided credentials
- info.
-
- Args:
- info (dict): The service account private key info.
- args: Additional arguments to pass to the constructor.
- kwargs: Additional arguments to pass to the constructor.
-
- Returns:
- BackupDRClient: The constructed client.
- """
- credentials = service_account.Credentials.from_service_account_info(info)
- kwargs["credentials"] = credentials
- return cls(*args, **kwargs)
-
- @classmethod
- def from_service_account_file(cls, filename: str, *args, **kwargs):
- """Creates an instance of this client using the provided credentials
- file.
-
- Args:
- filename (str): The path to the service account private key json
- file.
- args: Additional arguments to pass to the constructor.
- kwargs: Additional arguments to pass to the constructor.
-
- Returns:
- BackupDRClient: The constructed client.
- """
- credentials = service_account.Credentials.from_service_account_file(
- filename)
- kwargs["credentials"] = credentials
- return cls(*args, **kwargs)
-
- from_service_account_json = from_service_account_file
-
- @property
- def transport(self) -> BackupDRTransport:
- """Returns the transport used by the client instance.
-
- Returns:
- BackupDRTransport: The transport used by the client
- instance.
- """
- return self._transport
-
- @staticmethod
- def backup_path(project: str,location: str,backupvault: str,datasource: str,backup: str,) -> str:
- """Returns a fully-qualified backup string."""
- return "projects/{project}/locations/{location}/backupVaults/{backupvault}/dataSources/{datasource}/backups/{backup}".format(project=project, location=location, backupvault=backupvault, datasource=datasource, backup=backup, )
-
- @staticmethod
- def parse_backup_path(path: str) -> Dict[str,str]:
- """Parses a backup path into its component segments."""
- m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/backupVaults/(?P<backupvault>.+?)/dataSources/(?P<datasource>.+?)/backups/(?P<backup>.+?)$", path)
- return m.groupdict() if m else {}
-
- @staticmethod
- def backup_plan_path(project: str,location: str,backup_plan: str,) -> str:
- """Returns a fully-qualified backup_plan string."""
- return "projects/{project}/locations/{location}/backupPlans/{backup_plan}".format(project=project, location=location, backup_plan=backup_plan, )
-
- @staticmethod
- def parse_backup_plan_path(path: str) -> Dict[str,str]:
- """Parses a backup_plan path into its component segments."""
- m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/backupPlans/(?P<backup_plan>.+?)$", path)
- return m.groupdict() if m else {}
-
- @staticmethod
- def backup_plan_association_path(project: str,location: str,backup_plan_association: str,) -> str:
- """Returns a fully-qualified backup_plan_association string."""
- return "projects/{project}/locations/{location}/backupPlanAssociations/{backup_plan_association}".format(project=project, location=location, backup_plan_association=backup_plan_association, )
-
- @staticmethod
- def parse_backup_plan_association_path(path: str) -> Dict[str,str]:
- """Parses a backup_plan_association path into its component segments."""
- m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/backupPlanAssociations/(?P<backup_plan_association>.+?)$", path)
- return m.groupdict() if m else {}
-
- @staticmethod
- def backup_vault_path(project: str,location: str,backupvault: str,) -> str:
- """Returns a fully-qualified backup_vault string."""
- return "projects/{project}/locations/{location}/backupVaults/{backupvault}".format(project=project, location=location, backupvault=backupvault, )
-
- @staticmethod
- def parse_backup_vault_path(path: str) -> Dict[str,str]:
- """Parses a backup_vault path into its component segments."""
- m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/backupVaults/(?P<backupvault>.+?)$", path)
- return m.groupdict() if m else {}
-
- @staticmethod
- def data_source_path(project: str,location: str,backupvault: str,datasource: str,) -> str:
- """Returns a fully-qualified data_source string."""
- return "projects/{project}/locations/{location}/backupVaults/{backupvault}/dataSources/{datasource}".format(project=project, location=location, backupvault=backupvault, datasource=datasource, )
-
- @staticmethod
- def parse_data_source_path(path: str) -> Dict[str,str]:
- """Parses a data_source path into its component segments."""
- m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/backupVaults/(?P<backupvault>.+?)/dataSources/(?P<datasource>.+?)$", path)
- return m.groupdict() if m else {}
-
- @staticmethod
- def management_server_path(project: str,location: str,managementserver: str,) -> str:
- """Returns a fully-qualified management_server string."""
- return "projects/{project}/locations/{location}/managementServers/{managementserver}".format(project=project, location=location, managementserver=managementserver, )
-
- @staticmethod
- def parse_management_server_path(path: str) -> Dict[str,str]:
- """Parses a management_server path into its component segments."""
- m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/managementServers/(?P<managementserver>.+?)$", path)
- return m.groupdict() if m else {}
-
- @staticmethod
- def common_billing_account_path(billing_account: str, ) -> str:
- """Returns a fully-qualified billing_account string."""
- return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
-
- @staticmethod
- def parse_common_billing_account_path(path: str) -> Dict[str,str]:
- """Parse a billing_account path into its component segments."""
- m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
- return m.groupdict() if m else {}
-
- @staticmethod
- def common_folder_path(folder: str, ) -> str:
- """Returns a fully-qualified folder string."""
- return "folders/{folder}".format(folder=folder, )
-
- @staticmethod
- def parse_common_folder_path(path: str) -> Dict[str,str]:
- """Parse a folder path into its component segments."""
- m = re.match(r"^folders/(?P<folder>.+?)$", path)
- return m.groupdict() if m else {}
-
- @staticmethod
- def common_organization_path(organization: str, ) -> str:
- """Returns a fully-qualified organization string."""
- return "organizations/{organization}".format(organization=organization, )
-
- @staticmethod
- def parse_common_organization_path(path: str) -> Dict[str,str]:
- """Parse an organization path into its component segments."""
- m = re.match(r"^organizations/(?P<organization>.+?)$", path)
- return m.groupdict() if m else {}
-
- @staticmethod
- def common_project_path(project: str, ) -> str:
- """Returns a fully-qualified project string."""
- return "projects/{project}".format(project=project, )
-
- @staticmethod
- def parse_common_project_path(path: str) -> Dict[str,str]:
- """Parse a project path into its component segments."""
- m = re.match(r"^projects/(?P<project>.+?)$", path)
- return m.groupdict() if m else {}
-
- @staticmethod
- def common_location_path(project: str, location: str, ) -> str:
- """Returns a fully-qualified location string."""
- return "projects/{project}/locations/{location}".format(project=project, location=location, )
-
- @staticmethod
- def parse_common_location_path(path: str) -> Dict[str,str]:
- """Parse a location path into its component segments."""
- m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
- return m.groupdict() if m else {}
-
- @classmethod
- def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
- """Deprecated. Return the API endpoint and client cert source for mutual TLS.
-
- The client cert source is determined in the following order:
- (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
- client cert source is None.
- (2) if `client_options.client_cert_source` is provided, use the provided one; if the
- default client cert source exists, use the default one; otherwise the client cert
- source is None.
-
- The API endpoint is determined in the following order:
- (1) if `client_options.api_endpoint` is provided, use the provided one.
- (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
- default mTLS endpoint; if the environment variable is "never", use the default API
- endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
- use the default API endpoint.
-
- More details can be found at https://google.aip.dev/auth/4114.
-
- Args:
- client_options (google.api_core.client_options.ClientOptions): Custom options for the
- client.
Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. - - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"]. - """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. 
- """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = BackupDRClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = BackupDRClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = BackupDRClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. - """ - universe_domain = BackupDRClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
- """
- if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]:
- return
-
- cred = self._transport._credentials
-
- # get_cred_info is only available in google-auth>=2.35.0
- if not hasattr(cred, "get_cred_info"):
- return
-
- # ignore the type check since pypy test fails when get_cred_info
- # is not available
- cred_info = cred.get_cred_info()  # type: ignore
- if cred_info and hasattr(error._details, "append"):
- error._details.append(json.dumps(cred_info))
-
- @property
- def api_endpoint(self):
- """Return the API endpoint used by the client instance.
-
- Returns:
- str: The API endpoint used by the client instance.
- """
- return self._api_endpoint
-
- @property
- def universe_domain(self) -> str:
- """Return the universe domain used by the client instance.
-
- Returns:
- str: The universe domain used by the client instance.
- """
- return self._universe_domain
-
- def __init__(self, *,
- credentials: Optional[ga_credentials.Credentials] = None,
- transport: Optional[Union[str, BackupDRTransport, Callable[..., BackupDRTransport]]] = None,
- client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
- client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
- ) -> None:
- """Instantiates the backup dr client.
-
- Args:
- credentials (Optional[google.auth.credentials.Credentials]): The
- authorization credentials to attach to requests. These
- credentials identify the application to the service; if none
- are specified, the client will attempt to ascertain the
- credentials from the environment.
- transport (Optional[Union[str,BackupDRTransport,Callable[..., BackupDRTransport]]]):
- The transport to use, or a Callable that constructs and returns a new transport.
- If a Callable is given, it will be called with the same set of initialization
- arguments as used in the BackupDRTransport constructor.
- If set to None, a transport is chosen automatically.
- client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
- Custom options for the client.
-
- 1. The ``api_endpoint`` property can be used to override the
- default endpoint provided by the client when ``transport`` is
- not explicitly provided. Only if this property is not set and
- ``transport`` was not explicitly provided, the endpoint is
- determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
- variable, which has one of the following values:
- "always" (always use the default mTLS endpoint), "never" (always
- use the default regular endpoint) and "auto" (auto-switch to the
- default mTLS endpoint if client certificate is present; this is
- the default value).
-
- 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
- is "true", then the ``client_cert_source`` property can be used
- to provide a client certificate for mTLS transport. If
- not provided, the default SSL client certificate will be used if
- present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
- set, no client certificate will be used.
-
- 3. The ``universe_domain`` property can be used to override the
- default "googleapis.com" universe. Note that the ``api_endpoint``
- property still takes precedence; and ``universe_domain`` is
- currently not supported for mTLS.
-
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
- your own client library.
- - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) - - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = BackupDRClient._read_environment_variables() - self._client_cert_source = BackupDRClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = BackupDRClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER - # Setup logging. - client_logging.initialize_logging() - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, BackupDRTransport) - if transport_provided: - # transport is a BackupDRTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = cast(BackupDRTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - BackupDRClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[BackupDRTransport], Callable[..., BackupDRTransport]] = ( - BackupDRClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., BackupDRTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.backupdr_v1.BackupDRClient`.", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "credentialsType": None, - } - ) - - def list_management_servers(self, - request: Optional[Union[backupdr.ListManagementServersRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListManagementServersPager: - r"""Lists ManagementServers in a given project and - location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import backupdr_v1 - - def sample_list_management_servers(): - # Create a client - client = backupdr_v1.BackupDRClient() - - # Initialize request argument(s) - request = backupdr_v1.ListManagementServersRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_management_servers(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.backupdr_v1.types.ListManagementServersRequest, dict]): - The request object. Request message for listing - management servers. - parent (str): - Required. 
The project and location for which to retrieve - management servers information, in the format - 'projects/{project_id}/locations/{location}'. In Cloud - BackupDR, locations map to Google Cloud regions, for - example **us-central1**. To retrieve management servers - for all locations, use "-" for the '{location}' value. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.backupdr_v1.services.backup_dr.pagers.ListManagementServersPager: - Response message for listing - management servers. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backupdr.ListManagementServersRequest): - request = backupdr.ListManagementServersRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_management_servers] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListManagementServersPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_management_server(self, - request: Optional[Union[backupdr.GetManagementServerRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> backupdr.ManagementServer: - r"""Gets details of a single ManagementServer. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import backupdr_v1
-
-            def sample_get_management_server():
-                # Create a client
-                client = backupdr_v1.BackupDRClient()
-
-                # Initialize request argument(s)
-                request = backupdr_v1.GetManagementServerRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                response = client.get_management_server(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.backupdr_v1.types.GetManagementServerRequest, dict]):
-                The request object. Request message for getting a
-                management server instance.
-            name (str):
-                Required. Name of the management server resource,
-                in the format
-                'projects/{project_id}/locations/{location}/managementServers/{resource_name}'
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.backupdr_v1.types.ManagementServer:
-                ManagementServer describes a single
-                BackupDR ManagementServer instance.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [name]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, backupdr.GetManagementServerRequest):
-            request = backupdr.GetManagementServerRequest(request)
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if name is not None:
-            request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.get_management_server]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
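The request-coercion logic above means each method accepts three interchangeable spellings of the same call. A minimal sketch for ``get_management_server`` (the project, location, and server IDs are placeholders, not values from this diff):

.. code-block:: python

    from google.cloud import backupdr_v1

    client = backupdr_v1.BackupDRClient()
    name = "projects/my-project/locations/us-central1/managementServers/my-ms"

    # All three are equivalent: a typed request, a plain dict (coerced into
    # a GetManagementServerRequest by the isinstance check above), or the
    # flattened ``name`` keyword.
    ms = client.get_management_server(
        request=backupdr_v1.GetManagementServerRequest(name=name)
    )
    ms = client.get_management_server(request={"name": name})
    ms = client.get_management_server(name=name)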
- return response - - def create_management_server(self, - request: Optional[Union[backupdr.CreateManagementServerRequest, dict]] = None, - *, - parent: Optional[str] = None, - management_server: Optional[backupdr.ManagementServer] = None, - management_server_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Creates a new ManagementServer in a given project and - location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import backupdr_v1 - - def sample_create_management_server(): - # Create a client - client = backupdr_v1.BackupDRClient() - - # Initialize request argument(s) - request = backupdr_v1.CreateManagementServerRequest( - parent="parent_value", - management_server_id="management_server_id_value", - ) - - # Make the request - operation = client.create_management_server(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.backupdr_v1.types.CreateManagementServerRequest, dict]): - The request object. Request message for creating a - management server instance. - parent (str): - Required. The management server project and location in - the format 'projects/{project_id}/locations/{location}'. - In Cloud Backup and DR locations map to Google Cloud - regions, for example **us-central1**. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - management_server (google.cloud.backupdr_v1.types.ManagementServer): - Required. A [management server - resource][google.cloud.backupdr.v1.ManagementServer] - - This corresponds to the ``management_server`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - management_server_id (str): - Required. The name of the management - server to create. The name must be - unique for the specified project and - location. - - This corresponds to the ``management_server_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.backupdr_v1.types.ManagementServer` - ManagementServer describes a single BackupDR - ManagementServer instance. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
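As a sketch of the guard that follows (the parent value is a placeholder; the request/flattened-argument combination is the point, not the values):

.. code-block:: python

    from google.cloud import backupdr_v1

    client = backupdr_v1.BackupDRClient()
    parent = "projects/my-project/locations/us-central1"  # placeholder

    # Supplying a request object *and* a flattened field trips the
    # has_flattened_params check below and raises ValueError.
    try:
        client.create_management_server(
            request=backupdr_v1.CreateManagementServerRequest(parent=parent),
            parent=parent,
        )
    except ValueError as exc:
        print(exc)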
- flattened_params = [parent, management_server, management_server_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backupdr.CreateManagementServerRequest): - request = backupdr.CreateManagementServerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if management_server is not None: - request.management_server = management_server - if management_server_id is not None: - request.management_server_id = management_server_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_management_server] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - backupdr.ManagementServer, - metadata_type=backupdr.OperationMetadata, - ) - - # Done; return the response. - return response - - def delete_management_server(self, - request: Optional[Union[backupdr.DeleteManagementServerRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Deletes a single ManagementServer. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import backupdr_v1 - - def sample_delete_management_server(): - # Create a client - client = backupdr_v1.BackupDRClient() - - # Initialize request argument(s) - request = backupdr_v1.DeleteManagementServerRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_management_server(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.backupdr_v1.types.DeleteManagementServerRequest, dict]): - The request object. Request message for deleting a - management server instance. - name (str): - Required. Name of the resource - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backupdr.DeleteManagementServerRequest): - request = backupdr.DeleteManagementServerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_management_server] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=backupdr.OperationMetadata, - ) - - # Done; return the response. - return response - - def create_backup_vault(self, - request: Optional[Union[backupvault.CreateBackupVaultRequest, dict]] = None, - *, - parent: Optional[str] = None, - backup_vault: Optional[backupvault.BackupVault] = None, - backup_vault_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Creates a new BackupVault in a given project and - location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import backupdr_v1
-
-            def sample_create_backup_vault():
-                # Create a client
-                client = backupdr_v1.BackupDRClient()
-
-                # Initialize request argument(s)
-                request = backupdr_v1.CreateBackupVaultRequest(
-                    parent="parent_value",
-                    backup_vault_id="backup_vault_id_value",
-                )
-
-                # Make the request
-                operation = client.create_backup_vault(request=request)
-
-                print("Waiting for operation to complete...")
-
-                response = operation.result()
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.backupdr_v1.types.CreateBackupVaultRequest, dict]):
-                The request object. Message for creating a BackupVault.
-            parent (str):
-                Required. The project and location in which to create the
-                backup vault, in the format
-                'projects/{project_id}/locations/{location}'.
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            backup_vault (google.cloud.backupdr_v1.types.BackupVault):
-                Required. The resource being created.
-                This corresponds to the ``backup_vault`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            backup_vault_id (str):
-                Required. ID of the requested object. If auto-generating
-                the ID server-side, remove this field and backup_vault_id
-                from the method_signature of the Create RPC.
-
-                This corresponds to the ``backup_vault_id`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.api_core.operation.Operation:
-                An object representing a long-running operation.
-
-                The result type for the operation will be
-                :class:`google.cloud.backupdr_v1.types.BackupVault`
-                Message describing a BackupVault object.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [parent, backup_vault, backup_vault_id]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, backupvault.CreateBackupVaultRequest):
-            request = backupvault.CreateBackupVaultRequest(request)
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if parent is not None:
-            request.parent = parent
-        if backup_vault is not None:
-            request.backup_vault = backup_vault
-        if backup_vault_id is not None:
-            request.backup_vault_id = backup_vault_id
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
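Since ``create_backup_vault`` returns a ``google.api_core.operation.Operation`` future, callers typically block on ``result()``. A sketch under placeholder IDs (a real request will likely need more ``BackupVault`` fields than shown here):

.. code-block:: python

    from google.cloud import backupdr_v1

    client = backupdr_v1.BackupDRClient()

    operation = client.create_backup_vault(
        parent="projects/my-project/locations/us-central1",  # placeholder
        backup_vault=backupdr_v1.BackupVault(),
        backup_vault_id="my-vault",                          # placeholder
    )

    print(operation.metadata)               # OperationMetadata for the create
    vault = operation.result(timeout=300)   # blocks; raises on failure/timeout
    print(vault.name)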
- rpc = self._transport._wrapped_methods[self._transport.create_backup_vault] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - backupvault.BackupVault, - metadata_type=backupdr.OperationMetadata, - ) - - # Done; return the response. - return response - - def list_backup_vaults(self, - request: Optional[Union[backupvault.ListBackupVaultsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListBackupVaultsPager: - r"""Lists BackupVaults in a given project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import backupdr_v1 - - def sample_list_backup_vaults(): - # Create a client - client = backupdr_v1.BackupDRClient() - - # Initialize request argument(s) - request = backupdr_v1.ListBackupVaultsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_backup_vaults(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.backupdr_v1.types.ListBackupVaultsRequest, dict]): - The request object. Request message for listing - backupvault stores. - parent (str): - Required. The project and location for which to retrieve - backupvault stores information, in the format - 'projects/{project_id}/locations/{location}'. In Cloud - Backup and DR, locations map to Google Cloud regions, - for example **us-central1**. To retrieve backupvault - stores for all locations, use "-" for the '{location}' - value. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupVaultsPager: - Response message for listing - BackupVaults. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
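The pager wrapping used by the list methods supports both item-level and page-level iteration; a pager is a one-shot iterator, so each loop in this sketch issues its own call (the parent value is a placeholder):

.. code-block:: python

    from google.cloud import backupdr_v1

    client = backupdr_v1.BackupDRClient()
    parent = "projects/my-project/locations/us-central1"  # placeholder

    # Item-level: extra pages are fetched transparently during iteration.
    for vault in client.list_backup_vaults(parent=parent):
        print(vault.name)

    # Page-level: inspect each raw ListBackupVaultsResponse via .pages.
    for page in client.list_backup_vaults(parent=parent).pages:
        print(len(page.backup_vaults))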
-        flattened_params = [parent]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, backupvault.ListBackupVaultsRequest):
-            request = backupvault.ListBackupVaultsRequest(request)
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if parent is not None:
-            request.parent = parent
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.list_backup_vaults]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # This method is paged; wrap the response in a pager, which provides
-        # an `__iter__` convenience method.
-        response = pagers.ListBackupVaultsPager(
-            method=rpc,
-            request=request,
-            response=response,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def fetch_usable_backup_vaults(self,
-            request: Optional[Union[backupvault.FetchUsableBackupVaultsRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> pagers.FetchUsableBackupVaultsPager:
-        r"""FetchUsableBackupVaults lists usable BackupVaults in
-        a given project and location. Usable BackupVaults are the
-        ones on which the user has the backupdr.backupVaults.get
-        permission.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import backupdr_v1
-
-            def sample_fetch_usable_backup_vaults():
-                # Create a client
-                client = backupdr_v1.BackupDRClient()
-
-                # Initialize request argument(s)
-                request = backupdr_v1.FetchUsableBackupVaultsRequest(
-                    parent="parent_value",
-                )
-
-                # Make the request
-                page_result = client.fetch_usable_backup_vaults(request=request)
-
-                # Handle the response
-                for response in page_result:
-                    print(response)
-
-        Args:
-            request (Union[google.cloud.backupdr_v1.types.FetchUsableBackupVaultsRequest, dict]):
-                The request object. Request message for fetching usable
-                BackupVaults.
-            parent (str):
-                Required. The project and location for which to retrieve
-                backupvault stores information, in the format
-                'projects/{project_id}/locations/{location}'. In Cloud
-                Backup and DR, locations map to Google Cloud regions,
-                for example **us-central1**. To retrieve backupvault
-                stores for all locations, use "-" for the '{location}'
-                value.
- - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.backupdr_v1.services.backup_dr.pagers.FetchUsableBackupVaultsPager: - Response message for fetching usable - BackupVaults. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backupvault.FetchUsableBackupVaultsRequest): - request = backupvault.FetchUsableBackupVaultsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.fetch_usable_backup_vaults] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.FetchUsableBackupVaultsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_backup_vault(self, - request: Optional[Union[backupvault.GetBackupVaultRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> backupvault.BackupVault: - r"""Gets details of a BackupVault. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import backupdr_v1
-
-            def sample_get_backup_vault():
-                # Create a client
-                client = backupdr_v1.BackupDRClient()
-
-                # Initialize request argument(s)
-                request = backupdr_v1.GetBackupVaultRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                response = client.get_backup_vault(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.backupdr_v1.types.GetBackupVaultRequest, dict]):
-                The request object. Request message for getting a
-                BackupVault.
-            name (str):
-                Required. Name of the backupvault store resource,
-                in the format
-                'projects/{project_id}/locations/{location}/backupVaults/{resource_name}'
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.backupdr_v1.types.BackupVault:
-                Message describing a BackupVault
-                object.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [name]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, backupvault.GetBackupVaultRequest):
-            request = backupvault.GetBackupVaultRequest(request)
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if name is not None:
-            request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.get_backup_vault]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def update_backup_vault(self,
-            request: Optional[Union[backupvault.UpdateBackupVaultRequest, dict]] = None,
-            *,
-            backup_vault: Optional[backupvault.BackupVault] = None,
-            update_mask: Optional[field_mask_pb2.FieldMask] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> operation.Operation:
-        r"""Updates the settings of a BackupVault.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import backupdr_v1
-
-            def sample_update_backup_vault():
-                # Create a client
-                client = backupdr_v1.BackupDRClient()
-
-                # Initialize request argument(s)
-                request = backupdr_v1.UpdateBackupVaultRequest(
-                )
-
-                # Make the request
-                operation = client.update_backup_vault(request=request)
-
-                print("Waiting for operation to complete...")
-
-                response = operation.result()
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.backupdr_v1.types.UpdateBackupVaultRequest, dict]):
-                The request object. Request message for updating a
-                BackupVault.
-            backup_vault (google.cloud.backupdr_v1.types.BackupVault):
-                Required. The resource being updated.
-                This corresponds to the ``backup_vault`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            update_mask (google.protobuf.field_mask_pb2.FieldMask):
-                Required. Field mask is used to specify the fields to be
-                overwritten in the BackupVault resource by the update.
-                The fields specified in the update_mask are relative to
-                the resource, not the full request. A field will be
-                overwritten if it is in the mask. If the user does not
-                provide a mask then the request will fail.
-
-                This corresponds to the ``update_mask`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.api_core.operation.Operation:
-                An object representing a long-running operation.
-
-                The result type for the operation will be
-                :class:`google.cloud.backupdr_v1.types.BackupVault`
-                Message describing a BackupVault object.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [backup_vault, update_mask]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, backupvault.UpdateBackupVaultRequest):
-            request = backupvault.UpdateBackupVaultRequest(request)
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if backup_vault is not None:
-            request.backup_vault = backup_vault
-        if update_mask is not None:
-            request.update_mask = update_mask
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
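A sketch of the update-with-mask contract described above, assuming ``description`` is a mutable ``BackupVault`` field (the resource name is a placeholder):

.. code-block:: python

    from google.cloud import backupdr_v1
    from google.protobuf import field_mask_pb2

    client = backupdr_v1.BackupDRClient()

    vault = backupdr_v1.BackupVault(
        name="projects/my-project/locations/us-central1/backupVaults/my-vault",
        description="updated description",
    )

    # Only the paths listed in the mask are overwritten; omitting the
    # mask fails the request, per the docstring above.
    operation = client.update_backup_vault(
        backup_vault=vault,
        update_mask=field_mask_pb2.FieldMask(paths=["description"]),
    )
    vault = operation.result()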
- rpc = self._transport._wrapped_methods[self._transport.update_backup_vault] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("backup_vault.name", request.backup_vault.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - backupvault.BackupVault, - metadata_type=backupdr.OperationMetadata, - ) - - # Done; return the response. - return response - - def delete_backup_vault(self, - request: Optional[Union[backupvault.DeleteBackupVaultRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Deletes a BackupVault. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import backupdr_v1 - - def sample_delete_backup_vault(): - # Create a client - client = backupdr_v1.BackupDRClient() - - # Initialize request argument(s) - request = backupdr_v1.DeleteBackupVaultRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_backup_vault(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.backupdr_v1.types.DeleteBackupVaultRequest, dict]): - The request object. Message for deleting a BackupVault. - name (str): - Required. Name of the resource. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
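Because ``delete_backup_vault`` resolves to ``google.protobuf.Empty``, completion itself is the success signal. A sketch with a placeholder name:

.. code-block:: python

    from google.cloud import backupdr_v1

    client = backupdr_v1.BackupDRClient()

    operation = client.delete_backup_vault(
        name="projects/my-project/locations/us-central1/backupVaults/my-vault",
    )
    operation.result()  # returns an empty message once deletion finishes
    print("backup vault deleted")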
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backupvault.DeleteBackupVaultRequest): - request = backupvault.DeleteBackupVaultRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_backup_vault] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=backupdr.OperationMetadata, - ) - - # Done; return the response. - return response - - def list_data_sources(self, - request: Optional[Union[backupvault.ListDataSourcesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListDataSourcesPager: - r"""Lists DataSources in a given project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import backupdr_v1 - - def sample_list_data_sources(): - # Create a client - client = backupdr_v1.BackupDRClient() - - # Initialize request argument(s) - request = backupdr_v1.ListDataSourcesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_sources(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.backupdr_v1.types.ListDataSourcesRequest, dict]): - The request object. Request message for listing - DataSources. - parent (str): - Required. The project and location for which to retrieve - data sources information, in the format - 'projects/{project_id}/locations/{location}'. In Cloud - Backup and DR, locations map to Google Cloud regions, - for example **us-central1**. To retrieve data sources - for all locations, use "-" for the '{location}' value. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.backupdr_v1.services.backup_dr.pagers.ListDataSourcesPager: - Response message for listing - DataSources. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backupvault.ListDataSourcesRequest): - request = backupvault.ListDataSourcesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_data_sources] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListDataSourcesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_data_source(self, - request: Optional[Union[backupvault.GetDataSourceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> backupvault.DataSource: - r"""Gets details of a DataSource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import backupdr_v1 - - def sample_get_data_source(): - # Create a client - client = backupdr_v1.BackupDRClient() - - # Initialize request argument(s) - request = backupdr_v1.GetDataSourceRequest( - name="name_value", - ) - - # Make the request - response = client.get_data_source(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.backupdr_v1.types.GetDataSourceRequest, dict]): - The request object. 
Request message for getting a
-                DataSource instance.
-            name (str):
-                Required. Name of the data source resource, in the
-                format
-                'projects/{project_id}/locations/{location}/backupVaults/{resource_name}/dataSources/{resource_name}'
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.backupdr_v1.types.DataSource:
-                Message describing a DataSource
-                object. Datasource object used to
-                represent Datasource details for both
-                admin and basic view.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [name]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, backupvault.GetDataSourceRequest):
-            request = backupvault.GetDataSourceRequest(request)
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if name is not None:
-            request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.get_data_source]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def update_data_source(self,
-            request: Optional[Union[backupvault.UpdateDataSourceRequest, dict]] = None,
-            *,
-            data_source: Optional[backupvault.DataSource] = None,
-            update_mask: Optional[field_mask_pb2.FieldMask] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> operation.Operation:
-        r"""Updates the settings of a DataSource.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import backupdr_v1 - - def sample_update_data_source(): - # Create a client - client = backupdr_v1.BackupDRClient() - - # Initialize request argument(s) - request = backupdr_v1.UpdateDataSourceRequest( - ) - - # Make the request - operation = client.update_data_source(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.backupdr_v1.types.UpdateDataSourceRequest, dict]): - The request object. Request message for updating a data - source instance. - data_source (google.cloud.backupdr_v1.types.DataSource): - Required. The resource being updated - This corresponds to the ``data_source`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Field mask is used to specify the fields to be - overwritten in the DataSource resource by the update. - The fields specified in the update_mask are relative to - the resource, not the full request. A field will be - overwritten if it is in the mask. If the user does not - provide a mask then the request will fail. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.backupdr_v1.types.DataSource` Message describing a DataSource object. - Datasource object used to represent Datasource - details for both admin and basic view. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [data_source, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backupvault.UpdateDataSourceRequest): - request = backupvault.UpdateDataSourceRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if data_source is not None: - request.data_source = data_source - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_data_source] - - # Certain fields should be provided within the metadata header; - # add these here. 
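User-supplied ``metadata`` rides alongside the ``x-goog-request-params`` routing header that blocks like the one below inject; keys ending in ``-bin`` must map to ``bytes`` values. The header names, values, and resource name in this sketch are illustrative only:

.. code-block:: python

    from google.cloud import backupdr_v1

    client = backupdr_v1.BackupDRClient()

    data_source = client.get_data_source(
        name=(
            "projects/my-project/locations/us-central1/"
            "backupVaults/my-vault/dataSources/my-ds"
        ),
        metadata=[
            ("x-goog-custom-header", "trace-123"),
            ("x-goog-custom-header-bin", b"\x00\x01"),
        ],
    )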
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("data_source.name", request.data_source.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - backupvault.DataSource, - metadata_type=backupdr.OperationMetadata, - ) - - # Done; return the response. - return response - - def list_backups(self, - request: Optional[Union[backupvault.ListBackupsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListBackupsPager: - r"""Lists Backups in a given project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import backupdr_v1 - - def sample_list_backups(): - # Create a client - client = backupdr_v1.BackupDRClient() - - # Initialize request argument(s) - request = backupdr_v1.ListBackupsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_backups(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.backupdr_v1.types.ListBackupsRequest, dict]): - The request object. Request message for listing Backups. - parent (str): - Required. The project and location for which to retrieve - backup information, in the format - 'projects/{project_id}/locations/{location}'. In Cloud - Backup and DR, locations map to Google Cloud regions, - for example **us-central1**. To retrieve data sources - for all locations, use "-" for the '{location}' value. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupsPager: - Response message for listing Backups. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
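The docstring above notes that "-" in the location segment aggregates results across all regions; a sketch with a placeholder project:

.. code-block:: python

    from google.cloud import backupdr_v1

    client = backupdr_v1.BackupDRClient()

    # "-" as the location lists backups across every region of the project.
    for backup in client.list_backups(parent="projects/my-project/locations/-"):
        print(backup.name)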
-        flattened_params = [parent]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, backupvault.ListBackupsRequest):
-            request = backupvault.ListBackupsRequest(request)
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if parent is not None:
-            request.parent = parent
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.list_backups]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # This method is paged; wrap the response in a pager, which provides
-        # an `__iter__` convenience method.
-        response = pagers.ListBackupsPager(
-            method=rpc,
-            request=request,
-            response=response,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def get_backup(self,
-            request: Optional[Union[backupvault.GetBackupRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> backupvault.Backup:
-        r"""Gets details of a Backup.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import backupdr_v1
-
-            def sample_get_backup():
-                # Create a client
-                client = backupdr_v1.BackupDRClient()
-
-                # Initialize request argument(s)
-                request = backupdr_v1.GetBackupRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                response = client.get_backup(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.backupdr_v1.types.GetBackupRequest, dict]):
-                The request object. Request message for getting a Backup.
-            name (str):
-                Required. Name of the backup resource, in the
-                format
-                'projects/{project_id}/locations/{location}/backupVaults/{backupVault}/dataSources/{datasource}/backups/{backup}'
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata.
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.backupdr_v1.types.Backup: - Message describing a Backup object. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backupvault.GetBackupRequest): - request = backupvault.GetBackupRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_backup] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_backup(self, - request: Optional[Union[backupvault.UpdateBackupRequest, dict]] = None, - *, - backup: Optional[backupvault.Backup] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Updates the settings of a Backup. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import backupdr_v1 - - def sample_update_backup(): - # Create a client - client = backupdr_v1.BackupDRClient() - - # Initialize request argument(s) - request = backupdr_v1.UpdateBackupRequest( - ) - - # Make the request - operation = client.update_backup(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.backupdr_v1.types.UpdateBackupRequest, dict]): - The request object. Request message for updating a - Backup. - backup (google.cloud.backupdr_v1.types.Backup): - Required. The resource being updated - This corresponds to the ``backup`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Field mask is used to specify the fields to be - overwritten in the Backup resource by the update. 
The - fields specified in the update_mask are relative to the - resource, not the full request. A field will be - overwritten if it is in the mask. If the user does not - provide a mask then the request will fail. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.backupdr_v1.types.Backup` Message - describing a Backup object. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [backup, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backupvault.UpdateBackupRequest): - request = backupvault.UpdateBackupRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if backup is not None: - request.backup = backup - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_backup] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("backup.name", request.backup.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - backupvault.Backup, - metadata_type=backupdr.OperationMetadata, - ) - - # Done; return the response. - return response - - def delete_backup(self, - request: Optional[Union[backupvault.DeleteBackupRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Deletes a Backup. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import backupdr_v1 - - def sample_delete_backup(): - # Create a client - client = backupdr_v1.BackupDRClient() - - # Initialize request argument(s) - request = backupdr_v1.DeleteBackupRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_backup(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.backupdr_v1.types.DeleteBackupRequest, dict]): - The request object. Message for deleting a Backup. - name (str): - Required. Name of the resource. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.backupdr_v1.types.Backup` Message - describing a Backup object. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backupvault.DeleteBackupRequest): - request = backupvault.DeleteBackupRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_backup] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - backupvault.Backup, - metadata_type=backupdr.OperationMetadata, - ) - - # Done; return the response. 
- return response - - def restore_backup(self, - request: Optional[Union[backupvault.RestoreBackupRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Restore from a Backup - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import backupdr_v1 - - def sample_restore_backup(): - # Create a client - client = backupdr_v1.BackupDRClient() - - # Initialize request argument(s) - compute_instance_target_environment = backupdr_v1.ComputeInstanceTargetEnvironment() - compute_instance_target_environment.project = "project_value" - compute_instance_target_environment.zone = "zone_value" - - request = backupdr_v1.RestoreBackupRequest( - compute_instance_target_environment=compute_instance_target_environment, - name="name_value", - ) - - # Make the request - operation = client.restore_backup(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.backupdr_v1.types.RestoreBackupRequest, dict]): - The request object. Request message for restoring from a - Backup. - name (str): - Required. The resource name of the Backup instance, in - the format - 'projects/*/locations/*/backupVaults/*/dataSources/*/backups/'. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.backupdr_v1.types.RestoreBackupResponse` - Response message for restoring from a Backup. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backupvault.RestoreBackupRequest): - request = backupvault.RestoreBackupRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
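-            # Only non-None keyword values are copied onto the freshly
-            # coerced request, so an omitted keyword never writes a field.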
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.restore_backup] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - backupvault.RestoreBackupResponse, - metadata_type=backupdr.OperationMetadata, - ) - - # Done; return the response. - return response - - def create_backup_plan(self, - request: Optional[Union[backupplan.CreateBackupPlanRequest, dict]] = None, - *, - parent: Optional[str] = None, - backup_plan: Optional[backupplan.BackupPlan] = None, - backup_plan_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Create a BackupPlan - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import backupdr_v1 - - def sample_create_backup_plan(): - # Create a client - client = backupdr_v1.BackupDRClient() - - # Initialize request argument(s) - backup_plan = backupdr_v1.BackupPlan() - backup_plan.backup_rules.standard_schedule.recurrence_type = "YEARLY" - backup_plan.backup_rules.standard_schedule.backup_window.start_hour_of_day = 1820 - backup_plan.backup_rules.standard_schedule.backup_window.end_hour_of_day = 1573 - backup_plan.backup_rules.standard_schedule.time_zone = "time_zone_value" - backup_plan.backup_rules.rule_id = "rule_id_value" - backup_plan.backup_rules.backup_retention_days = 2237 - backup_plan.resource_type = "resource_type_value" - backup_plan.backup_vault = "backup_vault_value" - - request = backupdr_v1.CreateBackupPlanRequest( - parent="parent_value", - backup_plan_id="backup_plan_id_value", - backup_plan=backup_plan, - ) - - # Make the request - operation = client.create_backup_plan(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.backupdr_v1.types.CreateBackupPlanRequest, dict]): - The request object. The request message for creating a ``BackupPlan``. - parent (str): - Required. The ``BackupPlan`` project and location in the - format ``projects/{project}/locations/{location}``. In - Cloud BackupDR locations map to GCP regions, for example - **us-central1**. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - backup_plan (google.cloud.backupdr_v1.types.BackupPlan): - Required. The ``BackupPlan`` resource object to create. 
-                This corresponds to the ``backup_plan`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            backup_plan_id (str):
-                Required. The name of the ``BackupPlan`` to create. The
-                name must be unique for the specified project and
-                location. The name must start with a lowercase letter
-                followed by up to 62 lowercase letters, numbers, or
-                hyphens. Pattern: ``/[a-z][a-z0-9-]{,62}/``.
-
-                This corresponds to the ``backup_plan_id`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.api_core.operation.Operation:
-                An object representing a long-running operation.
-
-                The result type for the operation will be :class:`google.cloud.backupdr_v1.types.BackupPlan` A BackupPlan specifies some common fields, such as description as well
-                as one or more BackupRule messages. Each BackupRule
-                has a retention policy and defines a schedule by
-                which the system is to perform backup workloads.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [parent, backup_plan, backup_plan_id]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, backupplan.CreateBackupPlanRequest):
-            request = backupplan.CreateBackupPlanRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if parent is not None:
-                request.parent = parent
-            if backup_plan is not None:
-                request.backup_plan = backup_plan
-            if backup_plan_id is not None:
-                request.backup_plan_id = backup_plan_id
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.create_backup_plan]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Wrap the response in an operation future.
-        response = operation.from_gapic(
-            response,
-            self._transport.operations_client,
-            backupplan.BackupPlan,
-            metadata_type=backupdr.OperationMetadata,
-        )
-
-        # Done; return the response.
- return response - - def get_backup_plan(self, - request: Optional[Union[backupplan.GetBackupPlanRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> backupplan.BackupPlan: - r"""Gets details of a single BackupPlan. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import backupdr_v1 - - def sample_get_backup_plan(): - # Create a client - client = backupdr_v1.BackupDRClient() - - # Initialize request argument(s) - request = backupdr_v1.GetBackupPlanRequest( - name="name_value", - ) - - # Make the request - response = client.get_backup_plan(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.backupdr_v1.types.GetBackupPlanRequest, dict]): - The request object. The request message for getting a ``BackupPlan``. - name (str): - Required. The resource name of the ``BackupPlan`` to - retrieve. - - Format: - ``projects/{project}/locations/{location}/backupPlans/{backup_plan}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.backupdr_v1.types.BackupPlan: - A BackupPlan specifies some common fields, such as description as well - as one or more BackupRule messages. Each BackupRule - has a retention policy and defines a schedule by - which the system is to perform backup workloads. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backupplan.GetBackupPlanRequest): - request = backupplan.GetBackupPlanRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_backup_plan] - - # Certain fields should be provided within the metadata header; - # add these here. 
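-        # to_grpc_metadata serializes these routing fields into the
-        # `x-goog-request-params` header, which the backend uses to route
-        # the call to the correct regional resource.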
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def list_backup_plans(self,
-            request: Optional[Union[backupplan.ListBackupPlansRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> pagers.ListBackupPlansPager:
-        r"""Lists BackupPlans in a given project and location.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import backupdr_v1
-
-            def sample_list_backup_plans():
-                # Create a client
-                client = backupdr_v1.BackupDRClient()
-
-                # Initialize request argument(s)
-                request = backupdr_v1.ListBackupPlansRequest(
-                    parent="parent_value",
-                )
-
-                # Make the request
-                page_result = client.list_backup_plans(request=request)
-
-                # Handle the response
-                for response in page_result:
-                    print(response)
-
-        Args:
-            request (Union[google.cloud.backupdr_v1.types.ListBackupPlansRequest, dict]):
-                The request object. The request message for getting a list of ``BackupPlan`` resources.
-            parent (str):
-                Required. The project and location for which to retrieve
-                ``BackupPlans`` information. Format:
-                ``projects/{project}/locations/{location}``. In Cloud
-                BackupDR, locations map to GCP regions, for example
-                **us-central1**. To retrieve backup plans for all
-                locations, use "-" for the ``{location}`` value.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlansPager:
-                The response message for getting a list of BackupPlans.
-
-                Iterating over this object will yield results and
-                resolve additional pages automatically.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [parent]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
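-        # The proto-plus constructor accepts either an existing message or a
-        # plain dict with the same field names, e.g. {"parent": "projects/p/locations/l"}.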
- if not isinstance(request, backupplan.ListBackupPlansRequest): - request = backupplan.ListBackupPlansRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_backup_plans] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListBackupPlansPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_backup_plan(self, - request: Optional[Union[backupplan.DeleteBackupPlanRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Deletes a single BackupPlan. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import backupdr_v1 - - def sample_delete_backup_plan(): - # Create a client - client = backupdr_v1.BackupDRClient() - - # Initialize request argument(s) - request = backupdr_v1.DeleteBackupPlanRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_backup_plan(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.backupdr_v1.types.DeleteBackupPlanRequest, dict]): - The request object. The request message for deleting a ``BackupPlan``. - name (str): - Required. The resource name of the ``BackupPlan`` to - delete. - - Format: - ``projects/{project}/locations/{location}/backupPlans/{backup_plan}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. 
- - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backupplan.DeleteBackupPlanRequest): - request = backupplan.DeleteBackupPlanRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_backup_plan] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=backupdr.OperationMetadata, - ) - - # Done; return the response. - return response - - def create_backup_plan_association(self, - request: Optional[Union[backupplanassociation.CreateBackupPlanAssociationRequest, dict]] = None, - *, - parent: Optional[str] = None, - backup_plan_association: Optional[backupplanassociation.BackupPlanAssociation] = None, - backup_plan_association_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Create a BackupPlanAssociation - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import backupdr_v1
-
-            def sample_create_backup_plan_association():
-                # Create a client
-                client = backupdr_v1.BackupDRClient()
-
-                # Initialize request argument(s)
-                backup_plan_association = backupdr_v1.BackupPlanAssociation()
-                backup_plan_association.resource_type = "resource_type_value"
-                backup_plan_association.resource = "resource_value"
-                backup_plan_association.backup_plan = "backup_plan_value"
-
-                request = backupdr_v1.CreateBackupPlanAssociationRequest(
-                    parent="parent_value",
-                    backup_plan_association_id="backup_plan_association_id_value",
-                    backup_plan_association=backup_plan_association,
-                )
-
-                # Make the request
-                operation = client.create_backup_plan_association(request=request)
-
-                print("Waiting for operation to complete...")
-
-                response = operation.result()
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.backupdr_v1.types.CreateBackupPlanAssociationRequest, dict]):
-                The request object. Request message for creating a backup
-                plan association.
-            parent (str):
-                Required. The backup plan association project and
-                location in the format
-                ``projects/{project_id}/locations/{location}``. In Cloud
-                BackupDR, locations map to GCP regions, for example
-                **us-central1**.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            backup_plan_association (google.cloud.backupdr_v1.types.BackupPlanAssociation):
-                Required. The resource being created.
-                This corresponds to the ``backup_plan_association`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            backup_plan_association_id (str):
-                Required. The name of the backup plan
-                association to create. The name must be
-                unique for the specified project and
-                location.
-
-                This corresponds to the ``backup_plan_association_id`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.api_core.operation.Operation:
-                An object representing a long-running operation.
-
-                The result type for the operation will be :class:`google.cloud.backupdr_v1.types.BackupPlanAssociation` A BackupPlanAssociation represents a single BackupPlanAssociation which
-                contains details like workload, backup plan, etc.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
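-        # The mutual-exclusion guard below mirrors every other flattened
-        # method on this client: populate `request` or pass the individual
-        # fields, never both.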
- flattened_params = [parent, backup_plan_association, backup_plan_association_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backupplanassociation.CreateBackupPlanAssociationRequest): - request = backupplanassociation.CreateBackupPlanAssociationRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if backup_plan_association is not None: - request.backup_plan_association = backup_plan_association - if backup_plan_association_id is not None: - request.backup_plan_association_id = backup_plan_association_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_backup_plan_association] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - backupplanassociation.BackupPlanAssociation, - metadata_type=backupdr.OperationMetadata, - ) - - # Done; return the response. - return response - - def get_backup_plan_association(self, - request: Optional[Union[backupplanassociation.GetBackupPlanAssociationRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> backupplanassociation.BackupPlanAssociation: - r"""Gets details of a single BackupPlanAssociation. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import backupdr_v1 - - def sample_get_backup_plan_association(): - # Create a client - client = backupdr_v1.BackupDRClient() - - # Initialize request argument(s) - request = backupdr_v1.GetBackupPlanAssociationRequest( - name="name_value", - ) - - # Make the request - response = client.get_backup_plan_association(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.backupdr_v1.types.GetBackupPlanAssociationRequest, dict]): - The request object. Request message for getting a - BackupPlanAssociation resource. - name (str): - Required. 
Name of the backup plan association resource,
-                in the format
-                ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}``
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.backupdr_v1.types.BackupPlanAssociation:
-                A BackupPlanAssociation represents a
-                single BackupPlanAssociation which
-                contains details like workload, backup
-                plan, etc.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [name]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, backupplanassociation.GetBackupPlanAssociationRequest):
-            request = backupplanassociation.GetBackupPlanAssociationRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if name is not None:
-                request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.get_backup_plan_association]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def list_backup_plan_associations(self,
-            request: Optional[Union[backupplanassociation.ListBackupPlanAssociationsRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> pagers.ListBackupPlanAssociationsPager:
-        r"""Lists BackupPlanAssociations in a given project and
-        location.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import backupdr_v1
-
-            def sample_list_backup_plan_associations():
-                # Create a client
-                client = backupdr_v1.BackupDRClient()
-
-                # Initialize request argument(s)
-                request = backupdr_v1.ListBackupPlanAssociationsRequest(
-                    parent="parent_value",
-                )
-
-                # Make the request
-                page_result = client.list_backup_plan_associations(request=request)
-
-                # Handle the response
-                for response in page_result:
-                    print(response)
-
-        Args:
-            request (Union[google.cloud.backupdr_v1.types.ListBackupPlanAssociationsRequest, dict]):
-                The request object. Request message for listing
-                BackupPlanAssociations.
-            parent (str):
-                Required. The project and location for which to retrieve
-                backup plan association information, in the format
-                ``projects/{project_id}/locations/{location}``. In Cloud
-                BackupDR, locations map to GCP regions, for example
-                **us-central1**. To retrieve backup plan associations
-                for all locations, use "-" for the ``{location}`` value.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlanAssociationsPager:
-                Response message for listing
-                BackupPlanAssociations.
-
-                Iterating over this object will yield
-                results and resolve additional pages
-                automatically.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [parent]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, backupplanassociation.ListBackupPlanAssociationsRequest):
-            request = backupplanassociation.ListBackupPlanAssociationsRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if parent is not None:
-                request.parent = parent
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.list_backup_plan_associations]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
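-        # `rpc` already carries the default retry/timeout configured on the
-        # transport; explicit per-call arguments override those defaults.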
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListBackupPlanAssociationsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_backup_plan_association(self, - request: Optional[Union[backupplanassociation.DeleteBackupPlanAssociationRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Deletes a single BackupPlanAssociation. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import backupdr_v1 - - def sample_delete_backup_plan_association(): - # Create a client - client = backupdr_v1.BackupDRClient() - - # Initialize request argument(s) - request = backupdr_v1.DeleteBackupPlanAssociationRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_backup_plan_association(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.backupdr_v1.types.DeleteBackupPlanAssociationRequest, dict]): - The request object. Request message for deleting a backup - plan association. - name (str): - Required. Name of the backup plan association resource, - in the format - ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
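-        # Here `name` is the only flattened field, so the guard below fires
-        # only when both `request` and `name` are supplied.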
-        flattened_params = [name]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, backupplanassociation.DeleteBackupPlanAssociationRequest):
-            request = backupplanassociation.DeleteBackupPlanAssociationRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if name is not None:
-                request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.delete_backup_plan_association]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Wrap the response in an operation future.
-        response = operation.from_gapic(
-            response,
-            self._transport.operations_client,
-            empty_pb2.Empty,
-            metadata_type=backupdr.OperationMetadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def trigger_backup(self,
-            request: Optional[Union[backupplanassociation.TriggerBackupRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            rule_id: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> operation.Operation:
-        r"""Triggers a new Backup.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import backupdr_v1
-
-            def sample_trigger_backup():
-                # Create a client
-                client = backupdr_v1.BackupDRClient()
-
-                # Initialize request argument(s)
-                request = backupdr_v1.TriggerBackupRequest(
-                    name="name_value",
-                    rule_id="rule_id_value",
-                )
-
-                # Make the request
-                operation = client.trigger_backup(request=request)
-
-                print("Waiting for operation to complete...")
-
-                response = operation.result()
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.backupdr_v1.types.TriggerBackupRequest, dict]):
-                The request object. Request message for triggering a
-                backup.
-            name (str):
-                Required. Name of the backup plan association resource,
-                in the format
-                ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}``
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            rule_id (str):
-                Required. The backup ``rule_id`` for which a backup needs
-                to be triggered.
-
-                This corresponds to the ``rule_id`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.api_core.operation.Operation:
-                An object representing a long-running operation.
-
-                The result type for the operation will be :class:`google.cloud.backupdr_v1.types.BackupPlanAssociation` A BackupPlanAssociation represents a single BackupPlanAssociation which
-                contains details like workload, backup plan, etc.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [name, rule_id]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, backupplanassociation.TriggerBackupRequest):
-            request = backupplanassociation.TriggerBackupRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if name is not None:
-                request.name = name
-            if rule_id is not None:
-                request.rule_id = rule_id
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.trigger_backup]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Wrap the response in an operation future.
-        response = operation.from_gapic(
-            response,
-            self._transport.operations_client,
-            backupplanassociation.BackupPlanAssociation,
-            metadata_type=backupdr.OperationMetadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def initialize_service(self,
-            request: Optional[Union[backupdr.InitializeServiceRequest, dict]] = None,
-            *,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> operation.Operation:
-        r"""Initializes the service-related config for a project.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import backupdr_v1 - - def sample_initialize_service(): - # Create a client - client = backupdr_v1.BackupDRClient() - - # Initialize request argument(s) - request = backupdr_v1.InitializeServiceRequest( - name="name_value", - resource_type="resource_type_value", - ) - - # Make the request - operation = client.initialize_service(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.backupdr_v1.types.InitializeServiceRequest, dict]): - The request object. Request message for initializing the - service. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.backupdr_v1.types.InitializeServiceResponse` - Response message for initializing the service. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, backupdr.InitializeServiceRequest): - request = backupdr.InitializeServiceRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.initialize_service] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - backupdr.InitializeServiceResponse, - metadata_type=backupdr.OperationMetadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "BackupDRClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. 
Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.cancel_operation]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata(
- (("name", request.name),)),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- # Send the request.
- rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
-
- def set_iam_policy(
- self,
- request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None,
- *,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> policy_pb2.Policy:
- r"""Sets the IAM access control policy on the specified resource.
-
- Replaces any existing policy.
-
- Args:
- request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`):
- The request object. Request message for `SetIamPolicy`
- method.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
- Returns:
- ~.policy_pb2.Policy:
- Defines an Identity and Access Management (IAM) policy.
- It is used to specify access control policies for Cloud
- Platform resources.
- A ``Policy`` is a collection of ``bindings``. A
- ``binding`` binds one or more ``members`` to a single
- ``role``. Members can be user accounts, service
- accounts, Google groups, and domains (such as G Suite).
- A ``role`` is a named list of permissions (defined by
- IAM or configured by users). A ``binding`` can
- optionally specify a ``condition``, which is a logic
- expression that further constrains the role binding
- based on attributes about the request and/or target
- resource.
-
- **JSON Example**
-
- ::
-
- {
- "bindings": [
- {
- "role": "roles/resourcemanager.organizationAdmin",
- "members": [
- "user:mike@example.com",
- "group:admins@example.com",
- "domain:google.com",
- "serviceAccount:my-project-id@appspot.gserviceaccount.com"
- ]
- },
- {
- "role": "roles/resourcemanager.organizationViewer",
- "members": ["user:eve@example.com"],
- "condition": {
- "title": "expirable access",
- "description": "Does not grant access after Sep 2020",
- "expression": "request.time <
- timestamp('2020-10-01T00:00:00.000Z')",
- }
- }
- ]
- }
-
- **YAML Example**
-
- ::
-
- bindings:
- - - members:
- - user:mike@example.com
- - group:admins@example.com
- - domain:google.com
- - serviceAccount:my-project-id@appspot.gserviceaccount.com
- role: roles/resourcemanager.organizationAdmin
- - members:
- - user:eve@example.com
- role: roles/resourcemanager.organizationViewer
- condition:
- title: expirable access
- description: Does not grant access after Sep 2020
- expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
-
- For a description of IAM and its features, see the `IAM
- developer's
- guide <https://cloud.google.com/iam/docs>`__.
- """
- # Create or coerce a protobuf request object.
-
- # The request isn't a proto-plus wrapped type,
- # so it must be constructed via keyword expansion.
- if isinstance(request, dict):
- request = iam_policy_pb2.SetIamPolicyRequest(**request)
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.set_iam_policy]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata(
- (("resource", request.resource),)),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- try:
- # Send the request.
- response = rpc(
- request, retry=retry, timeout=timeout, metadata=metadata,)
-
- # Done; return the response.
- return response
- except core_exceptions.GoogleAPICallError as e:
- self._add_cred_info_for_auth_errors(e)
- raise e
-
- def get_iam_policy(
- self,
- request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None,
- *,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> policy_pb2.Policy:
- r"""Gets the IAM access control policy for a resource.
-
- Returns an empty policy if the resource exists and does not have a
- policy set.
-
- Args:
- request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`):
- The request object. Request message for `GetIamPolicy`
- method.
- retry (google.api_core.retry.Retry): Designation of what errors, if
- any, should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
- Returns:
- ~.policy_pb2.Policy:
- Defines an Identity and Access Management (IAM) policy.
- It is used to specify access control policies for Cloud
- Platform resources.
- A ``Policy`` is a collection of ``bindings``. A
- ``binding`` binds one or more ``members`` to a single
- ``role``. Members can be user accounts, service
- accounts, Google groups, and domains (such as G Suite).
- A ``role`` is a named list of permissions (defined by
- IAM or configured by users). A ``binding`` can
- optionally specify a ``condition``, which is a logic
- expression that further constrains the role binding
- based on attributes about the request and/or target
- resource.
-
- **JSON Example**
-
- ::
-
- {
- "bindings": [
- {
- "role": "roles/resourcemanager.organizationAdmin",
- "members": [
- "user:mike@example.com",
- "group:admins@example.com",
- "domain:google.com",
- "serviceAccount:my-project-id@appspot.gserviceaccount.com"
- ]
- },
- {
- "role": "roles/resourcemanager.organizationViewer",
- "members": ["user:eve@example.com"],
- "condition": {
- "title": "expirable access",
- "description": "Does not grant access after Sep 2020",
- "expression": "request.time <
- timestamp('2020-10-01T00:00:00.000Z')",
- }
- }
- ]
- }
-
- **YAML Example**
-
- ::
-
- bindings:
- - - members:
- - user:mike@example.com
- - group:admins@example.com
- - domain:google.com
- - serviceAccount:my-project-id@appspot.gserviceaccount.com
- role: roles/resourcemanager.organizationAdmin
- - members:
- - user:eve@example.com
- role: roles/resourcemanager.organizationViewer
- condition:
- title: expirable access
- description: Does not grant access after Sep 2020
- expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
-
- For a description of IAM and its features, see the `IAM
- developer's
- guide <https://cloud.google.com/iam/docs>`__.
- """
- # Create or coerce a protobuf request object.
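# [Editorial note, not part of the generated file.] As the comment below
# states, these operations/IAM/location mixin methods accept either a
# prebuilt protobuf request or a plain dict that is expanded into the
# request constructor via keyword expansion. A minimal usage sketch, with a
# hypothetical resource name:
#
#     policy = client.get_iam_policy(request={
#         "resource": "projects/my-project/locations/us-central1/managementServers/my-ms",
#     })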
-
- # The request isn't a proto-plus wrapped type,
- # so it must be constructed via keyword expansion.
- if isinstance(request, dict):
- request = iam_policy_pb2.GetIamPolicyRequest(**request)
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.get_iam_policy]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata(
- (("resource", request.resource),)),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- try:
- # Send the request.
- response = rpc(
- request, retry=retry, timeout=timeout, metadata=metadata,)
-
- # Done; return the response.
- return response
- except core_exceptions.GoogleAPICallError as e:
- self._add_cred_info_for_auth_errors(e)
- raise e
-
- def test_iam_permissions(
- self,
- request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None,
- *,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> iam_policy_pb2.TestIamPermissionsResponse:
- r"""Tests the specified IAM permissions against the IAM access control
- policy for a resource.
-
- If the resource does not exist, this will return an empty set
- of permissions, not a NOT_FOUND error.
-
- Args:
- request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`):
- The request object. Request message for
- `TestIamPermissions` method.
- retry (google.api_core.retry.Retry): Designation of what errors,
- if any, should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
- Returns:
- ~.iam_policy_pb2.TestIamPermissionsResponse:
- Response message for ``TestIamPermissions`` method.
- """
- # Create or coerce a protobuf request object.
-
- # The request isn't a proto-plus wrapped type,
- # so it must be constructed via keyword expansion.
- if isinstance(request, dict):
- request = iam_policy_pb2.TestIamPermissionsRequest(**request)
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata(
- (("resource", request.resource),)),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- try:
- # Send the request.
- response = rpc(
- request, retry=retry, timeout=timeout, metadata=metadata,)
-
- # Done; return the response.
- return response
- except core_exceptions.GoogleAPICallError as e:
- self._add_cred_info_for_auth_errors(e)
- raise e
-
- def get_location(
- self,
- request: Optional[locations_pb2.GetLocationRequest] = None,
- *,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> locations_pb2.Location:
- r"""Gets information about a location.
-
- Args:
- request (:class:`~.location_pb2.GetLocationRequest`):
- The request object.
Request message for - `GetLocation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "BackupDRClient", -) diff --git a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/services/backup_dr/pagers.py b/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/services/backup_dr/pagers.py deleted file mode 100644 index 5d9cdc794750..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/services/backup_dr/pagers.py +++ /dev/null @@ -1,1003 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.backupdr_v1.types import backupdr -from google.cloud.backupdr_v1.types import backupplan -from google.cloud.backupdr_v1.types import backupplanassociation -from google.cloud.backupdr_v1.types import backupvault - - -class ListManagementServersPager: - """A pager for iterating through ``list_management_servers`` requests. - - This class thinly wraps an initial - :class:`google.cloud.backupdr_v1.types.ListManagementServersResponse` object, and - provides an ``__iter__`` method to iterate through its - ``management_servers`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListManagementServers`` requests and continue to iterate - through the ``management_servers`` field on the - corresponding responses. - - All the usual :class:`google.cloud.backupdr_v1.types.ListManagementServersResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., backupdr.ListManagementServersResponse], - request: backupdr.ListManagementServersRequest, - response: backupdr.ListManagementServersResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.backupdr_v1.types.ListManagementServersRequest): - The initial request object. 
- response (google.cloud.backupdr_v1.types.ListManagementServersResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = backupdr.ListManagementServersRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[backupdr.ListManagementServersResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[backupdr.ManagementServer]: - for page in self.pages: - yield from page.management_servers - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListManagementServersAsyncPager: - """A pager for iterating through ``list_management_servers`` requests. - - This class thinly wraps an initial - :class:`google.cloud.backupdr_v1.types.ListManagementServersResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``management_servers`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListManagementServers`` requests and continue to iterate - through the ``management_servers`` field on the - corresponding responses. - - All the usual :class:`google.cloud.backupdr_v1.types.ListManagementServersResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[backupdr.ListManagementServersResponse]], - request: backupdr.ListManagementServersRequest, - response: backupdr.ListManagementServersResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.backupdr_v1.types.ListManagementServersRequest): - The initial request object. - response (google.cloud.backupdr_v1.types.ListManagementServersResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
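Pagers are normally obtained from the corresponding client method rather
than constructed directly. A usage sketch (editorial addition, not
generated text; the client and resource names are hypothetical)::

    pager = await async_client.list_management_servers(
        parent="projects/my-project/locations/us-central1")
    async for server in pager:
        print(server.name)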
- """ - self._method = method - self._request = backupdr.ListManagementServersRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[backupdr.ListManagementServersResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[backupdr.ManagementServer]: - async def async_generator(): - async for page in self.pages: - for response in page.management_servers: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListBackupVaultsPager: - """A pager for iterating through ``list_backup_vaults`` requests. - - This class thinly wraps an initial - :class:`google.cloud.backupdr_v1.types.ListBackupVaultsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``backup_vaults`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListBackupVaults`` requests and continue to iterate - through the ``backup_vaults`` field on the - corresponding responses. - - All the usual :class:`google.cloud.backupdr_v1.types.ListBackupVaultsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., backupvault.ListBackupVaultsResponse], - request: backupvault.ListBackupVaultsRequest, - response: backupvault.ListBackupVaultsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.backupdr_v1.types.ListBackupVaultsRequest): - The initial request object. - response (google.cloud.backupdr_v1.types.ListBackupVaultsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = backupvault.ListBackupVaultsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[backupvault.ListBackupVaultsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[backupvault.BackupVault]: - for page in self.pages: - yield from page.backup_vaults - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListBackupVaultsAsyncPager: - """A pager for iterating through ``list_backup_vaults`` requests. - - This class thinly wraps an initial - :class:`google.cloud.backupdr_v1.types.ListBackupVaultsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``backup_vaults`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListBackupVaults`` requests and continue to iterate - through the ``backup_vaults`` field on the - corresponding responses. - - All the usual :class:`google.cloud.backupdr_v1.types.ListBackupVaultsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[backupvault.ListBackupVaultsResponse]], - request: backupvault.ListBackupVaultsRequest, - response: backupvault.ListBackupVaultsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.backupdr_v1.types.ListBackupVaultsRequest): - The initial request object. - response (google.cloud.backupdr_v1.types.ListBackupVaultsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = backupvault.ListBackupVaultsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[backupvault.ListBackupVaultsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[backupvault.BackupVault]: - async def async_generator(): - async for page in self.pages: - for response in page.backup_vaults: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class FetchUsableBackupVaultsPager: - """A pager for iterating through ``fetch_usable_backup_vaults`` requests. - - This class thinly wraps an initial - :class:`google.cloud.backupdr_v1.types.FetchUsableBackupVaultsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``backup_vaults`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``FetchUsableBackupVaults`` requests and continue to iterate - through the ``backup_vaults`` field on the - corresponding responses. - - All the usual :class:`google.cloud.backupdr_v1.types.FetchUsableBackupVaultsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., backupvault.FetchUsableBackupVaultsResponse], - request: backupvault.FetchUsableBackupVaultsRequest, - response: backupvault.FetchUsableBackupVaultsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.backupdr_v1.types.FetchUsableBackupVaultsRequest): - The initial request object. - response (google.cloud.backupdr_v1.types.FetchUsableBackupVaultsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = backupvault.FetchUsableBackupVaultsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[backupvault.FetchUsableBackupVaultsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[backupvault.BackupVault]: - for page in self.pages: - yield from page.backup_vaults - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class FetchUsableBackupVaultsAsyncPager: - """A pager for iterating through ``fetch_usable_backup_vaults`` requests. - - This class thinly wraps an initial - :class:`google.cloud.backupdr_v1.types.FetchUsableBackupVaultsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``backup_vaults`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``FetchUsableBackupVaults`` requests and continue to iterate - through the ``backup_vaults`` field on the - corresponding responses. - - All the usual :class:`google.cloud.backupdr_v1.types.FetchUsableBackupVaultsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[backupvault.FetchUsableBackupVaultsResponse]], - request: backupvault.FetchUsableBackupVaultsRequest, - response: backupvault.FetchUsableBackupVaultsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.backupdr_v1.types.FetchUsableBackupVaultsRequest): - The initial request object. - response (google.cloud.backupdr_v1.types.FetchUsableBackupVaultsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = backupvault.FetchUsableBackupVaultsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[backupvault.FetchUsableBackupVaultsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[backupvault.BackupVault]: - async def async_generator(): - async for page in self.pages: - for response in page.backup_vaults: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDataSourcesPager: - """A pager for iterating through ``list_data_sources`` requests. - - This class thinly wraps an initial - :class:`google.cloud.backupdr_v1.types.ListDataSourcesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``data_sources`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListDataSources`` requests and continue to iterate - through the ``data_sources`` field on the - corresponding responses. - - All the usual :class:`google.cloud.backupdr_v1.types.ListDataSourcesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., backupvault.ListDataSourcesResponse], - request: backupvault.ListDataSourcesRequest, - response: backupvault.ListDataSourcesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.backupdr_v1.types.ListDataSourcesRequest): - The initial request object. - response (google.cloud.backupdr_v1.types.ListDataSourcesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = backupvault.ListDataSourcesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[backupvault.ListDataSourcesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[backupvault.DataSource]: - for page in self.pages: - yield from page.data_sources - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDataSourcesAsyncPager: - """A pager for iterating through ``list_data_sources`` requests. - - This class thinly wraps an initial - :class:`google.cloud.backupdr_v1.types.ListDataSourcesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``data_sources`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDataSources`` requests and continue to iterate - through the ``data_sources`` field on the - corresponding responses. - - All the usual :class:`google.cloud.backupdr_v1.types.ListDataSourcesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[backupvault.ListDataSourcesResponse]], - request: backupvault.ListDataSourcesRequest, - response: backupvault.ListDataSourcesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.backupdr_v1.types.ListDataSourcesRequest): - The initial request object. - response (google.cloud.backupdr_v1.types.ListDataSourcesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = backupvault.ListDataSourcesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[backupvault.ListDataSourcesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[backupvault.DataSource]: - async def async_generator(): - async for page in self.pages: - for response in page.data_sources: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListBackupsPager: - """A pager for iterating through ``list_backups`` requests. - - This class thinly wraps an initial - :class:`google.cloud.backupdr_v1.types.ListBackupsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``backups`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListBackups`` requests and continue to iterate - through the ``backups`` field on the - corresponding responses. - - All the usual :class:`google.cloud.backupdr_v1.types.ListBackupsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., backupvault.ListBackupsResponse], - request: backupvault.ListBackupsRequest, - response: backupvault.ListBackupsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.backupdr_v1.types.ListBackupsRequest): - The initial request object. - response (google.cloud.backupdr_v1.types.ListBackupsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = backupvault.ListBackupsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[backupvault.ListBackupsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[backupvault.Backup]: - for page in self.pages: - yield from page.backups - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListBackupsAsyncPager: - """A pager for iterating through ``list_backups`` requests. - - This class thinly wraps an initial - :class:`google.cloud.backupdr_v1.types.ListBackupsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``backups`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListBackups`` requests and continue to iterate - through the ``backups`` field on the - corresponding responses. - - All the usual :class:`google.cloud.backupdr_v1.types.ListBackupsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[backupvault.ListBackupsResponse]], - request: backupvault.ListBackupsRequest, - response: backupvault.ListBackupsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.backupdr_v1.types.ListBackupsRequest): - The initial request object. - response (google.cloud.backupdr_v1.types.ListBackupsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = backupvault.ListBackupsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[backupvault.ListBackupsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[backupvault.Backup]: - async def async_generator(): - async for page in self.pages: - for response in page.backups: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListBackupPlansPager: - """A pager for iterating through ``list_backup_plans`` requests. - - This class thinly wraps an initial - :class:`google.cloud.backupdr_v1.types.ListBackupPlansResponse` object, and - provides an ``__iter__`` method to iterate through its - ``backup_plans`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListBackupPlans`` requests and continue to iterate - through the ``backup_plans`` field on the - corresponding responses. - - All the usual :class:`google.cloud.backupdr_v1.types.ListBackupPlansResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., backupplan.ListBackupPlansResponse], - request: backupplan.ListBackupPlansRequest, - response: backupplan.ListBackupPlansResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.backupdr_v1.types.ListBackupPlansRequest): - The initial request object. - response (google.cloud.backupdr_v1.types.ListBackupPlansResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = backupplan.ListBackupPlansRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[backupplan.ListBackupPlansResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[backupplan.BackupPlan]: - for page in self.pages: - yield from page.backup_plans - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListBackupPlansAsyncPager: - """A pager for iterating through ``list_backup_plans`` requests. - - This class thinly wraps an initial - :class:`google.cloud.backupdr_v1.types.ListBackupPlansResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``backup_plans`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListBackupPlans`` requests and continue to iterate - through the ``backup_plans`` field on the - corresponding responses. - - All the usual :class:`google.cloud.backupdr_v1.types.ListBackupPlansResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[backupplan.ListBackupPlansResponse]], - request: backupplan.ListBackupPlansRequest, - response: backupplan.ListBackupPlansResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.backupdr_v1.types.ListBackupPlansRequest): - The initial request object. - response (google.cloud.backupdr_v1.types.ListBackupPlansResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = backupplan.ListBackupPlansRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[backupplan.ListBackupPlansResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[backupplan.BackupPlan]: - async def async_generator(): - async for page in self.pages: - for response in page.backup_plans: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListBackupPlanAssociationsPager: - """A pager for iterating through ``list_backup_plan_associations`` requests. - - This class thinly wraps an initial - :class:`google.cloud.backupdr_v1.types.ListBackupPlanAssociationsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``backup_plan_associations`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListBackupPlanAssociations`` requests and continue to iterate - through the ``backup_plan_associations`` field on the - corresponding responses. - - All the usual :class:`google.cloud.backupdr_v1.types.ListBackupPlanAssociationsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., backupplanassociation.ListBackupPlanAssociationsResponse], - request: backupplanassociation.ListBackupPlanAssociationsRequest, - response: backupplanassociation.ListBackupPlanAssociationsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.backupdr_v1.types.ListBackupPlanAssociationsRequest): - The initial request object. - response (google.cloud.backupdr_v1.types.ListBackupPlanAssociationsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = backupplanassociation.ListBackupPlanAssociationsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[backupplanassociation.ListBackupPlanAssociationsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[backupplanassociation.BackupPlanAssociation]: - for page in self.pages: - yield from page.backup_plan_associations - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListBackupPlanAssociationsAsyncPager: - """A pager for iterating through ``list_backup_plan_associations`` requests. - - This class thinly wraps an initial - :class:`google.cloud.backupdr_v1.types.ListBackupPlanAssociationsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``backup_plan_associations`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListBackupPlanAssociations`` requests and continue to iterate - through the ``backup_plan_associations`` field on the - corresponding responses. - - All the usual :class:`google.cloud.backupdr_v1.types.ListBackupPlanAssociationsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[backupplanassociation.ListBackupPlanAssociationsResponse]], - request: backupplanassociation.ListBackupPlanAssociationsRequest, - response: backupplanassociation.ListBackupPlanAssociationsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.backupdr_v1.types.ListBackupPlanAssociationsRequest): - The initial request object. - response (google.cloud.backupdr_v1.types.ListBackupPlanAssociationsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = backupplanassociation.ListBackupPlanAssociationsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[backupplanassociation.ListBackupPlanAssociationsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[backupplanassociation.BackupPlanAssociation]: - async def async_generator(): - async for page in self.pages: - for response in page.backup_plan_associations: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/services/backup_dr/transports/README.rst b/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/services/backup_dr/transports/README.rst deleted file mode 100644 index 036cdee4bde3..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/services/backup_dr/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`BackupDRTransport` is the ABC for all transports. -- public child `BackupDRGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `BackupDRGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseBackupDRRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `BackupDRRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/services/backup_dr/transports/__init__.py b/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/services/backup_dr/transports/__init__.py deleted file mode 100644 index 77cad80ab95c..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/services/backup_dr/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import BackupDRTransport -from .grpc import BackupDRGrpcTransport -from .grpc_asyncio import BackupDRGrpcAsyncIOTransport -from .rest import BackupDRRestTransport -from .rest import BackupDRRestInterceptor - - -# Compile a registry of transports. 
-_transport_registry = OrderedDict() # type: Dict[str, Type[BackupDRTransport]] -_transport_registry['grpc'] = BackupDRGrpcTransport -_transport_registry['grpc_asyncio'] = BackupDRGrpcAsyncIOTransport -_transport_registry['rest'] = BackupDRRestTransport - -__all__ = ( - 'BackupDRTransport', - 'BackupDRGrpcTransport', - 'BackupDRGrpcAsyncIOTransport', - 'BackupDRRestTransport', - 'BackupDRRestInterceptor', -) diff --git a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/services/backup_dr/transports/base.py b/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/services/backup_dr/transports/base.py deleted file mode 100644 index 7f1e7c067fa4..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/services/backup_dr/transports/base.py +++ /dev/null @@ -1,780 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.backupdr_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.backupdr_v1.types import backupdr -from google.cloud.backupdr_v1.types import backupplan -from google.cloud.backupdr_v1.types import backupplanassociation -from google.cloud.backupdr_v1.types import backupvault -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class BackupDRTransport(abc.ABC): - """Abstract transport class for BackupDR.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'backupdr.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'backupdr.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. 
These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. 
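
The ``_prep_wrapped_messages`` helper precomputes, for each RPC, a wrapper carrying its default retry policy and timeout; the dictionary assembled just below spells those defaults out. A minimal sketch of overriding them on a single call, assuming Application Default Credentials (the parent resource name is hypothetical):

.. code-block:: python

    # Sketch: replacing the precomputed retry/timeout defaults per call.
    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries
    from google.cloud import backupdr_v1

    client = backupdr_v1.BackupDRClient()
    custom_retry = retries.Retry(
        initial=1.0,      # first backoff, in seconds
        maximum=10.0,     # ceiling on any single backoff
        multiplier=1.3,   # exponential growth factor
        predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
        deadline=60.0,    # overall time budget across attempts
    )
    for vault in client.list_backup_vaults(
        parent="projects/my-project/locations/us-central1",  # hypothetical
        retry=custom_retry,
        timeout=60.0,
    ):
        print(vault.name)
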
- self._wrapped_methods = { - self.list_management_servers: gapic_v1.method.wrap_method( - self.list_management_servers, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_management_server: gapic_v1.method.wrap_method( - self.get_management_server, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_management_server: gapic_v1.method.wrap_method( - self.create_management_server, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_management_server: gapic_v1.method.wrap_method( - self.delete_management_server, - default_timeout=60.0, - client_info=client_info, - ), - self.create_backup_vault: gapic_v1.method.wrap_method( - self.create_backup_vault, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_backup_vaults: gapic_v1.method.wrap_method( - self.list_backup_vaults, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.fetch_usable_backup_vaults: gapic_v1.method.wrap_method( - self.fetch_usable_backup_vaults, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_backup_vault: gapic_v1.method.wrap_method( - self.get_backup_vault, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.update_backup_vault: gapic_v1.method.wrap_method( - self.update_backup_vault, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_backup_vault: gapic_v1.method.wrap_method( - self.delete_backup_vault, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_data_sources: gapic_v1.method.wrap_method( - self.list_data_sources, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_data_source: gapic_v1.method.wrap_method( - self.get_data_source, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.update_data_source: gapic_v1.method.wrap_method( - self.update_data_source, - default_timeout=60.0, - client_info=client_info, - ), - 
self.list_backups: gapic_v1.method.wrap_method( - self.list_backups, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_backup: gapic_v1.method.wrap_method( - self.get_backup, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.update_backup: gapic_v1.method.wrap_method( - self.update_backup, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_backup: gapic_v1.method.wrap_method( - self.delete_backup, - default_timeout=None, - client_info=client_info, - ), - self.restore_backup: gapic_v1.method.wrap_method( - self.restore_backup, - default_timeout=60.0, - client_info=client_info, - ), - self.create_backup_plan: gapic_v1.method.wrap_method( - self.create_backup_plan, - default_timeout=None, - client_info=client_info, - ), - self.get_backup_plan: gapic_v1.method.wrap_method( - self.get_backup_plan, - default_timeout=None, - client_info=client_info, - ), - self.list_backup_plans: gapic_v1.method.wrap_method( - self.list_backup_plans, - default_timeout=None, - client_info=client_info, - ), - self.delete_backup_plan: gapic_v1.method.wrap_method( - self.delete_backup_plan, - default_timeout=None, - client_info=client_info, - ), - self.create_backup_plan_association: gapic_v1.method.wrap_method( - self.create_backup_plan_association, - default_timeout=None, - client_info=client_info, - ), - self.get_backup_plan_association: gapic_v1.method.wrap_method( - self.get_backup_plan_association, - default_timeout=None, - client_info=client_info, - ), - self.list_backup_plan_associations: gapic_v1.method.wrap_method( - self.list_backup_plan_associations, - default_timeout=None, - client_info=client_info, - ), - self.delete_backup_plan_association: gapic_v1.method.wrap_method( - self.delete_backup_plan_association, - default_timeout=None, - client_info=client_info, - ), - self.trigger_backup: gapic_v1.method.wrap_method( - self.trigger_backup, - default_timeout=None, - client_info=client_info, - ), - self.initialize_service: gapic_v1.method.wrap_method( - self.initialize_service, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_location: gapic_v1.method.wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - self.list_locations: gapic_v1.method.wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - self.get_iam_policy: gapic_v1.method.wrap_method( - self.get_iam_policy, - default_timeout=None, - client_info=client_info, - ), - self.set_iam_policy: gapic_v1.method.wrap_method( - self.set_iam_policy, - default_timeout=None, - client_info=client_info, - ), - self.test_iam_permissions: gapic_v1.method.wrap_method( - self.test_iam_permissions, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: gapic_v1.method.wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: gapic_v1.method.wrap_method( - self.delete_operation, - default_timeout=None, - 
client_info=client_info, - ), - self.get_operation: gapic_v1.method.wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: gapic_v1.method.wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! - """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def list_management_servers(self) -> Callable[ - [backupdr.ListManagementServersRequest], - Union[ - backupdr.ListManagementServersResponse, - Awaitable[backupdr.ListManagementServersResponse] - ]]: - raise NotImplementedError() - - @property - def get_management_server(self) -> Callable[ - [backupdr.GetManagementServerRequest], - Union[ - backupdr.ManagementServer, - Awaitable[backupdr.ManagementServer] - ]]: - raise NotImplementedError() - - @property - def create_management_server(self) -> Callable[ - [backupdr.CreateManagementServerRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_management_server(self) -> Callable[ - [backupdr.DeleteManagementServerRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def create_backup_vault(self) -> Callable[ - [backupvault.CreateBackupVaultRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_backup_vaults(self) -> Callable[ - [backupvault.ListBackupVaultsRequest], - Union[ - backupvault.ListBackupVaultsResponse, - Awaitable[backupvault.ListBackupVaultsResponse] - ]]: - raise NotImplementedError() - - @property - def fetch_usable_backup_vaults(self) -> Callable[ - [backupvault.FetchUsableBackupVaultsRequest], - Union[ - backupvault.FetchUsableBackupVaultsResponse, - Awaitable[backupvault.FetchUsableBackupVaultsResponse] - ]]: - raise NotImplementedError() - - @property - def get_backup_vault(self) -> Callable[ - [backupvault.GetBackupVaultRequest], - Union[ - backupvault.BackupVault, - Awaitable[backupvault.BackupVault] - ]]: - raise NotImplementedError() - - @property - def update_backup_vault(self) -> Callable[ - [backupvault.UpdateBackupVaultRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_backup_vault(self) -> Callable[ - [backupvault.DeleteBackupVaultRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_data_sources(self) -> Callable[ - [backupvault.ListDataSourcesRequest], - Union[ - backupvault.ListDataSourcesResponse, - Awaitable[backupvault.ListDataSourcesResponse] - ]]: - raise NotImplementedError() - - @property - def get_data_source(self) -> Callable[ - [backupvault.GetDataSourceRequest], - Union[ - backupvault.DataSource, - Awaitable[backupvault.DataSource] - ]]: - raise NotImplementedError() - - @property - def update_data_source(self) -> Callable[ - [backupvault.UpdateDataSourceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise 
NotImplementedError() - - @property - def list_backups(self) -> Callable[ - [backupvault.ListBackupsRequest], - Union[ - backupvault.ListBackupsResponse, - Awaitable[backupvault.ListBackupsResponse] - ]]: - raise NotImplementedError() - - @property - def get_backup(self) -> Callable[ - [backupvault.GetBackupRequest], - Union[ - backupvault.Backup, - Awaitable[backupvault.Backup] - ]]: - raise NotImplementedError() - - @property - def update_backup(self) -> Callable[ - [backupvault.UpdateBackupRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_backup(self) -> Callable[ - [backupvault.DeleteBackupRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def restore_backup(self) -> Callable[ - [backupvault.RestoreBackupRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def create_backup_plan(self) -> Callable[ - [backupplan.CreateBackupPlanRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def get_backup_plan(self) -> Callable[ - [backupplan.GetBackupPlanRequest], - Union[ - backupplan.BackupPlan, - Awaitable[backupplan.BackupPlan] - ]]: - raise NotImplementedError() - - @property - def list_backup_plans(self) -> Callable[ - [backupplan.ListBackupPlansRequest], - Union[ - backupplan.ListBackupPlansResponse, - Awaitable[backupplan.ListBackupPlansResponse] - ]]: - raise NotImplementedError() - - @property - def delete_backup_plan(self) -> Callable[ - [backupplan.DeleteBackupPlanRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def create_backup_plan_association(self) -> Callable[ - [backupplanassociation.CreateBackupPlanAssociationRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def get_backup_plan_association(self) -> Callable[ - [backupplanassociation.GetBackupPlanAssociationRequest], - Union[ - backupplanassociation.BackupPlanAssociation, - Awaitable[backupplanassociation.BackupPlanAssociation] - ]]: - raise NotImplementedError() - - @property - def list_backup_plan_associations(self) -> Callable[ - [backupplanassociation.ListBackupPlanAssociationsRequest], - Union[ - backupplanassociation.ListBackupPlanAssociationsResponse, - Awaitable[backupplanassociation.ListBackupPlanAssociationsResponse] - ]]: - raise NotImplementedError() - - @property - def delete_backup_plan_association(self) -> Callable[ - [backupplanassociation.DeleteBackupPlanAssociationRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def trigger_backup(self) -> Callable[ - [backupplanassociation.TriggerBackupRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def initialize_service(self) -> Callable[ - [backupdr.InitializeServiceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], 
- ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def delete_operation( - self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def set_iam_policy( - self, - ) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], - ]: - raise NotImplementedError() - - @property - def get_iam_policy( - self, - ) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], - ]: - raise NotImplementedError() - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Union[ - iam_policy_pb2.TestIamPermissionsResponse, - Awaitable[iam_policy_pb2.TestIamPermissionsResponse], - ], - ]: - raise NotImplementedError() - - @property - def get_location(self, - ) -> Callable[ - [locations_pb2.GetLocationRequest], - Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], - ]: - raise NotImplementedError() - - @property - def list_locations(self, - ) -> Callable[ - [locations_pb2.ListLocationsRequest], - Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'BackupDRTransport', -) diff --git a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/services/backup_dr/transports/grpc.py b/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/services/backup_dr/transports/grpc.py deleted file mode 100644 index 68931c713241..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/services/backup_dr/transports/grpc.py +++ /dev/null @@ -1,1267 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
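
Every RPC property on the abstract transport above is typed ``Union[Response, Awaitable[Response]]``, which is what lets a single ABC cover both the sync and asyncio gRPC subclasses. A self-contained sketch of that pattern with illustrative names (not the generated API):

.. code-block:: python

    # Sketch: one abstract interface serving sync and async implementations.
    import abc
    from typing import Awaitable, Callable, Union


    class EchoTransport(abc.ABC):
        @property
        def echo(self) -> Callable[[str], Union[str, Awaitable[str]]]:
            raise NotImplementedError()


    class SyncEchoTransport(EchoTransport):
        @property
        def echo(self) -> Callable[[str], str]:
            # A concrete synchronous implementation narrows the return type.
            return lambda text: text
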
-# -import json -import logging as std_logging -import pickle -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore - -from google.cloud.backupdr_v1.types import backupdr -from google.cloud.backupdr_v1.types import backupplan -from google.cloud.backupdr_v1.types import backupplanassociation -from google.cloud.backupdr_v1.types import backupvault -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from .base import BackupDRTransport, DEFAULT_CLIENT_INFO - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER - def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": client_call_details.method, - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - - response = continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = response.trailing_metadata() - # Convert gRPC metadata to a list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = response.result() - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response for {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": client_call_details.method, - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class BackupDRGrpcTransport(BackupDRTransport): - """gRPC backend transport for 
BackupDR. - - The BackupDR Service - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'backupdr.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'backupdr.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional[Sequence[str]]): A list of scopes. This argument is - ignored if a ``channel`` instance is provided. - channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) - - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'backupdr.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. 
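
As the constructor documented here notes, a prebuilt ``grpc.Channel`` (or a callable producing one) bypasses the default channel setup. A hedged sketch of building a channel with the transport's own ``create_channel`` and handing it back, assuming Application Default Credentials (the keepalive option is illustrative):

.. code-block:: python

    # Sketch: supplying a custom channel; credentials are then ignored by
    # the transport, since the channel already carries them.
    from google.cloud.backupdr_v1.services.backup_dr.transports import (
        BackupDRGrpcTransport,
    )

    channel = BackupDRGrpcTransport.create_channel(
        "backupdr.googleapis.com",
        options=[("grpc.keepalive_time_ms", 30000)],  # illustrative option
    )
    transport = BackupDRGrpcTransport(channel=channel)
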
If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): An optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def list_management_servers(self) -> Callable[ - [backupdr.ListManagementServersRequest], - backupdr.ListManagementServersResponse]: - r"""Return a callable for the list management servers method over gRPC. - - Lists ManagementServers in a given project and - location. - - Returns: - Callable[[~.ListManagementServersRequest], - ~.ListManagementServersResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_management_servers' not in self._stubs: - self._stubs['list_management_servers'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/ListManagementServers', - request_serializer=backupdr.ListManagementServersRequest.serialize, - response_deserializer=backupdr.ListManagementServersResponse.deserialize, - ) - return self._stubs['list_management_servers'] - - @property - def get_management_server(self) -> Callable[ - [backupdr.GetManagementServerRequest], - backupdr.ManagementServer]: - r"""Return a callable for the get management server method over gRPC. - - Gets details of a single ManagementServer. - - Returns: - Callable[[~.GetManagementServerRequest], - ~.ManagementServer]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
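
Each RPC property here builds its stub on first access and caches it in ``self._stubs``, so repeated lookups reuse one callable per method. A minimal sketch of the same memoization pattern with illustrative names (not the generated API):

.. code-block:: python

    # Sketch: build-on-first-use stub caching, as in the properties above.
    class StubCache:
        def __init__(self, channel):
            self._channel = channel
            self._stubs = {}

        def get(self, name, method_path, serializer, deserializer):
            if name not in self._stubs:
                # unary_unary returns a callable bound to the method path.
                self._stubs[name] = self._channel.unary_unary(
                    method_path,
                    request_serializer=serializer,
                    response_deserializer=deserializer,
                )
            return self._stubs[name]
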
- if 'get_management_server' not in self._stubs: - self._stubs['get_management_server'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/GetManagementServer', - request_serializer=backupdr.GetManagementServerRequest.serialize, - response_deserializer=backupdr.ManagementServer.deserialize, - ) - return self._stubs['get_management_server'] - - @property - def create_management_server(self) -> Callable[ - [backupdr.CreateManagementServerRequest], - operations_pb2.Operation]: - r"""Return a callable for the create management server method over gRPC. - - Creates a new ManagementServer in a given project and - location. - - Returns: - Callable[[~.CreateManagementServerRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_management_server' not in self._stubs: - self._stubs['create_management_server'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/CreateManagementServer', - request_serializer=backupdr.CreateManagementServerRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_management_server'] - - @property - def delete_management_server(self) -> Callable[ - [backupdr.DeleteManagementServerRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete management server method over gRPC. - - Deletes a single ManagementServer. - - Returns: - Callable[[~.DeleteManagementServerRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_management_server' not in self._stubs: - self._stubs['delete_management_server'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/DeleteManagementServer', - request_serializer=backupdr.DeleteManagementServerRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_management_server'] - - @property - def create_backup_vault(self) -> Callable[ - [backupvault.CreateBackupVaultRequest], - operations_pb2.Operation]: - r"""Return a callable for the create backup vault method over gRPC. - - Creates a new BackupVault in a given project and - location. - - Returns: - Callable[[~.CreateBackupVaultRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_backup_vault' not in self._stubs: - self._stubs['create_backup_vault'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/CreateBackupVault', - request_serializer=backupvault.CreateBackupVaultRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_backup_vault'] - - @property - def list_backup_vaults(self) -> Callable[ - [backupvault.ListBackupVaultsRequest], - backupvault.ListBackupVaultsResponse]: - r"""Return a callable for the list backup vaults method over gRPC. 
- - Lists BackupVaults in a given project and location. - - Returns: - Callable[[~.ListBackupVaultsRequest], - ~.ListBackupVaultsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_backup_vaults' not in self._stubs: - self._stubs['list_backup_vaults'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/ListBackupVaults', - request_serializer=backupvault.ListBackupVaultsRequest.serialize, - response_deserializer=backupvault.ListBackupVaultsResponse.deserialize, - ) - return self._stubs['list_backup_vaults'] - - @property - def fetch_usable_backup_vaults(self) -> Callable[ - [backupvault.FetchUsableBackupVaultsRequest], - backupvault.FetchUsableBackupVaultsResponse]: - r"""Return a callable for the fetch usable backup vaults method over gRPC. - - FetchUsableBackupVaults lists usable BackupVaults in - a given project and location. Usable BackupVaults are the - ones for which the user has the backupdr.backupVaults.get permission. - - Returns: - Callable[[~.FetchUsableBackupVaultsRequest], - ~.FetchUsableBackupVaultsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'fetch_usable_backup_vaults' not in self._stubs: - self._stubs['fetch_usable_backup_vaults'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/FetchUsableBackupVaults', - request_serializer=backupvault.FetchUsableBackupVaultsRequest.serialize, - response_deserializer=backupvault.FetchUsableBackupVaultsResponse.deserialize, - ) - return self._stubs['fetch_usable_backup_vaults'] - - @property - def get_backup_vault(self) -> Callable[ - [backupvault.GetBackupVaultRequest], - backupvault.BackupVault]: - r"""Return a callable for the get backup vault method over gRPC. - - Gets details of a BackupVault. - - Returns: - Callable[[~.GetBackupVaultRequest], - ~.BackupVault]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_backup_vault' not in self._stubs: - self._stubs['get_backup_vault'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/GetBackupVault', - request_serializer=backupvault.GetBackupVaultRequest.serialize, - response_deserializer=backupvault.BackupVault.deserialize, - ) - return self._stubs['get_backup_vault'] - - @property - def update_backup_vault(self) -> Callable[ - [backupvault.UpdateBackupVaultRequest], - operations_pb2.Operation]: - r"""Return a callable for the update backup vault method over gRPC. - - Updates the settings of a BackupVault. - - Returns: - Callable[[~.UpdateBackupVaultRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
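
``update_backup_vault`` and the other mutating RPCs above return ``google.longrunning`` operations rather than the finished resource, so callers poll or block on the operation. A hedged sketch with hypothetical resource names, assuming Application Default Credentials:

.. code-block:: python

    # Sketch: driving a long-running update to completion.
    from google.cloud import backupdr_v1
    from google.protobuf import field_mask_pb2

    client = backupdr_v1.BackupDRClient()
    vault = backupdr_v1.BackupVault(
        name="projects/my-project/locations/us-central1/backupVaults/my-vault",
        description="updated description",
    )
    operation = client.update_backup_vault(
        backup_vault=vault,
        update_mask=field_mask_pb2.FieldMask(paths=["description"]),
    )
    result = operation.result(timeout=300)  # blocks until done or timeout
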
- if 'update_backup_vault' not in self._stubs: - self._stubs['update_backup_vault'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/UpdateBackupVault', - request_serializer=backupvault.UpdateBackupVaultRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_backup_vault'] - - @property - def delete_backup_vault(self) -> Callable[ - [backupvault.DeleteBackupVaultRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete backup vault method over gRPC. - - Deletes a BackupVault. - - Returns: - Callable[[~.DeleteBackupVaultRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_backup_vault' not in self._stubs: - self._stubs['delete_backup_vault'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/DeleteBackupVault', - request_serializer=backupvault.DeleteBackupVaultRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_backup_vault'] - - @property - def list_data_sources(self) -> Callable[ - [backupvault.ListDataSourcesRequest], - backupvault.ListDataSourcesResponse]: - r"""Return a callable for the list data sources method over gRPC. - - Lists DataSources in a given project and location. - - Returns: - Callable[[~.ListDataSourcesRequest], - ~.ListDataSourcesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_data_sources' not in self._stubs: - self._stubs['list_data_sources'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/ListDataSources', - request_serializer=backupvault.ListDataSourcesRequest.serialize, - response_deserializer=backupvault.ListDataSourcesResponse.deserialize, - ) - return self._stubs['list_data_sources'] - - @property - def get_data_source(self) -> Callable[ - [backupvault.GetDataSourceRequest], - backupvault.DataSource]: - r"""Return a callable for the get data source method over gRPC. - - Gets details of a DataSource. - - Returns: - Callable[[~.GetDataSourceRequest], - ~.DataSource]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_data_source' not in self._stubs: - self._stubs['get_data_source'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/GetDataSource', - request_serializer=backupvault.GetDataSourceRequest.serialize, - response_deserializer=backupvault.DataSource.deserialize, - ) - return self._stubs['get_data_source'] - - @property - def update_data_source(self) -> Callable[ - [backupvault.UpdateDataSourceRequest], - operations_pb2.Operation]: - r"""Return a callable for the update data source method over gRPC. - - Updates the settings of a DataSource. - - Returns: - Callable[[~.UpdateDataSourceRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_data_source' not in self._stubs: - self._stubs['update_data_source'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/UpdateDataSource', - request_serializer=backupvault.UpdateDataSourceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_data_source'] - - @property - def list_backups(self) -> Callable[ - [backupvault.ListBackupsRequest], - backupvault.ListBackupsResponse]: - r"""Return a callable for the list backups method over gRPC. - - Lists Backups in a given project and location. - - Returns: - Callable[[~.ListBackupsRequest], - ~.ListBackupsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_backups' not in self._stubs: - self._stubs['list_backups'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/ListBackups', - request_serializer=backupvault.ListBackupsRequest.serialize, - response_deserializer=backupvault.ListBackupsResponse.deserialize, - ) - return self._stubs['list_backups'] - - @property - def get_backup(self) -> Callable[ - [backupvault.GetBackupRequest], - backupvault.Backup]: - r"""Return a callable for the get backup method over gRPC. - - Gets details of a Backup. - - Returns: - Callable[[~.GetBackupRequest], - ~.Backup]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_backup' not in self._stubs: - self._stubs['get_backup'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/GetBackup', - request_serializer=backupvault.GetBackupRequest.serialize, - response_deserializer=backupvault.Backup.deserialize, - ) - return self._stubs['get_backup'] - - @property - def update_backup(self) -> Callable[ - [backupvault.UpdateBackupRequest], - operations_pb2.Operation]: - r"""Return a callable for the update backup method over gRPC. - - Updates the settings of a Backup. - - Returns: - Callable[[~.UpdateBackupRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_backup' not in self._stubs: - self._stubs['update_backup'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/UpdateBackup', - request_serializer=backupvault.UpdateBackupRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_backup'] - - @property - def delete_backup(self) -> Callable[ - [backupvault.DeleteBackupRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete backup method over gRPC. - - Deletes a Backup. - - Returns: - Callable[[~.DeleteBackupRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_backup' not in self._stubs: - self._stubs['delete_backup'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/DeleteBackup', - request_serializer=backupvault.DeleteBackupRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_backup'] - - @property - def restore_backup(self) -> Callable[ - [backupvault.RestoreBackupRequest], - operations_pb2.Operation]: - r"""Return a callable for the restore backup method over gRPC. - - Restore from a Backup - - Returns: - Callable[[~.RestoreBackupRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'restore_backup' not in self._stubs: - self._stubs['restore_backup'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/RestoreBackup', - request_serializer=backupvault.RestoreBackupRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['restore_backup'] - - @property - def create_backup_plan(self) -> Callable[ - [backupplan.CreateBackupPlanRequest], - operations_pb2.Operation]: - r"""Return a callable for the create backup plan method over gRPC. - - Create a BackupPlan - - Returns: - Callable[[~.CreateBackupPlanRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_backup_plan' not in self._stubs: - self._stubs['create_backup_plan'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/CreateBackupPlan', - request_serializer=backupplan.CreateBackupPlanRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_backup_plan'] - - @property - def get_backup_plan(self) -> Callable[ - [backupplan.GetBackupPlanRequest], - backupplan.BackupPlan]: - r"""Return a callable for the get backup plan method over gRPC. - - Gets details of a single BackupPlan. - - Returns: - Callable[[~.GetBackupPlanRequest], - ~.BackupPlan]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_backup_plan' not in self._stubs: - self._stubs['get_backup_plan'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/GetBackupPlan', - request_serializer=backupplan.GetBackupPlanRequest.serialize, - response_deserializer=backupplan.BackupPlan.deserialize, - ) - return self._stubs['get_backup_plan'] - - @property - def list_backup_plans(self) -> Callable[ - [backupplan.ListBackupPlansRequest], - backupplan.ListBackupPlansResponse]: - r"""Return a callable for the list backup plans method over gRPC. - - Lists BackupPlans in a given project and location. 
- - Returns: - Callable[[~.ListBackupPlansRequest], - ~.ListBackupPlansResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_backup_plans' not in self._stubs: - self._stubs['list_backup_plans'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/ListBackupPlans', - request_serializer=backupplan.ListBackupPlansRequest.serialize, - response_deserializer=backupplan.ListBackupPlansResponse.deserialize, - ) - return self._stubs['list_backup_plans'] - - @property - def delete_backup_plan(self) -> Callable[ - [backupplan.DeleteBackupPlanRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete backup plan method over gRPC. - - Deletes a single BackupPlan. - - Returns: - Callable[[~.DeleteBackupPlanRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_backup_plan' not in self._stubs: - self._stubs['delete_backup_plan'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/DeleteBackupPlan', - request_serializer=backupplan.DeleteBackupPlanRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_backup_plan'] - - @property - def create_backup_plan_association(self) -> Callable[ - [backupplanassociation.CreateBackupPlanAssociationRequest], - operations_pb2.Operation]: - r"""Return a callable for the create backup plan association method over gRPC. - - Create a BackupPlanAssociation - - Returns: - Callable[[~.CreateBackupPlanAssociationRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_backup_plan_association' not in self._stubs: - self._stubs['create_backup_plan_association'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/CreateBackupPlanAssociation', - request_serializer=backupplanassociation.CreateBackupPlanAssociationRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_backup_plan_association'] - - @property - def get_backup_plan_association(self) -> Callable[ - [backupplanassociation.GetBackupPlanAssociationRequest], - backupplanassociation.BackupPlanAssociation]: - r"""Return a callable for the get backup plan association method over gRPC. - - Gets details of a single BackupPlanAssociation. - - Returns: - Callable[[~.GetBackupPlanAssociationRequest], - ~.BackupPlanAssociation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
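Editor's note: every transport property above follows the same lazy, memoized stub pattern — the gRPC multicallable is built on first access and cached in `_stubs`, so later reads reuse it. A minimal standalone sketch of that pattern (not part of the generated file; the serializer/deserializer lambdas are placeholders where the generated code passes the proto-plus `serialize`/`deserialize` functions):

```python
# Minimal sketch of the lazy, memoized stub pattern used by the
# transport properties in this diff. The (de)serializers below are
# placeholders, not the real proto-plus functions.
from typing import Callable, Dict

import grpc


class LazyStubTransport:
    def __init__(self, channel: grpc.Channel) -> None:
        self._channel = channel
        self._stubs: Dict[str, Callable] = {}

    @property
    def get_backup_vault(self) -> Callable:
        # Create the multicallable once; later accesses hit the cache,
        # so every call reuses the same stub on the same channel.
        if 'get_backup_vault' not in self._stubs:
            self._stubs['get_backup_vault'] = self._channel.unary_unary(
                '/google.cloud.backupdr.v1.BackupDR/GetBackupVault',
                request_serializer=lambda msg: msg.SerializeToString(),
                response_deserializer=lambda raw: raw,  # placeholder
            )
        return self._stubs['get_backup_vault']
```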
- if 'get_backup_plan_association' not in self._stubs: - self._stubs['get_backup_plan_association'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/GetBackupPlanAssociation', - request_serializer=backupplanassociation.GetBackupPlanAssociationRequest.serialize, - response_deserializer=backupplanassociation.BackupPlanAssociation.deserialize, - ) - return self._stubs['get_backup_plan_association'] - - @property - def list_backup_plan_associations(self) -> Callable[ - [backupplanassociation.ListBackupPlanAssociationsRequest], - backupplanassociation.ListBackupPlanAssociationsResponse]: - r"""Return a callable for the list backup plan associations method over gRPC. - - Lists BackupPlanAssociations in a given project and - location. - - Returns: - Callable[[~.ListBackupPlanAssociationsRequest], - ~.ListBackupPlanAssociationsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_backup_plan_associations' not in self._stubs: - self._stubs['list_backup_plan_associations'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/ListBackupPlanAssociations', - request_serializer=backupplanassociation.ListBackupPlanAssociationsRequest.serialize, - response_deserializer=backupplanassociation.ListBackupPlanAssociationsResponse.deserialize, - ) - return self._stubs['list_backup_plan_associations'] - - @property - def delete_backup_plan_association(self) -> Callable[ - [backupplanassociation.DeleteBackupPlanAssociationRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete backup plan association method over gRPC. - - Deletes a single BackupPlanAssociation. - - Returns: - Callable[[~.DeleteBackupPlanAssociationRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_backup_plan_association' not in self._stubs: - self._stubs['delete_backup_plan_association'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/DeleteBackupPlanAssociation', - request_serializer=backupplanassociation.DeleteBackupPlanAssociationRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_backup_plan_association'] - - @property - def trigger_backup(self) -> Callable[ - [backupplanassociation.TriggerBackupRequest], - operations_pb2.Operation]: - r"""Return a callable for the trigger backup method over gRPC. - - Triggers a new Backup. - - Returns: - Callable[[~.TriggerBackupRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'trigger_backup' not in self._stubs: - self._stubs['trigger_backup'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/TriggerBackup', - request_serializer=backupplanassociation.TriggerBackupRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['trigger_backup'] - - @property - def initialize_service(self) -> Callable[ - [backupdr.InitializeServiceRequest], - operations_pb2.Operation]: - r"""Return a callable for the initialize service method over gRPC. - - Initializes the service related config for a project. - - Returns: - Callable[[~.InitializeServiceRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'initialize_service' not in self._stubs: - self._stubs['initialize_service'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/InitializeService', - request_serializer=backupdr.InitializeServiceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['initialize_service'] - - def close(self): - self._logged_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_location" not in self._stubs: - self._stubs["get_location"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - @property - def set_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - Sets the IAM access control policy on the specified - function. Replaces any existing policy. - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( - "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["set_iam_policy"] - - @property - def get_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - Gets the IAM access control policy for a function. - Returns an empty policy if the function exists and does - not have a policy set. - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( - "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["get_iam_policy"] - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse - ]: - r"""Return a callable for the test iam permissions method over gRPC. - Tests the specified permissions against the IAM access control - policy for a function. If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( - "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs["test_iam_permissions"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'BackupDRGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/services/backup_dr/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/services/backup_dr/transports/grpc_asyncio.py deleted file mode 100644 index d1b54004e824..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/services/backup_dr/transports/grpc_asyncio.py +++ /dev/null @@ -1,1575 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -import inspect -import json -import pickle -import logging as std_logging -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import exceptions as core_exceptions -from google.api_core import retry_async as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.backupdr_v1.types import backupdr -from google.cloud.backupdr_v1.types import backupplan -from google.cloud.backupdr_v1.types import backupplanassociation -from google.cloud.backupdr_v1.types import backupvault -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from .base import BackupDRTransport, DEFAULT_CLIENT_INFO -from .grpc import BackupDRGrpcTransport - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER - async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": str(client_call_details.method), - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - response = await continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = await response.trailing_metadata() - # Convert gRPC metadata to a list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = await response - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response 
to rpc {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": str(client_call_details.method), - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class BackupDRGrpcAsyncIOTransport(BackupDRTransport): - """gRPC AsyncIO backend transport for BackupDR. - - The BackupDR Service - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'backupdr.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - scopes (Optional[Sequence[str]]): An optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'backupdr.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'backupdr.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided.
- credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional[Sequence[str]]): An optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self-signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, aio.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials.
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def list_management_servers(self) -> Callable[ - [backupdr.ListManagementServersRequest], - Awaitable[backupdr.ListManagementServersResponse]]: - r"""Return a callable for the list management servers method over gRPC. - - Lists ManagementServers in a given project and - location. - - Returns: - Callable[[~.ListManagementServersRequest], - Awaitable[~.ListManagementServersResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
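Editor's note: as the `grpc_channel` and `operations_client` properties above document, both objects cache on the instance, so construction cost is paid once. A minimal construction sketch, assuming Application Default Credentials are configured and the (here-deleted) module is still installed from the published package; the host shown is the documented default:

```python
# Construction sketch, assuming Application Default Credentials.
import asyncio

from google.cloud.backupdr_v1.services.backup_dr.transports.grpc_asyncio import (
    BackupDRGrpcAsyncIOTransport,
)


async def main() -> None:
    transport = BackupDRGrpcAsyncIOTransport(host='backupdr.googleapis.com')
    # Cached properties: repeated reads return the same objects.
    assert transport.grpc_channel is transport.grpc_channel
    assert transport.operations_client is transport.operations_client
    await transport.grpc_channel.close()


asyncio.run(main())
```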
- if 'list_management_servers' not in self._stubs: - self._stubs['list_management_servers'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/ListManagementServers', - request_serializer=backupdr.ListManagementServersRequest.serialize, - response_deserializer=backupdr.ListManagementServersResponse.deserialize, - ) - return self._stubs['list_management_servers'] - - @property - def get_management_server(self) -> Callable[ - [backupdr.GetManagementServerRequest], - Awaitable[backupdr.ManagementServer]]: - r"""Return a callable for the get management server method over gRPC. - - Gets details of a single ManagementServer. - - Returns: - Callable[[~.GetManagementServerRequest], - Awaitable[~.ManagementServer]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_management_server' not in self._stubs: - self._stubs['get_management_server'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/GetManagementServer', - request_serializer=backupdr.GetManagementServerRequest.serialize, - response_deserializer=backupdr.ManagementServer.deserialize, - ) - return self._stubs['get_management_server'] - - @property - def create_management_server(self) -> Callable[ - [backupdr.CreateManagementServerRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create management server method over gRPC. - - Creates a new ManagementServer in a given project and - location. - - Returns: - Callable[[~.CreateManagementServerRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_management_server' not in self._stubs: - self._stubs['create_management_server'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/CreateManagementServer', - request_serializer=backupdr.CreateManagementServerRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_management_server'] - - @property - def delete_management_server(self) -> Callable[ - [backupdr.DeleteManagementServerRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete management server method over gRPC. - - Deletes a single ManagementServer. - - Returns: - Callable[[~.DeleteManagementServerRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_management_server' not in self._stubs: - self._stubs['delete_management_server'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/DeleteManagementServer', - request_serializer=backupdr.DeleteManagementServerRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_management_server'] - - @property - def create_backup_vault(self) -> Callable[ - [backupvault.CreateBackupVaultRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create backup vault method over gRPC. - - Creates a new BackupVault in a given project and - location. - - Returns: - Callable[[~.CreateBackupVaultRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_backup_vault' not in self._stubs: - self._stubs['create_backup_vault'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/CreateBackupVault', - request_serializer=backupvault.CreateBackupVaultRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_backup_vault'] - - @property - def list_backup_vaults(self) -> Callable[ - [backupvault.ListBackupVaultsRequest], - Awaitable[backupvault.ListBackupVaultsResponse]]: - r"""Return a callable for the list backup vaults method over gRPC. - - Lists BackupVaults in a given project and location. - - Returns: - Callable[[~.ListBackupVaultsRequest], - Awaitable[~.ListBackupVaultsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_backup_vaults' not in self._stubs: - self._stubs['list_backup_vaults'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/ListBackupVaults', - request_serializer=backupvault.ListBackupVaultsRequest.serialize, - response_deserializer=backupvault.ListBackupVaultsResponse.deserialize, - ) - return self._stubs['list_backup_vaults'] - - @property - def fetch_usable_backup_vaults(self) -> Callable[ - [backupvault.FetchUsableBackupVaultsRequest], - Awaitable[backupvault.FetchUsableBackupVaultsResponse]]: - r"""Return a callable for the fetch usable backup vaults method over gRPC. - - FetchUsableBackupVaults lists usable BackupVaults in - a given project and location. Usable BackupVaults are the - ones for which the user has the - backupdr.backupVaults.get permission. - - Returns: - Callable[[~.FetchUsableBackupVaultsRequest], - Awaitable[~.FetchUsableBackupVaultsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each.
- if 'fetch_usable_backup_vaults' not in self._stubs: - self._stubs['fetch_usable_backup_vaults'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/FetchUsableBackupVaults', - request_serializer=backupvault.FetchUsableBackupVaultsRequest.serialize, - response_deserializer=backupvault.FetchUsableBackupVaultsResponse.deserialize, - ) - return self._stubs['fetch_usable_backup_vaults'] - - @property - def get_backup_vault(self) -> Callable[ - [backupvault.GetBackupVaultRequest], - Awaitable[backupvault.BackupVault]]: - r"""Return a callable for the get backup vault method over gRPC. - - Gets details of a BackupVault. - - Returns: - Callable[[~.GetBackupVaultRequest], - Awaitable[~.BackupVault]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_backup_vault' not in self._stubs: - self._stubs['get_backup_vault'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/GetBackupVault', - request_serializer=backupvault.GetBackupVaultRequest.serialize, - response_deserializer=backupvault.BackupVault.deserialize, - ) - return self._stubs['get_backup_vault'] - - @property - def update_backup_vault(self) -> Callable[ - [backupvault.UpdateBackupVaultRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update backup vault method over gRPC. - - Updates the settings of a BackupVault. - - Returns: - Callable[[~.UpdateBackupVaultRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_backup_vault' not in self._stubs: - self._stubs['update_backup_vault'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/UpdateBackupVault', - request_serializer=backupvault.UpdateBackupVaultRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_backup_vault'] - - @property - def delete_backup_vault(self) -> Callable[ - [backupvault.DeleteBackupVaultRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete backup vault method over gRPC. - - Deletes a BackupVault. - - Returns: - Callable[[~.DeleteBackupVaultRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_backup_vault' not in self._stubs: - self._stubs['delete_backup_vault'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/DeleteBackupVault', - request_serializer=backupvault.DeleteBackupVaultRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_backup_vault'] - - @property - def list_data_sources(self) -> Callable[ - [backupvault.ListDataSourcesRequest], - Awaitable[backupvault.ListDataSourcesResponse]]: - r"""Return a callable for the list data sources method over gRPC. - - Lists DataSources in a given project and location. 
- - Returns: - Callable[[~.ListDataSourcesRequest], - Awaitable[~.ListDataSourcesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_data_sources' not in self._stubs: - self._stubs['list_data_sources'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/ListDataSources', - request_serializer=backupvault.ListDataSourcesRequest.serialize, - response_deserializer=backupvault.ListDataSourcesResponse.deserialize, - ) - return self._stubs['list_data_sources'] - - @property - def get_data_source(self) -> Callable[ - [backupvault.GetDataSourceRequest], - Awaitable[backupvault.DataSource]]: - r"""Return a callable for the get data source method over gRPC. - - Gets details of a DataSource. - - Returns: - Callable[[~.GetDataSourceRequest], - Awaitable[~.DataSource]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_data_source' not in self._stubs: - self._stubs['get_data_source'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/GetDataSource', - request_serializer=backupvault.GetDataSourceRequest.serialize, - response_deserializer=backupvault.DataSource.deserialize, - ) - return self._stubs['get_data_source'] - - @property - def update_data_source(self) -> Callable[ - [backupvault.UpdateDataSourceRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update data source method over gRPC. - - Updates the settings of a DataSource. - - Returns: - Callable[[~.UpdateDataSourceRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_data_source' not in self._stubs: - self._stubs['update_data_source'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/UpdateDataSource', - request_serializer=backupvault.UpdateDataSourceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_data_source'] - - @property - def list_backups(self) -> Callable[ - [backupvault.ListBackupsRequest], - Awaitable[backupvault.ListBackupsResponse]]: - r"""Return a callable for the list backups method over gRPC. - - Lists Backups in a given project and location. - - Returns: - Callable[[~.ListBackupsRequest], - Awaitable[~.ListBackupsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_backups' not in self._stubs: - self._stubs['list_backups'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/ListBackups', - request_serializer=backupvault.ListBackupsRequest.serialize, - response_deserializer=backupvault.ListBackupsResponse.deserialize, - ) - return self._stubs['list_backups'] - - @property - def get_backup(self) -> Callable[ - [backupvault.GetBackupRequest], - Awaitable[backupvault.Backup]]: - r"""Return a callable for the get backup method over gRPC. - - Gets details of a Backup. - - Returns: - Callable[[~.GetBackupRequest], - Awaitable[~.Backup]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_backup' not in self._stubs: - self._stubs['get_backup'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/GetBackup', - request_serializer=backupvault.GetBackupRequest.serialize, - response_deserializer=backupvault.Backup.deserialize, - ) - return self._stubs['get_backup'] - - @property - def update_backup(self) -> Callable[ - [backupvault.UpdateBackupRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update backup method over gRPC. - - Updates the settings of a Backup. - - Returns: - Callable[[~.UpdateBackupRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_backup' not in self._stubs: - self._stubs['update_backup'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/UpdateBackup', - request_serializer=backupvault.UpdateBackupRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_backup'] - - @property - def delete_backup(self) -> Callable[ - [backupvault.DeleteBackupRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete backup method over gRPC. - - Deletes a Backup. - - Returns: - Callable[[~.DeleteBackupRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_backup' not in self._stubs: - self._stubs['delete_backup'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/DeleteBackup', - request_serializer=backupvault.DeleteBackupRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_backup'] - - @property - def restore_backup(self) -> Callable[ - [backupvault.RestoreBackupRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the restore backup method over gRPC. - - Restore from a Backup - - Returns: - Callable[[~.RestoreBackupRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'restore_backup' not in self._stubs: - self._stubs['restore_backup'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/RestoreBackup', - request_serializer=backupvault.RestoreBackupRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['restore_backup'] - - @property - def create_backup_plan(self) -> Callable[ - [backupplan.CreateBackupPlanRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create backup plan method over gRPC. - - Create a BackupPlan - - Returns: - Callable[[~.CreateBackupPlanRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_backup_plan' not in self._stubs: - self._stubs['create_backup_plan'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/CreateBackupPlan', - request_serializer=backupplan.CreateBackupPlanRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_backup_plan'] - - @property - def get_backup_plan(self) -> Callable[ - [backupplan.GetBackupPlanRequest], - Awaitable[backupplan.BackupPlan]]: - r"""Return a callable for the get backup plan method over gRPC. - - Gets details of a single BackupPlan. - - Returns: - Callable[[~.GetBackupPlanRequest], - Awaitable[~.BackupPlan]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_backup_plan' not in self._stubs: - self._stubs['get_backup_plan'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/GetBackupPlan', - request_serializer=backupplan.GetBackupPlanRequest.serialize, - response_deserializer=backupplan.BackupPlan.deserialize, - ) - return self._stubs['get_backup_plan'] - - @property - def list_backup_plans(self) -> Callable[ - [backupplan.ListBackupPlansRequest], - Awaitable[backupplan.ListBackupPlansResponse]]: - r"""Return a callable for the list backup plans method over gRPC. - - Lists BackupPlans in a given project and location. - - Returns: - Callable[[~.ListBackupPlansRequest], - Awaitable[~.ListBackupPlansResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_backup_plans' not in self._stubs: - self._stubs['list_backup_plans'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/ListBackupPlans', - request_serializer=backupplan.ListBackupPlansRequest.serialize, - response_deserializer=backupplan.ListBackupPlansResponse.deserialize, - ) - return self._stubs['list_backup_plans'] - - @property - def delete_backup_plan(self) -> Callable[ - [backupplan.DeleteBackupPlanRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete backup plan method over gRPC. - - Deletes a single BackupPlan. 
- - Returns: - Callable[[~.DeleteBackupPlanRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_backup_plan' not in self._stubs: - self._stubs['delete_backup_plan'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/DeleteBackupPlan', - request_serializer=backupplan.DeleteBackupPlanRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_backup_plan'] - - @property - def create_backup_plan_association(self) -> Callable[ - [backupplanassociation.CreateBackupPlanAssociationRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create backup plan association method over gRPC. - - Create a BackupPlanAssociation - - Returns: - Callable[[~.CreateBackupPlanAssociationRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_backup_plan_association' not in self._stubs: - self._stubs['create_backup_plan_association'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/CreateBackupPlanAssociation', - request_serializer=backupplanassociation.CreateBackupPlanAssociationRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_backup_plan_association'] - - @property - def get_backup_plan_association(self) -> Callable[ - [backupplanassociation.GetBackupPlanAssociationRequest], - Awaitable[backupplanassociation.BackupPlanAssociation]]: - r"""Return a callable for the get backup plan association method over gRPC. - - Gets details of a single BackupPlanAssociation. - - Returns: - Callable[[~.GetBackupPlanAssociationRequest], - Awaitable[~.BackupPlanAssociation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_backup_plan_association' not in self._stubs: - self._stubs['get_backup_plan_association'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/GetBackupPlanAssociation', - request_serializer=backupplanassociation.GetBackupPlanAssociationRequest.serialize, - response_deserializer=backupplanassociation.BackupPlanAssociation.deserialize, - ) - return self._stubs['get_backup_plan_association'] - - @property - def list_backup_plan_associations(self) -> Callable[ - [backupplanassociation.ListBackupPlanAssociationsRequest], - Awaitable[backupplanassociation.ListBackupPlanAssociationsResponse]]: - r"""Return a callable for the list backup plan associations method over gRPC. - - Lists BackupPlanAssociations in a given project and - location. - - Returns: - Callable[[~.ListBackupPlanAssociationsRequest], - Awaitable[~.ListBackupPlanAssociationsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_backup_plan_associations' not in self._stubs: - self._stubs['list_backup_plan_associations'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/ListBackupPlanAssociations', - request_serializer=backupplanassociation.ListBackupPlanAssociationsRequest.serialize, - response_deserializer=backupplanassociation.ListBackupPlanAssociationsResponse.deserialize, - ) - return self._stubs['list_backup_plan_associations'] - - @property - def delete_backup_plan_association(self) -> Callable[ - [backupplanassociation.DeleteBackupPlanAssociationRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete backup plan association method over gRPC. - - Deletes a single BackupPlanAssociation. - - Returns: - Callable[[~.DeleteBackupPlanAssociationRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_backup_plan_association' not in self._stubs: - self._stubs['delete_backup_plan_association'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/DeleteBackupPlanAssociation', - request_serializer=backupplanassociation.DeleteBackupPlanAssociationRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_backup_plan_association'] - - @property - def trigger_backup(self) -> Callable[ - [backupplanassociation.TriggerBackupRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the trigger backup method over gRPC. - - Triggers a new Backup. - - Returns: - Callable[[~.TriggerBackupRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'trigger_backup' not in self._stubs: - self._stubs['trigger_backup'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/TriggerBackup', - request_serializer=backupplanassociation.TriggerBackupRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['trigger_backup'] - - @property - def initialize_service(self) -> Callable[ - [backupdr.InitializeServiceRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the initialize service method over gRPC. - - Initializes the service related config for a project. - - Returns: - Callable[[~.InitializeServiceRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
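Editor's note: unlike their synchronous counterparts earlier in this diff, the async properties here return awaitable callables. A hedged invocation sketch — the `transport` object, resource name, and rule id are made-up placeholders, and the request fields are assumed from the public TriggerBackupRequest shape:

```python
# Usage sketch: awaiting one of the async transport callables.
# `transport` is assumed to be a constructed BackupDRGrpcAsyncIOTransport;
# the name and rule_id values are placeholders, not real resources.
from google.cloud.backupdr_v1.types import backupplanassociation


async def trigger(transport) -> None:
    request = backupplanassociation.TriggerBackupRequest(
        name='projects/my-project/locations/us-central1/backupPlanAssociations/my-bpa',
        rule_id='daily-rule',
    )
    # Awaiting the callable performs the unary-unary RPC and returns a
    # google.longrunning Operation message.
    operation = await transport.trigger_backup(request)
    print(operation.name)
```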
- if 'initialize_service' not in self._stubs: - self._stubs['initialize_service'] = self._logged_channel.unary_unary( - '/google.cloud.backupdr.v1.BackupDR/InitializeService', - request_serializer=backupdr.InitializeServiceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['initialize_service'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.list_management_servers: self._wrap_method( - self.list_management_servers, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_management_server: self._wrap_method( - self.get_management_server, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_management_server: self._wrap_method( - self.create_management_server, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_management_server: self._wrap_method( - self.delete_management_server, - default_timeout=60.0, - client_info=client_info, - ), - self.create_backup_vault: self._wrap_method( - self.create_backup_vault, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_backup_vaults: self._wrap_method( - self.list_backup_vaults, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.fetch_usable_backup_vaults: self._wrap_method( - self.fetch_usable_backup_vaults, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_backup_vault: self._wrap_method( - self.get_backup_vault, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.update_backup_vault: self._wrap_method( - self.update_backup_vault, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_backup_vault: self._wrap_method( - self.delete_backup_vault, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_data_sources: self._wrap_method( - self.list_data_sources, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_data_source: 
self._wrap_method( - self.get_data_source, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.update_data_source: self._wrap_method( - self.update_data_source, - default_timeout=60.0, - client_info=client_info, - ), - self.list_backups: self._wrap_method( - self.list_backups, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_backup: self._wrap_method( - self.get_backup, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.update_backup: self._wrap_method( - self.update_backup, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_backup: self._wrap_method( - self.delete_backup, - default_timeout=None, - client_info=client_info, - ), - self.restore_backup: self._wrap_method( - self.restore_backup, - default_timeout=60.0, - client_info=client_info, - ), - self.create_backup_plan: self._wrap_method( - self.create_backup_plan, - default_timeout=None, - client_info=client_info, - ), - self.get_backup_plan: self._wrap_method( - self.get_backup_plan, - default_timeout=None, - client_info=client_info, - ), - self.list_backup_plans: self._wrap_method( - self.list_backup_plans, - default_timeout=None, - client_info=client_info, - ), - self.delete_backup_plan: self._wrap_method( - self.delete_backup_plan, - default_timeout=None, - client_info=client_info, - ), - self.create_backup_plan_association: self._wrap_method( - self.create_backup_plan_association, - default_timeout=None, - client_info=client_info, - ), - self.get_backup_plan_association: self._wrap_method( - self.get_backup_plan_association, - default_timeout=None, - client_info=client_info, - ), - self.list_backup_plan_associations: self._wrap_method( - self.list_backup_plan_associations, - default_timeout=None, - client_info=client_info, - ), - self.delete_backup_plan_association: self._wrap_method( - self.delete_backup_plan_association, - default_timeout=None, - client_info=client_info, - ), - self.trigger_backup: self._wrap_method( - self.trigger_backup, - default_timeout=None, - client_info=client_info, - ), - self.initialize_service: self._wrap_method( - self.initialize_service, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_location: self._wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - self.list_locations: self._wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - self.get_iam_policy: self._wrap_method( - self.get_iam_policy, - default_timeout=None, - client_info=client_info, - ), - self.set_iam_policy: self._wrap_method( - self.set_iam_policy, - default_timeout=None, - client_info=client_info, - ), - self.test_iam_permissions: self._wrap_method( - self.test_iam_permissions, - default_timeout=None, - client_info=client_info, - ), - 
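# Note (editorial aside, not part of the generated file): the wrappers that
# pass default_retry above use an AsyncRetry policy with exponential
# backoff -- delays start at 1.0 s, grow by a factor of 1.3 per attempt,
# are capped at 10.0 s, retry only on ServiceUnavailable, and give up
# after a 60 s overall deadline. The location, IAM, and operations mixin
# wrappers are configured with no retry and no default timeout.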
self.cancel_operation: self._wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: self._wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: self._wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: self._wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
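# Note (editorial aside, not part of the generated file): these mixin stubs
# exchange well-known protobuf messages directly, so the serializers are the
# raw protobuf SerializeToString/FromString methods rather than the
# proto-plus serialize/deserialize pair used for the BackupDR types above.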
- if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_location" not in self._stubs: - self._stubs["get_location"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - @property - def set_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - Sets the IAM access control policy on the specified - function. Replaces any existing policy. - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( - "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["set_iam_policy"] - - @property - def get_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - Gets the IAM access control policy for a function. - Returns an empty policy if the function exists and does - not have a policy set. - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( - "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["get_iam_policy"] - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse - ]: - r"""Return a callable for the test iam permissions method over gRPC. - Tests the specified permissions against the IAM access control - policy for a function. If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( - "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs["test_iam_permissions"] - - -__all__ = ( - 'BackupDRGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/services/backup_dr/transports/rest.py b/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/services/backup_dr/transports/rest.py deleted file mode 100644 index 8c3e496ee9d0..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/services/backup_dr/transports/rest.py +++ /dev/null @@ -1,6329 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import logging -import json # type: ignore - -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.api_core import operations_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.cloud.location import locations_pb2 # type: ignore - -from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - - -from google.cloud.backupdr_v1.types import backupdr -from google.cloud.backupdr_v1.types import backupplan -from google.cloud.backupdr_v1.types import backupplanassociation -from google.cloud.backupdr_v1.types import backupvault -from google.longrunning import operations_pb2 # type: ignore - - -from .rest_base import _BaseBackupDRRestTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = logging.getLogger(__name__) - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=f"requests@{requests_version}", -) - - -class BackupDRRestInterceptor: - """Interceptor for BackupDR. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the BackupDRRestTransport. - - .. 
code-block:: python - class MyCustomBackupDRInterceptor(BackupDRRestInterceptor): - def pre_create_backup_plan(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_backup_plan(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_backup_plan_association(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_backup_plan_association(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_backup_vault(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_backup_vault(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_management_server(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_management_server(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_backup(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_delete_backup(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_backup_plan(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_delete_backup_plan(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_backup_plan_association(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_delete_backup_plan_association(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_backup_vault(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_delete_backup_vault(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_management_server(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_delete_management_server(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_fetch_usable_backup_vaults(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_fetch_usable_backup_vaults(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_backup(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_backup(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_backup_plan(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_backup_plan(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_backup_plan_association(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_backup_plan_association(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_backup_vault(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_backup_vault(self, response): - logging.log(f"Received response: 
{response}") - return response - - def pre_get_data_source(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_data_source(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_management_server(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_management_server(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_initialize_service(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_initialize_service(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_backup_plan_associations(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_backup_plan_associations(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_backup_plans(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_backup_plans(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_backups(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_backups(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_backup_vaults(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_backup_vaults(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_data_sources(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_data_sources(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_management_servers(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_management_servers(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_restore_backup(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_restore_backup(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_trigger_backup(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_trigger_backup(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_backup(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_backup(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_backup_vault(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_backup_vault(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_data_source(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_data_source(self, response): - logging.log(f"Received response: {response}") - return response - - transport = BackupDRRestTransport(interceptor=MyCustomBackupDRInterceptor()) - client = 
BackupDRClient(transport=transport) - - - """ - def pre_create_backup_plan(self, request: backupplan.CreateBackupPlanRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backupplan.CreateBackupPlanRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_backup_plan - - Override in a subclass to manipulate the request or metadata - before they are sent to the BackupDR server. - """ - return request, metadata - - def post_create_backup_plan(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_backup_plan - - DEPRECATED. Please use the `post_create_backup_plan_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BackupDR server but before - it is returned to user code. This `post_create_backup_plan` interceptor runs - before the `post_create_backup_plan_with_metadata` interceptor. - """ - return response - - def post_create_backup_plan_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_backup_plan - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BackupDR server but before it is returned to user code. - - We recommend only using this `post_create_backup_plan_with_metadata` - interceptor in new development instead of the `post_create_backup_plan` interceptor. - When both interceptors are used, this `post_create_backup_plan_with_metadata` interceptor runs after the - `post_create_backup_plan` interceptor. The (possibly modified) response returned by - `post_create_backup_plan` will be passed to - `post_create_backup_plan_with_metadata`. - """ - return response, metadata - - def pre_create_backup_plan_association(self, request: backupplanassociation.CreateBackupPlanAssociationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backupplanassociation.CreateBackupPlanAssociationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_backup_plan_association - - Override in a subclass to manipulate the request or metadata - before they are sent to the BackupDR server. - """ - return request, metadata - - def post_create_backup_plan_association(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_backup_plan_association - - DEPRECATED. Please use the `post_create_backup_plan_association_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BackupDR server but before - it is returned to user code. This `post_create_backup_plan_association` interceptor runs - before the `post_create_backup_plan_association_with_metadata` interceptor. - """ - return response - - def post_create_backup_plan_association_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_backup_plan_association - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BackupDR server but before it is returned to user code. 
- - We recommend only using this `post_create_backup_plan_association_with_metadata` - interceptor in new development instead of the `post_create_backup_plan_association` interceptor. - When both interceptors are used, this `post_create_backup_plan_association_with_metadata` interceptor runs after the - `post_create_backup_plan_association` interceptor. The (possibly modified) response returned by - `post_create_backup_plan_association` will be passed to - `post_create_backup_plan_association_with_metadata`. - """ - return response, metadata - - def pre_create_backup_vault(self, request: backupvault.CreateBackupVaultRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backupvault.CreateBackupVaultRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_backup_vault - - Override in a subclass to manipulate the request or metadata - before they are sent to the BackupDR server. - """ - return request, metadata - - def post_create_backup_vault(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_backup_vault - - DEPRECATED. Please use the `post_create_backup_vault_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BackupDR server but before - it is returned to user code. This `post_create_backup_vault` interceptor runs - before the `post_create_backup_vault_with_metadata` interceptor. - """ - return response - - def post_create_backup_vault_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_backup_vault - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BackupDR server but before it is returned to user code. - - We recommend only using this `post_create_backup_vault_with_metadata` - interceptor in new development instead of the `post_create_backup_vault` interceptor. - When both interceptors are used, this `post_create_backup_vault_with_metadata` interceptor runs after the - `post_create_backup_vault` interceptor. The (possibly modified) response returned by - `post_create_backup_vault` will be passed to - `post_create_backup_vault_with_metadata`. - """ - return response, metadata - - def pre_create_management_server(self, request: backupdr.CreateManagementServerRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backupdr.CreateManagementServerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_management_server - - Override in a subclass to manipulate the request or metadata - before they are sent to the BackupDR server. - """ - return request, metadata - - def post_create_management_server(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_management_server - - DEPRECATED. Please use the `post_create_management_server_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BackupDR server but before - it is returned to user code. This `post_create_management_server` interceptor runs - before the `post_create_management_server_with_metadata` interceptor. 
- """ - return response - - def post_create_management_server_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_management_server - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BackupDR server but before it is returned to user code. - - We recommend only using this `post_create_management_server_with_metadata` - interceptor in new development instead of the `post_create_management_server` interceptor. - When both interceptors are used, this `post_create_management_server_with_metadata` interceptor runs after the - `post_create_management_server` interceptor. The (possibly modified) response returned by - `post_create_management_server` will be passed to - `post_create_management_server_with_metadata`. - """ - return response, metadata - - def pre_delete_backup(self, request: backupvault.DeleteBackupRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backupvault.DeleteBackupRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_backup - - Override in a subclass to manipulate the request or metadata - before they are sent to the BackupDR server. - """ - return request, metadata - - def post_delete_backup(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_backup - - DEPRECATED. Please use the `post_delete_backup_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BackupDR server but before - it is returned to user code. This `post_delete_backup` interceptor runs - before the `post_delete_backup_with_metadata` interceptor. - """ - return response - - def post_delete_backup_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for delete_backup - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BackupDR server but before it is returned to user code. - - We recommend only using this `post_delete_backup_with_metadata` - interceptor in new development instead of the `post_delete_backup` interceptor. - When both interceptors are used, this `post_delete_backup_with_metadata` interceptor runs after the - `post_delete_backup` interceptor. The (possibly modified) response returned by - `post_delete_backup` will be passed to - `post_delete_backup_with_metadata`. - """ - return response, metadata - - def pre_delete_backup_plan(self, request: backupplan.DeleteBackupPlanRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backupplan.DeleteBackupPlanRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_backup_plan - - Override in a subclass to manipulate the request or metadata - before they are sent to the BackupDR server. - """ - return request, metadata - - def post_delete_backup_plan(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_backup_plan - - DEPRECATED. Please use the `post_delete_backup_plan_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BackupDR server but before - it is returned to user code. 
This `post_delete_backup_plan` interceptor runs - before the `post_delete_backup_plan_with_metadata` interceptor. - """ - return response - - def post_delete_backup_plan_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for delete_backup_plan - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BackupDR server but before it is returned to user code. - - We recommend only using this `post_delete_backup_plan_with_metadata` - interceptor in new development instead of the `post_delete_backup_plan` interceptor. - When both interceptors are used, this `post_delete_backup_plan_with_metadata` interceptor runs after the - `post_delete_backup_plan` interceptor. The (possibly modified) response returned by - `post_delete_backup_plan` will be passed to - `post_delete_backup_plan_with_metadata`. - """ - return response, metadata - - def pre_delete_backup_plan_association(self, request: backupplanassociation.DeleteBackupPlanAssociationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backupplanassociation.DeleteBackupPlanAssociationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_backup_plan_association - - Override in a subclass to manipulate the request or metadata - before they are sent to the BackupDR server. - """ - return request, metadata - - def post_delete_backup_plan_association(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_backup_plan_association - - DEPRECATED. Please use the `post_delete_backup_plan_association_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BackupDR server but before - it is returned to user code. This `post_delete_backup_plan_association` interceptor runs - before the `post_delete_backup_plan_association_with_metadata` interceptor. - """ - return response - - def post_delete_backup_plan_association_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for delete_backup_plan_association - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BackupDR server but before it is returned to user code. - - We recommend only using this `post_delete_backup_plan_association_with_metadata` - interceptor in new development instead of the `post_delete_backup_plan_association` interceptor. - When both interceptors are used, this `post_delete_backup_plan_association_with_metadata` interceptor runs after the - `post_delete_backup_plan_association` interceptor. The (possibly modified) response returned by - `post_delete_backup_plan_association` will be passed to - `post_delete_backup_plan_association_with_metadata`. - """ - return response, metadata - - def pre_delete_backup_vault(self, request: backupvault.DeleteBackupVaultRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backupvault.DeleteBackupVaultRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_backup_vault - - Override in a subclass to manipulate the request or metadata - before they are sent to the BackupDR server. 
- """ - return request, metadata - - def post_delete_backup_vault(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_backup_vault - - DEPRECATED. Please use the `post_delete_backup_vault_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BackupDR server but before - it is returned to user code. This `post_delete_backup_vault` interceptor runs - before the `post_delete_backup_vault_with_metadata` interceptor. - """ - return response - - def post_delete_backup_vault_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for delete_backup_vault - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BackupDR server but before it is returned to user code. - - We recommend only using this `post_delete_backup_vault_with_metadata` - interceptor in new development instead of the `post_delete_backup_vault` interceptor. - When both interceptors are used, this `post_delete_backup_vault_with_metadata` interceptor runs after the - `post_delete_backup_vault` interceptor. The (possibly modified) response returned by - `post_delete_backup_vault` will be passed to - `post_delete_backup_vault_with_metadata`. - """ - return response, metadata - - def pre_delete_management_server(self, request: backupdr.DeleteManagementServerRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backupdr.DeleteManagementServerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_management_server - - Override in a subclass to manipulate the request or metadata - before they are sent to the BackupDR server. - """ - return request, metadata - - def post_delete_management_server(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_management_server - - DEPRECATED. Please use the `post_delete_management_server_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BackupDR server but before - it is returned to user code. This `post_delete_management_server` interceptor runs - before the `post_delete_management_server_with_metadata` interceptor. - """ - return response - - def post_delete_management_server_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for delete_management_server - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BackupDR server but before it is returned to user code. - - We recommend only using this `post_delete_management_server_with_metadata` - interceptor in new development instead of the `post_delete_management_server` interceptor. - When both interceptors are used, this `post_delete_management_server_with_metadata` interceptor runs after the - `post_delete_management_server` interceptor. The (possibly modified) response returned by - `post_delete_management_server` will be passed to - `post_delete_management_server_with_metadata`. 
- """ - return response, metadata - - def pre_fetch_usable_backup_vaults(self, request: backupvault.FetchUsableBackupVaultsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backupvault.FetchUsableBackupVaultsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for fetch_usable_backup_vaults - - Override in a subclass to manipulate the request or metadata - before they are sent to the BackupDR server. - """ - return request, metadata - - def post_fetch_usable_backup_vaults(self, response: backupvault.FetchUsableBackupVaultsResponse) -> backupvault.FetchUsableBackupVaultsResponse: - """Post-rpc interceptor for fetch_usable_backup_vaults - - DEPRECATED. Please use the `post_fetch_usable_backup_vaults_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BackupDR server but before - it is returned to user code. This `post_fetch_usable_backup_vaults` interceptor runs - before the `post_fetch_usable_backup_vaults_with_metadata` interceptor. - """ - return response - - def post_fetch_usable_backup_vaults_with_metadata(self, response: backupvault.FetchUsableBackupVaultsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backupvault.FetchUsableBackupVaultsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for fetch_usable_backup_vaults - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BackupDR server but before it is returned to user code. - - We recommend only using this `post_fetch_usable_backup_vaults_with_metadata` - interceptor in new development instead of the `post_fetch_usable_backup_vaults` interceptor. - When both interceptors are used, this `post_fetch_usable_backup_vaults_with_metadata` interceptor runs after the - `post_fetch_usable_backup_vaults` interceptor. The (possibly modified) response returned by - `post_fetch_usable_backup_vaults` will be passed to - `post_fetch_usable_backup_vaults_with_metadata`. - """ - return response, metadata - - def pre_get_backup(self, request: backupvault.GetBackupRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backupvault.GetBackupRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_backup - - Override in a subclass to manipulate the request or metadata - before they are sent to the BackupDR server. - """ - return request, metadata - - def post_get_backup(self, response: backupvault.Backup) -> backupvault.Backup: - """Post-rpc interceptor for get_backup - - DEPRECATED. Please use the `post_get_backup_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BackupDR server but before - it is returned to user code. This `post_get_backup` interceptor runs - before the `post_get_backup_with_metadata` interceptor. - """ - return response - - def post_get_backup_with_metadata(self, response: backupvault.Backup, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backupvault.Backup, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_backup - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BackupDR server but before it is returned to user code. - - We recommend only using this `post_get_backup_with_metadata` - interceptor in new development instead of the `post_get_backup` interceptor. 
- When both interceptors are used, this `post_get_backup_with_metadata` interceptor runs after the - `post_get_backup` interceptor. The (possibly modified) response returned by - `post_get_backup` will be passed to - `post_get_backup_with_metadata`. - """ - return response, metadata - - def pre_get_backup_plan(self, request: backupplan.GetBackupPlanRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backupplan.GetBackupPlanRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_backup_plan - - Override in a subclass to manipulate the request or metadata - before they are sent to the BackupDR server. - """ - return request, metadata - - def post_get_backup_plan(self, response: backupplan.BackupPlan) -> backupplan.BackupPlan: - """Post-rpc interceptor for get_backup_plan - - DEPRECATED. Please use the `post_get_backup_plan_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BackupDR server but before - it is returned to user code. This `post_get_backup_plan` interceptor runs - before the `post_get_backup_plan_with_metadata` interceptor. - """ - return response - - def post_get_backup_plan_with_metadata(self, response: backupplan.BackupPlan, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backupplan.BackupPlan, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_backup_plan - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BackupDR server but before it is returned to user code. - - We recommend only using this `post_get_backup_plan_with_metadata` - interceptor in new development instead of the `post_get_backup_plan` interceptor. - When both interceptors are used, this `post_get_backup_plan_with_metadata` interceptor runs after the - `post_get_backup_plan` interceptor. The (possibly modified) response returned by - `post_get_backup_plan` will be passed to - `post_get_backup_plan_with_metadata`. - """ - return response, metadata - - def pre_get_backup_plan_association(self, request: backupplanassociation.GetBackupPlanAssociationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backupplanassociation.GetBackupPlanAssociationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_backup_plan_association - - Override in a subclass to manipulate the request or metadata - before they are sent to the BackupDR server. - """ - return request, metadata - - def post_get_backup_plan_association(self, response: backupplanassociation.BackupPlanAssociation) -> backupplanassociation.BackupPlanAssociation: - """Post-rpc interceptor for get_backup_plan_association - - DEPRECATED. Please use the `post_get_backup_plan_association_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BackupDR server but before - it is returned to user code. This `post_get_backup_plan_association` interceptor runs - before the `post_get_backup_plan_association_with_metadata` interceptor. 
- """ - return response - - def post_get_backup_plan_association_with_metadata(self, response: backupplanassociation.BackupPlanAssociation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backupplanassociation.BackupPlanAssociation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_backup_plan_association - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BackupDR server but before it is returned to user code. - - We recommend only using this `post_get_backup_plan_association_with_metadata` - interceptor in new development instead of the `post_get_backup_plan_association` interceptor. - When both interceptors are used, this `post_get_backup_plan_association_with_metadata` interceptor runs after the - `post_get_backup_plan_association` interceptor. The (possibly modified) response returned by - `post_get_backup_plan_association` will be passed to - `post_get_backup_plan_association_with_metadata`. - """ - return response, metadata - - def pre_get_backup_vault(self, request: backupvault.GetBackupVaultRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backupvault.GetBackupVaultRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_backup_vault - - Override in a subclass to manipulate the request or metadata - before they are sent to the BackupDR server. - """ - return request, metadata - - def post_get_backup_vault(self, response: backupvault.BackupVault) -> backupvault.BackupVault: - """Post-rpc interceptor for get_backup_vault - - DEPRECATED. Please use the `post_get_backup_vault_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BackupDR server but before - it is returned to user code. This `post_get_backup_vault` interceptor runs - before the `post_get_backup_vault_with_metadata` interceptor. - """ - return response - - def post_get_backup_vault_with_metadata(self, response: backupvault.BackupVault, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backupvault.BackupVault, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_backup_vault - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BackupDR server but before it is returned to user code. - - We recommend only using this `post_get_backup_vault_with_metadata` - interceptor in new development instead of the `post_get_backup_vault` interceptor. - When both interceptors are used, this `post_get_backup_vault_with_metadata` interceptor runs after the - `post_get_backup_vault` interceptor. The (possibly modified) response returned by - `post_get_backup_vault` will be passed to - `post_get_backup_vault_with_metadata`. - """ - return response, metadata - - def pre_get_data_source(self, request: backupvault.GetDataSourceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backupvault.GetDataSourceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_data_source - - Override in a subclass to manipulate the request or metadata - before they are sent to the BackupDR server. - """ - return request, metadata - - def post_get_data_source(self, response: backupvault.DataSource) -> backupvault.DataSource: - """Post-rpc interceptor for get_data_source - - DEPRECATED. Please use the `post_get_data_source_with_metadata` - interceptor instead. 
- - Override in a subclass to read or manipulate the response - after it is returned by the BackupDR server but before - it is returned to user code. This `post_get_data_source` interceptor runs - before the `post_get_data_source_with_metadata` interceptor. - """ - return response - - def post_get_data_source_with_metadata(self, response: backupvault.DataSource, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backupvault.DataSource, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_data_source - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BackupDR server but before it is returned to user code. - - We recommend only using this `post_get_data_source_with_metadata` - interceptor in new development instead of the `post_get_data_source` interceptor. - When both interceptors are used, this `post_get_data_source_with_metadata` interceptor runs after the - `post_get_data_source` interceptor. The (possibly modified) response returned by - `post_get_data_source` will be passed to - `post_get_data_source_with_metadata`. - """ - return response, metadata - - def pre_get_management_server(self, request: backupdr.GetManagementServerRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backupdr.GetManagementServerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_management_server - - Override in a subclass to manipulate the request or metadata - before they are sent to the BackupDR server. - """ - return request, metadata - - def post_get_management_server(self, response: backupdr.ManagementServer) -> backupdr.ManagementServer: - """Post-rpc interceptor for get_management_server - - DEPRECATED. Please use the `post_get_management_server_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BackupDR server but before - it is returned to user code. This `post_get_management_server` interceptor runs - before the `post_get_management_server_with_metadata` interceptor. - """ - return response - - def post_get_management_server_with_metadata(self, response: backupdr.ManagementServer, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backupdr.ManagementServer, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_management_server - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BackupDR server but before it is returned to user code. - - We recommend only using this `post_get_management_server_with_metadata` - interceptor in new development instead of the `post_get_management_server` interceptor. - When both interceptors are used, this `post_get_management_server_with_metadata` interceptor runs after the - `post_get_management_server` interceptor. The (possibly modified) response returned by - `post_get_management_server` will be passed to - `post_get_management_server_with_metadata`. - """ - return response, metadata - - def pre_initialize_service(self, request: backupdr.InitializeServiceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backupdr.InitializeServiceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for initialize_service - - Override in a subclass to manipulate the request or metadata - before they are sent to the BackupDR server. 
- """ - return request, metadata - - def post_initialize_service(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for initialize_service - - DEPRECATED. Please use the `post_initialize_service_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BackupDR server but before - it is returned to user code. This `post_initialize_service` interceptor runs - before the `post_initialize_service_with_metadata` interceptor. - """ - return response - - def post_initialize_service_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for initialize_service - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BackupDR server but before it is returned to user code. - - We recommend only using this `post_initialize_service_with_metadata` - interceptor in new development instead of the `post_initialize_service` interceptor. - When both interceptors are used, this `post_initialize_service_with_metadata` interceptor runs after the - `post_initialize_service` interceptor. The (possibly modified) response returned by - `post_initialize_service` will be passed to - `post_initialize_service_with_metadata`. - """ - return response, metadata - - def pre_list_backup_plan_associations(self, request: backupplanassociation.ListBackupPlanAssociationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backupplanassociation.ListBackupPlanAssociationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_backup_plan_associations - - Override in a subclass to manipulate the request or metadata - before they are sent to the BackupDR server. - """ - return request, metadata - - def post_list_backup_plan_associations(self, response: backupplanassociation.ListBackupPlanAssociationsResponse) -> backupplanassociation.ListBackupPlanAssociationsResponse: - """Post-rpc interceptor for list_backup_plan_associations - - DEPRECATED. Please use the `post_list_backup_plan_associations_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BackupDR server but before - it is returned to user code. This `post_list_backup_plan_associations` interceptor runs - before the `post_list_backup_plan_associations_with_metadata` interceptor. - """ - return response - - def post_list_backup_plan_associations_with_metadata(self, response: backupplanassociation.ListBackupPlanAssociationsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backupplanassociation.ListBackupPlanAssociationsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_backup_plan_associations - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BackupDR server but before it is returned to user code. - - We recommend only using this `post_list_backup_plan_associations_with_metadata` - interceptor in new development instead of the `post_list_backup_plan_associations` interceptor. - When both interceptors are used, this `post_list_backup_plan_associations_with_metadata` interceptor runs after the - `post_list_backup_plan_associations` interceptor. 
The (possibly modified) response returned by - `post_list_backup_plan_associations` will be passed to - `post_list_backup_plan_associations_with_metadata`. - """ - return response, metadata - - def pre_list_backup_plans(self, request: backupplan.ListBackupPlansRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backupplan.ListBackupPlansRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_backup_plans - - Override in a subclass to manipulate the request or metadata - before they are sent to the BackupDR server. - """ - return request, metadata - - def post_list_backup_plans(self, response: backupplan.ListBackupPlansResponse) -> backupplan.ListBackupPlansResponse: - """Post-rpc interceptor for list_backup_plans - - DEPRECATED. Please use the `post_list_backup_plans_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BackupDR server but before - it is returned to user code. This `post_list_backup_plans` interceptor runs - before the `post_list_backup_plans_with_metadata` interceptor. - """ - return response - - def post_list_backup_plans_with_metadata(self, response: backupplan.ListBackupPlansResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backupplan.ListBackupPlansResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_backup_plans - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BackupDR server but before it is returned to user code. - - We recommend only using this `post_list_backup_plans_with_metadata` - interceptor in new development instead of the `post_list_backup_plans` interceptor. - When both interceptors are used, this `post_list_backup_plans_with_metadata` interceptor runs after the - `post_list_backup_plans` interceptor. The (possibly modified) response returned by - `post_list_backup_plans` will be passed to - `post_list_backup_plans_with_metadata`. - """ - return response, metadata - - def pre_list_backups(self, request: backupvault.ListBackupsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backupvault.ListBackupsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_backups - - Override in a subclass to manipulate the request or metadata - before they are sent to the BackupDR server. - """ - return request, metadata - - def post_list_backups(self, response: backupvault.ListBackupsResponse) -> backupvault.ListBackupsResponse: - """Post-rpc interceptor for list_backups - - DEPRECATED. Please use the `post_list_backups_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BackupDR server but before - it is returned to user code. This `post_list_backups` interceptor runs - before the `post_list_backups_with_metadata` interceptor. - """ - return response - - def post_list_backups_with_metadata(self, response: backupvault.ListBackupsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backupvault.ListBackupsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_backups - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BackupDR server but before it is returned to user code. - - We recommend only using this `post_list_backups_with_metadata` - interceptor in new development instead of the `post_list_backups` interceptor. 
- When both interceptors are used, this `post_list_backups_with_metadata` interceptor runs after the - `post_list_backups` interceptor. The (possibly modified) response returned by - `post_list_backups` will be passed to - `post_list_backups_with_metadata`. - """ - return response, metadata - - def pre_list_backup_vaults(self, request: backupvault.ListBackupVaultsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backupvault.ListBackupVaultsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_backup_vaults - - Override in a subclass to manipulate the request or metadata - before they are sent to the BackupDR server. - """ - return request, metadata - - def post_list_backup_vaults(self, response: backupvault.ListBackupVaultsResponse) -> backupvault.ListBackupVaultsResponse: - """Post-rpc interceptor for list_backup_vaults - - DEPRECATED. Please use the `post_list_backup_vaults_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BackupDR server but before - it is returned to user code. This `post_list_backup_vaults` interceptor runs - before the `post_list_backup_vaults_with_metadata` interceptor. - """ - return response - - def post_list_backup_vaults_with_metadata(self, response: backupvault.ListBackupVaultsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backupvault.ListBackupVaultsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_backup_vaults - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BackupDR server but before it is returned to user code. - - We recommend only using this `post_list_backup_vaults_with_metadata` - interceptor in new development instead of the `post_list_backup_vaults` interceptor. - When both interceptors are used, this `post_list_backup_vaults_with_metadata` interceptor runs after the - `post_list_backup_vaults` interceptor. The (possibly modified) response returned by - `post_list_backup_vaults` will be passed to - `post_list_backup_vaults_with_metadata`. - """ - return response, metadata - - def pre_list_data_sources(self, request: backupvault.ListDataSourcesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backupvault.ListDataSourcesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_data_sources - - Override in a subclass to manipulate the request or metadata - before they are sent to the BackupDR server. - """ - return request, metadata - - def post_list_data_sources(self, response: backupvault.ListDataSourcesResponse) -> backupvault.ListDataSourcesResponse: - """Post-rpc interceptor for list_data_sources - - DEPRECATED. Please use the `post_list_data_sources_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BackupDR server but before - it is returned to user code. This `post_list_data_sources` interceptor runs - before the `post_list_data_sources_with_metadata` interceptor. 
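
The ordering contract described here can be seen directly in a subclass that overrides both hooks: the value returned by the legacy hook is what the ``_with_metadata`` hook (defined just below) receives. A sketch, assuming the standard List response shape with a ``next_page_token`` field:

.. code-block:: python

    from google.cloud.backupdr_v1.services.backup_dr.transports.rest import (
        BackupDRRestInterceptor,
    )

    class OrderingDemoInterceptor(BackupDRRestInterceptor):
        def post_list_data_sources(self, response):
            # Legacy hook: runs first when both hooks are overridden.
            response.next_page_token = ""
            return response

        def post_list_data_sources_with_metadata(self, response, metadata):
            # Runs second, receiving the response returned above.
            assert response.next_page_token == ""
            return response, metadata
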
- """ - return response - - def post_list_data_sources_with_metadata(self, response: backupvault.ListDataSourcesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backupvault.ListDataSourcesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_data_sources - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BackupDR server but before it is returned to user code. - - We recommend only using this `post_list_data_sources_with_metadata` - interceptor in new development instead of the `post_list_data_sources` interceptor. - When both interceptors are used, this `post_list_data_sources_with_metadata` interceptor runs after the - `post_list_data_sources` interceptor. The (possibly modified) response returned by - `post_list_data_sources` will be passed to - `post_list_data_sources_with_metadata`. - """ - return response, metadata - - def pre_list_management_servers(self, request: backupdr.ListManagementServersRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backupdr.ListManagementServersRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_management_servers - - Override in a subclass to manipulate the request or metadata - before they are sent to the BackupDR server. - """ - return request, metadata - - def post_list_management_servers(self, response: backupdr.ListManagementServersResponse) -> backupdr.ListManagementServersResponse: - """Post-rpc interceptor for list_management_servers - - DEPRECATED. Please use the `post_list_management_servers_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BackupDR server but before - it is returned to user code. This `post_list_management_servers` interceptor runs - before the `post_list_management_servers_with_metadata` interceptor. - """ - return response - - def post_list_management_servers_with_metadata(self, response: backupdr.ListManagementServersResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backupdr.ListManagementServersResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_management_servers - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BackupDR server but before it is returned to user code. - - We recommend only using this `post_list_management_servers_with_metadata` - interceptor in new development instead of the `post_list_management_servers` interceptor. - When both interceptors are used, this `post_list_management_servers_with_metadata` interceptor runs after the - `post_list_management_servers` interceptor. The (possibly modified) response returned by - `post_list_management_servers` will be passed to - `post_list_management_servers_with_metadata`. - """ - return response, metadata - - def pre_restore_backup(self, request: backupvault.RestoreBackupRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backupvault.RestoreBackupRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for restore_backup - - Override in a subclass to manipulate the request or metadata - before they are sent to the BackupDR server. - """ - return request, metadata - - def post_restore_backup(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for restore_backup - - DEPRECATED. 
Please use the `post_restore_backup_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BackupDR server but before - it is returned to user code. This `post_restore_backup` interceptor runs - before the `post_restore_backup_with_metadata` interceptor. - """ - return response - - def post_restore_backup_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for restore_backup - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BackupDR server but before it is returned to user code. - - We recommend only using this `post_restore_backup_with_metadata` - interceptor in new development instead of the `post_restore_backup` interceptor. - When both interceptors are used, this `post_restore_backup_with_metadata` interceptor runs after the - `post_restore_backup` interceptor. The (possibly modified) response returned by - `post_restore_backup` will be passed to - `post_restore_backup_with_metadata`. - """ - return response, metadata - - def pre_trigger_backup(self, request: backupplanassociation.TriggerBackupRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backupplanassociation.TriggerBackupRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for trigger_backup - - Override in a subclass to manipulate the request or metadata - before they are sent to the BackupDR server. - """ - return request, metadata - - def post_trigger_backup(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for trigger_backup - - DEPRECATED. Please use the `post_trigger_backup_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BackupDR server but before - it is returned to user code. This `post_trigger_backup` interceptor runs - before the `post_trigger_backup_with_metadata` interceptor. - """ - return response - - def post_trigger_backup_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for trigger_backup - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BackupDR server but before it is returned to user code. - - We recommend only using this `post_trigger_backup_with_metadata` - interceptor in new development instead of the `post_trigger_backup` interceptor. - When both interceptors are used, this `post_trigger_backup_with_metadata` interceptor runs after the - `post_trigger_backup` interceptor. The (possibly modified) response returned by - `post_trigger_backup` will be passed to - `post_trigger_backup_with_metadata`. - """ - return response, metadata - - def pre_update_backup(self, request: backupvault.UpdateBackupRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backupvault.UpdateBackupRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_backup - - Override in a subclass to manipulate the request or metadata - before they are sent to the BackupDR server. 
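
As a concrete (hypothetical) use of a ``pre_*`` hook, a subclass can rewrite the request before it leaves the client; here the update is narrowed to a single field. Both ``update_mask`` (assumed from the usual update-request shape) and the ``"description"`` path are illustrative:

.. code-block:: python

    from google.protobuf import field_mask_pb2

    from google.cloud.backupdr_v1.services.backup_dr.transports.rest import (
        BackupDRRestInterceptor,
    )

    class NarrowUpdateInterceptor(BackupDRRestInterceptor):
        def pre_update_backup(self, request, metadata):
            # Force the update to touch only one (illustrative) field.
            request.update_mask = field_mask_pb2.FieldMask(paths=["description"])
            return request, metadata
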
- """ - return request, metadata - - def post_update_backup(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for update_backup - - DEPRECATED. Please use the `post_update_backup_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BackupDR server but before - it is returned to user code. This `post_update_backup` interceptor runs - before the `post_update_backup_with_metadata` interceptor. - """ - return response - - def post_update_backup_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_backup - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BackupDR server but before it is returned to user code. - - We recommend only using this `post_update_backup_with_metadata` - interceptor in new development instead of the `post_update_backup` interceptor. - When both interceptors are used, this `post_update_backup_with_metadata` interceptor runs after the - `post_update_backup` interceptor. The (possibly modified) response returned by - `post_update_backup` will be passed to - `post_update_backup_with_metadata`. - """ - return response, metadata - - def pre_update_backup_vault(self, request: backupvault.UpdateBackupVaultRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backupvault.UpdateBackupVaultRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_backup_vault - - Override in a subclass to manipulate the request or metadata - before they are sent to the BackupDR server. - """ - return request, metadata - - def post_update_backup_vault(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for update_backup_vault - - DEPRECATED. Please use the `post_update_backup_vault_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BackupDR server but before - it is returned to user code. This `post_update_backup_vault` interceptor runs - before the `post_update_backup_vault_with_metadata` interceptor. - """ - return response - - def post_update_backup_vault_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_backup_vault - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BackupDR server but before it is returned to user code. - - We recommend only using this `post_update_backup_vault_with_metadata` - interceptor in new development instead of the `post_update_backup_vault` interceptor. - When both interceptors are used, this `post_update_backup_vault_with_metadata` interceptor runs after the - `post_update_backup_vault` interceptor. The (possibly modified) response returned by - `post_update_backup_vault` will be passed to - `post_update_backup_vault_with_metadata`. 
- """ - return response, metadata - - def pre_update_data_source(self, request: backupvault.UpdateDataSourceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[backupvault.UpdateDataSourceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_data_source - - Override in a subclass to manipulate the request or metadata - before they are sent to the BackupDR server. - """ - return request, metadata - - def post_update_data_source(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for update_data_source - - DEPRECATED. Please use the `post_update_data_source_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BackupDR server but before - it is returned to user code. This `post_update_data_source` interceptor runs - before the `post_update_data_source_with_metadata` interceptor. - """ - return response - - def post_update_data_source_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_data_source - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BackupDR server but before it is returned to user code. - - We recommend only using this `post_update_data_source_with_metadata` - interceptor in new development instead of the `post_update_data_source` interceptor. - When both interceptors are used, this `post_update_data_source_with_metadata` interceptor runs after the - `post_update_data_source` interceptor. The (possibly modified) response returned by - `post_update_data_source` will be passed to - `post_update_data_source_with_metadata`. - """ - return response, metadata - - def pre_get_location( - self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_location - - Override in a subclass to manipulate the request or metadata - before they are sent to the BackupDR server. - """ - return request, metadata - - def post_get_location( - self, response: locations_pb2.Location - ) -> locations_pb2.Location: - """Post-rpc interceptor for get_location - - Override in a subclass to manipulate the response - after it is returned by the BackupDR server but before - it is returned to user code. - """ - return response - - def pre_list_locations( - self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_locations - - Override in a subclass to manipulate the request or metadata - before they are sent to the BackupDR server. - """ - return request, metadata - - def post_list_locations( - self, response: locations_pb2.ListLocationsResponse - ) -> locations_pb2.ListLocationsResponse: - """Post-rpc interceptor for list_locations - - Override in a subclass to manipulate the response - after it is returned by the BackupDR server but before - it is returned to user code. 
- """ - return response - - def pre_get_iam_policy( - self, request: iam_policy_pb2.GetIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the BackupDR server. - """ - return request, metadata - - def post_get_iam_policy( - self, response: policy_pb2.Policy - ) -> policy_pb2.Policy: - """Post-rpc interceptor for get_iam_policy - - Override in a subclass to manipulate the response - after it is returned by the BackupDR server but before - it is returned to user code. - """ - return response - - def pre_set_iam_policy( - self, request: iam_policy_pb2.SetIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for set_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the BackupDR server. - """ - return request, metadata - - def post_set_iam_policy( - self, response: policy_pb2.Policy - ) -> policy_pb2.Policy: - """Post-rpc interceptor for set_iam_policy - - Override in a subclass to manipulate the response - after it is returned by the BackupDR server but before - it is returned to user code. - """ - return response - - def pre_test_iam_permissions( - self, request: iam_policy_pb2.TestIamPermissionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for test_iam_permissions - - Override in a subclass to manipulate the request or metadata - before they are sent to the BackupDR server. - """ - return request, metadata - - def post_test_iam_permissions( - self, response: iam_policy_pb2.TestIamPermissionsResponse - ) -> iam_policy_pb2.TestIamPermissionsResponse: - """Post-rpc interceptor for test_iam_permissions - - Override in a subclass to manipulate the response - after it is returned by the BackupDR server but before - it is returned to user code. - """ - return response - - def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the BackupDR server. - """ - return request, metadata - - def post_cancel_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the response - after it is returned by the BackupDR server but before - it is returned to user code. - """ - return response - - def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the BackupDR server. 
- """ - return request, metadata - - def post_delete_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for delete_operation - - Override in a subclass to manipulate the response - after it is returned by the BackupDR server but before - it is returned to user code. - """ - return response - - def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the BackupDR server. - """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the BackupDR server but before - it is returned to user code. - """ - return response - - def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_operations - - Override in a subclass to manipulate the request or metadata - before they are sent to the BackupDR server. - """ - return request, metadata - - def post_list_operations( - self, response: operations_pb2.ListOperationsResponse - ) -> operations_pb2.ListOperationsResponse: - """Post-rpc interceptor for list_operations - - Override in a subclass to manipulate the response - after it is returned by the BackupDR server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class BackupDRRestStub: - _session: AuthorizedSession - _host: str - _interceptor: BackupDRRestInterceptor - - -class BackupDRRestTransport(_BaseBackupDRRestTransport): - """REST backend synchronous transport for BackupDR. - - The BackupDR Service - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'backupdr.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[BackupDRRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'backupdr.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. 
This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. - # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - url_scheme=url_scheme, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or BackupDRRestInterceptor() - self._prep_wrapped_messages(client_info) - - @property - def operations_client(self) -> operations_v1.AbstractOperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Only create a new client if we do not already have one. - if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.CancelOperation': [ - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - ], - 'google.longrunning.Operations.DeleteOperation': [ - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ], - 'google.longrunning.Operations.GetOperation': [ - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ], - 'google.longrunning.Operations.ListOperations': [ - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', - }, - ], - } - - rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1") - - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) - - # Return the client from cache. 
- return self._operations_client - - class _CreateBackupPlan(_BaseBackupDRRestTransport._BaseCreateBackupPlan, BackupDRRestStub): - def __hash__(self): - return hash("BackupDRRestTransport.CreateBackupPlan") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: backupplan.CreateBackupPlanRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the create backup plan method over HTTP. - - Args: - request (~.backupplan.CreateBackupPlanRequest): - The request object. The request message for creating a ``BackupPlan``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseBackupDRRestTransport._BaseCreateBackupPlan._get_http_options() - - request, metadata = self._interceptor.pre_create_backup_plan(request, metadata) - transcoded_request = _BaseBackupDRRestTransport._BaseCreateBackupPlan._get_transcoded_request(http_options, request) - - body = _BaseBackupDRRestTransport._BaseCreateBackupPlan._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBackupDRRestTransport._BaseCreateBackupPlan._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.CreateBackupPlan", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "CreateBackupPlan", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BackupDRRestTransport._CreateBackupPlan._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_backup_plan(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_backup_plan_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.create_backup_plan", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "CreateBackupPlan", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateBackupPlanAssociation(_BaseBackupDRRestTransport._BaseCreateBackupPlanAssociation, BackupDRRestStub): - def __hash__(self): - return hash("BackupDRRestTransport.CreateBackupPlanAssociation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: backupplanassociation.CreateBackupPlanAssociationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the create backup plan - association method over HTTP. - - Args: - request (~.backupplanassociation.CreateBackupPlanAssociationRequest): - The request object. Request message for creating a backup - plan. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = _BaseBackupDRRestTransport._BaseCreateBackupPlanAssociation._get_http_options() - - request, metadata = self._interceptor.pre_create_backup_plan_association(request, metadata) - transcoded_request = _BaseBackupDRRestTransport._BaseCreateBackupPlanAssociation._get_transcoded_request(http_options, request) - - body = _BaseBackupDRRestTransport._BaseCreateBackupPlanAssociation._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBackupDRRestTransport._BaseCreateBackupPlanAssociation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.CreateBackupPlanAssociation", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "CreateBackupPlanAssociation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BackupDRRestTransport._CreateBackupPlanAssociation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_backup_plan_association(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_backup_plan_association_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.create_backup_plan_association", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "CreateBackupPlanAssociation", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateBackupVault(_BaseBackupDRRestTransport._BaseCreateBackupVault, BackupDRRestStub): - def __hash__(self): - return hash("BackupDRRestTransport.CreateBackupVault") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: backupvault.CreateBackupVaultRequest, *, - retry: 
OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the create backup vault method over HTTP. - - Args: - request (~.backupvault.CreateBackupVaultRequest): - The request object. Message for creating a BackupVault. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseBackupDRRestTransport._BaseCreateBackupVault._get_http_options() - - request, metadata = self._interceptor.pre_create_backup_vault(request, metadata) - transcoded_request = _BaseBackupDRRestTransport._BaseCreateBackupVault._get_transcoded_request(http_options, request) - - body = _BaseBackupDRRestTransport._BaseCreateBackupVault._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBackupDRRestTransport._BaseCreateBackupVault._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.CreateBackupVault", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "CreateBackupVault", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BackupDRRestTransport._CreateBackupVault._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_backup_vault(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_backup_vault_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.create_backup_vault", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "CreateBackupVault", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateManagementServer(_BaseBackupDRRestTransport._BaseCreateManagementServer, BackupDRRestStub): - def __hash__(self): - return hash("BackupDRRestTransport.CreateManagementServer") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: backupdr.CreateManagementServerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the create management server method over HTTP. - - Args: - request (~.backupdr.CreateManagementServerRequest): - The request object. Request message for creating a - management server instance. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
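
At the transport layer these methods return raw ``operations_pb2.Operation`` messages rather than managed futures, so a caller holding the transport can poll through the cached ``operations_client`` property shown earlier; a sketch (the operation name is hypothetical):

.. code-block:: python

    import time

    def wait_for_operation(transport, name, interval=5.0):
        # Poll a long-running operation by resource name until it completes.
        ops = transport.operations_client
        operation = ops.get_operation(name=name)
        while not operation.done:
            time.sleep(interval)
            operation = ops.get_operation(name=name)
        return operation

    # wait_for_operation(transport, "projects/p/locations/l/operations/op-123")
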
- - """ - - http_options = _BaseBackupDRRestTransport._BaseCreateManagementServer._get_http_options() - - request, metadata = self._interceptor.pre_create_management_server(request, metadata) - transcoded_request = _BaseBackupDRRestTransport._BaseCreateManagementServer._get_transcoded_request(http_options, request) - - body = _BaseBackupDRRestTransport._BaseCreateManagementServer._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBackupDRRestTransport._BaseCreateManagementServer._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.CreateManagementServer", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "CreateManagementServer", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BackupDRRestTransport._CreateManagementServer._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_management_server(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_management_server_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.create_management_server", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "CreateManagementServer", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteBackup(_BaseBackupDRRestTransport._BaseDeleteBackup, BackupDRRestStub): - def __hash__(self): - return hash("BackupDRRestTransport.DeleteBackup") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: backupvault.DeleteBackupRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> 
operations_pb2.Operation: - r"""Call the delete backup method over HTTP. - - Args: - request (~.backupvault.DeleteBackupRequest): - The request object. Message for deleting a Backup. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseBackupDRRestTransport._BaseDeleteBackup._get_http_options() - - request, metadata = self._interceptor.pre_delete_backup(request, metadata) - transcoded_request = _BaseBackupDRRestTransport._BaseDeleteBackup._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBackupDRRestTransport._BaseDeleteBackup._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.DeleteBackup", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "DeleteBackup", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BackupDRRestTransport._DeleteBackup._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_delete_backup(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_backup_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.delete_backup", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "DeleteBackup", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteBackupPlan(_BaseBackupDRRestTransport._BaseDeleteBackupPlan, BackupDRRestStub): - def __hash__(self): - return hash("BackupDRRestTransport.DeleteBackupPlan") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: backupplan.DeleteBackupPlanRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete backup plan method over HTTP. - - Args: - request (~.backupplan.DeleteBackupPlanRequest): - The request object. The request message for deleting a ``BackupPlan``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
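
Because non-2xx responses are routed through ``core_exceptions.from_http_response`` (as in the call methods above), callers see typed ``google.api_core`` exceptions rather than raw HTTP errors; for example, given a ``BackupDRClient`` as sketched earlier (the resource name is hypothetical):

.. code-block:: python

    from google.api_core import exceptions as core_exceptions

    try:
        client.delete_backup_plan(
            name="projects/p/locations/l/backupPlans/missing"
        )
    except core_exceptions.NotFound:
        # A 404 surfaces as NotFound, a GoogleAPICallError subclass.
        pass
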
- - """ - - http_options = _BaseBackupDRRestTransport._BaseDeleteBackupPlan._get_http_options() - - request, metadata = self._interceptor.pre_delete_backup_plan(request, metadata) - transcoded_request = _BaseBackupDRRestTransport._BaseDeleteBackupPlan._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBackupDRRestTransport._BaseDeleteBackupPlan._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.DeleteBackupPlan", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "DeleteBackupPlan", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BackupDRRestTransport._DeleteBackupPlan._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_delete_backup_plan(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_backup_plan_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.delete_backup_plan", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "DeleteBackupPlan", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteBackupPlanAssociation(_BaseBackupDRRestTransport._BaseDeleteBackupPlanAssociation, BackupDRRestStub): - def __hash__(self): - return hash("BackupDRRestTransport.DeleteBackupPlanAssociation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: backupplanassociation.DeleteBackupPlanAssociationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete backup plan - association method over HTTP. 
- - Args: - request (~.backupplanassociation.DeleteBackupPlanAssociationRequest): - The request object. Request message for deleting a backup - plan association. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseBackupDRRestTransport._BaseDeleteBackupPlanAssociation._get_http_options() - - request, metadata = self._interceptor.pre_delete_backup_plan_association(request, metadata) - transcoded_request = _BaseBackupDRRestTransport._BaseDeleteBackupPlanAssociation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBackupDRRestTransport._BaseDeleteBackupPlanAssociation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.DeleteBackupPlanAssociation", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "DeleteBackupPlanAssociation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BackupDRRestTransport._DeleteBackupPlanAssociation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_delete_backup_plan_association(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_backup_plan_association_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.delete_backup_plan_association", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "DeleteBackupPlanAssociation", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteBackupVault(_BaseBackupDRRestTransport._BaseDeleteBackupVault, BackupDRRestStub): - def __hash__(self): - return hash("BackupDRRestTransport.DeleteBackupVault") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: backupvault.DeleteBackupVaultRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete backup vault method over HTTP. - - Args: - request (~.backupvault.DeleteBackupVaultRequest): - The request object. Message for deleting a BackupVault. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
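Every stub in this transport funnels non-2xx responses through `core_exceptions.from_http_response`, as at the top of this hunk, so callers of the public client see typed `GoogleAPICallError` subclasses rather than raw `requests` errors. A hedged sketch with a placeholder vault name:

.. code-block:: python

    from google.api_core import exceptions as core_exceptions
    from google.cloud import backupdr_v1

    client = backupdr_v1.BackupDRClient()

    try:
        client.delete_backup_vault(
            name="projects/my-project/locations/us-central1/backupVaults/missing",  # placeholder
        )
    except core_exceptions.NotFound as exc:
        # A 404 from the REST layer surfaces as NotFound, a GoogleAPICallError subclass.
        print(exc.message)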
- - """ - - http_options = _BaseBackupDRRestTransport._BaseDeleteBackupVault._get_http_options() - - request, metadata = self._interceptor.pre_delete_backup_vault(request, metadata) - transcoded_request = _BaseBackupDRRestTransport._BaseDeleteBackupVault._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBackupDRRestTransport._BaseDeleteBackupVault._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.DeleteBackupVault", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "DeleteBackupVault", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BackupDRRestTransport._DeleteBackupVault._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_delete_backup_vault(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_backup_vault_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.delete_backup_vault", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "DeleteBackupVault", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteManagementServer(_BaseBackupDRRestTransport._BaseDeleteManagementServer, BackupDRRestStub): - def __hash__(self): - return hash("BackupDRRestTransport.DeleteManagementServer") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: backupdr.DeleteManagementServerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete management server method over HTTP. 
- - Args: - request (~.backupdr.DeleteManagementServerRequest): - The request object. Request message for deleting a - management server instance. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseBackupDRRestTransport._BaseDeleteManagementServer._get_http_options() - - request, metadata = self._interceptor.pre_delete_management_server(request, metadata) - transcoded_request = _BaseBackupDRRestTransport._BaseDeleteManagementServer._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBackupDRRestTransport._BaseDeleteManagementServer._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.DeleteManagementServer", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "DeleteManagementServer", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BackupDRRestTransport._DeleteManagementServer._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_delete_management_server(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_management_server_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.delete_management_server", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "DeleteManagementServer", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _FetchUsableBackupVaults(_BaseBackupDRRestTransport._BaseFetchUsableBackupVaults, BackupDRRestStub): - def __hash__(self): - return hash("BackupDRRestTransport.FetchUsableBackupVaults") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: backupvault.FetchUsableBackupVaultsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> backupvault.FetchUsableBackupVaultsResponse: - r"""Call the fetch usable backup - vaults method over HTTP. - - Args: - request (~.backupvault.FetchUsableBackupVaultsRequest): - The request object. Request message for fetching usable - BackupVaults. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.backupvault.FetchUsableBackupVaultsResponse: - Response message for fetching usable - BackupVaults. 
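`FetchUsableBackupVaults` is a paginated RPC; the public client method wraps this stub in a pager that transparently follows `next_page_token`. A sketch, assuming the flattened `parent` argument and a placeholder project path:

.. code-block:: python

    from google.cloud import backupdr_v1

    client = backupdr_v1.BackupDRClient()

    # The pager re-invokes this stub as iteration crosses page boundaries.
    for vault in client.fetch_usable_backup_vaults(
        parent="projects/my-project/locations/us-central1",  # placeholder
    ):
        print(vault.name)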
- - """ - - http_options = _BaseBackupDRRestTransport._BaseFetchUsableBackupVaults._get_http_options() - - request, metadata = self._interceptor.pre_fetch_usable_backup_vaults(request, metadata) - transcoded_request = _BaseBackupDRRestTransport._BaseFetchUsableBackupVaults._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBackupDRRestTransport._BaseFetchUsableBackupVaults._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.FetchUsableBackupVaults", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "FetchUsableBackupVaults", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BackupDRRestTransport._FetchUsableBackupVaults._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = backupvault.FetchUsableBackupVaultsResponse() - pb_resp = backupvault.FetchUsableBackupVaultsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_fetch_usable_backup_vaults(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_fetch_usable_backup_vaults_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = backupvault.FetchUsableBackupVaultsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.fetch_usable_backup_vaults", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "FetchUsableBackupVaults", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetBackup(_BaseBackupDRRestTransport._BaseGetBackup, BackupDRRestStub): - def __hash__(self): - return hash("BackupDRRestTransport.GetBackup") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: backupvault.GetBackupRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) 
-> backupvault.Backup: - r"""Call the get backup method over HTTP. - - Args: - request (~.backupvault.GetBackupRequest): - The request object. Request message for getting a Backup. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.backupvault.Backup: - Message describing a Backup object. - """ - - http_options = _BaseBackupDRRestTransport._BaseGetBackup._get_http_options() - - request, metadata = self._interceptor.pre_get_backup(request, metadata) - transcoded_request = _BaseBackupDRRestTransport._BaseGetBackup._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBackupDRRestTransport._BaseGetBackup._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.GetBackup", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "GetBackup", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BackupDRRestTransport._GetBackup._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = backupvault.Backup() - pb_resp = backupvault.Backup.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_backup(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_backup_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = backupvault.Backup.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.get_backup", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "GetBackup", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetBackupPlan(_BaseBackupDRRestTransport._BaseGetBackupPlan, BackupDRRestStub): - def __hash__(self): - return hash("BackupDRRestTransport.GetBackupPlan") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: backupplan.GetBackupPlanRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> backupplan.BackupPlan: - r"""Call the get backup plan method over HTTP. - - Args: - request (~.backupplan.GetBackupPlanRequest): - The request object. The request message for getting a ``BackupPlan``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.backupplan.BackupPlan: - A ``BackupPlan`` specifies some common fields, such as - ``description`` as well as one or more ``BackupRule`` - messages. Each ``BackupRule`` has a retention policy and - defines a schedule by which the system is to perform - backup workloads. 
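Given the `BackupPlan` shape described in the Returns section above, a caller might inspect the returned rules as sketched here; the field names `backup_rules`, `rule_id`, and `backup_retention_days` are assumptions based on the v1 proto and do not appear in this hunk:

.. code-block:: python

    from google.cloud import backupdr_v1

    client = backupdr_v1.BackupDRClient()

    plan = client.get_backup_plan(
        name="projects/my-project/locations/us-central1/backupPlans/my-plan",  # placeholder
    )
    for rule in plan.backup_rules:  # assumed repeated BackupRule field
        print(rule.rule_id, rule.backup_retention_days)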
- - """ - - http_options = _BaseBackupDRRestTransport._BaseGetBackupPlan._get_http_options() - - request, metadata = self._interceptor.pre_get_backup_plan(request, metadata) - transcoded_request = _BaseBackupDRRestTransport._BaseGetBackupPlan._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBackupDRRestTransport._BaseGetBackupPlan._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.GetBackupPlan", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "GetBackupPlan", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BackupDRRestTransport._GetBackupPlan._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = backupplan.BackupPlan() - pb_resp = backupplan.BackupPlan.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_backup_plan(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_backup_plan_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = backupplan.BackupPlan.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.get_backup_plan", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "GetBackupPlan", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetBackupPlanAssociation(_BaseBackupDRRestTransport._BaseGetBackupPlanAssociation, BackupDRRestStub): - def __hash__(self): - return hash("BackupDRRestTransport.GetBackupPlanAssociation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: backupplanassociation.GetBackupPlanAssociationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> backupplanassociation.BackupPlanAssociation: - r"""Call the get backup plan - association method over 
HTTP. - - Args: - request (~.backupplanassociation.GetBackupPlanAssociationRequest): - The request object. Request message for getting a - BackupPlanAssociation resource. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.backupplanassociation.BackupPlanAssociation: - A BackupPlanAssociation represents a - single BackupPlanAssociation which - contains details like workload, backup - plan etc - - """ - - http_options = _BaseBackupDRRestTransport._BaseGetBackupPlanAssociation._get_http_options() - - request, metadata = self._interceptor.pre_get_backup_plan_association(request, metadata) - transcoded_request = _BaseBackupDRRestTransport._BaseGetBackupPlanAssociation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBackupDRRestTransport._BaseGetBackupPlanAssociation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.GetBackupPlanAssociation", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "GetBackupPlanAssociation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BackupDRRestTransport._GetBackupPlanAssociation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = backupplanassociation.BackupPlanAssociation() - pb_resp = backupplanassociation.BackupPlanAssociation.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_backup_plan_association(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_backup_plan_association_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = backupplanassociation.BackupPlanAssociation.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.get_backup_plan_association", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "GetBackupPlanAssociation", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetBackupVault(_BaseBackupDRRestTransport._BaseGetBackupVault, BackupDRRestStub): - def __hash__(self): - return hash("BackupDRRestTransport.GetBackupVault") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: backupvault.GetBackupVaultRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> backupvault.BackupVault: - r"""Call the get backup vault method over HTTP. - - Args: - request (~.backupvault.GetBackupVaultRequest): - The request object. Request message for getting a - BackupVault. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.backupvault.BackupVault: - Message describing a BackupVault - object. 
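The decode pattern each stub uses (construct the proto-plus message, take its underlying protobuf via `.pb()`, then `json_format.Parse` into it in place) can be exercised on its own. The JSON literal below is a made-up payload, and the `types` import path is the conventional one for this package:

.. code-block:: python

    from google.protobuf import json_format

    from google.cloud.backupdr_v1.types import BackupVault

    resp = BackupVault()
    pb_resp = BackupVault.pb(resp)  # underlying protobuf message; shares storage with resp

    json_format.Parse(
        '{"name": "projects/p/locations/l/backupVaults/v"}',
        pb_resp,
        ignore_unknown_fields=True,
    )
    print(resp.name)  # the parse is visible through the proto-plus wrapper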
- - """ - - http_options = _BaseBackupDRRestTransport._BaseGetBackupVault._get_http_options() - - request, metadata = self._interceptor.pre_get_backup_vault(request, metadata) - transcoded_request = _BaseBackupDRRestTransport._BaseGetBackupVault._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBackupDRRestTransport._BaseGetBackupVault._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.GetBackupVault", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "GetBackupVault", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BackupDRRestTransport._GetBackupVault._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = backupvault.BackupVault() - pb_resp = backupvault.BackupVault.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_backup_vault(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_backup_vault_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = backupvault.BackupVault.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.get_backup_vault", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "GetBackupVault", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetDataSource(_BaseBackupDRRestTransport._BaseGetDataSource, BackupDRRestStub): - def __hash__(self): - return hash("BackupDRRestTransport.GetDataSource") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: backupvault.GetDataSourceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> backupvault.DataSource: - r"""Call the get data source method over HTTP. 
- - Args: - request (~.backupvault.GetDataSourceRequest): - The request object. Request message for getting a - DataSource instance. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.backupvault.DataSource: - Message describing a DataSource - object. Datasource object used to - represent Datasource details for both - admin and basic view. - - """ - - http_options = _BaseBackupDRRestTransport._BaseGetDataSource._get_http_options() - - request, metadata = self._interceptor.pre_get_data_source(request, metadata) - transcoded_request = _BaseBackupDRRestTransport._BaseGetDataSource._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBackupDRRestTransport._BaseGetDataSource._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.GetDataSource", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "GetDataSource", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BackupDRRestTransport._GetDataSource._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = backupvault.DataSource() - pb_resp = backupvault.DataSource.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_data_source(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_data_source_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = backupvault.DataSource.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.get_data_source", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "GetDataSource", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetManagementServer(_BaseBackupDRRestTransport._BaseGetManagementServer, BackupDRRestStub): - def __hash__(self): - return hash("BackupDRRestTransport.GetManagementServer") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: backupdr.GetManagementServerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> backupdr.ManagementServer: - r"""Call the get management server method over HTTP. - - Args: - request (~.backupdr.GetManagementServerRequest): - The request object. Request message for getting a - management server instance. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.backupdr.ManagementServer: - ManagementServer describes a single - BackupDR ManagementServer instance. 
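The `retry` and `timeout` parameters accepted by every `__call__` above are threaded through from the public client methods. A sketch of tuning them per call, assuming standard `google.api_core.retry.Retry` keyword arguments and a placeholder server name:

.. code-block:: python

    from google.api_core import retry as retries
    from google.cloud import backupdr_v1

    client = backupdr_v1.BackupDRClient()

    server = client.get_management_server(
        name="projects/my-project/locations/us-central1/managementServers/ms-1",  # placeholder
        retry=retries.Retry(initial=1.0, maximum=8.0, multiplier=2.0, timeout=60.0),
        timeout=30.0,  # per-attempt timeout handed down to _get_response
    )
    print(server.name)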
- - """ - - http_options = _BaseBackupDRRestTransport._BaseGetManagementServer._get_http_options() - - request, metadata = self._interceptor.pre_get_management_server(request, metadata) - transcoded_request = _BaseBackupDRRestTransport._BaseGetManagementServer._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBackupDRRestTransport._BaseGetManagementServer._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.GetManagementServer", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "GetManagementServer", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BackupDRRestTransport._GetManagementServer._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = backupdr.ManagementServer() - pb_resp = backupdr.ManagementServer.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_management_server(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_management_server_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = backupdr.ManagementServer.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.get_management_server", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "GetManagementServer", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _InitializeService(_BaseBackupDRRestTransport._BaseInitializeService, BackupDRRestStub): - def __hash__(self): - return hash("BackupDRRestTransport.InitializeService") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: backupdr.InitializeServiceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the initialize 
service method over HTTP. - - Args: - request (~.backupdr.InitializeServiceRequest): - The request object. Request message for initializing the - service. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseBackupDRRestTransport._BaseInitializeService._get_http_options() - - request, metadata = self._interceptor.pre_initialize_service(request, metadata) - transcoded_request = _BaseBackupDRRestTransport._BaseInitializeService._get_transcoded_request(http_options, request) - - body = _BaseBackupDRRestTransport._BaseInitializeService._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBackupDRRestTransport._BaseInitializeService._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.InitializeService", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "InitializeService", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BackupDRRestTransport._InitializeService._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
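`_InitializeService` is the only stub in this stretch whose `_get_response` posts a request body (note the extra `data=body` argument). A call sketch, assuming the v1 request fields `name` and `resource_type` (not shown in this hunk) and a placeholder resource URI:

.. code-block:: python

    from google.cloud import backupdr_v1

    client = backupdr_v1.BackupDRClient()

    operation = client.initialize_service(
        request=backupdr_v1.InitializeServiceRequest(
            name="projects/my-project/locations/us-central1/serviceConfig",  # placeholder
            resource_type="compute.googleapis.com/Instance",  # assumed field value
        )
    )
    operation.result()  # wait for the long-running initialization to finish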
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_initialize_service(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_initialize_service_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.initialize_service", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "InitializeService", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListBackupPlanAssociations(_BaseBackupDRRestTransport._BaseListBackupPlanAssociations, BackupDRRestStub): - def __hash__(self): - return hash("BackupDRRestTransport.ListBackupPlanAssociations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: backupplanassociation.ListBackupPlanAssociationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> backupplanassociation.ListBackupPlanAssociationsResponse: - r"""Call the list backup plan - associations method over HTTP. - - Args: - request (~.backupplanassociation.ListBackupPlanAssociationsRequest): - The request object. Request message for List - BackupPlanAssociation - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
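The metadata note in the docstring above (bytes values required for `-bin` keys) applies to any custom metadata passed through the client; on this REST transport the pairs become HTTP headers. A hedged sketch with made-up header keys:

.. code-block:: python

    from google.cloud import backupdr_v1

    client = backupdr_v1.BackupDRClient()

    pager = client.list_backup_plan_associations(
        parent="projects/my-project/locations/us-central1",  # placeholder
        metadata=[
            ("x-custom-header", "trace-42"),         # plain keys take str values
            ("x-custom-blob-bin", b"\x00\x01\x02"),  # *-bin keys take bytes values
        ],
    )
    for association in pager:
        print(association.name)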
- - Returns: - ~.backupplanassociation.ListBackupPlanAssociationsResponse: - Response message for List - BackupPlanAssociation - - """ - - http_options = _BaseBackupDRRestTransport._BaseListBackupPlanAssociations._get_http_options() - - request, metadata = self._interceptor.pre_list_backup_plan_associations(request, metadata) - transcoded_request = _BaseBackupDRRestTransport._BaseListBackupPlanAssociations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBackupDRRestTransport._BaseListBackupPlanAssociations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.ListBackupPlanAssociations", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "ListBackupPlanAssociations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BackupDRRestTransport._ListBackupPlanAssociations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = backupplanassociation.ListBackupPlanAssociationsResponse() - pb_resp = backupplanassociation.ListBackupPlanAssociationsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_backup_plan_associations(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_backup_plan_associations_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = backupplanassociation.ListBackupPlanAssociationsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.list_backup_plan_associations", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "ListBackupPlanAssociations", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListBackupPlans(_BaseBackupDRRestTransport._BaseListBackupPlans, BackupDRRestStub): - def __hash__(self): - return hash("BackupDRRestTransport.ListBackupPlans") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response
- - def __call__(self, - request: backupplan.ListBackupPlansRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> backupplan.ListBackupPlansResponse: - r"""Call the list backup plans method over HTTP. - - Args: - request (~.backupplan.ListBackupPlansRequest): - The request object. The request message for getting a list ``BackupPlan``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.backupplan.ListBackupPlansResponse: - The response message for getting a list of - ``BackupPlan``. - - """ - - http_options = _BaseBackupDRRestTransport._BaseListBackupPlans._get_http_options() - - request, metadata = self._interceptor.pre_list_backup_plans(request, metadata) - transcoded_request = _BaseBackupDRRestTransport._BaseListBackupPlans._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBackupDRRestTransport._BaseListBackupPlans._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.ListBackupPlans", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "ListBackupPlans", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BackupDRRestTransport._ListBackupPlans._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass.
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = backupplan.ListBackupPlansResponse() - pb_resp = backupplan.ListBackupPlansResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_backup_plans(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_backup_plans_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = backupplan.ListBackupPlansResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.list_backup_plans", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "ListBackupPlans", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListBackups(_BaseBackupDRRestTransport._BaseListBackups, BackupDRRestStub): - def __hash__(self): - return hash("BackupDRRestTransport.ListBackups") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: backupvault.ListBackupsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> backupvault.ListBackupsResponse: - r"""Call the list backups method over HTTP. - - Args: - request (~.backupvault.ListBackupsRequest): - The request object. Request message for listing Backups. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.backupvault.ListBackupsResponse: - Response message for listing Backups. 
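`ListBackups` responses are also surfaced through a pager; iterating its `pages` property shows the one-`ListBackupsResponse`-per-round-trip structure this stub produces. Resource names below are placeholders:

.. code-block:: python

    from google.cloud import backupdr_v1

    client = backupdr_v1.BackupDRClient()

    pager = client.list_backups(
        parent="projects/my-project/locations/us-central1"
               "/backupVaults/my-vault/dataSources/ds-1",  # placeholder
    )
    for page in pager.pages:  # one ListBackupsResponse per HTTP round trip
        for backup in page.backups:
            print(backup.name)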
- """ - - http_options = _BaseBackupDRRestTransport._BaseListBackups._get_http_options() - - request, metadata = self._interceptor.pre_list_backups(request, metadata) - transcoded_request = _BaseBackupDRRestTransport._BaseListBackups._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBackupDRRestTransport._BaseListBackups._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.ListBackups", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "ListBackups", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BackupDRRestTransport._ListBackups._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = backupvault.ListBackupsResponse() - pb_resp = backupvault.ListBackupsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_backups(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_backups_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = backupvault.ListBackupsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.list_backups", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "ListBackups", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListBackupVaults(_BaseBackupDRRestTransport._BaseListBackupVaults, BackupDRRestStub): - def __hash__(self): - return hash("BackupDRRestTransport.ListBackupVaults") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: backupvault.ListBackupVaultsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> backupvault.ListBackupVaultsResponse: - r"""Call the list backup vaults method over HTTP. 
- - Args: - request (~.backupvault.ListBackupVaultsRequest): - The request object. Request message for listing - backupvault stores. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.backupvault.ListBackupVaultsResponse: - Response message for listing - BackupVaults. - - """ - - http_options = _BaseBackupDRRestTransport._BaseListBackupVaults._get_http_options() - - request, metadata = self._interceptor.pre_list_backup_vaults(request, metadata) - transcoded_request = _BaseBackupDRRestTransport._BaseListBackupVaults._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBackupDRRestTransport._BaseListBackupVaults._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.ListBackupVaults", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "ListBackupVaults", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BackupDRRestTransport._ListBackupVaults._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
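-             # NOTE: core_exceptions.from_http_response maps the HTTP status
-             # code to the matching GoogleAPICallError subclass (for example
-             # 403 -> PermissionDenied, 404 -> NotFound), so callers can catch
-             # the specific error type they care about.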
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = backupvault.ListBackupVaultsResponse() - pb_resp = backupvault.ListBackupVaultsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_backup_vaults(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_backup_vaults_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = backupvault.ListBackupVaultsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.list_backup_vaults", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "ListBackupVaults", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListDataSources(_BaseBackupDRRestTransport._BaseListDataSources, BackupDRRestStub): - def __hash__(self): - return hash("BackupDRRestTransport.ListDataSources") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: backupvault.ListDataSourcesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> backupvault.ListDataSourcesResponse: - r"""Call the list data sources method over HTTP. - - Args: - request (~.backupvault.ListDataSourcesRequest): - The request object. Request message for listing - DataSources. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.backupvault.ListDataSourcesResponse: - Response message for listing - DataSources. 
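- 
-                 Example:
-                     Illustrative sketch only (hypothetical resource names);
-                     the public client surfaces this RPC as
-                     ``BackupDRClient.list_data_sources``, which returns a
-                     pager.
- 
-                     .. code-block:: python
- 
-                         from google.cloud import backupdr_v1
- 
-                         client = backupdr_v1.BackupDRClient()
-                         pager = client.list_data_sources(
-                             parent="projects/my-project/locations/us-central1/backupVaults/my-vault",
-                         )
-                         # Walk whole response pages rather than single items.
-                         for page in pager.pages:
-                             print(len(page.data_sources))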
- - """ - - http_options = _BaseBackupDRRestTransport._BaseListDataSources._get_http_options() - - request, metadata = self._interceptor.pre_list_data_sources(request, metadata) - transcoded_request = _BaseBackupDRRestTransport._BaseListDataSources._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBackupDRRestTransport._BaseListDataSources._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.ListDataSources", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "ListDataSources", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BackupDRRestTransport._ListDataSources._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = backupvault.ListDataSourcesResponse() - pb_resp = backupvault.ListDataSourcesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_data_sources(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_data_sources_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = backupvault.ListDataSourcesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.list_data_sources", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "ListDataSources", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListManagementServers(_BaseBackupDRRestTransport._BaseListManagementServers, BackupDRRestStub): - def __hash__(self): - return hash("BackupDRRestTransport.ListManagementServers") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: backupdr.ListManagementServersRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> backupdr.ListManagementServersResponse: - r"""Call the list 
management servers method over HTTP. - - Args: - request (~.backupdr.ListManagementServersRequest): - The request object. Request message for listing - management servers. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.backupdr.ListManagementServersResponse: - Response message for listing - management servers. - - """ - - http_options = _BaseBackupDRRestTransport._BaseListManagementServers._get_http_options() - - request, metadata = self._interceptor.pre_list_management_servers(request, metadata) - transcoded_request = _BaseBackupDRRestTransport._BaseListManagementServers._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBackupDRRestTransport._BaseListManagementServers._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.ListManagementServers", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "ListManagementServers", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BackupDRRestTransport._ListManagementServers._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
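-             # NOTE: ``ListManagementServersResponse.pb(resp)`` below returns
-             # the raw protobuf message backing the proto-plus wrapper, so
-             # json_format.Parse populates ``resp`` in place even though
-             # ``pb_resp`` appears unused afterwards.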
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = backupdr.ListManagementServersResponse() - pb_resp = backupdr.ListManagementServersResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_management_servers(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_management_servers_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = backupdr.ListManagementServersResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.list_management_servers", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "ListManagementServers", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _RestoreBackup(_BaseBackupDRRestTransport._BaseRestoreBackup, BackupDRRestStub): - def __hash__(self): - return hash("BackupDRRestTransport.RestoreBackup") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: backupvault.RestoreBackupRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the restore backup method over HTTP. - - Args: - request (~.backupvault.RestoreBackupRequest): - The request object. Request message for restoring from a - Backup. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
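- 
-                 Example:
-                     Illustrative sketch only; through the public client the
-                     returned operation is wrapped in a
-                     ``google.api_core.operation.Operation`` future, and the
-                     resource names and restore target below are hypothetical
-                     and abridged.
- 
-                     .. code-block:: python
- 
-                         from google.cloud import backupdr_v1
- 
-                         client = backupdr_v1.BackupDRClient()
-                         operation = client.restore_backup(
-                             request=backupdr_v1.RestoreBackupRequest(
-                                 name=(
-                                     "projects/my-project/locations/us-central1"
-                                     "/backupVaults/my-vault/dataSources/my-ds/backups/my-backup"
-                                 ),
-                                 # A real request also needs a restore target,
-                                 # e.g. the compute_instance_* fields; omitted
-                                 # here for brevity.
-                             ),
-                         )
-                         result = operation.result()  # blocks until the restore finishes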
- - """ - - http_options = _BaseBackupDRRestTransport._BaseRestoreBackup._get_http_options() - - request, metadata = self._interceptor.pre_restore_backup(request, metadata) - transcoded_request = _BaseBackupDRRestTransport._BaseRestoreBackup._get_transcoded_request(http_options, request) - - body = _BaseBackupDRRestTransport._BaseRestoreBackup._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBackupDRRestTransport._BaseRestoreBackup._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.RestoreBackup", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "RestoreBackup", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BackupDRRestTransport._RestoreBackup._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_restore_backup(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_restore_backup_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.restore_backup", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "RestoreBackup", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _TriggerBackup(_BaseBackupDRRestTransport._BaseTriggerBackup, BackupDRRestStub): - def __hash__(self): - return hash("BackupDRRestTransport.TriggerBackup") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: backupplanassociation.TriggerBackupRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the trigger backup method over HTTP. 
- - Args: - request (~.backupplanassociation.TriggerBackupRequest): - The request object. Request message for triggering a - backup. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseBackupDRRestTransport._BaseTriggerBackup._get_http_options() - - request, metadata = self._interceptor.pre_trigger_backup(request, metadata) - transcoded_request = _BaseBackupDRRestTransport._BaseTriggerBackup._get_transcoded_request(http_options, request) - - body = _BaseBackupDRRestTransport._BaseTriggerBackup._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBackupDRRestTransport._BaseTriggerBackup._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.TriggerBackup", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "TriggerBackup", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BackupDRRestTransport._TriggerBackup._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
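-             # NOTE: unlike the List* stubs, mutating RPCs such as
-             # TriggerBackup send a JSON-encoded request body (``data=body``)
-             # in addition to the transcoded query parameters.
-             # NOTE: the bare ``except:`` guards in the logging shims above and
-             # below act as catch-alls: a payload that fails to serialize is
-             # logged as None rather than letting debug logging break the RPC.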
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_trigger_backup(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_trigger_backup_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.trigger_backup", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "TriggerBackup", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateBackup(_BaseBackupDRRestTransport._BaseUpdateBackup, BackupDRRestStub): - def __hash__(self): - return hash("BackupDRRestTransport.UpdateBackup") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: backupvault.UpdateBackupRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the update backup method over HTTP. - - Args: - request (~.backupvault.UpdateBackupRequest): - The request object. Request message for updating a - Backup. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
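- 
-                 Example:
-                     Illustrative sketch only (hypothetical values); the
-                     ``update_mask`` names which Backup fields to overwrite.
- 
-                     .. code-block:: python
- 
-                         from google.cloud import backupdr_v1
-                         from google.protobuf import field_mask_pb2
- 
-                         client = backupdr_v1.BackupDRClient()
-                         backup = backupdr_v1.Backup(
-                             name=(
-                                 "projects/my-project/locations/us-central1"
-                                 "/backupVaults/my-vault/dataSources/my-ds/backups/my-backup"
-                             ),
-                             description="refreshed by the nightly job",
-                         )
-                         operation = client.update_backup(
-                             backup=backup,
-                             update_mask=field_mask_pb2.FieldMask(paths=["description"]),
-                         )
-                         operation.result()  # the update is a long-running operation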
- - """ - - http_options = _BaseBackupDRRestTransport._BaseUpdateBackup._get_http_options() - - request, metadata = self._interceptor.pre_update_backup(request, metadata) - transcoded_request = _BaseBackupDRRestTransport._BaseUpdateBackup._get_transcoded_request(http_options, request) - - body = _BaseBackupDRRestTransport._BaseUpdateBackup._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBackupDRRestTransport._BaseUpdateBackup._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.UpdateBackup", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "UpdateBackup", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BackupDRRestTransport._UpdateBackup._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_backup(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_backup_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.update_backup", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "UpdateBackup", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateBackupVault(_BaseBackupDRRestTransport._BaseUpdateBackupVault, BackupDRRestStub): - def __hash__(self): - return hash("BackupDRRestTransport.UpdateBackupVault") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: backupvault.UpdateBackupVaultRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the update backup vault method over HTTP. 
- - Args: - request (~.backupvault.UpdateBackupVaultRequest): - The request object. Request message for updating a - BackupVault. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseBackupDRRestTransport._BaseUpdateBackupVault._get_http_options() - - request, metadata = self._interceptor.pre_update_backup_vault(request, metadata) - transcoded_request = _BaseBackupDRRestTransport._BaseUpdateBackupVault._get_transcoded_request(http_options, request) - - body = _BaseBackupDRRestTransport._BaseUpdateBackupVault._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBackupDRRestTransport._BaseUpdateBackupVault._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.UpdateBackupVault", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "UpdateBackupVault", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BackupDRRestTransport._UpdateBackupVault._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
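-             # NOTE: the pre_*/post_* calls bracketing each RPC are interceptor
-             # hooks; a custom BackupDRRestInterceptor can rewrite the request,
-             # metadata, or response here without modifying the transport
-             # itself.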
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_backup_vault(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_backup_vault_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.update_backup_vault", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "UpdateBackupVault", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateDataSource(_BaseBackupDRRestTransport._BaseUpdateDataSource, BackupDRRestStub): - def __hash__(self): - return hash("BackupDRRestTransport.UpdateDataSource") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: backupvault.UpdateDataSourceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the update data source method over HTTP. - - Args: - request (~.backupvault.UpdateDataSourceRequest): - The request object. Request message for updating a data - source instance. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = _BaseBackupDRRestTransport._BaseUpdateDataSource._get_http_options() - - request, metadata = self._interceptor.pre_update_data_source(request, metadata) - transcoded_request = _BaseBackupDRRestTransport._BaseUpdateDataSource._get_transcoded_request(http_options, request) - - body = _BaseBackupDRRestTransport._BaseUpdateDataSource._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBackupDRRestTransport._BaseUpdateDataSource._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.UpdateDataSource", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "UpdateDataSource", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BackupDRRestTransport._UpdateDataSource._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_data_source(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_data_source_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.update_data_source", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "UpdateDataSource", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - @property - def create_backup_plan(self) -> Callable[ - [backupplan.CreateBackupPlanRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateBackupPlan(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_backup_plan_association(self) -> Callable[ - [backupplanassociation.CreateBackupPlanAssociationRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._CreateBackupPlanAssociation(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_backup_vault(self) -> Callable[ - [backupvault.CreateBackupVaultRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateBackupVault(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_management_server(self) -> Callable[ - [backupdr.CreateManagementServerRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateManagementServer(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_backup(self) -> Callable[ - [backupvault.DeleteBackupRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteBackup(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_backup_plan(self) -> Callable[ - [backupplan.DeleteBackupPlanRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteBackupPlan(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_backup_plan_association(self) -> Callable[ - [backupplanassociation.DeleteBackupPlanAssociationRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteBackupPlanAssociation(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_backup_vault(self) -> Callable[ - [backupvault.DeleteBackupVaultRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteBackupVault(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_management_server(self) -> Callable[ - [backupdr.DeleteManagementServerRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteManagementServer(self._session, self._host, self._interceptor) # type: ignore - - @property - def fetch_usable_backup_vaults(self) -> Callable[ - [backupvault.FetchUsableBackupVaultsRequest], - backupvault.FetchUsableBackupVaultsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._FetchUsableBackupVaults(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_backup(self) -> Callable[ - [backupvault.GetBackupRequest], - backupvault.Backup]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._GetBackup(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_backup_plan(self) -> Callable[ - [backupplan.GetBackupPlanRequest], - backupplan.BackupPlan]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetBackupPlan(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_backup_plan_association(self) -> Callable[ - [backupplanassociation.GetBackupPlanAssociationRequest], - backupplanassociation.BackupPlanAssociation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetBackupPlanAssociation(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_backup_vault(self) -> Callable[ - [backupvault.GetBackupVaultRequest], - backupvault.BackupVault]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetBackupVault(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_data_source(self) -> Callable[ - [backupvault.GetDataSourceRequest], - backupvault.DataSource]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetDataSource(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_management_server(self) -> Callable[ - [backupdr.GetManagementServerRequest], - backupdr.ManagementServer]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetManagementServer(self._session, self._host, self._interceptor) # type: ignore - - @property - def initialize_service(self) -> Callable[ - [backupdr.InitializeServiceRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._InitializeService(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_backup_plan_associations(self) -> Callable[ - [backupplanassociation.ListBackupPlanAssociationsRequest], - backupplanassociation.ListBackupPlanAssociationsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListBackupPlanAssociations(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_backup_plans(self) -> Callable[ - [backupplan.ListBackupPlansRequest], - backupplan.ListBackupPlansResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListBackupPlans(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_backups(self) -> Callable[ - [backupvault.ListBackupsRequest], - backupvault.ListBackupsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._ListBackups(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_backup_vaults(self) -> Callable[ - [backupvault.ListBackupVaultsRequest], - backupvault.ListBackupVaultsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListBackupVaults(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_data_sources(self) -> Callable[ - [backupvault.ListDataSourcesRequest], - backupvault.ListDataSourcesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListDataSources(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_management_servers(self) -> Callable[ - [backupdr.ListManagementServersRequest], - backupdr.ListManagementServersResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListManagementServers(self._session, self._host, self._interceptor) # type: ignore - - @property - def restore_backup(self) -> Callable[ - [backupvault.RestoreBackupRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._RestoreBackup(self._session, self._host, self._interceptor) # type: ignore - - @property - def trigger_backup(self) -> Callable[ - [backupplanassociation.TriggerBackupRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._TriggerBackup(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_backup(self) -> Callable[ - [backupvault.UpdateBackupRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateBackup(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_backup_vault(self) -> Callable[ - [backupvault.UpdateBackupVaultRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateBackupVault(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_data_source(self) -> Callable[ - [backupvault.UpdateDataSourceRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._UpdateDataSource(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_location(self): - return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore - - class _GetLocation(_BaseBackupDRRestTransport._BaseGetLocation, BackupDRRestStub): - def __hash__(self): - return hash("BackupDRRestTransport.GetLocation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: locations_pb2.GetLocationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.Location: - - r"""Call the get location method over HTTP. - - Args: - request (locations_pb2.GetLocationRequest): - The request object for GetLocation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - locations_pb2.Location: Response from GetLocation method. - """ - - http_options = _BaseBackupDRRestTransport._BaseGetLocation._get_http_options() - - request, metadata = self._interceptor.pre_get_location(request, metadata) - transcoded_request = _BaseBackupDRRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBackupDRRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.GetLocation", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "GetLocation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BackupDRRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
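-             # NOTE: the location and IAM mixin handlers below decode the body
-             # and parse it straight into a pb2 message with json_format.Parse;
-             # unlike the service RPCs, there is no proto-plus wrapper to go
-             # through.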
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = locations_pb2.Location() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_location(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRAsyncClient.GetLocation", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "GetLocation", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def list_locations(self): - return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore - - class _ListLocations(_BaseBackupDRRestTransport._BaseListLocations, BackupDRRestStub): - def __hash__(self): - return hash("BackupDRRestTransport.ListLocations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: locations_pb2.ListLocationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.ListLocationsResponse: - - r"""Call the list locations method over HTTP. - - Args: - request (locations_pb2.ListLocationsRequest): - The request object for ListLocations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - locations_pb2.ListLocationsResponse: Response from ListLocations method. 
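- 
-             Example:
-                 Illustrative sketch (the project name is hypothetical);
-                 through the public client this mixin returns the
-                 ``ListLocationsResponse`` message.
- 
-                 .. code-block:: python
- 
-                     from google.cloud import backupdr_v1
- 
-                     client = backupdr_v1.BackupDRClient()
-                     response = client.list_locations(
-                         request={"name": "projects/my-project"},
-                     )
-                     for location in response.locations:
-                         print(location.location_id)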
- """ - - http_options = _BaseBackupDRRestTransport._BaseListLocations._get_http_options() - - request, metadata = self._interceptor.pre_list_locations(request, metadata) - transcoded_request = _BaseBackupDRRestTransport._BaseListLocations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBackupDRRestTransport._BaseListLocations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.ListLocations", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "ListLocations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BackupDRRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = locations_pb2.ListLocationsResponse() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_list_locations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRAsyncClient.ListLocations", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "ListLocations", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def get_iam_policy(self): - return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - - class _GetIamPolicy(_BaseBackupDRRestTransport._BaseGetIamPolicy, BackupDRRestStub): - def __hash__(self): - return hash("BackupDRRestTransport.GetIamPolicy") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: iam_policy_pb2.GetIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> policy_pb2.Policy: - - r"""Call the get iam policy method over HTTP. - - Args: - request (iam_policy_pb2.GetIamPolicyRequest): - The request object for GetIamPolicy method. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - policy_pb2.Policy: Response from GetIamPolicy method. - """ - - http_options = _BaseBackupDRRestTransport._BaseGetIamPolicy._get_http_options() - - request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) - transcoded_request = _BaseBackupDRRestTransport._BaseGetIamPolicy._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBackupDRRestTransport._BaseGetIamPolicy._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.GetIamPolicy", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "GetIamPolicy", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BackupDRRestTransport._GetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = policy_pb2.Policy() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_iam_policy(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRAsyncClient.GetIamPolicy", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "GetIamPolicy", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def set_iam_policy(self): - return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - - class _SetIamPolicy(_BaseBackupDRRestTransport._BaseSetIamPolicy, BackupDRRestStub): - def __hash__(self): - return hash("BackupDRRestTransport.SetIamPolicy") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: iam_policy_pb2.SetIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> policy_pb2.Policy: - - r"""Call the set iam policy method over HTTP. - - Args: - request (iam_policy_pb2.SetIamPolicyRequest): - The request object for SetIamPolicy method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - policy_pb2.Policy: Response from SetIamPolicy method. 
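- 
-             Example:
-                 An illustrative read-modify-write sketch (resource and role
-                 are hypothetical); reusing the policy returned by GetIamPolicy
-                 keeps its ``etag``, so a concurrent writer makes this call
-                 fail instead of silently clobbering the other update.
- 
-                 .. code-block:: python
- 
-                     from google.cloud import backupdr_v1
- 
-                     client = backupdr_v1.BackupDRClient()
-                     resource = "projects/my-project/locations/us-central1/managementServers/my-server"
-                     policy = client.get_iam_policy(request={"resource": resource})
-                     policy.bindings.add(
-                         role="roles/backupdr.admin",
-                         members=["user:alice@example.com"],
-                     )
-                     policy = client.set_iam_policy(
-                         request={"resource": resource, "policy": policy},
-                     )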
- """ - - http_options = _BaseBackupDRRestTransport._BaseSetIamPolicy._get_http_options() - - request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) - transcoded_request = _BaseBackupDRRestTransport._BaseSetIamPolicy._get_transcoded_request(http_options, request) - - body = _BaseBackupDRRestTransport._BaseSetIamPolicy._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBackupDRRestTransport._BaseSetIamPolicy._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.SetIamPolicy", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "SetIamPolicy", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BackupDRRestTransport._SetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = policy_pb2.Policy() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_set_iam_policy(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRAsyncClient.SetIamPolicy", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "SetIamPolicy", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def test_iam_permissions(self): - return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore - - class _TestIamPermissions(_BaseBackupDRRestTransport._BaseTestIamPermissions, BackupDRRestStub): - def __hash__(self): - return hash("BackupDRRestTransport.TestIamPermissions") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: iam_policy_pb2.TestIamPermissionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - - r"""Call the test iam permissions method over HTTP. 
- - Args: - request (iam_policy_pb2.TestIamPermissionsRequest): - The request object for TestIamPermissions method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. - """ - - http_options = _BaseBackupDRRestTransport._BaseTestIamPermissions._get_http_options() - - request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) - transcoded_request = _BaseBackupDRRestTransport._BaseTestIamPermissions._get_transcoded_request(http_options, request) - - body = _BaseBackupDRRestTransport._BaseTestIamPermissions._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBackupDRRestTransport._BaseTestIamPermissions._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.TestIamPermissions", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "TestIamPermissions", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BackupDRRestTransport._TestIamPermissions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = iam_policy_pb2.TestIamPermissionsResponse() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_test_iam_permissions(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRAsyncClient.TestIamPermissions", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "TestIamPermissions", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - - class _CancelOperation(_BaseBackupDRRestTransport._BaseCancelOperation, BackupDRRestStub): - def __hash__(self): - return hash("BackupDRRestTransport.CancelOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - - r"""Call the cancel operation method over HTTP. - - Args: - request (operations_pb2.CancelOperationRequest): - The request object for CancelOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = _BaseBackupDRRestTransport._BaseCancelOperation._get_http_options() - - request, metadata = self._interceptor.pre_cancel_operation(request, metadata) - transcoded_request = _BaseBackupDRRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) - - body = _BaseBackupDRRestTransport._BaseCancelOperation._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBackupDRRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.CancelOperation", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "CancelOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BackupDRRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_cancel_operation(None) - - @property - def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - - class _DeleteOperation(_BaseBackupDRRestTransport._BaseDeleteOperation, BackupDRRestStub): - def __hash__(self): - return hash("BackupDRRestTransport.DeleteOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - - r"""Call the delete operation method over HTTP. - - Args: - request (operations_pb2.DeleteOperationRequest): - The request object for DeleteOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = _BaseBackupDRRestTransport._BaseDeleteOperation._get_http_options() - - request, metadata = self._interceptor.pre_delete_operation(request, metadata) - transcoded_request = _BaseBackupDRRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBackupDRRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.DeleteOperation", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "DeleteOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BackupDRRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_delete_operation(None) - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(_BaseBackupDRRestTransport._BaseGetOperation, BackupDRRestStub): - def __hash__(self): - return hash("BackupDRRestTransport.GetOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.Operation: Response from GetOperation method. 
- """ - - http_options = _BaseBackupDRRestTransport._BaseGetOperation._get_http_options() - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - transcoded_request = _BaseBackupDRRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBackupDRRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.GetOperation", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "GetOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BackupDRRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.Operation() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRAsyncClient.GetOperation", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "GetOperation", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - - class _ListOperations(_BaseBackupDRRestTransport._BaseListOperations, BackupDRRestStub): - def __hash__(self): - return hash("BackupDRRestTransport.ListOperations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.ListOperationsResponse: - - r"""Call the list operations method over HTTP. - - Args: - request (operations_pb2.ListOperationsRequest): - The request object for ListOperations method. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.ListOperationsResponse: Response from ListOperations method. - """ - - http_options = _BaseBackupDRRestTransport._BaseListOperations._get_http_options() - - request, metadata = self._interceptor.pre_list_operations(request, metadata) - transcoded_request = _BaseBackupDRRestTransport._BaseListOperations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBackupDRRestTransport._BaseListOperations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.ListOperations", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "ListOperations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BackupDRRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.ListOperationsResponse() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_list_operations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRAsyncClient.ListOperations", - extra = { - "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "ListOperations", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'BackupDRRestTransport', -) diff --git a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/services/backup_dr/transports/rest_base.py b/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/services/backup_dr/transports/rest_base.py deleted file mode 100644 index 59707ac5a942..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/services/backup_dr/transports/rest_base.py +++ /dev/null @@ -1,1474 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from .base import BackupDRTransport, DEFAULT_CLIENT_INFO - -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - - -from google.cloud.backupdr_v1.types import backupdr -from google.cloud.backupdr_v1.types import backupplan -from google.cloud.backupdr_v1.types import backupplanassociation -from google.cloud.backupdr_v1.types import backupvault -from google.longrunning import operations_pb2 # type: ignore - - -class _BaseBackupDRRestTransport(BackupDRTransport): - """Base REST backend transport for BackupDR. - - Note: This class is not meant to be used directly. Use its sync and - async sub-classes instead. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'backupdr.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - Args: - host (Optional[str]): - The hostname to connect to (default: 'backupdr.googleapis.com'). - credentials (Optional[Any]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - - class _BaseCreateBackupPlan: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "backupPlanId" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/backupPlans', - 'body': 'backup_plan', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = backupplan.CreateBackupPlanRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBackupDRRestTransport._BaseCreateBackupPlan._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateBackupPlanAssociation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "backupPlanAssociationId" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k:
v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/backupPlanAssociations', - 'body': 'backup_plan_association', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = backupplanassociation.CreateBackupPlanAssociationRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBackupDRRestTransport._BaseCreateBackupPlanAssociation._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateBackupVault: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "backupVaultId" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/backupVaults', - 'body': 'backup_vault', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = backupvault.CreateBackupVaultRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBackupDRRestTransport._BaseCreateBackupVault._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateManagementServer: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "managementServerId" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/managementServers', - 'body': 'management_server', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = backupdr.CreateManagementServerRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def 
_get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBackupDRRestTransport._BaseCreateManagementServer._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteBackup: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = backupvault.DeleteBackupRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBackupDRRestTransport._BaseDeleteBackup._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteBackupPlan: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/backupPlans/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = backupplan.DeleteBackupPlanRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBackupDRRestTransport._BaseDeleteBackupPlan._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteBackupPlanAssociation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/backupPlanAssociations/*}', - }, - ] - return http_options - - 
@staticmethod - def _get_transcoded_request(http_options, request): - pb_request = backupplanassociation.DeleteBackupPlanAssociationRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBackupDRRestTransport._BaseDeleteBackupPlanAssociation._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteBackupVault: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/backupVaults/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = backupvault.DeleteBackupVaultRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBackupDRRestTransport._BaseDeleteBackupVault._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteManagementServer: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/managementServers/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = backupdr.DeleteManagementServerRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBackupDRRestTransport._BaseDeleteManagementServer._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseFetchUsableBackupVaults: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': 
'/v1/{parent=projects/*/locations/*}/backupVaults:fetchUsable', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = backupvault.FetchUsableBackupVaultsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBackupDRRestTransport._BaseFetchUsableBackupVaults._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetBackup: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = backupvault.GetBackupRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBackupDRRestTransport._BaseGetBackup._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetBackupPlan: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/backupPlans/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = backupplan.GetBackupPlanRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBackupDRRestTransport._BaseGetBackupPlan._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetBackupPlanAssociation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 
'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/backupPlanAssociations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = backupplanassociation.GetBackupPlanAssociationRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBackupDRRestTransport._BaseGetBackupPlanAssociation._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetBackupVault: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/backupVaults/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = backupvault.GetBackupVaultRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBackupDRRestTransport._BaseGetBackupVault._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetDataSource: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = backupvault.GetDataSourceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBackupDRRestTransport._BaseGetDataSource._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetManagementServer: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - 
http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/managementServers/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = backupdr.GetManagementServerRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBackupDRRestTransport._BaseGetManagementServer._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseInitializeService: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/serviceConfig}:initialize', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = backupdr.InitializeServiceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBackupDRRestTransport._BaseInitializeService._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListBackupPlanAssociations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/backupPlanAssociations', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = backupplanassociation.ListBackupPlanAssociationsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBackupDRRestTransport._BaseListBackupPlanAssociations._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListBackupPlans: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be 
implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/backupPlans', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = backupplan.ListBackupPlansRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBackupDRRestTransport._BaseListBackupPlans._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListBackups: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*/backupVaults/*/dataSources/*}/backups', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = backupvault.ListBackupsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBackupDRRestTransport._BaseListBackups._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListBackupVaults: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/backupVaults', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = backupvault.ListBackupVaultsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBackupDRRestTransport._BaseListBackupVaults._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListDataSources: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be 
implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*/backupVaults/*}/dataSources', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = backupvault.ListDataSourcesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBackupDRRestTransport._BaseListDataSources._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListManagementServers: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/managementServers', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = backupdr.ListManagementServersRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBackupDRRestTransport._BaseListManagementServers._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseRestoreBackup: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}:restore', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = backupvault.RestoreBackupRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - 
query_params.update(_BaseBackupDRRestTransport._BaseRestoreBackup._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseTriggerBackup: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/backupPlanAssociations/*}:triggerBackup', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = backupplanassociation.TriggerBackupRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBackupDRRestTransport._BaseTriggerBackup._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateBackup: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{backup.name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}', - 'body': 'backup', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = backupvault.UpdateBackupRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBackupDRRestTransport._BaseUpdateBackup._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateBackupVault: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 
'patch', - 'uri': '/v1/{backup_vault.name=projects/*/locations/*/backupVaults/*}', - 'body': 'backup_vault', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = backupvault.UpdateBackupVaultRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBackupDRRestTransport._BaseUpdateBackupVault._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateDataSource: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{data_source.name=projects/*/locations/*/backupVaults/*/dataSources/*}', - 'body': 'data_source', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = backupvault.UpdateDataSourceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBackupDRRestTransport._BaseUpdateDataSource._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetLocation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseListLocations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*}/locations', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - 
request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseGetIamPolicy: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{resource=projects/*/locations/*/managementServers/*}:getIamPolicy', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseSetIamPolicy: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/managementServers/*}:setIamPolicy', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - body = json.dumps(transcoded_request['body']) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseTestIamPermissions: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/managementServers/*}:testIamPermissions', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - body = json.dumps(transcoded_request['body']) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseCancelOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - body 
= json.dumps(transcoded_request['body']) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseDeleteOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseGetOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseListOperations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - -__all__=( - '_BaseBackupDRRestTransport', -) diff --git a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/types/__init__.py b/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/types/__init__.py deleted file mode 100644 index ba565583f020..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/types/__init__.py +++ /dev/null @@ -1,212 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
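Editorial note: the _Base* classes above all follow the same four-step recipe: declare the HTTP binding, transcode the request onto the URI template, JSON-encode the body, and build the query parameters, back-filling defaults for any unset required fields. Below is a minimal, self-contained sketch of that defaulting step; plain dicts stand in for the transcoded protobuf request, and the field names and values are illustrative, not taken from a real request.

# Sketch of the query-parameter defaulting performed by the _Base* helpers.
REQUIRED_FIELDS_DEFAULT_VALUES = {"updateMask": {}}

def get_unset_required_fields(message_dict):
    # Keep only the required fields the caller left unset, paired with
    # their defaults, mirroring _get_unset_required_fields above.
    return {
        k: v
        for k, v in REQUIRED_FIELDS_DEFAULT_VALUES.items()
        if k not in message_dict
    }

query_params = {"requestId": "1234"}  # what the caller actually set
query_params.update(get_unset_required_fields(query_params))
query_params["$alt"] = "json;enum-encoding=int"  # fixed response encoding
# query_params is now {'requestId': '1234', 'updateMask': {}, '$alt': 'json;enum-encoding=int'}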
-# -from .backupdr import ( - CreateManagementServerRequest, - DeleteManagementServerRequest, - GetManagementServerRequest, - InitializeServiceRequest, - InitializeServiceResponse, - ListManagementServersRequest, - ListManagementServersResponse, - ManagementServer, - ManagementURI, - NetworkConfig, - OperationMetadata, - WorkforceIdentityBasedManagementURI, - WorkforceIdentityBasedOAuth2ClientID, -) -from .backupplan import ( - BackupPlan, - BackupRule, - BackupWindow, - CreateBackupPlanRequest, - DeleteBackupPlanRequest, - GetBackupPlanRequest, - ListBackupPlansRequest, - ListBackupPlansResponse, - StandardSchedule, - WeekDayOfMonth, -) -from .backupplanassociation import ( - BackupPlanAssociation, - CreateBackupPlanAssociationRequest, - DeleteBackupPlanAssociationRequest, - GetBackupPlanAssociationRequest, - ListBackupPlanAssociationsRequest, - ListBackupPlanAssociationsResponse, - RuleConfigInfo, - TriggerBackupRequest, -) -from .backupvault import ( - Backup, - BackupApplianceBackupConfig, - BackupApplianceLockInfo, - BackupConfigInfo, - BackupLock, - BackupVault, - CreateBackupVaultRequest, - DataSource, - DataSourceBackupApplianceApplication, - DataSourceGcpResource, - DeleteBackupRequest, - DeleteBackupVaultRequest, - FetchUsableBackupVaultsRequest, - FetchUsableBackupVaultsResponse, - GcpBackupConfig, - GcpResource, - GetBackupRequest, - GetBackupVaultRequest, - GetDataSourceRequest, - ListBackupsRequest, - ListBackupsResponse, - ListBackupVaultsRequest, - ListBackupVaultsResponse, - ListDataSourcesRequest, - ListDataSourcesResponse, - RestoreBackupRequest, - RestoreBackupResponse, - ServiceLockInfo, - TargetResource, - UpdateBackupRequest, - UpdateBackupVaultRequest, - UpdateDataSourceRequest, - BackupConfigState, - BackupVaultView, - BackupView, -) -from .backupvault_ba import ( - BackupApplianceBackupProperties, -) -from .backupvault_gce import ( - AcceleratorConfig, - AccessConfig, - AdvancedMachineFeatures, - AliasIpRange, - AllocationAffinity, - AttachedDisk, - ComputeInstanceBackupProperties, - ComputeInstanceDataSourceProperties, - ComputeInstanceRestoreProperties, - ComputeInstanceTargetEnvironment, - ConfidentialInstanceConfig, - CustomerEncryptionKey, - DisplayDevice, - Entry, - GuestOsFeature, - InstanceParams, - Metadata, - NetworkInterface, - NetworkPerformanceConfig, - Scheduling, - SchedulingDuration, - ServiceAccount, - Tags, - KeyRevocationActionType, -) - -__all__ = ( - 'CreateManagementServerRequest', - 'DeleteManagementServerRequest', - 'GetManagementServerRequest', - 'InitializeServiceRequest', - 'InitializeServiceResponse', - 'ListManagementServersRequest', - 'ListManagementServersResponse', - 'ManagementServer', - 'ManagementURI', - 'NetworkConfig', - 'OperationMetadata', - 'WorkforceIdentityBasedManagementURI', - 'WorkforceIdentityBasedOAuth2ClientID', - 'BackupPlan', - 'BackupRule', - 'BackupWindow', - 'CreateBackupPlanRequest', - 'DeleteBackupPlanRequest', - 'GetBackupPlanRequest', - 'ListBackupPlansRequest', - 'ListBackupPlansResponse', - 'StandardSchedule', - 'WeekDayOfMonth', - 'BackupPlanAssociation', - 'CreateBackupPlanAssociationRequest', - 'DeleteBackupPlanAssociationRequest', - 'GetBackupPlanAssociationRequest', - 'ListBackupPlanAssociationsRequest', - 'ListBackupPlanAssociationsResponse', - 'RuleConfigInfo', - 'TriggerBackupRequest', - 'Backup', - 'BackupApplianceBackupConfig', - 'BackupApplianceLockInfo', - 'BackupConfigInfo', - 'BackupLock', - 'BackupVault', - 'CreateBackupVaultRequest', - 'DataSource', - 'DataSourceBackupApplianceApplication', 
- 'DataSourceGcpResource', - 'DeleteBackupRequest', - 'DeleteBackupVaultRequest', - 'FetchUsableBackupVaultsRequest', - 'FetchUsableBackupVaultsResponse', - 'GcpBackupConfig', - 'GcpResource', - 'GetBackupRequest', - 'GetBackupVaultRequest', - 'GetDataSourceRequest', - 'ListBackupsRequest', - 'ListBackupsResponse', - 'ListBackupVaultsRequest', - 'ListBackupVaultsResponse', - 'ListDataSourcesRequest', - 'ListDataSourcesResponse', - 'RestoreBackupRequest', - 'RestoreBackupResponse', - 'ServiceLockInfo', - 'TargetResource', - 'UpdateBackupRequest', - 'UpdateBackupVaultRequest', - 'UpdateDataSourceRequest', - 'BackupConfigState', - 'BackupVaultView', - 'BackupView', - 'BackupApplianceBackupProperties', - 'AcceleratorConfig', - 'AccessConfig', - 'AdvancedMachineFeatures', - 'AliasIpRange', - 'AllocationAffinity', - 'AttachedDisk', - 'ComputeInstanceBackupProperties', - 'ComputeInstanceDataSourceProperties', - 'ComputeInstanceRestoreProperties', - 'ComputeInstanceTargetEnvironment', - 'ConfidentialInstanceConfig', - 'CustomerEncryptionKey', - 'DisplayDevice', - 'Entry', - 'GuestOsFeature', - 'InstanceParams', - 'Metadata', - 'NetworkInterface', - 'NetworkPerformanceConfig', - 'Scheduling', - 'SchedulingDuration', - 'ServiceAccount', - 'Tags', - 'KeyRevocationActionType', -) diff --git a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/types/backupdr.py b/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/types/backupdr.py deleted file mode 100644 index 0b7a7b1db1d8..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/types/backupdr.py +++ /dev/null @@ -1,699 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import timestamp_pb2 # type: ignore -from google.protobuf import wrappers_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.backupdr.v1', - manifest={ - 'NetworkConfig', - 'ManagementURI', - 'WorkforceIdentityBasedManagementURI', - 'WorkforceIdentityBasedOAuth2ClientID', - 'ManagementServer', - 'ListManagementServersRequest', - 'ListManagementServersResponse', - 'GetManagementServerRequest', - 'CreateManagementServerRequest', - 'DeleteManagementServerRequest', - 'InitializeServiceRequest', - 'InitializeServiceResponse', - 'OperationMetadata', - }, -) - - -class NetworkConfig(proto.Message): - r"""Network configuration for ManagementServer instance. - - Attributes: - network (str): - Optional. The resource name of the Google - Compute Engine VPC network to which the - ManagementServer instance is connected. - peering_mode (google.cloud.backupdr_v1.types.NetworkConfig.PeeringMode): - Optional. The network connect mode of the ManagementServer - instance. For this version, only PRIVATE_SERVICE_ACCESS is - supported. - """ - class PeeringMode(proto.Enum): - r"""VPC peering modes supported by Cloud BackupDR. 
- - Values: - PEERING_MODE_UNSPECIFIED (0): - Peering mode not set. - PRIVATE_SERVICE_ACCESS (1): - Connect using Private Service Access to the - Management Server. Private services access - provides an IP address range for multiple Google - Cloud services, including Cloud BackupDR. - """ - PEERING_MODE_UNSPECIFIED = 0 - PRIVATE_SERVICE_ACCESS = 1 - - network: str = proto.Field( - proto.STRING, - number=1, - ) - peering_mode: PeeringMode = proto.Field( - proto.ENUM, - number=2, - enum=PeeringMode, - ) - - -class ManagementURI(proto.Message): - r"""ManagementURI for the Management Server resource. - - Attributes: - web_ui (str): - Output only. The ManagementServer AGM/RD - WebUI URL. - api (str): - Output only. The ManagementServer AGM/RD API - URL. - """ - - web_ui: str = proto.Field( - proto.STRING, - number=1, - ) - api: str = proto.Field( - proto.STRING, - number=2, - ) - - -class WorkforceIdentityBasedManagementURI(proto.Message): - r"""ManagementURI depending on the Workforce Identity, i.e. either - 1p or 3p. - - Attributes: - first_party_management_uri (str): - Output only. First party Management URI for - Google Identities. - third_party_management_uri (str): - Output only. Third party Management URI for - External Identity Providers. - """ - - first_party_management_uri: str = proto.Field( - proto.STRING, - number=1, - ) - third_party_management_uri: str = proto.Field( - proto.STRING, - number=2, - ) - - -class WorkforceIdentityBasedOAuth2ClientID(proto.Message): - r"""OAuth Client ID depending on the Workforce Identity, i.e. - either 1p or 3p. - - Attributes: - first_party_oauth2_client_id (str): - Output only. First party OAuth Client ID for - Google Identities. - third_party_oauth2_client_id (str): - Output only. Third party OAuth Client ID for - External Identity Providers. - """ - - first_party_oauth2_client_id: str = proto.Field( - proto.STRING, - number=1, - ) - third_party_oauth2_client_id: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ManagementServer(proto.Message): - r"""ManagementServer describes a single BackupDR ManagementServer - instance. - - Attributes: - name (str): - Output only. Identifier. The resource name. - description (str): - Optional. The description of the - ManagementServer instance (2048 characters or - less). - labels (MutableMapping[str, str]): - Optional. Resource labels to represent user provided - metadata. Labels currently defined: - - 1. migrate_from_go= If set to true, the MS is - created in migration ready mode. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the instance was - created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the instance was - updated. - type_ (google.cloud.backupdr_v1.types.ManagementServer.InstanceType): - Optional. The type of the ManagementServer - resource. - management_uri (google.cloud.backupdr_v1.types.ManagementURI): - Output only. The hostname or ip address of - the exposed AGM endpoints, used by clients to - connect to AGM/RD graphical user interface and - APIs. - workforce_identity_based_management_uri (google.cloud.backupdr_v1.types.WorkforceIdentityBasedManagementURI): - Output only. The hostnames of the exposed AGM - endpoints for both types of user i.e. 1p and 3p, - used to connect AGM/RM UI. - state (google.cloud.backupdr_v1.types.ManagementServer.InstanceState): - Output only. The ManagementServer state. - networks (MutableSequence[google.cloud.backupdr_v1.types.NetworkConfig]): - Optional.
VPC networks to which the - ManagementServer instance is connected. For this - version, only a single network is supported. - This field is optional if MS is created without - PSA. - etag (str): - Optional. Server specified ETag for the - ManagementServer resource to prevent - simultaneous updates from overwriting each other. - oauth2_client_id (str): - Output only. The OAuth 2.0 client id is required to make API - calls to the BackupDR instance API of this ManagementServer. - This is the value that should be provided in the 'aud' field - of the OIDC ID Token (see openid specification - https://openid.net/specs/openid-connect-core-1_0.html#IDToken). - workforce_identity_based_oauth2_client_id (google.cloud.backupdr_v1.types.WorkforceIdentityBasedOAuth2ClientID): - Output only. The OAuth client IDs for both - types of user i.e. 1p and 3p. - ba_proxy_uri (MutableSequence[str]): - Output only. The hostname or ip address of - the exposed AGM endpoints, used by BAs to - connect to BA proxy. - satisfies_pzs (google.protobuf.wrappers_pb2.BoolValue): - Output only. Reserved for future use. - satisfies_pzi (bool): - Output only. Reserved for future use. - """ - class InstanceType(proto.Enum): - r"""Type of backup service resource. - - Values: - INSTANCE_TYPE_UNSPECIFIED (0): - Instance type is not mentioned. - BACKUP_RESTORE (1): - Instance for backup and restore management - (i.e., AGM). - """ - INSTANCE_TYPE_UNSPECIFIED = 0 - BACKUP_RESTORE = 1 - - class InstanceState(proto.Enum): - r"""State of Management server instance. - - Values: - INSTANCE_STATE_UNSPECIFIED (0): - State not set. - CREATING (1): - The instance is being created. - READY (2): - The instance has been created and is fully - usable. - UPDATING (3): - The instance configuration is being updated. - Certain kinds of updates may cause the instance - to become unusable while the update is in - progress. - DELETING (4): - The instance is being deleted. - REPAIRING (5): - The instance is being repaired and may be - unstable. - MAINTENANCE (6): - Maintenance is being performed on this - instance. - ERROR (7): - The instance is experiencing an issue and - might be unusable. You can get further details - from the statusMessage field of Instance - resource.
- """ - INSTANCE_STATE_UNSPECIFIED = 0 - CREATING = 1 - READY = 2 - UPDATING = 3 - DELETING = 4 - REPAIRING = 5 - MAINTENANCE = 6 - ERROR = 7 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - description: str = proto.Field( - proto.STRING, - number=9, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=4, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - type_: InstanceType = proto.Field( - proto.ENUM, - number=14, - enum=InstanceType, - ) - management_uri: 'ManagementURI' = proto.Field( - proto.MESSAGE, - number=11, - message='ManagementURI', - ) - workforce_identity_based_management_uri: 'WorkforceIdentityBasedManagementURI' = proto.Field( - proto.MESSAGE, - number=16, - message='WorkforceIdentityBasedManagementURI', - ) - state: InstanceState = proto.Field( - proto.ENUM, - number=7, - enum=InstanceState, - ) - networks: MutableSequence['NetworkConfig'] = proto.RepeatedField( - proto.MESSAGE, - number=8, - message='NetworkConfig', - ) - etag: str = proto.Field( - proto.STRING, - number=13, - ) - oauth2_client_id: str = proto.Field( - proto.STRING, - number=15, - ) - workforce_identity_based_oauth2_client_id: 'WorkforceIdentityBasedOAuth2ClientID' = proto.Field( - proto.MESSAGE, - number=17, - message='WorkforceIdentityBasedOAuth2ClientID', - ) - ba_proxy_uri: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=18, - ) - satisfies_pzs: wrappers_pb2.BoolValue = proto.Field( - proto.MESSAGE, - number=19, - message=wrappers_pb2.BoolValue, - ) - satisfies_pzi: bool = proto.Field( - proto.BOOL, - number=20, - ) - - -class ListManagementServersRequest(proto.Message): - r"""Request message for listing management servers. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - parent (str): - Required. The project and location for which to retrieve - management servers information, in the format - 'projects/{project_id}/locations/{location}'. In Cloud - BackupDR, locations map to Google Cloud regions, for example - **us-central1**. To retrieve management servers for all - locations, use "-" for the '{location}' value. - page_size (int): - Optional. Requested page size. Server may - return fewer items than requested. If - unspecified, server will pick an appropriate - default. - page_token (str): - Optional. A token identifying a page of - results the server should return. - filter (str): - Optional. Filtering results. - - This field is a member of `oneof`_ ``_filter``. - order_by (str): - Optional. Hint for how to order the results. - - This field is a member of `oneof`_ ``_order_by``. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - optional=True, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - optional=True, - ) - - -class ListManagementServersResponse(proto.Message): - r"""Response message for listing management servers. - - Attributes: - management_servers (MutableSequence[google.cloud.backupdr_v1.types.ManagementServer]): - The list of ManagementServer instances in the - project for the specified location. 
- - If the '{location}' value in the request is "-", - the response contains a list of instances from - all locations. In case any location is - unreachable, the response will only return - management servers in reachable locations and - the 'unreachable' field will be populated with a - list of unreachable locations. - next_page_token (str): - A token identifying a page of results the - server should return. - unreachable (MutableSequence[str]): - Locations that could not be reached. - """ - - @property - def raw_page(self): - return self - - management_servers: MutableSequence['ManagementServer'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='ManagementServer', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class GetManagementServerRequest(proto.Message): - r"""Request message for getting a management server instance. - - Attributes: - name (str): - Required. Name of the management server resource, in - the format - 'projects/{project_id}/locations/{location}/managementServers/{resource_name}' - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateManagementServerRequest(proto.Message): - r"""Request message for creating a management server instance. - - Attributes: - parent (str): - Required. The management server project and location in the - format 'projects/{project_id}/locations/{location}'. In - Cloud Backup and DR locations map to Google Cloud regions, - for example **us-central1**. - management_server_id (str): - Required. The name of the management server - to create. The name must be unique for the - specified project and location. - management_server (google.cloud.backupdr_v1.types.ManagementServer): - Required. A [management server - resource][google.cloud.backupdr.v1.ManagementServer] - request_id (str): - Optional. An optional request ID to identify - requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes since the first request. - - For example, consider a situation where you make - an initial request and the request times out. If - you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This - prevents clients from accidentally creating - duplicate commitments. - - The request ID must be a valid UUID with the - exception that zero UUID is not supported - (00000000-0000-0000-0000-000000000000). - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - management_server_id: str = proto.Field( - proto.STRING, - number=2, - ) - management_server: 'ManagementServer' = proto.Field( - proto.MESSAGE, - number=3, - message='ManagementServer', - ) - request_id: str = proto.Field( - proto.STRING, - number=4, - ) - - -class DeleteManagementServerRequest(proto.Message): - r"""Request message for deleting a management server instance. - - Attributes: - name (str): - Required. Name of the resource - request_id (str): - Optional. An optional request ID to identify - requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes after the first request.
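To illustrate the "-" location wildcard described for ListManagementServersRequest above, a hedged sketch follows; it assumes the generated BackupDRClient surface with default credentials, and the project ID is hypothetical:

from google.cloud import backupdr_v1

client = backupdr_v1.BackupDRClient()  # assumes default credentials
# "-" in place of a region lists servers across all reachable locations;
# unreachable locations are reported in the response's `unreachable` field.
for server in client.list_management_servers(
    parent="projects/my-project/locations/-",
):
    print(server.name, server.state)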
- - For example, consider a situation where you make - an initial request and the request times out. If - you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This - prevents clients from accidentally creating - duplicate commitments. - - The request ID must be a valid UUID with the - exception that zero UUID is not supported - (00000000-0000-0000-0000-000000000000). - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - request_id: str = proto.Field( - proto.STRING, - number=2, - ) - - -class InitializeServiceRequest(proto.Message): - r"""Request message for initializing the service. - - Attributes: - name (str): - Required. The resource name of the serviceConfig used to - initialize the service. Format: - ``projects/{project_id}/locations/{location}/serviceConfig``. - resource_type (str): - Required. The resource type to which the - default service config will be applied. Examples - include, "compute.googleapis.com/Instance" and - "storage.googleapis.com/Bucket". - request_id (str): - Optional. An optional request ID to identify - requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes since the first request. - - For example, consider a situation where you make - an initial request and the request times out. - If you make the request again with the same - request ID, the server can check if original - operation with the same request ID was received, - and if so, will ignore the second request. This - prevents clients from accidentally creating - duplicate commitments. - - The request ID must be a valid UUID with the - exception that zero UUID is not supported - (00000000-0000-0000-0000-000000000000). - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - resource_type: str = proto.Field( - proto.STRING, - number=2, - ) - request_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class InitializeServiceResponse(proto.Message): - r"""Response message for initializing the service. - - Attributes: - backup_vault_name (str): - The resource name of the default ``BackupVault`` created. - Format: - ``projects/{project_id}/locations/{location}/backupVaults/{backup_vault_id}``. - backup_plan_name (str): - The resource name of the default ``BackupPlan`` created. - Format: - ``projects/{project_id}/locations/{location}/backupPlans/{backup_plan_id}``. - """ - - backup_vault_name: str = proto.Field( - proto.STRING, - number=1, - ) - backup_plan_name: str = proto.Field( - proto.STRING, - number=2, - ) - - -class OperationMetadata(proto.Message): - r"""Represents the metadata of the long-running operation. - - Attributes: - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the operation was - created. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the operation finished - running. - target (str): - Output only. Server-defined resource path for - the target of the operation. - verb (str): - Output only. Name of the verb executed by the - operation. - status_message (str): - Output only. Human-readable status of the - operation, if any. - requested_cancellation (bool): - Output only. Identifies whether the user has requested - cancellation of the operation.
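The request_id contract described repeatedly above (the server de-duplicates retries for at least 60 minutes) is easiest to honor by minting one UUID per logical operation and reusing it on every retry. A small sketch with hypothetical resource names:

import uuid

from google.cloud import backupdr_v1

# One UUID per logical operation, reused across retries, lets the server
# recognize and ignore duplicate submissions for at least 60 minutes.
request = backupdr_v1.DeleteManagementServerRequest(
    name="projects/my-project/locations/us-central1/managementServers/ms-1",
    request_id=str(uuid.uuid4()),
)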
Operations that have - successfully been cancelled have - [google.longrunning.Operation.error][google.longrunning.Operation.error] - value with a - [google.rpc.Status.code][google.rpc.Status.code] of 1, - corresponding to 'Code.CANCELLED'. - api_version (str): - Output only. API version used to start the - operation. - additional_info (MutableMapping[str, str]): - Output only. AdditionalInfo contains - additional Info related to backup plan - association resource. - """ - - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - target: str = proto.Field( - proto.STRING, - number=3, - ) - verb: str = proto.Field( - proto.STRING, - number=4, - ) - status_message: str = proto.Field( - proto.STRING, - number=5, - ) - requested_cancellation: bool = proto.Field( - proto.BOOL, - number=6, - ) - api_version: str = proto.Field( - proto.STRING, - number=7, - ) - additional_info: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=8, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/types/backupplan.py b/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/types/backupplan.py deleted file mode 100644 index b0ccb0457863..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/types/backupplan.py +++ /dev/null @@ -1,652 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import timestamp_pb2 # type: ignore -from google.type import dayofweek_pb2 # type: ignore -from google.type import month_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.backupdr.v1', - manifest={ - 'BackupPlan', - 'BackupRule', - 'StandardSchedule', - 'BackupWindow', - 'WeekDayOfMonth', - 'CreateBackupPlanRequest', - 'ListBackupPlansRequest', - 'ListBackupPlansResponse', - 'GetBackupPlanRequest', - 'DeleteBackupPlanRequest', - }, -) - - -class BackupPlan(proto.Message): - r"""A ``BackupPlan`` specifies some common fields, such as - ``description`` as well as one or more ``BackupRule`` messages. Each - ``BackupRule`` has a retention policy and defines a schedule by - which the system is to perform backup workloads. - - Attributes: - name (str): - Output only. Identifier. The resource name of the - ``BackupPlan``. - - Format: - ``projects/{project}/locations/{location}/backupPlans/{backup_plan}`` - description (str): - Optional. The description of the ``BackupPlan`` resource. - - The description allows for additional details about - ``BackupPlan`` and its use cases to be provided. 
An example - description is the following: "This is a backup plan that - performs a daily backup at 6pm and retains data for 3 - months". The description must be at most 2048 characters. - labels (MutableMapping[str, str]): - Optional. This collection of key/value pairs - allows for custom labels to be supplied by the - user. Example, {"tag": "Weekly"}. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. When the ``BackupPlan`` was created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. When the ``BackupPlan`` was last updated. - backup_rules (MutableSequence[google.cloud.backupdr_v1.types.BackupRule]): - Required. The backup rules for this ``BackupPlan``. There - must be at least one ``BackupRule`` message. - state (google.cloud.backupdr_v1.types.BackupPlan.State): - Output only. The ``State`` for the ``BackupPlan``. - resource_type (str): - Required. The resource type to which the ``BackupPlan`` will - be applied. Examples include, - "compute.googleapis.com/Instance", - "sqladmin.googleapis.com/Instance", or - "alloydb.googleapis.com/Cluster". - etag (str): - Optional. ``etag`` is returned from the service in the - response. As a user of the service, you may provide an etag - value in this field to prevent stale resources. - backup_vault (str): - Required. Resource name of backup vault which - will be used as storage location for backups. - Format: - - projects/{project}/locations/{location}/backupVaults/{backupvault} - backup_vault_service_account (str): - Output only. The Google Cloud Platform - Service Account to be used by the BackupVault - for taking backups. Specify the email address of - the Backup Vault Service Account. - """ - class State(proto.Enum): - r"""``State`` enumerates the possible states for a ``BackupPlan``. - - Values: - STATE_UNSPECIFIED (0): - State not set. - CREATING (1): - The resource is being created. - ACTIVE (2): - The resource has been created and is fully - usable. - DELETING (3): - The resource is being deleted. - INACTIVE (4): - The resource has been created but is not - usable. - """ - STATE_UNSPECIFIED = 0 - CREATING = 1 - ACTIVE = 2 - DELETING = 3 - INACTIVE = 4 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - description: str = proto.Field( - proto.STRING, - number=2, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=3, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - backup_rules: MutableSequence['BackupRule'] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message='BackupRule', - ) - state: State = proto.Field( - proto.ENUM, - number=7, - enum=State, - ) - resource_type: str = proto.Field( - proto.STRING, - number=8, - ) - etag: str = proto.Field( - proto.STRING, - number=9, - ) - backup_vault: str = proto.Field( - proto.STRING, - number=10, - ) - backup_vault_service_account: str = proto.Field( - proto.STRING, - number=11, - ) - - -class BackupRule(proto.Message): - r"""``BackupRule`` binds the backup schedule to a retention policy. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - rule_id (str): - Required. Immutable. The unique id of this ``BackupRule``. 
- The ``rule_id`` is unique per ``BackupPlan``. The ``rule_id`` - must start with a lowercase letter followed by up to 62 - lowercase letters, numbers, or hyphens. Pattern, - /[a-z][a-z0-9-]{,62}/. - backup_retention_days (int): - Required. Configures the duration for which - backup data will be kept. It is defined in - “days”. The value should be greater than or - equal to minimum enforced retention of the - backup vault. - - Minimum value is 1 and maximum value is 90 for - hourly backups. Minimum value is 1 and maximum - value is 90 for daily backups. Minimum value is - 7 and maximum value is 186 for weekly backups. - Minimum value is 30 and maximum value is 732 for - monthly backups. Minimum value is 365 and - maximum value is 36159 for yearly backups. - standard_schedule (google.cloud.backupdr_v1.types.StandardSchedule): - Required. Defines a schedule that runs within - the confines of a defined window of time. - - This field is a member of `oneof`_ ``backup_schedule_oneof``. - """ - - rule_id: str = proto.Field( - proto.STRING, - number=1, - ) - backup_retention_days: int = proto.Field( - proto.INT32, - number=4, - ) - standard_schedule: 'StandardSchedule' = proto.Field( - proto.MESSAGE, - number=5, - oneof='backup_schedule_oneof', - message='StandardSchedule', - ) - - -class StandardSchedule(proto.Message): - r"""``StandardSchedule`` defines a schedule that runs within the confines - of a defined window of days. We can define recurrence type for - schedule as HOURLY, DAILY, WEEKLY, MONTHLY or YEARLY. - - Attributes: - recurrence_type (google.cloud.backupdr_v1.types.StandardSchedule.RecurrenceType): - Required. Specifies the ``RecurrenceType`` for the schedule. - hourly_frequency (int): - Optional. Specifies frequency for hourly backups. An hourly - frequency of 2 means jobs will run every 2 hours from start - time till end time defined. - - This is required for ``recurrence_type``, ``HOURLY`` and is - not applicable otherwise. A validation error will occur if a - value is supplied and ``recurrence_type`` is not ``HOURLY``. - - Value of hourly frequency should be between 6 and 23. - - Reason for the limit: We found that there is a bandwidth - limitation of 3GB/S for GMI while taking a backup and 5GB/S - while doing a restore. Given the amount of parallel backups - and restores we are targeting, this will potentially take the - backup time to minutes and hours (in the worst case scenario). - days_of_week (MutableSequence[google.type.dayofweek_pb2.DayOfWeek]): - Optional. Specifies days of week like, MONDAY or TUESDAY, on - which jobs will run. - - This is required for ``recurrence_type``, ``WEEKLY`` and is - not applicable otherwise. A validation error will occur if a - value is supplied and ``recurrence_type`` is not ``WEEKLY``. - days_of_month (MutableSequence[int]): - Optional. Specifies days of months like 1, 5, or 14 on which - jobs will run. - - Values for ``days_of_month`` are only applicable for - ``recurrence_type``, ``MONTHLY`` and ``YEARLY``. A - validation error will occur if other values are supplied. - week_day_of_month (google.cloud.backupdr_v1.types.WeekDayOfMonth): - Optional. Specifies a week day of the month like, FIRST - SUNDAY or LAST MONDAY, on which jobs will run. This will be - specified by two fields in ``WeekDayOfMonth``, one for the - day, e.g. ``MONDAY``, and one for the week, e.g. ``LAST``. - - This field is only applicable for ``recurrence_type``, - ``MONTHLY`` and ``YEARLY``. A validation error will occur if - other values are supplied.
- months (MutableSequence[google.type.month_pb2.Month]): - Optional. Specifies the months of year, like ``FEBRUARY`` - and/or ``MAY``, on which jobs will run. - - This field is only applicable when ``recurrence_type`` is - ``YEARLY``. A validation error will occur if other values - are supplied. - backup_window (google.cloud.backupdr_v1.types.BackupWindow): - Required. A BackupWindow defines the window of day during - which backup jobs will run. Jobs are queued at the beginning - of the window and will be marked as ``NOT_RUN`` if they do - not start by the end of the window. - - Note: running jobs will not be cancelled at the end of the - window. - time_zone (str): - Required. The time zone to be used when interpreting the - schedule. The value of this field must be a time zone name - from the IANA tz database. See - https://en.wikipedia.org/wiki/List_of_tz_database_time_zones - for the list of valid timezone names. For example, - Europe/Paris. - """ - class RecurrenceType(proto.Enum): - r"""``RecurrenceTypes`` enumerates the applicable periodicity for the - schedule. - - Values: - RECURRENCE_TYPE_UNSPECIFIED (0): - recurrence type not set - HOURLY (1): - The ``BackupRule`` is to be applied hourly. - DAILY (2): - The ``BackupRule`` is to be applied daily. - WEEKLY (3): - The ``BackupRule`` is to be applied weekly. - MONTHLY (4): - The ``BackupRule`` is to be applied monthly. - YEARLY (5): - The ``BackupRule`` is to be applied yearly. - """ - RECURRENCE_TYPE_UNSPECIFIED = 0 - HOURLY = 1 - DAILY = 2 - WEEKLY = 3 - MONTHLY = 4 - YEARLY = 5 - - recurrence_type: RecurrenceType = proto.Field( - proto.ENUM, - number=1, - enum=RecurrenceType, - ) - hourly_frequency: int = proto.Field( - proto.INT32, - number=2, - ) - days_of_week: MutableSequence[dayofweek_pb2.DayOfWeek] = proto.RepeatedField( - proto.ENUM, - number=3, - enum=dayofweek_pb2.DayOfWeek, - ) - days_of_month: MutableSequence[int] = proto.RepeatedField( - proto.INT32, - number=4, - ) - week_day_of_month: 'WeekDayOfMonth' = proto.Field( - proto.MESSAGE, - number=5, - message='WeekDayOfMonth', - ) - months: MutableSequence[month_pb2.Month] = proto.RepeatedField( - proto.ENUM, - number=6, - enum=month_pb2.Month, - ) - backup_window: 'BackupWindow' = proto.Field( - proto.MESSAGE, - number=7, - message='BackupWindow', - ) - time_zone: str = proto.Field( - proto.STRING, - number=8, - ) - - -class BackupWindow(proto.Message): - r"""``BackupWindow`` defines a window of the day during which backup - jobs will run. - - Attributes: - start_hour_of_day (int): - Required. The hour of day (0-23) when the - window starts; for example, a start hour of - day of 6 means the backup window starts at 6:00. - end_hour_of_day (int): - Required. The hour of day (1-24) when the window ends; for - example, an end hour of day of 10 means the backup - window ends at 10:00. - - End hour of day should be greater than start hour of day. 0 - <= start_hour_of_day < end_hour_of_day <= 24 - - End hour of day is not included in the backup window, meaning - that if end_hour_of_day = 10, jobs should start before 10:00. - """ - - start_hour_of_day: int = proto.Field( - proto.INT32, - number=1, - ) - end_hour_of_day: int = proto.Field( - proto.INT32, - number=2, - ) - - -class WeekDayOfMonth(proto.Message): - r"""``WeekDayOfMonth`` defines the week day of the month on which the - backups will run. The message combines a ``WeekOfMonth`` and - ``DayOfWeek`` to produce values like ``FIRST``/``MONDAY`` or - ``LAST``/``FRIDAY``.
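Pulling the scheduling messages together, here is a hedged construction sketch; the resource names are hypothetical, and the 6:00-10:00 window and 30-day retention are chosen to respect the constraints documented above:

from google.cloud import backupdr_v1

rule = backupdr_v1.BackupRule(
    rule_id="daily-rule",
    backup_retention_days=30,  # within the 1-90 day range for daily backups
    standard_schedule=backupdr_v1.StandardSchedule(
        recurrence_type=backupdr_v1.StandardSchedule.RecurrenceType.DAILY,
        backup_window=backupdr_v1.BackupWindow(
            start_hour_of_day=6,   # jobs are queued from 6:00
            end_hour_of_day=10,    # and must start before 10:00
        ),
        time_zone="Europe/Paris",
    ),
)
plan = backupdr_v1.BackupPlan(
    description="Daily backups retained for 30 days",
    resource_type="compute.googleapis.com/Instance",
    backup_vault="projects/my-project/locations/us-central1/backupVaults/my-vault",
    backup_rules=[rule],
)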
- - Attributes: - week_of_month (google.cloud.backupdr_v1.types.WeekDayOfMonth.WeekOfMonth): - Required. Specifies the week of the month. - day_of_week (google.type.dayofweek_pb2.DayOfWeek): - Required. Specifies the day of the week. - """ - class WeekOfMonth(proto.Enum): - r"""``WeekOfMonth`` enumerates possible weeks in the month, e.g. the - first, third, or last week of the month. - - Values: - WEEK_OF_MONTH_UNSPECIFIED (0): - The zero value. Do not use. - FIRST (1): - The first week of the month. - SECOND (2): - The second week of the month. - THIRD (3): - The third week of the month. - FOURTH (4): - The fourth week of the month. - LAST (5): - The last week of the month. - """ - WEEK_OF_MONTH_UNSPECIFIED = 0 - FIRST = 1 - SECOND = 2 - THIRD = 3 - FOURTH = 4 - LAST = 5 - - week_of_month: WeekOfMonth = proto.Field( - proto.ENUM, - number=1, - enum=WeekOfMonth, - ) - day_of_week: dayofweek_pb2.DayOfWeek = proto.Field( - proto.ENUM, - number=2, - enum=dayofweek_pb2.DayOfWeek, - ) - - -class CreateBackupPlanRequest(proto.Message): - r"""The request message for creating a ``BackupPlan``. - - Attributes: - parent (str): - Required. The ``BackupPlan`` project and location in the - format ``projects/{project}/locations/{location}``. In Cloud - BackupDR locations map to GCP regions, for example - **us-central1**. - backup_plan_id (str): - Required. The name of the ``BackupPlan`` to create. The name - must be unique for the specified project and location. The - name must start with a lowercase letter followed by up to 62 - lowercase letters, numbers, or hyphens. Pattern, - /[a-z][a-z0-9-]{,62}/. - backup_plan (google.cloud.backupdr_v1.types.BackupPlan): - Required. The ``BackupPlan`` resource object to create. - request_id (str): - Optional. An optional request ID to identify - requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes since the first request. - - For example, consider a situation where you make - an initial request and the request times out. - If you make the request again with the same - request ID, the server can check if original - operation with the same request ID was received, - and if so, will ignore the second request. This - prevents clients from accidentally creating - duplicate commitments. - - The request ID must be a valid UUID with the - exception that zero UUID is not supported - (00000000-0000-0000-0000-000000000000). - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - backup_plan_id: str = proto.Field( - proto.STRING, - number=2, - ) - backup_plan: 'BackupPlan' = proto.Field( - proto.MESSAGE, - number=3, - message='BackupPlan', - ) - request_id: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListBackupPlansRequest(proto.Message): - r"""The request message for getting a list of ``BackupPlan`` resources. - - Attributes: - parent (str): - Required. The project and location for which to retrieve - ``BackupPlans`` information. Format: - ``projects/{project}/locations/{location}``. In Cloud - BackupDR, locations map to GCP regions, for example - **us-central1**. To retrieve backup plans for all locations, - use "-" for the ``{location}`` value. - page_size (int): - Optional. The maximum number of ``BackupPlans`` to return in - a single response. If not specified, a default value will be - chosen by the service.
Note that the response may include a - partial list and a caller should only rely on the response's - [next_page_token][google.cloud.backupdr.v1.ListBackupPlansResponse.next_page_token] - to determine if there are more instances left to be queried. - page_token (str): - Optional. The value of - [next_page_token][google.cloud.backupdr.v1.ListBackupPlansResponse.next_page_token] - received from a previous ``ListBackupPlans`` call. Provide - this to retrieve the subsequent page in a multi-page list of - results. When paginating, all other parameters provided to - ``ListBackupPlans`` must match the call that provided the - page token. - filter (str): - Optional. Field match expression used to - filter the results. - order_by (str): - Optional. Field by which to sort the results. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListBackupPlansResponse(proto.Message): - r"""The response message for getting a list of ``BackupPlan``. - - Attributes: - backup_plans (MutableSequence[google.cloud.backupdr_v1.types.BackupPlan]): - The list of ``BackupPlans`` in the project for the specified - location. - - If the ``{location}`` value in the request is "-", the - response contains a list of resources from all locations. In - case any location is unreachable, the response will only - return backup plans in reachable locations and the - 'unreachable' field will be populated with a list of - unreachable locations. - next_page_token (str): - A token which may be sent as - [page_token][google.cloud.backupdr.v1.ListBackupPlansRequest.page_token] - in a subsequent ``ListBackupPlans`` call to retrieve the - next page of results. If this field is omitted or empty, - then there are no more results to return. - unreachable (MutableSequence[str]): - Locations that could not be reached. - """ - - @property - def raw_page(self): - return self - - backup_plans: MutableSequence['BackupPlan'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='BackupPlan', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class GetBackupPlanRequest(proto.Message): - r"""The request message for getting a ``BackupPlan``. - - Attributes: - name (str): - Required. The resource name of the ``BackupPlan`` to - retrieve. - - Format: - ``projects/{project}/locations/{location}/backupPlans/{backup_plan}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class DeleteBackupPlanRequest(proto.Message): - r"""The request message for deleting a ``BackupPlan``. - - Attributes: - name (str): - Required. The resource name of the ``BackupPlan`` to delete. - - Format: - ``projects/{project}/locations/{location}/backupPlans/{backup_plan}`` - request_id (str): - Optional. An optional request ID to identify - requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes after the first request. - - For example, consider a situation where you make - an initial request and the request times out.
If - you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This - prevents clients from accidentally creating - duplicate commitments. - - The request ID must be a valid UUID with the - exception that zero UUID is not supported - (00000000-0000-0000-0000-000000000000). - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - request_id: str = proto.Field( - proto.STRING, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/types/backupplanassociation.py b/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/types/backupplanassociation.py deleted file mode 100644 index 6c00978ee110..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/types/backupplanassociation.py +++ /dev/null @@ -1,447 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.backupdr.v1', - manifest={ - 'BackupPlanAssociation', - 'RuleConfigInfo', - 'CreateBackupPlanAssociationRequest', - 'ListBackupPlanAssociationsRequest', - 'ListBackupPlanAssociationsResponse', - 'GetBackupPlanAssociationRequest', - 'DeleteBackupPlanAssociationRequest', - 'TriggerBackupRequest', - }, -) - - -class BackupPlanAssociation(proto.Message): - r"""A BackupPlanAssociation represents a single - association between a workload and a backup plan, and contains - details such as the workload and the backup plan. - - Attributes: - name (str): - Output only. Identifier. The resource name of - the BackupPlanAssociation, in the following format: - - projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId} - resource_type (str): - Required. Immutable. Resource type of the - workload to which the backup plan is applied - resource (str): - Required. Immutable. Resource name of the - workload to which the backup plan is applied - backup_plan (str): - Required. Resource name of the backup plan - to be applied to the workload. Format: - - projects/{project}/locations/{location}/backupPlans/{backupPlanId} - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the instance was - created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the instance was - updated. - state (google.cloud.backupdr_v1.types.BackupPlanAssociation.State): - Output only. The BackupPlanAssociation - resource state. - rules_config_info (MutableSequence[google.cloud.backupdr_v1.types.RuleConfigInfo]): - Output only. The config info related to - backup rules. - data_source (str): - Output only.
Resource name of data source - which will be used as storage location for - backups taken. Format : - - projects/{project}/locations/{location}/backupVaults/{backupvault}/dataSources/{datasource} - """ - class State(proto.Enum): - r"""Enum for State of BackupPlan Association - - Values: - STATE_UNSPECIFIED (0): - State not set. - CREATING (1): - The resource is being created. - ACTIVE (2): - The resource has been created and is fully - usable. - DELETING (3): - The resource is being deleted. - INACTIVE (4): - The resource has been created but is not - usable. - """ - STATE_UNSPECIFIED = 0 - CREATING = 1 - ACTIVE = 2 - DELETING = 3 - INACTIVE = 4 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - resource_type: str = proto.Field( - proto.STRING, - number=2, - ) - resource: str = proto.Field( - proto.STRING, - number=3, - ) - backup_plan: str = proto.Field( - proto.STRING, - number=4, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - state: State = proto.Field( - proto.ENUM, - number=7, - enum=State, - ) - rules_config_info: MutableSequence['RuleConfigInfo'] = proto.RepeatedField( - proto.MESSAGE, - number=8, - message='RuleConfigInfo', - ) - data_source: str = proto.Field( - proto.STRING, - number=9, - ) - - -class RuleConfigInfo(proto.Message): - r"""Message for rules config info. - - Attributes: - rule_id (str): - Output only. Backup Rule id fetched from - backup plan. - last_backup_state (google.cloud.backupdr_v1.types.RuleConfigInfo.LastBackupState): - Output only. The last backup state for rule. - last_backup_error (google.rpc.status_pb2.Status): - Output only. google.rpc.Status object to - store the last backup error. - last_successful_backup_consistency_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The point in time when the last - successful backup was captured from the source. - """ - class LastBackupState(proto.Enum): - r"""Enum for LastBackupState - - Values: - LAST_BACKUP_STATE_UNSPECIFIED (0): - State not set. - FIRST_BACKUP_PENDING (1): - The first backup is pending. - PERMISSION_DENIED (2): - The most recent backup could not be - run/failed because of the lack of permissions. - SUCCEEDED (3): - The last backup operation succeeded. - FAILED (4): - The last backup operation failed. - """ - LAST_BACKUP_STATE_UNSPECIFIED = 0 - FIRST_BACKUP_PENDING = 1 - PERMISSION_DENIED = 2 - SUCCEEDED = 3 - FAILED = 4 - - rule_id: str = proto.Field( - proto.STRING, - number=1, - ) - last_backup_state: LastBackupState = proto.Field( - proto.ENUM, - number=3, - enum=LastBackupState, - ) - last_backup_error: status_pb2.Status = proto.Field( - proto.MESSAGE, - number=4, - message=status_pb2.Status, - ) - last_successful_backup_consistency_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - - -class CreateBackupPlanAssociationRequest(proto.Message): - r"""Request message for creating a backup plan. - - Attributes: - parent (str): - Required. The backup plan association project and location - in the format - ``projects/{project_id}/locations/{location}``. In Cloud - BackupDR locations map to GCP regions, for example - **us-central1**. - backup_plan_association_id (str): - Required. The name of the backup plan - association to create. The name must be unique - for the specified project and location. 
- backup_plan_association (google.cloud.backupdr_v1.types.BackupPlanAssociation): - Required. The resource being created - request_id (str): - Optional. An optional request ID to identify - requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes since the first request. - - For example, consider a situation where you make - an initial request and the request times out. - If you make the request again with the same - request ID, the server can check if original - operation with the same request ID was received, - and if so, will ignore the second request. This - prevents clients from accidentally creating - duplicate commitments. - - The request ID must be a valid UUID with the - exception that zero UUID is not supported - (00000000-0000-0000-0000-000000000000). - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - backup_plan_association_id: str = proto.Field( - proto.STRING, - number=2, - ) - backup_plan_association: 'BackupPlanAssociation' = proto.Field( - proto.MESSAGE, - number=3, - message='BackupPlanAssociation', - ) - request_id: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListBackupPlanAssociationsRequest(proto.Message): - r"""Request message for List BackupPlanAssociation - - Attributes: - parent (str): - Required. The project and location for which to retrieve - backup plan association information, in the format - ``projects/{project_id}/locations/{location}``. In Cloud - BackupDR, locations map to GCP regions, for example - **us-central1**. To retrieve backup plan associations for - all locations, use "-" for the ``{location}`` value. - page_size (int): - Optional. Requested page size. Server may - return fewer items than requested. If - unspecified, server will pick an appropriate - default. - page_token (str): - Optional. A token identifying a page of - results the server should return. - filter (str): - Optional. Filtering results. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListBackupPlanAssociationsResponse(proto.Message): - r"""Response message for List BackupPlanAssociation - - Attributes: - backup_plan_associations (MutableSequence[google.cloud.backupdr_v1.types.BackupPlanAssociation]): - The list of Backup Plan Associations in the project for the - specified location. - - If the ``{location}`` value in the request is "-", the - response contains a list of instances from all locations. In - case any location is unreachable, the response will only - return backup plan associations in reachable locations and - the 'unreachable' field will be populated with a list of - unreachable locations. - next_page_token (str): - A token identifying a page of results the - server should return. - unreachable (MutableSequence[str]): - Locations that could not be reached.
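-
-        Example:
-            A minimal sketch of consuming this response via the
-            generated pager (assuming the ``BackupDRClient`` surface
-            and a hypothetical project; ``"-"`` fans the request out
-            across all locations):
-
-            .. code-block:: python
-
-                from google.cloud import backupdr_v1
-
-                client = backupdr_v1.BackupDRClient()
-                pager = client.list_backup_plan_associations(
-                    parent="projects/my-project/locations/-",
-                )
-                for page in pager.pages:
-                    # Pagination is driven by next_page_token under the
-                    # hood; skipped locations are reported per page.
-                    if page.unreachable:
-                        print("unreachable:", list(page.unreachable))
-                    for bpa in page.backup_plan_associations:
-                        print(bpa.name)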
- """ - - @property - def raw_page(self): - return self - - backup_plan_associations: MutableSequence['BackupPlanAssociation'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='BackupPlanAssociation', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class GetBackupPlanAssociationRequest(proto.Message): - r"""Request message for getting a BackupPlanAssociation resource. - - Attributes: - name (str): - Required. Name of the backup plan association resource, in - the format - ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class DeleteBackupPlanAssociationRequest(proto.Message): - r"""Request message for deleting a backup plan association. - - Attributes: - name (str): - Required. Name of the backup plan association resource, in - the format - ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` - request_id (str): - Optional. An optional request ID to identify - requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes after the first request. - - For example, consider a situation where you make - an initial request and the request times out. If - you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This - prevents clients from accidentally creating - duplicate commitments. - - The request ID must be a valid UUID with the - exception that zero UUID is not supported - (00000000-0000-0000-0000-000000000000). - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - request_id: str = proto.Field( - proto.STRING, - number=2, - ) - - -class TriggerBackupRequest(proto.Message): - r"""Request message for triggering a backup. - - Attributes: - name (str): - Required. Name of the backup plan association resource, in - the format - ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` - rule_id (str): - Required. backup rule_id for which a backup needs to be - triggered. - request_id (str): - Optional. An optional request ID to identify - requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes after the first request. - - For example, consider a situation where you make - an initial request and the request times out. If - you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This - prevents clients from accidentally creating - duplicate commitments. - - The request ID must be a valid UUID with the - exception that zero UUID is not supported - (00000000-0000-0000-0000-000000000000). 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - rule_id: str = proto.Field( - proto.STRING, - number=2, - ) - request_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/types/backupvault.py b/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/types/backupvault.py deleted file mode 100644 index a7403617ab25..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/types/backupvault.py +++ /dev/null @@ -1,2074 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.backupdr_v1.types import backupvault_ba -from google.cloud.backupdr_v1.types import backupvault_gce -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.backupdr.v1', - manifest={ - 'BackupConfigState', - 'BackupView', - 'BackupVaultView', - 'BackupVault', - 'DataSource', - 'BackupConfigInfo', - 'GcpBackupConfig', - 'BackupApplianceBackupConfig', - 'DataSourceGcpResource', - 'DataSourceBackupApplianceApplication', - 'ServiceLockInfo', - 'BackupApplianceLockInfo', - 'BackupLock', - 'Backup', - 'CreateBackupVaultRequest', - 'ListBackupVaultsRequest', - 'ListBackupVaultsResponse', - 'FetchUsableBackupVaultsRequest', - 'FetchUsableBackupVaultsResponse', - 'GetBackupVaultRequest', - 'UpdateBackupVaultRequest', - 'DeleteBackupVaultRequest', - 'ListDataSourcesRequest', - 'ListDataSourcesResponse', - 'GetDataSourceRequest', - 'UpdateDataSourceRequest', - 'ListBackupsRequest', - 'ListBackupsResponse', - 'GetBackupRequest', - 'UpdateBackupRequest', - 'DeleteBackupRequest', - 'RestoreBackupRequest', - 'RestoreBackupResponse', - 'TargetResource', - 'GcpResource', - }, -) - - -class BackupConfigState(proto.Enum): - r"""Backup configuration state. Is the resource configured for - backup? - - Values: - BACKUP_CONFIG_STATE_UNSPECIFIED (0): - The possible states of backup configuration. - Status not set. - ACTIVE (1): - The data source is actively protected (i.e. - there is a BackupPlanAssociation or Appliance - SLA pointing to it) - PASSIVE (2): - The data source is no longer protected (but - may have backups under it) - """ - BACKUP_CONFIG_STATE_UNSPECIFIED = 0 - ACTIVE = 1 - PASSIVE = 2 - - -class BackupView(proto.Enum): - r"""BackupView contains enum options for Partial and Full view. - - Values: - BACKUP_VIEW_UNSPECIFIED (0): - If the value is not set, the default 'FULL' - view is used. - BACKUP_VIEW_BASIC (1): - Includes basic data about the Backup, but not - the full contents. 
- BACKUP_VIEW_FULL (2): - Includes all data about the Backup. - This is the default value (for both ListBackups - and GetBackup). - """ - BACKUP_VIEW_UNSPECIFIED = 0 - BACKUP_VIEW_BASIC = 1 - BACKUP_VIEW_FULL = 2 - - -class BackupVaultView(proto.Enum): - r"""BackupVaultView contains enum options for Partial and Full - view. - - Values: - BACKUP_VAULT_VIEW_UNSPECIFIED (0): - If the value is not set, the default 'FULL' - view is used. - BACKUP_VAULT_VIEW_BASIC (1): - Includes basic data about the Backup Vault, - but not the full contents. - BACKUP_VAULT_VIEW_FULL (2): - Includes all data about the Backup Vault. - This is the default value (for both - ListBackupVaults and GetBackupVault). - """ - BACKUP_VAULT_VIEW_UNSPECIFIED = 0 - BACKUP_VAULT_VIEW_BASIC = 1 - BACKUP_VAULT_VIEW_FULL = 2 - - -class BackupVault(proto.Message): - r"""Message describing a BackupVault object. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Output only. Identifier. Name of the backup vault to create. - It must have the - format\ ``"projects/{project}/locations/{location}/backupVaults/{backupvault}"``. - ``{backupvault}`` cannot be changed after creation. It must - be between 3-63 characters long and must be unique within - the project and location. - description (str): - Optional. The description of the BackupVault - instance (2048 characters or less). - - This field is a member of `oneof`_ ``_description``. - labels (MutableMapping[str, str]): - Optional. Resource labels to represent user - provided metadata. No labels currently defined. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the instance was - created. - - This field is a member of `oneof`_ ``_create_time``. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the instance was - updated. - - This field is a member of `oneof`_ ``_update_time``. - backup_minimum_enforced_retention_duration (google.protobuf.duration_pb2.Duration): - Required. The default and minimum enforced - retention for each backup within the backup - vault. The enforced retention for each backup - can be extended. - - This field is a member of `oneof`_ ``_backup_minimum_enforced_retention_duration``. - deletable (bool): - Output only. Set to true when there are no - backups nested under this resource. - - This field is a member of `oneof`_ ``_deletable``. - etag (str): - Optional. Server specified ETag for the - backup vault resource to prevent simultaneous - updates from overwriting each other. - - This field is a member of `oneof`_ ``_etag``. - state (google.cloud.backupdr_v1.types.BackupVault.State): - Output only. The BackupVault resource - instance state. - effective_time (google.protobuf.timestamp_pb2.Timestamp): - Optional. Time after which the BackupVault - resource is locked. - - This field is a member of `oneof`_ ``_effective_time``. - backup_count (int): - Output only. The number of backups in this - backup vault. - service_account (str): - Output only. Service account used by the - BackupVault Service for this BackupVault. The - user should grant this account permissions in - their workload project to enable the service to - run backups and restores there. - total_stored_bytes (int): - Output only. Total size of the storage used - by all backup resources. - uid (str): - Output only. Immutable after resource - creation until resource deletion. - annotations (MutableMapping[str, str]): - Optional.
User annotations. See - https://google.aip.dev/128#annotations Stores - small amounts of arbitrary data. - access_restriction (google.cloud.backupdr_v1.types.BackupVault.AccessRestriction): - Optional. Note: This field is added for future use case and - will not be supported in the current release. - - Access restriction for the backup vault. Default value is - WITHIN_ORGANIZATION if not provided during creation. - """ - class State(proto.Enum): - r"""Holds the state of the backup vault resource. - - Values: - STATE_UNSPECIFIED (0): - State not set. - CREATING (1): - The backup vault is being created. - ACTIVE (2): - The backup vault has been created and is - fully usable. - DELETING (3): - The backup vault is being deleted. - ERROR (4): - The backup vault is experiencing an issue and - might be unusable. - """ - STATE_UNSPECIFIED = 0 - CREATING = 1 - ACTIVE = 2 - DELETING = 3 - ERROR = 4 - - class AccessRestriction(proto.Enum): - r"""Holds the access restriction for the backup vault. - - Values: - ACCESS_RESTRICTION_UNSPECIFIED (0): - Access restriction not set. If user does not provide any - value or pass this value, it will be changed to - WITHIN_ORGANIZATION. - WITHIN_PROJECT (1): - Access to or from resources outside your - current project will be denied. - WITHIN_ORGANIZATION (2): - Access to or from resources outside your - current organization will be denied. - UNRESTRICTED (3): - No access restriction. - WITHIN_ORG_BUT_UNRESTRICTED_FOR_BA (4): - Access to or from resources outside your - current organization will be denied except for - backup appliance. - """ - ACCESS_RESTRICTION_UNSPECIFIED = 0 - WITHIN_PROJECT = 1 - WITHIN_ORGANIZATION = 2 - UNRESTRICTED = 3 - WITHIN_ORG_BUT_UNRESTRICTED_FOR_BA = 4 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - description: str = proto.Field( - proto.STRING, - number=2, - optional=True, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=3, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - optional=True, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - optional=True, - message=timestamp_pb2.Timestamp, - ) - backup_minimum_enforced_retention_duration: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=20, - optional=True, - message=duration_pb2.Duration, - ) - deletable: bool = proto.Field( - proto.BOOL, - number=8, - optional=True, - ) - etag: str = proto.Field( - proto.STRING, - number=9, - optional=True, - ) - state: State = proto.Field( - proto.ENUM, - number=10, - enum=State, - ) - effective_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=12, - optional=True, - message=timestamp_pb2.Timestamp, - ) - backup_count: int = proto.Field( - proto.INT64, - number=17, - ) - service_account: str = proto.Field( - proto.STRING, - number=18, - ) - total_stored_bytes: int = proto.Field( - proto.INT64, - number=19, - ) - uid: str = proto.Field( - proto.STRING, - number=21, - ) - annotations: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=22, - ) - access_restriction: AccessRestriction = proto.Field( - proto.ENUM, - number=24, - enum=AccessRestriction, - ) - - -class DataSource(proto.Message): - r"""Message describing a DataSource object. - Datasource object used to represent Datasource details for both - admin and basic view. - - This message has `oneof`_ fields (mutually exclusive fields). 
For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Output only. Identifier. Name of the datasource to create. - It must have the - format\ ``"projects/{project}/locations/{location}/backupVaults/{backupvault}/dataSources/{datasource}"``. - ``{datasource}`` cannot be changed after creation. It must - be between 3-63 characters long and must be unique within - the backup vault. - state (google.cloud.backupdr_v1.types.DataSource.State): - Output only. The DataSource resource instance - state. - labels (MutableMapping[str, str]): - Optional. Resource labels to represent user - provided metadata. No labels currently defined. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the instance was - created. - - This field is a member of `oneof`_ ``_create_time``. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the instance was - updated. - - This field is a member of `oneof`_ ``_update_time``. - backup_count (int): - Number of backups in the data source. - - This field is a member of `oneof`_ ``_backup_count``. - etag (str): - Server specified ETag for the - DataSource resource to prevent - simultaneous updates from overwriting each other. - - This field is a member of `oneof`_ ``_etag``. - total_stored_bytes (int): - The number of bytes (metadata and data) - stored in this datasource. - - This field is a member of `oneof`_ ``_total_stored_bytes``. - config_state (google.cloud.backupdr_v1.types.BackupConfigState): - Output only. The backup configuration state. - backup_config_info (google.cloud.backupdr_v1.types.BackupConfigInfo): - Output only. Details of how the resource is - configured for backup. - data_source_gcp_resource (google.cloud.backupdr_v1.types.DataSourceGcpResource): - The backed up resource is a Google Cloud - resource. The word 'DataSource' was included in - the names to indicate that this is the - representation of the Google Cloud resource used - within the DataSource object. - - This field is a member of `oneof`_ ``source_resource``. - data_source_backup_appliance_application (google.cloud.backupdr_v1.types.DataSourceBackupApplianceApplication): - The backed up resource is a backup appliance - application. - - This field is a member of `oneof`_ ``source_resource``. - """ - class State(proto.Enum): - r"""Holds the state of the data source resource. - - Values: - STATE_UNSPECIFIED (0): - State not set. - CREATING (1): - The data source is being created. - ACTIVE (2): - The data source has been created and is fully - usable. - DELETING (3): - The data source is being deleted. - ERROR (4): - The data source is experiencing an issue and - might be unusable.
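-
-        Example:
-            A minimal sketch of checking this state with the generated
-            client (assuming the ``BackupDRClient`` surface and a
-            hypothetical resource name):
-
-            .. code-block:: python
-
-                from google.cloud import backupdr_v1
-
-                client = backupdr_v1.BackupDRClient()
-                ds = client.get_data_source(
-                    name=("projects/my-project/locations/us-central1"
-                          "/backupVaults/my-vault/dataSources/my-ds"),
-                )
-                if ds.state == backupdr_v1.DataSource.State.ACTIVE:
-                    print("data source is fully usable")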
- """ - STATE_UNSPECIFIED = 0 - CREATING = 1 - ACTIVE = 2 - DELETING = 3 - ERROR = 4 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - state: State = proto.Field( - proto.ENUM, - number=21, - enum=State, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=4, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - optional=True, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - optional=True, - message=timestamp_pb2.Timestamp, - ) - backup_count: int = proto.Field( - proto.INT64, - number=7, - optional=True, - ) - etag: str = proto.Field( - proto.STRING, - number=14, - optional=True, - ) - total_stored_bytes: int = proto.Field( - proto.INT64, - number=23, - optional=True, - ) - config_state: 'BackupConfigState' = proto.Field( - proto.ENUM, - number=24, - enum='BackupConfigState', - ) - backup_config_info: 'BackupConfigInfo' = proto.Field( - proto.MESSAGE, - number=25, - message='BackupConfigInfo', - ) - data_source_gcp_resource: 'DataSourceGcpResource' = proto.Field( - proto.MESSAGE, - number=26, - oneof='source_resource', - message='DataSourceGcpResource', - ) - data_source_backup_appliance_application: 'DataSourceBackupApplianceApplication' = proto.Field( - proto.MESSAGE, - number=27, - oneof='source_resource', - message='DataSourceBackupApplianceApplication', - ) - - -class BackupConfigInfo(proto.Message): - r"""BackupConfigInfo has information about how the resource is - configured for Backup and about the most recent backup to this - vault. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - last_backup_state (google.cloud.backupdr_v1.types.BackupConfigInfo.LastBackupState): - Output only. The status of the last backup to - this BackupVault - last_successful_backup_consistency_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. If the last backup were - successful, this field has the consistency date. - last_backup_error (google.rpc.status_pb2.Status): - Output only. If the last backup failed, this - field has the error message. - gcp_backup_config (google.cloud.backupdr_v1.types.GcpBackupConfig): - Configuration for a Google Cloud resource. - - This field is a member of `oneof`_ ``backup_config``. - backup_appliance_backup_config (google.cloud.backupdr_v1.types.BackupApplianceBackupConfig): - Configuration for an application backed up by - a Backup Appliance. - - This field is a member of `oneof`_ ``backup_config``. - """ - class LastBackupState(proto.Enum): - r"""LastBackupstate tracks whether the last backup was not yet - started, successful, failed, or could not be run because of the - lack of permissions. - - Values: - LAST_BACKUP_STATE_UNSPECIFIED (0): - Status not set. 
- FIRST_BACKUP_PENDING (1): - The first backup has not yet completed. - SUCCEEDED (2): - The most recent backup was successful. - FAILED (3): - The most recent backup failed. - PERMISSION_DENIED (4): - The most recent backup could not be - run/failed because of the lack of permissions. - """ - LAST_BACKUP_STATE_UNSPECIFIED = 0 - FIRST_BACKUP_PENDING = 1 - SUCCEEDED = 2 - FAILED = 3 - PERMISSION_DENIED = 4 - - last_backup_state: LastBackupState = proto.Field( - proto.ENUM, - number=1, - enum=LastBackupState, - ) - last_successful_backup_consistency_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - last_backup_error: status_pb2.Status = proto.Field( - proto.MESSAGE, - number=3, - message=status_pb2.Status, - ) - gcp_backup_config: 'GcpBackupConfig' = proto.Field( - proto.MESSAGE, - number=4, - oneof='backup_config', - message='GcpBackupConfig', - ) - backup_appliance_backup_config: 'BackupApplianceBackupConfig' = proto.Field( - proto.MESSAGE, - number=5, - oneof='backup_config', - message='BackupApplianceBackupConfig', - ) - - -class GcpBackupConfig(proto.Message): - r"""GcpBackupConfig captures the Backup configuration details for - Google Cloud resources. All Google Cloud resources regardless of - type are protected with backup plan associations. - - Attributes: - backup_plan (str): - The name of the backup plan. - backup_plan_description (str): - The description of the backup plan. - backup_plan_association (str): - The name of the backup plan association. - backup_plan_rules (MutableSequence[str]): - The names of the backup plan rules which - point to this backup vault. - """ - - backup_plan: str = proto.Field( - proto.STRING, - number=1, - ) - backup_plan_description: str = proto.Field( - proto.STRING, - number=2, - ) - backup_plan_association: str = proto.Field( - proto.STRING, - number=3, - ) - backup_plan_rules: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=4, - ) - - -class BackupApplianceBackupConfig(proto.Message): - r"""BackupApplianceBackupConfig captures the backup configuration - for applications that are protected by Backup Appliances. - - Attributes: - backup_appliance_name (str): - The name of the backup appliance. - backup_appliance_id (int): - The ID of the backup appliance. - sla_id (int): - The ID of the SLA of this application. - application_name (str): - The name of the application. - host_name (str): - The name of the host where the application is - running. - slt_name (str): - The name of the SLT associated with the - application. - slp_name (str): - The name of the SLP associated with the - application. - """ - - backup_appliance_name: str = proto.Field( - proto.STRING, - number=1, - ) - backup_appliance_id: int = proto.Field( - proto.INT64, - number=2, - ) - sla_id: int = proto.Field( - proto.INT64, - number=3, - ) - application_name: str = proto.Field( - proto.STRING, - number=4, - ) - host_name: str = proto.Field( - proto.STRING, - number=5, - ) - slt_name: str = proto.Field( - proto.STRING, - number=6, - ) - slp_name: str = proto.Field( - proto.STRING, - number=7, - ) - - -class DataSourceGcpResource(proto.Message): - r"""DataSourceGcpResource is used for protected resources that - are Google Cloud Resources. This name is easier to understand - than GcpResourceDataSource or GcpDataSourceResource. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - gcp_resourcename (str): - Output only.
Full resource pathname URL of - the source Google Cloud resource. - location (str): - Location of the resource: - <region>/<zone>/"global"/"unspecified". - type_ (str): - The type of the Google Cloud resource. Use - the Unified Resource Type, e.g., - compute.googleapis.com/Instance. - compute_instance_datasource_properties (google.cloud.backupdr_v1.types.ComputeInstanceDataSourceProperties): - ComputeInstanceDataSourceProperties has a - subset of Compute Instance properties that are - useful at the Datasource level. - - This field is a member of `oneof`_ ``gcp_resource_properties``. - """ - - gcp_resourcename: str = proto.Field( - proto.STRING, - number=1, - ) - location: str = proto.Field( - proto.STRING, - number=2, - ) - type_: str = proto.Field( - proto.STRING, - number=3, - ) - compute_instance_datasource_properties: backupvault_gce.ComputeInstanceDataSourceProperties = proto.Field( - proto.MESSAGE, - number=4, - oneof='gcp_resource_properties', - message=backupvault_gce.ComputeInstanceDataSourceProperties, - ) - - -class DataSourceBackupApplianceApplication(proto.Message): - r"""BackupApplianceApplication describes a Source Resource when - it is an application backed up by a BackupAppliance. - - Attributes: - application_name (str): - The name of the Application as known to the - Backup Appliance. - backup_appliance (str): - Appliance name. - appliance_id (int): - Appliance ID of the Backup Appliance. - type_ (str): - The type of the application, e.g. VMBackup. - application_id (int): - The appid field of the application within the - Backup Appliance. - hostname (str): - Hostname of the host where the application is - running. - host_id (int): - Host ID of the application host. - """ - - application_name: str = proto.Field( - proto.STRING, - number=1, - ) - backup_appliance: str = proto.Field( - proto.STRING, - number=2, - ) - appliance_id: int = proto.Field( - proto.INT64, - number=3, - ) - type_: str = proto.Field( - proto.STRING, - number=4, - ) - application_id: int = proto.Field( - proto.INT64, - number=8, - ) - hostname: str = proto.Field( - proto.STRING, - number=6, - ) - host_id: int = proto.Field( - proto.INT64, - number=7, - ) - - -class ServiceLockInfo(proto.Message): - r"""ServiceLockInfo represents the details of a lock taken by the - service on a Backup resource. - - Attributes: - operation (str): - Output only. The name of the operation that - created this lock. The lock will automatically - be released when the operation completes. - """ - - operation: str = proto.Field( - proto.STRING, - number=1, - ) - - -class BackupApplianceLockInfo(proto.Message): - r"""BackupApplianceLockInfo contains metadata about the - backup appliance that created the lock. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - backup_appliance_id (int): - Required. The ID of the backup/recovery - appliance that created this lock. - backup_appliance_name (str): - Required. The name of the backup/recovery - appliance that created this lock. - lock_reason (str): - Required. The reason for the lock: e.g. - MOUNT/RESTORE/BACKUP/etc. The value of this - string is only meaningful to the client and it - is not interpreted by the BackupVault service.
- job_name (str): - The job name on the backup/recovery appliance - that created this lock. - - This field is a member of `oneof`_ ``lock_source``. - backup_image (str): - The image name that depends on this Backup. - - This field is a member of `oneof`_ ``lock_source``. - sla_id (int): - The SLA on the backup/recovery appliance that - owns the lock. - - This field is a member of `oneof`_ ``lock_source``. - """ - - backup_appliance_id: int = proto.Field( - proto.INT64, - number=1, - ) - backup_appliance_name: str = proto.Field( - proto.STRING, - number=2, - ) - lock_reason: str = proto.Field( - proto.STRING, - number=5, - ) - job_name: str = proto.Field( - proto.STRING, - number=6, - oneof='lock_source', - ) - backup_image: str = proto.Field( - proto.STRING, - number=7, - oneof='lock_source', - ) - sla_id: int = proto.Field( - proto.INT64, - number=8, - oneof='lock_source', - ) - - -class BackupLock(proto.Message): - r"""BackupLock represents a single lock on a Backup resource. An - unexpired lock on a Backup prevents the Backup from being - deleted. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - lock_until_time (google.protobuf.timestamp_pb2.Timestamp): - Required. The time after which this lock is - not considered valid and will no longer protect - the Backup from deletion. - backup_appliance_lock_info (google.cloud.backupdr_v1.types.BackupApplianceLockInfo): - If the client is a backup and recovery - appliance, this contains metadata about why the - lock exists. - - This field is a member of `oneof`_ ``ClientLockInfo``. - service_lock_info (google.cloud.backupdr_v1.types.ServiceLockInfo): - Output only. Contains metadata about why the lock - exists for Google Cloud native backups. - - This field is a member of `oneof`_ ``ClientLockInfo``. - """ - - lock_until_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - backup_appliance_lock_info: 'BackupApplianceLockInfo' = proto.Field( - proto.MESSAGE, - number=3, - oneof='ClientLockInfo', - message='BackupApplianceLockInfo', - ) - service_lock_info: 'ServiceLockInfo' = proto.Field( - proto.MESSAGE, - number=4, - oneof='ClientLockInfo', - message='ServiceLockInfo', - ) - - -class Backup(proto.Message): - r"""Message describing a Backup object. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Output only. Identifier. Name of the backup to create. It - must have the - format\ ``"projects/{project}/locations/{location}/backupVaults/{backupvault}/dataSources/{datasource}/backups/{backup}"``. - ``{backup}`` cannot be changed after creation. It must be - between 3-63 characters long and must be unique within the - datasource. - description (str): - Output only. The description of the Backup - instance (2048 characters or less). - - This field is a member of `oneof`_ ``_description``. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the instance was - created.
- - This field is a member of `oneof`_ ``_create_time``. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when the instance was - updated. - - This field is a member of `oneof`_ ``_update_time``. - labels (MutableMapping[str, str]): - Optional. Resource labels to represent user - provided metadata. No labels currently defined. - enforced_retention_end_time (google.protobuf.timestamp_pb2.Timestamp): - Optional. The backup can not be deleted - before this time. - - This field is a member of `oneof`_ ``_enforced_retention_end_time``. - expire_time (google.protobuf.timestamp_pb2.Timestamp): - Optional. When this backup is automatically - expired. - - This field is a member of `oneof`_ ``_expire_time``. - consistency_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The point in time when this - backup was captured from the source. - - This field is a member of `oneof`_ ``_consistency_time``. - etag (str): - Optional. Server specified ETag to prevent - updates from overwriting each other. - - This field is a member of `oneof`_ ``_etag``. - state (google.cloud.backupdr_v1.types.Backup.State): - Output only. The Backup resource instance - state. - service_locks (MutableSequence[google.cloud.backupdr_v1.types.BackupLock]): - Output only. The list of BackupLocks taken by - the service to prevent the deletion of the - backup. - backup_appliance_locks (MutableSequence[google.cloud.backupdr_v1.types.BackupLock]): - Optional. The list of BackupLocks taken by - the accessor Backup Appliance. - compute_instance_backup_properties (google.cloud.backupdr_v1.types.ComputeInstanceBackupProperties): - Output only. Compute Engine specific backup - properties. - - This field is a member of `oneof`_ ``backup_properties``. - backup_appliance_backup_properties (google.cloud.backupdr_v1.types.BackupApplianceBackupProperties): - Output only. Backup Appliance specific backup - properties. - - This field is a member of `oneof`_ ``backup_properties``. - backup_type (google.cloud.backupdr_v1.types.Backup.BackupType): - Output only. Type of the backup, unspecified, - scheduled or ondemand. - gcp_backup_plan_info (google.cloud.backupdr_v1.types.Backup.GCPBackupPlanInfo): - Output only. Configuration for a Google Cloud - resource. - - This field is a member of `oneof`_ ``plan_info``. - resource_size_bytes (int): - Output only. source resource size in bytes at - the time of the backup. - """ - class State(proto.Enum): - r"""Holds the state of the backup resource. - - Values: - STATE_UNSPECIFIED (0): - State not set. - CREATING (1): - The backup is being created. - ACTIVE (2): - The backup has been created and is fully - usable. - DELETING (3): - The backup is being deleted. - ERROR (4): - The backup is experiencing an issue and might - be unusable. - """ - STATE_UNSPECIFIED = 0 - CREATING = 1 - ACTIVE = 2 - DELETING = 3 - ERROR = 4 - - class BackupType(proto.Enum): - r"""Type of the backup, scheduled or ondemand. - - Values: - BACKUP_TYPE_UNSPECIFIED (0): - Backup type is unspecified. - SCHEDULED (1): - Scheduled backup. - ON_DEMAND (2): - On demand backup. - """ - BACKUP_TYPE_UNSPECIFIED = 0 - SCHEDULED = 1 - ON_DEMAND = 2 - - class GCPBackupPlanInfo(proto.Message): - r"""GCPBackupPlanInfo captures the plan configuration details of - Google Cloud resources at the time of backup. - - Attributes: - backup_plan (str): - Resource name of backup plan by which - workload is protected at the time of the backup. 
Format: - - projects/{project}/locations/{location}/backupPlans/{backupPlanId} - backup_plan_rule_id (str): - The rule id of the backup plan which - triggered this backup in the case of a scheduled - backup, or the rule used for an on-demand backup. - """ - - backup_plan: str = proto.Field( - proto.STRING, - number=1, - ) - backup_plan_rule_id: str = proto.Field( - proto.STRING, - number=2, - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - description: str = proto.Field( - proto.STRING, - number=2, - optional=True, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - optional=True, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - optional=True, - message=timestamp_pb2.Timestamp, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=5, - ) - enforced_retention_end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - optional=True, - message=timestamp_pb2.Timestamp, - ) - expire_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=7, - optional=True, - message=timestamp_pb2.Timestamp, - ) - consistency_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=9, - optional=True, - message=timestamp_pb2.Timestamp, - ) - etag: str = proto.Field( - proto.STRING, - number=11, - optional=True, - ) - state: State = proto.Field( - proto.ENUM, - number=15, - enum=State, - ) - service_locks: MutableSequence['BackupLock'] = proto.RepeatedField( - proto.MESSAGE, - number=17, - message='BackupLock', - ) - backup_appliance_locks: MutableSequence['BackupLock'] = proto.RepeatedField( - proto.MESSAGE, - number=18, - message='BackupLock', - ) - compute_instance_backup_properties: backupvault_gce.ComputeInstanceBackupProperties = proto.Field( - proto.MESSAGE, - number=19, - oneof='backup_properties', - message=backupvault_gce.ComputeInstanceBackupProperties, - ) - backup_appliance_backup_properties: backupvault_ba.BackupApplianceBackupProperties = proto.Field( - proto.MESSAGE, - number=21, - oneof='backup_properties', - message=backupvault_ba.BackupApplianceBackupProperties, - ) - backup_type: BackupType = proto.Field( - proto.ENUM, - number=20, - enum=BackupType, - ) - gcp_backup_plan_info: GCPBackupPlanInfo = proto.Field( - proto.MESSAGE, - number=22, - oneof='plan_info', - message=GCPBackupPlanInfo, - ) - resource_size_bytes: int = proto.Field( - proto.INT64, - number=23, - ) - - -class CreateBackupVaultRequest(proto.Message): - r"""Message for creating a BackupVault. - - Attributes: - parent (str): - Required. Value for parent. - backup_vault_id (str): - Required. ID of the requesting object. If auto-generating the - ID server-side, remove this field and backup_vault_id from the - method_signature of the Create RPC. - backup_vault (google.cloud.backupdr_v1.types.BackupVault): - Required. The resource being created - request_id (str): - Optional. An optional request ID to identify - requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes since the first request. - - For example, consider a situation where you make - an initial request and the request times out. If - you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request.
This - prevents clients from accidentally creating - duplicate commitments. - - The request ID must be a valid UUID with the - exception that zero UUID is not supported - (00000000-0000-0000-0000-000000000000). - validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is 'false'. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - backup_vault_id: str = proto.Field( - proto.STRING, - number=2, - ) - backup_vault: 'BackupVault' = proto.Field( - proto.MESSAGE, - number=3, - message='BackupVault', - ) - request_id: str = proto.Field( - proto.STRING, - number=4, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=5, - ) - - -class ListBackupVaultsRequest(proto.Message): - r"""Request message for listing backupvault stores. - - Attributes: - parent (str): - Required. The project and location for which to retrieve - backupvault stores information, in the format - 'projects/{project_id}/locations/{location}'. In Cloud - Backup and DR, locations map to Google Cloud regions, for - example **us-central1**. To retrieve backupvault stores for - all locations, use "-" for the '{location}' value. - page_size (int): - Optional. Requested page size. Server may - return fewer items than requested. If - unspecified, server will pick an appropriate - default. - page_token (str): - Optional. A token identifying a page of - results the server should return. - filter (str): - Optional. Filtering results. - order_by (str): - Optional. Hint for how to order the results. - view (google.cloud.backupdr_v1.types.BackupVaultView): - Optional. Reserved for future use to provide - a BASIC & FULL view of Backup Vault. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - view: 'BackupVaultView' = proto.Field( - proto.ENUM, - number=6, - enum='BackupVaultView', - ) - - -class ListBackupVaultsResponse(proto.Message): - r"""Response message for listing BackupVaults. - - Attributes: - backup_vaults (MutableSequence[google.cloud.backupdr_v1.types.BackupVault]): - The list of BackupVault instances in the - project for the specified location. - - If the '{location}' value in the request is "-", - the response contains a list of instances from - all locations. In case any location is - unreachable, the response will only return - backup vaults in reachable locations and the - 'unreachable' field will be populated with a - list of unreachable locations. - next_page_token (str): - A token identifying a page of results the - server should return. - unreachable (MutableSequence[str]): - Locations that could not be reached. - """ - - @property - def raw_page(self): - return self - - backup_vaults: MutableSequence['BackupVault'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='BackupVault', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class FetchUsableBackupVaultsRequest(proto.Message): - r"""Request message for fetching usable BackupVaults. - - Attributes: - parent (str): - Required. The project and location for which to retrieve - backupvault stores information, in the format - 'projects/{project_id}/locations/{location}'. 
In Cloud - Backup and DR, locations map to Google Cloud regions, for - example **us-central1**. To retrieve backupvault stores for - all locations, use "-" for the '{location}' value. - page_size (int): - Optional. Requested page size. Server may - return fewer items than requested. If - unspecified, server will pick an appropriate - default. - page_token (str): - Optional. A token identifying a page of - results the server should return. - filter (str): - Optional. Filtering results. - order_by (str): - Optional. Hint for how to order the results. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - - -class FetchUsableBackupVaultsResponse(proto.Message): - r"""Response message for fetching usable BackupVaults. - - Attributes: - backup_vaults (MutableSequence[google.cloud.backupdr_v1.types.BackupVault]): - The list of BackupVault instances in the - project for the specified location. - - If the '{location}' value in the request is "-", - the response contains a list of instances from - all locations. In case any location is - unreachable, the response will only return - backup vaults in reachable locations and the - 'unreachable' field will be populated with a - list of unreachable locations. - next_page_token (str): - A token identifying a page of results the - server should return. - unreachable (MutableSequence[str]): - Locations that could not be reached. - """ - - @property - def raw_page(self): - return self - - backup_vaults: MutableSequence['BackupVault'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='BackupVault', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class GetBackupVaultRequest(proto.Message): - r"""Request message for getting a BackupVault. - - Attributes: - name (str): - Required. Name of the backupvault store resource name, in - the format - 'projects/{project_id}/locations/{location}/backupVaults/{resource_name}' - view (google.cloud.backupdr_v1.types.BackupVaultView): - Optional. Reserved for future use to provide - a BASIC & FULL view of Backup Vault - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - view: 'BackupVaultView' = proto.Field( - proto.ENUM, - number=2, - enum='BackupVaultView', - ) - - -class UpdateBackupVaultRequest(proto.Message): - r"""Request message for updating a BackupVault. - - Attributes: - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Field mask is used to specify the fields to be - overwritten in the BackupVault resource by the update. The - fields specified in the update_mask are relative to the - resource, not the full request. A field will be overwritten - if it is in the mask. If the user does not provide a mask - then the request will fail. - backup_vault (google.cloud.backupdr_v1.types.BackupVault): - Required. The resource being updated - request_id (str): - Optional. An optional request ID to identify - requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes since the first request. 
- - For example, consider a situation where you make - an initial request and the request times out. If - you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This - prevents clients from accidentally creating - duplicate commitments. - - The request ID must be a valid UUID with the - exception that zero UUID is not supported - (00000000-0000-0000-0000-000000000000). - validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is 'false'. - force (bool): - Optional. If set to true, will not check plan - duration against backup vault enforcement - duration. - """ - - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=1, - message=field_mask_pb2.FieldMask, - ) - backup_vault: 'BackupVault' = proto.Field( - proto.MESSAGE, - number=2, - message='BackupVault', - ) - request_id: str = proto.Field( - proto.STRING, - number=3, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=4, - ) - force: bool = proto.Field( - proto.BOOL, - number=5, - ) - - -class DeleteBackupVaultRequest(proto.Message): - r"""Message for deleting a BackupVault. - - Attributes: - name (str): - Required. Name of the resource. - request_id (str): - Optional. An optional request ID to identify - requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes after the first request. - - For example, consider a situation where you make - an initial request and the request times out. If - you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This - prevents clients from accidentally creating - duplicate commitments. - - The request ID must be a valid UUID with the - exception that zero UUID is not supported - (00000000-0000-0000-0000-000000000000). - force (bool): - Optional. If set to true, any data source - from this backup vault will also be deleted. - etag (str): - The current etag of the backup vault. - If an etag is provided and does not match the - current etag of the connection, deletion will be - blocked. - validate_only (bool): - Optional. Only validate the request, but do - not perform mutations. The default is 'false'. - allow_missing (bool): - Optional. If true and the BackupVault is not - found, the request will succeed but no action - will be taken. - ignore_backup_plan_references (bool): - Optional. If set to true, backupvault - deletion will proceed even if there are backup - plans referencing the backupvault. The default - is 'false'. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - request_id: str = proto.Field( - proto.STRING, - number=2, - ) - force: bool = proto.Field( - proto.BOOL, - number=3, - ) - etag: str = proto.Field( - proto.STRING, - number=4, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=5, - ) - allow_missing: bool = proto.Field( - proto.BOOL, - number=6, - ) - ignore_backup_plan_references: bool = proto.Field( - proto.BOOL, - number=7, - ) - - -class ListDataSourcesRequest(proto.Message): - r"""Request message for listing DataSources. - - Attributes: - parent (str): - Required. 
The project and location for which to retrieve - data sources information, in the format - 'projects/{project_id}/locations/{location}'. In Cloud - Backup and DR, locations map to Google Cloud regions, for - example **us-central1**. To retrieve data sources for all - locations, use "-" for the '{location}' value. - page_size (int): - Optional. Requested page size. Server may - return fewer items than requested. If - unspecified, server will pick an appropriate - default. - page_token (str): - Optional. A token identifying a page of - results the server should return. - filter (str): - Optional. Filtering results. - order_by (str): - Optional. Hint for how to order the results. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListDataSourcesResponse(proto.Message): - r"""Response message for listing DataSources. - - Attributes: - data_sources (MutableSequence[google.cloud.backupdr_v1.types.DataSource]): - The list of DataSource instances in the - project for the specified location. - - If the '{location}' value in the request is "-", - the response contains a list of instances from - all locations. In case any location is - unreachable, the response will only return data - sources in reachable locations and the - 'unreachable' field will be populated with a - list of unreachable locations. - next_page_token (str): - A token identifying a page of results the - server should return. - unreachable (MutableSequence[str]): - Locations that could not be reached. - """ - - @property - def raw_page(self): - return self - - data_sources: MutableSequence['DataSource'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='DataSource', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class GetDataSourceRequest(proto.Message): - r"""Request message for getting a DataSource instance. - - Attributes: - name (str): - Required. Name of the data source resource name, in the - format - 'projects/{project_id}/locations/{location}/backupVaults/{resource_name}/dataSource/{resource_name}' - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class UpdateDataSourceRequest(proto.Message): - r"""Request message for updating a data source instance. - - Attributes: - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Field mask is used to specify the fields to be - overwritten in the DataSource resource by the update. The - fields specified in the update_mask are relative to the - resource, not the full request. A field will be overwritten - if it is in the mask. If the user does not provide a mask - then the request will fail. - data_source (google.cloud.backupdr_v1.types.DataSource): - Required. The resource being updated - request_id (str): - Optional. An optional request ID to identify - requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes since the first request. - - For example, consider a situation where you make - an initial request and the request times out. 
If - you make the request again with the same request - ID, the server can check if the original operation - with the same request ID was received, and if - so, will ignore the second request. This - prevents clients from accidentally creating - duplicate commitments. - - The request ID must be a valid UUID with the - exception that zero UUID is not supported - (00000000-0000-0000-0000-000000000000). - allow_missing (bool): - Optional. Enable upsert. - """ - - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=1, - message=field_mask_pb2.FieldMask, - ) - data_source: 'DataSource' = proto.Field( - proto.MESSAGE, - number=2, - message='DataSource', - ) - request_id: str = proto.Field( - proto.STRING, - number=3, - ) - allow_missing: bool = proto.Field( - proto.BOOL, - number=4, - ) - - -class ListBackupsRequest(proto.Message): - r"""Request message for listing Backups. - - Attributes: - parent (str): - Required. The project and location for which to retrieve - backup information, in the format - 'projects/{project_id}/locations/{location}'. In Cloud - Backup and DR, locations map to Google Cloud regions, for - example **us-central1**. To retrieve backups for all - locations, use "-" for the '{location}' value. - page_size (int): - Optional. Requested page size. Server may - return fewer items than requested. If - unspecified, server will pick an appropriate - default. - page_token (str): - Optional. A token identifying a page of - results the server should return. - filter (str): - Optional. Filtering results. - order_by (str): - Optional. Hint for how to order the results. - view (google.cloud.backupdr_v1.types.BackupView): - Optional. Reserved for future use to provide - a BASIC & FULL view of Backup resource. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - view: 'BackupView' = proto.Field( - proto.ENUM, - number=6, - enum='BackupView', - ) - - -class ListBackupsResponse(proto.Message): - r"""Response message for listing Backups. - - Attributes: - backups (MutableSequence[google.cloud.backupdr_v1.types.Backup]): - The list of Backup instances in the project - for the specified location. - - If the '{location}' value in the request is "-", - the response contains a list of instances from - all locations. In case any location is - unreachable, the response will only return - backups in reachable locations and the - 'unreachable' field will be populated with a - list of unreachable locations. - next_page_token (str): - A token identifying a page of results the - server should return. - unreachable (MutableSequence[str]): - Locations that could not be reached. - """ - - @property - def raw_page(self): - return self - - backups: MutableSequence['Backup'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Backup', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class GetBackupRequest(proto.Message): - r"""Request message for getting a Backup. - - Attributes: - name (str): - Required.
Name of the backup resource, in the - format - 'projects/{project_id}/locations/{location}/backupVaults/{backupVault}/dataSources/{datasource}/backups/{backup}' - view (google.cloud.backupdr_v1.types.BackupView): - Optional. Reserved for future use to provide - a BASIC & FULL view of Backup resource. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - view: 'BackupView' = proto.Field( - proto.ENUM, - number=2, - enum='BackupView', - ) - - -class UpdateBackupRequest(proto.Message): - r"""Request message for updating a Backup. - - Attributes: - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Field mask is used to specify the fields to be - overwritten in the Backup resource by the update. The fields - specified in the update_mask are relative to the resource, - not the full request. A field will be overwritten if it is - in the mask. If the user does not provide a mask then the - request will fail. - backup (google.cloud.backupdr_v1.types.Backup): - Required. The resource being updated. - request_id (str): - Optional. An optional request ID to identify - requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes after the first request. - - For example, consider a situation where you make - an initial request and the request times out. If - you make the request again with the same request - ID, the server can check if the original operation - with the same request ID was received, and if - so, will ignore the second request. This - prevents clients from accidentally creating - duplicate commitments. - - The request ID must be a valid UUID with the - exception that zero UUID is not supported - (00000000-0000-0000-0000-000000000000). - """ - - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=1, - message=field_mask_pb2.FieldMask, - ) - backup: 'Backup' = proto.Field( - proto.MESSAGE, - number=2, - message='Backup', - ) - request_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class DeleteBackupRequest(proto.Message): - r"""Message for deleting a Backup. - - Attributes: - name (str): - Required. Name of the resource. - request_id (str): - Optional. An optional request ID to identify - requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes after the first request. - - For example, consider a situation where you make - an initial request and the request times out. If - you make the request again with the same request - ID, the server can check if the original operation - with the same request ID was received, and if - so, will ignore the second request. This - prevents clients from accidentally creating - duplicate commitments. - - The request ID must be a valid UUID with the - exception that zero UUID is not supported - (00000000-0000-0000-0000-000000000000). - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - request_id: str = proto.Field( - proto.STRING, - number=2, - ) - - -class RestoreBackupRequest(proto.Message): - r"""Request message for restoring from a Backup. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Required.
The resource name of the Backup instance, in the - format - 'projects/*/locations/*/backupVaults/*/dataSources/*/backups/'. - request_id (str): - Optional. An optional request ID to identify - requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes after the first request. - - For example, consider a situation where you make - an initial request and the request times out. If - you make the request again with the same request - ID, the server can check if the original operation - with the same request ID was received, and if - so, will ignore the second request. This - prevents clients from accidentally creating - duplicate commitments. - - The request ID must be a valid UUID with the - exception that zero UUID is not supported - (00000000-0000-0000-0000-000000000000). - compute_instance_target_environment (google.cloud.backupdr_v1.types.ComputeInstanceTargetEnvironment): - Compute Engine target environment to be used - during restore. - - This field is a member of `oneof`_ ``target_environment``. - compute_instance_restore_properties (google.cloud.backupdr_v1.types.ComputeInstanceRestoreProperties): - Compute Engine instance properties to be - overridden during restore. - - This field is a member of `oneof`_ ``instance_properties``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - request_id: str = proto.Field( - proto.STRING, - number=2, - ) - compute_instance_target_environment: backupvault_gce.ComputeInstanceTargetEnvironment = proto.Field( - proto.MESSAGE, - number=3, - oneof='target_environment', - message=backupvault_gce.ComputeInstanceTargetEnvironment, - ) - compute_instance_restore_properties: backupvault_gce.ComputeInstanceRestoreProperties = proto.Field( - proto.MESSAGE, - number=4, - oneof='instance_properties', - message=backupvault_gce.ComputeInstanceRestoreProperties, - ) - - -class RestoreBackupResponse(proto.Message): - r"""Response message for restoring from a Backup. - - Attributes: - target_resource (google.cloud.backupdr_v1.types.TargetResource): - Details of the target resource - created/modified as part of restore. - """ - - target_resource: 'TargetResource' = proto.Field( - proto.MESSAGE, - number=1, - message='TargetResource', - ) - - -class TargetResource(proto.Message): - r"""Details of the target resource created/modified as part of - restore. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - gcp_resource (google.cloud.backupdr_v1.types.GcpResource): - Details of the native Google Cloud resource - created as part of restore. - - This field is a member of `oneof`_ ``target_resource_info``. - """ - - gcp_resource: 'GcpResource' = proto.Field( - proto.MESSAGE, - number=1, - oneof='target_resource_info', - message='GcpResource', - ) - - -class GcpResource(proto.Message): - r"""Minimum details to identify a Google Cloud resource. - - Attributes: - gcp_resourcename (str): - Name of the Google Cloud resource. - location (str): - Location of the resource: - //"global"/"unspecified". - type_ (str): - Type of the resource. Use the Unified - Resource Type, e.g., - compute.googleapis.com/Instance.
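- - Example (an editor's illustrative sketch, not part of the generated API; ``response`` is assumed to be a RestoreBackupResponse returned by a restore operation): - - .. code-block:: python - - # Inspect the identity of the resource created by the restore. - resource = response.target_resource.gcp_resource - print(resource.gcp_resourcename, resource.location, resource.type_)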
- """ - - gcp_resourcename: str = proto.Field( - proto.STRING, - number=1, - ) - location: str = proto.Field( - proto.STRING, - number=2, - ) - type_: str = proto.Field( - proto.STRING, - number=3, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/types/backupvault_ba.py b/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/types/backupvault_ba.py deleted file mode 100644 index 43431f44f748..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/types/backupvault_ba.py +++ /dev/null @@ -1,89 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.backupdr.v1', - manifest={ - 'BackupApplianceBackupProperties', - }, -) - - -class BackupApplianceBackupProperties(proto.Message): - r"""BackupApplianceBackupProperties represents BackupDR backup - appliance's properties. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - generation_id (int): - Output only. The numeric generation ID of the - backup (monotonically increasing). - - This field is a member of `oneof`_ ``_generation_id``. - finalize_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when this backup object - was finalized (if none, backup is not - finalized). - - This field is a member of `oneof`_ ``_finalize_time``. - recovery_range_start_time (google.protobuf.timestamp_pb2.Timestamp): - Optional. The earliest timestamp of data - available in this Backup. - - This field is a member of `oneof`_ ``_recovery_range_start_time``. - recovery_range_end_time (google.protobuf.timestamp_pb2.Timestamp): - Optional. The latest timestamp of data - available in this Backup. - - This field is a member of `oneof`_ ``_recovery_range_end_time``. 
- """ - - generation_id: int = proto.Field( - proto.INT32, - number=1, - optional=True, - ) - finalize_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - optional=True, - message=timestamp_pb2.Timestamp, - ) - recovery_range_start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - optional=True, - message=timestamp_pb2.Timestamp, - ) - recovery_range_end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - optional=True, - message=timestamp_pb2.Timestamp, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/types/backupvault_gce.py b/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/types/backupvault_gce.py deleted file mode 100644 index 5c385890333d..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/google/cloud/backupdr_v1/types/backupvault_gce.py +++ /dev/null @@ -1,1983 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.backupdr.v1', - manifest={ - 'KeyRevocationActionType', - 'ComputeInstanceBackupProperties', - 'ComputeInstanceRestoreProperties', - 'ComputeInstanceTargetEnvironment', - 'ComputeInstanceDataSourceProperties', - 'AdvancedMachineFeatures', - 'ConfidentialInstanceConfig', - 'DisplayDevice', - 'AcceleratorConfig', - 'CustomerEncryptionKey', - 'Entry', - 'Metadata', - 'NetworkInterface', - 'NetworkPerformanceConfig', - 'AccessConfig', - 'AliasIpRange', - 'InstanceParams', - 'AllocationAffinity', - 'Scheduling', - 'SchedulingDuration', - 'ServiceAccount', - 'Tags', - 'AttachedDisk', - 'GuestOsFeature', - }, -) - - -class KeyRevocationActionType(proto.Enum): - r"""Specifies whether the virtual machine instance will be shut - down on key revocation. It is currently used in instance, - instance properties and GMI protos - - Values: - KEY_REVOCATION_ACTION_TYPE_UNSPECIFIED (0): - Default value. This value is unused. - NONE (1): - Indicates user chose no operation. - STOP (2): - Indicates user chose to opt for VM shutdown - on key revocation. - """ - KEY_REVOCATION_ACTION_TYPE_UNSPECIFIED = 0 - NONE = 1 - STOP = 2 - - -class ComputeInstanceBackupProperties(proto.Message): - r"""ComputeInstanceBackupProperties represents Compute Engine - instance backup properties. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - description (str): - An optional text description for the - instances that are created from these - properties. - - This field is a member of `oneof`_ ``_description``. - tags (google.cloud.backupdr_v1.types.Tags): - A list of tags to apply to the instances that - are created from these properties. The tags - identify valid sources or targets for network - firewalls. 
The setTags method can modify this - list of tags. Each tag within the list must - comply with RFC1035 - (https://www.ietf.org/rfc/rfc1035.txt). - - This field is a member of `oneof`_ ``_tags``. - machine_type (str): - The machine type to use for instances that - are created from these properties. - - This field is a member of `oneof`_ ``_machine_type``. - can_ip_forward (bool): - Enables instances created based on these properties to send - packets with source IP addresses other than their own and - receive packets with destination IP addresses other than - their own. If these instances will be used as an IP gateway - or it will be set as the next-hop in a Route resource, - specify ``true``. If unsure, leave this set to ``false``. - See the - https://cloud.google.com/vpc/docs/using-routes#canipforward - documentation for more information. - - This field is a member of `oneof`_ ``_can_ip_forward``. - network_interface (MutableSequence[google.cloud.backupdr_v1.types.NetworkInterface]): - An array of network access configurations for - this interface. - disk (MutableSequence[google.cloud.backupdr_v1.types.AttachedDisk]): - An array of disks that are associated with - the instances that are created from these - properties. - metadata (google.cloud.backupdr_v1.types.Metadata): - The metadata key/value pairs to assign to - instances that are created from these - properties. These pairs can consist of custom - metadata or predefined keys. See - https://cloud.google.com/compute/docs/metadata/overview - for more information. - - This field is a member of `oneof`_ ``_metadata``. - service_account (MutableSequence[google.cloud.backupdr_v1.types.ServiceAccount]): - A list of service accounts with specified - scopes. Access tokens for these service accounts - are available to the instances that are created - from these properties. Use metadata queries to - obtain the access tokens for these instances. - scheduling (google.cloud.backupdr_v1.types.Scheduling): - Specifies the scheduling options for the - instances that are created from these - properties. - - This field is a member of `oneof`_ ``_scheduling``. - guest_accelerator (MutableSequence[google.cloud.backupdr_v1.types.AcceleratorConfig]): - A list of guest accelerator cards' type and - count to use for instances created from these - properties. - min_cpu_platform (str): - Minimum cpu/platform to be used by instances. The instance - may be scheduled on the specified or newer cpu/platform. - Applicable values are the friendly names of CPU platforms, - such as ``minCpuPlatform: Intel Haswell`` or - ``minCpuPlatform: Intel Sandy Bridge``. For more - information, read - https://cloud.google.com/compute/docs/instances/specify-min-cpu-platform. - - This field is a member of `oneof`_ ``_min_cpu_platform``. - key_revocation_action_type (google.cloud.backupdr_v1.types.KeyRevocationActionType): - KeyRevocationActionType of the instance. - Supported options are "STOP" and "NONE". The - default value is "NONE" if it is not specified. - - This field is a member of `oneof`_ ``_key_revocation_action_type``. - source_instance (str): - The source instance used to create this - backup. This can be a partial or full URL to the - resource. For example, the following are valid - values: - - -https://www.googleapis.com/compute/v1/projects/project/zones/zone/instances/instance - -projects/project/zones/zone/instances/instance - - This field is a member of `oneof`_ ``_source_instance``. 
- labels (MutableMapping[str, str]): - Labels to apply to instances that are created - from these properties. - """ - - description: str = proto.Field( - proto.STRING, - number=1, - optional=True, - ) - tags: 'Tags' = proto.Field( - proto.MESSAGE, - number=2, - optional=True, - message='Tags', - ) - machine_type: str = proto.Field( - proto.STRING, - number=3, - optional=True, - ) - can_ip_forward: bool = proto.Field( - proto.BOOL, - number=4, - optional=True, - ) - network_interface: MutableSequence['NetworkInterface'] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='NetworkInterface', - ) - disk: MutableSequence['AttachedDisk'] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message='AttachedDisk', - ) - metadata: 'Metadata' = proto.Field( - proto.MESSAGE, - number=7, - optional=True, - message='Metadata', - ) - service_account: MutableSequence['ServiceAccount'] = proto.RepeatedField( - proto.MESSAGE, - number=8, - message='ServiceAccount', - ) - scheduling: 'Scheduling' = proto.Field( - proto.MESSAGE, - number=9, - optional=True, - message='Scheduling', - ) - guest_accelerator: MutableSequence['AcceleratorConfig'] = proto.RepeatedField( - proto.MESSAGE, - number=10, - message='AcceleratorConfig', - ) - min_cpu_platform: str = proto.Field( - proto.STRING, - number=11, - optional=True, - ) - key_revocation_action_type: 'KeyRevocationActionType' = proto.Field( - proto.ENUM, - number=12, - optional=True, - enum='KeyRevocationActionType', - ) - source_instance: str = proto.Field( - proto.STRING, - number=13, - optional=True, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=14, - ) - - -class ComputeInstanceRestoreProperties(proto.Message): - r"""ComputeInstanceRestoreProperties represents Compute Engine - instance properties to be overridden during restore. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Required. Name of the compute instance. - - This field is a member of `oneof`_ ``_name``. - advanced_machine_features (google.cloud.backupdr_v1.types.AdvancedMachineFeatures): - Optional. Controls for advanced - machine-related behavior features. - - This field is a member of `oneof`_ ``_advanced_machine_features``. - can_ip_forward (bool): - Optional. Allows this instance to send and - receive packets with non-matching destination or - source IPs. - - This field is a member of `oneof`_ ``_can_ip_forward``. - confidential_instance_config (google.cloud.backupdr_v1.types.ConfidentialInstanceConfig): - Optional. Controls Confidential compute - options on the instance - - This field is a member of `oneof`_ ``_confidential_instance_config``. - deletion_protection (bool): - Optional. Whether the resource should be - protected against deletion. - - This field is a member of `oneof`_ ``_deletion_protection``. - description (str): - Optional. An optional description of this - resource. Provide this property when you create - the resource. - - This field is a member of `oneof`_ ``_description``. - disks (MutableSequence[google.cloud.backupdr_v1.types.AttachedDisk]): - Optional. Array of disks associated with this - instance. Persistent disks must be created - before you can assign them. - display_device (google.cloud.backupdr_v1.types.DisplayDevice): - Optional. Enables display device for the - instance. - - This field is a member of `oneof`_ ``_display_device``. 
- guest_accelerators (MutableSequence[google.cloud.backupdr_v1.types.AcceleratorConfig]): - Optional. A list of the type and count of - accelerator cards attached to the instance. - hostname (str): - Optional. Specifies the hostname of the instance. The - specified hostname must be RFC1035 compliant. If hostname is - not specified, the default hostname is - [INSTANCE_NAME].c.[PROJECT_ID].internal when using the - global DNS, and - [INSTANCE_NAME].[ZONE].c.[PROJECT_ID].internal when using - zonal DNS. - - This field is a member of `oneof`_ ``_hostname``. - instance_encryption_key (google.cloud.backupdr_v1.types.CustomerEncryptionKey): - Optional. Encrypts suspended data for an - instance with a customer-managed encryption key. - - This field is a member of `oneof`_ ``_instance_encryption_key``. - key_revocation_action_type (google.cloud.backupdr_v1.types.KeyRevocationActionType): - Optional. KeyRevocationActionType of the - instance. - - This field is a member of `oneof`_ ``_key_revocation_action_type``. - labels (MutableMapping[str, str]): - Optional. Labels to apply to this instance. - machine_type (str): - Optional. Full or partial URL of the machine - type resource to use for this instance. - - This field is a member of `oneof`_ ``_machine_type``. - metadata (google.cloud.backupdr_v1.types.Metadata): - Optional. This includes custom metadata and - predefined keys. - - This field is a member of `oneof`_ ``_metadata``. - min_cpu_platform (str): - Optional. Minimum CPU platform to use for - this instance. - - This field is a member of `oneof`_ ``_min_cpu_platform``. - network_interfaces (MutableSequence[google.cloud.backupdr_v1.types.NetworkInterface]): - Optional. An array of network configurations - for this instance. These specify how interfaces - are configured to interact with other network - services, such as connecting to the internet. - Multiple interfaces are supported per instance. - network_performance_config (google.cloud.backupdr_v1.types.NetworkPerformanceConfig): - Optional. Configure network performance such - as egress bandwidth tier. - - This field is a member of `oneof`_ ``_network_performance_config``. - params (google.cloud.backupdr_v1.types.InstanceParams): - Input only. Additional params passed with the - request, but not persisted as part of resource - payload. - - This field is a member of `oneof`_ ``_params``. - private_ipv6_google_access (google.cloud.backupdr_v1.types.ComputeInstanceRestoreProperties.InstancePrivateIpv6GoogleAccess): - Optional. The private IPv6 google access type for the VM. If - not specified, use INHERIT_FROM_SUBNETWORK as default. - - This field is a member of `oneof`_ ``_private_ipv6_google_access``. - allocation_affinity (google.cloud.backupdr_v1.types.AllocationAffinity): - Optional. Specifies the reservations that - this instance can consume from. - - This field is a member of `oneof`_ ``_allocation_affinity``. - resource_policies (MutableSequence[str]): - Optional. Resource policies applied to this - instance. - scheduling (google.cloud.backupdr_v1.types.Scheduling): - Optional. Sets the scheduling options for - this instance. - - This field is a member of `oneof`_ ``_scheduling``. - service_accounts (MutableSequence[google.cloud.backupdr_v1.types.ServiceAccount]): - Optional. A list of service accounts, with - their specified scopes, authorized for this - instance. Only one service account per VM - instance is supported. - tags (google.cloud.backupdr_v1.types.Tags): - Optional. Tags to apply to this instance. 
Tags are used to identify valid sources or - targets for network firewalls and are specified - by the client during instance creation. - - This field is a member of `oneof`_ ``_tags``. - """ - class InstancePrivateIpv6GoogleAccess(proto.Enum): - r"""The private IPv6 google access type for the VMs. - - Values: - INSTANCE_PRIVATE_IPV6_GOOGLE_ACCESS_UNSPECIFIED (0): - Default value. This value is unused. - INHERIT_FROM_SUBNETWORK (1): - Each network interface inherits - PrivateIpv6GoogleAccess from its subnetwork. - ENABLE_OUTBOUND_VM_ACCESS_TO_GOOGLE (2): - Outbound private IPv6 access from VMs in this - subnet to Google services. If specified, the - subnetwork that is attached to the instance's - default network interface will be assigned an - internal IPv6 prefix if it doesn't have one - already. - ENABLE_BIDIRECTIONAL_ACCESS_TO_GOOGLE (3): - Bidirectional private IPv6 access to/from - Google services. If specified, the subnetwork - that is attached to the instance's default - network interface will be assigned an internal - IPv6 prefix if it doesn't have one already. - """ - INSTANCE_PRIVATE_IPV6_GOOGLE_ACCESS_UNSPECIFIED = 0 - INHERIT_FROM_SUBNETWORK = 1 - ENABLE_OUTBOUND_VM_ACCESS_TO_GOOGLE = 2 - ENABLE_BIDIRECTIONAL_ACCESS_TO_GOOGLE = 3 - - name: str = proto.Field( - proto.STRING, - number=1, - optional=True, - ) - advanced_machine_features: 'AdvancedMachineFeatures' = proto.Field( - proto.MESSAGE, - number=2, - optional=True, - message='AdvancedMachineFeatures', - ) - can_ip_forward: bool = proto.Field( - proto.BOOL, - number=3, - optional=True, - ) - confidential_instance_config: 'ConfidentialInstanceConfig' = proto.Field( - proto.MESSAGE, - number=4, - optional=True, - message='ConfidentialInstanceConfig', - ) - deletion_protection: bool = proto.Field( - proto.BOOL, - number=5, - optional=True, - ) - description: str = proto.Field( - proto.STRING, - number=6, - optional=True, - ) - disks: MutableSequence['AttachedDisk'] = proto.RepeatedField( - proto.MESSAGE, - number=7, - message='AttachedDisk', - ) - display_device: 'DisplayDevice' = proto.Field( - proto.MESSAGE, - number=8, - optional=True, - message='DisplayDevice', - ) - guest_accelerators: MutableSequence['AcceleratorConfig'] = proto.RepeatedField( - proto.MESSAGE, - number=9, - message='AcceleratorConfig', - ) - hostname: str = proto.Field( - proto.STRING, - number=10, - optional=True, - ) - instance_encryption_key: 'CustomerEncryptionKey' = proto.Field( - proto.MESSAGE, - number=11, - optional=True, - message='CustomerEncryptionKey', - ) - key_revocation_action_type: 'KeyRevocationActionType' = proto.Field( - proto.ENUM, - number=12, - optional=True, - enum='KeyRevocationActionType', - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=13, - ) - machine_type: str = proto.Field( - proto.STRING, - number=14, - optional=True, - ) - metadata: 'Metadata' = proto.Field( - proto.MESSAGE, - number=15, - optional=True, - message='Metadata', - ) - min_cpu_platform: str = proto.Field( - proto.STRING, - number=16, - optional=True, - ) - network_interfaces: MutableSequence['NetworkInterface'] = proto.RepeatedField( - proto.MESSAGE, - number=17, - message='NetworkInterface', - ) - network_performance_config: 'NetworkPerformanceConfig' = proto.Field( - proto.MESSAGE, - number=18, - optional=True, - message='NetworkPerformanceConfig', - ) - params: 'InstanceParams' = proto.Field( - proto.MESSAGE, - number=19, - optional=True, - message='InstanceParams', - ) - private_ipv6_google_access:
InstancePrivateIpv6GoogleAccess = proto.Field( - proto.ENUM, - number=20, - optional=True, - enum=InstancePrivateIpv6GoogleAccess, - ) - allocation_affinity: 'AllocationAffinity' = proto.Field( - proto.MESSAGE, - number=21, - optional=True, - message='AllocationAffinity', - ) - resource_policies: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=22, - ) - scheduling: 'Scheduling' = proto.Field( - proto.MESSAGE, - number=23, - optional=True, - message='Scheduling', - ) - service_accounts: MutableSequence['ServiceAccount'] = proto.RepeatedField( - proto.MESSAGE, - number=24, - message='ServiceAccount', - ) - tags: 'Tags' = proto.Field( - proto.MESSAGE, - number=26, - optional=True, - message='Tags', - ) - - -class ComputeInstanceTargetEnvironment(proto.Message): - r"""ComputeInstanceTargetEnvironment represents Compute Engine - target environment to be used during restore. - - Attributes: - project (str): - Required. Target project for the Compute - Engine instance. - zone (str): - Required. The zone of the Compute Engine - instance. - """ - - project: str = proto.Field( - proto.STRING, - number=1, - ) - zone: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ComputeInstanceDataSourceProperties(proto.Message): - r"""ComputeInstanceDataSourceProperties represents the properties - of a ComputeEngine resource that are stored in the DataSource. - - Attributes: - name (str): - Name of the compute instance backed up by the - datasource. - description (str): - The description of the Compute Engine - instance. - machine_type (str): - The machine type of the instance. - total_disk_count (int): - The total number of disks attached to the - Instance. - total_disk_size_gb (int): - The sum of all the disk sizes. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - description: str = proto.Field( - proto.STRING, - number=2, - ) - machine_type: str = proto.Field( - proto.STRING, - number=3, - ) - total_disk_count: int = proto.Field( - proto.INT64, - number=4, - ) - total_disk_size_gb: int = proto.Field( - proto.INT64, - number=5, - ) - - -class AdvancedMachineFeatures(proto.Message): - r"""Specifies options for controlling advanced machine features. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - enable_nested_virtualization (bool): - Optional. Whether to enable nested - virtualization or not (default is false). - - This field is a member of `oneof`_ ``_enable_nested_virtualization``. - threads_per_core (int): - Optional. The number of threads per physical - core. To disable simultaneous multithreading - (SMT) set this to 1. If unset, the maximum - number of threads supported per core by the - underlying processor is assumed. - - This field is a member of `oneof`_ ``_threads_per_core``. - visible_core_count (int): - Optional. The number of physical cores to - expose to an instance. Multiply by the number of - threads per core to compute the total number of - virtual CPUs to expose to the instance. If - unset, the number of cores is inferred from the - instance's nominal CPU count and the underlying - platform's SMT width. - - This field is a member of `oneof`_ ``_visible_core_count``. - enable_uefi_networking (bool): - Optional. Whether to enable UEFI networking - for instance creation. - - This field is a member of `oneof`_ ``_enable_uefi_networking``. 
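- - Example (an editor's illustrative sketch; the values are placeholders): - - .. code-block:: python - - from google.cloud import backupdr_v1 - - features = backupdr_v1.AdvancedMachineFeatures( - enable_nested_virtualization=True, - threads_per_core=1, # 1 disables simultaneous multithreading - )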
- """ - - enable_nested_virtualization: bool = proto.Field( - proto.BOOL, - number=1, - optional=True, - ) - threads_per_core: int = proto.Field( - proto.INT32, - number=2, - optional=True, - ) - visible_core_count: int = proto.Field( - proto.INT32, - number=3, - optional=True, - ) - enable_uefi_networking: bool = proto.Field( - proto.BOOL, - number=4, - optional=True, - ) - - -class ConfidentialInstanceConfig(proto.Message): - r"""A set of Confidential Instance options. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - enable_confidential_compute (bool): - Optional. Defines whether the instance should - have confidential compute enabled. - - This field is a member of `oneof`_ ``_enable_confidential_compute``. - """ - - enable_confidential_compute: bool = proto.Field( - proto.BOOL, - number=1, - optional=True, - ) - - -class DisplayDevice(proto.Message): - r"""A set of Display Device options - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - enable_display (bool): - Optional. Enables display for the Compute - Engine VM - - This field is a member of `oneof`_ ``_enable_display``. - """ - - enable_display: bool = proto.Field( - proto.BOOL, - number=1, - optional=True, - ) - - -class AcceleratorConfig(proto.Message): - r"""A specification of the type and number of accelerator cards - attached to the instance. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - accelerator_type (str): - Optional. Full or partial URL of the - accelerator type resource to attach to this - instance. - - This field is a member of `oneof`_ ``_accelerator_type``. - accelerator_count (int): - Optional. The number of the guest accelerator - cards exposed to this instance. - - This field is a member of `oneof`_ ``_accelerator_count``. - """ - - accelerator_type: str = proto.Field( - proto.STRING, - number=1, - optional=True, - ) - accelerator_count: int = proto.Field( - proto.INT32, - number=2, - optional=True, - ) - - -class CustomerEncryptionKey(proto.Message): - r"""A customer-supplied encryption key. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - raw_key (str): - Optional. Specifies a 256-bit - customer-supplied encryption key. - - This field is a member of `oneof`_ ``key``. - rsa_encrypted_key (str): - Optional. RSA-wrapped 2048-bit - customer-supplied encryption key to either - encrypt or decrypt this resource. - - This field is a member of `oneof`_ ``key``. - kms_key_name (str): - Optional. The name of the encryption key that - is stored in Google Cloud KMS. - - This field is a member of `oneof`_ ``key``. - kms_key_service_account (str): - Optional. The service account being used for - the encryption request for the given KMS key. If - absent, the Compute Engine default service - account is used. - - This field is a member of `oneof`_ ``_kms_key_service_account``. 
- """ - - raw_key: str = proto.Field( - proto.STRING, - number=1, - oneof='key', - ) - rsa_encrypted_key: str = proto.Field( - proto.STRING, - number=2, - oneof='key', - ) - kms_key_name: str = proto.Field( - proto.STRING, - number=3, - oneof='key', - ) - kms_key_service_account: str = proto.Field( - proto.STRING, - number=4, - optional=True, - ) - - -class Entry(proto.Message): - r"""A key/value pair to be used for storing metadata. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - key (str): - Optional. Key for the metadata entry. - - This field is a member of `oneof`_ ``_key``. - value (str): - Optional. Value for the metadata entry. These - are free-form strings, and only have meaning as - interpreted by the image running in the - instance. The only restriction placed on values - is that their size must be less than or equal to - 262144 bytes (256 KiB). - - This field is a member of `oneof`_ ``_value``. - """ - - key: str = proto.Field( - proto.STRING, - number=1, - optional=True, - ) - value: str = proto.Field( - proto.STRING, - number=2, - optional=True, - ) - - -class Metadata(proto.Message): - r"""A metadata key/value entry. - - Attributes: - items (MutableSequence[google.cloud.backupdr_v1.types.Entry]): - Optional. Array of key/value pairs. The total - size of all keys and values must be less than - 512 KB. - """ - - items: MutableSequence['Entry'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Entry', - ) - - -class NetworkInterface(proto.Message): - r"""A network interface resource attached to an instance. - s - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - network (str): - Optional. URL of the VPC network resource for - this instance. - - This field is a member of `oneof`_ ``_network``. - subnetwork (str): - Optional. The URL of the Subnetwork resource - for this instance. - - This field is a member of `oneof`_ ``_subnetwork``. - ip_address (str): - Optional. An IPv4 internal IP address to - assign to the instance for this network - interface. If not specified by the user, an - unused internal IP is assigned by the system. - - This field is a member of `oneof`_ ``_ip_address``. - ipv6_address (str): - Optional. An IPv6 internal network address - for this network interface. To use a static - internal IP address, it must be unused and in - the same region as the instance's zone. If not - specified, Google Cloud will automatically - assign an internal IPv6 address from the - instance's subnetwork. - - This field is a member of `oneof`_ ``_ipv6_address``. - internal_ipv6_prefix_length (int): - Optional. The prefix length of the primary - internal IPv6 range. - - This field is a member of `oneof`_ ``_internal_ipv6_prefix_length``. - name (str): - Output only. [Output Only] The name of the network - interface, which is generated by the server. - - This field is a member of `oneof`_ ``_name``. - access_configs (MutableSequence[google.cloud.backupdr_v1.types.AccessConfig]): - Optional. An array of configurations for this interface. - Currently, only one access config,ONE_TO_ONE_NAT is - supported. If there are no accessConfigs specified, then - this instance will have no external internet access. - ipv6_access_configs (MutableSequence[google.cloud.backupdr_v1.types.AccessConfig]): - Optional. An array of IPv6 access configurations for this - interface. 
Currently, only one IPv6 access config, - DIRECT_IPV6, is supported. If there is no ipv6AccessConfig - specified, then this instance will have no external IPv6 - Internet access. - alias_ip_ranges (MutableSequence[google.cloud.backupdr_v1.types.AliasIpRange]): - Optional. An array of alias IP ranges for - this network interface. You can only specify - this field for network interfaces in VPC - networks. - stack_type (google.cloud.backupdr_v1.types.NetworkInterface.StackType): - The stack type for this network interface. - - This field is a member of `oneof`_ ``_stack_type``. - ipv6_access_type (google.cloud.backupdr_v1.types.NetworkInterface.Ipv6AccessType): - Optional. [Output Only] One of EXTERNAL, INTERNAL to - indicate whether the IP can be accessed from the Internet. - This field is always inherited from its subnetwork. - - This field is a member of `oneof`_ ``_ipv6_access_type``. - queue_count (int): - Optional. The networking queue count that's - specified by users for the network interface. - Both Rx and Tx queues will be set to this - number. It'll be empty if not specified by the - users. - - This field is a member of `oneof`_ ``_queue_count``. - nic_type (google.cloud.backupdr_v1.types.NetworkInterface.NicType): - Optional. The type of vNIC to be used on this - interface. This may be gVNIC or VirtioNet. - - This field is a member of `oneof`_ ``_nic_type``. - network_attachment (str): - Optional. The URL of the network attachment that this - interface should connect to in the following format: - projects/{project_number}/regions/{region_name}/networkAttachments/{network_attachment_name}. - - This field is a member of `oneof`_ ``_network_attachment``. - """ - class StackType(proto.Enum): - r"""Stack type for this network interface. - - Values: - STACK_TYPE_UNSPECIFIED (0): - Default should be STACK_TYPE_UNSPECIFIED. - IPV4_ONLY (1): - The network interface will be assigned IPv4 - address. - IPV4_IPV6 (2): - The network interface can have both IPv4 and - IPv6 addresses. - """ - STACK_TYPE_UNSPECIFIED = 0 - IPV4_ONLY = 1 - IPV4_IPV6 = 2 - - class Ipv6AccessType(proto.Enum): - r"""IPv6 access type for this network interface. - - Values: - UNSPECIFIED_IPV6_ACCESS_TYPE (0): - IPv6 access type not set. Means this network - interface hasn't been turned on IPv6 yet. - INTERNAL (1): - This network interface can have internal - IPv6. - EXTERNAL (2): - This network interface can have external - IPv6. - """ - UNSPECIFIED_IPV6_ACCESS_TYPE = 0 - INTERNAL = 1 - EXTERNAL = 2 - - class NicType(proto.Enum): - r"""Nic type for this network interface. - - Values: - NIC_TYPE_UNSPECIFIED (0): - Default should be NIC_TYPE_UNSPECIFIED. 
- VIRTIO_NET (1): - VIRTIO - GVNIC (2): - GVNIC - """ - NIC_TYPE_UNSPECIFIED = 0 - VIRTIO_NET = 1 - GVNIC = 2 - - network: str = proto.Field( - proto.STRING, - number=1, - optional=True, - ) - subnetwork: str = proto.Field( - proto.STRING, - number=2, - optional=True, - ) - ip_address: str = proto.Field( - proto.STRING, - number=3, - optional=True, - ) - ipv6_address: str = proto.Field( - proto.STRING, - number=4, - optional=True, - ) - internal_ipv6_prefix_length: int = proto.Field( - proto.INT32, - number=5, - optional=True, - ) - name: str = proto.Field( - proto.STRING, - number=6, - optional=True, - ) - access_configs: MutableSequence['AccessConfig'] = proto.RepeatedField( - proto.MESSAGE, - number=7, - message='AccessConfig', - ) - ipv6_access_configs: MutableSequence['AccessConfig'] = proto.RepeatedField( - proto.MESSAGE, - number=8, - message='AccessConfig', - ) - alias_ip_ranges: MutableSequence['AliasIpRange'] = proto.RepeatedField( - proto.MESSAGE, - number=9, - message='AliasIpRange', - ) - stack_type: StackType = proto.Field( - proto.ENUM, - number=10, - optional=True, - enum=StackType, - ) - ipv6_access_type: Ipv6AccessType = proto.Field( - proto.ENUM, - number=11, - optional=True, - enum=Ipv6AccessType, - ) - queue_count: int = proto.Field( - proto.INT32, - number=12, - optional=True, - ) - nic_type: NicType = proto.Field( - proto.ENUM, - number=13, - optional=True, - enum=NicType, - ) - network_attachment: str = proto.Field( - proto.STRING, - number=14, - optional=True, - ) - - -class NetworkPerformanceConfig(proto.Message): - r"""Network performance configuration. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - total_egress_bandwidth_tier (google.cloud.backupdr_v1.types.NetworkPerformanceConfig.Tier): - Optional. The tier of the total egress - bandwidth. - - This field is a member of `oneof`_ ``_total_egress_bandwidth_tier``. - """ - class Tier(proto.Enum): - r"""Network performance tier. - - Values: - TIER_UNSPECIFIED (0): - This value is unused. - DEFAULT (1): - Default network performance config. - TIER_1 (2): - Tier 1 network performance config. - """ - TIER_UNSPECIFIED = 0 - DEFAULT = 1 - TIER_1 = 2 - - total_egress_bandwidth_tier: Tier = proto.Field( - proto.ENUM, - number=1, - optional=True, - enum=Tier, - ) - - -class AccessConfig(proto.Message): - r"""An access configuration attached to an instance's network - interface. Only one access config per instance is supported. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - type_ (google.cloud.backupdr_v1.types.AccessConfig.AccessType): - Optional. In accessConfigs (IPv4), the default and only - option is ONE_TO_ONE_NAT. In ipv6AccessConfigs, the default - and only option is DIRECT_IPV6. - - This field is a member of `oneof`_ ``_type``. - name (str): - Optional. The name of this access - configuration. - - This field is a member of `oneof`_ ``_name``. - external_ip (str): - Optional. The external IP address of this - access configuration. - - This field is a member of `oneof`_ ``_external_ip``. - external_ipv6 (str): - Optional. The external IPv6 address of this - access configuration. - - This field is a member of `oneof`_ ``_external_ipv6``. - external_ipv6_prefix_length (int): - Optional. The prefix length of the external - IPv6 range. - - This field is a member of `oneof`_ ``_external_ipv6_prefix_length``. - set_public_ptr (bool): - Optional. 
Specifies whether a public DNS - 'PTR' record should be created to map the - external IP address of the instance to a DNS - domain name. - - This field is a member of `oneof`_ ``_set_public_ptr``. - public_ptr_domain_name (str): - Optional. The DNS domain name for the public - PTR record. - - This field is a member of `oneof`_ ``_public_ptr_domain_name``. - network_tier (google.cloud.backupdr_v1.types.AccessConfig.NetworkTier): - Optional. This signifies the networking tier - used for configuring this access - - This field is a member of `oneof`_ ``_network_tier``. - """ - class AccessType(proto.Enum): - r"""The type of configuration. - - Values: - ACCESS_TYPE_UNSPECIFIED (0): - Default value. This value is unused. - ONE_TO_ONE_NAT (1): - ONE_TO_ONE_NAT - DIRECT_IPV6 (2): - Direct IPv6 access. - """ - ACCESS_TYPE_UNSPECIFIED = 0 - ONE_TO_ONE_NAT = 1 - DIRECT_IPV6 = 2 - - class NetworkTier(proto.Enum): - r"""Network tier property used by addresses, instances and - forwarding rules. - - Values: - NETWORK_TIER_UNSPECIFIED (0): - Default value. This value is unused. - PREMIUM (1): - High quality, Google-grade network tier, - support for all networking products. - STANDARD (2): - Public internet quality, only limited support - for other networking products. - """ - NETWORK_TIER_UNSPECIFIED = 0 - PREMIUM = 1 - STANDARD = 2 - - type_: AccessType = proto.Field( - proto.ENUM, - number=1, - optional=True, - enum=AccessType, - ) - name: str = proto.Field( - proto.STRING, - number=2, - optional=True, - ) - external_ip: str = proto.Field( - proto.STRING, - number=3, - optional=True, - ) - external_ipv6: str = proto.Field( - proto.STRING, - number=4, - optional=True, - ) - external_ipv6_prefix_length: int = proto.Field( - proto.INT32, - number=5, - optional=True, - ) - set_public_ptr: bool = proto.Field( - proto.BOOL, - number=6, - optional=True, - ) - public_ptr_domain_name: str = proto.Field( - proto.STRING, - number=7, - optional=True, - ) - network_tier: NetworkTier = proto.Field( - proto.ENUM, - number=8, - optional=True, - enum=NetworkTier, - ) - - -class AliasIpRange(proto.Message): - r"""An alias IP range attached to an instance's network - interface. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - ip_cidr_range (str): - Optional. The IP alias ranges to allocate for - this interface. - - This field is a member of `oneof`_ ``_ip_cidr_range``. - subnetwork_range_name (str): - Optional. The name of a subnetwork secondary - IP range from which to allocate an IP alias - range. If not specified, the primary range of - the subnetwork is used. - - This field is a member of `oneof`_ ``_subnetwork_range_name``. - """ - - ip_cidr_range: str = proto.Field( - proto.STRING, - number=1, - optional=True, - ) - subnetwork_range_name: str = proto.Field( - proto.STRING, - number=2, - optional=True, - ) - - -class InstanceParams(proto.Message): - r"""Additional instance params. - - Attributes: - resource_manager_tags (MutableMapping[str, str]): - Optional. Resource manager tags to be bound - to the instance. - """ - - resource_manager_tags: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=1, - ) - - -class AllocationAffinity(proto.Message): - r"""Specifies the reservations that this instance can consume - from. - - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - consume_allocation_type (google.cloud.backupdr_v1.types.AllocationAffinity.Type): - Optional. Specifies the type of reservation - from which this instance can consume. - - This field is a member of `oneof`_ ``_consume_allocation_type``. - key (str): - Optional. Corresponds to the label key of a - reservation resource. - - This field is a member of `oneof`_ ``_key``. - values (MutableSequence[str]): - Optional. Corresponds to the label values of - a reservation resource. - """ - class Type(proto.Enum): - r"""Indicates whether to consume from a reservation or not. - - Values: - TYPE_UNSPECIFIED (0): - Default value. This value is unused. - NO_RESERVATION (1): - Do not consume from any allocated capacity. - ANY_RESERVATION (2): - Consume any allocation available. - SPECIFIC_RESERVATION (3): - Must consume from a specific reservation. - Must specify key value fields for specifying the - reservations. - """ - TYPE_UNSPECIFIED = 0 - NO_RESERVATION = 1 - ANY_RESERVATION = 2 - SPECIFIC_RESERVATION = 3 - - consume_allocation_type: Type = proto.Field( - proto.ENUM, - number=1, - optional=True, - enum=Type, - ) - key: str = proto.Field( - proto.STRING, - number=2, - optional=True, - ) - values: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class Scheduling(proto.Message): - r"""Sets the scheduling options for an Instance. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - on_host_maintenance (google.cloud.backupdr_v1.types.Scheduling.OnHostMaintenance): - Optional. Defines the maintenance behavior - for this instance. - - This field is a member of `oneof`_ ``_on_host_maintenance``. - automatic_restart (bool): - Optional. Specifies whether the instance - should be automatically restarted if it is - terminated by Compute Engine (not terminated by - a user). - - This field is a member of `oneof`_ ``_automatic_restart``. - preemptible (bool): - Optional. Defines whether the instance is - preemptible. - - This field is a member of `oneof`_ ``_preemptible``. - node_affinities (MutableSequence[google.cloud.backupdr_v1.types.Scheduling.NodeAffinity]): - Optional. A set of node affinity and - anti-affinity configurations. Overrides - reservationAffinity. - min_node_cpus (int): - Optional. The minimum number of virtual CPUs - this instance will consume when running on a - sole-tenant node. - - This field is a member of `oneof`_ ``_min_node_cpus``. - provisioning_model (google.cloud.backupdr_v1.types.Scheduling.ProvisioningModel): - Optional. Specifies the provisioning model of - the instance. - - This field is a member of `oneof`_ ``_provisioning_model``. - instance_termination_action (google.cloud.backupdr_v1.types.Scheduling.InstanceTerminationAction): - Optional. Specifies the termination action - for the instance. - - This field is a member of `oneof`_ ``_instance_termination_action``. - local_ssd_recovery_timeout (google.cloud.backupdr_v1.types.SchedulingDuration): - Optional. Specifies the maximum amount of - time a Local SSD VM should wait while recovery - of the Local SSD state is attempted. Its value - should be between 0 and 168 hours, with hour - granularity; the default value is 1 hour. - - This field is a member of `oneof`_ ``_local_ssd_recovery_timeout``.
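- - Example (an editor's illustrative sketch; the values are placeholders): - - .. code-block:: python - - from google.cloud import backupdr_v1 - - scheduling = backupdr_v1.Scheduling( - provisioning_model=backupdr_v1.Scheduling.ProvisioningModel.SPOT, - instance_termination_action=backupdr_v1.Scheduling.InstanceTerminationAction.STOP, - automatic_restart=False, - )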
- """ - class OnHostMaintenance(proto.Enum): - r"""Defines the maintenance behavior for this instance= - - Values: - ON_HOST_MAINTENANCE_UNSPECIFIED (0): - Default value. This value is unused. - TERMINATE (1): - Tells Compute Engine to terminate and - (optionally) restart the instance away from the - maintenance activity. - MIGRATE (1000): - Default, Allows Compute Engine to - automatically migrate instances out of the way - of maintenance events. - """ - ON_HOST_MAINTENANCE_UNSPECIFIED = 0 - TERMINATE = 1 - MIGRATE = 1000 - - class ProvisioningModel(proto.Enum): - r"""Defines the provisioning model for an instance. - - Values: - PROVISIONING_MODEL_UNSPECIFIED (0): - Default value. This value is not used. - STANDARD (1): - Standard provisioning with user controlled - runtime, no discounts. - SPOT (2): - Heavily discounted, no guaranteed runtime. - """ - PROVISIONING_MODEL_UNSPECIFIED = 0 - STANDARD = 1 - SPOT = 2 - - class InstanceTerminationAction(proto.Enum): - r"""Defines the supported termination actions for an instance. - - Values: - INSTANCE_TERMINATION_ACTION_UNSPECIFIED (0): - Default value. This value is unused. - DELETE (1): - Delete the VM. - STOP (2): - Stop the VM without storing in-memory - content. default action. - """ - INSTANCE_TERMINATION_ACTION_UNSPECIFIED = 0 - DELETE = 1 - STOP = 2 - - class NodeAffinity(proto.Message): - r"""Node Affinity: the configuration of desired nodes onto which - this Instance could be scheduled. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - key (str): - Optional. Corresponds to the label key of - Node resource. - - This field is a member of `oneof`_ ``_key``. - operator (google.cloud.backupdr_v1.types.Scheduling.NodeAffinity.Operator): - Optional. Defines the operation of node - selection. - - This field is a member of `oneof`_ ``_operator``. - values (MutableSequence[str]): - Optional. Corresponds to the label values of - Node resource. - """ - class Operator(proto.Enum): - r"""Defines the type of node selections. - - Values: - OPERATOR_UNSPECIFIED (0): - Default value. This value is unused. - IN (1): - Requires Compute Engine to seek for matched - nodes. - NOT_IN (2): - Requires Compute Engine to avoid certain - nodes. 
- """ - OPERATOR_UNSPECIFIED = 0 - IN = 1 - NOT_IN = 2 - - key: str = proto.Field( - proto.STRING, - number=1, - optional=True, - ) - operator: 'Scheduling.NodeAffinity.Operator' = proto.Field( - proto.ENUM, - number=2, - optional=True, - enum='Scheduling.NodeAffinity.Operator', - ) - values: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - on_host_maintenance: OnHostMaintenance = proto.Field( - proto.ENUM, - number=1, - optional=True, - enum=OnHostMaintenance, - ) - automatic_restart: bool = proto.Field( - proto.BOOL, - number=2, - optional=True, - ) - preemptible: bool = proto.Field( - proto.BOOL, - number=3, - optional=True, - ) - node_affinities: MutableSequence[NodeAffinity] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message=NodeAffinity, - ) - min_node_cpus: int = proto.Field( - proto.INT32, - number=5, - optional=True, - ) - provisioning_model: ProvisioningModel = proto.Field( - proto.ENUM, - number=6, - optional=True, - enum=ProvisioningModel, - ) - instance_termination_action: InstanceTerminationAction = proto.Field( - proto.ENUM, - number=7, - optional=True, - enum=InstanceTerminationAction, - ) - local_ssd_recovery_timeout: 'SchedulingDuration' = proto.Field( - proto.MESSAGE, - number=10, - optional=True, - message='SchedulingDuration', - ) - - -class SchedulingDuration(proto.Message): - r"""A SchedulingDuration represents a fixed-length span of time - represented as a count of seconds and fractions of seconds at - nanosecond resolution. It is independent of any calendar and - concepts like "day" or "month". Range is approximately 10,000 - years. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - seconds (int): - Optional. Span of time at a resolution of a - second. - - This field is a member of `oneof`_ ``_seconds``. - nanos (int): - Optional. Span of time that's a fraction of a - second at nanosecond resolution. - - This field is a member of `oneof`_ ``_nanos``. - """ - - seconds: int = proto.Field( - proto.INT64, - number=1, - optional=True, - ) - nanos: int = proto.Field( - proto.INT32, - number=2, - optional=True, - ) - - -class ServiceAccount(proto.Message): - r"""A service account. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - email (str): - Optional. Email address of the service - account. - - This field is a member of `oneof`_ ``_email``. - scopes (MutableSequence[str]): - Optional. The list of scopes to be made - available for this service account. - """ - - email: str = proto.Field( - proto.STRING, - number=1, - optional=True, - ) - scopes: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - - -class Tags(proto.Message): - r"""A set of instance tags. - - Attributes: - items (MutableSequence[str]): - Optional. An array of tags. Each tag must be - 1-63 characters long, and comply with RFC1035. - """ - - items: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - - -class AttachedDisk(proto.Message): - r"""An instance-attached disk resource. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - initialize_params (google.cloud.backupdr_v1.types.AttachedDisk.InitializeParams): - Optional. Specifies the parameters to - initialize this disk. - - This field is a member of `oneof`_ ``_initialize_params``. - device_name (str): - Optional. 
This is used as an identifier for the disks. This - is the unique name that has to be provided to modify disk parameters - like disk_name and replica_zones (in the case of RePDs). - - This field is a member of `oneof`_ ``_device_name``. - kind (str): - Optional. Type of the resource. - - This field is a member of `oneof`_ ``_kind``. - disk_type_deprecated (google.cloud.backupdr_v1.types.AttachedDisk.DiskType): - Specifies the type of the disk. - - This field is a member of `oneof`_ ``_disk_type_deprecated``. - mode (google.cloud.backupdr_v1.types.AttachedDisk.DiskMode): - Optional. The mode in which to attach this - disk. - - This field is a member of `oneof`_ ``_mode``. - source (str): - Optional. Specifies a valid partial or full - URL to an existing Persistent Disk resource. - - This field is a member of `oneof`_ ``_source``. - index (int): - Optional. A zero-based index to this disk, - where 0 is reserved for the boot disk. - - This field is a member of `oneof`_ ``_index``. - boot (bool): - Optional. Indicates that this is a boot disk. - The virtual machine will use the first partition - of the disk for its root filesystem. - - This field is a member of `oneof`_ ``_boot``. - auto_delete (bool): - Optional. Specifies whether the disk will be - auto-deleted when the instance is deleted (but - not when the disk is detached from the - instance). - - This field is a member of `oneof`_ ``_auto_delete``. - license_ (MutableSequence[str]): - Optional. Any valid publicly visible - licenses. - disk_interface (google.cloud.backupdr_v1.types.AttachedDisk.DiskInterface): - Optional. Specifies the disk interface to use - for attaching this disk. - - This field is a member of `oneof`_ ``_disk_interface``. - guest_os_feature (MutableSequence[google.cloud.backupdr_v1.types.GuestOsFeature]): - Optional. A list of features to enable on the - guest operating system. Applicable only for - bootable images. - disk_encryption_key (google.cloud.backupdr_v1.types.CustomerEncryptionKey): - Optional. Encrypts or decrypts a disk using a - customer-supplied encryption key. - - This field is a member of `oneof`_ ``_disk_encryption_key``. - disk_size_gb (int): - Optional. The size of the disk in GB. - - This field is a member of `oneof`_ ``_disk_size_gb``. - saved_state (google.cloud.backupdr_v1.types.AttachedDisk.DiskSavedState): - Optional. Output only. The state of the disk. - - This field is a member of `oneof`_ ``_saved_state``. - disk_type (str): - Optional. Output only. The URI of the disk - type resource. For example: - projects/project/zones/zone/diskTypes/pd-standard - or pd-ssd. - - This field is a member of `oneof`_ ``_disk_type``. - type_ (google.cloud.backupdr_v1.types.AttachedDisk.DiskType): - Optional. Specifies the type of the disk. - - This field is a member of `oneof`_ ``_type``. - """ - class DiskType(proto.Enum): - r"""List of the Disk Types. - - Values: - DISK_TYPE_UNSPECIFIED (0): - Default value, which is unused. - SCRATCH (1): - A scratch disk type. - PERSISTENT (2): - A persistent disk type. - """ - DISK_TYPE_UNSPECIFIED = 0 - SCRATCH = 1 - PERSISTENT = 2 - - class DiskMode(proto.Enum): - r"""List of the Disk Modes. - - Values: - DISK_MODE_UNSPECIFIED (0): - Default value, which is unused. - READ_WRITE (1): - Attaches this disk in read-write mode. Only - one virtual machine at a time can be attached to - a disk in read-write mode. - READ_ONLY (2): - Attaches this disk in read-only mode. - Multiple virtual machines can use a disk in - read-only mode at a time. 
- LOCKED (3): - The disk is locked for administrative - reasons. Nobody else can use the disk. This mode - is used (for example) when taking a snapshot of - a disk to prevent mounting the disk while it is - being snapshotted. - """ - DISK_MODE_UNSPECIFIED = 0 - READ_WRITE = 1 - READ_ONLY = 2 - LOCKED = 3 - - class DiskInterface(proto.Enum): - r"""List of the Disk Interfaces. - - Values: - DISK_INTERFACE_UNSPECIFIED (0): - Default value, which is unused. - SCSI (1): - SCSI Disk Interface. - NVME (2): - NVME Disk Interface. - NVDIMM (3): - NVDIMM Disk Interface. - ISCSI (4): - ISCSI Disk Interface. - """ - DISK_INTERFACE_UNSPECIFIED = 0 - SCSI = 1 - NVME = 2 - NVDIMM = 3 - ISCSI = 4 - - class DiskSavedState(proto.Enum): - r"""List of the states of the Disk. - - Values: - DISK_SAVED_STATE_UNSPECIFIED (0): - Default. Disk state has not been preserved. - PRESERVED (1): - Disk state has been preserved. - """ - DISK_SAVED_STATE_UNSPECIFIED = 0 - PRESERVED = 1 - - class InitializeParams(proto.Message): - r"""Specifies the parameters to initialize this disk. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - disk_name (str): - Optional. Specifies the disk name. If not - specified, the default is to use the name of the - instance. - - This field is a member of `oneof`_ ``_disk_name``. - replica_zones (MutableSequence[str]): - Optional. URL of the zone where the disk - should be created. Required for each regional - disk associated with the instance. - """ - - disk_name: str = proto.Field( - proto.STRING, - number=1, - optional=True, - ) - replica_zones: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - - initialize_params: InitializeParams = proto.Field( - proto.MESSAGE, - number=1, - optional=True, - message=InitializeParams, - ) - device_name: str = proto.Field( - proto.STRING, - number=4, - optional=True, - ) - kind: str = proto.Field( - proto.STRING, - number=5, - optional=True, - ) - disk_type_deprecated: DiskType = proto.Field( - proto.ENUM, - number=6, - optional=True, - enum=DiskType, - ) - mode: DiskMode = proto.Field( - proto.ENUM, - number=7, - optional=True, - enum=DiskMode, - ) - source: str = proto.Field( - proto.STRING, - number=8, - optional=True, - ) - index: int = proto.Field( - proto.INT64, - number=9, - optional=True, - ) - boot: bool = proto.Field( - proto.BOOL, - number=10, - optional=True, - ) - auto_delete: bool = proto.Field( - proto.BOOL, - number=11, - optional=True, - ) - license_: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=12, - ) - disk_interface: DiskInterface = proto.Field( - proto.ENUM, - number=13, - optional=True, - enum=DiskInterface, - ) - guest_os_feature: MutableSequence['GuestOsFeature'] = proto.RepeatedField( - proto.MESSAGE, - number=14, - message='GuestOsFeature', - ) - disk_encryption_key: 'CustomerEncryptionKey' = proto.Field( - proto.MESSAGE, - number=15, - optional=True, - message='CustomerEncryptionKey', - ) - disk_size_gb: int = proto.Field( - proto.INT64, - number=16, - optional=True, - ) - saved_state: DiskSavedState = proto.Field( - proto.ENUM, - number=17, - optional=True, - enum=DiskSavedState, - ) - disk_type: str = proto.Field( - proto.STRING, - number=18, - optional=True, - ) - type_: DiskType = proto.Field( - proto.ENUM, - number=19, - optional=True, - enum=DiskType, - ) - - -class GuestOsFeature(proto.Message): - r"""Feature type of the Guest OS. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - type_ (google.cloud.backupdr_v1.types.GuestOsFeature.FeatureType): - The ID of a supported feature. - - This field is a member of `oneof`_ ``_type``. - """ - class FeatureType(proto.Enum): - r"""List of the Feature Types. - - Values: - FEATURE_TYPE_UNSPECIFIED (0): - Default value, which is unused. - VIRTIO_SCSI_MULTIQUEUE (1): - VIRTIO_SCSI_MULTIQUEUE feature type. - WINDOWS (2): - WINDOWS feature type. - MULTI_IP_SUBNET (3): - MULTI_IP_SUBNET feature type. - UEFI_COMPATIBLE (4): - UEFI_COMPATIBLE feature type. - SECURE_BOOT (5): - SECURE_BOOT feature type. - GVNIC (6): - GVNIC feature type. - SEV_CAPABLE (7): - SEV_CAPABLE feature type. - BARE_METAL_LINUX_COMPATIBLE (8): - BARE_METAL_LINUX_COMPATIBLE feature type. - SUSPEND_RESUME_COMPATIBLE (9): - SUSPEND_RESUME_COMPATIBLE feature type. - SEV_LIVE_MIGRATABLE (10): - SEV_LIVE_MIGRATABLE feature type. - SEV_SNP_CAPABLE (11): - SEV_SNP_CAPABLE feature type. - TDX_CAPABLE (12): - TDX_CAPABLE feature type. - IDPF (13): - IDPF feature type. - SEV_LIVE_MIGRATABLE_V2 (14): - SEV_LIVE_MIGRATABLE_V2 feature type. - """ - FEATURE_TYPE_UNSPECIFIED = 0 - VIRTIO_SCSI_MULTIQUEUE = 1 - WINDOWS = 2 - MULTI_IP_SUBNET = 3 - UEFI_COMPATIBLE = 4 - SECURE_BOOT = 5 - GVNIC = 6 - SEV_CAPABLE = 7 - BARE_METAL_LINUX_COMPATIBLE = 8 - SUSPEND_RESUME_COMPATIBLE = 9 - SEV_LIVE_MIGRATABLE = 10 - SEV_SNP_CAPABLE = 11 - TDX_CAPABLE = 12 - IDPF = 13 - SEV_LIVE_MIGRATABLE_V2 = 14 - - type_: FeatureType = proto.Field( - proto.ENUM, - number=1, - optional=True, - enum=FeatureType, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-backupdr/v1/mypy.ini b/owl-bot-staging/google-cloud-backupdr/v1/mypy.ini deleted file mode 100644 index 574c5aed394b..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/google-cloud-backupdr/v1/noxfile.py b/owl-bot-staging/google-cloud-backupdr/v1/noxfile.py deleted file mode 100644 index e50a8cc41925..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/noxfile.py +++ /dev/null @@ -1,280 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
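For context while reviewing the backupdr types module deleted above: these proto-plus messages compose as plain keyword-argument constructors, with nested enums reached through their enclosing class. Below is a minimal sketch of how Scheduling, NodeAffinity, AttachedDisk, and GuestOsFeature fit together, assuming (as the docstrings and the samples further down suggest) that the types are re-exported at the ``backupdr_v1`` package root; all field values are illustrative only.

.. code-block:: python

    from google.cloud import backupdr_v1

    # Pin the instance to dedicated nodes labeled "workload=backup" and
    # terminate (rather than live-migrate) it during host maintenance.
    scheduling = backupdr_v1.Scheduling(
        on_host_maintenance=backupdr_v1.Scheduling.OnHostMaintenance.TERMINATE,
        automatic_restart=True,
        node_affinities=[
            backupdr_v1.Scheduling.NodeAffinity(
                key="workload",
                operator=backupdr_v1.Scheduling.NodeAffinity.Operator.IN,
                values=["backup"],
            )
        ],
    )

    # A 100 GB boot disk whose guest OS feature list marks it UEFI-compatible,
    # built from the AttachedDisk and GuestOsFeature messages defined above.
    disk = backupdr_v1.AttachedDisk(
        device_name="boot-disk",
        boot=True,
        disk_size_gb=100,
        guest_os_feature=[
            backupdr_v1.GuestOsFeature(
                type_=backupdr_v1.GuestOsFeature.FeatureType.UEFI_COMPATIBLE,
            )
        ],
    )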
-# -import os -import pathlib -import re -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", - "3.12", - "3.13", -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = 'google-cloud-backupdr' - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.13" - -nox.options.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds", - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "prerelease_deps", -] - -@nox.session(python=ALL_PYTHON) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def unit(session, protobuf_implementation): - """Run the unit test suite.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") - - # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. - # The 'cpp' implementation requires Protobuf<4. - if protobuf_implementation == "cpp": - session.install("protobuf<4") - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/backupdr_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - -@nox.session(python=ALL_PYTHON[-1]) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def prerelease_deps(session, protobuf_implementation): - """Run the unit test suite against pre-release versions of dependencies.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - # Install test environment dependencies - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - - # Install the package without dependencies - session.install('-e', '.', '--no-deps') - - # We test the minimum dependency versions using the minimum Python - # version, so the lowest Python runtime that we test has a corresponding - # constraints file, located at `testing/constraints-{ALL_PYTHON[0]}.txt`, - # which contains all of the dependencies and extras. - with open( - CURRENT_DIRECTORY - / "testing" - / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. 
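A quick illustration of the constraint-parsing step that follows: the regex extracts only the package names from pinned ``pkg==version`` lines, skipping comments and unpinned entries. A standalone sketch, not part of the generated noxfile:

.. code-block:: python

    import re

    constraints_text = (
        "google-api-core==1.34.0\n"
        "# a comment line\n"
        "  proto-plus==1.22.3\n"
    )
    # Same pattern as in prerelease_deps: capture the name just before "==".
    names = re.findall(r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE)
    print(names)  # ['google-api-core', 'proto-plus']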
- constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - session.install(*constraints_deps) - - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - # Exclude grpcio==1.67.0rc1, which does not support Python 3.13 - "grpcio!=1.67.0rc1", - "grpcio-status", - "protobuf", - "proto-plus", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) - - # Remaining dependencies - other_deps = [ - "requests", - ] - session.install(*other_deps) - - # Print out prerelease package versions - - session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") - session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("python", "-c", "import grpc; print(grpc.__version__)") - session.run( - "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" - ) - session.run( - "python", "-c", "import proto; print(proto.__version__)" - ) - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/backupdr_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. - """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. 
- """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_async.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_async.py deleted file mode 100644 index d60bb539a629..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_async.py +++ /dev/null @@ -1,63 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateBackupPlanAssociation -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-backupdr - - -# [START backupdr_v1_generated_BackupDR_CreateBackupPlanAssociation_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import backupdr_v1 - - -async def sample_create_backup_plan_association(): - # Create a client - client = backupdr_v1.BackupDRAsyncClient() - - # Initialize request argument(s) - backup_plan_association = backupdr_v1.BackupPlanAssociation() - backup_plan_association.resource_type = "resource_type_value" - backup_plan_association.resource = "resource_value" - backup_plan_association.backup_plan = "backup_plan_value" - - request = backupdr_v1.CreateBackupPlanAssociationRequest( - parent="parent_value", - backup_plan_association_id="backup_plan_association_id_value", - backup_plan_association=backup_plan_association, - ) - - # Make the request - operation = client.create_backup_plan_association(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END backupdr_v1_generated_BackupDR_CreateBackupPlanAssociation_async] diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_sync.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_sync.py deleted file mode 100644 index 2c439946c9a7..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_sync.py +++ /dev/null @@ -1,63 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateBackupPlanAssociation -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-backupdr - - -# [START backupdr_v1_generated_BackupDR_CreateBackupPlanAssociation_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import backupdr_v1 - - -def sample_create_backup_plan_association(): - # Create a client - client = backupdr_v1.BackupDRClient() - - # Initialize request argument(s) - backup_plan_association = backupdr_v1.BackupPlanAssociation() - backup_plan_association.resource_type = "resource_type_value" - backup_plan_association.resource = "resource_value" - backup_plan_association.backup_plan = "backup_plan_value" - - request = backupdr_v1.CreateBackupPlanAssociationRequest( - parent="parent_value", - backup_plan_association_id="backup_plan_association_id_value", - backup_plan_association=backup_plan_association, - ) - - # Make the request - operation = client.create_backup_plan_association(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END backupdr_v1_generated_BackupDR_CreateBackupPlanAssociation_sync] diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_async.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_async.py deleted file mode 100644 index ff546daa2ac6..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_async.py +++ /dev/null @@ -1,68 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateBackupPlan -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-backupdr - - -# [START backupdr_v1_generated_BackupDR_CreateBackupPlan_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import backupdr_v1 - - -async def sample_create_backup_plan(): - # Create a client - client = backupdr_v1.BackupDRAsyncClient() - - # Initialize request argument(s) - backup_plan = backupdr_v1.BackupPlan() - backup_plan.backup_rules.standard_schedule.recurrence_type = "YEARLY" - backup_plan.backup_rules.standard_schedule.backup_window.start_hour_of_day = 1820 - backup_plan.backup_rules.standard_schedule.backup_window.end_hour_of_day = 1573 - backup_plan.backup_rules.standard_schedule.time_zone = "time_zone_value" - backup_plan.backup_rules.rule_id = "rule_id_value" - backup_plan.backup_rules.backup_retention_days = 2237 - backup_plan.resource_type = "resource_type_value" - backup_plan.backup_vault = "backup_vault_value" - - request = backupdr_v1.CreateBackupPlanRequest( - parent="parent_value", - backup_plan_id="backup_plan_id_value", - backup_plan=backup_plan, - ) - - # Make the request - operation = client.create_backup_plan(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END backupdr_v1_generated_BackupDR_CreateBackupPlan_async] diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_sync.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_sync.py deleted file mode 100644 index 5c648a085be1..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_sync.py +++ /dev/null @@ -1,68 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateBackupPlan -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-backupdr - - -# [START backupdr_v1_generated_BackupDR_CreateBackupPlan_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import backupdr_v1 - - -def sample_create_backup_plan(): - # Create a client - client = backupdr_v1.BackupDRClient() - - # Initialize request argument(s) - backup_plan = backupdr_v1.BackupPlan() - backup_plan.backup_rules.standard_schedule.recurrence_type = "YEARLY" - backup_plan.backup_rules.standard_schedule.backup_window.start_hour_of_day = 1820 - backup_plan.backup_rules.standard_schedule.backup_window.end_hour_of_day = 1573 - backup_plan.backup_rules.standard_schedule.time_zone = "time_zone_value" - backup_plan.backup_rules.rule_id = "rule_id_value" - backup_plan.backup_rules.backup_retention_days = 2237 - backup_plan.resource_type = "resource_type_value" - backup_plan.backup_vault = "backup_vault_value" - - request = backupdr_v1.CreateBackupPlanRequest( - parent="parent_value", - backup_plan_id="backup_plan_id_value", - backup_plan=backup_plan, - ) - - # Make the request - operation = client.create_backup_plan(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END backupdr_v1_generated_BackupDR_CreateBackupPlan_sync] diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_vault_async.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_vault_async.py deleted file mode 100644 index 1acf666c5d38..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_vault_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateBackupVault -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-backupdr - - -# [START backupdr_v1_generated_BackupDR_CreateBackupVault_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import backupdr_v1 - - -async def sample_create_backup_vault(): - # Create a client - client = backupdr_v1.BackupDRAsyncClient() - - # Initialize request argument(s) - request = backupdr_v1.CreateBackupVaultRequest( - parent="parent_value", - backup_vault_id="backup_vault_id_value", - ) - - # Make the request - operation = client.create_backup_vault(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END backupdr_v1_generated_BackupDR_CreateBackupVault_async] diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_vault_sync.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_vault_sync.py deleted file mode 100644 index 5a1abda3a275..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_vault_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateBackupVault -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-backupdr - - -# [START backupdr_v1_generated_BackupDR_CreateBackupVault_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import backupdr_v1 - - -def sample_create_backup_vault(): - # Create a client - client = backupdr_v1.BackupDRClient() - - # Initialize request argument(s) - request = backupdr_v1.CreateBackupVaultRequest( - parent="parent_value", - backup_vault_id="backup_vault_id_value", - ) - - # Make the request - operation = client.create_backup_vault(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END backupdr_v1_generated_BackupDR_CreateBackupVault_sync] diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_create_management_server_async.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_create_management_server_async.py deleted file mode 100644 index 62c311262130..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_create_management_server_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateManagementServer -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-backupdr - - -# [START backupdr_v1_generated_BackupDR_CreateManagementServer_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import backupdr_v1 - - -async def sample_create_management_server(): - # Create a client - client = backupdr_v1.BackupDRAsyncClient() - - # Initialize request argument(s) - request = backupdr_v1.CreateManagementServerRequest( - parent="parent_value", - management_server_id="management_server_id_value", - ) - - # Make the request - operation = client.create_management_server(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END backupdr_v1_generated_BackupDR_CreateManagementServer_async] diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_create_management_server_sync.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_create_management_server_sync.py deleted file mode 100644 index f43121723653..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_create_management_server_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateManagementServer -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-backupdr - - -# [START backupdr_v1_generated_BackupDR_CreateManagementServer_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import backupdr_v1 - - -def sample_create_management_server(): - # Create a client - client = backupdr_v1.BackupDRClient() - - # Initialize request argument(s) - request = backupdr_v1.CreateManagementServerRequest( - parent="parent_value", - management_server_id="management_server_id_value", - ) - - # Make the request - operation = client.create_management_server(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END backupdr_v1_generated_BackupDR_CreateManagementServer_sync] diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_async.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_async.py deleted file mode 100644 index 346d24a9543e..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteBackup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-backupdr - - -# [START backupdr_v1_generated_BackupDR_DeleteBackup_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import backupdr_v1 - - -async def sample_delete_backup(): - # Create a client - client = backupdr_v1.BackupDRAsyncClient() - - # Initialize request argument(s) - request = backupdr_v1.DeleteBackupRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_backup(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END backupdr_v1_generated_BackupDR_DeleteBackup_async] diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_association_async.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_association_async.py deleted file mode 100644 index 21af239763d3..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_association_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteBackupPlanAssociation -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-backupdr - - -# [START backupdr_v1_generated_BackupDR_DeleteBackupPlanAssociation_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import backupdr_v1 - - -async def sample_delete_backup_plan_association(): - # Create a client - client = backupdr_v1.BackupDRAsyncClient() - - # Initialize request argument(s) - request = backupdr_v1.DeleteBackupPlanAssociationRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_backup_plan_association(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END backupdr_v1_generated_BackupDR_DeleteBackupPlanAssociation_async] diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_association_sync.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_association_sync.py deleted file mode 100644 index 18c4ca0cae3b..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_association_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteBackupPlanAssociation -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-backupdr - - -# [START backupdr_v1_generated_BackupDR_DeleteBackupPlanAssociation_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import backupdr_v1 - - -def sample_delete_backup_plan_association(): - # Create a client - client = backupdr_v1.BackupDRClient() - - # Initialize request argument(s) - request = backupdr_v1.DeleteBackupPlanAssociationRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_backup_plan_association(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END backupdr_v1_generated_BackupDR_DeleteBackupPlanAssociation_sync] diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_async.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_async.py deleted file mode 100644 index 3423852e66cd..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteBackupPlan -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-backupdr - - -# [START backupdr_v1_generated_BackupDR_DeleteBackupPlan_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import backupdr_v1 - - -async def sample_delete_backup_plan(): - # Create a client - client = backupdr_v1.BackupDRAsyncClient() - - # Initialize request argument(s) - request = backupdr_v1.DeleteBackupPlanRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_backup_plan(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END backupdr_v1_generated_BackupDR_DeleteBackupPlan_async] diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_sync.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_sync.py deleted file mode 100644 index aaabe6e55265..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteBackupPlan -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-backupdr - - -# [START backupdr_v1_generated_BackupDR_DeleteBackupPlan_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import backupdr_v1 - - -def sample_delete_backup_plan(): - # Create a client - client = backupdr_v1.BackupDRClient() - - # Initialize request argument(s) - request = backupdr_v1.DeleteBackupPlanRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_backup_plan(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END backupdr_v1_generated_BackupDR_DeleteBackupPlan_sync] diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_sync.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_sync.py deleted file mode 100644 index dee368f1cd32..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteBackup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-backupdr - - -# [START backupdr_v1_generated_BackupDR_DeleteBackup_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import backupdr_v1 - - -def sample_delete_backup(): - # Create a client - client = backupdr_v1.BackupDRClient() - - # Initialize request argument(s) - request = backupdr_v1.DeleteBackupRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_backup(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END backupdr_v1_generated_BackupDR_DeleteBackup_sync] diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_vault_async.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_vault_async.py deleted file mode 100644 index a70379011f44..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_vault_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteBackupVault -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-backupdr - - -# [START backupdr_v1_generated_BackupDR_DeleteBackupVault_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import backupdr_v1 - - -async def sample_delete_backup_vault(): - # Create a client - client = backupdr_v1.BackupDRAsyncClient() - - # Initialize request argument(s) - request = backupdr_v1.DeleteBackupVaultRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_backup_vault(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END backupdr_v1_generated_BackupDR_DeleteBackupVault_async] diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_vault_sync.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_vault_sync.py deleted file mode 100644 index 2b824ef4088e..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_vault_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteBackupVault -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-backupdr - - -# [START backupdr_v1_generated_BackupDR_DeleteBackupVault_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import backupdr_v1
-
-
-def sample_delete_backup_vault():
-    # Create a client
-    client = backupdr_v1.BackupDRClient()
-
-    # Initialize request argument(s)
-    request = backupdr_v1.DeleteBackupVaultRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    operation = client.delete_backup_vault(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END backupdr_v1_generated_BackupDR_DeleteBackupVault_sync]
diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_management_server_async.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_management_server_async.py
deleted file mode 100644
index 8b32fe64f45f..000000000000
--- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_management_server_async.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteManagementServer
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-# python3 -m pip install google-cloud-backupdr
-
-
-# [START backupdr_v1_generated_BackupDR_DeleteManagementServer_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import backupdr_v1
-
-
-async def sample_delete_management_server():
-    # Create a client
-    client = backupdr_v1.BackupDRAsyncClient()
-
-    # Initialize request argument(s)
-    request = backupdr_v1.DeleteManagementServerRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    operation = client.delete_management_server(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = (await operation).result()
-
-    # Handle the response
-    print(response)
-
-# [END backupdr_v1_generated_BackupDR_DeleteManagementServer_async]
diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_management_server_sync.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_management_server_sync.py
deleted file mode 100644
index 13d4413175e3..000000000000
--- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_management_server_sync.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteManagementServer
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-# python3 -m pip install google-cloud-backupdr
-
-
-# [START backupdr_v1_generated_BackupDR_DeleteManagementServer_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import backupdr_v1
-
-
-def sample_delete_management_server():
-    # Create a client
-    client = backupdr_v1.BackupDRClient()
-
-    # Initialize request argument(s)
-    request = backupdr_v1.DeleteManagementServerRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    operation = client.delete_management_server(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END backupdr_v1_generated_BackupDR_DeleteManagementServer_sync]
diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_async.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_async.py
deleted file mode 100644
index ab7dc9c365b4..000000000000
--- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for FetchUsableBackupVaults
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-# python3 -m pip install google-cloud-backupdr
-
-
-# [START backupdr_v1_generated_BackupDR_FetchUsableBackupVaults_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import backupdr_v1
-
-
-async def sample_fetch_usable_backup_vaults():
-    # Create a client
-    client = backupdr_v1.BackupDRAsyncClient()
-
-    # Initialize request argument(s)
-    request = backupdr_v1.FetchUsableBackupVaultsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.fetch_usable_backup_vaults(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END backupdr_v1_generated_BackupDR_FetchUsableBackupVaults_async]
diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_sync.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_sync.py
deleted file mode 100644
index 0e4abb2342d5..000000000000
--- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for FetchUsableBackupVaults
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-# python3 -m pip install google-cloud-backupdr
-
-
-# [START backupdr_v1_generated_BackupDR_FetchUsableBackupVaults_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import backupdr_v1
-
-
-def sample_fetch_usable_backup_vaults():
-    # Create a client
-    client = backupdr_v1.BackupDRClient()
-
-    # Initialize request argument(s)
-    request = backupdr_v1.FetchUsableBackupVaultsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.fetch_usable_backup_vaults(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END backupdr_v1_generated_BackupDR_FetchUsableBackupVaults_sync]
diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_async.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_async.py
deleted file mode 100644
index 27f69f503b1b..000000000000
--- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetBackup
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-# python3 -m pip install google-cloud-backupdr
-
-
-# [START backupdr_v1_generated_BackupDR_GetBackup_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import backupdr_v1
-
-
-async def sample_get_backup():
-    # Create a client
-    client = backupdr_v1.BackupDRAsyncClient()
-
-    # Initialize request argument(s)
-    request = backupdr_v1.GetBackupRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = await client.get_backup(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END backupdr_v1_generated_BackupDR_GetBackup_async]
diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_association_async.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_association_async.py
deleted file mode 100644
index 666e503e039c..000000000000
--- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_association_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetBackupPlanAssociation
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-# python3 -m pip install google-cloud-backupdr
-
-
-# [START backupdr_v1_generated_BackupDR_GetBackupPlanAssociation_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import backupdr_v1
-
-
-async def sample_get_backup_plan_association():
-    # Create a client
-    client = backupdr_v1.BackupDRAsyncClient()
-
-    # Initialize request argument(s)
-    request = backupdr_v1.GetBackupPlanAssociationRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = await client.get_backup_plan_association(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END backupdr_v1_generated_BackupDR_GetBackupPlanAssociation_async]
diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_association_sync.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_association_sync.py
deleted file mode 100644
index f16d4b5dcdc6..000000000000
--- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_association_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetBackupPlanAssociation
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-# python3 -m pip install google-cloud-backupdr
-
-
-# [START backupdr_v1_generated_BackupDR_GetBackupPlanAssociation_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import backupdr_v1
-
-
-def sample_get_backup_plan_association():
-    # Create a client
-    client = backupdr_v1.BackupDRClient()
-
-    # Initialize request argument(s)
-    request = backupdr_v1.GetBackupPlanAssociationRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = client.get_backup_plan_association(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END backupdr_v1_generated_BackupDR_GetBackupPlanAssociation_sync]
diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_async.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_async.py
deleted file mode 100644
index bbca5985c4d3..000000000000
--- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetBackupPlan
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-# python3 -m pip install google-cloud-backupdr
-
-
-# [START backupdr_v1_generated_BackupDR_GetBackupPlan_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import backupdr_v1
-
-
-async def sample_get_backup_plan():
-    # Create a client
-    client = backupdr_v1.BackupDRAsyncClient()
-
-    # Initialize request argument(s)
-    request = backupdr_v1.GetBackupPlanRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = await client.get_backup_plan(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END backupdr_v1_generated_BackupDR_GetBackupPlan_async]
diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_sync.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_sync.py
deleted file mode 100644
index 3e6f35ccdc90..000000000000
--- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetBackupPlan
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-# python3 -m pip install google-cloud-backupdr
-
-
-# [START backupdr_v1_generated_BackupDR_GetBackupPlan_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import backupdr_v1
-
-
-def sample_get_backup_plan():
-    # Create a client
-    client = backupdr_v1.BackupDRClient()
-
-    # Initialize request argument(s)
-    request = backupdr_v1.GetBackupPlanRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = client.get_backup_plan(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END backupdr_v1_generated_BackupDR_GetBackupPlan_sync]
diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_sync.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_sync.py
deleted file mode 100644
index 064cbac8920e..000000000000
--- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetBackup
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-# python3 -m pip install google-cloud-backupdr
-
-
-# [START backupdr_v1_generated_BackupDR_GetBackup_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import backupdr_v1
-
-
-def sample_get_backup():
-    # Create a client
-    client = backupdr_v1.BackupDRClient()
-
-    # Initialize request argument(s)
-    request = backupdr_v1.GetBackupRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = client.get_backup(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END backupdr_v1_generated_BackupDR_GetBackup_sync]
diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_vault_async.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_vault_async.py
deleted file mode 100644
index 95d30ed5bf46..000000000000
--- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_vault_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetBackupVault
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-# python3 -m pip install google-cloud-backupdr
-
-
-# [START backupdr_v1_generated_BackupDR_GetBackupVault_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import backupdr_v1
-
-
-async def sample_get_backup_vault():
-    # Create a client
-    client = backupdr_v1.BackupDRAsyncClient()
-
-    # Initialize request argument(s)
-    request = backupdr_v1.GetBackupVaultRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = await client.get_backup_vault(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END backupdr_v1_generated_BackupDR_GetBackupVault_async]
diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_vault_sync.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_vault_sync.py
deleted file mode 100644
index 814ccccaf4a0..000000000000
--- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_vault_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetBackupVault
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-# python3 -m pip install google-cloud-backupdr
-
-
-# [START backupdr_v1_generated_BackupDR_GetBackupVault_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import backupdr_v1
-
-
-def sample_get_backup_vault():
-    # Create a client
-    client = backupdr_v1.BackupDRClient()
-
-    # Initialize request argument(s)
-    request = backupdr_v1.GetBackupVaultRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = client.get_backup_vault(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END backupdr_v1_generated_BackupDR_GetBackupVault_sync]
diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_async.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_async.py
deleted file mode 100644
index 864ee90db114..000000000000
--- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetDataSource
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-# python3 -m pip install google-cloud-backupdr
-
-
-# [START backupdr_v1_generated_BackupDR_GetDataSource_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import backupdr_v1
-
-
-async def sample_get_data_source():
-    # Create a client
-    client = backupdr_v1.BackupDRAsyncClient()
-
-    # Initialize request argument(s)
-    request = backupdr_v1.GetDataSourceRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = await client.get_data_source(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END backupdr_v1_generated_BackupDR_GetDataSource_async]
diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_sync.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_sync.py
deleted file mode 100644
index 95f18218de42..000000000000
--- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetDataSource
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-# python3 -m pip install google-cloud-backupdr
-
-
-# [START backupdr_v1_generated_BackupDR_GetDataSource_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import backupdr_v1
-
-
-def sample_get_data_source():
-    # Create a client
-    client = backupdr_v1.BackupDRClient()
-
-    # Initialize request argument(s)
-    request = backupdr_v1.GetDataSourceRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = client.get_data_source(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END backupdr_v1_generated_BackupDR_GetDataSource_sync]
diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_get_management_server_async.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_get_management_server_async.py
deleted file mode 100644
index e2e950bcd22d..000000000000
--- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_get_management_server_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetManagementServer
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-# python3 -m pip install google-cloud-backupdr
-
-
-# [START backupdr_v1_generated_BackupDR_GetManagementServer_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import backupdr_v1
-
-
-async def sample_get_management_server():
-    # Create a client
-    client = backupdr_v1.BackupDRAsyncClient()
-
-    # Initialize request argument(s)
-    request = backupdr_v1.GetManagementServerRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = await client.get_management_server(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END backupdr_v1_generated_BackupDR_GetManagementServer_async]
diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_get_management_server_sync.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_get_management_server_sync.py
deleted file mode 100644
index 413900917aa1..000000000000
--- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_get_management_server_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetManagementServer
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-# python3 -m pip install google-cloud-backupdr
-
-
-# [START backupdr_v1_generated_BackupDR_GetManagementServer_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import backupdr_v1
-
-
-def sample_get_management_server():
-    # Create a client
-    client = backupdr_v1.BackupDRClient()
-
-    # Initialize request argument(s)
-    request = backupdr_v1.GetManagementServerRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    response = client.get_management_server(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END backupdr_v1_generated_BackupDR_GetManagementServer_sync]
diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_initialize_service_async.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_initialize_service_async.py
deleted file mode 100644
index c938b4515018..000000000000
--- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_initialize_service_async.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for InitializeService
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-# python3 -m pip install google-cloud-backupdr
-
-
-# [START backupdr_v1_generated_BackupDR_InitializeService_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import backupdr_v1
-
-
-async def sample_initialize_service():
-    # Create a client
-    client = backupdr_v1.BackupDRAsyncClient()
-
-    # Initialize request argument(s)
-    request = backupdr_v1.InitializeServiceRequest(
-        name="name_value",
-        resource_type="resource_type_value",
-    )
-
-    # Make the request
-    operation = client.initialize_service(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = (await operation).result()
-
-    # Handle the response
-    print(response)
-
-# [END backupdr_v1_generated_BackupDR_InitializeService_async]
diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_initialize_service_sync.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_initialize_service_sync.py
deleted file mode 100644
index 8bd114b067c2..000000000000
--- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_initialize_service_sync.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for InitializeService
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-# python3 -m pip install google-cloud-backupdr
-
-
-# [START backupdr_v1_generated_BackupDR_InitializeService_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import backupdr_v1
-
-
-def sample_initialize_service():
-    # Create a client
-    client = backupdr_v1.BackupDRClient()
-
-    # Initialize request argument(s)
-    request = backupdr_v1.InitializeServiceRequest(
-        name="name_value",
-        resource_type="resource_type_value",
-    )
-
-    # Make the request
-    operation = client.initialize_service(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END backupdr_v1_generated_BackupDR_InitializeService_sync]
diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_associations_async.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_associations_async.py
deleted file mode 100644
index e6cfd3cc039c..000000000000
--- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_associations_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListBackupPlanAssociations
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-# python3 -m pip install google-cloud-backupdr
-
-
-# [START backupdr_v1_generated_BackupDR_ListBackupPlanAssociations_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import backupdr_v1
-
-
-async def sample_list_backup_plan_associations():
-    # Create a client
-    client = backupdr_v1.BackupDRAsyncClient()
-
-    # Initialize request argument(s)
-    request = backupdr_v1.ListBackupPlanAssociationsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_backup_plan_associations(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END backupdr_v1_generated_BackupDR_ListBackupPlanAssociations_async]
diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_associations_sync.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_associations_sync.py
deleted file mode 100644
index 39b135ce9944..000000000000
--- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_associations_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListBackupPlanAssociations
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-# python3 -m pip install google-cloud-backupdr
-
-
-# [START backupdr_v1_generated_BackupDR_ListBackupPlanAssociations_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import backupdr_v1
-
-
-def sample_list_backup_plan_associations():
-    # Create a client
-    client = backupdr_v1.BackupDRClient()
-
-    # Initialize request argument(s)
-    request = backupdr_v1.ListBackupPlanAssociationsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_backup_plan_associations(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END backupdr_v1_generated_BackupDR_ListBackupPlanAssociations_sync]
diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plans_async.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plans_async.py
deleted file mode 100644
index f09593b5796b..000000000000
--- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plans_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListBackupPlans
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-# python3 -m pip install google-cloud-backupdr
-
-
-# [START backupdr_v1_generated_BackupDR_ListBackupPlans_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import backupdr_v1
-
-
-async def sample_list_backup_plans():
-    # Create a client
-    client = backupdr_v1.BackupDRAsyncClient()
-
-    # Initialize request argument(s)
-    request = backupdr_v1.ListBackupPlansRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_backup_plans(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END backupdr_v1_generated_BackupDR_ListBackupPlans_async]
diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plans_sync.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plans_sync.py
deleted file mode 100644
index ab2cab9b1701..000000000000
--- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plans_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListBackupPlans
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-# python3 -m pip install google-cloud-backupdr
-
-
-# [START backupdr_v1_generated_BackupDR_ListBackupPlans_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import backupdr_v1
-
-
-def sample_list_backup_plans():
-    # Create a client
-    client = backupdr_v1.BackupDRClient()
-
-    # Initialize request argument(s)
-    request = backupdr_v1.ListBackupPlansRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_backup_plans(request=request)
-
-    # Handle the response
-    for response in page_result:
-        print(response)
-
-# [END backupdr_v1_generated_BackupDR_ListBackupPlans_sync]
diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_vaults_async.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_vaults_async.py
deleted file mode 100644
index 675c345b810c..000000000000
--- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_vaults_async.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListBackupVaults
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-# python3 -m pip install google-cloud-backupdr
-
-
-# [START backupdr_v1_generated_BackupDR_ListBackupVaults_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import backupdr_v1
-
-
-async def sample_list_backup_vaults():
-    # Create a client
-    client = backupdr_v1.BackupDRAsyncClient()
-
-    # Initialize request argument(s)
-    request = backupdr_v1.ListBackupVaultsRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = client.list_backup_vaults(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END backupdr_v1_generated_BackupDR_ListBackupVaults_async]
diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_vaults_sync.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_vaults_sync.py
deleted file mode 100644
index 27b1faa5debb..000000000000
--- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_vaults_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListBackupVaults
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-# python3 -m pip install google-cloud-backupdr
-
-
-# [START backupdr_v1_generated_BackupDR_ListBackupVaults_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import backupdr_v1 - - -def sample_list_backup_vaults(): - # Create a client - client = backupdr_v1.BackupDRClient() - - # Initialize request argument(s) - request = backupdr_v1.ListBackupVaultsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_backup_vaults(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END backupdr_v1_generated_BackupDR_ListBackupVaults_sync] diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backups_async.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backups_async.py deleted file mode 100644 index da6366f3b095..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backups_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListBackups -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-backupdr - - -# [START backupdr_v1_generated_BackupDR_ListBackups_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import backupdr_v1 - - -async def sample_list_backups(): - # Create a client - client = backupdr_v1.BackupDRAsyncClient() - - # Initialize request argument(s) - request = backupdr_v1.ListBackupsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_backups(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END backupdr_v1_generated_BackupDR_ListBackups_async] diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backups_sync.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backups_sync.py deleted file mode 100644 index 18387f7371a5..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backups_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListBackups -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-backupdr - - -# [START backupdr_v1_generated_BackupDR_ListBackups_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import backupdr_v1 - - -def sample_list_backups(): - # Create a client - client = backupdr_v1.BackupDRClient() - - # Initialize request argument(s) - request = backupdr_v1.ListBackupsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_backups(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END backupdr_v1_generated_BackupDR_ListBackups_sync] diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_sources_async.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_sources_async.py deleted file mode 100644 index f5cb4d5a4477..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_sources_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDataSources -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-backupdr - - -# [START backupdr_v1_generated_BackupDR_ListDataSources_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import backupdr_v1 - - -async def sample_list_data_sources(): - # Create a client - client = backupdr_v1.BackupDRAsyncClient() - - # Initialize request argument(s) - request = backupdr_v1.ListDataSourcesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_sources(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END backupdr_v1_generated_BackupDR_ListDataSources_async] diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_sources_sync.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_sources_sync.py deleted file mode 100644 index 36680bf32e15..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_sources_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDataSources -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-backupdr - - -# [START backupdr_v1_generated_BackupDR_ListDataSources_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import backupdr_v1 - - -def sample_list_data_sources(): - # Create a client - client = backupdr_v1.BackupDRClient() - - # Initialize request argument(s) - request = backupdr_v1.ListDataSourcesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_sources(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END backupdr_v1_generated_BackupDR_ListDataSources_sync] diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_list_management_servers_async.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_list_management_servers_async.py deleted file mode 100644 index 2d3822150994..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_list_management_servers_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListManagementServers -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-backupdr - - -# [START backupdr_v1_generated_BackupDR_ListManagementServers_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import backupdr_v1 - - -async def sample_list_management_servers(): - # Create a client - client = backupdr_v1.BackupDRAsyncClient() - - # Initialize request argument(s) - request = backupdr_v1.ListManagementServersRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_management_servers(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END backupdr_v1_generated_BackupDR_ListManagementServers_async] diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_list_management_servers_sync.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_list_management_servers_sync.py deleted file mode 100644 index 73400b45d63e..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_list_management_servers_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListManagementServers -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-backupdr - - -# [START backupdr_v1_generated_BackupDR_ListManagementServers_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import backupdr_v1 - - -def sample_list_management_servers(): - # Create a client - client = backupdr_v1.BackupDRClient() - - # Initialize request argument(s) - request = backupdr_v1.ListManagementServersRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_management_servers(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END backupdr_v1_generated_BackupDR_ListManagementServers_sync] diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_restore_backup_async.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_restore_backup_async.py deleted file mode 100644 index 9bdfab3c21bc..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_restore_backup_async.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RestoreBackup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-backupdr - - -# [START backupdr_v1_generated_BackupDR_RestoreBackup_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import backupdr_v1 - - -async def sample_restore_backup(): - # Create a client - client = backupdr_v1.BackupDRAsyncClient() - - # Initialize request argument(s) - compute_instance_target_environment = backupdr_v1.ComputeInstanceTargetEnvironment() - compute_instance_target_environment.project = "project_value" - compute_instance_target_environment.zone = "zone_value" - - request = backupdr_v1.RestoreBackupRequest( - compute_instance_target_environment=compute_instance_target_environment, - name="name_value", - ) - - # Make the request - operation = client.restore_backup(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END backupdr_v1_generated_BackupDR_RestoreBackup_async] diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_restore_backup_sync.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_restore_backup_sync.py deleted file mode 100644 index 6b503fb4a546..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_restore_backup_sync.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RestoreBackup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-backupdr - - -# [START backupdr_v1_generated_BackupDR_RestoreBackup_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import backupdr_v1 - - -def sample_restore_backup(): - # Create a client - client = backupdr_v1.BackupDRClient() - - # Initialize request argument(s) - compute_instance_target_environment = backupdr_v1.ComputeInstanceTargetEnvironment() - compute_instance_target_environment.project = "project_value" - compute_instance_target_environment.zone = "zone_value" - - request = backupdr_v1.RestoreBackupRequest( - compute_instance_target_environment=compute_instance_target_environment, - name="name_value", - ) - - # Make the request - operation = client.restore_backup(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END backupdr_v1_generated_BackupDR_RestoreBackup_sync] diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_trigger_backup_async.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_trigger_backup_async.py deleted file mode 100644 index 6c6c641d54ee..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_trigger_backup_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for TriggerBackup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-backupdr - - -# [START backupdr_v1_generated_BackupDR_TriggerBackup_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import backupdr_v1 - - -async def sample_trigger_backup(): - # Create a client - client = backupdr_v1.BackupDRAsyncClient() - - # Initialize request argument(s) - request = backupdr_v1.TriggerBackupRequest( - name="name_value", - rule_id="rule_id_value", - ) - - # Make the request - operation = client.trigger_backup(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END backupdr_v1_generated_BackupDR_TriggerBackup_async] diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_trigger_backup_sync.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_trigger_backup_sync.py deleted file mode 100644 index 359727f2dd1c..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_trigger_backup_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for TriggerBackup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-backupdr - - -# [START backupdr_v1_generated_BackupDR_TriggerBackup_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import backupdr_v1 - - -def sample_trigger_backup(): - # Create a client - client = backupdr_v1.BackupDRClient() - - # Initialize request argument(s) - request = backupdr_v1.TriggerBackupRequest( - name="name_value", - rule_id="rule_id_value", - ) - - # Make the request - operation = client.trigger_backup(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END backupdr_v1_generated_BackupDR_TriggerBackup_sync] diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_async.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_async.py deleted file mode 100644 index a1a2fcc0ce51..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateBackup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-backupdr - - -# [START backupdr_v1_generated_BackupDR_UpdateBackup_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import backupdr_v1 - - -async def sample_update_backup(): - # Create a client - client = backupdr_v1.BackupDRAsyncClient() - - # Initialize request argument(s) - request = backupdr_v1.UpdateBackupRequest( - ) - - # Make the request - operation = client.update_backup(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END backupdr_v1_generated_BackupDR_UpdateBackup_async] diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_sync.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_sync.py deleted file mode 100644 index 9ea7e26404d0..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateBackup -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-backupdr - - -# [START backupdr_v1_generated_BackupDR_UpdateBackup_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import backupdr_v1 - - -def sample_update_backup(): - # Create a client - client = backupdr_v1.BackupDRClient() - - # Initialize request argument(s) - request = backupdr_v1.UpdateBackupRequest( - ) - - # Make the request - operation = client.update_backup(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END backupdr_v1_generated_BackupDR_UpdateBackup_sync] diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_vault_async.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_vault_async.py deleted file mode 100644 index 386f2ca872d3..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_vault_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateBackupVault -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-backupdr - - -# [START backupdr_v1_generated_BackupDR_UpdateBackupVault_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import backupdr_v1 - - -async def sample_update_backup_vault(): - # Create a client - client = backupdr_v1.BackupDRAsyncClient() - - # Initialize request argument(s) - request = backupdr_v1.UpdateBackupVaultRequest( - ) - - # Make the request - operation = client.update_backup_vault(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END backupdr_v1_generated_BackupDR_UpdateBackupVault_async] diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_vault_sync.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_vault_sync.py deleted file mode 100644 index ab3690e1df33..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_vault_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateBackupVault -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-backupdr - - -# [START backupdr_v1_generated_BackupDR_UpdateBackupVault_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import backupdr_v1 - - -def sample_update_backup_vault(): - # Create a client - client = backupdr_v1.BackupDRClient() - - # Initialize request argument(s) - request = backupdr_v1.UpdateBackupVaultRequest( - ) - - # Make the request - operation = client.update_backup_vault(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END backupdr_v1_generated_BackupDR_UpdateBackupVault_sync] diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_update_data_source_async.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_update_data_source_async.py deleted file mode 100644 index 986de214c53d..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_update_data_source_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDataSource -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-backupdr - - -# [START backupdr_v1_generated_BackupDR_UpdateDataSource_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import backupdr_v1 - - -async def sample_update_data_source(): - # Create a client - client = backupdr_v1.BackupDRAsyncClient() - - # Initialize request argument(s) - request = backupdr_v1.UpdateDataSourceRequest( - ) - - # Make the request - operation = client.update_data_source(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END backupdr_v1_generated_BackupDR_UpdateDataSource_async] diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_update_data_source_sync.py b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_update_data_source_sync.py deleted file mode 100644 index d20aa5d93848..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/backupdr_v1_generated_backup_dr_update_data_source_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDataSource -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-backupdr - - -# [START backupdr_v1_generated_BackupDR_UpdateDataSource_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import backupdr_v1 - - -def sample_update_data_source(): - # Create a client - client = backupdr_v1.BackupDRClient() - - # Initialize request argument(s) - request = backupdr_v1.UpdateDataSourceRequest( - ) - - # Make the request - operation = client.update_data_source(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END backupdr_v1_generated_BackupDR_UpdateDataSource_sync] diff --git a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json b/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json deleted file mode 100644 index 0f3972263529..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json +++ /dev/null @@ -1,4611 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.backupdr.v1", - "version": "v1" - } - ], - "language": "PYTHON", - "name": "google-cloud-backupdr", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", - "shortName": "BackupDRAsyncClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.create_backup_plan_association", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.CreateBackupPlanAssociation", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "CreateBackupPlanAssociation" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.CreateBackupPlanAssociationRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "backup_plan_association", - "type": "google.cloud.backupdr_v1.types.BackupPlanAssociation" - }, - { - "name": "backup_plan_association_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_backup_plan_association" - }, - "description": "Sample for CreateBackupPlanAssociation", - "file": "backupdr_v1_generated_backup_dr_create_backup_plan_association_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_CreateBackupPlanAssociation_async", - "segments": [ - { - "end": 62, - "start": 27, - "type": "FULL" - }, - { - "end": 62, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 52, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 59, - "start": 53, - "type": "REQUEST_EXECUTION" - }, - { - "end": 63, - "start": 60, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_create_backup_plan_association_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRClient", - "shortName": "BackupDRClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.create_backup_plan_association", - "method": { - 
"fullName": "google.cloud.backupdr.v1.BackupDR.CreateBackupPlanAssociation", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "CreateBackupPlanAssociation" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.CreateBackupPlanAssociationRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "backup_plan_association", - "type": "google.cloud.backupdr_v1.types.BackupPlanAssociation" - }, - { - "name": "backup_plan_association_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_backup_plan_association" - }, - "description": "Sample for CreateBackupPlanAssociation", - "file": "backupdr_v1_generated_backup_dr_create_backup_plan_association_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_CreateBackupPlanAssociation_sync", - "segments": [ - { - "end": 62, - "start": 27, - "type": "FULL" - }, - { - "end": 62, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 52, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 59, - "start": 53, - "type": "REQUEST_EXECUTION" - }, - { - "end": 63, - "start": 60, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_create_backup_plan_association_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", - "shortName": "BackupDRAsyncClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.create_backup_plan", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.CreateBackupPlan", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "CreateBackupPlan" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.CreateBackupPlanRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "backup_plan", - "type": "google.cloud.backupdr_v1.types.BackupPlan" - }, - { - "name": "backup_plan_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_backup_plan" - }, - "description": "Sample for CreateBackupPlan", - "file": "backupdr_v1_generated_backup_dr_create_backup_plan_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_CreateBackupPlan_async", - "segments": [ - { - "end": 67, - "start": 27, - "type": "FULL" - }, - { - "end": 67, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 57, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 64, - "start": 58, - "type": "REQUEST_EXECUTION" - }, - { - "end": 68, - "start": 65, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_create_backup_plan_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": 
"google.cloud.backupdr_v1.BackupDRClient", - "shortName": "BackupDRClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.create_backup_plan", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.CreateBackupPlan", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "CreateBackupPlan" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.CreateBackupPlanRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "backup_plan", - "type": "google.cloud.backupdr_v1.types.BackupPlan" - }, - { - "name": "backup_plan_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_backup_plan" - }, - "description": "Sample for CreateBackupPlan", - "file": "backupdr_v1_generated_backup_dr_create_backup_plan_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_CreateBackupPlan_sync", - "segments": [ - { - "end": 67, - "start": 27, - "type": "FULL" - }, - { - "end": 67, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 57, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 64, - "start": 58, - "type": "REQUEST_EXECUTION" - }, - { - "end": 68, - "start": 65, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_create_backup_plan_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", - "shortName": "BackupDRAsyncClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.create_backup_vault", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.CreateBackupVault", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "CreateBackupVault" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.CreateBackupVaultRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "backup_vault", - "type": "google.cloud.backupdr_v1.types.BackupVault" - }, - { - "name": "backup_vault_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_backup_vault" - }, - "description": "Sample for CreateBackupVault", - "file": "backupdr_v1_generated_backup_dr_create_backup_vault_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_CreateBackupVault_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_create_backup_vault_async.py" - }, - { - "canonical": 
true, - "clientMethod": { - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRClient", - "shortName": "BackupDRClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.create_backup_vault", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.CreateBackupVault", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "CreateBackupVault" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.CreateBackupVaultRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "backup_vault", - "type": "google.cloud.backupdr_v1.types.BackupVault" - }, - { - "name": "backup_vault_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_backup_vault" - }, - "description": "Sample for CreateBackupVault", - "file": "backupdr_v1_generated_backup_dr_create_backup_vault_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_CreateBackupVault_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_create_backup_vault_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", - "shortName": "BackupDRAsyncClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.create_management_server", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.CreateManagementServer", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "CreateManagementServer" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.CreateManagementServerRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "management_server", - "type": "google.cloud.backupdr_v1.types.ManagementServer" - }, - { - "name": "management_server_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_management_server" - }, - "description": "Sample for CreateManagementServer", - "file": "backupdr_v1_generated_backup_dr_create_management_server_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_CreateManagementServer_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": 
"RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_create_management_server_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRClient", - "shortName": "BackupDRClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.create_management_server", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.CreateManagementServer", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "CreateManagementServer" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.CreateManagementServerRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "management_server", - "type": "google.cloud.backupdr_v1.types.ManagementServer" - }, - { - "name": "management_server_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_management_server" - }, - "description": "Sample for CreateManagementServer", - "file": "backupdr_v1_generated_backup_dr_create_management_server_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_CreateManagementServer_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_create_management_server_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", - "shortName": "BackupDRAsyncClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.delete_backup_plan_association", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteBackupPlanAssociation", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "DeleteBackupPlanAssociation" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.DeleteBackupPlanAssociationRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_backup_plan_association" - }, - "description": "Sample for DeleteBackupPlanAssociation", - "file": "backupdr_v1_generated_backup_dr_delete_backup_plan_association_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_DeleteBackupPlanAssociation_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": 
"REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_delete_backup_plan_association_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRClient", - "shortName": "BackupDRClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.delete_backup_plan_association", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteBackupPlanAssociation", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "DeleteBackupPlanAssociation" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.DeleteBackupPlanAssociationRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_backup_plan_association" - }, - "description": "Sample for DeleteBackupPlanAssociation", - "file": "backupdr_v1_generated_backup_dr_delete_backup_plan_association_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_DeleteBackupPlanAssociation_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_delete_backup_plan_association_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", - "shortName": "BackupDRAsyncClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.delete_backup_plan", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteBackupPlan", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "DeleteBackupPlan" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.DeleteBackupPlanRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_backup_plan" - }, - "description": "Sample for DeleteBackupPlan", - "file": "backupdr_v1_generated_backup_dr_delete_backup_plan_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_DeleteBackupPlan_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"backupdr_v1_generated_backup_dr_delete_backup_plan_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRClient", - "shortName": "BackupDRClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.delete_backup_plan", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteBackupPlan", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "DeleteBackupPlan" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.DeleteBackupPlanRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_backup_plan" - }, - "description": "Sample for DeleteBackupPlan", - "file": "backupdr_v1_generated_backup_dr_delete_backup_plan_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_DeleteBackupPlan_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_delete_backup_plan_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", - "shortName": "BackupDRAsyncClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.delete_backup_vault", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteBackupVault", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "DeleteBackupVault" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.DeleteBackupVaultRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_backup_vault" - }, - "description": "Sample for DeleteBackupVault", - "file": "backupdr_v1_generated_backup_dr_delete_backup_vault_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_DeleteBackupVault_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_delete_backup_vault_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRClient", - "shortName": "BackupDRClient" - }, - "fullName": 
"google.cloud.backupdr_v1.BackupDRClient.delete_backup_vault", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteBackupVault", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "DeleteBackupVault" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.DeleteBackupVaultRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_backup_vault" - }, - "description": "Sample for DeleteBackupVault", - "file": "backupdr_v1_generated_backup_dr_delete_backup_vault_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_DeleteBackupVault_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_delete_backup_vault_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", - "shortName": "BackupDRAsyncClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.delete_backup", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteBackup", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "DeleteBackup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.DeleteBackupRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_backup" - }, - "description": "Sample for DeleteBackup", - "file": "backupdr_v1_generated_backup_dr_delete_backup_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_DeleteBackup_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_delete_backup_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRClient", - "shortName": "BackupDRClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.delete_backup", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteBackup", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "DeleteBackup" - }, - 
"parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.DeleteBackupRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_backup" - }, - "description": "Sample for DeleteBackup", - "file": "backupdr_v1_generated_backup_dr_delete_backup_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_DeleteBackup_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_delete_backup_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", - "shortName": "BackupDRAsyncClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.delete_management_server", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteManagementServer", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "DeleteManagementServer" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.DeleteManagementServerRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_management_server" - }, - "description": "Sample for DeleteManagementServer", - "file": "backupdr_v1_generated_backup_dr_delete_management_server_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_DeleteManagementServer_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_delete_management_server_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRClient", - "shortName": "BackupDRClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.delete_management_server", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.DeleteManagementServer", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "DeleteManagementServer" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.DeleteManagementServerRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": 
"google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_management_server" - }, - "description": "Sample for DeleteManagementServer", - "file": "backupdr_v1_generated_backup_dr_delete_management_server_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_DeleteManagementServer_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_delete_management_server_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", - "shortName": "BackupDRAsyncClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.fetch_usable_backup_vaults", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.FetchUsableBackupVaults", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "FetchUsableBackupVaults" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.FetchUsableBackupVaultsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.FetchUsableBackupVaultsAsyncPager", - "shortName": "fetch_usable_backup_vaults" - }, - "description": "Sample for FetchUsableBackupVaults", - "file": "backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_FetchUsableBackupVaults_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRClient", - "shortName": "BackupDRClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.fetch_usable_backup_vaults", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.FetchUsableBackupVaults", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "FetchUsableBackupVaults" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.FetchUsableBackupVaultsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": 
"float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.FetchUsableBackupVaultsPager", - "shortName": "fetch_usable_backup_vaults" - }, - "description": "Sample for FetchUsableBackupVaults", - "file": "backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_FetchUsableBackupVaults_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", - "shortName": "BackupDRAsyncClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_backup_plan_association", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackupPlanAssociation", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "GetBackupPlanAssociation" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.GetBackupPlanAssociationRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.backupdr_v1.types.BackupPlanAssociation", - "shortName": "get_backup_plan_association" - }, - "description": "Sample for GetBackupPlanAssociation", - "file": "backupdr_v1_generated_backup_dr_get_backup_plan_association_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_GetBackupPlanAssociation_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_get_backup_plan_association_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRClient", - "shortName": "BackupDRClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_backup_plan_association", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackupPlanAssociation", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "GetBackupPlanAssociation" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.GetBackupPlanAssociationRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - 
"type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.backupdr_v1.types.BackupPlanAssociation", - "shortName": "get_backup_plan_association" - }, - "description": "Sample for GetBackupPlanAssociation", - "file": "backupdr_v1_generated_backup_dr_get_backup_plan_association_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_GetBackupPlanAssociation_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_get_backup_plan_association_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", - "shortName": "BackupDRAsyncClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_backup_plan", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackupPlan", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "GetBackupPlan" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.GetBackupPlanRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.backupdr_v1.types.BackupPlan", - "shortName": "get_backup_plan" - }, - "description": "Sample for GetBackupPlan", - "file": "backupdr_v1_generated_backup_dr_get_backup_plan_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_GetBackupPlan_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_get_backup_plan_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRClient", - "shortName": "BackupDRClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_backup_plan", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackupPlan", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "GetBackupPlan" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.GetBackupPlanRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.backupdr_v1.types.BackupPlan", - "shortName": "get_backup_plan" - }, - "description": "Sample for GetBackupPlan", - "file": 
"backupdr_v1_generated_backup_dr_get_backup_plan_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_GetBackupPlan_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_get_backup_plan_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", - "shortName": "BackupDRAsyncClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_backup_vault", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackupVault", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "GetBackupVault" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.GetBackupVaultRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.backupdr_v1.types.BackupVault", - "shortName": "get_backup_vault" - }, - "description": "Sample for GetBackupVault", - "file": "backupdr_v1_generated_backup_dr_get_backup_vault_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_GetBackupVault_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_get_backup_vault_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRClient", - "shortName": "BackupDRClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_backup_vault", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackupVault", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "GetBackupVault" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.GetBackupVaultRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.backupdr_v1.types.BackupVault", - "shortName": "get_backup_vault" - }, - "description": "Sample for GetBackupVault", - "file": "backupdr_v1_generated_backup_dr_get_backup_vault_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_GetBackupVault_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 
27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_get_backup_vault_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", - "shortName": "BackupDRAsyncClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_backup", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackup", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "GetBackup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.GetBackupRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.backupdr_v1.types.Backup", - "shortName": "get_backup" - }, - "description": "Sample for GetBackup", - "file": "backupdr_v1_generated_backup_dr_get_backup_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_GetBackup_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_get_backup_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRClient", - "shortName": "BackupDRClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_backup", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackup", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "GetBackup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.GetBackupRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.backupdr_v1.types.Backup", - "shortName": "get_backup" - }, - "description": "Sample for GetBackup", - "file": "backupdr_v1_generated_backup_dr_get_backup_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_GetBackup_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_get_backup_sync.py" - }, - 
{ - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", - "shortName": "BackupDRAsyncClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_data_source", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.GetDataSource", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "GetDataSource" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.GetDataSourceRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.backupdr_v1.types.DataSource", - "shortName": "get_data_source" - }, - "description": "Sample for GetDataSource", - "file": "backupdr_v1_generated_backup_dr_get_data_source_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_GetDataSource_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_get_data_source_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRClient", - "shortName": "BackupDRClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_data_source", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.GetDataSource", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "GetDataSource" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.GetDataSourceRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.backupdr_v1.types.DataSource", - "shortName": "get_data_source" - }, - "description": "Sample for GetDataSource", - "file": "backupdr_v1_generated_backup_dr_get_data_source_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_GetDataSource_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_get_data_source_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", - "shortName": "BackupDRAsyncClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_management_server", - "method": { - "fullName": 
"google.cloud.backupdr.v1.BackupDR.GetManagementServer", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "GetManagementServer" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.GetManagementServerRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.backupdr_v1.types.ManagementServer", - "shortName": "get_management_server" - }, - "description": "Sample for GetManagementServer", - "file": "backupdr_v1_generated_backup_dr_get_management_server_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_GetManagementServer_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_get_management_server_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRClient", - "shortName": "BackupDRClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_management_server", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.GetManagementServer", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "GetManagementServer" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.GetManagementServerRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.backupdr_v1.types.ManagementServer", - "shortName": "get_management_server" - }, - "description": "Sample for GetManagementServer", - "file": "backupdr_v1_generated_backup_dr_get_management_server_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_GetManagementServer_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_get_management_server_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", - "shortName": "BackupDRAsyncClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.initialize_service", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.InitializeService", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": 
"InitializeService" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.InitializeServiceRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "initialize_service" - }, - "description": "Sample for InitializeService", - "file": "backupdr_v1_generated_backup_dr_initialize_service_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_InitializeService_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_initialize_service_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRClient", - "shortName": "BackupDRClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.initialize_service", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.InitializeService", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "InitializeService" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.InitializeServiceRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "initialize_service" - }, - "description": "Sample for InitializeService", - "file": "backupdr_v1_generated_backup_dr_initialize_service_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_InitializeService_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_initialize_service_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", - "shortName": "BackupDRAsyncClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_backup_plan_associations", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackupPlanAssociations", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "ListBackupPlanAssociations" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.ListBackupPlanAssociationsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - 
"type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlanAssociationsAsyncPager", - "shortName": "list_backup_plan_associations" - }, - "description": "Sample for ListBackupPlanAssociations", - "file": "backupdr_v1_generated_backup_dr_list_backup_plan_associations_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_ListBackupPlanAssociations_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_list_backup_plan_associations_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRClient", - "shortName": "BackupDRClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_backup_plan_associations", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackupPlanAssociations", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "ListBackupPlanAssociations" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.ListBackupPlanAssociationsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlanAssociationsPager", - "shortName": "list_backup_plan_associations" - }, - "description": "Sample for ListBackupPlanAssociations", - "file": "backupdr_v1_generated_backup_dr_list_backup_plan_associations_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_ListBackupPlanAssociations_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_list_backup_plan_associations_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", - "shortName": "BackupDRAsyncClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_backup_plans", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackupPlans", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "ListBackupPlans" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.ListBackupPlansRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": 
"float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlansAsyncPager", - "shortName": "list_backup_plans" - }, - "description": "Sample for ListBackupPlans", - "file": "backupdr_v1_generated_backup_dr_list_backup_plans_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_ListBackupPlans_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_list_backup_plans_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRClient", - "shortName": "BackupDRClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_backup_plans", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackupPlans", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "ListBackupPlans" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.ListBackupPlansRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlansPager", - "shortName": "list_backup_plans" - }, - "description": "Sample for ListBackupPlans", - "file": "backupdr_v1_generated_backup_dr_list_backup_plans_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_ListBackupPlans_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_list_backup_plans_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", - "shortName": "BackupDRAsyncClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_backup_vaults", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackupVaults", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "ListBackupVaults" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.ListBackupVaultsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": 
"google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupVaultsAsyncPager", - "shortName": "list_backup_vaults" - }, - "description": "Sample for ListBackupVaults", - "file": "backupdr_v1_generated_backup_dr_list_backup_vaults_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_ListBackupVaults_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_list_backup_vaults_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRClient", - "shortName": "BackupDRClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_backup_vaults", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackupVaults", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "ListBackupVaults" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.ListBackupVaultsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupVaultsPager", - "shortName": "list_backup_vaults" - }, - "description": "Sample for ListBackupVaults", - "file": "backupdr_v1_generated_backup_dr_list_backup_vaults_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_ListBackupVaults_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_list_backup_vaults_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", - "shortName": "BackupDRAsyncClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_backups", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackups", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "ListBackups" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.ListBackupsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupsAsyncPager", - "shortName": "list_backups" - }, - "description": "Sample for ListBackups", - "file": 
"backupdr_v1_generated_backup_dr_list_backups_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_ListBackups_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_list_backups_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRClient", - "shortName": "BackupDRClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_backups", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackups", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "ListBackups" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.ListBackupsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupsPager", - "shortName": "list_backups" - }, - "description": "Sample for ListBackups", - "file": "backupdr_v1_generated_backup_dr_list_backups_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_ListBackups_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_list_backups_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", - "shortName": "BackupDRAsyncClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_data_sources", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.ListDataSources", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "ListDataSources" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.ListDataSourcesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListDataSourcesAsyncPager", - "shortName": "list_data_sources" - }, - "description": "Sample for ListDataSources", - "file": "backupdr_v1_generated_backup_dr_list_data_sources_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_ListDataSources_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": 
"FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_list_data_sources_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRClient", - "shortName": "BackupDRClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_data_sources", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.ListDataSources", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "ListDataSources" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.ListDataSourcesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListDataSourcesPager", - "shortName": "list_data_sources" - }, - "description": "Sample for ListDataSources", - "file": "backupdr_v1_generated_backup_dr_list_data_sources_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_ListDataSources_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_list_data_sources_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", - "shortName": "BackupDRAsyncClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_management_servers", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.ListManagementServers", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "ListManagementServers" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.ListManagementServersRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListManagementServersAsyncPager", - "shortName": "list_management_servers" - }, - "description": "Sample for ListManagementServers", - "file": "backupdr_v1_generated_backup_dr_list_management_servers_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_ListManagementServers_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, 
- { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_list_management_servers_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRClient", - "shortName": "BackupDRClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_management_servers", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.ListManagementServers", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "ListManagementServers" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.ListManagementServersRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListManagementServersPager", - "shortName": "list_management_servers" - }, - "description": "Sample for ListManagementServers", - "file": "backupdr_v1_generated_backup_dr_list_management_servers_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_ListManagementServers_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_list_management_servers_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", - "shortName": "BackupDRAsyncClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.restore_backup", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.RestoreBackup", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "RestoreBackup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.RestoreBackupRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "restore_backup" - }, - "description": "Sample for RestoreBackup", - "file": "backupdr_v1_generated_backup_dr_restore_backup_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_RestoreBackup_async", - "segments": [ - { - "end": 60, - "start": 27, - "type": "FULL" - }, - { - "end": 60, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 57, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 61, - "start": 58, - 
"type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_restore_backup_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRClient", - "shortName": "BackupDRClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.restore_backup", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.RestoreBackup", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "RestoreBackup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.RestoreBackupRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "restore_backup" - }, - "description": "Sample for RestoreBackup", - "file": "backupdr_v1_generated_backup_dr_restore_backup_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_RestoreBackup_sync", - "segments": [ - { - "end": 60, - "start": 27, - "type": "FULL" - }, - { - "end": 60, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 57, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 61, - "start": 58, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_restore_backup_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", - "shortName": "BackupDRAsyncClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.trigger_backup", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.TriggerBackup", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "TriggerBackup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.TriggerBackupRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "rule_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "trigger_backup" - }, - "description": "Sample for TriggerBackup", - "file": "backupdr_v1_generated_backup_dr_trigger_backup_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_TriggerBackup_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_trigger_backup_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRClient", - "shortName": "BackupDRClient" - 
}, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.trigger_backup", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.TriggerBackup", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "TriggerBackup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.TriggerBackupRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "rule_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "trigger_backup" - }, - "description": "Sample for TriggerBackup", - "file": "backupdr_v1_generated_backup_dr_trigger_backup_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_TriggerBackup_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_trigger_backup_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", - "shortName": "BackupDRAsyncClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.update_backup_vault", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.UpdateBackupVault", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "UpdateBackupVault" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.UpdateBackupVaultRequest" - }, - { - "name": "backup_vault", - "type": "google.cloud.backupdr_v1.types.BackupVault" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_backup_vault" - }, - "description": "Sample for UpdateBackupVault", - "file": "backupdr_v1_generated_backup_dr_update_backup_vault_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_UpdateBackupVault_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_update_backup_vault_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRClient", - "shortName": "BackupDRClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.update_backup_vault", - 
"method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.UpdateBackupVault", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "UpdateBackupVault" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.UpdateBackupVaultRequest" - }, - { - "name": "backup_vault", - "type": "google.cloud.backupdr_v1.types.BackupVault" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_backup_vault" - }, - "description": "Sample for UpdateBackupVault", - "file": "backupdr_v1_generated_backup_dr_update_backup_vault_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_UpdateBackupVault_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_update_backup_vault_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", - "shortName": "BackupDRAsyncClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.update_backup", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.UpdateBackup", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "UpdateBackup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.UpdateBackupRequest" - }, - { - "name": "backup", - "type": "google.cloud.backupdr_v1.types.Backup" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_backup" - }, - "description": "Sample for UpdateBackup", - "file": "backupdr_v1_generated_backup_dr_update_backup_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_UpdateBackup_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_update_backup_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRClient", - "shortName": "BackupDRClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.update_backup", - "method": { - 
"fullName": "google.cloud.backupdr.v1.BackupDR.UpdateBackup", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "UpdateBackup" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.UpdateBackupRequest" - }, - { - "name": "backup", - "type": "google.cloud.backupdr_v1.types.Backup" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_backup" - }, - "description": "Sample for UpdateBackup", - "file": "backupdr_v1_generated_backup_dr_update_backup_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_UpdateBackup_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_update_backup_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", - "shortName": "BackupDRAsyncClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.update_data_source", - "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.UpdateDataSource", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "UpdateDataSource" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.UpdateDataSourceRequest" - }, - { - "name": "data_source", - "type": "google.cloud.backupdr_v1.types.DataSource" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_data_source" - }, - "description": "Sample for UpdateDataSource", - "file": "backupdr_v1_generated_backup_dr_update_data_source_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_UpdateDataSource_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_update_data_source_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.backupdr_v1.BackupDRClient", - "shortName": "BackupDRClient" - }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.update_data_source", - "method": { - "fullName": 
"google.cloud.backupdr.v1.BackupDR.UpdateDataSource", - "service": { - "fullName": "google.cloud.backupdr.v1.BackupDR", - "shortName": "BackupDR" - }, - "shortName": "UpdateDataSource" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.backupdr_v1.types.UpdateDataSourceRequest" - }, - { - "name": "data_source", - "type": "google.cloud.backupdr_v1.types.DataSource" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_data_source" - }, - "description": "Sample for UpdateDataSource", - "file": "backupdr_v1_generated_backup_dr_update_data_source_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_UpdateDataSource_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "backupdr_v1_generated_backup_dr_update_data_source_sync.py" - } - ] -} diff --git a/owl-bot-staging/google-cloud-backupdr/v1/scripts/fixup_backupdr_v1_keywords.py b/owl-bot-staging/google-cloud-backupdr/v1/scripts/fixup_backupdr_v1_keywords.py deleted file mode 100644 index a3de29cabf96..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/scripts/fixup_backupdr_v1_keywords.py +++ /dev/null @@ -1,203 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class backupdrCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_backup_plan': ('parent', 'backup_plan_id', 'backup_plan', 'request_id', ), - 'create_backup_plan_association': ('parent', 'backup_plan_association_id', 'backup_plan_association', 'request_id', ), - 'create_backup_vault': ('parent', 'backup_vault_id', 'backup_vault', 'request_id', 'validate_only', ), - 'create_management_server': ('parent', 'management_server_id', 'management_server', 'request_id', ), - 'delete_backup': ('name', 'request_id', ), - 'delete_backup_plan': ('name', 'request_id', ), - 'delete_backup_plan_association': ('name', 'request_id', ), - 'delete_backup_vault': ('name', 'request_id', 'force', 'etag', 'validate_only', 'allow_missing', 'ignore_backup_plan_references', ), - 'delete_management_server': ('name', 'request_id', ), - 'fetch_usable_backup_vaults': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'get_backup': ('name', 'view', ), - 'get_backup_plan': ('name', ), - 'get_backup_plan_association': ('name', ), - 'get_backup_vault': ('name', 'view', ), - 'get_data_source': ('name', ), - 'get_management_server': ('name', ), - 'initialize_service': ('name', 'resource_type', 'request_id', ), - 'list_backup_plan_associations': ('parent', 'page_size', 'page_token', 'filter', ), - 'list_backup_plans': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'list_backups': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'view', ), - 'list_backup_vaults': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'view', ), - 'list_data_sources': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'list_management_servers': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'restore_backup': ('name', 'request_id', 'compute_instance_target_environment', 'compute_instance_restore_properties', ), - 'trigger_backup': ('name', 'rule_id', 'request_id', ), - 'update_backup': ('update_mask', 'backup', 'request_id', ), - 'update_backup_vault': ('update_mask', 'backup_vault', 'request_id', 'validate_only', 'force', ), - 'update_data_source': ('update_mask', 'data_source', 'request_id', 'allow_missing', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. 
- return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=backupdrCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the backupdr client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. 
-""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-backupdr/v1/setup.py b/owl-bot-staging/google-cloud-backupdr/v1/setup.py deleted file mode 100644 index 09d7f70e2d38..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/setup.py +++ /dev/null @@ -1,99 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-backupdr' - - -description = "Google Cloud Backupdr API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/backupdr/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "grpc-google-iam-v1 >= 0.14.0, <1.0.0dev", -] -extras = { -} -url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-backupdr" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - extras_require=extras, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/google-cloud-backupdr/v1/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-backupdr/v1/testing/constraints-3.10.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/testing/constraints-3.10.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-backupdr/v1/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-backupdr/v1/testing/constraints-3.11.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/testing/constraints-3.11.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. 
-# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-backupdr/v1/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-backupdr/v1/testing/constraints-3.12.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/testing/constraints-3.12.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-backupdr/v1/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-backupdr/v1/testing/constraints-3.13.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/testing/constraints-3.13.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-backupdr/v1/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-backupdr/v1/testing/constraints-3.7.txt deleted file mode 100644 index fb7e93a1b473..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/testing/constraints-3.7.txt +++ /dev/null @@ -1,11 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -proto-plus==1.22.3 -protobuf==3.20.2 -grpc-google-iam-v1==0.14.0 diff --git a/owl-bot-staging/google-cloud-backupdr/v1/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-backupdr/v1/testing/constraints-3.8.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/testing/constraints-3.8.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-backupdr/v1/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-backupdr/v1/testing/constraints-3.9.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/testing/constraints-3.9.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-backupdr/v1/tests/__init__.py b/owl-bot-staging/google-cloud-backupdr/v1/tests/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-backupdr/v1/tests/unit/__init__.py b/owl-bot-staging/google-cloud-backupdr/v1/tests/unit/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-backupdr/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-backupdr/v1/tests/unit/gapic/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-backupdr/v1/tests/unit/gapic/backupdr_v1/__init__.py b/owl-bot-staging/google-cloud-backupdr/v1/tests/unit/gapic/backupdr_v1/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/tests/unit/gapic/backupdr_v1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
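One detail of the deleted ``setup.py`` worth spelling out: the package version is not hard-coded but scraped from ``google/cloud/backupdr/gapic_version.py`` with the lookbehind/lookahead pattern ``(?<=\")\d+.\d+.\d+(?=\")`` (the dots are unescaped, so they match any character, which is harmless for well-formed version strings). A small self-contained check, assuming the usual ``__version__ = "0.1.0"`` shape of a generated ``gapic_version.py``:

.. code-block:: python

    import re

    # Representative content of a generated gapic_version.py (illustrative value).
    gapic_version_py = '__version__ = "0.1.0"  # {x-release-please-version}'

    # The same pattern setup.py uses: digit groups between double quotes.
    version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", gapic_version_py)
    assert len(version_candidates) == 1
    print(version_candidates[0])  # -> 0.1.0

A version whose first character is ``0`` then maps to the ``Development Status :: 4 - Beta`` classifier; anything else maps to ``5 - Production/Stable``.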
-# diff --git a/owl-bot-staging/google-cloud-backupdr/v1/tests/unit/gapic/backupdr_v1/test_backup_dr.py b/owl-bot-staging/google-cloud-backupdr/v1/tests/unit/gapic/backupdr_v1/test_backup_dr.py deleted file mode 100644 index 58b00dc341ab..000000000000 --- a/owl-bot-staging/google-cloud-backupdr/v1/tests/unit/gapic/backupdr_v1/test_backup_dr.py +++ /dev/null @@ -1,24559 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation -from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.backupdr_v1.services.backup_dr import BackupDRAsyncClient -from google.cloud.backupdr_v1.services.backup_dr import BackupDRClient -from google.cloud.backupdr_v1.services.backup_dr import pagers -from google.cloud.backupdr_v1.services.backup_dr import transports -from google.cloud.backupdr_v1.types import backupdr -from google.cloud.backupdr_v1.types import backupplan -from google.cloud.backupdr_v1.types import backupplanassociation -from google.cloud.backupdr_v1.types import backupvault -from google.cloud.backupdr_v1.types import backupvault_ba -from google.cloud.backupdr_v1.types import backupvault_gce -from google.cloud.location import locations_pb2 -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import any_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore -from 
google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.protobuf import wrappers_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -from google.type import dayofweek_pb2 # type: ignore -from google.type import month_pb2 # type: ignore -import google.auth - - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": "service account credentials", - "principal": "service-account@example.com", -} -CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert BackupDRClient._get_default_mtls_endpoint(None) is None - assert BackupDRClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert BackupDRClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert BackupDRClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert BackupDRClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert BackupDRClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert BackupDRClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert BackupDRClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert BackupDRClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - BackupDRClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert BackupDRClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert BackupDRClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert BackupDRClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - BackupDRClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert BackupDRClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert BackupDRClient._get_client_cert_source(None, False) is None - assert BackupDRClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert BackupDRClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert BackupDRClient._get_client_cert_source(None, True) is mock_default_cert_source - assert BackupDRClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(BackupDRClient, 
"_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BackupDRClient)) -@mock.patch.object(BackupDRAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BackupDRAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = BackupDRClient._DEFAULT_UNIVERSE - default_endpoint = BackupDRClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = BackupDRClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert BackupDRClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert BackupDRClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == BackupDRClient.DEFAULT_MTLS_ENDPOINT - assert BackupDRClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert BackupDRClient._get_api_endpoint(None, None, default_universe, "always") == BackupDRClient.DEFAULT_MTLS_ENDPOINT - assert BackupDRClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == BackupDRClient.DEFAULT_MTLS_ENDPOINT - assert BackupDRClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert BackupDRClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - BackupDRClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." - - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert BackupDRClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert BackupDRClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert BackupDRClient._get_universe_domain(None, None) == BackupDRClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - BackupDRClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
- -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = BackupDRClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - -@pytest.mark.parametrize("error_code", [401,403,404,500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = BackupDRClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - -@pytest.mark.parametrize("client_class,transport_name", [ - (BackupDRClient, "grpc"), - (BackupDRAsyncClient, "grpc_asyncio"), - (BackupDRClient, "rest"), -]) -def test_backup_dr_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'backupdr.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://backupdr.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.BackupDRGrpcTransport, "grpc"), - (transports.BackupDRGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.BackupDRRestTransport, "rest"), -]) -def test_backup_dr_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (BackupDRClient, "grpc"), - (BackupDRAsyncClient, "grpc_asyncio"), - (BackupDRClient, "rest"), -]) -def test_backup_dr_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - 
client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'backupdr.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://backupdr.googleapis.com' - ) - - -def test_backup_dr_client_get_transport_class(): - transport = BackupDRClient.get_transport_class() - available_transports = [ - transports.BackupDRGrpcTransport, - transports.BackupDRRestTransport, - ] - assert transport in available_transports - - transport = BackupDRClient.get_transport_class("grpc") - assert transport == transports.BackupDRGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (BackupDRClient, transports.BackupDRGrpcTransport, "grpc"), - (BackupDRAsyncClient, transports.BackupDRGrpcAsyncIOTransport, "grpc_asyncio"), - (BackupDRClient, transports.BackupDRRestTransport, "rest"), -]) -@mock.patch.object(BackupDRClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BackupDRClient)) -@mock.patch.object(BackupDRAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BackupDRAsyncClient)) -def test_backup_dr_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(BackupDRClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(BackupDRClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (BackupDRClient, transports.BackupDRGrpcTransport, "grpc", "true"), - (BackupDRAsyncClient, transports.BackupDRGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (BackupDRClient, transports.BackupDRGrpcTransport, "grpc", "false"), - (BackupDRAsyncClient, transports.BackupDRGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (BackupDRClient, transports.BackupDRRestTransport, "rest", "true"), - (BackupDRClient, transports.BackupDRRestTransport, "rest", "false"), -]) -@mock.patch.object(BackupDRClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BackupDRClient)) -@mock.patch.object(BackupDRAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BackupDRAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def 
test_backup_dr_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - BackupDRClient, BackupDRAsyncClient -]) -@mock.patch.object(BackupDRClient, "DEFAULT_ENDPOINT", modify_default_endpoint(BackupDRClient)) -@mock.patch.object(BackupDRAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(BackupDRAsyncClient)) -def test_backup_dr_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - BackupDRClient, BackupDRAsyncClient -]) -@mock.patch.object(BackupDRClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BackupDRClient)) -@mock.patch.object(BackupDRAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BackupDRAsyncClient)) -def test_backup_dr_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = BackupDRClient._DEFAULT_UNIVERSE - default_endpoint = BackupDRClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = BackupDRClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (BackupDRClient, transports.BackupDRGrpcTransport, "grpc"), - (BackupDRAsyncClient, transports.BackupDRGrpcAsyncIOTransport, "grpc_asyncio"), - (BackupDRClient, transports.BackupDRRestTransport, "rest"), -]) -def test_backup_dr_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (BackupDRClient, transports.BackupDRGrpcTransport, "grpc", grpc_helpers), - (BackupDRAsyncClient, transports.BackupDRGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (BackupDRClient, transports.BackupDRRestTransport, "rest", None), -]) -def test_backup_dr_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_backup_dr_client_client_options_from_dict(): - with mock.patch('google.cloud.backupdr_v1.services.backup_dr.transports.BackupDRGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = BackupDRClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (BackupDRClient, transports.BackupDRGrpcTransport, "grpc", grpc_helpers), - (BackupDRAsyncClient, transports.BackupDRGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_backup_dr_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
-    with mock.patch.object(
-        google.auth, "load_credentials_from_file", autospec=True
-    ) as load_creds, mock.patch.object(
-        google.auth, "default", autospec=True
-    ) as adc, mock.patch.object(
-        grpc_helpers, "create_channel"
-    ) as create_channel:
-        creds = ga_credentials.AnonymousCredentials()
-        file_creds = ga_credentials.AnonymousCredentials()
-        load_creds.return_value = (file_creds, None)
-        adc.return_value = (creds, None)
-        client = client_class(client_options=options, transport=transport_name)
-        create_channel.assert_called_with(
-            "backupdr.googleapis.com:443",
-            credentials=file_creds,
-            credentials_file=None,
-            quota_project_id=None,
-            default_scopes=(
-                'https://www.googleapis.com/auth/cloud-platform',
-            ),
-            scopes=None,
-            default_host="backupdr.googleapis.com",
-            ssl_credentials=None,
-            options=[
-                ("grpc.max_send_message_length", -1),
-                ("grpc.max_receive_message_length", -1),
-            ],
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    backupdr.ListManagementServersRequest,
-    dict,
-])
-def test_list_management_servers(request_type, transport: str = 'grpc'):
-    client = BackupDRClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_management_servers),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = backupdr.ListManagementServersResponse(
-            next_page_token='next_page_token_value',
-            unreachable=['unreachable_value'],
-        )
-        response = client.list_management_servers(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = backupdr.ListManagementServersRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListManagementServersPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable == ['unreachable_value']
-
-
-def test_list_management_servers_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = BackupDRClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = backupdr.ListManagementServersRequest(
-        parent='parent_value',
-        page_token='page_token_value',
-        filter='filter_value',
-        order_by='order_by_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_management_servers),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client.list_management_servers(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backupdr.ListManagementServersRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - -def test_list_management_servers_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_management_servers in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_management_servers] = mock_rpc - request = {} - client.list_management_servers(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_management_servers(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_management_servers_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_management_servers in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_management_servers] = mock_rpc - - request = {} - await client.list_management_servers(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_management_servers(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_management_servers_async(transport: str = 'grpc_asyncio', request_type=backupdr.ListManagementServersRequest): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_management_servers), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupdr.ListManagementServersResponse(
-            next_page_token='next_page_token_value',
-            unreachable=['unreachable_value'],
-        ))
-        response = await client.list_management_servers(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = backupdr.ListManagementServersRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListManagementServersAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable == ['unreachable_value']
-
-
-@pytest.mark.asyncio
-async def test_list_management_servers_async_from_dict():
-    await test_list_management_servers_async(request_type=dict)
-
-def test_list_management_servers_field_headers():
-    client = BackupDRClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = backupdr.ListManagementServersRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_management_servers),
-            '__call__') as call:
-        call.return_value = backupdr.ListManagementServersResponse()
-        client.list_management_servers(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_management_servers_field_headers_async():
-    client = BackupDRAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = backupdr.ListManagementServersRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_management_servers),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupdr.ListManagementServersResponse())
-        await client.list_management_servers(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_list_management_servers_flattened():
-    client = BackupDRClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_management_servers),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = backupdr.ListManagementServersResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_management_servers(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_management_servers_flattened_error():
-    client = BackupDRClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_management_servers(
-            backupdr.ListManagementServersRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_management_servers_flattened_async():
-    client = BackupDRAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_management_servers),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupdr.ListManagementServersResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_management_servers(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_management_servers_flattened_error_async():
-    client = BackupDRAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_management_servers(
-            backupdr.ListManagementServersRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_management_servers_pager(transport_name: str = "grpc"):
-    client = BackupDRClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_management_servers),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            backupdr.ListManagementServersResponse(
-                management_servers=[
-                    backupdr.ManagementServer(),
-                    backupdr.ManagementServer(),
-                    backupdr.ManagementServer(),
-                ],
-                next_page_token='abc',
-            ),
-            backupdr.ListManagementServersResponse(
-                management_servers=[],
-                next_page_token='def',
-            ),
-            backupdr.ListManagementServersResponse(
-                management_servers=[
-                    backupdr.ManagementServer(),
-                ],
-                next_page_token='ghi',
-            ),
-            backupdr.ListManagementServersResponse(
-                management_servers=[
-                    backupdr.ManagementServer(),
-                    backupdr.ManagementServer(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        retry = retries.Retry()
-        timeout = 5
-        expected_metadata = (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('parent', ''),
-            )),
-        )
-        pager = client.list_management_servers(request={}, retry=retry, timeout=timeout)
-
-        assert pager._metadata == expected_metadata
-        assert pager._retry == retry
-        assert pager._timeout == timeout
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, backupdr.ManagementServer)
-                   for i in results)
-
-
-def test_list_management_servers_pages(transport_name: str = "grpc"):
-    client = BackupDRClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_management_servers),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            backupdr.ListManagementServersResponse(
-                management_servers=[
-                    backupdr.ManagementServer(),
-                    backupdr.ManagementServer(),
-                    backupdr.ManagementServer(),
-                ],
-                next_page_token='abc',
-            ),
-            backupdr.ListManagementServersResponse(
-                management_servers=[],
-                next_page_token='def',
-            ),
-            backupdr.ListManagementServersResponse(
-                management_servers=[
-                    backupdr.ManagementServer(),
-                ],
-                next_page_token='ghi',
-            ),
-            backupdr.ListManagementServersResponse(
-                management_servers=[
-                    backupdr.ManagementServer(),
-                    backupdr.ManagementServer(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = list(client.list_management_servers(request={}).pages)
-        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.asyncio
-async def test_list_management_servers_async_pager():
-    client = BackupDRAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_management_servers),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
- call.side_effect = ( - backupdr.ListManagementServersResponse( - management_servers=[ - backupdr.ManagementServer(), - backupdr.ManagementServer(), - backupdr.ManagementServer(), - ], - next_page_token='abc', - ), - backupdr.ListManagementServersResponse( - management_servers=[], - next_page_token='def', - ), - backupdr.ListManagementServersResponse( - management_servers=[ - backupdr.ManagementServer(), - ], - next_page_token='ghi', - ), - backupdr.ListManagementServersResponse( - management_servers=[ - backupdr.ManagementServer(), - backupdr.ManagementServer(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_management_servers(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, backupdr.ManagementServer) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_management_servers_async_pages(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_management_servers), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - backupdr.ListManagementServersResponse( - management_servers=[ - backupdr.ManagementServer(), - backupdr.ManagementServer(), - backupdr.ManagementServer(), - ], - next_page_token='abc', - ), - backupdr.ListManagementServersResponse( - management_servers=[], - next_page_token='def', - ), - backupdr.ListManagementServersResponse( - management_servers=[ - backupdr.ManagementServer(), - ], - next_page_token='ghi', - ), - backupdr.ListManagementServersResponse( - management_servers=[ - backupdr.ManagementServer(), - backupdr.ManagementServer(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_management_servers(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - backupdr.GetManagementServerRequest, - dict, -]) -def test_get_management_server(request_type, transport: str = 'grpc'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_management_server), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = backupdr.ManagementServer( - name='name_value', - description='description_value', - type_=backupdr.ManagementServer.InstanceType.BACKUP_RESTORE, - state=backupdr.ManagementServer.InstanceState.CREATING, - etag='etag_value', - oauth2_client_id='oauth2_client_id_value', - ba_proxy_uri=['ba_proxy_uri_value'], - satisfies_pzi=True, - ) - response = client.get_management_server(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = backupdr.GetManagementServerRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, backupdr.ManagementServer) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.type_ == backupdr.ManagementServer.InstanceType.BACKUP_RESTORE - assert response.state == backupdr.ManagementServer.InstanceState.CREATING - assert response.etag == 'etag_value' - assert response.oauth2_client_id == 'oauth2_client_id_value' - assert response.ba_proxy_uri == ['ba_proxy_uri_value'] - assert response.satisfies_pzi is True - - -def test_get_management_server_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = backupdr.GetManagementServerRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_management_server), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_management_server(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backupdr.GetManagementServerRequest( - name='name_value', - ) - -def test_get_management_server_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_management_server in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_management_server] = mock_rpc - request = {} - client.get_management_server(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.get_management_server(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_management_server_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = BackupDRAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.get_management_server in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.get_management_server] = mock_rpc
-
-        request = {}
-        await client.get_management_server(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.get_management_server(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_management_server_async(transport: str = 'grpc_asyncio', request_type=backupdr.GetManagementServerRequest):
-    client = BackupDRAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_management_server),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupdr.ManagementServer(
-            name='name_value',
-            description='description_value',
-            type_=backupdr.ManagementServer.InstanceType.BACKUP_RESTORE,
-            state=backupdr.ManagementServer.InstanceState.CREATING,
-            etag='etag_value',
-            oauth2_client_id='oauth2_client_id_value',
-            ba_proxy_uri=['ba_proxy_uri_value'],
-            satisfies_pzi=True,
-        ))
-        response = await client.get_management_server(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = backupdr.GetManagementServerRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
- assert isinstance(response, backupdr.ManagementServer) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.type_ == backupdr.ManagementServer.InstanceType.BACKUP_RESTORE - assert response.state == backupdr.ManagementServer.InstanceState.CREATING - assert response.etag == 'etag_value' - assert response.oauth2_client_id == 'oauth2_client_id_value' - assert response.ba_proxy_uri == ['ba_proxy_uri_value'] - assert response.satisfies_pzi is True - - -@pytest.mark.asyncio -async def test_get_management_server_async_from_dict(): - await test_get_management_server_async(request_type=dict) - -def test_get_management_server_field_headers(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backupdr.GetManagementServerRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_management_server), - '__call__') as call: - call.return_value = backupdr.ManagementServer() - client.get_management_server(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_management_server_field_headers_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backupdr.GetManagementServerRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_management_server), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupdr.ManagementServer()) - await client.get_management_server(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_management_server_flattened(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_management_server), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = backupdr.ManagementServer() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_management_server( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_get_management_server_flattened_error():
-    client = BackupDRClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_management_server(
-            backupdr.GetManagementServerRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_get_management_server_flattened_async():
-    client = BackupDRAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_management_server),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupdr.ManagementServer())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.get_management_server(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_management_server_flattened_error_async():
-    client = BackupDRAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.get_management_server(
-            backupdr.GetManagementServerRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    backupdr.CreateManagementServerRequest,
-    dict,
-])
-def test_create_management_server(request_type, transport: str = 'grpc'):
-    client = BackupDRClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_management_server),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/spam')
-        response = client.create_management_server(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = backupdr.CreateManagementServerRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, future.Future)
-
-
-def test_create_management_server_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = BackupDRClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
- request = backupdr.CreateManagementServerRequest( - parent='parent_value', - management_server_id='management_server_id_value', - request_id='request_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_management_server), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_management_server(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backupdr.CreateManagementServerRequest( - parent='parent_value', - management_server_id='management_server_id_value', - request_id='request_id_value', - ) - -def test_create_management_server_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_management_server in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_management_server] = mock_rpc - request = {} - client.create_management_server(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_management_server(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_management_server_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_management_server in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_management_server] = mock_rpc - - request = {} - await client.create_management_server(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_management_server(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_management_server_async(transport: str = 'grpc_asyncio', request_type=backupdr.CreateManagementServerRequest): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_management_server), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_management_server(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = backupdr.CreateManagementServerRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_management_server_async_from_dict(): - await test_create_management_server_async(request_type=dict) - -def test_create_management_server_field_headers(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backupdr.CreateManagementServerRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_management_server), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_management_server(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_management_server_field_headers_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backupdr.CreateManagementServerRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_management_server), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_management_server(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_create_management_server_flattened():
-    client = BackupDRClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_management_server),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.create_management_server(
-            parent='parent_value',
-            management_server=backupdr.ManagementServer(name='name_value'),
-            management_server_id='management_server_id_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].management_server
-        mock_val = backupdr.ManagementServer(name='name_value')
-        assert arg == mock_val
-        arg = args[0].management_server_id
-        mock_val = 'management_server_id_value'
-        assert arg == mock_val
-
-
-def test_create_management_server_flattened_error():
-    client = BackupDRClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.create_management_server(
-            backupdr.CreateManagementServerRequest(),
-            parent='parent_value',
-            management_server=backupdr.ManagementServer(name='name_value'),
-            management_server_id='management_server_id_value',
-        )
-
-@pytest.mark.asyncio
-async def test_create_management_server_flattened_async():
-    client = BackupDRAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_management_server),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.create_management_server(
-            parent='parent_value',
-            management_server=backupdr.ManagementServer(name='name_value'),
-            management_server_id='management_server_id_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].management_server
-        mock_val = backupdr.ManagementServer(name='name_value')
-        assert arg == mock_val
-        arg = args[0].management_server_id
-        mock_val = 'management_server_id_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_management_server_flattened_error_async():
-    client = BackupDRAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - await client.create_management_server( - backupdr.CreateManagementServerRequest(), - parent='parent_value', - management_server=backupdr.ManagementServer(name='name_value'), - management_server_id='management_server_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - backupdr.DeleteManagementServerRequest, - dict, -]) -def test_delete_management_server(request_type, transport: str = 'grpc'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_management_server), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.delete_management_server(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = backupdr.DeleteManagementServerRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_management_server_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = backupdr.DeleteManagementServerRequest( - name='name_value', - request_id='request_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_management_server), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_management_server(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backupdr.DeleteManagementServerRequest( - name='name_value', - request_id='request_id_value', - ) - -def test_delete_management_server_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_management_server in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_management_server] = mock_rpc - request = {} - client.delete_management_server(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_management_server(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_management_server_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_management_server in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_management_server] = mock_rpc - - request = {} - await client.delete_management_server(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_management_server(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_management_server_async(transport: str = 'grpc_asyncio', request_type=backupdr.DeleteManagementServerRequest): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_management_server), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_management_server(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = backupdr.DeleteManagementServerRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_management_server_async_from_dict(): - await test_delete_management_server_async(request_type=dict) - -def test_delete_management_server_field_headers(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = backupdr.DeleteManagementServerRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_management_server), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_management_server(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_management_server_field_headers_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backupdr.DeleteManagementServerRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_management_server), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_management_server(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_management_server_flattened(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_management_server), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_management_server( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_management_server_flattened_error(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_management_server( - backupdr.DeleteManagementServerRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_management_server_flattened_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_management_server), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.delete_management_server(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_management_server_flattened_error_async():
-    client = BackupDRAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.delete_management_server(
-            backupdr.DeleteManagementServerRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  backupvault.CreateBackupVaultRequest,
-  dict,
-])
-def test_create_backup_vault(request_type, transport: str = 'grpc'):
-    client = BackupDRClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_backup_vault),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/spam')
-        response = client.create_backup_vault(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = backupvault.CreateBackupVaultRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, future.Future)
-
-
-def test_create_backup_vault_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = BackupDRClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = backupvault.CreateBackupVaultRequest(
-        parent='parent_value',
-        backup_vault_id='backup_vault_id_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_backup_vault),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
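The `*_non_empty_request_with_auto_populated_field` tests around this point all guard the same AIP-4235 behavior: a string field annotated as UUID4 (such as `request_id`) is filled in automatically only when the caller leaves it empty. A minimal sketch of that behavior, where the helper name `_autofill_request_id` is hypothetical and not part of the generated client:

```python
import uuid

def _autofill_request_id(request):
    # Hypothetical sketch: an empty UUID4-annotated field gets a fresh
    # UUID4 so retries are idempotent; a caller-supplied value is kept,
    # which is why these tests pre-fill every non-UUID4 string field and
    # assert the request round-trips unchanged.
    if not request.request_id:
        request.request_id = str(uuid.uuid4())
    return request
```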
- client.create_backup_vault(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backupvault.CreateBackupVaultRequest( - parent='parent_value', - backup_vault_id='backup_vault_id_value', - ) - -def test_create_backup_vault_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_backup_vault in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_backup_vault] = mock_rpc - request = {} - client.create_backup_vault(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_backup_vault(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_backup_vault_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_backup_vault in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_backup_vault] = mock_rpc - - request = {} - await client.create_backup_vault(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_backup_vault(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_backup_vault_async(transport: str = 'grpc_asyncio', request_type=backupvault.CreateBackupVaultRequest): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_vault), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_backup_vault(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = backupvault.CreateBackupVaultRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_backup_vault_async_from_dict(): - await test_create_backup_vault_async(request_type=dict) - -def test_create_backup_vault_field_headers(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backupvault.CreateBackupVaultRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_vault), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_backup_vault(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_backup_vault_field_headers_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backupvault.CreateBackupVaultRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_vault), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_backup_vault(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_backup_vault_flattened(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_vault), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
-        client.create_backup_vault(
-            parent='parent_value',
-            backup_vault=backupvault.BackupVault(name='name_value'),
-            backup_vault_id='backup_vault_id_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].backup_vault
-        mock_val = backupvault.BackupVault(name='name_value')
-        assert arg == mock_val
-        arg = args[0].backup_vault_id
-        mock_val = 'backup_vault_id_value'
-        assert arg == mock_val
-
-
-def test_create_backup_vault_flattened_error():
-    client = BackupDRClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.create_backup_vault(
-            backupvault.CreateBackupVaultRequest(),
-            parent='parent_value',
-            backup_vault=backupvault.BackupVault(name='name_value'),
-            backup_vault_id='backup_vault_id_value',
-        )
-
-@pytest.mark.asyncio
-async def test_create_backup_vault_flattened_async():
-    client = BackupDRAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_backup_vault),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.create_backup_vault(
-            parent='parent_value',
-            backup_vault=backupvault.BackupVault(name='name_value'),
-            backup_vault_id='backup_vault_id_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].backup_vault
-        mock_val = backupvault.BackupVault(name='name_value')
-        assert arg == mock_val
-        arg = args[0].backup_vault_id
-        mock_val = 'backup_vault_id_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_backup_vault_flattened_error_async():
-    client = BackupDRAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.create_backup_vault(
-            backupvault.CreateBackupVaultRequest(),
-            parent='parent_value',
-            backup_vault=backupvault.BackupVault(name='name_value'),
-            backup_vault_id='backup_vault_id_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  backupvault.ListBackupVaultsRequest,
-  dict,
-])
-def test_list_backup_vaults(request_type, transport: str = 'grpc'):
-    client = BackupDRClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.list_backup_vaults), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = backupvault.ListBackupVaultsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - response = client.list_backup_vaults(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = backupvault.ListBackupVaultsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBackupVaultsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -def test_list_backup_vaults_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = backupvault.ListBackupVaultsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_vaults), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_backup_vaults(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backupvault.ListBackupVaultsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - -def test_list_backup_vaults_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_backup_vaults in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_backup_vaults] = mock_rpc - request = {} - client.list_backup_vaults(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.list_backup_vaults(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_backup_vaults_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = BackupDRAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.list_backup_vaults in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.list_backup_vaults] = mock_rpc
-
-        request = {}
-        await client.list_backup_vaults(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.list_backup_vaults(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_backup_vaults_async(transport: str = 'grpc_asyncio', request_type=backupvault.ListBackupVaultsRequest):
-    client = BackupDRAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_backup_vaults),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupvault.ListBackupVaultsResponse(
-            next_page_token='next_page_token_value',
-            unreachable=['unreachable_value'],
-        ))
-        response = await client.list_backup_vaults(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = backupvault.ListBackupVaultsRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListBackupVaultsAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable == ['unreachable_value']
-
-
-@pytest.mark.asyncio
-async def test_list_backup_vaults_async_from_dict():
-    await test_list_backup_vaults_async(request_type=dict)
-
-def test_list_backup_vaults_field_headers():
-    client = BackupDRClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = backupvault.ListBackupVaultsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_backup_vaults),
-            '__call__') as call:
-        call.return_value = backupvault.ListBackupVaultsResponse()
-        client.list_backup_vaults(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_backup_vaults_field_headers_async():
-    client = BackupDRAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = backupvault.ListBackupVaultsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_backup_vaults),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupvault.ListBackupVaultsResponse())
-        await client.list_backup_vaults(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_list_backup_vaults_flattened():
-    client = BackupDRClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_backup_vaults),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = backupvault.ListBackupVaultsResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_backup_vaults(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_backup_vaults_flattened_error():
-    client = BackupDRClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_backup_vaults(
-            backupvault.ListBackupVaultsRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_backup_vaults_flattened_async():
-    client = BackupDRAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_backup_vaults),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupvault.ListBackupVaultsResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_backup_vaults(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_backup_vaults_flattened_error_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_backup_vaults( - backupvault.ListBackupVaultsRequest(), - parent='parent_value', - ) - - -def test_list_backup_vaults_pager(transport_name: str = "grpc"): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_vaults), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - backupvault.ListBackupVaultsResponse( - backup_vaults=[ - backupvault.BackupVault(), - backupvault.BackupVault(), - backupvault.BackupVault(), - ], - next_page_token='abc', - ), - backupvault.ListBackupVaultsResponse( - backup_vaults=[], - next_page_token='def', - ), - backupvault.ListBackupVaultsResponse( - backup_vaults=[ - backupvault.BackupVault(), - ], - next_page_token='ghi', - ), - backupvault.ListBackupVaultsResponse( - backup_vaults=[ - backupvault.BackupVault(), - backupvault.BackupVault(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_backup_vaults(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, backupvault.BackupVault) - for i in results) -def test_list_backup_vaults_pages(transport_name: str = "grpc"): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_vaults), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - backupvault.ListBackupVaultsResponse( - backup_vaults=[ - backupvault.BackupVault(), - backupvault.BackupVault(), - backupvault.BackupVault(), - ], - next_page_token='abc', - ), - backupvault.ListBackupVaultsResponse( - backup_vaults=[], - next_page_token='def', - ), - backupvault.ListBackupVaultsResponse( - backup_vaults=[ - backupvault.BackupVault(), - ], - next_page_token='ghi', - ), - backupvault.ListBackupVaultsResponse( - backup_vaults=[ - backupvault.BackupVault(), - backupvault.BackupVault(), - ], - ), - RuntimeError, - ) - pages = list(client.list_backup_vaults(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_backup_vaults_async_pager(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_vaults), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - backupvault.ListBackupVaultsResponse( - backup_vaults=[ - backupvault.BackupVault(), - backupvault.BackupVault(), - backupvault.BackupVault(), - ], - next_page_token='abc', - ), - backupvault.ListBackupVaultsResponse( - backup_vaults=[], - next_page_token='def', - ), - backupvault.ListBackupVaultsResponse( - backup_vaults=[ - backupvault.BackupVault(), - ], - next_page_token='ghi', - ), - backupvault.ListBackupVaultsResponse( - backup_vaults=[ - backupvault.BackupVault(), - backupvault.BackupVault(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_backup_vaults(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, backupvault.BackupVault) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_backup_vaults_async_pages(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_vaults), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - backupvault.ListBackupVaultsResponse( - backup_vaults=[ - backupvault.BackupVault(), - backupvault.BackupVault(), - backupvault.BackupVault(), - ], - next_page_token='abc', - ), - backupvault.ListBackupVaultsResponse( - backup_vaults=[], - next_page_token='def', - ), - backupvault.ListBackupVaultsResponse( - backup_vaults=[ - backupvault.BackupVault(), - ], - next_page_token='ghi', - ), - backupvault.ListBackupVaultsResponse( - backup_vaults=[ - backupvault.BackupVault(), - backupvault.BackupVault(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_backup_vaults(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - backupvault.FetchUsableBackupVaultsRequest, - dict, -]) -def test_fetch_usable_backup_vaults(request_type, transport: str = 'grpc'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_usable_backup_vaults), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = backupvault.FetchUsableBackupVaultsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - response = client.fetch_usable_backup_vaults(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = backupvault.FetchUsableBackupVaultsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.FetchUsableBackupVaultsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -def test_fetch_usable_backup_vaults_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = backupvault.FetchUsableBackupVaultsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_usable_backup_vaults), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.fetch_usable_backup_vaults(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backupvault.FetchUsableBackupVaultsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - -def test_fetch_usable_backup_vaults_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.fetch_usable_backup_vaults in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.fetch_usable_backup_vaults] = mock_rpc - request = {} - client.fetch_usable_backup_vaults(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.fetch_usable_backup_vaults(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_fetch_usable_backup_vaults_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = BackupDRAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.fetch_usable_backup_vaults in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.fetch_usable_backup_vaults] = mock_rpc
-
-        request = {}
-        await client.fetch_usable_backup_vaults(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.fetch_usable_backup_vaults(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_fetch_usable_backup_vaults_async(transport: str = 'grpc_asyncio', request_type=backupvault.FetchUsableBackupVaultsRequest):
-    client = BackupDRAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.fetch_usable_backup_vaults),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupvault.FetchUsableBackupVaultsResponse(
-            next_page_token='next_page_token_value',
-            unreachable=['unreachable_value'],
-        ))
-        response = await client.fetch_usable_backup_vaults(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = backupvault.FetchUsableBackupVaultsRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.FetchUsableBackupVaultsAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable == ['unreachable_value']
-
-
-@pytest.mark.asyncio
-async def test_fetch_usable_backup_vaults_async_from_dict():
-    await test_fetch_usable_backup_vaults_async(request_type=dict)
-
-def test_fetch_usable_backup_vaults_field_headers():
-    client = BackupDRClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = backupvault.FetchUsableBackupVaultsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.fetch_usable_backup_vaults),
-            '__call__') as call:
-        call.return_value = backupvault.FetchUsableBackupVaultsResponse()
-        client.fetch_usable_backup_vaults(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_fetch_usable_backup_vaults_field_headers_async():
-    client = BackupDRAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = backupvault.FetchUsableBackupVaultsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.fetch_usable_backup_vaults),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupvault.FetchUsableBackupVaultsResponse())
-        await client.fetch_usable_backup_vaults(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_fetch_usable_backup_vaults_flattened():
-    client = BackupDRClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.fetch_usable_backup_vaults),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = backupvault.FetchUsableBackupVaultsResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.fetch_usable_backup_vaults(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_fetch_usable_backup_vaults_flattened_error():
-    client = BackupDRClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.fetch_usable_backup_vaults(
-            backupvault.FetchUsableBackupVaultsRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_fetch_usable_backup_vaults_flattened_async():
-    client = BackupDRAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.fetch_usable_backup_vaults),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupvault.FetchUsableBackupVaultsResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
- response = await client.fetch_usable_backup_vaults( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_fetch_usable_backup_vaults_flattened_error_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.fetch_usable_backup_vaults( - backupvault.FetchUsableBackupVaultsRequest(), - parent='parent_value', - ) - - -def test_fetch_usable_backup_vaults_pager(transport_name: str = "grpc"): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_usable_backup_vaults), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - backupvault.FetchUsableBackupVaultsResponse( - backup_vaults=[ - backupvault.BackupVault(), - backupvault.BackupVault(), - backupvault.BackupVault(), - ], - next_page_token='abc', - ), - backupvault.FetchUsableBackupVaultsResponse( - backup_vaults=[], - next_page_token='def', - ), - backupvault.FetchUsableBackupVaultsResponse( - backup_vaults=[ - backupvault.BackupVault(), - ], - next_page_token='ghi', - ), - backupvault.FetchUsableBackupVaultsResponse( - backup_vaults=[ - backupvault.BackupVault(), - backupvault.BackupVault(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.fetch_usable_backup_vaults(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, backupvault.BackupVault) - for i in results) -def test_fetch_usable_backup_vaults_pages(transport_name: str = "grpc"): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_usable_backup_vaults), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - backupvault.FetchUsableBackupVaultsResponse( - backup_vaults=[ - backupvault.BackupVault(), - backupvault.BackupVault(), - backupvault.BackupVault(), - ], - next_page_token='abc', - ), - backupvault.FetchUsableBackupVaultsResponse( - backup_vaults=[], - next_page_token='def', - ), - backupvault.FetchUsableBackupVaultsResponse( - backup_vaults=[ - backupvault.BackupVault(), - ], - next_page_token='ghi', - ), - backupvault.FetchUsableBackupVaultsResponse( - backup_vaults=[ - backupvault.BackupVault(), - backupvault.BackupVault(), - ], - ), - RuntimeError, - ) - pages = list(client.fetch_usable_backup_vaults(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_fetch_usable_backup_vaults_async_pager(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_usable_backup_vaults), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - backupvault.FetchUsableBackupVaultsResponse( - backup_vaults=[ - backupvault.BackupVault(), - backupvault.BackupVault(), - backupvault.BackupVault(), - ], - next_page_token='abc', - ), - backupvault.FetchUsableBackupVaultsResponse( - backup_vaults=[], - next_page_token='def', - ), - backupvault.FetchUsableBackupVaultsResponse( - backup_vaults=[ - backupvault.BackupVault(), - ], - next_page_token='ghi', - ), - backupvault.FetchUsableBackupVaultsResponse( - backup_vaults=[ - backupvault.BackupVault(), - backupvault.BackupVault(), - ], - ), - RuntimeError, - ) - async_pager = await client.fetch_usable_backup_vaults(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, backupvault.BackupVault) - for i in responses) - - -@pytest.mark.asyncio -async def test_fetch_usable_backup_vaults_async_pages(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_usable_backup_vaults), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - backupvault.FetchUsableBackupVaultsResponse( - backup_vaults=[ - backupvault.BackupVault(), - backupvault.BackupVault(), - backupvault.BackupVault(), - ], - next_page_token='abc', - ), - backupvault.FetchUsableBackupVaultsResponse( - backup_vaults=[], - next_page_token='def', - ), - backupvault.FetchUsableBackupVaultsResponse( - backup_vaults=[ - backupvault.BackupVault(), - ], - next_page_token='ghi', - ), - backupvault.FetchUsableBackupVaultsResponse( - backup_vaults=[ - backupvault.BackupVault(), - backupvault.BackupVault(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.fetch_usable_backup_vaults(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - backupvault.GetBackupVaultRequest, - dict, -]) -def test_get_backup_vault(request_type, transport: str = 'grpc'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_vault), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = backupvault.BackupVault( - name='name_value', - description='description_value', - deletable=True, - etag='etag_value', - state=backupvault.BackupVault.State.CREATING, - backup_count=1278, - service_account='service_account_value', - total_stored_bytes=1946, - uid='uid_value', - access_restriction=backupvault.BackupVault.AccessRestriction.WITHIN_PROJECT, - ) - response = client.get_backup_vault(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = backupvault.GetBackupVaultRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, backupvault.BackupVault) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.deletable is True - assert response.etag == 'etag_value' - assert response.state == backupvault.BackupVault.State.CREATING - assert response.backup_count == 1278 - assert response.service_account == 'service_account_value' - assert response.total_stored_bytes == 1946 - assert response.uid == 'uid_value' - assert response.access_restriction == backupvault.BackupVault.AccessRestriction.WITHIN_PROJECT - - -def test_get_backup_vault_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = backupvault.GetBackupVaultRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_vault), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_backup_vault(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backupvault.GetBackupVaultRequest( - name='name_value', - ) - -def test_get_backup_vault_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_backup_vault in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_backup_vault] = mock_rpc - request = {} - client.get_backup_vault(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_backup_vault(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_backup_vault_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_backup_vault in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_backup_vault] = mock_rpc - - request = {} - await client.get_backup_vault(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_backup_vault(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_backup_vault_async(transport: str = 'grpc_asyncio', request_type=backupvault.GetBackupVaultRequest): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
-    with mock.patch.object(
-            type(client.transport.get_backup_vault),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupvault.BackupVault(
-            name='name_value',
-            description='description_value',
-            deletable=True,
-            etag='etag_value',
-            state=backupvault.BackupVault.State.CREATING,
-            backup_count=1278,
-            service_account='service_account_value',
-            total_stored_bytes=1946,
-            uid='uid_value',
-            access_restriction=backupvault.BackupVault.AccessRestriction.WITHIN_PROJECT,
-        ))
-        response = await client.get_backup_vault(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = backupvault.GetBackupVaultRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, backupvault.BackupVault)
-    assert response.name == 'name_value'
-    assert response.description == 'description_value'
-    assert response.deletable is True
-    assert response.etag == 'etag_value'
-    assert response.state == backupvault.BackupVault.State.CREATING
-    assert response.backup_count == 1278
-    assert response.service_account == 'service_account_value'
-    assert response.total_stored_bytes == 1946
-    assert response.uid == 'uid_value'
-    assert response.access_restriction == backupvault.BackupVault.AccessRestriction.WITHIN_PROJECT
-
-
-@pytest.mark.asyncio
-async def test_get_backup_vault_async_from_dict():
-    await test_get_backup_vault_async(request_type=dict)
-
-def test_get_backup_vault_field_headers():
-    client = BackupDRClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = backupvault.GetBackupVaultRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_backup_vault),
-            '__call__') as call:
-        call.return_value = backupvault.BackupVault()
-        client.get_backup_vault(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_backup_vault_field_headers_async():
-    client = BackupDRAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = backupvault.GetBackupVaultRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_backup_vault),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupvault.BackupVault())
-        await client.get_backup_vault(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_get_backup_vault_flattened():
- client = BackupDRClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_backup_vault),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = backupvault.BackupVault()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.get_backup_vault(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_get_backup_vault_flattened_error():
- client = BackupDRClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.get_backup_vault(
- backupvault.GetBackupVaultRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_get_backup_vault_flattened_async():
- client = BackupDRAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_backup_vault),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupvault.BackupVault())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.get_backup_vault(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_backup_vault_flattened_error_async():
- client = BackupDRAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.get_backup_vault(
- backupvault.GetBackupVaultRequest(),
- name='name_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- backupvault.UpdateBackupVaultRequest,
- dict,
-])
-def test_update_backup_vault(request_type, transport: str = 'grpc'):
- client = BackupDRClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_backup_vault),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/spam')
- response = client.update_backup_vault(request)
-
- # Establish that the underlying gRPC stub method was called.
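For long-running methods such as ``update_backup_vault``, the stub is faked with a raw ``operations_pb2.Operation``, but the client hands back a polling future (``google.api_core.operation.Operation``, a ``future.Future`` subclass), which is why the assertions below check the future type rather than the proto. A sketch of that wrapping, assuming ``operation.from_gapic`` keeps its ``(operation, operations_client, result_type)`` signature; nothing is polled here and the result type is a placeholder:

.. code-block:: python

    from unittest import mock

    from google.api_core import operation
    from google.longrunning import operations_pb2

    op_proto = operations_pb2.Operation(name="operations/spam")
    lro = operation.from_gapic(
        op_proto,
        mock.Mock(),               # stand-in operations client; never polled in this sketch
        operations_pb2.Operation,  # placeholder result type, for illustration only
    )
    assert lro.operation.name == "operations/spam"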
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = backupvault.UpdateBackupVaultRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_backup_vault_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = backupvault.UpdateBackupVaultRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup_vault), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_backup_vault(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backupvault.UpdateBackupVaultRequest( - ) - -def test_update_backup_vault_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_backup_vault in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_backup_vault] = mock_rpc - request = {} - client.update_backup_vault(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_backup_vault(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_backup_vault_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_backup_vault in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_backup_vault] = mock_rpc - - request = {} - await client.update_backup_vault(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_backup_vault(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_backup_vault_async(transport: str = 'grpc_asyncio', request_type=backupvault.UpdateBackupVaultRequest): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup_vault), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_backup_vault(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = backupvault.UpdateBackupVaultRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_backup_vault_async_from_dict(): - await test_update_backup_vault_async(request_type=dict) - -def test_update_backup_vault_field_headers(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backupvault.UpdateBackupVaultRequest() - - request.backup_vault.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_backup_vault), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_backup_vault(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'backup_vault.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_backup_vault_field_headers_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backupvault.UpdateBackupVaultRequest() - - request.backup_vault.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup_vault), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_backup_vault(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'backup_vault.name=name_value', - ) in kw['metadata'] - - -def test_update_backup_vault_flattened(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup_vault), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_backup_vault( - backup_vault=backupvault.BackupVault(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].backup_vault - mock_val = backupvault.BackupVault(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_backup_vault_flattened_error(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_backup_vault( - backupvault.UpdateBackupVaultRequest(), - backup_vault=backupvault.BackupVault(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_backup_vault_flattened_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup_vault), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name='operations/spam')
- )
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.update_backup_vault(
- backup_vault=backupvault.BackupVault(name='name_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].backup_vault
- mock_val = backupvault.BackupVault(name='name_value')
- assert arg == mock_val
- arg = args[0].update_mask
- mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_backup_vault_flattened_error_async():
- client = BackupDRAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.update_backup_vault(
- backupvault.UpdateBackupVaultRequest(),
- backup_vault=backupvault.BackupVault(name='name_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
-
-@pytest.mark.parametrize("request_type", [
- backupvault.DeleteBackupVaultRequest,
- dict,
-])
-def test_delete_backup_vault(request_type, transport: str = 'grpc'):
- client = BackupDRClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_backup_vault),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/spam')
- response = client.delete_backup_vault(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = backupvault.DeleteBackupVaultRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, future.Future)
-
-
-def test_delete_backup_vault_non_empty_request_with_auto_populated_field():
- # This test is a coverage failsafe to make sure that UUID4 fields are
- # automatically populated, according to AIP-4235, with non-empty requests.
- client = BackupDRClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Populate all string fields in the request which are not UUID4
- # since we want to check that UUID4 are populated automatically
- # if they meet the requirements of AIP 4235.
- request = backupvault.DeleteBackupVaultRequest(
- name='name_value',
- etag='etag_value',
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_backup_vault),
- '__call__') as call:
- call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
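These ``non_empty_request`` tests guard the AIP-4235 behaviour: string fields the caller sets must survive unchanged, while ``request_id``-style UUID4 fields may be filled in automatically when left empty. The rule itself is tiny; a sketch with a hypothetical helper:

.. code-block:: python

    import uuid

    def ensure_request_id(request_id: str) -> str:
        """AIP-4235-style auto-population: keep a caller-supplied id, else mint a UUID4."""
        return request_id or str(uuid.uuid4())

    assert ensure_request_id("my-id") == "my-id"
    assert uuid.UUID(ensure_request_id(""), version=4)  # generated ids parse as UUID4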
- client.delete_backup_vault(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backupvault.DeleteBackupVaultRequest( - name='name_value', - etag='etag_value', - ) - -def test_delete_backup_vault_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_backup_vault in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_backup_vault] = mock_rpc - request = {} - client.delete_backup_vault(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_backup_vault(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_backup_vault_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_backup_vault in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_backup_vault] = mock_rpc - - request = {} - await client.delete_backup_vault(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_backup_vault(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_backup_vault_async(transport: str = 'grpc_asyncio', request_type=backupvault.DeleteBackupVaultRequest): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_backup_vault), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_backup_vault(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = backupvault.DeleteBackupVaultRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_backup_vault_async_from_dict(): - await test_delete_backup_vault_async(request_type=dict) - -def test_delete_backup_vault_field_headers(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backupvault.DeleteBackupVaultRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_vault), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_backup_vault(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_backup_vault_field_headers_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backupvault.DeleteBackupVaultRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_vault), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_backup_vault(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_backup_vault_flattened(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_vault), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_backup_vault( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
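The ``flattened`` tests exercise the GAPIC convenience layer: keyword arguments are copied onto a freshly built request object, and passing both a request and flattened fields raises ``ValueError``. A condensed model of that rule (function and field names hypothetical, request simplified to a dict):

.. code-block:: python

    def get_backup_vault(request=None, *, name=None):
        """Model of GAPIC flattening: a request object OR flattened fields, never both."""
        if request is not None and name is not None:
            raise ValueError("If request is provided, flattened arguments must not be set.")
        return request if request is not None else {"name": name}

    assert get_backup_vault(name="name_value") == {"name": "name_value"}
    try:
        get_backup_vault({"name": "x"}, name="y")
    except ValueError:
        pass  # both styles at once is rejected, as the *_flattened_error tests assert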
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_delete_backup_vault_flattened_error():
- client = BackupDRClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.delete_backup_vault(
- backupvault.DeleteBackupVaultRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_delete_backup_vault_flattened_async():
- client = BackupDRAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_backup_vault),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name='operations/spam')
- )
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.delete_backup_vault(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_backup_vault_flattened_error_async():
- client = BackupDRAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.delete_backup_vault(
- backupvault.DeleteBackupVaultRequest(),
- name='name_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- backupvault.ListDataSourcesRequest,
- dict,
-])
-def test_list_data_sources(request_type, transport: str = 'grpc'):
- client = BackupDRClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_data_sources),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = backupvault.ListDataSourcesResponse(
- next_page_token='next_page_token_value',
- unreachable=['unreachable_value'],
- )
- response = client.list_data_sources(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = backupvault.ListDataSourcesRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListDataSourcesPager)
- assert response.next_page_token == 'next_page_token_value'
- assert response.unreachable == ['unreachable_value']
-
-
-def test_list_data_sources_non_empty_request_with_auto_populated_field():
- # This test is a coverage failsafe to make sure that UUID4 fields are
- # automatically populated, according to AIP-4235, with non-empty requests.
- client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = backupvault.ListDataSourcesRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_sources), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_data_sources(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backupvault.ListDataSourcesRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - -def test_list_data_sources_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_data_sources in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_data_sources] = mock_rpc - request = {} - client.list_data_sources(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_data_sources(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_data_sources_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_data_sources in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_data_sources] = mock_rpc - - request = {} - await client.list_data_sources(request) - - # Establish that the underlying gRPC stub method was called. 
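The cache these tests probe is built once by ``_prep_wrapped_messages``: every transport method gets wrapped (with its retry/timeout defaults) at client construction, and later calls look the wrapper up instead of re-wrapping. A toy version of that cache (class and names hypothetical):

.. code-block:: python

    from unittest import mock

    class Transport:
        def __init__(self, rpcs):
            # Built once, like _prep_wrapped_messages: one wrapped callable per RPC.
            self._wrapped_methods = {rpc: mock.Mock(name=f"wrapped_{rpc}") for rpc in rpcs}

        def invoke(self, rpc, request):
            # Subsequent calls reuse the cached wrapper; nothing is wrapped again.
            return self._wrapped_methods[rpc](request)

    transport = Transport(["list_data_sources"])
    transport.invoke("list_data_sources", {})
    transport.invoke("list_data_sources", {})
    assert transport._wrapped_methods["list_data_sources"].call_count == 2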
- assert mock_rpc.call_count == 1
-
- await client.list_data_sources(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_data_sources_async(transport: str = 'grpc_asyncio', request_type=backupvault.ListDataSourcesRequest):
- client = BackupDRAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_data_sources),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupvault.ListDataSourcesResponse(
- next_page_token='next_page_token_value',
- unreachable=['unreachable_value'],
- ))
- response = await client.list_data_sources(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = backupvault.ListDataSourcesRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListDataSourcesAsyncPager)
- assert response.next_page_token == 'next_page_token_value'
- assert response.unreachable == ['unreachable_value']
-
-
-@pytest.mark.asyncio
-async def test_list_data_sources_async_from_dict():
- await test_list_data_sources_async(request_type=dict)
-
-def test_list_data_sources_field_headers():
- client = BackupDRClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = backupvault.ListDataSourcesRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_data_sources),
- '__call__') as call:
- call.return_value = backupvault.ListDataSourcesResponse()
- client.list_data_sources(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_data_sources_field_headers_async():
- client = BackupDRAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = backupvault.ListDataSourcesRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_data_sources),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupvault.ListDataSourcesResponse())
- await client.list_data_sources(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-def test_list_data_sources_flattened():
- client = BackupDRClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_data_sources),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = backupvault.ListDataSourcesResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.list_data_sources(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-
-def test_list_data_sources_flattened_error():
- client = BackupDRClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.list_data_sources(
- backupvault.ListDataSourcesRequest(),
- parent='parent_value',
- )
-
-@pytest.mark.asyncio
-async def test_list_data_sources_flattened_async():
- client = BackupDRAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_data_sources),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupvault.ListDataSourcesResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.list_data_sources(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_data_sources_flattened_error_async():
- client = BackupDRAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.list_data_sources(
- backupvault.ListDataSourcesRequest(),
- parent='parent_value',
- )
-
-
-def test_list_data_sources_pager(transport_name: str = "grpc"):
- client = BackupDRClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_data_sources),
- '__call__') as call:
- # Set the response to a series of pages.
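The pager tests below feed the stub a sequence of pages through ``side_effect`` (3, 0, 1, and 2 items, then a ``RuntimeError`` tripwire that must never be reached) and check that iterating the pager walks the ``next_page_token`` chain transparently. A self-contained model of that iteration (page shapes simplified to dicts):

.. code-block:: python

    class Pager:
        """Minimal pager: follows next_page_token, yielding items across pages."""
        def __init__(self, fetch, request):
            self._fetch, self._request = fetch, dict(request)

        def __iter__(self):
            while True:
                page = self._fetch(self._request)
                yield from page["items"]
                token = page.get("next_page_token", "")
                if not token:
                    return
                self._request["page_token"] = token

    pages = iter([
        {"items": [1, 2, 3], "next_page_token": "abc"},
        {"items": [], "next_page_token": "def"},
        {"items": [4], "next_page_token": "ghi"},
        {"items": [5, 6]},
    ])
    assert list(Pager(lambda _req: next(pages), {})) == [1, 2, 3, 4, 5, 6]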
- call.side_effect = ( - backupvault.ListDataSourcesResponse( - data_sources=[ - backupvault.DataSource(), - backupvault.DataSource(), - backupvault.DataSource(), - ], - next_page_token='abc', - ), - backupvault.ListDataSourcesResponse( - data_sources=[], - next_page_token='def', - ), - backupvault.ListDataSourcesResponse( - data_sources=[ - backupvault.DataSource(), - ], - next_page_token='ghi', - ), - backupvault.ListDataSourcesResponse( - data_sources=[ - backupvault.DataSource(), - backupvault.DataSource(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_data_sources(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, backupvault.DataSource) - for i in results) -def test_list_data_sources_pages(transport_name: str = "grpc"): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_sources), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - backupvault.ListDataSourcesResponse( - data_sources=[ - backupvault.DataSource(), - backupvault.DataSource(), - backupvault.DataSource(), - ], - next_page_token='abc', - ), - backupvault.ListDataSourcesResponse( - data_sources=[], - next_page_token='def', - ), - backupvault.ListDataSourcesResponse( - data_sources=[ - backupvault.DataSource(), - ], - next_page_token='ghi', - ), - backupvault.ListDataSourcesResponse( - data_sources=[ - backupvault.DataSource(), - backupvault.DataSource(), - ], - ), - RuntimeError, - ) - pages = list(client.list_data_sources(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_data_sources_async_pager(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_sources), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - backupvault.ListDataSourcesResponse( - data_sources=[ - backupvault.DataSource(), - backupvault.DataSource(), - backupvault.DataSource(), - ], - next_page_token='abc', - ), - backupvault.ListDataSourcesResponse( - data_sources=[], - next_page_token='def', - ), - backupvault.ListDataSourcesResponse( - data_sources=[ - backupvault.DataSource(), - ], - next_page_token='ghi', - ), - backupvault.ListDataSourcesResponse( - data_sources=[ - backupvault.DataSource(), - backupvault.DataSource(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_data_sources(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, backupvault.DataSource) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_data_sources_async_pages(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_sources), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - backupvault.ListDataSourcesResponse( - data_sources=[ - backupvault.DataSource(), - backupvault.DataSource(), - backupvault.DataSource(), - ], - next_page_token='abc', - ), - backupvault.ListDataSourcesResponse( - data_sources=[], - next_page_token='def', - ), - backupvault.ListDataSourcesResponse( - data_sources=[ - backupvault.DataSource(), - ], - next_page_token='ghi', - ), - backupvault.ListDataSourcesResponse( - data_sources=[ - backupvault.DataSource(), - backupvault.DataSource(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_data_sources(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - backupvault.GetDataSourceRequest, - dict, -]) -def test_get_data_source(request_type, transport: str = 'grpc'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_source), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = backupvault.DataSource( - name='name_value', - state=backupvault.DataSource.State.CREATING, - backup_count=1278, - etag='etag_value', - total_stored_bytes=1946, - config_state=backupvault.BackupConfigState.ACTIVE, - ) - response = client.get_data_source(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = backupvault.GetDataSourceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, backupvault.DataSource) - assert response.name == 'name_value' - assert response.state == backupvault.DataSource.State.CREATING - assert response.backup_count == 1278 - assert response.etag == 'etag_value' - assert response.total_stored_bytes == 1946 - assert response.config_state == backupvault.BackupConfigState.ACTIVE - - -def test_get_data_source_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = backupvault.GetDataSourceRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_source), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_data_source(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backupvault.GetDataSourceRequest( - name='name_value', - ) - -def test_get_data_source_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_data_source in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_data_source] = mock_rpc - request = {} - client.get_data_source(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_data_source(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_data_source_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_data_source in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_data_source] = mock_rpc - - request = {} - await client.get_data_source(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- await client.get_data_source(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_data_source_async(transport: str = 'grpc_asyncio', request_type=backupvault.GetDataSourceRequest):
- client = BackupDRAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_data_source),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupvault.DataSource(
- name='name_value',
- state=backupvault.DataSource.State.CREATING,
- backup_count=1278,
- etag='etag_value',
- total_stored_bytes=1946,
- config_state=backupvault.BackupConfigState.ACTIVE,
- ))
- response = await client.get_data_source(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = backupvault.GetDataSourceRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, backupvault.DataSource)
- assert response.name == 'name_value'
- assert response.state == backupvault.DataSource.State.CREATING
- assert response.backup_count == 1278
- assert response.etag == 'etag_value'
- assert response.total_stored_bytes == 1946
- assert response.config_state == backupvault.BackupConfigState.ACTIVE
-
-
-@pytest.mark.asyncio
-async def test_get_data_source_async_from_dict():
- await test_get_data_source_async(request_type=dict)
-
-def test_get_data_source_field_headers():
- client = BackupDRClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = backupvault.GetDataSourceRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_data_source),
- '__call__') as call:
- call.return_value = backupvault.DataSource()
- client.get_data_source(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_data_source_field_headers_async():
- client = BackupDRAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = backupvault.GetDataSourceRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_data_source),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupvault.DataSource())
- await client.get_data_source(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_get_data_source_flattened():
- client = BackupDRClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_data_source),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = backupvault.DataSource()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.get_data_source(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_get_data_source_flattened_error():
- client = BackupDRClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.get_data_source(
- backupvault.GetDataSourceRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_get_data_source_flattened_async():
- client = BackupDRAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_data_source),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupvault.DataSource())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.get_data_source(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_data_source_flattened_error_async():
- client = BackupDRAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.get_data_source(
- backupvault.GetDataSourceRequest(),
- name='name_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- backupvault.UpdateDataSourceRequest,
- dict,
-])
-def test_update_data_source(request_type, transport: str = 'grpc'):
- client = BackupDRClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_data_source),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_data_source(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = backupvault.UpdateDataSourceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_data_source_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = backupvault.UpdateDataSourceRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_source), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_data_source(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backupvault.UpdateDataSourceRequest( - ) - -def test_update_data_source_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_data_source in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_data_source] = mock_rpc - request = {} - client.update_data_source(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_data_source(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_data_source_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_data_source in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_data_source] = mock_rpc - - request = {} - await client.update_data_source(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_data_source(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_data_source_async(transport: str = 'grpc_asyncio', request_type=backupvault.UpdateDataSourceRequest): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_source), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_data_source(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = backupvault.UpdateDataSourceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_data_source_async_from_dict(): - await test_update_data_source_async(request_type=dict) - -def test_update_data_source_field_headers(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backupvault.UpdateDataSourceRequest() - - request.data_source.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_data_source), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_data_source(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'data_source.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_data_source_field_headers_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backupvault.UpdateDataSourceRequest() - - request.data_source.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_source), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_data_source(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'data_source.name=name_value', - ) in kw['metadata'] - - -def test_update_data_source_flattened(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_source), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_data_source( - data_source=backupvault.DataSource(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].data_source - mock_val = backupvault.DataSource(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_data_source_flattened_error(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_data_source( - backupvault.UpdateDataSourceRequest(), - data_source=backupvault.DataSource(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_data_source_flattened_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_source), - '__call__') as call: - # Designate an appropriate return value for the call. 
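- # (note: two return values are assigned back to back below; the second,
- # awaitable grpc_helpers_async.FakeUnaryUnaryCall, overwrites the first
- # plain Operation before the call is made, so only the awaitable value is
- # ever observed by the async client.)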
- call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_data_source( - data_source=backupvault.DataSource(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].data_source - mock_val = backupvault.DataSource(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_data_source_flattened_error_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_data_source( - backupvault.UpdateDataSourceRequest(), - data_source=backupvault.DataSource(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - backupvault.ListBackupsRequest, - dict, -]) -def test_list_backups(request_type, transport: str = 'grpc'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = backupvault.ListBackupsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - response = client.list_backups(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = backupvault.ListBackupsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBackupsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -def test_list_backups_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = backupvault.ListBackupsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
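- # (a minimal sketch of why the string matters: code paths that read an
- # operation name off the response need a real str, not a child Mock:
- #     resp = mock.Mock(); resp.name = "foo"
- #     assert isinstance(resp.name, str)
- # the value "foo" itself is never asserted on in this test.)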
- client.list_backups(request=request)
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == backupvault.ListBackupsRequest(
- parent='parent_value',
- page_token='page_token_value',
- filter='filter_value',
- order_by='order_by_value',
- )
-
- def test_list_backups_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = BackupDRClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.list_backups in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.list_backups] = mock_rpc
- request = {}
- client.list_backups(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- client.list_backups(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
- @pytest.mark.asyncio
- async def test_list_backups_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = BackupDRAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.list_backups in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.list_backups] = mock_rpc
-
- request = {}
- await client.list_backups(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.list_backups(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
- @pytest.mark.asyncio
- async def test_list_backups_async(transport: str = 'grpc_asyncio', request_type=backupvault.ListBackupsRequest):
- client = BackupDRAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_backups),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupvault.ListBackupsResponse(
- next_page_token='next_page_token_value',
- unreachable=['unreachable_value'],
- ))
- response = await client.list_backups(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = backupvault.ListBackupsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBackupsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -@pytest.mark.asyncio -async def test_list_backups_async_from_dict(): - await test_list_backups_async(request_type=dict) - -def test_list_backups_field_headers(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backupvault.ListBackupsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - call.return_value = backupvault.ListBackupsResponse() - client.list_backups(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_backups_field_headers_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backupvault.ListBackupsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupvault.ListBackupsResponse()) - await client.list_backups(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_backups_flattened(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = backupvault.ListBackupsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_backups( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_backups_flattened_error(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
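- # (the flattened keywords are sugar for building the request message, so a
- # call such as client.list_backups(backupvault.ListBackupsRequest(), parent='p')
- # is ambiguous about which value wins; the client raises ValueError rather
- # than guessing.)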
- with pytest.raises(ValueError): - client.list_backups( - backupvault.ListBackupsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_backups_flattened_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = backupvault.ListBackupsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupvault.ListBackupsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_backups( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_backups_flattened_error_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_backups( - backupvault.ListBackupsRequest(), - parent='parent_value', - ) - - -def test_list_backups_pager(transport_name: str = "grpc"): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - backupvault.ListBackupsResponse( - backups=[ - backupvault.Backup(), - backupvault.Backup(), - backupvault.Backup(), - ], - next_page_token='abc', - ), - backupvault.ListBackupsResponse( - backups=[], - next_page_token='def', - ), - backupvault.ListBackupsResponse( - backups=[ - backupvault.Backup(), - ], - next_page_token='ghi', - ), - backupvault.ListBackupsResponse( - backups=[ - backupvault.Backup(), - backupvault.Backup(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_backups(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, backupvault.Backup) - for i in results) -def test_list_backups_pages(transport_name: str = "grpc"): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - # Set the response to a series of pages. 
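- # (the side_effect sequence below simulates server-side pagination: tokens
- # 'abc' -> 'def' -> 'ghi' -> '' chain four pages holding 3 + 0 + 1 + 2
- # backups, and the trailing RuntimeError would only fire if the pager
- # fetched past the final, token-less page.)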
- call.side_effect = ( - backupvault.ListBackupsResponse( - backups=[ - backupvault.Backup(), - backupvault.Backup(), - backupvault.Backup(), - ], - next_page_token='abc', - ), - backupvault.ListBackupsResponse( - backups=[], - next_page_token='def', - ), - backupvault.ListBackupsResponse( - backups=[ - backupvault.Backup(), - ], - next_page_token='ghi', - ), - backupvault.ListBackupsResponse( - backups=[ - backupvault.Backup(), - backupvault.Backup(), - ], - ), - RuntimeError, - ) - pages = list(client.list_backups(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_backups_async_pager(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - backupvault.ListBackupsResponse( - backups=[ - backupvault.Backup(), - backupvault.Backup(), - backupvault.Backup(), - ], - next_page_token='abc', - ), - backupvault.ListBackupsResponse( - backups=[], - next_page_token='def', - ), - backupvault.ListBackupsResponse( - backups=[ - backupvault.Backup(), - ], - next_page_token='ghi', - ), - backupvault.ListBackupsResponse( - backups=[ - backupvault.Backup(), - backupvault.Backup(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_backups(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, backupvault.Backup) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_backups_async_pages(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - backupvault.ListBackupsResponse( - backups=[ - backupvault.Backup(), - backupvault.Backup(), - backupvault.Backup(), - ], - next_page_token='abc', - ), - backupvault.ListBackupsResponse( - backups=[], - next_page_token='def', - ), - backupvault.ListBackupsResponse( - backups=[ - backupvault.Backup(), - ], - next_page_token='ghi', - ), - backupvault.ListBackupsResponse( - backups=[ - backupvault.Backup(), - backupvault.Backup(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_backups(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - backupvault.GetBackupRequest, - dict, -]) -def test_get_backup(request_type, transport: str = 'grpc'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
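- # (in proto3 every scalar field carries a zero default, so an empty message
- # is always well-formed on the wire; e.g., assuming proto-plus semantics,
- # backupvault.GetBackupRequest().name == '' holds with no field set.)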
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = backupvault.Backup( - name='name_value', - description='description_value', - etag='etag_value', - state=backupvault.Backup.State.CREATING, - backup_type=backupvault.Backup.BackupType.SCHEDULED, - resource_size_bytes=2056, - ) - response = client.get_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = backupvault.GetBackupRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, backupvault.Backup) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.etag == 'etag_value' - assert response.state == backupvault.Backup.State.CREATING - assert response.backup_type == backupvault.Backup.BackupType.SCHEDULED - assert response.resource_size_bytes == 2056 - - -def test_get_backup_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = backupvault.GetBackupRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_backup(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backupvault.GetBackupRequest( - name='name_value', - ) - -def test_get_backup_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_backup in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_backup] = mock_rpc - request = {} - client.get_backup(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.get_backup(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
- @pytest.mark.asyncio
- async def test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = BackupDRAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.get_backup in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.get_backup] = mock_rpc
-
- request = {}
- await client.get_backup(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.get_backup(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
- @pytest.mark.asyncio
- async def test_get_backup_async(transport: str = 'grpc_asyncio', request_type=backupvault.GetBackupRequest):
- client = BackupDRAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_backup),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupvault.Backup(
- name='name_value',
- description='description_value',
- etag='etag_value',
- state=backupvault.Backup.State.CREATING,
- backup_type=backupvault.Backup.BackupType.SCHEDULED,
- resource_size_bytes=2056,
- ))
- response = await client.get_backup(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = backupvault.GetBackupRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, backupvault.Backup)
- assert response.name == 'name_value'
- assert response.description == 'description_value'
- assert response.etag == 'etag_value'
- assert response.state == backupvault.Backup.State.CREATING
- assert response.backup_type == backupvault.Backup.BackupType.SCHEDULED
- assert response.resource_size_bytes == 2056
-
-
- @pytest.mark.asyncio
- async def test_get_backup_async_from_dict():
- await test_get_backup_async(request_type=dict)
-
- def test_get_backup_field_headers():
- client = BackupDRClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = backupvault.GetBackupRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
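- # (patching __call__ on type(client.transport.get_backup) intercepts the
- # underlying unary-unary gRPC callable itself, so the client-side request
- # path, including routing-header injection, still runs above the mock.)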
- with mock.patch.object( - type(client.transport.get_backup), - '__call__') as call: - call.return_value = backupvault.Backup() - client.get_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_backup_field_headers_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backupvault.GetBackupRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupvault.Backup()) - await client.get_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_backup_flattened(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = backupvault.Backup() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_backup( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_backup_flattened_error(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_backup( - backupvault.GetBackupRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_backup_flattened_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = backupvault.Backup() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupvault.Backup()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_backup( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_backup_flattened_error_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_backup( - backupvault.GetBackupRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - backupvault.UpdateBackupRequest, - dict, -]) -def test_update_backup(request_type, transport: str = 'grpc'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = backupvault.UpdateBackupRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_backup_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = backupvault.UpdateBackupRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_backup(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backupvault.UpdateBackupRequest( - ) - -def test_update_backup_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_backup in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_backup] = mock_rpc - request = {} - client.update_backup(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_backup(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_backup in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_backup] = mock_rpc - - request = {} - await client.update_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_backup(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_backup_async(transport: str = 'grpc_asyncio', request_type=backupvault.UpdateBackupRequest): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = backupvault.UpdateBackupRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_backup_async_from_dict(): - await test_update_backup_async(request_type=dict) - -def test_update_backup_field_headers(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backupvault.UpdateBackupRequest() - - request.backup.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_backup), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'backup.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_backup_field_headers_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backupvault.UpdateBackupRequest() - - request.backup.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'backup.name=name_value', - ) in kw['metadata'] - - -def test_update_backup_flattened(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_backup( - backup=backupvault.Backup(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].backup - mock_val = backupvault.Backup(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_backup_flattened_error(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_backup( - backupvault.UpdateBackupRequest(), - backup=backupvault.Backup(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_backup_flattened_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_backup), - '__call__') as call: - # Designate an appropriate return value for the call. 
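- # (grpc_helpers_async.FakeUnaryUnaryCall is an awaitable stand-in for a
- # grpc.aio call that resolves to the wrapped message; the plain Operation
- # assigned first below is overwritten before it can be observed.)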
- call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_backup( - backup=backupvault.Backup(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].backup - mock_val = backupvault.Backup(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_backup_flattened_error_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_backup( - backupvault.UpdateBackupRequest(), - backup=backupvault.Backup(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - backupvault.DeleteBackupRequest, - dict, -]) -def test_delete_backup(request_type, transport: str = 'grpc'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.delete_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = backupvault.DeleteBackupRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_backup_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = backupvault.DeleteBackupRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.delete_backup(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backupvault.DeleteBackupRequest( - name='name_value', - ) - -def test_delete_backup_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_backup in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_backup] = mock_rpc - request = {} - client.delete_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_backup(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_backup in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_backup] = mock_rpc - - request = {} - await client.delete_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_backup(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_backup_async(transport: str = 'grpc_asyncio', request_type=backupvault.DeleteBackupRequest): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
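- # (delete_backup is a long-running method: the raw operations_pb2.Operation
- # returned by the stub is wrapped by the client into an operation future,
- # which is why the test asserts the generic future.Future interface rather
- # than comparing against the Operation proto itself.)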
- with mock.patch.object( - type(client.transport.delete_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = backupvault.DeleteBackupRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_backup_async_from_dict(): - await test_delete_backup_async(request_type=dict) - -def test_delete_backup_field_headers(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backupvault.DeleteBackupRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_backup_field_headers_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backupvault.DeleteBackupRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_backup_flattened(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_backup( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_backup_flattened_error(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_backup( - backupvault.DeleteBackupRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_backup_flattened_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_backup( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_backup_flattened_error_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_backup( - backupvault.DeleteBackupRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - backupvault.RestoreBackupRequest, - dict, -]) -def test_restore_backup(request_type, transport: str = 'grpc'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restore_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.restore_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = backupvault.RestoreBackupRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_restore_backup_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
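- # (AIP-4235 auto-population: request-ID fields annotated for it are filled
- # client-side with a UUID4 string, roughly str(uuid.uuid4()), when left
- # unset; hand-set fields such as `name` below must survive untouched, which
- # is what the equality assertion checks.)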
- request = backupvault.RestoreBackupRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restore_backup), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.restore_backup(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backupvault.RestoreBackupRequest( - name='name_value', - ) - -def test_restore_backup_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.restore_backup in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.restore_backup] = mock_rpc - request = {} - client.restore_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.restore_backup(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_restore_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.restore_backup in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.restore_backup] = mock_rpc - - request = {} - await client.restore_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.restore_backup(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_restore_backup_async(transport: str = 'grpc_asyncio', request_type=backupvault.RestoreBackupRequest): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restore_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.restore_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = backupvault.RestoreBackupRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_restore_backup_async_from_dict(): - await test_restore_backup_async(request_type=dict) - -def test_restore_backup_field_headers(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backupvault.RestoreBackupRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restore_backup), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.restore_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_restore_backup_field_headers_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backupvault.RestoreBackupRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restore_backup), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.restore_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_restore_backup_flattened():
- client = BackupDRClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.restore_backup),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/op')
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.restore_backup(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_restore_backup_flattened_error():
- client = BackupDRClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.restore_backup(
- backupvault.RestoreBackupRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_restore_backup_flattened_async():
- client = BackupDRAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.restore_backup),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name='operations/spam')
- )
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.restore_backup(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_restore_backup_flattened_error_async():
- client = BackupDRAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.restore_backup(
- backupvault.RestoreBackupRequest(),
- name='name_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- backupplan.CreateBackupPlanRequest,
- dict,
-])
-def test_create_backup_plan(request_type, transport: str = 'grpc'):
- client = BackupDRClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_backup_plan),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/spam')
- response = client.create_backup_plan(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = backupplan.CreateBackupPlanRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_backup_plan_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = backupplan.CreateBackupPlanRequest( - parent='parent_value', - backup_plan_id='backup_plan_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_plan), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_backup_plan(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backupplan.CreateBackupPlanRequest( - parent='parent_value', - backup_plan_id='backup_plan_id_value', - ) - -def test_create_backup_plan_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_backup_plan in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_backup_plan] = mock_rpc - request = {} - client.create_backup_plan(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_backup_plan(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_backup_plan_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_backup_plan in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_backup_plan] = mock_rpc - - request = {} - await client.create_backup_plan(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_backup_plan(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_backup_plan_async(transport: str = 'grpc_asyncio', request_type=backupplan.CreateBackupPlanRequest): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_plan), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_backup_plan(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = backupplan.CreateBackupPlanRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_backup_plan_async_from_dict(): - await test_create_backup_plan_async(request_type=dict) - -def test_create_backup_plan_field_headers(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backupplan.CreateBackupPlanRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_backup_plan), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_backup_plan(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_backup_plan_field_headers_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backupplan.CreateBackupPlanRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_plan), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_backup_plan(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_backup_plan_flattened(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_plan), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_backup_plan( - parent='parent_value', - backup_plan=backupplan.BackupPlan(name='name_value'), - backup_plan_id='backup_plan_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].backup_plan - mock_val = backupplan.BackupPlan(name='name_value') - assert arg == mock_val - arg = args[0].backup_plan_id - mock_val = 'backup_plan_id_value' - assert arg == mock_val - - -def test_create_backup_plan_flattened_error(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_backup_plan( - backupplan.CreateBackupPlanRequest(), - parent='parent_value', - backup_plan=backupplan.BackupPlan(name='name_value'), - backup_plan_id='backup_plan_id_value', - ) - -@pytest.mark.asyncio -async def test_create_backup_plan_flattened_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_plan), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name='operations/spam')
- )
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.create_backup_plan(
- parent='parent_value',
- backup_plan=backupplan.BackupPlan(name='name_value'),
- backup_plan_id='backup_plan_id_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
- arg = args[0].backup_plan
- mock_val = backupplan.BackupPlan(name='name_value')
- assert arg == mock_val
- arg = args[0].backup_plan_id
- mock_val = 'backup_plan_id_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_backup_plan_flattened_error_async():
- client = BackupDRAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.create_backup_plan(
- backupplan.CreateBackupPlanRequest(),
- parent='parent_value',
- backup_plan=backupplan.BackupPlan(name='name_value'),
- backup_plan_id='backup_plan_id_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- backupplan.GetBackupPlanRequest,
- dict,
-])
-def test_get_backup_plan(request_type, transport: str = 'grpc'):
- client = BackupDRClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_backup_plan),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = backupplan.BackupPlan(
- name='name_value',
- description='description_value',
- state=backupplan.BackupPlan.State.CREATING,
- resource_type='resource_type_value',
- etag='etag_value',
- backup_vault='backup_vault_value',
- backup_vault_service_account='backup_vault_service_account_value',
- )
- response = client.get_backup_plan(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = backupplan.GetBackupPlanRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, backupplan.BackupPlan)
- assert response.name == 'name_value'
- assert response.description == 'description_value'
- assert response.state == backupplan.BackupPlan.State.CREATING
- assert response.resource_type == 'resource_type_value'
- assert response.etag == 'etag_value'
- assert response.backup_vault == 'backup_vault_value'
- assert response.backup_vault_service_account == 'backup_vault_service_account_value'
-
-
-def test_get_backup_plan_non_empty_request_with_auto_populated_field():
- # This test is a coverage failsafe to make sure that UUID4 fields are
- # automatically populated, according to AIP-4235, with non-empty requests.
- client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = backupplan.GetBackupPlanRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_plan), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_backup_plan(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backupplan.GetBackupPlanRequest( - name='name_value', - ) - -def test_get_backup_plan_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_backup_plan in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_backup_plan] = mock_rpc - request = {} - client.get_backup_plan(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_backup_plan(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_backup_plan_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_backup_plan in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_backup_plan] = mock_rpc - - request = {} - await client.get_backup_plan(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- await client.get_backup_plan(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_backup_plan_async(transport: str = 'grpc_asyncio', request_type=backupplan.GetBackupPlanRequest):
- client = BackupDRAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_backup_plan),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupplan.BackupPlan(
- name='name_value',
- description='description_value',
- state=backupplan.BackupPlan.State.CREATING,
- resource_type='resource_type_value',
- etag='etag_value',
- backup_vault='backup_vault_value',
- backup_vault_service_account='backup_vault_service_account_value',
- ))
- response = await client.get_backup_plan(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = backupplan.GetBackupPlanRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, backupplan.BackupPlan)
- assert response.name == 'name_value'
- assert response.description == 'description_value'
- assert response.state == backupplan.BackupPlan.State.CREATING
- assert response.resource_type == 'resource_type_value'
- assert response.etag == 'etag_value'
- assert response.backup_vault == 'backup_vault_value'
- assert response.backup_vault_service_account == 'backup_vault_service_account_value'
-
-
-@pytest.mark.asyncio
-async def test_get_backup_plan_async_from_dict():
- await test_get_backup_plan_async(request_type=dict)
-
-def test_get_backup_plan_field_headers():
- client = BackupDRClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = backupplan.GetBackupPlanRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_backup_plan),
- '__call__') as call:
- call.return_value = backupplan.BackupPlan()
- client.get_backup_plan(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_backup_plan_field_headers_async():
- client = BackupDRAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = backupplan.GetBackupPlanRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_backup_plan),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupplan.BackupPlan())
- await client.get_backup_plan(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_get_backup_plan_flattened():
- client = BackupDRClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_backup_plan),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = backupplan.BackupPlan()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.get_backup_plan(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_get_backup_plan_flattened_error():
- client = BackupDRClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.get_backup_plan(
- backupplan.GetBackupPlanRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_get_backup_plan_flattened_async():
- client = BackupDRAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_backup_plan),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupplan.BackupPlan())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.get_backup_plan(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_backup_plan_flattened_error_async():
- client = BackupDRAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.get_backup_plan(
- backupplan.GetBackupPlanRequest(),
- name='name_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- backupplan.ListBackupPlansRequest,
- dict,
-])
-def test_list_backup_plans(request_type, transport: str = 'grpc'):
- client = BackupDRClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.list_backup_plans), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = backupplan.ListBackupPlansResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - response = client.list_backup_plans(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = backupplan.ListBackupPlansRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBackupPlansPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -def test_list_backup_plans_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = backupplan.ListBackupPlansRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_plans), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_backup_plans(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backupplan.ListBackupPlansRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - -def test_list_backup_plans_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_backup_plans in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_backup_plans] = mock_rpc - request = {} - client.list_backup_plans(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.list_backup_plans(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_backup_plans_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = BackupDRAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.list_backup_plans in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.list_backup_plans] = mock_rpc
-
- request = {}
- await client.list_backup_plans(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.list_backup_plans(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_backup_plans_async(transport: str = 'grpc_asyncio', request_type=backupplan.ListBackupPlansRequest):
- client = BackupDRAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_backup_plans),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupplan.ListBackupPlansResponse(
- next_page_token='next_page_token_value',
- unreachable=['unreachable_value'],
- ))
- response = await client.list_backup_plans(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = backupplan.ListBackupPlansRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListBackupPlansAsyncPager)
- assert response.next_page_token == 'next_page_token_value'
- assert response.unreachable == ['unreachable_value']
-
-
-@pytest.mark.asyncio
-async def test_list_backup_plans_async_from_dict():
- await test_list_backup_plans_async(request_type=dict)
-
-def test_list_backup_plans_field_headers():
- client = BackupDRClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = backupplan.ListBackupPlansRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_backup_plans),
- '__call__') as call:
- call.return_value = backupplan.ListBackupPlansResponse()
- client.list_backup_plans(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_backup_plans_field_headers_async():
- client = BackupDRAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = backupplan.ListBackupPlansRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_backup_plans),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupplan.ListBackupPlansResponse())
- await client.list_backup_plans(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-def test_list_backup_plans_flattened():
- client = BackupDRClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_backup_plans),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = backupplan.ListBackupPlansResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.list_backup_plans(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-
-def test_list_backup_plans_flattened_error():
- client = BackupDRClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.list_backup_plans(
- backupplan.ListBackupPlansRequest(),
- parent='parent_value',
- )
-
-@pytest.mark.asyncio
-async def test_list_backup_plans_flattened_async():
- client = BackupDRAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_backup_plans),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupplan.ListBackupPlansResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.list_backup_plans(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_backup_plans_flattened_error_async():
- client = BackupDRAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.list_backup_plans(
- backupplan.ListBackupPlansRequest(),
- parent='parent_value',
- )
-
-
-def test_list_backup_plans_pager(transport_name: str = "grpc"):
- client = BackupDRClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_backup_plans),
- '__call__') as call:
- # Set the response to a series of pages.
- call.side_effect = (
- backupplan.ListBackupPlansResponse(
- backup_plans=[
- backupplan.BackupPlan(),
- backupplan.BackupPlan(),
- backupplan.BackupPlan(),
- ],
- next_page_token='abc',
- ),
- backupplan.ListBackupPlansResponse(
- backup_plans=[],
- next_page_token='def',
- ),
- backupplan.ListBackupPlansResponse(
- backup_plans=[
- backupplan.BackupPlan(),
- ],
- next_page_token='ghi',
- ),
- backupplan.ListBackupPlansResponse(
- backup_plans=[
- backupplan.BackupPlan(),
- backupplan.BackupPlan(),
- ],
- ),
- RuntimeError,
- )
-
- expected_metadata = ()
- retry = retries.Retry()
- timeout = 5
- expected_metadata = tuple(expected_metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ('parent', ''),
- )),
- )
- pager = client.list_backup_plans(request={}, retry=retry, timeout=timeout)
-
- assert pager._metadata == expected_metadata
- assert pager._retry == retry
- assert pager._timeout == timeout
-
- results = list(pager)
- assert len(results) == 6
- assert all(isinstance(i, backupplan.BackupPlan)
- for i in results)
-
-
-def test_list_backup_plans_pages(transport_name: str = "grpc"):
- client = BackupDRClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_backup_plans),
- '__call__') as call:
- # Set the response to a series of pages.
- call.side_effect = (
- backupplan.ListBackupPlansResponse(
- backup_plans=[
- backupplan.BackupPlan(),
- backupplan.BackupPlan(),
- backupplan.BackupPlan(),
- ],
- next_page_token='abc',
- ),
- backupplan.ListBackupPlansResponse(
- backup_plans=[],
- next_page_token='def',
- ),
- backupplan.ListBackupPlansResponse(
- backup_plans=[
- backupplan.BackupPlan(),
- ],
- next_page_token='ghi',
- ),
- backupplan.ListBackupPlansResponse(
- backup_plans=[
- backupplan.BackupPlan(),
- backupplan.BackupPlan(),
- ],
- ),
- RuntimeError,
- )
- pages = list(client.list_backup_plans(request={}).pages)
- for page_, token in zip(pages, ['abc','def','ghi', '']):
- assert page_.raw_page.next_page_token == token
-
-@pytest.mark.asyncio
-async def test_list_backup_plans_async_pager():
- client = BackupDRAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_backup_plans),
- '__call__', new_callable=mock.AsyncMock) as call:
- # Set the response to a series of pages.
- call.side_effect = ( - backupplan.ListBackupPlansResponse( - backup_plans=[ - backupplan.BackupPlan(), - backupplan.BackupPlan(), - backupplan.BackupPlan(), - ], - next_page_token='abc', - ), - backupplan.ListBackupPlansResponse( - backup_plans=[], - next_page_token='def', - ), - backupplan.ListBackupPlansResponse( - backup_plans=[ - backupplan.BackupPlan(), - ], - next_page_token='ghi', - ), - backupplan.ListBackupPlansResponse( - backup_plans=[ - backupplan.BackupPlan(), - backupplan.BackupPlan(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_backup_plans(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, backupplan.BackupPlan) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_backup_plans_async_pages(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_plans), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - backupplan.ListBackupPlansResponse( - backup_plans=[ - backupplan.BackupPlan(), - backupplan.BackupPlan(), - backupplan.BackupPlan(), - ], - next_page_token='abc', - ), - backupplan.ListBackupPlansResponse( - backup_plans=[], - next_page_token='def', - ), - backupplan.ListBackupPlansResponse( - backup_plans=[ - backupplan.BackupPlan(), - ], - next_page_token='ghi', - ), - backupplan.ListBackupPlansResponse( - backup_plans=[ - backupplan.BackupPlan(), - backupplan.BackupPlan(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_backup_plans(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - backupplan.DeleteBackupPlanRequest, - dict, -]) -def test_delete_backup_plan(request_type, transport: str = 'grpc'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_plan), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.delete_backup_plan(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = backupplan.DeleteBackupPlanRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_backup_plan_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = backupplan.DeleteBackupPlanRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_plan), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_backup_plan(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backupplan.DeleteBackupPlanRequest( - name='name_value', - ) - -def test_delete_backup_plan_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_backup_plan in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_backup_plan] = mock_rpc - request = {} - client.delete_backup_plan(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_backup_plan(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_backup_plan_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_backup_plan in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_backup_plan] = mock_rpc - - request = {} - await client.delete_backup_plan(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_backup_plan(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_backup_plan_async(transport: str = 'grpc_asyncio', request_type=backupplan.DeleteBackupPlanRequest): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_plan), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_backup_plan(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = backupplan.DeleteBackupPlanRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_backup_plan_async_from_dict(): - await test_delete_backup_plan_async(request_type=dict) - -def test_delete_backup_plan_field_headers(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backupplan.DeleteBackupPlanRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_plan), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_backup_plan(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_backup_plan_field_headers_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backupplan.DeleteBackupPlanRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_plan), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_backup_plan(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_delete_backup_plan_flattened():
- client = BackupDRClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_backup_plan),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/op')
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.delete_backup_plan(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_delete_backup_plan_flattened_error():
- client = BackupDRClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.delete_backup_plan(
- backupplan.DeleteBackupPlanRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_delete_backup_plan_flattened_async():
- client = BackupDRAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_backup_plan),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name='operations/spam')
- )
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.delete_backup_plan(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_backup_plan_flattened_error_async():
- client = BackupDRAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.delete_backup_plan(
- backupplan.DeleteBackupPlanRequest(),
- name='name_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- backupplanassociation.CreateBackupPlanAssociationRequest,
- dict,
-])
-def test_create_backup_plan_association(request_type, transport: str = 'grpc'):
- client = BackupDRClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_backup_plan_association),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_backup_plan_association(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = backupplanassociation.CreateBackupPlanAssociationRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_backup_plan_association_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = backupplanassociation.CreateBackupPlanAssociationRequest( - parent='parent_value', - backup_plan_association_id='backup_plan_association_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_plan_association), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_backup_plan_association(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backupplanassociation.CreateBackupPlanAssociationRequest( - parent='parent_value', - backup_plan_association_id='backup_plan_association_id_value', - ) - -def test_create_backup_plan_association_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_backup_plan_association in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_backup_plan_association] = mock_rpc - request = {} - client.create_backup_plan_association(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_backup_plan_association(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_backup_plan_association_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_backup_plan_association in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_backup_plan_association] = mock_rpc - - request = {} - await client.create_backup_plan_association(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_backup_plan_association(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_backup_plan_association_async(transport: str = 'grpc_asyncio', request_type=backupplanassociation.CreateBackupPlanAssociationRequest): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_plan_association), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_backup_plan_association(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = backupplanassociation.CreateBackupPlanAssociationRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_backup_plan_association_async_from_dict(): - await test_create_backup_plan_association_async(request_type=dict) - -def test_create_backup_plan_association_field_headers(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
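The field_headers tests around this point pin down implicit routing: any request field that is bound into the HTTP/1.1 URI must also travel as the ``x-goog-request-params`` metadata entry. A minimal sketch of how that tuple is built, using the same ``google.api_core`` helper the pager test later in this file invokes; the field value here mirrors the test's placeholder:

.. code-block:: python

    from google.api_core.gapic_v1 import routing_header

    # URI-bound fields are URL-encoded into a single metadata entry.
    metadata = routing_header.to_grpc_metadata([("parent", "parent_value")])
    assert metadata == ("x-goog-request-params", "parent=parent_value")

The test continues below by setting ``request.parent`` and asserting that exactly such a tuple lands in the metadata passed to the stub.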
- request = backupplanassociation.CreateBackupPlanAssociationRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_plan_association), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_backup_plan_association(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_backup_plan_association_field_headers_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backupplanassociation.CreateBackupPlanAssociationRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_plan_association), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_backup_plan_association(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_backup_plan_association_flattened(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_plan_association), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_backup_plan_association( - parent='parent_value', - backup_plan_association=backupplanassociation.BackupPlanAssociation(name='name_value'), - backup_plan_association_id='backup_plan_association_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].backup_plan_association - mock_val = backupplanassociation.BackupPlanAssociation(name='name_value') - assert arg == mock_val - arg = args[0].backup_plan_association_id - mock_val = 'backup_plan_association_id_value' - assert arg == mock_val - - -def test_create_backup_plan_association_flattened_error(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
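The ``*_flattened_error`` tests that follow exercise the generated guard against mixing a ``request`` object with flattened keyword arguments. A hedged sketch of that guard; the function name and body are illustrative of the pattern, not the generated client's exact internals, though the error message matches the one the clients raise:

.. code-block:: python

    def delete_backup_plan(request=None, *, name=None):
        # Mixing a request object with flattened fields is ambiguous, so the
        # generated clients refuse the call up front rather than guessing.
        has_flattened_params = name is not None
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )
        # ...otherwise build the request from `name` and invoke the wrapped RPC.

Both the sync and async ``*_flattened_error`` tests around this point assert precisely that ``ValueError`` via ``pytest.raises``.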
- with pytest.raises(ValueError): - client.create_backup_plan_association( - backupplanassociation.CreateBackupPlanAssociationRequest(), - parent='parent_value', - backup_plan_association=backupplanassociation.BackupPlanAssociation(name='name_value'), - backup_plan_association_id='backup_plan_association_id_value', - ) - -@pytest.mark.asyncio -async def test_create_backup_plan_association_flattened_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_plan_association), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_backup_plan_association( - parent='parent_value', - backup_plan_association=backupplanassociation.BackupPlanAssociation(name='name_value'), - backup_plan_association_id='backup_plan_association_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].backup_plan_association - mock_val = backupplanassociation.BackupPlanAssociation(name='name_value') - assert arg == mock_val - arg = args[0].backup_plan_association_id - mock_val = 'backup_plan_association_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_backup_plan_association_flattened_error_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_backup_plan_association( - backupplanassociation.CreateBackupPlanAssociationRequest(), - parent='parent_value', - backup_plan_association=backupplanassociation.BackupPlanAssociation(name='name_value'), - backup_plan_association_id='backup_plan_association_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - backupplanassociation.GetBackupPlanAssociationRequest, - dict, -]) -def test_get_backup_plan_association(request_type, transport: str = 'grpc'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_plan_association), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = backupplanassociation.BackupPlanAssociation( - name='name_value', - resource_type='resource_type_value', - resource='resource_value', - backup_plan='backup_plan_value', - state=backupplanassociation.BackupPlanAssociation.State.CREATING, - data_source='data_source_value', - ) - response = client.get_backup_plan_association(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = backupplanassociation.GetBackupPlanAssociationRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, backupplanassociation.BackupPlanAssociation) - assert response.name == 'name_value' - assert response.resource_type == 'resource_type_value' - assert response.resource == 'resource_value' - assert response.backup_plan == 'backup_plan_value' - assert response.state == backupplanassociation.BackupPlanAssociation.State.CREATING - assert response.data_source == 'data_source_value' - - -def test_get_backup_plan_association_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = backupplanassociation.GetBackupPlanAssociationRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_plan_association), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_backup_plan_association(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backupplanassociation.GetBackupPlanAssociationRequest( - name='name_value', - ) - -def test_get_backup_plan_association_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_backup_plan_association in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_backup_plan_association] = mock_rpc - request = {} - client.get_backup_plan_association(request) - - # Establish that the underlying gRPC stub method was called. 
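The ``use_cached_wrapped_rpc`` tests in this section all pin down the same contract: the transport wraps each RPC once (in ``_prep_wrapped_messages``) and keeps the wrapper in ``_wrapped_methods``, so later calls reuse it instead of re-wrapping. A minimal sketch of that caching, assuming a bare callable as a stand-in for the gRPC stub and an arbitrary default timeout:

.. code-block:: python

    from google.api_core import gapic_v1

    def stub(request, timeout=None, metadata=()):
        # Stand-in for a unary-unary gRPC stub method.
        return "response"

    _wrapped_methods = {}

    def wrapped(method):
        # Wrap on first use only; the tests assert wrap_method is not
        # called again when the RPC is invoked a second time.
        if method not in _wrapped_methods:
            _wrapped_methods[method] = gapic_v1.method.wrap_method(
                method, default_timeout=60.0
            )
        return _wrapped_methods[method]

    assert wrapped(stub) is wrapped(stub)  # cached wrapper, created once

With the cached wrapper swapped for a mock, the call-count assertions that follow check that the mock, not a freshly created wrapper, handles every invocation.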
- assert mock_rpc.call_count == 1 - - client.get_backup_plan_association(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_backup_plan_association_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_backup_plan_association in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_backup_plan_association] = mock_rpc - - request = {} - await client.get_backup_plan_association(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_backup_plan_association(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_backup_plan_association_async(transport: str = 'grpc_asyncio', request_type=backupplanassociation.GetBackupPlanAssociationRequest): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_plan_association), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(backupplanassociation.BackupPlanAssociation( - name='name_value', - resource_type='resource_type_value', - resource='resource_value', - backup_plan='backup_plan_value', - state=backupplanassociation.BackupPlanAssociation.State.CREATING, - data_source='data_source_value', - )) - response = await client.get_backup_plan_association(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = backupplanassociation.GetBackupPlanAssociationRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
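The async variants cannot return a bare proto from the mocked ``__call__``, because the async client awaits the transport call; that is why they wrap every designated response in ``grpc_helpers_async.FakeUnaryUnaryCall``. A minimal sketch of the awaitable contract involved; the class here is an illustrative stand-in, not the real helper:

.. code-block:: python

    import asyncio

    class FakeCall:
        # Illustrative stand-in for grpc_helpers_async.FakeUnaryUnaryCall:
        # awaiting the call resolves to the designated response.
        def __init__(self, response):
            self._response = response

        def __await__(self):
            yield from ()  # a generator that yields nothing...
            return self._response  # ...and resolves to the response

    async def main():
        response = await FakeCall({"name": "name_value"})
        assert response == {"name": "name_value"}

    asyncio.run(main())

Once awaited, the response is the plain message, so the field-by-field assertions that follow read it directly.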
- assert isinstance(response, backupplanassociation.BackupPlanAssociation) - assert response.name == 'name_value' - assert response.resource_type == 'resource_type_value' - assert response.resource == 'resource_value' - assert response.backup_plan == 'backup_plan_value' - assert response.state == backupplanassociation.BackupPlanAssociation.State.CREATING - assert response.data_source == 'data_source_value' - - -@pytest.mark.asyncio -async def test_get_backup_plan_association_async_from_dict(): - await test_get_backup_plan_association_async(request_type=dict) - -def test_get_backup_plan_association_field_headers(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backupplanassociation.GetBackupPlanAssociationRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_plan_association), - '__call__') as call: - call.return_value = backupplanassociation.BackupPlanAssociation() - client.get_backup_plan_association(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_backup_plan_association_field_headers_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backupplanassociation.GetBackupPlanAssociationRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_plan_association), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupplanassociation.BackupPlanAssociation()) - await client.get_backup_plan_association(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_backup_plan_association_flattened(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_plan_association), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = backupplanassociation.BackupPlanAssociation() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_backup_plan_association( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_backup_plan_association_flattened_error(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_backup_plan_association( - backupplanassociation.GetBackupPlanAssociationRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_backup_plan_association_flattened_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_plan_association), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = backupplanassociation.BackupPlanAssociation() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupplanassociation.BackupPlanAssociation()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_backup_plan_association( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_backup_plan_association_flattened_error_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_backup_plan_association( - backupplanassociation.GetBackupPlanAssociationRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - backupplanassociation.ListBackupPlanAssociationsRequest, - dict, -]) -def test_list_backup_plan_associations(request_type, transport: str = 'grpc'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_plan_associations), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = backupplanassociation.ListBackupPlanAssociationsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - response = client.list_backup_plan_associations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = backupplanassociation.ListBackupPlanAssociationsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListBackupPlanAssociationsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -def test_list_backup_plan_associations_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = backupplanassociation.ListBackupPlanAssociationsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_plan_associations), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_backup_plan_associations(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backupplanassociation.ListBackupPlanAssociationsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - ) - -def test_list_backup_plan_associations_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_backup_plan_associations in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_backup_plan_associations] = mock_rpc - request = {} - client.list_backup_plan_associations(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_backup_plan_associations(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_backup_plan_associations_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_backup_plan_associations in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_backup_plan_associations] = mock_rpc - - request = {} - await client.list_backup_plan_associations(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_backup_plan_associations(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_backup_plan_associations_async(transport: str = 'grpc_asyncio', request_type=backupplanassociation.ListBackupPlanAssociationsRequest): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_plan_associations), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(backupplanassociation.ListBackupPlanAssociationsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - response = await client.list_backup_plan_associations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = backupplanassociation.ListBackupPlanAssociationsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBackupPlanAssociationsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -@pytest.mark.asyncio -async def test_list_backup_plan_associations_async_from_dict(): - await test_list_backup_plan_associations_async(request_type=dict) - -def test_list_backup_plan_associations_field_headers(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backupplanassociation.ListBackupPlanAssociationsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_backup_plan_associations), - '__call__') as call: - call.return_value = backupplanassociation.ListBackupPlanAssociationsResponse() - client.list_backup_plan_associations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_backup_plan_associations_field_headers_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backupplanassociation.ListBackupPlanAssociationsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_plan_associations), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupplanassociation.ListBackupPlanAssociationsResponse()) - await client.list_backup_plan_associations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_backup_plan_associations_flattened(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_plan_associations), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = backupplanassociation.ListBackupPlanAssociationsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_backup_plan_associations( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_backup_plan_associations_flattened_error(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_backup_plan_associations( - backupplanassociation.ListBackupPlanAssociationsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_backup_plan_associations_flattened_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_plan_associations), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = backupplanassociation.ListBackupPlanAssociationsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupplanassociation.ListBackupPlanAssociationsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_backup_plan_associations( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_backup_plan_associations_flattened_error_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_backup_plan_associations( - backupplanassociation.ListBackupPlanAssociationsRequest(), - parent='parent_value', - ) - - -def test_list_backup_plan_associations_pager(transport_name: str = "grpc"): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_plan_associations), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - backupplanassociation.ListBackupPlanAssociationsResponse( - backup_plan_associations=[ - backupplanassociation.BackupPlanAssociation(), - backupplanassociation.BackupPlanAssociation(), - backupplanassociation.BackupPlanAssociation(), - ], - next_page_token='abc', - ), - backupplanassociation.ListBackupPlanAssociationsResponse( - backup_plan_associations=[], - next_page_token='def', - ), - backupplanassociation.ListBackupPlanAssociationsResponse( - backup_plan_associations=[ - backupplanassociation.BackupPlanAssociation(), - ], - next_page_token='ghi', - ), - backupplanassociation.ListBackupPlanAssociationsResponse( - backup_plan_associations=[ - backupplanassociation.BackupPlanAssociation(), - backupplanassociation.BackupPlanAssociation(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_backup_plan_associations(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, backupplanassociation.BackupPlanAssociation) - for i in results) -def test_list_backup_plan_associations_pages(transport_name: str = "grpc"): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_plan_associations), - '__call__') as call: - # Set the response to a series of pages. 
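The pager tests never pass page tokens around explicitly; ``call.side_effect`` pops one canned response per underlying RPC while the pager loops on ``next_page_token``. A dict-based sketch of that loop; the real pager works on response protos and also forwards the retry, timeout, and routing metadata asserted above:

.. code-block:: python

    def iterate_all(fetch):
        # Keep fetching while the server returns a non-empty next_page_token;
        # an absent or empty token marks the final page.
        token = ""
        while True:
            page = fetch(page_token=token)
            yield from page["backup_plan_associations"]
            token = page.get("next_page_token", "")
            if not token:
                return

    pages = iter([
        {"backup_plan_associations": [1, 2, 3], "next_page_token": "abc"},
        {"backup_plan_associations": [], "next_page_token": "def"},
        {"backup_plan_associations": [4], "next_page_token": "ghi"},
        {"backup_plan_associations": [5, 6]},  # no token: final page
    ])
    results = list(iterate_all(lambda page_token: next(pages)))
    assert len(results) == 6  # six associations across four pages

The same four-page series appears below as ``call.side_effect``; the trailing ``RuntimeError`` would only fire if the pager kept fetching past the final page.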
- call.side_effect = ( - backupplanassociation.ListBackupPlanAssociationsResponse( - backup_plan_associations=[ - backupplanassociation.BackupPlanAssociation(), - backupplanassociation.BackupPlanAssociation(), - backupplanassociation.BackupPlanAssociation(), - ], - next_page_token='abc', - ), - backupplanassociation.ListBackupPlanAssociationsResponse( - backup_plan_associations=[], - next_page_token='def', - ), - backupplanassociation.ListBackupPlanAssociationsResponse( - backup_plan_associations=[ - backupplanassociation.BackupPlanAssociation(), - ], - next_page_token='ghi', - ), - backupplanassociation.ListBackupPlanAssociationsResponse( - backup_plan_associations=[ - backupplanassociation.BackupPlanAssociation(), - backupplanassociation.BackupPlanAssociation(), - ], - ), - RuntimeError, - ) - pages = list(client.list_backup_plan_associations(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_backup_plan_associations_async_pager(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_plan_associations), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - backupplanassociation.ListBackupPlanAssociationsResponse( - backup_plan_associations=[ - backupplanassociation.BackupPlanAssociation(), - backupplanassociation.BackupPlanAssociation(), - backupplanassociation.BackupPlanAssociation(), - ], - next_page_token='abc', - ), - backupplanassociation.ListBackupPlanAssociationsResponse( - backup_plan_associations=[], - next_page_token='def', - ), - backupplanassociation.ListBackupPlanAssociationsResponse( - backup_plan_associations=[ - backupplanassociation.BackupPlanAssociation(), - ], - next_page_token='ghi', - ), - backupplanassociation.ListBackupPlanAssociationsResponse( - backup_plan_associations=[ - backupplanassociation.BackupPlanAssociation(), - backupplanassociation.BackupPlanAssociation(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_backup_plan_associations(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, backupplanassociation.BackupPlanAssociation) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_backup_plan_associations_async_pages(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_plan_associations), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - backupplanassociation.ListBackupPlanAssociationsResponse( - backup_plan_associations=[ - backupplanassociation.BackupPlanAssociation(), - backupplanassociation.BackupPlanAssociation(), - backupplanassociation.BackupPlanAssociation(), - ], - next_page_token='abc', - ), - backupplanassociation.ListBackupPlanAssociationsResponse( - backup_plan_associations=[], - next_page_token='def', - ), - backupplanassociation.ListBackupPlanAssociationsResponse( - backup_plan_associations=[ - backupplanassociation.BackupPlanAssociation(), - ], - next_page_token='ghi', - ), - backupplanassociation.ListBackupPlanAssociationsResponse( - backup_plan_associations=[ - backupplanassociation.BackupPlanAssociation(), - backupplanassociation.BackupPlanAssociation(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_backup_plan_associations(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - backupplanassociation.DeleteBackupPlanAssociationRequest, - dict, -]) -def test_delete_backup_plan_association(request_type, transport: str = 'grpc'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_plan_association), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.delete_backup_plan_association(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = backupplanassociation.DeleteBackupPlanAssociationRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_backup_plan_association_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = backupplanassociation.DeleteBackupPlanAssociationRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_plan_association), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
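Several tests in this section assert ``isinstance(response, future.Future)`` for the Operation-returning RPCs: the raw ``operations_pb2.Operation`` designated on the mock never surfaces to the caller, because the client wraps it in a polling future. A sketch of that wrapping with a mocked operations client; ``empty_pb2.Empty`` as the result type is an assumption (Delete* LROs usually resolve to ``Empty``), and the real client supplies its own operations client and metadata type:

.. code-block:: python

    from unittest import mock

    from google.api_core import future, operation
    from google.longrunning import operations_pb2
    from google.protobuf import empty_pb2

    op_proto = operations_pb2.Operation(name="operations/spam")
    lro = operation.from_gapic(
        op_proto,
        mock.Mock(),      # stand-in for the transport's operations client
        empty_pb2.Empty,  # assumed result type for a Delete* LRO
    )
    assert isinstance(lro, future.Future)  # what the LRO tests assert
    assert lro.operation.name == "operations/spam"

The auto-population test then resumes below, invoking the client with the fully populated request.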
- client.delete_backup_plan_association(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backupplanassociation.DeleteBackupPlanAssociationRequest( - name='name_value', - ) - -def test_delete_backup_plan_association_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_backup_plan_association in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_backup_plan_association] = mock_rpc - request = {} - client.delete_backup_plan_association(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_backup_plan_association(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_backup_plan_association_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_backup_plan_association in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_backup_plan_association] = mock_rpc - - request = {} - await client.delete_backup_plan_association(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_backup_plan_association(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_backup_plan_association_async(transport: str = 'grpc_asyncio', request_type=backupplanassociation.DeleteBackupPlanAssociationRequest): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_plan_association), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_backup_plan_association(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = backupplanassociation.DeleteBackupPlanAssociationRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_backup_plan_association_async_from_dict(): - await test_delete_backup_plan_association_async(request_type=dict) - -def test_delete_backup_plan_association_field_headers(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backupplanassociation.DeleteBackupPlanAssociationRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_plan_association), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_backup_plan_association(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_backup_plan_association_field_headers_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backupplanassociation.DeleteBackupPlanAssociationRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_plan_association), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_backup_plan_association(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_backup_plan_association_flattened(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_plan_association), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_backup_plan_association( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_backup_plan_association_flattened_error(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_backup_plan_association( - backupplanassociation.DeleteBackupPlanAssociationRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_backup_plan_association_flattened_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_plan_association), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_backup_plan_association( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_backup_plan_association_flattened_error_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_backup_plan_association( - backupplanassociation.DeleteBackupPlanAssociationRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - backupplanassociation.TriggerBackupRequest, - dict, -]) -def test_trigger_backup(request_type, transport: str = 'grpc'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.trigger_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.trigger_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = backupplanassociation.TriggerBackupRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_trigger_backup_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = backupplanassociation.TriggerBackupRequest( - name='name_value', - rule_id='rule_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.trigger_backup), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.trigger_backup(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backupplanassociation.TriggerBackupRequest( - name='name_value', - rule_id='rule_id_value', - ) - -def test_trigger_backup_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.trigger_backup in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.trigger_backup] = mock_rpc - request = {} - client.trigger_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.trigger_backup(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_trigger_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.trigger_backup in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.trigger_backup] = mock_rpc - - request = {} - await client.trigger_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.trigger_backup(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_trigger_backup_async(transport: str = 'grpc_asyncio', request_type=backupplanassociation.TriggerBackupRequest): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.trigger_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.trigger_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = backupplanassociation.TriggerBackupRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_trigger_backup_async_from_dict(): - await test_trigger_backup_async(request_type=dict) - -def test_trigger_backup_field_headers(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backupplanassociation.TriggerBackupRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
-    with mock.patch.object(
-            type(client.transport.trigger_backup),
-            '__call__') as call:
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        client.trigger_backup(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_trigger_backup_field_headers_async():
-    client = BackupDRAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = backupplanassociation.TriggerBackupRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.trigger_backup),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
-        await client.trigger_backup(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_trigger_backup_flattened():
-    client = BackupDRClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.trigger_backup),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.trigger_backup(
-            name='name_value',
-            rule_id='rule_id_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-        arg = args[0].rule_id
-        mock_val = 'rule_id_value'
-        assert arg == mock_val
-
-
-def test_trigger_backup_flattened_error():
-    client = BackupDRClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.trigger_backup(
-            backupplanassociation.TriggerBackupRequest(),
-            name='name_value',
-            rule_id='rule_id_value',
-        )
-
-@pytest.mark.asyncio
-async def test_trigger_backup_flattened_async():
-    client = BackupDRAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.trigger_backup),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
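-        # Each keyword argument should be copied into the generated request message.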
- response = await client.trigger_backup( - name='name_value', - rule_id='rule_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].rule_id - mock_val = 'rule_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_trigger_backup_flattened_error_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.trigger_backup( - backupplanassociation.TriggerBackupRequest(), - name='name_value', - rule_id='rule_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - backupdr.InitializeServiceRequest, - dict, -]) -def test_initialize_service(request_type, transport: str = 'grpc'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.initialize_service), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.initialize_service(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = backupdr.InitializeServiceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_initialize_service_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = backupdr.InitializeServiceRequest( - name='name_value', - resource_type='resource_type_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.initialize_service), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
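-        # The request should pass through to the stub with the pre-populated
-        # string fields intact; only UUID4 fields may be auto-filled.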
- client.initialize_service(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backupdr.InitializeServiceRequest( - name='name_value', - resource_type='resource_type_value', - ) - -def test_initialize_service_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.initialize_service in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.initialize_service] = mock_rpc - request = {} - client.initialize_service(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.initialize_service(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_initialize_service_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.initialize_service in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.initialize_service] = mock_rpc - - request = {} - await client.initialize_service(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.initialize_service(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_initialize_service_async(transport: str = 'grpc_asyncio', request_type=backupdr.InitializeServiceRequest): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
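-    # The async stub must return an awaitable, so the fake below wraps the
-    # Operation in grpc_helpers_async.FakeUnaryUnaryCall.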
- with mock.patch.object( - type(client.transport.initialize_service), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.initialize_service(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = backupdr.InitializeServiceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_initialize_service_async_from_dict(): - await test_initialize_service_async(request_type=dict) - -def test_initialize_service_field_headers(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backupdr.InitializeServiceRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.initialize_service), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.initialize_service(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_initialize_service_field_headers_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = backupdr.InitializeServiceRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.initialize_service), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.initialize_service(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_list_management_servers_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_management_servers in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
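-        # Swap the cached wrapped method for the mock so call counts are observable.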
- client._transport._wrapped_methods[client._transport.list_management_servers] = mock_rpc - - request = {} - client.list_management_servers(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_management_servers(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_management_servers_rest_required_fields(request_type=backupdr.ListManagementServersRequest): - transport_class = transports.BackupDRRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_management_servers._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_management_servers._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = backupdr.ListManagementServersResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
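-            # The stubbed transcode result routes the request to a dummy GET URI.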
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backupdr.ListManagementServersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_management_servers(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_management_servers_rest_unset_required_fields(): - transport = transports.BackupDRRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_management_servers._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_management_servers_rest_flattened(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = backupdr.ListManagementServersResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = backupdr.ListManagementServersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_management_servers(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/managementServers" % client.transport._host, args[1]) - - -def test_list_management_servers_rest_flattened_error(transport: str = 'rest'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_management_servers( - backupdr.ListManagementServersRequest(), - parent='parent_value', - ) - - -def test_list_management_servers_rest_pager(transport: str = 'rest'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - backupdr.ListManagementServersResponse( - management_servers=[ - backupdr.ManagementServer(), - backupdr.ManagementServer(), - backupdr.ManagementServer(), - ], - next_page_token='abc', - ), - backupdr.ListManagementServersResponse( - management_servers=[], - next_page_token='def', - ), - backupdr.ListManagementServersResponse( - management_servers=[ - backupdr.ManagementServer(), - ], - next_page_token='ghi', - ), - backupdr.ListManagementServersResponse( - management_servers=[ - backupdr.ManagementServer(), - backupdr.ManagementServer(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(backupdr.ListManagementServersResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - pager = client.list_management_servers(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, backupdr.ManagementServer) - for i in results) - - pages = list(client.list_management_servers(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_get_management_server_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_management_server in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_management_server] = mock_rpc - - request = {} - client.get_management_server(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_management_server(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_management_server_rest_required_fields(request_type=backupdr.GetManagementServerRequest): - transport_class = transports.BackupDRRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_management_server._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_management_server._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = backupdr.ManagementServer() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backupdr.ManagementServer.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_management_server(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_management_server_rest_unset_required_fields(): - transport = transports.BackupDRRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_management_server._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_management_server_rest_flattened(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
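-        # The ManagementServer message is serialized to JSON for the fake response body.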
- return_value = backupdr.ManagementServer() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/managementServers/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = backupdr.ManagementServer.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_management_server(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/managementServers/*}" % client.transport._host, args[1]) - - -def test_get_management_server_rest_flattened_error(transport: str = 'rest'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_management_server( - backupdr.GetManagementServerRequest(), - name='name_value', - ) - - -def test_create_management_server_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_management_server in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_management_server] = mock_rpc - - request = {} - client.create_management_server(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_management_server(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_management_server_rest_required_fields(request_type=backupdr.CreateManagementServerRequest): - transport_class = transports.BackupDRRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["management_server_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "managementServerId" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_management_server._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "managementServerId" in jsonified_request - assert jsonified_request["managementServerId"] == request_init["management_server_id"] - - jsonified_request["parent"] = 'parent_value' - jsonified_request["managementServerId"] = 'management_server_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_management_server._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("management_server_id", "request_id", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "managementServerId" in jsonified_request - assert jsonified_request["managementServerId"] == 'management_server_id_value' - - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
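-            # Unlike the GET methods above, this POST also carries a request body.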
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_management_server(request) - - expected_params = [ - ( - "managementServerId", - "", - ), - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_management_server_rest_unset_required_fields(): - transport = transports.BackupDRRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_management_server._get_unset_required_fields({}) - assert set(unset_fields) == (set(("managementServerId", "requestId", )) & set(("parent", "managementServerId", "managementServer", ))) - - -def test_create_management_server_rest_flattened(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - management_server=backupdr.ManagementServer(name='name_value'), - management_server_id='management_server_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_management_server(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/managementServers" % client.transport._host, args[1]) - - -def test_create_management_server_rest_flattened_error(transport: str = 'rest'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_management_server( - backupdr.CreateManagementServerRequest(), - parent='parent_value', - management_server=backupdr.ManagementServer(name='name_value'), - management_server_id='management_server_id_value', - ) - - -def test_delete_management_server_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_management_server in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_management_server] = mock_rpc - - request = {} - client.delete_management_server(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_management_server(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_management_server_rest_required_fields(request_type=backupdr.DeleteManagementServerRequest): - transport_class = transports.BackupDRRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_management_server._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_management_server._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("request_id", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
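-        # Here the stubbed transcode result uses the DELETE verb.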
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_management_server(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_management_server_rest_unset_required_fields(): - transport = transports.BackupDRRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_management_server._get_unset_required_fields({}) - assert set(unset_fields) == (set(("requestId", )) & set(("name", ))) - - -def test_delete_management_server_rest_flattened(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/managementServers/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_management_server(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/managementServers/*}" % client.transport._host, args[1]) - - -def test_delete_management_server_rest_flattened_error(transport: str = 'rest'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_management_server( - backupdr.DeleteManagementServerRequest(), - name='name_value', - ) - - -def test_create_backup_vault_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_backup_vault in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_backup_vault] = mock_rpc - - request = {} - client.create_backup_vault(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_backup_vault(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_backup_vault_rest_required_fields(request_type=backupvault.CreateBackupVaultRequest): - transport_class = transports.BackupDRRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["backup_vault_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "backupVaultId" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_backup_vault._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "backupVaultId" in jsonified_request - assert jsonified_request["backupVaultId"] == request_init["backup_vault_id"] - - jsonified_request["parent"] = 'parent_value' - jsonified_request["backupVaultId"] = 'backup_vault_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_backup_vault._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("backup_vault_id", "request_id", "validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "backupVaultId" in jsonified_request - assert jsonified_request["backupVaultId"] == 'backup_vault_id_value' - - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. 
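-    # As a long-running method, create_backup_vault returns an Operation
-    # rather than the created resource directly.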
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_backup_vault(request) - - expected_params = [ - ( - "backupVaultId", - "", - ), - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_backup_vault_rest_unset_required_fields(): - transport = transports.BackupDRRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_backup_vault._get_unset_required_fields({}) - assert set(unset_fields) == (set(("backupVaultId", "requestId", "validateOnly", )) & set(("parent", "backupVaultId", "backupVault", ))) - - -def test_create_backup_vault_rest_flattened(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - backup_vault=backupvault.BackupVault(name='name_value'), - backup_vault_id='backup_vault_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_backup_vault(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/backupVaults" % client.transport._host, args[1]) - - -def test_create_backup_vault_rest_flattened_error(transport: str = 'rest'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_backup_vault( - backupvault.CreateBackupVaultRequest(), - parent='parent_value', - backup_vault=backupvault.BackupVault(name='name_value'), - backup_vault_id='backup_vault_id_value', - ) - - -def test_list_backup_vaults_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_backup_vaults in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_backup_vaults] = mock_rpc - - request = {} - client.list_backup_vaults(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_backup_vaults(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_backup_vaults_rest_required_fields(request_type=backupvault.ListBackupVaultsRequest): - transport_class = transports.BackupDRRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_backup_vaults._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_backup_vaults._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", "view", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = backupvault.ListBackupVaultsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
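-            # List methods transcode to a GET, with all fields sent as query params.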
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backupvault.ListBackupVaultsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_backup_vaults(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_backup_vaults_rest_unset_required_fields(): - transport = transports.BackupDRRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_backup_vaults._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", "view", )) & set(("parent", ))) - - -def test_list_backup_vaults_rest_flattened(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = backupvault.ListBackupVaultsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = backupvault.ListBackupVaultsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_backup_vaults(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/backupVaults" % client.transport._host, args[1]) - - -def test_list_backup_vaults_rest_flattened_error(transport: str = 'rest'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_backup_vaults( - backupvault.ListBackupVaultsRequest(), - parent='parent_value', - ) - - -def test_list_backup_vaults_rest_pager(transport: str = 'rest'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - backupvault.ListBackupVaultsResponse( - backup_vaults=[ - backupvault.BackupVault(), - backupvault.BackupVault(), - backupvault.BackupVault(), - ], - next_page_token='abc', - ), - backupvault.ListBackupVaultsResponse( - backup_vaults=[], - next_page_token='def', - ), - backupvault.ListBackupVaultsResponse( - backup_vaults=[ - backupvault.BackupVault(), - ], - next_page_token='ghi', - ), - backupvault.ListBackupVaultsResponse( - backup_vaults=[ - backupvault.BackupVault(), - backupvault.BackupVault(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(backupvault.ListBackupVaultsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - pager = client.list_backup_vaults(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, backupvault.BackupVault) - for i in results) - - pages = list(client.list_backup_vaults(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_fetch_usable_backup_vaults_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.fetch_usable_backup_vaults in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.fetch_usable_backup_vaults] = mock_rpc - - request = {} - client.fetch_usable_backup_vaults(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.fetch_usable_backup_vaults(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_fetch_usable_backup_vaults_rest_required_fields(request_type=backupvault.FetchUsableBackupVaultsRequest): - transport_class = transports.BackupDRRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).fetch_usable_backup_vaults._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).fetch_usable_backup_vaults._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = backupvault.FetchUsableBackupVaultsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
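-            # The fetchUsable custom method also transcodes to a plain GET here.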
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backupvault.FetchUsableBackupVaultsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.fetch_usable_backup_vaults(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_fetch_usable_backup_vaults_rest_unset_required_fields(): - transport = transports.BackupDRRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.fetch_usable_backup_vaults._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_fetch_usable_backup_vaults_rest_flattened(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = backupvault.FetchUsableBackupVaultsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = backupvault.FetchUsableBackupVaultsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.fetch_usable_backup_vaults(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/backupVaults:fetchUsable" % client.transport._host, args[1]) - - -def test_fetch_usable_backup_vaults_rest_flattened_error(transport: str = 'rest'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.fetch_usable_backup_vaults( - backupvault.FetchUsableBackupVaultsRequest(), - parent='parent_value', - ) - - -def test_fetch_usable_backup_vaults_rest_pager(transport: str = 'rest'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - backupvault.FetchUsableBackupVaultsResponse( - backup_vaults=[ - backupvault.BackupVault(), - backupvault.BackupVault(), - backupvault.BackupVault(), - ], - next_page_token='abc', - ), - backupvault.FetchUsableBackupVaultsResponse( - backup_vaults=[], - next_page_token='def', - ), - backupvault.FetchUsableBackupVaultsResponse( - backup_vaults=[ - backupvault.BackupVault(), - ], - next_page_token='ghi', - ), - backupvault.FetchUsableBackupVaultsResponse( - backup_vaults=[ - backupvault.BackupVault(), - backupvault.BackupVault(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(backupvault.FetchUsableBackupVaultsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - pager = client.fetch_usable_backup_vaults(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, backupvault.BackupVault) - for i in results) - - pages = list(client.fetch_usable_backup_vaults(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_get_backup_vault_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_backup_vault in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_backup_vault] = mock_rpc - - request = {} - client.get_backup_vault(request) - - # Establish that the underlying gRPC stub method was called. 
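The `*_rest_pager` tests above fake four pages (tokens 'abc', 'def', 'ghi', then '') and expect six stitched items. The stitching itself is plain token-chasing; a toy version, independent of any Google library:

    class Page:
        def __init__(self, items, next_page_token=""):
            self.items = items
            self.next_page_token = next_page_token

    def paginate(fetch, token=""):
        # keep fetching until the service returns an empty page token
        while True:
            page = fetch(token)
            yield from page.items
            token = page.next_page_token
            if not token:
                return

    pages = {
        "": Page([1, 2, 3], "abc"),
        "abc": Page([], "def"),   # empty pages are legal and must not stop iteration
        "def": Page([4], "ghi"),
        "ghi": Page([5, 6]),      # no token: iteration ends
    }
    assert list(paginate(lambda token: pages[token])) == [1, 2, 3, 4, 5, 6]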
- assert mock_rpc.call_count == 1 - - client.get_backup_vault(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_backup_vault_rest_required_fields(request_type=backupvault.GetBackupVaultRequest): - transport_class = transports.BackupDRRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_backup_vault._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_backup_vault._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("view", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = backupvault.BackupVault() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backupvault.BackupVault.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_backup_vault(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_backup_vault_rest_unset_required_fields(): - transport = transports.BackupDRRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_backup_vault._get_unset_required_fields({}) - assert set(unset_fields) == (set(("view", )) & set(("name", ))) - - -def test_get_backup_vault_rest_flattened(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = backupvault.BackupVault() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/backupVaults/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = backupvault.BackupVault.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_backup_vault(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/backupVaults/*}" % client.transport._host, args[1]) - - -def test_get_backup_vault_rest_flattened_error(transport: str = 'rest'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_backup_vault( - backupvault.GetBackupVaultRequest(), - name='name_value', - ) - - -def test_update_backup_vault_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_backup_vault in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_backup_vault] = mock_rpc - - request = {} - client.update_backup_vault(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_backup_vault(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_backup_vault_rest_required_fields(request_type=backupvault.UpdateBackupVaultRequest): - transport_class = transports.BackupDRRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_backup_vault._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_backup_vault._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("force", "request_id", "update_mask", "validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_backup_vault(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_backup_vault_rest_unset_required_fields(): - transport = transports.BackupDRRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_backup_vault._get_unset_required_fields({}) - assert set(unset_fields) == (set(("force", "requestId", "updateMask", "validateOnly", )) & set(("updateMask", "backupVault", ))) - - -def test_update_backup_vault_rest_flattened(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'backup_vault': {'name': 'projects/sample1/locations/sample2/backupVaults/sample3'}} - - # get truthy value for each flattened field - mock_args = dict( - backup_vault=backupvault.BackupVault(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_backup_vault(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{backup_vault.name=projects/*/locations/*/backupVaults/*}" % client.transport._host, args[1]) - - -def test_update_backup_vault_rest_flattened_error(transport: str = 'rest'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
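The `*_flattened_error` tests that close each method's block pin down a client convention: a call may pass a fully-formed request object or flattened keyword arguments, never both. A toy guard of the same shape (hypothetical, not the generated client's actual code), which the `pytest.raises(ValueError)` block below asserts against the real client:

    def update_backup_vault(request=None, *, backup_vault=None, update_mask=None):
        # request objects and flattened fields are mutually exclusive
        if request is not None and any(v is not None for v in (backup_vault, update_mask)):
            raise ValueError("If `request` is provided, no individual field "
                             "arguments may be set.")
        return request if request is not None else {"backup_vault": backup_vault,
                                                    "update_mask": update_mask}

    try:
        update_backup_vault({"backup_vault": "a"}, backup_vault="b")
    except ValueError:
        pass  # exactly the behavior pytest.raises(ValueError) checks for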
- with pytest.raises(ValueError): - client.update_backup_vault( - backupvault.UpdateBackupVaultRequest(), - backup_vault=backupvault.BackupVault(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_delete_backup_vault_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_backup_vault in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_backup_vault] = mock_rpc - - request = {} - client.delete_backup_vault(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_backup_vault(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_backup_vault_rest_required_fields(request_type=backupvault.DeleteBackupVaultRequest): - transport_class = transports.BackupDRRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_backup_vault._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_backup_vault._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("allow_missing", "etag", "force", "ignore_backup_plan_references", "request_id", "validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
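The transcode mock below hands back a canned {'uri', 'method', 'query_params'} dict. For reference, api_core's real `path_template.transcode` produces the same shape from a method's http options; a sketch, assuming its list-of-dicts input form (the tests bypass the real function precisely so that empty required fields cannot make it raise):

    from google.api_core import path_template

    http_options = [{"method": "delete",
                     "uri": "v1/{name=projects/*/locations/*/backupVaults/*}"}]
    result = path_template.transcode(
        http_options, name="projects/p/locations/l/backupVaults/v", etag="xyz")
    # matched kwargs (name) expand into the uri; unmatched ones (etag)
    # spill into query_params
    print(result["method"], result["uri"], result["query_params"])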
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_backup_vault(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_backup_vault_rest_unset_required_fields(): - transport = transports.BackupDRRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_backup_vault._get_unset_required_fields({}) - assert set(unset_fields) == (set(("allowMissing", "etag", "force", "ignoreBackupPlanReferences", "requestId", "validateOnly", )) & set(("name", ))) - - -def test_delete_backup_vault_rest_flattened(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/backupVaults/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_backup_vault(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/backupVaults/*}" % client.transport._host, args[1]) - - -def test_delete_backup_vault_rest_flattened_error(transport: str = 'rest'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_backup_vault( - backupvault.DeleteBackupVaultRequest(), - name='name_value', - ) - - -def test_list_data_sources_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_data_sources in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_data_sources] = mock_rpc - - request = {} - client.list_data_sources(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_data_sources(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_data_sources_rest_required_fields(request_type=backupvault.ListDataSourcesRequest): - transport_class = transports.BackupDRRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_data_sources._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_data_sources._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = backupvault.ListDataSourcesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backupvault.ListDataSourcesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_data_sources(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_data_sources_rest_unset_required_fields(): - transport = transports.BackupDRRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_data_sources._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_data_sources_rest_flattened(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = backupvault.ListDataSourcesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/backupVaults/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = backupvault.ListDataSourcesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_data_sources(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*/backupVaults/*}/dataSources" % client.transport._host, args[1]) - - -def test_list_data_sources_rest_flattened_error(transport: str = 'rest'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_data_sources( - backupvault.ListDataSourcesRequest(), - parent='parent_value', - ) - - -def test_list_data_sources_rest_pager(transport: str = 'rest'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
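The pager test below queues one canned `requests.Response` per expected HTTP round trip through `req.side_effect`. The same trick in isolation, with made-up JSON bodies:

    from unittest import mock
    from requests import Response, Session

    canned = []
    for body in (b'{"page": 1}', b'{"page": 2}'):
        r = Response()
        r.status_code = 200
        r._content = body  # as in the tests: set the private _content; .json() reads it
        canned.append(r)

    with mock.patch.object(Session, "request") as req:
        req.side_effect = canned  # each call consumes the next response in order
        session = Session()
        assert session.request("GET", "https://example.com").json() == {"page": 1}
        assert session.request("GET", "https://example.com").json() == {"page": 2}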
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - backupvault.ListDataSourcesResponse( - data_sources=[ - backupvault.DataSource(), - backupvault.DataSource(), - backupvault.DataSource(), - ], - next_page_token='abc', - ), - backupvault.ListDataSourcesResponse( - data_sources=[], - next_page_token='def', - ), - backupvault.ListDataSourcesResponse( - data_sources=[ - backupvault.DataSource(), - ], - next_page_token='ghi', - ), - backupvault.ListDataSourcesResponse( - data_sources=[ - backupvault.DataSource(), - backupvault.DataSource(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(backupvault.ListDataSourcesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2/backupVaults/sample3'} - - pager = client.list_data_sources(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, backupvault.DataSource) - for i in results) - - pages = list(client.list_data_sources(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_get_data_source_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_data_source in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_data_source] = mock_rpc - - request = {} - client.get_data_source(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_data_source(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_data_source_rest_required_fields(request_type=backupvault.GetDataSourceRequest): - transport_class = transports.BackupDRRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_data_source._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_data_source._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = backupvault.DataSource() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backupvault.DataSource.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_data_source(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_data_source_rest_unset_required_fields(): - transport = transports.BackupDRRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_data_source._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_data_source_rest_flattened(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = backupvault.DataSource() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = backupvault.DataSource.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_data_source(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*}" % client.transport._host, args[1]) - - -def test_get_data_source_rest_flattened_error(transport: str = 'rest'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_data_source( - backupvault.GetDataSourceRequest(), - name='name_value', - ) - - -def test_update_data_source_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_data_source in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_data_source] = mock_rpc - - request = {} - client.update_data_source(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_data_source(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_data_source_rest_required_fields(request_type=backupvault.UpdateDataSourceRequest): - transport_class = transports.BackupDRRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_data_source._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_data_source._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("allow_missing", "request_id", "update_mask", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_data_source(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_data_source_rest_unset_required_fields(): - transport = transports.BackupDRRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_data_source._get_unset_required_fields({}) - assert set(unset_fields) == (set(("allowMissing", "requestId", "updateMask", )) & set(("updateMask", "dataSource", ))) - - -def test_update_data_source_rest_flattened(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
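Note that `update_data_source`, like the other mutating RPCs in this suite, resolves to a long-running operation, so the canned payload is a serialized `google.longrunning` Operation rather than the resource itself. The JSON round trip the tests lean on, using only protobuf's `json_format`:

    from google.longrunning import operations_pb2
    from google.protobuf import json_format

    op = operations_pb2.Operation(name="operations/spam")
    body = json_format.MessageToJson(op)   # what gets stuffed into Response._content
    parsed = json_format.Parse(body, operations_pb2.Operation())
    assert parsed.name == "operations/spam"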
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'data_source': {'name': 'projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4'}} - - # get truthy value for each flattened field - mock_args = dict( - data_source=backupvault.DataSource(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_data_source(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{data_source.name=projects/*/locations/*/backupVaults/*/dataSources/*}" % client.transport._host, args[1]) - - -def test_update_data_source_rest_flattened_error(transport: str = 'rest'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_data_source( - backupvault.UpdateDataSourceRequest(), - data_source=backupvault.DataSource(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_list_backups_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_backups in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_backups] = mock_rpc - - request = {} - client.list_backups(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_backups(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_backups_rest_required_fields(request_type=backupvault.ListBackupsRequest): - transport_class = transports.BackupDRRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_backups._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_backups._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", "view", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = backupvault.ListBackupsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backupvault.ListBackupsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_backups(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_backups_rest_unset_required_fields(): - transport = transports.BackupDRRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_backups._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", "view", )) & set(("parent", ))) - - -def test_list_backups_rest_flattened(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = backupvault.ListBackupsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = backupvault.ListBackupsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_backups(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*/backupVaults/*/dataSources/*}/backups" % client.transport._host, args[1]) - - -def test_list_backups_rest_flattened_error(transport: str = 'rest'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_backups( - backupvault.ListBackupsRequest(), - parent='parent_value', - ) - - -def test_list_backups_rest_pager(transport: str = 'rest'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
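A naming detail running through the assertions in this suite: the pre-transcode checks diff snake_case names ('order_by', 'page_size') because they operate on the JSON-ified proto message with its original field names, while `_get_unset_required_fields({})` yields camelCase ('orderBy', 'pageSize') because proto JSON serialization camel-cases by default. `json_format` exhibits both behaviors:

    from google.protobuf import descriptor_pb2, json_format

    msg = descriptor_pb2.FieldDescriptorProto(json_name="demo")
    # the json_name field renders as "jsonName" (camelCase by default)
    print(json_format.MessageToJson(msg))
    # ...and as "json_name" when snake_case is preserved on request
    print(json_format.MessageToJson(msg, preserving_proto_field_name=True))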
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - backupvault.ListBackupsResponse( - backups=[ - backupvault.Backup(), - backupvault.Backup(), - backupvault.Backup(), - ], - next_page_token='abc', - ), - backupvault.ListBackupsResponse( - backups=[], - next_page_token='def', - ), - backupvault.ListBackupsResponse( - backups=[ - backupvault.Backup(), - ], - next_page_token='ghi', - ), - backupvault.ListBackupsResponse( - backups=[ - backupvault.Backup(), - backupvault.Backup(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(backupvault.ListBackupsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4'} - - pager = client.list_backups(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, backupvault.Backup) - for i in results) - - pages = list(client.list_backups(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_get_backup_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_backup in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_backup] = mock_rpc - - request = {} - client.get_backup(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_backup(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_backup_rest_required_fields(request_type=backupvault.GetBackupRequest): - transport_class = transports.BackupDRRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_backup._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_backup._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("view", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = backupvault.Backup() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backupvault.Backup.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_backup(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_backup_rest_unset_required_fields(): - transport = transports.BackupDRRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_backup._get_unset_required_fields({}) - assert set(unset_fields) == (set(("view", )) & set(("name", ))) - - -def test_get_backup_rest_flattened(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
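Each `*_rest_flattened` test ends below by checking the final request URL against the method's http rule with `path_template.validate`, which matches a concrete path against a URI template; standalone:

    from google.api_core import path_template

    tmpl = "v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}"
    assert path_template.validate(
        tmpl, "v1/projects/p/locations/l/backupVaults/v/dataSources/d/backups/b")
    assert not path_template.validate(tmpl, "v1/projects/p/locations/l")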
- return_value = backupvault.Backup() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = backupvault.Backup.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_backup(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}" % client.transport._host, args[1]) - - -def test_get_backup_rest_flattened_error(transport: str = 'rest'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_backup( - backupvault.GetBackupRequest(), - name='name_value', - ) - - -def test_update_backup_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_backup in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_backup] = mock_rpc - - request = {} - client.update_backup(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_backup(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_backup_rest_required_fields(request_type=backupvault.UpdateBackupRequest): - transport_class = transports.BackupDRRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_backup._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_backup._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("request_id", "update_mask", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_backup(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_backup_rest_unset_required_fields(): - transport = transports.BackupDRRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_backup._get_unset_required_fields({}) - assert set(unset_fields) == (set(("requestId", "updateMask", )) & set(("updateMask", "backup", ))) - - -def test_update_backup_rest_flattened(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
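- # Only the session-level request is patched in the flattened tests;
- # transcoding runs for real, which is what lets the URL assertion at the
- # end of this test validate the method's http rule template.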
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'backup': {'name': 'projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5'}} - - # get truthy value for each flattened field - mock_args = dict( - backup=backupvault.Backup(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_backup(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{backup.name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}" % client.transport._host, args[1]) - - -def test_update_backup_rest_flattened_error(transport: str = 'rest'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_backup( - backupvault.UpdateBackupRequest(), - backup=backupvault.Backup(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_delete_backup_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_backup in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_backup] = mock_rpc - - request = {} - client.delete_backup(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_backup(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_backup_rest_required_fields(request_type=backupvault.DeleteBackupRequest): - transport_class = transports.BackupDRRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_backup._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_backup._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("request_id", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_backup(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_backup_rest_unset_required_fields(): - transport = transports.BackupDRRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_backup._get_unset_required_fields({}) - assert set(unset_fields) == (set(("requestId", )) & set(("name", ))) - - -def test_delete_backup_rest_flattened(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_backup(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}" % client.transport._host, args[1]) - - -def test_delete_backup_rest_flattened_error(transport: str = 'rest'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_backup( - backupvault.DeleteBackupRequest(), - name='name_value', - ) - - -def test_restore_backup_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.restore_backup in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.restore_backup] = mock_rpc - - request = {} - client.restore_backup(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.restore_backup(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_restore_backup_rest_required_fields(request_type=backupvault.RestoreBackupRequest): - transport_class = transports.BackupDRRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).restore_backup._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).restore_backup._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.restore_backup(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_restore_backup_rest_unset_required_fields(): - transport = transports.BackupDRRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.restore_backup._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_restore_backup_rest_flattened(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.restore_backup(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}:restore" % client.transport._host, args[1]) - - -def test_restore_backup_rest_flattened_error(transport: str = 'rest'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.restore_backup( - backupvault.RestoreBackupRequest(), - name='name_value', - ) - - -def test_create_backup_plan_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_backup_plan in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_backup_plan] = mock_rpc - - request = {} - client.create_backup_plan(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_backup_plan(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_backup_plan_rest_required_fields(request_type=backupplan.CreateBackupPlanRequest): - transport_class = transports.BackupDRRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["backup_plan_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "backupPlanId" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_backup_plan._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "backupPlanId" in jsonified_request - assert jsonified_request["backupPlanId"] == request_init["backup_plan_id"] - - jsonified_request["parent"] = 'parent_value' - jsonified_request["backupPlanId"] = 'backup_plan_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_backup_plan._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("backup_plan_id", "request_id", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "backupPlanId" in jsonified_request - assert jsonified_request["backupPlanId"] == 'backup_plan_id_value' - - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
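- # In production, path_template.transcode() maps request fields onto the
- # method's URI template and splits the remainder between query_params and
- # body, yielding roughly {'uri': ..., 'method': ..., 'query_params': ...};
- # the stub below short-circuits that with a fixed result.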
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_backup_plan(request) - - expected_params = [ - ( - "backupPlanId", - "", - ), - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_backup_plan_rest_unset_required_fields(): - transport = transports.BackupDRRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_backup_plan._get_unset_required_fields({}) - assert set(unset_fields) == (set(("backupPlanId", "requestId", )) & set(("parent", "backupPlanId", "backupPlan", ))) - - -def test_create_backup_plan_rest_flattened(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - backup_plan=backupplan.BackupPlan(name='name_value'), - backup_plan_id='backup_plan_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_backup_plan(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/backupPlans" % client.transport._host, args[1]) - - -def test_create_backup_plan_rest_flattened_error(transport: str = 'rest'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_backup_plan( - backupplan.CreateBackupPlanRequest(), - parent='parent_value', - backup_plan=backupplan.BackupPlan(name='name_value'), - backup_plan_id='backup_plan_id_value', - ) - - -def test_get_backup_plan_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_backup_plan in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_backup_plan] = mock_rpc - - request = {} - client.get_backup_plan(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_backup_plan(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_backup_plan_rest_required_fields(request_type=backupplan.GetBackupPlanRequest): - transport_class = transports.BackupDRRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_backup_plan._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_backup_plan._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = backupplan.BackupPlan() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backupplan.BackupPlan.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_backup_plan(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_backup_plan_rest_unset_required_fields(): - transport = transports.BackupDRRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_backup_plan._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_backup_plan_rest_flattened(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = backupplan.BackupPlan() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/backupPlans/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = backupplan.BackupPlan.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_backup_plan(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/backupPlans/*}" % client.transport._host, args[1]) - - -def test_get_backup_plan_rest_flattened_error(transport: str = 'rest'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
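- # Flattened arguments are only a convenience for populating a request
- # object; supplying both at once would be ambiguous, so the client raises
- # ValueError instead of guessing which value should win.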
- with pytest.raises(ValueError): - client.get_backup_plan( - backupplan.GetBackupPlanRequest(), - name='name_value', - ) - - -def test_list_backup_plans_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_backup_plans in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_backup_plans] = mock_rpc - - request = {} - client.list_backup_plans(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_backup_plans(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_backup_plans_rest_required_fields(request_type=backupplan.ListBackupPlansRequest): - transport_class = transports.BackupDRRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_backup_plans._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_backup_plans._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = backupplan.ListBackupPlansResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backupplan.ListBackupPlansResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_backup_plans(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_backup_plans_rest_unset_required_fields(): - transport = transports.BackupDRRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_backup_plans._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_backup_plans_rest_flattened(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = backupplan.ListBackupPlansResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = backupplan.ListBackupPlansResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_backup_plans(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/backupPlans" % client.transport._host, args[1]) - - -def test_list_backup_plans_rest_flattened_error(transport: str = 'rest'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_backup_plans( - backupplan.ListBackupPlansRequest(), - parent='parent_value', - ) - - -def test_list_backup_plans_rest_pager(transport: str = 'rest'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - backupplan.ListBackupPlansResponse( - backup_plans=[ - backupplan.BackupPlan(), - backupplan.BackupPlan(), - backupplan.BackupPlan(), - ], - next_page_token='abc', - ), - backupplan.ListBackupPlansResponse( - backup_plans=[], - next_page_token='def', - ), - backupplan.ListBackupPlansResponse( - backup_plans=[ - backupplan.BackupPlan(), - ], - next_page_token='ghi', - ), - backupplan.ListBackupPlansResponse( - backup_plans=[ - backupplan.BackupPlan(), - backupplan.BackupPlan(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(backupplan.ListBackupPlansResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - pager = client.list_backup_plans(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, backupplan.BackupPlan) - for i in results) - - pages = list(client.list_backup_plans(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_delete_backup_plan_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_backup_plan in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_backup_plan] = mock_rpc - - request = {} - client.delete_backup_plan(request) - - # Establish that the underlying gRPC stub method was called. 
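- # (Strictly, the mock above replaces the cached *wrapped* method on the
- # REST transport; the generated comment's mention of a gRPC stub is a
- # transport-agnostic leftover.)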
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_backup_plan(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_backup_plan_rest_required_fields(request_type=backupplan.DeleteBackupPlanRequest): - transport_class = transports.BackupDRRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_backup_plan._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_backup_plan._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("request_id", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_backup_plan(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_backup_plan_rest_unset_required_fields(): - transport = transports.BackupDRRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_backup_plan._get_unset_required_fields({}) - assert set(unset_fields) == (set(("requestId", )) & set(("name", ))) - - -def test_delete_backup_plan_rest_flattened(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/backupPlans/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_backup_plan(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/backupPlans/*}" % client.transport._host, args[1]) - - -def test_delete_backup_plan_rest_flattened_error(transport: str = 'rest'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_backup_plan( - backupplan.DeleteBackupPlanRequest(), - name='name_value', - ) - - -def test_create_backup_plan_association_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_backup_plan_association in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_backup_plan_association] = mock_rpc - - request = {} - client.create_backup_plan_association(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_backup_plan_association(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_backup_plan_association_rest_required_fields(request_type=backupplanassociation.CreateBackupPlanAssociationRequest): - transport_class = transports.BackupDRRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["backup_plan_association_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "backupPlanAssociationId" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_backup_plan_association._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "backupPlanAssociationId" in jsonified_request - assert jsonified_request["backupPlanAssociationId"] == request_init["backup_plan_association_id"] - - jsonified_request["parent"] = 'parent_value' - jsonified_request["backupPlanAssociationId"] = 'backup_plan_association_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_backup_plan_association._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("backup_plan_association_id", "request_id", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "backupPlanAssociationId" in jsonified_request - assert jsonified_request["backupPlanAssociationId"] == 'backup_plan_association_id_value' - - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. 
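- # create_backup_plan_association is a long-running method, so the
- # immediate response is a longrunning Operation rather than the created
- # BackupPlanAssociation resource.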
- return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_backup_plan_association(request) - - expected_params = [ - ( - "backupPlanAssociationId", - "", - ), - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_backup_plan_association_rest_unset_required_fields(): - transport = transports.BackupDRRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_backup_plan_association._get_unset_required_fields({}) - assert set(unset_fields) == (set(("backupPlanAssociationId", "requestId", )) & set(("parent", "backupPlanAssociationId", "backupPlanAssociation", ))) - - -def test_create_backup_plan_association_rest_flattened(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - backup_plan_association=backupplanassociation.BackupPlanAssociation(name='name_value'), - backup_plan_association_id='backup_plan_association_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_backup_plan_association(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
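- # path_template.validate() below checks the URL that was actually sent
- # against the method's http rule template, so a routing regression in the
- # flattened-call path would surface here.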
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/backupPlanAssociations" % client.transport._host, args[1]) - - -def test_create_backup_plan_association_rest_flattened_error(transport: str = 'rest'): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_backup_plan_association( - backupplanassociation.CreateBackupPlanAssociationRequest(), - parent='parent_value', - backup_plan_association=backupplanassociation.BackupPlanAssociation(name='name_value'), - backup_plan_association_id='backup_plan_association_id_value', - ) - - -def test_get_backup_plan_association_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_backup_plan_association in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_backup_plan_association] = mock_rpc - - request = {} - client.get_backup_plan_association(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_backup_plan_association(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_backup_plan_association_rest_required_fields(request_type=backupplanassociation.GetBackupPlanAssociationRequest): - transport_class = transports.BackupDRRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_backup_plan_association._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_backup_plan_association._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = backupplanassociation.BackupPlanAssociation() - # Mock the http request call within the method and fake a response. 
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = backupplanassociation.BackupPlanAssociation.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.get_backup_plan_association(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_get_backup_plan_association_rest_unset_required_fields():
-    transport = transports.BackupDRRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.get_backup_plan_association._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-def test_get_backup_plan_association_rest_flattened():
-    client = BackupDRClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = backupplanassociation.BackupPlanAssociation()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'name': 'projects/sample1/locations/sample2/backupPlanAssociations/sample3'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            name='name_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = backupplanassociation.BackupPlanAssociation.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.get_backup_plan_association(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{name=projects/*/locations/*/backupPlanAssociations/*}" % client.transport._host, args[1])
-
-
-def test_get_backup_plan_association_rest_flattened_error(transport: str = 'rest'):
-    client = BackupDRClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - client.get_backup_plan_association( - backupplanassociation.GetBackupPlanAssociationRequest(), - name='name_value', - ) - - -def test_list_backup_plan_associations_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_backup_plan_associations in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_backup_plan_associations] = mock_rpc - - request = {} - client.list_backup_plan_associations(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_backup_plan_associations(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_backup_plan_associations_rest_required_fields(request_type=backupplanassociation.ListBackupPlanAssociationsRequest): - transport_class = transports.BackupDRRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_backup_plan_associations._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_backup_plan_associations._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = backupplanassociation.ListBackupPlanAssociationsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
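-            # (Note that no 'body' key is set in the transcode result below:
-            # this is a GET method, so every field travels in the query string.)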
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = backupplanassociation.ListBackupPlanAssociationsResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.list_backup_plan_associations(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_list_backup_plan_associations_rest_unset_required_fields():
-    transport = transports.BackupDRRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.list_backup_plan_associations._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", )))
-
-
-def test_list_backup_plan_associations_rest_flattened():
-    client = BackupDRClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = backupplanassociation.ListBackupPlanAssociationsResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            parent='parent_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = backupplanassociation.ListBackupPlanAssociationsResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.list_backup_plan_associations(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/backupPlanAssociations" % client.transport._host, args[1])
-
-
-def test_list_backup_plan_associations_rest_flattened_error(transport: str = 'rest'):
-    client = BackupDRClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_backup_plan_associations(
-            backupplanassociation.ListBackupPlanAssociationsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_backup_plan_associations_rest_pager(transport: str = 'rest'):
-    client = BackupDRClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # Set the response as a series of pages
-        response = (
-            backupplanassociation.ListBackupPlanAssociationsResponse(
-                backup_plan_associations=[
-                    backupplanassociation.BackupPlanAssociation(),
-                    backupplanassociation.BackupPlanAssociation(),
-                    backupplanassociation.BackupPlanAssociation(),
-                ],
-                next_page_token='abc',
-            ),
-            backupplanassociation.ListBackupPlanAssociationsResponse(
-                backup_plan_associations=[],
-                next_page_token='def',
-            ),
-            backupplanassociation.ListBackupPlanAssociationsResponse(
-                backup_plan_associations=[
-                    backupplanassociation.BackupPlanAssociation(),
-                ],
-                next_page_token='ghi',
-            ),
-            backupplanassociation.ListBackupPlanAssociationsResponse(
-                backup_plan_associations=[
-                    backupplanassociation.BackupPlanAssociation(),
-                    backupplanassociation.BackupPlanAssociation(),
-                ],
-            ),
-        )
-        # Two responses for two calls
-        response = response + response
-
-        # Wrap the values into proper Response objs
-        response = tuple(backupplanassociation.ListBackupPlanAssociationsResponse.to_json(x) for x in response)
-        return_values = tuple(Response() for i in response)
-        for return_val, response_val in zip(return_values, response):
-            return_val._content = response_val.encode('UTF-8')
-            return_val.status_code = 200
-        req.side_effect = return_values
-
-        sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
-        pager = client.list_backup_plan_associations(request=sample_request)
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, backupplanassociation.BackupPlanAssociation)
-                   for i in results)
-
-        pages = list(client.list_backup_plan_associations(request=sample_request).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-
-def test_delete_backup_plan_association_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = BackupDRClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.delete_backup_plan_association in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.delete_backup_plan_association] = mock_rpc
-
-        request = {}
-        client.delete_backup_plan_association(request)
-
-        # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_backup_plan_association(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_backup_plan_association_rest_required_fields(request_type=backupplanassociation.DeleteBackupPlanAssociationRequest): - transport_class = transports.BackupDRRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_backup_plan_association._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_backup_plan_association._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("request_id", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
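-            # (Again no 'body' below: DELETE requests carry only query
-            # parameters, such as the optional requestId checked earlier.)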
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "delete",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.delete_backup_plan_association(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_delete_backup_plan_association_rest_unset_required_fields():
-    transport = transports.BackupDRRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.delete_backup_plan_association._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("requestId", )) & set(("name", )))
-
-
-def test_delete_backup_plan_association_rest_flattened():
-    client = BackupDRClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'name': 'projects/sample1/locations/sample2/backupPlanAssociations/sample3'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            name='name_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.delete_backup_plan_association(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{name=projects/*/locations/*/backupPlanAssociations/*}" % client.transport._host, args[1])
-
-
-def test_delete_backup_plan_association_rest_flattened_error(transport: str = 'rest'):
-    client = BackupDRClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - client.delete_backup_plan_association( - backupplanassociation.DeleteBackupPlanAssociationRequest(), - name='name_value', - ) - - -def test_trigger_backup_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.trigger_backup in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.trigger_backup] = mock_rpc - - request = {} - client.trigger_backup(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.trigger_backup(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_trigger_backup_rest_required_fields(request_type=backupplanassociation.TriggerBackupRequest): - transport_class = transports.BackupDRRestTransport - - request_init = {} - request_init["name"] = "" - request_init["rule_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).trigger_backup._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - jsonified_request["ruleId"] = 'rule_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).trigger_backup._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - assert "ruleId" in jsonified_request - assert jsonified_request["ruleId"] == 'rule_id_value' - - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
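-            # (Unlike the GET and DELETE cases above, this POST method also
-            # sets a 'body' in the transcode result below.)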
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.trigger_backup(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_trigger_backup_rest_unset_required_fields():
-    transport = transports.BackupDRRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.trigger_backup._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("name", "ruleId", )))
-
-
-def test_trigger_backup_rest_flattened():
-    client = BackupDRClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'name': 'projects/sample1/locations/sample2/backupPlanAssociations/sample3'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            name='name_value',
-            rule_id='rule_id_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.trigger_backup(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{name=projects/*/locations/*/backupPlanAssociations/*}:triggerBackup" % client.transport._host, args[1])
-
-
-def test_trigger_backup_rest_flattened_error(transport: str = 'rest'):
-    client = BackupDRClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - client.trigger_backup( - backupplanassociation.TriggerBackupRequest(), - name='name_value', - rule_id='rule_id_value', - ) - - -def test_initialize_service_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.initialize_service in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.initialize_service] = mock_rpc - - request = {} - client.initialize_service(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.initialize_service(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_initialize_service_rest_required_fields(request_type=backupdr.InitializeServiceRequest): - transport_class = transports.BackupDRRestTransport - - request_init = {} - request_init["name"] = "" - request_init["resource_type"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).initialize_service._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - jsonified_request["resourceType"] = 'resource_type_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).initialize_service._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - assert "resourceType" in jsonified_request - assert jsonified_request["resourceType"] == 'resource_type_value' - - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.initialize_service(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_initialize_service_rest_unset_required_fields():
-    transport = transports.BackupDRRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.initialize_service._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("name", "resourceType", )))
-
-
-def test_credentials_transport_error():
-    # It is an error to provide credentials and a transport instance.
-    transport = transports.BackupDRGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    with pytest.raises(ValueError):
-        client = BackupDRClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport=transport,
-        )
-
-    # It is an error to provide a credentials file and a transport instance.
-    transport = transports.BackupDRGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    with pytest.raises(ValueError):
-        client = BackupDRClient(
-            client_options={"credentials_file": "credentials.json"},
-            transport=transport,
-        )
-
-    # It is an error to provide an api_key and a transport instance.
-    transport = transports.BackupDRGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    options = client_options.ClientOptions()
-    options.api_key = "api_key"
-    with pytest.raises(ValueError):
-        client = BackupDRClient(
-            client_options=options,
-            transport=transport,
-        )
-
-    # It is an error to provide an api_key and a credential.
-    options = client_options.ClientOptions()
-    options.api_key = "api_key"
-    with pytest.raises(ValueError):
-        client = BackupDRClient(
-            client_options=options,
-            credentials=ga_credentials.AnonymousCredentials()
-        )
-
-    # It is an error to provide scopes and a transport instance.
-    transport = transports.BackupDRGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    with pytest.raises(ValueError):
-        client = BackupDRClient(
-            client_options={"scopes": ["1", "2"]},
-            transport=transport,
-        )
-
-
-def test_transport_instance():
-    # A client may be instantiated with a custom transport instance.
-    transport = transports.BackupDRGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    client = BackupDRClient(transport=transport)
-    assert client.transport is transport
-
-def test_transport_get_channel():
-    # A client may be instantiated with a custom transport instance.
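-    # Both the sync and async gRPC transports should expose a usable channel.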
- transport = transports.BackupDRGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.BackupDRGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.BackupDRGrpcTransport, - transports.BackupDRGrpcAsyncIOTransport, - transports.BackupDRRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = BackupDRClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_management_servers_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_management_servers), - '__call__') as call: - call.return_value = backupdr.ListManagementServersResponse() - client.list_management_servers(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupdr.ListManagementServersRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_management_server_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_management_server), - '__call__') as call: - call.return_value = backupdr.ManagementServer() - client.get_management_server(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupdr.GetManagementServerRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_management_server_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_management_server), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_management_server(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupdr.CreateManagementServerRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
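-# Each *_empty_call_grpc test below follows the same shape:
-#
-#     with mock.patch.object(type(client.transport.<rpc_name>), '__call__') as call:
-#         call.return_value = <default response>
-#         client.<rpc_name>(request=None)
-#         _, args, _ = call.mock_calls[0]
-#         assert args[0] == <EmptyRequestType>()
-#
-# i.e. stub the transport method, invoke the client with request=None, and
-# verify that the default (empty) request message reached the stub.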
-def test_delete_management_server_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_management_server), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_management_server(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupdr.DeleteManagementServerRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_backup_vault_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_vault), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_backup_vault(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.CreateBackupVaultRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_backup_vaults_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_vaults), - '__call__') as call: - call.return_value = backupvault.ListBackupVaultsResponse() - client.list_backup_vaults(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.ListBackupVaultsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_fetch_usable_backup_vaults_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.fetch_usable_backup_vaults), - '__call__') as call: - call.return_value = backupvault.FetchUsableBackupVaultsResponse() - client.fetch_usable_backup_vaults(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.FetchUsableBackupVaultsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_backup_vault_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_vault), - '__call__') as call: - call.return_value = backupvault.BackupVault() - client.get_backup_vault(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.GetBackupVaultRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_backup_vault_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_backup_vault), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_backup_vault(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.UpdateBackupVaultRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_backup_vault_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_vault), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_backup_vault(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.DeleteBackupVaultRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_data_sources_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_data_sources), - '__call__') as call: - call.return_value = backupvault.ListDataSourcesResponse() - client.list_data_sources(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.ListDataSourcesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_data_source_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_data_source), - '__call__') as call: - call.return_value = backupvault.DataSource() - client.get_data_source(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.GetDataSourceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_data_source_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
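-    # (Patching '__call__' on the type of the transport's callable intercepts
-    # the underlying gRPC invocation itself.)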
- with mock.patch.object( - type(client.transport.update_data_source), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_data_source(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.UpdateDataSourceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_backups_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - call.return_value = backupvault.ListBackupsResponse() - client.list_backups(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.ListBackupsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_backup_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_backup), - '__call__') as call: - call.return_value = backupvault.Backup() - client.get_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.GetBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_backup_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_backup), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.UpdateBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_backup_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.DeleteBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_restore_backup_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.restore_backup), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.restore_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.RestoreBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_backup_plan_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_plan), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_backup_plan(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplan.CreateBackupPlanRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_backup_plan_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_plan), - '__call__') as call: - call.return_value = backupplan.BackupPlan() - client.get_backup_plan(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplan.GetBackupPlanRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_backup_plans_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_plans), - '__call__') as call: - call.return_value = backupplan.ListBackupPlansResponse() - client.list_backup_plans(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplan.ListBackupPlansRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_backup_plan_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_plan), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_backup_plan(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplan.DeleteBackupPlanRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_backup_plan_association_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_plan_association), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_backup_plan_association(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplanassociation.CreateBackupPlanAssociationRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_backup_plan_association_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_plan_association), - '__call__') as call: - call.return_value = backupplanassociation.BackupPlanAssociation() - client.get_backup_plan_association(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplanassociation.GetBackupPlanAssociationRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_backup_plan_associations_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_plan_associations), - '__call__') as call: - call.return_value = backupplanassociation.ListBackupPlanAssociationsResponse() - client.list_backup_plan_associations(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplanassociation.ListBackupPlanAssociationsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_backup_plan_association_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_plan_association), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_backup_plan_association(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplanassociation.DeleteBackupPlanAssociationRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_trigger_backup_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.trigger_backup), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.trigger_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplanassociation.TriggerBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_initialize_service_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.initialize_service), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.initialize_service(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupdr.InitializeServiceRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = BackupDRAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_management_servers_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_management_servers), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupdr.ListManagementServersResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - await client.list_management_servers(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupdr.ListManagementServersRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_management_server_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_management_server), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupdr.ManagementServer( - name='name_value', - description='description_value', - type_=backupdr.ManagementServer.InstanceType.BACKUP_RESTORE, - state=backupdr.ManagementServer.InstanceState.CREATING, - etag='etag_value', - oauth2_client_id='oauth2_client_id_value', - ba_proxy_uri=['ba_proxy_uri_value'], - satisfies_pzi=True, - )) - await client.get_management_server(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupdr.GetManagementServerRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_management_server_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_management_server), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_management_server(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupdr.CreateManagementServerRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_management_server_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_management_server), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_management_server(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupdr.DeleteManagementServerRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_backup_vault_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_vault), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_backup_vault(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.CreateBackupVaultRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
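-# (The asyncio variants wrap their fake responses in
-# grpc_helpers_async.FakeUnaryUnaryCall so that the client can await them.)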
-@pytest.mark.asyncio -async def test_list_backup_vaults_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_vaults), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupvault.ListBackupVaultsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - await client.list_backup_vaults(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.ListBackupVaultsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_fetch_usable_backup_vaults_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.fetch_usable_backup_vaults), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupvault.FetchUsableBackupVaultsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - await client.fetch_usable_backup_vaults(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.FetchUsableBackupVaultsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_backup_vault_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_vault), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupvault.BackupVault( - name='name_value', - description='description_value', - deletable=True, - etag='etag_value', - state=backupvault.BackupVault.State.CREATING, - backup_count=1278, - service_account='service_account_value', - total_stored_bytes=1946, - uid='uid_value', - access_restriction=backupvault.BackupVault.AccessRestriction.WITHIN_PROJECT, - )) - await client.get_backup_vault(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.GetBackupVaultRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_backup_vault_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. 
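# Aside: patching '__call__' on type(...) rather than on the instance matters
# because Python resolves special methods on the class. A self-contained
# illustration with a stand-in class (hypothetical, not the real transport):

from unittest import mock

class FakeMulticallable:
    def __call__(self, request=None):
        raise RuntimeError("would hit the network")

stub = FakeMulticallable()
with mock.patch.object(type(stub), '__call__') as call:
    call.return_value = "mocked"
    assert stub() == "mocked"  # dispatches through the patched class attribute
    call.assert_called()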
- with mock.patch.object( - type(client.transport.update_backup_vault), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_backup_vault(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.UpdateBackupVaultRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_backup_vault_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_vault), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_backup_vault(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.DeleteBackupVaultRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_data_sources_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_data_sources), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupvault.ListDataSourcesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - await client.list_data_sources(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.ListDataSourcesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_data_source_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_data_source), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupvault.DataSource( - name='name_value', - state=backupvault.DataSource.State.CREATING, - backup_count=1278, - etag='etag_value', - total_stored_bytes=1946, - config_state=backupvault.BackupConfigState.ACTIVE, - )) - await client.get_data_source(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.GetDataSourceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. 
request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_data_source_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_data_source), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_data_source(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.UpdateDataSourceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_backups_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupvault.ListBackupsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - await client.list_backups(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.ListBackupsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_backup_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupvault.Backup( - name='name_value', - description='description_value', - etag='etag_value', - state=backupvault.Backup.State.CREATING, - backup_type=backupvault.Backup.BackupType.SCHEDULED, - resource_size_bytes=2056, - )) - await client.get_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.GetBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_backup_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_backup(request=None) - - # Establish that the underlying stub method was called. 
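# Aside: each mock_calls entry unpacks as (name, args, kwargs), so the
# `_, args, _ = call.mock_calls[0]` idiom extracts the positional arguments,
# and args[0] is the request message the stub received. Standalone
# demonstration:

from unittest import mock

m = mock.Mock()
m("request", timeout=5)
name, args, kwargs = m.mock_calls[0]
assert args == ("request",) and kwargs == {"timeout": 5}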
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.UpdateBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_backup_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.DeleteBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_restore_backup_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.restore_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.restore_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.RestoreBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_backup_plan_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_plan), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_backup_plan(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplan.CreateBackupPlanRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_backup_plan_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_plan), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupplan.BackupPlan( - name='name_value', - description='description_value', - state=backupplan.BackupPlan.State.CREATING, - resource_type='resource_type_value', - etag='etag_value', - backup_vault='backup_vault_value', - backup_vault_service_account='backup_vault_service_account_value', - )) - await client.get_backup_plan(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplan.GetBackupPlanRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_backup_plans_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_plans), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupplan.ListBackupPlansResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - await client.list_backup_plans(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplan.ListBackupPlansRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_backup_plan_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_plan), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_backup_plan(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplan.DeleteBackupPlanRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_backup_plan_association_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_plan_association), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_backup_plan_association(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplanassociation.CreateBackupPlanAssociationRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. 
request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_backup_plan_association_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_plan_association), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupplanassociation.BackupPlanAssociation( - name='name_value', - resource_type='resource_type_value', - resource='resource_value', - backup_plan='backup_plan_value', - state=backupplanassociation.BackupPlanAssociation.State.CREATING, - data_source='data_source_value', - )) - await client.get_backup_plan_association(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplanassociation.GetBackupPlanAssociationRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_backup_plan_associations_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_plan_associations), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupplanassociation.ListBackupPlanAssociationsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - await client.list_backup_plan_associations(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplanassociation.ListBackupPlanAssociationsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_backup_plan_association_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_plan_association), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_backup_plan_association(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplanassociation.DeleteBackupPlanAssociationRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_trigger_backup_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.trigger_backup), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.trigger_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplanassociation.TriggerBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_initialize_service_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.initialize_service), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.initialize_service(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupdr.InitializeServiceRequest() - - assert args[0] == request_msg - - -def test_transport_kind_rest(): - transport = BackupDRClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" - - -def test_list_management_servers_rest_bad_request(request_type=backupdr.ListManagementServersRequest): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_management_servers(request) - - -@pytest.mark.parametrize("request_type", [ - backupdr.ListManagementServersRequest, - dict, -]) -def test_list_management_servers_rest_call_success(request_type): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
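# Aside: the REST tests fake the HTTP layer by serializing the expected proto
# to JSON and exposing it as `response.content`; the transport parses it back
# into a message. The same serialize/parse round trip, sketched with a
# generic well-known type:

from google.protobuf import json_format, struct_pb2

msg = struct_pb2.Value(string_value="hello")
payload = json_format.MessageToJson(msg).encode("UTF-8")
parsed = json_format.Parse(payload.decode("UTF-8"), struct_pb2.Value())
assert parsed.string_value == "hello"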
- return_value = backupdr.ListManagementServersResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backupdr.ListManagementServersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_management_servers(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListManagementServersPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_management_servers_rest_interceptors(null_interceptor): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), - ) - client = BackupDRClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_list_management_servers") as post, \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_list_management_servers_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BackupDRRestInterceptor, "pre_list_management_servers") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = backupdr.ListManagementServersRequest.pb(backupdr.ListManagementServersRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = backupdr.ListManagementServersResponse.to_json(backupdr.ListManagementServersResponse()) - req.return_value.content = return_value - - request = backupdr.ListManagementServersRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = backupdr.ListManagementServersResponse() - post_with_metadata.return_value = backupdr.ListManagementServersResponse(), metadata - - client.list_management_servers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_management_server_rest_bad_request(request_type=backupdr.GetManagementServerRequest): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/managementServers/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
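# Aside: a mocked status_code of 400 is all it takes to raise the expected
# error, because google.api_core maps HTTP status codes onto exception
# classes. The public lookup helper makes the mapping visible:

from google.api_core import exceptions as core_exceptions

assert core_exceptions.exception_class_for_http_status(400) is core_exceptions.BadRequest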
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_management_server(request) - - -@pytest.mark.parametrize("request_type", [ - backupdr.GetManagementServerRequest, - dict, -]) -def test_get_management_server_rest_call_success(request_type): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/managementServers/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = backupdr.ManagementServer( - name='name_value', - description='description_value', - type_=backupdr.ManagementServer.InstanceType.BACKUP_RESTORE, - state=backupdr.ManagementServer.InstanceState.CREATING, - etag='etag_value', - oauth2_client_id='oauth2_client_id_value', - ba_proxy_uri=['ba_proxy_uri_value'], - satisfies_pzi=True, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backupdr.ManagementServer.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_management_server(request) - - # Establish that the response is the type that we expect. 
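# Aside: the *_rest_interceptors tests below verify that the pre hook runs
# before the request is sent and the post hooks run on the decoded response,
# each exactly once. A condensed model of that flow (hypothetical names, not
# the generated transport's real internals):

class Interceptor:
    def pre(self, request, metadata):
        return request, metadata

    def post(self, response):
        return response

def call_with_hooks(interceptor, request, metadata, send):
    request, metadata = interceptor.pre(request, metadata)  # pre hook first
    response = send(request)                                # HTTP round trip
    return interceptor.post(response)                       # then the post hook

assert call_with_hooks(Interceptor(), "req", [], lambda r: r.upper()) == "REQ"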
- assert isinstance(response, backupdr.ManagementServer) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.type_ == backupdr.ManagementServer.InstanceType.BACKUP_RESTORE - assert response.state == backupdr.ManagementServer.InstanceState.CREATING - assert response.etag == 'etag_value' - assert response.oauth2_client_id == 'oauth2_client_id_value' - assert response.ba_proxy_uri == ['ba_proxy_uri_value'] - assert response.satisfies_pzi is True - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_management_server_rest_interceptors(null_interceptor): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), - ) - client = BackupDRClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_get_management_server") as post, \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_get_management_server_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BackupDRRestInterceptor, "pre_get_management_server") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = backupdr.GetManagementServerRequest.pb(backupdr.GetManagementServerRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = backupdr.ManagementServer.to_json(backupdr.ManagementServer()) - req.return_value.content = return_value - - request = backupdr.GetManagementServerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = backupdr.ManagementServer() - post_with_metadata.return_value = backupdr.ManagementServer(), metadata - - client.get_management_server(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_management_server_rest_bad_request(request_type=backupdr.CreateManagementServerRequest): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_management_server(request) - - -@pytest.mark.parametrize("request_type", [ - backupdr.CreateManagementServerRequest, - dict, -]) -def test_create_management_server_rest_call_success(request_type): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["management_server"] = {'name': 'name_value', 'description': 'description_value', 'labels': {}, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'type_': 1, 'management_uri': {'web_ui': 'web_ui_value', 'api': 'api_value'}, 'workforce_identity_based_management_uri': {'first_party_management_uri': 'first_party_management_uri_value', 'third_party_management_uri': 'third_party_management_uri_value'}, 'state': 1, 'networks': [{'network': 'network_value', 'peering_mode': 1}], 'etag': 'etag_value', 'oauth2_client_id': 'oauth2_client_id_value', 'workforce_identity_based_oauth2_client_id': {'first_party_oauth2_client_id': 'first_party_oauth2_client_id_value', 'third_party_oauth2_client_id': 'third_party_oauth2_client_id_value'}, 'ba_proxy_uri': ['ba_proxy_uri_value1', 'ba_proxy_uri_value2'], 'satisfies_pzs': {'value': True}, 'satisfies_pzi': True} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = backupdr.CreateManagementServerRequest.meta.fields["management_server"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["management_server"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["management_server"][field])): - del request_init["management_server"][field][i][subfield] - else: - del request_init["management_server"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_management_server(request) - - # Establish that the response is the type that we expect. 
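# Aside: the subfield-pruning loop earlier in this test exists because the
# protobuf runtime installed at test time may be older than the one the sample
# request was generated against. Reduced to plain dicts, the guard is just:

sample = {"name": "n", "labels": {}, "ghost_field": 1}  # generated sample body
runtime_fields = {"name", "labels"}                     # fields the runtime knows
for key in list(sample):
    if key not in runtime_fields:
        del sample[key]
assert sample == {"name": "n", "labels": {}}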
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_management_server_rest_interceptors(null_interceptor): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), - ) - client = BackupDRClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_create_management_server") as post, \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_create_management_server_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BackupDRRestInterceptor, "pre_create_management_server") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = backupdr.CreateManagementServerRequest.pb(backupdr.CreateManagementServerRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = backupdr.CreateManagementServerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.create_management_server(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_management_server_rest_bad_request(request_type=backupdr.DeleteManagementServerRequest): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/managementServers/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_management_server(request) - - -@pytest.mark.parametrize("request_type", [ - backupdr.DeleteManagementServerRequest, - dict, -]) -def test_delete_management_server_rest_call_success(request_type): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/managementServers/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
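# Aside: long-running methods are faked with a bare operations_pb2.Operation,
# and the mocked HTTP body is just that proto rendered as JSON. Round trip:

from google.longrunning import operations_pb2
from google.protobuf import json_format

op = operations_pb2.Operation(name="operations/spam")
body = json_format.MessageToJson(op)
assert json_format.Parse(body, operations_pb2.Operation()).name == "operations/spam"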
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_management_server(request) - - # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_management_server_rest_interceptors(null_interceptor): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), - ) - client = BackupDRClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_delete_management_server") as post, \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_delete_management_server_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BackupDRRestInterceptor, "pre_delete_management_server") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = backupdr.DeleteManagementServerRequest.pb(backupdr.DeleteManagementServerRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = backupdr.DeleteManagementServerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.delete_management_server(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_backup_vault_rest_bad_request(request_type=backupvault.CreateBackupVaultRequest): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_backup_vault(request) - - -@pytest.mark.parametrize("request_type", [ - backupvault.CreateBackupVaultRequest, - dict, -]) -def test_create_backup_vault_rest_call_success(request_type): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["backup_vault"] = {'name': 'name_value', 'description': 'description_value', 'labels': {}, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'backup_minimum_enforced_retention_duration': {'seconds': 751, 'nanos': 543}, 'deletable': True, 'etag': 'etag_value', 'state': 1, 'effective_time': {}, 'backup_count': 1278, 'service_account': 'service_account_value', 'total_stored_bytes': 1946, 'uid': 'uid_value', 'annotations': {}, 'access_restriction': 1} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = backupvault.CreateBackupVaultRequest.meta.fields["backup_vault"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["backup_vault"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["backup_vault"][field])): - del request_init["backup_vault"][field][i][subfield] - else: - del request_init["backup_vault"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_backup_vault(request) - - # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_backup_vault_rest_interceptors(null_interceptor): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), - ) - client = BackupDRClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_create_backup_vault") as post, \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_create_backup_vault_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BackupDRRestInterceptor, "pre_create_backup_vault") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = backupvault.CreateBackupVaultRequest.pb(backupvault.CreateBackupVaultRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = backupvault.CreateBackupVaultRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.create_backup_vault(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_backup_vaults_rest_bad_request(request_type=backupvault.ListBackupVaultsRequest): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_backup_vaults(request) - - -@pytest.mark.parametrize("request_type", [ - backupvault.ListBackupVaultsRequest, - dict, -]) -def test_list_backup_vaults_rest_call_success(request_type): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
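# Aside: list methods return a Pager that wraps responses like the one built
# next and follows next_page_token across pages. A toy model of that loop
# (hypothetical fetch callable):

def iterate_pages(fetch, token=""):
    while True:
        items, token = fetch(token)
        yield from items
        if not token:
            break

pages = {"": (["a", "b"], "t1"), "t1": (["c"], "")}
assert list(iterate_pages(lambda t: pages[t])) == ["a", "b", "c"]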
- return_value = backupvault.ListBackupVaultsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backupvault.ListBackupVaultsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_backup_vaults(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBackupVaultsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_backup_vaults_rest_interceptors(null_interceptor): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), - ) - client = BackupDRClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_list_backup_vaults") as post, \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_list_backup_vaults_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BackupDRRestInterceptor, "pre_list_backup_vaults") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = backupvault.ListBackupVaultsRequest.pb(backupvault.ListBackupVaultsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = backupvault.ListBackupVaultsResponse.to_json(backupvault.ListBackupVaultsResponse()) - req.return_value.content = return_value - - request = backupvault.ListBackupVaultsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = backupvault.ListBackupVaultsResponse() - post_with_metadata.return_value = backupvault.ListBackupVaultsResponse(), metadata - - client.list_backup_vaults(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_fetch_usable_backup_vaults_rest_bad_request(request_type=backupvault.FetchUsableBackupVaultsRequest): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.fetch_usable_backup_vaults(request) - - -@pytest.mark.parametrize("request_type", [ - backupvault.FetchUsableBackupVaultsRequest, - dict, -]) -def test_fetch_usable_backup_vaults_rest_call_success(request_type): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = backupvault.FetchUsableBackupVaultsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backupvault.FetchUsableBackupVaultsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.fetch_usable_backup_vaults(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.FetchUsableBackupVaultsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_fetch_usable_backup_vaults_rest_interceptors(null_interceptor): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), - ) - client = BackupDRClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_fetch_usable_backup_vaults") as post, \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_fetch_usable_backup_vaults_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BackupDRRestInterceptor, "pre_fetch_usable_backup_vaults") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = backupvault.FetchUsableBackupVaultsRequest.pb(backupvault.FetchUsableBackupVaultsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = backupvault.FetchUsableBackupVaultsResponse.to_json(backupvault.FetchUsableBackupVaultsResponse()) - req.return_value.content = return_value - - request = backupvault.FetchUsableBackupVaultsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = backupvault.FetchUsableBackupVaultsResponse() - post_with_metadata.return_value = backupvault.FetchUsableBackupVaultsResponse(), metadata - - client.fetch_usable_backup_vaults(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_backup_vault_rest_bad_request(request_type=backupvault.GetBackupVaultRequest): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/backupVaults/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
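The success tests assert on pager types (here pagers.FetchUsableBackupVaultsPager) plus the raw next_page_token and unreachable fields. A toy illustration of the paging contract those assertions rely on (the page dicts and iterate helper are hypothetical; the real pagers issue follow-up RPCs lazily): a pager keeps fetching while the returned token is non-empty. The GetBackupVault BadRequest test continues after the sketch.

.. code-block:: python

    # Two fake pages keyed by page token; an empty token means "first page"
    # on input and "no more pages" on output.
    pages = {
        "": {"items": ["vault-a"], "next_page_token": "p2"},
        "p2": {"items": ["vault-b"], "next_page_token": ""},
    }

    def iterate(fetch):
        token = ""
        while True:
            page = fetch(token)
            yield from page["items"]
            token = page["next_page_token"]
            if not token:
                break

    assert list(iterate(lambda token: pages[token])) == ["vault-a", "vault-b"]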
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_backup_vault(request) - - -@pytest.mark.parametrize("request_type", [ - backupvault.GetBackupVaultRequest, - dict, -]) -def test_get_backup_vault_rest_call_success(request_type): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/backupVaults/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = backupvault.BackupVault( - name='name_value', - description='description_value', - deletable=True, - etag='etag_value', - state=backupvault.BackupVault.State.CREATING, - backup_count=1278, - service_account='service_account_value', - total_stored_bytes=1946, - uid='uid_value', - access_restriction=backupvault.BackupVault.AccessRestriction.WITHIN_PROJECT, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backupvault.BackupVault.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_backup_vault(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, backupvault.BackupVault) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.deletable is True - assert response.etag == 'etag_value' - assert response.state == backupvault.BackupVault.State.CREATING - assert response.backup_count == 1278 - assert response.service_account == 'service_account_value' - assert response.total_stored_bytes == 1946 - assert response.uid == 'uid_value' - assert response.access_restriction == backupvault.BackupVault.AccessRestriction.WITHIN_PROJECT - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_backup_vault_rest_interceptors(null_interceptor): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), - ) - client = BackupDRClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_get_backup_vault") as post, \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_get_backup_vault_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BackupDRRestInterceptor, "pre_get_backup_vault") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = backupvault.GetBackupVaultRequest.pb(backupvault.GetBackupVaultRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = backupvault.BackupVault.to_json(backupvault.BackupVault()) - req.return_value.content = return_value - - request = backupvault.GetBackupVaultRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = backupvault.BackupVault() - post_with_metadata.return_value = backupvault.BackupVault(), metadata - - client.get_backup_vault(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_backup_vault_rest_bad_request(request_type=backupvault.UpdateBackupVaultRequest): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'backup_vault': {'name': 'projects/sample1/locations/sample2/backupVaults/sample3'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
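Each success test builds the fake HTTP reply the same way: serialize the expected message with json_format.MessageToJson, encode it, and hang it on a mock Response's .content. A round-trip sketch of that wrapping, using the long-running Operation message (which these tests already import as operations_pb2) rather than the BackupDR types so it stays self-contained. The UpdateBackupVault BadRequest test resumes below.

.. code-block:: python

    from unittest import mock

    from google.longrunning import operations_pb2
    from google.protobuf import json_format

    return_value = operations_pb2.Operation(name="operations/spam")

    # Wrap the message into a Response-shaped mock, as the tests do.
    response_value = mock.Mock()
    response_value.status_code = 200
    response_value.content = json_format.MessageToJson(return_value).encode("UTF-8")

    # The deserialization the client then performs: the JSON bytes parse
    # back into an equivalent message.
    parsed = json_format.Parse(
        response_value.content.decode("UTF-8"), operations_pb2.Operation()
    )
    assert parsed.name == "operations/spam"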
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_backup_vault(request) - - -@pytest.mark.parametrize("request_type", [ - backupvault.UpdateBackupVaultRequest, - dict, -]) -def test_update_backup_vault_rest_call_success(request_type): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'backup_vault': {'name': 'projects/sample1/locations/sample2/backupVaults/sample3'}} - request_init["backup_vault"] = {'name': 'projects/sample1/locations/sample2/backupVaults/sample3', 'description': 'description_value', 'labels': {}, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'backup_minimum_enforced_retention_duration': {'seconds': 751, 'nanos': 543}, 'deletable': True, 'etag': 'etag_value', 'state': 1, 'effective_time': {}, 'backup_count': 1278, 'service_account': 'service_account_value', 'total_stored_bytes': 1946, 'uid': 'uid_value', 'annotations': {}, 'access_restriction': 1} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = backupvault.UpdateBackupVaultRequest.meta.fields["backup_vault"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["backup_vault"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["backup_vault"][field])): - del request_init["backup_vault"][field][i][subfield] - else: - del request_init["backup_vault"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_backup_vault(request) - - # Establish that the response is the type that we expect. 
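The pruning loop above guards against version skew: the sample request is written against the schema at generation time, while the runtime copy of the dependency may lack some sub-fields (see gapic-generator-python issue #1748), so any (field, subfield) pair not found on the runtime message is deleted before the request is constructed. A toy version of the same pruning on plain dicts, with a hypothetical new_field that an older runtime would not declare; the update test's final response check continues on the next line.

.. code-block:: python

    # Pairs the (hypothetical) runtime message still declares.
    runtime_nested_fields = {("backup_vault", "name"), ("backup_vault", "etag")}
    sample = {"backup_vault": {"name": "n", "etag": "e", "new_field": "x"}}

    # Drop any sub-field the runtime no longer knows about, as the generated
    # loop does for request_init["backup_vault"].
    for field, value in sample.items():
        if isinstance(value, dict):
            for subfield in list(value):
                if (field, subfield) not in runtime_nested_fields:
                    del value[subfield]

    assert sample == {"backup_vault": {"name": "n", "etag": "e"}}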
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_backup_vault_rest_interceptors(null_interceptor): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), - ) - client = BackupDRClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_update_backup_vault") as post, \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_update_backup_vault_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BackupDRRestInterceptor, "pre_update_backup_vault") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = backupvault.UpdateBackupVaultRequest.pb(backupvault.UpdateBackupVaultRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = backupvault.UpdateBackupVaultRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.update_backup_vault(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_backup_vault_rest_bad_request(request_type=backupvault.DeleteBackupVaultRequest): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/backupVaults/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_backup_vault(request) - - -@pytest.mark.parametrize("request_type", [ - backupvault.DeleteBackupVaultRequest, - dict, -]) -def test_delete_backup_vault_rest_call_success(request_type): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/backupVaults/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_backup_vault(request) - - # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_backup_vault_rest_interceptors(null_interceptor): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), - ) - client = BackupDRClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_delete_backup_vault") as post, \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_delete_backup_vault_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BackupDRRestInterceptor, "pre_delete_backup_vault") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = backupvault.DeleteBackupVaultRequest.pb(backupvault.DeleteBackupVaultRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = backupvault.DeleteBackupVaultRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.delete_backup_vault(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_data_sources_rest_bad_request(request_type=backupvault.ListDataSourcesRequest): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/backupVaults/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
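For the Operation-returning methods (update, delete, and later restore), the statement under "# Establish that the response is the type that we expect." merely re-serializes return_value and asserts nothing; that is how the generator currently emits these tests. A hedged sketch of the stronger check one could add, assuming, as the REST client documents, that these calls return a google.api_core.operation.Operation future wrapping the proto:

.. code-block:: python

    # Hypothetical strengthening of the generated LRO tests; "response" is
    # the value returned by e.g. client.delete_backup_vault(request).
    def assert_is_expected_operation(response, expected_name="operations/spam"):
        # operation.Operation exposes the latest longrunning proto, whose
        # name was set to 'operations/spam' in the mocked reply.
        assert response.operation.name == expected_name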
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_data_sources(request) - - -@pytest.mark.parametrize("request_type", [ - backupvault.ListDataSourcesRequest, - dict, -]) -def test_list_data_sources_rest_call_success(request_type): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/backupVaults/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = backupvault.ListDataSourcesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backupvault.ListDataSourcesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_data_sources(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDataSourcesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_data_sources_rest_interceptors(null_interceptor): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), - ) - client = BackupDRClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_list_data_sources") as post, \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_list_data_sources_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BackupDRRestInterceptor, "pre_list_data_sources") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = backupvault.ListDataSourcesRequest.pb(backupvault.ListDataSourcesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = backupvault.ListDataSourcesResponse.to_json(backupvault.ListDataSourcesResponse()) - req.return_value.content = return_value - - request = backupvault.ListDataSourcesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = backupvault.ListDataSourcesResponse() - post_with_metadata.return_value = backupvault.ListDataSourcesResponse(), metadata - - client.list_data_sources(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_data_source_rest_bad_request(request_type=backupvault.GetDataSourceRequest): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_data_source(request) - - -@pytest.mark.parametrize("request_type", [ - backupvault.GetDataSourceRequest, - dict, -]) -def test_get_data_source_rest_call_success(request_type): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
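The *_rest_bad_request tests stub a 400 status and expect core_exceptions.BadRequest. The mapping from HTTP status to exception class is done by google.api_core.exceptions.from_http_response, which these tests exercise indirectly; here is a direct sketch of it, reusing the same Response-shaped mock the tests build (the GetDataSource success test continues below):

.. code-block:: python

    from unittest import mock

    from google.api_core import exceptions as core_exceptions

    response = mock.Mock()
    response.status_code = 400
    response.json = mock.Mock(return_value={})
    response.request = mock.Mock()

    # 400 maps to BadRequest (a subclass of GoogleAPICallError).
    exc = core_exceptions.from_http_response(response)
    assert isinstance(exc, core_exceptions.BadRequest)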
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = backupvault.DataSource( - name='name_value', - state=backupvault.DataSource.State.CREATING, - backup_count=1278, - etag='etag_value', - total_stored_bytes=1946, - config_state=backupvault.BackupConfigState.ACTIVE, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backupvault.DataSource.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_data_source(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, backupvault.DataSource) - assert response.name == 'name_value' - assert response.state == backupvault.DataSource.State.CREATING - assert response.backup_count == 1278 - assert response.etag == 'etag_value' - assert response.total_stored_bytes == 1946 - assert response.config_state == backupvault.BackupConfigState.ACTIVE - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_data_source_rest_interceptors(null_interceptor): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), - ) - client = BackupDRClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_get_data_source") as post, \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_get_data_source_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BackupDRRestInterceptor, "pre_get_data_source") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = backupvault.GetDataSourceRequest.pb(backupvault.GetDataSourceRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = backupvault.DataSource.to_json(backupvault.DataSource()) - req.return_value.content = return_value - - request = backupvault.GetDataSourceRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = backupvault.DataSource() - post_with_metadata.return_value = backupvault.DataSource(), metadata - - client.get_data_source(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_data_source_rest_bad_request(request_type=backupvault.UpdateDataSourceRequest): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'data_source': {'name': 'projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4'}} - request = request_type(**request_init) - - # Mock the http 
request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_data_source(request) - - -@pytest.mark.parametrize("request_type", [ - backupvault.UpdateDataSourceRequest, - dict, -]) -def test_update_data_source_rest_call_success(request_type): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'data_source': {'name': 'projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4'}} - request_init["data_source"] = {'name': 'projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4', 'state': 1, 'labels': {}, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'backup_count': 1278, 'etag': 'etag_value', 'total_stored_bytes': 1946, 'config_state': 1, 'backup_config_info': {'last_backup_state': 1, 'last_successful_backup_consistency_time': {}, 'last_backup_error': {'code': 411, 'message': 'message_value', 'details': [{'type_url': 'type.googleapis.com/google.protobuf.Duration', 'value': b'\x08\x0c\x10\xdb\x07'}]}, 'gcp_backup_config': {'backup_plan': 'backup_plan_value', 'backup_plan_description': 'backup_plan_description_value', 'backup_plan_association': 'backup_plan_association_value', 'backup_plan_rules': ['backup_plan_rules_value1', 'backup_plan_rules_value2']}, 'backup_appliance_backup_config': {'backup_appliance_name': 'backup_appliance_name_value', 'backup_appliance_id': 1966, 'sla_id': 620, 'application_name': 'application_name_value', 'host_name': 'host_name_value', 'slt_name': 'slt_name_value', 'slp_name': 'slp_name_value'}}, 'data_source_gcp_resource': {'gcp_resourcename': 'gcp_resourcename_value', 'location': 'location_value', 'type_': 'type__value', 'compute_instance_datasource_properties': {'name': 'name_value', 'description': 'description_value', 'machine_type': 'machine_type_value', 'total_disk_count': 1718, 'total_disk_size_gb': 1904}}, 'data_source_backup_appliance_application': {'application_name': 'application_name_value', 'backup_appliance': 'backup_appliance_value', 'appliance_id': 1241, 'type_': 'type__value', 'application_id': 1472, 'hostname': 'hostname_value', 'host_id': 746}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = backupvault.UpdateDataSourceRequest.meta.fields["data_source"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["data_source"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["data_source"][field])): - del request_init["data_source"][field][i][subfield] - else: - del request_init["data_source"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_data_source(request) - - # Establish that the response is the type that we expect. 
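get_message_fields distinguishes proto-plus wrappers from raw protobuf messages by probing for a DESCRIPTOR attribute on the field's message type: raw protobuf classes carry one, while proto-plus classes expose their schema under .meta.fields instead. The same probe on a class known to be raw protobuf (the update test's final response check continues after this):

.. code-block:: python

    from google.longrunning import operations_pb2

    # Raw protobuf: the schema hangs off DESCRIPTOR.
    assert hasattr(operations_pb2.Operation, "DESCRIPTOR")
    field_names = [f.name for f in operations_pb2.Operation.DESCRIPTOR.fields]
    assert "name" in field_names  # plus metadata, done, error, response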
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_data_source_rest_interceptors(null_interceptor): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), - ) - client = BackupDRClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_update_data_source") as post, \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_update_data_source_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BackupDRRestInterceptor, "pre_update_data_source") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = backupvault.UpdateDataSourceRequest.pb(backupvault.UpdateDataSourceRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = backupvault.UpdateDataSourceRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.update_data_source(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_backups_rest_bad_request(request_type=backupvault.ListBackupsRequest): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_backups(request) - - -@pytest.mark.parametrize("request_type", [ - backupvault.ListBackupsRequest, - dict, -]) -def test_list_backups_rest_call_success(request_type): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = backupvault.ListBackupsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backupvault.ListBackupsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_backups(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBackupsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_backups_rest_interceptors(null_interceptor): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), - ) - client = BackupDRClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_list_backups") as post, \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_list_backups_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BackupDRRestInterceptor, "pre_list_backups") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = backupvault.ListBackupsRequest.pb(backupvault.ListBackupsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = backupvault.ListBackupsResponse.to_json(backupvault.ListBackupsResponse()) - req.return_value.content = return_value - - request = backupvault.ListBackupsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = backupvault.ListBackupsResponse() - post_with_metadata.return_value = backupvault.ListBackupsResponse(), metadata - - client.list_backups(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_backup_rest_bad_request(request_type=backupvault.GetBackupRequest): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
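Every interceptor test stubs path_template.transcode with a dict of method/uri/body/query_params, the shape the REST transport consumes after matching a request against its HTTP rule bindings; stubbing it lets the tests skip real URI matching. A minimal stand-alone version of that stubbing, with placeholder values as in the tests (the GetBackup BadRequest test resumes below):

.. code-block:: python

    from unittest import mock

    from google.api_core import path_template

    with mock.patch.object(path_template, "transcode") as transcode:
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": b"{}",
            "query_params": {},
        }
        # Arguments are irrelevant once the function is stubbed.
        result = path_template.transcode("unused_http_options")
        assert result["uri"] == "my_uri"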
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_backup(request) - - -@pytest.mark.parametrize("request_type", [ - backupvault.GetBackupRequest, - dict, -]) -def test_get_backup_rest_call_success(request_type): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = backupvault.Backup( - name='name_value', - description='description_value', - etag='etag_value', - state=backupvault.Backup.State.CREATING, - backup_type=backupvault.Backup.BackupType.SCHEDULED, - resource_size_bytes=2056, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backupvault.Backup.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_backup(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, backupvault.Backup) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.etag == 'etag_value' - assert response.state == backupvault.Backup.State.CREATING - assert response.backup_type == backupvault.Backup.BackupType.SCHEDULED - assert response.resource_size_bytes == 2056 - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_backup_rest_interceptors(null_interceptor): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), - ) - client = BackupDRClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_get_backup") as post, \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_get_backup_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BackupDRRestInterceptor, "pre_get_backup") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = backupvault.GetBackupRequest.pb(backupvault.GetBackupRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = backupvault.Backup.to_json(backupvault.Backup()) - req.return_value.content = return_value - - request = backupvault.GetBackupRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = backupvault.Backup() - post_with_metadata.return_value = backupvault.Backup(), metadata - - client.get_backup(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_backup_rest_bad_request(request_type=backupvault.UpdateBackupRequest): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'backup': {'name': 'projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
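The null_interceptor parametrization runs every interceptor test twice so both transport configurations are covered: once with an explicitly supplied BackupDRRestInterceptor and once with interceptor=None, the default path. The same two-branch idea in miniature, with hypothetical names (the UpdateBackup BadRequest test resumes below):

.. code-block:: python

    import pytest

    class PassThroughInterceptor:
        def pre(self, request, metadata):
            return request, metadata

    @pytest.mark.parametrize("null_interceptor", [True, False])
    def test_both_paths(null_interceptor):
        interceptor = None if null_interceptor else PassThroughInterceptor()
        request, metadata = "req", [("key", "val")]
        if interceptor is not None:  # the None path skips the hook entirely
            request, metadata = interceptor.pre(request, metadata)
        assert request == "req"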
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_backup(request) - - -@pytest.mark.parametrize("request_type", [ - backupvault.UpdateBackupRequest, - dict, -]) -def test_update_backup_rest_call_success(request_type): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'backup': {'name': 'projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5'}} - request_init["backup"] = {'name': 'projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'enforced_retention_end_time': {}, 'expire_time': {}, 'consistency_time': {}, 'etag': 'etag_value', 'state': 1, 'service_locks': [{'lock_until_time': {}, 'backup_appliance_lock_info': {'backup_appliance_id': 1966, 'backup_appliance_name': 'backup_appliance_name_value', 'lock_reason': 'lock_reason_value', 'job_name': 'job_name_value', 'backup_image': 'backup_image_value', 'sla_id': 620}, 'service_lock_info': {'operation': 'operation_value'}}], 'backup_appliance_locks': {}, 'compute_instance_backup_properties': {'description': 'description_value', 'tags': {'items': ['items_value1', 'items_value2']}, 'machine_type': 'machine_type_value', 'can_ip_forward': True, 'network_interface': [{'network': 'network_value', 'subnetwork': 'subnetwork_value', 'ip_address': 'ip_address_value', 'ipv6_address': 'ipv6_address_value', 'internal_ipv6_prefix_length': 2831, 'name': 'name_value', 'access_configs': [{'type_': 1, 'name': 'name_value', 'external_ip': 'external_ip_value', 'external_ipv6': 'external_ipv6_value', 'external_ipv6_prefix_length': 2837, 'set_public_ptr': True, 'public_ptr_domain_name': 'public_ptr_domain_name_value', 'network_tier': 1}], 'ipv6_access_configs': {}, 'alias_ip_ranges': [{'ip_cidr_range': 'ip_cidr_range_value', 'subnetwork_range_name': 'subnetwork_range_name_value'}], 'stack_type': 1, 'ipv6_access_type': 1, 'queue_count': 1197, 'nic_type': 1, 'network_attachment': 'network_attachment_value'}], 'disk': [{'initialize_params': {'disk_name': 'disk_name_value', 'replica_zones': ['replica_zones_value1', 'replica_zones_value2']}, 'device_name': 'device_name_value', 'kind': 'kind_value', 'disk_type_deprecated': 1, 'mode': 1, 'source': 'source_value', 'index': 536, 'boot': True, 'auto_delete': True, 'license_': ['license__value1', 'license__value2'], 'disk_interface': 1, 'guest_os_feature': [{'type_': 1}], 'disk_encryption_key': {'raw_key': 'raw_key_value', 'rsa_encrypted_key': 'rsa_encrypted_key_value', 'kms_key_name': 'kms_key_name_value', 'kms_key_service_account': 'kms_key_service_account_value'}, 'disk_size_gb': 1261, 'saved_state': 1, 'disk_type': 'disk_type_value', 'type_': 1}], 'metadata': {'items': [{'key': 'key_value', 'value': 'value_value'}]}, 'service_account': [{'email': 'email_value', 'scopes': ['scopes_value1', 'scopes_value2']}], 'scheduling': {'on_host_maintenance': 1, 'automatic_restart': True, 'preemptible': True, 'node_affinities': 
[{'key': 'key_value', 'operator': 1, 'values': ['values_value1', 'values_value2']}], 'min_node_cpus': 1379, 'provisioning_model': 1, 'instance_termination_action': 1, 'local_ssd_recovery_timeout': {'seconds': 751, 'nanos': 543}}, 'guest_accelerator': [{'accelerator_type': 'accelerator_type_value', 'accelerator_count': 1805}], 'min_cpu_platform': 'min_cpu_platform_value', 'key_revocation_action_type': 1, 'source_instance': 'source_instance_value', 'labels': {}}, 'backup_appliance_backup_properties': {'generation_id': 1368, 'finalize_time': {}, 'recovery_range_start_time': {}, 'recovery_range_end_time': {}}, 'backup_type': 1, 'gcp_backup_plan_info': {'backup_plan': 'backup_plan_value', 'backup_plan_rule_id': 'backup_plan_rule_id_value'}, 'resource_size_bytes': 2056} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = backupvault.UpdateBackupRequest.meta.fields["backup"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["backup"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["backup"][field])): - del request_init["backup"][field][i][subfield] - else: - del request_init["backup"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_backup(request) - - # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_backup_rest_interceptors(null_interceptor): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), - ) - client = BackupDRClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_update_backup") as post, \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_update_backup_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BackupDRRestInterceptor, "pre_update_backup") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = backupvault.UpdateBackupRequest.pb(backupvault.UpdateBackupRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = backupvault.UpdateBackupRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.update_backup(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_backup_rest_bad_request(request_type=backupvault.DeleteBackupRequest): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
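The sprawling request_init["backup"] literal a few hunks up works because proto messages accept arbitrarily nested plain dicts and coerce them field by field; request_type(**request_init) performs that coercion for the proto-plus request types. The same mechanism shown directly with json_format.ParseDict on a raw protobuf type, to keep the sketch free of BackupDR imports:

.. code-block:: python

    from google.longrunning import operations_pb2
    from google.protobuf import json_format

    # Nested dicts are coerced into the typed sub-messages (Operation.error
    # is a google.rpc.Status with code/message fields).
    op = json_format.ParseDict(
        {"name": "operations/spam", "error": {"code": 3, "message": "boom"}},
        operations_pb2.Operation(),
    )
    assert op.error.code == 3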
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.delete_backup(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    backupvault.DeleteBackupRequest,
-    dict,
-])
-def test_delete_backup_rest_call_success(request_type):
-    client = BackupDRClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.delete_backup(request)
-
-    # Establish that the response is the type that we expect.
-    json_return_value = json_format.MessageToJson(return_value)
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_delete_backup_rest_interceptors(null_interceptor):
-    transport = transports.BackupDRRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(),
-    )
-    client = BackupDRClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
-        mock.patch.object(transports.BackupDRRestInterceptor, "post_delete_backup") as post, \
-        mock.patch.object(transports.BackupDRRestInterceptor, "post_delete_backup_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.BackupDRRestInterceptor, "pre_delete_backup") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = backupvault.DeleteBackupRequest.pb(backupvault.DeleteBackupRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = json_format.MessageToJson(operations_pb2.Operation())
-        req.return_value.content = return_value
-
-        request = backupvault.DeleteBackupRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = operations_pb2.Operation()
-        post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
-        client.delete_backup(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_restore_backup_rest_bad_request(request_type=backupvault.RestoreBackupRequest):
-    client = BackupDRClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.restore_backup(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    backupvault.RestoreBackupRequest,
-    dict,
-])
-def test_restore_backup_rest_call_success(request_type):
-    client = BackupDRClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.restore_backup(request)
-
-    # Establish that the response is the type that we expect.
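-    # restore_backup is a long-running method: the client wraps the raw
-    # operations_pb2.Operation in an operation future, so there are no
-    # response fields to compare directly here.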
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_restore_backup_rest_interceptors(null_interceptor): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), - ) - client = BackupDRClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_restore_backup") as post, \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_restore_backup_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BackupDRRestInterceptor, "pre_restore_backup") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = backupvault.RestoreBackupRequest.pb(backupvault.RestoreBackupRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = backupvault.RestoreBackupRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.restore_backup(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_backup_plan_rest_bad_request(request_type=backupplan.CreateBackupPlanRequest): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_backup_plan(request) - - -@pytest.mark.parametrize("request_type", [ - backupplan.CreateBackupPlanRequest, - dict, -]) -def test_create_backup_plan_rest_call_success(request_type): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["backup_plan"] = {'name': 'name_value', 'description': 'description_value', 'labels': {}, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'backup_rules': [{'rule_id': 'rule_id_value', 'backup_retention_days': 2237, 'standard_schedule': {'recurrence_type': 1, 'hourly_frequency': 1748, 'days_of_week': [1], 'days_of_month': [1387, 1388], 'week_day_of_month': {'week_of_month': 1, 'day_of_week': 1}, 'months': [1], 'backup_window': {'start_hour_of_day': 1820, 'end_hour_of_day': 1573}, 'time_zone': 'time_zone_value'}}], 'state': 1, 'resource_type': 'resource_type_value', 'etag': 'etag_value', 'backup_vault': 'backup_vault_value', 'backup_vault_service_account': 'backup_vault_service_account_value'} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = backupplan.CreateBackupPlanRequest.meta.fields["backup_plan"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
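-        # proto-plus message classes describe their schema via `.meta.fields`,
-        # whereas raw protobuf classes expose it via `.DESCRIPTOR.fields`;
-        # both shapes are handled below.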
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["backup_plan"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["backup_plan"][field])): - del request_init["backup_plan"][field][i][subfield] - else: - del request_init["backup_plan"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_backup_plan(request) - - # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_backup_plan_rest_interceptors(null_interceptor): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), - ) - client = BackupDRClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_create_backup_plan") as post, \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_create_backup_plan_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BackupDRRestInterceptor, "pre_create_backup_plan") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = backupplan.CreateBackupPlanRequest.pb(backupplan.CreateBackupPlanRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = backupplan.CreateBackupPlanRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.create_backup_plan(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_backup_plan_rest_bad_request(request_type=backupplan.GetBackupPlanRequest): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/backupPlans/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_backup_plan(request) - - -@pytest.mark.parametrize("request_type", [ - backupplan.GetBackupPlanRequest, - dict, -]) -def test_get_backup_plan_rest_call_success(request_type): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/backupPlans/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
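-        # get_backup_plan returns the resource directly rather than a
-        # long-running operation, so the fake payload sets the scalar fields
-        # that the assertions below verify round-trip through JSON intact.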
-        return_value = backupplan.BackupPlan(
-            name='name_value',
-            description='description_value',
-            state=backupplan.BackupPlan.State.CREATING,
-            resource_type='resource_type_value',
-            etag='etag_value',
-            backup_vault='backup_vault_value',
-            backup_vault_service_account='backup_vault_service_account_value',
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-
-        # Convert return value to protobuf type
-        return_value = backupplan.BackupPlan.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.get_backup_plan(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, backupplan.BackupPlan)
-    assert response.name == 'name_value'
-    assert response.description == 'description_value'
-    assert response.state == backupplan.BackupPlan.State.CREATING
-    assert response.resource_type == 'resource_type_value'
-    assert response.etag == 'etag_value'
-    assert response.backup_vault == 'backup_vault_value'
-    assert response.backup_vault_service_account == 'backup_vault_service_account_value'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_get_backup_plan_rest_interceptors(null_interceptor):
-    transport = transports.BackupDRRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(),
-    )
-    client = BackupDRClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.BackupDRRestInterceptor, "post_get_backup_plan") as post, \
-        mock.patch.object(transports.BackupDRRestInterceptor, "post_get_backup_plan_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.BackupDRRestInterceptor, "pre_get_backup_plan") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = backupplan.GetBackupPlanRequest.pb(backupplan.GetBackupPlanRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = backupplan.BackupPlan.to_json(backupplan.BackupPlan())
-        req.return_value.content = return_value
-
-        request = backupplan.GetBackupPlanRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = backupplan.BackupPlan()
-        post_with_metadata.return_value = backupplan.BackupPlan(), metadata
-
-        client.get_backup_plan(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_list_backup_plans_rest_bad_request(request_type=backupplan.ListBackupPlansRequest):
-    client = BackupDRClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'projects/sample1/locations/sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.list_backup_plans(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    backupplan.ListBackupPlansRequest,
-    dict,
-])
-def test_list_backup_plans_rest_call_success(request_type):
-    client = BackupDRClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'projects/sample1/locations/sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = backupplan.ListBackupPlansResponse(
-            next_page_token='next_page_token_value',
-            unreachable=['unreachable_value'],
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-
-        # Convert return value to protobuf type
-        return_value = backupplan.ListBackupPlansResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.list_backup_plans(request)
-
-    # Establish that the response is the type that we expect.
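-    # List methods return a pager wrapping the raw response; attributes such
-    # as `next_page_token` and `unreachable` are forwarded from the underlying
-    # ListBackupPlansResponse message.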
- assert isinstance(response, pagers.ListBackupPlansPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_backup_plans_rest_interceptors(null_interceptor): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), - ) - client = BackupDRClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_list_backup_plans") as post, \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_list_backup_plans_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BackupDRRestInterceptor, "pre_list_backup_plans") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = backupplan.ListBackupPlansRequest.pb(backupplan.ListBackupPlansRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = backupplan.ListBackupPlansResponse.to_json(backupplan.ListBackupPlansResponse()) - req.return_value.content = return_value - - request = backupplan.ListBackupPlansRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = backupplan.ListBackupPlansResponse() - post_with_metadata.return_value = backupplan.ListBackupPlansResponse(), metadata - - client.list_backup_plans(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_backup_plan_rest_bad_request(request_type=backupplan.DeleteBackupPlanRequest): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/backupPlans/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_backup_plan(request) - - -@pytest.mark.parametrize("request_type", [ - backupplan.DeleteBackupPlanRequest, - dict, -]) -def test_delete_backup_plan_rest_call_success(request_type): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/backupPlans/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
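-    # Patching `type(client.transport._session)` mocks `request` on the class
-    # of the session instance the REST transport actually holds, so the stub
-    # takes effect no matter how that session was created.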
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_backup_plan(request) - - # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_backup_plan_rest_interceptors(null_interceptor): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), - ) - client = BackupDRClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_delete_backup_plan") as post, \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_delete_backup_plan_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BackupDRRestInterceptor, "pre_delete_backup_plan") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = backupplan.DeleteBackupPlanRequest.pb(backupplan.DeleteBackupPlanRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = backupplan.DeleteBackupPlanRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.delete_backup_plan(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_backup_plan_association_rest_bad_request(request_type=backupplanassociation.CreateBackupPlanAssociationRequest): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_backup_plan_association(request) - - -@pytest.mark.parametrize("request_type", [ - backupplanassociation.CreateBackupPlanAssociationRequest, - dict, -]) -def test_create_backup_plan_association_rest_call_success(request_type): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["backup_plan_association"] = {'name': 'name_value', 'resource_type': 'resource_type_value', 'resource': 'resource_value', 'backup_plan': 'backup_plan_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'state': 1, 'rules_config_info': [{'rule_id': 'rule_id_value', 'last_backup_state': 1, 'last_backup_error': {'code': 411, 'message': 'message_value', 'details': [{'type_url': 'type.googleapis.com/google.protobuf.Duration', 'value': b'\x08\x0c\x10\xdb\x07'}]}, 'last_successful_backup_consistency_time': {}}], 'data_source': 'data_source_value'} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = backupplanassociation.CreateBackupPlanAssociationRequest.meta.fields["backup_plan_association"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
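-        # Same normalization helper as in test_create_backup_plan_rest_call_success:
-        # it lets the pruning loop below compare (field, subfield) pairs
-        # uniformly across proto-plus and protobuf runtimes.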
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["backup_plan_association"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["backup_plan_association"][field])): - del request_init["backup_plan_association"][field][i][subfield] - else: - del request_init["backup_plan_association"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_backup_plan_association(request) - - # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_backup_plan_association_rest_interceptors(null_interceptor): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), - ) - client = BackupDRClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_create_backup_plan_association") as post, \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_create_backup_plan_association_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BackupDRRestInterceptor, "pre_create_backup_plan_association") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = backupplanassociation.CreateBackupPlanAssociationRequest.pb(backupplanassociation.CreateBackupPlanAssociationRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = backupplanassociation.CreateBackupPlanAssociationRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.create_backup_plan_association(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_backup_plan_association_rest_bad_request(request_type=backupplanassociation.GetBackupPlanAssociationRequest): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/backupPlanAssociations/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_backup_plan_association(request) - - -@pytest.mark.parametrize("request_type", [ - backupplanassociation.GetBackupPlanAssociationRequest, - dict, -]) -def test_get_backup_plan_association_rest_call_success(request_type): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/backupPlanAssociations/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = backupplanassociation.BackupPlanAssociation( - name='name_value', - resource_type='resource_type_value', - resource='resource_value', - backup_plan='backup_plan_value', - state=backupplanassociation.BackupPlanAssociation.State.CREATING, - data_source='data_source_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backupplanassociation.BackupPlanAssociation.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_backup_plan_association(request) - - # Establish that the response is the type that we expect. 
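-    # Every scalar set on the fake BackupPlanAssociation above should survive
-    # the proto -> JSON -> proto round trip performed by the REST stack.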
- assert isinstance(response, backupplanassociation.BackupPlanAssociation) - assert response.name == 'name_value' - assert response.resource_type == 'resource_type_value' - assert response.resource == 'resource_value' - assert response.backup_plan == 'backup_plan_value' - assert response.state == backupplanassociation.BackupPlanAssociation.State.CREATING - assert response.data_source == 'data_source_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_backup_plan_association_rest_interceptors(null_interceptor): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), - ) - client = BackupDRClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_get_backup_plan_association") as post, \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_get_backup_plan_association_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BackupDRRestInterceptor, "pre_get_backup_plan_association") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = backupplanassociation.GetBackupPlanAssociationRequest.pb(backupplanassociation.GetBackupPlanAssociationRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = backupplanassociation.BackupPlanAssociation.to_json(backupplanassociation.BackupPlanAssociation()) - req.return_value.content = return_value - - request = backupplanassociation.GetBackupPlanAssociationRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = backupplanassociation.BackupPlanAssociation() - post_with_metadata.return_value = backupplanassociation.BackupPlanAssociation(), metadata - - client.get_backup_plan_association(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_backup_plan_associations_rest_bad_request(request_type=backupplanassociation.ListBackupPlanAssociationsRequest): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_backup_plan_associations(request) - - -@pytest.mark.parametrize("request_type", [ - backupplanassociation.ListBackupPlanAssociationsRequest, - dict, -]) -def test_list_backup_plan_associations_rest_call_success(request_type): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = backupplanassociation.ListBackupPlanAssociationsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backupplanassociation.ListBackupPlanAssociationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_backup_plan_associations(request) - - # Establish that the response is the type that we expect. 
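-    # As with the other list RPCs, the pager surfaces the first page's token
-    # and unreachable locations without issuing any further requests.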
- assert isinstance(response, pagers.ListBackupPlanAssociationsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_backup_plan_associations_rest_interceptors(null_interceptor): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), - ) - client = BackupDRClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_list_backup_plan_associations") as post, \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_list_backup_plan_associations_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BackupDRRestInterceptor, "pre_list_backup_plan_associations") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = backupplanassociation.ListBackupPlanAssociationsRequest.pb(backupplanassociation.ListBackupPlanAssociationsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = backupplanassociation.ListBackupPlanAssociationsResponse.to_json(backupplanassociation.ListBackupPlanAssociationsResponse()) - req.return_value.content = return_value - - request = backupplanassociation.ListBackupPlanAssociationsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = backupplanassociation.ListBackupPlanAssociationsResponse() - post_with_metadata.return_value = backupplanassociation.ListBackupPlanAssociationsResponse(), metadata - - client.list_backup_plan_associations(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_backup_plan_association_rest_bad_request(request_type=backupplanassociation.DeleteBackupPlanAssociationRequest): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/backupPlanAssociations/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_backup_plan_association(request) - - -@pytest.mark.parametrize("request_type", [ - backupplanassociation.DeleteBackupPlanAssociationRequest, - dict, -]) -def test_delete_backup_plan_association_rest_call_success(request_type): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/backupPlanAssociations/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_backup_plan_association(request) - - # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_backup_plan_association_rest_interceptors(null_interceptor): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), - ) - client = BackupDRClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_delete_backup_plan_association") as post, \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_delete_backup_plan_association_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BackupDRRestInterceptor, "pre_delete_backup_plan_association") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = backupplanassociation.DeleteBackupPlanAssociationRequest.pb(backupplanassociation.DeleteBackupPlanAssociationRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = backupplanassociation.DeleteBackupPlanAssociationRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() 
- post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.delete_backup_plan_association(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_trigger_backup_rest_bad_request(request_type=backupplanassociation.TriggerBackupRequest): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/backupPlanAssociations/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.trigger_backup(request) - - -@pytest.mark.parametrize("request_type", [ - backupplanassociation.TriggerBackupRequest, - dict, -]) -def test_trigger_backup_rest_call_success(request_type): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/backupPlanAssociations/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.trigger_backup(request) - - # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_trigger_backup_rest_interceptors(null_interceptor): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), - ) - client = BackupDRClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_trigger_backup") as post, \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_trigger_backup_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BackupDRRestInterceptor, "pre_trigger_backup") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = backupplanassociation.TriggerBackupRequest.pb(backupplanassociation.TriggerBackupRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = backupplanassociation.TriggerBackupRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.trigger_backup(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_initialize_service_rest_bad_request(request_type=backupdr.InitializeServiceRequest): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/serviceConfig'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.initialize_service(request) - - -@pytest.mark.parametrize("request_type", [ - backupdr.InitializeServiceRequest, - dict, -]) -def test_initialize_service_rest_call_success(request_type): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/serviceConfig'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
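-    # initialize_service is also backed by a long-running operation, so the
-    # mocked response is a serialized operations_pb2.Operation, as for the
-    # other LRO methods above.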
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.initialize_service(request) - - # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_initialize_service_rest_interceptors(null_interceptor): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), - ) - client = BackupDRClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_initialize_service") as post, \ - mock.patch.object(transports.BackupDRRestInterceptor, "post_initialize_service_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BackupDRRestInterceptor, "pre_initialize_service") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = backupdr.InitializeServiceRequest.pb(backupdr.InitializeServiceRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = backupdr.InitializeServiceRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.initialize_service(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_location(request) - - -@pytest.mark.parametrize("request_type", [ - locations_pb2.GetLocationRequest, - dict, -]) -def test_get_location_rest(request_type): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.Location() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_location(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocationsRequest): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_locations(request) - - -@pytest.mark.parametrize("request_type", [ - locations_pb2.ListLocationsRequest, - dict, -]) -def test_list_locations_rest(request_type): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.ListLocationsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_locations(request) - - # Establish that the response is the type that we expect. 
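-    # The locations and IAM mixin methods in this block use plain protobuf
-    # (`*_pb2`) request and response types rather than proto-plus wrappers.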
- assert isinstance(response, locations_pb2.ListLocationsResponse) - - -def test_get_iam_policy_rest_bad_request(request_type=iam_policy_pb2.GetIamPolicyRequest): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/managementServers/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_iam_policy(request) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.GetIamPolicyRequest, - dict, -]) -def test_get_iam_policy_rest(request_type): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'resource': 'projects/sample1/locations/sample2/managementServers/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_iam_policy(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - -def test_set_iam_policy_rest_bad_request(request_type=iam_policy_pb2.SetIamPolicyRequest): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/managementServers/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.set_iam_policy(request) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.SetIamPolicyRequest, - dict, -]) -def test_set_iam_policy_rest(request_type): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'resource': 'projects/sample1/locations/sample2/managementServers/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. 
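The success-path REST tests fake a body by serializing the expected proto with json_format.MessageToJson and handing the bytes back as response.content, so the client's normal parsing path runs against it. The round trip they rely on, sketched with a well-known proto type (struct_pb2.Value is chosen here only because it is small; any message works):

    from google.protobuf import json_format
    from google.protobuf import struct_pb2

    # Serialize the expected message to JSON, as the fake responses do...
    msg = struct_pb2.Value(string_value="spam")
    payload = json_format.MessageToJson(msg)

    # ...then parse it back, as the client transport does, and confirm the
    # round trip is lossless.
    assert json_format.Parse(payload, struct_pb2.Value()) == msg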
- return_value = policy_pb2.Policy() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.set_iam_policy(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - -def test_test_iam_permissions_rest_bad_request(request_type=iam_policy_pb2.TestIamPermissionsRequest): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/managementServers/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.test_iam_permissions(request) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.TestIamPermissionsRequest, - dict, -]) -def test_test_iam_permissions_rest(request_type): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'resource': 'projects/sample1/locations/sample2/managementServers/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = iam_policy_pb2.TestIamPermissionsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.test_iam_permissions(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - -def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.cancel_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) -def test_cancel_operation_rest(request_type): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.DeleteOperationRequest, - dict, -]) -def test_delete_operation_rest(request_type): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) -def test_get_operation_rest(request_type): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_operations(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) -def test_list_operations_rest(request_type): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_operations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_initialize_client_w_rest(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_management_servers_empty_call_rest(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_management_servers), - '__call__') as call: - client.list_management_servers(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupdr.ListManagementServersRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_management_server_empty_call_rest(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_management_server), - '__call__') as call: - client.get_management_server(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupdr.GetManagementServerRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_management_server_empty_call_rest(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_management_server), - '__call__') as call: - client.create_management_server(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupdr.CreateManagementServerRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_management_server_empty_call_rest(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_management_server), - '__call__') as call: - client.delete_management_server(request=None) - - # Establish that the underlying stub method was called. 
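Each of these empty-call failsafes leans on the fact that entries in Mock.mock_calls unpack as (name, args, kwargs) triples, which is what the recurring `_, args, _ = call.mock_calls[0]` line exploits. A standalone illustration of that idiom:

    from unittest import mock

    m = mock.Mock()
    m(42, flavor="squid")

    # Every record in mock_calls is a (name, positional args, kwargs) triple;
    # for a direct call on the mock itself the name is the empty string.
    name, args, kwargs = m.mock_calls[0]
    assert name == ""
    assert args == (42,)
    assert kwargs == {"flavor": "squid"}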
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupdr.DeleteManagementServerRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_backup_vault_empty_call_rest(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_vault), - '__call__') as call: - client.create_backup_vault(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.CreateBackupVaultRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_backup_vaults_empty_call_rest(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_vaults), - '__call__') as call: - client.list_backup_vaults(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.ListBackupVaultsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_fetch_usable_backup_vaults_empty_call_rest(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.fetch_usable_backup_vaults), - '__call__') as call: - client.fetch_usable_backup_vaults(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.FetchUsableBackupVaultsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_backup_vault_empty_call_rest(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_vault), - '__call__') as call: - client.get_backup_vault(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.GetBackupVaultRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_backup_vault_empty_call_rest(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_backup_vault), - '__call__') as call: - client.update_backup_vault(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.UpdateBackupVaultRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_backup_vault_empty_call_rest(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_vault), - '__call__') as call: - client.delete_backup_vault(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.DeleteBackupVaultRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_data_sources_empty_call_rest(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_data_sources), - '__call__') as call: - client.list_data_sources(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.ListDataSourcesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_data_source_empty_call_rest(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_data_source), - '__call__') as call: - client.get_data_source(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.GetDataSourceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_data_source_empty_call_rest(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_data_source), - '__call__') as call: - client.update_data_source(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.UpdateDataSourceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_backups_empty_call_rest(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_backups), - '__call__') as call: - client.list_backups(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.ListBackupsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_backup_empty_call_rest(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_backup), - '__call__') as call: - client.get_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.GetBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_backup_empty_call_rest(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_backup), - '__call__') as call: - client.update_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.UpdateBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_backup_empty_call_rest(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup), - '__call__') as call: - client.delete_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.DeleteBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_restore_backup_empty_call_rest(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.restore_backup), - '__call__') as call: - client.restore_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.RestoreBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_backup_plan_empty_call_rest(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_plan), - '__call__') as call: - client.create_backup_plan(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplan.CreateBackupPlanRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_backup_plan_empty_call_rest(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_plan), - '__call__') as call: - client.get_backup_plan(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplan.GetBackupPlanRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_backup_plans_empty_call_rest(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_plans), - '__call__') as call: - client.list_backup_plans(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplan.ListBackupPlansRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_backup_plan_empty_call_rest(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_plan), - '__call__') as call: - client.delete_backup_plan(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplan.DeleteBackupPlanRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_backup_plan_association_empty_call_rest(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_plan_association), - '__call__') as call: - client.create_backup_plan_association(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplanassociation.CreateBackupPlanAssociationRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_backup_plan_association_empty_call_rest(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_plan_association), - '__call__') as call: - client.get_backup_plan_association(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplanassociation.GetBackupPlanAssociationRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_backup_plan_associations_empty_call_rest(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_plan_associations), - '__call__') as call: - client.list_backup_plan_associations(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplanassociation.ListBackupPlanAssociationsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_backup_plan_association_empty_call_rest(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_plan_association), - '__call__') as call: - client.delete_backup_plan_association(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplanassociation.DeleteBackupPlanAssociationRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_trigger_backup_empty_call_rest(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.trigger_backup), - '__call__') as call: - client.trigger_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplanassociation.TriggerBackupRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_initialize_service_empty_call_rest(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.initialize_service), - '__call__') as call: - client.initialize_service(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupdr.InitializeServiceRequest() - - assert args[0] == request_msg - - -def test_backup_dr_rest_lro_client(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - transport = client.transport - - # Ensure that we have an api-core operations client. - assert isinstance( - transport.operations_client, -operations_v1.AbstractOperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. 
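The assertion that follows checks identity, not equality: the transport is expected to build its operations client once and then hand back the same object on every access. A minimal sketch of that lazily cached property pattern; the Transport class below is a hypothetical stand-in, not the real transport:

    class Transport:
        _operations_client = None

        @property
        def operations_client(self):
            # Construct the helper on first access, then reuse it forever.
            if self._operations_client is None:
                self._operations_client = object()  # stand-in for the real client
            return self._operations_client

    t = Transport()
    assert t.operations_client is t.operations_client  # same object every time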
- assert transport.operations_client is transport.operations_client - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.BackupDRGrpcTransport, - ) - -def test_backup_dr_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.BackupDRTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_backup_dr_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.backupdr_v1.services.backup_dr.transports.BackupDRTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.BackupDRTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'list_management_servers', - 'get_management_server', - 'create_management_server', - 'delete_management_server', - 'create_backup_vault', - 'list_backup_vaults', - 'fetch_usable_backup_vaults', - 'get_backup_vault', - 'update_backup_vault', - 'delete_backup_vault', - 'list_data_sources', - 'get_data_source', - 'update_data_source', - 'list_backups', - 'get_backup', - 'update_backup', - 'delete_backup', - 'restore_backup', - 'create_backup_plan', - 'get_backup_plan', - 'list_backup_plans', - 'delete_backup_plan', - 'create_backup_plan_association', - 'get_backup_plan_association', - 'list_backup_plan_associations', - 'delete_backup_plan_association', - 'trigger_backup', - 'initialize_service', - 'set_iam_policy', - 'get_iam_policy', - 'test_iam_permissions', - 'get_location', - 'list_locations', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_backup_dr_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.backupdr_v1.services.backup_dr.transports.BackupDRTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.BackupDRTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_backup_dr_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. 
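The next tests pin down the Application Default Credentials fallback: when neither credentials nor a credentials file is supplied, the code is expected to call google.auth.default(). Patching that function is enough to verify the fallback without touching a real environment. A minimal sketch; load_creds_somehow is a hypothetical stand-in for any code with this fallback:

    from unittest import mock

    import google.auth

    def load_creds_somehow(credentials=None):
        # Hypothetical fallback: use explicit credentials if given,
        # otherwise defer to Application Default Credentials.
        if credentials is not None:
            return credentials
        creds, _project = google.auth.default()
        return creds

    def test_falls_back_to_adc():
        with mock.patch.object(google.auth, "default", autospec=True) as adc:
            adc.return_value = (mock.Mock(), None)
            load_creds_somehow()
            adc.assert_called_once()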
- with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.backupdr_v1.services.backup_dr.transports.BackupDRTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.BackupDRTransport() - adc.assert_called_once() - - -def test_backup_dr_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - BackupDRClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.BackupDRGrpcTransport, - transports.BackupDRGrpcAsyncIOTransport, - ], -) -def test_backup_dr_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.BackupDRGrpcTransport, - transports.BackupDRGrpcAsyncIOTransport, - transports.BackupDRRestTransport, - ], -) -def test_backup_dr_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.BackupDRGrpcTransport, grpc_helpers), - (transports.BackupDRGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_backup_dr_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "backupdr.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="backupdr.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.BackupDRGrpcTransport, transports.BackupDRGrpcAsyncIOTransport]) -def test_backup_dr_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_backup_dr_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.BackupDRRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_backup_dr_host_no_port(transport_name): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='backupdr.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'backupdr.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://backupdr.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_backup_dr_host_with_port(transport_name): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='backupdr.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'backupdr.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 
'https://backupdr.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_backup_dr_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = BackupDRClient( - credentials=creds1, - transport=transport_name, - ) - client2 = BackupDRClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.list_management_servers._session - session2 = client2.transport.list_management_servers._session - assert session1 != session2 - session1 = client1.transport.get_management_server._session - session2 = client2.transport.get_management_server._session - assert session1 != session2 - session1 = client1.transport.create_management_server._session - session2 = client2.transport.create_management_server._session - assert session1 != session2 - session1 = client1.transport.delete_management_server._session - session2 = client2.transport.delete_management_server._session - assert session1 != session2 - session1 = client1.transport.create_backup_vault._session - session2 = client2.transport.create_backup_vault._session - assert session1 != session2 - session1 = client1.transport.list_backup_vaults._session - session2 = client2.transport.list_backup_vaults._session - assert session1 != session2 - session1 = client1.transport.fetch_usable_backup_vaults._session - session2 = client2.transport.fetch_usable_backup_vaults._session - assert session1 != session2 - session1 = client1.transport.get_backup_vault._session - session2 = client2.transport.get_backup_vault._session - assert session1 != session2 - session1 = client1.transport.update_backup_vault._session - session2 = client2.transport.update_backup_vault._session - assert session1 != session2 - session1 = client1.transport.delete_backup_vault._session - session2 = client2.transport.delete_backup_vault._session - assert session1 != session2 - session1 = client1.transport.list_data_sources._session - session2 = client2.transport.list_data_sources._session - assert session1 != session2 - session1 = client1.transport.get_data_source._session - session2 = client2.transport.get_data_source._session - assert session1 != session2 - session1 = client1.transport.update_data_source._session - session2 = client2.transport.update_data_source._session - assert session1 != session2 - session1 = client1.transport.list_backups._session - session2 = client2.transport.list_backups._session - assert session1 != session2 - session1 = client1.transport.get_backup._session - session2 = client2.transport.get_backup._session - assert session1 != session2 - session1 = client1.transport.update_backup._session - session2 = client2.transport.update_backup._session - assert session1 != session2 - session1 = client1.transport.delete_backup._session - session2 = client2.transport.delete_backup._session - assert session1 != session2 - session1 = client1.transport.restore_backup._session - session2 = client2.transport.restore_backup._session - assert session1 != session2 - session1 = client1.transport.create_backup_plan._session - session2 = client2.transport.create_backup_plan._session - assert session1 != session2 - session1 = client1.transport.get_backup_plan._session - session2 = client2.transport.get_backup_plan._session - assert session1 != session2 - session1 = client1.transport.list_backup_plans._session - session2 = client2.transport.list_backup_plans._session - assert session1 != session2 - session1 = 
client1.transport.delete_backup_plan._session - session2 = client2.transport.delete_backup_plan._session - assert session1 != session2 - session1 = client1.transport.create_backup_plan_association._session - session2 = client2.transport.create_backup_plan_association._session - assert session1 != session2 - session1 = client1.transport.get_backup_plan_association._session - session2 = client2.transport.get_backup_plan_association._session - assert session1 != session2 - session1 = client1.transport.list_backup_plan_associations._session - session2 = client2.transport.list_backup_plan_associations._session - assert session1 != session2 - session1 = client1.transport.delete_backup_plan_association._session - session2 = client2.transport.delete_backup_plan_association._session - assert session1 != session2 - session1 = client1.transport.trigger_backup._session - session2 = client2.transport.trigger_backup._session - assert session1 != session2 - session1 = client1.transport.initialize_service._session - session2 = client2.transport.initialize_service._session - assert session1 != session2 -def test_backup_dr_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.BackupDRGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_backup_dr_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.BackupDRGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
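The two mTLS tests below both reduce to one conversion: a client_cert_source callback yields an in-memory (certificate chain, private key) pair, and the transport feeds that pair to grpc.ssl_channel_credentials. Sketched in isolation; the callback here is a stand-in for the client_cert_source_callback fixture these tests use, and the bytes are placeholders, not valid PEM:

    import grpc

    def client_cert_source_callback():
        # Stand-in fixture: returns PEM bytes for the client certificate
        # chain and its private key.
        return b"cert bytes", b"key bytes"

    cert, key = client_cert_source_callback()
    channel_credentials = grpc.ssl_channel_credentials(
        certificate_chain=cert,
        private_key=key,
    )
    # Channel creation is lazy; the credentials are only exercised at handshake.
    channel = grpc.secure_channel("mtls.squid.clam.whelk:443", channel_credentials)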
-@pytest.mark.parametrize("transport_class", [transports.BackupDRGrpcTransport, transports.BackupDRGrpcAsyncIOTransport]) -def test_backup_dr_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.BackupDRGrpcTransport, transports.BackupDRGrpcAsyncIOTransport]) -def test_backup_dr_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_backup_dr_grpc_lro_client(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_backup_dr_grpc_lro_async_client(): - client = BackupDRAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. 
- assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_backup_path(): - project = "squid" - location = "clam" - backupvault = "whelk" - datasource = "octopus" - backup = "oyster" - expected = "projects/{project}/locations/{location}/backupVaults/{backupvault}/dataSources/{datasource}/backups/{backup}".format(project=project, location=location, backupvault=backupvault, datasource=datasource, backup=backup, ) - actual = BackupDRClient.backup_path(project, location, backupvault, datasource, backup) - assert expected == actual - - -def test_parse_backup_path(): - expected = { - "project": "nudibranch", - "location": "cuttlefish", - "backupvault": "mussel", - "datasource": "winkle", - "backup": "nautilus", - } - path = BackupDRClient.backup_path(**expected) - - # Check that the path construction is reversible. - actual = BackupDRClient.parse_backup_path(path) - assert expected == actual - -def test_backup_plan_path(): - project = "scallop" - location = "abalone" - backup_plan = "squid" - expected = "projects/{project}/locations/{location}/backupPlans/{backup_plan}".format(project=project, location=location, backup_plan=backup_plan, ) - actual = BackupDRClient.backup_plan_path(project, location, backup_plan) - assert expected == actual - - -def test_parse_backup_plan_path(): - expected = { - "project": "clam", - "location": "whelk", - "backup_plan": "octopus", - } - path = BackupDRClient.backup_plan_path(**expected) - - # Check that the path construction is reversible. - actual = BackupDRClient.parse_backup_plan_path(path) - assert expected == actual - -def test_backup_plan_association_path(): - project = "oyster" - location = "nudibranch" - backup_plan_association = "cuttlefish" - expected = "projects/{project}/locations/{location}/backupPlanAssociations/{backup_plan_association}".format(project=project, location=location, backup_plan_association=backup_plan_association, ) - actual = BackupDRClient.backup_plan_association_path(project, location, backup_plan_association) - assert expected == actual - - -def test_parse_backup_plan_association_path(): - expected = { - "project": "mussel", - "location": "winkle", - "backup_plan_association": "nautilus", - } - path = BackupDRClient.backup_plan_association_path(**expected) - - # Check that the path construction is reversible. - actual = BackupDRClient.parse_backup_plan_association_path(path) - assert expected == actual - -def test_backup_vault_path(): - project = "scallop" - location = "abalone" - backupvault = "squid" - expected = "projects/{project}/locations/{location}/backupVaults/{backupvault}".format(project=project, location=location, backupvault=backupvault, ) - actual = BackupDRClient.backup_vault_path(project, location, backupvault) - assert expected == actual - - -def test_parse_backup_vault_path(): - expected = { - "project": "clam", - "location": "whelk", - "backupvault": "octopus", - } - path = BackupDRClient.backup_vault_path(**expected) - - # Check that the path construction is reversible. 
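Every *_path/parse_*_path pair here is exercised the same way: build a resource name from its components, parse it back, and require the original dict. The shape of those helpers, sketched with a regex; this is a simplified stand-in, not the generated implementation:

    import re

    def common_location_path(project: str, location: str) -> str:
        return f"projects/{project}/locations/{location}"

    def parse_common_location_path(path: str) -> dict:
        # Named groups recover exactly the fields the template consumed.
        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
        return m.groupdict() if m else {}

    expected = {"project": "squid", "location": "clam"}
    assert parse_common_location_path(common_location_path(**expected)) == expected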
- actual = BackupDRClient.parse_backup_vault_path(path) - assert expected == actual - -def test_data_source_path(): - project = "oyster" - location = "nudibranch" - backupvault = "cuttlefish" - datasource = "mussel" - expected = "projects/{project}/locations/{location}/backupVaults/{backupvault}/dataSources/{datasource}".format(project=project, location=location, backupvault=backupvault, datasource=datasource, ) - actual = BackupDRClient.data_source_path(project, location, backupvault, datasource) - assert expected == actual - - -def test_parse_data_source_path(): - expected = { - "project": "winkle", - "location": "nautilus", - "backupvault": "scallop", - "datasource": "abalone", - } - path = BackupDRClient.data_source_path(**expected) - - # Check that the path construction is reversible. - actual = BackupDRClient.parse_data_source_path(path) - assert expected == actual - -def test_management_server_path(): - project = "squid" - location = "clam" - managementserver = "whelk" - expected = "projects/{project}/locations/{location}/managementServers/{managementserver}".format(project=project, location=location, managementserver=managementserver, ) - actual = BackupDRClient.management_server_path(project, location, managementserver) - assert expected == actual - - -def test_parse_management_server_path(): - expected = { - "project": "octopus", - "location": "oyster", - "managementserver": "nudibranch", - } - path = BackupDRClient.management_server_path(**expected) - - # Check that the path construction is reversible. - actual = BackupDRClient.parse_management_server_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = BackupDRClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "mussel", - } - path = BackupDRClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = BackupDRClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "winkle" - expected = "folders/{folder}".format(folder=folder, ) - actual = BackupDRClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nautilus", - } - path = BackupDRClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = BackupDRClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "scallop" - expected = "organizations/{organization}".format(organization=organization, ) - actual = BackupDRClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "abalone", - } - path = BackupDRClient.common_organization_path(**expected) - - # Check that the path construction is reversible. 
- actual = BackupDRClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "squid" - expected = "projects/{project}".format(project=project, ) - actual = BackupDRClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "clam", - } - path = BackupDRClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = BackupDRClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "whelk" - location = "octopus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = BackupDRClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - } - path = BackupDRClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = BackupDRClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.BackupDRTransport, '_prep_wrapped_messages') as prep: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.BackupDRTransport, '_prep_wrapped_messages') as prep: - transport_class = BackupDRClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_delete_operation(transport: str = "grpc"): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_field_headers(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_delete_operation_from_dict(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_cancel_operation_from_dict(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.ListOperationsResponse) -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_list_operations_field_headers(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_operations_from_dict(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_locations(transport: str = "grpc"): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - -def test_list_locations_field_headers(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() - - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_locations_from_dict(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_location(transport: str = "grpc"): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - -def test_get_location_field_headers(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials()) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = locations_pb2.Location() - - client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_location_field_headers_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials() - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"] - -def test_get_location_from_dict(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - - response = client.get_location( - request={ - "name": "locations/abc", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_location_from_dict_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location( - request={ - "name": "locations/abc", - } - ) - call.assert_called() - - -def test_set_iam_policy(transport: str = "grpc"): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.SetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) - response = client.set_iam_policy(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" -@pytest.mark.asyncio -async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.SetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy(version=774, etag=b"etag_blob",) - ) - response = await client.set_iam_policy(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - -def test_set_iam_policy_field_headers(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - - client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] -@pytest.mark.asyncio -async def test_set_iam_policy_field_headers_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. 
Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - - await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - -def test_set_iam_policy_from_dict(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - - response = client.set_iam_policy( - request={ - "resource": "resource_value", - "policy": policy_pb2.Policy(version=774), - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_set_iam_policy_from_dict_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy() - ) - - response = await client.set_iam_policy( - request={ - "resource": "resource_value", - "policy": policy_pb2.Policy(version=774), - } - ) - call.assert_called() - -def test_get_iam_policy(transport: str = "grpc"): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.GetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) - - response = client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - - -@pytest.mark.asyncio -async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.GetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy(version=774, etag=b"etag_blob",) - ) - - response = await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - - -def test_get_iam_policy_field_headers(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_iam_policy_field_headers_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - - await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -def test_get_iam_policy_from_dict(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - - response = client.get_iam_policy( - request={ - "resource": "resource_value", - "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_get_iam_policy_from_dict_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy() - ) - - response = await client.get_iam_policy( - request={ - "resource": "resource_value", - "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - -def test_test_iam_permissions(transport: str = "grpc"): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=["permissions_value"], - ) - - response = client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - assert response.permissions == ["permissions_value"] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse(permissions=["permissions_value"],) - ) - - response = await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - assert response.permissions == ["permissions_value"] - - -def test_test_iam_permissions_field_headers(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_field_headers_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse() - ) - - await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -def test_test_iam_permissions_from_dict(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - response = client.test_iam_permissions( - request={ - "resource": "resource_value", - "permissions": ["permissions_value"], - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_test_iam_permissions_from_dict_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse() - ) - - response = await client.test_iam_permissions( - request={ - "resource": "resource_value", - "permissions": ["permissions_value"], - } - ) - call.assert_called() - - -def test_transport_close_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close_rest(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. - with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (BackupDRClient, transports.BackupDRGrpcTransport), - (BackupDRAsyncClient, transports.BackupDRGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/.coveragerc b/owl-bot-staging/google-cloud-bare-metal-solution/v2/.coveragerc deleted file mode 100644 index 22a5f372fd1a..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/bare_metal_solution/__init__.py - google/cloud/bare_metal_solution/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/.flake8 b/owl-bot-staging/google-cloud-bare-metal-solution/v2/.flake8 deleted file mode 100644 index 29227d4cf419..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# 
Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. - **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/MANIFEST.in b/owl-bot-staging/google-cloud-bare-metal-solution/v2/MANIFEST.in deleted file mode 100644 index 5f511e14998c..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/bare_metal_solution *.py -recursive-include google/cloud/bare_metal_solution_v2 *.py diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/README.rst b/owl-bot-staging/google-cloud-bare-metal-solution/v2/README.rst deleted file mode 100644 index e0d5dd5d3ed5..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/README.rst +++ /dev/null @@ -1,143 +0,0 @@ -Python Client for Google Cloud Bare Metal Solution API -======================================================= - -Quick Start ----------- - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Bare Metal Solution API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv <your-env> - source <your-env>/bin/activate - <your-env>/bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. code-block:: console - - python3 -m venv <your-env> - <your-env>\Scripts\activate - <your-env>\Scripts\pip.exe install \path\to\library - - -Logging ------- - -This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes. -Note the following: - -#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging. -#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. 
**Do not depend on immutability of the logging events**. -#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below. - - -Simple, environment-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google -logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged -messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging -event. - -A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log. - -- Valid logging scopes: :code:`google`, :code:`google.cloud.bare_metal_solution_v2`, :code:`google.api`, :code:`google.auth`, etc. -- Invalid logging scopes: :code:`foo`, :code:`123`, etc. - -**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers. - - -Examples -^^^^^^^^ - -- Enabling the default handler for all Google-based loggers - -.. code-block:: console - - export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google - -- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`): - -.. code-block:: console - - export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1 - - -Advanced, code-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -You can also configure a valid logging scope using Python's standard `logging` mechanism. - - -Examples -^^^^^^^^ - -- Configuring a handler for all Google-based loggers - -.. code-block:: python - - import logging - - from google.cloud.translate_v3 import translate - - base_logger = logging.getLogger("google") - base_logger.addHandler(logging.StreamHandler()) - base_logger.setLevel(logging.DEBUG) - -- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`): - -.. code-block:: python - - import logging - - from google.cloud.translate_v3 import translate - - base_logger = logging.getLogger("google.cloud.library_v1") - base_logger.addHandler(logging.StreamHandler()) - base_logger.setLevel(logging.DEBUG) - - -Logging details -~~~~~~~~~~~~~~~ - -#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root - logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set - :code:`logging.getLogger("google").propagate = True` in your code. -#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for - one library, but decide you need to also set up environment-based logging configuration for another library. - - #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual - if the code-based configuration gets applied first. - -#. 
The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get - executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured. - (This is the reason for 2.i. above.) diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/docs/_static/custom.css b/owl-bot-staging/google-cloud-bare-metal-solution/v2/docs/_static/custom.css deleted file mode 100644 index 06423be0b592..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/docs/_static/custom.css +++ /dev/null @@ -1,3 +0,0 @@ -dl.field-list > dt { - min-width: 100px -} diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/docs/bare_metal_solution_v2/bare_metal_solution.rst b/owl-bot-staging/google-cloud-bare-metal-solution/v2/docs/bare_metal_solution_v2/bare_metal_solution.rst deleted file mode 100644 index 304af0b9ebee..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/docs/bare_metal_solution_v2/bare_metal_solution.rst +++ /dev/null @@ -1,10 +0,0 @@ -BareMetalSolution ------------------------------------ - -.. automodule:: google.cloud.bare_metal_solution_v2.services.bare_metal_solution - :members: - :inherited-members: - -.. automodule:: google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/docs/bare_metal_solution_v2/services_.rst b/owl-bot-staging/google-cloud-bare-metal-solution/v2/docs/bare_metal_solution_v2/services_.rst deleted file mode 100644 index 029b46927fe3..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/docs/bare_metal_solution_v2/services_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Bare Metal Solution v2 API -==================================================== -.. toctree:: - :maxdepth: 2 - - bare_metal_solution diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/docs/bare_metal_solution_v2/types_.rst b/owl-bot-staging/google-cloud-bare-metal-solution/v2/docs/bare_metal_solution_v2/types_.rst deleted file mode 100644 index 0433ab76c0f6..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/docs/bare_metal_solution_v2/types_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Bare Metal Solution v2 API -================================================= - -.. automodule:: google.cloud.bare_metal_solution_v2.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/docs/conf.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/docs/conf.py deleted file mode 100644 index fc7a9e412dff..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#
-#
-# google-cloud-bare-metal-solution documentation build configuration file
-#
-# This file is execfile()d with the current directory set to its
-# containing dir.
-#
-# Note that not all possible configuration values are present in this
-# autogenerated file.
-#
-# All configuration values have a default; values that are commented out
-# serve to show the default.
-
-import sys
-import os
-import shlex
-
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
-sys.path.insert(0, os.path.abspath(".."))
-
-__version__ = "0.1.0"
-
-# -- General configuration ------------------------------------------------
-
-# If your documentation needs a minimal Sphinx version, state it here.
-needs_sphinx = "4.0.1"
-
-# Add any Sphinx extension module names here, as strings. They can be
-# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
-# ones.
-extensions = [
-    "sphinx.ext.autodoc",
-    "sphinx.ext.autosummary",
-    "sphinx.ext.intersphinx",
-    "sphinx.ext.coverage",
-    "sphinx.ext.napoleon",
-    "sphinx.ext.todo",
-    "sphinx.ext.viewcode",
-]
-
-# autodoc/autosummary flags
-autoclass_content = "both"
-autodoc_default_flags = ["members"]
-autosummary_generate = True
-
-
-# Add any paths that contain templates here, relative to this directory.
-templates_path = ["_templates"]
-
-# Allow markdown includes (so releases.md can include CHANGELOG.md)
-# http://www.sphinx-doc.org/en/master/markdown.html
-source_parsers = {".md": "recommonmark.parser.CommonMarkParser"}
-
-# The suffix(es) of source filenames.
-# You can specify multiple suffixes as a list of strings:
-source_suffix = [".rst", ".md"]
-
-# The encoding of source files.
-# source_encoding = 'utf-8-sig'
-
-# The root toctree document.
-root_doc = "index"
-
-# General information about the project.
-project = u"google-cloud-bare-metal-solution"
-copyright = u"2023, Google, LLC"
-author = u"Google APIs"  # TODO: autogenerate this bit
-
-# The version info for the project you're documenting, acts as replacement for
-# |version| and |release|, also used in various other places throughout the
-# built documents.
-#
-# The full version, including alpha/beta/rc tags.
-release = __version__
-# The short X.Y version.
-version = ".".join(release.split(".")[0:2])
-
-# The language for content autogenerated by Sphinx. Refer to documentation
-# for a list of supported languages.
-#
-# This is also used if you do content translation via gettext catalogs.
-# Usually you set "language" from the command line for these cases.
-language = 'en'
-
-# There are two options for replacing |today|: either, you set today to some
-# non-false value, then it is used:
-# today = ''
-# Else, today_fmt is used as the format for a strftime call.
-# today_fmt = '%B %d, %Y'
-
-# List of patterns, relative to source directory, that match files and
-# directories to ignore when looking for source files.
-exclude_patterns = ["_build"]
-
-# The reST default role (used for this markup: `text`) to use for all
-# documents.
-# default_role = None
-
-# If true, '()' will be appended to :func: etc. cross-reference text.
-# add_function_parentheses = True
-
-# If true, the current module name will be prepended to all description
-# unit titles (such as .. function::).
-# add_module_names = True
-
-# If true, sectionauthor and moduleauthor directives will be shown in the
-# output. They are ignored by default.
-# show_authors = False
-
-# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = "sphinx"
-
-# A list of ignored prefixes for module index sorting.
-# modindex_common_prefix = []
-
-# If true, keep warnings as "system message" paragraphs in the built documents.
-# keep_warnings = False
-
-# If true, `todo` and `todoList` produce output, else they produce nothing.
-todo_include_todos = True
-
-
-# -- Options for HTML output ----------------------------------------------
-
-# The theme to use for HTML and HTML Help pages. See the documentation for
-# a list of builtin themes.
-html_theme = "alabaster"
-
-# Theme options are theme-specific and customize the look and feel of a theme
-# further. For a list of options available for each theme, see the
-# documentation.
-html_theme_options = {
-    "description": "Google Cloud Client Libraries for Python",
-    "github_user": "googleapis",
-    "github_repo": "google-cloud-python",
-    "github_banner": True,
-    "font_family": "'Roboto', Georgia, sans",
-    "head_font_family": "'Roboto', Georgia, serif",
-    "code_font_family": "'Roboto Mono', 'Consolas', monospace",
-}
-
-# Add any paths that contain custom themes here, relative to this directory.
-# html_theme_path = []
-
-# The name for this set of Sphinx documents. If None, it defaults to
-# "<project> v<release> documentation".
-# html_title = None
-
-# A shorter title for the navigation bar. Default is the same as html_title.
-# html_short_title = None
-
-# The name of an image file (relative to this directory) to place at the top
-# of the sidebar.
-# html_logo = None
-
-# The name of an image file (within the static path) to use as favicon of the
-# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
-# pixels large.
-# html_favicon = None
-
-# Add any paths that contain custom static files (such as style sheets) here,
-# relative to this directory. They are copied after the builtin static files,
-# so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ["_static"]
-
-# Add any extra paths that contain custom files (such as robots.txt or
-# .htaccess) here, relative to this directory. These files are copied
-# directly to the root of the documentation.
-# html_extra_path = []
-
-# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
-# using the given strftime format.
-# html_last_updated_fmt = '%b %d, %Y'
-
-# If true, SmartyPants will be used to convert quotes and dashes to
-# typographically correct entities.
-# html_use_smartypants = True
-
-# Custom sidebar templates, maps document names to template names.
-# html_sidebars = {}
-
-# Additional templates that should be rendered to pages, maps page names to
-# template names.
-# html_additional_pages = {}
-
-# If false, no module index is generated.
-# html_domain_indices = True
-
-# If false, no index is generated.
-# html_use_index = True
-
-# If true, the index is split into individual pages for each letter.
-# html_split_index = False
-
-# If true, links to the reST sources are added to the pages.
-# html_show_sourcelink = True
-
-# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
-# html_show_sphinx = True
-
-# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
-# html_show_copyright = True
-
-# If true, an OpenSearch description file will be output, and all pages will
-# contain a <link> tag referring to it. The value of this option must be the
-# base URL from which the finished HTML is served.
-# html_use_opensearch = ''
-
-# This is the file name suffix for HTML files (e.g. ".xhtml").
-# html_file_suffix = None
-
-# Language to be used for generating the HTML full-text search index.
-# Sphinx supports the following languages:
-#   'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
-#   'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
-# html_search_language = 'en'
-
-# A dictionary with options for the search language support, empty by default.
-# Now only 'ja' uses this config value
-# html_search_options = {'type': 'default'}
-
-# The name of a javascript file (relative to the configuration directory) that
-# implements a search results scorer. If empty, the default will be used.
-# html_search_scorer = 'scorer.js'
-
-# Output file base name for HTML help builder.
-htmlhelp_basename = "google-cloud-bare-metal-solution-doc"
-
-# -- Options for warnings ------------------------------------------------------
-
-
-suppress_warnings = [
-    # Temporarily suppress this to avoid "more than one target found for
-    # cross-reference" warnings, which are intractable for us to avoid while in
-    # a mono-repo.
-    # See https://github.com/sphinx-doc/sphinx/blob
-    # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843
-    "ref.python"
-]
-
-# -- Options for LaTeX output ---------------------------------------------
-
-latex_elements = {
-    # The paper size ('letterpaper' or 'a4paper').
-    # 'papersize': 'letterpaper',
-    # The font size ('10pt', '11pt' or '12pt').
-    # 'pointsize': '10pt',
-    # Additional stuff for the LaTeX preamble.
-    # 'preamble': '',
-    # Latex figure (float) alignment
-    # 'figure_align': 'htbp',
-}
-
-# Grouping the document tree into LaTeX files. List of tuples
-# (source start file, target name, title,
-#  author, documentclass [howto, manual, or own class]).
-latex_documents = [
-    (
-        root_doc,
-        "google-cloud-bare-metal-solution.tex",
-        u"google-cloud-bare-metal-solution Documentation",
-        author,
-        "manual",
-    )
-]
-
-# The name of an image file (relative to this directory) to place at the top of
-# the title page.
-# latex_logo = None
-
-# For "manual" documents, if this is true, then toplevel headings are parts,
-# not chapters.
-# latex_use_parts = False
-
-# If true, show page references after internal links.
-# latex_show_pagerefs = False
-
-# If true, show URL addresses after external links.
-# latex_show_urls = False
-
-# Documents to append as an appendix to all manuals.
-# latex_appendices = []
-
-# If false, no module index is generated.
-# latex_domain_indices = True
-
-
-# -- Options for manual page output ---------------------------------------
-
-# One entry per manual page. List of tuples
-# (source start file, name, description, authors, manual section).
-man_pages = [
-    (
-        root_doc,
-        "google-cloud-bare-metal-solution",
-        u"Google Cloud Bare Metal Solution Documentation",
-        [author],
-        1,
-    )
-]
-
-# If true, show URL addresses after external links.
-# man_show_urls = False
-
-
-# -- Options for Texinfo output -------------------------------------------
-
-# Grouping the document tree into Texinfo files.
List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-bare-metal-solution", - u"google-cloud-bare-metal-solution Documentation", - author, - "google-cloud-bare-metal-solution", - "GAPIC library for Google Cloud Bare Metal Solution API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/docs/index.rst b/owl-bot-staging/google-cloud-bare-metal-solution/v2/docs/index.rst deleted file mode 100644 index 61fd5ae592fb..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - bare_metal_solution_v2/services_ - bare_metal_solution_v2/types_ diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution/__init__.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution/__init__.py deleted file mode 100644 index 55944c9cda86..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution/__init__.py +++ /dev/null @@ -1,193 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.bare_metal_solution import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.bare_metal_solution_v2.services.bare_metal_solution.client import BareMetalSolutionClient -from google.cloud.bare_metal_solution_v2.services.bare_metal_solution.async_client import BareMetalSolutionAsyncClient - -from google.cloud.bare_metal_solution_v2.types.baremetalsolution import OperationMetadata -from google.cloud.bare_metal_solution_v2.types.baremetalsolution import ResetInstanceResponse -from google.cloud.bare_metal_solution_v2.types.common import VolumePerformanceTier -from google.cloud.bare_metal_solution_v2.types.common import WorkloadProfile -from google.cloud.bare_metal_solution_v2.types.instance import DetachLunRequest -from google.cloud.bare_metal_solution_v2.types.instance import DisableInteractiveSerialConsoleRequest -from google.cloud.bare_metal_solution_v2.types.instance import DisableInteractiveSerialConsoleResponse -from google.cloud.bare_metal_solution_v2.types.instance import EnableInteractiveSerialConsoleRequest -from google.cloud.bare_metal_solution_v2.types.instance import EnableInteractiveSerialConsoleResponse -from google.cloud.bare_metal_solution_v2.types.instance import GetInstanceRequest -from google.cloud.bare_metal_solution_v2.types.instance import Instance -from google.cloud.bare_metal_solution_v2.types.instance import ListInstancesRequest -from google.cloud.bare_metal_solution_v2.types.instance import ListInstancesResponse -from google.cloud.bare_metal_solution_v2.types.instance import RenameInstanceRequest -from google.cloud.bare_metal_solution_v2.types.instance import ResetInstanceRequest -from google.cloud.bare_metal_solution_v2.types.instance import ServerNetworkTemplate -from google.cloud.bare_metal_solution_v2.types.instance import StartInstanceRequest -from google.cloud.bare_metal_solution_v2.types.instance import StartInstanceResponse -from google.cloud.bare_metal_solution_v2.types.instance import StopInstanceRequest -from google.cloud.bare_metal_solution_v2.types.instance import StopInstanceResponse -from google.cloud.bare_metal_solution_v2.types.instance import UpdateInstanceRequest -from google.cloud.bare_metal_solution_v2.types.lun import EvictLunRequest -from google.cloud.bare_metal_solution_v2.types.lun import GetLunRequest -from google.cloud.bare_metal_solution_v2.types.lun import ListLunsRequest -from google.cloud.bare_metal_solution_v2.types.lun import ListLunsResponse -from google.cloud.bare_metal_solution_v2.types.lun import Lun -from google.cloud.bare_metal_solution_v2.types.network import GetNetworkRequest -from google.cloud.bare_metal_solution_v2.types.network import ListNetworksRequest -from google.cloud.bare_metal_solution_v2.types.network import ListNetworksResponse -from google.cloud.bare_metal_solution_v2.types.network import ListNetworkUsageRequest -from google.cloud.bare_metal_solution_v2.types.network import ListNetworkUsageResponse -from google.cloud.bare_metal_solution_v2.types.network import LogicalInterface -from google.cloud.bare_metal_solution_v2.types.network import Network -from google.cloud.bare_metal_solution_v2.types.network import NetworkAddressReservation -from google.cloud.bare_metal_solution_v2.types.network import NetworkMountPoint -from google.cloud.bare_metal_solution_v2.types.network import NetworkUsage -from google.cloud.bare_metal_solution_v2.types.network import RenameNetworkRequest -from google.cloud.bare_metal_solution_v2.types.network import 
UpdateNetworkRequest -from google.cloud.bare_metal_solution_v2.types.network import VRF -from google.cloud.bare_metal_solution_v2.types.nfs_share import CreateNfsShareRequest -from google.cloud.bare_metal_solution_v2.types.nfs_share import DeleteNfsShareRequest -from google.cloud.bare_metal_solution_v2.types.nfs_share import GetNfsShareRequest -from google.cloud.bare_metal_solution_v2.types.nfs_share import ListNfsSharesRequest -from google.cloud.bare_metal_solution_v2.types.nfs_share import ListNfsSharesResponse -from google.cloud.bare_metal_solution_v2.types.nfs_share import NfsShare -from google.cloud.bare_metal_solution_v2.types.nfs_share import RenameNfsShareRequest -from google.cloud.bare_metal_solution_v2.types.nfs_share import UpdateNfsShareRequest -from google.cloud.bare_metal_solution_v2.types.osimage import ListOSImagesRequest -from google.cloud.bare_metal_solution_v2.types.osimage import ListOSImagesResponse -from google.cloud.bare_metal_solution_v2.types.osimage import OSImage -from google.cloud.bare_metal_solution_v2.types.provisioning import CreateProvisioningConfigRequest -from google.cloud.bare_metal_solution_v2.types.provisioning import GetProvisioningConfigRequest -from google.cloud.bare_metal_solution_v2.types.provisioning import InstanceConfig -from google.cloud.bare_metal_solution_v2.types.provisioning import InstanceQuota -from google.cloud.bare_metal_solution_v2.types.provisioning import ListProvisioningQuotasRequest -from google.cloud.bare_metal_solution_v2.types.provisioning import ListProvisioningQuotasResponse -from google.cloud.bare_metal_solution_v2.types.provisioning import NetworkConfig -from google.cloud.bare_metal_solution_v2.types.provisioning import ProvisioningConfig -from google.cloud.bare_metal_solution_v2.types.provisioning import ProvisioningQuota -from google.cloud.bare_metal_solution_v2.types.provisioning import SubmitProvisioningConfigRequest -from google.cloud.bare_metal_solution_v2.types.provisioning import SubmitProvisioningConfigResponse -from google.cloud.bare_metal_solution_v2.types.provisioning import UpdateProvisioningConfigRequest -from google.cloud.bare_metal_solution_v2.types.provisioning import VolumeConfig -from google.cloud.bare_metal_solution_v2.types.ssh_key import CreateSSHKeyRequest -from google.cloud.bare_metal_solution_v2.types.ssh_key import DeleteSSHKeyRequest -from google.cloud.bare_metal_solution_v2.types.ssh_key import ListSSHKeysRequest -from google.cloud.bare_metal_solution_v2.types.ssh_key import ListSSHKeysResponse -from google.cloud.bare_metal_solution_v2.types.ssh_key import SSHKey -from google.cloud.bare_metal_solution_v2.types.volume import EvictVolumeRequest -from google.cloud.bare_metal_solution_v2.types.volume import GetVolumeRequest -from google.cloud.bare_metal_solution_v2.types.volume import ListVolumesRequest -from google.cloud.bare_metal_solution_v2.types.volume import ListVolumesResponse -from google.cloud.bare_metal_solution_v2.types.volume import RenameVolumeRequest -from google.cloud.bare_metal_solution_v2.types.volume import ResizeVolumeRequest -from google.cloud.bare_metal_solution_v2.types.volume import UpdateVolumeRequest -from google.cloud.bare_metal_solution_v2.types.volume import Volume -from google.cloud.bare_metal_solution_v2.types.volume_snapshot import CreateVolumeSnapshotRequest -from google.cloud.bare_metal_solution_v2.types.volume_snapshot import DeleteVolumeSnapshotRequest -from google.cloud.bare_metal_solution_v2.types.volume_snapshot import GetVolumeSnapshotRequest -from 
google.cloud.bare_metal_solution_v2.types.volume_snapshot import ListVolumeSnapshotsRequest -from google.cloud.bare_metal_solution_v2.types.volume_snapshot import ListVolumeSnapshotsResponse -from google.cloud.bare_metal_solution_v2.types.volume_snapshot import RestoreVolumeSnapshotRequest -from google.cloud.bare_metal_solution_v2.types.volume_snapshot import VolumeSnapshot - -__all__ = ('BareMetalSolutionClient', - 'BareMetalSolutionAsyncClient', - 'OperationMetadata', - 'ResetInstanceResponse', - 'VolumePerformanceTier', - 'WorkloadProfile', - 'DetachLunRequest', - 'DisableInteractiveSerialConsoleRequest', - 'DisableInteractiveSerialConsoleResponse', - 'EnableInteractiveSerialConsoleRequest', - 'EnableInteractiveSerialConsoleResponse', - 'GetInstanceRequest', - 'Instance', - 'ListInstancesRequest', - 'ListInstancesResponse', - 'RenameInstanceRequest', - 'ResetInstanceRequest', - 'ServerNetworkTemplate', - 'StartInstanceRequest', - 'StartInstanceResponse', - 'StopInstanceRequest', - 'StopInstanceResponse', - 'UpdateInstanceRequest', - 'EvictLunRequest', - 'GetLunRequest', - 'ListLunsRequest', - 'ListLunsResponse', - 'Lun', - 'GetNetworkRequest', - 'ListNetworksRequest', - 'ListNetworksResponse', - 'ListNetworkUsageRequest', - 'ListNetworkUsageResponse', - 'LogicalInterface', - 'Network', - 'NetworkAddressReservation', - 'NetworkMountPoint', - 'NetworkUsage', - 'RenameNetworkRequest', - 'UpdateNetworkRequest', - 'VRF', - 'CreateNfsShareRequest', - 'DeleteNfsShareRequest', - 'GetNfsShareRequest', - 'ListNfsSharesRequest', - 'ListNfsSharesResponse', - 'NfsShare', - 'RenameNfsShareRequest', - 'UpdateNfsShareRequest', - 'ListOSImagesRequest', - 'ListOSImagesResponse', - 'OSImage', - 'CreateProvisioningConfigRequest', - 'GetProvisioningConfigRequest', - 'InstanceConfig', - 'InstanceQuota', - 'ListProvisioningQuotasRequest', - 'ListProvisioningQuotasResponse', - 'NetworkConfig', - 'ProvisioningConfig', - 'ProvisioningQuota', - 'SubmitProvisioningConfigRequest', - 'SubmitProvisioningConfigResponse', - 'UpdateProvisioningConfigRequest', - 'VolumeConfig', - 'CreateSSHKeyRequest', - 'DeleteSSHKeyRequest', - 'ListSSHKeysRequest', - 'ListSSHKeysResponse', - 'SSHKey', - 'EvictVolumeRequest', - 'GetVolumeRequest', - 'ListVolumesRequest', - 'ListVolumesResponse', - 'RenameVolumeRequest', - 'ResizeVolumeRequest', - 'UpdateVolumeRequest', - 'Volume', - 'CreateVolumeSnapshotRequest', - 'DeleteVolumeSnapshotRequest', - 'GetVolumeSnapshotRequest', - 'ListVolumeSnapshotsRequest', - 'ListVolumeSnapshotsResponse', - 'RestoreVolumeSnapshotRequest', - 'VolumeSnapshot', -) diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution/gapic_version.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution/py.typed b/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution/py.typed deleted file mode 100644 index 2afe13f6aed8..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-bare-metal-solution package uses inline types. diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/__init__.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/__init__.py deleted file mode 100644 index 200d25acdd4b..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/__init__.py +++ /dev/null @@ -1,194 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.bare_metal_solution_v2 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.bare_metal_solution import BareMetalSolutionClient -from .services.bare_metal_solution import BareMetalSolutionAsyncClient - -from .types.baremetalsolution import OperationMetadata -from .types.baremetalsolution import ResetInstanceResponse -from .types.common import VolumePerformanceTier -from .types.common import WorkloadProfile -from .types.instance import DetachLunRequest -from .types.instance import DisableInteractiveSerialConsoleRequest -from .types.instance import DisableInteractiveSerialConsoleResponse -from .types.instance import EnableInteractiveSerialConsoleRequest -from .types.instance import EnableInteractiveSerialConsoleResponse -from .types.instance import GetInstanceRequest -from .types.instance import Instance -from .types.instance import ListInstancesRequest -from .types.instance import ListInstancesResponse -from .types.instance import RenameInstanceRequest -from .types.instance import ResetInstanceRequest -from .types.instance import ServerNetworkTemplate -from .types.instance import StartInstanceRequest -from .types.instance import StartInstanceResponse -from .types.instance import StopInstanceRequest -from .types.instance import StopInstanceResponse -from .types.instance import UpdateInstanceRequest -from .types.lun import EvictLunRequest -from .types.lun import GetLunRequest -from .types.lun import ListLunsRequest -from .types.lun import ListLunsResponse -from .types.lun import Lun -from .types.network import GetNetworkRequest -from .types.network import ListNetworksRequest -from .types.network import ListNetworksResponse -from .types.network import ListNetworkUsageRequest -from .types.network import ListNetworkUsageResponse -from .types.network import LogicalInterface -from 
.types.network import Network -from .types.network import NetworkAddressReservation -from .types.network import NetworkMountPoint -from .types.network import NetworkUsage -from .types.network import RenameNetworkRequest -from .types.network import UpdateNetworkRequest -from .types.network import VRF -from .types.nfs_share import CreateNfsShareRequest -from .types.nfs_share import DeleteNfsShareRequest -from .types.nfs_share import GetNfsShareRequest -from .types.nfs_share import ListNfsSharesRequest -from .types.nfs_share import ListNfsSharesResponse -from .types.nfs_share import NfsShare -from .types.nfs_share import RenameNfsShareRequest -from .types.nfs_share import UpdateNfsShareRequest -from .types.osimage import ListOSImagesRequest -from .types.osimage import ListOSImagesResponse -from .types.osimage import OSImage -from .types.provisioning import CreateProvisioningConfigRequest -from .types.provisioning import GetProvisioningConfigRequest -from .types.provisioning import InstanceConfig -from .types.provisioning import InstanceQuota -from .types.provisioning import ListProvisioningQuotasRequest -from .types.provisioning import ListProvisioningQuotasResponse -from .types.provisioning import NetworkConfig -from .types.provisioning import ProvisioningConfig -from .types.provisioning import ProvisioningQuota -from .types.provisioning import SubmitProvisioningConfigRequest -from .types.provisioning import SubmitProvisioningConfigResponse -from .types.provisioning import UpdateProvisioningConfigRequest -from .types.provisioning import VolumeConfig -from .types.ssh_key import CreateSSHKeyRequest -from .types.ssh_key import DeleteSSHKeyRequest -from .types.ssh_key import ListSSHKeysRequest -from .types.ssh_key import ListSSHKeysResponse -from .types.ssh_key import SSHKey -from .types.volume import EvictVolumeRequest -from .types.volume import GetVolumeRequest -from .types.volume import ListVolumesRequest -from .types.volume import ListVolumesResponse -from .types.volume import RenameVolumeRequest -from .types.volume import ResizeVolumeRequest -from .types.volume import UpdateVolumeRequest -from .types.volume import Volume -from .types.volume_snapshot import CreateVolumeSnapshotRequest -from .types.volume_snapshot import DeleteVolumeSnapshotRequest -from .types.volume_snapshot import GetVolumeSnapshotRequest -from .types.volume_snapshot import ListVolumeSnapshotsRequest -from .types.volume_snapshot import ListVolumeSnapshotsResponse -from .types.volume_snapshot import RestoreVolumeSnapshotRequest -from .types.volume_snapshot import VolumeSnapshot - -__all__ = ( - 'BareMetalSolutionAsyncClient', -'BareMetalSolutionClient', -'CreateNfsShareRequest', -'CreateProvisioningConfigRequest', -'CreateSSHKeyRequest', -'CreateVolumeSnapshotRequest', -'DeleteNfsShareRequest', -'DeleteSSHKeyRequest', -'DeleteVolumeSnapshotRequest', -'DetachLunRequest', -'DisableInteractiveSerialConsoleRequest', -'DisableInteractiveSerialConsoleResponse', -'EnableInteractiveSerialConsoleRequest', -'EnableInteractiveSerialConsoleResponse', -'EvictLunRequest', -'EvictVolumeRequest', -'GetInstanceRequest', -'GetLunRequest', -'GetNetworkRequest', -'GetNfsShareRequest', -'GetProvisioningConfigRequest', -'GetVolumeRequest', -'GetVolumeSnapshotRequest', -'Instance', -'InstanceConfig', -'InstanceQuota', -'ListInstancesRequest', -'ListInstancesResponse', -'ListLunsRequest', -'ListLunsResponse', -'ListNetworkUsageRequest', -'ListNetworkUsageResponse', -'ListNetworksRequest', -'ListNetworksResponse', -'ListNfsSharesRequest', 
-'ListNfsSharesResponse', -'ListOSImagesRequest', -'ListOSImagesResponse', -'ListProvisioningQuotasRequest', -'ListProvisioningQuotasResponse', -'ListSSHKeysRequest', -'ListSSHKeysResponse', -'ListVolumeSnapshotsRequest', -'ListVolumeSnapshotsResponse', -'ListVolumesRequest', -'ListVolumesResponse', -'LogicalInterface', -'Lun', -'Network', -'NetworkAddressReservation', -'NetworkConfig', -'NetworkMountPoint', -'NetworkUsage', -'NfsShare', -'OSImage', -'OperationMetadata', -'ProvisioningConfig', -'ProvisioningQuota', -'RenameInstanceRequest', -'RenameNetworkRequest', -'RenameNfsShareRequest', -'RenameVolumeRequest', -'ResetInstanceRequest', -'ResetInstanceResponse', -'ResizeVolumeRequest', -'RestoreVolumeSnapshotRequest', -'SSHKey', -'ServerNetworkTemplate', -'StartInstanceRequest', -'StartInstanceResponse', -'StopInstanceRequest', -'StopInstanceResponse', -'SubmitProvisioningConfigRequest', -'SubmitProvisioningConfigResponse', -'UpdateInstanceRequest', -'UpdateNetworkRequest', -'UpdateNfsShareRequest', -'UpdateProvisioningConfigRequest', -'UpdateVolumeRequest', -'VRF', -'Volume', -'VolumeConfig', -'VolumePerformanceTier', -'VolumeSnapshot', -'WorkloadProfile', -) diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/gapic_metadata.json b/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/gapic_metadata.json deleted file mode 100644 index bbf404937b49..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/gapic_metadata.json +++ /dev/null @@ -1,688 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.bare_metal_solution_v2", - "protoPackage": "google.cloud.baremetalsolution.v2", - "schema": "1.0", - "services": { - "BareMetalSolution": { - "clients": { - "grpc": { - "libraryClient": "BareMetalSolutionClient", - "rpcs": { - "CreateNfsShare": { - "methods": [ - "create_nfs_share" - ] - }, - "CreateProvisioningConfig": { - "methods": [ - "create_provisioning_config" - ] - }, - "CreateSSHKey": { - "methods": [ - "create_ssh_key" - ] - }, - "CreateVolumeSnapshot": { - "methods": [ - "create_volume_snapshot" - ] - }, - "DeleteNfsShare": { - "methods": [ - "delete_nfs_share" - ] - }, - "DeleteSSHKey": { - "methods": [ - "delete_ssh_key" - ] - }, - "DeleteVolumeSnapshot": { - "methods": [ - "delete_volume_snapshot" - ] - }, - "DetachLun": { - "methods": [ - "detach_lun" - ] - }, - "DisableInteractiveSerialConsole": { - "methods": [ - "disable_interactive_serial_console" - ] - }, - "EnableInteractiveSerialConsole": { - "methods": [ - "enable_interactive_serial_console" - ] - }, - "EvictLun": { - "methods": [ - "evict_lun" - ] - }, - "EvictVolume": { - "methods": [ - "evict_volume" - ] - }, - "GetInstance": { - "methods": [ - "get_instance" - ] - }, - "GetLun": { - "methods": [ - "get_lun" - ] - }, - "GetNetwork": { - "methods": [ - "get_network" - ] - }, - "GetNfsShare": { - "methods": [ - "get_nfs_share" - ] - }, - "GetProvisioningConfig": { - "methods": [ - "get_provisioning_config" - ] - }, - "GetVolume": { - "methods": [ - "get_volume" - ] - }, - "GetVolumeSnapshot": { - "methods": [ - "get_volume_snapshot" - ] - }, - "ListInstances": { - "methods": [ - "list_instances" - ] - }, - "ListLuns": { - "methods": [ - "list_luns" - ] - }, - "ListNetworkUsage": { - "methods": [ - "list_network_usage" - ] - }, - "ListNetworks": { - "methods": [ - 
"list_networks" - ] - }, - "ListNfsShares": { - "methods": [ - "list_nfs_shares" - ] - }, - "ListOSImages": { - "methods": [ - "list_os_images" - ] - }, - "ListProvisioningQuotas": { - "methods": [ - "list_provisioning_quotas" - ] - }, - "ListSSHKeys": { - "methods": [ - "list_ssh_keys" - ] - }, - "ListVolumeSnapshots": { - "methods": [ - "list_volume_snapshots" - ] - }, - "ListVolumes": { - "methods": [ - "list_volumes" - ] - }, - "RenameInstance": { - "methods": [ - "rename_instance" - ] - }, - "RenameNetwork": { - "methods": [ - "rename_network" - ] - }, - "RenameNfsShare": { - "methods": [ - "rename_nfs_share" - ] - }, - "RenameVolume": { - "methods": [ - "rename_volume" - ] - }, - "ResetInstance": { - "methods": [ - "reset_instance" - ] - }, - "ResizeVolume": { - "methods": [ - "resize_volume" - ] - }, - "RestoreVolumeSnapshot": { - "methods": [ - "restore_volume_snapshot" - ] - }, - "StartInstance": { - "methods": [ - "start_instance" - ] - }, - "StopInstance": { - "methods": [ - "stop_instance" - ] - }, - "SubmitProvisioningConfig": { - "methods": [ - "submit_provisioning_config" - ] - }, - "UpdateInstance": { - "methods": [ - "update_instance" - ] - }, - "UpdateNetwork": { - "methods": [ - "update_network" - ] - }, - "UpdateNfsShare": { - "methods": [ - "update_nfs_share" - ] - }, - "UpdateProvisioningConfig": { - "methods": [ - "update_provisioning_config" - ] - }, - "UpdateVolume": { - "methods": [ - "update_volume" - ] - } - } - }, - "grpc-async": { - "libraryClient": "BareMetalSolutionAsyncClient", - "rpcs": { - "CreateNfsShare": { - "methods": [ - "create_nfs_share" - ] - }, - "CreateProvisioningConfig": { - "methods": [ - "create_provisioning_config" - ] - }, - "CreateSSHKey": { - "methods": [ - "create_ssh_key" - ] - }, - "CreateVolumeSnapshot": { - "methods": [ - "create_volume_snapshot" - ] - }, - "DeleteNfsShare": { - "methods": [ - "delete_nfs_share" - ] - }, - "DeleteSSHKey": { - "methods": [ - "delete_ssh_key" - ] - }, - "DeleteVolumeSnapshot": { - "methods": [ - "delete_volume_snapshot" - ] - }, - "DetachLun": { - "methods": [ - "detach_lun" - ] - }, - "DisableInteractiveSerialConsole": { - "methods": [ - "disable_interactive_serial_console" - ] - }, - "EnableInteractiveSerialConsole": { - "methods": [ - "enable_interactive_serial_console" - ] - }, - "EvictLun": { - "methods": [ - "evict_lun" - ] - }, - "EvictVolume": { - "methods": [ - "evict_volume" - ] - }, - "GetInstance": { - "methods": [ - "get_instance" - ] - }, - "GetLun": { - "methods": [ - "get_lun" - ] - }, - "GetNetwork": { - "methods": [ - "get_network" - ] - }, - "GetNfsShare": { - "methods": [ - "get_nfs_share" - ] - }, - "GetProvisioningConfig": { - "methods": [ - "get_provisioning_config" - ] - }, - "GetVolume": { - "methods": [ - "get_volume" - ] - }, - "GetVolumeSnapshot": { - "methods": [ - "get_volume_snapshot" - ] - }, - "ListInstances": { - "methods": [ - "list_instances" - ] - }, - "ListLuns": { - "methods": [ - "list_luns" - ] - }, - "ListNetworkUsage": { - "methods": [ - "list_network_usage" - ] - }, - "ListNetworks": { - "methods": [ - "list_networks" - ] - }, - "ListNfsShares": { - "methods": [ - "list_nfs_shares" - ] - }, - "ListOSImages": { - "methods": [ - "list_os_images" - ] - }, - "ListProvisioningQuotas": { - "methods": [ - "list_provisioning_quotas" - ] - }, - "ListSSHKeys": { - "methods": [ - "list_ssh_keys" - ] - }, - "ListVolumeSnapshots": { - "methods": [ - "list_volume_snapshots" - ] - }, - "ListVolumes": { - "methods": [ - "list_volumes" - ] - }, - "RenameInstance": { - 
"methods": [ - "rename_instance" - ] - }, - "RenameNetwork": { - "methods": [ - "rename_network" - ] - }, - "RenameNfsShare": { - "methods": [ - "rename_nfs_share" - ] - }, - "RenameVolume": { - "methods": [ - "rename_volume" - ] - }, - "ResetInstance": { - "methods": [ - "reset_instance" - ] - }, - "ResizeVolume": { - "methods": [ - "resize_volume" - ] - }, - "RestoreVolumeSnapshot": { - "methods": [ - "restore_volume_snapshot" - ] - }, - "StartInstance": { - "methods": [ - "start_instance" - ] - }, - "StopInstance": { - "methods": [ - "stop_instance" - ] - }, - "SubmitProvisioningConfig": { - "methods": [ - "submit_provisioning_config" - ] - }, - "UpdateInstance": { - "methods": [ - "update_instance" - ] - }, - "UpdateNetwork": { - "methods": [ - "update_network" - ] - }, - "UpdateNfsShare": { - "methods": [ - "update_nfs_share" - ] - }, - "UpdateProvisioningConfig": { - "methods": [ - "update_provisioning_config" - ] - }, - "UpdateVolume": { - "methods": [ - "update_volume" - ] - } - } - }, - "rest": { - "libraryClient": "BareMetalSolutionClient", - "rpcs": { - "CreateNfsShare": { - "methods": [ - "create_nfs_share" - ] - }, - "CreateProvisioningConfig": { - "methods": [ - "create_provisioning_config" - ] - }, - "CreateSSHKey": { - "methods": [ - "create_ssh_key" - ] - }, - "CreateVolumeSnapshot": { - "methods": [ - "create_volume_snapshot" - ] - }, - "DeleteNfsShare": { - "methods": [ - "delete_nfs_share" - ] - }, - "DeleteSSHKey": { - "methods": [ - "delete_ssh_key" - ] - }, - "DeleteVolumeSnapshot": { - "methods": [ - "delete_volume_snapshot" - ] - }, - "DetachLun": { - "methods": [ - "detach_lun" - ] - }, - "DisableInteractiveSerialConsole": { - "methods": [ - "disable_interactive_serial_console" - ] - }, - "EnableInteractiveSerialConsole": { - "methods": [ - "enable_interactive_serial_console" - ] - }, - "EvictLun": { - "methods": [ - "evict_lun" - ] - }, - "EvictVolume": { - "methods": [ - "evict_volume" - ] - }, - "GetInstance": { - "methods": [ - "get_instance" - ] - }, - "GetLun": { - "methods": [ - "get_lun" - ] - }, - "GetNetwork": { - "methods": [ - "get_network" - ] - }, - "GetNfsShare": { - "methods": [ - "get_nfs_share" - ] - }, - "GetProvisioningConfig": { - "methods": [ - "get_provisioning_config" - ] - }, - "GetVolume": { - "methods": [ - "get_volume" - ] - }, - "GetVolumeSnapshot": { - "methods": [ - "get_volume_snapshot" - ] - }, - "ListInstances": { - "methods": [ - "list_instances" - ] - }, - "ListLuns": { - "methods": [ - "list_luns" - ] - }, - "ListNetworkUsage": { - "methods": [ - "list_network_usage" - ] - }, - "ListNetworks": { - "methods": [ - "list_networks" - ] - }, - "ListNfsShares": { - "methods": [ - "list_nfs_shares" - ] - }, - "ListOSImages": { - "methods": [ - "list_os_images" - ] - }, - "ListProvisioningQuotas": { - "methods": [ - "list_provisioning_quotas" - ] - }, - "ListSSHKeys": { - "methods": [ - "list_ssh_keys" - ] - }, - "ListVolumeSnapshots": { - "methods": [ - "list_volume_snapshots" - ] - }, - "ListVolumes": { - "methods": [ - "list_volumes" - ] - }, - "RenameInstance": { - "methods": [ - "rename_instance" - ] - }, - "RenameNetwork": { - "methods": [ - "rename_network" - ] - }, - "RenameNfsShare": { - "methods": [ - "rename_nfs_share" - ] - }, - "RenameVolume": { - "methods": [ - "rename_volume" - ] - }, - "ResetInstance": { - "methods": [ - "reset_instance" - ] - }, - "ResizeVolume": { - "methods": [ - "resize_volume" - ] - }, - "RestoreVolumeSnapshot": { - "methods": [ - "restore_volume_snapshot" - ] - }, - "StartInstance": { - "methods": 
[
-                "start_instance"
-              ]
-            },
-            "StopInstance": {
-              "methods": [
-                "stop_instance"
-              ]
-            },
-            "SubmitProvisioningConfig": {
-              "methods": [
-                "submit_provisioning_config"
-              ]
-            },
-            "UpdateInstance": {
-              "methods": [
-                "update_instance"
-              ]
-            },
-            "UpdateNetwork": {
-              "methods": [
-                "update_network"
-              ]
-            },
-            "UpdateNfsShare": {
-              "methods": [
-                "update_nfs_share"
-              ]
-            },
-            "UpdateProvisioningConfig": {
-              "methods": [
-                "update_provisioning_config"
-              ]
-            },
-            "UpdateVolume": {
-              "methods": [
-                "update_volume"
-              ]
-            }
-          }
-        }
-      }
-    }
-  }
-}
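Because ``gapic_metadata.json`` is machine-readable, tooling can resolve a proto RPC name to the client method that implements it. A minimal sketch of reading the mapping shown above (the local file path is hypothetical, not part of the diff):

.. code-block:: python

    import json

    # Hypothetical local copy of the gapic_metadata.json deleted above.
    with open("gapic_metadata.json") as f:
        meta = json.load(f)

    # Walk services -> service name -> client kind -> rpcs to find the method.
    rpcs = meta["services"]["BareMetalSolution"]["clients"]["grpc"]["rpcs"]
    print(rpcs["ListInstances"]["methods"])  # ['list_instances']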
diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/gapic_version.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/gapic_version.py
deleted file mode 100644
index 558c8aab67c5..000000000000
--- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/gapic_version.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-__version__ = "0.0.0"  # {x-release-please-version}
diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/py.typed b/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/py.typed
deleted file mode 100644
index 2afe13f6aed8..000000000000
--- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/py.typed
+++ /dev/null
@@ -1,2 +0,0 @@
-# Marker file for PEP 561.
-# The google-cloud-bare-metal-solution package uses inline types.
diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/services/__init__.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/services/__init__.py
deleted file mode 100644
index 8f6cf068242c..000000000000
--- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/services/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/__init__.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/__init__.py
deleted file mode 100644
index 4d9e0db3106e..000000000000
--- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/__init__.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-from .client import BareMetalSolutionClient
-from .async_client import BareMetalSolutionAsyncClient
-
-__all__ = (
-    'BareMetalSolutionClient',
-    'BareMetalSolutionAsyncClient',
-)
diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/async_client.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/async_client.py
deleted file mode 100644
index 3ba9a913ace8..000000000000
--- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/async_client.py
+++ /dev/null
@@ -1,5609 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.bare_metal_solution_v2 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.bare_metal_solution_v2.services.bare_metal_solution import pagers -from google.cloud.bare_metal_solution_v2.types import baremetalsolution -from google.cloud.bare_metal_solution_v2.types import common -from google.cloud.bare_metal_solution_v2.types import instance -from google.cloud.bare_metal_solution_v2.types import instance as gcb_instance -from google.cloud.bare_metal_solution_v2.types import lun -from google.cloud.bare_metal_solution_v2.types import network -from google.cloud.bare_metal_solution_v2.types import network as gcb_network -from google.cloud.bare_metal_solution_v2.types import nfs_share -from google.cloud.bare_metal_solution_v2.types import nfs_share as gcb_nfs_share -from google.cloud.bare_metal_solution_v2.types import osimage -from google.cloud.bare_metal_solution_v2.types import provisioning -from google.cloud.bare_metal_solution_v2.types import ssh_key -from google.cloud.bare_metal_solution_v2.types import ssh_key as gcb_ssh_key -from google.cloud.bare_metal_solution_v2.types import volume -from google.cloud.bare_metal_solution_v2.types import volume as gcb_volume -from google.cloud.bare_metal_solution_v2.types import volume_snapshot -from google.cloud.bare_metal_solution_v2.types import volume_snapshot as gcb_volume_snapshot -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import BareMetalSolutionTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import BareMetalSolutionGrpcAsyncIOTransport -from .client import BareMetalSolutionClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -class BareMetalSolutionAsyncClient: - """Performs management operations on Bare Metal Solution servers. - - The ``baremetalsolution.googleapis.com`` service provides management - capabilities for Bare Metal Solution servers. To access the API - methods, you must assign Bare Metal Solution IAM roles containing - the desired permissions to your staff in your Google Cloud project. - You must also enable the Bare Metal Solution API. 
Once enabled, the - methods act upon specific servers in your Bare Metal Solution - environment. - """ - - _client: BareMetalSolutionClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = BareMetalSolutionClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = BareMetalSolutionClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = BareMetalSolutionClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = BareMetalSolutionClient._DEFAULT_UNIVERSE - - instance_path = staticmethod(BareMetalSolutionClient.instance_path) - parse_instance_path = staticmethod(BareMetalSolutionClient.parse_instance_path) - instance_config_path = staticmethod(BareMetalSolutionClient.instance_config_path) - parse_instance_config_path = staticmethod(BareMetalSolutionClient.parse_instance_config_path) - instance_quota_path = staticmethod(BareMetalSolutionClient.instance_quota_path) - parse_instance_quota_path = staticmethod(BareMetalSolutionClient.parse_instance_quota_path) - interconnect_attachment_path = staticmethod(BareMetalSolutionClient.interconnect_attachment_path) - parse_interconnect_attachment_path = staticmethod(BareMetalSolutionClient.parse_interconnect_attachment_path) - lun_path = staticmethod(BareMetalSolutionClient.lun_path) - parse_lun_path = staticmethod(BareMetalSolutionClient.parse_lun_path) - network_path = staticmethod(BareMetalSolutionClient.network_path) - parse_network_path = staticmethod(BareMetalSolutionClient.parse_network_path) - network_config_path = staticmethod(BareMetalSolutionClient.network_config_path) - parse_network_config_path = staticmethod(BareMetalSolutionClient.parse_network_config_path) - nfs_share_path = staticmethod(BareMetalSolutionClient.nfs_share_path) - parse_nfs_share_path = staticmethod(BareMetalSolutionClient.parse_nfs_share_path) - os_image_path = staticmethod(BareMetalSolutionClient.os_image_path) - parse_os_image_path = staticmethod(BareMetalSolutionClient.parse_os_image_path) - provisioning_config_path = staticmethod(BareMetalSolutionClient.provisioning_config_path) - parse_provisioning_config_path = staticmethod(BareMetalSolutionClient.parse_provisioning_config_path) - provisioning_quota_path = staticmethod(BareMetalSolutionClient.provisioning_quota_path) - parse_provisioning_quota_path = staticmethod(BareMetalSolutionClient.parse_provisioning_quota_path) - server_network_template_path = staticmethod(BareMetalSolutionClient.server_network_template_path) - parse_server_network_template_path = staticmethod(BareMetalSolutionClient.parse_server_network_template_path) - ssh_key_path = staticmethod(BareMetalSolutionClient.ssh_key_path) - parse_ssh_key_path = staticmethod(BareMetalSolutionClient.parse_ssh_key_path) - volume_path = staticmethod(BareMetalSolutionClient.volume_path) - parse_volume_path = staticmethod(BareMetalSolutionClient.parse_volume_path) - volume_config_path = staticmethod(BareMetalSolutionClient.volume_config_path) - parse_volume_config_path = staticmethod(BareMetalSolutionClient.parse_volume_config_path) - volume_snapshot_path = staticmethod(BareMetalSolutionClient.volume_snapshot_path) - parse_volume_snapshot_path = staticmethod(BareMetalSolutionClient.parse_volume_snapshot_path) - common_billing_account_path = staticmethod(BareMetalSolutionClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(BareMetalSolutionClient.parse_common_billing_account_path) - common_folder_path = 
staticmethod(BareMetalSolutionClient.common_folder_path)
-    parse_common_folder_path = staticmethod(BareMetalSolutionClient.parse_common_folder_path)
-    common_organization_path = staticmethod(BareMetalSolutionClient.common_organization_path)
-    parse_common_organization_path = staticmethod(BareMetalSolutionClient.parse_common_organization_path)
-    common_project_path = staticmethod(BareMetalSolutionClient.common_project_path)
-    parse_common_project_path = staticmethod(BareMetalSolutionClient.parse_common_project_path)
-    common_location_path = staticmethod(BareMetalSolutionClient.common_location_path)
-    parse_common_location_path = staticmethod(BareMetalSolutionClient.parse_common_location_path)
-
-    @classmethod
-    def from_service_account_info(cls, info: dict, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            info.
-
-        Args:
-            info (dict): The service account private key info.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            BareMetalSolutionAsyncClient: The constructed client.
-        """
-        return BareMetalSolutionClient.from_service_account_info.__func__(BareMetalSolutionAsyncClient, info, *args, **kwargs)  # type: ignore
-
-    @classmethod
-    def from_service_account_file(cls, filename: str, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            file.
-
-        Args:
-            filename (str): The path to the service account private key json
-                file.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            BareMetalSolutionAsyncClient: The constructed client.
-        """
-        return BareMetalSolutionClient.from_service_account_file.__func__(BareMetalSolutionAsyncClient, filename, *args, **kwargs)  # type: ignore
-
-    from_service_account_json = from_service_account_file
-
-    @classmethod
-    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None):
-        """Return the API endpoint and client cert source for mutual TLS.
-
-        The client cert source is determined in the following order:
-        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
-        client cert source is None.
-        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
-        default client cert source exists, use the default one; otherwise the client cert
-        source is None.
-
-        The API endpoint is determined in the following order:
-        (1) if `client_options.api_endpoint` is provided, use the provided one.
-        (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
-        default mTLS endpoint; if the environment variable is "never", use the default API
-        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
-        use the default API endpoint.
-
-        More details can be found at https://google.aip.dev/auth/4114.
-
-        Args:
-            client_options (google.api_core.client_options.ClientOptions): Custom options for the
-                client. Only the `api_endpoint` and `client_cert_source` properties may be used
-                in this method.
-
-        Returns:
-            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
-                client cert source to use.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
-        """
-        return BareMetalSolutionClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
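The resolution order documented above can be exercised without making any RPCs, since the method only inspects environment variables and the supplied options. A minimal sketch (it assumes no client certificate is configured and `GOOGLE_API_USE_MTLS_ENDPOINT` is unset, i.e. treated as "auto"):

.. code-block:: python

    from google.api_core.client_options import ClientOptions
    from google.cloud.bare_metal_solution_v2 import BareMetalSolutionClient

    # Under the assumptions above this resolves to the regular endpoint and no
    # cert source; setting GOOGLE_API_USE_MTLS_ENDPOINT=always would force the
    # mTLS endpoint instead.
    endpoint, cert_source = BareMetalSolutionClient.get_mtls_endpoint_and_cert_source(
        ClientOptions()
    )
    print(endpoint)      # e.g. "baremetalsolution.googleapis.com"
    print(cert_source)   # None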
- """ - return BareMetalSolutionClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> BareMetalSolutionTransport: - """Returns the transport used by the client instance. - - Returns: - BareMetalSolutionTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = BareMetalSolutionClient.get_transport_class - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, BareMetalSolutionTransport, Callable[..., BareMetalSolutionTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the bare metal solution async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,BareMetalSolutionTransport,Callable[..., BareMetalSolutionTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the BareMetalSolutionTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. 
- - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = BareMetalSolutionClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.baremetalsolution_v2.BareMetalSolutionAsyncClient`.", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "credentialsType": None, - } - ) - - async def list_instances(self, - request: Optional[Union[instance.ListInstancesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListInstancesAsyncPager: - r"""List servers in a given project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - async def sample_list_instances(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.ListInstancesRequest( - parent="parent_value", - ) - - # Make the request - page_result = await client.list_instances(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.ListInstancesRequest, dict]]): - The request object. Message for requesting the list of - servers. - parent (:class:`str`): - Required. Parent value for - ListInstancesRequest. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListInstancesAsyncPager: - Response message for the list of - servers. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object.
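- # (Every RPC on this client follows the same convention: the caller passes - # either a complete request message/dict or the flattened field arguments, - # never both; the check below enforces that.)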
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, instance.ListInstancesRequest): - request = instance.ListInstancesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_instances] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListInstancesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_instance(self, - request: Optional[Union[instance.GetInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> instance.Instance: - r"""Get details about a single server. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - async def sample_get_instance(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.GetInstanceRequest( - name="name_value", - ) - - # Make the request - response = await client.get_instance(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.GetInstanceRequest, dict]]): - The request object. Message for requesting server - information. - name (:class:`str`): - Required. Name of the resource. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bare_metal_solution_v2.types.Instance: - A server. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, instance.GetInstanceRequest): - request = instance.GetInstanceRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_instance] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_instance(self, - request: Optional[Union[gcb_instance.UpdateInstanceRequest, dict]] = None, - *, - instance: Optional[gcb_instance.Instance] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Update details of a single server. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - async def sample_update_instance(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.UpdateInstanceRequest( - ) - - # Make the request - operation = client.update_instance(request=request) - - print("Waiting for operation to complete...") - - response = await (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.UpdateInstanceRequest, dict]]): - The request object. Message requesting to update a - server. - instance (:class:`google.cloud.bare_metal_solution_v2.types.Instance`): - Required. The server to update.
- - The ``name`` field is used to identify the instance to - update. Format: - projects/{project}/locations/{location}/instances/{instance} - - This corresponds to the ``instance`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - The list of fields to update. The currently supported - fields are: ``labels`` ``hyperthreading_enabled`` - ``os_image`` - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.bare_metal_solution_v2.types.Instance` - A server. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [instance, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gcb_instance.UpdateInstanceRequest): - request = gcb_instance.UpdateInstanceRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if instance is not None: - request.instance = instance - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_instance] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("instance.name", request.instance.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - gcb_instance.Instance, - metadata_type=baremetalsolution.OperationMetadata, - ) - - # Done; return the response. - return response - - async def rename_instance(self, - request: Optional[Union[instance.RenameInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - new_instance_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> instance.Instance: - r"""RenameInstance sets a new name for an instance. 
- Use with caution; previous names are invalidated immediately. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - async def sample_rename_instance(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.RenameInstanceRequest( - name="name_value", - new_instance_id="new_instance_id_value", - ) - - # Make the request - response = await client.rename_instance(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.RenameInstanceRequest, dict]]): - The request object. Message requesting rename of a - server. - name (:class:`str`): - Required. The ``name`` field is used to identify the - instance. Format: - projects/{project}/locations/{location}/instances/{instance} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - new_instance_id (:class:`str`): - Required. The new ``id`` of the instance. - This corresponds to the ``new_instance_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bare_metal_solution_v2.types.Instance: - A server. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name, new_instance_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, instance.RenameInstanceRequest): - request = instance.RenameInstanceRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if new_instance_id is not None: - request.new_instance_id = new_instance_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.rename_instance] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain.
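- # (This validation compares the client's configured universe domain against - # the universe domain of the supplied credentials and raises a ValueError on - # mismatch, before any request is sent.)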
- self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def reset_instance(self, - request: Optional[Union[instance.ResetInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Perform an ungraceful, hard reset on a server. - Equivalent to shutting the power off and then turning it - back on. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - async def sample_reset_instance(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.ResetInstanceRequest( - name="name_value", - ) - - # Make the request - operation = client.reset_instance(request=request) - - print("Waiting for operation to complete...") - - response = await (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.ResetInstanceRequest, dict]]): - The request object. Message requesting to reset a server. - name (:class:`str`): - Required. Name of the resource. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.bare_metal_solution_v2.types.ResetInstanceResponse` - Response message from resetting a server. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, instance.ResetInstanceRequest): - request = instance.ResetInstanceRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these.
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.reset_instance] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - baremetalsolution.ResetInstanceResponse, - metadata_type=baremetalsolution.OperationMetadata, - ) - - # Done; return the response. - return response - - async def start_instance(self, - request: Optional[Union[instance.StartInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Starts a server that was shut down. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - async def sample_start_instance(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.StartInstanceRequest( - name="name_value", - ) - - # Make the request - operation = client.start_instance(request=request) - - print("Waiting for operation to complete...") - - response = await (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.StartInstanceRequest, dict]]): - The request object. Message requesting to start a server. - name (:class:`str`): - Required. Name of the resource. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.bare_metal_solution_v2.types.StartInstanceResponse` - Response message from starting a server. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request.
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, instance.StartInstanceRequest): - request = instance.StartInstanceRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.start_instance] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - instance.StartInstanceResponse, - metadata_type=baremetalsolution.OperationMetadata, - ) - - # Done; return the response. - return response - - async def stop_instance(self, - request: Optional[Union[instance.StopInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Stop a running server. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - async def sample_stop_instance(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.StopInstanceRequest( - name="name_value", - ) - - # Make the request - operation = client.stop_instance(request=request) - - print("Waiting for operation to complete...") - - response = await (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.StopInstanceRequest, dict]]): - The request object. Message requesting to stop a server. - name (:class:`str`): - Required. Name of the resource. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata.
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.bare_metal_solution_v2.types.StopInstanceResponse` - Response message from stopping a server. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, instance.StopInstanceRequest): - request = instance.StopInstanceRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.stop_instance] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - instance.StopInstanceResponse, - metadata_type=baremetalsolution.OperationMetadata, - ) - - # Done; return the response. - return response - - async def enable_interactive_serial_console(self, - request: Optional[Union[instance.EnableInteractiveSerialConsoleRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Enable the interactive serial console feature on an - instance. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - async def sample_enable_interactive_serial_console(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.EnableInteractiveSerialConsoleRequest( - name="name_value", - ) - - # Make the request - operation = client.enable_interactive_serial_console(request=request) - - print("Waiting for operation to complete...") - - response = await (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.EnableInteractiveSerialConsoleRequest, dict]]): - The request object. Message for enabling the interactive - serial console on an instance. - name (:class:`str`): - Required. Name of the resource. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.bare_metal_solution_v2.types.EnableInteractiveSerialConsoleResponse` - Message for response of EnableInteractiveSerialConsole. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, instance.EnableInteractiveSerialConsoleRequest): - request = instance.EnableInteractiveSerialConsoleRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.enable_interactive_serial_console] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future.
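- # (operation_async.from_gapic wraps the raw long-running Operation in an - # AsyncOperation whose result() coroutine resolves to an - # EnableInteractiveSerialConsoleResponse once the server-side work finishes.)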
- response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - instance.EnableInteractiveSerialConsoleResponse, - metadata_type=baremetalsolution.OperationMetadata, - ) - - # Done; return the response. - return response - - async def disable_interactive_serial_console(self, - request: Optional[Union[instance.DisableInteractiveSerialConsoleRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Disable the interactive serial console feature on an - instance. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - async def sample_disable_interactive_serial_console(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.DisableInteractiveSerialConsoleRequest( - name="name_value", - ) - - # Make the request - operation = client.disable_interactive_serial_console(request=request) - - print("Waiting for operation to complete...") - - response = await (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.DisableInteractiveSerialConsoleRequest, dict]]): - The request object. Message for disabling the interactive - serial console on an instance. - name (:class:`str`): - Required. Name of the resource. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.bare_metal_solution_v2.types.DisableInteractiveSerialConsoleResponse` - Message for response of DisableInteractiveSerialConsole. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one.
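- # (The proto-plus request constructor below accepts either another request - # message or a dict and coerces it into a - # DisableInteractiveSerialConsoleRequest.)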
- if not isinstance(request, instance.DisableInteractiveSerialConsoleRequest): - request = instance.DisableInteractiveSerialConsoleRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.disable_interactive_serial_console] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - instance.DisableInteractiveSerialConsoleResponse, - metadata_type=baremetalsolution.OperationMetadata, - ) - - # Done; return the response. - return response - - async def detach_lun(self, - request: Optional[Union[gcb_instance.DetachLunRequest, dict]] = None, - *, - instance: Optional[str] = None, - lun: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Detach a LUN from an instance. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - async def sample_detach_lun(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.DetachLunRequest( - instance="instance_value", - lun="lun_value", - ) - - # Make the request - operation = client.detach_lun(request=request) - - print("Waiting for operation to complete...") - - response = await (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.DetachLunRequest, dict]]): - The request object. Message for detaching a specific LUN - from an instance. - instance (:class:`str`): - Required. Name of the instance. - This corresponds to the ``instance`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - lun (:class:`str`): - Required. Name of the LUN to detach. - This corresponds to the ``lun`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`.
- - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.bare_metal_solution_v2.types.Instance` - A server. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [instance, lun] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gcb_instance.DetachLunRequest): - request = gcb_instance.DetachLunRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if instance is not None: - request.instance = instance - if lun is not None: - request.lun = lun - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.detach_lun] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("instance", request.instance), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - gcb_instance.Instance, - metadata_type=baremetalsolution.OperationMetadata, - ) - - # Done; return the response. - return response - - async def list_ssh_keys(self, - request: Optional[Union[ssh_key.ListSSHKeysRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListSSHKeysAsyncPager: - r"""Lists the public SSH keys registered for the - specified project. These SSH keys are used only for the - interactive serial console feature. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - async def sample_list_ssh_keys(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.ListSSHKeysRequest( - parent="parent_value", - ) - - # Make the request - page_result = await client.list_ssh_keys(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.ListSSHKeysRequest, dict]]): - The request object.
Message for listing the public SSH - keys in a project. - parent (:class:`str`): - Required. The parent containing the - SSH keys. Currently, the only valid - value for the location is "global". - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListSSHKeysAsyncPager: - Message for response of ListSSHKeys. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, ssh_key.ListSSHKeysRequest): - request = ssh_key.ListSSHKeysRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_ssh_keys] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListSSHKeysAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_ssh_key(self, - request: Optional[Union[gcb_ssh_key.CreateSSHKeyRequest, dict]] = None, - *, - parent: Optional[str] = None, - ssh_key: Optional[gcb_ssh_key.SSHKey] = None, - ssh_key_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gcb_ssh_key.SSHKey: - r"""Register a public SSH key in the specified project - for use with the interactive serial console feature. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - async def sample_create_ssh_key(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.CreateSSHKeyRequest( - parent="parent_value", - ssh_key_id="ssh_key_id_value", - ) - - # Make the request - response = await client.create_ssh_key(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.CreateSSHKeyRequest, dict]]): - The request object. Message for registering a public SSH - key in a project. - parent (:class:`str`): - Required. The parent containing the - SSH keys. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - ssh_key (:class:`google.cloud.bare_metal_solution_v2.types.SSHKey`): - Required. The SSH key to register. - This corresponds to the ``ssh_key`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - ssh_key_id (:class:`str`): - Required. The ID to use for the key, which will become - the final component of the key's resource name. - - This value must match the regex: [a-zA-Z0-9@.-_]{1,64} - - This corresponds to the ``ssh_key_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bare_metal_solution_v2.types.SSHKey: - An SSH key, used for authorizing with - the interactive serial console feature. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, ssh_key, ssh_key_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gcb_ssh_key.CreateSSHKeyRequest): - request = gcb_ssh_key.CreateSSHKeyRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if ssh_key is not None: - request.ssh_key = ssh_key - if ssh_key_id is not None: - request.ssh_key_id = ssh_key_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
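- # (_wrapped_methods caches each transport method pre-wrapped with its - # default retry and timeout policy; the per-call retry/timeout arguments - # passed to this method override those defaults.)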
- rpc = self._client._transport._wrapped_methods[self._client._transport.create_ssh_key] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_ssh_key(self, - request: Optional[Union[ssh_key.DeleteSSHKeyRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a public SSH key registered in the specified - project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - async def sample_delete_ssh_key(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.DeleteSSHKeyRequest( - name="name_value", - ) - - # Make the request - await client.delete_ssh_key(request=request) - - Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.DeleteSSHKeyRequest, dict]]): - The request object. Message for deleting an SSH key from - a project. - name (:class:`str`): - Required. The name of the SSH key to - delete. Currently, the only valid value - for the location is "global". - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, ssh_key.DeleteSSHKeyRequest): - request = ssh_key.DeleteSSHKeyRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.delete_ssh_key] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def list_volumes(self, - request: Optional[Union[volume.ListVolumesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListVolumesAsyncPager: - r"""List storage volumes in a given project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - async def sample_list_volumes(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.ListVolumesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_volumes(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.ListVolumesRequest, dict]]): - The request object. Message for requesting a list of - storage volumes. - parent (:class:`str`): - Required. Parent value for - ListVolumesRequest. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListVolumesAsyncPager: - Response message containing the list - of storage volumes. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
- if not isinstance(request, volume.ListVolumesRequest): - request = volume.ListVolumesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_volumes] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListVolumesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_volume(self, - request: Optional[Union[volume.GetVolumeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> volume.Volume: - r"""Get details of a single storage volume. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - async def sample_get_volume(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.GetVolumeRequest( - name="name_value", - ) - - # Make the request - response = await client.get_volume(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.GetVolumeRequest, dict]]): - The request object. Message for requesting storage volume - information. - name (:class:`str`): - Required. Name of the resource. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bare_metal_solution_v2.types.Volume: - A storage volume. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, volume.GetVolumeRequest): - request = volume.GetVolumeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_volume] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_volume(self, - request: Optional[Union[gcb_volume.UpdateVolumeRequest, dict]] = None, - *, - volume: Optional[gcb_volume.Volume] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Update details of a single storage volume. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - async def sample_update_volume(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.UpdateVolumeRequest( - ) - - # Make the request - operation = client.update_volume(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.UpdateVolumeRequest, dict]]): - The request object. Message for updating a volume. - volume (:class:`google.cloud.bare_metal_solution_v2.types.Volume`): - Required. The volume to update. - - The ``name`` field is used to identify the volume to - update. Format: - projects/{project}/locations/{location}/volumes/{volume} - - This corresponds to the ``volume`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - The list of fields to update. - The only currently supported fields are: - - 'labels' - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.bare_metal_solution_v2.types.Volume` - A storage volume. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [volume, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gcb_volume.UpdateVolumeRequest): - request = gcb_volume.UpdateVolumeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if volume is not None: - request.volume = volume - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_volume] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("volume.name", request.volume.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - gcb_volume.Volume, - metadata_type=baremetalsolution.OperationMetadata, - ) - - # Done; return the response. - return response - - async def rename_volume(self, - request: Optional[Union[volume.RenameVolumeRequest, dict]] = None, - *, - name: Optional[str] = None, - new_volume_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> volume.Volume: - r"""RenameVolume sets a new name for a volume. - Use with caution, previous names become immediately - invalidated. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - async def sample_rename_volume(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.RenameVolumeRequest( - name="name_value", - new_volume_id="new_volume_id_value", - ) - - # Make the request - response = await client.rename_volume(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.RenameVolumeRequest, dict]]): - The request object. Message requesting rename of a - server. - name (:class:`str`): - Required. The ``name`` field is used to identify the - volume. Format: - projects/{project}/locations/{location}/volumes/{volume} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - new_volume_id (:class:`str`): - Required. The new ``id`` of the volume. - This corresponds to the ``new_volume_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bare_metal_solution_v2.types.Volume: - A storage volume. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name, new_volume_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, volume.RenameVolumeRequest): - request = volume.RenameVolumeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if new_volume_id is not None: - request.new_volume_id = new_volume_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.rename_volume] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def evict_volume(self, - request: Optional[Union[volume.EvictVolumeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Skips volume's cooloff and deletes it now. - Volume must be in cooloff state. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - async def sample_evict_volume(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.EvictVolumeRequest( - name="name_value", - ) - - # Make the request - operation = client.evict_volume(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.EvictVolumeRequest, dict]]): - The request object. Request for skip volume cooloff and - delete it. - name (:class:`str`): - Required. The name of the Volume. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, volume.EvictVolumeRequest): - request = volume.EvictVolumeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.evict_volume] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=baremetalsolution.OperationMetadata, - ) - - # Done; return the response. - return response - - async def resize_volume(self, - request: Optional[Union[gcb_volume.ResizeVolumeRequest, dict]] = None, - *, - volume: Optional[str] = None, - size_gib: Optional[int] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Emergency Volume resize. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - async def sample_resize_volume(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.ResizeVolumeRequest( - volume="volume_value", - ) - - # Make the request - operation = client.resize_volume(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.ResizeVolumeRequest, dict]]): - The request object. Request for emergency resize Volume. - volume (:class:`str`): - Required. Volume to resize. - This corresponds to the ``volume`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - size_gib (:class:`int`): - New Volume size, in GiB. - This corresponds to the ``size_gib`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.bare_metal_solution_v2.types.Volume` - A storage volume. - - """ - # Create or coerce a protobuf request object. 
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [volume, size_gib] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gcb_volume.ResizeVolumeRequest): - request = gcb_volume.ResizeVolumeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if volume is not None: - request.volume = volume - if size_gib is not None: - request.size_gib = size_gib - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.resize_volume] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("volume", request.volume), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - gcb_volume.Volume, - metadata_type=baremetalsolution.OperationMetadata, - ) - - # Done; return the response. - return response - - async def list_networks(self, - request: Optional[Union[network.ListNetworksRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListNetworksAsyncPager: - r"""List network in a given project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - async def sample_list_networks(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.ListNetworksRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_networks(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.ListNetworksRequest, dict]]): - The request object. Message for requesting a list of - networks. - parent (:class:`str`): - Required. Parent value for - ListNetworksRequest. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListNetworksAsyncPager: - Response message containing the list - of networks. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, network.ListNetworksRequest): - request = network.ListNetworksRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_networks] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListNetworksAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_network_usage(self, - request: Optional[Union[network.ListNetworkUsageRequest, dict]] = None, - *, - location: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> network.ListNetworkUsageResponse: - r"""List all Networks (and used IPs for each Network) in - the vendor account associated with the specified - project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - async def sample_list_network_usage(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.ListNetworkUsageRequest( - location="location_value", - ) - - # Make the request - response = await client.list_network_usage(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.ListNetworkUsageRequest, dict]]): - The request object. Request to get networks with IPs. - location (:class:`str`): - Required. Parent value (project and - location). - - This corresponds to the ``location`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bare_metal_solution_v2.types.ListNetworkUsageResponse: - Response with Networks with IPs - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [location] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, network.ListNetworkUsageRequest): - request = network.ListNetworkUsageRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if location is not None: - request.location = location - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_network_usage] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("location", request.location), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_network(self, - request: Optional[Union[network.GetNetworkRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> network.Network: - r"""Get details of a single network. - - .. code-block:: python
- - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - async def sample_get_network(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.GetNetworkRequest( - name="name_value", - ) - - # Make the request - response = await client.get_network(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.GetNetworkRequest, dict]]): - The request object. Message for requesting network - information. - name (:class:`str`): - Required. Name of the resource. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bare_metal_solution_v2.types.Network: - A Network. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, network.GetNetworkRequest): - request = network.GetNetworkRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_network] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response.
- return response - - async def update_network(self, - request: Optional[Union[gcb_network.UpdateNetworkRequest, dict]] = None, - *, - network: Optional[gcb_network.Network] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Update details of a single network. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - async def sample_update_network(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.UpdateNetworkRequest( - ) - - # Make the request - operation = client.update_network(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.UpdateNetworkRequest, dict]]): - The request object. Message requesting to updating a - network. - network (:class:`google.cloud.bare_metal_solution_v2.types.Network`): - Required. The network to update. - - The ``name`` field is used to identify the instance to - update. Format: - projects/{project}/locations/{location}/networks/{network} - - This corresponds to the ``network`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - The list of fields to update. The only currently - supported fields are: ``labels``, ``reservations``, - ``vrf.vlan_attachments`` - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.bare_metal_solution_v2.types.Network` - A Network. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [network, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gcb_network.UpdateNetworkRequest): - request = gcb_network.UpdateNetworkRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if network is not None: - request.network = network - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_network] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("network.name", request.network.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - gcb_network.Network, - metadata_type=baremetalsolution.OperationMetadata, - ) - - # Done; return the response. - return response - - async def create_volume_snapshot(self, - request: Optional[Union[gcb_volume_snapshot.CreateVolumeSnapshotRequest, dict]] = None, - *, - parent: Optional[str] = None, - volume_snapshot: Optional[gcb_volume_snapshot.VolumeSnapshot] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gcb_volume_snapshot.VolumeSnapshot: - r"""Takes a snapshot of a boot volume. Returns INVALID_ARGUMENT if - called for a non-boot volume. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - async def sample_create_volume_snapshot(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.CreateVolumeSnapshotRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_volume_snapshot(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.CreateVolumeSnapshotRequest, dict]]): - The request object. Message for creating a volume - snapshot. - parent (:class:`str`): - Required. The volume to snapshot. - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - volume_snapshot (:class:`google.cloud.bare_metal_solution_v2.types.VolumeSnapshot`): - Required. The snapshot to create.
- This corresponds to the ``volume_snapshot`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bare_metal_solution_v2.types.VolumeSnapshot: - A snapshot of a volume. Only boot - volumes can have snapshots. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, volume_snapshot] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gcb_volume_snapshot.CreateVolumeSnapshotRequest): - request = gcb_volume_snapshot.CreateVolumeSnapshotRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if volume_snapshot is not None: - request.volume_snapshot = volume_snapshot - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_volume_snapshot] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def restore_volume_snapshot(self, - request: Optional[Union[gcb_volume_snapshot.RestoreVolumeSnapshotRequest, dict]] = None, - *, - volume_snapshot: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Uses the specified snapshot to restore its parent volume. - Returns INVALID_ARGUMENT if called for a non-boot volume. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - async def sample_restore_volume_snapshot(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.RestoreVolumeSnapshotRequest( - volume_snapshot="volume_snapshot_value", - ) - - # Make the request - operation = client.restore_volume_snapshot(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.RestoreVolumeSnapshotRequest, dict]]): - The request object. Message for restoring a volume - snapshot. - volume_snapshot (:class:`str`): - Required. Name of the snapshot which - will be used to restore its parent - volume. - - This corresponds to the ``volume_snapshot`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.bare_metal_solution_v2.types.VolumeSnapshot` - A snapshot of a volume. Only boot volumes can have - snapshots. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [volume_snapshot] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gcb_volume_snapshot.RestoreVolumeSnapshotRequest): - request = gcb_volume_snapshot.RestoreVolumeSnapshotRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if volume_snapshot is not None: - request.volume_snapshot = volume_snapshot - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.restore_volume_snapshot] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("volume_snapshot", request.volume_snapshot), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. 
- response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - gcb_volume_snapshot.VolumeSnapshot, - metadata_type=baremetalsolution.OperationMetadata, - ) - - # Done; return the response. - return response - - async def delete_volume_snapshot(self, - request: Optional[Union[volume_snapshot.DeleteVolumeSnapshotRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a volume snapshot. Returns INVALID_ARGUMENT if called - for a non-boot volume. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - async def sample_delete_volume_snapshot(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.DeleteVolumeSnapshotRequest( - name="name_value", - ) - - # Make the request - await client.delete_volume_snapshot(request=request) - - Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.DeleteVolumeSnapshotRequest, dict]]): - The request object. Message for deleting named Volume - snapshot. - name (:class:`str`): - Required. The name of the snapshot to - delete. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, volume_snapshot.DeleteVolumeSnapshotRequest): - request = volume_snapshot.DeleteVolumeSnapshotRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_volume_snapshot] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def get_volume_snapshot(self, - request: Optional[Union[volume_snapshot.GetVolumeSnapshotRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> volume_snapshot.VolumeSnapshot: - r"""Returns the specified snapshot resource. Returns - INVALID_ARGUMENT if called for a non-boot volume. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - async def sample_get_volume_snapshot(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.GetVolumeSnapshotRequest( - name="name_value", - ) - - # Make the request - response = await client.get_volume_snapshot(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.GetVolumeSnapshotRequest, dict]]): - The request object. Message for requesting volume - snapshot information. - name (:class:`str`): - Required. The name of the snapshot. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bare_metal_solution_v2.types.VolumeSnapshot: - A snapshot of a volume. Only boot - volumes can have snapshots. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, volume_snapshot.GetVolumeSnapshotRequest): - request = volume_snapshot.GetVolumeSnapshotRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None:
- request.name = name
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._client._transport._wrapped_methods[self._client._transport.get_volume_snapshot]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("name", request.name),
- )),
- )
-
- # Validate the universe domain.
- self._client._validate_universe_domain()
-
- # Send the request.
- response = await rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- async def list_volume_snapshots(self,
- request: Optional[Union[volume_snapshot.ListVolumeSnapshotsRequest, dict]] = None,
- *,
- parent: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> pagers.ListVolumeSnapshotsAsyncPager:
- r"""Retrieves the list of snapshots for the specified
- volume. Returns a response with an empty list of
- snapshots if called for a non-boot volume.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import bare_metal_solution_v2
-
- async def sample_list_volume_snapshots():
- # Create a client
- client = bare_metal_solution_v2.BareMetalSolutionAsyncClient()
-
- # Initialize request argument(s)
- request = bare_metal_solution_v2.ListVolumeSnapshotsRequest(
- parent="parent_value",
- )
-
- # Make the request
- page_result = client.list_volume_snapshots(request=request)
-
- # Handle the response
- async for response in page_result:
- print(response)
-
- Args:
- request (Optional[Union[google.cloud.bare_metal_solution_v2.types.ListVolumeSnapshotsRequest, dict]]):
- The request object. Message for requesting a list of
- volume snapshots.
- parent (:class:`str`):
- Required. Parent value for
- ListVolumeSnapshotsRequest.
-
- This corresponds to the ``parent`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListVolumeSnapshotsAsyncPager:
- Response message containing the list
- of volume snapshots.
- Iterating over this object will yield
- results and resolve additional pages
- automatically.
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [parent]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError("If the `request` argument is set, then none of "
- "the individual field arguments should be set.")
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, volume_snapshot.ListVolumeSnapshotsRequest):
- request = volume_snapshot.ListVolumeSnapshotsRequest(request)
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if parent is not None:
- request.parent = parent
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._client._transport._wrapped_methods[self._client._transport.list_volume_snapshots]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("parent", request.parent),
- )),
- )
-
- # Validate the universe domain.
- self._client._validate_universe_domain()
-
- # Send the request.
- response = await rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # This method is paged; wrap the response in a pager, which provides
- # an `__aiter__` convenience method.
- response = pagers.ListVolumeSnapshotsAsyncPager(
- method=rpc,
- request=request,
- response=response,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- async def get_lun(self,
- request: Optional[Union[lun.GetLunRequest, dict]] = None,
- *,
- name: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> lun.Lun:
- r"""Get details of a single storage logical unit
- number (LUN).
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import bare_metal_solution_v2
-
- async def sample_get_lun():
- # Create a client
- client = bare_metal_solution_v2.BareMetalSolutionAsyncClient()
-
- # Initialize request argument(s)
- request = bare_metal_solution_v2.GetLunRequest(
- name="name_value",
- )
-
- # Make the request
- response = await client.get_lun(request=request)
-
- # Handle the response
- print(response)
-
- Args:
- request (Optional[Union[google.cloud.bare_metal_solution_v2.types.GetLunRequest, dict]]):
- The request object. Message for requesting storage LUN
- information.
- name (:class:`str`):
- Required. Name of the resource.
- This corresponds to the ``name`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.bare_metal_solution_v2.types.Lun:
- A storage volume logical unit number
- (LUN).
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [name]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError("If the `request` argument is set, then none of "
- "the individual field arguments should be set.")
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, lun.GetLunRequest):
- request = lun.GetLunRequest(request)
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if name is not None:
- request.name = name
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._client._transport._wrapped_methods[self._client._transport.get_lun]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("name", request.name),
- )),
- )
-
- # Validate the universe domain.
- self._client._validate_universe_domain()
-
- # Send the request.
- response = await rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- async def list_luns(self,
- request: Optional[Union[lun.ListLunsRequest, dict]] = None,
- *,
- parent: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> pagers.ListLunsAsyncPager:
- r"""List storage volume LUNs for a given storage volume.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import bare_metal_solution_v2
-
- async def sample_list_luns():
- # Create a client
- client = bare_metal_solution_v2.BareMetalSolutionAsyncClient()
-
- # Initialize request argument(s)
- request = bare_metal_solution_v2.ListLunsRequest(
- parent="parent_value",
- )
-
- # Make the request
- page_result = client.list_luns(request=request)
-
- # Handle the response
- async for response in page_result:
- print(response)
-
- Args:
- request (Optional[Union[google.cloud.bare_metal_solution_v2.types.ListLunsRequest, dict]]):
- The request object. Message for requesting a list of
- storage volume LUNs.
- parent (:class:`str`):
- Required. Parent value for
- ListLunsRequest.
-
- This corresponds to the ``parent`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListLunsAsyncPager:
- Response message containing the list
- of storage volume LUNs.
- Iterating over this object will yield
- results and resolve additional pages
- automatically.
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [parent]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError("If the `request` argument is set, then none of "
- "the individual field arguments should be set.")
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, lun.ListLunsRequest):
- request = lun.ListLunsRequest(request)
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if parent is not None:
- request.parent = parent
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._client._transport._wrapped_methods[self._client._transport.list_luns]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("parent", request.parent),
- )),
- )
-
- # Validate the universe domain.
- self._client._validate_universe_domain()
-
- # Send the request.
- response = await rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # This method is paged; wrap the response in a pager, which provides
- # an `__aiter__` convenience method.
- response = pagers.ListLunsAsyncPager(
- method=rpc,
- request=request,
- response=response,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- async def evict_lun(self,
- request: Optional[Union[lun.EvictLunRequest, dict]] = None,
- *,
- name: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> operation_async.AsyncOperation:
- r"""Skips the LUN's cooloff and deletes it now.
- The LUN must be in the cooloff state.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import bare_metal_solution_v2
-
- async def sample_evict_lun():
- # Create a client
- client = bare_metal_solution_v2.BareMetalSolutionAsyncClient()
-
- # Initialize request argument(s)
- request = bare_metal_solution_v2.EvictLunRequest(
- name="name_value",
- )
-
- # Make the request
- operation = client.evict_lun(request=request)
-
- print("Waiting for operation to complete...")
-
- response = (await operation).result()
-
- # Handle the response
- print(response)
-
- Args:
- request (Optional[Union[google.cloud.bare_metal_solution_v2.types.EvictLunRequest, dict]]):
- The request object. Request to skip the LUN cooloff and
- delete it.
- name (:class:`str`):
- Required. The name of the LUN.
- This corresponds to the ``name`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.api_core.operation_async.AsyncOperation:
- An object representing a long-running operation.
-
- The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty`. A generic empty message that you can re-use to avoid defining duplicated
- empty messages in your APIs. A typical example is to
- use it as the request or the response type of an API
- method. For instance:
-
- service Foo {
- rpc Bar(google.protobuf.Empty) returns
- (google.protobuf.Empty);
-
- }
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [name]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError("If the `request` argument is set, then none of "
- "the individual field arguments should be set.")
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, lun.EvictLunRequest):
- request = lun.EvictLunRequest(request)
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if name is not None:
- request.name = name
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._client._transport._wrapped_methods[self._client._transport.evict_lun]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("name", request.name),
- )),
- )
-
- # Validate the universe domain.
- self._client._validate_universe_domain()
-
- # Send the request.
- response = await rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Wrap the response in an operation future.
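- # (``operation_async.from_gapic`` returns an ``AsyncOperation`` future that
- # polls via the operations client; awaiting its ``result()`` yields the
- # ``empty_pb2.Empty`` result type, while in-flight metadata is decoded as
- # ``baremetalsolution.OperationMetadata``.)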
- response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=baremetalsolution.OperationMetadata, - ) - - # Done; return the response. - return response - - async def get_nfs_share(self, - request: Optional[Union[nfs_share.GetNfsShareRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> nfs_share.NfsShare: - r"""Get details of a single NFS share. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - async def sample_get_nfs_share(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.GetNfsShareRequest( - name="name_value", - ) - - # Make the request - response = await client.get_nfs_share(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.GetNfsShareRequest, dict]]): - The request object. Message for requesting NFS share - information. - name (:class:`str`): - Required. Name of the resource. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bare_metal_solution_v2.types.NfsShare: - An NFS share. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, nfs_share.GetNfsShareRequest): - request = nfs_share.GetNfsShareRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_nfs_share] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_nfs_shares(self, - request: Optional[Union[nfs_share.ListNfsSharesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListNfsSharesAsyncPager: - r"""List NFS shares. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - async def sample_list_nfs_shares(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.ListNfsSharesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_nfs_shares(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.ListNfsSharesRequest, dict]]): - The request object. Message for requesting a list of NFS - shares. - parent (:class:`str`): - Required. Parent value for - ListNfsSharesRequest. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListNfsSharesAsyncPager: - Response message containing the list - of NFS shares. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, nfs_share.ListNfsSharesRequest): - request = nfs_share.ListNfsSharesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
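- # (Plain attribute assignment suffices below because proto-plus request
- # messages expose their proto fields as ordinary Python attributes.)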
- if parent is not None:
- request.parent = parent
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._client._transport._wrapped_methods[self._client._transport.list_nfs_shares]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("parent", request.parent),
- )),
- )
-
- # Validate the universe domain.
- self._client._validate_universe_domain()
-
- # Send the request.
- response = await rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # This method is paged; wrap the response in a pager, which provides
- # an `__aiter__` convenience method.
- response = pagers.ListNfsSharesAsyncPager(
- method=rpc,
- request=request,
- response=response,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- async def update_nfs_share(self,
- request: Optional[Union[gcb_nfs_share.UpdateNfsShareRequest, dict]] = None,
- *,
- nfs_share: Optional[gcb_nfs_share.NfsShare] = None,
- update_mask: Optional[field_mask_pb2.FieldMask] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> operation_async.AsyncOperation:
- r"""Update details of a single NFS share.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import bare_metal_solution_v2
-
- async def sample_update_nfs_share():
- # Create a client
- client = bare_metal_solution_v2.BareMetalSolutionAsyncClient()
-
- # Initialize request argument(s)
- request = bare_metal_solution_v2.UpdateNfsShareRequest(
- )
-
- # Make the request
- operation = client.update_nfs_share(request=request)
-
- print("Waiting for operation to complete...")
-
- response = (await operation).result()
-
- # Handle the response
- print(response)
-
- Args:
- request (Optional[Union[google.cloud.bare_metal_solution_v2.types.UpdateNfsShareRequest, dict]]):
- The request object. Message requesting an update to an NFS
- share.
- nfs_share (:class:`google.cloud.bare_metal_solution_v2.types.NfsShare`):
- Required. The NFS share to update.
-
- The ``name`` field is used to identify the NFS share to
- update. Format:
- projects/{project}/locations/{location}/nfsShares/{nfs_share}
-
- This corresponds to the ``nfs_share`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
- The list of fields to update. The only currently
- supported fields are ``labels`` and ``allowed_clients``.
-
- This corresponds to the ``update_mask`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.api_core.operation_async.AsyncOperation:
- An object representing a long-running operation.
-
- The result type for the operation will be
- :class:`google.cloud.bare_metal_solution_v2.types.NfsShare`.
- An NFS share.
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [nfs_share, update_mask]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError("If the `request` argument is set, then none of "
- "the individual field arguments should be set.")
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, gcb_nfs_share.UpdateNfsShareRequest):
- request = gcb_nfs_share.UpdateNfsShareRequest(request)
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if nfs_share is not None:
- request.nfs_share = nfs_share
- if update_mask is not None:
- request.update_mask = update_mask
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._client._transport._wrapped_methods[self._client._transport.update_nfs_share]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("nfs_share.name", request.nfs_share.name),
- )),
- )
-
- # Validate the universe domain.
- self._client._validate_universe_domain()
-
- # Send the request.
- response = await rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Wrap the response in an operation future.
- response = operation_async.from_gapic(
- response,
- self._client._transport.operations_client,
- gcb_nfs_share.NfsShare,
- metadata_type=baremetalsolution.OperationMetadata,
- )
-
- # Done; return the response.
- return response
-
- async def create_nfs_share(self,
- request: Optional[Union[gcb_nfs_share.CreateNfsShareRequest, dict]] = None,
- *,
- parent: Optional[str] = None,
- nfs_share: Optional[gcb_nfs_share.NfsShare] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> operation_async.AsyncOperation:
- r"""Create an NFS share.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - async def sample_create_nfs_share(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.CreateNfsShareRequest( - parent="parent_value", - ) - - # Make the request - operation = client.create_nfs_share(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.CreateNfsShareRequest, dict]]): - The request object. Message for creating an NFS share. - parent (:class:`str`): - Required. The parent project and - location. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - nfs_share (:class:`google.cloud.bare_metal_solution_v2.types.NfsShare`): - Required. The NfsShare to create. - This corresponds to the ``nfs_share`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.bare_metal_solution_v2.types.NfsShare` - An NFS share. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, nfs_share] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gcb_nfs_share.CreateNfsShareRequest): - request = gcb_nfs_share.CreateNfsShareRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if nfs_share is not None: - request.nfs_share = nfs_share - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_nfs_share] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
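- # (When ``retry`` and ``timeout`` are left as the ``gapic_v1.method.DEFAULT``
- # sentinels, the wrapped method falls back to the values configured when it
- # was wrapped above.)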
- response = await rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Wrap the response in an operation future.
- response = operation_async.from_gapic(
- response,
- self._client._transport.operations_client,
- gcb_nfs_share.NfsShare,
- metadata_type=baremetalsolution.OperationMetadata,
- )
-
- # Done; return the response.
- return response
-
- async def rename_nfs_share(self,
- request: Optional[Union[nfs_share.RenameNfsShareRequest, dict]] = None,
- *,
- name: Optional[str] = None,
- new_nfsshare_id: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> nfs_share.NfsShare:
- r"""RenameNfsShare sets a new name for an NFS share.
- Use with caution: previous names become invalid
- immediately.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import bare_metal_solution_v2
-
- async def sample_rename_nfs_share():
- # Create a client
- client = bare_metal_solution_v2.BareMetalSolutionAsyncClient()
-
- # Initialize request argument(s)
- request = bare_metal_solution_v2.RenameNfsShareRequest(
- name="name_value",
- new_nfsshare_id="new_nfsshare_id_value",
- )
-
- # Make the request
- response = await client.rename_nfs_share(request=request)
-
- # Handle the response
- print(response)
-
- Args:
- request (Optional[Union[google.cloud.bare_metal_solution_v2.types.RenameNfsShareRequest, dict]]):
- The request object. Message requesting the rename of an
- NFS share.
- name (:class:`str`):
- Required. The ``name`` field is used to identify the
- nfsshare. Format:
- projects/{project}/locations/{location}/nfsshares/{nfsshare}
-
- This corresponds to the ``name`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- new_nfsshare_id (:class:`str`):
- Required. The new ``id`` of the nfsshare.
- This corresponds to the ``new_nfsshare_id`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.bare_metal_solution_v2.types.NfsShare:
- An NFS share.
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
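- # (Accepting both calling styles at once would make it ambiguous which
- # value wins, so flattened fields and ``request`` are rejected together
- # below.)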
- flattened_params = [name, new_nfsshare_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, nfs_share.RenameNfsShareRequest): - request = nfs_share.RenameNfsShareRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if new_nfsshare_id is not None: - request.new_nfsshare_id = new_nfsshare_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.rename_nfs_share] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_nfs_share(self, - request: Optional[Union[nfs_share.DeleteNfsShareRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Delete an NFS share. The underlying volume is - automatically deleted. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - async def sample_delete_nfs_share(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.DeleteNfsShareRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_nfs_share(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.DeleteNfsShareRequest, dict]]): - The request object. Message for deleting an NFS share. - name (:class:`str`): - Required. The name of the NFS share - to delete. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, nfs_share.DeleteNfsShareRequest): - request = nfs_share.DeleteNfsShareRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_nfs_share] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=baremetalsolution.OperationMetadata, - ) - - # Done; return the response. - return response - - async def list_provisioning_quotas(self, - request: Optional[Union[provisioning.ListProvisioningQuotasRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListProvisioningQuotasAsyncPager: - r"""List the budget details to provision resources on a - given project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - async def sample_list_provisioning_quotas(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.ListProvisioningQuotasRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_provisioning_quotas(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.ListProvisioningQuotasRequest, dict]]): - The request object. Message for requesting the list of - provisioning quotas. - parent (:class:`str`): - Required. Parent value for - ListProvisioningQuotasRequest. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListProvisioningQuotasAsyncPager: - Response message for the list of - provisioning quotas. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, provisioning.ListProvisioningQuotasRequest): - request = provisioning.ListProvisioningQuotasRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_provisioning_quotas] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. 
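- # (As iteration crosses a page boundary, the pager transparently reissues
- # the RPC with the ``page_token`` from the previous response, fetching
- # pages lazily until the listing is exhausted.)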
- response = pagers.ListProvisioningQuotasAsyncPager(
- method=rpc,
- request=request,
- response=response,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- async def submit_provisioning_config(self,
- request: Optional[Union[provisioning.SubmitProvisioningConfigRequest, dict]] = None,
- *,
- parent: Optional[str] = None,
- provisioning_config: Optional[provisioning.ProvisioningConfig] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> provisioning.SubmitProvisioningConfigResponse:
- r"""Submit a provisioning configuration for a given
- project.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import bare_metal_solution_v2
-
- async def sample_submit_provisioning_config():
- # Create a client
- client = bare_metal_solution_v2.BareMetalSolutionAsyncClient()
-
- # Initialize request argument(s)
- request = bare_metal_solution_v2.SubmitProvisioningConfigRequest(
- parent="parent_value",
- )
-
- # Make the request
- response = await client.submit_provisioning_config(request=request)
-
- # Handle the response
- print(response)
-
- Args:
- request (Optional[Union[google.cloud.bare_metal_solution_v2.types.SubmitProvisioningConfigRequest, dict]]):
- The request object. Request for SubmitProvisioningConfig.
- parent (:class:`str`):
- Required. The parent project and
- location containing the
- ProvisioningConfig.
-
- This corresponds to the ``parent`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- provisioning_config (:class:`google.cloud.bare_metal_solution_v2.types.ProvisioningConfig`):
- Required. The ProvisioningConfig to
- create.
-
- This corresponds to the ``provisioning_config`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.bare_metal_solution_v2.types.SubmitProvisioningConfigResponse:
- Response for
- SubmitProvisioningConfig.
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [parent, provisioning_config]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError("If the `request` argument is set, then none of "
- "the individual field arguments should be set.")
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
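- # (A ``dict`` passed as ``request`` is coerced through the proto-plus
- # message constructor below, which checks field names against the proto
- # definition.)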
- if not isinstance(request, provisioning.SubmitProvisioningConfigRequest): - request = provisioning.SubmitProvisioningConfigRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if provisioning_config is not None: - request.provisioning_config = provisioning_config - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.submit_provisioning_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_provisioning_config(self, - request: Optional[Union[provisioning.GetProvisioningConfigRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> provisioning.ProvisioningConfig: - r"""Get ProvisioningConfig by name. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - async def sample_get_provisioning_config(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.GetProvisioningConfigRequest( - name="name_value", - ) - - # Make the request - response = await client.get_provisioning_config(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.GetProvisioningConfigRequest, dict]]): - The request object. Request for GetProvisioningConfig. - name (:class:`str`): - Required. Name of the - ProvisioningConfig. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bare_metal_solution_v2.types.ProvisioningConfig: - A provisioning configuration. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, provisioning.GetProvisioningConfigRequest): - request = provisioning.GetProvisioningConfigRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_provisioning_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_provisioning_config(self, - request: Optional[Union[provisioning.CreateProvisioningConfigRequest, dict]] = None, - *, - parent: Optional[str] = None, - provisioning_config: Optional[provisioning.ProvisioningConfig] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> provisioning.ProvisioningConfig: - r"""Create new ProvisioningConfig. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - async def sample_create_provisioning_config(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.CreateProvisioningConfigRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_provisioning_config(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.CreateProvisioningConfigRequest, dict]]): - The request object. Request for CreateProvisioningConfig. - parent (:class:`str`): - Required. The parent project and - location containing the - ProvisioningConfig. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - provisioning_config (:class:`google.cloud.bare_metal_solution_v2.types.ProvisioningConfig`): - Required. The ProvisioningConfig to - create. - - This corresponds to the ``provisioning_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bare_metal_solution_v2.types.ProvisioningConfig: - A provisioning configuration. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, provisioning_config] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, provisioning.CreateProvisioningConfigRequest): - request = provisioning.CreateProvisioningConfigRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if provisioning_config is not None: - request.provisioning_config = provisioning_config - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_provisioning_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_provisioning_config(self, - request: Optional[Union[provisioning.UpdateProvisioningConfigRequest, dict]] = None, - *, - provisioning_config: Optional[provisioning.ProvisioningConfig] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> provisioning.ProvisioningConfig: - r"""Update existing ProvisioningConfig. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - async def sample_update_provisioning_config(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.UpdateProvisioningConfigRequest( - ) - - # Make the request - response = await client.update_provisioning_config(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bare_metal_solution_v2.types.UpdateProvisioningConfigRequest, dict]]): - The request object. Message for updating a - ProvisioningConfig. - provisioning_config (:class:`google.cloud.bare_metal_solution_v2.types.ProvisioningConfig`): - Required. The ProvisioningConfig to - update. - - This corresponds to the ``provisioning_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. The list of fields to - update. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bare_metal_solution_v2.types.ProvisioningConfig: - A provisioning configuration. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [provisioning_config, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, provisioning.UpdateProvisioningConfigRequest): - request = provisioning.UpdateProvisioningConfigRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if provisioning_config is not None: - request.provisioning_config = provisioning_config - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_provisioning_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("provisioning_config.name", request.provisioning_config.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def rename_network(self,
-            request: Optional[Union[network.RenameNetworkRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            new_network_id: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> network.Network:
-        r"""RenameNetwork sets a new name for a network.
-        Use with caution; previous names are invalidated
-        immediately.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import bare_metal_solution_v2
-
-            async def sample_rename_network():
-                # Create a client
-                client = bare_metal_solution_v2.BareMetalSolutionAsyncClient()
-
-                # Initialize request argument(s)
-                request = bare_metal_solution_v2.RenameNetworkRequest(
-                    name="name_value",
-                    new_network_id="new_network_id_value",
-                )
-
-                # Make the request
-                response = await client.rename_network(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.bare_metal_solution_v2.types.RenameNetworkRequest, dict]]):
-                The request object. Message requesting rename of a
-                network.
-            name (:class:`str`):
-                Required. The ``name`` field is used to identify the
-                network. Format:
-                projects/{project}/locations/{location}/networks/{network}
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            new_network_id (:class:`str`):
-                Required. The new ``id`` of the network.
-                This corresponds to the ``new_network_id`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.bare_metal_solution_v2.types.Network:
-                A Network.
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [name, new_network_id]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, network.RenameNetworkRequest):
-            request = network.RenameNetworkRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if name is not None:
-            request.name = name
-        if new_network_id is not None:
-            request.new_network_id = new_network_id
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.rename_network]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def list_os_images(self,
-            request: Optional[Union[osimage.ListOSImagesRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> pagers.ListOSImagesAsyncPager:
-        r"""Retrieves the list of OS images which are currently
-        approved.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import bare_metal_solution_v2
-
-            async def sample_list_os_images():
-                # Create a client
-                client = bare_metal_solution_v2.BareMetalSolutionAsyncClient()
-
-                # Initialize request argument(s)
-                request = bare_metal_solution_v2.ListOSImagesRequest(
-                    parent="parent_value",
-                )
-
-                # Make the request
-                page_result = client.list_os_images(request=request)
-
-                # Handle the response
-                async for response in page_result:
-                    print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.bare_metal_solution_v2.types.ListOSImagesRequest, dict]]):
-                The request object. Request for getting all available OS
-                images.
-            parent (:class:`str`):
-                Required. Parent value for
-                ListOSImagesRequest.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListOSImagesAsyncPager:
-                Response from getting all available OS
-                images.
-                Iterating over this object will yield
-                results and resolve additional pages
-                automatically.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
- flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, osimage.ListOSImagesRequest): - request = osimage.ListOSImagesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_os_images] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListOSImagesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - async def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def __aenter__(self) -> "BareMetalSolutionAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "BareMetalSolutionAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/client.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/client.py deleted file mode 100644 index 7dab7dd120d1..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/client.py +++ /dev/null @@ -1,6065 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
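-#
-# A minimal usage sketch for the synchronous client defined in this module
-# (illustrative only; the project and location below are placeholders):
-#
-#   from google.cloud import bare_metal_solution_v2
-#
-#   client = bare_metal_solution_v2.BareMetalSolutionClient()
-#   for server in client.list_instances(parent="projects/my-project/locations/us-central1"):
-#       print(server.name)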
-# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.bare_metal_solution_v2 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.bare_metal_solution_v2.services.bare_metal_solution import pagers -from google.cloud.bare_metal_solution_v2.types import baremetalsolution -from google.cloud.bare_metal_solution_v2.types import common -from google.cloud.bare_metal_solution_v2.types import instance -from google.cloud.bare_metal_solution_v2.types import instance as gcb_instance -from google.cloud.bare_metal_solution_v2.types import lun -from google.cloud.bare_metal_solution_v2.types import network -from google.cloud.bare_metal_solution_v2.types import network as gcb_network -from google.cloud.bare_metal_solution_v2.types import nfs_share -from google.cloud.bare_metal_solution_v2.types import nfs_share as gcb_nfs_share -from google.cloud.bare_metal_solution_v2.types import osimage -from google.cloud.bare_metal_solution_v2.types import provisioning -from google.cloud.bare_metal_solution_v2.types import ssh_key -from google.cloud.bare_metal_solution_v2.types import ssh_key as gcb_ssh_key -from google.cloud.bare_metal_solution_v2.types import volume -from google.cloud.bare_metal_solution_v2.types import volume as gcb_volume -from google.cloud.bare_metal_solution_v2.types import volume_snapshot -from google.cloud.bare_metal_solution_v2.types import volume_snapshot as gcb_volume_snapshot -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import BareMetalSolutionTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import BareMetalSolutionGrpcTransport -from .transports.grpc_asyncio import BareMetalSolutionGrpcAsyncIOTransport -from .transports.rest import BareMetalSolutionRestTransport - - -class BareMetalSolutionClientMeta(type): - """Metaclass for the BareMetalSolution client. 
-
-    This provides class-level methods for building and retrieving
-    support objects (e.g. transport) without polluting the client instance
-    objects.
-    """
-    _transport_registry = OrderedDict()  # type: Dict[str, Type[BareMetalSolutionTransport]]
-    _transport_registry["grpc"] = BareMetalSolutionGrpcTransport
-    _transport_registry["grpc_asyncio"] = BareMetalSolutionGrpcAsyncIOTransport
-    _transport_registry["rest"] = BareMetalSolutionRestTransport
-
-    def get_transport_class(cls,
-            label: Optional[str] = None,
-            ) -> Type[BareMetalSolutionTransport]:
-        """Returns an appropriate transport class.
-
-        Args:
-            label: The name of the desired transport. If none is
-                provided, then the first transport in the registry is used.
-
-        Returns:
-            The transport class to use.
-        """
-        # If a specific transport is requested, return that one.
-        if label:
-            return cls._transport_registry[label]
-
-        # No transport is requested; return the default (that is, the first one
-        # in the dictionary).
-        return next(iter(cls._transport_registry.values()))
-
-
-class BareMetalSolutionClient(metaclass=BareMetalSolutionClientMeta):
-    """Performs management operations on Bare Metal Solution servers.
-
-    The ``baremetalsolution.googleapis.com`` service provides management
-    capabilities for Bare Metal Solution servers. To access the API
-    methods, you must assign Bare Metal Solution IAM roles containing
-    the desired permissions to your staff in your Google Cloud project.
-    You must also enable the Bare Metal Solution API. Once enabled, the
-    methods act upon specific servers in your Bare Metal Solution
-    environment.
-    """
-
-    @staticmethod
-    def _get_default_mtls_endpoint(api_endpoint):
-        """Converts api endpoint to mTLS endpoint.
-
-        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
-        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
-        Args:
-            api_endpoint (Optional[str]): the api endpoint to convert.
-        Returns:
-            str: converted mTLS api endpoint.
-        """
-        if not api_endpoint:
-            return api_endpoint
-
-        mtls_endpoint_re = re.compile(
-            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
-        )
-
-        m = mtls_endpoint_re.match(api_endpoint)
-        name, mtls, sandbox, googledomain = m.groups()
-        if mtls or not googledomain:
-            return api_endpoint
-
-        if sandbox:
-            return api_endpoint.replace(
-                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
-            )
-
-        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
-
-    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
-    DEFAULT_ENDPOINT = "baremetalsolution.googleapis.com"
-    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
-        DEFAULT_ENDPOINT
-    )
-
-    _DEFAULT_ENDPOINT_TEMPLATE = "baremetalsolution.{UNIVERSE_DOMAIN}"
-    _DEFAULT_UNIVERSE = "googleapis.com"
-
-    @classmethod
-    def from_service_account_info(cls, info: dict, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-        info.
-
-        Args:
-            info (dict): The service account private key info.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            BareMetalSolutionClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_info(info)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    @classmethod
-    def from_service_account_file(cls, filename: str, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-        file.
-
-        Args:
-            filename (str): The path to the service account private key json
-                file.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            BareMetalSolutionClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_file(
-            filename)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    from_service_account_json = from_service_account_file
-
-    @property
-    def transport(self) -> BareMetalSolutionTransport:
-        """Returns the transport used by the client instance.
-
-        Returns:
-            BareMetalSolutionTransport: The transport used by the client
-                instance.
-        """
-        return self._transport
-
-    @staticmethod
-    def instance_path(project: str,location: str,instance: str,) -> str:
-        """Returns a fully-qualified instance string."""
-        return "projects/{project}/locations/{location}/instances/{instance}".format(project=project, location=location, instance=instance, )
-
-    @staticmethod
-    def parse_instance_path(path: str) -> Dict[str,str]:
-        """Parses an instance path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/instances/(?P<instance>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def instance_config_path(project: str,location: str,instance_config: str,) -> str:
-        """Returns a fully-qualified instance_config string."""
-        return "projects/{project}/locations/{location}/instanceConfigs/{instance_config}".format(project=project, location=location, instance_config=instance_config, )
-
-    @staticmethod
-    def parse_instance_config_path(path: str) -> Dict[str,str]:
-        """Parses an instance_config path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/instanceConfigs/(?P<instance_config>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def instance_quota_path(project: str,location: str,instance_quota: str,) -> str:
-        """Returns a fully-qualified instance_quota string."""
-        return "projects/{project}/locations/{location}/instanceQuotas/{instance_quota}".format(project=project, location=location, instance_quota=instance_quota, )
-
-    @staticmethod
-    def parse_instance_quota_path(path: str) -> Dict[str,str]:
-        """Parses an instance_quota path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/instanceQuotas/(?P<instance_quota>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def interconnect_attachment_path(project: str,region: str,interconnect_attachment: str,) -> str:
-        """Returns a fully-qualified interconnect_attachment string."""
-        return "projects/{project}/regions/{region}/interconnectAttachments/{interconnect_attachment}".format(project=project, region=region, interconnect_attachment=interconnect_attachment, )
-
-    @staticmethod
-    def parse_interconnect_attachment_path(path: str) -> Dict[str,str]:
-        """Parses an interconnect_attachment path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/regions/(?P<region>.+?)/interconnectAttachments/(?P<interconnect_attachment>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def lun_path(project: str,location: str,volume: str,lun: str,) -> str:
-        """Returns a fully-qualified lun string."""
-        return "projects/{project}/locations/{location}/volumes/{volume}/luns/{lun}".format(project=project, location=location, volume=volume, lun=lun, )
-
-    @staticmethod
-    def parse_lun_path(path: str) -> Dict[str,str]:
-        """Parses a lun path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/volumes/(?P<volume>.+?)/luns/(?P<lun>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def network_path(project: str,location: str,network: str,) -> str:
-        """Returns a fully-qualified network string."""
-        return "projects/{project}/locations/{location}/networks/{network}".format(project=project, location=location, network=network, )
-
-    @staticmethod
-    def parse_network_path(path: str) -> Dict[str,str]:
-        """Parses a network path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/networks/(?P<network>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def network_config_path(project: str,location: str,network_config: str,) -> str:
-        """Returns a fully-qualified network_config string."""
-        return "projects/{project}/locations/{location}/networkConfigs/{network_config}".format(project=project, location=location, network_config=network_config, )
-
-    @staticmethod
-    def parse_network_config_path(path: str) -> Dict[str,str]:
-        """Parses a network_config path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/networkConfigs/(?P<network_config>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def nfs_share_path(project: str,location: str,nfs_share: str,) -> str:
-        """Returns a fully-qualified nfs_share string."""
-        return "projects/{project}/locations/{location}/nfsShares/{nfs_share}".format(project=project, location=location, nfs_share=nfs_share, )
-
-    @staticmethod
-    def parse_nfs_share_path(path: str) -> Dict[str,str]:
-        """Parses an nfs_share path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/nfsShares/(?P<nfs_share>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def os_image_path(project: str,location: str,os_image: str,) -> str:
-        """Returns a fully-qualified os_image string."""
-        return "projects/{project}/locations/{location}/osImages/{os_image}".format(project=project, location=location, os_image=os_image, )
-
-    @staticmethod
-    def parse_os_image_path(path: str) -> Dict[str,str]:
-        """Parses an os_image path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/osImages/(?P<os_image>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def provisioning_config_path(project: str,location: str,provisioning_config: str,) -> str:
-        """Returns a fully-qualified provisioning_config string."""
-        return "projects/{project}/locations/{location}/provisioningConfigs/{provisioning_config}".format(project=project, location=location, provisioning_config=provisioning_config, )
-
-    @staticmethod
-    def parse_provisioning_config_path(path: str) -> Dict[str,str]:
-        """Parses a provisioning_config path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/provisioningConfigs/(?P<provisioning_config>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def provisioning_quota_path(project: str,location: str,provisioning_quota: str,) -> str:
-        """Returns a fully-qualified provisioning_quota string."""
-        return "projects/{project}/locations/{location}/provisioningQuotas/{provisioning_quota}".format(project=project, location=location, provisioning_quota=provisioning_quota, )
-
-    @staticmethod
-    def parse_provisioning_quota_path(path: str) -> Dict[str,str]:
-        """Parses a provisioning_quota path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/provisioningQuotas/(?P<provisioning_quota>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def server_network_template_path(project: str,location: str,server_network_template: str,) -> str:
-        """Returns a fully-qualified server_network_template string."""
-        return "projects/{project}/locations/{location}/serverNetworkTemplate/{server_network_template}".format(project=project, location=location, server_network_template=server_network_template, )
-
-    @staticmethod
-    def parse_server_network_template_path(path: str) -> Dict[str,str]:
-        """Parses a server_network_template path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/serverNetworkTemplate/(?P<server_network_template>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def ssh_key_path(project: str,location: str,ssh_key: str,) -> str:
-        """Returns a fully-qualified ssh_key string."""
-        return "projects/{project}/locations/{location}/sshKeys/{ssh_key}".format(project=project, location=location, ssh_key=ssh_key, )
-
-    @staticmethod
-    def parse_ssh_key_path(path: str) -> Dict[str,str]:
-        """Parses an ssh_key path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/sshKeys/(?P<ssh_key>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def volume_path(project: str,location: str,volume: str,) -> str:
-        """Returns a fully-qualified volume string."""
-        return "projects/{project}/locations/{location}/volumes/{volume}".format(project=project, location=location, volume=volume, )
-
-    @staticmethod
-    def parse_volume_path(path: str) -> Dict[str,str]:
-        """Parses a volume path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/volumes/(?P<volume>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def volume_config_path(project: str,location: str,volume_config: str,) -> str:
-        """Returns a fully-qualified volume_config string."""
-        return "projects/{project}/locations/{location}/volumeConfigs/{volume_config}".format(project=project, location=location, volume_config=volume_config, )
-
-    @staticmethod
-    def parse_volume_config_path(path: str) -> Dict[str,str]:
-        """Parses a volume_config path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/volumeConfigs/(?P<volume_config>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def volume_snapshot_path(project: str,location: str,volume: str,snapshot: str,) -> str:
-        """Returns a fully-qualified volume_snapshot string."""
-        return "projects/{project}/locations/{location}/volumes/{volume}/snapshots/{snapshot}".format(project=project, location=location, volume=volume, snapshot=snapshot, )
-
-    @staticmethod
-    def parse_volume_snapshot_path(path: str) -> Dict[str,str]:
-        """Parses a volume_snapshot path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/volumes/(?P<volume>.+?)/snapshots/(?P<snapshot>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_billing_account_path(billing_account: str, ) -> str:
-        """Returns a fully-qualified billing_account string."""
-        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
-
-    @staticmethod
-    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
-        """Parse a billing_account path into its component segments."""
-        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_folder_path(folder: str, ) -> str:
-        """Returns a fully-qualified folder string."""
-        return "folders/{folder}".format(folder=folder, )
-
-    @staticmethod
-    def parse_common_folder_path(path: str) -> Dict[str,str]:
-        """Parse a folder path into its component segments."""
-        m = re.match(r"^folders/(?P<folder>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_organization_path(organization: str, ) -> str:
-        """Returns a fully-qualified organization string."""
-        return "organizations/{organization}".format(organization=organization, )
-
-    @staticmethod
-    def parse_common_organization_path(path: str) -> Dict[str,str]:
-        """Parse an organization path into its component segments."""
-        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_project_path(project: str, ) -> str:
-        """Returns a fully-qualified project string."""
-        return "projects/{project}".format(project=project, )
-
-    @staticmethod
-    def parse_common_project_path(path: str) -> Dict[str,str]:
-        """Parse a project path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_location_path(project: str, location: str, ) -> str:
-        """Returns a fully-qualified location string."""
-        return "projects/{project}/locations/{location}".format(project=project, location=location, )
-
-    @staticmethod
-    def parse_common_location_path(path: str) -> Dict[str,str]:
-        """Parse a location path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @classmethod
-    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
-        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
-
-        The client cert source is determined in the following order:
-        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
-        client cert source is None.
-        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
-        default client cert source exists, use the default one; otherwise the client cert
-        source is None.
-
-        The API endpoint is determined in the following order:
-        (1) if `client_options.api_endpoint` is provided, use the provided one.
-        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
-        default mTLS endpoint; if the environment variable is "never", use the default API
-        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
-        use the default API endpoint.
-
-        More details can be found at https://google.aip.dev/auth/4114.
-
-        Args:
-            client_options (google.api_core.client_options.ClientOptions): Custom options for the
-                client. Only the `api_endpoint` and `client_cert_source` properties may be used
-                in this method.
-
-        Returns:
-            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
-                client cert source to use.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
-        """
-
-        warnings.warn("get_mtls_endpoint_and_cert_source is deprecated.
Use the api_endpoint property instead.", - DeprecationWarning) - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. - - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"]. - """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. 
- universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = BareMetalSolutionClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = BareMetalSolutionClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = BareMetalSolutionClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. - """ - universe_domain = BareMetalSolutionClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. - """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. 
-        """
-        return self._universe_domain
-
-    def __init__(self, *,
-            credentials: Optional[ga_credentials.Credentials] = None,
-            transport: Optional[Union[str, BareMetalSolutionTransport, Callable[..., BareMetalSolutionTransport]]] = None,
-            client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            ) -> None:
-        """Instantiates the bare metal solution client.
-
-        Args:
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-            transport (Optional[Union[str,BareMetalSolutionTransport,Callable[..., BareMetalSolutionTransport]]]):
-                The transport to use, or a Callable that constructs and returns a new transport.
-                If a Callable is given, it will be called with the same set of initialization
-                arguments as used in the BareMetalSolutionTransport constructor.
-                If set to None, a transport is chosen automatically.
-            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
-                Custom options for the client.
-
-                1. The ``api_endpoint`` property can be used to override the
-                default endpoint provided by the client when ``transport`` is
-                not explicitly provided. Only if this property is not set and
-                ``transport`` was not explicitly provided, the endpoint is
-                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
-                variable, which can have one of the following values:
-                "always" (always use the default mTLS endpoint), "never" (always
-                use the default regular endpoint) and "auto" (auto-switch to the
-                default mTLS endpoint if client certificate is present; this is
-                the default value).
-
-                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
-                is "true", then the ``client_cert_source`` property can be used
-                to provide a client certificate for mTLS transport. If
-                not provided, the default SSL client certificate will be used if
-                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
-                set, no client certificate will be used.
-
-                3. The ``universe_domain`` property can be used to override the
-                default "googleapis.com" universe. Note that the ``api_endpoint``
-                property still takes precedence; and ``universe_domain`` is
-                currently not supported for mTLS.
-
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
- """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) - - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = BareMetalSolutionClient._read_environment_variables() - self._client_cert_source = BareMetalSolutionClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = BareMetalSolutionClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER - # Setup logging. - client_logging.initialize_logging() - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, BareMetalSolutionTransport) - if transport_provided: - # transport is a BareMetalSolutionTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = cast(BareMetalSolutionTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - BareMetalSolutionClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[BareMetalSolutionTransport], Callable[..., BareMetalSolutionTransport]] = ( - BareMetalSolutionClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., BareMetalSolutionTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.baremetalsolution_v2.BareMetalSolutionClient`.", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "credentialsType": None, - } - ) - - def list_instances(self, - request: Optional[Union[instance.ListInstancesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListInstancesPager: - r"""List servers in a given project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - def sample_list_instances(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.ListInstancesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_instances(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.bare_metal_solution_v2.types.ListInstancesRequest, dict]): - The request object. Message for requesting the list of - servers. - parent (str): - Required. 
Parent value for - ListInstancesRequest. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListInstancesPager: - Response message for the list of - servers. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, instance.ListInstancesRequest): - request = instance.ListInstancesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_instances] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListInstancesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_instance(self, - request: Optional[Union[instance.GetInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> instance.Instance: - r"""Get details about a single server. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - def sample_get_instance(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.GetInstanceRequest( - name="name_value", - ) - - # Make the request - response = client.get_instance(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bare_metal_solution_v2.types.GetInstanceRequest, dict]): - The request object. Message for requesting server - information. - name (str): - Required. Name of the resource. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bare_metal_solution_v2.types.Instance: - A server. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, instance.GetInstanceRequest): - request = instance.GetInstanceRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_instance] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_instance(self, - request: Optional[Union[gcb_instance.UpdateInstanceRequest, dict]] = None, - *, - instance: Optional[gcb_instance.Instance] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Update details of a single server. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - def sample_update_instance(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.UpdateInstanceRequest( - ) - - # Make the request - operation = client.update_instance(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bare_metal_solution_v2.types.UpdateInstanceRequest, dict]): - The request object. Message requesting to update a - server. - instance (google.cloud.bare_metal_solution_v2.types.Instance): - Required. The server to update. - - The ``name`` field is used to identify the instance to - update. Format: - projects/{project}/locations/{location}/instances/{instance} - - This corresponds to the ``instance`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - The list of fields to update. The currently supported - fields are: ``labels``, ``hyperthreading_enabled``, - ``os_image`` - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.bare_metal_solution_v2.types.Instance` - A server. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [instance, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gcb_instance.UpdateInstanceRequest): - request = gcb_instance.UpdateInstanceRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if instance is not None: - request.instance = instance - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_instance] - - # Certain fields should be provided within the metadata header; - # add these here.
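- # [Editorial sketch, hedged; not part of the generated client] The routing header built below travels as "x-goog-request-params" gRPC metadata so the backend can route the call, approximately: - # gapic_v1.routing_header.to_grpc_metadata((("instance.name", request.instance.name),)) - # # -> ("x-goog-request-params", "instance.name=projects%2F...%2Finstances%2F...")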
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("instance.name", request.instance.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - gcb_instance.Instance, - metadata_type=baremetalsolution.OperationMetadata, - ) - - # Done; return the response. - return response - - def rename_instance(self, - request: Optional[Union[instance.RenameInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - new_instance_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> instance.Instance: - r"""RenameInstance sets a new name for an instance. - Use with caution; previous names - become invalid immediately. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - def sample_rename_instance(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.RenameInstanceRequest( - name="name_value", - new_instance_id="new_instance_id_value", - ) - - # Make the request - response = client.rename_instance(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bare_metal_solution_v2.types.RenameInstanceRequest, dict]): - The request object. Message requesting rename of a - server. - name (str): - Required. The ``name`` field is used to identify the - instance. Format: - projects/{project}/locations/{location}/instances/{instance} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - new_instance_id (str): - Required. The new ``id`` of the instance. - This corresponds to the ``new_instance_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bare_metal_solution_v2.types.Instance: - A server. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request.
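- # [Hedged usage sketch, illustrative only] The two call styles below are equivalent, while mixing them trips the ValueError guard that follows: - # client.rename_instance(request=bare_metal_solution_v2.RenameInstanceRequest(name="projects/p/locations/l/instances/i", new_instance_id="i2")) - # client.rename_instance(name="projects/p/locations/l/instances/i", new_instance_id="i2")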
- flattened_params = [name, new_instance_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, instance.RenameInstanceRequest): - request = instance.RenameInstanceRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if new_instance_id is not None: - request.new_instance_id = new_instance_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.rename_instance] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def reset_instance(self, - request: Optional[Union[instance.ResetInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Perform an ungraceful, hard reset on a server. - Equivalent to shutting the power off and then turning it - back on. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - def sample_reset_instance(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.ResetInstanceRequest( - name="name_value", - ) - - # Make the request - operation = client.reset_instance(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bare_metal_solution_v2.types.ResetInstanceRequest, dict]): - The request object. Message requesting to reset a server. - name (str): - Required. Name of the resource. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.bare_metal_solution_v2.types.ResetInstanceResponse` - Response message from resetting a server. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, instance.ResetInstanceRequest): - request = instance.ResetInstanceRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.reset_instance] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - baremetalsolution.ResetInstanceResponse, - metadata_type=baremetalsolution.OperationMetadata, - ) - - # Done; return the response. - return response - - def start_instance(self, - request: Optional[Union[instance.StartInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Starts a server that was shut down. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - def sample_start_instance(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.StartInstanceRequest( - name="name_value", - ) - - # Make the request - operation = client.start_instance(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bare_metal_solution_v2.types.StartInstanceRequest, dict]): - The request object. Message requesting to start a server. - name (str): - Required. Name of the resource.
- This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.bare_metal_solution_v2.types.StartInstanceResponse` - Response message from starting a server. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, instance.StartInstanceRequest): - request = instance.StartInstanceRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.start_instance] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - instance.StartInstanceResponse, - metadata_type=baremetalsolution.OperationMetadata, - ) - - # Done; return the response. - return response - - def stop_instance(self, - request: Optional[Union[instance.StopInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Stop a running server. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - def sample_stop_instance(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.StopInstanceRequest( - name="name_value", - ) - - # Make the request - operation = client.stop_instance(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bare_metal_solution_v2.types.StopInstanceRequest, dict]): - The request object. Message requesting to stop a server. - name (str): - Required. Name of the resource. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.bare_metal_solution_v2.types.StopInstanceResponse` - Response message from stopping a server. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, instance.StopInstanceRequest): - request = instance.StopInstanceRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.stop_instance] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - instance.StopInstanceResponse, - metadata_type=baremetalsolution.OperationMetadata, - ) - - # Done; return the response. 
- return response - - def enable_interactive_serial_console(self, - request: Optional[Union[instance.EnableInteractiveSerialConsoleRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Enable the interactive serial console feature on an - instance. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - def sample_enable_interactive_serial_console(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.EnableInteractiveSerialConsoleRequest( - name="name_value", - ) - - # Make the request - operation = client.enable_interactive_serial_console(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bare_metal_solution_v2.types.EnableInteractiveSerialConsoleRequest, dict]): - The request object. Message for enabling the interactive - serial console on an instance. - name (str): - Required. Name of the resource. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.bare_metal_solution_v2.types.EnableInteractiveSerialConsoleResponse` - Message for response of EnableInteractiveSerialConsole. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, instance.EnableInteractiveSerialConsoleRequest): - request = instance.EnableInteractiveSerialConsoleRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
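- # [Hedged note, illustrative only] The wrapped method looked up below carries the default retry and timeout from the service config; callers may override them per call: - # from google.api_core import retry as retries - # client.enable_interactive_serial_console(name="projects/p/locations/l/instances/i", retry=retries.Retry(), timeout=60.0)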
- rpc = self._transport._wrapped_methods[self._transport.enable_interactive_serial_console] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - instance.EnableInteractiveSerialConsoleResponse, - metadata_type=baremetalsolution.OperationMetadata, - ) - - # Done; return the response. - return response - - def disable_interactive_serial_console(self, - request: Optional[Union[instance.DisableInteractiveSerialConsoleRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Disable the interactive serial console feature on an - instance. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - def sample_disable_interactive_serial_console(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.DisableInteractiveSerialConsoleRequest( - name="name_value", - ) - - # Make the request - operation = client.disable_interactive_serial_console(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bare_metal_solution_v2.types.DisableInteractiveSerialConsoleRequest, dict]): - The request object. Message for disabling the interactive - serial console on an instance. - name (str): - Required. Name of the resource. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.bare_metal_solution_v2.types.DisableInteractiveSerialConsoleResponse` - Message for response of DisableInteractiveSerialConsole. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
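- # [Hedged sketch, illustrative only] Like the other mutating RPCs here, this returns a long-running operation whose result can be awaited synchronously: - # op = client.disable_interactive_serial_console(name="projects/p/locations/l/instances/i") - # response = op.result(timeout=300) # DisableInteractiveSerialConsoleResponse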
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, instance.DisableInteractiveSerialConsoleRequest): - request = instance.DisableInteractiveSerialConsoleRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.disable_interactive_serial_console] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - instance.DisableInteractiveSerialConsoleResponse, - metadata_type=baremetalsolution.OperationMetadata, - ) - - # Done; return the response. - return response - - def detach_lun(self, - request: Optional[Union[gcb_instance.DetachLunRequest, dict]] = None, - *, - instance: Optional[str] = None, - lun: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Detach a LUN from an Instance. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - def sample_detach_lun(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.DetachLunRequest( - instance="instance_value", - lun="lun_value", - ) - - # Make the request - operation = client.detach_lun(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bare_metal_solution_v2.types.DetachLunRequest, dict]): - The request object. Message for detaching a specific LUN - from an Instance. - instance (str): - Required. Name of the instance. - This corresponds to the ``instance`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - lun (str): - Required. Name of the Lun to detach. - This corresponds to the ``lun`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried.
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.bare_metal_solution_v2.types.Instance` - A server. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [instance, lun] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gcb_instance.DetachLunRequest): - request = gcb_instance.DetachLunRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if instance is not None: - request.instance = instance - if lun is not None: - request.lun = lun - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.detach_lun] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("instance", request.instance), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - gcb_instance.Instance, - metadata_type=baremetalsolution.OperationMetadata, - ) - - # Done; return the response. - return response - - def list_ssh_keys(self, - request: Optional[Union[ssh_key.ListSSHKeysRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListSSHKeysPager: - r"""Lists the public SSH keys registered for the - specified project. These SSH keys are used only for the - interactive serial console feature. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - def sample_list_ssh_keys(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.ListSSHKeysRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_ssh_keys(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.bare_metal_solution_v2.types.ListSSHKeysRequest, dict]): - The request object. Message for listing the public SSH - keys in a project. - parent (str): - Required. The parent containing the - SSH keys. Currently, the only valid - value for the location is "global". - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListSSHKeysPager: - Message for response of ListSSHKeys. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, ssh_key.ListSSHKeysRequest): - request = ssh_key.ListSSHKeysRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_ssh_keys] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListSSHKeysPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def create_ssh_key(self, - request: Optional[Union[gcb_ssh_key.CreateSSHKeyRequest, dict]] = None, - *, - parent: Optional[str] = None, - ssh_key: Optional[gcb_ssh_key.SSHKey] = None, - ssh_key_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gcb_ssh_key.SSHKey: - r"""Register a public SSH key in the specified project - for use with the interactive serial console feature. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - def sample_create_ssh_key(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.CreateSSHKeyRequest( - parent="parent_value", - ssh_key_id="ssh_key_id_value", - ) - - # Make the request - response = client.create_ssh_key(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bare_metal_solution_v2.types.CreateSSHKeyRequest, dict]): - The request object. Message for registering a public SSH - key in a project. - parent (str): - Required. The parent containing the - SSH keys. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - ssh_key (google.cloud.bare_metal_solution_v2.types.SSHKey): - Required. The SSH key to register. - This corresponds to the ``ssh_key`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - ssh_key_id (str): - Required. The ID to use for the key, which will become - the final component of the key's resource name. - - This value must match the regex: [a-zA-Z0-9@.-_]{1,64} - - This corresponds to the ``ssh_key_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bare_metal_solution_v2.types.SSHKey: - An SSH key, used for authorizing with - the interactive serial console feature. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, ssh_key, ssh_key_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
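- # [Hedged note, illustrative only] A plain dict is also accepted and coerced into the proto message by the constructor below; the "public_key" field name is an assumption about the SSHKey type: - # client.create_ssh_key(request={"parent": "projects/p/locations/global", "ssh_key_id": "my-key", "ssh_key": {"public_key": "ssh-rsa AAAA..."}})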
- if not isinstance(request, gcb_ssh_key.CreateSSHKeyRequest): - request = gcb_ssh_key.CreateSSHKeyRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if ssh_key is not None: - request.ssh_key = ssh_key - if ssh_key_id is not None: - request.ssh_key_id = ssh_key_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_ssh_key] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_ssh_key(self, - request: Optional[Union[ssh_key.DeleteSSHKeyRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a public SSH key registered in the specified - project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - def sample_delete_ssh_key(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.DeleteSSHKeyRequest( - name="name_value", - ) - - # Make the request - client.delete_ssh_key(request=request) - - Args: - request (Union[google.cloud.bare_metal_solution_v2.types.DeleteSSHKeyRequest, dict]): - The request object. Message for deleting an SSH key from - a project. - name (str): - Required. The name of the SSH key to - delete. Currently, the only valid value - for the location is "global". - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
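- # [Hedged note, illustrative only] This RPC has an empty response, so the method returns None; success is simply the absence of an exception: - # client.delete_ssh_key(name="projects/p/locations/global/sshKeys/my-key")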
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, ssh_key.DeleteSSHKeyRequest): - request = ssh_key.DeleteSSHKeyRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_ssh_key] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def list_volumes(self, - request: Optional[Union[volume.ListVolumesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListVolumesPager: - r"""List storage volumes in a given project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - def sample_list_volumes(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.ListVolumesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_volumes(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.bare_metal_solution_v2.types.ListVolumesRequest, dict]): - The request object. Message for requesting a list of - storage volumes. - parent (str): - Required. Parent value for - ListVolumesRequest. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListVolumesPager: - Response message containing the list - of storage volumes. - Iterating over this object will yield - results and resolve additional pages - automatically. 
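- - A hedged, illustrative example of page-wise iteration (assuming the standard pager surface):: - - for page in client.list_volumes(parent="projects/p/locations/l").pages: - for v in page.volumes: - print(v.name)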
- - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, volume.ListVolumesRequest): - request = volume.ListVolumesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_volumes] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListVolumesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_volume(self, - request: Optional[Union[volume.GetVolumeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> volume.Volume: - r"""Get details of a single storage volume. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - def sample_get_volume(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.GetVolumeRequest( - name="name_value", - ) - - # Make the request - response = client.get_volume(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bare_metal_solution_v2.types.GetVolumeRequest, dict]): - The request object. Message for requesting storage volume - information. - name (str): - Required. Name of the resource. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bare_metal_solution_v2.types.Volume: - A storage volume. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, volume.GetVolumeRequest): - request = volume.GetVolumeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_volume] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_volume(self, - request: Optional[Union[gcb_volume.UpdateVolumeRequest, dict]] = None, - *, - volume: Optional[gcb_volume.Volume] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Update details of a single storage volume. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - def sample_update_volume(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.UpdateVolumeRequest( - ) - - # Make the request - operation = client.update_volume(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bare_metal_solution_v2.types.UpdateVolumeRequest, dict]): - The request object. Message for updating a volume. - volume (google.cloud.bare_metal_solution_v2.types.Volume): - Required. The volume to update. - - The ``name`` field is used to identify the volume to - update. Format: - projects/{project}/locations/{location}/volumes/{volume} - - This corresponds to the ``volume`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
-
-            update_mask (google.protobuf.field_mask_pb2.FieldMask):
-                The list of fields to update.
-                The only currently supported fields are:
-
-                  'labels'
-
-                This corresponds to the ``update_mask`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.api_core.operation.Operation:
-                An object representing a long-running operation.
-
-                The result type for the operation will be
-                :class:`google.cloud.bare_metal_solution_v2.types.Volume`
-                A storage volume.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [volume, update_mask]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, gcb_volume.UpdateVolumeRequest):
-            request = gcb_volume.UpdateVolumeRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if volume is not None:
-                request.volume = volume
-            if update_mask is not None:
-                request.update_mask = update_mask
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.update_volume]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("volume.name", request.volume.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Wrap the response in an operation future.
-        response = operation.from_gapic(
-            response,
-            self._transport.operations_client,
-            gcb_volume.Volume,
-            metadata_type=baremetalsolution.OperationMetadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def rename_volume(self,
-            request: Optional[Union[volume.RenameVolumeRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            new_volume_id: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> volume.Volume:
-        r"""RenameVolume sets a new name for a volume.
-        Use with caution; previous names become immediately
-        invalidated.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import bare_metal_solution_v2
-
-            def sample_rename_volume():
-                # Create a client
-                client = bare_metal_solution_v2.BareMetalSolutionClient()
-
-                # Initialize request argument(s)
-                request = bare_metal_solution_v2.RenameVolumeRequest(
-                    name="name_value",
-                    new_volume_id="new_volume_id_value",
-                )
-
-                # Make the request
-                response = client.rename_volume(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.bare_metal_solution_v2.types.RenameVolumeRequest, dict]):
-                The request object. Message requesting rename of a
-                volume.
-            name (str):
-                Required. The ``name`` field is used to identify the
-                volume. Format:
-                projects/{project}/locations/{location}/volumes/{volume}
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            new_volume_id (str):
-                Required. The new ``id`` of the volume.
-                This corresponds to the ``new_volume_id`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.bare_metal_solution_v2.types.Volume:
-                A storage volume.
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [name, new_volume_id]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, volume.RenameVolumeRequest):
-            request = volume.RenameVolumeRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if name is not None:
-                request.name = name
-            if new_volume_id is not None:
-                request.new_volume_id = new_volume_id
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.rename_volume]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
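-        # Note: rename_volume is a plain unary call; unlike the eviction and
-        # update methods nearby, the renamed Volume proto comes back directly
-        # rather than wrapped in a long-running operation future.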
-        return response
-
-    def evict_volume(self,
-            request: Optional[Union[volume.EvictVolumeRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> operation.Operation:
-        r"""Skips the volume's cooloff period and deletes it
-        immediately. The volume must be in cooloff state.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import bare_metal_solution_v2
-
-            def sample_evict_volume():
-                # Create a client
-                client = bare_metal_solution_v2.BareMetalSolutionClient()
-
-                # Initialize request argument(s)
-                request = bare_metal_solution_v2.EvictVolumeRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                operation = client.evict_volume(request=request)
-
-                print("Waiting for operation to complete...")
-
-                response = operation.result()
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.bare_metal_solution_v2.types.EvictVolumeRequest, dict]):
-                The request object. Request to skip volume cooloff and
-                delete it immediately.
-            name (str):
-                Required. The name of the Volume.
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.api_core.operation.Operation:
-                An object representing a long-running operation.
-
-                The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
-                empty messages in your APIs. A typical example is to
-                use it as the request or the response type of an API
-                method. For instance:
-
-                service Foo {
-                    rpc Bar(google.protobuf.Empty) returns
-                    (google.protobuf.Empty);
-
-                }
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [name]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, volume.EvictVolumeRequest):
-            request = volume.EvictVolumeRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if name is not None:
-                request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
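-        # The wrapped methods are built once when the transport is constructed;
-        # each carries the RPC's default retry/timeout policy, which the
-        # explicit `retry=`/`timeout=` arguments above override per call.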
- rpc = self._transport._wrapped_methods[self._transport.evict_volume] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=baremetalsolution.OperationMetadata, - ) - - # Done; return the response. - return response - - def resize_volume(self, - request: Optional[Union[gcb_volume.ResizeVolumeRequest, dict]] = None, - *, - volume: Optional[str] = None, - size_gib: Optional[int] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Emergency Volume resize. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - def sample_resize_volume(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.ResizeVolumeRequest( - volume="volume_value", - ) - - # Make the request - operation = client.resize_volume(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bare_metal_solution_v2.types.ResizeVolumeRequest, dict]): - The request object. Request for emergency resize Volume. - volume (str): - Required. Volume to resize. - This corresponds to the ``volume`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - size_gib (int): - New Volume size, in GiB. - This corresponds to the ``size_gib`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.bare_metal_solution_v2.types.Volume` - A storage volume. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
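-        #   For example, client.resize_volume(request=req, volume="v") raises
-        #   the ValueError below, while passing request=req alone or the
-        #   flattened volume=/size_gib= arguments alone is accepted.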
-        flattened_params = [volume, size_gib]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, gcb_volume.ResizeVolumeRequest):
-            request = gcb_volume.ResizeVolumeRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if volume is not None:
-                request.volume = volume
-            if size_gib is not None:
-                request.size_gib = size_gib
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.resize_volume]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("volume", request.volume),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Wrap the response in an operation future.
-        response = operation.from_gapic(
-            response,
-            self._transport.operations_client,
-            gcb_volume.Volume,
-            metadata_type=baremetalsolution.OperationMetadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def list_networks(self,
-            request: Optional[Union[network.ListNetworksRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> pagers.ListNetworksPager:
-        r"""List networks in a given project and location.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import bare_metal_solution_v2
-
-            def sample_list_networks():
-                # Create a client
-                client = bare_metal_solution_v2.BareMetalSolutionClient()
-
-                # Initialize request argument(s)
-                request = bare_metal_solution_v2.ListNetworksRequest(
-                    parent="parent_value",
-                )
-
-                # Make the request
-                page_result = client.list_networks(request=request)
-
-                # Handle the response
-                for response in page_result:
-                    print(response)
-
-        Args:
-            request (Union[google.cloud.bare_metal_solution_v2.types.ListNetworksRequest, dict]):
-                The request object. Message for requesting a list of
-                networks.
-            parent (str):
-                Required. Parent value for
-                ListNetworksRequest.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListNetworksPager: - Response message containing the list - of networks. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, network.ListNetworksRequest): - request = network.ListNetworksRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_networks] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListNetworksPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_network_usage(self, - request: Optional[Union[network.ListNetworkUsageRequest, dict]] = None, - *, - location: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> network.ListNetworkUsageResponse: - r"""List all Networks (and used IPs for each Network) in - the vendor account associated with the specified - project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - def sample_list_network_usage(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.ListNetworkUsageRequest( - location="location_value", - ) - - # Make the request - response = client.list_network_usage(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bare_metal_solution_v2.types.ListNetworkUsageRequest, dict]): - The request object. Request to get networks with IPs. - location (str): - Required. Parent value (project and - location). - - This corresponds to the ``location`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bare_metal_solution_v2.types.ListNetworkUsageResponse: - Response with Networks with IPs - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [location] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, network.ListNetworkUsageRequest): - request = network.ListNetworkUsageRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if location is not None: - request.location = location - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_network_usage] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("location", request.location), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_network(self, - request: Optional[Union[network.GetNetworkRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> network.Network: - r"""Get details of a single network. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - def sample_get_network(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.GetNetworkRequest( - name="name_value", - ) - - # Make the request - response = client.get_network(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bare_metal_solution_v2.types.GetNetworkRequest, dict]): - The request object. Message for requesting network - information. - name (str): - Required. Name of the resource. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bare_metal_solution_v2.types.Network: - A Network. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, network.GetNetworkRequest): - request = network.GetNetworkRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_network] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_network(self, - request: Optional[Union[gcb_network.UpdateNetworkRequest, dict]] = None, - *, - network: Optional[gcb_network.Network] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Update details of a single network. - - .. 
code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import bare_metal_solution_v2
-
-            def sample_update_network():
-                # Create a client
-                client = bare_metal_solution_v2.BareMetalSolutionClient()
-
-                # Initialize request argument(s)
-                request = bare_metal_solution_v2.UpdateNetworkRequest(
-                )
-
-                # Make the request
-                operation = client.update_network(request=request)
-
-                print("Waiting for operation to complete...")
-
-                response = operation.result()
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.bare_metal_solution_v2.types.UpdateNetworkRequest, dict]):
-                The request object. Message requesting to update a
-                network.
-            network (google.cloud.bare_metal_solution_v2.types.Network):
-                Required. The network to update.
-
-                The ``name`` field is used to identify the network to
-                update. Format:
-                projects/{project}/locations/{location}/networks/{network}
-
-                This corresponds to the ``network`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            update_mask (google.protobuf.field_mask_pb2.FieldMask):
-                The list of fields to update. The only currently
-                supported fields are: ``labels``, ``reservations``,
-                ``vrf.vlan_attachments``
-
-                This corresponds to the ``update_mask`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.api_core.operation.Operation:
-                An object representing a long-running operation.
-
-                The result type for the operation will be
-                :class:`google.cloud.bare_metal_solution_v2.types.Network`
-                A Network.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [network, update_mask]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, gcb_network.UpdateNetworkRequest):
-            request = gcb_network.UpdateNetworkRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if network is not None:
-                request.network = network
-            if update_mask is not None:
-                request.update_mask = update_mask
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
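-        # "Friendly error handling" here means grpc errors raised by the
-        # transport surface as google.api_core.exceptions.GoogleAPICallError
-        # subclasses, so callers never need to catch raw grpc exceptions.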
- rpc = self._transport._wrapped_methods[self._transport.update_network] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("network.name", request.network.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - gcb_network.Network, - metadata_type=baremetalsolution.OperationMetadata, - ) - - # Done; return the response. - return response - - def create_volume_snapshot(self, - request: Optional[Union[gcb_volume_snapshot.CreateVolumeSnapshotRequest, dict]] = None, - *, - parent: Optional[str] = None, - volume_snapshot: Optional[gcb_volume_snapshot.VolumeSnapshot] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gcb_volume_snapshot.VolumeSnapshot: - r"""Takes a snapshot of a boot volume. Returns INVALID_ARGUMENT if - called for a non-boot volume. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - def sample_create_volume_snapshot(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.CreateVolumeSnapshotRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_volume_snapshot(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bare_metal_solution_v2.types.CreateVolumeSnapshotRequest, dict]): - The request object. Message for creating a volume - snapshot. - parent (str): - Required. The volume to snapshot. - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - volume_snapshot (google.cloud.bare_metal_solution_v2.types.VolumeSnapshot): - Required. The snapshot to create. - This corresponds to the ``volume_snapshot`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bare_metal_solution_v2.types.VolumeSnapshot: - A snapshot of a volume. Only boot - volumes can have snapshots. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
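-        #   Here the flattened arguments are `parent` (the volume being
-        #   snapshotted) and `volume_snapshot`; they are mutually exclusive
-        #   with passing a fully populated `request` object.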
- flattened_params = [parent, volume_snapshot] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gcb_volume_snapshot.CreateVolumeSnapshotRequest): - request = gcb_volume_snapshot.CreateVolumeSnapshotRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if volume_snapshot is not None: - request.volume_snapshot = volume_snapshot - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_volume_snapshot] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def restore_volume_snapshot(self, - request: Optional[Union[gcb_volume_snapshot.RestoreVolumeSnapshotRequest, dict]] = None, - *, - volume_snapshot: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Uses the specified snapshot to restore its parent volume. - Returns INVALID_ARGUMENT if called for a non-boot volume. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - def sample_restore_volume_snapshot(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.RestoreVolumeSnapshotRequest( - volume_snapshot="volume_snapshot_value", - ) - - # Make the request - operation = client.restore_volume_snapshot(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bare_metal_solution_v2.types.RestoreVolumeSnapshotRequest, dict]): - The request object. Message for restoring a volume - snapshot. - volume_snapshot (str): - Required. Name of the snapshot which - will be used to restore its parent - volume. - - This corresponds to the ``volume_snapshot`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.bare_metal_solution_v2.types.VolumeSnapshot` - A snapshot of a volume. Only boot volumes can have - snapshots. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [volume_snapshot] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gcb_volume_snapshot.RestoreVolumeSnapshotRequest): - request = gcb_volume_snapshot.RestoreVolumeSnapshotRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if volume_snapshot is not None: - request.volume_snapshot = volume_snapshot - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.restore_volume_snapshot] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("volume_snapshot", request.volume_snapshot), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - gcb_volume_snapshot.VolumeSnapshot, - metadata_type=baremetalsolution.OperationMetadata, - ) - - # Done; return the response. - return response - - def delete_volume_snapshot(self, - request: Optional[Union[volume_snapshot.DeleteVolumeSnapshotRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a volume snapshot. Returns INVALID_ARGUMENT if called - for a non-boot volume. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - def sample_delete_volume_snapshot(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.DeleteVolumeSnapshotRequest( - name="name_value", - ) - - # Make the request - client.delete_volume_snapshot(request=request) - - Args: - request (Union[google.cloud.bare_metal_solution_v2.types.DeleteVolumeSnapshotRequest, dict]): - The request object. Message for deleting named Volume - snapshot. - name (str): - Required. The name of the snapshot to - delete. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, volume_snapshot.DeleteVolumeSnapshotRequest): - request = volume_snapshot.DeleteVolumeSnapshotRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_volume_snapshot] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def get_volume_snapshot(self, - request: Optional[Union[volume_snapshot.GetVolumeSnapshotRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> volume_snapshot.VolumeSnapshot: - r"""Returns the specified snapshot resource. Returns - INVALID_ARGUMENT if called for a non-boot volume. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - def sample_get_volume_snapshot(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.GetVolumeSnapshotRequest( - name="name_value", - ) - - # Make the request - response = client.get_volume_snapshot(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bare_metal_solution_v2.types.GetVolumeSnapshotRequest, dict]): - The request object. Message for requesting volume - snapshot information. - name (str): - Required. The name of the snapshot. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bare_metal_solution_v2.types.VolumeSnapshot: - A snapshot of a volume. Only boot - volumes can have snapshots. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, volume_snapshot.GetVolumeSnapshotRequest): - request = volume_snapshot.GetVolumeSnapshotRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_volume_snapshot] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_volume_snapshots(self, - request: Optional[Union[volume_snapshot.ListVolumeSnapshotsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListVolumeSnapshotsPager: - r"""Retrieves the list of snapshots for the specified - volume. Returns a response with an empty list of - snapshots if called for a non-boot volume. - - .. 
code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import bare_metal_solution_v2
-
-            def sample_list_volume_snapshots():
-                # Create a client
-                client = bare_metal_solution_v2.BareMetalSolutionClient()
-
-                # Initialize request argument(s)
-                request = bare_metal_solution_v2.ListVolumeSnapshotsRequest(
-                    parent="parent_value",
-                )
-
-                # Make the request
-                page_result = client.list_volume_snapshots(request=request)
-
-                # Handle the response
-                for response in page_result:
-                    print(response)
-
-        Args:
-            request (Union[google.cloud.bare_metal_solution_v2.types.ListVolumeSnapshotsRequest, dict]):
-                The request object. Message for requesting a list of
-                volume snapshots.
-            parent (str):
-                Required. Parent value for
-                ListVolumeSnapshotsRequest.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListVolumeSnapshotsPager:
-                Response message containing the list
-                of volume snapshots.
-                Iterating over this object will yield
-                results and resolve additional pages
-                automatically.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [parent]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, volume_snapshot.ListVolumeSnapshotsRequest):
-            request = volume_snapshot.ListVolumeSnapshotsRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if parent is not None:
-                request.parent = parent
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.list_volume_snapshots]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # This method is paged; wrap the response in a pager, which provides
-        # an `__iter__` convenience method.
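-        # The pager keeps the wrapped method, the request, and the first
-        # response; iterating past the items already fetched re-issues the
-        # RPC with the next page token, so later pages load on demand.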
-        response = pagers.ListVolumeSnapshotsPager(
-            method=rpc,
-            request=request,
-            response=response,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def get_lun(self,
-            request: Optional[Union[lun.GetLunRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> lun.Lun:
-        r"""Get details of a single storage logical unit
-        number (LUN).
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import bare_metal_solution_v2
-
-            def sample_get_lun():
-                # Create a client
-                client = bare_metal_solution_v2.BareMetalSolutionClient()
-
-                # Initialize request argument(s)
-                request = bare_metal_solution_v2.GetLunRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                response = client.get_lun(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.bare_metal_solution_v2.types.GetLunRequest, dict]):
-                The request object. Message for requesting storage LUN
-                information.
-            name (str):
-                Required. Name of the resource.
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.bare_metal_solution_v2.types.Lun:
-                A storage volume logical unit number
-                (LUN).
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [name]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, lun.GetLunRequest):
-            request = lun.GetLunRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if name is not None:
-                request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.get_lun]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
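-        # This is a blocking unary call: it returns the Lun proto on success,
-        # or raises a google.api_core.exceptions error once any configured
-        # retries are exhausted.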
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def list_luns(self,
-            request: Optional[Union[lun.ListLunsRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> pagers.ListLunsPager:
-        r"""List storage volume LUNs for a given storage volume.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import bare_metal_solution_v2
-
-            def sample_list_luns():
-                # Create a client
-                client = bare_metal_solution_v2.BareMetalSolutionClient()
-
-                # Initialize request argument(s)
-                request = bare_metal_solution_v2.ListLunsRequest(
-                    parent="parent_value",
-                )
-
-                # Make the request
-                page_result = client.list_luns(request=request)
-
-                # Handle the response
-                for response in page_result:
-                    print(response)
-
-        Args:
-            request (Union[google.cloud.bare_metal_solution_v2.types.ListLunsRequest, dict]):
-                The request object. Message for requesting a list of
-                storage volume LUNs.
-            parent (str):
-                Required. Parent value for
-                ListLunsRequest.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListLunsPager:
-                Response message containing the list
-                of storage volume LUNs.
-                Iterating over this object will yield
-                results and resolve additional pages
-                automatically.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [parent]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, lun.ListLunsRequest):
-            request = lun.ListLunsRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if parent is not None:
-                request.parent = parent
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.list_luns]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
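-        # to_grpc_metadata renders the ("parent", ...) pair as the standard
-        # `x-goog-request-params` header, which the service uses to route
-        # the request.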
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # This method is paged; wrap the response in a pager, which provides
-        # an `__iter__` convenience method.
-        response = pagers.ListLunsPager(
-            method=rpc,
-            request=request,
-            response=response,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def evict_lun(self,
-            request: Optional[Union[lun.EvictLunRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> operation.Operation:
-        r"""Skips a LUN's cooloff and deletes it now.
-        The LUN must be in cooloff state.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import bare_metal_solution_v2
-
-            def sample_evict_lun():
-                # Create a client
-                client = bare_metal_solution_v2.BareMetalSolutionClient()
-
-                # Initialize request argument(s)
-                request = bare_metal_solution_v2.EvictLunRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                operation = client.evict_lun(request=request)
-
-                print("Waiting for operation to complete...")
-
-                response = operation.result()
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.bare_metal_solution_v2.types.EvictLunRequest, dict]):
-                The request object. Request to skip LUN cooloff and
-                delete it.
-            name (str):
-                Required. The name of the LUN.
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.api_core.operation.Operation:
-                An object representing a long-running operation.
-
-                The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
-                empty messages in your APIs. A typical example is to
-                use it as the request or the response type of an API
-                method. For instance:
-
-                service Foo {
-                    rpc Bar(google.protobuf.Empty) returns
-                    (google.protobuf.Empty);
-
-                }
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, lun.EvictLunRequest): - request = lun.EvictLunRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.evict_lun] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=baremetalsolution.OperationMetadata, - ) - - # Done; return the response. - return response - - def get_nfs_share(self, - request: Optional[Union[nfs_share.GetNfsShareRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> nfs_share.NfsShare: - r"""Get details of a single NFS share. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - def sample_get_nfs_share(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.GetNfsShareRequest( - name="name_value", - ) - - # Make the request - response = client.get_nfs_share(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bare_metal_solution_v2.types.GetNfsShareRequest, dict]): - The request object. Message for requesting NFS share - information. - name (str): - Required. Name of the resource. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bare_metal_solution_v2.types.NfsShare: - An NFS share. 
- """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, nfs_share.GetNfsShareRequest): - request = nfs_share.GetNfsShareRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_nfs_share] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_nfs_shares(self, - request: Optional[Union[nfs_share.ListNfsSharesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListNfsSharesPager: - r"""List NFS shares. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - def sample_list_nfs_shares(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.ListNfsSharesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_nfs_shares(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.bare_metal_solution_v2.types.ListNfsSharesRequest, dict]): - The request object. Message for requesting a list of NFS - shares. - parent (str): - Required. Parent value for - ListNfsSharesRequest. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
-
-        Returns:
-            google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListNfsSharesPager:
-                Response message containing the list
-                of NFS shares.
-                Iterating over this object will yield
-                results and resolve additional pages
-                automatically.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [parent]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, nfs_share.ListNfsSharesRequest):
-            request = nfs_share.ListNfsSharesRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if parent is not None:
-                request.parent = parent
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.list_nfs_shares]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # This method is paged; wrap the response in a pager, which provides
-        # an `__iter__` convenience method.
-        response = pagers.ListNfsSharesPager(
-            method=rpc,
-            request=request,
-            response=response,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def update_nfs_share(self,
-            request: Optional[Union[gcb_nfs_share.UpdateNfsShareRequest, dict]] = None,
-            *,
-            nfs_share: Optional[gcb_nfs_share.NfsShare] = None,
-            update_mask: Optional[field_mask_pb2.FieldMask] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> operation.Operation:
-        r"""Update details of a single NFS share.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import bare_metal_solution_v2
-
-            def sample_update_nfs_share():
-                # Create a client
-                client = bare_metal_solution_v2.BareMetalSolutionClient()
-
-                # Initialize request argument(s)
-                request = bare_metal_solution_v2.UpdateNfsShareRequest(
-                )
-
-                # Make the request
-                operation = client.update_nfs_share(request=request)
-
-                print("Waiting for operation to complete...")
-
-                response = operation.result()
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.bare_metal_solution_v2.types.UpdateNfsShareRequest, dict]):
-                The request object. Message requesting to update an NFS
-                share.
-            nfs_share (google.cloud.bare_metal_solution_v2.types.NfsShare):
-                Required. The NFS share to update.
-
-                The ``name`` field is used to identify the NFS share to
-                update. Format:
-                projects/{project}/locations/{location}/nfsShares/{nfs_share}
-
-                This corresponds to the ``nfs_share`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            update_mask (google.protobuf.field_mask_pb2.FieldMask):
-                The list of fields to update. The only currently
-                supported fields are ``labels`` and ``allowed_clients``.
-
-                This corresponds to the ``update_mask`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.api_core.operation.Operation:
-                An object representing a long-running operation.
-
-                The result type for the operation will be
-                :class:`google.cloud.bare_metal_solution_v2.types.NfsShare`
-                An NFS share.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [nfs_share, update_mask]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, gcb_nfs_share.UpdateNfsShareRequest):
-            request = gcb_nfs_share.UpdateNfsShareRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if nfs_share is not None:
-                request.nfs_share = nfs_share
-            if update_mask is not None:
-                request.update_mask = update_mask
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.update_nfs_share]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("nfs_share.name", request.nfs_share.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Wrap the response in an operation future.
-        response = operation.from_gapic(
-            response,
-            self._transport.operations_client,
-            gcb_nfs_share.NfsShare,
-            metadata_type=baremetalsolution.OperationMetadata,
-        )
-
-        # Done; return the response.
- return response - - def create_nfs_share(self, - request: Optional[Union[gcb_nfs_share.CreateNfsShareRequest, dict]] = None, - *, - parent: Optional[str] = None, - nfs_share: Optional[gcb_nfs_share.NfsShare] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Create an NFS share. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - def sample_create_nfs_share(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.CreateNfsShareRequest( - parent="parent_value", - ) - - # Make the request - operation = client.create_nfs_share(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bare_metal_solution_v2.types.CreateNfsShareRequest, dict]): - The request object. Message for creating an NFS share. - parent (str): - Required. The parent project and - location. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - nfs_share (google.cloud.bare_metal_solution_v2.types.NfsShare): - Required. The NfsShare to create. - This corresponds to the ``nfs_share`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.bare_metal_solution_v2.types.NfsShare` - An NFS share. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, nfs_share] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gcb_nfs_share.CreateNfsShareRequest): - request = gcb_nfs_share.CreateNfsShareRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
-            if parent is not None:
-                request.parent = parent
-            if nfs_share is not None:
-                request.nfs_share = nfs_share
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.create_nfs_share]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Wrap the response in an operation future.
-        response = operation.from_gapic(
-            response,
-            self._transport.operations_client,
-            gcb_nfs_share.NfsShare,
-            metadata_type=baremetalsolution.OperationMetadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def rename_nfs_share(self,
-            request: Optional[Union[nfs_share.RenameNfsShareRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            new_nfsshare_id: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> nfs_share.NfsShare:
-        r"""RenameNfsShare sets a new name for an NFS share.
-        Use with caution; previous names become immediately
-        invalidated.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import bare_metal_solution_v2
-
-            def sample_rename_nfs_share():
-                # Create a client
-                client = bare_metal_solution_v2.BareMetalSolutionClient()
-
-                # Initialize request argument(s)
-                request = bare_metal_solution_v2.RenameNfsShareRequest(
-                    name="name_value",
-                    new_nfsshare_id="new_nfsshare_id_value",
-                )
-
-                # Make the request
-                response = client.rename_nfs_share(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.bare_metal_solution_v2.types.RenameNfsShareRequest, dict]):
-                The request object. Message requesting rename of an NFS
-                share.
-            name (str):
-                Required. The ``name`` field is used to identify the
-                NFS share. Format:
-                projects/{project}/locations/{location}/nfsshares/{nfsshare}
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            new_nfsshare_id (str):
-                Required. The new ``id`` of the NFS share.
-                This corresponds to the ``new_nfsshare_id`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.bare_metal_solution_v2.types.NfsShare:
-                An NFS share.
-        """
-        # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name, new_nfsshare_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, nfs_share.RenameNfsShareRequest): - request = nfs_share.RenameNfsShareRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if new_nfsshare_id is not None: - request.new_nfsshare_id = new_nfsshare_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.rename_nfs_share] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_nfs_share(self, - request: Optional[Union[nfs_share.DeleteNfsShareRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Delete an NFS share. The underlying volume is - automatically deleted. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - def sample_delete_nfs_share(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.DeleteNfsShareRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_nfs_share(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bare_metal_solution_v2.types.DeleteNfsShareRequest, dict]): - The request object. Message for deleting an NFS share. - name (str): - Required. The name of the NFS share - to delete. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, nfs_share.DeleteNfsShareRequest): - request = nfs_share.DeleteNfsShareRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_nfs_share] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=baremetalsolution.OperationMetadata, - ) - - # Done; return the response. - return response - - def list_provisioning_quotas(self, - request: Optional[Union[provisioning.ListProvisioningQuotasRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListProvisioningQuotasPager: - r"""List the budget details to provision resources on a - given project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - def sample_list_provisioning_quotas(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.ListProvisioningQuotasRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_provisioning_quotas(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.bare_metal_solution_v2.types.ListProvisioningQuotasRequest, dict]): - The request object. Message for requesting the list of - provisioning quotas. - parent (str): - Required. Parent value for - ListProvisioningQuotasRequest. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListProvisioningQuotasPager: - Response message for the list of - provisioning quotas. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, provisioning.ListProvisioningQuotasRequest): - request = provisioning.ListProvisioningQuotasRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_provisioning_quotas] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListProvisioningQuotasPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
-        return response
-
-    def submit_provisioning_config(self,
-            request: Optional[Union[provisioning.SubmitProvisioningConfigRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            provisioning_config: Optional[provisioning.ProvisioningConfig] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> provisioning.SubmitProvisioningConfigResponse:
-        r"""Submit a provisioning configuration for a given
-        project.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import bare_metal_solution_v2
-
-            def sample_submit_provisioning_config():
-                # Create a client
-                client = bare_metal_solution_v2.BareMetalSolutionClient()
-
-                # Initialize request argument(s)
-                request = bare_metal_solution_v2.SubmitProvisioningConfigRequest(
-                    parent="parent_value",
-                )
-
-                # Make the request
-                response = client.submit_provisioning_config(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.bare_metal_solution_v2.types.SubmitProvisioningConfigRequest, dict]):
-                The request object. Request for SubmitProvisioningConfig.
-            parent (str):
-                Required. The parent project and
-                location containing the
-                ProvisioningConfig.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            provisioning_config (google.cloud.bare_metal_solution_v2.types.ProvisioningConfig):
-                Required. The ProvisioningConfig to
-                create.
-
-                This corresponds to the ``provisioning_config`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.bare_metal_solution_v2.types.SubmitProvisioningConfigResponse:
-                Response for
-                SubmitProvisioningConfig.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [parent, provisioning_config]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, provisioning.SubmitProvisioningConfigRequest):
-            request = provisioning.SubmitProvisioningConfigRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
- if parent is not None: - request.parent = parent - if provisioning_config is not None: - request.provisioning_config = provisioning_config - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.submit_provisioning_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_provisioning_config(self, - request: Optional[Union[provisioning.GetProvisioningConfigRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> provisioning.ProvisioningConfig: - r"""Get ProvisioningConfig by name. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - def sample_get_provisioning_config(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.GetProvisioningConfigRequest( - name="name_value", - ) - - # Make the request - response = client.get_provisioning_config(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bare_metal_solution_v2.types.GetProvisioningConfigRequest, dict]): - The request object. Request for GetProvisioningConfig. - name (str): - Required. Name of the - ProvisioningConfig. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bare_metal_solution_v2.types.ProvisioningConfig: - A provisioning configuration. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
- if not isinstance(request, provisioning.GetProvisioningConfigRequest): - request = provisioning.GetProvisioningConfigRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_provisioning_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_provisioning_config(self, - request: Optional[Union[provisioning.CreateProvisioningConfigRequest, dict]] = None, - *, - parent: Optional[str] = None, - provisioning_config: Optional[provisioning.ProvisioningConfig] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> provisioning.ProvisioningConfig: - r"""Create new ProvisioningConfig. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - def sample_create_provisioning_config(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.CreateProvisioningConfigRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_provisioning_config(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bare_metal_solution_v2.types.CreateProvisioningConfigRequest, dict]): - The request object. Request for CreateProvisioningConfig. - parent (str): - Required. The parent project and - location containing the - ProvisioningConfig. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - provisioning_config (google.cloud.bare_metal_solution_v2.types.ProvisioningConfig): - Required. The ProvisioningConfig to - create. - - This corresponds to the ``provisioning_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bare_metal_solution_v2.types.ProvisioningConfig: - A provisioning configuration. - """ - # Create or coerce a protobuf request object. 
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, provisioning_config] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, provisioning.CreateProvisioningConfigRequest): - request = provisioning.CreateProvisioningConfigRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if provisioning_config is not None: - request.provisioning_config = provisioning_config - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_provisioning_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_provisioning_config(self, - request: Optional[Union[provisioning.UpdateProvisioningConfigRequest, dict]] = None, - *, - provisioning_config: Optional[provisioning.ProvisioningConfig] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> provisioning.ProvisioningConfig: - r"""Update existing ProvisioningConfig. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bare_metal_solution_v2 - - def sample_update_provisioning_config(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.UpdateProvisioningConfigRequest( - ) - - # Make the request - response = client.update_provisioning_config(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bare_metal_solution_v2.types.UpdateProvisioningConfigRequest, dict]): - The request object. Message for updating a - ProvisioningConfig. - provisioning_config (google.cloud.bare_metal_solution_v2.types.ProvisioningConfig): - Required. The ProvisioningConfig to - update. - - This corresponds to the ``provisioning_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. The list of fields to - update. 
-
-                This corresponds to the ``update_mask`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.bare_metal_solution_v2.types.ProvisioningConfig:
-                A provisioning configuration.
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [provisioning_config, update_mask]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, provisioning.UpdateProvisioningConfigRequest):
-            request = provisioning.UpdateProvisioningConfigRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if provisioning_config is not None:
-                request.provisioning_config = provisioning_config
-            if update_mask is not None:
-                request.update_mask = update_mask
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.update_provisioning_config]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("provisioning_config.name", request.provisioning_config.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def rename_network(self,
-            request: Optional[Union[network.RenameNetworkRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            new_network_id: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> network.Network:
-        r"""RenameNetwork sets a new name for a network.
-        Use with caution; previous names become immediately
-        invalidated.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import bare_metal_solution_v2
-
-            def sample_rename_network():
-                # Create a client
-                client = bare_metal_solution_v2.BareMetalSolutionClient()
-
-                # Initialize request argument(s)
-                request = bare_metal_solution_v2.RenameNetworkRequest(
-                    name="name_value",
-                    new_network_id="new_network_id_value",
-                )
-
-                # Make the request
-                response = client.rename_network(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.bare_metal_solution_v2.types.RenameNetworkRequest, dict]):
-                The request object. Message requesting rename of a
-                network.
-            name (str):
-                Required. The ``name`` field is used to identify the
-                network. Format:
-                projects/{project}/locations/{location}/networks/{network}
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            new_network_id (str):
-                Required. The new ``id`` of the network.
-                This corresponds to the ``new_network_id`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.bare_metal_solution_v2.types.Network:
-                A Network.
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [name, new_network_id]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, network.RenameNetworkRequest):
-            request = network.RenameNetworkRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if name is not None:
-                request.name = name
-            if new_network_id is not None:
-                request.new_network_id = new_network_id
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.rename_network]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def list_os_images(self,
-            request: Optional[Union[osimage.ListOSImagesRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> pagers.ListOSImagesPager:
-        r"""Retrieves the list of OS images which are currently
-        approved.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import bare_metal_solution_v2
-
-            def sample_list_os_images():
-                # Create a client
-                client = bare_metal_solution_v2.BareMetalSolutionClient()
-
-                # Initialize request argument(s)
-                request = bare_metal_solution_v2.ListOSImagesRequest(
-                    parent="parent_value",
-                )
-
-                # Make the request
-                page_result = client.list_os_images(request=request)
-
-                # Handle the response
-                for response in page_result:
-                    print(response)
-
-        Args:
-            request (Union[google.cloud.bare_metal_solution_v2.types.ListOSImagesRequest, dict]):
-                The request object. Request for getting all available OS
-                images.
-            parent (str):
-                Required. Parent value for
-                ListOSImagesRequest.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListOSImagesPager:
-                Response message for getting all available OS
-                images.
-                Iterating over this object will yield
-                results and resolve additional pages
-                automatically.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [parent]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, osimage.ListOSImagesRequest):
-            request = osimage.ListOSImagesRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if parent is not None:
-                request.parent = parent
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.list_os_images]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListOSImagesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "BareMetalSolutionClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "BareMetalSolutionClient", -) diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/pagers.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/pagers.py deleted file mode 100644 index 98285b55c657..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/pagers.py +++ /dev/null @@ -1,1286 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.bare_metal_solution_v2.types import instance -from google.cloud.bare_metal_solution_v2.types import lun -from google.cloud.bare_metal_solution_v2.types import network -from google.cloud.bare_metal_solution_v2.types import nfs_share -from google.cloud.bare_metal_solution_v2.types import osimage -from google.cloud.bare_metal_solution_v2.types import provisioning -from google.cloud.bare_metal_solution_v2.types import ssh_key -from google.cloud.bare_metal_solution_v2.types import volume -from google.cloud.bare_metal_solution_v2.types import volume_snapshot - - -class ListInstancesPager: - """A pager for iterating through ``list_instances`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bare_metal_solution_v2.types.ListInstancesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``instances`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListInstances`` requests and continue to iterate - through the ``instances`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bare_metal_solution_v2.types.ListInstancesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., instance.ListInstancesResponse], - request: instance.ListInstancesRequest, - response: instance.ListInstancesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bare_metal_solution_v2.types.ListInstancesRequest): - The initial request object. - response (google.cloud.bare_metal_solution_v2.types.ListInstancesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
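-
-        Example:
-            A minimal sketch of typical use. The pager is normally obtained
-            from ``BareMetalSolutionClient.list_instances`` rather than
-            constructed directly, and the ``parent`` value here is a
-            hypothetical placeholder:
-
-            .. code-block:: python
-
-                from google.cloud import bare_metal_solution_v2
-
-                client = bare_metal_solution_v2.BareMetalSolutionClient()
-                pager = client.list_instances(
-                    parent="projects/my-project/locations/my-location",
-                )
-                # Iterating the pager yields Instance messages, transparently
-                # fetching additional pages as needed.
-                for bms_instance in pager:
-                    print(bms_instance.name)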
- """ - self._method = method - self._request = instance.ListInstancesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[instance.ListInstancesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[instance.Instance]: - for page in self.pages: - yield from page.instances - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListInstancesAsyncPager: - """A pager for iterating through ``list_instances`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bare_metal_solution_v2.types.ListInstancesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``instances`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListInstances`` requests and continue to iterate - through the ``instances`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bare_metal_solution_v2.types.ListInstancesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[instance.ListInstancesResponse]], - request: instance.ListInstancesRequest, - response: instance.ListInstancesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bare_metal_solution_v2.types.ListInstancesRequest): - The initial request object. - response (google.cloud.bare_metal_solution_v2.types.ListInstancesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = instance.ListInstancesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[instance.ListInstancesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[instance.Instance]: - async def async_generator(): - async for page in self.pages: - for response in page.instances: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListSSHKeysPager: - """A pager for iterating through ``list_ssh_keys`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bare_metal_solution_v2.types.ListSSHKeysResponse` object, and - provides an ``__iter__`` method to iterate through its - ``ssh_keys`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListSSHKeys`` requests and continue to iterate - through the ``ssh_keys`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bare_metal_solution_v2.types.ListSSHKeysResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., ssh_key.ListSSHKeysResponse], - request: ssh_key.ListSSHKeysRequest, - response: ssh_key.ListSSHKeysResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bare_metal_solution_v2.types.ListSSHKeysRequest): - The initial request object. - response (google.cloud.bare_metal_solution_v2.types.ListSSHKeysResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = ssh_key.ListSSHKeysRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[ssh_key.ListSSHKeysResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[ssh_key.SSHKey]: - for page in self.pages: - yield from page.ssh_keys - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListSSHKeysAsyncPager: - """A pager for iterating through ``list_ssh_keys`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bare_metal_solution_v2.types.ListSSHKeysResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``ssh_keys`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListSSHKeys`` requests and continue to iterate - through the ``ssh_keys`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bare_metal_solution_v2.types.ListSSHKeysResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[ssh_key.ListSSHKeysResponse]], - request: ssh_key.ListSSHKeysRequest, - response: ssh_key.ListSSHKeysResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bare_metal_solution_v2.types.ListSSHKeysRequest): - The initial request object. - response (google.cloud.bare_metal_solution_v2.types.ListSSHKeysResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = ssh_key.ListSSHKeysRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[ssh_key.ListSSHKeysResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[ssh_key.SSHKey]: - async def async_generator(): - async for page in self.pages: - for response in page.ssh_keys: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListVolumesPager: - """A pager for iterating through ``list_volumes`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bare_metal_solution_v2.types.ListVolumesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``volumes`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListVolumes`` requests and continue to iterate - through the ``volumes`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bare_metal_solution_v2.types.ListVolumesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., volume.ListVolumesResponse], - request: volume.ListVolumesRequest, - response: volume.ListVolumesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bare_metal_solution_v2.types.ListVolumesRequest): - The initial request object. - response (google.cloud.bare_metal_solution_v2.types.ListVolumesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = volume.ListVolumesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[volume.ListVolumesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[volume.Volume]: - for page in self.pages: - yield from page.volumes - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListVolumesAsyncPager: - """A pager for iterating through ``list_volumes`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bare_metal_solution_v2.types.ListVolumesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``volumes`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListVolumes`` requests and continue to iterate - through the ``volumes`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bare_metal_solution_v2.types.ListVolumesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[volume.ListVolumesResponse]], - request: volume.ListVolumesRequest, - response: volume.ListVolumesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bare_metal_solution_v2.types.ListVolumesRequest): - The initial request object. - response (google.cloud.bare_metal_solution_v2.types.ListVolumesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = volume.ListVolumesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[volume.ListVolumesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[volume.Volume]: - async def async_generator(): - async for page in self.pages: - for response in page.volumes: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListNetworksPager: - """A pager for iterating through ``list_networks`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bare_metal_solution_v2.types.ListNetworksResponse` object, and - provides an ``__iter__`` method to iterate through its - ``networks`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListNetworks`` requests and continue to iterate - through the ``networks`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bare_metal_solution_v2.types.ListNetworksResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., network.ListNetworksResponse], - request: network.ListNetworksRequest, - response: network.ListNetworksResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bare_metal_solution_v2.types.ListNetworksRequest): - The initial request object. - response (google.cloud.bare_metal_solution_v2.types.ListNetworksResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = network.ListNetworksRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[network.ListNetworksResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[network.Network]: - for page in self.pages: - yield from page.networks - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListNetworksAsyncPager: - """A pager for iterating through ``list_networks`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bare_metal_solution_v2.types.ListNetworksResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``networks`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListNetworks`` requests and continue to iterate - through the ``networks`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bare_metal_solution_v2.types.ListNetworksResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[network.ListNetworksResponse]], - request: network.ListNetworksRequest, - response: network.ListNetworksResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bare_metal_solution_v2.types.ListNetworksRequest): - The initial request object. - response (google.cloud.bare_metal_solution_v2.types.ListNetworksResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = network.ListNetworksRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[network.ListNetworksResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[network.Network]: - async def async_generator(): - async for page in self.pages: - for response in page.networks: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListVolumeSnapshotsPager: - """A pager for iterating through ``list_volume_snapshots`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bare_metal_solution_v2.types.ListVolumeSnapshotsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``volume_snapshots`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListVolumeSnapshots`` requests and continue to iterate - through the ``volume_snapshots`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bare_metal_solution_v2.types.ListVolumeSnapshotsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., volume_snapshot.ListVolumeSnapshotsResponse], - request: volume_snapshot.ListVolumeSnapshotsRequest, - response: volume_snapshot.ListVolumeSnapshotsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bare_metal_solution_v2.types.ListVolumeSnapshotsRequest): - The initial request object. - response (google.cloud.bare_metal_solution_v2.types.ListVolumeSnapshotsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = volume_snapshot.ListVolumeSnapshotsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[volume_snapshot.ListVolumeSnapshotsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[volume_snapshot.VolumeSnapshot]: - for page in self.pages: - yield from page.volume_snapshots - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListVolumeSnapshotsAsyncPager: - """A pager for iterating through ``list_volume_snapshots`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bare_metal_solution_v2.types.ListVolumeSnapshotsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``volume_snapshots`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListVolumeSnapshots`` requests and continue to iterate - through the ``volume_snapshots`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bare_metal_solution_v2.types.ListVolumeSnapshotsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[volume_snapshot.ListVolumeSnapshotsResponse]], - request: volume_snapshot.ListVolumeSnapshotsRequest, - response: volume_snapshot.ListVolumeSnapshotsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bare_metal_solution_v2.types.ListVolumeSnapshotsRequest): - The initial request object. - response (google.cloud.bare_metal_solution_v2.types.ListVolumeSnapshotsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = volume_snapshot.ListVolumeSnapshotsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[volume_snapshot.ListVolumeSnapshotsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[volume_snapshot.VolumeSnapshot]: - async def async_generator(): - async for page in self.pages: - for response in page.volume_snapshots: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListLunsPager: - """A pager for iterating through ``list_luns`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bare_metal_solution_v2.types.ListLunsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``luns`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListLuns`` requests and continue to iterate - through the ``luns`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bare_metal_solution_v2.types.ListLunsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., lun.ListLunsResponse], - request: lun.ListLunsRequest, - response: lun.ListLunsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bare_metal_solution_v2.types.ListLunsRequest): - The initial request object. - response (google.cloud.bare_metal_solution_v2.types.ListLunsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = lun.ListLunsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[lun.ListLunsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[lun.Lun]: - for page in self.pages: - yield from page.luns - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListLunsAsyncPager: - """A pager for iterating through ``list_luns`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bare_metal_solution_v2.types.ListLunsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``luns`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListLuns`` requests and continue to iterate - through the ``luns`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bare_metal_solution_v2.types.ListLunsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[lun.ListLunsResponse]], - request: lun.ListLunsRequest, - response: lun.ListLunsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bare_metal_solution_v2.types.ListLunsRequest): - The initial request object. - response (google.cloud.bare_metal_solution_v2.types.ListLunsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = lun.ListLunsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[lun.ListLunsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[lun.Lun]: - async def async_generator(): - async for page in self.pages: - for response in page.luns: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListNfsSharesPager: - """A pager for iterating through ``list_nfs_shares`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bare_metal_solution_v2.types.ListNfsSharesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``nfs_shares`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListNfsShares`` requests and continue to iterate - through the ``nfs_shares`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bare_metal_solution_v2.types.ListNfsSharesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., nfs_share.ListNfsSharesResponse], - request: nfs_share.ListNfsSharesRequest, - response: nfs_share.ListNfsSharesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bare_metal_solution_v2.types.ListNfsSharesRequest): - The initial request object. - response (google.cloud.bare_metal_solution_v2.types.ListNfsSharesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = nfs_share.ListNfsSharesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[nfs_share.ListNfsSharesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[nfs_share.NfsShare]: - for page in self.pages: - yield from page.nfs_shares - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListNfsSharesAsyncPager: - """A pager for iterating through ``list_nfs_shares`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bare_metal_solution_v2.types.ListNfsSharesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``nfs_shares`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListNfsShares`` requests and continue to iterate - through the ``nfs_shares`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bare_metal_solution_v2.types.ListNfsSharesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[nfs_share.ListNfsSharesResponse]], - request: nfs_share.ListNfsSharesRequest, - response: nfs_share.ListNfsSharesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bare_metal_solution_v2.types.ListNfsSharesRequest): - The initial request object. - response (google.cloud.bare_metal_solution_v2.types.ListNfsSharesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = nfs_share.ListNfsSharesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[nfs_share.ListNfsSharesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[nfs_share.NfsShare]: - async def async_generator(): - async for page in self.pages: - for response in page.nfs_shares: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListProvisioningQuotasPager: - """A pager for iterating through ``list_provisioning_quotas`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bare_metal_solution_v2.types.ListProvisioningQuotasResponse` object, and - provides an ``__iter__`` method to iterate through its - ``provisioning_quotas`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListProvisioningQuotas`` requests and continue to iterate - through the ``provisioning_quotas`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bare_metal_solution_v2.types.ListProvisioningQuotasResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., provisioning.ListProvisioningQuotasResponse], - request: provisioning.ListProvisioningQuotasRequest, - response: provisioning.ListProvisioningQuotasResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bare_metal_solution_v2.types.ListProvisioningQuotasRequest): - The initial request object. - response (google.cloud.bare_metal_solution_v2.types.ListProvisioningQuotasResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = provisioning.ListProvisioningQuotasRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[provisioning.ListProvisioningQuotasResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[provisioning.ProvisioningQuota]: - for page in self.pages: - yield from page.provisioning_quotas - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListProvisioningQuotasAsyncPager: - """A pager for iterating through ``list_provisioning_quotas`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bare_metal_solution_v2.types.ListProvisioningQuotasResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``provisioning_quotas`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListProvisioningQuotas`` requests and continue to iterate - through the ``provisioning_quotas`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bare_metal_solution_v2.types.ListProvisioningQuotasResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[provisioning.ListProvisioningQuotasResponse]], - request: provisioning.ListProvisioningQuotasRequest, - response: provisioning.ListProvisioningQuotasResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bare_metal_solution_v2.types.ListProvisioningQuotasRequest): - The initial request object. - response (google.cloud.bare_metal_solution_v2.types.ListProvisioningQuotasResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = provisioning.ListProvisioningQuotasRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[provisioning.ListProvisioningQuotasResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[provisioning.ProvisioningQuota]: - async def async_generator(): - async for page in self.pages: - for response in page.provisioning_quotas: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListOSImagesPager: - """A pager for iterating through ``list_os_images`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bare_metal_solution_v2.types.ListOSImagesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``os_images`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListOSImages`` requests and continue to iterate - through the ``os_images`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bare_metal_solution_v2.types.ListOSImagesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., osimage.ListOSImagesResponse], - request: osimage.ListOSImagesRequest, - response: osimage.ListOSImagesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bare_metal_solution_v2.types.ListOSImagesRequest): - The initial request object. - response (google.cloud.bare_metal_solution_v2.types.ListOSImagesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = osimage.ListOSImagesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[osimage.ListOSImagesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[osimage.OSImage]: - for page in self.pages: - yield from page.os_images - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListOSImagesAsyncPager: - """A pager for iterating through ``list_os_images`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bare_metal_solution_v2.types.ListOSImagesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``os_images`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListOSImages`` requests and continue to iterate - through the ``os_images`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bare_metal_solution_v2.types.ListOSImagesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[osimage.ListOSImagesResponse]], - request: osimage.ListOSImagesRequest, - response: osimage.ListOSImagesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bare_metal_solution_v2.types.ListOSImagesRequest): - The initial request object. - response (google.cloud.bare_metal_solution_v2.types.ListOSImagesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = osimage.ListOSImagesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[osimage.ListOSImagesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[osimage.OSImage]: - async def async_generator(): - async for page in self.pages: - for response in page.os_images: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/transports/README.rst b/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/transports/README.rst deleted file mode 100644 index bdd872eef937..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`BareMetalSolutionTransport` is the ABC for all transports. -- public child `BareMetalSolutionGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `BareMetalSolutionGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseBareMetalSolutionRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `BareMetalSolutionRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/transports/__init__.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/transports/__init__.py deleted file mode 100644 index fde69e18a5cf..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -from typing import Dict, Type - -from .base import BareMetalSolutionTransport -from .grpc import BareMetalSolutionGrpcTransport -from .grpc_asyncio import BareMetalSolutionGrpcAsyncIOTransport -from .rest import BareMetalSolutionRestTransport -from .rest import BareMetalSolutionRestInterceptor - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[BareMetalSolutionTransport]] -_transport_registry['grpc'] = BareMetalSolutionGrpcTransport -_transport_registry['grpc_asyncio'] = BareMetalSolutionGrpcAsyncIOTransport -_transport_registry['rest'] = BareMetalSolutionRestTransport - -__all__ = ( - 'BareMetalSolutionTransport', - 'BareMetalSolutionGrpcTransport', - 'BareMetalSolutionGrpcAsyncIOTransport', - 'BareMetalSolutionRestTransport', - 'BareMetalSolutionRestInterceptor', -) diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/transports/base.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/transports/base.py deleted file mode 100644 index 245e8ce06113..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/transports/base.py +++ /dev/null @@ -1,807 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
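The deleted ``transports/__init__.py`` above compiles an ``OrderedDict`` registry mapping the transport names ``'grpc'``, ``'grpc_asyncio'``, and ``'rest'`` to their classes; the generated client resolves its ``transport`` argument against this kind of mapping. A rough illustration of the lookup (``_transport_registry`` is private, so this is for orientation only, not a supported API):

from google.cloud.bare_metal_solution_v2.services.bare_metal_solution import transports

# Resolve a transport class by the same name string a client accepts.
transport_cls = transports._transport_registry["grpc_asyncio"]
assert transport_cls is transports.BareMetalSolutionGrpcAsyncIOTransport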
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.bare_metal_solution_v2 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.bare_metal_solution_v2.types import instance -from google.cloud.bare_metal_solution_v2.types import instance as gcb_instance -from google.cloud.bare_metal_solution_v2.types import lun -from google.cloud.bare_metal_solution_v2.types import network -from google.cloud.bare_metal_solution_v2.types import network as gcb_network -from google.cloud.bare_metal_solution_v2.types import nfs_share -from google.cloud.bare_metal_solution_v2.types import nfs_share as gcb_nfs_share -from google.cloud.bare_metal_solution_v2.types import osimage -from google.cloud.bare_metal_solution_v2.types import provisioning -from google.cloud.bare_metal_solution_v2.types import ssh_key -from google.cloud.bare_metal_solution_v2.types import ssh_key as gcb_ssh_key -from google.cloud.bare_metal_solution_v2.types import volume -from google.cloud.bare_metal_solution_v2.types import volume as gcb_volume -from google.cloud.bare_metal_solution_v2.types import volume_snapshot -from google.cloud.bare_metal_solution_v2.types import volume_snapshot as gcb_volume_snapshot -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class BareMetalSolutionTransport(abc.ABC): - """Abstract transport class for BareMetalSolution.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'baremetalsolution.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'baremetalsolution.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. 
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-            always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
-                be used for service account credentials.
-        """
-
-        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
-
-        # Save the scopes.
-        self._scopes = scopes
-        if not hasattr(self, "_ignore_credentials"):
-            self._ignore_credentials: bool = False
-
-        # If no credentials are provided, then determine the appropriate
-        # defaults.
-        if credentials and credentials_file:
-            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")
-
-        if credentials_file is not None:
-            credentials, _ = google.auth.load_credentials_from_file(
-                credentials_file,
-                **scopes_kwargs,
-                quota_project_id=quota_project_id
-            )
-        elif credentials is None and not self._ignore_credentials:
-            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
-            # Don't apply an audience if a credentials file was passed by the user.
-            if hasattr(credentials, "with_gdch_audience"):
-                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)
-
-        # If the credentials are service account credentials, then always try to use self-signed JWT.
-        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
-            credentials = credentials.with_always_use_jwt_access(True)
-
-        # Save the credentials.
-        self._credentials = credentials
-
-        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
-        if ':' not in host:
-            host += ':443'
-        self._host = host
-
-    @property
-    def host(self):
-        return self._host
-
-    def _prep_wrapped_messages(self, client_info):
-        # Precompute the wrapped methods.
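Every entry in the ``_wrapped_methods`` registry below is built with ``gapic_v1.method.wrap_method``, which layers default timeout/retry handling and ``x-goog-api-client`` user-agent metadata around a raw transport callable. A small self-contained sketch of that helper, using a hypothetical ``echo`` function as a stand-in for a gRPC stub:

from google.api_core import gapic_v1

# Stand-in for a transport stub; it accepts the kwargs the wrapper injects.
def echo(request, timeout=None, metadata=None):
    return request

wrapped = gapic_v1.method.wrap_method(
    echo,
    default_timeout=5.0,
    client_info=gapic_v1.client_info.ClientInfo(),
)

# Callers may omit retry/timeout; the wrapped-in defaults are applied.
response = wrapped({"name": "test"})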
- self._wrapped_methods = { - self.list_instances: gapic_v1.method.wrap_method( - self.list_instances, - default_timeout=None, - client_info=client_info, - ), - self.get_instance: gapic_v1.method.wrap_method( - self.get_instance, - default_timeout=None, - client_info=client_info, - ), - self.update_instance: gapic_v1.method.wrap_method( - self.update_instance, - default_timeout=None, - client_info=client_info, - ), - self.rename_instance: gapic_v1.method.wrap_method( - self.rename_instance, - default_timeout=None, - client_info=client_info, - ), - self.reset_instance: gapic_v1.method.wrap_method( - self.reset_instance, - default_timeout=None, - client_info=client_info, - ), - self.start_instance: gapic_v1.method.wrap_method( - self.start_instance, - default_timeout=None, - client_info=client_info, - ), - self.stop_instance: gapic_v1.method.wrap_method( - self.stop_instance, - default_timeout=None, - client_info=client_info, - ), - self.enable_interactive_serial_console: gapic_v1.method.wrap_method( - self.enable_interactive_serial_console, - default_timeout=None, - client_info=client_info, - ), - self.disable_interactive_serial_console: gapic_v1.method.wrap_method( - self.disable_interactive_serial_console, - default_timeout=None, - client_info=client_info, - ), - self.detach_lun: gapic_v1.method.wrap_method( - self.detach_lun, - default_timeout=None, - client_info=client_info, - ), - self.list_ssh_keys: gapic_v1.method.wrap_method( - self.list_ssh_keys, - default_timeout=None, - client_info=client_info, - ), - self.create_ssh_key: gapic_v1.method.wrap_method( - self.create_ssh_key, - default_timeout=None, - client_info=client_info, - ), - self.delete_ssh_key: gapic_v1.method.wrap_method( - self.delete_ssh_key, - default_timeout=None, - client_info=client_info, - ), - self.list_volumes: gapic_v1.method.wrap_method( - self.list_volumes, - default_timeout=None, - client_info=client_info, - ), - self.get_volume: gapic_v1.method.wrap_method( - self.get_volume, - default_timeout=None, - client_info=client_info, - ), - self.update_volume: gapic_v1.method.wrap_method( - self.update_volume, - default_timeout=None, - client_info=client_info, - ), - self.rename_volume: gapic_v1.method.wrap_method( - self.rename_volume, - default_timeout=None, - client_info=client_info, - ), - self.evict_volume: gapic_v1.method.wrap_method( - self.evict_volume, - default_timeout=None, - client_info=client_info, - ), - self.resize_volume: gapic_v1.method.wrap_method( - self.resize_volume, - default_timeout=None, - client_info=client_info, - ), - self.list_networks: gapic_v1.method.wrap_method( - self.list_networks, - default_timeout=None, - client_info=client_info, - ), - self.list_network_usage: gapic_v1.method.wrap_method( - self.list_network_usage, - default_timeout=None, - client_info=client_info, - ), - self.get_network: gapic_v1.method.wrap_method( - self.get_network, - default_timeout=None, - client_info=client_info, - ), - self.update_network: gapic_v1.method.wrap_method( - self.update_network, - default_timeout=None, - client_info=client_info, - ), - self.create_volume_snapshot: gapic_v1.method.wrap_method( - self.create_volume_snapshot, - default_timeout=None, - client_info=client_info, - ), - self.restore_volume_snapshot: gapic_v1.method.wrap_method( - self.restore_volume_snapshot, - default_timeout=None, - client_info=client_info, - ), - self.delete_volume_snapshot: gapic_v1.method.wrap_method( - self.delete_volume_snapshot, - default_timeout=None, - client_info=client_info, - ), - 
self.get_volume_snapshot: gapic_v1.method.wrap_method( - self.get_volume_snapshot, - default_timeout=None, - client_info=client_info, - ), - self.list_volume_snapshots: gapic_v1.method.wrap_method( - self.list_volume_snapshots, - default_timeout=None, - client_info=client_info, - ), - self.get_lun: gapic_v1.method.wrap_method( - self.get_lun, - default_timeout=None, - client_info=client_info, - ), - self.list_luns: gapic_v1.method.wrap_method( - self.list_luns, - default_timeout=None, - client_info=client_info, - ), - self.evict_lun: gapic_v1.method.wrap_method( - self.evict_lun, - default_timeout=None, - client_info=client_info, - ), - self.get_nfs_share: gapic_v1.method.wrap_method( - self.get_nfs_share, - default_timeout=None, - client_info=client_info, - ), - self.list_nfs_shares: gapic_v1.method.wrap_method( - self.list_nfs_shares, - default_timeout=None, - client_info=client_info, - ), - self.update_nfs_share: gapic_v1.method.wrap_method( - self.update_nfs_share, - default_timeout=None, - client_info=client_info, - ), - self.create_nfs_share: gapic_v1.method.wrap_method( - self.create_nfs_share, - default_timeout=None, - client_info=client_info, - ), - self.rename_nfs_share: gapic_v1.method.wrap_method( - self.rename_nfs_share, - default_timeout=None, - client_info=client_info, - ), - self.delete_nfs_share: gapic_v1.method.wrap_method( - self.delete_nfs_share, - default_timeout=None, - client_info=client_info, - ), - self.list_provisioning_quotas: gapic_v1.method.wrap_method( - self.list_provisioning_quotas, - default_timeout=None, - client_info=client_info, - ), - self.submit_provisioning_config: gapic_v1.method.wrap_method( - self.submit_provisioning_config, - default_timeout=None, - client_info=client_info, - ), - self.get_provisioning_config: gapic_v1.method.wrap_method( - self.get_provisioning_config, - default_timeout=None, - client_info=client_info, - ), - self.create_provisioning_config: gapic_v1.method.wrap_method( - self.create_provisioning_config, - default_timeout=None, - client_info=client_info, - ), - self.update_provisioning_config: gapic_v1.method.wrap_method( - self.update_provisioning_config, - default_timeout=None, - client_info=client_info, - ), - self.rename_network: gapic_v1.method.wrap_method( - self.rename_network, - default_timeout=None, - client_info=client_info, - ), - self.list_os_images: gapic_v1.method.wrap_method( - self.list_os_images, - default_timeout=None, - client_info=client_info, - ), - self.get_location: gapic_v1.method.wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - self.list_locations: gapic_v1.method.wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
- """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def list_instances(self) -> Callable[ - [instance.ListInstancesRequest], - Union[ - instance.ListInstancesResponse, - Awaitable[instance.ListInstancesResponse] - ]]: - raise NotImplementedError() - - @property - def get_instance(self) -> Callable[ - [instance.GetInstanceRequest], - Union[ - instance.Instance, - Awaitable[instance.Instance] - ]]: - raise NotImplementedError() - - @property - def update_instance(self) -> Callable[ - [gcb_instance.UpdateInstanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def rename_instance(self) -> Callable[ - [instance.RenameInstanceRequest], - Union[ - instance.Instance, - Awaitable[instance.Instance] - ]]: - raise NotImplementedError() - - @property - def reset_instance(self) -> Callable[ - [instance.ResetInstanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def start_instance(self) -> Callable[ - [instance.StartInstanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def stop_instance(self) -> Callable[ - [instance.StopInstanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def enable_interactive_serial_console(self) -> Callable[ - [instance.EnableInteractiveSerialConsoleRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def disable_interactive_serial_console(self) -> Callable[ - [instance.DisableInteractiveSerialConsoleRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def detach_lun(self) -> Callable[ - [gcb_instance.DetachLunRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_ssh_keys(self) -> Callable[ - [ssh_key.ListSSHKeysRequest], - Union[ - ssh_key.ListSSHKeysResponse, - Awaitable[ssh_key.ListSSHKeysResponse] - ]]: - raise NotImplementedError() - - @property - def create_ssh_key(self) -> Callable[ - [gcb_ssh_key.CreateSSHKeyRequest], - Union[ - gcb_ssh_key.SSHKey, - Awaitable[gcb_ssh_key.SSHKey] - ]]: - raise NotImplementedError() - - @property - def delete_ssh_key(self) -> Callable[ - [ssh_key.DeleteSSHKeyRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def list_volumes(self) -> Callable[ - [volume.ListVolumesRequest], - Union[ - volume.ListVolumesResponse, - Awaitable[volume.ListVolumesResponse] - ]]: - raise NotImplementedError() - - @property - def get_volume(self) -> Callable[ - [volume.GetVolumeRequest], - Union[ - volume.Volume, - Awaitable[volume.Volume] - ]]: - raise NotImplementedError() - - @property - def update_volume(self) -> Callable[ - [gcb_volume.UpdateVolumeRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def rename_volume(self) -> Callable[ - [volume.RenameVolumeRequest], - Union[ - volume.Volume, - Awaitable[volume.Volume] - ]]: - raise NotImplementedError() - - 
@property - def evict_volume(self) -> Callable[ - [volume.EvictVolumeRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def resize_volume(self) -> Callable[ - [gcb_volume.ResizeVolumeRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_networks(self) -> Callable[ - [network.ListNetworksRequest], - Union[ - network.ListNetworksResponse, - Awaitable[network.ListNetworksResponse] - ]]: - raise NotImplementedError() - - @property - def list_network_usage(self) -> Callable[ - [network.ListNetworkUsageRequest], - Union[ - network.ListNetworkUsageResponse, - Awaitable[network.ListNetworkUsageResponse] - ]]: - raise NotImplementedError() - - @property - def get_network(self) -> Callable[ - [network.GetNetworkRequest], - Union[ - network.Network, - Awaitable[network.Network] - ]]: - raise NotImplementedError() - - @property - def update_network(self) -> Callable[ - [gcb_network.UpdateNetworkRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def create_volume_snapshot(self) -> Callable[ - [gcb_volume_snapshot.CreateVolumeSnapshotRequest], - Union[ - gcb_volume_snapshot.VolumeSnapshot, - Awaitable[gcb_volume_snapshot.VolumeSnapshot] - ]]: - raise NotImplementedError() - - @property - def restore_volume_snapshot(self) -> Callable[ - [gcb_volume_snapshot.RestoreVolumeSnapshotRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_volume_snapshot(self) -> Callable[ - [volume_snapshot.DeleteVolumeSnapshotRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def get_volume_snapshot(self) -> Callable[ - [volume_snapshot.GetVolumeSnapshotRequest], - Union[ - volume_snapshot.VolumeSnapshot, - Awaitable[volume_snapshot.VolumeSnapshot] - ]]: - raise NotImplementedError() - - @property - def list_volume_snapshots(self) -> Callable[ - [volume_snapshot.ListVolumeSnapshotsRequest], - Union[ - volume_snapshot.ListVolumeSnapshotsResponse, - Awaitable[volume_snapshot.ListVolumeSnapshotsResponse] - ]]: - raise NotImplementedError() - - @property - def get_lun(self) -> Callable[ - [lun.GetLunRequest], - Union[ - lun.Lun, - Awaitable[lun.Lun] - ]]: - raise NotImplementedError() - - @property - def list_luns(self) -> Callable[ - [lun.ListLunsRequest], - Union[ - lun.ListLunsResponse, - Awaitable[lun.ListLunsResponse] - ]]: - raise NotImplementedError() - - @property - def evict_lun(self) -> Callable[ - [lun.EvictLunRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def get_nfs_share(self) -> Callable[ - [nfs_share.GetNfsShareRequest], - Union[ - nfs_share.NfsShare, - Awaitable[nfs_share.NfsShare] - ]]: - raise NotImplementedError() - - @property - def list_nfs_shares(self) -> Callable[ - [nfs_share.ListNfsSharesRequest], - Union[ - nfs_share.ListNfsSharesResponse, - Awaitable[nfs_share.ListNfsSharesResponse] - ]]: - raise NotImplementedError() - - @property - def update_nfs_share(self) -> Callable[ - [gcb_nfs_share.UpdateNfsShareRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def create_nfs_share(self) -> Callable[ - 
[gcb_nfs_share.CreateNfsShareRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def rename_nfs_share(self) -> Callable[ - [nfs_share.RenameNfsShareRequest], - Union[ - nfs_share.NfsShare, - Awaitable[nfs_share.NfsShare] - ]]: - raise NotImplementedError() - - @property - def delete_nfs_share(self) -> Callable[ - [nfs_share.DeleteNfsShareRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_provisioning_quotas(self) -> Callable[ - [provisioning.ListProvisioningQuotasRequest], - Union[ - provisioning.ListProvisioningQuotasResponse, - Awaitable[provisioning.ListProvisioningQuotasResponse] - ]]: - raise NotImplementedError() - - @property - def submit_provisioning_config(self) -> Callable[ - [provisioning.SubmitProvisioningConfigRequest], - Union[ - provisioning.SubmitProvisioningConfigResponse, - Awaitable[provisioning.SubmitProvisioningConfigResponse] - ]]: - raise NotImplementedError() - - @property - def get_provisioning_config(self) -> Callable[ - [provisioning.GetProvisioningConfigRequest], - Union[ - provisioning.ProvisioningConfig, - Awaitable[provisioning.ProvisioningConfig] - ]]: - raise NotImplementedError() - - @property - def create_provisioning_config(self) -> Callable[ - [provisioning.CreateProvisioningConfigRequest], - Union[ - provisioning.ProvisioningConfig, - Awaitable[provisioning.ProvisioningConfig] - ]]: - raise NotImplementedError() - - @property - def update_provisioning_config(self) -> Callable[ - [provisioning.UpdateProvisioningConfigRequest], - Union[ - provisioning.ProvisioningConfig, - Awaitable[provisioning.ProvisioningConfig] - ]]: - raise NotImplementedError() - - @property - def rename_network(self) -> Callable[ - [network.RenameNetworkRequest], - Union[ - network.Network, - Awaitable[network.Network] - ]]: - raise NotImplementedError() - - @property - def list_os_images(self) -> Callable[ - [osimage.ListOSImagesRequest], - Union[ - osimage.ListOSImagesResponse, - Awaitable[osimage.ListOSImagesResponse] - ]]: - raise NotImplementedError() - - @property - def get_location(self, - ) -> Callable[ - [locations_pb2.GetLocationRequest], - Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], - ]: - raise NotImplementedError() - - @property - def list_locations(self, - ) -> Callable[ - [locations_pb2.ListLocationsRequest], - Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'BareMetalSolutionTransport', -) diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/transports/grpc.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/transports/grpc.py deleted file mode 100644 index 1df6a1b377c3..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/transports/grpc.py +++ /dev/null @@ -1,1579 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json -import logging as std_logging -import pickle -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore - -from google.cloud.bare_metal_solution_v2.types import instance -from google.cloud.bare_metal_solution_v2.types import instance as gcb_instance -from google.cloud.bare_metal_solution_v2.types import lun -from google.cloud.bare_metal_solution_v2.types import network -from google.cloud.bare_metal_solution_v2.types import network as gcb_network -from google.cloud.bare_metal_solution_v2.types import nfs_share -from google.cloud.bare_metal_solution_v2.types import nfs_share as gcb_nfs_share -from google.cloud.bare_metal_solution_v2.types import osimage -from google.cloud.bare_metal_solution_v2.types import provisioning -from google.cloud.bare_metal_solution_v2.types import ssh_key -from google.cloud.bare_metal_solution_v2.types import ssh_key as gcb_ssh_key -from google.cloud.bare_metal_solution_v2.types import volume -from google.cloud.bare_metal_solution_v2.types import volume as gcb_volume -from google.cloud.bare_metal_solution_v2.types import volume_snapshot -from google.cloud.bare_metal_solution_v2.types import volume_snapshot as gcb_volume_snapshot -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import BareMetalSolutionTransport, DEFAULT_CLIENT_INFO - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER - def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } 
-            _LOGGER.debug(
-                f"Sending request for {client_call_details.method}",
-                extra = {
-                    "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution",
-                    "rpcName": client_call_details.method,
-                    "request": grpc_request,
-                    "metadata": grpc_request["metadata"],
-                },
-            )
-
-        response = continuation(client_call_details, request)
-        if logging_enabled:  # pragma: NO COVER
-            response_metadata = response.trailing_metadata()
-            # Convert gRPC metadata `<class 'grpc.aio._metadata.Metadata'>` to list of tuples
-            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
-            result = response.result()
-            if isinstance(result, proto.Message):
-                response_payload = type(result).to_json(result)
-            elif isinstance(result, google.protobuf.message.Message):
-                response_payload = MessageToJson(result)
-            else:
-                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
-            grpc_response = {
-                "payload": response_payload,
-                "metadata": metadata,
-                "status": "OK",
-            }
-            _LOGGER.debug(
-                f"Received response for {client_call_details.method}.",
-                extra = {
-                    "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution",
-                    "rpcName": client_call_details.method,
-                    "response": grpc_response,
-                    "metadata": grpc_response["metadata"],
-                },
-            )
-        return response
-
-
-class BareMetalSolutionGrpcTransport(BareMetalSolutionTransport):
-    """gRPC backend transport for BareMetalSolution.
-
-    Performs management operations on Bare Metal Solution servers.
-
-    The ``baremetalsolution.googleapis.com`` service provides management
-    capabilities for Bare Metal Solution servers. To access the API
-    methods, you must assign Bare Metal Solution IAM roles containing
-    the desired permissions to your staff in your Google Cloud project.
-    You must also enable the Bare Metal Solution API. Once enabled, the
-    methods act upon specific servers in your Bare Metal Solution
-    environment.
-
-    This class defines the same methods as the primary client, so the
-    primary client can load the underlying transport implementation
-    and call it.
-
-    It sends protocol buffers over the wire using gRPC (which is built on
-    top of HTTP/2); the ``grpcio`` package must be installed.
-    """
-    _stubs: Dict[str, Callable]
-
-    def __init__(self, *,
-            host: str = 'baremetalsolution.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None,
-            api_mtls_endpoint: Optional[str] = None,
-            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
-            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            api_audience: Optional[str] = None,
-            ) -> None:
-        """Instantiate the transport.
-
-        Args:
-            host (Optional[str]):
-                The hostname to connect to (default: 'baremetalsolution.googleapis.com').
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-                This argument is ignored if a ``channel`` instance is provided.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if a ``channel`` instance is provided.
-            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
-                ignored if a ``channel`` instance is provided.
-            channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]):
-                A ``Channel`` instance through which to make calls, or a Callable
-                that constructs and returns one. If set to None, ``self.create_channel``
-                is used to create the channel. If a Callable is given, it will be called
-                with the same arguments as used in ``self.create_channel``.
-            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
-                If provided, it overrides the ``host`` argument and tries to create
-                a mutual TLS channel with client SSL credentials from
-                ``client_cert_source`` or application default SSL credentials.
-            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                Deprecated. A callback to provide client SSL certificate bytes and
-                private key bytes, both in PEM format. It is ignored if
-                ``api_mtls_endpoint`` is None.
-            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
-                for the grpc channel. It is ignored if a ``channel`` instance is provided.
-            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                A callback to provide client certificate bytes and private key bytes,
-                both in PEM format. It is used to configure a mutual TLS channel. It is
-                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-            always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
-                be used for service account credentials.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
-            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
-                and ``credentials_file`` are passed.
-        """
-        self._grpc_channel = None
-        self._ssl_channel_credentials = ssl_channel_credentials
-        self._stubs: Dict[str, Callable] = {}
-        self._operations_client: Optional[operations_v1.OperationsClient] = None
-
-        if api_mtls_endpoint:
-            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
-        if client_cert_source:
-            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
-
-        if isinstance(channel, grpc.Channel):
-            # Ignore credentials if a channel was passed.
-            credentials = None
-            self._ignore_credentials = True
-            # If a channel was explicitly provided, set it.
-            self._grpc_channel = channel
-            self._ssl_channel_credentials = None
-
-        else:
-            if api_mtls_endpoint:
-                host = api_mtls_endpoint
-
-                # Create SSL credentials with client_cert_source or application
-                # default SSL credentials.
-                if client_cert_source:
-                    cert, key = client_cert_source()
-                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
-                        certificate_chain=cert, private_key=key
-                    )
-                else:
-                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
-
-            else:
-                if client_cert_source_for_mtls and not ssl_channel_credentials:
-                    cert, key = client_cert_source_for_mtls()
-                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
-                        certificate_chain=cert, private_key=key
-                    )
-
-        # The base transport sets the host, credentials and scopes
-        super().__init__(
-            host=host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            scopes=scopes,
-            quota_project_id=quota_project_id,
-            client_info=client_info,
-            always_use_jwt_access=always_use_jwt_access,
-            api_audience=api_audience,
-        )
-
-        if not self._grpc_channel:
-            # initialize with the provided callable or the default channel
-            channel_init = channel or type(self).create_channel
-            self._grpc_channel = channel_init(
-                self._host,
-                # use the credentials which are saved
-                credentials=self._credentials,
-                # Set ``credentials_file`` to ``None`` here as
-                # the credentials that we saved earlier should be used.
-                credentials_file=None,
-                scopes=self._scopes,
-                ssl_credentials=self._ssl_channel_credentials,
-                quota_project_id=quota_project_id,
-                options=[
-                    ("grpc.max_send_message_length", -1),
-                    ("grpc.max_receive_message_length", -1),
-                ],
-            )
-
-        self._interceptor = _LoggingClientInterceptor()
-        self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor)
-
-        # Wrap messages. This must be done after self._logged_channel exists
-        self._prep_wrapped_messages(client_info)
-
-    @classmethod
-    def create_channel(cls,
-                       host: str = 'baremetalsolution.googleapis.com',
-                       credentials: Optional[ga_credentials.Credentials] = None,
-                       credentials_file: Optional[str] = None,
-                       scopes: Optional[Sequence[str]] = None,
-                       quota_project_id: Optional[str] = None,
-                       **kwargs) -> grpc.Channel:
-        """Create and return a gRPC channel object.
-        Args:
-            host (Optional[str]): The host for the channel to use.
-            credentials (Optional[~.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify this application to the service. If
-                none are specified, the client will attempt to ascertain
-                the credentials from the environment.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is mutually exclusive with credentials.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            kwargs (Optional[dict]): Keyword arguments, which are passed to the
-                channel creation.
-        Returns:
-            grpc.Channel: A gRPC channel object.
-
-        Raises:
-            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
-                and ``credentials_file`` are passed.
-        """
-
-        return grpc_helpers.create_channel(
-            host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            quota_project_id=quota_project_id,
-            default_scopes=cls.AUTH_SCOPES,
-            scopes=scopes,
-            default_host=cls.DEFAULT_HOST,
-            **kwargs
-        )
-
-    @property
-    def grpc_channel(self) -> grpc.Channel:
-        """Return the channel designed to connect to this service.
- """ - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def list_instances(self) -> Callable[ - [instance.ListInstancesRequest], - instance.ListInstancesResponse]: - r"""Return a callable for the list instances method over gRPC. - - List servers in a given project and location. - - Returns: - Callable[[~.ListInstancesRequest], - ~.ListInstancesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_instances' not in self._stubs: - self._stubs['list_instances'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/ListInstances', - request_serializer=instance.ListInstancesRequest.serialize, - response_deserializer=instance.ListInstancesResponse.deserialize, - ) - return self._stubs['list_instances'] - - @property - def get_instance(self) -> Callable[ - [instance.GetInstanceRequest], - instance.Instance]: - r"""Return a callable for the get instance method over gRPC. - - Get details about a single server. - - Returns: - Callable[[~.GetInstanceRequest], - ~.Instance]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_instance' not in self._stubs: - self._stubs['get_instance'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/GetInstance', - request_serializer=instance.GetInstanceRequest.serialize, - response_deserializer=instance.Instance.deserialize, - ) - return self._stubs['get_instance'] - - @property - def update_instance(self) -> Callable[ - [gcb_instance.UpdateInstanceRequest], - operations_pb2.Operation]: - r"""Return a callable for the update instance method over gRPC. - - Update details of a single server. - - Returns: - Callable[[~.UpdateInstanceRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_instance' not in self._stubs: - self._stubs['update_instance'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/UpdateInstance', - request_serializer=gcb_instance.UpdateInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_instance'] - - @property - def rename_instance(self) -> Callable[ - [instance.RenameInstanceRequest], - instance.Instance]: - r"""Return a callable for the rename instance method over gRPC. - - RenameInstance sets a new name for an instance. 
- Use with caution, previous names become immediately - invalidated. - - Returns: - Callable[[~.RenameInstanceRequest], - ~.Instance]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'rename_instance' not in self._stubs: - self._stubs['rename_instance'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/RenameInstance', - request_serializer=instance.RenameInstanceRequest.serialize, - response_deserializer=instance.Instance.deserialize, - ) - return self._stubs['rename_instance'] - - @property - def reset_instance(self) -> Callable[ - [instance.ResetInstanceRequest], - operations_pb2.Operation]: - r"""Return a callable for the reset instance method over gRPC. - - Perform an ungraceful, hard reset on a server. - Equivalent to shutting the power off and then turning it - back on. - - Returns: - Callable[[~.ResetInstanceRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'reset_instance' not in self._stubs: - self._stubs['reset_instance'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/ResetInstance', - request_serializer=instance.ResetInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['reset_instance'] - - @property - def start_instance(self) -> Callable[ - [instance.StartInstanceRequest], - operations_pb2.Operation]: - r"""Return a callable for the start instance method over gRPC. - - Starts a server that was shutdown. - - Returns: - Callable[[~.StartInstanceRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'start_instance' not in self._stubs: - self._stubs['start_instance'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/StartInstance', - request_serializer=instance.StartInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['start_instance'] - - @property - def stop_instance(self) -> Callable[ - [instance.StopInstanceRequest], - operations_pb2.Operation]: - r"""Return a callable for the stop instance method over gRPC. - - Stop a running server. - - Returns: - Callable[[~.StopInstanceRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'stop_instance' not in self._stubs: - self._stubs['stop_instance'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/StopInstance', - request_serializer=instance.StopInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['stop_instance'] - - @property - def enable_interactive_serial_console(self) -> Callable[ - [instance.EnableInteractiveSerialConsoleRequest], - operations_pb2.Operation]: - r"""Return a callable for the enable interactive serial - console method over gRPC. - - Enable the interactive serial console feature on an - instance. - - Returns: - Callable[[~.EnableInteractiveSerialConsoleRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'enable_interactive_serial_console' not in self._stubs: - self._stubs['enable_interactive_serial_console'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/EnableInteractiveSerialConsole', - request_serializer=instance.EnableInteractiveSerialConsoleRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['enable_interactive_serial_console'] - - @property - def disable_interactive_serial_console(self) -> Callable[ - [instance.DisableInteractiveSerialConsoleRequest], - operations_pb2.Operation]: - r"""Return a callable for the disable interactive serial - console method over gRPC. - - Disable the interactive serial console feature on an - instance. - - Returns: - Callable[[~.DisableInteractiveSerialConsoleRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'disable_interactive_serial_console' not in self._stubs: - self._stubs['disable_interactive_serial_console'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/DisableInteractiveSerialConsole', - request_serializer=instance.DisableInteractiveSerialConsoleRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['disable_interactive_serial_console'] - - @property - def detach_lun(self) -> Callable[ - [gcb_instance.DetachLunRequest], - operations_pb2.Operation]: - r"""Return a callable for the detach lun method over gRPC. - - Detach LUN from Instance. - - Returns: - Callable[[~.DetachLunRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'detach_lun' not in self._stubs: - self._stubs['detach_lun'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/DetachLun', - request_serializer=gcb_instance.DetachLunRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['detach_lun'] - - @property - def list_ssh_keys(self) -> Callable[ - [ssh_key.ListSSHKeysRequest], - ssh_key.ListSSHKeysResponse]: - r"""Return a callable for the list ssh keys method over gRPC. - - Lists the public SSH keys registered for the - specified project. These SSH keys are used only for the - interactive serial console feature. - - Returns: - Callable[[~.ListSSHKeysRequest], - ~.ListSSHKeysResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_ssh_keys' not in self._stubs: - self._stubs['list_ssh_keys'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/ListSSHKeys', - request_serializer=ssh_key.ListSSHKeysRequest.serialize, - response_deserializer=ssh_key.ListSSHKeysResponse.deserialize, - ) - return self._stubs['list_ssh_keys'] - - @property - def create_ssh_key(self) -> Callable[ - [gcb_ssh_key.CreateSSHKeyRequest], - gcb_ssh_key.SSHKey]: - r"""Return a callable for the create ssh key method over gRPC. - - Register a public SSH key in the specified project - for use with the interactive serial console feature. - - Returns: - Callable[[~.CreateSSHKeyRequest], - ~.SSHKey]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_ssh_key' not in self._stubs: - self._stubs['create_ssh_key'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/CreateSSHKey', - request_serializer=gcb_ssh_key.CreateSSHKeyRequest.serialize, - response_deserializer=gcb_ssh_key.SSHKey.deserialize, - ) - return self._stubs['create_ssh_key'] - - @property - def delete_ssh_key(self) -> Callable[ - [ssh_key.DeleteSSHKeyRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete ssh key method over gRPC. - - Deletes a public SSH key registered in the specified - project. - - Returns: - Callable[[~.DeleteSSHKeyRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_ssh_key' not in self._stubs: - self._stubs['delete_ssh_key'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/DeleteSSHKey', - request_serializer=ssh_key.DeleteSSHKeyRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_ssh_key'] - - @property - def list_volumes(self) -> Callable[ - [volume.ListVolumesRequest], - volume.ListVolumesResponse]: - r"""Return a callable for the list volumes method over gRPC. - - List storage volumes in a given project and location. 
- - Returns: - Callable[[~.ListVolumesRequest], - ~.ListVolumesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_volumes' not in self._stubs: - self._stubs['list_volumes'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/ListVolumes', - request_serializer=volume.ListVolumesRequest.serialize, - response_deserializer=volume.ListVolumesResponse.deserialize, - ) - return self._stubs['list_volumes'] - - @property - def get_volume(self) -> Callable[ - [volume.GetVolumeRequest], - volume.Volume]: - r"""Return a callable for the get volume method over gRPC. - - Get details of a single storage volume. - - Returns: - Callable[[~.GetVolumeRequest], - ~.Volume]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_volume' not in self._stubs: - self._stubs['get_volume'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/GetVolume', - request_serializer=volume.GetVolumeRequest.serialize, - response_deserializer=volume.Volume.deserialize, - ) - return self._stubs['get_volume'] - - @property - def update_volume(self) -> Callable[ - [gcb_volume.UpdateVolumeRequest], - operations_pb2.Operation]: - r"""Return a callable for the update volume method over gRPC. - - Update details of a single storage volume. - - Returns: - Callable[[~.UpdateVolumeRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_volume' not in self._stubs: - self._stubs['update_volume'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/UpdateVolume', - request_serializer=gcb_volume.UpdateVolumeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_volume'] - - @property - def rename_volume(self) -> Callable[ - [volume.RenameVolumeRequest], - volume.Volume]: - r"""Return a callable for the rename volume method over gRPC. - - RenameVolume sets a new name for a volume. - Use with caution, previous names become immediately - invalidated. - - Returns: - Callable[[~.RenameVolumeRequest], - ~.Volume]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'rename_volume' not in self._stubs: - self._stubs['rename_volume'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/RenameVolume', - request_serializer=volume.RenameVolumeRequest.serialize, - response_deserializer=volume.Volume.deserialize, - ) - return self._stubs['rename_volume'] - - @property - def evict_volume(self) -> Callable[ - [volume.EvictVolumeRequest], - operations_pb2.Operation]: - r"""Return a callable for the evict volume method over gRPC. - - Skips volume's cooloff and deletes it now. - Volume must be in cooloff state. - - Returns: - Callable[[~.EvictVolumeRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'evict_volume' not in self._stubs: - self._stubs['evict_volume'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/EvictVolume', - request_serializer=volume.EvictVolumeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['evict_volume'] - - @property - def resize_volume(self) -> Callable[ - [gcb_volume.ResizeVolumeRequest], - operations_pb2.Operation]: - r"""Return a callable for the resize volume method over gRPC. - - Emergency Volume resize. - - Returns: - Callable[[~.ResizeVolumeRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'resize_volume' not in self._stubs: - self._stubs['resize_volume'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/ResizeVolume', - request_serializer=gcb_volume.ResizeVolumeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['resize_volume'] - - @property - def list_networks(self) -> Callable[ - [network.ListNetworksRequest], - network.ListNetworksResponse]: - r"""Return a callable for the list networks method over gRPC. - - List network in a given project and location. - - Returns: - Callable[[~.ListNetworksRequest], - ~.ListNetworksResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_networks' not in self._stubs: - self._stubs['list_networks'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/ListNetworks', - request_serializer=network.ListNetworksRequest.serialize, - response_deserializer=network.ListNetworksResponse.deserialize, - ) - return self._stubs['list_networks'] - - @property - def list_network_usage(self) -> Callable[ - [network.ListNetworkUsageRequest], - network.ListNetworkUsageResponse]: - r"""Return a callable for the list network usage method over gRPC. - - List all Networks (and used IPs for each Network) in - the vendor account associated with the specified - project. 
- - Returns: - Callable[[~.ListNetworkUsageRequest], - ~.ListNetworkUsageResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_network_usage' not in self._stubs: - self._stubs['list_network_usage'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/ListNetworkUsage', - request_serializer=network.ListNetworkUsageRequest.serialize, - response_deserializer=network.ListNetworkUsageResponse.deserialize, - ) - return self._stubs['list_network_usage'] - - @property - def get_network(self) -> Callable[ - [network.GetNetworkRequest], - network.Network]: - r"""Return a callable for the get network method over gRPC. - - Get details of a single network. - - Returns: - Callable[[~.GetNetworkRequest], - ~.Network]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_network' not in self._stubs: - self._stubs['get_network'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/GetNetwork', - request_serializer=network.GetNetworkRequest.serialize, - response_deserializer=network.Network.deserialize, - ) - return self._stubs['get_network'] - - @property - def update_network(self) -> Callable[ - [gcb_network.UpdateNetworkRequest], - operations_pb2.Operation]: - r"""Return a callable for the update network method over gRPC. - - Update details of a single network. - - Returns: - Callable[[~.UpdateNetworkRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_network' not in self._stubs: - self._stubs['update_network'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/UpdateNetwork', - request_serializer=gcb_network.UpdateNetworkRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_network'] - - @property - def create_volume_snapshot(self) -> Callable[ - [gcb_volume_snapshot.CreateVolumeSnapshotRequest], - gcb_volume_snapshot.VolumeSnapshot]: - r"""Return a callable for the create volume snapshot method over gRPC. - - Takes a snapshot of a boot volume. Returns INVALID_ARGUMENT if - called for a non-boot volume. - - Returns: - Callable[[~.CreateVolumeSnapshotRequest], - ~.VolumeSnapshot]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_volume_snapshot' not in self._stubs: - self._stubs['create_volume_snapshot'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/CreateVolumeSnapshot', - request_serializer=gcb_volume_snapshot.CreateVolumeSnapshotRequest.serialize, - response_deserializer=gcb_volume_snapshot.VolumeSnapshot.deserialize, - ) - return self._stubs['create_volume_snapshot'] - - @property - def restore_volume_snapshot(self) -> Callable[ - [gcb_volume_snapshot.RestoreVolumeSnapshotRequest], - operations_pb2.Operation]: - r"""Return a callable for the restore volume snapshot method over gRPC. - - Uses the specified snapshot to restore its parent volume. - Returns INVALID_ARGUMENT if called for a non-boot volume. - - Returns: - Callable[[~.RestoreVolumeSnapshotRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'restore_volume_snapshot' not in self._stubs: - self._stubs['restore_volume_snapshot'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/RestoreVolumeSnapshot', - request_serializer=gcb_volume_snapshot.RestoreVolumeSnapshotRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['restore_volume_snapshot'] - - @property - def delete_volume_snapshot(self) -> Callable[ - [volume_snapshot.DeleteVolumeSnapshotRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete volume snapshot method over gRPC. - - Deletes a volume snapshot. Returns INVALID_ARGUMENT if called - for a non-boot volume. - - Returns: - Callable[[~.DeleteVolumeSnapshotRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_volume_snapshot' not in self._stubs: - self._stubs['delete_volume_snapshot'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/DeleteVolumeSnapshot', - request_serializer=volume_snapshot.DeleteVolumeSnapshotRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_volume_snapshot'] - - @property - def get_volume_snapshot(self) -> Callable[ - [volume_snapshot.GetVolumeSnapshotRequest], - volume_snapshot.VolumeSnapshot]: - r"""Return a callable for the get volume snapshot method over gRPC. - - Returns the specified snapshot resource. Returns - INVALID_ARGUMENT if called for a non-boot volume. - - Returns: - Callable[[~.GetVolumeSnapshotRequest], - ~.VolumeSnapshot]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
-        if 'get_volume_snapshot' not in self._stubs:
-            self._stubs['get_volume_snapshot'] = self._logged_channel.unary_unary(
-                '/google.cloud.baremetalsolution.v2.BareMetalSolution/GetVolumeSnapshot',
-                request_serializer=volume_snapshot.GetVolumeSnapshotRequest.serialize,
-                response_deserializer=volume_snapshot.VolumeSnapshot.deserialize,
-            )
-        return self._stubs['get_volume_snapshot']
-
-    @property
-    def list_volume_snapshots(self) -> Callable[
-            [volume_snapshot.ListVolumeSnapshotsRequest],
-            volume_snapshot.ListVolumeSnapshotsResponse]:
-        r"""Return a callable for the list volume snapshots method over gRPC.
-
-        Retrieves the list of snapshots for the specified
-        volume. Returns a response with an empty list of
-        snapshots if called for a non-boot volume.
-
-        Returns:
-            Callable[[~.ListVolumeSnapshotsRequest],
-                    ~.ListVolumeSnapshotsResponse]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'list_volume_snapshots' not in self._stubs:
-            self._stubs['list_volume_snapshots'] = self._logged_channel.unary_unary(
-                '/google.cloud.baremetalsolution.v2.BareMetalSolution/ListVolumeSnapshots',
-                request_serializer=volume_snapshot.ListVolumeSnapshotsRequest.serialize,
-                response_deserializer=volume_snapshot.ListVolumeSnapshotsResponse.deserialize,
-            )
-        return self._stubs['list_volume_snapshots']
-
-    @property
-    def get_lun(self) -> Callable[
-            [lun.GetLunRequest],
-            lun.Lun]:
-        r"""Return a callable for the get lun method over gRPC.
-
-        Get details of a single storage logical unit
-        number (LUN).
-
-        Returns:
-            Callable[[~.GetLunRequest],
-                    ~.Lun]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'get_lun' not in self._stubs:
-            self._stubs['get_lun'] = self._logged_channel.unary_unary(
-                '/google.cloud.baremetalsolution.v2.BareMetalSolution/GetLun',
-                request_serializer=lun.GetLunRequest.serialize,
-                response_deserializer=lun.Lun.deserialize,
-            )
-        return self._stubs['get_lun']
-
-    @property
-    def list_luns(self) -> Callable[
-            [lun.ListLunsRequest],
-            lun.ListLunsResponse]:
-        r"""Return a callable for the list luns method over gRPC.
-
-        List storage volume LUNs for a given storage volume.
-
-        Returns:
-            Callable[[~.ListLunsRequest],
-                    ~.ListLunsResponse]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'list_luns' not in self._stubs:
-            self._stubs['list_luns'] = self._logged_channel.unary_unary(
-                '/google.cloud.baremetalsolution.v2.BareMetalSolution/ListLuns',
-                request_serializer=lun.ListLunsRequest.serialize,
-                response_deserializer=lun.ListLunsResponse.deserialize,
-            )
-        return self._stubs['list_luns']
-
-    @property
-    def evict_lun(self) -> Callable[
-            [lun.EvictLunRequest],
-            operations_pb2.Operation]:
-        r"""Return a callable for the evict lun method over gRPC.
-
-        Skips lun's cooloff and deletes it now.
-        Lun must be in cooloff state.
- - Returns: - Callable[[~.EvictLunRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'evict_lun' not in self._stubs: - self._stubs['evict_lun'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/EvictLun', - request_serializer=lun.EvictLunRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['evict_lun'] - - @property - def get_nfs_share(self) -> Callable[ - [nfs_share.GetNfsShareRequest], - nfs_share.NfsShare]: - r"""Return a callable for the get nfs share method over gRPC. - - Get details of a single NFS share. - - Returns: - Callable[[~.GetNfsShareRequest], - ~.NfsShare]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_nfs_share' not in self._stubs: - self._stubs['get_nfs_share'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/GetNfsShare', - request_serializer=nfs_share.GetNfsShareRequest.serialize, - response_deserializer=nfs_share.NfsShare.deserialize, - ) - return self._stubs['get_nfs_share'] - - @property - def list_nfs_shares(self) -> Callable[ - [nfs_share.ListNfsSharesRequest], - nfs_share.ListNfsSharesResponse]: - r"""Return a callable for the list nfs shares method over gRPC. - - List NFS shares. - - Returns: - Callable[[~.ListNfsSharesRequest], - ~.ListNfsSharesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_nfs_shares' not in self._stubs: - self._stubs['list_nfs_shares'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/ListNfsShares', - request_serializer=nfs_share.ListNfsSharesRequest.serialize, - response_deserializer=nfs_share.ListNfsSharesResponse.deserialize, - ) - return self._stubs['list_nfs_shares'] - - @property - def update_nfs_share(self) -> Callable[ - [gcb_nfs_share.UpdateNfsShareRequest], - operations_pb2.Operation]: - r"""Return a callable for the update nfs share method over gRPC. - - Update details of a single NFS share. - - Returns: - Callable[[~.UpdateNfsShareRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
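-        # NOTE: stubs whose response type is ``operations_pb2.Operation`` are
-        # long-running RPCs. The raw stub returns only the Operation proto;
-        # completion is typically tracked through the ``operations_client``
-        # exposed by this transport. A rough sketch (``request`` assumed built
-        # by the caller):
-        #
-        #     operation = transport.update_nfs_share(request)
-        #     latest = transport.operations_client.get_operation(operation.name)
-        #     if latest.done:
-        #         ...  # unpack latest.response / latest.error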
-        if 'update_nfs_share' not in self._stubs:
-            self._stubs['update_nfs_share'] = self._logged_channel.unary_unary(
-                '/google.cloud.baremetalsolution.v2.BareMetalSolution/UpdateNfsShare',
-                request_serializer=gcb_nfs_share.UpdateNfsShareRequest.serialize,
-                response_deserializer=operations_pb2.Operation.FromString,
-            )
-        return self._stubs['update_nfs_share']
-
-    @property
-    def create_nfs_share(self) -> Callable[
-            [gcb_nfs_share.CreateNfsShareRequest],
-            operations_pb2.Operation]:
-        r"""Return a callable for the create nfs share method over gRPC.
-
-        Create an NFS share.
-
-        Returns:
-            Callable[[~.CreateNfsShareRequest],
-                    ~.Operation]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'create_nfs_share' not in self._stubs:
-            self._stubs['create_nfs_share'] = self._logged_channel.unary_unary(
-                '/google.cloud.baremetalsolution.v2.BareMetalSolution/CreateNfsShare',
-                request_serializer=gcb_nfs_share.CreateNfsShareRequest.serialize,
-                response_deserializer=operations_pb2.Operation.FromString,
-            )
-        return self._stubs['create_nfs_share']
-
-    @property
-    def rename_nfs_share(self) -> Callable[
-            [nfs_share.RenameNfsShareRequest],
-            nfs_share.NfsShare]:
-        r"""Return a callable for the rename nfs share method over gRPC.
-
-        RenameNfsShare sets a new name for an NFS share.
-        Use with caution, previous names become immediately
-        invalidated.
-
-        Returns:
-            Callable[[~.RenameNfsShareRequest],
-                    ~.NfsShare]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'rename_nfs_share' not in self._stubs:
-            self._stubs['rename_nfs_share'] = self._logged_channel.unary_unary(
-                '/google.cloud.baremetalsolution.v2.BareMetalSolution/RenameNfsShare',
-                request_serializer=nfs_share.RenameNfsShareRequest.serialize,
-                response_deserializer=nfs_share.NfsShare.deserialize,
-            )
-        return self._stubs['rename_nfs_share']
-
-    @property
-    def delete_nfs_share(self) -> Callable[
-            [nfs_share.DeleteNfsShareRequest],
-            operations_pb2.Operation]:
-        r"""Return a callable for the delete nfs share method over gRPC.
-
-        Delete an NFS share. The underlying volume is
-        automatically deleted.
-
-        Returns:
-            Callable[[~.DeleteNfsShareRequest],
-                    ~.Operation]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'delete_nfs_share' not in self._stubs:
-            self._stubs['delete_nfs_share'] = self._logged_channel.unary_unary(
-                '/google.cloud.baremetalsolution.v2.BareMetalSolution/DeleteNfsShare',
-                request_serializer=nfs_share.DeleteNfsShareRequest.serialize,
-                response_deserializer=operations_pb2.Operation.FromString,
-            )
-        return self._stubs['delete_nfs_share']
-
-    @property
-    def list_provisioning_quotas(self) -> Callable[
-            [provisioning.ListProvisioningQuotasRequest],
-            provisioning.ListProvisioningQuotasResponse]:
-        r"""Return a callable for the list provisioning quotas method over gRPC.
-
-        List the budget details to provision resources on a
-        given project.
-
-        Returns:
-            Callable[[~.ListProvisioningQuotasRequest],
-                    ~.ListProvisioningQuotasResponse]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'list_provisioning_quotas' not in self._stubs:
-            self._stubs['list_provisioning_quotas'] = self._logged_channel.unary_unary(
-                '/google.cloud.baremetalsolution.v2.BareMetalSolution/ListProvisioningQuotas',
-                request_serializer=provisioning.ListProvisioningQuotasRequest.serialize,
-                response_deserializer=provisioning.ListProvisioningQuotasResponse.deserialize,
-            )
-        return self._stubs['list_provisioning_quotas']
-
-    @property
-    def submit_provisioning_config(self) -> Callable[
-            [provisioning.SubmitProvisioningConfigRequest],
-            provisioning.SubmitProvisioningConfigResponse]:
-        r"""Return a callable for the submit provisioning config method over gRPC.
-
-        Submit a provisioning configuration for a given
-        project.
-
-        Returns:
-            Callable[[~.SubmitProvisioningConfigRequest],
-                    ~.SubmitProvisioningConfigResponse]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'submit_provisioning_config' not in self._stubs:
-            self._stubs['submit_provisioning_config'] = self._logged_channel.unary_unary(
-                '/google.cloud.baremetalsolution.v2.BareMetalSolution/SubmitProvisioningConfig',
-                request_serializer=provisioning.SubmitProvisioningConfigRequest.serialize,
-                response_deserializer=provisioning.SubmitProvisioningConfigResponse.deserialize,
-            )
-        return self._stubs['submit_provisioning_config']
-
-    @property
-    def get_provisioning_config(self) -> Callable[
-            [provisioning.GetProvisioningConfigRequest],
-            provisioning.ProvisioningConfig]:
-        r"""Return a callable for the get provisioning config method over gRPC.
-
-        Get ProvisioningConfig by name.
-
-        Returns:
-            Callable[[~.GetProvisioningConfigRequest],
-                    ~.ProvisioningConfig]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'get_provisioning_config' not in self._stubs:
-            self._stubs['get_provisioning_config'] = self._logged_channel.unary_unary(
-                '/google.cloud.baremetalsolution.v2.BareMetalSolution/GetProvisioningConfig',
-                request_serializer=provisioning.GetProvisioningConfigRequest.serialize,
-                response_deserializer=provisioning.ProvisioningConfig.deserialize,
-            )
-        return self._stubs['get_provisioning_config']
-
-    @property
-    def create_provisioning_config(self) -> Callable[
-            [provisioning.CreateProvisioningConfigRequest],
-            provisioning.ProvisioningConfig]:
-        r"""Return a callable for the create provisioning config method over gRPC.
-
-        Create new ProvisioningConfig.
-
-        Returns:
-            Callable[[~.CreateProvisioningConfigRequest],
-                    ~.ProvisioningConfig]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
- if 'create_provisioning_config' not in self._stubs: - self._stubs['create_provisioning_config'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/CreateProvisioningConfig', - request_serializer=provisioning.CreateProvisioningConfigRequest.serialize, - response_deserializer=provisioning.ProvisioningConfig.deserialize, - ) - return self._stubs['create_provisioning_config'] - - @property - def update_provisioning_config(self) -> Callable[ - [provisioning.UpdateProvisioningConfigRequest], - provisioning.ProvisioningConfig]: - r"""Return a callable for the update provisioning config method over gRPC. - - Update existing ProvisioningConfig. - - Returns: - Callable[[~.UpdateProvisioningConfigRequest], - ~.ProvisioningConfig]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_provisioning_config' not in self._stubs: - self._stubs['update_provisioning_config'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/UpdateProvisioningConfig', - request_serializer=provisioning.UpdateProvisioningConfigRequest.serialize, - response_deserializer=provisioning.ProvisioningConfig.deserialize, - ) - return self._stubs['update_provisioning_config'] - - @property - def rename_network(self) -> Callable[ - [network.RenameNetworkRequest], - network.Network]: - r"""Return a callable for the rename network method over gRPC. - - RenameNetwork sets a new name for a network. - Use with caution, previous names become immediately - invalidated. - - Returns: - Callable[[~.RenameNetworkRequest], - ~.Network]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'rename_network' not in self._stubs: - self._stubs['rename_network'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/RenameNetwork', - request_serializer=network.RenameNetworkRequest.serialize, - response_deserializer=network.Network.deserialize, - ) - return self._stubs['rename_network'] - - @property - def list_os_images(self) -> Callable[ - [osimage.ListOSImagesRequest], - osimage.ListOSImagesResponse]: - r"""Return a callable for the list os images method over gRPC. - - Retrieves the list of OS images which are currently - approved. - - Returns: - Callable[[~.ListOSImagesRequest], - ~.ListOSImagesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
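-        # NOTE: two (de)serialization conventions appear in these stubs:
-        # proto-plus message types generated for this API expose
-        # ``serialize``/``deserialize``, while plain protobuf messages such as
-        # ``operations_pb2.Operation``, ``empty_pb2.Empty`` and the
-        # ``locations_pb2`` messages use ``SerializeToString``/``FromString``.
-        # Either kind of callable satisfies ``unary_unary``, e.g. (a
-        # google.longrunning method, shown only as an illustration):
-        #
-        #     channel.unary_unary(
-        #         '/google.longrunning.Operations/GetOperation',
-        #         request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
-        #         response_deserializer=operations_pb2.Operation.FromString,
-        #     )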
-        if 'list_os_images' not in self._stubs:
-            self._stubs['list_os_images'] = self._logged_channel.unary_unary(
-                '/google.cloud.baremetalsolution.v2.BareMetalSolution/ListOSImages',
-                request_serializer=osimage.ListOSImagesRequest.serialize,
-                response_deserializer=osimage.ListOSImagesResponse.deserialize,
-            )
-        return self._stubs['list_os_images']
-
-    def close(self):
-        self._logged_channel.close()
-
-    @property
-    def list_locations(
-        self,
-    ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]:
-        r"""Return a callable for the list locations method over gRPC.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if "list_locations" not in self._stubs:
-            self._stubs["list_locations"] = self._logged_channel.unary_unary(
-                "/google.cloud.location.Locations/ListLocations",
-                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
-                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
-            )
-        return self._stubs["list_locations"]
-
-    @property
-    def get_location(
-        self,
-    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
-        r"""Return a callable for the get location method over gRPC.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if "get_location" not in self._stubs:
-            self._stubs["get_location"] = self._logged_channel.unary_unary(
-                "/google.cloud.location.Locations/GetLocation",
-                request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
-                response_deserializer=locations_pb2.Location.FromString,
-            )
-        return self._stubs["get_location"]
-
-    @property
-    def kind(self) -> str:
-        return "grpc"
-
-
-__all__ = (
-    'BareMetalSolutionGrpcTransport',
-)
diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/transports/grpc_asyncio.py
deleted file mode 100644
index e48fe778a404..000000000000
--- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/transports/grpc_asyncio.py
+++ /dev/null
@@ -1,1824 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
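-# NOTE: this module mirrors the synchronous ``grpc`` transport above, but its
-# stubs return awaitables over an ``aio.Channel``. A minimal usage sketch,
-# assuming the generated async client and the standard GAPIC constructor
-# (resource names below are placeholders):
-#
-#     import asyncio
-#     from google.cloud.bare_metal_solution_v2 import BareMetalSolutionAsyncClient
-#
-#     async def main():
-#         client = BareMetalSolutionAsyncClient(transport="grpc_asyncio")
-#         pager = await client.list_instances(parent="projects/my-project/locations/us-central1")
-#         async for instance in pager:
-#             print(instance.name)
-#
-#     asyncio.run(main())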
-# -import inspect -import json -import pickle -import logging as std_logging -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import exceptions as core_exceptions -from google.api_core import retry_async as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.bare_metal_solution_v2.types import instance -from google.cloud.bare_metal_solution_v2.types import instance as gcb_instance -from google.cloud.bare_metal_solution_v2.types import lun -from google.cloud.bare_metal_solution_v2.types import network -from google.cloud.bare_metal_solution_v2.types import network as gcb_network -from google.cloud.bare_metal_solution_v2.types import nfs_share -from google.cloud.bare_metal_solution_v2.types import nfs_share as gcb_nfs_share -from google.cloud.bare_metal_solution_v2.types import osimage -from google.cloud.bare_metal_solution_v2.types import provisioning -from google.cloud.bare_metal_solution_v2.types import ssh_key -from google.cloud.bare_metal_solution_v2.types import ssh_key as gcb_ssh_key -from google.cloud.bare_metal_solution_v2.types import volume -from google.cloud.bare_metal_solution_v2.types import volume as gcb_volume -from google.cloud.bare_metal_solution_v2.types import volume_snapshot -from google.cloud.bare_metal_solution_v2.types import volume_snapshot as gcb_volume_snapshot -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import BareMetalSolutionTransport, DEFAULT_CLIENT_INFO -from .grpc import BareMetalSolutionGrpcTransport - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER - async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": 
str(client_call_details.method),
-                    "request": grpc_request,
-                    "metadata": grpc_request["metadata"],
-                },
-            )
-        response = await continuation(client_call_details, request)
-        if logging_enabled:  # pragma: NO COVER
-            response_metadata = await response.trailing_metadata()
-            # Convert gRPC metadata to a list of tuples
-            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
-            result = await response
-            if isinstance(result, proto.Message):
-                response_payload = type(result).to_json(result)
-            elif isinstance(result, google.protobuf.message.Message):
-                response_payload = MessageToJson(result)
-            else:
-                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
-            grpc_response = {
-                "payload": response_payload,
-                "metadata": metadata,
-                "status": "OK",
-            }
-            _LOGGER.debug(
-                f"Received response to rpc {client_call_details.method}.",
-                extra = {
-                    "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution",
-                    "rpcName": str(client_call_details.method),
-                    "response": grpc_response,
-                    "metadata": grpc_response["metadata"],
-                },
-            )
-        return response
-
-
-class BareMetalSolutionGrpcAsyncIOTransport(BareMetalSolutionTransport):
-    """gRPC AsyncIO backend transport for BareMetalSolution.
-
-    Performs management operations on Bare Metal Solution servers.
-
-    The ``baremetalsolution.googleapis.com`` service provides management
-    capabilities for Bare Metal Solution servers. To access the API
-    methods, you must assign Bare Metal Solution IAM roles containing
-    the desired permissions to your staff in your Google Cloud project.
-    You must also enable the Bare Metal Solution API. Once enabled, the
-    methods act upon specific servers in your Bare Metal Solution
-    environment.
-
-    This class defines the same methods as the primary client, so the
-    primary client can load the underlying transport implementation
-    and call it.
-
-    It sends protocol buffers over the wire using gRPC (which is built on
-    top of HTTP/2); the ``grpcio`` package must be installed.
-    """
-
-    _grpc_channel: aio.Channel
-    _stubs: Dict[str, Callable] = {}
-
-    @classmethod
-    def create_channel(cls,
-                       host: str = 'baremetalsolution.googleapis.com',
-                       credentials: Optional[ga_credentials.Credentials] = None,
-                       credentials_file: Optional[str] = None,
-                       scopes: Optional[Sequence[str]] = None,
-                       quota_project_id: Optional[str] = None,
-                       **kwargs) -> aio.Channel:
-        """Create and return a gRPC AsyncIO channel object.
-        Args:
-            host (Optional[str]): The host for the channel to use.
-            credentials (Optional[~.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify this application to the service. If
-                none are specified, the client will attempt to ascertain
-                the credentials from the environment.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            kwargs (Optional[dict]): Keyword arguments, which are passed to the
-                channel creation.
-        Returns:
-            aio.Channel: A gRPC AsyncIO channel object.
- """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'baremetalsolution.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'baremetalsolution.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, aio.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. 
- - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def list_instances(self) -> Callable[ - [instance.ListInstancesRequest], - Awaitable[instance.ListInstancesResponse]]: - r"""Return a callable for the list instances method over gRPC. - - List servers in a given project and location. - - Returns: - Callable[[~.ListInstancesRequest], - Awaitable[~.ListInstancesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_instances' not in self._stubs: - self._stubs['list_instances'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/ListInstances', - request_serializer=instance.ListInstancesRequest.serialize, - response_deserializer=instance.ListInstancesResponse.deserialize, - ) - return self._stubs['list_instances'] - - @property - def get_instance(self) -> Callable[ - [instance.GetInstanceRequest], - Awaitable[instance.Instance]]: - r"""Return a callable for the get instance method over gRPC. - - Get details about a single server. - - Returns: - Callable[[~.GetInstanceRequest], - Awaitable[~.Instance]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_instance' not in self._stubs: - self._stubs['get_instance'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/GetInstance', - request_serializer=instance.GetInstanceRequest.serialize, - response_deserializer=instance.Instance.deserialize, - ) - return self._stubs['get_instance'] - - @property - def update_instance(self) -> Callable[ - [gcb_instance.UpdateInstanceRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update instance method over gRPC. - - Update details of a single server. - - Returns: - Callable[[~.UpdateInstanceRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_instance' not in self._stubs: - self._stubs['update_instance'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/UpdateInstance', - request_serializer=gcb_instance.UpdateInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_instance'] - - @property - def rename_instance(self) -> Callable[ - [instance.RenameInstanceRequest], - Awaitable[instance.Instance]]: - r"""Return a callable for the rename instance method over gRPC. - - RenameInstance sets a new name for an instance. - Use with caution, previous names become immediately - invalidated. 
- - Returns: - Callable[[~.RenameInstanceRequest], - Awaitable[~.Instance]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'rename_instance' not in self._stubs: - self._stubs['rename_instance'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/RenameInstance', - request_serializer=instance.RenameInstanceRequest.serialize, - response_deserializer=instance.Instance.deserialize, - ) - return self._stubs['rename_instance'] - - @property - def reset_instance(self) -> Callable[ - [instance.ResetInstanceRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the reset instance method over gRPC. - - Perform an ungraceful, hard reset on a server. - Equivalent to shutting the power off and then turning it - back on. - - Returns: - Callable[[~.ResetInstanceRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'reset_instance' not in self._stubs: - self._stubs['reset_instance'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/ResetInstance', - request_serializer=instance.ResetInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['reset_instance'] - - @property - def start_instance(self) -> Callable[ - [instance.StartInstanceRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the start instance method over gRPC. - - Starts a server that was shutdown. - - Returns: - Callable[[~.StartInstanceRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'start_instance' not in self._stubs: - self._stubs['start_instance'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/StartInstance', - request_serializer=instance.StartInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['start_instance'] - - @property - def stop_instance(self) -> Callable[ - [instance.StopInstanceRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the stop instance method over gRPC. - - Stop a running server. - - Returns: - Callable[[~.StopInstanceRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
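-        # NOTE: unlike the synchronous transport, calling one of these stubs
-        # yields an awaitable; a caller holding the raw transport would do,
-        # schematically (``request`` assumed built by the caller):
-        #
-        #     operation = await transport.stop_instance(request)
-        #     latest = await transport.operations_client.get_operation(operation.name)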
- if 'stop_instance' not in self._stubs: - self._stubs['stop_instance'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/StopInstance', - request_serializer=instance.StopInstanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['stop_instance'] - - @property - def enable_interactive_serial_console(self) -> Callable[ - [instance.EnableInteractiveSerialConsoleRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the enable interactive serial - console method over gRPC. - - Enable the interactive serial console feature on an - instance. - - Returns: - Callable[[~.EnableInteractiveSerialConsoleRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'enable_interactive_serial_console' not in self._stubs: - self._stubs['enable_interactive_serial_console'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/EnableInteractiveSerialConsole', - request_serializer=instance.EnableInteractiveSerialConsoleRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['enable_interactive_serial_console'] - - @property - def disable_interactive_serial_console(self) -> Callable[ - [instance.DisableInteractiveSerialConsoleRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the disable interactive serial - console method over gRPC. - - Disable the interactive serial console feature on an - instance. - - Returns: - Callable[[~.DisableInteractiveSerialConsoleRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'disable_interactive_serial_console' not in self._stubs: - self._stubs['disable_interactive_serial_console'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/DisableInteractiveSerialConsole', - request_serializer=instance.DisableInteractiveSerialConsoleRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['disable_interactive_serial_console'] - - @property - def detach_lun(self) -> Callable[ - [gcb_instance.DetachLunRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the detach lun method over gRPC. - - Detach LUN from Instance. - - Returns: - Callable[[~.DetachLunRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'detach_lun' not in self._stubs: - self._stubs['detach_lun'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/DetachLun', - request_serializer=gcb_instance.DetachLunRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['detach_lun'] - - @property - def list_ssh_keys(self) -> Callable[ - [ssh_key.ListSSHKeysRequest], - Awaitable[ssh_key.ListSSHKeysResponse]]: - r"""Return a callable for the list ssh keys method over gRPC. - - Lists the public SSH keys registered for the - specified project. These SSH keys are used only for the - interactive serial console feature. - - Returns: - Callable[[~.ListSSHKeysRequest], - Awaitable[~.ListSSHKeysResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_ssh_keys' not in self._stubs: - self._stubs['list_ssh_keys'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/ListSSHKeys', - request_serializer=ssh_key.ListSSHKeysRequest.serialize, - response_deserializer=ssh_key.ListSSHKeysResponse.deserialize, - ) - return self._stubs['list_ssh_keys'] - - @property - def create_ssh_key(self) -> Callable[ - [gcb_ssh_key.CreateSSHKeyRequest], - Awaitable[gcb_ssh_key.SSHKey]]: - r"""Return a callable for the create ssh key method over gRPC. - - Register a public SSH key in the specified project - for use with the interactive serial console feature. - - Returns: - Callable[[~.CreateSSHKeyRequest], - Awaitable[~.SSHKey]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_ssh_key' not in self._stubs: - self._stubs['create_ssh_key'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/CreateSSHKey', - request_serializer=gcb_ssh_key.CreateSSHKeyRequest.serialize, - response_deserializer=gcb_ssh_key.SSHKey.deserialize, - ) - return self._stubs['create_ssh_key'] - - @property - def delete_ssh_key(self) -> Callable[ - [ssh_key.DeleteSSHKeyRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete ssh key method over gRPC. - - Deletes a public SSH key registered in the specified - project. - - Returns: - Callable[[~.DeleteSSHKeyRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_ssh_key' not in self._stubs: - self._stubs['delete_ssh_key'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/DeleteSSHKey', - request_serializer=ssh_key.DeleteSSHKeyRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_ssh_key'] - - @property - def list_volumes(self) -> Callable[ - [volume.ListVolumesRequest], - Awaitable[volume.ListVolumesResponse]]: - r"""Return a callable for the list volumes method over gRPC. - - List storage volumes in a given project and location. 
- - Returns: - Callable[[~.ListVolumesRequest], - Awaitable[~.ListVolumesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_volumes' not in self._stubs: - self._stubs['list_volumes'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/ListVolumes', - request_serializer=volume.ListVolumesRequest.serialize, - response_deserializer=volume.ListVolumesResponse.deserialize, - ) - return self._stubs['list_volumes'] - - @property - def get_volume(self) -> Callable[ - [volume.GetVolumeRequest], - Awaitable[volume.Volume]]: - r"""Return a callable for the get volume method over gRPC. - - Get details of a single storage volume. - - Returns: - Callable[[~.GetVolumeRequest], - Awaitable[~.Volume]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_volume' not in self._stubs: - self._stubs['get_volume'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/GetVolume', - request_serializer=volume.GetVolumeRequest.serialize, - response_deserializer=volume.Volume.deserialize, - ) - return self._stubs['get_volume'] - - @property - def update_volume(self) -> Callable[ - [gcb_volume.UpdateVolumeRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update volume method over gRPC. - - Update details of a single storage volume. - - Returns: - Callable[[~.UpdateVolumeRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_volume' not in self._stubs: - self._stubs['update_volume'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/UpdateVolume', - request_serializer=gcb_volume.UpdateVolumeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_volume'] - - @property - def rename_volume(self) -> Callable[ - [volume.RenameVolumeRequest], - Awaitable[volume.Volume]]: - r"""Return a callable for the rename volume method over gRPC. - - RenameVolume sets a new name for a volume. - Use with caution, previous names become immediately - invalidated. - - Returns: - Callable[[~.RenameVolumeRequest], - Awaitable[~.Volume]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'rename_volume' not in self._stubs: - self._stubs['rename_volume'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/RenameVolume', - request_serializer=volume.RenameVolumeRequest.serialize, - response_deserializer=volume.Volume.deserialize, - ) - return self._stubs['rename_volume'] - - @property - def evict_volume(self) -> Callable[ - [volume.EvictVolumeRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the evict volume method over gRPC. - - Skips volume's cooloff and deletes it now. - Volume must be in cooloff state. - - Returns: - Callable[[~.EvictVolumeRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'evict_volume' not in self._stubs: - self._stubs['evict_volume'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/EvictVolume', - request_serializer=volume.EvictVolumeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['evict_volume'] - - @property - def resize_volume(self) -> Callable[ - [gcb_volume.ResizeVolumeRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the resize volume method over gRPC. - - Emergency Volume resize. - - Returns: - Callable[[~.ResizeVolumeRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'resize_volume' not in self._stubs: - self._stubs['resize_volume'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/ResizeVolume', - request_serializer=gcb_volume.ResizeVolumeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['resize_volume'] - - @property - def list_networks(self) -> Callable[ - [network.ListNetworksRequest], - Awaitable[network.ListNetworksResponse]]: - r"""Return a callable for the list networks method over gRPC. - - List network in a given project and location. - - Returns: - Callable[[~.ListNetworksRequest], - Awaitable[~.ListNetworksResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_networks' not in self._stubs: - self._stubs['list_networks'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/ListNetworks', - request_serializer=network.ListNetworksRequest.serialize, - response_deserializer=network.ListNetworksResponse.deserialize, - ) - return self._stubs['list_networks'] - - @property - def list_network_usage(self) -> Callable[ - [network.ListNetworkUsageRequest], - Awaitable[network.ListNetworkUsageResponse]]: - r"""Return a callable for the list network usage method over gRPC. - - List all Networks (and used IPs for each Network) in - the vendor account associated with the specified - project. 
- - Returns: - Callable[[~.ListNetworkUsageRequest], - Awaitable[~.ListNetworkUsageResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_network_usage' not in self._stubs: - self._stubs['list_network_usage'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/ListNetworkUsage', - request_serializer=network.ListNetworkUsageRequest.serialize, - response_deserializer=network.ListNetworkUsageResponse.deserialize, - ) - return self._stubs['list_network_usage'] - - @property - def get_network(self) -> Callable[ - [network.GetNetworkRequest], - Awaitable[network.Network]]: - r"""Return a callable for the get network method over gRPC. - - Get details of a single network. - - Returns: - Callable[[~.GetNetworkRequest], - Awaitable[~.Network]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_network' not in self._stubs: - self._stubs['get_network'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/GetNetwork', - request_serializer=network.GetNetworkRequest.serialize, - response_deserializer=network.Network.deserialize, - ) - return self._stubs['get_network'] - - @property - def update_network(self) -> Callable[ - [gcb_network.UpdateNetworkRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update network method over gRPC. - - Update details of a single network. - - Returns: - Callable[[~.UpdateNetworkRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_network' not in self._stubs: - self._stubs['update_network'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/UpdateNetwork', - request_serializer=gcb_network.UpdateNetworkRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_network'] - - @property - def create_volume_snapshot(self) -> Callable[ - [gcb_volume_snapshot.CreateVolumeSnapshotRequest], - Awaitable[gcb_volume_snapshot.VolumeSnapshot]]: - r"""Return a callable for the create volume snapshot method over gRPC. - - Takes a snapshot of a boot volume. Returns INVALID_ARGUMENT if - called for a non-boot volume. - - Returns: - Callable[[~.CreateVolumeSnapshotRequest], - Awaitable[~.VolumeSnapshot]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_volume_snapshot' not in self._stubs: - self._stubs['create_volume_snapshot'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/CreateVolumeSnapshot', - request_serializer=gcb_volume_snapshot.CreateVolumeSnapshotRequest.serialize, - response_deserializer=gcb_volume_snapshot.VolumeSnapshot.deserialize, - ) - return self._stubs['create_volume_snapshot'] - - @property - def restore_volume_snapshot(self) -> Callable[ - [gcb_volume_snapshot.RestoreVolumeSnapshotRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the restore volume snapshot method over gRPC. - - Uses the specified snapshot to restore its parent volume. - Returns INVALID_ARGUMENT if called for a non-boot volume. - - Returns: - Callable[[~.RestoreVolumeSnapshotRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'restore_volume_snapshot' not in self._stubs: - self._stubs['restore_volume_snapshot'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/RestoreVolumeSnapshot', - request_serializer=gcb_volume_snapshot.RestoreVolumeSnapshotRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['restore_volume_snapshot'] - - @property - def delete_volume_snapshot(self) -> Callable[ - [volume_snapshot.DeleteVolumeSnapshotRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete volume snapshot method over gRPC. - - Deletes a volume snapshot. Returns INVALID_ARGUMENT if called - for a non-boot volume. - - Returns: - Callable[[~.DeleteVolumeSnapshotRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_volume_snapshot' not in self._stubs: - self._stubs['delete_volume_snapshot'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/DeleteVolumeSnapshot', - request_serializer=volume_snapshot.DeleteVolumeSnapshotRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_volume_snapshot'] - - @property - def get_volume_snapshot(self) -> Callable[ - [volume_snapshot.GetVolumeSnapshotRequest], - Awaitable[volume_snapshot.VolumeSnapshot]]: - r"""Return a callable for the get volume snapshot method over gRPC. - - Returns the specified snapshot resource. Returns - INVALID_ARGUMENT if called for a non-boot volume. - - Returns: - Callable[[~.GetVolumeSnapshotRequest], - Awaitable[~.VolumeSnapshot]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
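The snapshot RPCs around this point share a server-side restriction: they succeed only for boot volumes and fail with ``INVALID_ARGUMENT`` otherwise. In Python that status surfaces as ``google.api_core.exceptions.InvalidArgument``, so callers can guard for it explicitly. A sketch under assumed names (the resource path and description are placeholders, and application default credentials are assumed):

.. code-block:: python

    from google.api_core import exceptions
    from google.cloud import bare_metal_solution_v2

    def try_snapshot(volume_name: str):
        client = bare_metal_solution_v2.BareMetalSolutionClient()
        request = bare_metal_solution_v2.CreateVolumeSnapshotRequest(
            parent=volume_name,  # e.g. projects/.../locations/.../volumes/... (placeholder)
            volume_snapshot={"description": "pre-maintenance snapshot"},
        )
        try:
            return client.create_volume_snapshot(request=request)
        except exceptions.InvalidArgument:
            # Raised when the target is not a boot volume.
            return None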
- if 'get_volume_snapshot' not in self._stubs:
- self._stubs['get_volume_snapshot'] = self._logged_channel.unary_unary(
- '/google.cloud.baremetalsolution.v2.BareMetalSolution/GetVolumeSnapshot',
- request_serializer=volume_snapshot.GetVolumeSnapshotRequest.serialize,
- response_deserializer=volume_snapshot.VolumeSnapshot.deserialize,
- )
- return self._stubs['get_volume_snapshot']
-
- @property
- def list_volume_snapshots(self) -> Callable[
- [volume_snapshot.ListVolumeSnapshotsRequest],
- Awaitable[volume_snapshot.ListVolumeSnapshotsResponse]]:
- r"""Return a callable for the list volume snapshots method over gRPC.
-
- Retrieves the list of snapshots for the specified
- volume. Returns a response with an empty list of
- snapshots if called for a non-boot volume.
-
- Returns:
- Callable[[~.ListVolumeSnapshotsRequest],
- Awaitable[~.ListVolumeSnapshotsResponse]]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'list_volume_snapshots' not in self._stubs:
- self._stubs['list_volume_snapshots'] = self._logged_channel.unary_unary(
- '/google.cloud.baremetalsolution.v2.BareMetalSolution/ListVolumeSnapshots',
- request_serializer=volume_snapshot.ListVolumeSnapshotsRequest.serialize,
- response_deserializer=volume_snapshot.ListVolumeSnapshotsResponse.deserialize,
- )
- return self._stubs['list_volume_snapshots']
-
- @property
- def get_lun(self) -> Callable[
- [lun.GetLunRequest],
- Awaitable[lun.Lun]]:
- r"""Return a callable for the get lun method over gRPC.
-
- Get details of a single storage logical unit
- number (LUN).
-
- Returns:
- Callable[[~.GetLunRequest],
- Awaitable[~.Lun]]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'get_lun' not in self._stubs:
- self._stubs['get_lun'] = self._logged_channel.unary_unary(
- '/google.cloud.baremetalsolution.v2.BareMetalSolution/GetLun',
- request_serializer=lun.GetLunRequest.serialize,
- response_deserializer=lun.Lun.deserialize,
- )
- return self._stubs['get_lun']
-
- @property
- def list_luns(self) -> Callable[
- [lun.ListLunsRequest],
- Awaitable[lun.ListLunsResponse]]:
- r"""Return a callable for the list luns method over gRPC.
-
- List storage volume LUNs for a given storage volume.
-
- Returns:
- Callable[[~.ListLunsRequest],
- Awaitable[~.ListLunsResponse]]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'list_luns' not in self._stubs:
- self._stubs['list_luns'] = self._logged_channel.unary_unary(
- '/google.cloud.baremetalsolution.v2.BareMetalSolution/ListLuns',
- request_serializer=lun.ListLunsRequest.serialize,
- response_deserializer=lun.ListLunsResponse.deserialize,
- )
- return self._stubs['list_luns']
-
- @property
- def evict_lun(self) -> Callable[
- [lun.EvictLunRequest],
- Awaitable[operations_pb2.Operation]]:
- r"""Return a callable for the evict lun method over gRPC.
-
- Skips lun's cooloff and deletes it now.
- Lun must be in cooloff state.
- - Returns: - Callable[[~.EvictLunRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'evict_lun' not in self._stubs: - self._stubs['evict_lun'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/EvictLun', - request_serializer=lun.EvictLunRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['evict_lun'] - - @property - def get_nfs_share(self) -> Callable[ - [nfs_share.GetNfsShareRequest], - Awaitable[nfs_share.NfsShare]]: - r"""Return a callable for the get nfs share method over gRPC. - - Get details of a single NFS share. - - Returns: - Callable[[~.GetNfsShareRequest], - Awaitable[~.NfsShare]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_nfs_share' not in self._stubs: - self._stubs['get_nfs_share'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/GetNfsShare', - request_serializer=nfs_share.GetNfsShareRequest.serialize, - response_deserializer=nfs_share.NfsShare.deserialize, - ) - return self._stubs['get_nfs_share'] - - @property - def list_nfs_shares(self) -> Callable[ - [nfs_share.ListNfsSharesRequest], - Awaitable[nfs_share.ListNfsSharesResponse]]: - r"""Return a callable for the list nfs shares method over gRPC. - - List NFS shares. - - Returns: - Callable[[~.ListNfsSharesRequest], - Awaitable[~.ListNfsSharesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_nfs_shares' not in self._stubs: - self._stubs['list_nfs_shares'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/ListNfsShares', - request_serializer=nfs_share.ListNfsSharesRequest.serialize, - response_deserializer=nfs_share.ListNfsSharesResponse.deserialize, - ) - return self._stubs['list_nfs_shares'] - - @property - def update_nfs_share(self) -> Callable[ - [gcb_nfs_share.UpdateNfsShareRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update nfs share method over gRPC. - - Update details of a single NFS share. - - Returns: - Callable[[~.UpdateNfsShareRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
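Mutations such as ``update_volume``, ``evict_volume``, and the NFS-share methods below return ``operations_pb2.Operation``; on the async client surface these are wrapped as ``google.api_core.operation_async.AsyncOperation``, awaited twice: once for the RPC itself and once for the long-running operation to finish. A sketch, assuming application default credentials and a placeholder share name:

.. code-block:: python

    from google.cloud import bare_metal_solution_v2

    async def update_share_labels(share_name: str):
        client = bare_metal_solution_v2.BareMetalSolutionAsyncClient()
        request = bare_metal_solution_v2.UpdateNfsShareRequest(
            nfs_share={"name": share_name, "labels": {"env": "test"}},  # placeholder
            update_mask={"paths": ["labels"]},
        )
        operation = await client.update_nfs_share(request=request)  # AsyncOperation
        return await operation.result()  # resolves to the updated NfsShare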
- if 'update_nfs_share' not in self._stubs:
- self._stubs['update_nfs_share'] = self._logged_channel.unary_unary(
- '/google.cloud.baremetalsolution.v2.BareMetalSolution/UpdateNfsShare',
- request_serializer=gcb_nfs_share.UpdateNfsShareRequest.serialize,
- response_deserializer=operations_pb2.Operation.FromString,
- )
- return self._stubs['update_nfs_share']
-
- @property
- def create_nfs_share(self) -> Callable[
- [gcb_nfs_share.CreateNfsShareRequest],
- Awaitable[operations_pb2.Operation]]:
- r"""Return a callable for the create nfs share method over gRPC.
-
- Create an NFS share.
-
- Returns:
- Callable[[~.CreateNfsShareRequest],
- Awaitable[~.Operation]]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'create_nfs_share' not in self._stubs:
- self._stubs['create_nfs_share'] = self._logged_channel.unary_unary(
- '/google.cloud.baremetalsolution.v2.BareMetalSolution/CreateNfsShare',
- request_serializer=gcb_nfs_share.CreateNfsShareRequest.serialize,
- response_deserializer=operations_pb2.Operation.FromString,
- )
- return self._stubs['create_nfs_share']
-
- @property
- def rename_nfs_share(self) -> Callable[
- [nfs_share.RenameNfsShareRequest],
- Awaitable[nfs_share.NfsShare]]:
- r"""Return a callable for the rename nfs share method over gRPC.
-
- RenameNfsShare sets a new name for an NFS share.
- Use with caution, previous names become immediately
- invalidated.
-
- Returns:
- Callable[[~.RenameNfsShareRequest],
- Awaitable[~.NfsShare]]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'rename_nfs_share' not in self._stubs:
- self._stubs['rename_nfs_share'] = self._logged_channel.unary_unary(
- '/google.cloud.baremetalsolution.v2.BareMetalSolution/RenameNfsShare',
- request_serializer=nfs_share.RenameNfsShareRequest.serialize,
- response_deserializer=nfs_share.NfsShare.deserialize,
- )
- return self._stubs['rename_nfs_share']
-
- @property
- def delete_nfs_share(self) -> Callable[
- [nfs_share.DeleteNfsShareRequest],
- Awaitable[operations_pb2.Operation]]:
- r"""Return a callable for the delete nfs share method over gRPC.
-
- Delete an NFS share. The underlying volume is
- automatically deleted.
-
- Returns:
- Callable[[~.DeleteNfsShareRequest],
- Awaitable[~.Operation]]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'delete_nfs_share' not in self._stubs:
- self._stubs['delete_nfs_share'] = self._logged_channel.unary_unary(
- '/google.cloud.baremetalsolution.v2.BareMetalSolution/DeleteNfsShare',
- request_serializer=nfs_share.DeleteNfsShareRequest.serialize,
- response_deserializer=operations_pb2.Operation.FromString,
- )
- return self._stubs['delete_nfs_share']
-
- @property
- def list_provisioning_quotas(self) -> Callable[
- [provisioning.ListProvisioningQuotasRequest],
- Awaitable[provisioning.ListProvisioningQuotasResponse]]:
- r"""Return a callable for the list provisioning quotas method over gRPC.
-
- List the budget details to provision resources on a
- given project.
-
- Returns:
- Callable[[~.ListProvisioningQuotasRequest],
- Awaitable[~.ListProvisioningQuotasResponse]]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'list_provisioning_quotas' not in self._stubs:
- self._stubs['list_provisioning_quotas'] = self._logged_channel.unary_unary(
- '/google.cloud.baremetalsolution.v2.BareMetalSolution/ListProvisioningQuotas',
- request_serializer=provisioning.ListProvisioningQuotasRequest.serialize,
- response_deserializer=provisioning.ListProvisioningQuotasResponse.deserialize,
- )
- return self._stubs['list_provisioning_quotas']
-
- @property
- def submit_provisioning_config(self) -> Callable[
- [provisioning.SubmitProvisioningConfigRequest],
- Awaitable[provisioning.SubmitProvisioningConfigResponse]]:
- r"""Return a callable for the submit provisioning config method over gRPC.
-
- Submit a provisioning configuration for a given
- project.
-
- Returns:
- Callable[[~.SubmitProvisioningConfigRequest],
- Awaitable[~.SubmitProvisioningConfigResponse]]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'submit_provisioning_config' not in self._stubs:
- self._stubs['submit_provisioning_config'] = self._logged_channel.unary_unary(
- '/google.cloud.baremetalsolution.v2.BareMetalSolution/SubmitProvisioningConfig',
- request_serializer=provisioning.SubmitProvisioningConfigRequest.serialize,
- response_deserializer=provisioning.SubmitProvisioningConfigResponse.deserialize,
- )
- return self._stubs['submit_provisioning_config']
-
- @property
- def get_provisioning_config(self) -> Callable[
- [provisioning.GetProvisioningConfigRequest],
- Awaitable[provisioning.ProvisioningConfig]]:
- r"""Return a callable for the get provisioning config method over gRPC.
-
- Get ProvisioningConfig by name.
-
- Returns:
- Callable[[~.GetProvisioningConfigRequest],
- Awaitable[~.ProvisioningConfig]]:
- A function that, when called, will call the underlying RPC
- on the server.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if 'get_provisioning_config' not in self._stubs:
- self._stubs['get_provisioning_config'] = self._logged_channel.unary_unary(
- '/google.cloud.baremetalsolution.v2.BareMetalSolution/GetProvisioningConfig',
- request_serializer=provisioning.GetProvisioningConfigRequest.serialize,
- response_deserializer=provisioning.ProvisioningConfig.deserialize,
- )
- return self._stubs['get_provisioning_config']
-
- @property
- def create_provisioning_config(self) -> Callable[
- [provisioning.CreateProvisioningConfigRequest],
- Awaitable[provisioning.ProvisioningConfig]]:
- r"""Return a callable for the create provisioning config method over gRPC.
-
- Create new ProvisioningConfig.
-
- Returns:
- Callable[[~.CreateProvisioningConfigRequest],
- Awaitable[~.ProvisioningConfig]]:
- A function that, when called, will call the underlying RPC
- on the server.
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_provisioning_config' not in self._stubs: - self._stubs['create_provisioning_config'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/CreateProvisioningConfig', - request_serializer=provisioning.CreateProvisioningConfigRequest.serialize, - response_deserializer=provisioning.ProvisioningConfig.deserialize, - ) - return self._stubs['create_provisioning_config'] - - @property - def update_provisioning_config(self) -> Callable[ - [provisioning.UpdateProvisioningConfigRequest], - Awaitable[provisioning.ProvisioningConfig]]: - r"""Return a callable for the update provisioning config method over gRPC. - - Update existing ProvisioningConfig. - - Returns: - Callable[[~.UpdateProvisioningConfigRequest], - Awaitable[~.ProvisioningConfig]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_provisioning_config' not in self._stubs: - self._stubs['update_provisioning_config'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/UpdateProvisioningConfig', - request_serializer=provisioning.UpdateProvisioningConfigRequest.serialize, - response_deserializer=provisioning.ProvisioningConfig.deserialize, - ) - return self._stubs['update_provisioning_config'] - - @property - def rename_network(self) -> Callable[ - [network.RenameNetworkRequest], - Awaitable[network.Network]]: - r"""Return a callable for the rename network method over gRPC. - - RenameNetwork sets a new name for a network. - Use with caution, previous names become immediately - invalidated. - - Returns: - Callable[[~.RenameNetworkRequest], - Awaitable[~.Network]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'rename_network' not in self._stubs: - self._stubs['rename_network'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/RenameNetwork', - request_serializer=network.RenameNetworkRequest.serialize, - response_deserializer=network.Network.deserialize, - ) - return self._stubs['rename_network'] - - @property - def list_os_images(self) -> Callable[ - [osimage.ListOSImagesRequest], - Awaitable[osimage.ListOSImagesResponse]]: - r"""Return a callable for the list os images method over gRPC. - - Retrieves the list of OS images which are currently - approved. - - Returns: - Callable[[~.ListOSImagesRequest], - Awaitable[~.ListOSImagesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
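The ``List*`` callables here return a single page plus a ``next_page_token``; pagination is handled one level up, where the async client wraps each of these callables in an async pager so callers never touch tokens directly. A sketch for ``list_os_images`` (the parent path is a placeholder and application default credentials are assumed):

.. code-block:: python

    from google.cloud import bare_metal_solution_v2

    async def print_os_images(parent: str):
        client = bare_metal_solution_v2.BareMetalSolutionAsyncClient()
        request = bare_metal_solution_v2.ListOSImagesRequest(parent=parent)
        page_result = await client.list_os_images(request=request)
        async for image in page_result:  # the pager fetches further pages on demand
            print(image)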
- if 'list_os_images' not in self._stubs: - self._stubs['list_os_images'] = self._logged_channel.unary_unary( - '/google.cloud.baremetalsolution.v2.BareMetalSolution/ListOSImages', - request_serializer=osimage.ListOSImagesRequest.serialize, - response_deserializer=osimage.ListOSImagesResponse.deserialize, - ) - return self._stubs['list_os_images'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.list_instances: self._wrap_method( - self.list_instances, - default_timeout=None, - client_info=client_info, - ), - self.get_instance: self._wrap_method( - self.get_instance, - default_timeout=None, - client_info=client_info, - ), - self.update_instance: self._wrap_method( - self.update_instance, - default_timeout=None, - client_info=client_info, - ), - self.rename_instance: self._wrap_method( - self.rename_instance, - default_timeout=None, - client_info=client_info, - ), - self.reset_instance: self._wrap_method( - self.reset_instance, - default_timeout=None, - client_info=client_info, - ), - self.start_instance: self._wrap_method( - self.start_instance, - default_timeout=None, - client_info=client_info, - ), - self.stop_instance: self._wrap_method( - self.stop_instance, - default_timeout=None, - client_info=client_info, - ), - self.enable_interactive_serial_console: self._wrap_method( - self.enable_interactive_serial_console, - default_timeout=None, - client_info=client_info, - ), - self.disable_interactive_serial_console: self._wrap_method( - self.disable_interactive_serial_console, - default_timeout=None, - client_info=client_info, - ), - self.detach_lun: self._wrap_method( - self.detach_lun, - default_timeout=None, - client_info=client_info, - ), - self.list_ssh_keys: self._wrap_method( - self.list_ssh_keys, - default_timeout=None, - client_info=client_info, - ), - self.create_ssh_key: self._wrap_method( - self.create_ssh_key, - default_timeout=None, - client_info=client_info, - ), - self.delete_ssh_key: self._wrap_method( - self.delete_ssh_key, - default_timeout=None, - client_info=client_info, - ), - self.list_volumes: self._wrap_method( - self.list_volumes, - default_timeout=None, - client_info=client_info, - ), - self.get_volume: self._wrap_method( - self.get_volume, - default_timeout=None, - client_info=client_info, - ), - self.update_volume: self._wrap_method( - self.update_volume, - default_timeout=None, - client_info=client_info, - ), - self.rename_volume: self._wrap_method( - self.rename_volume, - default_timeout=None, - client_info=client_info, - ), - self.evict_volume: self._wrap_method( - self.evict_volume, - default_timeout=None, - client_info=client_info, - ), - self.resize_volume: self._wrap_method( - self.resize_volume, - default_timeout=None, - client_info=client_info, - ), - self.list_networks: self._wrap_method( - self.list_networks, - default_timeout=None, - client_info=client_info, - ), - self.list_network_usage: self._wrap_method( - self.list_network_usage, - default_timeout=None, - client_info=client_info, - ), - self.get_network: self._wrap_method( - self.get_network, - default_timeout=None, - client_info=client_info, - ), - self.update_network: self._wrap_method( - self.update_network, - default_timeout=None, - client_info=client_info, - ), - self.create_volume_snapshot: self._wrap_method( - self.create_volume_snapshot, - default_timeout=None, - client_info=client_info, - ), - self.restore_volume_snapshot: self._wrap_method( - 
self.restore_volume_snapshot, - default_timeout=None, - client_info=client_info, - ), - self.delete_volume_snapshot: self._wrap_method( - self.delete_volume_snapshot, - default_timeout=None, - client_info=client_info, - ), - self.get_volume_snapshot: self._wrap_method( - self.get_volume_snapshot, - default_timeout=None, - client_info=client_info, - ), - self.list_volume_snapshots: self._wrap_method( - self.list_volume_snapshots, - default_timeout=None, - client_info=client_info, - ), - self.get_lun: self._wrap_method( - self.get_lun, - default_timeout=None, - client_info=client_info, - ), - self.list_luns: self._wrap_method( - self.list_luns, - default_timeout=None, - client_info=client_info, - ), - self.evict_lun: self._wrap_method( - self.evict_lun, - default_timeout=None, - client_info=client_info, - ), - self.get_nfs_share: self._wrap_method( - self.get_nfs_share, - default_timeout=None, - client_info=client_info, - ), - self.list_nfs_shares: self._wrap_method( - self.list_nfs_shares, - default_timeout=None, - client_info=client_info, - ), - self.update_nfs_share: self._wrap_method( - self.update_nfs_share, - default_timeout=None, - client_info=client_info, - ), - self.create_nfs_share: self._wrap_method( - self.create_nfs_share, - default_timeout=None, - client_info=client_info, - ), - self.rename_nfs_share: self._wrap_method( - self.rename_nfs_share, - default_timeout=None, - client_info=client_info, - ), - self.delete_nfs_share: self._wrap_method( - self.delete_nfs_share, - default_timeout=None, - client_info=client_info, - ), - self.list_provisioning_quotas: self._wrap_method( - self.list_provisioning_quotas, - default_timeout=None, - client_info=client_info, - ), - self.submit_provisioning_config: self._wrap_method( - self.submit_provisioning_config, - default_timeout=None, - client_info=client_info, - ), - self.get_provisioning_config: self._wrap_method( - self.get_provisioning_config, - default_timeout=None, - client_info=client_info, - ), - self.create_provisioning_config: self._wrap_method( - self.create_provisioning_config, - default_timeout=None, - client_info=client_info, - ), - self.update_provisioning_config: self._wrap_method( - self.update_provisioning_config, - default_timeout=None, - client_info=client_info, - ), - self.rename_network: self._wrap_method( - self.rename_network, - default_timeout=None, - client_info=client_info, - ), - self.list_os_images: self._wrap_method( - self.list_os_images, - default_timeout=None, - client_info=client_info, - ), - self.get_location: self._wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - self.list_locations: self._wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
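``_prep_wrapped_messages`` and ``_wrap_method`` above delegate to ``gapic_v1.method_async.wrap_method``, which layers default retry, timeout, and client-info metadata onto each raw transport callable. The same wrapper can be applied standalone; a sketch with a stand-in coroutine (the retry and timeout values here are illustrative, not the library's configured defaults):

.. code-block:: python

    from google.api_core import gapic_v1, retry_async

    async def fetch(request):  # stand-in for a transport callable
        ...

    wrapped = gapic_v1.method_async.wrap_method(
        fetch,
        default_retry=retry_async.AsyncRetry(initial=0.1, maximum=10.0),
        default_timeout=60.0,
        client_info=gapic_v1.client_info.ClientInfo(),
    )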
- if "list_locations" not in self._stubs:
- self._stubs["list_locations"] = self._logged_channel.unary_unary(
- "/google.cloud.location.Locations/ListLocations",
- request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
- response_deserializer=locations_pb2.ListLocationsResponse.FromString,
- )
- return self._stubs["list_locations"]
-
- @property
- def get_location(
- self,
- ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
- r"""Return a callable for the get location method over gRPC.
- """
- # Generate a "stub function" on-the-fly which will actually make
- # the request.
- # gRPC handles serialization and deserialization, so we just need
- # to pass in the functions for each.
- if "get_location" not in self._stubs:
- self._stubs["get_location"] = self._logged_channel.unary_unary(
- "/google.cloud.location.Locations/GetLocation",
- request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
- response_deserializer=locations_pb2.Location.FromString,
- )
- return self._stubs["get_location"]
-
-
-__all__ = (
- 'BareMetalSolutionGrpcAsyncIOTransport',
-)
diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/transports/rest.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/transports/rest.py
deleted file mode 100644
index a8ea5bc661e1..000000000000
--- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/transports/rest.py
+++ /dev/null
@@ -1,8052 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
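The ``rest.py`` module removed below is the HTTP/JSON counterpart of the gRPC transports above: the same ``BareMetalSolution`` surface served through an ``AuthorizedSession`` instead of a gRPC channel. On the synchronous client the transport is selected by name, gRPC being the default; a sketch assuming application default credentials:

.. code-block:: python

    from google.cloud import bare_metal_solution_v2

    # "rest" routes every call through the REST transport defined in rest.py.
    client = bare_metal_solution_v2.BareMetalSolutionClient(transport="rest")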
-# -import logging -import json # type: ignore - -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.api_core import operations_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.cloud.location import locations_pb2 # type: ignore - -from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - - -from google.cloud.bare_metal_solution_v2.types import instance -from google.cloud.bare_metal_solution_v2.types import instance as gcb_instance -from google.cloud.bare_metal_solution_v2.types import lun -from google.cloud.bare_metal_solution_v2.types import network -from google.cloud.bare_metal_solution_v2.types import network as gcb_network -from google.cloud.bare_metal_solution_v2.types import nfs_share -from google.cloud.bare_metal_solution_v2.types import nfs_share as gcb_nfs_share -from google.cloud.bare_metal_solution_v2.types import osimage -from google.cloud.bare_metal_solution_v2.types import provisioning -from google.cloud.bare_metal_solution_v2.types import ssh_key -from google.cloud.bare_metal_solution_v2.types import ssh_key as gcb_ssh_key -from google.cloud.bare_metal_solution_v2.types import volume -from google.cloud.bare_metal_solution_v2.types import volume as gcb_volume -from google.cloud.bare_metal_solution_v2.types import volume_snapshot -from google.cloud.bare_metal_solution_v2.types import volume_snapshot as gcb_volume_snapshot -from google.protobuf import empty_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - - -from .rest_base import _BaseBareMetalSolutionRestTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = logging.getLogger(__name__) - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=f"requests@{requests_version}", -) - - -class BareMetalSolutionRestInterceptor: - """Interceptor for BareMetalSolution. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the BareMetalSolutionRestTransport. - - .. 
code-block:: python
- class MyCustomBareMetalSolutionInterceptor(BareMetalSolutionRestInterceptor):
- def pre_create_nfs_share(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_create_nfs_share(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- def pre_create_provisioning_config(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_create_provisioning_config(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- def pre_create_ssh_key(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_create_ssh_key(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- def pre_create_volume_snapshot(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_create_volume_snapshot(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- def pre_delete_nfs_share(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_delete_nfs_share(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- def pre_delete_ssh_key(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def pre_delete_volume_snapshot(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def pre_detach_lun(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_detach_lun(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- def pre_disable_interactive_serial_console(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_disable_interactive_serial_console(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- def pre_enable_interactive_serial_console(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_enable_interactive_serial_console(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- def pre_evict_lun(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_evict_lun(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- def pre_evict_volume(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_evict_volume(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- def pre_get_instance(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_get_instance(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- def pre_get_lun(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_get_lun(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- def pre_get_network(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_get_network(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- def pre_get_nfs_share(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_get_nfs_share(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- def pre_get_provisioning_config(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_get_provisioning_config(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- def pre_get_volume(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_get_volume(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- def pre_get_volume_snapshot(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_get_volume_snapshot(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- def pre_list_instances(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_list_instances(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- def pre_list_luns(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_list_luns(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- def pre_list_networks(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_list_networks(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- def pre_list_network_usage(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_list_network_usage(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- def pre_list_nfs_shares(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_list_nfs_shares(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- def pre_list_os_images(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_list_os_images(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- def pre_list_provisioning_quotas(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_list_provisioning_quotas(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- def pre_list_ssh_keys(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_list_ssh_keys(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- def pre_list_volumes(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_list_volumes(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- def pre_list_volume_snapshots(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_list_volume_snapshots(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- def pre_rename_instance(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_rename_instance(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- def pre_rename_network(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_rename_network(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- def pre_rename_nfs_share(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_rename_nfs_share(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- def pre_rename_volume(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_rename_volume(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- def pre_reset_instance(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_reset_instance(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- def pre_resize_volume(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_resize_volume(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- def pre_restore_volume_snapshot(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_restore_volume_snapshot(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- def pre_start_instance(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_start_instance(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- def pre_stop_instance(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_stop_instance(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- def pre_submit_provisioning_config(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_submit_provisioning_config(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- def pre_update_instance(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_update_instance(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- def pre_update_network(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_update_network(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- def pre_update_nfs_share(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_update_nfs_share(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- def pre_update_provisioning_config(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_update_provisioning_config(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- def pre_update_volume(self, request, metadata):
- logging.info(f"Received request: {request}")
- return request, metadata
-
- def post_update_volume(self, response):
- logging.info(f"Received response: {response}")
- return response
-
- transport = BareMetalSolutionRestTransport(interceptor=MyCustomBareMetalSolutionInterceptor())
- client =
BareMetalSolutionClient(transport=transport) - - - """ - def pre_create_nfs_share(self, request: gcb_nfs_share.CreateNfsShareRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gcb_nfs_share.CreateNfsShareRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_nfs_share - - Override in a subclass to manipulate the request or metadata - before they are sent to the BareMetalSolution server. - """ - return request, metadata - - def post_create_nfs_share(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_nfs_share - - DEPRECATED. Please use the `post_create_nfs_share_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BareMetalSolution server but before - it is returned to user code. This `post_create_nfs_share` interceptor runs - before the `post_create_nfs_share_with_metadata` interceptor. - """ - return response - - def post_create_nfs_share_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_nfs_share - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BareMetalSolution server but before it is returned to user code. - - We recommend only using this `post_create_nfs_share_with_metadata` - interceptor in new development instead of the `post_create_nfs_share` interceptor. - When both interceptors are used, this `post_create_nfs_share_with_metadata` interceptor runs after the - `post_create_nfs_share` interceptor. The (possibly modified) response returned by - `post_create_nfs_share` will be passed to - `post_create_nfs_share_with_metadata`. - """ - return response, metadata - - def pre_create_provisioning_config(self, request: provisioning.CreateProvisioningConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[provisioning.CreateProvisioningConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_provisioning_config - - Override in a subclass to manipulate the request or metadata - before they are sent to the BareMetalSolution server. - """ - return request, metadata - - def post_create_provisioning_config(self, response: provisioning.ProvisioningConfig) -> provisioning.ProvisioningConfig: - """Post-rpc interceptor for create_provisioning_config - - DEPRECATED. Please use the `post_create_provisioning_config_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BareMetalSolution server but before - it is returned to user code. This `post_create_provisioning_config` interceptor runs - before the `post_create_provisioning_config_with_metadata` interceptor. - """ - return response - - def post_create_provisioning_config_with_metadata(self, response: provisioning.ProvisioningConfig, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[provisioning.ProvisioningConfig, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_provisioning_config - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BareMetalSolution server but before it is returned to user code. 
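In practice a subclass overrides only the hooks it needs; the pre-hook can append request metadata before the call goes out, and the ``*_with_metadata`` post-hook can observe both the decoded response and the trailing metadata. A sketch with hypothetical class and header names:

.. code-block:: python

    import logging

    class AuditingInterceptor(BareMetalSolutionRestInterceptor):
        def pre_create_nfs_share(self, request, metadata):
            # Append a custom header before the request is sent.
            metadata = list(metadata) + [("x-example-audit", "create-nfs-share")]
            return request, metadata

        def post_create_nfs_share_with_metadata(self, response, metadata):
            # Observe the long-running operation on the way back.
            logging.info("create_nfs_share returned operation %s", response.name)
            return response, metadata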
- - We recommend only using this `post_create_provisioning_config_with_metadata` - interceptor in new development instead of the `post_create_provisioning_config` interceptor. - When both interceptors are used, this `post_create_provisioning_config_with_metadata` interceptor runs after the - `post_create_provisioning_config` interceptor. The (possibly modified) response returned by - `post_create_provisioning_config` will be passed to - `post_create_provisioning_config_with_metadata`. - """ - return response, metadata - - def pre_create_ssh_key(self, request: gcb_ssh_key.CreateSSHKeyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gcb_ssh_key.CreateSSHKeyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_ssh_key - - Override in a subclass to manipulate the request or metadata - before they are sent to the BareMetalSolution server. - """ - return request, metadata - - def post_create_ssh_key(self, response: gcb_ssh_key.SSHKey) -> gcb_ssh_key.SSHKey: - """Post-rpc interceptor for create_ssh_key - - DEPRECATED. Please use the `post_create_ssh_key_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BareMetalSolution server but before - it is returned to user code. This `post_create_ssh_key` interceptor runs - before the `post_create_ssh_key_with_metadata` interceptor. - """ - return response - - def post_create_ssh_key_with_metadata(self, response: gcb_ssh_key.SSHKey, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gcb_ssh_key.SSHKey, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_ssh_key - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BareMetalSolution server but before it is returned to user code. - - We recommend only using this `post_create_ssh_key_with_metadata` - interceptor in new development instead of the `post_create_ssh_key` interceptor. - When both interceptors are used, this `post_create_ssh_key_with_metadata` interceptor runs after the - `post_create_ssh_key` interceptor. The (possibly modified) response returned by - `post_create_ssh_key` will be passed to - `post_create_ssh_key_with_metadata`. - """ - return response, metadata - - def pre_create_volume_snapshot(self, request: gcb_volume_snapshot.CreateVolumeSnapshotRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gcb_volume_snapshot.CreateVolumeSnapshotRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_volume_snapshot - - Override in a subclass to manipulate the request or metadata - before they are sent to the BareMetalSolution server. - """ - return request, metadata - - def post_create_volume_snapshot(self, response: gcb_volume_snapshot.VolumeSnapshot) -> gcb_volume_snapshot.VolumeSnapshot: - """Post-rpc interceptor for create_volume_snapshot - - DEPRECATED. Please use the `post_create_volume_snapshot_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BareMetalSolution server but before - it is returned to user code. This `post_create_volume_snapshot` interceptor runs - before the `post_create_volume_snapshot_with_metadata` interceptor. 
- """ - return response - - def post_create_volume_snapshot_with_metadata(self, response: gcb_volume_snapshot.VolumeSnapshot, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gcb_volume_snapshot.VolumeSnapshot, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_volume_snapshot - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BareMetalSolution server but before it is returned to user code. - - We recommend only using this `post_create_volume_snapshot_with_metadata` - interceptor in new development instead of the `post_create_volume_snapshot` interceptor. - When both interceptors are used, this `post_create_volume_snapshot_with_metadata` interceptor runs after the - `post_create_volume_snapshot` interceptor. The (possibly modified) response returned by - `post_create_volume_snapshot` will be passed to - `post_create_volume_snapshot_with_metadata`. - """ - return response, metadata - - def pre_delete_nfs_share(self, request: nfs_share.DeleteNfsShareRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[nfs_share.DeleteNfsShareRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_nfs_share - - Override in a subclass to manipulate the request or metadata - before they are sent to the BareMetalSolution server. - """ - return request, metadata - - def post_delete_nfs_share(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_nfs_share - - DEPRECATED. Please use the `post_delete_nfs_share_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BareMetalSolution server but before - it is returned to user code. This `post_delete_nfs_share` interceptor runs - before the `post_delete_nfs_share_with_metadata` interceptor. - """ - return response - - def post_delete_nfs_share_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for delete_nfs_share - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BareMetalSolution server but before it is returned to user code. - - We recommend only using this `post_delete_nfs_share_with_metadata` - interceptor in new development instead of the `post_delete_nfs_share` interceptor. - When both interceptors are used, this `post_delete_nfs_share_with_metadata` interceptor runs after the - `post_delete_nfs_share` interceptor. The (possibly modified) response returned by - `post_delete_nfs_share` will be passed to - `post_delete_nfs_share_with_metadata`. - """ - return response, metadata - - def pre_delete_ssh_key(self, request: ssh_key.DeleteSSHKeyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[ssh_key.DeleteSSHKeyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_ssh_key - - Override in a subclass to manipulate the request or metadata - before they are sent to the BareMetalSolution server. 
- """ - return request, metadata - - def pre_delete_volume_snapshot(self, request: volume_snapshot.DeleteVolumeSnapshotRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[volume_snapshot.DeleteVolumeSnapshotRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_volume_snapshot - - Override in a subclass to manipulate the request or metadata - before they are sent to the BareMetalSolution server. - """ - return request, metadata - - def pre_detach_lun(self, request: gcb_instance.DetachLunRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gcb_instance.DetachLunRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for detach_lun - - Override in a subclass to manipulate the request or metadata - before they are sent to the BareMetalSolution server. - """ - return request, metadata - - def post_detach_lun(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for detach_lun - - DEPRECATED. Please use the `post_detach_lun_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BareMetalSolution server but before - it is returned to user code. This `post_detach_lun` interceptor runs - before the `post_detach_lun_with_metadata` interceptor. - """ - return response - - def post_detach_lun_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for detach_lun - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BareMetalSolution server but before it is returned to user code. - - We recommend only using this `post_detach_lun_with_metadata` - interceptor in new development instead of the `post_detach_lun` interceptor. - When both interceptors are used, this `post_detach_lun_with_metadata` interceptor runs after the - `post_detach_lun` interceptor. The (possibly modified) response returned by - `post_detach_lun` will be passed to - `post_detach_lun_with_metadata`. - """ - return response, metadata - - def pre_disable_interactive_serial_console(self, request: instance.DisableInteractiveSerialConsoleRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[instance.DisableInteractiveSerialConsoleRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for disable_interactive_serial_console - - Override in a subclass to manipulate the request or metadata - before they are sent to the BareMetalSolution server. - """ - return request, metadata - - def post_disable_interactive_serial_console(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for disable_interactive_serial_console - - DEPRECATED. Please use the `post_disable_interactive_serial_console_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BareMetalSolution server but before - it is returned to user code. This `post_disable_interactive_serial_console` interceptor runs - before the `post_disable_interactive_serial_console_with_metadata` interceptor. 
- """ - return response - - def post_disable_interactive_serial_console_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for disable_interactive_serial_console - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BareMetalSolution server but before it is returned to user code. - - We recommend only using this `post_disable_interactive_serial_console_with_metadata` - interceptor in new development instead of the `post_disable_interactive_serial_console` interceptor. - When both interceptors are used, this `post_disable_interactive_serial_console_with_metadata` interceptor runs after the - `post_disable_interactive_serial_console` interceptor. The (possibly modified) response returned by - `post_disable_interactive_serial_console` will be passed to - `post_disable_interactive_serial_console_with_metadata`. - """ - return response, metadata - - def pre_enable_interactive_serial_console(self, request: instance.EnableInteractiveSerialConsoleRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[instance.EnableInteractiveSerialConsoleRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for enable_interactive_serial_console - - Override in a subclass to manipulate the request or metadata - before they are sent to the BareMetalSolution server. - """ - return request, metadata - - def post_enable_interactive_serial_console(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for enable_interactive_serial_console - - DEPRECATED. Please use the `post_enable_interactive_serial_console_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BareMetalSolution server but before - it is returned to user code. This `post_enable_interactive_serial_console` interceptor runs - before the `post_enable_interactive_serial_console_with_metadata` interceptor. - """ - return response - - def post_enable_interactive_serial_console_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for enable_interactive_serial_console - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BareMetalSolution server but before it is returned to user code. - - We recommend only using this `post_enable_interactive_serial_console_with_metadata` - interceptor in new development instead of the `post_enable_interactive_serial_console` interceptor. - When both interceptors are used, this `post_enable_interactive_serial_console_with_metadata` interceptor runs after the - `post_enable_interactive_serial_console` interceptor. The (possibly modified) response returned by - `post_enable_interactive_serial_console` will be passed to - `post_enable_interactive_serial_console_with_metadata`. - """ - return response, metadata - - def pre_evict_lun(self, request: lun.EvictLunRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[lun.EvictLunRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for evict_lun - - Override in a subclass to manipulate the request or metadata - before they are sent to the BareMetalSolution server. 
- """ - return request, metadata - - def post_evict_lun(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for evict_lun - - DEPRECATED. Please use the `post_evict_lun_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BareMetalSolution server but before - it is returned to user code. This `post_evict_lun` interceptor runs - before the `post_evict_lun_with_metadata` interceptor. - """ - return response - - def post_evict_lun_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for evict_lun - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BareMetalSolution server but before it is returned to user code. - - We recommend only using this `post_evict_lun_with_metadata` - interceptor in new development instead of the `post_evict_lun` interceptor. - When both interceptors are used, this `post_evict_lun_with_metadata` interceptor runs after the - `post_evict_lun` interceptor. The (possibly modified) response returned by - `post_evict_lun` will be passed to - `post_evict_lun_with_metadata`. - """ - return response, metadata - - def pre_evict_volume(self, request: volume.EvictVolumeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[volume.EvictVolumeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for evict_volume - - Override in a subclass to manipulate the request or metadata - before they are sent to the BareMetalSolution server. - """ - return request, metadata - - def post_evict_volume(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for evict_volume - - DEPRECATED. Please use the `post_evict_volume_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BareMetalSolution server but before - it is returned to user code. This `post_evict_volume` interceptor runs - before the `post_evict_volume_with_metadata` interceptor. - """ - return response - - def post_evict_volume_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for evict_volume - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BareMetalSolution server but before it is returned to user code. - - We recommend only using this `post_evict_volume_with_metadata` - interceptor in new development instead of the `post_evict_volume` interceptor. - When both interceptors are used, this `post_evict_volume_with_metadata` interceptor runs after the - `post_evict_volume` interceptor. The (possibly modified) response returned by - `post_evict_volume` will be passed to - `post_evict_volume_with_metadata`. - """ - return response, metadata - - def pre_get_instance(self, request: instance.GetInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[instance.GetInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_instance - - Override in a subclass to manipulate the request or metadata - before they are sent to the BareMetalSolution server. 
- """ - return request, metadata - - def post_get_instance(self, response: instance.Instance) -> instance.Instance: - """Post-rpc interceptor for get_instance - - DEPRECATED. Please use the `post_get_instance_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BareMetalSolution server but before - it is returned to user code. This `post_get_instance` interceptor runs - before the `post_get_instance_with_metadata` interceptor. - """ - return response - - def post_get_instance_with_metadata(self, response: instance.Instance, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[instance.Instance, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_instance - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BareMetalSolution server but before it is returned to user code. - - We recommend only using this `post_get_instance_with_metadata` - interceptor in new development instead of the `post_get_instance` interceptor. - When both interceptors are used, this `post_get_instance_with_metadata` interceptor runs after the - `post_get_instance` interceptor. The (possibly modified) response returned by - `post_get_instance` will be passed to - `post_get_instance_with_metadata`. - """ - return response, metadata - - def pre_get_lun(self, request: lun.GetLunRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[lun.GetLunRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_lun - - Override in a subclass to manipulate the request or metadata - before they are sent to the BareMetalSolution server. - """ - return request, metadata - - def post_get_lun(self, response: lun.Lun) -> lun.Lun: - """Post-rpc interceptor for get_lun - - DEPRECATED. Please use the `post_get_lun_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BareMetalSolution server but before - it is returned to user code. This `post_get_lun` interceptor runs - before the `post_get_lun_with_metadata` interceptor. - """ - return response - - def post_get_lun_with_metadata(self, response: lun.Lun, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[lun.Lun, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_lun - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BareMetalSolution server but before it is returned to user code. - - We recommend only using this `post_get_lun_with_metadata` - interceptor in new development instead of the `post_get_lun` interceptor. - When both interceptors are used, this `post_get_lun_with_metadata` interceptor runs after the - `post_get_lun` interceptor. The (possibly modified) response returned by - `post_get_lun` will be passed to - `post_get_lun_with_metadata`. - """ - return response, metadata - - def pre_get_network(self, request: network.GetNetworkRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[network.GetNetworkRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_network - - Override in a subclass to manipulate the request or metadata - before they are sent to the BareMetalSolution server. - """ - return request, metadata - - def post_get_network(self, response: network.Network) -> network.Network: - """Post-rpc interceptor for get_network - - DEPRECATED. 
Please use the `post_get_network_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BareMetalSolution server but before - it is returned to user code. This `post_get_network` interceptor runs - before the `post_get_network_with_metadata` interceptor. - """ - return response - - def post_get_network_with_metadata(self, response: network.Network, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[network.Network, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_network - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BareMetalSolution server but before it is returned to user code. - - We recommend only using this `post_get_network_with_metadata` - interceptor in new development instead of the `post_get_network` interceptor. - When both interceptors are used, this `post_get_network_with_metadata` interceptor runs after the - `post_get_network` interceptor. The (possibly modified) response returned by - `post_get_network` will be passed to - `post_get_network_with_metadata`. - """ - return response, metadata - - def pre_get_nfs_share(self, request: nfs_share.GetNfsShareRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[nfs_share.GetNfsShareRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_nfs_share - - Override in a subclass to manipulate the request or metadata - before they are sent to the BareMetalSolution server. - """ - return request, metadata - - def post_get_nfs_share(self, response: nfs_share.NfsShare) -> nfs_share.NfsShare: - """Post-rpc interceptor for get_nfs_share - - DEPRECATED. Please use the `post_get_nfs_share_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BareMetalSolution server but before - it is returned to user code. This `post_get_nfs_share` interceptor runs - before the `post_get_nfs_share_with_metadata` interceptor. - """ - return response - - def post_get_nfs_share_with_metadata(self, response: nfs_share.NfsShare, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[nfs_share.NfsShare, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_nfs_share - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BareMetalSolution server but before it is returned to user code. - - We recommend only using this `post_get_nfs_share_with_metadata` - interceptor in new development instead of the `post_get_nfs_share` interceptor. - When both interceptors are used, this `post_get_nfs_share_with_metadata` interceptor runs after the - `post_get_nfs_share` interceptor. The (possibly modified) response returned by - `post_get_nfs_share` will be passed to - `post_get_nfs_share_with_metadata`. - """ - return response, metadata - - def pre_get_provisioning_config(self, request: provisioning.GetProvisioningConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[provisioning.GetProvisioningConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_provisioning_config - - Override in a subclass to manipulate the request or metadata - before they are sent to the BareMetalSolution server. 
- """ - return request, metadata - - def post_get_provisioning_config(self, response: provisioning.ProvisioningConfig) -> provisioning.ProvisioningConfig: - """Post-rpc interceptor for get_provisioning_config - - DEPRECATED. Please use the `post_get_provisioning_config_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BareMetalSolution server but before - it is returned to user code. This `post_get_provisioning_config` interceptor runs - before the `post_get_provisioning_config_with_metadata` interceptor. - """ - return response - - def post_get_provisioning_config_with_metadata(self, response: provisioning.ProvisioningConfig, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[provisioning.ProvisioningConfig, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_provisioning_config - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BareMetalSolution server but before it is returned to user code. - - We recommend only using this `post_get_provisioning_config_with_metadata` - interceptor in new development instead of the `post_get_provisioning_config` interceptor. - When both interceptors are used, this `post_get_provisioning_config_with_metadata` interceptor runs after the - `post_get_provisioning_config` interceptor. The (possibly modified) response returned by - `post_get_provisioning_config` will be passed to - `post_get_provisioning_config_with_metadata`. - """ - return response, metadata - - def pre_get_volume(self, request: volume.GetVolumeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[volume.GetVolumeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_volume - - Override in a subclass to manipulate the request or metadata - before they are sent to the BareMetalSolution server. - """ - return request, metadata - - def post_get_volume(self, response: volume.Volume) -> volume.Volume: - """Post-rpc interceptor for get_volume - - DEPRECATED. Please use the `post_get_volume_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BareMetalSolution server but before - it is returned to user code. This `post_get_volume` interceptor runs - before the `post_get_volume_with_metadata` interceptor. - """ - return response - - def post_get_volume_with_metadata(self, response: volume.Volume, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[volume.Volume, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_volume - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BareMetalSolution server but before it is returned to user code. - - We recommend only using this `post_get_volume_with_metadata` - interceptor in new development instead of the `post_get_volume` interceptor. - When both interceptors are used, this `post_get_volume_with_metadata` interceptor runs after the - `post_get_volume` interceptor. The (possibly modified) response returned by - `post_get_volume` will be passed to - `post_get_volume_with_metadata`. 
- """ - return response, metadata - - def pre_get_volume_snapshot(self, request: volume_snapshot.GetVolumeSnapshotRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[volume_snapshot.GetVolumeSnapshotRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_volume_snapshot - - Override in a subclass to manipulate the request or metadata - before they are sent to the BareMetalSolution server. - """ - return request, metadata - - def post_get_volume_snapshot(self, response: volume_snapshot.VolumeSnapshot) -> volume_snapshot.VolumeSnapshot: - """Post-rpc interceptor for get_volume_snapshot - - DEPRECATED. Please use the `post_get_volume_snapshot_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BareMetalSolution server but before - it is returned to user code. This `post_get_volume_snapshot` interceptor runs - before the `post_get_volume_snapshot_with_metadata` interceptor. - """ - return response - - def post_get_volume_snapshot_with_metadata(self, response: volume_snapshot.VolumeSnapshot, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[volume_snapshot.VolumeSnapshot, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_volume_snapshot - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BareMetalSolution server but before it is returned to user code. - - We recommend only using this `post_get_volume_snapshot_with_metadata` - interceptor in new development instead of the `post_get_volume_snapshot` interceptor. - When both interceptors are used, this `post_get_volume_snapshot_with_metadata` interceptor runs after the - `post_get_volume_snapshot` interceptor. The (possibly modified) response returned by - `post_get_volume_snapshot` will be passed to - `post_get_volume_snapshot_with_metadata`. - """ - return response, metadata - - def pre_list_instances(self, request: instance.ListInstancesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[instance.ListInstancesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_instances - - Override in a subclass to manipulate the request or metadata - before they are sent to the BareMetalSolution server. - """ - return request, metadata - - def post_list_instances(self, response: instance.ListInstancesResponse) -> instance.ListInstancesResponse: - """Post-rpc interceptor for list_instances - - DEPRECATED. Please use the `post_list_instances_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BareMetalSolution server but before - it is returned to user code. This `post_list_instances` interceptor runs - before the `post_list_instances_with_metadata` interceptor. - """ - return response - - def post_list_instances_with_metadata(self, response: instance.ListInstancesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[instance.ListInstancesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_instances - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BareMetalSolution server but before it is returned to user code. - - We recommend only using this `post_list_instances_with_metadata` - interceptor in new development instead of the `post_list_instances` interceptor. 
- When both interceptors are used, this `post_list_instances_with_metadata` interceptor runs after the - `post_list_instances` interceptor. The (possibly modified) response returned by - `post_list_instances` will be passed to - `post_list_instances_with_metadata`. - """ - return response, metadata - - def pre_list_luns(self, request: lun.ListLunsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[lun.ListLunsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_luns - - Override in a subclass to manipulate the request or metadata - before they are sent to the BareMetalSolution server. - """ - return request, metadata - - def post_list_luns(self, response: lun.ListLunsResponse) -> lun.ListLunsResponse: - """Post-rpc interceptor for list_luns - - DEPRECATED. Please use the `post_list_luns_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BareMetalSolution server but before - it is returned to user code. This `post_list_luns` interceptor runs - before the `post_list_luns_with_metadata` interceptor. - """ - return response - - def post_list_luns_with_metadata(self, response: lun.ListLunsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[lun.ListLunsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_luns - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BareMetalSolution server but before it is returned to user code. - - We recommend only using this `post_list_luns_with_metadata` - interceptor in new development instead of the `post_list_luns` interceptor. - When both interceptors are used, this `post_list_luns_with_metadata` interceptor runs after the - `post_list_luns` interceptor. The (possibly modified) response returned by - `post_list_luns` will be passed to - `post_list_luns_with_metadata`. - """ - return response, metadata - - def pre_list_networks(self, request: network.ListNetworksRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[network.ListNetworksRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_networks - - Override in a subclass to manipulate the request or metadata - before they are sent to the BareMetalSolution server. - """ - return request, metadata - - def post_list_networks(self, response: network.ListNetworksResponse) -> network.ListNetworksResponse: - """Post-rpc interceptor for list_networks - - DEPRECATED. Please use the `post_list_networks_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BareMetalSolution server but before - it is returned to user code. This `post_list_networks` interceptor runs - before the `post_list_networks_with_metadata` interceptor. - """ - return response - - def post_list_networks_with_metadata(self, response: network.ListNetworksResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[network.ListNetworksResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_networks - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BareMetalSolution server but before it is returned to user code. - - We recommend only using this `post_list_networks_with_metadata` - interceptor in new development instead of the `post_list_networks` interceptor. 
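To make the documented ordering concrete: when a subclass overrides both hooks for the same RPC, the legacy hook runs first and whatever it returns is what the metadata-aware hook receives. A sketch of that contract (class name hypothetical):

.. code-block:: python

    class OrderingDemoInterceptor(BareMetalSolutionRestInterceptor):
        def post_list_networks(self, response):
            # Runs first; its return value is handed to
            # post_list_networks_with_metadata below.
            return response

        def post_list_networks_with_metadata(self, response, metadata):
            # Runs second, on the (possibly modified) response from above.
            return response, metadata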
- When both interceptors are used, this `post_list_networks_with_metadata` interceptor runs after the - `post_list_networks` interceptor. The (possibly modified) response returned by - `post_list_networks` will be passed to - `post_list_networks_with_metadata`. - """ - return response, metadata - - def pre_list_network_usage(self, request: network.ListNetworkUsageRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[network.ListNetworkUsageRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_network_usage - - Override in a subclass to manipulate the request or metadata - before they are sent to the BareMetalSolution server. - """ - return request, metadata - - def post_list_network_usage(self, response: network.ListNetworkUsageResponse) -> network.ListNetworkUsageResponse: - """Post-rpc interceptor for list_network_usage - - DEPRECATED. Please use the `post_list_network_usage_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BareMetalSolution server but before - it is returned to user code. This `post_list_network_usage` interceptor runs - before the `post_list_network_usage_with_metadata` interceptor. - """ - return response - - def post_list_network_usage_with_metadata(self, response: network.ListNetworkUsageResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[network.ListNetworkUsageResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_network_usage - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BareMetalSolution server but before it is returned to user code. - - We recommend only using this `post_list_network_usage_with_metadata` - interceptor in new development instead of the `post_list_network_usage` interceptor. - When both interceptors are used, this `post_list_network_usage_with_metadata` interceptor runs after the - `post_list_network_usage` interceptor. The (possibly modified) response returned by - `post_list_network_usage` will be passed to - `post_list_network_usage_with_metadata`. - """ - return response, metadata - - def pre_list_nfs_shares(self, request: nfs_share.ListNfsSharesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[nfs_share.ListNfsSharesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_nfs_shares - - Override in a subclass to manipulate the request or metadata - before they are sent to the BareMetalSolution server. - """ - return request, metadata - - def post_list_nfs_shares(self, response: nfs_share.ListNfsSharesResponse) -> nfs_share.ListNfsSharesResponse: - """Post-rpc interceptor for list_nfs_shares - - DEPRECATED. Please use the `post_list_nfs_shares_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BareMetalSolution server but before - it is returned to user code. This `post_list_nfs_shares` interceptor runs - before the `post_list_nfs_shares_with_metadata` interceptor. 
- """ - return response - - def post_list_nfs_shares_with_metadata(self, response: nfs_share.ListNfsSharesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[nfs_share.ListNfsSharesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_nfs_shares - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BareMetalSolution server but before it is returned to user code. - - We recommend only using this `post_list_nfs_shares_with_metadata` - interceptor in new development instead of the `post_list_nfs_shares` interceptor. - When both interceptors are used, this `post_list_nfs_shares_with_metadata` interceptor runs after the - `post_list_nfs_shares` interceptor. The (possibly modified) response returned by - `post_list_nfs_shares` will be passed to - `post_list_nfs_shares_with_metadata`. - """ - return response, metadata - - def pre_list_os_images(self, request: osimage.ListOSImagesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[osimage.ListOSImagesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_os_images - - Override in a subclass to manipulate the request or metadata - before they are sent to the BareMetalSolution server. - """ - return request, metadata - - def post_list_os_images(self, response: osimage.ListOSImagesResponse) -> osimage.ListOSImagesResponse: - """Post-rpc interceptor for list_os_images - - DEPRECATED. Please use the `post_list_os_images_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BareMetalSolution server but before - it is returned to user code. This `post_list_os_images` interceptor runs - before the `post_list_os_images_with_metadata` interceptor. - """ - return response - - def post_list_os_images_with_metadata(self, response: osimage.ListOSImagesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[osimage.ListOSImagesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_os_images - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BareMetalSolution server but before it is returned to user code. - - We recommend only using this `post_list_os_images_with_metadata` - interceptor in new development instead of the `post_list_os_images` interceptor. - When both interceptors are used, this `post_list_os_images_with_metadata` interceptor runs after the - `post_list_os_images` interceptor. The (possibly modified) response returned by - `post_list_os_images` will be passed to - `post_list_os_images_with_metadata`. - """ - return response, metadata - - def pre_list_provisioning_quotas(self, request: provisioning.ListProvisioningQuotasRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[provisioning.ListProvisioningQuotasRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_provisioning_quotas - - Override in a subclass to manipulate the request or metadata - before they are sent to the BareMetalSolution server. - """ - return request, metadata - - def post_list_provisioning_quotas(self, response: provisioning.ListProvisioningQuotasResponse) -> provisioning.ListProvisioningQuotasResponse: - """Post-rpc interceptor for list_provisioning_quotas - - DEPRECATED. Please use the `post_list_provisioning_quotas_with_metadata` - interceptor instead. 
- - Override in a subclass to read or manipulate the response - after it is returned by the BareMetalSolution server but before - it is returned to user code. This `post_list_provisioning_quotas` interceptor runs - before the `post_list_provisioning_quotas_with_metadata` interceptor. - """ - return response - - def post_list_provisioning_quotas_with_metadata(self, response: provisioning.ListProvisioningQuotasResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[provisioning.ListProvisioningQuotasResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_provisioning_quotas - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BareMetalSolution server but before it is returned to user code. - - We recommend only using this `post_list_provisioning_quotas_with_metadata` - interceptor in new development instead of the `post_list_provisioning_quotas` interceptor. - When both interceptors are used, this `post_list_provisioning_quotas_with_metadata` interceptor runs after the - `post_list_provisioning_quotas` interceptor. The (possibly modified) response returned by - `post_list_provisioning_quotas` will be passed to - `post_list_provisioning_quotas_with_metadata`. - """ - return response, metadata - - def pre_list_ssh_keys(self, request: ssh_key.ListSSHKeysRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[ssh_key.ListSSHKeysRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_ssh_keys - - Override in a subclass to manipulate the request or metadata - before they are sent to the BareMetalSolution server. - """ - return request, metadata - - def post_list_ssh_keys(self, response: ssh_key.ListSSHKeysResponse) -> ssh_key.ListSSHKeysResponse: - """Post-rpc interceptor for list_ssh_keys - - DEPRECATED. Please use the `post_list_ssh_keys_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BareMetalSolution server but before - it is returned to user code. This `post_list_ssh_keys` interceptor runs - before the `post_list_ssh_keys_with_metadata` interceptor. - """ - return response - - def post_list_ssh_keys_with_metadata(self, response: ssh_key.ListSSHKeysResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[ssh_key.ListSSHKeysResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_ssh_keys - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BareMetalSolution server but before it is returned to user code. - - We recommend only using this `post_list_ssh_keys_with_metadata` - interceptor in new development instead of the `post_list_ssh_keys` interceptor. - When both interceptors are used, this `post_list_ssh_keys_with_metadata` interceptor runs after the - `post_list_ssh_keys` interceptor. The (possibly modified) response returned by - `post_list_ssh_keys` will be passed to - `post_list_ssh_keys_with_metadata`. - """ - return response, metadata - - def pre_list_volumes(self, request: volume.ListVolumesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[volume.ListVolumesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_volumes - - Override in a subclass to manipulate the request or metadata - before they are sent to the BareMetalSolution server. 
- """ - return request, metadata - - def post_list_volumes(self, response: volume.ListVolumesResponse) -> volume.ListVolumesResponse: - """Post-rpc interceptor for list_volumes - - DEPRECATED. Please use the `post_list_volumes_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BareMetalSolution server but before - it is returned to user code. This `post_list_volumes` interceptor runs - before the `post_list_volumes_with_metadata` interceptor. - """ - return response - - def post_list_volumes_with_metadata(self, response: volume.ListVolumesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[volume.ListVolumesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_volumes - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BareMetalSolution server but before it is returned to user code. - - We recommend only using this `post_list_volumes_with_metadata` - interceptor in new development instead of the `post_list_volumes` interceptor. - When both interceptors are used, this `post_list_volumes_with_metadata` interceptor runs after the - `post_list_volumes` interceptor. The (possibly modified) response returned by - `post_list_volumes` will be passed to - `post_list_volumes_with_metadata`. - """ - return response, metadata - - def pre_list_volume_snapshots(self, request: volume_snapshot.ListVolumeSnapshotsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[volume_snapshot.ListVolumeSnapshotsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_volume_snapshots - - Override in a subclass to manipulate the request or metadata - before they are sent to the BareMetalSolution server. - """ - return request, metadata - - def post_list_volume_snapshots(self, response: volume_snapshot.ListVolumeSnapshotsResponse) -> volume_snapshot.ListVolumeSnapshotsResponse: - """Post-rpc interceptor for list_volume_snapshots - - DEPRECATED. Please use the `post_list_volume_snapshots_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BareMetalSolution server but before - it is returned to user code. This `post_list_volume_snapshots` interceptor runs - before the `post_list_volume_snapshots_with_metadata` interceptor. - """ - return response - - def post_list_volume_snapshots_with_metadata(self, response: volume_snapshot.ListVolumeSnapshotsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[volume_snapshot.ListVolumeSnapshotsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_volume_snapshots - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BareMetalSolution server but before it is returned to user code. - - We recommend only using this `post_list_volume_snapshots_with_metadata` - interceptor in new development instead of the `post_list_volume_snapshots` interceptor. - When both interceptors are used, this `post_list_volume_snapshots_with_metadata` interceptor runs after the - `post_list_volume_snapshots` interceptor. The (possibly modified) response returned by - `post_list_volume_snapshots` will be passed to - `post_list_volume_snapshots_with_metadata`. 
- """ - return response, metadata - - def pre_rename_instance(self, request: instance.RenameInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[instance.RenameInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for rename_instance - - Override in a subclass to manipulate the request or metadata - before they are sent to the BareMetalSolution server. - """ - return request, metadata - - def post_rename_instance(self, response: instance.Instance) -> instance.Instance: - """Post-rpc interceptor for rename_instance - - DEPRECATED. Please use the `post_rename_instance_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BareMetalSolution server but before - it is returned to user code. This `post_rename_instance` interceptor runs - before the `post_rename_instance_with_metadata` interceptor. - """ - return response - - def post_rename_instance_with_metadata(self, response: instance.Instance, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[instance.Instance, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for rename_instance - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BareMetalSolution server but before it is returned to user code. - - We recommend only using this `post_rename_instance_with_metadata` - interceptor in new development instead of the `post_rename_instance` interceptor. - When both interceptors are used, this `post_rename_instance_with_metadata` interceptor runs after the - `post_rename_instance` interceptor. The (possibly modified) response returned by - `post_rename_instance` will be passed to - `post_rename_instance_with_metadata`. - """ - return response, metadata - - def pre_rename_network(self, request: network.RenameNetworkRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[network.RenameNetworkRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for rename_network - - Override in a subclass to manipulate the request or metadata - before they are sent to the BareMetalSolution server. - """ - return request, metadata - - def post_rename_network(self, response: network.Network) -> network.Network: - """Post-rpc interceptor for rename_network - - DEPRECATED. Please use the `post_rename_network_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BareMetalSolution server but before - it is returned to user code. This `post_rename_network` interceptor runs - before the `post_rename_network_with_metadata` interceptor. - """ - return response - - def post_rename_network_with_metadata(self, response: network.Network, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[network.Network, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for rename_network - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BareMetalSolution server but before it is returned to user code. - - We recommend only using this `post_rename_network_with_metadata` - interceptor in new development instead of the `post_rename_network` interceptor. - When both interceptors are used, this `post_rename_network_with_metadata` interceptor runs after the - `post_rename_network` interceptor. 
The (possibly modified) response returned by - `post_rename_network` will be passed to - `post_rename_network_with_metadata`. - """ - return response, metadata - - def pre_rename_nfs_share(self, request: nfs_share.RenameNfsShareRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[nfs_share.RenameNfsShareRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for rename_nfs_share - - Override in a subclass to manipulate the request or metadata - before they are sent to the BareMetalSolution server. - """ - return request, metadata - - def post_rename_nfs_share(self, response: nfs_share.NfsShare) -> nfs_share.NfsShare: - """Post-rpc interceptor for rename_nfs_share - - DEPRECATED. Please use the `post_rename_nfs_share_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BareMetalSolution server but before - it is returned to user code. This `post_rename_nfs_share` interceptor runs - before the `post_rename_nfs_share_with_metadata` interceptor. - """ - return response - - def post_rename_nfs_share_with_metadata(self, response: nfs_share.NfsShare, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[nfs_share.NfsShare, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for rename_nfs_share - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BareMetalSolution server but before it is returned to user code. - - We recommend only using this `post_rename_nfs_share_with_metadata` - interceptor in new development instead of the `post_rename_nfs_share` interceptor. - When both interceptors are used, this `post_rename_nfs_share_with_metadata` interceptor runs after the - `post_rename_nfs_share` interceptor. The (possibly modified) response returned by - `post_rename_nfs_share` will be passed to - `post_rename_nfs_share_with_metadata`. - """ - return response, metadata - - def pre_rename_volume(self, request: volume.RenameVolumeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[volume.RenameVolumeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for rename_volume - - Override in a subclass to manipulate the request or metadata - before they are sent to the BareMetalSolution server. - """ - return request, metadata - - def post_rename_volume(self, response: volume.Volume) -> volume.Volume: - """Post-rpc interceptor for rename_volume - - DEPRECATED. Please use the `post_rename_volume_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BareMetalSolution server but before - it is returned to user code. This `post_rename_volume` interceptor runs - before the `post_rename_volume_with_metadata` interceptor. - """ - return response - - def post_rename_volume_with_metadata(self, response: volume.Volume, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[volume.Volume, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for rename_volume - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BareMetalSolution server but before it is returned to user code. - - We recommend only using this `post_rename_volume_with_metadata` - interceptor in new development instead of the `post_rename_volume` interceptor. 
- When both interceptors are used, this `post_rename_volume_with_metadata` interceptor runs after the - `post_rename_volume` interceptor. The (possibly modified) response returned by - `post_rename_volume` will be passed to - `post_rename_volume_with_metadata`. - """ - return response, metadata - - def pre_reset_instance(self, request: instance.ResetInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[instance.ResetInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for reset_instance - - Override in a subclass to manipulate the request or metadata - before they are sent to the BareMetalSolution server. - """ - return request, metadata - - def post_reset_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for reset_instance - - DEPRECATED. Please use the `post_reset_instance_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BareMetalSolution server but before - it is returned to user code. This `post_reset_instance` interceptor runs - before the `post_reset_instance_with_metadata` interceptor. - """ - return response - - def post_reset_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for reset_instance - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BareMetalSolution server but before it is returned to user code. - - We recommend only using this `post_reset_instance_with_metadata` - interceptor in new development instead of the `post_reset_instance` interceptor. - When both interceptors are used, this `post_reset_instance_with_metadata` interceptor runs after the - `post_reset_instance` interceptor. The (possibly modified) response returned by - `post_reset_instance` will be passed to - `post_reset_instance_with_metadata`. - """ - return response, metadata - - def pre_resize_volume(self, request: gcb_volume.ResizeVolumeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gcb_volume.ResizeVolumeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for resize_volume - - Override in a subclass to manipulate the request or metadata - before they are sent to the BareMetalSolution server. - """ - return request, metadata - - def post_resize_volume(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for resize_volume - - DEPRECATED. Please use the `post_resize_volume_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BareMetalSolution server but before - it is returned to user code. This `post_resize_volume` interceptor runs - before the `post_resize_volume_with_metadata` interceptor. - """ - return response - - def post_resize_volume_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for resize_volume - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BareMetalSolution server but before it is returned to user code. 
- - We recommend only using this `post_resize_volume_with_metadata` - interceptor in new development instead of the `post_resize_volume` interceptor. - When both interceptors are used, this `post_resize_volume_with_metadata` interceptor runs after the - `post_resize_volume` interceptor. The (possibly modified) response returned by - `post_resize_volume` will be passed to - `post_resize_volume_with_metadata`. - """ - return response, metadata - - def pre_restore_volume_snapshot(self, request: gcb_volume_snapshot.RestoreVolumeSnapshotRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gcb_volume_snapshot.RestoreVolumeSnapshotRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for restore_volume_snapshot - - Override in a subclass to manipulate the request or metadata - before they are sent to the BareMetalSolution server. - """ - return request, metadata - - def post_restore_volume_snapshot(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for restore_volume_snapshot - - DEPRECATED. Please use the `post_restore_volume_snapshot_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BareMetalSolution server but before - it is returned to user code. This `post_restore_volume_snapshot` interceptor runs - before the `post_restore_volume_snapshot_with_metadata` interceptor. - """ - return response - - def post_restore_volume_snapshot_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for restore_volume_snapshot - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BareMetalSolution server but before it is returned to user code. - - We recommend only using this `post_restore_volume_snapshot_with_metadata` - interceptor in new development instead of the `post_restore_volume_snapshot` interceptor. - When both interceptors are used, this `post_restore_volume_snapshot_with_metadata` interceptor runs after the - `post_restore_volume_snapshot` interceptor. The (possibly modified) response returned by - `post_restore_volume_snapshot` will be passed to - `post_restore_volume_snapshot_with_metadata`. - """ - return response, metadata - - def pre_start_instance(self, request: instance.StartInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[instance.StartInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for start_instance - - Override in a subclass to manipulate the request or metadata - before they are sent to the BareMetalSolution server. - """ - return request, metadata - - def post_start_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for start_instance - - DEPRECATED. Please use the `post_start_instance_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BareMetalSolution server but before - it is returned to user code. This `post_start_instance` interceptor runs - before the `post_start_instance_with_metadata` interceptor. 
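Note that RPCs such as :code:`reset_instance`, :code:`resize_volume`, and :code:`restore_volume_snapshot` are long-running: their post hooks are typed against :code:`operations_pb2.Operation`, so an override sees the raw operation proto rather than the final resource. A sketch (class name hypothetical):

.. code-block:: python

    class OperationAuditInterceptor(BareMetalSolutionRestInterceptor):
        def post_resize_volume_with_metadata(self, response, metadata):
            # response is the raw long-running Operation, not the resized
            # Volume; the final result is only available once it completes.
            print(f"operation {response.name} done={response.done}")
            return response, metadata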
- """ - return response - - def post_start_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for start_instance - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BareMetalSolution server but before it is returned to user code. - - We recommend only using this `post_start_instance_with_metadata` - interceptor in new development instead of the `post_start_instance` interceptor. - When both interceptors are used, this `post_start_instance_with_metadata` interceptor runs after the - `post_start_instance` interceptor. The (possibly modified) response returned by - `post_start_instance` will be passed to - `post_start_instance_with_metadata`. - """ - return response, metadata - - def pre_stop_instance(self, request: instance.StopInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[instance.StopInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for stop_instance - - Override in a subclass to manipulate the request or metadata - before they are sent to the BareMetalSolution server. - """ - return request, metadata - - def post_stop_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for stop_instance - - DEPRECATED. Please use the `post_stop_instance_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BareMetalSolution server but before - it is returned to user code. This `post_stop_instance` interceptor runs - before the `post_stop_instance_with_metadata` interceptor. - """ - return response - - def post_stop_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for stop_instance - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BareMetalSolution server but before it is returned to user code. - - We recommend only using this `post_stop_instance_with_metadata` - interceptor in new development instead of the `post_stop_instance` interceptor. - When both interceptors are used, this `post_stop_instance_with_metadata` interceptor runs after the - `post_stop_instance` interceptor. The (possibly modified) response returned by - `post_stop_instance` will be passed to - `post_stop_instance_with_metadata`. - """ - return response, metadata - - def pre_submit_provisioning_config(self, request: provisioning.SubmitProvisioningConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[provisioning.SubmitProvisioningConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for submit_provisioning_config - - Override in a subclass to manipulate the request or metadata - before they are sent to the BareMetalSolution server. - """ - return request, metadata - - def post_submit_provisioning_config(self, response: provisioning.SubmitProvisioningConfigResponse) -> provisioning.SubmitProvisioningConfigResponse: - """Post-rpc interceptor for submit_provisioning_config - - DEPRECATED. Please use the `post_submit_provisioning_config_with_metadata` - interceptor instead. 
- - Override in a subclass to read or manipulate the response - after it is returned by the BareMetalSolution server but before - it is returned to user code. This `post_submit_provisioning_config` interceptor runs - before the `post_submit_provisioning_config_with_metadata` interceptor. - """ - return response - - def post_submit_provisioning_config_with_metadata(self, response: provisioning.SubmitProvisioningConfigResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[provisioning.SubmitProvisioningConfigResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for submit_provisioning_config - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BareMetalSolution server but before it is returned to user code. - - We recommend only using this `post_submit_provisioning_config_with_metadata` - interceptor in new development instead of the `post_submit_provisioning_config` interceptor. - When both interceptors are used, this `post_submit_provisioning_config_with_metadata` interceptor runs after the - `post_submit_provisioning_config` interceptor. The (possibly modified) response returned by - `post_submit_provisioning_config` will be passed to - `post_submit_provisioning_config_with_metadata`. - """ - return response, metadata - - def pre_update_instance(self, request: gcb_instance.UpdateInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gcb_instance.UpdateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_instance - - Override in a subclass to manipulate the request or metadata - before they are sent to the BareMetalSolution server. - """ - return request, metadata - - def post_update_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for update_instance - - DEPRECATED. Please use the `post_update_instance_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BareMetalSolution server but before - it is returned to user code. This `post_update_instance` interceptor runs - before the `post_update_instance_with_metadata` interceptor. - """ - return response - - def post_update_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_instance - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BareMetalSolution server but before it is returned to user code. - - We recommend only using this `post_update_instance_with_metadata` - interceptor in new development instead of the `post_update_instance` interceptor. - When both interceptors are used, this `post_update_instance_with_metadata` interceptor runs after the - `post_update_instance` interceptor. The (possibly modified) response returned by - `post_update_instance` will be passed to - `post_update_instance_with_metadata`. - """ - return response, metadata - - def pre_update_network(self, request: gcb_network.UpdateNetworkRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gcb_network.UpdateNetworkRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_network - - Override in a subclass to manipulate the request or metadata - before they are sent to the BareMetalSolution server. 
- """ - return request, metadata - - def post_update_network(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for update_network - - DEPRECATED. Please use the `post_update_network_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BareMetalSolution server but before - it is returned to user code. This `post_update_network` interceptor runs - before the `post_update_network_with_metadata` interceptor. - """ - return response - - def post_update_network_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_network - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BareMetalSolution server but before it is returned to user code. - - We recommend only using this `post_update_network_with_metadata` - interceptor in new development instead of the `post_update_network` interceptor. - When both interceptors are used, this `post_update_network_with_metadata` interceptor runs after the - `post_update_network` interceptor. The (possibly modified) response returned by - `post_update_network` will be passed to - `post_update_network_with_metadata`. - """ - return response, metadata - - def pre_update_nfs_share(self, request: gcb_nfs_share.UpdateNfsShareRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gcb_nfs_share.UpdateNfsShareRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_nfs_share - - Override in a subclass to manipulate the request or metadata - before they are sent to the BareMetalSolution server. - """ - return request, metadata - - def post_update_nfs_share(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for update_nfs_share - - DEPRECATED. Please use the `post_update_nfs_share_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BareMetalSolution server but before - it is returned to user code. This `post_update_nfs_share` interceptor runs - before the `post_update_nfs_share_with_metadata` interceptor. - """ - return response - - def post_update_nfs_share_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_nfs_share - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BareMetalSolution server but before it is returned to user code. - - We recommend only using this `post_update_nfs_share_with_metadata` - interceptor in new development instead of the `post_update_nfs_share` interceptor. - When both interceptors are used, this `post_update_nfs_share_with_metadata` interceptor runs after the - `post_update_nfs_share` interceptor. The (possibly modified) response returned by - `post_update_nfs_share` will be passed to - `post_update_nfs_share_with_metadata`. 
- """ - return response, metadata - - def pre_update_provisioning_config(self, request: provisioning.UpdateProvisioningConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[provisioning.UpdateProvisioningConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_provisioning_config - - Override in a subclass to manipulate the request or metadata - before they are sent to the BareMetalSolution server. - """ - return request, metadata - - def post_update_provisioning_config(self, response: provisioning.ProvisioningConfig) -> provisioning.ProvisioningConfig: - """Post-rpc interceptor for update_provisioning_config - - DEPRECATED. Please use the `post_update_provisioning_config_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BareMetalSolution server but before - it is returned to user code. This `post_update_provisioning_config` interceptor runs - before the `post_update_provisioning_config_with_metadata` interceptor. - """ - return response - - def post_update_provisioning_config_with_metadata(self, response: provisioning.ProvisioningConfig, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[provisioning.ProvisioningConfig, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_provisioning_config - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BareMetalSolution server but before it is returned to user code. - - We recommend only using this `post_update_provisioning_config_with_metadata` - interceptor in new development instead of the `post_update_provisioning_config` interceptor. - When both interceptors are used, this `post_update_provisioning_config_with_metadata` interceptor runs after the - `post_update_provisioning_config` interceptor. The (possibly modified) response returned by - `post_update_provisioning_config` will be passed to - `post_update_provisioning_config_with_metadata`. - """ - return response, metadata - - def pre_update_volume(self, request: gcb_volume.UpdateVolumeRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gcb_volume.UpdateVolumeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_volume - - Override in a subclass to manipulate the request or metadata - before they are sent to the BareMetalSolution server. - """ - return request, metadata - - def post_update_volume(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for update_volume - - DEPRECATED. Please use the `post_update_volume_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BareMetalSolution server but before - it is returned to user code. This `post_update_volume` interceptor runs - before the `post_update_volume_with_metadata` interceptor. - """ - return response - - def post_update_volume_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_volume - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BareMetalSolution server but before it is returned to user code. 
-
- We recommend only using this `post_update_volume_with_metadata`
- interceptor in new development instead of the `post_update_volume` interceptor.
- When both interceptors are used, this `post_update_volume_with_metadata` interceptor runs after the
- `post_update_volume` interceptor. The (possibly modified) response returned by
- `post_update_volume` will be passed to
- `post_update_volume_with_metadata`.
- """
- return response, metadata
-
- def pre_get_location(
- self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]
- ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]:
- """Pre-rpc interceptor for get_location
-
- Override in a subclass to manipulate the request or metadata
- before they are sent to the BareMetalSolution server.
- """
- return request, metadata
-
- def post_get_location(
- self, response: locations_pb2.Location
- ) -> locations_pb2.Location:
- """Post-rpc interceptor for get_location
-
- Override in a subclass to manipulate the response
- after it is returned by the BareMetalSolution server but before
- it is returned to user code.
- """
- return response
-
- def pre_list_locations(
- self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]
- ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]:
- """Pre-rpc interceptor for list_locations
-
- Override in a subclass to manipulate the request or metadata
- before they are sent to the BareMetalSolution server.
- """
- return request, metadata
-
- def post_list_locations(
- self, response: locations_pb2.ListLocationsResponse
- ) -> locations_pb2.ListLocationsResponse:
- """Post-rpc interceptor for list_locations
-
- Override in a subclass to manipulate the response
- after it is returned by the BareMetalSolution server but before
- it is returned to user code.
- """
- return response
-
-
- @dataclasses.dataclass
- class BareMetalSolutionRestStub:
- _session: AuthorizedSession
- _host: str
- _interceptor: BareMetalSolutionRestInterceptor
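The interceptor class above is deliberately a set of no-op hooks: each `pre_*` method may rewrite the outgoing request and its metadata before transcoding, and each `post_*`/`post_*_with_metadata` pair may rewrite the decoded response before it reaches user code. A minimal sketch of how a subclass plugs in, assuming the public import paths of the released `google-cloud-bare-metal-solution` package (the `AuditInterceptor` name and the printed messages are hypothetical):

```python
from typing import Sequence, Tuple, Union

from google.longrunning import operations_pb2

# Import paths assumed; adjust to the installed package layout.
from google.cloud.bare_metal_solution_v2 import BareMetalSolutionClient
from google.cloud.bare_metal_solution_v2.services.bare_metal_solution.transports.rest import (
    BareMetalSolutionRestInterceptor,
    BareMetalSolutionRestTransport,
)


class AuditInterceptor(BareMetalSolutionRestInterceptor):
    """Hypothetical interceptor that audits StopInstance calls."""

    def pre_stop_instance(self, request, metadata):
        # Runs before transcoding; both the request and the metadata
        # sequence may be returned modified.
        print(f"StopInstance requested for {request.name}")
        return request, metadata

    def post_stop_instance_with_metadata(
        self,
        response: operations_pb2.Operation,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]:
        # Runs after post_stop_instance, receiving the (possibly modified)
        # operation plus the HTTP response headers as metadata.
        print(f"StopInstance returned operation {response.name}")
        return response, metadata


# Wire the interceptor into a REST transport and hand it to the client;
# every RPC made through this client then passes through the hooks.
client = BareMetalSolutionClient(
    transport=BareMetalSolutionRestTransport(interceptor=AuditInterceptor())
)
```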
-
-
- class BareMetalSolutionRestTransport(_BaseBareMetalSolutionRestTransport):
- """REST backend synchronous transport for BareMetalSolution.
-
- Performs management operations on Bare Metal Solution servers.
-
- The ``baremetalsolution.googleapis.com`` service provides management
- capabilities for Bare Metal Solution servers. To access the API
- methods, you must assign Bare Metal Solution IAM roles containing
- the desired permissions to your staff in your Google Cloud project.
- You must also enable the Bare Metal Solution API. Once enabled, the
- methods act upon specific servers in your Bare Metal Solution
- environment.
-
- This class defines the same methods as the primary client, so the
- primary client can load the underlying transport implementation
- and call it.
-
- It sends JSON representations of protocol buffers over HTTP/1.1
- """
-
- def __init__(self, *,
- host: str = 'baremetalsolution.googleapis.com',
- credentials: Optional[ga_credentials.Credentials] = None,
- credentials_file: Optional[str] = None,
- scopes: Optional[Sequence[str]] = None,
- client_cert_source_for_mtls: Optional[Callable[[
- ], Tuple[bytes, bytes]]] = None,
- quota_project_id: Optional[str] = None,
- client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
- always_use_jwt_access: Optional[bool] = False,
- url_scheme: str = 'https',
- interceptor: Optional[BareMetalSolutionRestInterceptor] = None,
- api_audience: Optional[str] = None,
- ) -> None:
- """Instantiate the transport.
-
- Args:
- host (Optional[str]):
- The hostname to connect to (default: 'baremetalsolution.googleapis.com').
- credentials (Optional[google.auth.credentials.Credentials]): The
- authorization credentials to attach to requests. These
- credentials identify the application to the service; if none
- are specified, the client will attempt to ascertain the
- credentials from the environment.
-
- credentials_file (Optional[str]): A file with credentials that can
- be loaded with :func:`google.auth.load_credentials_from_file`.
- This argument is ignored if ``channel`` is provided.
- scopes (Optional[Sequence[str]]): A list of scopes. This argument is
- ignored if ``channel`` is provided.
- client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
- certificate to configure mutual TLS HTTP channel. It is ignored
- if ``channel`` is provided.
- quota_project_id (Optional[str]): An optional project to use for billing
- and quota.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you are developing
- your own client library.
- always_use_jwt_access (Optional[bool]): Whether self signed JWT should
- be used for service account credentials.
- url_scheme: the protocol scheme for the API endpoint. Normally
- "https", but for testing or local servers,
- "http" can be specified.
- """
- # Run the base constructor
- # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
- # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
- # credentials object
- super().__init__(
- host=host,
- credentials=credentials,
- client_info=client_info,
- always_use_jwt_access=always_use_jwt_access,
- url_scheme=url_scheme,
- api_audience=api_audience
- )
- self._session = AuthorizedSession(
- self._credentials, default_host=self.DEFAULT_HOST)
- self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
- if client_cert_source_for_mtls:
- self._session.configure_mtls_channel(client_cert_source_for_mtls)
- self._interceptor = interceptor or BareMetalSolutionRestInterceptor()
- self._prep_wrapped_messages(client_info)
-
- @property
- def operations_client(self) -> operations_v1.AbstractOperationsClient:
- """Create the client designed to process long-running operations.
-
- This property caches on the instance; repeated calls return the same
- client.
- """
- # Only create a new client if we do not already have one.
- if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = { - } - - rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v2") - - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) - - # Return the client from cache. - return self._operations_client - - class _CreateNfsShare(_BaseBareMetalSolutionRestTransport._BaseCreateNfsShare, BareMetalSolutionRestStub): - def __hash__(self): - return hash("BareMetalSolutionRestTransport.CreateNfsShare") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: gcb_nfs_share.CreateNfsShareRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the create nfs share method over HTTP. - - Args: - request (~.gcb_nfs_share.CreateNfsShareRequest): - The request object. Message for creating an NFS share. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = _BaseBareMetalSolutionRestTransport._BaseCreateNfsShare._get_http_options() - - request, metadata = self._interceptor.pre_create_nfs_share(request, metadata) - transcoded_request = _BaseBareMetalSolutionRestTransport._BaseCreateNfsShare._get_transcoded_request(http_options, request) - - body = _BaseBareMetalSolutionRestTransport._BaseCreateNfsShare._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBareMetalSolutionRestTransport._BaseCreateNfsShare._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.CreateNfsShare", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "CreateNfsShare", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BareMetalSolutionRestTransport._CreateNfsShare._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_nfs_share(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_nfs_share_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.create_nfs_share", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "CreateNfsShare", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateProvisioningConfig(_BaseBareMetalSolutionRestTransport._BaseCreateProvisioningConfig, BareMetalSolutionRestStub): - def __hash__(self): - return hash("BareMetalSolutionRestTransport.CreateProvisioningConfig") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: provisioning.CreateProvisioningConfigRequest, *, - retry: 
OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> provisioning.ProvisioningConfig: - r"""Call the create provisioning - config method over HTTP. - - Args: - request (~.provisioning.CreateProvisioningConfigRequest): - The request object. Request for CreateProvisioningConfig. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.provisioning.ProvisioningConfig: - A provisioning configuration. - """ - - http_options = _BaseBareMetalSolutionRestTransport._BaseCreateProvisioningConfig._get_http_options() - - request, metadata = self._interceptor.pre_create_provisioning_config(request, metadata) - transcoded_request = _BaseBareMetalSolutionRestTransport._BaseCreateProvisioningConfig._get_transcoded_request(http_options, request) - - body = _BaseBareMetalSolutionRestTransport._BaseCreateProvisioningConfig._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBareMetalSolutionRestTransport._BaseCreateProvisioningConfig._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.CreateProvisioningConfig", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "CreateProvisioningConfig", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BareMetalSolutionRestTransport._CreateProvisioningConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400:
- raise core_exceptions.from_http_response(response)
-
- # Return the response
- resp = provisioning.ProvisioningConfig()
- pb_resp = provisioning.ProvisioningConfig.pb(resp)
-
- json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
-
- resp = self._interceptor.post_create_provisioning_config(resp)
- response_metadata = [(k, str(v)) for k, v in response.headers.items()]
- resp, _ = self._interceptor.post_create_provisioning_config_with_metadata(resp, response_metadata)
- if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER
- try:
- # Log the parsed proto (resp), not the raw HTTP response object.
- response_payload = provisioning.ProvisioningConfig.to_json(resp)
- except:
- response_payload = None
- http_response = {
- "payload": response_payload,
- "headers": dict(response.headers),
- "status": response.status_code,
- }
- _LOGGER.debug(
- "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.create_provisioning_config",
- extra = {
- "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution",
- "rpcName": "CreateProvisioningConfig",
- "metadata": http_response["headers"],
- "httpResponse": http_response,
- },
- )
- return resp
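Every `__call__` in this transport shares the error convention visible above: any HTTP status of 400 or higher is converted by `core_exceptions.from_http_response` into the matching `GoogleAPICallError` subclass before parsing, so callers handle typed exceptions rather than raw status codes. Roughly, from the caller's side (using the client from the earlier sketch; the resource name and the empty config message are placeholders):

```python
from google.api_core import exceptions as core_exceptions
from google.cloud.bare_metal_solution_v2 import ProvisioningConfig  # path assumed

try:
    config = client.create_provisioning_config(
        parent="projects/my-project/locations/us-central1",  # placeholder
        provisioning_config=ProvisioningConfig(),            # placeholder message
    )
except core_exceptions.PermissionDenied:
    # HTTP 403 arrives as PermissionDenied.
    raise
except core_exceptions.GoogleAPICallError as exc:
    # Any other 4xx/5xx maps to a subclass carrying code and message.
    print(exc.code, exc.message)
```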
-
- class _CreateSSHKey(_BaseBareMetalSolutionRestTransport._BaseCreateSSHKey, BareMetalSolutionRestStub):
- def __hash__(self):
- return hash("BareMetalSolutionRestTransport.CreateSSHKey")
-
- @staticmethod
- def _get_response(
- host,
- metadata,
- query_params,
- session,
- timeout,
- transcoded_request,
- body=None):
-
- uri = transcoded_request['uri']
- method = transcoded_request['method']
- headers = dict(metadata)
- headers['Content-Type'] = 'application/json'
- response = getattr(session, method)(
- "{host}{uri}".format(host=host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params, strict=True),
- data=body,
- )
- return response
-
- def __call__(self,
- request: gcb_ssh_key.CreateSSHKeyRequest, *,
- retry: OptionalRetry=gapic_v1.method.DEFAULT,
- timeout: Optional[float]=None,
- metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
- ) -> gcb_ssh_key.SSHKey:
- r"""Call the create ssh key method over HTTP.
-
- Args:
- request (~.gcb_ssh_key.CreateSSHKeyRequest):
- The request object. Message for registering a public SSH
- key in a project.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- ~.gcb_ssh_key.SSHKey:
- An SSH key, used for authorizing with
- the interactive serial console feature.
-
- """
-
- http_options = _BaseBareMetalSolutionRestTransport._BaseCreateSSHKey._get_http_options()
-
- request, metadata = self._interceptor.pre_create_ssh_key(request, metadata)
- transcoded_request = _BaseBareMetalSolutionRestTransport._BaseCreateSSHKey._get_transcoded_request(http_options, request)
-
- body = _BaseBareMetalSolutionRestTransport._BaseCreateSSHKey._get_request_body_json(transcoded_request)
-
- # Jsonify the query params
- query_params = _BaseBareMetalSolutionRestTransport._BaseCreateSSHKey._get_query_params_json(transcoded_request)
-
- if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER
- request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
- method = transcoded_request['method']
- try:
- request_payload = type(request).to_json(request)
- except:
- request_payload = None
- http_request = {
- "payload": request_payload,
- "requestMethod": method,
- "requestUrl": request_url,
- "headers": dict(metadata),
- }
- _LOGGER.debug(
- f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.CreateSSHKey",
- extra = {
- "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution",
- "rpcName": "CreateSSHKey",
- "httpRequest": http_request,
- "metadata": http_request["headers"],
- },
- )
-
- # Send the request
- response = BareMetalSolutionRestTransport._CreateSSHKey._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
-
- # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
- # subclass.
- if response.status_code >= 400:
- raise core_exceptions.from_http_response(response)
-
- # Return the response
- resp = gcb_ssh_key.SSHKey()
- pb_resp = gcb_ssh_key.SSHKey.pb(resp)
-
- json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
-
- resp = self._interceptor.post_create_ssh_key(resp)
- response_metadata = [(k, str(v)) for k, v in response.headers.items()]
- resp, _ = self._interceptor.post_create_ssh_key_with_metadata(resp, response_metadata)
- if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER
- try:
- # Log the parsed proto (resp), not the raw HTTP response object.
- response_payload = gcb_ssh_key.SSHKey.to_json(resp)
- except:
- response_payload = None
- http_response = {
- "payload": response_payload,
- "headers": dict(response.headers),
- "status": response.status_code,
- }
- _LOGGER.debug(
- "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.create_ssh_key",
- extra = {
- "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution",
- "rpcName": "CreateSSHKey",
- "metadata": http_response["headers"],
- "httpResponse": http_response,
- },
- )
- return resp
-
- class _CreateVolumeSnapshot(_BaseBareMetalSolutionRestTransport._BaseCreateVolumeSnapshot, BareMetalSolutionRestStub):
- def __hash__(self):
- return hash("BareMetalSolutionRestTransport.CreateVolumeSnapshot")
-
- @staticmethod
- def _get_response(
- host,
- metadata,
- query_params,
- session,
- timeout,
- transcoded_request,
- body=None):
-
- uri = transcoded_request['uri']
- method = transcoded_request['method']
- headers = dict(metadata)
- headers['Content-Type'] = 'application/json'
- response = getattr(session, method)(
- "{host}{uri}".format(host=host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params, strict=True),
- data=body,
- )
- return response
-
- def __call__(self,
- request: gcb_volume_snapshot.CreateVolumeSnapshotRequest, *,
- retry:
OptionalRetry=gapic_v1.method.DEFAULT,
- timeout: Optional[float]=None,
- metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
- ) -> gcb_volume_snapshot.VolumeSnapshot:
- r"""Call the create volume snapshot method over HTTP.
-
- Args:
- request (~.gcb_volume_snapshot.CreateVolumeSnapshotRequest):
- The request object. Message for creating a volume
- snapshot.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- ~.gcb_volume_snapshot.VolumeSnapshot:
- A snapshot of a volume. Only boot
- volumes can have snapshots.
-
- """
-
- http_options = _BaseBareMetalSolutionRestTransport._BaseCreateVolumeSnapshot._get_http_options()
-
- request, metadata = self._interceptor.pre_create_volume_snapshot(request, metadata)
- transcoded_request = _BaseBareMetalSolutionRestTransport._BaseCreateVolumeSnapshot._get_transcoded_request(http_options, request)
-
- body = _BaseBareMetalSolutionRestTransport._BaseCreateVolumeSnapshot._get_request_body_json(transcoded_request)
-
- # Jsonify the query params
- query_params = _BaseBareMetalSolutionRestTransport._BaseCreateVolumeSnapshot._get_query_params_json(transcoded_request)
-
- if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER
- request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
- method = transcoded_request['method']
- try:
- request_payload = type(request).to_json(request)
- except:
- request_payload = None
- http_request = {
- "payload": request_payload,
- "requestMethod": method,
- "requestUrl": request_url,
- "headers": dict(metadata),
- }
- _LOGGER.debug(
- f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.CreateVolumeSnapshot",
- extra = {
- "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution",
- "rpcName": "CreateVolumeSnapshot",
- "httpRequest": http_request,
- "metadata": http_request["headers"],
- },
- )
-
- # Send the request
- response = BareMetalSolutionRestTransport._CreateVolumeSnapshot._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
-
- # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
- # subclass.
- if response.status_code >= 400:
- raise core_exceptions.from_http_response(response)
-
- # Return the response
- resp = gcb_volume_snapshot.VolumeSnapshot()
- pb_resp = gcb_volume_snapshot.VolumeSnapshot.pb(resp)
-
- json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
-
- resp = self._interceptor.post_create_volume_snapshot(resp)
- response_metadata = [(k, str(v)) for k, v in response.headers.items()]
- resp, _ = self._interceptor.post_create_volume_snapshot_with_metadata(resp, response_metadata)
- if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER
- try:
- # Log the parsed proto (resp), not the raw HTTP response object.
- response_payload = gcb_volume_snapshot.VolumeSnapshot.to_json(resp)
- except:
- response_payload = None
- http_response = {
- "payload": response_payload,
- "headers": dict(response.headers),
- "status": response.status_code,
- }
- _LOGGER.debug(
- "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.create_volume_snapshot",
- extra = {
- "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution",
- "rpcName": "CreateVolumeSnapshot",
- "metadata": http_response["headers"],
- "httpResponse": http_response,
- },
- )
- return resp
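The `metadata` tuples documented in these signatures are flattened straight into HTTP headers by `_get_response`, which is where the `str`-versus-`bytes` rule comes from: ASCII keys carry `str` values, while keys ending in `-bin` carry `bytes`. A hypothetical call showing both kinds (the header names and resource paths are invented for illustration):

```python
# Hypothetical call with per-call metadata; the transport copies these
# pairs into the request headers.
snapshot = client.create_volume_snapshot(
    parent="projects/my-project/locations/us-central1/volumes/vol-1",  # placeholder
    volume_snapshot={"name": "snap-1"},  # placeholder; proto-plus accepts dicts
    metadata=(
        ("x-goog-custom-header", "trace-42"),       # str value for an ASCII key
        ("x-goog-custom-header-bin", b"\x01\x02"),  # bytes value for a -bin key
    ),
)
```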
-
- class _DeleteNfsShare(_BaseBareMetalSolutionRestTransport._BaseDeleteNfsShare, BareMetalSolutionRestStub):
- def __hash__(self):
- return hash("BareMetalSolutionRestTransport.DeleteNfsShare")
-
- @staticmethod
- def _get_response(
- host,
- metadata,
- query_params,
- session,
- timeout,
- transcoded_request,
- body=None):
-
- uri = transcoded_request['uri']
- method = transcoded_request['method']
- headers = dict(metadata)
- headers['Content-Type'] = 'application/json'
- response = getattr(session, method)(
- "{host}{uri}".format(host=host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params, strict=True),
- )
- return response
-
- def __call__(self,
- request: nfs_share.DeleteNfsShareRequest, *,
- retry: OptionalRetry=gapic_v1.method.DEFAULT,
- timeout: Optional[float]=None,
- metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
- ) -> operations_pb2.Operation:
- r"""Call the delete nfs share method over HTTP.
-
- Args:
- request (~.nfs_share.DeleteNfsShareRequest):
- The request object. Message for deleting an NFS share.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- ~.operations_pb2.Operation:
- This resource represents a
- long-running operation that is the
- result of a network API call.
- - """ - - http_options = _BaseBareMetalSolutionRestTransport._BaseDeleteNfsShare._get_http_options() - - request, metadata = self._interceptor.pre_delete_nfs_share(request, metadata) - transcoded_request = _BaseBareMetalSolutionRestTransport._BaseDeleteNfsShare._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBareMetalSolutionRestTransport._BaseDeleteNfsShare._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.DeleteNfsShare", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "DeleteNfsShare", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BareMetalSolutionRestTransport._DeleteNfsShare._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_delete_nfs_share(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_nfs_share_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.delete_nfs_share", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "DeleteNfsShare", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteSSHKey(_BaseBareMetalSolutionRestTransport._BaseDeleteSSHKey, BareMetalSolutionRestStub): - def __hash__(self): - return hash("BareMetalSolutionRestTransport.DeleteSSHKey") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: ssh_key.DeleteSSHKeyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the delete ssh key method over HTTP. 
-
- Args:
- request (~.ssh_key.DeleteSSHKeyRequest):
- The request object. Message for deleting an SSH key from
- a project.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
- """
-
- http_options = _BaseBareMetalSolutionRestTransport._BaseDeleteSSHKey._get_http_options()
-
- request, metadata = self._interceptor.pre_delete_ssh_key(request, metadata)
- transcoded_request = _BaseBareMetalSolutionRestTransport._BaseDeleteSSHKey._get_transcoded_request(http_options, request)
-
- # Jsonify the query params
- query_params = _BaseBareMetalSolutionRestTransport._BaseDeleteSSHKey._get_query_params_json(transcoded_request)
-
- if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER
- request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
- method = transcoded_request['method']
- try:
- request_payload = json_format.MessageToJson(request)
- except:
- request_payload = None
- http_request = {
- "payload": request_payload,
- "requestMethod": method,
- "requestUrl": request_url,
- "headers": dict(metadata),
- }
- _LOGGER.debug(
- f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.DeleteSSHKey",
- extra = {
- "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution",
- "rpcName": "DeleteSSHKey",
- "httpRequest": http_request,
- "metadata": http_request["headers"],
- },
- )
-
- # Send the request
- response = BareMetalSolutionRestTransport._DeleteSSHKey._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
-
- # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
- # subclass.
- if response.status_code >= 400:
- raise core_exceptions.from_http_response(response)
-
- # The RPC response is google.protobuf.Empty, so a successful delete
- # returns None; failures surface only as raised exceptions.
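`_DeleteSSHKey.__call__` ends after the status check because the RPC's response message is `google.protobuf.Empty`: success yields `None`, and failure surfaces only as a raised exception. From the client surface that looks roughly like this (the key name is a placeholder):

```python
from google.api_core import exceptions as core_exceptions

try:
    # Returns None on success; there is no payload to inspect.
    client.delete_ssh_key(
        name="projects/my-project/locations/global/sshKeys/my-key"  # placeholder
    )
except core_exceptions.NotFound:
    pass  # the key was already deleted
```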
-
- class _DeleteVolumeSnapshot(_BaseBareMetalSolutionRestTransport._BaseDeleteVolumeSnapshot, BareMetalSolutionRestStub):
- def __hash__(self):
- return hash("BareMetalSolutionRestTransport.DeleteVolumeSnapshot")
-
- @staticmethod
- def _get_response(
- host,
- metadata,
- query_params,
- session,
- timeout,
- transcoded_request,
- body=None):
-
- uri = transcoded_request['uri']
- method = transcoded_request['method']
- headers = dict(metadata)
- headers['Content-Type'] = 'application/json'
- response = getattr(session, method)(
- "{host}{uri}".format(host=host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params, strict=True),
- )
- return response
-
- def __call__(self,
- request: volume_snapshot.DeleteVolumeSnapshotRequest, *,
- retry: OptionalRetry=gapic_v1.method.DEFAULT,
- timeout: Optional[float]=None,
- metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
- ):
- r"""Call the delete volume snapshot method over HTTP.
-
- Args:
- request (~.volume_snapshot.DeleteVolumeSnapshotRequest):
- The request object. Message for deleting a named Volume
- snapshot.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
- """
-
- http_options = _BaseBareMetalSolutionRestTransport._BaseDeleteVolumeSnapshot._get_http_options()
-
- request, metadata = self._interceptor.pre_delete_volume_snapshot(request, metadata)
- transcoded_request = _BaseBareMetalSolutionRestTransport._BaseDeleteVolumeSnapshot._get_transcoded_request(http_options, request)
-
- # Jsonify the query params
- query_params = _BaseBareMetalSolutionRestTransport._BaseDeleteVolumeSnapshot._get_query_params_json(transcoded_request)
-
- if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER
- request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
- method = transcoded_request['method']
- try:
- request_payload = json_format.MessageToJson(request)
- except:
- request_payload = None
- http_request = {
- "payload": request_payload,
- "requestMethod": method,
- "requestUrl": request_url,
- "headers": dict(metadata),
- }
- _LOGGER.debug(
- f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.DeleteVolumeSnapshot",
- extra = {
- "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution",
- "rpcName": "DeleteVolumeSnapshot",
- "httpRequest": http_request,
- "metadata": http_request["headers"],
- },
- )
-
- # Send the request
- response = BareMetalSolutionRestTransport._DeleteVolumeSnapshot._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
-
- # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
- # subclass.
- if response.status_code >= 400:
- raise core_exceptions.from_http_response(response)
-
- # As with delete_ssh_key, the response is Empty: nothing is returned.
-
- class _DetachLun(_BaseBareMetalSolutionRestTransport._BaseDetachLun, BareMetalSolutionRestStub):
- def __hash__(self):
- return hash("BareMetalSolutionRestTransport.DetachLun")
-
- @staticmethod
- def _get_response(
- host,
- metadata,
- query_params,
- session,
- timeout,
- transcoded_request,
- body=None):
-
- uri = transcoded_request['uri']
- method = transcoded_request['method']
- headers = dict(metadata)
- headers['Content-Type'] = 'application/json'
- response = getattr(session, method)(
- "{host}{uri}".format(host=host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params, strict=True),
- data=body,
- )
- return response
-
- def __call__(self,
- request: gcb_instance.DetachLunRequest, *,
- retry: OptionalRetry=gapic_v1.method.DEFAULT,
- timeout: Optional[float]=None,
- metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
- ) -> operations_pb2.Operation:
- r"""Call the detach lun method over HTTP.
-
- Args:
- request (~.gcb_instance.DetachLunRequest):
- The request object. Message for detaching a specific LUN
- from an Instance.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- ~.operations_pb2.Operation:
- This resource represents a
- long-running operation that is the
- result of a network API call.
- - """ - - http_options = _BaseBareMetalSolutionRestTransport._BaseDetachLun._get_http_options() - - request, metadata = self._interceptor.pre_detach_lun(request, metadata) - transcoded_request = _BaseBareMetalSolutionRestTransport._BaseDetachLun._get_transcoded_request(http_options, request) - - body = _BaseBareMetalSolutionRestTransport._BaseDetachLun._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBareMetalSolutionRestTransport._BaseDetachLun._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.DetachLun", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "DetachLun", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BareMetalSolutionRestTransport._DetachLun._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_detach_lun(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_detach_lun_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.detach_lun", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "DetachLun", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DisableInteractiveSerialConsole(_BaseBareMetalSolutionRestTransport._BaseDisableInteractiveSerialConsole, BareMetalSolutionRestStub): - def __hash__(self): - return hash("BareMetalSolutionRestTransport.DisableInteractiveSerialConsole") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: instance.DisableInteractiveSerialConsoleRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: 
Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the disable interactive - serial console method over HTTP. - - Args: - request (~.instance.DisableInteractiveSerialConsoleRequest): - The request object. Message for disabling the interactive - serial console on an instance. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseBareMetalSolutionRestTransport._BaseDisableInteractiveSerialConsole._get_http_options() - - request, metadata = self._interceptor.pre_disable_interactive_serial_console(request, metadata) - transcoded_request = _BaseBareMetalSolutionRestTransport._BaseDisableInteractiveSerialConsole._get_transcoded_request(http_options, request) - - body = _BaseBareMetalSolutionRestTransport._BaseDisableInteractiveSerialConsole._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBareMetalSolutionRestTransport._BaseDisableInteractiveSerialConsole._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.DisableInteractiveSerialConsole", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "DisableInteractiveSerialConsole", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BareMetalSolutionRestTransport._DisableInteractiveSerialConsole._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_disable_interactive_serial_console(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_disable_interactive_serial_console_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.disable_interactive_serial_console", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "DisableInteractiveSerialConsole", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _EnableInteractiveSerialConsole(_BaseBareMetalSolutionRestTransport._BaseEnableInteractiveSerialConsole, BareMetalSolutionRestStub): - def __hash__(self): - return hash("BareMetalSolutionRestTransport.EnableInteractiveSerialConsole") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: instance.EnableInteractiveSerialConsoleRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the enable interactive serial - console method over HTTP. - - Args: - request (~.instance.EnableInteractiveSerialConsoleRequest): - The request object. Message for enabling the interactive - serial console on an instance. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = _BaseBareMetalSolutionRestTransport._BaseEnableInteractiveSerialConsole._get_http_options() - - request, metadata = self._interceptor.pre_enable_interactive_serial_console(request, metadata) - transcoded_request = _BaseBareMetalSolutionRestTransport._BaseEnableInteractiveSerialConsole._get_transcoded_request(http_options, request) - - body = _BaseBareMetalSolutionRestTransport._BaseEnableInteractiveSerialConsole._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBareMetalSolutionRestTransport._BaseEnableInteractiveSerialConsole._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.EnableInteractiveSerialConsole", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "EnableInteractiveSerialConsole", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BareMetalSolutionRestTransport._EnableInteractiveSerialConsole._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_enable_interactive_serial_console(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_enable_interactive_serial_console_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.enable_interactive_serial_console", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "EnableInteractiveSerialConsole", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _EvictLun(_BaseBareMetalSolutionRestTransport._BaseEvictLun, BareMetalSolutionRestStub): - def __hash__(self): - return hash("BareMetalSolutionRestTransport.EvictLun") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - 
data=body,
- )
- return response
-
- def __call__(self,
- request: lun.EvictLunRequest, *,
- retry: OptionalRetry=gapic_v1.method.DEFAULT,
- timeout: Optional[float]=None,
- metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
- ) -> operations_pb2.Operation:
- r"""Call the evict lun method over HTTP.
-
- Args:
- request (~.lun.EvictLunRequest):
- The request object. Request to skip lun cooloff and
- delete it.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- ~.operations_pb2.Operation:
- This resource represents a
- long-running operation that is the
- result of a network API call.
-
- """
-
- http_options = _BaseBareMetalSolutionRestTransport._BaseEvictLun._get_http_options()
-
- request, metadata = self._interceptor.pre_evict_lun(request, metadata)
- transcoded_request = _BaseBareMetalSolutionRestTransport._BaseEvictLun._get_transcoded_request(http_options, request)
-
- body = _BaseBareMetalSolutionRestTransport._BaseEvictLun._get_request_body_json(transcoded_request)
-
- # Jsonify the query params
- query_params = _BaseBareMetalSolutionRestTransport._BaseEvictLun._get_query_params_json(transcoded_request)
-
- if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER
- request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
- method = transcoded_request['method']
- try:
- request_payload = json_format.MessageToJson(request)
- except:
- request_payload = None
- http_request = {
- "payload": request_payload,
- "requestMethod": method,
- "requestUrl": request_url,
- "headers": dict(metadata),
- }
- _LOGGER.debug(
- f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.EvictLun",
- extra = {
- "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution",
- "rpcName": "EvictLun",
- "httpRequest": http_request,
- "metadata": http_request["headers"],
- },
- )
-
- # Send the request
- response = BareMetalSolutionRestTransport._EvictLun._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
-
- # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
- # subclass.
- if response.status_code >= 400:
- raise core_exceptions.from_http_response(response)
-
- # Return the response
- resp = operations_pb2.Operation()
- json_format.Parse(response.content, resp, ignore_unknown_fields=True)
-
- resp = self._interceptor.post_evict_lun(resp)
- response_metadata = [(k, str(v)) for k, v in response.headers.items()]
- resp, _ = self._interceptor.post_evict_lun_with_metadata(resp, response_metadata)
- if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER
- try:
- response_payload = json_format.MessageToJson(resp)
- except:
- response_payload = None
- http_response = {
- "payload": response_payload,
- "headers": dict(response.headers),
- "status": response.status_code,
- }
- _LOGGER.debug(
- "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.evict_lun",
- extra = {
- "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution",
- "rpcName": "EvictLun",
- "metadata": http_response["headers"],
- "httpResponse": http_response,
- },
- )
- return resp
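Note that at this layer `evict_lun` returns the raw `operations_pb2.Operation`; the client surface wraps it, via the `operations_client` machinery shown earlier, in a `google.api_core.operation.Operation` so callers can block until the eviction completes. A sketch (the resource name is a placeholder):

```python
# evict_lun starts a long-running operation; result() polls until the
# eviction finishes and raises if the operation ends in an error.
operation = client.evict_lun(
    name="projects/my-project/locations/us-central1/volumes/vol-1/luns/lun-1"  # placeholder
)
operation.result(timeout=300)
```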
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_evict_lun(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_evict_lun_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.evict_lun", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "EvictLun", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _EvictVolume(_BaseBareMetalSolutionRestTransport._BaseEvictVolume, BareMetalSolutionRestStub): - def __hash__(self): - return hash("BareMetalSolutionRestTransport.EvictVolume") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: volume.EvictVolumeRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the evict volume method over HTTP. - - Args: - request (~.volume.EvictVolumeRequest): - The request object. Request for skip volume cooloff and - delete it. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = _BaseBareMetalSolutionRestTransport._BaseEvictVolume._get_http_options() - - request, metadata = self._interceptor.pre_evict_volume(request, metadata) - transcoded_request = _BaseBareMetalSolutionRestTransport._BaseEvictVolume._get_transcoded_request(http_options, request) - - body = _BaseBareMetalSolutionRestTransport._BaseEvictVolume._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBareMetalSolutionRestTransport._BaseEvictVolume._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.EvictVolume", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "EvictVolume", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BareMetalSolutionRestTransport._EvictVolume._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_evict_volume(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_evict_volume_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.evict_volume", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "EvictVolume", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetInstance(_BaseBareMetalSolutionRestTransport._BaseGetInstance, BareMetalSolutionRestStub): - def __hash__(self): - return hash("BareMetalSolutionRestTransport.GetInstance") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: instance.GetInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) 
-> instance.Instance: - r"""Call the get instance method over HTTP. - - Args: - request (~.instance.GetInstanceRequest): - The request object. Message for requesting server - information. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.instance.Instance: - A server. - """ - - http_options = _BaseBareMetalSolutionRestTransport._BaseGetInstance._get_http_options() - - request, metadata = self._interceptor.pre_get_instance(request, metadata) - transcoded_request = _BaseBareMetalSolutionRestTransport._BaseGetInstance._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBareMetalSolutionRestTransport._BaseGetInstance._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.GetInstance", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "GetInstance", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BareMetalSolutionRestTransport._GetInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
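# Unary GET methods such as GetInstance invoke _get_response without a body;
# only the transcoded URI and the JSON-encoded query parameters carry the
# request. A caller-side sketch, assuming the same hypothetical client as
# above (the resource name is made up):
#
#     server = client.get_instance(
#         name="projects/my-proj/locations/us-central1/instances/server-1",
#     )
#     print(server.name, server.state)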
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = instance.Instance() - pb_resp = instance.Instance.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_instance(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = instance.Instance.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.get_instance", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "GetInstance", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetLun(_BaseBareMetalSolutionRestTransport._BaseGetLun, BareMetalSolutionRestStub): - def __hash__(self): - return hash("BareMetalSolutionRestTransport.GetLun") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: lun.GetLunRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> lun.Lun: - r"""Call the get lun method over HTTP. - - Args: - request (~.lun.GetLunRequest): - The request object. Message for requesting storage lun - information. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.lun.Lun: - A storage volume logical unit number - (LUN). 
- - """ - - http_options = _BaseBareMetalSolutionRestTransport._BaseGetLun._get_http_options() - - request, metadata = self._interceptor.pre_get_lun(request, metadata) - transcoded_request = _BaseBareMetalSolutionRestTransport._BaseGetLun._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBareMetalSolutionRestTransport._BaseGetLun._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.GetLun", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "GetLun", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BareMetalSolutionRestTransport._GetLun._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = lun.Lun() - pb_resp = lun.Lun.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_lun(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_lun_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = lun.Lun.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.get_lun", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "GetLun", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetNetwork(_BaseBareMetalSolutionRestTransport._BaseGetNetwork, BareMetalSolutionRestStub): - def __hash__(self): - return hash("BareMetalSolutionRestTransport.GetNetwork") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: network.GetNetworkRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> network.Network: - r"""Call the get network method over HTTP. - - Args: - request (~.network.GetNetworkRequest): - The request object. 
Message for requesting network - information. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.network.Network: - A Network. - """ - - http_options = _BaseBareMetalSolutionRestTransport._BaseGetNetwork._get_http_options() - - request, metadata = self._interceptor.pre_get_network(request, metadata) - transcoded_request = _BaseBareMetalSolutionRestTransport._BaseGetNetwork._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBareMetalSolutionRestTransport._BaseGetNetwork._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.GetNetwork", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "GetNetwork", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BareMetalSolutionRestTransport._GetNetwork._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
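# The pre_*/post_* calls bracketing each RPC are hook points on
# BareMetalSolutionRestInterceptor (defined alongside this transport). A
# sketch of a custom interceptor that inspects every parsed Network; the
# class and constructor argument names follow this module, while the wiring
# below is illustrative:
#
#     class AuditInterceptor(BareMetalSolutionRestInterceptor):
#         def post_get_network(self, response):
#             print("fetched network:", response.name)
#             return response
#
#     transport = BareMetalSolutionRestTransport(interceptor=AuditInterceptor())
#     client = bare_metal_solution_v2.BareMetalSolutionClient(transport=transport)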
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = network.Network() - pb_resp = network.Network.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_network(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_network_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = network.Network.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.get_network", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "GetNetwork", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetNfsShare(_BaseBareMetalSolutionRestTransport._BaseGetNfsShare, BareMetalSolutionRestStub): - def __hash__(self): - return hash("BareMetalSolutionRestTransport.GetNfsShare") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: nfs_share.GetNfsShareRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> nfs_share.NfsShare: - r"""Call the get nfs share method over HTTP. - - Args: - request (~.nfs_share.GetNfsShareRequest): - The request object. Message for requesting NFS share - information. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.nfs_share.NfsShare: - An NFS share. 
- """ - - http_options = _BaseBareMetalSolutionRestTransport._BaseGetNfsShare._get_http_options() - - request, metadata = self._interceptor.pre_get_nfs_share(request, metadata) - transcoded_request = _BaseBareMetalSolutionRestTransport._BaseGetNfsShare._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBareMetalSolutionRestTransport._BaseGetNfsShare._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.GetNfsShare", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "GetNfsShare", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BareMetalSolutionRestTransport._GetNfsShare._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = nfs_share.NfsShare() - pb_resp = nfs_share.NfsShare.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_nfs_share(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_nfs_share_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = nfs_share.NfsShare.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.get_nfs_share", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "GetNfsShare", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetProvisioningConfig(_BaseBareMetalSolutionRestTransport._BaseGetProvisioningConfig, BareMetalSolutionRestStub): - def __hash__(self): - return hash("BareMetalSolutionRestTransport.GetProvisioningConfig") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: provisioning.GetProvisioningConfigRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> 
provisioning.ProvisioningConfig: - r"""Call the get provisioning config method over HTTP. - - Args: - request (~.provisioning.GetProvisioningConfigRequest): - The request object. Request for GetProvisioningConfig. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.provisioning.ProvisioningConfig: - A provisioning configuration. - """ - - http_options = _BaseBareMetalSolutionRestTransport._BaseGetProvisioningConfig._get_http_options() - - request, metadata = self._interceptor.pre_get_provisioning_config(request, metadata) - transcoded_request = _BaseBareMetalSolutionRestTransport._BaseGetProvisioningConfig._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBareMetalSolutionRestTransport._BaseGetProvisioningConfig._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.GetProvisioningConfig", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "GetProvisioningConfig", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BareMetalSolutionRestTransport._GetProvisioningConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
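# Response parsing is identical in every stub: a proto-plus wrapper is
# allocated, its underlying protobuf message (Msg.pb(resp)) is filled by
# json_format.Parse, and unknown fields are ignored so older clients keep
# working against newer server payloads. The same pattern in isolation:
#
#     cfg = provisioning.ProvisioningConfig()
#     json_format.Parse(
#         '{"name": "projects/my-proj/locations/us-central1/provisioningConfigs/c1"}',
#         provisioning.ProvisioningConfig.pb(cfg),
#         ignore_unknown_fields=True,
#     )
#     assert cfg.name  # the wrapper reflects fields parsed into its pb message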
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = provisioning.ProvisioningConfig() - pb_resp = provisioning.ProvisioningConfig.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_provisioning_config(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_provisioning_config_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = provisioning.ProvisioningConfig.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.get_provisioning_config", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "GetProvisioningConfig", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetVolume(_BaseBareMetalSolutionRestTransport._BaseGetVolume, BareMetalSolutionRestStub): - def __hash__(self): - return hash("BareMetalSolutionRestTransport.GetVolume") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: volume.GetVolumeRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> volume.Volume: - r"""Call the get volume method over HTTP. - - Args: - request (~.volume.GetVolumeRequest): - The request object. Message for requesting storage volume - information. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.volume.Volume: - A storage volume. 
- """ - - http_options = _BaseBareMetalSolutionRestTransport._BaseGetVolume._get_http_options() - - request, metadata = self._interceptor.pre_get_volume(request, metadata) - transcoded_request = _BaseBareMetalSolutionRestTransport._BaseGetVolume._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBareMetalSolutionRestTransport._BaseGetVolume._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.GetVolume", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "GetVolume", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BareMetalSolutionRestTransport._GetVolume._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = volume.Volume() - pb_resp = volume.Volume.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_volume(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_volume_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = volume.Volume.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.get_volume", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "GetVolume", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetVolumeSnapshot(_BaseBareMetalSolutionRestTransport._BaseGetVolumeSnapshot, BareMetalSolutionRestStub): - def __hash__(self): - return hash("BareMetalSolutionRestTransport.GetVolumeSnapshot") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: volume_snapshot.GetVolumeSnapshotRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> volume_snapshot.VolumeSnapshot: - r"""Call the get volume snapshot method over 
HTTP. - - Args: - request (~.volume_snapshot.GetVolumeSnapshotRequest): - The request object. Message for requesting volume - snapshot information. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.volume_snapshot.VolumeSnapshot: - A snapshot of a volume. Only boot - volumes can have snapshots. - - """ - - http_options = _BaseBareMetalSolutionRestTransport._BaseGetVolumeSnapshot._get_http_options() - - request, metadata = self._interceptor.pre_get_volume_snapshot(request, metadata) - transcoded_request = _BaseBareMetalSolutionRestTransport._BaseGetVolumeSnapshot._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBareMetalSolutionRestTransport._BaseGetVolumeSnapshot._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.GetVolumeSnapshot", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "GetVolumeSnapshot", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BareMetalSolutionRestTransport._GetVolumeSnapshot._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
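# Each __call__ accepts per-request retry, timeout, and metadata, mirroring
# the public client surface. A sketch of overriding them on a single call
# (the resource name is hypothetical and the Retry values are illustrative):
#
#     from google.api_core import retry as retries
#
#     snapshot = client.get_volume_snapshot(
#         name="projects/my-proj/locations/us-central1/volumes/vol-1/snapshots/snap-1",
#         retry=retries.Retry(initial=1.0, maximum=10.0),
#         timeout=30.0,
#     )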
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = volume_snapshot.VolumeSnapshot() - pb_resp = volume_snapshot.VolumeSnapshot.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_volume_snapshot(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_volume_snapshot_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = volume_snapshot.VolumeSnapshot.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.get_volume_snapshot", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "GetVolumeSnapshot", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListInstances(_BaseBareMetalSolutionRestTransport._BaseListInstances, BareMetalSolutionRestStub): - def __hash__(self): - return hash("BareMetalSolutionRestTransport.ListInstances") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: instance.ListInstancesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> instance.ListInstancesResponse: - r"""Call the list instances method over HTTP. - - Args: - request (~.instance.ListInstancesRequest): - The request object. Message for requesting the list of - servers. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.instance.ListInstancesResponse: - Response message for the list of - servers. 
- - """ - - http_options = _BaseBareMetalSolutionRestTransport._BaseListInstances._get_http_options() - - request, metadata = self._interceptor.pre_list_instances(request, metadata) - transcoded_request = _BaseBareMetalSolutionRestTransport._BaseListInstances._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBareMetalSolutionRestTransport._BaseListInstances._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.ListInstances", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "ListInstances", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BareMetalSolutionRestTransport._ListInstances._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = instance.ListInstancesResponse() - pb_resp = instance.ListInstancesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_instances(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_instances_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = instance.ListInstancesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.list_instances", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "ListInstances", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListLuns(_BaseBareMetalSolutionRestTransport._BaseListLuns, BareMetalSolutionRestStub): - def __hash__(self): - return hash("BareMetalSolutionRestTransport.ListLuns") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: lun.ListLunsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> lun.ListLunsResponse: - 
r"""Call the list luns method over HTTP. - - Args: - request (~.lun.ListLunsRequest): - The request object. Message for requesting a list of - storage volume luns. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.lun.ListLunsResponse: - Response message containing the list - of storage volume luns. - - """ - - http_options = _BaseBareMetalSolutionRestTransport._BaseListLuns._get_http_options() - - request, metadata = self._interceptor.pre_list_luns(request, metadata) - transcoded_request = _BaseBareMetalSolutionRestTransport._BaseListLuns._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBareMetalSolutionRestTransport._BaseListLuns._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.ListLuns", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "ListLuns", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BareMetalSolutionRestTransport._ListLuns._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = lun.ListLunsResponse() - pb_resp = lun.ListLunsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_luns(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_luns_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = lun.ListLunsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.list_luns", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "ListLuns", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListNetworks(_BaseBareMetalSolutionRestTransport._BaseListNetworks, BareMetalSolutionRestStub): - def __hash__(self): - return hash("BareMetalSolutionRestTransport.ListNetworks") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: network.ListNetworksRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> network.ListNetworksResponse: - r"""Call the list networks method over HTTP. - - Args: - request (~.network.ListNetworksRequest): - The request object. Message for requesting a list of - networks. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.network.ListNetworksResponse: - Response message containing the list - of networks. 
- - """ - - http_options = _BaseBareMetalSolutionRestTransport._BaseListNetworks._get_http_options() - - request, metadata = self._interceptor.pre_list_networks(request, metadata) - transcoded_request = _BaseBareMetalSolutionRestTransport._BaseListNetworks._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBareMetalSolutionRestTransport._BaseListNetworks._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.ListNetworks", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "ListNetworks", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BareMetalSolutionRestTransport._ListNetworks._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = network.ListNetworksResponse() - pb_resp = network.ListNetworksResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_networks(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_networks_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = network.ListNetworksResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.list_networks", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "ListNetworks", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListNetworkUsage(_BaseBareMetalSolutionRestTransport._BaseListNetworkUsage, BareMetalSolutionRestStub): - def __hash__(self): - return hash("BareMetalSolutionRestTransport.ListNetworkUsage") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: network.ListNetworkUsageRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> 
network.ListNetworkUsageResponse: - r"""Call the list network usage method over HTTP. - - Args: - request (~.network.ListNetworkUsageRequest): - The request object. Request to get networks with IPs. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.network.ListNetworkUsageResponse: - Response with Networks with IPs - """ - - http_options = _BaseBareMetalSolutionRestTransport._BaseListNetworkUsage._get_http_options() - - request, metadata = self._interceptor.pre_list_network_usage(request, metadata) - transcoded_request = _BaseBareMetalSolutionRestTransport._BaseListNetworkUsage._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBareMetalSolutionRestTransport._BaseListNetworkUsage._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.ListNetworkUsage", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "ListNetworkUsage", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BareMetalSolutionRestTransport._ListNetworkUsage._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
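# Unlike the other List* RPCs here, ListNetworkUsage is not paginated: the
# transport's single response is the complete result. A sketch, assuming the
# `location` parameter and `networks`/`used_ips` fields of the v2 protos:
#
#     usage = client.list_network_usage(
#         location="projects/my-proj/locations/us-central1",
#     )
#     for network_usage in usage.networks:
#         print(network_usage.network.name, len(network_usage.used_ips))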
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = network.ListNetworkUsageResponse() - pb_resp = network.ListNetworkUsageResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_network_usage(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_network_usage_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = network.ListNetworkUsageResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.list_network_usage", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "ListNetworkUsage", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListNfsShares(_BaseBareMetalSolutionRestTransport._BaseListNfsShares, BareMetalSolutionRestStub): - def __hash__(self): - return hash("BareMetalSolutionRestTransport.ListNfsShares") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: nfs_share.ListNfsSharesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> nfs_share.ListNfsSharesResponse: - r"""Call the list nfs shares method over HTTP. - - Args: - request (~.nfs_share.ListNfsSharesRequest): - The request object. Message for requesting a list of NFS - shares. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.nfs_share.ListNfsSharesResponse: - Response message containing the list - of NFS shares. 
- - """ - - http_options = _BaseBareMetalSolutionRestTransport._BaseListNfsShares._get_http_options() - - request, metadata = self._interceptor.pre_list_nfs_shares(request, metadata) - transcoded_request = _BaseBareMetalSolutionRestTransport._BaseListNfsShares._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBareMetalSolutionRestTransport._BaseListNfsShares._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.ListNfsShares", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "ListNfsShares", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BareMetalSolutionRestTransport._ListNfsShares._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = nfs_share.ListNfsSharesResponse() - pb_resp = nfs_share.ListNfsSharesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_nfs_shares(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_nfs_shares_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = nfs_share.ListNfsSharesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.list_nfs_shares", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "ListNfsShares", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListOSImages(_BaseBareMetalSolutionRestTransport._BaseListOSImages, BareMetalSolutionRestStub): - def __hash__(self): - return hash("BareMetalSolutionRestTransport.ListOSImages") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: osimage.ListOSImagesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> 
osimage.ListOSImagesResponse: - r"""Call the list os images method over HTTP. - - Args: - request (~.osimage.ListOSImagesRequest): - The request object. Request for getting all available OS - images. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.osimage.ListOSImagesResponse: - Response message for getting all available OS - images. - - """ - - http_options = _BaseBareMetalSolutionRestTransport._BaseListOSImages._get_http_options() - - request, metadata = self._interceptor.pre_list_os_images(request, metadata) - transcoded_request = _BaseBareMetalSolutionRestTransport._BaseListOSImages._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBareMetalSolutionRestTransport._BaseListOSImages._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.ListOSImages", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "ListOSImages", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BareMetalSolutionRestTransport._ListOSImages._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass.
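- # The check that follows raises a typed exception on any 4xx/5xx status. A
- # minimal sketch of the mapping done by google.api_core's from_http_response
- # (the fake response below is an illustration, not part of this file):
- # 404 becomes NotFound, 403 PermissionDenied, 429 TooManyRequests, and other
- # codes the matching GoogleAPICallError subclass.
- #
- #   import requests
- #   from google.api_core import exceptions as core_exceptions
- #   fake = requests.Response()
- #   fake.status_code = 404
- #   fake._content = b'{"error": {"message": "OS image not found"}}'
- #   fake.request = requests.PreparedRequest()  # from_http_response reads .method/.url
- #   exc = core_exceptions.from_http_response(fake)
- #   assert isinstance(exc, core_exceptions.NotFound)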
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = osimage.ListOSImagesResponse() - pb_resp = osimage.ListOSImagesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_os_images(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_os_images_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = osimage.ListOSImagesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.list_os_images", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "ListOSImages", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListProvisioningQuotas(_BaseBareMetalSolutionRestTransport._BaseListProvisioningQuotas, BareMetalSolutionRestStub): - def __hash__(self): - return hash("BareMetalSolutionRestTransport.ListProvisioningQuotas") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: provisioning.ListProvisioningQuotasRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> provisioning.ListProvisioningQuotasResponse: - r"""Call the list provisioning quotas method over HTTP. - - Args: - request (~.provisioning.ListProvisioningQuotasRequest): - The request object. Message for requesting the list of - provisioning quotas. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.provisioning.ListProvisioningQuotasResponse: - Response message for the list of - provisioning quotas. 
- - """ - - http_options = _BaseBareMetalSolutionRestTransport._BaseListProvisioningQuotas._get_http_options() - - request, metadata = self._interceptor.pre_list_provisioning_quotas(request, metadata) - transcoded_request = _BaseBareMetalSolutionRestTransport._BaseListProvisioningQuotas._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBareMetalSolutionRestTransport._BaseListProvisioningQuotas._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.ListProvisioningQuotas", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "ListProvisioningQuotas", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BareMetalSolutionRestTransport._ListProvisioningQuotas._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = provisioning.ListProvisioningQuotasResponse() - pb_resp = provisioning.ListProvisioningQuotasResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_provisioning_quotas(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_provisioning_quotas_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = provisioning.ListProvisioningQuotasResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.list_provisioning_quotas", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "ListProvisioningQuotas", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListSSHKeys(_BaseBareMetalSolutionRestTransport._BaseListSSHKeys, BareMetalSolutionRestStub): - def __hash__(self): - return hash("BareMetalSolutionRestTransport.ListSSHKeys") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: ssh_key.ListSSHKeysRequest, *, - retry: 
OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> ssh_key.ListSSHKeysResponse: - r"""Call the list ssh keys method over HTTP. - - Args: - request (~.ssh_key.ListSSHKeysRequest): - The request object. Message for listing the public SSH - keys in a project. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.ssh_key.ListSSHKeysResponse: - Message for response of ListSSHKeys. - """ - - http_options = _BaseBareMetalSolutionRestTransport._BaseListSSHKeys._get_http_options() - - request, metadata = self._interceptor.pre_list_ssh_keys(request, metadata) - transcoded_request = _BaseBareMetalSolutionRestTransport._BaseListSSHKeys._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBareMetalSolutionRestTransport._BaseListSSHKeys._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.ListSSHKeys", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "ListSSHKeys", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BareMetalSolutionRestTransport._ListSSHKeys._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
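- # This stub returns a single page (a ListSSHKeysResponse carrying a
- # next_page_token); the public client wraps it in a pager. A hedged usage
- # sketch -- project and location values are placeholders:
- #
- #   from google.cloud import bare_metal_solution_v2
- #   client = bare_metal_solution_v2.BareMetalSolutionClient(transport="rest")
- #   for ssh_key in client.list_ssh_keys(parent="projects/my-project/locations/global"):
- #       print(ssh_key.name)  # the pager fetches further pages on demand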
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = ssh_key.ListSSHKeysResponse() - pb_resp = ssh_key.ListSSHKeysResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_ssh_keys(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_ssh_keys_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = ssh_key.ListSSHKeysResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.list_ssh_keys", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "ListSSHKeys", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListVolumes(_BaseBareMetalSolutionRestTransport._BaseListVolumes, BareMetalSolutionRestStub): - def __hash__(self): - return hash("BareMetalSolutionRestTransport.ListVolumes") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: volume.ListVolumesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> volume.ListVolumesResponse: - r"""Call the list volumes method over HTTP. - - Args: - request (~.volume.ListVolumesRequest): - The request object. Message for requesting a list of - storage volumes. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.volume.ListVolumesResponse: - Response message containing the list - of storage volumes. 
- - """ - - http_options = _BaseBareMetalSolutionRestTransport._BaseListVolumes._get_http_options() - - request, metadata = self._interceptor.pre_list_volumes(request, metadata) - transcoded_request = _BaseBareMetalSolutionRestTransport._BaseListVolumes._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBareMetalSolutionRestTransport._BaseListVolumes._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.ListVolumes", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "ListVolumes", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BareMetalSolutionRestTransport._ListVolumes._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = volume.ListVolumesResponse() - pb_resp = volume.ListVolumesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_volumes(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_volumes_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = volume.ListVolumesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.list_volumes", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "ListVolumes", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListVolumeSnapshots(_BaseBareMetalSolutionRestTransport._BaseListVolumeSnapshots, BareMetalSolutionRestStub): - def __hash__(self): - return hash("BareMetalSolutionRestTransport.ListVolumeSnapshots") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: volume_snapshot.ListVolumeSnapshotsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> 
volume_snapshot.ListVolumeSnapshotsResponse: - r"""Call the list volume snapshots method over HTTP. - - Args: - request (~.volume_snapshot.ListVolumeSnapshotsRequest): - The request object. Message for requesting a list of - volume snapshots. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.volume_snapshot.ListVolumeSnapshotsResponse: - Response message containing the list - of volume snapshots. - - """ - - http_options = _BaseBareMetalSolutionRestTransport._BaseListVolumeSnapshots._get_http_options() - - request, metadata = self._interceptor.pre_list_volume_snapshots(request, metadata) - transcoded_request = _BaseBareMetalSolutionRestTransport._BaseListVolumeSnapshots._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBareMetalSolutionRestTransport._BaseListVolumeSnapshots._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.ListVolumeSnapshots", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "ListVolumeSnapshots", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BareMetalSolutionRestTransport._ListVolumeSnapshots._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
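- # The pre_/post_ calls wrapping this request/response handling are interceptor
- # hooks. A sketch of customizing them, assuming the BareMetalSolutionRestInterceptor
- # defined in this module and default application credentials:
- #
- #   class CountingInterceptor(BareMetalSolutionRestInterceptor):
- #       def post_list_volume_snapshots(self, response):
- #           print(f"received {len(response.volume_snapshots)} snapshots")
- #           return response
- #
- #   transport = BareMetalSolutionRestTransport(interceptor=CountingInterceptor())
- #   client = BareMetalSolutionClient(transport=transport)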
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = volume_snapshot.ListVolumeSnapshotsResponse() - pb_resp = volume_snapshot.ListVolumeSnapshotsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_volume_snapshots(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_volume_snapshots_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = volume_snapshot.ListVolumeSnapshotsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.list_volume_snapshots", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "ListVolumeSnapshots", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _RenameInstance(_BaseBareMetalSolutionRestTransport._BaseRenameInstance, BareMetalSolutionRestStub): - def __hash__(self): - return hash("BareMetalSolutionRestTransport.RenameInstance") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: instance.RenameInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> instance.Instance: - r"""Call the rename instance method over HTTP. - - Args: - request (~.instance.RenameInstanceRequest): - The request object. Message requesting rename of a - server. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.instance.Instance: - A server. 
- """ - - http_options = _BaseBareMetalSolutionRestTransport._BaseRenameInstance._get_http_options() - - request, metadata = self._interceptor.pre_rename_instance(request, metadata) - transcoded_request = _BaseBareMetalSolutionRestTransport._BaseRenameInstance._get_transcoded_request(http_options, request) - - body = _BaseBareMetalSolutionRestTransport._BaseRenameInstance._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBareMetalSolutionRestTransport._BaseRenameInstance._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.RenameInstance", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "RenameInstance", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BareMetalSolutionRestTransport._RenameInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = instance.Instance() - pb_resp = instance.Instance.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_rename_instance(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_rename_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = instance.Instance.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.rename_instance", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "RenameInstance", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _RenameNetwork(_BaseBareMetalSolutionRestTransport._BaseRenameNetwork, BareMetalSolutionRestStub): - def __hash__(self): - return hash("BareMetalSolutionRestTransport.RenameNetwork") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: network.RenameNetworkRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - 
timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> network.Network: - r"""Call the rename network method over HTTP. - - Args: - request (~.network.RenameNetworkRequest): - The request object. Message requesting rename of a - network. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.network.Network: - A Network. - """ - - http_options = _BaseBareMetalSolutionRestTransport._BaseRenameNetwork._get_http_options() - - request, metadata = self._interceptor.pre_rename_network(request, metadata) - transcoded_request = _BaseBareMetalSolutionRestTransport._BaseRenameNetwork._get_transcoded_request(http_options, request) - - body = _BaseBareMetalSolutionRestTransport._BaseRenameNetwork._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBareMetalSolutionRestTransport._BaseRenameNetwork._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.RenameNetwork", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "RenameNetwork", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BareMetalSolutionRestTransport._RenameNetwork._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass.
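- # Transcoding sketch: _get_transcoded_request splits the proto request into
- # HTTP method, URI, body and query params per this RPC's http rule (assumed
- # here to be POST .../networks/*:rename with the remaining fields as JSON
- # body). For example, a request like
- #
- #   request = network.RenameNetworkRequest(
- #       name="projects/p/locations/l/networks/n", new_network_id="renamed")
- #
- # would yield method="post", uri="/v2/projects/p/locations/l/networks/n:rename"
- # and a body like '{"newNetworkId": "renamed"}' (values are placeholders).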
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = network.Network() - pb_resp = network.Network.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_rename_network(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_rename_network_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = network.Network.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.rename_network", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "RenameNetwork", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _RenameNfsShare(_BaseBareMetalSolutionRestTransport._BaseRenameNfsShare, BareMetalSolutionRestStub): - def __hash__(self): - return hash("BareMetalSolutionRestTransport.RenameNfsShare") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: nfs_share.RenameNfsShareRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> nfs_share.NfsShare: - r"""Call the rename nfs share method over HTTP. - - Args: - request (~.nfs_share.RenameNfsShareRequest): - The request object. Message requesting rename of an - NFS share. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.nfs_share.NfsShare: - An NFS share.
- """ - - http_options = _BaseBareMetalSolutionRestTransport._BaseRenameNfsShare._get_http_options() - - request, metadata = self._interceptor.pre_rename_nfs_share(request, metadata) - transcoded_request = _BaseBareMetalSolutionRestTransport._BaseRenameNfsShare._get_transcoded_request(http_options, request) - - body = _BaseBareMetalSolutionRestTransport._BaseRenameNfsShare._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBareMetalSolutionRestTransport._BaseRenameNfsShare._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.RenameNfsShare", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "RenameNfsShare", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BareMetalSolutionRestTransport._RenameNfsShare._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = nfs_share.NfsShare() - pb_resp = nfs_share.NfsShare.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_rename_nfs_share(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_rename_nfs_share_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = nfs_share.NfsShare.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.rename_nfs_share", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "RenameNfsShare", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _RenameVolume(_BaseBareMetalSolutionRestTransport._BaseRenameVolume, BareMetalSolutionRestStub): - def __hash__(self): - return hash("BareMetalSolutionRestTransport.RenameVolume") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: volume.RenameVolumeRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - 
timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> volume.Volume: - r"""Call the rename volume method over HTTP. - - Args: - request (~.volume.RenameVolumeRequest): - The request object. Message requesting rename of a - volume. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.volume.Volume: - A storage volume. - """ - - http_options = _BaseBareMetalSolutionRestTransport._BaseRenameVolume._get_http_options() - - request, metadata = self._interceptor.pre_rename_volume(request, metadata) - transcoded_request = _BaseBareMetalSolutionRestTransport._BaseRenameVolume._get_transcoded_request(http_options, request) - - body = _BaseBareMetalSolutionRestTransport._BaseRenameVolume._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBareMetalSolutionRestTransport._BaseRenameVolume._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.RenameVolume", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "RenameVolume", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BareMetalSolutionRestTransport._RenameVolume._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass.
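- # Parsing note for the block below: volume.Volume.pb(resp) returns the raw
- # protobuf message behind the empty proto-plus wrapper, so json_format.Parse
- # fills `resp` in place. The same pattern standalone (the name is a placeholder):
- #
- #   resp = volume.Volume()
- #   json_format.Parse('{"name": "projects/p/locations/l/volumes/v"}',
- #                     volume.Volume.pb(resp), ignore_unknown_fields=True)
- #   assert resp.name.endswith("volumes/v")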
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = volume.Volume() - pb_resp = volume.Volume.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_rename_volume(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_rename_volume_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = volume.Volume.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.rename_volume", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "RenameVolume", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ResetInstance(_BaseBareMetalSolutionRestTransport._BaseResetInstance, BareMetalSolutionRestStub): - def __hash__(self): - return hash("BareMetalSolutionRestTransport.ResetInstance") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: instance.ResetInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the reset instance method over HTTP. - - Args: - request (~.instance.ResetInstanceRequest): - The request object. Message requesting to reset a server. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = _BaseBareMetalSolutionRestTransport._BaseResetInstance._get_http_options() - - request, metadata = self._interceptor.pre_reset_instance(request, metadata) - transcoded_request = _BaseBareMetalSolutionRestTransport._BaseResetInstance._get_transcoded_request(http_options, request) - - body = _BaseBareMetalSolutionRestTransport._BaseResetInstance._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBareMetalSolutionRestTransport._BaseResetInstance._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.ResetInstance", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "ResetInstance", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BareMetalSolutionRestTransport._ResetInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_reset_instance(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_reset_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.reset_instance", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "ResetInstance", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ResizeVolume(_BaseBareMetalSolutionRestTransport._BaseResizeVolume, BareMetalSolutionRestStub): - def __hash__(self): - return hash("BareMetalSolutionRestTransport.ResizeVolume") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: gcb_volume.ResizeVolumeRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: 
Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the resize volume method over HTTP. - - Args: - request (~.gcb_volume.ResizeVolumeRequest): - The request object. Request for emergency resize Volume. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseBareMetalSolutionRestTransport._BaseResizeVolume._get_http_options() - - request, metadata = self._interceptor.pre_resize_volume(request, metadata) - transcoded_request = _BaseBareMetalSolutionRestTransport._BaseResizeVolume._get_transcoded_request(http_options, request) - - body = _BaseBareMetalSolutionRestTransport._BaseResizeVolume._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBareMetalSolutionRestTransport._BaseResizeVolume._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.ResizeVolume", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "ResizeVolume", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BareMetalSolutionRestTransport._ResizeVolume._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
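- # ResizeVolume is a long-running operation: the raw operations_pb2.Operation
- # parsed below is wrapped by the client layer so callers can block until the
- # volume is resized. A hedged usage sketch (names and size are placeholders):
- #
- #   op = client.resize_volume(
- #       volume="projects/p/locations/l/volumes/v", size_gib=2048)
- #   resized_volume = op.result()  # polls the operation to completion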
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_resize_volume(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_resize_volume_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.resize_volume", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "ResizeVolume", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _RestoreVolumeSnapshot(_BaseBareMetalSolutionRestTransport._BaseRestoreVolumeSnapshot, BareMetalSolutionRestStub): - def __hash__(self): - return hash("BareMetalSolutionRestTransport.RestoreVolumeSnapshot") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: gcb_volume_snapshot.RestoreVolumeSnapshotRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the restore volume snapshot method over HTTP. - - Args: - request (~.gcb_volume_snapshot.RestoreVolumeSnapshotRequest): - The request object. Message for restoring a volume - snapshot. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = _BaseBareMetalSolutionRestTransport._BaseRestoreVolumeSnapshot._get_http_options() - - request, metadata = self._interceptor.pre_restore_volume_snapshot(request, metadata) - transcoded_request = _BaseBareMetalSolutionRestTransport._BaseRestoreVolumeSnapshot._get_transcoded_request(http_options, request) - - body = _BaseBareMetalSolutionRestTransport._BaseRestoreVolumeSnapshot._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBareMetalSolutionRestTransport._BaseRestoreVolumeSnapshot._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.RestoreVolumeSnapshot", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "RestoreVolumeSnapshot", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BareMetalSolutionRestTransport._RestoreVolumeSnapshot._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_restore_volume_snapshot(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_restore_volume_snapshot_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.restore_volume_snapshot", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "RestoreVolumeSnapshot", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _StartInstance(_BaseBareMetalSolutionRestTransport._BaseStartInstance, BareMetalSolutionRestStub): - def __hash__(self): - return hash("BareMetalSolutionRestTransport.StartInstance") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: instance.StartInstanceRequest, 
*, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the start instance method over HTTP. - - Args: - request (~.instance.StartInstanceRequest): - The request object. Message requesting to start a server. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseBareMetalSolutionRestTransport._BaseStartInstance._get_http_options() - - request, metadata = self._interceptor.pre_start_instance(request, metadata) - transcoded_request = _BaseBareMetalSolutionRestTransport._BaseStartInstance._get_transcoded_request(http_options, request) - - body = _BaseBareMetalSolutionRestTransport._BaseStartInstance._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBareMetalSolutionRestTransport._BaseStartInstance._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.StartInstance", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "StartInstance", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BareMetalSolutionRestTransport._StartInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
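- # Metadata note: as the docstrings above state, metadata values are strings
- # unless the key ends in "-bin", which takes bytes. A hedged sketch of passing
- # custom metadata through the client (key names are illustrative):
- #
- #   md = [("x-goog-custom", "value"), ("x-goog-trace-bin", b"\x01\x02")]
- #   client.start_instance(
- #       name="projects/p/locations/l/instances/i", metadata=md)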
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_start_instance(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_start_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.start_instance", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "StartInstance", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _StopInstance(_BaseBareMetalSolutionRestTransport._BaseStopInstance, BareMetalSolutionRestStub): - def __hash__(self): - return hash("BareMetalSolutionRestTransport.StopInstance") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: instance.StopInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the stop instance method over HTTP. - - Args: - request (~.instance.StopInstanceRequest): - The request object. Message requesting to stop a server. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = _BaseBareMetalSolutionRestTransport._BaseStopInstance._get_http_options() - - request, metadata = self._interceptor.pre_stop_instance(request, metadata) - transcoded_request = _BaseBareMetalSolutionRestTransport._BaseStopInstance._get_transcoded_request(http_options, request) - - body = _BaseBareMetalSolutionRestTransport._BaseStopInstance._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBareMetalSolutionRestTransport._BaseStopInstance._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.StopInstance", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "StopInstance", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BareMetalSolutionRestTransport._StopInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_stop_instance(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_stop_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.stop_instance", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "StopInstance", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _SubmitProvisioningConfig(_BaseBareMetalSolutionRestTransport._BaseSubmitProvisioningConfig, BareMetalSolutionRestStub): - def __hash__(self): - return hash("BareMetalSolutionRestTransport.SubmitProvisioningConfig") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: provisioning.SubmitProvisioningConfigRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: 
Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> provisioning.SubmitProvisioningConfigResponse: - r"""Call the submit provisioning - config method over HTTP. - - Args: - request (~.provisioning.SubmitProvisioningConfigRequest): - The request object. Request for SubmitProvisioningConfig. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.provisioning.SubmitProvisioningConfigResponse: - Response for - SubmitProvisioningConfig. - - """ - - http_options = _BaseBareMetalSolutionRestTransport._BaseSubmitProvisioningConfig._get_http_options() - - request, metadata = self._interceptor.pre_submit_provisioning_config(request, metadata) - transcoded_request = _BaseBareMetalSolutionRestTransport._BaseSubmitProvisioningConfig._get_transcoded_request(http_options, request) - - body = _BaseBareMetalSolutionRestTransport._BaseSubmitProvisioningConfig._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBareMetalSolutionRestTransport._BaseSubmitProvisioningConfig._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.SubmitProvisioningConfig", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "SubmitProvisioningConfig", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BareMetalSolutionRestTransport._SubmitProvisioningConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = provisioning.SubmitProvisioningConfigResponse() - pb_resp = provisioning.SubmitProvisioningConfigResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_submit_provisioning_config(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_submit_provisioning_config_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = provisioning.SubmitProvisioningConfigResponse.to_json(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.submit_provisioning_config", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "SubmitProvisioningConfig", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateInstance(_BaseBareMetalSolutionRestTransport._BaseUpdateInstance, BareMetalSolutionRestStub): - def __hash__(self): - return hash("BareMetalSolutionRestTransport.UpdateInstance") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: gcb_instance.UpdateInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the update instance method over HTTP. - - Args: - request (~.gcb_instance.UpdateInstanceRequest): - The request object. Message requesting to update a - server. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call.
- - """ - - http_options = _BaseBareMetalSolutionRestTransport._BaseUpdateInstance._get_http_options() - - request, metadata = self._interceptor.pre_update_instance(request, metadata) - transcoded_request = _BaseBareMetalSolutionRestTransport._BaseUpdateInstance._get_transcoded_request(http_options, request) - - body = _BaseBareMetalSolutionRestTransport._BaseUpdateInstance._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBareMetalSolutionRestTransport._BaseUpdateInstance._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.UpdateInstance", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "UpdateInstance", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BareMetalSolutionRestTransport._UpdateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_instance(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.update_instance", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "UpdateInstance", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateNetwork(_BaseBareMetalSolutionRestTransport._BaseUpdateNetwork, BareMetalSolutionRestStub): - def __hash__(self): - return hash("BareMetalSolutionRestTransport.UpdateNetwork") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: gcb_network.UpdateNetworkRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - 
metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the update network method over HTTP. - - Args: - request (~.gcb_network.UpdateNetworkRequest): - The request object. Message requesting to update a - network. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseBareMetalSolutionRestTransport._BaseUpdateNetwork._get_http_options() - - request, metadata = self._interceptor.pre_update_network(request, metadata) - transcoded_request = _BaseBareMetalSolutionRestTransport._BaseUpdateNetwork._get_transcoded_request(http_options, request) - - body = _BaseBareMetalSolutionRestTransport._BaseUpdateNetwork._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBareMetalSolutionRestTransport._BaseUpdateNetwork._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.UpdateNetwork", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "UpdateNetwork", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BareMetalSolutionRestTransport._UpdateNetwork._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass.
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_network(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_network_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.update_network", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "UpdateNetwork", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateNfsShare(_BaseBareMetalSolutionRestTransport._BaseUpdateNfsShare, BareMetalSolutionRestStub): - def __hash__(self): - return hash("BareMetalSolutionRestTransport.UpdateNfsShare") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: gcb_nfs_share.UpdateNfsShareRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the update nfs share method over HTTP. - - Args: - request (~.gcb_nfs_share.UpdateNfsShareRequest): - The request object. Message requesting to update an NFS - share. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call.
- - """ - - http_options = _BaseBareMetalSolutionRestTransport._BaseUpdateNfsShare._get_http_options() - - request, metadata = self._interceptor.pre_update_nfs_share(request, metadata) - transcoded_request = _BaseBareMetalSolutionRestTransport._BaseUpdateNfsShare._get_transcoded_request(http_options, request) - - body = _BaseBareMetalSolutionRestTransport._BaseUpdateNfsShare._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBareMetalSolutionRestTransport._BaseUpdateNfsShare._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.UpdateNfsShare", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "UpdateNfsShare", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BareMetalSolutionRestTransport._UpdateNfsShare._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_nfs_share(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_nfs_share_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.update_nfs_share", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "UpdateNfsShare", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateProvisioningConfig(_BaseBareMetalSolutionRestTransport._BaseUpdateProvisioningConfig, BareMetalSolutionRestStub): - def __hash__(self): - return hash("BareMetalSolutionRestTransport.UpdateProvisioningConfig") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: provisioning.UpdateProvisioningConfigRequest, *, - retry: 
OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> provisioning.ProvisioningConfig: - r"""Call the update provisioning - config method over HTTP. - - Args: - request (~.provisioning.UpdateProvisioningConfigRequest): - The request object. Message for updating a - ProvisioningConfig. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.provisioning.ProvisioningConfig: - A provisioning configuration. - """ - - http_options = _BaseBareMetalSolutionRestTransport._BaseUpdateProvisioningConfig._get_http_options() - - request, metadata = self._interceptor.pre_update_provisioning_config(request, metadata) - transcoded_request = _BaseBareMetalSolutionRestTransport._BaseUpdateProvisioningConfig._get_transcoded_request(http_options, request) - - body = _BaseBareMetalSolutionRestTransport._BaseUpdateProvisioningConfig._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBareMetalSolutionRestTransport._BaseUpdateProvisioningConfig._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.UpdateProvisioningConfig", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "UpdateProvisioningConfig", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BareMetalSolutionRestTransport._UpdateProvisioningConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = provisioning.ProvisioningConfig() - pb_resp = provisioning.ProvisioningConfig.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_provisioning_config(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_provisioning_config_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = provisioning.ProvisioningConfig.to_json(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.update_provisioning_config", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "UpdateProvisioningConfig", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateVolume(_BaseBareMetalSolutionRestTransport._BaseUpdateVolume, BareMetalSolutionRestStub): - def __hash__(self): - return hash("BareMetalSolutionRestTransport.UpdateVolume") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: gcb_volume.UpdateVolumeRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the update volume method over HTTP. - - Args: - request (~.gcb_volume.UpdateVolumeRequest): - The request object. Message for updating a volume. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call.
- - """ - - http_options = _BaseBareMetalSolutionRestTransport._BaseUpdateVolume._get_http_options() - - request, metadata = self._interceptor.pre_update_volume(request, metadata) - transcoded_request = _BaseBareMetalSolutionRestTransport._BaseUpdateVolume._get_transcoded_request(http_options, request) - - body = _BaseBareMetalSolutionRestTransport._BaseUpdateVolume._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBareMetalSolutionRestTransport._BaseUpdateVolume._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.UpdateVolume", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "UpdateVolume", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BareMetalSolutionRestTransport._UpdateVolume._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_volume(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_volume_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.update_volume", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "UpdateVolume", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - @property - def create_nfs_share(self) -> Callable[ - [gcb_nfs_share.CreateNfsShareRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateNfsShare(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_provisioning_config(self) -> Callable[ - [provisioning.CreateProvisioningConfigRequest], - provisioning.ProvisioningConfig]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._CreateProvisioningConfig(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_ssh_key(self) -> Callable[ - [gcb_ssh_key.CreateSSHKeyRequest], - gcb_ssh_key.SSHKey]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateSSHKey(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_volume_snapshot(self) -> Callable[ - [gcb_volume_snapshot.CreateVolumeSnapshotRequest], - gcb_volume_snapshot.VolumeSnapshot]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateVolumeSnapshot(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_nfs_share(self) -> Callable[ - [nfs_share.DeleteNfsShareRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteNfsShare(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_ssh_key(self) -> Callable[ - [ssh_key.DeleteSSHKeyRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteSSHKey(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_volume_snapshot(self) -> Callable[ - [volume_snapshot.DeleteVolumeSnapshotRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteVolumeSnapshot(self._session, self._host, self._interceptor) # type: ignore - - @property - def detach_lun(self) -> Callable[ - [gcb_instance.DetachLunRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DetachLun(self._session, self._host, self._interceptor) # type: ignore - - @property - def disable_interactive_serial_console(self) -> Callable[ - [instance.DisableInteractiveSerialConsoleRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DisableInteractiveSerialConsole(self._session, self._host, self._interceptor) # type: ignore - - @property - def enable_interactive_serial_console(self) -> Callable[ - [instance.EnableInteractiveSerialConsoleRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._EnableInteractiveSerialConsole(self._session, self._host, self._interceptor) # type: ignore - - @property - def evict_lun(self) -> Callable[ - [lun.EvictLunRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._EvictLun(self._session, self._host, self._interceptor) # type: ignore - - @property - def evict_volume(self) -> Callable[ - [volume.EvictVolumeRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._EvictVolume(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_instance(self) -> Callable[ - [instance.GetInstanceRequest], - instance.Instance]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_lun(self) -> Callable[ - [lun.GetLunRequest], - lun.Lun]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetLun(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_network(self) -> Callable[ - [network.GetNetworkRequest], - network.Network]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetNetwork(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_nfs_share(self) -> Callable[ - [nfs_share.GetNfsShareRequest], - nfs_share.NfsShare]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetNfsShare(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_provisioning_config(self) -> Callable[ - [provisioning.GetProvisioningConfigRequest], - provisioning.ProvisioningConfig]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetProvisioningConfig(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_volume(self) -> Callable[ - [volume.GetVolumeRequest], - volume.Volume]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetVolume(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_volume_snapshot(self) -> Callable[ - [volume_snapshot.GetVolumeSnapshotRequest], - volume_snapshot.VolumeSnapshot]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetVolumeSnapshot(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_instances(self) -> Callable[ - [instance.ListInstancesRequest], - instance.ListInstancesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_luns(self) -> Callable[ - [lun.ListLunsRequest], - lun.ListLunsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._ListLuns(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_networks(self) -> Callable[ - [network.ListNetworksRequest], - network.ListNetworksResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListNetworks(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_network_usage(self) -> Callable[ - [network.ListNetworkUsageRequest], - network.ListNetworkUsageResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListNetworkUsage(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_nfs_shares(self) -> Callable[ - [nfs_share.ListNfsSharesRequest], - nfs_share.ListNfsSharesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListNfsShares(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_os_images(self) -> Callable[ - [osimage.ListOSImagesRequest], - osimage.ListOSImagesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListOSImages(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_provisioning_quotas(self) -> Callable[ - [provisioning.ListProvisioningQuotasRequest], - provisioning.ListProvisioningQuotasResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListProvisioningQuotas(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_ssh_keys(self) -> Callable[ - [ssh_key.ListSSHKeysRequest], - ssh_key.ListSSHKeysResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListSSHKeys(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_volumes(self) -> Callable[ - [volume.ListVolumesRequest], - volume.ListVolumesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListVolumes(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_volume_snapshots(self) -> Callable[ - [volume_snapshot.ListVolumeSnapshotsRequest], - volume_snapshot.ListVolumeSnapshotsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListVolumeSnapshots(self._session, self._host, self._interceptor) # type: ignore - - @property - def rename_instance(self) -> Callable[ - [instance.RenameInstanceRequest], - instance.Instance]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._RenameInstance(self._session, self._host, self._interceptor) # type: ignore - - @property - def rename_network(self) -> Callable[ - [network.RenameNetworkRequest], - network.Network]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._RenameNetwork(self._session, self._host, self._interceptor) # type: ignore - - @property - def rename_nfs_share(self) -> Callable[ - [nfs_share.RenameNfsShareRequest], - nfs_share.NfsShare]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._RenameNfsShare(self._session, self._host, self._interceptor) # type: ignore - - @property - def rename_volume(self) -> Callable[ - [volume.RenameVolumeRequest], - volume.Volume]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._RenameVolume(self._session, self._host, self._interceptor) # type: ignore - - @property - def reset_instance(self) -> Callable[ - [instance.ResetInstanceRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ResetInstance(self._session, self._host, self._interceptor) # type: ignore - - @property - def resize_volume(self) -> Callable[ - [gcb_volume.ResizeVolumeRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ResizeVolume(self._session, self._host, self._interceptor) # type: ignore - - @property - def restore_volume_snapshot(self) -> Callable[ - [gcb_volume_snapshot.RestoreVolumeSnapshotRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._RestoreVolumeSnapshot(self._session, self._host, self._interceptor) # type: ignore - - @property - def start_instance(self) -> Callable[ - [instance.StartInstanceRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._StartInstance(self._session, self._host, self._interceptor) # type: ignore - - @property - def stop_instance(self) -> Callable[ - [instance.StopInstanceRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._StopInstance(self._session, self._host, self._interceptor) # type: ignore - - @property - def submit_provisioning_config(self) -> Callable[ - [provisioning.SubmitProvisioningConfigRequest], - provisioning.SubmitProvisioningConfigResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._SubmitProvisioningConfig(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_instance(self) -> Callable[ - [gcb_instance.UpdateInstanceRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_network(self) -> Callable[ - [gcb_network.UpdateNetworkRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateNetwork(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_nfs_share(self) -> Callable[ - [gcb_nfs_share.UpdateNfsShareRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateNfsShare(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_provisioning_config(self) -> Callable[ - [provisioning.UpdateProvisioningConfigRequest], - provisioning.ProvisioningConfig]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateProvisioningConfig(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_volume(self) -> Callable[ - [gcb_volume.UpdateVolumeRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateVolume(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_location(self): - return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore - - class _GetLocation(_BaseBareMetalSolutionRestTransport._BaseGetLocation, BareMetalSolutionRestStub): - def __hash__(self): - return hash("BareMetalSolutionRestTransport.GetLocation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: locations_pb2.GetLocationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.Location: - - r"""Call the get location method over HTTP. - - Args: - request (locations_pb2.GetLocationRequest): - The request object for GetLocation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - locations_pb2.Location: Response from GetLocation method. - """ - - http_options = _BaseBareMetalSolutionRestTransport._BaseGetLocation._get_http_options() - - request, metadata = self._interceptor.pre_get_location(request, metadata) - transcoded_request = _BaseBareMetalSolutionRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBareMetalSolutionRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.GetLocation", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "GetLocation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BareMetalSolutionRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = locations_pb2.Location() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_location(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionAsyncClient.GetLocation", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "GetLocation", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def list_locations(self): - return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore - - class _ListLocations(_BaseBareMetalSolutionRestTransport._BaseListLocations, BareMetalSolutionRestStub): - def __hash__(self): - return hash("BareMetalSolutionRestTransport.ListLocations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: locations_pb2.ListLocationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - 
timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.ListLocationsResponse: - - r"""Call the list locations method over HTTP. - - Args: - request (locations_pb2.ListLocationsRequest): - The request object for ListLocations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - locations_pb2.ListLocationsResponse: Response from ListLocations method. - """ - - http_options = _BaseBareMetalSolutionRestTransport._BaseListLocations._get_http_options() - - request, metadata = self._interceptor.pre_list_locations(request, metadata) - transcoded_request = _BaseBareMetalSolutionRestTransport._BaseListLocations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBareMetalSolutionRestTransport._BaseListLocations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.baremetalsolution_v2.BareMetalSolutionClient.ListLocations", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "ListLocations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BareMetalSolutionRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = locations_pb2.ListLocationsResponse() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_list_locations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.baremetalsolution_v2.BareMetalSolutionAsyncClient.ListLocations", - extra = { - "serviceName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "rpcName": "ListLocations", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'BareMetalSolutionRestTransport', -) diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/transports/rest_base.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/transports/rest_base.py deleted file mode 100644 index 69c039295909..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/transports/rest_base.py +++ /dev/null @@ -1,2028 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
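Before the base helper classes in rest_base.py below, it is worth condensing the request cycle that every handler in the rest.py transport above repeats: transcode the proto request against its declared HTTP options, split the result into a URI, a JSON body, and query parameters, send it over the shared requests session, map any 4xx/5xx status onto a GoogleAPICallError subclass, and parse the JSON payload back into a proto for the interceptor hooks. A minimal sketch of that cycle, assuming a pre-transcoded request dict and an Operation-returning RPC; ``call_rest_rpc`` is an illustrative name, not part of the generated surface:

.. code-block:: python

    from typing import Optional

    import requests

    from google.api_core import exceptions as core_exceptions
    from google.longrunning import operations_pb2
    from google.protobuf import json_format

    def call_rest_rpc(session: requests.Session, host: str, transcoded: dict,
                      body: str, query_params: dict,
                      timeout: Optional[float] = None) -> operations_pb2.Operation:
        # Dispatch on the HTTP verb recorded during transcoding
        # (e.g. session.post for a 'post' binding).
        send = getattr(session, transcoded['method'])
        response = send(
            '{host}{uri}'.format(host=host, uri=transcoded['uri']),
            headers={'Content-Type': 'application/json'},
            params=query_params,
            data=body,
            timeout=timeout,
        )
        # Mirror the generated error handling: raise the appropriate
        # GoogleAPICallError subclass for the HTTP status code.
        if response.status_code >= 400:
            raise core_exceptions.from_http_response(response)
        # Parse the JSON payload back into the expected proto message.
        op = operations_pb2.Operation()
        json_format.Parse(response.content, op, ignore_unknown_fields=True)
        return op

The pre_*/post_* interceptor hooks and the DEBUG-gated logging seen above wrap this core; they are omitted from the sketch for brevity.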
-# -import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from .base import BareMetalSolutionTransport, DEFAULT_CLIENT_INFO - -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - - -from google.cloud.bare_metal_solution_v2.types import instance -from google.cloud.bare_metal_solution_v2.types import instance as gcb_instance -from google.cloud.bare_metal_solution_v2.types import lun -from google.cloud.bare_metal_solution_v2.types import network -from google.cloud.bare_metal_solution_v2.types import network as gcb_network -from google.cloud.bare_metal_solution_v2.types import nfs_share -from google.cloud.bare_metal_solution_v2.types import nfs_share as gcb_nfs_share -from google.cloud.bare_metal_solution_v2.types import osimage -from google.cloud.bare_metal_solution_v2.types import provisioning -from google.cloud.bare_metal_solution_v2.types import ssh_key -from google.cloud.bare_metal_solution_v2.types import ssh_key as gcb_ssh_key -from google.cloud.bare_metal_solution_v2.types import volume -from google.cloud.bare_metal_solution_v2.types import volume as gcb_volume -from google.cloud.bare_metal_solution_v2.types import volume_snapshot -from google.cloud.bare_metal_solution_v2.types import volume_snapshot as gcb_volume_snapshot -from google.protobuf import empty_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - - -class _BaseBareMetalSolutionRestTransport(BareMetalSolutionTransport): - """Base REST backend transport for BareMetalSolution. - - Note: This class is not meant to be used directly. Use its sync and - async sub-classes instead. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'baremetalsolution.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - Args: - host (Optional[str]): - The hostname to connect to (default: 'baremetalsolution.googleapis.com'). - credentials (Optional[Any]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. 
- """ - # Run the base constructor - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - - class _BaseCreateNfsShare: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*}/nfsShares', - 'body': 'nfs_share', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = gcb_nfs_share.CreateNfsShareRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBareMetalSolutionRestTransport._BaseCreateNfsShare._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateProvisioningConfig: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*}/provisioningConfigs', - 'body': 'provisioning_config', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = provisioning.CreateProvisioningConfigRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBareMetalSolutionRestTransport._BaseCreateProvisioningConfig._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateSSHKey: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - 
__REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "sshKeyId" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*}/sshKeys', - 'body': 'ssh_key', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = gcb_ssh_key.CreateSSHKeyRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBareMetalSolutionRestTransport._BaseCreateSSHKey._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateVolumeSnapshot: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*/volumes/*}/snapshots', - 'body': 'volume_snapshot', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = gcb_volume_snapshot.CreateVolumeSnapshotRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBareMetalSolutionRestTransport._BaseCreateVolumeSnapshot._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteNfsShare: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/locations/*/nfsShares/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = nfs_share.DeleteNfsShareRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - 
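(Reviewer note, not part of the generated file: every :code:`_Base*` helper class in this deleted module follows the same four-step request pipeline; only the HTTP rule and the required-field defaults differ, e.g. the :code:`sshKeyId` default just above. A hedged sketch of how the concrete transport chains these helpers; :code:`build_rest_call` is an illustrative name of mine, not code from this PR.)

.. code-block:: python

    # Illustrative only: chains the generated helper methods the way the
    # concrete REST transport does. `helper` would be e.g.
    # _BaseBareMetalSolutionRestTransport._BaseCreateSSHKey.
    def build_rest_call(helper, request):
        # 1. The HTTP rule(s) for this RPC: verb, URI template, body field.
        http_options = helper._get_http_options()

        # 2. Split the proto request into URI path, body, and query params.
        transcoded = helper._get_transcoded_request(http_options, request)

        # 3. Serialize the body field, if this RPC declares one (the
        #    GET/DELETE helpers above omit _get_request_body_json entirely).
        body = None
        if hasattr(helper, "_get_request_body_json"):
            body = helper._get_request_body_json(transcoded)

        # 4. Query params, with required proto3 fields back-filled when they
        #    sit at their default value (e.g. sshKeyId="" for CreateSSHKey),
        #    since MessageToJson drops default-valued scalar fields.
        query_params = helper._get_query_params_json(transcoded)

        return transcoded["method"], transcoded["uri"], body, query_params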
@staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBareMetalSolutionRestTransport._BaseDeleteNfsShare._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteSSHKey: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/locations/*/sshKeys/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = ssh_key.DeleteSSHKeyRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBareMetalSolutionRestTransport._BaseDeleteSSHKey._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteVolumeSnapshot: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/locations/*/volumes/*/snapshots/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = volume_snapshot.DeleteVolumeSnapshotRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBareMetalSolutionRestTransport._BaseDeleteVolumeSnapshot._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDetachLun: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{instance=projects/*/locations/*/instances/*}:detachLun', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = gcb_instance.DetachLunRequest.pb(request) - transcoded_request = path_template.transcode(http_options, 
pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBareMetalSolutionRestTransport._BaseDetachLun._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDisableInteractiveSerialConsole: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{name=projects/*/locations/*/instances/*}:disableInteractiveSerialConsole', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = instance.DisableInteractiveSerialConsoleRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBareMetalSolutionRestTransport._BaseDisableInteractiveSerialConsole._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseEnableInteractiveSerialConsole: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{name=projects/*/locations/*/instances/*}:enableInteractiveSerialConsole', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = instance.EnableInteractiveSerialConsoleRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBareMetalSolutionRestTransport._BaseEnableInteractiveSerialConsole._get_unset_required_fields(query_params)) - - 
query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseEvictLun: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{name=projects/*/locations/*/volumes/*/luns/*}:evict', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = lun.EvictLunRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBareMetalSolutionRestTransport._BaseEvictLun._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseEvictVolume: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{name=projects/*/locations/*/volumes/*}:evict', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = volume.EvictVolumeRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBareMetalSolutionRestTransport._BaseEvictVolume._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetInstance: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/instances/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = instance.GetInstanceRequest.pb(request) - 
transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBareMetalSolutionRestTransport._BaseGetInstance._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetLun: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/volumes/*/luns/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = lun.GetLunRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBareMetalSolutionRestTransport._BaseGetLun._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetNetwork: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/networks/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = network.GetNetworkRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBareMetalSolutionRestTransport._BaseGetNetwork._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetNfsShare: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/nfsShares/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = nfs_share.GetNfsShareRequest.pb(request) - transcoded_request = path_template.transcode(http_options, 
pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBareMetalSolutionRestTransport._BaseGetNfsShare._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetProvisioningConfig: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/provisioningConfigs/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = provisioning.GetProvisioningConfigRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBareMetalSolutionRestTransport._BaseGetProvisioningConfig._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetVolume: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/volumes/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = volume.GetVolumeRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBareMetalSolutionRestTransport._BaseGetVolume._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetVolumeSnapshot: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/volumes/*/snapshots/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = volume_snapshot.GetVolumeSnapshotRequest.pb(request) - transcoded_request = 
path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBareMetalSolutionRestTransport._BaseGetVolumeSnapshot._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListInstances: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*/locations/*}/instances', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = instance.ListInstancesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBareMetalSolutionRestTransport._BaseListInstances._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListLuns: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*/locations/*/volumes/*}/luns', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = lun.ListLunsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBareMetalSolutionRestTransport._BaseListLuns._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListNetworks: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*/locations/*}/networks', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = network.ListNetworksRequest.pb(request) - transcoded_request = path_template.transcode(http_options, 
pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBareMetalSolutionRestTransport._BaseListNetworks._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListNetworkUsage: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{location=projects/*/locations/*}/networks:listNetworkUsage', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = network.ListNetworkUsageRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBareMetalSolutionRestTransport._BaseListNetworkUsage._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListNfsShares: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*/locations/*}/nfsShares', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = nfs_share.ListNfsSharesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBareMetalSolutionRestTransport._BaseListNfsShares._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListOSImages: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*/locations/*}/osImages', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = osimage.ListOSImagesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, 
pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBareMetalSolutionRestTransport._BaseListOSImages._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListProvisioningQuotas: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*/locations/*}/provisioningQuotas', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = provisioning.ListProvisioningQuotasRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBareMetalSolutionRestTransport._BaseListProvisioningQuotas._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListSSHKeys: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*/locations/*}/sshKeys', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = ssh_key.ListSSHKeysRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBareMetalSolutionRestTransport._BaseListSSHKeys._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListVolumes: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*/locations/*}/volumes', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = volume.ListVolumesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, 
pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBareMetalSolutionRestTransport._BaseListVolumes._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListVolumeSnapshots: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*/locations/*/volumes/*}/snapshots', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = volume_snapshot.ListVolumeSnapshotsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBareMetalSolutionRestTransport._BaseListVolumeSnapshots._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseRenameInstance: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{name=projects/*/locations/*/instances/*}:rename', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = instance.RenameInstanceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBareMetalSolutionRestTransport._BaseRenameInstance._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseRenameNetwork: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': 
'/v2/{name=projects/*/locations/*/networks/*}:rename', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = network.RenameNetworkRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBareMetalSolutionRestTransport._BaseRenameNetwork._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseRenameNfsShare: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{name=projects/*/locations/*/nfsShares/*}:rename', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = nfs_share.RenameNfsShareRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBareMetalSolutionRestTransport._BaseRenameNfsShare._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseRenameVolume: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{name=projects/*/locations/*/volumes/*}:rename', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = volume.RenameVolumeRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - 
)) - query_params.update(_BaseBareMetalSolutionRestTransport._BaseRenameVolume._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseResetInstance: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{name=projects/*/locations/*/instances/*}:reset', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = instance.ResetInstanceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBareMetalSolutionRestTransport._BaseResetInstance._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseResizeVolume: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{volume=projects/*/locations/*/volumes/*}:resize', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = gcb_volume.ResizeVolumeRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBareMetalSolutionRestTransport._BaseResizeVolume._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseRestoreVolumeSnapshot: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': 
'/v2/{volume_snapshot=projects/*/locations/*/volumes/*/snapshots/*}:restoreVolumeSnapshot', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = gcb_volume_snapshot.RestoreVolumeSnapshotRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBareMetalSolutionRestTransport._BaseRestoreVolumeSnapshot._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseStartInstance: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{name=projects/*/locations/*/instances/*}:start', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = instance.StartInstanceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBareMetalSolutionRestTransport._BaseStartInstance._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseStopInstance: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{name=projects/*/locations/*/instances/*}:stop', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = instance.StopInstanceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - 
transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBareMetalSolutionRestTransport._BaseStopInstance._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseSubmitProvisioningConfig: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*}/provisioningConfigs:submit', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = provisioning.SubmitProvisioningConfigRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBareMetalSolutionRestTransport._BaseSubmitProvisioningConfig._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateInstance: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v2/{instance.name=projects/*/locations/*/instances/*}', - 'body': 'instance', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = gcb_instance.UpdateInstanceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBareMetalSolutionRestTransport._BaseUpdateInstance._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateNetwork: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def 
_get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v2/{network.name=projects/*/locations/*/networks/*}', - 'body': 'network', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = gcb_network.UpdateNetworkRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBareMetalSolutionRestTransport._BaseUpdateNetwork._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateNfsShare: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v2/{nfs_share.name=projects/*/locations/*/nfsShares/*}', - 'body': 'nfs_share', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = gcb_nfs_share.UpdateNfsShareRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBareMetalSolutionRestTransport._BaseUpdateNfsShare._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateProvisioningConfig: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v2/{provisioning_config.name=projects/*/locations/*/provisioningConfigs/*}', - 'body': 'provisioning_config', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = provisioning.UpdateProvisioningConfigRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - 
use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBareMetalSolutionRestTransport._BaseUpdateProvisioningConfig._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateVolume: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v2/{volume.name=projects/*/locations/*/volumes/*}', - 'body': 'volume', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = gcb_volume.UpdateVolumeRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBareMetalSolutionRestTransport._BaseUpdateVolume._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetLocation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseListLocations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=projects/*}/locations', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - -__all__=( - '_BaseBareMetalSolutionRestTransport', -) diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/types/__init__.py 
b/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/types/__init__.py deleted file mode 100644 index eb468c18af93..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/types/__init__.py +++ /dev/null @@ -1,206 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .baremetalsolution import ( - OperationMetadata, - ResetInstanceResponse, -) -from .common import ( - VolumePerformanceTier, - WorkloadProfile, -) -from .instance import ( - DetachLunRequest, - DisableInteractiveSerialConsoleRequest, - DisableInteractiveSerialConsoleResponse, - EnableInteractiveSerialConsoleRequest, - EnableInteractiveSerialConsoleResponse, - GetInstanceRequest, - Instance, - ListInstancesRequest, - ListInstancesResponse, - RenameInstanceRequest, - ResetInstanceRequest, - ServerNetworkTemplate, - StartInstanceRequest, - StartInstanceResponse, - StopInstanceRequest, - StopInstanceResponse, - UpdateInstanceRequest, -) -from .lun import ( - EvictLunRequest, - GetLunRequest, - ListLunsRequest, - ListLunsResponse, - Lun, -) -from .network import ( - GetNetworkRequest, - ListNetworksRequest, - ListNetworksResponse, - ListNetworkUsageRequest, - ListNetworkUsageResponse, - LogicalInterface, - Network, - NetworkAddressReservation, - NetworkMountPoint, - NetworkUsage, - RenameNetworkRequest, - UpdateNetworkRequest, - VRF, -) -from .nfs_share import ( - CreateNfsShareRequest, - DeleteNfsShareRequest, - GetNfsShareRequest, - ListNfsSharesRequest, - ListNfsSharesResponse, - NfsShare, - RenameNfsShareRequest, - UpdateNfsShareRequest, -) -from .osimage import ( - ListOSImagesRequest, - ListOSImagesResponse, - OSImage, -) -from .provisioning import ( - CreateProvisioningConfigRequest, - GetProvisioningConfigRequest, - InstanceConfig, - InstanceQuota, - ListProvisioningQuotasRequest, - ListProvisioningQuotasResponse, - NetworkConfig, - ProvisioningConfig, - ProvisioningQuota, - SubmitProvisioningConfigRequest, - SubmitProvisioningConfigResponse, - UpdateProvisioningConfigRequest, - VolumeConfig, -) -from .ssh_key import ( - CreateSSHKeyRequest, - DeleteSSHKeyRequest, - ListSSHKeysRequest, - ListSSHKeysResponse, - SSHKey, -) -from .volume import ( - EvictVolumeRequest, - GetVolumeRequest, - ListVolumesRequest, - ListVolumesResponse, - RenameVolumeRequest, - ResizeVolumeRequest, - UpdateVolumeRequest, - Volume, -) -from .volume_snapshot import ( - CreateVolumeSnapshotRequest, - DeleteVolumeSnapshotRequest, - GetVolumeSnapshotRequest, - ListVolumeSnapshotsRequest, - ListVolumeSnapshotsResponse, - RestoreVolumeSnapshotRequest, - VolumeSnapshot, -) - -__all__ = ( - 'OperationMetadata', - 'ResetInstanceResponse', - 'VolumePerformanceTier', - 'WorkloadProfile', - 'DetachLunRequest', - 'DisableInteractiveSerialConsoleRequest', - 'DisableInteractiveSerialConsoleResponse', - 'EnableInteractiveSerialConsoleRequest', - 'EnableInteractiveSerialConsoleResponse', - 'GetInstanceRequest', - 
'Instance', - 'ListInstancesRequest', - 'ListInstancesResponse', - 'RenameInstanceRequest', - 'ResetInstanceRequest', - 'ServerNetworkTemplate', - 'StartInstanceRequest', - 'StartInstanceResponse', - 'StopInstanceRequest', - 'StopInstanceResponse', - 'UpdateInstanceRequest', - 'EvictLunRequest', - 'GetLunRequest', - 'ListLunsRequest', - 'ListLunsResponse', - 'Lun', - 'GetNetworkRequest', - 'ListNetworksRequest', - 'ListNetworksResponse', - 'ListNetworkUsageRequest', - 'ListNetworkUsageResponse', - 'LogicalInterface', - 'Network', - 'NetworkAddressReservation', - 'NetworkMountPoint', - 'NetworkUsage', - 'RenameNetworkRequest', - 'UpdateNetworkRequest', - 'VRF', - 'CreateNfsShareRequest', - 'DeleteNfsShareRequest', - 'GetNfsShareRequest', - 'ListNfsSharesRequest', - 'ListNfsSharesResponse', - 'NfsShare', - 'RenameNfsShareRequest', - 'UpdateNfsShareRequest', - 'ListOSImagesRequest', - 'ListOSImagesResponse', - 'OSImage', - 'CreateProvisioningConfigRequest', - 'GetProvisioningConfigRequest', - 'InstanceConfig', - 'InstanceQuota', - 'ListProvisioningQuotasRequest', - 'ListProvisioningQuotasResponse', - 'NetworkConfig', - 'ProvisioningConfig', - 'ProvisioningQuota', - 'SubmitProvisioningConfigRequest', - 'SubmitProvisioningConfigResponse', - 'UpdateProvisioningConfigRequest', - 'VolumeConfig', - 'CreateSSHKeyRequest', - 'DeleteSSHKeyRequest', - 'ListSSHKeysRequest', - 'ListSSHKeysResponse', - 'SSHKey', - 'EvictVolumeRequest', - 'GetVolumeRequest', - 'ListVolumesRequest', - 'ListVolumesResponse', - 'RenameVolumeRequest', - 'ResizeVolumeRequest', - 'UpdateVolumeRequest', - 'Volume', - 'CreateVolumeSnapshotRequest', - 'DeleteVolumeSnapshotRequest', - 'GetVolumeSnapshotRequest', - 'ListVolumeSnapshotsRequest', - 'ListVolumeSnapshotsResponse', - 'RestoreVolumeSnapshotRequest', - 'VolumeSnapshot', -) diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/types/baremetalsolution.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/types/baremetalsolution.py deleted file mode 100644 index 2819ddd525ae..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/types/baremetalsolution.py +++ /dev/null @@ -1,101 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.baremetalsolution.v2', - manifest={ - 'OperationMetadata', - 'ResetInstanceResponse', - }, -) - - -class OperationMetadata(proto.Message): - r"""Represents the metadata from a long-running operation. - - Attributes: - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the operation was - created. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. 
The time the operation finished - running. - target (str): - Output only. Server-defined resource path for - the target of the operation. - verb (str): - Output only. Name of the action executed by - the operation. - status_message (str): - Output only. Human-readable status of the - operation, if any. - requested_cancellation (bool): - Output only. Identifies whether the user requested the - cancellation of the operation. Operations that have been - successfully cancelled have [Operation.error][] value with a - [google.rpc.Status.code][google.rpc.Status.code] of 1, - corresponding to ``Code.CANCELLED``. - api_version (str): - Output only. API version used with the - operation. - """ - - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - target: str = proto.Field( - proto.STRING, - number=3, - ) - verb: str = proto.Field( - proto.STRING, - number=4, - ) - status_message: str = proto.Field( - proto.STRING, - number=5, - ) - requested_cancellation: bool = proto.Field( - proto.BOOL, - number=6, - ) - api_version: str = proto.Field( - proto.STRING, - number=7, - ) - - -class ResetInstanceResponse(proto.Message): - r"""Response message from resetting a server. - """ - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/types/common.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/types/common.py deleted file mode 100644 index cc80b54a0188..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/types/common.py +++ /dev/null @@ -1,67 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.baremetalsolution.v2', - manifest={ - 'VolumePerformanceTier', - 'WorkloadProfile', - }, -) - - -class VolumePerformanceTier(proto.Enum): - r"""Performance tier of the Volume. - - Values: - VOLUME_PERFORMANCE_TIER_UNSPECIFIED (0): - Value is not specified. - VOLUME_PERFORMANCE_TIER_SHARED (1): - Regular volumes, shared aggregates. - VOLUME_PERFORMANCE_TIER_ASSIGNED (2): - Assigned aggregates. - VOLUME_PERFORMANCE_TIER_HT (3): - High throughput aggregates. - """ - VOLUME_PERFORMANCE_TIER_UNSPECIFIED = 0 - VOLUME_PERFORMANCE_TIER_SHARED = 1 - VOLUME_PERFORMANCE_TIER_ASSIGNED = 2 - VOLUME_PERFORMANCE_TIER_HT = 3 - - -class WorkloadProfile(proto.Enum): - r"""The possible values for a workload profile. - - Values: - WORKLOAD_PROFILE_UNSPECIFIED (0): - The workload profile is in an unknown state. - WORKLOAD_PROFILE_GENERIC (1): - The workload profile is generic. 
- WORKLOAD_PROFILE_HANA (2): - The workload profile is hana. - """ - WORKLOAD_PROFILE_UNSPECIFIED = 0 - WORKLOAD_PROFILE_GENERIC = 1 - WORKLOAD_PROFILE_HANA = 2 - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/types/instance.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/types/instance.py deleted file mode 100644 index 0ee38c9a07a8..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/types/instance.py +++ /dev/null @@ -1,572 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.bare_metal_solution_v2.types import common -from google.cloud.bare_metal_solution_v2.types import lun as gcb_lun -from google.cloud.bare_metal_solution_v2.types import network -from google.cloud.bare_metal_solution_v2.types import volume -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.baremetalsolution.v2', - manifest={ - 'Instance', - 'GetInstanceRequest', - 'ListInstancesRequest', - 'ListInstancesResponse', - 'UpdateInstanceRequest', - 'RenameInstanceRequest', - 'ResetInstanceRequest', - 'StartInstanceRequest', - 'StartInstanceResponse', - 'StopInstanceRequest', - 'StopInstanceResponse', - 'EnableInteractiveSerialConsoleRequest', - 'EnableInteractiveSerialConsoleResponse', - 'DisableInteractiveSerialConsoleRequest', - 'DisableInteractiveSerialConsoleResponse', - 'DetachLunRequest', - 'ServerNetworkTemplate', - }, -) - - -class Instance(proto.Message): - r"""A server. - - Attributes: - name (str): - Immutable. The resource name of this ``Instance``. Resource - names are schemeless URIs that follow the conventions in - https://cloud.google.com/apis/design/resource_names. Format: - ``projects/{project}/locations/{location}/instances/{instance}`` - id (str): - Output only. An identifier for the ``Instance``, generated - by the backend. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Create a time stamp. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Update a time stamp. - machine_type (str): - Immutable. The server type. `Available server - types `__ - state (google.cloud.bare_metal_solution_v2.types.Instance.State): - Output only. The state of the server. - hyperthreading_enabled (bool): - True if you enable hyperthreading for the - server, otherwise false. The default value is - false. - labels (MutableMapping[str, str]): - Labels as key value pairs. - luns (MutableSequence[google.cloud.bare_metal_solution_v2.types.Lun]): - Immutable. List of LUNs associated with this - server. 
- volumes (MutableSequence[google.cloud.bare_metal_solution_v2.types.Volume]): - Input only. List of Volumes to attach to this - Instance on creation. This field won't be - populated in Get/List responses. - networks (MutableSequence[google.cloud.bare_metal_solution_v2.types.Network]): - Output only. List of networks associated with - this server. - interactive_serial_console_enabled (bool): - Output only. True if the interactive serial - console feature is enabled for the instance, - false otherwise. The default value is false. - os_image (str): - The OS image currently installed on the - server. - pod (str): - Immutable. Pod name. - Pod is an independent part of infrastructure. - Instance can be connected to the assets - (networks, volumes) allocated in the same pod - only. - network_template (str): - Instance network template name. For eg, bondaa-bondaa, - bondab-nic, etc. Generally, the template name follows the - syntax of "bond" or "nic". - logical_interfaces (MutableSequence[google.cloud.bare_metal_solution_v2.types.LogicalInterface]): - List of logical interfaces for the instance. The number of - logical interfaces will be the same as number of hardware - bond/nic on the chosen network template. For the - non-multivlan configurations (for eg, existing servers) that - use existing default network template (bondaa-bondaa), both - the Instance.networks field and the - Instance.logical_interfaces fields will be filled to ensure - backward compatibility. For the others, only - Instance.logical_interfaces will be filled. - login_info (str): - Output only. Text field about info for - logging in. - workload_profile (google.cloud.bare_metal_solution_v2.types.WorkloadProfile): - The workload profile for the instance. - firmware_version (str): - Output only. The firmware version for the - instance. - """ - class State(proto.Enum): - r"""The possible states for this server. - - Values: - STATE_UNSPECIFIED (0): - The server is in an unknown state. - PROVISIONING (1): - The server is being provisioned. - RUNNING (2): - The server is running. - DELETED (3): - The server has been deleted. - UPDATING (4): - The server is being updated. - STARTING (5): - The server is starting. - STOPPING (6): - The server is stopping. - SHUTDOWN (7): - The server is shutdown. 
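These ``State`` values surface on the generated ``Instance`` message as a proto-plus enum, so callers can compare against them directly. A minimal sketch, assuming the standard generated ``BareMetalSolutionClient`` and placeholder resource names:

.. code-block:: python

    from google.cloud import bare_metal_solution_v2

    client = bare_metal_solution_v2.BareMetalSolutionClient()

    # Placeholder resource name; substitute a real project/location/instance.
    name = "projects/my-project/locations/us-central1/instances/my-server"
    instance = client.get_instance(name=name)

    if instance.state == bare_metal_solution_v2.Instance.State.RUNNING:
        print(f"{instance.name} is running on {instance.machine_type}")
    elif instance.state == bare_metal_solution_v2.Instance.State.SHUTDOWN:
        # start_instance returns a long-running operation; block until done.
        client.start_instance(name=name).result()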
- """ - STATE_UNSPECIFIED = 0 - PROVISIONING = 1 - RUNNING = 2 - DELETED = 3 - UPDATING = 4 - STARTING = 5 - STOPPING = 6 - SHUTDOWN = 7 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - id: str = proto.Field( - proto.STRING, - number=11, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - machine_type: str = proto.Field( - proto.STRING, - number=4, - ) - state: State = proto.Field( - proto.ENUM, - number=5, - enum=State, - ) - hyperthreading_enabled: bool = proto.Field( - proto.BOOL, - number=6, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=7, - ) - luns: MutableSequence[gcb_lun.Lun] = proto.RepeatedField( - proto.MESSAGE, - number=8, - message=gcb_lun.Lun, - ) - volumes: MutableSequence[volume.Volume] = proto.RepeatedField( - proto.MESSAGE, - number=16, - message=volume.Volume, - ) - networks: MutableSequence[network.Network] = proto.RepeatedField( - proto.MESSAGE, - number=9, - message=network.Network, - ) - interactive_serial_console_enabled: bool = proto.Field( - proto.BOOL, - number=10, - ) - os_image: str = proto.Field( - proto.STRING, - number=12, - ) - pod: str = proto.Field( - proto.STRING, - number=13, - ) - network_template: str = proto.Field( - proto.STRING, - number=14, - ) - logical_interfaces: MutableSequence[network.LogicalInterface] = proto.RepeatedField( - proto.MESSAGE, - number=15, - message=network.LogicalInterface, - ) - login_info: str = proto.Field( - proto.STRING, - number=17, - ) - workload_profile: common.WorkloadProfile = proto.Field( - proto.ENUM, - number=18, - enum=common.WorkloadProfile, - ) - firmware_version: str = proto.Field( - proto.STRING, - number=19, - ) - - -class GetInstanceRequest(proto.Message): - r"""Message for requesting server information. - - Attributes: - name (str): - Required. Name of the resource. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListInstancesRequest(proto.Message): - r"""Message for requesting the list of servers. - - Attributes: - parent (str): - Required. Parent value for - ListInstancesRequest. - page_size (int): - Requested page size. Server may return fewer - items than requested. If unspecified, the server - will pick an appropriate default. - page_token (str): - A token identifying a page of results from - the server. - filter (str): - List filter. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListInstancesResponse(proto.Message): - r"""Response message for the list of servers. - - Attributes: - instances (MutableSequence[google.cloud.bare_metal_solution_v2.types.Instance]): - The list of servers. - next_page_token (str): - A token identifying a page of results from - the server. - unreachable (MutableSequence[str]): - Locations that could not be reached. 
- """ - - @property - def raw_page(self): - return self - - instances: MutableSequence['Instance'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Instance', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class UpdateInstanceRequest(proto.Message): - r"""Message requesting to updating a server. - - Attributes: - instance (google.cloud.bare_metal_solution_v2.types.Instance): - Required. The server to update. - - The ``name`` field is used to identify the instance to - update. Format: - projects/{project}/locations/{location}/instances/{instance} - update_mask (google.protobuf.field_mask_pb2.FieldMask): - The list of fields to update. The currently supported fields - are: ``labels`` ``hyperthreading_enabled`` ``os_image`` - """ - - instance: 'Instance' = proto.Field( - proto.MESSAGE, - number=1, - message='Instance', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class RenameInstanceRequest(proto.Message): - r"""Message requesting rename of a server. - - Attributes: - name (str): - Required. The ``name`` field is used to identify the - instance. Format: - projects/{project}/locations/{location}/instances/{instance} - new_instance_id (str): - Required. The new ``id`` of the instance. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - new_instance_id: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ResetInstanceRequest(proto.Message): - r"""Message requesting to reset a server. - - Attributes: - name (str): - Required. Name of the resource. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class StartInstanceRequest(proto.Message): - r"""Message requesting to start a server. - - Attributes: - name (str): - Required. Name of the resource. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class StartInstanceResponse(proto.Message): - r"""Response message from starting a server. - """ - - -class StopInstanceRequest(proto.Message): - r"""Message requesting to stop a server. - - Attributes: - name (str): - Required. Name of the resource. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class StopInstanceResponse(proto.Message): - r"""Response message from stopping a server. - """ - - -class EnableInteractiveSerialConsoleRequest(proto.Message): - r"""Message for enabling the interactive serial console on an - instance. - - Attributes: - name (str): - Required. Name of the resource. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class EnableInteractiveSerialConsoleResponse(proto.Message): - r"""Message for response of EnableInteractiveSerialConsole. - """ - - -class DisableInteractiveSerialConsoleRequest(proto.Message): - r"""Message for disabling the interactive serial console on an - instance. - - Attributes: - name (str): - Required. Name of the resource. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class DisableInteractiveSerialConsoleResponse(proto.Message): - r"""Message for response of DisableInteractiveSerialConsole. - """ - - -class DetachLunRequest(proto.Message): - r"""Message for detach specific LUN from an Instance. - - Attributes: - instance (str): - Required. Name of the instance. - lun (str): - Required. Name of the Lun to detach. - skip_reboot (bool): - If true, performs lun unmapping without - instance reboot. 
- """ - - instance: str = proto.Field( - proto.STRING, - number=1, - ) - lun: str = proto.Field( - proto.STRING, - number=2, - ) - skip_reboot: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class ServerNetworkTemplate(proto.Message): - r"""Network template. - - Attributes: - name (str): - Output only. Template's unique name. The full resource name - follows the pattern: - ``projects/{project}/locations/{location}/serverNetworkTemplate/{server_network_template}`` - Generally, the {server_network_template} follows the syntax - of "bond" or - "nic". - applicable_instance_types (MutableSequence[str]): - Instance types this template is applicable - to. - logical_interfaces (MutableSequence[google.cloud.bare_metal_solution_v2.types.ServerNetworkTemplate.LogicalInterface]): - Logical interfaces. - """ - - class LogicalInterface(proto.Message): - r"""Logical interface. - - Attributes: - name (str): - Interface name. This is not a globally unique identifier. - Name is unique only inside the ServerNetworkTemplate. This - is of syntax or - and forms part of the network - template name. - type_ (google.cloud.bare_metal_solution_v2.types.ServerNetworkTemplate.LogicalInterface.InterfaceType): - Interface type. - required (bool): - If true, interface must have network - connected. - """ - class InterfaceType(proto.Enum): - r"""Interface type. - - Values: - INTERFACE_TYPE_UNSPECIFIED (0): - Unspecified value. - BOND (1): - Bond interface type. - NIC (2): - NIC interface type. - """ - INTERFACE_TYPE_UNSPECIFIED = 0 - BOND = 1 - NIC = 2 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - type_: 'ServerNetworkTemplate.LogicalInterface.InterfaceType' = proto.Field( - proto.ENUM, - number=2, - enum='ServerNetworkTemplate.LogicalInterface.InterfaceType', - ) - required: bool = proto.Field( - proto.BOOL, - number=3, - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - applicable_instance_types: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - logical_interfaces: MutableSequence[LogicalInterface] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=LogicalInterface, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/types/lun.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/types/lun.py deleted file mode 100644 index 012113770f4d..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/types/lun.py +++ /dev/null @@ -1,268 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.baremetalsolution.v2', - manifest={ - 'Lun', - 'GetLunRequest', - 'ListLunsRequest', - 'ListLunsResponse', - 'EvictLunRequest', - }, -) - - -class Lun(proto.Message): - r"""A storage volume logical unit number (LUN). - - Attributes: - name (str): - Output only. The name of the LUN. - id (str): - An identifier for the LUN, generated by the - backend. - state (google.cloud.bare_metal_solution_v2.types.Lun.State): - The state of this storage volume. - size_gb (int): - The size of this LUN, in gigabytes. - multiprotocol_type (google.cloud.bare_metal_solution_v2.types.Lun.MultiprotocolType): - The LUN multiprotocol type ensures the - characteristics of the LUN are optimized for - each operating system. - storage_volume (str): - Display the storage volume for this LUN. - shareable (bool): - Display if this LUN can be shared between - multiple physical servers. - boot_lun (bool): - Display if this LUN is a boot LUN. - storage_type (google.cloud.bare_metal_solution_v2.types.Lun.StorageType): - The storage type for this LUN. - wwid (str): - The WWID for this LUN. - expire_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Time after which LUN will be fully deleted. It - is filled only for LUNs in COOL_OFF state. - instances (MutableSequence[str]): - Output only. Instances this Lun is attached - to. - """ - class State(proto.Enum): - r"""The possible states for the LUN. - - Values: - STATE_UNSPECIFIED (0): - The LUN is in an unknown state. - CREATING (1): - The LUN is being created. - UPDATING (2): - The LUN is being updated. - READY (3): - The LUN is ready for use. - DELETING (4): - The LUN has been requested to be deleted. - COOL_OFF (5): - The LUN is in cool off state. It will be deleted after - ``expire_time``. - """ - STATE_UNSPECIFIED = 0 - CREATING = 1 - UPDATING = 2 - READY = 3 - DELETING = 4 - COOL_OFF = 5 - - class MultiprotocolType(proto.Enum): - r"""Display the operating systems present for the LUN - multiprotocol type. - - Values: - MULTIPROTOCOL_TYPE_UNSPECIFIED (0): - Server has no OS specified. - LINUX (1): - Server with Linux OS. - """ - MULTIPROTOCOL_TYPE_UNSPECIFIED = 0 - LINUX = 1 - - class StorageType(proto.Enum): - r"""The storage types for a LUN. - - Values: - STORAGE_TYPE_UNSPECIFIED (0): - The storage type for this LUN is unknown. - SSD (1): - This storage type for this LUN is SSD. - HDD (2): - This storage type for this LUN is HDD. 
- """ - STORAGE_TYPE_UNSPECIFIED = 0 - SSD = 1 - HDD = 2 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - id: str = proto.Field( - proto.STRING, - number=10, - ) - state: State = proto.Field( - proto.ENUM, - number=2, - enum=State, - ) - size_gb: int = proto.Field( - proto.INT64, - number=3, - ) - multiprotocol_type: MultiprotocolType = proto.Field( - proto.ENUM, - number=4, - enum=MultiprotocolType, - ) - storage_volume: str = proto.Field( - proto.STRING, - number=5, - ) - shareable: bool = proto.Field( - proto.BOOL, - number=6, - ) - boot_lun: bool = proto.Field( - proto.BOOL, - number=7, - ) - storage_type: StorageType = proto.Field( - proto.ENUM, - number=8, - enum=StorageType, - ) - wwid: str = proto.Field( - proto.STRING, - number=9, - ) - expire_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=11, - message=timestamp_pb2.Timestamp, - ) - instances: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=12, - ) - - -class GetLunRequest(proto.Message): - r"""Message for requesting storage lun information. - - Attributes: - name (str): - Required. Name of the resource. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListLunsRequest(proto.Message): - r"""Message for requesting a list of storage volume luns. - - Attributes: - parent (str): - Required. Parent value for ListLunsRequest. - page_size (int): - Requested page size. The server might return - fewer items than requested. If unspecified, - server will pick an appropriate default. - page_token (str): - A token identifying a page of results from - the server. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListLunsResponse(proto.Message): - r"""Response message containing the list of storage volume luns. - - Attributes: - luns (MutableSequence[google.cloud.bare_metal_solution_v2.types.Lun]): - The list of luns. - next_page_token (str): - A token identifying a page of results from - the server. - unreachable (MutableSequence[str]): - Locations that could not be reached. - """ - - @property - def raw_page(self): - return self - - luns: MutableSequence['Lun'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Lun', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class EvictLunRequest(proto.Message): - r"""Request for skip lun cooloff and delete it. - - Attributes: - name (str): - Required. The name of the lun. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/types/network.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/types/network.py deleted file mode 100644 index bc692c3790ad..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/types/network.py +++ /dev/null @@ -1,649 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import field_mask_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.baremetalsolution.v2', - manifest={ - 'Network', - 'NetworkAddressReservation', - 'VRF', - 'LogicalInterface', - 'GetNetworkRequest', - 'ListNetworksRequest', - 'ListNetworksResponse', - 'UpdateNetworkRequest', - 'NetworkUsage', - 'ListNetworkUsageRequest', - 'ListNetworkUsageResponse', - 'NetworkMountPoint', - 'RenameNetworkRequest', - }, -) - - -class Network(proto.Message): - r"""A Network. - - Attributes: - name (str): - Output only. The resource name of this ``Network``. Resource - names are schemeless URIs that follow the conventions in - https://cloud.google.com/apis/design/resource_names. Format: - ``projects/{project}/locations/{location}/networks/{network}`` - id (str): - An identifier for the ``Network``, generated by the backend. - type_ (google.cloud.bare_metal_solution_v2.types.Network.Type): - The type of this network. - ip_address (str): - IP address configured. - mac_address (MutableSequence[str]): - List of physical interfaces. - state (google.cloud.bare_metal_solution_v2.types.Network.State): - The Network state. - vlan_id (str): - The vlan id of the Network. - cidr (str): - The cidr of the Network. - vrf (google.cloud.bare_metal_solution_v2.types.VRF): - The vrf for the Network. - labels (MutableMapping[str, str]): - Labels as key value pairs. - services_cidr (str): - IP range for reserved for services (e.g. - NFS). - reservations (MutableSequence[google.cloud.bare_metal_solution_v2.types.NetworkAddressReservation]): - List of IP address reservations in this - network. When updating this field, an error will - be generated if a reservation conflicts with an - IP address already allocated to a physical - server. - pod (str): - Output only. Pod name. - mount_points (MutableSequence[google.cloud.bare_metal_solution_v2.types.NetworkMountPoint]): - Input only. List of mount points to attach - the network to. - jumbo_frames_enabled (bool): - Whether network uses standard frames or jumbo - ones. - gateway_ip (str): - Output only. Gateway ip address. - """ - class Type(proto.Enum): - r"""Network type. - - Values: - TYPE_UNSPECIFIED (0): - Unspecified value. - CLIENT (1): - Client network, a network peered to a Google - Cloud VPC. - PRIVATE (2): - Private network, a network local to the Bare - Metal Solution environment. - """ - TYPE_UNSPECIFIED = 0 - CLIENT = 1 - PRIVATE = 2 - - class State(proto.Enum): - r"""The possible states for this Network. - - Values: - STATE_UNSPECIFIED (0): - The Network is in an unknown state. - PROVISIONING (1): - The Network is provisioning. - PROVISIONED (2): - The Network has been provisioned. - DEPROVISIONING (3): - The Network is being deprovisioned. - UPDATING (4): - The Network is being updated. 
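The ``_BaseUpdateNetwork`` transport helpers earlier in this diff transcode ``UpdateNetworkRequest`` onto ``PATCH /v2/{network.name=projects/*/locations/*/networks/*}``. From the caller's side the same request is one flattened call; a sketch with placeholder names:

.. code-block:: python

    from google.cloud import bare_metal_solution_v2
    from google.protobuf import field_mask_pb2

    client = bare_metal_solution_v2.BareMetalSolutionClient()

    network = bare_metal_solution_v2.Network(
        name="projects/my-project/locations/us-central1/networks/my-network",
        labels={"tier": "prod"},
    )

    # labels is one of the masks UpdateNetworkRequest documents as supported.
    client.update_network(
        network=network,
        update_mask=field_mask_pb2.FieldMask(paths=["labels"]),
    ).result()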
- """ - STATE_UNSPECIFIED = 0 - PROVISIONING = 1 - PROVISIONED = 2 - DEPROVISIONING = 3 - UPDATING = 4 - - name: str = proto.Field( - proto.STRING, - number=5, - ) - id: str = proto.Field( - proto.STRING, - number=10, - ) - type_: Type = proto.Field( - proto.ENUM, - number=2, - enum=Type, - ) - ip_address: str = proto.Field( - proto.STRING, - number=3, - ) - mac_address: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=4, - ) - state: State = proto.Field( - proto.ENUM, - number=6, - enum=State, - ) - vlan_id: str = proto.Field( - proto.STRING, - number=7, - ) - cidr: str = proto.Field( - proto.STRING, - number=8, - ) - vrf: 'VRF' = proto.Field( - proto.MESSAGE, - number=9, - message='VRF', - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=11, - ) - services_cidr: str = proto.Field( - proto.STRING, - number=12, - ) - reservations: MutableSequence['NetworkAddressReservation'] = proto.RepeatedField( - proto.MESSAGE, - number=13, - message='NetworkAddressReservation', - ) - pod: str = proto.Field( - proto.STRING, - number=14, - ) - mount_points: MutableSequence['NetworkMountPoint'] = proto.RepeatedField( - proto.MESSAGE, - number=15, - message='NetworkMountPoint', - ) - jumbo_frames_enabled: bool = proto.Field( - proto.BOOL, - number=16, - ) - gateway_ip: str = proto.Field( - proto.STRING, - number=17, - ) - - -class NetworkAddressReservation(proto.Message): - r"""A reservation of one or more addresses in a network. - - Attributes: - start_address (str): - The first address of this reservation block. - Must be specified as a single IPv4 address, e.g. - 10.1.2.2. - end_address (str): - The last address of this reservation block, inclusive. I.e., - for cases when reservations are only single addresses, - end_address and start_address will be the same. Must be - specified as a single IPv4 address, e.g. 10.1.2.2. - note (str): - A note about this reservation, intended for - human consumption. - """ - - start_address: str = proto.Field( - proto.STRING, - number=1, - ) - end_address: str = proto.Field( - proto.STRING, - number=2, - ) - note: str = proto.Field( - proto.STRING, - number=3, - ) - - -class VRF(proto.Message): - r"""A network VRF. - - Attributes: - name (str): - The name of the VRF. - state (google.cloud.bare_metal_solution_v2.types.VRF.State): - The possible state of VRF. - qos_policy (google.cloud.bare_metal_solution_v2.types.VRF.QosPolicy): - The QOS policy applied to this VRF. - The value is only meaningful when all the vlan - attachments have the same QoS. This field should - not be used for new integrations, use vlan - attachment level qos instead. The field is left - for backward-compatibility. - vlan_attachments (MutableSequence[google.cloud.bare_metal_solution_v2.types.VRF.VlanAttachment]): - The list of VLAN attachments for the VRF. - """ - class State(proto.Enum): - r"""The possible states for this VRF. - - Values: - STATE_UNSPECIFIED (0): - The unspecified state. - PROVISIONING (1): - The vrf is provisioning. - PROVISIONED (2): - The vrf is provisioned. - """ - STATE_UNSPECIFIED = 0 - PROVISIONING = 1 - PROVISIONED = 2 - - class QosPolicy(proto.Message): - r"""QOS policy parameters. - - Attributes: - bandwidth_gbps (float): - The bandwidth permitted by the QOS policy, in - gbps. - """ - - bandwidth_gbps: float = proto.Field( - proto.DOUBLE, - number=1, - ) - - class VlanAttachment(proto.Message): - r"""VLAN attachment details. - - Attributes: - peer_vlan_id (int): - The peer vlan ID of the attachment. 
- peer_ip (str): - The peer IP of the attachment. - router_ip (str): - The router IP of the attachment. - pairing_key (str): - Input only. Pairing key. - qos_policy (google.cloud.bare_metal_solution_v2.types.VRF.QosPolicy): - The QOS policy applied to this VLAN - attachment. This value should be preferred to - using qos at vrf level. - id (str): - Immutable. The identifier of the attachment - within vrf. - interconnect_attachment (str): - Optional. The name of the vlan attachment within vrf. This - is of the form - projects/{project_number}/regions/{region}/interconnectAttachments/{interconnect_attachment} - """ - - peer_vlan_id: int = proto.Field( - proto.INT64, - number=1, - ) - peer_ip: str = proto.Field( - proto.STRING, - number=2, - ) - router_ip: str = proto.Field( - proto.STRING, - number=3, - ) - pairing_key: str = proto.Field( - proto.STRING, - number=4, - ) - qos_policy: 'VRF.QosPolicy' = proto.Field( - proto.MESSAGE, - number=5, - message='VRF.QosPolicy', - ) - id: str = proto.Field( - proto.STRING, - number=6, - ) - interconnect_attachment: str = proto.Field( - proto.STRING, - number=7, - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - state: State = proto.Field( - proto.ENUM, - number=5, - enum=State, - ) - qos_policy: QosPolicy = proto.Field( - proto.MESSAGE, - number=6, - message=QosPolicy, - ) - vlan_attachments: MutableSequence[VlanAttachment] = proto.RepeatedField( - proto.MESSAGE, - number=7, - message=VlanAttachment, - ) - - -class LogicalInterface(proto.Message): - r"""Each logical interface represents a logical abstraction of - the underlying physical interface (for eg. bond, nic) of the - instance. Each logical interface can effectively map to multiple - network-IP pairs and still be mapped to one underlying physical - interface. - - Attributes: - logical_network_interfaces (MutableSequence[google.cloud.bare_metal_solution_v2.types.LogicalInterface.LogicalNetworkInterface]): - List of logical network interfaces within a - logical interface. - name (str): - Interface name. This is of syntax or and forms - part of the network template name. - interface_index (int): - The index of the logical interface mapping to - the index of the hardware bond or nic on the - chosen network template. This field is - deprecated. - """ - - class LogicalNetworkInterface(proto.Message): - r"""Each logical network interface is effectively a network and - IP pair. - - Attributes: - network (str): - Name of the network - ip_address (str): - IP address in the network - default_gateway (bool): - Whether this interface is the default gateway - for the instance. Only one interface can be the - default gateway for the instance. - network_type (google.cloud.bare_metal_solution_v2.types.Network.Type): - Type of network. - id (str): - An identifier for the ``Network``, generated by the backend. 
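Nested proto-plus messages such as ``LogicalNetworkInterface`` are addressed through their containing type. A construction-only sketch; the interface name, network, and address are placeholders:

.. code-block:: python

    from google.cloud import bare_metal_solution_v2

    # Nested messages and cross-referenced enums hang off the outer types.
    iface = bare_metal_solution_v2.LogicalInterface(
        name="bond0",  # placeholder; must follow the network template's syntax
        logical_network_interfaces=[
            bare_metal_solution_v2.LogicalInterface.LogicalNetworkInterface(
                network="my-client-network",
                ip_address="10.1.2.5",
                default_gateway=True,
                network_type=bare_metal_solution_v2.Network.Type.CLIENT,
            )
        ],
    )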
- """ - - network: str = proto.Field( - proto.STRING, - number=1, - ) - ip_address: str = proto.Field( - proto.STRING, - number=2, - ) - default_gateway: bool = proto.Field( - proto.BOOL, - number=3, - ) - network_type: 'Network.Type' = proto.Field( - proto.ENUM, - number=4, - enum='Network.Type', - ) - id: str = proto.Field( - proto.STRING, - number=5, - ) - - logical_network_interfaces: MutableSequence[LogicalNetworkInterface] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=LogicalNetworkInterface, - ) - name: str = proto.Field( - proto.STRING, - number=2, - ) - interface_index: int = proto.Field( - proto.INT32, - number=3, - ) - - -class GetNetworkRequest(proto.Message): - r"""Message for requesting network information. - - Attributes: - name (str): - Required. Name of the resource. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListNetworksRequest(proto.Message): - r"""Message for requesting a list of networks. - - Attributes: - parent (str): - Required. Parent value for - ListNetworksRequest. - page_size (int): - Requested page size. The server might return - fewer items than requested. If unspecified, - server will pick an appropriate default. - page_token (str): - A token identifying a page of results from - the server. - filter (str): - List filter. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListNetworksResponse(proto.Message): - r"""Response message containing the list of networks. - - Attributes: - networks (MutableSequence[google.cloud.bare_metal_solution_v2.types.Network]): - The list of networks. - next_page_token (str): - A token identifying a page of results from - the server. - unreachable (MutableSequence[str]): - Locations that could not be reached. - """ - - @property - def raw_page(self): - return self - - networks: MutableSequence['Network'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Network', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class UpdateNetworkRequest(proto.Message): - r"""Message requesting to updating a network. - - Attributes: - network (google.cloud.bare_metal_solution_v2.types.Network): - Required. The network to update. - - The ``name`` field is used to identify the instance to - update. Format: - projects/{project}/locations/{location}/networks/{network} - update_mask (google.protobuf.field_mask_pb2.FieldMask): - The list of fields to update. The only currently supported - fields are: ``labels``, ``reservations``, - ``vrf.vlan_attachments`` - """ - - network: 'Network' = proto.Field( - proto.MESSAGE, - number=1, - message='Network', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class NetworkUsage(proto.Message): - r"""Network with all used IP addresses. - - Attributes: - network (google.cloud.bare_metal_solution_v2.types.Network): - Network. - used_ips (MutableSequence[str]): - All used IP addresses in this network. 
- """ - - network: 'Network' = proto.Field( - proto.MESSAGE, - number=1, - message='Network', - ) - used_ips: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - - -class ListNetworkUsageRequest(proto.Message): - r"""Request to get networks with IPs. - - Attributes: - location (str): - Required. Parent value (project and - location). - """ - - location: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListNetworkUsageResponse(proto.Message): - r"""Response with Networks with IPs - - Attributes: - networks (MutableSequence[google.cloud.bare_metal_solution_v2.types.NetworkUsage]): - Networks with IPs. - """ - - networks: MutableSequence['NetworkUsage'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='NetworkUsage', - ) - - -class NetworkMountPoint(proto.Message): - r"""Mount point for a network. - - Attributes: - instance (str): - Instance to attach network to. - logical_interface (str): - Logical interface to detach from. - default_gateway (bool): - Network should be a default gateway. - ip_address (str): - Ip address of the server. - """ - - instance: str = proto.Field( - proto.STRING, - number=1, - ) - logical_interface: str = proto.Field( - proto.STRING, - number=2, - ) - default_gateway: bool = proto.Field( - proto.BOOL, - number=3, - ) - ip_address: str = proto.Field( - proto.STRING, - number=4, - ) - - -class RenameNetworkRequest(proto.Message): - r"""Message requesting rename of a server. - - Attributes: - name (str): - Required. The ``name`` field is used to identify the - network. Format: - projects/{project}/locations/{location}/networks/{network} - new_network_id (str): - Required. The new ``id`` of the network. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - new_network_id: str = proto.Field( - proto.STRING, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/types/nfs_share.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/types/nfs_share.py deleted file mode 100644 index e40d12fc30d2..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/types/nfs_share.py +++ /dev/null @@ -1,399 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import field_mask_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.baremetalsolution.v2', - manifest={ - 'NfsShare', - 'GetNfsShareRequest', - 'ListNfsSharesRequest', - 'ListNfsSharesResponse', - 'UpdateNfsShareRequest', - 'RenameNfsShareRequest', - 'CreateNfsShareRequest', - 'DeleteNfsShareRequest', - }, -) - - -class NfsShare(proto.Message): - r"""An NFS share. - - Attributes: - name (str): - Immutable. The name of the NFS share. 
- nfs_share_id (str): - Output only. An identifier for the NFS share, generated by - the backend. This field will be deprecated in the future, - use ``id`` instead. - id (str): - Output only. An identifier for the NFS share, generated by - the backend. This is the same value as nfs_share_id and will - replace it in the future. - state (google.cloud.bare_metal_solution_v2.types.NfsShare.State): - Output only. The state of the NFS share. - volume (str): - Output only. The underlying volume of the - share. Created automatically during - provisioning. - allowed_clients (MutableSequence[google.cloud.bare_metal_solution_v2.types.NfsShare.AllowedClient]): - List of allowed access points. - labels (MutableMapping[str, str]): - Labels as key value pairs. - requested_size_gib (int): - The requested size, in GiB. - storage_type (google.cloud.bare_metal_solution_v2.types.NfsShare.StorageType): - Immutable. The storage type of the underlying - volume. - """ - class State(proto.Enum): - r"""The possible states for this NFS share. - - Values: - STATE_UNSPECIFIED (0): - The share is in an unknown state. - PROVISIONED (1): - The share has been provisioned. - CREATING (2): - The NFS Share is being created. - UPDATING (3): - The NFS Share is being updated. - DELETING (4): - The NFS Share has been requested to be - deleted. - """ - STATE_UNSPECIFIED = 0 - PROVISIONED = 1 - CREATING = 2 - UPDATING = 3 - DELETING = 4 - - class MountPermissions(proto.Enum): - r"""The possible mount permissions. - - Values: - MOUNT_PERMISSIONS_UNSPECIFIED (0): - Permissions were not specified. - READ (1): - NFS share can be mounted with read-only - permissions. - READ_WRITE (2): - NFS share can be mounted with read-write - permissions. - """ - MOUNT_PERMISSIONS_UNSPECIFIED = 0 - READ = 1 - READ_WRITE = 2 - - class StorageType(proto.Enum): - r"""The storage type for a volume. - - Values: - STORAGE_TYPE_UNSPECIFIED (0): - The storage type for this volume is unknown. - SSD (1): - The storage type for this volume is SSD. - HDD (2): - The storage type for this volume is HDD. - """ - STORAGE_TYPE_UNSPECIFIED = 0 - SSD = 1 - HDD = 2 - - class AllowedClient(proto.Message): - r"""Represents an 'access point' for the share. - - Attributes: - network (str): - The network the access point sits on. - share_ip (str): - Output only. The IP address of the share on this network. - Assigned automatically during provisioning based on the - network's services_cidr. - allowed_clients_cidr (str): - The subnet of IP addresses permitted to - access the share. - mount_permissions (google.cloud.bare_metal_solution_v2.types.NfsShare.MountPermissions): - Mount permissions. - allow_dev (bool): - Allow dev flag, which controls whether - creation of devices is allowed. - allow_suid (bool): - Allow the setuid flag. - no_root_squash (bool): - Disable root squashing, which is a feature of - NFS. Root squash is a special mapping of the - remote superuser (root) identity when using - identity authentication. - nfs_path (str): - Output only. The path to access NFS, in the - format shareIP:/InstanceID, where InstanceID is - the generated ID rather than the customer-provided - name; for example, "10.0.0.0:/g123456789-nfs001". 
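``AllowedClient`` entries are supplied by the caller when creating or updating a share, while ``share_ip`` and ``nfs_path`` come back filled in by the backend. A hedged creation sketch; names and CIDRs are placeholders, and ``create_nfs_share`` is assumed to be a long-running operation:

.. code-block:: python

    from google.cloud import bare_metal_solution_v2

    client = bare_metal_solution_v2.BareMetalSolutionClient()

    nfs_share = bare_metal_solution_v2.NfsShare(
        requested_size_gib=1024,
        allowed_clients=[
            bare_metal_solution_v2.NfsShare.AllowedClient(
                network="projects/my-project/locations/us-central1/networks/my-network",
                allowed_clients_cidr="10.1.2.0/24",
                mount_permissions=bare_metal_solution_v2.NfsShare.MountPermissions.READ_WRITE,
            )
        ],
    )

    # create_nfs_share is a long-running operation; result() is the new share,
    # including the backend-assigned share_ip and nfs_path on each client.
    share = client.create_nfs_share(
        parent="projects/my-project/locations/us-central1",
        nfs_share=nfs_share,
    ).result()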
- """ - - network: str = proto.Field( - proto.STRING, - number=1, - ) - share_ip: str = proto.Field( - proto.STRING, - number=2, - ) - allowed_clients_cidr: str = proto.Field( - proto.STRING, - number=3, - ) - mount_permissions: 'NfsShare.MountPermissions' = proto.Field( - proto.ENUM, - number=4, - enum='NfsShare.MountPermissions', - ) - allow_dev: bool = proto.Field( - proto.BOOL, - number=5, - ) - allow_suid: bool = proto.Field( - proto.BOOL, - number=6, - ) - no_root_squash: bool = proto.Field( - proto.BOOL, - number=7, - ) - nfs_path: str = proto.Field( - proto.STRING, - number=8, - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - nfs_share_id: str = proto.Field( - proto.STRING, - number=2, - ) - id: str = proto.Field( - proto.STRING, - number=8, - ) - state: State = proto.Field( - proto.ENUM, - number=3, - enum=State, - ) - volume: str = proto.Field( - proto.STRING, - number=4, - ) - allowed_clients: MutableSequence[AllowedClient] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message=AllowedClient, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=6, - ) - requested_size_gib: int = proto.Field( - proto.INT64, - number=7, - ) - storage_type: StorageType = proto.Field( - proto.ENUM, - number=9, - enum=StorageType, - ) - - -class GetNfsShareRequest(proto.Message): - r"""Message for requesting NFS share information. - - Attributes: - name (str): - Required. Name of the resource. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListNfsSharesRequest(proto.Message): - r"""Message for requesting a list of NFS shares. - - Attributes: - parent (str): - Required. Parent value for - ListNfsSharesRequest. - page_size (int): - Requested page size. The server might return - fewer items than requested. If unspecified, - server will pick an appropriate default. - page_token (str): - A token identifying a page of results from - the server. - filter (str): - List filter. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListNfsSharesResponse(proto.Message): - r"""Response message containing the list of NFS shares. - - Attributes: - nfs_shares (MutableSequence[google.cloud.bare_metal_solution_v2.types.NfsShare]): - The list of NFS shares. - next_page_token (str): - A token identifying a page of results from - the server. - unreachable (MutableSequence[str]): - Locations that could not be reached. - """ - - @property - def raw_page(self): - return self - - nfs_shares: MutableSequence['NfsShare'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='NfsShare', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class UpdateNfsShareRequest(proto.Message): - r"""Message requesting to updating an NFS share. - - Attributes: - nfs_share (google.cloud.bare_metal_solution_v2.types.NfsShare): - Required. The NFS share to update. - - The ``name`` field is used to identify the NFS share to - update. Format: - projects/{project}/locations/{location}/nfsShares/{nfs_share} - update_mask (google.protobuf.field_mask_pb2.FieldMask): - The list of fields to update. 
The only currently supported - fields are: ``labels`` ``allowed_clients`` - """ - - nfs_share: 'NfsShare' = proto.Field( - proto.MESSAGE, - number=1, - message='NfsShare', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class RenameNfsShareRequest(proto.Message): - r"""Message requesting rename of an NFS share. - - Attributes: - name (str): - Required. The ``name`` field is used to identify the - nfsshare. Format: - projects/{project}/locations/{location}/nfsshares/{nfsshare} - new_nfsshare_id (str): - Required. The new ``id`` of the nfsshare. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - new_nfsshare_id: str = proto.Field( - proto.STRING, - number=2, - ) - - -class CreateNfsShareRequest(proto.Message): - r"""Message for creating an NFS share. - - Attributes: - parent (str): - Required. The parent project and location. - nfs_share (google.cloud.bare_metal_solution_v2.types.NfsShare): - Required. The NfsShare to create. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - nfs_share: 'NfsShare' = proto.Field( - proto.MESSAGE, - number=2, - message='NfsShare', - ) - - -class DeleteNfsShareRequest(proto.Message): - r"""Message for deleting an NFS share. - - Attributes: - name (str): - Required. The name of the NFS share to - delete. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/types/osimage.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/types/osimage.py deleted file mode 100644 index 078f7e121ad5..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/types/osimage.py +++ /dev/null @@ -1,132 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.baremetalsolution.v2', - manifest={ - 'OSImage', - 'ListOSImagesRequest', - 'ListOSImagesResponse', - }, -) - - -class OSImage(proto.Message): - r"""Operating System image. - - Attributes: - name (str): - Output only. OS Image's unique name. - code (str): - OS Image code. - description (str): - OS Image description. - applicable_instance_types (MutableSequence[str]): - Instance types this image is applicable to. `Available - types `__ - supported_network_templates (MutableSequence[str]): - Network templates that can be used with this - OS Image. 
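Listing these images goes through the uniform pager interface even though, per the request message below, ``page_size`` is not yet honored by this RPC. A sketch with a placeholder parent:

.. code-block:: python

    from google.cloud import bare_metal_solution_v2

    client = bare_metal_solution_v2.BareMetalSolutionClient()

    # page_size is not yet respected by this RPC, but the pager interface
    # is the same as for the other List* methods.
    for image in client.list_os_images(
        parent="projects/my-project/locations/us-central1"
    ):
        print(image.code, list(image.applicable_instance_types))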
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - code: str = proto.Field( - proto.STRING, - number=2, - ) - description: str = proto.Field( - proto.STRING, - number=3, - ) - applicable_instance_types: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=4, - ) - supported_network_templates: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=5, - ) - - -class ListOSImagesRequest(proto.Message): - r"""Request for getting all available OS images. - - Attributes: - parent (str): - Required. Parent value for - ListProvisioningQuotasRequest. - page_size (int): - Requested page size. The server might return fewer items - than requested. If unspecified, server will pick an - appropriate default. Notice that page_size field is not - supported and won't be respected in the API request for now, - will be updated when pagination is supported. - page_token (str): - A token identifying a page of results from - the server. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListOSImagesResponse(proto.Message): - r"""Request for getting all available OS images. - - Attributes: - os_images (MutableSequence[google.cloud.bare_metal_solution_v2.types.OSImage]): - The OS images available. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. - """ - - @property - def raw_page(self): - return self - - os_images: MutableSequence['OSImage'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='OSImage', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/types/provisioning.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/types/provisioning.py deleted file mode 100644 index 1c5a4e185737..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/types/provisioning.py +++ /dev/null @@ -1,1050 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.bare_metal_solution_v2.types import common -from google.cloud.bare_metal_solution_v2.types import network -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.baremetalsolution.v2', - manifest={ - 'ProvisioningConfig', - 'SubmitProvisioningConfigRequest', - 'SubmitProvisioningConfigResponse', - 'ProvisioningQuota', - 'ListProvisioningQuotasRequest', - 'ListProvisioningQuotasResponse', - 'InstanceConfig', - 'VolumeConfig', - 'NetworkConfig', - 'InstanceQuota', - 'GetProvisioningConfigRequest', - 'CreateProvisioningConfigRequest', - 'UpdateProvisioningConfigRequest', - }, -) - - -class ProvisioningConfig(proto.Message): - r"""A provisioning configuration. - - Attributes: - name (str): - Output only. The system-generated name of the - provisioning config. This follows the UUID - format. - instances (MutableSequence[google.cloud.bare_metal_solution_v2.types.InstanceConfig]): - Instances to be created. - networks (MutableSequence[google.cloud.bare_metal_solution_v2.types.NetworkConfig]): - Networks to be created. - volumes (MutableSequence[google.cloud.bare_metal_solution_v2.types.VolumeConfig]): - Volumes to be created. - ticket_id (str): - A generated ticket id to track provisioning - request. - handover_service_account (str): - A service account to enable customers to - access instance credentials upon handover. - email (str): - Email provided to send a confirmation with - provisioning config to. Deprecated in favour of - email field in request messages. - state (google.cloud.bare_metal_solution_v2.types.ProvisioningConfig.State): - Output only. State of ProvisioningConfig. - location (str): - Optional. Location name of this - ProvisioningConfig. It is optional only for - Intake UI transition period. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Last update timestamp. - cloud_console_uri (str): - Output only. URI to Cloud Console UI view of - this provisioning config. - vpc_sc_enabled (bool): - If true, VPC SC is enabled for the cluster. - status_message (str): - Optional status messages associated with the - FAILED state. - custom_id (str): - Optional. The user-defined identifier of the - provisioning config. - """ - class State(proto.Enum): - r"""The possible states for this ProvisioningConfig. - - Values: - STATE_UNSPECIFIED (0): - State wasn't specified. - DRAFT (1): - ProvisioningConfig is a draft and can be - freely modified. - SUBMITTED (2): - ProvisioningConfig was already submitted and - cannot be modified. - PROVISIONING (3): - ProvisioningConfig was in the provisioning - state. Initially this state comes from the work - order table in big query when SNOW is used. - Later this field can be set by the work order - API. - PROVISIONED (4): - ProvisioningConfig was provisioned, meaning - the resources exist. - VALIDATED (5): - ProvisioningConfig was validated. A - validation tool will be run to set this state. - CANCELLED (6): - ProvisioningConfig was canceled. - FAILED (7): - The request is submitted for provisioning, - with error return. 
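As a hedged illustration of the lifecycle this enum encodes, a caller might fetch a config and branch on its state; the resource name below is a placeholder, and the flattened ``name=`` keyword is assumed from the generator's usual conventions.

.. code-block:: python

    from google.cloud import bare_metal_solution_v2

    client = bare_metal_solution_v2.BareMetalSolutionClient()
    State = bare_metal_solution_v2.ProvisioningConfig.State

    # Placeholder resource name.
    config = client.get_provisioning_config(
        name="projects/my-project/locations/us-central1/provisioningConfigs/my-config",
    )

    if config.state == State.DRAFT:
        print("Config can still be edited.")
    elif config.state in (State.SUBMITTED, State.PROVISIONING):
        print("Config is frozen; wait for provisioning to finish.")
    elif config.state == State.FAILED:
        print("Provisioning failed:", config.status_message)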
- """ - STATE_UNSPECIFIED = 0 - DRAFT = 1 - SUBMITTED = 2 - PROVISIONING = 3 - PROVISIONED = 4 - VALIDATED = 5 - CANCELLED = 6 - FAILED = 7 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - instances: MutableSequence['InstanceConfig'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='InstanceConfig', - ) - networks: MutableSequence['NetworkConfig'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='NetworkConfig', - ) - volumes: MutableSequence['VolumeConfig'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='VolumeConfig', - ) - ticket_id: str = proto.Field( - proto.STRING, - number=5, - ) - handover_service_account: str = proto.Field( - proto.STRING, - number=6, - ) - email: str = proto.Field( - proto.STRING, - number=7, - ) - state: State = proto.Field( - proto.ENUM, - number=8, - enum=State, - ) - location: str = proto.Field( - proto.STRING, - number=9, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=10, - message=timestamp_pb2.Timestamp, - ) - cloud_console_uri: str = proto.Field( - proto.STRING, - number=11, - ) - vpc_sc_enabled: bool = proto.Field( - proto.BOOL, - number=12, - ) - status_message: str = proto.Field( - proto.STRING, - number=13, - ) - custom_id: str = proto.Field( - proto.STRING, - number=14, - ) - - -class SubmitProvisioningConfigRequest(proto.Message): - r"""Request for SubmitProvisioningConfig. - - Attributes: - parent (str): - Required. The parent project and location - containing the ProvisioningConfig. - provisioning_config (google.cloud.bare_metal_solution_v2.types.ProvisioningConfig): - Required. The ProvisioningConfig to create. - email (str): - Optional. Email provided to send a - confirmation with provisioning config to. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - provisioning_config: 'ProvisioningConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='ProvisioningConfig', - ) - email: str = proto.Field( - proto.STRING, - number=3, - ) - - -class SubmitProvisioningConfigResponse(proto.Message): - r"""Response for SubmitProvisioningConfig. - - Attributes: - provisioning_config (google.cloud.bare_metal_solution_v2.types.ProvisioningConfig): - The submitted provisioning config. - """ - - provisioning_config: 'ProvisioningConfig' = proto.Field( - proto.MESSAGE, - number=1, - message='ProvisioningConfig', - ) - - -class ProvisioningQuota(proto.Message): - r"""A provisioning quota for a given project. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Output only. The name of the provisioning - quota. - asset_type (google.cloud.bare_metal_solution_v2.types.ProvisioningQuota.AssetType): - The asset type of this provisioning quota. - gcp_service (str): - The gcp service of the provisioning quota. - location (str): - The specific location of the provisioining - quota. - available_count (int): - The available count of the provisioning - quota. - instance_quota (google.cloud.bare_metal_solution_v2.types.InstanceQuota): - Instance quota. - - This field is a member of `oneof`_ ``quota``. - server_count (int): - Server count. - - This field is a member of `oneof`_ ``availability``. 
- network_bandwidth (int): - Network bandwidth, Gbps - - This field is a member of `oneof`_ ``availability``. - storage_gib (int): - Storage size (GB). - - This field is a member of `oneof`_ ``availability``. - """ - class AssetType(proto.Enum): - r"""The available asset types for intake. - - Values: - ASSET_TYPE_UNSPECIFIED (0): - The unspecified type. - ASSET_TYPE_SERVER (1): - The server asset type. - ASSET_TYPE_STORAGE (2): - The storage asset type. - ASSET_TYPE_NETWORK (3): - The network asset type. - """ - ASSET_TYPE_UNSPECIFIED = 0 - ASSET_TYPE_SERVER = 1 - ASSET_TYPE_STORAGE = 2 - ASSET_TYPE_NETWORK = 3 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - asset_type: AssetType = proto.Field( - proto.ENUM, - number=2, - enum=AssetType, - ) - gcp_service: str = proto.Field( - proto.STRING, - number=3, - ) - location: str = proto.Field( - proto.STRING, - number=4, - ) - available_count: int = proto.Field( - proto.INT32, - number=5, - ) - instance_quota: 'InstanceQuota' = proto.Field( - proto.MESSAGE, - number=6, - oneof='quota', - message='InstanceQuota', - ) - server_count: int = proto.Field( - proto.INT64, - number=7, - oneof='availability', - ) - network_bandwidth: int = proto.Field( - proto.INT64, - number=8, - oneof='availability', - ) - storage_gib: int = proto.Field( - proto.INT64, - number=9, - oneof='availability', - ) - - -class ListProvisioningQuotasRequest(proto.Message): - r"""Message for requesting the list of provisioning quotas. - - Attributes: - parent (str): - Required. Parent value for - ListProvisioningQuotasRequest. - page_size (int): - Requested page size. The server might return fewer items - than requested. If unspecified, server will pick an - appropriate default. Notice that page_size field is not - supported and won't be respected in the API request for now, - will be updated when pagination is supported. - page_token (str): - A token identifying a page of results from - the server. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListProvisioningQuotasResponse(proto.Message): - r"""Response message for the list of provisioning quotas. - - Attributes: - provisioning_quotas (MutableSequence[google.cloud.bare_metal_solution_v2.types.ProvisioningQuota]): - The provisioning quotas registered in this - project. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. - """ - - @property - def raw_page(self): - return self - - provisioning_quotas: MutableSequence['ProvisioningQuota'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='ProvisioningQuota', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class InstanceConfig(proto.Message): - r"""Configuration parameters for a new instance. - - Attributes: - name (str): - Output only. The name of the instance config. - id (str): - A transient unique identifier to idenfity an - instance within an ProvisioningConfig request. - instance_type (str): - Instance type. `Available - types `__ - hyperthreading (bool): - Whether the instance should be provisioned - with Hyperthreading enabled. - os_image (str): - OS image to initialize the instance. `Available - images `__ - client_network (google.cloud.bare_metal_solution_v2.types.InstanceConfig.NetworkAddress): - Client network address. Filled if - InstanceConfig.multivlan_config is false. 
- private_network (google.cloud.bare_metal_solution_v2.types.InstanceConfig.NetworkAddress): - Private network address, if any. Filled if - InstanceConfig.multivlan_config is false. - user_note (str): - User note field, it can be used by customers - to add additional information for the BMS Ops - team . - account_networks_enabled (bool): - If true networks can be from different - projects of the same vendor account. - network_config (google.cloud.bare_metal_solution_v2.types.InstanceConfig.NetworkConfig): - The type of network configuration on the - instance. - network_template (str): - Server network template name. Filled if - InstanceConfig.multivlan_config is true. - logical_interfaces (MutableSequence[google.cloud.bare_metal_solution_v2.types.LogicalInterface]): - List of logical interfaces for the instance. The number of - logical interfaces will be the same as number of hardware - bond/nic on the chosen network template. Filled if - InstanceConfig.multivlan_config is true. - ssh_key_names (MutableSequence[str]): - List of names of ssh keys used to provision - the instance. - """ - class NetworkConfig(proto.Enum): - r"""The network configuration of the instance. - - Values: - NETWORKCONFIG_UNSPECIFIED (0): - The unspecified network configuration. - SINGLE_VLAN (1): - Instance part of single client network and - single private network. - MULTI_VLAN (2): - Instance part of multiple (or single) client - networks and private networks. - """ - NETWORKCONFIG_UNSPECIFIED = 0 - SINGLE_VLAN = 1 - MULTI_VLAN = 2 - - class NetworkAddress(proto.Message): - r"""A network. - - Attributes: - network_id (str): - Id of the network to use, within the same - ProvisioningConfig request. - address (str): - IPv4 address to be assigned to the server. - existing_network_id (str): - Name of the existing network to use. - """ - - network_id: str = proto.Field( - proto.STRING, - number=1, - ) - address: str = proto.Field( - proto.STRING, - number=2, - ) - existing_network_id: str = proto.Field( - proto.STRING, - number=3, - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - id: str = proto.Field( - proto.STRING, - number=2, - ) - instance_type: str = proto.Field( - proto.STRING, - number=3, - ) - hyperthreading: bool = proto.Field( - proto.BOOL, - number=4, - ) - os_image: str = proto.Field( - proto.STRING, - number=5, - ) - client_network: NetworkAddress = proto.Field( - proto.MESSAGE, - number=6, - message=NetworkAddress, - ) - private_network: NetworkAddress = proto.Field( - proto.MESSAGE, - number=7, - message=NetworkAddress, - ) - user_note: str = proto.Field( - proto.STRING, - number=8, - ) - account_networks_enabled: bool = proto.Field( - proto.BOOL, - number=9, - ) - network_config: NetworkConfig = proto.Field( - proto.ENUM, - number=10, - enum=NetworkConfig, - ) - network_template: str = proto.Field( - proto.STRING, - number=11, - ) - logical_interfaces: MutableSequence[network.LogicalInterface] = proto.RepeatedField( - proto.MESSAGE, - number=12, - message=network.LogicalInterface, - ) - ssh_key_names: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=13, - ) - - -class VolumeConfig(proto.Message): - r"""Configuration parameters for a new volume. - - Attributes: - name (str): - Output only. The name of the volume config. - id (str): - A transient unique identifier to identify a - volume within an ProvisioningConfig request. - snapshots_enabled (bool): - Whether snapshots should be enabled. 
- type_ (google.cloud.bare_metal_solution_v2.types.VolumeConfig.Type): - The type of this Volume. - protocol (google.cloud.bare_metal_solution_v2.types.VolumeConfig.Protocol): - Volume protocol. - size_gb (int): - The requested size of this volume, in GB. - lun_ranges (MutableSequence[google.cloud.bare_metal_solution_v2.types.VolumeConfig.LunRange]): - LUN ranges to be configured. Set only when protocol is - PROTOCOL_FC. - machine_ids (MutableSequence[str]): - Machine ids connected to this volume. Set only when protocol - is PROTOCOL_FC. - nfs_exports (MutableSequence[google.cloud.bare_metal_solution_v2.types.VolumeConfig.NfsExport]): - NFS exports. Set only when protocol is PROTOCOL_NFS. - user_note (str): - User note field, it can be used by customers - to add additional information for the BMS Ops - team . - gcp_service (str): - The GCP service of the storage volume. Available gcp_service - are in - https://cloud.google.com/bare-metal/docs/bms-planning. - performance_tier (google.cloud.bare_metal_solution_v2.types.VolumePerformanceTier): - Performance tier of the Volume. - Default is SHARED. - """ - class Type(proto.Enum): - r"""The types of Volumes. - - Values: - TYPE_UNSPECIFIED (0): - The unspecified type. - FLASH (1): - This Volume is on flash. - DISK (2): - This Volume is on disk. - """ - TYPE_UNSPECIFIED = 0 - FLASH = 1 - DISK = 2 - - class Protocol(proto.Enum): - r"""The protocol used to access the volume. - - Values: - PROTOCOL_UNSPECIFIED (0): - Unspecified value. - PROTOCOL_FC (1): - Fibre channel. - PROTOCOL_NFS (2): - Network file system. - """ - PROTOCOL_UNSPECIFIED = 0 - PROTOCOL_FC = 1 - PROTOCOL_NFS = 2 - - class LunRange(proto.Message): - r"""A LUN(Logical Unit Number) range. - - Attributes: - quantity (int): - Number of LUNs to create. - size_gb (int): - The requested size of each LUN, in GB. - """ - - quantity: int = proto.Field( - proto.INT32, - number=1, - ) - size_gb: int = proto.Field( - proto.INT32, - number=2, - ) - - class NfsExport(proto.Message): - r"""A NFS export entry. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - network_id (str): - Network to use to publish the export. - machine_id (str): - Either a single machine, identified by an ID, - or a comma-separated list of machine IDs. - - This field is a member of `oneof`_ ``client``. - cidr (str): - A CIDR range. - - This field is a member of `oneof`_ ``client``. - permissions (google.cloud.bare_metal_solution_v2.types.VolumeConfig.NfsExport.Permissions): - Export permissions. - no_root_squash (bool): - Disable root squashing, which is a feature of - NFS. Root squash is a special mapping of the - remote superuser (root) identity when using - identity authentication. - allow_suid (bool): - Allow the setuid flag. - allow_dev (bool): - Allow dev flag in NfsShare - AllowedClientsRequest. - """ - class Permissions(proto.Enum): - r"""Permissions that can granted for an export. - - Values: - PERMISSIONS_UNSPECIFIED (0): - Unspecified value. - READ_ONLY (1): - Read-only permission. - READ_WRITE (2): - Read-write permission. 
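Since ``machine_id`` and ``cidr`` share the ``client`` oneof, setting one clears the other. A small sketch (ids and ranges are illustrative; ``NfsExport.pb()`` is the proto-plus escape hatch to the raw protobuf message):

.. code-block:: python

    from google.cloud import bare_metal_solution_v2

    NfsExport = bare_metal_solution_v2.VolumeConfig.NfsExport

    export = NfsExport(
        network_id="net-1",                 # illustrative network id
        cidr="10.0.0.0/24",
        permissions=NfsExport.Permissions.READ_WRITE,
    )

    # Assigning the other member of the `client` oneof clears `cidr`.
    export.machine_id = "machine-1,machine-2"

    # Inspect the active oneof member via the underlying protobuf.
    print(NfsExport.pb(export).WhichOneof("client"))  # -> "machine_id"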
- """ - PERMISSIONS_UNSPECIFIED = 0 - READ_ONLY = 1 - READ_WRITE = 2 - - network_id: str = proto.Field( - proto.STRING, - number=1, - ) - machine_id: str = proto.Field( - proto.STRING, - number=2, - oneof='client', - ) - cidr: str = proto.Field( - proto.STRING, - number=3, - oneof='client', - ) - permissions: 'VolumeConfig.NfsExport.Permissions' = proto.Field( - proto.ENUM, - number=4, - enum='VolumeConfig.NfsExport.Permissions', - ) - no_root_squash: bool = proto.Field( - proto.BOOL, - number=5, - ) - allow_suid: bool = proto.Field( - proto.BOOL, - number=6, - ) - allow_dev: bool = proto.Field( - proto.BOOL, - number=7, - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - id: str = proto.Field( - proto.STRING, - number=2, - ) - snapshots_enabled: bool = proto.Field( - proto.BOOL, - number=3, - ) - type_: Type = proto.Field( - proto.ENUM, - number=4, - enum=Type, - ) - protocol: Protocol = proto.Field( - proto.ENUM, - number=5, - enum=Protocol, - ) - size_gb: int = proto.Field( - proto.INT32, - number=6, - ) - lun_ranges: MutableSequence[LunRange] = proto.RepeatedField( - proto.MESSAGE, - number=7, - message=LunRange, - ) - machine_ids: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=8, - ) - nfs_exports: MutableSequence[NfsExport] = proto.RepeatedField( - proto.MESSAGE, - number=9, - message=NfsExport, - ) - user_note: str = proto.Field( - proto.STRING, - number=10, - ) - gcp_service: str = proto.Field( - proto.STRING, - number=11, - ) - performance_tier: common.VolumePerformanceTier = proto.Field( - proto.ENUM, - number=12, - enum=common.VolumePerformanceTier, - ) - - -class NetworkConfig(proto.Message): - r"""Configuration parameters for a new network. - - Attributes: - name (str): - Output only. The name of the network config. - id (str): - A transient unique identifier to identify a - volume within an ProvisioningConfig request. - type_ (google.cloud.bare_metal_solution_v2.types.NetworkConfig.Type): - The type of this network, either Client or - Private. - bandwidth (google.cloud.bare_metal_solution_v2.types.NetworkConfig.Bandwidth): - Interconnect bandwidth. Set only when type is - CLIENT. - vlan_attachments (MutableSequence[google.cloud.bare_metal_solution_v2.types.NetworkConfig.IntakeVlanAttachment]): - List of VLAN attachments. As of now there are - always 2 attachments, but it is going to change - in the future (multi vlan). - cidr (str): - CIDR range of the network. - service_cidr (google.cloud.bare_metal_solution_v2.types.NetworkConfig.ServiceCidr): - Service CIDR, if any. - user_note (str): - User note field, it can be used by customers - to add additional information for the BMS Ops - team . - gcp_service (str): - The GCP service of the network. Available gcp_service are in - https://cloud.google.com/bare-metal/docs/bms-planning. - vlan_same_project (bool): - Whether the VLAN attachment pair is located - in the same project. - jumbo_frames_enabled (bool): - The JumboFramesEnabled option for customer to - set. - """ - class Type(proto.Enum): - r"""Network type. - - Values: - TYPE_UNSPECIFIED (0): - Unspecified value. - CLIENT (1): - Client network, that is a network peered to a - GCP VPC. - PRIVATE (2): - Private network, that is a network local to - the BMS POD. - """ - TYPE_UNSPECIFIED = 0 - CLIENT = 1 - PRIVATE = 2 - - class Bandwidth(proto.Enum): - r"""Interconnect bandwidth. - - Values: - BANDWIDTH_UNSPECIFIED (0): - Unspecified value. - BW_1_GBPS (1): - 1 Gbps. - BW_2_GBPS (2): - 2 Gbps. - BW_5_GBPS (3): - 5 Gbps. 
- BW_10_GBPS (4): - 10 Gbps. - """ - BANDWIDTH_UNSPECIFIED = 0 - BW_1_GBPS = 1 - BW_2_GBPS = 2 - BW_5_GBPS = 3 - BW_10_GBPS = 4 - - class ServiceCidr(proto.Enum): - r"""Service network block. - - Values: - SERVICE_CIDR_UNSPECIFIED (0): - Unspecified value. - DISABLED (1): - Services are disabled for the given network. - HIGH_26 (2): - Use the highest /26 block of the network to - host services. - HIGH_27 (3): - Use the highest /27 block of the network to - host services. - HIGH_28 (4): - Use the highest /28 block of the network to - host services. - """ - SERVICE_CIDR_UNSPECIFIED = 0 - DISABLED = 1 - HIGH_26 = 2 - HIGH_27 = 3 - HIGH_28 = 4 - - class IntakeVlanAttachment(proto.Message): - r"""A GCP vlan attachment. - - Attributes: - id (str): - Identifier of the VLAN attachment. - pairing_key (str): - Attachment pairing key. - """ - - id: str = proto.Field( - proto.STRING, - number=1, - ) - pairing_key: str = proto.Field( - proto.STRING, - number=2, - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - id: str = proto.Field( - proto.STRING, - number=2, - ) - type_: Type = proto.Field( - proto.ENUM, - number=3, - enum=Type, - ) - bandwidth: Bandwidth = proto.Field( - proto.ENUM, - number=4, - enum=Bandwidth, - ) - vlan_attachments: MutableSequence[IntakeVlanAttachment] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message=IntakeVlanAttachment, - ) - cidr: str = proto.Field( - proto.STRING, - number=6, - ) - service_cidr: ServiceCidr = proto.Field( - proto.ENUM, - number=7, - enum=ServiceCidr, - ) - user_note: str = proto.Field( - proto.STRING, - number=8, - ) - gcp_service: str = proto.Field( - proto.STRING, - number=9, - ) - vlan_same_project: bool = proto.Field( - proto.BOOL, - number=10, - ) - jumbo_frames_enabled: bool = proto.Field( - proto.BOOL, - number=11, - ) - - -class InstanceQuota(proto.Message): - r"""A resource budget. - - Attributes: - name (str): - Output only. The name of the instance quota. - instance_type (str): - Instance type. Deprecated: use gcp_service. - gcp_service (str): - The gcp service of the provisioning quota. - location (str): - Location where the quota applies. - available_machine_count (int): - Number of machines than can be created for the given - location and instance_type. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - instance_type: str = proto.Field( - proto.STRING, - number=2, - ) - gcp_service: str = proto.Field( - proto.STRING, - number=5, - ) - location: str = proto.Field( - proto.STRING, - number=3, - ) - available_machine_count: int = proto.Field( - proto.INT32, - number=4, - ) - - -class GetProvisioningConfigRequest(proto.Message): - r"""Request for GetProvisioningConfig. - - Attributes: - name (str): - Required. Name of the ProvisioningConfig. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateProvisioningConfigRequest(proto.Message): - r"""Request for CreateProvisioningConfig. - - Attributes: - parent (str): - Required. The parent project and location - containing the ProvisioningConfig. - provisioning_config (google.cloud.bare_metal_solution_v2.types.ProvisioningConfig): - Required. The ProvisioningConfig to create. - email (str): - Optional. Email provided to send a - confirmation with provisioning config to. 
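Pulling the request messages together, a sketch of creating a draft config and then submitting it; the instance type, image code, and flattened keyword signatures are assumptions for illustration only.

.. code-block:: python

    from google.cloud import bare_metal_solution_v2

    client = bare_metal_solution_v2.BareMetalSolutionClient()
    parent = "projects/my-project/locations/us-central1"  # placeholder

    draft = bare_metal_solution_v2.ProvisioningConfig(
        instances=[
            bare_metal_solution_v2.InstanceConfig(
                id="instance-0",                # transient id within this request
                instance_type="o2-standard-16", # illustrative type
                os_image="RHEL9x",              # illustrative image code
            ),
        ],
    )

    # CreateProvisioningConfig returns the config directly (not an LRO).
    created = client.create_provisioning_config(
        parent=parent,
        provisioning_config=draft,
    )

    response = client.submit_provisioning_config(
        parent=parent,
        provisioning_config=created,
    )
    print(response.provisioning_config.state)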
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - provisioning_config: 'ProvisioningConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='ProvisioningConfig', - ) - email: str = proto.Field( - proto.STRING, - number=3, - ) - - -class UpdateProvisioningConfigRequest(proto.Message): - r"""Message for updating a ProvisioningConfig. - - Attributes: - provisioning_config (google.cloud.bare_metal_solution_v2.types.ProvisioningConfig): - Required. The ProvisioningConfig to update. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. The list of fields to update. - email (str): - Optional. Email provided to send a - confirmation with provisioning config to. - """ - - provisioning_config: 'ProvisioningConfig' = proto.Field( - proto.MESSAGE, - number=1, - message='ProvisioningConfig', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - email: str = proto.Field( - proto.STRING, - number=3, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/types/ssh_key.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/types/ssh_key.py deleted file mode 100644 index 94d8079132bc..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/types/ssh_key.py +++ /dev/null @@ -1,161 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.baremetalsolution.v2', - manifest={ - 'SSHKey', - 'ListSSHKeysRequest', - 'ListSSHKeysResponse', - 'CreateSSHKeyRequest', - 'DeleteSSHKeyRequest', - }, -) - - -class SSHKey(proto.Message): - r"""An SSH key, used for authorizing with the interactive serial - console feature. - - Attributes: - name (str): - Output only. The name of this SSH key. - Currently, the only valid value for the location - is "global". - public_key (str): - The public SSH key. This must be in OpenSSH .authorized_keys - format. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - public_key: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ListSSHKeysRequest(proto.Message): - r"""Message for listing the public SSH keys in a project. - - Attributes: - parent (str): - Required. The parent containing the SSH keys. - Currently, the only valid value for the location - is "global". - page_size (int): - The maximum number of items to return. - page_token (str): - The next_page_token value returned from a previous List - request, if any. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListSSHKeysResponse(proto.Message): - r"""Message for response of ListSSHKeys. - - Attributes: - ssh_keys (MutableSequence[google.cloud.bare_metal_solution_v2.types.SSHKey]): - The SSH keys registered in the project. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. - """ - - @property - def raw_page(self): - return self - - ssh_keys: MutableSequence['SSHKey'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='SSHKey', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=90, - ) - - -class CreateSSHKeyRequest(proto.Message): - r"""Message for registering a public SSH key in a project. - - Attributes: - parent (str): - Required. The parent containing the SSH keys. - ssh_key (google.cloud.bare_metal_solution_v2.types.SSHKey): - Required. The SSH key to register. - ssh_key_id (str): - Required. The ID to use for the key, which will become the - final component of the key's resource name. - - This value must match the regex: [a-zA-Z0-9@.-_]{1,64} - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - ssh_key: 'SSHKey' = proto.Field( - proto.MESSAGE, - number=2, - message='SSHKey', - ) - ssh_key_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class DeleteSSHKeyRequest(proto.Message): - r"""Message for deleting an SSH key from a project. - - Attributes: - name (str): - Required. The name of the SSH key to delete. - Currently, the only valid value for the location - is "global". - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/types/volume.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/types/volume.py deleted file mode 100644 index f5a316bbe6eb..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/types/volume.py +++ /dev/null @@ -1,532 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.bare_metal_solution_v2.types import common -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.baremetalsolution.v2', - manifest={ - 'Volume', - 'GetVolumeRequest', - 'ListVolumesRequest', - 'ListVolumesResponse', - 'UpdateVolumeRequest', - 'RenameVolumeRequest', - 'EvictVolumeRequest', - 'ResizeVolumeRequest', - }, -) - - -class Volume(proto.Message): - r"""A storage volume. 
- - Attributes: - name (str): - Output only. The resource name of this ``Volume``. Resource - names are schemeless URIs that follow the conventions in - https://cloud.google.com/apis/design/resource_names. Format: - ``projects/{project}/locations/{location}/volumes/{volume}`` - id (str): - An identifier for the ``Volume``, generated by the backend. - storage_type (google.cloud.bare_metal_solution_v2.types.Volume.StorageType): - The storage type for this volume. - state (google.cloud.bare_metal_solution_v2.types.Volume.State): - The state of this storage volume. - requested_size_gib (int): - The requested size of this storage volume, in - GiB. - originally_requested_size_gib (int): - Originally requested size, in GiB. - current_size_gib (int): - The current size of this storage volume, in - GiB, including space reserved for snapshots. - This size might be different than the requested - size if the storage volume has been configured - with auto grow or auto shrink. - emergency_size_gib (int): - Additional emergency size that was requested for this - Volume, in GiB. current_size_gib includes this value. - max_size_gib (int): - Maximum size volume can be expanded to in - case of evergency, in GiB. - auto_grown_size_gib (int): - The size, in GiB, that this storage volume - has expanded as a result of an auto grow policy. - In the absence of auto-grow, the value is 0. - remaining_space_gib (int): - The space remaining in the storage volume for - new LUNs, in GiB, excluding space reserved for - snapshots. - snapshot_reservation_detail (google.cloud.bare_metal_solution_v2.types.Volume.SnapshotReservationDetail): - Details about snapshot space reservation and - usage on the storage volume. - snapshot_auto_delete_behavior (google.cloud.bare_metal_solution_v2.types.Volume.SnapshotAutoDeleteBehavior): - The behavior to use when snapshot reserved - space is full. - labels (MutableMapping[str, str]): - Labels as key value pairs. - snapshot_enabled (bool): - Whether snapshots are enabled. - pod (str): - Immutable. Pod name. - protocol (google.cloud.bare_metal_solution_v2.types.Volume.Protocol): - Output only. Storage protocol for the Volume. - boot_volume (bool): - Output only. Whether this volume is a boot - volume. A boot volume is one which contains a - boot LUN. - performance_tier (google.cloud.bare_metal_solution_v2.types.VolumePerformanceTier): - Immutable. Performance tier of the Volume. - Default is SHARED. - notes (str): - Input only. User-specified notes for new - Volume. Used to provision Volumes that require - manual intervention. - workload_profile (google.cloud.bare_metal_solution_v2.types.Volume.WorkloadProfile): - The workload profile for the volume. - expire_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Time after which volume will be - fully deleted. It is filled only for volumes in - COOLOFF state. - instances (MutableSequence[str]): - Output only. Instances this Volume is - attached to. This field is set only in Get - requests. - attached (bool): - Output only. Is the Volume attached at at least one - instance. This field is a lightweight counterpart of - ``instances`` field. It is filled in List responses as well. - """ - class StorageType(proto.Enum): - r"""The storage type for a volume. - - Values: - STORAGE_TYPE_UNSPECIFIED (0): - The storage type for this volume is unknown. - SSD (1): - The storage type for this volume is SSD. - HDD (2): - This storage type for this volume is HDD. 
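A minimal sketch of listing volumes and reading these enum-typed fields; the parent value is a placeholder.

.. code-block:: python

    from google.cloud import bare_metal_solution_v2

    client = bare_metal_solution_v2.BareMetalSolutionClient()

    volumes = client.list_volumes(
        parent="projects/my-project/locations/us-central1",  # placeholder
    )
    for volume in volumes:
        # Proto-plus enums are IntEnum subclasses, so .name gives the label.
        print(volume.name, volume.storage_type.name, volume.state.name,
              f"{volume.current_size_gib} GiB")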
- """ - STORAGE_TYPE_UNSPECIFIED = 0 - SSD = 1 - HDD = 2 - - class State(proto.Enum): - r"""The possible states for a storage volume. - - Values: - STATE_UNSPECIFIED (0): - The storage volume is in an unknown state. - CREATING (1): - The storage volume is being created. - READY (2): - The storage volume is ready for use. - DELETING (3): - The storage volume has been requested to be - deleted. - UPDATING (4): - The storage volume is being updated. - COOL_OFF (5): - The storage volume is in cool off state. It will be deleted - after ``expire_time``. - """ - STATE_UNSPECIFIED = 0 - CREATING = 1 - READY = 2 - DELETING = 3 - UPDATING = 4 - COOL_OFF = 5 - - class SnapshotAutoDeleteBehavior(proto.Enum): - r"""The kinds of auto delete behavior to use when snapshot - reserved space is full. - - Values: - SNAPSHOT_AUTO_DELETE_BEHAVIOR_UNSPECIFIED (0): - The unspecified behavior. - DISABLED (1): - Don't delete any snapshots. This disables new - snapshot creation, as long as the snapshot - reserved space is full. - OLDEST_FIRST (2): - Delete the oldest snapshots first. - NEWEST_FIRST (3): - Delete the newest snapshots first. - """ - SNAPSHOT_AUTO_DELETE_BEHAVIOR_UNSPECIFIED = 0 - DISABLED = 1 - OLDEST_FIRST = 2 - NEWEST_FIRST = 3 - - class Protocol(proto.Enum): - r"""Storage protocol. - - Values: - PROTOCOL_UNSPECIFIED (0): - Value is not specified. - FIBRE_CHANNEL (1): - Fibre Channel protocol. - NFS (2): - NFS protocol means Volume is a NFS Share - volume. Such volumes cannot be manipulated via - Volumes API. - """ - PROTOCOL_UNSPECIFIED = 0 - FIBRE_CHANNEL = 1 - NFS = 2 - - class WorkloadProfile(proto.Enum): - r"""The possible values for a workload profile. - - Values: - WORKLOAD_PROFILE_UNSPECIFIED (0): - The workload profile is in an unknown state. - GENERIC (1): - The workload profile is generic. - HANA (2): - The workload profile is hana. - """ - WORKLOAD_PROFILE_UNSPECIFIED = 0 - GENERIC = 1 - HANA = 2 - - class SnapshotReservationDetail(proto.Message): - r"""Details about snapshot space reservation and usage on the - storage volume. - - Attributes: - reserved_space_gib (int): - The space on this storage volume reserved for - snapshots, shown in GiB. - reserved_space_used_percent (int): - The percent of snapshot space on this storage - volume actually being used by the snapshot - copies. This value might be higher than 100% if - the snapshot copies have overflowed into the - data portion of the storage volume. - reserved_space_remaining_gib (int): - The amount, in GiB, of available space in - this storage volume's reserved snapshot space. - reserved_space_percent (int): - Percent of the total Volume size reserved for snapshot - copies. Enabling snapshots requires reserving 20% or more of - the storage volume space for snapshots. Maximum reserved - space for snapshots is 40%. Setting this field will - effectively set snapshot_enabled to true. 
- """ - - reserved_space_gib: int = proto.Field( - proto.INT64, - number=1, - ) - reserved_space_used_percent: int = proto.Field( - proto.INT32, - number=2, - ) - reserved_space_remaining_gib: int = proto.Field( - proto.INT64, - number=3, - ) - reserved_space_percent: int = proto.Field( - proto.INT32, - number=4, - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - id: str = proto.Field( - proto.STRING, - number=11, - ) - storage_type: StorageType = proto.Field( - proto.ENUM, - number=2, - enum=StorageType, - ) - state: State = proto.Field( - proto.ENUM, - number=3, - enum=State, - ) - requested_size_gib: int = proto.Field( - proto.INT64, - number=4, - ) - originally_requested_size_gib: int = proto.Field( - proto.INT64, - number=16, - ) - current_size_gib: int = proto.Field( - proto.INT64, - number=5, - ) - emergency_size_gib: int = proto.Field( - proto.INT64, - number=14, - ) - max_size_gib: int = proto.Field( - proto.INT64, - number=17, - ) - auto_grown_size_gib: int = proto.Field( - proto.INT64, - number=6, - ) - remaining_space_gib: int = proto.Field( - proto.INT64, - number=7, - ) - snapshot_reservation_detail: SnapshotReservationDetail = proto.Field( - proto.MESSAGE, - number=8, - message=SnapshotReservationDetail, - ) - snapshot_auto_delete_behavior: SnapshotAutoDeleteBehavior = proto.Field( - proto.ENUM, - number=9, - enum=SnapshotAutoDeleteBehavior, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=12, - ) - snapshot_enabled: bool = proto.Field( - proto.BOOL, - number=13, - ) - pod: str = proto.Field( - proto.STRING, - number=15, - ) - protocol: Protocol = proto.Field( - proto.ENUM, - number=18, - enum=Protocol, - ) - boot_volume: bool = proto.Field( - proto.BOOL, - number=19, - ) - performance_tier: common.VolumePerformanceTier = proto.Field( - proto.ENUM, - number=20, - enum=common.VolumePerformanceTier, - ) - notes: str = proto.Field( - proto.STRING, - number=21, - ) - workload_profile: WorkloadProfile = proto.Field( - proto.ENUM, - number=22, - enum=WorkloadProfile, - ) - expire_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=24, - message=timestamp_pb2.Timestamp, - ) - instances: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=25, - ) - attached: bool = proto.Field( - proto.BOOL, - number=26, - ) - - -class GetVolumeRequest(proto.Message): - r"""Message for requesting storage volume information. - - Attributes: - name (str): - Required. Name of the resource. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListVolumesRequest(proto.Message): - r"""Message for requesting a list of storage volumes. - - Attributes: - parent (str): - Required. Parent value for - ListVolumesRequest. - page_size (int): - Requested page size. The server might return - fewer items than requested. If unspecified, - server will pick an appropriate default. - page_token (str): - A token identifying a page of results from - the server. - filter (str): - List filter. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListVolumesResponse(proto.Message): - r"""Response message containing the list of storage volumes. - - Attributes: - volumes (MutableSequence[google.cloud.bare_metal_solution_v2.types.Volume]): - The list of storage volumes. 
- next_page_token (str): - A token identifying a page of results from - the server. - unreachable (MutableSequence[str]): - Locations that could not be reached. - """ - - @property - def raw_page(self): - return self - - volumes: MutableSequence['Volume'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Volume', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class UpdateVolumeRequest(proto.Message): - r"""Message for updating a volume. - - Attributes: - volume (google.cloud.bare_metal_solution_v2.types.Volume): - Required. The volume to update. - - The ``name`` field is used to identify the volume to update. - Format: - projects/{project}/locations/{location}/volumes/{volume} - update_mask (google.protobuf.field_mask_pb2.FieldMask): - The list of fields to update. - The only currently supported fields are: - - 'labels' - """ - - volume: 'Volume' = proto.Field( - proto.MESSAGE, - number=1, - message='Volume', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class RenameVolumeRequest(proto.Message): - r"""Message requesting rename of a server. - - Attributes: - name (str): - Required. The ``name`` field is used to identify the volume. - Format: - projects/{project}/locations/{location}/volumes/{volume} - new_volume_id (str): - Required. The new ``id`` of the volume. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - new_volume_id: str = proto.Field( - proto.STRING, - number=2, - ) - - -class EvictVolumeRequest(proto.Message): - r"""Request for skip volume cooloff and delete it. - - Attributes: - name (str): - Required. The name of the Volume. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ResizeVolumeRequest(proto.Message): - r"""Request for emergency resize Volume. - - Attributes: - volume (str): - Required. Volume to resize. - size_gib (int): - New Volume size, in GiB. - """ - - volume: str = proto.Field( - proto.STRING, - number=1, - ) - size_gib: int = proto.Field( - proto.INT64, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/types/volume_snapshot.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/types/volume_snapshot.py deleted file mode 100644 index 439f4e5fe2f0..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/google/cloud/bare_metal_solution_v2/types/volume_snapshot.py +++ /dev/null @@ -1,231 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
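Given the ``labels``-only constraint documented on ``UpdateVolumeRequest`` above, a sketch of a masked update; the long-running return type is assumed from the service's other mutating RPCs.

.. code-block:: python

    from google.cloud import bare_metal_solution_v2
    from google.protobuf import field_mask_pb2

    client = bare_metal_solution_v2.BareMetalSolutionClient()

    volume = bare_metal_solution_v2.Volume(
        name="projects/my-project/locations/us-central1/volumes/my-volume",
        labels={"env": "prod"},
    )

    # Only `labels` is currently accepted in the mask (see the docstring).
    operation = client.update_volume(
        volume=volume,
        update_mask=field_mask_pb2.FieldMask(paths=["labels"]),
    )
    updated = operation.result()  # blocks until the LRO completes
    print(dict(updated.labels))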
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.baremetalsolution.v2', - manifest={ - 'VolumeSnapshot', - 'GetVolumeSnapshotRequest', - 'ListVolumeSnapshotsRequest', - 'ListVolumeSnapshotsResponse', - 'DeleteVolumeSnapshotRequest', - 'CreateVolumeSnapshotRequest', - 'RestoreVolumeSnapshotRequest', - }, -) - - -class VolumeSnapshot(proto.Message): - r"""A snapshot of a volume. Only boot volumes can have snapshots. - - Attributes: - name (str): - The name of the snapshot. - id (str): - Output only. An identifier for the snapshot, - generated by the backend. - description (str): - The description of the snapshot. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The creation time of the - snapshot. - storage_volume (str): - Output only. The name of the volume which - this snapshot belongs to. - type_ (google.cloud.bare_metal_solution_v2.types.VolumeSnapshot.SnapshotType): - Output only. The type of the snapshot which - indicates whether it was scheduled or - manual/ad-hoc. - """ - class SnapshotType(proto.Enum): - r"""Represents the type of a snapshot. - - Values: - SNAPSHOT_TYPE_UNSPECIFIED (0): - Type is not specified. - AD_HOC (1): - Snapshot was taken manually by user. - SCHEDULED (2): - Snapshot was taken automatically as a part of - a snapshot schedule. - """ - SNAPSHOT_TYPE_UNSPECIFIED = 0 - AD_HOC = 1 - SCHEDULED = 2 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - id: str = proto.Field( - proto.STRING, - number=6, - ) - description: str = proto.Field( - proto.STRING, - number=2, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - storage_volume: str = proto.Field( - proto.STRING, - number=5, - ) - type_: SnapshotType = proto.Field( - proto.ENUM, - number=7, - enum=SnapshotType, - ) - - -class GetVolumeSnapshotRequest(proto.Message): - r"""Message for requesting volume snapshot information. - - Attributes: - name (str): - Required. The name of the snapshot. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListVolumeSnapshotsRequest(proto.Message): - r"""Message for requesting a list of volume snapshots. - - Attributes: - parent (str): - Required. Parent value for - ListVolumesRequest. - page_size (int): - Requested page size. The server might return - fewer items than requested. If unspecified, - server will pick an appropriate default. - page_token (str): - A token identifying a page of results from - the server. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListVolumeSnapshotsResponse(proto.Message): - r"""Response message containing the list of volume snapshots. - - Attributes: - volume_snapshots (MutableSequence[google.cloud.bare_metal_solution_v2.types.VolumeSnapshot]): - The list of snapshots. - next_page_token (str): - A token identifying a page of results from - the server. - unreachable (MutableSequence[str]): - Locations that could not be reached. 
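A sketch tying the snapshot messages together: take an ad-hoc snapshot of a boot volume, then restore it. Resource names are placeholders, and the non-LRO create / LRO restore split is an assumption about this service's surface.

.. code-block:: python

    from google.cloud import bare_metal_solution_v2

    client = bare_metal_solution_v2.BareMetalSolutionClient()
    volume = "projects/my-project/locations/us-central1/volumes/boot-vol"  # placeholder

    snapshot = client.create_volume_snapshot(
        parent=volume,
        volume_snapshot=bare_metal_solution_v2.VolumeSnapshot(
            description="pre-upgrade checkpoint",
        ),
    )

    # RestoreVolumeSnapshot is a long-running operation.
    operation = client.restore_volume_snapshot(volume_snapshot=snapshot.name)
    restored = operation.result()
    print(restored.type_)  # AD_HOC for manually created snapshots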
- """ - - @property - def raw_page(self): - return self - - volume_snapshots: MutableSequence['VolumeSnapshot'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='VolumeSnapshot', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class DeleteVolumeSnapshotRequest(proto.Message): - r"""Message for deleting named Volume snapshot. - - Attributes: - name (str): - Required. The name of the snapshot to delete. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateVolumeSnapshotRequest(proto.Message): - r"""Message for creating a volume snapshot. - - Attributes: - parent (str): - Required. The volume to snapshot. - volume_snapshot (google.cloud.bare_metal_solution_v2.types.VolumeSnapshot): - Required. The snapshot to create. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - volume_snapshot: 'VolumeSnapshot' = proto.Field( - proto.MESSAGE, - number=2, - message='VolumeSnapshot', - ) - - -class RestoreVolumeSnapshotRequest(proto.Message): - r"""Message for restoring a volume snapshot. - - Attributes: - volume_snapshot (str): - Required. Name of the snapshot which will be - used to restore its parent volume. - """ - - volume_snapshot: str = proto.Field( - proto.STRING, - number=1, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/mypy.ini b/owl-bot-staging/google-cloud-bare-metal-solution/v2/mypy.ini deleted file mode 100644 index 574c5aed394b..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/noxfile.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/noxfile.py deleted file mode 100644 index 801b21257198..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/noxfile.py +++ /dev/null @@ -1,280 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -import pathlib -import re -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", - "3.12", - "3.13", -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = 'google-cloud-bare-metal-solution' - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.13" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "prerelease_deps", -] - -@nox.session(python=ALL_PYTHON) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def unit(session, protobuf_implementation): - """Run the unit test suite.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") - - # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. - # The 'cpp' implementation requires Protobuf<4. - if protobuf_implementation == "cpp": - session.install("protobuf<4") - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/bare_metal_solution_v2/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - -@nox.session(python=ALL_PYTHON[-1]) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def prerelease_deps(session, protobuf_implementation): - """Run the unit test suite against pre-release versions of dependencies.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - # Install test environment dependencies - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - - # Install the package without dependencies - session.install('-e', '.', '--no-deps') - - # We test the minimum dependency versions using the minimum Python - # version so the lowest python runtime that we test has a corresponding constraints - # file, located at `testing/constraints--.txt`, which contains all of the - # dependencies and extras. - with open( - CURRENT_DIRECTORY - / "testing" - / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. 
- constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - session.install(*constraints_deps) - - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 - "grpcio!=1.67.0rc1", - "grpcio-status", - "protobuf", - "proto-plus", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) - - # Remaining dependencies - other_deps = [ - "requests", - ] - session.install(*other_deps) - - # Print out prerelease package versions - - session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") - session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("python", "-c", "import grpc; print(grpc.__version__)") - session.run( - "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" - ) - session.run( - "python", "-c", "import proto; print(proto.__version__)" - ) - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/bare_metal_solution_v2/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. - """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. 
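Assuming nox is installed, the parametrized sessions above are addressed by their generated names; a hypothetical one-liner driving a single session from Python, equivalent to quoting the name on the shell command line:

.. code-block:: python

    import subprocess

    # Run one interpreter/implementation pair of the parametrized unit session.
    subprocess.run(
        ["nox", "-s", "unit-3.12(protobuf_implementation='upb')"],
        check=True,
    )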
- """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_nfs_share_async.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_nfs_share_async.py deleted file mode 100644 index 6845b54b4c1e..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_nfs_share_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateNfsShare -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_CreateNfsShare_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import bare_metal_solution_v2
-
-
-async def sample_create_nfs_share():
-    # Create a client
-    client = bare_metal_solution_v2.BareMetalSolutionAsyncClient()
-
-    # Initialize request argument(s)
-    request = bare_metal_solution_v2.CreateNfsShareRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    operation = client.create_nfs_share(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = (await operation).result()
-
-    # Handle the response
-    print(response)
-
-# [END baremetalsolution_v2_generated_BareMetalSolution_CreateNfsShare_async]
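-# Illustrative aside, not part of the generated snippet: the coroutine above
-# needs an event loop to run. A minimal driver using only the standard
-# library:
-#
-#     import asyncio
-#
-#     asyncio.run(sample_create_nfs_share())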
diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_nfs_share_sync.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_nfs_share_sync.py
deleted file mode 100644
index a45cfa0f71ab..000000000000
--- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_nfs_share_sync.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CreateNfsShare
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-bare-metal-solution
-
-
-# [START baremetalsolution_v2_generated_BareMetalSolution_CreateNfsShare_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import bare_metal_solution_v2
-
-
-def sample_create_nfs_share():
-    # Create a client
-    client = bare_metal_solution_v2.BareMetalSolutionClient()
-
-    # Initialize request argument(s)
-    request = bare_metal_solution_v2.CreateNfsShareRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    operation = client.create_nfs_share(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END baremetalsolution_v2_generated_BareMetalSolution_CreateNfsShare_sync]
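-# Illustrative aside, not part of the generated snippet: operation.result()
-# blocks indefinitely by default. A sketch with a bounded wait and error
-# handling; the 300-second timeout is an arbitrary illustration:
-#
-#     from google.api_core.exceptions import GoogleAPICallError
-#
-#     try:
-#         response = operation.result(timeout=300)
-#     except GoogleAPICallError as exc:
-#         print(f"CreateNfsShare failed: {exc}")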
diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_provisioning_config_async.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_provisioning_config_async.py
deleted file mode 100644
index 66a39a722142..000000000000
--- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_provisioning_config_async.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CreateProvisioningConfig
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-bare-metal-solution
-
-
-# [START baremetalsolution_v2_generated_BareMetalSolution_CreateProvisioningConfig_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import bare_metal_solution_v2
-
-
-async def sample_create_provisioning_config():
-    # Create a client
-    client = bare_metal_solution_v2.BareMetalSolutionAsyncClient()
-
-    # Initialize request argument(s)
-    request = bare_metal_solution_v2.CreateProvisioningConfigRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    response = await client.create_provisioning_config(request=request)
-
-    # Handle the response
-    print(response)
-
-# [END baremetalsolution_v2_generated_BareMetalSolution_CreateProvisioningConfig_async]
diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_provisioning_config_sync.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_provisioning_config_sync.py
deleted file mode 100644
index 07faed7c7f12..000000000000
--- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_provisioning_config_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CreateProvisioningConfig
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-bare-metal-solution
-
-
-# [START baremetalsolution_v2_generated_BareMetalSolution_CreateProvisioningConfig_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -def sample_create_provisioning_config(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.CreateProvisioningConfigRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_provisioning_config(request=request) - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_CreateProvisioningConfig_sync] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_ssh_key_async.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_ssh_key_async.py deleted file mode 100644 index 249844b00b9b..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_ssh_key_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateSSHKey -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_CreateSSHKey_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -async def sample_create_ssh_key(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.CreateSSHKeyRequest( - parent="parent_value", - ssh_key_id="ssh_key_id_value", - ) - - # Make the request - response = await client.create_ssh_key(request=request) - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_CreateSSHKey_async] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_ssh_key_sync.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_ssh_key_sync.py deleted file mode 100644 index 24e0c9943030..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_ssh_key_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateSSHKey -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_CreateSSHKey_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -def sample_create_ssh_key(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.CreateSSHKeyRequest( - parent="parent_value", - ssh_key_id="ssh_key_id_value", - ) - - # Make the request - response = client.create_ssh_key(request=request) - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_CreateSSHKey_sync] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_volume_snapshot_async.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_volume_snapshot_async.py deleted file mode 100644 index cfc2bb3a3629..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_volume_snapshot_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateVolumeSnapshot -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_CreateVolumeSnapshot_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
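-#   As an illustration only (all values hypothetical): a well-formed parent for
-#   these requests is a location path such as
-#   "projects/my-project/locations/us-central1", which can also be built with
-#   the generated helper client.common_location_path("my-project", "us-central1").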
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -async def sample_create_volume_snapshot(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.CreateVolumeSnapshotRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_volume_snapshot(request=request) - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_CreateVolumeSnapshot_async] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_volume_snapshot_sync.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_volume_snapshot_sync.py deleted file mode 100644 index 04444ef0d246..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_create_volume_snapshot_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateVolumeSnapshot -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_CreateVolumeSnapshot_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -def sample_create_volume_snapshot(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.CreateVolumeSnapshotRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_volume_snapshot(request=request) - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_CreateVolumeSnapshot_sync] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_delete_nfs_share_async.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_delete_nfs_share_async.py deleted file mode 100644 index 4a536116b32c..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_delete_nfs_share_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteNfsShare -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_DeleteNfsShare_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -async def sample_delete_nfs_share(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.DeleteNfsShareRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_nfs_share(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_DeleteNfsShare_async] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_delete_nfs_share_sync.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_delete_nfs_share_sync.py deleted file mode 100644 index 27282c5a63b9..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_delete_nfs_share_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteNfsShare -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_DeleteNfsShare_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -def sample_delete_nfs_share(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.DeleteNfsShareRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_nfs_share(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_DeleteNfsShare_sync] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_delete_ssh_key_async.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_delete_ssh_key_async.py deleted file mode 100644 index 63af9bc370f6..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_delete_ssh_key_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteSSHKey -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_DeleteSSHKey_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -async def sample_delete_ssh_key(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.DeleteSSHKeyRequest( - name="name_value", - ) - - # Make the request - await client.delete_ssh_key(request=request) - - -# [END baremetalsolution_v2_generated_BareMetalSolution_DeleteSSHKey_async] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_delete_ssh_key_sync.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_delete_ssh_key_sync.py deleted file mode 100644 index 0f2387df3d75..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_delete_ssh_key_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteSSHKey -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_DeleteSSHKey_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -def sample_delete_ssh_key(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.DeleteSSHKeyRequest( - name="name_value", - ) - - # Make the request - client.delete_ssh_key(request=request) - - -# [END baremetalsolution_v2_generated_BareMetalSolution_DeleteSSHKey_sync] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_delete_volume_snapshot_async.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_delete_volume_snapshot_async.py deleted file mode 100644 index f37557159caf..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_delete_volume_snapshot_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteVolumeSnapshot -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_DeleteVolumeSnapshot_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
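-#   As an illustration only (all segments hypothetical): a well-formed name for
-#   this request is a full snapshot path such as
-#   "projects/my-project/locations/us-central1/volumes/my-volume/snapshots/my-snapshot".
-#   Deleting a name that no longer exists raises
-#   google.api_core.exceptions.NotFound, which a caller may catch for
-#   idempotent cleanup, e.g.
-#
-#       from google.api_core.exceptions import NotFound
-#
-#       try:
-#           client.delete_volume_snapshot(request=request)  # awaited on the async client
-#       except NotFound:
-#           pass  # already deleted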
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -async def sample_delete_volume_snapshot(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.DeleteVolumeSnapshotRequest( - name="name_value", - ) - - # Make the request - await client.delete_volume_snapshot(request=request) - - -# [END baremetalsolution_v2_generated_BareMetalSolution_DeleteVolumeSnapshot_async] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_delete_volume_snapshot_sync.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_delete_volume_snapshot_sync.py deleted file mode 100644 index 21e9adec6925..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_delete_volume_snapshot_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteVolumeSnapshot -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_DeleteVolumeSnapshot_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -def sample_delete_volume_snapshot(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.DeleteVolumeSnapshotRequest( - name="name_value", - ) - - # Make the request - client.delete_volume_snapshot(request=request) - - -# [END baremetalsolution_v2_generated_BareMetalSolution_DeleteVolumeSnapshot_sync] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_detach_lun_async.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_detach_lun_async.py deleted file mode 100644 index b40013650941..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_detach_lun_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DetachLun -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_DetachLun_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -async def sample_detach_lun(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.DetachLunRequest( - instance="instance_value", - lun="lun_value", - ) - - # Make the request - operation = client.detach_lun(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_DetachLun_async] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_detach_lun_sync.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_detach_lun_sync.py deleted file mode 100644 index dc085715668f..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_detach_lun_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DetachLun -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_DetachLun_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -def sample_detach_lun(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.DetachLunRequest( - instance="instance_value", - lun="lun_value", - ) - - # Make the request - operation = client.detach_lun(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_DetachLun_sync] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_disable_interactive_serial_console_async.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_disable_interactive_serial_console_async.py deleted file mode 100644 index 38236b8e7a0e..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_disable_interactive_serial_console_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DisableInteractiveSerialConsole -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_DisableInteractiveSerialConsole_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
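-#   Illustrative variation, not generated code: with the synchronous client a
-#   caller can poll the returned operation instead of blocking on result(), e.g.
-#
-#       import time
-#
-#       while not operation.done():
-#           time.sleep(5)
-#       response = operation.result()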
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -async def sample_disable_interactive_serial_console(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.DisableInteractiveSerialConsoleRequest( - name="name_value", - ) - - # Make the request - operation = client.disable_interactive_serial_console(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_DisableInteractiveSerialConsole_async] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_disable_interactive_serial_console_sync.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_disable_interactive_serial_console_sync.py deleted file mode 100644 index 9d6d1c5557ca..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_disable_interactive_serial_console_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DisableInteractiveSerialConsole -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_DisableInteractiveSerialConsole_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -def sample_disable_interactive_serial_console(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.DisableInteractiveSerialConsoleRequest( - name="name_value", - ) - - # Make the request - operation = client.disable_interactive_serial_console(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_DisableInteractiveSerialConsole_sync] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_enable_interactive_serial_console_async.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_enable_interactive_serial_console_async.py deleted file mode 100644 index 21fe63180017..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_enable_interactive_serial_console_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for EnableInteractiveSerialConsole -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_EnableInteractiveSerialConsole_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -async def sample_enable_interactive_serial_console(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.EnableInteractiveSerialConsoleRequest( - name="name_value", - ) - - # Make the request - operation = client.enable_interactive_serial_console(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_EnableInteractiveSerialConsole_async] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_enable_interactive_serial_console_sync.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_enable_interactive_serial_console_sync.py deleted file mode 100644 index 118fe6f34fb5..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_enable_interactive_serial_console_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for EnableInteractiveSerialConsole -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_EnableInteractiveSerialConsole_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -def sample_enable_interactive_serial_console(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.EnableInteractiveSerialConsoleRequest( - name="name_value", - ) - - # Make the request - operation = client.enable_interactive_serial_console(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_EnableInteractiveSerialConsole_sync] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_evict_lun_async.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_evict_lun_async.py deleted file mode 100644 index 44005f4fb32f..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_evict_lun_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for EvictLun -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_EvictLun_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -async def sample_evict_lun(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.EvictLunRequest( - name="name_value", - ) - - # Make the request - operation = client.evict_lun(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_EvictLun_async] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_evict_lun_sync.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_evict_lun_sync.py deleted file mode 100644 index 1ba6af7866ab..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_evict_lun_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for EvictLun -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_EvictLun_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -def sample_evict_lun(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.EvictLunRequest( - name="name_value", - ) - - # Make the request - operation = client.evict_lun(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_EvictLun_sync] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_evict_volume_async.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_evict_volume_async.py deleted file mode 100644 index a770139d1592..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_evict_volume_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for EvictVolume -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_EvictVolume_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -async def sample_evict_volume(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.EvictVolumeRequest( - name="name_value", - ) - - # Make the request - operation = client.evict_volume(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_EvictVolume_async] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_evict_volume_sync.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_evict_volume_sync.py deleted file mode 100644 index 1f863f93134d..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_evict_volume_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for EvictVolume -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_EvictVolume_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -def sample_evict_volume(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.EvictVolumeRequest( - name="name_value", - ) - - # Make the request - operation = client.evict_volume(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_EvictVolume_sync] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_instance_async.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_instance_async.py deleted file mode 100644 index b4ff9bc4ad81..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_instance_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_GetInstance_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -async def sample_get_instance(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.GetInstanceRequest( - name="name_value", - ) - - # Make the request - response = await client.get_instance(request=request) - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_GetInstance_async] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_instance_sync.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_instance_sync.py deleted file mode 100644 index 7fa8aa23e2a7..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_instance_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_GetInstance_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -def sample_get_instance(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.GetInstanceRequest( - name="name_value", - ) - - # Make the request - response = client.get_instance(request=request) - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_GetInstance_sync] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_lun_async.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_lun_async.py deleted file mode 100644 index 049a8e180f01..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_lun_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetLun -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_GetLun_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -async def sample_get_lun(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.GetLunRequest( - name="name_value", - ) - - # Make the request - response = await client.get_lun(request=request) - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_GetLun_async] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_lun_sync.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_lun_sync.py deleted file mode 100644 index 4ba8ebf561ec..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_lun_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetLun -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_GetLun_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -def sample_get_lun(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.GetLunRequest( - name="name_value", - ) - - # Make the request - response = client.get_lun(request=request) - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_GetLun_sync] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_network_async.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_network_async.py deleted file mode 100644 index 855c2e4f94e8..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_network_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetNetwork -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_GetNetwork_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -async def sample_get_network(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.GetNetworkRequest( - name="name_value", - ) - - # Make the request - response = await client.get_network(request=request) - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_GetNetwork_async] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_network_sync.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_network_sync.py deleted file mode 100644 index 599672e7432f..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_network_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetNetwork -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_GetNetwork_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -def sample_get_network(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.GetNetworkRequest( - name="name_value", - ) - - # Make the request - response = client.get_network(request=request) - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_GetNetwork_sync] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_nfs_share_async.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_nfs_share_async.py deleted file mode 100644 index be4614b06222..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_nfs_share_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetNfsShare -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_GetNfsShare_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -async def sample_get_nfs_share(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.GetNfsShareRequest( - name="name_value", - ) - - # Make the request - response = await client.get_nfs_share(request=request) - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_GetNfsShare_async] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_nfs_share_sync.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_nfs_share_sync.py deleted file mode 100644 index 9c967fedff90..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_nfs_share_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetNfsShare -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_GetNfsShare_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -def sample_get_nfs_share(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.GetNfsShareRequest( - name="name_value", - ) - - # Make the request - response = client.get_nfs_share(request=request) - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_GetNfsShare_sync] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_provisioning_config_async.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_provisioning_config_async.py deleted file mode 100644 index 06acb697da0c..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_provisioning_config_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetProvisioningConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_GetProvisioningConfig_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -async def sample_get_provisioning_config(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.GetProvisioningConfigRequest( - name="name_value", - ) - - # Make the request - response = await client.get_provisioning_config(request=request) - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_GetProvisioningConfig_async] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_provisioning_config_sync.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_provisioning_config_sync.py deleted file mode 100644 index a18e5a4b450c..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_provisioning_config_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetProvisioningConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_GetProvisioningConfig_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -def sample_get_provisioning_config(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.GetProvisioningConfigRequest( - name="name_value", - ) - - # Make the request - response = client.get_provisioning_config(request=request) - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_GetProvisioningConfig_sync] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_volume_async.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_volume_async.py deleted file mode 100644 index 6becdc2d5516..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_volume_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetVolume -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_GetVolume_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -async def sample_get_volume(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.GetVolumeRequest( - name="name_value", - ) - - # Make the request - response = await client.get_volume(request=request) - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_GetVolume_async] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_volume_snapshot_async.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_volume_snapshot_async.py deleted file mode 100644 index 53da8f8013f3..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_volume_snapshot_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetVolumeSnapshot -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_GetVolumeSnapshot_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -async def sample_get_volume_snapshot(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.GetVolumeSnapshotRequest( - name="name_value", - ) - - # Make the request - response = await client.get_volume_snapshot(request=request) - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_GetVolumeSnapshot_async] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_volume_snapshot_sync.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_volume_snapshot_sync.py deleted file mode 100644 index dee46c6cdea7..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_volume_snapshot_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetVolumeSnapshot -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_GetVolumeSnapshot_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -def sample_get_volume_snapshot(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.GetVolumeSnapshotRequest( - name="name_value", - ) - - # Make the request - response = client.get_volume_snapshot(request=request) - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_GetVolumeSnapshot_sync] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_volume_sync.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_volume_sync.py deleted file mode 100644 index 64616da583e9..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_get_volume_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetVolume -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_GetVolume_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -def sample_get_volume(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.GetVolumeRequest( - name="name_value", - ) - - # Make the request - response = client.get_volume(request=request) - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_GetVolume_sync] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_instances_async.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_instances_async.py deleted file mode 100644 index cbb4e2457476..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_instances_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListInstances -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_ListInstances_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -async def sample_list_instances(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.ListInstancesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_instances(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_ListInstances_async] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_instances_sync.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_instances_sync.py deleted file mode 100644 index abe431c8c56b..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_instances_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListInstances -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_ListInstances_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -def sample_list_instances(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.ListInstancesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_instances(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_ListInstances_sync] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_luns_async.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_luns_async.py deleted file mode 100644 index 0fb7ad3e6536..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_luns_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListLuns -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_ListLuns_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -async def sample_list_luns(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.ListLunsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_luns(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_ListLuns_async] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_luns_sync.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_luns_sync.py deleted file mode 100644 index 3c0a819163b9..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_luns_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListLuns -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_ListLuns_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -def sample_list_luns(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.ListLunsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_luns(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_ListLuns_sync] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_network_usage_async.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_network_usage_async.py deleted file mode 100644 index e0b2ef239ba2..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_network_usage_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListNetworkUsage -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_ListNetworkUsage_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -async def sample_list_network_usage(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.ListNetworkUsageRequest( - location="location_value", - ) - - # Make the request - response = await client.list_network_usage(request=request) - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_ListNetworkUsage_async] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_network_usage_sync.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_network_usage_sync.py deleted file mode 100644 index 8f790da9a211..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_network_usage_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListNetworkUsage -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_ListNetworkUsage_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -def sample_list_network_usage(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.ListNetworkUsageRequest( - location="location_value", - ) - - # Make the request - response = client.list_network_usage(request=request) - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_ListNetworkUsage_sync] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_networks_async.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_networks_async.py deleted file mode 100644 index 550789bfee2a..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_networks_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListNetworks -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_ListNetworks_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -async def sample_list_networks(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.ListNetworksRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_networks(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_ListNetworks_async] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_networks_sync.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_networks_sync.py deleted file mode 100644 index a3089a4baff3..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_networks_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListNetworks -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_ListNetworks_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -def sample_list_networks(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.ListNetworksRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_networks(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_ListNetworks_sync] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_nfs_shares_async.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_nfs_shares_async.py deleted file mode 100644 index cb7a6f8f5d3d..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_nfs_shares_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListNfsShares -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_ListNfsShares_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -async def sample_list_nfs_shares(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.ListNfsSharesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_nfs_shares(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_ListNfsShares_async] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_nfs_shares_sync.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_nfs_shares_sync.py deleted file mode 100644 index a1cc12c9e437..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_nfs_shares_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListNfsShares -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_ListNfsShares_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -def sample_list_nfs_shares(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.ListNfsSharesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_nfs_shares(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_ListNfsShares_sync] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_os_images_async.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_os_images_async.py deleted file mode 100644 index 2c8058a38d01..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_os_images_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListOSImages -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_ListOSImages_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -async def sample_list_os_images(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.ListOSImagesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_os_images(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_ListOSImages_async] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_os_images_sync.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_os_images_sync.py deleted file mode 100644 index a96a302cc9b9..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_os_images_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListOSImages -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_ListOSImages_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -def sample_list_os_images(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.ListOSImagesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_os_images(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_ListOSImages_sync] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_provisioning_quotas_async.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_provisioning_quotas_async.py deleted file mode 100644 index a205012ae4e9..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_provisioning_quotas_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListProvisioningQuotas -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_ListProvisioningQuotas_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -async def sample_list_provisioning_quotas(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.ListProvisioningQuotasRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_provisioning_quotas(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_ListProvisioningQuotas_async] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_provisioning_quotas_sync.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_provisioning_quotas_sync.py deleted file mode 100644 index 7de91ae8a3f4..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_provisioning_quotas_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListProvisioningQuotas -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_ListProvisioningQuotas_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -def sample_list_provisioning_quotas(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.ListProvisioningQuotasRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_provisioning_quotas(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_ListProvisioningQuotas_sync] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_ssh_keys_async.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_ssh_keys_async.py deleted file mode 100644 index e60d86305156..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_ssh_keys_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListSSHKeys -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_ListSSHKeys_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -async def sample_list_ssh_keys(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.ListSSHKeysRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_ssh_keys(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_ListSSHKeys_async] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_ssh_keys_sync.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_ssh_keys_sync.py deleted file mode 100644 index 359b43e3190d..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_ssh_keys_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListSSHKeys -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_ListSSHKeys_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -def sample_list_ssh_keys(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.ListSSHKeysRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_ssh_keys(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_ListSSHKeys_sync] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_volume_snapshots_async.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_volume_snapshots_async.py deleted file mode 100644 index 6cffc0c4bd5f..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_volume_snapshots_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListVolumeSnapshots -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_ListVolumeSnapshots_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -async def sample_list_volume_snapshots(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.ListVolumeSnapshotsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_volume_snapshots(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_ListVolumeSnapshots_async] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_volume_snapshots_sync.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_volume_snapshots_sync.py deleted file mode 100644 index a9d3f390897f..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_volume_snapshots_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListVolumeSnapshots -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_ListVolumeSnapshots_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -def sample_list_volume_snapshots(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.ListVolumeSnapshotsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_volume_snapshots(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_ListVolumeSnapshots_sync] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_volumes_async.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_volumes_async.py deleted file mode 100644 index def853ab5c09..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_volumes_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListVolumes -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_ListVolumes_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -async def sample_list_volumes(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.ListVolumesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_volumes(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_ListVolumes_async] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_volumes_sync.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_volumes_sync.py deleted file mode 100644 index 3cc1f4a3e722..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_list_volumes_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListVolumes -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_ListVolumes_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -def sample_list_volumes(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.ListVolumesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_volumes(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_ListVolumes_sync] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_instance_async.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_instance_async.py deleted file mode 100644 index 20c4bd45b8a1..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_instance_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RenameInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_RenameInstance_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -async def sample_rename_instance(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.RenameInstanceRequest( - name="name_value", - new_instance_id="new_instance_id_value", - ) - - # Make the request - response = await client.rename_instance(request=request) - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_RenameInstance_async] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_instance_sync.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_instance_sync.py deleted file mode 100644 index b42ff6244ed6..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_instance_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RenameInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_RenameInstance_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -def sample_rename_instance(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.RenameInstanceRequest( - name="name_value", - new_instance_id="new_instance_id_value", - ) - - # Make the request - response = client.rename_instance(request=request) - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_RenameInstance_sync] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_network_async.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_network_async.py deleted file mode 100644 index 765f297ea888..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_network_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RenameNetwork -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_RenameNetwork_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -async def sample_rename_network(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.RenameNetworkRequest( - name="name_value", - new_network_id="new_network_id_value", - ) - - # Make the request - response = await client.rename_network(request=request) - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_RenameNetwork_async] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_network_sync.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_network_sync.py deleted file mode 100644 index 3f8709bcf2e5..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_network_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RenameNetwork -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_RenameNetwork_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -def sample_rename_network(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.RenameNetworkRequest( - name="name_value", - new_network_id="new_network_id_value", - ) - - # Make the request - response = client.rename_network(request=request) - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_RenameNetwork_sync] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_nfs_share_async.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_nfs_share_async.py deleted file mode 100644 index e421cedd6b25..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_nfs_share_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RenameNfsShare -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_RenameNfsShare_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -async def sample_rename_nfs_share(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.RenameNfsShareRequest( - name="name_value", - new_nfsshare_id="new_nfsshare_id_value", - ) - - # Make the request - response = await client.rename_nfs_share(request=request) - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_RenameNfsShare_async] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_nfs_share_sync.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_nfs_share_sync.py deleted file mode 100644 index b463dc2cd984..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_nfs_share_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RenameNfsShare -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_RenameNfsShare_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -def sample_rename_nfs_share(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.RenameNfsShareRequest( - name="name_value", - new_nfsshare_id="new_nfsshare_id_value", - ) - - # Make the request - response = client.rename_nfs_share(request=request) - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_RenameNfsShare_sync] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_volume_async.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_volume_async.py deleted file mode 100644 index 4520953b98ca..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_volume_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RenameVolume -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_RenameVolume_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -async def sample_rename_volume(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.RenameVolumeRequest( - name="name_value", - new_volume_id="new_volume_id_value", - ) - - # Make the request - response = await client.rename_volume(request=request) - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_RenameVolume_async] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_volume_sync.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_volume_sync.py deleted file mode 100644 index 96cecb450522..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_rename_volume_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RenameVolume -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_RenameVolume_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -def sample_rename_volume(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.RenameVolumeRequest( - name="name_value", - new_volume_id="new_volume_id_value", - ) - - # Make the request - response = client.rename_volume(request=request) - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_RenameVolume_sync] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_reset_instance_async.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_reset_instance_async.py deleted file mode 100644 index ce8b3bf4486c..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_reset_instance_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ResetInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_ResetInstance_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -async def sample_reset_instance(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.ResetInstanceRequest( - name="name_value", - ) - - # Make the request - operation = client.reset_instance(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_ResetInstance_async] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_reset_instance_sync.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_reset_instance_sync.py deleted file mode 100644 index 65809fd2c41d..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_reset_instance_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ResetInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_ResetInstance_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -def sample_reset_instance(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.ResetInstanceRequest( - name="name_value", - ) - - # Make the request - operation = client.reset_instance(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_ResetInstance_sync] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_resize_volume_async.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_resize_volume_async.py deleted file mode 100644 index 4271b9105adb..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_resize_volume_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ResizeVolume -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_ResizeVolume_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -async def sample_resize_volume(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.ResizeVolumeRequest( - volume="volume_value", - ) - - # Make the request - operation = client.resize_volume(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_ResizeVolume_async] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_resize_volume_sync.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_resize_volume_sync.py deleted file mode 100644 index cccaf8a41ad1..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_resize_volume_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ResizeVolume -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_ResizeVolume_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
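In the async variants, the client method returns an awaitable that resolves to an operation object, which the generated code expresses as ``(await operation).result()``. Note that ``google.api_core.operation_async.AsyncOperation.result()`` is itself a coroutine, so in practice the fully awaited form below may be what you need; the extra ``await`` is the only adjustment, and it is an assumption about your installed ``google-api-core`` version:

.. code-block:: python

    import asyncio

    from google.cloud import bare_metal_solution_v2

    async def resize_volume_awaited():
        client = bare_metal_solution_v2.BareMetalSolutionAsyncClient()
        request = bare_metal_solution_v2.ResizeVolumeRequest(volume="volume_value")

        # Await the client call to obtain the AsyncOperation, then await
        # result() to obtain the finished response.
        operation = await client.resize_volume(request=request)
        response = await operation.result()
        print(response)

    asyncio.run(resize_volume_awaited())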
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -def sample_resize_volume(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.ResizeVolumeRequest( - volume="volume_value", - ) - - # Make the request - operation = client.resize_volume(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_ResizeVolume_sync] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_restore_volume_snapshot_async.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_restore_volume_snapshot_async.py deleted file mode 100644 index fd81114d33db..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_restore_volume_snapshot_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RestoreVolumeSnapshot -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_RestoreVolumeSnapshot_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
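The generated ResizeVolume request sets only the ``volume`` name. To actually grow a volume you would also pass a target size; the ``size_gib`` field below follows the v2 API surface, but verify it against your installed library version, and treat the resource name and value as placeholders:

.. code-block:: python

    from google.cloud import bare_metal_solution_v2

    client = bare_metal_solution_v2.BareMetalSolutionClient()
    request = bare_metal_solution_v2.ResizeVolumeRequest(
        volume="projects/my-project/locations/us-central1/volumes/my-volume",
        size_gib=2048,  # target size in GiB; illustrative value
    )
    operation = client.resize_volume(request=request)
    # Resizing completes through a long-running operation, as in the samples.
    response = operation.result()
    print(response)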
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -async def sample_restore_volume_snapshot(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.RestoreVolumeSnapshotRequest( - volume_snapshot="volume_snapshot_value", - ) - - # Make the request - operation = client.restore_volume_snapshot(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_RestoreVolumeSnapshot_async] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_restore_volume_snapshot_sync.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_restore_volume_snapshot_sync.py deleted file mode 100644 index 9679b486bad2..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_restore_volume_snapshot_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RestoreVolumeSnapshot -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_RestoreVolumeSnapshot_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -def sample_restore_volume_snapshot(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.RestoreVolumeSnapshotRequest( - volume_snapshot="volume_snapshot_value", - ) - - # Make the request - operation = client.restore_volume_snapshot(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_RestoreVolumeSnapshot_sync] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_start_instance_async.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_start_instance_async.py deleted file mode 100644 index 723f32a11f8c..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_start_instance_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for StartInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_StartInstance_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
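Every sample header above notes that a regional endpoint may need to be specified when creating the service client. A minimal sketch of doing so with ``client_options`` (the endpoint string is a placeholder; consult the ``client_options`` page linked in the headers for the value your deployment needs):

.. code-block:: python

    from google.api_core.client_options import ClientOptions
    from google.cloud import bare_metal_solution_v2

    # Placeholder endpoint; substitute the regional endpoint your project
    # requires, per the client_options documentation linked above.
    options = ClientOptions(api_endpoint="baremetalsolution.googleapis.com")
    client = bare_metal_solution_v2.BareMetalSolutionClient(client_options=options)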
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -async def sample_start_instance(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.StartInstanceRequest( - name="name_value", - ) - - # Make the request - operation = client.start_instance(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_StartInstance_async] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_start_instance_sync.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_start_instance_sync.py deleted file mode 100644 index f9f5cc05082d..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_start_instance_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for StartInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_StartInstance_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -def sample_start_instance(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.StartInstanceRequest( - name="name_value", - ) - - # Make the request - operation = client.start_instance(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_StartInstance_sync] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_stop_instance_async.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_stop_instance_async.py deleted file mode 100644 index fcd2c424a916..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_stop_instance_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for StopInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_StopInstance_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -async def sample_stop_instance(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.StopInstanceRequest( - name="name_value", - ) - - # Make the request - operation = client.stop_instance(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_StopInstance_async] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_stop_instance_sync.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_stop_instance_sync.py deleted file mode 100644 index d5a20469c5ac..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_stop_instance_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for StopInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_StopInstance_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -def sample_stop_instance(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.StopInstanceRequest( - name="name_value", - ) - - # Make the request - operation = client.stop_instance(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_StopInstance_sync] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_submit_provisioning_config_async.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_submit_provisioning_config_async.py deleted file mode 100644 index 9217f52b4799..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_submit_provisioning_config_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SubmitProvisioningConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_SubmitProvisioningConfig_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
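StartInstance and StopInstance are symmetric lifecycle calls, and both complete through a long-running operation. A sketch of power-cycling an instance by chaining the two calls shown in the samples above, waiting for each operation before proceeding (the instance name is a placeholder):

.. code-block:: python

    from google.cloud import bare_metal_solution_v2

    def cycle_instance(name: str) -> None:
        client = bare_metal_solution_v2.BareMetalSolutionClient()

        # Stop the instance and block until the operation finishes.
        stop = client.stop_instance(
            request=bare_metal_solution_v2.StopInstanceRequest(name=name)
        )
        stop.result()

        # Start it again, and print the final Instance resource.
        start = client.start_instance(
            request=bare_metal_solution_v2.StartInstanceRequest(name=name)
        )
        print(start.result())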
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -async def sample_submit_provisioning_config(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.SubmitProvisioningConfigRequest( - parent="parent_value", - ) - - # Make the request - response = await client.submit_provisioning_config(request=request) - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_SubmitProvisioningConfig_async] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_submit_provisioning_config_sync.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_submit_provisioning_config_sync.py deleted file mode 100644 index ece98fe2a6bd..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_submit_provisioning_config_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SubmitProvisioningConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_SubmitProvisioningConfig_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -def sample_submit_provisioning_config(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.SubmitProvisioningConfigRequest( - parent="parent_value", - ) - - # Make the request - response = client.submit_provisioning_config(request=request) - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_SubmitProvisioningConfig_sync] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_update_instance_async.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_update_instance_async.py deleted file mode 100644 index 9853a4c53ffa..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_update_instance_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_UpdateInstance_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
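Unlike the instance and volume methods above, SubmitProvisioningConfig is unary: the samples assign the client call's return value directly to ``response``, with no ``operation.result()`` step. A sketch of reading the accepted config back out of that response; the ``provisioning_config`` field name is an assumption about the v2 response message, so verify it against your installed version:

.. code-block:: python

    from google.cloud import bare_metal_solution_v2

    client = bare_metal_solution_v2.BareMetalSolutionClient()
    request = bare_metal_solution_v2.SubmitProvisioningConfigRequest(
        parent="projects/my-project/locations/us-central1",  # placeholder
    )
    # Unary call: the response is returned directly, nothing to poll.
    response = client.submit_provisioning_config(request=request)
    print(response.provisioning_config.name)  # assumed field name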
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -async def sample_update_instance(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.UpdateInstanceRequest( - ) - - # Make the request - operation = client.update_instance(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_UpdateInstance_async] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_update_instance_sync.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_update_instance_sync.py deleted file mode 100644 index 8628dd8ec63b..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_update_instance_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateInstance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_UpdateInstance_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -def sample_update_instance(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.UpdateInstanceRequest( - ) - - # Make the request - operation = client.update_instance(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_UpdateInstance_sync] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_update_network_async.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_update_network_async.py deleted file mode 100644 index b4f12f01400b..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_update_network_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateNetwork -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_UpdateNetwork_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
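The UpdateInstance samples construct an empty request, which the header comments flag as needing correct values before they will work. In AIP-134-style update methods, the request carries the resource to write plus an ``update_mask`` naming the fields to change. A sketch under that assumption; the resource name and label are placeholders, so check ``UpdateInstanceRequest`` in your installed version for the exact fields:

.. code-block:: python

    from google.protobuf import field_mask_pb2

    from google.cloud import bare_metal_solution_v2

    client = bare_metal_solution_v2.BareMetalSolutionClient()
    instance = bare_metal_solution_v2.Instance(
        name="projects/my-project/locations/us-central1/instances/my-instance",
        labels={"env": "test"},  # illustrative change
    )
    request = bare_metal_solution_v2.UpdateInstanceRequest(
        instance=instance,
        # Only the fields named in the mask are written.
        update_mask=field_mask_pb2.FieldMask(paths=["labels"]),
    )
    operation = client.update_instance(request=request)
    print(operation.result())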
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -async def sample_update_network(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.UpdateNetworkRequest( - ) - - # Make the request - operation = client.update_network(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_UpdateNetwork_async] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_update_network_sync.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_update_network_sync.py deleted file mode 100644 index f5451184ca50..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_update_network_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateNetwork -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_UpdateNetwork_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -def sample_update_network(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.UpdateNetworkRequest( - ) - - # Make the request - operation = client.update_network(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_UpdateNetwork_sync] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_update_nfs_share_async.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_update_nfs_share_async.py deleted file mode 100644 index fc1b4a63e63e..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_update_nfs_share_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateNfsShare -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_UpdateNfsShare_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -async def sample_update_nfs_share(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.UpdateNfsShareRequest( - ) - - # Make the request - operation = client.update_nfs_share(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_UpdateNfsShare_async] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_update_nfs_share_sync.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_update_nfs_share_sync.py deleted file mode 100644 index 28ab3d6cded9..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_update_nfs_share_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateNfsShare -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_UpdateNfsShare_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -def sample_update_nfs_share(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.UpdateNfsShareRequest( - ) - - # Make the request - operation = client.update_nfs_share(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_UpdateNfsShare_sync] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_update_provisioning_config_async.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_update_provisioning_config_async.py deleted file mode 100644 index 51fa4e4ecedd..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_update_provisioning_config_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateProvisioningConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_UpdateProvisioningConfig_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -async def sample_update_provisioning_config(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.UpdateProvisioningConfigRequest( - ) - - # Make the request - response = await client.update_provisioning_config(request=request) - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_UpdateProvisioningConfig_async] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_update_provisioning_config_sync.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_update_provisioning_config_sync.py deleted file mode 100644 index 43acf5dd1527..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_update_provisioning_config_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateProvisioningConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_UpdateProvisioningConfig_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -def sample_update_provisioning_config(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.UpdateProvisioningConfigRequest( - ) - - # Make the request - response = client.update_provisioning_config(request=request) - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_UpdateProvisioningConfig_sync] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_update_volume_async.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_update_volume_async.py deleted file mode 100644 index 6c6e30d962b9..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_update_volume_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateVolume -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_UpdateVolume_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -async def sample_update_volume(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionAsyncClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.UpdateVolumeRequest( - ) - - # Make the request - operation = client.update_volume(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_UpdateVolume_async] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_update_volume_sync.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_update_volume_sync.py deleted file mode 100644 index 1194778b4b34..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/baremetalsolution_v2_generated_bare_metal_solution_update_volume_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateVolume -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bare-metal-solution - - -# [START baremetalsolution_v2_generated_BareMetalSolution_UpdateVolume_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bare_metal_solution_v2 - - -def sample_update_volume(): - # Create a client - client = bare_metal_solution_v2.BareMetalSolutionClient() - - # Initialize request argument(s) - request = bare_metal_solution_v2.UpdateVolumeRequest( - ) - - # Make the request - operation = client.update_volume(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END baremetalsolution_v2_generated_BareMetalSolution_UpdateVolume_sync] diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/snippet_metadata_google.cloud.baremetalsolution.v2.json b/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/snippet_metadata_google.cloud.baremetalsolution.v2.json deleted file mode 100644 index 2e341fc16178..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/samples/generated_samples/snippet_metadata_google.cloud.baremetalsolution.v2.json +++ /dev/null @@ -1,7223 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.baremetalsolution.v2", - "version": "v2" - } - ], - "language": "PYTHON", - "name": "google-cloud-bare-metal-solution", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", - "shortName": "BareMetalSolutionAsyncClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.create_nfs_share", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.CreateNfsShare", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "CreateNfsShare" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.CreateNfsShareRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "nfs_share", - "type": "google.cloud.bare_metal_solution_v2.types.NfsShare" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_nfs_share" - }, - "description": "Sample for CreateNfsShare", - "file": "baremetalsolution_v2_generated_bare_metal_solution_create_nfs_share_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_CreateNfsShare_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_create_nfs_share_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", - "shortName": "BareMetalSolutionClient" - }, - 
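The snippet metadata JSON being deleted here indexes every sample above: each entry records the client method, its parameters and result type, and ``segments`` that map line ranges of the sample file to phases such as CLIENT_INITIALIZATION and REQUEST_EXECUTION. A sketch of consuming that metadata using only the fields visible in the JSON itself (the filename is the one in the diff header):

.. code-block:: python

    import json

    # Print the REQUEST_INITIALIZATION line range for each sample file
    # the metadata describes. Field names follow the JSON shown here.
    with open("snippet_metadata_google.cloud.baremetalsolution.v2.json") as f:
        metadata = json.load(f)

    for snippet in metadata["snippets"]:
        for segment in snippet["segments"]:
            if segment["type"] == "REQUEST_INITIALIZATION":
                print(snippet["file"], segment["start"], segment["end"])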
"fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.create_nfs_share", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.CreateNfsShare", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "CreateNfsShare" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.CreateNfsShareRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "nfs_share", - "type": "google.cloud.bare_metal_solution_v2.types.NfsShare" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_nfs_share" - }, - "description": "Sample for CreateNfsShare", - "file": "baremetalsolution_v2_generated_bare_metal_solution_create_nfs_share_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_CreateNfsShare_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_create_nfs_share_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", - "shortName": "BareMetalSolutionAsyncClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.create_provisioning_config", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.CreateProvisioningConfig", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "CreateProvisioningConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.CreateProvisioningConfigRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "provisioning_config", - "type": "google.cloud.bare_metal_solution_v2.types.ProvisioningConfig" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.types.ProvisioningConfig", - "shortName": "create_provisioning_config" - }, - "description": "Sample for CreateProvisioningConfig", - "file": "baremetalsolution_v2_generated_bare_metal_solution_create_provisioning_config_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_CreateProvisioningConfig_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, 
- { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_create_provisioning_config_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", - "shortName": "BareMetalSolutionClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.create_provisioning_config", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.CreateProvisioningConfig", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "CreateProvisioningConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.CreateProvisioningConfigRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "provisioning_config", - "type": "google.cloud.bare_metal_solution_v2.types.ProvisioningConfig" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.types.ProvisioningConfig", - "shortName": "create_provisioning_config" - }, - "description": "Sample for CreateProvisioningConfig", - "file": "baremetalsolution_v2_generated_bare_metal_solution_create_provisioning_config_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_CreateProvisioningConfig_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_create_provisioning_config_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", - "shortName": "BareMetalSolutionAsyncClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.create_ssh_key", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.CreateSSHKey", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "CreateSSHKey" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.CreateSSHKeyRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "ssh_key", - "type": "google.cloud.bare_metal_solution_v2.types.SSHKey" - }, - { - "name": "ssh_key_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.types.SSHKey", - "shortName": "create_ssh_key" - }, - "description": "Sample for CreateSSHKey", - "file": "baremetalsolution_v2_generated_bare_metal_solution_create_ssh_key_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - 
"regionTag": "baremetalsolution_v2_generated_BareMetalSolution_CreateSSHKey_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_create_ssh_key_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", - "shortName": "BareMetalSolutionClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.create_ssh_key", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.CreateSSHKey", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "CreateSSHKey" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.CreateSSHKeyRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "ssh_key", - "type": "google.cloud.bare_metal_solution_v2.types.SSHKey" - }, - { - "name": "ssh_key_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.types.SSHKey", - "shortName": "create_ssh_key" - }, - "description": "Sample for CreateSSHKey", - "file": "baremetalsolution_v2_generated_bare_metal_solution_create_ssh_key_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_CreateSSHKey_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_create_ssh_key_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", - "shortName": "BareMetalSolutionAsyncClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.create_volume_snapshot", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.CreateVolumeSnapshot", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "CreateVolumeSnapshot" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.CreateVolumeSnapshotRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "volume_snapshot", - "type": "google.cloud.bare_metal_solution_v2.types.VolumeSnapshot" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, 
bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.types.VolumeSnapshot", - "shortName": "create_volume_snapshot" - }, - "description": "Sample for CreateVolumeSnapshot", - "file": "baremetalsolution_v2_generated_bare_metal_solution_create_volume_snapshot_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_CreateVolumeSnapshot_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_create_volume_snapshot_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", - "shortName": "BareMetalSolutionClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.create_volume_snapshot", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.CreateVolumeSnapshot", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "CreateVolumeSnapshot" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.CreateVolumeSnapshotRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "volume_snapshot", - "type": "google.cloud.bare_metal_solution_v2.types.VolumeSnapshot" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.types.VolumeSnapshot", - "shortName": "create_volume_snapshot" - }, - "description": "Sample for CreateVolumeSnapshot", - "file": "baremetalsolution_v2_generated_bare_metal_solution_create_volume_snapshot_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_CreateVolumeSnapshot_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_create_volume_snapshot_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", - "shortName": "BareMetalSolutionAsyncClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.delete_nfs_share", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.DeleteNfsShare", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "DeleteNfsShare" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.bare_metal_solution_v2.types.DeleteNfsShareRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_nfs_share" - }, - "description": "Sample for DeleteNfsShare", - "file": "baremetalsolution_v2_generated_bare_metal_solution_delete_nfs_share_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_DeleteNfsShare_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_delete_nfs_share_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", - "shortName": "BareMetalSolutionClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.delete_nfs_share", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.DeleteNfsShare", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "DeleteNfsShare" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.DeleteNfsShareRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_nfs_share" - }, - "description": "Sample for DeleteNfsShare", - "file": "baremetalsolution_v2_generated_bare_metal_solution_delete_nfs_share_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_DeleteNfsShare_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_delete_nfs_share_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", - "shortName": "BareMetalSolutionAsyncClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.delete_ssh_key", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.DeleteSSHKey", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "DeleteSSHKey" - }, - "parameters": [ 
- { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.DeleteSSHKeyRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_ssh_key" - }, - "description": "Sample for DeleteSSHKey", - "file": "baremetalsolution_v2_generated_bare_metal_solution_delete_ssh_key_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_DeleteSSHKey_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_delete_ssh_key_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", - "shortName": "BareMetalSolutionClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.delete_ssh_key", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.DeleteSSHKey", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "DeleteSSHKey" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.DeleteSSHKeyRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_ssh_key" - }, - "description": "Sample for DeleteSSHKey", - "file": "baremetalsolution_v2_generated_bare_metal_solution_delete_ssh_key_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_DeleteSSHKey_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_delete_ssh_key_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", - "shortName": "BareMetalSolutionAsyncClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.delete_volume_snapshot", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.DeleteVolumeSnapshot", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "DeleteVolumeSnapshot" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.DeleteVolumeSnapshotRequest" - }, - { - "name": "name", - "type": 
"str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_volume_snapshot" - }, - "description": "Sample for DeleteVolumeSnapshot", - "file": "baremetalsolution_v2_generated_bare_metal_solution_delete_volume_snapshot_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_DeleteVolumeSnapshot_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_delete_volume_snapshot_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", - "shortName": "BareMetalSolutionClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.delete_volume_snapshot", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.DeleteVolumeSnapshot", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "DeleteVolumeSnapshot" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.DeleteVolumeSnapshotRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_volume_snapshot" - }, - "description": "Sample for DeleteVolumeSnapshot", - "file": "baremetalsolution_v2_generated_bare_metal_solution_delete_volume_snapshot_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_DeleteVolumeSnapshot_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_delete_volume_snapshot_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", - "shortName": "BareMetalSolutionAsyncClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.detach_lun", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.DetachLun", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "DetachLun" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.DetachLunRequest" - }, - { - "name": "instance", - "type": "str" - }, - { - "name": "lun", - "type": "str" - }, - { - "name": 
"retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "detach_lun" - }, - "description": "Sample for DetachLun", - "file": "baremetalsolution_v2_generated_bare_metal_solution_detach_lun_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_DetachLun_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_detach_lun_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", - "shortName": "BareMetalSolutionClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.detach_lun", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.DetachLun", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "DetachLun" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.DetachLunRequest" - }, - { - "name": "instance", - "type": "str" - }, - { - "name": "lun", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "detach_lun" - }, - "description": "Sample for DetachLun", - "file": "baremetalsolution_v2_generated_bare_metal_solution_detach_lun_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_DetachLun_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_detach_lun_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", - "shortName": "BareMetalSolutionAsyncClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.disable_interactive_serial_console", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.DisableInteractiveSerialConsole", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "DisableInteractiveSerialConsole" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.bare_metal_solution_v2.types.DisableInteractiveSerialConsoleRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "disable_interactive_serial_console" - }, - "description": "Sample for DisableInteractiveSerialConsole", - "file": "baremetalsolution_v2_generated_bare_metal_solution_disable_interactive_serial_console_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_DisableInteractiveSerialConsole_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_disable_interactive_serial_console_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", - "shortName": "BareMetalSolutionClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.disable_interactive_serial_console", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.DisableInteractiveSerialConsole", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "DisableInteractiveSerialConsole" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.DisableInteractiveSerialConsoleRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "disable_interactive_serial_console" - }, - "description": "Sample for DisableInteractiveSerialConsole", - "file": "baremetalsolution_v2_generated_bare_metal_solution_disable_interactive_serial_console_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_DisableInteractiveSerialConsole_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_disable_interactive_serial_console_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", - "shortName": "BareMetalSolutionAsyncClient" - }, - "fullName": 
"google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.enable_interactive_serial_console", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.EnableInteractiveSerialConsole", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "EnableInteractiveSerialConsole" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.EnableInteractiveSerialConsoleRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "enable_interactive_serial_console" - }, - "description": "Sample for EnableInteractiveSerialConsole", - "file": "baremetalsolution_v2_generated_bare_metal_solution_enable_interactive_serial_console_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_EnableInteractiveSerialConsole_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_enable_interactive_serial_console_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", - "shortName": "BareMetalSolutionClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.enable_interactive_serial_console", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.EnableInteractiveSerialConsole", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "EnableInteractiveSerialConsole" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.EnableInteractiveSerialConsoleRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "enable_interactive_serial_console" - }, - "description": "Sample for EnableInteractiveSerialConsole", - "file": "baremetalsolution_v2_generated_bare_metal_solution_enable_interactive_serial_console_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_EnableInteractiveSerialConsole_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - 
], - "title": "baremetalsolution_v2_generated_bare_metal_solution_enable_interactive_serial_console_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", - "shortName": "BareMetalSolutionAsyncClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.evict_lun", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.EvictLun", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "EvictLun" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.EvictLunRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "evict_lun" - }, - "description": "Sample for EvictLun", - "file": "baremetalsolution_v2_generated_bare_metal_solution_evict_lun_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_EvictLun_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_evict_lun_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", - "shortName": "BareMetalSolutionClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.evict_lun", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.EvictLun", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "EvictLun" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.EvictLunRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "evict_lun" - }, - "description": "Sample for EvictLun", - "file": "baremetalsolution_v2_generated_bare_metal_solution_evict_lun_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_EvictLun_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"baremetalsolution_v2_generated_bare_metal_solution_evict_lun_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", - "shortName": "BareMetalSolutionAsyncClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.evict_volume", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.EvictVolume", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "EvictVolume" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.EvictVolumeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "evict_volume" - }, - "description": "Sample for EvictVolume", - "file": "baremetalsolution_v2_generated_bare_metal_solution_evict_volume_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_EvictVolume_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_evict_volume_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", - "shortName": "BareMetalSolutionClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.evict_volume", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.EvictVolume", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "EvictVolume" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.EvictVolumeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "evict_volume" - }, - "description": "Sample for EvictVolume", - "file": "baremetalsolution_v2_generated_bare_metal_solution_evict_volume_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_EvictVolume_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"baremetalsolution_v2_generated_bare_metal_solution_evict_volume_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", - "shortName": "BareMetalSolutionAsyncClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.get_instance", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.GetInstance", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "GetInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.GetInstanceRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.types.Instance", - "shortName": "get_instance" - }, - "description": "Sample for GetInstance", - "file": "baremetalsolution_v2_generated_bare_metal_solution_get_instance_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_GetInstance_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_get_instance_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", - "shortName": "BareMetalSolutionClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.get_instance", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.GetInstance", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "GetInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.GetInstanceRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.types.Instance", - "shortName": "get_instance" - }, - "description": "Sample for GetInstance", - "file": "baremetalsolution_v2_generated_bare_metal_solution_get_instance_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_GetInstance_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"baremetalsolution_v2_generated_bare_metal_solution_get_instance_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", - "shortName": "BareMetalSolutionAsyncClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.get_lun", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.GetLun", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "GetLun" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.GetLunRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.types.Lun", - "shortName": "get_lun" - }, - "description": "Sample for GetLun", - "file": "baremetalsolution_v2_generated_bare_metal_solution_get_lun_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_GetLun_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_get_lun_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", - "shortName": "BareMetalSolutionClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.get_lun", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.GetLun", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "GetLun" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.GetLunRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.types.Lun", - "shortName": "get_lun" - }, - "description": "Sample for GetLun", - "file": "baremetalsolution_v2_generated_bare_metal_solution_get_lun_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_GetLun_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_get_lun_sync.py" - }, - { - "canonical": 
true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", - "shortName": "BareMetalSolutionAsyncClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.get_network", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.GetNetwork", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "GetNetwork" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.GetNetworkRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.types.Network", - "shortName": "get_network" - }, - "description": "Sample for GetNetwork", - "file": "baremetalsolution_v2_generated_bare_metal_solution_get_network_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_GetNetwork_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_get_network_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", - "shortName": "BareMetalSolutionClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.get_network", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.GetNetwork", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "GetNetwork" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.GetNetworkRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.types.Network", - "shortName": "get_network" - }, - "description": "Sample for GetNetwork", - "file": "baremetalsolution_v2_generated_bare_metal_solution_get_network_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_GetNetwork_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_get_network_sync.py" - }, - { - "canonical": true, - 
"clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", - "shortName": "BareMetalSolutionAsyncClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.get_nfs_share", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.GetNfsShare", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "GetNfsShare" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.GetNfsShareRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.types.NfsShare", - "shortName": "get_nfs_share" - }, - "description": "Sample for GetNfsShare", - "file": "baremetalsolution_v2_generated_bare_metal_solution_get_nfs_share_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_GetNfsShare_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_get_nfs_share_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", - "shortName": "BareMetalSolutionClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.get_nfs_share", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.GetNfsShare", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "GetNfsShare" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.GetNfsShareRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.types.NfsShare", - "shortName": "get_nfs_share" - }, - "description": "Sample for GetNfsShare", - "file": "baremetalsolution_v2_generated_bare_metal_solution_get_nfs_share_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_GetNfsShare_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_get_nfs_share_sync.py" - }, - { - "canonical": 
true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", - "shortName": "BareMetalSolutionAsyncClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.get_provisioning_config", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.GetProvisioningConfig", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "GetProvisioningConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.GetProvisioningConfigRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.types.ProvisioningConfig", - "shortName": "get_provisioning_config" - }, - "description": "Sample for GetProvisioningConfig", - "file": "baremetalsolution_v2_generated_bare_metal_solution_get_provisioning_config_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_GetProvisioningConfig_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_get_provisioning_config_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", - "shortName": "BareMetalSolutionClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.get_provisioning_config", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.GetProvisioningConfig", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "GetProvisioningConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.GetProvisioningConfigRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.types.ProvisioningConfig", - "shortName": "get_provisioning_config" - }, - "description": "Sample for GetProvisioningConfig", - "file": "baremetalsolution_v2_generated_bare_metal_solution_get_provisioning_config_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_GetProvisioningConfig_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": 
"REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_get_provisioning_config_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", - "shortName": "BareMetalSolutionAsyncClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.get_volume_snapshot", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.GetVolumeSnapshot", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "GetVolumeSnapshot" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.GetVolumeSnapshotRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.types.VolumeSnapshot", - "shortName": "get_volume_snapshot" - }, - "description": "Sample for GetVolumeSnapshot", - "file": "baremetalsolution_v2_generated_bare_metal_solution_get_volume_snapshot_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_GetVolumeSnapshot_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_get_volume_snapshot_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", - "shortName": "BareMetalSolutionClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.get_volume_snapshot", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.GetVolumeSnapshot", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "GetVolumeSnapshot" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.GetVolumeSnapshotRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.types.VolumeSnapshot", - "shortName": "get_volume_snapshot" - }, - "description": "Sample for GetVolumeSnapshot", - "file": "baremetalsolution_v2_generated_bare_metal_solution_get_volume_snapshot_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_GetVolumeSnapshot_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": 
"CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_get_volume_snapshot_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", - "shortName": "BareMetalSolutionAsyncClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.get_volume", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.GetVolume", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "GetVolume" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.GetVolumeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.types.Volume", - "shortName": "get_volume" - }, - "description": "Sample for GetVolume", - "file": "baremetalsolution_v2_generated_bare_metal_solution_get_volume_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_GetVolume_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_get_volume_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", - "shortName": "BareMetalSolutionClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.get_volume", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.GetVolume", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "GetVolume" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.GetVolumeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.types.Volume", - "shortName": "get_volume" - }, - "description": "Sample for GetVolume", - "file": "baremetalsolution_v2_generated_bare_metal_solution_get_volume_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_GetVolume_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { 
- "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_get_volume_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", - "shortName": "BareMetalSolutionAsyncClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.list_instances", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListInstances", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "ListInstances" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.ListInstancesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListInstancesAsyncPager", - "shortName": "list_instances" - }, - "description": "Sample for ListInstances", - "file": "baremetalsolution_v2_generated_bare_metal_solution_list_instances_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListInstances_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_list_instances_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", - "shortName": "BareMetalSolutionClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.list_instances", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListInstances", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "ListInstances" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.ListInstancesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListInstancesPager", - "shortName": "list_instances" - }, - "description": "Sample for ListInstances", - "file": "baremetalsolution_v2_generated_bare_metal_solution_list_instances_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListInstances_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - 
"end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_list_instances_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", - "shortName": "BareMetalSolutionAsyncClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.list_luns", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListLuns", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "ListLuns" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.ListLunsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListLunsAsyncPager", - "shortName": "list_luns" - }, - "description": "Sample for ListLuns", - "file": "baremetalsolution_v2_generated_bare_metal_solution_list_luns_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListLuns_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_list_luns_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", - "shortName": "BareMetalSolutionClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.list_luns", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListLuns", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "ListLuns" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.ListLunsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListLunsPager", - "shortName": "list_luns" - }, - "description": "Sample for ListLuns", - "file": "baremetalsolution_v2_generated_bare_metal_solution_list_luns_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListLuns_sync", - "segments": [ - { - "end": 52, - "start": 27, 
- "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_list_luns_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", - "shortName": "BareMetalSolutionAsyncClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.list_network_usage", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListNetworkUsage", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "ListNetworkUsage" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.ListNetworkUsageRequest" - }, - { - "name": "location", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.types.ListNetworkUsageResponse", - "shortName": "list_network_usage" - }, - "description": "Sample for ListNetworkUsage", - "file": "baremetalsolution_v2_generated_bare_metal_solution_list_network_usage_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListNetworkUsage_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_list_network_usage_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", - "shortName": "BareMetalSolutionClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.list_network_usage", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListNetworkUsage", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "ListNetworkUsage" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.ListNetworkUsageRequest" - }, - { - "name": "location", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.types.ListNetworkUsageResponse", - "shortName": "list_network_usage" - }, - "description": "Sample for ListNetworkUsage", - "file": "baremetalsolution_v2_generated_bare_metal_solution_list_network_usage_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - 
"regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListNetworkUsage_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_list_network_usage_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", - "shortName": "BareMetalSolutionAsyncClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.list_networks", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListNetworks", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "ListNetworks" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.ListNetworksRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListNetworksAsyncPager", - "shortName": "list_networks" - }, - "description": "Sample for ListNetworks", - "file": "baremetalsolution_v2_generated_bare_metal_solution_list_networks_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListNetworks_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_list_networks_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", - "shortName": "BareMetalSolutionClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.list_networks", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListNetworks", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "ListNetworks" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.ListNetworksRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListNetworksPager", - "shortName": "list_networks" - }, - "description": "Sample for ListNetworks", - "file": 
"baremetalsolution_v2_generated_bare_metal_solution_list_networks_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListNetworks_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_list_networks_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", - "shortName": "BareMetalSolutionAsyncClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.list_nfs_shares", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListNfsShares", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "ListNfsShares" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.ListNfsSharesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListNfsSharesAsyncPager", - "shortName": "list_nfs_shares" - }, - "description": "Sample for ListNfsShares", - "file": "baremetalsolution_v2_generated_bare_metal_solution_list_nfs_shares_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListNfsShares_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_list_nfs_shares_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", - "shortName": "BareMetalSolutionClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.list_nfs_shares", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListNfsShares", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "ListNfsShares" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.ListNfsSharesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": 
"google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListNfsSharesPager", - "shortName": "list_nfs_shares" - }, - "description": "Sample for ListNfsShares", - "file": "baremetalsolution_v2_generated_bare_metal_solution_list_nfs_shares_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListNfsShares_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_list_nfs_shares_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", - "shortName": "BareMetalSolutionAsyncClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.list_os_images", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListOSImages", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "ListOSImages" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.ListOSImagesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListOSImagesAsyncPager", - "shortName": "list_os_images" - }, - "description": "Sample for ListOSImages", - "file": "baremetalsolution_v2_generated_bare_metal_solution_list_os_images_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListOSImages_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_list_os_images_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", - "shortName": "BareMetalSolutionClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.list_os_images", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListOSImages", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "ListOSImages" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.ListOSImagesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - 
"type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListOSImagesPager", - "shortName": "list_os_images" - }, - "description": "Sample for ListOSImages", - "file": "baremetalsolution_v2_generated_bare_metal_solution_list_os_images_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListOSImages_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_list_os_images_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", - "shortName": "BareMetalSolutionAsyncClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.list_provisioning_quotas", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListProvisioningQuotas", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "ListProvisioningQuotas" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.ListProvisioningQuotasRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListProvisioningQuotasAsyncPager", - "shortName": "list_provisioning_quotas" - }, - "description": "Sample for ListProvisioningQuotas", - "file": "baremetalsolution_v2_generated_bare_metal_solution_list_provisioning_quotas_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListProvisioningQuotas_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_list_provisioning_quotas_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", - "shortName": "BareMetalSolutionClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.list_provisioning_quotas", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListProvisioningQuotas", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "ListProvisioningQuotas" - }, - "parameters": [ - 
{ - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.ListProvisioningQuotasRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListProvisioningQuotasPager", - "shortName": "list_provisioning_quotas" - }, - "description": "Sample for ListProvisioningQuotas", - "file": "baremetalsolution_v2_generated_bare_metal_solution_list_provisioning_quotas_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListProvisioningQuotas_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_list_provisioning_quotas_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", - "shortName": "BareMetalSolutionAsyncClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.list_ssh_keys", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListSSHKeys", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "ListSSHKeys" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.ListSSHKeysRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListSSHKeysAsyncPager", - "shortName": "list_ssh_keys" - }, - "description": "Sample for ListSSHKeys", - "file": "baremetalsolution_v2_generated_bare_metal_solution_list_ssh_keys_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListSSHKeys_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_list_ssh_keys_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", - "shortName": "BareMetalSolutionClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.list_ssh_keys", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListSSHKeys", - 
"service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "ListSSHKeys" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.ListSSHKeysRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListSSHKeysPager", - "shortName": "list_ssh_keys" - }, - "description": "Sample for ListSSHKeys", - "file": "baremetalsolution_v2_generated_bare_metal_solution_list_ssh_keys_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListSSHKeys_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_list_ssh_keys_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", - "shortName": "BareMetalSolutionAsyncClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.list_volume_snapshots", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListVolumeSnapshots", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "ListVolumeSnapshots" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.ListVolumeSnapshotsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListVolumeSnapshotsAsyncPager", - "shortName": "list_volume_snapshots" - }, - "description": "Sample for ListVolumeSnapshots", - "file": "baremetalsolution_v2_generated_bare_metal_solution_list_volume_snapshots_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListVolumeSnapshots_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_list_volume_snapshots_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", - "shortName": "BareMetalSolutionClient" - }, - "fullName": 
"google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.list_volume_snapshots", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListVolumeSnapshots", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "ListVolumeSnapshots" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.ListVolumeSnapshotsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListVolumeSnapshotsPager", - "shortName": "list_volume_snapshots" - }, - "description": "Sample for ListVolumeSnapshots", - "file": "baremetalsolution_v2_generated_bare_metal_solution_list_volume_snapshots_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListVolumeSnapshots_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_list_volume_snapshots_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", - "shortName": "BareMetalSolutionAsyncClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.list_volumes", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListVolumes", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "ListVolumes" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.ListVolumesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListVolumesAsyncPager", - "shortName": "list_volumes" - }, - "description": "Sample for ListVolumes", - "file": "baremetalsolution_v2_generated_bare_metal_solution_list_volumes_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListVolumes_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_list_volumes_async.py" - }, - { - "canonical": true, - 
"clientMethod": { - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", - "shortName": "BareMetalSolutionClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.list_volumes", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ListVolumes", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "ListVolumes" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.ListVolumesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.services.bare_metal_solution.pagers.ListVolumesPager", - "shortName": "list_volumes" - }, - "description": "Sample for ListVolumes", - "file": "baremetalsolution_v2_generated_bare_metal_solution_list_volumes_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ListVolumes_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_list_volumes_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", - "shortName": "BareMetalSolutionAsyncClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.rename_instance", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.RenameInstance", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "RenameInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.RenameInstanceRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "new_instance_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.types.Instance", - "shortName": "rename_instance" - }, - "description": "Sample for RenameInstance", - "file": "baremetalsolution_v2_generated_bare_metal_solution_rename_instance_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_RenameInstance_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - 
"title": "baremetalsolution_v2_generated_bare_metal_solution_rename_instance_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", - "shortName": "BareMetalSolutionClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.rename_instance", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.RenameInstance", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "RenameInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.RenameInstanceRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "new_instance_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.types.Instance", - "shortName": "rename_instance" - }, - "description": "Sample for RenameInstance", - "file": "baremetalsolution_v2_generated_bare_metal_solution_rename_instance_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_RenameInstance_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_rename_instance_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", - "shortName": "BareMetalSolutionAsyncClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.rename_network", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.RenameNetwork", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "RenameNetwork" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.RenameNetworkRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "new_network_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.types.Network", - "shortName": "rename_network" - }, - "description": "Sample for RenameNetwork", - "file": "baremetalsolution_v2_generated_bare_metal_solution_rename_network_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_RenameNetwork_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": 
"REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_rename_network_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", - "shortName": "BareMetalSolutionClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.rename_network", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.RenameNetwork", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "RenameNetwork" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.RenameNetworkRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "new_network_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.types.Network", - "shortName": "rename_network" - }, - "description": "Sample for RenameNetwork", - "file": "baremetalsolution_v2_generated_bare_metal_solution_rename_network_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_RenameNetwork_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_rename_network_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", - "shortName": "BareMetalSolutionAsyncClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.rename_nfs_share", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.RenameNfsShare", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "RenameNfsShare" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.RenameNfsShareRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "new_nfsshare_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.types.NfsShare", - "shortName": "rename_nfs_share" - }, - "description": "Sample for RenameNfsShare", - "file": "baremetalsolution_v2_generated_bare_metal_solution_rename_nfs_share_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_RenameNfsShare_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, 
- "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_rename_nfs_share_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", - "shortName": "BareMetalSolutionClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.rename_nfs_share", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.RenameNfsShare", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "RenameNfsShare" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.RenameNfsShareRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "new_nfsshare_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.types.NfsShare", - "shortName": "rename_nfs_share" - }, - "description": "Sample for RenameNfsShare", - "file": "baremetalsolution_v2_generated_bare_metal_solution_rename_nfs_share_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_RenameNfsShare_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_rename_nfs_share_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", - "shortName": "BareMetalSolutionAsyncClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.rename_volume", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.RenameVolume", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "RenameVolume" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.RenameVolumeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "new_volume_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.types.Volume", - "shortName": "rename_volume" - }, - "description": "Sample for RenameVolume", - "file": "baremetalsolution_v2_generated_bare_metal_solution_rename_volume_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"baremetalsolution_v2_generated_BareMetalSolution_RenameVolume_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_rename_volume_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", - "shortName": "BareMetalSolutionClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.rename_volume", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.RenameVolume", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "RenameVolume" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.RenameVolumeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "new_volume_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.types.Volume", - "shortName": "rename_volume" - }, - "description": "Sample for RenameVolume", - "file": "baremetalsolution_v2_generated_bare_metal_solution_rename_volume_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_RenameVolume_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_rename_volume_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", - "shortName": "BareMetalSolutionAsyncClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.reset_instance", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ResetInstance", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "ResetInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.ResetInstanceRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "reset_instance" - }, - "description": "Sample for ResetInstance", - "file": "baremetalsolution_v2_generated_bare_metal_solution_reset_instance_async.py", 
- "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ResetInstance_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_reset_instance_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", - "shortName": "BareMetalSolutionClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.reset_instance", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ResetInstance", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "ResetInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.ResetInstanceRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "reset_instance" - }, - "description": "Sample for ResetInstance", - "file": "baremetalsolution_v2_generated_bare_metal_solution_reset_instance_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ResetInstance_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_reset_instance_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", - "shortName": "BareMetalSolutionAsyncClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.resize_volume", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ResizeVolume", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "ResizeVolume" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.ResizeVolumeRequest" - }, - { - "name": "volume", - "type": "str" - }, - { - "name": "size_gib", - "type": "int" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "resize_volume" - }, - "description": "Sample for ResizeVolume", - "file": 
"baremetalsolution_v2_generated_bare_metal_solution_resize_volume_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ResizeVolume_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_resize_volume_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", - "shortName": "BareMetalSolutionClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.resize_volume", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.ResizeVolume", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "ResizeVolume" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.ResizeVolumeRequest" - }, - { - "name": "volume", - "type": "str" - }, - { - "name": "size_gib", - "type": "int" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "resize_volume" - }, - "description": "Sample for ResizeVolume", - "file": "baremetalsolution_v2_generated_bare_metal_solution_resize_volume_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_ResizeVolume_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_resize_volume_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", - "shortName": "BareMetalSolutionAsyncClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.restore_volume_snapshot", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.RestoreVolumeSnapshot", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "RestoreVolumeSnapshot" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.RestoreVolumeSnapshotRequest" - }, - { - "name": "volume_snapshot", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": 
"google.api_core.operation_async.AsyncOperation", - "shortName": "restore_volume_snapshot" - }, - "description": "Sample for RestoreVolumeSnapshot", - "file": "baremetalsolution_v2_generated_bare_metal_solution_restore_volume_snapshot_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_RestoreVolumeSnapshot_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_restore_volume_snapshot_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", - "shortName": "BareMetalSolutionClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.restore_volume_snapshot", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.RestoreVolumeSnapshot", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "RestoreVolumeSnapshot" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.RestoreVolumeSnapshotRequest" - }, - { - "name": "volume_snapshot", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "restore_volume_snapshot" - }, - "description": "Sample for RestoreVolumeSnapshot", - "file": "baremetalsolution_v2_generated_bare_metal_solution_restore_volume_snapshot_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_RestoreVolumeSnapshot_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_restore_volume_snapshot_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", - "shortName": "BareMetalSolutionAsyncClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.start_instance", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.StartInstance", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "StartInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.StartInstanceRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - 
"name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "start_instance" - }, - "description": "Sample for StartInstance", - "file": "baremetalsolution_v2_generated_bare_metal_solution_start_instance_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_StartInstance_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_start_instance_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", - "shortName": "BareMetalSolutionClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.start_instance", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.StartInstance", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "StartInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.StartInstanceRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "start_instance" - }, - "description": "Sample for StartInstance", - "file": "baremetalsolution_v2_generated_bare_metal_solution_start_instance_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_StartInstance_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_start_instance_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", - "shortName": "BareMetalSolutionAsyncClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.stop_instance", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.StopInstance", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "StopInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.StopInstanceRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" 
- }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "stop_instance" - }, - "description": "Sample for StopInstance", - "file": "baremetalsolution_v2_generated_bare_metal_solution_stop_instance_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_StopInstance_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_stop_instance_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", - "shortName": "BareMetalSolutionClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.stop_instance", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.StopInstance", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "StopInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.StopInstanceRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "stop_instance" - }, - "description": "Sample for StopInstance", - "file": "baremetalsolution_v2_generated_bare_metal_solution_stop_instance_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_StopInstance_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_stop_instance_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", - "shortName": "BareMetalSolutionAsyncClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.submit_provisioning_config", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.SubmitProvisioningConfig", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "SubmitProvisioningConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.SubmitProvisioningConfigRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": 
"provisioning_config", - "type": "google.cloud.bare_metal_solution_v2.types.ProvisioningConfig" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.types.SubmitProvisioningConfigResponse", - "shortName": "submit_provisioning_config" - }, - "description": "Sample for SubmitProvisioningConfig", - "file": "baremetalsolution_v2_generated_bare_metal_solution_submit_provisioning_config_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_SubmitProvisioningConfig_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_submit_provisioning_config_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", - "shortName": "BareMetalSolutionClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.submit_provisioning_config", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.SubmitProvisioningConfig", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "SubmitProvisioningConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.SubmitProvisioningConfigRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "provisioning_config", - "type": "google.cloud.bare_metal_solution_v2.types.ProvisioningConfig" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.types.SubmitProvisioningConfigResponse", - "shortName": "submit_provisioning_config" - }, - "description": "Sample for SubmitProvisioningConfig", - "file": "baremetalsolution_v2_generated_bare_metal_solution_submit_provisioning_config_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_SubmitProvisioningConfig_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_submit_provisioning_config_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", - "shortName": "BareMetalSolutionAsyncClient" - }, - "fullName": 
"google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.update_instance", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.UpdateInstance", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "UpdateInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.UpdateInstanceRequest" - }, - { - "name": "instance", - "type": "google.cloud.bare_metal_solution_v2.types.Instance" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_instance" - }, - "description": "Sample for UpdateInstance", - "file": "baremetalsolution_v2_generated_bare_metal_solution_update_instance_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_UpdateInstance_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_update_instance_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", - "shortName": "BareMetalSolutionClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.update_instance", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.UpdateInstance", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "UpdateInstance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.UpdateInstanceRequest" - }, - { - "name": "instance", - "type": "google.cloud.bare_metal_solution_v2.types.Instance" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_instance" - }, - "description": "Sample for UpdateInstance", - "file": "baremetalsolution_v2_generated_bare_metal_solution_update_instance_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_UpdateInstance_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - 
"title": "baremetalsolution_v2_generated_bare_metal_solution_update_instance_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", - "shortName": "BareMetalSolutionAsyncClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.update_network", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.UpdateNetwork", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "UpdateNetwork" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.UpdateNetworkRequest" - }, - { - "name": "network", - "type": "google.cloud.bare_metal_solution_v2.types.Network" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_network" - }, - "description": "Sample for UpdateNetwork", - "file": "baremetalsolution_v2_generated_bare_metal_solution_update_network_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_UpdateNetwork_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_update_network_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", - "shortName": "BareMetalSolutionClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.update_network", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.UpdateNetwork", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "UpdateNetwork" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.UpdateNetworkRequest" - }, - { - "name": "network", - "type": "google.cloud.bare_metal_solution_v2.types.Network" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_network" - }, - "description": "Sample for UpdateNetwork", - "file": "baremetalsolution_v2_generated_bare_metal_solution_update_network_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_UpdateNetwork_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" 
- }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_update_network_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", - "shortName": "BareMetalSolutionAsyncClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.update_nfs_share", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.UpdateNfsShare", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "UpdateNfsShare" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.UpdateNfsShareRequest" - }, - { - "name": "nfs_share", - "type": "google.cloud.bare_metal_solution_v2.types.NfsShare" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_nfs_share" - }, - "description": "Sample for UpdateNfsShare", - "file": "baremetalsolution_v2_generated_bare_metal_solution_update_nfs_share_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_UpdateNfsShare_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_update_nfs_share_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", - "shortName": "BareMetalSolutionClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.update_nfs_share", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.UpdateNfsShare", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "UpdateNfsShare" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.UpdateNfsShareRequest" - }, - { - "name": "nfs_share", - "type": "google.cloud.bare_metal_solution_v2.types.NfsShare" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_nfs_share" - }, - "description": "Sample for UpdateNfsShare", - "file": 
"baremetalsolution_v2_generated_bare_metal_solution_update_nfs_share_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_UpdateNfsShare_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_update_nfs_share_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", - "shortName": "BareMetalSolutionAsyncClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.update_provisioning_config", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.UpdateProvisioningConfig", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "UpdateProvisioningConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.UpdateProvisioningConfigRequest" - }, - { - "name": "provisioning_config", - "type": "google.cloud.bare_metal_solution_v2.types.ProvisioningConfig" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.types.ProvisioningConfig", - "shortName": "update_provisioning_config" - }, - "description": "Sample for UpdateProvisioningConfig", - "file": "baremetalsolution_v2_generated_bare_metal_solution_update_provisioning_config_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_UpdateProvisioningConfig_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_update_provisioning_config_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", - "shortName": "BareMetalSolutionClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.update_provisioning_config", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.UpdateProvisioningConfig", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "UpdateProvisioningConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.UpdateProvisioningConfigRequest" - }, - { - "name": "provisioning_config", - "type": 
"google.cloud.bare_metal_solution_v2.types.ProvisioningConfig" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bare_metal_solution_v2.types.ProvisioningConfig", - "shortName": "update_provisioning_config" - }, - "description": "Sample for UpdateProvisioningConfig", - "file": "baremetalsolution_v2_generated_bare_metal_solution_update_provisioning_config_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_UpdateProvisioningConfig_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_update_provisioning_config_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient", - "shortName": "BareMetalSolutionAsyncClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionAsyncClient.update_volume", - "method": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution.UpdateVolume", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "UpdateVolume" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.UpdateVolumeRequest" - }, - { - "name": "volume", - "type": "google.cloud.bare_metal_solution_v2.types.Volume" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_volume" - }, - "description": "Sample for UpdateVolume", - "file": "baremetalsolution_v2_generated_bare_metal_solution_update_volume_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_UpdateVolume_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_update_volume_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient", - "shortName": "BareMetalSolutionClient" - }, - "fullName": "google.cloud.bare_metal_solution_v2.BareMetalSolutionClient.update_volume", - "method": { - "fullName": 
"google.cloud.baremetalsolution.v2.BareMetalSolution.UpdateVolume", - "service": { - "fullName": "google.cloud.baremetalsolution.v2.BareMetalSolution", - "shortName": "BareMetalSolution" - }, - "shortName": "UpdateVolume" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bare_metal_solution_v2.types.UpdateVolumeRequest" - }, - { - "name": "volume", - "type": "google.cloud.bare_metal_solution_v2.types.Volume" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_volume" - }, - "description": "Sample for UpdateVolume", - "file": "baremetalsolution_v2_generated_bare_metal_solution_update_volume_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "baremetalsolution_v2_generated_BareMetalSolution_UpdateVolume_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "baremetalsolution_v2_generated_bare_metal_solution_update_volume_sync.py" - } - ] -} diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/scripts/fixup_bare_metal_solution_v2_keywords.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/scripts/fixup_bare_metal_solution_v2_keywords.py deleted file mode 100644 index b623723d8e71..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/scripts/fixup_bare_metal_solution_v2_keywords.py +++ /dev/null @@ -1,219 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class bare_metal_solutionCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_nfs_share': ('parent', 'nfs_share', ), - 'create_provisioning_config': ('parent', 'provisioning_config', 'email', ), - 'create_ssh_key': ('parent', 'ssh_key', 'ssh_key_id', ), - 'create_volume_snapshot': ('parent', 'volume_snapshot', ), - 'delete_nfs_share': ('name', ), - 'delete_ssh_key': ('name', ), - 'delete_volume_snapshot': ('name', ), - 'detach_lun': ('instance', 'lun', 'skip_reboot', ), - 'disable_interactive_serial_console': ('name', ), - 'enable_interactive_serial_console': ('name', ), - 'evict_lun': ('name', ), - 'evict_volume': ('name', ), - 'get_instance': ('name', ), - 'get_lun': ('name', ), - 'get_network': ('name', ), - 'get_nfs_share': ('name', ), - 'get_provisioning_config': ('name', ), - 'get_volume': ('name', ), - 'get_volume_snapshot': ('name', ), - 'list_instances': ('parent', 'page_size', 'page_token', 'filter', ), - 'list_luns': ('parent', 'page_size', 'page_token', ), - 'list_networks': ('parent', 'page_size', 'page_token', 'filter', ), - 'list_network_usage': ('location', ), - 'list_nfs_shares': ('parent', 'page_size', 'page_token', 'filter', ), - 'list_os_images': ('parent', 'page_size', 'page_token', ), - 'list_provisioning_quotas': ('parent', 'page_size', 'page_token', ), - 'list_ssh_keys': ('parent', 'page_size', 'page_token', ), - 'list_volumes': ('parent', 'page_size', 'page_token', 'filter', ), - 'list_volume_snapshots': ('parent', 'page_size', 'page_token', ), - 'rename_instance': ('name', 'new_instance_id', ), - 'rename_network': ('name', 'new_network_id', ), - 'rename_nfs_share': ('name', 'new_nfsshare_id', ), - 'rename_volume': ('name', 'new_volume_id', ), - 'reset_instance': ('name', ), - 'resize_volume': ('volume', 'size_gib', ), - 'restore_volume_snapshot': ('volume_snapshot', ), - 'start_instance': ('name', ), - 'stop_instance': ('name', ), - 'submit_provisioning_config': ('parent', 'provisioning_config', 'email', ), - 'update_instance': ('instance', 'update_mask', ), - 'update_network': ('network', 'update_mask', ), - 'update_nfs_share': ('nfs_share', 'update_mask', ), - 'update_provisioning_config': ('provisioning_config', 'update_mask', 'email', ), - 'update_volume': ('volume', 'update_mask', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. 
- return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=bare_metal_solutionCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the bare_metal_solution client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. 
-""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/setup.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/setup.py deleted file mode 100644 index 7b03302a3355..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/setup.py +++ /dev/null @@ -1,99 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-bare-metal-solution' - - -description = "Google Cloud Bare Metal Solution API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/bare_metal_solution/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "grpc-google-iam-v1 >= 0.14.0, <1.0.0dev", -] -extras = { -} -url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bare-metal-solution" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - extras_require=extras, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-bare-metal-solution/v2/testing/constraints-3.10.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/testing/constraints-3.10.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-bare-metal-solution/v2/testing/constraints-3.11.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/testing/constraints-3.11.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-bare-metal-solution/v2/testing/constraints-3.12.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/testing/constraints-3.12.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-bare-metal-solution/v2/testing/constraints-3.13.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/testing/constraints-3.13.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-bare-metal-solution/v2/testing/constraints-3.7.txt deleted file mode 100644 index fb7e93a1b473..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/testing/constraints-3.7.txt +++ /dev/null @@ -1,11 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -proto-plus==1.22.3 -protobuf==3.20.2 -grpc-google-iam-v1==0.14.0 diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-bare-metal-solution/v2/testing/constraints-3.8.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/testing/constraints-3.8.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-bare-metal-solution/v2/testing/constraints-3.9.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/testing/constraints-3.9.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/tests/__init__.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/tests/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/tests/unit/__init__.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/tests/unit/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/tests/unit/gapic/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/tests/unit/gapic/bare_metal_solution_v2/__init__.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/tests/unit/gapic/bare_metal_solution_v2/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/tests/unit/gapic/bare_metal_solution_v2/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bare-metal-solution/v2/tests/unit/gapic/bare_metal_solution_v2/test_bare_metal_solution.py b/owl-bot-staging/google-cloud-bare-metal-solution/v2/tests/unit/gapic/bare_metal_solution_v2/test_bare_metal_solution.py deleted file mode 100644 index bf9e83cc1581..000000000000 --- a/owl-bot-staging/google-cloud-bare-metal-solution/v2/tests/unit/gapic/bare_metal_solution_v2/test_bare_metal_solution.py +++ /dev/null @@ -1,35392 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation -from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.bare_metal_solution_v2.services.bare_metal_solution import BareMetalSolutionAsyncClient -from google.cloud.bare_metal_solution_v2.services.bare_metal_solution import BareMetalSolutionClient -from google.cloud.bare_metal_solution_v2.services.bare_metal_solution import pagers -from google.cloud.bare_metal_solution_v2.services.bare_metal_solution import transports -from google.cloud.bare_metal_solution_v2.types import baremetalsolution -from google.cloud.bare_metal_solution_v2.types import common -from 
google.cloud.bare_metal_solution_v2.types import instance -from google.cloud.bare_metal_solution_v2.types import instance as gcb_instance -from google.cloud.bare_metal_solution_v2.types import lun -from google.cloud.bare_metal_solution_v2.types import network -from google.cloud.bare_metal_solution_v2.types import network as gcb_network -from google.cloud.bare_metal_solution_v2.types import nfs_share -from google.cloud.bare_metal_solution_v2.types import nfs_share as gcb_nfs_share -from google.cloud.bare_metal_solution_v2.types import osimage -from google.cloud.bare_metal_solution_v2.types import provisioning -from google.cloud.bare_metal_solution_v2.types import ssh_key -from google.cloud.bare_metal_solution_v2.types import ssh_key as gcb_ssh_key -from google.cloud.bare_metal_solution_v2.types import volume -from google.cloud.bare_metal_solution_v2.types import volume as gcb_volume -from google.cloud.bare_metal_solution_v2.types import volume_snapshot -from google.cloud.bare_metal_solution_v2.types import volume_snapshot as gcb_volume_snapshot -from google.cloud.location import locations_pb2 -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -import google.auth - - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": "service account credentials", - "principal": "service-account@example.com", -} -CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert BareMetalSolutionClient._get_default_mtls_endpoint(None) is None - assert BareMetalSolutionClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert BareMetalSolutionClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert BareMetalSolutionClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert BareMetalSolutionClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert BareMetalSolutionClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert BareMetalSolutionClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert BareMetalSolutionClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert BareMetalSolutionClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - BareMetalSolutionClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert BareMetalSolutionClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert BareMetalSolutionClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert BareMetalSolutionClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - BareMetalSolutionClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert BareMetalSolutionClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert BareMetalSolutionClient._get_client_cert_source(None, False) is None - assert BareMetalSolutionClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert BareMetalSolutionClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert BareMetalSolutionClient._get_client_cert_source(None, True) is 
mock_default_cert_source - assert BareMetalSolutionClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(BareMetalSolutionClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BareMetalSolutionClient)) -@mock.patch.object(BareMetalSolutionAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BareMetalSolutionAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = BareMetalSolutionClient._DEFAULT_UNIVERSE - default_endpoint = BareMetalSolutionClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = BareMetalSolutionClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert BareMetalSolutionClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert BareMetalSolutionClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == BareMetalSolutionClient.DEFAULT_MTLS_ENDPOINT - assert BareMetalSolutionClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert BareMetalSolutionClient._get_api_endpoint(None, None, default_universe, "always") == BareMetalSolutionClient.DEFAULT_MTLS_ENDPOINT - assert BareMetalSolutionClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == BareMetalSolutionClient.DEFAULT_MTLS_ENDPOINT - assert BareMetalSolutionClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert BareMetalSolutionClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - BareMetalSolutionClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." - - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert BareMetalSolutionClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert BareMetalSolutionClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert BareMetalSolutionClient._get_universe_domain(None, None) == BareMetalSolutionClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - BareMetalSolutionClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
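The universe-domain assertions above encode a fixed precedence chain: an explicit client option wins, then the :code:`GOOGLE_CLOUD_UNIVERSE_DOMAIN` environment variable, then the library default (:code:`googleapis.com`), and an empty string is rejected outright. A minimal caller-side sketch of that chain, mirroring the tests and assuming a :code:`google-api-core` version whose :code:`ClientOptions` accepts :code:`universe_domain` (the domain value is illustrative only):

.. code-block:: python

    from google.api_core import client_options
    from google.auth import credentials as ga_credentials
    from google.cloud.bare_metal_solution_v2.services.bare_metal_solution import BareMetalSolutionClient

    # An explicit client option takes precedence over the environment
    # variable and the default universe.
    options = client_options.ClientOptions(universe_domain="foo.com")
    client = BareMetalSolutionClient(
        client_options=options,
        credentials=ga_credentials.AnonymousCredentials(),
    )
    assert client.universe_domain == "foo.com"

    # With no client option and no GOOGLE_CLOUD_UNIVERSE_DOMAIN set,
    # the default universe applies.
    client = BareMetalSolutionClient(credentials=ga_credentials.AnonymousCredentials())
    assert client.universe_domain == "googleapis.com"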
- -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = BareMetalSolutionClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - -@pytest.mark.parametrize("error_code", [401,403,404,500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = BareMetalSolutionClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - -@pytest.mark.parametrize("client_class,transport_name", [ - (BareMetalSolutionClient, "grpc"), - (BareMetalSolutionAsyncClient, "grpc_asyncio"), - (BareMetalSolutionClient, "rest"), -]) -def test_bare_metal_solution_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'baremetalsolution.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://baremetalsolution.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.BareMetalSolutionGrpcTransport, "grpc"), - (transports.BareMetalSolutionGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.BareMetalSolutionRestTransport, "rest"), -]) -def test_bare_metal_solution_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (BareMetalSolutionClient, "grpc"), - (BareMetalSolutionAsyncClient, "grpc_asyncio"), - (BareMetalSolutionClient, "rest"), -]) -def test_bare_metal_solution_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = 
client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'baremetalsolution.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://baremetalsolution.googleapis.com' - ) - - -def test_bare_metal_solution_client_get_transport_class(): - transport = BareMetalSolutionClient.get_transport_class() - available_transports = [ - transports.BareMetalSolutionGrpcTransport, - transports.BareMetalSolutionRestTransport, - ] - assert transport in available_transports - - transport = BareMetalSolutionClient.get_transport_class("grpc") - assert transport == transports.BareMetalSolutionGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (BareMetalSolutionClient, transports.BareMetalSolutionGrpcTransport, "grpc"), - (BareMetalSolutionAsyncClient, transports.BareMetalSolutionGrpcAsyncIOTransport, "grpc_asyncio"), - (BareMetalSolutionClient, transports.BareMetalSolutionRestTransport, "rest"), -]) -@mock.patch.object(BareMetalSolutionClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BareMetalSolutionClient)) -@mock.patch.object(BareMetalSolutionAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BareMetalSolutionAsyncClient)) -def test_bare_metal_solution_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(BareMetalSolutionClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(BareMetalSolutionClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". 
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
-        with mock.patch.object(transport_class, '__init__') as patched:
-            patched.return_value = None
-            client = client_class(transport=transport_name)
-            patched.assert_called_once_with(
-                credentials=None,
-                credentials_file=None,
-                host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-                scopes=None,
-                client_cert_source_for_mtls=None,
-                quota_project_id=None,
-                client_info=transports.base.DEFAULT_CLIENT_INFO,
-                always_use_jwt_access=True,
-                api_audience=None,
-            )
-
-    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
-    # "always".
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
-        with mock.patch.object(transport_class, '__init__') as patched:
-            patched.return_value = None
-            client = client_class(transport=transport_name)
-            patched.assert_called_once_with(
-                credentials=None,
-                credentials_file=None,
-                host=client.DEFAULT_MTLS_ENDPOINT,
-                scopes=None,
-                client_cert_source_for_mtls=None,
-                quota_project_id=None,
-                client_info=transports.base.DEFAULT_CLIENT_INFO,
-                always_use_jwt_access=True,
-                api_audience=None,
-            )
-
-    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
-    # unsupported value.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
-        with pytest.raises(MutualTLSChannelError) as excinfo:
-            client = client_class(transport=transport_name)
-        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
-
-    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
-        with pytest.raises(ValueError) as excinfo:
-            client = client_class(transport=transport_name)
-        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
-
-    # Check the case quota_project_id is provided
-    options = client_options.ClientOptions(quota_project_id="octopus")
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(client_options=options, transport=transport_name)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file=None,
-            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id="octopus",
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience=None,
-        )
-    # Check the case api_audience is provided
-    options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(client_options=options, transport=transport_name)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file=None,
-            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id=None,
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience="https://language.googleapis.com"
-        )
-
-@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
-    (BareMetalSolutionClient, transports.BareMetalSolutionGrpcTransport, "grpc", "true"),
-    
(BareMetalSolutionAsyncClient, transports.BareMetalSolutionGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (BareMetalSolutionClient, transports.BareMetalSolutionGrpcTransport, "grpc", "false"), - (BareMetalSolutionAsyncClient, transports.BareMetalSolutionGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (BareMetalSolutionClient, transports.BareMetalSolutionRestTransport, "rest", "true"), - (BareMetalSolutionClient, transports.BareMetalSolutionRestTransport, "rest", "false"), -]) -@mock.patch.object(BareMetalSolutionClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BareMetalSolutionClient)) -@mock.patch.object(BareMetalSolutionAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BareMetalSolutionAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_bare_metal_solution_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - BareMetalSolutionClient, BareMetalSolutionAsyncClient -]) -@mock.patch.object(BareMetalSolutionClient, "DEFAULT_ENDPOINT", modify_default_endpoint(BareMetalSolutionClient)) -@mock.patch.object(BareMetalSolutionAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(BareMetalSolutionAsyncClient)) -def test_bare_metal_solution_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - BareMetalSolutionClient, BareMetalSolutionAsyncClient -]) -@mock.patch.object(BareMetalSolutionClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BareMetalSolutionClient)) -@mock.patch.object(BareMetalSolutionAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BareMetalSolutionAsyncClient)) -def test_bare_metal_solution_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = BareMetalSolutionClient._DEFAULT_UNIVERSE - default_endpoint = BareMetalSolutionClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = BareMetalSolutionClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (BareMetalSolutionClient, transports.BareMetalSolutionGrpcTransport, "grpc"), - (BareMetalSolutionAsyncClient, transports.BareMetalSolutionGrpcAsyncIOTransport, "grpc_asyncio"), - (BareMetalSolutionClient, transports.BareMetalSolutionRestTransport, "rest"), -]) -def test_bare_metal_solution_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (BareMetalSolutionClient, transports.BareMetalSolutionGrpcTransport, "grpc", grpc_helpers), - (BareMetalSolutionAsyncClient, transports.BareMetalSolutionGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (BareMetalSolutionClient, transports.BareMetalSolutionRestTransport, "rest", None), -]) -def test_bare_metal_solution_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_bare_metal_solution_client_client_options_from_dict(): - with mock.patch('google.cloud.bare_metal_solution_v2.services.bare_metal_solution.transports.BareMetalSolutionGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = BareMetalSolutionClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (BareMetalSolutionClient, transports.BareMetalSolutionGrpcTransport, "grpc", grpc_helpers), - (BareMetalSolutionAsyncClient, transports.BareMetalSolutionGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_bare_metal_solution_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "baremetalsolution.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="baremetalsolution.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - instance.ListInstancesRequest, - dict, -]) -def test_list_instances(request_type, transport: str = 'grpc'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = instance.ListInstancesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - response = client.list_instances(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = instance.ListInstancesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListInstancesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -def test_list_instances_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = instance.ListInstancesRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.list_instances(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == instance.ListInstancesRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - ) - -def test_list_instances_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_instances in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_instances] = mock_rpc - request = {} - client.list_instances(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_instances(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_instances_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_instances in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_instances] = mock_rpc - - request = {} - await client.list_instances(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_instances(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_instances_async(transport: str = 'grpc_asyncio', request_type=instance.ListInstancesRequest): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(instance.ListInstancesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - response = await client.list_instances(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = instance.ListInstancesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListInstancesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -@pytest.mark.asyncio -async def test_list_instances_async_from_dict(): - await test_list_instances_async(request_type=dict) - -def test_list_instances_field_headers(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = instance.ListInstancesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: - call.return_value = instance.ListInstancesResponse() - client.list_instances(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_instances_field_headers_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = instance.ListInstancesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(instance.ListInstancesResponse()) - await client.list_instances(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_instances_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = instance.ListInstancesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_instances( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_instances_flattened_error(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_instances( - instance.ListInstancesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_instances_flattened_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = instance.ListInstancesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(instance.ListInstancesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_instances( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_instances_flattened_error_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_instances( - instance.ListInstancesRequest(), - parent='parent_value', - ) - - -def test_list_instances_pager(transport_name: str = "grpc"): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - instance.ListInstancesResponse( - instances=[ - instance.Instance(), - instance.Instance(), - instance.Instance(), - ], - next_page_token='abc', - ), - instance.ListInstancesResponse( - instances=[], - next_page_token='def', - ), - instance.ListInstancesResponse( - instances=[ - instance.Instance(), - ], - next_page_token='ghi', - ), - instance.ListInstancesResponse( - instances=[ - instance.Instance(), - instance.Instance(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_instances(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, instance.Instance) - for i in results) -def test_list_instances_pages(transport_name: str = "grpc"): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - instance.ListInstancesResponse( - instances=[ - instance.Instance(), - instance.Instance(), - instance.Instance(), - ], - next_page_token='abc', - ), - instance.ListInstancesResponse( - instances=[], - next_page_token='def', - ), - instance.ListInstancesResponse( - instances=[ - instance.Instance(), - ], - next_page_token='ghi', - ), - instance.ListInstancesResponse( - instances=[ - instance.Instance(), - instance.Instance(), - ], - ), - RuntimeError, - ) - pages = list(client.list_instances(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_instances_async_pager(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - instance.ListInstancesResponse( - instances=[ - instance.Instance(), - instance.Instance(), - instance.Instance(), - ], - next_page_token='abc', - ), - instance.ListInstancesResponse( - instances=[], - next_page_token='def', - ), - instance.ListInstancesResponse( - instances=[ - instance.Instance(), - ], - next_page_token='ghi', - ), - instance.ListInstancesResponse( - instances=[ - instance.Instance(), - instance.Instance(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_instances(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, instance.Instance) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_instances_async_pages(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - instance.ListInstancesResponse( - instances=[ - instance.Instance(), - instance.Instance(), - instance.Instance(), - ], - next_page_token='abc', - ), - instance.ListInstancesResponse( - instances=[], - next_page_token='def', - ), - instance.ListInstancesResponse( - instances=[ - instance.Instance(), - ], - next_page_token='ghi', - ), - instance.ListInstancesResponse( - instances=[ - instance.Instance(), - instance.Instance(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_instances(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - instance.GetInstanceRequest, - dict, -]) -def test_get_instance(request_type, transport: str = 'grpc'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = instance.Instance( - name='name_value', - id='id_value', - machine_type='machine_type_value', - state=instance.Instance.State.PROVISIONING, - hyperthreading_enabled=True, - interactive_serial_console_enabled=True, - os_image='os_image_value', - pod='pod_value', - network_template='network_template_value', - login_info='login_info_value', - workload_profile=common.WorkloadProfile.WORKLOAD_PROFILE_GENERIC, - firmware_version='firmware_version_value', - ) - response = client.get_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = instance.GetInstanceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, instance.Instance) - assert response.name == 'name_value' - assert response.id == 'id_value' - assert response.machine_type == 'machine_type_value' - assert response.state == instance.Instance.State.PROVISIONING - assert response.hyperthreading_enabled is True - assert response.interactive_serial_console_enabled is True - assert response.os_image == 'os_image_value' - assert response.pod == 'pod_value' - assert response.network_template == 'network_template_value' - assert response.login_info == 'login_info_value' - assert response.workload_profile == common.WorkloadProfile.WORKLOAD_PROFILE_GENERIC - assert response.firmware_version == 'firmware_version_value' - - -def test_get_instance_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = instance.GetInstanceRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_instance(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == instance.GetInstanceRequest( - name='name_value', - ) - -def test_get_instance_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_instance in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_instance] = mock_rpc - request = {} - client.get_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_instance in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_instance] = mock_rpc - - request = {} - await client.get_instance(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.get_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_instance_async(transport: str = 'grpc_asyncio', request_type=instance.GetInstanceRequest): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(instance.Instance( - name='name_value', - id='id_value', - machine_type='machine_type_value', - state=instance.Instance.State.PROVISIONING, - hyperthreading_enabled=True, - interactive_serial_console_enabled=True, - os_image='os_image_value', - pod='pod_value', - network_template='network_template_value', - login_info='login_info_value', - workload_profile=common.WorkloadProfile.WORKLOAD_PROFILE_GENERIC, - firmware_version='firmware_version_value', - )) - response = await client.get_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = instance.GetInstanceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, instance.Instance) - assert response.name == 'name_value' - assert response.id == 'id_value' - assert response.machine_type == 'machine_type_value' - assert response.state == instance.Instance.State.PROVISIONING - assert response.hyperthreading_enabled is True - assert response.interactive_serial_console_enabled is True - assert response.os_image == 'os_image_value' - assert response.pod == 'pod_value' - assert response.network_template == 'network_template_value' - assert response.login_info == 'login_info_value' - assert response.workload_profile == common.WorkloadProfile.WORKLOAD_PROFILE_GENERIC - assert response.firmware_version == 'firmware_version_value' - - -@pytest.mark.asyncio -async def test_get_instance_async_from_dict(): - await test_get_instance_async(request_type=dict) - -def test_get_instance_field_headers(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = instance.GetInstanceRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: - call.return_value = instance.Instance() - client.get_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_instance_field_headers_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = instance.GetInstanceRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_instance),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(instance.Instance())
- await client.get_instance(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_get_instance_flattened():
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_instance),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = instance.Instance()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.get_instance(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_get_instance_flattened_error():
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.get_instance(
- instance.GetInstanceRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_get_instance_flattened_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_instance),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(instance.Instance())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.get_instance(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_instance_flattened_error_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
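The `*_field_headers` tests assert the routing convention: any request field that appears in the HTTP/1.1 URI is echoed into the `x-goog-request-params` metadata entry, using dotted paths for nested fields. A simplified stand-in for the routing-header helper (not `google.api_core`'s actual implementation, which also URL-encodes values):

```python
def routing_metadata(params: dict) -> tuple:
    # Build the metadata pair the tests look for in kw['metadata'].
    value = "&".join(f"{field}={val}" for field, val in params.items())
    return ("x-goog-request-params", value)

# Top-level field, as in GetInstanceRequest:
assert routing_metadata({"name": "name_value"}) == (
    "x-goog-request-params", "name=name_value")
# Nested field, as in UpdateInstanceRequest, uses a dotted path:
assert routing_metadata({"instance.name": "name_value"}) == (
    "x-goog-request-params", "instance.name=name_value")
```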
- with pytest.raises(ValueError): - await client.get_instance( - instance.GetInstanceRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - gcb_instance.UpdateInstanceRequest, - dict, -]) -def test_update_instance(request_type, transport: str = 'grpc'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = gcb_instance.UpdateInstanceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_instance_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = gcb_instance.UpdateInstanceRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_instance(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == gcb_instance.UpdateInstanceRequest( - ) - -def test_update_instance_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_instance in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_instance] = mock_rpc - request = {} - client.update_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
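The `*_flattened` / `*_flattened_error` pairs encode one rule: flattened keyword arguments are shorthand for building the request message, and mixing them with an explicit request object raises `ValueError`. A hand-rolled sketch of that guard; the function and dict request are hypothetical, and the generated guard checks every flattened field:

```python
import pytest

def get_instance(request=None, *, name=None):
    # Hypothetical analogue of the generated mutual-exclusion guard.
    if request is not None and name is not None:
        raise ValueError("If the `request` argument is set, then none of "
                         "the individual field arguments should be set.")
    return request if request is not None else {"name": name}

assert get_instance(name="name_value") == {"name": "name_value"}
with pytest.raises(ValueError):
    get_instance({"name": "other"}, name="name_value")
```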
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_instance in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_instance] = mock_rpc - - request = {} - await client.update_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_instance_async(transport: str = 'grpc_asyncio', request_type=gcb_instance.UpdateInstanceRequest): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = gcb_instance.UpdateInstanceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_instance_async_from_dict(): - await test_update_instance_async(request_type=dict) - -def test_update_instance_field_headers(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gcb_instance.UpdateInstanceRequest() - - request.instance.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
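`update_instance` and the other mutating RPCs here are long-running: the stub returns an `operations_pb2.Operation`, yet the tests assert `isinstance(response, future.Future)`, because the client wraps the raw proto in a polling future (in the real library, `google.api_core.operation.from_gapic`). A stdlib sketch of just that type relationship, with stand-in types and an immediately resolved result:

```python
import concurrent.futures

class Operation:
    # Stand-in for operations_pb2.Operation.
    def __init__(self, name):
        self.name = name

def from_gapic(op: Operation) -> concurrent.futures.Future:
    # The real helper polls the Operations service until the server-side
    # work finishes; this sketch resolves immediately for illustration.
    fut = concurrent.futures.Future()
    fut.set_result(op.name)
    return fut

response = from_gapic(Operation(name="operations/spam"))
assert isinstance(response, concurrent.futures.Future)
assert response.result() == "operations/spam"
```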
- with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'instance.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_instance_field_headers_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gcb_instance.UpdateInstanceRequest() - - request.instance.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'instance.name=name_value', - ) in kw['metadata'] - - -def test_update_instance_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_instance( - instance=gcb_instance.Instance(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].instance - mock_val = gcb_instance.Instance(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_instance_flattened_error(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_instance( - gcb_instance.UpdateInstanceRequest(), - instance=gcb_instance.Instance(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_instance_flattened_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name='operations/spam')
- )
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.update_instance(
- instance=gcb_instance.Instance(name='name_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].instance
- mock_val = gcb_instance.Instance(name='name_value')
- assert arg == mock_val
- arg = args[0].update_mask
- mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_instance_flattened_error_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.update_instance(
- gcb_instance.UpdateInstanceRequest(),
- instance=gcb_instance.Instance(name='name_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
-
-@pytest.mark.parametrize("request_type", [
- instance.RenameInstanceRequest,
- dict,
-])
-def test_rename_instance(request_type, transport: str = 'grpc'):
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.rename_instance),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = instance.Instance(
- name='name_value',
- id='id_value',
- machine_type='machine_type_value',
- state=instance.Instance.State.PROVISIONING,
- hyperthreading_enabled=True,
- interactive_serial_console_enabled=True,
- os_image='os_image_value',
- pod='pod_value',
- network_template='network_template_value',
- login_info='login_info_value',
- workload_profile=common.WorkloadProfile.WORKLOAD_PROFILE_GENERIC,
- firmware_version='firmware_version_value',
- )
- response = client.rename_instance(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = instance.RenameInstanceRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, instance.Instance) - assert response.name == 'name_value' - assert response.id == 'id_value' - assert response.machine_type == 'machine_type_value' - assert response.state == instance.Instance.State.PROVISIONING - assert response.hyperthreading_enabled is True - assert response.interactive_serial_console_enabled is True - assert response.os_image == 'os_image_value' - assert response.pod == 'pod_value' - assert response.network_template == 'network_template_value' - assert response.login_info == 'login_info_value' - assert response.workload_profile == common.WorkloadProfile.WORKLOAD_PROFILE_GENERIC - assert response.firmware_version == 'firmware_version_value' - - -def test_rename_instance_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = instance.RenameInstanceRequest( - name='name_value', - new_instance_id='new_instance_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_instance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.rename_instance(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == instance.RenameInstanceRequest( - name='name_value', - new_instance_id='new_instance_id_value', - ) - -def test_rename_instance_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.rename_instance in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.rename_instance] = mock_rpc - request = {} - client.rename_instance(request) - - # Establish that the underlying gRPC stub method was called. 
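The `*_non_empty_request_with_auto_populated_field` tests set every non-UUID string field explicitly and check the request arrives unchanged; they are the failsafe side of AIP-4235, under which eligible UUID4 fields are filled in automatically when left unset. A sketch of the guarded behavior, using a hypothetical `request_id` field (this service's requests do not necessarily carry one):

```python
import uuid

def prepare_request(request: dict) -> dict:
    # Hypothetical sketch: a field annotated for auto-population gets a
    # fresh UUID4 only when the caller left it unset.
    prepared = dict(request)
    if not prepared.get("request_id"):
        prepared["request_id"] = str(uuid.uuid4())
    return prepared

req = prepare_request({"name": "name_value"})
assert req["name"] == "name_value"         # explicitly set fields pass through
uuid.UUID(req["request_id"], version=4)    # unset field was auto-populated
```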
- assert mock_rpc.call_count == 1
-
- client.rename_instance(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_rename_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.rename_instance in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.rename_instance] = mock_rpc
-
- request = {}
- await client.rename_instance(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.rename_instance(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_rename_instance_async(transport: str = 'grpc_asyncio', request_type=instance.RenameInstanceRequest):
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.rename_instance),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(instance.Instance(
- name='name_value',
- id='id_value',
- machine_type='machine_type_value',
- state=instance.Instance.State.PROVISIONING,
- hyperthreading_enabled=True,
- interactive_serial_console_enabled=True,
- os_image='os_image_value',
- pod='pod_value',
- network_template='network_template_value',
- login_info='login_info_value',
- workload_profile=common.WorkloadProfile.WORKLOAD_PROFILE_GENERIC,
- firmware_version='firmware_version_value',
- ))
- response = await client.rename_instance(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = instance.RenameInstanceRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, instance.Instance) - assert response.name == 'name_value' - assert response.id == 'id_value' - assert response.machine_type == 'machine_type_value' - assert response.state == instance.Instance.State.PROVISIONING - assert response.hyperthreading_enabled is True - assert response.interactive_serial_console_enabled is True - assert response.os_image == 'os_image_value' - assert response.pod == 'pod_value' - assert response.network_template == 'network_template_value' - assert response.login_info == 'login_info_value' - assert response.workload_profile == common.WorkloadProfile.WORKLOAD_PROFILE_GENERIC - assert response.firmware_version == 'firmware_version_value' - - -@pytest.mark.asyncio -async def test_rename_instance_async_from_dict(): - await test_rename_instance_async(request_type=dict) - -def test_rename_instance_field_headers(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = instance.RenameInstanceRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_instance), - '__call__') as call: - call.return_value = instance.Instance() - client.rename_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_rename_instance_field_headers_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = instance.RenameInstanceRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_instance), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(instance.Instance()) - await client.rename_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_rename_instance_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = instance.Instance() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.rename_instance( - name='name_value', - new_instance_id='new_instance_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
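Note why the async variants wrap every stub return value in `grpc_helpers_async.FakeUnaryUnaryCall`: the async client awaits the stub's result, so a bare message object would not be awaitable. A minimal stand-in showing the mechanics (hypothetical class, not the real helper):

```python
import asyncio
from unittest import mock

class FakeUnaryUnaryCall:
    # Stand-in: wraps a response message so the mocked RPC can be awaited.
    def __init__(self, response):
        self._response = response

    def __await__(self):
        async def _resolve():
            return self._response
        return _resolve().__await__()

async def main():
    stub = mock.Mock(return_value=FakeUnaryUnaryCall({"name": "name_value"}))
    response = await stub(request={})   # awaiting the mocked unary call
    assert response == {"name": "name_value"}

asyncio.run(main())
```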
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
- arg = args[0].new_instance_id
- mock_val = 'new_instance_id_value'
- assert arg == mock_val
-
-
-def test_rename_instance_flattened_error():
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.rename_instance(
- instance.RenameInstanceRequest(),
- name='name_value',
- new_instance_id='new_instance_id_value',
- )
-
-@pytest.mark.asyncio
-async def test_rename_instance_flattened_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.rename_instance),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(instance.Instance())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.rename_instance(
- name='name_value',
- new_instance_id='new_instance_id_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
- arg = args[0].new_instance_id
- mock_val = 'new_instance_id_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_rename_instance_flattened_error_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.rename_instance(
- instance.RenameInstanceRequest(),
- name='name_value',
- new_instance_id='new_instance_id_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- instance.ResetInstanceRequest,
- dict,
-])
-def test_reset_instance(request_type, transport: str = 'grpc'):
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.reset_instance),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/spam')
- response = client.reset_instance(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = instance.ResetInstanceRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, future.Future)
-
-
-def test_reset_instance_non_empty_request_with_auto_populated_field():
- # This test is a coverage failsafe to make sure that UUID4 fields are
- # automatically populated, according to AIP-4235, with non-empty requests.
- client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = instance.ResetInstanceRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reset_instance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.reset_instance(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == instance.ResetInstanceRequest( - name='name_value', - ) - -def test_reset_instance_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.reset_instance in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.reset_instance] = mock_rpc - request = {} - client.reset_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.reset_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_reset_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.reset_instance in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.reset_instance] = mock_rpc - - request = {} - await client.reset_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.reset_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_reset_instance_async(transport: str = 'grpc_asyncio', request_type=instance.ResetInstanceRequest): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reset_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.reset_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = instance.ResetInstanceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_reset_instance_async_from_dict(): - await test_reset_instance_async(request_type=dict) - -def test_reset_instance_field_headers(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = instance.ResetInstanceRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reset_instance), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.reset_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_reset_instance_field_headers_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = instance.ResetInstanceRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reset_instance), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.reset_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
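A recurring idiom in these tests is `_, args, kw = call.mock_calls[0]`: each `mock_calls` entry is a `(name, args, kwargs)` triple, which is how the assertions recover both the positional request argument and the `metadata` keyword. A self-contained illustration with a plain `Mock`:

```python
from unittest import mock

call = mock.Mock()
call("request_value", metadata=[("x-goog-request-params", "name=name_value")])

name, args, kwargs = call.mock_calls[0]   # name is '' for a direct call
assert args[0] == "request_value"
assert ("x-goog-request-params", "name=name_value") in kwargs["metadata"]
```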
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_reset_instance_flattened():
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.reset_instance),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/op')
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.reset_instance(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_reset_instance_flattened_error():
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.reset_instance(
- instance.ResetInstanceRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_reset_instance_flattened_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.reset_instance),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name='operations/spam')
- )
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.reset_instance(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_reset_instance_flattened_error_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.reset_instance(
- instance.ResetInstanceRequest(),
- name='name_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- instance.StartInstanceRequest,
- dict,
-])
-def test_start_instance(request_type, transport: str = 'grpc'):
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.start_instance),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/spam')
- response = client.start_instance(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = instance.StartInstanceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_start_instance_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = instance.StartInstanceRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.start_instance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.start_instance(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == instance.StartInstanceRequest( - name='name_value', - ) - -def test_start_instance_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.start_instance in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.start_instance] = mock_rpc - request = {} - client.start_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.start_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_start_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.start_instance in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.start_instance] = mock_rpc - - request = {} - await client.start_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.start_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_start_instance_async(transport: str = 'grpc_asyncio', request_type=instance.StartInstanceRequest): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.start_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.start_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = instance.StartInstanceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_start_instance_async_from_dict(): - await test_start_instance_async(request_type=dict) - -def test_start_instance_field_headers(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = instance.StartInstanceRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(
- type(client.transport.start_instance),
- '__call__') as call:
- call.return_value = operations_pb2.Operation(name='operations/op')
- client.start_instance(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_start_instance_field_headers_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = instance.StartInstanceRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.start_instance),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
- await client.start_instance(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_start_instance_flattened():
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.start_instance),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/op')
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.start_instance(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_start_instance_flattened_error():
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.start_instance(
- instance.StartInstanceRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_start_instance_flattened_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.start_instance),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name='operations/spam')
- )
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.start_instance(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_start_instance_flattened_error_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.start_instance( - instance.StartInstanceRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - instance.StopInstanceRequest, - dict, -]) -def test_stop_instance(request_type, transport: str = 'grpc'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.stop_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.stop_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = instance.StopInstanceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_stop_instance_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = instance.StopInstanceRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.stop_instance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.stop_instance(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == instance.StopInstanceRequest( - name='name_value', - ) - -def test_stop_instance_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.stop_instance in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
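Each RPC's first test is parametrized over `[RequestMessage, dict]` because proto-plus request types accept a mapping and coerce it into the message, so both spellings must behave identically. A rough sketch of that coercion contract, with a hypothetical class standing in for the proto-plus message:

```python
class StopInstanceRequest:
    # Hypothetical stand-in for the proto-plus message type.
    def __init__(self, name: str = ""):
        self.name = name

    def __eq__(self, other):
        return isinstance(other, StopInstanceRequest) and self.name == other.name

def coerce(request):
    # Client-side normalization: a dict becomes the message type.
    return StopInstanceRequest(**request) if isinstance(request, dict) else request

assert coerce({"name": "name_value"}) == coerce(StopInstanceRequest(name="name_value"))
```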
- client._transport._wrapped_methods[client._transport.stop_instance] = mock_rpc - request = {} - client.stop_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.stop_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_stop_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.stop_instance in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.stop_instance] = mock_rpc - - request = {} - await client.stop_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.stop_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_stop_instance_async(transport: str = 'grpc_asyncio', request_type=instance.StopInstanceRequest): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.stop_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.stop_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = instance.StopInstanceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_stop_instance_async_from_dict(): - await test_stop_instance_async(request_type=dict) - -def test_stop_instance_field_headers(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = instance.StopInstanceRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.stop_instance),
- '__call__') as call:
- call.return_value = operations_pb2.Operation(name='operations/op')
- client.stop_instance(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_stop_instance_field_headers_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = instance.StopInstanceRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.stop_instance),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
- await client.stop_instance(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_stop_instance_flattened():
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.stop_instance),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/op')
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.stop_instance(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_stop_instance_flattened_error():
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.stop_instance(
- instance.StopInstanceRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_stop_instance_flattened_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.stop_instance),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name='operations/spam')
- )
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
-        response = await client.stop_instance(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_stop_instance_flattened_error_async():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.stop_instance(
-            instance.StopInstanceRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  instance.EnableInteractiveSerialConsoleRequest,
-  dict,
-])
-def test_enable_interactive_serial_console(request_type, transport: str = 'grpc'):
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.enable_interactive_serial_console),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/spam')
-        response = client.enable_interactive_serial_console(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = instance.EnableInteractiveSerialConsoleRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, future.Future)
-
-
-def test_enable_interactive_serial_console_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = instance.EnableInteractiveSerialConsoleRequest(
-        name='name_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.enable_interactive_serial_console),
-            '__call__') as call:
-        call.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
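-        # Any truthy string works here: the mocked call never reaches a real
-        # transport, and only the echoed request object is asserted on below.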
-        client.enable_interactive_serial_console(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == instance.EnableInteractiveSerialConsoleRequest(
-            name='name_value',
-        )
-
-def test_enable_interactive_serial_console_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = BareMetalSolutionClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.enable_interactive_serial_console in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.enable_interactive_serial_console] = mock_rpc
-        request = {}
-        client.enable_interactive_serial_console(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        # Operation methods call wrapper_fn to build a cached
-        # client._transport.operations_client instance on first rpc call.
-        # Subsequent calls should use the cached wrapper
-        wrapper_fn.reset_mock()
-
-        client.enable_interactive_serial_console(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_enable_interactive_serial_console_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = BareMetalSolutionAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.enable_interactive_serial_console in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.enable_interactive_serial_console] = mock_rpc
-
-        request = {}
-        await client.enable_interactive_serial_console(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        # Operation methods call wrapper_fn to build a cached
-        # client._transport.operations_client instance on first rpc call.
-        # Subsequent calls should use the cached wrapper
-        wrapper_fn.reset_mock()
-
-        await client.enable_interactive_serial_console(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_enable_interactive_serial_console_async(transport: str = 'grpc_asyncio', request_type=instance.EnableInteractiveSerialConsoleRequest):
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.enable_interactive_serial_console),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        response = await client.enable_interactive_serial_console(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = instance.EnableInteractiveSerialConsoleRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, future.Future)
-
-
-@pytest.mark.asyncio
-async def test_enable_interactive_serial_console_async_from_dict():
-    await test_enable_interactive_serial_console_async(request_type=dict)
-
-def test_enable_interactive_serial_console_field_headers():
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = instance.EnableInteractiveSerialConsoleRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.enable_interactive_serial_console),
-            '__call__') as call:
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        client.enable_interactive_serial_console(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_enable_interactive_serial_console_field_headers_async():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = instance.EnableInteractiveSerialConsoleRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.enable_interactive_serial_console),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
-        await client.enable_interactive_serial_console(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_enable_interactive_serial_console_flattened():
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.enable_interactive_serial_console),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.enable_interactive_serial_console(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_enable_interactive_serial_console_flattened_error():
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.enable_interactive_serial_console(
-            instance.EnableInteractiveSerialConsoleRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_enable_interactive_serial_console_flattened_async():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.enable_interactive_serial_console),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.enable_interactive_serial_console(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_enable_interactive_serial_console_flattened_error_async():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.enable_interactive_serial_console(
-            instance.EnableInteractiveSerialConsoleRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  instance.DisableInteractiveSerialConsoleRequest,
-  dict,
-])
-def test_disable_interactive_serial_console(request_type, transport: str = 'grpc'):
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.disable_interactive_serial_console),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/spam')
-        response = client.disable_interactive_serial_console(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = instance.DisableInteractiveSerialConsoleRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, future.Future)
-
-
-def test_disable_interactive_serial_console_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = instance.DisableInteractiveSerialConsoleRequest(
-        name='name_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.disable_interactive_serial_console),
-            '__call__') as call:
-        call.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
-        client.disable_interactive_serial_console(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == instance.DisableInteractiveSerialConsoleRequest(
-            name='name_value',
-        )
-
-def test_disable_interactive_serial_console_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = BareMetalSolutionClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.disable_interactive_serial_console in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.disable_interactive_serial_console] = mock_rpc
-        request = {}
-        client.disable_interactive_serial_console(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        # Operation methods call wrapper_fn to build a cached
-        # client._transport.operations_client instance on first rpc call.
-        # Subsequent calls should use the cached wrapper
-        wrapper_fn.reset_mock()
-
-        client.disable_interactive_serial_console(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_disable_interactive_serial_console_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = BareMetalSolutionAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.disable_interactive_serial_console in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.disable_interactive_serial_console] = mock_rpc
-
-        request = {}
-        await client.disable_interactive_serial_console(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        # Operation methods call wrapper_fn to build a cached
-        # client._transport.operations_client instance on first rpc call.
-        # Subsequent calls should use the cached wrapper
-        wrapper_fn.reset_mock()
-
-        await client.disable_interactive_serial_console(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_disable_interactive_serial_console_async(transport: str = 'grpc_asyncio', request_type=instance.DisableInteractiveSerialConsoleRequest):
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.disable_interactive_serial_console),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        response = await client.disable_interactive_serial_console(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = instance.DisableInteractiveSerialConsoleRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, future.Future)
-
-
-@pytest.mark.asyncio
-async def test_disable_interactive_serial_console_async_from_dict():
-    await test_disable_interactive_serial_console_async(request_type=dict)
-
-def test_disable_interactive_serial_console_field_headers():
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = instance.DisableInteractiveSerialConsoleRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.disable_interactive_serial_console),
-            '__call__') as call:
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        client.disable_interactive_serial_console(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_disable_interactive_serial_console_field_headers_async():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = instance.DisableInteractiveSerialConsoleRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.disable_interactive_serial_console),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
-        await client.disable_interactive_serial_console(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_disable_interactive_serial_console_flattened():
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.disable_interactive_serial_console),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.disable_interactive_serial_console(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_disable_interactive_serial_console_flattened_error():
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.disable_interactive_serial_console(
-            instance.DisableInteractiveSerialConsoleRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_disable_interactive_serial_console_flattened_async():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.disable_interactive_serial_console),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
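-        # FakeUnaryUnaryCall wraps the response message in an awaitable call
-        # object, mirroring what a real grpc.aio unary-unary stub returns.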
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.disable_interactive_serial_console(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_disable_interactive_serial_console_flattened_error_async():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.disable_interactive_serial_console(
-            instance.DisableInteractiveSerialConsoleRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  gcb_instance.DetachLunRequest,
-  dict,
-])
-def test_detach_lun(request_type, transport: str = 'grpc'):
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.detach_lun),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/spam')
-        response = client.detach_lun(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = gcb_instance.DetachLunRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, future.Future)
-
-
-def test_detach_lun_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = gcb_instance.DetachLunRequest(
-        instance='instance_value',
-        lun='lun_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.detach_lun),
-            '__call__') as call:
-        call.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
-        client.detach_lun(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == gcb_instance.DetachLunRequest(
-            instance='instance_value',
-            lun='lun_value',
-        )
-
-def test_detach_lun_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = BareMetalSolutionClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.detach_lun in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.detach_lun] = mock_rpc
-        request = {}
-        client.detach_lun(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        # Operation methods call wrapper_fn to build a cached
-        # client._transport.operations_client instance on first rpc call.
-        # Subsequent calls should use the cached wrapper
-        wrapper_fn.reset_mock()
-
-        client.detach_lun(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_detach_lun_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = BareMetalSolutionAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.detach_lun in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.detach_lun] = mock_rpc
-
-        request = {}
-        await client.detach_lun(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        # Operation methods call wrapper_fn to build a cached
-        # client._transport.operations_client instance on first rpc call.
-        # Subsequent calls should use the cached wrapper
-        wrapper_fn.reset_mock()
-
-        await client.detach_lun(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_detach_lun_async(transport: str = 'grpc_asyncio', request_type=gcb_instance.DetachLunRequest):
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
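-    # Patching __call__ on the multicallable's type intercepts the stub
-    # invocation itself, so no channel traffic ever happens in these tests.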
-    with mock.patch.object(
-            type(client.transport.detach_lun),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        response = await client.detach_lun(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = gcb_instance.DetachLunRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, future.Future)
-
-
-@pytest.mark.asyncio
-async def test_detach_lun_async_from_dict():
-    await test_detach_lun_async(request_type=dict)
-
-def test_detach_lun_field_headers():
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = gcb_instance.DetachLunRequest()
-
-    request.instance = 'instance_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.detach_lun),
-            '__call__') as call:
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        client.detach_lun(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'instance=instance_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_detach_lun_field_headers_async():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = gcb_instance.DetachLunRequest()
-
-    request.instance = 'instance_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.detach_lun),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
-        await client.detach_lun(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'instance=instance_value',
-    ) in kw['metadata']
-
-
-def test_detach_lun_flattened():
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.detach_lun),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.detach_lun(
-            instance='instance_value',
-            lun='lun_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].instance
-        mock_val = 'instance_value'
-        assert arg == mock_val
-        arg = args[0].lun
-        mock_val = 'lun_value'
-        assert arg == mock_val
-
-
-def test_detach_lun_flattened_error():
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.detach_lun(
-            gcb_instance.DetachLunRequest(),
-            instance='instance_value',
-            lun='lun_value',
-        )
-
-@pytest.mark.asyncio
-async def test_detach_lun_flattened_async():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.detach_lun),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.detach_lun(
-            instance='instance_value',
-            lun='lun_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].instance
-        mock_val = 'instance_value'
-        assert arg == mock_val
-        arg = args[0].lun
-        mock_val = 'lun_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_detach_lun_flattened_error_async():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.detach_lun(
-            gcb_instance.DetachLunRequest(),
-            instance='instance_value',
-            lun='lun_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  ssh_key.ListSSHKeysRequest,
-  dict,
-])
-def test_list_ssh_keys(request_type, transport: str = 'grpc'):
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_ssh_keys),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = ssh_key.ListSSHKeysResponse(
-            next_page_token='next_page_token_value',
-        )
-        response = client.list_ssh_keys(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = ssh_key.ListSSHKeysRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListSSHKeysPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-def test_list_ssh_keys_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
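-    # (AIP-4235 concerns request_id-style UUID4 fields; this request type
-    # defines none, so the test simply verifies the strings pass through.)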
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = ssh_key.ListSSHKeysRequest(
-        parent='parent_value',
-        page_token='page_token_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_ssh_keys),
-            '__call__') as call:
-        call.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
-        client.list_ssh_keys(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == ssh_key.ListSSHKeysRequest(
-            parent='parent_value',
-            page_token='page_token_value',
-        )
-
-def test_list_ssh_keys_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = BareMetalSolutionClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.list_ssh_keys in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.list_ssh_keys] = mock_rpc
-        request = {}
-        client.list_ssh_keys(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.list_ssh_keys(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_ssh_keys_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = BareMetalSolutionAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.list_ssh_keys in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.list_ssh_keys] = mock_rpc
-
-        request = {}
-        await client.list_ssh_keys(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.list_ssh_keys(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_ssh_keys_async(transport: str = 'grpc_asyncio', request_type=ssh_key.ListSSHKeysRequest):
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_ssh_keys),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(ssh_key.ListSSHKeysResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.list_ssh_keys(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = ssh_key.ListSSHKeysRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListSSHKeysAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_ssh_keys_async_from_dict():
-    await test_list_ssh_keys_async(request_type=dict)
-
-def test_list_ssh_keys_field_headers():
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = ssh_key.ListSSHKeysRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_ssh_keys),
-            '__call__') as call:
-        call.return_value = ssh_key.ListSSHKeysResponse()
-        client.list_ssh_keys(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_ssh_keys_field_headers_async():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = ssh_key.ListSSHKeysRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_ssh_keys),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(ssh_key.ListSSHKeysResponse())
-        await client.list_ssh_keys(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
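-    # Each mock.call unpacks as (name, args, kwargs); the routing header
-    # travels in the metadata kwarg as an (x-goog-request-params, ...) pair.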
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_list_ssh_keys_flattened():
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_ssh_keys),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = ssh_key.ListSSHKeysResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_ssh_keys(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_ssh_keys_flattened_error():
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_ssh_keys(
-            ssh_key.ListSSHKeysRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_ssh_keys_flattened_async():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_ssh_keys),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(ssh_key.ListSSHKeysResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_ssh_keys(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_ssh_keys_flattened_error_async():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_ssh_keys(
-            ssh_key.ListSSHKeysRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_ssh_keys_pager(transport_name: str = "grpc"):
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_ssh_keys),
-            '__call__') as call:
-        # Set the response to a series of pages.
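-        # side_effect yields one response per call: three tokened pages, a
-        # final page with no token, then RuntimeError, which would only fire
-        # if the pager kept fetching past the last page.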
-        call.side_effect = (
-            ssh_key.ListSSHKeysResponse(
-                ssh_keys=[
-                    ssh_key.SSHKey(),
-                    ssh_key.SSHKey(),
-                    ssh_key.SSHKey(),
-                ],
-                next_page_token='abc',
-            ),
-            ssh_key.ListSSHKeysResponse(
-                ssh_keys=[],
-                next_page_token='def',
-            ),
-            ssh_key.ListSSHKeysResponse(
-                ssh_keys=[
-                    ssh_key.SSHKey(),
-                ],
-                next_page_token='ghi',
-            ),
-            ssh_key.ListSSHKeysResponse(
-                ssh_keys=[
-                    ssh_key.SSHKey(),
-                    ssh_key.SSHKey(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        expected_metadata = ()
-        retry = retries.Retry()
-        timeout = 5
-        expected_metadata = tuple(expected_metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('parent', ''),
-            )),
-        )
-        pager = client.list_ssh_keys(request={}, retry=retry, timeout=timeout)
-
-        assert pager._metadata == expected_metadata
-        assert pager._retry == retry
-        assert pager._timeout == timeout
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, ssh_key.SSHKey)
-                   for i in results)
-def test_list_ssh_keys_pages(transport_name: str = "grpc"):
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_ssh_keys),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            ssh_key.ListSSHKeysResponse(
-                ssh_keys=[
-                    ssh_key.SSHKey(),
-                    ssh_key.SSHKey(),
-                    ssh_key.SSHKey(),
-                ],
-                next_page_token='abc',
-            ),
-            ssh_key.ListSSHKeysResponse(
-                ssh_keys=[],
-                next_page_token='def',
-            ),
-            ssh_key.ListSSHKeysResponse(
-                ssh_keys=[
-                    ssh_key.SSHKey(),
-                ],
-                next_page_token='ghi',
-            ),
-            ssh_key.ListSSHKeysResponse(
-                ssh_keys=[
-                    ssh_key.SSHKey(),
-                    ssh_key.SSHKey(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = list(client.list_ssh_keys(request={}).pages)
-        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.asyncio
-async def test_list_ssh_keys_async_pager():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_ssh_keys),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            ssh_key.ListSSHKeysResponse(
-                ssh_keys=[
-                    ssh_key.SSHKey(),
-                    ssh_key.SSHKey(),
-                    ssh_key.SSHKey(),
-                ],
-                next_page_token='abc',
-            ),
-            ssh_key.ListSSHKeysResponse(
-                ssh_keys=[],
-                next_page_token='def',
-            ),
-            ssh_key.ListSSHKeysResponse(
-                ssh_keys=[
-                    ssh_key.SSHKey(),
-                ],
-                next_page_token='ghi',
-            ),
-            ssh_key.ListSSHKeysResponse(
-                ssh_keys=[
-                    ssh_key.SSHKey(),
-                    ssh_key.SSHKey(),
-                ],
-            ),
-            RuntimeError,
-        )
-        async_pager = await client.list_ssh_keys(request={},)
-        assert async_pager.next_page_token == 'abc'
-        responses = []
-        async for response in async_pager:  # pragma: no branch
-            responses.append(response)
-
-        assert len(responses) == 6
-        assert all(isinstance(i, ssh_key.SSHKey)
-                   for i in responses)
-
-
-@pytest.mark.asyncio
-async def test_list_ssh_keys_async_pages():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_ssh_keys),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            ssh_key.ListSSHKeysResponse(
-                ssh_keys=[
-                    ssh_key.SSHKey(),
-                    ssh_key.SSHKey(),
-                    ssh_key.SSHKey(),
-                ],
-                next_page_token='abc',
-            ),
-            ssh_key.ListSSHKeysResponse(
-                ssh_keys=[],
-                next_page_token='def',
-            ),
-            ssh_key.ListSSHKeysResponse(
-                ssh_keys=[
-                    ssh_key.SSHKey(),
-                ],
-                next_page_token='ghi',
-            ),
-            ssh_key.ListSSHKeysResponse(
-                ssh_keys=[
-                    ssh_key.SSHKey(),
-                    ssh_key.SSHKey(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = []
-        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
-        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
-        async for page_ in (  # pragma: no branch
-            await client.list_ssh_keys(request={})
-        ).pages:
-            pages.append(page_)
-        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.parametrize("request_type", [
-  gcb_ssh_key.CreateSSHKeyRequest,
-  dict,
-])
-def test_create_ssh_key(request_type, transport: str = 'grpc'):
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_ssh_key),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = gcb_ssh_key.SSHKey(
-            name='name_value',
-            public_key='public_key_value',
-        )
-        response = client.create_ssh_key(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = gcb_ssh_key.CreateSSHKeyRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, gcb_ssh_key.SSHKey)
-    assert response.name == 'name_value'
-    assert response.public_key == 'public_key_value'
-
-
-def test_create_ssh_key_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = gcb_ssh_key.CreateSSHKeyRequest(
-        parent='parent_value',
-        ssh_key_id='ssh_key_id_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_ssh_key),
-            '__call__') as call:
-        call.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
-        client.create_ssh_key(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == gcb_ssh_key.CreateSSHKeyRequest(
-            parent='parent_value',
-            ssh_key_id='ssh_key_id_value',
-        )
-
-def test_create_ssh_key_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = BareMetalSolutionClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.create_ssh_key in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.create_ssh_key] = mock_rpc
-        request = {}
-        client.create_ssh_key(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.create_ssh_key(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_create_ssh_key_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = BareMetalSolutionAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.create_ssh_key in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.create_ssh_key] = mock_rpc
-
-        request = {}
-        await client.create_ssh_key(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.create_ssh_key(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_create_ssh_key_async(transport: str = 'grpc_asyncio', request_type=gcb_ssh_key.CreateSSHKeyRequest):
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_ssh_key),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcb_ssh_key.SSHKey(
-            name='name_value',
-            public_key='public_key_value',
-        ))
-        response = await client.create_ssh_key(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = gcb_ssh_key.CreateSSHKeyRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, gcb_ssh_key.SSHKey)
-    assert response.name == 'name_value'
-    assert response.public_key == 'public_key_value'
-
-
-@pytest.mark.asyncio
-async def test_create_ssh_key_async_from_dict():
-    await test_create_ssh_key_async(request_type=dict)
-
-def test_create_ssh_key_field_headers():
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = gcb_ssh_key.CreateSSHKeyRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_ssh_key),
-            '__call__') as call:
-        call.return_value = gcb_ssh_key.SSHKey()
-        client.create_ssh_key(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_create_ssh_key_field_headers_async():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = gcb_ssh_key.CreateSSHKeyRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_ssh_key),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcb_ssh_key.SSHKey())
-        await client.create_ssh_key(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_create_ssh_key_flattened():
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_ssh_key),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = gcb_ssh_key.SSHKey()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.create_ssh_key(
-            parent='parent_value',
-            ssh_key=gcb_ssh_key.SSHKey(name='name_value'),
-            ssh_key_id='ssh_key_id_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].ssh_key
-        mock_val = gcb_ssh_key.SSHKey(name='name_value')
-        assert arg == mock_val
-        arg = args[0].ssh_key_id
-        mock_val = 'ssh_key_id_value'
-        assert arg == mock_val
-
-
-def test_create_ssh_key_flattened_error():
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.create_ssh_key(
-            gcb_ssh_key.CreateSSHKeyRequest(),
-            parent='parent_value',
-            ssh_key=gcb_ssh_key.SSHKey(name='name_value'),
-            ssh_key_id='ssh_key_id_value',
-        )
-
-@pytest.mark.asyncio
-async def test_create_ssh_key_flattened_async():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_ssh_key),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcb_ssh_key.SSHKey())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.create_ssh_key(
-            parent='parent_value',
-            ssh_key=gcb_ssh_key.SSHKey(name='name_value'),
-            ssh_key_id='ssh_key_id_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].ssh_key
-        mock_val = gcb_ssh_key.SSHKey(name='name_value')
-        assert arg == mock_val
-        arg = args[0].ssh_key_id
-        mock_val = 'ssh_key_id_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_ssh_key_flattened_error_async():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.create_ssh_key(
-            gcb_ssh_key.CreateSSHKeyRequest(),
-            parent='parent_value',
-            ssh_key=gcb_ssh_key.SSHKey(name='name_value'),
-            ssh_key_id='ssh_key_id_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    ssh_key.DeleteSSHKeyRequest,
-    dict,
-])
-def test_delete_ssh_key(request_type, transport: str = 'grpc'):
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_ssh_key),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        response = client.delete_ssh_key(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = ssh_key.DeleteSSHKeyRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-def test_delete_ssh_key_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = ssh_key.DeleteSSHKeyRequest(
-        name='name_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_ssh_key),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.delete_ssh_key(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == ssh_key.DeleteSSHKeyRequest(
-            name='name_value',
-        )
-
-def test_delete_ssh_key_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = BareMetalSolutionClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.delete_ssh_key in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.delete_ssh_key] = mock_rpc
-        request = {}
-        client.delete_ssh_key(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.delete_ssh_key(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_delete_ssh_key_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = BareMetalSolutionAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.delete_ssh_key in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.delete_ssh_key] = mock_rpc
-
-        request = {}
-        await client.delete_ssh_key(request)
-
-        # Establish that the underlying gRPC stub method was called.
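-        # Aside (hedged): because the cached entry in _wrapped_methods was
-        # swapped for a plain mock above, any retry/timeout wrapping is
-        # bypassed; the call counts below therefore observe the cache entry
-        # directly, which is the contract this test exercises.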
-        assert mock_rpc.call_count == 1
-
-        await client.delete_ssh_key(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_delete_ssh_key_async(transport: str = 'grpc_asyncio', request_type=ssh_key.DeleteSSHKeyRequest):
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_ssh_key),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        response = await client.delete_ssh_key(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = ssh_key.DeleteSSHKeyRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-@pytest.mark.asyncio
-async def test_delete_ssh_key_async_from_dict():
-    await test_delete_ssh_key_async(request_type=dict)
-
-def test_delete_ssh_key_field_headers():
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = ssh_key.DeleteSSHKeyRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_ssh_key),
-            '__call__') as call:
-        call.return_value = None
-        client.delete_ssh_key(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_delete_ssh_key_field_headers_async():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = ssh_key.DeleteSSHKeyRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_ssh_key),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        await client.delete_ssh_key(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_delete_ssh_key_flattened():
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_ssh_key),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.delete_ssh_key(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_delete_ssh_key_flattened_error():
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_ssh_key(
-            ssh_key.DeleteSSHKeyRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_delete_ssh_key_flattened_async():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_ssh_key),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.delete_ssh_key(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_ssh_key_flattened_error_async():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.delete_ssh_key(
-            ssh_key.DeleteSSHKeyRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    volume.ListVolumesRequest,
-    dict,
-])
-def test_list_volumes(request_type, transport: str = 'grpc'):
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_volumes),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = volume.ListVolumesResponse(
-            next_page_token='next_page_token_value',
-            unreachable=['unreachable_value'],
-        )
-        response = client.list_volumes(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = volume.ListVolumesRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListVolumesPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable == ['unreachable_value']
-
-
-def test_list_volumes_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = volume.ListVolumesRequest(
-        parent='parent_value',
-        page_token='page_token_value',
-        filter='filter_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_volumes),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.list_volumes(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == volume.ListVolumesRequest(
-            parent='parent_value',
-            page_token='page_token_value',
-            filter='filter_value',
-        )
-
-def test_list_volumes_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = BareMetalSolutionClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.list_volumes in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.list_volumes] = mock_rpc
-        request = {}
-        client.list_volumes(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.list_volumes(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_volumes_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = BareMetalSolutionAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.list_volumes in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.list_volumes] = mock_rpc
-
-        request = {}
-        await client.list_volumes(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.list_volumes(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_volumes_async(transport: str = 'grpc_asyncio', request_type=volume.ListVolumesRequest):
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_volumes),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(volume.ListVolumesResponse(
-            next_page_token='next_page_token_value',
-            unreachable=['unreachable_value'],
-        ))
-        response = await client.list_volumes(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = volume.ListVolumesRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListVolumesAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable == ['unreachable_value']
-
-
-@pytest.mark.asyncio
-async def test_list_volumes_async_from_dict():
-    await test_list_volumes_async(request_type=dict)
-
-def test_list_volumes_field_headers():
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = volume.ListVolumesRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_volumes),
-            '__call__') as call:
-        call.return_value = volume.ListVolumesResponse()
-        client.list_volumes(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_volumes_field_headers_async():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = volume.ListVolumesRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_volumes),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(volume.ListVolumesResponse())
-        await client.list_volumes(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
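-    # Aside (hedged): the ('x-goog-request-params', 'parent=parent_value')
-    # pair checked below is what gapic_v1.routing_header.to_grpc_metadata
-    # produces from the routed field; in effect each routed field is encoded
-    # as key=value, with multiple fields joined by '&'.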
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_list_volumes_flattened():
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_volumes),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = volume.ListVolumesResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_volumes(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_volumes_flattened_error():
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_volumes(
-            volume.ListVolumesRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_volumes_flattened_async():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_volumes),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(volume.ListVolumesResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_volumes(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_volumes_flattened_error_async():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_volumes(
-            volume.ListVolumesRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_volumes_pager(transport_name: str = "grpc"):
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_volumes),
-            '__call__') as call:
-        # Set the response to a series of pages.
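-        # Aside (hedged): side_effect yields one response per underlying RPC
-        # call, so the pager should consume exactly four pages; iteration
-        # stops at the page whose next_page_token is empty, and the trailing
-        # RuntimeError is a sentinel that only fires if the pager over-fetches.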
-        call.side_effect = (
-            volume.ListVolumesResponse(
-                volumes=[
-                    volume.Volume(),
-                    volume.Volume(),
-                    volume.Volume(),
-                ],
-                next_page_token='abc',
-            ),
-            volume.ListVolumesResponse(
-                volumes=[],
-                next_page_token='def',
-            ),
-            volume.ListVolumesResponse(
-                volumes=[
-                    volume.Volume(),
-                ],
-                next_page_token='ghi',
-            ),
-            volume.ListVolumesResponse(
-                volumes=[
-                    volume.Volume(),
-                    volume.Volume(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        expected_metadata = ()
-        retry = retries.Retry()
-        timeout = 5
-        expected_metadata = tuple(expected_metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('parent', ''),
-            )),
-        )
-        pager = client.list_volumes(request={}, retry=retry, timeout=timeout)
-
-        assert pager._metadata == expected_metadata
-        assert pager._retry == retry
-        assert pager._timeout == timeout
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, volume.Volume)
-                   for i in results)
-def test_list_volumes_pages(transport_name: str = "grpc"):
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_volumes),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            volume.ListVolumesResponse(
-                volumes=[
-                    volume.Volume(),
-                    volume.Volume(),
-                    volume.Volume(),
-                ],
-                next_page_token='abc',
-            ),
-            volume.ListVolumesResponse(
-                volumes=[],
-                next_page_token='def',
-            ),
-            volume.ListVolumesResponse(
-                volumes=[
-                    volume.Volume(),
-                ],
-                next_page_token='ghi',
-            ),
-            volume.ListVolumesResponse(
-                volumes=[
-                    volume.Volume(),
-                    volume.Volume(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = list(client.list_volumes(request={}).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.asyncio
-async def test_list_volumes_async_pager():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_volumes),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            volume.ListVolumesResponse(
-                volumes=[
-                    volume.Volume(),
-                    volume.Volume(),
-                    volume.Volume(),
-                ],
-                next_page_token='abc',
-            ),
-            volume.ListVolumesResponse(
-                volumes=[],
-                next_page_token='def',
-            ),
-            volume.ListVolumesResponse(
-                volumes=[
-                    volume.Volume(),
-                ],
-                next_page_token='ghi',
-            ),
-            volume.ListVolumesResponse(
-                volumes=[
-                    volume.Volume(),
-                    volume.Volume(),
-                ],
-            ),
-            RuntimeError,
-        )
-        async_pager = await client.list_volumes(request={},)
-        assert async_pager.next_page_token == 'abc'
-        responses = []
-        async for response in async_pager: # pragma: no branch
-            responses.append(response)
-
-        assert len(responses) == 6
-        assert all(isinstance(i, volume.Volume)
-                   for i in responses)
-
-
-@pytest.mark.asyncio
-async def test_list_volumes_async_pages():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_volumes),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            volume.ListVolumesResponse(
-                volumes=[
-                    volume.Volume(),
-                    volume.Volume(),
-                    volume.Volume(),
-                ],
-                next_page_token='abc',
-            ),
-            volume.ListVolumesResponse(
-                volumes=[],
-                next_page_token='def',
-            ),
-            volume.ListVolumesResponse(
-                volumes=[
-                    volume.Volume(),
-                ],
-                next_page_token='ghi',
-            ),
-            volume.ListVolumesResponse(
-                volumes=[
-                    volume.Volume(),
-                    volume.Volume(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = []
-        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
-        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
-        async for page_ in ( # pragma: no branch
-            await client.list_volumes(request={})
-        ).pages:
-            pages.append(page_)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.parametrize("request_type", [
-    volume.GetVolumeRequest,
-    dict,
-])
-def test_get_volume(request_type, transport: str = 'grpc'):
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_volume),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = volume.Volume(
-            name='name_value',
-            id='id_value',
-            storage_type=volume.Volume.StorageType.SSD,
-            state=volume.Volume.State.CREATING,
-            requested_size_gib=1917,
-            originally_requested_size_gib=3094,
-            current_size_gib=1710,
-            emergency_size_gib=1898,
-            max_size_gib=1265,
-            auto_grown_size_gib=2032,
-            remaining_space_gib=1974,
-            snapshot_auto_delete_behavior=volume.Volume.SnapshotAutoDeleteBehavior.DISABLED,
-            snapshot_enabled=True,
-            pod='pod_value',
-            protocol=volume.Volume.Protocol.FIBRE_CHANNEL,
-            boot_volume=True,
-            performance_tier=common.VolumePerformanceTier.VOLUME_PERFORMANCE_TIER_SHARED,
-            notes='notes_value',
-            workload_profile=volume.Volume.WorkloadProfile.GENERIC,
-            instances=['instances_value'],
-            attached=True,
-        )
-        response = client.get_volume(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = volume.GetVolumeRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, volume.Volume)
-    assert response.name == 'name_value'
-    assert response.id == 'id_value'
-    assert response.storage_type == volume.Volume.StorageType.SSD
-    assert response.state == volume.Volume.State.CREATING
-    assert response.requested_size_gib == 1917
-    assert response.originally_requested_size_gib == 3094
-    assert response.current_size_gib == 1710
-    assert response.emergency_size_gib == 1898
-    assert response.max_size_gib == 1265
-    assert response.auto_grown_size_gib == 2032
-    assert response.remaining_space_gib == 1974
-    assert response.snapshot_auto_delete_behavior == volume.Volume.SnapshotAutoDeleteBehavior.DISABLED
-    assert response.snapshot_enabled is True
-    assert response.pod == 'pod_value'
-    assert response.protocol == volume.Volume.Protocol.FIBRE_CHANNEL
-    assert response.boot_volume is True
-    assert response.performance_tier == common.VolumePerformanceTier.VOLUME_PERFORMANCE_TIER_SHARED
-    assert response.notes == 'notes_value'
-    assert response.workload_profile == volume.Volume.WorkloadProfile.GENERIC
-    assert response.instances == ['instances_value']
-    assert response.attached is True
-
-
-def test_get_volume_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = volume.GetVolumeRequest(
-        name='name_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_volume),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.get_volume(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == volume.GetVolumeRequest(
-            name='name_value',
-        )
-
-def test_get_volume_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = BareMetalSolutionClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.get_volume in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.get_volume] = mock_rpc
-        request = {}
-        client.get_volume(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.get_volume(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_volume_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = BareMetalSolutionAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.get_volume in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.get_volume] = mock_rpc
-
-        request = {}
-        await client.get_volume(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.get_volume(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_volume_async(transport: str = 'grpc_asyncio', request_type=volume.GetVolumeRequest):
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_volume),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(volume.Volume(
-            name='name_value',
-            id='id_value',
-            storage_type=volume.Volume.StorageType.SSD,
-            state=volume.Volume.State.CREATING,
-            requested_size_gib=1917,
-            originally_requested_size_gib=3094,
-            current_size_gib=1710,
-            emergency_size_gib=1898,
-            max_size_gib=1265,
-            auto_grown_size_gib=2032,
-            remaining_space_gib=1974,
-            snapshot_auto_delete_behavior=volume.Volume.SnapshotAutoDeleteBehavior.DISABLED,
-            snapshot_enabled=True,
-            pod='pod_value',
-            protocol=volume.Volume.Protocol.FIBRE_CHANNEL,
-            boot_volume=True,
-            performance_tier=common.VolumePerformanceTier.VOLUME_PERFORMANCE_TIER_SHARED,
-            notes='notes_value',
-            workload_profile=volume.Volume.WorkloadProfile.GENERIC,
-            instances=['instances_value'],
-            attached=True,
-        ))
-        response = await client.get_volume(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = volume.GetVolumeRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, volume.Volume)
-    assert response.name == 'name_value'
-    assert response.id == 'id_value'
-    assert response.storage_type == volume.Volume.StorageType.SSD
-    assert response.state == volume.Volume.State.CREATING
-    assert response.requested_size_gib == 1917
-    assert response.originally_requested_size_gib == 3094
-    assert response.current_size_gib == 1710
-    assert response.emergency_size_gib == 1898
-    assert response.max_size_gib == 1265
-    assert response.auto_grown_size_gib == 2032
-    assert response.remaining_space_gib == 1974
-    assert response.snapshot_auto_delete_behavior == volume.Volume.SnapshotAutoDeleteBehavior.DISABLED
-    assert response.snapshot_enabled is True
-    assert response.pod == 'pod_value'
-    assert response.protocol == volume.Volume.Protocol.FIBRE_CHANNEL
-    assert response.boot_volume is True
-    assert response.performance_tier == common.VolumePerformanceTier.VOLUME_PERFORMANCE_TIER_SHARED
-    assert response.notes == 'notes_value'
-    assert response.workload_profile == volume.Volume.WorkloadProfile.GENERIC
-    assert response.instances == ['instances_value']
-    assert response.attached is True
-
-
-@pytest.mark.asyncio
-async def test_get_volume_async_from_dict():
-    await test_get_volume_async(request_type=dict)
-
-def test_get_volume_field_headers():
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = volume.GetVolumeRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_volume),
-            '__call__') as call:
-        call.return_value = volume.Volume()
-        client.get_volume(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_volume_field_headers_async():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = volume.GetVolumeRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_volume),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(volume.Volume())
-        await client.get_volume(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_get_volume_flattened():
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_volume),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = volume.Volume()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.get_volume(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_get_volume_flattened_error():
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_volume(
-            volume.GetVolumeRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_get_volume_flattened_async():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_volume),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(volume.Volume())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.get_volume(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_volume_flattened_error_async():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.get_volume(
-            volume.GetVolumeRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    gcb_volume.UpdateVolumeRequest,
-    dict,
-])
-def test_update_volume(request_type, transport: str = 'grpc'):
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_volume),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/spam')
-        response = client.update_volume(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = gcb_volume.UpdateVolumeRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, future.Future)
-
-
-def test_update_volume_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
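-    # Aside (hedged): per AIP-4235, request fields annotated as auto-populated
-    # are filled with a UUID4 by the client when left empty; this test
-    # populates whatever non-UUID string fields exist and then checks the
-    # request round-trips unchanged, indirectly covering that path.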
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = gcb_volume.UpdateVolumeRequest(
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_volume),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.update_volume(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == gcb_volume.UpdateVolumeRequest(
-        )
-
-def test_update_volume_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = BareMetalSolutionClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.update_volume in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.update_volume] = mock_rpc
-        request = {}
-        client.update_volume(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        # Operation methods call wrapper_fn to build a cached
-        # client._transport.operations_client instance on first rpc call.
-        # Subsequent calls should use the cached wrapper
-        wrapper_fn.reset_mock()
-
-        client.update_volume(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_update_volume_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = BareMetalSolutionAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.update_volume in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.update_volume] = mock_rpc
-
-        request = {}
-        await client.update_volume(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        # Operation methods call wrapper_fn to build a cached
-        # client._transport.operations_client instance on first rpc call.
-        # Subsequent calls should use the cached wrapper
-        wrapper_fn.reset_mock()
-
-        await client.update_volume(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_update_volume_async(transport: str = 'grpc_asyncio', request_type=gcb_volume.UpdateVolumeRequest):
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_volume),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        response = await client.update_volume(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = gcb_volume.UpdateVolumeRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, future.Future)
-
-
-@pytest.mark.asyncio
-async def test_update_volume_async_from_dict():
-    await test_update_volume_async(request_type=dict)
-
-def test_update_volume_field_headers():
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = gcb_volume.UpdateVolumeRequest()
-
-    request.volume.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_volume),
-            '__call__') as call:
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        client.update_volume(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'volume.name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_update_volume_field_headers_async():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = gcb_volume.UpdateVolumeRequest()
-
-    request.volume.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_volume),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
-        await client.update_volume(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
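-    # Aside (hedged): UpdateVolume routes on a nested field, so the routing
-    # header key is the full field path within the request message; hence
-    # 'volume.name=name_value' below rather than a bare 'name=name_value'.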
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'volume.name=name_value',
-    ) in kw['metadata']
-
-
-def test_update_volume_flattened():
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_volume),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.update_volume(
-            volume=gcb_volume.Volume(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].volume
-        mock_val = gcb_volume.Volume(name='name_value')
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-
-def test_update_volume_flattened_error():
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.update_volume(
-            gcb_volume.UpdateVolumeRequest(),
-            volume=gcb_volume.Volume(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-@pytest.mark.asyncio
-async def test_update_volume_flattened_async():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_volume),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.update_volume(
-            volume=gcb_volume.Volume(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].volume
-        mock_val = gcb_volume.Volume(name='name_value')
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_volume_flattened_error_async():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
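-    # Aside (hedged): GAPIC methods accept either a fully formed request
-    # message or flattened keyword fields, never both at once; mixing them
-    # raises ValueError client-side, before any RPC is attempted.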
-    with pytest.raises(ValueError):
-        await client.update_volume(
-            gcb_volume.UpdateVolumeRequest(),
-            volume=gcb_volume.Volume(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    volume.RenameVolumeRequest,
-    dict,
-])
-def test_rename_volume(request_type, transport: str = 'grpc'):
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.rename_volume),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = volume.Volume(
-            name='name_value',
-            id='id_value',
-            storage_type=volume.Volume.StorageType.SSD,
-            state=volume.Volume.State.CREATING,
-            requested_size_gib=1917,
-            originally_requested_size_gib=3094,
-            current_size_gib=1710,
-            emergency_size_gib=1898,
-            max_size_gib=1265,
-            auto_grown_size_gib=2032,
-            remaining_space_gib=1974,
-            snapshot_auto_delete_behavior=volume.Volume.SnapshotAutoDeleteBehavior.DISABLED,
-            snapshot_enabled=True,
-            pod='pod_value',
-            protocol=volume.Volume.Protocol.FIBRE_CHANNEL,
-            boot_volume=True,
-            performance_tier=common.VolumePerformanceTier.VOLUME_PERFORMANCE_TIER_SHARED,
-            notes='notes_value',
-            workload_profile=volume.Volume.WorkloadProfile.GENERIC,
-            instances=['instances_value'],
-            attached=True,
-        )
-        response = client.rename_volume(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = volume.RenameVolumeRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, volume.Volume)
-    assert response.name == 'name_value'
-    assert response.id == 'id_value'
-    assert response.storage_type == volume.Volume.StorageType.SSD
-    assert response.state == volume.Volume.State.CREATING
-    assert response.requested_size_gib == 1917
-    assert response.originally_requested_size_gib == 3094
-    assert response.current_size_gib == 1710
-    assert response.emergency_size_gib == 1898
-    assert response.max_size_gib == 1265
-    assert response.auto_grown_size_gib == 2032
-    assert response.remaining_space_gib == 1974
-    assert response.snapshot_auto_delete_behavior == volume.Volume.SnapshotAutoDeleteBehavior.DISABLED
-    assert response.snapshot_enabled is True
-    assert response.pod == 'pod_value'
-    assert response.protocol == volume.Volume.Protocol.FIBRE_CHANNEL
-    assert response.boot_volume is True
-    assert response.performance_tier == common.VolumePerformanceTier.VOLUME_PERFORMANCE_TIER_SHARED
-    assert response.notes == 'notes_value'
-    assert response.workload_profile == volume.Volume.WorkloadProfile.GENERIC
-    assert response.instances == ['instances_value']
-    assert response.attached is True
-
-
-def test_rename_volume_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = volume.RenameVolumeRequest(
-        name='name_value',
-        new_volume_id='new_volume_id_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.rename_volume),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.rename_volume(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == volume.RenameVolumeRequest(
-            name='name_value',
-            new_volume_id='new_volume_id_value',
-        )
-
-def test_rename_volume_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = BareMetalSolutionClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.rename_volume in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.rename_volume] = mock_rpc
-        request = {}
-        client.rename_volume(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.rename_volume(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_rename_volume_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = BareMetalSolutionAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.rename_volume in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.rename_volume] = mock_rpc
-
-        request = {}
-        await client.rename_volume(request)
-
-        # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.rename_volume(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_rename_volume_async(transport: str = 'grpc_asyncio', request_type=volume.RenameVolumeRequest):
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.rename_volume),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(volume.Volume(
- name='name_value',
- id='id_value',
- storage_type=volume.Volume.StorageType.SSD,
- state=volume.Volume.State.CREATING,
- requested_size_gib=1917,
- originally_requested_size_gib=3094,
- current_size_gib=1710,
- emergency_size_gib=1898,
- max_size_gib=1265,
- auto_grown_size_gib=2032,
- remaining_space_gib=1974,
- snapshot_auto_delete_behavior=volume.Volume.SnapshotAutoDeleteBehavior.DISABLED,
- snapshot_enabled=True,
- pod='pod_value',
- protocol=volume.Volume.Protocol.FIBRE_CHANNEL,
- boot_volume=True,
- performance_tier=common.VolumePerformanceTier.VOLUME_PERFORMANCE_TIER_SHARED,
- notes='notes_value',
- workload_profile=volume.Volume.WorkloadProfile.GENERIC,
- instances=['instances_value'],
- attached=True,
- ))
- response = await client.rename_volume(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = volume.RenameVolumeRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, volume.Volume)
- assert response.name == 'name_value'
- assert response.id == 'id_value'
- assert response.storage_type == volume.Volume.StorageType.SSD
- assert response.state == volume.Volume.State.CREATING
- assert response.requested_size_gib == 1917
- assert response.originally_requested_size_gib == 3094
- assert response.current_size_gib == 1710
- assert response.emergency_size_gib == 1898
- assert response.max_size_gib == 1265
- assert response.auto_grown_size_gib == 2032
- assert response.remaining_space_gib == 1974
- assert response.snapshot_auto_delete_behavior == volume.Volume.SnapshotAutoDeleteBehavior.DISABLED
- assert response.snapshot_enabled is True
- assert response.pod == 'pod_value'
- assert response.protocol == volume.Volume.Protocol.FIBRE_CHANNEL
- assert response.boot_volume is True
- assert response.performance_tier == common.VolumePerformanceTier.VOLUME_PERFORMANCE_TIER_SHARED
- assert response.notes == 'notes_value'
- assert response.workload_profile == volume.Volume.WorkloadProfile.GENERIC
- assert response.instances == ['instances_value']
- assert response.attached is True
-
-
-@pytest.mark.asyncio
-async def test_rename_volume_async_from_dict():
- await test_rename_volume_async(request_type=dict)
-
-def test_rename_volume_field_headers():
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
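- # The transport builds that header from the request's routing fields; a
- # rough sketch using the gapic_v1 helper the generated clients rely on
- # (the inline wiring here is illustrative, not the client's exact code):
- #
- #     from google.api_core import gapic_v1
- #     metadata = gapic_v1.routing_header.to_grpc_metadata([('name', request.name)])
- #     # -> ('x-goog-request-params', 'name=name_value')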
- request = volume.RenameVolumeRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.rename_volume),
- '__call__') as call:
- call.return_value = volume.Volume()
- client.rename_volume(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_rename_volume_field_headers_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = volume.RenameVolumeRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.rename_volume),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(volume.Volume())
- await client.rename_volume(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_rename_volume_flattened():
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.rename_volume),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = volume.Volume()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.rename_volume(
- name='name_value',
- new_volume_id='new_volume_id_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
- arg = args[0].new_volume_id
- mock_val = 'new_volume_id_value'
- assert arg == mock_val
-
-
-def test_rename_volume_flattened_error():
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.rename_volume(
- volume.RenameVolumeRequest(),
- name='name_value',
- new_volume_id='new_volume_id_value',
- )
-
-@pytest.mark.asyncio
-async def test_rename_volume_flattened_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.rename_volume),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(volume.Volume())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
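- # Flattened keyword arguments are folded into a fresh request message
- # before the wrapped RPC is invoked, roughly (an illustrative sketch, not
- # the generated client's literal code):
- #
- #     request = volume.RenameVolumeRequest()
- #     request.name = 'name_value'
- #     request.new_volume_id = 'new_volume_id_value'
- #
- # which is why the assertions below inspect the fields of args[0].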
- response = await client.rename_volume( - name='name_value', - new_volume_id='new_volume_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].new_volume_id - mock_val = 'new_volume_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_rename_volume_flattened_error_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.rename_volume( - volume.RenameVolumeRequest(), - name='name_value', - new_volume_id='new_volume_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - volume.EvictVolumeRequest, - dict, -]) -def test_evict_volume(request_type, transport: str = 'grpc'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.evict_volume), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.evict_volume(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = volume.EvictVolumeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_evict_volume_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = volume.EvictVolumeRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.evict_volume), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
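- # Without this, the MagicMock would hand back another Mock for `.name`;
- # shared helper code that expects operation names to be strings (see the
- # comment above) would then fail, so any concrete string will do here.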
- client.evict_volume(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == volume.EvictVolumeRequest( - name='name_value', - ) - -def test_evict_volume_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.evict_volume in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.evict_volume] = mock_rpc - request = {} - client.evict_volume(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.evict_volume(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_evict_volume_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.evict_volume in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.evict_volume] = mock_rpc - - request = {} - await client.evict_volume(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.evict_volume(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_evict_volume_async(transport: str = 'grpc_asyncio', request_type=volume.EvictVolumeRequest): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
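- # evict_volume is a long-running operation: the raw gRPC stub returns an
- # operations_pb2.Operation, which the async client wraps into an api_core
- # operation_async future. A rough sketch of how a caller consumes it (the
- # volume name below is a placeholder):
- #
- #     operation = await client.evict_volume(name='projects/p/locations/l/volumes/v')
- #     await operation.result()  # waits for the server-side eviction
- #
- # Hence the FakeUnaryUnaryCall wrapping an Operation in the mock below.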
- with mock.patch.object( - type(client.transport.evict_volume), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.evict_volume(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = volume.EvictVolumeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_evict_volume_async_from_dict(): - await test_evict_volume_async(request_type=dict) - -def test_evict_volume_field_headers(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = volume.EvictVolumeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.evict_volume), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.evict_volume(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_evict_volume_field_headers_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = volume.EvictVolumeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.evict_volume), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.evict_volume(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_evict_volume_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.evict_volume), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.evict_volume( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_evict_volume_flattened_error():
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.evict_volume(
- volume.EvictVolumeRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_evict_volume_flattened_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.evict_volume),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name='operations/spam')
- )
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.evict_volume(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_evict_volume_flattened_error_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.evict_volume(
- volume.EvictVolumeRequest(),
- name='name_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- gcb_volume.ResizeVolumeRequest,
- dict,
-])
-def test_resize_volume(request_type, transport: str = 'grpc'):
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.resize_volume),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/spam')
- response = client.resize_volume(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = gcb_volume.ResizeVolumeRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, future.Future)
-
-
-def test_resize_volume_non_empty_request_with_auto_populated_field():
- # This test is a coverage failsafe to make sure that UUID4 fields are
- # automatically populated, according to AIP-4235, with non-empty requests.
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Populate all string fields in the request which are not UUID4
- # since we want to check that UUID4 are populated automatically
- # if they meet the requirements of AIP 4235.
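- # ResizeVolumeRequest carries `volume` as its string field; its int64
- # field `size_gib` is left out here because the check only concerns
- # string-typed, non-UUID4 fields.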
- request = gcb_volume.ResizeVolumeRequest( - volume='volume_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.resize_volume), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.resize_volume(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == gcb_volume.ResizeVolumeRequest( - volume='volume_value', - ) - -def test_resize_volume_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.resize_volume in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.resize_volume] = mock_rpc - request = {} - client.resize_volume(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.resize_volume(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_resize_volume_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.resize_volume in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.resize_volume] = mock_rpc - - request = {} - await client.resize_volume(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.resize_volume(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_resize_volume_async(transport: str = 'grpc_asyncio', request_type=gcb_volume.ResizeVolumeRequest): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.resize_volume), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.resize_volume(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = gcb_volume.ResizeVolumeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_resize_volume_async_from_dict(): - await test_resize_volume_async(request_type=dict) - -def test_resize_volume_field_headers(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gcb_volume.ResizeVolumeRequest() - - request.volume = 'volume_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.resize_volume), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.resize_volume(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'volume=volume_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_resize_volume_field_headers_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gcb_volume.ResizeVolumeRequest() - - request.volume = 'volume_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.resize_volume), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.resize_volume(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'volume=volume_value',
- ) in kw['metadata']
-
-
-def test_resize_volume_flattened():
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.resize_volume),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/op')
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.resize_volume(
- volume='volume_value',
- size_gib=844,
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].volume
- mock_val = 'volume_value'
- assert arg == mock_val
- arg = args[0].size_gib
- mock_val = 844
- assert arg == mock_val
-
-
-def test_resize_volume_flattened_error():
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.resize_volume(
- gcb_volume.ResizeVolumeRequest(),
- volume='volume_value',
- size_gib=844,
- )
-
-@pytest.mark.asyncio
-async def test_resize_volume_flattened_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.resize_volume),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name='operations/spam')
- )
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.resize_volume(
- volume='volume_value',
- size_gib=844,
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].volume
- mock_val = 'volume_value'
- assert arg == mock_val
- arg = args[0].size_gib
- mock_val = 844
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_resize_volume_flattened_error_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.resize_volume(
- gcb_volume.ResizeVolumeRequest(),
- volume='volume_value',
- size_gib=844,
- )
-
-
-@pytest.mark.parametrize("request_type", [
- network.ListNetworksRequest,
- dict,
-])
-def test_list_networks(request_type, transport: str = 'grpc'):
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_networks),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = network.ListNetworksResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - response = client.list_networks(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = network.ListNetworksRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListNetworksPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -def test_list_networks_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = network.ListNetworksRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_networks), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_networks(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == network.ListNetworksRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - ) - -def test_list_networks_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_networks in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_networks] = mock_rpc - request = {} - client.list_networks(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.list_networks(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_networks_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.list_networks in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.list_networks] = mock_rpc
-
- request = {}
- await client.list_networks(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.list_networks(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_networks_async(transport: str = 'grpc_asyncio', request_type=network.ListNetworksRequest):
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_networks),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(network.ListNetworksResponse(
- next_page_token='next_page_token_value',
- unreachable=['unreachable_value'],
- ))
- response = await client.list_networks(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = network.ListNetworksRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListNetworksAsyncPager)
- assert response.next_page_token == 'next_page_token_value'
- assert response.unreachable == ['unreachable_value']
-
-
-@pytest.mark.asyncio
-async def test_list_networks_async_from_dict():
- await test_list_networks_async(request_type=dict)
-
-def test_list_networks_field_headers():
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = network.ListNetworksRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_networks),
- '__call__') as call:
- call.return_value = network.ListNetworksResponse()
- client.list_networks(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_networks_field_headers_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = network.ListNetworksRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_networks),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(network.ListNetworksResponse())
- await client.list_networks(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-def test_list_networks_flattened():
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_networks),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = network.ListNetworksResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.list_networks(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-
-def test_list_networks_flattened_error():
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.list_networks(
- network.ListNetworksRequest(),
- parent='parent_value',
- )
-
-@pytest.mark.asyncio
-async def test_list_networks_flattened_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_networks),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(network.ListNetworksResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_networks_flattened_error_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_networks( - network.ListNetworksRequest(), - parent='parent_value', - ) - - -def test_list_networks_pager(transport_name: str = "grpc"): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_networks), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - network.ListNetworksResponse( - networks=[ - network.Network(), - network.Network(), - network.Network(), - ], - next_page_token='abc', - ), - network.ListNetworksResponse( - networks=[], - next_page_token='def', - ), - network.ListNetworksResponse( - networks=[ - network.Network(), - ], - next_page_token='ghi', - ), - network.ListNetworksResponse( - networks=[ - network.Network(), - network.Network(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_networks(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, network.Network) - for i in results) -def test_list_networks_pages(transport_name: str = "grpc"): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_networks), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - network.ListNetworksResponse( - networks=[ - network.Network(), - network.Network(), - network.Network(), - ], - next_page_token='abc', - ), - network.ListNetworksResponse( - networks=[], - next_page_token='def', - ), - network.ListNetworksResponse( - networks=[ - network.Network(), - ], - next_page_token='ghi', - ), - network.ListNetworksResponse( - networks=[ - network.Network(), - network.Network(), - ], - ), - RuntimeError, - ) - pages = list(client.list_networks(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_networks_async_pager(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_networks), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
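- # Four pages are simulated below (3 + 0 + 1 + 2 items) with tokens 'abc',
- # 'def', 'ghi', and finally an empty token that ends iteration; the
- # trailing RuntimeError guards against a spurious extra fetch. The async
- # pager walks them roughly like this (an illustrative sketch of the paging
- # protocol, not the pager's literal code):
- #
- #     response = await rpc(request)
- #     while True:
- #         for item in response.networks:
- #             yield item
- #         if not response.next_page_token:
- #             break
- #         request.page_token = response.next_page_token
- #         response = await rpc(request)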
- call.side_effect = ( - network.ListNetworksResponse( - networks=[ - network.Network(), - network.Network(), - network.Network(), - ], - next_page_token='abc', - ), - network.ListNetworksResponse( - networks=[], - next_page_token='def', - ), - network.ListNetworksResponse( - networks=[ - network.Network(), - ], - next_page_token='ghi', - ), - network.ListNetworksResponse( - networks=[ - network.Network(), - network.Network(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_networks(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, network.Network) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_networks_async_pages(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_networks), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - network.ListNetworksResponse( - networks=[ - network.Network(), - network.Network(), - network.Network(), - ], - next_page_token='abc', - ), - network.ListNetworksResponse( - networks=[], - next_page_token='def', - ), - network.ListNetworksResponse( - networks=[ - network.Network(), - ], - next_page_token='ghi', - ), - network.ListNetworksResponse( - networks=[ - network.Network(), - network.Network(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_networks(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - network.ListNetworkUsageRequest, - dict, -]) -def test_list_network_usage(request_type, transport: str = 'grpc'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_network_usage), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = network.ListNetworkUsageResponse( - ) - response = client.list_network_usage(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = network.ListNetworkUsageRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, network.ListNetworkUsageResponse) - - -def test_list_network_usage_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = network.ListNetworkUsageRequest( - location='location_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_network_usage), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_network_usage(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == network.ListNetworkUsageRequest( - location='location_value', - ) - -def test_list_network_usage_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_network_usage in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_network_usage] = mock_rpc - request = {} - client.list_network_usage(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_network_usage(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_network_usage_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_network_usage in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_network_usage] = mock_rpc - - request = {} - await client.list_network_usage(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- await client.list_network_usage(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_network_usage_async(transport: str = 'grpc_asyncio', request_type=network.ListNetworkUsageRequest):
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_network_usage),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(network.ListNetworkUsageResponse(
- ))
- response = await client.list_network_usage(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = network.ListNetworkUsageRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, network.ListNetworkUsageResponse)
-
-
-@pytest.mark.asyncio
-async def test_list_network_usage_async_from_dict():
- await test_list_network_usage_async(request_type=dict)
-
-def test_list_network_usage_field_headers():
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = network.ListNetworkUsageRequest()
-
- request.location = 'location_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_network_usage),
- '__call__') as call:
- call.return_value = network.ListNetworkUsageResponse()
- client.list_network_usage(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'location=location_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_network_usage_field_headers_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = network.ListNetworkUsageRequest()
-
- request.location = 'location_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_network_usage),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(network.ListNetworkUsageResponse())
- await client.list_network_usage(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'location=location_value',
- ) in kw['metadata']
-
-
-def test_list_network_usage_flattened():
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_network_usage),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = network.ListNetworkUsageResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.list_network_usage(
- location='location_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].location
- mock_val = 'location_value'
- assert arg == mock_val
-
-
-def test_list_network_usage_flattened_error():
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.list_network_usage(
- network.ListNetworkUsageRequest(),
- location='location_value',
- )
-
-@pytest.mark.asyncio
-async def test_list_network_usage_flattened_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_network_usage),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(network.ListNetworkUsageResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.list_network_usage(
- location='location_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].location
- mock_val = 'location_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_network_usage_flattened_error_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.list_network_usage(
- network.ListNetworkUsageRequest(),
- location='location_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- network.GetNetworkRequest,
- dict,
-])
-def test_get_network(request_type, transport: str = 'grpc'):
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_network),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = network.Network( - name='name_value', - id='id_value', - type_=network.Network.Type.CLIENT, - ip_address='ip_address_value', - mac_address=['mac_address_value'], - state=network.Network.State.PROVISIONING, - vlan_id='vlan_id_value', - cidr='cidr_value', - services_cidr='services_cidr_value', - pod='pod_value', - jumbo_frames_enabled=True, - gateway_ip='gateway_ip_value', - ) - response = client.get_network(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = network.GetNetworkRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, network.Network) - assert response.name == 'name_value' - assert response.id == 'id_value' - assert response.type_ == network.Network.Type.CLIENT - assert response.ip_address == 'ip_address_value' - assert response.mac_address == ['mac_address_value'] - assert response.state == network.Network.State.PROVISIONING - assert response.vlan_id == 'vlan_id_value' - assert response.cidr == 'cidr_value' - assert response.services_cidr == 'services_cidr_value' - assert response.pod == 'pod_value' - assert response.jumbo_frames_enabled is True - assert response.gateway_ip == 'gateway_ip_value' - - -def test_get_network_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = network.GetNetworkRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_network), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_network(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == network.GetNetworkRequest( - name='name_value', - ) - -def test_get_network_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_network in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_network] = mock_rpc - request = {} - client.get_network(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.get_network(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_network_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = BareMetalSolutionAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.get_network in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.get_network] = mock_rpc
-
-        request = {}
-        await client.get_network(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.get_network(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_network_async(transport: str = 'grpc_asyncio', request_type=network.GetNetworkRequest):
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_network),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(network.Network(
-            name='name_value',
-            id='id_value',
-            type_=network.Network.Type.CLIENT,
-            ip_address='ip_address_value',
-            mac_address=['mac_address_value'],
-            state=network.Network.State.PROVISIONING,
-            vlan_id='vlan_id_value',
-            cidr='cidr_value',
-            services_cidr='services_cidr_value',
-            pod='pod_value',
-            jumbo_frames_enabled=True,
-            gateway_ip='gateway_ip_value',
-        ))
-        response = await client.get_network(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = network.GetNetworkRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
- assert isinstance(response, network.Network) - assert response.name == 'name_value' - assert response.id == 'id_value' - assert response.type_ == network.Network.Type.CLIENT - assert response.ip_address == 'ip_address_value' - assert response.mac_address == ['mac_address_value'] - assert response.state == network.Network.State.PROVISIONING - assert response.vlan_id == 'vlan_id_value' - assert response.cidr == 'cidr_value' - assert response.services_cidr == 'services_cidr_value' - assert response.pod == 'pod_value' - assert response.jumbo_frames_enabled is True - assert response.gateway_ip == 'gateway_ip_value' - - -@pytest.mark.asyncio -async def test_get_network_async_from_dict(): - await test_get_network_async(request_type=dict) - -def test_get_network_field_headers(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = network.GetNetworkRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_network), - '__call__') as call: - call.return_value = network.Network() - client.get_network(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_network_field_headers_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = network.GetNetworkRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_network), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(network.Network()) - await client.get_network(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_network_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_network), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = network.Network() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_network( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_network_flattened_error(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_network( - network.GetNetworkRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_network_flattened_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_network), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = network.Network() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(network.Network()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_network( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_network_flattened_error_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_network( - network.GetNetworkRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - gcb_network.UpdateNetworkRequest, - dict, -]) -def test_update_network(request_type, transport: str = 'grpc'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_network), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_network(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = gcb_network.UpdateNetworkRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_network_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = gcb_network.UpdateNetworkRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_network), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_network(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == gcb_network.UpdateNetworkRequest( - ) - -def test_update_network_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_network in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_network] = mock_rpc - request = {} - client.update_network(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_network(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_network_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_network in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_network] = mock_rpc - - request = {} - await client.update_network(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_network(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_network_async(transport: str = 'grpc_asyncio', request_type=gcb_network.UpdateNetworkRequest): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_network), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_network(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = gcb_network.UpdateNetworkRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_network_async_from_dict(): - await test_update_network_async(request_type=dict) - -def test_update_network_field_headers(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gcb_network.UpdateNetworkRequest() - - request.network.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_network), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_network(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'network.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_network_field_headers_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gcb_network.UpdateNetworkRequest() - - request.network.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_network), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_network(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'network.name=name_value', - ) in kw['metadata'] - - -def test_update_network_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_network), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
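-        # The client assembles an UpdateNetworkRequest from these keyword arguments.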
- client.update_network( - network=gcb_network.Network(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].network - mock_val = gcb_network.Network(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_network_flattened_error(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_network( - gcb_network.UpdateNetworkRequest(), - network=gcb_network.Network(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_network_flattened_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_network), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_network( - network=gcb_network.Network(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].network - mock_val = gcb_network.Network(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_network_flattened_error_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_network( - gcb_network.UpdateNetworkRequest(), - network=gcb_network.Network(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - gcb_volume_snapshot.CreateVolumeSnapshotRequest, - dict, -]) -def test_create_volume_snapshot(request_type, transport: str = 'grpc'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_volume_snapshot), - '__call__') as call: - # Designate an appropriate return value for the call. 
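-        # Populating every VolumeSnapshot field ensures each response assertion below is exercised.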
- call.return_value = gcb_volume_snapshot.VolumeSnapshot( - name='name_value', - id='id_value', - description='description_value', - storage_volume='storage_volume_value', - type_=gcb_volume_snapshot.VolumeSnapshot.SnapshotType.AD_HOC, - ) - response = client.create_volume_snapshot(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = gcb_volume_snapshot.CreateVolumeSnapshotRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, gcb_volume_snapshot.VolumeSnapshot) - assert response.name == 'name_value' - assert response.id == 'id_value' - assert response.description == 'description_value' - assert response.storage_volume == 'storage_volume_value' - assert response.type_ == gcb_volume_snapshot.VolumeSnapshot.SnapshotType.AD_HOC - - -def test_create_volume_snapshot_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = gcb_volume_snapshot.CreateVolumeSnapshotRequest( - parent='parent_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_volume_snapshot), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_volume_snapshot(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == gcb_volume_snapshot.CreateVolumeSnapshotRequest( - parent='parent_value', - ) - -def test_create_volume_snapshot_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_volume_snapshot in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_volume_snapshot] = mock_rpc - request = {} - client.create_volume_snapshot(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.create_volume_snapshot(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_create_volume_snapshot_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = BareMetalSolutionAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.create_volume_snapshot in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.create_volume_snapshot] = mock_rpc
-
-        request = {}
-        await client.create_volume_snapshot(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.create_volume_snapshot(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_create_volume_snapshot_async(transport: str = 'grpc_asyncio', request_type=gcb_volume_snapshot.CreateVolumeSnapshotRequest):
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_volume_snapshot),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcb_volume_snapshot.VolumeSnapshot(
-            name='name_value',
-            id='id_value',
-            description='description_value',
-            storage_volume='storage_volume_value',
-            type_=gcb_volume_snapshot.VolumeSnapshot.SnapshotType.AD_HOC,
-        ))
-        response = await client.create_volume_snapshot(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = gcb_volume_snapshot.CreateVolumeSnapshotRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, gcb_volume_snapshot.VolumeSnapshot)
-    assert response.name == 'name_value'
-    assert response.id == 'id_value'
-    assert response.description == 'description_value'
-    assert response.storage_volume == 'storage_volume_value'
-    assert response.type_ == gcb_volume_snapshot.VolumeSnapshot.SnapshotType.AD_HOC
-
-
-@pytest.mark.asyncio
-async def test_create_volume_snapshot_async_from_dict():
-    await test_create_volume_snapshot_async(request_type=dict)
-
-def test_create_volume_snapshot_field_headers():
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
- request = gcb_volume_snapshot.CreateVolumeSnapshotRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_volume_snapshot), - '__call__') as call: - call.return_value = gcb_volume_snapshot.VolumeSnapshot() - client.create_volume_snapshot(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_volume_snapshot_field_headers_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gcb_volume_snapshot.CreateVolumeSnapshotRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_volume_snapshot), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcb_volume_snapshot.VolumeSnapshot()) - await client.create_volume_snapshot(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_volume_snapshot_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_volume_snapshot), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gcb_volume_snapshot.VolumeSnapshot() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_volume_snapshot( - parent='parent_value', - volume_snapshot=gcb_volume_snapshot.VolumeSnapshot(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].volume_snapshot - mock_val = gcb_volume_snapshot.VolumeSnapshot(name='name_value') - assert arg == mock_val - - -def test_create_volume_snapshot_flattened_error(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_volume_snapshot( - gcb_volume_snapshot.CreateVolumeSnapshotRequest(), - parent='parent_value', - volume_snapshot=gcb_volume_snapshot.VolumeSnapshot(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_create_volume_snapshot_flattened_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_volume_snapshot), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gcb_volume_snapshot.VolumeSnapshot() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcb_volume_snapshot.VolumeSnapshot()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_volume_snapshot( - parent='parent_value', - volume_snapshot=gcb_volume_snapshot.VolumeSnapshot(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].volume_snapshot - mock_val = gcb_volume_snapshot.VolumeSnapshot(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_volume_snapshot_flattened_error_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_volume_snapshot( - gcb_volume_snapshot.CreateVolumeSnapshotRequest(), - parent='parent_value', - volume_snapshot=gcb_volume_snapshot.VolumeSnapshot(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - gcb_volume_snapshot.RestoreVolumeSnapshotRequest, - dict, -]) -def test_restore_volume_snapshot(request_type, transport: str = 'grpc'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restore_volume_snapshot), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.restore_volume_snapshot(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = gcb_volume_snapshot.RestoreVolumeSnapshotRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_restore_volume_snapshot_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = gcb_volume_snapshot.RestoreVolumeSnapshotRequest( - volume_snapshot='volume_snapshot_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restore_volume_snapshot), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
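-        # Send the pre-populated request and verify below that it reaches the stub unchanged.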
- client.restore_volume_snapshot(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == gcb_volume_snapshot.RestoreVolumeSnapshotRequest( - volume_snapshot='volume_snapshot_value', - ) - -def test_restore_volume_snapshot_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.restore_volume_snapshot in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.restore_volume_snapshot] = mock_rpc - request = {} - client.restore_volume_snapshot(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.restore_volume_snapshot(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_restore_volume_snapshot_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.restore_volume_snapshot in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.restore_volume_snapshot] = mock_rpc - - request = {} - await client.restore_volume_snapshot(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.restore_volume_snapshot(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_restore_volume_snapshot_async(transport: str = 'grpc_asyncio', request_type=gcb_volume_snapshot.RestoreVolumeSnapshotRequest): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restore_volume_snapshot), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.restore_volume_snapshot(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = gcb_volume_snapshot.RestoreVolumeSnapshotRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_restore_volume_snapshot_async_from_dict(): - await test_restore_volume_snapshot_async(request_type=dict) - -def test_restore_volume_snapshot_field_headers(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gcb_volume_snapshot.RestoreVolumeSnapshotRequest() - - request.volume_snapshot = 'volume_snapshot_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restore_volume_snapshot), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.restore_volume_snapshot(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'volume_snapshot=volume_snapshot_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_restore_volume_snapshot_field_headers_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gcb_volume_snapshot.RestoreVolumeSnapshotRequest() - - request.volume_snapshot = 'volume_snapshot_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restore_volume_snapshot), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.restore_volume_snapshot(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'volume_snapshot=volume_snapshot_value', - ) in kw['metadata'] - - -def test_restore_volume_snapshot_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restore_volume_snapshot), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
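-        # RestoreVolumeSnapshot flattens only the volume_snapshot field.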
- client.restore_volume_snapshot( - volume_snapshot='volume_snapshot_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].volume_snapshot - mock_val = 'volume_snapshot_value' - assert arg == mock_val - - -def test_restore_volume_snapshot_flattened_error(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.restore_volume_snapshot( - gcb_volume_snapshot.RestoreVolumeSnapshotRequest(), - volume_snapshot='volume_snapshot_value', - ) - -@pytest.mark.asyncio -async def test_restore_volume_snapshot_flattened_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.restore_volume_snapshot), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.restore_volume_snapshot( - volume_snapshot='volume_snapshot_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].volume_snapshot - mock_val = 'volume_snapshot_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_restore_volume_snapshot_flattened_error_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.restore_volume_snapshot( - gcb_volume_snapshot.RestoreVolumeSnapshotRequest(), - volume_snapshot='volume_snapshot_value', - ) - - -@pytest.mark.parametrize("request_type", [ - volume_snapshot.DeleteVolumeSnapshotRequest, - dict, -]) -def test_delete_volume_snapshot(request_type, transport: str = 'grpc'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_volume_snapshot), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_volume_snapshot(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = volume_snapshot.DeleteVolumeSnapshotRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_volume_snapshot_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = volume_snapshot.DeleteVolumeSnapshotRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_volume_snapshot), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_volume_snapshot(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == volume_snapshot.DeleteVolumeSnapshotRequest( - name='name_value', - ) - -def test_delete_volume_snapshot_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_volume_snapshot in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_volume_snapshot] = mock_rpc - request = {} - client.delete_volume_snapshot(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_volume_snapshot(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_volume_snapshot_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_volume_snapshot in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_volume_snapshot] = mock_rpc - - request = {} - await client.delete_volume_snapshot(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.delete_volume_snapshot(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_volume_snapshot_async(transport: str = 'grpc_asyncio', request_type=volume_snapshot.DeleteVolumeSnapshotRequest): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_volume_snapshot), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_volume_snapshot(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = volume_snapshot.DeleteVolumeSnapshotRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_volume_snapshot_async_from_dict(): - await test_delete_volume_snapshot_async(request_type=dict) - -def test_delete_volume_snapshot_field_headers(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = volume_snapshot.DeleteVolumeSnapshotRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_volume_snapshot), - '__call__') as call: - call.return_value = None - client.delete_volume_snapshot(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_volume_snapshot_field_headers_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = volume_snapshot.DeleteVolumeSnapshotRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_volume_snapshot), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_volume_snapshot(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_volume_snapshot_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_volume_snapshot), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_volume_snapshot( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_volume_snapshot_flattened_error(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_volume_snapshot( - volume_snapshot.DeleteVolumeSnapshotRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_volume_snapshot_flattened_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_volume_snapshot), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_volume_snapshot( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_volume_snapshot_flattened_error_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_volume_snapshot( - volume_snapshot.DeleteVolumeSnapshotRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - volume_snapshot.GetVolumeSnapshotRequest, - dict, -]) -def test_get_volume_snapshot(request_type, transport: str = 'grpc'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_volume_snapshot), - '__call__') as call: - # Designate an appropriate return value for the call. 
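-        # Every field is set so the assertions on the response below are all meaningful.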
- call.return_value = volume_snapshot.VolumeSnapshot( - name='name_value', - id='id_value', - description='description_value', - storage_volume='storage_volume_value', - type_=volume_snapshot.VolumeSnapshot.SnapshotType.AD_HOC, - ) - response = client.get_volume_snapshot(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = volume_snapshot.GetVolumeSnapshotRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, volume_snapshot.VolumeSnapshot) - assert response.name == 'name_value' - assert response.id == 'id_value' - assert response.description == 'description_value' - assert response.storage_volume == 'storage_volume_value' - assert response.type_ == volume_snapshot.VolumeSnapshot.SnapshotType.AD_HOC - - -def test_get_volume_snapshot_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = volume_snapshot.GetVolumeSnapshotRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_volume_snapshot), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_volume_snapshot(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == volume_snapshot.GetVolumeSnapshotRequest( - name='name_value', - ) - -def test_get_volume_snapshot_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_volume_snapshot in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_volume_snapshot] = mock_rpc - request = {} - client.get_volume_snapshot(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.get_volume_snapshot(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_volume_snapshot_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = BareMetalSolutionAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.get_volume_snapshot in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.get_volume_snapshot] = mock_rpc
-
-        request = {}
-        await client.get_volume_snapshot(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.get_volume_snapshot(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_volume_snapshot_async(transport: str = 'grpc_asyncio', request_type=volume_snapshot.GetVolumeSnapshotRequest):
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_volume_snapshot),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(volume_snapshot.VolumeSnapshot(
-            name='name_value',
-            id='id_value',
-            description='description_value',
-            storage_volume='storage_volume_value',
-            type_=volume_snapshot.VolumeSnapshot.SnapshotType.AD_HOC,
-        ))
-        response = await client.get_volume_snapshot(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = volume_snapshot.GetVolumeSnapshotRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, volume_snapshot.VolumeSnapshot)
-    assert response.name == 'name_value'
-    assert response.id == 'id_value'
-    assert response.description == 'description_value'
-    assert response.storage_volume == 'storage_volume_value'
-    assert response.type_ == volume_snapshot.VolumeSnapshot.SnapshotType.AD_HOC
-
-
-@pytest.mark.asyncio
-async def test_get_volume_snapshot_async_from_dict():
-    await test_get_volume_snapshot_async(request_type=dict)
-
-def test_get_volume_snapshot_field_headers():
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
- request = volume_snapshot.GetVolumeSnapshotRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_volume_snapshot), - '__call__') as call: - call.return_value = volume_snapshot.VolumeSnapshot() - client.get_volume_snapshot(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_volume_snapshot_field_headers_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = volume_snapshot.GetVolumeSnapshotRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_volume_snapshot), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(volume_snapshot.VolumeSnapshot()) - await client.get_volume_snapshot(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_volume_snapshot_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_volume_snapshot), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = volume_snapshot.VolumeSnapshot() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_volume_snapshot( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_volume_snapshot_flattened_error(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_volume_snapshot( - volume_snapshot.GetVolumeSnapshotRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_volume_snapshot_flattened_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_volume_snapshot), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = volume_snapshot.VolumeSnapshot() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(volume_snapshot.VolumeSnapshot()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
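The field-headers tests pin down the routing contract: any request field that appears in the URI must also travel as an `x-goog-request-params` metadata entry. The clients build that entry with `gapic_v1.routing_header.to_grpc_metadata`; the stand-alone sketch below (with a hypothetical `to_routing_metadata` helper) only illustrates the key/value shape being asserted:

    from urllib.parse import quote

    def to_routing_metadata(params):
        # Join field paths into a single percent-encoded header value.
        value = "&".join(f"{k}={quote(str(v), safe='/')}" for k, v in params)
        return ("x-goog-request-params", value)

    metadata = [to_routing_metadata([("name", "name_value")])]
    assert ("x-goog-request-params", "name=name_value") in metadata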
- response = await client.get_volume_snapshot( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_volume_snapshot_flattened_error_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_volume_snapshot( - volume_snapshot.GetVolumeSnapshotRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - volume_snapshot.ListVolumeSnapshotsRequest, - dict, -]) -def test_list_volume_snapshots(request_type, transport: str = 'grpc'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_volume_snapshots), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = volume_snapshot.ListVolumeSnapshotsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - response = client.list_volume_snapshots(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = volume_snapshot.ListVolumeSnapshotsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListVolumeSnapshotsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -def test_list_volume_snapshots_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = volume_snapshot.ListVolumeSnapshotsRequest( - parent='parent_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_volume_snapshots), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.list_volume_snapshots(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == volume_snapshot.ListVolumeSnapshotsRequest( - parent='parent_value', - page_token='page_token_value', - ) - -def test_list_volume_snapshots_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_volume_snapshots in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_volume_snapshots] = mock_rpc - request = {} - client.list_volume_snapshots(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_volume_snapshots(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_volume_snapshots_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_volume_snapshots in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_volume_snapshots] = mock_rpc - - request = {} - await client.list_volume_snapshots(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_volume_snapshots(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_volume_snapshots_async(transport: str = 'grpc_asyncio', request_type=volume_snapshot.ListVolumeSnapshotsRequest): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_volume_snapshots), - '__call__') as call: - # Designate an appropriate return value for the call. 
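The cached-wrapped-RPC tests above verify that a client wraps each transport method exactly once, at construction time, and then dispatches through that cache on every call. A compact sketch of the idea, using illustrative names rather than the real transport internals:

    class Transport:
        def list_volume_snapshots(self, request):
            return "response"

    def wrap_method(fn):
        def wrapped(request, **kwargs):
            # retries/timeouts/metadata would be applied here
            return fn(request, **kwargs)
        return wrapped

    class Client:
        def __init__(self, transport):
            self._transport = transport
            # one-time wrapping, mirroring _prep_wrapped_messages
            self._wrapped = {
                transport.list_volume_snapshots:
                    wrap_method(transport.list_volume_snapshots),
            }

        def list_volume_snapshots(self, request):
            # dispatch through the cache; no re-wrapping per call
            return self._wrapped[self._transport.list_volume_snapshots](request)

    client = Client(Transport())
    assert client.list_volume_snapshots({}) == "response"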
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(volume_snapshot.ListVolumeSnapshotsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - response = await client.list_volume_snapshots(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = volume_snapshot.ListVolumeSnapshotsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListVolumeSnapshotsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -@pytest.mark.asyncio -async def test_list_volume_snapshots_async_from_dict(): - await test_list_volume_snapshots_async(request_type=dict) - -def test_list_volume_snapshots_field_headers(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = volume_snapshot.ListVolumeSnapshotsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_volume_snapshots), - '__call__') as call: - call.return_value = volume_snapshot.ListVolumeSnapshotsResponse() - client.list_volume_snapshots(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_volume_snapshots_field_headers_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = volume_snapshot.ListVolumeSnapshotsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_volume_snapshots), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(volume_snapshot.ListVolumeSnapshotsResponse()) - await client.list_volume_snapshots(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_volume_snapshots_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_volume_snapshots), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = volume_snapshot.ListVolumeSnapshotsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_volume_snapshots( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_volume_snapshots_flattened_error():
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_volume_snapshots(
-            volume_snapshot.ListVolumeSnapshotsRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_volume_snapshots_flattened_async():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_volume_snapshots),
-            '__call__') as call:
-        # Designate an appropriate awaitable return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(volume_snapshot.ListVolumeSnapshotsResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_volume_snapshots(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_volume_snapshots_flattened_error_async():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_volume_snapshots(
-            volume_snapshot.ListVolumeSnapshotsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_volume_snapshots_pager(transport_name: str = "grpc"):
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_volume_snapshots),
-            '__call__') as call:
-        # Set the response to a series of pages.
- call.side_effect = ( - volume_snapshot.ListVolumeSnapshotsResponse( - volume_snapshots=[ - volume_snapshot.VolumeSnapshot(), - volume_snapshot.VolumeSnapshot(), - volume_snapshot.VolumeSnapshot(), - ], - next_page_token='abc', - ), - volume_snapshot.ListVolumeSnapshotsResponse( - volume_snapshots=[], - next_page_token='def', - ), - volume_snapshot.ListVolumeSnapshotsResponse( - volume_snapshots=[ - volume_snapshot.VolumeSnapshot(), - ], - next_page_token='ghi', - ), - volume_snapshot.ListVolumeSnapshotsResponse( - volume_snapshots=[ - volume_snapshot.VolumeSnapshot(), - volume_snapshot.VolumeSnapshot(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_volume_snapshots(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, volume_snapshot.VolumeSnapshot) - for i in results) -def test_list_volume_snapshots_pages(transport_name: str = "grpc"): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_volume_snapshots), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - volume_snapshot.ListVolumeSnapshotsResponse( - volume_snapshots=[ - volume_snapshot.VolumeSnapshot(), - volume_snapshot.VolumeSnapshot(), - volume_snapshot.VolumeSnapshot(), - ], - next_page_token='abc', - ), - volume_snapshot.ListVolumeSnapshotsResponse( - volume_snapshots=[], - next_page_token='def', - ), - volume_snapshot.ListVolumeSnapshotsResponse( - volume_snapshots=[ - volume_snapshot.VolumeSnapshot(), - ], - next_page_token='ghi', - ), - volume_snapshot.ListVolumeSnapshotsResponse( - volume_snapshots=[ - volume_snapshot.VolumeSnapshot(), - volume_snapshot.VolumeSnapshot(), - ], - ), - RuntimeError, - ) - pages = list(client.list_volume_snapshots(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_volume_snapshots_async_pager(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_volume_snapshots), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
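The pager tests feed the mocked stub a series of page responses followed by a `RuntimeError`, then check that iterating the pager flattens all items across pages; the trailing error would only trip if the pager fetched one page too many. A minimal sketch of that paging contract, assuming a simple `Page` shape:

    from dataclasses import dataclass, field

    @dataclass
    class Page:
        items: list = field(default_factory=list)
        next_page_token: str = ""

    def iterate(fetch):
        # Keep fetching while the server hands back a non-empty token,
        # yielding items from every page along the way.
        token = None
        while True:
            page = fetch(page_token=token)
            yield from page.items
            if not page.next_page_token:
                return
            token = page.next_page_token

    pages = iter([Page(["a", "b"], "t1"), Page([], "t2"), Page(["c"], "")])
    results = list(iterate(lambda page_token=None: next(pages)))
    assert results == ["a", "b", "c"]

An empty middle page is deliberately included, just as in the mocked responses above: the iteration must keep going on an empty page as long as a token is present.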
- call.side_effect = ( - volume_snapshot.ListVolumeSnapshotsResponse( - volume_snapshots=[ - volume_snapshot.VolumeSnapshot(), - volume_snapshot.VolumeSnapshot(), - volume_snapshot.VolumeSnapshot(), - ], - next_page_token='abc', - ), - volume_snapshot.ListVolumeSnapshotsResponse( - volume_snapshots=[], - next_page_token='def', - ), - volume_snapshot.ListVolumeSnapshotsResponse( - volume_snapshots=[ - volume_snapshot.VolumeSnapshot(), - ], - next_page_token='ghi', - ), - volume_snapshot.ListVolumeSnapshotsResponse( - volume_snapshots=[ - volume_snapshot.VolumeSnapshot(), - volume_snapshot.VolumeSnapshot(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_volume_snapshots(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, volume_snapshot.VolumeSnapshot) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_volume_snapshots_async_pages(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_volume_snapshots), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - volume_snapshot.ListVolumeSnapshotsResponse( - volume_snapshots=[ - volume_snapshot.VolumeSnapshot(), - volume_snapshot.VolumeSnapshot(), - volume_snapshot.VolumeSnapshot(), - ], - next_page_token='abc', - ), - volume_snapshot.ListVolumeSnapshotsResponse( - volume_snapshots=[], - next_page_token='def', - ), - volume_snapshot.ListVolumeSnapshotsResponse( - volume_snapshots=[ - volume_snapshot.VolumeSnapshot(), - ], - next_page_token='ghi', - ), - volume_snapshot.ListVolumeSnapshotsResponse( - volume_snapshots=[ - volume_snapshot.VolumeSnapshot(), - volume_snapshot.VolumeSnapshot(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_volume_snapshots(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - lun.GetLunRequest, - dict, -]) -def test_get_lun(request_type, transport: str = 'grpc'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_lun), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = lun.Lun( - name='name_value', - id='id_value', - state=lun.Lun.State.CREATING, - size_gb=739, - multiprotocol_type=lun.Lun.MultiprotocolType.LINUX, - storage_volume='storage_volume_value', - shareable=True, - boot_lun=True, - storage_type=lun.Lun.StorageType.SSD, - wwid='wwid_value', - instances=['instances_value'], - ) - response = client.get_lun(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = lun.GetLunRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, lun.Lun) - assert response.name == 'name_value' - assert response.id == 'id_value' - assert response.state == lun.Lun.State.CREATING - assert response.size_gb == 739 - assert response.multiprotocol_type == lun.Lun.MultiprotocolType.LINUX - assert response.storage_volume == 'storage_volume_value' - assert response.shareable is True - assert response.boot_lun is True - assert response.storage_type == lun.Lun.StorageType.SSD - assert response.wwid == 'wwid_value' - assert response.instances == ['instances_value'] - - -def test_get_lun_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = lun.GetLunRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_lun), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_lun(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == lun.GetLunRequest( - name='name_value', - ) - -def test_get_lun_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_lun in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_lun] = mock_rpc - request = {} - client.get_lun(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_lun(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_lun_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_lun in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_lun] = mock_rpc - - request = {} - await client.get_lun(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_lun(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_lun_async(transport: str = 'grpc_asyncio', request_type=lun.GetLunRequest): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_lun), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(lun.Lun( - name='name_value', - id='id_value', - state=lun.Lun.State.CREATING, - size_gb=739, - multiprotocol_type=lun.Lun.MultiprotocolType.LINUX, - storage_volume='storage_volume_value', - shareable=True, - boot_lun=True, - storage_type=lun.Lun.StorageType.SSD, - wwid='wwid_value', - instances=['instances_value'], - )) - response = await client.get_lun(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = lun.GetLunRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, lun.Lun) - assert response.name == 'name_value' - assert response.id == 'id_value' - assert response.state == lun.Lun.State.CREATING - assert response.size_gb == 739 - assert response.multiprotocol_type == lun.Lun.MultiprotocolType.LINUX - assert response.storage_volume == 'storage_volume_value' - assert response.shareable is True - assert response.boot_lun is True - assert response.storage_type == lun.Lun.StorageType.SSD - assert response.wwid == 'wwid_value' - assert response.instances == ['instances_value'] - - -@pytest.mark.asyncio -async def test_get_lun_async_from_dict(): - await test_get_lun_async(request_type=dict) - -def test_get_lun_field_headers(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = lun.GetLunRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_lun), - '__call__') as call: - call.return_value = lun.Lun() - client.get_lun(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_lun_field_headers_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = lun.GetLunRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_lun), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(lun.Lun()) - await client.get_lun(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_lun_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_lun), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = lun.Lun() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_lun( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_lun_flattened_error(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
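The `*_flattened_error` tests (continuing below) enforce an either/or calling convention: a method takes either a fully-formed request object or flattened keyword fields, never both. A hedged sketch of the rule, mirroring the generated clients' behaviour in spirit only:

    def get_lun(request=None, *, name=None):
        """Illustrative only: shows the either/or rule, not the real client."""
        has_flattened = name is not None
        if request is not None and has_flattened:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')
        return request if request is not None else {'name': name}

    assert get_lun(name='name_value') == {'name': 'name_value'}
    try:
        get_lun({'name': 'name_value'}, name='name_value')
        raise AssertionError('expected ValueError')
    except ValueError:
        pass  # both styles at once is rejected, as the tests assert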
- with pytest.raises(ValueError): - client.get_lun( - lun.GetLunRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_lun_flattened_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_lun), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = lun.Lun() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(lun.Lun()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_lun( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_lun_flattened_error_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_lun( - lun.GetLunRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - lun.ListLunsRequest, - dict, -]) -def test_list_luns(request_type, transport: str = 'grpc'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_luns), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = lun.ListLunsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - response = client.list_luns(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = lun.ListLunsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListLunsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -def test_list_luns_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = lun.ListLunsRequest( - parent='parent_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_luns), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.list_luns(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == lun.ListLunsRequest( - parent='parent_value', - page_token='page_token_value', - ) - -def test_list_luns_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_luns in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_luns] = mock_rpc - request = {} - client.list_luns(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_luns(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_luns_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_luns in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_luns] = mock_rpc - - request = {} - await client.list_luns(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_luns(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_luns_async(transport: str = 'grpc_asyncio', request_type=lun.ListLunsRequest): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_luns), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(lun.ListLunsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - response = await client.list_luns(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = lun.ListLunsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListLunsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -@pytest.mark.asyncio -async def test_list_luns_async_from_dict(): - await test_list_luns_async(request_type=dict) - -def test_list_luns_field_headers(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = lun.ListLunsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_luns), - '__call__') as call: - call.return_value = lun.ListLunsResponse() - client.list_luns(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_luns_field_headers_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = lun.ListLunsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_luns), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(lun.ListLunsResponse()) - await client.list_luns(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_luns_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_luns), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = lun.ListLunsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_luns( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_luns_flattened_error(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_luns( - lun.ListLunsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_luns_flattened_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_luns), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = lun.ListLunsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(lun.ListLunsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_luns( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_luns_flattened_error_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_luns( - lun.ListLunsRequest(), - parent='parent_value', - ) - - -def test_list_luns_pager(transport_name: str = "grpc"): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_luns), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - lun.ListLunsResponse( - luns=[ - lun.Lun(), - lun.Lun(), - lun.Lun(), - ], - next_page_token='abc', - ), - lun.ListLunsResponse( - luns=[], - next_page_token='def', - ), - lun.ListLunsResponse( - luns=[ - lun.Lun(), - ], - next_page_token='ghi', - ), - lun.ListLunsResponse( - luns=[ - lun.Lun(), - lun.Lun(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_luns(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, lun.Lun) - for i in results) -def test_list_luns_pages(transport_name: str = "grpc"): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_luns), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - lun.ListLunsResponse( - luns=[ - lun.Lun(), - lun.Lun(), - lun.Lun(), - ], - next_page_token='abc', - ), - lun.ListLunsResponse( - luns=[], - next_page_token='def', - ), - lun.ListLunsResponse( - luns=[ - lun.Lun(), - ], - next_page_token='ghi', - ), - lun.ListLunsResponse( - luns=[ - lun.Lun(), - lun.Lun(), - ], - ), - RuntimeError, - ) - pages = list(client.list_luns(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_luns_async_pager(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_luns), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - lun.ListLunsResponse( - luns=[ - lun.Lun(), - lun.Lun(), - lun.Lun(), - ], - next_page_token='abc', - ), - lun.ListLunsResponse( - luns=[], - next_page_token='def', - ), - lun.ListLunsResponse( - luns=[ - lun.Lun(), - ], - next_page_token='ghi', - ), - lun.ListLunsResponse( - luns=[ - lun.Lun(), - lun.Lun(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_luns(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, lun.Lun) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_luns_async_pages(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_luns), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - lun.ListLunsResponse( - luns=[ - lun.Lun(), - lun.Lun(), - lun.Lun(), - ], - next_page_token='abc', - ), - lun.ListLunsResponse( - luns=[], - next_page_token='def', - ), - lun.ListLunsResponse( - luns=[ - lun.Lun(), - ], - next_page_token='ghi', - ), - lun.ListLunsResponse( - luns=[ - lun.Lun(), - lun.Lun(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_luns(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - lun.EvictLunRequest, - dict, -]) -def test_evict_lun(request_type, transport: str = 'grpc'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.evict_lun), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.evict_lun(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = lun.EvictLunRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_evict_lun_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = lun.EvictLunRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.evict_lun), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.evict_lun(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == lun.EvictLunRequest( - name='name_value', - ) - -def test_evict_lun_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.evict_lun in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.evict_lun] = mock_rpc - request = {} - client.evict_lun(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
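The evict tests assert `isinstance(response, future.Future)` because the client wraps the raw long-running `Operation` proto in a future-like object that polls until completion; the comment above notes that the operations client backing that future is built once and cached. A toy model of the future's shape (not the real `google.api_core` classes; `poll` is a hypothetical hook):

    class OperationFuture:
        """Illustrative stand-in for an LRO future."""
        def __init__(self, name, poll):
            self.name = name            # e.g. 'operations/spam'
            self._poll = poll

        def result(self):
            op = self._poll(self.name)
            while not op['done']:       # keep polling until the server says done
                op = self._poll(self.name)
            return op['response']

    states = iter([{'done': False}, {'done': True, 'response': 'evicted'}])
    fut = OperationFuture('operations/spam', lambda name: next(states))
    assert fut.result() == 'evicted'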
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.evict_lun(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_evict_lun_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.evict_lun in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.evict_lun] = mock_rpc - - request = {} - await client.evict_lun(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.evict_lun(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_evict_lun_async(transport: str = 'grpc_asyncio', request_type=lun.EvictLunRequest): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.evict_lun), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.evict_lun(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = lun.EvictLunRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_evict_lun_async_from_dict(): - await test_evict_lun_async(request_type=dict) - -def test_evict_lun_field_headers(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = lun.EvictLunRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.evict_lun), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.evict_lun(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_evict_lun_field_headers_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = lun.EvictLunRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.evict_lun), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.evict_lun(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_evict_lun_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.evict_lun), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.evict_lun( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_evict_lun_flattened_error(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.evict_lun( - lun.EvictLunRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_evict_lun_flattened_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.evict_lun), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.evict_lun( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_evict_lun_flattened_error_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.evict_lun( - lun.EvictLunRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - nfs_share.GetNfsShareRequest, - dict, -]) -def test_get_nfs_share(request_type, transport: str = 'grpc'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_nfs_share), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = nfs_share.NfsShare( - name='name_value', - nfs_share_id='nfs_share_id_value', - id='id_value', - state=nfs_share.NfsShare.State.PROVISIONED, - volume='volume_value', - requested_size_gib=1917, - storage_type=nfs_share.NfsShare.StorageType.SSD, - ) - response = client.get_nfs_share(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = nfs_share.GetNfsShareRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, nfs_share.NfsShare) - assert response.name == 'name_value' - assert response.nfs_share_id == 'nfs_share_id_value' - assert response.id == 'id_value' - assert response.state == nfs_share.NfsShare.State.PROVISIONED - assert response.volume == 'volume_value' - assert response.requested_size_gib == 1917 - assert response.storage_type == nfs_share.NfsShare.StorageType.SSD - - -def test_get_nfs_share_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = nfs_share.GetNfsShareRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_nfs_share), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.get_nfs_share(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == nfs_share.GetNfsShareRequest( - name='name_value', - ) - -def test_get_nfs_share_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_nfs_share in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_nfs_share] = mock_rpc - request = {} - client.get_nfs_share(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_nfs_share(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_nfs_share_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_nfs_share in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_nfs_share] = mock_rpc - - request = {} - await client.get_nfs_share(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_nfs_share(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_nfs_share_async(transport: str = 'grpc_asyncio', request_type=nfs_share.GetNfsShareRequest): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_nfs_share), - '__call__') as call: - # Designate an appropriate return value for the call. 
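- # NOTE: FakeUnaryUnaryCall is the awaitable test double provided by
- # google.api_core.grpc_helpers_async that is used throughout these tests;
- # awaiting it yields the wrapped message, mimicking a real async
- # unary-unary gRPC call.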
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(nfs_share.NfsShare(
- name='name_value',
- nfs_share_id='nfs_share_id_value',
- id='id_value',
- state=nfs_share.NfsShare.State.PROVISIONED,
- volume='volume_value',
- requested_size_gib=1917,
- storage_type=nfs_share.NfsShare.StorageType.SSD,
- ))
- response = await client.get_nfs_share(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = nfs_share.GetNfsShareRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, nfs_share.NfsShare)
- assert response.name == 'name_value'
- assert response.nfs_share_id == 'nfs_share_id_value'
- assert response.id == 'id_value'
- assert response.state == nfs_share.NfsShare.State.PROVISIONED
- assert response.volume == 'volume_value'
- assert response.requested_size_gib == 1917
- assert response.storage_type == nfs_share.NfsShare.StorageType.SSD
-
-
-@pytest.mark.asyncio
-async def test_get_nfs_share_async_from_dict():
- await test_get_nfs_share_async(request_type=dict)
-
-def test_get_nfs_share_field_headers():
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = nfs_share.GetNfsShareRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_nfs_share),
- '__call__') as call:
- call.return_value = nfs_share.NfsShare()
- client.get_nfs_share(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_nfs_share_field_headers_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = nfs_share.GetNfsShareRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_nfs_share),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(nfs_share.NfsShare())
- await client.get_nfs_share(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_get_nfs_share_flattened():
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_nfs_share),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = nfs_share.NfsShare()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
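- # The flattened keyword argument below is folded into a
- # GetNfsShareRequest by the client before the RPC is issued.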
- client.get_nfs_share(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_get_nfs_share_flattened_error():
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.get_nfs_share(
- nfs_share.GetNfsShareRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_get_nfs_share_flattened_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_nfs_share),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(nfs_share.NfsShare())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.get_nfs_share(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_nfs_share_flattened_error_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.get_nfs_share(
- nfs_share.GetNfsShareRequest(),
- name='name_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- nfs_share.ListNfsSharesRequest,
- dict,
-])
-def test_list_nfs_shares(request_type, transport: str = 'grpc'):
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_nfs_shares),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = nfs_share.ListNfsSharesResponse(
- next_page_token='next_page_token_value',
- unreachable=['unreachable_value'],
- )
- response = client.list_nfs_shares(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = nfs_share.ListNfsSharesRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListNfsSharesPager)
- assert response.next_page_token == 'next_page_token_value'
- assert response.unreachable == ['unreachable_value']
-
-
-def test_list_nfs_shares_non_empty_request_with_auto_populated_field():
- # This test is a coverage failsafe to make sure that UUID4 fields are
- # automatically populated, according to AIP-4235, with non-empty requests.
- client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = nfs_share.ListNfsSharesRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_nfs_shares), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_nfs_shares(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == nfs_share.ListNfsSharesRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - ) - -def test_list_nfs_shares_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_nfs_shares in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_nfs_shares] = mock_rpc - request = {} - client.list_nfs_shares(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_nfs_shares(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_nfs_shares_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_nfs_shares in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_nfs_shares] = mock_rpc - - request = {} - await client.list_nfs_shares(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- await client.list_nfs_shares(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_nfs_shares_async(transport: str = 'grpc_asyncio', request_type=nfs_share.ListNfsSharesRequest):
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_nfs_shares),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(nfs_share.ListNfsSharesResponse(
- next_page_token='next_page_token_value',
- unreachable=['unreachable_value'],
- ))
- response = await client.list_nfs_shares(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = nfs_share.ListNfsSharesRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListNfsSharesAsyncPager)
- assert response.next_page_token == 'next_page_token_value'
- assert response.unreachable == ['unreachable_value']
-
-
-@pytest.mark.asyncio
-async def test_list_nfs_shares_async_from_dict():
- await test_list_nfs_shares_async(request_type=dict)
-
-def test_list_nfs_shares_field_headers():
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = nfs_share.ListNfsSharesRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_nfs_shares),
- '__call__') as call:
- call.return_value = nfs_share.ListNfsSharesResponse()
- client.list_nfs_shares(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_nfs_shares_field_headers_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = nfs_share.ListNfsSharesRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_nfs_shares),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(nfs_share.ListNfsSharesResponse())
- await client.list_nfs_shares(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-def test_list_nfs_shares_flattened():
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_nfs_shares),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = nfs_share.ListNfsSharesResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.list_nfs_shares(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-
-def test_list_nfs_shares_flattened_error():
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.list_nfs_shares(
- nfs_share.ListNfsSharesRequest(),
- parent='parent_value',
- )
-
-@pytest.mark.asyncio
-async def test_list_nfs_shares_flattened_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_nfs_shares),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(nfs_share.ListNfsSharesResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.list_nfs_shares(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_nfs_shares_flattened_error_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.list_nfs_shares(
- nfs_share.ListNfsSharesRequest(),
- parent='parent_value',
- )
-
-
-def test_list_nfs_shares_pager(transport_name: str = "grpc"):
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_nfs_shares),
- '__call__') as call:
- # Set the response to a series of pages.
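- # Each item of side_effect is returned by one successive call to the
- # mocked RPC; the trailing RuntimeError is raised if the pager ever
- # requests more pages than the fixture provides.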
- call.side_effect = (
- nfs_share.ListNfsSharesResponse(
- nfs_shares=[
- nfs_share.NfsShare(),
- nfs_share.NfsShare(),
- nfs_share.NfsShare(),
- ],
- next_page_token='abc',
- ),
- nfs_share.ListNfsSharesResponse(
- nfs_shares=[],
- next_page_token='def',
- ),
- nfs_share.ListNfsSharesResponse(
- nfs_shares=[
- nfs_share.NfsShare(),
- ],
- next_page_token='ghi',
- ),
- nfs_share.ListNfsSharesResponse(
- nfs_shares=[
- nfs_share.NfsShare(),
- nfs_share.NfsShare(),
- ],
- ),
- RuntimeError,
- )
-
- retry = retries.Retry()
- timeout = 5
- expected_metadata = (
- gapic_v1.routing_header.to_grpc_metadata((
- ('parent', ''),
- )),
- )
- pager = client.list_nfs_shares(request={}, retry=retry, timeout=timeout)
-
- assert pager._metadata == expected_metadata
- assert pager._retry == retry
- assert pager._timeout == timeout
-
- results = list(pager)
- assert len(results) == 6
- assert all(isinstance(i, nfs_share.NfsShare)
- for i in results)
-
-
-def test_list_nfs_shares_pages(transport_name: str = "grpc"):
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_nfs_shares),
- '__call__') as call:
- # Set the response to a series of pages.
- call.side_effect = (
- nfs_share.ListNfsSharesResponse(
- nfs_shares=[
- nfs_share.NfsShare(),
- nfs_share.NfsShare(),
- nfs_share.NfsShare(),
- ],
- next_page_token='abc',
- ),
- nfs_share.ListNfsSharesResponse(
- nfs_shares=[],
- next_page_token='def',
- ),
- nfs_share.ListNfsSharesResponse(
- nfs_shares=[
- nfs_share.NfsShare(),
- ],
- next_page_token='ghi',
- ),
- nfs_share.ListNfsSharesResponse(
- nfs_shares=[
- nfs_share.NfsShare(),
- nfs_share.NfsShare(),
- ],
- ),
- RuntimeError,
- )
- pages = list(client.list_nfs_shares(request={}).pages)
- for page_, token in zip(pages, ['abc','def','ghi', '']):
- assert page_.raw_page.next_page_token == token
-
-@pytest.mark.asyncio
-async def test_list_nfs_shares_async_pager():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_nfs_shares),
- '__call__', new_callable=mock.AsyncMock) as call:
- # Set the response to a series of pages.
- call.side_effect = (
- nfs_share.ListNfsSharesResponse(
- nfs_shares=[
- nfs_share.NfsShare(),
- nfs_share.NfsShare(),
- nfs_share.NfsShare(),
- ],
- next_page_token='abc',
- ),
- nfs_share.ListNfsSharesResponse(
- nfs_shares=[],
- next_page_token='def',
- ),
- nfs_share.ListNfsSharesResponse(
- nfs_shares=[
- nfs_share.NfsShare(),
- ],
- next_page_token='ghi',
- ),
- nfs_share.ListNfsSharesResponse(
- nfs_shares=[
- nfs_share.NfsShare(),
- nfs_share.NfsShare(),
- ],
- ),
- RuntimeError,
- )
- async_pager = await client.list_nfs_shares(request={},)
- assert async_pager.next_page_token == 'abc'
- responses = []
- async for response in async_pager: # pragma: no branch
- responses.append(response)
-
- assert len(responses) == 6
- assert all(isinstance(i, nfs_share.NfsShare)
- for i in responses)
-
-
-@pytest.mark.asyncio
-async def test_list_nfs_shares_async_pages():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
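- # new_callable=mock.AsyncMock below makes the patched method awaitable,
- # which the async pager requires when it fetches each page.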
- with mock.patch.object( - type(client.transport.list_nfs_shares), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - nfs_share.ListNfsSharesResponse( - nfs_shares=[ - nfs_share.NfsShare(), - nfs_share.NfsShare(), - nfs_share.NfsShare(), - ], - next_page_token='abc', - ), - nfs_share.ListNfsSharesResponse( - nfs_shares=[], - next_page_token='def', - ), - nfs_share.ListNfsSharesResponse( - nfs_shares=[ - nfs_share.NfsShare(), - ], - next_page_token='ghi', - ), - nfs_share.ListNfsSharesResponse( - nfs_shares=[ - nfs_share.NfsShare(), - nfs_share.NfsShare(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_nfs_shares(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - gcb_nfs_share.UpdateNfsShareRequest, - dict, -]) -def test_update_nfs_share(request_type, transport: str = 'grpc'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_nfs_share), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_nfs_share(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = gcb_nfs_share.UpdateNfsShareRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_nfs_share_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = gcb_nfs_share.UpdateNfsShareRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_nfs_share), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.update_nfs_share(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == gcb_nfs_share.UpdateNfsShareRequest( - ) - -def test_update_nfs_share_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_nfs_share in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_nfs_share] = mock_rpc - request = {} - client.update_nfs_share(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_nfs_share(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_nfs_share_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_nfs_share in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_nfs_share] = mock_rpc - - request = {} - await client.update_nfs_share(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_nfs_share(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_nfs_share_async(transport: str = 'grpc_asyncio', request_type=gcb_nfs_share.UpdateNfsShareRequest): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
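- # The stub is faked to return a raw operations_pb2.Operation; the
- # client wraps it in an api_core operation future, hence the
- # isinstance(response, future.Future) assertion below.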
- with mock.patch.object( - type(client.transport.update_nfs_share), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_nfs_share(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = gcb_nfs_share.UpdateNfsShareRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_nfs_share_async_from_dict(): - await test_update_nfs_share_async(request_type=dict) - -def test_update_nfs_share_field_headers(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gcb_nfs_share.UpdateNfsShareRequest() - - request.nfs_share.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_nfs_share), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_nfs_share(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'nfs_share.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_nfs_share_field_headers_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gcb_nfs_share.UpdateNfsShareRequest() - - request.nfs_share.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_nfs_share), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_nfs_share(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'nfs_share.name=name_value', - ) in kw['metadata'] - - -def test_update_nfs_share_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_nfs_share), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_nfs_share( - nfs_share=gcb_nfs_share.NfsShare(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].nfs_share
- mock_val = gcb_nfs_share.NfsShare(name='name_value')
- assert arg == mock_val
- arg = args[0].update_mask
- mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
- assert arg == mock_val
-
-
-def test_update_nfs_share_flattened_error():
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.update_nfs_share(
- gcb_nfs_share.UpdateNfsShareRequest(),
- nfs_share=gcb_nfs_share.NfsShare(name='name_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
-@pytest.mark.asyncio
-async def test_update_nfs_share_flattened_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_nfs_share),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name='operations/spam')
- )
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.update_nfs_share(
- nfs_share=gcb_nfs_share.NfsShare(name='name_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].nfs_share
- mock_val = gcb_nfs_share.NfsShare(name='name_value')
- assert arg == mock_val
- arg = args[0].update_mask
- mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_nfs_share_flattened_error_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.update_nfs_share(
- gcb_nfs_share.UpdateNfsShareRequest(),
- nfs_share=gcb_nfs_share.NfsShare(name='name_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
-
-@pytest.mark.parametrize("request_type", [
- gcb_nfs_share.CreateNfsShareRequest,
- dict,
-])
-def test_create_nfs_share(request_type, transport: str = 'grpc'):
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_nfs_share),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/spam')
- response = client.create_nfs_share(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = gcb_nfs_share.CreateNfsShareRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, future.Future) - - -def test_create_nfs_share_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = gcb_nfs_share.CreateNfsShareRequest( - parent='parent_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_nfs_share), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_nfs_share(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == gcb_nfs_share.CreateNfsShareRequest( - parent='parent_value', - ) - -def test_create_nfs_share_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_nfs_share in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_nfs_share] = mock_rpc - request = {} - client.create_nfs_share(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_nfs_share(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_nfs_share_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_nfs_share in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_nfs_share] = mock_rpc - - request = {} - await client.create_nfs_share(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_nfs_share(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_nfs_share_async(transport: str = 'grpc_asyncio', request_type=gcb_nfs_share.CreateNfsShareRequest): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_nfs_share), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_nfs_share(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = gcb_nfs_share.CreateNfsShareRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_nfs_share_async_from_dict(): - await test_create_nfs_share_async(request_type=dict) - -def test_create_nfs_share_field_headers(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gcb_nfs_share.CreateNfsShareRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_nfs_share), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_nfs_share(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_nfs_share_field_headers_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gcb_nfs_share.CreateNfsShareRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_nfs_share), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_nfs_share(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-def test_create_nfs_share_flattened():
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_nfs_share),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/op')
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.create_nfs_share(
- parent='parent_value',
- nfs_share=gcb_nfs_share.NfsShare(name='name_value'),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
- arg = args[0].nfs_share
- mock_val = gcb_nfs_share.NfsShare(name='name_value')
- assert arg == mock_val
-
-
-def test_create_nfs_share_flattened_error():
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.create_nfs_share(
- gcb_nfs_share.CreateNfsShareRequest(),
- parent='parent_value',
- nfs_share=gcb_nfs_share.NfsShare(name='name_value'),
- )
-
-@pytest.mark.asyncio
-async def test_create_nfs_share_flattened_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_nfs_share),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name='operations/spam')
- )
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.create_nfs_share(
- parent='parent_value',
- nfs_share=gcb_nfs_share.NfsShare(name='name_value'),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
- arg = args[0].nfs_share
- mock_val = gcb_nfs_share.NfsShare(name='name_value')
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_nfs_share_flattened_error_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
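- # Mixing a request message with flattened kwargs is ambiguous, so the
- # client raises ValueError before any RPC is attempted.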
- with pytest.raises(ValueError): - await client.create_nfs_share( - gcb_nfs_share.CreateNfsShareRequest(), - parent='parent_value', - nfs_share=gcb_nfs_share.NfsShare(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - nfs_share.RenameNfsShareRequest, - dict, -]) -def test_rename_nfs_share(request_type, transport: str = 'grpc'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_nfs_share), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = nfs_share.NfsShare( - name='name_value', - nfs_share_id='nfs_share_id_value', - id='id_value', - state=nfs_share.NfsShare.State.PROVISIONED, - volume='volume_value', - requested_size_gib=1917, - storage_type=nfs_share.NfsShare.StorageType.SSD, - ) - response = client.rename_nfs_share(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = nfs_share.RenameNfsShareRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, nfs_share.NfsShare) - assert response.name == 'name_value' - assert response.nfs_share_id == 'nfs_share_id_value' - assert response.id == 'id_value' - assert response.state == nfs_share.NfsShare.State.PROVISIONED - assert response.volume == 'volume_value' - assert response.requested_size_gib == 1917 - assert response.storage_type == nfs_share.NfsShare.StorageType.SSD - - -def test_rename_nfs_share_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = nfs_share.RenameNfsShareRequest( - name='name_value', - new_nfsshare_id='new_nfsshare_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_nfs_share), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.rename_nfs_share(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == nfs_share.RenameNfsShareRequest( - name='name_value', - new_nfsshare_id='new_nfsshare_id_value', - ) - -def test_rename_nfs_share_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.rename_nfs_share in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.rename_nfs_share] = mock_rpc - request = {} - client.rename_nfs_share(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.rename_nfs_share(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_rename_nfs_share_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.rename_nfs_share in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.rename_nfs_share] = mock_rpc - - request = {} - await client.rename_nfs_share(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.rename_nfs_share(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_rename_nfs_share_async(transport: str = 'grpc_asyncio', request_type=nfs_share.RenameNfsShareRequest): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_nfs_share), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(nfs_share.NfsShare(
- name='name_value',
- nfs_share_id='nfs_share_id_value',
- id='id_value',
- state=nfs_share.NfsShare.State.PROVISIONED,
- volume='volume_value',
- requested_size_gib=1917,
- storage_type=nfs_share.NfsShare.StorageType.SSD,
- ))
- response = await client.rename_nfs_share(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = nfs_share.RenameNfsShareRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, nfs_share.NfsShare)
- assert response.name == 'name_value'
- assert response.nfs_share_id == 'nfs_share_id_value'
- assert response.id == 'id_value'
- assert response.state == nfs_share.NfsShare.State.PROVISIONED
- assert response.volume == 'volume_value'
- assert response.requested_size_gib == 1917
- assert response.storage_type == nfs_share.NfsShare.StorageType.SSD
-
-
-@pytest.mark.asyncio
-async def test_rename_nfs_share_async_from_dict():
- await test_rename_nfs_share_async(request_type=dict)
-
-def test_rename_nfs_share_field_headers():
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = nfs_share.RenameNfsShareRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.rename_nfs_share),
- '__call__') as call:
- call.return_value = nfs_share.NfsShare()
- client.rename_nfs_share(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_rename_nfs_share_field_headers_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = nfs_share.RenameNfsShareRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.rename_nfs_share),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(nfs_share.NfsShare())
- await client.rename_nfs_share(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_rename_nfs_share_flattened():
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.rename_nfs_share),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = nfs_share.NfsShare()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.rename_nfs_share(
- name='name_value',
- new_nfsshare_id='new_nfsshare_id_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
- arg = args[0].new_nfsshare_id
- mock_val = 'new_nfsshare_id_value'
- assert arg == mock_val
-
-
-def test_rename_nfs_share_flattened_error():
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.rename_nfs_share(
- nfs_share.RenameNfsShareRequest(),
- name='name_value',
- new_nfsshare_id='new_nfsshare_id_value',
- )
-
-@pytest.mark.asyncio
-async def test_rename_nfs_share_flattened_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.rename_nfs_share),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(nfs_share.NfsShare())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.rename_nfs_share(
- name='name_value',
- new_nfsshare_id='new_nfsshare_id_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
- arg = args[0].new_nfsshare_id
- mock_val = 'new_nfsshare_id_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_rename_nfs_share_flattened_error_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.rename_nfs_share(
- nfs_share.RenameNfsShareRequest(),
- name='name_value',
- new_nfsshare_id='new_nfsshare_id_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- nfs_share.DeleteNfsShareRequest,
- dict,
-])
-def test_delete_nfs_share(request_type, transport: str = 'grpc'):
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_nfs_share),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/spam')
- response = client.delete_nfs_share(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = nfs_share.DeleteNfsShareRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
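- # delete_nfs_share is a long-running operation, so the client returns
- # an operation future rather than the raw Operation proto.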
- assert isinstance(response, future.Future) - - -def test_delete_nfs_share_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = nfs_share.DeleteNfsShareRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_nfs_share), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_nfs_share(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == nfs_share.DeleteNfsShareRequest( - name='name_value', - ) - -def test_delete_nfs_share_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_nfs_share in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_nfs_share] = mock_rpc - request = {} - client.delete_nfs_share(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_nfs_share(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_nfs_share_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_nfs_share in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_nfs_share] = mock_rpc - - request = {} - await client.delete_nfs_share(request) - - # Establish that the underlying gRPC stub method was called. 
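Both ``use_cached_wrapped_rpc`` tests above rely on the same transport invariant: retry/timeout wrappers are built once, at construction time, and reused from a dict keyed by the raw stub method. That is why the tests can swap a mock into ``_transport._wrapped_methods``, and why ``wrapper_fn`` is never invoked again on later calls. A simplified model of that caching scheme (a sketch, not the real transport code):

.. code-block:: python

    class _Transport:
        def __init__(self, stubs, wrap):
            # Wrap every stub exactly once, up front.
            self._wrapped_methods = {stub: wrap(stub) for stub in stubs}

        def call(self, stub, request):
            # Later calls reuse the cached wrapper; nothing is re-wrapped.
            return self._wrapped_methods[stub](request)

    wrap_calls = []
    def fake_wrap(stub):
        wrap_calls.append(stub)
        return lambda request: (stub, request)

    transport = _Transport(stubs=["delete_nfs_share"], wrap=fake_wrap)
    transport.call("delete_nfs_share", {})
    transport.call("delete_nfs_share", {})
    assert len(wrap_calls) == 1  # wrapped once, called twice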
- assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_nfs_share(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_nfs_share_async(transport: str = 'grpc_asyncio', request_type=nfs_share.DeleteNfsShareRequest): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_nfs_share), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_nfs_share(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = nfs_share.DeleteNfsShareRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_nfs_share_async_from_dict(): - await test_delete_nfs_share_async(request_type=dict) - -def test_delete_nfs_share_field_headers(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = nfs_share.DeleteNfsShareRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_nfs_share), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_nfs_share(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_nfs_share_field_headers_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = nfs_share.DeleteNfsShareRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_nfs_share), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_nfs_share(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
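The field-header tests assert that any request field bound into the HTTP/1.1 URI is mirrored into ``x-goog-request-params`` metadata, so routing layers can dispatch without parsing the request body. A hand-rolled sketch of how such a header pair can be built (the real client uses ``gapic_v1.routing_header.to_grpc_metadata``, as the pager test further below shows):

.. code-block:: python

    from urllib.parse import quote

    def routing_metadata(params):
        # Encode ('name', 'name_value') pairs as 'name=name_value', '&'-joined.
        value = "&".join(f"{key}={quote(str(val))}" for key, val in params)
        return ("x-goog-request-params", value)

    assert routing_metadata([("name", "name_value")]) == (
        "x-goog-request-params",
        "name=name_value",
    )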
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_delete_nfs_share_flattened():
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_nfs_share),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/op')
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.delete_nfs_share(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_delete_nfs_share_flattened_error():
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.delete_nfs_share(
- nfs_share.DeleteNfsShareRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_delete_nfs_share_flattened_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_nfs_share),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name='operations/spam')
- )
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.delete_nfs_share(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_nfs_share_flattened_error_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.delete_nfs_share(
- nfs_share.DeleteNfsShareRequest(),
- name='name_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- provisioning.ListProvisioningQuotasRequest,
- dict,
-])
-def test_list_provisioning_quotas(request_type, transport: str = 'grpc'):
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_provisioning_quotas),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = provisioning.ListProvisioningQuotasResponse( - next_page_token='next_page_token_value', - ) - response = client.list_provisioning_quotas(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = provisioning.ListProvisioningQuotasRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListProvisioningQuotasPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_provisioning_quotas_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = provisioning.ListProvisioningQuotasRequest( - parent='parent_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_provisioning_quotas), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_provisioning_quotas(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == provisioning.ListProvisioningQuotasRequest( - parent='parent_value', - page_token='page_token_value', - ) - -def test_list_provisioning_quotas_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_provisioning_quotas in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_provisioning_quotas] = mock_rpc - request = {} - client.list_provisioning_quotas(request) - - # Establish that the underlying gRPC stub method was called. 
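The ``non_empty_request_with_auto_populated_field`` tests exist because AIP-4235 lets a client fill a UUID4 field (conventionally ``request_id``) automatically when the caller leaves it unset; populating every *other* string field first proves the echo-back comparison is not vacuous. A sketch of the auto-population rule, assuming a hypothetical ``request_id`` field since the requests here may not define one:

.. code-block:: python

    import uuid

    def autopopulate(request: dict) -> dict:
        # Fill request_id with a fresh UUID4 only if the caller left it unset.
        if not request.get("request_id"):
            request = {**request, "request_id": str(uuid.uuid4())}
        return request

    req = autopopulate({"name": "name_value"})
    assert req["name"] == "name_value"  # caller's fields are untouched
    assert uuid.UUID(req["request_id"]).version == 4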
- assert mock_rpc.call_count == 1
-
- client.list_provisioning_quotas(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_provisioning_quotas_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.list_provisioning_quotas in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.list_provisioning_quotas] = mock_rpc
-
- request = {}
- await client.list_provisioning_quotas(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.list_provisioning_quotas(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_provisioning_quotas_async(transport: str = 'grpc_asyncio', request_type=provisioning.ListProvisioningQuotasRequest):
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_provisioning_quotas),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(provisioning.ListProvisioningQuotasResponse(
- next_page_token='next_page_token_value',
- ))
- response = await client.list_provisioning_quotas(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = provisioning.ListProvisioningQuotasRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListProvisioningQuotasAsyncPager)
- assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_provisioning_quotas_async_from_dict():
- await test_list_provisioning_quotas_async(request_type=dict)
-
-def test_list_provisioning_quotas_field_headers():
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = provisioning.ListProvisioningQuotasRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
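Note the sync/async asymmetry in the cached-RPC tests: the sync client can take a plain ``mock.Mock`` as a replacement wrapper, but the async client needs ``mock.AsyncMock`` so the substituted method can be awaited. A minimal illustration:

.. code-block:: python

    import asyncio
    from unittest import mock

    async def _demo():
        rpc = mock.AsyncMock(return_value="done")
        assert await rpc({}) == "done"  # awaitable, unlike a plain Mock
        assert rpc.await_count == 1

    asyncio.run(_demo())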
- with mock.patch.object(
- type(client.transport.list_provisioning_quotas),
- '__call__') as call:
- call.return_value = provisioning.ListProvisioningQuotasResponse()
- client.list_provisioning_quotas(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_provisioning_quotas_field_headers_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = provisioning.ListProvisioningQuotasRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_provisioning_quotas),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(provisioning.ListProvisioningQuotasResponse())
- await client.list_provisioning_quotas(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-def test_list_provisioning_quotas_flattened():
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_provisioning_quotas),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = provisioning.ListProvisioningQuotasResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.list_provisioning_quotas(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-
-def test_list_provisioning_quotas_flattened_error():
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.list_provisioning_quotas(
- provisioning.ListProvisioningQuotasRequest(),
- parent='parent_value',
- )
-
-@pytest.mark.asyncio
-async def test_list_provisioning_quotas_flattened_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_provisioning_quotas),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(provisioning.ListProvisioningQuotasResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.list_provisioning_quotas( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_provisioning_quotas_flattened_error_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_provisioning_quotas( - provisioning.ListProvisioningQuotasRequest(), - parent='parent_value', - ) - - -def test_list_provisioning_quotas_pager(transport_name: str = "grpc"): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_provisioning_quotas), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - provisioning.ListProvisioningQuotasResponse( - provisioning_quotas=[ - provisioning.ProvisioningQuota(), - provisioning.ProvisioningQuota(), - provisioning.ProvisioningQuota(), - ], - next_page_token='abc', - ), - provisioning.ListProvisioningQuotasResponse( - provisioning_quotas=[], - next_page_token='def', - ), - provisioning.ListProvisioningQuotasResponse( - provisioning_quotas=[ - provisioning.ProvisioningQuota(), - ], - next_page_token='ghi', - ), - provisioning.ListProvisioningQuotasResponse( - provisioning_quotas=[ - provisioning.ProvisioningQuota(), - provisioning.ProvisioningQuota(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_provisioning_quotas(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, provisioning.ProvisioningQuota) - for i in results) -def test_list_provisioning_quotas_pages(transport_name: str = "grpc"): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_provisioning_quotas), - '__call__') as call: - # Set the response to a series of pages. 
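The pager test above queues four pages plus a trailing ``RuntimeError`` through ``side_effect``, then checks that plain iteration yields all 3 + 0 + 1 + 2 = 6 items; the sentinel error is never reached because the final page's empty ``next_page_token`` ends the loop first. A stripped-down model of that page-walking loop:

.. code-block:: python

    def iterate_pager(fetch_page):
        # fetch_page(token) -> (items, next_token); '' marks the last page.
        token = ""
        while True:
            items, token = fetch_page(token)
            yield from items
            if not token:
                break

    pages = {"": (["a", "b", "c"], "abc"), "abc": ([], "def"),
             "def": (["d"], "ghi"), "ghi": (["e", "f"], "")}
    assert list(iterate_pager(lambda t: pages[t])) == ["a", "b", "c", "d", "e", "f"]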
- call.side_effect = ( - provisioning.ListProvisioningQuotasResponse( - provisioning_quotas=[ - provisioning.ProvisioningQuota(), - provisioning.ProvisioningQuota(), - provisioning.ProvisioningQuota(), - ], - next_page_token='abc', - ), - provisioning.ListProvisioningQuotasResponse( - provisioning_quotas=[], - next_page_token='def', - ), - provisioning.ListProvisioningQuotasResponse( - provisioning_quotas=[ - provisioning.ProvisioningQuota(), - ], - next_page_token='ghi', - ), - provisioning.ListProvisioningQuotasResponse( - provisioning_quotas=[ - provisioning.ProvisioningQuota(), - provisioning.ProvisioningQuota(), - ], - ), - RuntimeError, - ) - pages = list(client.list_provisioning_quotas(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_provisioning_quotas_async_pager(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_provisioning_quotas), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - provisioning.ListProvisioningQuotasResponse( - provisioning_quotas=[ - provisioning.ProvisioningQuota(), - provisioning.ProvisioningQuota(), - provisioning.ProvisioningQuota(), - ], - next_page_token='abc', - ), - provisioning.ListProvisioningQuotasResponse( - provisioning_quotas=[], - next_page_token='def', - ), - provisioning.ListProvisioningQuotasResponse( - provisioning_quotas=[ - provisioning.ProvisioningQuota(), - ], - next_page_token='ghi', - ), - provisioning.ListProvisioningQuotasResponse( - provisioning_quotas=[ - provisioning.ProvisioningQuota(), - provisioning.ProvisioningQuota(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_provisioning_quotas(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, provisioning.ProvisioningQuota) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_provisioning_quotas_async_pages(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_provisioning_quotas), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - provisioning.ListProvisioningQuotasResponse( - provisioning_quotas=[ - provisioning.ProvisioningQuota(), - provisioning.ProvisioningQuota(), - provisioning.ProvisioningQuota(), - ], - next_page_token='abc', - ), - provisioning.ListProvisioningQuotasResponse( - provisioning_quotas=[], - next_page_token='def', - ), - provisioning.ListProvisioningQuotasResponse( - provisioning_quotas=[ - provisioning.ProvisioningQuota(), - ], - next_page_token='ghi', - ), - provisioning.ListProvisioningQuotasResponse( - provisioning_quotas=[ - provisioning.ProvisioningQuota(), - provisioning.ProvisioningQuota(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_provisioning_quotas(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - provisioning.SubmitProvisioningConfigRequest, - dict, -]) -def test_submit_provisioning_config(request_type, transport: str = 'grpc'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.submit_provisioning_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = provisioning.SubmitProvisioningConfigResponse( - ) - response = client.submit_provisioning_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = provisioning.SubmitProvisioningConfigRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, provisioning.SubmitProvisioningConfigResponse) - - -def test_submit_provisioning_config_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = provisioning.SubmitProvisioningConfigRequest( - parent='parent_value', - email='email_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.submit_provisioning_config), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
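The async paging tests above drive the same page sequence through ``async for``: awaiting the method returns an async pager whose ``.pages`` attribute is an async iterator, hence the ``# pragma: no branch`` coverage workaround for Python 3.9 noted in the loop. A self-contained model of consuming such an iterator:

.. code-block:: python

    import asyncio

    async def pages(tokens):
        # Stand-in async iterator over already-materialized page tokens.
        for token in tokens:
            yield token

    async def _demo():
        seen = [t async for t in pages(["abc", "def", "ghi", ""])]
        assert seen == ["abc", "def", "ghi", ""]

    asyncio.run(_demo())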
- client.submit_provisioning_config(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == provisioning.SubmitProvisioningConfigRequest( - parent='parent_value', - email='email_value', - ) - -def test_submit_provisioning_config_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.submit_provisioning_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.submit_provisioning_config] = mock_rpc - request = {} - client.submit_provisioning_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.submit_provisioning_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_submit_provisioning_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.submit_provisioning_config in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.submit_provisioning_config] = mock_rpc - - request = {} - await client.submit_provisioning_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.submit_provisioning_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_submit_provisioning_config_async(transport: str = 'grpc_asyncio', request_type=provisioning.SubmitProvisioningConfigRequest): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.submit_provisioning_config), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(provisioning.SubmitProvisioningConfigResponse(
- ))
- response = await client.submit_provisioning_config(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = provisioning.SubmitProvisioningConfigRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, provisioning.SubmitProvisioningConfigResponse)
-
-
-@pytest.mark.asyncio
-async def test_submit_provisioning_config_async_from_dict():
- await test_submit_provisioning_config_async(request_type=dict)
-
-def test_submit_provisioning_config_field_headers():
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = provisioning.SubmitProvisioningConfigRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.submit_provisioning_config),
- '__call__') as call:
- call.return_value = provisioning.SubmitProvisioningConfigResponse()
- client.submit_provisioning_config(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_submit_provisioning_config_field_headers_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = provisioning.SubmitProvisioningConfigRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.submit_provisioning_config),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(provisioning.SubmitProvisioningConfigResponse())
- await client.submit_provisioning_config(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-def test_submit_provisioning_config_flattened():
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.submit_provisioning_config),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = provisioning.SubmitProvisioningConfigResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.submit_provisioning_config(
- parent='parent_value',
- provisioning_config=provisioning.ProvisioningConfig(name='name_value'),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
- arg = args[0].provisioning_config
- mock_val = provisioning.ProvisioningConfig(name='name_value')
- assert arg == mock_val
-
-
-def test_submit_provisioning_config_flattened_error():
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.submit_provisioning_config(
- provisioning.SubmitProvisioningConfigRequest(),
- parent='parent_value',
- provisioning_config=provisioning.ProvisioningConfig(name='name_value'),
- )
-
-@pytest.mark.asyncio
-async def test_submit_provisioning_config_flattened_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.submit_provisioning_config),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(provisioning.SubmitProvisioningConfigResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.submit_provisioning_config(
- parent='parent_value',
- provisioning_config=provisioning.ProvisioningConfig(name='name_value'),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
- arg = args[0].provisioning_config
- mock_val = provisioning.ProvisioningConfig(name='name_value')
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_submit_provisioning_config_flattened_error_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.submit_provisioning_config(
- provisioning.SubmitProvisioningConfigRequest(),
- parent='parent_value',
- provisioning_config=provisioning.ProvisioningConfig(name='name_value'),
- )
-
-
-@pytest.mark.parametrize("request_type", [
- provisioning.GetProvisioningConfigRequest,
- dict,
-])
-def test_get_provisioning_config(request_type, transport: str = 'grpc'):
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_provisioning_config),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = provisioning.ProvisioningConfig( - name='name_value', - ticket_id='ticket_id_value', - handover_service_account='handover_service_account_value', - email='email_value', - state=provisioning.ProvisioningConfig.State.DRAFT, - location='location_value', - cloud_console_uri='cloud_console_uri_value', - vpc_sc_enabled=True, - status_message='status_message_value', - custom_id='custom_id_value', - ) - response = client.get_provisioning_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = provisioning.GetProvisioningConfigRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, provisioning.ProvisioningConfig) - assert response.name == 'name_value' - assert response.ticket_id == 'ticket_id_value' - assert response.handover_service_account == 'handover_service_account_value' - assert response.email == 'email_value' - assert response.state == provisioning.ProvisioningConfig.State.DRAFT - assert response.location == 'location_value' - assert response.cloud_console_uri == 'cloud_console_uri_value' - assert response.vpc_sc_enabled is True - assert response.status_message == 'status_message_value' - assert response.custom_id == 'custom_id_value' - - -def test_get_provisioning_config_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = provisioning.GetProvisioningConfigRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_provisioning_config), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_provisioning_config(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == provisioning.GetProvisioningConfigRequest( - name='name_value', - ) - -def test_get_provisioning_config_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_provisioning_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_provisioning_config] = mock_rpc - request = {} - client.get_provisioning_config(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.get_provisioning_config(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_provisioning_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.get_provisioning_config in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.get_provisioning_config] = mock_rpc
-
- request = {}
- await client.get_provisioning_config(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.get_provisioning_config(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_provisioning_config_async(transport: str = 'grpc_asyncio', request_type=provisioning.GetProvisioningConfigRequest):
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_provisioning_config),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(provisioning.ProvisioningConfig(
- name='name_value',
- ticket_id='ticket_id_value',
- handover_service_account='handover_service_account_value',
- email='email_value',
- state=provisioning.ProvisioningConfig.State.DRAFT,
- location='location_value',
- cloud_console_uri='cloud_console_uri_value',
- vpc_sc_enabled=True,
- status_message='status_message_value',
- custom_id='custom_id_value',
- ))
- response = await client.get_provisioning_config(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = provisioning.GetProvisioningConfigRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, provisioning.ProvisioningConfig) - assert response.name == 'name_value' - assert response.ticket_id == 'ticket_id_value' - assert response.handover_service_account == 'handover_service_account_value' - assert response.email == 'email_value' - assert response.state == provisioning.ProvisioningConfig.State.DRAFT - assert response.location == 'location_value' - assert response.cloud_console_uri == 'cloud_console_uri_value' - assert response.vpc_sc_enabled is True - assert response.status_message == 'status_message_value' - assert response.custom_id == 'custom_id_value' - - -@pytest.mark.asyncio -async def test_get_provisioning_config_async_from_dict(): - await test_get_provisioning_config_async(request_type=dict) - -def test_get_provisioning_config_field_headers(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = provisioning.GetProvisioningConfigRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_provisioning_config), - '__call__') as call: - call.return_value = provisioning.ProvisioningConfig() - client.get_provisioning_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_provisioning_config_field_headers_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = provisioning.GetProvisioningConfigRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_provisioning_config), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(provisioning.ProvisioningConfig()) - await client.get_provisioning_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_provisioning_config_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_provisioning_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = provisioning.ProvisioningConfig() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_provisioning_config( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_get_provisioning_config_flattened_error():
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.get_provisioning_config(
- provisioning.GetProvisioningConfigRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_get_provisioning_config_flattened_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_provisioning_config),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(provisioning.ProvisioningConfig())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.get_provisioning_config(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_provisioning_config_flattened_error_async():
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.get_provisioning_config(
- provisioning.GetProvisioningConfigRequest(),
- name='name_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- provisioning.CreateProvisioningConfigRequest,
- dict,
-])
-def test_create_provisioning_config(request_type, transport: str = 'grpc'):
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_provisioning_config),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = provisioning.ProvisioningConfig(
- name='name_value',
- ticket_id='ticket_id_value',
- handover_service_account='handover_service_account_value',
- email='email_value',
- state=provisioning.ProvisioningConfig.State.DRAFT,
- location='location_value',
- cloud_console_uri='cloud_console_uri_value',
- vpc_sc_enabled=True,
- status_message='status_message_value',
- custom_id='custom_id_value',
- )
- response = client.create_provisioning_config(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = provisioning.CreateProvisioningConfigRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, provisioning.ProvisioningConfig) - assert response.name == 'name_value' - assert response.ticket_id == 'ticket_id_value' - assert response.handover_service_account == 'handover_service_account_value' - assert response.email == 'email_value' - assert response.state == provisioning.ProvisioningConfig.State.DRAFT - assert response.location == 'location_value' - assert response.cloud_console_uri == 'cloud_console_uri_value' - assert response.vpc_sc_enabled is True - assert response.status_message == 'status_message_value' - assert response.custom_id == 'custom_id_value' - - -def test_create_provisioning_config_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = provisioning.CreateProvisioningConfigRequest( - parent='parent_value', - email='email_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_provisioning_config), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_provisioning_config(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == provisioning.CreateProvisioningConfigRequest( - parent='parent_value', - email='email_value', - ) - -def test_create_provisioning_config_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_provisioning_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_provisioning_config] = mock_rpc - request = {} - client.create_provisioning_config(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.create_provisioning_config(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_create_provisioning_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.create_provisioning_config in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.create_provisioning_config] = mock_rpc
-
- request = {}
- await client.create_provisioning_config(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.create_provisioning_config(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_create_provisioning_config_async(transport: str = 'grpc_asyncio', request_type=provisioning.CreateProvisioningConfigRequest):
- client = BareMetalSolutionAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_provisioning_config),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(provisioning.ProvisioningConfig(
- name='name_value',
- ticket_id='ticket_id_value',
- handover_service_account='handover_service_account_value',
- email='email_value',
- state=provisioning.ProvisioningConfig.State.DRAFT,
- location='location_value',
- cloud_console_uri='cloud_console_uri_value',
- vpc_sc_enabled=True,
- status_message='status_message_value',
- custom_id='custom_id_value',
- ))
- response = await client.create_provisioning_config(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = provisioning.CreateProvisioningConfigRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, provisioning.ProvisioningConfig) - assert response.name == 'name_value' - assert response.ticket_id == 'ticket_id_value' - assert response.handover_service_account == 'handover_service_account_value' - assert response.email == 'email_value' - assert response.state == provisioning.ProvisioningConfig.State.DRAFT - assert response.location == 'location_value' - assert response.cloud_console_uri == 'cloud_console_uri_value' - assert response.vpc_sc_enabled is True - assert response.status_message == 'status_message_value' - assert response.custom_id == 'custom_id_value' - - -@pytest.mark.asyncio -async def test_create_provisioning_config_async_from_dict(): - await test_create_provisioning_config_async(request_type=dict) - -def test_create_provisioning_config_field_headers(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = provisioning.CreateProvisioningConfigRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_provisioning_config), - '__call__') as call: - call.return_value = provisioning.ProvisioningConfig() - client.create_provisioning_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_provisioning_config_field_headers_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = provisioning.CreateProvisioningConfigRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_provisioning_config), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(provisioning.ProvisioningConfig()) - await client.create_provisioning_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_provisioning_config_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_provisioning_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = provisioning.ProvisioningConfig() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_provisioning_config( - parent='parent_value', - provisioning_config=provisioning.ProvisioningConfig(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].provisioning_config
-        mock_val = provisioning.ProvisioningConfig(name='name_value')
-        assert arg == mock_val
-
-
-def test_create_provisioning_config_flattened_error():
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.create_provisioning_config(
-            provisioning.CreateProvisioningConfigRequest(),
-            parent='parent_value',
-            provisioning_config=provisioning.ProvisioningConfig(name='name_value'),
-        )
-
-@pytest.mark.asyncio
-async def test_create_provisioning_config_flattened_async():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_provisioning_config),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(provisioning.ProvisioningConfig())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.create_provisioning_config(
-            parent='parent_value',
-            provisioning_config=provisioning.ProvisioningConfig(name='name_value'),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].provisioning_config
-        mock_val = provisioning.ProvisioningConfig(name='name_value')
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_provisioning_config_flattened_error_async():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.create_provisioning_config(
-            provisioning.CreateProvisioningConfigRequest(),
-            parent='parent_value',
-            provisioning_config=provisioning.ProvisioningConfig(name='name_value'),
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  provisioning.UpdateProvisioningConfigRequest,
-  dict,
-])
-def test_update_provisioning_config(request_type, transport: str = 'grpc'):
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_provisioning_config),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
- call.return_value = provisioning.ProvisioningConfig( - name='name_value', - ticket_id='ticket_id_value', - handover_service_account='handover_service_account_value', - email='email_value', - state=provisioning.ProvisioningConfig.State.DRAFT, - location='location_value', - cloud_console_uri='cloud_console_uri_value', - vpc_sc_enabled=True, - status_message='status_message_value', - custom_id='custom_id_value', - ) - response = client.update_provisioning_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = provisioning.UpdateProvisioningConfigRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, provisioning.ProvisioningConfig) - assert response.name == 'name_value' - assert response.ticket_id == 'ticket_id_value' - assert response.handover_service_account == 'handover_service_account_value' - assert response.email == 'email_value' - assert response.state == provisioning.ProvisioningConfig.State.DRAFT - assert response.location == 'location_value' - assert response.cloud_console_uri == 'cloud_console_uri_value' - assert response.vpc_sc_enabled is True - assert response.status_message == 'status_message_value' - assert response.custom_id == 'custom_id_value' - - -def test_update_provisioning_config_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = provisioning.UpdateProvisioningConfigRequest( - email='email_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_provisioning_config), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_provisioning_config(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == provisioning.UpdateProvisioningConfigRequest( - email='email_value', - ) - -def test_update_provisioning_config_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_provisioning_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_provisioning_config] = mock_rpc - request = {} - client.update_provisioning_config(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.update_provisioning_config(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_update_provisioning_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = BareMetalSolutionAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.update_provisioning_config in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.update_provisioning_config] = mock_rpc
-
-        request = {}
-        await client.update_provisioning_config(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.update_provisioning_config(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_update_provisioning_config_async(transport: str = 'grpc_asyncio', request_type=provisioning.UpdateProvisioningConfigRequest):
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_provisioning_config),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(provisioning.ProvisioningConfig(
-            name='name_value',
-            ticket_id='ticket_id_value',
-            handover_service_account='handover_service_account_value',
-            email='email_value',
-            state=provisioning.ProvisioningConfig.State.DRAFT,
-            location='location_value',
-            cloud_console_uri='cloud_console_uri_value',
-            vpc_sc_enabled=True,
-            status_message='status_message_value',
-            custom_id='custom_id_value',
-        ))
-        response = await client.update_provisioning_config(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = provisioning.UpdateProvisioningConfigRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
- assert isinstance(response, provisioning.ProvisioningConfig) - assert response.name == 'name_value' - assert response.ticket_id == 'ticket_id_value' - assert response.handover_service_account == 'handover_service_account_value' - assert response.email == 'email_value' - assert response.state == provisioning.ProvisioningConfig.State.DRAFT - assert response.location == 'location_value' - assert response.cloud_console_uri == 'cloud_console_uri_value' - assert response.vpc_sc_enabled is True - assert response.status_message == 'status_message_value' - assert response.custom_id == 'custom_id_value' - - -@pytest.mark.asyncio -async def test_update_provisioning_config_async_from_dict(): - await test_update_provisioning_config_async(request_type=dict) - -def test_update_provisioning_config_field_headers(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = provisioning.UpdateProvisioningConfigRequest() - - request.provisioning_config.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_provisioning_config), - '__call__') as call: - call.return_value = provisioning.ProvisioningConfig() - client.update_provisioning_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'provisioning_config.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_provisioning_config_field_headers_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = provisioning.UpdateProvisioningConfigRequest() - - request.provisioning_config.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_provisioning_config), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(provisioning.ProvisioningConfig()) - await client.update_provisioning_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'provisioning_config.name=name_value', - ) in kw['metadata'] - - -def test_update_provisioning_config_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_provisioning_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = provisioning.ProvisioningConfig() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
-        client.update_provisioning_config(
-            provisioning_config=provisioning.ProvisioningConfig(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].provisioning_config
-        mock_val = provisioning.ProvisioningConfig(name='name_value')
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-
-def test_update_provisioning_config_flattened_error():
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.update_provisioning_config(
-            provisioning.UpdateProvisioningConfigRequest(),
-            provisioning_config=provisioning.ProvisioningConfig(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-@pytest.mark.asyncio
-async def test_update_provisioning_config_flattened_async():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_provisioning_config),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(provisioning.ProvisioningConfig())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.update_provisioning_config(
-            provisioning_config=provisioning.ProvisioningConfig(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].provisioning_config
-        mock_val = provisioning.ProvisioningConfig(name='name_value')
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_provisioning_config_flattened_error_async():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.update_provisioning_config(
-            provisioning.UpdateProvisioningConfigRequest(),
-            provisioning_config=provisioning.ProvisioningConfig(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  network.RenameNetworkRequest,
-  dict,
-])
-def test_rename_network(request_type, transport: str = 'grpc'):
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.rename_network), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = network.Network( - name='name_value', - id='id_value', - type_=network.Network.Type.CLIENT, - ip_address='ip_address_value', - mac_address=['mac_address_value'], - state=network.Network.State.PROVISIONING, - vlan_id='vlan_id_value', - cidr='cidr_value', - services_cidr='services_cidr_value', - pod='pod_value', - jumbo_frames_enabled=True, - gateway_ip='gateway_ip_value', - ) - response = client.rename_network(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = network.RenameNetworkRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, network.Network) - assert response.name == 'name_value' - assert response.id == 'id_value' - assert response.type_ == network.Network.Type.CLIENT - assert response.ip_address == 'ip_address_value' - assert response.mac_address == ['mac_address_value'] - assert response.state == network.Network.State.PROVISIONING - assert response.vlan_id == 'vlan_id_value' - assert response.cidr == 'cidr_value' - assert response.services_cidr == 'services_cidr_value' - assert response.pod == 'pod_value' - assert response.jumbo_frames_enabled is True - assert response.gateway_ip == 'gateway_ip_value' - - -def test_rename_network_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = network.RenameNetworkRequest( - name='name_value', - new_network_id='new_network_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_network), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.rename_network(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == network.RenameNetworkRequest( - name='name_value', - new_network_id='new_network_id_value', - ) - -def test_rename_network_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.rename_network in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.rename_network] = mock_rpc - request = {} - client.rename_network(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.rename_network(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_rename_network_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = BareMetalSolutionAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.rename_network in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.rename_network] = mock_rpc
-
-        request = {}
-        await client.rename_network(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.rename_network(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_rename_network_async(transport: str = 'grpc_asyncio', request_type=network.RenameNetworkRequest):
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.rename_network),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(network.Network(
-            name='name_value',
-            id='id_value',
-            type_=network.Network.Type.CLIENT,
-            ip_address='ip_address_value',
-            mac_address=['mac_address_value'],
-            state=network.Network.State.PROVISIONING,
-            vlan_id='vlan_id_value',
-            cidr='cidr_value',
-            services_cidr='services_cidr_value',
-            pod='pod_value',
-            jumbo_frames_enabled=True,
-            gateway_ip='gateway_ip_value',
-        ))
-        response = await client.rename_network(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = network.RenameNetworkRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
- assert isinstance(response, network.Network) - assert response.name == 'name_value' - assert response.id == 'id_value' - assert response.type_ == network.Network.Type.CLIENT - assert response.ip_address == 'ip_address_value' - assert response.mac_address == ['mac_address_value'] - assert response.state == network.Network.State.PROVISIONING - assert response.vlan_id == 'vlan_id_value' - assert response.cidr == 'cidr_value' - assert response.services_cidr == 'services_cidr_value' - assert response.pod == 'pod_value' - assert response.jumbo_frames_enabled is True - assert response.gateway_ip == 'gateway_ip_value' - - -@pytest.mark.asyncio -async def test_rename_network_async_from_dict(): - await test_rename_network_async(request_type=dict) - -def test_rename_network_field_headers(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = network.RenameNetworkRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_network), - '__call__') as call: - call.return_value = network.Network() - client.rename_network(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_rename_network_field_headers_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = network.RenameNetworkRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_network), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(network.Network()) - await client.rename_network(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_rename_network_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_network), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = network.Network() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.rename_network( - name='name_value', - new_network_id='new_network_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-        arg = args[0].new_network_id
-        mock_val = 'new_network_id_value'
-        assert arg == mock_val
-
-
-def test_rename_network_flattened_error():
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.rename_network(
-            network.RenameNetworkRequest(),
-            name='name_value',
-            new_network_id='new_network_id_value',
-        )
-
-@pytest.mark.asyncio
-async def test_rename_network_flattened_async():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.rename_network),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(network.Network())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.rename_network(
-            name='name_value',
-            new_network_id='new_network_id_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-        arg = args[0].new_network_id
-        mock_val = 'new_network_id_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_rename_network_flattened_error_async():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.rename_network(
-            network.RenameNetworkRequest(),
-            name='name_value',
-            new_network_id='new_network_id_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  osimage.ListOSImagesRequest,
-  dict,
-])
-def test_list_os_images(request_type, transport: str = 'grpc'):
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_os_images),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = osimage.ListOSImagesResponse(
-            next_page_token='next_page_token_value',
-        )
-        response = client.list_os_images(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = osimage.ListOSImagesRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListOSImagesPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-def test_list_os_images_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
- client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = osimage.ListOSImagesRequest( - parent='parent_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_os_images), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_os_images(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == osimage.ListOSImagesRequest( - parent='parent_value', - page_token='page_token_value', - ) - -def test_list_os_images_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_os_images in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_os_images] = mock_rpc - request = {} - client.list_os_images(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_os_images(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_os_images_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_os_images in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_os_images] = mock_rpc - - request = {} - await client.list_os_images(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        await client.list_os_images(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_os_images_async(transport: str = 'grpc_asyncio', request_type=osimage.ListOSImagesRequest):
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_os_images),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(osimage.ListOSImagesResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.list_os_images(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = osimage.ListOSImagesRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListOSImagesAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_os_images_async_from_dict():
-    await test_list_os_images_async(request_type=dict)
-
-def test_list_os_images_field_headers():
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = osimage.ListOSImagesRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_os_images),
-            '__call__') as call:
-        call.return_value = osimage.ListOSImagesResponse()
-        client.list_os_images(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_os_images_field_headers_async():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = osimage.ListOSImagesRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_os_images),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(osimage.ListOSImagesResponse())
-        await client.list_os_images(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_list_os_images_flattened():
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_os_images),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = osimage.ListOSImagesResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_os_images(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_os_images_flattened_error():
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_os_images(
-            osimage.ListOSImagesRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_os_images_flattened_async():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_os_images),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(osimage.ListOSImagesResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_os_images(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_os_images_flattened_error_async():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_os_images(
-            osimage.ListOSImagesRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_os_images_pager(transport_name: str = "grpc"):
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_os_images),
-            '__call__') as call:
-        # Set the response to a series of pages.
- call.side_effect = ( - osimage.ListOSImagesResponse( - os_images=[ - osimage.OSImage(), - osimage.OSImage(), - osimage.OSImage(), - ], - next_page_token='abc', - ), - osimage.ListOSImagesResponse( - os_images=[], - next_page_token='def', - ), - osimage.ListOSImagesResponse( - os_images=[ - osimage.OSImage(), - ], - next_page_token='ghi', - ), - osimage.ListOSImagesResponse( - os_images=[ - osimage.OSImage(), - osimage.OSImage(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_os_images(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, osimage.OSImage) - for i in results) -def test_list_os_images_pages(transport_name: str = "grpc"): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_os_images), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - osimage.ListOSImagesResponse( - os_images=[ - osimage.OSImage(), - osimage.OSImage(), - osimage.OSImage(), - ], - next_page_token='abc', - ), - osimage.ListOSImagesResponse( - os_images=[], - next_page_token='def', - ), - osimage.ListOSImagesResponse( - os_images=[ - osimage.OSImage(), - ], - next_page_token='ghi', - ), - osimage.ListOSImagesResponse( - os_images=[ - osimage.OSImage(), - osimage.OSImage(), - ], - ), - RuntimeError, - ) - pages = list(client.list_os_images(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_os_images_async_pager(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_os_images), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - osimage.ListOSImagesResponse( - os_images=[ - osimage.OSImage(), - osimage.OSImage(), - osimage.OSImage(), - ], - next_page_token='abc', - ), - osimage.ListOSImagesResponse( - os_images=[], - next_page_token='def', - ), - osimage.ListOSImagesResponse( - os_images=[ - osimage.OSImage(), - ], - next_page_token='ghi', - ), - osimage.ListOSImagesResponse( - os_images=[ - osimage.OSImage(), - osimage.OSImage(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_os_images(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, osimage.OSImage) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_os_images_async_pages(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_os_images), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - osimage.ListOSImagesResponse( - os_images=[ - osimage.OSImage(), - osimage.OSImage(), - osimage.OSImage(), - ], - next_page_token='abc', - ), - osimage.ListOSImagesResponse( - os_images=[], - next_page_token='def', - ), - osimage.ListOSImagesResponse( - os_images=[ - osimage.OSImage(), - ], - next_page_token='ghi', - ), - osimage.ListOSImagesResponse( - os_images=[ - osimage.OSImage(), - osimage.OSImage(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_os_images(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_list_instances_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_instances in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_instances] = mock_rpc - - request = {} - client.list_instances(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_instances(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_instances_rest_required_fields(request_type=instance.ListInstancesRequest): - transport_class = transports.BareMetalSolutionRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instances._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instances._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. 
- return_value = instance.ListInstancesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = instance.ListInstancesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_instances(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_instances_rest_unset_required_fields(): - transport = transports.BareMetalSolutionRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_instances._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_instances_rest_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = instance.ListInstancesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = instance.ListInstancesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_instances(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*/locations/*}/instances" % client.transport._host, args[1]) - - -def test_list_instances_rest_flattened_error(transport: str = 'rest'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
-    with pytest.raises(ValueError):
-        client.list_instances(
-            instance.ListInstancesRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_instances_rest_pager(transport: str = 'rest'):
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # Set the response as a series of pages
-        response = (
-            instance.ListInstancesResponse(
-                instances=[
-                    instance.Instance(),
-                    instance.Instance(),
-                    instance.Instance(),
-                ],
-                next_page_token='abc',
-            ),
-            instance.ListInstancesResponse(
-                instances=[],
-                next_page_token='def',
-            ),
-            instance.ListInstancesResponse(
-                instances=[
-                    instance.Instance(),
-                ],
-                next_page_token='ghi',
-            ),
-            instance.ListInstancesResponse(
-                instances=[
-                    instance.Instance(),
-                    instance.Instance(),
-                ],
-            ),
-        )
-        # Two responses for two calls
-        response = response + response
-
-        # Wrap the values into proper Response objs
-        response = tuple(instance.ListInstancesResponse.to_json(x) for x in response)
-        return_values = tuple(Response() for i in response)
-        for return_val, response_val in zip(return_values, response):
-            return_val._content = response_val.encode('UTF-8')
-            return_val.status_code = 200
-        req.side_effect = return_values
-
-        sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
-        pager = client.list_instances(request=sample_request)
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, instance.Instance)
-                   for i in results)
-
-        pages = list(client.list_instances(request=sample_request).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-
-def test_get_instance_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = BareMetalSolutionClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.get_instance in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.get_instance] = mock_rpc
-
-        request = {}
-        client.get_instance(request)
-
-        # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1 - - client.get_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_instance_rest_required_fields(request_type=instance.GetInstanceRequest): - transport_class = transports.BareMetalSolutionRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = instance.Instance() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = instance.Instance.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_instance(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_instance_rest_unset_required_fields(): - transport = transports.BareMetalSolutionRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_instance_rest_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = instance.Instance() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = instance.Instance.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_instance(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/instances/*}" % client.transport._host, args[1]) - - -def test_get_instance_rest_flattened_error(transport: str = 'rest'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_instance( - instance.GetInstanceRequest(), - name='name_value', - ) - - -def test_update_instance_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_instance in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_instance] = mock_rpc - - request = {} - client.update_instance(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_instance_rest_required_fields(request_type=gcb_instance.UpdateInstanceRequest): - transport_class = transports.BareMetalSolutionRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_instance(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_instance_rest_unset_required_fields(): - transport = transports.BareMetalSolutionRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", )) & set(("instance", ))) - - -def test_update_instance_rest_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} - - # get truthy value for each flattened field - mock_args = dict( - instance=gcb_instance.Instance(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_instance(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{instance.name=projects/*/locations/*/instances/*}" % client.transport._host, args[1]) - - -def test_update_instance_rest_flattened_error(transport: str = 'rest'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_instance( - gcb_instance.UpdateInstanceRequest(), - instance=gcb_instance.Instance(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_rename_instance_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.rename_instance in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.rename_instance] = mock_rpc - - request = {} - client.rename_instance(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.rename_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_rename_instance_rest_required_fields(request_type=instance.RenameInstanceRequest): - transport_class = transports.BareMetalSolutionRestTransport - - request_init = {} - request_init["name"] = "" - request_init["new_instance_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).rename_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - jsonified_request["newInstanceId"] = 'new_instance_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).rename_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - assert "newInstanceId" in jsonified_request - assert jsonified_request["newInstanceId"] == 'new_instance_id_value' - - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = instance.Instance() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = instance.Instance.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.rename_instance(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_rename_instance_rest_unset_required_fields(): - transport = transports.BareMetalSolutionRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.rename_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", "newInstanceId", ))) - - -def test_rename_instance_rest_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = instance.Instance() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - new_instance_id='new_instance_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = instance.Instance.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.rename_instance(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/instances/*}:rename" % client.transport._host, args[1]) - - -def test_rename_instance_rest_flattened_error(transport: str = 'rest'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.rename_instance( - instance.RenameInstanceRequest(), - name='name_value', - new_instance_id='new_instance_id_value', - ) - - -def test_reset_instance_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.reset_instance in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.reset_instance] = mock_rpc - - request = {} - client.reset_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.reset_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_reset_instance_rest_required_fields(request_type=instance.ResetInstanceRequest): - transport_class = transports.BareMetalSolutionRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).reset_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).reset_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.reset_instance(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_reset_instance_rest_unset_required_fields(): - transport = transports.BareMetalSolutionRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.reset_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_reset_instance_rest_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.reset_instance(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/instances/*}:reset" % client.transport._host, args[1]) - - -def test_reset_instance_rest_flattened_error(transport: str = 'rest'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.reset_instance( - instance.ResetInstanceRequest(), - name='name_value', - ) - - -def test_start_instance_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.start_instance in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.start_instance] = mock_rpc - - request = {} - client.start_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.start_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_start_instance_rest_required_fields(request_type=instance.StartInstanceRequest): - transport_class = transports.BareMetalSolutionRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).start_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).start_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.start_instance(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_start_instance_rest_unset_required_fields(): - transport = transports.BareMetalSolutionRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.start_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_start_instance_rest_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.start_instance(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/instances/*}:start" % client.transport._host, args[1]) - - -def test_start_instance_rest_flattened_error(transport: str = 'rest'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.start_instance( - instance.StartInstanceRequest(), - name='name_value', - ) - - -def test_stop_instance_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.stop_instance in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.stop_instance] = mock_rpc - - request = {} - client.stop_instance(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.stop_instance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_stop_instance_rest_required_fields(request_type=instance.StopInstanceRequest): - transport_class = transports.BareMetalSolutionRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).stop_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).stop_instance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.stop_instance(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_stop_instance_rest_unset_required_fields(): - transport = transports.BareMetalSolutionRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.stop_instance._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_stop_instance_rest_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.stop_instance(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/instances/*}:stop" % client.transport._host, args[1]) - - -def test_stop_instance_rest_flattened_error(transport: str = 'rest'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.stop_instance( - instance.StopInstanceRequest(), - name='name_value', - ) - - -def test_enable_interactive_serial_console_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.enable_interactive_serial_console in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.enable_interactive_serial_console] = mock_rpc - - request = {} - client.enable_interactive_serial_console(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.enable_interactive_serial_console(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_enable_interactive_serial_console_rest_required_fields(request_type=instance.EnableInteractiveSerialConsoleRequest): - transport_class = transports.BareMetalSolutionRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).enable_interactive_serial_console._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).enable_interactive_serial_console._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.enable_interactive_serial_console(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_enable_interactive_serial_console_rest_unset_required_fields(): - transport = transports.BareMetalSolutionRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.enable_interactive_serial_console._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_enable_interactive_serial_console_rest_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.enable_interactive_serial_console(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/instances/*}:enableInteractiveSerialConsole" % client.transport._host, args[1]) - - -def test_enable_interactive_serial_console_rest_flattened_error(transport: str = 'rest'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.enable_interactive_serial_console( - instance.EnableInteractiveSerialConsoleRequest(), - name='name_value', - ) - - -def test_disable_interactive_serial_console_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.disable_interactive_serial_console in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.disable_interactive_serial_console] = mock_rpc - - request = {} - client.disable_interactive_serial_console(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.disable_interactive_serial_console(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_disable_interactive_serial_console_rest_required_fields(request_type=instance.DisableInteractiveSerialConsoleRequest): - transport_class = transports.BareMetalSolutionRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).disable_interactive_serial_console._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).disable_interactive_serial_console._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.disable_interactive_serial_console(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_disable_interactive_serial_console_rest_unset_required_fields(): - transport = transports.BareMetalSolutionRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.disable_interactive_serial_console._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_disable_interactive_serial_console_rest_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.disable_interactive_serial_console(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/instances/*}:disableInteractiveSerialConsole" % client.transport._host, args[1]) - - -def test_disable_interactive_serial_console_rest_flattened_error(transport: str = 'rest'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.disable_interactive_serial_console( - instance.DisableInteractiveSerialConsoleRequest(), - name='name_value', - ) - - -def test_detach_lun_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.detach_lun in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.detach_lun] = mock_rpc - - request = {} - client.detach_lun(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.detach_lun(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_detach_lun_rest_required_fields(request_type=gcb_instance.DetachLunRequest): - transport_class = transports.BareMetalSolutionRestTransport - - request_init = {} - request_init["instance"] = "" - request_init["lun"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).detach_lun._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["instance"] = 'instance_value' - jsonified_request["lun"] = 'lun_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).detach_lun._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "instance" in jsonified_request - assert jsonified_request["instance"] == 'instance_value' - assert "lun" in jsonified_request - assert jsonified_request["lun"] == 'lun_value' - - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.detach_lun(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_detach_lun_rest_unset_required_fields(): - transport = transports.BareMetalSolutionRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.detach_lun._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("instance", "lun", ))) - - -def test_detach_lun_rest_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'instance': 'projects/sample1/locations/sample2/instances/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - instance='instance_value', - lun='lun_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.detach_lun(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{instance=projects/*/locations/*/instances/*}:detachLun" % client.transport._host, args[1]) - - -def test_detach_lun_rest_flattened_error(transport: str = 'rest'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.detach_lun( - gcb_instance.DetachLunRequest(), - instance='instance_value', - lun='lun_value', - ) - - -def test_list_ssh_keys_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_ssh_keys in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_ssh_keys] = mock_rpc - - request = {} - client.list_ssh_keys(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_ssh_keys(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_ssh_keys_rest_required_fields(request_type=ssh_key.ListSSHKeysRequest): - transport_class = transports.BareMetalSolutionRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_ssh_keys._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_ssh_keys._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = ssh_key.ListSSHKeysResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "get",
- 'query_params': pb_request,
- }
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- # Convert return value to protobuf type
- return_value = ssh_key.ListSSHKeysResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.list_ssh_keys(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_list_ssh_keys_rest_unset_required_fields():
- transport = transports.BareMetalSolutionRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
- unset_fields = transport.list_ssh_keys._get_unset_required_fields({})
- assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", )))
-
-
-def test_list_ssh_keys_rest_flattened():
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = ssh_key.ListSSHKeysResponse()
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
- # get truthy value for each flattened field
- mock_args = dict(
- parent='parent_value',
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = ssh_key.ListSSHKeysResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- client.list_ssh_keys(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v2/{parent=projects/*/locations/*}/sshKeys" % client.transport._host, args[1])
-
-
-def test_list_ssh_keys_rest_flattened_error(transport: str = 'rest'):
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.list_ssh_keys(
- ssh_key.ListSSHKeysRequest(),
- parent='parent_value',
- )
-
-
-def test_list_ssh_keys_rest_pager(transport: str = 'rest'):
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, 'request') as req:
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - ssh_key.ListSSHKeysResponse( - ssh_keys=[ - ssh_key.SSHKey(), - ssh_key.SSHKey(), - ssh_key.SSHKey(), - ], - next_page_token='abc', - ), - ssh_key.ListSSHKeysResponse( - ssh_keys=[], - next_page_token='def', - ), - ssh_key.ListSSHKeysResponse( - ssh_keys=[ - ssh_key.SSHKey(), - ], - next_page_token='ghi', - ), - ssh_key.ListSSHKeysResponse( - ssh_keys=[ - ssh_key.SSHKey(), - ssh_key.SSHKey(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(ssh_key.ListSSHKeysResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - pager = client.list_ssh_keys(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, ssh_key.SSHKey) - for i in results) - - pages = list(client.list_ssh_keys(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_create_ssh_key_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_ssh_key in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_ssh_key] = mock_rpc - - request = {} - client.create_ssh_key(request) - - # Establish that the underlying gRPC stub method was called. 
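-        # Editor's aside: _wrapped_methods is a plain mapping keyed by the
-        # transport method, so the mock installed above is precisely what the
-        # client dispatched to:
-        assert client._transport._wrapped_methods[client._transport.create_ssh_key] is mock_rpc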
- assert mock_rpc.call_count == 1 - - client.create_ssh_key(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_ssh_key_rest_required_fields(request_type=gcb_ssh_key.CreateSSHKeyRequest): - transport_class = transports.BareMetalSolutionRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["ssh_key_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "sshKeyId" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_ssh_key._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "sshKeyId" in jsonified_request - assert jsonified_request["sshKeyId"] == request_init["ssh_key_id"] - - jsonified_request["parent"] = 'parent_value' - jsonified_request["sshKeyId"] = 'ssh_key_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_ssh_key._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("ssh_key_id", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "sshKeyId" in jsonified_request - assert jsonified_request["sshKeyId"] == 'ssh_key_id_value' - - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = gcb_ssh_key.SSHKey() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
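-            # Editor's note: "ssh_key_id" is required yet carries the proto3
-            # default "", so MessageToJson drops it and
-            # _get_unset_required_fields restores it; the expected_params
-            # assertion below therefore lists ("sshKeyId", "") explicitly.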
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gcb_ssh_key.SSHKey.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_ssh_key(request) - - expected_params = [ - ( - "sshKeyId", - "", - ), - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_ssh_key_rest_unset_required_fields(): - transport = transports.BareMetalSolutionRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_ssh_key._get_unset_required_fields({}) - assert set(unset_fields) == (set(("sshKeyId", )) & set(("parent", "sshKey", "sshKeyId", ))) - - -def test_create_ssh_key_rest_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = gcb_ssh_key.SSHKey() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ssh_key=gcb_ssh_key.SSHKey(name='name_value'), - ssh_key_id='ssh_key_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gcb_ssh_key.SSHKey.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_ssh_key(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*/locations/*}/sshKeys" % client.transport._host, args[1]) - - -def test_create_ssh_key_rest_flattened_error(transport: str = 'rest'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_ssh_key( - gcb_ssh_key.CreateSSHKeyRequest(), - parent='parent_value', - ssh_key=gcb_ssh_key.SSHKey(name='name_value'), - ssh_key_id='ssh_key_id_value', - ) - - -def test_delete_ssh_key_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_ssh_key in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_ssh_key] = mock_rpc - - request = {} - client.delete_ssh_key(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_ssh_key(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_ssh_key_rest_required_fields(request_type=ssh_key.DeleteSSHKeyRequest): - transport_class = transports.BareMetalSolutionRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_ssh_key._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_ssh_key._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
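-            # Hedged note: DeleteSSHKey yields no response message over REST,
-            # so the fabricated body below is the empty string rather than
-            # serialized proto-JSON.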
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_ssh_key(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_ssh_key_rest_unset_required_fields(): - transport = transports.BareMetalSolutionRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_ssh_key._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_delete_ssh_key_rest_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/sshKeys/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_ssh_key(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/sshKeys/*}" % client.transport._host, args[1]) - - -def test_delete_ssh_key_rest_flattened_error(transport: str = 'rest'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_ssh_key( - ssh_key.DeleteSSHKeyRequest(), - name='name_value', - ) - - -def test_list_volumes_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_volumes in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
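-        # Editor's note (assumption, per the comment above): pinning .name to
-        # a real str keeps any caller that reads an operation name from
-        # receiving a child Mock instead.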
- client._transport._wrapped_methods[client._transport.list_volumes] = mock_rpc - - request = {} - client.list_volumes(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_volumes(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_volumes_rest_required_fields(request_type=volume.ListVolumesRequest): - transport_class = transports.BareMetalSolutionRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_volumes._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_volumes._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = volume.ListVolumesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = volume.ListVolumesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_volumes(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_volumes_rest_unset_required_fields(): - transport = transports.BareMetalSolutionRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_volumes._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_volumes_rest_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = volume.ListVolumesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = volume.ListVolumesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_volumes(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*/locations/*}/volumes" % client.transport._host, args[1]) - - -def test_list_volumes_rest_flattened_error(transport: str = 'rest'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_volumes( - volume.ListVolumesRequest(), - parent='parent_value', - ) - - -def test_list_volumes_rest_pager(transport: str = 'rest'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - volume.ListVolumesResponse( - volumes=[ - volume.Volume(), - volume.Volume(), - volume.Volume(), - ], - next_page_token='abc', - ), - volume.ListVolumesResponse( - volumes=[], - next_page_token='def', - ), - volume.ListVolumesResponse( - volumes=[ - volume.Volume(), - ], - next_page_token='ghi', - ), - volume.ListVolumesResponse( - volumes=[ - volume.Volume(), - volume.Volume(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(volume.ListVolumesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - pager = client.list_volumes(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, volume.Volume) - for i in results) - - pages = list(client.list_volumes(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_get_volume_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_volume in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_volume] = mock_rpc - - request = {} - client.get_volume(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_volume(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_volume_rest_required_fields(request_type=volume.GetVolumeRequest): - transport_class = transports.BareMetalSolutionRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_volume._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_volume._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = volume.Volume() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = volume.Volume.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_volume(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_volume_rest_unset_required_fields(): - transport = transports.BareMetalSolutionRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_volume._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_volume_rest_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
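-        # Editor's sketch: the fake HTTP reply is built by serializing a real
-        # Volume to proto-JSON and stuffing the bytes into a
-        # requests.Response, e.g. volume.Volume(name='n') -> b'{"name": "n"}'.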
- return_value = volume.Volume() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/volumes/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = volume.Volume.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_volume(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/volumes/*}" % client.transport._host, args[1]) - - -def test_get_volume_rest_flattened_error(transport: str = 'rest'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_volume( - volume.GetVolumeRequest(), - name='name_value', - ) - - -def test_update_volume_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_volume in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_volume] = mock_rpc - - request = {} - client.update_volume(request) - - # Establish that the underlying gRPC stub method was called. 
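-        # Hedged note: the empty dict above is coerced by the client into a
-        # gcb_volume.UpdateVolumeRequest(), so reaching the mock at all shows
-        # dispatch went through the cached wrapper.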
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_volume(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_volume_rest_required_fields(request_type=gcb_volume.UpdateVolumeRequest): - transport_class = transports.BareMetalSolutionRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_volume._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_volume._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_volume(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_volume_rest_unset_required_fields(): - transport = transports.BareMetalSolutionRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_volume._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", )) & set(("volume", ))) - - -def test_update_volume_rest_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
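-    # Editor's note: patching on type(client.transport._session) swaps the
-    # class attribute, so the session instance created at client
-    # construction still resolves .request to the mock via ordinary
-    # attribute lookup.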
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'volume': {'name': 'projects/sample1/locations/sample2/volumes/sample3'}} - - # get truthy value for each flattened field - mock_args = dict( - volume=gcb_volume.Volume(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_volume(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{volume.name=projects/*/locations/*/volumes/*}" % client.transport._host, args[1]) - - -def test_update_volume_rest_flattened_error(transport: str = 'rest'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_volume( - gcb_volume.UpdateVolumeRequest(), - volume=gcb_volume.Volume(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_rename_volume_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.rename_volume in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.rename_volume] = mock_rpc - - request = {} - client.rename_volume(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.rename_volume(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_rename_volume_rest_required_fields(request_type=volume.RenameVolumeRequest): - transport_class = transports.BareMetalSolutionRestTransport - - request_init = {} - request_init["name"] = "" - request_init["new_volume_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).rename_volume._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - jsonified_request["newVolumeId"] = 'new_volume_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).rename_volume._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - assert "newVolumeId" in jsonified_request - assert jsonified_request["newVolumeId"] == 'new_volume_id_value' - - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = volume.Volume() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
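-            # Editor's note: json_format.MessageToJson camelCases proto field
-            # names, which is why the assertions above track "newVolumeId"
-            # for the request field new_volume_id.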
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = volume.Volume.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.rename_volume(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_rename_volume_rest_unset_required_fields(): - transport = transports.BareMetalSolutionRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.rename_volume._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", "newVolumeId", ))) - - -def test_rename_volume_rest_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = volume.Volume() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/volumes/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - new_volume_id='new_volume_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = volume.Volume.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.rename_volume(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/volumes/*}:rename" % client.transport._host, args[1]) - - -def test_rename_volume_rest_flattened_error(transport: str = 'rest'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.rename_volume( - volume.RenameVolumeRequest(), - name='name_value', - new_volume_id='new_volume_id_value', - ) - - -def test_evict_volume_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.evict_volume in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.evict_volume] = mock_rpc - - request = {} - client.evict_volume(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.evict_volume(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_evict_volume_rest_required_fields(request_type=volume.EvictVolumeRequest): - transport_class = transports.BareMetalSolutionRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).evict_volume._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).evict_volume._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.evict_volume(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_evict_volume_rest_unset_required_fields(): - transport = transports.BareMetalSolutionRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.evict_volume._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_evict_volume_rest_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/volumes/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.evict_volume(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/volumes/*}:evict" % client.transport._host, args[1]) - - -def test_evict_volume_rest_flattened_error(transport: str = 'rest'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
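-    # Editor's note: request objects and flattened kwargs are mutually
-    # exclusive in GAPIC clients; the ValueError below is raised before any
-    # HTTP request is attempted.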
- with pytest.raises(ValueError): - client.evict_volume( - volume.EvictVolumeRequest(), - name='name_value', - ) - - -def test_resize_volume_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.resize_volume in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.resize_volume] = mock_rpc - - request = {} - client.resize_volume(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.resize_volume(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_resize_volume_rest_required_fields(request_type=gcb_volume.ResizeVolumeRequest): - transport_class = transports.BareMetalSolutionRestTransport - - request_init = {} - request_init["volume"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).resize_volume._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["volume"] = 'volume_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).resize_volume._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "volume" in jsonified_request - assert jsonified_request["volume"] == 'volume_value' - - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.resize_volume(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_resize_volume_rest_unset_required_fields(): - transport = transports.BareMetalSolutionRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.resize_volume._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("volume", ))) - - -def test_resize_volume_rest_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'volume': 'projects/sample1/locations/sample2/volumes/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - volume='volume_value', - size_gib=844, - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.resize_volume(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{volume=projects/*/locations/*/volumes/*}:resize" % client.transport._host, args[1]) - - -def test_resize_volume_rest_flattened_error(transport: str = 'rest'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.resize_volume( - gcb_volume.ResizeVolumeRequest(), - volume='volume_value', - size_gib=844, - ) - - -def test_list_networks_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_networks in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_networks] = mock_rpc - - request = {} - client.list_networks(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_networks(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_networks_rest_required_fields(request_type=network.ListNetworksRequest): - transport_class = transports.BareMetalSolutionRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_networks._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_networks._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = network.ListNetworksResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
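-            # Hedged note: every REST call in these tests is expected to
-            # carry ('$alt', 'json;enum-encoding=int') so the server replies
-            # with JSON whose enums are integers, matching the
-            # expected_params check below.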
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = network.ListNetworksResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.list_networks(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_list_networks_rest_unset_required_fields():
-    transport = transports.BareMetalSolutionRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.list_networks._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", )))
-
-
-def test_list_networks_rest_flattened():
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = network.ListNetworksResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            parent='parent_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = network.ListNetworksResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.list_networks(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v2/{parent=projects/*/locations/*}/networks" % client.transport._host, args[1])
-
-
-def test_list_networks_rest_flattened_error(transport: str = 'rest'):
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_networks(
-            network.ListNetworksRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_networks_rest_pager(transport: str = 'rest'):
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # Set the response as a series of pages
-        response = (
-            network.ListNetworksResponse(
-                networks=[
-                    network.Network(),
-                    network.Network(),
-                    network.Network(),
-                ],
-                next_page_token='abc',
-            ),
-            network.ListNetworksResponse(
-                networks=[],
-                next_page_token='def',
-            ),
-            network.ListNetworksResponse(
-                networks=[
-                    network.Network(),
-                ],
-                next_page_token='ghi',
-            ),
-            network.ListNetworksResponse(
-                networks=[
-                    network.Network(),
-                    network.Network(),
-                ],
-            ),
-        )
-        # Two responses for two calls
-        response = response + response
-
-        # Wrap the values into proper Response objs
-        response = tuple(network.ListNetworksResponse.to_json(x) for x in response)
-        return_values = tuple(Response() for i in response)
-        for return_val, response_val in zip(return_values, response):
-            return_val._content = response_val.encode('UTF-8')
-            return_val.status_code = 200
-        req.side_effect = return_values
-
-        sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
-        pager = client.list_networks(request=sample_request)
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, network.Network)
-                   for i in results)
-
-        pages = list(client.list_networks(request=sample_request).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-
-def test_list_network_usage_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = BareMetalSolutionClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.list_network_usage in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.list_network_usage] = mock_rpc
-
-        request = {}
-        client.list_network_usage(request)
-
-        # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1 - - client.list_network_usage(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_network_usage_rest_required_fields(request_type=network.ListNetworkUsageRequest): - transport_class = transports.BareMetalSolutionRestTransport - - request_init = {} - request_init["location"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_network_usage._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["location"] = 'location_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_network_usage._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "location" in jsonified_request - assert jsonified_request["location"] == 'location_value' - - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = network.ListNetworkUsageResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = network.ListNetworkUsageResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_network_usage(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_network_usage_rest_unset_required_fields(): - transport = transports.BareMetalSolutionRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_network_usage._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("location", ))) - - -def test_list_network_usage_rest_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = network.ListNetworkUsageResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'location': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - location='location_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = network.ListNetworkUsageResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_network_usage(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{location=projects/*/locations/*}/networks:listNetworkUsage" % client.transport._host, args[1]) - - -def test_list_network_usage_rest_flattened_error(transport: str = 'rest'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_network_usage( - network.ListNetworkUsageRequest(), - location='location_value', - ) - - -def test_get_network_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_network in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_network] = mock_rpc - - request = {} - client.get_network(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_network(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_network_rest_required_fields(request_type=network.GetNetworkRequest): - transport_class = transports.BareMetalSolutionRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_network._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_network._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = network.Network() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = network.Network.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_network(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_network_rest_unset_required_fields(): - transport = transports.BareMetalSolutionRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_network._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_network_rest_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = network.Network() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/networks/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = network.Network.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_network(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/networks/*}" % client.transport._host, args[1]) - - -def test_get_network_rest_flattened_error(transport: str = 'rest'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_network( - network.GetNetworkRequest(), - name='name_value', - ) - - -def test_update_network_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_network in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_network] = mock_rpc - - request = {} - client.update_network(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_network(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_network_rest_required_fields(request_type=gcb_network.UpdateNetworkRequest): - transport_class = transports.BareMetalSolutionRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_network._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_network._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_network(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_network_rest_unset_required_fields(): - transport = transports.BareMetalSolutionRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_network._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", )) & set(("network", ))) - - -def test_update_network_rest_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'network': {'name': 'projects/sample1/locations/sample2/networks/sample3'}} - - # get truthy value for each flattened field - mock_args = dict( - network=gcb_network.Network(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_network(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{network.name=projects/*/locations/*/networks/*}" % client.transport._host, args[1]) - - -def test_update_network_rest_flattened_error(transport: str = 'rest'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_network( - gcb_network.UpdateNetworkRequest(), - network=gcb_network.Network(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_create_volume_snapshot_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_volume_snapshot in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_volume_snapshot] = mock_rpc - - request = {} - client.create_volume_snapshot(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_volume_snapshot(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_volume_snapshot_rest_required_fields(request_type=gcb_volume_snapshot.CreateVolumeSnapshotRequest): - transport_class = transports.BareMetalSolutionRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_volume_snapshot._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_volume_snapshot._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = gcb_volume_snapshot.VolumeSnapshot() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gcb_volume_snapshot.VolumeSnapshot.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_volume_snapshot(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_volume_snapshot_rest_unset_required_fields(): - transport = transports.BareMetalSolutionRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_volume_snapshot._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "volumeSnapshot", ))) - - -def test_create_volume_snapshot_rest_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = gcb_volume_snapshot.VolumeSnapshot() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/volumes/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - volume_snapshot=gcb_volume_snapshot.VolumeSnapshot(name='name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gcb_volume_snapshot.VolumeSnapshot.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_volume_snapshot(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*/locations/*/volumes/*}/snapshots" % client.transport._host, args[1]) - - -def test_create_volume_snapshot_rest_flattened_error(transport: str = 'rest'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_volume_snapshot( - gcb_volume_snapshot.CreateVolumeSnapshotRequest(), - parent='parent_value', - volume_snapshot=gcb_volume_snapshot.VolumeSnapshot(name='name_value'), - ) - - -def test_restore_volume_snapshot_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.restore_volume_snapshot in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.restore_volume_snapshot] = mock_rpc - - request = {} - client.restore_volume_snapshot(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.restore_volume_snapshot(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_restore_volume_snapshot_rest_required_fields(request_type=gcb_volume_snapshot.RestoreVolumeSnapshotRequest): - transport_class = transports.BareMetalSolutionRestTransport - - request_init = {} - request_init["volume_snapshot"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).restore_volume_snapshot._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["volumeSnapshot"] = 'volume_snapshot_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).restore_volume_snapshot._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "volumeSnapshot" in jsonified_request - assert jsonified_request["volumeSnapshot"] == 'volume_snapshot_value' - - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.restore_volume_snapshot(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_restore_volume_snapshot_rest_unset_required_fields(): - transport = transports.BareMetalSolutionRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.restore_volume_snapshot._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("volumeSnapshot", ))) - - -def test_restore_volume_snapshot_rest_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'volume_snapshot': 'projects/sample1/locations/sample2/volumes/sample3/snapshots/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - volume_snapshot='volume_snapshot_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.restore_volume_snapshot(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{volume_snapshot=projects/*/locations/*/volumes/*/snapshots/*}:restoreVolumeSnapshot" % client.transport._host, args[1]) - - -def test_restore_volume_snapshot_rest_flattened_error(transport: str = 'rest'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.restore_volume_snapshot( - gcb_volume_snapshot.RestoreVolumeSnapshotRequest(), - volume_snapshot='volume_snapshot_value', - ) - - -def test_delete_volume_snapshot_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_volume_snapshot in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_volume_snapshot] = mock_rpc - - request = {} - client.delete_volume_snapshot(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_volume_snapshot(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_volume_snapshot_rest_required_fields(request_type=volume_snapshot.DeleteVolumeSnapshotRequest): - transport_class = transports.BareMetalSolutionRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_volume_snapshot._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_volume_snapshot._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_volume_snapshot(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_volume_snapshot_rest_unset_required_fields(): - transport = transports.BareMetalSolutionRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_volume_snapshot._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_delete_volume_snapshot_rest_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/volumes/sample3/snapshots/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_volume_snapshot(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/volumes/*/snapshots/*}" % client.transport._host, args[1]) - - -def test_delete_volume_snapshot_rest_flattened_error(transport: str = 'rest'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_volume_snapshot( - volume_snapshot.DeleteVolumeSnapshotRequest(), - name='name_value', - ) - - -def test_get_volume_snapshot_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_volume_snapshot in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.get_volume_snapshot] = mock_rpc - - request = {} - client.get_volume_snapshot(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_volume_snapshot(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_volume_snapshot_rest_required_fields(request_type=volume_snapshot.GetVolumeSnapshotRequest): - transport_class = transports.BareMetalSolutionRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_volume_snapshot._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_volume_snapshot._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = volume_snapshot.VolumeSnapshot() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = volume_snapshot.VolumeSnapshot.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_volume_snapshot(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_volume_snapshot_rest_unset_required_fields(): - transport = transports.BareMetalSolutionRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_volume_snapshot._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_volume_snapshot_rest_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = volume_snapshot.VolumeSnapshot() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/volumes/sample3/snapshots/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = volume_snapshot.VolumeSnapshot.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_volume_snapshot(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/volumes/*/snapshots/*}" % client.transport._host, args[1]) - - -def test_get_volume_snapshot_rest_flattened_error(transport: str = 'rest'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_volume_snapshot( - volume_snapshot.GetVolumeSnapshotRequest(), - name='name_value', - ) - - -def test_list_volume_snapshots_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_volume_snapshots in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_volume_snapshots] = mock_rpc - - request = {} - client.list_volume_snapshots(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_volume_snapshots(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_volume_snapshots_rest_required_fields(request_type=volume_snapshot.ListVolumeSnapshotsRequest): - transport_class = transports.BareMetalSolutionRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_volume_snapshots._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_volume_snapshots._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = volume_snapshot.ListVolumeSnapshotsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = volume_snapshot.ListVolumeSnapshotsResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.list_volume_snapshots(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_list_volume_snapshots_rest_unset_required_fields():
-    transport = transports.BareMetalSolutionRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.list_volume_snapshots._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", )))
-
-
-def test_list_volume_snapshots_rest_flattened():
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = volume_snapshot.ListVolumeSnapshotsResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'parent': 'projects/sample1/locations/sample2/volumes/sample3'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            parent='parent_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = volume_snapshot.ListVolumeSnapshotsResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.list_volume_snapshots(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v2/{parent=projects/*/locations/*/volumes/*}/snapshots" % client.transport._host, args[1])
-
-
-def test_list_volume_snapshots_rest_flattened_error(transport: str = 'rest'):
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_volume_snapshots(
-            volume_snapshot.ListVolumeSnapshotsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_volume_snapshots_rest_pager(transport: str = 'rest'):
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # Set the response as a series of pages
-        response = (
-            volume_snapshot.ListVolumeSnapshotsResponse(
-                volume_snapshots=[
-                    volume_snapshot.VolumeSnapshot(),
-                    volume_snapshot.VolumeSnapshot(),
-                    volume_snapshot.VolumeSnapshot(),
-                ],
-                next_page_token='abc',
-            ),
-            volume_snapshot.ListVolumeSnapshotsResponse(
-                volume_snapshots=[],
-                next_page_token='def',
-            ),
-            volume_snapshot.ListVolumeSnapshotsResponse(
-                volume_snapshots=[
-                    volume_snapshot.VolumeSnapshot(),
-                ],
-                next_page_token='ghi',
-            ),
-            volume_snapshot.ListVolumeSnapshotsResponse(
-                volume_snapshots=[
-                    volume_snapshot.VolumeSnapshot(),
-                    volume_snapshot.VolumeSnapshot(),
-                ],
-            ),
-        )
-        # Two responses for two calls
-        response = response + response
-
-        # Wrap the values into proper Response objs
-        response = tuple(volume_snapshot.ListVolumeSnapshotsResponse.to_json(x) for x in response)
-        return_values = tuple(Response() for i in response)
-        for return_val, response_val in zip(return_values, response):
-            return_val._content = response_val.encode('UTF-8')
-            return_val.status_code = 200
-        req.side_effect = return_values
-
-        sample_request = {'parent': 'projects/sample1/locations/sample2/volumes/sample3'}
-
-        pager = client.list_volume_snapshots(request=sample_request)
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, volume_snapshot.VolumeSnapshot)
-                   for i in results)
-
-        pages = list(client.list_volume_snapshots(request=sample_request).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-
-def test_get_lun_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = BareMetalSolutionClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.get_lun in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.get_lun] = mock_rpc
-
-        request = {}
-        client.get_lun(request)
-
-        # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1 - - client.get_lun(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_lun_rest_required_fields(request_type=lun.GetLunRequest): - transport_class = transports.BareMetalSolutionRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_lun._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_lun._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = lun.Lun() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = lun.Lun.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_lun(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_lun_rest_unset_required_fields(): - transport = transports.BareMetalSolutionRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_lun._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_lun_rest_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = lun.Lun() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/volumes/sample3/luns/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = lun.Lun.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_lun(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/volumes/*/luns/*}" % client.transport._host, args[1]) - - -def test_get_lun_rest_flattened_error(transport: str = 'rest'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_lun( - lun.GetLunRequest(), - name='name_value', - ) - - -def test_list_luns_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_luns in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_luns] = mock_rpc - - request = {} - client.list_luns(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_luns(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_luns_rest_required_fields(request_type=lun.ListLunsRequest): - transport_class = transports.BareMetalSolutionRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_luns._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_luns._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
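-     # (That is, anything still reported as unset must be one of the declared
-     # query parameters; an empty set difference below means no required field
-     # leaked into the path or the body.)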
- assert not set(unset_fields) - set(("page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = lun.ListLunsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = lun.ListLunsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_luns(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_luns_rest_unset_required_fields(): - transport = transports.BareMetalSolutionRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_luns._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_luns_rest_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = lun.ListLunsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/volumes/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = lun.ListLunsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_luns(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
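-         # (path_template.validate(tmpl, path) is True only when the URI matches
-         # the HTTP-rule template, i.e. the {parent=projects/*/locations/*/volumes/*}
-         # segment must resolve, which shows the flattened args were transcoded
-         # onto the expected v2 route.)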
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*/locations/*/volumes/*}/luns" % client.transport._host, args[1]) - - -def test_list_luns_rest_flattened_error(transport: str = 'rest'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_luns( - lun.ListLunsRequest(), - parent='parent_value', - ) - - -def test_list_luns_rest_pager(transport: str = 'rest'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - lun.ListLunsResponse( - luns=[ - lun.Lun(), - lun.Lun(), - lun.Lun(), - ], - next_page_token='abc', - ), - lun.ListLunsResponse( - luns=[], - next_page_token='def', - ), - lun.ListLunsResponse( - luns=[ - lun.Lun(), - ], - next_page_token='ghi', - ), - lun.ListLunsResponse( - luns=[ - lun.Lun(), - lun.Lun(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(lun.ListLunsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2/volumes/sample3'} - - pager = client.list_luns(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, lun.Lun) - for i in results) - - pages = list(client.list_luns(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_evict_lun_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.evict_lun in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.evict_lun] = mock_rpc - - request = {} - client.evict_lun(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.evict_lun(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_evict_lun_rest_required_fields(request_type=lun.EvictLunRequest): - transport_class = transports.BareMetalSolutionRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).evict_lun._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).evict_lun._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.evict_lun(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_evict_lun_rest_unset_required_fields(): - transport = transports.BareMetalSolutionRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.evict_lun._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_evict_lun_rest_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
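-     # (EvictLun is a long-running method, so the faked HTTP body below carries
-     # a serialized operations_pb2.Operation, 'operations/spam', rather than a
-     # Lun resource.)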
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/volumes/sample3/luns/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.evict_lun(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/volumes/*/luns/*}:evict" % client.transport._host, args[1]) - - -def test_evict_lun_rest_flattened_error(transport: str = 'rest'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.evict_lun( - lun.EvictLunRequest(), - name='name_value', - ) - - -def test_get_nfs_share_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_nfs_share in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_nfs_share] = mock_rpc - - request = {} - client.get_nfs_share(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_nfs_share(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_nfs_share_rest_required_fields(request_type=nfs_share.GetNfsShareRequest): - transport_class = transports.BareMetalSolutionRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_nfs_share._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_nfs_share._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = nfs_share.NfsShare() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = nfs_share.NfsShare.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_nfs_share(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_nfs_share_rest_unset_required_fields(): - transport = transports.BareMetalSolutionRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_nfs_share._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_nfs_share_rest_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = nfs_share.NfsShare() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/nfsShares/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = nfs_share.NfsShare.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_nfs_share(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/nfsShares/*}" % client.transport._host, args[1]) - - -def test_get_nfs_share_rest_flattened_error(transport: str = 'rest'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_nfs_share( - nfs_share.GetNfsShareRequest(), - name='name_value', - ) - - -def test_list_nfs_shares_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_nfs_shares in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_nfs_shares] = mock_rpc - - request = {} - client.list_nfs_shares(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_nfs_shares(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_nfs_shares_rest_required_fields(request_type=nfs_share.ListNfsSharesRequest): - transport_class = transports.BareMetalSolutionRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_nfs_shares._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_nfs_shares._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = nfs_share.ListNfsSharesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
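-             # (A real transcode() would return this same shape, uri, method,
-             # query_params and optionally body, after matching the request
-             # against the method's http options; the stub below just fakes
-             # that result directly.)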
-             pb_request = request_type.pb(request)
-             transcode_result = {
-                 'uri': 'v1/sample_method',
-                 'method': "get",
-                 'query_params': pb_request,
-             }
-             transcode.return_value = transcode_result
-
-             response_value = Response()
-             response_value.status_code = 200
-
-             # Convert return value to protobuf type
-             return_value = nfs_share.ListNfsSharesResponse.pb(return_value)
-             json_return_value = json_format.MessageToJson(return_value)
-
-             response_value._content = json_return_value.encode('UTF-8')
-             req.return_value = response_value
-             req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-             response = client.list_nfs_shares(request)
-
-             expected_params = [
-                 ('$alt', 'json;enum-encoding=int')
-             ]
-             actual_params = req.call_args.kwargs['params']
-             assert expected_params == actual_params
-
-
- def test_list_nfs_shares_rest_unset_required_fields():
-     transport = transports.BareMetalSolutionRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-     unset_fields = transport.list_nfs_shares._get_unset_required_fields({})
-     assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", )))
-
-
- def test_list_nfs_shares_rest_flattened():
-     client = BareMetalSolutionClient(
-         credentials=ga_credentials.AnonymousCredentials(),
-         transport="rest",
-     )
-
-     # Mock the http request call within the method and fake a response.
-     with mock.patch.object(type(client.transport._session), 'request') as req:
-         # Designate an appropriate value for the returned response.
-         return_value = nfs_share.ListNfsSharesResponse()
-
-         # get arguments that satisfy an http rule for this method
-         sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
-         # get truthy value for each flattened field
-         mock_args = dict(
-             parent='parent_value',
-         )
-         mock_args.update(sample_request)
-
-         # Wrap the value into a proper Response obj
-         response_value = Response()
-         response_value.status_code = 200
-         # Convert return value to protobuf type
-         return_value = nfs_share.ListNfsSharesResponse.pb(return_value)
-         json_return_value = json_format.MessageToJson(return_value)
-         response_value._content = json_return_value.encode('UTF-8')
-         req.return_value = response_value
-         req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-         client.list_nfs_shares(**mock_args)
-
-         # Establish that the underlying call was made with the expected
-         # request object values.
-         assert len(req.mock_calls) == 1
-         _, args, _ = req.mock_calls[0]
-         assert path_template.validate("%s/v2/{parent=projects/*/locations/*}/nfsShares" % client.transport._host, args[1])
-
-
- def test_list_nfs_shares_rest_flattened_error(transport: str = 'rest'):
-     client = BareMetalSolutionClient(
-         credentials=ga_credentials.AnonymousCredentials(),
-         transport=transport,
-     )
-
-     # Attempting to call a method with both a request object and flattened
-     # fields is an error.
-     with pytest.raises(ValueError):
-         client.list_nfs_shares(
-             nfs_share.ListNfsSharesRequest(),
-             parent='parent_value',
-         )
-
-
- def test_list_nfs_shares_rest_pager(transport: str = 'rest'):
-     client = BareMetalSolutionClient(
-         credentials=ga_credentials.AnonymousCredentials(),
-         transport=transport,
-     )
-
-     # Mock the http request call within the method and fake a response.
-     with mock.patch.object(Session, 'request') as req:
-         # Set the response as a series of pages
-         response = (
-             nfs_share.ListNfsSharesResponse(
-                 nfs_shares=[
-                     nfs_share.NfsShare(),
-                     nfs_share.NfsShare(),
-                     nfs_share.NfsShare(),
-                 ],
-                 next_page_token='abc',
-             ),
-             nfs_share.ListNfsSharesResponse(
-                 nfs_shares=[],
-                 next_page_token='def',
-             ),
-             nfs_share.ListNfsSharesResponse(
-                 nfs_shares=[
-                     nfs_share.NfsShare(),
-                 ],
-                 next_page_token='ghi',
-             ),
-             nfs_share.ListNfsSharesResponse(
-                 nfs_shares=[
-                     nfs_share.NfsShare(),
-                     nfs_share.NfsShare(),
-                 ],
-             ),
-         )
-         # Two responses for two calls
-         response = response + response
-
-         # Wrap the values into proper Response objs
-         response = tuple(nfs_share.ListNfsSharesResponse.to_json(x) for x in response)
-         return_values = tuple(Response() for _ in response)
-         for return_val, response_val in zip(return_values, response):
-             return_val._content = response_val.encode('UTF-8')
-             return_val.status_code = 200
-         req.side_effect = return_values
-
-         sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
-         pager = client.list_nfs_shares(request=sample_request)
-
-         results = list(pager)
-         assert len(results) == 6
-         assert all(isinstance(i, nfs_share.NfsShare)
-                    for i in results)
-
-         pages = list(client.list_nfs_shares(request=sample_request).pages)
-         for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
-             assert page_.raw_page.next_page_token == token
-
-
- def test_update_nfs_share_rest_use_cached_wrapped_rpc():
-     # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-     # instead of constructing them on each call
-     with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-         client = BareMetalSolutionClient(
-             credentials=ga_credentials.AnonymousCredentials(),
-             transport="rest",
-         )
-
-         # Should wrap all calls on client creation
-         assert wrapper_fn.call_count > 0
-         wrapper_fn.reset_mock()
-
-         # Ensure method has been cached
-         assert client._transport.update_nfs_share in client._transport._wrapped_methods
-
-         # Replace cached wrapped function with mock
-         mock_rpc = mock.Mock()
-         mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
-         client._transport._wrapped_methods[client._transport.update_nfs_share] = mock_rpc
-
-         request = {}
-         client.update_nfs_share(request)
-
-         # Establish that the underlying transport method was called.
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_nfs_share(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_nfs_share_rest_required_fields(request_type=gcb_nfs_share.UpdateNfsShareRequest): - transport_class = transports.BareMetalSolutionRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_nfs_share._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_nfs_share._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_nfs_share(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_nfs_share_rest_unset_required_fields(): - transport = transports.BareMetalSolutionRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_nfs_share._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", )) & set(("nfsShare", ))) - - -def test_update_nfs_share_rest_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
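-     # (For update RPCs the flattened pair is the resource plus a FieldMask;
-     # judging by the unset-fields checks above, update_mask travels as the
-     # updateMask query parameter while the nfs_share message forms the body.)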
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'nfs_share': {'name': 'projects/sample1/locations/sample2/nfsShares/sample3'}} - - # get truthy value for each flattened field - mock_args = dict( - nfs_share=gcb_nfs_share.NfsShare(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_nfs_share(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{nfs_share.name=projects/*/locations/*/nfsShares/*}" % client.transport._host, args[1]) - - -def test_update_nfs_share_rest_flattened_error(transport: str = 'rest'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_nfs_share( - gcb_nfs_share.UpdateNfsShareRequest(), - nfs_share=gcb_nfs_share.NfsShare(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_create_nfs_share_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_nfs_share in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_nfs_share] = mock_rpc - - request = {} - client.create_nfs_share(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_nfs_share(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_nfs_share_rest_required_fields(request_type=gcb_nfs_share.CreateNfsShareRequest): - transport_class = transports.BareMetalSolutionRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_nfs_share._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_nfs_share._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_nfs_share(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_nfs_share_rest_unset_required_fields(): - transport = transports.BareMetalSolutionRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_nfs_share._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "nfsShare", ))) - - -def test_create_nfs_share_rest_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - nfs_share=gcb_nfs_share.NfsShare(name='name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_nfs_share(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*/locations/*}/nfsShares" % client.transport._host, args[1]) - - -def test_create_nfs_share_rest_flattened_error(transport: str = 'rest'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_nfs_share( - gcb_nfs_share.CreateNfsShareRequest(), - parent='parent_value', - nfs_share=gcb_nfs_share.NfsShare(name='name_value'), - ) - - -def test_rename_nfs_share_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.rename_nfs_share in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.rename_nfs_share] = mock_rpc - - request = {} - client.rename_nfs_share(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.rename_nfs_share(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_rename_nfs_share_rest_required_fields(request_type=nfs_share.RenameNfsShareRequest): - transport_class = transports.BareMetalSolutionRestTransport - - request_init = {} - request_init["name"] = "" - request_init["new_nfsshare_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).rename_nfs_share._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - jsonified_request["newNfsshareId"] = 'new_nfsshare_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).rename_nfs_share._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - assert "newNfsshareId" in jsonified_request - assert jsonified_request["newNfsshareId"] == 'new_nfsshare_id_value' - - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = nfs_share.NfsShare() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = nfs_share.NfsShare.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.rename_nfs_share(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_rename_nfs_share_rest_unset_required_fields(): - transport = transports.BareMetalSolutionRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.rename_nfs_share._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", "newNfsshareId", ))) - - -def test_rename_nfs_share_rest_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = nfs_share.NfsShare() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/nfsShares/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - new_nfsshare_id='new_nfsshare_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = nfs_share.NfsShare.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.rename_nfs_share(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/nfsShares/*}:rename" % client.transport._host, args[1]) - - -def test_rename_nfs_share_rest_flattened_error(transport: str = 'rest'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
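-     # (Flattened arguments are sugar that populate a fresh request message,
-     # so the client cannot merge them into an explicit request object and
-     # rejects the combination outright.)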
- with pytest.raises(ValueError): - client.rename_nfs_share( - nfs_share.RenameNfsShareRequest(), - name='name_value', - new_nfsshare_id='new_nfsshare_id_value', - ) - - -def test_delete_nfs_share_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_nfs_share in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_nfs_share] = mock_rpc - - request = {} - client.delete_nfs_share(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_nfs_share(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_nfs_share_rest_required_fields(request_type=nfs_share.DeleteNfsShareRequest): - transport_class = transports.BareMetalSolutionRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_nfs_share._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_nfs_share._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_nfs_share(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_nfs_share_rest_unset_required_fields(): - transport = transports.BareMetalSolutionRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_nfs_share._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_delete_nfs_share_rest_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/nfsShares/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_nfs_share(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/nfsShares/*}" % client.transport._host, args[1]) - - -def test_delete_nfs_share_rest_flattened_error(transport: str = 'rest'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_nfs_share( - nfs_share.DeleteNfsShareRequest(), - name='name_value', - ) - - -def test_list_provisioning_quotas_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_provisioning_quotas in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_provisioning_quotas] = mock_rpc - - request = {} - client.list_provisioning_quotas(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_provisioning_quotas(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_provisioning_quotas_rest_required_fields(request_type=provisioning.ListProvisioningQuotasRequest): - transport_class = transports.BareMetalSolutionRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_provisioning_quotas._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_provisioning_quotas._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = provisioning.ListProvisioningQuotasResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = provisioning.ListProvisioningQuotasResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_provisioning_quotas(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_provisioning_quotas_rest_unset_required_fields(): - transport = transports.BareMetalSolutionRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_provisioning_quotas._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_provisioning_quotas_rest_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = provisioning.ListProvisioningQuotasResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = provisioning.ListProvisioningQuotasResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_provisioning_quotas(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*/locations/*}/provisioningQuotas" % client.transport._host, args[1]) - - -def test_list_provisioning_quotas_rest_flattened_error(transport: str = 'rest'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_provisioning_quotas( - provisioning.ListProvisioningQuotasRequest(), - parent='parent_value', - ) - - -def test_list_provisioning_quotas_rest_pager(transport: str = 'rest'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
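-        # Iterating the pager below issues one mocked HTTP call per page,
-        # following next_page_token until a response omits it.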
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - provisioning.ListProvisioningQuotasResponse( - provisioning_quotas=[ - provisioning.ProvisioningQuota(), - provisioning.ProvisioningQuota(), - provisioning.ProvisioningQuota(), - ], - next_page_token='abc', - ), - provisioning.ListProvisioningQuotasResponse( - provisioning_quotas=[], - next_page_token='def', - ), - provisioning.ListProvisioningQuotasResponse( - provisioning_quotas=[ - provisioning.ProvisioningQuota(), - ], - next_page_token='ghi', - ), - provisioning.ListProvisioningQuotasResponse( - provisioning_quotas=[ - provisioning.ProvisioningQuota(), - provisioning.ProvisioningQuota(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(provisioning.ListProvisioningQuotasResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - pager = client.list_provisioning_quotas(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, provisioning.ProvisioningQuota) - for i in results) - - pages = list(client.list_provisioning_quotas(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_submit_provisioning_config_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.submit_provisioning_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.submit_provisioning_config] = mock_rpc - - request = {} - client.submit_provisioning_config(request) - - # Establish that the underlying gRPC stub method was called. 
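-        # The REST transport has no gRPC stub; the mocked wrapped method
-        # installed above is what the call counts below actually measure,
-        # here and in the analogous tests for the other RPCs.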
- assert mock_rpc.call_count == 1 - - client.submit_provisioning_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_submit_provisioning_config_rest_required_fields(request_type=provisioning.SubmitProvisioningConfigRequest): - transport_class = transports.BareMetalSolutionRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).submit_provisioning_config._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).submit_provisioning_config._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = provisioning.SubmitProvisioningConfigResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
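-                # SubmitProvisioningConfig is a POST rule, so unlike the GET
-                # methods above the mocked transcode result also carries the
-                # request message as the HTTP body.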
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = provisioning.SubmitProvisioningConfigResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.submit_provisioning_config(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_submit_provisioning_config_rest_unset_required_fields(): - transport = transports.BareMetalSolutionRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.submit_provisioning_config._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "provisioningConfig", ))) - - -def test_submit_provisioning_config_rest_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = provisioning.SubmitProvisioningConfigResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - provisioning_config=provisioning.ProvisioningConfig(name='name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = provisioning.SubmitProvisioningConfigResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.submit_provisioning_config(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*/locations/*}/provisioningConfigs:submit" % client.transport._host, args[1]) - - -def test_submit_provisioning_config_rest_flattened_error(transport: str = 'rest'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.submit_provisioning_config( - provisioning.SubmitProvisioningConfigRequest(), - parent='parent_value', - provisioning_config=provisioning.ProvisioningConfig(name='name_value'), - ) - - -def test_get_provisioning_config_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_provisioning_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_provisioning_config] = mock_rpc - - request = {} - client.get_provisioning_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_provisioning_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_provisioning_config_rest_required_fields(request_type=provisioning.GetProvisioningConfigRequest): - transport_class = transports.BareMetalSolutionRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_provisioning_config._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_provisioning_config._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = provisioning.ProvisioningConfig() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
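-                # Whatever ends up in query_params, the REST transport always
-                # appends $alt=json;enum-encoding=int, which the assertion at
-                # the end of this test pins down.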
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = provisioning.ProvisioningConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_provisioning_config(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_provisioning_config_rest_unset_required_fields(): - transport = transports.BareMetalSolutionRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_provisioning_config._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_provisioning_config_rest_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = provisioning.ProvisioningConfig() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/provisioningConfigs/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = provisioning.ProvisioningConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_provisioning_config(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/provisioningConfigs/*}" % client.transport._host, args[1]) - - -def test_get_provisioning_config_rest_flattened_error(transport: str = 'rest'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_provisioning_config( - provisioning.GetProvisioningConfigRequest(), - name='name_value', - ) - - -def test_create_provisioning_config_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_provisioning_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_provisioning_config] = mock_rpc - - request = {} - client.create_provisioning_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_provisioning_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_provisioning_config_rest_required_fields(request_type=provisioning.CreateProvisioningConfigRequest): - transport_class = transports.BareMetalSolutionRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_provisioning_config._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_provisioning_config._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("email", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = provisioning.ProvisioningConfig() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
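-                # Like Submit, Create POSTs the provisioning config in the
-                # body; the optional "email" field checked above travels as a
-                # query parameter rather than in the body.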
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = provisioning.ProvisioningConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_provisioning_config(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_provisioning_config_rest_unset_required_fields(): - transport = transports.BareMetalSolutionRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_provisioning_config._get_unset_required_fields({}) - assert set(unset_fields) == (set(("email", )) & set(("parent", "provisioningConfig", ))) - - -def test_create_provisioning_config_rest_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = provisioning.ProvisioningConfig() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - provisioning_config=provisioning.ProvisioningConfig(name='name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = provisioning.ProvisioningConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_provisioning_config(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*/locations/*}/provisioningConfigs" % client.transport._host, args[1]) - - -def test_create_provisioning_config_rest_flattened_error(transport: str = 'rest'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_provisioning_config( - provisioning.CreateProvisioningConfigRequest(), - parent='parent_value', - provisioning_config=provisioning.ProvisioningConfig(name='name_value'), - ) - - -def test_update_provisioning_config_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_provisioning_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_provisioning_config] = mock_rpc - - request = {} - client.update_provisioning_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_provisioning_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_provisioning_config_rest_required_fields(request_type=provisioning.UpdateProvisioningConfigRequest): - transport_class = transports.BareMetalSolutionRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_provisioning_config._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_provisioning_config._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("email", "update_mask", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = provisioning.ProvisioningConfig() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
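-                # UpdateProvisioningConfig has no required path fields of its
-                # own; requiredness lives in the body ("provisioningConfig")
-                # and the "updateMask" query parameter, as the unset-fields
-                # test below spells out.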
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = provisioning.ProvisioningConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_provisioning_config(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_provisioning_config_rest_unset_required_fields(): - transport = transports.BareMetalSolutionRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_provisioning_config._get_unset_required_fields({}) - assert set(unset_fields) == (set(("email", "updateMask", )) & set(("provisioningConfig", "updateMask", ))) - - -def test_update_provisioning_config_rest_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = provisioning.ProvisioningConfig() - - # get arguments that satisfy an http rule for this method - sample_request = {'provisioning_config': {'name': 'projects/sample1/locations/sample2/provisioningConfigs/sample3'}} - - # get truthy value for each flattened field - mock_args = dict( - provisioning_config=provisioning.ProvisioningConfig(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = provisioning.ProvisioningConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_provisioning_config(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{provisioning_config.name=projects/*/locations/*/provisioningConfigs/*}" % client.transport._host, args[1]) - - -def test_update_provisioning_config_rest_flattened_error(transport: str = 'rest'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_provisioning_config( - provisioning.UpdateProvisioningConfigRequest(), - provisioning_config=provisioning.ProvisioningConfig(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_rename_network_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.rename_network in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.rename_network] = mock_rpc - - request = {} - client.rename_network(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.rename_network(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_rename_network_rest_required_fields(request_type=network.RenameNetworkRequest): - transport_class = transports.BareMetalSolutionRestTransport - - request_init = {} - request_init["name"] = "" - request_init["new_network_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).rename_network._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - jsonified_request["newNetworkId"] = 'new_network_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).rename_network._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - assert "newNetworkId" in jsonified_request - assert jsonified_request["newNetworkId"] == 'new_network_id_value' - - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = network.Network() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = network.Network.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.rename_network(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_rename_network_rest_unset_required_fields(): - transport = transports.BareMetalSolutionRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.rename_network._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", "newNetworkId", ))) - - -def test_rename_network_rest_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = network.Network() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/networks/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - new_network_id='new_network_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = network.Network.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.rename_network(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=projects/*/locations/*/networks/*}:rename" % client.transport._host, args[1]) - - -def test_rename_network_rest_flattened_error(transport: str = 'rest'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.rename_network( - network.RenameNetworkRequest(), - name='name_value', - new_network_id='new_network_id_value', - ) - - -def test_list_os_images_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_os_images in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_os_images] = mock_rpc - - request = {} - client.list_os_images(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_os_images(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_os_images_rest_required_fields(request_type=osimage.ListOSImagesRequest): - transport_class = transports.BareMetalSolutionRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_os_images._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_os_images._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = osimage.ListOSImagesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = osimage.ListOSImagesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_os_images(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_os_images_rest_unset_required_fields(): - transport = transports.BareMetalSolutionRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_os_images._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_os_images_rest_flattened(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = osimage.ListOSImagesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = osimage.ListOSImagesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_os_images(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*/locations/*}/osImages" % client.transport._host, args[1]) - - -def test_list_os_images_rest_flattened_error(transport: str = 'rest'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_os_images( - osimage.ListOSImagesRequest(), - parent='parent_value', - ) - - -def test_list_os_images_rest_pager(transport: str = 'rest'): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
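-        # Each fake page below is serialized with to_json() and handed back
-        # as raw bytes, mirroring what the HTTP layer would actually deliver.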
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - osimage.ListOSImagesResponse( - os_images=[ - osimage.OSImage(), - osimage.OSImage(), - osimage.OSImage(), - ], - next_page_token='abc', - ), - osimage.ListOSImagesResponse( - os_images=[], - next_page_token='def', - ), - osimage.ListOSImagesResponse( - os_images=[ - osimage.OSImage(), - ], - next_page_token='ghi', - ), - osimage.ListOSImagesResponse( - os_images=[ - osimage.OSImage(), - osimage.OSImage(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(osimage.ListOSImagesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - pager = client.list_os_images(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, osimage.OSImage) - for i in results) - - pages = list(client.list_os_images(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.BareMetalSolutionGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.BareMetalSolutionGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = BareMetalSolutionClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.BareMetalSolutionGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = BareMetalSolutionClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = BareMetalSolutionClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.BareMetalSolutionGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = BareMetalSolutionClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.BareMetalSolutionGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = BareMetalSolutionClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. 
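-    # Both the sync and asyncio gRPC transports expose their underlying
-    # channel via .grpc_channel; the assertions below only check that a
-    # channel object exists.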
- transport = transports.BareMetalSolutionGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.BareMetalSolutionGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.BareMetalSolutionGrpcTransport, - transports.BareMetalSolutionGrpcAsyncIOTransport, - transports.BareMetalSolutionRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = BareMetalSolutionClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_instances_empty_call_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: - call.return_value = instance.ListInstancesResponse() - client.list_instances(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = instance.ListInstancesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_instance_empty_call_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: - call.return_value = instance.Instance() - client.get_instance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = instance.GetInstanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_instance_empty_call_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_instance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcb_instance.UpdateInstanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
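-# (request=None makes the client construct a default, empty request message
-# of the expected type, which is what args[0] is compared against.)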
-def test_rename_instance_empty_call_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.rename_instance), - '__call__') as call: - call.return_value = instance.Instance() - client.rename_instance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = instance.RenameInstanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_reset_instance_empty_call_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.reset_instance), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.reset_instance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = instance.ResetInstanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_start_instance_empty_call_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.start_instance), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.start_instance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = instance.StartInstanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_stop_instance_empty_call_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.stop_instance), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.stop_instance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = instance.StopInstanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_enable_interactive_serial_console_empty_call_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.enable_interactive_serial_console), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.enable_interactive_serial_console(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = instance.EnableInteractiveSerialConsoleRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_disable_interactive_serial_console_empty_call_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.disable_interactive_serial_console), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.disable_interactive_serial_console(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = instance.DisableInteractiveSerialConsoleRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_detach_lun_empty_call_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.detach_lun), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.detach_lun(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcb_instance.DetachLunRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_ssh_keys_empty_call_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_ssh_keys), - '__call__') as call: - call.return_value = ssh_key.ListSSHKeysResponse() - client.list_ssh_keys(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = ssh_key.ListSSHKeysRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_ssh_key_empty_call_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_ssh_key), - '__call__') as call: - call.return_value = gcb_ssh_key.SSHKey() - client.create_ssh_key(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcb_ssh_key.CreateSSHKeyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_ssh_key_empty_call_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
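-    # DeleteSSHKey returns google.protobuf.Empty, so the stubbed call yields
-    # None rather than a response message.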
- with mock.patch.object( - type(client.transport.delete_ssh_key), - '__call__') as call: - call.return_value = None - client.delete_ssh_key(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = ssh_key.DeleteSSHKeyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_volumes_empty_call_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_volumes), - '__call__') as call: - call.return_value = volume.ListVolumesResponse() - client.list_volumes(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = volume.ListVolumesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_volume_empty_call_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_volume), - '__call__') as call: - call.return_value = volume.Volume() - client.get_volume(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = volume.GetVolumeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_volume_empty_call_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_volume), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_volume(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcb_volume.UpdateVolumeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_rename_volume_empty_call_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.rename_volume), - '__call__') as call: - call.return_value = volume.Volume() - client.rename_volume(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = volume.RenameVolumeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_evict_volume_empty_call_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.evict_volume), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.evict_volume(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = volume.EvictVolumeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_resize_volume_empty_call_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.resize_volume), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.resize_volume(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcb_volume.ResizeVolumeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_networks_empty_call_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_networks), - '__call__') as call: - call.return_value = network.ListNetworksResponse() - client.list_networks(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = network.ListNetworksRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_network_usage_empty_call_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_network_usage), - '__call__') as call: - call.return_value = network.ListNetworkUsageResponse() - client.list_network_usage(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = network.ListNetworkUsageRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_network_empty_call_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_network), - '__call__') as call: - call.return_value = network.Network() - client.get_network(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = network.GetNetworkRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_network_empty_call_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_network), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_network(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcb_network.UpdateNetworkRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_volume_snapshot_empty_call_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_volume_snapshot), - '__call__') as call: - call.return_value = gcb_volume_snapshot.VolumeSnapshot() - client.create_volume_snapshot(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcb_volume_snapshot.CreateVolumeSnapshotRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_restore_volume_snapshot_empty_call_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.restore_volume_snapshot), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.restore_volume_snapshot(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcb_volume_snapshot.RestoreVolumeSnapshotRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_volume_snapshot_empty_call_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_volume_snapshot), - '__call__') as call: - call.return_value = None - client.delete_volume_snapshot(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = volume_snapshot.DeleteVolumeSnapshotRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_volume_snapshot_empty_call_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.get_volume_snapshot), - '__call__') as call: - call.return_value = volume_snapshot.VolumeSnapshot() - client.get_volume_snapshot(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = volume_snapshot.GetVolumeSnapshotRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_volume_snapshots_empty_call_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_volume_snapshots), - '__call__') as call: - call.return_value = volume_snapshot.ListVolumeSnapshotsResponse() - client.list_volume_snapshots(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = volume_snapshot.ListVolumeSnapshotsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_lun_empty_call_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_lun), - '__call__') as call: - call.return_value = lun.Lun() - client.get_lun(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = lun.GetLunRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_luns_empty_call_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_luns), - '__call__') as call: - call.return_value = lun.ListLunsResponse() - client.list_luns(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = lun.ListLunsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_evict_lun_empty_call_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.evict_lun), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.evict_lun(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = lun.EvictLunRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_get_nfs_share_empty_call_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_nfs_share), - '__call__') as call: - call.return_value = nfs_share.NfsShare() - client.get_nfs_share(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = nfs_share.GetNfsShareRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_nfs_shares_empty_call_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_nfs_shares), - '__call__') as call: - call.return_value = nfs_share.ListNfsSharesResponse() - client.list_nfs_shares(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = nfs_share.ListNfsSharesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_nfs_share_empty_call_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_nfs_share), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_nfs_share(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcb_nfs_share.UpdateNfsShareRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_nfs_share_empty_call_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_nfs_share), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_nfs_share(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcb_nfs_share.CreateNfsShareRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_rename_nfs_share_empty_call_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.rename_nfs_share), - '__call__') as call: - call.return_value = nfs_share.NfsShare() - client.rename_nfs_share(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = nfs_share.RenameNfsShareRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_nfs_share_empty_call_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_nfs_share), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_nfs_share(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = nfs_share.DeleteNfsShareRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_provisioning_quotas_empty_call_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_provisioning_quotas), - '__call__') as call: - call.return_value = provisioning.ListProvisioningQuotasResponse() - client.list_provisioning_quotas(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = provisioning.ListProvisioningQuotasRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_submit_provisioning_config_empty_call_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.submit_provisioning_config), - '__call__') as call: - call.return_value = provisioning.SubmitProvisioningConfigResponse() - client.submit_provisioning_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = provisioning.SubmitProvisioningConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_provisioning_config_empty_call_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_provisioning_config), - '__call__') as call: - call.return_value = provisioning.ProvisioningConfig() - client.get_provisioning_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = provisioning.GetProvisioningConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_create_provisioning_config_empty_call_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_provisioning_config), - '__call__') as call: - call.return_value = provisioning.ProvisioningConfig() - client.create_provisioning_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = provisioning.CreateProvisioningConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_provisioning_config_empty_call_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_provisioning_config), - '__call__') as call: - call.return_value = provisioning.ProvisioningConfig() - client.update_provisioning_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = provisioning.UpdateProvisioningConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_rename_network_empty_call_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.rename_network), - '__call__') as call: - call.return_value = network.Network() - client.rename_network(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = network.RenameNetworkRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_os_images_empty_call_grpc(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_os_images), - '__call__') as call: - call.return_value = osimage.ListOSImagesResponse() - client.list_os_images(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = osimage.ListOSImagesRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = BareMetalSolutionAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_list_instances_empty_call_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(instance.ListInstancesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - await client.list_instances(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = instance.ListInstancesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_instance_empty_call_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(instance.Instance( - name='name_value', - id='id_value', - machine_type='machine_type_value', - state=instance.Instance.State.PROVISIONING, - hyperthreading_enabled=True, - interactive_serial_console_enabled=True, - os_image='os_image_value', - pod='pod_value', - network_template='network_template_value', - login_info='login_info_value', - workload_profile=common.WorkloadProfile.WORKLOAD_PROFILE_GENERIC, - firmware_version='firmware_version_value', - )) - await client.get_instance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = instance.GetInstanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_instance_empty_call_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_instance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcb_instance.UpdateInstanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_rename_instance_empty_call_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.rename_instance), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(instance.Instance( - name='name_value', - id='id_value', - machine_type='machine_type_value', - state=instance.Instance.State.PROVISIONING, - hyperthreading_enabled=True, - interactive_serial_console_enabled=True, - os_image='os_image_value', - pod='pod_value', - network_template='network_template_value', - login_info='login_info_value', - workload_profile=common.WorkloadProfile.WORKLOAD_PROFILE_GENERIC, - firmware_version='firmware_version_value', - )) - await client.rename_instance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = instance.RenameInstanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_reset_instance_empty_call_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.reset_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.reset_instance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = instance.ResetInstanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_start_instance_empty_call_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.start_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.start_instance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = instance.StartInstanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_stop_instance_empty_call_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.stop_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.stop_instance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = instance.StopInstanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. 
request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_enable_interactive_serial_console_empty_call_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.enable_interactive_serial_console), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.enable_interactive_serial_console(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = instance.EnableInteractiveSerialConsoleRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_disable_interactive_serial_console_empty_call_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.disable_interactive_serial_console), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.disable_interactive_serial_console(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = instance.DisableInteractiveSerialConsoleRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_detach_lun_empty_call_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.detach_lun), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.detach_lun(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcb_instance.DetachLunRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_ssh_keys_empty_call_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_ssh_keys), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(ssh_key.ListSSHKeysResponse( - next_page_token='next_page_token_value', - )) - await client.list_ssh_keys(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = ssh_key.ListSSHKeysRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_ssh_key_empty_call_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_ssh_key), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcb_ssh_key.SSHKey( - name='name_value', - public_key='public_key_value', - )) - await client.create_ssh_key(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcb_ssh_key.CreateSSHKeyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_ssh_key_empty_call_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_ssh_key), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_ssh_key(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = ssh_key.DeleteSSHKeyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_volumes_empty_call_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_volumes), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(volume.ListVolumesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - await client.list_volumes(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = volume.ListVolumesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_volume_empty_call_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_volume), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(volume.Volume( - name='name_value', - id='id_value', - storage_type=volume.Volume.StorageType.SSD, - state=volume.Volume.State.CREATING, - requested_size_gib=1917, - originally_requested_size_gib=3094, - current_size_gib=1710, - emergency_size_gib=1898, - max_size_gib=1265, - auto_grown_size_gib=2032, - remaining_space_gib=1974, - snapshot_auto_delete_behavior=volume.Volume.SnapshotAutoDeleteBehavior.DISABLED, - snapshot_enabled=True, - pod='pod_value', - protocol=volume.Volume.Protocol.FIBRE_CHANNEL, - boot_volume=True, - performance_tier=common.VolumePerformanceTier.VOLUME_PERFORMANCE_TIER_SHARED, - notes='notes_value', - workload_profile=volume.Volume.WorkloadProfile.GENERIC, - instances=['instances_value'], - attached=True, - )) - await client.get_volume(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = volume.GetVolumeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_volume_empty_call_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_volume), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_volume(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcb_volume.UpdateVolumeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_rename_volume_empty_call_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.rename_volume), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(volume.Volume( - name='name_value', - id='id_value', - storage_type=volume.Volume.StorageType.SSD, - state=volume.Volume.State.CREATING, - requested_size_gib=1917, - originally_requested_size_gib=3094, - current_size_gib=1710, - emergency_size_gib=1898, - max_size_gib=1265, - auto_grown_size_gib=2032, - remaining_space_gib=1974, - snapshot_auto_delete_behavior=volume.Volume.SnapshotAutoDeleteBehavior.DISABLED, - snapshot_enabled=True, - pod='pod_value', - protocol=volume.Volume.Protocol.FIBRE_CHANNEL, - boot_volume=True, - performance_tier=common.VolumePerformanceTier.VOLUME_PERFORMANCE_TIER_SHARED, - notes='notes_value', - workload_profile=volume.Volume.WorkloadProfile.GENERIC, - instances=['instances_value'], - attached=True, - )) - await client.rename_volume(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = volume.RenameVolumeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_evict_volume_empty_call_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.evict_volume), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.evict_volume(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = volume.EvictVolumeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_resize_volume_empty_call_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.resize_volume), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.resize_volume(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcb_volume.ResizeVolumeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_networks_empty_call_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_networks), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(network.ListNetworksResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - await client.list_networks(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = network.ListNetworksRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_network_usage_empty_call_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_network_usage), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(network.ListNetworkUsageResponse( - )) - await client.list_network_usage(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = network.ListNetworkUsageRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_network_empty_call_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_network), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(network.Network( - name='name_value', - id='id_value', - type_=network.Network.Type.CLIENT, - ip_address='ip_address_value', - mac_address=['mac_address_value'], - state=network.Network.State.PROVISIONING, - vlan_id='vlan_id_value', - cidr='cidr_value', - services_cidr='services_cidr_value', - pod='pod_value', - jumbo_frames_enabled=True, - gateway_ip='gateway_ip_value', - )) - await client.get_network(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = network.GetNetworkRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_network_empty_call_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_network), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_network(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcb_network.UpdateNetworkRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_volume_snapshot_empty_call_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_volume_snapshot), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcb_volume_snapshot.VolumeSnapshot( - name='name_value', - id='id_value', - description='description_value', - storage_volume='storage_volume_value', - type_=gcb_volume_snapshot.VolumeSnapshot.SnapshotType.AD_HOC, - )) - await client.create_volume_snapshot(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcb_volume_snapshot.CreateVolumeSnapshotRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_restore_volume_snapshot_empty_call_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.restore_volume_snapshot), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.restore_volume_snapshot(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcb_volume_snapshot.RestoreVolumeSnapshotRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_volume_snapshot_empty_call_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_volume_snapshot), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_volume_snapshot(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = volume_snapshot.DeleteVolumeSnapshotRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_volume_snapshot_empty_call_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_volume_snapshot), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(volume_snapshot.VolumeSnapshot( - name='name_value', - id='id_value', - description='description_value', - storage_volume='storage_volume_value', - type_=volume_snapshot.VolumeSnapshot.SnapshotType.AD_HOC, - )) - await client.get_volume_snapshot(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = volume_snapshot.GetVolumeSnapshotRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_volume_snapshots_empty_call_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.list_volume_snapshots), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(volume_snapshot.ListVolumeSnapshotsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - await client.list_volume_snapshots(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = volume_snapshot.ListVolumeSnapshotsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_lun_empty_call_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_lun), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(lun.Lun( - name='name_value', - id='id_value', - state=lun.Lun.State.CREATING, - size_gb=739, - multiprotocol_type=lun.Lun.MultiprotocolType.LINUX, - storage_volume='storage_volume_value', - shareable=True, - boot_lun=True, - storage_type=lun.Lun.StorageType.SSD, - wwid='wwid_value', - instances=['instances_value'], - )) - await client.get_lun(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = lun.GetLunRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_luns_empty_call_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_luns), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(lun.ListLunsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - await client.list_luns(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = lun.ListLunsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_evict_lun_empty_call_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.evict_lun), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.evict_lun(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = lun.EvictLunRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_nfs_share_empty_call_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_nfs_share), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(nfs_share.NfsShare( - name='name_value', - nfs_share_id='nfs_share_id_value', - id='id_value', - state=nfs_share.NfsShare.State.PROVISIONED, - volume='volume_value', - requested_size_gib=1917, - storage_type=nfs_share.NfsShare.StorageType.SSD, - )) - await client.get_nfs_share(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = nfs_share.GetNfsShareRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_nfs_shares_empty_call_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_nfs_shares), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(nfs_share.ListNfsSharesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - await client.list_nfs_shares(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = nfs_share.ListNfsSharesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_nfs_share_empty_call_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_nfs_share), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_nfs_share(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcb_nfs_share.UpdateNfsShareRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_nfs_share_empty_call_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.create_nfs_share), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_nfs_share(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcb_nfs_share.CreateNfsShareRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_rename_nfs_share_empty_call_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.rename_nfs_share), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(nfs_share.NfsShare( - name='name_value', - nfs_share_id='nfs_share_id_value', - id='id_value', - state=nfs_share.NfsShare.State.PROVISIONED, - volume='volume_value', - requested_size_gib=1917, - storage_type=nfs_share.NfsShare.StorageType.SSD, - )) - await client.rename_nfs_share(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = nfs_share.RenameNfsShareRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_nfs_share_empty_call_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_nfs_share), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_nfs_share(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = nfs_share.DeleteNfsShareRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_provisioning_quotas_empty_call_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_provisioning_quotas), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(provisioning.ListProvisioningQuotasResponse( - next_page_token='next_page_token_value', - )) - await client.list_provisioning_quotas(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = provisioning.ListProvisioningQuotasRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_submit_provisioning_config_empty_call_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.submit_provisioning_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(provisioning.SubmitProvisioningConfigResponse( - )) - await client.submit_provisioning_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = provisioning.SubmitProvisioningConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_provisioning_config_empty_call_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_provisioning_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(provisioning.ProvisioningConfig( - name='name_value', - ticket_id='ticket_id_value', - handover_service_account='handover_service_account_value', - email='email_value', - state=provisioning.ProvisioningConfig.State.DRAFT, - location='location_value', - cloud_console_uri='cloud_console_uri_value', - vpc_sc_enabled=True, - status_message='status_message_value', - custom_id='custom_id_value', - )) - await client.get_provisioning_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = provisioning.GetProvisioningConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_provisioning_config_empty_call_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_provisioning_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(provisioning.ProvisioningConfig( - name='name_value', - ticket_id='ticket_id_value', - handover_service_account='handover_service_account_value', - email='email_value', - state=provisioning.ProvisioningConfig.State.DRAFT, - location='location_value', - cloud_console_uri='cloud_console_uri_value', - vpc_sc_enabled=True, - status_message='status_message_value', - custom_id='custom_id_value', - )) - await client.create_provisioning_config(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = provisioning.CreateProvisioningConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_provisioning_config_empty_call_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_provisioning_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(provisioning.ProvisioningConfig( - name='name_value', - ticket_id='ticket_id_value', - handover_service_account='handover_service_account_value', - email='email_value', - state=provisioning.ProvisioningConfig.State.DRAFT, - location='location_value', - cloud_console_uri='cloud_console_uri_value', - vpc_sc_enabled=True, - status_message='status_message_value', - custom_id='custom_id_value', - )) - await client.update_provisioning_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = provisioning.UpdateProvisioningConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_rename_network_empty_call_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.rename_network), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(network.Network( - name='name_value', - id='id_value', - type_=network.Network.Type.CLIENT, - ip_address='ip_address_value', - mac_address=['mac_address_value'], - state=network.Network.State.PROVISIONING, - vlan_id='vlan_id_value', - cidr='cidr_value', - services_cidr='services_cidr_value', - pod='pod_value', - jumbo_frames_enabled=True, - gateway_ip='gateway_ip_value', - )) - await client.rename_network(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = network.RenameNetworkRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_os_images_empty_call_grpc_asyncio(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_os_images), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(osimage.ListOSImagesResponse( - next_page_token='next_page_token_value', - )) - await client.list_os_images(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = osimage.ListOSImagesRequest() - - assert args[0] == request_msg - - -def test_transport_kind_rest(): - transport = BareMetalSolutionClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" - - -def test_list_instances_rest_bad_request(request_type=instance.ListInstancesRequest): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_instances(request) - - -@pytest.mark.parametrize("request_type", [ - instance.ListInstancesRequest, - dict, -]) -def test_list_instances_rest_call_success(request_type): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = instance.ListInstancesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = instance.ListInstancesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_instances(request) - - # Establish that the response is the type that we expect. 
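    # Paginated REST methods return a pager rather than the raw response:
    # ListInstancesPager lazily fetches subsequent pages and proxies attribute
    # access through to the underlying ListInstancesResponse, which is why the
    # scalar fields asserted below read straight through.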
- assert isinstance(response, pagers.ListInstancesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_instances_rest_interceptors(null_interceptor): - transport = transports.BareMetalSolutionRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BareMetalSolutionRestInterceptor(), - ) - client = BareMetalSolutionClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_list_instances") as post, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_list_instances_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "pre_list_instances") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = instance.ListInstancesRequest.pb(instance.ListInstancesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = instance.ListInstancesResponse.to_json(instance.ListInstancesResponse()) - req.return_value.content = return_value - - request = instance.ListInstancesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = instance.ListInstancesResponse() - post_with_metadata.return_value = instance.ListInstancesResponse(), metadata - - client.list_instances(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_instance_rest_bad_request(request_type=instance.GetInstanceRequest): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_instance(request) - - -@pytest.mark.parametrize("request_type", [ - instance.GetInstanceRequest, - dict, -]) -def test_get_instance_rest_call_success(request_type): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
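    # The REST tests fake the call at the requests.Session level: the proto-plus
    # return value is converted to its raw protobuf form with `.pb()` (since
    # json_format only accepts protobuf messages), serialized with
    # MessageToJson, and handed back as the UTF-8 body that the transport then
    # parses into the response type.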
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = instance.Instance( - name='name_value', - id='id_value', - machine_type='machine_type_value', - state=instance.Instance.State.PROVISIONING, - hyperthreading_enabled=True, - interactive_serial_console_enabled=True, - os_image='os_image_value', - pod='pod_value', - network_template='network_template_value', - login_info='login_info_value', - workload_profile=common.WorkloadProfile.WORKLOAD_PROFILE_GENERIC, - firmware_version='firmware_version_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = instance.Instance.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_instance(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, instance.Instance) - assert response.name == 'name_value' - assert response.id == 'id_value' - assert response.machine_type == 'machine_type_value' - assert response.state == instance.Instance.State.PROVISIONING - assert response.hyperthreading_enabled is True - assert response.interactive_serial_console_enabled is True - assert response.os_image == 'os_image_value' - assert response.pod == 'pod_value' - assert response.network_template == 'network_template_value' - assert response.login_info == 'login_info_value' - assert response.workload_profile == common.WorkloadProfile.WORKLOAD_PROFILE_GENERIC - assert response.firmware_version == 'firmware_version_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_instance_rest_interceptors(null_interceptor): - transport = transports.BareMetalSolutionRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BareMetalSolutionRestInterceptor(), - ) - client = BareMetalSolutionClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_get_instance") as post, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_get_instance_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "pre_get_instance") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = instance.GetInstanceRequest.pb(instance.GetInstanceRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = instance.Instance.to_json(instance.Instance()) - req.return_value.content = return_value - - request = instance.GetInstanceRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = instance.Instance() - post_with_metadata.return_value = instance.Instance(), metadata - - client.get_instance(request, 
metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_instance_rest_bad_request(request_type=gcb_instance.UpdateInstanceRequest): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_instance(request) - - -@pytest.mark.parametrize("request_type", [ - gcb_instance.UpdateInstanceRequest, - dict, -]) -def test_update_instance_rest_call_success(request_type): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} - request_init["instance"] = {'name': 'projects/sample1/locations/sample2/instances/sample3', 'id': 'id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'machine_type': 'machine_type_value', 'state': 1, 'hyperthreading_enabled': True, 'labels': {}, 'luns': [{'name': 'name_value', 'id': 'id_value', 'state': 1, 'size_gb': 739, 'multiprotocol_type': 1, 'storage_volume': 'storage_volume_value', 'shareable': True, 'boot_lun': True, 'storage_type': 1, 'wwid': 'wwid_value', 'expire_time': {}, 'instances': ['instances_value1', 'instances_value2']}], 'volumes': [{'name': 'name_value', 'id': 'id_value', 'storage_type': 1, 'state': 1, 'requested_size_gib': 1917, 'originally_requested_size_gib': 3094, 'current_size_gib': 1710, 'emergency_size_gib': 1898, 'max_size_gib': 1265, 'auto_grown_size_gib': 2032, 'remaining_space_gib': 1974, 'snapshot_reservation_detail': {'reserved_space_gib': 1884, 'reserved_space_used_percent': 2859, 'reserved_space_remaining_gib': 2933, 'reserved_space_percent': 2331}, 'snapshot_auto_delete_behavior': 1, 'labels': {}, 'snapshot_enabled': True, 'pod': 'pod_value', 'protocol': 1, 'boot_volume': True, 'performance_tier': 1, 'notes': 'notes_value', 'workload_profile': 1, 'expire_time': {}, 'instances': ['instances_value1', 'instances_value2'], 'attached': True}], 'networks': [{'name': 'name_value', 'id': 'id_value', 'type_': 1, 'ip_address': 'ip_address_value', 'mac_address': ['mac_address_value1', 'mac_address_value2'], 'state': 1, 'vlan_id': 'vlan_id_value', 'cidr': 'cidr_value', 'vrf': {'name': 'name_value', 'state': 1, 'qos_policy': {'bandwidth_gbps': 0.1472}, 'vlan_attachments': [{'peer_vlan_id': 1256, 'peer_ip': 'peer_ip_value', 'router_ip': 'router_ip_value', 'pairing_key': 'pairing_key_value', 'qos_policy': {}, 'id': 'id_value', 'interconnect_attachment': 'interconnect_attachment_value'}]}, 'labels': {}, 'services_cidr': 'services_cidr_value', 'reservations': [{'start_address': 'start_address_value', 'end_address': 'end_address_value', 'note': 'note_value'}], 
'pod': 'pod_value', 'mount_points': [{'instance': 'instance_value', 'logical_interface': 'logical_interface_value', 'default_gateway': True, 'ip_address': 'ip_address_value'}], 'jumbo_frames_enabled': True, 'gateway_ip': 'gateway_ip_value'}], 'interactive_serial_console_enabled': True, 'os_image': 'os_image_value', 'pod': 'pod_value', 'network_template': 'network_template_value', 'logical_interfaces': [{'logical_network_interfaces': [{'network': 'network_value', 'ip_address': 'ip_address_value', 'default_gateway': True, 'network_type': 1, 'id': 'id_value'}], 'name': 'name_value', 'interface_index': 1576}], 'login_info': 'login_info_value', 'workload_profile': 1, 'firmware_version': 'firmware_version_value'} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = gcb_instance.UpdateInstanceRequest.meta.fields["instance"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["instance"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["instance"][field])): - del request_init["instance"][field][i][subfield] - else: - del request_init["instance"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
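    # The pruning above keeps the hand-written sample body constructible: any
    # subfield unknown to the protobuf runtime installed at test time would make
    # `request_type(**request_init)` raise, so such fields are deleted from the
    # dict first (see gapic-generator-python issue #1748 referenced above).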
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_instance(request) - - # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_instance_rest_interceptors(null_interceptor): - transport = transports.BareMetalSolutionRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BareMetalSolutionRestInterceptor(), - ) - client = BareMetalSolutionClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_update_instance") as post, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_update_instance_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "pre_update_instance") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = gcb_instance.UpdateInstanceRequest.pb(gcb_instance.UpdateInstanceRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = gcb_instance.UpdateInstanceRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.update_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_rename_instance_rest_bad_request(request_type=instance.RenameInstanceRequest): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
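    # A 4xx status on the mocked session is translated by google.api_core's
    # HTTP error mapping into the corresponding exception class; the 400 here
    # surfaces as core_exceptions.BadRequest, which pytest.raises captures.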
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.rename_instance(request) - - -@pytest.mark.parametrize("request_type", [ - instance.RenameInstanceRequest, - dict, -]) -def test_rename_instance_rest_call_success(request_type): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = instance.Instance( - name='name_value', - id='id_value', - machine_type='machine_type_value', - state=instance.Instance.State.PROVISIONING, - hyperthreading_enabled=True, - interactive_serial_console_enabled=True, - os_image='os_image_value', - pod='pod_value', - network_template='network_template_value', - login_info='login_info_value', - workload_profile=common.WorkloadProfile.WORKLOAD_PROFILE_GENERIC, - firmware_version='firmware_version_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = instance.Instance.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.rename_instance(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, instance.Instance) - assert response.name == 'name_value' - assert response.id == 'id_value' - assert response.machine_type == 'machine_type_value' - assert response.state == instance.Instance.State.PROVISIONING - assert response.hyperthreading_enabled is True - assert response.interactive_serial_console_enabled is True - assert response.os_image == 'os_image_value' - assert response.pod == 'pod_value' - assert response.network_template == 'network_template_value' - assert response.login_info == 'login_info_value' - assert response.workload_profile == common.WorkloadProfile.WORKLOAD_PROFILE_GENERIC - assert response.firmware_version == 'firmware_version_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_rename_instance_rest_interceptors(null_interceptor): - transport = transports.BareMetalSolutionRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BareMetalSolutionRestInterceptor(), - ) - client = BareMetalSolutionClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_rename_instance") as post, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_rename_instance_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "pre_rename_instance") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = instance.RenameInstanceRequest.pb(instance.RenameInstanceRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = instance.Instance.to_json(instance.Instance()) - req.return_value.content = return_value - - request = instance.RenameInstanceRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = instance.Instance() - post_with_metadata.return_value = instance.Instance(), metadata - - client.rename_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_reset_instance_rest_bad_request(request_type=instance.ResetInstanceRequest): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.reset_instance(request) - - -@pytest.mark.parametrize("request_type", [ - instance.ResetInstanceRequest, - dict, -]) -def test_reset_instance_rest_call_success(request_type): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.reset_instance(request) - - # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_reset_instance_rest_interceptors(null_interceptor): - transport = transports.BareMetalSolutionRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BareMetalSolutionRestInterceptor(), - ) - client = BareMetalSolutionClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_reset_instance") as post, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_reset_instance_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "pre_reset_instance") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = instance.ResetInstanceRequest.pb(instance.ResetInstanceRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = instance.ResetInstanceRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.reset_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - 
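    # A single logical call fans out through the interceptor hooks: the pre
    # hook runs before the HTTP request and the post hooks run once the
    # Operation response is decoded (the precise ordering is defined by the
    # generated transport). `_set_result_from_operation` is patched out above
    # so that building the returned LRO future does not try to poll the mocked
    # session.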
pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_start_instance_rest_bad_request(request_type=instance.StartInstanceRequest): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.start_instance(request) - - -@pytest.mark.parametrize("request_type", [ - instance.StartInstanceRequest, - dict, -]) -def test_start_instance_rest_call_success(request_type): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.start_instance(request) - - # Establish that the response is the type that we expect. 
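    # Long-running methods hand back a google.api_core operation future
    # wrapping the canned operations_pb2.Operation; the generated check below
    # only re-serializes the mocked value rather than asserting on its fields.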
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_start_instance_rest_interceptors(null_interceptor): - transport = transports.BareMetalSolutionRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BareMetalSolutionRestInterceptor(), - ) - client = BareMetalSolutionClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_start_instance") as post, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_start_instance_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "pre_start_instance") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = instance.StartInstanceRequest.pb(instance.StartInstanceRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = instance.StartInstanceRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.start_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_stop_instance_rest_bad_request(request_type=instance.StopInstanceRequest): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.stop_instance(request) - - -@pytest.mark.parametrize("request_type", [ - instance.StopInstanceRequest, - dict, -]) -def test_stop_instance_rest_call_success(request_type): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.stop_instance(request) - - # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_stop_instance_rest_interceptors(null_interceptor): - transport = transports.BareMetalSolutionRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BareMetalSolutionRestInterceptor(), - ) - client = BareMetalSolutionClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_stop_instance") as post, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_stop_instance_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "pre_stop_instance") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = instance.StopInstanceRequest.pb(instance.StopInstanceRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = instance.StopInstanceRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.stop_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_enable_interactive_serial_console_rest_bad_request(request_type=instance.EnableInteractiveSerialConsoleRequest): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.enable_interactive_serial_console(request) - - -@pytest.mark.parametrize("request_type", [ - instance.EnableInteractiveSerialConsoleRequest, - dict, -]) -def test_enable_interactive_serial_console_rest_call_success(request_type): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.enable_interactive_serial_console(request) - - # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_enable_interactive_serial_console_rest_interceptors(null_interceptor): - transport = transports.BareMetalSolutionRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BareMetalSolutionRestInterceptor(), - ) - client = BareMetalSolutionClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_enable_interactive_serial_console") as post, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_enable_interactive_serial_console_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "pre_enable_interactive_serial_console") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = instance.EnableInteractiveSerialConsoleRequest.pb(instance.EnableInteractiveSerialConsoleRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = instance.EnableInteractiveSerialConsoleRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - 
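    # Whatever (request, metadata) pair the pre hook returns is what the
    # transport actually sends, and the values returned by the post hooks are
    # what the caller ultimately receives; routing the canned objects back
    # through the mocks lets the assertions below confirm each hook fired
    # exactly once.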
post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.enable_interactive_serial_console(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_disable_interactive_serial_console_rest_bad_request(request_type=instance.DisableInteractiveSerialConsoleRequest): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.disable_interactive_serial_console(request) - - -@pytest.mark.parametrize("request_type", [ - instance.DisableInteractiveSerialConsoleRequest, - dict, -]) -def test_disable_interactive_serial_console_rest_call_success(request_type): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.disable_interactive_serial_console(request) - - # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_disable_interactive_serial_console_rest_interceptors(null_interceptor): - transport = transports.BareMetalSolutionRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BareMetalSolutionRestInterceptor(), - ) - client = BareMetalSolutionClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_disable_interactive_serial_console") as post, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_disable_interactive_serial_console_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "pre_disable_interactive_serial_console") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = instance.DisableInteractiveSerialConsoleRequest.pb(instance.DisableInteractiveSerialConsoleRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = instance.DisableInteractiveSerialConsoleRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.disable_interactive_serial_console(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_detach_lun_rest_bad_request(request_type=gcb_instance.DetachLunRequest): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'instance': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- json_return_value = ''
- response_value.json = mock.Mock(return_value={})
- response_value.status_code = 400
- response_value.request = mock.Mock()
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- client.detach_lun(request)
- 
- 
-@pytest.mark.parametrize("request_type", [
- gcb_instance.DetachLunRequest,
- dict,
-])
-def test_detach_lun_rest_call_success(request_type):
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
- 
- # send a request that will satisfy transcoding
- request_init = {'instance': 'projects/sample1/locations/sample2/instances/sample3'}
- request = request_type(**request_init)
- 
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = operations_pb2.Operation(name='operations/spam')
- 
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
- response_value.content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- response = client.detach_lun(request)
- 
- # Establish that the response is the type that we expect.
- json_return_value = json_format.MessageToJson(return_value)
- 
- 
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_detach_lun_rest_interceptors(null_interceptor):
- transport = transports.BareMetalSolutionRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.BareMetalSolutionRestInterceptor(),
- )
- client = BareMetalSolutionClient(transport=transport)
- 
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(operation.Operation, "_set_result_from_operation"), \
- mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_detach_lun") as post, \
- mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_detach_lun_with_metadata") as post_with_metadata, \
- mock.patch.object(transports.BareMetalSolutionRestInterceptor, "pre_detach_lun") as pre:
- pre.assert_not_called()
- post.assert_not_called()
- post_with_metadata.assert_not_called()
- pb_message = gcb_instance.DetachLunRequest.pb(gcb_instance.DetachLunRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
- 
- req.return_value = mock.Mock()
- req.return_value.status_code = 200
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- return_value = json_format.MessageToJson(operations_pb2.Operation())
- req.return_value.content = return_value
- 
- request = gcb_instance.DetachLunRequest()
- metadata =[
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = operations_pb2.Operation()
- post_with_metadata.return_value = operations_pb2.Operation(), metadata
- 
- client.detach_lun(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
- 
- pre.assert_called_once()
- post.assert_called_once()
- post_with_metadata.assert_called_once()
- 
- 
-def test_list_ssh_keys_rest_bad_request(request_type=ssh_key.ListSSHKeysRequest):
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
- # send a request that will satisfy transcoding
- request_init = {'parent': 'projects/sample1/locations/sample2'}
- request = request_type(**request_init)
- 
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- json_return_value = ''
- response_value.json = mock.Mock(return_value={})
- response_value.status_code = 400
- response_value.request = mock.Mock()
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- client.list_ssh_keys(request)
- 
- 
-@pytest.mark.parametrize("request_type", [
- ssh_key.ListSSHKeysRequest,
- dict,
-])
-def test_list_ssh_keys_rest_call_success(request_type):
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
- 
- # send a request that will satisfy transcoding
- request_init = {'parent': 'projects/sample1/locations/sample2'}
- request = request_type(**request_init)
- 
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = ssh_key.ListSSHKeysResponse(
- next_page_token='next_page_token_value',
- )
- 
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- response_value.status_code = 200
- 
- # Convert return value to protobuf type
- return_value = ssh_key.ListSSHKeysResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
- response_value.content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- response = client.list_ssh_keys(request)
- 
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListSSHKeysPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_ssh_keys_rest_interceptors(null_interceptor): - transport = transports.BareMetalSolutionRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BareMetalSolutionRestInterceptor(), - ) - client = BareMetalSolutionClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_list_ssh_keys") as post, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_list_ssh_keys_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "pre_list_ssh_keys") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = ssh_key.ListSSHKeysRequest.pb(ssh_key.ListSSHKeysRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = ssh_key.ListSSHKeysResponse.to_json(ssh_key.ListSSHKeysResponse()) - req.return_value.content = return_value - - request = ssh_key.ListSSHKeysRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = ssh_key.ListSSHKeysResponse() - post_with_metadata.return_value = ssh_key.ListSSHKeysResponse(), metadata - - client.list_ssh_keys(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_ssh_key_rest_bad_request(request_type=gcb_ssh_key.CreateSSHKeyRequest): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_ssh_key(request) - - -@pytest.mark.parametrize("request_type", [ - gcb_ssh_key.CreateSSHKeyRequest, - dict, -]) -def test_create_ssh_key_rest_call_success(request_type): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["ssh_key"] = {'name': 'name_value', 'public_key': 'public_key_value'} - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = gcb_ssh_key.CreateSSHKeyRequest.meta.fields["ssh_key"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["ssh_key"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["ssh_key"][field])): - del request_init["ssh_key"][field][i][subfield] - else: - del request_init["ssh_key"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = gcb_ssh_key.SSHKey( - name='name_value', - public_key='public_key_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gcb_ssh_key.SSHKey.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_ssh_key(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, gcb_ssh_key.SSHKey) - assert response.name == 'name_value' - assert response.public_key == 'public_key_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_ssh_key_rest_interceptors(null_interceptor): - transport = transports.BareMetalSolutionRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BareMetalSolutionRestInterceptor(), - ) - client = BareMetalSolutionClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_create_ssh_key") as post, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_create_ssh_key_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "pre_create_ssh_key") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = gcb_ssh_key.CreateSSHKeyRequest.pb(gcb_ssh_key.CreateSSHKeyRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = gcb_ssh_key.SSHKey.to_json(gcb_ssh_key.SSHKey()) - req.return_value.content = return_value - - request = gcb_ssh_key.CreateSSHKeyRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = gcb_ssh_key.SSHKey() - post_with_metadata.return_value = gcb_ssh_key.SSHKey(), metadata - - client.create_ssh_key(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_ssh_key_rest_bad_request(request_type=ssh_key.DeleteSSHKeyRequest): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/sshKeys/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_ssh_key(request) - - -@pytest.mark.parametrize("request_type", [ - ssh_key.DeleteSSHKeyRequest, - dict, -]) -def test_delete_ssh_key_rest_call_success(request_type): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/sshKeys/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_ssh_key(request) - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_ssh_key_rest_interceptors(null_interceptor): - transport = transports.BareMetalSolutionRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BareMetalSolutionRestInterceptor(), - ) - client = BareMetalSolutionClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "pre_delete_ssh_key") as pre: - pre.assert_not_called() - pb_message = ssh_key.DeleteSSHKeyRequest.pb(ssh_key.DeleteSSHKeyRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = ssh_key.DeleteSSHKeyRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_ssh_key(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_list_volumes_rest_bad_request(request_type=volume.ListVolumesRequest): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_volumes(request) - - -@pytest.mark.parametrize("request_type", [ - volume.ListVolumesRequest, - dict, -]) -def test_list_volumes_rest_call_success(request_type): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = volume.ListVolumesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = volume.ListVolumesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_volumes(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListVolumesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_volumes_rest_interceptors(null_interceptor): - transport = transports.BareMetalSolutionRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BareMetalSolutionRestInterceptor(), - ) - client = BareMetalSolutionClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_list_volumes") as post, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_list_volumes_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "pre_list_volumes") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = volume.ListVolumesRequest.pb(volume.ListVolumesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = volume.ListVolumesResponse.to_json(volume.ListVolumesResponse()) - req.return_value.content = return_value - - request = volume.ListVolumesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = volume.ListVolumesResponse() - post_with_metadata.return_value = volume.ListVolumesResponse(), metadata - - client.list_volumes(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_volume_rest_bad_request(request_type=volume.GetVolumeRequest): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/volumes/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_volume(request) - - -@pytest.mark.parametrize("request_type", [ - volume.GetVolumeRequest, - dict, -]) -def test_get_volume_rest_call_success(request_type): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/volumes/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = volume.Volume( - name='name_value', - id='id_value', - storage_type=volume.Volume.StorageType.SSD, - state=volume.Volume.State.CREATING, - requested_size_gib=1917, - originally_requested_size_gib=3094, - current_size_gib=1710, - emergency_size_gib=1898, - max_size_gib=1265, - auto_grown_size_gib=2032, - remaining_space_gib=1974, - snapshot_auto_delete_behavior=volume.Volume.SnapshotAutoDeleteBehavior.DISABLED, - snapshot_enabled=True, - pod='pod_value', - protocol=volume.Volume.Protocol.FIBRE_CHANNEL, - boot_volume=True, - performance_tier=common.VolumePerformanceTier.VOLUME_PERFORMANCE_TIER_SHARED, - notes='notes_value', - workload_profile=volume.Volume.WorkloadProfile.GENERIC, - instances=['instances_value'], - attached=True, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = volume.Volume.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_volume(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, volume.Volume) - assert response.name == 'name_value' - assert response.id == 'id_value' - assert response.storage_type == volume.Volume.StorageType.SSD - assert response.state == volume.Volume.State.CREATING - assert response.requested_size_gib == 1917 - assert response.originally_requested_size_gib == 3094 - assert response.current_size_gib == 1710 - assert response.emergency_size_gib == 1898 - assert response.max_size_gib == 1265 - assert response.auto_grown_size_gib == 2032 - assert response.remaining_space_gib == 1974 - assert response.snapshot_auto_delete_behavior == volume.Volume.SnapshotAutoDeleteBehavior.DISABLED - assert response.snapshot_enabled is True - assert response.pod == 'pod_value' - assert response.protocol == volume.Volume.Protocol.FIBRE_CHANNEL - assert response.boot_volume is True - assert response.performance_tier == common.VolumePerformanceTier.VOLUME_PERFORMANCE_TIER_SHARED - assert response.notes == 'notes_value' - assert response.workload_profile == volume.Volume.WorkloadProfile.GENERIC - assert response.instances == ['instances_value'] - assert response.attached is True - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_volume_rest_interceptors(null_interceptor): - transport = transports.BareMetalSolutionRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BareMetalSolutionRestInterceptor(), - ) - client = BareMetalSolutionClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_get_volume") as post, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_get_volume_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "pre_get_volume") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = volume.GetVolumeRequest.pb(volume.GetVolumeRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = volume.Volume.to_json(volume.Volume()) - req.return_value.content = return_value - - request = volume.GetVolumeRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = volume.Volume() - post_with_metadata.return_value = volume.Volume(), metadata - - client.get_volume(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_volume_rest_bad_request(request_type=gcb_volume.UpdateVolumeRequest): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'volume': {'name': 'projects/sample1/locations/sample2/volumes/sample3'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_volume(request) - - -@pytest.mark.parametrize("request_type", [ - gcb_volume.UpdateVolumeRequest, - dict, -]) -def test_update_volume_rest_call_success(request_type): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'volume': {'name': 'projects/sample1/locations/sample2/volumes/sample3'}} - request_init["volume"] = {'name': 'projects/sample1/locations/sample2/volumes/sample3', 'id': 'id_value', 'storage_type': 1, 'state': 1, 'requested_size_gib': 1917, 'originally_requested_size_gib': 3094, 'current_size_gib': 1710, 'emergency_size_gib': 1898, 'max_size_gib': 1265, 'auto_grown_size_gib': 2032, 'remaining_space_gib': 1974, 'snapshot_reservation_detail': {'reserved_space_gib': 1884, 'reserved_space_used_percent': 2859, 'reserved_space_remaining_gib': 2933, 'reserved_space_percent': 2331}, 'snapshot_auto_delete_behavior': 1, 'labels': {}, 'snapshot_enabled': True, 'pod': 'pod_value', 'protocol': 1, 'boot_volume': True, 'performance_tier': 1, 'notes': 'notes_value', 'workload_profile': 1, 'expire_time': {'seconds': 751, 'nanos': 543}, 'instances': ['instances_value1', 'instances_value2'], 'attached': True} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = gcb_volume.UpdateVolumeRequest.meta.fields["volume"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["volume"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["volume"][field])): - del request_init["volume"][field][i][subfield] - else: - del request_init["volume"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_volume(request) - - # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_volume_rest_interceptors(null_interceptor): - transport = transports.BareMetalSolutionRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BareMetalSolutionRestInterceptor(), - ) - client = BareMetalSolutionClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_update_volume") as post, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_update_volume_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "pre_update_volume") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = gcb_volume.UpdateVolumeRequest.pb(gcb_volume.UpdateVolumeRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = gcb_volume.UpdateVolumeRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.update_volume(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_rename_volume_rest_bad_request(request_type=volume.RenameVolumeRequest): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/volumes/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.rename_volume(request) - - -@pytest.mark.parametrize("request_type", [ - volume.RenameVolumeRequest, - dict, -]) -def test_rename_volume_rest_call_success(request_type): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/volumes/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = volume.Volume(
- name='name_value',
- id='id_value',
- storage_type=volume.Volume.StorageType.SSD,
- state=volume.Volume.State.CREATING,
- requested_size_gib=1917,
- originally_requested_size_gib=3094,
- current_size_gib=1710,
- emergency_size_gib=1898,
- max_size_gib=1265,
- auto_grown_size_gib=2032,
- remaining_space_gib=1974,
- snapshot_auto_delete_behavior=volume.Volume.SnapshotAutoDeleteBehavior.DISABLED,
- snapshot_enabled=True,
- pod='pod_value',
- protocol=volume.Volume.Protocol.FIBRE_CHANNEL,
- boot_volume=True,
- performance_tier=common.VolumePerformanceTier.VOLUME_PERFORMANCE_TIER_SHARED,
- notes='notes_value',
- workload_profile=volume.Volume.WorkloadProfile.GENERIC,
- instances=['instances_value'],
- attached=True,
- )
- 
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- response_value.status_code = 200
- 
- # Convert return value to protobuf type
- return_value = volume.Volume.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
- response_value.content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- response = client.rename_volume(request)
- 
- # Establish that the response is the type that we expect.
- assert isinstance(response, volume.Volume)
- assert response.name == 'name_value'
- assert response.id == 'id_value'
- assert response.storage_type == volume.Volume.StorageType.SSD
- assert response.state == volume.Volume.State.CREATING
- assert response.requested_size_gib == 1917
- assert response.originally_requested_size_gib == 3094
- assert response.current_size_gib == 1710
- assert response.emergency_size_gib == 1898
- assert response.max_size_gib == 1265
- assert response.auto_grown_size_gib == 2032
- assert response.remaining_space_gib == 1974
- assert response.snapshot_auto_delete_behavior == volume.Volume.SnapshotAutoDeleteBehavior.DISABLED
- assert response.snapshot_enabled is True
- assert response.pod == 'pod_value'
- assert response.protocol == volume.Volume.Protocol.FIBRE_CHANNEL
- assert response.boot_volume is True
- assert response.performance_tier == common.VolumePerformanceTier.VOLUME_PERFORMANCE_TIER_SHARED
- assert response.notes == 'notes_value'
- assert response.workload_profile == volume.Volume.WorkloadProfile.GENERIC
- assert response.instances == ['instances_value']
- assert response.attached is True
- 
- 
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_rename_volume_rest_interceptors(null_interceptor):
- transport = transports.BareMetalSolutionRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.BareMetalSolutionRestInterceptor(),
- )
- client = BareMetalSolutionClient(transport=transport)
- 
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_rename_volume") as post, \
- mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_rename_volume_with_metadata") as post_with_metadata, \
- mock.patch.object(transports.BareMetalSolutionRestInterceptor, "pre_rename_volume") as pre:
- pre.assert_not_called()
- post.assert_not_called()
- post_with_metadata.assert_not_called()
- pb_message = volume.RenameVolumeRequest.pb(volume.RenameVolumeRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
- 
- req.return_value = mock.Mock()
- req.return_value.status_code = 200
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- return_value = volume.Volume.to_json(volume.Volume())
- req.return_value.content = return_value
- 
- request = volume.RenameVolumeRequest()
- metadata =[
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = volume.Volume()
- post_with_metadata.return_value = volume.Volume(), metadata
- 
- client.rename_volume(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
- 
- pre.assert_called_once()
- post.assert_called_once()
- post_with_metadata.assert_called_once()
- 
- 
-def test_evict_volume_rest_bad_request(request_type=volume.EvictVolumeRequest):
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
- # send a request that will satisfy transcoding
- request_init = {'name': 'projects/sample1/locations/sample2/volumes/sample3'}
- request = request_type(**request_init)
- 
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- json_return_value = ''
- response_value.json = mock.Mock(return_value={})
- response_value.status_code = 400
- response_value.request = mock.Mock()
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- client.evict_volume(request)
- 
- 
-@pytest.mark.parametrize("request_type", [
- volume.EvictVolumeRequest,
- dict,
-])
-def test_evict_volume_rest_call_success(request_type):
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
- 
- # send a request that will satisfy transcoding
- request_init = {'name': 'projects/sample1/locations/sample2/volumes/sample3'}
- request = request_type(**request_init)
- 
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = operations_pb2.Operation(name='operations/spam')
- 
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
- response_value.content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- response = client.evict_volume(request)
- 
- # Establish that the response is the type that we expect.
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_evict_volume_rest_interceptors(null_interceptor): - transport = transports.BareMetalSolutionRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BareMetalSolutionRestInterceptor(), - ) - client = BareMetalSolutionClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_evict_volume") as post, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_evict_volume_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "pre_evict_volume") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = volume.EvictVolumeRequest.pb(volume.EvictVolumeRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = volume.EvictVolumeRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.evict_volume(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_resize_volume_rest_bad_request(request_type=gcb_volume.ResizeVolumeRequest): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'volume': 'projects/sample1/locations/sample2/volumes/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.resize_volume(request) - - -@pytest.mark.parametrize("request_type", [ - gcb_volume.ResizeVolumeRequest, - dict, -]) -def test_resize_volume_rest_call_success(request_type): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'volume': 'projects/sample1/locations/sample2/volumes/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.resize_volume(request) - - # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_resize_volume_rest_interceptors(null_interceptor): - transport = transports.BareMetalSolutionRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BareMetalSolutionRestInterceptor(), - ) - client = BareMetalSolutionClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_resize_volume") as post, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_resize_volume_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "pre_resize_volume") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = gcb_volume.ResizeVolumeRequest.pb(gcb_volume.ResizeVolumeRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = gcb_volume.ResizeVolumeRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.resize_volume(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_networks_rest_bad_request(request_type=network.ListNetworksRequest): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_networks(request) - - -@pytest.mark.parametrize("request_type", [ - network.ListNetworksRequest, - dict, -]) -def test_list_networks_rest_call_success(request_type): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = network.ListNetworksResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = network.ListNetworksResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_networks(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListNetworksPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_networks_rest_interceptors(null_interceptor): - transport = transports.BareMetalSolutionRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BareMetalSolutionRestInterceptor(), - ) - client = BareMetalSolutionClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_list_networks") as post, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_list_networks_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "pre_list_networks") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = network.ListNetworksRequest.pb(network.ListNetworksRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = network.ListNetworksResponse.to_json(network.ListNetworksResponse()) - req.return_value.content = return_value - - request = network.ListNetworksRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = network.ListNetworksResponse() - post_with_metadata.return_value = network.ListNetworksResponse(), metadata - - client.list_networks(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_network_usage_rest_bad_request(request_type=network.ListNetworkUsageRequest): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'location': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_network_usage(request) - - -@pytest.mark.parametrize("request_type", [ - network.ListNetworkUsageRequest, - dict, -]) -def test_list_network_usage_rest_call_success(request_type): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'location': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = network.ListNetworkUsageResponse( - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = network.ListNetworkUsageResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_network_usage(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, network.ListNetworkUsageResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_network_usage_rest_interceptors(null_interceptor): - transport = transports.BareMetalSolutionRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BareMetalSolutionRestInterceptor(), - ) - client = BareMetalSolutionClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_list_network_usage") as post, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_list_network_usage_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "pre_list_network_usage") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = network.ListNetworkUsageRequest.pb(network.ListNetworkUsageRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = network.ListNetworkUsageResponse.to_json(network.ListNetworkUsageResponse()) - req.return_value.content = return_value - - request = network.ListNetworkUsageRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = network.ListNetworkUsageResponse() - post_with_metadata.return_value = network.ListNetworkUsageResponse(), metadata - - client.list_network_usage(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_network_rest_bad_request(request_type=network.GetNetworkRequest): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/networks/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_network(request) - - -@pytest.mark.parametrize("request_type", [ - network.GetNetworkRequest, - dict, -]) -def test_get_network_rest_call_success(request_type): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/networks/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = network.Network( - name='name_value', - id='id_value', - type_=network.Network.Type.CLIENT, - ip_address='ip_address_value', - mac_address=['mac_address_value'], - state=network.Network.State.PROVISIONING, - vlan_id='vlan_id_value', - cidr='cidr_value', - services_cidr='services_cidr_value', - pod='pod_value', - jumbo_frames_enabled=True, - gateway_ip='gateway_ip_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = network.Network.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_network(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, network.Network) - assert response.name == 'name_value' - assert response.id == 'id_value' - assert response.type_ == network.Network.Type.CLIENT - assert response.ip_address == 'ip_address_value' - assert response.mac_address == ['mac_address_value'] - assert response.state == network.Network.State.PROVISIONING - assert response.vlan_id == 'vlan_id_value' - assert response.cidr == 'cidr_value' - assert response.services_cidr == 'services_cidr_value' - assert response.pod == 'pod_value' - assert response.jumbo_frames_enabled is True - assert response.gateway_ip == 'gateway_ip_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_network_rest_interceptors(null_interceptor): - transport = transports.BareMetalSolutionRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BareMetalSolutionRestInterceptor(), - ) - client = BareMetalSolutionClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_get_network") as post, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_get_network_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "pre_get_network") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = network.GetNetworkRequest.pb(network.GetNetworkRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = network.Network.to_json(network.Network()) - req.return_value.content = return_value - - request = network.GetNetworkRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = network.Network() - post_with_metadata.return_value = network.Network(), metadata - - client.get_network(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_network_rest_bad_request(request_type=gcb_network.UpdateNetworkRequest): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'network': {'name': 'projects/sample1/locations/sample2/networks/sample3'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
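- # Note: the bad-request tests patch requests' Session.request at the class
- # level, while the success tests patch the concrete transport session
- # instance; either interception point works because the REST transport
- # ultimately sends every call through a requests.Session.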
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_network(request) - - -@pytest.mark.parametrize("request_type", [ - gcb_network.UpdateNetworkRequest, - dict, -]) -def test_update_network_rest_call_success(request_type): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'network': {'name': 'projects/sample1/locations/sample2/networks/sample3'}} - request_init["network"] = {'name': 'projects/sample1/locations/sample2/networks/sample3', 'id': 'id_value', 'type_': 1, 'ip_address': 'ip_address_value', 'mac_address': ['mac_address_value1', 'mac_address_value2'], 'state': 1, 'vlan_id': 'vlan_id_value', 'cidr': 'cidr_value', 'vrf': {'name': 'name_value', 'state': 1, 'qos_policy': {'bandwidth_gbps': 0.1472}, 'vlan_attachments': [{'peer_vlan_id': 1256, 'peer_ip': 'peer_ip_value', 'router_ip': 'router_ip_value', 'pairing_key': 'pairing_key_value', 'qos_policy': {}, 'id': 'id_value', 'interconnect_attachment': 'interconnect_attachment_value'}]}, 'labels': {}, 'services_cidr': 'services_cidr_value', 'reservations': [{'start_address': 'start_address_value', 'end_address': 'end_address_value', 'note': 'note_value'}], 'pod': 'pod_value', 'mount_points': [{'instance': 'instance_value', 'logical_interface': 'logical_interface_value', 'default_gateway': True, 'ip_address': 'ip_address_value'}], 'jumbo_frames_enabled': True, 'gateway_ip': 'gateway_ip_value'} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = gcb_network.UpdateNetworkRequest.meta.fields["network"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
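- # For example, for the proto-plus composite field `Network.vrf` this
- # returns the fields of the VRF message, while a scalar field such as
- # `Network.cidr` yields an empty list.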
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["network"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["network"][field])): - del request_init["network"][field][i][subfield] - else: - del request_init["network"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_network(request) - - # Establish that the response is the type that we expect. 
- # update_network is long-running: the client returns an Operation future
- # wrapping the mocked LRO payload.
- assert response.operation.name == 'operations/spam'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_update_network_rest_interceptors(null_interceptor):
- transport = transports.BareMetalSolutionRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.BareMetalSolutionRestInterceptor(),
- )
- client = BareMetalSolutionClient(transport=transport)
-
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(operation.Operation, "_set_result_from_operation"), \
- mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_update_network") as post, \
- mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_update_network_with_metadata") as post_with_metadata, \
- mock.patch.object(transports.BareMetalSolutionRestInterceptor, "pre_update_network") as pre:
- pre.assert_not_called()
- post.assert_not_called()
- post_with_metadata.assert_not_called()
- pb_message = gcb_network.UpdateNetworkRequest.pb(gcb_network.UpdateNetworkRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = mock.Mock()
- req.return_value.status_code = 200
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- return_value = json_format.MessageToJson(operations_pb2.Operation())
- req.return_value.content = return_value
-
- request = gcb_network.UpdateNetworkRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = operations_pb2.Operation()
- post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
- client.update_network(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
- pre.assert_called_once()
- post.assert_called_once()
- post_with_metadata.assert_called_once()
-
-
-def test_create_volume_snapshot_rest_bad_request(request_type=gcb_volume_snapshot.CreateVolumeSnapshotRequest):
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
- # send a request that will satisfy transcoding
- request_init = {'parent': 'projects/sample1/locations/sample2/volumes/sample3'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_volume_snapshot(request) - - -@pytest.mark.parametrize("request_type", [ - gcb_volume_snapshot.CreateVolumeSnapshotRequest, - dict, -]) -def test_create_volume_snapshot_rest_call_success(request_type): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/volumes/sample3'} - request_init["volume_snapshot"] = {'name': 'name_value', 'id': 'id_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'storage_volume': 'storage_volume_value', 'type_': 1} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = gcb_volume_snapshot.CreateVolumeSnapshotRequest.meta.fields["volume_snapshot"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["volume_snapshot"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = 
subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["volume_snapshot"][field])): - del request_init["volume_snapshot"][field][i][subfield] - else: - del request_init["volume_snapshot"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = gcb_volume_snapshot.VolumeSnapshot( - name='name_value', - id='id_value', - description='description_value', - storage_volume='storage_volume_value', - type_=gcb_volume_snapshot.VolumeSnapshot.SnapshotType.AD_HOC, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gcb_volume_snapshot.VolumeSnapshot.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_volume_snapshot(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, gcb_volume_snapshot.VolumeSnapshot) - assert response.name == 'name_value' - assert response.id == 'id_value' - assert response.description == 'description_value' - assert response.storage_volume == 'storage_volume_value' - assert response.type_ == gcb_volume_snapshot.VolumeSnapshot.SnapshotType.AD_HOC - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_volume_snapshot_rest_interceptors(null_interceptor): - transport = transports.BareMetalSolutionRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BareMetalSolutionRestInterceptor(), - ) - client = BareMetalSolutionClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_create_volume_snapshot") as post, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_create_volume_snapshot_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "pre_create_volume_snapshot") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = gcb_volume_snapshot.CreateVolumeSnapshotRequest.pb(gcb_volume_snapshot.CreateVolumeSnapshotRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = gcb_volume_snapshot.VolumeSnapshot.to_json(gcb_volume_snapshot.VolumeSnapshot()) - req.return_value.content = return_value - - request = gcb_volume_snapshot.CreateVolumeSnapshotRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = gcb_volume_snapshot.VolumeSnapshot() - post_with_metadata.return_value = gcb_volume_snapshot.VolumeSnapshot(), metadata - - client.create_volume_snapshot(request, metadata=[("key", "val"), 
("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_restore_volume_snapshot_rest_bad_request(request_type=gcb_volume_snapshot.RestoreVolumeSnapshotRequest): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'volume_snapshot': 'projects/sample1/locations/sample2/volumes/sample3/snapshots/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.restore_volume_snapshot(request) - - -@pytest.mark.parametrize("request_type", [ - gcb_volume_snapshot.RestoreVolumeSnapshotRequest, - dict, -]) -def test_restore_volume_snapshot_rest_call_success(request_type): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'volume_snapshot': 'projects/sample1/locations/sample2/volumes/sample3/snapshots/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.restore_volume_snapshot(request) - - # Establish that the response is the type that we expect. 
- # restore_volume_snapshot is long-running: the client returns an Operation
- # future wrapping the mocked LRO payload.
- assert response.operation.name == 'operations/spam'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_restore_volume_snapshot_rest_interceptors(null_interceptor):
- transport = transports.BareMetalSolutionRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.BareMetalSolutionRestInterceptor(),
- )
- client = BareMetalSolutionClient(transport=transport)
-
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(operation.Operation, "_set_result_from_operation"), \
- mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_restore_volume_snapshot") as post, \
- mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_restore_volume_snapshot_with_metadata") as post_with_metadata, \
- mock.patch.object(transports.BareMetalSolutionRestInterceptor, "pre_restore_volume_snapshot") as pre:
- pre.assert_not_called()
- post.assert_not_called()
- post_with_metadata.assert_not_called()
- pb_message = gcb_volume_snapshot.RestoreVolumeSnapshotRequest.pb(gcb_volume_snapshot.RestoreVolumeSnapshotRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = mock.Mock()
- req.return_value.status_code = 200
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- return_value = json_format.MessageToJson(operations_pb2.Operation())
- req.return_value.content = return_value
-
- request = gcb_volume_snapshot.RestoreVolumeSnapshotRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = operations_pb2.Operation()
- post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
- client.restore_volume_snapshot(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
- pre.assert_called_once()
- post.assert_called_once()
- post_with_metadata.assert_called_once()
-
-
-def test_delete_volume_snapshot_rest_bad_request(request_type=volume_snapshot.DeleteVolumeSnapshotRequest):
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
- # send a request that will satisfy transcoding
- request_init = {'name': 'projects/sample1/locations/sample2/volumes/sample3/snapshots/sample4'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_volume_snapshot(request) - - -@pytest.mark.parametrize("request_type", [ - volume_snapshot.DeleteVolumeSnapshotRequest, - dict, -]) -def test_delete_volume_snapshot_rest_call_success(request_type): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/volumes/sample3/snapshots/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_volume_snapshot(request) - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_volume_snapshot_rest_interceptors(null_interceptor): - transport = transports.BareMetalSolutionRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BareMetalSolutionRestInterceptor(), - ) - client = BareMetalSolutionClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "pre_delete_volume_snapshot") as pre: - pre.assert_not_called() - pb_message = volume_snapshot.DeleteVolumeSnapshotRequest.pb(volume_snapshot.DeleteVolumeSnapshotRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = volume_snapshot.DeleteVolumeSnapshotRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_volume_snapshot(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_get_volume_snapshot_rest_bad_request(request_type=volume_snapshot.GetVolumeSnapshotRequest): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/volumes/sample3/snapshots/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
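- # The mocked response carries a .request attribute because the error path
- # formats its message from response.request.method and .url when building
- # the exception (see google.api_core.exceptions.from_http_response).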
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_volume_snapshot(request) - - -@pytest.mark.parametrize("request_type", [ - volume_snapshot.GetVolumeSnapshotRequest, - dict, -]) -def test_get_volume_snapshot_rest_call_success(request_type): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/volumes/sample3/snapshots/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = volume_snapshot.VolumeSnapshot( - name='name_value', - id='id_value', - description='description_value', - storage_volume='storage_volume_value', - type_=volume_snapshot.VolumeSnapshot.SnapshotType.AD_HOC, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = volume_snapshot.VolumeSnapshot.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_volume_snapshot(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, volume_snapshot.VolumeSnapshot) - assert response.name == 'name_value' - assert response.id == 'id_value' - assert response.description == 'description_value' - assert response.storage_volume == 'storage_volume_value' - assert response.type_ == volume_snapshot.VolumeSnapshot.SnapshotType.AD_HOC - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_volume_snapshot_rest_interceptors(null_interceptor): - transport = transports.BareMetalSolutionRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BareMetalSolutionRestInterceptor(), - ) - client = BareMetalSolutionClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_get_volume_snapshot") as post, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_get_volume_snapshot_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "pre_get_volume_snapshot") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = volume_snapshot.GetVolumeSnapshotRequest.pb(volume_snapshot.GetVolumeSnapshotRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = volume_snapshot.VolumeSnapshot.to_json(volume_snapshot.VolumeSnapshot()) - req.return_value.content = return_value - - request = volume_snapshot.GetVolumeSnapshotRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = volume_snapshot.VolumeSnapshot() - post_with_metadata.return_value = volume_snapshot.VolumeSnapshot(), metadata - - client.get_volume_snapshot(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_volume_snapshots_rest_bad_request(request_type=volume_snapshot.ListVolumeSnapshotsRequest): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/volumes/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_volume_snapshots(request) - - -@pytest.mark.parametrize("request_type", [ - volume_snapshot.ListVolumeSnapshotsRequest, - dict, -]) -def test_list_volume_snapshots_rest_call_success(request_type): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/volumes/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = volume_snapshot.ListVolumeSnapshotsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = volume_snapshot.ListVolumeSnapshotsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_volume_snapshots(request) - - # Establish that the response is the type that we expect. 
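- # list_volume_snapshots wraps the raw ListVolumeSnapshotsResponse in a
- # pager, which is why the first assert below checks the pager type;
- # attributes such as next_page_token are delegated to the underlying
- # response object.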
- assert isinstance(response, pagers.ListVolumeSnapshotsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_volume_snapshots_rest_interceptors(null_interceptor): - transport = transports.BareMetalSolutionRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BareMetalSolutionRestInterceptor(), - ) - client = BareMetalSolutionClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_list_volume_snapshots") as post, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_list_volume_snapshots_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "pre_list_volume_snapshots") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = volume_snapshot.ListVolumeSnapshotsRequest.pb(volume_snapshot.ListVolumeSnapshotsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = volume_snapshot.ListVolumeSnapshotsResponse.to_json(volume_snapshot.ListVolumeSnapshotsResponse()) - req.return_value.content = return_value - - request = volume_snapshot.ListVolumeSnapshotsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = volume_snapshot.ListVolumeSnapshotsResponse() - post_with_metadata.return_value = volume_snapshot.ListVolumeSnapshotsResponse(), metadata - - client.list_volume_snapshots(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_lun_rest_bad_request(request_type=lun.GetLunRequest): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/volumes/sample3/luns/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_lun(request) - - -@pytest.mark.parametrize("request_type", [ - lun.GetLunRequest, - dict, -]) -def test_get_lun_rest_call_success(request_type): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/volumes/sample3/luns/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = lun.Lun( - name='name_value', - id='id_value', - state=lun.Lun.State.CREATING, - size_gb=739, - multiprotocol_type=lun.Lun.MultiprotocolType.LINUX, - storage_volume='storage_volume_value', - shareable=True, - boot_lun=True, - storage_type=lun.Lun.StorageType.SSD, - wwid='wwid_value', - instances=['instances_value'], - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = lun.Lun.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_lun(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, lun.Lun) - assert response.name == 'name_value' - assert response.id == 'id_value' - assert response.state == lun.Lun.State.CREATING - assert response.size_gb == 739 - assert response.multiprotocol_type == lun.Lun.MultiprotocolType.LINUX - assert response.storage_volume == 'storage_volume_value' - assert response.shareable is True - assert response.boot_lun is True - assert response.storage_type == lun.Lun.StorageType.SSD - assert response.wwid == 'wwid_value' - assert response.instances == ['instances_value'] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_lun_rest_interceptors(null_interceptor): - transport = transports.BareMetalSolutionRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BareMetalSolutionRestInterceptor(), - ) - client = BareMetalSolutionClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_get_lun") as post, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_get_lun_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "pre_get_lun") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = lun.GetLunRequest.pb(lun.GetLunRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = lun.Lun.to_json(lun.Lun()) - req.return_value.content = return_value - - request = lun.GetLunRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = lun.Lun() - post_with_metadata.return_value = lun.Lun(), metadata - - client.get_lun(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_luns_rest_bad_request(request_type=lun.ListLunsRequest): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/volumes/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
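- # In the error path below, response_value.json returning {} means
- # from_http_response finds no "error" payload and falls back to its generic
- # "unknown error" message; the test only cares that BadRequest is raised.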
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_luns(request) - - -@pytest.mark.parametrize("request_type", [ - lun.ListLunsRequest, - dict, -]) -def test_list_luns_rest_call_success(request_type): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/volumes/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = lun.ListLunsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = lun.ListLunsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_luns(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListLunsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_luns_rest_interceptors(null_interceptor): - transport = transports.BareMetalSolutionRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BareMetalSolutionRestInterceptor(), - ) - client = BareMetalSolutionClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_list_luns") as post, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_list_luns_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "pre_list_luns") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = lun.ListLunsRequest.pb(lun.ListLunsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = lun.ListLunsResponse.to_json(lun.ListLunsResponse()) - req.return_value.content = return_value - - request = lun.ListLunsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = lun.ListLunsResponse() - post_with_metadata.return_value = lun.ListLunsResponse(), 
metadata - - client.list_luns(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_evict_lun_rest_bad_request(request_type=lun.EvictLunRequest): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/volumes/sample3/luns/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.evict_lun(request) - - -@pytest.mark.parametrize("request_type", [ - lun.EvictLunRequest, - dict, -]) -def test_evict_lun_rest_call_success(request_type): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/volumes/sample3/luns/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.evict_lun(request) - - # Establish that the response is the type that we expect. 
- # evict_lun is long-running: the client returns an Operation future
- # wrapping the mocked LRO payload.
- assert response.operation.name == 'operations/spam'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_evict_lun_rest_interceptors(null_interceptor):
- transport = transports.BareMetalSolutionRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.BareMetalSolutionRestInterceptor(),
- )
- client = BareMetalSolutionClient(transport=transport)
-
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(operation.Operation, "_set_result_from_operation"), \
- mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_evict_lun") as post, \
- mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_evict_lun_with_metadata") as post_with_metadata, \
- mock.patch.object(transports.BareMetalSolutionRestInterceptor, "pre_evict_lun") as pre:
- pre.assert_not_called()
- post.assert_not_called()
- post_with_metadata.assert_not_called()
- pb_message = lun.EvictLunRequest.pb(lun.EvictLunRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = mock.Mock()
- req.return_value.status_code = 200
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- return_value = json_format.MessageToJson(operations_pb2.Operation())
- req.return_value.content = return_value
-
- request = lun.EvictLunRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = operations_pb2.Operation()
- post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
- client.evict_lun(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
- pre.assert_called_once()
- post.assert_called_once()
- post_with_metadata.assert_called_once()
-
-
-def test_get_nfs_share_rest_bad_request(request_type=nfs_share.GetNfsShareRequest):
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
- # send a request that will satisfy transcoding
- request_init = {'name': 'projects/sample1/locations/sample2/nfsShares/sample3'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- json_return_value = ''
- response_value.json = mock.Mock(return_value={})
- response_value.status_code = 400
- response_value.request = mock.Mock()
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- client.get_nfs_share(request)
-
-
-@pytest.mark.parametrize("request_type", [
- nfs_share.GetNfsShareRequest,
- dict,
-])
-def test_get_nfs_share_rest_call_success(request_type):
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
-
- # send a request that will satisfy transcoding
- request_init = {'name': 'projects/sample1/locations/sample2/nfsShares/sample3'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
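- # The '_value' strings and numbers below are arbitrary placeholders; the
- # flow being simulated is roughly:
- #
- #     pb = nfs_share.NfsShare.pb(return_value)   # proto-plus -> raw proto
- #     body = json_format.MessageToJson(pb)       # what the server would send
- #
- # and the transport parses `body` back into the NfsShare asserted on below.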
- return_value = nfs_share.NfsShare( - name='name_value', - nfs_share_id='nfs_share_id_value', - id='id_value', - state=nfs_share.NfsShare.State.PROVISIONED, - volume='volume_value', - requested_size_gib=1917, - storage_type=nfs_share.NfsShare.StorageType.SSD, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = nfs_share.NfsShare.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_nfs_share(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, nfs_share.NfsShare) - assert response.name == 'name_value' - assert response.nfs_share_id == 'nfs_share_id_value' - assert response.id == 'id_value' - assert response.state == nfs_share.NfsShare.State.PROVISIONED - assert response.volume == 'volume_value' - assert response.requested_size_gib == 1917 - assert response.storage_type == nfs_share.NfsShare.StorageType.SSD - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_nfs_share_rest_interceptors(null_interceptor): - transport = transports.BareMetalSolutionRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BareMetalSolutionRestInterceptor(), - ) - client = BareMetalSolutionClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_get_nfs_share") as post, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_get_nfs_share_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "pre_get_nfs_share") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = nfs_share.GetNfsShareRequest.pb(nfs_share.GetNfsShareRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = nfs_share.NfsShare.to_json(nfs_share.NfsShare()) - req.return_value.content = return_value - - request = nfs_share.GetNfsShareRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = nfs_share.NfsShare() - post_with_metadata.return_value = nfs_share.NfsShare(), metadata - - client.get_nfs_share(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_nfs_shares_rest_bad_request(request_type=nfs_share.ListNfsSharesRequest): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_nfs_shares(request) - - -@pytest.mark.parametrize("request_type", [ - nfs_share.ListNfsSharesRequest, - dict, -]) -def test_list_nfs_shares_rest_call_success(request_type): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = nfs_share.ListNfsSharesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = nfs_share.ListNfsSharesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_nfs_shares(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListNfsSharesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_nfs_shares_rest_interceptors(null_interceptor): - transport = transports.BareMetalSolutionRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BareMetalSolutionRestInterceptor(), - ) - client = BareMetalSolutionClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_list_nfs_shares") as post, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_list_nfs_shares_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "pre_list_nfs_shares") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = nfs_share.ListNfsSharesRequest.pb(nfs_share.ListNfsSharesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = nfs_share.ListNfsSharesResponse.to_json(nfs_share.ListNfsSharesResponse()) - req.return_value.content = return_value - - request = nfs_share.ListNfsSharesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = nfs_share.ListNfsSharesResponse() - post_with_metadata.return_value = nfs_share.ListNfsSharesResponse(), metadata - - client.list_nfs_shares(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_nfs_share_rest_bad_request(request_type=gcb_nfs_share.UpdateNfsShareRequest): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'nfs_share': {'name': 'projects/sample1/locations/sample2/nfsShares/sample3'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
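- # For update methods the resource name lives inside the request body, so
- # the route is transcoded from the nested field (roughly a pattern like
- # {nfs_share.name=projects/*/locations/*/nfsShares/*}), which is why
- # request_init nests 'name' under 'nfs_share'.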
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_nfs_share(request) - - -@pytest.mark.parametrize("request_type", [ - gcb_nfs_share.UpdateNfsShareRequest, - dict, -]) -def test_update_nfs_share_rest_call_success(request_type): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'nfs_share': {'name': 'projects/sample1/locations/sample2/nfsShares/sample3'}} - request_init["nfs_share"] = {'name': 'projects/sample1/locations/sample2/nfsShares/sample3', 'nfs_share_id': 'nfs_share_id_value', 'id': 'id_value', 'state': 1, 'volume': 'volume_value', 'allowed_clients': [{'network': 'network_value', 'share_ip': 'share_ip_value', 'allowed_clients_cidr': 'allowed_clients_cidr_value', 'mount_permissions': 1, 'allow_dev': True, 'allow_suid': True, 'no_root_squash': True, 'nfs_path': 'nfs_path_value'}], 'labels': {}, 'requested_size_gib': 1917, 'storage_type': 1} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = gcb_nfs_share.UpdateNfsShareRequest.meta.fields["nfs_share"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
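# The branch below tells the two runtime representations apart: a proto-plus
# message class carries its field map on `.meta.fields`, while a vanilla
# protobuf (*_pb2) class exposes a generated `DESCRIPTOR`. Both branches
# yield the same logical list of fields; only the attribute lookup differs.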
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["nfs_share"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["nfs_share"][field])): - del request_init["nfs_share"][field][i][subfield] - else: - del request_init["nfs_share"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_nfs_share(request) - - # Establish that the response is the type that we expect. 
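# Note on the line below: update_nfs_share is a long-running method, so the
# client wraps the mocked Operation proto in a google.api_core.operation
# future. The generated check only re-serializes the mock; a stricter
# assertion (a sketch, not part of the generated output) might be:
#
#     assert response.operation.name == 'operations/spam'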
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_nfs_share_rest_interceptors(null_interceptor): - transport = transports.BareMetalSolutionRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BareMetalSolutionRestInterceptor(), - ) - client = BareMetalSolutionClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_update_nfs_share") as post, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_update_nfs_share_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "pre_update_nfs_share") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = gcb_nfs_share.UpdateNfsShareRequest.pb(gcb_nfs_share.UpdateNfsShareRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = gcb_nfs_share.UpdateNfsShareRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.update_nfs_share(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_nfs_share_rest_bad_request(request_type=gcb_nfs_share.CreateNfsShareRequest): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_nfs_share(request) - - -@pytest.mark.parametrize("request_type", [ - gcb_nfs_share.CreateNfsShareRequest, - dict, -]) -def test_create_nfs_share_rest_call_success(request_type): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["nfs_share"] = {'name': 'name_value', 'nfs_share_id': 'nfs_share_id_value', 'id': 'id_value', 'state': 1, 'volume': 'volume_value', 'allowed_clients': [{'network': 'network_value', 'share_ip': 'share_ip_value', 'allowed_clients_cidr': 'allowed_clients_cidr_value', 'mount_permissions': 1, 'allow_dev': True, 'allow_suid': True, 'no_root_squash': True, 'nfs_path': 'nfs_path_value'}], 'labels': {}, 'requested_size_gib': 1917, 'storage_type': 1} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = gcb_nfs_share.CreateNfsShareRequest.meta.fields["nfs_share"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["nfs_share"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["nfs_share"][field])): - del request_init["nfs_share"][field][i][subfield] - else: - del request_init["nfs_share"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_nfs_share(request) - - # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_nfs_share_rest_interceptors(null_interceptor): - transport = transports.BareMetalSolutionRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BareMetalSolutionRestInterceptor(), - ) - client = BareMetalSolutionClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_create_nfs_share") as post, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_create_nfs_share_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "pre_create_nfs_share") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = gcb_nfs_share.CreateNfsShareRequest.pb(gcb_nfs_share.CreateNfsShareRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = gcb_nfs_share.CreateNfsShareRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.create_nfs_share(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_rename_nfs_share_rest_bad_request(request_type=nfs_share.RenameNfsShareRequest): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/nfsShares/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.rename_nfs_share(request) - - -@pytest.mark.parametrize("request_type", [ - nfs_share.RenameNfsShareRequest, - dict, -]) -def test_rename_nfs_share_rest_call_success(request_type): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/nfsShares/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
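# The success tests in this file all follow one round trip: build the
# proto-plus message, drop to raw protobuf with .pb(), serialize that to
# JSON, and hand the bytes back as the fake HTTP body, which the REST
# transport then parses into the documented response type. The conversion in
# isolation:
#
#     raw = nfs_share.NfsShare.pb(nfs_share.NfsShare(name='n'))
#     body = json_format.MessageToJson(raw).encode('UTF-8')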
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = nfs_share.NfsShare(
- name='name_value',
- nfs_share_id='nfs_share_id_value',
- id='id_value',
- state=nfs_share.NfsShare.State.PROVISIONED,
- volume='volume_value',
- requested_size_gib=1917,
- storage_type=nfs_share.NfsShare.StorageType.SSD,
- )
-
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- response_value.status_code = 200
-
- # Convert return value to protobuf type
- return_value = nfs_share.NfsShare.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
- response_value.content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- response = client.rename_nfs_share(request)
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, nfs_share.NfsShare)
- assert response.name == 'name_value'
- assert response.nfs_share_id == 'nfs_share_id_value'
- assert response.id == 'id_value'
- assert response.state == nfs_share.NfsShare.State.PROVISIONED
- assert response.volume == 'volume_value'
- assert response.requested_size_gib == 1917
- assert response.storage_type == nfs_share.NfsShare.StorageType.SSD
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_rename_nfs_share_rest_interceptors(null_interceptor):
- transport = transports.BareMetalSolutionRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.BareMetalSolutionRestInterceptor(),
- )
- client = BareMetalSolutionClient(transport=transport)
-
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_rename_nfs_share") as post, \
- mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_rename_nfs_share_with_metadata") as post_with_metadata, \
- mock.patch.object(transports.BareMetalSolutionRestInterceptor, "pre_rename_nfs_share") as pre:
- pre.assert_not_called()
- post.assert_not_called()
- post_with_metadata.assert_not_called()
- pb_message = nfs_share.RenameNfsShareRequest.pb(nfs_share.RenameNfsShareRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = mock.Mock()
- req.return_value.status_code = 200
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- return_value = nfs_share.NfsShare.to_json(nfs_share.NfsShare())
- req.return_value.content = return_value
-
- request = nfs_share.RenameNfsShareRequest()
- metadata =[
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = nfs_share.NfsShare()
- post_with_metadata.return_value = nfs_share.NfsShare(), metadata
-
- client.rename_nfs_share(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
- pre.assert_called_once()
- post.assert_called_once()
- post_with_metadata.assert_called_once()
-
-
-def test_delete_nfs_share_rest_bad_request(request_type=nfs_share.DeleteNfsShareRequest):
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
- # send a request that will satisfy transcoding
- request_init = {'name': 'projects/sample1/locations/sample2/nfsShares/sample3'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- json_return_value = ''
- response_value.json = mock.Mock(return_value={})
- response_value.status_code = 400
- response_value.request = mock.Mock()
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- client.delete_nfs_share(request)
-
-
-@pytest.mark.parametrize("request_type", [
- nfs_share.DeleteNfsShareRequest,
- dict,
-])
-def test_delete_nfs_share_rest_call_success(request_type):
- client = BareMetalSolutionClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
-
- # send a request that will satisfy transcoding
- request_init = {'name': 'projects/sample1/locations/sample2/nfsShares/sample3'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = operations_pb2.Operation(name='operations/spam')
-
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
- response_value.content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- response = client.delete_nfs_share(request)
-
- # Establish that the response is the type that we expect.
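# delete_nfs_share is likewise long-running, so the success test above stops
# at re-serializing the mocked Operation. Its interceptor test below also
# patches the private operation.Operation._set_result_from_operation helper,
# apparently so the returned future does not attempt to resolve the fake
# operation payload during the call.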
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_nfs_share_rest_interceptors(null_interceptor): - transport = transports.BareMetalSolutionRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BareMetalSolutionRestInterceptor(), - ) - client = BareMetalSolutionClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_delete_nfs_share") as post, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_delete_nfs_share_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "pre_delete_nfs_share") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = nfs_share.DeleteNfsShareRequest.pb(nfs_share.DeleteNfsShareRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = nfs_share.DeleteNfsShareRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.delete_nfs_share(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_provisioning_quotas_rest_bad_request(request_type=provisioning.ListProvisioningQuotasRequest): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_provisioning_quotas(request) - - -@pytest.mark.parametrize("request_type", [ - provisioning.ListProvisioningQuotasRequest, - dict, -]) -def test_list_provisioning_quotas_rest_call_success(request_type): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = provisioning.ListProvisioningQuotasResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = provisioning.ListProvisioningQuotasResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_provisioning_quotas(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListProvisioningQuotasPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_provisioning_quotas_rest_interceptors(null_interceptor): - transport = transports.BareMetalSolutionRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BareMetalSolutionRestInterceptor(), - ) - client = BareMetalSolutionClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_list_provisioning_quotas") as post, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_list_provisioning_quotas_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "pre_list_provisioning_quotas") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = provisioning.ListProvisioningQuotasRequest.pb(provisioning.ListProvisioningQuotasRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = provisioning.ListProvisioningQuotasResponse.to_json(provisioning.ListProvisioningQuotasResponse()) - req.return_value.content = return_value - - request = provisioning.ListProvisioningQuotasRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = provisioning.ListProvisioningQuotasResponse() - post_with_metadata.return_value = provisioning.ListProvisioningQuotasResponse(), metadata - - client.list_provisioning_quotas(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_submit_provisioning_config_rest_bad_request(request_type=provisioning.SubmitProvisioningConfigRequest): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.submit_provisioning_config(request) - - -@pytest.mark.parametrize("request_type", [ - provisioning.SubmitProvisioningConfigRequest, - dict, -]) -def test_submit_provisioning_config_rest_call_success(request_type): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = provisioning.SubmitProvisioningConfigResponse( - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = provisioning.SubmitProvisioningConfigResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.submit_provisioning_config(request) - - # Establish that the response is the type that we expect. 
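# SubmitProvisioningConfigResponse defines no scalar fields to compare, so
# the isinstance check below is the entire assertion. The interceptor tests
# in this file all exercise the same contract: pre_* hooks may rewrite
# (request, metadata) before transcoding, post_* hooks receive the decoded
# response, and post_*_with_metadata receives (response, metadata). A
# user-supplied interceptor would subclass the generated base, e.g. (a
# sketch; the class name is hypothetical):
#
#     class AuditInterceptor(transports.BareMetalSolutionRestInterceptor):
#         def pre_submit_provisioning_config(self, request, metadata):
#             return request, metadata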
- assert isinstance(response, provisioning.SubmitProvisioningConfigResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_submit_provisioning_config_rest_interceptors(null_interceptor): - transport = transports.BareMetalSolutionRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BareMetalSolutionRestInterceptor(), - ) - client = BareMetalSolutionClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_submit_provisioning_config") as post, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_submit_provisioning_config_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "pre_submit_provisioning_config") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = provisioning.SubmitProvisioningConfigRequest.pb(provisioning.SubmitProvisioningConfigRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = provisioning.SubmitProvisioningConfigResponse.to_json(provisioning.SubmitProvisioningConfigResponse()) - req.return_value.content = return_value - - request = provisioning.SubmitProvisioningConfigRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = provisioning.SubmitProvisioningConfigResponse() - post_with_metadata.return_value = provisioning.SubmitProvisioningConfigResponse(), metadata - - client.submit_provisioning_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_provisioning_config_rest_bad_request(request_type=provisioning.GetProvisioningConfigRequest): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/provisioningConfigs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_provisioning_config(request) - - -@pytest.mark.parametrize("request_type", [ - provisioning.GetProvisioningConfigRequest, - dict, -]) -def test_get_provisioning_config_rest_call_success(request_type): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/provisioningConfigs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = provisioning.ProvisioningConfig( - name='name_value', - ticket_id='ticket_id_value', - handover_service_account='handover_service_account_value', - email='email_value', - state=provisioning.ProvisioningConfig.State.DRAFT, - location='location_value', - cloud_console_uri='cloud_console_uri_value', - vpc_sc_enabled=True, - status_message='status_message_value', - custom_id='custom_id_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = provisioning.ProvisioningConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_provisioning_config(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, provisioning.ProvisioningConfig) - assert response.name == 'name_value' - assert response.ticket_id == 'ticket_id_value' - assert response.handover_service_account == 'handover_service_account_value' - assert response.email == 'email_value' - assert response.state == provisioning.ProvisioningConfig.State.DRAFT - assert response.location == 'location_value' - assert response.cloud_console_uri == 'cloud_console_uri_value' - assert response.vpc_sc_enabled is True - assert response.status_message == 'status_message_value' - assert response.custom_id == 'custom_id_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_provisioning_config_rest_interceptors(null_interceptor): - transport = transports.BareMetalSolutionRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BareMetalSolutionRestInterceptor(), - ) - client = BareMetalSolutionClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_get_provisioning_config") as post, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_get_provisioning_config_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "pre_get_provisioning_config") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = provisioning.GetProvisioningConfigRequest.pb(provisioning.GetProvisioningConfigRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = provisioning.ProvisioningConfig.to_json(provisioning.ProvisioningConfig()) - req.return_value.content = return_value - - request = provisioning.GetProvisioningConfigRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = provisioning.ProvisioningConfig() - post_with_metadata.return_value = provisioning.ProvisioningConfig(), metadata - - client.get_provisioning_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_provisioning_config_rest_bad_request(request_type=provisioning.CreateProvisioningConfigRequest): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_provisioning_config(request) - - -@pytest.mark.parametrize("request_type", [ - provisioning.CreateProvisioningConfigRequest, - dict, -]) -def test_create_provisioning_config_rest_call_success(request_type): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["provisioning_config"] = {'name': 'name_value', 'instances': [{'name': 'name_value', 'id': 'id_value', 'instance_type': 'instance_type_value', 'hyperthreading': True, 'os_image': 'os_image_value', 'client_network': {'network_id': 'network_id_value', 'address': 'address_value', 'existing_network_id': 'existing_network_id_value'}, 'private_network': {}, 'user_note': 'user_note_value', 'account_networks_enabled': True, 'network_config': 1, 'network_template': 'network_template_value', 'logical_interfaces': [{'logical_network_interfaces': [{'network': 'network_value', 'ip_address': 'ip_address_value', 'default_gateway': True, 'network_type': 1, 'id': 'id_value'}], 'name': 'name_value', 'interface_index': 1576}], 'ssh_key_names': ['ssh_key_names_value1', 'ssh_key_names_value2']}], 'networks': [{'name': 'name_value', 'id': 'id_value', 'type_': 1, 'bandwidth': 1, 'vlan_attachments': [{'id': 'id_value', 'pairing_key': 'pairing_key_value'}], 'cidr': 'cidr_value', 'service_cidr': 1, 'user_note': 'user_note_value', 'gcp_service': 'gcp_service_value', 'vlan_same_project': True, 'jumbo_frames_enabled': True}], 'volumes': [{'name': 'name_value', 'id': 'id_value', 'snapshots_enabled': True, 'type_': 1, 'protocol': 1, 'size_gb': 739, 'lun_ranges': [{'quantity': 895, 'size_gb': 739}], 'machine_ids': ['machine_ids_value1', 'machine_ids_value2'], 'nfs_exports': [{'network_id': 'network_id_value', 'machine_id': 'machine_id_value', 'cidr': 'cidr_value', 'permissions': 1, 'no_root_squash': True, 'allow_suid': True, 'allow_dev': True}], 'user_note': 'user_note_value', 'gcp_service': 'gcp_service_value', 'performance_tier': 1}], 'ticket_id': 'ticket_id_value', 'handover_service_account': 'handover_service_account_value', 'email': 'email_value', 'state': 1, 'location': 'location_value', 'update_time': {'seconds': 751, 'nanos': 543}, 'cloud_console_uri': 'cloud_console_uri_value', 'vpc_sc_enabled': True, 'status_message': 'status_message_value', 'custom_id': 'custom_id_value'} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = provisioning.CreateProvisioningConfigRequest.meta.fields["provisioning_config"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["provisioning_config"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["provisioning_config"][field])): - del request_init["provisioning_config"][field][i][subfield] - else: - del request_init["provisioning_config"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = provisioning.ProvisioningConfig( - name='name_value', - ticket_id='ticket_id_value', - handover_service_account='handover_service_account_value', - email='email_value', - state=provisioning.ProvisioningConfig.State.DRAFT, - location='location_value', - cloud_console_uri='cloud_console_uri_value', - vpc_sc_enabled=True, - status_message='status_message_value', - custom_id='custom_id_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = provisioning.ProvisioningConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_provisioning_config(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, provisioning.ProvisioningConfig) - assert response.name == 'name_value' - assert response.ticket_id == 'ticket_id_value' - assert response.handover_service_account == 'handover_service_account_value' - assert response.email == 'email_value' - assert response.state == provisioning.ProvisioningConfig.State.DRAFT - assert response.location == 'location_value' - assert response.cloud_console_uri == 'cloud_console_uri_value' - assert response.vpc_sc_enabled is True - assert response.status_message == 'status_message_value' - assert response.custom_id == 'custom_id_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_provisioning_config_rest_interceptors(null_interceptor): - transport = transports.BareMetalSolutionRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BareMetalSolutionRestInterceptor(), - ) - client = BareMetalSolutionClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_create_provisioning_config") as post, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_create_provisioning_config_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "pre_create_provisioning_config") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = provisioning.CreateProvisioningConfigRequest.pb(provisioning.CreateProvisioningConfigRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = provisioning.ProvisioningConfig.to_json(provisioning.ProvisioningConfig()) - req.return_value.content = return_value - - request = provisioning.CreateProvisioningConfigRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = provisioning.ProvisioningConfig() - post_with_metadata.return_value = provisioning.ProvisioningConfig(), metadata - - client.create_provisioning_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_provisioning_config_rest_bad_request(request_type=provisioning.UpdateProvisioningConfigRequest): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'provisioning_config': {'name': 'projects/sample1/locations/sample2/provisioningConfigs/sample3'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_provisioning_config(request) - - -@pytest.mark.parametrize("request_type", [ - provisioning.UpdateProvisioningConfigRequest, - dict, -]) -def test_update_provisioning_config_rest_call_success(request_type): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'provisioning_config': {'name': 'projects/sample1/locations/sample2/provisioningConfigs/sample3'}} - request_init["provisioning_config"] = {'name': 'projects/sample1/locations/sample2/provisioningConfigs/sample3', 'instances': [{'name': 'name_value', 'id': 'id_value', 'instance_type': 'instance_type_value', 'hyperthreading': True, 'os_image': 'os_image_value', 'client_network': {'network_id': 'network_id_value', 'address': 'address_value', 'existing_network_id': 'existing_network_id_value'}, 'private_network': {}, 'user_note': 'user_note_value', 'account_networks_enabled': True, 'network_config': 1, 'network_template': 'network_template_value', 'logical_interfaces': [{'logical_network_interfaces': [{'network': 'network_value', 'ip_address': 'ip_address_value', 'default_gateway': True, 'network_type': 1, 'id': 'id_value'}], 'name': 'name_value', 'interface_index': 1576}], 'ssh_key_names': ['ssh_key_names_value1', 'ssh_key_names_value2']}], 'networks': [{'name': 'name_value', 'id': 'id_value', 'type_': 1, 'bandwidth': 1, 'vlan_attachments': [{'id': 'id_value', 'pairing_key': 'pairing_key_value'}], 'cidr': 'cidr_value', 'service_cidr': 1, 'user_note': 'user_note_value', 'gcp_service': 'gcp_service_value', 'vlan_same_project': True, 'jumbo_frames_enabled': True}], 'volumes': [{'name': 'name_value', 'id': 'id_value', 'snapshots_enabled': True, 'type_': 1, 'protocol': 1, 'size_gb': 739, 'lun_ranges': [{'quantity': 895, 'size_gb': 739}], 'machine_ids': ['machine_ids_value1', 'machine_ids_value2'], 'nfs_exports': [{'network_id': 'network_id_value', 'machine_id': 'machine_id_value', 'cidr': 'cidr_value', 'permissions': 1, 'no_root_squash': True, 'allow_suid': True, 'allow_dev': True}], 'user_note': 'user_note_value', 'gcp_service': 'gcp_service_value', 'performance_tier': 1}], 'ticket_id': 'ticket_id_value', 'handover_service_account': 'handover_service_account_value', 'email': 'email_value', 'state': 1, 'location': 'location_value', 'update_time': {'seconds': 751, 'nanos': 543}, 'cloud_console_uri': 'cloud_console_uri_value', 'vpc_sc_enabled': True, 'status_message': 'status_message_value', 'custom_id': 'custom_id_value'} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = provisioning.UpdateProvisioningConfigRequest.meta.fields["provisioning_config"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. 
- # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["provisioning_config"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["provisioning_config"][field])): - del request_init["provisioning_config"][field][i][subfield] - else: - del request_init["provisioning_config"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = provisioning.ProvisioningConfig( - name='name_value', - ticket_id='ticket_id_value', - handover_service_account='handover_service_account_value', - email='email_value', - state=provisioning.ProvisioningConfig.State.DRAFT, - location='location_value', - cloud_console_uri='cloud_console_uri_value', - vpc_sc_enabled=True, - status_message='status_message_value', - custom_id='custom_id_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = provisioning.ProvisioningConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_provisioning_config(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, provisioning.ProvisioningConfig) - assert response.name == 'name_value' - assert response.ticket_id == 'ticket_id_value' - assert response.handover_service_account == 'handover_service_account_value' - assert response.email == 'email_value' - assert response.state == provisioning.ProvisioningConfig.State.DRAFT - assert response.location == 'location_value' - assert response.cloud_console_uri == 'cloud_console_uri_value' - assert response.vpc_sc_enabled is True - assert response.status_message == 'status_message_value' - assert response.custom_id == 'custom_id_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_provisioning_config_rest_interceptors(null_interceptor): - transport = transports.BareMetalSolutionRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BareMetalSolutionRestInterceptor(), - ) - client = BareMetalSolutionClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_update_provisioning_config") as post, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_update_provisioning_config_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "pre_update_provisioning_config") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = provisioning.UpdateProvisioningConfigRequest.pb(provisioning.UpdateProvisioningConfigRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = provisioning.ProvisioningConfig.to_json(provisioning.ProvisioningConfig()) - req.return_value.content = return_value - - request = provisioning.UpdateProvisioningConfigRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = provisioning.ProvisioningConfig() - post_with_metadata.return_value = provisioning.ProvisioningConfig(), metadata - - client.update_provisioning_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_rename_network_rest_bad_request(request_type=network.RenameNetworkRequest): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/networks/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.rename_network(request) - - -@pytest.mark.parametrize("request_type", [ - network.RenameNetworkRequest, - dict, -]) -def test_rename_network_rest_call_success(request_type): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/networks/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = network.Network( - name='name_value', - id='id_value', - type_=network.Network.Type.CLIENT, - ip_address='ip_address_value', - mac_address=['mac_address_value'], - state=network.Network.State.PROVISIONING, - vlan_id='vlan_id_value', - cidr='cidr_value', - services_cidr='services_cidr_value', - pod='pod_value', - jumbo_frames_enabled=True, - gateway_ip='gateway_ip_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = network.Network.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.rename_network(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, network.Network) - assert response.name == 'name_value' - assert response.id == 'id_value' - assert response.type_ == network.Network.Type.CLIENT - assert response.ip_address == 'ip_address_value' - assert response.mac_address == ['mac_address_value'] - assert response.state == network.Network.State.PROVISIONING - assert response.vlan_id == 'vlan_id_value' - assert response.cidr == 'cidr_value' - assert response.services_cidr == 'services_cidr_value' - assert response.pod == 'pod_value' - assert response.jumbo_frames_enabled is True - assert response.gateway_ip == 'gateway_ip_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_rename_network_rest_interceptors(null_interceptor): - transport = transports.BareMetalSolutionRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BareMetalSolutionRestInterceptor(), - ) - client = BareMetalSolutionClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_rename_network") as post, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_rename_network_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "pre_rename_network") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = network.RenameNetworkRequest.pb(network.RenameNetworkRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = network.Network.to_json(network.Network()) - req.return_value.content = return_value - - request = network.RenameNetworkRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = network.Network() - post_with_metadata.return_value = network.Network(), metadata - - client.rename_network(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_os_images_rest_bad_request(request_type=osimage.ListOSImagesRequest): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
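# A minimal sketch of what the interceptor tests above exercise, seen from
# application code: subclass BareMetalSolutionRestInterceptor and override only
# the hooks you need. The hook names mirror the attributes patched above; the
# exact signatures are an assumption, not documented API.
from google.cloud.bare_metal_solution_v2.services.bare_metal_solution import transports

class LoggingInterceptor(transports.BareMetalSolutionRestInterceptor):
    def pre_rename_network(self, request, metadata):
        # Pre hooks may rewrite the (request, metadata) pair before the HTTP call.
        return request, metadata

    def post_rename_network(self, response):
        # Post hooks may rewrite the decoded response before the client returns it.
        return response

# Wired in the same way the tests construct their transport:
#   transports.BareMetalSolutionRestTransport(
#       credentials=ga_credentials.AnonymousCredentials(),
#       interceptor=LoggingInterceptor(),
#   )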
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_os_images(request) - - -@pytest.mark.parametrize("request_type", [ - osimage.ListOSImagesRequest, - dict, -]) -def test_list_os_images_rest_call_success(request_type): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = osimage.ListOSImagesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = osimage.ListOSImagesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_os_images(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListOSImagesPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_os_images_rest_interceptors(null_interceptor): - transport = transports.BareMetalSolutionRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BareMetalSolutionRestInterceptor(), - ) - client = BareMetalSolutionClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_list_os_images") as post, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "post_list_os_images_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BareMetalSolutionRestInterceptor, "pre_list_os_images") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = osimage.ListOSImagesRequest.pb(osimage.ListOSImagesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = osimage.ListOSImagesResponse.to_json(osimage.ListOSImagesResponse()) - req.return_value.content = return_value - - request = osimage.ListOSImagesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = osimage.ListOSImagesResponse() - post_with_metadata.return_value = 
osimage.ListOSImagesResponse(), metadata - - client.list_os_images(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_location(request) - - -@pytest.mark.parametrize("request_type", [ - locations_pb2.GetLocationRequest, - dict, -]) -def test_get_location_rest(request_type): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.Location() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_location(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocationsRequest): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_locations(request) - - -@pytest.mark.parametrize("request_type", [ - locations_pb2.ListLocationsRequest, - dict, -]) -def test_list_locations_rest(request_type): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.ListLocationsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_locations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - -def test_initialize_client_w_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_instances_empty_call_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: - client.list_instances(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = instance.ListInstancesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_instance_empty_call_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: - client.get_instance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = instance.GetInstanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_instance_empty_call_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - client.update_instance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcb_instance.UpdateInstanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_rename_instance_empty_call_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.rename_instance), - '__call__') as call: - client.rename_instance(request=None) - - # Establish that the underlying stub method was called. 
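# A minimal sketch of the assertion style these coverage failsafes use below:
# each entry of a mock's mock_calls unpacks into a (name, args, kwargs) triple,
# so args[0] recovers the request message the client built from request=None.
from unittest import mock

stub = mock.Mock()
stub("default-request")
_, args, _ = stub.mock_calls[0]
assert args[0] == "default-request"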
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = instance.RenameInstanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_reset_instance_empty_call_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.reset_instance), - '__call__') as call: - client.reset_instance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = instance.ResetInstanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_start_instance_empty_call_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.start_instance), - '__call__') as call: - client.start_instance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = instance.StartInstanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_stop_instance_empty_call_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.stop_instance), - '__call__') as call: - client.stop_instance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = instance.StopInstanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_enable_interactive_serial_console_empty_call_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.enable_interactive_serial_console), - '__call__') as call: - client.enable_interactive_serial_console(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = instance.EnableInteractiveSerialConsoleRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_disable_interactive_serial_console_empty_call_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.disable_interactive_serial_console), - '__call__') as call: - client.disable_interactive_serial_console(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = instance.DisableInteractiveSerialConsoleRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_detach_lun_empty_call_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.detach_lun), - '__call__') as call: - client.detach_lun(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcb_instance.DetachLunRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_ssh_keys_empty_call_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_ssh_keys), - '__call__') as call: - client.list_ssh_keys(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = ssh_key.ListSSHKeysRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_ssh_key_empty_call_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_ssh_key), - '__call__') as call: - client.create_ssh_key(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcb_ssh_key.CreateSSHKeyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_ssh_key_empty_call_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_ssh_key), - '__call__') as call: - client.delete_ssh_key(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = ssh_key.DeleteSSHKeyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_volumes_empty_call_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_volumes), - '__call__') as call: - client.list_volumes(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = volume.ListVolumesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_volume_empty_call_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_volume), - '__call__') as call: - client.get_volume(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = volume.GetVolumeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_volume_empty_call_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_volume), - '__call__') as call: - client.update_volume(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcb_volume.UpdateVolumeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_rename_volume_empty_call_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.rename_volume), - '__call__') as call: - client.rename_volume(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = volume.RenameVolumeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_evict_volume_empty_call_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.evict_volume), - '__call__') as call: - client.evict_volume(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = volume.EvictVolumeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_resize_volume_empty_call_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.resize_volume), - '__call__') as call: - client.resize_volume(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcb_volume.ResizeVolumeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_networks_empty_call_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_networks), - '__call__') as call: - client.list_networks(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = network.ListNetworksRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_network_usage_empty_call_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_network_usage), - '__call__') as call: - client.list_network_usage(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = network.ListNetworkUsageRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_network_empty_call_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_network), - '__call__') as call: - client.get_network(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = network.GetNetworkRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_network_empty_call_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_network), - '__call__') as call: - client.update_network(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcb_network.UpdateNetworkRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_volume_snapshot_empty_call_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_volume_snapshot), - '__call__') as call: - client.create_volume_snapshot(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcb_volume_snapshot.CreateVolumeSnapshotRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_restore_volume_snapshot_empty_call_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.restore_volume_snapshot), - '__call__') as call: - client.restore_volume_snapshot(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcb_volume_snapshot.RestoreVolumeSnapshotRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_volume_snapshot_empty_call_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_volume_snapshot), - '__call__') as call: - client.delete_volume_snapshot(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = volume_snapshot.DeleteVolumeSnapshotRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_volume_snapshot_empty_call_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_volume_snapshot), - '__call__') as call: - client.get_volume_snapshot(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = volume_snapshot.GetVolumeSnapshotRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_volume_snapshots_empty_call_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_volume_snapshots), - '__call__') as call: - client.list_volume_snapshots(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = volume_snapshot.ListVolumeSnapshotsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_lun_empty_call_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_lun), - '__call__') as call: - client.get_lun(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = lun.GetLunRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_luns_empty_call_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_luns), - '__call__') as call: - client.list_luns(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = lun.ListLunsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_evict_lun_empty_call_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.evict_lun), - '__call__') as call: - client.evict_lun(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = lun.EvictLunRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_nfs_share_empty_call_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_nfs_share), - '__call__') as call: - client.get_nfs_share(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = nfs_share.GetNfsShareRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_nfs_shares_empty_call_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_nfs_shares), - '__call__') as call: - client.list_nfs_shares(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = nfs_share.ListNfsSharesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_nfs_share_empty_call_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_nfs_share), - '__call__') as call: - client.update_nfs_share(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcb_nfs_share.UpdateNfsShareRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. 
request == None and no flattened fields passed, work. -def test_create_nfs_share_empty_call_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_nfs_share), - '__call__') as call: - client.create_nfs_share(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcb_nfs_share.CreateNfsShareRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_rename_nfs_share_empty_call_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.rename_nfs_share), - '__call__') as call: - client.rename_nfs_share(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = nfs_share.RenameNfsShareRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_nfs_share_empty_call_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_nfs_share), - '__call__') as call: - client.delete_nfs_share(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = nfs_share.DeleteNfsShareRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_provisioning_quotas_empty_call_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_provisioning_quotas), - '__call__') as call: - client.list_provisioning_quotas(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = provisioning.ListProvisioningQuotasRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_submit_provisioning_config_empty_call_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.submit_provisioning_config), - '__call__') as call: - client.submit_provisioning_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = provisioning.SubmitProvisioningConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
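# A note on why comparing args[0] against a freshly constructed request works in
# these failsafes: protobuf messages implement structural (value) equality, so
# two default-constructed requests of the same type compare equal. Sketch with a
# well-known type:
from google.protobuf import struct_pb2

assert struct_pb2.Struct() == struct_pb2.Struct()  # equal by value, not identity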
-def test_get_provisioning_config_empty_call_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_provisioning_config), - '__call__') as call: - client.get_provisioning_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = provisioning.GetProvisioningConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_provisioning_config_empty_call_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_provisioning_config), - '__call__') as call: - client.create_provisioning_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = provisioning.CreateProvisioningConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_provisioning_config_empty_call_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_provisioning_config), - '__call__') as call: - client.update_provisioning_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = provisioning.UpdateProvisioningConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_rename_network_empty_call_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.rename_network), - '__call__') as call: - client.rename_network(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = network.RenameNetworkRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_os_images_empty_call_rest(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_os_images), - '__call__') as call: - client.list_os_images(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = osimage.ListOSImagesRequest() - - assert args[0] == request_msg - - -def test_bare_metal_solution_rest_lro_client(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - transport = client.transport - - # Ensure that we have an api-core operations client. - assert isinstance( - transport.operations_client, -operations_v1.AbstractOperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.BareMetalSolutionGrpcTransport, - ) - -def test_bare_metal_solution_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.BareMetalSolutionTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_bare_metal_solution_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.bare_metal_solution_v2.services.bare_metal_solution.transports.BareMetalSolutionTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.BareMetalSolutionTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'list_instances', - 'get_instance', - 'update_instance', - 'rename_instance', - 'reset_instance', - 'start_instance', - 'stop_instance', - 'enable_interactive_serial_console', - 'disable_interactive_serial_console', - 'detach_lun', - 'list_ssh_keys', - 'create_ssh_key', - 'delete_ssh_key', - 'list_volumes', - 'get_volume', - 'update_volume', - 'rename_volume', - 'evict_volume', - 'resize_volume', - 'list_networks', - 'list_network_usage', - 'get_network', - 'update_network', - 'create_volume_snapshot', - 'restore_volume_snapshot', - 'delete_volume_snapshot', - 'get_volume_snapshot', - 'list_volume_snapshots', - 'get_lun', - 'list_luns', - 'evict_lun', - 'get_nfs_share', - 'list_nfs_shares', - 'update_nfs_share', - 'create_nfs_share', - 'rename_nfs_share', - 'delete_nfs_share', - 'list_provisioning_quotas', - 'submit_provisioning_config', - 'get_provisioning_config', - 'create_provisioning_config', - 'update_provisioning_config', - 'rename_network', - 'list_os_images', - 'get_location', - 'list_locations', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_bare_metal_solution_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, 
mock.patch('google.cloud.bare_metal_solution_v2.services.bare_metal_solution.transports.BareMetalSolutionTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.BareMetalSolutionTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_bare_metal_solution_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.bare_metal_solution_v2.services.bare_metal_solution.transports.BareMetalSolutionTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.BareMetalSolutionTransport() - adc.assert_called_once() - - -def test_bare_metal_solution_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - BareMetalSolutionClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.BareMetalSolutionGrpcTransport, - transports.BareMetalSolutionGrpcAsyncIOTransport, - ], -) -def test_bare_metal_solution_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.BareMetalSolutionGrpcTransport, - transports.BareMetalSolutionGrpcAsyncIOTransport, - transports.BareMetalSolutionRestTransport, - ], -) -def test_bare_metal_solution_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.BareMetalSolutionGrpcTransport, grpc_helpers), - (transports.BareMetalSolutionGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_bare_metal_solution_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
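# "ADC" in the tests above and below is Application Default Credentials. Outside
# of tests, the lookup these transports rely on reduces to a single call; this
# sketch assumes google-auth plus a configured environment or service-account file.
import google.auth

credentials, project_id = google.auth.default(
    scopes=["https://www.googleapis.com/auth/cloud-platform"],
)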
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "baremetalsolution.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="baremetalsolution.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.BareMetalSolutionGrpcTransport, transports.BareMetalSolutionGrpcAsyncIOTransport]) -def test_bare_metal_solution_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_bare_metal_solution_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.BareMetalSolutionRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_bare_metal_solution_host_no_port(transport_name): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='baremetalsolution.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'baremetalsolution.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://baremetalsolution.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_bare_metal_solution_host_with_port(transport_name): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='baremetalsolution.googleapis.com:8000'), - transport=transport_name, - ) - 
assert client.transport._host == ( - 'baremetalsolution.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://baremetalsolution.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_bare_metal_solution_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = BareMetalSolutionClient( - credentials=creds1, - transport=transport_name, - ) - client2 = BareMetalSolutionClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.list_instances._session - session2 = client2.transport.list_instances._session - assert session1 != session2 - session1 = client1.transport.get_instance._session - session2 = client2.transport.get_instance._session - assert session1 != session2 - session1 = client1.transport.update_instance._session - session2 = client2.transport.update_instance._session - assert session1 != session2 - session1 = client1.transport.rename_instance._session - session2 = client2.transport.rename_instance._session - assert session1 != session2 - session1 = client1.transport.reset_instance._session - session2 = client2.transport.reset_instance._session - assert session1 != session2 - session1 = client1.transport.start_instance._session - session2 = client2.transport.start_instance._session - assert session1 != session2 - session1 = client1.transport.stop_instance._session - session2 = client2.transport.stop_instance._session - assert session1 != session2 - session1 = client1.transport.enable_interactive_serial_console._session - session2 = client2.transport.enable_interactive_serial_console._session - assert session1 != session2 - session1 = client1.transport.disable_interactive_serial_console._session - session2 = client2.transport.disable_interactive_serial_console._session - assert session1 != session2 - session1 = client1.transport.detach_lun._session - session2 = client2.transport.detach_lun._session - assert session1 != session2 - session1 = client1.transport.list_ssh_keys._session - session2 = client2.transport.list_ssh_keys._session - assert session1 != session2 - session1 = client1.transport.create_ssh_key._session - session2 = client2.transport.create_ssh_key._session - assert session1 != session2 - session1 = client1.transport.delete_ssh_key._session - session2 = client2.transport.delete_ssh_key._session - assert session1 != session2 - session1 = client1.transport.list_volumes._session - session2 = client2.transport.list_volumes._session - assert session1 != session2 - session1 = client1.transport.get_volume._session - session2 = client2.transport.get_volume._session - assert session1 != session2 - session1 = client1.transport.update_volume._session - session2 = client2.transport.update_volume._session - assert session1 != session2 - session1 = client1.transport.rename_volume._session - session2 = client2.transport.rename_volume._session - assert session1 != session2 - session1 = client1.transport.evict_volume._session - session2 = client2.transport.evict_volume._session - assert session1 != session2 - session1 = client1.transport.resize_volume._session - session2 = client2.transport.resize_volume._session - assert session1 != session2 - session1 = client1.transport.list_networks._session - session2 = client2.transport.list_networks._session - assert session1 != session2 - session1 = client1.transport.list_network_usage._session - session2 = 
client2.transport.list_network_usage._session - assert session1 != session2 - session1 = client1.transport.get_network._session - session2 = client2.transport.get_network._session - assert session1 != session2 - session1 = client1.transport.update_network._session - session2 = client2.transport.update_network._session - assert session1 != session2 - session1 = client1.transport.create_volume_snapshot._session - session2 = client2.transport.create_volume_snapshot._session - assert session1 != session2 - session1 = client1.transport.restore_volume_snapshot._session - session2 = client2.transport.restore_volume_snapshot._session - assert session1 != session2 - session1 = client1.transport.delete_volume_snapshot._session - session2 = client2.transport.delete_volume_snapshot._session - assert session1 != session2 - session1 = client1.transport.get_volume_snapshot._session - session2 = client2.transport.get_volume_snapshot._session - assert session1 != session2 - session1 = client1.transport.list_volume_snapshots._session - session2 = client2.transport.list_volume_snapshots._session - assert session1 != session2 - session1 = client1.transport.get_lun._session - session2 = client2.transport.get_lun._session - assert session1 != session2 - session1 = client1.transport.list_luns._session - session2 = client2.transport.list_luns._session - assert session1 != session2 - session1 = client1.transport.evict_lun._session - session2 = client2.transport.evict_lun._session - assert session1 != session2 - session1 = client1.transport.get_nfs_share._session - session2 = client2.transport.get_nfs_share._session - assert session1 != session2 - session1 = client1.transport.list_nfs_shares._session - session2 = client2.transport.list_nfs_shares._session - assert session1 != session2 - session1 = client1.transport.update_nfs_share._session - session2 = client2.transport.update_nfs_share._session - assert session1 != session2 - session1 = client1.transport.create_nfs_share._session - session2 = client2.transport.create_nfs_share._session - assert session1 != session2 - session1 = client1.transport.rename_nfs_share._session - session2 = client2.transport.rename_nfs_share._session - assert session1 != session2 - session1 = client1.transport.delete_nfs_share._session - session2 = client2.transport.delete_nfs_share._session - assert session1 != session2 - session1 = client1.transport.list_provisioning_quotas._session - session2 = client2.transport.list_provisioning_quotas._session - assert session1 != session2 - session1 = client1.transport.submit_provisioning_config._session - session2 = client2.transport.submit_provisioning_config._session - assert session1 != session2 - session1 = client1.transport.get_provisioning_config._session - session2 = client2.transport.get_provisioning_config._session - assert session1 != session2 - session1 = client1.transport.create_provisioning_config._session - session2 = client2.transport.create_provisioning_config._session - assert session1 != session2 - session1 = client1.transport.update_provisioning_config._session - session2 = client2.transport.update_provisioning_config._session - assert session1 != session2 - session1 = client1.transport.rename_network._session - session2 = client2.transport.rename_network._session - assert session1 != session2 - session1 = client1.transport.list_os_images._session - session2 = client2.transport.list_os_images._session - assert session1 != session2 -def test_bare_metal_solution_grpc_transport_channel(): - channel = 
grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.BareMetalSolutionGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_bare_metal_solution_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.BareMetalSolutionGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.BareMetalSolutionGrpcTransport, transports.BareMetalSolutionGrpcAsyncIOTransport]) -def test_bare_metal_solution_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.BareMetalSolutionGrpcTransport, transports.BareMetalSolutionGrpcAsyncIOTransport])
-def test_bare_metal_solution_transport_channel_mtls_with_adc(
-    transport_class
-):
-    mock_ssl_cred = mock.Mock()
-    with mock.patch.multiple(
-        "google.auth.transport.grpc.SslCredentials",
-        __init__=mock.Mock(return_value=None),
-        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
-    ):
-        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
-            mock_grpc_channel = mock.Mock()
-            grpc_create_channel.return_value = mock_grpc_channel
-            mock_cred = mock.Mock()
-
-            with pytest.warns(DeprecationWarning):
-                transport = transport_class(
-                    host="squid.clam.whelk",
-                    credentials=mock_cred,
-                    api_mtls_endpoint="mtls.squid.clam.whelk",
-                    client_cert_source=None,
-                )
-
-            grpc_create_channel.assert_called_once_with(
-                "mtls.squid.clam.whelk:443",
-                credentials=mock_cred,
-                credentials_file=None,
-                scopes=None,
-                ssl_credentials=mock_ssl_cred,
-                quota_project_id=None,
-                options=[
-                    ("grpc.max_send_message_length", -1),
-                    ("grpc.max_receive_message_length", -1),
-                ],
-            )
-            assert transport.grpc_channel == mock_grpc_channel
-
-
-def test_bare_metal_solution_grpc_lro_client():
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-    transport = client.transport
-
-    # Ensure that we have an api-core operations client.
-    assert isinstance(
-        transport.operations_client,
-        operations_v1.OperationsClient,
-    )
-
-    # Ensure that subsequent calls to the property send the exact same object.
-    assert transport.operations_client is transport.operations_client
-
-
-def test_bare_metal_solution_grpc_lro_async_client():
-    client = BareMetalSolutionAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc_asyncio',
-    )
-    transport = client.transport
-
-    # Ensure that we have an api-core operations client.
-    assert isinstance(
-        transport.operations_client,
-        operations_v1.OperationsAsyncClient,
-    )
-
-    # Ensure that subsequent calls to the property send the exact same object.
-    assert transport.operations_client is transport.operations_client
-
-
-def test_instance_path():
-    project = "squid"
-    location = "clam"
-    instance = "whelk"
-    expected = "projects/{project}/locations/{location}/instances/{instance}".format(project=project, location=location, instance=instance, )
-    actual = BareMetalSolutionClient.instance_path(project, location, instance)
-    assert expected == actual
-
-
-def test_parse_instance_path():
-    expected = {
-        "project": "octopus",
-        "location": "oyster",
-        "instance": "nudibranch",
-    }
-    path = BareMetalSolutionClient.instance_path(**expected)
-
-    # Check that the path construction is reversible.
-    actual = BareMetalSolutionClient.parse_instance_path(path)
-    assert expected == actual
-
-def test_instance_config_path():
-    project = "cuttlefish"
-    location = "mussel"
-    instance_config = "winkle"
-    expected = "projects/{project}/locations/{location}/instanceConfigs/{instance_config}".format(project=project, location=location, instance_config=instance_config, )
-    actual = BareMetalSolutionClient.instance_config_path(project, location, instance_config)
-    assert expected == actual
-
-
-def test_parse_instance_config_path():
-    expected = {
-        "project": "nautilus",
-        "location": "scallop",
-        "instance_config": "abalone",
-    }
-    path = BareMetalSolutionClient.instance_config_path(**expected)
-
-    # Check that the path construction is reversible.
- actual = BareMetalSolutionClient.parse_instance_config_path(path) - assert expected == actual - -def test_instance_quota_path(): - project = "squid" - location = "clam" - instance_quota = "whelk" - expected = "projects/{project}/locations/{location}/instanceQuotas/{instance_quota}".format(project=project, location=location, instance_quota=instance_quota, ) - actual = BareMetalSolutionClient.instance_quota_path(project, location, instance_quota) - assert expected == actual - - -def test_parse_instance_quota_path(): - expected = { - "project": "octopus", - "location": "oyster", - "instance_quota": "nudibranch", - } - path = BareMetalSolutionClient.instance_quota_path(**expected) - - # Check that the path construction is reversible. - actual = BareMetalSolutionClient.parse_instance_quota_path(path) - assert expected == actual - -def test_interconnect_attachment_path(): - project = "cuttlefish" - region = "mussel" - interconnect_attachment = "winkle" - expected = "projects/{project}/regions/{region}/interconnectAttachments/{interconnect_attachment}".format(project=project, region=region, interconnect_attachment=interconnect_attachment, ) - actual = BareMetalSolutionClient.interconnect_attachment_path(project, region, interconnect_attachment) - assert expected == actual - - -def test_parse_interconnect_attachment_path(): - expected = { - "project": "nautilus", - "region": "scallop", - "interconnect_attachment": "abalone", - } - path = BareMetalSolutionClient.interconnect_attachment_path(**expected) - - # Check that the path construction is reversible. - actual = BareMetalSolutionClient.parse_interconnect_attachment_path(path) - assert expected == actual - -def test_lun_path(): - project = "squid" - location = "clam" - volume = "whelk" - lun = "octopus" - expected = "projects/{project}/locations/{location}/volumes/{volume}/luns/{lun}".format(project=project, location=location, volume=volume, lun=lun, ) - actual = BareMetalSolutionClient.lun_path(project, location, volume, lun) - assert expected == actual - - -def test_parse_lun_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - "volume": "cuttlefish", - "lun": "mussel", - } - path = BareMetalSolutionClient.lun_path(**expected) - - # Check that the path construction is reversible. - actual = BareMetalSolutionClient.parse_lun_path(path) - assert expected == actual - -def test_network_path(): - project = "winkle" - location = "nautilus" - network = "scallop" - expected = "projects/{project}/locations/{location}/networks/{network}".format(project=project, location=location, network=network, ) - actual = BareMetalSolutionClient.network_path(project, location, network) - assert expected == actual - - -def test_parse_network_path(): - expected = { - "project": "abalone", - "location": "squid", - "network": "clam", - } - path = BareMetalSolutionClient.network_path(**expected) - - # Check that the path construction is reversible. 
- actual = BareMetalSolutionClient.parse_network_path(path) - assert expected == actual - -def test_network_config_path(): - project = "whelk" - location = "octopus" - network_config = "oyster" - expected = "projects/{project}/locations/{location}/networkConfigs/{network_config}".format(project=project, location=location, network_config=network_config, ) - actual = BareMetalSolutionClient.network_config_path(project, location, network_config) - assert expected == actual - - -def test_parse_network_config_path(): - expected = { - "project": "nudibranch", - "location": "cuttlefish", - "network_config": "mussel", - } - path = BareMetalSolutionClient.network_config_path(**expected) - - # Check that the path construction is reversible. - actual = BareMetalSolutionClient.parse_network_config_path(path) - assert expected == actual - -def test_nfs_share_path(): - project = "winkle" - location = "nautilus" - nfs_share = "scallop" - expected = "projects/{project}/locations/{location}/nfsShares/{nfs_share}".format(project=project, location=location, nfs_share=nfs_share, ) - actual = BareMetalSolutionClient.nfs_share_path(project, location, nfs_share) - assert expected == actual - - -def test_parse_nfs_share_path(): - expected = { - "project": "abalone", - "location": "squid", - "nfs_share": "clam", - } - path = BareMetalSolutionClient.nfs_share_path(**expected) - - # Check that the path construction is reversible. - actual = BareMetalSolutionClient.parse_nfs_share_path(path) - assert expected == actual - -def test_os_image_path(): - project = "whelk" - location = "octopus" - os_image = "oyster" - expected = "projects/{project}/locations/{location}/osImages/{os_image}".format(project=project, location=location, os_image=os_image, ) - actual = BareMetalSolutionClient.os_image_path(project, location, os_image) - assert expected == actual - - -def test_parse_os_image_path(): - expected = { - "project": "nudibranch", - "location": "cuttlefish", - "os_image": "mussel", - } - path = BareMetalSolutionClient.os_image_path(**expected) - - # Check that the path construction is reversible. - actual = BareMetalSolutionClient.parse_os_image_path(path) - assert expected == actual - -def test_provisioning_config_path(): - project = "winkle" - location = "nautilus" - provisioning_config = "scallop" - expected = "projects/{project}/locations/{location}/provisioningConfigs/{provisioning_config}".format(project=project, location=location, provisioning_config=provisioning_config, ) - actual = BareMetalSolutionClient.provisioning_config_path(project, location, provisioning_config) - assert expected == actual - - -def test_parse_provisioning_config_path(): - expected = { - "project": "abalone", - "location": "squid", - "provisioning_config": "clam", - } - path = BareMetalSolutionClient.provisioning_config_path(**expected) - - # Check that the path construction is reversible. 
- actual = BareMetalSolutionClient.parse_provisioning_config_path(path) - assert expected == actual - -def test_provisioning_quota_path(): - project = "whelk" - location = "octopus" - provisioning_quota = "oyster" - expected = "projects/{project}/locations/{location}/provisioningQuotas/{provisioning_quota}".format(project=project, location=location, provisioning_quota=provisioning_quota, ) - actual = BareMetalSolutionClient.provisioning_quota_path(project, location, provisioning_quota) - assert expected == actual - - -def test_parse_provisioning_quota_path(): - expected = { - "project": "nudibranch", - "location": "cuttlefish", - "provisioning_quota": "mussel", - } - path = BareMetalSolutionClient.provisioning_quota_path(**expected) - - # Check that the path construction is reversible. - actual = BareMetalSolutionClient.parse_provisioning_quota_path(path) - assert expected == actual - -def test_server_network_template_path(): - project = "winkle" - location = "nautilus" - server_network_template = "scallop" - expected = "projects/{project}/locations/{location}/serverNetworkTemplate/{server_network_template}".format(project=project, location=location, server_network_template=server_network_template, ) - actual = BareMetalSolutionClient.server_network_template_path(project, location, server_network_template) - assert expected == actual - - -def test_parse_server_network_template_path(): - expected = { - "project": "abalone", - "location": "squid", - "server_network_template": "clam", - } - path = BareMetalSolutionClient.server_network_template_path(**expected) - - # Check that the path construction is reversible. - actual = BareMetalSolutionClient.parse_server_network_template_path(path) - assert expected == actual - -def test_ssh_key_path(): - project = "whelk" - location = "octopus" - ssh_key = "oyster" - expected = "projects/{project}/locations/{location}/sshKeys/{ssh_key}".format(project=project, location=location, ssh_key=ssh_key, ) - actual = BareMetalSolutionClient.ssh_key_path(project, location, ssh_key) - assert expected == actual - - -def test_parse_ssh_key_path(): - expected = { - "project": "nudibranch", - "location": "cuttlefish", - "ssh_key": "mussel", - } - path = BareMetalSolutionClient.ssh_key_path(**expected) - - # Check that the path construction is reversible. - actual = BareMetalSolutionClient.parse_ssh_key_path(path) - assert expected == actual - -def test_volume_path(): - project = "winkle" - location = "nautilus" - volume = "scallop" - expected = "projects/{project}/locations/{location}/volumes/{volume}".format(project=project, location=location, volume=volume, ) - actual = BareMetalSolutionClient.volume_path(project, location, volume) - assert expected == actual - - -def test_parse_volume_path(): - expected = { - "project": "abalone", - "location": "squid", - "volume": "clam", - } - path = BareMetalSolutionClient.volume_path(**expected) - - # Check that the path construction is reversible. 
- actual = BareMetalSolutionClient.parse_volume_path(path) - assert expected == actual - -def test_volume_config_path(): - project = "whelk" - location = "octopus" - volume_config = "oyster" - expected = "projects/{project}/locations/{location}/volumeConfigs/{volume_config}".format(project=project, location=location, volume_config=volume_config, ) - actual = BareMetalSolutionClient.volume_config_path(project, location, volume_config) - assert expected == actual - - -def test_parse_volume_config_path(): - expected = { - "project": "nudibranch", - "location": "cuttlefish", - "volume_config": "mussel", - } - path = BareMetalSolutionClient.volume_config_path(**expected) - - # Check that the path construction is reversible. - actual = BareMetalSolutionClient.parse_volume_config_path(path) - assert expected == actual - -def test_volume_snapshot_path(): - project = "winkle" - location = "nautilus" - volume = "scallop" - snapshot = "abalone" - expected = "projects/{project}/locations/{location}/volumes/{volume}/snapshots/{snapshot}".format(project=project, location=location, volume=volume, snapshot=snapshot, ) - actual = BareMetalSolutionClient.volume_snapshot_path(project, location, volume, snapshot) - assert expected == actual - - -def test_parse_volume_snapshot_path(): - expected = { - "project": "squid", - "location": "clam", - "volume": "whelk", - "snapshot": "octopus", - } - path = BareMetalSolutionClient.volume_snapshot_path(**expected) - - # Check that the path construction is reversible. - actual = BareMetalSolutionClient.parse_volume_snapshot_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "oyster" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = BareMetalSolutionClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "nudibranch", - } - path = BareMetalSolutionClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = BareMetalSolutionClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder, ) - actual = BareMetalSolutionClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "mussel", - } - path = BareMetalSolutionClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = BareMetalSolutionClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "winkle" - expected = "organizations/{organization}".format(organization=organization, ) - actual = BareMetalSolutionClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nautilus", - } - path = BareMetalSolutionClient.common_organization_path(**expected) - - # Check that the path construction is reversible. 
- actual = BareMetalSolutionClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "scallop" - expected = "projects/{project}".format(project=project, ) - actual = BareMetalSolutionClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "abalone", - } - path = BareMetalSolutionClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = BareMetalSolutionClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "squid" - location = "clam" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = BareMetalSolutionClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "whelk", - "location": "octopus", - } - path = BareMetalSolutionClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = BareMetalSolutionClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.BareMetalSolutionTransport, '_prep_wrapped_messages') as prep: - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.BareMetalSolutionTransport, '_prep_wrapped_messages') as prep: - transport_class = BareMetalSolutionClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_list_locations(transport: str = "grpc"): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - -def test_list_locations_field_headers(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() - - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_locations_from_dict(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_location(transport: str = "grpc"): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - -def test_get_location_field_headers(): - client = BareMetalSolutionClient( - credentials=ga_credentials.AnonymousCredentials()) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = locations_pb2.Location() - - client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_location_field_headers_async(): - client = BareMetalSolutionAsyncClient( - credentials=async_anonymous_credentials() - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. 
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            locations_pb2.Location()
-        )
-        await client.get_location(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
-
-def test_get_location_from_dict():
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = locations_pb2.Location()
-
-        response = client.get_location(
-            request={
-                "name": "locations/abc",
-            }
-        )
-        call.assert_called()
-@pytest.mark.asyncio
-async def test_get_location_from_dict_async():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            locations_pb2.Location()
-        )
-        response = await client.get_location(
-            request={
-                "name": "locations/abc",
-            }
-        )
-        call.assert_called()
-
-
-def test_transport_close_grpc():
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc"
-    )
-    with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
-        with client:
-            close.assert_not_called()
-        close.assert_called_once()
-
-
-@pytest.mark.asyncio
-async def test_transport_close_grpc_asyncio():
-    client = BareMetalSolutionAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio"
-    )
-    with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
-        async with client:
-            close.assert_not_called()
-        close.assert_called_once()
-
-
-def test_transport_close_rest():
-    client = BareMetalSolutionClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close:
-        with client:
-            close.assert_not_called()
-        close.assert_called_once()
-
-
-def test_client_ctx():
-    transports = [
-        'rest',
-        'grpc',
-    ]
-    for transport in transports:
-        client = BareMetalSolutionClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport=transport
-        )
-        # Test client calls underlying transport.
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (BareMetalSolutionClient, transports.BareMetalSolutionGrpcTransport), - (BareMetalSolutionAsyncClient, transports.BareMetalSolutionGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-batch/v1/.coveragerc b/owl-bot-staging/google-cloud-batch/v1/.coveragerc deleted file mode 100644 index e17b1e03c7a6..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/batch/__init__.py - google/cloud/batch/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/google-cloud-batch/v1/.flake8 b/owl-bot-staging/google-cloud-batch/v1/.flake8 deleted file mode 100644 index 29227d4cf419..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. 
-  **/.nox/**
-  __pycache__,
-  .git,
-  *.pyc,
-  conf.py
diff --git a/owl-bot-staging/google-cloud-batch/v1/MANIFEST.in b/owl-bot-staging/google-cloud-batch/v1/MANIFEST.in
deleted file mode 100644
index 612a16ef3412..000000000000
--- a/owl-bot-staging/google-cloud-batch/v1/MANIFEST.in
+++ /dev/null
@@ -1,2 +0,0 @@
-recursive-include google/cloud/batch *.py
-recursive-include google/cloud/batch_v1 *.py
diff --git a/owl-bot-staging/google-cloud-batch/v1/README.rst b/owl-bot-staging/google-cloud-batch/v1/README.rst
deleted file mode 100644
index 73c2dd5d3ee8..000000000000
--- a/owl-bot-staging/google-cloud-batch/v1/README.rst
+++ /dev/null
@@ -1,143 +0,0 @@
-Python Client for Google Cloud Batch API
-=================================================
-
-Quick Start
------------
-
-In order to use this library, you first need to go through the following steps:
-
-1. `Select or create a Cloud Platform project.`_
-2. `Enable billing for your project.`_
-3. Enable the Google Cloud Batch API.
-4. `Setup Authentication.`_
-
-.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
-.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
-.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
-
-Installation
-~~~~~~~~~~~~
-
-Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
-create isolated Python environments. The basic problem it addresses is one of
-dependencies and versions, and indirectly permissions.
-
-With `virtualenv`_, it's possible to install this library without needing system
-install permissions, and without clashing with the installed system
-dependencies.
-
-.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
-
-
-Mac/Linux
-^^^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    source <your-env>/bin/activate
-    <your-env>/bin/pip install /path/to/library
-
-
-Windows
-^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    <your-env>\Scripts\activate
-    <your-env>\Scripts\pip.exe install \path\to\library
-
-
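Once the library is installed, a minimal usage sketch looks like the following. This example is illustrative rather than part of the generated README: it assumes Application Default Credentials are configured, and ``my-project`` and ``us-central1`` are placeholder values.

.. code-block:: python

    from google.cloud import batch_v1

    # Picks up Application Default Credentials when no credentials are passed.
    client = batch_v1.BatchServiceClient()

    # Placeholder project and region; substitute real values.
    parent = "projects/my-project/locations/us-central1"

    # list_jobs returns a pager that lazily fetches additional pages.
    for job in client.list_jobs(parent=parent):
        print(job.name)
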
-Logging
--------
-
-This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes.
-Note the following:
-
-#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging.
-#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**.
-#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below.
-
-
-Simple, environment-based configuration
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google
-logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged
-messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging
-event.
-
-A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log.
-
-- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc.
-- Invalid logging scopes: :code:`foo`, :code:`123`, etc.
-
-**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers.
-
-
-Examples
-^^^^^^^^
-
-- Enabling the default handler for all Google-based loggers
-
-.. code-block:: console
-
-    export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google
-
-- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`):
-
-.. code-block:: console

-    export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1
-
-
-Advanced, code-based configuration
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-You can also configure a valid logging scope using Python's standard `logging` mechanism.
-
-
-Examples
-^^^^^^^^
-
-- Configuring a handler for all Google-based loggers
-
-.. code-block:: python
-
-    import logging
-
-    from google.cloud.translate_v3 import translate
-
-    base_logger = logging.getLogger("google")
-    base_logger.addHandler(logging.StreamHandler())
-    base_logger.setLevel(logging.DEBUG)
-
-- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`):
-
-.. code-block:: python
-
-    import logging
-
-    from google.cloud.translate_v3 import translate
-
-    base_logger = logging.getLogger("google.cloud.library_v1")
-    base_logger.addHandler(logging.StreamHandler())
-    base_logger.setLevel(logging.DEBUG)
-
-
-Logging details
-~~~~~~~~~~~~~~~
-
-#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root
-   logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set
-   :code:`logging.getLogger("google").propagate = True` in your code.
-#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for
-   one library, but decide you need to also set up environment-based logging configuration for another library.
-
-   #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual
-      if the code-based configuration gets applied first.
-
-#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get
-   executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured.
-   (This is the reason for 2.i. above.)
diff --git a/owl-bot-staging/google-cloud-batch/v1/docs/_static/custom.css b/owl-bot-staging/google-cloud-batch/v1/docs/_static/custom.css
deleted file mode 100644
index 06423be0b592..000000000000
--- a/owl-bot-staging/google-cloud-batch/v1/docs/_static/custom.css
+++ /dev/null
@@ -1,3 +0,0 @@
-dl.field-list > dt {
-    min-width: 100px
-}
diff --git a/owl-bot-staging/google-cloud-batch/v1/docs/batch_v1/batch_service.rst b/owl-bot-staging/google-cloud-batch/v1/docs/batch_v1/batch_service.rst
deleted file mode 100644
index 61da9e28e90b..000000000000
--- a/owl-bot-staging/google-cloud-batch/v1/docs/batch_v1/batch_service.rst
+++ /dev/null
@@ -1,10 +0,0 @@
-BatchService
-------------------------------
-
-.. automodule:: google.cloud.batch_v1.services.batch_service
-    :members:
-    :inherited-members:
-
-..
automodule:: google.cloud.batch_v1.services.batch_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-batch/v1/docs/batch_v1/services_.rst b/owl-bot-staging/google-cloud-batch/v1/docs/batch_v1/services_.rst deleted file mode 100644 index 71d53c0f5169..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/docs/batch_v1/services_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Batch v1 API -====================================== -.. toctree:: - :maxdepth: 2 - - batch_service diff --git a/owl-bot-staging/google-cloud-batch/v1/docs/batch_v1/types_.rst b/owl-bot-staging/google-cloud-batch/v1/docs/batch_v1/types_.rst deleted file mode 100644 index e56f8a2d9535..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/docs/batch_v1/types_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Batch v1 API -=================================== - -.. automodule:: google.cloud.batch_v1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/google-cloud-batch/v1/docs/conf.py b/owl-bot-staging/google-cloud-batch/v1/docs/conf.py deleted file mode 100644 index cf072699db6e..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-batch documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. 
-templates_path = ["_templates"]
-
-# Allow markdown includes (so releases.md can include CHANGELOG.md)
-# http://www.sphinx-doc.org/en/master/markdown.html
-source_parsers = {".md": "recommonmark.parser.CommonMarkParser"}
-
-# The suffix(es) of source filenames.
-# You can specify multiple suffixes as a list of strings:
-source_suffix = [".rst", ".md"]
-
-# The encoding of source files.
-# source_encoding = 'utf-8-sig'
-
-# The root toctree document.
-root_doc = "index"
-
-# General information about the project.
-project = u"google-cloud-batch"
-copyright = u"2023, Google, LLC"
-author = u"Google APIs"  # TODO: autogenerate this bit
-
-# The version info for the project you're documenting, acts as replacement for
-# |version| and |release|, also used in various other places throughout the
-# built documents.
-#
-# The full version, including alpha/beta/rc tags.
-release = __version__
-# The short X.Y version.
-version = ".".join(release.split(".")[0:2])
-
-# The language for content autogenerated by Sphinx. Refer to documentation
-# for a list of supported languages.
-#
-# This is also used if you do content translation via gettext catalogs.
-# Usually you set "language" from the command line for these cases.
-language = 'en'
-
-# There are two options for replacing |today|: either, you set today to some
-# non-false value, then it is used:
-# today = ''
-# Else, today_fmt is used as the format for a strftime call.
-# today_fmt = '%B %d, %Y'
-
-# List of patterns, relative to source directory, that match files and
-# directories to ignore when looking for source files.
-exclude_patterns = ["_build"]
-
-# The reST default role (used for this markup: `text`) to use for all
-# documents.
-# default_role = None
-
-# If true, '()' will be appended to :func: etc. cross-reference text.
-# add_function_parentheses = True
-
-# If true, the current module name will be prepended to all description
-# unit titles (such as .. function::).
-# add_module_names = True
-
-# If true, sectionauthor and moduleauthor directives will be shown in the
-# output. They are ignored by default.
-# show_authors = False
-
-# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = "sphinx"
-
-# A list of ignored prefixes for module index sorting.
-# modindex_common_prefix = []
-
-# If true, keep warnings as "system message" paragraphs in the built documents.
-# keep_warnings = False
-
-# If true, `todo` and `todoList` produce output, else they produce nothing.
-todo_include_todos = True
-
-
-# -- Options for HTML output ----------------------------------------------
-
-# The theme to use for HTML and HTML Help pages. See the documentation for
-# a list of builtin themes.
-html_theme = "alabaster"
-
-# Theme options are theme-specific and customize the look and feel of a theme
-# further. For a list of options available for each theme, see the
-# documentation.
-html_theme_options = {
-    "description": "Google Cloud Client Libraries for Python",
-    "github_user": "googleapis",
-    "github_repo": "google-cloud-python",
-    "github_banner": True,
-    "font_family": "'Roboto', Georgia, sans",
-    "head_font_family": "'Roboto', Georgia, serif",
-    "code_font_family": "'Roboto Mono', 'Consolas', monospace",
-}
-
-# Add any paths that contain custom themes here, relative to this directory.
-# html_theme_path = []
-
-# The name for this set of Sphinx documents. If None, it defaults to
-# "<project> v<release> documentation".
-# html_title = None
-
-# A shorter title for the navigation bar. Default is the same as html_title.
-# html_short_title = None
-
-# The name of an image file (relative to this directory) to place at the top
-# of the sidebar.
-# html_logo = None
-
-# The name of an image file (within the static path) to use as favicon of the
-# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
-# pixels large.
-# html_favicon = None
-
-# Add any paths that contain custom static files (such as style sheets) here,
-# relative to this directory. They are copied after the builtin static files,
-# so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ["_static"]
-
-# Add any extra paths that contain custom files (such as robots.txt or
-# .htaccess) here, relative to this directory. These files are copied
-# directly to the root of the documentation.
-# html_extra_path = []
-
-# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
-# using the given strftime format.
-# html_last_updated_fmt = '%b %d, %Y'
-
-# If true, SmartyPants will be used to convert quotes and dashes to
-# typographically correct entities.
-# html_use_smartypants = True
-
-# Custom sidebar templates, maps document names to template names.
-# html_sidebars = {}
-
-# Additional templates that should be rendered to pages, maps page names to
-# template names.
-# html_additional_pages = {}
-
-# If false, no module index is generated.
-# html_domain_indices = True
-
-# If false, no index is generated.
-# html_use_index = True
-
-# If true, the index is split into individual pages for each letter.
-# html_split_index = False
-
-# If true, links to the reST sources are added to the pages.
-# html_show_sourcelink = True
-
-# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
-# html_show_sphinx = True
-
-# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
-# html_show_copyright = True
-
-# If true, an OpenSearch description file will be output, and all pages will
-# contain a <link> tag referring to it. The value of this option must be the
-# base URL from which the finished HTML is served.
-# html_use_opensearch = ''
-
-# This is the file name suffix for HTML files (e.g. ".xhtml").
-# html_file_suffix = None
-
-# Language to be used for generating the HTML full-text search index.
-# Sphinx supports the following languages:
-#   'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
-#   'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
-# html_search_language = 'en'
-
-# A dictionary with options for the search language support, empty by default.
-# Now only 'ja' uses this config value
-# html_search_options = {'type': 'default'}
-
-# The name of a javascript file (relative to the configuration directory) that
-# implements a search results scorer. If empty, the default will be used.
-# html_search_scorer = 'scorer.js'
-
-# Output file base name for HTML help builder.
-htmlhelp_basename = "google-cloud-batch-doc"
-
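A ``conf.py`` like this one is consumed by a standard Sphinx build. A typical invocation is sketched below, under the assumption that Sphinx (>= 4.0.1, per ``needs_sphinx``) and the ``recommonmark`` parser referenced above are installed; the output directory is an arbitrary choice.

.. code-block:: console

    # Build HTML docs from the docs/ source tree into docs/_build/html.
    sphinx-build -b html docs docs/_build/html
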
-# -- Options for warnings ------------------------------------------------------
-
-
-suppress_warnings = [
-    # Temporarily suppress this to avoid "more than one target found for
-    # cross-reference" warnings, which are intractable for us to avoid while in
-    # a mono-repo.
-    # See https://github.com/sphinx-doc/sphinx/blob
-    # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843
-    "ref.python"
-]
-
-# -- Options for LaTeX output ---------------------------------------------
-
-latex_elements = {
-    # The paper size ('letterpaper' or 'a4paper').
- # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-batch.tex", - u"google-cloud-batch Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-batch", - u"Google Cloud Batch Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-batch", - u"google-cloud-batch Documentation", - author, - "google-cloud-batch", - "GAPIC library for Google Cloud Batch API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. 
-intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-batch/v1/docs/index.rst b/owl-bot-staging/google-cloud-batch/v1/docs/index.rst deleted file mode 100644 index 25bcc8d6adf3..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - batch_v1/services_ - batch_v1/types_ diff --git a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch/__init__.py b/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch/__init__.py deleted file mode 100644 index 6483473fce24..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch/__init__.py +++ /dev/null @@ -1,87 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.batch import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.batch_v1.services.batch_service.client import BatchServiceClient -from google.cloud.batch_v1.services.batch_service.async_client import BatchServiceAsyncClient - -from google.cloud.batch_v1.types.batch import CancelJobRequest -from google.cloud.batch_v1.types.batch import CancelJobResponse -from google.cloud.batch_v1.types.batch import CreateJobRequest -from google.cloud.batch_v1.types.batch import DeleteJobRequest -from google.cloud.batch_v1.types.batch import GetJobRequest -from google.cloud.batch_v1.types.batch import GetTaskRequest -from google.cloud.batch_v1.types.batch import ListJobsRequest -from google.cloud.batch_v1.types.batch import ListJobsResponse -from google.cloud.batch_v1.types.batch import ListTasksRequest -from google.cloud.batch_v1.types.batch import ListTasksResponse -from google.cloud.batch_v1.types.batch import OperationMetadata -from google.cloud.batch_v1.types.job import AllocationPolicy -from google.cloud.batch_v1.types.job import Job -from google.cloud.batch_v1.types.job import JobNotification -from google.cloud.batch_v1.types.job import JobStatus -from google.cloud.batch_v1.types.job import LogsPolicy -from google.cloud.batch_v1.types.job import ServiceAccount -from google.cloud.batch_v1.types.job import TaskGroup -from google.cloud.batch_v1.types.task import ComputeResource -from google.cloud.batch_v1.types.task import Environment -from google.cloud.batch_v1.types.task import LifecyclePolicy -from google.cloud.batch_v1.types.task import Runnable -from google.cloud.batch_v1.types.task import StatusEvent -from google.cloud.batch_v1.types.task import Task -from google.cloud.batch_v1.types.task import TaskExecution -from google.cloud.batch_v1.types.task import TaskSpec -from google.cloud.batch_v1.types.task import TaskStatus -from google.cloud.batch_v1.types.volume import GCS -from google.cloud.batch_v1.types.volume import NFS -from google.cloud.batch_v1.types.volume import Volume - -__all__ = ('BatchServiceClient', - 'BatchServiceAsyncClient', - 'CancelJobRequest', - 'CancelJobResponse', - 'CreateJobRequest', - 'DeleteJobRequest', - 'GetJobRequest', - 'GetTaskRequest', - 'ListJobsRequest', - 'ListJobsResponse', - 'ListTasksRequest', - 'ListTasksResponse', - 'OperationMetadata', - 'AllocationPolicy', - 'Job', - 'JobNotification', - 'JobStatus', - 'LogsPolicy', - 'ServiceAccount', - 'TaskGroup', - 'ComputeResource', - 'Environment', - 'LifecyclePolicy', - 'Runnable', - 'StatusEvent', - 'Task', - 'TaskExecution', - 'TaskSpec', - 'TaskStatus', - 'GCS', - 'NFS', - 'Volume', -) diff --git a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch/gapic_version.py b/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch/py.typed b/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch/py.typed deleted file mode 100644 index 32c66c8cc211..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-batch package uses inline types. diff --git a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/__init__.py b/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/__init__.py deleted file mode 100644 index 4da8a7a67a54..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/__init__.py +++ /dev/null @@ -1,88 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.batch_v1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.batch_service import BatchServiceClient -from .services.batch_service import BatchServiceAsyncClient - -from .types.batch import CancelJobRequest -from .types.batch import CancelJobResponse -from .types.batch import CreateJobRequest -from .types.batch import DeleteJobRequest -from .types.batch import GetJobRequest -from .types.batch import GetTaskRequest -from .types.batch import ListJobsRequest -from .types.batch import ListJobsResponse -from .types.batch import ListTasksRequest -from .types.batch import ListTasksResponse -from .types.batch import OperationMetadata -from .types.job import AllocationPolicy -from .types.job import Job -from .types.job import JobNotification -from .types.job import JobStatus -from .types.job import LogsPolicy -from .types.job import ServiceAccount -from .types.job import TaskGroup -from .types.task import ComputeResource -from .types.task import Environment -from .types.task import LifecyclePolicy -from .types.task import Runnable -from .types.task import StatusEvent -from .types.task import Task -from .types.task import TaskExecution -from .types.task import TaskSpec -from .types.task import TaskStatus -from .types.volume import GCS -from .types.volume import NFS -from .types.volume import Volume - -__all__ = ( - 'BatchServiceAsyncClient', -'AllocationPolicy', -'BatchServiceClient', -'CancelJobRequest', -'CancelJobResponse', -'ComputeResource', -'CreateJobRequest', -'DeleteJobRequest', -'Environment', -'GCS', -'GetJobRequest', -'GetTaskRequest', -'Job', -'JobNotification', -'JobStatus', -'LifecyclePolicy', -'ListJobsRequest', -'ListJobsResponse', -'ListTasksRequest', -'ListTasksResponse', -'LogsPolicy', -'NFS', -'OperationMetadata', -'Runnable', -'ServiceAccount', -'StatusEvent', -'Task', -'TaskExecution', -'TaskGroup', -'TaskSpec', -'TaskStatus', -'Volume', -) diff --git a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/gapic_metadata.json 
b/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/gapic_metadata.json deleted file mode 100644 index d4c30aa61b47..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/gapic_metadata.json +++ /dev/null @@ -1,133 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.batch_v1", - "protoPackage": "google.cloud.batch.v1", - "schema": "1.0", - "services": { - "BatchService": { - "clients": { - "grpc": { - "libraryClient": "BatchServiceClient", - "rpcs": { - "CancelJob": { - "methods": [ - "cancel_job" - ] - }, - "CreateJob": { - "methods": [ - "create_job" - ] - }, - "DeleteJob": { - "methods": [ - "delete_job" - ] - }, - "GetJob": { - "methods": [ - "get_job" - ] - }, - "GetTask": { - "methods": [ - "get_task" - ] - }, - "ListJobs": { - "methods": [ - "list_jobs" - ] - }, - "ListTasks": { - "methods": [ - "list_tasks" - ] - } - } - }, - "grpc-async": { - "libraryClient": "BatchServiceAsyncClient", - "rpcs": { - "CancelJob": { - "methods": [ - "cancel_job" - ] - }, - "CreateJob": { - "methods": [ - "create_job" - ] - }, - "DeleteJob": { - "methods": [ - "delete_job" - ] - }, - "GetJob": { - "methods": [ - "get_job" - ] - }, - "GetTask": { - "methods": [ - "get_task" - ] - }, - "ListJobs": { - "methods": [ - "list_jobs" - ] - }, - "ListTasks": { - "methods": [ - "list_tasks" - ] - } - } - }, - "rest": { - "libraryClient": "BatchServiceClient", - "rpcs": { - "CancelJob": { - "methods": [ - "cancel_job" - ] - }, - "CreateJob": { - "methods": [ - "create_job" - ] - }, - "DeleteJob": { - "methods": [ - "delete_job" - ] - }, - "GetJob": { - "methods": [ - "get_job" - ] - }, - "GetTask": { - "methods": [ - "get_task" - ] - }, - "ListJobs": { - "methods": [ - "list_jobs" - ] - }, - "ListTasks": { - "methods": [ - "list_tasks" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/gapic_version.py b/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/py.typed b/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/py.typed deleted file mode 100644 index 32c66c8cc211..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-batch package uses inline types. 
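For orientation, the ``gapic_metadata.json`` hunk above maps each ``BatchService`` RPC to its client method name (``CreateJob`` maps to ``create_job``, ``ListTasks`` to ``list_tasks``, and so on), while the ``__init__.py`` hunks show which request and resource types the package re-exports. The sketch below ties those generated surfaces together; it is not part of the deleted sources, the project, location, and job ID are placeholder values, default application credentials are assumed, and a real job typically needs more configuration (allocation policy, compute resources, logging) than shown here.

.. code-block:: python

    # A minimal sketch, assuming default application credentials; the
    # project/location/job-id values below are placeholders.
    from google.cloud import batch_v1

    client = batch_v1.BatchServiceClient()

    # Build a Job from the types re-exported by batch_v1/__init__.py above.
    runnable = batch_v1.Runnable(
        script=batch_v1.Runnable.Script(text="echo hello"),
    )
    job = batch_v1.Job(
        task_groups=[
            batch_v1.TaskGroup(
                task_spec=batch_v1.TaskSpec(runnables=[runnable]),
                task_count=1,
            )
        ],
    )

    # "CreateJob" in gapic_metadata.json corresponds to create_job here,
    # using the flattened parent/job/job_id arguments.
    response = client.create_job(
        parent="projects/my-project/locations/us-central1",  # placeholder
        job=job,
        job_id="example-job",  # placeholder
    )
    print(response.name)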
diff --git a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/services/__init__.py b/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/services/__init__.py deleted file mode 100644 index 8f6cf068242c..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/services/batch_service/__init__.py b/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/services/batch_service/__init__.py deleted file mode 100644 index 17519904ba19..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/services/batch_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import BatchServiceClient -from .async_client import BatchServiceAsyncClient - -__all__ = ( - 'BatchServiceClient', - 'BatchServiceAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/services/batch_service/async_client.py b/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/services/batch_service/async_client.py deleted file mode 100644 index 8b0efc67958f..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/services/batch_service/async_client.py +++ /dev/null @@ -1,1399 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.batch_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.batch_v1.services.batch_service import pagers -from google.cloud.batch_v1.types import batch -from google.cloud.batch_v1.types import job -from google.cloud.batch_v1.types import job as gcb_job -from google.cloud.batch_v1.types import task -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import BatchServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import BatchServiceGrpcAsyncIOTransport -from .client import BatchServiceClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -class BatchServiceAsyncClient: - """Google Batch Service. - The service manages user submitted batch jobs and allocates - Google Compute Engine VM instances to run the jobs. - """ - - _client: BatchServiceClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
- DEFAULT_ENDPOINT = BatchServiceClient.DEFAULT_ENDPOINT
- DEFAULT_MTLS_ENDPOINT = BatchServiceClient.DEFAULT_MTLS_ENDPOINT
- _DEFAULT_ENDPOINT_TEMPLATE = BatchServiceClient._DEFAULT_ENDPOINT_TEMPLATE
- _DEFAULT_UNIVERSE = BatchServiceClient._DEFAULT_UNIVERSE
-
- job_path = staticmethod(BatchServiceClient.job_path)
- parse_job_path = staticmethod(BatchServiceClient.parse_job_path)
- task_path = staticmethod(BatchServiceClient.task_path)
- parse_task_path = staticmethod(BatchServiceClient.parse_task_path)
- task_group_path = staticmethod(BatchServiceClient.task_group_path)
- parse_task_group_path = staticmethod(BatchServiceClient.parse_task_group_path)
- common_billing_account_path = staticmethod(BatchServiceClient.common_billing_account_path)
- parse_common_billing_account_path = staticmethod(BatchServiceClient.parse_common_billing_account_path)
- common_folder_path = staticmethod(BatchServiceClient.common_folder_path)
- parse_common_folder_path = staticmethod(BatchServiceClient.parse_common_folder_path)
- common_organization_path = staticmethod(BatchServiceClient.common_organization_path)
- parse_common_organization_path = staticmethod(BatchServiceClient.parse_common_organization_path)
- common_project_path = staticmethod(BatchServiceClient.common_project_path)
- parse_common_project_path = staticmethod(BatchServiceClient.parse_common_project_path)
- common_location_path = staticmethod(BatchServiceClient.common_location_path)
- parse_common_location_path = staticmethod(BatchServiceClient.parse_common_location_path)
-
- @classmethod
- def from_service_account_info(cls, info: dict, *args, **kwargs):
- """Creates an instance of this client using the provided credentials
- info.
-
- Args:
- info (dict): The service account private key info.
- args: Additional arguments to pass to the constructor.
- kwargs: Additional arguments to pass to the constructor.
-
- Returns:
- BatchServiceAsyncClient: The constructed client.
- """
- return BatchServiceClient.from_service_account_info.__func__(BatchServiceAsyncClient, info, *args, **kwargs) # type: ignore
-
- @classmethod
- def from_service_account_file(cls, filename: str, *args, **kwargs):
- """Creates an instance of this client using the provided credentials
- file.
-
- Args:
- filename (str): The path to the service account private key json
- file.
- args: Additional arguments to pass to the constructor.
- kwargs: Additional arguments to pass to the constructor.
-
- Returns:
- BatchServiceAsyncClient: The constructed client.
- """
- return BatchServiceClient.from_service_account_file.__func__(BatchServiceAsyncClient, filename, *args, **kwargs) # type: ignore
-
- from_service_account_json = from_service_account_file
-
- @classmethod
- def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None):
- """Return the API endpoint and client cert source for mutual TLS.
-
- The client cert source is determined in the following order:
- (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
- client cert source is None.
- (2) if `client_options.client_cert_source` is provided, use the provided one; if the
- default client cert source exists, use the default one; otherwise the client cert
- source is None.
-
- The API endpoint is determined in the following order:
- (1) if `client_options.api_endpoint` is provided, use the provided one.
- (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
- default mTLS endpoint; if the environment variable is "never", use the default API
- endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
- use the default API endpoint.
-
- More details can be found at https://google.aip.dev/auth/4114.
-
- Args:
- client_options (google.api_core.client_options.ClientOptions): Custom options for the
- client. Only the `api_endpoint` and `client_cert_source` properties may be used
- in this method.
-
- Returns:
- Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
- client cert source to use.
-
- Raises:
- google.auth.exceptions.MutualTLSChannelError: If any errors happen.
- """
- return BatchServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore
-
- @property
- def transport(self) -> BatchServiceTransport:
- """Returns the transport used by the client instance.
-
- Returns:
- BatchServiceTransport: The transport used by the client instance.
- """
- return self._client.transport
-
- @property
- def api_endpoint(self):
- """Return the API endpoint used by the client instance.
-
- Returns:
- str: The API endpoint used by the client instance.
- """
- return self._client._api_endpoint
-
- @property
- def universe_domain(self) -> str:
- """Return the universe domain used by the client instance.
-
- Returns:
- str: The universe domain used
- by the client instance.
- """
- return self._client._universe_domain
-
- get_transport_class = BatchServiceClient.get_transport_class
-
- def __init__(self, *,
- credentials: Optional[ga_credentials.Credentials] = None,
- transport: Optional[Union[str, BatchServiceTransport, Callable[..., BatchServiceTransport]]] = "grpc_asyncio",
- client_options: Optional[ClientOptions] = None,
- client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
- ) -> None:
- """Instantiates the batch service async client.
-
- Args:
- credentials (Optional[google.auth.credentials.Credentials]): The
- authorization credentials to attach to requests. These
- credentials identify the application to the service; if none
- are specified, the client will attempt to ascertain the
- credentials from the environment.
- transport (Optional[Union[str,BatchServiceTransport,Callable[..., BatchServiceTransport]]]):
- The transport to use, or a Callable that constructs and returns a new transport to use.
- If a Callable is given, it will be called with the same set of initialization
- arguments as used in the BatchServiceTransport constructor.
- If set to None, a transport is chosen automatically.
- client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
- Custom options for the client.
-
- 1. The ``api_endpoint`` property can be used to override the
- default endpoint provided by the client when ``transport`` is
- not explicitly provided. Only if this property is not set and
- ``transport`` was not explicitly provided, the endpoint is
- determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
- variable, which can have one of the following values:
- "always" (always use the default mTLS endpoint), "never" (always
- use the default regular endpoint) and "auto" (auto-switch to the
- default mTLS endpoint if client certificate is present; this is
- the default value).
-
- 2.
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = BatchServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.batch_v1.BatchServiceAsyncClient`.", - extra = { - "serviceName": "google.cloud.batch.v1.BatchService", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { - "serviceName": "google.cloud.batch.v1.BatchService", - "credentialsType": None, - } - ) - - async def create_job(self, - request: Optional[Union[batch.CreateJobRequest, dict]] = None, - *, - parent: Optional[str] = None, - job: Optional[gcb_job.Job] = None, - job_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gcb_job.Job: - r"""Create a Job. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import batch_v1 - - async def sample_create_job(): - # Create a client - client = batch_v1.BatchServiceAsyncClient() - - # Initialize request argument(s) - request = batch_v1.CreateJobRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_job(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.batch_v1.types.CreateJobRequest, dict]]): - The request object. CreateJob Request. - parent (:class:`str`): - Required. The parent resource name - where the Job will be created. Pattern: - "projects/{project}/locations/{location}" - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job (:class:`google.cloud.batch_v1.types.Job`): - Required. 
The Job to create. - This corresponds to the ``job`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job_id (:class:`str`): - ID used to uniquely identify the Job within its parent - scope. This field should contain at most 63 characters - and must start with lowercase characters. Only lowercase - characters, numbers and '-' are accepted. The '-' - character cannot be the first or the last one. A system - generated ID will be used if the field is not set. - - The job.name field in the request will be ignored and - the created resource name of the Job will be - "{parent}/jobs/{job_id}". - - This corresponds to the ``job_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.batch_v1.types.Job: - The Cloud Batch Job description. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, job, job_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, batch.CreateJobRequest): - request = batch.CreateJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if job is not None: - request.job = job - if job_id is not None: - request.job_id = job_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_job(self, - request: Optional[Union[batch.GetJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> job.Job: - r"""Get a Job specified by its resource name. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import batch_v1 - - async def sample_get_job(): - # Create a client - client = batch_v1.BatchServiceAsyncClient() - - # Initialize request argument(s) - request = batch_v1.GetJobRequest( - name="name_value", - ) - - # Make the request - response = await client.get_job(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.batch_v1.types.GetJobRequest, dict]]): - The request object. GetJob Request. - name (:class:`str`): - Required. Job name. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.batch_v1.types.Job: - The Cloud Batch Job description. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, batch.GetJobRequest): - request = batch.GetJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_job(self, - request: Optional[Union[batch.DeleteJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Delete a Job. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import batch_v1 - - async def sample_delete_job(): - # Create a client - client = batch_v1.BatchServiceAsyncClient() - - # Initialize request argument(s) - request = batch_v1.DeleteJobRequest( - ) - - # Make the request - operation = client.delete_job(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.batch_v1.types.DeleteJobRequest, dict]]): - The request object. DeleteJob Request. - name (:class:`str`): - Job name. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, batch.DeleteJobRequest): - request = batch.DeleteJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=batch.OperationMetadata, - ) - - # Done; return the response. 
- return response - - async def cancel_job(self, - request: Optional[Union[batch.CancelJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Cancel a Job. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import batch_v1 - - async def sample_cancel_job(): - # Create a client - client = batch_v1.BatchServiceAsyncClient() - - # Initialize request argument(s) - request = batch_v1.CancelJobRequest( - name="name_value", - ) - - # Make the request - operation = client.cancel_job(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.batch_v1.types.CancelJobRequest, dict]]): - The request object. CancelJob Request. - name (:class:`str`): - Required. Job name. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.batch_v1.types.CancelJobResponse` - Response to the CancelJob request. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, batch.CancelJobRequest): - request = batch.CancelJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.cancel_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. 
- self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - batch.CancelJobResponse, - metadata_type=batch.OperationMetadata, - ) - - # Done; return the response. - return response - - async def list_jobs(self, - request: Optional[Union[batch.ListJobsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListJobsAsyncPager: - r"""List all Jobs for a project within a region. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import batch_v1 - - async def sample_list_jobs(): - # Create a client - client = batch_v1.BatchServiceAsyncClient() - - # Initialize request argument(s) - request = batch_v1.ListJobsRequest( - ) - - # Make the request - page_result = client.list_jobs(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.batch_v1.types.ListJobsRequest, dict]]): - The request object. ListJob Request. - parent (:class:`str`): - Parent path. - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.batch_v1.services.batch_service.pagers.ListJobsAsyncPager: - ListJob Response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, batch.ListJobsRequest): - request = batch.ListJobsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.list_jobs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListJobsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_task(self, - request: Optional[Union[batch.GetTaskRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> task.Task: - r"""Return a single Task. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import batch_v1 - - async def sample_get_task(): - # Create a client - client = batch_v1.BatchServiceAsyncClient() - - # Initialize request argument(s) - request = batch_v1.GetTaskRequest( - name="name_value", - ) - - # Make the request - response = await client.get_task(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.batch_v1.types.GetTaskRequest, dict]]): - The request object. Request for a single Task by name. - name (:class:`str`): - Required. Task name. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.batch_v1.types.Task: - A Cloud Batch task. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, batch.GetTaskRequest): - request = batch.GetTaskRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_task] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_tasks(self, - request: Optional[Union[batch.ListTasksRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListTasksAsyncPager: - r"""List Tasks associated with a job. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import batch_v1 - - async def sample_list_tasks(): - # Create a client - client = batch_v1.BatchServiceAsyncClient() - - # Initialize request argument(s) - request = batch_v1.ListTasksRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_tasks(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.batch_v1.types.ListTasksRequest, dict]]): - The request object. ListTasks Request. - parent (:class:`str`): - Required. Name of a TaskGroup from which Tasks are being - requested. Pattern: - "projects/{project}/locations/{location}/jobs/{job}/taskGroups/{task_group}" - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.batch_v1.services.batch_service.pagers.ListTasksAsyncPager: - ListTasks Response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, batch.ListTasksRequest): - request = batch.ListTasksRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_tasks] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListTasksAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self.transport._wrapped_methods[self._client._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def __aenter__(self) -> "BatchServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "BatchServiceAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/services/batch_service/client.py b/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/services/batch_service/client.py deleted file mode 100644 index 3f44fd93eaff..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/services/batch_service/client.py +++ /dev/null @@ -1,1783 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.batch_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.batch_v1.services.batch_service import pagers -from google.cloud.batch_v1.types import batch -from google.cloud.batch_v1.types import job -from google.cloud.batch_v1.types import job as gcb_job -from google.cloud.batch_v1.types import task -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import BatchServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import BatchServiceGrpcTransport -from .transports.grpc_asyncio import BatchServiceGrpcAsyncIOTransport -from .transports.rest import BatchServiceRestTransport - - -class BatchServiceClientMeta(type): - """Metaclass for the BatchService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[BatchServiceTransport]] - _transport_registry["grpc"] = BatchServiceGrpcTransport - _transport_registry["grpc_asyncio"] = BatchServiceGrpcAsyncIOTransport - _transport_registry["rest"] = BatchServiceRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[BatchServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. 
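-        # For example, `BatchServiceClient.get_transport_class("rest")` returns
-        # `BatchServiceRestTransport`, while calling it with no label falls
-        # through to the first registered transport, "grpc".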
-        if label:
-            return cls._transport_registry[label]
-
-        # No transport is requested; return the default (that is, the first one
-        # in the dictionary).
-        return next(iter(cls._transport_registry.values()))
-
-
-class BatchServiceClient(metaclass=BatchServiceClientMeta):
-    """Google Batch Service.
-    The service manages user-submitted batch jobs and allocates
-    Google Compute Engine VM instances to run the jobs.
-    """
-
-    @staticmethod
-    def _get_default_mtls_endpoint(api_endpoint):
-        """Converts api endpoint to mTLS endpoint.
-
-        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
-        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
-        Args:
-            api_endpoint (Optional[str]): the api endpoint to convert.
-        Returns:
-            str: converted mTLS api endpoint.
-        """
-        if not api_endpoint:
-            return api_endpoint
-
-        mtls_endpoint_re = re.compile(
-            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
-        )
-
-        m = mtls_endpoint_re.match(api_endpoint)
-        name, mtls, sandbox, googledomain = m.groups()
-        if mtls or not googledomain:
-            return api_endpoint
-
-        if sandbox:
-            return api_endpoint.replace(
-                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
-            )
-
-        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
-
-    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
-    DEFAULT_ENDPOINT = "batch.googleapis.com"
-    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
-        DEFAULT_ENDPOINT
-    )
-
-    _DEFAULT_ENDPOINT_TEMPLATE = "batch.{UNIVERSE_DOMAIN}"
-    _DEFAULT_UNIVERSE = "googleapis.com"
-
-    @classmethod
-    def from_service_account_info(cls, info: dict, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            info.
-
-        Args:
-            info (dict): The service account private key info.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            BatchServiceClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_info(info)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    @classmethod
-    def from_service_account_file(cls, filename: str, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            file.
-
-        Args:
-            filename (str): The path to the service account private key json
-                file.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            BatchServiceClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_file(
-            filename)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    from_service_account_json = from_service_account_file
-
-    @property
-    def transport(self) -> BatchServiceTransport:
-        """Returns the transport used by the client instance.
-
-        Returns:
-            BatchServiceTransport: The transport used by the client
-                instance.
- """ - return self._transport - - @staticmethod - def job_path(project: str,location: str,job: str,) -> str: - """Returns a fully-qualified job string.""" - return "projects/{project}/locations/{location}/jobs/{job}".format(project=project, location=location, job=job, ) - - @staticmethod - def parse_job_path(path: str) -> Dict[str,str]: - """Parses a job path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/jobs/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def task_path(project: str,location: str,job: str,task_group: str,task: str,) -> str: - """Returns a fully-qualified task string.""" - return "projects/{project}/locations/{location}/jobs/{job}/taskGroups/{task_group}/tasks/{task}".format(project=project, location=location, job=job, task_group=task_group, task=task, ) - - @staticmethod - def parse_task_path(path: str) -> Dict[str,str]: - """Parses a task path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/jobs/(?P.+?)/taskGroups/(?P.+?)/tasks/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def task_group_path(project: str,location: str,job: str,task_group: str,) -> str: - """Returns a fully-qualified task_group string.""" - return "projects/{project}/locations/{location}/jobs/{job}/taskGroups/{task_group}".format(project=project, location=location, job=job, task_group=task_group, ) - - @staticmethod - def parse_task_group_path(path: str) -> Dict[str,str]: - """Parses a task_group path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/jobs/(?P.+?)/taskGroups/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return 
"projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Deprecated. Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. - - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. 
- - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"]. - """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = BatchServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = BatchServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = BatchServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. 
- """ - universe_domain = BatchServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. - """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, BatchServiceTransport, Callable[..., BatchServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the batch service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,BatchServiceTransport,Callable[..., BatchServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the BatchServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. 
Only if this property is not set and
-                ``transport`` was not explicitly provided, the endpoint is
-                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
-                variable, which can have one of the following values:
-                "always" (always use the default mTLS endpoint), "never" (always
-                use the default regular endpoint) and "auto" (auto-switch to the
-                default mTLS endpoint if a client certificate is present; this is
-                the default value).
-
-                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
-                is "true", then the ``client_cert_source`` property can be used
-                to provide a client certificate for mTLS transport. If
-                not provided, the default SSL client certificate will be used if
-                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
-                set, no client certificate will be used.
-
-                3. The ``universe_domain`` property can be used to override the
-                default "googleapis.com" universe. Note that the ``api_endpoint``
-                property still takes precedence; and ``universe_domain`` is
-                currently not supported for mTLS.
-
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
-        """
-        self._client_options = client_options
-        if isinstance(self._client_options, dict):
-            self._client_options = client_options_lib.from_dict(self._client_options)
-        if self._client_options is None:
-            self._client_options = client_options_lib.ClientOptions()
-        self._client_options = cast(client_options_lib.ClientOptions, self._client_options)
-
-        universe_domain_opt = getattr(self._client_options, 'universe_domain', None)
-
-        self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = BatchServiceClient._read_environment_variables()
-        self._client_cert_source = BatchServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert)
-        self._universe_domain = BatchServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env)
-        self._api_endpoint = None  # updated below, depending on `transport`
-
-        # Initialize the universe domain validation.
-        self._is_universe_domain_valid = False
-
-        if CLIENT_LOGGING_SUPPORTED:  # pragma: NO COVER
-            # Setup logging.
-            client_logging.initialize_logging()
-
-        api_key_value = getattr(self._client_options, "api_key", None)
-        if api_key_value and credentials:
-            raise ValueError("client_options.api_key and credentials are mutually exclusive")
-
-        # Save or instantiate the transport.
-        # Ordinarily, we provide the transport, but allowing a custom transport
-        # instance provides an extensibility point for unusual situations.
-        transport_provided = isinstance(transport, BatchServiceTransport)
-        if transport_provided:
-            # transport is a BatchServiceTransport instance.
-            if credentials or self._client_options.credentials_file or api_key_value:
-                raise ValueError("When providing a transport instance, "
-                                 "provide its credentials directly.")
-            if self._client_options.scopes:
-                raise ValueError(
-                    "When providing a transport instance, provide its scopes "
-                    "directly."
- ) - self._transport = cast(BatchServiceTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - BatchServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[BatchServiceTransport], Callable[..., BatchServiceTransport]] = ( - BatchServiceClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., BatchServiceTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.batch_v1.BatchServiceClient`.", - extra = { - "serviceName": "google.cloud.batch.v1.BatchService", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.cloud.batch.v1.BatchService", - "credentialsType": None, - } - ) - - def create_job(self, - request: Optional[Union[batch.CreateJobRequest, dict]] = None, - *, - parent: Optional[str] = None, - job: Optional[gcb_job.Job] = None, - job_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gcb_job.Job: - r"""Create a Job. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import batch_v1 - - def sample_create_job(): - # Create a client - client = batch_v1.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1.CreateJobRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_job(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.batch_v1.types.CreateJobRequest, dict]): - The request object. CreateJob Request. - parent (str): - Required. The parent resource name - where the Job will be created. 
Pattern: - "projects/{project}/locations/{location}" - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job (google.cloud.batch_v1.types.Job): - Required. The Job to create. - This corresponds to the ``job`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job_id (str): - ID used to uniquely identify the Job within its parent - scope. This field should contain at most 63 characters - and must start with lowercase characters. Only lowercase - characters, numbers and '-' are accepted. The '-' - character cannot be the first or the last one. A system - generated ID will be used if the field is not set. - - The job.name field in the request will be ignored and - the created resource name of the Job will be - "{parent}/jobs/{job_id}". - - This corresponds to the ``job_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.batch_v1.types.Job: - The Cloud Batch Job description. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, job, job_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, batch.CreateJobRequest): - request = batch.CreateJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if job is not None: - request.job = job - if job_id is not None: - request.job_id = job_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_job(self, - request: Optional[Union[batch.GetJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> job.Job: - r"""Get a Job specified by its resource name. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import batch_v1 - - def sample_get_job(): - # Create a client - client = batch_v1.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1.GetJobRequest( - name="name_value", - ) - - # Make the request - response = client.get_job(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.batch_v1.types.GetJobRequest, dict]): - The request object. GetJob Request. - name (str): - Required. Job name. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.batch_v1.types.Job: - The Cloud Batch Job description. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, batch.GetJobRequest): - request = batch.GetJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_job(self, - request: Optional[Union[batch.DeleteJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Delete a Job. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import batch_v1 - - def sample_delete_job(): - # Create a client - client = batch_v1.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1.DeleteJobRequest( - ) - - # Make the request - operation = client.delete_job(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.batch_v1.types.DeleteJobRequest, dict]): - The request object. DeleteJob Request. - name (str): - Job name. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, batch.DeleteJobRequest): - request = batch.DeleteJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=batch.OperationMetadata, - ) - - # Done; return the response. 
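-        # The wrapped future resolves to `empty_pb2.Empty` once the deletion
-        # finishes. A typical (illustrative) usage on the caller's side:
-        #
-        #     operation = client.delete_job(name="projects/my-project/locations/us-central1/jobs/my-job")
-        #     operation.result()  # blocks until the job is deleted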
- return response - - def cancel_job(self, - request: Optional[Union[batch.CancelJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Cancel a Job. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import batch_v1 - - def sample_cancel_job(): - # Create a client - client = batch_v1.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1.CancelJobRequest( - name="name_value", - ) - - # Make the request - operation = client.cancel_job(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.batch_v1.types.CancelJobRequest, dict]): - The request object. CancelJob Request. - name (str): - Required. Job name. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.batch_v1.types.CancelJobResponse` - Response to the CancelJob request. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, batch.CancelJobRequest): - request = batch.CancelJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.cancel_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
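-        # The raw response is a `google.longrunning.Operation`; `operation.from_gapic`
-        # below wraps it in a future whose `result()` yields `batch.CancelJobResponse`
-        # once the cancellation completes.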
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - batch.CancelJobResponse, - metadata_type=batch.OperationMetadata, - ) - - # Done; return the response. - return response - - def list_jobs(self, - request: Optional[Union[batch.ListJobsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListJobsPager: - r"""List all Jobs for a project within a region. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import batch_v1 - - def sample_list_jobs(): - # Create a client - client = batch_v1.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1.ListJobsRequest( - ) - - # Make the request - page_result = client.list_jobs(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.batch_v1.types.ListJobsRequest, dict]): - The request object. ListJob Request. - parent (str): - Parent path. - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.batch_v1.services.batch_service.pagers.ListJobsPager: - ListJob Response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, batch.ListJobsRequest): - request = batch.ListJobsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_jobs] - - # Certain fields should be provided within the metadata header; - # add these here. 
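-        # `to_grpc_metadata` serializes the routing parameters into the
-        # `x-goog-request-params` header. For example, a request whose parent is
-        # "projects/my-project/locations/us-central1" (an illustrative value)
-        # yields roughly:
-        #
-        #     ("x-goog-request-params", "parent=projects/my-project/locations/us-central1")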
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListJobsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_task(self, - request: Optional[Union[batch.GetTaskRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> task.Task: - r"""Return a single Task. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import batch_v1 - - def sample_get_task(): - # Create a client - client = batch_v1.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1.GetTaskRequest( - name="name_value", - ) - - # Make the request - response = client.get_task(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.batch_v1.types.GetTaskRequest, dict]): - The request object. Request for a single Task by name. - name (str): - Required. Task name. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.batch_v1.types.Task: - A Cloud Batch task. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, batch.GetTaskRequest): - request = batch.GetTaskRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
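-        # The wrapped method carries the default retry/timeout policy from the
-        # service config; the `retry` and `timeout` arguments above override it.
-        # An explicit policy might look like this sketch (illustrative values):
-        #
-        #     custom_retry = retries.Retry(
-        #         initial=0.1, maximum=10.0, multiplier=1.3,
-        #         predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
-        #     )
-        #     client.get_task(name=task_name, retry=custom_retry)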
- rpc = self._transport._wrapped_methods[self._transport.get_task] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_tasks(self, - request: Optional[Union[batch.ListTasksRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListTasksPager: - r"""List Tasks associated with a job. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import batch_v1 - - def sample_list_tasks(): - # Create a client - client = batch_v1.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1.ListTasksRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_tasks(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.batch_v1.types.ListTasksRequest, dict]): - The request object. ListTasks Request. - parent (str): - Required. Name of a TaskGroup from which Tasks are being - requested. Pattern: - "projects/{project}/locations/{location}/jobs/{job}/taskGroups/{task_group}" - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.batch_v1.services.batch_service.pagers.ListTasksPager: - ListTasks Response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, batch.ListTasksRequest): - request = batch.ListTasksRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
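-        # For example, the flattened call
-        #     client.list_tasks(parent="projects/my-project/locations/us-central1/jobs/my-job/taskGroups/group0")
-        # (an illustrative resource name) is equivalent to passing a
-        # `ListTasksRequest` with `parent` already set.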
- if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_tasks] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListTasksPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "BatchServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
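Given the warning on ``__exit__`` below, a minimal sketch of context-manager use with a dedicated (unshared) transport; the project and region are hypothetical:

.. code-block:: python

    from google.cloud import batch_v1

    # Exiting the block closes the underlying transport, so only use
    # this pattern when no other client shares that transport.
    with batch_v1.BatchServiceClient() as client:
        parent = "projects/my-project/locations/us-central1"
        for job in client.list_jobs(parent=parent):
            print(job.name)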
- return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
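Because these operations-mixin requests are plain protobuf rather than proto-plus, a ``dict`` is coerced via keyword expansion, exactly as the ``isinstance(request, dict)`` branch above shows. A sketch with a hypothetical operation name:

.. code-block:: python

    from google.cloud import batch_v1

    client = batch_v1.BatchServiceClient()

    # Hypothetical operation name, e.g. as returned by delete_job.
    op_name = "projects/my-project/locations/us-central1/operations/123"

    # The dict is expanded into a GetOperationRequest for us.
    operation = client.get_operation({"name": op_name})
    print(operation.name, "done:", operation.done)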
- rpc = self._transport._wrapped_methods[self._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
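Since cancellation is best-effort and returns ``None``, a follow-up ``get_operation`` is the only way to confirm the outcome. A sketch, with the operation name again hypothetical:

.. code-block:: python

    from google.cloud import batch_v1

    client = batch_v1.BatchServiceClient()
    op_name = "projects/my-project/locations/us-central1/operations/123"

    # Best effort only; the server may still run the operation to
    # completion despite this request.
    client.cancel_operation({"name": op_name})
    print("done:", client.get_operation({"name": op_name}).done)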
- if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "BatchServiceClient", -) diff --git a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/services/batch_service/pagers.py b/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/services/batch_service/pagers.py deleted file mode 100644 index 32beef6ef851..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/services/batch_service/pagers.py +++ /dev/null @@ -1,307 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
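Note that ``ListLocationsRequest.name`` takes the resource that owns the locations (the project), not an individual location. A minimal sketch with a hypothetical project ID:

.. code-block:: python

    from google.cloud import batch_v1

    client = batch_v1.BatchServiceClient()

    # The result is a plain ListLocationsResponse, not a pager; page
    # through it manually via page_token if needed.
    response = client.list_locations({"name": "projects/my-project"})
    for location in response.locations:
        print(location.location_id, location.name)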
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.batch_v1.types import batch -from google.cloud.batch_v1.types import job -from google.cloud.batch_v1.types import task - - -class ListJobsPager: - """A pager for iterating through ``list_jobs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.batch_v1.types.ListJobsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``jobs`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListJobs`` requests and continue to iterate - through the ``jobs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.batch_v1.types.ListJobsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., batch.ListJobsResponse], - request: batch.ListJobsRequest, - response: batch.ListJobsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.batch_v1.types.ListJobsRequest): - The initial request object. - response (google.cloud.batch_v1.types.ListJobsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = batch.ListJobsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[batch.ListJobsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[job.Job]: - for page in self.pages: - yield from page.jobs - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListJobsAsyncPager: - """A pager for iterating through ``list_jobs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.batch_v1.types.ListJobsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``jobs`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListJobs`` requests and continue to iterate - through the ``jobs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.batch_v1.types.ListJobsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[batch.ListJobsResponse]], - request: batch.ListJobsRequest, - response: batch.ListJobsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.batch_v1.types.ListJobsRequest): - The initial request object. - response (google.cloud.batch_v1.types.ListJobsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = batch.ListJobsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[batch.ListJobsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[job.Job]: - async def async_generator(): - async for page in self.pages: - for response in page.jobs: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListTasksPager: - """A pager for iterating through ``list_tasks`` requests. 
- - This class thinly wraps an initial - :class:`google.cloud.batch_v1.types.ListTasksResponse` object, and - provides an ``__iter__`` method to iterate through its - ``tasks`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListTasks`` requests and continue to iterate - through the ``tasks`` field on the - corresponding responses. - - All the usual :class:`google.cloud.batch_v1.types.ListTasksResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., batch.ListTasksResponse], - request: batch.ListTasksRequest, - response: batch.ListTasksResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.batch_v1.types.ListTasksRequest): - The initial request object. - response (google.cloud.batch_v1.types.ListTasksResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = batch.ListTasksRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[batch.ListTasksResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[task.Task]: - for page in self.pages: - yield from page.tasks - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListTasksAsyncPager: - """A pager for iterating through ``list_tasks`` requests. - - This class thinly wraps an initial - :class:`google.cloud.batch_v1.types.ListTasksResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``tasks`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListTasks`` requests and continue to iterate - through the ``tasks`` field on the - corresponding responses. - - All the usual :class:`google.cloud.batch_v1.types.ListTasksResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[batch.ListTasksResponse]], - request: batch.ListTasksRequest, - response: batch.ListTasksResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. 
- - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.batch_v1.types.ListTasksRequest): - The initial request object. - response (google.cloud.batch_v1.types.ListTasksResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = batch.ListTasksRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[batch.ListTasksResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[task.Task]: - async def async_generator(): - async for page in self.pages: - for response in page.tasks: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/services/batch_service/transports/README.rst b/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/services/batch_service/transports/README.rst deleted file mode 100644 index c2f8f5a0e133..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/services/batch_service/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`BatchServiceTransport` is the ABC for all transports. -- public child `BatchServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `BatchServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseBatchServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `BatchServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/services/batch_service/transports/__init__.py b/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/services/batch_service/transports/__init__.py deleted file mode 100644 index b14b7c9b9878..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/services/batch_service/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
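The async pagers above are consumed with ``async for``; note that the method on the async client must itself be awaited first to obtain the pager. A sketch assuming the same hypothetical TaskGroup name as earlier:

.. code-block:: python

    import asyncio

    from google.cloud import batch_v1

    async def main():
        client = batch_v1.BatchServiceAsyncClient()
        parent = ("projects/my-project/locations/us-central1"
                  "/jobs/my-job/taskGroups/group0")

        # __aiter__ awaits each follow-up ListTasks call transparently.
        async for task in await client.list_tasks(parent=parent):
            print(task.name)

    asyncio.run(main())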
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import BatchServiceTransport -from .grpc import BatchServiceGrpcTransport -from .grpc_asyncio import BatchServiceGrpcAsyncIOTransport -from .rest import BatchServiceRestTransport -from .rest import BatchServiceRestInterceptor - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[BatchServiceTransport]] -_transport_registry['grpc'] = BatchServiceGrpcTransport -_transport_registry['grpc_asyncio'] = BatchServiceGrpcAsyncIOTransport -_transport_registry['rest'] = BatchServiceRestTransport - -__all__ = ( - 'BatchServiceTransport', - 'BatchServiceGrpcTransport', - 'BatchServiceGrpcAsyncIOTransport', - 'BatchServiceRestTransport', - 'BatchServiceRestInterceptor', -) diff --git a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/services/batch_service/transports/base.py b/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/services/batch_service/transports/base.py deleted file mode 100644 index 040e2a505ae1..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/services/batch_service/transports/base.py +++ /dev/null @@ -1,367 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
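The registry assembled above is what the client's ``transport`` argument resolves against. A brief sketch of selecting a transport by registry key:

.. code-block:: python

    from google.cloud import batch_v1

    # Any of the registered keys: "grpc", "grpc_asyncio", or "rest".
    client = batch_v1.BatchServiceClient(transport="rest")
    print(type(client.transport).__name__)  # BatchServiceRestTransport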
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.batch_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.batch_v1.types import batch -from google.cloud.batch_v1.types import job -from google.cloud.batch_v1.types import job as gcb_job -from google.cloud.batch_v1.types import task -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class BatchServiceTransport(abc.ABC): - """Abstract transport class for BatchService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'batch.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'batch.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. 
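A sketch of constructing a concrete transport with a credentials file; the file path and quota project are hypothetical, and supplying ``credentials`` as well would raise ``DuplicateCredentialArgs`` in the mutual-exclusion check just below:

.. code-block:: python

    from google.cloud.batch_v1.services.batch_service.transports import (
        BatchServiceGrpcTransport,
    )

    transport = BatchServiceGrpcTransport(
        credentials_file="service-account.json",  # hypothetical path
        quota_project_id="my-billing-project",    # hypothetical project
    )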
- if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.create_job: gapic_v1.method.wrap_method( - self.create_job, - default_timeout=60.0, - client_info=client_info, - ), - self.get_job: gapic_v1.method.wrap_method( - self.get_job, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.delete_job: gapic_v1.method.wrap_method( - self.delete_job, - default_timeout=60.0, - client_info=client_info, - ), - self.cancel_job: gapic_v1.method.wrap_method( - self.cancel_job, - default_timeout=60.0, - client_info=client_info, - ), - self.list_jobs: gapic_v1.method.wrap_method( - self.list_jobs, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_task: gapic_v1.method.wrap_method( - self.get_task, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_tasks: gapic_v1.method.wrap_method( - self.list_tasks, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_location: gapic_v1.method.wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - self.list_locations: gapic_v1.method.wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: gapic_v1.method.wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: gapic_v1.method.wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: gapic_v1.method.wrap_method( - self.get_operation, - 
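The wrapped methods above bake in the default retry policy: exponential backoff from 1s toward a 10s ceiling at a 1.3 multiplier, retrying only ``ServiceUnavailable``, with a 60s overall deadline. Passing ``retry=`` on an individual call overrides that default; in the sketch below, retrying ``DeadlineExceeded`` as well is an assumption, not the library default:

.. code-block:: python

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries
    from google.cloud import batch_v1

    client = batch_v1.BatchServiceClient()

    # Mirrors the default policy above, widened to one extra error type.
    custom_retry = retries.Retry(
        initial=1.0,
        maximum=10.0,
        multiplier=1.3,
        predicate=retries.if_exception_type(
            core_exceptions.ServiceUnavailable,
            core_exceptions.DeadlineExceeded,
        ),
        deadline=60.0,
    )

    job = client.get_job(
        name="projects/my-project/locations/us-central1/jobs/my-job",
        retry=custom_retry,
        timeout=30.0,
    )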
default_timeout=None, - client_info=client_info, - ), - self.list_operations: gapic_v1.method.wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! - """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def create_job(self) -> Callable[ - [batch.CreateJobRequest], - Union[ - gcb_job.Job, - Awaitable[gcb_job.Job] - ]]: - raise NotImplementedError() - - @property - def get_job(self) -> Callable[ - [batch.GetJobRequest], - Union[ - job.Job, - Awaitable[job.Job] - ]]: - raise NotImplementedError() - - @property - def delete_job(self) -> Callable[ - [batch.DeleteJobRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def cancel_job(self) -> Callable[ - [batch.CancelJobRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_jobs(self) -> Callable[ - [batch.ListJobsRequest], - Union[ - batch.ListJobsResponse, - Awaitable[batch.ListJobsResponse] - ]]: - raise NotImplementedError() - - @property - def get_task(self) -> Callable[ - [batch.GetTaskRequest], - Union[ - task.Task, - Awaitable[task.Task] - ]]: - raise NotImplementedError() - - @property - def list_tasks(self) -> Callable[ - [batch.ListTasksRequest], - Union[ - batch.ListTasksResponse, - Awaitable[batch.ListTasksResponse] - ]]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def delete_operation( - self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def get_location(self, - ) -> Callable[ - [locations_pb2.GetLocationRequest], - Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], - ]: - raise NotImplementedError() - - @property - def list_locations(self, - ) -> Callable[ - [locations_pb2.ListLocationsRequest], - Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'BatchServiceTransport', -) diff --git a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/services/batch_service/transports/grpc.py b/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/services/batch_service/transports/grpc.py deleted file mode 100644 index 9e4c82a65a19..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/services/batch_service/transports/grpc.py +++ /dev/null @@ -1,636 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# 
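Concrete transports fill in these abstract members; two of them are handy for introspection. A brief sketch:

.. code-block:: python

    from google.cloud import batch_v1

    client = batch_v1.BatchServiceClient()

    # "grpc", "grpc_asyncio", or "rest", depending on the transport.
    print(client.transport.kind)

    # The client that services the long-running operations returned by
    # delete_job and cancel_job.
    ops_client = client.transport.operations_client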
-# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json -import logging as std_logging -import pickle -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore - -from google.cloud.batch_v1.types import batch -from google.cloud.batch_v1.types import job -from google.cloud.batch_v1.types import job as gcb_job -from google.cloud.batch_v1.types import task -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from .base import BatchServiceTransport, DEFAULT_CLIENT_INFO - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER - def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.batch.v1.BatchService", - "rpcName": client_call_details.method, - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - - response = continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = response.trailing_metadata() - # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = response.result() - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - 
"metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response for {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.batch.v1.BatchService", - "rpcName": client_call_details.method, - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class BatchServiceGrpcTransport(BatchServiceTransport): - """gRPC backend transport for BatchService. - - Google Batch Service. - The service manages user submitted batch jobs and allocates - Google Compute Engine VM instances to run the jobs. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'batch.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'batch.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if a ``channel`` instance is provided. - channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. 
- client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) - - # Wrap messages. 
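When a ready-made ``grpc.Channel`` is passed, the branch above ignores any credentials and uses the channel as-is. A hedged sketch; the endpoint shown is the public default, and ``create_channel`` still needs application default credentials to be resolvable:

.. code-block:: python

    from google.cloud import batch_v1
    from google.cloud.batch_v1.services.batch_service.transports import (
        BatchServiceGrpcTransport,
    )

    # create_channel resolves application default credentials itself.
    channel = BatchServiceGrpcTransport.create_channel("batch.googleapis.com:443")

    transport = BatchServiceGrpcTransport(channel=channel)
    client = batch_v1.BatchServiceClient(transport=transport)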
This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'batch.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def create_job(self) -> Callable[ - [batch.CreateJobRequest], - gcb_job.Job]: - r"""Return a callable for the create job method over gRPC. - - Create a Job. - - Returns: - Callable[[~.CreateJobRequest], - ~.Job]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_job' not in self._stubs: - self._stubs['create_job'] = self._logged_channel.unary_unary( - '/google.cloud.batch.v1.BatchService/CreateJob', - request_serializer=batch.CreateJobRequest.serialize, - response_deserializer=gcb_job.Job.deserialize, - ) - return self._stubs['create_job'] - - @property - def get_job(self) -> Callable[ - [batch.GetJobRequest], - job.Job]: - r"""Return a callable for the get job method over gRPC. - - Get a Job specified by its resource name. - - Returns: - Callable[[~.GetJobRequest], - ~.Job]: - A function that, when called, will call the underlying RPC - on the server. 
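Extra keyword arguments to ``create_channel`` flow through to the channel creation, so channel options can be tuned the same way the transport does with its unlimited message-size settings. A sketch; the 64 MiB receive limit is an assumed tuning value, not a recommendation:

.. code-block:: python

    from google.cloud.batch_v1.services.batch_service.transports import (
        BatchServiceGrpcTransport,
    )

    channel = BatchServiceGrpcTransport.create_channel(
        "batch.googleapis.com:443",
        options=[("grpc.max_receive_message_length", 64 * 1024 * 1024)],
    )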
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_job' not in self._stubs: - self._stubs['get_job'] = self._logged_channel.unary_unary( - '/google.cloud.batch.v1.BatchService/GetJob', - request_serializer=batch.GetJobRequest.serialize, - response_deserializer=job.Job.deserialize, - ) - return self._stubs['get_job'] - - @property - def delete_job(self) -> Callable[ - [batch.DeleteJobRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete job method over gRPC. - - Delete a Job. - - Returns: - Callable[[~.DeleteJobRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_job' not in self._stubs: - self._stubs['delete_job'] = self._logged_channel.unary_unary( - '/google.cloud.batch.v1.BatchService/DeleteJob', - request_serializer=batch.DeleteJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_job'] - - @property - def cancel_job(self) -> Callable[ - [batch.CancelJobRequest], - operations_pb2.Operation]: - r"""Return a callable for the cancel job method over gRPC. - - Cancel a Job. - - Returns: - Callable[[~.CancelJobRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'cancel_job' not in self._stubs: - self._stubs['cancel_job'] = self._logged_channel.unary_unary( - '/google.cloud.batch.v1.BatchService/CancelJob', - request_serializer=batch.CancelJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['cancel_job'] - - @property - def list_jobs(self) -> Callable[ - [batch.ListJobsRequest], - batch.ListJobsResponse]: - r"""Return a callable for the list jobs method over gRPC. - - List all Jobs for a project within a region. - - Returns: - Callable[[~.ListJobsRequest], - ~.ListJobsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_jobs' not in self._stubs: - self._stubs['list_jobs'] = self._logged_channel.unary_unary( - '/google.cloud.batch.v1.BatchService/ListJobs', - request_serializer=batch.ListJobsRequest.serialize, - response_deserializer=batch.ListJobsResponse.deserialize, - ) - return self._stubs['list_jobs'] - - @property - def get_task(self) -> Callable[ - [batch.GetTaskRequest], - task.Task]: - r"""Return a callable for the get task method over gRPC. - - Return a single Task. - - Returns: - Callable[[~.GetTaskRequest], - ~.Task]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_task' not in self._stubs: - self._stubs['get_task'] = self._logged_channel.unary_unary( - '/google.cloud.batch.v1.BatchService/GetTask', - request_serializer=batch.GetTaskRequest.serialize, - response_deserializer=task.Task.deserialize, - ) - return self._stubs['get_task'] - - @property - def list_tasks(self) -> Callable[ - [batch.ListTasksRequest], - batch.ListTasksResponse]: - r"""Return a callable for the list tasks method over gRPC. - - List Tasks associated with a job. - - Returns: - Callable[[~.ListTasksRequest], - ~.ListTasksResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_tasks' not in self._stubs: - self._stubs['list_tasks'] = self._logged_channel.unary_unary( - '/google.cloud.batch.v1.BatchService/ListTasks', - request_serializer=batch.ListTasksRequest.serialize, - response_deserializer=batch.ListTasksResponse.deserialize, - ) - return self._stubs['list_tasks'] - - def close(self): - self._logged_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_location" not in self._stubs: - self._stubs["get_location"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'BatchServiceGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/services/batch_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/services/batch_service/transports/grpc_asyncio.py deleted file mode 100644 index bea0f09fe0c1..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/services/batch_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,752 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#
-import inspect
-import json
-import pickle
-import logging as std_logging
-import warnings
-from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
-
-from google.api_core import gapic_v1
-from google.api_core import grpc_helpers_async
-from google.api_core import exceptions as core_exceptions
-from google.api_core import retry_async as retries
-from google.api_core import operations_v1
-from google.auth import credentials as ga_credentials # type: ignore
-from google.auth.transport.grpc import SslCredentials # type: ignore
-from google.protobuf.json_format import MessageToJson
-import google.protobuf.message
-
-import grpc # type: ignore
-import proto # type: ignore
-from grpc.experimental import aio # type: ignore
-
-from google.cloud.batch_v1.types import batch
-from google.cloud.batch_v1.types import job
-from google.cloud.batch_v1.types import job as gcb_job
-from google.cloud.batch_v1.types import task
-from google.cloud.location import locations_pb2 # type: ignore
-from google.longrunning import operations_pb2 # type: ignore
-from .base import BatchServiceTransport, DEFAULT_CLIENT_INFO
-from .grpc import BatchServiceGrpcTransport
-
-try:
-    from google.api_core import client_logging # type: ignore
-    CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER
-except ImportError: # pragma: NO COVER
-    CLIENT_LOGGING_SUPPORTED = False
-
-_LOGGER = std_logging.getLogger(__name__)
-
-
-class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER
-    async def intercept_unary_unary(self, continuation, client_call_details, request):
-        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG)
-        if logging_enabled: # pragma: NO COVER
-            request_metadata = client_call_details.metadata
-            if isinstance(request, proto.Message):
-                request_payload = type(request).to_json(request)
-            elif isinstance(request, google.protobuf.message.Message):
-                request_payload = MessageToJson(request)
-            else:
-                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
-
-            request_metadata = {
-                key: value.decode("utf-8") if isinstance(value, bytes) else value
-                for key, value in request_metadata
-            }
-            grpc_request = {
-                "payload": request_payload,
-                "requestMethod": "grpc",
-                "metadata": dict(request_metadata),
-            }
-            _LOGGER.debug(
-                f"Sending request for {client_call_details.method}",
-                extra = {
-                    "serviceName": "google.cloud.batch.v1.BatchService",
-                    "rpcName": str(client_call_details.method),
-                    "request": grpc_request,
-                    "metadata": grpc_request["metadata"],
-                },
-            )
-        response = await continuation(client_call_details, request)
-        if logging_enabled: # pragma: NO COVER
-            response_metadata = await response.trailing_metadata()
-            # Convert gRPC metadata to a list of tuples
-            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
-            result = await response
-            if isinstance(result, proto.Message):
-                response_payload = type(result).to_json(result)
-            elif isinstance(result, google.protobuf.message.Message):
-                response_payload = MessageToJson(result)
-            else:
-                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
-            grpc_response = {
-                "payload": response_payload,
-                "metadata": metadata,
-                "status": "OK",
-            }
-            _LOGGER.debug(
-                f"Received response to rpc {client_call_details.method}.",
-                extra = {
-                    "serviceName": "google.cloud.batch.v1.BatchService",
-                    "rpcName": str(client_call_details.method),
-                    "response": grpc_response,
-                    "metadata": grpc_response["metadata"],
-                },
-            )
-        return response
-
-
-class BatchServiceGrpcAsyncIOTransport(BatchServiceTransport):
-    """gRPC AsyncIO backend transport for BatchService.
-
-    Google Batch Service.
-    The service manages user submitted batch jobs and allocates
-    Google Compute Engine VM instances to run the jobs.
-
-    This class defines the same methods as the primary client, so the
-    primary client can load the underlying transport implementation
-    and call it.
-
-    It sends protocol buffers over the wire using gRPC (which is built on
-    top of HTTP/2); the ``grpcio`` package must be installed.
-    """
-
-    _grpc_channel: aio.Channel
-    _stubs: Dict[str, Callable] = {}
-
-    @classmethod
-    def create_channel(cls,
-                       host: str = 'batch.googleapis.com',
-                       credentials: Optional[ga_credentials.Credentials] = None,
-                       credentials_file: Optional[str] = None,
-                       scopes: Optional[Sequence[str]] = None,
-                       quota_project_id: Optional[str] = None,
-                       **kwargs) -> aio.Channel:
-        """Create and return a gRPC AsyncIO channel object.
-        Args:
-            host (Optional[str]): The host for the channel to use.
-            credentials (Optional[~.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify this application to the service. If
-                none are specified, the client will attempt to ascertain
-                the credentials from the environment.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            kwargs (Optional[dict]): Keyword arguments, which are passed to the
-                channel creation.
-        Returns:
-            aio.Channel: A gRPC AsyncIO channel object.
-        """
-
-        return grpc_helpers_async.create_channel(
-            host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            quota_project_id=quota_project_id,
-            default_scopes=cls.AUTH_SCOPES,
-            scopes=scopes,
-            default_host=cls.DEFAULT_HOST,
-            **kwargs
-        )
-
-    def __init__(self, *,
-            host: str = 'batch.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None,
-            api_mtls_endpoint: Optional[str] = None,
-            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
-            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            api_audience: Optional[str] = None,
-            ) -> None:
-        """Instantiate the transport.
-
-        Args:
-            host (Optional[str]):
-                The hostname to connect to (default: 'batch.googleapis.com').
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-                This argument is ignored if a ``channel`` instance is provided.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if a ``channel`` instance is provided.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]):
-                A ``Channel`` instance through which to make calls, or a Callable
-                that constructs and returns one. If set to None, ``self.create_channel``
-                is used to create the channel. If a Callable is given, it will be called
-                with the same arguments as used in ``self.create_channel``.
-            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
-                If provided, it overrides the ``host`` argument and tries to create
-                a mutual TLS channel with client SSL credentials from
-                ``client_cert_source`` or application default SSL credentials.
-            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                Deprecated. A callback to provide client SSL certificate bytes and
-                private key bytes, both in PEM format. It is ignored if
-                ``api_mtls_endpoint`` is None.
-            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
-                for the grpc channel. It is ignored if a ``channel`` instance is provided.
-            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                A callback to provide client certificate bytes and private key bytes,
-                both in PEM format. It is used to configure a mutual TLS channel. It is
-                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-            always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
-                be used for service account credentials.
-
-        Raises:
-            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
-                creation failed for any reason.
-            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
-                and ``credentials_file`` are passed.
-        """
-        self._grpc_channel = None
-        self._ssl_channel_credentials = ssl_channel_credentials
-        self._stubs: Dict[str, Callable] = {}
-        self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None
-
-        if api_mtls_endpoint:
-            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
-        if client_cert_source:
-            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
-
-        if isinstance(channel, aio.Channel):
-            # Ignore credentials if a channel was passed.
-            credentials = None
-            self._ignore_credentials = True
-            # If a channel was explicitly provided, set it.
-            self._grpc_channel = channel
-            self._ssl_channel_credentials = None
-        else:
-            if api_mtls_endpoint:
-                host = api_mtls_endpoint
-
-                # Create SSL credentials with client_cert_source or application
-                # default SSL credentials.
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def create_job(self) -> Callable[ - [batch.CreateJobRequest], - Awaitable[gcb_job.Job]]: - r"""Return a callable for the create job method over gRPC. - - Create a Job. - - Returns: - Callable[[~.CreateJobRequest], - Awaitable[~.Job]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_job' not in self._stubs: - self._stubs['create_job'] = self._logged_channel.unary_unary( - '/google.cloud.batch.v1.BatchService/CreateJob', - request_serializer=batch.CreateJobRequest.serialize, - response_deserializer=gcb_job.Job.deserialize, - ) - return self._stubs['create_job'] - - @property - def get_job(self) -> Callable[ - [batch.GetJobRequest], - Awaitable[job.Job]]: - r"""Return a callable for the get job method over gRPC. - - Get a Job specified by its resource name. - - Returns: - Callable[[~.GetJobRequest], - Awaitable[~.Job]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_job' not in self._stubs: - self._stubs['get_job'] = self._logged_channel.unary_unary( - '/google.cloud.batch.v1.BatchService/GetJob', - request_serializer=batch.GetJobRequest.serialize, - response_deserializer=job.Job.deserialize, - ) - return self._stubs['get_job'] - - @property - def delete_job(self) -> Callable[ - [batch.DeleteJobRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete job method over gRPC. - - Delete a Job. - - Returns: - Callable[[~.DeleteJobRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_job' not in self._stubs: - self._stubs['delete_job'] = self._logged_channel.unary_unary( - '/google.cloud.batch.v1.BatchService/DeleteJob', - request_serializer=batch.DeleteJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_job'] - - @property - def cancel_job(self) -> Callable[ - [batch.CancelJobRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the cancel job method over gRPC. - - Cancel a Job. - - Returns: - Callable[[~.CancelJobRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'cancel_job' not in self._stubs: - self._stubs['cancel_job'] = self._logged_channel.unary_unary( - '/google.cloud.batch.v1.BatchService/CancelJob', - request_serializer=batch.CancelJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['cancel_job'] - - @property - def list_jobs(self) -> Callable[ - [batch.ListJobsRequest], - Awaitable[batch.ListJobsResponse]]: - r"""Return a callable for the list jobs method over gRPC. - - List all Jobs for a project within a region. - - Returns: - Callable[[~.ListJobsRequest], - Awaitable[~.ListJobsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_jobs' not in self._stubs: - self._stubs['list_jobs'] = self._logged_channel.unary_unary( - '/google.cloud.batch.v1.BatchService/ListJobs', - request_serializer=batch.ListJobsRequest.serialize, - response_deserializer=batch.ListJobsResponse.deserialize, - ) - return self._stubs['list_jobs'] - - @property - def get_task(self) -> Callable[ - [batch.GetTaskRequest], - Awaitable[task.Task]]: - r"""Return a callable for the get task method over gRPC. - - Return a single Task. - - Returns: - Callable[[~.GetTaskRequest], - Awaitable[~.Task]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_task' not in self._stubs: - self._stubs['get_task'] = self._logged_channel.unary_unary( - '/google.cloud.batch.v1.BatchService/GetTask', - request_serializer=batch.GetTaskRequest.serialize, - response_deserializer=task.Task.deserialize, - ) - return self._stubs['get_task'] - - @property - def list_tasks(self) -> Callable[ - [batch.ListTasksRequest], - Awaitable[batch.ListTasksResponse]]: - r"""Return a callable for the list tasks method over gRPC. - - List Tasks associated with a job. - - Returns: - Callable[[~.ListTasksRequest], - Awaitable[~.ListTasksResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_tasks' not in self._stubs: - self._stubs['list_tasks'] = self._logged_channel.unary_unary( - '/google.cloud.batch.v1.BatchService/ListTasks', - request_serializer=batch.ListTasksRequest.serialize, - response_deserializer=batch.ListTasksResponse.deserialize, - ) - return self._stubs['list_tasks'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.create_job: self._wrap_method( - self.create_job, - default_timeout=60.0, - client_info=client_info, - ), - self.get_job: self._wrap_method( - self.get_job, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.delete_job: self._wrap_method( - self.delete_job, - default_timeout=60.0, - client_info=client_info, - ), - self.cancel_job: self._wrap_method( - self.cancel_job, - default_timeout=60.0, - client_info=client_info, - ), - self.list_jobs: self._wrap_method( - self.list_jobs, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_task: self._wrap_method( - self.get_task, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_tasks: self._wrap_method( - self.list_tasks, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - 
predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_location: self._wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - self.list_locations: self._wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: self._wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: self._wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: self._wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: self._wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_location" not in self._stubs: - self._stubs["get_location"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - -__all__ = ( - 'BatchServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/services/batch_service/transports/rest.py b/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/services/batch_service/transports/rest.py deleted file mode 100644 index f43b3a4df7bd..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/services/batch_service/transports/rest.py +++ /dev/null @@ -1,2225 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import logging -import json # type: ignore - -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.api_core import operations_v1 -from google.cloud.location import locations_pb2 # type: ignore - -from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - - -from google.cloud.batch_v1.types import batch -from google.cloud.batch_v1.types import job -from google.cloud.batch_v1.types import job as gcb_job -from google.cloud.batch_v1.types import task -from google.longrunning import operations_pb2 # type: ignore - - -from .rest_base import _BaseBatchServiceRestTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = logging.getLogger(__name__) - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=f"requests@{requests_version}", -) - - -class BatchServiceRestInterceptor: - """Interceptor for BatchService. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the BatchServiceRestTransport. - - .. 
code-block:: python - class MyCustomBatchServiceInterceptor(BatchServiceRestInterceptor): - def pre_cancel_job(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_cancel_job(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_job(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_job(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_job(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_delete_job(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_job(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_job(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_task(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_task(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_jobs(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_jobs(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_tasks(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_tasks(self, response): - logging.log(f"Received response: {response}") - return response - - transport = BatchServiceRestTransport(interceptor=MyCustomBatchServiceInterceptor()) - client = BatchServiceClient(transport=transport) - - - """ - def pre_cancel_job(self, request: batch.CancelJobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[batch.CancelJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for cancel_job - - Override in a subclass to manipulate the request or metadata - before they are sent to the BatchService server. - """ - return request, metadata - - def post_cancel_job(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for cancel_job - - DEPRECATED. Please use the `post_cancel_job_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BatchService server but before - it is returned to user code. This `post_cancel_job` interceptor runs - before the `post_cancel_job_with_metadata` interceptor. - """ - return response - - def post_cancel_job_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for cancel_job - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BatchService server but before it is returned to user code. - - We recommend only using this `post_cancel_job_with_metadata` - interceptor in new development instead of the `post_cancel_job` interceptor. - When both interceptors are used, this `post_cancel_job_with_metadata` interceptor runs after the - `post_cancel_job` interceptor. The (possibly modified) response returned by - `post_cancel_job` will be passed to - `post_cancel_job_with_metadata`. 
- """ - return response, metadata - - def pre_create_job(self, request: batch.CreateJobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[batch.CreateJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_job - - Override in a subclass to manipulate the request or metadata - before they are sent to the BatchService server. - """ - return request, metadata - - def post_create_job(self, response: gcb_job.Job) -> gcb_job.Job: - """Post-rpc interceptor for create_job - - DEPRECATED. Please use the `post_create_job_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BatchService server but before - it is returned to user code. This `post_create_job` interceptor runs - before the `post_create_job_with_metadata` interceptor. - """ - return response - - def post_create_job_with_metadata(self, response: gcb_job.Job, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gcb_job.Job, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_job - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BatchService server but before it is returned to user code. - - We recommend only using this `post_create_job_with_metadata` - interceptor in new development instead of the `post_create_job` interceptor. - When both interceptors are used, this `post_create_job_with_metadata` interceptor runs after the - `post_create_job` interceptor. The (possibly modified) response returned by - `post_create_job` will be passed to - `post_create_job_with_metadata`. - """ - return response, metadata - - def pre_delete_job(self, request: batch.DeleteJobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[batch.DeleteJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_job - - Override in a subclass to manipulate the request or metadata - before they are sent to the BatchService server. - """ - return request, metadata - - def post_delete_job(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_job - - DEPRECATED. Please use the `post_delete_job_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BatchService server but before - it is returned to user code. This `post_delete_job` interceptor runs - before the `post_delete_job_with_metadata` interceptor. - """ - return response - - def post_delete_job_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for delete_job - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BatchService server but before it is returned to user code. - - We recommend only using this `post_delete_job_with_metadata` - interceptor in new development instead of the `post_delete_job` interceptor. - When both interceptors are used, this `post_delete_job_with_metadata` interceptor runs after the - `post_delete_job` interceptor. The (possibly modified) response returned by - `post_delete_job` will be passed to - `post_delete_job_with_metadata`. 
- """ - return response, metadata - - def pre_get_job(self, request: batch.GetJobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[batch.GetJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_job - - Override in a subclass to manipulate the request or metadata - before they are sent to the BatchService server. - """ - return request, metadata - - def post_get_job(self, response: job.Job) -> job.Job: - """Post-rpc interceptor for get_job - - DEPRECATED. Please use the `post_get_job_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BatchService server but before - it is returned to user code. This `post_get_job` interceptor runs - before the `post_get_job_with_metadata` interceptor. - """ - return response - - def post_get_job_with_metadata(self, response: job.Job, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[job.Job, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_job - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BatchService server but before it is returned to user code. - - We recommend only using this `post_get_job_with_metadata` - interceptor in new development instead of the `post_get_job` interceptor. - When both interceptors are used, this `post_get_job_with_metadata` interceptor runs after the - `post_get_job` interceptor. The (possibly modified) response returned by - `post_get_job` will be passed to - `post_get_job_with_metadata`. - """ - return response, metadata - - def pre_get_task(self, request: batch.GetTaskRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[batch.GetTaskRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_task - - Override in a subclass to manipulate the request or metadata - before they are sent to the BatchService server. - """ - return request, metadata - - def post_get_task(self, response: task.Task) -> task.Task: - """Post-rpc interceptor for get_task - - DEPRECATED. Please use the `post_get_task_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BatchService server but before - it is returned to user code. This `post_get_task` interceptor runs - before the `post_get_task_with_metadata` interceptor. - """ - return response - - def post_get_task_with_metadata(self, response: task.Task, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[task.Task, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_task - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BatchService server but before it is returned to user code. - - We recommend only using this `post_get_task_with_metadata` - interceptor in new development instead of the `post_get_task` interceptor. - When both interceptors are used, this `post_get_task_with_metadata` interceptor runs after the - `post_get_task` interceptor. The (possibly modified) response returned by - `post_get_task` will be passed to - `post_get_task_with_metadata`. 
- """ - return response, metadata - - def pre_list_jobs(self, request: batch.ListJobsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[batch.ListJobsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_jobs - - Override in a subclass to manipulate the request or metadata - before they are sent to the BatchService server. - """ - return request, metadata - - def post_list_jobs(self, response: batch.ListJobsResponse) -> batch.ListJobsResponse: - """Post-rpc interceptor for list_jobs - - DEPRECATED. Please use the `post_list_jobs_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BatchService server but before - it is returned to user code. This `post_list_jobs` interceptor runs - before the `post_list_jobs_with_metadata` interceptor. - """ - return response - - def post_list_jobs_with_metadata(self, response: batch.ListJobsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[batch.ListJobsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_jobs - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BatchService server but before it is returned to user code. - - We recommend only using this `post_list_jobs_with_metadata` - interceptor in new development instead of the `post_list_jobs` interceptor. - When both interceptors are used, this `post_list_jobs_with_metadata` interceptor runs after the - `post_list_jobs` interceptor. The (possibly modified) response returned by - `post_list_jobs` will be passed to - `post_list_jobs_with_metadata`. - """ - return response, metadata - - def pre_list_tasks(self, request: batch.ListTasksRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[batch.ListTasksRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_tasks - - Override in a subclass to manipulate the request or metadata - before they are sent to the BatchService server. - """ - return request, metadata - - def post_list_tasks(self, response: batch.ListTasksResponse) -> batch.ListTasksResponse: - """Post-rpc interceptor for list_tasks - - DEPRECATED. Please use the `post_list_tasks_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BatchService server but before - it is returned to user code. This `post_list_tasks` interceptor runs - before the `post_list_tasks_with_metadata` interceptor. - """ - return response - - def post_list_tasks_with_metadata(self, response: batch.ListTasksResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[batch.ListTasksResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_tasks - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BatchService server but before it is returned to user code. - - We recommend only using this `post_list_tasks_with_metadata` - interceptor in new development instead of the `post_list_tasks` interceptor. - When both interceptors are used, this `post_list_tasks_with_metadata` interceptor runs after the - `post_list_tasks` interceptor. The (possibly modified) response returned by - `post_list_tasks` will be passed to - `post_list_tasks_with_metadata`. 
- """ - return response, metadata - - def pre_get_location( - self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_location - - Override in a subclass to manipulate the request or metadata - before they are sent to the BatchService server. - """ - return request, metadata - - def post_get_location( - self, response: locations_pb2.Location - ) -> locations_pb2.Location: - """Post-rpc interceptor for get_location - - Override in a subclass to manipulate the response - after it is returned by the BatchService server but before - it is returned to user code. - """ - return response - - def pre_list_locations( - self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_locations - - Override in a subclass to manipulate the request or metadata - before they are sent to the BatchService server. - """ - return request, metadata - - def post_list_locations( - self, response: locations_pb2.ListLocationsResponse - ) -> locations_pb2.ListLocationsResponse: - """Post-rpc interceptor for list_locations - - Override in a subclass to manipulate the response - after it is returned by the BatchService server but before - it is returned to user code. - """ - return response - - def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the BatchService server. - """ - return request, metadata - - def post_cancel_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the response - after it is returned by the BatchService server but before - it is returned to user code. - """ - return response - - def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the BatchService server. - """ - return request, metadata - - def post_delete_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for delete_operation - - Override in a subclass to manipulate the response - after it is returned by the BatchService server but before - it is returned to user code. - """ - return response - - def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the BatchService server. 
- """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the BatchService server but before - it is returned to user code. - """ - return response - - def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_operations - - Override in a subclass to manipulate the request or metadata - before they are sent to the BatchService server. - """ - return request, metadata - - def post_list_operations( - self, response: operations_pb2.ListOperationsResponse - ) -> operations_pb2.ListOperationsResponse: - """Post-rpc interceptor for list_operations - - Override in a subclass to manipulate the response - after it is returned by the BatchService server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class BatchServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: BatchServiceRestInterceptor - - -class BatchServiceRestTransport(_BaseBatchServiceRestTransport): - """REST backend synchronous transport for BatchService. - - Google Batch Service. - The service manages user submitted batch jobs and allocates - Google Compute Engine VM instances to run the jobs. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'batch.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[BatchServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'batch.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. - # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - url_scheme=url_scheme, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or BatchServiceRestInterceptor() - self._prep_wrapped_messages(client_info) - - @property - def operations_client(self) -> operations_v1.AbstractOperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Only create a new client if we do not already have one. - if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.CancelOperation': [ - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - ], - 'google.longrunning.Operations.DeleteOperation': [ - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ], - 'google.longrunning.Operations.GetOperation': [ - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ], - 'google.longrunning.Operations.ListOperations': [ - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', - }, - ], - } - - rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1") - - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) - - # Return the client from cache. - return self._operations_client - - class _CancelJob(_BaseBatchServiceRestTransport._BaseCancelJob, BatchServiceRestStub): - def __hash__(self): - return hash("BatchServiceRestTransport.CancelJob") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: batch.CancelJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the cancel job method over HTTP. - - Args: - request (~.batch.CancelJobRequest): - The request object. CancelJob Request. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseBatchServiceRestTransport._BaseCancelJob._get_http_options() - - request, metadata = self._interceptor.pre_cancel_job(request, metadata) - transcoded_request = _BaseBatchServiceRestTransport._BaseCancelJob._get_transcoded_request(http_options, request) - - body = _BaseBatchServiceRestTransport._BaseCancelJob._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBatchServiceRestTransport._BaseCancelJob._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.batch_v1.BatchServiceClient.CancelJob", - extra = { - "serviceName": "google.cloud.batch.v1.BatchService", - "rpcName": "CancelJob", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BatchServiceRestTransport._CancelJob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_cancel_job(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_cancel_job_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.batch_v1.BatchServiceClient.cancel_job", - extra = { - "serviceName": "google.cloud.batch.v1.BatchService", - "rpcName": "CancelJob", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateJob(_BaseBatchServiceRestTransport._BaseCreateJob, BatchServiceRestStub): - def __hash__(self): - return hash("BatchServiceRestTransport.CreateJob") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: batch.CreateJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> gcb_job.Job: - r"""Call the create job method over HTTP. - - Args: - request (~.batch.CreateJobRequest): - The request object. CreateJob Request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.gcb_job.Job: - The Cloud Batch Job description. 
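# The `pre_cancel_job` / `post_cancel_job` calls in the method body above are
# user-extensible hooks. A minimal sketch of a custom interceptor, assuming the
# `BatchServiceRestInterceptor` base class defined earlier in this module; the
# print statements are illustrative only:
class AuditInterceptor(BatchServiceRestInterceptor):
    def pre_cancel_job(self, request, metadata):
        # Runs before transcoding; may rewrite the request or metadata.
        print(f"cancelling job {request.name}")
        return request, metadata

    def post_cancel_job(self, response):
        # Runs after the Operation response has been parsed.
        print(f"cancellation started: {response.name}")
        return response

# Hypothetical wiring: BatchServiceRestTransport(interceptor=AuditInterceptor())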
- """ - - http_options = _BaseBatchServiceRestTransport._BaseCreateJob._get_http_options() - - request, metadata = self._interceptor.pre_create_job(request, metadata) - transcoded_request = _BaseBatchServiceRestTransport._BaseCreateJob._get_transcoded_request(http_options, request) - - body = _BaseBatchServiceRestTransport._BaseCreateJob._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBatchServiceRestTransport._BaseCreateJob._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.batch_v1.BatchServiceClient.CreateJob", - extra = { - "serviceName": "google.cloud.batch.v1.BatchService", - "rpcName": "CreateJob", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BatchServiceRestTransport._CreateJob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = gcb_job.Job() - pb_resp = gcb_job.Job.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_job(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_job_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = gcb_job.Job.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.batch_v1.BatchServiceClient.create_job", - extra = { - "serviceName": "google.cloud.batch.v1.BatchService", - "rpcName": "CreateJob", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteJob(_BaseBatchServiceRestTransport._BaseDeleteJob, BatchServiceRestStub): - def __hash__(self): - return hash("BatchServiceRestTransport.DeleteJob") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: batch.DeleteJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete job method over HTTP. 
- - Args: - request (~.batch.DeleteJobRequest): - The request object. DeleteJob Request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseBatchServiceRestTransport._BaseDeleteJob._get_http_options() - - request, metadata = self._interceptor.pre_delete_job(request, metadata) - transcoded_request = _BaseBatchServiceRestTransport._BaseDeleteJob._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBatchServiceRestTransport._BaseDeleteJob._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.batch_v1.BatchServiceClient.DeleteJob", - extra = { - "serviceName": "google.cloud.batch.v1.BatchService", - "rpcName": "DeleteJob", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BatchServiceRestTransport._DeleteJob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
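# In every stub's `_get_response` helper, `getattr(session, method)` resolves
# the transcoded HTTP verb to the matching method on the authorized session,
# which follows the `requests` API. An equivalent standalone sketch using a
# plain `requests.Session` (the URL is hypothetical):
import requests

session = requests.Session()
method = 'delete'                      # e.g. transcoded_request['method']
send = getattr(session, method)        # resolves to session.delete
# send("https://batch.googleapis.com/v1/...", timeout=..., headers=..., params=...)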
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_delete_job(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_job_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.batch_v1.BatchServiceClient.delete_job", - extra = { - "serviceName": "google.cloud.batch.v1.BatchService", - "rpcName": "DeleteJob", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetJob(_BaseBatchServiceRestTransport._BaseGetJob, BatchServiceRestStub): - def __hash__(self): - return hash("BatchServiceRestTransport.GetJob") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: batch.GetJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> job.Job: - r"""Call the get job method over HTTP. - - Args: - request (~.batch.GetJobRequest): - The request object. GetJob Request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.job.Job: - The Cloud Batch Job description. 
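# `core_exceptions.from_http_response`, used above, maps a non-2xx HTTP reply
# to the matching GoogleAPICallError subclass (404 -> NotFound, 403 ->
# PermissionDenied, and so on). A small caller-side sketch; the client object
# and resource name are hypothetical:
from google.api_core import exceptions as core_exceptions

def get_job_or_none(client, name):
    try:
        return client.get_job(name=name)
    except core_exceptions.NotFound:
        # Raised when the GetJob route below returns a 404.
        return None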
- """ - - http_options = _BaseBatchServiceRestTransport._BaseGetJob._get_http_options() - - request, metadata = self._interceptor.pre_get_job(request, metadata) - transcoded_request = _BaseBatchServiceRestTransport._BaseGetJob._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBatchServiceRestTransport._BaseGetJob._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.batch_v1.BatchServiceClient.GetJob", - extra = { - "serviceName": "google.cloud.batch.v1.BatchService", - "rpcName": "GetJob", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BatchServiceRestTransport._GetJob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = job.Job() - pb_resp = job.Job.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_job(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_job_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = job.Job.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.batch_v1.BatchServiceClient.get_job", - extra = { - "serviceName": "google.cloud.batch.v1.BatchService", - "rpcName": "GetJob", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetTask(_BaseBatchServiceRestTransport._BaseGetTask, BatchServiceRestStub): - def __hash__(self): - return hash("BatchServiceRestTransport.GetTask") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: batch.GetTaskRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> task.Task: - r"""Call the get task method over HTTP. - - Args: - request (~.batch.GetTaskRequest): - The request object. Request for a single Task by name. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- ~.task.Task:
- A Cloud Batch task.
- """
-
- http_options = _BaseBatchServiceRestTransport._BaseGetTask._get_http_options()
-
- request, metadata = self._interceptor.pre_get_task(request, metadata)
- transcoded_request = _BaseBatchServiceRestTransport._BaseGetTask._get_transcoded_request(http_options, request)
-
- # Jsonify the query params
- query_params = _BaseBatchServiceRestTransport._BaseGetTask._get_query_params_json(transcoded_request)
-
- if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER
- request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
- method = transcoded_request['method']
- try:
- request_payload = type(request).to_json(request)
- except:
- request_payload = None
- http_request = {
- "payload": request_payload,
- "requestMethod": method,
- "requestUrl": request_url,
- "headers": dict(metadata),
- }
- _LOGGER.debug(
- f"Sending request for google.cloud.batch_v1.BatchServiceClient.GetTask",
- extra = {
- "serviceName": "google.cloud.batch.v1.BatchService",
- "rpcName": "GetTask",
- "httpRequest": http_request,
- "metadata": http_request["headers"],
- },
- )
-
- # Send the request
- response = BatchServiceRestTransport._GetTask._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
-
- # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
- # subclass.
- if response.status_code >= 400:
- raise core_exceptions.from_http_response(response)
-
- # Return the response
- resp = task.Task()
- pb_resp = task.Task.pb(resp)
-
- json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
-
- resp = self._interceptor.post_get_task(resp)
- response_metadata = [(k, str(v)) for k, v in response.headers.items()]
- resp, _ = self._interceptor.post_get_task_with_metadata(resp, response_metadata)
- if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER
- try:
- response_payload = task.Task.to_json(resp)
- except:
- response_payload = None
- http_response = {
- "payload": response_payload,
- "headers": dict(response.headers),
- "status": response.status_code,
- }
- _LOGGER.debug(
- "Received response for google.cloud.batch_v1.BatchServiceClient.get_task",
- extra = {
- "serviceName": "google.cloud.batch.v1.BatchService",
- "rpcName": "GetTask",
- "metadata": http_response["headers"],
- "httpResponse": http_response,
- },
- )
- return resp
-
- class _ListJobs(_BaseBatchServiceRestTransport._BaseListJobs, BatchServiceRestStub):
- def __hash__(self):
- return hash("BatchServiceRestTransport.ListJobs")
-
- @staticmethod
- def _get_response(
- host,
- metadata,
- query_params,
- session,
- timeout,
- transcoded_request,
- body=None):
-
- uri = transcoded_request['uri']
- method = transcoded_request['method']
- headers = dict(metadata)
- headers['Content-Type'] = 'application/json'
- response = getattr(session, method)(
- "{host}{uri}".format(host=host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params, strict=True),
- )
- return response
-
- def __call__(self,
- request: batch.ListJobsRequest, *,
- retry: OptionalRetry=gapic_v1.method.DEFAULT,
- timeout: Optional[float]=None,
- metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
- ) -> batch.ListJobsResponse:
- r"""Call the list jobs method over HTTP.
-
- Args:
- request (~.batch.ListJobsRequest):
- The request object. ListJob Request.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- ~.batch.ListJobsResponse:
- ListJob Response.
- """
-
- http_options = _BaseBatchServiceRestTransport._BaseListJobs._get_http_options()
-
- request, metadata = self._interceptor.pre_list_jobs(request, metadata)
- transcoded_request = _BaseBatchServiceRestTransport._BaseListJobs._get_transcoded_request(http_options, request)
-
- # Jsonify the query params
- query_params = _BaseBatchServiceRestTransport._BaseListJobs._get_query_params_json(transcoded_request)
-
- if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER
- request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
- method = transcoded_request['method']
- try:
- request_payload = type(request).to_json(request)
- except:
- request_payload = None
- http_request = {
- "payload": request_payload,
- "requestMethod": method,
- "requestUrl": request_url,
- "headers": dict(metadata),
- }
- _LOGGER.debug(
- f"Sending request for google.cloud.batch_v1.BatchServiceClient.ListJobs",
- extra = {
- "serviceName": "google.cloud.batch.v1.BatchService",
- "rpcName": "ListJobs",
- "httpRequest": http_request,
- "metadata": http_request["headers"],
- },
- )
-
- # Send the request
- response = BatchServiceRestTransport._ListJobs._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
-
- # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
- # subclass.
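# Per the docstring above, metadata values are `str` unless the key ends in
# "-bin", in which case raw `bytes` are expected. An illustrative tuple; the
# custom header name is hypothetical:
metadata = (
    ("x-goog-request-params", "parent=projects/p/locations/us-central1"),
    ("x-debug-trace-bin", b"\x00\x01\x02"),  # "-bin" suffix carries bytes
)
# Hypothetical call: client.list_jobs(parent="projects/p/locations/us-central1",
#                                     metadata=metadata)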
- if response.status_code >= 400:
- raise core_exceptions.from_http_response(response)
-
- # Return the response
- resp = batch.ListJobsResponse()
- pb_resp = batch.ListJobsResponse.pb(resp)
-
- json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
-
- resp = self._interceptor.post_list_jobs(resp)
- response_metadata = [(k, str(v)) for k, v in response.headers.items()]
- resp, _ = self._interceptor.post_list_jobs_with_metadata(resp, response_metadata)
- if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER
- try:
- response_payload = batch.ListJobsResponse.to_json(resp)
- except:
- response_payload = None
- http_response = {
- "payload": response_payload,
- "headers": dict(response.headers),
- "status": response.status_code,
- }
- _LOGGER.debug(
- "Received response for google.cloud.batch_v1.BatchServiceClient.list_jobs",
- extra = {
- "serviceName": "google.cloud.batch.v1.BatchService",
- "rpcName": "ListJobs",
- "metadata": http_response["headers"],
- "httpResponse": http_response,
- },
- )
- return resp
-
- class _ListTasks(_BaseBatchServiceRestTransport._BaseListTasks, BatchServiceRestStub):
- def __hash__(self):
- return hash("BatchServiceRestTransport.ListTasks")
-
- @staticmethod
- def _get_response(
- host,
- metadata,
- query_params,
- session,
- timeout,
- transcoded_request,
- body=None):
-
- uri = transcoded_request['uri']
- method = transcoded_request['method']
- headers = dict(metadata)
- headers['Content-Type'] = 'application/json'
- response = getattr(session, method)(
- "{host}{uri}".format(host=host, uri=uri),
- timeout=timeout,
- headers=headers,
- params=rest_helpers.flatten_query_params(query_params, strict=True),
- )
- return response
-
- def __call__(self,
- request: batch.ListTasksRequest, *,
- retry: OptionalRetry=gapic_v1.method.DEFAULT,
- timeout: Optional[float]=None,
- metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
- ) -> batch.ListTasksResponse:
- r"""Call the list tasks method over HTTP.
-
- Args:
- request (~.batch.ListTasksRequest):
- The request object. ListTasks Request.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- ~.batch.ListTasksResponse:
- ListTasks Response.
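# `rest_helpers.flatten_query_params(..., strict=True)`, used by each
# `_get_response` helper, turns the JSON-ified query-param dict into a flat
# list of pairs, dotting nested keys. Illustrative input and assumed output:
from google.api_core import rest_helpers

flat = rest_helpers.flatten_query_params({"a": {"b": 1, "c": True}}, strict=True)
# expected shape: [("a.b", "1"), ("a.c", "true")] -- strict mode stringifies
# primitives (booleans lowercased) so they are safe to send as URL parameters.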
- """ - - http_options = _BaseBatchServiceRestTransport._BaseListTasks._get_http_options() - - request, metadata = self._interceptor.pre_list_tasks(request, metadata) - transcoded_request = _BaseBatchServiceRestTransport._BaseListTasks._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBatchServiceRestTransport._BaseListTasks._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.batch_v1.BatchServiceClient.ListTasks", - extra = { - "serviceName": "google.cloud.batch.v1.BatchService", - "rpcName": "ListTasks", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BatchServiceRestTransport._ListTasks._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = batch.ListTasksResponse() - pb_resp = batch.ListTasksResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_tasks(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_tasks_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = batch.ListTasksResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.batch_v1.BatchServiceClient.list_tasks", - extra = { - "serviceName": "google.cloud.batch.v1.BatchService", - "rpcName": "ListTasks", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - @property - def cancel_job(self) -> Callable[ - [batch.CancelJobRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CancelJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_job(self) -> Callable[ - [batch.CreateJobRequest], - gcb_job.Job]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_job(self) -> Callable[ - [batch.DeleteJobRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._DeleteJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_job(self) -> Callable[ - [batch.GetJobRequest], - job.Job]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_task(self) -> Callable[ - [batch.GetTaskRequest], - task.Task]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetTask(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_jobs(self) -> Callable[ - [batch.ListJobsRequest], - batch.ListJobsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListJobs(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_tasks(self) -> Callable[ - [batch.ListTasksRequest], - batch.ListTasksResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListTasks(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_location(self): - return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore - - class _GetLocation(_BaseBatchServiceRestTransport._BaseGetLocation, BatchServiceRestStub): - def __hash__(self): - return hash("BatchServiceRestTransport.GetLocation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: locations_pb2.GetLocationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.Location: - - r"""Call the get location method over HTTP. - - Args: - request (locations_pb2.GetLocationRequest): - The request object for GetLocation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - locations_pb2.Location: Response from GetLocation method. 
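# Each RPC surfaces as a property returning a callable stub (hence the
# `# type: ignore` casts above). A usage sketch; the transport instance and
# job name are hypothetical:
get_job = transport.get_job            # property access builds the stub
resp = get_job(batch.GetJobRequest(name="projects/p/locations/us-central1/jobs/j1"))
# `resp` is a job.Job parsed from the HTTP response body.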
- """ - - http_options = _BaseBatchServiceRestTransport._BaseGetLocation._get_http_options() - - request, metadata = self._interceptor.pre_get_location(request, metadata) - transcoded_request = _BaseBatchServiceRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBatchServiceRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.batch_v1.BatchServiceClient.GetLocation", - extra = { - "serviceName": "google.cloud.batch.v1.BatchService", - "rpcName": "GetLocation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BatchServiceRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = locations_pb2.Location() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_location(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.batch_v1.BatchServiceAsyncClient.GetLocation", - extra = { - "serviceName": "google.cloud.batch.v1.BatchService", - "rpcName": "GetLocation", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def list_locations(self): - return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore - - class _ListLocations(_BaseBatchServiceRestTransport._BaseListLocations, BatchServiceRestStub): - def __hash__(self): - return hash("BatchServiceRestTransport.ListLocations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: locations_pb2.ListLocationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.ListLocationsResponse: - - r"""Call the list locations method over HTTP. - - Args: - request (locations_pb2.ListLocationsRequest): - The request object for ListLocations method. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - locations_pb2.ListLocationsResponse: Response from ListLocations method. - """ - - http_options = _BaseBatchServiceRestTransport._BaseListLocations._get_http_options() - - request, metadata = self._interceptor.pre_list_locations(request, metadata) - transcoded_request = _BaseBatchServiceRestTransport._BaseListLocations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBatchServiceRestTransport._BaseListLocations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.batch_v1.BatchServiceClient.ListLocations", - extra = { - "serviceName": "google.cloud.batch.v1.BatchService", - "rpcName": "ListLocations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BatchServiceRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = locations_pb2.ListLocationsResponse() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_list_locations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.batch_v1.BatchServiceAsyncClient.ListLocations", - extra = { - "serviceName": "google.cloud.batch.v1.BatchService", - "rpcName": "ListLocations", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - - class _CancelOperation(_BaseBatchServiceRestTransport._BaseCancelOperation, BatchServiceRestStub): - def __hash__(self): - return hash("BatchServiceRestTransport.CancelOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - - r"""Call the cancel operation method over HTTP. - - Args: - request (operations_pb2.CancelOperationRequest): - The request object for CancelOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = _BaseBatchServiceRestTransport._BaseCancelOperation._get_http_options() - - request, metadata = self._interceptor.pre_cancel_operation(request, metadata) - transcoded_request = _BaseBatchServiceRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) - - body = _BaseBatchServiceRestTransport._BaseCancelOperation._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBatchServiceRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.batch_v1.BatchServiceClient.CancelOperation", - extra = { - "serviceName": "google.cloud.batch.v1.BatchService", - "rpcName": "CancelOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BatchServiceRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_cancel_operation(None) - - @property - def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - - class _DeleteOperation(_BaseBatchServiceRestTransport._BaseDeleteOperation, BatchServiceRestStub): - def __hash__(self): - return hash("BatchServiceRestTransport.DeleteOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - - r"""Call the delete operation method over HTTP. - - Args: - request (operations_pb2.DeleteOperationRequest): - The request object for DeleteOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = _BaseBatchServiceRestTransport._BaseDeleteOperation._get_http_options() - - request, metadata = self._interceptor.pre_delete_operation(request, metadata) - transcoded_request = _BaseBatchServiceRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBatchServiceRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.batch_v1.BatchServiceClient.DeleteOperation", - extra = { - "serviceName": "google.cloud.batch.v1.BatchService", - "rpcName": "DeleteOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BatchServiceRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_delete_operation(None) - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(_BaseBatchServiceRestTransport._BaseGetOperation, BatchServiceRestStub): - def __hash__(self): - return hash("BatchServiceRestTransport.GetOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.Operation: Response from GetOperation method. 
- """ - - http_options = _BaseBatchServiceRestTransport._BaseGetOperation._get_http_options() - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - transcoded_request = _BaseBatchServiceRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBatchServiceRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.batch_v1.BatchServiceClient.GetOperation", - extra = { - "serviceName": "google.cloud.batch.v1.BatchService", - "rpcName": "GetOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BatchServiceRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.Operation() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.batch_v1.BatchServiceAsyncClient.GetOperation", - extra = { - "serviceName": "google.cloud.batch.v1.BatchService", - "rpcName": "GetOperation", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - - class _ListOperations(_BaseBatchServiceRestTransport._BaseListOperations, BatchServiceRestStub): - def __hash__(self): - return hash("BatchServiceRestTransport.ListOperations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.ListOperationsResponse: - - r"""Call the list operations method over HTTP. - - Args: - request (operations_pb2.ListOperationsRequest): - The request object for ListOperations method. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.ListOperationsResponse: Response from ListOperations method. - """ - - http_options = _BaseBatchServiceRestTransport._BaseListOperations._get_http_options() - - request, metadata = self._interceptor.pre_list_operations(request, metadata) - transcoded_request = _BaseBatchServiceRestTransport._BaseListOperations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBatchServiceRestTransport._BaseListOperations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.batch_v1.BatchServiceClient.ListOperations", - extra = { - "serviceName": "google.cloud.batch.v1.BatchService", - "rpcName": "ListOperations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BatchServiceRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.ListOperationsResponse() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_list_operations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.batch_v1.BatchServiceAsyncClient.ListOperations", - extra = { - "serviceName": "google.cloud.batch.v1.BatchService", - "rpcName": "ListOperations", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'BatchServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/services/batch_service/transports/rest_base.py b/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/services/batch_service/transports/rest_base.py deleted file mode 100644 index 5427d5be1dc6..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/services/batch_service/transports/rest_base.py +++ /dev/null @@ -1,514 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.cloud.location import locations_pb2 # type: ignore -from .base import BatchServiceTransport, DEFAULT_CLIENT_INFO - -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - - -from google.cloud.batch_v1.types import batch -from google.cloud.batch_v1.types import job -from google.cloud.batch_v1.types import job as gcb_job -from google.cloud.batch_v1.types import task -from google.longrunning import operations_pb2 # type: ignore - - -class _BaseBatchServiceRestTransport(BatchServiceTransport): - """Base REST backend transport for BatchService. - - Note: This class is not meant to be used directly. Use its sync and - async sub-classes instead. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'batch.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. 
- Args:
- host (Optional[str]):
- The hostname to connect to (default: 'batch.googleapis.com').
- credentials (Optional[Any]): The
- authorization credentials to attach to requests. These
- credentials identify the application to the service; if none
- are specified, the client will attempt to ascertain the
- credentials from the environment.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you are developing
- your own client library.
- always_use_jwt_access (Optional[bool]): Whether self signed JWT should
- be used for service account credentials.
- url_scheme: the protocol scheme for the API endpoint. Normally
- "https", but for testing or local servers,
- "http" can be specified.
- """
- # Run the base constructor
- maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
- if maybe_url_match is None:
- raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER
-
- url_match_items = maybe_url_match.groupdict()
-
- host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
-
- super().__init__(
- host=host,
- credentials=credentials,
- client_info=client_info,
- always_use_jwt_access=always_use_jwt_access,
- api_audience=api_audience
- )
-
- class _BaseCancelJob:
- def __hash__(self): # pragma: NO COVER
- return NotImplementedError("__hash__ must be implemented.")
-
- __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
- }
-
- @classmethod
- def _get_unset_required_fields(cls, message_dict):
- return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
-
- @staticmethod
- def _get_http_options():
- http_options: List[Dict[str, str]] = [{
- 'method': 'post',
- 'uri': '/v1/{name=projects/*/locations/*/jobs/*}:cancel',
- 'body': '*',
- },
- ]
- return http_options
-
- @staticmethod
- def _get_transcoded_request(http_options, request):
- pb_request = batch.CancelJobRequest.pb(request)
- transcoded_request = path_template.transcode(http_options, pb_request)
- return transcoded_request
-
- @staticmethod
- def _get_request_body_json(transcoded_request):
- # Jsonify the request body
-
- body = json_format.MessageToJson(
- transcoded_request['body'],
- use_integers_for_enums=True
- )
- return body
- @staticmethod
- def _get_query_params_json(transcoded_request):
- query_params = json.loads(json_format.MessageToJson(
- transcoded_request['query_params'],
- use_integers_for_enums=True,
- ))
- query_params.update(_BaseBatchServiceRestTransport._BaseCancelJob._get_unset_required_fields(query_params))
-
- query_params["$alt"] = "json;enum-encoding=int"
- return query_params
-
- class _BaseCreateJob:
- def __hash__(self): # pragma: NO COVER
- return NotImplementedError("__hash__ must be implemented.")
-
- __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
- }
-
- @classmethod
- def _get_unset_required_fields(cls, message_dict):
- return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
-
- @staticmethod
- def _get_http_options():
- http_options: List[Dict[str, str]] = [{
- 'method': 'post',
- 'uri': '/v1/{parent=projects/*/locations/*}/jobs',
- 'body': 'job',
- },
- ]
- return http_options
-
- @staticmethod
- def _get_transcoded_request(http_options, request):
- pb_request = batch.CreateJobRequest.pb(request)
- transcoded_request = path_template.transcode(http_options, pb_request)
- return
transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBatchServiceRestTransport._BaseCreateJob._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteJob: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/jobs/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = batch.DeleteJobRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetJob: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/jobs/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = batch.GetJobRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBatchServiceRestTransport._BaseGetJob._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetTask: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/jobs/*/taskGroups/*/tasks/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = batch.GetTaskRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) 
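# `path_template.transcode`, used by every `_get_transcoded_request` helper
# above, matches a request message against these HTTP rules and splits it into
# method, URI, body, and query params. A standalone sketch using the GetJob
# rule shown above and a hypothetical job name:
from google.api_core import path_template
from google.cloud.batch_v1.types import batch

http_options = [{'method': 'get', 'uri': '/v1/{name=projects/*/locations/*/jobs/*}'}]
pb_request = batch.GetJobRequest.pb(
    batch.GetJobRequest(name="projects/p/locations/us-central1/jobs/j1"))
transcoded = path_template.transcode(http_options, pb_request)
# transcoded['method'] == 'get'
# transcoded['uri'] == '/v1/projects/p/locations/us-central1/jobs/j1'
# fields not bound in the URI would appear in transcoded['query_params']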
- query_params.update(_BaseBatchServiceRestTransport._BaseGetTask._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListJobs: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/jobs', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = batch.ListJobsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListTasks: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*/jobs/*/taskGroups/*}/tasks', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = batch.ListTasksRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBatchServiceRestTransport._BaseListTasks._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetLocation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseListLocations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*}/locations', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = 
json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseCancelOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - body = json.dumps(transcoded_request['body']) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseDeleteOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseGetOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseListOperations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - -__all__=( - '_BaseBatchServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/types/__init__.py b/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/types/__init__.py deleted file mode 100644 index 7d5f537910c1..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/types/__init__.py +++ /dev/null @@ -1,86 
+0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .batch import ( - CancelJobRequest, - CancelJobResponse, - CreateJobRequest, - DeleteJobRequest, - GetJobRequest, - GetTaskRequest, - ListJobsRequest, - ListJobsResponse, - ListTasksRequest, - ListTasksResponse, - OperationMetadata, -) -from .job import ( - AllocationPolicy, - Job, - JobNotification, - JobStatus, - LogsPolicy, - ServiceAccount, - TaskGroup, -) -from .task import ( - ComputeResource, - Environment, - LifecyclePolicy, - Runnable, - StatusEvent, - Task, - TaskExecution, - TaskSpec, - TaskStatus, -) -from .volume import ( - GCS, - NFS, - Volume, -) - -__all__ = ( - 'CancelJobRequest', - 'CancelJobResponse', - 'CreateJobRequest', - 'DeleteJobRequest', - 'GetJobRequest', - 'GetTaskRequest', - 'ListJobsRequest', - 'ListJobsResponse', - 'ListTasksRequest', - 'ListTasksResponse', - 'OperationMetadata', - 'AllocationPolicy', - 'Job', - 'JobNotification', - 'JobStatus', - 'LogsPolicy', - 'ServiceAccount', - 'TaskGroup', - 'ComputeResource', - 'Environment', - 'LifecyclePolicy', - 'Runnable', - 'StatusEvent', - 'Task', - 'TaskExecution', - 'TaskSpec', - 'TaskStatus', - 'GCS', - 'NFS', - 'Volume', -) diff --git a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/types/batch.py b/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/types/batch.py deleted file mode 100644 index 73e96bbe5019..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/types/batch.py +++ /dev/null @@ -1,423 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.batch_v1.types import job as gcb_job -from google.cloud.batch_v1.types import task -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.batch.v1', - manifest={ - 'CreateJobRequest', - 'GetJobRequest', - 'DeleteJobRequest', - 'CancelJobRequest', - 'CancelJobResponse', - 'ListJobsRequest', - 'ListJobsResponse', - 'ListTasksRequest', - 'ListTasksResponse', - 'GetTaskRequest', - 'OperationMetadata', - }, -) - - -class CreateJobRequest(proto.Message): - r"""CreateJob Request. - - Attributes: - parent (str): - Required. The parent resource name where the - Job will be created. Pattern: - "projects/{project}/locations/{location}". 
- job_id (str): - ID used to uniquely identify the Job within its parent - scope. This field should contain at most 63 characters and - must start with a lowercase letter. Only lowercase - letters, numbers, and '-' are accepted. The '-' character - cannot be the first or the last one. A system generated ID - will be used if the field is not set. - - The job.name field in the request will be ignored and the - created resource name of the Job will be - "{parent}/jobs/{job_id}". - job (google.cloud.batch_v1.types.Job): - Required. The Job to create. - request_id (str): - Optional. An optional request ID to identify - requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes after the first request. - - For example, consider a situation where you make - an initial request and the request times out. If - you make the request again with the same request - ID, the server can check if the original operation - with the same request ID was received, and if - so, will ignore the second request. This - prevents clients from accidentally creating - duplicate commitments. - - The request ID must be a valid UUID with the - exception that zero UUID is not supported - (00000000-0000-0000-0000-000000000000). - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - job_id: str = proto.Field( - proto.STRING, - number=2, - ) - job: gcb_job.Job = proto.Field( - proto.MESSAGE, - number=3, - message=gcb_job.Job, - ) - request_id: str = proto.Field( - proto.STRING, - number=4, - ) - - -class GetJobRequest(proto.Message): - r"""GetJob Request. - - Attributes: - name (str): - Required. Job name. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class DeleteJobRequest(proto.Message): - r"""DeleteJob Request. - - Attributes: - name (str): - Job name. - reason (str): - Optional. Reason for this deletion. - request_id (str): - Optional. An optional request ID to identify - requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes after the first request. - - For example, consider a situation where you make - an initial request and the request times out. If - you make the request again with the same request - ID, the server can check if the original operation - with the same request ID was received, and if - so, will ignore the second request. This - prevents clients from accidentally creating - duplicate commitments. - - The request ID must be a valid UUID with the - exception that zero UUID is not supported - (00000000-0000-0000-0000-000000000000). - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - reason: str = proto.Field( - proto.STRING, - number=2, - ) - request_id: str = proto.Field( - proto.STRING, - number=4, - ) - - -class CancelJobRequest(proto.Message): - r"""CancelJob Request. - - Attributes: - name (str): - Required. Job name. - request_id (str): - Optional. An optional request ID to identify - requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes after the first request. - - For example, consider a situation where you make - an initial request and the request times out.
If - you make the request again with the same request - ID, the server can check if the original operation - with the same request ID was received, and if - so, will ignore the second request. This - prevents clients from accidentally creating - duplicate commitments. - - The request ID must be a valid UUID with the - exception that zero UUID is not supported - (00000000-0000-0000-0000-000000000000). - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - request_id: str = proto.Field( - proto.STRING, - number=4, - ) - - -class CancelJobResponse(proto.Message): - r"""Response to the CancelJob request. - """ - - -class ListJobsRequest(proto.Message): - r"""ListJob Request. - - Attributes: - parent (str): - Parent path. - filter (str): - List filter. - order_by (str): - Optional. Sort results. Supported values are "name", "name desc", - "create_time", and "create_time desc". - page_size (int): - Page size. - page_token (str): - Page token. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListJobsResponse(proto.Message): - r"""ListJob Response. - - Attributes: - jobs (MutableSequence[google.cloud.batch_v1.types.Job]): - Jobs. - next_page_token (str): - Next page token. - unreachable (MutableSequence[str]): - Locations that could not be reached. - """ - - @property - def raw_page(self): - return self - - jobs: MutableSequence[gcb_job.Job] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gcb_job.Job, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class ListTasksRequest(proto.Message): - r"""ListTasks Request. - - Attributes: - parent (str): - Required. Name of a TaskGroup from which Tasks are being - requested. Pattern: - "projects/{project}/locations/{location}/jobs/{job}/taskGroups/{task_group}". - filter (str): - Task filter; a null filter matches all Tasks. - The filter string should be of the format - State=TaskStatus.State, e.g. State=RUNNING - page_size (int): - Page size. - page_token (str): - Page token. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - filter: str = proto.Field( - proto.STRING, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - page_token: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListTasksResponse(proto.Message): - r"""ListTasks Response. - - Attributes: - tasks (MutableSequence[google.cloud.batch_v1.types.Task]): - Tasks. - next_page_token (str): - Next page token. - unreachable (MutableSequence[str]): - Locations that could not be reached. - """ - - @property - def raw_page(self): - return self - - tasks: MutableSequence[task.Task] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=task.Task, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class GetTaskRequest(proto.Message): - r"""Request for a single Task by name. - - Attributes: - name (str): - Required. Task name. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class OperationMetadata(proto.Message): - r"""Represents the metadata of the long-running operation.
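The request messages above share the ``request_id`` idempotency pattern. A minimal sketch of how a caller might use it, assuming the published ``google-cloud-batch`` package; the project, location, and job ID are hypothetical placeholders:

.. code-block:: python

    import uuid

    from google.cloud import batch_v1

    client = batch_v1.BatchServiceClient()
    request = batch_v1.CreateJobRequest(
        parent="projects/my-project/locations/us-central1",  # hypothetical
        job_id="job01",  # hypothetical
        job=batch_v1.Job(),  # placeholder; a real Job needs task_groups
        # Reusing the same UUID on a retry lets the server detect and
        # ignore the duplicate request for at least 60 minutes.
        request_id=str(uuid.uuid4()),
    )
    job = client.create_job(request=request)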
- - Attributes: - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the operation was - created. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the operation finished - running. - target (str): - Output only. Server-defined resource path for - the target of the operation. - verb (str): - Output only. Name of the verb executed by the - operation. - status_message (str): - Output only. Human-readable status of the - operation, if any. - requested_cancellation (bool): - Output only. Identifies whether the user has requested - cancellation of the operation. Operations that have - successfully been cancelled have - [google.longrunning.Operation.error][google.longrunning.Operation.error] - value with a - [google.rpc.Status.code][google.rpc.Status.code] of 1, - corresponding to ``Code.CANCELLED``. - api_version (str): - Output only. API version used to start the - operation. - """ - - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - target: str = proto.Field( - proto.STRING, - number=3, - ) - verb: str = proto.Field( - proto.STRING, - number=4, - ) - status_message: str = proto.Field( - proto.STRING, - number=5, - ) - requested_cancellation: bool = proto.Field( - proto.BOOL, - number=6, - ) - api_version: str = proto.Field( - proto.STRING, - number=7, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/types/job.py b/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/types/job.py deleted file mode 100644 index 267bf796bbd2..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/types/job.py +++ /dev/null @@ -1,1163 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.batch_v1.types import task -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.batch.v1', - manifest={ - 'Job', - 'LogsPolicy', - 'JobStatus', - 'JobNotification', - 'AllocationPolicy', - 'TaskGroup', - 'ServiceAccount', - }, -) - - -class Job(proto.Message): - r"""The Cloud Batch Job description. - - Attributes: - name (str): - Output only. Job name. - For example: - "projects/123456/locations/us-central1/jobs/job01". - uid (str): - Output only. A system generated unique ID for - the Job. - priority (int): - Priority of the Job. The valid value range is [0, 100). - Default value is 0. Higher value indicates higher priority. - A job with higher priority value is more likely to run - earlier if all other requirements are satisfied. 
- task_groups (MutableSequence[google.cloud.batch_v1.types.TaskGroup]): - Required. TaskGroups in the Job. Only one - TaskGroup is supported now. - allocation_policy (google.cloud.batch_v1.types.AllocationPolicy): - Compute resource allocation for all - TaskGroups in the Job. - labels (MutableMapping[str, str]): - Custom labels to apply to the job and any Cloud Logging - `LogEntry `__ - that it generates. - - Use labels to group and describe the resources they are - applied to. Batch automatically applies predefined labels - and supports multiple ``labels`` fields for each job, which - each let you apply custom labels to various resources. Label - names that start with "goog-" or "google-" are reserved for - predefined labels. For more information about labels with - Batch, see `Organize resources using - labels `__. - status (google.cloud.batch_v1.types.JobStatus): - Output only. Job status. It is read only for - users. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. When the Job was created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The last time the Job was - updated. - logs_policy (google.cloud.batch_v1.types.LogsPolicy): - Log preservation policy for the Job. - notifications (MutableSequence[google.cloud.batch_v1.types.JobNotification]): - Notification configurations. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - uid: str = proto.Field( - proto.STRING, - number=2, - ) - priority: int = proto.Field( - proto.INT64, - number=3, - ) - task_groups: MutableSequence['TaskGroup'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='TaskGroup', - ) - allocation_policy: 'AllocationPolicy' = proto.Field( - proto.MESSAGE, - number=7, - message='AllocationPolicy', - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=8, - ) - status: 'JobStatus' = proto.Field( - proto.MESSAGE, - number=9, - message='JobStatus', - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=11, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=12, - message=timestamp_pb2.Timestamp, - ) - logs_policy: 'LogsPolicy' = proto.Field( - proto.MESSAGE, - number=13, - message='LogsPolicy', - ) - notifications: MutableSequence['JobNotification'] = proto.RepeatedField( - proto.MESSAGE, - number=14, - message='JobNotification', - ) - - -class LogsPolicy(proto.Message): - r"""LogsPolicy describes if and how a job's logs are preserved. Logs - include information that is automatically written by the Batch - service agent and any information that you configured the job's - runnables to write to the ``stdout`` or ``stderr`` streams. - - Attributes: - destination (google.cloud.batch_v1.types.LogsPolicy.Destination): - If and where logs should be saved. - logs_path (str): - When ``destination`` is set to ``PATH``, you must set this - field to the path where you want logs to be saved. This path - can point to a local directory on the VM or (if configured) - a directory under the mount path of any Cloud Storage - bucket, network file system (NFS), or writable persistent - disk that is mounted to the job. For example, if the job has - a bucket with ``mountPath`` set to ``/mnt/disks/my-bucket``, - you can write logs to the root directory of the - ``remotePath`` of that bucket by setting this field to - ``/mnt/disks/my-bucket/``.
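As a sketch of the ``PATH`` destination just described (the bucket mount path is the docstring's own example; the message and enum are the ones defined here):

.. code-block:: python

    from google.cloud import batch_v1

    # Save logs under the mounted bucket path from the example above.
    logs_policy = batch_v1.LogsPolicy(
        destination=batch_v1.LogsPolicy.Destination.PATH,
        logs_path="/mnt/disks/my-bucket/",
    )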
- cloud_logging_option (google.cloud.batch_v1.types.LogsPolicy.CloudLoggingOption): - Optional. When ``destination`` is set to ``CLOUD_LOGGING``, - you can optionally set this field to configure additional - settings for Cloud Logging. - """ - class Destination(proto.Enum): - r"""The destination (if any) for logs. - - Values: - DESTINATION_UNSPECIFIED (0): - (Default) Logs are not preserved. - CLOUD_LOGGING (1): - Logs are streamed to Cloud Logging. Optionally, you can - configure additional settings in the ``cloudLoggingOption`` - field. - PATH (2): - Logs are saved to the file path specified in the - ``logsPath`` field. - """ - DESTINATION_UNSPECIFIED = 0 - CLOUD_LOGGING = 1 - PATH = 2 - - class CloudLoggingOption(proto.Message): - r"""``CloudLoggingOption`` contains additional settings for Cloud - Logging logs generated by a Batch job. - - Attributes: - use_generic_task_monitored_resource (bool): - Optional. Set this field to ``true`` to change the - `monitored resource - type `__ - for Cloud Logging logs generated by this Batch job from the - ```batch.googleapis.com/Job`` `__ - type to the formerly used - ```generic_task`` `__ - type. - """ - - use_generic_task_monitored_resource: bool = proto.Field( - proto.BOOL, - number=1, - ) - - destination: Destination = proto.Field( - proto.ENUM, - number=1, - enum=Destination, - ) - logs_path: str = proto.Field( - proto.STRING, - number=2, - ) - cloud_logging_option: CloudLoggingOption = proto.Field( - proto.MESSAGE, - number=3, - message=CloudLoggingOption, - ) - - -class JobStatus(proto.Message): - r"""Job status. - - Attributes: - state (google.cloud.batch_v1.types.JobStatus.State): - Job state. - status_events (MutableSequence[google.cloud.batch_v1.types.StatusEvent]): - Job status events. - task_groups (MutableMapping[str, google.cloud.batch_v1.types.JobStatus.TaskGroupStatus]): - Aggregated task status for each TaskGroup in - the Job. The map key is TaskGroup ID. - run_duration (google.protobuf.duration_pb2.Duration): - The duration of time that the Job spent in - status RUNNING. - """ - class State(proto.Enum): - r"""Valid Job states. - - Values: - STATE_UNSPECIFIED (0): - Job state unspecified. - QUEUED (1): - Job is admitted (validated and persisted) and - waiting for resources. - SCHEDULED (2): - Job is scheduled to run as soon as resource - allocation is ready. The resource allocation may - happen at a later time but with a high chance of - success. - RUNNING (3): - Resource allocation has been successful. At - least one Task in the Job is RUNNING. - SUCCEEDED (4): - All Tasks in the Job have finished - successfully. - FAILED (5): - At least one Task in the Job has failed. - DELETION_IN_PROGRESS (6): - The Job will be deleted, but has not been - deleted yet. Typically this is because resources - used by the Job are still being cleaned up. - CANCELLATION_IN_PROGRESS (7): - The Job cancellation is in progress; this is - because the resources used by the Job are still - being cleaned up. - CANCELLED (8): - The Job has been cancelled; the task - executions were stopped and the resources were - cleaned up. - """ - STATE_UNSPECIFIED = 0 - QUEUED = 1 - SCHEDULED = 2 - RUNNING = 3 - SUCCEEDED = 4 - FAILED = 5 - DELETION_IN_PROGRESS = 6 - CANCELLATION_IN_PROGRESS = 7 - CANCELLED = 8 - - class InstanceStatus(proto.Message): - r"""VM instance status. - - Attributes: - machine_type (str): - The Compute Engine machine type. - provisioning_model (google.cloud.batch_v1.types.AllocationPolicy.ProvisioningModel): - The VM instance provisioning model.
- task_pack (int): - The max number of tasks that can be assigned to - this instance type. - boot_disk (google.cloud.batch_v1.types.AllocationPolicy.Disk): - The VM boot disk. - """ - - machine_type: str = proto.Field( - proto.STRING, - number=1, - ) - provisioning_model: 'AllocationPolicy.ProvisioningModel' = proto.Field( - proto.ENUM, - number=2, - enum='AllocationPolicy.ProvisioningModel', - ) - task_pack: int = proto.Field( - proto.INT64, - number=3, - ) - boot_disk: 'AllocationPolicy.Disk' = proto.Field( - proto.MESSAGE, - number=4, - message='AllocationPolicy.Disk', - ) - - class TaskGroupStatus(proto.Message): - r"""Aggregated task status for a TaskGroup. - - Attributes: - counts (MutableMapping[str, int]): - Count of tasks in each state in the TaskGroup. - The map key is the task state name. - instances (MutableSequence[google.cloud.batch_v1.types.JobStatus.InstanceStatus]): - Status of instances allocated for the - TaskGroup. - """ - - counts: MutableMapping[str, int] = proto.MapField( - proto.STRING, - proto.INT64, - number=1, - ) - instances: MutableSequence['JobStatus.InstanceStatus'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='JobStatus.InstanceStatus', - ) - - state: State = proto.Field( - proto.ENUM, - number=1, - enum=State, - ) - status_events: MutableSequence[task.StatusEvent] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=task.StatusEvent, - ) - task_groups: MutableMapping[str, TaskGroupStatus] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=4, - message=TaskGroupStatus, - ) - run_duration: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=5, - message=duration_pb2.Duration, - ) - - -class JobNotification(proto.Message): - r"""Notification configurations. - - Attributes: - pubsub_topic (str): - The Pub/Sub topic where notifications for the job, like - state changes, will be published. If undefined, no Pub/Sub - notifications are sent for this job. - - Specify the topic using the following format: - ``projects/{project}/topics/{topic}``. Notably, if you want - to specify a Pub/Sub topic that is in a different project - than the job, your administrator must grant your project's - Batch service agent permission to publish to that topic. - - For more information about configuring Pub/Sub notifications - for a job, see - https://cloud.google.com/batch/docs/enable-notifications. - message (google.cloud.batch_v1.types.JobNotification.Message): - The attribute requirements of messages to be - sent to this Pub/Sub topic. Without this field, - no message will be sent. - """ - class Type(proto.Enum): - r"""The message type. - - Values: - TYPE_UNSPECIFIED (0): - Unspecified. - JOB_STATE_CHANGED (1): - Notify users that the job state has changed. - TASK_STATE_CHANGED (2): - Notify users that the task state has changed. - """ - TYPE_UNSPECIFIED = 0 - JOB_STATE_CHANGED = 1 - TASK_STATE_CHANGED = 2 - - class Message(proto.Message): - r"""Message details. Describes the conditions under which messages will - be sent. If no attribute is defined, no message will be sent by - default. One message should specify either the job or the task level - attributes, but not both. For example, job level: JOB_STATE_CHANGED - and/or a specified new_job_state; task level: TASK_STATE_CHANGED - and/or a specified new_task_state. - - Attributes: - type_ (google.cloud.batch_v1.types.JobNotification.Type): - The message type. - new_job_state (google.cloud.batch_v1.types.JobStatus.State): - The new job state.
- new_task_state (google.cloud.batch_v1.types.TaskStatus.State): - The new task state. - """ - - type_: 'JobNotification.Type' = proto.Field( - proto.ENUM, - number=1, - enum='JobNotification.Type', - ) - new_job_state: 'JobStatus.State' = proto.Field( - proto.ENUM, - number=2, - enum='JobStatus.State', - ) - new_task_state: task.TaskStatus.State = proto.Field( - proto.ENUM, - number=3, - enum=task.TaskStatus.State, - ) - - pubsub_topic: str = proto.Field( - proto.STRING, - number=1, - ) - message: Message = proto.Field( - proto.MESSAGE, - number=2, - message=Message, - ) - - -class AllocationPolicy(proto.Message): - r"""A Job's resource allocation policy describes when, where, and - how compute resources should be allocated for the Job. - - Attributes: - location (google.cloud.batch_v1.types.AllocationPolicy.LocationPolicy): - Location where compute resources should be - allocated for the Job. - instances (MutableSequence[google.cloud.batch_v1.types.AllocationPolicy.InstancePolicyOrTemplate]): - Describes instances that can be created by this - AllocationPolicy. Only instances[0] is supported now. - service_account (google.cloud.batch_v1.types.ServiceAccount): - Defines the service account for Batch-created VMs. If - omitted, the `default Compute Engine service - account `__ - is used. Must match the service account specified in any - used instance template configured in the Batch job. - - Includes the following fields: - - - email: The service account's email address. If not set, - the default Compute Engine service account is used. - - scopes: Additional OAuth scopes to grant the service - account, beyond the default cloud-platform scope. (list - of strings) - labels (MutableMapping[str, str]): - Custom labels to apply to the job and all the Compute Engine - resources that both are created by this allocation policy - and support labels. - - Use labels to group and describe the resources they are - applied to. Batch automatically applies predefined labels - and supports multiple ``labels`` fields for each job, which - each let you apply custom labels to various resources. Label - names that start with "goog-" or "google-" are reserved for - predefined labels. For more information about labels with - Batch, see `Organize resources using - labels `__. - network (google.cloud.batch_v1.types.AllocationPolicy.NetworkPolicy): - The network policy. - - If you define an instance template in the - ``InstancePolicyOrTemplate`` field, Batch will use the - network settings in the instance template instead of this - field. - placement (google.cloud.batch_v1.types.AllocationPolicy.PlacementPolicy): - The placement policy. - tags (MutableSequence[str]): - Optional. Tags applied to the VM instances. - - The tags identify valid sources or targets for network - firewalls. Each tag must be 1-63 characters long, and comply - with `RFC1035 `__. - """ - class ProvisioningModel(proto.Enum): - r"""Compute Engine VM instance provisioning model. - - Values: - PROVISIONING_MODEL_UNSPECIFIED (0): - Unspecified. - STANDARD (1): - Standard VM. - SPOT (2): - SPOT VM. - PREEMPTIBLE (3): - Preemptible VM (PVM). - - SPOT (above) is now the preferred model for - preemptible VM instances: the old preemptible VM - model (indicated by this field) has been migrated - to use the SPOT model as the underlying - technology. This old model will still be - supported.
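A minimal sketch of selecting the preferred ``SPOT`` model through an ``InstancePolicy``, using only the messages and enum defined in this file (the machine type is a hypothetical example):

.. code-block:: python

    from google.cloud import batch_v1

    allocation_policy = batch_v1.AllocationPolicy(
        instances=[
            batch_v1.AllocationPolicy.InstancePolicyOrTemplate(
                policy=batch_v1.AllocationPolicy.InstancePolicy(
                    machine_type="e2-standard-4",  # hypothetical machine type
                    provisioning_model=batch_v1.AllocationPolicy.ProvisioningModel.SPOT,
                ),
            ),
        ],
    )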
- """ - PROVISIONING_MODEL_UNSPECIFIED = 0 - STANDARD = 1 - SPOT = 2 - PREEMPTIBLE = 3 - - class LocationPolicy(proto.Message): - r""" - - Attributes: - allowed_locations (MutableSequence[str]): - A list of allowed location names represented by internal - URLs. - - Each location can be a region or a zone. Only one region or - multiple zones in one region is supported now. For example, - ["regions/us-central1"] allow VMs in any zones in region - us-central1. ["zones/us-central1-a", "zones/us-central1-c"] - only allow VMs in zones us-central1-a and us-central1-c. - - Mixing locations from different regions would cause errors. - For example, ["regions/us-central1", "zones/us-central1-a", - "zones/us-central1-b", "zones/us-west1-a"] contains - locations from two distinct regions: us-central1 and - us-west1. This combination will trigger an error. - """ - - allowed_locations: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - - class Disk(proto.Message): - r"""A new persistent disk or a local ssd. - A VM can only have one local SSD setting but multiple local SSD - partitions. See - https://cloud.google.com/compute/docs/disks#pdspecs and - https://cloud.google.com/compute/docs/disks#localssds. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - image (str): - URL for a VM image to use as the data source for this disk. - For example, the following are all valid URLs: - - - Specify the image by its family name: - projects/{project}/global/images/family/{image_family} - - Specify the image version: - projects/{project}/global/images/{image_version} - - You can also use Batch customized image in short names. The - following image values are supported for a boot disk: - - - ``batch-debian``: use Batch Debian images. - - ``batch-cos``: use Batch Container-Optimized images. - - ``batch-hpc-rocky``: use Batch HPC Rocky Linux images. - - This field is a member of `oneof`_ ``data_source``. - snapshot (str): - Name of a snapshot used as the data source. - Snapshot is not supported as boot disk now. - - This field is a member of `oneof`_ ``data_source``. - type_ (str): - Disk type as shown in ``gcloud compute disk-types list``. - For example, local SSD uses type "local-ssd". Persistent - disks and boot disks use "pd-balanced", "pd-extreme", - "pd-ssd" or "pd-standard". If not specified, "pd-standard" - will be used as the default type for non-boot disks, - "pd-balanced" will be used as the default type for boot - disks. - size_gb (int): - Disk size in GB. - - **Non-Boot Disk**: If the ``type`` specifies a persistent - disk, this field is ignored if ``data_source`` is set as - ``image`` or ``snapshot``. If the ``type`` specifies a local - SSD, this field should be a multiple of 375 GB, otherwise, - the final size will be the next greater multiple of 375 GB. - - **Boot Disk**: Batch will calculate the boot disk size based - on source image and task requirements if you do not speicify - the size. If both this field and the ``boot_disk_mib`` field - in task spec's ``compute_resource`` are defined, Batch will - only honor this field. Also, this field should be no smaller - than the source disk's size when the ``data_source`` is set - as ``snapshot`` or ``image``. 
For example, if you set an - image as the ``data_source`` field and the image's default - disk size is 30 GB, you can only use this field to make - the disk size larger than or equal to 30 GB. - disk_interface (str): - Local SSDs are available through both "SCSI" and "NVMe" - interfaces. If not indicated, "NVMe" will be the default one - for local SSDs. This field is ignored for persistent disks - as the interface is chosen automatically. See - https://cloud.google.com/compute/docs/disks/persistent-disks#choose_an_interface. - """ - - image: str = proto.Field( - proto.STRING, - number=4, - oneof='data_source', - ) - snapshot: str = proto.Field( - proto.STRING, - number=5, - oneof='data_source', - ) - type_: str = proto.Field( - proto.STRING, - number=1, - ) - size_gb: int = proto.Field( - proto.INT64, - number=2, - ) - disk_interface: str = proto.Field( - proto.STRING, - number=6, - ) - - class AttachedDisk(proto.Message): - r"""A new or an existing persistent disk (PD) or a local SSD - attached to a VM instance. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - new_disk (google.cloud.batch_v1.types.AllocationPolicy.Disk): - - This field is a member of `oneof`_ ``attached``. - existing_disk (str): - Name of an existing PD. - - This field is a member of `oneof`_ ``attached``. - device_name (str): - Device name that the guest operating system will see. It is - used by the Runnable.volumes field to mount disks. So please - specify the device_name if you want Batch to help mount the - disk, and it should match the device_name field in volumes. - """ - - new_disk: 'AllocationPolicy.Disk' = proto.Field( - proto.MESSAGE, - number=1, - oneof='attached', - message='AllocationPolicy.Disk', - ) - existing_disk: str = proto.Field( - proto.STRING, - number=2, - oneof='attached', - ) - device_name: str = proto.Field( - proto.STRING, - number=3, - ) - - class Accelerator(proto.Message): - r"""Accelerator describes Compute Engine accelerators to be - attached to the VM. - - Attributes: - type_ (str): - The accelerator type. For example, "nvidia-tesla-t4". See - ``gcloud compute accelerator-types list``. - count (int): - The number of accelerators of this type. - install_gpu_drivers (bool): - Deprecated: please use instances[0].install_gpu_drivers - instead. - driver_version (str): - Optional. The NVIDIA GPU driver version that - should be installed for this type. - - You can define the specific driver version such - as "470.103.01", following the driver version - requirements in - https://cloud.google.com/compute/docs/gpus/install-drivers-gpu#minimum-driver. - Batch will install the specific accelerator - driver if qualified. - """ - - type_: str = proto.Field( - proto.STRING, - number=1, - ) - count: int = proto.Field( - proto.INT64, - number=2, - ) - install_gpu_drivers: bool = proto.Field( - proto.BOOL, - number=3, - ) - driver_version: str = proto.Field( - proto.STRING, - number=4, - ) - - class InstancePolicy(proto.Message): - r"""InstancePolicy describes an instance type and resources - attached to each VM created by this InstancePolicy. - - Attributes: - machine_type (str): - The Compute Engine machine type. - min_cpu_platform (str): - The minimum CPU platform.
- See - https://cloud.google.com/compute/docs/instances/specify-min-cpu-platform. - provisioning_model (google.cloud.batch_v1.types.AllocationPolicy.ProvisioningModel): - The provisioning model. - accelerators (MutableSequence[google.cloud.batch_v1.types.AllocationPolicy.Accelerator]): - The accelerators attached to each VM - instance. - boot_disk (google.cloud.batch_v1.types.AllocationPolicy.Disk): - Boot disk to be created and attached to each - VM by this InstancePolicy. The boot disk will be - deleted when the VM is deleted. The Batch API now - only supports booting from an image. - disks (MutableSequence[google.cloud.batch_v1.types.AllocationPolicy.AttachedDisk]): - Non-boot disks to be attached for each VM - created by this InstancePolicy. New disks will - be deleted when the VM is deleted. A non-boot - disk is a disk that can be either a device with a - file system or a raw storage drive that is not - ready for data storage and access. - reservation (str): - Optional. If not specified (default), VMs will consume any - applicable reservation. If "NO_RESERVATION" is specified, - VMs will not consume any reservation. Otherwise, if - specified, VMs will consume only the specified reservation. - """ - - machine_type: str = proto.Field( - proto.STRING, - number=2, - ) - min_cpu_platform: str = proto.Field( - proto.STRING, - number=3, - ) - provisioning_model: 'AllocationPolicy.ProvisioningModel' = proto.Field( - proto.ENUM, - number=4, - enum='AllocationPolicy.ProvisioningModel', - ) - accelerators: MutableSequence['AllocationPolicy.Accelerator'] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='AllocationPolicy.Accelerator', - ) - boot_disk: 'AllocationPolicy.Disk' = proto.Field( - proto.MESSAGE, - number=8, - message='AllocationPolicy.Disk', - ) - disks: MutableSequence['AllocationPolicy.AttachedDisk'] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message='AllocationPolicy.AttachedDisk', - ) - reservation: str = proto.Field( - proto.STRING, - number=7, - ) - - class InstancePolicyOrTemplate(proto.Message): - r"""InstancePolicyOrTemplate lets you define the type of - resources to use for this job either with an InstancePolicy or - an instance template. If undefined, Batch picks the type of VM - to use and doesn't include optional VM resources such as GPUs - and extra disks. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - policy (google.cloud.batch_v1.types.AllocationPolicy.InstancePolicy): - InstancePolicy. - - This field is a member of `oneof`_ ``policy_template``. - instance_template (str): - Name of an instance template used to create VMs. The field is - named 'instance_template' instead of 'template' to avoid a - C++ keyword conflict. - - Batch only supports global instance templates from the same - project as the job. You can specify the global instance - template as a full or partial URL. - - This field is a member of `oneof`_ ``policy_template``. - install_gpu_drivers (bool): - Set this field to true if you want Batch to help fetch drivers - from a third-party location and install them for GPUs - specified in ``policy.accelerators`` or - ``instance_template`` on your behalf. Default is false.
- - For Container-Optimized Image cases, Batch will install the - accelerator driver following milestones of - https://cloud.google.com/container-optimized-os/docs/release-notes. - For non-Container-Optimized Image cases, it follows - https://github.com/GoogleCloudPlatform/compute-gpu-installation/blob/main/linux/install_gpu_driver.py. - install_ops_agent (bool): - Optional. Set this field to true if you want - Batch to install the Ops Agent on your behalf. - Default is false. - block_project_ssh_keys (bool): - Optional. Set this field to ``true`` if you want Batch to - block project-level SSH keys from accessing this job's VMs. - Alternatively, you can configure the job to specify a VM - instance template that blocks project-level SSH keys. In - either case, Batch blocks project-level SSH keys while - creating the VMs for this job. - - Batch allows project-level SSH keys for a job's VMs only if - all the following are true: - - - This field is undefined or set to ``false``. - - The job's VM instance template (if any) doesn't block - project-level SSH keys. - - Notably, you can override this behavior by manually updating - a VM to block or allow project-level SSH keys. For more - information about blocking project-level SSH keys, see the - Compute Engine documentation: - https://cloud.google.com/compute/docs/connect/restrict-ssh-keys#block-keys - """ - - policy: 'AllocationPolicy.InstancePolicy' = proto.Field( - proto.MESSAGE, - number=1, - oneof='policy_template', - message='AllocationPolicy.InstancePolicy', - ) - instance_template: str = proto.Field( - proto.STRING, - number=2, - oneof='policy_template', - ) - install_gpu_drivers: bool = proto.Field( - proto.BOOL, - number=3, - ) - install_ops_agent: bool = proto.Field( - proto.BOOL, - number=4, - ) - block_project_ssh_keys: bool = proto.Field( - proto.BOOL, - number=5, - ) - - class NetworkInterface(proto.Message): - r"""A network interface. - - Attributes: - network (str): - The URL of an existing network resource. You can specify the - network as a full or partial URL. - - For example, the following are all valid URLs: - - - https://www.googleapis.com/compute/v1/projects/{project}/global/networks/{network} - - projects/{project}/global/networks/{network} - - global/networks/{network} - subnetwork (str): - The URL of an existing subnetwork resource in the network. - You can specify the subnetwork as a full or partial URL. - - For example, the following are all valid URLs: - - - https://www.googleapis.com/compute/v1/projects/{project}/regions/{region}/subnetworks/{subnetwork} - - projects/{project}/regions/{region}/subnetworks/{subnetwork} - - regions/{region}/subnetworks/{subnetwork} - no_external_ip_address (bool): - Default is false (with an external IP - address). Required if no external public IP - address is attached to the VM. If there is no external - public IP address, additional configuration is - required to allow the VM to access Google - Services. See - https://cloud.google.com/vpc/docs/configure-private-google-access - and - https://cloud.google.com/nat/docs/gce-example#create-nat - for more information. - """ - - network: str = proto.Field( - proto.STRING, - number=1, - ) - subnetwork: str = proto.Field( - proto.STRING, - number=2, - ) - no_external_ip_address: bool = proto.Field( - proto.BOOL, - number=3, - ) - - class NetworkPolicy(proto.Message): - r"""NetworkPolicy describes VM instance network configurations.
- - Attributes: - network_interfaces (MutableSequence[google.cloud.batch_v1.types.AllocationPolicy.NetworkInterface]): - Network configurations. - """ - - network_interfaces: MutableSequence['AllocationPolicy.NetworkInterface'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='AllocationPolicy.NetworkInterface', - ) - - class PlacementPolicy(proto.Message): - r"""PlacementPolicy describes a group placement policy for the - VMs controlled by this AllocationPolicy. - - Attributes: - collocation (str): - UNSPECIFIED vs. COLLOCATED (default - UNSPECIFIED). Use COLLOCATED when you want VMs - to be located close to each other for low - network latency between the VMs. No placement - policy will be generated when collocation is - UNSPECIFIED. - max_distance (int): - When specified, causes the job to fail if more than - max_distance logical switches are required between VMs. - Batch uses the most compact possible placement of VMs even - when max_distance is not specified. An explicit max_distance - makes that level of compactness a strict requirement. Not - yet implemented. - """ - - collocation: str = proto.Field( - proto.STRING, - number=1, - ) - max_distance: int = proto.Field( - proto.INT64, - number=2, - ) - - location: LocationPolicy = proto.Field( - proto.MESSAGE, - number=1, - message=LocationPolicy, - ) - instances: MutableSequence[InstancePolicyOrTemplate] = proto.RepeatedField( - proto.MESSAGE, - number=8, - message=InstancePolicyOrTemplate, - ) - service_account: 'ServiceAccount' = proto.Field( - proto.MESSAGE, - number=9, - message='ServiceAccount', - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=6, - ) - network: NetworkPolicy = proto.Field( - proto.MESSAGE, - number=7, - message=NetworkPolicy, - ) - placement: PlacementPolicy = proto.Field( - proto.MESSAGE, - number=10, - message=PlacementPolicy, - ) - tags: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=11, - ) - - -class TaskGroup(proto.Message): - r"""A TaskGroup defines one or more Tasks that all share the same - TaskSpec. - - Attributes: - name (str): - Output only. TaskGroup name. - The system generates this field based on the parent - Job name. For example: - - "projects/123456/locations/us-west1/jobs/job01/taskGroups/group01". - task_spec (google.cloud.batch_v1.types.TaskSpec): - Required. Tasks in the group share the same - task spec. - task_count (int): - Number of Tasks in the TaskGroup. - Default is 1. - parallelism (int): - Max number of tasks that can run in parallel. Defaults to - min(task_count, parallel tasks per job limit). See: `Job - Limits `__. - Field parallelism must be 1 if the scheduling_policy is - IN_ORDER. - scheduling_policy (google.cloud.batch_v1.types.TaskGroup.SchedulingPolicy): - Scheduling policy for Tasks in the TaskGroup. The default - value is AS_SOON_AS_POSSIBLE. - task_environments (MutableSequence[google.cloud.batch_v1.types.Environment]): - An array of environment variable mappings, which are passed - to Tasks with matching indices. If task_environments is used - then task_count should not be specified in the request (and - will be ignored). Task count will be the length of - task_environments. - - Tasks get a BATCH_TASK_INDEX and BATCH_TASK_COUNT - environment variable, in addition to any environment - variables set in task_environments, specifying the number of - Tasks in the Task's parent TaskGroup, and the specific - Task's index in the TaskGroup (0 through BATCH_TASK_COUNT - - 1).
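A sketch of the per-task index variables just described; it assumes ``TaskSpec.runnables`` and ``Runnable.Script`` with a ``text`` field, which are defined later in this file but not shown in this hunk:

.. code-block:: python

    from google.cloud import batch_v1

    task_group = batch_v1.TaskGroup(
        task_count=4,
        task_spec=batch_v1.TaskSpec(
            runnables=[
                batch_v1.Runnable(
                    script=batch_v1.Runnable.Script(
                        # Each task sees its own index and the total count.
                        text="echo task ${BATCH_TASK_INDEX} of ${BATCH_TASK_COUNT}",
                    ),
                ),
            ],
        ),
    )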
- task_count_per_node (int): - Max number of tasks that can be run on a VM - at the same time. If not specified, the system - will decide a value based on available compute - resources on a VM and task requirements. - require_hosts_file (bool): - When true, Batch will populate a file with a list of all VMs - assigned to the TaskGroup and set the BATCH_HOSTS_FILE - environment variable to the path of that file. Defaults to - false. The host file supports up to 1000 VMs. - permissive_ssh (bool): - When true, Batch will configure SSH to allow - passwordless login between VMs running the Batch - tasks in the same TaskGroup. - run_as_non_root (bool): - Optional. If not set or set to false, Batch uses the root - user to execute runnables. If set to true, Batch runs the - runnables using a non-root user. Currently, the non-root - user Batch uses is generated by OS Login. For more - information, see `About OS - Login `__. - """ - class SchedulingPolicy(proto.Enum): - r"""How Tasks in the TaskGroup should be scheduled relative to - each other. - - Values: - SCHEDULING_POLICY_UNSPECIFIED (0): - Unspecified. - AS_SOON_AS_POSSIBLE (1): - Run Tasks as soon as resources are available. - - Tasks might be executed in parallel depending on parallelism - and task_count values. - IN_ORDER (2): - Run Tasks sequentially in increasing task - index order. - """ - SCHEDULING_POLICY_UNSPECIFIED = 0 - AS_SOON_AS_POSSIBLE = 1 - IN_ORDER = 2 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - task_spec: task.TaskSpec = proto.Field( - proto.MESSAGE, - number=3, - message=task.TaskSpec, - ) - task_count: int = proto.Field( - proto.INT64, - number=4, - ) - parallelism: int = proto.Field( - proto.INT64, - number=5, - ) - scheduling_policy: SchedulingPolicy = proto.Field( - proto.ENUM, - number=6, - enum=SchedulingPolicy, - ) - task_environments: MutableSequence[task.Environment] = proto.RepeatedField( - proto.MESSAGE, - number=9, - message=task.Environment, - ) - task_count_per_node: int = proto.Field( - proto.INT64, - number=10, - ) - require_hosts_file: bool = proto.Field( - proto.BOOL, - number=11, - ) - permissive_ssh: bool = proto.Field( - proto.BOOL, - number=12, - ) - run_as_non_root: bool = proto.Field( - proto.BOOL, - number=14, - ) - - -class ServiceAccount(proto.Message): - r"""Carries information about a Google Cloud service account. - - Attributes: - email (str): - Email address of the service account. - scopes (MutableSequence[str]): - List of scopes to be enabled for this service - account. - """ - - email: str = proto.Field( - proto.STRING, - number=1, - ) - scopes: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/types/task.py b/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/types/task.py deleted file mode 100644 index 8a2fd5d3efff..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/types/task.py +++ /dev/null @@ -1,809 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.batch_v1.types import volume -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.batch.v1', - manifest={ - 'ComputeResource', - 'StatusEvent', - 'TaskExecution', - 'TaskStatus', - 'Runnable', - 'TaskSpec', - 'LifecyclePolicy', - 'Task', - 'Environment', - }, -) - - -class ComputeResource(proto.Message): - r"""Compute resource requirements. - - ComputeResource defines the amount of resources required for each - task. Make sure your tasks have enough resources to successfully - run. If you also define the types of resources for a job to use with - the - `InstancePolicyOrTemplate `__ - field, make sure both fields are compatible with each other. - - Attributes: - cpu_milli (int): - The milliCPU count. - - ``cpuMilli`` defines the amount of CPU resources per task in - milliCPU units. For example, ``1000`` corresponds to 1 vCPU - per task. If undefined, the default value is ``2000``. - - If you also define the VM's machine type using the - ``machineType`` in - `InstancePolicy `__ - field or inside the ``instanceTemplate`` in the - `InstancePolicyOrTemplate `__ - field, make sure the CPU resources for both fields are - compatible with each other and with how many tasks you want - to allow to run on the same VM at the same time. - - For example, if you specify the ``n2-standard-2`` machine - type, which has 2 vCPUs each, it is recommended that you set - ``cpuMilli`` to no more than ``2000``, or that you run two - tasks on the same VM if you set ``cpuMilli`` to - ``1000`` or less. - memory_mib (int): - Memory in MiB. - - ``memoryMib`` defines the amount of memory per task in MiB - units. If undefined, the default value is ``2000``. If you - also define the VM's machine type using the ``machineType`` - in - `InstancePolicy `__ - field or inside the ``instanceTemplate`` in the - `InstancePolicyOrTemplate `__ - field, make sure the memory resources for both fields are - compatible with each other and with how many tasks you want - to allow to run on the same VM at the same time. - - For example, if you specify the ``n2-standard-2`` machine - type, which has 8 GiB of memory each, it is recommended that - you set ``memoryMib`` to no more than ``8192``, or that you - run two tasks on the same VM if you set - ``memoryMib`` to ``4096`` or less. - boot_disk_mib (int): - Extra boot disk size in MiB for each task. - """ - - cpu_milli: int = proto.Field( - proto.INT64, - number=1, - ) - memory_mib: int = proto.Field( - proto.INT64, - number=2, - ) - boot_disk_mib: int = proto.Field( - proto.INT64, - number=4, - ) - - -class StatusEvent(proto.Message): - r"""Status event. - - Attributes: - type_ (str): - Type of the event. - description (str): - Description of the event. - event_time (google.protobuf.timestamp_pb2.Timestamp): - The time this event occurred.
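Stepping back to the ``ComputeResource`` sizing guidance above, a minimal sketch for an ``n2-standard-2`` VM (2 vCPUs, 8 GiB of memory) sized so two tasks fit per VM, using only the fields defined in this message:

.. code-block:: python

    from google.cloud import batch_v1

    # Two tasks fit on one n2-standard-2 VM: 2 x 1000 milliCPU = 2 vCPUs
    # and 2 x 4096 MiB = 8 GiB, matching the docstring's guidance.
    compute = batch_v1.ComputeResource(
        cpu_milli=1000,
        memory_mib=4096,
    )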
- task_execution (google.cloud.batch_v1.types.TaskExecution):
- Task Execution.
- This field is only defined for task-level status
- events where the task fails.
- task_state (google.cloud.batch_v1.types.TaskStatus.State):
- Task State.
- This field is only defined for task-level status
- events.
- """
-
- type_: str = proto.Field(
- proto.STRING,
- number=3,
- )
- description: str = proto.Field(
- proto.STRING,
- number=1,
- )
- event_time: timestamp_pb2.Timestamp = proto.Field(
- proto.MESSAGE,
- number=2,
- message=timestamp_pb2.Timestamp,
- )
- task_execution: 'TaskExecution' = proto.Field(
- proto.MESSAGE,
- number=4,
- message='TaskExecution',
- )
- task_state: 'TaskStatus.State' = proto.Field(
- proto.ENUM,
- number=5,
- enum='TaskStatus.State',
- )
-
-
-class TaskExecution(proto.Message):
- r"""This TaskExecution field includes detailed information about
- task execution procedures, based on StatusEvent types.
-
- Attributes:
- exit_code (int):
- The exit code of a finished task.
-
- If the task succeeded, the exit code will be 0. If the task
- failed but the failure was not caused by one of the sources
- listed below, the exit code will be 50000.
-
- Otherwise, it can come from different sources:
-
- - Batch known failures:
- https://cloud.google.com/batch/docs/troubleshooting#reserved-exit-codes.
- - Batch runnable execution failures; you can rely on Batch
- logs to further diagnose:
- https://cloud.google.com/batch/docs/analyze-job-using-logs.
- If there are multiple runnable failures, Batch only
- exposes the first error.
- """
-
- exit_code: int = proto.Field(
- proto.INT32,
- number=1,
- )
-
-
-class TaskStatus(proto.Message):
- r"""Status of a task.
-
- Attributes:
- state (google.cloud.batch_v1.types.TaskStatus.State):
- Task state.
- status_events (MutableSequence[google.cloud.batch_v1.types.StatusEvent]):
- Detailed info about why the state was reached.
- """
- class State(proto.Enum):
- r"""Task states.
-
- Values:
- STATE_UNSPECIFIED (0):
- Unknown state.
- PENDING (1):
- The Task is created and waiting for
- resources.
- ASSIGNED (2):
- The Task is assigned to at least one VM.
- RUNNING (3):
- The Task is running.
- FAILED (4):
- The Task has failed.
- SUCCEEDED (5):
- The Task has succeeded.
- UNEXECUTED (6):
- The Task has not been executed when the Job
- finishes.
- """
- STATE_UNSPECIFIED = 0
- PENDING = 1
- ASSIGNED = 2
- RUNNING = 3
- FAILED = 4
- SUCCEEDED = 5
- UNEXECUTED = 6
-
- state: State = proto.Field(
- proto.ENUM,
- number=1,
- enum=State,
- )
- status_events: MutableSequence['StatusEvent'] = proto.RepeatedField(
- proto.MESSAGE,
- number=2,
- message='StatusEvent',
- )
-
-
-class Runnable(proto.Message):
- r"""Runnable describes instructions for executing a specific
- script or container as part of a Task.
-
- This message has `oneof`_ fields (mutually exclusive fields).
- For each oneof, at most one member field can be set at the same time.
- Setting any member of the oneof automatically clears all other
- members.
-
- .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
-
- Attributes:
- container (google.cloud.batch_v1.types.Runnable.Container):
- Container runnable.
-
- This field is a member of `oneof`_ ``executable``.
- script (google.cloud.batch_v1.types.Runnable.Script):
- Script runnable.
-
- This field is a member of `oneof`_ ``executable``.
- barrier (google.cloud.batch_v1.types.Runnable.Barrier):
- Barrier runnable.
-
- This field is a member of `oneof`_ ``executable``.
- display_name (str):
- Optional.
DisplayName is an optional field
- that can be provided by the caller. If provided,
- it will be used in logs and other outputs to
- identify the script, making it easier for users
- to understand the logs. If not provided, the
- index of the runnable will be used for outputs.
- ignore_exit_status (bool):
- Normally, a runnable that returns a non-zero exit status
- fails and causes the task to fail. However, you can set this
- field to ``true`` to allow the task to continue executing
- its other runnables even if this runnable fails.
- background (bool):
- Normally, a runnable that doesn't exit causes its task to
- fail. However, you can set this field to ``true`` to
- configure a background runnable. Background runnables are
- allowed to continue running in the background while the task
- executes subsequent runnables. For example, background
- runnables are useful for providing services to other
- runnables or providing debugging-support tools like SSH
- servers.
-
- Specifically, background runnables are killed automatically
- (if they have not already exited) a short time after all
- foreground runnables have completed. Even though this is
- likely to result in a non-zero exit status for the
- background runnable, these automatic kills are not treated
- as task failures.
- always_run (bool):
- By default, after a Runnable fails, no further Runnables are
- executed. This flag indicates that this Runnable must be run
- even if the Task has already failed. This is useful for
- Runnables that copy output files off of the VM or for
- debugging.
-
- The always_run flag does not override the Task's overall
- max_run_duration. If the max_run_duration has expired then
- no further Runnables will execute, not even always_run
- Runnables.
- environment (google.cloud.batch_v1.types.Environment):
- Environment variables for this Runnable
- (overrides variables set for the whole Task or
- TaskGroup).
- timeout (google.protobuf.duration_pb2.Duration):
- Timeout for this Runnable.
- labels (MutableMapping[str, str]):
- Labels for this Runnable.
- """
-
- class Container(proto.Message):
- r"""Container runnable.
-
- Attributes:
- image_uri (str):
- Required. The URI to pull the container image
- from.
- commands (MutableSequence[str]):
- Required for some container images. Overrides the ``CMD``
- specified in the container. If there is an ``ENTRYPOINT``
- (either in the container image or with the ``entrypoint``
- field below) then these commands are appended as arguments
- to the ``ENTRYPOINT``.
- entrypoint (str):
- Required for some container images. Overrides the
- ``ENTRYPOINT`` specified in the container.
- volumes (MutableSequence[str]):
- Volumes to mount (bind mount) from the host machine files or
- directories into the container, formatted to match the
- ``--volume`` option for the ``docker run`` command—for
- example, ``/foo:/bar`` or ``/foo:/bar:ro``.
-
- If the ``TaskSpec.Volumes`` field is specified but this
- field is not, Batch will mount each volume from the host
- machine to the container with the same mount path by
- default. In this case, the default mount option for
- containers will be read-only (``ro``) for existing
- persistent disks and read-write (``rw``) for other volume
- types, regardless of the original mount options specified in
- ``TaskSpec.Volumes``. If you need different mount settings,
- you can explicitly configure them in this field.
- options (str):
- Required for some container images.
Arbitrary additional
- options to include in the ``docker run`` command when
- running this container—for example, ``--network host``. For
- the ``--volume`` option, use the ``volumes`` field for the
- container.
- block_external_network (bool):
- If set to true, external network access to and from the
- container will be blocked. Containers that set
- block_external_network to true can still communicate with
- each other, and the network cannot be specified in the
- ``container.options`` field.
- username (str):
- Required if the container image is from a private Docker
- registry. The username to log in to the Docker registry that
- contains the image.
-
- You can either specify the username directly by using plain
- text or specify an encrypted username by using a Secret
- Manager secret: ``projects/*/secrets/*/versions/*``.
- However, using a secret is recommended for enhanced
- security.
-
- Caution: If you specify the username using plain text, you
- risk the username being exposed to any users who can view
- the job or its logs. To avoid this risk, specify a secret
- that contains the username instead.
-
- Learn more about `Secret
- Manager `__
- and `using Secret Manager with
- Batch `__.
- password (str):
- Required if the container image is from a private Docker
- registry. The password to log in to the Docker registry that
- contains the image.
-
- For security, it is strongly recommended to specify an
- encrypted password by using a Secret Manager secret:
- ``projects/*/secrets/*/versions/*``.
-
- Warning: If you specify the password using plain text, you
- risk the password being exposed to any users who can view
- the job or its logs. To avoid this risk, specify a secret
- that contains the password instead.
-
- Learn more about `Secret
- Manager `__
- and `using Secret Manager with
- Batch `__.
- enable_image_streaming (bool):
- Optional. If set to true, this container runnable uses Image
- streaming.
-
- Use Image streaming to allow the runnable to initialize
- without waiting for the entire container image to download,
- which can significantly reduce startup time for large
- container images.
-
- When ``enableImageStreaming`` is set to true, the container
- runtime is `containerd `__ instead
- of Docker. Additionally, this container runnable only
- supports the following ``container`` subfields:
- ``imageUri``, ``commands[]``, ``entrypoint``, and
- ``volumes[]``; any other ``container`` subfields are
- ignored.
-
- For more information about the requirements and limitations
- for using Image streaming with Batch, see the
- ```image-streaming`` sample on
- GitHub `__.
- """
-
- image_uri: str = proto.Field(
- proto.STRING,
- number=1,
- )
- commands: MutableSequence[str] = proto.RepeatedField(
- proto.STRING,
- number=2,
- )
- entrypoint: str = proto.Field(
- proto.STRING,
- number=3,
- )
- volumes: MutableSequence[str] = proto.RepeatedField(
- proto.STRING,
- number=7,
- )
- options: str = proto.Field(
- proto.STRING,
- number=8,
- )
- block_external_network: bool = proto.Field(
- proto.BOOL,
- number=9,
- )
- username: str = proto.Field(
- proto.STRING,
- number=10,
- )
- password: str = proto.Field(
- proto.STRING,
- number=11,
- )
- enable_image_streaming: bool = proto.Field(
- proto.BOOL,
- number=12,
- )
-
- class Script(proto.Message):
- r"""Script runnable.
-
- This message has `oneof`_ fields (mutually exclusive fields).
- For each oneof, at most one member field can be set at the same time.
- Setting any member of the oneof automatically clears all other
- members.
-
- ..
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
-
- Attributes:
- path (str):
- The path to a script file that is accessible from the host
- VM(s).
-
- Unless the script file supports the default ``#!/bin/sh``
- shell interpreter, you must specify an interpreter by
- including a [shebang
- line](https://en.wikipedia.org/wiki/Shebang_(Unix)) as the
- first line of the file. For example, to execute the script
- using bash, include ``#!/bin/bash`` as the first line of the
- file. Alternatively, to execute the script using Python3,
- include ``#!/usr/bin/env python3`` as the first line of the
- file.
-
- This field is a member of `oneof`_ ``command``.
- text (str):
- The text for a script.
-
- Unless the script text supports the default ``#!/bin/sh``
- shell interpreter, you must specify an interpreter by
- including a [shebang
- line](https://en.wikipedia.org/wiki/Shebang_(Unix)) at the
- beginning of the text. For example, to execute the script
- using bash, include ``#!/bin/bash\n`` at the beginning of
- the text. Alternatively, to execute the script using
- Python3, include ``#!/usr/bin/env python3\n`` at the
- beginning of the text.
-
- This field is a member of `oneof`_ ``command``.
- """
-
- path: str = proto.Field(
- proto.STRING,
- number=1,
- oneof='command',
- )
- text: str = proto.Field(
- proto.STRING,
- number=2,
- oneof='command',
- )
-
- class Barrier(proto.Message):
- r"""A barrier runnable automatically blocks the execution of
- subsequent runnables until all the tasks in the task group reach
- the barrier.
-
- Attributes:
- name (str):
- Barriers are identified by their index in the
- runnable list. Names are not required, but if
- present, should be an identifier.
- """
-
- name: str = proto.Field(
- proto.STRING,
- number=1,
- )
-
- container: Container = proto.Field(
- proto.MESSAGE,
- number=1,
- oneof='executable',
- message=Container,
- )
- script: Script = proto.Field(
- proto.MESSAGE,
- number=2,
- oneof='executable',
- message=Script,
- )
- barrier: Barrier = proto.Field(
- proto.MESSAGE,
- number=6,
- oneof='executable',
- message=Barrier,
- )
- display_name: str = proto.Field(
- proto.STRING,
- number=10,
- )
- ignore_exit_status: bool = proto.Field(
- proto.BOOL,
- number=3,
- )
- background: bool = proto.Field(
- proto.BOOL,
- number=4,
- )
- always_run: bool = proto.Field(
- proto.BOOL,
- number=5,
- )
- environment: 'Environment' = proto.Field(
- proto.MESSAGE,
- number=7,
- message='Environment',
- )
- timeout: duration_pb2.Duration = proto.Field(
- proto.MESSAGE,
- number=8,
- message=duration_pb2.Duration,
- )
- labels: MutableMapping[str, str] = proto.MapField(
- proto.STRING,
- proto.STRING,
- number=9,
- )
-
-
-class TaskSpec(proto.Message):
- r"""Spec of a task.
-
- Attributes:
- runnables (MutableSequence[google.cloud.batch_v1.types.Runnable]):
- Required. The sequence of one or more runnables (executable
- scripts, executable containers, and/or barriers) for each
- task in this task group to run. Each task runs this list of
- runnables in order. For a task to succeed, all of its script
- and container runnables each must meet at least one of the
- following conditions:
-
- - The runnable exited with a zero status.
- - The runnable didn't finish, but you enabled its
- ``background`` subfield.
- - The runnable exited with a non-zero status, but you
- enabled its ``ignore_exit_status`` subfield.
- compute_resource (google.cloud.batch_v1.types.ComputeResource):
- ComputeResource requirements.
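To ground the attribute descriptions above and below, here is one plausible way to assemble a minimal ``TaskSpec`` from these types. This is a hedged sketch with illustrative values, not code from this diff; proto-plus accepts a ``datetime.timedelta`` for ``Duration`` fields.

.. code-block:: python

    from datetime import timedelta

    from google.cloud import batch_v1

    task_spec = batch_v1.TaskSpec(
        runnables=[
            # A single script runnable; the script body is illustrative.
            batch_v1.Runnable(
                script=batch_v1.Runnable.Script(text="#!/bin/bash\necho hello"),
            ),
        ],
        compute_resource=batch_v1.ComputeResource(cpu_milli=1000, memory_mib=2048),
        max_retry_count=2,
        max_run_duration=timedelta(hours=1),
    )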
- max_run_duration (google.protobuf.duration_pb2.Duration):
- Maximum duration the task should run before being
- automatically retried (if enabled) or automatically failed.
- Format the value of this field as a time limit in seconds
- followed by ``s``—for example, ``3600s`` for 1 hour. The
- field accepts any value between 0 and the maximum listed for
- the ``Duration`` field type at
- https://protobuf.dev/reference/protobuf/google.protobuf/#duration;
- however, the actual maximum run time for a job will be
- limited to the maximum run time for a job listed at
- https://cloud.google.com/batch/quotas#max-job-duration.
- max_retry_count (int):
- Maximum number of retries on failures. The
- default is 0, which means never retry. The valid
- value range is [0, 10].
- lifecycle_policies (MutableSequence[google.cloud.batch_v1.types.LifecyclePolicy]):
- Lifecycle management schema to apply when any task in a task
- group fails. Currently we only support one lifecycle policy.
- When the lifecycle policy condition is met, the action in
- the policy will execute. If the task execution result does
- not match any defined lifecycle policy, the default policy
- applies: a task that exits with code 0 completes, and a
- task that ends with a non-zero exit code is retried up to
- max_retry_count times.
- environments (MutableMapping[str, str]):
- Deprecated: please use environment (non-plural)
- instead.
- volumes (MutableSequence[google.cloud.batch_v1.types.Volume]):
- Volumes to mount before running Tasks using
- this TaskSpec.
- environment (google.cloud.batch_v1.types.Environment):
- Environment variables to set before running
- the Task.
- """
-
- runnables: MutableSequence['Runnable'] = proto.RepeatedField(
- proto.MESSAGE,
- number=8,
- message='Runnable',
- )
- compute_resource: 'ComputeResource' = proto.Field(
- proto.MESSAGE,
- number=3,
- message='ComputeResource',
- )
- max_run_duration: duration_pb2.Duration = proto.Field(
- proto.MESSAGE,
- number=4,
- message=duration_pb2.Duration,
- )
- max_retry_count: int = proto.Field(
- proto.INT32,
- number=5,
- )
- lifecycle_policies: MutableSequence['LifecyclePolicy'] = proto.RepeatedField(
- proto.MESSAGE,
- number=9,
- message='LifecyclePolicy',
- )
- environments: MutableMapping[str, str] = proto.MapField(
- proto.STRING,
- proto.STRING,
- number=6,
- )
- volumes: MutableSequence[volume.Volume] = proto.RepeatedField(
- proto.MESSAGE,
- number=7,
- message=volume.Volume,
- )
- environment: 'Environment' = proto.Field(
- proto.MESSAGE,
- number=10,
- message='Environment',
- )
-
-
-class LifecyclePolicy(proto.Message):
- r"""LifecyclePolicy describes how to deal with task failures
- based on different conditions.
-
- Attributes:
- action (google.cloud.batch_v1.types.LifecyclePolicy.Action):
- Action to execute when ActionCondition is true. When
- RETRY_TASK is specified, failed tasks are retried if any
- exit code matches and are failed if no match is found.
- Likewise, when FAIL_TASK is specified, failed tasks are
- failed if any exit code matches and are retried if no match
- is found.
- action_condition (google.cloud.batch_v1.types.LifecyclePolicy.ActionCondition):
- Conditions that decide how a task failure is
- handled with a specific action.
- """
- class Action(proto.Enum):
- r"""Action on task failures based on different conditions.
-
- Values:
- ACTION_UNSPECIFIED (0):
- Action unspecified.
- RETRY_TASK (1):
- Tasks in the group will be scheduled to
- re-execute.
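To make the RETRY_TASK and FAIL_TASK semantics concrete, a policy that retries only on a specific exit code might look like the sketch below. The exit code is illustrative, not a documented constant, and this is not code from this diff.

.. code-block:: python

    from google.cloud import batch_v1

    retry_policy = batch_v1.LifecyclePolicy(
        action=batch_v1.LifecyclePolicy.Action.RETRY_TASK,
        action_condition=batch_v1.LifecyclePolicy.ActionCondition(
            exit_codes=[50001],  # illustrative; retry only on this code
        ),
    )
    spec = batch_v1.TaskSpec(lifecycle_policies=[retry_policy], max_retry_count=3)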
- FAIL_TASK (2):
- Tasks in the group will be stopped
- immediately.
- """
- ACTION_UNSPECIFIED = 0
- RETRY_TASK = 1
- FAIL_TASK = 2
-
- class ActionCondition(proto.Message):
- r"""Conditions for actions to deal with task failures.
-
- Attributes:
- exit_codes (MutableSequence[int]):
- Exit codes of a task execution.
- If more than one exit code is listed, the
- condition is met and the action is executed
- when a task exits with any exit code in the
- list.
- """
-
- exit_codes: MutableSequence[int] = proto.RepeatedField(
- proto.INT32,
- number=1,
- )
-
- action: Action = proto.Field(
- proto.ENUM,
- number=1,
- enum=Action,
- )
- action_condition: ActionCondition = proto.Field(
- proto.MESSAGE,
- number=2,
- message=ActionCondition,
- )
-
-
-class Task(proto.Message):
- r"""A Cloud Batch task.
-
- Attributes:
- name (str):
- Task name.
- The name is generated from the parent TaskGroup
- name and 'id' field. For example:
-
- "projects/123456/locations/us-west1/jobs/job01/taskGroups/group01/tasks/task01".
- status (google.cloud.batch_v1.types.TaskStatus):
- Task Status.
- """
-
- name: str = proto.Field(
- proto.STRING,
- number=1,
- )
- status: 'TaskStatus' = proto.Field(
- proto.MESSAGE,
- number=2,
- message='TaskStatus',
- )
-
-
-class Environment(proto.Message):
- r"""An Environment describes a collection of environment
- variables to set when executing Tasks.
-
- Attributes:
- variables (MutableMapping[str, str]):
- A map of environment variable names to
- values.
- secret_variables (MutableMapping[str, str]):
- A map of environment variable names to Secret
- Manager secret names. The VM will access the
- named secrets to set the value of each
- environment variable.
- encrypted_variables (google.cloud.batch_v1.types.Environment.KMSEnvMap):
- An encrypted JSON dictionary where the
- key/value pairs correspond to environment
- variable names and their values.
- """
-
- class KMSEnvMap(proto.Message):
- r"""
-
- Attributes:
- key_name (str):
- The name of the KMS key that will be used to
- decrypt the cipher text.
- cipher_text (str):
- The value of the cipherText response from the ``encrypt``
- method.
- """
-
- key_name: str = proto.Field(
- proto.STRING,
- number=1,
- )
- cipher_text: str = proto.Field(
- proto.STRING,
- number=2,
- )
-
- variables: MutableMapping[str, str] = proto.MapField(
- proto.STRING,
- proto.STRING,
- number=1,
- )
- secret_variables: MutableMapping[str, str] = proto.MapField(
- proto.STRING,
- proto.STRING,
- number=2,
- )
- encrypted_variables: KMSEnvMap = proto.Field(
- proto.MESSAGE,
- number=3,
- message=KMSEnvMap,
- )
-
-
-__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/types/volume.py b/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/types/volume.py deleted file mode 100644 index 85635e2f8232..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/google/cloud/batch_v1/types/volume.py +++ /dev/null @@ -1,146 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-from __future__ import annotations
-
-from typing import MutableMapping, MutableSequence
-
-import proto # type: ignore
-
-
-__protobuf__ = proto.module(
- package='google.cloud.batch.v1',
- manifest={
- 'Volume',
- 'NFS',
- 'GCS',
- },
-)
-
-
-class Volume(proto.Message):
- r"""Volume describes a volume and parameters for it to be mounted
- to a VM.
-
- This message has `oneof`_ fields (mutually exclusive fields).
- For each oneof, at most one member field can be set at the same time.
- Setting any member of the oneof automatically clears all other
- members.
-
- .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
-
- Attributes:
- nfs (google.cloud.batch_v1.types.NFS):
- A Network File System (NFS) volume. For
- example, a Filestore file share.
-
- This field is a member of `oneof`_ ``source``.
- gcs (google.cloud.batch_v1.types.GCS):
- A Google Cloud Storage (GCS) volume.
-
- This field is a member of `oneof`_ ``source``.
- device_name (str):
- Device name of an attached disk volume, which should align
- with a device_name specified by
- job.allocation_policy.instances[0].policy.disks[i].device_name
- or defined by the given instance template in
- job.allocation_policy.instances[0].instance_template.
-
- This field is a member of `oneof`_ ``source``.
- mount_path (str):
- The mount path for the volume, e.g.
- /mnt/disks/share.
- mount_options (MutableSequence[str]):
- Mount options vary based on the type of storage volume:
-
- - For a Cloud Storage bucket, all the mount options
- provided by the ```gcsfuse``
- tool `__
- are supported.
- - For an existing persistent disk, all mount options
- provided by the ```mount``
- command `__
- except write-related options are supported. This is due
- to restrictions of `multi-writer
- mode `__.
- - For any other disk or a Network File System (NFS), all
- the mount options provided by the ``mount`` command are
- supported.
- """
-
- nfs: 'NFS' = proto.Field(
- proto.MESSAGE,
- number=1,
- oneof='source',
- message='NFS',
- )
- gcs: 'GCS' = proto.Field(
- proto.MESSAGE,
- number=3,
- oneof='source',
- message='GCS',
- )
- device_name: str = proto.Field(
- proto.STRING,
- number=6,
- oneof='source',
- )
- mount_path: str = proto.Field(
- proto.STRING,
- number=4,
- )
- mount_options: MutableSequence[str] = proto.RepeatedField(
- proto.STRING,
- number=5,
- )
-
-
-class NFS(proto.Message):
- r"""Represents an NFS volume.
-
- Attributes:
- server (str):
- The IP address of the NFS server.
- remote_path (str):
- Remote source path exported from the NFS,
- e.g., "/share".
- """
-
- server: str = proto.Field(
- proto.STRING,
- number=1,
- )
- remote_path: str = proto.Field(
- proto.STRING,
- number=2,
- )
-
-
-class GCS(proto.Message):
- r"""Represents a Google Cloud Storage volume.
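As a quick illustration of these volume types, mounting a Cloud Storage bucket into a task might look like the following sketch. The bucket name and mount path are illustrative, not taken from this diff.

.. code-block:: python

    from google.cloud import batch_v1

    vol = batch_v1.Volume(
        # remote_path is a bucket name, optionally with a subdirectory.
        gcs=batch_v1.GCS(remote_path="my-bucket/inputs/"),
        mount_path="/mnt/disks/inputs",
    )
    spec = batch_v1.TaskSpec(volumes=[vol])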
-
- Attributes:
- remote_path (str):
- Remote path, either a bucket name or a subdirectory of a
- bucket, e.g.: bucket_name, bucket_name/subdirectory/
- """
-
- remote_path: str = proto.Field(
- proto.STRING,
- number=1,
- )
-
-
-__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-batch/v1/mypy.ini b/owl-bot-staging/google-cloud-batch/v1/mypy.ini deleted file mode 100644 index 574c5aed394b..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@
-[mypy]
-python_version = 3.7
-namespace_packages = True
diff --git a/owl-bot-staging/google-cloud-batch/v1/noxfile.py b/owl-bot-staging/google-cloud-batch/v1/noxfile.py deleted file mode 100644 index 9333088a09ee..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/noxfile.py +++ /dev/null @@ -1,280 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import os
-import pathlib
-import re
-import shutil
-import subprocess
-import sys
-
-
-import nox # type: ignore
-
-ALL_PYTHON = [
- "3.7",
- "3.8",
- "3.9",
- "3.10",
- "3.11",
- "3.12",
- "3.13",
-]
-
-CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
-
-LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt"
-PACKAGE_NAME = 'google-cloud-batch'
-
-BLACK_VERSION = "black==22.3.0"
-BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"]
-DEFAULT_PYTHON_VERSION = "3.13"
-
-nox.sessions = [
- "unit",
- "cover",
- "mypy",
- "check_lower_bounds",
- # exclude update_lower_bounds from default
- "docs",
- "blacken",
- "lint",
- "prerelease_deps",
-]
-
-@nox.session(python=ALL_PYTHON)
-@nox.parametrize(
- "protobuf_implementation",
- [ "python", "upb", "cpp" ],
-)
-def unit(session, protobuf_implementation):
- """Run the unit test suite."""
-
- if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
- session.skip("cpp implementation is not supported in python 3.11+")
-
- session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"')
- session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt")
-
- # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped.
- # The 'cpp' implementation requires Protobuf<4.
- if protobuf_implementation == "cpp":
- session.install("protobuf<4")
-
- session.run(
- 'py.test',
- '--quiet',
- '--cov=google/cloud/batch_v1/',
- '--cov=tests/',
- '--cov-config=.coveragerc',
- '--cov-report=term',
- '--cov-report=html',
- os.path.join('tests', 'unit', ''.join(session.posargs)),
- env={
- "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
- },
- )
-
-@nox.session(python=ALL_PYTHON[-1])
-@nox.parametrize(
- "protobuf_implementation",
- [ "python", "upb", "cpp" ],
-)
-def prerelease_deps(session, protobuf_implementation):
- """Run the unit test suite against pre-release versions of dependencies."""
-
- if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
- session.skip("cpp implementation is not supported in python 3.11+")
-
- # Install test environment dependencies
- session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"')
-
- # Install the package without dependencies
- session.install('-e', '.', '--no-deps')
-
- # We test the minimum dependency versions using the minimum Python
- # version so the lowest python runtime that we test has a corresponding constraints
- # file, located at `testing/constraints-<python-version>.txt`, which contains all of the
- # dependencies and extras.
- with open(
- CURRENT_DIRECTORY
- / "testing"
- / f"constraints-{ALL_PYTHON[0]}.txt",
- encoding="utf-8",
- ) as constraints_file:
- constraints_text = constraints_file.read()
-
- # Ignore leading whitespace and comment lines.
- constraints_deps = [
- match.group(1)
- for match in re.finditer(
- r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE
- )
- ]
-
- session.install(*constraints_deps)
-
- prerel_deps = [
- "googleapis-common-protos",
- "google-api-core",
- "google-auth",
- # Exclude grpcio==1.67.0rc1, which does not support python 3.13
- "grpcio!=1.67.0rc1",
- "grpcio-status",
- "protobuf",
- "proto-plus",
- ]
-
- for dep in prerel_deps:
- session.install("--pre", "--no-deps", "--upgrade", dep)
-
- # Remaining dependencies
- other_deps = [
- "requests",
- ]
- session.install(*other_deps)
-
- # Print out prerelease package versions
-
- session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)")
- session.run("python", "-c", "import google.auth; print(google.auth.__version__)")
- session.run("python", "-c", "import grpc; print(grpc.__version__)")
- session.run(
- "python", "-c", "import google.protobuf; print(google.protobuf.__version__)"
- )
- session.run(
- "python", "-c", "import proto; print(proto.__version__)"
- )
-
- session.run(
- 'py.test',
- '--quiet',
- '--cov=google/cloud/batch_v1/',
- '--cov=tests/',
- '--cov-config=.coveragerc',
- '--cov-report=term',
- '--cov-report=html',
- os.path.join('tests', 'unit', ''.join(session.posargs)),
- env={
- "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
- },
- )
-
-
-@nox.session(python=DEFAULT_PYTHON_VERSION)
-def cover(session):
- """Run the final coverage report.
- This outputs the coverage report aggregating coverage from the unit
- test runs (not system test runs), and then erases coverage data.
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) diff --git a/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/batch_v1_generated_batch_service_cancel_job_async.py b/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/batch_v1_generated_batch_service_cancel_job_async.py deleted file mode 100644 index 87a4679ccdb9..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/batch_v1_generated_batch_service_cancel_job_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CancelJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
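A practical note on the generated ``_async`` samples such as this one: each defines a coroutine but no event-loop driver, so a caller is expected to supply one. Under standard ``asyncio`` usage, running the coroutine defined in this sample might look like the sketch below (illustrative, not part of the generated file).

.. code-block:: python

    import asyncio

    # Drive the generated coroutine to completion on a fresh event loop.
    asyncio.run(sample_cancel_job())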
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1_generated_BatchService_CancelJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1 - - -async def sample_cancel_job(): - # Create a client - client = batch_v1.BatchServiceAsyncClient() - - # Initialize request argument(s) - request = batch_v1.CancelJobRequest( - name="name_value", - ) - - # Make the request - operation = client.cancel_job(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END batch_v1_generated_BatchService_CancelJob_async] diff --git a/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/batch_v1_generated_batch_service_cancel_job_sync.py b/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/batch_v1_generated_batch_service_cancel_job_sync.py deleted file mode 100644 index 20acc49f5fde..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/batch_v1_generated_batch_service_cancel_job_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CancelJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1_generated_BatchService_CancelJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1 - - -def sample_cancel_job(): - # Create a client - client = batch_v1.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1.CancelJobRequest( - name="name_value", - ) - - # Make the request - operation = client.cancel_job(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END batch_v1_generated_BatchService_CancelJob_sync] diff --git a/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/batch_v1_generated_batch_service_create_job_async.py b/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/batch_v1_generated_batch_service_create_job_async.py deleted file mode 100644 index fa1f4a68e910..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/batch_v1_generated_batch_service_create_job_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1_generated_BatchService_CreateJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1 - - -async def sample_create_job(): - # Create a client - client = batch_v1.BatchServiceAsyncClient() - - # Initialize request argument(s) - request = batch_v1.CreateJobRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_job(request=request) - - # Handle the response - print(response) - -# [END batch_v1_generated_BatchService_CreateJob_async] diff --git a/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/batch_v1_generated_batch_service_create_job_sync.py b/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/batch_v1_generated_batch_service_create_job_sync.py deleted file mode 100644 index 29424402efd7..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/batch_v1_generated_batch_service_create_job_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1_generated_BatchService_CreateJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1 - - -def sample_create_job(): - # Create a client - client = batch_v1.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1.CreateJobRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_job(request=request) - - # Handle the response - print(response) - -# [END batch_v1_generated_BatchService_CreateJob_sync] diff --git a/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/batch_v1_generated_batch_service_delete_job_async.py b/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/batch_v1_generated_batch_service_delete_job_async.py deleted file mode 100644 index 593bbfe07af5..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/batch_v1_generated_batch_service_delete_job_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1_generated_BatchService_DeleteJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1 - - -async def sample_delete_job(): - # Create a client - client = batch_v1.BatchServiceAsyncClient() - - # Initialize request argument(s) - request = batch_v1.DeleteJobRequest( - ) - - # Make the request - operation = client.delete_job(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END batch_v1_generated_BatchService_DeleteJob_async] diff --git a/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/batch_v1_generated_batch_service_delete_job_sync.py b/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/batch_v1_generated_batch_service_delete_job_sync.py deleted file mode 100644 index 10b87f0813d7..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/batch_v1_generated_batch_service_delete_job_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1_generated_BatchService_DeleteJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1 - - -def sample_delete_job(): - # Create a client - client = batch_v1.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1.DeleteJobRequest( - ) - - # Make the request - operation = client.delete_job(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END batch_v1_generated_BatchService_DeleteJob_sync] diff --git a/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/batch_v1_generated_batch_service_get_job_async.py b/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/batch_v1_generated_batch_service_get_job_async.py deleted file mode 100644 index b7a705f31a42..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/batch_v1_generated_batch_service_get_job_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1_generated_BatchService_GetJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1 - - -async def sample_get_job(): - # Create a client - client = batch_v1.BatchServiceAsyncClient() - - # Initialize request argument(s) - request = batch_v1.GetJobRequest( - name="name_value", - ) - - # Make the request - response = await client.get_job(request=request) - - # Handle the response - print(response) - -# [END batch_v1_generated_BatchService_GetJob_async] diff --git a/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/batch_v1_generated_batch_service_get_job_sync.py b/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/batch_v1_generated_batch_service_get_job_sync.py deleted file mode 100644 index 28828796c9c1..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/batch_v1_generated_batch_service_get_job_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1_generated_BatchService_GetJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1 - - -def sample_get_job(): - # Create a client - client = batch_v1.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1.GetJobRequest( - name="name_value", - ) - - # Make the request - response = client.get_job(request=request) - - # Handle the response - print(response) - -# [END batch_v1_generated_BatchService_GetJob_sync] diff --git a/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/batch_v1_generated_batch_service_get_task_async.py b/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/batch_v1_generated_batch_service_get_task_async.py deleted file mode 100644 index a85e4b575b47..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/batch_v1_generated_batch_service_get_task_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetTask -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1_generated_BatchService_GetTask_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1 - - -async def sample_get_task(): - # Create a client - client = batch_v1.BatchServiceAsyncClient() - - # Initialize request argument(s) - request = batch_v1.GetTaskRequest( - name="name_value", - ) - - # Make the request - response = await client.get_task(request=request) - - # Handle the response - print(response) - -# [END batch_v1_generated_BatchService_GetTask_async] diff --git a/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/batch_v1_generated_batch_service_get_task_sync.py b/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/batch_v1_generated_batch_service_get_task_sync.py deleted file mode 100644 index 04c1ea91610e..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/batch_v1_generated_batch_service_get_task_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetTask -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1_generated_BatchService_GetTask_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1 - - -def sample_get_task(): - # Create a client - client = batch_v1.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1.GetTaskRequest( - name="name_value", - ) - - # Make the request - response = client.get_task(request=request) - - # Handle the response - print(response) - -# [END batch_v1_generated_BatchService_GetTask_sync] diff --git a/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/batch_v1_generated_batch_service_list_jobs_async.py b/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/batch_v1_generated_batch_service_list_jobs_async.py deleted file mode 100644 index 68f7201398ab..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/batch_v1_generated_batch_service_list_jobs_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListJobs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1_generated_BatchService_ListJobs_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1 - - -async def sample_list_jobs(): - # Create a client - client = batch_v1.BatchServiceAsyncClient() - - # Initialize request argument(s) - request = batch_v1.ListJobsRequest( - ) - - # Make the request - page_result = client.list_jobs(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END batch_v1_generated_BatchService_ListJobs_async] diff --git a/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/batch_v1_generated_batch_service_list_jobs_sync.py b/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/batch_v1_generated_batch_service_list_jobs_sync.py deleted file mode 100644 index 0621570b880a..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/batch_v1_generated_batch_service_list_jobs_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListJobs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1_generated_BatchService_ListJobs_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1 - - -def sample_list_jobs(): - # Create a client - client = batch_v1.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1.ListJobsRequest( - ) - - # Make the request - page_result = client.list_jobs(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END batch_v1_generated_BatchService_ListJobs_sync] diff --git a/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/batch_v1_generated_batch_service_list_tasks_async.py b/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/batch_v1_generated_batch_service_list_tasks_async.py deleted file mode 100644 index 32642941a921..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/batch_v1_generated_batch_service_list_tasks_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListTasks -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1_generated_BatchService_ListTasks_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1 - - -async def sample_list_tasks(): - # Create a client - client = batch_v1.BatchServiceAsyncClient() - - # Initialize request argument(s) - request = batch_v1.ListTasksRequest( - parent="parent_value", - ) - - # Make the request - page_result = await client.list_tasks(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END batch_v1_generated_BatchService_ListTasks_async] diff --git a/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/batch_v1_generated_batch_service_list_tasks_sync.py b/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/batch_v1_generated_batch_service_list_tasks_sync.py deleted file mode 100644 index 5f17c206935a..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/batch_v1_generated_batch_service_list_tasks_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListTasks -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1_generated_BatchService_ListTasks_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1 - - -def sample_list_tasks(): - # Create a client - client = batch_v1.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1.ListTasksRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_tasks(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END batch_v1_generated_BatchService_ListTasks_sync] diff --git a/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json b/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json deleted file mode 100644 index f80cbfd5e2e8..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json +++ /dev/null @@ -1,1158 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.batch.v1", - "version": "v1" - } - ], - "language": "PYTHON", - "name": "google-cloud-batch", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.batch_v1.BatchServiceAsyncClient", - "shortName": "BatchServiceAsyncClient" - }, - "fullName": "google.cloud.batch_v1.BatchServiceAsyncClient.cancel_job", - "method": { - "fullName": "google.cloud.batch.v1.BatchService.CancelJob", - "service": { - "fullName": "google.cloud.batch.v1.BatchService", - "shortName": "BatchService" - }, - "shortName": "CancelJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1.types.CancelJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "cancel_job" - }, - "description": "Sample for CancelJob", - "file": "batch_v1_generated_batch_service_cancel_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1_generated_BatchService_CancelJob_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - 
"end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1_generated_batch_service_cancel_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.batch_v1.BatchServiceClient", - "shortName": "BatchServiceClient" - }, - "fullName": "google.cloud.batch_v1.BatchServiceClient.cancel_job", - "method": { - "fullName": "google.cloud.batch.v1.BatchService.CancelJob", - "service": { - "fullName": "google.cloud.batch.v1.BatchService", - "shortName": "BatchService" - }, - "shortName": "CancelJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1.types.CancelJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "cancel_job" - }, - "description": "Sample for CancelJob", - "file": "batch_v1_generated_batch_service_cancel_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1_generated_BatchService_CancelJob_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1_generated_batch_service_cancel_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.batch_v1.BatchServiceAsyncClient", - "shortName": "BatchServiceAsyncClient" - }, - "fullName": "google.cloud.batch_v1.BatchServiceAsyncClient.create_job", - "method": { - "fullName": "google.cloud.batch.v1.BatchService.CreateJob", - "service": { - "fullName": "google.cloud.batch.v1.BatchService", - "shortName": "BatchService" - }, - "shortName": "CreateJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1.types.CreateJobRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "job", - "type": "google.cloud.batch_v1.types.Job" - }, - { - "name": "job_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.batch_v1.types.Job", - "shortName": "create_job" - }, - "description": "Sample for CreateJob", - "file": "batch_v1_generated_batch_service_create_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1_generated_BatchService_CreateJob_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": 
"RESPONSE_HANDLING" - } - ], - "title": "batch_v1_generated_batch_service_create_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.batch_v1.BatchServiceClient", - "shortName": "BatchServiceClient" - }, - "fullName": "google.cloud.batch_v1.BatchServiceClient.create_job", - "method": { - "fullName": "google.cloud.batch.v1.BatchService.CreateJob", - "service": { - "fullName": "google.cloud.batch.v1.BatchService", - "shortName": "BatchService" - }, - "shortName": "CreateJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1.types.CreateJobRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "job", - "type": "google.cloud.batch_v1.types.Job" - }, - { - "name": "job_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.batch_v1.types.Job", - "shortName": "create_job" - }, - "description": "Sample for CreateJob", - "file": "batch_v1_generated_batch_service_create_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1_generated_BatchService_CreateJob_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1_generated_batch_service_create_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.batch_v1.BatchServiceAsyncClient", - "shortName": "BatchServiceAsyncClient" - }, - "fullName": "google.cloud.batch_v1.BatchServiceAsyncClient.delete_job", - "method": { - "fullName": "google.cloud.batch.v1.BatchService.DeleteJob", - "service": { - "fullName": "google.cloud.batch.v1.BatchService", - "shortName": "BatchService" - }, - "shortName": "DeleteJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1.types.DeleteJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_job" - }, - "description": "Sample for DeleteJob", - "file": "batch_v1_generated_batch_service_delete_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1_generated_BatchService_DeleteJob_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1_generated_batch_service_delete_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.batch_v1.BatchServiceClient", - "shortName": 
"BatchServiceClient" - }, - "fullName": "google.cloud.batch_v1.BatchServiceClient.delete_job", - "method": { - "fullName": "google.cloud.batch.v1.BatchService.DeleteJob", - "service": { - "fullName": "google.cloud.batch.v1.BatchService", - "shortName": "BatchService" - }, - "shortName": "DeleteJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1.types.DeleteJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_job" - }, - "description": "Sample for DeleteJob", - "file": "batch_v1_generated_batch_service_delete_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1_generated_BatchService_DeleteJob_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1_generated_batch_service_delete_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.batch_v1.BatchServiceAsyncClient", - "shortName": "BatchServiceAsyncClient" - }, - "fullName": "google.cloud.batch_v1.BatchServiceAsyncClient.get_job", - "method": { - "fullName": "google.cloud.batch.v1.BatchService.GetJob", - "service": { - "fullName": "google.cloud.batch.v1.BatchService", - "shortName": "BatchService" - }, - "shortName": "GetJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1.types.GetJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.batch_v1.types.Job", - "shortName": "get_job" - }, - "description": "Sample for GetJob", - "file": "batch_v1_generated_batch_service_get_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1_generated_BatchService_GetJob_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1_generated_batch_service_get_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.batch_v1.BatchServiceClient", - "shortName": "BatchServiceClient" - }, - "fullName": "google.cloud.batch_v1.BatchServiceClient.get_job", - "method": { - "fullName": "google.cloud.batch.v1.BatchService.GetJob", - "service": { - "fullName": "google.cloud.batch.v1.BatchService", - "shortName": "BatchService" - }, - "shortName": "GetJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1.types.GetJobRequest" - 
}, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.batch_v1.types.Job", - "shortName": "get_job" - }, - "description": "Sample for GetJob", - "file": "batch_v1_generated_batch_service_get_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1_generated_BatchService_GetJob_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1_generated_batch_service_get_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.batch_v1.BatchServiceAsyncClient", - "shortName": "BatchServiceAsyncClient" - }, - "fullName": "google.cloud.batch_v1.BatchServiceAsyncClient.get_task", - "method": { - "fullName": "google.cloud.batch.v1.BatchService.GetTask", - "service": { - "fullName": "google.cloud.batch.v1.BatchService", - "shortName": "BatchService" - }, - "shortName": "GetTask" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1.types.GetTaskRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.batch_v1.types.Task", - "shortName": "get_task" - }, - "description": "Sample for GetTask", - "file": "batch_v1_generated_batch_service_get_task_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1_generated_BatchService_GetTask_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1_generated_batch_service_get_task_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.batch_v1.BatchServiceClient", - "shortName": "BatchServiceClient" - }, - "fullName": "google.cloud.batch_v1.BatchServiceClient.get_task", - "method": { - "fullName": "google.cloud.batch.v1.BatchService.GetTask", - "service": { - "fullName": "google.cloud.batch.v1.BatchService", - "shortName": "BatchService" - }, - "shortName": "GetTask" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1.types.GetTaskRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.batch_v1.types.Task", - "shortName": "get_task" - }, - "description": "Sample for GetTask", - "file": 
"batch_v1_generated_batch_service_get_task_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1_generated_BatchService_GetTask_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1_generated_batch_service_get_task_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.batch_v1.BatchServiceAsyncClient", - "shortName": "BatchServiceAsyncClient" - }, - "fullName": "google.cloud.batch_v1.BatchServiceAsyncClient.list_jobs", - "method": { - "fullName": "google.cloud.batch.v1.BatchService.ListJobs", - "service": { - "fullName": "google.cloud.batch.v1.BatchService", - "shortName": "BatchService" - }, - "shortName": "ListJobs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1.types.ListJobsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.batch_v1.services.batch_service.pagers.ListJobsAsyncPager", - "shortName": "list_jobs" - }, - "description": "Sample for ListJobs", - "file": "batch_v1_generated_batch_service_list_jobs_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1_generated_BatchService_ListJobs_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1_generated_batch_service_list_jobs_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.batch_v1.BatchServiceClient", - "shortName": "BatchServiceClient" - }, - "fullName": "google.cloud.batch_v1.BatchServiceClient.list_jobs", - "method": { - "fullName": "google.cloud.batch.v1.BatchService.ListJobs", - "service": { - "fullName": "google.cloud.batch.v1.BatchService", - "shortName": "BatchService" - }, - "shortName": "ListJobs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1.types.ListJobsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.batch_v1.services.batch_service.pagers.ListJobsPager", - "shortName": "list_jobs" - }, - "description": "Sample for ListJobs", - "file": "batch_v1_generated_batch_service_list_jobs_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1_generated_BatchService_ListJobs_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - 
"start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1_generated_batch_service_list_jobs_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.batch_v1.BatchServiceAsyncClient", - "shortName": "BatchServiceAsyncClient" - }, - "fullName": "google.cloud.batch_v1.BatchServiceAsyncClient.list_tasks", - "method": { - "fullName": "google.cloud.batch.v1.BatchService.ListTasks", - "service": { - "fullName": "google.cloud.batch.v1.BatchService", - "shortName": "BatchService" - }, - "shortName": "ListTasks" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1.types.ListTasksRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.batch_v1.services.batch_service.pagers.ListTasksAsyncPager", - "shortName": "list_tasks" - }, - "description": "Sample for ListTasks", - "file": "batch_v1_generated_batch_service_list_tasks_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1_generated_BatchService_ListTasks_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1_generated_batch_service_list_tasks_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.batch_v1.BatchServiceClient", - "shortName": "BatchServiceClient" - }, - "fullName": "google.cloud.batch_v1.BatchServiceClient.list_tasks", - "method": { - "fullName": "google.cloud.batch.v1.BatchService.ListTasks", - "service": { - "fullName": "google.cloud.batch.v1.BatchService", - "shortName": "BatchService" - }, - "shortName": "ListTasks" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1.types.ListTasksRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.batch_v1.services.batch_service.pagers.ListTasksPager", - "shortName": "list_tasks" - }, - "description": "Sample for ListTasks", - "file": "batch_v1_generated_batch_service_list_tasks_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1_generated_BatchService_ListTasks_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"batch_v1_generated_batch_service_list_tasks_sync.py" - } - ] -} diff --git a/owl-bot-staging/google-cloud-batch/v1/scripts/fixup_batch_v1_keywords.py b/owl-bot-staging/google-cloud-batch/v1/scripts/fixup_batch_v1_keywords.py deleted file mode 100644 index 4b275b189c8e..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/scripts/fixup_batch_v1_keywords.py +++ /dev/null @@ -1,182 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class batchCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'cancel_job': ('name', 'request_id', ), - 'create_job': ('parent', 'job', 'job_id', 'request_id', ), - 'delete_job': ('name', 'reason', 'request_id', ), - 'get_job': ('name', ), - 'get_task': ('name', ), - 'list_jobs': ('parent', 'filter', 'order_by', 'page_size', 'page_token', ), - 'list_tasks': ('parent', 'filter', 'page_size', 'page_token', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. 
- for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=batchCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the batch client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-batch/v1/setup.py b/owl-bot-staging/google-cloud-batch/v1/setup.py deleted file mode 100644 index 9bbd8ec5aefa..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/setup.py +++ /dev/null @@ -1,98 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-batch' - - -description = "Google Cloud Batch API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/batch/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+\.\d+\.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", -] -extras = { -} -url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-batch" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - extras_require=extras, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/google-cloud-batch/v1/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-batch/v1/testing/constraints-3.10.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/testing/constraints-3.10.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file.
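-# (Unlike constraints-3.7.txt, which pins each dependency to the exact lower -# bound declared in setup.py, the files for newer Python versions leave the -# versions unpinned.)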
-google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-batch/v1/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-batch/v1/testing/constraints-3.11.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/testing/constraints-3.11.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-batch/v1/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-batch/v1/testing/constraints-3.12.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/testing/constraints-3.12.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-batch/v1/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-batch/v1/testing/constraints-3.13.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/testing/constraints-3.13.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-batch/v1/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-batch/v1/testing/constraints-3.7.txt deleted file mode 100644 index fc812592b0ee..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/testing/constraints-3.7.txt +++ /dev/null @@ -1,10 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -proto-plus==1.22.3 -protobuf==3.20.2 diff --git a/owl-bot-staging/google-cloud-batch/v1/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-batch/v1/testing/constraints-3.8.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/testing/constraints-3.8.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-batch/v1/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-batch/v1/testing/constraints-3.9.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/testing/constraints-3.9.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-batch/v1/tests/__init__.py b/owl-bot-staging/google-cloud-batch/v1/tests/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-batch/v1/tests/unit/__init__.py b/owl-bot-staging/google-cloud-batch/v1/tests/unit/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-batch/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-batch/v1/tests/unit/gapic/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-batch/v1/tests/unit/gapic/batch_v1/__init__.py b/owl-bot-staging/google-cloud-batch/v1/tests/unit/gapic/batch_v1/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/tests/unit/gapic/batch_v1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-batch/v1/tests/unit/gapic/batch_v1/test_batch_service.py b/owl-bot-staging/google-cloud-batch/v1/tests/unit/gapic/batch_v1/test_batch_service.py deleted file mode 100644 index 5b1c1105b2e2..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1/tests/unit/gapic/batch_v1/test_batch_service.py +++ /dev/null @@ -1,7817 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation -from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.batch_v1.services.batch_service import BatchServiceAsyncClient -from google.cloud.batch_v1.services.batch_service import BatchServiceClient -from google.cloud.batch_v1.services.batch_service import pagers -from google.cloud.batch_v1.services.batch_service import transports -from google.cloud.batch_v1.types import batch -from google.cloud.batch_v1.types import job -from google.cloud.batch_v1.types import job as gcb_job -from google.cloud.batch_v1.types import task -from google.cloud.batch_v1.types import volume -from google.cloud.location import locations_pb2 -from google.longrunning import operations_pb2 # type: ignore -from 
google.oauth2 import service_account -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -import google.auth - - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": "service account credentials", - "principal": "service-account@example.com", -} -CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data), chunk_size): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert BatchServiceClient._get_default_mtls_endpoint(None) is None - assert BatchServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert BatchServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert BatchServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert BatchServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert BatchServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert BatchServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert BatchServiceClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert BatchServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - BatchServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert BatchServiceClient._read_environment_variables() ==
(False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert BatchServiceClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert BatchServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - BatchServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert BatchServiceClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert BatchServiceClient._get_client_cert_source(None, False) is None - assert BatchServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert BatchServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert BatchServiceClient._get_client_cert_source(None, True) is mock_default_cert_source - assert BatchServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(BatchServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BatchServiceClient)) -@mock.patch.object(BatchServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BatchServiceAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = BatchServiceClient._DEFAULT_UNIVERSE - default_endpoint = BatchServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = BatchServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert BatchServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert BatchServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == BatchServiceClient.DEFAULT_MTLS_ENDPOINT - assert BatchServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert BatchServiceClient._get_api_endpoint(None, None, default_universe, "always") == BatchServiceClient.DEFAULT_MTLS_ENDPOINT - assert BatchServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == BatchServiceClient.DEFAULT_MTLS_ENDPOINT - assert BatchServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert BatchServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - BatchServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." 
- - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert BatchServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert BatchServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert BatchServiceClient._get_universe_domain(None, None) == BatchServiceClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - BatchServiceClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." - -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = BatchServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - -@pytest.mark.parametrize("error_code", [401,403,404,500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = BatchServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - -@pytest.mark.parametrize("client_class,transport_name", [ - (BatchServiceClient, "grpc"), - (BatchServiceAsyncClient, "grpc_asyncio"), - (BatchServiceClient, "rest"), -]) -def test_batch_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'batch.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://batch.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.BatchServiceGrpcTransport, "grpc"), - (transports.BatchServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.BatchServiceRestTransport, "rest"), -]) -def test_batch_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - 
use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (BatchServiceClient, "grpc"), - (BatchServiceAsyncClient, "grpc_asyncio"), - (BatchServiceClient, "rest"), -]) -def test_batch_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'batch.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://batch.googleapis.com' - ) - - -def test_batch_service_client_get_transport_class(): - transport = BatchServiceClient.get_transport_class() - available_transports = [ - transports.BatchServiceGrpcTransport, - transports.BatchServiceRestTransport, - ] - assert transport in available_transports - - transport = BatchServiceClient.get_transport_class("grpc") - assert transport == transports.BatchServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (BatchServiceClient, transports.BatchServiceGrpcTransport, "grpc"), - (BatchServiceAsyncClient, transports.BatchServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (BatchServiceClient, transports.BatchServiceRestTransport, "rest"), -]) -@mock.patch.object(BatchServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BatchServiceClient)) -@mock.patch.object(BatchServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BatchServiceAsyncClient)) -def test_batch_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(BatchServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(BatchServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". 
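-    # (Outside of these tests the same switch is flipped by the user's
-    # environment, e.g. `export GOOGLE_API_USE_MTLS_ENDPOINT=never`; here it
-    # is patched in-process with mock.patch.dict instead.)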
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
-        with mock.patch.object(transport_class, '__init__') as patched:
-            patched.return_value = None
-            client = client_class(transport=transport_name)
-            patched.assert_called_once_with(
-                credentials=None,
-                credentials_file=None,
-                host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-                scopes=None,
-                client_cert_source_for_mtls=None,
-                quota_project_id=None,
-                client_info=transports.base.DEFAULT_CLIENT_INFO,
-                always_use_jwt_access=True,
-                api_audience=None,
-            )
-
-    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
-    # "always".
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
-        with mock.patch.object(transport_class, '__init__') as patched:
-            patched.return_value = None
-            client = client_class(transport=transport_name)
-            patched.assert_called_once_with(
-                credentials=None,
-                credentials_file=None,
-                host=client.DEFAULT_MTLS_ENDPOINT,
-                scopes=None,
-                client_cert_source_for_mtls=None,
-                quota_project_id=None,
-                client_info=transports.base.DEFAULT_CLIENT_INFO,
-                always_use_jwt_access=True,
-                api_audience=None,
-            )
-
-    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
-    # unsupported value.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
-        with pytest.raises(MutualTLSChannelError) as excinfo:
-            client = client_class(transport=transport_name)
-        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
-
-    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
-        with pytest.raises(ValueError) as excinfo:
-            client = client_class(transport=transport_name)
-        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
-
-    # Check the case quota_project_id is provided
-    options = client_options.ClientOptions(quota_project_id="octopus")
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(client_options=options, transport=transport_name)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file=None,
-            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id="octopus",
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience=None,
-        )
-    # Check the case api_audience is provided
-    options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(client_options=options, transport=transport_name)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file=None,
-            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id=None,
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience="https://language.googleapis.com"
-        )
-
-@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
-    (BatchServiceClient, transports.BatchServiceGrpcTransport, "grpc", "true"),
-    (BatchServiceAsyncClient,
transports.BatchServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (BatchServiceClient, transports.BatchServiceGrpcTransport, "grpc", "false"), - (BatchServiceAsyncClient, transports.BatchServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (BatchServiceClient, transports.BatchServiceRestTransport, "rest", "true"), - (BatchServiceClient, transports.BatchServiceRestTransport, "rest", "false"), -]) -@mock.patch.object(BatchServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BatchServiceClient)) -@mock.patch.object(BatchServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BatchServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_batch_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
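-    # When no certificate is available from either source, "auto" behaves
-    # like "never" and the plain endpoint is used, even if
-    # GOOGLE_API_USE_CLIENT_CERTIFICATE is "true".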
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - BatchServiceClient, BatchServiceAsyncClient -]) -@mock.patch.object(BatchServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(BatchServiceClient)) -@mock.patch.object(BatchServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(BatchServiceAsyncClient)) -def test_batch_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
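-    # In "auto" mode the mTLS endpoint is only chosen when a client
-    # certificate can actually be sourced; here a default ADC certificate is
-    # mocked into existence to trigger the switch.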
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - BatchServiceClient, BatchServiceAsyncClient -]) -@mock.patch.object(BatchServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BatchServiceClient)) -@mock.patch.object(BatchServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BatchServiceAsyncClient)) -def test_batch_service_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = BatchServiceClient._DEFAULT_UNIVERSE - default_endpoint = BatchServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = BatchServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (BatchServiceClient, transports.BatchServiceGrpcTransport, "grpc"), - (BatchServiceAsyncClient, transports.BatchServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (BatchServiceClient, transports.BatchServiceRestTransport, "rest"), -]) -def test_batch_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (BatchServiceClient, transports.BatchServiceGrpcTransport, "grpc", grpc_helpers), - (BatchServiceAsyncClient, transports.BatchServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (BatchServiceClient, transports.BatchServiceRestTransport, "rest", None), -]) -def test_batch_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_batch_service_client_client_options_from_dict(): - with mock.patch('google.cloud.batch_v1.services.batch_service.transports.BatchServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = BatchServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (BatchServiceClient, transports.BatchServiceGrpcTransport, "grpc", grpc_helpers), - (BatchServiceAsyncClient, transports.BatchServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_batch_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
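-    # ADC and the file loader are both mocked below so the assertion can tell
-    # them apart: create_channel must receive file_creds (loaded from
-    # credentials_file), not creds (from application default credentials).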
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "batch.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="batch.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - batch.CreateJobRequest, - dict, -]) -def test_create_job(request_type, transport: str = 'grpc'): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gcb_job.Job( - name='name_value', - uid='uid_value', - priority=898, - ) - response = client.create_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = batch.CreateJobRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, gcb_job.Job) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.priority == 898 - - -def test_create_job_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = batch.CreateJobRequest( - parent='parent_value', - job_id='job_id_value', - request_id='request_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
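-        # Per AIP-4235 only *unset* UUID4 fields may be auto-populated on
-        # send, so every field set explicitly above must come back unchanged,
-        # which is what the assertion below verifies.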
-        client.create_job(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == batch.CreateJobRequest(
-            parent='parent_value',
-            job_id='job_id_value',
-            request_id='request_id_value',
-        )
-
-def test_create_job_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = BatchServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.create_job in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.create_job] = mock_rpc
-        request = {}
-        client.create_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.create_job(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_create_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = BatchServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.create_job in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.create_job] = mock_rpc
-
-        request = {}
-        await client.create_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.create_job(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_create_job_async(transport: str = 'grpc_asyncio', request_type=batch.CreateJobRequest):
-    client = BatchServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcb_job.Job(
-            name='name_value',
-            uid='uid_value',
-            priority=898,
-        ))
-        response = await client.create_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = batch.CreateJobRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, gcb_job.Job) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.priority == 898 - - -@pytest.mark.asyncio -async def test_create_job_async_from_dict(): - await test_create_job_async(request_type=dict) - -def test_create_job_field_headers(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = batch.CreateJobRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job), - '__call__') as call: - call.return_value = gcb_job.Job() - client.create_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_job_field_headers_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = batch.CreateJobRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcb_job.Job()) - await client.create_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_job_flattened(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gcb_job.Job() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_job( - parent='parent_value', - job=gcb_job.Job(name='name_value'), - job_id='job_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].job - mock_val = gcb_job.Job(name='name_value') - assert arg == mock_val - arg = args[0].job_id - mock_val = 'job_id_value' - assert arg == mock_val - - -def test_create_job_flattened_error(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
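-    # (The two calling styles are mutually exclusive by design: the flattened
-    # keyword arguments are sugar that builds the request object, so passing
-    # both would make it ambiguous which values win.)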
-    with pytest.raises(ValueError):
-        client.create_job(
-            batch.CreateJobRequest(),
-            parent='parent_value',
-            job=gcb_job.Job(name='name_value'),
-            job_id='job_id_value',
-        )
-
-@pytest.mark.asyncio
-async def test_create_job_flattened_async():
-    client = BatchServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcb_job.Job())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.create_job(
-            parent='parent_value',
-            job=gcb_job.Job(name='name_value'),
-            job_id='job_id_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].job
-        mock_val = gcb_job.Job(name='name_value')
-        assert arg == mock_val
-        arg = args[0].job_id
-        mock_val = 'job_id_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_job_flattened_error_async():
-    client = BatchServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.create_job(
-            batch.CreateJobRequest(),
-            parent='parent_value',
-            job=gcb_job.Job(name='name_value'),
-            job_id='job_id_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  batch.GetJobRequest,
-  dict,
-])
-def test_get_job(request_type, transport: str = 'grpc'):
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = job.Job(
-            name='name_value',
-            uid='uid_value',
-            priority=898,
-        )
-        response = client.get_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = batch.GetJobRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, job.Job)
-    assert response.name == 'name_value'
-    assert response.uid == 'uid_value'
-    assert response.priority == 898
-
-
-def test_get_job_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = batch.GetJobRequest(
-        name='name_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_job),
-            '__call__') as call:
-        call.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
-        client.get_job(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == batch.GetJobRequest(
-            name='name_value',
-        )
-
-def test_get_job_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = BatchServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.get_job in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.get_job] = mock_rpc
-        request = {}
-        client.get_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.get_job(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = BatchServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.get_job in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.get_job] = mock_rpc
-
-        request = {}
-        await client.get_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.get_job(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_job_async(transport: str = 'grpc_asyncio', request_type=batch.GetJobRequest):
-    client = BatchServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(job.Job(
-            name='name_value',
-            uid='uid_value',
-            priority=898,
-        ))
-        response = await client.get_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = batch.GetJobRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, job.Job) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.priority == 898 - - -@pytest.mark.asyncio -async def test_get_job_async_from_dict(): - await test_get_job_async(request_type=dict) - -def test_get_job_field_headers(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = batch.GetJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - call.return_value = job.Job() - client.get_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_job_field_headers_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = batch.GetJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(job.Job()) - await client.get_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_job_flattened(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = job.Job() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_job_flattened_error(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_job( - batch.GetJobRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_job_flattened_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
-    with mock.patch.object(
-            type(client.transport.get_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(job.Job())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.get_job(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_job_flattened_error_async():
-    client = BatchServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.get_job(
-            batch.GetJobRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  batch.DeleteJobRequest,
-  dict,
-])
-def test_delete_job(request_type, transport: str = 'grpc'):
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/spam')
-        response = client.delete_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = batch.DeleteJobRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, future.Future)
-
-
-def test_delete_job_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = batch.DeleteJobRequest(
-        name='name_value',
-        reason='reason_value',
-        request_id='request_id_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_job),
-            '__call__') as call:
-        call.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
- client.delete_job(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == batch.DeleteJobRequest( - name='name_value', - reason='reason_value', - request_id='request_id_value', - ) - -def test_delete_job_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_job in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_job] = mock_rpc - request = {} - client.delete_job(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_job in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_job] = mock_rpc - - request = {} - await client.delete_job(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_job_async(transport: str = 'grpc_asyncio', request_type=batch.DeleteJobRequest): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
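-    # delete_job is a long-running operation: the stub returns a raw
-    # operations_pb2.Operation, which the client wraps in an operation
-    # future; the isinstance check below asserts exactly that.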
- with mock.patch.object( - type(client.transport.delete_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = batch.DeleteJobRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_job_async_from_dict(): - await test_delete_job_async(request_type=dict) - -def test_delete_job_field_headers(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = batch.DeleteJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_job_field_headers_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = batch.DeleteJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_job_flattened(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_delete_job_flattened_error():
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_job(
-            batch.DeleteJobRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_delete_job_flattened_async():
-    client = BatchServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.delete_job(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_job_flattened_error_async():
-    client = BatchServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.delete_job(
-            batch.DeleteJobRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  batch.CancelJobRequest,
-  dict,
-])
-def test_cancel_job(request_type, transport: str = 'grpc'):
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.cancel_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/spam')
-        response = client.cancel_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = batch.CancelJobRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, future.Future)
-
-
-def test_cancel_job_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = batch.CancelJobRequest(
-        name='name_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.cancel_job), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.cancel_job(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == batch.CancelJobRequest( - name='name_value', - ) - -def test_cancel_job_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.cancel_job in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.cancel_job] = mock_rpc - request = {} - client.cancel_job(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.cancel_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_cancel_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.cancel_job in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.cancel_job] = mock_rpc - - request = {} - await client.cancel_job(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.cancel_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_cancel_job_async(transport: str = 'grpc_asyncio', request_type=batch.CancelJobRequest): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.cancel_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = batch.CancelJobRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_cancel_job_async_from_dict(): - await test_cancel_job_async(request_type=dict) - -def test_cancel_job_field_headers(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = batch.CancelJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_job), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.cancel_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_cancel_job_field_headers_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = batch.CancelJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.cancel_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_cancel_job_flattened(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.cancel_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
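- # The flattened keyword argument is folded into a CancelJobRequest by the
- # client, so the field value is read back off the request in args[0].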
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_cancel_job_flattened_error():
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.cancel_job(
- batch.CancelJobRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_cancel_job_flattened_async():
- client = BatchServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.cancel_job),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name='operations/spam')
- )
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.cancel_job(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_cancel_job_flattened_error_async():
- client = BatchServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.cancel_job(
- batch.CancelJobRequest(),
- name='name_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- batch.ListJobsRequest,
- dict,
-])
-def test_list_jobs(request_type, transport: str = 'grpc'):
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_jobs),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = batch.ListJobsResponse(
- next_page_token='next_page_token_value',
- unreachable=['unreachable_value'],
- )
- response = client.list_jobs(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = batch.ListJobsRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListJobsPager)
- assert response.next_page_token == 'next_page_token_value'
- assert response.unreachable == ['unreachable_value']
-
-
-def test_list_jobs_non_empty_request_with_auto_populated_field():
- # This test is a coverage failsafe to make sure that UUID4 fields are
- # automatically populated, according to AIP-4235, with non-empty requests.
- client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = batch.ListJobsRequest( - parent='parent_value', - filter='filter_value', - order_by='order_by_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_jobs(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == batch.ListJobsRequest( - parent='parent_value', - filter='filter_value', - order_by='order_by_value', - page_token='page_token_value', - ) - -def test_list_jobs_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_jobs in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_jobs] = mock_rpc - request = {} - client.list_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_jobs(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_jobs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_jobs in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_jobs] = mock_rpc - - request = {} - await client.list_jobs(request) - - # Establish that the underlying gRPC stub method was called. 
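- # The request is routed through the cached wrapper to the AsyncMock
- # installed above, so exactly one underlying invocation is recorded per RPC.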
- assert mock_rpc.call_count == 1
-
- await client.list_jobs(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_jobs_async(transport: str = 'grpc_asyncio', request_type=batch.ListJobsRequest):
- client = BatchServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_jobs),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(batch.ListJobsResponse(
- next_page_token='next_page_token_value',
- unreachable=['unreachable_value'],
- ))
- response = await client.list_jobs(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = batch.ListJobsRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListJobsAsyncPager)
- assert response.next_page_token == 'next_page_token_value'
- assert response.unreachable == ['unreachable_value']
-
-
-@pytest.mark.asyncio
-async def test_list_jobs_async_from_dict():
- await test_list_jobs_async(request_type=dict)
-
-def test_list_jobs_field_headers():
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = batch.ListJobsRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_jobs),
- '__call__') as call:
- call.return_value = batch.ListJobsResponse()
- client.list_jobs(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_jobs_field_headers_async():
- client = BatchServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = batch.ListJobsRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_jobs),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(batch.ListJobsResponse())
- await client.list_jobs(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
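- # Routing headers travel as gRPC metadata; the client attaches an
- # 'x-goog-request-params' entry carrying the URL-encoded resource path.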
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-def test_list_jobs_flattened():
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_jobs),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = batch.ListJobsResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.list_jobs(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-
-def test_list_jobs_flattened_error():
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.list_jobs(
- batch.ListJobsRequest(),
- parent='parent_value',
- )
-
-@pytest.mark.asyncio
-async def test_list_jobs_flattened_async():
- client = BatchServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_jobs),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(batch.ListJobsResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.list_jobs(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_jobs_flattened_error_async():
- client = BatchServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.list_jobs(
- batch.ListJobsRequest(),
- parent='parent_value',
- )
-
-
-def test_list_jobs_pager(transport_name: str = "grpc"):
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_jobs),
- '__call__') as call:
- # Set the response to a series of pages.
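- # side_effect returns one response per underlying RPC: four pages holding
- # 3, 0, 1, and 2 jobs. The trailing RuntimeError is never reached because
- # the last page carries no next_page_token.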
- call.side_effect = ( - batch.ListJobsResponse( - jobs=[ - job.Job(), - job.Job(), - job.Job(), - ], - next_page_token='abc', - ), - batch.ListJobsResponse( - jobs=[], - next_page_token='def', - ), - batch.ListJobsResponse( - jobs=[ - job.Job(), - ], - next_page_token='ghi', - ), - batch.ListJobsResponse( - jobs=[ - job.Job(), - job.Job(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_jobs(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, job.Job) - for i in results) -def test_list_jobs_pages(transport_name: str = "grpc"): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - batch.ListJobsResponse( - jobs=[ - job.Job(), - job.Job(), - job.Job(), - ], - next_page_token='abc', - ), - batch.ListJobsResponse( - jobs=[], - next_page_token='def', - ), - batch.ListJobsResponse( - jobs=[ - job.Job(), - ], - next_page_token='ghi', - ), - batch.ListJobsResponse( - jobs=[ - job.Job(), - job.Job(), - ], - ), - RuntimeError, - ) - pages = list(client.list_jobs(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_jobs_async_pager(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - batch.ListJobsResponse( - jobs=[ - job.Job(), - job.Job(), - job.Job(), - ], - next_page_token='abc', - ), - batch.ListJobsResponse( - jobs=[], - next_page_token='def', - ), - batch.ListJobsResponse( - jobs=[ - job.Job(), - ], - next_page_token='ghi', - ), - batch.ListJobsResponse( - jobs=[ - job.Job(), - job.Job(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_jobs(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, job.Job) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_jobs_async_pages(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - batch.ListJobsResponse( - jobs=[ - job.Job(), - job.Job(), - job.Job(), - ], - next_page_token='abc', - ), - batch.ListJobsResponse( - jobs=[], - next_page_token='def', - ), - batch.ListJobsResponse( - jobs=[ - job.Job(), - ], - next_page_token='ghi', - ), - batch.ListJobsResponse( - jobs=[ - job.Job(), - job.Job(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_jobs(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - batch.GetTaskRequest, - dict, -]) -def test_get_task(request_type, transport: str = 'grpc'): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = task.Task( - name='name_value', - ) - response = client.get_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = batch.GetTaskRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, task.Task) - assert response.name == 'name_value' - - -def test_get_task_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = batch.GetTaskRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.get_task(request=request)
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == batch.GetTaskRequest(
- name='name_value',
- )
-
-def test_get_task_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.get_task in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.get_task] = mock_rpc
- request = {}
- client.get_task(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- client.get_task(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = BatchServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.get_task in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.get_task] = mock_rpc
-
- request = {}
- await client.get_task(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.get_task(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_task_async(transport: str = 'grpc_asyncio', request_type=batch.GetTaskRequest):
- client = BatchServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_task),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task(
- name='name_value',
- ))
- response = await client.get_task(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = batch.GetTaskRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, task.Task) - assert response.name == 'name_value' - - -@pytest.mark.asyncio -async def test_get_task_async_from_dict(): - await test_get_task_async(request_type=dict) - -def test_get_task_field_headers(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = batch.GetTaskRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - call.return_value = task.Task() - client.get_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_task_field_headers_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = batch.GetTaskRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) - await client.get_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_task_flattened(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = task.Task() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_task( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_task_flattened_error(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_task( - batch.GetTaskRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_task_flattened_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - # Designate an appropriate return value for the call. 
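- # The async stub must return an awaitable, so the response message is
- # wrapped in a FakeUnaryUnaryCall that resolves to it when awaited.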
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.get_task(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_task_flattened_error_async():
- client = BatchServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.get_task(
- batch.GetTaskRequest(),
- name='name_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- batch.ListTasksRequest,
- dict,
-])
-def test_list_tasks(request_type, transport: str = 'grpc'):
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_tasks),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = batch.ListTasksResponse(
- next_page_token='next_page_token_value',
- unreachable=['unreachable_value'],
- )
- response = client.list_tasks(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = batch.ListTasksRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListTasksPager)
- assert response.next_page_token == 'next_page_token_value'
- assert response.unreachable == ['unreachable_value']
-
-
-def test_list_tasks_non_empty_request_with_auto_populated_field():
- # This test is a coverage failsafe to make sure that UUID4 fields are
- # automatically populated, according to AIP-4235, with non-empty requests.
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Populate all string fields in the request which are not UUID4
- # since we want to check that UUID4 are populated automatically
- # if they meet the requirements of AIP 4235.
- request = batch.ListTasksRequest(
- parent='parent_value',
- filter='filter_value',
- page_token='page_token_value',
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_tasks),
- '__call__') as call:
- call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client.list_tasks(request=request)
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == batch.ListTasksRequest(
- parent='parent_value',
- filter='filter_value',
- page_token='page_token_value',
- )
-
-def test_list_tasks_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.list_tasks in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.list_tasks] = mock_rpc
- request = {}
- client.list_tasks(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- client.list_tasks(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_tasks_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = BatchServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.list_tasks in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.list_tasks] = mock_rpc
-
- request = {}
- await client.list_tasks(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.list_tasks(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_tasks_async(transport: str = 'grpc_asyncio', request_type=batch.ListTasksRequest):
- client = BatchServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_tasks),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(batch.ListTasksResponse(
- next_page_token='next_page_token_value',
- unreachable=['unreachable_value'],
- ))
- response = await client.list_tasks(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = batch.ListTasksRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTasksAsyncPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -@pytest.mark.asyncio -async def test_list_tasks_async_from_dict(): - await test_list_tasks_async(request_type=dict) - -def test_list_tasks_field_headers(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = batch.ListTasksRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - call.return_value = batch.ListTasksResponse() - client.list_tasks(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_tasks_field_headers_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = batch.ListTasksRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(batch.ListTasksResponse()) - await client.list_tasks(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_tasks_flattened(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = batch.ListTasksResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_tasks( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_tasks_flattened_error(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError):
- client.list_tasks(
- batch.ListTasksRequest(),
- parent='parent_value',
- )
-
-@pytest.mark.asyncio
-async def test_list_tasks_flattened_async():
- client = BatchServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_tasks),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(batch.ListTasksResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.list_tasks(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_tasks_flattened_error_async():
- client = BatchServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.list_tasks(
- batch.ListTasksRequest(),
- parent='parent_value',
- )
-
-
-def test_list_tasks_pager(transport_name: str = "grpc"):
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_tasks),
- '__call__') as call:
- # Set the response to a series of pages.
- call.side_effect = (
- batch.ListTasksResponse(
- tasks=[
- task.Task(),
- task.Task(),
- task.Task(),
- ],
- next_page_token='abc',
- ),
- batch.ListTasksResponse(
- tasks=[],
- next_page_token='def',
- ),
- batch.ListTasksResponse(
- tasks=[
- task.Task(),
- ],
- next_page_token='ghi',
- ),
- batch.ListTasksResponse(
- tasks=[
- task.Task(),
- task.Task(),
- ],
- ),
- RuntimeError,
- )
-
- expected_metadata = ()
- retry = retries.Retry()
- timeout = 5
- expected_metadata = tuple(expected_metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ('parent', ''),
- )),
- )
- pager = client.list_tasks(request={}, retry=retry, timeout=timeout)
-
- assert pager._metadata == expected_metadata
- assert pager._retry == retry
- assert pager._timeout == timeout
-
- results = list(pager)
- assert len(results) == 6
- assert all(isinstance(i, task.Task)
- for i in results)
-def test_list_tasks_pages(transport_name: str = "grpc"):
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_tasks),
- '__call__') as call:
- # Set the response to a series of pages.
- call.side_effect = ( - batch.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - task.Task(), - ], - next_page_token='abc', - ), - batch.ListTasksResponse( - tasks=[], - next_page_token='def', - ), - batch.ListTasksResponse( - tasks=[ - task.Task(), - ], - next_page_token='ghi', - ), - batch.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - ], - ), - RuntimeError, - ) - pages = list(client.list_tasks(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_tasks_async_pager(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - batch.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - task.Task(), - ], - next_page_token='abc', - ), - batch.ListTasksResponse( - tasks=[], - next_page_token='def', - ), - batch.ListTasksResponse( - tasks=[ - task.Task(), - ], - next_page_token='ghi', - ), - batch.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_tasks(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, task.Task) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_tasks_async_pages(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - batch.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - task.Task(), - ], - next_page_token='abc', - ), - batch.ListTasksResponse( - tasks=[], - next_page_token='def', - ), - batch.ListTasksResponse( - tasks=[ - task.Task(), - ], - next_page_token='ghi', - ), - batch.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_tasks(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_create_job_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_job in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_job] = mock_rpc - - request = {} - client.create_job(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_job_rest_required_fields(request_type=batch.CreateJobRequest): - transport_class = transports.BatchServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_job._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("job_id", "request_id", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = gcb_job.Job() - # Mock the http request call within the method and fake a response. 
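- # Patching requests.Session.request intercepts the REST transport at the
- # HTTP layer, the REST analogue of stubbing the gRPC '__call__' above.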
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gcb_job.Job.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_job(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_job_rest_unset_required_fields(): - transport = transports.BatchServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_job._get_unset_required_fields({}) - assert set(unset_fields) == (set(("jobId", "requestId", )) & set(("parent", "job", ))) - - -def test_create_job_rest_flattened(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = gcb_job.Job() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - job=gcb_job.Job(name='name_value'), - job_id='job_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gcb_job.Job.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_job(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/jobs" % client.transport._host, args[1]) - - -def test_create_job_rest_flattened_error(transport: str = 'rest'): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
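- # Mixing a positional request object with flattened keyword arguments is
- # ambiguous, so the client raises ValueError before any request is sent.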
- with pytest.raises(ValueError): - client.create_job( - batch.CreateJobRequest(), - parent='parent_value', - job=gcb_job.Job(name='name_value'), - job_id='job_id_value', - ) - - -def test_get_job_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_job in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_job] = mock_rpc - - request = {} - client.get_job(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_job_rest_required_fields(request_type=batch.GetJobRequest): - transport_class = transports.BatchServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = job.Job() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
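- # transcode() normally maps the request onto the method's http rule;
- # returning a canned result keeps the test independent of the real
- # routing configuration.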
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = job.Job.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_job(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_job_rest_unset_required_fields(): - transport = transports.BatchServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_job._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_job_rest_flattened(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = job.Job() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/jobs/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = job.Job.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_job(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/jobs/*}" % client.transport._host, args[1]) - - -def test_get_job_rest_flattened_error(transport: str = 'rest'): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_job( - batch.GetJobRequest(), - name='name_value', - ) - - -def test_delete_job_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_job in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_job] = mock_rpc - - request = {} - client.delete_job(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_job_rest_flattened(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/jobs/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_job(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/jobs/*}" % client.transport._host, args[1]) - - -def test_delete_job_rest_flattened_error(transport: str = 'rest'): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_job( - batch.DeleteJobRequest(), - name='name_value', - ) - - -def test_cancel_job_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.cancel_job in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.cancel_job] = mock_rpc - - request = {} - client.cancel_job(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.cancel_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_cancel_job_rest_required_fields(request_type=batch.CancelJobRequest): - transport_class = transports.BatchServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).cancel_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).cancel_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
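For an unmocked CancelJob call, transcoding would map the request onto POST v1/{name=...}:cancel, with the path carrying the job name; the fixed v1/sample_method URI built just below stands in for that. The routing template itself can be checked in isolation with the same path_template.validate helper these tests use elsewhere:

.. code-block:: python

    from google.api_core import path_template

    # A path that matches the CancelJob HTTP rule ...
    assert path_template.validate(
        "v1/{name=projects/*/locations/*/jobs/*}:cancel",
        "v1/projects/sample1/locations/sample2/jobs/sample3:cancel",
    )

    # ... and one that does not (missing the locations/* segment).
    assert not path_template.validate(
        "v1/{name=projects/*/locations/*/jobs/*}:cancel",
        "v1/projects/sample1/jobs/sample3:cancel",
    )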
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.cancel_job(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_cancel_job_rest_unset_required_fields(): - transport = transports.BatchServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.cancel_job._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_cancel_job_rest_flattened(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/jobs/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.cancel_job(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/jobs/*}:cancel" % client.transport._host, args[1]) - - -def test_cancel_job_rest_flattened_error(transport: str = 'rest'): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.cancel_job( - batch.CancelJobRequest(), - name='name_value', - ) - - -def test_list_jobs_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_jobs in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
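The cache being swapped out here is filled exactly once, by _prep_wrapped_messages during client construction; later calls look the wrapper up rather than re-wrapping per invocation. That invariant can be observed directly with the same construction these tests use:

.. code-block:: python

    from unittest import mock

    from google.auth import credentials as ga_credentials
    from google.cloud.batch_v1.services.batch_service import BatchServiceClient

    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = BatchServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="rest",
        )
        wrapped_at_init = wrapper_fn.call_count

    # All wrapping happened during construction ...
    assert wrapped_at_init > 0
    # ... and the results are cached on the transport, keyed by the bare method.
    assert client._transport.list_jobs in client._transport._wrapped_methods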
- client._transport._wrapped_methods[client._transport.list_jobs] = mock_rpc - - request = {} - client.list_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_jobs(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_jobs_rest_flattened(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = batch.ListJobsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = batch.ListJobsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_jobs(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/jobs" % client.transport._host, args[1]) - - -def test_list_jobs_rest_flattened_error(transport: str = 'rest'): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_jobs( - batch.ListJobsRequest(), - parent='parent_value', - ) - - -def test_list_jobs_rest_pager(transport: str = 'rest'): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
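Before the page plumbing in the pager test here: each ListJobsResponse carries a next_page_token, and the pager keeps issuing requests until a response comes back with the token empty. A hand-rolled sketch of that loop (list_call stands for any callable returning a ListJobsResponse, such as the raw transport method; it is a placeholder, not generated API):

.. code-block:: python

    from google.cloud.batch_v1.types import batch

    def iterate_jobs(list_call, parent):
        """Hand-rolled version of the token loop the ListJobsPager runs."""
        page_token = ""
        while True:
            response = list_call(
                batch.ListJobsRequest(parent=parent, page_token=page_token)
            )
            for job_ in response.jobs:   # items on the current page
                yield job_
            page_token = response.next_page_token
            if not page_token:           # an empty token marks the final page
                return

The pager assertions below drive the same walk through a mocked session, both item-by-item and page-by-page via .pages and raw_page.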
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - batch.ListJobsResponse( - jobs=[ - job.Job(), - job.Job(), - job.Job(), - ], - next_page_token='abc', - ), - batch.ListJobsResponse( - jobs=[], - next_page_token='def', - ), - batch.ListJobsResponse( - jobs=[ - job.Job(), - ], - next_page_token='ghi', - ), - batch.ListJobsResponse( - jobs=[ - job.Job(), - job.Job(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(batch.ListJobsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - pager = client.list_jobs(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, job.Job) - for i in results) - - pages = list(client.list_jobs(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_get_task_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_task in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_task] = mock_rpc - - request = {} - client.get_task(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_task(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_task_rest_required_fields(request_type=batch.GetTaskRequest): - transport_class = transports.BatchServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_task._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_task._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = task.Task() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = task.Task.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_task(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_task_rest_unset_required_fields(): - transport = transports.BatchServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_task._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_task_rest_flattened(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = task.Task() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/jobs/sample3/taskGroups/sample4/tasks/sample5'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = task.Task.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_task(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/jobs/*/taskGroups/*/tasks/*}" % client.transport._host, args[1]) - - -def test_get_task_rest_flattened_error(transport: str = 'rest'): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_task( - batch.GetTaskRequest(), - name='name_value', - ) - - -def test_list_tasks_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_tasks in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_tasks] = mock_rpc - - request = {} - client.list_tasks(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_tasks(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_tasks_rest_required_fields(request_type=batch.ListTasksRequest): - transport_class = transports.BatchServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_tasks._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_tasks._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = batch.ListTasksResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = batch.ListTasksResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_tasks(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_tasks_rest_unset_required_fields(): - transport = transports.BatchServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_tasks._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_tasks_rest_flattened(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
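A note on reading the unset_required_fields assertions above: the first literal is easiest read as the set of fields the HTTP rule lets travel as query parameters, the second as the set of proto-required fields, and their intersection as what may legitimately be reported unset. For ListTasks those sets are disjoint, so the expected result is empty:

.. code-block:: python

    # Fields the ListTasks HTTP rule accepts as query parameters.
    query_params = {"filter", "pageSize", "pageToken"}

    # The field the request message marks as required.
    required = {"parent"}

    # Their intersection is the set of required fields that may stay unset,
    # which is empty here, exactly what the assertion above expects.
    assert query_params & required == set()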
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = batch.ListTasksResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/jobs/sample3/taskGroups/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = batch.ListTasksResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_tasks(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*/jobs/*/taskGroups/*}/tasks" % client.transport._host, args[1]) - - -def test_list_tasks_rest_flattened_error(transport: str = 'rest'): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_tasks( - batch.ListTasksRequest(), - parent='parent_value', - ) - - -def test_list_tasks_rest_pager(transport: str = 'rest'): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - batch.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - task.Task(), - ], - next_page_token='abc', - ), - batch.ListTasksResponse( - tasks=[], - next_page_token='def', - ), - batch.ListTasksResponse( - tasks=[ - task.Task(), - ], - next_page_token='ghi', - ), - batch.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(batch.ListTasksResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2/jobs/sample3/taskGroups/sample4'} - - pager = client.list_tasks(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, task.Task) - for i in results) - - pages = list(client.list_tasks(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. 
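The constraint spelled out in this comment cuts both ways: a pre-built transport must arrive fully configured, and the client will not merge it with separately supplied credentials, credentials files, scopes, or API keys. The supported shape, mirroring the constructions used in these tests:

.. code-block:: python

    from google.auth import credentials as ga_credentials
    from google.cloud.batch_v1.services.batch_service import (
        BatchServiceClient,
        transports,
    )

    # Configure everything on the transport itself ...
    transport = transports.BatchServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # ... then hand it to the client with no other credential-bearing options.
    client = BatchServiceClient(transport=transport)
    assert client.transport is transport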
- transport = transports.BatchServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.BatchServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = BatchServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.BatchServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = BatchServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = BatchServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.BatchServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = BatchServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.BatchServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = BatchServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.BatchServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.BatchServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.BatchServiceGrpcTransport, - transports.BatchServiceGrpcAsyncIOTransport, - transports.BatchServiceRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = BatchServiceClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_job_empty_call_grpc(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
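What a "totally empty call" means in these failsafes: request=None still goes through message construction and is normalized into a default-constructed request message, so the stub receives the type's zero-valued instance rather than None. That default is plain proto3 zero values:

.. code-block:: python

    from google.cloud.batch_v1.types import batch

    # request=None becomes a default-constructed request message, with every
    # scalar field at its proto3 zero value.
    request_msg = batch.CreateJobRequest()
    assert request_msg.parent == ""
    assert request_msg.job_id == ""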
- with mock.patch.object( - type(client.transport.create_job), - '__call__') as call: - call.return_value = gcb_job.Job() - client.create_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.CreateJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_job_empty_call_grpc(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - call.return_value = job.Job() - client.get_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.GetJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_job_empty_call_grpc(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_job), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.DeleteJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_cancel_job_empty_call_grpc(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.cancel_job), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.cancel_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.CancelJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_jobs_empty_call_grpc(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - call.return_value = batch.ListJobsResponse() - client.list_jobs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.ListJobsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_task_empty_call_grpc(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - call.return_value = task.Task() - client.get_task(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.GetTaskRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_tasks_empty_call_grpc(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - call.return_value = batch.ListTasksResponse() - client.list_tasks(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.ListTasksRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = BatchServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_job_empty_call_grpc_asyncio(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcb_job.Job( - name='name_value', - uid='uid_value', - priority=898, - )) - await client.create_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.CreateJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_job_empty_call_grpc_asyncio(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(job.Job( - name='name_value', - uid='uid_value', - priority=898, - )) - await client.get_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.GetJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
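The asyncio failsafes that follow differ from the sync ones only in how the stub is faked: an asyncio stub returns an awaitable call object, so these tests substitute grpc_helpers_async.FakeUnaryUnaryCall, which resolves to its canned response when awaited. A minimal standalone sketch of that behavior, assuming the helper is available from google.api_core as these tests use it:

.. code-block:: python

    import asyncio

    from google.api_core import grpc_helpers_async
    from google.cloud.batch_v1.types import job

    async def main():
        # Awaiting the fake call yields the response it was constructed with.
        fake_call = grpc_helpers_async.FakeUnaryUnaryCall(job.Job(name="jobs/demo"))
        response = await fake_call
        assert response.name == "jobs/demo"

    asyncio.run(main())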
-@pytest.mark.asyncio -async def test_delete_job_empty_call_grpc_asyncio(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.DeleteJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_cancel_job_empty_call_grpc_asyncio(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.cancel_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.cancel_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.CancelJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_jobs_empty_call_grpc_asyncio(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(batch.ListJobsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - await client.list_jobs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.ListJobsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_task_empty_call_grpc_asyncio(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task( - name='name_value', - )) - await client.get_task(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.GetTaskRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_list_tasks_empty_call_grpc_asyncio(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(batch.ListTasksResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - await client.list_tasks(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.ListTasksRequest() - - assert args[0] == request_msg - - -def test_transport_kind_rest(): - transport = BatchServiceClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" - - -def test_create_job_rest_bad_request(request_type=batch.CreateJobRequest): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_job(request) - - -@pytest.mark.parametrize("request_type", [ - batch.CreateJobRequest, - dict, -]) -def test_create_job_rest_call_success(request_type): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["job"] = {'name': 'name_value', 'uid': 'uid_value', 'priority': 898, 'task_groups': [{'name': 'name_value', 'task_spec': {'runnables': [{'container': {'image_uri': 'image_uri_value', 'commands': ['commands_value1', 'commands_value2'], 'entrypoint': 'entrypoint_value', 'volumes': ['volumes_value1', 'volumes_value2'], 'options': 'options_value', 'block_external_network': True, 'username': 'username_value', 'password': 'password_value', 'enable_image_streaming': True}, 'script': {'path': 'path_value', 'text': 'text_value'}, 'barrier': {'name': 'name_value'}, 'display_name': 'display_name_value', 'ignore_exit_status': True, 'background': True, 'always_run': True, 'environment': {'variables': {}, 'secret_variables': {}, 'encrypted_variables': {'key_name': 'key_name_value', 'cipher_text': 'cipher_text_value'}}, 'timeout': {'seconds': 751, 'nanos': 543}, 'labels': {}}], 'compute_resource': {'cpu_milli': 958, 'memory_mib': 1072, 'boot_disk_mib': 1365}, 'max_run_duration': {}, 'max_retry_count': 1635, 'lifecycle_policies': [{'action': 1, 'action_condition': {'exit_codes': [1064, 1065]}}], 'environments': {}, 'volumes': [{'nfs': {'server': 'server_value', 'remote_path': 'remote_path_value'}, 'gcs': {'remote_path': 'remote_path_value'}, 'device_name': 'device_name_value', 'mount_path': 'mount_path_value', 
'mount_options': ['mount_options_value1', 'mount_options_value2']}], 'environment': {}}, 'task_count': 1083, 'parallelism': 1174, 'scheduling_policy': 1, 'task_environments': {}, 'task_count_per_node': 2022, 'require_hosts_file': True, 'permissive_ssh': True, 'run_as_non_root': True}], 'allocation_policy': {'location': {'allowed_locations': ['allowed_locations_value1', 'allowed_locations_value2']}, 'instances': [{'policy': {'machine_type': 'machine_type_value', 'min_cpu_platform': 'min_cpu_platform_value', 'provisioning_model': 1, 'accelerators': [{'type_': 'type__value', 'count': 553, 'install_gpu_drivers': True, 'driver_version': 'driver_version_value'}], 'boot_disk': {'image': 'image_value', 'snapshot': 'snapshot_value', 'type_': 'type__value', 'size_gb': 739, 'disk_interface': 'disk_interface_value'}, 'disks': [{'new_disk': {}, 'existing_disk': 'existing_disk_value', 'device_name': 'device_name_value'}], 'reservation': 'reservation_value'}, 'instance_template': 'instance_template_value', 'install_gpu_drivers': True, 'install_ops_agent': True, 'block_project_ssh_keys': True}], 'service_account': {'email': 'email_value', 'scopes': ['scopes_value1', 'scopes_value2']}, 'labels': {}, 'network': {'network_interfaces': [{'network': 'network_value', 'subnetwork': 'subnetwork_value', 'no_external_ip_address': True}]}, 'placement': {'collocation': 'collocation_value', 'max_distance': 1264}, 'tags': ['tags_value1', 'tags_value2']}, 'labels': {}, 'status': {'state': 1, 'status_events': [{'type_': 'type__value', 'description': 'description_value', 'event_time': {'seconds': 751, 'nanos': 543}, 'task_execution': {'exit_code': 948}, 'task_state': 1}], 'task_groups': {}, 'run_duration': {}}, 'create_time': {}, 'update_time': {}, 'logs_policy': {'destination': 1, 'logs_path': 'logs_path_value', 'cloud_logging_option': {'use_generic_task_monitored_resource': True}}, 'notifications': [{'pubsub_topic': 'pubsub_topic_value', 'message': {'type_': 1, 'new_job_state': 1, 'new_task_state': 1}}]} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = batch.CreateJobRequest.meta.fields["job"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["job"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["job"][field])): - del request_init["job"][field][i][subfield] - else: - del request_init["job"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = gcb_job.Job( - name='name_value', - uid='uid_value', - priority=898, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gcb_job.Job.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_job(request) - - # Establish that the response is the type that we expect. 
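The pruning loop above exists because of version skew: if the installed library predates a field used in the sample request dict, constructing the request would raise, so unknown subfields are deleted first. The failure mode it avoids can be triggered with a deliberately bogus field name:

.. code-block:: python

    from google.cloud.batch_v1.types import batch

    try:
        batch.CreateJobRequest(not_a_real_field="x")
    except ValueError as exc:
        # proto-plus rejects keyword arguments that are not fields of the
        # message, e.g. "Unknown field for CreateJobRequest: not_a_real_field".
        print(exc)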
- assert isinstance(response, gcb_job.Job) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.priority == 898 - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_job_rest_interceptors(null_interceptor): - transport = transports.BatchServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BatchServiceRestInterceptor(), - ) - client = BatchServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BatchServiceRestInterceptor, "post_create_job") as post, \ - mock.patch.object(transports.BatchServiceRestInterceptor, "post_create_job_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BatchServiceRestInterceptor, "pre_create_job") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = batch.CreateJobRequest.pb(batch.CreateJobRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = gcb_job.Job.to_json(gcb_job.Job()) - req.return_value.content = return_value - - request = batch.CreateJobRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = gcb_job.Job() - post_with_metadata.return_value = gcb_job.Job(), metadata - - client.create_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_job_rest_bad_request(request_type=batch.GetJobRequest): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/jobs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_job(request) - - -@pytest.mark.parametrize("request_type", [ - batch.GetJobRequest, - dict, -]) -def test_get_job_rest_call_success(request_type): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/jobs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
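Outside of tests, the hooks mocked in these interceptor tests are the extension point for the REST transport: subclass BatchServiceRestInterceptor, override the pre_*/post_* methods of interest, and attach an instance to the transport. A minimal logging sketch (the print calls are placeholders for real logic):

.. code-block:: python

    from google.auth import credentials as ga_credentials
    from google.cloud.batch_v1.services.batch_service import (
        BatchServiceClient,
        transports,
    )

    class LoggingInterceptor(transports.BatchServiceRestInterceptor):
        def pre_get_job(self, request, metadata):
            # Runs before the HTTP request; may rewrite request/metadata.
            print("get_job request:", request.name)
            return request, metadata

        def post_get_job(self, response):
            # Runs after the response is deserialized; may replace it.
            print("get_job returned:", response.name)
            return response

    transport = transports.BatchServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=LoggingInterceptor(),
    )
    client = BatchServiceClient(transport=transport)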
- return_value = job.Job( - name='name_value', - uid='uid_value', - priority=898, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = job.Job.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_job(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, job.Job) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.priority == 898 - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_job_rest_interceptors(null_interceptor): - transport = transports.BatchServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BatchServiceRestInterceptor(), - ) - client = BatchServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BatchServiceRestInterceptor, "post_get_job") as post, \ - mock.patch.object(transports.BatchServiceRestInterceptor, "post_get_job_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BatchServiceRestInterceptor, "pre_get_job") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = batch.GetJobRequest.pb(batch.GetJobRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = job.Job.to_json(job.Job()) - req.return_value.content = return_value - - request = batch.GetJobRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = job.Job() - post_with_metadata.return_value = job.Job(), metadata - - client.get_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_job_rest_bad_request(request_type=batch.DeleteJobRequest): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/jobs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
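The exception asserted in these bad-request tests comes from the status-code mapping in google.api_core.exceptions: a 400 response surfaces as BadRequest, a subclass of GoogleAPICallError. The mapping can be exercised directly:

.. code-block:: python

    from google.api_core import exceptions as core_exceptions

    # api_core converts HTTP status codes into typed exceptions.
    err = core_exceptions.from_http_status(400, "malformed job name")
    assert isinstance(err, core_exceptions.BadRequest)
    assert isinstance(err, core_exceptions.GoogleAPICallError)
    assert err.code == 400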
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.delete_job(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    batch.DeleteJobRequest,
-    dict,
-])
-def test_delete_job_rest_call_success(request_type):
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/jobs/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.delete_job(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, operation.Operation)
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_delete_job_rest_interceptors(null_interceptor):
-    transport = transports.BatchServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.BatchServiceRestInterceptor(),
-    )
-    client = BatchServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
-        mock.patch.object(transports.BatchServiceRestInterceptor, "post_delete_job") as post, \
-        mock.patch.object(transports.BatchServiceRestInterceptor, "post_delete_job_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.BatchServiceRestInterceptor, "pre_delete_job") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = batch.DeleteJobRequest.pb(batch.DeleteJobRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = json_format.MessageToJson(operations_pb2.Operation())
-        req.return_value.content = return_value
-
-        request = batch.DeleteJobRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = operations_pb2.Operation()
-        post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
-        client.delete_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def 
test_cancel_job_rest_bad_request(request_type=batch.CancelJobRequest): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/jobs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.cancel_job(request) - - -@pytest.mark.parametrize("request_type", [ - batch.CancelJobRequest, - dict, -]) -def test_cancel_job_rest_call_success(request_type): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/jobs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.cancel_job(request) - - # Establish that the response is the type that we expect. 
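-    # (Sketch of an explicit check, mirroring the LRO assertions used
-    # elsewhere in this suite; assumes the returned future exposes the raw
-    # operations_pb2.Operation via its public `operation` property.)
-    assert response.operation.name == 'operations/spam'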
-    json_return_value = json_format.MessageToJson(return_value)
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_cancel_job_rest_interceptors(null_interceptor):
-    transport = transports.BatchServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.BatchServiceRestInterceptor(),
-    )
-    client = BatchServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
-        mock.patch.object(transports.BatchServiceRestInterceptor, "post_cancel_job") as post, \
-        mock.patch.object(transports.BatchServiceRestInterceptor, "post_cancel_job_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.BatchServiceRestInterceptor, "pre_cancel_job") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = batch.CancelJobRequest.pb(batch.CancelJobRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = json_format.MessageToJson(operations_pb2.Operation())
-        req.return_value.content = return_value
-
-        request = batch.CancelJobRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = operations_pb2.Operation()
-        post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
-        client.cancel_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_list_jobs_rest_bad_request(request_type=batch.ListJobsRequest):
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'projects/sample1/locations/sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.list_jobs(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    batch.ListJobsRequest,
-    dict,
-])
-def test_list_jobs_rest_call_success(request_type):
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'projects/sample1/locations/sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
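-        # (For context, assumed from the pagers module imported by this suite:
-        # list_jobs wraps the raw ListJobsResponse in a pagers.ListJobsPager,
-        # which lazily re-issues requests as the caller iterates, e.g.
-        # `for j in client.list_jobs(request): ...`, so the assertions below
-        # check the pager type rather than the raw response type.)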
-        return_value = batch.ListJobsResponse(
-            next_page_token='next_page_token_value',
-            unreachable=['unreachable_value'],
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-
-        # Convert return value to protobuf type
-        return_value = batch.ListJobsResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.list_jobs(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListJobsPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable == ['unreachable_value']
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_list_jobs_rest_interceptors(null_interceptor):
-    transport = transports.BatchServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.BatchServiceRestInterceptor(),
-    )
-    client = BatchServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.BatchServiceRestInterceptor, "post_list_jobs") as post, \
-        mock.patch.object(transports.BatchServiceRestInterceptor, "post_list_jobs_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.BatchServiceRestInterceptor, "pre_list_jobs") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = batch.ListJobsRequest.pb(batch.ListJobsRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = batch.ListJobsResponse.to_json(batch.ListJobsResponse())
-        req.return_value.content = return_value
-
-        request = batch.ListJobsRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = batch.ListJobsResponse()
-        post_with_metadata.return_value = batch.ListJobsResponse(), metadata
-
-        client.list_jobs(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_get_task_rest_bad_request(request_type=batch.GetTaskRequest):
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/jobs/sample3/taskGroups/sample4/tasks/sample5'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.get_task(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    batch.GetTaskRequest,
-    dict,
-])
-def test_get_task_rest_call_success(request_type):
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/jobs/sample3/taskGroups/sample4/tasks/sample5'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = task.Task(
-            name='name_value',
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-
-        # Convert return value to protobuf type
-        return_value = task.Task.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.get_task(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, task.Task)
-    assert response.name == 'name_value'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_get_task_rest_interceptors(null_interceptor):
-    transport = transports.BatchServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.BatchServiceRestInterceptor(),
-    )
-    client = BatchServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.BatchServiceRestInterceptor, "post_get_task") as post, \
-        mock.patch.object(transports.BatchServiceRestInterceptor, "post_get_task_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.BatchServiceRestInterceptor, "pre_get_task") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = batch.GetTaskRequest.pb(batch.GetTaskRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = task.Task.to_json(task.Task())
-        req.return_value.content = return_value
-
-        request = batch.GetTaskRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = task.Task()
-        post_with_metadata.return_value = task.Task(), metadata
-
-        client.get_task(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_list_tasks_rest_bad_request(request_type=batch.ListTasksRequest):
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'projects/sample1/locations/sample2/jobs/sample3/taskGroups/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.list_tasks(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    batch.ListTasksRequest,
-    dict,
-])
-def test_list_tasks_rest_call_success(request_type):
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'projects/sample1/locations/sample2/jobs/sample3/taskGroups/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = batch.ListTasksResponse(
-            next_page_token='next_page_token_value',
-            unreachable=['unreachable_value'],
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-
-        # Convert return value to protobuf type
-        return_value = batch.ListTasksResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.list_tasks(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListTasksPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable == ['unreachable_value']
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_list_tasks_rest_interceptors(null_interceptor):
-    transport = transports.BatchServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.BatchServiceRestInterceptor(),
-    )
-    client = BatchServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.BatchServiceRestInterceptor, "post_list_tasks") as post, \
-        mock.patch.object(transports.BatchServiceRestInterceptor, "post_list_tasks_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.BatchServiceRestInterceptor, "pre_list_tasks") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = batch.ListTasksRequest.pb(batch.ListTasksRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = batch.ListTasksResponse.to_json(batch.ListTasksResponse())
-        req.return_value.content = return_value
-
-        request = batch.ListTasksRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = batch.ListTasksResponse()
-        post_with_metadata.return_value = batch.ListTasksResponse(), metadata
-
-        client.list_tasks(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest):
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-    request = request_type()
-    request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = Request()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.get_location(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    locations_pb2.GetLocationRequest,
-    dict,
-])
-def test_get_location_rest(request_type):
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    request_init = {'name': 'projects/sample1/locations/sample2'}
-    request = request_type(**request_init)
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # Designate an appropriate value for the returned response.
- return_value = locations_pb2.Location() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_location(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocationsRequest): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_locations(request) - - -@pytest.mark.parametrize("request_type", [ - locations_pb2.ListLocationsRequest, - dict, -]) -def test_list_locations_rest(request_type): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.ListLocationsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_locations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - - -def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.cancel_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) -def test_cancel_operation_rest(request_type): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.DeleteOperationRequest, - dict, -]) -def test_delete_operation_rest(request_type): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) -def test_get_operation_rest(request_type): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_operations(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) -def test_list_operations_rest(request_type): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_operations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_initialize_client_w_rest(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_job_empty_call_rest(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_job), - '__call__') as call: - client.create_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.CreateJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_job_empty_call_rest(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - client.get_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.GetJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_job_empty_call_rest(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_job), - '__call__') as call: - client.delete_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.DeleteJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_cancel_job_empty_call_rest(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.cancel_job), - '__call__') as call: - client.cancel_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.CancelJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
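-# (A minimal sketch of what "empty call" means here: calling
-# `client.list_jobs(request=None)` must behave like
-# `client.list_jobs(batch.ListJobsRequest())`; the client coerces a missing
-# request into an empty request message before hitting the transport.)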
-def test_list_jobs_empty_call_rest():
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_jobs),
-            '__call__') as call:
-        client.list_jobs(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = batch.ListJobsRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_get_task_empty_call_rest():
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_task),
-            '__call__') as call:
-        client.get_task(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = batch.GetTaskRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_list_tasks_empty_call_rest():
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_tasks),
-            '__call__') as call:
-        client.list_tasks(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = batch.ListTasksRequest()
-
-        assert args[0] == request_msg
-
-
-def test_batch_service_rest_lro_client():
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-    transport = client.transport
-
-    # Ensure that we have an api-core operations client.
-    assert isinstance(
-        transport.operations_client,
-        operations_v1.AbstractOperationsClient,
-    )
-
-    # Ensure that subsequent calls to the property send the exact same object.
-    assert transport.operations_client is transport.operations_client
-
-
-def test_transport_grpc_default():
-    # A client should use the gRPC transport by default.
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    assert isinstance(
-        client.transport,
-        transports.BatchServiceGrpcTransport,
-    )
-
-
-def test_batch_service_base_transport_error():
-    # Passing both a credentials object and credentials_file should raise an error
-    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
-        transport = transports.BatchServiceTransport(
-            credentials=ga_credentials.AnonymousCredentials(),
-            credentials_file="credentials.json"
-        )
-
-
-def test_batch_service_base_transport():
-    # Instantiate the base transport.
-    with mock.patch('google.cloud.batch_v1.services.batch_service.transports.BatchServiceTransport.__init__') as Transport:
-        Transport.return_value = None
-        transport = transports.BatchServiceTransport(
-            credentials=ga_credentials.AnonymousCredentials(),
-        )
-
-    # Every method on the transport should just blindly
-    # raise NotImplementedError.
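-    # (Sketch of the contract assumed here, not the generated source itself:
-    # the base transport wires every RPC to a stub along the lines of
-    #
-    #     def create_job(self, request, **kwargs):
-    #         raise NotImplementedError()
-    #
-    # so each concrete transport must override all of the methods below.)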
-    methods = (
-        'create_job',
-        'get_job',
-        'delete_job',
-        'cancel_job',
-        'list_jobs',
-        'get_task',
-        'list_tasks',
-        'get_location',
-        'list_locations',
-        'get_operation',
-        'cancel_operation',
-        'delete_operation',
-        'list_operations',
-    )
-    for method in methods:
-        with pytest.raises(NotImplementedError):
-            getattr(transport, method)(request=object())
-
-    with pytest.raises(NotImplementedError):
-        transport.close()
-
-    # Additionally, the LRO client (a property) should
-    # also raise NotImplementedError
-    with pytest.raises(NotImplementedError):
-        transport.operations_client
-
-    # Catch all for all remaining methods and properties
-    remainder = [
-        'kind',
-    ]
-    for r in remainder:
-        with pytest.raises(NotImplementedError):
-            getattr(transport, r)()
-
-
-def test_batch_service_base_transport_with_credentials_file():
-    # Instantiate the base transport with a credentials file
-    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.batch_v1.services.batch_service.transports.BatchServiceTransport._prep_wrapped_messages') as Transport:
-        Transport.return_value = None
-        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
-        transport = transports.BatchServiceTransport(
-            credentials_file="credentials.json",
-            quota_project_id="octopus",
-        )
-        load_creds.assert_called_once_with("credentials.json",
-            scopes=None,
-            default_scopes=(
-                'https://www.googleapis.com/auth/cloud-platform',
-            ),
-            quota_project_id="octopus",
-        )
-
-
-def test_batch_service_base_transport_with_adc():
-    # Test the default credentials are used if credentials and credentials_file are None.
-    with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.batch_v1.services.batch_service.transports.BatchServiceTransport._prep_wrapped_messages') as Transport:
-        Transport.return_value = None
-        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
-        transport = transports.BatchServiceTransport()
-        adc.assert_called_once()
-
-
-def test_batch_service_auth_adc():
-    # If no credentials are provided, we should use ADC credentials.
-    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
-        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
-        BatchServiceClient()
-        adc.assert_called_once_with(
-            scopes=None,
-            default_scopes=(
-                'https://www.googleapis.com/auth/cloud-platform',
-            ),
-            quota_project_id=None,
-        )
-
-
-@pytest.mark.parametrize(
-    "transport_class",
-    [
-        transports.BatchServiceGrpcTransport,
-        transports.BatchServiceGrpcAsyncIOTransport,
-    ],
-)
-def test_batch_service_transport_auth_adc(transport_class):
-    # If credentials and host are not provided, the transport class should use
-    # ADC credentials.
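-    # (Background, from standard google.auth behavior: google.auth.default()
-    # returns a (credentials, project_id) tuple, which is why the mock below
-    # supplies a 2-tuple as its return value.)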
-    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
-        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
-        transport_class(quota_project_id="octopus", scopes=["1", "2"])
-        adc.assert_called_once_with(
-            scopes=["1", "2"],
-            default_scopes=('https://www.googleapis.com/auth/cloud-platform',),
-            quota_project_id="octopus",
-        )
-
-
-@pytest.mark.parametrize(
-    "transport_class",
-    [
-        transports.BatchServiceGrpcTransport,
-        transports.BatchServiceGrpcAsyncIOTransport,
-        transports.BatchServiceRestTransport,
-    ],
-)
-def test_batch_service_transport_auth_gdch_credentials(transport_class):
-    host = 'https://language.com'
-    api_audience_tests = [None, 'https://language2.com']
-    api_audience_expect = [host, 'https://language2.com']
-    for t, e in zip(api_audience_tests, api_audience_expect):
-        with mock.patch.object(google.auth, 'default', autospec=True) as adc:
-            gdch_mock = mock.MagicMock()
-            type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock)
-            adc.return_value = (gdch_mock, None)
-            transport_class(host=host, api_audience=t)
-            gdch_mock.with_gdch_audience.assert_called_once_with(
-                e
-            )
-
-
-@pytest.mark.parametrize(
-    "transport_class,grpc_helpers",
-    [
-        (transports.BatchServiceGrpcTransport, grpc_helpers),
-        (transports.BatchServiceGrpcAsyncIOTransport, grpc_helpers_async)
-    ],
-)
-def test_batch_service_transport_create_channel(transport_class, grpc_helpers):
-    # If credentials and host are not provided, the transport class should use
-    # ADC credentials.
-    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object(
-        grpc_helpers, "create_channel", autospec=True
-    ) as create_channel:
-        creds = ga_credentials.AnonymousCredentials()
-        adc.return_value = (creds, None)
-        transport_class(
-            quota_project_id="octopus",
-            scopes=["1", "2"]
-        )
-
-        create_channel.assert_called_with(
-            "batch.googleapis.com:443",
-            credentials=creds,
-            credentials_file=None,
-            quota_project_id="octopus",
-            default_scopes=(
-                'https://www.googleapis.com/auth/cloud-platform',
-            ),
-            scopes=["1", "2"],
-            default_host="batch.googleapis.com",
-            ssl_credentials=None,
-            options=[
-                ("grpc.max_send_message_length", -1),
-                ("grpc.max_receive_message_length", -1),
-            ],
-        )
-
-
-@pytest.mark.parametrize("transport_class", [transports.BatchServiceGrpcTransport, transports.BatchServiceGrpcAsyncIOTransport])
-def test_batch_service_grpc_transport_client_cert_source_for_mtls(
-    transport_class
-):
-    cred = ga_credentials.AnonymousCredentials()
-
-    # Check ssl_channel_credentials is used if provided.
-    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
-        mock_ssl_channel_creds = mock.Mock()
-        transport_class(
-            host="squid.clam.whelk",
-            credentials=cred,
-            ssl_channel_credentials=mock_ssl_channel_creds
-        )
-        mock_create_channel.assert_called_once_with(
-            "squid.clam.whelk:443",
-            credentials=cred,
-            credentials_file=None,
-            scopes=None,
-            ssl_credentials=mock_ssl_channel_creds,
-            quota_project_id=None,
-            options=[
-                ("grpc.max_send_message_length", -1),
-                ("grpc.max_receive_message_length", -1),
-            ],
-        )
-
-    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
-    # is used.
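-    # (Callback contract assumed by this test: client_cert_source_callback,
-    # defined earlier in this module, returns a (cert_bytes, key_bytes) tuple
-    # that the transport forwards to
-    # grpc.ssl_channel_credentials(certificate_chain=..., private_key=...).)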
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_batch_service_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.BatchServiceRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_batch_service_host_no_port(transport_name): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='batch.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'batch.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://batch.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_batch_service_host_with_port(transport_name): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='batch.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'batch.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://batch.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_batch_service_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = BatchServiceClient( - credentials=creds1, - transport=transport_name, - ) - client2 = BatchServiceClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.create_job._session - session2 = client2.transport.create_job._session - assert session1 != session2 - session1 = client1.transport.get_job._session - session2 = client2.transport.get_job._session - assert session1 != session2 - session1 = client1.transport.delete_job._session - session2 = client2.transport.delete_job._session - assert session1 != session2 - session1 = client1.transport.cancel_job._session - session2 = client2.transport.cancel_job._session - assert session1 != session2 - session1 = client1.transport.list_jobs._session - session2 = client2.transport.list_jobs._session - assert session1 != session2 - session1 = client1.transport.get_task._session - session2 = client2.transport.get_task._session - assert session1 != session2 - session1 = client1.transport.list_tasks._session - session2 = client2.transport.list_tasks._session - assert session1 != session2 -def test_batch_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
-    transport = transports.BatchServiceGrpcTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials is None
-
-
-def test_batch_service_grpc_asyncio_transport_channel():
-    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
-    # Check that channel is used if provided.
-    transport = transports.BatchServiceGrpcAsyncIOTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials is None
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.BatchServiceGrpcTransport, transports.BatchServiceGrpcAsyncIOTransport])
-def test_batch_service_transport_channel_mtls_with_client_cert_source(
-    transport_class
-):
-    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
-        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
-            mock_ssl_cred = mock.Mock()
-            grpc_ssl_channel_cred.return_value = mock_ssl_cred
-
-            mock_grpc_channel = mock.Mock()
-            grpc_create_channel.return_value = mock_grpc_channel
-
-            cred = ga_credentials.AnonymousCredentials()
-            with pytest.warns(DeprecationWarning):
-                with mock.patch.object(google.auth, 'default') as adc:
-                    adc.return_value = (cred, None)
-                    transport = transport_class(
-                        host="squid.clam.whelk",
-                        api_mtls_endpoint="mtls.squid.clam.whelk",
-                        client_cert_source=client_cert_source_callback,
-                    )
-                    adc.assert_called_once()
-
-            grpc_ssl_channel_cred.assert_called_once_with(
-                certificate_chain=b"cert bytes", private_key=b"key bytes"
-            )
-            grpc_create_channel.assert_called_once_with(
-                "mtls.squid.clam.whelk:443",
-                credentials=cred,
-                credentials_file=None,
-                scopes=None,
-                ssl_credentials=mock_ssl_cred,
-                quota_project_id=None,
-                options=[
-                    ("grpc.max_send_message_length", -1),
-                    ("grpc.max_receive_message_length", -1),
-                ],
-            )
-            assert transport.grpc_channel == mock_grpc_channel
-            assert transport._ssl_channel_credentials == mock_ssl_cred
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.BatchServiceGrpcTransport, transports.BatchServiceGrpcAsyncIOTransport]) -def test_batch_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_batch_service_grpc_lro_client(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_batch_service_grpc_lro_async_client(): - client = BatchServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_job_path(): - project = "squid" - location = "clam" - job = "whelk" - expected = "projects/{project}/locations/{location}/jobs/{job}".format(project=project, location=location, job=job, ) - actual = BatchServiceClient.job_path(project, location, job) - assert expected == actual - - -def test_parse_job_path(): - expected = { - "project": "octopus", - "location": "oyster", - "job": "nudibranch", - } - path = BatchServiceClient.job_path(**expected) - - # Check that the path construction is reversible. - actual = BatchServiceClient.parse_job_path(path) - assert expected == actual - -def test_task_path(): - project = "cuttlefish" - location = "mussel" - job = "winkle" - task_group = "nautilus" - task = "scallop" - expected = "projects/{project}/locations/{location}/jobs/{job}/taskGroups/{task_group}/tasks/{task}".format(project=project, location=location, job=job, task_group=task_group, task=task, ) - actual = BatchServiceClient.task_path(project, location, job, task_group, task) - assert expected == actual - - -def test_parse_task_path(): - expected = { - "project": "abalone", - "location": "squid", - "job": "clam", - "task_group": "whelk", - "task": "octopus", - } - path = BatchServiceClient.task_path(**expected) - - # Check that the path construction is reversible. 
- actual = BatchServiceClient.parse_task_path(path) - assert expected == actual - -def test_task_group_path(): - project = "oyster" - location = "nudibranch" - job = "cuttlefish" - task_group = "mussel" - expected = "projects/{project}/locations/{location}/jobs/{job}/taskGroups/{task_group}".format(project=project, location=location, job=job, task_group=task_group, ) - actual = BatchServiceClient.task_group_path(project, location, job, task_group) - assert expected == actual - - -def test_parse_task_group_path(): - expected = { - "project": "winkle", - "location": "nautilus", - "job": "scallop", - "task_group": "abalone", - } - path = BatchServiceClient.task_group_path(**expected) - - # Check that the path construction is reversible. - actual = BatchServiceClient.parse_task_group_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = BatchServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = BatchServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = BatchServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format(folder=folder, ) - actual = BatchServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = BatchServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = BatchServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format(organization=organization, ) - actual = BatchServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = BatchServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = BatchServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format(project=project, ) - actual = BatchServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = BatchServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = BatchServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = BatchServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = BatchServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = BatchServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.BatchServiceTransport, '_prep_wrapped_messages') as prep: - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.BatchServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = BatchServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_delete_operation(transport: str = "grpc"): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_field_headers(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
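-    # (Background, per gapic routing conventions: the client attaches an
-    # "x-goog-request-params" metadata entry built from URI fields, here
-    # "name=locations", so the backend can route the request; the check below
-    # looks for that exact key/value pair in the call metadata.)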
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_delete_operation_from_dict(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_cancel_operation_from_dict(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_list_operations_field_headers(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_operations_from_dict(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_locations(transport: str = "grpc"): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - -def test_list_locations_field_headers(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() - - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_locations_from_dict(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_location(transport: str = "grpc"): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - -def test_get_location_field_headers(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials()) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. 
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        call.return_value = locations_pb2.Location()
-
-        client.get_location(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
-@pytest.mark.asyncio
-async def test_get_location_field_headers_async():
-    client = BatchServiceAsyncClient(
-        credentials=async_anonymous_credentials()
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = locations_pb2.GetLocationRequest()
-    request.name = "locations/abc"
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            locations_pb2.Location()
-        )
-        await client.get_location(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
-
-def test_get_location_from_dict():
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = locations_pb2.Location()
-
-        response = client.get_location(
-            request={
-                "name": "locations/abc",
-            }
-        )
-        call.assert_called()
-@pytest.mark.asyncio
-async def test_get_location_from_dict_async():
-    client = BatchServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_transport_close_grpc(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close_rest(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. - with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (BatchServiceClient, transports.BatchServiceGrpcTransport), - (BatchServiceAsyncClient, transports.BatchServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/.coveragerc b/owl-bot-staging/google-cloud-batch/v1alpha/.coveragerc deleted file mode 100644 index e17b1e03c7a6..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/batch/__init__.py - google/cloud/batch/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/.flake8 b/owl-bot-staging/google-cloud-batch/v1alpha/.flake8 deleted file mode 100644 index 29227d4cf419..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except 
in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Generated by synthtool. DO NOT EDIT!
-[flake8]
-ignore = E203, E266, E501, W503
-exclude =
-  # Exclude generated code.
-  **/proto/**
-  **/gapic/**
-  **/services/**
-  **/types/**
-  *_pb2.py
-
-  # Standard linting exemptions.
-  **/.nox/**
-  __pycache__,
-  .git,
-  *.pyc,
-  conf.py
diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/MANIFEST.in b/owl-bot-staging/google-cloud-batch/v1alpha/MANIFEST.in
deleted file mode 100644
index c8e43b7cfbfb..000000000000
--- a/owl-bot-staging/google-cloud-batch/v1alpha/MANIFEST.in
+++ /dev/null
@@ -1,2 +0,0 @@
-recursive-include google/cloud/batch *.py
-recursive-include google/cloud/batch_v1alpha *.py
diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/README.rst b/owl-bot-staging/google-cloud-batch/v1alpha/README.rst
deleted file mode 100644
index 73c2dd5d3ee8..000000000000
--- a/owl-bot-staging/google-cloud-batch/v1alpha/README.rst
+++ /dev/null
@@ -1,143 +0,0 @@
-Python Client for Google Cloud Batch API
-=================================================
-
-Quick Start
------------
-
-In order to use this library, you first need to go through the following steps:
-
-1. `Select or create a Cloud Platform project.`_
-2. `Enable billing for your project.`_
-3. Enable the Google Cloud Batch API.
-4. `Setup Authentication.`_
-
-.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
-.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
-.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
-
-Installation
-~~~~~~~~~~~~
-
-Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
-create isolated Python environments. The basic problem it addresses is one of
-dependencies and versions, and indirectly permissions.
-
-With `virtualenv`_, it's possible to install this library without needing system
-install permissions, and without clashing with the installed system
-dependencies.
-
-.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
-
-
-Mac/Linux
-^^^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    source <your-env>/bin/activate
-    <your-env>/bin/pip install /path/to/library
-
-
-Windows
-^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    <your-env>\Scripts\activate
-    <your-env>\Scripts\pip.exe install \path\to\library
-
-
-Logging
--------
-
-This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes.
-Note the following:
-
-#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging.
-#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**.
-#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below.
-
-
-Simple, environment-based configuration
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google
-logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged
-messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging
-event.
-
-A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log.
-
-- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc.
-- Invalid logging scopes: :code:`foo`, :code:`123`, etc.
-
-**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers.
-
-
-Examples
-^^^^^^^^
-
-- Enabling the default handler for all Google-based loggers
-
-.. code-block:: console
-
-    export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google
-
-- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`):
-
-.. code-block:: console
-
-    export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1
-
-
-Advanced, code-based configuration
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-You can also configure a valid logging scope using Python's standard `logging` mechanism.
-
-
-Examples
-^^^^^^^^
-
-- Configuring a handler for all Google-based loggers
-
-.. code-block:: python
-
-    import logging
-
-    from google.cloud.translate_v3 import translate
-
-    base_logger = logging.getLogger("google")
-    base_logger.addHandler(logging.StreamHandler())
-    base_logger.setLevel(logging.DEBUG)
-
-- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`):
-
-.. code-block:: python
-
-    import logging
-
-    from google.cloud.translate_v3 import translate
-
-    base_logger = logging.getLogger("google.cloud.library_v1")
-    base_logger.addHandler(logging.StreamHandler())
-    base_logger.setLevel(logging.DEBUG)
-
-
-Logging details
-~~~~~~~~~~~~~~~
-
-#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root
-   logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set
-   :code:`logging.getLogger("google").propagate = True` in your code.
-#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for
-   one library, but decide you need to also set up environment-based logging configuration for another library.
-
-   #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual
-      if the code-based configuration gets applied first.
-
-#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get
-   executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured.
-   (This is the reason for 2.i. above.)
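The propagation opt-in in item 1 above pairs with a handler on the root logger; a minimal sketch, assuming you simply want the library's DEBUG-level events to surface on stderr (the handler and level shown are illustrative, not mandated by the library):

.. code-block:: python

    import logging

    # Install a handler on the root logger; basicConfig is one simple way.
    logging.basicConfig(level=logging.DEBUG)

    # The "google" logger does not propagate to the root logger by default,
    # so opt in explicitly; the root handler above will then see the events.
    logging.getLogger("google").propagate = True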
diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/docs/_static/custom.css b/owl-bot-staging/google-cloud-batch/v1alpha/docs/_static/custom.css deleted file mode 100644 index 06423be0b592..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/docs/_static/custom.css +++ /dev/null @@ -1,3 +0,0 @@ -dl.field-list > dt { - min-width: 100px -} diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/docs/batch_v1alpha/batch_service.rst b/owl-bot-staging/google-cloud-batch/v1alpha/docs/batch_v1alpha/batch_service.rst deleted file mode 100644 index abed82d29c87..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/docs/batch_v1alpha/batch_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -BatchService ------------------------------- - -.. automodule:: google.cloud.batch_v1alpha.services.batch_service - :members: - :inherited-members: - -.. automodule:: google.cloud.batch_v1alpha.services.batch_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/docs/batch_v1alpha/services_.rst b/owl-bot-staging/google-cloud-batch/v1alpha/docs/batch_v1alpha/services_.rst deleted file mode 100644 index 5bc73cef457c..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/docs/batch_v1alpha/services_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Batch v1alpha API -=========================================== -.. toctree:: - :maxdepth: 2 - - batch_service diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/docs/batch_v1alpha/types_.rst b/owl-bot-staging/google-cloud-batch/v1alpha/docs/batch_v1alpha/types_.rst deleted file mode 100644 index c93a1c493863..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/docs/batch_v1alpha/types_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Batch v1alpha API -======================================== - -.. automodule:: google.cloud.batch_v1alpha.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/docs/conf.py b/owl-bot-staging/google-cloud-batch/v1alpha/docs/conf.py deleted file mode 100644 index cf072699db6e..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-batch documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. 
-sys.path.insert(0, os.path.abspath(".."))
-
-__version__ = "0.1.0"
-
-# -- General configuration ------------------------------------------------
-
-# If your documentation needs a minimal Sphinx version, state it here.
-needs_sphinx = "4.0.1"
-
-# Add any Sphinx extension module names here, as strings. They can be
-# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
-# ones.
-extensions = [
-    "sphinx.ext.autodoc",
-    "sphinx.ext.autosummary",
-    "sphinx.ext.intersphinx",
-    "sphinx.ext.coverage",
-    "sphinx.ext.napoleon",
-    "sphinx.ext.todo",
-    "sphinx.ext.viewcode",
-]
-
-# autodoc/autosummary flags
-autoclass_content = "both"
-autodoc_default_flags = ["members"]
-autosummary_generate = True
-
-
-# Add any paths that contain templates here, relative to this directory.
-templates_path = ["_templates"]
-
-# Allow markdown includes (so releases.md can include CHANGELOG.md)
-# http://www.sphinx-doc.org/en/master/markdown.html
-source_parsers = {".md": "recommonmark.parser.CommonMarkParser"}
-
-# The suffix(es) of source filenames.
-# You can specify multiple suffixes as a list of strings:
-source_suffix = [".rst", ".md"]
-
-# The encoding of source files.
-# source_encoding = 'utf-8-sig'
-
-# The root toctree document.
-root_doc = "index"
-
-# General information about the project.
-project = u"google-cloud-batch"
-copyright = u"2023, Google, LLC"
-author = u"Google APIs"  # TODO: autogenerate this bit
-
-# The version info for the project you're documenting, acts as replacement for
-# |version| and |release|, also used in various other places throughout the
-# built documents.
-#
-# The full version, including alpha/beta/rc tags.
-release = __version__
-# The short X.Y version.
-version = ".".join(release.split(".")[0:2])
-
-# The language for content autogenerated by Sphinx. Refer to documentation
-# for a list of supported languages.
-#
-# This is also used if you do content translation via gettext catalogs.
-# Usually you set "language" from the command line for these cases.
-language = 'en'
-
-# There are two options for replacing |today|: either, you set today to some
-# non-false value, then it is used:
-# today = ''
-# Else, today_fmt is used as the format for a strftime call.
-# today_fmt = '%B %d, %Y'
-
-# List of patterns, relative to source directory, that match files and
-# directories to ignore when looking for source files.
-exclude_patterns = ["_build"]
-
-# The reST default role (used for this markup: `text`) to use for all
-# documents.
-# default_role = None
-
-# If true, '()' will be appended to :func: etc. cross-reference text.
-# add_function_parentheses = True
-
-# If true, the current module name will be prepended to all description
-# unit titles (such as .. function::).
-# add_module_names = True
-
-# If true, sectionauthor and moduleauthor directives will be shown in the
-# output. They are ignored by default.
-# show_authors = False
-
-# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = "sphinx"
-
-# A list of ignored prefixes for module index sorting.
-# modindex_common_prefix = []
-
-# If true, keep warnings as "system message" paragraphs in the built documents.
-# keep_warnings = False
-
-# If true, `todo` and `todoList` produce output, else they produce nothing.
-todo_include_todos = True
-
-
-# -- Options for HTML output ----------------------------------------------
-
-# The theme to use for HTML and HTML Help pages. See the documentation for
-# a list of builtin themes.
-html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. 
-# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-batch-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-batch.tex", - u"google-cloud-batch Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-batch", - u"Google Cloud Batch Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-batch", - u"google-cloud-batch Documentation", - author, - "google-cloud-batch", - "GAPIC library for Google Cloud Batch API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. 
-intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/docs/index.rst b/owl-bot-staging/google-cloud-batch/v1alpha/docs/index.rst deleted file mode 100644 index d28c672355ae..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - batch_v1alpha/services_ - batch_v1alpha/types_ diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch/__init__.py b/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch/__init__.py deleted file mode 100644 index 727bb2b44742..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch/__init__.py +++ /dev/null @@ -1,123 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.batch import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.batch_v1alpha.services.batch_service.client import BatchServiceClient -from google.cloud.batch_v1alpha.services.batch_service.async_client import BatchServiceAsyncClient - -from google.cloud.batch_v1alpha.types.batch import CancelJobRequest -from google.cloud.batch_v1alpha.types.batch import CancelJobResponse -from google.cloud.batch_v1alpha.types.batch import CreateJobRequest -from google.cloud.batch_v1alpha.types.batch import CreateResourceAllowanceRequest -from google.cloud.batch_v1alpha.types.batch import DeleteJobRequest -from google.cloud.batch_v1alpha.types.batch import DeleteResourceAllowanceRequest -from google.cloud.batch_v1alpha.types.batch import GetJobRequest -from google.cloud.batch_v1alpha.types.batch import GetResourceAllowanceRequest -from google.cloud.batch_v1alpha.types.batch import GetTaskRequest -from google.cloud.batch_v1alpha.types.batch import ListJobsRequest -from google.cloud.batch_v1alpha.types.batch import ListJobsResponse -from google.cloud.batch_v1alpha.types.batch import ListResourceAllowancesRequest -from google.cloud.batch_v1alpha.types.batch import ListResourceAllowancesResponse -from google.cloud.batch_v1alpha.types.batch import ListTasksRequest -from google.cloud.batch_v1alpha.types.batch import ListTasksResponse -from google.cloud.batch_v1alpha.types.batch import OperationMetadata -from google.cloud.batch_v1alpha.types.batch import UpdateJobRequest -from google.cloud.batch_v1alpha.types.batch import UpdateResourceAllowanceRequest -from google.cloud.batch_v1alpha.types.job import AllocationPolicy -from google.cloud.batch_v1alpha.types.job import Job -from google.cloud.batch_v1alpha.types.job import JobDependency -from google.cloud.batch_v1alpha.types.job import JobNotification -from google.cloud.batch_v1alpha.types.job import JobStatus -from google.cloud.batch_v1alpha.types.job import LogsPolicy -from google.cloud.batch_v1alpha.types.job import ResourceUsage -from google.cloud.batch_v1alpha.types.job import ServiceAccount -from google.cloud.batch_v1alpha.types.job import TaskGroup -from google.cloud.batch_v1alpha.types.notification import Notification -from google.cloud.batch_v1alpha.types.resource_allowance import ResourceAllowance -from google.cloud.batch_v1alpha.types.resource_allowance import UsageResourceAllowance -from google.cloud.batch_v1alpha.types.resource_allowance import UsageResourceAllowanceSpec -from google.cloud.batch_v1alpha.types.resource_allowance import UsageResourceAllowanceStatus -from google.cloud.batch_v1alpha.types.resource_allowance import CalendarPeriod -from google.cloud.batch_v1alpha.types.resource_allowance import ResourceAllowanceState -from google.cloud.batch_v1alpha.types.task import ComputeResource -from google.cloud.batch_v1alpha.types.task import Environment -from google.cloud.batch_v1alpha.types.task import LifecyclePolicy -from google.cloud.batch_v1alpha.types.task import Runnable -from google.cloud.batch_v1alpha.types.task import StatusEvent -from google.cloud.batch_v1alpha.types.task import Task -from google.cloud.batch_v1alpha.types.task import TaskExecution -from google.cloud.batch_v1alpha.types.task import TaskResourceUsage -from google.cloud.batch_v1alpha.types.task import TaskSpec -from google.cloud.batch_v1alpha.types.task import TaskStatus -from google.cloud.batch_v1alpha.types.volume import GCS -from google.cloud.batch_v1alpha.types.volume import NFS -from 
google.cloud.batch_v1alpha.types.volume import PD -from google.cloud.batch_v1alpha.types.volume import Volume - -__all__ = ('BatchServiceClient', - 'BatchServiceAsyncClient', - 'CancelJobRequest', - 'CancelJobResponse', - 'CreateJobRequest', - 'CreateResourceAllowanceRequest', - 'DeleteJobRequest', - 'DeleteResourceAllowanceRequest', - 'GetJobRequest', - 'GetResourceAllowanceRequest', - 'GetTaskRequest', - 'ListJobsRequest', - 'ListJobsResponse', - 'ListResourceAllowancesRequest', - 'ListResourceAllowancesResponse', - 'ListTasksRequest', - 'ListTasksResponse', - 'OperationMetadata', - 'UpdateJobRequest', - 'UpdateResourceAllowanceRequest', - 'AllocationPolicy', - 'Job', - 'JobDependency', - 'JobNotification', - 'JobStatus', - 'LogsPolicy', - 'ResourceUsage', - 'ServiceAccount', - 'TaskGroup', - 'Notification', - 'ResourceAllowance', - 'UsageResourceAllowance', - 'UsageResourceAllowanceSpec', - 'UsageResourceAllowanceStatus', - 'CalendarPeriod', - 'ResourceAllowanceState', - 'ComputeResource', - 'Environment', - 'LifecyclePolicy', - 'Runnable', - 'StatusEvent', - 'Task', - 'TaskExecution', - 'TaskResourceUsage', - 'TaskSpec', - 'TaskStatus', - 'GCS', - 'NFS', - 'PD', - 'Volume', -) diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch/gapic_version.py b/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch/py.typed b/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch/py.typed deleted file mode 100644 index 32c66c8cc211..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-batch package uses inline types. diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/__init__.py b/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/__init__.py deleted file mode 100644 index a9b5b64b039e..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/__init__.py +++ /dev/null @@ -1,124 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.batch_v1alpha import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.batch_service import BatchServiceClient -from .services.batch_service import BatchServiceAsyncClient - -from .types.batch import CancelJobRequest -from .types.batch import CancelJobResponse -from .types.batch import CreateJobRequest -from .types.batch import CreateResourceAllowanceRequest -from .types.batch import DeleteJobRequest -from .types.batch import DeleteResourceAllowanceRequest -from .types.batch import GetJobRequest -from .types.batch import GetResourceAllowanceRequest -from .types.batch import GetTaskRequest -from .types.batch import ListJobsRequest -from .types.batch import ListJobsResponse -from .types.batch import ListResourceAllowancesRequest -from .types.batch import ListResourceAllowancesResponse -from .types.batch import ListTasksRequest -from .types.batch import ListTasksResponse -from .types.batch import OperationMetadata -from .types.batch import UpdateJobRequest -from .types.batch import UpdateResourceAllowanceRequest -from .types.job import AllocationPolicy -from .types.job import Job -from .types.job import JobDependency -from .types.job import JobNotification -from .types.job import JobStatus -from .types.job import LogsPolicy -from .types.job import ResourceUsage -from .types.job import ServiceAccount -from .types.job import TaskGroup -from .types.notification import Notification -from .types.resource_allowance import ResourceAllowance -from .types.resource_allowance import UsageResourceAllowance -from .types.resource_allowance import UsageResourceAllowanceSpec -from .types.resource_allowance import UsageResourceAllowanceStatus -from .types.resource_allowance import CalendarPeriod -from .types.resource_allowance import ResourceAllowanceState -from .types.task import ComputeResource -from .types.task import Environment -from .types.task import LifecyclePolicy -from .types.task import Runnable -from .types.task import StatusEvent -from .types.task import Task -from .types.task import TaskExecution -from .types.task import TaskResourceUsage -from .types.task import TaskSpec -from .types.task import TaskStatus -from .types.volume import GCS -from .types.volume import NFS -from .types.volume import PD -from .types.volume import Volume - -__all__ = ( - 'BatchServiceAsyncClient', -'AllocationPolicy', -'BatchServiceClient', -'CalendarPeriod', -'CancelJobRequest', -'CancelJobResponse', -'ComputeResource', -'CreateJobRequest', -'CreateResourceAllowanceRequest', -'DeleteJobRequest', -'DeleteResourceAllowanceRequest', -'Environment', -'GCS', -'GetJobRequest', -'GetResourceAllowanceRequest', -'GetTaskRequest', -'Job', -'JobDependency', -'JobNotification', -'JobStatus', -'LifecyclePolicy', -'ListJobsRequest', -'ListJobsResponse', -'ListResourceAllowancesRequest', -'ListResourceAllowancesResponse', -'ListTasksRequest', -'ListTasksResponse', -'LogsPolicy', -'NFS', -'Notification', -'OperationMetadata', -'PD', -'ResourceAllowance', -'ResourceAllowanceState', -'ResourceUsage', -'Runnable', -'ServiceAccount', -'StatusEvent', -'Task', -'TaskExecution', -'TaskGroup', -'TaskResourceUsage', -'TaskSpec', -'TaskStatus', -'UpdateJobRequest', -'UpdateResourceAllowanceRequest', -'UsageResourceAllowance', -'UsageResourceAllowanceSpec', -'UsageResourceAllowanceStatus', -'Volume', -) diff --git 
a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/gapic_metadata.json b/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/gapic_metadata.json deleted file mode 100644 index d2eeeb1176cf..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/gapic_metadata.json +++ /dev/null @@ -1,223 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.batch_v1alpha", - "protoPackage": "google.cloud.batch.v1alpha", - "schema": "1.0", - "services": { - "BatchService": { - "clients": { - "grpc": { - "libraryClient": "BatchServiceClient", - "rpcs": { - "CancelJob": { - "methods": [ - "cancel_job" - ] - }, - "CreateJob": { - "methods": [ - "create_job" - ] - }, - "CreateResourceAllowance": { - "methods": [ - "create_resource_allowance" - ] - }, - "DeleteJob": { - "methods": [ - "delete_job" - ] - }, - "DeleteResourceAllowance": { - "methods": [ - "delete_resource_allowance" - ] - }, - "GetJob": { - "methods": [ - "get_job" - ] - }, - "GetResourceAllowance": { - "methods": [ - "get_resource_allowance" - ] - }, - "GetTask": { - "methods": [ - "get_task" - ] - }, - "ListJobs": { - "methods": [ - "list_jobs" - ] - }, - "ListResourceAllowances": { - "methods": [ - "list_resource_allowances" - ] - }, - "ListTasks": { - "methods": [ - "list_tasks" - ] - }, - "UpdateJob": { - "methods": [ - "update_job" - ] - }, - "UpdateResourceAllowance": { - "methods": [ - "update_resource_allowance" - ] - } - } - }, - "grpc-async": { - "libraryClient": "BatchServiceAsyncClient", - "rpcs": { - "CancelJob": { - "methods": [ - "cancel_job" - ] - }, - "CreateJob": { - "methods": [ - "create_job" - ] - }, - "CreateResourceAllowance": { - "methods": [ - "create_resource_allowance" - ] - }, - "DeleteJob": { - "methods": [ - "delete_job" - ] - }, - "DeleteResourceAllowance": { - "methods": [ - "delete_resource_allowance" - ] - }, - "GetJob": { - "methods": [ - "get_job" - ] - }, - "GetResourceAllowance": { - "methods": [ - "get_resource_allowance" - ] - }, - "GetTask": { - "methods": [ - "get_task" - ] - }, - "ListJobs": { - "methods": [ - "list_jobs" - ] - }, - "ListResourceAllowances": { - "methods": [ - "list_resource_allowances" - ] - }, - "ListTasks": { - "methods": [ - "list_tasks" - ] - }, - "UpdateJob": { - "methods": [ - "update_job" - ] - }, - "UpdateResourceAllowance": { - "methods": [ - "update_resource_allowance" - ] - } - } - }, - "rest": { - "libraryClient": "BatchServiceClient", - "rpcs": { - "CancelJob": { - "methods": [ - "cancel_job" - ] - }, - "CreateJob": { - "methods": [ - "create_job" - ] - }, - "CreateResourceAllowance": { - "methods": [ - "create_resource_allowance" - ] - }, - "DeleteJob": { - "methods": [ - "delete_job" - ] - }, - "DeleteResourceAllowance": { - "methods": [ - "delete_resource_allowance" - ] - }, - "GetJob": { - "methods": [ - "get_job" - ] - }, - "GetResourceAllowance": { - "methods": [ - "get_resource_allowance" - ] - }, - "GetTask": { - "methods": [ - "get_task" - ] - }, - "ListJobs": { - "methods": [ - "list_jobs" - ] - }, - "ListResourceAllowances": { - "methods": [ - "list_resource_allowances" - ] - }, - "ListTasks": { - "methods": [ - "list_tasks" - ] - }, - "UpdateJob": { - "methods": [ - "update_job" - ] - }, - "UpdateResourceAllowance": { - "methods": [ - "update_resource_allowance" - ] - } - } - } - } - } - } -} diff --git 
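The ``gapic_metadata.json`` deleted above is a machine-readable index from proto RPC names to the client methods that implement them. A short sketch of resolving an RPC through it; the local path is hypothetical, but the schema matches the file shown:

.. code-block:: python

    import json

    with open("gapic_metadata.json") as fh:  # hypothetical path
        metadata = json.load(fh)

    # Walk the schema shown above: service -> transport -> RPC.
    rpcs = metadata["services"]["BatchService"]["clients"]["grpc"]["rpcs"]
    print(rpcs["CancelJob"]["methods"])  # -> ['cancel_job']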
a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/gapic_version.py b/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/py.typed b/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/py.typed deleted file mode 100644 index 32c66c8cc211..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-batch package uses inline types. diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/services/__init__.py b/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/services/__init__.py deleted file mode 100644 index 8f6cf068242c..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/services/batch_service/__init__.py b/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/services/batch_service/__init__.py deleted file mode 100644 index 17519904ba19..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/services/batch_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .client import BatchServiceClient -from .async_client import BatchServiceAsyncClient - -__all__ = ( - 'BatchServiceClient', - 'BatchServiceAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/services/batch_service/async_client.py b/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/services/batch_service/async_client.py deleted file mode 100644 index 8f35bd8477b3..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/services/batch_service/async_client.py +++ /dev/null @@ -1,2162 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.batch_v1alpha import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.batch_v1alpha.services.batch_service import pagers -from google.cloud.batch_v1alpha.types import batch -from google.cloud.batch_v1alpha.types import job -from google.cloud.batch_v1alpha.types import job as gcb_job -from google.cloud.batch_v1alpha.types import notification -from google.cloud.batch_v1alpha.types import resource_allowance -from google.cloud.batch_v1alpha.types import resource_allowance as gcb_resource_allowance -from google.cloud.batch_v1alpha.types import task -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import BatchServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import BatchServiceGrpcAsyncIOTransport -from .client import BatchServiceClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -class BatchServiceAsyncClient: - """Google Batch Service. - The service manages user submitted batch jobs and allocates - Google Compute Engine VM instances to run the jobs. 
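The async client defined in the file deleted here mirrors the synchronous surface method-for-method. A minimal sketch, assuming ``google-cloud-batch`` and default credentials, of driving it from ``asyncio`` (the resource name is hypothetical):

.. code-block:: python

    import asyncio

    from google.cloud import batch_v1alpha

    async def main() -> None:
        client = batch_v1alpha.BatchServiceAsyncClient()
        # get_job accepts the flattened `name` argument directly.
        job = await client.get_job(
            name="projects/my-project/locations/us-central1/jobs/my-job",
        )
        print(job.status.state)

    asyncio.run(main())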
- """ - - _client: BatchServiceClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = BatchServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = BatchServiceClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = BatchServiceClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = BatchServiceClient._DEFAULT_UNIVERSE - - job_path = staticmethod(BatchServiceClient.job_path) - parse_job_path = staticmethod(BatchServiceClient.parse_job_path) - resource_allowance_path = staticmethod(BatchServiceClient.resource_allowance_path) - parse_resource_allowance_path = staticmethod(BatchServiceClient.parse_resource_allowance_path) - task_path = staticmethod(BatchServiceClient.task_path) - parse_task_path = staticmethod(BatchServiceClient.parse_task_path) - task_group_path = staticmethod(BatchServiceClient.task_group_path) - parse_task_group_path = staticmethod(BatchServiceClient.parse_task_group_path) - common_billing_account_path = staticmethod(BatchServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(BatchServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(BatchServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(BatchServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(BatchServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(BatchServiceClient.parse_common_organization_path) - common_project_path = staticmethod(BatchServiceClient.common_project_path) - parse_common_project_path = staticmethod(BatchServiceClient.parse_common_project_path) - common_location_path = staticmethod(BatchServiceClient.common_location_path) - parse_common_location_path = staticmethod(BatchServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - BatchServiceAsyncClient: The constructed client. - """ - return BatchServiceClient.from_service_account_info.__func__(BatchServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - BatchServiceAsyncClient: The constructed client. - """ - return BatchServiceClient.from_service_account_file.__func__(BatchServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. 
- (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return BatchServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> BatchServiceTransport: - """Returns the transport used by the client instance. - - Returns: - BatchServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = BatchServiceClient.get_transport_class - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, BatchServiceTransport, Callable[..., BatchServiceTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the batch service async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,BatchServiceTransport,Callable[..., BatchServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the BatchServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. 
Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = BatchServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.batch_v1alpha.BatchServiceAsyncClient`.", - extra = { - "serviceName": "google.cloud.batch.v1alpha.BatchService", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { - "serviceName": "google.cloud.batch.v1alpha.BatchService", - "credentialsType": None, - } - ) - - async def create_job(self, - request: Optional[Union[batch.CreateJobRequest, dict]] = None, - *, - parent: Optional[str] = None, - job: Optional[gcb_job.Job] = None, - job_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gcb_job.Job: - r"""Create a Job. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
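A minimal sketch of the ``client_options`` behaviour described in the constructor docstring above: pinning ``api_endpoint`` explicitly, which takes precedence over the ``GOOGLE_API_USE_MTLS_ENDPOINT`` auto-selection. The host shown is the service default, not a requirement:

.. code-block:: python

    from google.api_core.client_options import ClientOptions
    from google.cloud import batch_v1alpha

    # An explicit endpoint bypasses the mTLS endpoint auto-switching
    # described above; universe_domain could be set here the same way.
    options = ClientOptions(api_endpoint="batch.googleapis.com")
    client = batch_v1alpha.BatchServiceClient(client_options=options)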
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import batch_v1alpha - - async def sample_create_job(): - # Create a client - client = batch_v1alpha.BatchServiceAsyncClient() - - # Initialize request argument(s) - request = batch_v1alpha.CreateJobRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_job(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.batch_v1alpha.types.CreateJobRequest, dict]]): - The request object. CreateJob Request. - parent (:class:`str`): - Required. The parent resource name - where the Job will be created. Pattern: - "projects/{project}/locations/{location}" - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job (:class:`google.cloud.batch_v1alpha.types.Job`): - Required. The Job to create. - This corresponds to the ``job`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job_id (:class:`str`): - ID used to uniquely identify the Job within its parent - scope. This field should contain at most 63 characters - and must start with lowercase characters. Only lowercase - characters, numbers and '-' are accepted. The '-' - character cannot be the first or the last one. A system - generated ID will be used if the field is not set. - - The job.name field in the request will be ignored and - the created resource name of the Job will be - "{parent}/jobs/{job_id}". - - This corresponds to the ``job_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.batch_v1alpha.types.Job: - The Cloud Batch Job description. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, job, job_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, batch.CreateJobRequest): - request = batch.CreateJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if job is not None: - request.job = job - if job_id is not None: - request.job_id = job_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.create_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_job(self, - request: Optional[Union[batch.GetJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> job.Job: - r"""Get a Job specified by its resource name. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import batch_v1alpha - - async def sample_get_job(): - # Create a client - client = batch_v1alpha.BatchServiceAsyncClient() - - # Initialize request argument(s) - request = batch_v1alpha.GetJobRequest( - name="name_value", - ) - - # Make the request - response = await client.get_job(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.batch_v1alpha.types.GetJobRequest, dict]]): - The request object. GetJob Request. - name (:class:`str`): - Required. Job name. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.batch_v1alpha.types.Job: - The Cloud Batch Job description. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, batch.GetJobRequest): - request = batch.GetJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
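The generated ``create_job`` sample above initializes only ``parent``; a real request also needs at least one task group with a runnable. A sketch, assuming ``google-cloud-batch``, with hypothetical project, image, and IDs:

.. code-block:: python

    from google.cloud import batch_v1alpha

    def make_request() -> batch_v1alpha.CreateJobRequest:
        # One task running one container; all values are illustrative.
        runnable = batch_v1alpha.Runnable(
            container=batch_v1alpha.Runnable.Container(
                image_uri="gcr.io/google-containers/busybox",  # hypothetical
                commands=["echo", "hello"],
            )
        )
        task = batch_v1alpha.TaskSpec(runnables=[runnable])
        group = batch_v1alpha.TaskGroup(task_spec=task, task_count=1)
        return batch_v1alpha.CreateJobRequest(
            parent="projects/my-project/locations/us-central1",  # hypothetical
            job_id="hello-job",  # hypothetical
            job=batch_v1alpha.Job(task_groups=[group]),
        )

    # With the async client above: job = await client.create_job(
    #     request=make_request())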
- rpc = self._client._transport._wrapped_methods[self._client._transport.get_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_job(self, - request: Optional[Union[batch.DeleteJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Delete a Job. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import batch_v1alpha - - async def sample_delete_job(): - # Create a client - client = batch_v1alpha.BatchServiceAsyncClient() - - # Initialize request argument(s) - request = batch_v1alpha.DeleteJobRequest( - ) - - # Make the request - operation = client.delete_job(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.batch_v1alpha.types.DeleteJobRequest, dict]]): - The request object. DeleteJob Request. - name (:class:`str`): - Job name. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, batch.DeleteJobRequest): - request = batch.DeleteJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=batch.OperationMetadata, - ) - - # Done; return the response. - return response - - async def cancel_job(self, - request: Optional[Union[batch.CancelJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Cancel a Job. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import batch_v1alpha - - async def sample_cancel_job(): - # Create a client - client = batch_v1alpha.BatchServiceAsyncClient() - - # Initialize request argument(s) - request = batch_v1alpha.CancelJobRequest( - name="name_value", - ) - - # Make the request - operation = client.cancel_job(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.batch_v1alpha.types.CancelJobRequest, dict]]): - The request object. CancelJob Request. - name (:class:`str`): - Required. Job name. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.batch_v1alpha.types.CancelJobResponse` - Response to the CancelJob request. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, batch.CancelJobRequest): - request = batch.CancelJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.cancel_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - batch.CancelJobResponse, - metadata_type=batch.OperationMetadata, - ) - - # Done; return the response. - return response - - async def update_job(self, - request: Optional[Union[batch.UpdateJobRequest, dict]] = None, - *, - job: Optional[gcb_job.Job] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gcb_job.Job: - r"""Update a Job. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import batch_v1alpha - - async def sample_update_job(): - # Create a client - client = batch_v1alpha.BatchServiceAsyncClient() - - # Initialize request argument(s) - request = batch_v1alpha.UpdateJobRequest( - ) - - # Make the request - response = await client.update_job(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.batch_v1alpha.types.UpdateJobRequest, dict]]): - The request object. UpdateJob Request. - job (:class:`google.cloud.batch_v1alpha.types.Job`): - Required. The Job to update. Only fields specified in - ``updateMask`` are updated. - - This corresponds to the ``job`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Mask of fields to update. - - The ``jobs.patch`` method can only be used while a job - is in the ``QUEUED``, ``SCHEDULED``, or ``RUNNING`` - state and currently only supports increasing the value - of the first ``taskCount`` field in the job's - ``taskGroups`` field. Therefore, you must set the value - of ``updateMask`` to ``taskGroups``. Any other job - fields in the update request will be ignored. - - For example, to update a job's ``taskCount`` to ``2``, - set ``updateMask`` to ``taskGroups`` and use the - following request body: - - :: - - { - "taskGroups":[{ - "taskCount": 2 - }] - } - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.batch_v1alpha.types.Job: - The Cloud Batch Job description. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [job, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, batch.UpdateJobRequest): - request = batch.UpdateJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if job is not None: - request.job = job - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("job.name", request.job.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_jobs(self, - request: Optional[Union[batch.ListJobsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListJobsAsyncPager: - r"""List all Jobs for a project within a region. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
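A sketch of the one update the ``update_mask`` docstring above says ``jobs.patch`` currently supports: raising ``taskCount`` on the first task group. Shown with the synchronous client for brevity; the job name is hypothetical, and ``task_groups`` is the snake_case FieldMask path corresponding to the ``taskGroups`` mask in the JSON example:

.. code-block:: python

    from google.cloud import batch_v1alpha
    from google.protobuf import field_mask_pb2

    client = batch_v1alpha.BatchServiceClient()
    job = batch_v1alpha.Job(
        name="projects/my-project/locations/us-central1/jobs/my-job",  # hypothetical
        task_groups=[batch_v1alpha.TaskGroup(task_count=2)],
    )
    updated = client.update_job(
        job=job,
        update_mask=field_mask_pb2.FieldMask(paths=["task_groups"]),
    )
    print(updated.update_time)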
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import batch_v1alpha - - async def sample_list_jobs(): - # Create a client - client = batch_v1alpha.BatchServiceAsyncClient() - - # Initialize request argument(s) - request = batch_v1alpha.ListJobsRequest( - ) - - # Make the request - page_result = client.list_jobs(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.batch_v1alpha.types.ListJobsRequest, dict]]): - The request object. ListJob Request. - parent (:class:`str`): - Parent path. - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.batch_v1alpha.services.batch_service.pagers.ListJobsAsyncPager: - ListJob Response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, batch.ListJobsRequest): - request = batch.ListJobsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_jobs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListJobsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def get_task(self, - request: Optional[Union[batch.GetTaskRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> task.Task: - r"""Return a single Task. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import batch_v1alpha - - async def sample_get_task(): - # Create a client - client = batch_v1alpha.BatchServiceAsyncClient() - - # Initialize request argument(s) - request = batch_v1alpha.GetTaskRequest( - name="name_value", - ) - - # Make the request - response = await client.get_task(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.batch_v1alpha.types.GetTaskRequest, dict]]): - The request object. Request for a single Task by name. - name (:class:`str`): - Required. Task name. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.batch_v1alpha.types.Task: - A Cloud Batch task. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, batch.GetTaskRequest): - request = batch.GetTaskRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_task] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
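``list_jobs`` above returns an async pager rather than a plain list, and ``list_tasks`` below behaves the same way. A sketch, assuming ``google-cloud-batch``, of walking every task in a task group (the parent path, including the conventional ``group0`` ID, is hypothetical):

.. code-block:: python

    import asyncio

    from google.cloud import batch_v1alpha

    async def main() -> None:
        client = batch_v1alpha.BatchServiceAsyncClient()
        pager = await client.list_tasks(
            parent=(
                "projects/my-project/locations/us-central1"
                "/jobs/my-job/taskGroups/group0"
            ),
        )
        # The async pager fetches follow-up pages on demand.
        async for task in pager:
            print(task.name, task.status.state)

    asyncio.run(main())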
- return response - - async def list_tasks(self, - request: Optional[Union[batch.ListTasksRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListTasksAsyncPager: - r"""List Tasks associated with a job. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import batch_v1alpha - - async def sample_list_tasks(): - # Create a client - client = batch_v1alpha.BatchServiceAsyncClient() - - # Initialize request argument(s) - request = batch_v1alpha.ListTasksRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_tasks(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.batch_v1alpha.types.ListTasksRequest, dict]]): - The request object. ListTasks Request. - parent (:class:`str`): - Required. Name of a TaskGroup from which Tasks are being - requested. Pattern: - "projects/{project}/locations/{location}/jobs/{job}/taskGroups/{task_group}" - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.batch_v1alpha.services.batch_service.pagers.ListTasksAsyncPager: - ListTasks Response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, batch.ListTasksRequest): - request = batch.ListTasksRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_tasks] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListTasksAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_resource_allowance(self, - request: Optional[Union[batch.CreateResourceAllowanceRequest, dict]] = None, - *, - parent: Optional[str] = None, - resource_allowance: Optional[gcb_resource_allowance.ResourceAllowance] = None, - resource_allowance_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gcb_resource_allowance.ResourceAllowance: - r"""Create a Resource Allowance. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import batch_v1alpha - - async def sample_create_resource_allowance(): - # Create a client - client = batch_v1alpha.BatchServiceAsyncClient() - - # Initialize request argument(s) - resource_allowance = batch_v1alpha.ResourceAllowance() - resource_allowance.usage_resource_allowance.spec.type_ = "type__value" - resource_allowance.usage_resource_allowance.spec.limit.calendar_period = "DAY" - - request = batch_v1alpha.CreateResourceAllowanceRequest( - parent="parent_value", - resource_allowance=resource_allowance, - ) - - # Make the request - response = await client.create_resource_allowance(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.batch_v1alpha.types.CreateResourceAllowanceRequest, dict]]): - The request object. CreateResourceAllowance Request. - parent (:class:`str`): - Required. The parent resource name - where the ResourceAllowance will be - created. Pattern: - "projects/{project}/locations/{location}" - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - resource_allowance (:class:`google.cloud.batch_v1alpha.types.ResourceAllowance`): - Required. The ResourceAllowance to - create. - - This corresponds to the ``resource_allowance`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - resource_allowance_id (:class:`str`): - ID used to uniquely identify the ResourceAllowance - within its parent scope. This field should contain at - most 63 characters and must start with lowercase - characters. Only lowercase characters, numbers and '-' - are accepted. The '-' character cannot be the first or - the last one. A system generated ID will be used if the - field is not set. 
- - The resource_allowance.name field in the request will be - ignored and the created resource name of the - ResourceAllowance will be - "{parent}/resourceAllowances/{resource_allowance_id}". - - This corresponds to the ``resource_allowance_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.batch_v1alpha.types.ResourceAllowance: - The Resource Allowance description - for Cloud Batch. Only one Resource - Allowance is supported now under a - specific location and project. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, resource_allowance, resource_allowance_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, batch.CreateResourceAllowanceRequest): - request = batch.CreateResourceAllowanceRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if resource_allowance is not None: - request.resource_allowance = resource_allowance - if resource_allowance_id is not None: - request.resource_allowance_id = resource_allowance_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_resource_allowance] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_resource_allowance(self, - request: Optional[Union[batch.GetResourceAllowanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> resource_allowance.ResourceAllowance: - r"""Get a ResourceAllowance specified by its resource - name. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import batch_v1alpha
-
-            async def sample_get_resource_allowance():
-                # Create a client
-                client = batch_v1alpha.BatchServiceAsyncClient()
-
-                # Initialize request argument(s)
-                request = batch_v1alpha.GetResourceAllowanceRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                response = await client.get_resource_allowance(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.batch_v1alpha.types.GetResourceAllowanceRequest, dict]]):
-                The request object. GetResourceAllowance Request.
-            name (:class:`str`):
-                Required. ResourceAllowance name.
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.batch_v1alpha.types.ResourceAllowance:
-                The Resource Allowance description
-                for Cloud Batch. Only one Resource
-                Allowance is supported now under a
-                specific location and project.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [name]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, batch.GetResourceAllowanceRequest):
-            request = batch.GetResourceAllowanceRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if name is not None:
-            request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.get_resource_allowance]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def delete_resource_allowance(self,
-            request: Optional[Union[batch.DeleteResourceAllowanceRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> operation_async.AsyncOperation:
-        r"""Delete a ResourceAllowance.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import batch_v1alpha
-
-            async def sample_delete_resource_allowance():
-                # Create a client
-                client = batch_v1alpha.BatchServiceAsyncClient()
-
-                # Initialize request argument(s)
-                request = batch_v1alpha.DeleteResourceAllowanceRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                operation = client.delete_resource_allowance(request=request)
-
-                print("Waiting for operation to complete...")
-
-                response = (await operation).result()
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.batch_v1alpha.types.DeleteResourceAllowanceRequest, dict]]):
-                The request object. DeleteResourceAllowance Request.
-            name (:class:`str`):
-                Required. ResourceAllowance name.
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.api_core.operation_async.AsyncOperation:
-                An object representing a long-running operation.
-
-                The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
-                empty messages in your APIs. A typical example is to
-                use it as the request or the response type of an API
-                method. For instance:
-
-                service Foo {
-                    rpc Bar(google.protobuf.Empty) returns
-                    (google.protobuf.Empty);
-
-                }
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [name]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, batch.DeleteResourceAllowanceRequest):
-            request = batch.DeleteResourceAllowanceRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if name is not None:
-            request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.delete_resource_allowance]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
- self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=batch.OperationMetadata, - ) - - # Done; return the response. - return response - - async def list_resource_allowances(self, - request: Optional[Union[batch.ListResourceAllowancesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListResourceAllowancesAsyncPager: - r"""List all ResourceAllowances for a project within a - region. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import batch_v1alpha - - async def sample_list_resource_allowances(): - # Create a client - client = batch_v1alpha.BatchServiceAsyncClient() - - # Initialize request argument(s) - request = batch_v1alpha.ListResourceAllowancesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_resource_allowances(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.batch_v1alpha.types.ListResourceAllowancesRequest, dict]]): - The request object. ListResourceAllowances Request. - parent (:class:`str`): - Required. Parent path. - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.batch_v1alpha.services.batch_service.pagers.ListResourceAllowancesAsyncPager: - ListResourceAllowances Response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
- if not isinstance(request, batch.ListResourceAllowancesRequest): - request = batch.ListResourceAllowancesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_resource_allowances] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListResourceAllowancesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_resource_allowance(self, - request: Optional[Union[batch.UpdateResourceAllowanceRequest, dict]] = None, - *, - resource_allowance: Optional[gcb_resource_allowance.ResourceAllowance] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gcb_resource_allowance.ResourceAllowance: - r"""Update a Resource Allowance. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import batch_v1alpha - - async def sample_update_resource_allowance(): - # Create a client - client = batch_v1alpha.BatchServiceAsyncClient() - - # Initialize request argument(s) - resource_allowance = batch_v1alpha.ResourceAllowance() - resource_allowance.usage_resource_allowance.spec.type_ = "type__value" - resource_allowance.usage_resource_allowance.spec.limit.calendar_period = "DAY" - - request = batch_v1alpha.UpdateResourceAllowanceRequest( - resource_allowance=resource_allowance, - ) - - # Make the request - response = await client.update_resource_allowance(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.batch_v1alpha.types.UpdateResourceAllowanceRequest, dict]]): - The request object. UpdateResourceAllowance Request. - resource_allowance (:class:`google.cloud.batch_v1alpha.types.ResourceAllowance`): - Required. The ResourceAllowance to update. Update - description. Only fields specified in ``update_mask`` - are updated. - - This corresponds to the ``resource_allowance`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Mask of fields to update. - - Field mask is used to specify the fields to be - overwritten in the ResourceAllowance resource by the - update. 
The fields specified in the update_mask are - relative to the resource, not the full request. A field - will be overwritten if it is in the mask. If the user - does not provide a mask then all fields will be - overwritten. - - UpdateResourceAllowance request now only supports update - on ``limit`` field. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.batch_v1alpha.types.ResourceAllowance: - The Resource Allowance description - for Cloud Batch. Only one Resource - Allowance is supported now under a - specific location and project. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [resource_allowance, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, batch.UpdateResourceAllowanceRequest): - request = batch.UpdateResourceAllowanceRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if resource_allowance is not None: - request.resource_allowance = resource_allowance - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_resource_allowance] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource_allowance.name", request.resource_allowance.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. 
It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
- await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. 
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("name", request.name),)),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request, retry=retry, timeout=timeout, metadata=metadata,)
-
-        # Done; return the response.
-        return response
-
-    async def __aenter__(self) -> "BatchServiceAsyncClient":
-        return self
-
-    async def __aexit__(self, exc_type, exc, tb):
-        await self.transport.close()
-
-DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)
-
-
-__all__ = (
-    "BatchServiceAsyncClient",
-)
diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/services/batch_service/client.py b/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/services/batch_service/client.py
deleted file mode 100644
index 1f8d30895357..000000000000
--- a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/services/batch_service/client.py
+++ /dev/null
@@ -1,2549 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-from collections import OrderedDict
-from http import HTTPStatus
-import json
-import logging as std_logging
-import os
-import re
-from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast
-import warnings
-
-from google.cloud.batch_v1alpha import gapic_version as package_version
-
-from google.api_core import client_options as client_options_lib
-from google.api_core import exceptions as core_exceptions
-from google.api_core import gapic_v1
-from google.api_core import retry as retries
-from google.auth import credentials as ga_credentials  # type: ignore
-from google.auth.transport import mtls  # type: ignore
-from google.auth.transport.grpc import SslCredentials  # type: ignore
-from google.auth.exceptions import MutualTLSChannelError  # type: ignore
-from google.oauth2 import service_account  # type: ignore
-
-try:
-    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
-except AttributeError:  # pragma: NO COVER
-    OptionalRetry = Union[retries.Retry, object, None]  # type: ignore
-
-try:
-    from google.api_core import client_logging  # type: ignore
-    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
-except ImportError:  # pragma: NO COVER
-    CLIENT_LOGGING_SUPPORTED = False
-
-_LOGGER = std_logging.getLogger(__name__)
-
-from google.api_core import operation  # type: ignore
-from google.api_core import operation_async  # type: ignore
-from google.cloud.batch_v1alpha.services.batch_service import pagers
-from google.cloud.batch_v1alpha.types import batch
-from google.cloud.batch_v1alpha.types import job
-from google.cloud.batch_v1alpha.types import job as gcb_job
-from google.cloud.batch_v1alpha.types import notification
-from google.cloud.batch_v1alpha.types import resource_allowance
-from google.cloud.batch_v1alpha.types import resource_allowance as gcb_resource_allowance
-from google.cloud.batch_v1alpha.types import task
-from google.cloud.location import locations_pb2  # type: ignore
-from google.longrunning import operations_pb2  # type: ignore
-from google.protobuf import empty_pb2  # type: ignore
-from google.protobuf import field_mask_pb2  # type: ignore
-from google.protobuf import timestamp_pb2  # type: ignore
-from .transports.base import BatchServiceTransport, DEFAULT_CLIENT_INFO
-from .transports.grpc import BatchServiceGrpcTransport
-from .transports.grpc_asyncio import BatchServiceGrpcAsyncIOTransport
-from .transports.rest import BatchServiceRestTransport
-
-
-class BatchServiceClientMeta(type):
-    """Metaclass for the BatchService client.
-
-    This provides class-level methods for building and retrieving
-    support objects (e.g. transport) without polluting the client instance
-    objects.
-    """
-    _transport_registry = OrderedDict()  # type: Dict[str, Type[BatchServiceTransport]]
-    _transport_registry["grpc"] = BatchServiceGrpcTransport
-    _transport_registry["grpc_asyncio"] = BatchServiceGrpcAsyncIOTransport
-    _transport_registry["rest"] = BatchServiceRestTransport
-
-    def get_transport_class(cls,
-            label: Optional[str] = None,
-        ) -> Type[BatchServiceTransport]:
-        """Returns an appropriate transport class.
-
-        Args:
-            label: The name of the desired transport. If none is
-                provided, then the first transport in the registry is used.
-
-        Returns:
-            The transport class to use.
-        """
-        # If a specific transport is requested, return that one.
-        if label:
-            return cls._transport_registry[label]
-
-        # No transport is requested; return the default (that is, the first one
-        # in the dictionary).
-        return next(iter(cls._transport_registry.values()))
-
-
-class BatchServiceClient(metaclass=BatchServiceClientMeta):
-    """Google Batch Service.
-    The service manages user submitted batch jobs and allocates
-    Google Compute Engine VM instances to run the jobs.
-    """
-
-    @staticmethod
-    def _get_default_mtls_endpoint(api_endpoint):
-        """Converts api endpoint to mTLS endpoint.
-
-        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
-        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
-        Args:
-            api_endpoint (Optional[str]): the api endpoint to convert.
-        Returns:
-            str: converted mTLS api endpoint.
-        """
-        if not api_endpoint:
-            return api_endpoint
-
-        mtls_endpoint_re = re.compile(
-            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
-        )
-
-        m = mtls_endpoint_re.match(api_endpoint)
-        name, mtls, sandbox, googledomain = m.groups()
-        if mtls or not googledomain:
-            return api_endpoint
-
-        if sandbox:
-            return api_endpoint.replace(
-                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
-            )
-
-        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
-
-    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
-    DEFAULT_ENDPOINT = "batch.googleapis.com"
-    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
-        DEFAULT_ENDPOINT
-    )
-
-    _DEFAULT_ENDPOINT_TEMPLATE = "batch.{UNIVERSE_DOMAIN}"
-    _DEFAULT_UNIVERSE = "googleapis.com"
-
-    @classmethod
-    def from_service_account_info(cls, info: dict, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            info.
-
-        Args:
-            info (dict): The service account private key info.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            BatchServiceClient: The constructed client.
- """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - BatchServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> BatchServiceTransport: - """Returns the transport used by the client instance. - - Returns: - BatchServiceTransport: The transport used by the client - instance. - """ - return self._transport - - @staticmethod - def job_path(project: str,location: str,job: str,) -> str: - """Returns a fully-qualified job string.""" - return "projects/{project}/locations/{location}/jobs/{job}".format(project=project, location=location, job=job, ) - - @staticmethod - def parse_job_path(path: str) -> Dict[str,str]: - """Parses a job path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/jobs/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def resource_allowance_path(project: str,location: str,resource_allowance: str,) -> str: - """Returns a fully-qualified resource_allowance string.""" - return "projects/{project}/locations/{location}/resourceAllowances/{resource_allowance}".format(project=project, location=location, resource_allowance=resource_allowance, ) - - @staticmethod - def parse_resource_allowance_path(path: str) -> Dict[str,str]: - """Parses a resource_allowance path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/resourceAllowances/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def task_path(project: str,location: str,job: str,task_group: str,task: str,) -> str: - """Returns a fully-qualified task string.""" - return "projects/{project}/locations/{location}/jobs/{job}/taskGroups/{task_group}/tasks/{task}".format(project=project, location=location, job=job, task_group=task_group, task=task, ) - - @staticmethod - def parse_task_path(path: str) -> Dict[str,str]: - """Parses a task path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/jobs/(?P.+?)/taskGroups/(?P.+?)/tasks/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def task_group_path(project: str,location: str,job: str,task_group: str,) -> str: - """Returns a fully-qualified task_group string.""" - return "projects/{project}/locations/{location}/jobs/{job}/taskGroups/{task_group}".format(project=project, location=location, job=job, task_group=task_group, ) - - @staticmethod - def parse_task_group_path(path: str) -> Dict[str,str]: - """Parses a task_group path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/jobs/(?P.+?)/taskGroups/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - 
-    @staticmethod
-    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
-        """Parse a billing_account path into its component segments."""
-        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_folder_path(folder: str, ) -> str:
-        """Returns a fully-qualified folder string."""
-        return "folders/{folder}".format(folder=folder, )
-
-    @staticmethod
-    def parse_common_folder_path(path: str) -> Dict[str,str]:
-        """Parse a folder path into its component segments."""
-        m = re.match(r"^folders/(?P<folder>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_organization_path(organization: str, ) -> str:
-        """Returns a fully-qualified organization string."""
-        return "organizations/{organization}".format(organization=organization, )
-
-    @staticmethod
-    def parse_common_organization_path(path: str) -> Dict[str,str]:
-        """Parse an organization path into its component segments."""
-        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_project_path(project: str, ) -> str:
-        """Returns a fully-qualified project string."""
-        return "projects/{project}".format(project=project, )
-
-    @staticmethod
-    def parse_common_project_path(path: str) -> Dict[str,str]:
-        """Parse a project path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_location_path(project: str, location: str, ) -> str:
-        """Returns a fully-qualified location string."""
-        return "projects/{project}/locations/{location}".format(project=project, location=location, )
-
-    @staticmethod
-    def parse_common_location_path(path: str) -> Dict[str,str]:
-        """Parse a location path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @classmethod
-    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
-        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
-
-        The client cert source is determined in the following order:
-        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
-        client cert source is None.
-        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
-        default client cert source exists, use the default one; otherwise the client cert
-        source is None.
-
-        The API endpoint is determined in the following order:
-        (1) if `client_options.api_endpoint` is provided, use the provided one.
-        (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
-        default mTLS endpoint; if the environment variable is "never", use the default API
-        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
-        use the default API endpoint.
-
-        More details can be found at https://google.aip.dev/auth/4114.
-
-        Args:
-            client_options (google.api_core.client_options.ClientOptions): Custom options for the
-                client. Only the `api_endpoint` and `client_cert_source` properties may be used
-                in this method.
-
-        Returns:
-            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
-                client cert source to use.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
-        """
-
-        warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
-            DeprecationWarning)
-        if client_options is None:
-            client_options = client_options_lib.ClientOptions()
-        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
-        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
-        if use_client_cert not in ("true", "false"):
-            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
-        if use_mtls_endpoint not in ("auto", "never", "always"):
-            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
-
-        # Figure out the client cert source to use.
-        client_cert_source = None
-        if use_client_cert == "true":
-            if client_options.client_cert_source:
-                client_cert_source = client_options.client_cert_source
-            elif mtls.has_default_client_cert_source():
-                client_cert_source = mtls.default_client_cert_source()
-
-        # Figure out which api endpoint to use.
-        if client_options.api_endpoint is not None:
-            api_endpoint = client_options.api_endpoint
-        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
-            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
-        else:
-            api_endpoint = cls.DEFAULT_ENDPOINT
-
-        return api_endpoint, client_cert_source
-
-    @staticmethod
-    def _read_environment_variables():
-        """Returns the environment variables used by the client.
-
-        Returns:
-            Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
-            GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.
-
-        Raises:
-            ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
-                any of ["true", "false"].
-            google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
-                is not any of ["auto", "never", "always"].
-        """
-        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower()
-        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower()
-        universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN")
-        if use_client_cert not in ("true", "false"):
-            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
-        if use_mtls_endpoint not in ("auto", "never", "always"):
-            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
-        return use_client_cert == "true", use_mtls_endpoint, universe_domain_env
-
-    @staticmethod
-    def _get_client_cert_source(provided_cert_source, use_cert_flag):
-        """Return the client cert source to be used by the client.
-
-        Args:
-            provided_cert_source (bytes): The client certificate source provided.
-            use_cert_flag (bool): A flag indicating whether to use the client certificate.
-
-        Returns:
-            bytes or None: The client cert source to be used by the client.
-        """
-        client_cert_source = None
-        if use_cert_flag:
-            if provided_cert_source:
-                client_cert_source = provided_cert_source
-            elif mtls.has_default_client_cert_source():
-                client_cert_source = mtls.default_client_cert_source()
-        return client_cert_source
-
-    @staticmethod
-    def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint):
-        """Return the API endpoint used by the client.
-
-        Args:
-            api_override (str): The API endpoint override. If specified, this is always
-                the return value of this function and the other arguments are not used.
-            client_cert_source (bytes): The client certificate source used by the client.
- universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = BatchServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = BatchServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = BatchServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. - """ - universe_domain = BatchServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. - """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. 
- """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, BatchServiceTransport, Callable[..., BatchServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the batch service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,BatchServiceTransport,Callable[..., BatchServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the BatchServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. 
- """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) - - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = BatchServiceClient._read_environment_variables() - self._client_cert_source = BatchServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = BatchServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER - # Setup logging. - client_logging.initialize_logging() - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, BatchServiceTransport) - if transport_provided: - # transport is a BatchServiceTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = cast(BatchServiceTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - BatchServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[BatchServiceTransport], Callable[..., BatchServiceTransport]] = ( - BatchServiceClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., BatchServiceTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.batch_v1alpha.BatchServiceClient`.", - extra = { - "serviceName": "google.cloud.batch.v1alpha.BatchService", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.cloud.batch.v1alpha.BatchService", - "credentialsType": None, - } - ) - - def create_job(self, - request: Optional[Union[batch.CreateJobRequest, dict]] = None, - *, - parent: Optional[str] = None, - job: Optional[gcb_job.Job] = None, - job_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gcb_job.Job: - r"""Create a Job. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import batch_v1alpha - - def sample_create_job(): - # Create a client - client = batch_v1alpha.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1alpha.CreateJobRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_job(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.batch_v1alpha.types.CreateJobRequest, dict]): - The request object. CreateJob Request. - parent (str): - Required. The parent resource name - where the Job will be created. 
-                Pattern:
-                "projects/{project}/locations/{location}"
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            job (google.cloud.batch_v1alpha.types.Job):
-                Required. The Job to create.
-                This corresponds to the ``job`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            job_id (str):
-                ID used to uniquely identify the Job within its parent
-                scope. This field should contain at most 63 characters
-                and must start with lowercase characters. Only lowercase
-                characters, numbers and '-' are accepted. The '-'
-                character cannot be the first or the last one. A system
-                generated ID will be used if the field is not set.
-
-                The job.name field in the request will be ignored and
-                the created resource name of the Job will be
-                "{parent}/jobs/{job_id}".
-
-                This corresponds to the ``job_id`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.batch_v1alpha.types.Job:
-                The Cloud Batch Job description.
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [parent, job, job_id]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, batch.CreateJobRequest):
-            request = batch.CreateJobRequest(request)
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if parent is not None:
-            request.parent = parent
-        if job is not None:
-            request.job = job
-        if job_id is not None:
-            request.job_id = job_id
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.create_job]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def get_job(self,
-            request: Optional[Union[batch.GetJobRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> job.Job:
-        r"""Get a Job specified by its resource name.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import batch_v1alpha
-
-            def sample_get_job():
-                # Create a client
-                client = batch_v1alpha.BatchServiceClient()
-
-                # Initialize request argument(s)
-                request = batch_v1alpha.GetJobRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                response = client.get_job(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.batch_v1alpha.types.GetJobRequest, dict]):
-                The request object. GetJob Request.
-            name (str):
-                Required. Job name.
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.batch_v1alpha.types.Job:
-                The Cloud Batch Job description.
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [name]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, batch.GetJobRequest):
-            request = batch.GetJobRequest(request)
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if name is not None:
-            request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.get_job]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def delete_job(self,
-            request: Optional[Union[batch.DeleteJobRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> operation.Operation:
-        r"""Delete a Job.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import batch_v1alpha - - def sample_delete_job(): - # Create a client - client = batch_v1alpha.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1alpha.DeleteJobRequest( - ) - - # Make the request - operation = client.delete_job(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.batch_v1alpha.types.DeleteJobRequest, dict]): - The request object. DeleteJob Request. - name (str): - Job name. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, batch.DeleteJobRequest): - request = batch.DeleteJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=batch.OperationMetadata, - ) - - # Done; return the response. 
- return response - - def cancel_job(self, - request: Optional[Union[batch.CancelJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Cancel a Job. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import batch_v1alpha - - def sample_cancel_job(): - # Create a client - client = batch_v1alpha.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1alpha.CancelJobRequest( - name="name_value", - ) - - # Make the request - operation = client.cancel_job(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.batch_v1alpha.types.CancelJobRequest, dict]): - The request object. CancelJob Request. - name (str): - Required. Job name. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.batch_v1alpha.types.CancelJobResponse` - Response to the CancelJob request. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, batch.CancelJobRequest): - request = batch.CancelJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.cancel_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - batch.CancelJobResponse, - metadata_type=batch.OperationMetadata, - ) - - # Done; return the response. - return response - - def update_job(self, - request: Optional[Union[batch.UpdateJobRequest, dict]] = None, - *, - job: Optional[gcb_job.Job] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gcb_job.Job: - r"""Update a Job. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import batch_v1alpha - - def sample_update_job(): - # Create a client - client = batch_v1alpha.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1alpha.UpdateJobRequest( - ) - - # Make the request - response = client.update_job(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.batch_v1alpha.types.UpdateJobRequest, dict]): - The request object. UpdateJob Request. - job (google.cloud.batch_v1alpha.types.Job): - Required. The Job to update. Only fields specified in - ``updateMask`` are updated. - - This corresponds to the ``job`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - - The ``jobs.patch`` method can only be used while a job - is in the ``QUEUED``, ``SCHEDULED``, or ``RUNNING`` - state and currently only supports increasing the value - of the first ``taskCount`` field in the job's - ``taskGroups`` field. Therefore, you must set the value - of ``updateMask`` to ``taskGroups``. Any other job - fields in the update request will be ignored. - - For example, to update a job's ``taskCount`` to ``2``, - set ``updateMask`` to ``taskGroups`` and use the - following request body: - - :: - - { - "taskGroups":[{ - "taskCount": 2 - }] - } - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.batch_v1alpha.types.Job: - The Cloud Batch Job description. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
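The ``update_mask`` contract documented in the ``update_job`` docstring above can also be exercised through the flattened arguments. A minimal sketch, assuming hypothetical project, location, and job IDs (none of these names come from the generated code):

```python
# Illustrative only: scale the first task group of a running job to two
# tasks, which is the one documented use of jobs.patch above.
from google.protobuf import field_mask_pb2

from google.cloud import batch_v1alpha

client = batch_v1alpha.BatchServiceClient()

job = batch_v1alpha.Job(
    name="projects/my-project/locations/us-central1/jobs/my-job",  # placeholder
)
job.task_groups.append(batch_v1alpha.TaskGroup(task_count=2))

# Per the docstring, update_mask must be exactly "taskGroups"; other job
# fields in the update request are ignored by the service.
updated = client.update_job(
    job=job,
    update_mask=field_mask_pb2.FieldMask(paths=["taskGroups"]),
)
print(updated.task_groups[0].task_count)
```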
- flattened_params = [job, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, batch.UpdateJobRequest): - request = batch.UpdateJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if job is not None: - request.job = job - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("job.name", request.job.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_jobs(self, - request: Optional[Union[batch.ListJobsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListJobsPager: - r"""List all Jobs for a project within a region. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import batch_v1alpha - - def sample_list_jobs(): - # Create a client - client = batch_v1alpha.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1alpha.ListJobsRequest( - ) - - # Make the request - page_result = client.list_jobs(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.batch_v1alpha.types.ListJobsRequest, dict]): - The request object. ListJob Request. - parent (str): - Parent path. - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.batch_v1alpha.services.batch_service.pagers.ListJobsPager: - ListJob Response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. 
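Since ``list_jobs`` returns a ``ListJobsPager`` (defined later in this diff), a short usage sketch may help; the parent value is a hypothetical placeholder:

```python
# Illustrative only: the pager fetches further pages lazily during iteration.
from google.cloud import batch_v1alpha

client = batch_v1alpha.BatchServiceClient()
parent = "projects/my-project/locations/us-central1"  # placeholder

# Item-by-item iteration; additional ListJobs requests happen transparently.
for job in client.list_jobs(parent=parent):
    print(job.name)

# Page-by-page iteration via the pager's `pages` property.
for page in client.list_jobs(parent=parent).pages:
    print(f"page with {len(page.jobs)} job(s)")
```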
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, batch.ListJobsRequest): - request = batch.ListJobsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_jobs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListJobsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_task(self, - request: Optional[Union[batch.GetTaskRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> task.Task: - r"""Return a single Task. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import batch_v1alpha - - def sample_get_task(): - # Create a client - client = batch_v1alpha.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1alpha.GetTaskRequest( - name="name_value", - ) - - # Make the request - response = client.get_task(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.batch_v1alpha.types.GetTaskRequest, dict]): - The request object. Request for a single Task by name. - name (str): - Required. Task name. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- - Returns: - google.cloud.batch_v1alpha.types.Task: - A Cloud Batch task. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, batch.GetTaskRequest): - request = batch.GetTaskRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_task] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_tasks(self, - request: Optional[Union[batch.ListTasksRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListTasksPager: - r"""List Tasks associated with a job. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import batch_v1alpha - - def sample_list_tasks(): - # Create a client - client = batch_v1alpha.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1alpha.ListTasksRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_tasks(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.batch_v1alpha.types.ListTasksRequest, dict]): - The request object. ListTasks Request. - parent (str): - Required. Name of a TaskGroup from which Tasks are being - requested. Pattern: - "projects/{project}/locations/{location}/jobs/{job}/taskGroups/{task_group}" - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.batch_v1alpha.services.batch_service.pagers.ListTasksPager: - ListTasks Response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, batch.ListTasksRequest): - request = batch.ListTasksRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_tasks] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListTasksPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_resource_allowance(self, - request: Optional[Union[batch.CreateResourceAllowanceRequest, dict]] = None, - *, - parent: Optional[str] = None, - resource_allowance: Optional[gcb_resource_allowance.ResourceAllowance] = None, - resource_allowance_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gcb_resource_allowance.ResourceAllowance: - r"""Create a Resource Allowance. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
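For the ``list_tasks`` method above, a hedged usage sketch; the job and task-group names are placeholders, and ``group0`` is simply a common default rather than something this file guarantees:

```python
# Illustrative only: list the tasks of one task group of a job.
from google.cloud import batch_v1alpha

client = batch_v1alpha.BatchServiceClient()
parent = (
    "projects/my-project/locations/us-central1"
    "/jobs/my-job/taskGroups/group0"  # placeholder names
)
for task in client.list_tasks(parent=parent):
    print(task.name)
```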
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import batch_v1alpha - - def sample_create_resource_allowance(): - # Create a client - client = batch_v1alpha.BatchServiceClient() - - # Initialize request argument(s) - resource_allowance = batch_v1alpha.ResourceAllowance() - resource_allowance.usage_resource_allowance.spec.type_ = "type__value" - resource_allowance.usage_resource_allowance.spec.limit.calendar_period = "DAY" - - request = batch_v1alpha.CreateResourceAllowanceRequest( - parent="parent_value", - resource_allowance=resource_allowance, - ) - - # Make the request - response = client.create_resource_allowance(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.batch_v1alpha.types.CreateResourceAllowanceRequest, dict]): - The request object. CreateResourceAllowance Request. - parent (str): - Required. The parent resource name - where the ResourceAllowance will be - created. Pattern: - "projects/{project}/locations/{location}" - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - resource_allowance (google.cloud.batch_v1alpha.types.ResourceAllowance): - Required. The ResourceAllowance to - create. - - This corresponds to the ``resource_allowance`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - resource_allowance_id (str): - ID used to uniquely identify the ResourceAllowance - within its parent scope. This field should contain at - most 63 characters and must start with lowercase - characters. Only lowercase characters, numbers and '-' - are accepted. The '-' character cannot be the first or - the last one. A system generated ID will be used if the - field is not set. - - The resource_allowance.name field in the request will be - ignored and the created resource name of the - ResourceAllowance will be - "{parent}/resourceAllowances/{resource_allowance_id}". - - This corresponds to the ``resource_allowance_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.batch_v1alpha.types.ResourceAllowance: - The Resource Allowance description - for Cloud Batch. Only one Resource - Allowance is supported now under a - specific location and project. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, resource_allowance, resource_allowance_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
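The flattened arguments documented above can replace the explicit request object from the generated sample. A sketch under the assumption that the spec values from that sample are valid; the parent and ID are placeholders:

```python
# Illustrative only: create an allowance with an explicit ID instead of a
# system-generated one.
from google.cloud import batch_v1alpha

client = batch_v1alpha.BatchServiceClient()

resource_allowance = batch_v1alpha.ResourceAllowance()
resource_allowance.usage_resource_allowance.spec.type_ = "type__value"
resource_allowance.usage_resource_allowance.spec.limit.calendar_period = "DAY"

created = client.create_resource_allowance(
    parent="projects/my-project/locations/us-central1",  # placeholder
    resource_allowance=resource_allowance,
    resource_allowance_id="my-allowance",  # must satisfy the ID rules above
)
print(created.name)
```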
- if not isinstance(request, batch.CreateResourceAllowanceRequest): - request = batch.CreateResourceAllowanceRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if resource_allowance is not None: - request.resource_allowance = resource_allowance - if resource_allowance_id is not None: - request.resource_allowance_id = resource_allowance_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_resource_allowance] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_resource_allowance(self, - request: Optional[Union[batch.GetResourceAllowanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> resource_allowance.ResourceAllowance: - r"""Get a ResourceAllowance specified by its resource - name. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import batch_v1alpha - - def sample_get_resource_allowance(): - # Create a client - client = batch_v1alpha.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1alpha.GetResourceAllowanceRequest( - name="name_value", - ) - - # Make the request - response = client.get_resource_allowance(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.batch_v1alpha.types.GetResourceAllowanceRequest, dict]): - The request object. GetResourceAllowance Request. - name (str): - Required. ResourceAllowance name. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.batch_v1alpha.types.ResourceAllowance: - The Resource Allowance description - for Cloud Batch. Only one Resource - Allowance is supported now under a - specific location and project. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, batch.GetResourceAllowanceRequest): - request = batch.GetResourceAllowanceRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_resource_allowance] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_resource_allowance(self, - request: Optional[Union[batch.DeleteResourceAllowanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Delete a ResourceAllowance. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import batch_v1alpha - - def sample_delete_resource_allowance(): - # Create a client - client = batch_v1alpha.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1alpha.DeleteResourceAllowanceRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_resource_allowance(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.batch_v1alpha.types.DeleteResourceAllowanceRequest, dict]): - The request object. DeleteResourceAllowance Request. - name (str): - Required. ResourceAllowance name. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. 
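As the ``Returns`` section here notes, the call yields a long-running operation whose result is ``google.protobuf.empty_pb2.Empty``. A small sketch of driving it to completion; the resource name and timeout value are arbitrary placeholders:

```python
# Illustrative only: block until the delete finishes; result() raises if the
# operation failed, and the Empty payload carries no data on success.
from google.cloud import batch_v1alpha

client = batch_v1alpha.BatchServiceClient()
operation = client.delete_resource_allowance(
    name="projects/my-project/locations/us-central1"
         "/resourceAllowances/my-allowance",  # placeholder
)
print("Waiting for operation to complete...")
operation.result(timeout=300)  # seconds; an arbitrary choice
```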
- - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, batch.DeleteResourceAllowanceRequest): - request = batch.DeleteResourceAllowanceRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_resource_allowance] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=batch.OperationMetadata, - ) - - # Done; return the response. - return response - - def list_resource_allowances(self, - request: Optional[Union[batch.ListResourceAllowancesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListResourceAllowancesPager: - r"""List all ResourceAllowances for a project within a - region. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import batch_v1alpha - - def sample_list_resource_allowances(): - # Create a client - client = batch_v1alpha.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1alpha.ListResourceAllowancesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_resource_allowances(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.batch_v1alpha.types.ListResourceAllowancesRequest, dict]): - The request object. ListResourceAllowances Request. - parent (str): - Required. 
Parent path. - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.batch_v1alpha.services.batch_service.pagers.ListResourceAllowancesPager: - ListResourceAllowances Response. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, batch.ListResourceAllowancesRequest): - request = batch.ListResourceAllowancesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_resource_allowances] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListResourceAllowancesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_resource_allowance(self, - request: Optional[Union[batch.UpdateResourceAllowanceRequest, dict]] = None, - *, - resource_allowance: Optional[gcb_resource_allowance.ResourceAllowance] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gcb_resource_allowance.ResourceAllowance: - r"""Update a Resource Allowance. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import batch_v1alpha - - def sample_update_resource_allowance(): - # Create a client - client = batch_v1alpha.BatchServiceClient() - - # Initialize request argument(s) - resource_allowance = batch_v1alpha.ResourceAllowance() - resource_allowance.usage_resource_allowance.spec.type_ = "type__value" - resource_allowance.usage_resource_allowance.spec.limit.calendar_period = "DAY" - - request = batch_v1alpha.UpdateResourceAllowanceRequest( - resource_allowance=resource_allowance, - ) - - # Make the request - response = client.update_resource_allowance(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.batch_v1alpha.types.UpdateResourceAllowanceRequest, dict]): - The request object. UpdateResourceAllowance Request. - resource_allowance (google.cloud.batch_v1alpha.types.ResourceAllowance): - Required. The ResourceAllowance to update. Update - description. Only fields specified in ``update_mask`` - are updated. - - This corresponds to the ``resource_allowance`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - - Field mask is used to specify the fields to be - overwritten in the ResourceAllowance resource by the - update. The fields specified in the update_mask are - relative to the resource, not the full request. A field - will be overwritten if it is in the mask. If the user - does not provide a mask then all fields will be - overwritten. - - UpdateResourceAllowance request now only supports update - on ``limit`` field. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.batch_v1alpha.types.ResourceAllowance: - The Resource Allowance description - for Cloud Batch. Only one Resource - Allowance is supported now under a - specific location and project. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [resource_allowance, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, batch.UpdateResourceAllowanceRequest): - request = batch.UpdateResourceAllowanceRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
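Given that the docstring above says only the ``limit`` field may currently be updated, a hedged sketch follows; the field path and numeric value are assumptions based on the sample's ``spec.limit`` usage, not guarantees:

```python
# Illustrative only: raise the usage limit, masking the update to `limit`.
from google.protobuf import field_mask_pb2

from google.cloud import batch_v1alpha

client = batch_v1alpha.BatchServiceClient()

resource_allowance = batch_v1alpha.ResourceAllowance(
    name="projects/my-project/locations/us-central1"
         "/resourceAllowances/my-allowance",  # placeholder
)
resource_allowance.usage_resource_allowance.spec.limit.limit = 100.0  # assumed field

updated = client.update_resource_allowance(
    resource_allowance=resource_allowance,
    update_mask=field_mask_pb2.FieldMask(paths=["limit"]),
)
```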
- if resource_allowance is not None: - request.resource_allowance = resource_allowance - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_resource_allowance] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource_allowance.name", request.resource_allowance.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "BatchServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
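Because these mixin requests are plain protobuf types rather than proto-plus wrappers, a dict is keyword-expanded into the request, as the comment above states. A sketch with a placeholder location name:

```python
# Illustrative only: both forms below are equivalent for the mixin methods.
from google.cloud.location import locations_pb2

from google.cloud import batch_v1alpha

client = batch_v1alpha.BatchServiceClient()
name = "projects/my-project/locations/us-central1"  # placeholder

location = client.get_location({"name": name})  # dict form, keyword-expanded
location = client.get_location(locations_pb2.GetLocationRequest(name=name))
print(location.display_name)
```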
- if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "BatchServiceClient", -) diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/services/batch_service/pagers.py b/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/services/batch_service/pagers.py deleted file mode 100644 index 5f7d38620382..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/services/batch_service/pagers.py +++ /dev/null @@ -1,447 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.batch_v1alpha.types import batch -from google.cloud.batch_v1alpha.types import job -from google.cloud.batch_v1alpha.types import resource_allowance -from google.cloud.batch_v1alpha.types import task - - -class ListJobsPager: - """A pager for iterating through ``list_jobs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.batch_v1alpha.types.ListJobsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``jobs`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListJobs`` requests and continue to iterate - through the ``jobs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.batch_v1alpha.types.ListJobsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., batch.ListJobsResponse], - request: batch.ListJobsRequest, - response: batch.ListJobsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.batch_v1alpha.types.ListJobsRequest): - The initial request object. - response (google.cloud.batch_v1alpha.types.ListJobsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = batch.ListJobsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[batch.ListJobsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[job.Job]: - for page in self.pages: - yield from page.jobs - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListJobsAsyncPager: - """A pager for iterating through ``list_jobs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.batch_v1alpha.types.ListJobsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``jobs`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListJobs`` requests and continue to iterate - through the ``jobs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.batch_v1alpha.types.ListJobsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[batch.ListJobsResponse]], - request: batch.ListJobsRequest, - response: batch.ListJobsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.batch_v1alpha.types.ListJobsRequest): - The initial request object. - response (google.cloud.batch_v1alpha.types.ListJobsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = batch.ListJobsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[batch.ListJobsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[job.Job]: - async def async_generator(): - async for page in self.pages: - for response in page.jobs: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListTasksPager: - """A pager for iterating through ``list_tasks`` requests. - - This class thinly wraps an initial - :class:`google.cloud.batch_v1alpha.types.ListTasksResponse` object, and - provides an ``__iter__`` method to iterate through its - ``tasks`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListTasks`` requests and continue to iterate - through the ``tasks`` field on the - corresponding responses. - - All the usual :class:`google.cloud.batch_v1alpha.types.ListTasksResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., batch.ListTasksResponse], - request: batch.ListTasksRequest, - response: batch.ListTasksResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.batch_v1alpha.types.ListTasksRequest): - The initial request object. - response (google.cloud.batch_v1alpha.types.ListTasksResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = batch.ListTasksRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[batch.ListTasksResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[task.Task]: - for page in self.pages: - yield from page.tasks - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListTasksAsyncPager: - """A pager for iterating through ``list_tasks`` requests. 
- - This class thinly wraps an initial - :class:`google.cloud.batch_v1alpha.types.ListTasksResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``tasks`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListTasks`` requests and continue to iterate - through the ``tasks`` field on the - corresponding responses. - - All the usual :class:`google.cloud.batch_v1alpha.types.ListTasksResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[batch.ListTasksResponse]], - request: batch.ListTasksRequest, - response: batch.ListTasksResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.batch_v1alpha.types.ListTasksRequest): - The initial request object. - response (google.cloud.batch_v1alpha.types.ListTasksResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = batch.ListTasksRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[batch.ListTasksResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[task.Task]: - async def async_generator(): - async for page in self.pages: - for response in page.tasks: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListResourceAllowancesPager: - """A pager for iterating through ``list_resource_allowances`` requests. - - This class thinly wraps an initial - :class:`google.cloud.batch_v1alpha.types.ListResourceAllowancesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``resource_allowances`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListResourceAllowances`` requests and continue to iterate - through the ``resource_allowances`` field on the - corresponding responses. - - All the usual :class:`google.cloud.batch_v1alpha.types.ListResourceAllowancesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - def __init__(self, - method: Callable[..., batch.ListResourceAllowancesResponse], - request: batch.ListResourceAllowancesRequest, - response: batch.ListResourceAllowancesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.batch_v1alpha.types.ListResourceAllowancesRequest): - The initial request object. - response (google.cloud.batch_v1alpha.types.ListResourceAllowancesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = batch.ListResourceAllowancesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[batch.ListResourceAllowancesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[resource_allowance.ResourceAllowance]: - for page in self.pages: - yield from page.resource_allowances - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListResourceAllowancesAsyncPager: - """A pager for iterating through ``list_resource_allowances`` requests. - - This class thinly wraps an initial - :class:`google.cloud.batch_v1alpha.types.ListResourceAllowancesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``resource_allowances`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListResourceAllowances`` requests and continue to iterate - through the ``resource_allowances`` field on the - corresponding responses. - - All the usual :class:`google.cloud.batch_v1alpha.types.ListResourceAllowancesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[batch.ListResourceAllowancesResponse]], - request: batch.ListResourceAllowancesRequest, - response: batch.ListResourceAllowancesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.batch_v1alpha.types.ListResourceAllowancesRequest): - The initial request object. - response (google.cloud.batch_v1alpha.types.ListResourceAllowancesResponse): - The initial response object. 
- retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = batch.ListResourceAllowancesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[batch.ListResourceAllowancesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[resource_allowance.ResourceAllowance]: - async def async_generator(): - async for page in self.pages: - for response in page.resource_allowances: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/services/batch_service/transports/README.rst b/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/services/batch_service/transports/README.rst deleted file mode 100644 index c2f8f5a0e133..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/services/batch_service/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`BatchServiceTransport` is the ABC for all transports. -- public child `BatchServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `BatchServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseBatchServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `BatchServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/services/batch_service/transports/__init__.py b/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/services/batch_service/transports/__init__.py deleted file mode 100644 index b14b7c9b9878..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/services/batch_service/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import BatchServiceTransport -from .grpc import BatchServiceGrpcTransport -from .grpc_asyncio import BatchServiceGrpcAsyncIOTransport -from .rest import BatchServiceRestTransport -from .rest import BatchServiceRestInterceptor - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[BatchServiceTransport]] -_transport_registry['grpc'] = BatchServiceGrpcTransport -_transport_registry['grpc_asyncio'] = BatchServiceGrpcAsyncIOTransport -_transport_registry['rest'] = BatchServiceRestTransport - -__all__ = ( - 'BatchServiceTransport', - 'BatchServiceGrpcTransport', - 'BatchServiceGrpcAsyncIOTransport', - 'BatchServiceRestTransport', - 'BatchServiceRestInterceptor', -) diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/services/batch_service/transports/base.py b/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/services/batch_service/transports/base.py deleted file mode 100644 index bd13d5492cee..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/services/batch_service/transports/base.py +++ /dev/null @@ -1,471 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
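[editor's note] The ``_transport_registry`` above is what lets callers pick a transport by short name rather than importing a class; GAPIC clients generally accept either form. A sketch under that assumption (constructing a transport resolves Application Default Credentials):

.. code-block:: python

    from google.cloud import batch_v1alpha
    from google.cloud.batch_v1alpha.services.batch_service.transports import (
        BatchServiceGrpcTransport,
    )

    # Select the transport by registry key ('grpc' or 'rest' for the sync client)...
    rest_client = batch_v1alpha.BatchServiceClient(transport="rest")

    # ...or pass a preconfigured transport instance directly.
    grpc_client = batch_v1alpha.BatchServiceClient(transport=BatchServiceGrpcTransport())
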
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.batch_v1alpha import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.batch_v1alpha.types import batch -from google.cloud.batch_v1alpha.types import job -from google.cloud.batch_v1alpha.types import job as gcb_job -from google.cloud.batch_v1alpha.types import resource_allowance -from google.cloud.batch_v1alpha.types import resource_allowance as gcb_resource_allowance -from google.cloud.batch_v1alpha.types import task -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class BatchServiceTransport(abc.ABC): - """Abstract transport class for BatchService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'batch.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'batch.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. 
- if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.create_job: gapic_v1.method.wrap_method( - self.create_job, - default_timeout=60.0, - client_info=client_info, - ), - self.get_job: gapic_v1.method.wrap_method( - self.get_job, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.delete_job: gapic_v1.method.wrap_method( - self.delete_job, - default_timeout=60.0, - client_info=client_info, - ), - self.cancel_job: gapic_v1.method.wrap_method( - self.cancel_job, - default_timeout=60.0, - client_info=client_info, - ), - self.update_job: gapic_v1.method.wrap_method( - self.update_job, - default_timeout=60.0, - client_info=client_info, - ), - self.list_jobs: gapic_v1.method.wrap_method( - self.list_jobs, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_task: gapic_v1.method.wrap_method( - self.get_task, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_tasks: gapic_v1.method.wrap_method( - self.list_tasks, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_resource_allowance: gapic_v1.method.wrap_method( - self.create_resource_allowance, - default_timeout=60.0, - client_info=client_info, - ), - self.get_resource_allowance: gapic_v1.method.wrap_method( - self.get_resource_allowance, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - 
self.delete_resource_allowance: gapic_v1.method.wrap_method( - self.delete_resource_allowance, - default_timeout=60.0, - client_info=client_info, - ), - self.list_resource_allowances: gapic_v1.method.wrap_method( - self.list_resource_allowances, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.update_resource_allowance: gapic_v1.method.wrap_method( - self.update_resource_allowance, - default_timeout=60.0, - client_info=client_info, - ), - self.get_location: gapic_v1.method.wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - self.list_locations: gapic_v1.method.wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: gapic_v1.method.wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: gapic_v1.method.wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: gapic_v1.method.wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: gapic_v1.method.wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! - """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def create_job(self) -> Callable[ - [batch.CreateJobRequest], - Union[ - gcb_job.Job, - Awaitable[gcb_job.Job] - ]]: - raise NotImplementedError() - - @property - def get_job(self) -> Callable[ - [batch.GetJobRequest], - Union[ - job.Job, - Awaitable[job.Job] - ]]: - raise NotImplementedError() - - @property - def delete_job(self) -> Callable[ - [batch.DeleteJobRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def cancel_job(self) -> Callable[ - [batch.CancelJobRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def update_job(self) -> Callable[ - [batch.UpdateJobRequest], - Union[ - gcb_job.Job, - Awaitable[gcb_job.Job] - ]]: - raise NotImplementedError() - - @property - def list_jobs(self) -> Callable[ - [batch.ListJobsRequest], - Union[ - batch.ListJobsResponse, - Awaitable[batch.ListJobsResponse] - ]]: - raise NotImplementedError() - - @property - def get_task(self) -> Callable[ - [batch.GetTaskRequest], - Union[ - task.Task, - Awaitable[task.Task] - ]]: - raise NotImplementedError() - - @property - def list_tasks(self) -> Callable[ - [batch.ListTasksRequest], - Union[ - batch.ListTasksResponse, - Awaitable[batch.ListTasksResponse] - ]]: - raise NotImplementedError() - - @property - def create_resource_allowance(self) -> Callable[ - [batch.CreateResourceAllowanceRequest], - Union[ - gcb_resource_allowance.ResourceAllowance, - Awaitable[gcb_resource_allowance.ResourceAllowance] - ]]: - raise NotImplementedError() - - @property - def get_resource_allowance(self) -> Callable[ - [batch.GetResourceAllowanceRequest], - 
Union[ - resource_allowance.ResourceAllowance, - Awaitable[resource_allowance.ResourceAllowance] - ]]: - raise NotImplementedError() - - @property - def delete_resource_allowance(self) -> Callable[ - [batch.DeleteResourceAllowanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_resource_allowances(self) -> Callable[ - [batch.ListResourceAllowancesRequest], - Union[ - batch.ListResourceAllowancesResponse, - Awaitable[batch.ListResourceAllowancesResponse] - ]]: - raise NotImplementedError() - - @property - def update_resource_allowance(self) -> Callable[ - [batch.UpdateResourceAllowanceRequest], - Union[ - gcb_resource_allowance.ResourceAllowance, - Awaitable[gcb_resource_allowance.ResourceAllowance] - ]]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def delete_operation( - self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def get_location(self, - ) -> Callable[ - [locations_pb2.GetLocationRequest], - Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], - ]: - raise NotImplementedError() - - @property - def list_locations(self, - ) -> Callable[ - [locations_pb2.ListLocationsRequest], - Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'BatchServiceTransport', -) diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/services/batch_service/transports/grpc.py b/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/services/batch_service/transports/grpc.py deleted file mode 100644 index 9e2856a0f728..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/services/batch_service/transports/grpc.py +++ /dev/null @@ -1,796 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
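[editor's note] The ``_prep_wrapped_methods`` table earlier in ``base.py`` doubles as the reference for per-call overrides: read-side RPCs such as ``get_job`` retry only ``ServiceUnavailable`` with exponential backoff (1.0s initial, 1.3x multiplier, 10.0s cap) under a 60-second deadline, while mutations ship with no default retry. A sketch reproducing that policy explicitly, with ``client`` and ``job_name`` assumed:

.. code-block:: python

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries

    default_like_retry = retries.Retry(
        initial=1.0,      # first backoff, in seconds
        maximum=10.0,     # backoff ceiling
        multiplier=1.3,   # growth factor per attempt
        predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
        deadline=60.0,    # total time budget
    )
    job = client.get_job(name=job_name, retry=default_like_retry, timeout=60.0)
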
-# -import json -import logging as std_logging -import pickle -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore - -from google.cloud.batch_v1alpha.types import batch -from google.cloud.batch_v1alpha.types import job -from google.cloud.batch_v1alpha.types import job as gcb_job -from google.cloud.batch_v1alpha.types import resource_allowance -from google.cloud.batch_v1alpha.types import resource_allowance as gcb_resource_allowance -from google.cloud.batch_v1alpha.types import task -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from .base import BatchServiceTransport, DEFAULT_CLIENT_INFO - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER - def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.batch.v1alpha.BatchService", - "rpcName": client_call_details.method, - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - - response = continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = response.trailing_metadata() - # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = response.result() - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response for {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.batch.v1alpha.BatchService", - "rpcName": client_call_details.method, - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class 
BatchServiceGrpcTransport(BatchServiceTransport): - """gRPC backend transport for BatchService. - - Google Batch Service. - The service manages user submitted batch jobs and allocates - Google Compute Engine VM instances to run the jobs. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'batch.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'batch.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if a ``channel`` instance is provided. - channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. 
- client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) - - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'batch.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. 
- credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def create_job(self) -> Callable[ - [batch.CreateJobRequest], - gcb_job.Job]: - r"""Return a callable for the create job method over gRPC. - - Create a Job. - - Returns: - Callable[[~.CreateJobRequest], - ~.Job]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_job' not in self._stubs: - self._stubs['create_job'] = self._logged_channel.unary_unary( - '/google.cloud.batch.v1alpha.BatchService/CreateJob', - request_serializer=batch.CreateJobRequest.serialize, - response_deserializer=gcb_job.Job.deserialize, - ) - return self._stubs['create_job'] - - @property - def get_job(self) -> Callable[ - [batch.GetJobRequest], - job.Job]: - r"""Return a callable for the get job method over gRPC. - - Get a Job specified by its resource name. - - Returns: - Callable[[~.GetJobRequest], - ~.Job]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
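[editor's note] As the constructor above shows, passing a ready-made ``grpc.Channel`` makes the transport ignore credential arguments, while the ``create_channel`` classmethod builds one with the service's default host and scopes. A sketch, assuming Application Default Credentials are available:

.. code-block:: python

    from google.cloud.batch_v1alpha.services.batch_service.transports import (
        BatchServiceGrpcTransport,
    )

    channel = BatchServiceGrpcTransport.create_channel("batch.googleapis.com")
    transport = BatchServiceGrpcTransport(channel=channel)  # credentials args ignored
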
- if 'get_job' not in self._stubs: - self._stubs['get_job'] = self._logged_channel.unary_unary( - '/google.cloud.batch.v1alpha.BatchService/GetJob', - request_serializer=batch.GetJobRequest.serialize, - response_deserializer=job.Job.deserialize, - ) - return self._stubs['get_job'] - - @property - def delete_job(self) -> Callable[ - [batch.DeleteJobRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete job method over gRPC. - - Delete a Job. - - Returns: - Callable[[~.DeleteJobRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_job' not in self._stubs: - self._stubs['delete_job'] = self._logged_channel.unary_unary( - '/google.cloud.batch.v1alpha.BatchService/DeleteJob', - request_serializer=batch.DeleteJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_job'] - - @property - def cancel_job(self) -> Callable[ - [batch.CancelJobRequest], - operations_pb2.Operation]: - r"""Return a callable for the cancel job method over gRPC. - - Cancel a Job. - - Returns: - Callable[[~.CancelJobRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'cancel_job' not in self._stubs: - self._stubs['cancel_job'] = self._logged_channel.unary_unary( - '/google.cloud.batch.v1alpha.BatchService/CancelJob', - request_serializer=batch.CancelJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['cancel_job'] - - @property - def update_job(self) -> Callable[ - [batch.UpdateJobRequest], - gcb_job.Job]: - r"""Return a callable for the update job method over gRPC. - - Update a Job. - - Returns: - Callable[[~.UpdateJobRequest], - ~.Job]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_job' not in self._stubs: - self._stubs['update_job'] = self._logged_channel.unary_unary( - '/google.cloud.batch.v1alpha.BatchService/UpdateJob', - request_serializer=batch.UpdateJobRequest.serialize, - response_deserializer=gcb_job.Job.deserialize, - ) - return self._stubs['update_job'] - - @property - def list_jobs(self) -> Callable[ - [batch.ListJobsRequest], - batch.ListJobsResponse]: - r"""Return a callable for the list jobs method over gRPC. - - List all Jobs for a project within a region. - - Returns: - Callable[[~.ListJobsRequest], - ~.ListJobsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
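[editor's note] Notice the two serializer families in these stubs: Batch's proto-plus request types expose class-level ``serialize``/``deserialize`` hooks, while plain protobuf messages such as ``Operation`` use ``SerializeToString``/``FromString``. A round-trip sketch (the job name is a placeholder):

.. code-block:: python

    from google.cloud.batch_v1alpha.types import batch
    from google.longrunning import operations_pb2

    # proto-plus: class-level serialize/deserialize
    wire = batch.DeleteJobRequest.serialize(
        batch.DeleteJobRequest(name="projects/p/locations/l/jobs/j")
    )
    request = batch.DeleteJobRequest.deserialize(wire)

    # plain protobuf: instance SerializeToString, class-level FromString
    op_wire = operations_pb2.Operation(name="operations/abc").SerializeToString()
    operation = operations_pb2.Operation.FromString(op_wire)
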
- if 'list_jobs' not in self._stubs: - self._stubs['list_jobs'] = self._logged_channel.unary_unary( - '/google.cloud.batch.v1alpha.BatchService/ListJobs', - request_serializer=batch.ListJobsRequest.serialize, - response_deserializer=batch.ListJobsResponse.deserialize, - ) - return self._stubs['list_jobs'] - - @property - def get_task(self) -> Callable[ - [batch.GetTaskRequest], - task.Task]: - r"""Return a callable for the get task method over gRPC. - - Return a single Task. - - Returns: - Callable[[~.GetTaskRequest], - ~.Task]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_task' not in self._stubs: - self._stubs['get_task'] = self._logged_channel.unary_unary( - '/google.cloud.batch.v1alpha.BatchService/GetTask', - request_serializer=batch.GetTaskRequest.serialize, - response_deserializer=task.Task.deserialize, - ) - return self._stubs['get_task'] - - @property - def list_tasks(self) -> Callable[ - [batch.ListTasksRequest], - batch.ListTasksResponse]: - r"""Return a callable for the list tasks method over gRPC. - - List Tasks associated with a job. - - Returns: - Callable[[~.ListTasksRequest], - ~.ListTasksResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_tasks' not in self._stubs: - self._stubs['list_tasks'] = self._logged_channel.unary_unary( - '/google.cloud.batch.v1alpha.BatchService/ListTasks', - request_serializer=batch.ListTasksRequest.serialize, - response_deserializer=batch.ListTasksResponse.deserialize, - ) - return self._stubs['list_tasks'] - - @property - def create_resource_allowance(self) -> Callable[ - [batch.CreateResourceAllowanceRequest], - gcb_resource_allowance.ResourceAllowance]: - r"""Return a callable for the create resource allowance method over gRPC. - - Create a Resource Allowance. - - Returns: - Callable[[~.CreateResourceAllowanceRequest], - ~.ResourceAllowance]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_resource_allowance' not in self._stubs: - self._stubs['create_resource_allowance'] = self._logged_channel.unary_unary( - '/google.cloud.batch.v1alpha.BatchService/CreateResourceAllowance', - request_serializer=batch.CreateResourceAllowanceRequest.serialize, - response_deserializer=gcb_resource_allowance.ResourceAllowance.deserialize, - ) - return self._stubs['create_resource_allowance'] - - @property - def get_resource_allowance(self) -> Callable[ - [batch.GetResourceAllowanceRequest], - resource_allowance.ResourceAllowance]: - r"""Return a callable for the get resource allowance method over gRPC. - - Get a ResourceAllowance specified by its resource - name. - - Returns: - Callable[[~.GetResourceAllowanceRequest], - ~.ResourceAllowance]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
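[editor's note] Each stub property follows the same lazy pattern: the first access creates the callable and caches it in ``_stubs``, so repeated lookups return the identical object. A sketch, assuming a transport instance (its constructor resolves Application Default Credentials):

.. code-block:: python

    transport = BatchServiceGrpcTransport()   # requires ADC in the environment

    first = transport.get_task
    second = transport.get_task
    assert first is second                    # cached in transport._stubs
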
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_resource_allowance' not in self._stubs: - self._stubs['get_resource_allowance'] = self._logged_channel.unary_unary( - '/google.cloud.batch.v1alpha.BatchService/GetResourceAllowance', - request_serializer=batch.GetResourceAllowanceRequest.serialize, - response_deserializer=resource_allowance.ResourceAllowance.deserialize, - ) - return self._stubs['get_resource_allowance'] - - @property - def delete_resource_allowance(self) -> Callable[ - [batch.DeleteResourceAllowanceRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete resource allowance method over gRPC. - - Delete a ResourceAllowance. - - Returns: - Callable[[~.DeleteResourceAllowanceRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_resource_allowance' not in self._stubs: - self._stubs['delete_resource_allowance'] = self._logged_channel.unary_unary( - '/google.cloud.batch.v1alpha.BatchService/DeleteResourceAllowance', - request_serializer=batch.DeleteResourceAllowanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_resource_allowance'] - - @property - def list_resource_allowances(self) -> Callable[ - [batch.ListResourceAllowancesRequest], - batch.ListResourceAllowancesResponse]: - r"""Return a callable for the list resource allowances method over gRPC. - - List all ResourceAllowances for a project within a - region. - - Returns: - Callable[[~.ListResourceAllowancesRequest], - ~.ListResourceAllowancesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_resource_allowances' not in self._stubs: - self._stubs['list_resource_allowances'] = self._logged_channel.unary_unary( - '/google.cloud.batch.v1alpha.BatchService/ListResourceAllowances', - request_serializer=batch.ListResourceAllowancesRequest.serialize, - response_deserializer=batch.ListResourceAllowancesResponse.deserialize, - ) - return self._stubs['list_resource_allowances'] - - @property - def update_resource_allowance(self) -> Callable[ - [batch.UpdateResourceAllowanceRequest], - gcb_resource_allowance.ResourceAllowance]: - r"""Return a callable for the update resource allowance method over gRPC. - - Update a Resource Allowance. - - Returns: - Callable[[~.UpdateResourceAllowanceRequest], - ~.ResourceAllowance]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'update_resource_allowance' not in self._stubs: - self._stubs['update_resource_allowance'] = self._logged_channel.unary_unary( - '/google.cloud.batch.v1alpha.BatchService/UpdateResourceAllowance', - request_serializer=batch.UpdateResourceAllowanceRequest.serialize, - response_deserializer=gcb_resource_allowance.ResourceAllowance.deserialize, - ) - return self._stubs['update_resource_allowance'] - - def close(self): - self._logged_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_location" not in self._stubs: - self._stubs["get_location"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'BatchServiceGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/services/batch_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/services/batch_service/transports/grpc_asyncio.py deleted file mode 100644 index b11f03af593c..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/services/batch_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,960 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-import inspect
-import json
-import pickle
-import logging as std_logging
-import warnings
-from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
-
-from google.api_core import gapic_v1
-from google.api_core import grpc_helpers_async
-from google.api_core import exceptions as core_exceptions
-from google.api_core import retry_async as retries
-from google.api_core import operations_v1
-from google.auth import credentials as ga_credentials  # type: ignore
-from google.auth.transport.grpc import SslCredentials  # type: ignore
-from google.protobuf.json_format import MessageToJson
-import google.protobuf.message
-
-import grpc  # type: ignore
-import proto  # type: ignore
-from grpc.experimental import aio  # type: ignore
-
-from google.cloud.batch_v1alpha.types import batch
-from google.cloud.batch_v1alpha.types import job
-from google.cloud.batch_v1alpha.types import job as gcb_job
-from google.cloud.batch_v1alpha.types import resource_allowance
-from google.cloud.batch_v1alpha.types import resource_allowance as gcb_resource_allowance
-from google.cloud.batch_v1alpha.types import task
-from google.cloud.location import locations_pb2 # type: ignore
-from google.longrunning import operations_pb2 # type: ignore
-from .base import BatchServiceTransport, DEFAULT_CLIENT_INFO
-from .grpc import BatchServiceGrpcTransport
-
-try:
-    from google.api_core import client_logging  # type: ignore
-    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
-except ImportError:  # pragma: NO COVER
-    CLIENT_LOGGING_SUPPORTED = False
-
-_LOGGER = std_logging.getLogger(__name__)
-
-
-class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor):  # pragma: NO COVER
-    async def intercept_unary_unary(self, continuation, client_call_details, request):
-        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG)
-        if logging_enabled:  # pragma: NO COVER
-            request_metadata = client_call_details.metadata
-            if isinstance(request, proto.Message):
-                request_payload = type(request).to_json(request)
-            elif isinstance(request, google.protobuf.message.Message):
-                request_payload = MessageToJson(request)
-            else:
-                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
-
-            request_metadata = {
-                key: value.decode("utf-8") if isinstance(value, bytes) else value
-                for key, value in request_metadata
-            }
-            grpc_request = {
-                "payload": request_payload,
-                "requestMethod": "grpc",
-                "metadata": dict(request_metadata),
-            }
-            _LOGGER.debug(
-                f"Sending request for {client_call_details.method}",
-                extra = {
-                    "serviceName": "google.cloud.batch.v1alpha.BatchService",
-                    "rpcName": str(client_call_details.method),
-                    "request": grpc_request,
-                    "metadata": grpc_request["metadata"],
-                },
-            )
-        response = await continuation(client_call_details, request)
-        if logging_enabled:  # pragma: NO COVER
-            response_metadata = await response.trailing_metadata()
-            # Convert the gRPC trailing metadata into a plain dict for logging.
-            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
-            result = await response
-            if isinstance(result, proto.Message):
-                response_payload = type(result).to_json(result)
-            elif isinstance(result, google.protobuf.message.Message):
-                response_payload = MessageToJson(result)
-            else:
-                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
-            grpc_response = {
-                "payload": response_payload,
-                "metadata": metadata,
-                "status": "OK",
-            }
-            _LOGGER.debug(
-                f"Received response to rpc {client_call_details.method}.",
-                extra = {
"serviceName": "google.cloud.batch.v1alpha.BatchService", - "rpcName": str(client_call_details.method), - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class BatchServiceGrpcAsyncIOTransport(BatchServiceTransport): - """gRPC AsyncIO backend transport for BatchService. - - Google Batch Service. - The service manages user submitted batch jobs and allocates - Google Compute Engine VM instances to run the jobs. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'batch.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'batch.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'batch.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. 
-                This argument is ignored if a ``channel`` instance is provided.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if a ``channel`` instance is provided.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]):
-                A ``Channel`` instance through which to make calls, or a Callable
-                that constructs and returns one. If set to None, ``self.create_channel``
-                is used to create the channel. If a Callable is given, it will be called
-                with the same arguments as used in ``self.create_channel``.
-            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
-                If provided, it overrides the ``host`` argument and tries to create
-                a mutual TLS channel with client SSL credentials from
-                ``client_cert_source`` or application default SSL credentials.
-            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                Deprecated. A callback to provide client SSL certificate bytes and
-                private key bytes, both in PEM format. It is ignored if
-                ``api_mtls_endpoint`` is None.
-            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
-                for the grpc channel. It is ignored if a ``channel`` instance is provided.
-            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                A callback to provide client certificate bytes and private key bytes,
-                both in PEM format. It is used to configure a mutual TLS channel. It is
-                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-            always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
-                be used for service account credentials.
-
-        Raises:
-            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
-                creation failed for any reason.
-            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
-                and ``credentials_file`` are passed.
-        """
-        self._grpc_channel = None
-        self._ssl_channel_credentials = ssl_channel_credentials
-        self._stubs: Dict[str, Callable] = {}
-        self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None
-
-        if api_mtls_endpoint:
-            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
-        if client_cert_source:
-            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
-
-        if isinstance(channel, aio.Channel):
-            # Ignore credentials if a channel was passed.
-            credentials = None
-            self._ignore_credentials = True
-            # If a channel was explicitly provided, set it.
-            self._grpc_channel = channel
-            self._ssl_channel_credentials = None
-        else:
-            if api_mtls_endpoint:
-                host = api_mtls_endpoint
-
-                # Create SSL credentials with client_cert_source or application
-                # default SSL credentials.
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def create_job(self) -> Callable[ - [batch.CreateJobRequest], - Awaitable[gcb_job.Job]]: - r"""Return a callable for the create job method over gRPC. - - Create a Job. - - Returns: - Callable[[~.CreateJobRequest], - Awaitable[~.Job]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
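The mutual-TLS branch above wraps the certificate and key returned by the ``client_cert_source`` callback in channel-level SSL credentials. The same construction in isolation, with placeholder PEM paths that are not part of the generated code:

.. code-block:: python

    import grpc

    # Placeholder PEM files standing in for the bytes a client_cert_source
    # callback would return.
    with open("client_cert.pem", "rb") as f:
        cert = f.read()
    with open("client_key.pem", "rb") as f:
        key = f.read()

    # Wrap the cert/key pair in SSL channel credentials and open a
    # secure channel with them.
    credentials = grpc.ssl_channel_credentials(
        certificate_chain=cert, private_key=key
    )
    channel = grpc.secure_channel("batch.googleapis.com:443", credentials)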
- if 'create_job' not in self._stubs: - self._stubs['create_job'] = self._logged_channel.unary_unary( - '/google.cloud.batch.v1alpha.BatchService/CreateJob', - request_serializer=batch.CreateJobRequest.serialize, - response_deserializer=gcb_job.Job.deserialize, - ) - return self._stubs['create_job'] - - @property - def get_job(self) -> Callable[ - [batch.GetJobRequest], - Awaitable[job.Job]]: - r"""Return a callable for the get job method over gRPC. - - Get a Job specified by its resource name. - - Returns: - Callable[[~.GetJobRequest], - Awaitable[~.Job]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_job' not in self._stubs: - self._stubs['get_job'] = self._logged_channel.unary_unary( - '/google.cloud.batch.v1alpha.BatchService/GetJob', - request_serializer=batch.GetJobRequest.serialize, - response_deserializer=job.Job.deserialize, - ) - return self._stubs['get_job'] - - @property - def delete_job(self) -> Callable[ - [batch.DeleteJobRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete job method over gRPC. - - Delete a Job. - - Returns: - Callable[[~.DeleteJobRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_job' not in self._stubs: - self._stubs['delete_job'] = self._logged_channel.unary_unary( - '/google.cloud.batch.v1alpha.BatchService/DeleteJob', - request_serializer=batch.DeleteJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_job'] - - @property - def cancel_job(self) -> Callable[ - [batch.CancelJobRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the cancel job method over gRPC. - - Cancel a Job. - - Returns: - Callable[[~.CancelJobRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'cancel_job' not in self._stubs: - self._stubs['cancel_job'] = self._logged_channel.unary_unary( - '/google.cloud.batch.v1alpha.BatchService/CancelJob', - request_serializer=batch.CancelJobRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['cancel_job'] - - @property - def update_job(self) -> Callable[ - [batch.UpdateJobRequest], - Awaitable[gcb_job.Job]]: - r"""Return a callable for the update job method over gRPC. - - Update a Job. - - Returns: - Callable[[~.UpdateJobRequest], - Awaitable[~.Job]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'update_job' not in self._stubs: - self._stubs['update_job'] = self._logged_channel.unary_unary( - '/google.cloud.batch.v1alpha.BatchService/UpdateJob', - request_serializer=batch.UpdateJobRequest.serialize, - response_deserializer=gcb_job.Job.deserialize, - ) - return self._stubs['update_job'] - - @property - def list_jobs(self) -> Callable[ - [batch.ListJobsRequest], - Awaitable[batch.ListJobsResponse]]: - r"""Return a callable for the list jobs method over gRPC. - - List all Jobs for a project within a region. - - Returns: - Callable[[~.ListJobsRequest], - Awaitable[~.ListJobsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_jobs' not in self._stubs: - self._stubs['list_jobs'] = self._logged_channel.unary_unary( - '/google.cloud.batch.v1alpha.BatchService/ListJobs', - request_serializer=batch.ListJobsRequest.serialize, - response_deserializer=batch.ListJobsResponse.deserialize, - ) - return self._stubs['list_jobs'] - - @property - def get_task(self) -> Callable[ - [batch.GetTaskRequest], - Awaitable[task.Task]]: - r"""Return a callable for the get task method over gRPC. - - Return a single Task. - - Returns: - Callable[[~.GetTaskRequest], - Awaitable[~.Task]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_task' not in self._stubs: - self._stubs['get_task'] = self._logged_channel.unary_unary( - '/google.cloud.batch.v1alpha.BatchService/GetTask', - request_serializer=batch.GetTaskRequest.serialize, - response_deserializer=task.Task.deserialize, - ) - return self._stubs['get_task'] - - @property - def list_tasks(self) -> Callable[ - [batch.ListTasksRequest], - Awaitable[batch.ListTasksResponse]]: - r"""Return a callable for the list tasks method over gRPC. - - List Tasks associated with a job. - - Returns: - Callable[[~.ListTasksRequest], - Awaitable[~.ListTasksResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_tasks' not in self._stubs: - self._stubs['list_tasks'] = self._logged_channel.unary_unary( - '/google.cloud.batch.v1alpha.BatchService/ListTasks', - request_serializer=batch.ListTasksRequest.serialize, - response_deserializer=batch.ListTasksResponse.deserialize, - ) - return self._stubs['list_tasks'] - - @property - def create_resource_allowance(self) -> Callable[ - [batch.CreateResourceAllowanceRequest], - Awaitable[gcb_resource_allowance.ResourceAllowance]]: - r"""Return a callable for the create resource allowance method over gRPC. - - Create a Resource Allowance. - - Returns: - Callable[[~.CreateResourceAllowanceRequest], - Awaitable[~.ResourceAllowance]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_resource_allowance' not in self._stubs: - self._stubs['create_resource_allowance'] = self._logged_channel.unary_unary( - '/google.cloud.batch.v1alpha.BatchService/CreateResourceAllowance', - request_serializer=batch.CreateResourceAllowanceRequest.serialize, - response_deserializer=gcb_resource_allowance.ResourceAllowance.deserialize, - ) - return self._stubs['create_resource_allowance'] - - @property - def get_resource_allowance(self) -> Callable[ - [batch.GetResourceAllowanceRequest], - Awaitable[resource_allowance.ResourceAllowance]]: - r"""Return a callable for the get resource allowance method over gRPC. - - Get a ResourceAllowance specified by its resource - name. - - Returns: - Callable[[~.GetResourceAllowanceRequest], - Awaitable[~.ResourceAllowance]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_resource_allowance' not in self._stubs: - self._stubs['get_resource_allowance'] = self._logged_channel.unary_unary( - '/google.cloud.batch.v1alpha.BatchService/GetResourceAllowance', - request_serializer=batch.GetResourceAllowanceRequest.serialize, - response_deserializer=resource_allowance.ResourceAllowance.deserialize, - ) - return self._stubs['get_resource_allowance'] - - @property - def delete_resource_allowance(self) -> Callable[ - [batch.DeleteResourceAllowanceRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete resource allowance method over gRPC. - - Delete a ResourceAllowance. - - Returns: - Callable[[~.DeleteResourceAllowanceRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_resource_allowance' not in self._stubs: - self._stubs['delete_resource_allowance'] = self._logged_channel.unary_unary( - '/google.cloud.batch.v1alpha.BatchService/DeleteResourceAllowance', - request_serializer=batch.DeleteResourceAllowanceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_resource_allowance'] - - @property - def list_resource_allowances(self) -> Callable[ - [batch.ListResourceAllowancesRequest], - Awaitable[batch.ListResourceAllowancesResponse]]: - r"""Return a callable for the list resource allowances method over gRPC. - - List all ResourceAllowances for a project within a - region. - - Returns: - Callable[[~.ListResourceAllowancesRequest], - Awaitable[~.ListResourceAllowancesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_resource_allowances' not in self._stubs: - self._stubs['list_resource_allowances'] = self._logged_channel.unary_unary( - '/google.cloud.batch.v1alpha.BatchService/ListResourceAllowances', - request_serializer=batch.ListResourceAllowancesRequest.serialize, - response_deserializer=batch.ListResourceAllowancesResponse.deserialize, - ) - return self._stubs['list_resource_allowances'] - - @property - def update_resource_allowance(self) -> Callable[ - [batch.UpdateResourceAllowanceRequest], - Awaitable[gcb_resource_allowance.ResourceAllowance]]: - r"""Return a callable for the update resource allowance method over gRPC. - - Update a Resource Allowance. - - Returns: - Callable[[~.UpdateResourceAllowanceRequest], - Awaitable[~.ResourceAllowance]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_resource_allowance' not in self._stubs: - self._stubs['update_resource_allowance'] = self._logged_channel.unary_unary( - '/google.cloud.batch.v1alpha.BatchService/UpdateResourceAllowance', - request_serializer=batch.UpdateResourceAllowanceRequest.serialize, - response_deserializer=gcb_resource_allowance.ResourceAllowance.deserialize, - ) - return self._stubs['update_resource_allowance'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.create_job: self._wrap_method( - self.create_job, - default_timeout=60.0, - client_info=client_info, - ), - self.get_job: self._wrap_method( - self.get_job, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.delete_job: self._wrap_method( - self.delete_job, - default_timeout=60.0, - client_info=client_info, - ), - self.cancel_job: self._wrap_method( - self.cancel_job, - default_timeout=60.0, - client_info=client_info, - ), - self.update_job: self._wrap_method( - self.update_job, - default_timeout=60.0, - client_info=client_info, - ), - self.list_jobs: self._wrap_method( - self.list_jobs, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_task: self._wrap_method( - self.get_task, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_tasks: self._wrap_method( - self.list_tasks, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_resource_allowance: self._wrap_method( - self.create_resource_allowance, - default_timeout=60.0, - client_info=client_info, - ), - self.get_resource_allowance: self._wrap_method( - self.get_resource_allowance, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, 
- multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.delete_resource_allowance: self._wrap_method( - self.delete_resource_allowance, - default_timeout=60.0, - client_info=client_info, - ), - self.list_resource_allowances: self._wrap_method( - self.list_resource_allowances, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.update_resource_allowance: self._wrap_method( - self.update_resource_allowance, - default_timeout=60.0, - client_info=client_info, - ), - self.get_location: self._wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - self.list_locations: self._wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: self._wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: self._wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: self._wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: self._wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
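The ``_prep_wrapped_messages`` table above attaches the same retry policy to every idempotent read (``get_job``, ``list_jobs``, ``get_task``, and so on): exponential backoff from 1s up to 10s, retrying only ``ServiceUnavailable``, with a 60s overall deadline. A sketch of that policy applied to a stand-in coroutine (``fetch`` is illustrative, not a generated method):

.. code-block:: python

    import asyncio

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry_async as retries

    # The same policy shape the transport builds for its read methods.
    retry = retries.AsyncRetry(
        initial=1.0,
        maximum=10.0,
        multiplier=1.3,
        predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
        deadline=60.0,
    )

    @retry
    async def fetch() -> str:
        # A real transport retries the underlying gRPC call here.
        return "ok"

    print(asyncio.run(fetch()))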
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if "get_operation" not in self._stubs:
-            self._stubs["get_operation"] = self._logged_channel.unary_unary(
-                "/google.longrunning.Operations/GetOperation",
-                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
-                response_deserializer=operations_pb2.Operation.FromString,
-            )
-        return self._stubs["get_operation"]
-
-    @property
-    def list_operations(
-        self,
-    ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]:
-        r"""Return a callable for the list_operations method over gRPC.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if "list_operations" not in self._stubs:
-            self._stubs["list_operations"] = self._logged_channel.unary_unary(
-                "/google.longrunning.Operations/ListOperations",
-                request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
-                response_deserializer=operations_pb2.ListOperationsResponse.FromString,
-            )
-        return self._stubs["list_operations"]
-
-    @property
-    def list_locations(
-        self,
-    ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]:
-        r"""Return a callable for the list locations method over gRPC.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if "list_locations" not in self._stubs:
-            self._stubs["list_locations"] = self._logged_channel.unary_unary(
-                "/google.cloud.location.Locations/ListLocations",
-                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
-                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
-            )
-        return self._stubs["list_locations"]
-
-    @property
-    def get_location(
-        self,
-    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
-        r"""Return a callable for the get location method over gRPC.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if "get_location" not in self._stubs:
-            self._stubs["get_location"] = self._logged_channel.unary_unary(
-                "/google.cloud.location.Locations/GetLocation",
-                request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
-                response_deserializer=locations_pb2.Location.FromString,
-            )
-        return self._stubs["get_location"]
-
-
-__all__ = (
-    'BatchServiceGrpcAsyncIOTransport',
-)
diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/services/batch_service/transports/rest.py b/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/services/batch_service/transports/rest.py
deleted file mode 100644
index 23421f7f0a71..000000000000
--- a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/services/batch_service/transports/rest.py
+++ /dev/null
@@ -1,3275 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import logging -import json # type: ignore - -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.api_core import operations_v1 -from google.cloud.location import locations_pb2 # type: ignore - -from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - - -from google.cloud.batch_v1alpha.types import batch -from google.cloud.batch_v1alpha.types import job -from google.cloud.batch_v1alpha.types import job as gcb_job -from google.cloud.batch_v1alpha.types import resource_allowance -from google.cloud.batch_v1alpha.types import resource_allowance as gcb_resource_allowance -from google.cloud.batch_v1alpha.types import task -from google.longrunning import operations_pb2 # type: ignore - - -from .rest_base import _BaseBatchServiceRestTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = logging.getLogger(__name__) - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=f"requests@{requests_version}", -) - - -class BatchServiceRestInterceptor: - """Interceptor for BatchService. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the BatchServiceRestTransport. - - .. 
code-block:: python - class MyCustomBatchServiceInterceptor(BatchServiceRestInterceptor): - def pre_cancel_job(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_cancel_job(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_job(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_job(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_resource_allowance(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_resource_allowance(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_job(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_delete_job(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_resource_allowance(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_delete_resource_allowance(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_job(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_job(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_resource_allowance(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_resource_allowance(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_task(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_task(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_jobs(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_jobs(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_resource_allowances(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_resource_allowances(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_tasks(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_tasks(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_job(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_job(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_resource_allowance(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_resource_allowance(self, response): - logging.log(f"Received response: {response}") - return response - - transport = BatchServiceRestTransport(interceptor=MyCustomBatchServiceInterceptor()) - client = BatchServiceClient(transport=transport) - - - """ - def pre_cancel_job(self, request: batch.CancelJobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[batch.CancelJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc 
interceptor for cancel_job - - Override in a subclass to manipulate the request or metadata - before they are sent to the BatchService server. - """ - return request, metadata - - def post_cancel_job(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for cancel_job - - DEPRECATED. Please use the `post_cancel_job_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BatchService server but before - it is returned to user code. This `post_cancel_job` interceptor runs - before the `post_cancel_job_with_metadata` interceptor. - """ - return response - - def post_cancel_job_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for cancel_job - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BatchService server but before it is returned to user code. - - We recommend only using this `post_cancel_job_with_metadata` - interceptor in new development instead of the `post_cancel_job` interceptor. - When both interceptors are used, this `post_cancel_job_with_metadata` interceptor runs after the - `post_cancel_job` interceptor. The (possibly modified) response returned by - `post_cancel_job` will be passed to - `post_cancel_job_with_metadata`. - """ - return response, metadata - - def pre_create_job(self, request: batch.CreateJobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[batch.CreateJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_job - - Override in a subclass to manipulate the request or metadata - before they are sent to the BatchService server. - """ - return request, metadata - - def post_create_job(self, response: gcb_job.Job) -> gcb_job.Job: - """Post-rpc interceptor for create_job - - DEPRECATED. Please use the `post_create_job_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BatchService server but before - it is returned to user code. This `post_create_job` interceptor runs - before the `post_create_job_with_metadata` interceptor. - """ - return response - - def post_create_job_with_metadata(self, response: gcb_job.Job, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gcb_job.Job, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_job - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BatchService server but before it is returned to user code. - - We recommend only using this `post_create_job_with_metadata` - interceptor in new development instead of the `post_create_job` interceptor. - When both interceptors are used, this `post_create_job_with_metadata` interceptor runs after the - `post_create_job` interceptor. The (possibly modified) response returned by - `post_create_job` will be passed to - `post_create_job_with_metadata`. 
- """ - return response, metadata - - def pre_create_resource_allowance(self, request: batch.CreateResourceAllowanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[batch.CreateResourceAllowanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_resource_allowance - - Override in a subclass to manipulate the request or metadata - before they are sent to the BatchService server. - """ - return request, metadata - - def post_create_resource_allowance(self, response: gcb_resource_allowance.ResourceAllowance) -> gcb_resource_allowance.ResourceAllowance: - """Post-rpc interceptor for create_resource_allowance - - DEPRECATED. Please use the `post_create_resource_allowance_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BatchService server but before - it is returned to user code. This `post_create_resource_allowance` interceptor runs - before the `post_create_resource_allowance_with_metadata` interceptor. - """ - return response - - def post_create_resource_allowance_with_metadata(self, response: gcb_resource_allowance.ResourceAllowance, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gcb_resource_allowance.ResourceAllowance, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_resource_allowance - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BatchService server but before it is returned to user code. - - We recommend only using this `post_create_resource_allowance_with_metadata` - interceptor in new development instead of the `post_create_resource_allowance` interceptor. - When both interceptors are used, this `post_create_resource_allowance_with_metadata` interceptor runs after the - `post_create_resource_allowance` interceptor. The (possibly modified) response returned by - `post_create_resource_allowance` will be passed to - `post_create_resource_allowance_with_metadata`. - """ - return response, metadata - - def pre_delete_job(self, request: batch.DeleteJobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[batch.DeleteJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_job - - Override in a subclass to manipulate the request or metadata - before they are sent to the BatchService server. - """ - return request, metadata - - def post_delete_job(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_job - - DEPRECATED. Please use the `post_delete_job_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BatchService server but before - it is returned to user code. This `post_delete_job` interceptor runs - before the `post_delete_job_with_metadata` interceptor. - """ - return response - - def post_delete_job_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for delete_job - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BatchService server but before it is returned to user code. - - We recommend only using this `post_delete_job_with_metadata` - interceptor in new development instead of the `post_delete_job` interceptor. 
- When both interceptors are used, this `post_delete_job_with_metadata` interceptor runs after the - `post_delete_job` interceptor. The (possibly modified) response returned by - `post_delete_job` will be passed to - `post_delete_job_with_metadata`. - """ - return response, metadata - - def pre_delete_resource_allowance(self, request: batch.DeleteResourceAllowanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[batch.DeleteResourceAllowanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_resource_allowance - - Override in a subclass to manipulate the request or metadata - before they are sent to the BatchService server. - """ - return request, metadata - - def post_delete_resource_allowance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_resource_allowance - - DEPRECATED. Please use the `post_delete_resource_allowance_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BatchService server but before - it is returned to user code. This `post_delete_resource_allowance` interceptor runs - before the `post_delete_resource_allowance_with_metadata` interceptor. - """ - return response - - def post_delete_resource_allowance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for delete_resource_allowance - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BatchService server but before it is returned to user code. - - We recommend only using this `post_delete_resource_allowance_with_metadata` - interceptor in new development instead of the `post_delete_resource_allowance` interceptor. - When both interceptors are used, this `post_delete_resource_allowance_with_metadata` interceptor runs after the - `post_delete_resource_allowance` interceptor. The (possibly modified) response returned by - `post_delete_resource_allowance` will be passed to - `post_delete_resource_allowance_with_metadata`. - """ - return response, metadata - - def pre_get_job(self, request: batch.GetJobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[batch.GetJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_job - - Override in a subclass to manipulate the request or metadata - before they are sent to the BatchService server. - """ - return request, metadata - - def post_get_job(self, response: job.Job) -> job.Job: - """Post-rpc interceptor for get_job - - DEPRECATED. Please use the `post_get_job_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BatchService server but before - it is returned to user code. This `post_get_job` interceptor runs - before the `post_get_job_with_metadata` interceptor. - """ - return response - - def post_get_job_with_metadata(self, response: job.Job, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[job.Job, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_job - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BatchService server but before it is returned to user code. 
- - We recommend only using this `post_get_job_with_metadata` - interceptor in new development instead of the `post_get_job` interceptor. - When both interceptors are used, this `post_get_job_with_metadata` interceptor runs after the - `post_get_job` interceptor. The (possibly modified) response returned by - `post_get_job` will be passed to - `post_get_job_with_metadata`. - """ - return response, metadata - - def pre_get_resource_allowance(self, request: batch.GetResourceAllowanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[batch.GetResourceAllowanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_resource_allowance - - Override in a subclass to manipulate the request or metadata - before they are sent to the BatchService server. - """ - return request, metadata - - def post_get_resource_allowance(self, response: resource_allowance.ResourceAllowance) -> resource_allowance.ResourceAllowance: - """Post-rpc interceptor for get_resource_allowance - - DEPRECATED. Please use the `post_get_resource_allowance_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BatchService server but before - it is returned to user code. This `post_get_resource_allowance` interceptor runs - before the `post_get_resource_allowance_with_metadata` interceptor. - """ - return response - - def post_get_resource_allowance_with_metadata(self, response: resource_allowance.ResourceAllowance, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[resource_allowance.ResourceAllowance, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_resource_allowance - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BatchService server but before it is returned to user code. - - We recommend only using this `post_get_resource_allowance_with_metadata` - interceptor in new development instead of the `post_get_resource_allowance` interceptor. - When both interceptors are used, this `post_get_resource_allowance_with_metadata` interceptor runs after the - `post_get_resource_allowance` interceptor. The (possibly modified) response returned by - `post_get_resource_allowance` will be passed to - `post_get_resource_allowance_with_metadata`. - """ - return response, metadata - - def pre_get_task(self, request: batch.GetTaskRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[batch.GetTaskRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_task - - Override in a subclass to manipulate the request or metadata - before they are sent to the BatchService server. - """ - return request, metadata - - def post_get_task(self, response: task.Task) -> task.Task: - """Post-rpc interceptor for get_task - - DEPRECATED. Please use the `post_get_task_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BatchService server but before - it is returned to user code. This `post_get_task` interceptor runs - before the `post_get_task_with_metadata` interceptor. 
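To make the deprecation notes concrete: when both hooks are defined, the plain `post_*` hook runs first, and its return value is what the `*_with_metadata` hook receives. A hedged sketch for `get_task`:

```python
from google.cloud.batch_v1alpha.services.batch_service.transports import (
    BatchServiceRestInterceptor,
)


class TaskResponseHook(BatchServiceRestInterceptor):
    # Deprecated form: still runs, and runs first.
    def post_get_task(self, response):
        return response

    # Recommended form: sees the (possibly modified) object returned by
    # post_get_task above plus the response metadata, and must return both.
    def post_get_task_with_metadata(self, response, metadata):
        print("task state:", response.status.state.name)  # illustrative
        return response, metadata
```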
- """ - return response - - def post_get_task_with_metadata(self, response: task.Task, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[task.Task, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_task - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BatchService server but before it is returned to user code. - - We recommend only using this `post_get_task_with_metadata` - interceptor in new development instead of the `post_get_task` interceptor. - When both interceptors are used, this `post_get_task_with_metadata` interceptor runs after the - `post_get_task` interceptor. The (possibly modified) response returned by - `post_get_task` will be passed to - `post_get_task_with_metadata`. - """ - return response, metadata - - def pre_list_jobs(self, request: batch.ListJobsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[batch.ListJobsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_jobs - - Override in a subclass to manipulate the request or metadata - before they are sent to the BatchService server. - """ - return request, metadata - - def post_list_jobs(self, response: batch.ListJobsResponse) -> batch.ListJobsResponse: - """Post-rpc interceptor for list_jobs - - DEPRECATED. Please use the `post_list_jobs_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BatchService server but before - it is returned to user code. This `post_list_jobs` interceptor runs - before the `post_list_jobs_with_metadata` interceptor. - """ - return response - - def post_list_jobs_with_metadata(self, response: batch.ListJobsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[batch.ListJobsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_jobs - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BatchService server but before it is returned to user code. - - We recommend only using this `post_list_jobs_with_metadata` - interceptor in new development instead of the `post_list_jobs` interceptor. - When both interceptors are used, this `post_list_jobs_with_metadata` interceptor runs after the - `post_list_jobs` interceptor. The (possibly modified) response returned by - `post_list_jobs` will be passed to - `post_list_jobs_with_metadata`. - """ - return response, metadata - - def pre_list_resource_allowances(self, request: batch.ListResourceAllowancesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[batch.ListResourceAllowancesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_resource_allowances - - Override in a subclass to manipulate the request or metadata - before they are sent to the BatchService server. - """ - return request, metadata - - def post_list_resource_allowances(self, response: batch.ListResourceAllowancesResponse) -> batch.ListResourceAllowancesResponse: - """Post-rpc interceptor for list_resource_allowances - - DEPRECATED. Please use the `post_list_resource_allowances_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BatchService server but before - it is returned to user code. This `post_list_resource_allowances` interceptor runs - before the `post_list_resource_allowances_with_metadata` interceptor. 
- """ - return response - - def post_list_resource_allowances_with_metadata(self, response: batch.ListResourceAllowancesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[batch.ListResourceAllowancesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_resource_allowances - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BatchService server but before it is returned to user code. - - We recommend only using this `post_list_resource_allowances_with_metadata` - interceptor in new development instead of the `post_list_resource_allowances` interceptor. - When both interceptors are used, this `post_list_resource_allowances_with_metadata` interceptor runs after the - `post_list_resource_allowances` interceptor. The (possibly modified) response returned by - `post_list_resource_allowances` will be passed to - `post_list_resource_allowances_with_metadata`. - """ - return response, metadata - - def pre_list_tasks(self, request: batch.ListTasksRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[batch.ListTasksRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_tasks - - Override in a subclass to manipulate the request or metadata - before they are sent to the BatchService server. - """ - return request, metadata - - def post_list_tasks(self, response: batch.ListTasksResponse) -> batch.ListTasksResponse: - """Post-rpc interceptor for list_tasks - - DEPRECATED. Please use the `post_list_tasks_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BatchService server but before - it is returned to user code. This `post_list_tasks` interceptor runs - before the `post_list_tasks_with_metadata` interceptor. - """ - return response - - def post_list_tasks_with_metadata(self, response: batch.ListTasksResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[batch.ListTasksResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_tasks - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BatchService server but before it is returned to user code. - - We recommend only using this `post_list_tasks_with_metadata` - interceptor in new development instead of the `post_list_tasks` interceptor. - When both interceptors are used, this `post_list_tasks_with_metadata` interceptor runs after the - `post_list_tasks` interceptor. The (possibly modified) response returned by - `post_list_tasks` will be passed to - `post_list_tasks_with_metadata`. - """ - return response, metadata - - def pre_update_job(self, request: batch.UpdateJobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[batch.UpdateJobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_job - - Override in a subclass to manipulate the request or metadata - before they are sent to the BatchService server. - """ - return request, metadata - - def post_update_job(self, response: gcb_job.Job) -> gcb_job.Job: - """Post-rpc interceptor for update_job - - DEPRECATED. Please use the `post_update_job_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BatchService server but before - it is returned to user code. This `post_update_job` interceptor runs - before the `post_update_job_with_metadata` interceptor. 
- """ - return response - - def post_update_job_with_metadata(self, response: gcb_job.Job, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gcb_job.Job, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_job - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BatchService server but before it is returned to user code. - - We recommend only using this `post_update_job_with_metadata` - interceptor in new development instead of the `post_update_job` interceptor. - When both interceptors are used, this `post_update_job_with_metadata` interceptor runs after the - `post_update_job` interceptor. The (possibly modified) response returned by - `post_update_job` will be passed to - `post_update_job_with_metadata`. - """ - return response, metadata - - def pre_update_resource_allowance(self, request: batch.UpdateResourceAllowanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[batch.UpdateResourceAllowanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_resource_allowance - - Override in a subclass to manipulate the request or metadata - before they are sent to the BatchService server. - """ - return request, metadata - - def post_update_resource_allowance(self, response: gcb_resource_allowance.ResourceAllowance) -> gcb_resource_allowance.ResourceAllowance: - """Post-rpc interceptor for update_resource_allowance - - DEPRECATED. Please use the `post_update_resource_allowance_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BatchService server but before - it is returned to user code. This `post_update_resource_allowance` interceptor runs - before the `post_update_resource_allowance_with_metadata` interceptor. - """ - return response - - def post_update_resource_allowance_with_metadata(self, response: gcb_resource_allowance.ResourceAllowance, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gcb_resource_allowance.ResourceAllowance, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_resource_allowance - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BatchService server but before it is returned to user code. - - We recommend only using this `post_update_resource_allowance_with_metadata` - interceptor in new development instead of the `post_update_resource_allowance` interceptor. - When both interceptors are used, this `post_update_resource_allowance_with_metadata` interceptor runs after the - `post_update_resource_allowance` interceptor. The (possibly modified) response returned by - `post_update_resource_allowance` will be passed to - `post_update_resource_allowance_with_metadata`. - """ - return response, metadata - - def pre_get_location( - self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_location - - Override in a subclass to manipulate the request or metadata - before they are sent to the BatchService server. 
- """ - return request, metadata - - def post_get_location( - self, response: locations_pb2.Location - ) -> locations_pb2.Location: - """Post-rpc interceptor for get_location - - Override in a subclass to manipulate the response - after it is returned by the BatchService server but before - it is returned to user code. - """ - return response - - def pre_list_locations( - self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_locations - - Override in a subclass to manipulate the request or metadata - before they are sent to the BatchService server. - """ - return request, metadata - - def post_list_locations( - self, response: locations_pb2.ListLocationsResponse - ) -> locations_pb2.ListLocationsResponse: - """Post-rpc interceptor for list_locations - - Override in a subclass to manipulate the response - after it is returned by the BatchService server but before - it is returned to user code. - """ - return response - - def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the BatchService server. - """ - return request, metadata - - def post_cancel_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the response - after it is returned by the BatchService server but before - it is returned to user code. - """ - return response - - def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the BatchService server. - """ - return request, metadata - - def post_delete_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for delete_operation - - Override in a subclass to manipulate the response - after it is returned by the BatchService server but before - it is returned to user code. - """ - return response - - def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the BatchService server. - """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the BatchService server but before - it is returned to user code. 
- """ - return response - - def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_operations - - Override in a subclass to manipulate the request or metadata - before they are sent to the BatchService server. - """ - return request, metadata - - def post_list_operations( - self, response: operations_pb2.ListOperationsResponse - ) -> operations_pb2.ListOperationsResponse: - """Post-rpc interceptor for list_operations - - Override in a subclass to manipulate the response - after it is returned by the BatchService server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class BatchServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: BatchServiceRestInterceptor - - -class BatchServiceRestTransport(_BaseBatchServiceRestTransport): - """REST backend synchronous transport for BatchService. - - Google Batch Service. - The service manages user submitted batch jobs and allocates - Google Compute Engine VM instances to run the jobs. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'batch.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[BatchServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'batch.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. 
- """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. - # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - url_scheme=url_scheme, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or BatchServiceRestInterceptor() - self._prep_wrapped_messages(client_info) - - @property - def operations_client(self) -> operations_v1.AbstractOperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Only create a new client if we do not already have one. - if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.CancelOperation': [ - { - 'method': 'post', - 'uri': '/v1alpha/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - ], - 'google.longrunning.Operations.DeleteOperation': [ - { - 'method': 'delete', - 'uri': '/v1alpha/{name=projects/*/locations/*/operations/*}', - }, - ], - 'google.longrunning.Operations.GetOperation': [ - { - 'method': 'get', - 'uri': '/v1alpha/{name=projects/*/locations/*/operations/*}', - }, - ], - 'google.longrunning.Operations.ListOperations': [ - { - 'method': 'get', - 'uri': '/v1alpha/{name=projects/*/locations/*}/operations', - }, - ], - } - - rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1alpha") - - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) - - # Return the client from cache. - return self._operations_client - - class _CancelJob(_BaseBatchServiceRestTransport._BaseCancelJob, BatchServiceRestStub): - def __hash__(self): - return hash("BatchServiceRestTransport.CancelJob") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: batch.CancelJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the cancel job method over HTTP. - - Args: - request (~.batch.CancelJobRequest): - The request object. CancelJob Request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseBatchServiceRestTransport._BaseCancelJob._get_http_options() - - request, metadata = self._interceptor.pre_cancel_job(request, metadata) - transcoded_request = _BaseBatchServiceRestTransport._BaseCancelJob._get_transcoded_request(http_options, request) - - body = _BaseBatchServiceRestTransport._BaseCancelJob._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBatchServiceRestTransport._BaseCancelJob._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.batch_v1alpha.BatchServiceClient.CancelJob", - extra = { - "serviceName": "google.cloud.batch.v1alpha.BatchService", - "rpcName": "CancelJob", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BatchServiceRestTransport._CancelJob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_cancel_job(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_cancel_job_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.batch_v1alpha.BatchServiceClient.cancel_job", - extra = { - "serviceName": "google.cloud.batch.v1alpha.BatchService", - "rpcName": "CancelJob", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateJob(_BaseBatchServiceRestTransport._BaseCreateJob, BatchServiceRestStub): - def __hash__(self): - return hash("BatchServiceRestTransport.CreateJob") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: batch.CreateJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> gcb_job.Job: - r"""Call the create job method over HTTP. - - Args: - request (~.batch.CreateJobRequest): - The request object. CreateJob Request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.gcb_job.Job: - The Cloud Batch Job description. 
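At the public-client layer, the CreateJob call implemented here is reached via `BatchServiceClient.create_job`. A hedged, minimal sketch (project, location, and job IDs are placeholders):

```python
from google.cloud import batch_v1alpha

client = batch_v1alpha.BatchServiceClient()  # gRPC by default; REST if configured

# One task group with a single script runnable.
job = batch_v1alpha.Job(
    task_groups=[
        batch_v1alpha.TaskGroup(
            task_spec=batch_v1alpha.TaskSpec(
                runnables=[
                    batch_v1alpha.Runnable(
                        script=batch_v1alpha.Runnable.Script(text="echo hello")
                    )
                ]
            )
        )
    ]
)
created = client.create_job(
    parent="projects/my-project/locations/us-central1",
    job=job,
    job_id="example-job",
)
print(created.name)
```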
- """ - - http_options = _BaseBatchServiceRestTransport._BaseCreateJob._get_http_options() - - request, metadata = self._interceptor.pre_create_job(request, metadata) - transcoded_request = _BaseBatchServiceRestTransport._BaseCreateJob._get_transcoded_request(http_options, request) - - body = _BaseBatchServiceRestTransport._BaseCreateJob._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBatchServiceRestTransport._BaseCreateJob._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.batch_v1alpha.BatchServiceClient.CreateJob", - extra = { - "serviceName": "google.cloud.batch.v1alpha.BatchService", - "rpcName": "CreateJob", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BatchServiceRestTransport._CreateJob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = gcb_job.Job() - pb_resp = gcb_job.Job.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_job(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_job_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = gcb_job.Job.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.batch_v1alpha.BatchServiceClient.create_job", - extra = { - "serviceName": "google.cloud.batch.v1alpha.BatchService", - "rpcName": "CreateJob", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateResourceAllowance(_BaseBatchServiceRestTransport._BaseCreateResourceAllowance, BatchServiceRestStub): - def __hash__(self): - return hash("BatchServiceRestTransport.CreateResourceAllowance") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: batch.CreateResourceAllowanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> 
gcb_resource_allowance.ResourceAllowance: - r"""Call the create resource allowance method over HTTP. - - Args: - request (~.batch.CreateResourceAllowanceRequest): - The request object. CreateResourceAllowance Request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.gcb_resource_allowance.ResourceAllowance: - The Resource Allowance description - for Cloud Batch. Only one Resource - Allowance is supported now under a - specific location and project. - - """ - - http_options = _BaseBatchServiceRestTransport._BaseCreateResourceAllowance._get_http_options() - - request, metadata = self._interceptor.pre_create_resource_allowance(request, metadata) - transcoded_request = _BaseBatchServiceRestTransport._BaseCreateResourceAllowance._get_transcoded_request(http_options, request) - - body = _BaseBatchServiceRestTransport._BaseCreateResourceAllowance._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBatchServiceRestTransport._BaseCreateResourceAllowance._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.batch_v1alpha.BatchServiceClient.CreateResourceAllowance", - extra = { - "serviceName": "google.cloud.batch.v1alpha.BatchService", - "rpcName": "CreateResourceAllowance", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BatchServiceRestTransport._CreateResourceAllowance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = gcb_resource_allowance.ResourceAllowance() - pb_resp = gcb_resource_allowance.ResourceAllowance.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_resource_allowance(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_resource_allowance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = gcb_resource_allowance.ResourceAllowance.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.batch_v1alpha.BatchServiceClient.create_resource_allowance", - extra = { - "serviceName": "google.cloud.batch.v1alpha.BatchService", - "rpcName": "CreateResourceAllowance", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteJob(_BaseBatchServiceRestTransport._BaseDeleteJob, BatchServiceRestStub): - def __hash__(self): - return hash("BatchServiceRestTransport.DeleteJob") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: batch.DeleteJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete job method over HTTP. - - Args: - request (~.batch.DeleteJobRequest): - The request object. DeleteJob Request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
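The raw `operations_pb2.Operation` returned here is wrapped in a future-like `google.api_core.operation.Operation` at the client layer, so callers can block on completion. Sketch (resource names are placeholders):

```python
from google.cloud import batch_v1alpha

client = batch_v1alpha.BatchServiceClient()
op = client.delete_job(
    name="projects/my-project/locations/us-central1/jobs/example-job"
)
op.result(timeout=300)  # poll until the deletion finishes, or raise
```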
- - """ - - http_options = _BaseBatchServiceRestTransport._BaseDeleteJob._get_http_options() - - request, metadata = self._interceptor.pre_delete_job(request, metadata) - transcoded_request = _BaseBatchServiceRestTransport._BaseDeleteJob._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBatchServiceRestTransport._BaseDeleteJob._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.batch_v1alpha.BatchServiceClient.DeleteJob", - extra = { - "serviceName": "google.cloud.batch.v1alpha.BatchService", - "rpcName": "DeleteJob", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BatchServiceRestTransport._DeleteJob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_delete_job(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_job_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.batch_v1alpha.BatchServiceClient.delete_job", - extra = { - "serviceName": "google.cloud.batch.v1alpha.BatchService", - "rpcName": "DeleteJob", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteResourceAllowance(_BaseBatchServiceRestTransport._BaseDeleteResourceAllowance, BatchServiceRestStub): - def __hash__(self): - return hash("BatchServiceRestTransport.DeleteResourceAllowance") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: batch.DeleteResourceAllowanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete resource allowance method over HTTP. - - Args: - request (~.batch.DeleteResourceAllowanceRequest): - The request object. 
DeleteResourceAllowance Request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseBatchServiceRestTransport._BaseDeleteResourceAllowance._get_http_options() - - request, metadata = self._interceptor.pre_delete_resource_allowance(request, metadata) - transcoded_request = _BaseBatchServiceRestTransport._BaseDeleteResourceAllowance._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBatchServiceRestTransport._BaseDeleteResourceAllowance._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.batch_v1alpha.BatchServiceClient.DeleteResourceAllowance", - extra = { - "serviceName": "google.cloud.batch.v1alpha.BatchService", - "rpcName": "DeleteResourceAllowance", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BatchServiceRestTransport._DeleteResourceAllowance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_delete_resource_allowance(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_resource_allowance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.batch_v1alpha.BatchServiceClient.delete_resource_allowance", - extra = { - "serviceName": "google.cloud.batch.v1alpha.BatchService", - "rpcName": "DeleteResourceAllowance", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetJob(_BaseBatchServiceRestTransport._BaseGetJob, BatchServiceRestStub): - def __hash__(self): - return hash("BatchServiceRestTransport.GetJob") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: batch.GetJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> job.Job: - r"""Call the get job method over HTTP. - - Args: - request (~.batch.GetJobRequest): - The request object. GetJob Request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.job.Job: - The Cloud Batch Job description. 
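Failed calls are surfaced through `core_exceptions.from_http_response`, which maps HTTP status codes onto `GoogleAPICallError` subclasses; user code typically catches those rather than inspecting status codes. Sketch:

```python
from google.api_core import exceptions
from google.cloud import batch_v1alpha

client = batch_v1alpha.BatchServiceClient()
try:
    job = client.get_job(
        name="projects/my-project/locations/us-central1/jobs/example-job"
    )
    print(job.status.state.name)
except exceptions.NotFound:
    # A 404 from the service arrives here as NotFound.
    print("no such job")
```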
- """ - - http_options = _BaseBatchServiceRestTransport._BaseGetJob._get_http_options() - - request, metadata = self._interceptor.pre_get_job(request, metadata) - transcoded_request = _BaseBatchServiceRestTransport._BaseGetJob._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBatchServiceRestTransport._BaseGetJob._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.batch_v1alpha.BatchServiceClient.GetJob", - extra = { - "serviceName": "google.cloud.batch.v1alpha.BatchService", - "rpcName": "GetJob", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BatchServiceRestTransport._GetJob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = job.Job() - pb_resp = job.Job.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_job(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_job_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = job.Job.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.batch_v1alpha.BatchServiceClient.get_job", - extra = { - "serviceName": "google.cloud.batch.v1alpha.BatchService", - "rpcName": "GetJob", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetResourceAllowance(_BaseBatchServiceRestTransport._BaseGetResourceAllowance, BatchServiceRestStub): - def __hash__(self): - return hash("BatchServiceRestTransport.GetResourceAllowance") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: batch.GetResourceAllowanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> resource_allowance.ResourceAllowance: - r"""Call the get resource allowance method over HTTP. - - Args: - request (~.batch.GetResourceAllowanceRequest): - The request object. GetResourceAllowance Request. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.resource_allowance.ResourceAllowance: - The Resource Allowance description - for Cloud Batch. Only one Resource - Allowance is supported now under a - specific location and project. - - """ - - http_options = _BaseBatchServiceRestTransport._BaseGetResourceAllowance._get_http_options() - - request, metadata = self._interceptor.pre_get_resource_allowance(request, metadata) - transcoded_request = _BaseBatchServiceRestTransport._BaseGetResourceAllowance._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBatchServiceRestTransport._BaseGetResourceAllowance._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.batch_v1alpha.BatchServiceClient.GetResourceAllowance", - extra = { - "serviceName": "google.cloud.batch.v1alpha.BatchService", - "rpcName": "GetResourceAllowance", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BatchServiceRestTransport._GetResourceAllowance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = resource_allowance.ResourceAllowance() - pb_resp = resource_allowance.ResourceAllowance.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_resource_allowance(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_resource_allowance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = resource_allowance.ResourceAllowance.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.batch_v1alpha.BatchServiceClient.get_resource_allowance", - extra = { - "serviceName": "google.cloud.batch.v1alpha.BatchService", - "rpcName": "GetResourceAllowance", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetTask(_BaseBatchServiceRestTransport._BaseGetTask, BatchServiceRestStub): - def __hash__(self): - return hash("BatchServiceRestTransport.GetTask") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: batch.GetTaskRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> task.Task: - r"""Call the get task method over HTTP. - - Args: - request (~.batch.GetTaskRequest): - The request object. Request for a single Task by name. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.task.Task: - A Cloud Batch task. 
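Individual tasks are addressed by their full resource name under a job's task group. A hedged sketch (the name format follows the public Batch resource layout; all IDs are placeholders):

```python
from google.cloud import batch_v1alpha

client = batch_v1alpha.BatchServiceClient()
task = client.get_task(
    name="projects/my-project/locations/us-central1"
         "/jobs/example-job/taskGroups/group0/tasks/0"
)
print(task.status.state.name)
```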
- """ - - http_options = _BaseBatchServiceRestTransport._BaseGetTask._get_http_options() - - request, metadata = self._interceptor.pre_get_task(request, metadata) - transcoded_request = _BaseBatchServiceRestTransport._BaseGetTask._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBatchServiceRestTransport._BaseGetTask._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.batch_v1alpha.BatchServiceClient.GetTask", - extra = { - "serviceName": "google.cloud.batch.v1alpha.BatchService", - "rpcName": "GetTask", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BatchServiceRestTransport._GetTask._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = task.Task() - pb_resp = task.Task.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_task(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_task_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = task.Task.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.batch_v1alpha.BatchServiceClient.get_task", - extra = { - "serviceName": "google.cloud.batch.v1alpha.BatchService", - "rpcName": "GetTask", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListJobs(_BaseBatchServiceRestTransport._BaseListJobs, BatchServiceRestStub): - def __hash__(self): - return hash("BatchServiceRestTransport.ListJobs") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: batch.ListJobsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> batch.ListJobsResponse: - r"""Call the list jobs method over HTTP. - - Args: - request (~.batch.ListJobsRequest): - The request object. ListJob Request. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.batch.ListJobsResponse: - ListJob Response. - """ - - http_options = _BaseBatchServiceRestTransport._BaseListJobs._get_http_options() - - request, metadata = self._interceptor.pre_list_jobs(request, metadata) - transcoded_request = _BaseBatchServiceRestTransport._BaseListJobs._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBatchServiceRestTransport._BaseListJobs._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.batch_v1alpha.BatchServiceClient.ListJobs", - extra = { - "serviceName": "google.cloud.batch.v1alpha.BatchService", - "rpcName": "ListJobs", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BatchServiceRestTransport._ListJobs._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = batch.ListJobsResponse() - pb_resp = batch.ListJobsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_jobs(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_jobs_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = batch.ListJobsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.batch_v1alpha.BatchServiceClient.list_jobs", - extra = { - "serviceName": "google.cloud.batch.v1alpha.BatchService", - "rpcName": "ListJobs", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListResourceAllowances(_BaseBatchServiceRestTransport._BaseListResourceAllowances, BatchServiceRestStub): - def __hash__(self): - return hash("BatchServiceRestTransport.ListResourceAllowances") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: batch.ListResourceAllowancesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> batch.ListResourceAllowancesResponse: - r"""Call the list resource allowances method over HTTP. - - Args: - request (~.batch.ListResourceAllowancesRequest): - The request object. ListResourceAllowances Request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.batch.ListResourceAllowancesResponse: - ListResourceAllowances Response. 
- """ - - http_options = _BaseBatchServiceRestTransport._BaseListResourceAllowances._get_http_options() - - request, metadata = self._interceptor.pre_list_resource_allowances(request, metadata) - transcoded_request = _BaseBatchServiceRestTransport._BaseListResourceAllowances._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBatchServiceRestTransport._BaseListResourceAllowances._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.batch_v1alpha.BatchServiceClient.ListResourceAllowances", - extra = { - "serviceName": "google.cloud.batch.v1alpha.BatchService", - "rpcName": "ListResourceAllowances", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BatchServiceRestTransport._ListResourceAllowances._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = batch.ListResourceAllowancesResponse() - pb_resp = batch.ListResourceAllowancesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_resource_allowances(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_resource_allowances_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = batch.ListResourceAllowancesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.batch_v1alpha.BatchServiceClient.list_resource_allowances", - extra = { - "serviceName": "google.cloud.batch.v1alpha.BatchService", - "rpcName": "ListResourceAllowances", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListTasks(_BaseBatchServiceRestTransport._BaseListTasks, BatchServiceRestStub): - def __hash__(self): - return hash("BatchServiceRestTransport.ListTasks") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: batch.ListTasksRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, 
bytes]]]=(), - ) -> batch.ListTasksResponse: - r"""Call the list tasks method over HTTP. - - Args: - request (~.batch.ListTasksRequest): - The request object. ListTasks Request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.batch.ListTasksResponse: - ListTasks Response. - """ - - http_options = _BaseBatchServiceRestTransport._BaseListTasks._get_http_options() - - request, metadata = self._interceptor.pre_list_tasks(request, metadata) - transcoded_request = _BaseBatchServiceRestTransport._BaseListTasks._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBatchServiceRestTransport._BaseListTasks._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.batch_v1alpha.BatchServiceClient.ListTasks", - extra = { - "serviceName": "google.cloud.batch.v1alpha.BatchService", - "rpcName": "ListTasks", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BatchServiceRestTransport._ListTasks._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = batch.ListTasksResponse() - pb_resp = batch.ListTasksResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_tasks(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_tasks_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = batch.ListTasksResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.batch_v1alpha.BatchServiceClient.list_tasks", - extra = { - "serviceName": "google.cloud.batch.v1alpha.BatchService", - "rpcName": "ListTasks", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateJob(_BaseBatchServiceRestTransport._BaseUpdateJob, BatchServiceRestStub): - def __hash__(self): - return hash("BatchServiceRestTransport.UpdateJob") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: batch.UpdateJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> gcb_job.Job: - r"""Call the update job method over HTTP. - - Args: - request (~.batch.UpdateJobRequest): - The request object. UpdateJob Request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.gcb_job.Job: - The Cloud Batch Job description. 
- """ - - http_options = _BaseBatchServiceRestTransport._BaseUpdateJob._get_http_options() - - request, metadata = self._interceptor.pre_update_job(request, metadata) - transcoded_request = _BaseBatchServiceRestTransport._BaseUpdateJob._get_transcoded_request(http_options, request) - - body = _BaseBatchServiceRestTransport._BaseUpdateJob._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBatchServiceRestTransport._BaseUpdateJob._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.batch_v1alpha.BatchServiceClient.UpdateJob", - extra = { - "serviceName": "google.cloud.batch.v1alpha.BatchService", - "rpcName": "UpdateJob", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BatchServiceRestTransport._UpdateJob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = gcb_job.Job() - pb_resp = gcb_job.Job.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_job(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_job_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = gcb_job.Job.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.batch_v1alpha.BatchServiceClient.update_job", - extra = { - "serviceName": "google.cloud.batch.v1alpha.BatchService", - "rpcName": "UpdateJob", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateResourceAllowance(_BaseBatchServiceRestTransport._BaseUpdateResourceAllowance, BatchServiceRestStub): - def __hash__(self): - return hash("BatchServiceRestTransport.UpdateResourceAllowance") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: batch.UpdateResourceAllowanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> 
gcb_resource_allowance.ResourceAllowance: - r"""Call the update resource allowance method over HTTP. - - Args: - request (~.batch.UpdateResourceAllowanceRequest): - The request object. UpdateResourceAllowance Request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.gcb_resource_allowance.ResourceAllowance: - The Resource Allowance description - for Cloud Batch. Only one Resource - Allowance is supported now under a - specific location and project. - - """ - - http_options = _BaseBatchServiceRestTransport._BaseUpdateResourceAllowance._get_http_options() - - request, metadata = self._interceptor.pre_update_resource_allowance(request, metadata) - transcoded_request = _BaseBatchServiceRestTransport._BaseUpdateResourceAllowance._get_transcoded_request(http_options, request) - - body = _BaseBatchServiceRestTransport._BaseUpdateResourceAllowance._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBatchServiceRestTransport._BaseUpdateResourceAllowance._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.batch_v1alpha.BatchServiceClient.UpdateResourceAllowance", - extra = { - "serviceName": "google.cloud.batch.v1alpha.BatchService", - "rpcName": "UpdateResourceAllowance", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BatchServiceRestTransport._UpdateResourceAllowance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = gcb_resource_allowance.ResourceAllowance() - pb_resp = gcb_resource_allowance.ResourceAllowance.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_resource_allowance(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_resource_allowance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = gcb_resource_allowance.ResourceAllowance.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.batch_v1alpha.BatchServiceClient.update_resource_allowance", - extra = { - "serviceName": "google.cloud.batch.v1alpha.BatchService", - "rpcName": "UpdateResourceAllowance", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - @property - def cancel_job(self) -> Callable[ - [batch.CancelJobRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CancelJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_job(self) -> Callable[ - [batch.CreateJobRequest], - gcb_job.Job]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_resource_allowance(self) -> Callable[ - [batch.CreateResourceAllowanceRequest], - gcb_resource_allowance.ResourceAllowance]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateResourceAllowance(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_job(self) -> Callable[ - [batch.DeleteJobRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_resource_allowance(self) -> Callable[ - [batch.DeleteResourceAllowanceRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteResourceAllowance(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_job(self) -> Callable[ - [batch.GetJobRequest], - job.Job]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_resource_allowance(self) -> Callable[ - [batch.GetResourceAllowanceRequest], - resource_allowance.ResourceAllowance]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._GetResourceAllowance(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_task(self) -> Callable[ - [batch.GetTaskRequest], - task.Task]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetTask(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_jobs(self) -> Callable[ - [batch.ListJobsRequest], - batch.ListJobsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListJobs(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_resource_allowances(self) -> Callable[ - [batch.ListResourceAllowancesRequest], - batch.ListResourceAllowancesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListResourceAllowances(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_tasks(self) -> Callable[ - [batch.ListTasksRequest], - batch.ListTasksResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListTasks(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_job(self) -> Callable[ - [batch.UpdateJobRequest], - gcb_job.Job]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_resource_allowance(self) -> Callable[ - [batch.UpdateResourceAllowanceRequest], - gcb_resource_allowance.ResourceAllowance]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateResourceAllowance(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_location(self): - return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore - - class _GetLocation(_BaseBatchServiceRestTransport._BaseGetLocation, BatchServiceRestStub): - def __hash__(self): - return hash("BatchServiceRestTransport.GetLocation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: locations_pb2.GetLocationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.Location: - - r"""Call the get location method over HTTP. - - Args: - request (locations_pb2.GetLocationRequest): - The request object for GetLocation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - locations_pb2.Location: Response from GetLocation method. - """ - - http_options = _BaseBatchServiceRestTransport._BaseGetLocation._get_http_options() - - request, metadata = self._interceptor.pre_get_location(request, metadata) - transcoded_request = _BaseBatchServiceRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBatchServiceRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.batch_v1alpha.BatchServiceClient.GetLocation", - extra = { - "serviceName": "google.cloud.batch.v1alpha.BatchService", - "rpcName": "GetLocation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BatchServiceRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = locations_pb2.Location() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_location(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.batch_v1alpha.BatchServiceAsyncClient.GetLocation", - extra = { - "serviceName": "google.cloud.batch.v1alpha.BatchService", - "rpcName": "GetLocation", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def list_locations(self): - return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore - - class _ListLocations(_BaseBatchServiceRestTransport._BaseListLocations, BatchServiceRestStub): - def __hash__(self): - return hash("BatchServiceRestTransport.ListLocations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: locations_pb2.ListLocationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.ListLocationsResponse: - - r"""Call the list locations method over HTTP. - - Args: - request (locations_pb2.ListLocationsRequest): - The request object for ListLocations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - locations_pb2.ListLocationsResponse: Response from ListLocations method. 
- """ - - http_options = _BaseBatchServiceRestTransport._BaseListLocations._get_http_options() - - request, metadata = self._interceptor.pre_list_locations(request, metadata) - transcoded_request = _BaseBatchServiceRestTransport._BaseListLocations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBatchServiceRestTransport._BaseListLocations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.batch_v1alpha.BatchServiceClient.ListLocations", - extra = { - "serviceName": "google.cloud.batch.v1alpha.BatchService", - "rpcName": "ListLocations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BatchServiceRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = locations_pb2.ListLocationsResponse() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_list_locations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.batch_v1alpha.BatchServiceAsyncClient.ListLocations", - extra = { - "serviceName": "google.cloud.batch.v1alpha.BatchService", - "rpcName": "ListLocations", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - - class _CancelOperation(_BaseBatchServiceRestTransport._BaseCancelOperation, BatchServiceRestStub): - def __hash__(self): - return hash("BatchServiceRestTransport.CancelOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - - r"""Call the cancel operation method over HTTP. 
- - Args: - request (operations_pb2.CancelOperationRequest): - The request object for CancelOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - - http_options = _BaseBatchServiceRestTransport._BaseCancelOperation._get_http_options() - - request, metadata = self._interceptor.pre_cancel_operation(request, metadata) - transcoded_request = _BaseBatchServiceRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) - - body = _BaseBatchServiceRestTransport._BaseCancelOperation._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBatchServiceRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.batch_v1alpha.BatchServiceClient.CancelOperation", - extra = { - "serviceName": "google.cloud.batch.v1alpha.BatchService", - "rpcName": "CancelOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BatchServiceRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_cancel_operation(None) - - @property - def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - - class _DeleteOperation(_BaseBatchServiceRestTransport._BaseDeleteOperation, BatchServiceRestStub): - def __hash__(self): - return hash("BatchServiceRestTransport.DeleteOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - - r"""Call the delete operation method over HTTP. - - Args: - request (operations_pb2.DeleteOperationRequest): - The request object for DeleteOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - - http_options = _BaseBatchServiceRestTransport._BaseDeleteOperation._get_http_options() - - request, metadata = self._interceptor.pre_delete_operation(request, metadata) - transcoded_request = _BaseBatchServiceRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBatchServiceRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.batch_v1alpha.BatchServiceClient.DeleteOperation", - extra = { - "serviceName": "google.cloud.batch.v1alpha.BatchService", - "rpcName": "DeleteOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BatchServiceRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_delete_operation(None) - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(_BaseBatchServiceRestTransport._BaseGetOperation, BatchServiceRestStub): - def __hash__(self): - return hash("BatchServiceRestTransport.GetOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.Operation: Response from GetOperation method. - """ - - http_options = _BaseBatchServiceRestTransport._BaseGetOperation._get_http_options() - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - transcoded_request = _BaseBatchServiceRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBatchServiceRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.batch_v1alpha.BatchServiceClient.GetOperation", - extra = { - "serviceName": "google.cloud.batch.v1alpha.BatchService", - "rpcName": "GetOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BatchServiceRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.Operation() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.batch_v1alpha.BatchServiceAsyncClient.GetOperation", - extra = { - "serviceName": "google.cloud.batch.v1alpha.BatchService", - "rpcName": "GetOperation", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - - class _ListOperations(_BaseBatchServiceRestTransport._BaseListOperations, BatchServiceRestStub): - def __hash__(self): - return hash("BatchServiceRestTransport.ListOperations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, 
Union[str, bytes]]]=(), - ) -> operations_pb2.ListOperationsResponse: - - r"""Call the list operations method over HTTP. - - Args: - request (operations_pb2.ListOperationsRequest): - The request object for ListOperations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.ListOperationsResponse: Response from ListOperations method. - """ - - http_options = _BaseBatchServiceRestTransport._BaseListOperations._get_http_options() - - request, metadata = self._interceptor.pre_list_operations(request, metadata) - transcoded_request = _BaseBatchServiceRestTransport._BaseListOperations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBatchServiceRestTransport._BaseListOperations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.batch_v1alpha.BatchServiceClient.ListOperations", - extra = { - "serviceName": "google.cloud.batch.v1alpha.BatchService", - "rpcName": "ListOperations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BatchServiceRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.ListOperationsResponse() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_list_operations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.batch_v1alpha.BatchServiceAsyncClient.ListOperations", - extra = { - "serviceName": "google.cloud.batch.v1alpha.BatchService", - "rpcName": "ListOperations", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'BatchServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/services/batch_service/transports/rest_base.py b/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/services/batch_service/transports/rest_base.py deleted file mode 100644 index aec33091947e..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/services/batch_service/transports/rest_base.py +++ /dev/null @@ -1,768 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.cloud.location import locations_pb2 # type: ignore -from .base import BatchServiceTransport, DEFAULT_CLIENT_INFO - -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - - -from google.cloud.batch_v1alpha.types import batch -from google.cloud.batch_v1alpha.types import job -from google.cloud.batch_v1alpha.types import job as gcb_job -from google.cloud.batch_v1alpha.types import resource_allowance -from google.cloud.batch_v1alpha.types import resource_allowance as gcb_resource_allowance -from google.cloud.batch_v1alpha.types import task -from google.longrunning import operations_pb2 # type: ignore - - -class _BaseBatchServiceRestTransport(BatchServiceTransport): - """Base REST backend transport for BatchService. - - Note: This class is not meant to be used directly. Use its sync and - async sub-classes instead. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'batch.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - Args: - host (Optional[str]): - The hostname to connect to (default: 'batch.googleapis.com'). - credentials (Optional[Any]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - - class _BaseCancelJob: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1alpha/{name=projects/*/locations/*/jobs/*}:cancel', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = batch.CancelJobRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBatchServiceRestTransport._BaseCancelJob._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateJob: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod
- def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1alpha/{parent=projects/*/locations/*}/jobs', - 'body': 'job', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = batch.CreateJobRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBatchServiceRestTransport._BaseCreateJob._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateResourceAllowance: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1alpha/{parent=projects/*/locations/*}/resourceAllowances', - 'body': 'resource_allowance', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = batch.CreateResourceAllowanceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBatchServiceRestTransport._BaseCreateResourceAllowance._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteJob: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1alpha/{name=projects/*/locations/*/jobs/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = batch.DeleteJobRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteResourceAllowance: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, 
message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1alpha/{name=projects/*/locations/*/resourceAllowances/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = batch.DeleteResourceAllowanceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBatchServiceRestTransport._BaseDeleteResourceAllowance._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetJob: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1alpha/{name=projects/*/locations/*/jobs/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = batch.GetJobRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBatchServiceRestTransport._BaseGetJob._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetResourceAllowance: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1alpha/{name=projects/*/locations/*/resourceAllowances/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = batch.GetResourceAllowanceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBatchServiceRestTransport._BaseGetResourceAllowance._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetTask: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, 
message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1alpha/{name=projects/*/locations/*/jobs/*/taskGroups/*/tasks/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = batch.GetTaskRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBatchServiceRestTransport._BaseGetTask._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListJobs: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1alpha/{parent=projects/*/locations/*}/jobs', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = batch.ListJobsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListResourceAllowances: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1alpha/{parent=projects/*/locations/*}/resourceAllowances', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = batch.ListResourceAllowancesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBatchServiceRestTransport._BaseListResourceAllowances._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListTasks: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1alpha/{parent=projects/*/locations/*/jobs/*/taskGroups/*}/tasks', - }, - ] - return http_options - - @staticmethod - def 
_get_transcoded_request(http_options, request): - pb_request = batch.ListTasksRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBatchServiceRestTransport._BaseListTasks._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateJob: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1alpha/{job.name=projects/*/locations/*/jobs/*}', - 'body': 'job', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = batch.UpdateJobRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBatchServiceRestTransport._BaseUpdateJob._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateResourceAllowance: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1alpha/{resource_allowance.name=projects/*/locations/*/resourceAllowances/*}', - 'body': 'resource_allowance', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = batch.UpdateResourceAllowanceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBatchServiceRestTransport._BaseUpdateResourceAllowance._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetLocation: - def __hash__(self): # pragma: NO COVER 
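The non-empty ``__REQUIRED_FIELDS_DEFAULT_VALUES`` map on ``_BaseUpdateJob`` (and on ``_BaseUpdateResourceAllowance`` just above) is what guarantees ``updateMask`` always reaches the query string: any required parameter the caller left unset is backfilled with its default. The same filter, reduced to a self-contained sketch:

.. code-block:: python

    REQUIRED_FIELDS_DEFAULT_VALUES = {"updateMask": {}}

    def get_unset_required_fields(message_dict):
        # Keep only the required defaults the caller has not set already.
        return {k: v for k, v in REQUIRED_FIELDS_DEFAULT_VALUES.items()
                if k not in message_dict}

    assert get_unset_required_fields({}) == {"updateMask": {}}
    assert get_unset_required_fields({"updateMask": "taskGroups"}) == {}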
- return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1alpha/{name=projects/*/locations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseListLocations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1alpha/{name=projects/*}/locations', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseCancelOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1alpha/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - body = json.dumps(transcoded_request['body']) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseDeleteOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1alpha/{name=projects/*/locations/*/operations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseGetOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1alpha/{name=projects/*/locations/*/operations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return 
transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseListOperations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1alpha/{name=projects/*/locations/*}/operations', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - -__all__=( - '_BaseBatchServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/types/__init__.py b/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/types/__init__.py deleted file mode 100644 index c8d11dd55cc5..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/types/__init__.py +++ /dev/null @@ -1,126 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
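The location and operation ``_Base*`` classes that close out the transport above back the standard mixin methods on the generated client. A usage sketch, assuming Application Default Credentials are available and using a placeholder project path:

.. code-block:: python

    from google.cloud import batch_v1alpha

    client = batch_v1alpha.BatchServiceClient()

    # Standard locations mixin, served by _BaseListLocations above.
    response = client.list_locations(request={"name": "projects/my-project"})
    for location in response.locations:
        print(location.location_id, location.name)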
-# -from .batch import ( - CancelJobRequest, - CancelJobResponse, - CreateJobRequest, - CreateResourceAllowanceRequest, - DeleteJobRequest, - DeleteResourceAllowanceRequest, - GetJobRequest, - GetResourceAllowanceRequest, - GetTaskRequest, - ListJobsRequest, - ListJobsResponse, - ListResourceAllowancesRequest, - ListResourceAllowancesResponse, - ListTasksRequest, - ListTasksResponse, - OperationMetadata, - UpdateJobRequest, - UpdateResourceAllowanceRequest, -) -from .job import ( - AllocationPolicy, - Job, - JobDependency, - JobNotification, - JobStatus, - LogsPolicy, - ResourceUsage, - ServiceAccount, - TaskGroup, -) -from .notification import ( - Notification, -) -from .resource_allowance import ( - ResourceAllowance, - UsageResourceAllowance, - UsageResourceAllowanceSpec, - UsageResourceAllowanceStatus, - CalendarPeriod, - ResourceAllowanceState, -) -from .task import ( - ComputeResource, - Environment, - LifecyclePolicy, - Runnable, - StatusEvent, - Task, - TaskExecution, - TaskResourceUsage, - TaskSpec, - TaskStatus, -) -from .volume import ( - GCS, - NFS, - PD, - Volume, -) - -__all__ = ( - 'CancelJobRequest', - 'CancelJobResponse', - 'CreateJobRequest', - 'CreateResourceAllowanceRequest', - 'DeleteJobRequest', - 'DeleteResourceAllowanceRequest', - 'GetJobRequest', - 'GetResourceAllowanceRequest', - 'GetTaskRequest', - 'ListJobsRequest', - 'ListJobsResponse', - 'ListResourceAllowancesRequest', - 'ListResourceAllowancesResponse', - 'ListTasksRequest', - 'ListTasksResponse', - 'OperationMetadata', - 'UpdateJobRequest', - 'UpdateResourceAllowanceRequest', - 'AllocationPolicy', - 'Job', - 'JobDependency', - 'JobNotification', - 'JobStatus', - 'LogsPolicy', - 'ResourceUsage', - 'ServiceAccount', - 'TaskGroup', - 'Notification', - 'ResourceAllowance', - 'UsageResourceAllowance', - 'UsageResourceAllowanceSpec', - 'UsageResourceAllowanceStatus', - 'CalendarPeriod', - 'ResourceAllowanceState', - 'ComputeResource', - 'Environment', - 'LifecyclePolicy', - 'Runnable', - 'StatusEvent', - 'Task', - 'TaskExecution', - 'TaskResourceUsage', - 'TaskSpec', - 'TaskStatus', - 'GCS', - 'NFS', - 'PD', - 'Volume', -) diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/types/batch.py b/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/types/batch.py deleted file mode 100644 index 08fd2b7e5525..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/types/batch.py +++ /dev/null @@ -1,741 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
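Because this ``types`` package re-exports every message listed above, calling code can construct requests without knowing which submodule defines them, for example (the resource name is a placeholder):

.. code-block:: python

    from google.cloud.batch_v1alpha import types

    request = types.GetJobRequest(
        name="projects/my-project/locations/us-central1/jobs/job01",
    )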
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.batch_v1alpha.types import job as gcb_job -from google.cloud.batch_v1alpha.types import resource_allowance as gcb_resource_allowance -from google.cloud.batch_v1alpha.types import task -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.batch.v1alpha', - manifest={ - 'CreateJobRequest', - 'GetJobRequest', - 'DeleteJobRequest', - 'CancelJobRequest', - 'CancelJobResponse', - 'UpdateJobRequest', - 'ListJobsRequest', - 'ListJobsResponse', - 'ListTasksRequest', - 'ListTasksResponse', - 'GetTaskRequest', - 'CreateResourceAllowanceRequest', - 'GetResourceAllowanceRequest', - 'DeleteResourceAllowanceRequest', - 'ListResourceAllowancesRequest', - 'ListResourceAllowancesResponse', - 'UpdateResourceAllowanceRequest', - 'OperationMetadata', - }, -) - - -class CreateJobRequest(proto.Message): - r"""CreateJob Request. - - Attributes: - parent (str): - Required. The parent resource name where the - Job will be created. Pattern: - "projects/{project}/locations/{location}". - job_id (str): - ID used to uniquely identify the Job within its parent - scope. This field should contain at most 63 characters and - must start with lowercase characters. Only lowercase - characters, numbers and '-' are accepted. The '-' character - cannot be the first or the last one. A system generated ID - will be used if the field is not set. - - The job.name field in the request will be ignored and the - created resource name of the Job will be - "{parent}/jobs/{job_id}". - job (google.cloud.batch_v1alpha.types.Job): - Required. The Job to create. - request_id (str): - Optional. An optional request ID to identify - requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes since the first request. - - For example, consider a situation where you make - an initial request and the request times out. If - you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This - prevents clients from accidentally creating - duplicate commitments. - - The request ID must be a valid UUID with the - exception that zero UUID is not supported - (00000000-0000-0000-0000-000000000000). - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - job_id: str = proto.Field( - proto.STRING, - number=2, - ) - job: gcb_job.Job = proto.Field( - proto.MESSAGE, - number=3, - message=gcb_job.Job, - ) - request_id: str = proto.Field( - proto.STRING, - number=4, - ) - - -class GetJobRequest(proto.Message): - r"""GetJob Request. - - Attributes: - name (str): - Required. Job name. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class DeleteJobRequest(proto.Message): - r"""DeleteJob Request. - - Attributes: - name (str): - Job name. - reason (str): - Optional. Reason for this deletion. - request_id (str): - Optional. An optional request ID to identify - requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. 
The server will guarantee that - for at least 60 minutes after the first request. - - For example, consider a situation where you make - an initial request and the request times out. If - you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This - prevents clients from accidentally creating - duplicate commitments. - - The request ID must be a valid UUID with the - exception that zero UUID is not supported - (00000000-0000-0000-0000-000000000000). - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - reason: str = proto.Field( - proto.STRING, - number=2, - ) - request_id: str = proto.Field( - proto.STRING, - number=4, - ) - - -class CancelJobRequest(proto.Message): - r"""CancelJob Request. - - Attributes: - name (str): - Required. Job name. - request_id (str): - Optional. An optional request ID to identify - requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes after the first request. - - For example, consider a situation where you make - an initial request and the request times out. If - you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This - prevents clients from accidentally creating - duplicate commitments. - - The request ID must be a valid UUID with the - exception that zero UUID is not supported - (00000000-0000-0000-0000-000000000000). - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - request_id: str = proto.Field( - proto.STRING, - number=4, - ) - - -class CancelJobResponse(proto.Message): - r"""Response to the CancelJob request. - """ - - -class UpdateJobRequest(proto.Message): - r"""UpdateJob Request. - - Attributes: - job (google.cloud.batch_v1alpha.types.Job): - Required. The Job to update. Only fields specified in - ``updateMask`` are updated. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - - The ``jobs.patch`` method can only be used while a job is in - the ``QUEUED``, ``SCHEDULED``, or ``RUNNING`` state and - currently only supports increasing the value of the first - ``taskCount`` field in the job's ``taskGroups`` field. - Therefore, you must set the value of ``updateMask`` to - ``taskGroups``. Any other job fields in the update request - will be ignored. - - For example, to update a job's ``taskCount`` to ``2``, set - ``updateMask`` to ``taskGroups`` and use the following - request body: - - :: - - { - "taskGroups":[{ - "taskCount": 2 - }] - } - request_id (str): - Optional. An optional request ID to identify - requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes after the first request. - - For example, consider a situation where you make - an initial request and the request times out. If - you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This - prevents clients from accidentally creating - duplicate commitments. 
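These ``request_id`` semantics are what make retries safe: generate one UUID per logical operation and reuse it on every attempt, so a timed-out-then-retried call cannot be applied twice within the 60-minute deduplication window. A sketch with a placeholder job name:

.. code-block:: python

    import uuid

    from google.cloud import batch_v1alpha

    client = batch_v1alpha.BatchServiceClient()

    # One UUID per logical cancellation, reused verbatim on any retry.
    request = batch_v1alpha.CancelJobRequest(
        name="projects/my-project/locations/us-central1/jobs/job01",
        request_id=str(uuid.uuid4()),
    )
    operation = client.cancel_job(request=request)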
- - The request ID must be a valid UUID with the - exception that zero UUID is not supported - (00000000-0000-0000-0000-000000000000). - """ - - job: gcb_job.Job = proto.Field( - proto.MESSAGE, - number=1, - message=gcb_job.Job, - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - request_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListJobsRequest(proto.Message): - r"""ListJob Request. - - Attributes: - parent (str): - Parent path. - filter (str): - List filter. - order_by (str): - Optional. Sort results. Supported are "name", "name desc", - "create_time", and "create_time desc". - page_size (int): - Page size. - page_token (str): - Page token. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListJobsResponse(proto.Message): - r"""ListJob Response. - - Attributes: - jobs (MutableSequence[google.cloud.batch_v1alpha.types.Job]): - Jobs. - next_page_token (str): - Next page token. - unreachable (MutableSequence[str]): - Locations that could not be reached. - """ - - @property - def raw_page(self): - return self - - jobs: MutableSequence[gcb_job.Job] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gcb_job.Job, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class ListTasksRequest(proto.Message): - r"""ListTasks Request. - - Attributes: - parent (str): - Required. Name of a TaskGroup from which Tasks are being - requested. Pattern: - "projects/{project}/locations/{location}/jobs/{job}/taskGroups/{task_group}". - filter (str): - Task filter, null filter matches all Tasks. - Filter string should be of the format - State=TaskStatus.State e.g. State=RUNNING - order_by (str): - Not implemented. - page_size (int): - Page size. - page_token (str): - Page token. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - filter: str = proto.Field( - proto.STRING, - number=2, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - page_token: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListTasksResponse(proto.Message): - r"""ListTasks Response. - - Attributes: - tasks (MutableSequence[google.cloud.batch_v1alpha.types.Task]): - Tasks. - next_page_token (str): - Next page token. - unreachable (MutableSequence[str]): - Locations that could not be reached. - """ - - @property - def raw_page(self): - return self - - tasks: MutableSequence[task.Task] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=task.Task, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class GetTaskRequest(proto.Message): - r"""Request for a single Task by name. - - Attributes: - name (str): - Required. Task name. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateResourceAllowanceRequest(proto.Message): - r"""CreateResourceAllowance Request. - - Attributes: - parent (str): - Required. The parent resource name where the - ResourceAllowance will be created. 
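The ``raw_page`` property on these list responses exists for the shared pager machinery; callers normally never touch ``next_page_token`` because the generated client wraps each response in an auto-paging iterator. A sketch with a placeholder parent path:

.. code-block:: python

    from google.cloud import batch_v1alpha

    client = batch_v1alpha.BatchServiceClient()

    # The pager fetches successive ListJobsResponse pages on demand.
    for job in client.list_jobs(parent="projects/my-project/locations/us-central1"):
        print(job.name)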
Pattern: - "projects/{project}/locations/{location}". - resource_allowance_id (str): - ID used to uniquely identify the ResourceAllowance within - its parent scope. This field should contain at most 63 - characters and must start with lowercase characters. Only - lowercase characters, numbers and '-' are accepted. The '-' - character cannot be the first or the last one. A system - generated ID will be used if the field is not set. - - The resource_allowance.name field in the request will be - ignored and the created resource name of the - ResourceAllowance will be - "{parent}/resourceAllowances/{resource_allowance_id}". - resource_allowance (google.cloud.batch_v1alpha.types.ResourceAllowance): - Required. The ResourceAllowance to create. - request_id (str): - Optional. An optional request ID to identify - requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes since the first request. - - For example, consider a situation where you make - an initial request and the request times out. If - you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This - prevents clients from accidentally creating - duplicate commitments. - - The request ID must be a valid UUID with the - exception that zero UUID is not supported - (00000000-0000-0000-0000-000000000000). - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - resource_allowance_id: str = proto.Field( - proto.STRING, - number=2, - ) - resource_allowance: gcb_resource_allowance.ResourceAllowance = proto.Field( - proto.MESSAGE, - number=3, - message=gcb_resource_allowance.ResourceAllowance, - ) - request_id: str = proto.Field( - proto.STRING, - number=4, - ) - - -class GetResourceAllowanceRequest(proto.Message): - r"""GetResourceAllowance Request. - - Attributes: - name (str): - Required. ResourceAllowance name. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class DeleteResourceAllowanceRequest(proto.Message): - r"""DeleteResourceAllowance Request. - - Attributes: - name (str): - Required. ResourceAllowance name. - reason (str): - Optional. Reason for this deletion. - request_id (str): - Optional. An optional request ID to identify - requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes after the first request. - - For example, consider a situation where you make - an initial request and the request times out. If - you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This - prevents clients from accidentally creating - duplicate commitments. - - The request ID must be a valid UUID with the - exception that zero UUID is not supported - (00000000-0000-0000-0000-000000000000). - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - reason: str = proto.Field( - proto.STRING, - number=2, - ) - request_id: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListResourceAllowancesRequest(proto.Message): - r"""ListResourceAllowances Request. - - Attributes: - parent (str): - Required. Parent path. 
- page_size (int): - Optional. Page size. - page_token (str): - Optional. Page token. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListResourceAllowancesResponse(proto.Message): - r"""ListResourceAllowances Response. - - Attributes: - resource_allowances (MutableSequence[google.cloud.batch_v1alpha.types.ResourceAllowance]): - ResourceAllowances. - next_page_token (str): - Next page token. - unreachable (MutableSequence[str]): - Locations that could not be reached. - """ - - @property - def raw_page(self): - return self - - resource_allowances: MutableSequence[gcb_resource_allowance.ResourceAllowance] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gcb_resource_allowance.ResourceAllowance, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class UpdateResourceAllowanceRequest(proto.Message): - r"""UpdateResourceAllowance Request. - - Attributes: - resource_allowance (google.cloud.batch_v1alpha.types.ResourceAllowance): - Required. The ResourceAllowance to update. Update - description. Only fields specified in ``update_mask`` are - updated. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - - Field mask is used to specify the fields to be overwritten - in the ResourceAllowance resource by the update. The fields - specified in the update_mask are relative to the resource, - not the full request. A field will be overwritten if it is - in the mask. If the user does not provide a mask then all - fields will be overwritten. - - UpdateResourceAllowance request now only supports update on - ``limit`` field. - request_id (str): - Optional. An optional request ID to identify - requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes since the first request. - - For example, consider a situation where you make - an initial request and the request times out. If - you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This - prevents clients from accidentally creating - duplicate commitments. - - The request ID must be a valid UUID with the - exception that zero UUID is not supported - (00000000-0000-0000-0000-000000000000). - """ - - resource_allowance: gcb_resource_allowance.ResourceAllowance = proto.Field( - proto.MESSAGE, - number=1, - message=gcb_resource_allowance.ResourceAllowance, - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - request_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class OperationMetadata(proto.Message): - r"""Represents the metadata of the long-running operation. - - Attributes: - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the operation was - created. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the operation finished - running. - target (str): - Output only. Server-defined resource path for - the target of the operation. - verb (str): - Output only. Name of the verb executed by the - operation. 
- status_message (str): - Output only. Human-readable status of the - operation, if any. - requested_cancellation (bool): - Output only. Identifies whether the user has requested - cancellation of the operation. Operations that have - successfully been cancelled have - [google.longrunning.Operation.error][google.longrunning.Operation.error] - value with a - [google.rpc.Status.code][google.rpc.Status.code] of 1, - corresponding to ``Code.CANCELLED``. - api_version (str): - Output only. API version used to start the - operation. - """ - - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - target: str = proto.Field( - proto.STRING, - number=3, - ) - verb: str = proto.Field( - proto.STRING, - number=4, - ) - status_message: str = proto.Field( - proto.STRING, - number=5, - ) - requested_cancellation: bool = proto.Field( - proto.BOOL, - number=6, - ) - api_version: str = proto.Field( - proto.STRING, - number=7, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/types/job.py b/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/types/job.py deleted file mode 100644 index ba3011bee1d2..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/types/job.py +++ /dev/null @@ -1,1342 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.batch_v1alpha.types import task -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.batch.v1alpha', - manifest={ - 'Job', - 'LogsPolicy', - 'JobDependency', - 'JobStatus', - 'ResourceUsage', - 'JobNotification', - 'AllocationPolicy', - 'TaskGroup', - 'ServiceAccount', - }, -) - - -class Job(proto.Message): - r"""The Cloud Batch Job description. - - Attributes: - name (str): - Output only. Job name. - For example: - "projects/123456/locations/us-central1/jobs/job01". - uid (str): - Output only. A system generated unique ID for - the Job. - priority (int): - Priority of the Job. The valid value range is [0, 100). - Default value is 0. Higher value indicates higher priority. - A job with higher priority value is more likely to run - earlier if all other requirements are satisfied. - task_groups (MutableSequence[google.cloud.batch_v1alpha.types.TaskGroup]): - Required. TaskGroups in the Job. Only one - TaskGroup is supported now. - scheduling_policy (google.cloud.batch_v1alpha.types.Job.SchedulingPolicy): - Scheduling policy for TaskGroups in the job. 
- dependencies (MutableSequence[google.cloud.batch_v1alpha.types.JobDependency]): - At least one of the dependencies must be - satisfied before the Job is scheduled to run. - Only one JobDependency is supported now. - Not yet implemented. - allocation_policy (google.cloud.batch_v1alpha.types.AllocationPolicy): - Compute resource allocation for all - TaskGroups in the Job. - labels (MutableMapping[str, str]): - Custom labels to apply to the job and any Cloud Logging - `LogEntry `__ - that it generates. - - Use labels to group and describe the resources they are - applied to. Batch automatically applies predefined labels - and supports multiple ``labels`` fields for each job, which - each let you apply custom labels to various resources. Label - names that start with "goog-" or "google-" are reserved for - predefined labels. For more information about labels with - Batch, see `Organize resources using - labels `__. - status (google.cloud.batch_v1alpha.types.JobStatus): - Output only. Job status. It is read only for - users. - notification (google.cloud.batch_v1alpha.types.JobNotification): - Deprecated: please use notifications instead. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. When the Job was created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The last time the Job was - updated. - logs_policy (google.cloud.batch_v1alpha.types.LogsPolicy): - Log preservation policy for the Job. - notifications (MutableSequence[google.cloud.batch_v1alpha.types.JobNotification]): - Notification configurations. - """ - class SchedulingPolicy(proto.Enum): - r"""The order that TaskGroups are scheduled relative to each - other. - Not yet implemented. - - Values: - SCHEDULING_POLICY_UNSPECIFIED (0): - Unspecified. - AS_SOON_AS_POSSIBLE (1): - Run all TaskGroups as soon as possible. - """ - SCHEDULING_POLICY_UNSPECIFIED = 0 - AS_SOON_AS_POSSIBLE = 1 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - uid: str = proto.Field( - proto.STRING, - number=2, - ) - priority: int = proto.Field( - proto.INT64, - number=3, - ) - task_groups: MutableSequence['TaskGroup'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='TaskGroup', - ) - scheduling_policy: SchedulingPolicy = proto.Field( - proto.ENUM, - number=5, - enum=SchedulingPolicy, - ) - dependencies: MutableSequence['JobDependency'] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message='JobDependency', - ) - allocation_policy: 'AllocationPolicy' = proto.Field( - proto.MESSAGE, - number=7, - message='AllocationPolicy', - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=8, - ) - status: 'JobStatus' = proto.Field( - proto.MESSAGE, - number=9, - message='JobStatus', - ) - notification: 'JobNotification' = proto.Field( - proto.MESSAGE, - number=10, - message='JobNotification', - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=11, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=12, - message=timestamp_pb2.Timestamp, - ) - logs_policy: 'LogsPolicy' = proto.Field( - proto.MESSAGE, - number=13, - message='LogsPolicy', - ) - notifications: MutableSequence['JobNotification'] = proto.RepeatedField( - proto.MESSAGE, - number=14, - message='JobNotification', - ) - - -class LogsPolicy(proto.Message): - r"""LogsPolicy describes if and how a job's logs are preserved. 
Logs - include information that is automatically written by the Batch - service agent and any information that you configured the job's - runnables to write to the ``stdout`` or ``stderr`` streams. - - Attributes: - destination (google.cloud.batch_v1alpha.types.LogsPolicy.Destination): - If and where logs should be saved. - logs_path (str): - When ``destination`` is set to ``PATH``, you must set this - field to the path where you want logs to be saved. This path - can point to a local directory on the VM or (if configured) - a directory under the mount path of any Cloud Storage - bucket, network file system (NFS), or writable persistent - disk that is mounted to the job. For example, if the job has - a bucket with ``mountPath`` set to ``/mnt/disks/my-bucket``, - you can write logs to the root directory of the - ``remotePath`` of that bucket by setting this field to - ``/mnt/disks/my-bucket/``. - cloud_logging_option (google.cloud.batch_v1alpha.types.LogsPolicy.CloudLoggingOption): - Optional. When ``destination`` is set to ``CLOUD_LOGGING``, - you can optionally set this field to configure additional - settings for Cloud Logging. - """ - class Destination(proto.Enum): - r"""The destination (if any) for logs. - - Values: - DESTINATION_UNSPECIFIED (0): - (Default) Logs are not preserved. - CLOUD_LOGGING (1): - Logs are streamed to Cloud Logging. Optionally, you can - configure additional settings in the ``cloudLoggingOption`` - field. - PATH (2): - Logs are saved to the file path specified in the - ``logsPath`` field. - """ - DESTINATION_UNSPECIFIED = 0 - CLOUD_LOGGING = 1 - PATH = 2 - - class CloudLoggingOption(proto.Message): - r"""``CloudLoggingOption`` contains additional settings for Cloud - Logging logs generated by Batch job. - - Attributes: - use_generic_task_monitored_resource (bool): - Optional. Set this field to ``true`` to change the - `monitored resource - type `__ - for Cloud Logging logs generated by this Batch job from the - ```batch.googleapis.com/Job`` `__ - type to the formerly used - ```generic_task`` `__ - type. - """ - - use_generic_task_monitored_resource: bool = proto.Field( - proto.BOOL, - number=1, - ) - - destination: Destination = proto.Field( - proto.ENUM, - number=1, - enum=Destination, - ) - logs_path: str = proto.Field( - proto.STRING, - number=2, - ) - cloud_logging_option: CloudLoggingOption = proto.Field( - proto.MESSAGE, - number=3, - message=CloudLoggingOption, - ) - - -class JobDependency(proto.Message): - r"""JobDependency describes the state of other Jobs that the - start of this Job depends on. - All dependent Jobs must have been submitted in the same region. - - Attributes: - items (MutableMapping[str, google.cloud.batch_v1alpha.types.JobDependency.Type]): - Each item maps a Job name to a Type. - All items must be satisfied for the - JobDependency to be satisfied (the AND - operation). - Once a condition for one item becomes true, it - won't go back to false even if the dependent Job - state changes again. - """ - class Type(proto.Enum): - r"""Dependency type. - - Values: - TYPE_UNSPECIFIED (0): - Unspecified. - SUCCEEDED (1): - The dependent Job has succeeded. - FAILED (2): - The dependent Job has failed. - FINISHED (3): - SUCCEEDED or FAILED. - """ - TYPE_UNSPECIFIED = 0 - SUCCEEDED = 1 - FAILED = 2 - FINISHED = 3 - - items: MutableMapping[str, Type] = proto.MapField( - proto.STRING, - proto.ENUM, - number=1, - enum=Type, - ) - - -class JobStatus(proto.Message): - r"""Job status.
- - Attributes: - state (google.cloud.batch_v1alpha.types.JobStatus.State): - Job state - status_events (MutableSequence[google.cloud.batch_v1alpha.types.StatusEvent]): - Job status events - task_groups (MutableMapping[str, google.cloud.batch_v1alpha.types.JobStatus.TaskGroupStatus]): - Aggregated task status for each TaskGroup in - the Job. The map key is TaskGroup ID. - run_duration (google.protobuf.duration_pb2.Duration): - The duration of time that the Job spent in - status RUNNING. - resource_usage (google.cloud.batch_v1alpha.types.ResourceUsage): - The resource usage of the job. - """ - class State(proto.Enum): - r"""Valid Job states. - - Values: - STATE_UNSPECIFIED (0): - Job state unspecified. - QUEUED (1): - Job is admitted (validated and persisted) and - waiting for resources. - SCHEDULED (2): - Job is scheduled to run as soon as resource - allocation is ready. The resource allocation may - happen at a later time but with a high chance to - succeed. - RUNNING (3): - Resource allocation has been successful. At - least one Task in the Job is RUNNING. - SUCCEEDED (4): - All Tasks in the Job have finished - successfully. - FAILED (5): - At least one Task in the Job has failed. - DELETION_IN_PROGRESS (6): - The Job will be deleted, but has not been - deleted yet. Typically this is because resources - used by the Job are still being cleaned up. - CANCELLATION_IN_PROGRESS (7): - The Job cancellation is in progress, this is - because the resources used by the Job are still - being cleaned up. - CANCELLED (8): - The Job has been cancelled, the task - executions were stopped and the resources were - cleaned up. - """ - STATE_UNSPECIFIED = 0 - QUEUED = 1 - SCHEDULED = 2 - RUNNING = 3 - SUCCEEDED = 4 - FAILED = 5 - DELETION_IN_PROGRESS = 6 - CANCELLATION_IN_PROGRESS = 7 - CANCELLED = 8 - - class InstanceStatus(proto.Message): - r"""VM instance status. - - Attributes: - machine_type (str): - The Compute Engine machine type. - provisioning_model (google.cloud.batch_v1alpha.types.AllocationPolicy.ProvisioningModel): - The VM instance provisioning model. - task_pack (int): - The max number of tasks can be assigned to - this instance type. - boot_disk (google.cloud.batch_v1alpha.types.AllocationPolicy.Disk): - The VM boot disk. - """ - - machine_type: str = proto.Field( - proto.STRING, - number=1, - ) - provisioning_model: 'AllocationPolicy.ProvisioningModel' = proto.Field( - proto.ENUM, - number=2, - enum='AllocationPolicy.ProvisioningModel', - ) - task_pack: int = proto.Field( - proto.INT64, - number=3, - ) - boot_disk: 'AllocationPolicy.Disk' = proto.Field( - proto.MESSAGE, - number=4, - message='AllocationPolicy.Disk', - ) - - class TaskGroupStatus(proto.Message): - r"""Aggregated task status for a TaskGroup. - - Attributes: - counts (MutableMapping[str, int]): - Count of task in each state in the TaskGroup. - The map key is task state name. - instances (MutableSequence[google.cloud.batch_v1alpha.types.JobStatus.InstanceStatus]): - Status of instances allocated for the - TaskGroup. 
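Of the states above, only ``SUCCEEDED``, ``FAILED``, and ``CANCELLED`` describe settled outcomes; the others can still transition. A small helper one might layer on this enum (the helper is illustrative, not part of the library):

.. code-block:: python

    from google.cloud import batch_v1alpha

    TERMINAL_STATES = frozenset({
        batch_v1alpha.JobStatus.State.SUCCEEDED,
        batch_v1alpha.JobStatus.State.FAILED,
        batch_v1alpha.JobStatus.State.CANCELLED,
    })

    def is_finished(job) -> bool:
        # Any other state means the job may still change.
        return job.status.state in TERMINAL_STATES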
- """ - - counts: MutableMapping[str, int] = proto.MapField( - proto.STRING, - proto.INT64, - number=1, - ) - instances: MutableSequence['JobStatus.InstanceStatus'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='JobStatus.InstanceStatus', - ) - - state: State = proto.Field( - proto.ENUM, - number=1, - enum=State, - ) - status_events: MutableSequence[task.StatusEvent] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=task.StatusEvent, - ) - task_groups: MutableMapping[str, TaskGroupStatus] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=4, - message=TaskGroupStatus, - ) - run_duration: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=5, - message=duration_pb2.Duration, - ) - resource_usage: 'ResourceUsage' = proto.Field( - proto.MESSAGE, - number=6, - message='ResourceUsage', - ) - - -class ResourceUsage(proto.Message): - r"""ResourceUsage describes the resource usage of the job. - - Attributes: - core_hours (float): - The CPU core hours that the job consumes. - """ - - core_hours: float = proto.Field( - proto.DOUBLE, - number=1, - ) - - -class JobNotification(proto.Message): - r"""Notification configurations. - - Attributes: - pubsub_topic (str): - The Pub/Sub topic where notifications for the job, like - state changes, will be published. If undefined, no Pub/Sub - notifications are sent for this job. - - Specify the topic using the following format: - ``projects/{project}/topics/{topic}``. Notably, if you want - to specify a Pub/Sub topic that is in a different project - than the job, your administrator must grant your project's - Batch service agent permission to publish to that topic. - - For more information about configuring Pub/Sub notifications - for a job, see - https://cloud.google.com/batch/docs/enable-notifications. - message (google.cloud.batch_v1alpha.types.JobNotification.Message): - The attribute requirements of messages to be - sent to this Pub/Sub topic. Without this field, - no message will be sent. - """ - class Type(proto.Enum): - r"""The message type. - - Values: - TYPE_UNSPECIFIED (0): - Unspecified. - JOB_STATE_CHANGED (1): - Notify users that the job state has changed. - TASK_STATE_CHANGED (2): - Notify users that the task state has changed. - """ - TYPE_UNSPECIFIED = 0 - JOB_STATE_CHANGED = 1 - TASK_STATE_CHANGED = 2 - - class Message(proto.Message): - r"""Message details. Describe the conditions under which messages will - be sent. If no attribute is defined, no message will be sent by - default. One message should specify either the job or the task level - attributes, but not both. For example, job level: JOB_STATE_CHANGED - and/or a specified new_job_state; task level: TASK_STATE_CHANGED - and/or a specified new_task_state. - - Attributes: - type_ (google.cloud.batch_v1alpha.types.JobNotification.Type): - The message type. - new_job_state (google.cloud.batch_v1alpha.types.JobStatus.State): - The new job state. - new_task_state (google.cloud.batch_v1alpha.types.TaskStatus.State): - The new task state. 
- """ - - type_: 'JobNotification.Type' = proto.Field( - proto.ENUM, - number=1, - enum='JobNotification.Type', - ) - new_job_state: 'JobStatus.State' = proto.Field( - proto.ENUM, - number=2, - enum='JobStatus.State', - ) - new_task_state: task.TaskStatus.State = proto.Field( - proto.ENUM, - number=3, - enum=task.TaskStatus.State, - ) - - pubsub_topic: str = proto.Field( - proto.STRING, - number=1, - ) - message: Message = proto.Field( - proto.MESSAGE, - number=2, - message=Message, - ) - - -class AllocationPolicy(proto.Message): - r"""A Job's resource allocation policy describes when, where, and - how compute resources should be allocated for the Job. - - Attributes: - location (google.cloud.batch_v1alpha.types.AllocationPolicy.LocationPolicy): - Location where compute resources should be - allocated for the Job. - instance (google.cloud.batch_v1alpha.types.AllocationPolicy.InstancePolicy): - Deprecated: please use instances[0].policy instead. - instances (MutableSequence[google.cloud.batch_v1alpha.types.AllocationPolicy.InstancePolicyOrTemplate]): - Describe instances that can be created by this - AllocationPolicy. Only instances[0] is supported now. - instance_templates (MutableSequence[str]): - Deprecated: please use instances[0].template instead. - provisioning_models (MutableSequence[google.cloud.batch_v1alpha.types.AllocationPolicy.ProvisioningModel]): - Deprecated: please use - instances[0].policy.provisioning_model instead. - service_account_email (str): - Deprecated: please use service_account instead. - service_account (google.cloud.batch_v1alpha.types.ServiceAccount): - Defines the service account for Batch-created VMs. If - omitted, the `default Compute Engine service - account `__ - is used. Must match the service account specified in any - used instance template configured in the Batch job. - - Includes the following fields: - - - email: The service account's email address. If not set, - the default Compute Engine service account is used. - - scopes: Additional OAuth scopes to grant the service - account, beyond the default cloud-platform scope. (list - of strings) - labels (MutableMapping[str, str]): - Custom labels to apply to the job and all the Compute Engine - resources that both are created by this allocation policy - and support labels. - - Use labels to group and describe the resources they are - applied to. Batch automatically applies predefined labels - and supports multiple ``labels`` fields for each job, which - each let you apply custom labels to various resources. Label - names that start with "goog-" or "google-" are reserved for - predefined labels. For more information about labels with - Batch, see `Organize resources using - labels `__. - network (google.cloud.batch_v1alpha.types.AllocationPolicy.NetworkPolicy): - The network policy. - - If you define an instance template in the - ``InstancePolicyOrTemplate`` field, Batch will use the - network settings in the instance template instead of this - field. - placement (google.cloud.batch_v1alpha.types.AllocationPolicy.PlacementPolicy): - The placement policy. - tags (MutableSequence[str]): - Optional. Tags applied to the VM instances. - - The tags identify valid sources or targets for network - firewalls. Each tag must be 1-63 characters long, and comply - with `RFC1035 `__. - """ - class ProvisioningModel(proto.Enum): - r"""Compute Engine VM instance provisioning model. - - Values: - PROVISIONING_MODEL_UNSPECIFIED (0): - Unspecified. - STANDARD (1): - Standard VM. - SPOT (2): - SPOT VM. 
- PREEMPTIBLE (3):
- Preemptible VM (PVM).
-
- The SPOT model above is preferred for
- preemptible VM instances: this older
- preemptible VM model (indicated by this
- field) has been migrated to use SPOT as its
- underlying technology, but the old model
- will still be supported.
- """
- PROVISIONING_MODEL_UNSPECIFIED = 0
- STANDARD = 1
- SPOT = 2
- PREEMPTIBLE = 3
-
- class LocationPolicy(proto.Message):
- r"""
-
- Attributes:
- allowed_locations (MutableSequence[str]):
- A list of allowed location names represented by internal
- URLs.
-
- Each location can be a region or a zone. Currently, only one
- region or multiple zones within a single region are
- supported. For example,
- ["regions/us-central1"] allows VMs in any zone of region
- us-central1. ["zones/us-central1-a", "zones/us-central1-c"]
- allows VMs only in zones us-central1-a and us-central1-c.
-
- Mixing locations from different regions causes errors.
- For example, ["regions/us-central1", "zones/us-central1-a",
- "zones/us-central1-b", "zones/us-west1-a"] contains
- locations from two distinct regions: us-central1 and
- us-west1. This combination will trigger an error.
- denied_locations (MutableSequence[str]):
- A list of denied location names.
-
- Not yet implemented.
- """
-
- allowed_locations: MutableSequence[str] = proto.RepeatedField(
- proto.STRING,
- number=1,
- )
- denied_locations: MutableSequence[str] = proto.RepeatedField(
- proto.STRING,
- number=2,
- )
-
- class Disk(proto.Message):
- r"""A new persistent disk or a local SSD.
- A VM can only have one local SSD setting but multiple local SSD
- partitions. See
- https://cloud.google.com/compute/docs/disks#pdspecs and
- https://cloud.google.com/compute/docs/disks#localssds.
-
- This message has `oneof`_ fields (mutually exclusive fields).
- For each oneof, at most one member field can be set at the same time.
- Setting any member of the oneof automatically clears all other
- members.
-
- .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
-
- Attributes:
- image (str):
- URL for a VM image to use as the data source for this disk.
- For example, the following are all valid URLs:
-
- - Specify the image by its family name:
- projects/{project}/global/images/family/{image_family}
- - Specify the image version:
- projects/{project}/global/images/{image_version}
-
- You can also refer to Batch customized images by their short
- names. The following image values are supported for a boot
- disk:
-
- - ``batch-debian``: use Batch Debian images.
- - ``batch-cos``: use Batch Container-Optimized images.
- - ``batch-hpc-rocky``: use Batch HPC Rocky Linux images.
-
- This field is a member of `oneof`_ ``data_source``.
- snapshot (str):
- Name of a snapshot used as the data source.
- Snapshots are not currently supported as boot disks.
-
- This field is a member of `oneof`_ ``data_source``.
- type_ (str):
- Disk type as shown in ``gcloud compute disk-types list``.
- For example, local SSD uses type "local-ssd". Persistent
- disks and boot disks use "pd-balanced", "pd-extreme",
- "pd-ssd" or "pd-standard". If not specified, "pd-standard"
- will be used as the default type for non-boot disks, and
- "pd-balanced" will be used as the default type for boot
- disks.
- size_gb (int):
- Disk size in GB.
-
- **Non-Boot Disk**: If the ``type`` specifies a persistent
- disk, this field is ignored if ``data_source`` is set as
- ``image`` or ``snapshot``.
If the ``type`` specifies a local
- SSD, this field should be a multiple of 375 GB; otherwise,
- the final size will be the next greater multiple of 375 GB.
-
- **Boot Disk**: Batch will calculate the boot disk size based
- on source image and task requirements if you do not specify
- the size. If both this field and the ``boot_disk_mib`` field
- in the task spec's ``compute_resource`` are defined, Batch will
- only honor this field. Also, this field should be no smaller
- than the source disk's size when the ``data_source`` is set
- as ``snapshot`` or ``image``. For example, if you set an
- image as the ``data_source`` field and the image's default
- disk size is 30 GB, you can only use this field to make the
- disk size greater than or equal to 30 GB.
- disk_interface (str):
- Local SSDs are available through both "SCSI" and "NVMe"
- interfaces. If not indicated, "NVMe" will be the default
- for local SSDs. This field is ignored for persistent disks
- as the interface is chosen automatically. See
- https://cloud.google.com/compute/docs/disks/persistent-disks#choose_an_interface.
- """
-
- image: str = proto.Field(
- proto.STRING,
- number=4,
- oneof='data_source',
- )
- snapshot: str = proto.Field(
- proto.STRING,
- number=5,
- oneof='data_source',
- )
- type_: str = proto.Field(
- proto.STRING,
- number=1,
- )
- size_gb: int = proto.Field(
- proto.INT64,
- number=2,
- )
- disk_interface: str = proto.Field(
- proto.STRING,
- number=6,
- )
-
- class AttachedDisk(proto.Message):
- r"""A new or an existing persistent disk (PD) or a local SSD
- attached to a VM instance.
-
- This message has `oneof`_ fields (mutually exclusive fields).
- For each oneof, at most one member field can be set at the same time.
- Setting any member of the oneof automatically clears all other
- members.
-
- .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
-
- Attributes:
- new_disk (google.cloud.batch_v1alpha.types.AllocationPolicy.Disk):
-
- This field is a member of `oneof`_ ``attached``.
- existing_disk (str):
- Name of an existing PD.
-
- This field is a member of `oneof`_ ``attached``.
- device_name (str):
- Device name that the guest operating system will see. It is
- used by the Runnable.volumes field to mount disks. Specify
- the device_name if you want Batch to help mount the disk;
- it must match the device_name field in volumes.
- """
-
- new_disk: 'AllocationPolicy.Disk' = proto.Field(
- proto.MESSAGE,
- number=1,
- oneof='attached',
- message='AllocationPolicy.Disk',
- )
- existing_disk: str = proto.Field(
- proto.STRING,
- number=2,
- oneof='attached',
- )
- device_name: str = proto.Field(
- proto.STRING,
- number=3,
- )
-
- class Accelerator(proto.Message):
- r"""Accelerator describes Compute Engine accelerators to be
- attached to the VM.
-
- Attributes:
- type_ (str):
- The accelerator type. For example, "nvidia-tesla-t4". See
- ``gcloud compute accelerator-types list``.
- count (int):
- The number of accelerators of this type.
- install_gpu_drivers (bool):
- Deprecated: please use instances[0].install_gpu_drivers
- instead.
- driver_version (str):
- Optional. The NVIDIA GPU driver version that
- should be installed for this type.
-
- You can define the specific driver version such
- as "470.103.01", following the driver version
- requirements in
- https://cloud.google.com/compute/docs/gpus/install-drivers-gpu#minimum-driver.
- Batch will install the specific accelerator
- driver if qualified.
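-
-        Example sketch (illustrative only; the type and driver version
-        are placeholder values taken from the descriptions above)::
-
-            # One T4 GPU per VM, with an explicitly pinned driver version.
-            accelerator = batch_v1alpha.AllocationPolicy.Accelerator(
-                type_="nvidia-tesla-t4",
-                count=1,
-                driver_version="470.103.01",
-            )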
- """ - - type_: str = proto.Field( - proto.STRING, - number=1, - ) - count: int = proto.Field( - proto.INT64, - number=2, - ) - install_gpu_drivers: bool = proto.Field( - proto.BOOL, - number=3, - ) - driver_version: str = proto.Field( - proto.STRING, - number=4, - ) - - class InstancePolicy(proto.Message): - r"""InstancePolicy describes an instance type and resources - attached to each VM created by this InstancePolicy. - - Attributes: - allowed_machine_types (MutableSequence[str]): - Deprecated: please use machine_type instead. - machine_type (str): - The Compute Engine machine type. - min_cpu_platform (str): - The minimum CPU platform. - See - https://cloud.google.com/compute/docs/instances/specify-min-cpu-platform. - provisioning_model (google.cloud.batch_v1alpha.types.AllocationPolicy.ProvisioningModel): - The provisioning model. - accelerators (MutableSequence[google.cloud.batch_v1alpha.types.AllocationPolicy.Accelerator]): - The accelerators attached to each VM - instance. - boot_disk (google.cloud.batch_v1alpha.types.AllocationPolicy.Disk): - Boot disk to be created and attached to each - VM by this InstancePolicy. Boot disk will be - deleted when the VM is deleted. Batch API now - only supports booting from image. - disks (MutableSequence[google.cloud.batch_v1alpha.types.AllocationPolicy.AttachedDisk]): - Non-boot disks to be attached for each VM - created by this InstancePolicy. New disks will - be deleted when the VM is deleted. A non-boot - disk is a disk that can be of a device with a - file system or a raw storage drive that is not - ready for data storage and accessing. - reservation (str): - Optional. If not specified (default), VMs will consume any - applicable reservation. If "NO_RESERVATION" is specified, - VMs will not consume any reservation. Otherwise, if - specified, VMs will consume only the specified reservation. - """ - - allowed_machine_types: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - machine_type: str = proto.Field( - proto.STRING, - number=2, - ) - min_cpu_platform: str = proto.Field( - proto.STRING, - number=3, - ) - provisioning_model: 'AllocationPolicy.ProvisioningModel' = proto.Field( - proto.ENUM, - number=4, - enum='AllocationPolicy.ProvisioningModel', - ) - accelerators: MutableSequence['AllocationPolicy.Accelerator'] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='AllocationPolicy.Accelerator', - ) - boot_disk: 'AllocationPolicy.Disk' = proto.Field( - proto.MESSAGE, - number=8, - message='AllocationPolicy.Disk', - ) - disks: MutableSequence['AllocationPolicy.AttachedDisk'] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message='AllocationPolicy.AttachedDisk', - ) - reservation: str = proto.Field( - proto.STRING, - number=7, - ) - - class InstancePolicyOrTemplate(proto.Message): - r"""InstancePolicyOrTemplate lets you define the type of - resources to use for this job either with an InstancePolicy or - an instance template. If undefined, Batch picks the type of VM - to use and doesn't include optional VM resources such as GPUs - and extra disks. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - policy (google.cloud.batch_v1alpha.types.AllocationPolicy.InstancePolicy): - InstancePolicy. 
- - This field is a member of `oneof`_ ``policy_template``. - instance_template (str): - Name of an instance template used to create VMs. Named the - field as 'instance_template' instead of 'template' to avoid - C++ keyword conflict. - - Batch only supports global instance templates from the same - project as the job. You can specify the global instance - template as a full or partial URL. - - This field is a member of `oneof`_ ``policy_template``. - install_gpu_drivers (bool): - Set this field true if you want Batch to help fetch drivers - from a third party location and install them for GPUs - specified in ``policy.accelerators`` or - ``instance_template`` on your behalf. Default is false. - - For Container-Optimized Image cases, Batch will install the - accelerator driver following milestones of - https://cloud.google.com/container-optimized-os/docs/release-notes. - For non Container-Optimized Image cases, following - https://github.com/GoogleCloudPlatform/compute-gpu-installation/blob/main/linux/install_gpu_driver.py. - install_ops_agent (bool): - Optional. Set this field true if you want - Batch to install Ops Agent on your behalf. - Default is false. - block_project_ssh_keys (bool): - Optional. Set this field to ``true`` if you want Batch to - block project-level SSH keys from accessing this job's VMs. - Alternatively, you can configure the job to specify a VM - instance template that blocks project-level SSH keys. In - either case, Batch blocks project-level SSH keys while - creating the VMs for this job. - - Batch allows project-level SSH keys for a job's VMs only if - all the following are true: - - - This field is undefined or set to ``false``. - - The job's VM instance template (if any) doesn't block - project-level SSH keys. - - Notably, you can override this behavior by manually updating - a VM to block or allow project-level SSH keys. For more - information about blocking project-level SSH keys, see the - Compute Engine documentation: - https://cloud.google.com/compute/docs/connect/restrict-ssh-keys#block-keys - """ - - policy: 'AllocationPolicy.InstancePolicy' = proto.Field( - proto.MESSAGE, - number=1, - oneof='policy_template', - message='AllocationPolicy.InstancePolicy', - ) - instance_template: str = proto.Field( - proto.STRING, - number=2, - oneof='policy_template', - ) - install_gpu_drivers: bool = proto.Field( - proto.BOOL, - number=3, - ) - install_ops_agent: bool = proto.Field( - proto.BOOL, - number=4, - ) - block_project_ssh_keys: bool = proto.Field( - proto.BOOL, - number=5, - ) - - class NetworkInterface(proto.Message): - r"""A network interface. - - Attributes: - network (str): - The URL of an existing network resource. You can specify the - network as a full or partial URL. - - For example, the following are all valid URLs: - - - https://www.googleapis.com/compute/v1/projects/{project}/global/networks/{network} - - projects/{project}/global/networks/{network} - - global/networks/{network} - subnetwork (str): - The URL of an existing subnetwork resource in the network. - You can specify the subnetwork as a full or partial URL. - - For example, the following are all valid URLs: - - - https://www.googleapis.com/compute/v1/projects/{project}/regions/{region}/subnetworks/{subnetwork} - - projects/{project}/regions/{region}/subnetworks/{subnetwork} - - regions/{region}/subnetworks/{subnetwork} - no_external_ip_address (bool): - Default is false (with an external IP - address). Required if no external public IP - address is attached to the VM. 
If no external - public IP address, additional configuration is - required to allow the VM to access Google - Services. See - https://cloud.google.com/vpc/docs/configure-private-google-access - and - https://cloud.google.com/nat/docs/gce-example#create-nat - for more information. - """ - - network: str = proto.Field( - proto.STRING, - number=1, - ) - subnetwork: str = proto.Field( - proto.STRING, - number=2, - ) - no_external_ip_address: bool = proto.Field( - proto.BOOL, - number=3, - ) - - class NetworkPolicy(proto.Message): - r"""NetworkPolicy describes VM instance network configurations. - - Attributes: - network_interfaces (MutableSequence[google.cloud.batch_v1alpha.types.AllocationPolicy.NetworkInterface]): - Network configurations. - """ - - network_interfaces: MutableSequence['AllocationPolicy.NetworkInterface'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='AllocationPolicy.NetworkInterface', - ) - - class PlacementPolicy(proto.Message): - r"""PlacementPolicy describes a group placement policy for the - VMs controlled by this AllocationPolicy. - - Attributes: - collocation (str): - UNSPECIFIED vs. COLLOCATED (default - UNSPECIFIED). Use COLLOCATED when you want VMs - to be located close to each other for low - network latency between the VMs. No placement - policy will be generated when collocation is - UNSPECIFIED. - max_distance (int): - When specified, causes the job to fail if more than - max_distance logical switches are required between VMs. - Batch uses the most compact possible placement of VMs even - when max_distance is not specified. An explicit max_distance - makes that level of compactness a strict requirement. Not - yet implemented - """ - - collocation: str = proto.Field( - proto.STRING, - number=1, - ) - max_distance: int = proto.Field( - proto.INT64, - number=2, - ) - - location: LocationPolicy = proto.Field( - proto.MESSAGE, - number=1, - message=LocationPolicy, - ) - instance: InstancePolicy = proto.Field( - proto.MESSAGE, - number=2, - message=InstancePolicy, - ) - instances: MutableSequence[InstancePolicyOrTemplate] = proto.RepeatedField( - proto.MESSAGE, - number=8, - message=InstancePolicyOrTemplate, - ) - instance_templates: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - provisioning_models: MutableSequence[ProvisioningModel] = proto.RepeatedField( - proto.ENUM, - number=4, - enum=ProvisioningModel, - ) - service_account_email: str = proto.Field( - proto.STRING, - number=5, - ) - service_account: 'ServiceAccount' = proto.Field( - proto.MESSAGE, - number=9, - message='ServiceAccount', - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=6, - ) - network: NetworkPolicy = proto.Field( - proto.MESSAGE, - number=7, - message=NetworkPolicy, - ) - placement: PlacementPolicy = proto.Field( - proto.MESSAGE, - number=10, - message=PlacementPolicy, - ) - tags: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=11, - ) - - -class TaskGroup(proto.Message): - r"""A TaskGroup defines one or more Tasks that all share the same - TaskSpec. - - Attributes: - name (str): - Output only. TaskGroup name. - The system generates this field based on parent - Job name. For example: - - "projects/123456/locations/us-west1/jobs/job01/taskGroups/group01". - task_spec (google.cloud.batch_v1alpha.types.TaskSpec): - Required. Tasks in the group share the same - task spec. - task_count (int): - Number of Tasks in the TaskGroup. - Default is 1. 
- parallelism (int):
- Max number of tasks that can run in parallel. Defaults to
- min(task_count, parallel tasks per job limit). See: `Job
- Limits `__.
- Field parallelism must be 1 if the scheduling_policy is
- IN_ORDER.
- scheduling_policy (google.cloud.batch_v1alpha.types.TaskGroup.SchedulingPolicy):
- Scheduling policy for Tasks in the TaskGroup. The default
- value is AS_SOON_AS_POSSIBLE.
- allocation_policy (google.cloud.batch_v1alpha.types.AllocationPolicy):
- Compute resource allocation for the
- TaskGroup. If specified, it overrides resources
- in Job.
- labels (MutableMapping[str, str]):
- Labels for the TaskGroup. Labels can be user provided or
- system generated. You can assign up to 64 labels. `Google
- Compute Engine label
- restrictions `__
- apply. Label names that start with "goog-" or "google-" are
- reserved.
- task_environments (MutableSequence[google.cloud.batch_v1alpha.types.Environment]):
- An array of environment variable mappings, which are passed
- to Tasks with matching indices. If task_environments is
- used, task_count should not be specified in the request (and
- will be ignored). Task count will be the length of
- task_environments.
-
- In addition to any environment variables set in
- task_environments, each Task gets BATCH_TASK_INDEX and
- BATCH_TASK_COUNT environment variables, specifying the
- Task's index in the TaskGroup (0 through BATCH_TASK_COUNT -
- 1) and the number of Tasks in the Task's parent TaskGroup.
- task_count_per_node (int):
- Max number of tasks that can be run on a VM
- at the same time. If not specified, the system
- will decide a value based on available compute
- resources on a VM and task requirements.
- require_hosts_file (bool):
- When true, Batch will populate a file with a list of all VMs
- assigned to the TaskGroup and set the BATCH_HOSTS_FILE
- environment variable to the path of that file. Defaults to
- false. The host file supports up to 1000 VMs.
- permissive_ssh (bool):
- When true, Batch will configure SSH to allow
- passwordless login between VMs running the Batch
- tasks in the same TaskGroup.
- run_as_non_root (bool):
- Optional. If not set or set to false, Batch uses the root
- user to execute runnables. If set to true, Batch runs the
- runnables using a non-root user. Currently, the non-root
- user Batch uses is generated by OS Login. For more
- information, see `About OS
- Login `__.
- service_account (google.cloud.batch_v1alpha.types.ServiceAccount):
- Optional. ServiceAccount used by tasks within the task group
- for access to other Cloud resources. This allows tasks
- to operate with permissions distinct from the service
- account for the VM set at ``AllocationPolicy``. Use this
- field when tasks require different access rights than those
- of the VM.
-
- Specify the service account's ``email`` field. Ensure
- ``scopes`` include any necessary permissions for tasks, in
- addition to the default 'cloud-platform' scope.
- """
- class SchedulingPolicy(proto.Enum):
- r"""How Tasks in the TaskGroup should be scheduled relative to
- each other.
-
- Values:
- SCHEDULING_POLICY_UNSPECIFIED (0):
- Unspecified.
- AS_SOON_AS_POSSIBLE (1):
- Run Tasks as soon as resources are available.
-
- Tasks might be executed in parallel depending on parallelism
- and task_count values.
- IN_ORDER (2):
- Run Tasks sequentially by increasing task
- index.
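-
-        Example sketch (illustrative only; ``my_task_spec`` is a
-        placeholder for an existing ``TaskSpec``)::
-
-            # Run ten tasks strictly one after another; parallelism must
-            # be 1 when the scheduling policy is IN_ORDER.
-            task_group = batch_v1alpha.TaskGroup(
-                task_spec=my_task_spec,
-                task_count=10,
-                parallelism=1,
-                scheduling_policy=(
-                    batch_v1alpha.TaskGroup.SchedulingPolicy.IN_ORDER
-                ),
-            )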
- """ - SCHEDULING_POLICY_UNSPECIFIED = 0 - AS_SOON_AS_POSSIBLE = 1 - IN_ORDER = 2 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - task_spec: task.TaskSpec = proto.Field( - proto.MESSAGE, - number=3, - message=task.TaskSpec, - ) - task_count: int = proto.Field( - proto.INT64, - number=4, - ) - parallelism: int = proto.Field( - proto.INT64, - number=5, - ) - scheduling_policy: SchedulingPolicy = proto.Field( - proto.ENUM, - number=6, - enum=SchedulingPolicy, - ) - allocation_policy: 'AllocationPolicy' = proto.Field( - proto.MESSAGE, - number=7, - message='AllocationPolicy', - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=8, - ) - task_environments: MutableSequence[task.Environment] = proto.RepeatedField( - proto.MESSAGE, - number=9, - message=task.Environment, - ) - task_count_per_node: int = proto.Field( - proto.INT64, - number=10, - ) - require_hosts_file: bool = proto.Field( - proto.BOOL, - number=11, - ) - permissive_ssh: bool = proto.Field( - proto.BOOL, - number=12, - ) - run_as_non_root: bool = proto.Field( - proto.BOOL, - number=14, - ) - service_account: 'ServiceAccount' = proto.Field( - proto.MESSAGE, - number=15, - message='ServiceAccount', - ) - - -class ServiceAccount(proto.Message): - r"""Carries information about a Google Cloud service account. - - Attributes: - email (str): - Email address of the service account. - scopes (MutableSequence[str]): - List of scopes to be enabled for this service - account. - """ - - email: str = proto.Field( - proto.STRING, - number=1, - ) - scopes: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/types/notification.py b/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/types/notification.py deleted file mode 100644 index ca8d4830f2e1..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/types/notification.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.batch.v1alpha', - manifest={ - 'Notification', - }, -) - - -class Notification(proto.Message): - r"""Notification on resource state change. - - Attributes: - pubsub_topic (str): - Required. The Pub/Sub topic where notifications like the - resource allowance state changes will be published. The - topic must exist in the same project as the job and billings - will be charged to this project. If not specified, no - Pub/Sub messages will be sent. Topic format: - ``projects/{project}/topics/{topic}``. 
- """ - - pubsub_topic: str = proto.Field( - proto.STRING, - number=1, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/types/resource_allowance.py b/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/types/resource_allowance.py deleted file mode 100644 index 9b0f4c833241..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/types/resource_allowance.py +++ /dev/null @@ -1,362 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.batch_v1alpha.types import notification -from google.protobuf import timestamp_pb2 # type: ignore -from google.type import interval_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.batch.v1alpha', - manifest={ - 'CalendarPeriod', - 'ResourceAllowanceState', - 'ResourceAllowance', - 'UsageResourceAllowance', - 'UsageResourceAllowanceSpec', - 'UsageResourceAllowanceStatus', - }, -) - - -class CalendarPeriod(proto.Enum): - r"""A ``CalendarPeriod`` represents the abstract concept of a time - period that has a canonical start. All calendar times begin at 12 AM - US and Canadian Pacific Time (UTC-8). - - Values: - CALENDAR_PERIOD_UNSPECIFIED (0): - Unspecified. - MONTH (1): - The month starts on the first date of the - month and resets at the beginning of each month. - QUARTER (2): - The quarter starts on dates January 1, April - 1, July 1, and October 1 of each year and resets - at the beginning of the next quarter. - YEAR (3): - The year starts on January 1 and resets at - the beginning of the next year. - WEEK (4): - The week period starts and resets every - Monday. - DAY (5): - The day starts at 12:00am. - """ - CALENDAR_PERIOD_UNSPECIFIED = 0 - MONTH = 1 - QUARTER = 2 - YEAR = 3 - WEEK = 4 - DAY = 5 - - -class ResourceAllowanceState(proto.Enum): - r"""ResourceAllowance valid state. - - Values: - RESOURCE_ALLOWANCE_STATE_UNSPECIFIED (0): - Unspecified. - RESOURCE_ALLOWANCE_ACTIVE (1): - ResourceAllowance is active and in use. - RESOURCE_ALLOWANCE_DEPLETED (2): - ResourceAllowance limit is reached. - """ - RESOURCE_ALLOWANCE_STATE_UNSPECIFIED = 0 - RESOURCE_ALLOWANCE_ACTIVE = 1 - RESOURCE_ALLOWANCE_DEPLETED = 2 - - -class ResourceAllowance(proto.Message): - r"""The Resource Allowance description for Cloud Batch. - Only one Resource Allowance is supported now under a specific - location and project. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - usage_resource_allowance (google.cloud.batch_v1alpha.types.UsageResourceAllowance): - The detail of usage resource allowance. - - This field is a member of `oneof`_ ``resource_allowance``. - name (str): - Identifier. ResourceAllowance name. 
- For example: - - "projects/123456/locations/us-central1/resourceAllowances/resource-allowance-1". - uid (str): - Output only. A system generated unique ID (in - UUID4 format) for the ResourceAllowance. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Time when the ResourceAllowance - was created. - labels (MutableMapping[str, str]): - Optional. Labels are attributes that can be set and used by - both the user and by Batch. Labels must meet the following - constraints: - - - Keys and values can contain only lowercase letters, - numeric characters, underscores, and dashes. - - All characters must use UTF-8 encoding, and international - characters are allowed. - - Keys must start with a lowercase letter or international - character. - - Each resource is limited to a maximum of 64 labels. - - Both keys and values are additionally constrained to be <= - 128 bytes. - notifications (MutableSequence[google.cloud.batch_v1alpha.types.Notification]): - Optional. Notification configurations. - """ - - usage_resource_allowance: 'UsageResourceAllowance' = proto.Field( - proto.MESSAGE, - number=4, - oneof='resource_allowance', - message='UsageResourceAllowance', - ) - name: str = proto.Field( - proto.STRING, - number=1, - ) - uid: str = proto.Field( - proto.STRING, - number=2, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=5, - ) - notifications: MutableSequence[notification.Notification] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message=notification.Notification, - ) - - -class UsageResourceAllowance(proto.Message): - r"""UsageResourceAllowance describes the detail of usage resource - allowance. - - Attributes: - spec (google.cloud.batch_v1alpha.types.UsageResourceAllowanceSpec): - Required. Spec of a usage ResourceAllowance. - status (google.cloud.batch_v1alpha.types.UsageResourceAllowanceStatus): - Output only. Status of a usage - ResourceAllowance. - """ - - spec: 'UsageResourceAllowanceSpec' = proto.Field( - proto.MESSAGE, - number=1, - message='UsageResourceAllowanceSpec', - ) - status: 'UsageResourceAllowanceStatus' = proto.Field( - proto.MESSAGE, - number=2, - message='UsageResourceAllowanceStatus', - ) - - -class UsageResourceAllowanceSpec(proto.Message): - r"""Spec of a usage ResourceAllowance. - - Attributes: - type_ (str): - Required. Spec type is unique for each usage - ResourceAllowance. Batch now only supports type - as "cpu-core-hours" for CPU usage consumption - tracking. - limit (google.cloud.batch_v1alpha.types.UsageResourceAllowanceSpec.Limit): - Required. Threshold of a - UsageResourceAllowance limiting how many - resources can be consumed for each type. - """ - - class Limit(proto.Message): - r"""UsageResourceAllowance limitation. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - calendar_period (google.cloud.batch_v1alpha.types.CalendarPeriod): - Optional. A CalendarPeriod represents the - abstract concept of a time period that has a - canonical start. - - This field is a member of `oneof`_ ``duration``. - limit (float): - Required. Limit value of a UsageResourceAllowance within its - one duration. - - Limit cannot be a negative value. Default is 0. For example, - you can set ``limit`` as 10000.0 with duration of the - current month by setting ``calendar_period`` field as - monthly. 
That means in your current month, 10000.0 is the - core hour limitation that your resources are allowed to - consume. - - This field is a member of `oneof`_ ``_limit``. - """ - - calendar_period: 'CalendarPeriod' = proto.Field( - proto.ENUM, - number=1, - oneof='duration', - enum='CalendarPeriod', - ) - limit: float = proto.Field( - proto.DOUBLE, - number=2, - optional=True, - ) - - type_: str = proto.Field( - proto.STRING, - number=1, - ) - limit: Limit = proto.Field( - proto.MESSAGE, - number=2, - message=Limit, - ) - - -class UsageResourceAllowanceStatus(proto.Message): - r"""Status of a usage ResourceAllowance. - - Attributes: - state (google.cloud.batch_v1alpha.types.ResourceAllowanceState): - Output only. ResourceAllowance state. - limit_status (google.cloud.batch_v1alpha.types.UsageResourceAllowanceStatus.LimitStatus): - Output only. ResourceAllowance consumption - status for usage resources. - report (google.cloud.batch_v1alpha.types.UsageResourceAllowanceStatus.ConsumptionReport): - Output only. The report of ResourceAllowance - consumptions in a time period. - """ - - class LimitStatus(proto.Message): - r"""UsageResourceAllowanceStatus detail about usage consumption. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - consumption_interval (google.type.interval_pb2.Interval): - Output only. The consumption interval. - limit (float): - Output only. Limit value of a - UsageResourceAllowance within its one duration. - - This field is a member of `oneof`_ ``_limit``. - consumed (float): - Output only. Accumulated consumption during - ``consumption_interval``. - - This field is a member of `oneof`_ ``_consumed``. - """ - - consumption_interval: interval_pb2.Interval = proto.Field( - proto.MESSAGE, - number=1, - message=interval_pb2.Interval, - ) - limit: float = proto.Field( - proto.DOUBLE, - number=2, - optional=True, - ) - consumed: float = proto.Field( - proto.DOUBLE, - number=3, - optional=True, - ) - - class PeriodConsumption(proto.Message): - r""" - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - consumption_interval (google.type.interval_pb2.Interval): - Output only. The consumption interval. - consumed (float): - Output only. Accumulated consumption during - ``consumption_interval``. - - This field is a member of `oneof`_ ``_consumed``. - """ - - consumption_interval: interval_pb2.Interval = proto.Field( - proto.MESSAGE, - number=1, - message=interval_pb2.Interval, - ) - consumed: float = proto.Field( - proto.DOUBLE, - number=2, - optional=True, - ) - - class ConsumptionReport(proto.Message): - r"""ConsumptionReport is the report of ResourceAllowance - consumptions in a time period. - - Attributes: - latest_period_consumptions (MutableMapping[str, google.cloud.batch_v1alpha.types.UsageResourceAllowanceStatus.PeriodConsumption]): - Output only. ResourceAllowance consumptions - in the latest calendar period. Key is the - calendar period in string format. Batch - currently supports HOUR, DAY, MONTH and YEAR. 
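-
-        Example sketch (illustrative only; ``status`` is assumed to be a
-        populated ``UsageResourceAllowanceStatus``)::
-
-            # Print the accumulated consumption for each calendar period.
-            for period, entry in status.report.latest_period_consumptions.items():
-                print(period, entry.consumed)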
- """ - - latest_period_consumptions: MutableMapping[str, 'UsageResourceAllowanceStatus.PeriodConsumption'] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=1, - message='UsageResourceAllowanceStatus.PeriodConsumption', - ) - - state: 'ResourceAllowanceState' = proto.Field( - proto.ENUM, - number=1, - enum='ResourceAllowanceState', - ) - limit_status: LimitStatus = proto.Field( - proto.MESSAGE, - number=2, - message=LimitStatus, - ) - report: ConsumptionReport = proto.Field( - proto.MESSAGE, - number=3, - message=ConsumptionReport, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/types/task.py b/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/types/task.py deleted file mode 100644 index ef056ce23e80..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/types/task.py +++ /dev/null @@ -1,849 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.batch_v1alpha.types import volume -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.batch.v1alpha', - manifest={ - 'ComputeResource', - 'StatusEvent', - 'TaskExecution', - 'TaskStatus', - 'TaskResourceUsage', - 'Runnable', - 'TaskSpec', - 'LifecyclePolicy', - 'Task', - 'Environment', - }, -) - - -class ComputeResource(proto.Message): - r"""Compute resource requirements. - - ComputeResource defines the amount of resources required for each - task. Make sure your tasks have enough resources to successfully - run. If you also define the types of resources for a job to use with - the - `InstancePolicyOrTemplate `__ - field, make sure both fields are compatible with each other. - - Attributes: - cpu_milli (int): - The milliCPU count. - - ``cpuMilli`` defines the amount of CPU resources per task in - milliCPU units. For example, ``1000`` corresponds to 1 vCPU - per task. If undefined, the default value is ``2000``. - - If you also define the VM's machine type using the - ``machineType`` in - `InstancePolicy `__ - field or inside the ``instanceTemplate`` in the - `InstancePolicyOrTemplate `__ - field, make sure the CPU resources for both fields are - compatible with each other and with how many tasks you want - to allow to run on the same VM at the same time. - - For example, if you specify the ``n2-standard-2`` machine - type, which has 2 vCPUs each, you are recommended to set - ``cpuMilli`` no more than ``2000``, or you are recommended - to run two tasks on the same VM if you set ``cpuMilli`` to - ``1000`` or less. - memory_mib (int): - Memory in MiB. - - ``memoryMib`` defines the amount of memory per task in MiB - units. If undefined, the default value is ``2000``. 
If you - also define the VM's machine type using the ``machineType`` - in - `InstancePolicy `__ - field or inside the ``instanceTemplate`` in the - `InstancePolicyOrTemplate `__ - field, make sure the memory resources for both fields are - compatible with each other and with how many tasks you want - to allow to run on the same VM at the same time. - - For example, if you specify the ``n2-standard-2`` machine - type, which has 8 GiB each, you are recommended to set - ``memoryMib`` to no more than ``8192``, or you are - recommended to run two tasks on the same VM if you set - ``memoryMib`` to ``4096`` or less. - gpu_count (int): - The GPU count. - - Not yet implemented. - boot_disk_mib (int): - Extra boot disk size in MiB for each task. - """ - - cpu_milli: int = proto.Field( - proto.INT64, - number=1, - ) - memory_mib: int = proto.Field( - proto.INT64, - number=2, - ) - gpu_count: int = proto.Field( - proto.INT64, - number=3, - ) - boot_disk_mib: int = proto.Field( - proto.INT64, - number=4, - ) - - -class StatusEvent(proto.Message): - r"""Status event. - - Attributes: - type_ (str): - Type of the event. - description (str): - Description of the event. - event_time (google.protobuf.timestamp_pb2.Timestamp): - The time this event occurred. - task_execution (google.cloud.batch_v1alpha.types.TaskExecution): - Task Execution. - This field is only defined for task-level status - events where the task fails. - task_state (google.cloud.batch_v1alpha.types.TaskStatus.State): - Task State. - This field is only defined for task-level status - events. - """ - - type_: str = proto.Field( - proto.STRING, - number=3, - ) - description: str = proto.Field( - proto.STRING, - number=1, - ) - event_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - task_execution: 'TaskExecution' = proto.Field( - proto.MESSAGE, - number=4, - message='TaskExecution', - ) - task_state: 'TaskStatus.State' = proto.Field( - proto.ENUM, - number=5, - enum='TaskStatus.State', - ) - - -class TaskExecution(proto.Message): - r"""This Task Execution field includes detail information for - task execution procedures, based on StatusEvent types. - - Attributes: - exit_code (int): - The exit code of a finished task. - - If the task succeeded, the exit code will be 0. If the task - failed but not due to the following reasons, the exit code - will be 50000. - - Otherwise, it can be from different sources: - - - Batch known failures: - https://cloud.google.com/batch/docs/troubleshooting#reserved-exit-codes. - - Batch runnable execution failures; you can rely on Batch - logs to further diagnose: - https://cloud.google.com/batch/docs/analyze-job-using-logs. - If there are multiple runnables failures, Batch only - exposes the first error. - stderr_snippet (str): - Optional. The tail end of any content written - to standard error by the task execution. This - field will be populated only when the execution - failed. - """ - - exit_code: int = proto.Field( - proto.INT32, - number=1, - ) - stderr_snippet: str = proto.Field( - proto.STRING, - number=2, - ) - - -class TaskStatus(proto.Message): - r"""Status of a task. - - Attributes: - state (google.cloud.batch_v1alpha.types.TaskStatus.State): - Task state. - status_events (MutableSequence[google.cloud.batch_v1alpha.types.StatusEvent]): - Detailed info about why the state is reached. - resource_usage (google.cloud.batch_v1alpha.types.TaskResourceUsage): - The resource usage of the task. - """ - class State(proto.Enum): - r"""Task states. 
-
- Values:
- STATE_UNSPECIFIED (0):
- Unknown state.
- PENDING (1):
- The Task is created and waiting for
- resources.
- ASSIGNED (2):
- The Task is assigned to at least one VM.
- RUNNING (3):
- The Task is running.
- FAILED (4):
- The Task has failed.
- SUCCEEDED (5):
- The Task has succeeded.
- UNEXECUTED (6):
- The Task has not been executed when the Job
- finishes.
- """
- STATE_UNSPECIFIED = 0
- PENDING = 1
- ASSIGNED = 2
- RUNNING = 3
- FAILED = 4
- SUCCEEDED = 5
- UNEXECUTED = 6
-
- state: State = proto.Field(
- proto.ENUM,
- number=1,
- enum=State,
- )
- status_events: MutableSequence['StatusEvent'] = proto.RepeatedField(
- proto.MESSAGE,
- number=2,
- message='StatusEvent',
- )
- resource_usage: 'TaskResourceUsage' = proto.Field(
- proto.MESSAGE,
- number=3,
- message='TaskResourceUsage',
- )
-
-
- class TaskResourceUsage(proto.Message):
- r"""TaskResourceUsage describes the resource usage of the task.
-
- Attributes:
- core_hours (float):
- The CPU core hours the task consumes based on
- task requirements and run time.
- """
-
- core_hours: float = proto.Field(
- proto.DOUBLE,
- number=1,
- )
-
-
- class Runnable(proto.Message):
- r"""Runnable describes instructions for executing a specific
- script or container as part of a Task.
-
- This message has `oneof`_ fields (mutually exclusive fields).
- For each oneof, at most one member field can be set at the same time.
- Setting any member of the oneof automatically clears all other
- members.
-
- .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
-
- Attributes:
- container (google.cloud.batch_v1alpha.types.Runnable.Container):
- Container runnable.
-
- This field is a member of `oneof`_ ``executable``.
- script (google.cloud.batch_v1alpha.types.Runnable.Script):
- Script runnable.
-
- This field is a member of `oneof`_ ``executable``.
- barrier (google.cloud.batch_v1alpha.types.Runnable.Barrier):
- Barrier runnable.
-
- This field is a member of `oneof`_ ``executable``.
- display_name (str):
- Optional. DisplayName is an optional field
- that can be provided by the caller. If provided,
- it will be used in logs and other outputs to
- identify the script, making it easier for users
- to understand the logs. If not provided, the
- index of the runnable will be used for outputs.
- ignore_exit_status (bool):
- Normally, a runnable that returns a non-zero exit status
- fails and causes the task to fail. However, you can set this
- field to ``true`` to allow the task to continue executing
- its other runnables even if this runnable fails.
- background (bool):
- Normally, a runnable that doesn't exit causes its task to
- fail. However, you can set this field to ``true`` to
- configure a background runnable. Background runnables are
- allowed to continue running in the background while the task
- executes subsequent runnables. For example, background
- runnables are useful for providing services to other
- runnables or providing debugging-support tools like SSH
- servers.
-
- Specifically, background runnables are killed automatically
- (if they have not already exited) a short time after all
- foreground runnables have completed. Even though this is
- likely to result in a non-zero exit status for the
- background runnable, these automatic kills are not treated
- as task failures.
- always_run (bool):
- By default, after a Runnable fails, no further Runnables are
- executed. This flag indicates that this Runnable must be run
- even if the Task has already failed.
This is useful for - Runnables that copy output files off of the VM or for - debugging. - - The always_run flag does not override the Task's overall - max_run_duration. If the max_run_duration has expired then - no further Runnables will execute, not even always_run - Runnables. - environment (google.cloud.batch_v1alpha.types.Environment): - Environment variables for this Runnable - (overrides variables set for the whole Task or - TaskGroup). - timeout (google.protobuf.duration_pb2.Duration): - Timeout for this Runnable. - labels (MutableMapping[str, str]): - Labels for this Runnable. - """ - - class Container(proto.Message): - r"""Container runnable. - - Attributes: - image_uri (str): - Required. The URI to pull the container image - from. - commands (MutableSequence[str]): - Required for some container images. Overrides the ``CMD`` - specified in the container. If there is an ``ENTRYPOINT`` - (either in the container image or with the ``entrypoint`` - field below) then these commands are appended as arguments - to the ``ENTRYPOINT``. - entrypoint (str): - Required for some container images. Overrides the - ``ENTRYPOINT`` specified in the container. - volumes (MutableSequence[str]): - Volumes to mount (bind mount) from the host machine files or - directories into the container, formatted to match - ``--volume`` option for the ``docker run`` command—for - example, ``/foo:/bar`` or ``/foo:/bar:ro``. - - If the ``TaskSpec.Volumes`` field is specified but this - field is not, Batch will mount each volume from the host - machine to the container with the same mount path by - default. In this case, the default mount option for - containers will be read-only (``ro``) for existing - persistent disks and read-write (``rw``) for other volume - types, regardless of the original mount options specified in - ``TaskSpec.Volumes``. If you need different mount settings, - you can explicitly configure them in this field. - options (str): - Required for some container images. Arbitrary additional - options to include in the ``docker run`` command when - running this container—for example, ``--network host``. For - the ``--volume`` option, use the ``volumes`` field for the - container. - block_external_network (bool): - If set to true, external network access to and from - container will be blocked, containers that are with - block_external_network as true can still communicate with - each other, network cannot be specified in the - ``container.options`` field. - username (str): - Required if the container image is from a private Docker - registry. The username to login to the Docker registry that - contains the image. - - You can either specify the username directly by using plain - text or specify an encrypted username by using a Secret - Manager secret: ``projects/*/secrets/*/versions/*``. - However, using a secret is recommended for enhanced - security. - - Caution: If you specify the username using plain text, you - risk the username being exposed to any users who can view - the job or its logs. To avoid this risk, specify a secret - that contains the username instead. - - Learn more about `Secret - Manager `__ - and `using Secret Manager with - Batch `__. - password (str): - Required if the container image is from a private Docker - registry. The password to login to the Docker registry that - contains the image. - - For security, it is strongly recommended to specify an - encrypted password by using a Secret Manager secret: - ``projects/*/secrets/*/versions/*``. 
- - Warning: If you specify the password using plain text, you - risk the password being exposed to any users who can view - the job or its logs. To avoid this risk, specify a secret - that contains the password instead. - - Learn more about `Secret - Manager `__ - and `using Secret Manager with - Batch `__. - enable_image_streaming (bool): - Optional. If set to true, this container runnable uses Image - streaming. - - Use Image streaming to allow the runnable to initialize - without waiting for the entire container image to download, - which can significantly reduce startup time for large - container images. - - When ``enableImageStreaming`` is set to true, the container - runtime is `containerd `__ instead - of Docker. Additionally, this container runnable only - supports the following ``container`` subfields: - ``imageUri``, ``commands[]``, ``entrypoint``, and - ``volumes[]``; any other ``container`` subfields are - ignored. - - For more information about the requirements and limitations - for using Image streaming with Batch, see the - ```image-streaming`` sample on - GitHub `__. - """ - - image_uri: str = proto.Field( - proto.STRING, - number=1, - ) - commands: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - entrypoint: str = proto.Field( - proto.STRING, - number=3, - ) - volumes: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=7, - ) - options: str = proto.Field( - proto.STRING, - number=8, - ) - block_external_network: bool = proto.Field( - proto.BOOL, - number=9, - ) - username: str = proto.Field( - proto.STRING, - number=10, - ) - password: str = proto.Field( - proto.STRING, - number=11, - ) - enable_image_streaming: bool = proto.Field( - proto.BOOL, - number=12, - ) - - class Script(proto.Message): - r"""Script runnable. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - path (str): - The path to a script file that is accessible from the host - VM(s). - - Unless the script file supports the default ``#!/bin/sh`` - shell interpreter, you must specify an interpreter by - including a [shebang - line](https://en.wikipedia.org/wiki/Shebang_(Unix) as the - first line of the file. For example, to execute the script - using bash, include ``#!/bin/bash`` as the first line of the - file. Alternatively, to execute the script using Python3, - include ``#!/usr/bin/env python3`` as the first line of the - file. - - This field is a member of `oneof`_ ``command``. - text (str): - The text for a script. - - Unless the script text supports the default ``#!/bin/sh`` - shell interpreter, you must specify an interpreter by - including a [shebang - line](https://en.wikipedia.org/wiki/Shebang_(Unix) at the - beginning of the text. For example, to execute the script - using bash, include ``#!/bin/bash\n`` at the beginning of - the text. Alternatively, to execute the script using - Python3, include ``#!/usr/bin/env python3\n`` at the - beginning of the text. - - This field is a member of `oneof`_ ``command``. 
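-
-        Example sketch (illustrative only)::
-
-            # An inline bash script; BATCH_TASK_INDEX is set by Batch.
-            runnable = batch_v1alpha.Runnable(
-                script=batch_v1alpha.Runnable.Script(
-                    text="#!/bin/bash\necho Hello from task ${BATCH_TASK_INDEX}",
-                ),
-            )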
- """ - - path: str = proto.Field( - proto.STRING, - number=1, - oneof='command', - ) - text: str = proto.Field( - proto.STRING, - number=2, - oneof='command', - ) - - class Barrier(proto.Message): - r"""A barrier runnable automatically blocks the execution of - subsequent runnables until all the tasks in the task group reach - the barrier. - - Attributes: - name (str): - Barriers are identified by their index in - runnable list. Names are not required, but if - present should be an identifier. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - container: Container = proto.Field( - proto.MESSAGE, - number=1, - oneof='executable', - message=Container, - ) - script: Script = proto.Field( - proto.MESSAGE, - number=2, - oneof='executable', - message=Script, - ) - barrier: Barrier = proto.Field( - proto.MESSAGE, - number=6, - oneof='executable', - message=Barrier, - ) - display_name: str = proto.Field( - proto.STRING, - number=10, - ) - ignore_exit_status: bool = proto.Field( - proto.BOOL, - number=3, - ) - background: bool = proto.Field( - proto.BOOL, - number=4, - ) - always_run: bool = proto.Field( - proto.BOOL, - number=5, - ) - environment: 'Environment' = proto.Field( - proto.MESSAGE, - number=7, - message='Environment', - ) - timeout: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=8, - message=duration_pb2.Duration, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=9, - ) - - -class TaskSpec(proto.Message): - r"""Spec of a task - - Attributes: - runnables (MutableSequence[google.cloud.batch_v1alpha.types.Runnable]): - Required. The sequence of one or more runnables (executable - scripts, executable containers, and/or barriers) for each - task in this task group to run. Each task runs this list of - runnables in order. For a task to succeed, all of its script - and container runnables each must meet at least one of the - following conditions: - - - The runnable exited with a zero status. - - The runnable didn't finish, but you enabled its - ``background`` subfield. - - The runnable exited with a non-zero status, but you - enabled its ``ignore_exit_status`` subfield. - compute_resource (google.cloud.batch_v1alpha.types.ComputeResource): - ComputeResource requirements. - max_run_duration (google.protobuf.duration_pb2.Duration): - Maximum duration the task should run before being - automatically retried (if enabled) or automatically failed. - Format the value of this field as a time limit in seconds - followed by ``s``—for example, ``3600s`` for 1 hour. The - field accepts any value between 0 and the maximum listed for - the ``Duration`` field type at - https://protobuf.dev/reference/protobuf/google.protobuf/#duration; - however, the actual maximum run time for a job will be - limited to the maximum run time for a job listed at - https://cloud.google.com/batch/quotas#max-job-duration. - max_retry_count (int): - Maximum number of retries on failures. The default, 0, which - means never retry. The valid value range is [0, 10]. - lifecycle_policies (MutableSequence[google.cloud.batch_v1alpha.types.LifecyclePolicy]): - Lifecycle management schema when any task in a task group is - failed. Currently we only support one lifecycle policy. When - the lifecycle policy condition is met, the action in the - policy will execute. If task execution result does not meet - with the defined lifecycle policy, we consider it as the - default policy. Default policy means if the exit code is 0, - exit task. 
If task ends with non-zero exit code, retry the - task with max_retry_count. - environments (MutableMapping[str, str]): - Deprecated: please use - environment(non-plural) instead. - volumes (MutableSequence[google.cloud.batch_v1alpha.types.Volume]): - Volumes to mount before running Tasks using - this TaskSpec. - environment (google.cloud.batch_v1alpha.types.Environment): - Environment variables to set before running - the Task. - """ - - runnables: MutableSequence['Runnable'] = proto.RepeatedField( - proto.MESSAGE, - number=8, - message='Runnable', - ) - compute_resource: 'ComputeResource' = proto.Field( - proto.MESSAGE, - number=3, - message='ComputeResource', - ) - max_run_duration: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=4, - message=duration_pb2.Duration, - ) - max_retry_count: int = proto.Field( - proto.INT32, - number=5, - ) - lifecycle_policies: MutableSequence['LifecyclePolicy'] = proto.RepeatedField( - proto.MESSAGE, - number=9, - message='LifecyclePolicy', - ) - environments: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=6, - ) - volumes: MutableSequence[volume.Volume] = proto.RepeatedField( - proto.MESSAGE, - number=7, - message=volume.Volume, - ) - environment: 'Environment' = proto.Field( - proto.MESSAGE, - number=10, - message='Environment', - ) - - -class LifecyclePolicy(proto.Message): - r"""LifecyclePolicy describes how to deal with task failures - based on different conditions. - - Attributes: - action (google.cloud.batch_v1alpha.types.LifecyclePolicy.Action): - Action to execute when ActionCondition is true. When - RETRY_TASK is specified, we will retry failed tasks if we - notice any exit code match and fail tasks if no match is - found. Likewise, when FAIL_TASK is specified, we will fail - tasks if we notice any exit code match and retry tasks if no - match is found. - action_condition (google.cloud.batch_v1alpha.types.LifecyclePolicy.ActionCondition): - Conditions that decide why a task failure is - dealt with a specific action. - """ - class Action(proto.Enum): - r"""Action on task failures based on different conditions. - - Values: - ACTION_UNSPECIFIED (0): - Action unspecified. - RETRY_TASK (1): - Action that tasks in the group will be - scheduled to re-execute. - FAIL_TASK (2): - Action that tasks in the group will be - stopped immediately. - """ - ACTION_UNSPECIFIED = 0 - RETRY_TASK = 1 - FAIL_TASK = 2 - - class ActionCondition(proto.Message): - r"""Conditions for actions to deal with task failures. - - Attributes: - exit_codes (MutableSequence[int]): - Exit codes of a task execution. - If there are more than 1 exit codes, - when task executes with any of the exit code in - the list, the condition is met and the action - will be executed. - """ - - exit_codes: MutableSequence[int] = proto.RepeatedField( - proto.INT32, - number=1, - ) - - action: Action = proto.Field( - proto.ENUM, - number=1, - enum=Action, - ) - action_condition: ActionCondition = proto.Field( - proto.MESSAGE, - number=2, - message=ActionCondition, - ) - - -class Task(proto.Message): - r"""A Cloud Batch task. - - Attributes: - name (str): - Task name. - The name is generated from the parent TaskGroup - name and 'id' field. For example: - - "projects/123456/locations/us-west1/jobs/job01/taskGroups/group01/tasks/task01". - status (google.cloud.batch_v1alpha.types.TaskStatus): - Task Status. 
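-
-    Example sketch (illustrative only; assumes Application Default
-    Credentials and an existing job, with a placeholder task name)::
-
-        from google.cloud import batch_v1alpha
-
-        client = batch_v1alpha.BatchServiceClient()
-        task = client.get_task(
-            name="projects/123456/locations/us-west1/jobs/job01"
-            "/taskGroups/group01/tasks/0",
-        )
-        print(task.status.state)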
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - status: 'TaskStatus' = proto.Field( - proto.MESSAGE, - number=2, - message='TaskStatus', - ) - - -class Environment(proto.Message): - r"""An Environment describes a collection of environment - variables to set when executing Tasks. - - Attributes: - variables (MutableMapping[str, str]): - A map of environment variable names to - values. - secret_variables (MutableMapping[str, str]): - A map of environment variable names to Secret - Manager secret names. The VM will access the - named secrets to set the value of each - environment variable. - encrypted_variables (google.cloud.batch_v1alpha.types.Environment.KMSEnvMap): - An encrypted JSON dictionary where the - key/value pairs correspond to environment - variable names and their values. - """ - - class KMSEnvMap(proto.Message): - r""" - - Attributes: - key_name (str): - The name of the KMS key that will be used to - decrypt the cipher text. - cipher_text (str): - The value of the cipherText response from the ``encrypt`` - method. - """ - - key_name: str = proto.Field( - proto.STRING, - number=1, - ) - cipher_text: str = proto.Field( - proto.STRING, - number=2, - ) - - variables: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=1, - ) - secret_variables: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=2, - ) - encrypted_variables: KMSEnvMap = proto.Field( - proto.MESSAGE, - number=3, - message=KMSEnvMap, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/types/volume.py b/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/types/volume.py deleted file mode 100644 index a02787a5927e..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/types/volume.py +++ /dev/null @@ -1,187 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.batch.v1alpha', - manifest={ - 'Volume', - 'NFS', - 'PD', - 'GCS', - }, -) - - -class Volume(proto.Message): - r"""Volume describes a volume and parameters for it to be mounted - to a VM. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - nfs (google.cloud.batch_v1alpha.types.NFS): - A Network File System (NFS) volume. For - example, a Filestore file share. - - This field is a member of `oneof`_ ``source``. - pd (google.cloud.batch_v1alpha.types.PD): - Deprecated: please use device_name instead. 
- - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/types/volume.py b/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/types/volume.py deleted file mode 100644 index a02787a5927e..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/google/cloud/batch_v1alpha/types/volume.py +++ /dev/null @@ -1,187 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.batch.v1alpha', - manifest={ - 'Volume', - 'NFS', - 'PD', - 'GCS', - }, -) - - -class Volume(proto.Message): - r"""Volume describes a volume and parameters for it to be mounted - to a VM. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - nfs (google.cloud.batch_v1alpha.types.NFS): - A Network File System (NFS) volume. For - example, a Filestore file share. - - This field is a member of `oneof`_ ``source``. - pd (google.cloud.batch_v1alpha.types.PD): - Deprecated: please use device_name instead. - - This field is a member of `oneof`_ ``source``. - gcs (google.cloud.batch_v1alpha.types.GCS): - A Google Cloud Storage (GCS) volume. - - This field is a member of `oneof`_ ``source``. - device_name (str): - Device name of an attached disk volume, which should align - with a device_name specified by - job.allocation_policy.instances[0].policy.disks[i].device_name - or defined by the given instance template in - job.allocation_policy.instances[0].instance_template. - - This field is a member of `oneof`_ ``source``. - mount_path (str): - The mount path for the volume, e.g. - /mnt/disks/share. - mount_options (MutableSequence[str]): - Mount options vary based on the type of storage volume: - - - For a Cloud Storage bucket, all the mount options - provided by the ``gcsfuse`` tool are supported. - - For an existing persistent disk, all mount options - provided by the ``mount`` command except writing are - supported. This is due to restrictions of multi-writer - mode. - - For any other disk or a Network File System (NFS), all - the mount options provided by the ``mount`` command are - supported. - """ - - nfs: 'NFS' = proto.Field( - proto.MESSAGE, - number=1, - oneof='source', - message='NFS', - ) - pd: 'PD' = proto.Field( - proto.MESSAGE, - number=2, - oneof='source', - message='PD', - ) - gcs: 'GCS' = proto.Field( - proto.MESSAGE, - number=3, - oneof='source', - message='GCS', - ) - device_name: str = proto.Field( - proto.STRING, - number=6, - oneof='source', - ) - mount_path: str = proto.Field( - proto.STRING, - number=4, - ) - mount_options: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=5, - )
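A minimal sketch of the common case for ``Volume``: mounting a Cloud Storage bucket through the ``GCS`` source defined further down in this file. The bucket name is a placeholder, and ``mount_path`` follows the ``/mnt/disks/...`` convention from the docstring:

.. code-block:: python

    from google.cloud import batch_v1alpha

    # Setting ``gcs`` selects the ``source`` oneof; any previously set
    # nfs, pd, or device_name member would be cleared automatically.
    volume = batch_v1alpha.Volume(
        gcs=batch_v1alpha.GCS(remote_path="my-bucket"),
        mount_path="/mnt/disks/share",
    )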
- - -class NFS(proto.Message): - r"""Represents an NFS volume. - - Attributes: - server (str): - The IP address of the NFS. - remote_path (str): - Remote source path exported from the NFS, - e.g., "/share". - """ - - server: str = proto.Field( - proto.STRING, - number=1, - ) - remote_path: str = proto.Field( - proto.STRING, - number=2, - ) - - -class PD(proto.Message): - r"""Deprecated: please use device_name instead. - - Attributes: - disk (str): - PD disk name, e.g. pd-1. - device (str): - PD device name, e.g. persistent-disk-1. - existing (bool): - Whether this is an existing PD. Default is - false. If false (a new PD), it is formatted into - ext4 and mounted at the given path. If true (an - existing PD), it must already be in ext4 format - and is mounted at the given path. - """ - - disk: str = proto.Field( - proto.STRING, - number=1, - ) - device: str = proto.Field( - proto.STRING, - number=2, - ) - existing: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class GCS(proto.Message): - r"""Represents a Google Cloud Storage volume. - - Attributes: - remote_path (str): - Remote path, either a bucket name or a subdirectory of a - bucket, e.g.: bucket_name, bucket_name/subdirectory/ - """ - - remote_path: str = proto.Field( - proto.STRING, - number=1, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/mypy.ini b/owl-bot-staging/google-cloud-batch/v1alpha/mypy.ini deleted file mode 100644 index 574c5aed394b..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/noxfile.py b/owl-bot-staging/google-cloud-batch/v1alpha/noxfile.py deleted file mode 100644 index c9ea4a40982f..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/noxfile.py +++ /dev/null @@ -1,280 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import pathlib -import re -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", - "3.12", - "3.13", -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = 'google-cloud-batch' - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.13" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds", - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "prerelease_deps", -] - -@nox.session(python=ALL_PYTHON) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def unit(session, protobuf_implementation): - """Run the unit test suite.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") - - # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. - # The 'cpp' implementation requires Protobuf<4. - if protobuf_implementation == "cpp": - session.install("protobuf<4") - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/batch_v1alpha/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - )
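Since ``unit`` is parametrized over both the Python version and ``protobuf_implementation``, a single combination can be selected by nox session name, for example ``nox -s "unit-3.13(protobuf_implementation='upb')"``; the quotes are needed because parametrized session names contain parentheses.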
- -@nox.session(python=ALL_PYTHON[-1]) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def prerelease_deps(session, protobuf_implementation): - """Run the unit test suite against pre-release versions of dependencies.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - # Install test environment dependencies - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - - # Install the package without dependencies - session.install('-e', '.', '--no-deps') - - # We test the minimum dependency versions using the minimum Python - # version, so the lowest Python runtime that we test has a corresponding constraints - # file, located at `testing/constraints-<python-version>.txt`, which contains all of the - # dependencies and extras. - with open( - CURRENT_DIRECTORY - / "testing" - / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. - constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - session.install(*constraints_deps) - - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - # Exclude grpcio==1.67.0rc1, which does not support Python 3.13 - "grpcio!=1.67.0rc1", - "grpcio-status", - "protobuf", - "proto-plus", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) - - # Remaining dependencies - other_deps = [ - "requests", - ] - session.install(*other_deps) - - # Print out prerelease package versions - - session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") - session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("python", "-c", "import grpc; print(grpc.__version__)") - session.run( - "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" - ) - session.run( - "python", "-c", "import proto; print(proto.__version__)" - ) - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/batch_v1alpha/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data.
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_cancel_job_async.py b/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_cancel_job_async.py deleted file mode 100644 index 19d634ba25bd..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_cancel_job_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CancelJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1alpha_generated_BatchService_CancelJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1alpha - - -async def sample_cancel_job(): - # Create a client - client = batch_v1alpha.BatchServiceAsyncClient() - - # Initialize request argument(s) - request = batch_v1alpha.CancelJobRequest( - name="name_value", - ) - - # Make the request - operation = client.cancel_job(request=request) - - print("Waiting for operation to complete...") - - response = await (await operation).result() - - # Handle the response - print(response) - -# [END batch_v1alpha_generated_BatchService_CancelJob_async] diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_cancel_job_sync.py b/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_cancel_job_sync.py deleted file mode 100644 index 4661b4b1b295..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_cancel_job_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CancelJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1alpha_generated_BatchService_CancelJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1alpha - - -def sample_cancel_job(): - # Create a client - client = batch_v1alpha.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1alpha.CancelJobRequest( - name="name_value", - ) - - # Make the request - operation = client.cancel_job(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END batch_v1alpha_generated_BatchService_CancelJob_sync] diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_create_job_async.py b/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_create_job_async.py deleted file mode 100644 index ae3cfc9a6fd5..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_create_job_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1alpha_generated_BatchService_CreateJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1alpha - - -async def sample_create_job(): - # Create a client - client = batch_v1alpha.BatchServiceAsyncClient() - - # Initialize request argument(s) - request = batch_v1alpha.CreateJobRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_job(request=request) - - # Handle the response - print(response) - -# [END batch_v1alpha_generated_BatchService_CreateJob_async] diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_create_job_sync.py b/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_create_job_sync.py deleted file mode 100644 index 23e4f2bc1da0..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_create_job_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1alpha_generated_BatchService_CreateJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1alpha - - -def sample_create_job(): - # Create a client - client = batch_v1alpha.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1alpha.CreateJobRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_job(request=request) - - # Handle the response - print(response) - -# [END batch_v1alpha_generated_BatchService_CreateJob_sync] diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_create_resource_allowance_async.py b/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_create_resource_allowance_async.py deleted file mode 100644 index de1fd238537d..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_create_resource_allowance_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateResourceAllowance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1alpha_generated_BatchService_CreateResourceAllowance_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1alpha - - -async def sample_create_resource_allowance(): - # Create a client - client = batch_v1alpha.BatchServiceAsyncClient() - - # Initialize request argument(s) - resource_allowance = batch_v1alpha.ResourceAllowance() - resource_allowance.usage_resource_allowance.spec.type_ = "type__value" - resource_allowance.usage_resource_allowance.spec.limit.calendar_period = "DAY" - - request = batch_v1alpha.CreateResourceAllowanceRequest( - parent="parent_value", - resource_allowance=resource_allowance, - ) - - # Make the request - response = await client.create_resource_allowance(request=request) - - # Handle the response - print(response) - -# [END batch_v1alpha_generated_BatchService_CreateResourceAllowance_async] diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_create_resource_allowance_sync.py b/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_create_resource_allowance_sync.py deleted file mode 100644 index 7a63cc8538c5..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_create_resource_allowance_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateResourceAllowance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1alpha_generated_BatchService_CreateResourceAllowance_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1alpha - - -def sample_create_resource_allowance(): - # Create a client - client = batch_v1alpha.BatchServiceClient() - - # Initialize request argument(s) - resource_allowance = batch_v1alpha.ResourceAllowance() - resource_allowance.usage_resource_allowance.spec.type_ = "type__value" - resource_allowance.usage_resource_allowance.spec.limit.calendar_period = "DAY" - - request = batch_v1alpha.CreateResourceAllowanceRequest( - parent="parent_value", - resource_allowance=resource_allowance, - ) - - # Make the request - response = client.create_resource_allowance(request=request) - - # Handle the response - print(response) - -# [END batch_v1alpha_generated_BatchService_CreateResourceAllowance_sync] diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_delete_job_async.py b/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_delete_job_async.py deleted file mode 100644 index 11ab7260e98d..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_delete_job_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1alpha_generated_BatchService_DeleteJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1alpha - - -async def sample_delete_job(): - # Create a client - client = batch_v1alpha.BatchServiceAsyncClient() - - # Initialize request argument(s) - request = batch_v1alpha.DeleteJobRequest( - ) - - # Make the request - operation = client.delete_job(request=request) - - print("Waiting for operation to complete...") - - response = await (await operation).result() - - # Handle the response - print(response) - -# [END batch_v1alpha_generated_BatchService_DeleteJob_async] diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_delete_job_sync.py b/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_delete_job_sync.py deleted file mode 100644 index 7f1cd20398a3..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_delete_job_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1alpha_generated_BatchService_DeleteJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1alpha - - -def sample_delete_job(): - # Create a client - client = batch_v1alpha.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1alpha.DeleteJobRequest( - ) - - # Make the request - operation = client.delete_job(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END batch_v1alpha_generated_BatchService_DeleteJob_sync] diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_delete_resource_allowance_async.py b/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_delete_resource_allowance_async.py deleted file mode 100644 index 99de164d1c71..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_delete_resource_allowance_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteResourceAllowance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1alpha_generated_BatchService_DeleteResourceAllowance_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1alpha - - -async def sample_delete_resource_allowance(): - # Create a client - client = batch_v1alpha.BatchServiceAsyncClient() - - # Initialize request argument(s) - request = batch_v1alpha.DeleteResourceAllowanceRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_resource_allowance(request=request) - - print("Waiting for operation to complete...") - - response = await (await operation).result() - - # Handle the response - print(response) - -# [END batch_v1alpha_generated_BatchService_DeleteResourceAllowance_async] diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_delete_resource_allowance_sync.py b/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_delete_resource_allowance_sync.py deleted file mode 100644 index 9cb35ba08915..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_delete_resource_allowance_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteResourceAllowance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1alpha_generated_BatchService_DeleteResourceAllowance_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1alpha - - -def sample_delete_resource_allowance(): - # Create a client - client = batch_v1alpha.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1alpha.DeleteResourceAllowanceRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_resource_allowance(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END batch_v1alpha_generated_BatchService_DeleteResourceAllowance_sync] diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_get_job_async.py b/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_get_job_async.py deleted file mode 100644 index f0730ebb2b10..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_get_job_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1alpha_generated_BatchService_GetJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1alpha - - -async def sample_get_job(): - # Create a client - client = batch_v1alpha.BatchServiceAsyncClient() - - # Initialize request argument(s) - request = batch_v1alpha.GetJobRequest( - name="name_value", - ) - - # Make the request - response = await client.get_job(request=request) - - # Handle the response - print(response) - -# [END batch_v1alpha_generated_BatchService_GetJob_async] diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_get_job_sync.py b/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_get_job_sync.py deleted file mode 100644 index 344bf71a8214..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_get_job_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1alpha_generated_BatchService_GetJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1alpha - - -def sample_get_job(): - # Create a client - client = batch_v1alpha.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1alpha.GetJobRequest( - name="name_value", - ) - - # Make the request - response = client.get_job(request=request) - - # Handle the response - print(response) - -# [END batch_v1alpha_generated_BatchService_GetJob_sync] diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_get_resource_allowance_async.py b/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_get_resource_allowance_async.py deleted file mode 100644 index 6a924ec6a1e2..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_get_resource_allowance_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetResourceAllowance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1alpha_generated_BatchService_GetResourceAllowance_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1alpha - - -async def sample_get_resource_allowance(): - # Create a client - client = batch_v1alpha.BatchServiceAsyncClient() - - # Initialize request argument(s) - request = batch_v1alpha.GetResourceAllowanceRequest( - name="name_value", - ) - - # Make the request - response = await client.get_resource_allowance(request=request) - - # Handle the response - print(response) - -# [END batch_v1alpha_generated_BatchService_GetResourceAllowance_async] diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_get_resource_allowance_sync.py b/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_get_resource_allowance_sync.py deleted file mode 100644 index b824ed04c9ed..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_get_resource_allowance_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetResourceAllowance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1alpha_generated_BatchService_GetResourceAllowance_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1alpha - - -def sample_get_resource_allowance(): - # Create a client - client = batch_v1alpha.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1alpha.GetResourceAllowanceRequest( - name="name_value", - ) - - # Make the request - response = client.get_resource_allowance(request=request) - - # Handle the response - print(response) - -# [END batch_v1alpha_generated_BatchService_GetResourceAllowance_sync] diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_get_task_async.py b/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_get_task_async.py deleted file mode 100644 index c577d7bbafcc..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_get_task_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetTask -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1alpha_generated_BatchService_GetTask_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1alpha - - -async def sample_get_task(): - # Create a client - client = batch_v1alpha.BatchServiceAsyncClient() - - # Initialize request argument(s) - request = batch_v1alpha.GetTaskRequest( - name="name_value", - ) - - # Make the request - response = await client.get_task(request=request) - - # Handle the response - print(response) - -# [END batch_v1alpha_generated_BatchService_GetTask_async] diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_get_task_sync.py b/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_get_task_sync.py deleted file mode 100644 index e01dda66c87c..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_get_task_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetTask -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1alpha_generated_BatchService_GetTask_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1alpha - - -def sample_get_task(): - # Create a client - client = batch_v1alpha.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1alpha.GetTaskRequest( - name="name_value", - ) - - # Make the request - response = client.get_task(request=request) - - # Handle the response - print(response) - -# [END batch_v1alpha_generated_BatchService_GetTask_sync] diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_list_jobs_async.py b/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_list_jobs_async.py deleted file mode 100644 index 4fe98533b675..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_list_jobs_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListJobs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1alpha_generated_BatchService_ListJobs_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1alpha - - -async def sample_list_jobs(): - # Create a client - client = batch_v1alpha.BatchServiceAsyncClient() - - # Initialize request argument(s) - request = batch_v1alpha.ListJobsRequest( - ) - - # Make the request - page_result = client.list_jobs(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END batch_v1alpha_generated_BatchService_ListJobs_async] diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_list_jobs_sync.py b/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_list_jobs_sync.py deleted file mode 100644 index b458eb7fd693..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_list_jobs_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListJobs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1alpha_generated_BatchService_ListJobs_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1alpha - - -def sample_list_jobs(): - # Create a client - client = batch_v1alpha.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1alpha.ListJobsRequest( - ) - - # Make the request - page_result = client.list_jobs(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END batch_v1alpha_generated_BatchService_ListJobs_sync] diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_list_resource_allowances_async.py b/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_list_resource_allowances_async.py deleted file mode 100644 index 53096768485b..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_list_resource_allowances_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListResourceAllowances -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1alpha_generated_BatchService_ListResourceAllowances_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1alpha - - -async def sample_list_resource_allowances(): - # Create a client - client = batch_v1alpha.BatchServiceAsyncClient() - - # Initialize request argument(s) - request = batch_v1alpha.ListResourceAllowancesRequest( - parent="parent_value", - ) - - # Make the request - page_result = await client.list_resource_allowances(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END batch_v1alpha_generated_BatchService_ListResourceAllowances_async] diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_list_resource_allowances_sync.py b/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_list_resource_allowances_sync.py deleted file mode 100644 index 1d97bddf54d3..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_list_resource_allowances_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListResourceAllowances -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1alpha_generated_BatchService_ListResourceAllowances_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1alpha - - -def sample_list_resource_allowances(): - # Create a client - client = batch_v1alpha.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1alpha.ListResourceAllowancesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_resource_allowances(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END batch_v1alpha_generated_BatchService_ListResourceAllowances_sync] diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_list_tasks_async.py b/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_list_tasks_async.py deleted file mode 100644 index 2b94c5fa7a88..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_list_tasks_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListTasks -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1alpha_generated_BatchService_ListTasks_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1alpha - - -async def sample_list_tasks(): - # Create a client - client = batch_v1alpha.BatchServiceAsyncClient() - - # Initialize request argument(s) - request = batch_v1alpha.ListTasksRequest( - parent="parent_value", - ) - - # Make the request - page_result = await client.list_tasks(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END batch_v1alpha_generated_BatchService_ListTasks_async] diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_list_tasks_sync.py b/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_list_tasks_sync.py deleted file mode 100644 index 5830e66a551c..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_list_tasks_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListTasks -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1alpha_generated_BatchService_ListTasks_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1alpha - - -def sample_list_tasks(): - # Create a client - client = batch_v1alpha.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1alpha.ListTasksRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_tasks(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END batch_v1alpha_generated_BatchService_ListTasks_sync] diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_update_job_async.py b/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_update_job_async.py deleted file mode 100644 index e363ba9bd27b..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_update_job_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1alpha_generated_BatchService_UpdateJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1alpha - - -async def sample_update_job(): - # Create a client - client = batch_v1alpha.BatchServiceAsyncClient() - - # Initialize request argument(s) - request = batch_v1alpha.UpdateJobRequest( - ) - - # Make the request - response = await client.update_job(request=request) - - # Handle the response - print(response) - -# [END batch_v1alpha_generated_BatchService_UpdateJob_async] diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_update_job_sync.py b/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_update_job_sync.py deleted file mode 100644 index efd6cd934a62..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_update_job_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1alpha_generated_BatchService_UpdateJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1alpha - - -def sample_update_job(): - # Create a client - client = batch_v1alpha.BatchServiceClient() - - # Initialize request argument(s) - request = batch_v1alpha.UpdateJobRequest( - ) - - # Make the request - response = client.update_job(request=request) - - # Handle the response - print(response) - -# [END batch_v1alpha_generated_BatchService_UpdateJob_sync] diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_update_resource_allowance_async.py b/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_update_resource_allowance_async.py deleted file mode 100644 index 92721bb550b1..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_update_resource_allowance_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateResourceAllowance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1alpha_generated_BatchService_UpdateResourceAllowance_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1alpha - - -async def sample_update_resource_allowance(): - # Create a client - client = batch_v1alpha.BatchServiceAsyncClient() - - # Initialize request argument(s) - resource_allowance = batch_v1alpha.ResourceAllowance() - resource_allowance.usage_resource_allowance.spec.type_ = "type__value" - resource_allowance.usage_resource_allowance.spec.limit.calendar_period = "DAY" - - request = batch_v1alpha.UpdateResourceAllowanceRequest( - resource_allowance=resource_allowance, - ) - - # Make the request - response = await client.update_resource_allowance(request=request) - - # Handle the response - print(response) - -# [END batch_v1alpha_generated_BatchService_UpdateResourceAllowance_async] diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_update_resource_allowance_sync.py b/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_update_resource_allowance_sync.py deleted file mode 100644 index a733152f44da..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/batch_v1alpha_generated_batch_service_update_resource_allowance_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateResourceAllowance -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-batch - - -# [START batch_v1alpha_generated_BatchService_UpdateResourceAllowance_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import batch_v1alpha - - -def sample_update_resource_allowance(): - # Create a client - client = batch_v1alpha.BatchServiceClient() - - # Initialize request argument(s) - resource_allowance = batch_v1alpha.ResourceAllowance() - resource_allowance.usage_resource_allowance.spec.type_ = "type__value" - resource_allowance.usage_resource_allowance.spec.limit.calendar_period = "DAY" - - request = batch_v1alpha.UpdateResourceAllowanceRequest( - resource_allowance=resource_allowance, - ) - - # Make the request - response = client.update_resource_allowance(request=request) - - # Handle the response - print(response) - -# [END batch_v1alpha_generated_BatchService_UpdateResourceAllowance_sync] diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json b/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json deleted file mode 100644 index 981b9c62ced1..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json +++ /dev/null @@ -1,2156 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.batch.v1alpha", - "version": "v1alpha" - } - ], - "language": "PYTHON", - "name": "google-cloud-batch", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.batch_v1alpha.BatchServiceAsyncClient", - "shortName": "BatchServiceAsyncClient" - }, - "fullName": "google.cloud.batch_v1alpha.BatchServiceAsyncClient.cancel_job", - "method": { - "fullName": "google.cloud.batch.v1alpha.BatchService.CancelJob", - "service": { - "fullName": "google.cloud.batch.v1alpha.BatchService", - "shortName": "BatchService" - }, - "shortName": "CancelJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1alpha.types.CancelJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "cancel_job" - }, - "description": "Sample for CancelJob", - "file": "batch_v1alpha_generated_batch_service_cancel_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1alpha_generated_BatchService_CancelJob_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1alpha_generated_batch_service_cancel_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.batch_v1alpha.BatchServiceClient", - "shortName": "BatchServiceClient" - }, - "fullName": "google.cloud.batch_v1alpha.BatchServiceClient.cancel_job", - "method": { - "fullName": "google.cloud.batch.v1alpha.BatchService.CancelJob", - "service": 
{ - "fullName": "google.cloud.batch.v1alpha.BatchService", - "shortName": "BatchService" - }, - "shortName": "CancelJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1alpha.types.CancelJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "cancel_job" - }, - "description": "Sample for CancelJob", - "file": "batch_v1alpha_generated_batch_service_cancel_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1alpha_generated_BatchService_CancelJob_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1alpha_generated_batch_service_cancel_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.batch_v1alpha.BatchServiceAsyncClient", - "shortName": "BatchServiceAsyncClient" - }, - "fullName": "google.cloud.batch_v1alpha.BatchServiceAsyncClient.create_job", - "method": { - "fullName": "google.cloud.batch.v1alpha.BatchService.CreateJob", - "service": { - "fullName": "google.cloud.batch.v1alpha.BatchService", - "shortName": "BatchService" - }, - "shortName": "CreateJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1alpha.types.CreateJobRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "job", - "type": "google.cloud.batch_v1alpha.types.Job" - }, - { - "name": "job_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.batch_v1alpha.types.Job", - "shortName": "create_job" - }, - "description": "Sample for CreateJob", - "file": "batch_v1alpha_generated_batch_service_create_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1alpha_generated_BatchService_CreateJob_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1alpha_generated_batch_service_create_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.batch_v1alpha.BatchServiceClient", - "shortName": "BatchServiceClient" - }, - "fullName": "google.cloud.batch_v1alpha.BatchServiceClient.create_job", - "method": { - "fullName": "google.cloud.batch.v1alpha.BatchService.CreateJob", - "service": { - "fullName": "google.cloud.batch.v1alpha.BatchService", - "shortName": "BatchService" - }, - "shortName": "CreateJob" - }, - "parameters": [ - { - "name": 
"request", - "type": "google.cloud.batch_v1alpha.types.CreateJobRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "job", - "type": "google.cloud.batch_v1alpha.types.Job" - }, - { - "name": "job_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.batch_v1alpha.types.Job", - "shortName": "create_job" - }, - "description": "Sample for CreateJob", - "file": "batch_v1alpha_generated_batch_service_create_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1alpha_generated_BatchService_CreateJob_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1alpha_generated_batch_service_create_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.batch_v1alpha.BatchServiceAsyncClient", - "shortName": "BatchServiceAsyncClient" - }, - "fullName": "google.cloud.batch_v1alpha.BatchServiceAsyncClient.create_resource_allowance", - "method": { - "fullName": "google.cloud.batch.v1alpha.BatchService.CreateResourceAllowance", - "service": { - "fullName": "google.cloud.batch.v1alpha.BatchService", - "shortName": "BatchService" - }, - "shortName": "CreateResourceAllowance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1alpha.types.CreateResourceAllowanceRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "resource_allowance", - "type": "google.cloud.batch_v1alpha.types.ResourceAllowance" - }, - { - "name": "resource_allowance_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.batch_v1alpha.types.ResourceAllowance", - "shortName": "create_resource_allowance" - }, - "description": "Sample for CreateResourceAllowance", - "file": "batch_v1alpha_generated_batch_service_create_resource_allowance_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1alpha_generated_BatchService_CreateResourceAllowance_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1alpha_generated_batch_service_create_resource_allowance_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.batch_v1alpha.BatchServiceClient", - "shortName": "BatchServiceClient" - }, - "fullName": "google.cloud.batch_v1alpha.BatchServiceClient.create_resource_allowance", - "method": { - "fullName": 
"google.cloud.batch.v1alpha.BatchService.CreateResourceAllowance", - "service": { - "fullName": "google.cloud.batch.v1alpha.BatchService", - "shortName": "BatchService" - }, - "shortName": "CreateResourceAllowance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1alpha.types.CreateResourceAllowanceRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "resource_allowance", - "type": "google.cloud.batch_v1alpha.types.ResourceAllowance" - }, - { - "name": "resource_allowance_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.batch_v1alpha.types.ResourceAllowance", - "shortName": "create_resource_allowance" - }, - "description": "Sample for CreateResourceAllowance", - "file": "batch_v1alpha_generated_batch_service_create_resource_allowance_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1alpha_generated_BatchService_CreateResourceAllowance_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1alpha_generated_batch_service_create_resource_allowance_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.batch_v1alpha.BatchServiceAsyncClient", - "shortName": "BatchServiceAsyncClient" - }, - "fullName": "google.cloud.batch_v1alpha.BatchServiceAsyncClient.delete_job", - "method": { - "fullName": "google.cloud.batch.v1alpha.BatchService.DeleteJob", - "service": { - "fullName": "google.cloud.batch.v1alpha.BatchService", - "shortName": "BatchService" - }, - "shortName": "DeleteJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1alpha.types.DeleteJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_job" - }, - "description": "Sample for DeleteJob", - "file": "batch_v1alpha_generated_batch_service_delete_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1alpha_generated_BatchService_DeleteJob_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1alpha_generated_batch_service_delete_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.batch_v1alpha.BatchServiceClient", - "shortName": "BatchServiceClient" - }, - "fullName": "google.cloud.batch_v1alpha.BatchServiceClient.delete_job", - 
"method": { - "fullName": "google.cloud.batch.v1alpha.BatchService.DeleteJob", - "service": { - "fullName": "google.cloud.batch.v1alpha.BatchService", - "shortName": "BatchService" - }, - "shortName": "DeleteJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1alpha.types.DeleteJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_job" - }, - "description": "Sample for DeleteJob", - "file": "batch_v1alpha_generated_batch_service_delete_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1alpha_generated_BatchService_DeleteJob_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1alpha_generated_batch_service_delete_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.batch_v1alpha.BatchServiceAsyncClient", - "shortName": "BatchServiceAsyncClient" - }, - "fullName": "google.cloud.batch_v1alpha.BatchServiceAsyncClient.delete_resource_allowance", - "method": { - "fullName": "google.cloud.batch.v1alpha.BatchService.DeleteResourceAllowance", - "service": { - "fullName": "google.cloud.batch.v1alpha.BatchService", - "shortName": "BatchService" - }, - "shortName": "DeleteResourceAllowance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1alpha.types.DeleteResourceAllowanceRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_resource_allowance" - }, - "description": "Sample for DeleteResourceAllowance", - "file": "batch_v1alpha_generated_batch_service_delete_resource_allowance_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1alpha_generated_BatchService_DeleteResourceAllowance_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1alpha_generated_batch_service_delete_resource_allowance_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.batch_v1alpha.BatchServiceClient", - "shortName": "BatchServiceClient" - }, - "fullName": "google.cloud.batch_v1alpha.BatchServiceClient.delete_resource_allowance", - "method": { - "fullName": "google.cloud.batch.v1alpha.BatchService.DeleteResourceAllowance", - "service": { - "fullName": 
"google.cloud.batch.v1alpha.BatchService", - "shortName": "BatchService" - }, - "shortName": "DeleteResourceAllowance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1alpha.types.DeleteResourceAllowanceRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_resource_allowance" - }, - "description": "Sample for DeleteResourceAllowance", - "file": "batch_v1alpha_generated_batch_service_delete_resource_allowance_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1alpha_generated_BatchService_DeleteResourceAllowance_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1alpha_generated_batch_service_delete_resource_allowance_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.batch_v1alpha.BatchServiceAsyncClient", - "shortName": "BatchServiceAsyncClient" - }, - "fullName": "google.cloud.batch_v1alpha.BatchServiceAsyncClient.get_job", - "method": { - "fullName": "google.cloud.batch.v1alpha.BatchService.GetJob", - "service": { - "fullName": "google.cloud.batch.v1alpha.BatchService", - "shortName": "BatchService" - }, - "shortName": "GetJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1alpha.types.GetJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.batch_v1alpha.types.Job", - "shortName": "get_job" - }, - "description": "Sample for GetJob", - "file": "batch_v1alpha_generated_batch_service_get_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1alpha_generated_BatchService_GetJob_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1alpha_generated_batch_service_get_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.batch_v1alpha.BatchServiceClient", - "shortName": "BatchServiceClient" - }, - "fullName": "google.cloud.batch_v1alpha.BatchServiceClient.get_job", - "method": { - "fullName": "google.cloud.batch.v1alpha.BatchService.GetJob", - "service": { - "fullName": "google.cloud.batch.v1alpha.BatchService", - "shortName": "BatchService" - }, - "shortName": "GetJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1alpha.types.GetJobRequest" - }, - 
{ - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.batch_v1alpha.types.Job", - "shortName": "get_job" - }, - "description": "Sample for GetJob", - "file": "batch_v1alpha_generated_batch_service_get_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1alpha_generated_BatchService_GetJob_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1alpha_generated_batch_service_get_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.batch_v1alpha.BatchServiceAsyncClient", - "shortName": "BatchServiceAsyncClient" - }, - "fullName": "google.cloud.batch_v1alpha.BatchServiceAsyncClient.get_resource_allowance", - "method": { - "fullName": "google.cloud.batch.v1alpha.BatchService.GetResourceAllowance", - "service": { - "fullName": "google.cloud.batch.v1alpha.BatchService", - "shortName": "BatchService" - }, - "shortName": "GetResourceAllowance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1alpha.types.GetResourceAllowanceRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.batch_v1alpha.types.ResourceAllowance", - "shortName": "get_resource_allowance" - }, - "description": "Sample for GetResourceAllowance", - "file": "batch_v1alpha_generated_batch_service_get_resource_allowance_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1alpha_generated_BatchService_GetResourceAllowance_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1alpha_generated_batch_service_get_resource_allowance_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.batch_v1alpha.BatchServiceClient", - "shortName": "BatchServiceClient" - }, - "fullName": "google.cloud.batch_v1alpha.BatchServiceClient.get_resource_allowance", - "method": { - "fullName": "google.cloud.batch.v1alpha.BatchService.GetResourceAllowance", - "service": { - "fullName": "google.cloud.batch.v1alpha.BatchService", - "shortName": "BatchService" - }, - "shortName": "GetResourceAllowance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1alpha.types.GetResourceAllowanceRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - 
"type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.batch_v1alpha.types.ResourceAllowance", - "shortName": "get_resource_allowance" - }, - "description": "Sample for GetResourceAllowance", - "file": "batch_v1alpha_generated_batch_service_get_resource_allowance_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1alpha_generated_BatchService_GetResourceAllowance_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1alpha_generated_batch_service_get_resource_allowance_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.batch_v1alpha.BatchServiceAsyncClient", - "shortName": "BatchServiceAsyncClient" - }, - "fullName": "google.cloud.batch_v1alpha.BatchServiceAsyncClient.get_task", - "method": { - "fullName": "google.cloud.batch.v1alpha.BatchService.GetTask", - "service": { - "fullName": "google.cloud.batch.v1alpha.BatchService", - "shortName": "BatchService" - }, - "shortName": "GetTask" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1alpha.types.GetTaskRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.batch_v1alpha.types.Task", - "shortName": "get_task" - }, - "description": "Sample for GetTask", - "file": "batch_v1alpha_generated_batch_service_get_task_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1alpha_generated_BatchService_GetTask_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1alpha_generated_batch_service_get_task_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.batch_v1alpha.BatchServiceClient", - "shortName": "BatchServiceClient" - }, - "fullName": "google.cloud.batch_v1alpha.BatchServiceClient.get_task", - "method": { - "fullName": "google.cloud.batch.v1alpha.BatchService.GetTask", - "service": { - "fullName": "google.cloud.batch.v1alpha.BatchService", - "shortName": "BatchService" - }, - "shortName": "GetTask" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1alpha.types.GetTaskRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.batch_v1alpha.types.Task", - "shortName": "get_task" - }, - "description": "Sample for GetTask", - 
"file": "batch_v1alpha_generated_batch_service_get_task_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1alpha_generated_BatchService_GetTask_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1alpha_generated_batch_service_get_task_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.batch_v1alpha.BatchServiceAsyncClient", - "shortName": "BatchServiceAsyncClient" - }, - "fullName": "google.cloud.batch_v1alpha.BatchServiceAsyncClient.list_jobs", - "method": { - "fullName": "google.cloud.batch.v1alpha.BatchService.ListJobs", - "service": { - "fullName": "google.cloud.batch.v1alpha.BatchService", - "shortName": "BatchService" - }, - "shortName": "ListJobs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1alpha.types.ListJobsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.batch_v1alpha.services.batch_service.pagers.ListJobsAsyncPager", - "shortName": "list_jobs" - }, - "description": "Sample for ListJobs", - "file": "batch_v1alpha_generated_batch_service_list_jobs_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1alpha_generated_BatchService_ListJobs_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1alpha_generated_batch_service_list_jobs_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.batch_v1alpha.BatchServiceClient", - "shortName": "BatchServiceClient" - }, - "fullName": "google.cloud.batch_v1alpha.BatchServiceClient.list_jobs", - "method": { - "fullName": "google.cloud.batch.v1alpha.BatchService.ListJobs", - "service": { - "fullName": "google.cloud.batch.v1alpha.BatchService", - "shortName": "BatchService" - }, - "shortName": "ListJobs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1alpha.types.ListJobsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.batch_v1alpha.services.batch_service.pagers.ListJobsPager", - "shortName": "list_jobs" - }, - "description": "Sample for ListJobs", - "file": "batch_v1alpha_generated_batch_service_list_jobs_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1alpha_generated_BatchService_ListJobs_sync", - "segments": [ - { - "end": 51, - 
"start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1alpha_generated_batch_service_list_jobs_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.batch_v1alpha.BatchServiceAsyncClient", - "shortName": "BatchServiceAsyncClient" - }, - "fullName": "google.cloud.batch_v1alpha.BatchServiceAsyncClient.list_resource_allowances", - "method": { - "fullName": "google.cloud.batch.v1alpha.BatchService.ListResourceAllowances", - "service": { - "fullName": "google.cloud.batch.v1alpha.BatchService", - "shortName": "BatchService" - }, - "shortName": "ListResourceAllowances" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1alpha.types.ListResourceAllowancesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.batch_v1alpha.services.batch_service.pagers.ListResourceAllowancesAsyncPager", - "shortName": "list_resource_allowances" - }, - "description": "Sample for ListResourceAllowances", - "file": "batch_v1alpha_generated_batch_service_list_resource_allowances_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1alpha_generated_BatchService_ListResourceAllowances_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1alpha_generated_batch_service_list_resource_allowances_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.batch_v1alpha.BatchServiceClient", - "shortName": "BatchServiceClient" - }, - "fullName": "google.cloud.batch_v1alpha.BatchServiceClient.list_resource_allowances", - "method": { - "fullName": "google.cloud.batch.v1alpha.BatchService.ListResourceAllowances", - "service": { - "fullName": "google.cloud.batch.v1alpha.BatchService", - "shortName": "BatchService" - }, - "shortName": "ListResourceAllowances" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1alpha.types.ListResourceAllowancesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.batch_v1alpha.services.batch_service.pagers.ListResourceAllowancesPager", - "shortName": "list_resource_allowances" - }, - "description": "Sample for ListResourceAllowances", - "file": "batch_v1alpha_generated_batch_service_list_resource_allowances_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"batch_v1alpha_generated_BatchService_ListResourceAllowances_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1alpha_generated_batch_service_list_resource_allowances_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.batch_v1alpha.BatchServiceAsyncClient", - "shortName": "BatchServiceAsyncClient" - }, - "fullName": "google.cloud.batch_v1alpha.BatchServiceAsyncClient.list_tasks", - "method": { - "fullName": "google.cloud.batch.v1alpha.BatchService.ListTasks", - "service": { - "fullName": "google.cloud.batch.v1alpha.BatchService", - "shortName": "BatchService" - }, - "shortName": "ListTasks" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1alpha.types.ListTasksRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.batch_v1alpha.services.batch_service.pagers.ListTasksAsyncPager", - "shortName": "list_tasks" - }, - "description": "Sample for ListTasks", - "file": "batch_v1alpha_generated_batch_service_list_tasks_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1alpha_generated_BatchService_ListTasks_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1alpha_generated_batch_service_list_tasks_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.batch_v1alpha.BatchServiceClient", - "shortName": "BatchServiceClient" - }, - "fullName": "google.cloud.batch_v1alpha.BatchServiceClient.list_tasks", - "method": { - "fullName": "google.cloud.batch.v1alpha.BatchService.ListTasks", - "service": { - "fullName": "google.cloud.batch.v1alpha.BatchService", - "shortName": "BatchService" - }, - "shortName": "ListTasks" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1alpha.types.ListTasksRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.batch_v1alpha.services.batch_service.pagers.ListTasksPager", - "shortName": "list_tasks" - }, - "description": "Sample for ListTasks", - "file": "batch_v1alpha_generated_batch_service_list_tasks_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1alpha_generated_BatchService_ListTasks_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - 
}, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1alpha_generated_batch_service_list_tasks_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.batch_v1alpha.BatchServiceAsyncClient", - "shortName": "BatchServiceAsyncClient" - }, - "fullName": "google.cloud.batch_v1alpha.BatchServiceAsyncClient.update_job", - "method": { - "fullName": "google.cloud.batch.v1alpha.BatchService.UpdateJob", - "service": { - "fullName": "google.cloud.batch.v1alpha.BatchService", - "shortName": "BatchService" - }, - "shortName": "UpdateJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1alpha.types.UpdateJobRequest" - }, - { - "name": "job", - "type": "google.cloud.batch_v1alpha.types.Job" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.batch_v1alpha.types.Job", - "shortName": "update_job" - }, - "description": "Sample for UpdateJob", - "file": "batch_v1alpha_generated_batch_service_update_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1alpha_generated_BatchService_UpdateJob_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1alpha_generated_batch_service_update_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.batch_v1alpha.BatchServiceClient", - "shortName": "BatchServiceClient" - }, - "fullName": "google.cloud.batch_v1alpha.BatchServiceClient.update_job", - "method": { - "fullName": "google.cloud.batch.v1alpha.BatchService.UpdateJob", - "service": { - "fullName": "google.cloud.batch.v1alpha.BatchService", - "shortName": "BatchService" - }, - "shortName": "UpdateJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1alpha.types.UpdateJobRequest" - }, - { - "name": "job", - "type": "google.cloud.batch_v1alpha.types.Job" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.batch_v1alpha.types.Job", - "shortName": "update_job" - }, - "description": "Sample for UpdateJob", - "file": "batch_v1alpha_generated_batch_service_update_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1alpha_generated_BatchService_UpdateJob_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": 
"CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1alpha_generated_batch_service_update_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.batch_v1alpha.BatchServiceAsyncClient", - "shortName": "BatchServiceAsyncClient" - }, - "fullName": "google.cloud.batch_v1alpha.BatchServiceAsyncClient.update_resource_allowance", - "method": { - "fullName": "google.cloud.batch.v1alpha.BatchService.UpdateResourceAllowance", - "service": { - "fullName": "google.cloud.batch.v1alpha.BatchService", - "shortName": "BatchService" - }, - "shortName": "UpdateResourceAllowance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1alpha.types.UpdateResourceAllowanceRequest" - }, - { - "name": "resource_allowance", - "type": "google.cloud.batch_v1alpha.types.ResourceAllowance" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.batch_v1alpha.types.ResourceAllowance", - "shortName": "update_resource_allowance" - }, - "description": "Sample for UpdateResourceAllowance", - "file": "batch_v1alpha_generated_batch_service_update_resource_allowance_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1alpha_generated_BatchService_UpdateResourceAllowance_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1alpha_generated_batch_service_update_resource_allowance_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.batch_v1alpha.BatchServiceClient", - "shortName": "BatchServiceClient" - }, - "fullName": "google.cloud.batch_v1alpha.BatchServiceClient.update_resource_allowance", - "method": { - "fullName": "google.cloud.batch.v1alpha.BatchService.UpdateResourceAllowance", - "service": { - "fullName": "google.cloud.batch.v1alpha.BatchService", - "shortName": "BatchService" - }, - "shortName": "UpdateResourceAllowance" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.batch_v1alpha.types.UpdateResourceAllowanceRequest" - }, - { - "name": "resource_allowance", - "type": "google.cloud.batch_v1alpha.types.ResourceAllowance" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.batch_v1alpha.types.ResourceAllowance", - "shortName": "update_resource_allowance" - }, - "description": "Sample for UpdateResourceAllowance", - "file": "batch_v1alpha_generated_batch_service_update_resource_allowance_sync.py", - "language": 
"PYTHON", - "origin": "API_DEFINITION", - "regionTag": "batch_v1alpha_generated_BatchService_UpdateResourceAllowance_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "batch_v1alpha_generated_batch_service_update_resource_allowance_sync.py" - } - ] -} diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/scripts/fixup_batch_v1alpha_keywords.py b/owl-bot-staging/google-cloud-batch/v1alpha/scripts/fixup_batch_v1alpha_keywords.py deleted file mode 100644 index 3b43e30b2119..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/scripts/fixup_batch_v1alpha_keywords.py +++ /dev/null @@ -1,188 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class batchCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'cancel_job': ('name', 'request_id', ), - 'create_job': ('parent', 'job', 'job_id', 'request_id', ), - 'create_resource_allowance': ('parent', 'resource_allowance', 'resource_allowance_id', 'request_id', ), - 'delete_job': ('name', 'reason', 'request_id', ), - 'delete_resource_allowance': ('name', 'reason', 'request_id', ), - 'get_job': ('name', ), - 'get_resource_allowance': ('name', ), - 'get_task': ('name', ), - 'list_jobs': ('parent', 'filter', 'order_by', 'page_size', 'page_token', ), - 'list_resource_allowances': ('parent', 'page_size', 'page_token', ), - 'list_tasks': ('parent', 'filter', 'order_by', 'page_size', 'page_token', ), - 'update_job': ('job', 'update_mask', 'request_id', ), - 'update_resource_allowance': ('resource_allowance', 'update_mask', 'request_id', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. 
- args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=batchCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the batch client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. 
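To make the rewrite above concrete: a minimal sketch of what ``leave_Call`` produces for a flattened call site, assuming ``libcst`` is installed and the ``batchCallTransformer`` class defined above is in scope (the client call and resource name here are hypothetical, not from the package).

.. code-block:: python

    import libcst as cst

    # Hypothetical call site using the old flattened signature.
    src = "client.get_job('projects/p/locations/l/jobs/j', timeout=30.0)\n"

    # Parse, apply the transformer defined above, and render the result.
    fixed = cst.parse_module(src).visit(batchCallTransformer()).code
    print(fixed)
    # Expected shape of the rewrite (exact spacing may differ):
    #   client.get_job(request={'name': 'projects/p/locations/l/jobs/j'}, timeout=30.0)

The ``timeout`` argument stays a keyword argument because it is listed in ``CTRL_PARAMS``; only the API-level parameters from ``METHOD_TO_PARAMS`` are folded into the ``request`` dict.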
-""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/setup.py b/owl-bot-staging/google-cloud-batch/v1alpha/setup.py deleted file mode 100644 index 9bbd8ec5aefa..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/setup.py +++ /dev/null @@ -1,98 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-batch' - - -description = "Google Cloud Batch API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/batch/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", -] -extras = { -} -url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-batch" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - extras_require=extras, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-batch/v1alpha/testing/constraints-3.10.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/testing/constraints-3.10.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-batch/v1alpha/testing/constraints-3.11.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/testing/constraints-3.11.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-batch/v1alpha/testing/constraints-3.12.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/testing/constraints-3.12.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-batch/v1alpha/testing/constraints-3.13.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/testing/constraints-3.13.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-batch/v1alpha/testing/constraints-3.7.txt deleted file mode 100644 index fc812592b0ee..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/testing/constraints-3.7.txt +++ /dev/null @@ -1,10 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -proto-plus==1.22.3 -protobuf==3.20.2 diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-batch/v1alpha/testing/constraints-3.8.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/testing/constraints-3.8.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-batch/v1alpha/testing/constraints-3.9.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/testing/constraints-3.9.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/tests/__init__.py b/owl-bot-staging/google-cloud-batch/v1alpha/tests/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
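The pinned ``constraints-3.7.txt`` above exists to prove that the lower bounds declared in ``setup.py`` still work: each ``==`` pin mirrors the corresponding ``>=`` bound (``google-api-core==1.34.1`` against ``google-api-core >= 1.34.1``, and so on). A hypothetical cross-check sketch, assuming it runs from the package root (this script is not part of the package):

.. code-block:: python

    # Lower bounds copied from the setup.py / constraints-3.7.txt shown above.
    expected = {
        "google-api-core": "1.34.1",
        "google-auth": "2.14.1",
        "proto-plus": "1.22.3",
        "protobuf": "3.20.2",
    }

    # Every non-comment line in the constraints file should be name==pin,
    # and the pin should equal the declared lower bound.
    with open("testing/constraints-3.7.txt") as f:
        for line in f:
            line = line.strip()
            if not line or line.startswith("#"):
                continue
            name, _, pinned = line.partition("==")
            assert expected[name] == pinned, (name, pinned)

The constraints files for 3.8 through 3.13 deliberately list bare package names with no pins, so those test sessions effectively run against current releases of the dependencies.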
-# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/tests/unit/__init__.py b/owl-bot-staging/google-cloud-batch/v1alpha/tests/unit/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-batch/v1alpha/tests/unit/gapic/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/tests/unit/gapic/batch_v1alpha/__init__.py b/owl-bot-staging/google-cloud-batch/v1alpha/tests/unit/gapic/batch_v1alpha/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/tests/unit/gapic/batch_v1alpha/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-batch/v1alpha/tests/unit/gapic/batch_v1alpha/test_batch_service.py b/owl-bot-staging/google-cloud-batch/v1alpha/tests/unit/gapic/batch_v1alpha/test_batch_service.py deleted file mode 100644 index 5bc8f6bed7e9..000000000000 --- a/owl-bot-staging/google-cloud-batch/v1alpha/tests/unit/gapic/batch_v1alpha/test_batch_service.py +++ /dev/null @@ -1,12360 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation -from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.batch_v1alpha.services.batch_service import BatchServiceAsyncClient -from google.cloud.batch_v1alpha.services.batch_service import BatchServiceClient -from google.cloud.batch_v1alpha.services.batch_service import pagers -from google.cloud.batch_v1alpha.services.batch_service import transports -from google.cloud.batch_v1alpha.types import batch -from google.cloud.batch_v1alpha.types import job -from google.cloud.batch_v1alpha.types import job as gcb_job -from google.cloud.batch_v1alpha.types import notification -from google.cloud.batch_v1alpha.types import resource_allowance -from google.cloud.batch_v1alpha.types import resource_allowance as gcb_resource_allowance -from google.cloud.batch_v1alpha.types import task -from google.cloud.batch_v1alpha.types import volume -from google.cloud.location import locations_pb2 -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.type import interval_pb2 # type: ignore -import google.auth - - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": "service account credentials", - "principal": "service-account@example.com", -} -CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def 
client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert BatchServiceClient._get_default_mtls_endpoint(None) is None - assert BatchServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert BatchServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert BatchServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert BatchServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert BatchServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert BatchServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert BatchServiceClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert BatchServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - BatchServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert BatchServiceClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert BatchServiceClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert BatchServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - BatchServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable 
`GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert BatchServiceClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert BatchServiceClient._get_client_cert_source(None, False) is None - assert BatchServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert BatchServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert BatchServiceClient._get_client_cert_source(None, True) is mock_default_cert_source - assert BatchServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(BatchServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BatchServiceClient)) -@mock.patch.object(BatchServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BatchServiceAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = BatchServiceClient._DEFAULT_UNIVERSE - default_endpoint = BatchServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = BatchServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert BatchServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert BatchServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == BatchServiceClient.DEFAULT_MTLS_ENDPOINT - assert BatchServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert BatchServiceClient._get_api_endpoint(None, None, default_universe, "always") == BatchServiceClient.DEFAULT_MTLS_ENDPOINT - assert BatchServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == BatchServiceClient.DEFAULT_MTLS_ENDPOINT - assert BatchServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert BatchServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - BatchServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." - - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert BatchServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert BatchServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert BatchServiceClient._get_universe_domain(None, None) == BatchServiceClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - BatchServiceClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
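``test__get_universe_domain`` above encodes the resolution order for the universe domain: an explicit client setting beats the ``GOOGLE_CLOUD_UNIVERSE_DOMAIN`` environment variable, which beats the built-in default. A small sketch of the same precedence, using the private helper these tests exercise (the domain values are hypothetical):

.. code-block:: python

    import os
    from unittest import mock

    from google.cloud.batch_v1alpha import BatchServiceClient

    with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "env.example.com"}):
        env = os.environ.get("GOOGLE_CLOUD_UNIVERSE_DOMAIN")
        # An explicit client option wins over the environment variable.
        assert BatchServiceClient._get_universe_domain("opt.example.com", env) == "opt.example.com"
        # Without a client option, the environment variable is used.
        assert BatchServiceClient._get_universe_domain(None, env) == "env.example.com"

    # With neither, the default universe ("googleapis.com") applies.
    assert BatchServiceClient._get_universe_domain(None, None) == "googleapis.com"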
- -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = BatchServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - -@pytest.mark.parametrize("error_code", [401,403,404,500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = BatchServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - -@pytest.mark.parametrize("client_class,transport_name", [ - (BatchServiceClient, "grpc"), - (BatchServiceAsyncClient, "grpc_asyncio"), - (BatchServiceClient, "rest"), -]) -def test_batch_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'batch.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://batch.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.BatchServiceGrpcTransport, "grpc"), - (transports.BatchServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.BatchServiceRestTransport, "rest"), -]) -def test_batch_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (BatchServiceClient, "grpc"), - (BatchServiceAsyncClient, "grpc_asyncio"), - (BatchServiceClient, "rest"), -]) -def test_batch_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == 
creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'batch.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://batch.googleapis.com' - ) - - -def test_batch_service_client_get_transport_class(): - transport = BatchServiceClient.get_transport_class() - available_transports = [ - transports.BatchServiceGrpcTransport, - transports.BatchServiceRestTransport, - ] - assert transport in available_transports - - transport = BatchServiceClient.get_transport_class("grpc") - assert transport == transports.BatchServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (BatchServiceClient, transports.BatchServiceGrpcTransport, "grpc"), - (BatchServiceAsyncClient, transports.BatchServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (BatchServiceClient, transports.BatchServiceRestTransport, "rest"), -]) -@mock.patch.object(BatchServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BatchServiceClient)) -@mock.patch.object(BatchServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BatchServiceAsyncClient)) -def test_batch_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(BatchServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(BatchServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (BatchServiceClient, transports.BatchServiceGrpcTransport, "grpc", "true"), - (BatchServiceAsyncClient, transports.BatchServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (BatchServiceClient, transports.BatchServiceGrpcTransport, "grpc", "false"), - (BatchServiceAsyncClient, transports.BatchServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (BatchServiceClient, transports.BatchServiceRestTransport, "rest", "true"), - (BatchServiceClient, transports.BatchServiceRestTransport, "rest", "false"), -]) -@mock.patch.object(BatchServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BatchServiceClient)) -@mock.patch.object(BatchServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BatchServiceAsyncClient)) 
-@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_batch_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - BatchServiceClient, BatchServiceAsyncClient -]) -@mock.patch.object(BatchServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(BatchServiceClient)) -@mock.patch.object(BatchServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(BatchServiceAsyncClient)) -def test_batch_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - BatchServiceClient, BatchServiceAsyncClient -]) -@mock.patch.object(BatchServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BatchServiceClient)) -@mock.patch.object(BatchServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BatchServiceAsyncClient)) -def test_batch_service_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = BatchServiceClient._DEFAULT_UNIVERSE - default_endpoint = BatchServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = BatchServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (BatchServiceClient, transports.BatchServiceGrpcTransport, "grpc"), - (BatchServiceAsyncClient, transports.BatchServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (BatchServiceClient, transports.BatchServiceRestTransport, "rest"), -]) -def test_batch_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (BatchServiceClient, transports.BatchServiceGrpcTransport, "grpc", grpc_helpers), - (BatchServiceAsyncClient, transports.BatchServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (BatchServiceClient, transports.BatchServiceRestTransport, "rest", None), -]) -def test_batch_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_batch_service_client_client_options_from_dict(): - with mock.patch('google.cloud.batch_v1alpha.services.batch_service.transports.BatchServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = BatchServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (BatchServiceClient, transports.BatchServiceGrpcTransport, "grpc", grpc_helpers), - (BatchServiceAsyncClient, transports.BatchServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_batch_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
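-
- # (Illustrative sketch, not part of the generated suite: the client never
- # opens "credentials.json" itself; credentials_file is forwarded to the
- # transport, which resolves it roughly as
- #
- #     creds, _ = google.auth.load_credentials_from_file("credentials.json")
- #
- # The create_channel assertion below verifies that these file-based
- # credentials, rather than ADC, end up on the gRPC channel.)
-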
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "batch.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="batch.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - batch.CreateJobRequest, - dict, -]) -def test_create_job(request_type, transport: str = 'grpc'): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gcb_job.Job( - name='name_value', - uid='uid_value', - priority=898, - scheduling_policy=gcb_job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE, - ) - response = client.create_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = batch.CreateJobRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, gcb_job.Job) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.priority == 898 - assert response.scheduling_policy == gcb_job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE - - -def test_create_job_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = batch.CreateJobRequest( - parent='parent_value', - job_id='job_id_value', - request_id='request_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
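-
- # (Illustrative sketch, not part of the generated suite: request_id is an
- # auto-populated UUID4 field per AIP-4235, filled only when left unset,
- # conceptually:
- #
- #     if not request.request_id:
- #         request.request_id = str(uuid.uuid4())
- #
- # so the explicit 'request_id_value' set above must survive unchanged.)
-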
- client.create_job(request=request)
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == batch.CreateJobRequest(
- parent='parent_value',
- job_id='job_id_value',
- request_id='request_id_value',
- )
-
-def test_create_job_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.create_job in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.create_job] = mock_rpc
- request = {}
- client.create_job(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- client.create_job(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_create_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = BatchServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.create_job in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.create_job] = mock_rpc
-
- request = {}
- await client.create_job(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.create_job(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_create_job_async(transport: str = 'grpc_asyncio', request_type=batch.CreateJobRequest):
- client = BatchServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_job),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcb_job.Job(
- name='name_value',
- uid='uid_value',
- priority=898,
- scheduling_policy=gcb_job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE,
- ))
- response = await client.create_job(request)
-
- # Establish that the underlying gRPC stub method was called.
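-
- # (Illustrative sketch, not part of the generated suite: the
- # *_use_cached_wrapped_rpc tests above rely on _prep_wrapped_messages
- # building a dict of pre-wrapped methods once, so a call reduces to a
- # lookup rather than re-wrapping, roughly:
- #
- #     rpc = self._transport._wrapped_methods[self._transport.create_job]
- #     return rpc(request, retry=retry, timeout=timeout, metadata=metadata)
- #
- # which is why wrapper_fn must not fire again on the second invocation.)
-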
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = batch.CreateJobRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, gcb_job.Job) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.priority == 898 - assert response.scheduling_policy == gcb_job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE - - -@pytest.mark.asyncio -async def test_create_job_async_from_dict(): - await test_create_job_async(request_type=dict) - -def test_create_job_field_headers(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = batch.CreateJobRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job), - '__call__') as call: - call.return_value = gcb_job.Job() - client.create_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_job_field_headers_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = batch.CreateJobRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcb_job.Job()) - await client.create_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_job_flattened(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gcb_job.Job() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_job( - parent='parent_value', - job=gcb_job.Job(name='name_value'), - job_id='job_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
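-
- # (Illustrative note, not part of the generated suite: the field-header
- # tests reflect implicit routing. The client folds routing fields into a
- # single metadata entry, so parent='projects/p/locations/l' would travel as
- #
- #     ('x-goog-request-params', 'parent=projects/p/locations/l')
- #
- # with the value URL-encoded when it contains reserved characters.)
-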
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
- arg = args[0].job
- mock_val = gcb_job.Job(name='name_value')
- assert arg == mock_val
- arg = args[0].job_id
- mock_val = 'job_id_value'
- assert arg == mock_val
-
-
-def test_create_job_flattened_error():
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.create_job(
- batch.CreateJobRequest(),
- parent='parent_value',
- job=gcb_job.Job(name='name_value'),
- job_id='job_id_value',
- )
-
-@pytest.mark.asyncio
-async def test_create_job_flattened_async():
- client = BatchServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_job),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcb_job.Job())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.create_job(
- parent='parent_value',
- job=gcb_job.Job(name='name_value'),
- job_id='job_id_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
- arg = args[0].job
- mock_val = gcb_job.Job(name='name_value')
- assert arg == mock_val
- arg = args[0].job_id
- mock_val = 'job_id_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_job_flattened_error_async():
- client = BatchServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.create_job(
- batch.CreateJobRequest(),
- parent='parent_value',
- job=gcb_job.Job(name='name_value'),
- job_id='job_id_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- batch.GetJobRequest,
- dict,
-])
-def test_get_job(request_type, transport: str = 'grpc'):
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_job),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = job.Job(
- name='name_value',
- uid='uid_value',
- priority=898,
- scheduling_policy=job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE,
- )
- response = client.get_job(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = batch.GetJobRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
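-
- # (Illustrative note, not part of the generated suite: the flattened-argument
- # tests pin the GAPIC convention that keyword arguments are sugar for
- # building the request message, and mixing the two forms is rejected:
- #
- #     client.get_job(name='name_value')                         # ok
- #     client.get_job(batch.GetJobRequest(), name='name_value')  # ValueError
- #
- # so there is never ambiguity about which value wins.)
-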
- assert isinstance(response, job.Job) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.priority == 898 - assert response.scheduling_policy == job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE - - -def test_get_job_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = batch.GetJobRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_job(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == batch.GetJobRequest( - name='name_value', - ) - -def test_get_job_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_job in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_job] = mock_rpc - request = {} - client.get_job(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_job in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_job] = mock_rpc - - request = {} - await client.get_job(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- await client.get_job(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_job_async(transport: str = 'grpc_asyncio', request_type=batch.GetJobRequest):
- client = BatchServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_job),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(job.Job(
- name='name_value',
- uid='uid_value',
- priority=898,
- scheduling_policy=job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE,
- ))
- response = await client.get_job(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = batch.GetJobRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, job.Job)
- assert response.name == 'name_value'
- assert response.uid == 'uid_value'
- assert response.priority == 898
- assert response.scheduling_policy == job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE
-
-
-@pytest.mark.asyncio
-async def test_get_job_async_from_dict():
- await test_get_job_async(request_type=dict)
-
-def test_get_job_field_headers():
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = batch.GetJobRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_job),
- '__call__') as call:
- call.return_value = job.Job()
- client.get_job(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_job_field_headers_async():
- client = BatchServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = batch.GetJobRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_job),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(job.Job())
- await client.get_job(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
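-
- # (Illustrative note, not part of the generated suite: in the async variants
- # the mocked stub must return an awaitable, which is why
- # grpc_helpers_async.FakeUnaryUnaryCall wraps the response message:
- #
- #     call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(job.Job())
- #     response = await client.get_job(request)
- #
- # A bare message here would fail as "object is not awaitable".)
-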
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_get_job_flattened():
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_job),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = job.Job()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.get_job(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_get_job_flattened_error():
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.get_job(
- batch.GetJobRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_get_job_flattened_async():
- client = BatchServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_job),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(job.Job())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.get_job(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_job_flattened_error_async():
- client = BatchServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.get_job(
- batch.GetJobRequest(),
- name='name_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- batch.DeleteJobRequest,
- dict,
-])
-def test_delete_job(request_type, transport: str = 'grpc'):
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_job),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/spam')
- response = client.delete_job(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = batch.DeleteJobRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, future.Future) - - -def test_delete_job_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = batch.DeleteJobRequest( - name='name_value', - reason='reason_value', - request_id='request_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_job(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == batch.DeleteJobRequest( - name='name_value', - reason='reason_value', - request_id='request_id_value', - ) - -def test_delete_job_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_job in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_job] = mock_rpc - request = {} - client.delete_job(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_job in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_job] = mock_rpc - - request = {} - await client.delete_job(request) - - # Establish that the underlying gRPC stub method was called. 
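-
- # (Illustrative note, not part of the generated suite: delete_job is a
- # long-running operation, so the stub returns a raw operations_pb2.Operation
- # and the client wraps it in a future:
- #
- #     operation = client.delete_job(name='name_value')
- #     result = operation.result()   # would poll the (mocked) operation
- #
- # hence the isinstance(response, future.Future) assertions, and the extra
- # wrapper_fn reset above: the operations client is built lazily on the
- # first RPC.)
-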
- assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_job_async(transport: str = 'grpc_asyncio', request_type=batch.DeleteJobRequest): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = batch.DeleteJobRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_job_async_from_dict(): - await test_delete_job_async(request_type=dict) - -def test_delete_job_field_headers(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = batch.DeleteJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_job_field_headers_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = batch.DeleteJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_delete_job_flattened():
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_job),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/op')
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.delete_job(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_delete_job_flattened_error():
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.delete_job(
- batch.DeleteJobRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_delete_job_flattened_async():
- client = BatchServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_job),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name='operations/spam')
- )
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.delete_job(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_job_flattened_error_async():
- client = BatchServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.delete_job(
- batch.DeleteJobRequest(),
- name='name_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- batch.CancelJobRequest,
- dict,
-])
-def test_cancel_job(request_type, transport: str = 'grpc'):
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.cancel_job),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/spam')
- response = client.cancel_job(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = batch.CancelJobRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_cancel_job_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = batch.CancelJobRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_job), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.cancel_job(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == batch.CancelJobRequest( - name='name_value', - ) - -def test_cancel_job_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.cancel_job in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.cancel_job] = mock_rpc - request = {} - client.cancel_job(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.cancel_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_cancel_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.cancel_job in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.cancel_job] = mock_rpc - - request = {} - await client.cancel_job(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.cancel_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_cancel_job_async(transport: str = 'grpc_asyncio', request_type=batch.CancelJobRequest): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.cancel_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = batch.CancelJobRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_cancel_job_async_from_dict(): - await test_cancel_job_async(request_type=dict) - -def test_cancel_job_field_headers(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = batch.CancelJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.cancel_job), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.cancel_job(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_cancel_job_field_headers_async():
- client = BatchServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = batch.CancelJobRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.cancel_job),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
- await client.cancel_job(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_cancel_job_flattened():
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.cancel_job),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/op')
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.cancel_job(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_cancel_job_flattened_error():
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.cancel_job(
- batch.CancelJobRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_cancel_job_flattened_async():
- client = BatchServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.cancel_job),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name='operations/spam')
- )
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.cancel_job(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_cancel_job_flattened_error_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.cancel_job( - batch.CancelJobRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - batch.UpdateJobRequest, - dict, -]) -def test_update_job(request_type, transport: str = 'grpc'): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gcb_job.Job( - name='name_value', - uid='uid_value', - priority=898, - scheduling_policy=gcb_job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE, - ) - response = client.update_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = batch.UpdateJobRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, gcb_job.Job) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.priority == 898 - assert response.scheduling_policy == gcb_job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE - - -def test_update_job_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = batch.UpdateJobRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_job), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.update_job(request=request)
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == batch.UpdateJobRequest()
-
-def test_update_job_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.update_job in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.update_job] = mock_rpc
- request = {}
- client.update_job(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- client.update_job(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_update_job_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = BatchServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.update_job in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.update_job] = mock_rpc
-
- request = {}
- await client.update_job(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.update_job(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_update_job_async(transport: str = 'grpc_asyncio', request_type=batch.UpdateJobRequest):
- client = BatchServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_job),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcb_job.Job(
- name='name_value',
- uid='uid_value',
- priority=898,
- scheduling_policy=gcb_job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE,
- ))
- response = await client.update_job(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = batch.UpdateJobRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, gcb_job.Job) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.priority == 898 - assert response.scheduling_policy == gcb_job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE - - -@pytest.mark.asyncio -async def test_update_job_async_from_dict(): - await test_update_job_async(request_type=dict) - -def test_update_job_field_headers(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = batch.UpdateJobRequest() - - request.job.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_job), - '__call__') as call: - call.return_value = gcb_job.Job() - client.update_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'job.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_job_field_headers_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = batch.UpdateJobRequest() - - request.job.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcb_job.Job()) - await client.update_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'job.name=name_value', - ) in kw['metadata'] - - -def test_update_job_flattened(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gcb_job.Job() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_job( - job=gcb_job.Job(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
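-
- # (Illustrative sketch, not part of the generated suite: update_job follows
- # the standard AIP-134 update pattern. The FieldMask names the job fields to
- # change, and the routing header is drawn from the resource itself
- # (job.name) rather than a top-level name field:
- #
- #     client.update_job(
- #         job=gcb_job.Job(name='name_value', priority=898),
- #         update_mask=field_mask_pb2.FieldMask(paths=['priority']),
- #     )
- #
- # 'priority' is just an example of a mutable Job field path.)
-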
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].job
- mock_val = gcb_job.Job(name='name_value')
- assert arg == mock_val
- arg = args[0].update_mask
- mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
- assert arg == mock_val
-
-
-def test_update_job_flattened_error():
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.update_job(
- batch.UpdateJobRequest(),
- job=gcb_job.Job(name='name_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
-@pytest.mark.asyncio
-async def test_update_job_flattened_async():
- client = BatchServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_job),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcb_job.Job())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.update_job(
- job=gcb_job.Job(name='name_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].job
- mock_val = gcb_job.Job(name='name_value')
- assert arg == mock_val
- arg = args[0].update_mask
- mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_job_flattened_error_async():
- client = BatchServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.update_job(
- batch.UpdateJobRequest(),
- job=gcb_job.Job(name='name_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
-
-@pytest.mark.parametrize("request_type", [
- batch.ListJobsRequest,
- dict,
-])
-def test_list_jobs(request_type, transport: str = 'grpc'):
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_jobs),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = batch.ListJobsResponse(
- next_page_token='next_page_token_value',
- unreachable=['unreachable_value'],
- )
- response = client.list_jobs(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = batch.ListJobsRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListJobsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -def test_list_jobs_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = batch.ListJobsRequest( - parent='parent_value', - filter='filter_value', - order_by='order_by_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_jobs(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == batch.ListJobsRequest( - parent='parent_value', - filter='filter_value', - order_by='order_by_value', - page_token='page_token_value', - ) - -def test_list_jobs_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_jobs in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_jobs] = mock_rpc - request = {} - client.list_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_jobs(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_jobs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_jobs in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_jobs] = mock_rpc - - request = {} - await client.list_jobs(request) - - # Establish that the underlying gRPC stub method was called. 
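- # The cached wrapper was swapped for mock_rpc above, so the invocation is - # recorded on the mock rather than on a freshly wrapped method.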
- assert mock_rpc.call_count == 1 - - await client.list_jobs(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_jobs_async(transport: str = 'grpc_asyncio', request_type=batch.ListJobsRequest): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(batch.ListJobsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - response = await client.list_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = batch.ListJobsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListJobsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -@pytest.mark.asyncio -async def test_list_jobs_async_from_dict(): - await test_list_jobs_async(request_type=dict) - -def test_list_jobs_field_headers(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = batch.ListJobsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - call.return_value = batch.ListJobsResponse() - client.list_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_jobs_field_headers_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = batch.ListJobsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(batch.ListJobsResponse()) - await client.list_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent.
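- # The routing header travels in the call's gRPC metadata; for this request - # it is the tuple ('x-goog-request-params', 'parent=parent_value').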
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_jobs_flattened(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = batch.ListJobsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_jobs( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_jobs_flattened_error(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_jobs( - batch.ListJobsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_jobs_flattened_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(batch.ListJobsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_jobs( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_jobs_flattened_error_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_jobs( - batch.ListJobsRequest(), - parent='parent_value', - ) - - -def test_list_jobs_pager(transport_name: str = "grpc"): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Set the response to a series of pages.
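- # Four pages: three carry a next_page_token and the last does not, so the - # pager stops; the trailing RuntimeError fails the test if a fifth page is - # ever requested.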
- call.side_effect = ( - batch.ListJobsResponse( - jobs=[ - job.Job(), - job.Job(), - job.Job(), - ], - next_page_token='abc', - ), - batch.ListJobsResponse( - jobs=[], - next_page_token='def', - ), - batch.ListJobsResponse( - jobs=[ - job.Job(), - ], - next_page_token='ghi', - ), - batch.ListJobsResponse( - jobs=[ - job.Job(), - job.Job(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_jobs(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, job.Job) - for i in results) -def test_list_jobs_pages(transport_name: str = "grpc"): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - batch.ListJobsResponse( - jobs=[ - job.Job(), - job.Job(), - job.Job(), - ], - next_page_token='abc', - ), - batch.ListJobsResponse( - jobs=[], - next_page_token='def', - ), - batch.ListJobsResponse( - jobs=[ - job.Job(), - ], - next_page_token='ghi', - ), - batch.ListJobsResponse( - jobs=[ - job.Job(), - job.Job(), - ], - ), - RuntimeError, - ) - pages = list(client.list_jobs(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_jobs_async_pager(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - batch.ListJobsResponse( - jobs=[ - job.Job(), - job.Job(), - job.Job(), - ], - next_page_token='abc', - ), - batch.ListJobsResponse( - jobs=[], - next_page_token='def', - ), - batch.ListJobsResponse( - jobs=[ - job.Job(), - ], - next_page_token='ghi', - ), - batch.ListJobsResponse( - jobs=[ - job.Job(), - job.Job(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_jobs(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, job.Job) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_jobs_async_pages(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - batch.ListJobsResponse( - jobs=[ - job.Job(), - job.Job(), - job.Job(), - ], - next_page_token='abc', - ), - batch.ListJobsResponse( - jobs=[], - next_page_token='def', - ), - batch.ListJobsResponse( - jobs=[ - job.Job(), - ], - next_page_token='ghi', - ), - batch.ListJobsResponse( - jobs=[ - job.Job(), - job.Job(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_jobs(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - batch.GetTaskRequest, - dict, -]) -def test_get_task(request_type, transport: str = 'grpc'): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = task.Task( - name='name_value', - ) - response = client.get_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = batch.GetTaskRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, task.Task) - assert response.name == 'name_value' - - -def test_get_task_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = batch.GetTaskRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
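- # The string field set above must reach the transport unchanged; only unset - # UUID4 fields (such as a request_id) are eligible for auto-population - # under AIP-4235.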
- client.get_task(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == batch.GetTaskRequest( - name='name_value', - ) - -def test_get_task_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_task in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_task] = mock_rpc - request = {} - client.get_task(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_task(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_task_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_task in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_task] = mock_rpc - - request = {} - await client.get_task(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_task(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_task_async(transport: str = 'grpc_asyncio', request_type=batch.GetTaskRequest): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task( - name='name_value', - )) - response = await client.get_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = batch.GetTaskRequest() - assert args[0] == request - - # Establish that the response is the type that we expect.
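- # The caller sees the unwrapped proto-plus Task message, not the awaited - # call object.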
- assert isinstance(response, task.Task) - assert response.name == 'name_value' - - -@pytest.mark.asyncio -async def test_get_task_async_from_dict(): - await test_get_task_async(request_type=dict) - -def test_get_task_field_headers(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = batch.GetTaskRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - call.return_value = task.Task() - client.get_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_task_field_headers_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = batch.GetTaskRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) - await client.get_task(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_task_flattened(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = task.Task() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_task( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_task_flattened_error(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_task( - batch.GetTaskRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_task_flattened_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - # Designate an appropriate return value for the call. 
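- # FakeUnaryUnaryCall makes the mocked stub awaitable, standing in for a - # real async unary-unary gRPC call.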
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_task( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_task_flattened_error_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_task( - batch.GetTaskRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - batch.ListTasksRequest, - dict, -]) -def test_list_tasks(request_type, transport: str = 'grpc'): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = batch.ListTasksResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - response = client.list_tasks(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = batch.ListTasksRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTasksPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -def test_list_tasks_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = batch.ListTasksRequest( - parent='parent_value', - filter='filter_value', - order_by='order_by_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client.list_tasks(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == batch.ListTasksRequest( - parent='parent_value', - filter='filter_value', - order_by='order_by_value', - page_token='page_token_value', - ) - -def test_list_tasks_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_tasks in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_tasks] = mock_rpc - request = {} - client.list_tasks(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_tasks(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_tasks_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_tasks in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_tasks] = mock_rpc - - request = {} - await client.list_tasks(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_tasks(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_tasks_async(transport: str = 'grpc_asyncio', request_type=batch.ListTasksRequest): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(batch.ListTasksResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - response = await client.list_tasks(request) - - # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = batch.ListTasksRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTasksAsyncPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -@pytest.mark.asyncio -async def test_list_tasks_async_from_dict(): - await test_list_tasks_async(request_type=dict) - -def test_list_tasks_field_headers(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = batch.ListTasksRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - call.return_value = batch.ListTasksResponse() - client.list_tasks(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_tasks_field_headers_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = batch.ListTasksRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(batch.ListTasksResponse()) - await client.list_tasks(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_tasks_flattened(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = batch.ListTasksResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_tasks( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_tasks_flattened_error(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
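- # Mixing the two calling conventions is ambiguous, so the client raises - # ValueError before any RPC is attempted.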
- with pytest.raises(ValueError): - client.list_tasks( - batch.ListTasksRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_tasks_flattened_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(batch.ListTasksResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_tasks( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_tasks_flattened_error_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_tasks( - batch.ListTasksRequest(), - parent='parent_value', - ) - - -def test_list_tasks_pager(transport_name: str = "grpc"): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - batch.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - task.Task(), - ], - next_page_token='abc', - ), - batch.ListTasksResponse( - tasks=[], - next_page_token='def', - ), - batch.ListTasksResponse( - tasks=[ - task.Task(), - ], - next_page_token='ghi', - ), - batch.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_tasks(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, task.Task) - for i in results) -def test_list_tasks_pages(transport_name: str = "grpc"): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - # Set the response to a series of pages.
- call.side_effect = ( - batch.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - task.Task(), - ], - next_page_token='abc', - ), - batch.ListTasksResponse( - tasks=[], - next_page_token='def', - ), - batch.ListTasksResponse( - tasks=[ - task.Task(), - ], - next_page_token='ghi', - ), - batch.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - ], - ), - RuntimeError, - ) - pages = list(client.list_tasks(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_tasks_async_pager(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - batch.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - task.Task(), - ], - next_page_token='abc', - ), - batch.ListTasksResponse( - tasks=[], - next_page_token='def', - ), - batch.ListTasksResponse( - tasks=[ - task.Task(), - ], - next_page_token='ghi', - ), - batch.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_tasks(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, task.Task) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_tasks_async_pages(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - batch.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - task.Task(), - ], - next_page_token='abc', - ), - batch.ListTasksResponse( - tasks=[], - next_page_token='def', - ), - batch.ListTasksResponse( - tasks=[ - task.Task(), - ], - next_page_token='ghi', - ), - batch.ListTasksResponse( - tasks=[ - task.Task(), - task.Task(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_tasks(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - batch.CreateResourceAllowanceRequest, - dict, -]) -def test_create_resource_allowance(request_type, transport: str = 'grpc'): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_resource_allowance), - '__call__') as call: - # Designate an appropriate return value for the call. 
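- # Populate a couple of scalar fields so the asserts below can check that - # they survive the round trip through the mocked transport.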
- call.return_value = gcb_resource_allowance.ResourceAllowance( - name='name_value', - uid='uid_value', - ) - response = client.create_resource_allowance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = batch.CreateResourceAllowanceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, gcb_resource_allowance.ResourceAllowance) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - - -def test_create_resource_allowance_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = batch.CreateResourceAllowanceRequest( - parent='parent_value', - resource_allowance_id='resource_allowance_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_resource_allowance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_resource_allowance(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == batch.CreateResourceAllowanceRequest( - parent='parent_value', - resource_allowance_id='resource_allowance_id_value', - ) - -def test_create_resource_allowance_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_resource_allowance in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_resource_allowance] = mock_rpc - request = {} - client.create_resource_allowance(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_resource_allowance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_resource_allowance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_resource_allowance in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_resource_allowance] = mock_rpc - - request = {} - await client.create_resource_allowance(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_resource_allowance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_resource_allowance_async(transport: str = 'grpc_asyncio', request_type=batch.CreateResourceAllowanceRequest): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_resource_allowance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcb_resource_allowance.ResourceAllowance( - name='name_value', - uid='uid_value', - )) - response = await client.create_resource_allowance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = batch.CreateResourceAllowanceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, gcb_resource_allowance.ResourceAllowance) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - - -@pytest.mark.asyncio -async def test_create_resource_allowance_async_from_dict(): - await test_create_resource_allowance_async(request_type=dict) - -def test_create_resource_allowance_field_headers(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = batch.CreateResourceAllowanceRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request.
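- # Patching __call__ on the multicallable's type intercepts every invocation - # of this RPC made through the client.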
- with mock.patch.object( - type(client.transport.create_resource_allowance), - '__call__') as call: - call.return_value = gcb_resource_allowance.ResourceAllowance() - client.create_resource_allowance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_resource_allowance_field_headers_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = batch.CreateResourceAllowanceRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_resource_allowance), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcb_resource_allowance.ResourceAllowance()) - await client.create_resource_allowance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_resource_allowance_flattened(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_resource_allowance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gcb_resource_allowance.ResourceAllowance() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_resource_allowance( - parent='parent_value', - resource_allowance=gcb_resource_allowance.ResourceAllowance(usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance(spec=gcb_resource_allowance.UsageResourceAllowanceSpec(type_='type__value'))), - resource_allowance_id='resource_allowance_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].resource_allowance - mock_val = gcb_resource_allowance.ResourceAllowance(usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance(spec=gcb_resource_allowance.UsageResourceAllowanceSpec(type_='type__value'))) - assert arg == mock_val - arg = args[0].resource_allowance_id - mock_val = 'resource_allowance_id_value' - assert arg == mock_val - - -def test_create_resource_allowance_flattened_error(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_resource_allowance( - batch.CreateResourceAllowanceRequest(), - parent='parent_value', - resource_allowance=gcb_resource_allowance.ResourceAllowance(usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance(spec=gcb_resource_allowance.UsageResourceAllowanceSpec(type_='type__value'))), - resource_allowance_id='resource_allowance_id_value', - ) - -@pytest.mark.asyncio -async def test_create_resource_allowance_flattened_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_resource_allowance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcb_resource_allowance.ResourceAllowance()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_resource_allowance( - parent='parent_value', - resource_allowance=gcb_resource_allowance.ResourceAllowance(usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance(spec=gcb_resource_allowance.UsageResourceAllowanceSpec(type_='type__value'))), - resource_allowance_id='resource_allowance_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].resource_allowance - mock_val = gcb_resource_allowance.ResourceAllowance(usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance(spec=gcb_resource_allowance.UsageResourceAllowanceSpec(type_='type__value'))) - assert arg == mock_val - arg = args[0].resource_allowance_id - mock_val = 'resource_allowance_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_resource_allowance_flattened_error_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_resource_allowance( - batch.CreateResourceAllowanceRequest(), - parent='parent_value', - resource_allowance=gcb_resource_allowance.ResourceAllowance(usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance(spec=gcb_resource_allowance.UsageResourceAllowanceSpec(type_='type__value'))), - resource_allowance_id='resource_allowance_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - batch.GetResourceAllowanceRequest, - dict, -]) -def test_get_resource_allowance(request_type, transport: str = 'grpc'): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_resource_allowance), - '__call__') as call: - # Designate an appropriate return value for the call.
- call.return_value = resource_allowance.ResourceAllowance( - name='name_value', - uid='uid_value', - ) - response = client.get_resource_allowance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = batch.GetResourceAllowanceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, resource_allowance.ResourceAllowance) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - - -def test_get_resource_allowance_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = batch.GetResourceAllowanceRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_resource_allowance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_resource_allowance(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == batch.GetResourceAllowanceRequest( - name='name_value', - ) - -def test_get_resource_allowance_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_resource_allowance in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_resource_allowance] = mock_rpc - request = {} - client.get_resource_allowance(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_resource_allowance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_resource_allowance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_resource_allowance in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_resource_allowance] = mock_rpc - - request = {} - await client.get_resource_allowance(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_resource_allowance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_resource_allowance_async(transport: str = 'grpc_asyncio', request_type=batch.GetResourceAllowanceRequest): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_resource_allowance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resource_allowance.ResourceAllowance( - name='name_value', - uid='uid_value', - )) - response = await client.get_resource_allowance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = batch.GetResourceAllowanceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, resource_allowance.ResourceAllowance) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - - -@pytest.mark.asyncio -async def test_get_resource_allowance_async_from_dict(): - await test_get_resource_allowance_async(request_type=dict) - -def test_get_resource_allowance_field_headers(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = batch.GetResourceAllowanceRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_resource_allowance),
-            '__call__') as call:
-        call.return_value = resource_allowance.ResourceAllowance()
-        client.get_resource_allowance(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'name=name_value',
-        ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_resource_allowance_field_headers_async():
-    client = BatchServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = batch.GetResourceAllowanceRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_resource_allowance),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resource_allowance.ResourceAllowance())
-        await client.get_resource_allowance(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'name=name_value',
-        ) in kw['metadata']
-
-
-def test_get_resource_allowance_flattened():
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_resource_allowance),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = resource_allowance.ResourceAllowance()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.get_resource_allowance(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_get_resource_allowance_flattened_error():
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_resource_allowance(
-            batch.GetResourceAllowanceRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_get_resource_allowance_flattened_async():
-    client = BatchServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_resource_allowance),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resource_allowance.ResourceAllowance())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
- response = await client.get_resource_allowance( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_resource_allowance_flattened_error_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_resource_allowance( - batch.GetResourceAllowanceRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - batch.DeleteResourceAllowanceRequest, - dict, -]) -def test_delete_resource_allowance(request_type, transport: str = 'grpc'): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_resource_allowance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.delete_resource_allowance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = batch.DeleteResourceAllowanceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_resource_allowance_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = batch.DeleteResourceAllowanceRequest( - name='name_value', - reason='reason_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_resource_allowance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
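-        # Setting a string `name` on the mocked return value keeps clients
-        # that treat responses as long-running operations happy (they read
-        # `response.name` as the operation id); the value itself is unused
-        # by this test.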
- client.delete_resource_allowance(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == batch.DeleteResourceAllowanceRequest( - name='name_value', - reason='reason_value', - ) - -def test_delete_resource_allowance_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_resource_allowance in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_resource_allowance] = mock_rpc - request = {} - client.delete_resource_allowance(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_resource_allowance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_resource_allowance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_resource_allowance in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_resource_allowance] = mock_rpc - - request = {} - await client.delete_resource_allowance(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_resource_allowance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_resource_allowance_async(transport: str = 'grpc_asyncio', request_type=batch.DeleteResourceAllowanceRequest): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_resource_allowance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_resource_allowance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = batch.DeleteResourceAllowanceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_resource_allowance_async_from_dict(): - await test_delete_resource_allowance_async(request_type=dict) - -def test_delete_resource_allowance_field_headers(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = batch.DeleteResourceAllowanceRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_resource_allowance), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_resource_allowance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_resource_allowance_field_headers_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = batch.DeleteResourceAllowanceRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_resource_allowance), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_resource_allowance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_resource_allowance_flattened(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_resource_allowance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
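-        # The flattened `name` kwarg is copied into a fresh
-        # batch.DeleteResourceAllowanceRequest by the client before the
-        # transport is invoked, which is what the assertions below verify.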
-        client.delete_resource_allowance(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_delete_resource_allowance_flattened_error():
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_resource_allowance(
-            batch.DeleteResourceAllowanceRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_delete_resource_allowance_flattened_async():
-    client = BatchServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_resource_allowance),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.delete_resource_allowance(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_resource_allowance_flattened_error_async():
-    client = BatchServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.delete_resource_allowance(
-            batch.DeleteResourceAllowanceRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    batch.ListResourceAllowancesRequest,
-    dict,
-])
-def test_list_resource_allowances(request_type, transport: str = 'grpc'):
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_resource_allowances),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = batch.ListResourceAllowancesResponse(
-            next_page_token='next_page_token_value',
-            unreachable=['unreachable_value'],
-        )
-        response = client.list_resource_allowances(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = batch.ListResourceAllowancesRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListResourceAllowancesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -def test_list_resource_allowances_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = batch.ListResourceAllowancesRequest( - parent='parent_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_resource_allowances), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_resource_allowances(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == batch.ListResourceAllowancesRequest( - parent='parent_value', - page_token='page_token_value', - ) - -def test_list_resource_allowances_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_resource_allowances in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_resource_allowances] = mock_rpc - request = {} - client.list_resource_allowances(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.list_resource_allowances(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_resource_allowances_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = BatchServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.list_resource_allowances in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.list_resource_allowances] = mock_rpc
-
-        request = {}
-        await client.list_resource_allowances(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.list_resource_allowances(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_resource_allowances_async(transport: str = 'grpc_asyncio', request_type=batch.ListResourceAllowancesRequest):
-    client = BatchServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_resource_allowances),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(batch.ListResourceAllowancesResponse(
-            next_page_token='next_page_token_value',
-            unreachable=['unreachable_value'],
-        ))
-        response = await client.list_resource_allowances(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = batch.ListResourceAllowancesRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, pagers.ListResourceAllowancesAsyncPager)
-        assert response.next_page_token == 'next_page_token_value'
-        assert response.unreachable == ['unreachable_value']
-
-
-@pytest.mark.asyncio
-async def test_list_resource_allowances_async_from_dict():
-    await test_list_resource_allowances_async(request_type=dict)
-
-def test_list_resource_allowances_field_headers():
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = batch.ListResourceAllowancesRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_resource_allowances),
-            '__call__') as call:
-        call.return_value = batch.ListResourceAllowancesResponse()
-        client.list_resource_allowances(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'parent=parent_value',
-        ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_resource_allowances_field_headers_async():
-    client = BatchServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = batch.ListResourceAllowancesRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_resource_allowances),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(batch.ListResourceAllowancesResponse())
-        await client.list_resource_allowances(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'parent=parent_value',
-        ) in kw['metadata']
-
-
-def test_list_resource_allowances_flattened():
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_resource_allowances),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = batch.ListResourceAllowancesResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_resource_allowances(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_resource_allowances_flattened_error():
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_resource_allowances(
-            batch.ListResourceAllowancesRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_resource_allowances_flattened_async():
-    client = BatchServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_resource_allowances),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(batch.ListResourceAllowancesResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
- response = await client.list_resource_allowances( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_resource_allowances_flattened_error_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_resource_allowances( - batch.ListResourceAllowancesRequest(), - parent='parent_value', - ) - - -def test_list_resource_allowances_pager(transport_name: str = "grpc"): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_resource_allowances), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - batch.ListResourceAllowancesResponse( - resource_allowances=[ - resource_allowance.ResourceAllowance(), - resource_allowance.ResourceAllowance(), - resource_allowance.ResourceAllowance(), - ], - next_page_token='abc', - ), - batch.ListResourceAllowancesResponse( - resource_allowances=[], - next_page_token='def', - ), - batch.ListResourceAllowancesResponse( - resource_allowances=[ - resource_allowance.ResourceAllowance(), - ], - next_page_token='ghi', - ), - batch.ListResourceAllowancesResponse( - resource_allowances=[ - resource_allowance.ResourceAllowance(), - resource_allowance.ResourceAllowance(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_resource_allowances(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resource_allowance.ResourceAllowance) - for i in results) -def test_list_resource_allowances_pages(transport_name: str = "grpc"): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_resource_allowances), - '__call__') as call: - # Set the response to a series of pages. 
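-        # Each successive call to the stub pops the next response from
-        # `side_effect`; the trailing RuntimeError is a sentinel that fails
-        # the test if the pager requests a page past the final (tokenless)
-        # response, so the iteration below must drain exactly four pages.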
- call.side_effect = ( - batch.ListResourceAllowancesResponse( - resource_allowances=[ - resource_allowance.ResourceAllowance(), - resource_allowance.ResourceAllowance(), - resource_allowance.ResourceAllowance(), - ], - next_page_token='abc', - ), - batch.ListResourceAllowancesResponse( - resource_allowances=[], - next_page_token='def', - ), - batch.ListResourceAllowancesResponse( - resource_allowances=[ - resource_allowance.ResourceAllowance(), - ], - next_page_token='ghi', - ), - batch.ListResourceAllowancesResponse( - resource_allowances=[ - resource_allowance.ResourceAllowance(), - resource_allowance.ResourceAllowance(), - ], - ), - RuntimeError, - ) - pages = list(client.list_resource_allowances(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_resource_allowances_async_pager(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_resource_allowances), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - batch.ListResourceAllowancesResponse( - resource_allowances=[ - resource_allowance.ResourceAllowance(), - resource_allowance.ResourceAllowance(), - resource_allowance.ResourceAllowance(), - ], - next_page_token='abc', - ), - batch.ListResourceAllowancesResponse( - resource_allowances=[], - next_page_token='def', - ), - batch.ListResourceAllowancesResponse( - resource_allowances=[ - resource_allowance.ResourceAllowance(), - ], - next_page_token='ghi', - ), - batch.ListResourceAllowancesResponse( - resource_allowances=[ - resource_allowance.ResourceAllowance(), - resource_allowance.ResourceAllowance(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_resource_allowances(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, resource_allowance.ResourceAllowance) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_resource_allowances_async_pages(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_resource_allowances), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - batch.ListResourceAllowancesResponse( - resource_allowances=[ - resource_allowance.ResourceAllowance(), - resource_allowance.ResourceAllowance(), - resource_allowance.ResourceAllowance(), - ], - next_page_token='abc', - ), - batch.ListResourceAllowancesResponse( - resource_allowances=[], - next_page_token='def', - ), - batch.ListResourceAllowancesResponse( - resource_allowances=[ - resource_allowance.ResourceAllowance(), - ], - next_page_token='ghi', - ), - batch.ListResourceAllowancesResponse( - resource_allowances=[ - resource_allowance.ResourceAllowance(), - resource_allowance.ResourceAllowance(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_resource_allowances(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - batch.UpdateResourceAllowanceRequest, - dict, -]) -def test_update_resource_allowance(request_type, transport: str = 'grpc'): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_resource_allowance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gcb_resource_allowance.ResourceAllowance( - name='name_value', - uid='uid_value', - ) - response = client.update_resource_allowance(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = batch.UpdateResourceAllowanceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, gcb_resource_allowance.ResourceAllowance) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - - -def test_update_resource_allowance_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = batch.UpdateResourceAllowanceRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_resource_allowance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.update_resource_allowance(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == batch.UpdateResourceAllowanceRequest( - ) - -def test_update_resource_allowance_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_resource_allowance in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_resource_allowance] = mock_rpc - request = {} - client.update_resource_allowance(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_resource_allowance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_resource_allowance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_resource_allowance in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_resource_allowance] = mock_rpc - - request = {} - await client.update_resource_allowance(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.update_resource_allowance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_resource_allowance_async(transport: str = 'grpc_asyncio', request_type=batch.UpdateResourceAllowanceRequest): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_resource_allowance), - '__call__') as call: - # Designate an appropriate return value for the call. 
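-        # FakeUnaryUnaryCall wraps the message in an awaitable stub, so the
-        # patched '__call__' behaves like a real async unary-unary gRPC
-        # invocation and the awaited client call resolves to the message.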
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcb_resource_allowance.ResourceAllowance(
-            name='name_value',
-            uid='uid_value',
-        ))
-        response = await client.update_resource_allowance(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = batch.UpdateResourceAllowanceRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, gcb_resource_allowance.ResourceAllowance)
-        assert response.name == 'name_value'
-        assert response.uid == 'uid_value'
-
-
-@pytest.mark.asyncio
-async def test_update_resource_allowance_async_from_dict():
-    await test_update_resource_allowance_async(request_type=dict)
-
-def test_update_resource_allowance_field_headers():
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = batch.UpdateResourceAllowanceRequest()
-
-    request.resource_allowance.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_resource_allowance),
-            '__call__') as call:
-        call.return_value = gcb_resource_allowance.ResourceAllowance()
-        client.update_resource_allowance(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'resource_allowance.name=name_value',
-        ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_update_resource_allowance_field_headers_async():
-    client = BatchServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = batch.UpdateResourceAllowanceRequest()
-
-    request.resource_allowance.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_resource_allowance),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcb_resource_allowance.ResourceAllowance())
-        await client.update_resource_allowance(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'resource_allowance.name=name_value',
-        ) in kw['metadata']
-
-
-def test_update_resource_allowance_flattened():
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_resource_allowance),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = gcb_resource_allowance.ResourceAllowance()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
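-        # `update_mask` is a google.protobuf.FieldMask; its `paths` name the
-        # resource_allowance fields the server should overwrite. A
-        # placeholder path suffices here to prove the kwarg reaches the
-        # request object.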
-        client.update_resource_allowance(
-            resource_allowance=gcb_resource_allowance.ResourceAllowance(usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance(spec=gcb_resource_allowance.UsageResourceAllowanceSpec(type_='type__value'))),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].resource_allowance
-        mock_val = gcb_resource_allowance.ResourceAllowance(usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance(spec=gcb_resource_allowance.UsageResourceAllowanceSpec(type_='type__value')))
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-
-def test_update_resource_allowance_flattened_error():
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.update_resource_allowance(
-            batch.UpdateResourceAllowanceRequest(),
-            resource_allowance=gcb_resource_allowance.ResourceAllowance(usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance(spec=gcb_resource_allowance.UsageResourceAllowanceSpec(type_='type__value'))),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-@pytest.mark.asyncio
-async def test_update_resource_allowance_flattened_async():
-    client = BatchServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_resource_allowance),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcb_resource_allowance.ResourceAllowance())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.update_resource_allowance(
-            resource_allowance=gcb_resource_allowance.ResourceAllowance(usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance(spec=gcb_resource_allowance.UsageResourceAllowanceSpec(type_='type__value'))),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].resource_allowance
-        mock_val = gcb_resource_allowance.ResourceAllowance(usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance(spec=gcb_resource_allowance.UsageResourceAllowanceSpec(type_='type__value')))
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_resource_allowance_flattened_error_async():
-    client = BatchServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - await client.update_resource_allowance( - batch.UpdateResourceAllowanceRequest(), - resource_allowance=gcb_resource_allowance.ResourceAllowance(usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance(spec=gcb_resource_allowance.UsageResourceAllowanceSpec(type_='type__value'))), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_create_job_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_job in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_job] = mock_rpc - - request = {} - client.create_job(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_job_rest_required_fields(request_type=batch.CreateJobRequest): - transport_class = transports.BatchServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_job._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("job_id", "request_id", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = gcb_job.Job() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
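-            # The stubbed transcode() result mirrors the dict shape the REST
-            # transport expects back: a uri, an http method, query params,
-            # and (for POST) a body; 'v1/sample_method' is a placeholder uri.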
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = gcb_job.Job.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.create_job(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_create_job_rest_unset_required_fields():
-    transport = transports.BatchServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.create_job._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("jobId", "requestId", )) & set(("parent", "job", )))
-
-
-def test_create_job_rest_flattened():
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = gcb_job.Job()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            parent='parent_value',
-            job=gcb_job.Job(name='name_value'),
-            job_id='job_id_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = gcb_job.Job.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.create_job(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1alpha/{parent=projects/*/locations/*}/jobs" % client.transport._host, args[1])
-
-
-def test_create_job_rest_flattened_error(transport: str = 'rest'):
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - client.create_job( - batch.CreateJobRequest(), - parent='parent_value', - job=gcb_job.Job(name='name_value'), - job_id='job_id_value', - ) - - -def test_get_job_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_job in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_job] = mock_rpc - - request = {} - client.get_job(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_job_rest_required_fields(request_type=batch.GetJobRequest): - transport_class = transports.BatchServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = job.Job() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = job.Job.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.get_job(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_get_job_rest_unset_required_fields():
-    transport = transports.BatchServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.get_job._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-def test_get_job_rest_flattened():
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = job.Job()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'name': 'projects/sample1/locations/sample2/jobs/sample3'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            name='name_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = job.Job.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.get_job(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1alpha/{name=projects/*/locations/*/jobs/*}" % client.transport._host, args[1])
-
-
-def test_get_job_rest_flattened_error(transport: str = 'rest'):
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - client.get_job( - batch.GetJobRequest(), - name='name_value', - ) - - -def test_delete_job_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_job in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_job] = mock_rpc - - request = {} - client.delete_job(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_job_rest_flattened(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/jobs/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_job(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1alpha/{name=projects/*/locations/*/jobs/*}" % client.transport._host, args[1]) - - -def test_delete_job_rest_flattened_error(transport: str = 'rest'): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_job( - batch.DeleteJobRequest(), - name='name_value', - ) - - -def test_cancel_job_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.cancel_job in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.cancel_job] = mock_rpc - - request = {} - client.cancel_job(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.cancel_job(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_cancel_job_rest_required_fields(request_type=batch.CancelJobRequest): - transport_class = transports.BatchServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).cancel_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).cancel_job._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
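- # The stub below mirrors the shape path_template.transcode() returns
- # for a matched http rule -- a dict carrying 'uri', 'method' and
- # 'query_params' (plus 'body' for methods that send one); the
- # 'v1/sample_method' uri is a placeholder, not the real CancelJob rule.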
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "post",
- 'query_params': pb_request,
- }
- transcode_result['body'] = pb_request
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.cancel_job(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_cancel_job_rest_unset_required_fields():
- transport = transports.BatchServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.cancel_job._get_unset_required_fields({})
- assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-def test_cancel_job_rest_flattened():
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = operations_pb2.Operation(name='operations/spam')
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'name': 'projects/sample1/locations/sample2/jobs/sample3'}
-
- # get truthy value for each flattened field
- mock_args = dict(
- name='name_value',
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- client.cancel_job(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1alpha/{name=projects/*/locations/*/jobs/*}:cancel" % client.transport._host, args[1])
-
-
-def test_cancel_job_rest_flattened_error(transport: str = 'rest'):
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.cancel_job(
- batch.CancelJobRequest(),
- name='name_value',
- )
-
-
-def test_update_job_rest_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.update_job in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.update_job] = mock_rpc
-
- request = {}
- client.update_job(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- client.update_job(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-
-def test_update_job_rest_required_fields(request_type=batch.UpdateJobRequest):
- transport_class = transports.BatchServiceRestTransport
-
- request_init = {}
- request = request_type(**request_init)
- pb_request = request_type.pb(request)
- jsonified_request = json.loads(json_format.MessageToJson(
- pb_request,
- use_integers_for_enums=False
- ))
-
- # verify fields with default values are dropped
-
- unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_job._get_unset_required_fields(jsonified_request)
- jsonified_request.update(unset_fields)
-
- # verify required fields with default values are now present
-
- unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_job._get_unset_required_fields(jsonified_request)
- # Check that path parameters and body parameters are not mixing in.
- assert not set(unset_fields) - set(("request_id", "update_mask", ))
- jsonified_request.update(unset_fields)
-
- # verify required fields with non-default values are left alone
-
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='rest',
- )
- request = request_type(**request_init)
-
- # Designate an appropriate value for the returned response.
- return_value = gcb_job.Job()
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, 'request') as req:
- # We need to mock transcode() because providing default values
- # for required fields will fail the real version if the http_options
- # expect actual values for those fields.
- with mock.patch.object(path_template, 'transcode') as transcode:
- # A uri without fields and an empty body will force all the
- # request fields to show up in the query_params.
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "patch",
- 'query_params': pb_request,
- }
- transcode_result['body'] = pb_request
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- # Convert return value to protobuf type
- return_value = gcb_job.Job.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.update_job(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_update_job_rest_unset_required_fields():
- transport = transports.BatchServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.update_job._get_unset_required_fields({})
- assert set(unset_fields) == (set(("requestId", "updateMask", )) & set(("job", "updateMask", )))
-
-
-def test_update_job_rest_flattened():
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
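- # The REST transport sends requests through client.transport._session
- # (a requests.Session subclass), so patching 'request' on its type
- # intercepts the HTTP call without any network traffic.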
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = gcb_job.Job() - - # get arguments that satisfy an http rule for this method - sample_request = {'job': {'name': 'projects/sample1/locations/sample2/jobs/sample3'}} - - # get truthy value for each flattened field - mock_args = dict( - job=gcb_job.Job(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gcb_job.Job.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_job(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1alpha/{job.name=projects/*/locations/*/jobs/*}" % client.transport._host, args[1]) - - -def test_update_job_rest_flattened_error(transport: str = 'rest'): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_job( - batch.UpdateJobRequest(), - job=gcb_job.Job(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_list_jobs_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_jobs in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_jobs] = mock_rpc - - request = {} - client.list_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_jobs(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_jobs_rest_flattened(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = batch.ListJobsResponse()
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
- # get truthy value for each flattened field
- mock_args = dict(
- parent='parent_value',
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = batch.ListJobsResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- client.list_jobs(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1alpha/{parent=projects/*/locations/*}/jobs" % client.transport._host, args[1])
-
-
-def test_list_jobs_rest_flattened_error(transport: str = 'rest'):
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.list_jobs(
- batch.ListJobsRequest(),
- parent='parent_value',
- )
-
-
-def test_list_jobs_rest_pager(transport: str = 'rest'):
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, 'request') as req:
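- # The mocked responses below form four pages holding 3, 0, 1 and 2
- # jobs; the pager keeps fetching while next_page_token is non-empty,
- # so iterating yields all 6 jobs, and the doubled response tuple
- # feeds the second client call made for the .pages assertions.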
- # Set the response as a series of pages
- response = (
- batch.ListJobsResponse(
- jobs=[
- job.Job(),
- job.Job(),
- job.Job(),
- ],
- next_page_token='abc',
- ),
- batch.ListJobsResponse(
- jobs=[],
- next_page_token='def',
- ),
- batch.ListJobsResponse(
- jobs=[
- job.Job(),
- ],
- next_page_token='ghi',
- ),
- batch.ListJobsResponse(
- jobs=[
- job.Job(),
- job.Job(),
- ],
- ),
- )
- # Two responses for two calls
- response = response + response
-
- # Wrap the values into proper Response objs
- response = tuple(batch.ListJobsResponse.to_json(x) for x in response)
- return_values = tuple(Response() for i in response)
- for return_val, response_val in zip(return_values, response):
- return_val._content = response_val.encode('UTF-8')
- return_val.status_code = 200
- req.side_effect = return_values
-
- sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
- pager = client.list_jobs(request=sample_request)
-
- results = list(pager)
- assert len(results) == 6
- assert all(isinstance(i, job.Job)
- for i in results)
-
- pages = list(client.list_jobs(request=sample_request).pages)
- for page_, token in zip(pages, ['abc','def','ghi', '']):
- assert page_.raw_page.next_page_token == token
-
-
-def test_get_task_rest_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.get_task in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.get_task] = mock_rpc
-
- request = {}
- client.get_task(request)
-
- # Establish that the underlying gRPC stub method was called.
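- # (mock_rpc replaced the cached entry in _wrapped_methods above, so
- # its call count shows the cached wrapper was reused, not rebuilt.)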
- assert mock_rpc.call_count == 1
-
- client.get_task(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-
-def test_get_task_rest_required_fields(request_type=batch.GetTaskRequest):
- transport_class = transports.BatchServiceRestTransport
-
- request_init = {}
- request_init["name"] = ""
- request = request_type(**request_init)
- pb_request = request_type.pb(request)
- jsonified_request = json.loads(json_format.MessageToJson(
- pb_request,
- use_integers_for_enums=False
- ))
-
- # verify fields with default values are dropped
-
- unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_task._get_unset_required_fields(jsonified_request)
- jsonified_request.update(unset_fields)
-
- # verify required fields with default values are now present
-
- jsonified_request["name"] = 'name_value'
-
- unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_task._get_unset_required_fields(jsonified_request)
- jsonified_request.update(unset_fields)
-
- # verify required fields with non-default values are left alone
- assert "name" in jsonified_request
- assert jsonified_request["name"] == 'name_value'
-
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='rest',
- )
- request = request_type(**request_init)
-
- # Designate an appropriate value for the returned response.
- return_value = task.Task()
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, 'request') as req:
- # We need to mock transcode() because providing default values
- # for required fields will fail the real version if the http_options
- # expect actual values for those fields.
- with mock.patch.object(path_template, 'transcode') as transcode:
- # A uri without fields and an empty body will force all the
- # request fields to show up in the query_params.
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "get",
- 'query_params': pb_request,
- }
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- # Convert return value to protobuf type
- return_value = task.Task.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.get_task(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_get_task_rest_unset_required_fields():
- transport = transports.BatchServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.get_task._get_unset_required_fields({})
- assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-def test_get_task_rest_flattened():
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = task.Task() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/jobs/sample3/taskGroups/sample4/tasks/sample5'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = task.Task.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_task(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1alpha/{name=projects/*/locations/*/jobs/*/taskGroups/*/tasks/*}" % client.transport._host, args[1]) - - -def test_get_task_rest_flattened_error(transport: str = 'rest'): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_task( - batch.GetTaskRequest(), - name='name_value', - ) - - -def test_list_tasks_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_tasks in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_tasks] = mock_rpc - - request = {} - client.list_tasks(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.list_tasks(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-
-def test_list_tasks_rest_required_fields(request_type=batch.ListTasksRequest):
- transport_class = transports.BatchServiceRestTransport
-
- request_init = {}
- request_init["parent"] = ""
- request = request_type(**request_init)
- pb_request = request_type.pb(request)
- jsonified_request = json.loads(json_format.MessageToJson(
- pb_request,
- use_integers_for_enums=False
- ))
-
- # verify fields with default values are dropped
-
- unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_tasks._get_unset_required_fields(jsonified_request)
- jsonified_request.update(unset_fields)
-
- # verify required fields with default values are now present
-
- jsonified_request["parent"] = 'parent_value'
-
- unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_tasks._get_unset_required_fields(jsonified_request)
- # Check that path parameters and body parameters are not mixing in.
- assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", ))
- jsonified_request.update(unset_fields)
-
- # verify required fields with non-default values are left alone
- assert "parent" in jsonified_request
- assert jsonified_request["parent"] == 'parent_value'
-
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='rest',
- )
- request = request_type(**request_init)
-
- # Designate an appropriate value for the returned response.
- return_value = batch.ListTasksResponse()
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, 'request') as req:
- # We need to mock transcode() because providing default values
- # for required fields will fail the real version if the http_options
- # expect actual values for those fields.
- with mock.patch.object(path_template, 'transcode') as transcode:
- # A uri without fields and an empty body will force all the
- # request fields to show up in the query_params.
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "get",
- 'query_params': pb_request,
- }
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- # Convert return value to protobuf type
- return_value = batch.ListTasksResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.list_tasks(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_list_tasks_rest_unset_required_fields():
- transport = transports.BatchServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.list_tasks._get_unset_required_fields({})
- assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", )))
-
-
-def test_list_tasks_rest_flattened():
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = batch.ListTasksResponse()
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'parent': 'projects/sample1/locations/sample2/jobs/sample3/taskGroups/sample4'}
-
- # get truthy value for each flattened field
- mock_args = dict(
- parent='parent_value',
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = batch.ListTasksResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- client.list_tasks(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1alpha/{parent=projects/*/locations/*/jobs/*/taskGroups/*}/tasks" % client.transport._host, args[1])
-
-
-def test_list_tasks_rest_flattened_error(transport: str = 'rest'):
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.list_tasks(
- batch.ListTasksRequest(),
- parent='parent_value',
- )
-
-
-def test_list_tasks_rest_pager(transport: str = 'rest'):
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, 'request') as req:
- # Set the response as a series of pages
- response = (
- batch.ListTasksResponse(
- tasks=[
- task.Task(),
- task.Task(),
- task.Task(),
- ],
- next_page_token='abc',
- ),
- batch.ListTasksResponse(
- tasks=[],
- next_page_token='def',
- ),
- batch.ListTasksResponse(
- tasks=[
- task.Task(),
- ],
- next_page_token='ghi',
- ),
- batch.ListTasksResponse(
- tasks=[
- task.Task(),
- task.Task(),
- ],
- ),
- )
- # Two responses for two calls
- response = response + response
-
- # Wrap the values into proper Response objs
- response = tuple(batch.ListTasksResponse.to_json(x) for x in response)
- return_values = tuple(Response() for i in response)
- for return_val, response_val in zip(return_values, response):
- return_val._content = response_val.encode('UTF-8')
- return_val.status_code = 200
- req.side_effect = return_values
-
- sample_request = {'parent': 'projects/sample1/locations/sample2/jobs/sample3/taskGroups/sample4'}
-
- pager = client.list_tasks(request=sample_request)
-
- results = list(pager)
- assert len(results) == 6
- assert all(isinstance(i, task.Task)
- for i in results)
-
- pages = list(client.list_tasks(request=sample_request).pages)
- for page_, token in zip(pages, ['abc','def','ghi', '']):
- assert page_.raw_page.next_page_token == token
-
-
-def test_create_resource_allowance_rest_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.create_resource_allowance in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.create_resource_allowance] = mock_rpc
-
- request = {}
- client.create_resource_allowance(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1 - - client.create_resource_allowance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_resource_allowance_rest_required_fields(request_type=batch.CreateResourceAllowanceRequest): - transport_class = transports.BatchServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_resource_allowance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_resource_allowance._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("request_id", "resource_allowance_id", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = gcb_resource_allowance.ResourceAllowance() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
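- # This method sends an http body, so the stubbed transcode result
- # below also carries the request as 'body' alongside 'uri', 'method'
- # and 'query_params'.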
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "post",
- 'query_params': pb_request,
- }
- transcode_result['body'] = pb_request
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- # Convert return value to protobuf type
- return_value = gcb_resource_allowance.ResourceAllowance.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.create_resource_allowance(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_create_resource_allowance_rest_unset_required_fields():
- transport = transports.BatchServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.create_resource_allowance._get_unset_required_fields({})
- assert set(unset_fields) == (set(("requestId", "resourceAllowanceId", )) & set(("parent", "resourceAllowance", )))
-
-
-def test_create_resource_allowance_rest_flattened():
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = gcb_resource_allowance.ResourceAllowance()
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
- # get truthy value for each flattened field
- mock_args = dict(
- parent='parent_value',
- resource_allowance=gcb_resource_allowance.ResourceAllowance(usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance(spec=gcb_resource_allowance.UsageResourceAllowanceSpec(type_='type__value'))),
- resource_allowance_id='resource_allowance_id_value',
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = gcb_resource_allowance.ResourceAllowance.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- client.create_resource_allowance(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1alpha/{parent=projects/*/locations/*}/resourceAllowances" % client.transport._host, args[1])
-
-
-def test_create_resource_allowance_rest_flattened_error(transport: str = 'rest'):
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError): - client.create_resource_allowance( - batch.CreateResourceAllowanceRequest(), - parent='parent_value', - resource_allowance=gcb_resource_allowance.ResourceAllowance(usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance(spec=gcb_resource_allowance.UsageResourceAllowanceSpec(type_='type__value'))), - resource_allowance_id='resource_allowance_id_value', - ) - - -def test_get_resource_allowance_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_resource_allowance in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_resource_allowance] = mock_rpc - - request = {} - client.get_resource_allowance(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_resource_allowance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_resource_allowance_rest_required_fields(request_type=batch.GetResourceAllowanceRequest): - transport_class = transports.BatchServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_resource_allowance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_resource_allowance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = resource_allowance.ResourceAllowance() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "get",
- 'query_params': pb_request,
- }
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- # Convert return value to protobuf type
- return_value = resource_allowance.ResourceAllowance.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.get_resource_allowance(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_get_resource_allowance_rest_unset_required_fields():
- transport = transports.BatchServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.get_resource_allowance._get_unset_required_fields({})
- assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-def test_get_resource_allowance_rest_flattened():
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = resource_allowance.ResourceAllowance()
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'name': 'projects/sample1/locations/sample2/resourceAllowances/sample3'}
-
- # get truthy value for each flattened field
- mock_args = dict(
- name='name_value',
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = resource_allowance.ResourceAllowance.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- client.get_resource_allowance(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1alpha/{name=projects/*/locations/*/resourceAllowances/*}" % client.transport._host, args[1])
-
-
-def test_get_resource_allowance_rest_flattened_error(transport: str = 'rest'):
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError): - client.get_resource_allowance( - batch.GetResourceAllowanceRequest(), - name='name_value', - ) - - -def test_delete_resource_allowance_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_resource_allowance in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_resource_allowance] = mock_rpc - - request = {} - client.delete_resource_allowance(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_resource_allowance(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_resource_allowance_rest_required_fields(request_type=batch.DeleteResourceAllowanceRequest): - transport_class = transports.BatchServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_resource_allowance._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_resource_allowance._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("reason", "request_id", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "delete",
- 'query_params': pb_request,
- }
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.delete_resource_allowance(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_delete_resource_allowance_rest_unset_required_fields():
- transport = transports.BatchServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.delete_resource_allowance._get_unset_required_fields({})
- assert set(unset_fields) == (set(("reason", "requestId", )) & set(("name", )))
-
-
-def test_delete_resource_allowance_rest_flattened():
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = operations_pb2.Operation(name='operations/spam')
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'name': 'projects/sample1/locations/sample2/resourceAllowances/sample3'}
-
- # get truthy value for each flattened field
- mock_args = dict(
- name='name_value',
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- client.delete_resource_allowance(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1alpha/{name=projects/*/locations/*/resourceAllowances/*}" % client.transport._host, args[1])
-
-
-def test_delete_resource_allowance_rest_flattened_error(transport: str = 'rest'):
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError): - client.delete_resource_allowance( - batch.DeleteResourceAllowanceRequest(), - name='name_value', - ) - - -def test_list_resource_allowances_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_resource_allowances in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_resource_allowances] = mock_rpc - - request = {} - client.list_resource_allowances(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_resource_allowances(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_resource_allowances_rest_required_fields(request_type=batch.ListResourceAllowancesRequest): - transport_class = transports.BatchServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_resource_allowances._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_resource_allowances._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = batch.ListResourceAllowancesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "get",
- 'query_params': pb_request,
- }
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- # Convert return value to protobuf type
- return_value = batch.ListResourceAllowancesResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.list_resource_allowances(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_list_resource_allowances_rest_unset_required_fields():
- transport = transports.BatchServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.list_resource_allowances._get_unset_required_fields({})
- assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", )))
-
-
-def test_list_resource_allowances_rest_flattened():
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = batch.ListResourceAllowancesResponse()
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
- # get truthy value for each flattened field
- mock_args = dict(
- parent='parent_value',
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = batch.ListResourceAllowancesResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- client.list_resource_allowances(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1alpha/{parent=projects/*/locations/*}/resourceAllowances" % client.transport._host, args[1])
-
-
-def test_list_resource_allowances_rest_flattened_error(transport: str = 'rest'):
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.list_resource_allowances(
- batch.ListResourceAllowancesRequest(),
- parent='parent_value',
- )
-
-
-def test_list_resource_allowances_rest_pager(transport: str = 'rest'):
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, 'request') as req:
- # Set the response as a series of pages
- response = (
- batch.ListResourceAllowancesResponse(
- resource_allowances=[
- resource_allowance.ResourceAllowance(),
- resource_allowance.ResourceAllowance(),
- resource_allowance.ResourceAllowance(),
- ],
- next_page_token='abc',
- ),
- batch.ListResourceAllowancesResponse(
- resource_allowances=[],
- next_page_token='def',
- ),
- batch.ListResourceAllowancesResponse(
- resource_allowances=[
- resource_allowance.ResourceAllowance(),
- ],
- next_page_token='ghi',
- ),
- batch.ListResourceAllowancesResponse(
- resource_allowances=[
- resource_allowance.ResourceAllowance(),
- resource_allowance.ResourceAllowance(),
- ],
- ),
- )
- # Two responses for two calls
- response = response + response
-
- # Wrap the values into proper Response objs
- response = tuple(batch.ListResourceAllowancesResponse.to_json(x) for x in response)
- return_values = tuple(Response() for i in response)
- for return_val, response_val in zip(return_values, response):
- return_val._content = response_val.encode('UTF-8')
- return_val.status_code = 200
- req.side_effect = return_values
-
- sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
- pager = client.list_resource_allowances(request=sample_request)
-
- results = list(pager)
- assert len(results) == 6
- assert all(isinstance(i, resource_allowance.ResourceAllowance)
- for i in results)
-
- pages = list(client.list_resource_allowances(request=sample_request).pages)
- for page_, token in zip(pages, ['abc','def','ghi', '']):
- assert page_.raw_page.next_page_token == token
-
-
-def test_update_resource_allowance_rest_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.update_resource_allowance in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.update_resource_allowance] = mock_rpc
-
- request = {}
- client.update_resource_allowance(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- client.update_resource_allowance(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-
-def test_update_resource_allowance_rest_required_fields(request_type=batch.UpdateResourceAllowanceRequest):
- transport_class = transports.BatchServiceRestTransport
-
- request_init = {}
- request = request_type(**request_init)
- pb_request = request_type.pb(request)
- jsonified_request = json.loads(json_format.MessageToJson(
- pb_request,
- use_integers_for_enums=False
- ))
-
- # verify fields with default values are dropped
-
- unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_resource_allowance._get_unset_required_fields(jsonified_request)
- jsonified_request.update(unset_fields)
-
- # verify required fields with default values are now present
-
- unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_resource_allowance._get_unset_required_fields(jsonified_request)
- # Check that path parameters and body parameters are not mixing in.
- assert not set(unset_fields) - set(("request_id", "update_mask", ))
- jsonified_request.update(unset_fields)
-
- # verify required fields with non-default values are left alone
-
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='rest',
- )
- request = request_type(**request_init)
-
- # Designate an appropriate value for the returned response.
- return_value = gcb_resource_allowance.ResourceAllowance()
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, 'request') as req:
- # We need to mock transcode() because providing default values
- # for required fields will fail the real version if the http_options
- # expect actual values for those fields.
- with mock.patch.object(path_template, 'transcode') as transcode:
- # A uri without fields and an empty body will force all the
- # request fields to show up in the query_params.
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "patch",
- 'query_params': pb_request,
- }
- transcode_result['body'] = pb_request
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- # Convert return value to protobuf type
- return_value = gcb_resource_allowance.ResourceAllowance.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.update_resource_allowance(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_update_resource_allowance_rest_unset_required_fields():
- transport = transports.BatchServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.update_resource_allowance._get_unset_required_fields({})
- assert set(unset_fields) == (set(("requestId", "updateMask", )) & set(("resourceAllowance", "updateMask", )))
-
-
-def test_update_resource_allowance_rest_flattened():
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
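- # Patching the session's `request` keeps the test offline while still
- # exercising URL transcoding, serialization, and response parsing.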
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = gcb_resource_allowance.ResourceAllowance() - - # get arguments that satisfy an http rule for this method - sample_request = {'resource_allowance': {'name': 'projects/sample1/locations/sample2/resourceAllowances/sample3'}} - - # get truthy value for each flattened field - mock_args = dict( - resource_allowance=gcb_resource_allowance.ResourceAllowance(usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance(spec=gcb_resource_allowance.UsageResourceAllowanceSpec(type_='type__value'))), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gcb_resource_allowance.ResourceAllowance.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_resource_allowance(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1alpha/{resource_allowance.name=projects/*/locations/*/resourceAllowances/*}" % client.transport._host, args[1]) - - -def test_update_resource_allowance_rest_flattened_error(transport: str = 'rest'): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_resource_allowance( - batch.UpdateResourceAllowanceRequest(), - resource_allowance=gcb_resource_allowance.ResourceAllowance(usage_resource_allowance=gcb_resource_allowance.UsageResourceAllowance(spec=gcb_resource_allowance.UsageResourceAllowanceSpec(type_='type__value'))), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.BatchServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.BatchServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = BatchServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.BatchServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = BatchServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. 
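- # (api_key and explicit credentials are mutually exclusive ways of
- # configuring auth, so supplying both must raise.)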
- options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = BatchServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.BatchServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = BatchServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.BatchServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = BatchServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.BatchServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.BatchServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.BatchServiceGrpcTransport, - transports.BatchServiceGrpcAsyncIOTransport, - transports.BatchServiceRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = BatchServiceClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_job_empty_call_grpc(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_job), - '__call__') as call: - call.return_value = gcb_job.Job() - client.create_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.CreateJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_job_empty_call_grpc(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - call.return_value = job.Job() - client.get_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.GetJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. 
request == None and no flattened fields passed, work. -def test_delete_job_empty_call_grpc(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_job), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.DeleteJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_cancel_job_empty_call_grpc(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.cancel_job), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.cancel_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.CancelJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_job_empty_call_grpc(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_job), - '__call__') as call: - call.return_value = gcb_job.Job() - client.update_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.UpdateJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_jobs_empty_call_grpc(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - call.return_value = batch.ListJobsResponse() - client.list_jobs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.ListJobsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_task_empty_call_grpc(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - call.return_value = task.Task() - client.get_task(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.GetTaskRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
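-# An empty call exercises the default-request path: with request=None the
-# client constructs batch.ListTasksRequest() itself before invoking the stub.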
-def test_list_tasks_empty_call_grpc(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - call.return_value = batch.ListTasksResponse() - client.list_tasks(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.ListTasksRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_resource_allowance_empty_call_grpc(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_resource_allowance), - '__call__') as call: - call.return_value = gcb_resource_allowance.ResourceAllowance() - client.create_resource_allowance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.CreateResourceAllowanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_resource_allowance_empty_call_grpc(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_resource_allowance), - '__call__') as call: - call.return_value = resource_allowance.ResourceAllowance() - client.get_resource_allowance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.GetResourceAllowanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_resource_allowance_empty_call_grpc(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_resource_allowance), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_resource_allowance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.DeleteResourceAllowanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_resource_allowances_empty_call_grpc(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_resource_allowances), - '__call__') as call: - call.return_value = batch.ListResourceAllowancesResponse() - client.list_resource_allowances(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.ListResourceAllowancesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_resource_allowance_empty_call_grpc(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_resource_allowance), - '__call__') as call: - call.return_value = gcb_resource_allowance.ResourceAllowance() - client.update_resource_allowance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.UpdateResourceAllowanceRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = BatchServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_job_empty_call_grpc_asyncio(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcb_job.Job( - name='name_value', - uid='uid_value', - priority=898, - scheduling_policy=gcb_job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE, - )) - await client.create_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.CreateJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_job_empty_call_grpc_asyncio(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(job.Job( - name='name_value', - uid='uid_value', - priority=898, - scheduling_policy=job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE, - )) - await client.get_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.GetJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_delete_job_empty_call_grpc_asyncio(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.DeleteJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_cancel_job_empty_call_grpc_asyncio(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.cancel_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.cancel_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.CancelJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_job_empty_call_grpc_asyncio(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcb_job.Job( - name='name_value', - uid='uid_value', - priority=898, - scheduling_policy=gcb_job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE, - )) - await client.update_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.UpdateJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_jobs_empty_call_grpc_asyncio(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(batch.ListJobsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - await client.list_jobs(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.ListJobsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_task_empty_call_grpc_asyncio(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(task.Task( - name='name_value', - )) - await client.get_task(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.GetTaskRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_tasks_empty_call_grpc_asyncio(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(batch.ListTasksResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - await client.list_tasks(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.ListTasksRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_resource_allowance_empty_call_grpc_asyncio(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_resource_allowance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcb_resource_allowance.ResourceAllowance( - name='name_value', - uid='uid_value', - )) - await client.create_resource_allowance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.CreateResourceAllowanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_resource_allowance_empty_call_grpc_asyncio(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_resource_allowance), - '__call__') as call: - # Designate an appropriate return value for the call. 
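- # FakeUnaryUnaryCall wraps the response so the mocked stub is awaitable,
- # matching the grpc.aio unary-unary call interface.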
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resource_allowance.ResourceAllowance( - name='name_value', - uid='uid_value', - )) - await client.get_resource_allowance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.GetResourceAllowanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_resource_allowance_empty_call_grpc_asyncio(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_resource_allowance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_resource_allowance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.DeleteResourceAllowanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_resource_allowances_empty_call_grpc_asyncio(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_resource_allowances), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(batch.ListResourceAllowancesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - await client.list_resource_allowances(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.ListResourceAllowancesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_resource_allowance_empty_call_grpc_asyncio(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_resource_allowance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcb_resource_allowance.ResourceAllowance( - name='name_value', - uid='uid_value', - )) - await client.update_resource_allowance(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.UpdateResourceAllowanceRequest() - - assert args[0] == request_msg - - -def test_transport_kind_rest(): - transport = BatchServiceClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" - - -def test_create_job_rest_bad_request(request_type=batch.CreateJobRequest): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_job(request) - - -@pytest.mark.parametrize("request_type", [ - batch.CreateJobRequest, - dict, -]) -def test_create_job_rest_call_success(request_type): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["job"] = {'name': 'name_value', 'uid': 'uid_value', 'priority': 898, 'task_groups': [{'name': 'name_value', 'task_spec': {'runnables': [{'container': {'image_uri': 'image_uri_value', 'commands': ['commands_value1', 'commands_value2'], 'entrypoint': 'entrypoint_value', 'volumes': ['volumes_value1', 'volumes_value2'], 'options': 'options_value', 'block_external_network': True, 'username': 'username_value', 'password': 'password_value', 'enable_image_streaming': True}, 'script': {'path': 'path_value', 'text': 'text_value'}, 'barrier': {'name': 'name_value'}, 'display_name': 'display_name_value', 'ignore_exit_status': True, 'background': True, 'always_run': True, 'environment': {'variables': {}, 'secret_variables': {}, 'encrypted_variables': {'key_name': 'key_name_value', 'cipher_text': 'cipher_text_value'}}, 'timeout': {'seconds': 751, 'nanos': 543}, 'labels': {}}], 'compute_resource': {'cpu_milli': 958, 'memory_mib': 1072, 'gpu_count': 980, 'boot_disk_mib': 1365}, 'max_run_duration': {}, 'max_retry_count': 1635, 'lifecycle_policies': [{'action': 1, 'action_condition': {'exit_codes': [1064, 1065]}}], 'environments': {}, 'volumes': [{'nfs': {'server': 'server_value', 'remote_path': 'remote_path_value'}, 'pd': {'disk': 'disk_value', 'device': 'device_value', 'existing': True}, 'gcs': {'remote_path': 'remote_path_value'}, 'device_name': 'device_name_value', 'mount_path': 'mount_path_value', 'mount_options': ['mount_options_value1', 'mount_options_value2']}], 'environment': {}}, 'task_count': 1083, 'parallelism': 1174, 'scheduling_policy': 1, 'allocation_policy': {'location': {'allowed_locations': ['allowed_locations_value1', 'allowed_locations_value2'], 'denied_locations': ['denied_locations_value1', 'denied_locations_value2']}, 'instance': {'allowed_machine_types': ['allowed_machine_types_value1', 'allowed_machine_types_value2'], 'machine_type': 'machine_type_value', 'min_cpu_platform': 'min_cpu_platform_value', 
'provisioning_model': 1, 'accelerators': [{'type_': 'type__value', 'count': 553, 'install_gpu_drivers': True, 'driver_version': 'driver_version_value'}], 'boot_disk': {'image': 'image_value', 'snapshot': 'snapshot_value', 'type_': 'type__value', 'size_gb': 739, 'disk_interface': 'disk_interface_value'}, 'disks': [{'new_disk': {}, 'existing_disk': 'existing_disk_value', 'device_name': 'device_name_value'}], 'reservation': 'reservation_value'}, 'instances': [{'policy': {}, 'instance_template': 'instance_template_value', 'install_gpu_drivers': True, 'install_ops_agent': True, 'block_project_ssh_keys': True}], 'instance_templates': ['instance_templates_value1', 'instance_templates_value2'], 'provisioning_models': [1], 'service_account_email': 'service_account_email_value', 'service_account': {'email': 'email_value', 'scopes': ['scopes_value1', 'scopes_value2']}, 'labels': {}, 'network': {'network_interfaces': [{'network': 'network_value', 'subnetwork': 'subnetwork_value', 'no_external_ip_address': True}]}, 'placement': {'collocation': 'collocation_value', 'max_distance': 1264}, 'tags': ['tags_value1', 'tags_value2']}, 'labels': {}, 'task_environments': {}, 'task_count_per_node': 2022, 'require_hosts_file': True, 'permissive_ssh': True, 'run_as_non_root': True, 'service_account': {}}], 'scheduling_policy': 1, 'dependencies': [{'items': {}}], 'allocation_policy': {}, 'labels': {}, 'status': {'state': 1, 'status_events': [{'type_': 'type__value', 'description': 'description_value', 'event_time': {'seconds': 751, 'nanos': 543}, 'task_execution': {'exit_code': 948, 'stderr_snippet': 'stderr_snippet_value'}, 'task_state': 1}], 'task_groups': {}, 'run_duration': {}, 'resource_usage': {'core_hours': 0.1081}}, 'notification': {'pubsub_topic': 'pubsub_topic_value', 'message': {'type_': 1, 'new_job_state': 1, 'new_task_state': 1}}, 'create_time': {}, 'update_time': {}, 'logs_policy': {'destination': 1, 'logs_path': 'logs_path_value', 'cloud_logging_option': {'use_generic_task_monitored_resource': True}}, 'notifications': {}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = batch.CreateJobRequest.meta.fields["job"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
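- # proto-plus message classes expose `.meta.fields`, while raw protobuf
- # classes expose `.DESCRIPTOR.fields`; the hasattr check below picks
- # the right one.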
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["job"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["job"][field])): - del request_init["job"][field][i][subfield] - else: - del request_init["job"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = gcb_job.Job( - name='name_value', - uid='uid_value', - priority=898, - scheduling_policy=gcb_job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gcb_job.Job.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_job(request) - - # Establish that the response is the type that we expect. 
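- # The field-level asserts check that the JSON payload round-tripped into
- # the expected proto values.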
- assert isinstance(response, gcb_job.Job) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.priority == 898 - assert response.scheduling_policy == gcb_job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_job_rest_interceptors(null_interceptor): - transport = transports.BatchServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BatchServiceRestInterceptor(), - ) - client = BatchServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BatchServiceRestInterceptor, "post_create_job") as post, \ - mock.patch.object(transports.BatchServiceRestInterceptor, "post_create_job_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BatchServiceRestInterceptor, "pre_create_job") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = batch.CreateJobRequest.pb(batch.CreateJobRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = gcb_job.Job.to_json(gcb_job.Job()) - req.return_value.content = return_value - - request = batch.CreateJobRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = gcb_job.Job() - post_with_metadata.return_value = gcb_job.Job(), metadata - - client.create_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_job_rest_bad_request(request_type=batch.GetJobRequest): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/jobs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_job(request) - - -@pytest.mark.parametrize("request_type", [ - batch.GetJobRequest, - dict, -]) -def test_get_job_rest_call_success(request_type): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/jobs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = job.Job( - name='name_value', - uid='uid_value', - priority=898, - scheduling_policy=job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = job.Job.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_job(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, job.Job) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.priority == 898 - assert response.scheduling_policy == job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_job_rest_interceptors(null_interceptor): - transport = transports.BatchServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BatchServiceRestInterceptor(), - ) - client = BatchServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BatchServiceRestInterceptor, "post_get_job") as post, \ - mock.patch.object(transports.BatchServiceRestInterceptor, "post_get_job_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BatchServiceRestInterceptor, "pre_get_job") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = batch.GetJobRequest.pb(batch.GetJobRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = job.Job.to_json(job.Job()) - req.return_value.content = return_value - - request = batch.GetJobRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = job.Job() - post_with_metadata.return_value = job.Job(), metadata - - client.get_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_job_rest_bad_request(request_type=batch.DeleteJobRequest): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/jobs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
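- # A 400 status makes the transport raise core_exceptions.BadRequest,
- # which the pytest.raises context below asserts.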
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- json_return_value = ''
- response_value.json = mock.Mock(return_value={})
- response_value.status_code = 400
- response_value.request = mock.Mock()
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- client.delete_job(request)
-
-
-@pytest.mark.parametrize("request_type", [
- batch.DeleteJobRequest,
- dict,
-])
-def test_delete_job_rest_call_success(request_type):
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
-
- # send a request that will satisfy transcoding
- request_init = {'name': 'projects/sample1/locations/sample2/jobs/sample3'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = operations_pb2.Operation(name='operations/spam')
-
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
- response_value.content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- response = client.delete_job(request)
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, operation.Operation)
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_delete_job_rest_interceptors(null_interceptor):
- transport = transports.BatchServiceRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.BatchServiceRestInterceptor(),
- )
- client = BatchServiceClient(transport=transport)
-
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(operation.Operation, "_set_result_from_operation"), \
- mock.patch.object(transports.BatchServiceRestInterceptor, "post_delete_job") as post, \
- mock.patch.object(transports.BatchServiceRestInterceptor, "post_delete_job_with_metadata") as post_with_metadata, \
- mock.patch.object(transports.BatchServiceRestInterceptor, "pre_delete_job") as pre:
- pre.assert_not_called()
- post.assert_not_called()
- post_with_metadata.assert_not_called()
- pb_message = batch.DeleteJobRequest.pb(batch.DeleteJobRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = mock.Mock()
- req.return_value.status_code = 200
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- return_value = json_format.MessageToJson(operations_pb2.Operation())
- req.return_value.content = return_value
-
- request = batch.DeleteJobRequest()
- metadata =[
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = operations_pb2.Operation()
- post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
- client.delete_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
- pre.assert_called_once()
- post.assert_called_once()
- post_with_metadata.assert_called_once()
-
-
-def 
test_cancel_job_rest_bad_request(request_type=batch.CancelJobRequest): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/jobs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.cancel_job(request) - - -@pytest.mark.parametrize("request_type", [ - batch.CancelJobRequest, - dict, -]) -def test_cancel_job_rest_call_success(request_type): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/jobs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.cancel_job(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operation.Operation)
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_cancel_job_rest_interceptors(null_interceptor):
- transport = transports.BatchServiceRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.BatchServiceRestInterceptor(),
- )
- client = BatchServiceClient(transport=transport)
-
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(operation.Operation, "_set_result_from_operation"), \
- mock.patch.object(transports.BatchServiceRestInterceptor, "post_cancel_job") as post, \
- mock.patch.object(transports.BatchServiceRestInterceptor, "post_cancel_job_with_metadata") as post_with_metadata, \
- mock.patch.object(transports.BatchServiceRestInterceptor, "pre_cancel_job") as pre:
- pre.assert_not_called()
- post.assert_not_called()
- post_with_metadata.assert_not_called()
- pb_message = batch.CancelJobRequest.pb(batch.CancelJobRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = mock.Mock()
- req.return_value.status_code = 200
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- return_value = json_format.MessageToJson(operations_pb2.Operation())
- req.return_value.content = return_value
-
- request = batch.CancelJobRequest()
- metadata =[
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = operations_pb2.Operation()
- post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
- client.cancel_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
- pre.assert_called_once()
- post.assert_called_once()
- post_with_metadata.assert_called_once()
-
-
-def test_update_job_rest_bad_request(request_type=batch.UpdateJobRequest):
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
- # send a request that will satisfy transcoding
- request_init = {'job': {'name': 'projects/sample1/locations/sample2/jobs/sample3'}}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_job(request) - - -@pytest.mark.parametrize("request_type", [ - batch.UpdateJobRequest, - dict, -]) -def test_update_job_rest_call_success(request_type): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'job': {'name': 'projects/sample1/locations/sample2/jobs/sample3'}} - request_init["job"] = {'name': 'projects/sample1/locations/sample2/jobs/sample3', 'uid': 'uid_value', 'priority': 898, 'task_groups': [{'name': 'name_value', 'task_spec': {'runnables': [{'container': {'image_uri': 'image_uri_value', 'commands': ['commands_value1', 'commands_value2'], 'entrypoint': 'entrypoint_value', 'volumes': ['volumes_value1', 'volumes_value2'], 'options': 'options_value', 'block_external_network': True, 'username': 'username_value', 'password': 'password_value', 'enable_image_streaming': True}, 'script': {'path': 'path_value', 'text': 'text_value'}, 'barrier': {'name': 'name_value'}, 'display_name': 'display_name_value', 'ignore_exit_status': True, 'background': True, 'always_run': True, 'environment': {'variables': {}, 'secret_variables': {}, 'encrypted_variables': {'key_name': 'key_name_value', 'cipher_text': 'cipher_text_value'}}, 'timeout': {'seconds': 751, 'nanos': 543}, 'labels': {}}], 'compute_resource': {'cpu_milli': 958, 'memory_mib': 1072, 'gpu_count': 980, 'boot_disk_mib': 1365}, 'max_run_duration': {}, 'max_retry_count': 1635, 'lifecycle_policies': [{'action': 1, 'action_condition': {'exit_codes': [1064, 1065]}}], 'environments': {}, 'volumes': [{'nfs': {'server': 'server_value', 'remote_path': 'remote_path_value'}, 'pd': {'disk': 'disk_value', 'device': 'device_value', 'existing': True}, 'gcs': {'remote_path': 'remote_path_value'}, 'device_name': 'device_name_value', 'mount_path': 'mount_path_value', 'mount_options': ['mount_options_value1', 'mount_options_value2']}], 'environment': {}}, 'task_count': 1083, 'parallelism': 1174, 'scheduling_policy': 1, 'allocation_policy': {'location': {'allowed_locations': ['allowed_locations_value1', 'allowed_locations_value2'], 'denied_locations': ['denied_locations_value1', 'denied_locations_value2']}, 'instance': {'allowed_machine_types': ['allowed_machine_types_value1', 'allowed_machine_types_value2'], 'machine_type': 'machine_type_value', 'min_cpu_platform': 'min_cpu_platform_value', 'provisioning_model': 1, 'accelerators': [{'type_': 'type__value', 'count': 553, 'install_gpu_drivers': True, 'driver_version': 'driver_version_value'}], 'boot_disk': {'image': 'image_value', 'snapshot': 'snapshot_value', 'type_': 'type__value', 'size_gb': 739, 'disk_interface': 'disk_interface_value'}, 'disks': [{'new_disk': {}, 'existing_disk': 'existing_disk_value', 'device_name': 'device_name_value'}], 'reservation': 'reservation_value'}, 'instances': [{'policy': {}, 'instance_template': 'instance_template_value', 'install_gpu_drivers': True, 'install_ops_agent': True, 'block_project_ssh_keys': True}], 'instance_templates': ['instance_templates_value1', 'instance_templates_value2'], 
'provisioning_models': [1], 'service_account_email': 'service_account_email_value', 'service_account': {'email': 'email_value', 'scopes': ['scopes_value1', 'scopes_value2']}, 'labels': {}, 'network': {'network_interfaces': [{'network': 'network_value', 'subnetwork': 'subnetwork_value', 'no_external_ip_address': True}]}, 'placement': {'collocation': 'collocation_value', 'max_distance': 1264}, 'tags': ['tags_value1', 'tags_value2']}, 'labels': {}, 'task_environments': {}, 'task_count_per_node': 2022, 'require_hosts_file': True, 'permissive_ssh': True, 'run_as_non_root': True, 'service_account': {}}], 'scheduling_policy': 1, 'dependencies': [{'items': {}}], 'allocation_policy': {}, 'labels': {}, 'status': {'state': 1, 'status_events': [{'type_': 'type__value', 'description': 'description_value', 'event_time': {'seconds': 751, 'nanos': 543}, 'task_execution': {'exit_code': 948, 'stderr_snippet': 'stderr_snippet_value'}, 'task_state': 1}], 'task_groups': {}, 'run_duration': {}, 'resource_usage': {'core_hours': 0.1081}}, 'notification': {'pubsub_topic': 'pubsub_topic_value', 'message': {'type_': 1, 'new_job_state': 1, 'new_task_state': 1}}, 'create_time': {}, 'update_time': {}, 'logs_policy': {'destination': 1, 'logs_path': 'logs_path_value', 'cloud_logging_option': {'use_generic_task_monitored_resource': True}}, 'notifications': {}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = batch.UpdateJobRequest.meta.fields["job"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
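# Aside (editor's illustration, not generated code): the branch below relies on
# proto-plus wrapper classes exposing their schema via `.meta.fields` while
# lacking the DESCRIPTOR attribute that raw protobuf (*_pb2) classes carry:
from google.protobuf import timestamp_pb2 as _ts_pb2

assert hasattr(_ts_pb2.Timestamp, "DESCRIPTOR")           # raw protobuf class
assert not hasattr(batch.UpdateJobRequest, "DESCRIPTOR")  # proto-plus wrapper
assert "job" in batch.UpdateJobRequest.meta.fields        # proto-plus field map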
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["job"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["job"][field])): - del request_init["job"][field][i][subfield] - else: - del request_init["job"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = gcb_job.Job( - name='name_value', - uid='uid_value', - priority=898, - scheduling_policy=gcb_job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gcb_job.Job.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_job(request) - - # Establish that the response is the type that we expect. 
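# Aside (editor's illustration, not generated code): the serialization round
# trip simulated above; proto-plus messages emit and accept the same JSON the
# mocked transport carries:
_wire = gcb_job.Job.to_json(gcb_job.Job(name='name_value'))
assert gcb_job.Job.from_json(_wire).name == 'name_value'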
-    assert isinstance(response, gcb_job.Job)
-    assert response.name == 'name_value'
-    assert response.uid == 'uid_value'
-    assert response.priority == 898
-    assert response.scheduling_policy == gcb_job.Job.SchedulingPolicy.AS_SOON_AS_POSSIBLE
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_update_job_rest_interceptors(null_interceptor):
-    transport = transports.BatchServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.BatchServiceRestInterceptor(),
-    )
-    client = BatchServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.BatchServiceRestInterceptor, "post_update_job") as post, \
-        mock.patch.object(transports.BatchServiceRestInterceptor, "post_update_job_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.BatchServiceRestInterceptor, "pre_update_job") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = batch.UpdateJobRequest.pb(batch.UpdateJobRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = gcb_job.Job.to_json(gcb_job.Job())
-        req.return_value.content = return_value
-
-        request = batch.UpdateJobRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = gcb_job.Job()
-        post_with_metadata.return_value = gcb_job.Job(), metadata
-
-        client.update_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_list_jobs_rest_bad_request(request_type=batch.ListJobsRequest):
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'projects/sample1/locations/sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.list_jobs(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    batch.ListJobsRequest,
-    dict,
-])
-def test_list_jobs_rest_call_success(request_type):
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'projects/sample1/locations/sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
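# Aside (editor's illustration, not generated code): a fake page like the one
# designated below can also carry results; `jobs` is assumed to be the repeated
# result field, per the standard List* response shape:
_page = batch.ListJobsResponse(
    jobs=[gcb_job.Job(name='jobs/one')],
    next_page_token='next_page_token_value',
)
assert len(_page.jobs) == 1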
-        return_value = batch.ListJobsResponse(
-            next_page_token='next_page_token_value',
-            unreachable=['unreachable_value'],
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-
-        # Convert return value to protobuf type
-        return_value = batch.ListJobsResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.list_jobs(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListJobsPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable == ['unreachable_value']
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_list_jobs_rest_interceptors(null_interceptor):
-    transport = transports.BatchServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.BatchServiceRestInterceptor(),
-    )
-    client = BatchServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.BatchServiceRestInterceptor, "post_list_jobs") as post, \
-        mock.patch.object(transports.BatchServiceRestInterceptor, "post_list_jobs_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.BatchServiceRestInterceptor, "pre_list_jobs") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = batch.ListJobsRequest.pb(batch.ListJobsRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = batch.ListJobsResponse.to_json(batch.ListJobsResponse())
-        req.return_value.content = return_value
-
-        request = batch.ListJobsRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = batch.ListJobsResponse()
-        post_with_metadata.return_value = batch.ListJobsResponse(), metadata
-
-        client.list_jobs(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_get_task_rest_bad_request(request_type=batch.GetTaskRequest):
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/jobs/sample3/taskGroups/sample4/tasks/sample5'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.get_task(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    batch.GetTaskRequest,
-    dict,
-])
-def test_get_task_rest_call_success(request_type):
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/jobs/sample3/taskGroups/sample4/tasks/sample5'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = task.Task(
-            name='name_value',
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-
-        # Convert return value to protobuf type
-        return_value = task.Task.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.get_task(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, task.Task)
-    assert response.name == 'name_value'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_get_task_rest_interceptors(null_interceptor):
-    transport = transports.BatchServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.BatchServiceRestInterceptor(),
-    )
-    client = BatchServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.BatchServiceRestInterceptor, "post_get_task") as post, \
-        mock.patch.object(transports.BatchServiceRestInterceptor, "post_get_task_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.BatchServiceRestInterceptor, "pre_get_task") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = batch.GetTaskRequest.pb(batch.GetTaskRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = task.Task.to_json(task.Task())
-        req.return_value.content = return_value
-
-        request = batch.GetTaskRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = task.Task()
-        post_with_metadata.return_value = task.Task(), metadata
-
-        client.get_task(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_list_tasks_rest_bad_request(request_type=batch.ListTasksRequest):
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'projects/sample1/locations/sample2/jobs/sample3/taskGroups/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.list_tasks(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    batch.ListTasksRequest,
-    dict,
-])
-def test_list_tasks_rest_call_success(request_type):
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'projects/sample1/locations/sample2/jobs/sample3/taskGroups/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = batch.ListTasksResponse(
-            next_page_token='next_page_token_value',
-            unreachable=['unreachable_value'],
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-
-        # Convert return value to protobuf type
-        return_value = batch.ListTasksResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.list_tasks(request)
-
-    # Establish that the response is the type that we expect.
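# Aside (editor's illustration, not generated code): the pager asserted below is
# an iterable that lazily issues follow-up ListTasks calls whenever a page ends
# with a next_page_token; typical consumption is simply
#
#     for t in client.list_tasks(parent='projects/p/locations/l/jobs/j/taskGroups/g'):
#         print(t.name)
#
# (sketch only; the parent value is a placeholder).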
-    assert isinstance(response, pagers.ListTasksPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable == ['unreachable_value']
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_list_tasks_rest_interceptors(null_interceptor):
-    transport = transports.BatchServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.BatchServiceRestInterceptor(),
-    )
-    client = BatchServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.BatchServiceRestInterceptor, "post_list_tasks") as post, \
-        mock.patch.object(transports.BatchServiceRestInterceptor, "post_list_tasks_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.BatchServiceRestInterceptor, "pre_list_tasks") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = batch.ListTasksRequest.pb(batch.ListTasksRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = batch.ListTasksResponse.to_json(batch.ListTasksResponse())
-        req.return_value.content = return_value
-
-        request = batch.ListTasksRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = batch.ListTasksResponse()
-        post_with_metadata.return_value = batch.ListTasksResponse(), metadata
-
-        client.list_tasks(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_create_resource_allowance_rest_bad_request(request_type=batch.CreateResourceAllowanceRequest):
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'projects/sample1/locations/sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_resource_allowance(request) - - -@pytest.mark.parametrize("request_type", [ - batch.CreateResourceAllowanceRequest, - dict, -]) -def test_create_resource_allowance_rest_call_success(request_type): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["resource_allowance"] = {'usage_resource_allowance': {'spec': {'type_': 'type__value', 'limit': {'calendar_period': 1, 'limit': 0.543}}, 'status': {'state': 1, 'limit_status': {'consumption_interval': {'start_time': {'seconds': 751, 'nanos': 543}, 'end_time': {}}, 'limit': 0.543, 'consumed': 0.862}, 'report': {'latest_period_consumptions': {}}}}, 'name': 'name_value', 'uid': 'uid_value', 'create_time': {}, 'labels': {}, 'notifications': [{'pubsub_topic': 'pubsub_topic_value'}]} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = batch.CreateResourceAllowanceRequest.meta.fields["resource_allowance"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
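# Aside (editor's illustration, not generated code): a toy run of the pruning
# performed below: sub-fields present in the sample dict but absent from the
# runtime schema are deleted so the request still constructs.
_runtime = {("spec", "type_")}                        # (field, subfield) pairs known at runtime
_sample = {"spec": {"type_": "t", "added_later": 1}}  # "added_later" is hypothetical
for _f, _v in list(_sample.items()):
    if isinstance(_v, dict):
        for _sub in list(_v):
            if (_f, _sub) not in _runtime:
                del _v[_sub]
assert _sample == {"spec": {"type_": "t"}}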
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["resource_allowance"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["resource_allowance"][field])): - del request_init["resource_allowance"][field][i][subfield] - else: - del request_init["resource_allowance"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = gcb_resource_allowance.ResourceAllowance( - name='name_value', - uid='uid_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gcb_resource_allowance.ResourceAllowance.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_resource_allowance(request) - - # Establish that the response is the type that we expect. 
-    assert isinstance(response, gcb_resource_allowance.ResourceAllowance)
-    assert response.name == 'name_value'
-    assert response.uid == 'uid_value'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_create_resource_allowance_rest_interceptors(null_interceptor):
-    transport = transports.BatchServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.BatchServiceRestInterceptor(),
-    )
-    client = BatchServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.BatchServiceRestInterceptor, "post_create_resource_allowance") as post, \
-        mock.patch.object(transports.BatchServiceRestInterceptor, "post_create_resource_allowance_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.BatchServiceRestInterceptor, "pre_create_resource_allowance") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = batch.CreateResourceAllowanceRequest.pb(batch.CreateResourceAllowanceRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = gcb_resource_allowance.ResourceAllowance.to_json(gcb_resource_allowance.ResourceAllowance())
-        req.return_value.content = return_value
-
-        request = batch.CreateResourceAllowanceRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = gcb_resource_allowance.ResourceAllowance()
-        post_with_metadata.return_value = gcb_resource_allowance.ResourceAllowance(), metadata
-
-        client.create_resource_allowance(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_get_resource_allowance_rest_bad_request(request_type=batch.GetResourceAllowanceRequest):
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/resourceAllowances/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
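# Aside (editor's illustration, not generated code): the metadata used in the
# interceptor tests above is a sequence of (key, value) tuples; on the REST
# transport these are believed to travel as extra HTTP request headers, e.g.
#
#     client.get_resource_allowance(
#         request,
#         metadata=[("key", "val"), ("cephalopod", "squid")],
#     )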
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_resource_allowance(request) - - -@pytest.mark.parametrize("request_type", [ - batch.GetResourceAllowanceRequest, - dict, -]) -def test_get_resource_allowance_rest_call_success(request_type): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/resourceAllowances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = resource_allowance.ResourceAllowance( - name='name_value', - uid='uid_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = resource_allowance.ResourceAllowance.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_resource_allowance(request) - - # Establish that the response is the type that we expect. 
-    assert isinstance(response, resource_allowance.ResourceAllowance)
-    assert response.name == 'name_value'
-    assert response.uid == 'uid_value'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_get_resource_allowance_rest_interceptors(null_interceptor):
-    transport = transports.BatchServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.BatchServiceRestInterceptor(),
-    )
-    client = BatchServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.BatchServiceRestInterceptor, "post_get_resource_allowance") as post, \
-        mock.patch.object(transports.BatchServiceRestInterceptor, "post_get_resource_allowance_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.BatchServiceRestInterceptor, "pre_get_resource_allowance") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = batch.GetResourceAllowanceRequest.pb(batch.GetResourceAllowanceRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = resource_allowance.ResourceAllowance.to_json(resource_allowance.ResourceAllowance())
-        req.return_value.content = return_value
-
-        request = batch.GetResourceAllowanceRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = resource_allowance.ResourceAllowance()
-        post_with_metadata.return_value = resource_allowance.ResourceAllowance(), metadata
-
-        client.get_resource_allowance(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_delete_resource_allowance_rest_bad_request(request_type=batch.DeleteResourceAllowanceRequest):
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/resourceAllowances/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
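# Aside (editor's illustration, not generated code): delete_resource_allowance
# is long-running, so the transport exchanges google.longrunning Operation
# messages as JSON; the round trip the success test below fakes is:
_op_wire = json_format.MessageToJson(operations_pb2.Operation(name='operations/spam'))
assert json_format.Parse(_op_wire, operations_pb2.Operation()).name == 'operations/spam'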
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.delete_resource_allowance(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    batch.DeleteResourceAllowanceRequest,
-    dict,
-])
-def test_delete_resource_allowance_rest_call_success(request_type):
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/resourceAllowances/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.delete_resource_allowance(request)
-
-    # Establish that the response is the type that we expect.
-    assert response.operation.name == return_value.name
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_delete_resource_allowance_rest_interceptors(null_interceptor):
-    transport = transports.BatchServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.BatchServiceRestInterceptor(),
-    )
-    client = BatchServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
-        mock.patch.object(transports.BatchServiceRestInterceptor, "post_delete_resource_allowance") as post, \
-        mock.patch.object(transports.BatchServiceRestInterceptor, "post_delete_resource_allowance_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.BatchServiceRestInterceptor, "pre_delete_resource_allowance") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = batch.DeleteResourceAllowanceRequest.pb(batch.DeleteResourceAllowanceRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = json_format.MessageToJson(operations_pb2.Operation())
-        req.return_value.content = return_value
-
-        request = batch.DeleteResourceAllowanceRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = operations_pb2.Operation()
-        post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
-        client.delete_resource_allowance(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_list_resource_allowances_rest_bad_request(request_type=batch.ListResourceAllowancesRequest):
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'projects/sample1/locations/sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.list_resource_allowances(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    batch.ListResourceAllowancesRequest,
-    dict,
-])
-def test_list_resource_allowances_rest_call_success(request_type):
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'projects/sample1/locations/sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = batch.ListResourceAllowancesResponse(
-            next_page_token='next_page_token_value',
-            unreachable=['unreachable_value'],
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-
-        # Convert return value to protobuf type
-        return_value = batch.ListResourceAllowancesResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.list_resource_allowances(request)
-
-    # Establish that the response is the type that we expect.
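# Aside (editor's note, not generated code): in List* responses, `unreachable`
# conventionally names locations that could not be queried, so a listing can
# succeed partially; the assertions below only verify that both fields survive
# the JSON round trip.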
-    assert isinstance(response, pagers.ListResourceAllowancesPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable == ['unreachable_value']
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_list_resource_allowances_rest_interceptors(null_interceptor):
-    transport = transports.BatchServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.BatchServiceRestInterceptor(),
-    )
-    client = BatchServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.BatchServiceRestInterceptor, "post_list_resource_allowances") as post, \
-        mock.patch.object(transports.BatchServiceRestInterceptor, "post_list_resource_allowances_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.BatchServiceRestInterceptor, "pre_list_resource_allowances") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = batch.ListResourceAllowancesRequest.pb(batch.ListResourceAllowancesRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = batch.ListResourceAllowancesResponse.to_json(batch.ListResourceAllowancesResponse())
-        req.return_value.content = return_value
-
-        request = batch.ListResourceAllowancesRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = batch.ListResourceAllowancesResponse()
-        post_with_metadata.return_value = batch.ListResourceAllowancesResponse(), metadata
-
-        client.list_resource_allowances(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_update_resource_allowance_rest_bad_request(request_type=batch.UpdateResourceAllowanceRequest):
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'resource_allowance': {'name': 'projects/sample1/locations/sample2/resourceAllowances/sample3'}}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_resource_allowance(request) - - -@pytest.mark.parametrize("request_type", [ - batch.UpdateResourceAllowanceRequest, - dict, -]) -def test_update_resource_allowance_rest_call_success(request_type): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'resource_allowance': {'name': 'projects/sample1/locations/sample2/resourceAllowances/sample3'}} - request_init["resource_allowance"] = {'usage_resource_allowance': {'spec': {'type_': 'type__value', 'limit': {'calendar_period': 1, 'limit': 0.543}}, 'status': {'state': 1, 'limit_status': {'consumption_interval': {'start_time': {'seconds': 751, 'nanos': 543}, 'end_time': {}}, 'limit': 0.543, 'consumed': 0.862}, 'report': {'latest_period_consumptions': {}}}}, 'name': 'projects/sample1/locations/sample2/resourceAllowances/sample3', 'uid': 'uid_value', 'create_time': {}, 'labels': {}, 'notifications': [{'pubsub_topic': 'pubsub_topic_value'}]} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = batch.UpdateResourceAllowanceRequest.meta.fields["resource_allowance"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["resource_allowance"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["resource_allowance"][field])): - del request_init["resource_allowance"][field][i][subfield] - else: - del request_init["resource_allowance"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = gcb_resource_allowance.ResourceAllowance( - name='name_value', - uid='uid_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gcb_resource_allowance.ResourceAllowance.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_resource_allowance(request) - - # Establish that the response is the type that we expect. 
-    assert isinstance(response, gcb_resource_allowance.ResourceAllowance)
-    assert response.name == 'name_value'
-    assert response.uid == 'uid_value'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_update_resource_allowance_rest_interceptors(null_interceptor):
-    transport = transports.BatchServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.BatchServiceRestInterceptor(),
-    )
-    client = BatchServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.BatchServiceRestInterceptor, "post_update_resource_allowance") as post, \
-        mock.patch.object(transports.BatchServiceRestInterceptor, "post_update_resource_allowance_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.BatchServiceRestInterceptor, "pre_update_resource_allowance") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = batch.UpdateResourceAllowanceRequest.pb(batch.UpdateResourceAllowanceRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = gcb_resource_allowance.ResourceAllowance.to_json(gcb_resource_allowance.ResourceAllowance())
-        req.return_value.content = return_value
-
-        request = batch.UpdateResourceAllowanceRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = gcb_resource_allowance.ResourceAllowance()
-        post_with_metadata.return_value = gcb_resource_allowance.ResourceAllowance(), metadata
-
-        client.update_resource_allowance(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest):
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-    request = request_type()
-    request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = Request()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.get_location(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    locations_pb2.GetLocationRequest,
-    dict,
-])
-def test_get_location_rest(request_type):
-    client = BatchServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    request_init = {'name': 'projects/sample1/locations/sample2'}
-    request = request_type(**request_init)
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # Designate an appropriate value for the returned response.
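# Aside (editor's illustration, not generated code): the mixin tests in this
# block build their requests with json_format.ParseDict, which fills a protobuf
# message in place from a plain dict:
_loc_req = json_format.ParseDict(
    {'name': 'projects/sample1/locations/sample2'},
    locations_pb2.GetLocationRequest(),
)
assert _loc_req.name == 'projects/sample1/locations/sample2'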
- return_value = locations_pb2.Location() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_location(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocationsRequest): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_locations(request) - - -@pytest.mark.parametrize("request_type", [ - locations_pb2.ListLocationsRequest, - dict, -]) -def test_list_locations_rest(request_type): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.ListLocationsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_locations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - - -def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.cancel_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) -def test_cancel_operation_rest(request_type): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.DeleteOperationRequest, - dict, -]) -def test_delete_operation_rest(request_type): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. 
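# Aside (editor's note, not generated code): operation cancel/delete RPCs return
# google.protobuf.Empty, which the client surfaces as None, hence the mocked
# '{}' body above and the `assert response is None` checks in these tests.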
- assert response is None - - -def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) -def test_get_operation_rest(request_type): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_operations(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) -def test_list_operations_rest(request_type): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_operations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_initialize_client_w_rest(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_job_empty_call_rest(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_job), - '__call__') as call: - client.create_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.CreateJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_job_empty_call_rest(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - client.get_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.GetJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_job_empty_call_rest(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_job), - '__call__') as call: - client.delete_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.DeleteJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_cancel_job_empty_call_rest(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.cancel_job), - '__call__') as call: - client.cancel_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.CancelJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_update_job_empty_call_rest(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_job), - '__call__') as call: - client.update_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.UpdateJobRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_jobs_empty_call_rest(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - client.list_jobs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.ListJobsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_task_empty_call_rest(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_task), - '__call__') as call: - client.get_task(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.GetTaskRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_tasks_empty_call_rest(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_tasks), - '__call__') as call: - client.list_tasks(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.ListTasksRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_resource_allowance_empty_call_rest(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_resource_allowance), - '__call__') as call: - client.create_resource_allowance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.CreateResourceAllowanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_resource_allowance_empty_call_rest(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.get_resource_allowance), - '__call__') as call: - client.get_resource_allowance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.GetResourceAllowanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_resource_allowance_empty_call_rest(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_resource_allowance), - '__call__') as call: - client.delete_resource_allowance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.DeleteResourceAllowanceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_resource_allowances_empty_call_rest(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_resource_allowances), - '__call__') as call: - client.list_resource_allowances(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.ListResourceAllowancesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_resource_allowance_empty_call_rest(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_resource_allowance), - '__call__') as call: - client.update_resource_allowance(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = batch.UpdateResourceAllowanceRequest() - - assert args[0] == request_msg - - -def test_batch_service_rest_lro_client(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - transport = client.transport - - # Ensure that we have an api-core operations client. - assert isinstance( - transport.operations_client, -operations_v1.AbstractOperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. 
- client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.BatchServiceGrpcTransport, - ) - -def test_batch_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.BatchServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_batch_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.batch_v1alpha.services.batch_service.transports.BatchServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.BatchServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'create_job', - 'get_job', - 'delete_job', - 'cancel_job', - 'update_job', - 'list_jobs', - 'get_task', - 'list_tasks', - 'create_resource_allowance', - 'get_resource_allowance', - 'delete_resource_allowance', - 'list_resource_allowances', - 'update_resource_allowance', - 'get_location', - 'list_locations', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_batch_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.batch_v1alpha.services.batch_service.transports.BatchServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.BatchServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_batch_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.batch_v1alpha.services.batch_service.transports.BatchServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.BatchServiceTransport() - adc.assert_called_once() - - -def test_batch_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - BatchServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.BatchServiceGrpcTransport, - transports.BatchServiceGrpcAsyncIOTransport, - ], -) -def test_batch_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.BatchServiceGrpcTransport, - transports.BatchServiceGrpcAsyncIOTransport, - transports.BatchServiceRestTransport, - ], -) -def test_batch_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.BatchServiceGrpcTransport, grpc_helpers), - (transports.BatchServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_batch_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "batch.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="batch.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.BatchServiceGrpcTransport, transports.BatchServiceGrpcAsyncIOTransport]) -def test_batch_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. 
- with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_batch_service_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.BatchServiceRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_batch_service_host_no_port(transport_name): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='batch.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'batch.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://batch.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_batch_service_host_with_port(transport_name): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='batch.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'batch.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://batch.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_batch_service_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = BatchServiceClient( - credentials=creds1, - transport=transport_name, - ) - client2 = BatchServiceClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.create_job._session - session2 = client2.transport.create_job._session - assert session1 != session2 - session1 = client1.transport.get_job._session - session2 = client2.transport.get_job._session - assert session1 != session2 - session1 = client1.transport.delete_job._session - session2 = client2.transport.delete_job._session - assert session1 != session2 - session1 = client1.transport.cancel_job._session - session2 = client2.transport.cancel_job._session - assert session1 != session2 - session1 = client1.transport.update_job._session - session2 = 
client2.transport.update_job._session
- assert session1 != session2
- session1 = client1.transport.list_jobs._session
- session2 = client2.transport.list_jobs._session
- assert session1 != session2
- session1 = client1.transport.get_task._session
- session2 = client2.transport.get_task._session
- assert session1 != session2
- session1 = client1.transport.list_tasks._session
- session2 = client2.transport.list_tasks._session
- assert session1 != session2
- session1 = client1.transport.create_resource_allowance._session
- session2 = client2.transport.create_resource_allowance._session
- assert session1 != session2
- session1 = client1.transport.get_resource_allowance._session
- session2 = client2.transport.get_resource_allowance._session
- assert session1 != session2
- session1 = client1.transport.delete_resource_allowance._session
- session2 = client2.transport.delete_resource_allowance._session
- assert session1 != session2
- session1 = client1.transport.list_resource_allowances._session
- session2 = client2.transport.list_resource_allowances._session
- assert session1 != session2
- session1 = client1.transport.update_resource_allowance._session
- session2 = client2.transport.update_resource_allowance._session
- assert session1 != session2
-def test_batch_service_grpc_transport_channel():
- channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
- # Check that channel is used if provided.
- transport = transports.BatchServiceGrpcTransport(
- host="squid.clam.whelk",
- channel=channel,
- )
- assert transport.grpc_channel == channel
- assert transport._host == "squid.clam.whelk:443"
- assert transport._ssl_channel_credentials is None
-
-
-def test_batch_service_grpc_asyncio_transport_channel():
- channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
- # Check that channel is used if provided.
- transport = transports.BatchServiceGrpcAsyncIOTransport(
- host="squid.clam.whelk",
- channel=channel,
- )
- assert transport.grpc_channel == channel
- assert transport._host == "squid.clam.whelk:443"
- assert transport._ssl_channel_credentials is None
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.BatchServiceGrpcTransport, transports.BatchServiceGrpcAsyncIOTransport])
-def test_batch_service_transport_channel_mtls_with_client_cert_source(
- transport_class
-):
- with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
- with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
- mock_ssl_cred = mock.Mock()
- grpc_ssl_channel_cred.return_value = mock_ssl_cred
-
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
-
- cred = ga_credentials.AnonymousCredentials()
- with pytest.warns(DeprecationWarning):
- with mock.patch.object(google.auth, 'default') as adc:
- adc.return_value = (cred, None)
- transport = transport_class(
- host="squid.clam.whelk",
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=client_cert_source_callback,
- )
- adc.assert_called_once()
-
- grpc_ssl_channel_cred.assert_called_once_with(
- certificate_chain=b"cert bytes", private_key=b"key bytes"
- )
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=cred,
- credentials_file=None,
- scopes=None,
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
- options=[
- ("grpc.max_send_message_length", -1),
- ("grpc.max_receive_message_length", -1),
- ],
- )
- assert transport.grpc_channel == mock_grpc_channel
- assert transport._ssl_channel_credentials == mock_ssl_cred
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.BatchServiceGrpcTransport, transports.BatchServiceGrpcAsyncIOTransport])
-def test_batch_service_transport_channel_mtls_with_adc(
- transport_class
-):
- mock_ssl_cred = mock.Mock()
- with mock.patch.multiple(
- "google.auth.transport.grpc.SslCredentials",
- __init__=mock.Mock(return_value=None),
- ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
- ):
- with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
- mock_cred = mock.Mock()
-
- with pytest.warns(DeprecationWarning):
- transport = transport_class(
- host="squid.clam.whelk",
- credentials=mock_cred,
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=None,
- )
-
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=mock_cred,
- credentials_file=None,
- scopes=None,
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
- options=[
- ("grpc.max_send_message_length", -1),
- ("grpc.max_receive_message_length", -1),
- ],
- )
- assert transport.grpc_channel == mock_grpc_channel
-
-
-def test_batch_service_grpc_lro_client():
- client = BatchServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
- transport = client.transport
-
- # Ensure that we have an api-core operations client.
- assert isinstance(
- transport.operations_client,
- operations_v1.OperationsClient,
- )
-
- # Ensure that subsequent calls to the property send the exact same object.
- assert transport.operations_client is transport.operations_client
-
-
-def test_batch_service_grpc_lro_async_client():
- client = BatchServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc_asyncio',
- )
- transport = client.transport
-
- # Ensure that we have an api-core operations client.
- assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_job_path(): - project = "squid" - location = "clam" - job = "whelk" - expected = "projects/{project}/locations/{location}/jobs/{job}".format(project=project, location=location, job=job, ) - actual = BatchServiceClient.job_path(project, location, job) - assert expected == actual - - -def test_parse_job_path(): - expected = { - "project": "octopus", - "location": "oyster", - "job": "nudibranch", - } - path = BatchServiceClient.job_path(**expected) - - # Check that the path construction is reversible. - actual = BatchServiceClient.parse_job_path(path) - assert expected == actual - -def test_resource_allowance_path(): - project = "cuttlefish" - location = "mussel" - resource_allowance = "winkle" - expected = "projects/{project}/locations/{location}/resourceAllowances/{resource_allowance}".format(project=project, location=location, resource_allowance=resource_allowance, ) - actual = BatchServiceClient.resource_allowance_path(project, location, resource_allowance) - assert expected == actual - - -def test_parse_resource_allowance_path(): - expected = { - "project": "nautilus", - "location": "scallop", - "resource_allowance": "abalone", - } - path = BatchServiceClient.resource_allowance_path(**expected) - - # Check that the path construction is reversible. - actual = BatchServiceClient.parse_resource_allowance_path(path) - assert expected == actual - -def test_task_path(): - project = "squid" - location = "clam" - job = "whelk" - task_group = "octopus" - task = "oyster" - expected = "projects/{project}/locations/{location}/jobs/{job}/taskGroups/{task_group}/tasks/{task}".format(project=project, location=location, job=job, task_group=task_group, task=task, ) - actual = BatchServiceClient.task_path(project, location, job, task_group, task) - assert expected == actual - - -def test_parse_task_path(): - expected = { - "project": "nudibranch", - "location": "cuttlefish", - "job": "mussel", - "task_group": "winkle", - "task": "nautilus", - } - path = BatchServiceClient.task_path(**expected) - - # Check that the path construction is reversible. - actual = BatchServiceClient.parse_task_path(path) - assert expected == actual - -def test_task_group_path(): - project = "scallop" - location = "abalone" - job = "squid" - task_group = "clam" - expected = "projects/{project}/locations/{location}/jobs/{job}/taskGroups/{task_group}".format(project=project, location=location, job=job, task_group=task_group, ) - actual = BatchServiceClient.task_group_path(project, location, job, task_group) - assert expected == actual - - -def test_parse_task_group_path(): - expected = { - "project": "whelk", - "location": "octopus", - "job": "oyster", - "task_group": "nudibranch", - } - path = BatchServiceClient.task_group_path(**expected) - - # Check that the path construction is reversible. 
- actual = BatchServiceClient.parse_task_group_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = BatchServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "mussel", - } - path = BatchServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = BatchServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "winkle" - expected = "folders/{folder}".format(folder=folder, ) - actual = BatchServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nautilus", - } - path = BatchServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = BatchServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "scallop" - expected = "organizations/{organization}".format(organization=organization, ) - actual = BatchServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "abalone", - } - path = BatchServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = BatchServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "squid" - expected = "projects/{project}".format(project=project, ) - actual = BatchServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "clam", - } - path = BatchServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = BatchServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "whelk" - location = "octopus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = BatchServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - } - path = BatchServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = BatchServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.BatchServiceTransport, '_prep_wrapped_messages') as prep: - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.BatchServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = BatchServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_delete_operation(transport: str = "grpc"): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_field_headers(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_delete_operation_from_dict(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_cancel_operation_from_dict(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_list_operations_field_headers(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_operations_from_dict(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_locations(transport: str = "grpc"): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - -def test_list_locations_field_headers(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() - - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_locations_from_dict(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_location(transport: str = "grpc"): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - -def test_get_location_field_headers(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials()) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = locations_pb2.Location() - - client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_location_field_headers_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials() - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"] - -def test_get_location_from_dict(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - - response = client.get_location( - request={ - "name": "locations/abc", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_location_from_dict_async(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_transport_close_grpc(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = BatchServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close_rest(): - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = BatchServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. - with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (BatchServiceClient, transports.BatchServiceGrpcTransport), - (BatchServiceAsyncClient, transports.BatchServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/.coveragerc b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/.coveragerc deleted file mode 100644 index 040b82c6cc08..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/beyondcorp_appconnections/__init__.py - google/cloud/beyondcorp_appconnections/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/.flake8 b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/.flake8 deleted file mode 100644 index 29227d4cf419..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# 
Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. - **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/MANIFEST.in b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/MANIFEST.in deleted file mode 100644 index 98aecbf30d34..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/beyondcorp_appconnections *.py -recursive-include google/cloud/beyondcorp_appconnections_v1 *.py diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/README.rst b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/README.rst deleted file mode 100644 index 022407eafad4..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/README.rst +++ /dev/null @@ -1,143 +0,0 @@ -Python Client for Google Cloud Beyondcorp Appconnections API -============================================================ - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Beyondcorp Appconnections API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv <your-env> - source <your-env>/bin/activate - <your-env>/bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. code-block:: console - - python3 -m venv <your-env> - <your-env>\Scripts\activate - <your-env>\Scripts\pip.exe install \path\to\library - - -Logging ------- - -This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes. -Note the following: - -#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging. -#. 
Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**. -#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below. - - -Simple, environment-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google -logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged -messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging -event. - -A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log. - -- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc. -- Invalid logging scopes: :code:`foo`, :code:`123`, etc. - -**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers. - - -Examples -^^^^^^^^ - -- Enabling the default handler for all Google-based loggers - -.. code-block:: console - - export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google - -- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`): - -.. code-block:: console - - export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1 - - -Advanced, code-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -You can also configure a valid logging scope using Python's standard `logging` mechanism. - - -Examples -^^^^^^^^ - -- Configuring a handler for all Google-based loggers - -.. code-block:: python - - import logging - - from google.cloud.translate_v3 import translate - - base_logger = logging.getLogger("google") - base_logger.addHandler(logging.StreamHandler()) - base_logger.setLevel(logging.DEBUG) - -- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`): - -.. code-block:: python - - import logging - - from google.cloud.translate_v3 import translate - - base_logger = logging.getLogger("google.cloud.library_v1") - base_logger.addHandler(logging.StreamHandler()) - base_logger.setLevel(logging.DEBUG) - - -Logging details -~~~~~~~~~~~~~~~ - -#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root - logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set - :code:`logging.getLogger("google").propagate = True` in your code. -#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for - one library, but decide you need to also set up environment-based logging configuration for another library. - - #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual - if the code-based configuration gets applied first. - -#. 
The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get - executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured. - (This is the reason for 2.i. above.) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/docs/_static/custom.css b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/docs/_static/custom.css deleted file mode 100644 index 06423be0b592..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/docs/_static/custom.css +++ /dev/null @@ -1,3 +0,0 @@ -dl.field-list > dt { - min-width: 100px -} diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/docs/beyondcorp_appconnections_v1/app_connections_service.rst b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/docs/beyondcorp_appconnections_v1/app_connections_service.rst deleted file mode 100644 index 66fd5c6b25e9..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/docs/beyondcorp_appconnections_v1/app_connections_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -AppConnectionsService ---------------------------------------- - -.. automodule:: google.cloud.beyondcorp_appconnections_v1.services.app_connections_service - :members: - :inherited-members: - -.. automodule:: google.cloud.beyondcorp_appconnections_v1.services.app_connections_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/docs/beyondcorp_appconnections_v1/services_.rst b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/docs/beyondcorp_appconnections_v1/services_.rst deleted file mode 100644 index a214c2ba89db..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/docs/beyondcorp_appconnections_v1/services_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Beyondcorp Appconnections v1 API -========================================================== -.. toctree:: - :maxdepth: 2 - - app_connections_service diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/docs/beyondcorp_appconnections_v1/types_.rst b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/docs/beyondcorp_appconnections_v1/types_.rst deleted file mode 100644 index 751c1ccc111f..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/docs/beyondcorp_appconnections_v1/types_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Beyondcorp Appconnections v1 API -======================================================= - -.. automodule:: google.cloud.beyondcorp_appconnections_v1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/docs/conf.py b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/docs/conf.py deleted file mode 100644 index 5ec5667f2b5c..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-beyondcorp-appconnections documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGELOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffixes as a list of strings: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = u"google-cloud-beyondcorp-appconnections" -copyright = u"2023, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = 'en' - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. 
-exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. 
-# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-beyondcorp-appconnections-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-beyondcorp-appconnections.tex", - u"google-cloud-beyondcorp-appconnections Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-beyondcorp-appconnections", - u"Google Cloud Beyondcorp Appconnections Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. 
-# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-beyondcorp-appconnections", - u"google-cloud-beyondcorp-appconnections Documentation", - author, - "google-cloud-beyondcorp-appconnections", - "GAPIC library for Google Cloud Beyondcorp Appconnections API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/docs/index.rst b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/docs/index.rst deleted file mode 100644 index 10338aa21635..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - beyondcorp_appconnections_v1/services_ - beyondcorp_appconnections_v1/types_ diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections/__init__.py deleted file mode 100644 index 36a6143838ee..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections/__init__.py +++ /dev/null @@ -1,47 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.beyondcorp_appconnections import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.beyondcorp_appconnections_v1.services.app_connections_service.client import AppConnectionsServiceClient -from google.cloud.beyondcorp_appconnections_v1.services.app_connections_service.async_client import AppConnectionsServiceAsyncClient - -from google.cloud.beyondcorp_appconnections_v1.types.app_connections_service import AppConnection -from google.cloud.beyondcorp_appconnections_v1.types.app_connections_service import AppConnectionOperationMetadata -from google.cloud.beyondcorp_appconnections_v1.types.app_connections_service import CreateAppConnectionRequest -from google.cloud.beyondcorp_appconnections_v1.types.app_connections_service import DeleteAppConnectionRequest -from google.cloud.beyondcorp_appconnections_v1.types.app_connections_service import GetAppConnectionRequest -from google.cloud.beyondcorp_appconnections_v1.types.app_connections_service import ListAppConnectionsRequest -from google.cloud.beyondcorp_appconnections_v1.types.app_connections_service import ListAppConnectionsResponse -from google.cloud.beyondcorp_appconnections_v1.types.app_connections_service import ResolveAppConnectionsRequest -from google.cloud.beyondcorp_appconnections_v1.types.app_connections_service import ResolveAppConnectionsResponse -from google.cloud.beyondcorp_appconnections_v1.types.app_connections_service import UpdateAppConnectionRequest - -__all__ = ('AppConnectionsServiceClient', - 'AppConnectionsServiceAsyncClient', - 'AppConnection', - 'AppConnectionOperationMetadata', - 'CreateAppConnectionRequest', - 'DeleteAppConnectionRequest', - 'GetAppConnectionRequest', - 'ListAppConnectionsRequest', - 'ListAppConnectionsResponse', - 'ResolveAppConnectionsRequest', - 'ResolveAppConnectionsResponse', - 'UpdateAppConnectionRequest', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections/gapic_version.py b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections/py.typed b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections/py.typed deleted file mode 100644 index a82b08bb7ba0..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-beyondcorp-appconnections package uses inline types. 
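The package-level ``__init__.py`` above re-exports the versioned client classes and request/response types, so callers can import them without referencing the ``_v1`` module directly. As a rough usage sketch (not part of the deleted files; the project and location IDs are hypothetical placeholders, and ambient Google credentials are assumed):

.. code-block:: python

    from google.cloud.beyondcorp_appconnections import (
        AppConnectionsServiceClient,
        ListAppConnectionsRequest,
    )

    # Assumes Application Default Credentials are available in the environment.
    client = AppConnectionsServiceClient()

    # "my-project" and "us-central1" are placeholder IDs; the parent follows
    # the documented form projects/{project_id}/locations/{location_id}.
    request = ListAppConnectionsRequest(
        parent="projects/my-project/locations/us-central1",
    )

    # The call returns a pager that fetches further pages as it is iterated.
    for app_connection in client.list_app_connections(request=request):
        print(app_connection.name)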
diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/__init__.py deleted file mode 100644 index 3b0d4f469a98..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/__init__.py +++ /dev/null @@ -1,48 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.beyondcorp_appconnections_v1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.app_connections_service import AppConnectionsServiceClient -from .services.app_connections_service import AppConnectionsServiceAsyncClient - -from .types.app_connections_service import AppConnection -from .types.app_connections_service import AppConnectionOperationMetadata -from .types.app_connections_service import CreateAppConnectionRequest -from .types.app_connections_service import DeleteAppConnectionRequest -from .types.app_connections_service import GetAppConnectionRequest -from .types.app_connections_service import ListAppConnectionsRequest -from .types.app_connections_service import ListAppConnectionsResponse -from .types.app_connections_service import ResolveAppConnectionsRequest -from .types.app_connections_service import ResolveAppConnectionsResponse -from .types.app_connections_service import UpdateAppConnectionRequest - -__all__ = ( - 'AppConnectionsServiceAsyncClient', -'AppConnection', -'AppConnectionOperationMetadata', -'AppConnectionsServiceClient', -'CreateAppConnectionRequest', -'DeleteAppConnectionRequest', -'GetAppConnectionRequest', -'ListAppConnectionsRequest', -'ListAppConnectionsResponse', -'ResolveAppConnectionsRequest', -'ResolveAppConnectionsResponse', -'UpdateAppConnectionRequest', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/gapic_metadata.json b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/gapic_metadata.json deleted file mode 100644 index 7187d65eb843..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/gapic_metadata.json +++ /dev/null @@ -1,118 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.beyondcorp_appconnections_v1", - "protoPackage": "google.cloud.beyondcorp.appconnections.v1", - "schema": "1.0", - "services": { - "AppConnectionsService": { - "clients": { - "grpc": { - "libraryClient": "AppConnectionsServiceClient", - "rpcs": { - "CreateAppConnection": { - "methods": [ - "create_app_connection" - ] - }, - "DeleteAppConnection": { - "methods": [ - "delete_app_connection" - ] - }, - "GetAppConnection": { - "methods": [ - "get_app_connection" - ] - }, - 
"ListAppConnections": { - "methods": [ - "list_app_connections" - ] - }, - "ResolveAppConnections": { - "methods": [ - "resolve_app_connections" - ] - }, - "UpdateAppConnection": { - "methods": [ - "update_app_connection" - ] - } - } - }, - "grpc-async": { - "libraryClient": "AppConnectionsServiceAsyncClient", - "rpcs": { - "CreateAppConnection": { - "methods": [ - "create_app_connection" - ] - }, - "DeleteAppConnection": { - "methods": [ - "delete_app_connection" - ] - }, - "GetAppConnection": { - "methods": [ - "get_app_connection" - ] - }, - "ListAppConnections": { - "methods": [ - "list_app_connections" - ] - }, - "ResolveAppConnections": { - "methods": [ - "resolve_app_connections" - ] - }, - "UpdateAppConnection": { - "methods": [ - "update_app_connection" - ] - } - } - }, - "rest": { - "libraryClient": "AppConnectionsServiceClient", - "rpcs": { - "CreateAppConnection": { - "methods": [ - "create_app_connection" - ] - }, - "DeleteAppConnection": { - "methods": [ - "delete_app_connection" - ] - }, - "GetAppConnection": { - "methods": [ - "get_app_connection" - ] - }, - "ListAppConnections": { - "methods": [ - "list_app_connections" - ] - }, - "ResolveAppConnections": { - "methods": [ - "resolve_app_connections" - ] - }, - "UpdateAppConnection": { - "methods": [ - "update_app_connection" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/gapic_version.py b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/py.typed b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/py.typed deleted file mode 100644 index a82b08bb7ba0..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-beyondcorp-appconnections package uses inline types. 
diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/services/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/services/__init__.py deleted file mode 100644 index 8f6cf068242c..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/__init__.py deleted file mode 100644 index a9ee9957b5e7..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import AppConnectionsServiceClient -from .async_client import AppConnectionsServiceAsyncClient - -__all__ = ( - 'AppConnectionsServiceClient', - 'AppConnectionsServiceAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/async_client.py b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/async_client.py deleted file mode 100644 index b76a7d2f6357..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/async_client.py +++ /dev/null @@ -1,1684 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.beyondcorp_appconnections_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.beyondcorp_appconnections_v1.services.app_connections_service import pagers -from google.cloud.beyondcorp_appconnections_v1.types import app_connections_service -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import AppConnectionsServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import AppConnectionsServiceGrpcAsyncIOTransport -from .client import AppConnectionsServiceClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -class AppConnectionsServiceAsyncClient: - """API Overview: - - The ``beyondcorp.googleapis.com`` service implements the Google - Cloud BeyondCorp API. - - Data Model: - - The AppConnectionsService exposes the following resources: - - - AppConnections, named as follows: - ``projects/{project_id}/locations/{location_id}/appConnections/{app_connection_id}``. - - The AppConnectionsService service provides methods to manage - (create/read/update/delete) BeyondCorp AppConnections. - """ - - _client: AppConnectionsServiceClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
- DEFAULT_ENDPOINT = AppConnectionsServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = AppConnectionsServiceClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = AppConnectionsServiceClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = AppConnectionsServiceClient._DEFAULT_UNIVERSE - - app_connection_path = staticmethod(AppConnectionsServiceClient.app_connection_path) - parse_app_connection_path = staticmethod(AppConnectionsServiceClient.parse_app_connection_path) - app_connector_path = staticmethod(AppConnectionsServiceClient.app_connector_path) - parse_app_connector_path = staticmethod(AppConnectionsServiceClient.parse_app_connector_path) - app_gateway_path = staticmethod(AppConnectionsServiceClient.app_gateway_path) - parse_app_gateway_path = staticmethod(AppConnectionsServiceClient.parse_app_gateway_path) - common_billing_account_path = staticmethod(AppConnectionsServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(AppConnectionsServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(AppConnectionsServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(AppConnectionsServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(AppConnectionsServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(AppConnectionsServiceClient.parse_common_organization_path) - common_project_path = staticmethod(AppConnectionsServiceClient.common_project_path) - parse_common_project_path = staticmethod(AppConnectionsServiceClient.parse_common_project_path) - common_location_path = staticmethod(AppConnectionsServiceClient.common_location_path) - parse_common_location_path = staticmethod(AppConnectionsServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - AppConnectionsServiceAsyncClient: The constructed client. - """ - return AppConnectionsServiceClient.from_service_account_info.__func__(AppConnectionsServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - AppConnectionsServiceAsyncClient: The constructed client. - """ - return AppConnectionsServiceClient.from_service_account_file.__func__(AppConnectionsServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. 
- (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` is provided, use the provided one. - (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return AppConnectionsServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> AppConnectionsServiceTransport: - """Returns the transport used by the client instance. - - Returns: - AppConnectionsServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = AppConnectionsServiceClient.get_transport_class - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, AppConnectionsServiceTransport, Callable[..., AppConnectionsServiceTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the app connections service async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,AppConnectionsServiceTransport,Callable[..., AppConnectionsServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the AppConnectionsServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. 
Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which can have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = AppConnectionsServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.beyondcorp.appconnections_v1.AppConnectionsServiceAsyncClient`.", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { - "serviceName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService", - "credentialsType": None, - } - ) - - async def list_app_connections(self, - request: Optional[Union[app_connections_service.ListAppConnectionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListAppConnectionsAsyncPager: - r"""Lists AppConnections in a given project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_appconnections_v1 - - async def sample_list_app_connections(): - # Create a client - client = beyondcorp_appconnections_v1.AppConnectionsServiceAsyncClient() - - # Initialize request argument(s) - request = beyondcorp_appconnections_v1.ListAppConnectionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = await client.list_app_connections(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.beyondcorp_appconnections_v1.types.ListAppConnectionsRequest, dict]]): - The request object. Request message for - BeyondCorp.ListAppConnections. - parent (:class:`str`): - Required. The resource name of the AppConnection - location using the form: - ``projects/{project_id}/locations/{location_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.beyondcorp_appconnections_v1.services.app_connections_service.pagers.ListAppConnectionsAsyncPager: - Response message for - BeyondCorp.ListAppConnections. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, app_connections_service.ListAppConnectionsRequest): - request = app_connections_service.ListAppConnectionsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_app_connections] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. 
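- # (The pager keeps references to the wrapped RPC, the request, and the - # original retry/timeout/metadata so it can lazily issue follow-up calls - # for subsequent pages while the caller iterates.)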
- response = pagers.ListAppConnectionsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_app_connection(self, - request: Optional[Union[app_connections_service.GetAppConnectionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> app_connections_service.AppConnection: - r"""Gets details of a single AppConnection. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_appconnections_v1 - - async def sample_get_app_connection(): - # Create a client - client = beyondcorp_appconnections_v1.AppConnectionsServiceAsyncClient() - - # Initialize request argument(s) - request = beyondcorp_appconnections_v1.GetAppConnectionRequest( - name="name_value", - ) - - # Make the request - response = await client.get_app_connection(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.beyondcorp_appconnections_v1.types.GetAppConnectionRequest, dict]]): - The request object. Request message for - BeyondCorp.GetAppConnection. - name (:class:`str`): - Required. BeyondCorp AppConnection name using the form: - ``projects/{project_id}/locations/{location_id}/appConnections/{app_connection_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.beyondcorp_appconnections_v1.types.AppConnection: - A BeyondCorp AppConnection resource - represents a BeyondCorp protected - AppConnection to a remote application. - It creates all the necessary GCP - components needed for creating a - BeyondCorp protected AppConnection. - Multiple connectors can be authorised - for a single AppConnection. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
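-        # Illustrative note (hand-written, not generated): a plain dict and the
-        # proto-plus request type are interchangeable inputs here, so a call like
-        #
-        #   await client.get_app_connection(
-        #       request={"name": "projects/p/locations/l/appConnections/c"})
-        #
-        # is coerced to a GetAppConnectionRequest below; the resource name shown
-        # is a hypothetical placeholder.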
- if not isinstance(request, app_connections_service.GetAppConnectionRequest): - request = app_connections_service.GetAppConnectionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_app_connection] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_app_connection(self, - request: Optional[Union[app_connections_service.CreateAppConnectionRequest, dict]] = None, - *, - parent: Optional[str] = None, - app_connection: Optional[app_connections_service.AppConnection] = None, - app_connection_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates a new AppConnection in a given project and - location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_appconnections_v1 - - async def sample_create_app_connection(): - # Create a client - client = beyondcorp_appconnections_v1.AppConnectionsServiceAsyncClient() - - # Initialize request argument(s) - app_connection = beyondcorp_appconnections_v1.AppConnection() - app_connection.name = "name_value" - app_connection.type_ = "TCP_PROXY" - app_connection.application_endpoint.host = "host_value" - app_connection.application_endpoint.port = 453 - - request = beyondcorp_appconnections_v1.CreateAppConnectionRequest( - parent="parent_value", - app_connection=app_connection, - ) - - # Make the request - operation = client.create_app_connection(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.beyondcorp_appconnections_v1.types.CreateAppConnectionRequest, dict]]): - The request object. Request message for - BeyondCorp.CreateAppConnection. - parent (:class:`str`): - Required. The resource project name of the AppConnection - location using the form: - ``projects/{project_id}/locations/{location_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - app_connection (:class:`google.cloud.beyondcorp_appconnections_v1.types.AppConnection`): - Required. A BeyondCorp AppConnection - resource. - - This corresponds to the ``app_connection`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- app_connection_id (:class:`str`): - Optional. User-settable AppConnection resource ID. - - - Must start with a letter. - - Must contain between 4-63 characters from - ``/[a-z][0-9]-/``. - - Must end with a number or a letter. - - This corresponds to the ``app_connection_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.beyondcorp_appconnections_v1.types.AppConnection` A BeyondCorp AppConnection resource represents a BeyondCorp protected - AppConnection to a remote application. It creates all - the necessary GCP components needed for creating a - BeyondCorp protected AppConnection. Multiple - connectors can be authorised for a single - AppConnection. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, app_connection, app_connection_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, app_connections_service.CreateAppConnectionRequest): - request = app_connections_service.CreateAppConnectionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if app_connection is not None: - request.app_connection = app_connection - if app_connection_id is not None: - request.app_connection_id = app_connection_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_app_connection] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - app_connections_service.AppConnection, - metadata_type=app_connections_service.AppConnectionOperationMetadata, - ) - - # Done; return the response. 
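-        # Caller-side sketch (hand-written, with placeholder values):
-        #
-        #   operation = await client.create_app_connection(
-        #       parent="projects/p/locations/l",
-        #       app_connection=app_connection,
-        #       app_connection_id="my-connection-1",
-        #   )
-        #   result = await operation.result()  # resolves to an AppConnection
-        #
-        # Note the two awaits: one for the RPC itself, one for completion of the
-        # long-running operation.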
- return response - - async def update_app_connection(self, - request: Optional[Union[app_connections_service.UpdateAppConnectionRequest, dict]] = None, - *, - app_connection: Optional[app_connections_service.AppConnection] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates the parameters of a single AppConnection. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_appconnections_v1 - - async def sample_update_app_connection(): - # Create a client - client = beyondcorp_appconnections_v1.AppConnectionsServiceAsyncClient() - - # Initialize request argument(s) - app_connection = beyondcorp_appconnections_v1.AppConnection() - app_connection.name = "name_value" - app_connection.type_ = "TCP_PROXY" - app_connection.application_endpoint.host = "host_value" - app_connection.application_endpoint.port = 453 - - request = beyondcorp_appconnections_v1.UpdateAppConnectionRequest( - app_connection=app_connection, - ) - - # Make the request - operation = client.update_app_connection(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.beyondcorp_appconnections_v1.types.UpdateAppConnectionRequest, dict]]): - The request object. Request message for - BeyondCorp.UpdateAppConnection. - app_connection (:class:`google.cloud.beyondcorp_appconnections_v1.types.AppConnection`): - Required. AppConnection message with updated fields. - Only supported fields specified in update_mask are - updated. - - This corresponds to the ``app_connection`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Mask of fields to update. At least one path - must be supplied in this field. The elements of the - repeated paths field may only include these fields from - [BeyondCorp.AppConnection]: - - - ``labels`` - - ``display_name`` - - ``application_endpoint`` - - ``connectors`` - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. 
- - The result type for the operation will be :class:`google.cloud.beyondcorp_appconnections_v1.types.AppConnection` A BeyondCorp AppConnection resource represents a BeyondCorp protected - AppConnection to a remote application. It creates all - the necessary GCP components needed for creating a - BeyondCorp protected AppConnection. Multiple - connectors can be authorised for a single - AppConnection. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [app_connection, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, app_connections_service.UpdateAppConnectionRequest): - request = app_connections_service.UpdateAppConnectionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if app_connection is not None: - request.app_connection = app_connection - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_app_connection] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("app_connection.name", request.app_connection.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - app_connections_service.AppConnection, - metadata_type=app_connections_service.AppConnectionOperationMetadata, - ) - - # Done; return the response. - return response - - async def delete_app_connection(self, - request: Optional[Union[app_connections_service.DeleteAppConnectionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes a single AppConnection. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_appconnections_v1 - - async def sample_delete_app_connection(): - # Create a client - client = beyondcorp_appconnections_v1.AppConnectionsServiceAsyncClient() - - # Initialize request argument(s) - request = beyondcorp_appconnections_v1.DeleteAppConnectionRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_app_connection(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.beyondcorp_appconnections_v1.types.DeleteAppConnectionRequest, dict]]): - The request object. Request message for - BeyondCorp.DeleteAppConnection. - name (:class:`str`): - Required. BeyondCorp Connector name using the form: - ``projects/{project_id}/locations/{location_id}/appConnections/{app_connection_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, app_connections_service.DeleteAppConnectionRequest): - request = app_connections_service.DeleteAppConnectionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_app_connection] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
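-        # Hand-written illustration (the resource name is a placeholder):
-        #
-        #   op = await client.delete_app_connection(
-        #       name="projects/p/locations/l/appConnections/c")
-        #   await op.result()  # resolves to empty_pb2.Empty once deletion is done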
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=app_connections_service.AppConnectionOperationMetadata, - ) - - # Done; return the response. - return response - - async def resolve_app_connections(self, - request: Optional[Union[app_connections_service.ResolveAppConnectionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ResolveAppConnectionsAsyncPager: - r"""Resolves AppConnections details for a given - AppConnector. An internal method called by a connector - to find AppConnections to connect to. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_appconnections_v1 - - async def sample_resolve_app_connections(): - # Create a client - client = beyondcorp_appconnections_v1.AppConnectionsServiceAsyncClient() - - # Initialize request argument(s) - request = beyondcorp_appconnections_v1.ResolveAppConnectionsRequest( - parent="parent_value", - app_connector_id="app_connector_id_value", - ) - - # Make the request - page_result = client.resolve_app_connections(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.beyondcorp_appconnections_v1.types.ResolveAppConnectionsRequest, dict]]): - The request object. Request message for - BeyondCorp.ResolveAppConnections. - parent (:class:`str`): - Required. The resource name of the AppConnection - location using the form: - ``projects/{project_id}/locations/{location_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.beyondcorp_appconnections_v1.services.app_connections_service.pagers.ResolveAppConnectionsAsyncPager: - Response message for - BeyondCorp.ResolveAppConnections. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
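-        # For example (illustrative only, with placeholder values), mixing the
-        # two calling styles trips the check below and raises ValueError:
-        #
-        #   await client.resolve_app_connections(
-        #       request={"parent": "projects/p/locations/l",
-        #                "app_connector_id": "connector-1"},
-        #       parent="projects/p/locations/l",  # flattened arg alongside request
-        #   )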
- flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, app_connections_service.ResolveAppConnectionsRequest): - request = app_connections_service.ResolveAppConnectionsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.resolve_app_connections] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ResolveAppConnectionsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
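-        # Usage sketch (hand-written; the parent resource shown is hypothetical):
-        #
-        #   response = await client.list_operations(
-        #       request={"name": "projects/p/locations/l"})
-        #   for operation in response.operations:
-        #       print(operation.name, operation.done)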
- response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
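-        # Illustrative only: callers may override the wrapped defaults per call,
-        # e.g. (the operation name below is a placeholder)
-        #
-        #   from google.api_core import retry_async
-        #
-        #   await client.delete_operation(
-        #       request={"name": "projects/p/locations/l/operations/op-1"},
-        #       retry=retry_async.AsyncRetry(timeout=30.0),
-        #       timeout=30.0,
-        #   )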
- rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def set_iam_policy( - self, - request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM access control policy on the specified function. - - Replaces any existing policy. - - Args: - request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): - The request object. Request message for `SetIamPolicy` - method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. 
- A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. - - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.set_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_iam_policy( - self, - request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Gets the IAM access control policy for a function. - - Returns an empty policy if the function exists and does not have a - policy set. - - Args: - request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): - The request object. Request message for `GetIamPolicy` - method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if - any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. - - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def test_iam_permissions( - self, - request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Tests the specified IAM permissions against the IAM access control - policy for a function. - - If the function does not exist, this will return an empty set - of permissions, not a NOT_FOUND error. - - Args: - request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): - The request object. Request message for - `TestIamPermissions` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.iam_policy_pb2.TestIamPermissionsResponse: - Response message for ``TestIamPermissions`` method. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.test_iam_permissions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. 
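-
-        For example (a hand-written sketch, not a generated sample; the
-        resource name below is a hypothetical placeholder):
-
-        .. code-block:: python
-
-            response = await client.list_locations(
-                request={"name": "projects/p"})
-            for location in response.locations:
-                print(location.location_id)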
- - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def __aenter__(self) -> "AppConnectionsServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "AppConnectionsServiceAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/client.py b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/client.py deleted file mode 100644 index 844319ecf2e5..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/client.py +++ /dev/null @@ -1,2081 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.beyondcorp_appconnections_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.beyondcorp_appconnections_v1.services.app_connections_service import pagers -from google.cloud.beyondcorp_appconnections_v1.types import app_connections_service -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import AppConnectionsServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import AppConnectionsServiceGrpcTransport -from .transports.grpc_asyncio import AppConnectionsServiceGrpcAsyncIOTransport -from .transports.rest import AppConnectionsServiceRestTransport - - -class AppConnectionsServiceClientMeta(type): - """Metaclass for the AppConnectionsService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[AppConnectionsServiceTransport]] - _transport_registry["grpc"] = AppConnectionsServiceGrpcTransport - _transport_registry["grpc_asyncio"] = AppConnectionsServiceGrpcAsyncIOTransport - _transport_registry["rest"] = AppConnectionsServiceRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[AppConnectionsServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). 
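-        # Illustrative (hand-written): an explicit label selects a transport,
-        # e.g.
-        #
-        #   AppConnectionsServiceClient.get_transport_class("rest")
-        #   # -> AppConnectionsServiceRestTransport
-        #
-        # while omitting the label falls through to the default returned below.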
-        return next(iter(cls._transport_registry.values()))
-
-
-class AppConnectionsServiceClient(metaclass=AppConnectionsServiceClientMeta):
-    """API Overview:
-
-    The ``beyondcorp.googleapis.com`` service implements the Google
-    Cloud BeyondCorp API.
-
-    Data Model:
-
-    The AppConnectionsService exposes the following resources:
-
-    - AppConnections, named as follows:
-      ``projects/{project_id}/locations/{location_id}/appConnections/{app_connection_id}``.
-
-    The AppConnectionsService service provides methods to manage
-    (create/read/update/delete) BeyondCorp AppConnections.
-    """
-
-    @staticmethod
-    def _get_default_mtls_endpoint(api_endpoint):
-        """Converts api endpoint to mTLS endpoint.
-
-        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
-        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
-        Args:
-            api_endpoint (Optional[str]): the api endpoint to convert.
-        Returns:
-            str: converted mTLS api endpoint.
-        """
-        if not api_endpoint:
-            return api_endpoint
-
-        mtls_endpoint_re = re.compile(
-            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
-        )
-
-        m = mtls_endpoint_re.match(api_endpoint)
-        name, mtls, sandbox, googledomain = m.groups()
-        if mtls or not googledomain:
-            return api_endpoint
-
-        if sandbox:
-            return api_endpoint.replace(
-                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
-            )
-
-        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
-
-    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
-    DEFAULT_ENDPOINT = "beyondcorp.googleapis.com"
-    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
-        DEFAULT_ENDPOINT
-    )
-
-    _DEFAULT_ENDPOINT_TEMPLATE = "beyondcorp.{UNIVERSE_DOMAIN}"
-    _DEFAULT_UNIVERSE = "googleapis.com"
-
-    @classmethod
-    def from_service_account_info(cls, info: dict, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            info.
-
-        Args:
-            info (dict): The service account private key info.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            AppConnectionsServiceClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_info(info)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    @classmethod
-    def from_service_account_file(cls, filename: str, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            file.
-
-        Args:
-            filename (str): The path to the service account private key json
-                file.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            AppConnectionsServiceClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_file(
-            filename)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    from_service_account_json = from_service_account_file
-
-    @property
-    def transport(self) -> AppConnectionsServiceTransport:
-        """Returns the transport used by the client instance.
-
-        Returns:
-            AppConnectionsServiceTransport: The transport used by the client
-                instance.
- """ - return self._transport - - @staticmethod - def app_connection_path(project: str,location: str,app_connection: str,) -> str: - """Returns a fully-qualified app_connection string.""" - return "projects/{project}/locations/{location}/appConnections/{app_connection}".format(project=project, location=location, app_connection=app_connection, ) - - @staticmethod - def parse_app_connection_path(path: str) -> Dict[str,str]: - """Parses a app_connection path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/appConnections/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def app_connector_path(project: str,location: str,app_connector: str,) -> str: - """Returns a fully-qualified app_connector string.""" - return "projects/{project}/locations/{location}/appConnectors/{app_connector}".format(project=project, location=location, app_connector=app_connector, ) - - @staticmethod - def parse_app_connector_path(path: str) -> Dict[str,str]: - """Parses a app_connector path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/appConnectors/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def app_gateway_path(project: str,location: str,app_gateway: str,) -> str: - """Returns a fully-qualified app_gateway string.""" - return "projects/{project}/locations/{location}/appGateways/{app_gateway}".format(project=project, location=location, app_gateway=app_gateway, ) - - @staticmethod - def parse_app_gateway_path(path: str) -> Dict[str,str]: - """Parses a app_gateway path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/appGateways/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location 
string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Deprecated. Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. - - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. 
- - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"]. - """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = AppConnectionsServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = AppConnectionsServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = AppConnectionsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. 
- """ - universe_domain = AppConnectionsServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. - """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, AppConnectionsServiceTransport, Callable[..., AppConnectionsServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the app connections service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,AppConnectionsServiceTransport,Callable[..., AppConnectionsServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the AppConnectionsServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. 
Only if this property is not set and
-                ``transport`` was not explicitly provided, the endpoint is
-                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
-                variable, which can have one of the following values:
-                "always" (always use the default mTLS endpoint), "never" (always
-                use the default regular endpoint) and "auto" (auto-switch to the
-                default mTLS endpoint if client certificate is present; this is
-                the default value).
-
-                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
-                is "true", then the ``client_cert_source`` property can be used
-                to provide a client certificate for mTLS transport. If
-                not provided, the default SSL client certificate will be used if
-                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
-                set, no client certificate will be used.
-
-                3. The ``universe_domain`` property can be used to override the
-                default "googleapis.com" universe. Note that the ``api_endpoint``
-                property still takes precedence; and ``universe_domain`` is
-                currently not supported for mTLS.
-
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
-        """
-        self._client_options = client_options
-        if isinstance(self._client_options, dict):
-            self._client_options = client_options_lib.from_dict(self._client_options)
-        if self._client_options is None:
-            self._client_options = client_options_lib.ClientOptions()
-        self._client_options = cast(client_options_lib.ClientOptions, self._client_options)
-
-        universe_domain_opt = getattr(self._client_options, 'universe_domain', None)
-
-        self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = AppConnectionsServiceClient._read_environment_variables()
-        self._client_cert_source = AppConnectionsServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert)
-        self._universe_domain = AppConnectionsServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env)
-        self._api_endpoint = None # updated below, depending on `transport`
-
-        # Initialize the universe domain validation.
-        self._is_universe_domain_valid = False
-
-        if CLIENT_LOGGING_SUPPORTED:  # pragma: NO COVER
-            # Setup logging.
-            client_logging.initialize_logging()
-
-        api_key_value = getattr(self._client_options, "api_key", None)
-        if api_key_value and credentials:
-            raise ValueError("client_options.api_key and credentials are mutually exclusive")
-
-        # Save or instantiate the transport.
-        # Ordinarily, we provide the transport, but allowing a custom transport
-        # instance provides an extensibility point for unusual situations.
-        transport_provided = isinstance(transport, AppConnectionsServiceTransport)
-        if transport_provided:
-            # transport is an AppConnectionsServiceTransport instance.
-            if credentials or self._client_options.credentials_file or api_key_value:
-                raise ValueError("When providing a transport instance, "
-                                 "provide its credentials directly.")
-            if self._client_options.scopes:
-                raise ValueError(
-                    "When providing a transport instance, provide its scopes "
-                    "directly."
- ) - self._transport = cast(AppConnectionsServiceTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - AppConnectionsServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[AppConnectionsServiceTransport], Callable[..., AppConnectionsServiceTransport]] = ( - AppConnectionsServiceClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., AppConnectionsServiceTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.beyondcorp.appconnections_v1.AppConnectionsServiceClient`.", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService", - "credentialsType": None, - } - ) - - def list_app_connections(self, - request: Optional[Union[app_connections_service.ListAppConnectionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListAppConnectionsPager: - r"""Lists AppConnections in a given project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_appconnections_v1 - - def sample_list_app_connections(): - # Create a client - client = beyondcorp_appconnections_v1.AppConnectionsServiceClient() - - # Initialize request argument(s) - request = beyondcorp_appconnections_v1.ListAppConnectionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_app_connections(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.beyondcorp_appconnections_v1.types.ListAppConnectionsRequest, dict]): - The request object. Request message for - BeyondCorp.ListAppConnections. - parent (str): - Required. The resource name of the AppConnection - location using the form: - ``projects/{project_id}/locations/{location_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.beyondcorp_appconnections_v1.services.app_connections_service.pagers.ListAppConnectionsPager: - Response message for - BeyondCorp.ListAppConnections. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, app_connections_service.ListAppConnectionsRequest): - request = app_connections_service.ListAppConnectionsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_app_connections] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. 
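# --- Editorial example (illustrative sketch, not part of the generated client) ---
# One way to consume the ListAppConnectionsPager returned above. The parent
# value is a hypothetical placeholder; item-by-item iteration resolves further
# pages lazily, while the `pages` attribute exposes page-level access.
from google.cloud import beyondcorp_appconnections_v1

client = beyondcorp_appconnections_v1.AppConnectionsServiceClient()
pager = client.list_app_connections(
    parent="projects/my-project/locations/us-central1",
)
for page in pager.pages:
    for app_connection in page.app_connections:
        print(app_connection.name)
# --- End editorial example ---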
- response = pagers.ListAppConnectionsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_app_connection(self, - request: Optional[Union[app_connections_service.GetAppConnectionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> app_connections_service.AppConnection: - r"""Gets details of a single AppConnection. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_appconnections_v1 - - def sample_get_app_connection(): - # Create a client - client = beyondcorp_appconnections_v1.AppConnectionsServiceClient() - - # Initialize request argument(s) - request = beyondcorp_appconnections_v1.GetAppConnectionRequest( - name="name_value", - ) - - # Make the request - response = client.get_app_connection(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.beyondcorp_appconnections_v1.types.GetAppConnectionRequest, dict]): - The request object. Request message for - BeyondCorp.GetAppConnection. - name (str): - Required. BeyondCorp AppConnection name using the form: - ``projects/{project_id}/locations/{location_id}/appConnections/{app_connection_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.beyondcorp_appconnections_v1.types.AppConnection: - A BeyondCorp AppConnection resource - represents a BeyondCorp protected - AppConnection to a remote application. - It creates all the necessary GCP - components needed for creating a - BeyondCorp protected AppConnection. - Multiple connectors can be authorised - for a single AppConnection. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
- if not isinstance(request, app_connections_service.GetAppConnectionRequest): - request = app_connections_service.GetAppConnectionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_app_connection] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_app_connection(self, - request: Optional[Union[app_connections_service.CreateAppConnectionRequest, dict]] = None, - *, - parent: Optional[str] = None, - app_connection: Optional[app_connections_service.AppConnection] = None, - app_connection_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Creates a new AppConnection in a given project and - location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_appconnections_v1 - - def sample_create_app_connection(): - # Create a client - client = beyondcorp_appconnections_v1.AppConnectionsServiceClient() - - # Initialize request argument(s) - app_connection = beyondcorp_appconnections_v1.AppConnection() - app_connection.name = "name_value" - app_connection.type_ = "TCP_PROXY" - app_connection.application_endpoint.host = "host_value" - app_connection.application_endpoint.port = 453 - - request = beyondcorp_appconnections_v1.CreateAppConnectionRequest( - parent="parent_value", - app_connection=app_connection, - ) - - # Make the request - operation = client.create_app_connection(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.beyondcorp_appconnections_v1.types.CreateAppConnectionRequest, dict]): - The request object. Request message for - BeyondCorp.CreateAppConnection. - parent (str): - Required. The resource project name of the AppConnection - location using the form: - ``projects/{project_id}/locations/{location_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - app_connection (google.cloud.beyondcorp_appconnections_v1.types.AppConnection): - Required. A BeyondCorp AppConnection - resource. - - This corresponds to the ``app_connection`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - app_connection_id (str): - Optional. User-settable AppConnection resource ID. - - - Must start with a letter. 
- - Must contain between 4-63 characters from - ``/[a-z][0-9]-/``. - - Must end with a number or a letter. - - This corresponds to the ``app_connection_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.beyondcorp_appconnections_v1.types.AppConnection` A BeyondCorp AppConnection resource represents a BeyondCorp protected - AppConnection to a remote application. It creates all - the necessary GCP components needed for creating a - BeyondCorp protected AppConnection. Multiple - connectors can be authorised for a single - AppConnection. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, app_connection, app_connection_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, app_connections_service.CreateAppConnectionRequest): - request = app_connections_service.CreateAppConnectionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if app_connection is not None: - request.app_connection = app_connection - if app_connection_id is not None: - request.app_connection_id = app_connection_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_app_connection] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - app_connections_service.AppConnection, - metadata_type=app_connections_service.AppConnectionOperationMetadata, - ) - - # Done; return the response. 
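# --- Editorial example (illustrative sketch, not part of the generated client) ---
# Driving the long-running operation returned by create_app_connection via the
# flattened arguments. All resource names, the endpoint values, and the
# 900-second timeout are hypothetical placeholders.
from google.cloud import beyondcorp_appconnections_v1

client = beyondcorp_appconnections_v1.AppConnectionsServiceClient()
app_connection = beyondcorp_appconnections_v1.AppConnection()
app_connection.name = "projects/my-project/locations/us-central1/appConnections/my-connection"
app_connection.type_ = "TCP_PROXY"
app_connection.application_endpoint.host = "app.internal.example.com"
app_connection.application_endpoint.port = 8080

operation = client.create_app_connection(
    parent="projects/my-project/locations/us-central1",
    app_connection=app_connection,
    app_connection_id="my-connection",
)
# result() blocks until the server finishes (or raises on failure);
# operation.metadata carries AppConnectionOperationMetadata in the meantime.
response = operation.result(timeout=900)
print(response.name)
# --- End editorial example ---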
- return response - - def update_app_connection(self, - request: Optional[Union[app_connections_service.UpdateAppConnectionRequest, dict]] = None, - *, - app_connection: Optional[app_connections_service.AppConnection] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Updates the parameters of a single AppConnection. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_appconnections_v1 - - def sample_update_app_connection(): - # Create a client - client = beyondcorp_appconnections_v1.AppConnectionsServiceClient() - - # Initialize request argument(s) - app_connection = beyondcorp_appconnections_v1.AppConnection() - app_connection.name = "name_value" - app_connection.type_ = "TCP_PROXY" - app_connection.application_endpoint.host = "host_value" - app_connection.application_endpoint.port = 453 - - request = beyondcorp_appconnections_v1.UpdateAppConnectionRequest( - app_connection=app_connection, - ) - - # Make the request - operation = client.update_app_connection(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.beyondcorp_appconnections_v1.types.UpdateAppConnectionRequest, dict]): - The request object. Request message for - BeyondCorp.UpdateAppConnection. - app_connection (google.cloud.beyondcorp_appconnections_v1.types.AppConnection): - Required. AppConnection message with updated fields. - Only supported fields specified in update_mask are - updated. - - This corresponds to the ``app_connection`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. At least one path - must be supplied in this field. The elements of the - repeated paths field may only include these fields from - [BeyondCorp.AppConnection]: - - - ``labels`` - - ``display_name`` - - ``application_endpoint`` - - ``connectors`` - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.beyondcorp_appconnections_v1.types.AppConnection` A BeyondCorp AppConnection resource represents a BeyondCorp protected - AppConnection to a remote application. 
It creates all - the necessary GCP components needed for creating a - BeyondCorp protected AppConnection. Multiple - connectors can be authorised for a single - AppConnection. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [app_connection, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, app_connections_service.UpdateAppConnectionRequest): - request = app_connections_service.UpdateAppConnectionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if app_connection is not None: - request.app_connection = app_connection - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_app_connection] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("app_connection.name", request.app_connection.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - app_connections_service.AppConnection, - metadata_type=app_connections_service.AppConnectionOperationMetadata, - ) - - # Done; return the response. - return response - - def delete_app_connection(self, - request: Optional[Union[app_connections_service.DeleteAppConnectionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Deletes a single AppConnection. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_appconnections_v1 - - def sample_delete_app_connection(): - # Create a client - client = beyondcorp_appconnections_v1.AppConnectionsServiceClient() - - # Initialize request argument(s) - request = beyondcorp_appconnections_v1.DeleteAppConnectionRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_app_connection(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.beyondcorp_appconnections_v1.types.DeleteAppConnectionRequest, dict]): - The request object. Request message for - BeyondCorp.DeleteAppConnection. - name (str): - Required. BeyondCorp Connector name using the form: - ``projects/{project_id}/locations/{location_id}/appConnections/{app_connection_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, app_connections_service.DeleteAppConnectionRequest): - request = app_connections_service.DeleteAppConnectionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_app_connection] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
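# --- Editorial example (illustrative sketch, not part of the generated client) ---
# Deleting an AppConnection through the flattened `name` argument; the
# resource name is a hypothetical placeholder. The operation resolves to
# google.protobuf.empty_pb2.Empty once the deletion completes.
from google.cloud import beyondcorp_appconnections_v1

client = beyondcorp_appconnections_v1.AppConnectionsServiceClient()
operation = client.delete_app_connection(
    name="projects/my-project/locations/us-central1/appConnections/my-connection",
)
operation.result()
print("AppConnection deleted")
# --- End editorial example ---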
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=app_connections_service.AppConnectionOperationMetadata, - ) - - # Done; return the response. - return response - - def resolve_app_connections(self, - request: Optional[Union[app_connections_service.ResolveAppConnectionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ResolveAppConnectionsPager: - r"""Resolves AppConnections details for a given - AppConnector. An internal method called by a connector - to find AppConnections to connect to. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_appconnections_v1 - - def sample_resolve_app_connections(): - # Create a client - client = beyondcorp_appconnections_v1.AppConnectionsServiceClient() - - # Initialize request argument(s) - request = beyondcorp_appconnections_v1.ResolveAppConnectionsRequest( - parent="parent_value", - app_connector_id="app_connector_id_value", - ) - - # Make the request - page_result = client.resolve_app_connections(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.beyondcorp_appconnections_v1.types.ResolveAppConnectionsRequest, dict]): - The request object. Request message for - BeyondCorp.ResolveAppConnections. - parent (str): - Required. The resource name of the AppConnection - location using the form: - ``projects/{project_id}/locations/{location_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.beyondcorp_appconnections_v1.services.app_connections_service.pagers.ResolveAppConnectionsPager: - Response message for - BeyondCorp.ResolveAppConnections. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, app_connections_service.ResolveAppConnectionsRequest): - request = app_connections_service.ResolveAppConnectionsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.resolve_app_connections] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ResolveAppConnectionsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "AppConnectionsServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
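# --- Editorial example (illustrative sketch, not part of the generated client) ---
# Calling the operations mixin with a raw operations_pb2 request; a dict with
# the same keys would be expanded by the keyword-expansion step shown above.
# The operations collection name is a hypothetical placeholder.
from google.cloud import beyondcorp_appconnections_v1
from google.longrunning import operations_pb2

client = beyondcorp_appconnections_v1.AppConnectionsServiceClient()
response = client.list_operations(
    operations_pb2.ListOperationsRequest(
        name="projects/my-project/locations/us-central1/operations",
    )
)
for op in response.operations:
    print(op.name, op.done)
# --- End editorial example ---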
- rpc = self._transport._wrapped_methods[self._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def set_iam_policy( - self, - request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM access control policy on the specified function. - - Replaces any existing policy. - - Args: - request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): - The request object. Request message for `SetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-        Returns:
-            ~.policy_pb2.Policy:
-                Defines an Identity and Access Management (IAM) policy.
-                It is used to specify access control policies for Cloud
-                Platform resources.
-                A ``Policy`` is a collection of ``bindings``. A
-                ``binding`` binds one or more ``members`` to a single
-                ``role``. Members can be user accounts, service
-                accounts, Google groups, and domains (such as G Suite).
-                A ``role`` is a named list of permissions (defined by
-                IAM or configured by users). A ``binding`` can
-                optionally specify a ``condition``, which is a logic
-                expression that further constrains the role binding
-                based on attributes about the request and/or target
-                resource.
-
-                **JSON Example**
-
-                ::
-
-                    {
-                      "bindings": [
-                        {
-                          "role": "roles/resourcemanager.organizationAdmin",
-                          "members": [
-                            "user:mike@example.com",
-                            "group:admins@example.com",
-                            "domain:google.com",
-                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
-                          ]
-                        },
-                        {
-                          "role": "roles/resourcemanager.organizationViewer",
-                          "members": ["user:eve@example.com"],
-                          "condition": {
-                            "title": "expirable access",
-                            "description": "Does not grant access after Sep 2020",
-                            "expression": "request.time <
-                            timestamp('2020-10-01T00:00:00.000Z')",
-                          }
-                        }
-                      ]
-                    }
-
-                **YAML Example**
-
-                ::
-
-                    bindings:
-                    - members:
-                      - user:mike@example.com
-                      - group:admins@example.com
-                      - domain:google.com
-                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
-                      role: roles/resourcemanager.organizationAdmin
-                    - members:
-                      - user:eve@example.com
-                      role: roles/resourcemanager.organizationViewer
-                      condition:
-                        title: expirable access
-                        description: Does not grant access after Sep 2020
-                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
-
-                For a description of IAM and its features, see the `IAM
-                developer's guide <https://cloud.google.com/iam/docs>`__.
-        """
-        # Create or coerce a protobuf request object.
-
-        # The request isn't a proto-plus wrapped type,
-        # so it must be constructed via keyword expansion.
-        if isinstance(request, dict):
-            request = iam_policy_pb2.SetIamPolicyRequest(**request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.set_iam_policy]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("resource", request.resource),)),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        try:
-            # Send the request.
-            response = rpc(
-                request, retry=retry, timeout=timeout, metadata=metadata,)
-
-            # Done; return the response.
-            return response
-        except core_exceptions.GoogleAPICallError as e:
-            self._add_cred_info_for_auth_errors(e)
-            raise e
-
-    def get_iam_policy(
-        self,
-        request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None,
-        *,
-        retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-    ) -> policy_pb2.Policy:
-        r"""Gets the IAM access control policy for a function.
-
-        Returns an empty policy if the function exists and does not have a
-        policy set.
-
-        Args:
-            request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`):
-                The request object. Request message for `GetIamPolicy`
-                method.
-            retry (google.api_core.retry.Retry): Designation of what errors, if
-                any, should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-        Returns:
-            ~.policy_pb2.Policy:
-                Defines an Identity and Access Management (IAM) policy.
-                It is used to specify access control policies for Cloud
-                Platform resources.
-                A ``Policy`` is a collection of ``bindings``. A
-                ``binding`` binds one or more ``members`` to a single
-                ``role``. Members can be user accounts, service
-                accounts, Google groups, and domains (such as G Suite).
-                A ``role`` is a named list of permissions (defined by
-                IAM or configured by users). A ``binding`` can
-                optionally specify a ``condition``, which is a logic
-                expression that further constrains the role binding
-                based on attributes about the request and/or target
-                resource.
-
-                **JSON Example**
-
-                ::
-
-                    {
-                      "bindings": [
-                        {
-                          "role": "roles/resourcemanager.organizationAdmin",
-                          "members": [
-                            "user:mike@example.com",
-                            "group:admins@example.com",
-                            "domain:google.com",
-                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
-                          ]
-                        },
-                        {
-                          "role": "roles/resourcemanager.organizationViewer",
-                          "members": ["user:eve@example.com"],
-                          "condition": {
-                            "title": "expirable access",
-                            "description": "Does not grant access after Sep 2020",
-                            "expression": "request.time <
-                            timestamp('2020-10-01T00:00:00.000Z')",
-                          }
-                        }
-                      ]
-                    }
-
-                **YAML Example**
-
-                ::
-
-                    bindings:
-                    - members:
-                      - user:mike@example.com
-                      - group:admins@example.com
-                      - domain:google.com
-                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
-                      role: roles/resourcemanager.organizationAdmin
-                    - members:
-                      - user:eve@example.com
-                      role: roles/resourcemanager.organizationViewer
-                      condition:
-                        title: expirable access
-                        description: Does not grant access after Sep 2020
-                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
-
-                For a description of IAM and its features, see the `IAM
-                developer's guide <https://cloud.google.com/iam/docs>`__.
-        """
-        # Create or coerce a protobuf request object.
-
-        # The request isn't a proto-plus wrapped type,
-        # so it must be constructed via keyword expansion.
-        if isinstance(request, dict):
-            request = iam_policy_pb2.GetIamPolicyRequest(**request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.get_iam_policy]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("resource", request.resource),)),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        try:
-            # Send the request.
-            response = rpc(
-                request, retry=retry, timeout=timeout, metadata=metadata,)
-
-            # Done; return the response.
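# --- Editorial example (illustrative sketch, not part of the generated client) ---
# Fetching an AppConnection's IAM policy by passing a dict, which the
# keyword-expansion step above turns into a GetIamPolicyRequest. The resource
# name is a hypothetical placeholder.
from google.cloud import beyondcorp_appconnections_v1

client = beyondcorp_appconnections_v1.AppConnectionsServiceClient()
policy = client.get_iam_policy(
    {"resource": "projects/my-project/locations/us-central1/appConnections/my-connection"}
)
for binding in policy.bindings:
    print(binding.role, list(binding.members))
# --- End editorial example ---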
-            return response
-        except core_exceptions.GoogleAPICallError as e:
-            self._add_cred_info_for_auth_errors(e)
-            raise e
-
-    def test_iam_permissions(
-        self,
-        request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None,
-        *,
-        retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-    ) -> iam_policy_pb2.TestIamPermissionsResponse:
-        r"""Tests the specified IAM permissions against the IAM access control
-        policy for a function.
-
-        If the function does not exist, this will return an empty set
-        of permissions, not a NOT_FOUND error.
-
-        Args:
-            request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`):
-                The request object. Request message for
-                `TestIamPermissions` method.
-            retry (google.api_core.retry.Retry): Designation of what errors,
-                if any, should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-        Returns:
-            ~.iam_policy_pb2.TestIamPermissionsResponse:
-                Response message for ``TestIamPermissions`` method.
-        """
-        # Create or coerce a protobuf request object.
-
-        # The request isn't a proto-plus wrapped type,
-        # so it must be constructed via keyword expansion.
-        if isinstance(request, dict):
-            request = iam_policy_pb2.TestIamPermissionsRequest(**request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("resource", request.resource),)),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        try:
-            # Send the request.
-            response = rpc(
-                request, retry=retry, timeout=timeout, metadata=metadata,)
-
-            # Done; return the response.
-            return response
-        except core_exceptions.GoogleAPICallError as e:
-            self._add_cred_info_for_auth_errors(e)
-            raise e
-
-    def get_location(
-        self,
-        request: Optional[locations_pb2.GetLocationRequest] = None,
-        *,
-        retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-    ) -> locations_pb2.Location:
-        r"""Gets information about a location.
-
-        Args:
-            request (:class:`~.locations_pb2.GetLocationRequest`):
-                The request object. Request message for
-                `GetLocation` method.
-            retry (google.api_core.retry.Retry): Designation of what errors,
-                if any, should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-        Returns:
-            ~.locations_pb2.Location:
-                Location object.
-        """
-        # Create or coerce a protobuf request object.
-        # The request isn't a proto-plus wrapped type,
-        # so it must be constructed via keyword expansion.
-        if isinstance(request, dict):
-            request = locations_pb2.GetLocationRequest(**request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.get_location]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("name", request.name),)),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        try:
-            # Send the request.
-            response = rpc(
-                request, retry=retry, timeout=timeout, metadata=metadata,)
-
-            # Done; return the response.
-            return response
-        except core_exceptions.GoogleAPICallError as e:
-            self._add_cred_info_for_auth_errors(e)
-            raise e
-
-    def list_locations(
-        self,
-        request: Optional[locations_pb2.ListLocationsRequest] = None,
-        *,
-        retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-    ) -> locations_pb2.ListLocationsResponse:
-        r"""Lists information about the supported locations for this service.
-
-        Args:
-            request (:class:`~.locations_pb2.ListLocationsRequest`):
-                The request object. Request message for
-                `ListLocations` method.
-            retry (google.api_core.retry.Retry): Designation of what errors,
-                if any, should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-        Returns:
-            ~.locations_pb2.ListLocationsResponse:
-                Response message for ``ListLocations`` method.
-        """
-        # Create or coerce a protobuf request object.
-        # The request isn't a proto-plus wrapped type,
-        # so it must be constructed via keyword expansion.
-        if isinstance(request, dict):
-            request = locations_pb2.ListLocationsRequest(**request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.list_locations]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("name", request.name),)),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        try:
-            # Send the request.
-            response = rpc(
-                request, retry=retry, timeout=timeout, metadata=metadata,)
-
-            # Done; return the response.
- return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "AppConnectionsServiceClient", -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/pagers.py b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/pagers.py deleted file mode 100644 index c54a3d39d992..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/pagers.py +++ /dev/null @@ -1,305 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.beyondcorp_appconnections_v1.types import app_connections_service - - -class ListAppConnectionsPager: - """A pager for iterating through ``list_app_connections`` requests. - - This class thinly wraps an initial - :class:`google.cloud.beyondcorp_appconnections_v1.types.ListAppConnectionsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``app_connections`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListAppConnections`` requests and continue to iterate - through the ``app_connections`` field on the - corresponding responses. - - All the usual :class:`google.cloud.beyondcorp_appconnections_v1.types.ListAppConnectionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., app_connections_service.ListAppConnectionsResponse], - request: app_connections_service.ListAppConnectionsRequest, - response: app_connections_service.ListAppConnectionsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. 
- request (google.cloud.beyondcorp_appconnections_v1.types.ListAppConnectionsRequest): - The initial request object. - response (google.cloud.beyondcorp_appconnections_v1.types.ListAppConnectionsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = app_connections_service.ListAppConnectionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[app_connections_service.ListAppConnectionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[app_connections_service.AppConnection]: - for page in self.pages: - yield from page.app_connections - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListAppConnectionsAsyncPager: - """A pager for iterating through ``list_app_connections`` requests. - - This class thinly wraps an initial - :class:`google.cloud.beyondcorp_appconnections_v1.types.ListAppConnectionsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``app_connections`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListAppConnections`` requests and continue to iterate - through the ``app_connections`` field on the - corresponding responses. - - All the usual :class:`google.cloud.beyondcorp_appconnections_v1.types.ListAppConnectionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[app_connections_service.ListAppConnectionsResponse]], - request: app_connections_service.ListAppConnectionsRequest, - response: app_connections_service.ListAppConnectionsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.beyondcorp_appconnections_v1.types.ListAppConnectionsRequest): - The initial request object. - response (google.cloud.beyondcorp_appconnections_v1.types.ListAppConnectionsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = app_connections_service.ListAppConnectionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[app_connections_service.ListAppConnectionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[app_connections_service.AppConnection]: - async def async_generator(): - async for page in self.pages: - for response in page.app_connections: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ResolveAppConnectionsPager: - """A pager for iterating through ``resolve_app_connections`` requests. - - This class thinly wraps an initial - :class:`google.cloud.beyondcorp_appconnections_v1.types.ResolveAppConnectionsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``app_connection_details`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ResolveAppConnections`` requests and continue to iterate - through the ``app_connection_details`` field on the - corresponding responses. - - All the usual :class:`google.cloud.beyondcorp_appconnections_v1.types.ResolveAppConnectionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., app_connections_service.ResolveAppConnectionsResponse], - request: app_connections_service.ResolveAppConnectionsRequest, - response: app_connections_service.ResolveAppConnectionsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.beyondcorp_appconnections_v1.types.ResolveAppConnectionsRequest): - The initial request object. - response (google.cloud.beyondcorp_appconnections_v1.types.ResolveAppConnectionsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = app_connections_service.ResolveAppConnectionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[app_connections_service.ResolveAppConnectionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[app_connections_service.ResolveAppConnectionsResponse.AppConnectionDetails]: - for page in self.pages: - yield from page.app_connection_details - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ResolveAppConnectionsAsyncPager: - """A pager for iterating through ``resolve_app_connections`` requests. - - This class thinly wraps an initial - :class:`google.cloud.beyondcorp_appconnections_v1.types.ResolveAppConnectionsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``app_connection_details`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ResolveAppConnections`` requests and continue to iterate - through the ``app_connection_details`` field on the - corresponding responses. - - All the usual :class:`google.cloud.beyondcorp_appconnections_v1.types.ResolveAppConnectionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[app_connections_service.ResolveAppConnectionsResponse]], - request: app_connections_service.ResolveAppConnectionsRequest, - response: app_connections_service.ResolveAppConnectionsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.beyondcorp_appconnections_v1.types.ResolveAppConnectionsRequest): - The initial request object. - response (google.cloud.beyondcorp_appconnections_v1.types.ResolveAppConnectionsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = app_connections_service.ResolveAppConnectionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[app_connections_service.ResolveAppConnectionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[app_connections_service.ResolveAppConnectionsResponse.AppConnectionDetails]: - async def async_generator(): - async for page in self.pages: - for response in page.app_connection_details: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/transports/README.rst b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/transports/README.rst deleted file mode 100644 index 5e6e3e418c62..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`AppConnectionsServiceTransport` is the ABC for all transports. -- public child `AppConnectionsServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `AppConnectionsServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseAppConnectionsServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `AppConnectionsServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/transports/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/transports/__init__.py deleted file mode 100644 index 9deac0c7f908..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -from typing import Dict, Type - -from .base import AppConnectionsServiceTransport -from .grpc import AppConnectionsServiceGrpcTransport -from .grpc_asyncio import AppConnectionsServiceGrpcAsyncIOTransport -from .rest import AppConnectionsServiceRestTransport -from .rest import AppConnectionsServiceRestInterceptor - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[AppConnectionsServiceTransport]] -_transport_registry['grpc'] = AppConnectionsServiceGrpcTransport -_transport_registry['grpc_asyncio'] = AppConnectionsServiceGrpcAsyncIOTransport -_transport_registry['rest'] = AppConnectionsServiceRestTransport - -__all__ = ( - 'AppConnectionsServiceTransport', - 'AppConnectionsServiceGrpcTransport', - 'AppConnectionsServiceGrpcAsyncIOTransport', - 'AppConnectionsServiceRestTransport', - 'AppConnectionsServiceRestInterceptor', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/transports/base.py b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/transports/base.py deleted file mode 100644 index 8516d30545d5..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/transports/base.py +++ /dev/null @@ -1,361 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.beyondcorp_appconnections_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.beyondcorp_appconnections_v1.types import app_connections_service -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class AppConnectionsServiceTransport(abc.ABC): - """Abstract transport class for AppConnectionsService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'beyondcorp.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'beyondcorp.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. 
-        if credentials and credentials_file:
-            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")
-
-        if credentials_file is not None:
-            credentials, _ = google.auth.load_credentials_from_file(
-                                credentials_file,
-                                **scopes_kwargs,
-                                quota_project_id=quota_project_id
-                            )
-        elif credentials is None and not self._ignore_credentials:
-            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
-            # Don't apply the audience if a credentials file was passed by the user.
-            if hasattr(credentials, "with_gdch_audience"):
-                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)
-
-        # If the credentials are service account credentials, then always try to use self-signed JWT.
-        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
-            credentials = credentials.with_always_use_jwt_access(True)
-
-        # Save the credentials.
-        self._credentials = credentials
-
-        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
-        if ':' not in host:
-            host += ':443'
-        self._host = host
-
-    @property
-    def host(self):
-        return self._host
-
-    def _prep_wrapped_messages(self, client_info):
-        # Precompute the wrapped methods.
-        self._wrapped_methods = {
-            self.list_app_connections: gapic_v1.method.wrap_method(
-                self.list_app_connections,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-            self.get_app_connection: gapic_v1.method.wrap_method(
-                self.get_app_connection,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-            self.create_app_connection: gapic_v1.method.wrap_method(
-                self.create_app_connection,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-            self.update_app_connection: gapic_v1.method.wrap_method(
-                self.update_app_connection,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-            self.delete_app_connection: gapic_v1.method.wrap_method(
-                self.delete_app_connection,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-            self.resolve_app_connections: gapic_v1.method.wrap_method(
-                self.resolve_app_connections,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-            self.get_location: gapic_v1.method.wrap_method(
-                self.get_location,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-            self.list_locations: gapic_v1.method.wrap_method(
-                self.list_locations,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-            self.get_iam_policy: gapic_v1.method.wrap_method(
-                self.get_iam_policy,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-            self.set_iam_policy: gapic_v1.method.wrap_method(
-                self.set_iam_policy,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-            self.test_iam_permissions: gapic_v1.method.wrap_method(
-                self.test_iam_permissions,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-            self.cancel_operation: gapic_v1.method.wrap_method(
-                self.cancel_operation,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-            self.delete_operation: gapic_v1.method.wrap_method(
-                self.delete_operation,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-            self.get_operation: gapic_v1.method.wrap_method(
-                self.get_operation,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-            self.list_operations: gapic_v1.method.wrap_method(
-                self.list_operations,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-        }
-
-    def close(self):
-        """Closes resources associated with the transport.
-
-        ..
warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! - """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def list_app_connections(self) -> Callable[ - [app_connections_service.ListAppConnectionsRequest], - Union[ - app_connections_service.ListAppConnectionsResponse, - Awaitable[app_connections_service.ListAppConnectionsResponse] - ]]: - raise NotImplementedError() - - @property - def get_app_connection(self) -> Callable[ - [app_connections_service.GetAppConnectionRequest], - Union[ - app_connections_service.AppConnection, - Awaitable[app_connections_service.AppConnection] - ]]: - raise NotImplementedError() - - @property - def create_app_connection(self) -> Callable[ - [app_connections_service.CreateAppConnectionRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def update_app_connection(self) -> Callable[ - [app_connections_service.UpdateAppConnectionRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_app_connection(self) -> Callable[ - [app_connections_service.DeleteAppConnectionRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def resolve_app_connections(self) -> Callable[ - [app_connections_service.ResolveAppConnectionsRequest], - Union[ - app_connections_service.ResolveAppConnectionsResponse, - Awaitable[app_connections_service.ResolveAppConnectionsResponse] - ]]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def delete_operation( - self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def set_iam_policy( - self, - ) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], - ]: - raise NotImplementedError() - - @property - def get_iam_policy( - self, - ) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], - ]: - raise NotImplementedError() - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Union[ - iam_policy_pb2.TestIamPermissionsResponse, - Awaitable[iam_policy_pb2.TestIamPermissionsResponse], - ], - ]: - raise NotImplementedError() - - @property - def get_location(self, - ) -> Callable[ - [locations_pb2.GetLocationRequest], - Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], - ]: - raise NotImplementedError() - - @property - def list_locations(self, - ) -> Callable[ - [locations_pb2.ListLocationsRequest], - Union[locations_pb2.ListLocationsResponse, 
Awaitable[locations_pb2.ListLocationsResponse]], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'AppConnectionsServiceTransport', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/transports/grpc.py b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/transports/grpc.py deleted file mode 100644 index 104bdb97e105..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/transports/grpc.py +++ /dev/null @@ -1,702 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json -import logging as std_logging -import pickle -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore - -from google.cloud.beyondcorp_appconnections_v1.types import app_connections_service -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from .base import AppConnectionsServiceTransport, DEFAULT_CLIENT_INFO - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER - def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for 
{client_call_details.method}",
-                extra = {
-                    "serviceName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService",
-                    "rpcName": client_call_details.method,
-                    "request": grpc_request,
-                    "metadata": grpc_request["metadata"],
-                },
-            )
-
-        response = continuation(client_call_details, request)
-        if logging_enabled:  # pragma: NO COVER
-            response_metadata = response.trailing_metadata()
-            # Convert the gRPC trailing metadata into a plain dict of string values for logging.
-            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
-            result = response.result()
-            if isinstance(result, proto.Message):
-                response_payload = type(result).to_json(result)
-            elif isinstance(result, google.protobuf.message.Message):
-                response_payload = MessageToJson(result)
-            else:
-                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
-            grpc_response = {
-                "payload": response_payload,
-                "metadata": metadata,
-                "status": "OK",
-            }
-            _LOGGER.debug(
-                f"Received response for {client_call_details.method}.",
-                extra = {
-                    "serviceName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService",
-                    "rpcName": client_call_details.method,
-                    "response": grpc_response,
-                    "metadata": grpc_response["metadata"],
-                },
-            )
-        return response
-
-
-class AppConnectionsServiceGrpcTransport(AppConnectionsServiceTransport):
-    """gRPC backend transport for AppConnectionsService.
-
-    API Overview:
-
-    The ``beyondcorp.googleapis.com`` service implements the Google
-    Cloud BeyondCorp API.
-
-    Data Model:
-
-    The AppConnectionsService exposes the following resources:
-
-    -  AppConnections, named as follows:
-       ``projects/{project_id}/locations/{location_id}/appConnections/{app_connection_id}``.
-
-    The AppConnectionsService service provides methods to manage
-    (create/read/update/delete) BeyondCorp AppConnections.
-
-    This class defines the same methods as the primary client, so the
-    primary client can load the underlying transport implementation
-    and call it.
-
-    It sends protocol buffers over the wire using gRPC (which is built on
-    top of HTTP/2); the ``grpcio`` package must be installed.
-    """
-    _stubs: Dict[str, Callable]
-
-    def __init__(self, *,
-            host: str = 'beyondcorp.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None,
-            api_mtls_endpoint: Optional[str] = None,
-            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
-            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            api_audience: Optional[str] = None,
-            ) -> None:
-        """Instantiate the transport.
-
-        Args:
-            host (Optional[str]):
-                The hostname to connect to (default: 'beyondcorp.googleapis.com').
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-                This argument is ignored if a ``channel`` instance is provided.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if a ``channel`` instance is provided.
-            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
-                ignored if a ``channel`` instance is provided.
-            channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]):
-                A ``Channel`` instance through which to make calls, or a Callable
-                that constructs and returns one. If set to None, ``self.create_channel``
-                is used to create the channel. If a Callable is given, it will be called
-                with the same arguments as used in ``self.create_channel``.
-            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
-                If provided, it overrides the ``host`` argument and tries to create
-                a mutual TLS channel with client SSL credentials from
-                ``client_cert_source`` or application default SSL credentials.
-            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                Deprecated. A callback to provide client SSL certificate bytes and
-                private key bytes, both in PEM format. It is ignored if
-                ``api_mtls_endpoint`` is None.
-            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
-                for the grpc channel. It is ignored if a ``channel`` instance is provided.
-            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                A callback to provide client certificate bytes and private key bytes,
-                both in PEM format. It is used to configure a mutual TLS channel. It is
-                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
-                be used for service account credentials.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
-            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
-                and ``credentials_file`` are passed.
-        """
-        self._grpc_channel = None
-        self._ssl_channel_credentials = ssl_channel_credentials
-        self._stubs: Dict[str, Callable] = {}
-        self._operations_client: Optional[operations_v1.OperationsClient] = None
-
-        if api_mtls_endpoint:
-            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
-        if client_cert_source:
-            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
-
-        if isinstance(channel, grpc.Channel):
-            # Ignore credentials if a channel was passed.
-            credentials = None
-            self._ignore_credentials = True
-            # If a channel was explicitly provided, set it.
-            self._grpc_channel = channel
-            self._ssl_channel_credentials = None
-
-        else:
-            if api_mtls_endpoint:
-                host = api_mtls_endpoint
-
-                # Create SSL credentials with client_cert_source or application
-                # default SSL credentials.
-                if client_cert_source:
-                    cert, key = client_cert_source()
-                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
-                        certificate_chain=cert, private_key=key
-                    )
-                else:
-                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
-
-            else:
-                if client_cert_source_for_mtls and not ssl_channel_credentials:
-                    cert, key = client_cert_source_for_mtls()
-                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
-                        certificate_chain=cert, private_key=key
-                    )
-
-        # The base transport sets the host, credentials, and scopes.
-        super().__init__(
-            host=host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            scopes=scopes,
-            quota_project_id=quota_project_id,
-            client_info=client_info,
-            always_use_jwt_access=always_use_jwt_access,
-            api_audience=api_audience,
-        )
-
-        if not self._grpc_channel:
-            # initialize with the provided callable or the default channel
-            channel_init = channel or type(self).create_channel
-            self._grpc_channel = channel_init(
-                self._host,
-                # use the credentials which are saved
-                credentials=self._credentials,
-                # Set ``credentials_file`` to ``None`` here as
-                # the credentials that we saved earlier should be used.
-                credentials_file=None,
-                scopes=self._scopes,
-                ssl_credentials=self._ssl_channel_credentials,
-                quota_project_id=quota_project_id,
-                options=[
-                    ("grpc.max_send_message_length", -1),
-                    ("grpc.max_receive_message_length", -1),
-                ],
-            )
-
-        self._interceptor = _LoggingClientInterceptor()
-        self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor)
-
-        # Wrap messages. This must be done after self._logged_channel exists
-        self._prep_wrapped_messages(client_info)
-
-    @classmethod
-    def create_channel(cls,
-                       host: str = 'beyondcorp.googleapis.com',
-                       credentials: Optional[ga_credentials.Credentials] = None,
-                       credentials_file: Optional[str] = None,
-                       scopes: Optional[Sequence[str]] = None,
-                       quota_project_id: Optional[str] = None,
-                       **kwargs) -> grpc.Channel:
-        """Create and return a gRPC channel object.
-        Args:
-            host (Optional[str]): The host for the channel to use.
-            credentials (Optional[~.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify this application to the service. If
-                none are specified, the client will attempt to ascertain
-                the credentials from the environment.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is mutually exclusive with credentials.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            kwargs (Optional[dict]): Keyword arguments, which are passed to the
-                channel creation.
-        Returns:
-            grpc.Channel: A gRPC channel object.
-
-        Raises:
-            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
-                and ``credentials_file`` are passed.
-        """
-
-        return grpc_helpers.create_channel(
-            host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            quota_project_id=quota_project_id,
-            default_scopes=cls.AUTH_SCOPES,
-            scopes=scopes,
-            default_host=cls.DEFAULT_HOST,
-            **kwargs
-        )
-
-    @property
-    def grpc_channel(self) -> grpc.Channel:
-        """Return the channel designed to connect to this service.
- """ - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def list_app_connections(self) -> Callable[ - [app_connections_service.ListAppConnectionsRequest], - app_connections_service.ListAppConnectionsResponse]: - r"""Return a callable for the list app connections method over gRPC. - - Lists AppConnections in a given project and location. - - Returns: - Callable[[~.ListAppConnectionsRequest], - ~.ListAppConnectionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_app_connections' not in self._stubs: - self._stubs['list_app_connections'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.appconnections.v1.AppConnectionsService/ListAppConnections', - request_serializer=app_connections_service.ListAppConnectionsRequest.serialize, - response_deserializer=app_connections_service.ListAppConnectionsResponse.deserialize, - ) - return self._stubs['list_app_connections'] - - @property - def get_app_connection(self) -> Callable[ - [app_connections_service.GetAppConnectionRequest], - app_connections_service.AppConnection]: - r"""Return a callable for the get app connection method over gRPC. - - Gets details of a single AppConnection. - - Returns: - Callable[[~.GetAppConnectionRequest], - ~.AppConnection]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_app_connection' not in self._stubs: - self._stubs['get_app_connection'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.appconnections.v1.AppConnectionsService/GetAppConnection', - request_serializer=app_connections_service.GetAppConnectionRequest.serialize, - response_deserializer=app_connections_service.AppConnection.deserialize, - ) - return self._stubs['get_app_connection'] - - @property - def create_app_connection(self) -> Callable[ - [app_connections_service.CreateAppConnectionRequest], - operations_pb2.Operation]: - r"""Return a callable for the create app connection method over gRPC. - - Creates a new AppConnection in a given project and - location. - - Returns: - Callable[[~.CreateAppConnectionRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_app_connection' not in self._stubs: - self._stubs['create_app_connection'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.appconnections.v1.AppConnectionsService/CreateAppConnection', - request_serializer=app_connections_service.CreateAppConnectionRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_app_connection'] - - @property - def update_app_connection(self) -> Callable[ - [app_connections_service.UpdateAppConnectionRequest], - operations_pb2.Operation]: - r"""Return a callable for the update app connection method over gRPC. - - Updates the parameters of a single AppConnection. - - Returns: - Callable[[~.UpdateAppConnectionRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_app_connection' not in self._stubs: - self._stubs['update_app_connection'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.appconnections.v1.AppConnectionsService/UpdateAppConnection', - request_serializer=app_connections_service.UpdateAppConnectionRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_app_connection'] - - @property - def delete_app_connection(self) -> Callable[ - [app_connections_service.DeleteAppConnectionRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete app connection method over gRPC. - - Deletes a single AppConnection. - - Returns: - Callable[[~.DeleteAppConnectionRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_app_connection' not in self._stubs: - self._stubs['delete_app_connection'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.appconnections.v1.AppConnectionsService/DeleteAppConnection', - request_serializer=app_connections_service.DeleteAppConnectionRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_app_connection'] - - @property - def resolve_app_connections(self) -> Callable[ - [app_connections_service.ResolveAppConnectionsRequest], - app_connections_service.ResolveAppConnectionsResponse]: - r"""Return a callable for the resolve app connections method over gRPC. - - Resolves AppConnections details for a given - AppConnector. An internal method called by a connector - to find AppConnections to connect to. - - Returns: - Callable[[~.ResolveAppConnectionsRequest], - ~.ResolveAppConnectionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'resolve_app_connections' not in self._stubs: - self._stubs['resolve_app_connections'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.appconnections.v1.AppConnectionsService/ResolveAppConnections', - request_serializer=app_connections_service.ResolveAppConnectionsRequest.serialize, - response_deserializer=app_connections_service.ResolveAppConnectionsResponse.deserialize, - ) - return self._stubs['resolve_app_connections'] - - def close(self): - self._logged_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_location" not in self._stubs: - self._stubs["get_location"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - @property - def set_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - Sets the IAM access control policy on the specified - function. Replaces any existing policy. - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( - "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["set_iam_policy"] - - @property - def get_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - Gets the IAM access control policy for a function. - Returns an empty policy if the function exists and does - not have a policy set. - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( - "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["get_iam_policy"] - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse - ]: - r"""Return a callable for the test iam permissions method over gRPC. - Tests the specified permissions against the IAM access control - policy for a function. If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( - "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs["test_iam_permissions"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'AppConnectionsServiceGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/transports/grpc_asyncio.py deleted file mode 100644 index 4b3254e8b360..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,792 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#
-import inspect
-import json
-import pickle
-import logging as std_logging
-import warnings
-from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
-
-from google.api_core import gapic_v1
-from google.api_core import grpc_helpers_async
-from google.api_core import exceptions as core_exceptions
-from google.api_core import retry_async as retries
-from google.api_core import operations_v1
-from google.auth import credentials as ga_credentials  # type: ignore
-from google.auth.transport.grpc import SslCredentials  # type: ignore
-from google.protobuf.json_format import MessageToJson
-import google.protobuf.message
-
-import grpc  # type: ignore
-import proto  # type: ignore
-from grpc.experimental import aio  # type: ignore
-
-from google.cloud.beyondcorp_appconnections_v1.types import app_connections_service
-from google.cloud.location import locations_pb2  # type: ignore
-from google.iam.v1 import iam_policy_pb2  # type: ignore
-from google.iam.v1 import policy_pb2  # type: ignore
-from google.longrunning import operations_pb2  # type: ignore
-from .base import AppConnectionsServiceTransport, DEFAULT_CLIENT_INFO
-from .grpc import AppConnectionsServiceGrpcTransport
-
-try:
-    from google.api_core import client_logging  # type: ignore
-    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
-except ImportError:  # pragma: NO COVER
-    CLIENT_LOGGING_SUPPORTED = False
-
-_LOGGER = std_logging.getLogger(__name__)
-
-
-class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor):  # pragma: NO COVER
-    async def intercept_unary_unary(self, continuation, client_call_details, request):
-        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG)
-        if logging_enabled:  # pragma: NO COVER
-            request_metadata = client_call_details.metadata
-            if isinstance(request, proto.Message):
-                request_payload = type(request).to_json(request)
-            elif isinstance(request, google.protobuf.message.Message):
-                request_payload = MessageToJson(request)
-            else:
-                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
-
-            request_metadata = {
-                key: value.decode("utf-8") if isinstance(value, bytes) else value
-                for key, value in request_metadata
-            }
-            grpc_request = {
-                "payload": request_payload,
-                "requestMethod": "grpc",
-                "metadata": dict(request_metadata),
-            }
-            _LOGGER.debug(
-                f"Sending request for {client_call_details.method}",
-                extra = {
-                    "serviceName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService",
-                    "rpcName": str(client_call_details.method),
-                    "request": grpc_request,
-                    "metadata": grpc_request["metadata"],
-                },
-            )
-        response = await continuation(client_call_details, request)
-        if logging_enabled:  # pragma: NO COVER
-            response_metadata = await response.trailing_metadata()
-            # Convert the gRPC response metadata into a dict of decoded string values.
-            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
-            result = await response
-            if isinstance(result, proto.Message):
-                response_payload = type(result).to_json(result)
-            elif isinstance(result, google.protobuf.message.Message):
-                response_payload = MessageToJson(result)
-            else:
-                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
-            grpc_response = {
-                "payload": response_payload,
-                "metadata": metadata,
-                "status": "OK",
-            }
-            _LOGGER.debug(
-                f"Received response to rpc {client_call_details.method}.",
-                extra = {
-                    "serviceName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService",
-                    "rpcName": str(client_call_details.method),
-                    "response": grpc_response,
-                    "metadata": grpc_response["metadata"],
-                },
-            )
-        return response
-
-
-class AppConnectionsServiceGrpcAsyncIOTransport(AppConnectionsServiceTransport):
-    """gRPC AsyncIO backend transport for AppConnectionsService.
-
-    API Overview:
-
-    The ``beyondcorp.googleapis.com`` service implements the Google
-    Cloud BeyondCorp API.
-
-    Data Model:
-
-    The AppConnectionsService exposes the following resources:
-
-    -  AppConnections, named as follows:
-       ``projects/{project_id}/locations/{location_id}/appConnections/{app_connection_id}``.
-
-    The AppConnectionsService service provides methods to manage
-    (create/read/update/delete) BeyondCorp AppConnections.
-
-    This class defines the same methods as the primary client, so the
-    primary client can load the underlying transport implementation
-    and call it.
-
-    It sends protocol buffers over the wire using gRPC (which is built on
-    top of HTTP/2); the ``grpcio`` package must be installed.
-    """
-
-    _grpc_channel: aio.Channel
-    _stubs: Dict[str, Callable] = {}
-
-    @classmethod
-    def create_channel(cls,
-                       host: str = 'beyondcorp.googleapis.com',
-                       credentials: Optional[ga_credentials.Credentials] = None,
-                       credentials_file: Optional[str] = None,
-                       scopes: Optional[Sequence[str]] = None,
-                       quota_project_id: Optional[str] = None,
-                       **kwargs) -> aio.Channel:
-        """Create and return a gRPC AsyncIO channel object.
-        Args:
-            host (Optional[str]): The host for the channel to use.
-            credentials (Optional[~.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify this application to the service. If
-                none are specified, the client will attempt to ascertain
-                the credentials from the environment.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            kwargs (Optional[dict]): Keyword arguments, which are passed to the
-                channel creation.
-        Returns:
-            aio.Channel: A gRPC AsyncIO channel object.
-        """
-
-        return grpc_helpers_async.create_channel(
-            host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            quota_project_id=quota_project_id,
-            default_scopes=cls.AUTH_SCOPES,
-            scopes=scopes,
-            default_host=cls.DEFAULT_HOST,
-            **kwargs
-        )
-
-    def __init__(self, *,
-            host: str = 'beyondcorp.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None,
-            api_mtls_endpoint: Optional[str] = None,
-            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
-            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            api_audience: Optional[str] = None,
-            ) -> None:
-        """Instantiate the transport.
-
-        Args:
-            host (Optional[str]):
-                The hostname to connect to (default: 'beyondcorp.googleapis.com').
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-                This argument is ignored if a ``channel`` instance is provided.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if a ``channel`` instance is provided.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]):
-                A ``Channel`` instance through which to make calls, or a Callable
-                that constructs and returns one. If set to None, ``self.create_channel``
-                is used to create the channel. If a Callable is given, it will be called
-                with the same arguments as used in ``self.create_channel``.
-            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
-                If provided, it overrides the ``host`` argument and tries to create
-                a mutual TLS channel with client SSL credentials from
-                ``client_cert_source`` or application default SSL credentials.
-            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                Deprecated. A callback to provide client SSL certificate bytes and
-                private key bytes, both in PEM format. It is ignored if
-                ``api_mtls_endpoint`` is None.
-            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
-                for the grpc channel. It is ignored if a ``channel`` instance is provided.
-            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                A callback to provide client certificate bytes and private key bytes,
-                both in PEM format. It is used to configure a mutual TLS channel. It is
-                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
-                be used for service account credentials.
-
-        Raises:
-            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
-                creation failed for any reason.
-            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
-                and ``credentials_file`` are passed.
-        """
-        self._grpc_channel = None
-        self._ssl_channel_credentials = ssl_channel_credentials
-        self._stubs: Dict[str, Callable] = {}
-        self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None
-
-        if api_mtls_endpoint:
-            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
-        if client_cert_source:
-            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
-
-        if isinstance(channel, aio.Channel):
-            # Ignore credentials if a channel was passed.
-            credentials = None
-            self._ignore_credentials = True
-            # If a channel was explicitly provided, set it.
-            self._grpc_channel = channel
-            self._ssl_channel_credentials = None
-        else:
-            if api_mtls_endpoint:
-                host = api_mtls_endpoint
-
-                # Create SSL credentials with client_cert_source or application
-                # default SSL credentials.
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def list_app_connections(self) -> Callable[ - [app_connections_service.ListAppConnectionsRequest], - Awaitable[app_connections_service.ListAppConnectionsResponse]]: - r"""Return a callable for the list app connections method over gRPC. - - Lists AppConnections in a given project and location. - - Returns: - Callable[[~.ListAppConnectionsRequest], - Awaitable[~.ListAppConnectionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_app_connections' not in self._stubs: - self._stubs['list_app_connections'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.appconnections.v1.AppConnectionsService/ListAppConnections', - request_serializer=app_connections_service.ListAppConnectionsRequest.serialize, - response_deserializer=app_connections_service.ListAppConnectionsResponse.deserialize, - ) - return self._stubs['list_app_connections'] - - @property - def get_app_connection(self) -> Callable[ - [app_connections_service.GetAppConnectionRequest], - Awaitable[app_connections_service.AppConnection]]: - r"""Return a callable for the get app connection method over gRPC. - - Gets details of a single AppConnection. - - Returns: - Callable[[~.GetAppConnectionRequest], - Awaitable[~.AppConnection]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_app_connection' not in self._stubs: - self._stubs['get_app_connection'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.appconnections.v1.AppConnectionsService/GetAppConnection', - request_serializer=app_connections_service.GetAppConnectionRequest.serialize, - response_deserializer=app_connections_service.AppConnection.deserialize, - ) - return self._stubs['get_app_connection'] - - @property - def create_app_connection(self) -> Callable[ - [app_connections_service.CreateAppConnectionRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create app connection method over gRPC. - - Creates a new AppConnection in a given project and - location. - - Returns: - Callable[[~.CreateAppConnectionRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_app_connection' not in self._stubs: - self._stubs['create_app_connection'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.appconnections.v1.AppConnectionsService/CreateAppConnection', - request_serializer=app_connections_service.CreateAppConnectionRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_app_connection'] - - @property - def update_app_connection(self) -> Callable[ - [app_connections_service.UpdateAppConnectionRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update app connection method over gRPC. - - Updates the parameters of a single AppConnection. - - Returns: - Callable[[~.UpdateAppConnectionRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
-        if 'update_app_connection' not in self._stubs:
-            self._stubs['update_app_connection'] = self._logged_channel.unary_unary(
-                '/google.cloud.beyondcorp.appconnections.v1.AppConnectionsService/UpdateAppConnection',
-                request_serializer=app_connections_service.UpdateAppConnectionRequest.serialize,
-                response_deserializer=operations_pb2.Operation.FromString,
-            )
-        return self._stubs['update_app_connection']
-
-    @property
-    def delete_app_connection(self) -> Callable[
-            [app_connections_service.DeleteAppConnectionRequest],
-            Awaitable[operations_pb2.Operation]]:
-        r"""Return a callable for the delete app connection method over gRPC.
-
-        Deletes a single AppConnection.
-
-        Returns:
-            Callable[[~.DeleteAppConnectionRequest],
-                    Awaitable[~.Operation]]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'delete_app_connection' not in self._stubs:
-            self._stubs['delete_app_connection'] = self._logged_channel.unary_unary(
-                '/google.cloud.beyondcorp.appconnections.v1.AppConnectionsService/DeleteAppConnection',
-                request_serializer=app_connections_service.DeleteAppConnectionRequest.serialize,
-                response_deserializer=operations_pb2.Operation.FromString,
-            )
-        return self._stubs['delete_app_connection']
-
-    @property
-    def resolve_app_connections(self) -> Callable[
-            [app_connections_service.ResolveAppConnectionsRequest],
-            Awaitable[app_connections_service.ResolveAppConnectionsResponse]]:
-        r"""Return a callable for the resolve app connections method over gRPC.
-
-        Resolves the details of AppConnections for a given AppConnector.
-        This is an internal method called by a connector to find the
-        AppConnections to connect to.
-
-        Returns:
-            Callable[[~.ResolveAppConnectionsRequest],
-                    Awaitable[~.ResolveAppConnectionsResponse]]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
- if 'resolve_app_connections' not in self._stubs: - self._stubs['resolve_app_connections'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.appconnections.v1.AppConnectionsService/ResolveAppConnections', - request_serializer=app_connections_service.ResolveAppConnectionsRequest.serialize, - response_deserializer=app_connections_service.ResolveAppConnectionsResponse.deserialize, - ) - return self._stubs['resolve_app_connections'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.list_app_connections: self._wrap_method( - self.list_app_connections, - default_timeout=None, - client_info=client_info, - ), - self.get_app_connection: self._wrap_method( - self.get_app_connection, - default_timeout=None, - client_info=client_info, - ), - self.create_app_connection: self._wrap_method( - self.create_app_connection, - default_timeout=None, - client_info=client_info, - ), - self.update_app_connection: self._wrap_method( - self.update_app_connection, - default_timeout=None, - client_info=client_info, - ), - self.delete_app_connection: self._wrap_method( - self.delete_app_connection, - default_timeout=None, - client_info=client_info, - ), - self.resolve_app_connections: self._wrap_method( - self.resolve_app_connections, - default_timeout=None, - client_info=client_info, - ), - self.get_location: self._wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - self.list_locations: self._wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - self.get_iam_policy: self._wrap_method( - self.get_iam_policy, - default_timeout=None, - client_info=client_info, - ), - self.set_iam_policy: self._wrap_method( - self.set_iam_policy, - default_timeout=None, - client_info=client_info, - ), - self.test_iam_permissions: self._wrap_method( - self.test_iam_permissions, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: self._wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: self._wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: self._wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: self._wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if "get_location" not in self._stubs:
-            self._stubs["get_location"] = self._logged_channel.unary_unary(
-                "/google.cloud.location.Locations/GetLocation",
-                request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
-                response_deserializer=locations_pb2.Location.FromString,
-            )
-        return self._stubs["get_location"]
-
-    @property
-    def set_iam_policy(
-        self,
-    ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]:
-        r"""Return a callable for the set iam policy method over gRPC.
-        Sets the IAM access control policy on the specified
-        resource. Replaces any existing policy.
-        Returns:
-            Callable[[~.SetIamPolicyRequest],
-                    ~.Policy]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if "set_iam_policy" not in self._stubs:
-            self._stubs["set_iam_policy"] = self._logged_channel.unary_unary(
-                "/google.iam.v1.IAMPolicy/SetIamPolicy",
-                request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString,
-                response_deserializer=policy_pb2.Policy.FromString,
-            )
-        return self._stubs["set_iam_policy"]
-
-    @property
-    def get_iam_policy(
-        self,
-    ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]:
-        r"""Return a callable for the get iam policy method over gRPC.
-        Gets the IAM access control policy for a resource.
-        Returns an empty policy if the resource exists and does
-        not have a policy set.
-        Returns:
-            Callable[[~.GetIamPolicyRequest],
-                    ~.Policy]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if "get_iam_policy" not in self._stubs:
-            self._stubs["get_iam_policy"] = self._logged_channel.unary_unary(
-                "/google.iam.v1.IAMPolicy/GetIamPolicy",
-                request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString,
-                response_deserializer=policy_pb2.Policy.FromString,
-            )
-        return self._stubs["get_iam_policy"]
-
-    @property
-    def test_iam_permissions(
-        self,
-    ) -> Callable[
-        [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse
-    ]:
-        r"""Return a callable for the test iam permissions method over gRPC.
-        Tests the specified permissions against the IAM access control
-        policy for a resource. If the resource does not exist, this will
-        return an empty set of permissions, not a NOT_FOUND error.
-        Returns:
-            Callable[[~.TestIamPermissionsRequest],
-                    ~.TestIamPermissionsResponse]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
- if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( - "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs["test_iam_permissions"] - - -__all__ = ( - 'AppConnectionsServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/transports/rest.py b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/transports/rest.py deleted file mode 100644 index 7497ce5e437d..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/transports/rest.py +++ /dev/null @@ -1,2506 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import logging -import json # type: ignore - -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.api_core import operations_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.cloud.location import locations_pb2 # type: ignore - -from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - - -from google.cloud.beyondcorp_appconnections_v1.types import app_connections_service -from google.longrunning import operations_pb2 # type: ignore - - -from .rest_base import _BaseAppConnectionsServiceRestTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = logging.getLogger(__name__) - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=f"requests@{requests_version}", -) - - -class AppConnectionsServiceRestInterceptor: - """Interceptor for AppConnectionsService. 
-
-    Interceptors are used to manipulate requests, request metadata, and responses
-    in arbitrary ways.
-    Example use cases include:
-    * Logging
-    * Verifying requests according to service or custom semantics
-    * Stripping extraneous information from responses
-
-    These use cases and more can be enabled by injecting an
-    instance of a custom subclass when constructing the AppConnectionsServiceRestTransport.
-
-    .. code-block:: python
-        class MyCustomAppConnectionsServiceInterceptor(AppConnectionsServiceRestInterceptor):
-            def pre_create_app_connection(self, request, metadata):
-                logging.info(f"Received request: {request}")
-                return request, metadata
-
-            def post_create_app_connection(self, response):
-                logging.info(f"Received response: {response}")
-                return response
-
-            def pre_delete_app_connection(self, request, metadata):
-                logging.info(f"Received request: {request}")
-                return request, metadata
-
-            def post_delete_app_connection(self, response):
-                logging.info(f"Received response: {response}")
-                return response
-
-            def pre_get_app_connection(self, request, metadata):
-                logging.info(f"Received request: {request}")
-                return request, metadata
-
-            def post_get_app_connection(self, response):
-                logging.info(f"Received response: {response}")
-                return response
-
-            def pre_list_app_connections(self, request, metadata):
-                logging.info(f"Received request: {request}")
-                return request, metadata
-
-            def post_list_app_connections(self, response):
-                logging.info(f"Received response: {response}")
-                return response
-
-            def pre_resolve_app_connections(self, request, metadata):
-                logging.info(f"Received request: {request}")
-                return request, metadata
-
-            def post_resolve_app_connections(self, response):
-                logging.info(f"Received response: {response}")
-                return response
-
-            def pre_update_app_connection(self, request, metadata):
-                logging.info(f"Received request: {request}")
-                return request, metadata
-
-            def post_update_app_connection(self, response):
-                logging.info(f"Received response: {response}")
-                return response
-
-        transport = AppConnectionsServiceRestTransport(interceptor=MyCustomAppConnectionsServiceInterceptor())
-        client = AppConnectionsServiceClient(transport=transport)
-
-    """
-    def pre_create_app_connection(self, request: app_connections_service.CreateAppConnectionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[app_connections_service.CreateAppConnectionRequest, Sequence[Tuple[str, Union[str, bytes]]]]:
-        """Pre-rpc interceptor for create_app_connection
-
-        Override in a subclass to manipulate the request or metadata
-        before they are sent to the AppConnectionsService server.
-        """
-        return request, metadata
-
-    def post_create_app_connection(self, response: operations_pb2.Operation) -> operations_pb2.Operation:
-        """Post-rpc interceptor for create_app_connection
-
-        DEPRECATED. Please use the `post_create_app_connection_with_metadata`
-        interceptor instead.
-
-        Override in a subclass to read or manipulate the response
-        after it is returned by the AppConnectionsService server but before
-        it is returned to user code. This `post_create_app_connection` interceptor runs
-        before the `post_create_app_connection_with_metadata` interceptor.
- """ - return response - - def post_create_app_connection_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_app_connection - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AppConnectionsService server but before it is returned to user code. - - We recommend only using this `post_create_app_connection_with_metadata` - interceptor in new development instead of the `post_create_app_connection` interceptor. - When both interceptors are used, this `post_create_app_connection_with_metadata` interceptor runs after the - `post_create_app_connection` interceptor. The (possibly modified) response returned by - `post_create_app_connection` will be passed to - `post_create_app_connection_with_metadata`. - """ - return response, metadata - - def pre_delete_app_connection(self, request: app_connections_service.DeleteAppConnectionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[app_connections_service.DeleteAppConnectionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_app_connection - - Override in a subclass to manipulate the request or metadata - before they are sent to the AppConnectionsService server. - """ - return request, metadata - - def post_delete_app_connection(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_app_connection - - DEPRECATED. Please use the `post_delete_app_connection_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AppConnectionsService server but before - it is returned to user code. This `post_delete_app_connection` interceptor runs - before the `post_delete_app_connection_with_metadata` interceptor. - """ - return response - - def post_delete_app_connection_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for delete_app_connection - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AppConnectionsService server but before it is returned to user code. - - We recommend only using this `post_delete_app_connection_with_metadata` - interceptor in new development instead of the `post_delete_app_connection` interceptor. - When both interceptors are used, this `post_delete_app_connection_with_metadata` interceptor runs after the - `post_delete_app_connection` interceptor. The (possibly modified) response returned by - `post_delete_app_connection` will be passed to - `post_delete_app_connection_with_metadata`. - """ - return response, metadata - - def pre_get_app_connection(self, request: app_connections_service.GetAppConnectionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[app_connections_service.GetAppConnectionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_app_connection - - Override in a subclass to manipulate the request or metadata - before they are sent to the AppConnectionsService server. 
- """ - return request, metadata - - def post_get_app_connection(self, response: app_connections_service.AppConnection) -> app_connections_service.AppConnection: - """Post-rpc interceptor for get_app_connection - - DEPRECATED. Please use the `post_get_app_connection_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AppConnectionsService server but before - it is returned to user code. This `post_get_app_connection` interceptor runs - before the `post_get_app_connection_with_metadata` interceptor. - """ - return response - - def post_get_app_connection_with_metadata(self, response: app_connections_service.AppConnection, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[app_connections_service.AppConnection, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_app_connection - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AppConnectionsService server but before it is returned to user code. - - We recommend only using this `post_get_app_connection_with_metadata` - interceptor in new development instead of the `post_get_app_connection` interceptor. - When both interceptors are used, this `post_get_app_connection_with_metadata` interceptor runs after the - `post_get_app_connection` interceptor. The (possibly modified) response returned by - `post_get_app_connection` will be passed to - `post_get_app_connection_with_metadata`. - """ - return response, metadata - - def pre_list_app_connections(self, request: app_connections_service.ListAppConnectionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[app_connections_service.ListAppConnectionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_app_connections - - Override in a subclass to manipulate the request or metadata - before they are sent to the AppConnectionsService server. - """ - return request, metadata - - def post_list_app_connections(self, response: app_connections_service.ListAppConnectionsResponse) -> app_connections_service.ListAppConnectionsResponse: - """Post-rpc interceptor for list_app_connections - - DEPRECATED. Please use the `post_list_app_connections_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AppConnectionsService server but before - it is returned to user code. This `post_list_app_connections` interceptor runs - before the `post_list_app_connections_with_metadata` interceptor. - """ - return response - - def post_list_app_connections_with_metadata(self, response: app_connections_service.ListAppConnectionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[app_connections_service.ListAppConnectionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_app_connections - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AppConnectionsService server but before it is returned to user code. - - We recommend only using this `post_list_app_connections_with_metadata` - interceptor in new development instead of the `post_list_app_connections` interceptor. - When both interceptors are used, this `post_list_app_connections_with_metadata` interceptor runs after the - `post_list_app_connections` interceptor. 
The (possibly modified) response returned by - `post_list_app_connections` will be passed to - `post_list_app_connections_with_metadata`. - """ - return response, metadata - - def pre_resolve_app_connections(self, request: app_connections_service.ResolveAppConnectionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[app_connections_service.ResolveAppConnectionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for resolve_app_connections - - Override in a subclass to manipulate the request or metadata - before they are sent to the AppConnectionsService server. - """ - return request, metadata - - def post_resolve_app_connections(self, response: app_connections_service.ResolveAppConnectionsResponse) -> app_connections_service.ResolveAppConnectionsResponse: - """Post-rpc interceptor for resolve_app_connections - - DEPRECATED. Please use the `post_resolve_app_connections_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AppConnectionsService server but before - it is returned to user code. This `post_resolve_app_connections` interceptor runs - before the `post_resolve_app_connections_with_metadata` interceptor. - """ - return response - - def post_resolve_app_connections_with_metadata(self, response: app_connections_service.ResolveAppConnectionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[app_connections_service.ResolveAppConnectionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for resolve_app_connections - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AppConnectionsService server but before it is returned to user code. - - We recommend only using this `post_resolve_app_connections_with_metadata` - interceptor in new development instead of the `post_resolve_app_connections` interceptor. - When both interceptors are used, this `post_resolve_app_connections_with_metadata` interceptor runs after the - `post_resolve_app_connections` interceptor. The (possibly modified) response returned by - `post_resolve_app_connections` will be passed to - `post_resolve_app_connections_with_metadata`. - """ - return response, metadata - - def pre_update_app_connection(self, request: app_connections_service.UpdateAppConnectionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[app_connections_service.UpdateAppConnectionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_app_connection - - Override in a subclass to manipulate the request or metadata - before they are sent to the AppConnectionsService server. - """ - return request, metadata - - def post_update_app_connection(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for update_app_connection - - DEPRECATED. Please use the `post_update_app_connection_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AppConnectionsService server but before - it is returned to user code. This `post_update_app_connection` interceptor runs - before the `post_update_app_connection_with_metadata` interceptor. 
- """ - return response - - def post_update_app_connection_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_app_connection - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AppConnectionsService server but before it is returned to user code. - - We recommend only using this `post_update_app_connection_with_metadata` - interceptor in new development instead of the `post_update_app_connection` interceptor. - When both interceptors are used, this `post_update_app_connection_with_metadata` interceptor runs after the - `post_update_app_connection` interceptor. The (possibly modified) response returned by - `post_update_app_connection` will be passed to - `post_update_app_connection_with_metadata`. - """ - return response, metadata - - def pre_get_location( - self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_location - - Override in a subclass to manipulate the request or metadata - before they are sent to the AppConnectionsService server. - """ - return request, metadata - - def post_get_location( - self, response: locations_pb2.Location - ) -> locations_pb2.Location: - """Post-rpc interceptor for get_location - - Override in a subclass to manipulate the response - after it is returned by the AppConnectionsService server but before - it is returned to user code. - """ - return response - - def pre_list_locations( - self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_locations - - Override in a subclass to manipulate the request or metadata - before they are sent to the AppConnectionsService server. - """ - return request, metadata - - def post_list_locations( - self, response: locations_pb2.ListLocationsResponse - ) -> locations_pb2.ListLocationsResponse: - """Post-rpc interceptor for list_locations - - Override in a subclass to manipulate the response - after it is returned by the AppConnectionsService server but before - it is returned to user code. - """ - return response - - def pre_get_iam_policy( - self, request: iam_policy_pb2.GetIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the AppConnectionsService server. - """ - return request, metadata - - def post_get_iam_policy( - self, response: policy_pb2.Policy - ) -> policy_pb2.Policy: - """Post-rpc interceptor for get_iam_policy - - Override in a subclass to manipulate the response - after it is returned by the AppConnectionsService server but before - it is returned to user code. 
- """ - return response - - def pre_set_iam_policy( - self, request: iam_policy_pb2.SetIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for set_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the AppConnectionsService server. - """ - return request, metadata - - def post_set_iam_policy( - self, response: policy_pb2.Policy - ) -> policy_pb2.Policy: - """Post-rpc interceptor for set_iam_policy - - Override in a subclass to manipulate the response - after it is returned by the AppConnectionsService server but before - it is returned to user code. - """ - return response - - def pre_test_iam_permissions( - self, request: iam_policy_pb2.TestIamPermissionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for test_iam_permissions - - Override in a subclass to manipulate the request or metadata - before they are sent to the AppConnectionsService server. - """ - return request, metadata - - def post_test_iam_permissions( - self, response: iam_policy_pb2.TestIamPermissionsResponse - ) -> iam_policy_pb2.TestIamPermissionsResponse: - """Post-rpc interceptor for test_iam_permissions - - Override in a subclass to manipulate the response - after it is returned by the AppConnectionsService server but before - it is returned to user code. - """ - return response - - def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the AppConnectionsService server. - """ - return request, metadata - - def post_cancel_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the response - after it is returned by the AppConnectionsService server but before - it is returned to user code. - """ - return response - - def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the AppConnectionsService server. - """ - return request, metadata - - def post_delete_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for delete_operation - - Override in a subclass to manipulate the response - after it is returned by the AppConnectionsService server but before - it is returned to user code. - """ - return response - - def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the AppConnectionsService server. 
- """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the AppConnectionsService server but before - it is returned to user code. - """ - return response - - def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_operations - - Override in a subclass to manipulate the request or metadata - before they are sent to the AppConnectionsService server. - """ - return request, metadata - - def post_list_operations( - self, response: operations_pb2.ListOperationsResponse - ) -> operations_pb2.ListOperationsResponse: - """Post-rpc interceptor for list_operations - - Override in a subclass to manipulate the response - after it is returned by the AppConnectionsService server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class AppConnectionsServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: AppConnectionsServiceRestInterceptor - - -class AppConnectionsServiceRestTransport(_BaseAppConnectionsServiceRestTransport): - """REST backend synchronous transport for AppConnectionsService. - - API Overview: - - The ``beyondcorp.googleapis.com`` service implements the Google - Cloud BeyondCorp API. - - Data Model: - - The AppConnectionsService exposes the following resources: - - - AppConnections, named as follows: - ``projects/{project_id}/locations/{location_id}/appConnections/{app_connection_id}``. - - The AppConnectionsService service provides methods to manage - (create/read/update/delete) BeyondCorp AppConnections. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'beyondcorp.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[AppConnectionsServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'beyondcorp.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. 
- client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. - # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - url_scheme=url_scheme, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or AppConnectionsServiceRestInterceptor() - self._prep_wrapped_messages(client_info) - - @property - def operations_client(self) -> operations_v1.AbstractOperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Only create a new client if we do not already have one. - if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.CancelOperation': [ - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - ], - 'google.longrunning.Operations.DeleteOperation': [ - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ], - 'google.longrunning.Operations.GetOperation': [ - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ], - 'google.longrunning.Operations.ListOperations': [ - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', - }, - ], - } - - rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1") - - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) - - # Return the client from cache. 
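-        # Editorial sketch (not generated code): once the transport exists, a
-        # long-running operation can be polled through this cached client; the
-        # operation name below is hypothetical.
-        #
-        #   op = transport.operations_client.get_operation(
-        #       name="projects/my-project/locations/us-central1/operations/operation-123")
-        #   if op.done:
-        #       print(op.error if op.HasField("error") else op.response)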
- return self._operations_client - - class _CreateAppConnection(_BaseAppConnectionsServiceRestTransport._BaseCreateAppConnection, AppConnectionsServiceRestStub): - def __hash__(self): - return hash("AppConnectionsServiceRestTransport.CreateAppConnection") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: app_connections_service.CreateAppConnectionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the create app connection method over HTTP. - - Args: - request (~.app_connections_service.CreateAppConnectionRequest): - The request object. Request message for - BeyondCorp.CreateAppConnection. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseAppConnectionsServiceRestTransport._BaseCreateAppConnection._get_http_options() - - request, metadata = self._interceptor.pre_create_app_connection(request, metadata) - transcoded_request = _BaseAppConnectionsServiceRestTransport._BaseCreateAppConnection._get_transcoded_request(http_options, request) - - body = _BaseAppConnectionsServiceRestTransport._BaseCreateAppConnection._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAppConnectionsServiceRestTransport._BaseCreateAppConnection._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.appconnections_v1.AppConnectionsServiceClient.CreateAppConnection", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService", - "rpcName": "CreateAppConnection", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AppConnectionsServiceRestTransport._CreateAppConnection._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
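-            # Editorial note: from_http_response() picks the exception subclass
-            # from the HTTP status, e.g. 404 -> core_exceptions.NotFound and
-            # 409 -> core_exceptions.Conflict, so callers can catch a specific
-            # error. Illustrative sketch (hypothetical request variable):
-            #
-            #   try:
-            #       transport.create_app_connection(request)
-            #   except core_exceptions.Conflict:
-            #       pass  # the AppConnection already exists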
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_app_connection(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_app_connection_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.beyondcorp.appconnections_v1.AppConnectionsServiceClient.create_app_connection", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService", - "rpcName": "CreateAppConnection", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteAppConnection(_BaseAppConnectionsServiceRestTransport._BaseDeleteAppConnection, AppConnectionsServiceRestStub): - def __hash__(self): - return hash("AppConnectionsServiceRestTransport.DeleteAppConnection") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: app_connections_service.DeleteAppConnectionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete app connection method over HTTP. - - Args: - request (~.app_connections_service.DeleteAppConnectionRequest): - The request object. Request message for - BeyondCorp.DeleteAppConnection. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = _BaseAppConnectionsServiceRestTransport._BaseDeleteAppConnection._get_http_options() - - request, metadata = self._interceptor.pre_delete_app_connection(request, metadata) - transcoded_request = _BaseAppConnectionsServiceRestTransport._BaseDeleteAppConnection._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAppConnectionsServiceRestTransport._BaseDeleteAppConnection._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.appconnections_v1.AppConnectionsServiceClient.DeleteAppConnection", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService", - "rpcName": "DeleteAppConnection", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AppConnectionsServiceRestTransport._DeleteAppConnection._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_delete_app_connection(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_app_connection_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.beyondcorp.appconnections_v1.AppConnectionsServiceClient.delete_app_connection", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService", - "rpcName": "DeleteAppConnection", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetAppConnection(_BaseAppConnectionsServiceRestTransport._BaseGetAppConnection, AppConnectionsServiceRestStub): - def __hash__(self): - return hash("AppConnectionsServiceRestTransport.GetAppConnection") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: app_connections_service.GetAppConnectionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - 
timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> app_connections_service.AppConnection: - r"""Call the get app connection method over HTTP. - - Args: - request (~.app_connections_service.GetAppConnectionRequest): - The request object. Request message for - BeyondCorp.GetAppConnection. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.app_connections_service.AppConnection: - A BeyondCorp AppConnection resource - represents a BeyondCorp protected - AppConnection to a remote application. - It creates all the necessary GCP - components needed for creating a - BeyondCorp protected AppConnection. - Multiple connectors can be authorised - for a single AppConnection. - - """ - - http_options = _BaseAppConnectionsServiceRestTransport._BaseGetAppConnection._get_http_options() - - request, metadata = self._interceptor.pre_get_app_connection(request, metadata) - transcoded_request = _BaseAppConnectionsServiceRestTransport._BaseGetAppConnection._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAppConnectionsServiceRestTransport._BaseGetAppConnection._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.appconnections_v1.AppConnectionsServiceClient.GetAppConnection", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService", - "rpcName": "GetAppConnection", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AppConnectionsServiceRestTransport._GetAppConnection._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
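-            # Editorial note on the parse below: AppConnection is a proto-plus
-            # message, and AppConnection.pb(resp) exposes the wrapped protobuf
-            # message, so json_format.Parse() fills `resp` in place. The same
-            # pattern in isolation:
-            #
-            #   msg = app_connections_service.AppConnection()
-            #   json_format.Parse('{"name": "projects/p/locations/l/appConnections/c"}',
-            #                     type(msg).pb(msg), ignore_unknown_fields=True)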
-            if response.status_code >= 400:
-                raise core_exceptions.from_http_response(response)
-
-            # Return the response
-            resp = app_connections_service.AppConnection()
-            pb_resp = app_connections_service.AppConnection.pb(resp)
-
-            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
-
-            resp = self._interceptor.post_get_app_connection(resp)
-            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
-            resp, _ = self._interceptor.post_get_app_connection_with_metadata(resp, response_metadata)
-            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER
-                try:
-                    response_payload = app_connections_service.AppConnection.to_json(resp)
-                except Exception:
-                    response_payload = None
-                http_response = {
-                    "payload": response_payload,
-                    "headers": dict(response.headers),
-                    "status": response.status_code,
-                }
-                _LOGGER.debug(
-                    "Received response for google.cloud.beyondcorp.appconnections_v1.AppConnectionsServiceClient.get_app_connection",
-                    extra = {
-                        "serviceName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService",
-                        "rpcName": "GetAppConnection",
-                        "metadata": http_response["headers"],
-                        "httpResponse": http_response,
-                    },
-                )
-            return resp
-
-    class _ListAppConnections(_BaseAppConnectionsServiceRestTransport._BaseListAppConnections, AppConnectionsServiceRestStub):
-        def __hash__(self):
-            return hash("AppConnectionsServiceRestTransport.ListAppConnections")
-
-        @staticmethod
-        def _get_response(
-            host,
-            metadata,
-            query_params,
-            session,
-            timeout,
-            transcoded_request,
-            body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
-            headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
-            response = getattr(session, method)(
-                "{host}{uri}".format(host=host, uri=uri),
-                timeout=timeout,
-                headers=headers,
-                params=rest_helpers.flatten_query_params(query_params, strict=True),
-                )
-            return response
-
-        def __call__(self,
-                request: app_connections_service.ListAppConnectionsRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
-                ) -> app_connections_service.ListAppConnectionsResponse:
-            r"""Call the list app connections method over HTTP.
-
-            Args:
-                request (~.app_connections_service.ListAppConnectionsRequest):
-                    The request object. Request message for
-                    BeyondCorp.ListAppConnections.
-                retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                    should be retried.
-                timeout (float): The timeout for this request.
-                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                    sent along with the request as metadata. Normally, each value must be of type `str`,
-                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                    be of type `bytes`.
-
-            Returns:
-                ~.app_connections_service.ListAppConnectionsResponse:
-                    Response message for
-                    BeyondCorp.ListAppConnections.
- - """ - - http_options = _BaseAppConnectionsServiceRestTransport._BaseListAppConnections._get_http_options() - - request, metadata = self._interceptor.pre_list_app_connections(request, metadata) - transcoded_request = _BaseAppConnectionsServiceRestTransport._BaseListAppConnections._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAppConnectionsServiceRestTransport._BaseListAppConnections._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.appconnections_v1.AppConnectionsServiceClient.ListAppConnections", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService", - "rpcName": "ListAppConnections", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AppConnectionsServiceRestTransport._ListAppConnections._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = app_connections_service.ListAppConnectionsResponse() - pb_resp = app_connections_service.ListAppConnectionsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_app_connections(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_app_connections_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = app_connections_service.ListAppConnectionsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.beyondcorp.appconnections_v1.AppConnectionsServiceClient.list_app_connections", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService", - "rpcName": "ListAppConnections", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ResolveAppConnections(_BaseAppConnectionsServiceRestTransport._BaseResolveAppConnections, AppConnectionsServiceRestStub): - def __hash__(self): - return hash("AppConnectionsServiceRestTransport.ResolveAppConnections") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return 
response - - def __call__(self, - request: app_connections_service.ResolveAppConnectionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> app_connections_service.ResolveAppConnectionsResponse: - r"""Call the resolve app connections method over HTTP. - - Args: - request (~.app_connections_service.ResolveAppConnectionsRequest): - The request object. Request message for - BeyondCorp.ResolveAppConnections. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.app_connections_service.ResolveAppConnectionsResponse: - Response message for - BeyondCorp.ResolveAppConnections. - - """ - - http_options = _BaseAppConnectionsServiceRestTransport._BaseResolveAppConnections._get_http_options() - - request, metadata = self._interceptor.pre_resolve_app_connections(request, metadata) - transcoded_request = _BaseAppConnectionsServiceRestTransport._BaseResolveAppConnections._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAppConnectionsServiceRestTransport._BaseResolveAppConnections._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.appconnections_v1.AppConnectionsServiceClient.ResolveAppConnections", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService", - "rpcName": "ResolveAppConnections", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AppConnectionsServiceRestTransport._ResolveAppConnections._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
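-            # Editorial note: this transport call returns a single page; the
-            # pager in the client layer follows next_page_token. Manual paging
-            # sketch (hypothetical request variable):
-            #
-            #   while True:
-            #       page = transport.resolve_app_connections(request)
-            #       for detail in page.app_connection_details:
-            #           ...
-            #       if not page.next_page_token:
-            #           break
-            #       request.page_token = page.next_page_token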
-            if response.status_code >= 400:
-                raise core_exceptions.from_http_response(response)
-
-            # Return the response
-            resp = app_connections_service.ResolveAppConnectionsResponse()
-            pb_resp = app_connections_service.ResolveAppConnectionsResponse.pb(resp)
-
-            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
-
-            resp = self._interceptor.post_resolve_app_connections(resp)
-            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
-            resp, _ = self._interceptor.post_resolve_app_connections_with_metadata(resp, response_metadata)
-            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER
-                try:
-                    response_payload = app_connections_service.ResolveAppConnectionsResponse.to_json(resp)
-                except Exception:
-                    response_payload = None
-                http_response = {
-                    "payload": response_payload,
-                    "headers": dict(response.headers),
-                    "status": response.status_code,
-                }
-                _LOGGER.debug(
-                    "Received response for google.cloud.beyondcorp.appconnections_v1.AppConnectionsServiceClient.resolve_app_connections",
-                    extra = {
-                        "serviceName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService",
-                        "rpcName": "ResolveAppConnections",
-                        "metadata": http_response["headers"],
-                        "httpResponse": http_response,
-                    },
-                )
-            return resp
-
-    class _UpdateAppConnection(_BaseAppConnectionsServiceRestTransport._BaseUpdateAppConnection, AppConnectionsServiceRestStub):
-        def __hash__(self):
-            return hash("AppConnectionsServiceRestTransport.UpdateAppConnection")
-
-        @staticmethod
-        def _get_response(
-            host,
-            metadata,
-            query_params,
-            session,
-            timeout,
-            transcoded_request,
-            body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
-            headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
-            response = getattr(session, method)(
-                "{host}{uri}".format(host=host, uri=uri),
-                timeout=timeout,
-                headers=headers,
-                params=rest_helpers.flatten_query_params(query_params, strict=True),
-                data=body,
-                )
-            return response
-
-        def __call__(self,
-                request: app_connections_service.UpdateAppConnectionRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
-                ) -> operations_pb2.Operation:
-            r"""Call the update app connection method over HTTP.
-
-            Args:
-                request (~.app_connections_service.UpdateAppConnectionRequest):
-                    The request object. Request message for
-                    BeyondCorp.UpdateAppConnection.
-                retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                    should be retried.
-                timeout (float): The timeout for this request.
-                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                    sent along with the request as metadata. Normally, each value must be of type `str`,
-                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                    be of type `bytes`.
-
-            Returns:
-                ~.operations_pb2.Operation:
-                    This resource represents a
-                    long-running operation that is the
-                    result of a network API call.
- - """ - - http_options = _BaseAppConnectionsServiceRestTransport._BaseUpdateAppConnection._get_http_options() - - request, metadata = self._interceptor.pre_update_app_connection(request, metadata) - transcoded_request = _BaseAppConnectionsServiceRestTransport._BaseUpdateAppConnection._get_transcoded_request(http_options, request) - - body = _BaseAppConnectionsServiceRestTransport._BaseUpdateAppConnection._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAppConnectionsServiceRestTransport._BaseUpdateAppConnection._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.appconnections_v1.AppConnectionsServiceClient.UpdateAppConnection", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService", - "rpcName": "UpdateAppConnection", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AppConnectionsServiceRestTransport._UpdateAppConnection._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_app_connection(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_app_connection_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.beyondcorp.appconnections_v1.AppConnectionsServiceClient.update_app_connection", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService", - "rpcName": "UpdateAppConnection", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - @property - def create_app_connection(self) -> Callable[ - [app_connections_service.CreateAppConnectionRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateAppConnection(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_app_connection(self) -> Callable[ - [app_connections_service.DeleteAppConnectionRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._DeleteAppConnection(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_app_connection(self) -> Callable[ - [app_connections_service.GetAppConnectionRequest], - app_connections_service.AppConnection]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetAppConnection(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_app_connections(self) -> Callable[ - [app_connections_service.ListAppConnectionsRequest], - app_connections_service.ListAppConnectionsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListAppConnections(self._session, self._host, self._interceptor) # type: ignore - - @property - def resolve_app_connections(self) -> Callable[ - [app_connections_service.ResolveAppConnectionsRequest], - app_connections_service.ResolveAppConnectionsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ResolveAppConnections(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_app_connection(self) -> Callable[ - [app_connections_service.UpdateAppConnectionRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateAppConnection(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_location(self): - return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore - - class _GetLocation(_BaseAppConnectionsServiceRestTransport._BaseGetLocation, AppConnectionsServiceRestStub): - def __hash__(self): - return hash("AppConnectionsServiceRestTransport.GetLocation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: locations_pb2.GetLocationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.Location: - - r"""Call the get location method over HTTP. - - Args: - request (locations_pb2.GetLocationRequest): - The request object for GetLocation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - locations_pb2.Location: Response from GetLocation method. 
- """ - - http_options = _BaseAppConnectionsServiceRestTransport._BaseGetLocation._get_http_options() - - request, metadata = self._interceptor.pre_get_location(request, metadata) - transcoded_request = _BaseAppConnectionsServiceRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAppConnectionsServiceRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.appconnections_v1.AppConnectionsServiceClient.GetLocation", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService", - "rpcName": "GetLocation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AppConnectionsServiceRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = locations_pb2.Location() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_location(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.beyondcorp.appconnections_v1.AppConnectionsServiceAsyncClient.GetLocation", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService", - "rpcName": "GetLocation", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def list_locations(self): - return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore - - class _ListLocations(_BaseAppConnectionsServiceRestTransport._BaseListLocations, AppConnectionsServiceRestStub): - def __hash__(self): - return hash("AppConnectionsServiceRestTransport.ListLocations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: locations_pb2.ListLocationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.ListLocationsResponse: - - r"""Call the list 
locations method over HTTP. - - Args: - request (locations_pb2.ListLocationsRequest): - The request object for ListLocations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - locations_pb2.ListLocationsResponse: Response from ListLocations method. - """ - - http_options = _BaseAppConnectionsServiceRestTransport._BaseListLocations._get_http_options() - - request, metadata = self._interceptor.pre_list_locations(request, metadata) - transcoded_request = _BaseAppConnectionsServiceRestTransport._BaseListLocations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAppConnectionsServiceRestTransport._BaseListLocations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.appconnections_v1.AppConnectionsServiceClient.ListLocations", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService", - "rpcName": "ListLocations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AppConnectionsServiceRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
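-            # Editorial note: the locations mixin uses plain protobuf messages
-            # (locations_pb2), not proto-plus wrappers, so the response below is
-            # parsed directly into `resp` without the .pb() unwrap used for the
-            # AppConnection messages above.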
-            if response.status_code >= 400:
-                raise core_exceptions.from_http_response(response)
-
-            content = response.content.decode("utf-8")
-            resp = locations_pb2.ListLocationsResponse()
-            resp = json_format.Parse(content, resp)
-            resp = self._interceptor.post_list_locations(resp)
-            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER
-                try:
-                    response_payload = json_format.MessageToJson(resp)
-                except Exception:
-                    response_payload = None
-                http_response = {
-                    "payload": response_payload,
-                    "headers": dict(response.headers),
-                    "status": response.status_code,
-                }
-                _LOGGER.debug(
-                    "Received response for google.cloud.beyondcorp.appconnections_v1.AppConnectionsServiceClient.ListLocations",
-                    extra = {
-                        "serviceName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService",
-                        "rpcName": "ListLocations",
-                        "httpResponse": http_response,
-                        "metadata": http_response["headers"],
-                    },
-                )
-            return resp
-
-    @property
-    def get_iam_policy(self):
-        return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore
-
-    class _GetIamPolicy(_BaseAppConnectionsServiceRestTransport._BaseGetIamPolicy, AppConnectionsServiceRestStub):
-        def __hash__(self):
-            return hash("AppConnectionsServiceRestTransport.GetIamPolicy")
-
-        @staticmethod
-        def _get_response(
-            host,
-            metadata,
-            query_params,
-            session,
-            timeout,
-            transcoded_request,
-            body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
-            headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
-            response = getattr(session, method)(
-                "{host}{uri}".format(host=host, uri=uri),
-                timeout=timeout,
-                headers=headers,
-                params=rest_helpers.flatten_query_params(query_params, strict=True),
-                )
-            return response
-
-        def __call__(self,
-                request: iam_policy_pb2.GetIamPolicyRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
-                ) -> policy_pb2.Policy:
-
-            r"""Call the get iam policy method over HTTP.
-
-            Args:
-                request (iam_policy_pb2.GetIamPolicyRequest):
-                    The request object for GetIamPolicy method.
-                retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                    should be retried.
-                timeout (float): The timeout for this request.
-                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                    sent along with the request as metadata. Normally, each value must be of type `str`,
-                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                    be of type `bytes`.
-
-            Returns:
-                policy_pb2.Policy: Response from GetIamPolicy method.
- """ - - http_options = _BaseAppConnectionsServiceRestTransport._BaseGetIamPolicy._get_http_options() - - request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) - transcoded_request = _BaseAppConnectionsServiceRestTransport._BaseGetIamPolicy._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAppConnectionsServiceRestTransport._BaseGetIamPolicy._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.appconnections_v1.AppConnectionsServiceClient.GetIamPolicy", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService", - "rpcName": "GetIamPolicy", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AppConnectionsServiceRestTransport._GetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = policy_pb2.Policy() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_iam_policy(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.beyondcorp.appconnections_v1.AppConnectionsServiceAsyncClient.GetIamPolicy", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService", - "rpcName": "GetIamPolicy", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def set_iam_policy(self): - return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - - class _SetIamPolicy(_BaseAppConnectionsServiceRestTransport._BaseSetIamPolicy, AppConnectionsServiceRestStub): - def __hash__(self): - return hash("AppConnectionsServiceRestTransport.SetIamPolicy") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: iam_policy_pb2.SetIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> policy_pb2.Policy: - - r"""Call the set iam 
policy method over HTTP. - - Args: - request (iam_policy_pb2.SetIamPolicyRequest): - The request object for SetIamPolicy method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - policy_pb2.Policy: Response from SetIamPolicy method. - """ - - http_options = _BaseAppConnectionsServiceRestTransport._BaseSetIamPolicy._get_http_options() - - request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) - transcoded_request = _BaseAppConnectionsServiceRestTransport._BaseSetIamPolicy._get_transcoded_request(http_options, request) - - body = _BaseAppConnectionsServiceRestTransport._BaseSetIamPolicy._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAppConnectionsServiceRestTransport._BaseSetIamPolicy._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.appconnections_v1.AppConnectionsServiceClient.SetIamPolicy", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService", - "rpcName": "SetIamPolicy", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AppConnectionsServiceRestTransport._SetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
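-            # Editorial sketch of a read-modify-write across these two IAM
-            # methods (hypothetical resource name); round-tripping the policy,
-            # including its etag, lets the server detect concurrent edits:
-            #
-            #   get_req = iam_policy_pb2.GetIamPolicyRequest(resource=name)
-            #   policy = transport.get_iam_policy(get_req)
-            #   policy.bindings.add(role="roles/viewer", members=["user:ada@example.com"])
-            #   set_req = iam_policy_pb2.SetIamPolicyRequest(resource=name, policy=policy)
-            #   transport.set_iam_policy(set_req)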
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = policy_pb2.Policy() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_set_iam_policy(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.beyondcorp.appconnections_v1.AppConnectionsServiceAsyncClient.SetIamPolicy", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService", - "rpcName": "SetIamPolicy", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def test_iam_permissions(self): - return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore - - class _TestIamPermissions(_BaseAppConnectionsServiceRestTransport._BaseTestIamPermissions, AppConnectionsServiceRestStub): - def __hash__(self): - return hash("AppConnectionsServiceRestTransport.TestIamPermissions") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: iam_policy_pb2.TestIamPermissionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - - r"""Call the test iam permissions method over HTTP. - - Args: - request (iam_policy_pb2.TestIamPermissionsRequest): - The request object for TestIamPermissions method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. 
- """ - - http_options = _BaseAppConnectionsServiceRestTransport._BaseTestIamPermissions._get_http_options() - - request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) - transcoded_request = _BaseAppConnectionsServiceRestTransport._BaseTestIamPermissions._get_transcoded_request(http_options, request) - - body = _BaseAppConnectionsServiceRestTransport._BaseTestIamPermissions._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAppConnectionsServiceRestTransport._BaseTestIamPermissions._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.appconnections_v1.AppConnectionsServiceClient.TestIamPermissions", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService", - "rpcName": "TestIamPermissions", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AppConnectionsServiceRestTransport._TestIamPermissions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = iam_policy_pb2.TestIamPermissionsResponse() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_test_iam_permissions(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.beyondcorp.appconnections_v1.AppConnectionsServiceAsyncClient.TestIamPermissions", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService", - "rpcName": "TestIamPermissions", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - - class _CancelOperation(_BaseAppConnectionsServiceRestTransport._BaseCancelOperation, AppConnectionsServiceRestStub): - def __hash__(self): - return hash("AppConnectionsServiceRestTransport.CancelOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: 
operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - - r"""Call the cancel operation method over HTTP. - - Args: - request (operations_pb2.CancelOperationRequest): - The request object for CancelOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - - http_options = _BaseAppConnectionsServiceRestTransport._BaseCancelOperation._get_http_options() - - request, metadata = self._interceptor.pre_cancel_operation(request, metadata) - transcoded_request = _BaseAppConnectionsServiceRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) - - body = _BaseAppConnectionsServiceRestTransport._BaseCancelOperation._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAppConnectionsServiceRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.appconnections_v1.AppConnectionsServiceClient.CancelOperation", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService", - "rpcName": "CancelOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AppConnectionsServiceRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_cancel_operation(None) - - @property - def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - - class _DeleteOperation(_BaseAppConnectionsServiceRestTransport._BaseDeleteOperation, AppConnectionsServiceRestStub): - def __hash__(self): - return hash("AppConnectionsServiceRestTransport.DeleteOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - - r"""Call the delete operation method over HTTP. - - Args: - request (operations_pb2.DeleteOperationRequest): - The request object for DeleteOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - - http_options = _BaseAppConnectionsServiceRestTransport._BaseDeleteOperation._get_http_options() - - request, metadata = self._interceptor.pre_delete_operation(request, metadata) - transcoded_request = _BaseAppConnectionsServiceRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAppConnectionsServiceRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.appconnections_v1.AppConnectionsServiceClient.DeleteOperation", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService", - "rpcName": "DeleteOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AppConnectionsServiceRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_delete_operation(None) - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(_BaseAppConnectionsServiceRestTransport._BaseGetOperation, AppConnectionsServiceRestStub): - def __hash__(self): - return hash("AppConnectionsServiceRestTransport.GetOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.Operation: Response from GetOperation method. - """ - - http_options = _BaseAppConnectionsServiceRestTransport._BaseGetOperation._get_http_options() - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - transcoded_request = _BaseAppConnectionsServiceRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAppConnectionsServiceRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.appconnections_v1.AppConnectionsServiceClient.GetOperation", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService", - "rpcName": "GetOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AppConnectionsServiceRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.Operation() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.beyondcorp.appconnections_v1.AppConnectionsServiceAsyncClient.GetOperation", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService", - "rpcName": "GetOperation", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - - class _ListOperations(_BaseAppConnectionsServiceRestTransport._BaseListOperations, AppConnectionsServiceRestStub): - def __hash__(self): - return hash("AppConnectionsServiceRestTransport.ListOperations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.ListOperationsResponse: - - r"""Call the list operations method over HTTP. - - Args: - request (operations_pb2.ListOperationsRequest): - The request object for ListOperations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.ListOperationsResponse: Response from ListOperations method. 
- """ - - http_options = _BaseAppConnectionsServiceRestTransport._BaseListOperations._get_http_options() - - request, metadata = self._interceptor.pre_list_operations(request, metadata) - transcoded_request = _BaseAppConnectionsServiceRestTransport._BaseListOperations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAppConnectionsServiceRestTransport._BaseListOperations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.appconnections_v1.AppConnectionsServiceClient.ListOperations", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService", - "rpcName": "ListOperations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AppConnectionsServiceRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.ListOperationsResponse() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_list_operations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.beyondcorp.appconnections_v1.AppConnectionsServiceAsyncClient.ListOperations", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService", - "rpcName": "ListOperations", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'AppConnectionsServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/transports/rest_base.py b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/transports/rest_base.py deleted file mode 100644 index 59a932b315d4..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/transports/rest_base.py +++ /dev/null @@ -1,633 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from .base import AppConnectionsServiceTransport, DEFAULT_CLIENT_INFO - -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - - -from google.cloud.beyondcorp_appconnections_v1.types import app_connections_service -from google.longrunning import operations_pb2 # type: ignore - - -class _BaseAppConnectionsServiceRestTransport(AppConnectionsServiceTransport): - """Base REST backend transport for AppConnectionsService. - - Note: This class is not meant to be used directly. Use its sync and - async sub-classes instead. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'beyondcorp.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - Args: - host (Optional[str]): - The hostname to connect to (default: 'beyondcorp.googleapis.com'). - credentials (Optional[Any]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. 
- """ - # Run the base constructor - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - - class _BaseCreateAppConnection: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/appConnections', - 'body': 'app_connection', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = app_connections_service.CreateAppConnectionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAppConnectionsServiceRestTransport._BaseCreateAppConnection._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteAppConnection: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/appConnections/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = app_connections_service.DeleteAppConnectionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAppConnectionsServiceRestTransport._BaseDeleteAppConnection._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetAppConnection: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - 
- @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/appConnections/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = app_connections_service.GetAppConnectionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAppConnectionsServiceRestTransport._BaseGetAppConnection._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListAppConnections: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/appConnections', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = app_connections_service.ListAppConnectionsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAppConnectionsServiceRestTransport._BaseListAppConnections._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseResolveAppConnections: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "appConnectorId" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/appConnections:resolve', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = app_connections_service.ResolveAppConnectionsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAppConnectionsServiceRestTransport._BaseResolveAppConnections._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateAppConnection: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } - - @classmethod - 
def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{app_connection.name=projects/*/locations/*/appConnections/*}', - 'body': 'app_connection', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = app_connections_service.UpdateAppConnectionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAppConnectionsServiceRestTransport._BaseUpdateAppConnection._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetLocation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseListLocations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*}/locations', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseGetIamPolicy: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{resource=projects/*/locations/*/appConnections/*}:getIamPolicy', - }, - { - 'method': 'get', - 'uri': '/v1/{resource=projects/*/locations/*/appConnectors/*}:getIamPolicy', - }, - { - 'method': 'get', - 'uri': '/v1/{resource=projects/*/locations/*/appGateways/*}:getIamPolicy', - }, - { - 'method': 'get', - 'uri': '/v1/{resource=projects/*/locations/*/clientConnectorServices/*}:getIamPolicy', - }, - { - 'method': 'get', - 'uri': '/v1/{resource=projects/*/locations/*/clientGateways/*}:getIamPolicy', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - 
request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseSetIamPolicy: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/appConnections/*}:setIamPolicy', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/appConnectors/*}:setIamPolicy', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/appGateways/*}:setIamPolicy', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/clientConnectorServices/*}:setIamPolicy', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/clientGateways/*}:setIamPolicy', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - body = json.dumps(transcoded_request['body']) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseTestIamPermissions: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/appConnections/*}:testIamPermissions', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/appConnectors/*}:testIamPermissions', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/appGateways/*}:testIamPermissions', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/clientConnectorServices/*}:testIamPermissions', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/clientGateways/*}:testIamPermissions', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - body = json.dumps(transcoded_request['body']) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseCancelOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def 
_get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - body = json.dumps(transcoded_request['body']) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseDeleteOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseGetOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseListOperations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - -__all__=( - '_BaseAppConnectionsServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/types/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/types/__init__.py deleted file mode 100644 index f3e01c90f83e..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/types/__init__.py +++ /dev/null @@ -1,40 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .app_connections_service import ( - AppConnection, - AppConnectionOperationMetadata, - CreateAppConnectionRequest, - DeleteAppConnectionRequest, - GetAppConnectionRequest, - ListAppConnectionsRequest, - ListAppConnectionsResponse, - ResolveAppConnectionsRequest, - ResolveAppConnectionsResponse, - UpdateAppConnectionRequest, -) - -__all__ = ( - 'AppConnection', - 'AppConnectionOperationMetadata', - 'CreateAppConnectionRequest', - 'DeleteAppConnectionRequest', - 'GetAppConnectionRequest', - 'ListAppConnectionsRequest', - 'ListAppConnectionsResponse', - 'ResolveAppConnectionsRequest', - 'ResolveAppConnectionsResponse', - 'UpdateAppConnectionRequest', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/types/app_connections_service.py b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/types/app_connections_service.py deleted file mode 100644 index ffe7028ee0a7..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/google/cloud/beyondcorp_appconnections_v1/types/app_connections_service.py +++ /dev/null @@ -1,697 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.beyondcorp.appconnections.v1', - manifest={ - 'ListAppConnectionsRequest', - 'ListAppConnectionsResponse', - 'GetAppConnectionRequest', - 'CreateAppConnectionRequest', - 'UpdateAppConnectionRequest', - 'DeleteAppConnectionRequest', - 'ResolveAppConnectionsRequest', - 'ResolveAppConnectionsResponse', - 'AppConnection', - 'AppConnectionOperationMetadata', - }, -) - - -class ListAppConnectionsRequest(proto.Message): - r"""Request message for BeyondCorp.ListAppConnections. - - Attributes: - parent (str): - Required. The resource name of the AppConnection location - using the form: - ``projects/{project_id}/locations/{location_id}`` - page_size (int): - Optional. The maximum number of items to return. If not - specified, a default value of 50 will be used by the - service. Regardless of the page_size value, the response may - include a partial list and a caller should only rely on - response's - [next_page_token][BeyondCorp.ListAppConnectionsResponse.next_page_token] - to determine if there are more instances left to be queried. 
-        page_token (str):
-            Optional. The next_page_token value returned from a previous
-            ListAppConnectionsRequest, if any.
-        filter (str):
-            Optional. A filter specifying constraints of
-            a list operation.
-        order_by (str):
-            Optional. Specifies the ordering of results. See `Sorting
-            order <https://cloud.google.com/apis/design/design_patterns#sorting_order>`__
-            for more information.
-    """
-
-    parent: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    page_size: int = proto.Field(
-        proto.INT32,
-        number=2,
-    )
-    page_token: str = proto.Field(
-        proto.STRING,
-        number=3,
-    )
-    filter: str = proto.Field(
-        proto.STRING,
-        number=4,
-    )
-    order_by: str = proto.Field(
-        proto.STRING,
-        number=5,
-    )
-
-
-class ListAppConnectionsResponse(proto.Message):
-    r"""Response message for BeyondCorp.ListAppConnections.
-
-    Attributes:
-        app_connections (MutableSequence[google.cloud.beyondcorp_appconnections_v1.types.AppConnection]):
-            A list of BeyondCorp AppConnections in the
-            project.
-        next_page_token (str):
-            A token to retrieve the next page of results,
-            or empty if there are no more results in the
-            list.
-        unreachable (MutableSequence[str]):
-            A list of locations that could not be
-            reached.
-    """
-
-    @property
-    def raw_page(self):
-        return self
-
-    app_connections: MutableSequence['AppConnection'] = proto.RepeatedField(
-        proto.MESSAGE,
-        number=1,
-        message='AppConnection',
-    )
-    next_page_token: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-    unreachable: MutableSequence[str] = proto.RepeatedField(
-        proto.STRING,
-        number=3,
-    )
-
-
-class GetAppConnectionRequest(proto.Message):
-    r"""Request message for BeyondCorp.GetAppConnection.
-
-    Attributes:
-        name (str):
-            Required. BeyondCorp AppConnection name using the form:
-            ``projects/{project_id}/locations/{location_id}/appConnections/{app_connection_id}``
-    """
-
-    name: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-
-
-class CreateAppConnectionRequest(proto.Message):
-    r"""Request message for BeyondCorp.CreateAppConnection.
-
-    Attributes:
-        parent (str):
-            Required. The resource project name of the AppConnection
-            location using the form:
-            ``projects/{project_id}/locations/{location_id}``
-        app_connection_id (str):
-            Optional. User-settable AppConnection resource ID.
-
-            -  Must start with a letter.
-            -  Must contain between 4-63 characters from
-               ``/[a-z][0-9]-/``.
-            -  Must end with a number or a letter.
-        app_connection (google.cloud.beyondcorp_appconnections_v1.types.AppConnection):
-            Required. A BeyondCorp AppConnection
-            resource.
-        request_id (str):
-            Optional. An optional request ID to identify
-            requests. Specify a unique request ID so that if
-            you must retry your request, the server will
-            know to ignore the request if it has already
-            been completed. The server will guarantee that
-            for at least 60 minutes since the first request.
-
-            For example, consider a situation where you make
-            an initial request and the request times out.
-            If you make the request again with the same
-            request ID, the server can check if the original
-            operation with the same request ID was received,
-            and if so, will ignore the second request. This
-            prevents clients from accidentally creating
-            duplicate commitments.
-
-            The request ID must be a valid UUID with the
-            exception that zero UUID is not supported
-            (00000000-0000-0000-0000-000000000000).
-        validate_only (bool):
-            Optional. If set, validates request by
-            executing a dry-run which would not alter the
-            resource in any way.
-    """
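The ``request_id`` contract above is what makes creates retry-safe: mint the UUID once, then reuse it on every retry so the server can discard duplicates. A sketch under the assumption that the usual GAPIC client and types are re-exported from ``google.cloud.beyondcorp_appconnections_v1`` (all resource names are placeholders):

.. code-block:: python

    import uuid

    from google.cloud.beyondcorp_appconnections_v1 import (
        AppConnection,
        AppConnectionsServiceClient,
        CreateAppConnectionRequest,
    )

    client = AppConnectionsServiceClient()

    app_connection = AppConnection(
        name="name_value",
        type_=AppConnection.Type.TCP_PROXY,
        application_endpoint=AppConnection.ApplicationEndpoint(
            host="10.0.0.1",
            port=8080,
        ),
    )

    # Generate the request ID once and reuse it on every retry of this call,
    # so the server can ignore duplicate attempts for roughly 60 minutes.
    request = CreateAppConnectionRequest(
        parent="projects/my-project/locations/us-central1",
        app_connection=app_connection,
        request_id=str(uuid.uuid4()),
    )

    operation = client.create_app_connection(request=request)
    response = operation.result()  # block until the long-running operation completes
    print(response)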
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - app_connection_id: str = proto.Field( - proto.STRING, - number=2, - ) - app_connection: 'AppConnection' = proto.Field( - proto.MESSAGE, - number=3, - message='AppConnection', - ) - request_id: str = proto.Field( - proto.STRING, - number=4, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=5, - ) - - -class UpdateAppConnectionRequest(proto.Message): - r"""Request message for BeyondCorp.UpdateAppConnection. - - Attributes: - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. At least one path must - be supplied in this field. The elements of the repeated - paths field may only include these fields from - [BeyondCorp.AppConnection]: - - - ``labels`` - - ``display_name`` - - ``application_endpoint`` - - ``connectors`` - app_connection (google.cloud.beyondcorp_appconnections_v1.types.AppConnection): - Required. AppConnection message with updated fields. Only - supported fields specified in update_mask are updated. - request_id (str): - Optional. An optional request ID to identify - requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes since the first request. - - For example, consider a situation where you make - an initial request and t he request times out. - If you make the request again with the same - request ID, the server can check if original - operation with the same request ID was received, - and if so, will ignore the second request. This - prevents clients from accidentally creating - duplicate commitments. - - The request ID must be a valid UUID with the - exception that zero UUID is not supported - (00000000-0000-0000-0000-000000000000). - validate_only (bool): - Optional. If set, validates request by - executing a dry-run which would not alter the - resource in any way. - allow_missing (bool): - Optional. If set as true, will create the - resource if it is not found. - """ - - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=1, - message=field_mask_pb2.FieldMask, - ) - app_connection: 'AppConnection' = proto.Field( - proto.MESSAGE, - number=2, - message='AppConnection', - ) - request_id: str = proto.Field( - proto.STRING, - number=3, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=4, - ) - allow_missing: bool = proto.Field( - proto.BOOL, - number=5, - ) - - -class DeleteAppConnectionRequest(proto.Message): - r"""Request message for BeyondCorp.DeleteAppConnection. - - Attributes: - name (str): - Required. BeyondCorp Connector name using the form: - ``projects/{project_id}/locations/{location_id}/appConnections/{app_connection_id}`` - request_id (str): - Optional. An optional request ID to identify - requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes after the first request. - - For example, consider a situation where you make - an initial request and t he request times out. - If you make the request again with the same - request ID, the server can check if original - operation with the same request ID was received, - and if so, will ignore the second request. This - prevents clients from accidentally creating - duplicate commitments. 
- - The request ID must be a valid UUID with the - exception that zero UUID is not supported - (00000000-0000-0000-0000-000000000000). - validate_only (bool): - Optional. If set, validates request by - executing a dry-run which would not alter the - resource in any way. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - request_id: str = proto.Field( - proto.STRING, - number=2, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class ResolveAppConnectionsRequest(proto.Message): - r"""Request message for BeyondCorp.ResolveAppConnections. - - Attributes: - parent (str): - Required. The resource name of the AppConnection location - using the form: - ``projects/{project_id}/locations/{location_id}`` - app_connector_id (str): - Required. BeyondCorp Connector name of the connector - associated with those AppConnections using the form: - ``projects/{project_id}/locations/{location_id}/appConnectors/{app_connector_id}`` - page_size (int): - Optional. The maximum number of items to return. If not - specified, a default value of 50 will be used by the - service. Regardless of the page_size value, the response may - include a partial list and a caller should only rely on - response's - [next_page_token][BeyondCorp.ResolveAppConnectionsResponse.next_page_token] - to determine if there are more instances left to be queried. - page_token (str): - Optional. The next_page_token value returned from a previous - ResolveAppConnectionsResponse, if any. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - app_connector_id: str = proto.Field( - proto.STRING, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - page_token: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ResolveAppConnectionsResponse(proto.Message): - r"""Response message for BeyondCorp.ResolveAppConnections. - - Attributes: - app_connection_details (MutableSequence[google.cloud.beyondcorp_appconnections_v1.types.ResolveAppConnectionsResponse.AppConnectionDetails]): - A list of BeyondCorp AppConnections with - details in the project. - next_page_token (str): - A token to retrieve the next page of results, - or empty if there are no more results in the - list. - unreachable (MutableSequence[str]): - A list of locations that could not be - reached. - """ - - class AppConnectionDetails(proto.Message): - r"""Details of the AppConnection. - - Attributes: - app_connection (google.cloud.beyondcorp_appconnections_v1.types.AppConnection): - A BeyondCorp AppConnection in the project. - recent_mig_vms (MutableSequence[str]): - If type=GCP_REGIONAL_MIG, contains most recent VM instances, - like - ``https://www.googleapis.com/compute/v1/projects/{project_id}/zones/{zone_id}/instances/{instance_id}``. - """ - - app_connection: 'AppConnection' = proto.Field( - proto.MESSAGE, - number=1, - message='AppConnection', - ) - recent_mig_vms: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - - @property - def raw_page(self): - return self - - app_connection_details: MutableSequence[AppConnectionDetails] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=AppConnectionDetails, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class AppConnection(proto.Message): - r"""A BeyondCorp AppConnection resource represents a BeyondCorp - protected AppConnection to a remote application. 
It creates all - the necessary GCP components needed for creating a BeyondCorp - protected AppConnection. Multiple connectors can be authorised - for a single AppConnection. - - Attributes: - name (str): - Required. Unique resource name of the - AppConnection. The name is ignored when creating - a AppConnection. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Timestamp when the resource was - created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Timestamp when the resource was - last modified. - labels (MutableMapping[str, str]): - Optional. Resource labels to represent user - provided metadata. - display_name (str): - Optional. An arbitrary user-provided name for - the AppConnection. Cannot exceed 64 characters. - uid (str): - Output only. A unique identifier for the - instance generated by the system. - type_ (google.cloud.beyondcorp_appconnections_v1.types.AppConnection.Type): - Required. The type of network connectivity - used by the AppConnection. - application_endpoint (google.cloud.beyondcorp_appconnections_v1.types.AppConnection.ApplicationEndpoint): - Required. Address of the remote application - endpoint for the BeyondCorp AppConnection. - connectors (MutableSequence[str]): - Optional. List of - [google.cloud.beyondcorp.v1main.Connector.name] that are - authorised to be associated with this AppConnection. - state (google.cloud.beyondcorp_appconnections_v1.types.AppConnection.State): - Output only. The current state of the - AppConnection. - gateway (google.cloud.beyondcorp_appconnections_v1.types.AppConnection.Gateway): - Optional. Gateway used by the AppConnection. - """ - class Type(proto.Enum): - r"""Enum containing list of all possible network connectivity - options supported by BeyondCorp AppConnection. - - Values: - TYPE_UNSPECIFIED (0): - Default value. This value is unused. - TCP_PROXY (1): - TCP Proxy based BeyondCorp AppConnection. API - will default to this if unset. - """ - TYPE_UNSPECIFIED = 0 - TCP_PROXY = 1 - - class State(proto.Enum): - r"""Represents the different states of a AppConnection. - - Values: - STATE_UNSPECIFIED (0): - Default value. This value is unused. - CREATING (1): - AppConnection is being created. - CREATED (2): - AppConnection has been created. - UPDATING (3): - AppConnection's configuration is being - updated. - DELETING (4): - AppConnection is being deleted. - DOWN (5): - AppConnection is down and may be restored in - the future. This happens when CCFE sends - ProjectState = OFF. - """ - STATE_UNSPECIFIED = 0 - CREATING = 1 - CREATED = 2 - UPDATING = 3 - DELETING = 4 - DOWN = 5 - - class ApplicationEndpoint(proto.Message): - r"""ApplicationEndpoint represents a remote application endpoint. - - Attributes: - host (str): - Required. Hostname or IP address of the - remote application endpoint. - port (int): - Required. Port of the remote application - endpoint. - """ - - host: str = proto.Field( - proto.STRING, - number=1, - ) - port: int = proto.Field( - proto.INT32, - number=2, - ) - - class Gateway(proto.Message): - r"""Gateway represents a user facing component that serves as an - entrance to enable connectivity. - - Attributes: - type_ (google.cloud.beyondcorp_appconnections_v1.types.AppConnection.Gateway.Type): - Required. The type of hosting used by the - gateway. - uri (str): - Output only. Server-defined URI for this - resource. - ingress_port (int): - Output only. Ingress port reserved on the - gateways for this AppConnection, if not - specified or zero, the default port is 19443. 
-            app_gateway (str):
-                Required. AppGateway name in the following format:
-                ``projects/{project_id}/locations/{location_id}/appgateways/{gateway_id}``
-        """
-        class Type(proto.Enum):
-            r"""Enum listing possible gateway hosting options.
-
-            Values:
-                TYPE_UNSPECIFIED (0):
-                    Default value. This value is unused.
-                GCP_REGIONAL_MIG (1):
-                    Gateway hosted in a GCP regional managed
-                    instance group.
-            """
-            TYPE_UNSPECIFIED = 0
-            GCP_REGIONAL_MIG = 1
-
-        type_: 'AppConnection.Gateway.Type' = proto.Field(
-            proto.ENUM,
-            number=2,
-            enum='AppConnection.Gateway.Type',
-        )
-        uri: str = proto.Field(
-            proto.STRING,
-            number=3,
-        )
-        ingress_port: int = proto.Field(
-            proto.INT32,
-            number=4,
-        )
-        app_gateway: str = proto.Field(
-            proto.STRING,
-            number=5,
-        )
-
-    name: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    create_time: timestamp_pb2.Timestamp = proto.Field(
-        proto.MESSAGE,
-        number=2,
-        message=timestamp_pb2.Timestamp,
-    )
-    update_time: timestamp_pb2.Timestamp = proto.Field(
-        proto.MESSAGE,
-        number=3,
-        message=timestamp_pb2.Timestamp,
-    )
-    labels: MutableMapping[str, str] = proto.MapField(
-        proto.STRING,
-        proto.STRING,
-        number=4,
-    )
-    display_name: str = proto.Field(
-        proto.STRING,
-        number=5,
-    )
-    uid: str = proto.Field(
-        proto.STRING,
-        number=6,
-    )
-    type_: Type = proto.Field(
-        proto.ENUM,
-        number=7,
-        enum=Type,
-    )
-    application_endpoint: ApplicationEndpoint = proto.Field(
-        proto.MESSAGE,
-        number=8,
-        message=ApplicationEndpoint,
-    )
-    connectors: MutableSequence[str] = proto.RepeatedField(
-        proto.STRING,
-        number=9,
-    )
-    state: State = proto.Field(
-        proto.ENUM,
-        number=10,
-        enum=State,
-    )
-    gateway: Gateway = proto.Field(
-        proto.MESSAGE,
-        number=11,
-        message=Gateway,
-    )
-
-
-class AppConnectionOperationMetadata(proto.Message):
-    r"""Represents the metadata of the long-running operation.
-
-    Attributes:
-        create_time (google.protobuf.timestamp_pb2.Timestamp):
-            Output only. The time the operation was
-            created.
-        end_time (google.protobuf.timestamp_pb2.Timestamp):
-            Output only. The time the operation finished
-            running.
-        target (str):
-            Output only. Server-defined resource path for
-            the target of the operation.
-        verb (str):
-            Output only. Name of the verb executed by the
-            operation.
-        status_message (str):
-            Output only. Human-readable status of the
-            operation, if any.
-        requested_cancellation (bool):
-            Output only. Identifies whether the user has requested
-            cancellation of the operation. Operations that have
-            successfully been cancelled have [Operation.error][] value
-            with a [google.rpc.Status.code][google.rpc.Status.code] of
-            1, corresponding to ``Code.CANCELLED``.
-        api_version (str):
-            Output only. API version used to start the
-            operation.
-    """
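Mutating RPCs on this service return long-running operations that carry this message as their metadata, so progress details are readable while the operation is still in flight. A sketch using a delete as the example (standard ``google.api_core`` operation surface assumed; names are placeholders):

.. code-block:: python

    from google.cloud.beyondcorp_appconnections_v1 import (
        AppConnectionsServiceClient,
        DeleteAppConnectionRequest,
    )

    client = AppConnectionsServiceClient()
    operation = client.delete_app_connection(
        request=DeleteAppConnectionRequest(
            name="projects/my-project/locations/us-central1/appConnections/my-conn",
        ),
    )

    # google.api_core deserializes operation.metadata into
    # AppConnectionOperationMetadata, so the fields documented above
    # are readable while the operation runs.
    metadata = operation.metadata
    print(metadata.target, metadata.verb, metadata.requested_cancellation)

    operation.result()  # wait for completion; raises on failure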
- """ - - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - target: str = proto.Field( - proto.STRING, - number=3, - ) - verb: str = proto.Field( - proto.STRING, - number=4, - ) - status_message: str = proto.Field( - proto.STRING, - number=5, - ) - requested_cancellation: bool = proto.Field( - proto.BOOL, - number=6, - ) - api_version: str = proto.Field( - proto.STRING, - number=7, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/mypy.ini b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/mypy.ini deleted file mode 100644 index 574c5aed394b..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/noxfile.py b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/noxfile.py deleted file mode 100644 index 489882e54db6..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/noxfile.py +++ /dev/null @@ -1,280 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import pathlib -import re -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", - "3.12", - "3.13", -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = 'google-cloud-beyondcorp-appconnections' - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.13" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "prerelease_deps", -] - -@nox.session(python=ALL_PYTHON) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def unit(session, protobuf_implementation): - """Run the unit test suite.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") - - # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. - # The 'cpp' implementation requires Protobuf<4. 
- if protobuf_implementation == "cpp": - session.install("protobuf<4") - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/beyondcorp_appconnections_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - -@nox.session(python=ALL_PYTHON[-1]) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def prerelease_deps(session, protobuf_implementation): - """Run the unit test suite against pre-release versions of dependencies.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - # Install test environment dependencies - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - - # Install the package without dependencies - session.install('-e', '.', '--no-deps') - - # We test the minimum dependency versions using the minimum Python - # version so the lowest python runtime that we test has a corresponding constraints - # file, located at `testing/constraints--.txt`, which contains all of the - # dependencies and extras. - with open( - CURRENT_DIRECTORY - / "testing" - / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. - constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - session.install(*constraints_deps) - - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 - "grpcio!=1.67.0rc1", - "grpcio-status", - "protobuf", - "proto-plus", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) - - # Remaining dependencies - other_deps = [ - "requests", - ] - session.install(*other_deps) - - # Print out prerelease package versions - - session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") - session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("python", "-c", "import grpc; print(grpc.__version__)") - session.run( - "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" - ) - session.run( - "python", "-c", "import proto; print(proto.__version__)" - ) - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/beyondcorp_appconnections_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. 
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/samples/generated_samples/beyondcorp_v1_generated_app_connections_service_create_app_connection_async.py b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/samples/generated_samples/beyondcorp_v1_generated_app_connections_service_create_app_connection_async.py deleted file mode 100644 index b56eb0d44131..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/samples/generated_samples/beyondcorp_v1_generated_app_connections_service_create_app_connection_async.py +++ /dev/null @@ -1,63 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateAppConnection -# NOTE: This snippet has been automatically generated for illustrative purposes only. 
-# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-beyondcorp-appconnections - - -# [START beyondcorp_v1_generated_AppConnectionsService_CreateAppConnection_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import beyondcorp_appconnections_v1 - - -async def sample_create_app_connection(): - # Create a client - client = beyondcorp_appconnections_v1.AppConnectionsServiceAsyncClient() - - # Initialize request argument(s) - app_connection = beyondcorp_appconnections_v1.AppConnection() - app_connection.name = "name_value" - app_connection.type_ = "TCP_PROXY" - app_connection.application_endpoint.host = "host_value" - app_connection.application_endpoint.port = 453 - - request = beyondcorp_appconnections_v1.CreateAppConnectionRequest( - parent="parent_value", - app_connection=app_connection, - ) - - # Make the request - operation = client.create_app_connection(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END beyondcorp_v1_generated_AppConnectionsService_CreateAppConnection_async] diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/samples/generated_samples/beyondcorp_v1_generated_app_connections_service_create_app_connection_sync.py b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/samples/generated_samples/beyondcorp_v1_generated_app_connections_service_create_app_connection_sync.py deleted file mode 100644 index dd99e670aa08..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/samples/generated_samples/beyondcorp_v1_generated_app_connections_service_create_app_connection_sync.py +++ /dev/null @@ -1,63 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateAppConnection -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-beyondcorp-appconnections - - -# [START beyondcorp_v1_generated_AppConnectionsService_CreateAppConnection_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import beyondcorp_appconnections_v1 - - -def sample_create_app_connection(): - # Create a client - client = beyondcorp_appconnections_v1.AppConnectionsServiceClient() - - # Initialize request argument(s) - app_connection = beyondcorp_appconnections_v1.AppConnection() - app_connection.name = "name_value" - app_connection.type_ = "TCP_PROXY" - app_connection.application_endpoint.host = "host_value" - app_connection.application_endpoint.port = 453 - - request = beyondcorp_appconnections_v1.CreateAppConnectionRequest( - parent="parent_value", - app_connection=app_connection, - ) - - # Make the request - operation = client.create_app_connection(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END beyondcorp_v1_generated_AppConnectionsService_CreateAppConnection_sync] diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/samples/generated_samples/beyondcorp_v1_generated_app_connections_service_delete_app_connection_async.py b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/samples/generated_samples/beyondcorp_v1_generated_app_connections_service_delete_app_connection_async.py deleted file mode 100644 index 230aad2b5407..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/samples/generated_samples/beyondcorp_v1_generated_app_connections_service_delete_app_connection_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteAppConnection -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-beyondcorp-appconnections - - -# [START beyondcorp_v1_generated_AppConnectionsService_DeleteAppConnection_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import beyondcorp_appconnections_v1 - - -async def sample_delete_app_connection(): - # Create a client - client = beyondcorp_appconnections_v1.AppConnectionsServiceAsyncClient() - - # Initialize request argument(s) - request = beyondcorp_appconnections_v1.DeleteAppConnectionRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_app_connection(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END beyondcorp_v1_generated_AppConnectionsService_DeleteAppConnection_async] diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/samples/generated_samples/beyondcorp_v1_generated_app_connections_service_delete_app_connection_sync.py b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/samples/generated_samples/beyondcorp_v1_generated_app_connections_service_delete_app_connection_sync.py deleted file mode 100644 index bef560e9d64e..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/samples/generated_samples/beyondcorp_v1_generated_app_connections_service_delete_app_connection_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteAppConnection -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-beyondcorp-appconnections - - -# [START beyondcorp_v1_generated_AppConnectionsService_DeleteAppConnection_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import beyondcorp_appconnections_v1 - - -def sample_delete_app_connection(): - # Create a client - client = beyondcorp_appconnections_v1.AppConnectionsServiceClient() - - # Initialize request argument(s) - request = beyondcorp_appconnections_v1.DeleteAppConnectionRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_app_connection(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END beyondcorp_v1_generated_AppConnectionsService_DeleteAppConnection_sync] diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/samples/generated_samples/beyondcorp_v1_generated_app_connections_service_get_app_connection_async.py b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/samples/generated_samples/beyondcorp_v1_generated_app_connections_service_get_app_connection_async.py deleted file mode 100644 index 03078c46186b..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/samples/generated_samples/beyondcorp_v1_generated_app_connections_service_get_app_connection_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetAppConnection -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-beyondcorp-appconnections - - -# [START beyondcorp_v1_generated_AppConnectionsService_GetAppConnection_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import beyondcorp_appconnections_v1 - - -async def sample_get_app_connection(): - # Create a client - client = beyondcorp_appconnections_v1.AppConnectionsServiceAsyncClient() - - # Initialize request argument(s) - request = beyondcorp_appconnections_v1.GetAppConnectionRequest( - name="name_value", - ) - - # Make the request - response = await client.get_app_connection(request=request) - - # Handle the response - print(response) - -# [END beyondcorp_v1_generated_AppConnectionsService_GetAppConnection_async] diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/samples/generated_samples/beyondcorp_v1_generated_app_connections_service_get_app_connection_sync.py b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/samples/generated_samples/beyondcorp_v1_generated_app_connections_service_get_app_connection_sync.py deleted file mode 100644 index c359284436c7..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/samples/generated_samples/beyondcorp_v1_generated_app_connections_service_get_app_connection_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetAppConnection -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-beyondcorp-appconnections - - -# [START beyondcorp_v1_generated_AppConnectionsService_GetAppConnection_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import beyondcorp_appconnections_v1 - - -def sample_get_app_connection(): - # Create a client - client = beyondcorp_appconnections_v1.AppConnectionsServiceClient() - - # Initialize request argument(s) - request = beyondcorp_appconnections_v1.GetAppConnectionRequest( - name="name_value", - ) - - # Make the request - response = client.get_app_connection(request=request) - - # Handle the response - print(response) - -# [END beyondcorp_v1_generated_AppConnectionsService_GetAppConnection_sync] diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/samples/generated_samples/beyondcorp_v1_generated_app_connections_service_list_app_connections_async.py b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/samples/generated_samples/beyondcorp_v1_generated_app_connections_service_list_app_connections_async.py deleted file mode 100644 index 88e0520be06a..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/samples/generated_samples/beyondcorp_v1_generated_app_connections_service_list_app_connections_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListAppConnections -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-beyondcorp-appconnections - - -# [START beyondcorp_v1_generated_AppConnectionsService_ListAppConnections_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import beyondcorp_appconnections_v1 - - -async def sample_list_app_connections(): - # Create a client - client = beyondcorp_appconnections_v1.AppConnectionsServiceAsyncClient() - - # Initialize request argument(s) - request = beyondcorp_appconnections_v1.ListAppConnectionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_app_connections(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END beyondcorp_v1_generated_AppConnectionsService_ListAppConnections_async] diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/samples/generated_samples/beyondcorp_v1_generated_app_connections_service_list_app_connections_sync.py b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/samples/generated_samples/beyondcorp_v1_generated_app_connections_service_list_app_connections_sync.py deleted file mode 100644 index bde8077e86af..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/samples/generated_samples/beyondcorp_v1_generated_app_connections_service_list_app_connections_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListAppConnections -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-beyondcorp-appconnections - - -# [START beyondcorp_v1_generated_AppConnectionsService_ListAppConnections_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import beyondcorp_appconnections_v1 - - -def sample_list_app_connections(): - # Create a client - client = beyondcorp_appconnections_v1.AppConnectionsServiceClient() - - # Initialize request argument(s) - request = beyondcorp_appconnections_v1.ListAppConnectionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_app_connections(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END beyondcorp_v1_generated_AppConnectionsService_ListAppConnections_sync] diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/samples/generated_samples/beyondcorp_v1_generated_app_connections_service_resolve_app_connections_async.py b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/samples/generated_samples/beyondcorp_v1_generated_app_connections_service_resolve_app_connections_async.py deleted file mode 100644 index bdec59ccee24..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/samples/generated_samples/beyondcorp_v1_generated_app_connections_service_resolve_app_connections_async.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ResolveAppConnections -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-beyondcorp-appconnections - - -# [START beyondcorp_v1_generated_AppConnectionsService_ResolveAppConnections_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import beyondcorp_appconnections_v1 - - -async def sample_resolve_app_connections(): - # Create a client - client = beyondcorp_appconnections_v1.AppConnectionsServiceAsyncClient() - - # Initialize request argument(s) - request = beyondcorp_appconnections_v1.ResolveAppConnectionsRequest( - parent="parent_value", - app_connector_id="app_connector_id_value", - ) - - # Make the request - page_result = client.resolve_app_connections(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END beyondcorp_v1_generated_AppConnectionsService_ResolveAppConnections_async] diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/samples/generated_samples/beyondcorp_v1_generated_app_connections_service_resolve_app_connections_sync.py b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/samples/generated_samples/beyondcorp_v1_generated_app_connections_service_resolve_app_connections_sync.py deleted file mode 100644 index 50f275743064..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/samples/generated_samples/beyondcorp_v1_generated_app_connections_service_resolve_app_connections_sync.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ResolveAppConnections -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-beyondcorp-appconnections - - -# [START beyondcorp_v1_generated_AppConnectionsService_ResolveAppConnections_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import beyondcorp_appconnections_v1 - - -def sample_resolve_app_connections(): - # Create a client - client = beyondcorp_appconnections_v1.AppConnectionsServiceClient() - - # Initialize request argument(s) - request = beyondcorp_appconnections_v1.ResolveAppConnectionsRequest( - parent="parent_value", - app_connector_id="app_connector_id_value", - ) - - # Make the request - page_result = client.resolve_app_connections(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END beyondcorp_v1_generated_AppConnectionsService_ResolveAppConnections_sync] diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/samples/generated_samples/beyondcorp_v1_generated_app_connections_service_update_app_connection_async.py b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/samples/generated_samples/beyondcorp_v1_generated_app_connections_service_update_app_connection_async.py deleted file mode 100644 index 080698f77277..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/samples/generated_samples/beyondcorp_v1_generated_app_connections_service_update_app_connection_async.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateAppConnection -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-beyondcorp-appconnections - - -# [START beyondcorp_v1_generated_AppConnectionsService_UpdateAppConnection_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import beyondcorp_appconnections_v1 - - -async def sample_update_app_connection(): - # Create a client - client = beyondcorp_appconnections_v1.AppConnectionsServiceAsyncClient() - - # Initialize request argument(s) - app_connection = beyondcorp_appconnections_v1.AppConnection() - app_connection.name = "name_value" - app_connection.type_ = "TCP_PROXY" - app_connection.application_endpoint.host = "host_value" - app_connection.application_endpoint.port = 453 - - request = beyondcorp_appconnections_v1.UpdateAppConnectionRequest( - app_connection=app_connection, - ) - - # Make the request - operation = client.update_app_connection(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END beyondcorp_v1_generated_AppConnectionsService_UpdateAppConnection_async] diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/samples/generated_samples/beyondcorp_v1_generated_app_connections_service_update_app_connection_sync.py b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/samples/generated_samples/beyondcorp_v1_generated_app_connections_service_update_app_connection_sync.py deleted file mode 100644 index 98699c137ca1..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/samples/generated_samples/beyondcorp_v1_generated_app_connections_service_update_app_connection_sync.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateAppConnection -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-beyondcorp-appconnections - - -# [START beyondcorp_v1_generated_AppConnectionsService_UpdateAppConnection_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import beyondcorp_appconnections_v1 - - -def sample_update_app_connection(): - # Create a client - client = beyondcorp_appconnections_v1.AppConnectionsServiceClient() - - # Initialize request argument(s) - app_connection = beyondcorp_appconnections_v1.AppConnection() - app_connection.name = "name_value" - app_connection.type_ = "TCP_PROXY" - app_connection.application_endpoint.host = "host_value" - app_connection.application_endpoint.port = 453 - - request = beyondcorp_appconnections_v1.UpdateAppConnectionRequest( - app_connection=app_connection, - ) - - # Make the request - operation = client.update_app_connection(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END beyondcorp_v1_generated_AppConnectionsService_UpdateAppConnection_sync] diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnections.v1.json b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnections.v1.json deleted file mode 100644 index f730a69adfa5..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnections.v1.json +++ /dev/null @@ -1,1005 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.beyondcorp.appconnections.v1", - "version": "v1" - } - ], - "language": "PYTHON", - "name": "google-cloud-beyondcorp-appconnections", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.beyondcorp_appconnections_v1.AppConnectionsServiceAsyncClient", - "shortName": "AppConnectionsServiceAsyncClient" - }, - "fullName": "google.cloud.beyondcorp_appconnections_v1.AppConnectionsServiceAsyncClient.create_app_connection", - "method": { - "fullName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService.CreateAppConnection", - "service": { - "fullName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService", - "shortName": "AppConnectionsService" - }, - "shortName": "CreateAppConnection" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_appconnections_v1.types.CreateAppConnectionRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "app_connection", - "type": "google.cloud.beyondcorp_appconnections_v1.types.AppConnection" - }, - { - "name": "app_connection_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_app_connection" - }, - "description": "Sample for CreateAppConnection", - "file": "beyondcorp_v1_generated_app_connections_service_create_app_connection_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_AppConnectionsService_CreateAppConnection_async", - "segments": [ - { - "end": 62, - "start": 27, - "type": "FULL" - }, - { - "end": 62, - "start": 27, - "type": "SHORT" - }, - { - 
"end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 52, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 59, - "start": 53, - "type": "REQUEST_EXECUTION" - }, - { - "end": 63, - "start": 60, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_app_connections_service_create_app_connection_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.beyondcorp_appconnections_v1.AppConnectionsServiceClient", - "shortName": "AppConnectionsServiceClient" - }, - "fullName": "google.cloud.beyondcorp_appconnections_v1.AppConnectionsServiceClient.create_app_connection", - "method": { - "fullName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService.CreateAppConnection", - "service": { - "fullName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService", - "shortName": "AppConnectionsService" - }, - "shortName": "CreateAppConnection" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_appconnections_v1.types.CreateAppConnectionRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "app_connection", - "type": "google.cloud.beyondcorp_appconnections_v1.types.AppConnection" - }, - { - "name": "app_connection_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_app_connection" - }, - "description": "Sample for CreateAppConnection", - "file": "beyondcorp_v1_generated_app_connections_service_create_app_connection_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_AppConnectionsService_CreateAppConnection_sync", - "segments": [ - { - "end": 62, - "start": 27, - "type": "FULL" - }, - { - "end": 62, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 52, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 59, - "start": 53, - "type": "REQUEST_EXECUTION" - }, - { - "end": 63, - "start": 60, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_app_connections_service_create_app_connection_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.beyondcorp_appconnections_v1.AppConnectionsServiceAsyncClient", - "shortName": "AppConnectionsServiceAsyncClient" - }, - "fullName": "google.cloud.beyondcorp_appconnections_v1.AppConnectionsServiceAsyncClient.delete_app_connection", - "method": { - "fullName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService.DeleteAppConnection", - "service": { - "fullName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService", - "shortName": "AppConnectionsService" - }, - "shortName": "DeleteAppConnection" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_appconnections_v1.types.DeleteAppConnectionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_app_connection" - }, - "description": 
"Sample for DeleteAppConnection", - "file": "beyondcorp_v1_generated_app_connections_service_delete_app_connection_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_AppConnectionsService_DeleteAppConnection_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_app_connections_service_delete_app_connection_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.beyondcorp_appconnections_v1.AppConnectionsServiceClient", - "shortName": "AppConnectionsServiceClient" - }, - "fullName": "google.cloud.beyondcorp_appconnections_v1.AppConnectionsServiceClient.delete_app_connection", - "method": { - "fullName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService.DeleteAppConnection", - "service": { - "fullName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService", - "shortName": "AppConnectionsService" - }, - "shortName": "DeleteAppConnection" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_appconnections_v1.types.DeleteAppConnectionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_app_connection" - }, - "description": "Sample for DeleteAppConnection", - "file": "beyondcorp_v1_generated_app_connections_service_delete_app_connection_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_AppConnectionsService_DeleteAppConnection_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_app_connections_service_delete_app_connection_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.beyondcorp_appconnections_v1.AppConnectionsServiceAsyncClient", - "shortName": "AppConnectionsServiceAsyncClient" - }, - "fullName": "google.cloud.beyondcorp_appconnections_v1.AppConnectionsServiceAsyncClient.get_app_connection", - "method": { - "fullName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService.GetAppConnection", - "service": { - "fullName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService", - "shortName": "AppConnectionsService" - }, - "shortName": "GetAppConnection" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_appconnections_v1.types.GetAppConnectionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - 
}, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.beyondcorp_appconnections_v1.types.AppConnection", - "shortName": "get_app_connection" - }, - "description": "Sample for GetAppConnection", - "file": "beyondcorp_v1_generated_app_connections_service_get_app_connection_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_AppConnectionsService_GetAppConnection_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_app_connections_service_get_app_connection_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.beyondcorp_appconnections_v1.AppConnectionsServiceClient", - "shortName": "AppConnectionsServiceClient" - }, - "fullName": "google.cloud.beyondcorp_appconnections_v1.AppConnectionsServiceClient.get_app_connection", - "method": { - "fullName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService.GetAppConnection", - "service": { - "fullName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService", - "shortName": "AppConnectionsService" - }, - "shortName": "GetAppConnection" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_appconnections_v1.types.GetAppConnectionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.beyondcorp_appconnections_v1.types.AppConnection", - "shortName": "get_app_connection" - }, - "description": "Sample for GetAppConnection", - "file": "beyondcorp_v1_generated_app_connections_service_get_app_connection_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_AppConnectionsService_GetAppConnection_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_app_connections_service_get_app_connection_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.beyondcorp_appconnections_v1.AppConnectionsServiceAsyncClient", - "shortName": "AppConnectionsServiceAsyncClient" - }, - "fullName": "google.cloud.beyondcorp_appconnections_v1.AppConnectionsServiceAsyncClient.list_app_connections", - "method": { - "fullName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService.ListAppConnections", - "service": { - "fullName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService", - "shortName": "AppConnectionsService" - }, - "shortName": "ListAppConnections" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.beyondcorp_appconnections_v1.types.ListAppConnectionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.beyondcorp_appconnections_v1.services.app_connections_service.pagers.ListAppConnectionsAsyncPager", - "shortName": "list_app_connections" - }, - "description": "Sample for ListAppConnections", - "file": "beyondcorp_v1_generated_app_connections_service_list_app_connections_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_AppConnectionsService_ListAppConnections_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_app_connections_service_list_app_connections_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.beyondcorp_appconnections_v1.AppConnectionsServiceClient", - "shortName": "AppConnectionsServiceClient" - }, - "fullName": "google.cloud.beyondcorp_appconnections_v1.AppConnectionsServiceClient.list_app_connections", - "method": { - "fullName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService.ListAppConnections", - "service": { - "fullName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService", - "shortName": "AppConnectionsService" - }, - "shortName": "ListAppConnections" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_appconnections_v1.types.ListAppConnectionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.beyondcorp_appconnections_v1.services.app_connections_service.pagers.ListAppConnectionsPager", - "shortName": "list_app_connections" - }, - "description": "Sample for ListAppConnections", - "file": "beyondcorp_v1_generated_app_connections_service_list_app_connections_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_AppConnectionsService_ListAppConnections_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_app_connections_service_list_app_connections_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.beyondcorp_appconnections_v1.AppConnectionsServiceAsyncClient", - "shortName": "AppConnectionsServiceAsyncClient" - }, - "fullName": 
"google.cloud.beyondcorp_appconnections_v1.AppConnectionsServiceAsyncClient.resolve_app_connections", - "method": { - "fullName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService.ResolveAppConnections", - "service": { - "fullName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService", - "shortName": "AppConnectionsService" - }, - "shortName": "ResolveAppConnections" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_appconnections_v1.types.ResolveAppConnectionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.beyondcorp_appconnections_v1.services.app_connections_service.pagers.ResolveAppConnectionsAsyncPager", - "shortName": "resolve_app_connections" - }, - "description": "Sample for ResolveAppConnections", - "file": "beyondcorp_v1_generated_app_connections_service_resolve_app_connections_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_AppConnectionsService_ResolveAppConnections_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_app_connections_service_resolve_app_connections_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.beyondcorp_appconnections_v1.AppConnectionsServiceClient", - "shortName": "AppConnectionsServiceClient" - }, - "fullName": "google.cloud.beyondcorp_appconnections_v1.AppConnectionsServiceClient.resolve_app_connections", - "method": { - "fullName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService.ResolveAppConnections", - "service": { - "fullName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService", - "shortName": "AppConnectionsService" - }, - "shortName": "ResolveAppConnections" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_appconnections_v1.types.ResolveAppConnectionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.beyondcorp_appconnections_v1.services.app_connections_service.pagers.ResolveAppConnectionsPager", - "shortName": "resolve_app_connections" - }, - "description": "Sample for ResolveAppConnections", - "file": "beyondcorp_v1_generated_app_connections_service_resolve_app_connections_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_AppConnectionsService_ResolveAppConnections_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - 
}, - { - "end": 54, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_app_connections_service_resolve_app_connections_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.beyondcorp_appconnections_v1.AppConnectionsServiceAsyncClient", - "shortName": "AppConnectionsServiceAsyncClient" - }, - "fullName": "google.cloud.beyondcorp_appconnections_v1.AppConnectionsServiceAsyncClient.update_app_connection", - "method": { - "fullName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService.UpdateAppConnection", - "service": { - "fullName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService", - "shortName": "AppConnectionsService" - }, - "shortName": "UpdateAppConnection" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_appconnections_v1.types.UpdateAppConnectionRequest" - }, - { - "name": "app_connection", - "type": "google.cloud.beyondcorp_appconnections_v1.types.AppConnection" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_app_connection" - }, - "description": "Sample for UpdateAppConnection", - "file": "beyondcorp_v1_generated_app_connections_service_update_app_connection_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_AppConnectionsService_UpdateAppConnection_async", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_app_connections_service_update_app_connection_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.beyondcorp_appconnections_v1.AppConnectionsServiceClient", - "shortName": "AppConnectionsServiceClient" - }, - "fullName": "google.cloud.beyondcorp_appconnections_v1.AppConnectionsServiceClient.update_app_connection", - "method": { - "fullName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService.UpdateAppConnection", - "service": { - "fullName": "google.cloud.beyondcorp.appconnections.v1.AppConnectionsService", - "shortName": "AppConnectionsService" - }, - "shortName": "UpdateAppConnection" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_appconnections_v1.types.UpdateAppConnectionRequest" - }, - { - "name": "app_connection", - "type": "google.cloud.beyondcorp_appconnections_v1.types.AppConnection" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_app_connection" - }, - "description": "Sample for UpdateAppConnection", - "file": 
"beyondcorp_v1_generated_app_connections_service_update_app_connection_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_AppConnectionsService_UpdateAppConnection_sync", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_app_connections_service_update_app_connection_sync.py" - } - ] -} diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/scripts/fixup_beyondcorp_appconnections_v1_keywords.py b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/scripts/fixup_beyondcorp_appconnections_v1_keywords.py deleted file mode 100644 index 642936bed7dc..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/scripts/fixup_beyondcorp_appconnections_v1_keywords.py +++ /dev/null @@ -1,181 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class beyondcorp_appconnectionsCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_app_connection': ('parent', 'app_connection', 'app_connection_id', 'request_id', 'validate_only', ), - 'delete_app_connection': ('name', 'request_id', 'validate_only', ), - 'get_app_connection': ('name', ), - 'list_app_connections': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'resolve_app_connections': ('parent', 'app_connector_id', 'page_size', 'page_token', ), - 'update_app_connection': ('update_mask', 'app_connection', 'request_id', 'validate_only', 'allow_missing', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. 
-        args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
-        if any(k.keyword.value == "request" for k in kwargs):
-            # We've already fixed this file, don't fix it again.
-            return updated
-
-        kwargs, ctrl_kwargs = partition(
-            lambda a: a.keyword.value not in self.CTRL_PARAMS,
-            kwargs
-        )
-
-        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
-        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
-                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
-
-        request_arg = cst.Arg(
-            value=cst.Dict([
-                cst.DictElement(
-                    cst.SimpleString("'{}'".format(name)),
-                    cst.Element(value=arg.value)
-                )
-                # Note: the args + kwargs looks silly, but keep in mind that
-                # the control parameters had to be stripped out, and that
-                # those could have been passed positionally or by keyword.
-                for name, arg in zip(kword_params, args + kwargs)]),
-            keyword=cst.Name("request")
-        )
-
-        return updated.with_changes(
-            args=[request_arg] + ctrl_kwargs
-        )
-
-
-def fix_files(
-    in_dir: pathlib.Path,
-    out_dir: pathlib.Path,
-    *,
-    transformer=beyondcorp_appconnectionsCallTransformer(),
-):
-    """Duplicate the input dir to the output dir, fixing file method calls.
-
-    Preconditions:
-    * in_dir is a real directory
-    * out_dir is a real, empty directory
-    """
-    pyfile_gen = (
-        pathlib.Path(os.path.join(root, f))
-        for root, _, files in os.walk(in_dir)
-        for f in files if os.path.splitext(f)[1] == ".py"
-    )
-
-    for fpath in pyfile_gen:
-        with open(fpath, 'r') as f:
-            src = f.read()
-
-        # Parse the code and insert method call fixes.
-        tree = cst.parse_module(src)
-        updated = tree.visit(transformer)
-
-        # Create the path and directory structure for the new file.
-        updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
-        updated_path.parent.mkdir(parents=True, exist_ok=True)
-
-        # Generate the updated source file at the corresponding path.
-        with open(updated_path, 'w') as f:
-            f.write(updated.code)
-
-
-if __name__ == '__main__':
-    parser = argparse.ArgumentParser(
-        description="""Fix up source that uses the beyondcorp_appconnections client library.
-
-The existing sources are NOT overwritten but are copied to output_dir with changes made.
-
-Note: This tool operates at a best-effort level at converting positional
-      parameters in client method calls to keyword based parameters.
-      Cases where it WILL FAIL include
-      A) * or ** expansion in a method call.
-      B) Calls via function or method alias (includes free function calls)
-      C) Indirect or dispatched calls (e.g. the method is looked up dynamically)
-
-      These all constitute false negatives. The tool will also produce false
-      positives when an API method shares a name with another method.
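-
-      Example invocation (illustrative paths):
-          python3 fixup_beyondcorp_appconnections_v1_keywords.py -d src/ -o fixed/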
-""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/setup.py b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/setup.py deleted file mode 100644 index b93b95325bf9..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/setup.py +++ /dev/null @@ -1,99 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-beyondcorp-appconnections' - - -description = "Google Cloud Beyondcorp Appconnections API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/beyondcorp_appconnections/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "grpc-google-iam-v1 >= 0.14.0, <1.0.0dev", -] -extras = { -} -url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appconnections" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - extras_require=extras, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/testing/constraints-3.10.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/testing/constraints-3.10.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
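-# Versions are deliberately left unpinned here: the lower bounds declared in
-# setup.py are checked separately by constraints-3.7.txt, which pins each
-# dependency to its minimum supported version.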
-google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/testing/constraints-3.11.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/testing/constraints-3.11.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/testing/constraints-3.12.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/testing/constraints-3.12.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/testing/constraints-3.13.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/testing/constraints-3.13.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/testing/constraints-3.7.txt deleted file mode 100644 index fb7e93a1b473..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/testing/constraints-3.7.txt +++ /dev/null @@ -1,11 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -proto-plus==1.22.3 -protobuf==3.20.2 -grpc-google-iam-v1==0.14.0 diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/testing/constraints-3.8.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/testing/constraints-3.8.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/testing/constraints-3.9.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/testing/constraints-3.9.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. 
-# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/tests/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/tests/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/tests/unit/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/tests/unit/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/tests/unit/gapic/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
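-#
-# (This file is intentionally empty; it only marks the directory as an
-# importable package for the test suite.)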
-# diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/tests/unit/gapic/beyondcorp_appconnections_v1/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/tests/unit/gapic/beyondcorp_appconnections_v1/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/tests/unit/gapic/beyondcorp_appconnections_v1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/tests/unit/gapic/beyondcorp_appconnections_v1/test_app_connections_service.py b/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/tests/unit/gapic/beyondcorp_appconnections_v1/test_app_connections_service.py deleted file mode 100644 index 159efd20f2d1..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnections/v1/tests/unit/gapic/beyondcorp_appconnections_v1/test_app_connections_service.py +++ /dev/null @@ -1,8032 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
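-#
-# Generated tests for AppConnectionsService. They cover client construction
-# (environment variables, mTLS endpoints, universe domains, credentials) and
-# each RPC across the grpc, grpc_asyncio and rest transports, with all
-# network I/O mocked out.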
-# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation -from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.beyondcorp_appconnections_v1.services.app_connections_service import AppConnectionsServiceAsyncClient -from google.cloud.beyondcorp_appconnections_v1.services.app_connections_service import AppConnectionsServiceClient -from google.cloud.beyondcorp_appconnections_v1.services.app_connections_service import pagers -from google.cloud.beyondcorp_appconnections_v1.services.app_connections_service import transports -from google.cloud.beyondcorp_appconnections_v1.types import app_connections_service -from google.cloud.location import locations_pb2 -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -import google.auth - - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": "service account credentials", - "principal": "service-account@example.com", -} -CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert AppConnectionsServiceClient._get_default_mtls_endpoint(None) is None - assert AppConnectionsServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert AppConnectionsServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert AppConnectionsServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert AppConnectionsServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert AppConnectionsServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert AppConnectionsServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert AppConnectionsServiceClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert AppConnectionsServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - AppConnectionsServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert AppConnectionsServiceClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert AppConnectionsServiceClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert AppConnectionsServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - AppConnectionsServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert AppConnectionsServiceClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert AppConnectionsServiceClient._get_client_cert_source(None, False) is None - assert 
AppConnectionsServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert AppConnectionsServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert AppConnectionsServiceClient._get_client_cert_source(None, True) is mock_default_cert_source - assert AppConnectionsServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(AppConnectionsServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AppConnectionsServiceClient)) -@mock.patch.object(AppConnectionsServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AppConnectionsServiceAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = AppConnectionsServiceClient._DEFAULT_UNIVERSE - default_endpoint = AppConnectionsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = AppConnectionsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert AppConnectionsServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert AppConnectionsServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == AppConnectionsServiceClient.DEFAULT_MTLS_ENDPOINT - assert AppConnectionsServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert AppConnectionsServiceClient._get_api_endpoint(None, None, default_universe, "always") == AppConnectionsServiceClient.DEFAULT_MTLS_ENDPOINT - assert AppConnectionsServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == AppConnectionsServiceClient.DEFAULT_MTLS_ENDPOINT - assert AppConnectionsServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert AppConnectionsServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - AppConnectionsServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." - - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert AppConnectionsServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert AppConnectionsServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert AppConnectionsServiceClient._get_universe_domain(None, None) == AppConnectionsServiceClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - AppConnectionsServiceClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
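-
-# Resolution order exercised above: a universe domain set via client options
-# wins, then the GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variable, then the
-# default "googleapis.com". For example:
-#     options = client_options.ClientOptions(universe_domain="foo.com")
-#     client = AppConnectionsServiceClient(
-#         client_options=options, credentials=ga_credentials.AnonymousCredentials())
-#     assert client.universe_domain == "foo.com"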
- -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = AppConnectionsServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - -@pytest.mark.parametrize("error_code", [401,403,404,500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = AppConnectionsServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - -@pytest.mark.parametrize("client_class,transport_name", [ - (AppConnectionsServiceClient, "grpc"), - (AppConnectionsServiceAsyncClient, "grpc_asyncio"), - (AppConnectionsServiceClient, "rest"), -]) -def test_app_connections_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'beyondcorp.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://beyondcorp.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.AppConnectionsServiceGrpcTransport, "grpc"), - (transports.AppConnectionsServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.AppConnectionsServiceRestTransport, "rest"), -]) -def test_app_connections_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (AppConnectionsServiceClient, "grpc"), - (AppConnectionsServiceAsyncClient, "grpc_asyncio"), - (AppConnectionsServiceClient, "rest"), -]) -def test_app_connections_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - 
client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'beyondcorp.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://beyondcorp.googleapis.com' - ) - - -def test_app_connections_service_client_get_transport_class(): - transport = AppConnectionsServiceClient.get_transport_class() - available_transports = [ - transports.AppConnectionsServiceGrpcTransport, - transports.AppConnectionsServiceRestTransport, - ] - assert transport in available_transports - - transport = AppConnectionsServiceClient.get_transport_class("grpc") - assert transport == transports.AppConnectionsServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (AppConnectionsServiceClient, transports.AppConnectionsServiceGrpcTransport, "grpc"), - (AppConnectionsServiceAsyncClient, transports.AppConnectionsServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (AppConnectionsServiceClient, transports.AppConnectionsServiceRestTransport, "rest"), -]) -@mock.patch.object(AppConnectionsServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AppConnectionsServiceClient)) -@mock.patch.object(AppConnectionsServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AppConnectionsServiceAsyncClient)) -def test_app_connections_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(AppConnectionsServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(AppConnectionsServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". 
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
-        with mock.patch.object(transport_class, '__init__') as patched:
-            patched.return_value = None
-            client = client_class(transport=transport_name)
-            patched.assert_called_once_with(
-                credentials=None,
-                credentials_file=None,
-                host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-                scopes=None,
-                client_cert_source_for_mtls=None,
-                quota_project_id=None,
-                client_info=transports.base.DEFAULT_CLIENT_INFO,
-                always_use_jwt_access=True,
-                api_audience=None,
-            )
-
-    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
-    # "always".
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
-        with mock.patch.object(transport_class, '__init__') as patched:
-            patched.return_value = None
-            client = client_class(transport=transport_name)
-            patched.assert_called_once_with(
-                credentials=None,
-                credentials_file=None,
-                host=client.DEFAULT_MTLS_ENDPOINT,
-                scopes=None,
-                client_cert_source_for_mtls=None,
-                quota_project_id=None,
-                client_info=transports.base.DEFAULT_CLIENT_INFO,
-                always_use_jwt_access=True,
-                api_audience=None,
-            )
-
-    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
-    # unsupported value.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
-        with pytest.raises(MutualTLSChannelError) as excinfo:
-            client = client_class(transport=transport_name)
-        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
-
-    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
-        with pytest.raises(ValueError) as excinfo:
-            client = client_class(transport=transport_name)
-        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
-
-    # Check the case quota_project_id is provided
-    options = client_options.ClientOptions(quota_project_id="octopus")
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(client_options=options, transport=transport_name)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file=None,
-            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id="octopus",
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience=None,
-        )
-    # Check the case api_audience is provided
-    options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(client_options=options, transport=transport_name)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file=None,
-            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id=None,
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience="https://language.googleapis.com"
-        )
-
-@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
-    (AppConnectionsServiceClient, transports.AppConnectionsServiceGrpcTransport, "grpc", "true"),
-    
(AppConnectionsServiceAsyncClient, transports.AppConnectionsServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (AppConnectionsServiceClient, transports.AppConnectionsServiceGrpcTransport, "grpc", "false"), - (AppConnectionsServiceAsyncClient, transports.AppConnectionsServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (AppConnectionsServiceClient, transports.AppConnectionsServiceRestTransport, "rest", "true"), - (AppConnectionsServiceClient, transports.AppConnectionsServiceRestTransport, "rest", "false"), -]) -@mock.patch.object(AppConnectionsServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AppConnectionsServiceClient)) -@mock.patch.object(AppConnectionsServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AppConnectionsServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_app_connections_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - AppConnectionsServiceClient, AppConnectionsServiceAsyncClient -]) -@mock.patch.object(AppConnectionsServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AppConnectionsServiceClient)) -@mock.patch.object(AppConnectionsServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AppConnectionsServiceAsyncClient)) -def test_app_connections_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - AppConnectionsServiceClient, AppConnectionsServiceAsyncClient -]) -@mock.patch.object(AppConnectionsServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AppConnectionsServiceClient)) -@mock.patch.object(AppConnectionsServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AppConnectionsServiceAsyncClient)) -def test_app_connections_service_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = AppConnectionsServiceClient._DEFAULT_UNIVERSE - default_endpoint = AppConnectionsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = AppConnectionsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (AppConnectionsServiceClient, transports.AppConnectionsServiceGrpcTransport, "grpc"), - (AppConnectionsServiceAsyncClient, transports.AppConnectionsServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (AppConnectionsServiceClient, transports.AppConnectionsServiceRestTransport, "rest"), -]) -def test_app_connections_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. 
- options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (AppConnectionsServiceClient, transports.AppConnectionsServiceGrpcTransport, "grpc", grpc_helpers), - (AppConnectionsServiceAsyncClient, transports.AppConnectionsServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (AppConnectionsServiceClient, transports.AppConnectionsServiceRestTransport, "rest", None), -]) -def test_app_connections_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_app_connections_service_client_client_options_from_dict(): - with mock.patch('google.cloud.beyondcorp_appconnections_v1.services.app_connections_service.transports.AppConnectionsServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = AppConnectionsServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (AppConnectionsServiceClient, transports.AppConnectionsServiceGrpcTransport, "grpc", grpc_helpers), - (AppConnectionsServiceAsyncClient, transports.AppConnectionsServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_app_connections_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
-    options = client_options.ClientOptions(
-        credentials_file="credentials.json"
-    )
-
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(client_options=options, transport=transport_name)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file="credentials.json",
-            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id=None,
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience=None,
-        )
-
-    # Test that the credentials from file are saved and used as the credentials.
-    with mock.patch.object(
-        google.auth, "load_credentials_from_file", autospec=True
-    ) as load_creds, mock.patch.object(
-        google.auth, "default", autospec=True
-    ) as adc, mock.patch.object(
-        grpc_helpers, "create_channel"
-    ) as create_channel:
-        creds = ga_credentials.AnonymousCredentials()
-        file_creds = ga_credentials.AnonymousCredentials()
-        load_creds.return_value = (file_creds, None)
-        adc.return_value = (creds, None)
-        client = client_class(client_options=options, transport=transport_name)
-        create_channel.assert_called_with(
-            "beyondcorp.googleapis.com:443",
-            credentials=file_creds,
-            credentials_file=None,
-            quota_project_id=None,
-            default_scopes=(
-                'https://www.googleapis.com/auth/cloud-platform',
-            ),
-            scopes=None,
-            default_host="beyondcorp.googleapis.com",
-            ssl_credentials=None,
-            options=[
-                ("grpc.max_send_message_length", -1),
-                ("grpc.max_receive_message_length", -1),
-            ],
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    app_connections_service.ListAppConnectionsRequest,
-    dict,
-])
-def test_list_app_connections(request_type, transport: str = 'grpc'):
-    client = AppConnectionsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_app_connections),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = app_connections_service.ListAppConnectionsResponse(
-            next_page_token='next_page_token_value',
-            unreachable=['unreachable_value'],
-        )
-        response = client.list_app_connections(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = app_connections_service.ListAppConnectionsRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, pagers.ListAppConnectionsPager)
-        assert response.next_page_token == 'next_page_token_value'
-        assert response.unreachable == ['unreachable_value']
-
-
-def test_list_app_connections_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = AppConnectionsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
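-    # Only fields the proto annotates for auto-population (per AIP-4235, a
-    # `request_id` in UUID4 format) are ever filled in by the client.
-    # ListAppConnectionsRequest has no such field, so every string populated
-    # below should arrive at the stub exactly as set here.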
- request = app_connections_service.ListAppConnectionsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_app_connections), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_app_connections(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == app_connections_service.ListAppConnectionsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - -def test_list_app_connections_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_app_connections in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_app_connections] = mock_rpc - request = {} - client.list_app_connections(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_app_connections(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_app_connections_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AppConnectionsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_app_connections in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_app_connections] = mock_rpc - - request = {} - await client.list_app_connections(request) - - # Establish that the underlying gRPC stub method was called. 
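-        # _wrapped_methods maps each raw transport stub to the wrapper built by
-        # gapic_v1.method_async.wrap_method, which layers retry, timeout and
-        # metadata handling over the stub once at client construction. Having
-        # swapped the cached entry for mock_rpc above, every call from here on
-        # should reach the mock without wrap_method running again.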
-        assert mock_rpc.call_count == 1
-
-        await client.list_app_connections(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_app_connections_async(transport: str = 'grpc_asyncio', request_type=app_connections_service.ListAppConnectionsRequest):
-    client = AppConnectionsServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_app_connections),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(app_connections_service.ListAppConnectionsResponse(
-            next_page_token='next_page_token_value',
-            unreachable=['unreachable_value'],
-        ))
-        response = await client.list_app_connections(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = app_connections_service.ListAppConnectionsRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, pagers.ListAppConnectionsAsyncPager)
-        assert response.next_page_token == 'next_page_token_value'
-        assert response.unreachable == ['unreachable_value']
-
-
-@pytest.mark.asyncio
-async def test_list_app_connections_async_from_dict():
-    await test_list_app_connections_async(request_type=dict)
-
-def test_list_app_connections_field_headers():
-    client = AppConnectionsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = app_connections_service.ListAppConnectionsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_app_connections),
-            '__call__') as call:
-        call.return_value = app_connections_service.ListAppConnectionsResponse()
-        client.list_app_connections(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'parent=parent_value',
-        ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_app_connections_field_headers_async():
-    client = AppConnectionsServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = app_connections_service.ListAppConnectionsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_app_connections),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(app_connections_service.ListAppConnectionsResponse())
-        await client.list_app_connections(request)
-
-        # Establish that the underlying gRPC stub method was called.
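-        # grpc_helpers_async.FakeUnaryUnaryCall wraps a plain response message
-        # in an awaitable that stands in for a grpc.aio call object, which is
-        # what lets the mocked stub above be awaited like a real async RPC.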
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'parent=parent_value',
-        ) in kw['metadata']
-
-
-def test_list_app_connections_flattened():
-    client = AppConnectionsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_app_connections),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = app_connections_service.ListAppConnectionsResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_app_connections(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_app_connections_flattened_error():
-    client = AppConnectionsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_app_connections(
-            app_connections_service.ListAppConnectionsRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_app_connections_flattened_async():
-    client = AppConnectionsServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_app_connections),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(app_connections_service.ListAppConnectionsResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_app_connections(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_app_connections_flattened_error_async():
-    client = AppConnectionsServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_app_connections(
-            app_connections_service.ListAppConnectionsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_app_connections_pager(transport_name: str = "grpc"):
-    client = AppConnectionsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_app_connections),
-            '__call__') as call:
-        # Set the response to a series of pages.
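-        # The pager drives the paging loop itself: each iteration re-invokes
-        # the stub with the previous response's next_page_token until a page
-        # arrives without a token. The trailing RuntimeError is a sentinel that
-        # makes the test fail loudly if the pager ever asks for a fifth page.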
-        call.side_effect = (
-            app_connections_service.ListAppConnectionsResponse(
-                app_connections=[
-                    app_connections_service.AppConnection(),
-                    app_connections_service.AppConnection(),
-                    app_connections_service.AppConnection(),
-                ],
-                next_page_token='abc',
-            ),
-            app_connections_service.ListAppConnectionsResponse(
-                app_connections=[],
-                next_page_token='def',
-            ),
-            app_connections_service.ListAppConnectionsResponse(
-                app_connections=[
-                    app_connections_service.AppConnection(),
-                ],
-                next_page_token='ghi',
-            ),
-            app_connections_service.ListAppConnectionsResponse(
-                app_connections=[
-                    app_connections_service.AppConnection(),
-                    app_connections_service.AppConnection(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        expected_metadata = ()
-        retry = retries.Retry()
-        timeout = 5
-        expected_metadata = tuple(expected_metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('parent', ''),
-            )),
-        )
-        pager = client.list_app_connections(request={}, retry=retry, timeout=timeout)
-
-        assert pager._metadata == expected_metadata
-        assert pager._retry == retry
-        assert pager._timeout == timeout
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, app_connections_service.AppConnection)
-                   for i in results)
-
-
-def test_list_app_connections_pages(transport_name: str = "grpc"):
-    client = AppConnectionsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_app_connections),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            app_connections_service.ListAppConnectionsResponse(
-                app_connections=[
-                    app_connections_service.AppConnection(),
-                    app_connections_service.AppConnection(),
-                    app_connections_service.AppConnection(),
-                ],
-                next_page_token='abc',
-            ),
-            app_connections_service.ListAppConnectionsResponse(
-                app_connections=[],
-                next_page_token='def',
-            ),
-            app_connections_service.ListAppConnectionsResponse(
-                app_connections=[
-                    app_connections_service.AppConnection(),
-                ],
-                next_page_token='ghi',
-            ),
-            app_connections_service.ListAppConnectionsResponse(
-                app_connections=[
-                    app_connections_service.AppConnection(),
-                    app_connections_service.AppConnection(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = list(client.list_app_connections(request={}).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.asyncio
-async def test_list_app_connections_async_pager():
-    client = AppConnectionsServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_app_connections),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
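-        # Same four-page fixture as the sync pager test above; the async
-        # surface differs in that the call is awaited to obtain the pager and
-        # the items are then consumed with `async for`.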
- call.side_effect = ( - app_connections_service.ListAppConnectionsResponse( - app_connections=[ - app_connections_service.AppConnection(), - app_connections_service.AppConnection(), - app_connections_service.AppConnection(), - ], - next_page_token='abc', - ), - app_connections_service.ListAppConnectionsResponse( - app_connections=[], - next_page_token='def', - ), - app_connections_service.ListAppConnectionsResponse( - app_connections=[ - app_connections_service.AppConnection(), - ], - next_page_token='ghi', - ), - app_connections_service.ListAppConnectionsResponse( - app_connections=[ - app_connections_service.AppConnection(), - app_connections_service.AppConnection(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_app_connections(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, app_connections_service.AppConnection) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_app_connections_async_pages(): - client = AppConnectionsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_app_connections), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - app_connections_service.ListAppConnectionsResponse( - app_connections=[ - app_connections_service.AppConnection(), - app_connections_service.AppConnection(), - app_connections_service.AppConnection(), - ], - next_page_token='abc', - ), - app_connections_service.ListAppConnectionsResponse( - app_connections=[], - next_page_token='def', - ), - app_connections_service.ListAppConnectionsResponse( - app_connections=[ - app_connections_service.AppConnection(), - ], - next_page_token='ghi', - ), - app_connections_service.ListAppConnectionsResponse( - app_connections=[ - app_connections_service.AppConnection(), - app_connections_service.AppConnection(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_app_connections(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - app_connections_service.GetAppConnectionRequest, - dict, -]) -def test_get_app_connection(request_type, transport: str = 'grpc'): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_app_connection), - '__call__') as call: - # Designate an appropriate return value for the call. 
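-        # The trailing underscore in `type_` is proto-plus's renaming of proto
-        # fields that collide with Python builtins; on the wire the field is
-        # still `type`.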
- call.return_value = app_connections_service.AppConnection( - name='name_value', - display_name='display_name_value', - uid='uid_value', - type_=app_connections_service.AppConnection.Type.TCP_PROXY, - connectors=['connectors_value'], - state=app_connections_service.AppConnection.State.CREATING, - ) - response = client.get_app_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = app_connections_service.GetAppConnectionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, app_connections_service.AppConnection) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.uid == 'uid_value' - assert response.type_ == app_connections_service.AppConnection.Type.TCP_PROXY - assert response.connectors == ['connectors_value'] - assert response.state == app_connections_service.AppConnection.State.CREATING - - -def test_get_app_connection_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = app_connections_service.GetAppConnectionRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_app_connection), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_app_connection(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == app_connections_service.GetAppConnectionRequest( - name='name_value', - ) - -def test_get_app_connection_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_app_connection in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_app_connection] = mock_rpc - request = {} - client.get_app_connection(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.get_app_connection(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_app_connection_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = AppConnectionsServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.get_app_connection in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.get_app_connection] = mock_rpc
-
-        request = {}
-        await client.get_app_connection(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.get_app_connection(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_app_connection_async(transport: str = 'grpc_asyncio', request_type=app_connections_service.GetAppConnectionRequest):
-    client = AppConnectionsServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_app_connection),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(app_connections_service.AppConnection(
-            name='name_value',
-            display_name='display_name_value',
-            uid='uid_value',
-            type_=app_connections_service.AppConnection.Type.TCP_PROXY,
-            connectors=['connectors_value'],
-            state=app_connections_service.AppConnection.State.CREATING,
-        ))
-        response = await client.get_app_connection(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = app_connections_service.GetAppConnectionRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
- assert isinstance(response, app_connections_service.AppConnection) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.uid == 'uid_value' - assert response.type_ == app_connections_service.AppConnection.Type.TCP_PROXY - assert response.connectors == ['connectors_value'] - assert response.state == app_connections_service.AppConnection.State.CREATING - - -@pytest.mark.asyncio -async def test_get_app_connection_async_from_dict(): - await test_get_app_connection_async(request_type=dict) - -def test_get_app_connection_field_headers(): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = app_connections_service.GetAppConnectionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_app_connection), - '__call__') as call: - call.return_value = app_connections_service.AppConnection() - client.get_app_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_app_connection_field_headers_async(): - client = AppConnectionsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = app_connections_service.GetAppConnectionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_app_connection), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(app_connections_service.AppConnection()) - await client.get_app_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_app_connection_flattened(): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_app_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = app_connections_service.AppConnection() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_app_connection( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_get_app_connection_flattened_error():
-    client = AppConnectionsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_app_connection(
-            app_connections_service.GetAppConnectionRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_get_app_connection_flattened_async():
-    client = AppConnectionsServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_app_connection),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(app_connections_service.AppConnection())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.get_app_connection(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_app_connection_flattened_error_async():
-    client = AppConnectionsServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.get_app_connection(
-            app_connections_service.GetAppConnectionRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    app_connections_service.CreateAppConnectionRequest,
-    dict,
-])
-def test_create_app_connection(request_type, transport: str = 'grpc'):
-    client = AppConnectionsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_app_connection),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/spam')
-        response = client.create_app_connection(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = app_connections_service.CreateAppConnectionRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, future.Future)
-
-
-def test_create_app_connection_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
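-    # CreateAppConnectionRequest's `request_id` is exactly the kind of
-    # UUID4-format field AIP-4235 covers; setting it explicitly below also
-    # exercises the rule that auto-population must never overwrite a value
-    # the caller supplied.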
- client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = app_connections_service.CreateAppConnectionRequest( - parent='parent_value', - app_connection_id='app_connection_id_value', - request_id='request_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_app_connection), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_app_connection(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == app_connections_service.CreateAppConnectionRequest( - parent='parent_value', - app_connection_id='app_connection_id_value', - request_id='request_id_value', - ) - -def test_create_app_connection_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_app_connection in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_app_connection] = mock_rpc - request = {} - client.create_app_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_app_connection(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_app_connection_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AppConnectionsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_app_connection in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_app_connection] = mock_rpc - - request = {} - await client.create_app_connection(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_app_connection(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_app_connection_async(transport: str = 'grpc_asyncio', request_type=app_connections_service.CreateAppConnectionRequest): - client = AppConnectionsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_app_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_app_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = app_connections_service.CreateAppConnectionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_app_connection_async_from_dict(): - await test_create_app_connection_async(request_type=dict) - -def test_create_app_connection_field_headers(): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = app_connections_service.CreateAppConnectionRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_app_connection), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_app_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_app_connection_field_headers_async(): - client = AppConnectionsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = app_connections_service.CreateAppConnectionRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_app_connection), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_app_connection(request) - - # Establish that the underlying gRPC stub method was called. 
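-        # This is GAPIC's implicit routing in action: the `parent` taken from
-        # the request body is percent-encoded into the x-goog-request-params
-        # metadata entry, roughly
-        #   gapic_v1.routing_header.to_grpc_metadata((('parent', 'p'),))
-        #   == ('x-goog-request-params', 'parent=p')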
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert (
-            'x-goog-request-params',
-            'parent=parent_value',
-        ) in kw['metadata']
-
-
-def test_create_app_connection_flattened():
-    client = AppConnectionsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_app_connection),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.create_app_connection(
-            parent='parent_value',
-            app_connection=app_connections_service.AppConnection(name='name_value'),
-            app_connection_id='app_connection_id_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].app_connection
-        mock_val = app_connections_service.AppConnection(name='name_value')
-        assert arg == mock_val
-        arg = args[0].app_connection_id
-        mock_val = 'app_connection_id_value'
-        assert arg == mock_val
-
-
-def test_create_app_connection_flattened_error():
-    client = AppConnectionsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.create_app_connection(
-            app_connections_service.CreateAppConnectionRequest(),
-            parent='parent_value',
-            app_connection=app_connections_service.AppConnection(name='name_value'),
-            app_connection_id='app_connection_id_value',
-        )
-
-@pytest.mark.asyncio
-async def test_create_app_connection_flattened_async():
-    client = AppConnectionsServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_app_connection),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.create_app_connection(
-            parent='parent_value',
-            app_connection=app_connections_service.AppConnection(name='name_value'),
-            app_connection_id='app_connection_id_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].app_connection - mock_val = app_connections_service.AppConnection(name='name_value') - assert arg == mock_val - arg = args[0].app_connection_id - mock_val = 'app_connection_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_app_connection_flattened_error_async(): - client = AppConnectionsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_app_connection( - app_connections_service.CreateAppConnectionRequest(), - parent='parent_value', - app_connection=app_connections_service.AppConnection(name='name_value'), - app_connection_id='app_connection_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - app_connections_service.UpdateAppConnectionRequest, - dict, -]) -def test_update_app_connection(request_type, transport: str = 'grpc'): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_app_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_app_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = app_connections_service.UpdateAppConnectionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_app_connection_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = app_connections_service.UpdateAppConnectionRequest( - request_id='request_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_app_connection), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.update_app_connection(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == app_connections_service.UpdateAppConnectionRequest( - request_id='request_id_value', - ) - -def test_update_app_connection_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_app_connection in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_app_connection] = mock_rpc - request = {} - client.update_app_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_app_connection(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_app_connection_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AppConnectionsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_app_connection in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_app_connection] = mock_rpc - - request = {} - await client.update_app_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_app_connection(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_app_connection_async(transport: str = 'grpc_asyncio', request_type=app_connections_service.UpdateAppConnectionRequest): - client = AppConnectionsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
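-    # "Optional" is a statement about proto3 serialization, not the API
-    # contract: every field has a zero default, so an empty
-    # UpdateAppConnectionRequest is a perfectly encodable message. The live
-    # service would presumably reject it, but that validation is server-side
-    # and the server is mocked out here.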
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_app_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_app_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = app_connections_service.UpdateAppConnectionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_app_connection_async_from_dict(): - await test_update_app_connection_async(request_type=dict) - -def test_update_app_connection_field_headers(): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = app_connections_service.UpdateAppConnectionRequest() - - request.app_connection.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_app_connection), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_app_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'app_connection.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_app_connection_field_headers_async(): - client = AppConnectionsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = app_connections_service.UpdateAppConnectionRequest() - - request.app_connection.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_app_connection), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_app_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'app_connection.name=name_value', - ) in kw['metadata'] - - -def test_update_app_connection_flattened(): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_app_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
-        client.update_app_connection(
-            app_connection=app_connections_service.AppConnection(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].app_connection
-        mock_val = app_connections_service.AppConnection(name='name_value')
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-
-def test_update_app_connection_flattened_error():
-    client = AppConnectionsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.update_app_connection(
-            app_connections_service.UpdateAppConnectionRequest(),
-            app_connection=app_connections_service.AppConnection(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-@pytest.mark.asyncio
-async def test_update_app_connection_flattened_async():
-    client = AppConnectionsServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_app_connection),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.update_app_connection(
-            app_connection=app_connections_service.AppConnection(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].app_connection
-        mock_val = app_connections_service.AppConnection(name='name_value')
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_app_connection_flattened_error_async():
-    client = AppConnectionsServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.update_app_connection(
-            app_connections_service.UpdateAppConnectionRequest(),
-            app_connection=app_connections_service.AppConnection(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    app_connections_service.DeleteAppConnectionRequest,
-    dict,
-])
-def test_delete_app_connection(request_type, transport: str = 'grpc'):
-    client = AppConnectionsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
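-    # Patching type(...).__call__ rather than the attribute itself is
-    # deliberate: transport.delete_app_connection is a gRPC multicallable
-    # created lazily and cached on the transport, and patching __call__ on its
-    # class intercepts invocations no matter when the instance was built.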
- with mock.patch.object( - type(client.transport.delete_app_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.delete_app_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = app_connections_service.DeleteAppConnectionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_app_connection_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = app_connections_service.DeleteAppConnectionRequest( - name='name_value', - request_id='request_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_app_connection), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_app_connection(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == app_connections_service.DeleteAppConnectionRequest( - name='name_value', - request_id='request_id_value', - ) - -def test_delete_app_connection_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_app_connection in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_app_connection] = mock_rpc - request = {} - client.delete_app_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
-        # Subsequent calls should use the cached wrapper
-        wrapper_fn.reset_mock()
-
-        client.delete_app_connection(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_delete_app_connection_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = AppConnectionsServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.delete_app_connection in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.delete_app_connection] = mock_rpc
-
-        request = {}
-        await client.delete_app_connection(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        # Operation methods call wrapper_fn to build a cached
-        # client._transport.operations_client instance on first rpc call.
-        # Subsequent calls should use the cached wrapper
-        wrapper_fn.reset_mock()
-
-        await client.delete_app_connection(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_delete_app_connection_async(transport: str = 'grpc_asyncio', request_type=app_connections_service.DeleteAppConnectionRequest):
-    client = AppConnectionsServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_app_connection),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        response = await client.delete_app_connection(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = app_connections_service.DeleteAppConnectionRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, future.Future)
-
-
-@pytest.mark.asyncio
-async def test_delete_app_connection_async_from_dict():
-    await test_delete_app_connection_async(request_type=dict)
-
-def test_delete_app_connection_field_headers():
-    client = AppConnectionsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = app_connections_service.DeleteAppConnectionRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_app_connection),
-            '__call__') as call:
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        client.delete_app_connection(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_delete_app_connection_field_headers_async():
-    client = AppConnectionsServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = app_connections_service.DeleteAppConnectionRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_app_connection),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
-        await client.delete_app_connection(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_delete_app_connection_flattened():
-    client = AppConnectionsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_app_connection),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.delete_app_connection(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_delete_app_connection_flattened_error():
-    client = AppConnectionsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_app_connection(
-            app_connections_service.DeleteAppConnectionRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_delete_app_connection_flattened_async():
-    client = AppConnectionsServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_app_connection),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.delete_app_connection(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_app_connection_flattened_error_async():
-    client = AppConnectionsServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.delete_app_connection(
-            app_connections_service.DeleteAppConnectionRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  app_connections_service.ResolveAppConnectionsRequest,
-  dict,
-])
-def test_resolve_app_connections(request_type, transport: str = 'grpc'):
-    client = AppConnectionsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.resolve_app_connections),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = app_connections_service.ResolveAppConnectionsResponse(
-            next_page_token='next_page_token_value',
-            unreachable=['unreachable_value'],
-        )
-        response = client.resolve_app_connections(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = app_connections_service.ResolveAppConnectionsRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ResolveAppConnectionsPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable == ['unreachable_value']
-
-
-def test_resolve_app_connections_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = AppConnectionsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = app_connections_service.ResolveAppConnectionsRequest(
-        parent='parent_value',
-        app_connector_id='app_connector_id_value',
-        page_token='page_token_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.resolve_app_connections),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client.resolve_app_connections(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == app_connections_service.ResolveAppConnectionsRequest( - parent='parent_value', - app_connector_id='app_connector_id_value', - page_token='page_token_value', - ) - -def test_resolve_app_connections_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.resolve_app_connections in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.resolve_app_connections] = mock_rpc - request = {} - client.resolve_app_connections(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.resolve_app_connections(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_resolve_app_connections_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AppConnectionsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.resolve_app_connections in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.resolve_app_connections] = mock_rpc - - request = {} - await client.resolve_app_connections(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.resolve_app_connections(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_resolve_app_connections_async(transport: str = 'grpc_asyncio', request_type=app_connections_service.ResolveAppConnectionsRequest): - client = AppConnectionsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.resolve_app_connections), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(app_connections_service.ResolveAppConnectionsResponse(
-            next_page_token='next_page_token_value',
-            unreachable=['unreachable_value'],
-        ))
-        response = await client.resolve_app_connections(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = app_connections_service.ResolveAppConnectionsRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ResolveAppConnectionsAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable == ['unreachable_value']
-
-
-@pytest.mark.asyncio
-async def test_resolve_app_connections_async_from_dict():
-    await test_resolve_app_connections_async(request_type=dict)
-
-def test_resolve_app_connections_field_headers():
-    client = AppConnectionsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = app_connections_service.ResolveAppConnectionsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.resolve_app_connections),
-            '__call__') as call:
-        call.return_value = app_connections_service.ResolveAppConnectionsResponse()
-        client.resolve_app_connections(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_resolve_app_connections_field_headers_async():
-    client = AppConnectionsServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = app_connections_service.ResolveAppConnectionsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.resolve_app_connections),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(app_connections_service.ResolveAppConnectionsResponse())
-        await client.resolve_app_connections(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_resolve_app_connections_flattened():
-    client = AppConnectionsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.resolve_app_connections),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = app_connections_service.ResolveAppConnectionsResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.resolve_app_connections(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_resolve_app_connections_flattened_error():
-    client = AppConnectionsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.resolve_app_connections(
-            app_connections_service.ResolveAppConnectionsRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_resolve_app_connections_flattened_async():
-    client = AppConnectionsServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.resolve_app_connections),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(app_connections_service.ResolveAppConnectionsResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.resolve_app_connections(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_resolve_app_connections_flattened_error_async():
-    client = AppConnectionsServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.resolve_app_connections(
-            app_connections_service.ResolveAppConnectionsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_resolve_app_connections_pager(transport_name: str = "grpc"):
-    client = AppConnectionsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.resolve_app_connections),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            app_connections_service.ResolveAppConnectionsResponse(
-                app_connection_details=[
-                    app_connections_service.ResolveAppConnectionsResponse.AppConnectionDetails(),
-                    app_connections_service.ResolveAppConnectionsResponse.AppConnectionDetails(),
-                    app_connections_service.ResolveAppConnectionsResponse.AppConnectionDetails(),
-                ],
-                next_page_token='abc',
-            ),
-            app_connections_service.ResolveAppConnectionsResponse(
-                app_connection_details=[],
-                next_page_token='def',
-            ),
-            app_connections_service.ResolveAppConnectionsResponse(
-                app_connection_details=[
-                    app_connections_service.ResolveAppConnectionsResponse.AppConnectionDetails(),
-                ],
-                next_page_token='ghi',
-            ),
-            app_connections_service.ResolveAppConnectionsResponse(
-                app_connection_details=[
-                    app_connections_service.ResolveAppConnectionsResponse.AppConnectionDetails(),
-                    app_connections_service.ResolveAppConnectionsResponse.AppConnectionDetails(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        retry = retries.Retry()
-        timeout = 5
-        expected_metadata = (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('parent', ''),
-            )),
-        )
-        pager = client.resolve_app_connections(request={}, retry=retry, timeout=timeout)
-
-        assert pager._metadata == expected_metadata
-        assert pager._retry == retry
-        assert pager._timeout == timeout
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, app_connections_service.ResolveAppConnectionsResponse.AppConnectionDetails)
-                   for i in results)
-
-
-def test_resolve_app_connections_pages(transport_name: str = "grpc"):
-    client = AppConnectionsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.resolve_app_connections),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            app_connections_service.ResolveAppConnectionsResponse(
-                app_connection_details=[
-                    app_connections_service.ResolveAppConnectionsResponse.AppConnectionDetails(),
-                    app_connections_service.ResolveAppConnectionsResponse.AppConnectionDetails(),
-                    app_connections_service.ResolveAppConnectionsResponse.AppConnectionDetails(),
-                ],
-                next_page_token='abc',
-            ),
-            app_connections_service.ResolveAppConnectionsResponse(
-                app_connection_details=[],
-                next_page_token='def',
-            ),
-            app_connections_service.ResolveAppConnectionsResponse(
-                app_connection_details=[
-                    app_connections_service.ResolveAppConnectionsResponse.AppConnectionDetails(),
-                ],
-                next_page_token='ghi',
-            ),
-            app_connections_service.ResolveAppConnectionsResponse(
-                app_connection_details=[
-                    app_connections_service.ResolveAppConnectionsResponse.AppConnectionDetails(),
-                    app_connections_service.ResolveAppConnectionsResponse.AppConnectionDetails(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = list(client.resolve_app_connections(request={}).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.asyncio
-async def test_resolve_app_connections_async_pager():
-    client = AppConnectionsServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.resolve_app_connections),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
- call.side_effect = ( - app_connections_service.ResolveAppConnectionsResponse( - app_connection_details=[ - app_connections_service.ResolveAppConnectionsResponse.AppConnectionDetails(), - app_connections_service.ResolveAppConnectionsResponse.AppConnectionDetails(), - app_connections_service.ResolveAppConnectionsResponse.AppConnectionDetails(), - ], - next_page_token='abc', - ), - app_connections_service.ResolveAppConnectionsResponse( - app_connection_details=[], - next_page_token='def', - ), - app_connections_service.ResolveAppConnectionsResponse( - app_connection_details=[ - app_connections_service.ResolveAppConnectionsResponse.AppConnectionDetails(), - ], - next_page_token='ghi', - ), - app_connections_service.ResolveAppConnectionsResponse( - app_connection_details=[ - app_connections_service.ResolveAppConnectionsResponse.AppConnectionDetails(), - app_connections_service.ResolveAppConnectionsResponse.AppConnectionDetails(), - ], - ), - RuntimeError, - ) - async_pager = await client.resolve_app_connections(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, app_connections_service.ResolveAppConnectionsResponse.AppConnectionDetails) - for i in responses) - - -@pytest.mark.asyncio -async def test_resolve_app_connections_async_pages(): - client = AppConnectionsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.resolve_app_connections), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - app_connections_service.ResolveAppConnectionsResponse( - app_connection_details=[ - app_connections_service.ResolveAppConnectionsResponse.AppConnectionDetails(), - app_connections_service.ResolveAppConnectionsResponse.AppConnectionDetails(), - app_connections_service.ResolveAppConnectionsResponse.AppConnectionDetails(), - ], - next_page_token='abc', - ), - app_connections_service.ResolveAppConnectionsResponse( - app_connection_details=[], - next_page_token='def', - ), - app_connections_service.ResolveAppConnectionsResponse( - app_connection_details=[ - app_connections_service.ResolveAppConnectionsResponse.AppConnectionDetails(), - ], - next_page_token='ghi', - ), - app_connections_service.ResolveAppConnectionsResponse( - app_connection_details=[ - app_connections_service.ResolveAppConnectionsResponse.AppConnectionDetails(), - app_connections_service.ResolveAppConnectionsResponse.AppConnectionDetails(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.resolve_app_connections(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_list_app_connections_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - 
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.list_app_connections in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.list_app_connections] = mock_rpc
-
-        request = {}
-        client.list_app_connections(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.list_app_connections(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_list_app_connections_rest_required_fields(request_type=app_connections_service.ListAppConnectionsRequest):
-    transport_class = transports.AppConnectionsServiceRestTransport
-
-    request_init = {}
-    request_init["parent"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_app_connections._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["parent"] = 'parent_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_app_connections._get_unset_required_fields(jsonified_request)
-    # Check that path parameters and body parameters are not mixing in.
-    assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", ))
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "parent" in jsonified_request
-    assert jsonified_request["parent"] == 'parent_value'
-
-    client = AppConnectionsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = app_connections_service.ListAppConnectionsResponse()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = app_connections_service.ListAppConnectionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_app_connections(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_app_connections_rest_unset_required_fields(): - transport = transports.AppConnectionsServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_app_connections._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_app_connections_rest_flattened(): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = app_connections_service.ListAppConnectionsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = app_connections_service.ListAppConnectionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_app_connections(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/appConnections" % client.transport._host, args[1]) - - -def test_list_app_connections_rest_flattened_error(transport: str = 'rest'): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_app_connections( - app_connections_service.ListAppConnectionsRequest(), - parent='parent_value', - ) - - -def test_list_app_connections_rest_pager(transport: str = 'rest'): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - app_connections_service.ListAppConnectionsResponse( - app_connections=[ - app_connections_service.AppConnection(), - app_connections_service.AppConnection(), - app_connections_service.AppConnection(), - ], - next_page_token='abc', - ), - app_connections_service.ListAppConnectionsResponse( - app_connections=[], - next_page_token='def', - ), - app_connections_service.ListAppConnectionsResponse( - app_connections=[ - app_connections_service.AppConnection(), - ], - next_page_token='ghi', - ), - app_connections_service.ListAppConnectionsResponse( - app_connections=[ - app_connections_service.AppConnection(), - app_connections_service.AppConnection(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(app_connections_service.ListAppConnectionsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - pager = client.list_app_connections(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, app_connections_service.AppConnection) - for i in results) - - pages = list(client.list_app_connections(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_get_app_connection_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_app_connection in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_app_connection] = mock_rpc - - request = {} - client.get_app_connection(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_app_connection(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_app_connection_rest_required_fields(request_type=app_connections_service.GetAppConnectionRequest): - transport_class = transports.AppConnectionsServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_app_connection._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_app_connection._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = app_connections_service.AppConnection() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = app_connections_service.AppConnection.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_app_connection(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_app_connection_rest_unset_required_fields(): - transport = transports.AppConnectionsServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_app_connection._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_app_connection_rest_flattened(): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = app_connections_service.AppConnection() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/appConnections/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = app_connections_service.AppConnection.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_app_connection(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/appConnections/*}" % client.transport._host, args[1]) - - -def test_get_app_connection_rest_flattened_error(transport: str = 'rest'): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_app_connection( - app_connections_service.GetAppConnectionRequest(), - name='name_value', - ) - - -def test_create_app_connection_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_app_connection in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_app_connection] = mock_rpc - - request = {} - client.create_app_connection(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_app_connection(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_app_connection_rest_required_fields(request_type=app_connections_service.CreateAppConnectionRequest): - transport_class = transports.AppConnectionsServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_app_connection._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_app_connection._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("app_connection_id", "request_id", "validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_app_connection(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_app_connection_rest_unset_required_fields(): - transport = transports.AppConnectionsServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_app_connection._get_unset_required_fields({}) - assert set(unset_fields) == (set(("appConnectionId", "requestId", "validateOnly", )) & set(("parent", "appConnection", ))) - - -def test_create_app_connection_rest_flattened(): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - app_connection=app_connections_service.AppConnection(name='name_value'), - app_connection_id='app_connection_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_app_connection(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/appConnections" % client.transport._host, args[1]) - - -def test_create_app_connection_rest_flattened_error(transport: str = 'rest'): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_app_connection( - app_connections_service.CreateAppConnectionRequest(), - parent='parent_value', - app_connection=app_connections_service.AppConnection(name='name_value'), - app_connection_id='app_connection_id_value', - ) - - -def test_update_app_connection_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_app_connection in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_app_connection] = mock_rpc - - request = {} - client.update_app_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_app_connection(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_app_connection_rest_required_fields(request_type=app_connections_service.UpdateAppConnectionRequest): - transport_class = transports.AppConnectionsServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_app_connection._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_app_connection._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("allow_missing", "request_id", "update_mask", "validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_app_connection(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_app_connection_rest_unset_required_fields(): - transport = transports.AppConnectionsServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_app_connection._get_unset_required_fields({}) - assert set(unset_fields) == (set(("allowMissing", "requestId", "updateMask", "validateOnly", )) & set(("updateMask", "appConnection", ))) - - -def test_update_app_connection_rest_flattened(): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'app_connection': {'name': 'projects/sample1/locations/sample2/appConnections/sample3'}} - - # get truthy value for each flattened field - mock_args = dict( - app_connection=app_connections_service.AppConnection(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_app_connection(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{app_connection.name=projects/*/locations/*/appConnections/*}" % client.transport._host, args[1]) - - -def test_update_app_connection_rest_flattened_error(transport: str = 'rest'): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_app_connection( - app_connections_service.UpdateAppConnectionRequest(), - app_connection=app_connections_service.AppConnection(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_delete_app_connection_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_app_connection in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_app_connection] = mock_rpc - - request = {} - client.delete_app_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_app_connection(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_app_connection_rest_required_fields(request_type=app_connections_service.DeleteAppConnectionRequest): - transport_class = transports.AppConnectionsServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_app_connection._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_app_connection._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("request_id", "validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode:
- # A uri without fields and an empty body will force all the
- # request fields to show up in the query_params.
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "delete",
- 'query_params': pb_request,
- }
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.delete_app_connection(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_delete_app_connection_rest_unset_required_fields():
- transport = transports.AppConnectionsServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.delete_app_connection._get_unset_required_fields({})
- assert set(unset_fields) == (set(("requestId", "validateOnly", )) & set(("name", )))
-
-
-def test_delete_app_connection_rest_flattened():
- client = AppConnectionsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = operations_pb2.Operation(name='operations/spam')
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'name': 'projects/sample1/locations/sample2/appConnections/sample3'}
-
- # get truthy value for each flattened field
- mock_args = dict(
- name='name_value',
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- client.delete_app_connection(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1/{name=projects/*/locations/*/appConnections/*}" % client.transport._host, args[1])
-
-
-def test_delete_app_connection_rest_flattened_error(transport: str = 'rest'):
- client = AppConnectionsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
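Editorial aside (the error-path test resumes below): these required-fields tests stub out `path_template.transcode` and hand the transport a plain dict; downstream, the transport only consumes the `uri`, `method`, and `query_params` keys, plus `body` for methods that carry a payload. A sketch of that shape; the `fake_transcode` signature here is an assumption for illustration, since the tests bypass the real matcher entirely:

# Illustrative only: the dict shape the tests return from the stubbed
# transcode(); the REST transport reads these keys to build the HTTP call.
def fake_transcode(http_options, message=None, **kwargs):
    return {
        "uri": "v1/sample_method",
        "method": "delete",
        "query_params": message,  # everything lands in the query string
    }

result = fake_transcode(None, message={"name": "name_value"})
assert {"uri", "method", "query_params"} <= set(result)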
- with pytest.raises(ValueError): - client.delete_app_connection( - app_connections_service.DeleteAppConnectionRequest(), - name='name_value', - ) - - -def test_resolve_app_connections_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.resolve_app_connections in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.resolve_app_connections] = mock_rpc - - request = {} - client.resolve_app_connections(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.resolve_app_connections(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_resolve_app_connections_rest_required_fields(request_type=app_connections_service.ResolveAppConnectionsRequest): - transport_class = transports.AppConnectionsServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["app_connector_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "appConnectorId" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).resolve_app_connections._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "appConnectorId" in jsonified_request - assert jsonified_request["appConnectorId"] == request_init["app_connector_id"] - - jsonified_request["parent"] = 'parent_value' - jsonified_request["appConnectorId"] = 'app_connector_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).resolve_app_connections._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("app_connector_id", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "appConnectorId" in jsonified_request - assert jsonified_request["appConnectorId"] == 'app_connector_id_value' - - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = app_connections_service.ResolveAppConnectionsResponse() - # Mock the http request call within the method and fake a response. 
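Editorial aside (the mocked request continues below): the `jsonified_request` logic above leans on proto3 JSON semantics, where default-valued fields are dropped and names are camelCased, which is why `appConnectorId` is absent until the test injects a value. A small runnable demonstration with a message type this file already imports:

# Illustrative: proto3 JSON serialization drops default-valued fields,
# which is why "appConnectorId" is missing until the test sets it.
from google.longrunning import operations_pb2
from google.protobuf import json_format

op = operations_pb2.Operation()       # every field at its default
assert json_format.MessageToJson(op) == "{}"

op.name = "operations/spam"           # non-default fields survive
assert "operations/spam" in json_format.MessageToJson(op)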
- with mock.patch.object(Session, 'request') as req:
- # We need to mock transcode() because providing default values
- # for required fields will fail the real version if the http_options
- # expect actual values for those fields.
- with mock.patch.object(path_template, 'transcode') as transcode:
- # A uri without fields and an empty body will force all the
- # request fields to show up in the query_params.
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "get",
- 'query_params': pb_request,
- }
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- # Convert return value to protobuf type
- return_value = app_connections_service.ResolveAppConnectionsResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.resolve_app_connections(request)
-
- expected_params = [
- (
- "appConnectorId",
- "",
- ),
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_resolve_app_connections_rest_unset_required_fields():
- transport = transports.AppConnectionsServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.resolve_app_connections._get_unset_required_fields({})
- assert set(unset_fields) == (set(("appConnectorId", "pageSize", "pageToken", )) & set(("parent", "appConnectorId", )))
-
-
-def test_resolve_app_connections_rest_flattened():
- client = AppConnectionsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = app_connections_service.ResolveAppConnectionsResponse()
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
- # get truthy value for each flattened field
- mock_args = dict(
- parent='parent_value',
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = app_connections_service.ResolveAppConnectionsResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- client.resolve_app_connections(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/appConnections:resolve" % client.transport._host, args[1])
-
-
-def test_resolve_app_connections_rest_flattened_error(transport: str = 'rest'):
- client = AppConnectionsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.resolve_app_connections(
- app_connections_service.ResolveAppConnectionsRequest(),
- parent='parent_value',
- )
-
-
-def test_resolve_app_connections_rest_pager(transport: str = 'rest'):
- client = AppConnectionsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, 'request') as req:
- # Set the response as a series of pages
- response = (
- app_connections_service.ResolveAppConnectionsResponse(
- app_connection_details=[
- app_connections_service.ResolveAppConnectionsResponse.AppConnectionDetails(),
- app_connections_service.ResolveAppConnectionsResponse.AppConnectionDetails(),
- app_connections_service.ResolveAppConnectionsResponse.AppConnectionDetails(),
- ],
- next_page_token='abc',
- ),
- app_connections_service.ResolveAppConnectionsResponse(
- app_connection_details=[],
- next_page_token='def',
- ),
- app_connections_service.ResolveAppConnectionsResponse(
- app_connection_details=[
- app_connections_service.ResolveAppConnectionsResponse.AppConnectionDetails(),
- ],
- next_page_token='ghi',
- ),
- app_connections_service.ResolveAppConnectionsResponse(
- app_connection_details=[
- app_connections_service.ResolveAppConnectionsResponse.AppConnectionDetails(),
- app_connections_service.ResolveAppConnectionsResponse.AppConnectionDetails(),
- ],
- ),
- )
- # Two responses for two calls
- response = response + response
-
- # Wrap the values into proper Response objs
- response = tuple(app_connections_service.ResolveAppConnectionsResponse.to_json(x) for x in response)
- return_values = tuple(Response() for i in response)
- for return_val, response_val in zip(return_values, response):
- return_val._content = response_val.encode('UTF-8')
- return_val.status_code = 200
- req.side_effect = return_values
-
- sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
- pager = client.resolve_app_connections(request=sample_request)
-
- results = list(pager)
- assert len(results) == 6
- assert all(isinstance(i, app_connections_service.ResolveAppConnectionsResponse.AppConnectionDetails)
- for i in results)
-
- pages = list(client.resolve_app_connections(request=sample_request).pages)
- for page_, token in zip(pages, ['abc','def','ghi', '']):
- assert page_.raw_page.next_page_token == token
-
-
-def test_credentials_transport_error():
- # It is an error to provide credentials and a transport instance.
- transport = transports.AppConnectionsServiceGrpcTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- with pytest.raises(ValueError):
- client = AppConnectionsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # It is an error to provide a credentials file and a transport instance.
- transport = transports.AppConnectionsServiceGrpcTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- with pytest.raises(ValueError):
- client = AppConnectionsServiceClient(
- client_options={"credentials_file": "credentials.json"},
- transport=transport,
- )
-
- # It is an error to provide an api_key and a transport instance.
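Editorial aside (the credentials/transport checks continue below): for reference, the pager exercised above is consumed like any GAPIC pager, where iterating it flattens items across pages and `.pages` yields each raw response. A hedged sketch, assuming `client` is the AppConnectionsServiceClient from these tests:

# Illustrative only: how the pager above is consumed. As in the test, a
# fresh call is made for item iteration and for page iteration.
request = {"parent": "projects/sample1/locations/sample2"}

items = list(client.resolve_app_connections(request=request))   # all details, all pages
pages = client.resolve_app_connections(request=request).pages   # lazy page iterator
tokens = [page.raw_page.next_page_token for page in pages]      # '' on the last page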
- transport = transports.AppConnectionsServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = AppConnectionsServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = AppConnectionsServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.AppConnectionsServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = AppConnectionsServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.AppConnectionsServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = AppConnectionsServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.AppConnectionsServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.AppConnectionsServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.AppConnectionsServiceGrpcTransport, - transports.AppConnectionsServiceGrpcAsyncIOTransport, - transports.AppConnectionsServiceRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = AppConnectionsServiceClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_app_connections_empty_call_grpc(): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_app_connections), - '__call__') as call: - call.return_value = app_connections_service.ListAppConnectionsResponse() - client.list_app_connections(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = app_connections_service.ListAppConnectionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_get_app_connection_empty_call_grpc(): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_app_connection), - '__call__') as call: - call.return_value = app_connections_service.AppConnection() - client.get_app_connection(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = app_connections_service.GetAppConnectionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_app_connection_empty_call_grpc(): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_app_connection), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_app_connection(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = app_connections_service.CreateAppConnectionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_app_connection_empty_call_grpc(): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_app_connection), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_app_connection(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = app_connections_service.UpdateAppConnectionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_app_connection_empty_call_grpc(): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_app_connection), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_app_connection(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = app_connections_service.DeleteAppConnectionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_resolve_app_connections_empty_call_grpc(): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.resolve_app_connections), - '__call__') as call: - call.return_value = app_connections_service.ResolveAppConnectionsResponse() - client.resolve_app_connections(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = app_connections_service.ResolveAppConnectionsRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = AppConnectionsServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = AppConnectionsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_app_connections_empty_call_grpc_asyncio(): - client = AppConnectionsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_app_connections), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(app_connections_service.ListAppConnectionsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - await client.list_app_connections(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = app_connections_service.ListAppConnectionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_app_connection_empty_call_grpc_asyncio(): - client = AppConnectionsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_app_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(app_connections_service.AppConnection( - name='name_value', - display_name='display_name_value', - uid='uid_value', - type_=app_connections_service.AppConnection.Type.TCP_PROXY, - connectors=['connectors_value'], - state=app_connections_service.AppConnection.State.CREATING, - )) - await client.get_app_connection(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = app_connections_service.GetAppConnectionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_app_connection_empty_call_grpc_asyncio(): - client = AppConnectionsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. 
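Editorial aside (the mocked asyncio call continues below): the asyncio variants wrap every fake response in `grpc_helpers_async.FakeUnaryUnaryCall` because the async client awaits the stub's return value, so a bare message would not be awaitable. A minimal demonstration of that wrapper, under the assumption that awaiting it simply yields the wrapped response (which is how these tests use it):

# Illustrative: why the asyncio tests wrap responses in FakeUnaryUnaryCall.
import asyncio
from google.api_core import grpc_helpers_async

async def demo():
    call = grpc_helpers_async.FakeUnaryUnaryCall("response-payload")
    assert await call == "response-payload"

asyncio.run(demo())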
- with mock.patch.object( - type(client.transport.create_app_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_app_connection(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = app_connections_service.CreateAppConnectionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_app_connection_empty_call_grpc_asyncio(): - client = AppConnectionsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_app_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_app_connection(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = app_connections_service.UpdateAppConnectionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_app_connection_empty_call_grpc_asyncio(): - client = AppConnectionsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_app_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_app_connection(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = app_connections_service.DeleteAppConnectionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_resolve_app_connections_empty_call_grpc_asyncio(): - client = AppConnectionsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.resolve_app_connections), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(app_connections_service.ResolveAppConnectionsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - await client.resolve_app_connections(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = app_connections_service.ResolveAppConnectionsRequest() - - assert args[0] == request_msg - - -def test_transport_kind_rest(): - transport = AppConnectionsServiceClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" - - -def test_list_app_connections_rest_bad_request(request_type=app_connections_service.ListAppConnectionsRequest): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_app_connections(request) - - -@pytest.mark.parametrize("request_type", [ - app_connections_service.ListAppConnectionsRequest, - dict, -]) -def test_list_app_connections_rest_call_success(request_type): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = app_connections_service.ListAppConnectionsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = app_connections_service.ListAppConnectionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_app_connections(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListAppConnectionsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_app_connections_rest_interceptors(null_interceptor): - transport = transports.AppConnectionsServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AppConnectionsServiceRestInterceptor(), - ) - client = AppConnectionsServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AppConnectionsServiceRestInterceptor, "post_list_app_connections") as post, \ - mock.patch.object(transports.AppConnectionsServiceRestInterceptor, "post_list_app_connections_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AppConnectionsServiceRestInterceptor, "pre_list_app_connections") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = app_connections_service.ListAppConnectionsRequest.pb(app_connections_service.ListAppConnectionsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = app_connections_service.ListAppConnectionsResponse.to_json(app_connections_service.ListAppConnectionsResponse()) - req.return_value.content = return_value - - request = app_connections_service.ListAppConnectionsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = app_connections_service.ListAppConnectionsResponse() - post_with_metadata.return_value = app_connections_service.ListAppConnectionsResponse(), metadata - - client.list_app_connections(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_app_connection_rest_bad_request(request_type=app_connections_service.GetAppConnectionRequest): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/appConnections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
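Editorial aside (the bad-request mock continues below): the interceptor tests patch the `pre_*`/`post_*` hooks directly; a user-supplied interceptor would instead subclass the interceptor class and override those hooks. A hedged sketch inferred from how the tests drive the hooks; the exact signatures are assumptions, not documented surface:

# Illustrative sketch of a custom REST interceptor, assuming the hook
# shapes the test exercises: pre returns (request, metadata), post
# returns the (possibly modified) response.
class LoggingInterceptor(transports.AppConnectionsServiceRestInterceptor):
    def pre_list_app_connections(self, request, metadata):
        print("about to list:", request)
        return request, metadata

    def post_list_app_connections(self, response):
        print("listed:", type(response).__name__)
        return response

transport = transports.AppConnectionsServiceRestTransport(
    credentials=ga_credentials.AnonymousCredentials(),
    interceptor=LoggingInterceptor(),
)
client = AppConnectionsServiceClient(transport=transport)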
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_app_connection(request) - - -@pytest.mark.parametrize("request_type", [ - app_connections_service.GetAppConnectionRequest, - dict, -]) -def test_get_app_connection_rest_call_success(request_type): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/appConnections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = app_connections_service.AppConnection( - name='name_value', - display_name='display_name_value', - uid='uid_value', - type_=app_connections_service.AppConnection.Type.TCP_PROXY, - connectors=['connectors_value'], - state=app_connections_service.AppConnection.State.CREATING, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = app_connections_service.AppConnection.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_app_connection(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, app_connections_service.AppConnection) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.uid == 'uid_value' - assert response.type_ == app_connections_service.AppConnection.Type.TCP_PROXY - assert response.connectors == ['connectors_value'] - assert response.state == app_connections_service.AppConnection.State.CREATING - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_app_connection_rest_interceptors(null_interceptor): - transport = transports.AppConnectionsServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AppConnectionsServiceRestInterceptor(), - ) - client = AppConnectionsServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AppConnectionsServiceRestInterceptor, "post_get_app_connection") as post, \ - mock.patch.object(transports.AppConnectionsServiceRestInterceptor, "post_get_app_connection_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AppConnectionsServiceRestInterceptor, "pre_get_app_connection") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = app_connections_service.GetAppConnectionRequest.pb(app_connections_service.GetAppConnectionRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = app_connections_service.AppConnection.to_json(app_connections_service.AppConnection()) - req.return_value.content = return_value - - request = app_connections_service.GetAppConnectionRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = app_connections_service.AppConnection() - post_with_metadata.return_value = app_connections_service.AppConnection(), metadata - - client.get_app_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_app_connection_rest_bad_request(request_type=app_connections_service.CreateAppConnectionRequest): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_app_connection(request) - - -@pytest.mark.parametrize("request_type", [ - app_connections_service.CreateAppConnectionRequest, - dict, -]) -def test_create_app_connection_rest_call_success(request_type): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["app_connection"] = {'name': 'name_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'display_name': 'display_name_value', 'uid': 'uid_value', 'type_': 1, 'application_endpoint': {'host': 'host_value', 'port': 453}, 'connectors': ['connectors_value1', 'connectors_value2'], 'state': 1, 'gateway': {'type_': 1, 'uri': 'uri_value', 'ingress_port': 1311, 'app_gateway': 'app_gateway_value'}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = app_connections_service.CreateAppConnectionRequest.meta.fields["app_connection"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["app_connection"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["app_connection"][field])): - del request_init["app_connection"][field][i][subfield] - else: - del request_init["app_connection"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_app_connection(request) - - # Establish that the response is the type that we expect. 
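Editorial aside on the `get_message_fields` helper above (the response assertion for this test continues immediately after the sketch): the field-pruning logic distinguishes proto-plus wrappers from raw `*_pb2` messages solely by probing for `DESCRIPTOR`. A compact demonstration of that probe, using the same assumption the generated helper makes:

# Illustrative: the DESCRIPTOR probe used by get_message_fields() above.
# Per that helper's logic, proto-plus classes do not expose DESCRIPTOR,
# while raw *_pb2 classes do.
from google.longrunning import operations_pb2

def is_proto_plus(message_cls):
    return not hasattr(message_cls, "DESCRIPTOR")

assert not is_proto_plus(operations_pb2.Operation)           # raw protobuf
assert is_proto_plus(app_connections_service.AppConnection)  # proto-plus wrapper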
- assert response.operation.name == 'operations/spam'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_create_app_connection_rest_interceptors(null_interceptor):
- transport = transports.AppConnectionsServiceRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.AppConnectionsServiceRestInterceptor(),
- )
- client = AppConnectionsServiceClient(transport=transport)
-
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(operation.Operation, "_set_result_from_operation"), \
- mock.patch.object(transports.AppConnectionsServiceRestInterceptor, "post_create_app_connection") as post, \
- mock.patch.object(transports.AppConnectionsServiceRestInterceptor, "post_create_app_connection_with_metadata") as post_with_metadata, \
- mock.patch.object(transports.AppConnectionsServiceRestInterceptor, "pre_create_app_connection") as pre:
- pre.assert_not_called()
- post.assert_not_called()
- post_with_metadata.assert_not_called()
- pb_message = app_connections_service.CreateAppConnectionRequest.pb(app_connections_service.CreateAppConnectionRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = mock.Mock()
- req.return_value.status_code = 200
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- return_value = json_format.MessageToJson(operations_pb2.Operation())
- req.return_value.content = return_value
-
- request = app_connections_service.CreateAppConnectionRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = operations_pb2.Operation()
- post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
- client.create_app_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
- pre.assert_called_once()
- post.assert_called_once()
- post_with_metadata.assert_called_once()
-
-
-def test_update_app_connection_rest_bad_request(request_type=app_connections_service.UpdateAppConnectionRequest):
- client = AppConnectionsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
- # send a request that will satisfy transcoding
- request_init = {'app_connection': {'name': 'projects/sample1/locations/sample2/appConnections/sample3'}}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_app_connection(request) - - -@pytest.mark.parametrize("request_type", [ - app_connections_service.UpdateAppConnectionRequest, - dict, -]) -def test_update_app_connection_rest_call_success(request_type): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'app_connection': {'name': 'projects/sample1/locations/sample2/appConnections/sample3'}} - request_init["app_connection"] = {'name': 'projects/sample1/locations/sample2/appConnections/sample3', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'display_name': 'display_name_value', 'uid': 'uid_value', 'type_': 1, 'application_endpoint': {'host': 'host_value', 'port': 453}, 'connectors': ['connectors_value1', 'connectors_value2'], 'state': 1, 'gateway': {'type_': 1, 'uri': 'uri_value', 'ingress_port': 1311, 'app_gateway': 'app_gateway_value'}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = app_connections_service.UpdateAppConnectionRequest.meta.fields["app_connection"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["app_connection"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["app_connection"][field])): - del request_init["app_connection"][field][i][subfield] - else: - del request_init["app_connection"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_app_connection(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == 'operations/spam'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_update_app_connection_rest_interceptors(null_interceptor):
- transport = transports.AppConnectionsServiceRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.AppConnectionsServiceRestInterceptor(),
- )
- client = AppConnectionsServiceClient(transport=transport)
-
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(operation.Operation, "_set_result_from_operation"), \
- mock.patch.object(transports.AppConnectionsServiceRestInterceptor, "post_update_app_connection") as post, \
- mock.patch.object(transports.AppConnectionsServiceRestInterceptor, "post_update_app_connection_with_metadata") as post_with_metadata, \
- mock.patch.object(transports.AppConnectionsServiceRestInterceptor, "pre_update_app_connection") as pre:
- pre.assert_not_called()
- post.assert_not_called()
- post_with_metadata.assert_not_called()
- pb_message = app_connections_service.UpdateAppConnectionRequest.pb(app_connections_service.UpdateAppConnectionRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = mock.Mock()
- req.return_value.status_code = 200
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- return_value = json_format.MessageToJson(operations_pb2.Operation())
- req.return_value.content = return_value
-
- request = app_connections_service.UpdateAppConnectionRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = operations_pb2.Operation()
- post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
- client.update_app_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
- pre.assert_called_once()
- post.assert_called_once()
- post_with_metadata.assert_called_once()
-
-
-def test_delete_app_connection_rest_bad_request(request_type=app_connections_service.DeleteAppConnectionRequest):
- client = AppConnectionsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
- # send a request that will satisfy transcoding
- request_init = {'name': 'projects/sample1/locations/sample2/appConnections/sample3'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
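Editorial aside (the bad-request mock continues below): these bad-request tests rely on `google.api_core.exceptions` translating HTTP status codes into typed exceptions, which is why a mocked 400 surfaces as `core_exceptions.BadRequest`. A hedged sketch of that translation using a requests-style fake response:

# Illustrative: how a 400 response becomes core_exceptions.BadRequest.
# from_http_response() expects a requests.Response-like object.
from unittest import mock
from google.api_core import exceptions as core_exceptions

fake = mock.Mock()
fake.status_code = 400
fake.json = mock.Mock(return_value={"error": {"message": "bad request"}})
fake.request = mock.Mock(method="GET", url="https://example.com/v1/sample")

exc = core_exceptions.from_http_response(fake)
assert isinstance(exc, core_exceptions.BadRequest)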
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- json_return_value = ''
- response_value.json = mock.Mock(return_value={})
- response_value.status_code = 400
- response_value.request = mock.Mock()
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- client.delete_app_connection(request)
-
-
-@pytest.mark.parametrize("request_type", [
- app_connections_service.DeleteAppConnectionRequest,
- dict,
-])
-def test_delete_app_connection_rest_call_success(request_type):
- client = AppConnectionsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
-
- # send a request that will satisfy transcoding
- request_init = {'name': 'projects/sample1/locations/sample2/appConnections/sample3'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = operations_pb2.Operation(name='operations/spam')
-
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
- response_value.content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- response = client.delete_app_connection(request)
-
- # Establish that the response is the type that we expect.
- assert response.operation.name == 'operations/spam'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_delete_app_connection_rest_interceptors(null_interceptor):
- transport = transports.AppConnectionsServiceRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.AppConnectionsServiceRestInterceptor(),
- )
- client = AppConnectionsServiceClient(transport=transport)
-
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(operation.Operation, "_set_result_from_operation"), \
- mock.patch.object(transports.AppConnectionsServiceRestInterceptor, "post_delete_app_connection") as post, \
- mock.patch.object(transports.AppConnectionsServiceRestInterceptor, "post_delete_app_connection_with_metadata") as post_with_metadata, \
- mock.patch.object(transports.AppConnectionsServiceRestInterceptor, "pre_delete_app_connection") as pre:
- pre.assert_not_called()
- post.assert_not_called()
- post_with_metadata.assert_not_called()
- pb_message = app_connections_service.DeleteAppConnectionRequest.pb(app_connections_service.DeleteAppConnectionRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = mock.Mock()
- req.return_value.status_code = 200
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- return_value = json_format.MessageToJson(operations_pb2.Operation())
- req.return_value.content = return_value
-
- request = app_connections_service.DeleteAppConnectionRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = operations_pb2.Operation()
-
post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.delete_app_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_resolve_app_connections_rest_bad_request(request_type=app_connections_service.ResolveAppConnectionsRequest): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.resolve_app_connections(request) - - -@pytest.mark.parametrize("request_type", [ - app_connections_service.ResolveAppConnectionsRequest, - dict, -]) -def test_resolve_app_connections_rest_call_success(request_type): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = app_connections_service.ResolveAppConnectionsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = app_connections_service.ResolveAppConnectionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.resolve_app_connections(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ResolveAppConnectionsPager)
- assert response.next_page_token == 'next_page_token_value'
- assert response.unreachable == ['unreachable_value']
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_resolve_app_connections_rest_interceptors(null_interceptor):
- transport = transports.AppConnectionsServiceRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.AppConnectionsServiceRestInterceptor(),
- )
- client = AppConnectionsServiceClient(transport=transport)
-
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(transports.AppConnectionsServiceRestInterceptor, "post_resolve_app_connections") as post, \
- mock.patch.object(transports.AppConnectionsServiceRestInterceptor, "post_resolve_app_connections_with_metadata") as post_with_metadata, \
- mock.patch.object(transports.AppConnectionsServiceRestInterceptor, "pre_resolve_app_connections") as pre:
- pre.assert_not_called()
- post.assert_not_called()
- post_with_metadata.assert_not_called()
- pb_message = app_connections_service.ResolveAppConnectionsRequest.pb(app_connections_service.ResolveAppConnectionsRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = mock.Mock()
- req.return_value.status_code = 200
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- return_value = app_connections_service.ResolveAppConnectionsResponse.to_json(app_connections_service.ResolveAppConnectionsResponse())
- req.return_value.content = return_value
-
- request = app_connections_service.ResolveAppConnectionsRequest()
- metadata = [
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = app_connections_service.ResolveAppConnectionsResponse()
- post_with_metadata.return_value = app_connections_service.ResolveAppConnectionsResponse(), metadata
-
- client.resolve_app_connections(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
- pre.assert_called_once()
- post.assert_called_once()
- post_with_metadata.assert_called_once()
-
-
-def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest):
- client = AppConnectionsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
- request = request_type()
- request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request)
-
- # Mock the http request call within the method and fake a BadRequest error.
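- # google.api_core maps the 400 status set below to core_exceptions.BadRequest.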
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_location(request) - - -@pytest.mark.parametrize("request_type", [ - locations_pb2.GetLocationRequest, - dict, -]) -def test_get_location_rest(request_type): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.Location() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_location(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocationsRequest): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_locations(request) - - -@pytest.mark.parametrize("request_type", [ - locations_pb2.ListLocationsRequest, - dict, -]) -def test_list_locations_rest(request_type): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.ListLocationsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_locations(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, locations_pb2.ListLocationsResponse) - - -def test_get_iam_policy_rest_bad_request(request_type=iam_policy_pb2.GetIamPolicyRequest): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/appConnections/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_iam_policy(request) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.GetIamPolicyRequest, - dict, -]) -def test_get_iam_policy_rest(request_type): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'resource': 'projects/sample1/locations/sample2/appConnections/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_iam_policy(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - -def test_set_iam_policy_rest_bad_request(request_type=iam_policy_pb2.SetIamPolicyRequest): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/appConnections/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.set_iam_policy(request) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.SetIamPolicyRequest, - dict, -]) -def test_set_iam_policy_rest(request_type): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'resource': 'projects/sample1/locations/sample2/appConnections/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.set_iam_policy(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - -def test_test_iam_permissions_rest_bad_request(request_type=iam_policy_pb2.TestIamPermissionsRequest): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/appConnections/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.test_iam_permissions(request) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.TestIamPermissionsRequest, - dict, -]) -def test_test_iam_permissions_rest(request_type): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'resource': 'projects/sample1/locations/sample2/appConnections/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = iam_policy_pb2.TestIamPermissionsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.test_iam_permissions(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - -def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.cancel_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) -def test_cancel_operation_rest(request_type): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.DeleteOperationRequest, - dict, -]) -def test_delete_operation_rest(request_type): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. 
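- # DeleteOperation returns google.protobuf.Empty, which the client surfaces as None.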
- assert response is None - - -def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) -def test_get_operation_rest(request_type): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_operations(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) -def test_list_operations_rest(request_type): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_operations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_initialize_client_w_rest(): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_app_connections_empty_call_rest(): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_app_connections), - '__call__') as call: - client.list_app_connections(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = app_connections_service.ListAppConnectionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_app_connection_empty_call_rest(): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_app_connection), - '__call__') as call: - client.get_app_connection(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = app_connections_service.GetAppConnectionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_app_connection_empty_call_rest(): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_app_connection), - '__call__') as call: - client.create_app_connection(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = app_connections_service.CreateAppConnectionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_app_connection_empty_call_rest(): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_app_connection), - '__call__') as call: - client.update_app_connection(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- request_msg = app_connections_service.UpdateAppConnectionRequest()
-
- assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_delete_app_connection_empty_call_rest():
- client = AppConnectionsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the actual call, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_app_connection),
- '__call__') as call:
- client.delete_app_connection(request=None)
-
- # Establish that the underlying stub method was called.
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- request_msg = app_connections_service.DeleteAppConnectionRequest()
-
- assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_resolve_app_connections_empty_call_rest():
- client = AppConnectionsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the actual call, and fake the request.
- with mock.patch.object(
- type(client.transport.resolve_app_connections),
- '__call__') as call:
- client.resolve_app_connections(request=None)
-
- # Establish that the underlying stub method was called.
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- request_msg = app_connections_service.ResolveAppConnectionsRequest()
-
- assert args[0] == request_msg
-
-
-def test_app_connections_service_rest_lro_client():
- client = AppConnectionsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
- transport = client.transport
-
- # Ensure that we have an api-core operations client.
- assert isinstance(
- transport.operations_client,
- operations_v1.AbstractOperationsClient,
- )
-
- # Ensure that subsequent calls to the property send the exact same object.
- assert transport.operations_client is transport.operations_client
-
-def test_transport_grpc_default():
- # A client should use the gRPC transport by default.
- client = AppConnectionsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- assert isinstance(
- client.transport,
- transports.AppConnectionsServiceGrpcTransport,
- )
-
-def test_app_connections_service_base_transport_error():
- # Passing both a credentials object and credentials_file should raise an error.
- with pytest.raises(core_exceptions.DuplicateCredentialArgs):
- transport = transports.AppConnectionsServiceTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- credentials_file="credentials.json"
- )
-
-
-def test_app_connections_service_base_transport():
- # Instantiate the base transport.
- with mock.patch('google.cloud.beyondcorp_appconnections_v1.services.app_connections_service.transports.AppConnectionsServiceTransport.__init__') as Transport:
- Transport.return_value = None
- transport = transports.AppConnectionsServiceTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Every method on the transport should just blindly
- # raise NotImplementedError.
- methods = (
- 'list_app_connections',
- 'get_app_connection',
- 'create_app_connection',
- 'update_app_connection',
- 'delete_app_connection',
- 'resolve_app_connections',
- 'set_iam_policy',
- 'get_iam_policy',
- 'test_iam_permissions',
- 'get_location',
- 'list_locations',
- 'get_operation',
- 'cancel_operation',
- 'delete_operation',
- 'list_operations',
- )
- for method in methods:
- with pytest.raises(NotImplementedError):
- getattr(transport, method)(request=object())
-
- with pytest.raises(NotImplementedError):
- transport.close()
-
- # Additionally, the LRO client (a property) should
- # also raise NotImplementedError.
- with pytest.raises(NotImplementedError):
- transport.operations_client
-
- # Catch-all for all remaining methods and properties.
- remainder = [
- 'kind',
- ]
- for r in remainder:
- with pytest.raises(NotImplementedError):
- getattr(transport, r)()
-
-
-def test_app_connections_service_base_transport_with_credentials_file():
- # Instantiate the base transport with a credentials file.
- with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.beyondcorp_appconnections_v1.services.app_connections_service.transports.AppConnectionsServiceTransport._prep_wrapped_messages') as Transport:
- Transport.return_value = None
- load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
- transport = transports.AppConnectionsServiceTransport(
- credentials_file="credentials.json",
- quota_project_id="octopus",
- )
- load_creds.assert_called_once_with("credentials.json",
- scopes=None,
- default_scopes=(
- 'https://www.googleapis.com/auth/cloud-platform',
- ),
- quota_project_id="octopus",
- )
-
-
-def test_app_connections_service_base_transport_with_adc():
- # Test the default credentials are used if credentials and credentials_file are None.
- with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.beyondcorp_appconnections_v1.services.app_connections_service.transports.AppConnectionsServiceTransport._prep_wrapped_messages') as Transport:
- Transport.return_value = None
- adc.return_value = (ga_credentials.AnonymousCredentials(), None)
- transport = transports.AppConnectionsServiceTransport()
- adc.assert_called_once()
-
-
-def test_app_connections_service_auth_adc():
- # If no credentials are provided, we should use ADC credentials.
- with mock.patch.object(google.auth, 'default', autospec=True) as adc:
- adc.return_value = (ga_credentials.AnonymousCredentials(), None)
- AppConnectionsServiceClient()
- adc.assert_called_once_with(
- scopes=None,
- default_scopes=(
- 'https://www.googleapis.com/auth/cloud-platform',
- ),
- quota_project_id=None,
- )
-
-
-@pytest.mark.parametrize(
- "transport_class",
- [
- transports.AppConnectionsServiceGrpcTransport,
- transports.AppConnectionsServiceGrpcAsyncIOTransport,
- ],
-)
-def test_app_connections_service_transport_auth_adc(transport_class):
- # If credentials and host are not provided, the transport class should use
- # ADC credentials.
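- # ADC here means Application Default Credentials, resolved via google.auth.default().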
- with mock.patch.object(google.auth, 'default', autospec=True) as adc:
- adc.return_value = (ga_credentials.AnonymousCredentials(), None)
- transport_class(quota_project_id="octopus", scopes=["1", "2"])
- adc.assert_called_once_with(
- scopes=["1", "2"],
- default_scopes=('https://www.googleapis.com/auth/cloud-platform',),
- quota_project_id="octopus",
- )
-
-
-@pytest.mark.parametrize(
- "transport_class",
- [
- transports.AppConnectionsServiceGrpcTransport,
- transports.AppConnectionsServiceGrpcAsyncIOTransport,
- transports.AppConnectionsServiceRestTransport,
- ],
-)
-def test_app_connections_service_transport_auth_gdch_credentials(transport_class):
- host = 'https://language.com'
- api_audience_tests = [None, 'https://language2.com']
- api_audience_expect = [host, 'https://language2.com']
- for t, e in zip(api_audience_tests, api_audience_expect):
- with mock.patch.object(google.auth, 'default', autospec=True) as adc:
- gdch_mock = mock.MagicMock()
- type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock)
- adc.return_value = (gdch_mock, None)
- transport_class(host=host, api_audience=t)
- gdch_mock.with_gdch_audience.assert_called_once_with(
- e
- )
-
-
-@pytest.mark.parametrize(
- "transport_class,grpc_helpers",
- [
- (transports.AppConnectionsServiceGrpcTransport, grpc_helpers),
- (transports.AppConnectionsServiceGrpcAsyncIOTransport, grpc_helpers_async)
- ],
-)
-def test_app_connections_service_transport_create_channel(transport_class, grpc_helpers):
- # If credentials and host are not provided, the transport class should use
- # ADC credentials.
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object(
- grpc_helpers, "create_channel", autospec=True
- ) as create_channel:
- creds = ga_credentials.AnonymousCredentials()
- adc.return_value = (creds, None)
- transport_class(
- quota_project_id="octopus",
- scopes=["1", "2"]
- )
-
- create_channel.assert_called_with(
- "beyondcorp.googleapis.com:443",
- credentials=creds,
- credentials_file=None,
- quota_project_id="octopus",
- default_scopes=(
- 'https://www.googleapis.com/auth/cloud-platform',
- ),
- scopes=["1", "2"],
- default_host="beyondcorp.googleapis.com",
- ssl_credentials=None,
- options=[
- ("grpc.max_send_message_length", -1),
- ("grpc.max_receive_message_length", -1),
- ],
- )
-
-
-@pytest.mark.parametrize("transport_class", [transports.AppConnectionsServiceGrpcTransport, transports.AppConnectionsServiceGrpcAsyncIOTransport])
-def test_app_connections_service_grpc_transport_client_cert_source_for_mtls(
- transport_class
-):
- cred = ga_credentials.AnonymousCredentials()
-
- # Check ssl_channel_credentials is used if provided.
- with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
- mock_ssl_channel_creds = mock.Mock()
- transport_class(
- host="squid.clam.whelk",
- credentials=cred,
- ssl_channel_credentials=mock_ssl_channel_creds
- )
- mock_create_channel.assert_called_once_with(
- "squid.clam.whelk:443",
- credentials=cred,
- credentials_file=None,
- scopes=None,
- ssl_credentials=mock_ssl_channel_creds,
- quota_project_id=None,
- options=[
- ("grpc.max_send_message_length", -1),
- ("grpc.max_receive_message_length", -1),
- ],
- )
-
- # Check that client_cert_source_for_mtls is used if ssl_channel_credentials
- # is not provided.
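- # The callback supplies (certificate_bytes, key_bytes), which feed
- # grpc.ssl_channel_credentials below.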
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
- with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
- transport_class(
- credentials=cred,
- client_cert_source_for_mtls=client_cert_source_callback
- )
- expected_cert, expected_key = client_cert_source_callback()
- mock_ssl_cred.assert_called_once_with(
- certificate_chain=expected_cert,
- private_key=expected_key
- )
-
-def test_app_connections_service_http_transport_client_cert_source_for_mtls():
- cred = ga_credentials.AnonymousCredentials()
- with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
- transports.AppConnectionsServiceRestTransport(
- credentials=cred,
- client_cert_source_for_mtls=client_cert_source_callback
- )
- mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
-
-
-@pytest.mark.parametrize("transport_name", [
- "grpc",
- "grpc_asyncio",
- "rest",
-])
-def test_app_connections_service_host_no_port(transport_name):
- client = AppConnectionsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- client_options=client_options.ClientOptions(api_endpoint='beyondcorp.googleapis.com'),
- transport=transport_name,
- )
- assert client.transport._host == (
- 'beyondcorp.googleapis.com:443'
- if transport_name in ['grpc', 'grpc_asyncio']
- else 'https://beyondcorp.googleapis.com'
- )
-
-@pytest.mark.parametrize("transport_name", [
- "grpc",
- "grpc_asyncio",
- "rest",
-])
-def test_app_connections_service_host_with_port(transport_name):
- client = AppConnectionsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- client_options=client_options.ClientOptions(api_endpoint='beyondcorp.googleapis.com:8000'),
- transport=transport_name,
- )
- assert client.transport._host == (
- 'beyondcorp.googleapis.com:8000'
- if transport_name in ['grpc', 'grpc_asyncio']
- else 'https://beyondcorp.googleapis.com:8000'
- )
-
-@pytest.mark.parametrize("transport_name", [
- "rest",
-])
-def test_app_connections_service_client_transport_session_collision(transport_name):
- creds1 = ga_credentials.AnonymousCredentials()
- creds2 = ga_credentials.AnonymousCredentials()
- client1 = AppConnectionsServiceClient(
- credentials=creds1,
- transport=transport_name,
- )
- client2 = AppConnectionsServiceClient(
- credentials=creds2,
- transport=transport_name,
- )
- session1 = client1.transport.list_app_connections._session
- session2 = client2.transport.list_app_connections._session
- assert session1 != session2
- session1 = client1.transport.get_app_connection._session
- session2 = client2.transport.get_app_connection._session
- assert session1 != session2
- session1 = client1.transport.create_app_connection._session
- session2 = client2.transport.create_app_connection._session
- assert session1 != session2
- session1 = client1.transport.update_app_connection._session
- session2 = client2.transport.update_app_connection._session
- assert session1 != session2
- session1 = client1.transport.delete_app_connection._session
- session2 = client2.transport.delete_app_connection._session
- assert session1 != session2
- session1 = client1.transport.resolve_app_connections._session
- session2 = client2.transport.resolve_app_connections._session
- assert session1 != session2
-
-def test_app_connections_service_grpc_transport_channel():
- channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
- # Check that channel is used if provided.
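- # A caller-supplied channel is used as-is, so the transport creates no
- # ssl credentials of its own (asserted below).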
- transport = transports.AppConnectionsServiceGrpcTransport(
- host="squid.clam.whelk",
- channel=channel,
- )
- assert transport.grpc_channel == channel
- assert transport._host == "squid.clam.whelk:443"
- assert transport._ssl_channel_credentials is None
-
-
-def test_app_connections_service_grpc_asyncio_transport_channel():
- channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
- # Check that channel is used if provided.
- transport = transports.AppConnectionsServiceGrpcAsyncIOTransport(
- host="squid.clam.whelk",
- channel=channel,
- )
- assert transport.grpc_channel == channel
- assert transport._host == "squid.clam.whelk:443"
- assert transport._ssl_channel_credentials is None
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.AppConnectionsServiceGrpcTransport, transports.AppConnectionsServiceGrpcAsyncIOTransport])
-def test_app_connections_service_transport_channel_mtls_with_client_cert_source(
- transport_class
-):
- with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
- with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
- mock_ssl_cred = mock.Mock()
- grpc_ssl_channel_cred.return_value = mock_ssl_cred
-
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
-
- cred = ga_credentials.AnonymousCredentials()
- with pytest.warns(DeprecationWarning):
- with mock.patch.object(google.auth, 'default') as adc:
- adc.return_value = (cred, None)
- transport = transport_class(
- host="squid.clam.whelk",
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=client_cert_source_callback,
- )
- adc.assert_called_once()
-
- grpc_ssl_channel_cred.assert_called_once_with(
- certificate_chain=b"cert bytes", private_key=b"key bytes"
- )
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=cred,
- credentials_file=None,
- scopes=None,
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
- options=[
- ("grpc.max_send_message_length", -1),
- ("grpc.max_receive_message_length", -1),
- ],
- )
- assert transport.grpc_channel == mock_grpc_channel
- assert transport._ssl_channel_credentials == mock_ssl_cred
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.AppConnectionsServiceGrpcTransport, transports.AppConnectionsServiceGrpcAsyncIOTransport])
-def test_app_connections_service_transport_channel_mtls_with_adc(
- transport_class
-):
- mock_ssl_cred = mock.Mock()
- with mock.patch.multiple(
- "google.auth.transport.grpc.SslCredentials",
- __init__=mock.Mock(return_value=None),
- ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
- ):
- with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
- mock_cred = mock.Mock()
-
- with pytest.warns(DeprecationWarning):
- transport = transport_class(
- host="squid.clam.whelk",
- credentials=mock_cred,
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=None,
- )
-
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=mock_cred,
- credentials_file=None,
- scopes=None,
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
- options=[
- ("grpc.max_send_message_length", -1),
- ("grpc.max_receive_message_length", -1),
- ],
- )
- assert transport.grpc_channel == mock_grpc_channel
-
-
-def test_app_connections_service_grpc_lro_client():
- client = AppConnectionsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
- transport = client.transport
-
- # Ensure that we have an api-core operations client.
- assert isinstance(
- transport.operations_client,
- operations_v1.OperationsClient,
- )
-
- # Ensure that subsequent calls to the property send the exact same object.
- assert transport.operations_client is transport.operations_client
-
-
-def test_app_connections_service_grpc_lro_async_client():
- client = AppConnectionsServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc_asyncio',
- )
- transport = client.transport
-
- # Ensure that we have an api-core operations client.
- assert isinstance(
- transport.operations_client,
- operations_v1.OperationsAsyncClient,
- )
-
- # Ensure that subsequent calls to the property send the exact same object.
- assert transport.operations_client is transport.operations_client
-
-
-def test_app_connection_path():
- project = "squid"
- location = "clam"
- app_connection = "whelk"
- expected = "projects/{project}/locations/{location}/appConnections/{app_connection}".format(project=project, location=location, app_connection=app_connection, )
- actual = AppConnectionsServiceClient.app_connection_path(project, location, app_connection)
- assert expected == actual
-
-
-def test_parse_app_connection_path():
- expected = {
- "project": "octopus",
- "location": "oyster",
- "app_connection": "nudibranch",
- }
- path = AppConnectionsServiceClient.app_connection_path(**expected)
-
- # Check that the path construction is reversible.
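- # e.g. "projects/octopus/locations/oyster/appConnections/nudibranch" should
- # parse back into the dict above.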
- actual = AppConnectionsServiceClient.parse_app_connection_path(path) - assert expected == actual - -def test_app_connector_path(): - project = "cuttlefish" - location = "mussel" - app_connector = "winkle" - expected = "projects/{project}/locations/{location}/appConnectors/{app_connector}".format(project=project, location=location, app_connector=app_connector, ) - actual = AppConnectionsServiceClient.app_connector_path(project, location, app_connector) - assert expected == actual - - -def test_parse_app_connector_path(): - expected = { - "project": "nautilus", - "location": "scallop", - "app_connector": "abalone", - } - path = AppConnectionsServiceClient.app_connector_path(**expected) - - # Check that the path construction is reversible. - actual = AppConnectionsServiceClient.parse_app_connector_path(path) - assert expected == actual - -def test_app_gateway_path(): - project = "squid" - location = "clam" - app_gateway = "whelk" - expected = "projects/{project}/locations/{location}/appGateways/{app_gateway}".format(project=project, location=location, app_gateway=app_gateway, ) - actual = AppConnectionsServiceClient.app_gateway_path(project, location, app_gateway) - assert expected == actual - - -def test_parse_app_gateway_path(): - expected = { - "project": "octopus", - "location": "oyster", - "app_gateway": "nudibranch", - } - path = AppConnectionsServiceClient.app_gateway_path(**expected) - - # Check that the path construction is reversible. - actual = AppConnectionsServiceClient.parse_app_gateway_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = AppConnectionsServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "mussel", - } - path = AppConnectionsServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = AppConnectionsServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "winkle" - expected = "folders/{folder}".format(folder=folder, ) - actual = AppConnectionsServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nautilus", - } - path = AppConnectionsServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = AppConnectionsServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "scallop" - expected = "organizations/{organization}".format(organization=organization, ) - actual = AppConnectionsServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "abalone", - } - path = AppConnectionsServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. 
- actual = AppConnectionsServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "squid" - expected = "projects/{project}".format(project=project, ) - actual = AppConnectionsServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "clam", - } - path = AppConnectionsServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = AppConnectionsServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "whelk" - location = "octopus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = AppConnectionsServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - } - path = AppConnectionsServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = AppConnectionsServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.AppConnectionsServiceTransport, '_prep_wrapped_messages') as prep: - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.AppConnectionsServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = AppConnectionsServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_delete_operation(transport: str = "grpc"): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = AppConnectionsServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
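- # FakeUnaryUnaryCall wraps the value in an awaitable, mimicking an async
- # gRPC unary-unary response.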
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_field_headers(): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = AppConnectionsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_delete_operation_from_dict(): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = AppConnectionsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = AppConnectionsServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = AppConnectionsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_cancel_operation_from_dict(): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = AppConnectionsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = AppConnectionsServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = AppConnectionsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = AppConnectionsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = AppConnectionsServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_list_operations_field_headers(): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = AppConnectionsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_operations_from_dict(): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = AppConnectionsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_locations(transport: str = "grpc"): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = AppConnectionsServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - -def test_list_locations_field_headers(): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() - - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = AppConnectionsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_locations_from_dict(): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = AppConnectionsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_location(transport: str = "grpc"): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. 
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, locations_pb2.Location)
-@pytest.mark.asyncio
-async def test_get_location_async(transport: str = "grpc_asyncio"):
-    client = AppConnectionsServiceAsyncClient(
-        credentials=async_anonymous_credentials(), transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = locations_pb2.GetLocationRequest()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            locations_pb2.Location()
-        )
-        response = await client.get_location(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, locations_pb2.Location)
-
-def test_get_location_field_headers():
-    client = AppConnectionsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials())
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = locations_pb2.GetLocationRequest()
-    request.name = "locations/abc"
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        call.return_value = locations_pb2.Location()
-
-        client.get_location(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
-@pytest.mark.asyncio
-async def test_get_location_field_headers_async():
-    client = AppConnectionsServiceAsyncClient(
-        credentials=async_anonymous_credentials()
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = locations_pb2.GetLocationRequest()
-    request.name = "locations/abc"
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            locations_pb2.Location()
-        )
-        await client.get_location(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
-
-def test_get_location_from_dict():
-    client = AppConnectionsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = locations_pb2.Location()
-
-        response = client.get_location(
-            request={
-                "name": "locations/abc",
-            }
-        )
-        call.assert_called()
-@pytest.mark.asyncio
-async def test_get_location_from_dict_async():
-    client = AppConnectionsServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            locations_pb2.Location()
-        )
-        response = await client.get_location(
-            request={
-                "name": "locations/abc",
-            }
-        )
-        call.assert_called()
-
-
-def test_set_iam_policy(transport: str = "grpc"):
-    client = AppConnectionsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = iam_policy_pb2.SetIamPolicyRequest()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",)
-        response = client.set_iam_policy(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, policy_pb2.Policy)
-
-    assert response.version == 774
-
-    assert response.etag == b"etag_blob"
-@pytest.mark.asyncio
-async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
-    client = AppConnectionsServiceAsyncClient(
-        credentials=async_anonymous_credentials(), transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = iam_policy_pb2.SetIamPolicyRequest()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            policy_pb2.Policy(version=774, etag=b"etag_blob",)
-        )
-        response = await client.set_iam_policy(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, policy_pb2.Policy)
-
-    assert response.version == 774
-
-    assert response.etag == b"etag_blob"
-
-def test_set_iam_policy_field_headers():
-    client = AppConnectionsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = iam_policy_pb2.SetIamPolicyRequest()
-    request.resource = "resource/value"
-
-    # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - - client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] -@pytest.mark.asyncio -async def test_set_iam_policy_field_headers_async(): - client = AppConnectionsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - - await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - -def test_set_iam_policy_from_dict(): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - - response = client.set_iam_policy( - request={ - "resource": "resource_value", - "policy": policy_pb2.Policy(version=774), - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_set_iam_policy_from_dict_async(): - client = AppConnectionsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy() - ) - - response = await client.set_iam_policy( - request={ - "resource": "resource_value", - "policy": policy_pb2.Policy(version=774), - } - ) - call.assert_called() - -def test_get_iam_policy(transport: str = "grpc"): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.GetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) - - response = client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - - -@pytest.mark.asyncio -async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): - client = AppConnectionsServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.GetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy(version=774, etag=b"etag_blob",) - ) - - response = await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - - -def test_get_iam_policy_field_headers(): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_iam_policy_field_headers_async(): - client = AppConnectionsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - - await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -def test_get_iam_policy_from_dict(): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - - response = client.get_iam_policy( - request={ - "resource": "resource_value", - "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_get_iam_policy_from_dict_async(): - client = AppConnectionsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy() - ) - - response = await client.get_iam_policy( - request={ - "resource": "resource_value", - "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - -def test_test_iam_permissions(transport: str = "grpc"): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=["permissions_value"], - ) - - response = client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - assert response.permissions == ["permissions_value"] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): - client = AppConnectionsServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse(permissions=["permissions_value"],) - ) - - response = await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - assert response.permissions == ["permissions_value"] - - -def test_test_iam_permissions_field_headers(): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_field_headers_async(): - client = AppConnectionsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse() - ) - - await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -def test_test_iam_permissions_from_dict(): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - response = client.test_iam_permissions( - request={ - "resource": "resource_value", - "permissions": ["permissions_value"], - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_test_iam_permissions_from_dict_async(): - client = AppConnectionsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse() - ) - - response = await client.test_iam_permissions( - request={ - "resource": "resource_value", - "permissions": ["permissions_value"], - } - ) - call.assert_called() - - -def test_transport_close_grpc(): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = AppConnectionsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close_rest(): - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = AppConnectionsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. - with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (AppConnectionsServiceClient, transports.AppConnectionsServiceGrpcTransport), - (AppConnectionsServiceAsyncClient, transports.AppConnectionsServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/.coveragerc b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/.coveragerc deleted file mode 100644 index 65e684340214..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/beyondcorp_appconnectors/__init__.py - google/cloud/beyondcorp_appconnectors/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/.flake8 b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/.flake8 deleted file mode 100644 index 
29227d4cf419..000000000000
--- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/.flake8
+++ /dev/null
@@ -1,33 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Generated by synthtool. DO NOT EDIT!
-[flake8]
-ignore = E203, E266, E501, W503
-exclude =
-  # Exclude generated code.
-  **/proto/**
-  **/gapic/**
-  **/services/**
-  **/types/**
-  *_pb2.py
-
-  # Standard linting exemptions.
-  **/.nox/**
-  __pycache__,
-  .git,
-  *.pyc,
-  conf.py
diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/MANIFEST.in b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/MANIFEST.in
deleted file mode 100644
index a488867aba3e..000000000000
--- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/MANIFEST.in
+++ /dev/null
@@ -1,2 +0,0 @@
-recursive-include google/cloud/beyondcorp_appconnectors *.py
-recursive-include google/cloud/beyondcorp_appconnectors_v1 *.py
diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/README.rst b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/README.rst
deleted file mode 100644
index 77310339152d..000000000000
--- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/README.rst
+++ /dev/null
@@ -1,143 +0,0 @@
-Python Client for Google Cloud Beyondcorp Appconnectors API
-============================================================
-
-Quick Start
------------
-
-In order to use this library, you first need to go through the following steps:
-
-1. `Select or create a Cloud Platform project.`_
-2. `Enable billing for your project.`_
-3. Enable the Google Cloud Beyondcorp Appconnectors API.
-4. `Setup Authentication.`_
-
-.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
-.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
-.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
-
-Installation
-~~~~~~~~~~~~
-
-Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
-create isolated Python environments. The basic problem it addresses is one of
-dependencies and versions, and indirectly permissions.
-
-With `virtualenv`_, it's possible to install this library without needing system
-install permissions, and without clashing with the installed system
-dependencies.
-
-.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
-
-
-Mac/Linux
-^^^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    source <your-env>/bin/activate
-    <your-env>/bin/pip install /path/to/library
-
-
-Windows
-^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    <your-env>\Scripts\activate
-    <your-env>\Scripts\pip.exe install \path\to\library
-
-
-Logging
--------
-
-This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes.
-Note the following:
-
-#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging.
-#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**.
-#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below.
-
-
-Simple, environment-based configuration
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google
-logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged
-messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging
-event.
-
-A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log.
-
-- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc.
-- Invalid logging scopes: :code:`foo`, :code:`123`, etc.
-
-**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers.
-
-
-Examples
-^^^^^^^^
-
-- Enabling the default handler for all Google-based loggers
-
-.. code-block:: console
-
-   export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google
-
-- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`):
-
-.. code-block:: console
-
-   export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1
-
-
-Advanced, code-based configuration
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-You can also configure a valid logging scope using Python's standard `logging` mechanism.
-
-
-Examples
-^^^^^^^^
-
-- Configuring a handler for all Google-based loggers
-
-.. code-block:: python
-
-   import logging
-
-   from google.cloud.translate_v3 import translate
-
-   base_logger = logging.getLogger("google")
-   base_logger.addHandler(logging.StreamHandler())
-   base_logger.setLevel(logging.DEBUG)
-
-- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`):
-
-.. code-block:: python
-
-   import logging
-
-   from google.cloud.translate_v3 import translate
-
-   base_logger = logging.getLogger("google.cloud.library_v1")
-   base_logger.addHandler(logging.StreamHandler())
-   base_logger.setLevel(logging.DEBUG)
-
-
-Logging details
-~~~~~~~~~~~~~~~
-
-#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root
-   logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set
-   :code:`logging.getLogger("google").propagate = True` in your code.
-#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for
-   one library, but decide you need to also set up environment-based logging configuration for another library.
-
-   #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual
-      if the code-based configuration gets applied first.
-
-#.
The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get - executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured. - (This is the reason for 2.i. above.) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/docs/_static/custom.css b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/docs/_static/custom.css deleted file mode 100644 index 06423be0b592..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/docs/_static/custom.css +++ /dev/null @@ -1,3 +0,0 @@ -dl.field-list > dt { - min-width: 100px -} diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/docs/beyondcorp_appconnectors_v1/app_connectors_service.rst b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/docs/beyondcorp_appconnectors_v1/app_connectors_service.rst deleted file mode 100644 index 365a57210e7e..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/docs/beyondcorp_appconnectors_v1/app_connectors_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -AppConnectorsService --------------------------------------- - -.. automodule:: google.cloud.beyondcorp_appconnectors_v1.services.app_connectors_service - :members: - :inherited-members: - -.. automodule:: google.cloud.beyondcorp_appconnectors_v1.services.app_connectors_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/docs/beyondcorp_appconnectors_v1/services_.rst b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/docs/beyondcorp_appconnectors_v1/services_.rst deleted file mode 100644 index a72a19d0ce55..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/docs/beyondcorp_appconnectors_v1/services_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Beyondcorp Appconnectors v1 API -========================================================= -.. toctree:: - :maxdepth: 2 - - app_connectors_service diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/docs/beyondcorp_appconnectors_v1/types_.rst b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/docs/beyondcorp_appconnectors_v1/types_.rst deleted file mode 100644 index 9a03154fd8b0..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/docs/beyondcorp_appconnectors_v1/types_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Beyondcorp Appconnectors v1 API -====================================================== - -.. automodule:: google.cloud.beyondcorp_appconnectors_v1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/docs/conf.py b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/docs/conf.py deleted file mode 100644 index 60913726030d..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-#
-# google-cloud-beyondcorp-appconnectors documentation build configuration file
-#
-# This file is execfile()d with the current directory set to its
-# containing dir.
-#
-# Note that not all possible configuration values are present in this
-# autogenerated file.
-#
-# All configuration values have a default; values that are commented out
-# serve to show the default.
-
-import sys
-import os
-import shlex
-
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
-sys.path.insert(0, os.path.abspath(".."))
-
-__version__ = "0.1.0"
-
-# -- General configuration ------------------------------------------------
-
-# If your documentation needs a minimal Sphinx version, state it here.
-needs_sphinx = "4.0.1"
-
-# Add any Sphinx extension module names here, as strings. They can be
-# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
-# ones.
-extensions = [
-    "sphinx.ext.autodoc",
-    "sphinx.ext.autosummary",
-    "sphinx.ext.intersphinx",
-    "sphinx.ext.coverage",
-    "sphinx.ext.napoleon",
-    "sphinx.ext.todo",
-    "sphinx.ext.viewcode",
-]
-
-# autodoc/autosummary flags
-autoclass_content = "both"
-autodoc_default_flags = ["members"]
-autosummary_generate = True
-
-
-# Add any paths that contain templates here, relative to this directory.
-templates_path = ["_templates"]
-
-# Allow markdown includes (so releases.md can include CHANGELOG.md)
-# http://www.sphinx-doc.org/en/master/markdown.html
-source_parsers = {".md": "recommonmark.parser.CommonMarkParser"}
-
-# The suffix(es) of source filenames.
-# You can specify multiple suffixes as a list of strings:
-source_suffix = [".rst", ".md"]
-
-# The encoding of source files.
-# source_encoding = 'utf-8-sig'
-
-# The root toctree document.
-root_doc = "index"
-
-# General information about the project.
-project = u"google-cloud-beyondcorp-appconnectors"
-copyright = u"2023, Google, LLC"
-author = u"Google APIs"  # TODO: autogenerate this bit
-
-# The version info for the project you're documenting, acts as replacement for
-# |version| and |release|, also used in various other places throughout the
-# built documents.
-#
-# The full version, including alpha/beta/rc tags.
-release = __version__
-# The short X.Y version.
-version = ".".join(release.split(".")[0:2])
-
-# The language for content autogenerated by Sphinx. Refer to documentation
-# for a list of supported languages.
-#
-# This is also used if you do content translation via gettext catalogs.
-# Usually you set "language" from the command line for these cases.
-language = 'en'
-
-# There are two options for replacing |today|: either, you set today to some
-# non-false value, then it is used:
-# today = ''
-# Else, today_fmt is used as the format for a strftime call.
-# today_fmt = '%B %d, %Y'
-
-# List of patterns, relative to source directory, that match files and
-# directories to ignore when looking for source files.
-exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. 
-# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-beyondcorp-appconnectors-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-beyondcorp-appconnectors.tex", - u"google-cloud-beyondcorp-appconnectors Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-beyondcorp-appconnectors", - u"Google Cloud Beyondcorp Appconnectors Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. 
-# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-beyondcorp-appconnectors", - u"google-cloud-beyondcorp-appconnectors Documentation", - author, - "google-cloud-beyondcorp-appconnectors", - "GAPIC library for Google Cloud Beyondcorp Appconnectors API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/docs/index.rst b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/docs/index.rst deleted file mode 100644 index 2cd96106309a..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - beyondcorp_appconnectors_v1/services_ - beyondcorp_appconnectors_v1/types_ diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors/__init__.py deleted file mode 100644 index adcb00d7734c..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors/__init__.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
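The Napoleon settings above control how Google-style docstrings in the generated sources are converted to reStructuredText, while ``intersphinx_mapping`` lets type references in those docstrings resolve against external documentation inventories. A minimal sketch of a docstring these settings would process; the function and its types are illustrative and not part of this package:

.. code-block:: python

    def fetch_connector(name, retry=None):
        """Fetches a single connector (illustrative only).

        Args:
            name (str): Resource name of the connector.
            retry (google.api_core.retry.Retry): How the call is retried.
                With the intersphinx mapping above, Sphinx can link this
                type to the google-api-core reference docs.

        Returns:
            dict: The connector, as a plain mapping.
        """

With ``napoleon_use_param = True`` the ``Args`` section is emitted as ``:param:``/``:type:`` fields, and ``napoleon_use_rtype = True`` emits a separate ``:rtype:`` field.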
-# -from google.cloud.beyondcorp_appconnectors import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.beyondcorp_appconnectors_v1.services.app_connectors_service.client import AppConnectorsServiceClient -from google.cloud.beyondcorp_appconnectors_v1.services.app_connectors_service.async_client import AppConnectorsServiceAsyncClient - -from google.cloud.beyondcorp_appconnectors_v1.types.app_connector_instance_config import AppConnectorInstanceConfig -from google.cloud.beyondcorp_appconnectors_v1.types.app_connector_instance_config import ImageConfig -from google.cloud.beyondcorp_appconnectors_v1.types.app_connector_instance_config import NotificationConfig -from google.cloud.beyondcorp_appconnectors_v1.types.app_connectors_service import AppConnector -from google.cloud.beyondcorp_appconnectors_v1.types.app_connectors_service import AppConnectorOperationMetadata -from google.cloud.beyondcorp_appconnectors_v1.types.app_connectors_service import CreateAppConnectorRequest -from google.cloud.beyondcorp_appconnectors_v1.types.app_connectors_service import DeleteAppConnectorRequest -from google.cloud.beyondcorp_appconnectors_v1.types.app_connectors_service import GetAppConnectorRequest -from google.cloud.beyondcorp_appconnectors_v1.types.app_connectors_service import ListAppConnectorsRequest -from google.cloud.beyondcorp_appconnectors_v1.types.app_connectors_service import ListAppConnectorsResponse -from google.cloud.beyondcorp_appconnectors_v1.types.app_connectors_service import ReportStatusRequest -from google.cloud.beyondcorp_appconnectors_v1.types.app_connectors_service import UpdateAppConnectorRequest -from google.cloud.beyondcorp_appconnectors_v1.types.resource_info import ResourceInfo -from google.cloud.beyondcorp_appconnectors_v1.types.resource_info import HealthStatus - -__all__ = ('AppConnectorsServiceClient', - 'AppConnectorsServiceAsyncClient', - 'AppConnectorInstanceConfig', - 'ImageConfig', - 'NotificationConfig', - 'AppConnector', - 'AppConnectorOperationMetadata', - 'CreateAppConnectorRequest', - 'DeleteAppConnectorRequest', - 'GetAppConnectorRequest', - 'ListAppConnectorsRequest', - 'ListAppConnectorsResponse', - 'ReportStatusRequest', - 'UpdateAppConnectorRequest', - 'ResourceInfo', - 'HealthStatus', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors/gapic_version.py b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
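The thin top-level package above simply re-exports the versioned clients and types, so either import path works for consumers. A small sketch, assuming the library is installed:

.. code-block:: python

    from google.cloud import beyondcorp_appconnectors
    from google.cloud import beyondcorp_appconnectors_v1

    # The top-level package re-exports the versioned client, so both
    # names resolve to the same class object.
    assert (beyondcorp_appconnectors.AppConnectorsServiceClient
            is beyondcorp_appconnectors_v1.AppConnectorsServiceClient)

    # The package version is sourced from gapic_version.py at import time.
    print(beyondcorp_appconnectors.__version__)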
-# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors/py.typed b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors/py.typed deleted file mode 100644 index 894220ff506f..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-beyondcorp-appconnectors package uses inline types. diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/__init__.py deleted file mode 100644 index a27a5d0b2370..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/__init__.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.beyondcorp_appconnectors_v1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.app_connectors_service import AppConnectorsServiceClient -from .services.app_connectors_service import AppConnectorsServiceAsyncClient - -from .types.app_connector_instance_config import AppConnectorInstanceConfig -from .types.app_connector_instance_config import ImageConfig -from .types.app_connector_instance_config import NotificationConfig -from .types.app_connectors_service import AppConnector -from .types.app_connectors_service import AppConnectorOperationMetadata -from .types.app_connectors_service import CreateAppConnectorRequest -from .types.app_connectors_service import DeleteAppConnectorRequest -from .types.app_connectors_service import GetAppConnectorRequest -from .types.app_connectors_service import ListAppConnectorsRequest -from .types.app_connectors_service import ListAppConnectorsResponse -from .types.app_connectors_service import ReportStatusRequest -from .types.app_connectors_service import UpdateAppConnectorRequest -from .types.resource_info import ResourceInfo -from .types.resource_info import HealthStatus - -__all__ = ( - 'AppConnectorsServiceAsyncClient', -'AppConnector', -'AppConnectorInstanceConfig', -'AppConnectorOperationMetadata', -'AppConnectorsServiceClient', -'CreateAppConnectorRequest', -'DeleteAppConnectorRequest', -'GetAppConnectorRequest', -'HealthStatus', -'ImageConfig', -'ListAppConnectorsRequest', -'ListAppConnectorsResponse', -'NotificationConfig', -'ReportStatusRequest', -'ResourceInfo', -'UpdateAppConnectorRequest', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/gapic_metadata.json b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/gapic_metadata.json deleted file mode 100644 index 
7ad9d97f23a5..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/gapic_metadata.json +++ /dev/null @@ -1,118 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.beyondcorp_appconnectors_v1", - "protoPackage": "google.cloud.beyondcorp.appconnectors.v1", - "schema": "1.0", - "services": { - "AppConnectorsService": { - "clients": { - "grpc": { - "libraryClient": "AppConnectorsServiceClient", - "rpcs": { - "CreateAppConnector": { - "methods": [ - "create_app_connector" - ] - }, - "DeleteAppConnector": { - "methods": [ - "delete_app_connector" - ] - }, - "GetAppConnector": { - "methods": [ - "get_app_connector" - ] - }, - "ListAppConnectors": { - "methods": [ - "list_app_connectors" - ] - }, - "ReportStatus": { - "methods": [ - "report_status" - ] - }, - "UpdateAppConnector": { - "methods": [ - "update_app_connector" - ] - } - } - }, - "grpc-async": { - "libraryClient": "AppConnectorsServiceAsyncClient", - "rpcs": { - "CreateAppConnector": { - "methods": [ - "create_app_connector" - ] - }, - "DeleteAppConnector": { - "methods": [ - "delete_app_connector" - ] - }, - "GetAppConnector": { - "methods": [ - "get_app_connector" - ] - }, - "ListAppConnectors": { - "methods": [ - "list_app_connectors" - ] - }, - "ReportStatus": { - "methods": [ - "report_status" - ] - }, - "UpdateAppConnector": { - "methods": [ - "update_app_connector" - ] - } - } - }, - "rest": { - "libraryClient": "AppConnectorsServiceClient", - "rpcs": { - "CreateAppConnector": { - "methods": [ - "create_app_connector" - ] - }, - "DeleteAppConnector": { - "methods": [ - "delete_app_connector" - ] - }, - "GetAppConnector": { - "methods": [ - "get_app_connector" - ] - }, - "ListAppConnectors": { - "methods": [ - "list_app_connectors" - ] - }, - "ReportStatus": { - "methods": [ - "report_status" - ] - }, - "UpdateAppConnector": { - "methods": [ - "update_app_connector" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/gapic_version.py b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
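Since ``gapic_metadata.json`` above is machine-readable, tooling can use it to map an RPC name to the generated Python method for a given transport. A sketch of reading the mapping shown above; the file path is illustrative:

.. code-block:: python

    import json

    # Look up which Python method implements the ListAppConnectors RPC
    # on the gRPC transport.
    with open("gapic_metadata.json") as fp:
        metadata = json.load(fp)

    rpcs = metadata["services"]["AppConnectorsService"]["clients"]["grpc"]["rpcs"]
    print(rpcs["ListAppConnectors"]["methods"])  # ['list_app_connectors']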
-# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/py.typed b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/py.typed deleted file mode 100644 index 894220ff506f..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-beyondcorp-appconnectors package uses inline types. diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/services/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/services/__init__.py deleted file mode 100644 index 8f6cf068242c..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/__init__.py deleted file mode 100644 index cd5907663d9e..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .client import AppConnectorsServiceClient -from .async_client import AppConnectorsServiceAsyncClient - -__all__ = ( - 'AppConnectorsServiceClient', - 'AppConnectorsServiceAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/async_client.py b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/async_client.py deleted file mode 100644 index 0b9316a082f7..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/async_client.py +++ /dev/null @@ -1,1690 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.beyondcorp_appconnectors_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.beyondcorp_appconnectors_v1.services.app_connectors_service import pagers -from google.cloud.beyondcorp_appconnectors_v1.types import app_connectors_service -from google.cloud.beyondcorp_appconnectors_v1.types import resource_info -from google.cloud.beyondcorp_appconnectors_v1.types import resource_info as gcba_resource_info -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import AppConnectorsServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import AppConnectorsServiceGrpcAsyncIOTransport -from .client import AppConnectorsServiceClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -class AppConnectorsServiceAsyncClient: - 
"""API Overview: - - The ``beyondcorp.googleapis.com`` service implements the Google - Cloud BeyondCorp API. - - Data Model: - - The AppConnectorsService exposes the following resource: - - - AppConnectors, named as follows: - ``projects/{project_id}/locations/{location_id}/appConnectors/{app_connector_id}``. - - The AppConnectorsService provides methods to manage - (create/read/update/delete) BeyondCorp AppConnectors. - """ - - _client: AppConnectorsServiceClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = AppConnectorsServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = AppConnectorsServiceClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = AppConnectorsServiceClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = AppConnectorsServiceClient._DEFAULT_UNIVERSE - - app_connector_path = staticmethod(AppConnectorsServiceClient.app_connector_path) - parse_app_connector_path = staticmethod(AppConnectorsServiceClient.parse_app_connector_path) - common_billing_account_path = staticmethod(AppConnectorsServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(AppConnectorsServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(AppConnectorsServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(AppConnectorsServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(AppConnectorsServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(AppConnectorsServiceClient.parse_common_organization_path) - common_project_path = staticmethod(AppConnectorsServiceClient.common_project_path) - parse_common_project_path = staticmethod(AppConnectorsServiceClient.parse_common_project_path) - common_location_path = staticmethod(AppConnectorsServiceClient.common_location_path) - parse_common_location_path = staticmethod(AppConnectorsServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - AppConnectorsServiceAsyncClient: The constructed client. - """ - return AppConnectorsServiceClient.from_service_account_info.__func__(AppConnectorsServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - AppConnectorsServiceAsyncClient: The constructed client. - """ - return AppConnectorsServiceClient.from_service_account_file.__func__(AppConnectorsServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. 
- - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` is provided, use the provided one. - (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return AppConnectorsServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> AppConnectorsServiceTransport: - """Returns the transport used by the client instance. - - Returns: - AppConnectorsServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = AppConnectorsServiceClient.get_transport_class - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, AppConnectorsServiceTransport, Callable[..., AppConnectorsServiceTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the app connectors service async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,AppConnectorsServiceTransport,Callable[..., AppConnectorsServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the AppConnectorsServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided.
Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which can have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = AppConnectorsServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.beyondcorp.appconnectors_v1.AppConnectorsServiceAsyncClient`.", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { - "serviceName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService", - "credentialsType": None, - } - ) - - async def list_app_connectors(self, - request: Optional[Union[app_connectors_service.ListAppConnectorsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListAppConnectorsAsyncPager: - r"""Lists AppConnectors in a given project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_appconnectors_v1 - - async def sample_list_app_connectors(): - # Create a client - client = beyondcorp_appconnectors_v1.AppConnectorsServiceAsyncClient() - - # Initialize request argument(s) - request = beyondcorp_appconnectors_v1.ListAppConnectorsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_app_connectors(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.beyondcorp_appconnectors_v1.types.ListAppConnectorsRequest, dict]]): - The request object. Request message for - BeyondCorp.ListAppConnectors. - parent (:class:`str`): - Required. The resource name of the AppConnector location - using the form: - ``projects/{project_id}/locations/{location_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.beyondcorp_appconnectors_v1.services.app_connectors_service.pagers.ListAppConnectorsAsyncPager: - Response message for - BeyondCorp.ListAppConnectors. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, app_connectors_service.ListAppConnectorsRequest): - request = app_connectors_service.ListAppConnectorsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_app_connectors] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. 
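Because the coroutine returns an ``AsyncPager`` rather than a raw response, callers never handle page tokens themselves. A minimal usage sketch; the project and location are placeholders, and Application Default Credentials are assumed:

.. code-block:: python

    from google.cloud import beyondcorp_appconnectors_v1


    async def show_connectors() -> None:
        client = beyondcorp_appconnectors_v1.AppConnectorsServiceAsyncClient()

        # Awaiting the method yields the pager; iterating the pager
        # fetches additional pages lazily as needed.
        pager = await client.list_app_connectors(
            parent="projects/my-project/locations/us-central1",
        )
        async for connector in pager:
            print(connector.name)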
- response = pagers.ListAppConnectorsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_app_connector(self, - request: Optional[Union[app_connectors_service.GetAppConnectorRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> app_connectors_service.AppConnector: - r"""Gets details of a single AppConnector. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_appconnectors_v1 - - async def sample_get_app_connector(): - # Create a client - client = beyondcorp_appconnectors_v1.AppConnectorsServiceAsyncClient() - - # Initialize request argument(s) - request = beyondcorp_appconnectors_v1.GetAppConnectorRequest( - name="name_value", - ) - - # Make the request - response = await client.get_app_connector(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.beyondcorp_appconnectors_v1.types.GetAppConnectorRequest, dict]]): - The request object. Request message for - BeyondCorp.GetAppConnector. - name (:class:`str`): - Required. BeyondCorp AppConnector name using the form: - ``projects/{project_id}/locations/{location_id}/appConnectors/{app_connector_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.beyondcorp_appconnectors_v1.types.AppConnector: - A BeyondCorp connector resource that - represents an application facing - component deployed proximal to and with - direct access to the application - instances. It is used to establish - connectivity between the remote - enterprise environment and GCP. It - initiates connections to the - applications and can proxy the data from - users over the connection. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
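The coercion step above means ``request`` may be a typed message or a plain dict, and the flattened field is a third, mutually exclusive spelling. A sketch of the equivalent call styles; the resource name is a placeholder:

.. code-block:: python

    from google.cloud import beyondcorp_appconnectors_v1


    async def get_connector_three_ways() -> None:
        client = beyondcorp_appconnectors_v1.AppConnectorsServiceAsyncClient()
        name = "projects/my-project/locations/us-central1/appConnectors/my-connector"

        # 1. A typed request message.
        request = beyondcorp_appconnectors_v1.GetAppConnectorRequest(name=name)
        connector = await client.get_app_connector(request=request)

        # 2. A plain dict, coerced into the request type by the client.
        connector = await client.get_app_connector(request={"name": name})

        # 3. The flattened field; passing both `request` and `name` raises ValueError.
        connector = await client.get_app_connector(name=name)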
- if not isinstance(request, app_connectors_service.GetAppConnectorRequest): - request = app_connectors_service.GetAppConnectorRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_app_connector] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_app_connector(self, - request: Optional[Union[app_connectors_service.CreateAppConnectorRequest, dict]] = None, - *, - parent: Optional[str] = None, - app_connector: Optional[app_connectors_service.AppConnector] = None, - app_connector_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates a new AppConnector in a given project and - location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_appconnectors_v1 - - async def sample_create_app_connector(): - # Create a client - client = beyondcorp_appconnectors_v1.AppConnectorsServiceAsyncClient() - - # Initialize request argument(s) - app_connector = beyondcorp_appconnectors_v1.AppConnector() - app_connector.name = "name_value" - - request = beyondcorp_appconnectors_v1.CreateAppConnectorRequest( - parent="parent_value", - app_connector=app_connector, - ) - - # Make the request - operation = client.create_app_connector(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.beyondcorp_appconnectors_v1.types.CreateAppConnectorRequest, dict]]): - The request object. Request message for - BeyondCorp.CreateAppConnector. - parent (:class:`str`): - Required. The resource project name of the AppConnector - location using the form: - ``projects/{project_id}/locations/{location_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - app_connector (:class:`google.cloud.beyondcorp_appconnectors_v1.types.AppConnector`): - Required. A BeyondCorp AppConnector - resource. - - This corresponds to the ``app_connector`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - app_connector_id (:class:`str`): - Optional. User-settable AppConnector resource ID. - - - Must start with a letter. - - Must contain between 4-63 characters from - ``/[a-z][0-9]-/``. 
- - Must end with a number or a letter. - - This corresponds to the ``app_connector_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.beyondcorp_appconnectors_v1.types.AppConnector` A BeyondCorp connector resource that represents an application facing - component deployed proximal to and with direct access - to the application instances. It is used to establish - connectivity between the remote enterprise - environment and GCP. It initiates connections to the - applications and can proxy the data from users over - the connection. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, app_connector, app_connector_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, app_connectors_service.CreateAppConnectorRequest): - request = app_connectors_service.CreateAppConnectorRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if app_connector is not None: - request.app_connector = app_connector - if app_connector_id is not None: - request.app_connector_id = app_connector_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_app_connector] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - app_connectors_service.AppConnector, - metadata_type=app_connectors_service.AppConnectorOperationMetadata, - ) - - # Done; return the response. 
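After the wrapping step above, the caller holds an ``AsyncOperation`` future whose result is the final ``AppConnector``. A compact sketch of driving it to completion; all identifiers are placeholders:

.. code-block:: python

    from google.cloud import beyondcorp_appconnectors_v1


    async def create_and_wait() -> None:
        client = beyondcorp_appconnectors_v1.AppConnectorsServiceAsyncClient()

        operation = await client.create_app_connector(
            parent="projects/my-project/locations/us-central1",
            app_connector=beyondcorp_appconnectors_v1.AppConnector(name="my-connector"),
            app_connector_id="my-connector",
        )

        # The future resolves to the AppConnector once the server-side
        # long-running operation completes.
        connector = await operation.result()
        print(connector.name)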
- return response - - async def update_app_connector(self, - request: Optional[Union[app_connectors_service.UpdateAppConnectorRequest, dict]] = None, - *, - app_connector: Optional[app_connectors_service.AppConnector] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates the parameters of a single AppConnector. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_appconnectors_v1 - - async def sample_update_app_connector(): - # Create a client - client = beyondcorp_appconnectors_v1.AppConnectorsServiceAsyncClient() - - # Initialize request argument(s) - app_connector = beyondcorp_appconnectors_v1.AppConnector() - app_connector.name = "name_value" - - request = beyondcorp_appconnectors_v1.UpdateAppConnectorRequest( - app_connector=app_connector, - ) - - # Make the request - operation = client.update_app_connector(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.beyondcorp_appconnectors_v1.types.UpdateAppConnectorRequest, dict]]): - The request object. Request message for - BeyondCorp.UpdateAppConnector. - app_connector (:class:`google.cloud.beyondcorp_appconnectors_v1.types.AppConnector`): - Required. AppConnector message with updated fields. Only - supported fields specified in update_mask are updated. - - This corresponds to the ``app_connector`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Mask of fields to update. At least one path - must be supplied in this field. The elements of the - repeated paths field may only include these fields from - [BeyondCorp.AppConnector]: - - - ``labels`` - - ``display_name`` - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.beyondcorp_appconnectors_v1.types.AppConnector` A BeyondCorp connector resource that represents an application facing - component deployed proximal to and with direct access - to the application instances. It is used to establish - connectivity between the remote enterprise - environment and GCP. 
It initiates connections to the - applications and can proxy the data from users over - the connection. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [app_connector, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, app_connectors_service.UpdateAppConnectorRequest): - request = app_connectors_service.UpdateAppConnectorRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if app_connector is not None: - request.app_connector = app_connector - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_app_connector] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("app_connector.name", request.app_connector.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - app_connectors_service.AppConnector, - metadata_type=app_connectors_service.AppConnectorOperationMetadata, - ) - - # Done; return the response. - return response - - async def delete_app_connector(self, - request: Optional[Union[app_connectors_service.DeleteAppConnectorRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes a single AppConnector. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_appconnectors_v1 - - async def sample_delete_app_connector(): - # Create a client - client = beyondcorp_appconnectors_v1.AppConnectorsServiceAsyncClient() - - # Initialize request argument(s) - request = beyondcorp_appconnectors_v1.DeleteAppConnectorRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_app_connector(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.beyondcorp_appconnectors_v1.types.DeleteAppConnectorRequest, dict]]): - The request object. Request message for - BeyondCorp.DeleteAppConnector. - name (:class:`str`): - Required. BeyondCorp AppConnector name using the form: - ``projects/{project_id}/locations/{location_id}/appConnectors/{app_connector_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, app_connectors_service.DeleteAppConnectorRequest): - request = app_connectors_service.DeleteAppConnectorRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_app_connector] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=app_connectors_service.AppConnectorOperationMetadata, - ) - - # Done; return the response. - return response - - async def report_status(self, - request: Optional[Union[app_connectors_service.ReportStatusRequest, dict]] = None, - *, - app_connector: Optional[str] = None, - resource_info: Optional[gcba_resource_info.ResourceInfo] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Report status for a given connector. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_appconnectors_v1 - - async def sample_report_status(): - # Create a client - client = beyondcorp_appconnectors_v1.AppConnectorsServiceAsyncClient() - - # Initialize request argument(s) - resource_info = beyondcorp_appconnectors_v1.ResourceInfo() - resource_info.id = "id_value" - - request = beyondcorp_appconnectors_v1.ReportStatusRequest( - app_connector="app_connector_value", - resource_info=resource_info, - ) - - # Make the request - operation = client.report_status(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.beyondcorp_appconnectors_v1.types.ReportStatusRequest, dict]]): - The request object. Request report the connector status. - app_connector (:class:`str`): - Required. BeyondCorp Connector name using the form: - ``projects/{project_id}/locations/{location_id}/connectors/{connector}`` - - This corresponds to the ``app_connector`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - resource_info (:class:`google.cloud.beyondcorp_appconnectors_v1.types.ResourceInfo`): - Required. Resource info of the - connector. - - This corresponds to the ``resource_info`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.beyondcorp_appconnectors_v1.types.AppConnector` A BeyondCorp connector resource that represents an application facing - component deployed proximal to and with direct access - to the application instances. 
It is used to establish - connectivity between the remote enterprise - environment and GCP. It initiates connections to the - applications and can proxy the data from users over - the connection. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [app_connector, resource_info] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, app_connectors_service.ReportStatusRequest): - request = app_connectors_service.ReportStatusRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if app_connector is not None: - request.app_connector = app_connector - if resource_info is not None: - request.resource_info = resource_info - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.report_status] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("app_connector", request.app_connector), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - app_connectors_service.AppConnector, - metadata_type=app_connectors_service.AppConnectorOperationMetadata, - ) - - # Done; return the response. - return response - - async def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
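-        # `_wrapped_methods` maps each transport method to a callable that
-        # already applies the per-method default retry, timeout, and metadata
-        # configured on the transport.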
- rpc = self.transport._wrapped_methods[self._client._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def set_iam_policy( - self, - request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM access control policy on the specified function. - - Replaces any existing policy. - - Args: - request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): - The request object. Request message for `SetIamPolicy` - method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-        Returns:
-            ~.policy_pb2.Policy:
-                Defines an Identity and Access Management (IAM) policy.
-                It is used to specify access control policies for Cloud
-                Platform resources.
-                A ``Policy`` is a collection of ``bindings``. A
-                ``binding`` binds one or more ``members`` to a single
-                ``role``. Members can be user accounts, service
-                accounts, Google groups, and domains (such as G Suite).
-                A ``role`` is a named list of permissions (defined by
-                IAM or configured by users). A ``binding`` can
-                optionally specify a ``condition``, which is a logic
-                expression that further constrains the role binding
-                based on attributes about the request and/or target
-                resource.
-
-                **JSON Example**
-
-                ::
-
-                    {
-                      "bindings": [
-                        {
-                          "role": "roles/resourcemanager.organizationAdmin",
-                          "members": [
-                            "user:mike@example.com",
-                            "group:admins@example.com",
-                            "domain:google.com",
-                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
-                          ]
-                        },
-                        {
-                          "role": "roles/resourcemanager.organizationViewer",
-                          "members": ["user:eve@example.com"],
-                          "condition": {
-                            "title": "expirable access",
-                            "description": "Does not grant access after Sep 2020",
-                            "expression": "request.time <
-                            timestamp('2020-10-01T00:00:00.000Z')",
-                          }
-                        }
-                      ]
-                    }
-
-                **YAML Example**
-
-                ::
-
-                    bindings:
-                    - members:
-                      - user:mike@example.com
-                      - group:admins@example.com
-                      - domain:google.com
-                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
-                      role: roles/resourcemanager.organizationAdmin
-                    - members:
-                      - user:eve@example.com
-                      role: roles/resourcemanager.organizationViewer
-                      condition:
-                        title: expirable access
-                        description: Does not grant access after Sep 2020
-                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
-
-                For a description of IAM and its features, see the `IAM
-                developer's
-                guide <https://cloud.google.com/iam/docs>`__.
-        """
-        # Create or coerce a protobuf request object.
-
-        # The request isn't a proto-plus wrapped type,
-        # so it must be constructed via keyword expansion.
-        if isinstance(request, dict):
-            request = iam_policy_pb2.SetIamPolicyRequest(**request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self.transport._wrapped_methods[self._client._transport.set_iam_policy]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("resource", request.resource),)),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request, retry=retry, timeout=timeout, metadata=metadata,)
-
-        # Done; return the response.
-        return response
-
-    async def get_iam_policy(
-        self,
-        request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None,
-        *,
-        retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-    ) -> policy_pb2.Policy:
-        r"""Gets the IAM access control policy for a function.
-
-        Returns an empty policy if the function exists and does not have a
-        policy set.
-
-        Args:
-            request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`):
-                The request object. Request message for `GetIamPolicy`
-                method.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if
-                any, should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-        Returns:
-            ~.policy_pb2.Policy:
-                Defines an Identity and Access Management (IAM) policy.
-                It is used to specify access control policies for Cloud
-                Platform resources.
-                A ``Policy`` is a collection of ``bindings``. A
-                ``binding`` binds one or more ``members`` to a single
-                ``role``. Members can be user accounts, service
-                accounts, Google groups, and domains (such as G Suite).
-                A ``role`` is a named list of permissions (defined by
-                IAM or configured by users). A ``binding`` can
-                optionally specify a ``condition``, which is a logic
-                expression that further constrains the role binding
-                based on attributes about the request and/or target
-                resource.
-
-                **JSON Example**
-
-                ::
-
-                    {
-                      "bindings": [
-                        {
-                          "role": "roles/resourcemanager.organizationAdmin",
-                          "members": [
-                            "user:mike@example.com",
-                            "group:admins@example.com",
-                            "domain:google.com",
-                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
-                          ]
-                        },
-                        {
-                          "role": "roles/resourcemanager.organizationViewer",
-                          "members": ["user:eve@example.com"],
-                          "condition": {
-                            "title": "expirable access",
-                            "description": "Does not grant access after Sep 2020",
-                            "expression": "request.time <
-                            timestamp('2020-10-01T00:00:00.000Z')",
-                          }
-                        }
-                      ]
-                    }
-
-                **YAML Example**
-
-                ::
-
-                    bindings:
-                    - members:
-                      - user:mike@example.com
-                      - group:admins@example.com
-                      - domain:google.com
-                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
-                      role: roles/resourcemanager.organizationAdmin
-                    - members:
-                      - user:eve@example.com
-                      role: roles/resourcemanager.organizationViewer
-                      condition:
-                        title: expirable access
-                        description: Does not grant access after Sep 2020
-                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
-
-                For a description of IAM and its features, see the `IAM
-                developer's
-                guide <https://cloud.google.com/iam/docs>`__.
-        """
-        # Create or coerce a protobuf request object.
-
-        # The request isn't a proto-plus wrapped type,
-        # so it must be constructed via keyword expansion.
-        if isinstance(request, dict):
-            request = iam_policy_pb2.GetIamPolicyRequest(**request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self.transport._wrapped_methods[self._client._transport.get_iam_policy]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("resource", request.resource),)),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request, retry=retry, timeout=timeout, metadata=metadata,)
-
-        # Done; return the response.
-        return response
-
-    async def test_iam_permissions(
-        self,
-        request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None,
-        *,
-        retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-    ) -> iam_policy_pb2.TestIamPermissionsResponse:
-        r"""Tests the specified IAM permissions against the IAM access control
-        policy for a function.
- - If the function does not exist, this will return an empty set - of permissions, not a NOT_FOUND error. - - Args: - request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): - The request object. Request message for - `TestIamPermissions` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.iam_policy_pb2.TestIamPermissionsResponse: - Response message for ``TestIamPermissions`` method. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.test_iam_permissions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - async def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def __aenter__(self) -> "AppConnectorsServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "AppConnectorsServiceAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/client.py b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/client.py deleted file mode 100644 index d81dcda67287..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/client.py +++ /dev/null @@ -1,2069 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.beyondcorp_appconnectors_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.beyondcorp_appconnectors_v1.services.app_connectors_service import pagers -from google.cloud.beyondcorp_appconnectors_v1.types import app_connectors_service -from google.cloud.beyondcorp_appconnectors_v1.types import resource_info -from google.cloud.beyondcorp_appconnectors_v1.types import resource_info as gcba_resource_info -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import AppConnectorsServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import AppConnectorsServiceGrpcTransport -from .transports.grpc_asyncio import AppConnectorsServiceGrpcAsyncIOTransport -from .transports.rest import AppConnectorsServiceRestTransport - - -class AppConnectorsServiceClientMeta(type): - """Metaclass for the AppConnectorsService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[AppConnectorsServiceTransport]] - _transport_registry["grpc"] = AppConnectorsServiceGrpcTransport - _transport_registry["grpc_asyncio"] = AppConnectorsServiceGrpcAsyncIOTransport - _transport_registry["rest"] = AppConnectorsServiceRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[AppConnectorsServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. 
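-        # Valid labels are the registry keys populated above:
-        # "grpc", "grpc_asyncio", and "rest".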
-        if label:
-            return cls._transport_registry[label]
-
-        # No transport is requested; return the default (that is, the first one
-        # in the dictionary).
-        return next(iter(cls._transport_registry.values()))
-
-
-class AppConnectorsServiceClient(metaclass=AppConnectorsServiceClientMeta):
-    """API Overview:
-
-    The ``beyondcorp.googleapis.com`` service implements the Google
-    Cloud BeyondCorp API.
-
-    Data Model:
-
-    The AppConnectorsService exposes the following resource:
-
-    -  AppConnectors, named as follows:
-       ``projects/{project_id}/locations/{location_id}/appConnectors/{app_connector_id}``.
-
-    The AppConnectorsService provides methods to manage
-    (create/read/update/delete) BeyondCorp AppConnectors.
-    """
-
-    @staticmethod
-    def _get_default_mtls_endpoint(api_endpoint):
-        """Converts api endpoint to mTLS endpoint.
-
-        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
-        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
-        Args:
-            api_endpoint (Optional[str]): the api endpoint to convert.
-        Returns:
-            str: converted mTLS api endpoint.
-        """
-        if not api_endpoint:
-            return api_endpoint
-
-        mtls_endpoint_re = re.compile(
-            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
-        )
-
-        m = mtls_endpoint_re.match(api_endpoint)
-        name, mtls, sandbox, googledomain = m.groups()
-        if mtls or not googledomain:
-            return api_endpoint
-
-        if sandbox:
-            return api_endpoint.replace(
-                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
-            )
-
-        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
-
-    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
-    DEFAULT_ENDPOINT = "beyondcorp.googleapis.com"
-    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
-        DEFAULT_ENDPOINT
-    )
-
-    _DEFAULT_ENDPOINT_TEMPLATE = "beyondcorp.{UNIVERSE_DOMAIN}"
-    _DEFAULT_UNIVERSE = "googleapis.com"
-
-    @classmethod
-    def from_service_account_info(cls, info: dict, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            info.
-
-        Args:
-            info (dict): The service account private key info.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            AppConnectorsServiceClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_info(info)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    @classmethod
-    def from_service_account_file(cls, filename: str, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            file.
-
-        Args:
-            filename (str): The path to the service account private key json
-                file.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            AppConnectorsServiceClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_file(
-            filename)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    from_service_account_json = from_service_account_file
-
-    @property
-    def transport(self) -> AppConnectorsServiceTransport:
-        """Returns the transport used by the client instance.
-
-        Returns:
-            AppConnectorsServiceTransport: The transport used by the client
-                instance.
- """ - return self._transport - - @staticmethod - def app_connector_path(project: str,location: str,app_connector: str,) -> str: - """Returns a fully-qualified app_connector string.""" - return "projects/{project}/locations/{location}/appConnectors/{app_connector}".format(project=project, location=location, app_connector=app_connector, ) - - @staticmethod - def parse_app_connector_path(path: str) -> Dict[str,str]: - """Parses a app_connector path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/appConnectors/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Deprecated. Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. 
-        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
-        default mTLS endpoint; if the environment variable is "never", use the default API
-        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
-        use the default API endpoint.
-
-        More details can be found at https://google.aip.dev/auth/4114.
-
-        Args:
-            client_options (google.api_core.client_options.ClientOptions): Custom options for the
-                client. Only the `api_endpoint` and `client_cert_source` properties may be used
-                in this method.
-
-        Returns:
-            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
-                client cert source to use.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
-        """
-
-        warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
-            DeprecationWarning)
-        if client_options is None:
-            client_options = client_options_lib.ClientOptions()
-        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
-        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
-        if use_client_cert not in ("true", "false"):
-            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
-        if use_mtls_endpoint not in ("auto", "never", "always"):
-            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
-
-        # Figure out the client cert source to use.
-        client_cert_source = None
-        if use_client_cert == "true":
-            if client_options.client_cert_source:
-                client_cert_source = client_options.client_cert_source
-            elif mtls.has_default_client_cert_source():
-                client_cert_source = mtls.default_client_cert_source()
-
-        # Figure out which api endpoint to use.
-        if client_options.api_endpoint is not None:
-            api_endpoint = client_options.api_endpoint
-        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
-            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
-        else:
-            api_endpoint = cls.DEFAULT_ENDPOINT
-
-        return api_endpoint, client_cert_source
-
-    @staticmethod
-    def _read_environment_variables():
-        """Returns the environment variables used by the client.
-
-        Returns:
-            Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
-            GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.
-
-        Raises:
-            ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
-                any of ["true", "false"].
-            google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
-                is not any of ["auto", "never", "always"].
-        """
-        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower()
-        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower()
-        universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN")
-        if use_client_cert not in ("true", "false"):
-            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
-        if use_mtls_endpoint not in ("auto", "never", "always"):
-            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
-        return use_client_cert == "true", use_mtls_endpoint, universe_domain_env
-
-    @staticmethod
-    def _get_client_cert_source(provided_cert_source, use_cert_flag):
-        """Return the client cert source to be used by the client.
-
-        Args:
-            provided_cert_source (bytes): The client certificate source provided.
- use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = AppConnectorsServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = AppConnectorsServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = AppConnectorsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. - """ - universe_domain = AppConnectorsServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
- """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, AppConnectorsServiceTransport, Callable[..., AppConnectorsServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the app connectors service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,AppConnectorsServiceTransport,Callable[..., AppConnectorsServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the AppConnectorsServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) - - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = AppConnectorsServiceClient._read_environment_variables() - self._client_cert_source = AppConnectorsServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = AppConnectorsServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER - # Setup logging. - client_logging.initialize_logging() - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, AppConnectorsServiceTransport) - if transport_provided: - # transport is a AppConnectorsServiceTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = cast(AppConnectorsServiceTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - AppConnectorsServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[AppConnectorsServiceTransport], Callable[..., AppConnectorsServiceTransport]] = ( - AppConnectorsServiceClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., AppConnectorsServiceTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.beyondcorp.appconnectors_v1.AppConnectorsServiceClient`.", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService", - "credentialsType": None, - } - ) - - def list_app_connectors(self, - request: Optional[Union[app_connectors_service.ListAppConnectorsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListAppConnectorsPager: - r"""Lists AppConnectors in a given project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_appconnectors_v1 - - def sample_list_app_connectors(): - # Create a client - client = beyondcorp_appconnectors_v1.AppConnectorsServiceClient() - - # Initialize request argument(s) - request = beyondcorp_appconnectors_v1.ListAppConnectorsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_app_connectors(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.beyondcorp_appconnectors_v1.types.ListAppConnectorsRequest, dict]): - The request object. Request message for - BeyondCorp.ListAppConnectors. - parent (str): - Required. The resource name of the AppConnector location - using the form: - ``projects/{project_id}/locations/{location_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.beyondcorp_appconnectors_v1.services.app_connectors_service.pagers.ListAppConnectorsPager: - Response message for - BeyondCorp.ListAppConnectors. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, app_connectors_service.ListAppConnectorsRequest): - request = app_connectors_service.ListAppConnectorsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_app_connectors] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. 
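-        # The pager holds the bound RPC plus the original request, and
-        # re-issues the call with the returned page token as the caller
-        # iterates, so additional pages are fetched lazily.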
- response = pagers.ListAppConnectorsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_app_connector(self, - request: Optional[Union[app_connectors_service.GetAppConnectorRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> app_connectors_service.AppConnector: - r"""Gets details of a single AppConnector. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_appconnectors_v1 - - def sample_get_app_connector(): - # Create a client - client = beyondcorp_appconnectors_v1.AppConnectorsServiceClient() - - # Initialize request argument(s) - request = beyondcorp_appconnectors_v1.GetAppConnectorRequest( - name="name_value", - ) - - # Make the request - response = client.get_app_connector(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.beyondcorp_appconnectors_v1.types.GetAppConnectorRequest, dict]): - The request object. Request message for - BeyondCorp.GetAppConnector. - name (str): - Required. BeyondCorp AppConnector name using the form: - ``projects/{project_id}/locations/{location_id}/appConnectors/{app_connector_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.beyondcorp_appconnectors_v1.types.AppConnector: - A BeyondCorp connector resource that - represents an application facing - component deployed proximal to and with - direct access to the application - instances. It is used to establish - connectivity between the remote - enterprise environment and GCP. It - initiates connections to the - applications and can proxy the data from - users over the connection. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
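-        # A plain dict is accepted here as well: the proto-plus request
-        # constructor below coerces a mapping of field names into the
-        # corresponding request message.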
- if not isinstance(request, app_connectors_service.GetAppConnectorRequest): - request = app_connectors_service.GetAppConnectorRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_app_connector] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_app_connector(self, - request: Optional[Union[app_connectors_service.CreateAppConnectorRequest, dict]] = None, - *, - parent: Optional[str] = None, - app_connector: Optional[app_connectors_service.AppConnector] = None, - app_connector_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Creates a new AppConnector in a given project and - location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_appconnectors_v1 - - def sample_create_app_connector(): - # Create a client - client = beyondcorp_appconnectors_v1.AppConnectorsServiceClient() - - # Initialize request argument(s) - app_connector = beyondcorp_appconnectors_v1.AppConnector() - app_connector.name = "name_value" - - request = beyondcorp_appconnectors_v1.CreateAppConnectorRequest( - parent="parent_value", - app_connector=app_connector, - ) - - # Make the request - operation = client.create_app_connector(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.beyondcorp_appconnectors_v1.types.CreateAppConnectorRequest, dict]): - The request object. Request message for - BeyondCorp.CreateAppConnector. - parent (str): - Required. The resource project name of the AppConnector - location using the form: - ``projects/{project_id}/locations/{location_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - app_connector (google.cloud.beyondcorp_appconnectors_v1.types.AppConnector): - Required. A BeyondCorp AppConnector - resource. - - This corresponds to the ``app_connector`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - app_connector_id (str): - Optional. User-settable AppConnector resource ID. - - - Must start with a letter. - - Must contain between 4-63 characters from - ``/[a-z][0-9]-/``. - - Must end with a number or a letter. 
- - This corresponds to the ``app_connector_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.beyondcorp_appconnectors_v1.types.AppConnector` A BeyondCorp connector resource that represents an application facing - component deployed proximal to and with direct access - to the application instances. It is used to establish - connectivity between the remote enterprise - environment and GCP. It initiates connections to the - applications and can proxy the data from users over - the connection. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, app_connector, app_connector_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, app_connectors_service.CreateAppConnectorRequest): - request = app_connectors_service.CreateAppConnectorRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if app_connector is not None: - request.app_connector = app_connector - if app_connector_id is not None: - request.app_connector_id = app_connector_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_app_connector] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - app_connectors_service.AppConnector, - metadata_type=app_connectors_service.AppConnectorOperationMetadata, - ) - - # Done; return the response. 
- return response - - def update_app_connector(self, - request: Optional[Union[app_connectors_service.UpdateAppConnectorRequest, dict]] = None, - *, - app_connector: Optional[app_connectors_service.AppConnector] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Updates the parameters of a single AppConnector. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_appconnectors_v1 - - def sample_update_app_connector(): - # Create a client - client = beyondcorp_appconnectors_v1.AppConnectorsServiceClient() - - # Initialize request argument(s) - app_connector = beyondcorp_appconnectors_v1.AppConnector() - app_connector.name = "name_value" - - request = beyondcorp_appconnectors_v1.UpdateAppConnectorRequest( - app_connector=app_connector, - ) - - # Make the request - operation = client.update_app_connector(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.beyondcorp_appconnectors_v1.types.UpdateAppConnectorRequest, dict]): - The request object. Request message for - BeyondCorp.UpdateAppConnector. - app_connector (google.cloud.beyondcorp_appconnectors_v1.types.AppConnector): - Required. AppConnector message with updated fields. Only - supported fields specified in update_mask are updated. - - This corresponds to the ``app_connector`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. At least one path - must be supplied in this field. The elements of the - repeated paths field may only include these fields from - [BeyondCorp.AppConnector]: - - - ``labels`` - - ``display_name`` - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.beyondcorp_appconnectors_v1.types.AppConnector` A BeyondCorp connector resource that represents an application facing - component deployed proximal to and with direct access - to the application instances. It is used to establish - connectivity between the remote enterprise - environment and GCP. It initiates connections to the - applications and can proxy the data from users over - the connection. 
- - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [app_connector, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, app_connectors_service.UpdateAppConnectorRequest): - request = app_connectors_service.UpdateAppConnectorRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if app_connector is not None: - request.app_connector = app_connector - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_app_connector] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("app_connector.name", request.app_connector.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - app_connectors_service.AppConnector, - metadata_type=app_connectors_service.AppConnectorOperationMetadata, - ) - - # Done; return the response. - return response - - def delete_app_connector(self, - request: Optional[Union[app_connectors_service.DeleteAppConnectorRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Deletes a single AppConnector. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_appconnectors_v1 - - def sample_delete_app_connector(): - # Create a client - client = beyondcorp_appconnectors_v1.AppConnectorsServiceClient() - - # Initialize request argument(s) - request = beyondcorp_appconnectors_v1.DeleteAppConnectorRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_app_connector(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.beyondcorp_appconnectors_v1.types.DeleteAppConnectorRequest, dict]): - The request object. Request message for - BeyondCorp.DeleteAppConnector. - name (str): - Required. 
BeyondCorp AppConnector name using the form: - ``projects/{project_id}/locations/{location_id}/appConnectors/{app_connector_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, app_connectors_service.DeleteAppConnectorRequest): - request = app_connectors_service.DeleteAppConnectorRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_app_connector] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=app_connectors_service.AppConnectorOperationMetadata, - ) - - # Done; return the response. - return response - - def report_status(self, - request: Optional[Union[app_connectors_service.ReportStatusRequest, dict]] = None, - *, - app_connector: Optional[str] = None, - resource_info: Optional[gcba_resource_info.ResourceInfo] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Report status for a given connector. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_appconnectors_v1 - - def sample_report_status(): - # Create a client - client = beyondcorp_appconnectors_v1.AppConnectorsServiceClient() - - # Initialize request argument(s) - resource_info = beyondcorp_appconnectors_v1.ResourceInfo() - resource_info.id = "id_value" - - request = beyondcorp_appconnectors_v1.ReportStatusRequest( - app_connector="app_connector_value", - resource_info=resource_info, - ) - - # Make the request - operation = client.report_status(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.beyondcorp_appconnectors_v1.types.ReportStatusRequest, dict]): - The request object. Request report the connector status. - app_connector (str): - Required. BeyondCorp Connector name using the form: - ``projects/{project_id}/locations/{location_id}/connectors/{connector}`` - - This corresponds to the ``app_connector`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - resource_info (google.cloud.beyondcorp_appconnectors_v1.types.ResourceInfo): - Required. Resource info of the - connector. - - This corresponds to the ``resource_info`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.beyondcorp_appconnectors_v1.types.AppConnector` A BeyondCorp connector resource that represents an application facing - component deployed proximal to and with direct access - to the application instances. It is used to establish - connectivity between the remote enterprise - environment and GCP. It initiates connections to the - applications and can proxy the data from users over - the connection. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [app_connector, resource_info] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, app_connectors_service.ReportStatusRequest): - request = app_connectors_service.ReportStatusRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if app_connector is not None: - request.app_connector = app_connector - if resource_info is not None: - request.resource_info = resource_info - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.report_status] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("app_connector", request.app_connector), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - app_connectors_service.AppConnector, - metadata_type=app_connectors_service.AppConnectorOperationMetadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "AppConnectorsServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def set_iam_policy( - self, - request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM access control policy on the specified function. - - Replaces any existing policy. - - Args: - request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): - The request object. Request message for `SetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. 
A
-                ``binding`` binds one or more ``members`` to a single
-                ``role``. Members can be user accounts, service
-                accounts, Google groups, and domains (such as G Suite).
-                A ``role`` is a named list of permissions (defined by
-                IAM or configured by users). A ``binding`` can
-                optionally specify a ``condition``, which is a logic
-                expression that further constrains the role binding
-                based on attributes about the request and/or target
-                resource.
-
-                **JSON Example**
-
-                ::
-
-                    {
-                      "bindings": [
-                        {
-                          "role": "roles/resourcemanager.organizationAdmin",
-                          "members": [
-                            "user:mike@example.com",
-                            "group:admins@example.com",
-                            "domain:google.com",
-                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
-                          ]
-                        },
-                        {
-                          "role": "roles/resourcemanager.organizationViewer",
-                          "members": ["user:eve@example.com"],
-                          "condition": {
-                            "title": "expirable access",
-                            "description": "Does not grant access after Sep 2020",
-                            "expression": "request.time <
-                            timestamp('2020-10-01T00:00:00.000Z')",
-                          }
-                        }
-                      ]
-                    }
-
-                **YAML Example**
-
-                ::
-
-                    bindings:
-                    - members:
-                      - user:mike@example.com
-                      - group:admins@example.com
-                      - domain:google.com
-                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
-                      role: roles/resourcemanager.organizationAdmin
-                    - members:
-                      - user:eve@example.com
-                      role: roles/resourcemanager.organizationViewer
-                      condition:
-                        title: expirable access
-                        description: Does not grant access after Sep 2020
-                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
-
-                For a description of IAM and its features, see the `IAM
-                developer's
-                guide <https://cloud.google.com/iam/docs>`__.
-        """
-        # Create or coerce a protobuf request object.
-
-        # The request isn't a proto-plus wrapped type,
-        # so it must be constructed via keyword expansion.
-        if isinstance(request, dict):
-            request = iam_policy_pb2.SetIamPolicyRequest(**request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.set_iam_policy]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("resource", request.resource),)),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        try:
-            # Send the request.
-            response = rpc(
-                request, retry=retry, timeout=timeout, metadata=metadata,)
-
-            # Done; return the response.
-            return response
-        except core_exceptions.GoogleAPICallError as e:
-            self._add_cred_info_for_auth_errors(e)
-            raise e
-
-    def get_iam_policy(
-        self,
-        request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None,
-        *,
-        retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-    ) -> policy_pb2.Policy:
-        r"""Gets the IAM access control policy for a function.
-
-        Returns an empty policy if the function exists and does not have a
-        policy set.
-
-        Args:
-            request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`):
-                The request object. Request message for `GetIamPolicy`
-                method.
-            retry (google.api_core.retry.Retry): Designation of what errors, if
-                any, should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-        Returns:
-            ~.policy_pb2.Policy:
-                Defines an Identity and Access Management (IAM) policy.
-                It is used to specify access control policies for Cloud
-                Platform resources.
-                A ``Policy`` is a collection of ``bindings``. A
-                ``binding`` binds one or more ``members`` to a single
-                ``role``. Members can be user accounts, service
-                accounts, Google groups, and domains (such as G Suite).
-                A ``role`` is a named list of permissions (defined by
-                IAM or configured by users). A ``binding`` can
-                optionally specify a ``condition``, which is a logic
-                expression that further constrains the role binding
-                based on attributes about the request and/or target
-                resource.
-
-                **JSON Example**
-
-                ::
-
-                    {
-                      "bindings": [
-                        {
-                          "role": "roles/resourcemanager.organizationAdmin",
-                          "members": [
-                            "user:mike@example.com",
-                            "group:admins@example.com",
-                            "domain:google.com",
-                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
-                          ]
-                        },
-                        {
-                          "role": "roles/resourcemanager.organizationViewer",
-                          "members": ["user:eve@example.com"],
-                          "condition": {
-                            "title": "expirable access",
-                            "description": "Does not grant access after Sep 2020",
-                            "expression": "request.time <
-                            timestamp('2020-10-01T00:00:00.000Z')",
-                          }
-                        }
-                      ]
-                    }
-
-                **YAML Example**
-
-                ::
-
-                    bindings:
-                    - members:
-                      - user:mike@example.com
-                      - group:admins@example.com
-                      - domain:google.com
-                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
-                      role: roles/resourcemanager.organizationAdmin
-                    - members:
-                      - user:eve@example.com
-                      role: roles/resourcemanager.organizationViewer
-                      condition:
-                        title: expirable access
-                        description: Does not grant access after Sep 2020
-                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
-
-                For a description of IAM and its features, see the `IAM
-                developer's
-                guide <https://cloud.google.com/iam/docs>`__.
-        """
-        # Create or coerce a protobuf request object.
-
-        # The request isn't a proto-plus wrapped type,
-        # so it must be constructed via keyword expansion.
-        if isinstance(request, dict):
-            request = iam_policy_pb2.GetIamPolicyRequest(**request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.get_iam_policy]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("resource", request.resource),)),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        try:
-            # Send the request.
-            response = rpc(
-                request, retry=retry, timeout=timeout, metadata=metadata,)
-
-            # Done; return the response.
-            return response
-        except core_exceptions.GoogleAPICallError as e:
-            self._add_cred_info_for_auth_errors(e)
-            raise e
-
-    def test_iam_permissions(
-        self,
-        request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None,
-        *,
-        retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-    ) -> iam_policy_pb2.TestIamPermissionsResponse:
-        r"""Tests the specified IAM permissions against the IAM access control
-        policy for a function.
-
-        If the function does not exist, this will return an empty set
-        of permissions, not a NOT_FOUND error.
-
-        Args:
-            request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`):
-                The request object. Request message for
-                `TestIamPermissions` method.
-            retry (google.api_core.retry.Retry): Designation of what errors,
-                if any, should be retried.
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.iam_policy_pb2.TestIamPermissionsResponse: - Response message for ``TestIamPermissions`` method. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "AppConnectorsServiceClient", -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/pagers.py b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/pagers.py deleted file mode 100644 index 757156040354..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/pagers.py +++ /dev/null @@ -1,166 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
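The pager module deleted below wraps the initial ``ListAppConnectorsResponse`` and fetches further pages on demand. A minimal consumption sketch, assuming application default credentials; the project and location in the parent path are placeholders:

.. code-block:: python

    from google.cloud import beyondcorp_appconnectors_v1

    # The client may be used as a context manager; this is safe only when
    # its transport is not shared with other clients.
    with beyondcorp_appconnectors_v1.AppConnectorsServiceClient() as client:
        pager = client.list_app_connectors(
            request=beyondcorp_appconnectors_v1.ListAppConnectorsRequest(
                parent="projects/my-project/locations/us-central1",
            )
        )
        # Iterating the pager lazily issues follow-up ListAppConnectors
        # requests whenever a response carries a next_page_token.
        for app_connector in pager:
            print(app_connector.name)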
-# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.beyondcorp_appconnectors_v1.types import app_connectors_service - - -class ListAppConnectorsPager: - """A pager for iterating through ``list_app_connectors`` requests. - - This class thinly wraps an initial - :class:`google.cloud.beyondcorp_appconnectors_v1.types.ListAppConnectorsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``app_connectors`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListAppConnectors`` requests and continue to iterate - through the ``app_connectors`` field on the - corresponding responses. - - All the usual :class:`google.cloud.beyondcorp_appconnectors_v1.types.ListAppConnectorsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., app_connectors_service.ListAppConnectorsResponse], - request: app_connectors_service.ListAppConnectorsRequest, - response: app_connectors_service.ListAppConnectorsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.beyondcorp_appconnectors_v1.types.ListAppConnectorsRequest): - The initial request object. - response (google.cloud.beyondcorp_appconnectors_v1.types.ListAppConnectorsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = app_connectors_service.ListAppConnectorsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[app_connectors_service.ListAppConnectorsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[app_connectors_service.AppConnector]: - for page in self.pages: - yield from page.app_connectors - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListAppConnectorsAsyncPager: - """A pager for iterating through ``list_app_connectors`` requests. - - This class thinly wraps an initial - :class:`google.cloud.beyondcorp_appconnectors_v1.types.ListAppConnectorsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``app_connectors`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListAppConnectors`` requests and continue to iterate - through the ``app_connectors`` field on the - corresponding responses. - - All the usual :class:`google.cloud.beyondcorp_appconnectors_v1.types.ListAppConnectorsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[app_connectors_service.ListAppConnectorsResponse]], - request: app_connectors_service.ListAppConnectorsRequest, - response: app_connectors_service.ListAppConnectorsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.beyondcorp_appconnectors_v1.types.ListAppConnectorsRequest): - The initial request object. - response (google.cloud.beyondcorp_appconnectors_v1.types.ListAppConnectorsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = app_connectors_service.ListAppConnectorsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[app_connectors_service.ListAppConnectorsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[app_connectors_service.AppConnector]: - async def async_generator(): - async for page in self.pages: - for response in page.app_connectors: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/transports/README.rst b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/transports/README.rst deleted file mode 100644 index 511cfe7332a4..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`AppConnectorsServiceTransport` is the ABC for all transports. -- public child `AppConnectorsServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `AppConnectorsServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseAppConnectorsServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `AppConnectorsServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/transports/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/transports/__init__.py deleted file mode 100644 index b5dd6acd9cd8..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
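Given the inheritance structure above, a concrete transport is normally selected by name when the client is constructed. A short sketch, assuming application default credentials (the ``transport`` argument also accepts a transport class or a callable returning one):

.. code-block:: python

    from google.cloud import beyondcorp_appconnectors_v1

    # gRPC is the default transport; passing "rest" selects the REST
    # transport described in the README above.
    grpc_client = beyondcorp_appconnectors_v1.AppConnectorsServiceClient(
        transport="grpc",
    )
    rest_client = beyondcorp_appconnectors_v1.AppConnectorsServiceClient(
        transport="rest",
    )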
-# -from collections import OrderedDict -from typing import Dict, Type - -from .base import AppConnectorsServiceTransport -from .grpc import AppConnectorsServiceGrpcTransport -from .grpc_asyncio import AppConnectorsServiceGrpcAsyncIOTransport -from .rest import AppConnectorsServiceRestTransport -from .rest import AppConnectorsServiceRestInterceptor - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[AppConnectorsServiceTransport]] -_transport_registry['grpc'] = AppConnectorsServiceGrpcTransport -_transport_registry['grpc_asyncio'] = AppConnectorsServiceGrpcAsyncIOTransport -_transport_registry['rest'] = AppConnectorsServiceRestTransport - -__all__ = ( - 'AppConnectorsServiceTransport', - 'AppConnectorsServiceGrpcTransport', - 'AppConnectorsServiceGrpcAsyncIOTransport', - 'AppConnectorsServiceRestTransport', - 'AppConnectorsServiceRestInterceptor', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/transports/base.py b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/transports/base.py deleted file mode 100644 index e1a860da7ee9..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/transports/base.py +++ /dev/null @@ -1,361 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
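The ``_transport_registry`` above is what resolves those string names to transport classes. A hedged sketch of the lookup; the registry is module-private, so this is illustrative rather than supported API:

.. code-block:: python

    from google.cloud.beyondcorp_appconnectors_v1.services.app_connectors_service import (
        transports,
    )

    # Resolve a transport name to its class, as the client machinery does
    # internally when it receives transport="grpc_asyncio".
    transport_cls = transports._transport_registry["grpc_asyncio"]
    print(transport_cls.__name__)  # AppConnectorsServiceGrpcAsyncIOTransport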
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.beyondcorp_appconnectors_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.beyondcorp_appconnectors_v1.types import app_connectors_service -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class AppConnectorsServiceTransport(abc.ABC): - """Abstract transport class for AppConnectorsService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'beyondcorp.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'beyondcorp.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. 
- if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.list_app_connectors: gapic_v1.method.wrap_method( - self.list_app_connectors, - default_timeout=None, - client_info=client_info, - ), - self.get_app_connector: gapic_v1.method.wrap_method( - self.get_app_connector, - default_timeout=None, - client_info=client_info, - ), - self.create_app_connector: gapic_v1.method.wrap_method( - self.create_app_connector, - default_timeout=None, - client_info=client_info, - ), - self.update_app_connector: gapic_v1.method.wrap_method( - self.update_app_connector, - default_timeout=None, - client_info=client_info, - ), - self.delete_app_connector: gapic_v1.method.wrap_method( - self.delete_app_connector, - default_timeout=None, - client_info=client_info, - ), - self.report_status: gapic_v1.method.wrap_method( - self.report_status, - default_timeout=None, - client_info=client_info, - ), - self.get_location: gapic_v1.method.wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - self.list_locations: gapic_v1.method.wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - self.get_iam_policy: gapic_v1.method.wrap_method( - self.get_iam_policy, - default_timeout=None, - client_info=client_info, - ), - self.set_iam_policy: gapic_v1.method.wrap_method( - self.set_iam_policy, - default_timeout=None, - client_info=client_info, - ), - self.test_iam_permissions: gapic_v1.method.wrap_method( - self.test_iam_permissions, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: gapic_v1.method.wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: gapic_v1.method.wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: gapic_v1.method.wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: gapic_v1.method.wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. 
warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! - """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def list_app_connectors(self) -> Callable[ - [app_connectors_service.ListAppConnectorsRequest], - Union[ - app_connectors_service.ListAppConnectorsResponse, - Awaitable[app_connectors_service.ListAppConnectorsResponse] - ]]: - raise NotImplementedError() - - @property - def get_app_connector(self) -> Callable[ - [app_connectors_service.GetAppConnectorRequest], - Union[ - app_connectors_service.AppConnector, - Awaitable[app_connectors_service.AppConnector] - ]]: - raise NotImplementedError() - - @property - def create_app_connector(self) -> Callable[ - [app_connectors_service.CreateAppConnectorRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def update_app_connector(self) -> Callable[ - [app_connectors_service.UpdateAppConnectorRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_app_connector(self) -> Callable[ - [app_connectors_service.DeleteAppConnectorRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def report_status(self) -> Callable[ - [app_connectors_service.ReportStatusRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def delete_operation( - self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def set_iam_policy( - self, - ) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], - ]: - raise NotImplementedError() - - @property - def get_iam_policy( - self, - ) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], - ]: - raise NotImplementedError() - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Union[ - iam_policy_pb2.TestIamPermissionsResponse, - Awaitable[iam_policy_pb2.TestIamPermissionsResponse], - ], - ]: - raise NotImplementedError() - - @property - def get_location(self, - ) -> Callable[ - [locations_pb2.GetLocationRequest], - Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], - ]: - raise NotImplementedError() - - @property - def list_locations(self, - ) -> Callable[ - [locations_pb2.ListLocationsRequest], - Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], - ]: - raise NotImplementedError() - - @property - def 
kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'AppConnectorsServiceTransport', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/transports/grpc.py b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/transports/grpc.py deleted file mode 100644 index fe7fd312c0d3..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/transports/grpc.py +++ /dev/null @@ -1,700 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json -import logging as std_logging -import pickle -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore - -from google.cloud.beyondcorp_appconnectors_v1.types import app_connectors_service -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from .base import AppConnectorsServiceTransport, DEFAULT_CLIENT_INFO - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER - def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService", - "rpcName": 
client_call_details.method,
-                    "request": grpc_request,
-                    "metadata": grpc_request["metadata"],
-                },
-            )
-
-        response = continuation(client_call_details, request)
-        if logging_enabled:  # pragma: NO COVER
-            response_metadata = response.trailing_metadata()
-            # Convert gRPC metadata to a list of tuples
-            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
-            result = response.result()
-            if isinstance(result, proto.Message):
-                response_payload = type(result).to_json(result)
-            elif isinstance(result, google.protobuf.message.Message):
-                response_payload = MessageToJson(result)
-            else:
-                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
-            grpc_response = {
-                "payload": response_payload,
-                "metadata": metadata,
-                "status": "OK",
-            }
-            _LOGGER.debug(
-                f"Received response for {client_call_details.method}.",
-                extra = {
-                    "serviceName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService",
-                    "rpcName": client_call_details.method,
-                    "response": grpc_response,
-                    "metadata": grpc_response["metadata"],
-                },
-            )
-        return response
-
-
-class AppConnectorsServiceGrpcTransport(AppConnectorsServiceTransport):
-    """gRPC backend transport for AppConnectorsService.
-
-    API Overview:
-
-    The ``beyondcorp.googleapis.com`` service implements the Google
-    Cloud BeyondCorp API.
-
-    Data Model:
-
-    The AppConnectorsService exposes the following resource:
-
-    -  AppConnectors, named as follows:
-       ``projects/{project_id}/locations/{location_id}/appConnectors/{app_connector_id}``.
-
-    The AppConnectorsService provides methods to manage
-    (create/read/update/delete) BeyondCorp AppConnectors.
-
-    This class defines the same methods as the primary client, so the
-    primary client can load the underlying transport implementation
-    and call it.
-
-    It sends protocol buffers over the wire using gRPC (which is built on
-    top of HTTP/2); the ``grpcio`` package must be installed.
-    """
-    _stubs: Dict[str, Callable]
-
-    def __init__(self, *,
-            host: str = 'beyondcorp.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None,
-            api_mtls_endpoint: Optional[str] = None,
-            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
-            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            api_audience: Optional[str] = None,
-            ) -> None:
-        """Instantiate the transport.
-
-        Args:
-            host (Optional[str]):
-                 The hostname to connect to (default: 'beyondcorp.googleapis.com').
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-                This argument is ignored if a ``channel`` instance is provided.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if a ``channel`` instance is provided.
-            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
-                ignored if a ``channel`` instance is provided.
- channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
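-                # A ``client_cert_source`` callback is expected to return the
-                # certificate chain and the private key as PEM bytes. A
-                # minimal sketch (file names are assumptions):
-                #
-                #     def client_cert_source() -> Tuple[bytes, bytes]:
-                #         with open("client.crt", "rb") as crt, open("client.key", "rb") as key:
-                #             return crt.read(), key.read()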
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) - - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'beyondcorp.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. 
- """ - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def list_app_connectors(self) -> Callable[ - [app_connectors_service.ListAppConnectorsRequest], - app_connectors_service.ListAppConnectorsResponse]: - r"""Return a callable for the list app connectors method over gRPC. - - Lists AppConnectors in a given project and location. - - Returns: - Callable[[~.ListAppConnectorsRequest], - ~.ListAppConnectorsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_app_connectors' not in self._stubs: - self._stubs['list_app_connectors'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService/ListAppConnectors', - request_serializer=app_connectors_service.ListAppConnectorsRequest.serialize, - response_deserializer=app_connectors_service.ListAppConnectorsResponse.deserialize, - ) - return self._stubs['list_app_connectors'] - - @property - def get_app_connector(self) -> Callable[ - [app_connectors_service.GetAppConnectorRequest], - app_connectors_service.AppConnector]: - r"""Return a callable for the get app connector method over gRPC. - - Gets details of a single AppConnector. - - Returns: - Callable[[~.GetAppConnectorRequest], - ~.AppConnector]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_app_connector' not in self._stubs: - self._stubs['get_app_connector'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService/GetAppConnector', - request_serializer=app_connectors_service.GetAppConnectorRequest.serialize, - response_deserializer=app_connectors_service.AppConnector.deserialize, - ) - return self._stubs['get_app_connector'] - - @property - def create_app_connector(self) -> Callable[ - [app_connectors_service.CreateAppConnectorRequest], - operations_pb2.Operation]: - r"""Return a callable for the create app connector method over gRPC. - - Creates a new AppConnector in a given project and - location. - - Returns: - Callable[[~.CreateAppConnectorRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_app_connector' not in self._stubs: - self._stubs['create_app_connector'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService/CreateAppConnector', - request_serializer=app_connectors_service.CreateAppConnectorRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_app_connector'] - - @property - def update_app_connector(self) -> Callable[ - [app_connectors_service.UpdateAppConnectorRequest], - operations_pb2.Operation]: - r"""Return a callable for the update app connector method over gRPC. - - Updates the parameters of a single AppConnector. - - Returns: - Callable[[~.UpdateAppConnectorRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_app_connector' not in self._stubs: - self._stubs['update_app_connector'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService/UpdateAppConnector', - request_serializer=app_connectors_service.UpdateAppConnectorRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_app_connector'] - - @property - def delete_app_connector(self) -> Callable[ - [app_connectors_service.DeleteAppConnectorRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete app connector method over gRPC. - - Deletes a single AppConnector. - - Returns: - Callable[[~.DeleteAppConnectorRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_app_connector' not in self._stubs: - self._stubs['delete_app_connector'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService/DeleteAppConnector', - request_serializer=app_connectors_service.DeleteAppConnectorRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_app_connector'] - - @property - def report_status(self) -> Callable[ - [app_connectors_service.ReportStatusRequest], - operations_pb2.Operation]: - r"""Return a callable for the report status method over gRPC. - - Report status for a given connector. - - Returns: - Callable[[~.ReportStatusRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
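-        # Note: long-running methods such as this one deserialize the response
-        # with ``operations_pb2.Operation.FromString`` because ``Operation`` is
-        # a plain protobuf message, whereas the request uses the proto-plus
-        # ``serialize`` helper of the generated request type.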
- if 'report_status' not in self._stubs: - self._stubs['report_status'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService/ReportStatus', - request_serializer=app_connectors_service.ReportStatusRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['report_status'] - - def close(self): - self._logged_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_location" not in self._stubs: - self._stubs["get_location"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - @property - def set_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - Sets the IAM access control policy on the specified - function. Replaces any existing policy. - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( - "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["set_iam_policy"] - - @property - def get_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - Gets the IAM access control policy for a function. - Returns an empty policy if the function exists and does - not have a policy set. - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( - "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["get_iam_policy"] - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse - ]: - r"""Return a callable for the test iam permissions method over gRPC. - Tests the specified permissions against the IAM access control - policy for a function. 
If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( - "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs["test_iam_permissions"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'AppConnectorsServiceGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/transports/grpc_asyncio.py deleted file mode 100644 index 9c9474d118d4..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,790 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
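-#
-# Usage sketch (illustrative): this transport is normally selected through
-# the async client rather than constructed by hand, e.g.
-#
-#     client = AppConnectorsServiceAsyncClient(transport="grpc_asyncio")
-#
-# where ``AppConnectorsServiceAsyncClient`` is the async client generated in
-# this package.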
-#
-import inspect
-import json
-import pickle
-import logging as std_logging
-import warnings
-from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
-
-from google.api_core import gapic_v1
-from google.api_core import grpc_helpers_async
-from google.api_core import exceptions as core_exceptions
-from google.api_core import retry_async as retries
-from google.api_core import operations_v1
-from google.auth import credentials as ga_credentials # type: ignore
-from google.auth.transport.grpc import SslCredentials # type: ignore
-from google.protobuf.json_format import MessageToJson
-import google.protobuf.message
-
-import grpc # type: ignore
-import proto # type: ignore
-from grpc.experimental import aio # type: ignore
-
-from google.cloud.beyondcorp_appconnectors_v1.types import app_connectors_service
-from google.cloud.location import locations_pb2 # type: ignore
-from google.iam.v1 import iam_policy_pb2 # type: ignore
-from google.iam.v1 import policy_pb2 # type: ignore
-from google.longrunning import operations_pb2 # type: ignore
-from .base import AppConnectorsServiceTransport, DEFAULT_CLIENT_INFO
-from .grpc import AppConnectorsServiceGrpcTransport
-
-try:
-    from google.api_core import client_logging # type: ignore
-    CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER
-except ImportError: # pragma: NO COVER
-    CLIENT_LOGGING_SUPPORTED = False
-
-_LOGGER = std_logging.getLogger(__name__)
-
-
-class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER
-    async def intercept_unary_unary(self, continuation, client_call_details, request):
-        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG)
-        if logging_enabled: # pragma: NO COVER
-            request_metadata = client_call_details.metadata
-            if isinstance(request, proto.Message):
-                request_payload = type(request).to_json(request)
-            elif isinstance(request, google.protobuf.message.Message):
-                request_payload = MessageToJson(request)
-            else:
-                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
-
-            request_metadata = {
-                key: value.decode("utf-8") if isinstance(value, bytes) else value
-                for key, value in request_metadata
-            }
-            grpc_request = {
-                "payload": request_payload,
-                "requestMethod": "grpc",
-                "metadata": dict(request_metadata),
-            }
-            _LOGGER.debug(
-                f"Sending request for {client_call_details.method}",
-                extra = {
-                    "serviceName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService",
-                    "rpcName": str(client_call_details.method),
-                    "request": grpc_request,
-                    "metadata": grpc_request["metadata"],
-                },
-            )
-        response = await continuation(client_call_details, request)
-        if logging_enabled: # pragma: NO COVER
-            response_metadata = await response.trailing_metadata()
-            # Convert gRPC metadata to a list of tuples
-            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
-            result = await response
-            if isinstance(result, proto.Message):
-                response_payload = type(result).to_json(result)
-            elif isinstance(result, google.protobuf.message.Message):
-                response_payload = MessageToJson(result)
-            else:
-                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
-            grpc_response = {
-                "payload": response_payload,
-                "metadata": metadata,
-                "status": "OK",
-            }
-            _LOGGER.debug(
-                f"Received response to rpc {client_call_details.method}.",
-                extra = {
-                    "serviceName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService",
-                    "rpcName": str(client_call_details.method),
-                    "response":
grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class AppConnectorsServiceGrpcAsyncIOTransport(AppConnectorsServiceTransport): - """gRPC AsyncIO backend transport for AppConnectorsService. - - API Overview: - - The ``beyondcorp.googleapis.com`` service implements the Google - Cloud BeyondCorp API. - - Data Model: - - The AppConnectorsService exposes the following resource: - - - AppConnectors, named as follows: - ``projects/{project_id}/locations/{location_id}/appConnectors/{app_connector_id}``. - - The AppConnectorsService provides methods to manage - (create/read/update/delete) BeyondCorp AppConnectors. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'beyondcorp.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'beyondcorp.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'beyondcorp.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. 
These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-                This argument is ignored if a ``channel`` instance is provided.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if a ``channel`` instance is provided.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]):
-                A ``Channel`` instance through which to make calls, or a Callable
-                that constructs and returns one. If set to None, ``self.create_channel``
-                is used to create the channel. If a Callable is given, it will be called
-                with the same arguments as used in ``self.create_channel``.
-            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
-                If provided, it overrides the ``host`` argument and tries to create
-                a mutual TLS channel with client SSL credentials from
-                ``client_cert_source`` or application default SSL credentials.
-            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                Deprecated. A callback to provide client SSL certificate bytes and
-                private key bytes, both in PEM format. It is ignored if
-                ``api_mtls_endpoint`` is None.
-            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
-                for the grpc channel. It is ignored if a ``channel`` instance is provided.
-            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                A callback to provide client certificate bytes and private key bytes,
-                both in PEM format. It is used to configure a mutual TLS channel. It is
-                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-            always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
-                be used for service account credentials.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
-            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
-                and ``credentials_file`` are passed.
-        """
-        self._grpc_channel = None
-        self._ssl_channel_credentials = ssl_channel_credentials
-        self._stubs: Dict[str, Callable] = {}
-        self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None
-
-        if api_mtls_endpoint:
-            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
-        if client_cert_source:
-            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
-
-        if isinstance(channel, aio.Channel):
-            # Ignore credentials if a channel was passed.
-            credentials = None
-            self._ignore_credentials = True
-            # If a channel was explicitly provided, set it.
-            self._grpc_channel = channel
-            self._ssl_channel_credentials = None
-        else:
-            if api_mtls_endpoint:
-                host = api_mtls_endpoint
-
-                # Create SSL credentials with client_cert_source or application
-                # default SSL credentials.
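-                # On this deprecated ``api_mtls_endpoint`` path, the endpoint
-                # replaces ``host`` and the channel credentials come either
-                # from the ``client_cert_source`` callback or, failing that,
-                # from application-default SSL credentials (``SslCredentials``).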
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def list_app_connectors(self) -> Callable[ - [app_connectors_service.ListAppConnectorsRequest], - Awaitable[app_connectors_service.ListAppConnectorsResponse]]: - r"""Return a callable for the list app connectors method over gRPC. - - Lists AppConnectors in a given project and location. - - Returns: - Callable[[~.ListAppConnectorsRequest], - Awaitable[~.ListAppConnectorsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
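-        # On a ``grpc.aio`` channel the multicallable returns an awaitable
-        # call object, so a hypothetical caller awaits it directly:
-        #
-        #     response = await transport.list_app_connectors(request)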
- if 'list_app_connectors' not in self._stubs: - self._stubs['list_app_connectors'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService/ListAppConnectors', - request_serializer=app_connectors_service.ListAppConnectorsRequest.serialize, - response_deserializer=app_connectors_service.ListAppConnectorsResponse.deserialize, - ) - return self._stubs['list_app_connectors'] - - @property - def get_app_connector(self) -> Callable[ - [app_connectors_service.GetAppConnectorRequest], - Awaitable[app_connectors_service.AppConnector]]: - r"""Return a callable for the get app connector method over gRPC. - - Gets details of a single AppConnector. - - Returns: - Callable[[~.GetAppConnectorRequest], - Awaitable[~.AppConnector]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_app_connector' not in self._stubs: - self._stubs['get_app_connector'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService/GetAppConnector', - request_serializer=app_connectors_service.GetAppConnectorRequest.serialize, - response_deserializer=app_connectors_service.AppConnector.deserialize, - ) - return self._stubs['get_app_connector'] - - @property - def create_app_connector(self) -> Callable[ - [app_connectors_service.CreateAppConnectorRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create app connector method over gRPC. - - Creates a new AppConnector in a given project and - location. - - Returns: - Callable[[~.CreateAppConnectorRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_app_connector' not in self._stubs: - self._stubs['create_app_connector'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService/CreateAppConnector', - request_serializer=app_connectors_service.CreateAppConnectorRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_app_connector'] - - @property - def update_app_connector(self) -> Callable[ - [app_connectors_service.UpdateAppConnectorRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update app connector method over gRPC. - - Updates the parameters of a single AppConnector. - - Returns: - Callable[[~.UpdateAppConnectorRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'update_app_connector' not in self._stubs: - self._stubs['update_app_connector'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService/UpdateAppConnector', - request_serializer=app_connectors_service.UpdateAppConnectorRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_app_connector'] - - @property - def delete_app_connector(self) -> Callable[ - [app_connectors_service.DeleteAppConnectorRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete app connector method over gRPC. - - Deletes a single AppConnector. - - Returns: - Callable[[~.DeleteAppConnectorRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_app_connector' not in self._stubs: - self._stubs['delete_app_connector'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService/DeleteAppConnector', - request_serializer=app_connectors_service.DeleteAppConnectorRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_app_connector'] - - @property - def report_status(self) -> Callable[ - [app_connectors_service.ReportStatusRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the report status method over gRPC. - - Report status for a given connector. - - Returns: - Callable[[~.ReportStatusRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'report_status' not in self._stubs: - self._stubs['report_status'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService/ReportStatus', - request_serializer=app_connectors_service.ReportStatusRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['report_status'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.list_app_connectors: self._wrap_method( - self.list_app_connectors, - default_timeout=None, - client_info=client_info, - ), - self.get_app_connector: self._wrap_method( - self.get_app_connector, - default_timeout=None, - client_info=client_info, - ), - self.create_app_connector: self._wrap_method( - self.create_app_connector, - default_timeout=None, - client_info=client_info, - ), - self.update_app_connector: self._wrap_method( - self.update_app_connector, - default_timeout=None, - client_info=client_info, - ), - self.delete_app_connector: self._wrap_method( - self.delete_app_connector, - default_timeout=None, - client_info=client_info, - ), - self.report_status: self._wrap_method( - self.report_status, - default_timeout=None, - client_info=client_info, - ), - self.get_location: self._wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - self.list_locations: self._wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - self.get_iam_policy: self._wrap_method( - self.get_iam_policy, - default_timeout=None, - client_info=client_info, - ), - self.set_iam_policy: self._wrap_method( - self.set_iam_policy, - default_timeout=None, - client_info=client_info, - ), - self.test_iam_permissions: self._wrap_method( - self.test_iam_permissions, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: self._wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: self._wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: self._wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: self._wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
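-        # ``response_deserializer=None`` below leaves the response body
-        # undeserialized; DeleteOperation (and CancelOperation) return an
-        # empty message, so there is nothing useful to decode.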
- if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if "get_location" not in self._stubs:
-            self._stubs["get_location"] = self._logged_channel.unary_unary(
-                "/google.cloud.location.Locations/GetLocation",
-                request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
-                response_deserializer=locations_pb2.Location.FromString,
-            )
-        return self._stubs["get_location"]
-
-    @property
-    def set_iam_policy(
-        self,
-    ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]:
-        r"""Return a callable for the set iam policy method over gRPC.
-        Sets the IAM access control policy on the specified
-        resource. Replaces any existing policy.
-        Returns:
-            Callable[[~.SetIamPolicyRequest],
-                    ~.Policy]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if "set_iam_policy" not in self._stubs:
-            self._stubs["set_iam_policy"] = self._logged_channel.unary_unary(
-                "/google.iam.v1.IAMPolicy/SetIamPolicy",
-                request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString,
-                response_deserializer=policy_pb2.Policy.FromString,
-            )
-        return self._stubs["set_iam_policy"]
-
-    @property
-    def get_iam_policy(
-        self,
-    ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]:
-        r"""Return a callable for the get iam policy method over gRPC.
-        Gets the IAM access control policy for a resource.
-        Returns an empty policy if the resource exists and does
-        not have a policy set.
-        Returns:
-            Callable[[~.GetIamPolicyRequest],
-                    ~.Policy]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if "get_iam_policy" not in self._stubs:
-            self._stubs["get_iam_policy"] = self._logged_channel.unary_unary(
-                "/google.iam.v1.IAMPolicy/GetIamPolicy",
-                request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString,
-                response_deserializer=policy_pb2.Policy.FromString,
-            )
-        return self._stubs["get_iam_policy"]
-
-    @property
-    def test_iam_permissions(
-        self,
-    ) -> Callable[
-        [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse
-    ]:
-        r"""Return a callable for the test iam permissions method over gRPC.
-        Tests the specified permissions against the IAM access control
-        policy for a resource. If the resource does not exist, this will
-        return an empty set of permissions, not a NOT_FOUND error.
-        Returns:
-            Callable[[~.TestIamPermissionsRequest],
-                    ~.TestIamPermissionsResponse]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
- if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( - "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs["test_iam_permissions"] - - -__all__ = ( - 'AppConnectorsServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/transports/rest.py b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/transports/rest.py deleted file mode 100644 index 06f25b1d3772..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/transports/rest.py +++ /dev/null @@ -1,2509 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import logging -import json # type: ignore - -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.api_core import operations_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.cloud.location import locations_pb2 # type: ignore - -from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - - -from google.cloud.beyondcorp_appconnectors_v1.types import app_connectors_service -from google.longrunning import operations_pb2 # type: ignore - - -from .rest_base import _BaseAppConnectorsServiceRestTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = logging.getLogger(__name__) - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=f"requests@{requests_version}", -) - - -class AppConnectorsServiceRestInterceptor: - """Interceptor for AppConnectorsService. 
- - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the AppConnectorsServiceRestTransport. - - .. code-block:: python - class MyCustomAppConnectorsServiceInterceptor(AppConnectorsServiceRestInterceptor): - def pre_create_app_connector(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_app_connector(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_app_connector(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_delete_app_connector(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_app_connector(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_app_connector(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_app_connectors(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_app_connectors(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_report_status(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_report_status(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_app_connector(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_app_connector(self, response): - logging.log(f"Received response: {response}") - return response - - transport = AppConnectorsServiceRestTransport(interceptor=MyCustomAppConnectorsServiceInterceptor()) - client = AppConnectorsServiceClient(transport=transport) - - - """ - def pre_create_app_connector(self, request: app_connectors_service.CreateAppConnectorRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[app_connectors_service.CreateAppConnectorRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_app_connector - - Override in a subclass to manipulate the request or metadata - before they are sent to the AppConnectorsService server. - """ - return request, metadata - - def post_create_app_connector(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_app_connector - - DEPRECATED. Please use the `post_create_app_connector_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AppConnectorsService server but before - it is returned to user code. This `post_create_app_connector` interceptor runs - before the `post_create_app_connector_with_metadata` interceptor. 
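-
-        For illustration only (hypothetical subclass), migrating from this
-        hook to the newer one looks like:
-
-        .. code-block:: python
-
-            class MyInterceptor(AppConnectorsServiceRestInterceptor):
-                # Deprecated hook:
-                def post_create_app_connector(self, response):
-                    return response
-
-                # Preferred hook, which also receives the response metadata:
-                def post_create_app_connector_with_metadata(self, response, metadata):
-                    return response, metadata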
- """ - return response - - def post_create_app_connector_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_app_connector - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AppConnectorsService server but before it is returned to user code. - - We recommend only using this `post_create_app_connector_with_metadata` - interceptor in new development instead of the `post_create_app_connector` interceptor. - When both interceptors are used, this `post_create_app_connector_with_metadata` interceptor runs after the - `post_create_app_connector` interceptor. The (possibly modified) response returned by - `post_create_app_connector` will be passed to - `post_create_app_connector_with_metadata`. - """ - return response, metadata - - def pre_delete_app_connector(self, request: app_connectors_service.DeleteAppConnectorRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[app_connectors_service.DeleteAppConnectorRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_app_connector - - Override in a subclass to manipulate the request or metadata - before they are sent to the AppConnectorsService server. - """ - return request, metadata - - def post_delete_app_connector(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_app_connector - - DEPRECATED. Please use the `post_delete_app_connector_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AppConnectorsService server but before - it is returned to user code. This `post_delete_app_connector` interceptor runs - before the `post_delete_app_connector_with_metadata` interceptor. - """ - return response - - def post_delete_app_connector_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for delete_app_connector - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AppConnectorsService server but before it is returned to user code. - - We recommend only using this `post_delete_app_connector_with_metadata` - interceptor in new development instead of the `post_delete_app_connector` interceptor. - When both interceptors are used, this `post_delete_app_connector_with_metadata` interceptor runs after the - `post_delete_app_connector` interceptor. The (possibly modified) response returned by - `post_delete_app_connector` will be passed to - `post_delete_app_connector_with_metadata`. - """ - return response, metadata - - def pre_get_app_connector(self, request: app_connectors_service.GetAppConnectorRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[app_connectors_service.GetAppConnectorRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_app_connector - - Override in a subclass to manipulate the request or metadata - before they are sent to the AppConnectorsService server. - """ - return request, metadata - - def post_get_app_connector(self, response: app_connectors_service.AppConnector) -> app_connectors_service.AppConnector: - """Post-rpc interceptor for get_app_connector - - DEPRECATED. 
Please use the `post_get_app_connector_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AppConnectorsService server but before - it is returned to user code. This `post_get_app_connector` interceptor runs - before the `post_get_app_connector_with_metadata` interceptor. - """ - return response - - def post_get_app_connector_with_metadata(self, response: app_connectors_service.AppConnector, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[app_connectors_service.AppConnector, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_app_connector - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AppConnectorsService server but before it is returned to user code. - - We recommend only using this `post_get_app_connector_with_metadata` - interceptor in new development instead of the `post_get_app_connector` interceptor. - When both interceptors are used, this `post_get_app_connector_with_metadata` interceptor runs after the - `post_get_app_connector` interceptor. The (possibly modified) response returned by - `post_get_app_connector` will be passed to - `post_get_app_connector_with_metadata`. - """ - return response, metadata - - def pre_list_app_connectors(self, request: app_connectors_service.ListAppConnectorsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[app_connectors_service.ListAppConnectorsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_app_connectors - - Override in a subclass to manipulate the request or metadata - before they are sent to the AppConnectorsService server. - """ - return request, metadata - - def post_list_app_connectors(self, response: app_connectors_service.ListAppConnectorsResponse) -> app_connectors_service.ListAppConnectorsResponse: - """Post-rpc interceptor for list_app_connectors - - DEPRECATED. Please use the `post_list_app_connectors_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AppConnectorsService server but before - it is returned to user code. This `post_list_app_connectors` interceptor runs - before the `post_list_app_connectors_with_metadata` interceptor. - """ - return response - - def post_list_app_connectors_with_metadata(self, response: app_connectors_service.ListAppConnectorsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[app_connectors_service.ListAppConnectorsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_app_connectors - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AppConnectorsService server but before it is returned to user code. - - We recommend only using this `post_list_app_connectors_with_metadata` - interceptor in new development instead of the `post_list_app_connectors` interceptor. - When both interceptors are used, this `post_list_app_connectors_with_metadata` interceptor runs after the - `post_list_app_connectors` interceptor. The (possibly modified) response returned by - `post_list_app_connectors` will be passed to - `post_list_app_connectors_with_metadata`. 
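-
-        For illustration only, a hypothetical override that inspects the
-        response metadata before handing both back to the client:
-
-        .. code-block:: python
-
-            def post_list_app_connectors_with_metadata(self, response, metadata):
-                for key, value in metadata:
-                    print(key, value)
-                return response, metadata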
- """ - return response, metadata - - def pre_report_status(self, request: app_connectors_service.ReportStatusRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[app_connectors_service.ReportStatusRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for report_status - - Override in a subclass to manipulate the request or metadata - before they are sent to the AppConnectorsService server. - """ - return request, metadata - - def post_report_status(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for report_status - - DEPRECATED. Please use the `post_report_status_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AppConnectorsService server but before - it is returned to user code. This `post_report_status` interceptor runs - before the `post_report_status_with_metadata` interceptor. - """ - return response - - def post_report_status_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for report_status - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AppConnectorsService server but before it is returned to user code. - - We recommend only using this `post_report_status_with_metadata` - interceptor in new development instead of the `post_report_status` interceptor. - When both interceptors are used, this `post_report_status_with_metadata` interceptor runs after the - `post_report_status` interceptor. The (possibly modified) response returned by - `post_report_status` will be passed to - `post_report_status_with_metadata`. - """ - return response, metadata - - def pre_update_app_connector(self, request: app_connectors_service.UpdateAppConnectorRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[app_connectors_service.UpdateAppConnectorRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_app_connector - - Override in a subclass to manipulate the request or metadata - before they are sent to the AppConnectorsService server. - """ - return request, metadata - - def post_update_app_connector(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for update_app_connector - - DEPRECATED. Please use the `post_update_app_connector_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AppConnectorsService server but before - it is returned to user code. This `post_update_app_connector` interceptor runs - before the `post_update_app_connector_with_metadata` interceptor. - """ - return response - - def post_update_app_connector_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_app_connector - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AppConnectorsService server but before it is returned to user code. - - We recommend only using this `post_update_app_connector_with_metadata` - interceptor in new development instead of the `post_update_app_connector` interceptor. 
- When both interceptors are used, this `post_update_app_connector_with_metadata` interceptor runs after the - `post_update_app_connector` interceptor. The (possibly modified) response returned by - `post_update_app_connector` will be passed to - `post_update_app_connector_with_metadata`. - """ - return response, metadata - - def pre_get_location( - self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_location - - Override in a subclass to manipulate the request or metadata - before they are sent to the AppConnectorsService server. - """ - return request, metadata - - def post_get_location( - self, response: locations_pb2.Location - ) -> locations_pb2.Location: - """Post-rpc interceptor for get_location - - Override in a subclass to manipulate the response - after it is returned by the AppConnectorsService server but before - it is returned to user code. - """ - return response - - def pre_list_locations( - self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_locations - - Override in a subclass to manipulate the request or metadata - before they are sent to the AppConnectorsService server. - """ - return request, metadata - - def post_list_locations( - self, response: locations_pb2.ListLocationsResponse - ) -> locations_pb2.ListLocationsResponse: - """Post-rpc interceptor for list_locations - - Override in a subclass to manipulate the response - after it is returned by the AppConnectorsService server but before - it is returned to user code. - """ - return response - - def pre_get_iam_policy( - self, request: iam_policy_pb2.GetIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the AppConnectorsService server. - """ - return request, metadata - - def post_get_iam_policy( - self, response: policy_pb2.Policy - ) -> policy_pb2.Policy: - """Post-rpc interceptor for get_iam_policy - - Override in a subclass to manipulate the response - after it is returned by the AppConnectorsService server but before - it is returned to user code. - """ - return response - - def pre_set_iam_policy( - self, request: iam_policy_pb2.SetIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for set_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the AppConnectorsService server. - """ - return request, metadata - - def post_set_iam_policy( - self, response: policy_pb2.Policy - ) -> policy_pb2.Policy: - """Post-rpc interceptor for set_iam_policy - - Override in a subclass to manipulate the response - after it is returned by the AppConnectorsService server but before - it is returned to user code. 
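-
-        For illustration only, a hypothetical override that records the
-        etag of the policy the server returned:
-
-        .. code-block:: python
-
-            def post_set_iam_policy(self, response):
-                self.last_policy_etag = response.etag
-                return response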
- """ - return response - - def pre_test_iam_permissions( - self, request: iam_policy_pb2.TestIamPermissionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for test_iam_permissions - - Override in a subclass to manipulate the request or metadata - before they are sent to the AppConnectorsService server. - """ - return request, metadata - - def post_test_iam_permissions( - self, response: iam_policy_pb2.TestIamPermissionsResponse - ) -> iam_policy_pb2.TestIamPermissionsResponse: - """Post-rpc interceptor for test_iam_permissions - - Override in a subclass to manipulate the response - after it is returned by the AppConnectorsService server but before - it is returned to user code. - """ - return response - - def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the AppConnectorsService server. - """ - return request, metadata - - def post_cancel_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the response - after it is returned by the AppConnectorsService server but before - it is returned to user code. - """ - return response - - def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the AppConnectorsService server. - """ - return request, metadata - - def post_delete_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for delete_operation - - Override in a subclass to manipulate the response - after it is returned by the AppConnectorsService server but before - it is returned to user code. - """ - return response - - def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the AppConnectorsService server. - """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the AppConnectorsService server but before - it is returned to user code. - """ - return response - - def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_operations - - Override in a subclass to manipulate the request or metadata - before they are sent to the AppConnectorsService server. 
- """ - return request, metadata - - def post_list_operations( - self, response: operations_pb2.ListOperationsResponse - ) -> operations_pb2.ListOperationsResponse: - """Post-rpc interceptor for list_operations - - Override in a subclass to manipulate the response - after it is returned by the AppConnectorsService server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class AppConnectorsServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: AppConnectorsServiceRestInterceptor - - -class AppConnectorsServiceRestTransport(_BaseAppConnectorsServiceRestTransport): - """REST backend synchronous transport for AppConnectorsService. - - API Overview: - - The ``beyondcorp.googleapis.com`` service implements the Google - Cloud BeyondCorp API. - - Data Model: - - The AppConnectorsService exposes the following resource: - - - AppConnectors, named as follows: - ``projects/{project_id}/locations/{location_id}/appConnectors/{app_connector_id}``. - - The AppConnectorsService provides methods to manage - (create/read/update/delete) BeyondCorp AppConnectors. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'beyondcorp.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[AppConnectorsServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'beyondcorp.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. 
scopes, quota, etc. - # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - url_scheme=url_scheme, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or AppConnectorsServiceRestInterceptor() - self._prep_wrapped_messages(client_info) - - @property - def operations_client(self) -> operations_v1.AbstractOperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Only create a new client if we do not already have one. - if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.CancelOperation': [ - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - ], - 'google.longrunning.Operations.DeleteOperation': [ - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ], - 'google.longrunning.Operations.GetOperation': [ - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ], - 'google.longrunning.Operations.ListOperations': [ - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', - }, - ], - } - - rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1") - - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) - - # Return the client from cache. - return self._operations_client - - class _CreateAppConnector(_BaseAppConnectorsServiceRestTransport._BaseCreateAppConnector, AppConnectorsServiceRestStub): - def __hash__(self): - return hash("AppConnectorsServiceRestTransport.CreateAppConnector") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: app_connectors_service.CreateAppConnectorRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the create app connector method over HTTP. - - Args: - request (~.app_connectors_service.CreateAppConnectorRequest): - The request object. Request message for - BeyondCorp.CreateAppConnector. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseAppConnectorsServiceRestTransport._BaseCreateAppConnector._get_http_options() - - request, metadata = self._interceptor.pre_create_app_connector(request, metadata) - transcoded_request = _BaseAppConnectorsServiceRestTransport._BaseCreateAppConnector._get_transcoded_request(http_options, request) - - body = _BaseAppConnectorsServiceRestTransport._BaseCreateAppConnector._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAppConnectorsServiceRestTransport._BaseCreateAppConnector._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.appconnectors_v1.AppConnectorsServiceClient.CreateAppConnector", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService", - "rpcName": "CreateAppConnector", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AppConnectorsServiceRestTransport._CreateAppConnector._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
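-            # from_http_response() picks the GoogleAPICallError subclass that
-            # matches the status code (e.g. 404 -> NotFound, 403 ->
-            # PermissionDenied) and attaches the decoded error details.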
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_app_connector(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_app_connector_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.beyondcorp.appconnectors_v1.AppConnectorsServiceClient.create_app_connector", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService", - "rpcName": "CreateAppConnector", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteAppConnector(_BaseAppConnectorsServiceRestTransport._BaseDeleteAppConnector, AppConnectorsServiceRestStub): - def __hash__(self): - return hash("AppConnectorsServiceRestTransport.DeleteAppConnector") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: app_connectors_service.DeleteAppConnectorRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete app connector method over HTTP. - - Args: - request (~.app_connectors_service.DeleteAppConnectorRequest): - The request object. Request message for - BeyondCorp.DeleteAppConnector. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
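-
-            Note: the raw ``Operation`` is returned here; the client layer
-            wraps it in an operation future that can be polled until the
-            delete completes.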
- - """ - - http_options = _BaseAppConnectorsServiceRestTransport._BaseDeleteAppConnector._get_http_options() - - request, metadata = self._interceptor.pre_delete_app_connector(request, metadata) - transcoded_request = _BaseAppConnectorsServiceRestTransport._BaseDeleteAppConnector._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAppConnectorsServiceRestTransport._BaseDeleteAppConnector._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.appconnectors_v1.AppConnectorsServiceClient.DeleteAppConnector", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService", - "rpcName": "DeleteAppConnector", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AppConnectorsServiceRestTransport._DeleteAppConnector._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_delete_app_connector(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_app_connector_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.beyondcorp.appconnectors_v1.AppConnectorsServiceClient.delete_app_connector", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService", - "rpcName": "DeleteAppConnector", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetAppConnector(_BaseAppConnectorsServiceRestTransport._BaseGetAppConnector, AppConnectorsServiceRestStub): - def __hash__(self): - return hash("AppConnectorsServiceRestTransport.GetAppConnector") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: app_connectors_service.GetAppConnectorRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - 
metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> app_connectors_service.AppConnector: - r"""Call the get app connector method over HTTP. - - Args: - request (~.app_connectors_service.GetAppConnectorRequest): - The request object. Request message for - BeyondCorp.GetAppConnector. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.app_connectors_service.AppConnector: - A BeyondCorp connector resource that - represents an application facing - component deployed proximal to and with - direct access to the application - instances. It is used to establish - connectivity between the remote - enterprise environment and GCP. It - initiates connections to the - applications and can proxy the data from - users over the connection. - - """ - - http_options = _BaseAppConnectorsServiceRestTransport._BaseGetAppConnector._get_http_options() - - request, metadata = self._interceptor.pre_get_app_connector(request, metadata) - transcoded_request = _BaseAppConnectorsServiceRestTransport._BaseGetAppConnector._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAppConnectorsServiceRestTransport._BaseGetAppConnector._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.appconnectors_v1.AppConnectorsServiceClient.GetAppConnector", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService", - "rpcName": "GetAppConnector", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AppConnectorsServiceRestTransport._GetAppConnector._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
-            if response.status_code >= 400:
-                raise core_exceptions.from_http_response(response)
-
-            # Return the response
-            # AppConnector is a proto-plus message: build the wrapper first,
-            # then parse the JSON body into its underlying protobuf so the
-            # wrapper is populated in place.
-            resp = app_connectors_service.AppConnector()
-            pb_resp = app_connectors_service.AppConnector.pb(resp)
-
-            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
-
-            resp = self._interceptor.post_get_app_connector(resp)
-            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
-            resp, _ = self._interceptor.post_get_app_connector_with_metadata(resp, response_metadata)
-            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
-                try:
-                    response_payload = app_connectors_service.AppConnector.to_json(resp)
-                except:
-                    response_payload = None
-                http_response = {
-                    "payload": response_payload,
-                    "headers": dict(response.headers),
-                    "status": response.status_code,
-                }
-                _LOGGER.debug(
-                    "Received response for google.cloud.beyondcorp.appconnectors_v1.AppConnectorsServiceClient.get_app_connector",
-                    extra = {
-                        "serviceName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService",
-                        "rpcName": "GetAppConnector",
-                        "metadata": http_response["headers"],
-                        "httpResponse": http_response,
-                    },
-                )
-            return resp
-
-    class _ListAppConnectors(_BaseAppConnectorsServiceRestTransport._BaseListAppConnectors, AppConnectorsServiceRestStub):
-        def __hash__(self):
-            return hash("AppConnectorsServiceRestTransport.ListAppConnectors")
-
-        @staticmethod
-        def _get_response(
-            host,
-            metadata,
-            query_params,
-            session,
-            timeout,
-            transcoded_request,
-            body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
-            headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
-            response = getattr(session, method)(
-                "{host}{uri}".format(host=host, uri=uri),
-                timeout=timeout,
-                headers=headers,
-                params=rest_helpers.flatten_query_params(query_params, strict=True),
-                )
-            return response
-
-        def __call__(self,
-                request: app_connectors_service.ListAppConnectorsRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
-                ) -> app_connectors_service.ListAppConnectorsResponse:
-            r"""Call the list app connectors method over HTTP.
-
-            Args:
-                request (~.app_connectors_service.ListAppConnectorsRequest):
-                    The request object. Request message for
-                    BeyondCorp.ListAppConnectors.
-                retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                    should be retried.
-                timeout (float): The timeout for this request.
-                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                    sent along with the request as metadata. Normally, each value must be of type `str`,
-                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                    be of type `bytes`.
-
-            Returns:
-                ~.app_connectors_service.ListAppConnectorsResponse:
-                    Response message for
-                    BeyondCorp.ListAppConnectors.
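-
-            Note: this call returns a single page; the client layer pages
-            through the full collection using the response's
-            ``next_page_token``.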
-
-            """
-
-            http_options = _BaseAppConnectorsServiceRestTransport._BaseListAppConnectors._get_http_options()
-
-            request, metadata = self._interceptor.pre_list_app_connectors(request, metadata)
-            transcoded_request = _BaseAppConnectorsServiceRestTransport._BaseListAppConnectors._get_transcoded_request(http_options, request)
-
-            # Jsonify the query params
-            query_params = _BaseAppConnectorsServiceRestTransport._BaseListAppConnectors._get_query_params_json(transcoded_request)
-
-            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
-                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
-                method = transcoded_request['method']
-                try:
-                    request_payload = type(request).to_json(request)
-                except:
-                    request_payload = None
-                http_request = {
-                    "payload": request_payload,
-                    "requestMethod": method,
-                    "requestUrl": request_url,
-                    "headers": dict(metadata),
-                }
-                _LOGGER.debug(
-                    f"Sending request for google.cloud.beyondcorp.appconnectors_v1.AppConnectorsServiceClient.ListAppConnectors",
-                    extra = {
-                        "serviceName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService",
-                        "rpcName": "ListAppConnectors",
-                        "httpRequest": http_request,
-                        "metadata": http_request["headers"],
-                    },
-                )
-
-            # Send the request
-            response = AppConnectorsServiceRestTransport._ListAppConnectors._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
-
-            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
-            # subclass.
-            if response.status_code >= 400:
-                raise core_exceptions.from_http_response(response)
-
-            # Return the response
-            resp = app_connectors_service.ListAppConnectorsResponse()
-            pb_resp = app_connectors_service.ListAppConnectorsResponse.pb(resp)
-
-            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
-
-            resp = self._interceptor.post_list_app_connectors(resp)
-            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
-            resp, _ = self._interceptor.post_list_app_connectors_with_metadata(resp, response_metadata)
-            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
-                try:
-                    response_payload = app_connectors_service.ListAppConnectorsResponse.to_json(resp)
-                except:
-                    response_payload = None
-                http_response = {
-                    "payload": response_payload,
-                    "headers": dict(response.headers),
-                    "status": response.status_code,
-                }
-                _LOGGER.debug(
-                    "Received response for google.cloud.beyondcorp.appconnectors_v1.AppConnectorsServiceClient.list_app_connectors",
-                    extra = {
-                        "serviceName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService",
-                        "rpcName": "ListAppConnectors",
-                        "metadata": http_response["headers"],
-                        "httpResponse": http_response,
-                    },
-                )
-            return resp
-
-    class _ReportStatus(_BaseAppConnectorsServiceRestTransport._BaseReportStatus, AppConnectorsServiceRestStub):
-        def __hash__(self):
-            return hash("AppConnectorsServiceRestTransport.ReportStatus")
-
-        @staticmethod
-        def _get_response(
-            host,
-            metadata,
-            query_params,
-            session,
-            timeout,
-            transcoded_request,
-            body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
-            headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
-            response = getattr(session, method)(
-                "{host}{uri}".format(host=host, uri=uri),
-                timeout=timeout,
-                headers=headers,
-                params=rest_helpers.flatten_query_params(query_params, strict=True),
-                data=body,
-                )
-            return response
-
-        def __call__(self,
-                request:
app_connectors_service.ReportStatusRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the report status method over HTTP. - - Args: - request (~.app_connectors_service.ReportStatusRequest): - The request object. Request report the connector status. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseAppConnectorsServiceRestTransport._BaseReportStatus._get_http_options() - - request, metadata = self._interceptor.pre_report_status(request, metadata) - transcoded_request = _BaseAppConnectorsServiceRestTransport._BaseReportStatus._get_transcoded_request(http_options, request) - - body = _BaseAppConnectorsServiceRestTransport._BaseReportStatus._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAppConnectorsServiceRestTransport._BaseReportStatus._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.appconnectors_v1.AppConnectorsServiceClient.ReportStatus", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService", - "rpcName": "ReportStatus", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AppConnectorsServiceRestTransport._ReportStatus._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_report_status(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_report_status_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.beyondcorp.appconnectors_v1.AppConnectorsServiceClient.report_status", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService", - "rpcName": "ReportStatus", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateAppConnector(_BaseAppConnectorsServiceRestTransport._BaseUpdateAppConnector, AppConnectorsServiceRestStub): - def __hash__(self): - return hash("AppConnectorsServiceRestTransport.UpdateAppConnector") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: app_connectors_service.UpdateAppConnectorRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the update app connector method over HTTP. - - Args: - request (~.app_connectors_service.UpdateAppConnectorRequest): - The request object. Request message for - BeyondCorp.UpdateAppConnector. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
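-
-            Note: only the fields named in the request's ``update_mask`` are
-            overwritten; paths left out of the mask are not modified.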
- - """ - - http_options = _BaseAppConnectorsServiceRestTransport._BaseUpdateAppConnector._get_http_options() - - request, metadata = self._interceptor.pre_update_app_connector(request, metadata) - transcoded_request = _BaseAppConnectorsServiceRestTransport._BaseUpdateAppConnector._get_transcoded_request(http_options, request) - - body = _BaseAppConnectorsServiceRestTransport._BaseUpdateAppConnector._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAppConnectorsServiceRestTransport._BaseUpdateAppConnector._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.appconnectors_v1.AppConnectorsServiceClient.UpdateAppConnector", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService", - "rpcName": "UpdateAppConnector", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AppConnectorsServiceRestTransport._UpdateAppConnector._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_app_connector(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_app_connector_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.beyondcorp.appconnectors_v1.AppConnectorsServiceClient.update_app_connector", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService", - "rpcName": "UpdateAppConnector", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - @property - def create_app_connector(self) -> Callable[ - [app_connectors_service.CreateAppConnectorRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateAppConnector(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_app_connector(self) -> Callable[ - [app_connectors_service.DeleteAppConnectorRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._DeleteAppConnector(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_app_connector(self) -> Callable[ - [app_connectors_service.GetAppConnectorRequest], - app_connectors_service.AppConnector]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetAppConnector(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_app_connectors(self) -> Callable[ - [app_connectors_service.ListAppConnectorsRequest], - app_connectors_service.ListAppConnectorsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListAppConnectors(self._session, self._host, self._interceptor) # type: ignore - - @property - def report_status(self) -> Callable[ - [app_connectors_service.ReportStatusRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ReportStatus(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_app_connector(self) -> Callable[ - [app_connectors_service.UpdateAppConnectorRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateAppConnector(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_location(self): - return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore - - class _GetLocation(_BaseAppConnectorsServiceRestTransport._BaseGetLocation, AppConnectorsServiceRestStub): - def __hash__(self): - return hash("AppConnectorsServiceRestTransport.GetLocation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: locations_pb2.GetLocationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.Location: - - r"""Call the get location method over HTTP. - - Args: - request (locations_pb2.GetLocationRequest): - The request object for GetLocation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - locations_pb2.Location: Response from GetLocation method. 
- """ - - http_options = _BaseAppConnectorsServiceRestTransport._BaseGetLocation._get_http_options() - - request, metadata = self._interceptor.pre_get_location(request, metadata) - transcoded_request = _BaseAppConnectorsServiceRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAppConnectorsServiceRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.appconnectors_v1.AppConnectorsServiceClient.GetLocation", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService", - "rpcName": "GetLocation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AppConnectorsServiceRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = locations_pb2.Location() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_location(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.beyondcorp.appconnectors_v1.AppConnectorsServiceAsyncClient.GetLocation", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService", - "rpcName": "GetLocation", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def list_locations(self): - return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore - - class _ListLocations(_BaseAppConnectorsServiceRestTransport._BaseListLocations, AppConnectorsServiceRestStub): - def __hash__(self): - return hash("AppConnectorsServiceRestTransport.ListLocations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: locations_pb2.ListLocationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.ListLocationsResponse: - - r"""Call the list locations method 
over HTTP. - - Args: - request (locations_pb2.ListLocationsRequest): - The request object for ListLocations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - locations_pb2.ListLocationsResponse: Response from ListLocations method. - """ - - http_options = _BaseAppConnectorsServiceRestTransport._BaseListLocations._get_http_options() - - request, metadata = self._interceptor.pre_list_locations(request, metadata) - transcoded_request = _BaseAppConnectorsServiceRestTransport._BaseListLocations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAppConnectorsServiceRestTransport._BaseListLocations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.appconnectors_v1.AppConnectorsServiceClient.ListLocations", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService", - "rpcName": "ListLocations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AppConnectorsServiceRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = locations_pb2.ListLocationsResponse() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_list_locations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.beyondcorp.appconnectors_v1.AppConnectorsServiceAsyncClient.ListLocations", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService", - "rpcName": "ListLocations", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def get_iam_policy(self): - return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - - class _GetIamPolicy(_BaseAppConnectorsServiceRestTransport._BaseGetIamPolicy, AppConnectorsServiceRestStub): - def __hash__(self): - return hash("AppConnectorsServiceRestTransport.GetIamPolicy") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: iam_policy_pb2.GetIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> policy_pb2.Policy: - - r"""Call the get iam policy method over HTTP. - - Args: - request (iam_policy_pb2.GetIamPolicyRequest): - The request object for GetIamPolicy method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - policy_pb2.Policy: Response from GetIamPolicy method. 
- """ - - http_options = _BaseAppConnectorsServiceRestTransport._BaseGetIamPolicy._get_http_options() - - request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) - transcoded_request = _BaseAppConnectorsServiceRestTransport._BaseGetIamPolicy._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAppConnectorsServiceRestTransport._BaseGetIamPolicy._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.appconnectors_v1.AppConnectorsServiceClient.GetIamPolicy", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService", - "rpcName": "GetIamPolicy", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AppConnectorsServiceRestTransport._GetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = policy_pb2.Policy() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_iam_policy(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.beyondcorp.appconnectors_v1.AppConnectorsServiceAsyncClient.GetIamPolicy", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService", - "rpcName": "GetIamPolicy", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def set_iam_policy(self): - return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - - class _SetIamPolicy(_BaseAppConnectorsServiceRestTransport._BaseSetIamPolicy, AppConnectorsServiceRestStub): - def __hash__(self): - return hash("AppConnectorsServiceRestTransport.SetIamPolicy") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: iam_policy_pb2.SetIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> policy_pb2.Policy: - - r"""Call the set iam policy method 
over HTTP. - - Args: - request (iam_policy_pb2.SetIamPolicyRequest): - The request object for SetIamPolicy method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - policy_pb2.Policy: Response from SetIamPolicy method. - """ - - http_options = _BaseAppConnectorsServiceRestTransport._BaseSetIamPolicy._get_http_options() - - request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) - transcoded_request = _BaseAppConnectorsServiceRestTransport._BaseSetIamPolicy._get_transcoded_request(http_options, request) - - body = _BaseAppConnectorsServiceRestTransport._BaseSetIamPolicy._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAppConnectorsServiceRestTransport._BaseSetIamPolicy._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.appconnectors_v1.AppConnectorsServiceClient.SetIamPolicy", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService", - "rpcName": "SetIamPolicy", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AppConnectorsServiceRestTransport._SetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = policy_pb2.Policy() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_set_iam_policy(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.beyondcorp.appconnectors_v1.AppConnectorsServiceAsyncClient.SetIamPolicy", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService", - "rpcName": "SetIamPolicy", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def test_iam_permissions(self): - return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore - - class _TestIamPermissions(_BaseAppConnectorsServiceRestTransport._BaseTestIamPermissions, AppConnectorsServiceRestStub): - def __hash__(self): - return hash("AppConnectorsServiceRestTransport.TestIamPermissions") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: iam_policy_pb2.TestIamPermissionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - - r"""Call the test iam permissions method over HTTP. - - Args: - request (iam_policy_pb2.TestIamPermissionsRequest): - The request object for TestIamPermissions method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. 
- """ - - http_options = _BaseAppConnectorsServiceRestTransport._BaseTestIamPermissions._get_http_options() - - request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) - transcoded_request = _BaseAppConnectorsServiceRestTransport._BaseTestIamPermissions._get_transcoded_request(http_options, request) - - body = _BaseAppConnectorsServiceRestTransport._BaseTestIamPermissions._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAppConnectorsServiceRestTransport._BaseTestIamPermissions._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.appconnectors_v1.AppConnectorsServiceClient.TestIamPermissions", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService", - "rpcName": "TestIamPermissions", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AppConnectorsServiceRestTransport._TestIamPermissions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = iam_policy_pb2.TestIamPermissionsResponse() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_test_iam_permissions(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.beyondcorp.appconnectors_v1.AppConnectorsServiceAsyncClient.TestIamPermissions", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService", - "rpcName": "TestIamPermissions", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - - class _CancelOperation(_BaseAppConnectorsServiceRestTransport._BaseCancelOperation, AppConnectorsServiceRestStub): - def __hash__(self): - return hash("AppConnectorsServiceRestTransport.CancelOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: 
operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - - r"""Call the cancel operation method over HTTP. - - Args: - request (operations_pb2.CancelOperationRequest): - The request object for CancelOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - - http_options = _BaseAppConnectorsServiceRestTransport._BaseCancelOperation._get_http_options() - - request, metadata = self._interceptor.pre_cancel_operation(request, metadata) - transcoded_request = _BaseAppConnectorsServiceRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) - - body = _BaseAppConnectorsServiceRestTransport._BaseCancelOperation._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAppConnectorsServiceRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.appconnectors_v1.AppConnectorsServiceClient.CancelOperation", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService", - "rpcName": "CancelOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AppConnectorsServiceRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_cancel_operation(None) - - @property - def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - - class _DeleteOperation(_BaseAppConnectorsServiceRestTransport._BaseDeleteOperation, AppConnectorsServiceRestStub): - def __hash__(self): - return hash("AppConnectorsServiceRestTransport.DeleteOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - - r"""Call the delete operation method over HTTP. - - Args: - request (operations_pb2.DeleteOperationRequest): - The request object for DeleteOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - - http_options = _BaseAppConnectorsServiceRestTransport._BaseDeleteOperation._get_http_options() - - request, metadata = self._interceptor.pre_delete_operation(request, metadata) - transcoded_request = _BaseAppConnectorsServiceRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAppConnectorsServiceRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.appconnectors_v1.AppConnectorsServiceClient.DeleteOperation", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService", - "rpcName": "DeleteOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AppConnectorsServiceRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_delete_operation(None) - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(_BaseAppConnectorsServiceRestTransport._BaseGetOperation, AppConnectorsServiceRestStub): - def __hash__(self): - return hash("AppConnectorsServiceRestTransport.GetOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.Operation: Response from GetOperation method. - """ - - http_options = _BaseAppConnectorsServiceRestTransport._BaseGetOperation._get_http_options() - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - transcoded_request = _BaseAppConnectorsServiceRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAppConnectorsServiceRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.appconnectors_v1.AppConnectorsServiceClient.GetOperation", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService", - "rpcName": "GetOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AppConnectorsServiceRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.Operation() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.beyondcorp.appconnectors_v1.AppConnectorsServiceAsyncClient.GetOperation", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService", - "rpcName": "GetOperation", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - - class _ListOperations(_BaseAppConnectorsServiceRestTransport._BaseListOperations, AppConnectorsServiceRestStub): - def __hash__(self): - return hash("AppConnectorsServiceRestTransport.ListOperations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.ListOperationsResponse: - - r"""Call the list operations method over HTTP. - - Args: - request (operations_pb2.ListOperationsRequest): - The request object for ListOperations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.ListOperationsResponse: Response from ListOperations method. 
- """ - - http_options = _BaseAppConnectorsServiceRestTransport._BaseListOperations._get_http_options() - - request, metadata = self._interceptor.pre_list_operations(request, metadata) - transcoded_request = _BaseAppConnectorsServiceRestTransport._BaseListOperations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAppConnectorsServiceRestTransport._BaseListOperations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.appconnectors_v1.AppConnectorsServiceClient.ListOperations", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService", - "rpcName": "ListOperations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AppConnectorsServiceRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.ListOperationsResponse() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_list_operations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.beyondcorp.appconnectors_v1.AppConnectorsServiceAsyncClient.ListOperations", - extra = { - "serviceName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService", - "rpcName": "ListOperations", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'AppConnectorsServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/transports/rest_base.py b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/transports/rest_base.py deleted file mode 100644 index 5b9956fa606e..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/transports/rest_base.py +++ /dev/null @@ -1,643 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from .base import AppConnectorsServiceTransport, DEFAULT_CLIENT_INFO - -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - - -from google.cloud.beyondcorp_appconnectors_v1.types import app_connectors_service -from google.longrunning import operations_pb2 # type: ignore - - -class _BaseAppConnectorsServiceRestTransport(AppConnectorsServiceTransport): - """Base REST backend transport for AppConnectorsService. - - Note: This class is not meant to be used directly. Use its sync and - async sub-classes instead. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'beyondcorp.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - Args: - host (Optional[str]): - The hostname to connect to (default: 'beyondcorp.googleapis.com'). - credentials (Optional[Any]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. 
- """ - # Run the base constructor - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - - class _BaseCreateAppConnector: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/appConnectors', - 'body': 'app_connector', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = app_connectors_service.CreateAppConnectorRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAppConnectorsServiceRestTransport._BaseCreateAppConnector._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteAppConnector: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/appConnectors/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = app_connectors_service.DeleteAppConnectorRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAppConnectorsServiceRestTransport._BaseDeleteAppConnector._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetAppConnector: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - 
@staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/appConnectors/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = app_connectors_service.GetAppConnectorRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAppConnectorsServiceRestTransport._BaseGetAppConnector._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListAppConnectors: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/appConnectors', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = app_connectors_service.ListAppConnectorsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAppConnectorsServiceRestTransport._BaseListAppConnectors._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseReportStatus: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{app_connector=projects/*/locations/*/appConnectors/*}:reportStatus', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = app_connectors_service.ReportStatusRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAppConnectorsServiceRestTransport._BaseReportStatus._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateAppConnector: - def __hash__(self): # 
pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{app_connector.name=projects/*/locations/*/appConnectors/*}', - 'body': 'app_connector', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = app_connectors_service.UpdateAppConnectorRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAppConnectorsServiceRestTransport._BaseUpdateAppConnector._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetLocation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseListLocations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*}/locations', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseGetIamPolicy: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{resource=projects/*/locations/*/appConnections/*}:getIamPolicy', - }, - { - 'method': 'get', - 'uri': '/v1/{resource=projects/*/locations/*/appConnectors/*}:getIamPolicy', - }, - { - 'method': 'get', - 'uri': '/v1/{resource=projects/*/locations/*/appGateways/*}:getIamPolicy', - }, - { - 'method': 'get', - 'uri': '/v1/{resource=projects/*/locations/*/clientConnectorServices/*}:getIamPolicy', - }, - { - 'method': 'get', - 'uri': 
'/v1/{resource=projects/*/locations/*/clientGateways/*}:getIamPolicy', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseSetIamPolicy: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/appConnections/*}:setIamPolicy', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/appConnectors/*}:setIamPolicy', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/appGateways/*}:setIamPolicy', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/clientConnectorServices/*}:setIamPolicy', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/clientGateways/*}:setIamPolicy', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - body = json.dumps(transcoded_request['body']) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseTestIamPermissions: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/appConnections/*}:testIamPermissions', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/appConnectors/*}:testIamPermissions', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/appGateways/*}:testIamPermissions', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/clientConnectorServices/*}:testIamPermissions', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/clientGateways/*}:testIamPermissions', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - body = json.dumps(transcoded_request['body']) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseCancelOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ 
- 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - body = json.dumps(transcoded_request['body']) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseDeleteOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseGetOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseListOperations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - -__all__=( - '_BaseAppConnectorsServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/types/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/types/__init__.py deleted file mode 100644 index 1be822290f91..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/types/__init__.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance 
with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .app_connector_instance_config import ( - AppConnectorInstanceConfig, - ImageConfig, - NotificationConfig, -) -from .app_connectors_service import ( - AppConnector, - AppConnectorOperationMetadata, - CreateAppConnectorRequest, - DeleteAppConnectorRequest, - GetAppConnectorRequest, - ListAppConnectorsRequest, - ListAppConnectorsResponse, - ReportStatusRequest, - UpdateAppConnectorRequest, -) -from .resource_info import ( - ResourceInfo, - HealthStatus, -) - -__all__ = ( - 'AppConnectorInstanceConfig', - 'ImageConfig', - 'NotificationConfig', - 'AppConnector', - 'AppConnectorOperationMetadata', - 'CreateAppConnectorRequest', - 'DeleteAppConnectorRequest', - 'GetAppConnectorRequest', - 'ListAppConnectorsRequest', - 'ListAppConnectorsResponse', - 'ReportStatusRequest', - 'UpdateAppConnectorRequest', - 'ResourceInfo', - 'HealthStatus', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/types/app_connector_instance_config.py b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/types/app_connector_instance_config.py deleted file mode 100644 index c95b3f52fcc2..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/types/app_connector_instance_config.py +++ /dev/null @@ -1,137 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import any_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.beyondcorp.appconnectors.v1', - manifest={ - 'AppConnectorInstanceConfig', - 'NotificationConfig', - 'ImageConfig', - }, -) - - -class AppConnectorInstanceConfig(proto.Message): - r"""AppConnectorInstanceConfig defines the instance config of an - AppConnector. - - Attributes: - sequence_number (int): - Required. A monotonically increasing number - generated and maintained by the API provider. - Every time a config changes in the backend, the - sequenceNumber should be bumped up to reflect - the change. - instance_config (google.protobuf.any_pb2.Any): - The SLM instance agent configuration. - notification_config (google.cloud.beyondcorp_appconnectors_v1.types.NotificationConfig): - NotificationConfig defines the notification - mechanism that the remote instance should - subscribe to in order to receive notifications.
- image_config (google.cloud.beyondcorp_appconnectors_v1.types.ImageConfig): - ImageConfig defines the GCR images to run for - the remote agent's control plane. - """ - - sequence_number: int = proto.Field( - proto.INT64, - number=1, - ) - instance_config: any_pb2.Any = proto.Field( - proto.MESSAGE, - number=2, - message=any_pb2.Any, - ) - notification_config: 'NotificationConfig' = proto.Field( - proto.MESSAGE, - number=3, - message='NotificationConfig', - ) - image_config: 'ImageConfig' = proto.Field( - proto.MESSAGE, - number=4, - message='ImageConfig', - ) - - -class NotificationConfig(proto.Message): - r"""NotificationConfig defines the mechanisms to notify the instance - agent. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - pubsub_notification (google.cloud.beyondcorp_appconnectors_v1.types.NotificationConfig.CloudPubSubNotificationConfig): - Cloud Pub/Sub Configuration to receive - notifications. - - This field is a member of `oneof`_ ``config``. - """ - - class CloudPubSubNotificationConfig(proto.Message): - r"""The configuration for Pub/Sub messaging for the AppConnector. - - Attributes: - pubsub_subscription (str): - The Pub/Sub subscription the AppConnector - uses to receive notifications. - """ - - pubsub_subscription: str = proto.Field( - proto.STRING, - number=1, - ) - - pubsub_notification: CloudPubSubNotificationConfig = proto.Field( - proto.MESSAGE, - number=1, - oneof='config', - message=CloudPubSubNotificationConfig, - ) - - -class ImageConfig(proto.Message): - r"""ImageConfig defines the control plane images to run. - - Attributes: - target_image (str): - The initial image the remote agent will - attempt to run for the control plane. - stable_image (str): - The stable image that the remote agent will - fall back to if the target image fails. - """ - - target_image: str = proto.Field( - proto.STRING, - number=1, - ) - stable_image: str = proto.Field( - proto.STRING, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/types/app_connectors_service.py b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/types/app_connectors_service.py deleted file mode 100644 index 367e285b480d..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/types/app_connectors_service.py +++ /dev/null @@ -1,573 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.beyondcorp_appconnectors_v1.types import resource_info as gcba_resource_info -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.beyondcorp.appconnectors.v1', - manifest={ - 'ListAppConnectorsRequest', - 'ListAppConnectorsResponse', - 'GetAppConnectorRequest', - 'CreateAppConnectorRequest', - 'UpdateAppConnectorRequest', - 'DeleteAppConnectorRequest', - 'ReportStatusRequest', - 'AppConnector', - 'AppConnectorOperationMetadata', - }, -) - - -class ListAppConnectorsRequest(proto.Message): - r"""Request message for BeyondCorp.ListAppConnectors. - - Attributes: - parent (str): - Required. The resource name of the AppConnector location - using the form: - ``projects/{project_id}/locations/{location_id}`` - page_size (int): - Optional. The maximum number of items to return. If not - specified, a default value of 50 will be used by the - service. Regardless of the page_size value, the response may - include a partial list and a caller should only rely on - response's - [next_page_token][BeyondCorp.ListAppConnectorsResponse.next_page_token] - to determine if there are more instances left to be queried. - page_token (str): - Optional. The next_page_token value returned from a previous - ListAppConnectorsRequest, if any. - filter (str): - Optional. A filter specifying constraints of - a list operation. - order_by (str): - Optional. Specifies the ordering of results. See `Sorting - order <https://cloud.google.com/apis/design/design_patterns#sorting_order>`__ - for more information. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListAppConnectorsResponse(proto.Message): - r"""Response message for BeyondCorp.ListAppConnectors. - - Attributes: - app_connectors (MutableSequence[google.cloud.beyondcorp_appconnectors_v1.types.AppConnector]): - A list of BeyondCorp AppConnectors in the - project. - next_page_token (str): - A token to retrieve the next page of results, - or empty if there are no more results in the - list. - unreachable (MutableSequence[str]): - A list of locations that could not be - reached. - """ - - @property - def raw_page(self): - return self - - app_connectors: MutableSequence['AppConnector'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='AppConnector', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class GetAppConnectorRequest(proto.Message): - r"""Request message for BeyondCorp.GetAppConnector. - - Attributes: - name (str): - Required. BeyondCorp AppConnector name using the form: - ``projects/{project_id}/locations/{location_id}/appConnectors/{app_connector_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateAppConnectorRequest(proto.Message): - r"""Request message for BeyondCorp.CreateAppConnector. - - Attributes: - parent (str): - Required. The resource project name of the AppConnector - location using the form: - ``projects/{project_id}/locations/{location_id}`` - app_connector_id (str): - Optional.
User-settable AppConnector resource ID. - - - Must start with a letter. - - Must contain between 4-63 characters from - ``/[a-z][0-9]-/``. - - Must end with a number or a letter. - app_connector (google.cloud.beyondcorp_appconnectors_v1.types.AppConnector): - Required. A BeyondCorp AppConnector resource. - request_id (str): - Optional. An optional request ID to identify - requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes since the first request. - - For example, consider a situation where you make - an initial request and the request times out. - If you make the request again with the same - request ID, the server can check if the original - operation with the same request ID was received, - and if so, will ignore the second request. This - prevents clients from accidentally creating - duplicate commitments. - - The request ID must be a valid UUID with the - exception that zero UUID is not supported - (00000000-0000-0000-0000-000000000000). - validate_only (bool): - Optional. If set, validates request by - executing a dry-run which would not alter the - resource in any way. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - app_connector_id: str = proto.Field( - proto.STRING, - number=2, - ) - app_connector: 'AppConnector' = proto.Field( - proto.MESSAGE, - number=3, - message='AppConnector', - ) - request_id: str = proto.Field( - proto.STRING, - number=4, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=5, - ) - - -class UpdateAppConnectorRequest(proto.Message): - r"""Request message for BeyondCorp.UpdateAppConnector. - - Attributes: - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. At least one path must - be supplied in this field. The elements of the repeated - paths field may only include these fields from - [BeyondCorp.AppConnector]: - - - ``labels`` - - ``display_name`` - app_connector (google.cloud.beyondcorp_appconnectors_v1.types.AppConnector): - Required. AppConnector message with updated fields. Only - supported fields specified in update_mask are updated. - request_id (str): - Optional. An optional request ID to identify - requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes since the first request. - - For example, consider a situation where you make - an initial request and the request times out. - If you make the request again with the same - request ID, the server can check if the original - operation with the same request ID was received, - and if so, will ignore the second request. This - prevents clients from accidentally creating - duplicate commitments. - - The request ID must be a valid UUID with the - exception that zero UUID is not supported - (00000000-0000-0000-0000-000000000000). - validate_only (bool): - Optional. If set, validates request by - executing a dry-run which would not alter the - resource in any way.
- """ - - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=1, - message=field_mask_pb2.FieldMask, - ) - app_connector: 'AppConnector' = proto.Field( - proto.MESSAGE, - number=2, - message='AppConnector', - ) - request_id: str = proto.Field( - proto.STRING, - number=3, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=4, - ) - - -class DeleteAppConnectorRequest(proto.Message): - r"""Request message for BeyondCorp.DeleteAppConnector. - - Attributes: - name (str): - Required. BeyondCorp AppConnector name using the form: - ``projects/{project_id}/locations/{location_id}/appConnectors/{app_connector_id}`` - request_id (str): - Optional. An optional request ID to identify - requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes after the first request. - - For example, consider a situation where you make - an initial request and the request times out. - If you make the request again with the same - request ID, the server can check if the original - operation with the same request ID was received, - and if so, will ignore the second request. This - prevents clients from accidentally creating - duplicate commitments. - - The request ID must be a valid UUID with the - exception that zero UUID is not supported - (00000000-0000-0000-0000-000000000000). - validate_only (bool): - Optional. If set, validates request by - executing a dry-run which would not alter the - resource in any way. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - request_id: str = proto.Field( - proto.STRING, - number=2, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class ReportStatusRequest(proto.Message): - r"""Request to report the connector status. - - Attributes: - app_connector (str): - Required. BeyondCorp Connector name using the form: - ``projects/{project_id}/locations/{location_id}/connectors/{connector}`` - resource_info (google.cloud.beyondcorp_appconnectors_v1.types.ResourceInfo): - Required. Resource info of the connector. - request_id (str): - Optional. An optional request ID to identify - requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes since the first request. - - For example, consider a situation where you make - an initial request and the request times out. - If you make the request again with the same - request ID, the server can check if the original - operation with the same request ID was received, - and if so, will ignore the second request. This - prevents clients from accidentally creating - duplicate commitments. - - The request ID must be a valid UUID with the - exception that zero UUID is not supported - (00000000-0000-0000-0000-000000000000). - validate_only (bool): - Optional. If set, validates request by - executing a dry-run which would not alter the - resource in any way.
- """ - - app_connector: str = proto.Field( - proto.STRING, - number=1, - ) - resource_info: gcba_resource_info.ResourceInfo = proto.Field( - proto.MESSAGE, - number=2, - message=gcba_resource_info.ResourceInfo, - ) - request_id: str = proto.Field( - proto.STRING, - number=3, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=4, - ) - - -class AppConnector(proto.Message): - r"""A BeyondCorp connector resource that represents an - application-facing component deployed proximal to and with - direct access to the application instances. It is used to - establish connectivity between the remote enterprise environment - and GCP. It initiates connections to the applications and can - proxy the data from users over the connection. - - Attributes: - name (str): - Required. Unique resource name of the - AppConnector. The name is ignored when creating - an AppConnector. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Timestamp when the resource was - created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Timestamp when the resource was - last modified. - labels (MutableMapping[str, str]): - Optional. Resource labels to represent user - provided metadata. - display_name (str): - Optional. An arbitrary user-provided name for - the AppConnector. Cannot exceed 64 characters. - uid (str): - Output only. A unique identifier for the - instance generated by the system. - state (google.cloud.beyondcorp_appconnectors_v1.types.AppConnector.State): - Output only. The current state of the - AppConnector. - principal_info (google.cloud.beyondcorp_appconnectors_v1.types.AppConnector.PrincipalInfo): - Required. Principal information about the - Identity of the AppConnector. - resource_info (google.cloud.beyondcorp_appconnectors_v1.types.ResourceInfo): - Optional. Resource info of the connector. - """ - class State(proto.Enum): - r"""Represents the different states of an AppConnector. - - Values: - STATE_UNSPECIFIED (0): - Default value. This value is unused. - CREATING (1): - AppConnector is being created. - CREATED (2): - AppConnector has been created. - UPDATING (3): - AppConnector's configuration is being - updated. - DELETING (4): - AppConnector is being deleted. - DOWN (5): - AppConnector is down and may be restored in - the future. This happens when CCFE sends - ProjectState = OFF. - """ - STATE_UNSPECIFIED = 0 - CREATING = 1 - CREATED = 2 - UPDATING = 3 - DELETING = 4 - DOWN = 5 - - class PrincipalInfo(proto.Message): - r"""PrincipalInfo represents an Identity oneof. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - service_account (google.cloud.beyondcorp_appconnectors_v1.types.AppConnector.PrincipalInfo.ServiceAccount): - A GCP service account. - - This field is a member of `oneof`_ ``type``. - """ - - class ServiceAccount(proto.Message): - r"""ServiceAccount represents a GCP service account. - - Attributes: - email (str): - Email address of the service account.
- """ - - email: str = proto.Field( - proto.STRING, - number=1, - ) - - service_account: 'AppConnector.PrincipalInfo.ServiceAccount' = proto.Field( - proto.MESSAGE, - number=1, - oneof='type', - message='AppConnector.PrincipalInfo.ServiceAccount', - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=4, - ) - display_name: str = proto.Field( - proto.STRING, - number=5, - ) - uid: str = proto.Field( - proto.STRING, - number=6, - ) - state: State = proto.Field( - proto.ENUM, - number=7, - enum=State, - ) - principal_info: PrincipalInfo = proto.Field( - proto.MESSAGE, - number=8, - message=PrincipalInfo, - ) - resource_info: gcba_resource_info.ResourceInfo = proto.Field( - proto.MESSAGE, - number=11, - message=gcba_resource_info.ResourceInfo, - ) - - -class AppConnectorOperationMetadata(proto.Message): - r"""Represents the metadata of the long-running operation. - - Attributes: - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the operation was - created. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the operation finished - running. - target (str): - Output only. Server-defined resource path for - the target of the operation. - verb (str): - Output only. Name of the verb executed by the - operation. - status_message (str): - Output only. Human-readable status of the - operation, if any. - requested_cancellation (bool): - Output only. Identifies whether the user has requested - cancellation of the operation. Operations that have - successfully been cancelled have [Operation.error][] value - with a [google.rpc.Status.code][google.rpc.Status.code] of - 1, corresponding to ``Code.CANCELLED``. - api_version (str): - Output only. API version used to start the - operation. - """ - - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - target: str = proto.Field( - proto.STRING, - number=3, - ) - verb: str = proto.Field( - proto.STRING, - number=4, - ) - status_message: str = proto.Field( - proto.STRING, - number=5, - ) - requested_cancellation: bool = proto.Field( - proto.BOOL, - number=6, - ) - api_version: str = proto.Field( - proto.STRING, - number=7, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/types/resource_info.py b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/types/resource_info.py deleted file mode 100644 index 69b9d6296a3b..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/google/cloud/beyondcorp_appconnectors_v1/types/resource_info.py +++ /dev/null @@ -1,121 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import any_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.beyondcorp.appconnectors.v1', - manifest={ - 'HealthStatus', - 'ResourceInfo', - }, -) - - -class HealthStatus(proto.Enum): - r"""HealthStatus represents the health status. - - Values: - HEALTH_STATUS_UNSPECIFIED (0): - Health status is unknown: not initialized or - failed to retrieve. - HEALTHY (1): - The resource is healthy. - UNHEALTHY (2): - The resource is unhealthy. - UNRESPONSIVE (3): - The resource is unresponsive. - DEGRADED (4): - Some sub-resources are UNHEALTHY. - """ - HEALTH_STATUS_UNSPECIFIED = 0 - HEALTHY = 1 - UNHEALTHY = 2 - UNRESPONSIVE = 3 - DEGRADED = 4 - - -class ResourceInfo(proto.Message): - r"""ResourceInfo represents the information/status of an app connector - resource, such as: - - - remote_agent - - - container - - - runtime - - appgateway - - - appconnector - - - appconnection - - - tunnel - - - logagent - - Attributes: - id (str): - Required. Unique Id for the resource. - status (google.cloud.beyondcorp_appconnectors_v1.types.HealthStatus): - Overall health status. Overall status is - derived based on the status of each sub-level - resource. - resource (google.protobuf.any_pb2.Any): - Specific details for the resource. This is - for internal use only. - time (google.protobuf.timestamp_pb2.Timestamp): - The timestamp to collect the info. It is - suggested to be set by the topmost level - resource only. - sub (MutableSequence[google.cloud.beyondcorp_appconnectors_v1.types.ResourceInfo]): - List of Info for the sub-level resources.
- """ - - id: str = proto.Field( - proto.STRING, - number=1, - ) - status: 'HealthStatus' = proto.Field( - proto.ENUM, - number=2, - enum='HealthStatus', - ) - resource: any_pb2.Any = proto.Field( - proto.MESSAGE, - number=3, - message=any_pb2.Any, - ) - time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - sub: MutableSequence['ResourceInfo'] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='ResourceInfo', - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/mypy.ini b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/mypy.ini deleted file mode 100644 index 574c5aed394b..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/noxfile.py b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/noxfile.py deleted file mode 100644 index 101fff37c869..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/noxfile.py +++ /dev/null @@ -1,280 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import pathlib -import re -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", - "3.12", - "3.13", -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = 'google-cloud-beyondcorp-appconnectors' - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.13" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "prerelease_deps", -] - -@nox.session(python=ALL_PYTHON) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def unit(session, protobuf_implementation): - """Run the unit test suite.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") - - # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. - # The 'cpp' implementation requires Protobuf<4. 
- if protobuf_implementation == "cpp": - session.install("protobuf<4") - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/beyondcorp_appconnectors_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - -@nox.session(python=ALL_PYTHON[-1]) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def prerelease_deps(session, protobuf_implementation): - """Run the unit test suite against pre-release versions of dependencies.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - # Install test environment dependencies - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - - # Install the package without dependencies - session.install('-e', '.', '--no-deps') - - # We test the minimum dependency versions using the minimum Python - # version so the lowest python runtime that we test has a corresponding constraints - # file, located at `testing/constraints-{ALL_PYTHON[0]}.txt`, which contains all of the - # dependencies and extras. - with open( - CURRENT_DIRECTORY - / "testing" - / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. - constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - session.install(*constraints_deps) - - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - # Exclude grpcio 1.67.0rc1, which does not support python 3.13 - "grpcio!=1.67.0rc1", - "grpcio-status", - "protobuf", - "proto-plus", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) - - # Remaining dependencies - other_deps = [ - "requests", - ] - session.install(*other_deps) - - # Print out prerelease package versions - - session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") - session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("python", "-c", "import grpc; print(grpc.__version__)") - session.run( - "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" - ) - session.run( - "python", "-c", "import proto; print(proto.__version__)" - ) - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/beyondcorp_appconnectors_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data.
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/samples/generated_samples/beyondcorp_v1_generated_app_connectors_service_create_app_connector_async.py b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/samples/generated_samples/beyondcorp_v1_generated_app_connectors_service_create_app_connector_async.py deleted file mode 100644 index fe5d101e5348..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/samples/generated_samples/beyondcorp_v1_generated_app_connectors_service_create_app_connector_async.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateAppConnector -# NOTE: This snippet has been automatically generated for illustrative purposes only. 
-# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-beyondcorp-appconnectors - - -# [START beyondcorp_v1_generated_AppConnectorsService_CreateAppConnector_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import beyondcorp_appconnectors_v1 - - -async def sample_create_app_connector(): - # Create a client - client = beyondcorp_appconnectors_v1.AppConnectorsServiceAsyncClient() - - # Initialize request argument(s) - app_connector = beyondcorp_appconnectors_v1.AppConnector() - app_connector.name = "name_value" - - request = beyondcorp_appconnectors_v1.CreateAppConnectorRequest( - parent="parent_value", - app_connector=app_connector, - ) - - # Make the request - operation = client.create_app_connector(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END beyondcorp_v1_generated_AppConnectorsService_CreateAppConnector_async] diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/samples/generated_samples/beyondcorp_v1_generated_app_connectors_service_create_app_connector_sync.py b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/samples/generated_samples/beyondcorp_v1_generated_app_connectors_service_create_app_connector_sync.py deleted file mode 100644 index 37972ba2e6b5..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/samples/generated_samples/beyondcorp_v1_generated_app_connectors_service_create_app_connector_sync.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateAppConnector -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-beyondcorp-appconnectors - - -# [START beyondcorp_v1_generated_AppConnectorsService_CreateAppConnector_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import beyondcorp_appconnectors_v1 - - -def sample_create_app_connector(): - # Create a client - client = beyondcorp_appconnectors_v1.AppConnectorsServiceClient() - - # Initialize request argument(s) - app_connector = beyondcorp_appconnectors_v1.AppConnector() - app_connector.name = "name_value" - - request = beyondcorp_appconnectors_v1.CreateAppConnectorRequest( - parent="parent_value", - app_connector=app_connector, - ) - - # Make the request - operation = client.create_app_connector(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END beyondcorp_v1_generated_AppConnectorsService_CreateAppConnector_sync] diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/samples/generated_samples/beyondcorp_v1_generated_app_connectors_service_delete_app_connector_async.py b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/samples/generated_samples/beyondcorp_v1_generated_app_connectors_service_delete_app_connector_async.py deleted file mode 100644 index 58c9b7e0ae4a..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/samples/generated_samples/beyondcorp_v1_generated_app_connectors_service_delete_app_connector_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteAppConnector -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-beyondcorp-appconnectors - - -# [START beyondcorp_v1_generated_AppConnectorsService_DeleteAppConnector_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import beyondcorp_appconnectors_v1 - - -async def sample_delete_app_connector(): - # Create a client - client = beyondcorp_appconnectors_v1.AppConnectorsServiceAsyncClient() - - # Initialize request argument(s) - request = beyondcorp_appconnectors_v1.DeleteAppConnectorRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_app_connector(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END beyondcorp_v1_generated_AppConnectorsService_DeleteAppConnector_async] diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/samples/generated_samples/beyondcorp_v1_generated_app_connectors_service_delete_app_connector_sync.py b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/samples/generated_samples/beyondcorp_v1_generated_app_connectors_service_delete_app_connector_sync.py deleted file mode 100644 index 83a4bd2c1000..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/samples/generated_samples/beyondcorp_v1_generated_app_connectors_service_delete_app_connector_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteAppConnector -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-beyondcorp-appconnectors - - -# [START beyondcorp_v1_generated_AppConnectorsService_DeleteAppConnector_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import beyondcorp_appconnectors_v1 - - -def sample_delete_app_connector(): - # Create a client - client = beyondcorp_appconnectors_v1.AppConnectorsServiceClient() - - # Initialize request argument(s) - request = beyondcorp_appconnectors_v1.DeleteAppConnectorRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_app_connector(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END beyondcorp_v1_generated_AppConnectorsService_DeleteAppConnector_sync] diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/samples/generated_samples/beyondcorp_v1_generated_app_connectors_service_get_app_connector_async.py b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/samples/generated_samples/beyondcorp_v1_generated_app_connectors_service_get_app_connector_async.py deleted file mode 100644 index 1057519e31d8..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/samples/generated_samples/beyondcorp_v1_generated_app_connectors_service_get_app_connector_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetAppConnector -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-beyondcorp-appconnectors - - -# [START beyondcorp_v1_generated_AppConnectorsService_GetAppConnector_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import beyondcorp_appconnectors_v1 - - -async def sample_get_app_connector(): - # Create a client - client = beyondcorp_appconnectors_v1.AppConnectorsServiceAsyncClient() - - # Initialize request argument(s) - request = beyondcorp_appconnectors_v1.GetAppConnectorRequest( - name="name_value", - ) - - # Make the request - response = await client.get_app_connector(request=request) - - # Handle the response - print(response) - -# [END beyondcorp_v1_generated_AppConnectorsService_GetAppConnector_async] diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/samples/generated_samples/beyondcorp_v1_generated_app_connectors_service_get_app_connector_sync.py b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/samples/generated_samples/beyondcorp_v1_generated_app_connectors_service_get_app_connector_sync.py deleted file mode 100644 index 417c619a496c..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/samples/generated_samples/beyondcorp_v1_generated_app_connectors_service_get_app_connector_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetAppConnector -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-beyondcorp-appconnectors - - -# [START beyondcorp_v1_generated_AppConnectorsService_GetAppConnector_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import beyondcorp_appconnectors_v1 - - -def sample_get_app_connector(): - # Create a client - client = beyondcorp_appconnectors_v1.AppConnectorsServiceClient() - - # Initialize request argument(s) - request = beyondcorp_appconnectors_v1.GetAppConnectorRequest( - name="name_value", - ) - - # Make the request - response = client.get_app_connector(request=request) - - # Handle the response - print(response) - -# [END beyondcorp_v1_generated_AppConnectorsService_GetAppConnector_sync] diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/samples/generated_samples/beyondcorp_v1_generated_app_connectors_service_list_app_connectors_async.py b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/samples/generated_samples/beyondcorp_v1_generated_app_connectors_service_list_app_connectors_async.py deleted file mode 100644 index 869eafb34a00..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/samples/generated_samples/beyondcorp_v1_generated_app_connectors_service_list_app_connectors_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListAppConnectors -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-beyondcorp-appconnectors - - -# [START beyondcorp_v1_generated_AppConnectorsService_ListAppConnectors_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import beyondcorp_appconnectors_v1 - - -async def sample_list_app_connectors(): - # Create a client - client = beyondcorp_appconnectors_v1.AppConnectorsServiceAsyncClient() - - # Initialize request argument(s) - request = beyondcorp_appconnectors_v1.ListAppConnectorsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_app_connectors(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END beyondcorp_v1_generated_AppConnectorsService_ListAppConnectors_async] diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/samples/generated_samples/beyondcorp_v1_generated_app_connectors_service_list_app_connectors_sync.py b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/samples/generated_samples/beyondcorp_v1_generated_app_connectors_service_list_app_connectors_sync.py deleted file mode 100644 index 0059d251e0fa..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/samples/generated_samples/beyondcorp_v1_generated_app_connectors_service_list_app_connectors_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListAppConnectors -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-beyondcorp-appconnectors - - -# [START beyondcorp_v1_generated_AppConnectorsService_ListAppConnectors_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import beyondcorp_appconnectors_v1 - - -def sample_list_app_connectors(): - # Create a client - client = beyondcorp_appconnectors_v1.AppConnectorsServiceClient() - - # Initialize request argument(s) - request = beyondcorp_appconnectors_v1.ListAppConnectorsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_app_connectors(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END beyondcorp_v1_generated_AppConnectorsService_ListAppConnectors_sync] diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/samples/generated_samples/beyondcorp_v1_generated_app_connectors_service_report_status_async.py b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/samples/generated_samples/beyondcorp_v1_generated_app_connectors_service_report_status_async.py deleted file mode 100644 index 74f472f0c7f4..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/samples/generated_samples/beyondcorp_v1_generated_app_connectors_service_report_status_async.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ReportStatus -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-beyondcorp-appconnectors - - -# [START beyondcorp_v1_generated_AppConnectorsService_ReportStatus_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import beyondcorp_appconnectors_v1 - - -async def sample_report_status(): - # Create a client - client = beyondcorp_appconnectors_v1.AppConnectorsServiceAsyncClient() - - # Initialize request argument(s) - resource_info = beyondcorp_appconnectors_v1.ResourceInfo() - resource_info.id = "id_value" - - request = beyondcorp_appconnectors_v1.ReportStatusRequest( - app_connector="app_connector_value", - resource_info=resource_info, - ) - - # Make the request - operation = client.report_status(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END beyondcorp_v1_generated_AppConnectorsService_ReportStatus_async] diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/samples/generated_samples/beyondcorp_v1_generated_app_connectors_service_report_status_sync.py b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/samples/generated_samples/beyondcorp_v1_generated_app_connectors_service_report_status_sync.py deleted file mode 100644 index e9574edf1911..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/samples/generated_samples/beyondcorp_v1_generated_app_connectors_service_report_status_sync.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ReportStatus -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-beyondcorp-appconnectors - - -# [START beyondcorp_v1_generated_AppConnectorsService_ReportStatus_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import beyondcorp_appconnectors_v1 - - -def sample_report_status(): - # Create a client - client = beyondcorp_appconnectors_v1.AppConnectorsServiceClient() - - # Initialize request argument(s) - resource_info = beyondcorp_appconnectors_v1.ResourceInfo() - resource_info.id = "id_value" - - request = beyondcorp_appconnectors_v1.ReportStatusRequest( - app_connector="app_connector_value", - resource_info=resource_info, - ) - - # Make the request - operation = client.report_status(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END beyondcorp_v1_generated_AppConnectorsService_ReportStatus_sync] diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/samples/generated_samples/beyondcorp_v1_generated_app_connectors_service_update_app_connector_async.py b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/samples/generated_samples/beyondcorp_v1_generated_app_connectors_service_update_app_connector_async.py deleted file mode 100644 index 7d61b89afee8..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/samples/generated_samples/beyondcorp_v1_generated_app_connectors_service_update_app_connector_async.py +++ /dev/null @@ -1,59 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateAppConnector -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-beyondcorp-appconnectors - - -# [START beyondcorp_v1_generated_AppConnectorsService_UpdateAppConnector_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import beyondcorp_appconnectors_v1 - - -async def sample_update_app_connector(): - # Create a client - client = beyondcorp_appconnectors_v1.AppConnectorsServiceAsyncClient() - - # Initialize request argument(s) - app_connector = beyondcorp_appconnectors_v1.AppConnector() - app_connector.name = "name_value" - - request = beyondcorp_appconnectors_v1.UpdateAppConnectorRequest( - app_connector=app_connector, - ) - - # Make the request - operation = client.update_app_connector(request=request) - - print("Waiting for operation to complete...") - - response = await (await operation).result()  # result() is itself a coroutine on AsyncOperation - - # Handle the response - print(response) - -# [END beyondcorp_v1_generated_AppConnectorsService_UpdateAppConnector_async] diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/samples/generated_samples/beyondcorp_v1_generated_app_connectors_service_update_app_connector_sync.py b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/samples/generated_samples/beyondcorp_v1_generated_app_connectors_service_update_app_connector_sync.py deleted file mode 100644 index d3eb4b327126..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/samples/generated_samples/beyondcorp_v1_generated_app_connectors_service_update_app_connector_sync.py +++ /dev/null @@ -1,59 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateAppConnector -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-beyondcorp-appconnectors - - -# [START beyondcorp_v1_generated_AppConnectorsService_UpdateAppConnector_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import beyondcorp_appconnectors_v1 - - -def sample_update_app_connector(): - # Create a client - client = beyondcorp_appconnectors_v1.AppConnectorsServiceClient() - - # Initialize request argument(s) - app_connector = beyondcorp_appconnectors_v1.AppConnector() - app_connector.name = "name_value" - - request = beyondcorp_appconnectors_v1.UpdateAppConnectorRequest( - app_connector=app_connector, - ) - - # Make the request - operation = client.update_app_connector(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END beyondcorp_v1_generated_AppConnectorsService_UpdateAppConnector_sync] diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnectors.v1.json b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnectors.v1.json deleted file mode 100644 index e1535607b299..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnectors.v1.json +++ /dev/null @@ -1,1013 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.beyondcorp.appconnectors.v1", - "version": "v1" - } - ], - "language": "PYTHON", - "name": "google-cloud-beyondcorp-appconnectors", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.beyondcorp_appconnectors_v1.AppConnectorsServiceAsyncClient", - "shortName": "AppConnectorsServiceAsyncClient" - }, - "fullName": "google.cloud.beyondcorp_appconnectors_v1.AppConnectorsServiceAsyncClient.create_app_connector", - "method": { - "fullName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService.CreateAppConnector", - "service": { - "fullName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService", - "shortName": "AppConnectorsService" - }, - "shortName": "CreateAppConnector" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_appconnectors_v1.types.CreateAppConnectorRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "app_connector", - "type": "google.cloud.beyondcorp_appconnectors_v1.types.AppConnector" - }, - { - "name": "app_connector_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_app_connector" - }, - "description": "Sample for CreateAppConnector", - "file": "beyondcorp_v1_generated_app_connectors_service_create_app_connector_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_AppConnectorsService_CreateAppConnector_async", - "segments": [ - { - "end": 59, - "start": 27, - "type": "FULL" - }, - { - "end": 59, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 56, - "start": 50, - "type": 
"REQUEST_EXECUTION" - }, - { - "end": 60, - "start": 57, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_app_connectors_service_create_app_connector_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.beyondcorp_appconnectors_v1.AppConnectorsServiceClient", - "shortName": "AppConnectorsServiceClient" - }, - "fullName": "google.cloud.beyondcorp_appconnectors_v1.AppConnectorsServiceClient.create_app_connector", - "method": { - "fullName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService.CreateAppConnector", - "service": { - "fullName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService", - "shortName": "AppConnectorsService" - }, - "shortName": "CreateAppConnector" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_appconnectors_v1.types.CreateAppConnectorRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "app_connector", - "type": "google.cloud.beyondcorp_appconnectors_v1.types.AppConnector" - }, - { - "name": "app_connector_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_app_connector" - }, - "description": "Sample for CreateAppConnector", - "file": "beyondcorp_v1_generated_app_connectors_service_create_app_connector_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_AppConnectorsService_CreateAppConnector_sync", - "segments": [ - { - "end": 59, - "start": 27, - "type": "FULL" - }, - { - "end": 59, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 56, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 60, - "start": 57, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_app_connectors_service_create_app_connector_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.beyondcorp_appconnectors_v1.AppConnectorsServiceAsyncClient", - "shortName": "AppConnectorsServiceAsyncClient" - }, - "fullName": "google.cloud.beyondcorp_appconnectors_v1.AppConnectorsServiceAsyncClient.delete_app_connector", - "method": { - "fullName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService.DeleteAppConnector", - "service": { - "fullName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService", - "shortName": "AppConnectorsService" - }, - "shortName": "DeleteAppConnector" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_appconnectors_v1.types.DeleteAppConnectorRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_app_connector" - }, - "description": "Sample for DeleteAppConnector", - "file": "beyondcorp_v1_generated_app_connectors_service_delete_app_connector_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"beyondcorp_v1_generated_AppConnectorsService_DeleteAppConnector_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_app_connectors_service_delete_app_connector_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.beyondcorp_appconnectors_v1.AppConnectorsServiceClient", - "shortName": "AppConnectorsServiceClient" - }, - "fullName": "google.cloud.beyondcorp_appconnectors_v1.AppConnectorsServiceClient.delete_app_connector", - "method": { - "fullName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService.DeleteAppConnector", - "service": { - "fullName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService", - "shortName": "AppConnectorsService" - }, - "shortName": "DeleteAppConnector" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_appconnectors_v1.types.DeleteAppConnectorRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_app_connector" - }, - "description": "Sample for DeleteAppConnector", - "file": "beyondcorp_v1_generated_app_connectors_service_delete_app_connector_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_AppConnectorsService_DeleteAppConnector_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_app_connectors_service_delete_app_connector_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.beyondcorp_appconnectors_v1.AppConnectorsServiceAsyncClient", - "shortName": "AppConnectorsServiceAsyncClient" - }, - "fullName": "google.cloud.beyondcorp_appconnectors_v1.AppConnectorsServiceAsyncClient.get_app_connector", - "method": { - "fullName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService.GetAppConnector", - "service": { - "fullName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService", - "shortName": "AppConnectorsService" - }, - "shortName": "GetAppConnector" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_appconnectors_v1.types.GetAppConnectorRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.beyondcorp_appconnectors_v1.types.AppConnector", - "shortName": "get_app_connector" - }, - "description": "Sample 
for GetAppConnector", - "file": "beyondcorp_v1_generated_app_connectors_service_get_app_connector_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_AppConnectorsService_GetAppConnector_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_app_connectors_service_get_app_connector_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.beyondcorp_appconnectors_v1.AppConnectorsServiceClient", - "shortName": "AppConnectorsServiceClient" - }, - "fullName": "google.cloud.beyondcorp_appconnectors_v1.AppConnectorsServiceClient.get_app_connector", - "method": { - "fullName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService.GetAppConnector", - "service": { - "fullName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService", - "shortName": "AppConnectorsService" - }, - "shortName": "GetAppConnector" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_appconnectors_v1.types.GetAppConnectorRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.beyondcorp_appconnectors_v1.types.AppConnector", - "shortName": "get_app_connector" - }, - "description": "Sample for GetAppConnector", - "file": "beyondcorp_v1_generated_app_connectors_service_get_app_connector_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_AppConnectorsService_GetAppConnector_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_app_connectors_service_get_app_connector_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.beyondcorp_appconnectors_v1.AppConnectorsServiceAsyncClient", - "shortName": "AppConnectorsServiceAsyncClient" - }, - "fullName": "google.cloud.beyondcorp_appconnectors_v1.AppConnectorsServiceAsyncClient.list_app_connectors", - "method": { - "fullName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService.ListAppConnectors", - "service": { - "fullName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService", - "shortName": "AppConnectorsService" - }, - "shortName": "ListAppConnectors" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_appconnectors_v1.types.ListAppConnectorsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": 
"Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.beyondcorp_appconnectors_v1.services.app_connectors_service.pagers.ListAppConnectorsAsyncPager", - "shortName": "list_app_connectors" - }, - "description": "Sample for ListAppConnectors", - "file": "beyondcorp_v1_generated_app_connectors_service_list_app_connectors_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_AppConnectorsService_ListAppConnectors_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_app_connectors_service_list_app_connectors_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.beyondcorp_appconnectors_v1.AppConnectorsServiceClient", - "shortName": "AppConnectorsServiceClient" - }, - "fullName": "google.cloud.beyondcorp_appconnectors_v1.AppConnectorsServiceClient.list_app_connectors", - "method": { - "fullName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService.ListAppConnectors", - "service": { - "fullName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService", - "shortName": "AppConnectorsService" - }, - "shortName": "ListAppConnectors" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_appconnectors_v1.types.ListAppConnectorsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.beyondcorp_appconnectors_v1.services.app_connectors_service.pagers.ListAppConnectorsPager", - "shortName": "list_app_connectors" - }, - "description": "Sample for ListAppConnectors", - "file": "beyondcorp_v1_generated_app_connectors_service_list_app_connectors_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_AppConnectorsService_ListAppConnectors_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_app_connectors_service_list_app_connectors_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.beyondcorp_appconnectors_v1.AppConnectorsServiceAsyncClient", - "shortName": "AppConnectorsServiceAsyncClient" - }, - "fullName": "google.cloud.beyondcorp_appconnectors_v1.AppConnectorsServiceAsyncClient.report_status", - "method": { - "fullName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService.ReportStatus", - "service": { - "fullName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService", - "shortName": "AppConnectorsService" - }, - "shortName": "ReportStatus" - }, - "parameters": [ - { - "name": "request", - 
"type": "google.cloud.beyondcorp_appconnectors_v1.types.ReportStatusRequest" - }, - { - "name": "app_connector", - "type": "str" - }, - { - "name": "resource_info", - "type": "google.cloud.beyondcorp_appconnectors_v1.types.ResourceInfo" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "report_status" - }, - "description": "Sample for ReportStatus", - "file": "beyondcorp_v1_generated_app_connectors_service_report_status_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_AppConnectorsService_ReportStatus_async", - "segments": [ - { - "end": 59, - "start": 27, - "type": "FULL" - }, - { - "end": 59, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 56, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 60, - "start": 57, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_app_connectors_service_report_status_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.beyondcorp_appconnectors_v1.AppConnectorsServiceClient", - "shortName": "AppConnectorsServiceClient" - }, - "fullName": "google.cloud.beyondcorp_appconnectors_v1.AppConnectorsServiceClient.report_status", - "method": { - "fullName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService.ReportStatus", - "service": { - "fullName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService", - "shortName": "AppConnectorsService" - }, - "shortName": "ReportStatus" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_appconnectors_v1.types.ReportStatusRequest" - }, - { - "name": "app_connector", - "type": "str" - }, - { - "name": "resource_info", - "type": "google.cloud.beyondcorp_appconnectors_v1.types.ResourceInfo" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "report_status" - }, - "description": "Sample for ReportStatus", - "file": "beyondcorp_v1_generated_app_connectors_service_report_status_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_AppConnectorsService_ReportStatus_sync", - "segments": [ - { - "end": 59, - "start": 27, - "type": "FULL" - }, - { - "end": 59, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 56, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 60, - "start": 57, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_app_connectors_service_report_status_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.beyondcorp_appconnectors_v1.AppConnectorsServiceAsyncClient", - "shortName": "AppConnectorsServiceAsyncClient" - }, - "fullName": "google.cloud.beyondcorp_appconnectors_v1.AppConnectorsServiceAsyncClient.update_app_connector", - "method": { 
- "fullName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService.UpdateAppConnector", - "service": { - "fullName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService", - "shortName": "AppConnectorsService" - }, - "shortName": "UpdateAppConnector" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_appconnectors_v1.types.UpdateAppConnectorRequest" - }, - { - "name": "app_connector", - "type": "google.cloud.beyondcorp_appconnectors_v1.types.AppConnector" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_app_connector" - }, - "description": "Sample for UpdateAppConnector", - "file": "beyondcorp_v1_generated_app_connectors_service_update_app_connector_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_AppConnectorsService_UpdateAppConnector_async", - "segments": [ - { - "end": 58, - "start": 27, - "type": "FULL" - }, - { - "end": 58, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 55, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 59, - "start": 56, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_app_connectors_service_update_app_connector_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.beyondcorp_appconnectors_v1.AppConnectorsServiceClient", - "shortName": "AppConnectorsServiceClient" - }, - "fullName": "google.cloud.beyondcorp_appconnectors_v1.AppConnectorsServiceClient.update_app_connector", - "method": { - "fullName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService.UpdateAppConnector", - "service": { - "fullName": "google.cloud.beyondcorp.appconnectors.v1.AppConnectorsService", - "shortName": "AppConnectorsService" - }, - "shortName": "UpdateAppConnector" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_appconnectors_v1.types.UpdateAppConnectorRequest" - }, - { - "name": "app_connector", - "type": "google.cloud.beyondcorp_appconnectors_v1.types.AppConnector" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_app_connector" - }, - "description": "Sample for UpdateAppConnector", - "file": "beyondcorp_v1_generated_app_connectors_service_update_app_connector_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_AppConnectorsService_UpdateAppConnector_sync", - "segments": [ - { - "end": 58, - "start": 27, - "type": "FULL" - }, - { - "end": 58, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 55, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 59, - 
"start": 56, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_app_connectors_service_update_app_connector_sync.py" - } - ] -} diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/scripts/fixup_beyondcorp_appconnectors_v1_keywords.py b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/scripts/fixup_beyondcorp_appconnectors_v1_keywords.py deleted file mode 100644 index cb69b5e4271a..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/scripts/fixup_beyondcorp_appconnectors_v1_keywords.py +++ /dev/null @@ -1,181 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class beyondcorp_appconnectorsCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_app_connector': ('parent', 'app_connector', 'app_connector_id', 'request_id', 'validate_only', ), - 'delete_app_connector': ('name', 'request_id', 'validate_only', ), - 'get_app_connector': ('name', ), - 'list_app_connectors': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'report_status': ('app_connector', 'resource_info', 'request_id', 'validate_only', ), - 'update_app_connector': ('update_mask', 'app_connector', 'request_id', 'validate_only', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. 
- return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=beyondcorp_appconnectorsCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the beyondcorp_appconnectors client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. 
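-
-Example invocation (illustrative; the directory names are placeholders):
-    python3 fixup_beyondcorp_appconnectors_v1_keywords.py \
-        --input-directory ./my_src --output-directory ./my_src_fixed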
-""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/setup.py b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/setup.py deleted file mode 100644 index 7be807589229..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/setup.py +++ /dev/null @@ -1,99 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-beyondcorp-appconnectors' - - -description = "Google Cloud Beyondcorp Appconnectors API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/beyondcorp_appconnectors/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "grpc-google-iam-v1 >= 0.14.0, <1.0.0dev", -] -extras = { -} -url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appconnectors" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - extras_require=extras, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/testing/constraints-3.10.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/testing/constraints-3.10.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
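-# Unlike constraints-3.7.txt, which pins every dependency to its lower
-# bound, entries here are left unpinned so unit tests resolve the newest
-# compatible releases.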
-google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/testing/constraints-3.11.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/testing/constraints-3.11.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/testing/constraints-3.12.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/testing/constraints-3.12.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/testing/constraints-3.13.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/testing/constraints-3.13.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/testing/constraints-3.7.txt deleted file mode 100644 index fb7e93a1b473..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/testing/constraints-3.7.txt +++ /dev/null @@ -1,11 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -proto-plus==1.22.3 -protobuf==3.20.2 -grpc-google-iam-v1==0.14.0 diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/testing/constraints-3.8.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/testing/constraints-3.8.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/testing/constraints-3.9.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/testing/constraints-3.9.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. 
-# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/tests/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/tests/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/tests/unit/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/tests/unit/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/tests/unit/gapic/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/tests/unit/gapic/beyondcorp_appconnectors_v1/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/tests/unit/gapic/beyondcorp_appconnectors_v1/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/tests/unit/gapic/beyondcorp_appconnectors_v1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/tests/unit/gapic/beyondcorp_appconnectors_v1/test_app_connectors_service.py b/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/tests/unit/gapic/beyondcorp_appconnectors_v1/test_app_connectors_service.py deleted file mode 100644 index 3feca6e32d90..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appconnectors/v1/tests/unit/gapic/beyondcorp_appconnectors_v1/test_app_connectors_service.py +++ /dev/null @@ -1,7713 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation -from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.beyondcorp_appconnectors_v1.services.app_connectors_service import AppConnectorsServiceAsyncClient -from google.cloud.beyondcorp_appconnectors_v1.services.app_connectors_service import AppConnectorsServiceClient -from google.cloud.beyondcorp_appconnectors_v1.services.app_connectors_service import pagers -from google.cloud.beyondcorp_appconnectors_v1.services.app_connectors_service import transports -from google.cloud.beyondcorp_appconnectors_v1.types import app_connectors_service -from google.cloud.beyondcorp_appconnectors_v1.types import resource_info -from google.cloud.beyondcorp_appconnectors_v1.types import resource_info as gcba_resource_info -from google.cloud.location import locations_pb2 -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import any_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -import google.auth - - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": "service account credentials", - "principal": "service-account@example.com", -} -CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. 
-def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert AppConnectorsServiceClient._get_default_mtls_endpoint(None) is None - assert AppConnectorsServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert AppConnectorsServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert AppConnectorsServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert AppConnectorsServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert AppConnectorsServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert AppConnectorsServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert AppConnectorsServiceClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert AppConnectorsServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - AppConnectorsServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert AppConnectorsServiceClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert AppConnectorsServiceClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert AppConnectorsServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - AppConnectorsServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert 
AppConnectorsServiceClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert AppConnectorsServiceClient._get_client_cert_source(None, False) is None - assert AppConnectorsServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert AppConnectorsServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert AppConnectorsServiceClient._get_client_cert_source(None, True) is mock_default_cert_source - assert AppConnectorsServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(AppConnectorsServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AppConnectorsServiceClient)) -@mock.patch.object(AppConnectorsServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AppConnectorsServiceAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = AppConnectorsServiceClient._DEFAULT_UNIVERSE - default_endpoint = AppConnectorsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = AppConnectorsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert AppConnectorsServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert AppConnectorsServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == AppConnectorsServiceClient.DEFAULT_MTLS_ENDPOINT - assert AppConnectorsServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert AppConnectorsServiceClient._get_api_endpoint(None, None, default_universe, "always") == AppConnectorsServiceClient.DEFAULT_MTLS_ENDPOINT - assert AppConnectorsServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == AppConnectorsServiceClient.DEFAULT_MTLS_ENDPOINT - assert AppConnectorsServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert AppConnectorsServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - AppConnectorsServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." - - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert AppConnectorsServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert AppConnectorsServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert AppConnectorsServiceClient._get_universe_domain(None, None) == AppConnectorsServiceClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - AppConnectorsServiceClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
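-# The parametrized cases below capture the intended behavior: credential
-# info is appended to error.details only for auth-related status codes
-# (401, 403, 404), and only when the credentials object exposes
-# get_cred_info().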
- -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = AppConnectorsServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - -@pytest.mark.parametrize("error_code", [401,403,404,500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = AppConnectorsServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - -@pytest.mark.parametrize("client_class,transport_name", [ - (AppConnectorsServiceClient, "grpc"), - (AppConnectorsServiceAsyncClient, "grpc_asyncio"), - (AppConnectorsServiceClient, "rest"), -]) -def test_app_connectors_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'beyondcorp.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://beyondcorp.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.AppConnectorsServiceGrpcTransport, "grpc"), - (transports.AppConnectorsServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.AppConnectorsServiceRestTransport, "rest"), -]) -def test_app_connectors_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (AppConnectorsServiceClient, "grpc"), - (AppConnectorsServiceAsyncClient, "grpc_asyncio"), - (AppConnectorsServiceClient, "rest"), -]) -def test_app_connectors_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = 
client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'beyondcorp.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://beyondcorp.googleapis.com' - ) - - -def test_app_connectors_service_client_get_transport_class(): - transport = AppConnectorsServiceClient.get_transport_class() - available_transports = [ - transports.AppConnectorsServiceGrpcTransport, - transports.AppConnectorsServiceRestTransport, - ] - assert transport in available_transports - - transport = AppConnectorsServiceClient.get_transport_class("grpc") - assert transport == transports.AppConnectorsServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (AppConnectorsServiceClient, transports.AppConnectorsServiceGrpcTransport, "grpc"), - (AppConnectorsServiceAsyncClient, transports.AppConnectorsServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (AppConnectorsServiceClient, transports.AppConnectorsServiceRestTransport, "rest"), -]) -@mock.patch.object(AppConnectorsServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AppConnectorsServiceClient)) -@mock.patch.object(AppConnectorsServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AppConnectorsServiceAsyncClient)) -def test_app_connectors_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(AppConnectorsServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(AppConnectorsServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". 
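-    # With mTLS disabled, the transport must receive the default endpoint
-    # template populated with the default universe domain.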
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
-        with mock.patch.object(transport_class, '__init__') as patched:
-            patched.return_value = None
-            client = client_class(transport=transport_name)
-            patched.assert_called_once_with(
-                credentials=None,
-                credentials_file=None,
-                host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-                scopes=None,
-                client_cert_source_for_mtls=None,
-                quota_project_id=None,
-                client_info=transports.base.DEFAULT_CLIENT_INFO,
-                always_use_jwt_access=True,
-                api_audience=None,
-            )
-
-    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
-    # "always".
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
-        with mock.patch.object(transport_class, '__init__') as patched:
-            patched.return_value = None
-            client = client_class(transport=transport_name)
-            patched.assert_called_once_with(
-                credentials=None,
-                credentials_file=None,
-                host=client.DEFAULT_MTLS_ENDPOINT,
-                scopes=None,
-                client_cert_source_for_mtls=None,
-                quota_project_id=None,
-                client_info=transports.base.DEFAULT_CLIENT_INFO,
-                always_use_jwt_access=True,
-                api_audience=None,
-            )
-
-    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
-    # unsupported value.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
-        with pytest.raises(MutualTLSChannelError) as excinfo:
-            client = client_class(transport=transport_name)
-    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
-
-    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
-        with pytest.raises(ValueError) as excinfo:
-            client = client_class(transport=transport_name)
-    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
-
-    # Check the case quota_project_id is provided
-    options = client_options.ClientOptions(quota_project_id="octopus")
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(client_options=options, transport=transport_name)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file=None,
-            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id="octopus",
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience=None,
-        )
-    # Check the case api_audience is provided
-    options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(client_options=options, transport=transport_name)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file=None,
-            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id=None,
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience="https://language.googleapis.com"
-        )
-
-@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
-    (AppConnectorsServiceClient, transports.AppConnectorsServiceGrpcTransport, "grpc", "true"), -
(AppConnectorsServiceAsyncClient, transports.AppConnectorsServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (AppConnectorsServiceClient, transports.AppConnectorsServiceGrpcTransport, "grpc", "false"), - (AppConnectorsServiceAsyncClient, transports.AppConnectorsServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (AppConnectorsServiceClient, transports.AppConnectorsServiceRestTransport, "rest", "true"), - (AppConnectorsServiceClient, transports.AppConnectorsServiceRestTransport, "rest", "false"), -]) -@mock.patch.object(AppConnectorsServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AppConnectorsServiceClient)) -@mock.patch.object(AppConnectorsServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AppConnectorsServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_app_connectors_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
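-    # With no certificate from either source, "auto" must leave the client on
-    # the plain default endpoint.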
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - AppConnectorsServiceClient, AppConnectorsServiceAsyncClient -]) -@mock.patch.object(AppConnectorsServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AppConnectorsServiceClient)) -@mock.patch.object(AppConnectorsServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AppConnectorsServiceAsyncClient)) -def test_app_connectors_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
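-    # With a default client cert available, "auto" resolves to the mTLS
-    # endpoint and surfaces that cert source.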
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - AppConnectorsServiceClient, AppConnectorsServiceAsyncClient -]) -@mock.patch.object(AppConnectorsServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AppConnectorsServiceClient)) -@mock.patch.object(AppConnectorsServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AppConnectorsServiceAsyncClient)) -def test_app_connectors_service_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = AppConnectorsServiceClient._DEFAULT_UNIVERSE - default_endpoint = AppConnectorsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = AppConnectorsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
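-    # No client certificate is configured here; "always" forces the mTLS
-    # endpoint regardless.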
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (AppConnectorsServiceClient, transports.AppConnectorsServiceGrpcTransport, "grpc"), - (AppConnectorsServiceAsyncClient, transports.AppConnectorsServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (AppConnectorsServiceClient, transports.AppConnectorsServiceRestTransport, "rest"), -]) -def test_app_connectors_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (AppConnectorsServiceClient, transports.AppConnectorsServiceGrpcTransport, "grpc", grpc_helpers), - (AppConnectorsServiceAsyncClient, transports.AppConnectorsServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (AppConnectorsServiceClient, transports.AppConnectorsServiceRestTransport, "rest", None), -]) -def test_app_connectors_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
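-    # The filename should be handed to the transport as credentials_file while
-    # credentials stays None.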
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_app_connectors_service_client_client_options_from_dict(): - with mock.patch('google.cloud.beyondcorp_appconnectors_v1.services.app_connectors_service.transports.AppConnectorsServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = AppConnectorsServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (AppConnectorsServiceClient, transports.AppConnectorsServiceGrpcTransport, "grpc", grpc_helpers), - (AppConnectorsServiceAsyncClient, transports.AppConnectorsServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_app_connectors_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
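-    # ADC and file loading are both patched below so the assertion can verify
-    # that the channel is created with file_creds, not the ADC credentials.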
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "beyondcorp.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="beyondcorp.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - app_connectors_service.ListAppConnectorsRequest, - dict, -]) -def test_list_app_connectors(request_type, transport: str = 'grpc'): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_app_connectors), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = app_connectors_service.ListAppConnectorsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - response = client.list_app_connectors(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = app_connectors_service.ListAppConnectorsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAppConnectorsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -def test_list_app_connectors_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = app_connectors_service.ListAppConnectorsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_app_connectors), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
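-        # Issue the call and check that the populated request reaches the stub unchanged.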
- client.list_app_connectors(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == app_connectors_service.ListAppConnectorsRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - -def test_list_app_connectors_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_app_connectors in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_app_connectors] = mock_rpc - request = {} - client.list_app_connectors(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_app_connectors(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_app_connectors_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_app_connectors in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_app_connectors] = mock_rpc - - request = {} - await client.list_app_connectors(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_app_connectors(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_app_connectors_async(transport: str = 'grpc_asyncio', request_type=app_connectors_service.ListAppConnectorsRequest): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_app_connectors), - '__call__') as call: - # Designate an appropriate return value for the call. 
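-        # FakeUnaryUnaryCall wraps the response in an awaitable, mimicking a real async gRPC invocation.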
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(app_connectors_service.ListAppConnectorsResponse(
-            next_page_token='next_page_token_value',
-            unreachable=['unreachable_value'],
-        ))
-        response = await client.list_app_connectors(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = app_connectors_service.ListAppConnectorsRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListAppConnectorsAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable == ['unreachable_value']
-
-
-@pytest.mark.asyncio
-async def test_list_app_connectors_async_from_dict():
-    await test_list_app_connectors_async(request_type=dict)
-
-def test_list_app_connectors_field_headers():
-    client = AppConnectorsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = app_connectors_service.ListAppConnectorsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_app_connectors),
-            '__call__') as call:
-        call.return_value = app_connectors_service.ListAppConnectorsResponse()
-        client.list_app_connectors(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_app_connectors_field_headers_async():
-    client = AppConnectorsServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = app_connectors_service.ListAppConnectorsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_app_connectors),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(app_connectors_service.ListAppConnectorsResponse())
-        await client.list_app_connectors(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_list_app_connectors_flattened():
-    client = AppConnectorsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_app_connectors),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = app_connectors_service.ListAppConnectorsResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
- client.list_app_connectors( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_app_connectors_flattened_error(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_app_connectors( - app_connectors_service.ListAppConnectorsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_app_connectors_flattened_async(): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_app_connectors), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = app_connectors_service.ListAppConnectorsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(app_connectors_service.ListAppConnectorsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_app_connectors( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_app_connectors_flattened_error_async(): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_app_connectors( - app_connectors_service.ListAppConnectorsRequest(), - parent='parent_value', - ) - - -def test_list_app_connectors_pager(transport_name: str = "grpc"): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_app_connectors), - '__call__') as call: - # Set the response to a series of pages. 
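-        # Four responses with six AppConnectors in total; the final page carries no next_page_token.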
- call.side_effect = ( - app_connectors_service.ListAppConnectorsResponse( - app_connectors=[ - app_connectors_service.AppConnector(), - app_connectors_service.AppConnector(), - app_connectors_service.AppConnector(), - ], - next_page_token='abc', - ), - app_connectors_service.ListAppConnectorsResponse( - app_connectors=[], - next_page_token='def', - ), - app_connectors_service.ListAppConnectorsResponse( - app_connectors=[ - app_connectors_service.AppConnector(), - ], - next_page_token='ghi', - ), - app_connectors_service.ListAppConnectorsResponse( - app_connectors=[ - app_connectors_service.AppConnector(), - app_connectors_service.AppConnector(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_app_connectors(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, app_connectors_service.AppConnector) - for i in results) -def test_list_app_connectors_pages(transport_name: str = "grpc"): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_app_connectors), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - app_connectors_service.ListAppConnectorsResponse( - app_connectors=[ - app_connectors_service.AppConnector(), - app_connectors_service.AppConnector(), - app_connectors_service.AppConnector(), - ], - next_page_token='abc', - ), - app_connectors_service.ListAppConnectorsResponse( - app_connectors=[], - next_page_token='def', - ), - app_connectors_service.ListAppConnectorsResponse( - app_connectors=[ - app_connectors_service.AppConnector(), - ], - next_page_token='ghi', - ), - app_connectors_service.ListAppConnectorsResponse( - app_connectors=[ - app_connectors_service.AppConnector(), - app_connectors_service.AppConnector(), - ], - ), - RuntimeError, - ) - pages = list(client.list_app_connectors(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_app_connectors_async_pager(): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_app_connectors), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - app_connectors_service.ListAppConnectorsResponse( - app_connectors=[ - app_connectors_service.AppConnector(), - app_connectors_service.AppConnector(), - app_connectors_service.AppConnector(), - ], - next_page_token='abc', - ), - app_connectors_service.ListAppConnectorsResponse( - app_connectors=[], - next_page_token='def', - ), - app_connectors_service.ListAppConnectorsResponse( - app_connectors=[ - app_connectors_service.AppConnector(), - ], - next_page_token='ghi', - ), - app_connectors_service.ListAppConnectorsResponse( - app_connectors=[ - app_connectors_service.AppConnector(), - app_connectors_service.AppConnector(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_app_connectors(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, app_connectors_service.AppConnector) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_app_connectors_async_pages(): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_app_connectors), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - app_connectors_service.ListAppConnectorsResponse( - app_connectors=[ - app_connectors_service.AppConnector(), - app_connectors_service.AppConnector(), - app_connectors_service.AppConnector(), - ], - next_page_token='abc', - ), - app_connectors_service.ListAppConnectorsResponse( - app_connectors=[], - next_page_token='def', - ), - app_connectors_service.ListAppConnectorsResponse( - app_connectors=[ - app_connectors_service.AppConnector(), - ], - next_page_token='ghi', - ), - app_connectors_service.ListAppConnectorsResponse( - app_connectors=[ - app_connectors_service.AppConnector(), - app_connectors_service.AppConnector(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_app_connectors(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - app_connectors_service.GetAppConnectorRequest, - dict, -]) -def test_get_app_connector(request_type, transport: str = 'grpc'): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_app_connector), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = app_connectors_service.AppConnector( - name='name_value', - display_name='display_name_value', - uid='uid_value', - state=app_connectors_service.AppConnector.State.CREATING, - ) - response = client.get_app_connector(request) - - # Establish that the underlying gRPC stub method was called. 
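-        # Exactly one stub invocation, and the request object must round-trip intact.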
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = app_connectors_service.GetAppConnectorRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, app_connectors_service.AppConnector) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.uid == 'uid_value' - assert response.state == app_connectors_service.AppConnector.State.CREATING - - -def test_get_app_connector_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = app_connectors_service.GetAppConnectorRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_app_connector), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_app_connector(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == app_connectors_service.GetAppConnectorRequest( - name='name_value', - ) - -def test_get_app_connector_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_app_connector in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_app_connector] = mock_rpc - request = {} - client.get_app_connector(request) - - # Establish that the underlying gRPC stub method was called. 
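-        # The cached wrapped RPC should be hit exactly once for the first call.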
-        assert mock_rpc.call_count == 1
-
-        client.get_app_connector(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_app_connector_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = AppConnectorsServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.get_app_connector in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.get_app_connector] = mock_rpc
-
-        request = {}
-        await client.get_app_connector(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.get_app_connector(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_app_connector_async(transport: str = 'grpc_asyncio', request_type=app_connectors_service.GetAppConnectorRequest):
-    client = AppConnectorsServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_app_connector),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(app_connectors_service.AppConnector(
-            name='name_value',
-            display_name='display_name_value',
-            uid='uid_value',
-            state=app_connectors_service.AppConnector.State.CREATING,
-        ))
-        response = await client.get_app_connector(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = app_connectors_service.GetAppConnectorRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, app_connectors_service.AppConnector)
-    assert response.name == 'name_value'
-    assert response.display_name == 'display_name_value'
-    assert response.uid == 'uid_value'
-    assert response.state == app_connectors_service.AppConnector.State.CREATING
-
-
-@pytest.mark.asyncio
-async def test_get_app_connector_async_from_dict():
-    await test_get_app_connector_async(request_type=dict)
-
-def test_get_app_connector_field_headers():
-    client = AppConnectorsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = app_connectors_service.GetAppConnectorRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
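-    # Patching the stub lets us inspect the metadata the client attaches to the call.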
- with mock.patch.object( - type(client.transport.get_app_connector), - '__call__') as call: - call.return_value = app_connectors_service.AppConnector() - client.get_app_connector(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_app_connector_field_headers_async(): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = app_connectors_service.GetAppConnectorRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_app_connector), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(app_connectors_service.AppConnector()) - await client.get_app_connector(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_app_connector_flattened(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_app_connector), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = app_connectors_service.AppConnector() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_app_connector( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_app_connector_flattened_error(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_app_connector( - app_connectors_service.GetAppConnectorRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_app_connector_flattened_async(): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_app_connector), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = app_connectors_service.AppConnector() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(app_connectors_service.AppConnector()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.get_app_connector( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_app_connector_flattened_error_async(): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_app_connector( - app_connectors_service.GetAppConnectorRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - app_connectors_service.CreateAppConnectorRequest, - dict, -]) -def test_create_app_connector(request_type, transport: str = 'grpc'): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_app_connector), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_app_connector(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = app_connectors_service.CreateAppConnectorRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_create_app_connector_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = app_connectors_service.CreateAppConnectorRequest( - parent='parent_value', - app_connector_id='app_connector_id_value', - request_id='request_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_app_connector), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.create_app_connector(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == app_connectors_service.CreateAppConnectorRequest( - parent='parent_value', - app_connector_id='app_connector_id_value', - request_id='request_id_value', - ) - -def test_create_app_connector_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_app_connector in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_app_connector] = mock_rpc - request = {} - client.create_app_connector(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_app_connector(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_app_connector_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_app_connector in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_app_connector] = mock_rpc - - request = {} - await client.create_app_connector(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_app_connector(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_app_connector_async(transport: str = 'grpc_asyncio', request_type=app_connectors_service.CreateAppConnectorRequest): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_app_connector), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_app_connector(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = app_connectors_service.CreateAppConnectorRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_app_connector_async_from_dict(): - await test_create_app_connector_async(request_type=dict) - -def test_create_app_connector_field_headers(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = app_connectors_service.CreateAppConnectorRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_app_connector), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_app_connector(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_app_connector_field_headers_async(): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = app_connectors_service.CreateAppConnectorRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_app_connector), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_app_connector(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_app_connector_flattened(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_app_connector), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_app_connector( - parent='parent_value', - app_connector=app_connectors_service.AppConnector(name='name_value'), - app_connector_id='app_connector_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].app_connector - mock_val = app_connectors_service.AppConnector(name='name_value') - assert arg == mock_val - arg = args[0].app_connector_id - mock_val = 'app_connector_id_value' - assert arg == mock_val - - -def test_create_app_connector_flattened_error(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_app_connector( - app_connectors_service.CreateAppConnectorRequest(), - parent='parent_value', - app_connector=app_connectors_service.AppConnector(name='name_value'), - app_connector_id='app_connector_id_value', - ) - -@pytest.mark.asyncio -async def test_create_app_connector_flattened_async(): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_app_connector), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_app_connector( - parent='parent_value', - app_connector=app_connectors_service.AppConnector(name='name_value'), - app_connector_id='app_connector_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].app_connector - mock_val = app_connectors_service.AppConnector(name='name_value') - assert arg == mock_val - arg = args[0].app_connector_id - mock_val = 'app_connector_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_app_connector_flattened_error_async(): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error.
- with pytest.raises(ValueError): - await client.create_app_connector( - app_connectors_service.CreateAppConnectorRequest(), - parent='parent_value', - app_connector=app_connectors_service.AppConnector(name='name_value'), - app_connector_id='app_connector_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - app_connectors_service.UpdateAppConnectorRequest, - dict, -]) -def test_update_app_connector(request_type, transport: str = 'grpc'): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_app_connector), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.update_app_connector(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = app_connectors_service.UpdateAppConnectorRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_update_app_connector_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = app_connectors_service.UpdateAppConnectorRequest( - request_id='request_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_app_connector), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_app_connector(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == app_connectors_service.UpdateAppConnectorRequest( - request_id='request_id_value', - ) - -def test_update_app_connector_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_app_connector in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_app_connector] = mock_rpc - request = {} - client.update_app_connector(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_app_connector(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_app_connector_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_app_connector in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_app_connector] = mock_rpc - - request = {} - await client.update_app_connector(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_app_connector(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_app_connector_async(transport: str = 'grpc_asyncio', request_type=app_connectors_service.UpdateAppConnectorRequest): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_app_connector), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_app_connector(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = app_connectors_service.UpdateAppConnectorRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_app_connector_async_from_dict(): - await test_update_app_connector_async(request_type=dict) - -def test_update_app_connector_field_headers(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
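- # The AppConnector name is nested inside the request, so the routing header asserted below is 'app_connector.name' rather than 'name'.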
- request = app_connectors_service.UpdateAppConnectorRequest() - - request.app_connector.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_app_connector), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_app_connector(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'app_connector.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_app_connector_field_headers_async(): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = app_connectors_service.UpdateAppConnectorRequest() - - request.app_connector.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_app_connector), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_app_connector(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'app_connector.name=name_value', - ) in kw['metadata'] - - -def test_update_app_connector_flattened(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_app_connector), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_app_connector( - app_connector=app_connectors_service.AppConnector(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].app_connector - mock_val = app_connectors_service.AppConnector(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_app_connector_flattened_error(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_app_connector( - app_connectors_service.UpdateAppConnectorRequest(), - app_connector=app_connectors_service.AppConnector(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_app_connector_flattened_async(): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_app_connector), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_app_connector( - app_connector=app_connectors_service.AppConnector(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].app_connector - mock_val = app_connectors_service.AppConnector(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_app_connector_flattened_error_async(): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_app_connector( - app_connectors_service.UpdateAppConnectorRequest(), - app_connector=app_connectors_service.AppConnector(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - app_connectors_service.DeleteAppConnectorRequest, - dict, -]) -def test_delete_app_connector(request_type, transport: str = 'grpc'): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_app_connector), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.delete_app_connector(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = app_connectors_service.DeleteAppConnectorRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_app_connector_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests.
- client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = app_connectors_service.DeleteAppConnectorRequest( - name='name_value', - request_id='request_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_app_connector), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_app_connector(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == app_connectors_service.DeleteAppConnectorRequest( - name='name_value', - request_id='request_id_value', - ) - -def test_delete_app_connector_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_app_connector in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_app_connector] = mock_rpc - request = {} - client.delete_app_connector(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_app_connector(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_app_connector_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_app_connector in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_app_connector] = mock_rpc - - request = {} - await client.delete_app_connector(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_app_connector(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_app_connector_async(transport: str = 'grpc_asyncio', request_type=app_connectors_service.DeleteAppConnectorRequest): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_app_connector), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_app_connector(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = app_connectors_service.DeleteAppConnectorRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_app_connector_async_from_dict(): - await test_delete_app_connector_async(request_type=dict) - -def test_delete_app_connector_field_headers(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = app_connectors_service.DeleteAppConnectorRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_app_connector), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_app_connector(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_app_connector_field_headers_async(): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = app_connectors_service.DeleteAppConnectorRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_app_connector), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_app_connector(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_app_connector_flattened(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_app_connector), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_app_connector( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_app_connector_flattened_error(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_app_connector( - app_connectors_service.DeleteAppConnectorRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_app_connector_flattened_async(): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_app_connector), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_app_connector( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_app_connector_flattened_error_async(): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_app_connector( - app_connectors_service.DeleteAppConnectorRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - app_connectors_service.ReportStatusRequest, - dict, -]) -def test_report_status(request_type, transport: str = 'grpc'): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.report_status), - '__call__') as call: - # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.report_status(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = app_connectors_service.ReportStatusRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_report_status_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = app_connectors_service.ReportStatusRequest( - app_connector='app_connector_value', - request_id='request_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.report_status), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.report_status(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == app_connectors_service.ReportStatusRequest( - app_connector='app_connector_value', - request_id='request_id_value', - ) - -def test_report_status_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.report_status in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.report_status] = mock_rpc - request = {} - client.report_status(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.report_status(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_report_status_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.report_status in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.report_status] = mock_rpc - - request = {} - await client.report_status(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.report_status(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_report_status_async(transport: str = 'grpc_asyncio', request_type=app_connectors_service.ReportStatusRequest): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.report_status), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.report_status(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = app_connectors_service.ReportStatusRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_report_status_async_from_dict(): - await test_report_status_async(request_type=dict) - -def test_report_status_field_headers(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = app_connectors_service.ReportStatusRequest() - - request.app_connector = 'app_connector_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.report_status), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.report_status(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'app_connector=app_connector_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_report_status_field_headers_async(): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = app_connectors_service.ReportStatusRequest() - - request.app_connector = 'app_connector_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.report_status), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.report_status(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'app_connector=app_connector_value', - ) in kw['metadata'] - - -def test_report_status_flattened(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.report_status), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.report_status( - app_connector='app_connector_value', - resource_info=gcba_resource_info.ResourceInfo(id='id_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].app_connector - mock_val = 'app_connector_value' - assert arg == mock_val - arg = args[0].resource_info - mock_val = gcba_resource_info.ResourceInfo(id='id_value') - assert arg == mock_val - - -def test_report_status_flattened_error(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.report_status( - app_connectors_service.ReportStatusRequest(), - app_connector='app_connector_value', - resource_info=gcba_resource_info.ResourceInfo(id='id_value'), - ) - -@pytest.mark.asyncio -async def test_report_status_flattened_async(): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.report_status), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.report_status( - app_connector='app_connector_value', - resource_info=gcba_resource_info.ResourceInfo(id='id_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].app_connector - mock_val = 'app_connector_value' - assert arg == mock_val - arg = args[0].resource_info - mock_val = gcba_resource_info.ResourceInfo(id='id_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_report_status_flattened_error_async(): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.report_status( - app_connectors_service.ReportStatusRequest(), - app_connector='app_connector_value', - resource_info=gcba_resource_info.ResourceInfo(id='id_value'), - ) - - -def test_list_app_connectors_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_app_connectors in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_app_connectors] = mock_rpc - - request = {} - client.list_app_connectors(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_app_connectors(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_app_connectors_rest_required_fields(request_type=app_connectors_service.ListAppConnectorsRequest): - transport_class = transports.AppConnectorsServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_app_connectors._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_app_connectors._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in.
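- # An empty set difference means every still-unset field is one of the optional query parameters, never a required path or body field.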
- assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = app_connectors_service.ListAppConnectorsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = app_connectors_service.ListAppConnectorsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_app_connectors(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_app_connectors_rest_unset_required_fields(): - transport = transports.AppConnectorsServiceRestTransport(credentials=ga_credentials.AnonymousCredentials()) - - unset_fields = transport.list_app_connectors._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_app_connectors_rest_flattened(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response.
- return_value = app_connectors_service.ListAppConnectorsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = app_connectors_service.ListAppConnectorsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_app_connectors(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/appConnectors" % client.transport._host, args[1]) - - -def test_list_app_connectors_rest_flattened_error(transport: str = 'rest'): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_app_connectors( - app_connectors_service.ListAppConnectorsRequest(), - parent='parent_value', - ) - - -def test_list_app_connectors_rest_pager(transport: str = 'rest'): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req:
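- # Each fake Response below is one page; req.side_effect returns them in order so the pager can follow each next_page_token.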
- # Set the response as a series of pages - response = ( - app_connectors_service.ListAppConnectorsResponse( - app_connectors=[ - app_connectors_service.AppConnector(), - app_connectors_service.AppConnector(), - app_connectors_service.AppConnector(), - ], - next_page_token='abc', - ), - app_connectors_service.ListAppConnectorsResponse( - app_connectors=[], - next_page_token='def', - ), - app_connectors_service.ListAppConnectorsResponse( - app_connectors=[ - app_connectors_service.AppConnector(), - ], - next_page_token='ghi', - ), - app_connectors_service.ListAppConnectorsResponse( - app_connectors=[ - app_connectors_service.AppConnector(), - app_connectors_service.AppConnector(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(app_connectors_service.ListAppConnectorsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - pager = client.list_app_connectors(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, app_connectors_service.AppConnector) - for i in results) - - pages = list(client.list_app_connectors(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_get_app_connector_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_app_connector in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_app_connector] = mock_rpc - - request = {} - client.get_app_connector(request) - - # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1 - - client.get_app_connector(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_app_connector_rest_required_fields(request_type=app_connectors_service.GetAppConnectorRequest): - transport_class = transports.AppConnectorsServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_app_connector._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_app_connector._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = app_connectors_service.AppConnector() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = app_connectors_service.AppConnector.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_app_connector(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_app_connector_rest_unset_required_fields(): - transport = transports.AppConnectorsServiceRestTransport(credentials=ga_credentials.AnonymousCredentials()) - - unset_fields = transport.get_app_connector._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_app_connector_rest_flattened(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = app_connectors_service.AppConnector() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/appConnectors/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = app_connectors_service.AppConnector.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_app_connector(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/appConnectors/*}" % client.transport._host, args[1]) - - -def test_get_app_connector_rest_flattened_error(transport: str = 'rest'): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_app_connector( - app_connectors_service.GetAppConnectorRequest(), - name='name_value', - ) - - -def test_create_app_connector_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_app_connector in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_app_connector] = mock_rpc - - request = {} - client.create_app_connector(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_app_connector(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_app_connector_rest_required_fields(request_type=app_connectors_service.CreateAppConnectorRequest): - transport_class = transports.AppConnectorsServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_app_connector._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_app_connector._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("app_connector_id", "request_id", "validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
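- # (Illustrative: an unstubbed transcode() would match this request
- # against the rule "post /v1/{parent=projects/*/locations/*}/appConnectors"
- # and split it into uri, body, and query_params; the canned result
- # below pins the uri so only the query parameters are exercised.)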
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "post",
- 'query_params': pb_request,
- }
- transcode_result['body'] = pb_request
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.create_app_connector(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_create_app_connector_rest_unset_required_fields():
- transport = transports.AppConnectorsServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.create_app_connector._get_unset_required_fields({})
- assert set(unset_fields) == (set(("appConnectorId", "requestId", "validateOnly", )) & set(("parent", "appConnector", )))
-
-
-def test_create_app_connector_rest_flattened():
- client = AppConnectorsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = operations_pb2.Operation(name='operations/spam')
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
- # get truthy value for each flattened field
- mock_args = dict(
- parent='parent_value',
- app_connector=app_connectors_service.AppConnector(name='name_value'),
- app_connector_id='app_connector_id_value',
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- client.create_app_connector(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/appConnectors" % client.transport._host, args[1])
-
-
-def test_create_app_connector_rest_flattened_error(transport: str = 'rest'):
- client = AppConnectorsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
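- # The request object and the flattened fields are two competing ways
- # to populate the same message, so the client refuses to merge them
- # and raises ValueError instead.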
- with pytest.raises(ValueError): - client.create_app_connector( - app_connectors_service.CreateAppConnectorRequest(), - parent='parent_value', - app_connector=app_connectors_service.AppConnector(name='name_value'), - app_connector_id='app_connector_id_value', - ) - - -def test_update_app_connector_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_app_connector in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_app_connector] = mock_rpc - - request = {} - client.update_app_connector(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_app_connector(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_app_connector_rest_required_fields(request_type=app_connectors_service.UpdateAppConnectorRequest): - transport_class = transports.AppConnectorsServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_app_connector._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_app_connector._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("request_id", "update_mask", "validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
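- # (Illustrative: UpdateAppConnector carries no required path fields of
- # its own; the app_connector payload travels in the body while
- # updateMask, requestId, and validateOnly ride as query parameters,
- # which is why the stubbed transcode result below includes a 'body'.)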
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "patch",
- 'query_params': pb_request,
- }
- transcode_result['body'] = pb_request
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.update_app_connector(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_update_app_connector_rest_unset_required_fields():
- transport = transports.AppConnectorsServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.update_app_connector._get_unset_required_fields({})
- assert set(unset_fields) == (set(("requestId", "updateMask", "validateOnly", )) & set(("updateMask", "appConnector", )))
-
-
-def test_update_app_connector_rest_flattened():
- client = AppConnectorsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = operations_pb2.Operation(name='operations/spam')
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'app_connector': {'name': 'projects/sample1/locations/sample2/appConnectors/sample3'}}
-
- # get truthy value for each flattened field
- mock_args = dict(
- app_connector=app_connectors_service.AppConnector(name='name_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- client.update_app_connector(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1/{app_connector.name=projects/*/locations/*/appConnectors/*}" % client.transport._host, args[1])
-
-
-def test_update_app_connector_rest_flattened_error(transport: str = 'rest'):
- client = AppConnectorsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError): - client.update_app_connector( - app_connectors_service.UpdateAppConnectorRequest(), - app_connector=app_connectors_service.AppConnector(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_delete_app_connector_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_app_connector in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_app_connector] = mock_rpc - - request = {} - client.delete_app_connector(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_app_connector(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_app_connector_rest_required_fields(request_type=app_connectors_service.DeleteAppConnectorRequest): - transport_class = transports.AppConnectorsServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_app_connector._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_app_connector._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("request_id", "validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
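- # Stubbing transcode() is what lets "name" stay empty here: the real
- # implementation would refuse to expand the http rule with a blank
- # path parameter, while the stub returns a canned uri/method pair.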
- with mock.patch.object(path_template, 'transcode') as transcode:
- # A uri without fields and an empty body will force all the
- # request fields to show up in the query_params.
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "delete",
- 'query_params': pb_request,
- }
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.delete_app_connector(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_delete_app_connector_rest_unset_required_fields():
- transport = transports.AppConnectorsServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.delete_app_connector._get_unset_required_fields({})
- assert set(unset_fields) == (set(("requestId", "validateOnly", )) & set(("name", )))
-
-
-def test_delete_app_connector_rest_flattened():
- client = AppConnectorsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = operations_pb2.Operation(name='operations/spam')
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'name': 'projects/sample1/locations/sample2/appConnectors/sample3'}
-
- # get truthy value for each flattened field
- mock_args = dict(
- name='name_value',
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- client.delete_app_connector(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1/{name=projects/*/locations/*/appConnectors/*}" % client.transport._host, args[1])
-
-
-def test_delete_app_connector_rest_flattened_error(transport: str = 'rest'):
- client = AppConnectorsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError): - client.delete_app_connector( - app_connectors_service.DeleteAppConnectorRequest(), - name='name_value', - ) - - -def test_report_status_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.report_status in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.report_status] = mock_rpc - - request = {} - client.report_status(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.report_status(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_report_status_rest_required_fields(request_type=app_connectors_service.ReportStatusRequest): - transport_class = transports.AppConnectorsServiceRestTransport - - request_init = {} - request_init["app_connector"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).report_status._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["appConnector"] = 'app_connector_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).report_status._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "appConnector" in jsonified_request - assert jsonified_request["appConnector"] == 'app_connector_value' - - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
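- # (Illustrative: ReportStatus is a custom method posted to
- # "/v1/{app_connector=projects/*/locations/*/appConnectors/*}:reportStatus",
- # so both the appConnector path field and the resourceInfo payload are
- # required; the unset-required-fields test below reflects that.)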
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "post",
- 'query_params': pb_request,
- }
- transcode_result['body'] = pb_request
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.report_status(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_report_status_rest_unset_required_fields():
- transport = transports.AppConnectorsServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.report_status._get_unset_required_fields({})
- assert set(unset_fields) == (set(()) & set(("appConnector", "resourceInfo", )))
-
-
-def test_report_status_rest_flattened():
- client = AppConnectorsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = operations_pb2.Operation(name='operations/spam')
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'app_connector': 'projects/sample1/locations/sample2/appConnectors/sample3'}
-
- # get truthy value for each flattened field
- mock_args = dict(
- app_connector='app_connector_value',
- resource_info=gcba_resource_info.ResourceInfo(id='id_value'),
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- client.report_status(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1/{app_connector=projects/*/locations/*/appConnectors/*}:reportStatus" % client.transport._host, args[1])
-
-
-def test_report_status_rest_flattened_error(transport: str = 'rest'):
- client = AppConnectorsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.report_status(
- app_connectors_service.ReportStatusRequest(),
- app_connector='app_connector_value',
- resource_info=gcba_resource_info.ResourceInfo(id='id_value'),
- )
-
-
-def test_credentials_transport_error():
- # It is an error to provide credentials and a transport instance.
- transport = transports.AppConnectorsServiceGrpcTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- with pytest.raises(ValueError):
- client = AppConnectorsServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # It is an error to provide a credentials file and a transport instance.
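- # A transport instance is already bound to its own credentials, so any
- # second credential-related option (explicit credentials, a credentials
- # file, an api_key, or scopes) is ambiguous and rejected with ValueError.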
- transport = transports.AppConnectorsServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = AppConnectorsServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.AppConnectorsServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = AppConnectorsServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = AppConnectorsServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.AppConnectorsServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = AppConnectorsServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.AppConnectorsServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = AppConnectorsServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.AppConnectorsServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.AppConnectorsServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.AppConnectorsServiceGrpcTransport, - transports.AppConnectorsServiceGrpcAsyncIOTransport, - transports.AppConnectorsServiceRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = AppConnectorsServiceClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_app_connectors_empty_call_grpc(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_app_connectors), - '__call__') as call: - call.return_value = app_connectors_service.ListAppConnectorsResponse() - client.list_app_connectors(request=None) - - # Establish that the underlying stub method was called. 
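- # With request=None the client synthesizes an empty
- # ListAppConnectorsRequest, which is what args[0] is compared against
- # below.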
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = app_connectors_service.ListAppConnectorsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_app_connector_empty_call_grpc(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_app_connector), - '__call__') as call: - call.return_value = app_connectors_service.AppConnector() - client.get_app_connector(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = app_connectors_service.GetAppConnectorRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_app_connector_empty_call_grpc(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_app_connector), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_app_connector(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = app_connectors_service.CreateAppConnectorRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_app_connector_empty_call_grpc(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_app_connector), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_app_connector(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = app_connectors_service.UpdateAppConnectorRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_app_connector_empty_call_grpc(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_app_connector), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_app_connector(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = app_connectors_service.DeleteAppConnectorRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_report_status_empty_call_grpc(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.report_status), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.report_status(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = app_connectors_service.ReportStatusRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = AppConnectorsServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_app_connectors_empty_call_grpc_asyncio(): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_app_connectors), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(app_connectors_service.ListAppConnectorsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - await client.list_app_connectors(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = app_connectors_service.ListAppConnectorsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_app_connector_empty_call_grpc_asyncio(): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_app_connector), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(app_connectors_service.AppConnector( - name='name_value', - display_name='display_name_value', - uid='uid_value', - state=app_connectors_service.AppConnector.State.CREATING, - )) - await client.get_app_connector(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = app_connectors_service.GetAppConnectorRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_app_connector_empty_call_grpc_asyncio(): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. 
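- # grpc_helpers_async.FakeUnaryUnaryCall wraps the canned Operation in
- # an awaitable that mimics the UnaryUnaryCall a real async stub would
- # return, so the awaited client call below completes normally.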
- with mock.patch.object( - type(client.transport.create_app_connector), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_app_connector(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = app_connectors_service.CreateAppConnectorRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_app_connector_empty_call_grpc_asyncio(): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_app_connector), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_app_connector(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = app_connectors_service.UpdateAppConnectorRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_app_connector_empty_call_grpc_asyncio(): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_app_connector), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_app_connector(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = app_connectors_service.DeleteAppConnectorRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_report_status_empty_call_grpc_asyncio(): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.report_status), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.report_status(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = app_connectors_service.ReportStatusRequest() - - assert args[0] == request_msg - - -def test_transport_kind_rest(): - transport = AppConnectorsServiceClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" - - -def test_list_app_connectors_rest_bad_request(request_type=app_connectors_service.ListAppConnectorsRequest): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_app_connectors(request) - - -@pytest.mark.parametrize("request_type", [ - app_connectors_service.ListAppConnectorsRequest, - dict, -]) -def test_list_app_connectors_rest_call_success(request_type): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = app_connectors_service.ListAppConnectorsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = app_connectors_service.ListAppConnectorsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_app_connectors(request) - - # Establish that the response is the type that we expect. 
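- # The REST client wraps the raw ListAppConnectorsResponse in a pager
- # while proxying fields such as next_page_token and unreachable
- # straight through to the underlying message.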
- assert isinstance(response, pagers.ListAppConnectorsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_app_connectors_rest_interceptors(null_interceptor): - transport = transports.AppConnectorsServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AppConnectorsServiceRestInterceptor(), - ) - client = AppConnectorsServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AppConnectorsServiceRestInterceptor, "post_list_app_connectors") as post, \ - mock.patch.object(transports.AppConnectorsServiceRestInterceptor, "post_list_app_connectors_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AppConnectorsServiceRestInterceptor, "pre_list_app_connectors") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = app_connectors_service.ListAppConnectorsRequest.pb(app_connectors_service.ListAppConnectorsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = app_connectors_service.ListAppConnectorsResponse.to_json(app_connectors_service.ListAppConnectorsResponse()) - req.return_value.content = return_value - - request = app_connectors_service.ListAppConnectorsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = app_connectors_service.ListAppConnectorsResponse() - post_with_metadata.return_value = app_connectors_service.ListAppConnectorsResponse(), metadata - - client.list_app_connectors(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_app_connector_rest_bad_request(request_type=app_connectors_service.GetAppConnectorRequest): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/appConnectors/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
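- # A 400 status with an empty JSON body is all api_core needs to map
- # the response onto core_exceptions.BadRequest; no structured
- # google.rpc.Status payload is required.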
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_app_connector(request) - - -@pytest.mark.parametrize("request_type", [ - app_connectors_service.GetAppConnectorRequest, - dict, -]) -def test_get_app_connector_rest_call_success(request_type): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/appConnectors/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = app_connectors_service.AppConnector( - name='name_value', - display_name='display_name_value', - uid='uid_value', - state=app_connectors_service.AppConnector.State.CREATING, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = app_connectors_service.AppConnector.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_app_connector(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, app_connectors_service.AppConnector) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.uid == 'uid_value' - assert response.state == app_connectors_service.AppConnector.State.CREATING - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_app_connector_rest_interceptors(null_interceptor): - transport = transports.AppConnectorsServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AppConnectorsServiceRestInterceptor(), - ) - client = AppConnectorsServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AppConnectorsServiceRestInterceptor, "post_get_app_connector") as post, \ - mock.patch.object(transports.AppConnectorsServiceRestInterceptor, "post_get_app_connector_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AppConnectorsServiceRestInterceptor, "pre_get_app_connector") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = app_connectors_service.GetAppConnectorRequest.pb(app_connectors_service.GetAppConnectorRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = app_connectors_service.AppConnector.to_json(app_connectors_service.AppConnector()) - req.return_value.content = return_value - - request = app_connectors_service.GetAppConnectorRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = app_connectors_service.AppConnector() - post_with_metadata.return_value = app_connectors_service.AppConnector(), metadata - - client.get_app_connector(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_app_connector_rest_bad_request(request_type=app_connectors_service.CreateAppConnectorRequest): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_app_connector(request) - - -@pytest.mark.parametrize("request_type", [ - app_connectors_service.CreateAppConnectorRequest, - dict, -]) -def test_create_app_connector_rest_call_success(request_type): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["app_connector"] = {'name': 'name_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'display_name': 'display_name_value', 'uid': 'uid_value', 'state': 1, 'principal_info': {'service_account': {'email': 'email_value'}}, 'resource_info': {'id': 'id_value', 'status': 1, 'resource': {'type_url': 'type.googleapis.com/google.protobuf.Duration', 'value': b'\x08\x0c\x10\xdb\x07'}, 'time': {}, 'sub': {}}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = app_connectors_service.CreateAppConnectorRequest.meta.fields["app_connector"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
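- # (For example, a proto-plus type such as AppConnector lists its
- # fields under meta.fields, while a vanilla protobuf type such as
- # google.protobuf.Timestamp only exposes DESCRIPTOR.fields; both
- # branches are handled below.)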
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["app_connector"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["app_connector"][field])): - del request_init["app_connector"][field][i][subfield] - else: - del request_init["app_connector"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_app_connector(request) - - # Establish that the response is the type that we expect. 
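- # For long-running methods the generated check stops at re-serializing
- # the Operation; the future returned by create_app_connector is not
- # resolved to its eventual result in this test.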
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_app_connector_rest_interceptors(null_interceptor): - transport = transports.AppConnectorsServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AppConnectorsServiceRestInterceptor(), - ) - client = AppConnectorsServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.AppConnectorsServiceRestInterceptor, "post_create_app_connector") as post, \ - mock.patch.object(transports.AppConnectorsServiceRestInterceptor, "post_create_app_connector_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AppConnectorsServiceRestInterceptor, "pre_create_app_connector") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = app_connectors_service.CreateAppConnectorRequest.pb(app_connectors_service.CreateAppConnectorRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = app_connectors_service.CreateAppConnectorRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.create_app_connector(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_app_connector_rest_bad_request(request_type=app_connectors_service.UpdateAppConnectorRequest): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'app_connector': {'name': 'projects/sample1/locations/sample2/appConnectors/sample3'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_app_connector(request) - - -@pytest.mark.parametrize("request_type", [ - app_connectors_service.UpdateAppConnectorRequest, - dict, -]) -def test_update_app_connector_rest_call_success(request_type): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'app_connector': {'name': 'projects/sample1/locations/sample2/appConnectors/sample3'}} - request_init["app_connector"] = {'name': 'projects/sample1/locations/sample2/appConnectors/sample3', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'display_name': 'display_name_value', 'uid': 'uid_value', 'state': 1, 'principal_info': {'service_account': {'email': 'email_value'}}, 'resource_info': {'id': 'id_value', 'status': 1, 'resource': {'type_url': 'type.googleapis.com/google.protobuf.Duration', 'value': b'\x08\x0c\x10\xdb\x07'}, 'time': {}, 'sub': {}}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = app_connectors_service.UpdateAppConnectorRequest.meta.fields["app_connector"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["app_connector"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["app_connector"][field])): - del request_init["app_connector"][field][i][subfield] - else: - del request_init["app_connector"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_app_connector(request) - - # Establish that the response is the type that we expect. 
-    json_return_value = json_format.MessageToJson(return_value)
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_update_app_connector_rest_interceptors(null_interceptor):
-    transport = transports.AppConnectorsServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.AppConnectorsServiceRestInterceptor(),
-        )
-    client = AppConnectorsServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
-        mock.patch.object(transports.AppConnectorsServiceRestInterceptor, "post_update_app_connector") as post, \
-        mock.patch.object(transports.AppConnectorsServiceRestInterceptor, "post_update_app_connector_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.AppConnectorsServiceRestInterceptor, "pre_update_app_connector") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = app_connectors_service.UpdateAppConnectorRequest.pb(app_connectors_service.UpdateAppConnectorRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = json_format.MessageToJson(operations_pb2.Operation())
-        req.return_value.content = return_value
-
-        request = app_connectors_service.UpdateAppConnectorRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = operations_pb2.Operation()
-        post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
-        client.update_app_connector(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_delete_app_connector_rest_bad_request(request_type=app_connectors_service.DeleteAppConnectorRequest):
-    client = AppConnectorsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/appConnectors/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.delete_app_connector(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    app_connectors_service.DeleteAppConnectorRequest,
-    dict,
-])
-def test_delete_app_connector_rest_call_success(request_type):
-    client = AppConnectorsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/appConnectors/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.delete_app_connector(request)
-
-    # Establish that the response is the type that we expect.
-    json_return_value = json_format.MessageToJson(return_value)
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_delete_app_connector_rest_interceptors(null_interceptor):
-    transport = transports.AppConnectorsServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.AppConnectorsServiceRestInterceptor(),
-        )
-    client = AppConnectorsServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
-        mock.patch.object(transports.AppConnectorsServiceRestInterceptor, "post_delete_app_connector") as post, \
-        mock.patch.object(transports.AppConnectorsServiceRestInterceptor, "post_delete_app_connector_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.AppConnectorsServiceRestInterceptor, "pre_delete_app_connector") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = app_connectors_service.DeleteAppConnectorRequest.pb(app_connectors_service.DeleteAppConnectorRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = json_format.MessageToJson(operations_pb2.Operation())
-        req.return_value.content = return_value
-
-        request = app_connectors_service.DeleteAppConnectorRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = operations_pb2.Operation()
-        post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
-        client.delete_app_connector(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_report_status_rest_bad_request(request_type=app_connectors_service.ReportStatusRequest):
-    client = AppConnectorsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'app_connector': 'projects/sample1/locations/sample2/appConnectors/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.report_status(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    app_connectors_service.ReportStatusRequest,
-    dict,
-])
-def test_report_status_rest_call_success(request_type):
-    client = AppConnectorsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'app_connector': 'projects/sample1/locations/sample2/appConnectors/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.report_status(request)
-
-    # Establish that the response is the type that we expect.
-    json_return_value = json_format.MessageToJson(return_value)
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_report_status_rest_interceptors(null_interceptor):
-    transport = transports.AppConnectorsServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.AppConnectorsServiceRestInterceptor(),
-        )
-    client = AppConnectorsServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
-        mock.patch.object(transports.AppConnectorsServiceRestInterceptor, "post_report_status") as post, \
-        mock.patch.object(transports.AppConnectorsServiceRestInterceptor, "post_report_status_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.AppConnectorsServiceRestInterceptor, "pre_report_status") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = app_connectors_service.ReportStatusRequest.pb(app_connectors_service.ReportStatusRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = json_format.MessageToJson(operations_pb2.Operation())
-        req.return_value.content = return_value
-
-        request = app_connectors_service.ReportStatusRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = operations_pb2.Operation()
-        post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
-        client.report_status(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest):
-    client = AppConnectorsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-    request = request_type()
-    request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = Request()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.get_location(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    locations_pb2.GetLocationRequest,
-    dict,
-])
-def test_get_location_rest(request_type):
-    client = AppConnectorsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    request_init = {'name': 'projects/sample1/locations/sample2'}
-    request = request_type(**request_init)
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # Designate an appropriate value for the returned response.
- return_value = locations_pb2.Location() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_location(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocationsRequest): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_locations(request) - - -@pytest.mark.parametrize("request_type", [ - locations_pb2.ListLocationsRequest, - dict, -]) -def test_list_locations_rest(request_type): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.ListLocationsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_locations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - - -def test_get_iam_policy_rest_bad_request(request_type=iam_policy_pb2.GetIamPolicyRequest): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/appConnections/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_iam_policy(request) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.GetIamPolicyRequest, - dict, -]) -def test_get_iam_policy_rest(request_type): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'resource': 'projects/sample1/locations/sample2/appConnections/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_iam_policy(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - -def test_set_iam_policy_rest_bad_request(request_type=iam_policy_pb2.SetIamPolicyRequest): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/appConnections/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.set_iam_policy(request) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.SetIamPolicyRequest, - dict, -]) -def test_set_iam_policy_rest(request_type): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'resource': 'projects/sample1/locations/sample2/appConnections/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = policy_pb2.Policy() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.set_iam_policy(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - -def test_test_iam_permissions_rest_bad_request(request_type=iam_policy_pb2.TestIamPermissionsRequest): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/appConnections/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.test_iam_permissions(request) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.TestIamPermissionsRequest, - dict, -]) -def test_test_iam_permissions_rest(request_type): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'resource': 'projects/sample1/locations/sample2/appConnections/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = iam_policy_pb2.TestIamPermissionsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.test_iam_permissions(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - -def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.cancel_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) -def test_cancel_operation_rest(request_type): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.DeleteOperationRequest, - dict, -]) -def test_delete_operation_rest(request_type): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) -def test_get_operation_rest(request_type): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_operations(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) -def test_list_operations_rest(request_type): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_operations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_initialize_client_w_rest(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_app_connectors_empty_call_rest(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_app_connectors), - '__call__') as call: - client.list_app_connectors(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = app_connectors_service.ListAppConnectorsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_app_connector_empty_call_rest(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_app_connector), - '__call__') as call: - client.get_app_connector(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = app_connectors_service.GetAppConnectorRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_app_connector_empty_call_rest(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_app_connector), - '__call__') as call: - client.create_app_connector(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = app_connectors_service.CreateAppConnectorRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_app_connector_empty_call_rest(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_app_connector), - '__call__') as call: - client.update_app_connector(request=None) - - # Establish that the underlying stub method was called. 
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = app_connectors_service.UpdateAppConnectorRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_delete_app_connector_empty_call_rest():
-    client = AppConnectorsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_app_connector),
-            '__call__') as call:
-        client.delete_app_connector(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = app_connectors_service.DeleteAppConnectorRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_report_status_empty_call_rest():
-    client = AppConnectorsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.report_status),
-            '__call__') as call:
-        client.report_status(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = app_connectors_service.ReportStatusRequest()
-
-        assert args[0] == request_msg
-
-
-def test_app_connectors_service_rest_lro_client():
-    client = AppConnectorsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-    transport = client.transport
-
-    # Ensure that we have an api-core operations client.
-    assert isinstance(
-        transport.operations_client,
-        operations_v1.AbstractOperationsClient,
-    )
-
-    # Ensure that subsequent calls to the property send the exact same object.
-    assert transport.operations_client is transport.operations_client
-
-def test_transport_grpc_default():
-    # A client should use the gRPC transport by default.
-    client = AppConnectorsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    assert isinstance(
-        client.transport,
-        transports.AppConnectorsServiceGrpcTransport,
-    )
-
-def test_app_connectors_service_base_transport_error():
-    # Passing both a credentials object and credentials_file should raise an error
-    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
-        transport = transports.AppConnectorsServiceTransport(
-            credentials=ga_credentials.AnonymousCredentials(),
-            credentials_file="credentials.json"
-        )
-
-
-def test_app_connectors_service_base_transport():
-    # Instantiate the base transport.
-    with mock.patch('google.cloud.beyondcorp_appconnectors_v1.services.app_connectors_service.transports.AppConnectorsServiceTransport.__init__') as Transport:
-        Transport.return_value = None
-        transport = transports.AppConnectorsServiceTransport(
-            credentials=ga_credentials.AnonymousCredentials(),
-        )
-
-    # Every method on the transport should just blindly
-    # raise NotImplementedError.
-    methods = (
-        'list_app_connectors',
-        'get_app_connector',
-        'create_app_connector',
-        'update_app_connector',
-        'delete_app_connector',
-        'report_status',
-        'set_iam_policy',
-        'get_iam_policy',
-        'test_iam_permissions',
-        'get_location',
-        'list_locations',
-        'get_operation',
-        'cancel_operation',
-        'delete_operation',
-        'list_operations',
-    )
-    for method in methods:
-        with pytest.raises(NotImplementedError):
-            getattr(transport, method)(request=object())
-
-    with pytest.raises(NotImplementedError):
-        transport.close()
-
-    # Additionally, the LRO client (a property) should
-    # also raise NotImplementedError
-    with pytest.raises(NotImplementedError):
-        transport.operations_client
-
-    # Catch all for all remaining methods and properties
-    remainder = [
-        'kind',
-    ]
-    for r in remainder:
-        with pytest.raises(NotImplementedError):
-            getattr(transport, r)()
-
-
-def test_app_connectors_service_base_transport_with_credentials_file():
-    # Instantiate the base transport with a credentials file
-    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.beyondcorp_appconnectors_v1.services.app_connectors_service.transports.AppConnectorsServiceTransport._prep_wrapped_messages') as Transport:
-        Transport.return_value = None
-        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
-        transport = transports.AppConnectorsServiceTransport(
-            credentials_file="credentials.json",
-            quota_project_id="octopus",
-        )
-        load_creds.assert_called_once_with("credentials.json",
-            scopes=None,
-            default_scopes=(
-                'https://www.googleapis.com/auth/cloud-platform',
-            ),
-            quota_project_id="octopus",
-        )
-
-
-def test_app_connectors_service_base_transport_with_adc():
-    # Test the default credentials are used if credentials and credentials_file are None.
-    with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.beyondcorp_appconnectors_v1.services.app_connectors_service.transports.AppConnectorsServiceTransport._prep_wrapped_messages') as Transport:
-        Transport.return_value = None
-        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
-        transport = transports.AppConnectorsServiceTransport()
-        adc.assert_called_once()
-
-
-def test_app_connectors_service_auth_adc():
-    # If no credentials are provided, we should use ADC credentials.
-    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
-        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
-        AppConnectorsServiceClient()
-        adc.assert_called_once_with(
-            scopes=None,
-            default_scopes=(
-                'https://www.googleapis.com/auth/cloud-platform',
-            ),
-            quota_project_id=None,
-        )
-
-
-@pytest.mark.parametrize(
-    "transport_class",
-    [
-        transports.AppConnectorsServiceGrpcTransport,
-        transports.AppConnectorsServiceGrpcAsyncIOTransport,
-    ],
-)
-def test_app_connectors_service_transport_auth_adc(transport_class):
-    # If credentials and host are not provided, the transport class should use
-    # ADC credentials.
-    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
-        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
-        transport_class(quota_project_id="octopus", scopes=["1", "2"])
-        adc.assert_called_once_with(
-            scopes=["1", "2"],
-            default_scopes=('https://www.googleapis.com/auth/cloud-platform',),
-            quota_project_id="octopus",
-        )
-
-
-@pytest.mark.parametrize(
-    "transport_class",
-    [
-        transports.AppConnectorsServiceGrpcTransport,
-        transports.AppConnectorsServiceGrpcAsyncIOTransport,
-        transports.AppConnectorsServiceRestTransport,
-    ],
-)
-def test_app_connectors_service_transport_auth_gdch_credentials(transport_class):
-    host = 'https://language.com'
-    api_audience_tests = [None, 'https://language2.com']
-    api_audience_expect = [host, 'https://language2.com']
-    for t, e in zip(api_audience_tests, api_audience_expect):
-        with mock.patch.object(google.auth, 'default', autospec=True) as adc:
-            gdch_mock = mock.MagicMock()
-            type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock)
-            adc.return_value = (gdch_mock, None)
-            transport_class(host=host, api_audience=t)
-            gdch_mock.with_gdch_audience.assert_called_once_with(
-                e
-            )
-
-
-@pytest.mark.parametrize(
-    "transport_class,grpc_helpers",
-    [
-        (transports.AppConnectorsServiceGrpcTransport, grpc_helpers),
-        (transports.AppConnectorsServiceGrpcAsyncIOTransport, grpc_helpers_async)
-    ],
-)
-def test_app_connectors_service_transport_create_channel(transport_class, grpc_helpers):
-    # If credentials and host are not provided, the transport class should use
-    # ADC credentials.
-    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object(
-        grpc_helpers, "create_channel", autospec=True
-    ) as create_channel:
-        creds = ga_credentials.AnonymousCredentials()
-        adc.return_value = (creds, None)
-        transport_class(
-            quota_project_id="octopus",
-            scopes=["1", "2"]
-        )
-
-        create_channel.assert_called_with(
-            "beyondcorp.googleapis.com:443",
-            credentials=creds,
-            credentials_file=None,
-            quota_project_id="octopus",
-            default_scopes=(
-                'https://www.googleapis.com/auth/cloud-platform',
-            ),
-            scopes=["1", "2"],
-            default_host="beyondcorp.googleapis.com",
-            ssl_credentials=None,
-            options=[
-                ("grpc.max_send_message_length", -1),
-                ("grpc.max_receive_message_length", -1),
-            ],
-        )
-
-
-@pytest.mark.parametrize("transport_class", [transports.AppConnectorsServiceGrpcTransport, transports.AppConnectorsServiceGrpcAsyncIOTransport])
-def test_app_connectors_service_grpc_transport_client_cert_source_for_mtls(
-    transport_class
-):
-    cred = ga_credentials.AnonymousCredentials()
-
-    # Check ssl_channel_credentials is used if provided.
-    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
-        mock_ssl_channel_creds = mock.Mock()
-        transport_class(
-            host="squid.clam.whelk",
-            credentials=cred,
-            ssl_channel_credentials=mock_ssl_channel_creds
-        )
-        mock_create_channel.assert_called_once_with(
-            "squid.clam.whelk:443",
-            credentials=cred,
-            credentials_file=None,
-            scopes=None,
-            ssl_credentials=mock_ssl_channel_creds,
-            quota_project_id=None,
-            options=[
-                ("grpc.max_send_message_length", -1),
-                ("grpc.max_receive_message_length", -1),
-            ],
-        )
-
-    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
-    # is used.
-    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
-        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
-            transport_class(
-                credentials=cred,
-                client_cert_source_for_mtls=client_cert_source_callback
-            )
-            expected_cert, expected_key = client_cert_source_callback()
-            mock_ssl_cred.assert_called_once_with(
-                certificate_chain=expected_cert,
-                private_key=expected_key
-            )
-
-def test_app_connectors_service_http_transport_client_cert_source_for_mtls():
-    cred = ga_credentials.AnonymousCredentials()
-    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
-        transports.AppConnectorsServiceRestTransport(
-            credentials=cred,
-            client_cert_source_for_mtls=client_cert_source_callback
-        )
-        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
-
-
-@pytest.mark.parametrize("transport_name", [
-    "grpc",
-    "grpc_asyncio",
-    "rest",
-])
-def test_app_connectors_service_host_no_port(transport_name):
-    client = AppConnectorsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        client_options=client_options.ClientOptions(api_endpoint='beyondcorp.googleapis.com'),
-        transport=transport_name,
-    )
-    assert client.transport._host == (
-        'beyondcorp.googleapis.com:443'
-        if transport_name in ['grpc', 'grpc_asyncio']
-        else 'https://beyondcorp.googleapis.com'
-    )
-
-@pytest.mark.parametrize("transport_name", [
-    "grpc",
-    "grpc_asyncio",
-    "rest",
-])
-def test_app_connectors_service_host_with_port(transport_name):
-    client = AppConnectorsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        client_options=client_options.ClientOptions(api_endpoint='beyondcorp.googleapis.com:8000'),
-        transport=transport_name,
-    )
-    assert client.transport._host == (
-        'beyondcorp.googleapis.com:8000'
-        if transport_name in ['grpc', 'grpc_asyncio']
-        else 'https://beyondcorp.googleapis.com:8000'
-    )
-
-@pytest.mark.parametrize("transport_name", [
-    "rest",
-])
-def test_app_connectors_service_client_transport_session_collision(transport_name):
-    creds1 = ga_credentials.AnonymousCredentials()
-    creds2 = ga_credentials.AnonymousCredentials()
-    client1 = AppConnectorsServiceClient(
-        credentials=creds1,
-        transport=transport_name,
-    )
-    client2 = AppConnectorsServiceClient(
-        credentials=creds2,
-        transport=transport_name,
-    )
-    session1 = client1.transport.list_app_connectors._session
-    session2 = client2.transport.list_app_connectors._session
-    assert session1 != session2
-    session1 = client1.transport.get_app_connector._session
-    session2 = client2.transport.get_app_connector._session
-    assert session1 != session2
-    session1 = client1.transport.create_app_connector._session
-    session2 = client2.transport.create_app_connector._session
-    assert session1 != session2
-    session1 = client1.transport.update_app_connector._session
-    session2 = client2.transport.update_app_connector._session
-    assert session1 != session2
-    session1 = client1.transport.delete_app_connector._session
-    session2 = client2.transport.delete_app_connector._session
-    assert session1 != session2
-    session1 = client1.transport.report_status._session
-    session2 = client2.transport.report_status._session
-    assert session1 != session2
-
-
-def test_app_connectors_service_grpc_transport_channel():
-    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
-    # Check that channel is used if provided.
-    transport = transports.AppConnectorsServiceGrpcTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials is None
-
-
-def test_app_connectors_service_grpc_asyncio_transport_channel():
-    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
-    # Check that channel is used if provided.
-    transport = transports.AppConnectorsServiceGrpcAsyncIOTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials is None
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.AppConnectorsServiceGrpcTransport, transports.AppConnectorsServiceGrpcAsyncIOTransport])
-def test_app_connectors_service_transport_channel_mtls_with_client_cert_source(
-    transport_class
-):
-    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
-        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
-            mock_ssl_cred = mock.Mock()
-            grpc_ssl_channel_cred.return_value = mock_ssl_cred
-
-            mock_grpc_channel = mock.Mock()
-            grpc_create_channel.return_value = mock_grpc_channel
-
-            cred = ga_credentials.AnonymousCredentials()
-            with pytest.warns(DeprecationWarning):
-                with mock.patch.object(google.auth, 'default') as adc:
-                    adc.return_value = (cred, None)
-                    transport = transport_class(
-                        host="squid.clam.whelk",
-                        api_mtls_endpoint="mtls.squid.clam.whelk",
-                        client_cert_source=client_cert_source_callback,
-                    )
-                    adc.assert_called_once()
-
-            grpc_ssl_channel_cred.assert_called_once_with(
-                certificate_chain=b"cert bytes", private_key=b"key bytes"
-            )
-            grpc_create_channel.assert_called_once_with(
-                "mtls.squid.clam.whelk:443",
-                credentials=cred,
-                credentials_file=None,
-                scopes=None,
-                ssl_credentials=mock_ssl_cred,
-                quota_project_id=None,
-                options=[
-                    ("grpc.max_send_message_length", -1),
-                    ("grpc.max_receive_message_length", -1),
-                ],
-            )
-            assert transport.grpc_channel == mock_grpc_channel
-            assert transport._ssl_channel_credentials == mock_ssl_cred
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.AppConnectorsServiceGrpcTransport, transports.AppConnectorsServiceGrpcAsyncIOTransport])
-def test_app_connectors_service_transport_channel_mtls_with_adc(
-    transport_class
-):
-    mock_ssl_cred = mock.Mock()
-    with mock.patch.multiple(
-        "google.auth.transport.grpc.SslCredentials",
-        __init__=mock.Mock(return_value=None),
-        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
-    ):
-        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
-            mock_grpc_channel = mock.Mock()
-            grpc_create_channel.return_value = mock_grpc_channel
-            mock_cred = mock.Mock()
-
-            with pytest.warns(DeprecationWarning):
-                transport = transport_class(
-                    host="squid.clam.whelk",
-                    credentials=mock_cred,
-                    api_mtls_endpoint="mtls.squid.clam.whelk",
-                    client_cert_source=None,
-                )
-
-            grpc_create_channel.assert_called_once_with(
-                "mtls.squid.clam.whelk:443",
-                credentials=mock_cred,
-                credentials_file=None,
-                scopes=None,
-                ssl_credentials=mock_ssl_cred,
-                quota_project_id=None,
-                options=[
-                    ("grpc.max_send_message_length", -1),
-                    ("grpc.max_receive_message_length", -1),
-                ],
-            )
-            assert transport.grpc_channel == mock_grpc_channel
-
-
-def test_app_connectors_service_grpc_lro_client():
-    client = AppConnectorsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-    transport = client.transport
-
-    # Ensure that we have an api-core operations client.
-    assert isinstance(
-        transport.operations_client,
-        operations_v1.OperationsClient,
-    )
-
-    # Ensure that subsequent calls to the property send the exact same object.
-    assert transport.operations_client is transport.operations_client
-
-
-def test_app_connectors_service_grpc_lro_async_client():
-    client = AppConnectorsServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc_asyncio',
-    )
-    transport = client.transport
-
-    # Ensure that we have an api-core operations client.
-    assert isinstance(
-        transport.operations_client,
-        operations_v1.OperationsAsyncClient,
-    )
-
-    # Ensure that subsequent calls to the property send the exact same object.
-    assert transport.operations_client is transport.operations_client
-
-
-def test_app_connector_path():
-    project = "squid"
-    location = "clam"
-    app_connector = "whelk"
-    expected = "projects/{project}/locations/{location}/appConnectors/{app_connector}".format(project=project, location=location, app_connector=app_connector, )
-    actual = AppConnectorsServiceClient.app_connector_path(project, location, app_connector)
-    assert expected == actual
-
-
-def test_parse_app_connector_path():
-    expected = {
-        "project": "octopus",
-        "location": "oyster",
-        "app_connector": "nudibranch",
-    }
-    path = AppConnectorsServiceClient.app_connector_path(**expected)
-
-    # Check that the path construction is reversible.
-    actual = AppConnectorsServiceClient.parse_app_connector_path(path)
-    assert expected == actual
-
-def test_common_billing_account_path():
-    billing_account = "cuttlefish"
-    expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, )
-    actual = AppConnectorsServiceClient.common_billing_account_path(billing_account)
-    assert expected == actual
-
-
-def test_parse_common_billing_account_path():
-    expected = {
-        "billing_account": "mussel",
-    }
-    path = AppConnectorsServiceClient.common_billing_account_path(**expected)
-
-    # Check that the path construction is reversible.
- actual = AppConnectorsServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "winkle" - expected = "folders/{folder}".format(folder=folder, ) - actual = AppConnectorsServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nautilus", - } - path = AppConnectorsServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = AppConnectorsServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "scallop" - expected = "organizations/{organization}".format(organization=organization, ) - actual = AppConnectorsServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "abalone", - } - path = AppConnectorsServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = AppConnectorsServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "squid" - expected = "projects/{project}".format(project=project, ) - actual = AppConnectorsServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "clam", - } - path = AppConnectorsServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = AppConnectorsServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "whelk" - location = "octopus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = AppConnectorsServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - } - path = AppConnectorsServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = AppConnectorsServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.AppConnectorsServiceTransport, '_prep_wrapped_messages') as prep: - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.AppConnectorsServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = AppConnectorsServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_delete_operation(transport: str = "grpc"): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_field_headers(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_delete_operation_from_dict(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_cancel_operation_from_dict(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_list_operations_field_headers(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_operations_from_dict(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_locations(transport: str = "grpc"): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - -def test_list_locations_field_headers(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() - - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_locations_from_dict(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_location(transport: str = "grpc"): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - -def test_get_location_field_headers(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials()) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = locations_pb2.Location() - - client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_location_field_headers_async(): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials() - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. 
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            locations_pb2.Location()
-        )
-        await client.get_location(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
-
-def test_get_location_from_dict():
-    client = AppConnectorsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    # Patch the get_location stub itself (not list_locations), since that is
-    # the method this test invokes.
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = locations_pb2.Location()
-
-        response = client.get_location(
-            request={
-                "name": "locations/abc",
-            }
-        )
-        call.assert_called()
-@pytest.mark.asyncio
-async def test_get_location_from_dict_async():
-    client = AppConnectorsServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            locations_pb2.Location()
-        )
-        response = await client.get_location(
-            request={
-                "name": "locations/abc",
-            }
-        )
-        call.assert_called()
-
-
-def test_set_iam_policy(transport: str = "grpc"):
-    client = AppConnectorsServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = iam_policy_pb2.SetIamPolicyRequest()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",)
-        response = client.set_iam_policy(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, policy_pb2.Policy)
-
-    assert response.version == 774
-
-    assert response.etag == b"etag_blob"
-@pytest.mark.asyncio
-async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
-    client = AppConnectorsServiceAsyncClient(
-        credentials=async_anonymous_credentials(), transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = iam_policy_pb2.SetIamPolicyRequest()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            policy_pb2.Policy(version=774, etag=b"etag_blob",)
-        )
-        response = await client.set_iam_policy(request)
-        # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - -def test_set_iam_policy_field_headers(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - - client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] -@pytest.mark.asyncio -async def test_set_iam_policy_field_headers_async(): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - - await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - -def test_set_iam_policy_from_dict(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - - response = client.set_iam_policy( - request={ - "resource": "resource_value", - "policy": policy_pb2.Policy(version=774), - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_set_iam_policy_from_dict_async(): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy() - ) - - response = await client.set_iam_policy( - request={ - "resource": "resource_value", - "policy": policy_pb2.Policy(version=774), - } - ) - call.assert_called() - -def test_get_iam_policy(transport: str = "grpc"): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.GetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) - - response = client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - - -@pytest.mark.asyncio -async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.GetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy(version=774, etag=b"etag_blob",) - ) - - response = await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - - -def test_get_iam_policy_field_headers(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_iam_policy_field_headers_async(): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - - await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -def test_get_iam_policy_from_dict(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - - response = client.get_iam_policy( - request={ - "resource": "resource_value", - "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_get_iam_policy_from_dict_async(): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy() - ) - - response = await client.get_iam_policy( - request={ - "resource": "resource_value", - "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - -def test_test_iam_permissions(transport: str = "grpc"): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=["permissions_value"], - ) - - response = client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - assert response.permissions == ["permissions_value"] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse(permissions=["permissions_value"],) - ) - - response = await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - assert response.permissions == ["permissions_value"] - - -def test_test_iam_permissions_field_headers(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_field_headers_async(): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse() - ) - - await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -def test_test_iam_permissions_from_dict(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - response = client.test_iam_permissions( - request={ - "resource": "resource_value", - "permissions": ["permissions_value"], - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_test_iam_permissions_from_dict_async(): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse() - ) - - response = await client.test_iam_permissions( - request={ - "resource": "resource_value", - "permissions": ["permissions_value"], - } - ) - call.assert_called() - - -def test_transport_close_grpc(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = AppConnectorsServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close_rest(): - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = AppConnectorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (AppConnectorsServiceClient, transports.AppConnectorsServiceGrpcTransport), - (AppConnectorsServiceAsyncClient, transports.AppConnectorsServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/.coveragerc b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/.coveragerc deleted file mode 100644 index 98132b6d6726..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/beyondcorp_appgateways/__init__.py - google/cloud/beyondcorp_appgateways/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/.flake8 b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/.flake8 deleted file mode 100644 index 29227d4cf419..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. 
-    **/.nox/**
-    __pycache__,
-    .git,
-    *.pyc,
-    conf.py
diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/MANIFEST.in b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/MANIFEST.in
deleted file mode 100644
index 0426ddd842ce..000000000000
--- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/MANIFEST.in
+++ /dev/null
@@ -1,2 +0,0 @@
-recursive-include google/cloud/beyondcorp_appgateways *.py
-recursive-include google/cloud/beyondcorp_appgateways_v1 *.py
diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/README.rst b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/README.rst
deleted file mode 100644
index ab15f1935256..000000000000
--- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/README.rst
+++ /dev/null
@@ -1,143 +0,0 @@
-Python Client for Google Cloud Beyondcorp Appgateways API
-==========================================================
-
-Quick Start
------------
-
-In order to use this library, you first need to go through the following steps:
-
-1. `Select or create a Cloud Platform project.`_
-2. `Enable billing for your project.`_
-3. Enable the Google Cloud Beyondcorp Appgateways API.
-4. `Setup Authentication.`_
-
-.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
-.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
-.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
-
-Installation
-~~~~~~~~~~~~
-
-Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
-create isolated Python environments. The basic problem it addresses is one of
-dependencies and versions, and indirectly permissions.
-
-With `virtualenv`_, it's possible to install this library without needing system
-install permissions, and without clashing with the installed system
-dependencies.
-
-.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
-
-
-Mac/Linux
-^^^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    source <your-env>/bin/activate
-    <your-env>/bin/pip install /path/to/library
-
-
-Windows
-^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    <your-env>\Scripts\activate
-    <your-env>\Scripts\pip.exe install \path\to\library
-
-
-Logging
--------
-
-This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes.
-Note the following:
-
-#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging.
-#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**.
-#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below.
-
-
-Simple, environment-based configuration
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google
-logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged
-messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging
-event.
-
-A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log.
-
-- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc.
-- Invalid logging scopes: :code:`foo`, :code:`123`, etc.
-
-**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers.
-
-
-Examples
-^^^^^^^^
-
-- Enabling the default handler for all Google-based loggers
-
-.. code-block:: console
-
-    export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google
-
-- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`):
-
-.. code-block:: console
-
-    export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1
-
-
-Advanced, code-based configuration
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-You can also configure a valid logging scope using Python's standard `logging` mechanism.
-
-
-Examples
-^^^^^^^^
-
-- Configuring a handler for all Google-based loggers
-
-.. code-block:: python
-
-    import logging
-
-    from google.cloud.translate_v3 import translate
-
-    base_logger = logging.getLogger("google")
-    base_logger.addHandler(logging.StreamHandler())
-    base_logger.setLevel(logging.DEBUG)
-
-- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`):
-
-.. code-block:: python
-
-    import logging
-
-    from google.cloud.translate_v3 import translate
-
-    base_logger = logging.getLogger("google.cloud.library_v1")
-    base_logger.addHandler(logging.StreamHandler())
-    base_logger.setLevel(logging.DEBUG)
-
-
-Logging details
-~~~~~~~~~~~~~~~
-
-#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root
-   logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set
-   :code:`logging.getLogger("google").propagate = True` in your code.
-#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for
-   one library, but decide you need to also set up environment-based logging configuration for another library.
-
-   #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual
-      if the code-based configuration gets applied first.
-
-#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get
-   executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured.
-   (This is the reason for 2.i. above.)
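The propagation note above is easy to make concrete. The following sketch (an illustrative addition, not part of the generated README) combines a code-based handler on the :code:`google` logger with the explicit opt-in that lets records continue on to the root logger's handlers:

.. code-block:: python

    import logging

    # Handle DEBUG-and-above events from all Google-based loggers.
    base_logger = logging.getLogger("google")
    base_logger.addHandler(logging.StreamHandler())
    base_logger.setLevel(logging.DEBUG)

    # Propagation to the root logger is disabled by default for this logger;
    # opt in explicitly if root-level handlers should also see these records.
    base_logger.propagate = True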
diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/docs/_static/custom.css b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/docs/_static/custom.css deleted file mode 100644 index 06423be0b592..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/docs/_static/custom.css +++ /dev/null @@ -1,3 +0,0 @@ -dl.field-list > dt { - min-width: 100px -} diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/docs/beyondcorp_appgateways_v1/app_gateways_service.rst b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/docs/beyondcorp_appgateways_v1/app_gateways_service.rst deleted file mode 100644 index 74fb439412c6..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/docs/beyondcorp_appgateways_v1/app_gateways_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -AppGatewaysService ------------------------------------- - -.. automodule:: google.cloud.beyondcorp_appgateways_v1.services.app_gateways_service - :members: - :inherited-members: - -.. automodule:: google.cloud.beyondcorp_appgateways_v1.services.app_gateways_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/docs/beyondcorp_appgateways_v1/services_.rst b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/docs/beyondcorp_appgateways_v1/services_.rst deleted file mode 100644 index 437164805c28..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/docs/beyondcorp_appgateways_v1/services_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Beyondcorp Appgateways v1 API -======================================================= -.. toctree:: - :maxdepth: 2 - - app_gateways_service diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/docs/beyondcorp_appgateways_v1/types_.rst b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/docs/beyondcorp_appgateways_v1/types_.rst deleted file mode 100644 index 1175a33e27df..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/docs/beyondcorp_appgateways_v1/types_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Beyondcorp Appgateways v1 API -==================================================== - -.. automodule:: google.cloud.beyondcorp_appgateways_v1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/docs/conf.py b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/docs/conf.py deleted file mode 100644 index 04ee1d4d4c96..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-beyondcorp-appgateways documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. 
-#
-# All configuration values have a default; values that are commented out
-# serve to show the default.
-
-import sys
-import os
-import shlex
-
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
-sys.path.insert(0, os.path.abspath(".."))
-
-__version__ = "0.1.0"
-
-# -- General configuration ------------------------------------------------
-
-# If your documentation needs a minimal Sphinx version, state it here.
-needs_sphinx = "4.0.1"
-
-# Add any Sphinx extension module names here, as strings. They can be
-# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
-# ones.
-extensions = [
-    "sphinx.ext.autodoc",
-    "sphinx.ext.autosummary",
-    "sphinx.ext.intersphinx",
-    "sphinx.ext.coverage",
-    "sphinx.ext.napoleon",
-    "sphinx.ext.todo",
-    "sphinx.ext.viewcode",
-]
-
-# autodoc/autosummary flags
-autoclass_content = "both"
-autodoc_default_flags = ["members"]
-autosummary_generate = True
-
-
-# Add any paths that contain templates here, relative to this directory.
-templates_path = ["_templates"]
-
-# Allow markdown includes (so releases.md can include CHANGELOG.md)
-# http://www.sphinx-doc.org/en/master/markdown.html
-source_parsers = {".md": "recommonmark.parser.CommonMarkParser"}
-
-# The suffix(es) of source filenames.
-# You can specify multiple suffixes as a list of strings:
-source_suffix = [".rst", ".md"]
-
-# The encoding of source files.
-# source_encoding = 'utf-8-sig'
-
-# The root toctree document.
-root_doc = "index"
-
-# General information about the project.
-project = u"google-cloud-beyondcorp-appgateways"
-copyright = u"2023, Google, LLC"
-author = u"Google APIs"  # TODO: autogenerate this bit
-
-# The version info for the project you're documenting, acts as replacement for
-# |version| and |release|, also used in various other places throughout the
-# built documents.
-#
-# The full version, including alpha/beta/rc tags.
-release = __version__
-# The short X.Y version.
-version = ".".join(release.split(".")[0:2])
-
-# The language for content autogenerated by Sphinx. Refer to documentation
-# for a list of supported languages.
-#
-# This is also used if you do content translation via gettext catalogs.
-# Usually you set "language" from the command line for these cases.
-language = 'en'
-
-# There are two options for replacing |today|: either, you set today to some
-# non-false value, then it is used:
-# today = ''
-# Else, today_fmt is used as the format for a strftime call.
-# today_fmt = '%B %d, %Y'
-
-# List of patterns, relative to source directory, that match files and
-# directories to ignore when looking for source files.
-exclude_patterns = ["_build"]
-
-# The reST default role (used for this markup: `text`) to use for all
-# documents.
-# default_role = None
-
-# If true, '()' will be appended to :func: etc. cross-reference text.
-# add_function_parentheses = True
-
-# If true, the current module name will be prepended to all description
-# unit titles (such as .. function::).
-# add_module_names = True
-
-# If true, sectionauthor and moduleauthor directives will be shown in the
-# output. They are ignored by default.
-# show_authors = False
-
-# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = "sphinx"
-
-# A list of ignored prefixes for module index sorting.
-# modindex_common_prefix = []
-
-# If true, keep warnings as "system message" paragraphs in the built documents.
-# keep_warnings = False
-
-# If true, `todo` and `todoList` produce output, else they produce nothing.
-todo_include_todos = True
-
-
-# -- Options for HTML output ----------------------------------------------
-
-# The theme to use for HTML and HTML Help pages. See the documentation for
-# a list of builtin themes.
-html_theme = "alabaster"
-
-# Theme options are theme-specific and customize the look and feel of a theme
-# further. For a list of options available for each theme, see the
-# documentation.
-html_theme_options = {
-    "description": "Google Cloud Client Libraries for Python",
-    "github_user": "googleapis",
-    "github_repo": "google-cloud-python",
-    "github_banner": True,
-    "font_family": "'Roboto', Georgia, sans",
-    "head_font_family": "'Roboto', Georgia, serif",
-    "code_font_family": "'Roboto Mono', 'Consolas', monospace",
-}
-
-# Add any paths that contain custom themes here, relative to this directory.
-# html_theme_path = []
-
-# The name for this set of Sphinx documents. If None, it defaults to
-# "<project> v<release> documentation".
-# html_title = None
-
-# A shorter title for the navigation bar. Default is the same as html_title.
-# html_short_title = None
-
-# The name of an image file (relative to this directory) to place at the top
-# of the sidebar.
-# html_logo = None
-
-# The name of an image file (within the static path) to use as favicon of the
-# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
-# pixels large.
-# html_favicon = None
-
-# Add any paths that contain custom static files (such as style sheets) here,
-# relative to this directory. They are copied after the builtin static files,
-# so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ["_static"]
-
-# Add any extra paths that contain custom files (such as robots.txt or
-# .htaccess) here, relative to this directory. These files are copied
-# directly to the root of the documentation.
-# html_extra_path = []
-
-# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
-# using the given strftime format.
-# html_last_updated_fmt = '%b %d, %Y'
-
-# If true, SmartyPants will be used to convert quotes and dashes to
-# typographically correct entities.
-# html_use_smartypants = True
-
-# Custom sidebar templates, maps document names to template names.
-# html_sidebars = {}
-
-# Additional templates that should be rendered to pages, maps page names to
-# template names.
-# html_additional_pages = {}
-
-# If false, no module index is generated.
-# html_domain_indices = True
-
-# If false, no index is generated.
-# html_use_index = True
-
-# If true, the index is split into individual pages for each letter.
-# html_split_index = False
-
-# If true, links to the reST sources are added to the pages.
-# html_show_sourcelink = True
-
-# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
-# html_show_sphinx = True
-
-# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
-# html_show_copyright = True
-
-# If true, an OpenSearch description file will be output, and all pages will
-# contain a <link> tag referring to it. The value of this option must be the
-# base URL from which the finished HTML is served.
-# html_use_opensearch = ''
-
-# This is the file name suffix for HTML files (e.g. ".xhtml").
-# html_file_suffix = None
-
-# Language to be used for generating the HTML full-text search index.
-# Sphinx supports the following languages:
-# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
-# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
-# html_search_language = 'en'
-
-# A dictionary with options for the search language support, empty by default.
-# Now only 'ja' uses this config value
-# html_search_options = {'type': 'default'}
-
-# The name of a javascript file (relative to the configuration directory) that
-# implements a search results scorer. If empty, the default will be used.
-# html_search_scorer = 'scorer.js'
-
-# Output file base name for HTML help builder.
-htmlhelp_basename = "google-cloud-beyondcorp-appgateways-doc"
-
-# -- Options for warnings ------------------------------------------------------
-
-
-suppress_warnings = [
-    # Temporarily suppress this to avoid "more than one target found for
-    # cross-reference" warnings, which are intractable for us to avoid while in
-    # a mono-repo.
-    # See https://github.com/sphinx-doc/sphinx/blob
-    # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843
-    "ref.python"
-]
-
-# -- Options for LaTeX output ---------------------------------------------
-
-latex_elements = {
-    # The paper size ('letterpaper' or 'a4paper').
-    # 'papersize': 'letterpaper',
-    # The font size ('10pt', '11pt' or '12pt').
-    # 'pointsize': '10pt',
-    # Additional stuff for the LaTeX preamble.
-    # 'preamble': '',
-    # Latex figure (float) alignment
-    # 'figure_align': 'htbp',
-}
-
-# Grouping the document tree into LaTeX files. List of tuples
-# (source start file, target name, title,
-# author, documentclass [howto, manual, or own class]).
-latex_documents = [
-    (
-        root_doc,
-        "google-cloud-beyondcorp-appgateways.tex",
-        u"google-cloud-beyondcorp-appgateways Documentation",
-        author,
-        "manual",
-    )
-]
-
-# The name of an image file (relative to this directory) to place at the top of
-# the title page.
-# latex_logo = None
-
-# For "manual" documents, if this is true, then toplevel headings are parts,
-# not chapters.
-# latex_use_parts = False
-
-# If true, show page references after internal links.
-# latex_show_pagerefs = False
-
-# If true, show URL addresses after external links.
-# latex_show_urls = False
-
-# Documents to append as an appendix to all manuals.
-# latex_appendices = []
-
-# If false, no module index is generated.
-# latex_domain_indices = True
-
-
-# -- Options for manual page output ---------------------------------------
-
-# One entry per manual page. List of tuples
-# (source start file, name, description, authors, manual section).
-man_pages = [
-    (
-        root_doc,
-        "google-cloud-beyondcorp-appgateways",
-        u"Google Cloud Beyondcorp Appgateways Documentation",
-        [author],
-        1,
-    )
-]
-
-# If true, show URL addresses after external links.
-# man_show_urls = False
-
-
-# -- Options for Texinfo output -------------------------------------------
-
-# Grouping the document tree into Texinfo files. List of tuples
-# (source start file, target name, title, author,
-# dir menu entry, description, category)
-texinfo_documents = [
-    (
-        root_doc,
-        "google-cloud-beyondcorp-appgateways",
-        u"google-cloud-beyondcorp-appgateways Documentation",
-        author,
-        "google-cloud-beyondcorp-appgateways",
-        "GAPIC library for Google Cloud Beyondcorp Appgateways API",
-        "APIs",
-    )
-]
-
-# Documents to append as an appendix to all manuals.
-# texinfo_appendices = []
-
-# If false, no module index is generated.
-# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/docs/index.rst b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/docs/index.rst deleted file mode 100644 index 0eab87592d94..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - beyondcorp_appgateways_v1/services_ - beyondcorp_appgateways_v1/types_ diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways/__init__.py deleted file mode 100644 index 16788c2fe94a..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways/__init__.py +++ /dev/null @@ -1,41 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.beyondcorp_appgateways import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.beyondcorp_appgateways_v1.services.app_gateways_service.client import AppGatewaysServiceClient -from google.cloud.beyondcorp_appgateways_v1.services.app_gateways_service.async_client import AppGatewaysServiceAsyncClient - -from google.cloud.beyondcorp_appgateways_v1.types.app_gateways_service import AppGateway -from google.cloud.beyondcorp_appgateways_v1.types.app_gateways_service import AppGatewayOperationMetadata -from google.cloud.beyondcorp_appgateways_v1.types.app_gateways_service import CreateAppGatewayRequest -from google.cloud.beyondcorp_appgateways_v1.types.app_gateways_service import DeleteAppGatewayRequest -from google.cloud.beyondcorp_appgateways_v1.types.app_gateways_service import GetAppGatewayRequest -from google.cloud.beyondcorp_appgateways_v1.types.app_gateways_service import ListAppGatewaysRequest -from google.cloud.beyondcorp_appgateways_v1.types.app_gateways_service import ListAppGatewaysResponse - -__all__ = ('AppGatewaysServiceClient', - 'AppGatewaysServiceAsyncClient', - 'AppGateway', - 'AppGatewayOperationMetadata', - 'CreateAppGatewayRequest', - 'DeleteAppGatewayRequest', - 'GetAppGatewayRequest', - 'ListAppGatewaysRequest', - 'ListAppGatewaysResponse', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways/gapic_version.py b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways/py.typed b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways/py.typed deleted file mode 100644 index 7ebf96065350..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-beyondcorp-appgateways package uses inline types. 
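The package-level ``__init__.py`` deleted above re-exports the v1 client classes and request/response types, so callers can import them from :code:`google.cloud.beyondcorp_appgateways` directly. A minimal usage sketch of that exported surface, assuming application-default credentials are available; the project and location values below are placeholders:

.. code-block:: python

    from google.cloud.beyondcorp_appgateways import (
        AppGatewaysServiceClient,
        ListAppGatewaysRequest,
    )

    # Credentials are resolved from the environment; the parent value is a
    # placeholder to be replaced with a real project and location.
    client = AppGatewaysServiceClient()
    request = ListAppGatewaysRequest(
        parent="projects/my-project/locations/us-central1",
    )

    # The returned pager resolves additional pages transparently while iterating.
    for app_gateway in client.list_app_gateways(request=request):
        print(app_gateway.name)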
diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/__init__.py deleted file mode 100644 index 5f551194a578..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/__init__.py +++ /dev/null @@ -1,42 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.beyondcorp_appgateways_v1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.app_gateways_service import AppGatewaysServiceClient -from .services.app_gateways_service import AppGatewaysServiceAsyncClient - -from .types.app_gateways_service import AppGateway -from .types.app_gateways_service import AppGatewayOperationMetadata -from .types.app_gateways_service import CreateAppGatewayRequest -from .types.app_gateways_service import DeleteAppGatewayRequest -from .types.app_gateways_service import GetAppGatewayRequest -from .types.app_gateways_service import ListAppGatewaysRequest -from .types.app_gateways_service import ListAppGatewaysResponse - -__all__ = ( - 'AppGatewaysServiceAsyncClient', -'AppGateway', -'AppGatewayOperationMetadata', -'AppGatewaysServiceClient', -'CreateAppGatewayRequest', -'DeleteAppGatewayRequest', -'GetAppGatewayRequest', -'ListAppGatewaysRequest', -'ListAppGatewaysResponse', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/gapic_metadata.json b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/gapic_metadata.json deleted file mode 100644 index 8a880dc69b3c..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/gapic_metadata.json +++ /dev/null @@ -1,88 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.beyondcorp_appgateways_v1", - "protoPackage": "google.cloud.beyondcorp.appgateways.v1", - "schema": "1.0", - "services": { - "AppGatewaysService": { - "clients": { - "grpc": { - "libraryClient": "AppGatewaysServiceClient", - "rpcs": { - "CreateAppGateway": { - "methods": [ - "create_app_gateway" - ] - }, - "DeleteAppGateway": { - "methods": [ - "delete_app_gateway" - ] - }, - "GetAppGateway": { - "methods": [ - "get_app_gateway" - ] - }, - "ListAppGateways": { - "methods": [ - "list_app_gateways" - ] - } - } - }, - "grpc-async": { - "libraryClient": "AppGatewaysServiceAsyncClient", - "rpcs": { - "CreateAppGateway": { - "methods": [ - "create_app_gateway" - ] - }, - "DeleteAppGateway": { - "methods": [ - "delete_app_gateway" - ] - }, - "GetAppGateway": { - "methods": [ - "get_app_gateway" - ] - }, - "ListAppGateways": { - "methods": [ - "list_app_gateways" - ] - } - } - }, - "rest": { - "libraryClient": 
"AppGatewaysServiceClient", - "rpcs": { - "CreateAppGateway": { - "methods": [ - "create_app_gateway" - ] - }, - "DeleteAppGateway": { - "methods": [ - "delete_app_gateway" - ] - }, - "GetAppGateway": { - "methods": [ - "get_app_gateway" - ] - }, - "ListAppGateways": { - "methods": [ - "list_app_gateways" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/gapic_version.py b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/py.typed b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/py.typed deleted file mode 100644 index 7ebf96065350..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-beyondcorp-appgateways package uses inline types. diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/services/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/services/__init__.py deleted file mode 100644 index 8f6cf068242c..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/__init__.py deleted file mode 100644 index 1eedd4fd8223..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import AppGatewaysServiceClient -from .async_client import AppGatewaysServiceAsyncClient - -__all__ = ( - 'AppGatewaysServiceClient', - 'AppGatewaysServiceAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/async_client.py b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/async_client.py deleted file mode 100644 index 6211b879ecf8..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/async_client.py +++ /dev/null @@ -1,1404 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.beyondcorp_appgateways_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.beyondcorp_appgateways_v1.services.app_gateways_service import pagers -from google.cloud.beyondcorp_appgateways_v1.types import app_gateways_service -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import AppGatewaysServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import AppGatewaysServiceGrpcAsyncIOTransport -from .client import AppGatewaysServiceClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -class AppGatewaysServiceAsyncClient: - """API Overview: - - The ``beyondcorp.googleapis.com`` service implements the Google - Cloud BeyondCorp API. - - Data Model: - - The AppGatewaysService exposes the following resources: - - - AppGateways, named as follows: - ``projects/{project_id}/locations/{location_id}/appGateways/{app_gateway_id}``. - - The AppGatewaysService service provides methods to manage - (create/read/update/delete) BeyondCorp AppGateways. - """ - - _client: AppGatewaysServiceClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
-    DEFAULT_ENDPOINT = AppGatewaysServiceClient.DEFAULT_ENDPOINT
-    DEFAULT_MTLS_ENDPOINT = AppGatewaysServiceClient.DEFAULT_MTLS_ENDPOINT
-    _DEFAULT_ENDPOINT_TEMPLATE = AppGatewaysServiceClient._DEFAULT_ENDPOINT_TEMPLATE
-    _DEFAULT_UNIVERSE = AppGatewaysServiceClient._DEFAULT_UNIVERSE
-
-    app_gateway_path = staticmethod(AppGatewaysServiceClient.app_gateway_path)
-    parse_app_gateway_path = staticmethod(AppGatewaysServiceClient.parse_app_gateway_path)
-    common_billing_account_path = staticmethod(AppGatewaysServiceClient.common_billing_account_path)
-    parse_common_billing_account_path = staticmethod(AppGatewaysServiceClient.parse_common_billing_account_path)
-    common_folder_path = staticmethod(AppGatewaysServiceClient.common_folder_path)
-    parse_common_folder_path = staticmethod(AppGatewaysServiceClient.parse_common_folder_path)
-    common_organization_path = staticmethod(AppGatewaysServiceClient.common_organization_path)
-    parse_common_organization_path = staticmethod(AppGatewaysServiceClient.parse_common_organization_path)
-    common_project_path = staticmethod(AppGatewaysServiceClient.common_project_path)
-    parse_common_project_path = staticmethod(AppGatewaysServiceClient.parse_common_project_path)
-    common_location_path = staticmethod(AppGatewaysServiceClient.common_location_path)
-    parse_common_location_path = staticmethod(AppGatewaysServiceClient.parse_common_location_path)
-
-    @classmethod
-    def from_service_account_info(cls, info: dict, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            info.
-
-        Args:
-            info (dict): The service account private key info.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            AppGatewaysServiceAsyncClient: The constructed client.
-        """
-        return AppGatewaysServiceClient.from_service_account_info.__func__(AppGatewaysServiceAsyncClient, info, *args, **kwargs)  # type: ignore
-
-    @classmethod
-    def from_service_account_file(cls, filename: str, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            file.
-
-        Args:
-            filename (str): The path to the service account private key json
-                file.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            AppGatewaysServiceAsyncClient: The constructed client.
-        """
-        return AppGatewaysServiceClient.from_service_account_file.__func__(AppGatewaysServiceAsyncClient, filename, *args, **kwargs)  # type: ignore
-
-    from_service_account_json = from_service_account_file
-
-    @classmethod
-    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None):
-        """Return the API endpoint and client cert source for mutual TLS.
-
-        The client cert source is determined in the following order:
-        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
-        client cert source is None.
-        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
-        default client cert source exists, use the default one; otherwise the client cert
-        source is None.
-
-        The API endpoint is determined in the following order:
-        (1) if `client_options.api_endpoint` is provided, use the provided one.
-        (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
-        default mTLS endpoint; if the environment variable is "never", use the default API
-        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
-        use the default API endpoint.
-
-        More details can be found at https://google.aip.dev/auth/4114.
-
-        Args:
-            client_options (google.api_core.client_options.ClientOptions): Custom options for the
-                client. Only the `api_endpoint` and `client_cert_source` properties may be used
-                in this method.
-
-        Returns:
-            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
-                client cert source to use.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
-        """
-        return AppGatewaysServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
-
-    @property
-    def transport(self) -> AppGatewaysServiceTransport:
-        """Returns the transport used by the client instance.
-
-        Returns:
-            AppGatewaysServiceTransport: The transport used by the client instance.
-        """
-        return self._client.transport
-
-    @property
-    def api_endpoint(self):
-        """Return the API endpoint used by the client instance.
-
-        Returns:
-            str: The API endpoint used by the client instance.
-        """
-        return self._client._api_endpoint
-
-    @property
-    def universe_domain(self) -> str:
-        """Return the universe domain used by the client instance.
-
-        Returns:
-            str: The universe domain used
-                by the client instance.
-        """
-        return self._client._universe_domain
-
-    get_transport_class = AppGatewaysServiceClient.get_transport_class
-
-    def __init__(self, *,
-            credentials: Optional[ga_credentials.Credentials] = None,
-            transport: Optional[Union[str, AppGatewaysServiceTransport, Callable[..., AppGatewaysServiceTransport]]] = "grpc_asyncio",
-            client_options: Optional[ClientOptions] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            ) -> None:
-        """Instantiates the app gateways service async client.
-
-        Args:
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-            transport (Optional[Union[str,AppGatewaysServiceTransport,Callable[..., AppGatewaysServiceTransport]]]):
-                The transport to use, or a Callable that constructs and returns a new transport to use.
-                If a Callable is given, it will be called with the same set of initialization
-                arguments as used in the AppGatewaysServiceTransport constructor.
-                If set to None, a transport is chosen automatically.
-            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
-                Custom options for the client.
-
-                1. The ``api_endpoint`` property can be used to override the
-                default endpoint provided by the client when ``transport`` is
-                not explicitly provided. Only if this property is not set and
-                ``transport`` was not explicitly provided, the endpoint is
-                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
-                variable, which can have one of the following values:
-                "always" (always use the default mTLS endpoint), "never" (always
-                use the default regular endpoint) and "auto" (auto-switch to the
-                default mTLS endpoint if client certificate is present; this is
-                the default value).
-
-                2.
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = AppGatewaysServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.beyondcorp.appgateways_v1.AppGatewaysServiceAsyncClient`.", - extra = { - "serviceName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { - "serviceName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService", - "credentialsType": None, - } - ) - - async def list_app_gateways(self, - request: Optional[Union[app_gateways_service.ListAppGatewaysRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListAppGatewaysAsyncPager: - r"""Lists AppGateways in a given project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_appgateways_v1 - - async def sample_list_app_gateways(): - # Create a client - client = beyondcorp_appgateways_v1.AppGatewaysServiceAsyncClient() - - # Initialize request argument(s) - request = beyondcorp_appgateways_v1.ListAppGatewaysRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_app_gateways(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.beyondcorp_appgateways_v1.types.ListAppGatewaysRequest, dict]]): - The request object. Request message for - BeyondCorp.ListAppGateways. - parent (:class:`str`): - Required. 
The resource name of the AppGateway location - using the form: - ``projects/{project_id}/locations/{location_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.beyondcorp_appgateways_v1.services.app_gateways_service.pagers.ListAppGatewaysAsyncPager: - Response message for - BeyondCorp.ListAppGateways. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, app_gateways_service.ListAppGatewaysRequest): - request = app_gateways_service.ListAppGatewaysRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_app_gateways] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListAppGatewaysAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_app_gateway(self, - request: Optional[Union[app_gateways_service.GetAppGatewayRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> app_gateways_service.AppGateway: - r"""Gets details of a single AppGateway. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_appgateways_v1 - - async def sample_get_app_gateway(): - # Create a client - client = beyondcorp_appgateways_v1.AppGatewaysServiceAsyncClient() - - # Initialize request argument(s) - request = beyondcorp_appgateways_v1.GetAppGatewayRequest( - name="name_value", - ) - - # Make the request - response = await client.get_app_gateway(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.beyondcorp_appgateways_v1.types.GetAppGatewayRequest, dict]]): - The request object. Request message for - BeyondCorp.GetAppGateway. - name (:class:`str`): - Required. BeyondCorp AppGateway name using the form: - ``projects/{project_id}/locations/{location_id}/appGateways/{app_gateway_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.beyondcorp_appgateways_v1.types.AppGateway: - A BeyondCorp AppGateway resource - represents a BeyondCorp protected - AppGateway to a remote application. It - creates all the necessary GCP components - needed for creating a BeyondCorp - protected AppGateway. Multiple - connectors can be authorised for a - single AppGateway. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, app_gateways_service.GetAppGatewayRequest): - request = app_gateways_service.GetAppGatewayRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_app_gateway] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def create_app_gateway(self, - request: Optional[Union[app_gateways_service.CreateAppGatewayRequest, dict]] = None, - *, - parent: Optional[str] = None, - app_gateway: Optional[app_gateways_service.AppGateway] = None, - app_gateway_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates a new AppGateway in a given project and - location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_appgateways_v1 - - async def sample_create_app_gateway(): - # Create a client - client = beyondcorp_appgateways_v1.AppGatewaysServiceAsyncClient() - - # Initialize request argument(s) - app_gateway = beyondcorp_appgateways_v1.AppGateway() - app_gateway.name = "name_value" - app_gateway.type_ = "TCP_PROXY" - app_gateway.host_type = "GCP_REGIONAL_MIG" - - request = beyondcorp_appgateways_v1.CreateAppGatewayRequest( - parent="parent_value", - app_gateway=app_gateway, - ) - - # Make the request - operation = client.create_app_gateway(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.beyondcorp_appgateways_v1.types.CreateAppGatewayRequest, dict]]): - The request object. Request message for - BeyondCorp.CreateAppGateway. - parent (:class:`str`): - Required. The resource project name of the AppGateway - location using the form: - ``projects/{project_id}/locations/{location_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - app_gateway (:class:`google.cloud.beyondcorp_appgateways_v1.types.AppGateway`): - Required. A BeyondCorp AppGateway - resource. - - This corresponds to the ``app_gateway`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - app_gateway_id (:class:`str`): - Optional. User-settable AppGateway resource ID. - - - Must start with a letter. - - Must contain between 4-63 characters from - ``/[a-z][0-9]-/``. - - Must end with a number or a letter. - - This corresponds to the ``app_gateway_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. 
- - The result type for the operation will be :class:`google.cloud.beyondcorp_appgateways_v1.types.AppGateway` A BeyondCorp AppGateway resource represents a BeyondCorp protected AppGateway - to a remote application. It creates all the necessary - GCP components needed for creating a BeyondCorp - protected AppGateway. Multiple connectors can be - authorised for a single AppGateway. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, app_gateway, app_gateway_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, app_gateways_service.CreateAppGatewayRequest): - request = app_gateways_service.CreateAppGatewayRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if app_gateway is not None: - request.app_gateway = app_gateway - if app_gateway_id is not None: - request.app_gateway_id = app_gateway_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_app_gateway] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - app_gateways_service.AppGateway, - metadata_type=app_gateways_service.AppGatewayOperationMetadata, - ) - - # Done; return the response. - return response - - async def delete_app_gateway(self, - request: Optional[Union[app_gateways_service.DeleteAppGatewayRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes a single AppGateway. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_appgateways_v1 - - async def sample_delete_app_gateway(): - # Create a client - client = beyondcorp_appgateways_v1.AppGatewaysServiceAsyncClient() - - # Initialize request argument(s) - request = beyondcorp_appgateways_v1.DeleteAppGatewayRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_app_gateway(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.beyondcorp_appgateways_v1.types.DeleteAppGatewayRequest, dict]]): - The request object. Request message for - BeyondCorp.DeleteAppGateway. - name (:class:`str`): - Required. BeyondCorp AppGateway name using the form: - ``projects/{project_id}/locations/{location_id}/appGateways/{app_gateway_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, app_gateways_service.DeleteAppGatewayRequest): - request = app_gateways_service.DeleteAppGatewayRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_app_gateway] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=app_gateways_service.AppGatewayOperationMetadata, - ) - - # Done; return the response. - return response - - async def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
- if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def set_iam_policy( - self, - request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM access control policy on the specified function. - - Replaces any existing policy. - - Args: - request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): - The request object. Request message for `SetIamPolicy` - method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. 
-
-                **JSON Example**
-
-                ::
-
-                    {
-                      "bindings": [
-                        {
-                          "role": "roles/resourcemanager.organizationAdmin",
-                          "members": [
-                            "user:mike@example.com",
-                            "group:admins@example.com",
-                            "domain:google.com",
-                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
-                          ]
-                        },
-                        {
-                          "role": "roles/resourcemanager.organizationViewer",
-                          "members": ["user:eve@example.com"],
-                          "condition": {
-                            "title": "expirable access",
-                            "description": "Does not grant access after Sep 2020",
-                            "expression": "request.time <
-                            timestamp('2020-10-01T00:00:00.000Z')",
-                          }
-                        }
-                      ]
-                    }
-
-                **YAML Example**
-
-                ::
-
-                    bindings:
-                    - members:
-                      - user:mike@example.com
-                      - group:admins@example.com
-                      - domain:google.com
-                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
-                      role: roles/resourcemanager.organizationAdmin
-                    - members:
-                      - user:eve@example.com
-                      role: roles/resourcemanager.organizationViewer
-                      condition:
-                        title: expirable access
-                        description: Does not grant access after Sep 2020
-                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
-
-                For a description of IAM and its features, see the `IAM
-                developer's
-                guide <https://cloud.google.com/iam/docs>`__.
-        """
-        # Create or coerce a protobuf request object.
-
-        # The request isn't a proto-plus wrapped type,
-        # so it must be constructed via keyword expansion.
-        if isinstance(request, dict):
-            request = iam_policy_pb2.SetIamPolicyRequest(**request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self.transport._wrapped_methods[self._client._transport.set_iam_policy]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("resource", request.resource),)),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request, retry=retry, timeout=timeout, metadata=metadata,)
-
-        # Done; return the response.
-        return response
-
-    async def get_iam_policy(
-        self,
-        request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None,
-        *,
-        retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-    ) -> policy_pb2.Policy:
-        r"""Gets the IAM access control policy for a function.
-
-        Returns an empty policy if the function exists and does not have a
-        policy set.
-
-        Args:
-            request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`):
-                The request object. Request message for `GetIamPolicy`
-                method.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if
-                any, should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-        Returns:
-            ~.policy_pb2.Policy:
-                Defines an Identity and Access Management (IAM) policy.
-                It is used to specify access control policies for Cloud
-                Platform resources.
-                A ``Policy`` is a collection of ``bindings``. A
-                ``binding`` binds one or more ``members`` to a single
-                ``role``. Members can be user accounts, service
-                accounts, Google groups, and domains (such as G Suite).
-                A ``role`` is a named list of permissions (defined by
-                IAM or configured by users).
A ``binding`` can
-                optionally specify a ``condition``, which is a logic
-                expression that further constrains the role binding
-                based on attributes about the request and/or target
-                resource.
-
-                **JSON Example**
-
-                ::
-
-                    {
-                      "bindings": [
-                        {
-                          "role": "roles/resourcemanager.organizationAdmin",
-                          "members": [
-                            "user:mike@example.com",
-                            "group:admins@example.com",
-                            "domain:google.com",
-                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
-                          ]
-                        },
-                        {
-                          "role": "roles/resourcemanager.organizationViewer",
-                          "members": ["user:eve@example.com"],
-                          "condition": {
-                            "title": "expirable access",
-                            "description": "Does not grant access after Sep 2020",
-                            "expression": "request.time <
-                            timestamp('2020-10-01T00:00:00.000Z')",
-                          }
-                        }
-                      ]
-                    }
-
-                **YAML Example**
-
-                ::
-
-                    bindings:
-                    - members:
-                      - user:mike@example.com
-                      - group:admins@example.com
-                      - domain:google.com
-                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
-                      role: roles/resourcemanager.organizationAdmin
-                    - members:
-                      - user:eve@example.com
-                      role: roles/resourcemanager.organizationViewer
-                      condition:
-                        title: expirable access
-                        description: Does not grant access after Sep 2020
-                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
-
-                For a description of IAM and its features, see the `IAM
-                developer's
-                guide <https://cloud.google.com/iam/docs>`__.
-        """
-        # Create or coerce a protobuf request object.
-
-        # The request isn't a proto-plus wrapped type,
-        # so it must be constructed via keyword expansion.
-        if isinstance(request, dict):
-            request = iam_policy_pb2.GetIamPolicyRequest(**request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self.transport._wrapped_methods[self._client._transport.get_iam_policy]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("resource", request.resource),)),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request, retry=retry, timeout=timeout, metadata=metadata,)
-
-        # Done; return the response.
-        return response
-
-    async def test_iam_permissions(
-        self,
-        request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None,
-        *,
-        retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-    ) -> iam_policy_pb2.TestIamPermissionsResponse:
-        r"""Tests the specified IAM permissions against the IAM access control
-        policy for a function.
-
-        If the function does not exist, this will return an empty set
-        of permissions, not a NOT_FOUND error.
-
-        Args:
-            request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`):
-                The request object. Request message for
-                `TestIamPermissions` method.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors,
-                if any, should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-        Returns:
-            ~.iam_policy_pb2.TestIamPermissionsResponse:
-                Response message for ``TestIamPermissions`` method.
-        """
-        # Create or coerce a protobuf request object.
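-        # For example (resource and permission names are hypothetical), a dict
-        # like {"resource": "projects/p/locations/l/appGateways/g",
-        # "permissions": ["beyondcorp.appGateways.get"]} is expanded below into
-        # an iam_policy_pb2.TestIamPermissionsRequest.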
- - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.test_iam_permissions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def __aenter__(self) -> "AppGatewaysServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "AppGatewaysServiceAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/client.py b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/client.py deleted file mode 100644 index 84020867b6e7..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/client.py +++ /dev/null @@ -1,1785 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.beyondcorp_appgateways_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.beyondcorp_appgateways_v1.services.app_gateways_service import pagers -from google.cloud.beyondcorp_appgateways_v1.types import app_gateways_service -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import AppGatewaysServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import AppGatewaysServiceGrpcTransport -from .transports.grpc_asyncio import AppGatewaysServiceGrpcAsyncIOTransport -from .transports.rest import AppGatewaysServiceRestTransport - - -class AppGatewaysServiceClientMeta(type): - """Metaclass for the AppGatewaysService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[AppGatewaysServiceTransport]] - _transport_registry["grpc"] = AppGatewaysServiceGrpcTransport - _transport_registry["grpc_asyncio"] = AppGatewaysServiceGrpcAsyncIOTransport - _transport_registry["rest"] = AppGatewaysServiceRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[AppGatewaysServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). 
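-        # With the registry above this resolves to
-        # AppGatewaysServiceGrpcTransport, since "grpc" is registered first.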
-        return next(iter(cls._transport_registry.values()))
-
-
-class AppGatewaysServiceClient(metaclass=AppGatewaysServiceClientMeta):
-    """API Overview:
-
-    The ``beyondcorp.googleapis.com`` service implements the Google
-    Cloud BeyondCorp API.
-
-    Data Model:
-
-    The AppGatewaysService exposes the following resources:
-
-    -  AppGateways, named as follows:
-       ``projects/{project_id}/locations/{location_id}/appGateways/{app_gateway_id}``.
-
-    The AppGatewaysService service provides methods to manage
-    (create/read/update/delete) BeyondCorp AppGateways.
-    """
-
-    @staticmethod
-    def _get_default_mtls_endpoint(api_endpoint):
-        """Converts api endpoint to mTLS endpoint.
-
-        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
-        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
-        Args:
-            api_endpoint (Optional[str]): the api endpoint to convert.
-        Returns:
-            str: converted mTLS api endpoint.
-        """
-        if not api_endpoint:
-            return api_endpoint
-
-        mtls_endpoint_re = re.compile(
-            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
-        )
-
-        m = mtls_endpoint_re.match(api_endpoint)
-        name, mtls, sandbox, googledomain = m.groups()
-        if mtls or not googledomain:
-            return api_endpoint
-
-        if sandbox:
-            return api_endpoint.replace(
-                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
-            )
-
-        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
-
-    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
-    DEFAULT_ENDPOINT = "beyondcorp.googleapis.com"
-    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
-        DEFAULT_ENDPOINT
-    )
-
-    _DEFAULT_ENDPOINT_TEMPLATE = "beyondcorp.{UNIVERSE_DOMAIN}"
-    _DEFAULT_UNIVERSE = "googleapis.com"
-
-    @classmethod
-    def from_service_account_info(cls, info: dict, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            info.
-
-        Args:
-            info (dict): The service account private key info.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            AppGatewaysServiceClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_info(info)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    @classmethod
-    def from_service_account_file(cls, filename: str, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            file.
-
-        Args:
-            filename (str): The path to the service account private key json
-                file.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            AppGatewaysServiceClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_file(
-            filename)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    from_service_account_json = from_service_account_file
-
-    @property
-    def transport(self) -> AppGatewaysServiceTransport:
-        """Returns the transport used by the client instance.
-
-        Returns:
-            AppGatewaysServiceTransport: The transport used by the client
-                instance.
-        """
-        return self._transport
-
-    @staticmethod
-    def app_gateway_path(project: str,location: str,app_gateway: str,) -> str:
-        """Returns a fully-qualified app_gateway string."""
-        return "projects/{project}/locations/{location}/appGateways/{app_gateway}".format(project=project, location=location, app_gateway=app_gateway, )
-
-    @staticmethod
-    def parse_app_gateway_path(path: str) -> Dict[str,str]:
-        """Parses an app_gateway path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/appGateways/(?P<app_gateway>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_billing_account_path(billing_account: str, ) -> str:
-        """Returns a fully-qualified billing_account string."""
-        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
-
-    @staticmethod
-    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
-        """Parse a billing_account path into its component segments."""
-        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_folder_path(folder: str, ) -> str:
-        """Returns a fully-qualified folder string."""
-        return "folders/{folder}".format(folder=folder, )
-
-    @staticmethod
-    def parse_common_folder_path(path: str) -> Dict[str,str]:
-        """Parse a folder path into its component segments."""
-        m = re.match(r"^folders/(?P<folder>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_organization_path(organization: str, ) -> str:
-        """Returns a fully-qualified organization string."""
-        return "organizations/{organization}".format(organization=organization, )
-
-    @staticmethod
-    def parse_common_organization_path(path: str) -> Dict[str,str]:
-        """Parse an organization path into its component segments."""
-        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_project_path(project: str, ) -> str:
-        """Returns a fully-qualified project string."""
-        return "projects/{project}".format(project=project, )
-
-    @staticmethod
-    def parse_common_project_path(path: str) -> Dict[str,str]:
-        """Parse a project path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_location_path(project: str, location: str, ) -> str:
-        """Returns a fully-qualified location string."""
-        return "projects/{project}/locations/{location}".format(project=project, location=location, )
-
-    @staticmethod
-    def parse_common_location_path(path: str) -> Dict[str,str]:
-        """Parse a location path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @classmethod
-    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
-        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
-
-        The client cert source is determined in the following order:
-        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
-        client cert source is None.
-        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
-        default client cert source exists, use the default one; otherwise the client cert
-        source is None.
-
-        The API endpoint is determined in the following order:
-        (1) if `client_options.api_endpoint` is provided, use the provided one.
-        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
-        default mTLS endpoint; if the environment variable is "never", use the default API
-        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
-        use the default API endpoint.
-
-        More details can be found at https://google.aip.dev/auth/4114.
-
-        Args:
-            client_options (google.api_core.client_options.ClientOptions): Custom options for the
-                client. Only the `api_endpoint` and `client_cert_source` properties may be used
-                in this method.
-
-        Returns:
-            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
-                client cert source to use.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
-        """
-
-        warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
-            DeprecationWarning)
-        if client_options is None:
-            client_options = client_options_lib.ClientOptions()
-        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
-        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
-        if use_client_cert not in ("true", "false"):
-            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
-        if use_mtls_endpoint not in ("auto", "never", "always"):
-            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
-
-        # Figure out the client cert source to use.
-        client_cert_source = None
-        if use_client_cert == "true":
-            if client_options.client_cert_source:
-                client_cert_source = client_options.client_cert_source
-            elif mtls.has_default_client_cert_source():
-                client_cert_source = mtls.default_client_cert_source()
-
-        # Figure out which api endpoint to use.
-        if client_options.api_endpoint is not None:
-            api_endpoint = client_options.api_endpoint
-        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
-            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
-        else:
-            api_endpoint = cls.DEFAULT_ENDPOINT
-
-        return api_endpoint, client_cert_source
-
-    @staticmethod
-    def _read_environment_variables():
-        """Returns the environment variables used by the client.
-
-        Returns:
-            Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
-            GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.
-
-        Raises:
-            ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
-                any of ["true", "false"].
-            google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
-                is not any of ["auto", "never", "always"].
-        """
-        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower()
-        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower()
-        universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN")
-        if use_client_cert not in ("true", "false"):
-            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
-        if use_mtls_endpoint not in ("auto", "never", "always"):
-            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
-        return use_client_cert == "true", use_mtls_endpoint, universe_domain_env
-
-    @staticmethod
-    def _get_client_cert_source(provided_cert_source, use_cert_flag):
-        """Return the client cert source to be used by the client.
-
-        Args:
-            provided_cert_source (bytes): The client certificate source provided.
- use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = AppGatewaysServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = AppGatewaysServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = AppGatewaysServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. - """ - universe_domain = AppGatewaysServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
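-
-        Only 401, 403 and 404 errors are annotated, and only when the
-        underlying credentials expose ``get_cred_info()`` (available in
-        google-auth >= 2.35.0); otherwise the error is left unchanged.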
- """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, AppGatewaysServiceTransport, Callable[..., AppGatewaysServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the app gateways service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,AppGatewaysServiceTransport,Callable[..., AppGatewaysServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the AppGatewaysServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) - - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = AppGatewaysServiceClient._read_environment_variables() - self._client_cert_source = AppGatewaysServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = AppGatewaysServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER - # Setup logging. - client_logging.initialize_logging() - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, AppGatewaysServiceTransport) - if transport_provided: - # transport is a AppGatewaysServiceTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = cast(AppGatewaysServiceTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - AppGatewaysServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[AppGatewaysServiceTransport], Callable[..., AppGatewaysServiceTransport]] = ( - AppGatewaysServiceClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., AppGatewaysServiceTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.beyondcorp.appgateways_v1.AppGatewaysServiceClient`.", - extra = { - "serviceName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService", - "credentialsType": None, - } - ) - - def list_app_gateways(self, - request: Optional[Union[app_gateways_service.ListAppGatewaysRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListAppGatewaysPager: - r"""Lists AppGateways in a given project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_appgateways_v1 - - def sample_list_app_gateways(): - # Create a client - client = beyondcorp_appgateways_v1.AppGatewaysServiceClient() - - # Initialize request argument(s) - request = beyondcorp_appgateways_v1.ListAppGatewaysRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_app_gateways(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.beyondcorp_appgateways_v1.types.ListAppGatewaysRequest, dict]): - The request object. 
Request message for - BeyondCorp.ListAppGateways. - parent (str): - Required. The resource name of the AppGateway location - using the form: - ``projects/{project_id}/locations/{location_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.beyondcorp_appgateways_v1.services.app_gateways_service.pagers.ListAppGatewaysPager: - Response message for - BeyondCorp.ListAppGateways. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, app_gateways_service.ListAppGatewaysRequest): - request = app_gateways_service.ListAppGatewaysRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_app_gateways] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListAppGatewaysPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_app_gateway(self, - request: Optional[Union[app_gateways_service.GetAppGatewayRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> app_gateways_service.AppGateway: - r"""Gets details of a single AppGateway. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_appgateways_v1 - - def sample_get_app_gateway(): - # Create a client - client = beyondcorp_appgateways_v1.AppGatewaysServiceClient() - - # Initialize request argument(s) - request = beyondcorp_appgateways_v1.GetAppGatewayRequest( - name="name_value", - ) - - # Make the request - response = client.get_app_gateway(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.beyondcorp_appgateways_v1.types.GetAppGatewayRequest, dict]): - The request object. Request message for - BeyondCorp.GetAppGateway. - name (str): - Required. BeyondCorp AppGateway name using the form: - ``projects/{project_id}/locations/{location_id}/appGateways/{app_gateway_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.beyondcorp_appgateways_v1.types.AppGateway: - A BeyondCorp AppGateway resource - represents a BeyondCorp protected - AppGateway to a remote application. It - creates all the necessary GCP components - needed for creating a BeyondCorp - protected AppGateway. Multiple - connectors can be authorised for a - single AppGateway. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, app_gateways_service.GetAppGatewayRequest): - request = app_gateways_service.GetAppGatewayRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_app_gateway] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def create_app_gateway(self, - request: Optional[Union[app_gateways_service.CreateAppGatewayRequest, dict]] = None, - *, - parent: Optional[str] = None, - app_gateway: Optional[app_gateways_service.AppGateway] = None, - app_gateway_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Creates a new AppGateway in a given project and - location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_appgateways_v1 - - def sample_create_app_gateway(): - # Create a client - client = beyondcorp_appgateways_v1.AppGatewaysServiceClient() - - # Initialize request argument(s) - app_gateway = beyondcorp_appgateways_v1.AppGateway() - app_gateway.name = "name_value" - app_gateway.type_ = "TCP_PROXY" - app_gateway.host_type = "GCP_REGIONAL_MIG" - - request = beyondcorp_appgateways_v1.CreateAppGatewayRequest( - parent="parent_value", - app_gateway=app_gateway, - ) - - # Make the request - operation = client.create_app_gateway(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.beyondcorp_appgateways_v1.types.CreateAppGatewayRequest, dict]): - The request object. Request message for - BeyondCorp.CreateAppGateway. - parent (str): - Required. The resource project name of the AppGateway - location using the form: - ``projects/{project_id}/locations/{location_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - app_gateway (google.cloud.beyondcorp_appgateways_v1.types.AppGateway): - Required. A BeyondCorp AppGateway - resource. - - This corresponds to the ``app_gateway`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - app_gateway_id (str): - Optional. User-settable AppGateway resource ID. - - - Must start with a letter. - - Must contain between 4-63 characters from - ``/[a-z][0-9]-/``. - - Must end with a number or a letter. - - This corresponds to the ``app_gateway_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.beyondcorp_appgateways_v1.types.AppGateway` A BeyondCorp AppGateway resource represents a BeyondCorp protected AppGateway - to a remote application. 
It creates all the necessary - GCP components needed for creating a BeyondCorp - protected AppGateway. Multiple connectors can be - authorised for a single AppGateway. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, app_gateway, app_gateway_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, app_gateways_service.CreateAppGatewayRequest): - request = app_gateways_service.CreateAppGatewayRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if app_gateway is not None: - request.app_gateway = app_gateway - if app_gateway_id is not None: - request.app_gateway_id = app_gateway_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_app_gateway] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - app_gateways_service.AppGateway, - metadata_type=app_gateways_service.AppGatewayOperationMetadata, - ) - - # Done; return the response. - return response - - def delete_app_gateway(self, - request: Optional[Union[app_gateways_service.DeleteAppGatewayRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Deletes a single AppGateway. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_appgateways_v1 - - def sample_delete_app_gateway(): - # Create a client - client = beyondcorp_appgateways_v1.AppGatewaysServiceClient() - - # Initialize request argument(s) - request = beyondcorp_appgateways_v1.DeleteAppGatewayRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_app_gateway(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.beyondcorp_appgateways_v1.types.DeleteAppGatewayRequest, dict]): - The request object. 
Request message for - BeyondCorp.DeleteAppGateway. - name (str): - Required. BeyondCorp AppGateway name using the form: - ``projects/{project_id}/locations/{location_id}/appGateways/{app_gateway_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, app_gateways_service.DeleteAppGatewayRequest): - request = app_gateways_service.DeleteAppGatewayRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_app_gateway] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=app_gateways_service.AppGatewayOperationMetadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "AppGatewaysServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! 
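Because ``__exit__`` closes the transport unconditionally, the context-manager form is the safest way to scope a client that owns its transport; a client whose transport is shared should be closed manually instead. A short sketch with a placeholder resource name:

.. code-block:: python

    from google.cloud import beyondcorp_appgateways_v1

    with beyondcorp_appgateways_v1.AppGatewaysServiceClient() as client:
        gateway = client.get_app_gateway(
            name="projects/my-project/locations/us-central1/appGateways/my-gateway",
        )
        print(gateway.name)
    # The transport is closed here; do not reuse this client afterwards.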
- """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. 
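As the ``isinstance(request, dict)`` branches show, these mixin helpers take raw ``operations_pb2`` requests, and a plain dict is keyword-expanded into one. A sketch with placeholder operation and location names (``cancel_operation`` below is best-effort, while ``delete_operation`` only discards the stored result rather than stopping the work):

.. code-block:: python

    from google.cloud import beyondcorp_appgateways_v1

    client = beyondcorp_appgateways_v1.AppGatewaysServiceClient()

    # Poll the latest state of a long-running operation by name.
    op = client.get_operation(
        {"name": "projects/my-project/locations/us-central1/operations/operation-123"}
    )
    print(op.done)

    # Enumerate operations under a location.
    listing = client.list_operations(
        {"name": "projects/my-project/locations/us-central1"}
    )
    for op in listing.operations:
        print(op.name)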
- # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def set_iam_policy( - self, - request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM access control policy on the specified function. - - Replaces any existing policy. - - Args: - request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): - The request object. Request message for `SetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. - - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. 
- - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def get_iam_policy( - self, - request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Gets the IAM access control policy for a function. - - Returns an empty policy if the function exists and does not have a - policy set. - - Args: - request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): - The request object. Request message for `GetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if - any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. 
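The intended pattern for this pair of calls is read-modify-write: fetch the current ``Policy`` (including its ``etag``), adjust the bindings, and write the whole object back. A sketch with placeholder resource and member values (the role name is illustrative, not confirmed for this service):

.. code-block:: python

    from google.cloud import beyondcorp_appgateways_v1

    client = beyondcorp_appgateways_v1.AppGatewaysServiceClient()
    resource = "projects/my-project/locations/us-central1/appGateways/my-gateway"

    policy = client.get_iam_policy({"resource": resource})
    policy.bindings.add(
        role="roles/beyondcorp.admin",  # illustrative role name
        members=["user:alice@example.com"],
    )
    # Writing back the fetched policy preserves its etag, so a concurrent
    # modification fails rather than being silently overwritten.
    client.set_iam_policy({"resource": resource, "policy": policy})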
- - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def test_iam_permissions( - self, - request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Tests the specified IAM permissions against the IAM access control - policy for a function. - - If the function does not exist, this will return an empty set - of permissions, not a NOT_FOUND error. - - Args: - request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): - The request object. Request message for - `TestIamPermissions` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.iam_policy_pb2.TestIamPermissionsResponse: - Response message for ``TestIamPermissions`` method. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
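``test_iam_permissions`` is the non-failing way to probe access: it returns the subset of the supplied permissions that the caller actually holds instead of raising. A sketch (the permission string is illustrative, not verified against the service's published permission list):

.. code-block:: python

    from google.cloud import beyondcorp_appgateways_v1

    client = beyondcorp_appgateways_v1.AppGatewaysServiceClient()

    response = client.test_iam_permissions({
        "resource": "projects/my-project/locations/us-central1/appGateways/my-gateway",
        "permissions": ["beyondcorp.appGateways.get"],  # illustrative
    })
    print(list(response.permissions))  # only the permissions actually granted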
- if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "AppGatewaysServiceClient", -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/pagers.py b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/pagers.py deleted file mode 100644 index c3c951ebd52e..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/pagers.py +++ /dev/null @@ -1,166 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.beyondcorp_appgateways_v1.types import app_gateways_service - - -class ListAppGatewaysPager: - """A pager for iterating through ``list_app_gateways`` requests. - - This class thinly wraps an initial - :class:`google.cloud.beyondcorp_appgateways_v1.types.ListAppGatewaysResponse` object, and - provides an ``__iter__`` method to iterate through its - ``app_gateways`` field. 
- - If there are more pages, the ``__iter__`` method will make additional - ``ListAppGateways`` requests and continue to iterate - through the ``app_gateways`` field on the - corresponding responses. - - All the usual :class:`google.cloud.beyondcorp_appgateways_v1.types.ListAppGatewaysResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., app_gateways_service.ListAppGatewaysResponse], - request: app_gateways_service.ListAppGatewaysRequest, - response: app_gateways_service.ListAppGatewaysResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.beyondcorp_appgateways_v1.types.ListAppGatewaysRequest): - The initial request object. - response (google.cloud.beyondcorp_appgateways_v1.types.ListAppGatewaysResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = app_gateways_service.ListAppGatewaysRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[app_gateways_service.ListAppGatewaysResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[app_gateways_service.AppGateway]: - for page in self.pages: - yield from page.app_gateways - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListAppGatewaysAsyncPager: - """A pager for iterating through ``list_app_gateways`` requests. - - This class thinly wraps an initial - :class:`google.cloud.beyondcorp_appgateways_v1.types.ListAppGatewaysResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``app_gateways`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListAppGateways`` requests and continue to iterate - through the ``app_gateways`` field on the - corresponding responses. - - All the usual :class:`google.cloud.beyondcorp_appgateways_v1.types.ListAppGatewaysResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
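Callers normally never construct this pager themselves; it is what ``list_app_gateways`` returns, and iterating it item-by-item fetches further pages lazily via the ``pages`` generator above. A sketch with a placeholder parent:

.. code-block:: python

    from google.cloud import beyondcorp_appgateways_v1

    client = beyondcorp_appgateways_v1.AppGatewaysServiceClient()

    pager = client.list_app_gateways(
        parent="projects/my-project/locations/us-central1",
    )
    for gateway in pager:  # additional ListAppGateways requests happen as needed
        print(gateway.name)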
- """ - def __init__(self, - method: Callable[..., Awaitable[app_gateways_service.ListAppGatewaysResponse]], - request: app_gateways_service.ListAppGatewaysRequest, - response: app_gateways_service.ListAppGatewaysResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.beyondcorp_appgateways_v1.types.ListAppGatewaysRequest): - The initial request object. - response (google.cloud.beyondcorp_appgateways_v1.types.ListAppGatewaysResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = app_gateways_service.ListAppGatewaysRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[app_gateways_service.ListAppGatewaysResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[app_gateways_service.AppGateway]: - async def async_generator(): - async for page in self.pages: - for response in page.app_gateways: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/transports/README.rst b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/transports/README.rst deleted file mode 100644 index cbce0485792f..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`AppGatewaysServiceTransport` is the ABC for all transports. -- public child `AppGatewaysServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `AppGatewaysServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseAppGatewaysServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `AppGatewaysServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). 
diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/transports/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/transports/__init__.py deleted file mode 100644 index 367dea0b343b..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import AppGatewaysServiceTransport -from .grpc import AppGatewaysServiceGrpcTransport -from .grpc_asyncio import AppGatewaysServiceGrpcAsyncIOTransport -from .rest import AppGatewaysServiceRestTransport -from .rest import AppGatewaysServiceRestInterceptor - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[AppGatewaysServiceTransport]] -_transport_registry['grpc'] = AppGatewaysServiceGrpcTransport -_transport_registry['grpc_asyncio'] = AppGatewaysServiceGrpcAsyncIOTransport -_transport_registry['rest'] = AppGatewaysServiceRestTransport - -__all__ = ( - 'AppGatewaysServiceTransport', - 'AppGatewaysServiceGrpcTransport', - 'AppGatewaysServiceGrpcAsyncIOTransport', - 'AppGatewaysServiceRestTransport', - 'AppGatewaysServiceRestInterceptor', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/transports/base.py b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/transports/base.py deleted file mode 100644 index d290ecee7cbf..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/transports/base.py +++ /dev/null @@ -1,333 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.beyondcorp_appgateways_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.beyondcorp_appgateways_v1.types import app_gateways_service -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class AppGatewaysServiceTransport(abc.ABC): - """Abstract transport class for AppGatewaysService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'beyondcorp.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'beyondcorp.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. 
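Explicitly supplied credentials short-circuit the default resolution that follows, and ``credentials`` is mutually exclusive with ``credentials_file``. A sketch using a service-account key file (the path is a placeholder):

.. code-block:: python

    from google.oauth2 import service_account

    from google.cloud import beyondcorp_appgateways_v1

    creds = service_account.Credentials.from_service_account_file(
        "/path/to/key.json",
        scopes=["https://www.googleapis.com/auth/cloud-platform"],
    )
    client = beyondcorp_appgateways_v1.AppGatewaysServiceClient(credentials=creds)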
- if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.list_app_gateways: gapic_v1.method.wrap_method( - self.list_app_gateways, - default_timeout=None, - client_info=client_info, - ), - self.get_app_gateway: gapic_v1.method.wrap_method( - self.get_app_gateway, - default_timeout=None, - client_info=client_info, - ), - self.create_app_gateway: gapic_v1.method.wrap_method( - self.create_app_gateway, - default_timeout=None, - client_info=client_info, - ), - self.delete_app_gateway: gapic_v1.method.wrap_method( - self.delete_app_gateway, - default_timeout=None, - client_info=client_info, - ), - self.get_location: gapic_v1.method.wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - self.list_locations: gapic_v1.method.wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - self.get_iam_policy: gapic_v1.method.wrap_method( - self.get_iam_policy, - default_timeout=None, - client_info=client_info, - ), - self.set_iam_policy: gapic_v1.method.wrap_method( - self.set_iam_policy, - default_timeout=None, - client_info=client_info, - ), - self.test_iam_permissions: gapic_v1.method.wrap_method( - self.test_iam_permissions, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: gapic_v1.method.wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: gapic_v1.method.wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: gapic_v1.method.wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: gapic_v1.method.wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
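The table above wraps every method with no default retry and ``default_timeout=None``, so calls neither retry nor time out unless the caller overrides them per call. A sketch of such an override (the retry tuning values are arbitrary):

.. code-block:: python

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries

    from google.cloud import beyondcorp_appgateways_v1

    client = beyondcorp_appgateways_v1.AppGatewaysServiceClient()

    custom_retry = retries.Retry(
        predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
        initial=1.0,
        maximum=10.0,
        multiplier=2.0,
        timeout=60.0,  # give up retrying after one minute
    )
    gateway = client.get_app_gateway(
        name="projects/my-project/locations/us-central1/appGateways/my-gateway",
        retry=custom_retry,
        timeout=30.0,  # RPC timeout in seconds
    )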
- """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def list_app_gateways(self) -> Callable[ - [app_gateways_service.ListAppGatewaysRequest], - Union[ - app_gateways_service.ListAppGatewaysResponse, - Awaitable[app_gateways_service.ListAppGatewaysResponse] - ]]: - raise NotImplementedError() - - @property - def get_app_gateway(self) -> Callable[ - [app_gateways_service.GetAppGatewayRequest], - Union[ - app_gateways_service.AppGateway, - Awaitable[app_gateways_service.AppGateway] - ]]: - raise NotImplementedError() - - @property - def create_app_gateway(self) -> Callable[ - [app_gateways_service.CreateAppGatewayRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_app_gateway(self) -> Callable[ - [app_gateways_service.DeleteAppGatewayRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def delete_operation( - self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def set_iam_policy( - self, - ) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], - ]: - raise NotImplementedError() - - @property - def get_iam_policy( - self, - ) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], - ]: - raise NotImplementedError() - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Union[ - iam_policy_pb2.TestIamPermissionsResponse, - Awaitable[iam_policy_pb2.TestIamPermissionsResponse], - ], - ]: - raise NotImplementedError() - - @property - def get_location(self, - ) -> Callable[ - [locations_pb2.GetLocationRequest], - Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], - ]: - raise NotImplementedError() - - @property - def list_locations(self, - ) -> Callable[ - [locations_pb2.ListLocationsRequest], - Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'AppGatewaysServiceTransport', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/transports/grpc.py b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/transports/grpc.py deleted file mode 100644 index cee4e7fec31f..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/transports/grpc.py +++ 
/dev/null @@ -1,648 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json -import logging as std_logging -import pickle -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore - -from google.cloud.beyondcorp_appgateways_v1.types import app_gateways_service -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from .base import AppGatewaysServiceTransport, DEFAULT_CLIENT_INFO - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER - def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService", - "rpcName": client_call_details.method, - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - - response = continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = response.trailing_metadata() - # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = response.result() - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload 
= f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response for {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService", - "rpcName": client_call_details.method, - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class AppGatewaysServiceGrpcTransport(AppGatewaysServiceTransport): - """gRPC backend transport for AppGatewaysService. - - API Overview: - - The ``beyondcorp.googleapis.com`` service implements the Google - Cloud BeyondCorp API. - - Data Model: - - The AppGatewaysService exposes the following resources: - - - AppGateways, named as follows: - ``projects/{project_id}/locations/{location_id}/appGateways/{app_gateway_id}``. - - The AppGatewaysService service provides methods to manage - (create/read/update/delete) BeyondCorp AppGateways. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'beyondcorp.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'beyondcorp.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if a ``channel`` instance is provided. - channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. 
- client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. 
- credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) - - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'beyondcorp.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): An optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def list_app_gateways(self) -> Callable[ - [app_gateways_service.ListAppGatewaysRequest], - app_gateways_service.ListAppGatewaysResponse]: - r"""Return a callable for the list app gateways method over gRPC. - - Lists AppGateways in a given project and location. - - Returns: - Callable[[~.ListAppGatewaysRequest], - ~.ListAppGatewaysResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each.
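-        # (Editor's note: illustrative sketch, not part of the generated file.)
-        # The stub is built on first access and cached in self._stubs, so
-        # repeated reads of this property return the same callable:
-        #
-        #     transport = AppGatewaysServiceGrpcTransport()
-        #     assert transport.list_app_gateways is transport.list_app_gateways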
- if 'list_app_gateways' not in self._stubs: - self._stubs['list_app_gateways'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.appgateways.v1.AppGatewaysService/ListAppGateways', - request_serializer=app_gateways_service.ListAppGatewaysRequest.serialize, - response_deserializer=app_gateways_service.ListAppGatewaysResponse.deserialize, - ) - return self._stubs['list_app_gateways'] - - @property - def get_app_gateway(self) -> Callable[ - [app_gateways_service.GetAppGatewayRequest], - app_gateways_service.AppGateway]: - r"""Return a callable for the get app gateway method over gRPC. - - Gets details of a single AppGateway. - - Returns: - Callable[[~.GetAppGatewayRequest], - ~.AppGateway]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_app_gateway' not in self._stubs: - self._stubs['get_app_gateway'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.appgateways.v1.AppGatewaysService/GetAppGateway', - request_serializer=app_gateways_service.GetAppGatewayRequest.serialize, - response_deserializer=app_gateways_service.AppGateway.deserialize, - ) - return self._stubs['get_app_gateway'] - - @property - def create_app_gateway(self) -> Callable[ - [app_gateways_service.CreateAppGatewayRequest], - operations_pb2.Operation]: - r"""Return a callable for the create app gateway method over gRPC. - - Creates a new AppGateway in a given project and - location. - - Returns: - Callable[[~.CreateAppGatewayRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_app_gateway' not in self._stubs: - self._stubs['create_app_gateway'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.appgateways.v1.AppGatewaysService/CreateAppGateway', - request_serializer=app_gateways_service.CreateAppGatewayRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_app_gateway'] - - @property - def delete_app_gateway(self) -> Callable[ - [app_gateways_service.DeleteAppGatewayRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete app gateway method over gRPC. - - Deletes a single AppGateway. - - Returns: - Callable[[~.DeleteAppGatewayRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
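-        # (Editor's note: illustrative sketch, not part of the generated file.)
-        # Delete, like create, is a long-running RPC: the stub returns an
-        # operations_pb2.Operation that callers poll via ``operations_client``:
-        #
-        #     op = transport.delete_app_gateway(request)
-        #     done = transport.operations_client.get_operation(op.name).done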
- if 'delete_app_gateway' not in self._stubs: - self._stubs['delete_app_gateway'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.appgateways.v1.AppGatewaysService/DeleteAppGateway', - request_serializer=app_gateways_service.DeleteAppGatewayRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_app_gateway'] - - def close(self): - self._logged_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_location" not in self._stubs: - self._stubs["get_location"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - @property - def set_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - Sets the IAM access control policy on the specified - function. Replaces any existing policy. - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( - "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["set_iam_policy"] - - @property - def get_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - Gets the IAM access control policy for a function. - Returns an empty policy if the function exists and does - not have a policy set. - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( - "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["get_iam_policy"] - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse - ]: - r"""Return a callable for the test iam permissions method over gRPC. - Tests the specified permissions against the IAM access control - policy for a function. 
If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( - "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs["test_iam_permissions"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'AppGatewaysServiceGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/transports/grpc_asyncio.py deleted file mode 100644 index f675ca2b9d04..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,728 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import inspect -import json -import pickle -import logging as std_logging -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import exceptions as core_exceptions -from google.api_core import retry_async as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.beyondcorp_appgateways_v1.types import app_gateways_service -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from .base import AppGatewaysServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import AppGatewaysServiceGrpcTransport - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER - async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService", - "rpcName": str(client_call_details.method), - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - response = await continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = await response.trailing_metadata() - # Convert gRPC trailing metadata to a dict of str keys and values - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = await response - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response to rpc {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService", - "rpcName": str(client_call_details.method), - "response": grpc_response, -
"metadata": grpc_response["metadata"], - }, - ) - return response - - -class AppGatewaysServiceGrpcAsyncIOTransport(AppGatewaysServiceTransport): - """gRPC AsyncIO backend transport for AppGatewaysService. - - API Overview: - - The ``beyondcorp.googleapis.com`` service implements the Google - Cloud BeyondCorp API. - - Data Model: - - The AppGatewaysService exposes the following resources: - - - AppGateways, named as follows: - ``projects/{project_id}/locations/{location_id}/appGateways/{app_gateway_id}``. - - The AppGatewaysService service provides methods to manage - (create/read/update/delete) BeyondCorp AppGateways. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'beyondcorp.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'beyondcorp.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'beyondcorp.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. 
These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, aio.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def list_app_gateways(self) -> Callable[ - [app_gateways_service.ListAppGatewaysRequest], - Awaitable[app_gateways_service.ListAppGatewaysResponse]]: - r"""Return a callable for the list app gateways method over gRPC. - - Lists AppGateways in a given project and location. - - Returns: - Callable[[~.ListAppGatewaysRequest], - Awaitable[~.ListAppGatewaysResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
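-        # (Editor's note: illustrative sketch, not part of the generated file.)
-        # In the AsyncIO transport the cached stub returns an awaitable call
-        # object, so callers await the response:
-        #
-        #     response = await transport.list_app_gateways(request)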
- if 'list_app_gateways' not in self._stubs: - self._stubs['list_app_gateways'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.appgateways.v1.AppGatewaysService/ListAppGateways', - request_serializer=app_gateways_service.ListAppGatewaysRequest.serialize, - response_deserializer=app_gateways_service.ListAppGatewaysResponse.deserialize, - ) - return self._stubs['list_app_gateways'] - - @property - def get_app_gateway(self) -> Callable[ - [app_gateways_service.GetAppGatewayRequest], - Awaitable[app_gateways_service.AppGateway]]: - r"""Return a callable for the get app gateway method over gRPC. - - Gets details of a single AppGateway. - - Returns: - Callable[[~.GetAppGatewayRequest], - Awaitable[~.AppGateway]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_app_gateway' not in self._stubs: - self._stubs['get_app_gateway'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.appgateways.v1.AppGatewaysService/GetAppGateway', - request_serializer=app_gateways_service.GetAppGatewayRequest.serialize, - response_deserializer=app_gateways_service.AppGateway.deserialize, - ) - return self._stubs['get_app_gateway'] - - @property - def create_app_gateway(self) -> Callable[ - [app_gateways_service.CreateAppGatewayRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create app gateway method over gRPC. - - Creates a new AppGateway in a given project and - location. - - Returns: - Callable[[~.CreateAppGatewayRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_app_gateway' not in self._stubs: - self._stubs['create_app_gateway'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.appgateways.v1.AppGatewaysService/CreateAppGateway', - request_serializer=app_gateways_service.CreateAppGatewayRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_app_gateway'] - - @property - def delete_app_gateway(self) -> Callable[ - [app_gateways_service.DeleteAppGatewayRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete app gateway method over gRPC. - - Deletes a single AppGateway. - - Returns: - Callable[[~.DeleteAppGatewayRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_app_gateway' not in self._stubs: - self._stubs['delete_app_gateway'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.appgateways.v1.AppGatewaysService/DeleteAppGateway', - request_serializer=app_gateways_service.DeleteAppGatewayRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_app_gateway'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.list_app_gateways: self._wrap_method( - self.list_app_gateways, - default_timeout=None, - client_info=client_info, - ), - self.get_app_gateway: self._wrap_method( - self.get_app_gateway, - default_timeout=None, - client_info=client_info, - ), - self.create_app_gateway: self._wrap_method( - self.create_app_gateway, - default_timeout=None, - client_info=client_info, - ), - self.delete_app_gateway: self._wrap_method( - self.delete_app_gateway, - default_timeout=None, - client_info=client_info, - ), - self.get_location: self._wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - self.list_locations: self._wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - self.get_iam_policy: self._wrap_method( - self.get_iam_policy, - default_timeout=None, - client_info=client_info, - ), - self.set_iam_policy: self._wrap_method( - self.set_iam_policy, - default_timeout=None, - client_info=client_info, - ), - self.test_iam_permissions: self._wrap_method( - self.test_iam_permissions, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: self._wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: self._wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: self._wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: self._wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the get location method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_location" not in self._stubs: - self._stubs["get_location"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - @property - def set_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC.
- Sets the IAM access control policy on the specified - function. Replaces any existing policy. - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( - "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["set_iam_policy"] - - @property - def get_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - Gets the IAM access control policy for a function. - Returns an empty policy if the function exists and does - not have a policy set. - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( - "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["get_iam_policy"] - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse - ]: - r"""Return a callable for the test iam permissions method over gRPC. - Tests the specified permissions against the IAM access control - policy for a function. If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( - "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs["test_iam_permissions"] - - -__all__ = ( - 'AppGatewaysServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/transports/rest.py b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/transports/rest.py deleted file mode 100644 index b34fd0030f04..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/transports/rest.py +++ /dev/null @@ -1,2156 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import logging -import json # type: ignore - -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.api_core import operations_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.cloud.location import locations_pb2 # type: ignore - -from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - - -from google.cloud.beyondcorp_appgateways_v1.types import app_gateways_service -from google.longrunning import operations_pb2 # type: ignore - - -from .rest_base import _BaseAppGatewaysServiceRestTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = logging.getLogger(__name__) - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=f"requests@{requests_version}", -) - - -class AppGatewaysServiceRestInterceptor: - """Interceptor for AppGatewaysService. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. 
- Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the AppGatewaysServiceRestTransport. - - .. code-block:: python - class MyCustomAppGatewaysServiceInterceptor(AppGatewaysServiceRestInterceptor): - def pre_create_app_gateway(self, request, metadata): - logging.info(f"Received request: {request}") - return request, metadata - - def post_create_app_gateway(self, response): - logging.info(f"Received response: {response}") - return response - - def pre_delete_app_gateway(self, request, metadata): - logging.info(f"Received request: {request}") - return request, metadata - - def post_delete_app_gateway(self, response): - logging.info(f"Received response: {response}") - return response - - def pre_get_app_gateway(self, request, metadata): - logging.info(f"Received request: {request}") - return request, metadata - - def post_get_app_gateway(self, response): - logging.info(f"Received response: {response}") - return response - - def pre_list_app_gateways(self, request, metadata): - logging.info(f"Received request: {request}") - return request, metadata - - def post_list_app_gateways(self, response): - logging.info(f"Received response: {response}") - return response - - transport = AppGatewaysServiceRestTransport(interceptor=MyCustomAppGatewaysServiceInterceptor()) - client = AppGatewaysServiceClient(transport=transport) - - - """ - def pre_create_app_gateway(self, request: app_gateways_service.CreateAppGatewayRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[app_gateways_service.CreateAppGatewayRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_app_gateway - - Override in a subclass to manipulate the request or metadata - before they are sent to the AppGatewaysService server. - """ - return request, metadata - - def post_create_app_gateway(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for create_app_gateway - - DEPRECATED. Please use the `post_create_app_gateway_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AppGatewaysService server but before - it is returned to user code. This `post_create_app_gateway` interceptor runs - before the `post_create_app_gateway_with_metadata` interceptor. - """ - return response - - def post_create_app_gateway_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_app_gateway - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AppGatewaysService server but before it is returned to user code. - - We recommend only using this `post_create_app_gateway_with_metadata` - interceptor in new development instead of the `post_create_app_gateway` interceptor. - When both interceptors are used, this `post_create_app_gateway_with_metadata` interceptor runs after the - `post_create_app_gateway` interceptor. The (possibly modified) response returned by - `post_create_app_gateway` will be passed to - `post_create_app_gateway_with_metadata`.
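-
-        A minimal override sketch (an editor's illustration, not part of the
-        generated file) that logs the operation name before user code sees it:
-
-        .. code-block:: python
-
-            def post_create_app_gateway_with_metadata(self, response, metadata):
-                logging.info(f"CreateAppGateway returned operation {response.name}")
-                return response, metadata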
- """ - return response, metadata - - def pre_delete_app_gateway(self, request: app_gateways_service.DeleteAppGatewayRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[app_gateways_service.DeleteAppGatewayRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_app_gateway - - Override in a subclass to manipulate the request or metadata - before they are sent to the AppGatewaysService server. - """ - return request, metadata - - def post_delete_app_gateway(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_app_gateway - - DEPRECATED. Please use the `post_delete_app_gateway_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AppGatewaysService server but before - it is returned to user code. This `post_delete_app_gateway` interceptor runs - before the `post_delete_app_gateway_with_metadata` interceptor. - """ - return response - - def post_delete_app_gateway_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for delete_app_gateway - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AppGatewaysService server but before it is returned to user code. - - We recommend only using this `post_delete_app_gateway_with_metadata` - interceptor in new development instead of the `post_delete_app_gateway` interceptor. - When both interceptors are used, this `post_delete_app_gateway_with_metadata` interceptor runs after the - `post_delete_app_gateway` interceptor. The (possibly modified) response returned by - `post_delete_app_gateway` will be passed to - `post_delete_app_gateway_with_metadata`. - """ - return response, metadata - - def pre_get_app_gateway(self, request: app_gateways_service.GetAppGatewayRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[app_gateways_service.GetAppGatewayRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_app_gateway - - Override in a subclass to manipulate the request or metadata - before they are sent to the AppGatewaysService server. - """ - return request, metadata - - def post_get_app_gateway(self, response: app_gateways_service.AppGateway) -> app_gateways_service.AppGateway: - """Post-rpc interceptor for get_app_gateway - - DEPRECATED. Please use the `post_get_app_gateway_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AppGatewaysService server but before - it is returned to user code. This `post_get_app_gateway` interceptor runs - before the `post_get_app_gateway_with_metadata` interceptor. - """ - return response - - def post_get_app_gateway_with_metadata(self, response: app_gateways_service.AppGateway, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[app_gateways_service.AppGateway, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_app_gateway - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AppGatewaysService server but before it is returned to user code. - - We recommend only using this `post_get_app_gateway_with_metadata` - interceptor in new development instead of the `post_get_app_gateway` interceptor. 
- When both interceptors are used, this `post_get_app_gateway_with_metadata` interceptor runs after the - `post_get_app_gateway` interceptor. The (possibly modified) response returned by - `post_get_app_gateway` will be passed to - `post_get_app_gateway_with_metadata`. - """ - return response, metadata - - def pre_list_app_gateways(self, request: app_gateways_service.ListAppGatewaysRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[app_gateways_service.ListAppGatewaysRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_app_gateways - - Override in a subclass to manipulate the request or metadata - before they are sent to the AppGatewaysService server. - """ - return request, metadata - - def post_list_app_gateways(self, response: app_gateways_service.ListAppGatewaysResponse) -> app_gateways_service.ListAppGatewaysResponse: - """Post-rpc interceptor for list_app_gateways - - DEPRECATED. Please use the `post_list_app_gateways_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the AppGatewaysService server but before - it is returned to user code. This `post_list_app_gateways` interceptor runs - before the `post_list_app_gateways_with_metadata` interceptor. - """ - return response - - def post_list_app_gateways_with_metadata(self, response: app_gateways_service.ListAppGatewaysResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[app_gateways_service.ListAppGatewaysResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_app_gateways - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the AppGatewaysService server but before it is returned to user code. - - We recommend only using this `post_list_app_gateways_with_metadata` - interceptor in new development instead of the `post_list_app_gateways` interceptor. - When both interceptors are used, this `post_list_app_gateways_with_metadata` interceptor runs after the - `post_list_app_gateways` interceptor. The (possibly modified) response returned by - `post_list_app_gateways` will be passed to - `post_list_app_gateways_with_metadata`. - """ - return response, metadata - - def pre_get_location( - self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_location - - Override in a subclass to manipulate the request or metadata - before they are sent to the AppGatewaysService server. - """ - return request, metadata - - def post_get_location( - self, response: locations_pb2.Location - ) -> locations_pb2.Location: - """Post-rpc interceptor for get_location - - Override in a subclass to manipulate the response - after it is returned by the AppGatewaysService server but before - it is returned to user code. - """ - return response - - def pre_list_locations( - self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_locations - - Override in a subclass to manipulate the request or metadata - before they are sent to the AppGatewaysService server. 
- """ - return request, metadata - - def post_list_locations( - self, response: locations_pb2.ListLocationsResponse - ) -> locations_pb2.ListLocationsResponse: - """Post-rpc interceptor for list_locations - - Override in a subclass to manipulate the response - after it is returned by the AppGatewaysService server but before - it is returned to user code. - """ - return response - - def pre_get_iam_policy( - self, request: iam_policy_pb2.GetIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the AppGatewaysService server. - """ - return request, metadata - - def post_get_iam_policy( - self, response: policy_pb2.Policy - ) -> policy_pb2.Policy: - """Post-rpc interceptor for get_iam_policy - - Override in a subclass to manipulate the response - after it is returned by the AppGatewaysService server but before - it is returned to user code. - """ - return response - - def pre_set_iam_policy( - self, request: iam_policy_pb2.SetIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for set_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the AppGatewaysService server. - """ - return request, metadata - - def post_set_iam_policy( - self, response: policy_pb2.Policy - ) -> policy_pb2.Policy: - """Post-rpc interceptor for set_iam_policy - - Override in a subclass to manipulate the response - after it is returned by the AppGatewaysService server but before - it is returned to user code. - """ - return response - - def pre_test_iam_permissions( - self, request: iam_policy_pb2.TestIamPermissionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for test_iam_permissions - - Override in a subclass to manipulate the request or metadata - before they are sent to the AppGatewaysService server. - """ - return request, metadata - - def post_test_iam_permissions( - self, response: iam_policy_pb2.TestIamPermissionsResponse - ) -> iam_policy_pb2.TestIamPermissionsResponse: - """Post-rpc interceptor for test_iam_permissions - - Override in a subclass to manipulate the response - after it is returned by the AppGatewaysService server but before - it is returned to user code. - """ - return response - - def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the AppGatewaysService server. - """ - return request, metadata - - def post_cancel_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the response - after it is returned by the AppGatewaysService server but before - it is returned to user code. 
- """ - return response - - def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the AppGatewaysService server. - """ - return request, metadata - - def post_delete_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for delete_operation - - Override in a subclass to manipulate the response - after it is returned by the AppGatewaysService server but before - it is returned to user code. - """ - return response - - def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the AppGatewaysService server. - """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the AppGatewaysService server but before - it is returned to user code. - """ - return response - - def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_operations - - Override in a subclass to manipulate the request or metadata - before they are sent to the AppGatewaysService server. - """ - return request, metadata - - def post_list_operations( - self, response: operations_pb2.ListOperationsResponse - ) -> operations_pb2.ListOperationsResponse: - """Post-rpc interceptor for list_operations - - Override in a subclass to manipulate the response - after it is returned by the AppGatewaysService server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class AppGatewaysServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: AppGatewaysServiceRestInterceptor - - -class AppGatewaysServiceRestTransport(_BaseAppGatewaysServiceRestTransport): - """REST backend synchronous transport for AppGatewaysService. - - API Overview: - - The ``beyondcorp.googleapis.com`` service implements the Google - Cloud BeyondCorp API. - - Data Model: - - The AppGatewaysService exposes the following resources: - - - AppGateways, named as follows: - ``projects/{project_id}/locations/{location_id}/appGateways/{app_gateway_id}``. - - The AppGatewaysService service provides methods to manage - (create/read/update/delete) BeyondCorp AppGateways. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
-
-    It sends JSON representations of protocol buffers over HTTP/1.1
-    """
-
-    def __init__(self, *,
-            host: str = 'beyondcorp.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            client_cert_source_for_mtls: Optional[Callable[[
-                ], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            url_scheme: str = 'https',
-            interceptor: Optional[AppGatewaysServiceRestInterceptor] = None,
-            api_audience: Optional[str] = None,
-            ) -> None:
-        """Instantiate the transport.
-
-        Args:
-            host (Optional[str]):
-                The hostname to connect to (default: 'beyondcorp.googleapis.com').
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if ``channel`` is provided.
-            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
-                ignored if ``channel`` is provided.
-            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
-                certificate to configure mutual TLS HTTP channel. It is ignored
-                if ``channel`` is provided.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you are developing
-                your own client library.
-            always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
-                be used for service account credentials.
-            url_scheme: the protocol scheme for the API endpoint. Normally
-                "https", but for testing or local servers,
-                "http" can be specified.
-        """
-        # Run the base constructor
-        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
-        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
-        # credentials object
-        super().__init__(
-            host=host,
-            credentials=credentials,
-            client_info=client_info,
-            always_use_jwt_access=always_use_jwt_access,
-            url_scheme=url_scheme,
-            api_audience=api_audience
-        )
-        self._session = AuthorizedSession(
-            self._credentials, default_host=self.DEFAULT_HOST)
-        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
-        if client_cert_source_for_mtls:
-            self._session.configure_mtls_channel(client_cert_source_for_mtls)
-        self._interceptor = interceptor or AppGatewaysServiceRestInterceptor()
-        self._prep_wrapped_messages(client_info)
-
-    @property
-    def operations_client(self) -> operations_v1.AbstractOperationsClient:
-        """Create the client designed to process long-running operations.
-
-        This property caches on the instance; repeated calls return the same
-        client.
-        """
-        # Only create a new client if we do not already have one.
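-        # (Illustrative note, not part of the generated surface: the cached
-        # client can be used to poll long-running operations directly, e.g.
-        #
-        #     transport.operations_client.get_operation(name=op_name)
-        #
-        # where ``op_name`` is a hypothetical fully-qualified operation name.)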
- if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.CancelOperation': [ - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - ], - 'google.longrunning.Operations.DeleteOperation': [ - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ], - 'google.longrunning.Operations.GetOperation': [ - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ], - 'google.longrunning.Operations.ListOperations': [ - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', - }, - ], - } - - rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1") - - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) - - # Return the client from cache. - return self._operations_client - - class _CreateAppGateway(_BaseAppGatewaysServiceRestTransport._BaseCreateAppGateway, AppGatewaysServiceRestStub): - def __hash__(self): - return hash("AppGatewaysServiceRestTransport.CreateAppGateway") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: app_gateways_service.CreateAppGatewayRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the create app gateway method over HTTP. - - Args: - request (~.app_gateways_service.CreateAppGatewayRequest): - The request object. Request message for - BeyondCorp.CreateAppGateway. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = _BaseAppGatewaysServiceRestTransport._BaseCreateAppGateway._get_http_options() - - request, metadata = self._interceptor.pre_create_app_gateway(request, metadata) - transcoded_request = _BaseAppGatewaysServiceRestTransport._BaseCreateAppGateway._get_transcoded_request(http_options, request) - - body = _BaseAppGatewaysServiceRestTransport._BaseCreateAppGateway._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAppGatewaysServiceRestTransport._BaseCreateAppGateway._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.appgateways_v1.AppGatewaysServiceClient.CreateAppGateway", - extra = { - "serviceName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService", - "rpcName": "CreateAppGateway", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AppGatewaysServiceRestTransport._CreateAppGateway._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_app_gateway(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_app_gateway_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.beyondcorp.appgateways_v1.AppGatewaysServiceClient.create_app_gateway", - extra = { - "serviceName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService", - "rpcName": "CreateAppGateway", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteAppGateway(_BaseAppGatewaysServiceRestTransport._BaseDeleteAppGateway, AppGatewaysServiceRestStub): - def __hash__(self): - return hash("AppGatewaysServiceRestTransport.DeleteAppGateway") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: app_gateways_service.DeleteAppGatewayRequest, *, - retry: 
OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete app gateway method over HTTP. - - Args: - request (~.app_gateways_service.DeleteAppGatewayRequest): - The request object. Request message for - BeyondCorp.DeleteAppGateway. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseAppGatewaysServiceRestTransport._BaseDeleteAppGateway._get_http_options() - - request, metadata = self._interceptor.pre_delete_app_gateway(request, metadata) - transcoded_request = _BaseAppGatewaysServiceRestTransport._BaseDeleteAppGateway._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAppGatewaysServiceRestTransport._BaseDeleteAppGateway._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.appgateways_v1.AppGatewaysServiceClient.DeleteAppGateway", - extra = { - "serviceName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService", - "rpcName": "DeleteAppGateway", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AppGatewaysServiceRestTransport._DeleteAppGateway._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
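-            # (Illustrative only: at the public client layer these surface as
-            # google.api_core.exceptions types, e.g.
-            #
-            #     try:
-            #         client.delete_app_gateway(name=gateway_name)
-            #     except core_exceptions.NotFound:
-            #         pass  # gateway was already deleted
-            #
-            # where ``gateway_name`` is a hypothetical resource name.)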
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_delete_app_gateway(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_app_gateway_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.beyondcorp.appgateways_v1.AppGatewaysServiceClient.delete_app_gateway", - extra = { - "serviceName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService", - "rpcName": "DeleteAppGateway", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetAppGateway(_BaseAppGatewaysServiceRestTransport._BaseGetAppGateway, AppGatewaysServiceRestStub): - def __hash__(self): - return hash("AppGatewaysServiceRestTransport.GetAppGateway") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: app_gateways_service.GetAppGatewayRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> app_gateways_service.AppGateway: - r"""Call the get app gateway method over HTTP. - - Args: - request (~.app_gateways_service.GetAppGatewayRequest): - The request object. Request message for - BeyondCorp.GetAppGateway. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.app_gateways_service.AppGateway: - A BeyondCorp AppGateway resource - represents a BeyondCorp protected - AppGateway to a remote application. It - creates all the necessary GCP components - needed for creating a BeyondCorp - protected AppGateway. Multiple - connectors can be authorised for a - single AppGateway. 
-
-            """
-
-            http_options = _BaseAppGatewaysServiceRestTransport._BaseGetAppGateway._get_http_options()
-
-            request, metadata = self._interceptor.pre_get_app_gateway(request, metadata)
-            transcoded_request = _BaseAppGatewaysServiceRestTransport._BaseGetAppGateway._get_transcoded_request(http_options, request)
-
-            # Jsonify the query params
-            query_params = _BaseAppGatewaysServiceRestTransport._BaseGetAppGateway._get_query_params_json(transcoded_request)
-
-            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
-                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
-                method = transcoded_request['method']
-                try:
-                    request_payload = type(request).to_json(request)
-                except:
-                    request_payload = None
-                http_request = {
-                  "payload": request_payload,
-                  "requestMethod": method,
-                  "requestUrl": request_url,
-                  "headers": dict(metadata),
-                }
-                _LOGGER.debug(
-                    f"Sending request for google.cloud.beyondcorp.appgateways_v1.AppGatewaysServiceClient.GetAppGateway",
-                    extra = {
-                        "serviceName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService",
-                        "rpcName": "GetAppGateway",
-                        "httpRequest": http_request,
-                        "metadata": http_request["headers"],
-                    },
-                )
-
-            # Send the request
-            response = AppGatewaysServiceRestTransport._GetAppGateway._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
-
-            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
-            # subclass.
-            if response.status_code >= 400:
-                raise core_exceptions.from_http_response(response)
-
-            # Return the response
-            resp = app_gateways_service.AppGateway()
-            pb_resp = app_gateways_service.AppGateway.pb(resp)
-
-            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
-
-            resp = self._interceptor.post_get_app_gateway(resp)
-            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
-            resp, _ = self._interceptor.post_get_app_gateway_with_metadata(resp, response_metadata)
-            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
-                try:
-                    # Log the parsed message (``resp``), not the raw HTTP response.
-                    response_payload = app_gateways_service.AppGateway.to_json(resp)
-                except:
-                    response_payload = None
-                http_response = {
-                "payload": response_payload,
-                "headers": dict(response.headers),
-                "status": response.status_code,
-                }
-                _LOGGER.debug(
-                    "Received response for google.cloud.beyondcorp.appgateways_v1.AppGatewaysServiceClient.get_app_gateway",
-                    extra = {
-                        "serviceName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService",
-                        "rpcName": "GetAppGateway",
-                        "metadata": http_response["headers"],
-                        "httpResponse": http_response,
-                    },
-                )
-            return resp
-
-    class _ListAppGateways(_BaseAppGatewaysServiceRestTransport._BaseListAppGateways, AppGatewaysServiceRestStub):
-        def __hash__(self):
-            return hash("AppGatewaysServiceRestTransport.ListAppGateways")
-
-        @staticmethod
-        def _get_response(
-            host,
-            metadata,
-            query_params,
-            session,
-            timeout,
-            transcoded_request,
-            body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
-            headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
-            response = getattr(session, method)(
-                "{host}{uri}".format(host=host, uri=uri),
-                timeout=timeout,
-                headers=headers,
-                params=rest_helpers.flatten_query_params(query_params, strict=True),
-            )
-            return response
-
-        def __call__(self,
-                request: app_gateways_service.ListAppGatewaysRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-
metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> app_gateways_service.ListAppGatewaysResponse: - r"""Call the list app gateways method over HTTP. - - Args: - request (~.app_gateways_service.ListAppGatewaysRequest): - The request object. Request message for - BeyondCorp.ListAppGateways. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.app_gateways_service.ListAppGatewaysResponse: - Response message for - BeyondCorp.ListAppGateways. - - """ - - http_options = _BaseAppGatewaysServiceRestTransport._BaseListAppGateways._get_http_options() - - request, metadata = self._interceptor.pre_list_app_gateways(request, metadata) - transcoded_request = _BaseAppGatewaysServiceRestTransport._BaseListAppGateways._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAppGatewaysServiceRestTransport._BaseListAppGateways._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.appgateways_v1.AppGatewaysServiceClient.ListAppGateways", - extra = { - "serviceName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService", - "rpcName": "ListAppGateways", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AppGatewaysServiceRestTransport._ListAppGateways._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
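-            # (Illustrative only: the public client wraps this response in a
-            # ListAppGatewaysPager so callers can iterate results across pages:
-            #
-            #     for app_gateway in client.list_app_gateways(parent=parent):
-            #         ...
-            #
-            # where ``parent`` is a hypothetical location resource name.)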
-            if response.status_code >= 400:
-                raise core_exceptions.from_http_response(response)
-
-            # Return the response
-            resp = app_gateways_service.ListAppGatewaysResponse()
-            pb_resp = app_gateways_service.ListAppGatewaysResponse.pb(resp)
-
-            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
-
-            resp = self._interceptor.post_list_app_gateways(resp)
-            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
-            resp, _ = self._interceptor.post_list_app_gateways_with_metadata(resp, response_metadata)
-            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
-                try:
-                    # Log the parsed message (``resp``), not the raw HTTP response.
-                    response_payload = app_gateways_service.ListAppGatewaysResponse.to_json(resp)
-                except:
-                    response_payload = None
-                http_response = {
-                "payload": response_payload,
-                "headers": dict(response.headers),
-                "status": response.status_code,
-                }
-                _LOGGER.debug(
-                    "Received response for google.cloud.beyondcorp.appgateways_v1.AppGatewaysServiceClient.list_app_gateways",
-                    extra = {
-                        "serviceName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService",
-                        "rpcName": "ListAppGateways",
-                        "metadata": http_response["headers"],
-                        "httpResponse": http_response,
-                    },
-                )
-            return resp
-
-    @property
-    def create_app_gateway(self) -> Callable[
-            [app_gateways_service.CreateAppGatewayRequest],
-            operations_pb2.Operation]:
-        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
-        # In C++ this would require a dynamic_cast
-        return self._CreateAppGateway(self._session, self._host, self._interceptor) # type: ignore
-
-    @property
-    def delete_app_gateway(self) -> Callable[
-            [app_gateways_service.DeleteAppGatewayRequest],
-            operations_pb2.Operation]:
-        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
-        # In C++ this would require a dynamic_cast
-        return self._DeleteAppGateway(self._session, self._host, self._interceptor) # type: ignore
-
-    @property
-    def get_app_gateway(self) -> Callable[
-            [app_gateways_service.GetAppGatewayRequest],
-            app_gateways_service.AppGateway]:
-        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
-        # In C++ this would require a dynamic_cast
-        return self._GetAppGateway(self._session, self._host, self._interceptor) # type: ignore
-
-    @property
-    def list_app_gateways(self) -> Callable[
-            [app_gateways_service.ListAppGatewaysRequest],
-            app_gateways_service.ListAppGatewaysResponse]:
-        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
- # In C++ this would require a dynamic_cast - return self._ListAppGateways(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_location(self): - return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore - - class _GetLocation(_BaseAppGatewaysServiceRestTransport._BaseGetLocation, AppGatewaysServiceRestStub): - def __hash__(self): - return hash("AppGatewaysServiceRestTransport.GetLocation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: locations_pb2.GetLocationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.Location: - - r"""Call the get location method over HTTP. - - Args: - request (locations_pb2.GetLocationRequest): - The request object for GetLocation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - locations_pb2.Location: Response from GetLocation method. - """ - - http_options = _BaseAppGatewaysServiceRestTransport._BaseGetLocation._get_http_options() - - request, metadata = self._interceptor.pre_get_location(request, metadata) - transcoded_request = _BaseAppGatewaysServiceRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAppGatewaysServiceRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.appgateways_v1.AppGatewaysServiceClient.GetLocation", - extra = { - "serviceName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService", - "rpcName": "GetLocation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AppGatewaysServiceRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
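-            # (Illustrative only: this call can be observed by passing a custom
-            # interceptor to the transport constructor, e.g.
-            #
-            #     AppGatewaysServiceRestTransport(interceptor=MyInterceptor())
-            #
-            # where ``MyInterceptor`` is a hypothetical subclass of
-            # AppGatewaysServiceRestInterceptor overriding pre_get_location.)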
-            if response.status_code >= 400:
-                raise core_exceptions.from_http_response(response)
-
-            content = response.content.decode("utf-8")
-            resp = locations_pb2.Location()
-            resp = json_format.Parse(content, resp)
-            resp = self._interceptor.post_get_location(resp)
-            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
-                try:
-                    response_payload = json_format.MessageToJson(resp)
-                except:
-                    response_payload = None
-                http_response = {
-                "payload": response_payload,
-                "headers": dict(response.headers),
-                "status": response.status_code,
-                }
-                _LOGGER.debug(
-                    "Received response for google.cloud.beyondcorp.appgateways_v1.AppGatewaysServiceClient.GetLocation",
-                    extra = {
-                        "serviceName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService",
-                        "rpcName": "GetLocation",
-                        "httpResponse": http_response,
-                        "metadata": http_response["headers"],
-                    },
-                )
-            return resp
-
-    @property
-    def list_locations(self):
-        return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore
-
-    class _ListLocations(_BaseAppGatewaysServiceRestTransport._BaseListLocations, AppGatewaysServiceRestStub):
-        def __hash__(self):
-            return hash("AppGatewaysServiceRestTransport.ListLocations")
-
-        @staticmethod
-        def _get_response(
-            host,
-            metadata,
-            query_params,
-            session,
-            timeout,
-            transcoded_request,
-            body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
-            headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
-            response = getattr(session, method)(
-                "{host}{uri}".format(host=host, uri=uri),
-                timeout=timeout,
-                headers=headers,
-                params=rest_helpers.flatten_query_params(query_params, strict=True),
-            )
-            return response
-
-        def __call__(self,
-                request: locations_pb2.ListLocationsRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
-                ) -> locations_pb2.ListLocationsResponse:
-
-            r"""Call the list locations method over HTTP.
-
-            Args:
-                request (locations_pb2.ListLocationsRequest):
-                    The request object for ListLocations method.
-                retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                    should be retried.
-                timeout (float): The timeout for this request.
-                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                    sent along with the request as metadata. Normally, each value must be of type `str`,
-                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                    be of type `bytes`.
-
-            Returns:
-                locations_pb2.ListLocationsResponse: Response from ListLocations method.
-            """
-
-            http_options = _BaseAppGatewaysServiceRestTransport._BaseListLocations._get_http_options()
-
-            request, metadata = self._interceptor.pre_list_locations(request, metadata)
-            transcoded_request = _BaseAppGatewaysServiceRestTransport._BaseListLocations._get_transcoded_request(http_options, request)
-
-            # Jsonify the query params
-            query_params = _BaseAppGatewaysServiceRestTransport._BaseListLocations._get_query_params_json(transcoded_request)
-
-            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
-                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
-                method = transcoded_request['method']
-                try:
-                    request_payload = json_format.MessageToJson(request)
-                except:
-                    request_payload = None
-                http_request = {
-                  "payload": request_payload,
-                  "requestMethod": method,
-                  "requestUrl": request_url,
-                  "headers": dict(metadata),
-                }
-                _LOGGER.debug(
-                    f"Sending request for google.cloud.beyondcorp.appgateways_v1.AppGatewaysServiceClient.ListLocations",
-                    extra = {
-                        "serviceName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService",
-                        "rpcName": "ListLocations",
-                        "httpRequest": http_request,
-                        "metadata": http_request["headers"],
-                    },
-                )
-
-            # Send the request
-            response = AppGatewaysServiceRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
-
-            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
-            # subclass.
-            if response.status_code >= 400:
-                raise core_exceptions.from_http_response(response)
-
-            content = response.content.decode("utf-8")
-            resp = locations_pb2.ListLocationsResponse()
-            resp = json_format.Parse(content, resp)
-            resp = self._interceptor.post_list_locations(resp)
-            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
-                try:
-                    response_payload = json_format.MessageToJson(resp)
-                except:
-                    response_payload = None
-                http_response = {
-                "payload": response_payload,
-                "headers": dict(response.headers),
-                "status": response.status_code,
-                }
-                _LOGGER.debug(
-                    "Received response for google.cloud.beyondcorp.appgateways_v1.AppGatewaysServiceClient.ListLocations",
-                    extra = {
-                        "serviceName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService",
-                        "rpcName": "ListLocations",
-                        "httpResponse": http_response,
-                        "metadata": http_response["headers"],
-                    },
-                )
-            return resp
-
-    @property
-    def get_iam_policy(self):
-        return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore
-
-    class _GetIamPolicy(_BaseAppGatewaysServiceRestTransport._BaseGetIamPolicy, AppGatewaysServiceRestStub):
-        def __hash__(self):
-            return hash("AppGatewaysServiceRestTransport.GetIamPolicy")
-
-        @staticmethod
-        def _get_response(
-            host,
-            metadata,
-            query_params,
-            session,
-            timeout,
-            transcoded_request,
-            body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
-            headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
-            response = getattr(session, method)(
-                "{host}{uri}".format(host=host, uri=uri),
-                timeout=timeout,
-                headers=headers,
-                params=rest_helpers.flatten_query_params(query_params, strict=True),
-            )
-            return response
-
-        def __call__(self,
-                request: iam_policy_pb2.GetIamPolicyRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
-                ) -> policy_pb2.Policy:
-
-            r"""Call the get iam policy method over HTTP.
- - Args: - request (iam_policy_pb2.GetIamPolicyRequest): - The request object for GetIamPolicy method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - policy_pb2.Policy: Response from GetIamPolicy method. - """ - - http_options = _BaseAppGatewaysServiceRestTransport._BaseGetIamPolicy._get_http_options() - - request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) - transcoded_request = _BaseAppGatewaysServiceRestTransport._BaseGetIamPolicy._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAppGatewaysServiceRestTransport._BaseGetIamPolicy._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.appgateways_v1.AppGatewaysServiceClient.GetIamPolicy", - extra = { - "serviceName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService", - "rpcName": "GetIamPolicy", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AppGatewaysServiceRestTransport._GetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
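-            # (Illustrative only: a typical caller-side request, where
-            # ``resource`` is a hypothetical AppGateway resource name:
-            #
-            #     policy = client.get_iam_policy(request={"resource": resource})
-            # )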
-            if response.status_code >= 400:
-                raise core_exceptions.from_http_response(response)
-
-            content = response.content.decode("utf-8")
-            resp = policy_pb2.Policy()
-            resp = json_format.Parse(content, resp)
-            resp = self._interceptor.post_get_iam_policy(resp)
-            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
-                try:
-                    response_payload = json_format.MessageToJson(resp)
-                except:
-                    response_payload = None
-                http_response = {
-                "payload": response_payload,
-                "headers": dict(response.headers),
-                "status": response.status_code,
-                }
-                _LOGGER.debug(
-                    "Received response for google.cloud.beyondcorp.appgateways_v1.AppGatewaysServiceClient.GetIamPolicy",
-                    extra = {
-                        "serviceName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService",
-                        "rpcName": "GetIamPolicy",
-                        "httpResponse": http_response,
-                        "metadata": http_response["headers"],
-                    },
-                )
-            return resp
-
-    @property
-    def set_iam_policy(self):
-        return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore
-
-    class _SetIamPolicy(_BaseAppGatewaysServiceRestTransport._BaseSetIamPolicy, AppGatewaysServiceRestStub):
-        def __hash__(self):
-            return hash("AppGatewaysServiceRestTransport.SetIamPolicy")
-
-        @staticmethod
-        def _get_response(
-            host,
-            metadata,
-            query_params,
-            session,
-            timeout,
-            transcoded_request,
-            body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
-            headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
-            response = getattr(session, method)(
-                "{host}{uri}".format(host=host, uri=uri),
-                timeout=timeout,
-                headers=headers,
-                params=rest_helpers.flatten_query_params(query_params, strict=True),
-                data=body,
-            )
-            return response
-
-        def __call__(self,
-                request: iam_policy_pb2.SetIamPolicyRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
-                ) -> policy_pb2.Policy:
-
-            r"""Call the set iam policy method over HTTP.
-
-            Args:
-                request (iam_policy_pb2.SetIamPolicyRequest):
-                    The request object for SetIamPolicy method.
-                retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                    should be retried.
-                timeout (float): The timeout for this request.
-                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                    sent along with the request as metadata. Normally, each value must be of type `str`,
-                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                    be of type `bytes`.
-
-            Returns:
-                policy_pb2.Policy: Response from SetIamPolicy method.
-            """
-
-            http_options = _BaseAppGatewaysServiceRestTransport._BaseSetIamPolicy._get_http_options()
-
-            request, metadata = self._interceptor.pre_set_iam_policy(request, metadata)
-            transcoded_request = _BaseAppGatewaysServiceRestTransport._BaseSetIamPolicy._get_transcoded_request(http_options, request)
-
-            body = _BaseAppGatewaysServiceRestTransport._BaseSetIamPolicy._get_request_body_json(transcoded_request)
-
-            # Jsonify the query params
-            query_params = _BaseAppGatewaysServiceRestTransport._BaseSetIamPolicy._get_query_params_json(transcoded_request)
-
-            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
-                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
-                method = transcoded_request['method']
-                try:
-                    request_payload = json_format.MessageToJson(request)
-                except:
-                    request_payload = None
-                http_request = {
-                  "payload": request_payload,
-                  "requestMethod": method,
-                  "requestUrl": request_url,
-                  "headers": dict(metadata),
-                }
-                _LOGGER.debug(
-                    f"Sending request for google.cloud.beyondcorp.appgateways_v1.AppGatewaysServiceClient.SetIamPolicy",
-                    extra = {
-                        "serviceName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService",
-                        "rpcName": "SetIamPolicy",
-                        "httpRequest": http_request,
-                        "metadata": http_request["headers"],
-                    },
-                )
-
-            # Send the request
-            response = AppGatewaysServiceRestTransport._SetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
-
-            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
-            # subclass.
-            if response.status_code >= 400:
-                raise core_exceptions.from_http_response(response)
-
-            content = response.content.decode("utf-8")
-            resp = policy_pb2.Policy()
-            resp = json_format.Parse(content, resp)
-            resp = self._interceptor.post_set_iam_policy(resp)
-            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
-                try:
-                    response_payload = json_format.MessageToJson(resp)
-                except:
-                    response_payload = None
-                http_response = {
-                "payload": response_payload,
-                "headers": dict(response.headers),
-                "status": response.status_code,
-                }
-                _LOGGER.debug(
-                    "Received response for google.cloud.beyondcorp.appgateways_v1.AppGatewaysServiceClient.SetIamPolicy",
-                    extra = {
-                        "serviceName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService",
-                        "rpcName": "SetIamPolicy",
-                        "httpResponse": http_response,
-                        "metadata": http_response["headers"],
-                    },
-                )
-            return resp
-
-    @property
-    def test_iam_permissions(self):
-        return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore
-
-    class _TestIamPermissions(_BaseAppGatewaysServiceRestTransport._BaseTestIamPermissions, AppGatewaysServiceRestStub):
-        def __hash__(self):
-            return hash("AppGatewaysServiceRestTransport.TestIamPermissions")
-
-        @staticmethod
-        def _get_response(
-            host,
-            metadata,
-            query_params,
-            session,
-            timeout,
-            transcoded_request,
-            body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
-            headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
-            response = getattr(session, method)(
-                "{host}{uri}".format(host=host, uri=uri),
-                timeout=timeout,
-                headers=headers,
-                params=rest_helpers.flatten_query_params(query_params, strict=True),
-                data=body,
-            )
-            return response
-
-        def __call__(self,
-                request: iam_policy_pb2.TestIamPermissionsRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-
metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - - r"""Call the test iam permissions method over HTTP. - - Args: - request (iam_policy_pb2.TestIamPermissionsRequest): - The request object for TestIamPermissions method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. - """ - - http_options = _BaseAppGatewaysServiceRestTransport._BaseTestIamPermissions._get_http_options() - - request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) - transcoded_request = _BaseAppGatewaysServiceRestTransport._BaseTestIamPermissions._get_transcoded_request(http_options, request) - - body = _BaseAppGatewaysServiceRestTransport._BaseTestIamPermissions._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAppGatewaysServiceRestTransport._BaseTestIamPermissions._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.appgateways_v1.AppGatewaysServiceClient.TestIamPermissions", - extra = { - "serviceName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService", - "rpcName": "TestIamPermissions", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AppGatewaysServiceRestTransport._TestIamPermissions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
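-            # (Illustrative only: a caller-side sketch; the permission string
-            # below is an assumption, not taken from this API's definition:
-            #
-            #     resp = client.test_iam_permissions(request={
-            #         "resource": resource,
-            #         "permissions": ["beyondcorp.appGateways.get"],
-            #     })
-            # )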
-            if response.status_code >= 400:
-                raise core_exceptions.from_http_response(response)
-
-            content = response.content.decode("utf-8")
-            resp = iam_policy_pb2.TestIamPermissionsResponse()
-            resp = json_format.Parse(content, resp)
-            resp = self._interceptor.post_test_iam_permissions(resp)
-            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
-                try:
-                    response_payload = json_format.MessageToJson(resp)
-                except:
-                    response_payload = None
-                http_response = {
-                "payload": response_payload,
-                "headers": dict(response.headers),
-                "status": response.status_code,
-                }
-                _LOGGER.debug(
-                    "Received response for google.cloud.beyondcorp.appgateways_v1.AppGatewaysServiceClient.TestIamPermissions",
-                    extra = {
-                        "serviceName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService",
-                        "rpcName": "TestIamPermissions",
-                        "httpResponse": http_response,
-                        "metadata": http_response["headers"],
-                    },
-                )
-            return resp
-
-    @property
-    def cancel_operation(self):
-        return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore
-
-    class _CancelOperation(_BaseAppGatewaysServiceRestTransport._BaseCancelOperation, AppGatewaysServiceRestStub):
-        def __hash__(self):
-            return hash("AppGatewaysServiceRestTransport.CancelOperation")
-
-        @staticmethod
-        def _get_response(
-            host,
-            metadata,
-            query_params,
-            session,
-            timeout,
-            transcoded_request,
-            body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
-            headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
-            response = getattr(session, method)(
-                "{host}{uri}".format(host=host, uri=uri),
-                timeout=timeout,
-                headers=headers,
-                params=rest_helpers.flatten_query_params(query_params, strict=True),
-                data=body,
-            )
-            return response
-
-        def __call__(self,
-                request: operations_pb2.CancelOperationRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
-                ) -> None:
-
-            r"""Call the cancel operation method over HTTP.
-
-            Args:
-                request (operations_pb2.CancelOperationRequest):
-                    The request object for CancelOperation method.
-                retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                    should be retried.
-                timeout (float): The timeout for this request.
-                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                    sent along with the request as metadata. Normally, each value must be of type `str`,
-                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                    be of type `bytes`.
- """ - - http_options = _BaseAppGatewaysServiceRestTransport._BaseCancelOperation._get_http_options() - - request, metadata = self._interceptor.pre_cancel_operation(request, metadata) - transcoded_request = _BaseAppGatewaysServiceRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) - - body = _BaseAppGatewaysServiceRestTransport._BaseCancelOperation._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseAppGatewaysServiceRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.appgateways_v1.AppGatewaysServiceClient.CancelOperation", - extra = { - "serviceName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService", - "rpcName": "CancelOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AppGatewaysServiceRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_cancel_operation(None) - - @property - def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - - class _DeleteOperation(_BaseAppGatewaysServiceRestTransport._BaseDeleteOperation, AppGatewaysServiceRestStub): - def __hash__(self): - return hash("AppGatewaysServiceRestTransport.DeleteOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - - r"""Call the delete operation method over HTTP. - - Args: - request (operations_pb2.DeleteOperationRequest): - The request object for DeleteOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = _BaseAppGatewaysServiceRestTransport._BaseDeleteOperation._get_http_options() - - request, metadata = self._interceptor.pre_delete_operation(request, metadata) - transcoded_request = _BaseAppGatewaysServiceRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAppGatewaysServiceRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.appgateways_v1.AppGatewaysServiceClient.DeleteOperation", - extra = { - "serviceName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService", - "rpcName": "DeleteOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AppGatewaysServiceRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_delete_operation(None) - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(_BaseAppGatewaysServiceRestTransport._BaseGetOperation, AppGatewaysServiceRestStub): - def __hash__(self): - return hash("AppGatewaysServiceRestTransport.GetOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.Operation: Response from GetOperation method. 
-            """
-
-            http_options = _BaseAppGatewaysServiceRestTransport._BaseGetOperation._get_http_options()
-
-            request, metadata = self._interceptor.pre_get_operation(request, metadata)
-            transcoded_request = _BaseAppGatewaysServiceRestTransport._BaseGetOperation._get_transcoded_request(http_options, request)
-
-            # Jsonify the query params
-            query_params = _BaseAppGatewaysServiceRestTransport._BaseGetOperation._get_query_params_json(transcoded_request)
-
-            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
-                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
-                method = transcoded_request['method']
-                try:
-                    request_payload = json_format.MessageToJson(request)
-                except:
-                    request_payload = None
-                http_request = {
-                  "payload": request_payload,
-                  "requestMethod": method,
-                  "requestUrl": request_url,
-                  "headers": dict(metadata),
-                }
-                _LOGGER.debug(
-                    f"Sending request for google.cloud.beyondcorp.appgateways_v1.AppGatewaysServiceClient.GetOperation",
-                    extra = {
-                        "serviceName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService",
-                        "rpcName": "GetOperation",
-                        "httpRequest": http_request,
-                        "metadata": http_request["headers"],
-                    },
-                )
-
-            # Send the request
-            response = AppGatewaysServiceRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
-
-            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
-            # subclass.
-            if response.status_code >= 400:
-                raise core_exceptions.from_http_response(response)
-
-            content = response.content.decode("utf-8")
-            resp = operations_pb2.Operation()
-            resp = json_format.Parse(content, resp)
-            resp = self._interceptor.post_get_operation(resp)
-            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
-                try:
-                    response_payload = json_format.MessageToJson(resp)
-                except:
-                    response_payload = None
-                http_response = {
-                "payload": response_payload,
-                "headers": dict(response.headers),
-                "status": response.status_code,
-                }
-                _LOGGER.debug(
-                    "Received response for google.cloud.beyondcorp.appgateways_v1.AppGatewaysServiceClient.GetOperation",
-                    extra = {
-                        "serviceName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService",
-                        "rpcName": "GetOperation",
-                        "httpResponse": http_response,
-                        "metadata": http_response["headers"],
-                    },
-                )
-            return resp
-
-    @property
-    def list_operations(self):
-        return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore
-
-    class _ListOperations(_BaseAppGatewaysServiceRestTransport._BaseListOperations, AppGatewaysServiceRestStub):
-        def __hash__(self):
-            return hash("AppGatewaysServiceRestTransport.ListOperations")
-
-        @staticmethod
-        def _get_response(
-            host,
-            metadata,
-            query_params,
-            session,
-            timeout,
-            transcoded_request,
-            body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
-            headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
-            response = getattr(session, method)(
-                "{host}{uri}".format(host=host, uri=uri),
-                timeout=timeout,
-                headers=headers,
-                params=rest_helpers.flatten_query_params(query_params, strict=True),
-            )
-            return response
-
-        def __call__(self,
-                request: operations_pb2.ListOperationsRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
-                ) -> operations_pb2.ListOperationsResponse:
-
-            r"""Call the list operations method over
HTTP. - - Args: - request (operations_pb2.ListOperationsRequest): - The request object for ListOperations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.ListOperationsResponse: Response from ListOperations method. - """ - - http_options = _BaseAppGatewaysServiceRestTransport._BaseListOperations._get_http_options() - - request, metadata = self._interceptor.pre_list_operations(request, metadata) - transcoded_request = _BaseAppGatewaysServiceRestTransport._BaseListOperations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseAppGatewaysServiceRestTransport._BaseListOperations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.appgateways_v1.AppGatewaysServiceClient.ListOperations", - extra = { - "serviceName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService", - "rpcName": "ListOperations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = AppGatewaysServiceRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
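``core_exceptions.from_http_response`` maps the HTTP status (and any JSON error payload) to the matching ``GoogleAPICallError`` subclass. A tiny sketch of that status-to-exception mapping, using the related ``from_http_status`` helper from ``google.api_core.exceptions``:

.. code-block:: python

    from google.api_core import exceptions as core_exceptions

    # 404 maps to NotFound, 403 to PermissionDenied, 5xx to ServerError subclasses, etc.
    exc = core_exceptions.from_http_status(404, "operation not found")
    assert isinstance(exc, core_exceptions.NotFound)
    assert isinstance(exc, core_exceptions.GoogleAPICallError)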
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.ListOperationsResponse() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_list_operations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.beyondcorp.appgateways_v1.AppGatewaysServiceAsyncClient.ListOperations", - extra = { - "serviceName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService", - "rpcName": "ListOperations", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'AppGatewaysServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/transports/rest_base.py b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/transports/rest_base.py deleted file mode 100644 index 1595872990b0..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/transports/rest_base.py +++ /dev/null @@ -1,549 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from .base import AppGatewaysServiceTransport, DEFAULT_CLIENT_INFO - -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - - -from google.cloud.beyondcorp_appgateways_v1.types import app_gateways_service -from google.longrunning import operations_pb2 # type: ignore - - -class _BaseAppGatewaysServiceRestTransport(AppGatewaysServiceTransport): - """Base REST backend transport for AppGatewaysService. - - Note: This class is not meant to be used directly. Use its sync and - async sub-classes instead. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
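Each per-RPC ``_Base*`` helper class below pairs an HTTP rule with ``path_template.transcode``, which expands the ``{field=pattern}`` variables in the URI template and leaves the remaining request fields for the query string or body. A hedged sketch with an illustrative resource name:

.. code-block:: python

    from google.api_core import path_template

    http_options = [{
        'method': 'get',
        'uri': '/v1/{name=projects/*/locations/*/appGateways/*}',
    }]
    transcoded = path_template.transcode(
        http_options,
        name='projects/my-project/locations/us-central1/appGateways/my-gw',  # illustrative
    )
    assert transcoded['method'] == 'get'
    assert transcoded['uri'] == (
        '/v1/projects/my-project/locations/us-central1/appGateways/my-gw'
    )
    # fields not bound into the path are left in transcoded['query_params']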
-
-    It sends JSON representations of protocol buffers over HTTP/1.1
-    """
-
-    def __init__(self, *,
-            host: str = 'beyondcorp.googleapis.com',
-            credentials: Optional[Any] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            url_scheme: str = 'https',
-            api_audience: Optional[str] = None,
-            ) -> None:
-        """Instantiate the transport.
-        Args:
-            host (Optional[str]):
-                The hostname to connect to (default: 'beyondcorp.googleapis.com').
-            credentials (Optional[Any]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you are developing
-                your own client library.
-            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
-                be used for service account credentials.
-            url_scheme: the protocol scheme for the API endpoint. Normally
-                "https", but for testing or local servers,
-                "http" can be specified.
-        """
-        # Run the base constructor
-        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
-        if maybe_url_match is None:
-            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
-
-        url_match_items = maybe_url_match.groupdict()
-
-        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
-
-        super().__init__(
-            host=host,
-            credentials=credentials,
-            client_info=client_info,
-            always_use_jwt_access=always_use_jwt_access,
-            api_audience=api_audience
-        )
-
-    class _BaseCreateAppGateway:
-        def __hash__(self):  # pragma: NO COVER
-            return NotImplementedError("__hash__ must be implemented.")
-
-        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
-        }
-
-        @classmethod
-        def _get_unset_required_fields(cls, message_dict):
-            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
-
-        @staticmethod
-        def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'post',
-                'uri': '/v1/{parent=projects/*/locations/*}/appGateways',
-                'body': 'app_gateway',
-            },
-            ]
-            return http_options
-
-        @staticmethod
-        def _get_transcoded_request(http_options, request):
-            pb_request = app_gateways_service.CreateAppGatewayRequest.pb(request)
-            transcoded_request = path_template.transcode(http_options, pb_request)
-            return transcoded_request
-
-        @staticmethod
-        def _get_request_body_json(transcoded_request):
-            # Jsonify the request body
-
-            body = json_format.MessageToJson(
-                transcoded_request['body'],
-                use_integers_for_enums=True
-            )
-            return body
-        @staticmethod
-        def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json_format.MessageToJson(
-                transcoded_request['query_params'],
-                use_integers_for_enums=True,
-            ))
-            query_params.update(_BaseAppGatewaysServiceRestTransport._BaseCreateAppGateway._get_unset_required_fields(query_params))
-
-            query_params["$alt"] = "json;enum-encoding=int"
-            return query_params
-
-    class _BaseDeleteAppGateway:
-        def __hash__(self):  # pragma: NO COVER
-            return NotImplementedError("__hash__ must be implemented.")
-
-        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
-        }
-
-        @classmethod
-        def _get_unset_required_fields(cls, message_dict):
-            return {k: v for k, v in
cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/appGateways/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = app_gateways_service.DeleteAppGatewayRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAppGatewaysServiceRestTransport._BaseDeleteAppGateway._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetAppGateway: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/appGateways/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = app_gateways_service.GetAppGatewayRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAppGatewaysServiceRestTransport._BaseGetAppGateway._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListAppGateways: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/appGateways', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = app_gateways_service.ListAppGatewaysRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseAppGatewaysServiceRestTransport._BaseListAppGateways._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetLocation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': 
'/v1/{name=projects/*/locations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseListLocations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*}/locations', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseGetIamPolicy: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{resource=projects/*/locations/*/appConnections/*}:getIamPolicy', - }, - { - 'method': 'get', - 'uri': '/v1/{resource=projects/*/locations/*/appConnectors/*}:getIamPolicy', - }, - { - 'method': 'get', - 'uri': '/v1/{resource=projects/*/locations/*/appGateways/*}:getIamPolicy', - }, - { - 'method': 'get', - 'uri': '/v1/{resource=projects/*/locations/*/clientConnectorServices/*}:getIamPolicy', - }, - { - 'method': 'get', - 'uri': '/v1/{resource=projects/*/locations/*/clientGateways/*}:getIamPolicy', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseSetIamPolicy: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/appConnections/*}:setIamPolicy', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/appConnectors/*}:setIamPolicy', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/appGateways/*}:setIamPolicy', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/clientConnectorServices/*}:setIamPolicy', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/clientGateways/*}:setIamPolicy', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - body = 
json.dumps(transcoded_request['body']) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseTestIamPermissions: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/appConnections/*}:testIamPermissions', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/appConnectors/*}:testIamPermissions', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/appGateways/*}:testIamPermissions', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/clientConnectorServices/*}:testIamPermissions', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/clientGateways/*}:testIamPermissions', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - body = json.dumps(transcoded_request['body']) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseCancelOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - body = json.dumps(transcoded_request['body']) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseDeleteOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseGetOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': 
'/v1/{name=projects/*/locations/*/operations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseListOperations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - -__all__=( - '_BaseAppGatewaysServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/types/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/types/__init__.py deleted file mode 100644 index a0eeca56e62a..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/types/__init__.py +++ /dev/null @@ -1,34 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .app_gateways_service import ( - AppGateway, - AppGatewayOperationMetadata, - CreateAppGatewayRequest, - DeleteAppGatewayRequest, - GetAppGatewayRequest, - ListAppGatewaysRequest, - ListAppGatewaysResponse, -) - -__all__ = ( - 'AppGateway', - 'AppGatewayOperationMetadata', - 'CreateAppGatewayRequest', - 'DeleteAppGatewayRequest', - 'GetAppGatewayRequest', - 'ListAppGatewaysRequest', - 'ListAppGatewaysResponse', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/types/app_gateways_service.py b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/types/app_gateways_service.py deleted file mode 100644 index e0f46b3d81bc..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/google/cloud/beyondcorp_appgateways_v1/types/app_gateways_service.py +++ /dev/null @@ -1,484 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-from __future__ import annotations
-
-from typing import MutableMapping, MutableSequence
-
-import proto  # type: ignore
-
-from google.protobuf import timestamp_pb2  # type: ignore
-
-
-__protobuf__ = proto.module(
-    package='google.cloud.beyondcorp.appgateways.v1',
-    manifest={
-        'ListAppGatewaysRequest',
-        'ListAppGatewaysResponse',
-        'GetAppGatewayRequest',
-        'CreateAppGatewayRequest',
-        'DeleteAppGatewayRequest',
-        'AppGateway',
-        'AppGatewayOperationMetadata',
-    },
-)
-
-
-class ListAppGatewaysRequest(proto.Message):
-    r"""Request message for BeyondCorp.ListAppGateways.
-
-    Attributes:
-        parent (str):
-            Required. The resource name of the AppGateway location using
-            the form: ``projects/{project_id}/locations/{location_id}``
-        page_size (int):
-            Optional. The maximum number of items to return. If not
-            specified, a default value of 50 will be used by the
-            service. Regardless of the page_size value, the response may
-            include a partial list and a caller should only rely on
-            response's
-            [next_page_token][BeyondCorp.ListAppGatewaysResponse.next_page_token]
-            to determine if there are more instances left to be queried.
-        page_token (str):
-            Optional. The next_page_token value returned from a previous
-            ListAppGatewaysRequest, if any.
-        filter (str):
-            Optional. A filter specifying constraints of
-            a list operation.
-        order_by (str):
-            Optional. Specifies the ordering of results. See `Sorting
-            order <https://cloud.google.com/apis/design/design_patterns#sorting_order>`__
-            for more information.
-    """
-
-    parent: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    page_size: int = proto.Field(
-        proto.INT32,
-        number=2,
-    )
-    page_token: str = proto.Field(
-        proto.STRING,
-        number=3,
-    )
-    filter: str = proto.Field(
-        proto.STRING,
-        number=4,
-    )
-    order_by: str = proto.Field(
-        proto.STRING,
-        number=5,
-    )
-
-
-class ListAppGatewaysResponse(proto.Message):
-    r"""Response message for BeyondCorp.ListAppGateways.
-
-    Attributes:
-        app_gateways (MutableSequence[google.cloud.beyondcorp_appgateways_v1.types.AppGateway]):
-            A list of BeyondCorp AppGateways in the
-            project.
-        next_page_token (str):
-            A token to retrieve the next page of results,
-            or empty if there are no more results in the
-            list.
-        unreachable (MutableSequence[str]):
-            A list of locations that could not be
-            reached.
-    """
-
-    @property
-    def raw_page(self):
-        return self
-
-    app_gateways: MutableSequence['AppGateway'] = proto.RepeatedField(
-        proto.MESSAGE,
-        number=1,
-        message='AppGateway',
-    )
-    next_page_token: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-    unreachable: MutableSequence[str] = proto.RepeatedField(
-        proto.STRING,
-        number=3,
-    )
-
-
-class GetAppGatewayRequest(proto.Message):
-    r"""Request message for BeyondCorp.GetAppGateway.
-
-    Attributes:
-        name (str):
-            Required. BeyondCorp AppGateway name using the form:
-            ``projects/{project_id}/locations/{location_id}/appGateways/{app_gateway_id}``
-    """
-
-    name: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-
-
-class CreateAppGatewayRequest(proto.Message):
-    r"""Request message for BeyondCorp.CreateAppGateway.
-
-    Attributes:
-        parent (str):
-            Required. The resource project name of the AppGateway
-            location using the form:
-            ``projects/{project_id}/locations/{location_id}``
-        app_gateway_id (str):
-            Optional. User-settable AppGateway resource ID.
-
-            -  Must start with a letter.
-            -  Must contain between 4-63 characters from
-               ``/[a-z][0-9]-/``.
-            -  Must end with a number or a letter.
-        app_gateway (google.cloud.beyondcorp_appgateways_v1.types.AppGateway):
-            Required. A BeyondCorp AppGateway resource.
-        request_id (str):
-            Optional. An optional request ID to identify
-            requests. Specify a unique request ID so that if
-            you must retry your request, the server will
-            know to ignore the request if it has already
-            been completed. The server will guarantee that
-            for at least 60 minutes after the first request.
-
-            For example, consider a situation where you make
-            an initial request and the request times out.
-            If you make the request again with the same
-            request ID, the server can check if the original
-            operation with the same request ID was received,
-            and if so, will ignore the second request. This
-            prevents clients from accidentally creating
-            duplicate commitments.
-
-            The request ID must be a valid UUID with the
-            exception that zero UUID is not supported
-            (00000000-0000-0000-0000-000000000000).
-        validate_only (bool):
-            Optional. If set, validates request by
-            executing a dry-run which would not alter the
-            resource in any way.
-    """
-
-    parent: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    app_gateway_id: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-    app_gateway: 'AppGateway' = proto.Field(
-        proto.MESSAGE,
-        number=3,
-        message='AppGateway',
-    )
-    request_id: str = proto.Field(
-        proto.STRING,
-        number=4,
-    )
-    validate_only: bool = proto.Field(
-        proto.BOOL,
-        number=5,
-    )
-
-
-class DeleteAppGatewayRequest(proto.Message):
-    r"""Request message for BeyondCorp.DeleteAppGateway.
-
-    Attributes:
-        name (str):
-            Required. BeyondCorp AppGateway name using the form:
-            ``projects/{project_id}/locations/{location_id}/appGateways/{app_gateway_id}``
-        request_id (str):
-            Optional. An optional request ID to identify
-            requests. Specify a unique request ID so that if
-            you must retry your request, the server will
-            know to ignore the request if it has already
-            been completed. The server will guarantee that
-            for at least 60 minutes after the first request.
-
-            For example, consider a situation where you make
-            an initial request and the request times out.
-            If you make the request again with the same
-            request ID, the server can check if the original
-            operation with the same request ID was received,
-            and if so, will ignore the second request. This
-            prevents clients from accidentally creating
-            duplicate commitments.
-
-            The request ID must be a valid UUID with the
-            exception that zero UUID is not supported
-            (00000000-0000-0000-0000-000000000000).
-        validate_only (bool):
-            Optional. If set, validates request by
-            executing a dry-run which would not alter the
-            resource in any way.
-    """
-
-    name: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    request_id: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-    validate_only: bool = proto.Field(
-        proto.BOOL,
-        number=3,
-    )
-
-
-class AppGateway(proto.Message):
-    r"""A BeyondCorp AppGateway resource represents a BeyondCorp
-    protected AppGateway to a remote application. It creates all the
-    necessary GCP components needed for creating a BeyondCorp
-    protected AppGateway. Multiple connectors can be authorized for
-    a single AppGateway.
-
-    Attributes:
-        name (str):
-            Required.
Unique resource name of the - AppGateway. The name is ignored when creating an - AppGateway. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Timestamp when the resource was - created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Timestamp when the resource was - last modified. - labels (MutableMapping[str, str]): - Optional. Resource labels to represent user - provided metadata. - display_name (str): - Optional. An arbitrary user-provided name for - the AppGateway. Cannot exceed 64 characters. - uid (str): - Output only. A unique identifier for the - instance generated by the system. - type_ (google.cloud.beyondcorp_appgateways_v1.types.AppGateway.Type): - Required. The type of network connectivity - used by the AppGateway. - state (google.cloud.beyondcorp_appgateways_v1.types.AppGateway.State): - Output only. The current state of the - AppGateway. - uri (str): - Output only. Server-defined URI for this - resource. - allocated_connections (MutableSequence[google.cloud.beyondcorp_appgateways_v1.types.AppGateway.AllocatedConnection]): - Output only. A list of connections allocated - for the Gateway - host_type (google.cloud.beyondcorp_appgateways_v1.types.AppGateway.HostType): - Required. The type of hosting used by the - AppGateway. - """ - class Type(proto.Enum): - r"""Enum containing list of all possible network connectivity - options supported by BeyondCorp AppGateway. - - Values: - TYPE_UNSPECIFIED (0): - Default value. This value is unused. - TCP_PROXY (1): - TCP Proxy based BeyondCorp Connection. API - will default to this if unset. - """ - TYPE_UNSPECIFIED = 0 - TCP_PROXY = 1 - - class State(proto.Enum): - r"""Represents the different states of an AppGateway. - - Values: - STATE_UNSPECIFIED (0): - Default value. This value is unused. - CREATING (1): - AppGateway is being created. - CREATED (2): - AppGateway has been created. - UPDATING (3): - AppGateway's configuration is being updated. - DELETING (4): - AppGateway is being deleted. - DOWN (5): - AppGateway is down and may be restored in the - future. This happens when CCFE sends - ProjectState = OFF. - """ - STATE_UNSPECIFIED = 0 - CREATING = 1 - CREATED = 2 - UPDATING = 3 - DELETING = 4 - DOWN = 5 - - class HostType(proto.Enum): - r"""Enum containing list of all possible host types supported by - BeyondCorp Connection. - - Values: - HOST_TYPE_UNSPECIFIED (0): - Default value. This value is unused. - GCP_REGIONAL_MIG (1): - AppGateway hosted in a GCP regional managed - instance group. - """ - HOST_TYPE_UNSPECIFIED = 0 - GCP_REGIONAL_MIG = 1 - - class AllocatedConnection(proto.Message): - r"""Allocated connection of the AppGateway. - - Attributes: - psc_uri (str): - Required. The PSC uri of an allocated - connection - ingress_port (int): - Required. 
The ingress port of an allocated - connection - """ - - psc_uri: str = proto.Field( - proto.STRING, - number=1, - ) - ingress_port: int = proto.Field( - proto.INT32, - number=2, - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=4, - ) - display_name: str = proto.Field( - proto.STRING, - number=5, - ) - uid: str = proto.Field( - proto.STRING, - number=6, - ) - type_: Type = proto.Field( - proto.ENUM, - number=7, - enum=Type, - ) - state: State = proto.Field( - proto.ENUM, - number=8, - enum=State, - ) - uri: str = proto.Field( - proto.STRING, - number=9, - ) - allocated_connections: MutableSequence[AllocatedConnection] = proto.RepeatedField( - proto.MESSAGE, - number=10, - message=AllocatedConnection, - ) - host_type: HostType = proto.Field( - proto.ENUM, - number=11, - enum=HostType, - ) - - -class AppGatewayOperationMetadata(proto.Message): - r"""Represents the metadata of the long-running operation. - - Attributes: - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the operation was - created. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the operation finished - running. - target (str): - Output only. Server-defined resource path for - the target of the operation. - verb (str): - Output only. Name of the verb executed by the - operation. - status_message (str): - Output only. Human-readable status of the - operation, if any. - requested_cancellation (bool): - Output only. Identifies whether the user has requested - cancellation of the operation. Operations that have - successfully been cancelled have [Operation.error][] value - with a [google.rpc.Status.code][google.rpc.Status.code] of - 1, corresponding to ``Code.CANCELLED``. - api_version (str): - Output only. API version used to start the - operation. 
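These message classes are proto-plus wrappers around the generated protobufs, so keyword construction, enum coercion from strings, and access to the raw message all work uniformly. A short, hedged sketch with placeholder values:

.. code-block:: python

    from google.cloud import beyondcorp_appgateways_v1

    gateway = beyondcorp_appgateways_v1.AppGateway(
        name="projects/my-project/locations/us-central1/appGateways/my-gw",  # placeholder
        type_=beyondcorp_appgateways_v1.AppGateway.Type.TCP_PROXY,
        host_type=beyondcorp_appgateways_v1.AppGateway.HostType.GCP_REGIONAL_MIG,
    )
    pb = beyondcorp_appgateways_v1.AppGateway.pb(gateway)  # drop down to the raw protobuf
    print(pb.name)

The field is spelled ``type_`` because ``type`` is a Python builtin; proto-plus appends the underscore when wrapping reserved names.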
-    """
-
-    create_time: timestamp_pb2.Timestamp = proto.Field(
-        proto.MESSAGE,
-        number=1,
-        message=timestamp_pb2.Timestamp,
-    )
-    end_time: timestamp_pb2.Timestamp = proto.Field(
-        proto.MESSAGE,
-        number=2,
-        message=timestamp_pb2.Timestamp,
-    )
-    target: str = proto.Field(
-        proto.STRING,
-        number=3,
-    )
-    verb: str = proto.Field(
-        proto.STRING,
-        number=4,
-    )
-    status_message: str = proto.Field(
-        proto.STRING,
-        number=5,
-    )
-    requested_cancellation: bool = proto.Field(
-        proto.BOOL,
-        number=6,
-    )
-    api_version: str = proto.Field(
-        proto.STRING,
-        number=7,
-    )
-
-
-__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/mypy.ini b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/mypy.ini
deleted file mode 100644
index 574c5aed394b..000000000000
--- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/mypy.ini
+++ /dev/null
@@ -1,3 +0,0 @@
-[mypy]
-python_version = 3.7
-namespace_packages = True
diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/noxfile.py b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/noxfile.py
deleted file mode 100644
index 9bee415057dd..000000000000
--- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/noxfile.py
+++ /dev/null
@@ -1,280 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import os
-import pathlib
-import re
-import shutil
-import subprocess
-import sys
-
-
-import nox  # type: ignore
-
-ALL_PYTHON = [
-    "3.7",
-    "3.8",
-    "3.9",
-    "3.10",
-    "3.11",
-    "3.12",
-    "3.13",
-]
-
-CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
-
-LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt"
-PACKAGE_NAME = 'google-cloud-beyondcorp-appgateways'
-
-BLACK_VERSION = "black==22.3.0"
-BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"]
-DEFAULT_PYTHON_VERSION = "3.13"
-
-nox.sessions = [
-    "unit",
-    "cover",
-    "mypy",
-    "check_lower_bounds",
-    # exclude update_lower_bounds from default
-    "docs",
-    "blacken",
-    "lint",
-    "prerelease_deps",
-]
-
-@nox.session(python=ALL_PYTHON)
-@nox.parametrize(
-    "protobuf_implementation",
-    [ "python", "upb", "cpp" ],
-)
-def unit(session, protobuf_implementation):
-    """Run the unit test suite."""
-
-    if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
-        session.skip("cpp implementation is not supported in python 3.11+")
-
-    session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"')
-    session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt")
-
-    # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped.
-    # The 'cpp' implementation requires Protobuf<4.
-    if protobuf_implementation == "cpp":
-        session.install("protobuf<4")
-
-    session.run(
-        'py.test',
-        '--quiet',
-        '--cov=google/cloud/beyondcorp_appgateways_v1/',
-        '--cov=tests/',
-        '--cov-config=.coveragerc',
-        '--cov-report=term',
-        '--cov-report=html',
-        os.path.join('tests', 'unit', ''.join(session.posargs)),
-        env={
-            "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
-        },
-    )
-
-@nox.session(python=ALL_PYTHON[-1])
-@nox.parametrize(
-    "protobuf_implementation",
-    [ "python", "upb", "cpp" ],
-)
-def prerelease_deps(session, protobuf_implementation):
-    """Run the unit test suite against pre-release versions of dependencies."""
-
-    if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
-        session.skip("cpp implementation is not supported in python 3.11+")
-
-    # Install test environment dependencies
-    session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"')
-
-    # Install the package without dependencies
-    session.install('-e', '.', '--no-deps')
-
-    # We test the minimum dependency versions using the minimum Python
-    # version so the lowest python runtime that we test has a corresponding constraints
-    # file, located at `testing/constraints-<minimum python version>.txt`, which
-    # contains all of the dependencies and extras.
-    with open(
-        CURRENT_DIRECTORY
-        / "testing"
-        / f"constraints-{ALL_PYTHON[0]}.txt",
-        encoding="utf-8",
-    ) as constraints_file:
-        constraints_text = constraints_file.read()
-
-    # Ignore leading whitespace and comment lines.
-    constraints_deps = [
-        match.group(1)
-        for match in re.finditer(
-            r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE
-        )
-    ]
-
-    session.install(*constraints_deps)
-
-    prerel_deps = [
-        "googleapis-common-protos",
-        "google-api-core",
-        "google-auth",
-        # Exclude grpcio==1.67.0rc1, which does not support python 3.13
-        "grpcio!=1.67.0rc1",
-        "grpcio-status",
-        "protobuf",
-        "proto-plus",
-    ]
-
-    for dep in prerel_deps:
-        session.install("--pre", "--no-deps", "--upgrade", dep)
-
-    # Remaining dependencies
-    other_deps = [
-        "requests",
-    ]
-    session.install(*other_deps)
-
-    # Print out prerelease package versions
-
-    session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)")
-    session.run("python", "-c", "import google.auth; print(google.auth.__version__)")
-    session.run("python", "-c", "import grpc; print(grpc.__version__)")
-    session.run(
-        "python", "-c", "import google.protobuf; print(google.protobuf.__version__)"
-    )
-    session.run(
-        "python", "-c", "import proto; print(proto.__version__)"
-    )
-
-    session.run(
-        'py.test',
-        '--quiet',
-        '--cov=google/cloud/beyondcorp_appgateways_v1/',
-        '--cov=tests/',
-        '--cov-config=.coveragerc',
-        '--cov-report=term',
-        '--cov-report=html',
-        os.path.join('tests', 'unit', ''.join(session.posargs)),
-        env={
-            "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
-        },
-    )
-
-
-@nox.session(python=DEFAULT_PYTHON_VERSION)
-def cover(session):
-    """Run the final coverage report.
-    This outputs the coverage report aggregating coverage from the unit
-    test runs (not system test runs), and then erases coverage data.
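The ``cover`` session gates on the data files the ``unit`` runs leave behind. Roughly what it enforces, as a sketch against coverage.py's Python API:

.. code-block:: python

    import coverage

    cov = coverage.Coverage()
    cov.load()  # aggregate the .coverage data written by the unit sessions
    total = cov.report(show_missing=True)  # returns the total percentage as a float
    if total < 100.0:
        raise SystemExit("coverage fell below the 100% gate")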
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/samples/generated_samples/beyondcorp_v1_generated_app_gateways_service_create_app_gateway_async.py b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/samples/generated_samples/beyondcorp_v1_generated_app_gateways_service_create_app_gateway_async.py deleted file mode 100644 index bbf1301e37d6..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/samples/generated_samples/beyondcorp_v1_generated_app_gateways_service_create_app_gateway_async.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateAppGateway -# NOTE: This snippet has been automatically generated for illustrative purposes only. 
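The sync and async snippets that follow block on ``operation.result()`` with no deadline. A hedged variant showing an explicit timeout on the LRO wait; every literal value is a placeholder, exactly as in the generated samples:

.. code-block:: python

    from google.api_core import exceptions as core_exceptions
    from google.cloud import beyondcorp_appgateways_v1

    def create_app_gateway_with_deadline():
        client = beyondcorp_appgateways_v1.AppGatewaysServiceClient()
        app_gateway = beyondcorp_appgateways_v1.AppGateway(
            name="name_value",
            type_="TCP_PROXY",
            host_type="GCP_REGIONAL_MIG",
        )
        request = beyondcorp_appgateways_v1.CreateAppGatewayRequest(
            parent="parent_value",
            app_gateway=app_gateway,
        )
        operation = client.create_app_gateway(request=request)
        try:
            return operation.result(timeout=300)  # seconds; raises on LRO failure
        except core_exceptions.GoogleAPICallError as err:
            print(f"CreateAppGateway failed: {err}")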
-# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-beyondcorp-appgateways - - -# [START beyondcorp_v1_generated_AppGatewaysService_CreateAppGateway_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import beyondcorp_appgateways_v1 - - -async def sample_create_app_gateway(): - # Create a client - client = beyondcorp_appgateways_v1.AppGatewaysServiceAsyncClient() - - # Initialize request argument(s) - app_gateway = beyondcorp_appgateways_v1.AppGateway() - app_gateway.name = "name_value" - app_gateway.type_ = "TCP_PROXY" - app_gateway.host_type = "GCP_REGIONAL_MIG" - - request = beyondcorp_appgateways_v1.CreateAppGatewayRequest( - parent="parent_value", - app_gateway=app_gateway, - ) - - # Make the request - operation = client.create_app_gateway(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END beyondcorp_v1_generated_AppGatewaysService_CreateAppGateway_async] diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/samples/generated_samples/beyondcorp_v1_generated_app_gateways_service_create_app_gateway_sync.py b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/samples/generated_samples/beyondcorp_v1_generated_app_gateways_service_create_app_gateway_sync.py deleted file mode 100644 index 04441146d524..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/samples/generated_samples/beyondcorp_v1_generated_app_gateways_service_create_app_gateway_sync.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateAppGateway -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-beyondcorp-appgateways - - -# [START beyondcorp_v1_generated_AppGatewaysService_CreateAppGateway_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import beyondcorp_appgateways_v1 - - -def sample_create_app_gateway(): - # Create a client - client = beyondcorp_appgateways_v1.AppGatewaysServiceClient() - - # Initialize request argument(s) - app_gateway = beyondcorp_appgateways_v1.AppGateway() - app_gateway.name = "name_value" - app_gateway.type_ = "TCP_PROXY" - app_gateway.host_type = "GCP_REGIONAL_MIG" - - request = beyondcorp_appgateways_v1.CreateAppGatewayRequest( - parent="parent_value", - app_gateway=app_gateway, - ) - - # Make the request - operation = client.create_app_gateway(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END beyondcorp_v1_generated_AppGatewaysService_CreateAppGateway_sync] diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/samples/generated_samples/beyondcorp_v1_generated_app_gateways_service_delete_app_gateway_async.py b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/samples/generated_samples/beyondcorp_v1_generated_app_gateways_service_delete_app_gateway_async.py deleted file mode 100644 index b9e9a6f83bdc..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/samples/generated_samples/beyondcorp_v1_generated_app_gateways_service_delete_app_gateway_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteAppGateway -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-beyondcorp-appgateways - - -# [START beyondcorp_v1_generated_AppGatewaysService_DeleteAppGateway_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import beyondcorp_appgateways_v1 - - -async def sample_delete_app_gateway(): - # Create a client - client = beyondcorp_appgateways_v1.AppGatewaysServiceAsyncClient() - - # Initialize request argument(s) - request = beyondcorp_appgateways_v1.DeleteAppGatewayRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_app_gateway(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END beyondcorp_v1_generated_AppGatewaysService_DeleteAppGateway_async] diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/samples/generated_samples/beyondcorp_v1_generated_app_gateways_service_delete_app_gateway_sync.py b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/samples/generated_samples/beyondcorp_v1_generated_app_gateways_service_delete_app_gateway_sync.py deleted file mode 100644 index a2b2bfac35fb..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/samples/generated_samples/beyondcorp_v1_generated_app_gateways_service_delete_app_gateway_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteAppGateway -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-beyondcorp-appgateways - - -# [START beyondcorp_v1_generated_AppGatewaysService_DeleteAppGateway_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import beyondcorp_appgateways_v1 - - -def sample_delete_app_gateway(): - # Create a client - client = beyondcorp_appgateways_v1.AppGatewaysServiceClient() - - # Initialize request argument(s) - request = beyondcorp_appgateways_v1.DeleteAppGatewayRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_app_gateway(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END beyondcorp_v1_generated_AppGatewaysService_DeleteAppGateway_sync] diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/samples/generated_samples/beyondcorp_v1_generated_app_gateways_service_get_app_gateway_async.py b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/samples/generated_samples/beyondcorp_v1_generated_app_gateways_service_get_app_gateway_async.py deleted file mode 100644 index 4a06dc8a35f6..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/samples/generated_samples/beyondcorp_v1_generated_app_gateways_service_get_app_gateway_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetAppGateway -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-beyondcorp-appgateways - - -# [START beyondcorp_v1_generated_AppGatewaysService_GetAppGateway_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import beyondcorp_appgateways_v1 - - -async def sample_get_app_gateway(): - # Create a client - client = beyondcorp_appgateways_v1.AppGatewaysServiceAsyncClient() - - # Initialize request argument(s) - request = beyondcorp_appgateways_v1.GetAppGatewayRequest( - name="name_value", - ) - - # Make the request - response = await client.get_app_gateway(request=request) - - # Handle the response - print(response) - -# [END beyondcorp_v1_generated_AppGatewaysService_GetAppGateway_async] diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/samples/generated_samples/beyondcorp_v1_generated_app_gateways_service_get_app_gateway_sync.py b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/samples/generated_samples/beyondcorp_v1_generated_app_gateways_service_get_app_gateway_sync.py deleted file mode 100644 index c07026a65cf5..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/samples/generated_samples/beyondcorp_v1_generated_app_gateways_service_get_app_gateway_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetAppGateway -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-beyondcorp-appgateways - - -# [START beyondcorp_v1_generated_AppGatewaysService_GetAppGateway_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import beyondcorp_appgateways_v1 - - -def sample_get_app_gateway(): - # Create a client - client = beyondcorp_appgateways_v1.AppGatewaysServiceClient() - - # Initialize request argument(s) - request = beyondcorp_appgateways_v1.GetAppGatewayRequest( - name="name_value", - ) - - # Make the request - response = client.get_app_gateway(request=request) - - # Handle the response - print(response) - -# [END beyondcorp_v1_generated_AppGatewaysService_GetAppGateway_sync] diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/samples/generated_samples/beyondcorp_v1_generated_app_gateways_service_list_app_gateways_async.py b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/samples/generated_samples/beyondcorp_v1_generated_app_gateways_service_list_app_gateways_async.py deleted file mode 100644 index 182f47e8f649..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/samples/generated_samples/beyondcorp_v1_generated_app_gateways_service_list_app_gateways_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListAppGateways -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-beyondcorp-appgateways - - -# [START beyondcorp_v1_generated_AppGatewaysService_ListAppGateways_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import beyondcorp_appgateways_v1 - - -async def sample_list_app_gateways(): - # Create a client - client = beyondcorp_appgateways_v1.AppGatewaysServiceAsyncClient() - - # Initialize request argument(s) - request = beyondcorp_appgateways_v1.ListAppGatewaysRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_app_gateways(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END beyondcorp_v1_generated_AppGatewaysService_ListAppGateways_async] diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/samples/generated_samples/beyondcorp_v1_generated_app_gateways_service_list_app_gateways_sync.py b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/samples/generated_samples/beyondcorp_v1_generated_app_gateways_service_list_app_gateways_sync.py deleted file mode 100644 index 076c1ba96a90..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/samples/generated_samples/beyondcorp_v1_generated_app_gateways_service_list_app_gateways_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListAppGateways -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-beyondcorp-appgateways - - -# [START beyondcorp_v1_generated_AppGatewaysService_ListAppGateways_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import beyondcorp_appgateways_v1 - - -def sample_list_app_gateways(): - # Create a client - client = beyondcorp_appgateways_v1.AppGatewaysServiceClient() - - # Initialize request argument(s) - request = beyondcorp_appgateways_v1.ListAppGatewaysRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_app_gateways(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END beyondcorp_v1_generated_AppGatewaysService_ListAppGateways_sync] diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appgateways.v1.json b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appgateways.v1.json deleted file mode 100644 index 0b3067b40e2b..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appgateways.v1.json +++ /dev/null @@ -1,675 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.beyondcorp.appgateways.v1", - "version": "v1" - } - ], - "language": "PYTHON", - "name": "google-cloud-beyondcorp-appgateways", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.beyondcorp_appgateways_v1.AppGatewaysServiceAsyncClient", - "shortName": "AppGatewaysServiceAsyncClient" - }, - "fullName": "google.cloud.beyondcorp_appgateways_v1.AppGatewaysServiceAsyncClient.create_app_gateway", - "method": { - "fullName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService.CreateAppGateway", - "service": { - "fullName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService", - "shortName": "AppGatewaysService" - }, - "shortName": "CreateAppGateway" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_appgateways_v1.types.CreateAppGatewayRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "app_gateway", - "type": "google.cloud.beyondcorp_appgateways_v1.types.AppGateway" - }, - { - "name": "app_gateway_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_app_gateway" - }, - "description": "Sample for CreateAppGateway", - "file": "beyondcorp_v1_generated_app_gateways_service_create_app_gateway_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_AppGatewaysService_CreateAppGateway_async", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_app_gateways_service_create_app_gateway_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": 
"google.cloud.beyondcorp_appgateways_v1.AppGatewaysServiceClient", - "shortName": "AppGatewaysServiceClient" - }, - "fullName": "google.cloud.beyondcorp_appgateways_v1.AppGatewaysServiceClient.create_app_gateway", - "method": { - "fullName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService.CreateAppGateway", - "service": { - "fullName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService", - "shortName": "AppGatewaysService" - }, - "shortName": "CreateAppGateway" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_appgateways_v1.types.CreateAppGatewayRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "app_gateway", - "type": "google.cloud.beyondcorp_appgateways_v1.types.AppGateway" - }, - { - "name": "app_gateway_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_app_gateway" - }, - "description": "Sample for CreateAppGateway", - "file": "beyondcorp_v1_generated_app_gateways_service_create_app_gateway_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_AppGatewaysService_CreateAppGateway_sync", - "segments": [ - { - "end": 61, - "start": 27, - "type": "FULL" - }, - { - "end": 61, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 58, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 62, - "start": 59, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_app_gateways_service_create_app_gateway_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.beyondcorp_appgateways_v1.AppGatewaysServiceAsyncClient", - "shortName": "AppGatewaysServiceAsyncClient" - }, - "fullName": "google.cloud.beyondcorp_appgateways_v1.AppGatewaysServiceAsyncClient.delete_app_gateway", - "method": { - "fullName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService.DeleteAppGateway", - "service": { - "fullName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService", - "shortName": "AppGatewaysService" - }, - "shortName": "DeleteAppGateway" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_appgateways_v1.types.DeleteAppGatewayRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_app_gateway" - }, - "description": "Sample for DeleteAppGateway", - "file": "beyondcorp_v1_generated_app_gateways_service_delete_app_gateway_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_AppGatewaysService_DeleteAppGateway_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": 
"REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_app_gateways_service_delete_app_gateway_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.beyondcorp_appgateways_v1.AppGatewaysServiceClient", - "shortName": "AppGatewaysServiceClient" - }, - "fullName": "google.cloud.beyondcorp_appgateways_v1.AppGatewaysServiceClient.delete_app_gateway", - "method": { - "fullName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService.DeleteAppGateway", - "service": { - "fullName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService", - "shortName": "AppGatewaysService" - }, - "shortName": "DeleteAppGateway" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_appgateways_v1.types.DeleteAppGatewayRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_app_gateway" - }, - "description": "Sample for DeleteAppGateway", - "file": "beyondcorp_v1_generated_app_gateways_service_delete_app_gateway_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_AppGatewaysService_DeleteAppGateway_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_app_gateways_service_delete_app_gateway_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.beyondcorp_appgateways_v1.AppGatewaysServiceAsyncClient", - "shortName": "AppGatewaysServiceAsyncClient" - }, - "fullName": "google.cloud.beyondcorp_appgateways_v1.AppGatewaysServiceAsyncClient.get_app_gateway", - "method": { - "fullName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService.GetAppGateway", - "service": { - "fullName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService", - "shortName": "AppGatewaysService" - }, - "shortName": "GetAppGateway" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_appgateways_v1.types.GetAppGatewayRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.beyondcorp_appgateways_v1.types.AppGateway", - "shortName": "get_app_gateway" - }, - "description": "Sample for GetAppGateway", - "file": "beyondcorp_v1_generated_app_gateways_service_get_app_gateway_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_AppGatewaysService_GetAppGateway_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - 
"type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_app_gateways_service_get_app_gateway_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.beyondcorp_appgateways_v1.AppGatewaysServiceClient", - "shortName": "AppGatewaysServiceClient" - }, - "fullName": "google.cloud.beyondcorp_appgateways_v1.AppGatewaysServiceClient.get_app_gateway", - "method": { - "fullName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService.GetAppGateway", - "service": { - "fullName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService", - "shortName": "AppGatewaysService" - }, - "shortName": "GetAppGateway" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_appgateways_v1.types.GetAppGatewayRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.beyondcorp_appgateways_v1.types.AppGateway", - "shortName": "get_app_gateway" - }, - "description": "Sample for GetAppGateway", - "file": "beyondcorp_v1_generated_app_gateways_service_get_app_gateway_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_AppGatewaysService_GetAppGateway_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_app_gateways_service_get_app_gateway_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.beyondcorp_appgateways_v1.AppGatewaysServiceAsyncClient", - "shortName": "AppGatewaysServiceAsyncClient" - }, - "fullName": "google.cloud.beyondcorp_appgateways_v1.AppGatewaysServiceAsyncClient.list_app_gateways", - "method": { - "fullName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService.ListAppGateways", - "service": { - "fullName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService", - "shortName": "AppGatewaysService" - }, - "shortName": "ListAppGateways" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_appgateways_v1.types.ListAppGatewaysRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.beyondcorp_appgateways_v1.services.app_gateways_service.pagers.ListAppGatewaysAsyncPager", - "shortName": "list_app_gateways" - }, - "description": "Sample for ListAppGateways", - "file": "beyondcorp_v1_generated_app_gateways_service_list_app_gateways_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_AppGatewaysService_ListAppGateways_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 
27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_app_gateways_service_list_app_gateways_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.beyondcorp_appgateways_v1.AppGatewaysServiceClient", - "shortName": "AppGatewaysServiceClient" - }, - "fullName": "google.cloud.beyondcorp_appgateways_v1.AppGatewaysServiceClient.list_app_gateways", - "method": { - "fullName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService.ListAppGateways", - "service": { - "fullName": "google.cloud.beyondcorp.appgateways.v1.AppGatewaysService", - "shortName": "AppGatewaysService" - }, - "shortName": "ListAppGateways" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_appgateways_v1.types.ListAppGatewaysRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.beyondcorp_appgateways_v1.services.app_gateways_service.pagers.ListAppGatewaysPager", - "shortName": "list_app_gateways" - }, - "description": "Sample for ListAppGateways", - "file": "beyondcorp_v1_generated_app_gateways_service_list_app_gateways_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_AppGatewaysService_ListAppGateways_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_app_gateways_service_list_app_gateways_sync.py" - } - ] -} diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/scripts/fixup_beyondcorp_appgateways_v1_keywords.py b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/scripts/fixup_beyondcorp_appgateways_v1_keywords.py deleted file mode 100644 index f4b0d76e1f37..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/scripts/fixup_beyondcorp_appgateways_v1_keywords.py +++ /dev/null @@ -1,179 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
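A note on the snippet metadata removed above before the fixup script body below: it exists to drive documentation tooling. Each entry in "segments" labels a region of the named sample file (FULL, SHORT, CLIENT_INITIALIZATION, REQUEST_INITIALIZATION, REQUEST_EXECUTION, RESPONSE_HANDLING) by start/end line offsets, which is how a docs site can render, say, just the request-execution slice of a sample. A minimal consumer sketch, assuming the metadata file sits next to the generated samples and that the offsets are 1-based inclusive line numbers (an assumption, inferred from the region tags):

    import json

    # Hypothetical paths; adjust to wherever the generated samples live.
    with open("snippet_metadata_google.cloud.beyondcorp.appgateways.v1.json") as f:
        meta = json.load(f)

    snippet = meta["snippets"][0]
    with open(snippet["file"]) as f:
        lines = f.read().splitlines()

    for segment in snippet["segments"]:
        if segment["type"] == "REQUEST_EXECUTION":
            # Slice assumes 1-based inclusive offsets.
            print("\n".join(lines[segment["start"] - 1 : segment["end"]]))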
-# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class beyondcorp_appgatewaysCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_app_gateway': ('parent', 'app_gateway', 'app_gateway_id', 'request_id', 'validate_only', ), - 'delete_app_gateway': ('name', 'request_id', 'validate_only', ), - 'get_app_gateway': ('name', ), - 'list_app_gateways': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=beyondcorp_appgatewaysCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the beyondcorp_appgateways client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. 
- -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/setup.py b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/setup.py deleted file mode 100644 index 4400b8fd6565..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/setup.py +++ /dev/null @@ -1,99 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
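To make the removed fixup script concrete: its CST transformer rewrites flattened positional/keyword arguments on known API methods into the single request-object form, passing the retry/timeout/metadata control parameters through untouched and skipping calls that already use request=. The snippets below are source-level input and output of that transform (identifiers and values are placeholders), derived from the METHOD_TO_PARAMS table above. Before:

    client.create_app_gateway(
        "projects/my-proj/locations/us-central1",  # parent, positional
        app_gateway,                               # app_gateway, positional
        "my-gateway-id",                           # app_gateway_id, positional
        retry=my_retry,                            # control param
    )

After:

    client.create_app_gateway(
        request={
            'parent': "projects/my-proj/locations/us-central1",
            'app_gateway': app_gateway,
            'app_gateway_id': "my-gateway-id",
        },
        retry=my_retry,  # control params are re-attached after the request dict
    )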
-# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-beyondcorp-appgateways' - - -description = "Google Cloud Beyondcorp Appgateways API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/beyondcorp_appgateways/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "grpc-google-iam-v1 >= 0.14.0, <1.0.0dev", -] -extras = { -} -url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-appgateways" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - extras_require=extras, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/testing/constraints-3.10.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/testing/constraints-3.10.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
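Backing up one file: the removed setup.py does not hard-code a version. It scrapes the quoted semver string out of gapic_version.py with a lookbehind/lookahead regex, asserts exactly one candidate was found, then picks the Beta vs. Production/Stable trove classifier by whether the major version is 0. A self-contained sketch of that extraction, using hypothetical file contents:

    import re

    # Hypothetical contents of google/cloud/beyondcorp_appgateways/gapic_version.py.
    source = '__version__ = "0.4.15"  # placeholder'

    candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", source)
    assert candidates == ["0.4.15"]

    version = candidates[0]
    release_status = (
        "Development Status :: 4 - Beta"
        if version[0] == "0"
        else "Development Status :: 5 - Production/Stable"
    )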
-google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/testing/constraints-3.11.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/testing/constraints-3.11.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/testing/constraints-3.12.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/testing/constraints-3.12.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/testing/constraints-3.13.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/testing/constraints-3.13.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/testing/constraints-3.7.txt deleted file mode 100644 index fb7e93a1b473..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/testing/constraints-3.7.txt +++ /dev/null @@ -1,11 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -proto-plus==1.22.3 -protobuf==3.20.2 -grpc-google-iam-v1==0.14.0 diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/testing/constraints-3.8.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/testing/constraints-3.8.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/testing/constraints-3.9.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/testing/constraints-3.9.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
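Across these constraints files the pattern is deliberate: constraints-3.7.txt pins each dependency to the exact lower bound declared in setup.py's install_requires (google-api-core==1.34.1, google-auth==2.14.1, proto-plus==1.22.3, protobuf==3.20.2, grpc-google-iam-v1==0.14.0) so tests prove the declared floors still work, while the 3.8 through 3.13 files leave versions unpinned so those sessions resolve the latest releases. Presumably the lower-bound check is wired up along these lines (invocation hypothetical):

    python3 -m pip install -e . -c testing/constraints-3.7.txt
    python3 -m pytest tests/unit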
-google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/tests/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/tests/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/tests/unit/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/tests/unit/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/tests/unit/gapic/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/tests/unit/gapic/beyondcorp_appgateways_v1/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/tests/unit/gapic/beyondcorp_appgateways_v1/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/tests/unit/gapic/beyondcorp_appgateways_v1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/tests/unit/gapic/beyondcorp_appgateways_v1/test_app_gateways_service.py b/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/tests/unit/gapic/beyondcorp_appgateways_v1/test_app_gateways_service.py deleted file mode 100644 index 62439bc17c80..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-appgateways/v1/tests/unit/gapic/beyondcorp_appgateways_v1/test_app_gateways_service.py +++ /dev/null @@ -1,6321 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation -from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.beyondcorp_appgateways_v1.services.app_gateways_service import AppGatewaysServiceAsyncClient -from google.cloud.beyondcorp_appgateways_v1.services.app_gateways_service import AppGatewaysServiceClient -from google.cloud.beyondcorp_appgateways_v1.services.app_gateways_service import pagers -from google.cloud.beyondcorp_appgateways_v1.services.app_gateways_service import transports -from google.cloud.beyondcorp_appgateways_v1.types import app_gateways_service -from google.cloud.location import locations_pb2 -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -import google.auth - - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": "service account credentials", - "principal": "service-account@example.com", -} -CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert AppGatewaysServiceClient._get_default_mtls_endpoint(None) is None - assert AppGatewaysServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert AppGatewaysServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert AppGatewaysServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert AppGatewaysServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert AppGatewaysServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert AppGatewaysServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert AppGatewaysServiceClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert AppGatewaysServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - AppGatewaysServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert AppGatewaysServiceClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert AppGatewaysServiceClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert AppGatewaysServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - AppGatewaysServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert AppGatewaysServiceClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert AppGatewaysServiceClient._get_client_cert_source(None, False) is None - assert AppGatewaysServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert 
AppGatewaysServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert AppGatewaysServiceClient._get_client_cert_source(None, True) is mock_default_cert_source - assert AppGatewaysServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(AppGatewaysServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AppGatewaysServiceClient)) -@mock.patch.object(AppGatewaysServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AppGatewaysServiceAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = AppGatewaysServiceClient._DEFAULT_UNIVERSE - default_endpoint = AppGatewaysServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = AppGatewaysServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert AppGatewaysServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert AppGatewaysServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == AppGatewaysServiceClient.DEFAULT_MTLS_ENDPOINT - assert AppGatewaysServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert AppGatewaysServiceClient._get_api_endpoint(None, None, default_universe, "always") == AppGatewaysServiceClient.DEFAULT_MTLS_ENDPOINT - assert AppGatewaysServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == AppGatewaysServiceClient.DEFAULT_MTLS_ENDPOINT - assert AppGatewaysServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert AppGatewaysServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - AppGatewaysServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." - - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert AppGatewaysServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert AppGatewaysServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert AppGatewaysServiceClient._get_universe_domain(None, None) == AppGatewaysServiceClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - AppGatewaysServiceClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
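test__get_universe_domain above pins the resolution order: an explicitly configured universe domain wins, then the GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variable, then the googleapis.com default, and an empty string raises ValueError. From the caller's side that corresponds to roughly the following (a sketch only; the universe_domain client option is assumed to be available in the installed google-api-core):

    import os
    from google.api_core.client_options import ClientOptions
    from google.cloud.beyondcorp_appgateways_v1 import AppGatewaysServiceClient

    # Highest precedence: the explicit client option.
    options = ClientOptions(universe_domain="my-universe.example.com")
    # client = AppGatewaysServiceClient(client_options=options, credentials=creds)

    # Next: the environment variable, consulted only when no option is set.
    os.environ["GOOGLE_CLOUD_UNIVERSE_DOMAIN"] = "my-universe.example.com"

    # Otherwise the client falls back to the default universe, "googleapis.com".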
- -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = AppGatewaysServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - -@pytest.mark.parametrize("error_code", [401,403,404,500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = AppGatewaysServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - -@pytest.mark.parametrize("client_class,transport_name", [ - (AppGatewaysServiceClient, "grpc"), - (AppGatewaysServiceAsyncClient, "grpc_asyncio"), - (AppGatewaysServiceClient, "rest"), -]) -def test_app_gateways_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'beyondcorp.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://beyondcorp.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.AppGatewaysServiceGrpcTransport, "grpc"), - (transports.AppGatewaysServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.AppGatewaysServiceRestTransport, "rest"), -]) -def test_app_gateways_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (AppGatewaysServiceClient, "grpc"), - (AppGatewaysServiceAsyncClient, "grpc_asyncio"), - (AppGatewaysServiceClient, "rest"), -]) -def test_app_gateways_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = 
client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'beyondcorp.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://beyondcorp.googleapis.com' - ) - - -def test_app_gateways_service_client_get_transport_class(): - transport = AppGatewaysServiceClient.get_transport_class() - available_transports = [ - transports.AppGatewaysServiceGrpcTransport, - transports.AppGatewaysServiceRestTransport, - ] - assert transport in available_transports - - transport = AppGatewaysServiceClient.get_transport_class("grpc") - assert transport == transports.AppGatewaysServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (AppGatewaysServiceClient, transports.AppGatewaysServiceGrpcTransport, "grpc"), - (AppGatewaysServiceAsyncClient, transports.AppGatewaysServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (AppGatewaysServiceClient, transports.AppGatewaysServiceRestTransport, "rest"), -]) -@mock.patch.object(AppGatewaysServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AppGatewaysServiceClient)) -@mock.patch.object(AppGatewaysServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AppGatewaysServiceAsyncClient)) -def test_app_gateways_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(AppGatewaysServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(AppGatewaysServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (AppGatewaysServiceClient, transports.AppGatewaysServiceGrpcTransport, "grpc", "true"), - 
(AppGatewaysServiceAsyncClient, transports.AppGatewaysServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (AppGatewaysServiceClient, transports.AppGatewaysServiceGrpcTransport, "grpc", "false"), - (AppGatewaysServiceAsyncClient, transports.AppGatewaysServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (AppGatewaysServiceClient, transports.AppGatewaysServiceRestTransport, "rest", "true"), - (AppGatewaysServiceClient, transports.AppGatewaysServiceRestTransport, "rest", "false"), -]) -@mock.patch.object(AppGatewaysServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AppGatewaysServiceClient)) -@mock.patch.object(AppGatewaysServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AppGatewaysServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_app_gateways_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - AppGatewaysServiceClient, AppGatewaysServiceAsyncClient -]) -@mock.patch.object(AppGatewaysServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AppGatewaysServiceClient)) -@mock.patch.object(AppGatewaysServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AppGatewaysServiceAsyncClient)) -def test_app_gateways_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - AppGatewaysServiceClient, AppGatewaysServiceAsyncClient -]) -@mock.patch.object(AppGatewaysServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AppGatewaysServiceClient)) -@mock.patch.object(AppGatewaysServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AppGatewaysServiceAsyncClient)) -def test_app_gateways_service_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = AppGatewaysServiceClient._DEFAULT_UNIVERSE - default_endpoint = AppGatewaysServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = AppGatewaysServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (AppGatewaysServiceClient, transports.AppGatewaysServiceGrpcTransport, "grpc"), - (AppGatewaysServiceAsyncClient, transports.AppGatewaysServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (AppGatewaysServiceClient, transports.AppGatewaysServiceRestTransport, "rest"), -]) -def test_app_gateways_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (AppGatewaysServiceClient, transports.AppGatewaysServiceGrpcTransport, "grpc", grpc_helpers), - (AppGatewaysServiceAsyncClient, transports.AppGatewaysServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (AppGatewaysServiceClient, transports.AppGatewaysServiceRestTransport, "rest", None), -]) -def test_app_gateways_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_app_gateways_service_client_client_options_from_dict(): - with mock.patch('google.cloud.beyondcorp_appgateways_v1.services.app_gateways_service.transports.AppGatewaysServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = AppGatewaysServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (AppGatewaysServiceClient, transports.AppGatewaysServiceGrpcTransport, "grpc", grpc_helpers), - (AppGatewaysServiceAsyncClient, transports.AppGatewaysServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_app_gateways_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "beyondcorp.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="beyondcorp.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - app_gateways_service.ListAppGatewaysRequest, - dict, -]) -def test_list_app_gateways(request_type, transport: str = 'grpc'): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_app_gateways), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = app_gateways_service.ListAppGatewaysResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - response = client.list_app_gateways(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = app_gateways_service.ListAppGatewaysRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAppGatewaysPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -def test_list_app_gateways_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = app_gateways_service.ListAppGatewaysRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_app_gateways), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.list_app_gateways(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == app_gateways_service.ListAppGatewaysRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - -def test_list_app_gateways_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_app_gateways in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_app_gateways] = mock_rpc - request = {} - client.list_app_gateways(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_app_gateways(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_app_gateways_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AppGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_app_gateways in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_app_gateways] = mock_rpc - - request = {} - await client.list_app_gateways(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_app_gateways(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_app_gateways_async(transport: str = 'grpc_asyncio', request_type=app_gateways_service.ListAppGatewaysRequest): - client = AppGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_app_gateways), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(app_gateways_service.ListAppGatewaysResponse(
-            next_page_token='next_page_token_value',
-            unreachable=['unreachable_value'],
-        ))
-        response = await client.list_app_gateways(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = app_gateways_service.ListAppGatewaysRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListAppGatewaysAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable == ['unreachable_value']
-
-
-@pytest.mark.asyncio
-async def test_list_app_gateways_async_from_dict():
-    await test_list_app_gateways_async(request_type=dict)
-
-def test_list_app_gateways_field_headers():
-    client = AppGatewaysServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = app_gateways_service.ListAppGatewaysRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_app_gateways),
-            '__call__') as call:
-        call.return_value = app_gateways_service.ListAppGatewaysResponse()
-        client.list_app_gateways(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_app_gateways_field_headers_async():
-    client = AppGatewaysServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = app_gateways_service.ListAppGatewaysRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_app_gateways),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(app_gateways_service.ListAppGatewaysResponse())
-        await client.list_app_gateways(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_list_app_gateways_flattened():
-    client = AppGatewaysServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_app_gateways),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = app_gateways_service.ListAppGatewaysResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_app_gateways(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_app_gateways_flattened_error():
-    client = AppGatewaysServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_app_gateways(
-            app_gateways_service.ListAppGatewaysRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_app_gateways_flattened_async():
-    client = AppGatewaysServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_app_gateways),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(app_gateways_service.ListAppGatewaysResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_app_gateways(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_app_gateways_flattened_error_async():
-    client = AppGatewaysServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_app_gateways(
-            app_gateways_service.ListAppGatewaysRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_app_gateways_pager(transport_name: str = "grpc"):
-    client = AppGatewaysServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_app_gateways),
-            '__call__') as call:
-        # Set the response to a series of pages.
- call.side_effect = ( - app_gateways_service.ListAppGatewaysResponse( - app_gateways=[ - app_gateways_service.AppGateway(), - app_gateways_service.AppGateway(), - app_gateways_service.AppGateway(), - ], - next_page_token='abc', - ), - app_gateways_service.ListAppGatewaysResponse( - app_gateways=[], - next_page_token='def', - ), - app_gateways_service.ListAppGatewaysResponse( - app_gateways=[ - app_gateways_service.AppGateway(), - ], - next_page_token='ghi', - ), - app_gateways_service.ListAppGatewaysResponse( - app_gateways=[ - app_gateways_service.AppGateway(), - app_gateways_service.AppGateway(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_app_gateways(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, app_gateways_service.AppGateway) - for i in results) -def test_list_app_gateways_pages(transport_name: str = "grpc"): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_app_gateways), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - app_gateways_service.ListAppGatewaysResponse( - app_gateways=[ - app_gateways_service.AppGateway(), - app_gateways_service.AppGateway(), - app_gateways_service.AppGateway(), - ], - next_page_token='abc', - ), - app_gateways_service.ListAppGatewaysResponse( - app_gateways=[], - next_page_token='def', - ), - app_gateways_service.ListAppGatewaysResponse( - app_gateways=[ - app_gateways_service.AppGateway(), - ], - next_page_token='ghi', - ), - app_gateways_service.ListAppGatewaysResponse( - app_gateways=[ - app_gateways_service.AppGateway(), - app_gateways_service.AppGateway(), - ], - ), - RuntimeError, - ) - pages = list(client.list_app_gateways(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_app_gateways_async_pager(): - client = AppGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_app_gateways), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - app_gateways_service.ListAppGatewaysResponse( - app_gateways=[ - app_gateways_service.AppGateway(), - app_gateways_service.AppGateway(), - app_gateways_service.AppGateway(), - ], - next_page_token='abc', - ), - app_gateways_service.ListAppGatewaysResponse( - app_gateways=[], - next_page_token='def', - ), - app_gateways_service.ListAppGatewaysResponse( - app_gateways=[ - app_gateways_service.AppGateway(), - ], - next_page_token='ghi', - ), - app_gateways_service.ListAppGatewaysResponse( - app_gateways=[ - app_gateways_service.AppGateway(), - app_gateways_service.AppGateway(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_app_gateways(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, app_gateways_service.AppGateway) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_app_gateways_async_pages(): - client = AppGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_app_gateways), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - app_gateways_service.ListAppGatewaysResponse( - app_gateways=[ - app_gateways_service.AppGateway(), - app_gateways_service.AppGateway(), - app_gateways_service.AppGateway(), - ], - next_page_token='abc', - ), - app_gateways_service.ListAppGatewaysResponse( - app_gateways=[], - next_page_token='def', - ), - app_gateways_service.ListAppGatewaysResponse( - app_gateways=[ - app_gateways_service.AppGateway(), - ], - next_page_token='ghi', - ), - app_gateways_service.ListAppGatewaysResponse( - app_gateways=[ - app_gateways_service.AppGateway(), - app_gateways_service.AppGateway(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_app_gateways(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - app_gateways_service.GetAppGatewayRequest, - dict, -]) -def test_get_app_gateway(request_type, transport: str = 'grpc'): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_app_gateway), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = app_gateways_service.AppGateway( - name='name_value', - display_name='display_name_value', - uid='uid_value', - type_=app_gateways_service.AppGateway.Type.TCP_PROXY, - state=app_gateways_service.AppGateway.State.CREATING, - uri='uri_value', - host_type=app_gateways_service.AppGateway.HostType.GCP_REGIONAL_MIG, - ) - response = client.get_app_gateway(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = app_gateways_service.GetAppGatewayRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, app_gateways_service.AppGateway) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.uid == 'uid_value' - assert response.type_ == app_gateways_service.AppGateway.Type.TCP_PROXY - assert response.state == app_gateways_service.AppGateway.State.CREATING - assert response.uri == 'uri_value' - assert response.host_type == app_gateways_service.AppGateway.HostType.GCP_REGIONAL_MIG - - -def test_get_app_gateway_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = app_gateways_service.GetAppGatewayRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_app_gateway), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_app_gateway(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == app_gateways_service.GetAppGatewayRequest( - name='name_value', - ) - -def test_get_app_gateway_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_app_gateway in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_app_gateway] = mock_rpc - request = {} - client.get_app_gateway(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.get_app_gateway(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_app_gateway_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = AppGatewaysServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.get_app_gateway in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.get_app_gateway] = mock_rpc
-
-        request = {}
-        await client.get_app_gateway(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.get_app_gateway(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_app_gateway_async(transport: str = 'grpc_asyncio', request_type=app_gateways_service.GetAppGatewayRequest):
-    client = AppGatewaysServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_app_gateway),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(app_gateways_service.AppGateway(
-            name='name_value',
-            display_name='display_name_value',
-            uid='uid_value',
-            type_=app_gateways_service.AppGateway.Type.TCP_PROXY,
-            state=app_gateways_service.AppGateway.State.CREATING,
-            uri='uri_value',
-            host_type=app_gateways_service.AppGateway.HostType.GCP_REGIONAL_MIG,
-        ))
-        response = await client.get_app_gateway(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = app_gateways_service.GetAppGatewayRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
- assert isinstance(response, app_gateways_service.AppGateway) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.uid == 'uid_value' - assert response.type_ == app_gateways_service.AppGateway.Type.TCP_PROXY - assert response.state == app_gateways_service.AppGateway.State.CREATING - assert response.uri == 'uri_value' - assert response.host_type == app_gateways_service.AppGateway.HostType.GCP_REGIONAL_MIG - - -@pytest.mark.asyncio -async def test_get_app_gateway_async_from_dict(): - await test_get_app_gateway_async(request_type=dict) - -def test_get_app_gateway_field_headers(): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = app_gateways_service.GetAppGatewayRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_app_gateway), - '__call__') as call: - call.return_value = app_gateways_service.AppGateway() - client.get_app_gateway(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_app_gateway_field_headers_async(): - client = AppGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = app_gateways_service.GetAppGatewayRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_app_gateway), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(app_gateways_service.AppGateway()) - await client.get_app_gateway(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_app_gateway_flattened(): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_app_gateway), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = app_gateways_service.AppGateway() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_app_gateway( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_get_app_gateway_flattened_error():
-    client = AppGatewaysServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_app_gateway(
-            app_gateways_service.GetAppGatewayRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_get_app_gateway_flattened_async():
-    client = AppGatewaysServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_app_gateway),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(app_gateways_service.AppGateway())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.get_app_gateway(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_app_gateway_flattened_error_async():
-    client = AppGatewaysServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.get_app_gateway(
-            app_gateways_service.GetAppGatewayRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    app_gateways_service.CreateAppGatewayRequest,
-    dict,
-])
-def test_create_app_gateway(request_type, transport: str = 'grpc'):
-    client = AppGatewaysServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_app_gateway),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/spam')
-        response = client.create_app_gateway(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = app_gateways_service.CreateAppGatewayRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, future.Future)
-
-
-def test_create_app_gateway_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = AppGatewaysServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
- request = app_gateways_service.CreateAppGatewayRequest( - parent='parent_value', - app_gateway_id='app_gateway_id_value', - request_id='request_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_app_gateway), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_app_gateway(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == app_gateways_service.CreateAppGatewayRequest( - parent='parent_value', - app_gateway_id='app_gateway_id_value', - request_id='request_id_value', - ) - -def test_create_app_gateway_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_app_gateway in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_app_gateway] = mock_rpc - request = {} - client.create_app_gateway(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_app_gateway(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_app_gateway_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AppGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_app_gateway in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_app_gateway] = mock_rpc - - request = {} - await client.create_app_gateway(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_app_gateway(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_app_gateway_async(transport: str = 'grpc_asyncio', request_type=app_gateways_service.CreateAppGatewayRequest): - client = AppGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_app_gateway), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_app_gateway(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = app_gateways_service.CreateAppGatewayRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_app_gateway_async_from_dict(): - await test_create_app_gateway_async(request_type=dict) - -def test_create_app_gateway_field_headers(): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = app_gateways_service.CreateAppGatewayRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_app_gateway), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_app_gateway(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_app_gateway_field_headers_async(): - client = AppGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = app_gateways_service.CreateAppGatewayRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_app_gateway), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_app_gateway(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_create_app_gateway_flattened():
-    client = AppGatewaysServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_app_gateway),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.create_app_gateway(
-            parent='parent_value',
-            app_gateway=app_gateways_service.AppGateway(name='name_value'),
-            app_gateway_id='app_gateway_id_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].app_gateway
-        mock_val = app_gateways_service.AppGateway(name='name_value')
-        assert arg == mock_val
-        arg = args[0].app_gateway_id
-        mock_val = 'app_gateway_id_value'
-        assert arg == mock_val
-
-
-def test_create_app_gateway_flattened_error():
-    client = AppGatewaysServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.create_app_gateway(
-            app_gateways_service.CreateAppGatewayRequest(),
-            parent='parent_value',
-            app_gateway=app_gateways_service.AppGateway(name='name_value'),
-            app_gateway_id='app_gateway_id_value',
-        )
-
-@pytest.mark.asyncio
-async def test_create_app_gateway_flattened_async():
-    client = AppGatewaysServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_app_gateway),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.create_app_gateway(
-            parent='parent_value',
-            app_gateway=app_gateways_service.AppGateway(name='name_value'),
-            app_gateway_id='app_gateway_id_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].app_gateway
-        mock_val = app_gateways_service.AppGateway(name='name_value')
-        assert arg == mock_val
-        arg = args[0].app_gateway_id
-        mock_val = 'app_gateway_id_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_app_gateway_flattened_error_async():
-    client = AppGatewaysServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - await client.create_app_gateway( - app_gateways_service.CreateAppGatewayRequest(), - parent='parent_value', - app_gateway=app_gateways_service.AppGateway(name='name_value'), - app_gateway_id='app_gateway_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - app_gateways_service.DeleteAppGatewayRequest, - dict, -]) -def test_delete_app_gateway(request_type, transport: str = 'grpc'): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_app_gateway), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.delete_app_gateway(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = app_gateways_service.DeleteAppGatewayRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_app_gateway_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = app_gateways_service.DeleteAppGatewayRequest( - name='name_value', - request_id='request_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_app_gateway), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_app_gateway(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == app_gateways_service.DeleteAppGatewayRequest( - name='name_value', - request_id='request_id_value', - ) - -def test_delete_app_gateway_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_app_gateway in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_app_gateway] = mock_rpc - request = {} - client.delete_app_gateway(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_app_gateway(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_app_gateway_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AppGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_app_gateway in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_app_gateway] = mock_rpc - - request = {} - await client.delete_app_gateway(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_app_gateway(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_app_gateway_async(transport: str = 'grpc_asyncio', request_type=app_gateways_service.DeleteAppGatewayRequest): - client = AppGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_app_gateway), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_app_gateway(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = app_gateways_service.DeleteAppGatewayRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_app_gateway_async_from_dict(): - await test_delete_app_gateway_async(request_type=dict) - -def test_delete_app_gateway_field_headers(): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
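- # (For DeleteAppGateway the routed field is `name`, matching the
- # http rule 'v1/{name=projects/*/locations/*/appGateways/*}'.)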
- request = app_gateways_service.DeleteAppGatewayRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_app_gateway), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_app_gateway(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_app_gateway_field_headers_async(): - client = AppGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = app_gateways_service.DeleteAppGatewayRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_app_gateway), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_app_gateway(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_app_gateway_flattened(): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_app_gateway), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_app_gateway( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_app_gateway_flattened_error(): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_app_gateway( - app_gateways_service.DeleteAppGatewayRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_app_gateway_flattened_async(): - client = AppGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_app_gateway), - '__call__') as call: - # Designate an appropriate return value for the call. 
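- # (grpc_helpers_async.FakeUnaryUnaryCall wraps the message in an
- # awaitable stand-in for the call object a real grpc.aio channel
- # would return.)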
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name='operations/spam')
- )
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.delete_app_gateway(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_app_gateway_flattened_error_async():
- client = AppGatewaysServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.delete_app_gateway(
- app_gateways_service.DeleteAppGatewayRequest(),
- name='name_value',
- )
-
-
-def test_list_app_gateways_rest_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = AppGatewaysServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.list_app_gateways in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.list_app_gateways] = mock_rpc
-
- request = {}
- client.list_app_gateways(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- client.list_app_gateways(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-
-def test_list_app_gateways_rest_required_fields(request_type=app_gateways_service.ListAppGatewaysRequest):
- transport_class = transports.AppGatewaysServiceRestTransport
-
- request_init = {}
- request_init["parent"] = ""
- request = request_type(**request_init)
- pb_request = request_type.pb(request)
- jsonified_request = json.loads(json_format.MessageToJson(
- pb_request,
- use_integers_for_enums=False
- ))
-
- # verify fields with default values are dropped
-
- unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_app_gateways._get_unset_required_fields(jsonified_request)
- jsonified_request.update(unset_fields)
-
- # verify required fields with default values are now present
-
- jsonified_request["parent"] = 'parent_value'
-
- unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_app_gateways._get_unset_required_fields(jsonified_request)
- # Check that path parameters and body parameters are not mixing in.
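- # (Anything left in unset_fields beyond the optional query parameters
- # listed below would mean a path or body field leaked into the query
- # string.)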
- assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = app_gateways_service.ListAppGatewaysResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = app_gateways_service.ListAppGatewaysResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_app_gateways(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_app_gateways_rest_unset_required_fields(): - transport = transports.AppGatewaysServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_app_gateways._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_app_gateways_rest_flattened(): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = app_gateways_service.ListAppGatewaysResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = app_gateways_service.ListAppGatewaysResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_app_gateways(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
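- # (path_template.validate checks the URI that was actually requested
- # against the method's http rule pattern.)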
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/appGateways" % client.transport._host, args[1])
-
-
-def test_list_app_gateways_rest_flattened_error(transport: str = 'rest'):
- client = AppGatewaysServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.list_app_gateways(
- app_gateways_service.ListAppGatewaysRequest(),
- parent='parent_value',
- )
-
-
-def test_list_app_gateways_rest_pager(transport: str = 'rest'):
- client = AppGatewaysServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, 'request') as req:
- # Set the response as a series of pages
- response = (
- app_gateways_service.ListAppGatewaysResponse(
- app_gateways=[
- app_gateways_service.AppGateway(),
- app_gateways_service.AppGateway(),
- app_gateways_service.AppGateway(),
- ],
- next_page_token='abc',
- ),
- app_gateways_service.ListAppGatewaysResponse(
- app_gateways=[],
- next_page_token='def',
- ),
- app_gateways_service.ListAppGatewaysResponse(
- app_gateways=[
- app_gateways_service.AppGateway(),
- ],
- next_page_token='ghi',
- ),
- app_gateways_service.ListAppGatewaysResponse(
- app_gateways=[
- app_gateways_service.AppGateway(),
- app_gateways_service.AppGateway(),
- ],
- ),
- )
- # Two responses for two calls
- response = response + response
-
- # Wrap the values into proper Response objs
- response = tuple(app_gateways_service.ListAppGatewaysResponse.to_json(x) for x in response)
- return_values = tuple(Response() for i in response)
- for return_val, response_val in zip(return_values, response):
- return_val._content = response_val.encode('UTF-8')
- return_val.status_code = 200
- req.side_effect = return_values
-
- sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
- pager = client.list_app_gateways(request=sample_request)
-
- results = list(pager)
- assert len(results) == 6
- assert all(isinstance(i, app_gateways_service.AppGateway)
- for i in results)
-
- pages = list(client.list_app_gateways(request=sample_request).pages)
- for page_, token in zip(pages, ['abc','def','ghi', '']):
- assert page_.raw_page.next_page_token == token
-
-
-def test_get_app_gateway_rest_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = AppGatewaysServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.get_app_gateway in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.get_app_gateway] = mock_rpc - - request = {} - client.get_app_gateway(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_app_gateway(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_app_gateway_rest_required_fields(request_type=app_gateways_service.GetAppGatewayRequest): - transport_class = transports.AppGatewaysServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_app_gateway._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_app_gateway._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = app_gateways_service.AppGateway() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = app_gateways_service.AppGateway.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_app_gateway(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_app_gateway_rest_unset_required_fields(): - transport = transports.AppGatewaysServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_app_gateway._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_app_gateway_rest_flattened(): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = app_gateways_service.AppGateway() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/appGateways/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = app_gateways_service.AppGateway.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_app_gateway(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/appGateways/*}" % client.transport._host, args[1]) - - -def test_get_app_gateway_rest_flattened_error(transport: str = 'rest'): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_app_gateway( - app_gateways_service.GetAppGatewayRequest(), - name='name_value', - ) - - -def test_create_app_gateway_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_app_gateway in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_app_gateway] = mock_rpc - - request = {} - client.create_app_gateway(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_app_gateway(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_app_gateway_rest_required_fields(request_type=app_gateways_service.CreateAppGatewayRequest): - transport_class = transports.AppGatewaysServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_app_gateway._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_app_gateway._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("app_gateway_id", "request_id", "validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_app_gateway(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_app_gateway_rest_unset_required_fields(): - transport = transports.AppGatewaysServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_app_gateway._get_unset_required_fields({}) - assert set(unset_fields) == (set(("appGatewayId", "requestId", "validateOnly", )) & set(("parent", "appGateway", ))) - - -def test_create_app_gateway_rest_flattened(): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - app_gateway=app_gateways_service.AppGateway(name='name_value'), - app_gateway_id='app_gateway_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_app_gateway(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/appGateways" % client.transport._host, args[1]) - - -def test_create_app_gateway_rest_flattened_error(transport: str = 'rest'): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_app_gateway( - app_gateways_service.CreateAppGatewayRequest(), - parent='parent_value', - app_gateway=app_gateways_service.AppGateway(name='name_value'), - app_gateway_id='app_gateway_id_value', - ) - - -def test_delete_app_gateway_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_app_gateway in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_app_gateway] = mock_rpc - - request = {} - client.delete_app_gateway(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_app_gateway(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_app_gateway_rest_required_fields(request_type=app_gateways_service.DeleteAppGatewayRequest): - transport_class = transports.AppGatewaysServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_app_gateway._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_app_gateway._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("request_id", "validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
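- # (transcode() normally maps the proto request onto a matching http
- # rule, yielding the uri, method, body and query_params the transport
- # sends; the canned result below keeps the test independent of the
- # real http_options.)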
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_app_gateway(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_app_gateway_rest_unset_required_fields(): - transport = transports.AppGatewaysServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_app_gateway._get_unset_required_fields({}) - assert set(unset_fields) == (set(("requestId", "validateOnly", )) & set(("name", ))) - - -def test_delete_app_gateway_rest_flattened(): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/appGateways/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_app_gateway(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/appGateways/*}" % client.transport._host, args[1]) - - -def test_delete_app_gateway_rest_flattened_error(transport: str = 'rest'): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_app_gateway( - app_gateways_service.DeleteAppGatewayRequest(), - name='name_value', - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.AppGatewaysServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. 
- transport = transports.AppGatewaysServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = AppGatewaysServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.AppGatewaysServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = AppGatewaysServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = AppGatewaysServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.AppGatewaysServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = AppGatewaysServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.AppGatewaysServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = AppGatewaysServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.AppGatewaysServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.AppGatewaysServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.AppGatewaysServiceGrpcTransport, - transports.AppGatewaysServiceGrpcAsyncIOTransport, - transports.AppGatewaysServiceRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = AppGatewaysServiceClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_app_gateways_empty_call_grpc(): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_app_gateways), - '__call__') as call: - call.return_value = app_gateways_service.ListAppGatewaysResponse() - client.list_app_gateways(request=None) - - # Establish that the underlying stub method was called. 
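- # (With request=None the client builds a default, empty request
- # message, so the stub should receive exactly ListAppGatewaysRequest().)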
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = app_gateways_service.ListAppGatewaysRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_app_gateway_empty_call_grpc(): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_app_gateway), - '__call__') as call: - call.return_value = app_gateways_service.AppGateway() - client.get_app_gateway(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = app_gateways_service.GetAppGatewayRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_app_gateway_empty_call_grpc(): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_app_gateway), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_app_gateway(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = app_gateways_service.CreateAppGatewayRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_app_gateway_empty_call_grpc(): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_app_gateway), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_app_gateway(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = app_gateways_service.DeleteAppGatewayRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = AppGatewaysServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = AppGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_app_gateways_empty_call_grpc_asyncio(): - client = AppGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_app_gateways), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(app_gateways_service.ListAppGatewaysResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - await client.list_app_gateways(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = app_gateways_service.ListAppGatewaysRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_app_gateway_empty_call_grpc_asyncio(): - client = AppGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_app_gateway), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(app_gateways_service.AppGateway( - name='name_value', - display_name='display_name_value', - uid='uid_value', - type_=app_gateways_service.AppGateway.Type.TCP_PROXY, - state=app_gateways_service.AppGateway.State.CREATING, - uri='uri_value', - host_type=app_gateways_service.AppGateway.HostType.GCP_REGIONAL_MIG, - )) - await client.get_app_gateway(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = app_gateways_service.GetAppGatewayRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_app_gateway_empty_call_grpc_asyncio(): - client = AppGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_app_gateway), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_app_gateway(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = app_gateways_service.CreateAppGatewayRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_app_gateway_empty_call_grpc_asyncio(): - client = AppGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_app_gateway), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_app_gateway(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- request_msg = app_gateways_service.DeleteAppGatewayRequest()
-
- assert args[0] == request_msg
-
-
-def test_transport_kind_rest():
- transport = AppGatewaysServiceClient.get_transport_class("rest")(
- credentials=ga_credentials.AnonymousCredentials()
- )
- assert transport.kind == "rest"
-
-
-def test_list_app_gateways_rest_bad_request(request_type=app_gateways_service.ListAppGatewaysRequest):
- client = AppGatewaysServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
- # send a request that will satisfy transcoding
- request_init = {'parent': 'projects/sample1/locations/sample2'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- response_value.json = mock.Mock(return_value={})
- response_value.status_code = 400
- response_value.request = mock.Mock()
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- client.list_app_gateways(request)
-
-
-@pytest.mark.parametrize("request_type", [
- app_gateways_service.ListAppGatewaysRequest,
- dict,
-])
-def test_list_app_gateways_rest_call_success(request_type):
- client = AppGatewaysServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
-
- # send a request that will satisfy transcoding
- request_init = {'parent': 'projects/sample1/locations/sample2'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = app_gateways_service.ListAppGatewaysResponse(
- next_page_token='next_page_token_value',
- unreachable=['unreachable_value'],
- )
-
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- response_value.status_code = 200
-
- # Convert return value to protobuf type
- return_value = app_gateways_service.ListAppGatewaysResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
- response_value.content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- response = client.list_app_gateways(request)
-
- # Establish that the response is the type that we expect.
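- # (REST list responses are wrapped in a ListAppGatewaysPager that
- # would fetch further pages on iteration; only the first, mocked page
- # is inspected here.)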
- assert isinstance(response, pagers.ListAppGatewaysPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_app_gateways_rest_interceptors(null_interceptor): - transport = transports.AppGatewaysServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AppGatewaysServiceRestInterceptor(), - ) - client = AppGatewaysServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AppGatewaysServiceRestInterceptor, "post_list_app_gateways") as post, \ - mock.patch.object(transports.AppGatewaysServiceRestInterceptor, "post_list_app_gateways_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AppGatewaysServiceRestInterceptor, "pre_list_app_gateways") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = app_gateways_service.ListAppGatewaysRequest.pb(app_gateways_service.ListAppGatewaysRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = app_gateways_service.ListAppGatewaysResponse.to_json(app_gateways_service.ListAppGatewaysResponse()) - req.return_value.content = return_value - - request = app_gateways_service.ListAppGatewaysRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = app_gateways_service.ListAppGatewaysResponse() - post_with_metadata.return_value = app_gateways_service.ListAppGatewaysResponse(), metadata - - client.list_app_gateways(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_app_gateway_rest_bad_request(request_type=app_gateways_service.GetAppGatewayRequest): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/appGateways/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
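- # (A 400 status from the underlying session is surfaced by the REST
- # transport as google.api_core.exceptions.BadRequest.)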
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- response_value.json = mock.Mock(return_value={})
- response_value.status_code = 400
- response_value.request = mock.Mock()
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- client.get_app_gateway(request)
-
-
-@pytest.mark.parametrize("request_type", [
- app_gateways_service.GetAppGatewayRequest,
- dict,
-])
-def test_get_app_gateway_rest_call_success(request_type):
- client = AppGatewaysServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
-
- # send a request that will satisfy transcoding
- request_init = {'name': 'projects/sample1/locations/sample2/appGateways/sample3'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = app_gateways_service.AppGateway(
- name='name_value',
- display_name='display_name_value',
- uid='uid_value',
- type_=app_gateways_service.AppGateway.Type.TCP_PROXY,
- state=app_gateways_service.AppGateway.State.CREATING,
- uri='uri_value',
- host_type=app_gateways_service.AppGateway.HostType.GCP_REGIONAL_MIG,
- )
-
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- response_value.status_code = 200
-
- # Convert return value to protobuf type
- return_value = app_gateways_service.AppGateway.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
- response_value.content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- response = client.get_app_gateway(request)
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, app_gateways_service.AppGateway) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.uid == 'uid_value' - assert response.type_ == app_gateways_service.AppGateway.Type.TCP_PROXY - assert response.state == app_gateways_service.AppGateway.State.CREATING - assert response.uri == 'uri_value' - assert response.host_type == app_gateways_service.AppGateway.HostType.GCP_REGIONAL_MIG - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_app_gateway_rest_interceptors(null_interceptor): - transport = transports.AppGatewaysServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AppGatewaysServiceRestInterceptor(), - ) - client = AppGatewaysServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AppGatewaysServiceRestInterceptor, "post_get_app_gateway") as post, \ - mock.patch.object(transports.AppGatewaysServiceRestInterceptor, "post_get_app_gateway_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AppGatewaysServiceRestInterceptor, "pre_get_app_gateway") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = app_gateways_service.GetAppGatewayRequest.pb(app_gateways_service.GetAppGatewayRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = app_gateways_service.AppGateway.to_json(app_gateways_service.AppGateway()) - req.return_value.content = return_value - - request = app_gateways_service.GetAppGatewayRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = app_gateways_service.AppGateway() - post_with_metadata.return_value = app_gateways_service.AppGateway(), metadata - - client.get_app_gateway(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_app_gateway_rest_bad_request(request_type=app_gateways_service.CreateAppGatewayRequest): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_app_gateway(request) - - -@pytest.mark.parametrize("request_type", [ - app_gateways_service.CreateAppGatewayRequest, - dict, -]) -def test_create_app_gateway_rest_call_success(request_type): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["app_gateway"] = {'name': 'name_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'labels': {}, 'display_name': 'display_name_value', 'uid': 'uid_value', 'type_': 1, 'state': 1, 'uri': 'uri_value', 'allocated_connections': [{'psc_uri': 'psc_uri_value', 'ingress_port': 1311}], 'host_type': 1} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = app_gateways_service.CreateAppGatewayRequest.meta.fields["app_gateway"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
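- # (Background, assumed rather than stated by the generator: proto-plus message
- # classes describe their schema via .meta.fields, while raw protobuf classes
- # expose .DESCRIPTOR.fields; the hasattr(..., "DESCRIPTOR") probe below is what
- # distinguishes the two, so either schema source can be walked uniformly.)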
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["app_gateway"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["app_gateway"][field])): - del request_init["app_gateway"][field][i][subfield] - else: - del request_init["app_gateway"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_app_gateway(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operation.Operation) - json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_app_gateway_rest_interceptors(null_interceptor): - transport = transports.AppGatewaysServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AppGatewaysServiceRestInterceptor(), - ) - client = AppGatewaysServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.AppGatewaysServiceRestInterceptor, "post_create_app_gateway") as post, \ - mock.patch.object(transports.AppGatewaysServiceRestInterceptor, "post_create_app_gateway_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AppGatewaysServiceRestInterceptor, "pre_create_app_gateway") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = app_gateways_service.CreateAppGatewayRequest.pb(app_gateways_service.CreateAppGatewayRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = app_gateways_service.CreateAppGatewayRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.create_app_gateway(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_app_gateway_rest_bad_request(request_type=app_gateways_service.DeleteAppGatewayRequest): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/appGateways/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_app_gateway(request) - - -@pytest.mark.parametrize("request_type", [ - app_gateways_service.DeleteAppGatewayRequest, - dict, -]) -def test_delete_app_gateway_rest_call_success(request_type): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/appGateways/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_app_gateway(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operation.Operation) - json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_app_gateway_rest_interceptors(null_interceptor): - transport = transports.AppGatewaysServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AppGatewaysServiceRestInterceptor(), - ) - client = AppGatewaysServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.AppGatewaysServiceRestInterceptor, "post_delete_app_gateway") as post, \ - mock.patch.object(transports.AppGatewaysServiceRestInterceptor, "post_delete_app_gateway_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.AppGatewaysServiceRestInterceptor, "pre_delete_app_gateway") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = app_gateways_service.DeleteAppGatewayRequest.pb(app_gateways_service.DeleteAppGatewayRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = app_gateways_service.DeleteAppGatewayRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata
- - client.delete_app_gateway(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_location(request) - - -@pytest.mark.parametrize("request_type", [ - locations_pb2.GetLocationRequest, - dict, -]) -def test_get_location_rest(request_type): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.Location() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_location(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocationsRequest): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_locations(request) - - -@pytest.mark.parametrize("request_type", [ - locations_pb2.ListLocationsRequest, - dict, -]) -def test_list_locations_rest(request_type): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = locations_pb2.ListLocationsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_locations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - - -def test_get_iam_policy_rest_bad_request(request_type=iam_policy_pb2.GetIamPolicyRequest): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/appConnections/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_iam_policy(request) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.GetIamPolicyRequest, - dict, -]) -def test_get_iam_policy_rest(request_type): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'resource': 'projects/sample1/locations/sample2/appConnections/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_iam_policy(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - -def test_set_iam_policy_rest_bad_request(request_type=iam_policy_pb2.SetIamPolicyRequest): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/appConnections/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.set_iam_policy(request) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.SetIamPolicyRequest, - dict, -]) -def test_set_iam_policy_rest(request_type): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'resource': 'projects/sample1/locations/sample2/appConnections/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.set_iam_policy(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - -def test_test_iam_permissions_rest_bad_request(request_type=iam_policy_pb2.TestIamPermissionsRequest): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/appConnections/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.test_iam_permissions(request) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.TestIamPermissionsRequest, - dict, -]) -def test_test_iam_permissions_rest(request_type): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'resource': 'projects/sample1/locations/sample2/appConnections/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = iam_policy_pb2.TestIamPermissionsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.test_iam_permissions(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - -def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.cancel_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) -def test_cancel_operation_rest(request_type): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.DeleteOperationRequest, - dict, -]) -def test_delete_operation_rest(request_type): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) -def test_get_operation_rest(request_type): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_operations(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) -def test_list_operations_rest(request_type): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_operations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_initialize_client_w_rest(): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_app_gateways_empty_call_rest(): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_app_gateways), - '__call__') as call: - client.list_app_gateways(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = app_gateways_service.ListAppGatewaysRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_app_gateway_empty_call_rest(): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_app_gateway), - '__call__') as call: - client.get_app_gateway(request=None) - - # Establish that the underlying stub method was called. 
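- # (Note on the mock API: call.mock_calls[0] unpacks as a (name, args, kwargs)
- # triple, so args[0] below is the request message the client synthesized from
- # request=None, which should equal a default-constructed request.)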
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = app_gateways_service.GetAppGatewayRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_app_gateway_empty_call_rest(): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_app_gateway), - '__call__') as call: - client.create_app_gateway(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = app_gateways_service.CreateAppGatewayRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_app_gateway_empty_call_rest(): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_app_gateway), - '__call__') as call: - client.delete_app_gateway(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = app_gateways_service.DeleteAppGatewayRequest() - - assert args[0] == request_msg - - -def test_app_gateways_service_rest_lro_client(): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - transport = client.transport - - # Ensure that we have an api-core operations client. - assert isinstance( - transport.operations_client, -operations_v1.AbstractOperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.AppGatewaysServiceGrpcTransport, - ) - -def test_app_gateways_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.AppGatewaysServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_app_gateways_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.beyondcorp_appgateways_v1.services.app_gateways_service.transports.AppGatewaysServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.AppGatewaysServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
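- # (A minimal sketch of the contract being verified, hypothetical rather than
- # the generated transport code: each RPC on the abstract base is a stub along
- # the lines of
- #     def list_app_gateways(self, request): raise NotImplementedError()
- # so every concrete transport must override the methods it supports.)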
- methods = ( - 'list_app_gateways', - 'get_app_gateway', - 'create_app_gateway', - 'delete_app_gateway', - 'set_iam_policy', - 'get_iam_policy', - 'test_iam_permissions', - 'get_location', - 'list_locations', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_app_gateways_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.beyondcorp_appgateways_v1.services.app_gateways_service.transports.AppGatewaysServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.AppGatewaysServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_app_gateways_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.beyondcorp_appgateways_v1.services.app_gateways_service.transports.AppGatewaysServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.AppGatewaysServiceTransport() - adc.assert_called_once() - - -def test_app_gateways_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - AppGatewaysServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.AppGatewaysServiceGrpcTransport, - transports.AppGatewaysServiceGrpcAsyncIOTransport, - ], -) -def test_app_gateways_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
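- # (ADC = Application Default Credentials: google.auth.default() resolves
- # credentials from the environment, e.g. GOOGLE_APPLICATION_CREDENTIALS, gcloud
- # user credentials, or the metadata server, and returns a (credentials,
- # project_id) tuple:
- #     creds, project = google.auth.default(scopes=["https://www.googleapis.com/auth/cloud-platform"])
- # The tests below patch that resolver and only verify how it is invoked.)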
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.AppGatewaysServiceGrpcTransport, - transports.AppGatewaysServiceGrpcAsyncIOTransport, - transports.AppGatewaysServiceRestTransport, - ], -) -def test_app_gateways_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.AppGatewaysServiceGrpcTransport, grpc_helpers), - (transports.AppGatewaysServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_app_gateways_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "beyondcorp.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="beyondcorp.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.AppGatewaysServiceGrpcTransport, transports.AppGatewaysServiceGrpcAsyncIOTransport]) -def test_app_gateways_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
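- # (client_cert_source_callback, defined earlier in this module, returns a
- # (certificate_chain_bytes, private_key_bytes) pair; when no ready-made
- # ssl_channel_credentials object is supplied, the transport is expected to feed
- # that pair to grpc.ssl_channel_credentials(), as asserted below.)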
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_app_gateways_service_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.AppGatewaysServiceRestTransport( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_app_gateways_service_host_no_port(transport_name): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='beyondcorp.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'beyondcorp.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://beyondcorp.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_app_gateways_service_host_with_port(transport_name): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='beyondcorp.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'beyondcorp.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://beyondcorp.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_app_gateways_service_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = AppGatewaysServiceClient( - credentials=creds1, - transport=transport_name, - ) - client2 = AppGatewaysServiceClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.list_app_gateways._session - session2 = client2.transport.list_app_gateways._session - assert session1 != session2 - session1 = client1.transport.get_app_gateway._session - session2 = client2.transport.get_app_gateway._session - assert session1 != session2 - session1 = client1.transport.create_app_gateway._session - session2 = client2.transport.create_app_gateway._session - assert session1 != session2 - session1 = client1.transport.delete_app_gateway._session - session2 = client2.transport.delete_app_gateway._session - assert session1 != session2 -def test_app_gateways_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided.
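- # (When a caller injects a pre-built channel like this, the transport skips its
- # own credential and channel plumbing entirely; only the _host bookkeeping is
- # still performed, appending the default :443 port when none is given.)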
- transport = transports.AppGatewaysServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials is None - - -def test_app_gateways_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.AppGatewaysServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials is None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.AppGatewaysServiceGrpcTransport, transports.AppGatewaysServiceGrpcAsyncIOTransport]) -def test_app_gateways_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor.
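- # (Context, to the best of our knowledge: api_mtls_endpoint and
- # client_cert_source were superseded by client_options.api_endpoint and
- # client_cert_source_for_mtls, which is why constructing a transport with them
- # must emit the DeprecationWarning asserted below.)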
-@pytest.mark.parametrize("transport_class", [transports.AppGatewaysServiceGrpcTransport, transports.AppGatewaysServiceGrpcAsyncIOTransport]) -def test_app_gateways_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_app_gateways_service_grpc_lro_client(): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - transport = client.transport - - # Ensure that we have an api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_app_gateways_service_grpc_lro_async_client(): - client = AppGatewaysServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - transport = client.transport - - # Ensure that we have an api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_app_gateway_path(): - project = "squid" - location = "clam" - app_gateway = "whelk" - expected = "projects/{project}/locations/{location}/appGateways/{app_gateway}".format(project=project, location=location, app_gateway=app_gateway, ) - actual = AppGatewaysServiceClient.app_gateway_path(project, location, app_gateway) - assert expected == actual - - -def test_parse_app_gateway_path(): - expected = { - "project": "octopus", - "location": "oyster", - "app_gateway": "nudibranch", - } - path = AppGatewaysServiceClient.app_gateway_path(**expected) - - # Check that the path construction is reversible. - actual = AppGatewaysServiceClient.parse_app_gateway_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = AppGatewaysServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "mussel", - } - path = AppGatewaysServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible.
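- # (Illustration: the path built above is "billingAccounts/mussel", and the
- # parse_* helper applies the template in reverse, recovering the original
- # {"billing_account": "mussel"} keyword arguments.)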
- actual = AppGatewaysServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "winkle" - expected = "folders/{folder}".format(folder=folder, ) - actual = AppGatewaysServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nautilus", - } - path = AppGatewaysServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = AppGatewaysServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "scallop" - expected = "organizations/{organization}".format(organization=organization, ) - actual = AppGatewaysServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "abalone", - } - path = AppGatewaysServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = AppGatewaysServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "squid" - expected = "projects/{project}".format(project=project, ) - actual = AppGatewaysServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "clam", - } - path = AppGatewaysServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = AppGatewaysServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "whelk" - location = "octopus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = AppGatewaysServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - } - path = AppGatewaysServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = AppGatewaysServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.AppGatewaysServiceTransport, '_prep_wrapped_messages') as prep: - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.AppGatewaysServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = AppGatewaysServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_delete_operation(transport: str = "grpc"): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
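- # (Mechanism note: patching type(client.transport.delete_operation).__call__
- # replaces the underlying gRPC callable while leaving the client-side plumbing,
- # request coercion and metadata handling, in place, so no channel I/O occurs.)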
- with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = AppGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_field_headers(): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = AppGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_delete_operation_from_dict(): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = AppGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = AppGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
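- # ("x-goog-request-params" is the request-routing header: the client encodes
- # selected request fields, here name=locations, into call metadata so the
- # backend can route the RPC without inspecting the request body.)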
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = AppGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_cancel_operation_from_dict(): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = AppGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = AppGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = AppGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = AppGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = AppGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_list_operations_field_headers(): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = AppGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_operations_from_dict(): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = AppGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_locations(transport: str = "grpc"): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = AppGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - -def test_list_locations_field_headers(): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() - - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = AppGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_locations_from_dict(): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = AppGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_location(transport: str = "grpc"): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = AppGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - -def test_get_location_field_headers(): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials()) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = locations_pb2.Location() - - client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_location_field_headers_async(): - client = AppGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials() - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. 
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            locations_pb2.Location()
-        )
-        await client.get_location(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
-
-def test_get_location_from_dict():
-    client = AppGatewaysServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = locations_pb2.Location()
-
-        response = client.get_location(
-            request={
-                "name": "locations/abc",
-            }
-        )
-        call.assert_called()
-@pytest.mark.asyncio
-async def test_get_location_from_dict_async():
-    client = AppGatewaysServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            locations_pb2.Location()
-        )
-        response = await client.get_location(
-            request={
-                "name": "locations/abc",
-            }
-        )
-        call.assert_called()
-
-
-def test_set_iam_policy(transport: str = "grpc"):
-    client = AppGatewaysServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = iam_policy_pb2.SetIamPolicyRequest()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",)
-        response = client.set_iam_policy(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, policy_pb2.Policy)
-
-    assert response.version == 774
-
-    assert response.etag == b"etag_blob"
-@pytest.mark.asyncio
-async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
-    client = AppGatewaysServiceAsyncClient(
-        credentials=async_anonymous_credentials(), transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = iam_policy_pb2.SetIamPolicyRequest()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            policy_pb2.Policy(version=774, etag=b"etag_blob",)
-        )
-        response = await client.set_iam_policy(request)
-        # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - -def test_set_iam_policy_field_headers(): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - - client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] -@pytest.mark.asyncio -async def test_set_iam_policy_field_headers_async(): - client = AppGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - - await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - -def test_set_iam_policy_from_dict(): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - - response = client.set_iam_policy( - request={ - "resource": "resource_value", - "policy": policy_pb2.Policy(version=774), - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_set_iam_policy_from_dict_async(): - client = AppGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy() - ) - - response = await client.set_iam_policy( - request={ - "resource": "resource_value", - "policy": policy_pb2.Policy(version=774), - } - ) - call.assert_called() - -def test_get_iam_policy(transport: str = "grpc"): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.GetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) - - response = client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - - -@pytest.mark.asyncio -async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): - client = AppGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.GetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy(version=774, etag=b"etag_blob",) - ) - - response = await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - - -def test_get_iam_policy_field_headers(): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_iam_policy_field_headers_async(): - client = AppGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - - await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -def test_get_iam_policy_from_dict(): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - - response = client.get_iam_policy( - request={ - "resource": "resource_value", - "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_get_iam_policy_from_dict_async(): - client = AppGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy() - ) - - response = await client.get_iam_policy( - request={ - "resource": "resource_value", - "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - -def test_test_iam_permissions(transport: str = "grpc"): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=["permissions_value"], - ) - - response = client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - assert response.permissions == ["permissions_value"] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): - client = AppGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse(permissions=["permissions_value"],) - ) - - response = await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - assert response.permissions == ["permissions_value"] - - -def test_test_iam_permissions_field_headers(): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_field_headers_async(): - client = AppGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse() - ) - - await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -def test_test_iam_permissions_from_dict(): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - response = client.test_iam_permissions( - request={ - "resource": "resource_value", - "permissions": ["permissions_value"], - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_test_iam_permissions_from_dict_async(): - client = AppGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse() - ) - - response = await client.test_iam_permissions( - request={ - "resource": "resource_value", - "permissions": ["permissions_value"], - } - ) - call.assert_called() - - -def test_transport_close_grpc(): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = AppGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close_rest(): - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = AppGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (AppGatewaysServiceClient, transports.AppGatewaysServiceGrpcTransport), - (AppGatewaysServiceAsyncClient, transports.AppGatewaysServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/.coveragerc b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/.coveragerc deleted file mode 100644 index e700a77b128c..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/beyondcorp_clientconnectorservices/__init__.py - google/cloud/beyondcorp_clientconnectorservices/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/.flake8 b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/.flake8 deleted file mode 100644 index 29227d4cf419..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. 
-  **/.nox/**
-  __pycache__,
-  .git,
-  *.pyc,
-  conf.py
diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/MANIFEST.in b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/MANIFEST.in
deleted file mode 100644
index 07cb8305a818..000000000000
--- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/MANIFEST.in
+++ /dev/null
@@ -1,2 +0,0 @@
-recursive-include google/cloud/beyondcorp_clientconnectorservices *.py
-recursive-include google/cloud/beyondcorp_clientconnectorservices_v1 *.py
diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/README.rst b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/README.rst
deleted file mode 100644
index f01d7fc78c5b..000000000000
--- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/README.rst
+++ /dev/null
@@ -1,143 +0,0 @@
-Python Client for Google Cloud Beyondcorp Clientconnectorservices API
-======================================================================
-
-Quick Start
------------
-
-In order to use this library, you first need to go through the following steps:
-
-1. `Select or create a Cloud Platform project.`_
-2. `Enable billing for your project.`_
-3. Enable the Google Cloud Beyondcorp Clientconnectorservices API.
-4. `Setup Authentication.`_
-
-.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
-.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
-.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
-
-Installation
-~~~~~~~~~~~~
-
-Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
-create isolated Python environments. The basic problem it addresses is one of
-dependencies and versions, and indirectly permissions.
-
-With `virtualenv`_, it's possible to install this library without needing system
-install permissions, and without clashing with the installed system
-dependencies.
-
-.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
-
-
-Mac/Linux
-^^^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    source <your-env>/bin/activate
-    <your-env>/bin/pip install /path/to/library
-
-
-Windows
-^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    <your-env>\Scripts\activate
-    <your-env>\Scripts\pip.exe install \path\to\library
-
-
-Logging
--------
-
-This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes.
-Note the following:
-
-#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging.
-#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**.
-#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below.
-
-
-Simple, environment-based configuration
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google
-logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged
-messages in a structured format.
-It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging
-event.
-
-A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log.
-
-- Valid logging scopes: :code:`google`, :code:`google.cloud.beyondcorp_clientconnectorservices_v1`, :code:`google.api`, :code:`google.auth`, etc.
-- Invalid logging scopes: :code:`foo`, :code:`123`, etc.
-
-**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers.
-
-
-Examples
-^^^^^^^^
-
-- Enabling the default handler for all Google-based loggers
-
-.. code-block:: console
-
-    export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google
-
-- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`):
-
-.. code-block:: console
-
-    export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1
-
-
-Advanced, code-based configuration
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-You can also configure a valid logging scope using Python's standard `logging` mechanism.
-
-
-Examples
-^^^^^^^^
-
-- Configuring a handler for all Google-based loggers
-
-.. code-block:: python
-
-    import logging
-
-    from google.cloud.translate_v3 import translate
-
-    base_logger = logging.getLogger("google")
-    base_logger.addHandler(logging.StreamHandler())
-    base_logger.setLevel(logging.DEBUG)
-
-- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`):
-
-.. code-block:: python
-
-    import logging
-
-    from google.cloud.translate_v3 import translate
-
-    base_logger = logging.getLogger("google.cloud.library_v1")
-    base_logger.addHandler(logging.StreamHandler())
-    base_logger.setLevel(logging.DEBUG)
-
-
-Logging details
-~~~~~~~~~~~~~~~
-
-#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root
-   logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set
-   :code:`logging.getLogger("google").propagate = True` in your code, as sketched below.
-#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for
-   one library, but decide you need to also set up environment-based logging configuration for another library.
-
-   #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual
-      if the code-based configuration gets applied first.
-
-#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get
-   executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured.
-   (This is the reason for 2.i. above.)
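-
-As a minimal sketch of the first point above, the following both attaches a handler and opts in to propagation; the :code:`StreamHandler` and :code:`DEBUG` level are illustrative choices, not requirements:
-
-.. code-block:: python
-
-    import logging
-
-    # Attach a handler to the "google" logger and explicitly re-enable
-    # propagation so records also reach any handlers on the root logger.
-    base_logger = logging.getLogger("google")
-    base_logger.addHandler(logging.StreamHandler())
-    base_logger.setLevel(logging.DEBUG)
-    base_logger.propagate = True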
diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/docs/_static/custom.css b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/docs/_static/custom.css deleted file mode 100644 index 06423be0b592..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/docs/_static/custom.css +++ /dev/null @@ -1,3 +0,0 @@ -dl.field-list > dt { - min-width: 100px -} diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/docs/beyondcorp_clientconnectorservices_v1/client_connector_services_service.rst b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/docs/beyondcorp_clientconnectorservices_v1/client_connector_services_service.rst deleted file mode 100644 index 0b82e1f69293..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/docs/beyondcorp_clientconnectorservices_v1/client_connector_services_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -ClientConnectorServicesService ------------------------------------------------- - -.. automodule:: google.cloud.beyondcorp_clientconnectorservices_v1.services.client_connector_services_service - :members: - :inherited-members: - -.. automodule:: google.cloud.beyondcorp_clientconnectorservices_v1.services.client_connector_services_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/docs/beyondcorp_clientconnectorservices_v1/services_.rst b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/docs/beyondcorp_clientconnectorservices_v1/services_.rst deleted file mode 100644 index e1f794ccb432..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/docs/beyondcorp_clientconnectorservices_v1/services_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Beyondcorp Clientconnectorservices v1 API -=================================================================== -.. toctree:: - :maxdepth: 2 - - client_connector_services_service diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/docs/beyondcorp_clientconnectorservices_v1/types_.rst b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/docs/beyondcorp_clientconnectorservices_v1/types_.rst deleted file mode 100644 index 9613ea9f7668..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/docs/beyondcorp_clientconnectorservices_v1/types_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Beyondcorp Clientconnectorservices v1 API -================================================================ - -.. automodule:: google.cloud.beyondcorp_clientconnectorservices_v1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/docs/conf.py b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/docs/conf.py deleted file mode 100644 index 190d8ab820de..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-#
-# google-cloud-beyondcorp-clientconnectorservices documentation build configuration file
-#
-# This file is execfile()d with the current directory set to its
-# containing dir.
-#
-# Note that not all possible configuration values are present in this
-# autogenerated file.
-#
-# All configuration values have a default; values that are commented out
-# serve to show the default.
-
-import sys
-import os
-import shlex
-
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
-sys.path.insert(0, os.path.abspath(".."))
-
-__version__ = "0.1.0"
-
-# -- General configuration ------------------------------------------------
-
-# If your documentation needs a minimal Sphinx version, state it here.
-needs_sphinx = "4.0.1"
-
-# Add any Sphinx extension module names here, as strings. They can be
-# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
-# ones.
-extensions = [
-    "sphinx.ext.autodoc",
-    "sphinx.ext.autosummary",
-    "sphinx.ext.intersphinx",
-    "sphinx.ext.coverage",
-    "sphinx.ext.napoleon",
-    "sphinx.ext.todo",
-    "sphinx.ext.viewcode",
-]
-
-# autodoc/autosummary flags
-autoclass_content = "both"
-autodoc_default_flags = ["members"]
-autosummary_generate = True
-
-
-# Add any paths that contain templates here, relative to this directory.
-templates_path = ["_templates"]
-
-# Allow markdown includes (so releases.md can include CHANGELOG.md)
-# http://www.sphinx-doc.org/en/master/markdown.html
-source_parsers = {".md": "recommonmark.parser.CommonMarkParser"}
-
-# The suffix(es) of source filenames.
-# You can specify multiple suffixes as a list of strings:
-source_suffix = [".rst", ".md"]
-
-# The encoding of source files.
-# source_encoding = 'utf-8-sig'
-
-# The root toctree document.
-root_doc = "index"
-
-# General information about the project.
-project = u"google-cloud-beyondcorp-clientconnectorservices"
-copyright = u"2023, Google, LLC"
-author = u"Google APIs"  # TODO: autogenerate this bit
-
-# The version info for the project you're documenting, acts as replacement for
-# |version| and |release|, also used in various other places throughout the
-# built documents.
-#
-# The full version, including alpha/beta/rc tags.
-release = __version__
-# The short X.Y version.
-version = ".".join(release.split(".")[0:2])
-
-# The language for content autogenerated by Sphinx. Refer to documentation
-# for a list of supported languages.
-#
-# This is also used if you do content translation via gettext catalogs.
-# Usually you set "language" from the command line for these cases.
-language = 'en'
-
-# There are two options for replacing |today|: either, you set today to some
-# non-false value, then it is used:
-# today = ''
-# Else, today_fmt is used as the format for a strftime call.
-# today_fmt = '%B %d, %Y'
-
-# List of patterns, relative to source directory, that match files and
-# directories to ignore when looking for source files.
-exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. 
-# html_split_index = False
-
-# If true, links to the reST sources are added to the pages.
-# html_show_sourcelink = True
-
-# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
-# html_show_sphinx = True
-
-# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
-# html_show_copyright = True
-
-# If true, an OpenSearch description file will be output, and all pages will
-# contain a tag referring to it. The value of this option must be the
-# base URL from which the finished HTML is served.
-# html_use_opensearch = ''
-
-# This is the file name suffix for HTML files (e.g. ".xhtml").
-# html_file_suffix = None
-
-# Language to be used for generating the HTML full-text search index.
-# Sphinx supports the following languages:
-# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
-# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
-# html_search_language = 'en'
-
-# A dictionary with options for the search language support, empty by default.
-# Now only 'ja' uses this config value
-# html_search_options = {'type': 'default'}
-
-# The name of a javascript file (relative to the configuration directory) that
-# implements a search results scorer. If empty, the default will be used.
-# html_search_scorer = 'scorer.js'
-
-# Output file base name for HTML help builder.
-htmlhelp_basename = "google-cloud-beyondcorp-clientconnectorservices-doc"
-
-# -- Options for warnings ------------------------------------------------------
-
-
-suppress_warnings = [
-    # Temporarily suppress this to avoid "more than one target found for
-    # cross-reference" warnings, which are intractable for us to avoid while in
-    # a mono-repo.
-    # See https://github.com/sphinx-doc/sphinx/blob
-    # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843
-    "ref.python"
-]
-
-# -- Options for LaTeX output ---------------------------------------------
-
-latex_elements = {
-    # The paper size ('letterpaper' or 'a4paper').
-    # 'papersize': 'letterpaper',
-    # The font size ('10pt', '11pt' or '12pt').
-    # 'pointsize': '10pt',
-    # Additional stuff for the LaTeX preamble.
-    # 'preamble': '',
-    # Latex figure (float) alignment
-    # 'figure_align': 'htbp',
-}
-
-# Grouping the document tree into LaTeX files. List of tuples
-# (source start file, target name, title,
-# author, documentclass [howto, manual, or own class]).
-latex_documents = [
-    (
-        root_doc,
-        "google-cloud-beyondcorp-clientconnectorservices.tex",
-        u"google-cloud-beyondcorp-clientconnectorservices Documentation",
-        author,
-        "manual",
-    )
-]
-
-# The name of an image file (relative to this directory) to place at the top of
-# the title page.
-# latex_logo = None
-
-# For "manual" documents, if this is true, then toplevel headings are parts,
-# not chapters.
-# latex_use_parts = False
-
-# If true, show page references after internal links.
-# latex_show_pagerefs = False
-
-# If true, show URL addresses after external links.
-# latex_show_urls = False
-
-# Documents to append as an appendix to all manuals.
-# latex_appendices = []
-
-# If false, no module index is generated.
-# latex_domain_indices = True
-
-
-# -- Options for manual page output ---------------------------------------
-
-# One entry per manual page. List of tuples
-# (source start file, name, description, authors, manual section).
-man_pages = [ - ( - root_doc, - "google-cloud-beyondcorp-clientconnectorservices", - u"Google Cloud Beyondcorp Clientconnectorservices Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-beyondcorp-clientconnectorservices", - u"google-cloud-beyondcorp-clientconnectorservices Documentation", - author, - "google-cloud-beyondcorp-clientconnectorservices", - "GAPIC library for Google Cloud Beyondcorp Clientconnectorservices API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/docs/index.rst b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/docs/index.rst deleted file mode 100644 index 97327383cc69..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - beyondcorp_clientconnectorservices_v1/services_ - beyondcorp_clientconnectorservices_v1/types_ diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices/__init__.py deleted file mode 100644 index fc7ae8d8e63e..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices/__init__.py +++ /dev/null @@ -1,43 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.beyondcorp_clientconnectorservices import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.beyondcorp_clientconnectorservices_v1.services.client_connector_services_service.client import ClientConnectorServicesServiceClient -from google.cloud.beyondcorp_clientconnectorservices_v1.services.client_connector_services_service.async_client import ClientConnectorServicesServiceAsyncClient - -from google.cloud.beyondcorp_clientconnectorservices_v1.types.client_connector_services_service import ClientConnectorService -from google.cloud.beyondcorp_clientconnectorservices_v1.types.client_connector_services_service import ClientConnectorServiceOperationMetadata -from google.cloud.beyondcorp_clientconnectorservices_v1.types.client_connector_services_service import CreateClientConnectorServiceRequest -from google.cloud.beyondcorp_clientconnectorservices_v1.types.client_connector_services_service import DeleteClientConnectorServiceRequest -from google.cloud.beyondcorp_clientconnectorservices_v1.types.client_connector_services_service import GetClientConnectorServiceRequest -from google.cloud.beyondcorp_clientconnectorservices_v1.types.client_connector_services_service import ListClientConnectorServicesRequest -from google.cloud.beyondcorp_clientconnectorservices_v1.types.client_connector_services_service import ListClientConnectorServicesResponse -from google.cloud.beyondcorp_clientconnectorservices_v1.types.client_connector_services_service import UpdateClientConnectorServiceRequest - -__all__ = ('ClientConnectorServicesServiceClient', - 'ClientConnectorServicesServiceAsyncClient', - 'ClientConnectorService', - 'ClientConnectorServiceOperationMetadata', - 'CreateClientConnectorServiceRequest', - 'DeleteClientConnectorServiceRequest', - 'GetClientConnectorServiceRequest', - 'ListClientConnectorServicesRequest', - 'ListClientConnectorServicesResponse', - 'UpdateClientConnectorServiceRequest', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices/gapic_version.py b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
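The ``gapic_version.py`` files in this diff pin a ``0.0.0`` placeholder that release tooling rewrites (note the ``{x-release-please-version}`` marker below), and the package ``__init__.py`` above re-exports it as ``__version__``. A minimal sketch of how a consumer reads it, assuming the library is installed:

```python
# A hedged sketch: __init__.py re-exports gapic_version.__version__, so the
# installed library's version is one attribute access away. "0.0.0" is only
# the pre-release placeholder shown in this diff.
from google.cloud import beyondcorp_clientconnectorservices

print(beyondcorp_clientconnectorservices.__version__)
```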
-#
-__version__ = "0.0.0"  # {x-release-please-version}
diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices/py.typed b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices/py.typed
deleted file mode 100644
index e76239a03894..000000000000
--- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices/py.typed
+++ /dev/null
@@ -1,2 +0,0 @@
-# Marker file for PEP 561.
-# The google-cloud-beyondcorp-clientconnectorservices package uses inline types.
diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/__init__.py
deleted file mode 100644
index dd7f405ba9af..000000000000
--- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/__init__.py
+++ /dev/null
@@ -1,44 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-from google.cloud.beyondcorp_clientconnectorservices_v1 import gapic_version as package_version
-
-__version__ = package_version.__version__
-
-
-from .services.client_connector_services_service import ClientConnectorServicesServiceClient
-from .services.client_connector_services_service import ClientConnectorServicesServiceAsyncClient
-
-from .types.client_connector_services_service import ClientConnectorService
-from .types.client_connector_services_service import ClientConnectorServiceOperationMetadata
-from .types.client_connector_services_service import CreateClientConnectorServiceRequest
-from .types.client_connector_services_service import DeleteClientConnectorServiceRequest
-from .types.client_connector_services_service import GetClientConnectorServiceRequest
-from .types.client_connector_services_service import ListClientConnectorServicesRequest
-from .types.client_connector_services_service import ListClientConnectorServicesResponse
-from .types.client_connector_services_service import UpdateClientConnectorServiceRequest
-
-__all__ = (
-    'ClientConnectorServicesServiceAsyncClient',
-    'ClientConnectorService',
-    'ClientConnectorServiceOperationMetadata',
-    'ClientConnectorServicesServiceClient',
-    'CreateClientConnectorServiceRequest',
-    'DeleteClientConnectorServiceRequest',
-    'GetClientConnectorServiceRequest',
-    'ListClientConnectorServicesRequest',
-    'ListClientConnectorServicesResponse',
-    'UpdateClientConnectorServiceRequest',
-)
diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/gapic_metadata.json b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/gapic_metadata.json
deleted file mode 100644
index
e63a0057820d..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/gapic_metadata.json +++ /dev/null @@ -1,103 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.beyondcorp_clientconnectorservices_v1", - "protoPackage": "google.cloud.beyondcorp.clientconnectorservices.v1", - "schema": "1.0", - "services": { - "ClientConnectorServicesService": { - "clients": { - "grpc": { - "libraryClient": "ClientConnectorServicesServiceClient", - "rpcs": { - "CreateClientConnectorService": { - "methods": [ - "create_client_connector_service" - ] - }, - "DeleteClientConnectorService": { - "methods": [ - "delete_client_connector_service" - ] - }, - "GetClientConnectorService": { - "methods": [ - "get_client_connector_service" - ] - }, - "ListClientConnectorServices": { - "methods": [ - "list_client_connector_services" - ] - }, - "UpdateClientConnectorService": { - "methods": [ - "update_client_connector_service" - ] - } - } - }, - "grpc-async": { - "libraryClient": "ClientConnectorServicesServiceAsyncClient", - "rpcs": { - "CreateClientConnectorService": { - "methods": [ - "create_client_connector_service" - ] - }, - "DeleteClientConnectorService": { - "methods": [ - "delete_client_connector_service" - ] - }, - "GetClientConnectorService": { - "methods": [ - "get_client_connector_service" - ] - }, - "ListClientConnectorServices": { - "methods": [ - "list_client_connector_services" - ] - }, - "UpdateClientConnectorService": { - "methods": [ - "update_client_connector_service" - ] - } - } - }, - "rest": { - "libraryClient": "ClientConnectorServicesServiceClient", - "rpcs": { - "CreateClientConnectorService": { - "methods": [ - "create_client_connector_service" - ] - }, - "DeleteClientConnectorService": { - "methods": [ - "delete_client_connector_service" - ] - }, - "GetClientConnectorService": { - "methods": [ - "get_client_connector_service" - ] - }, - "ListClientConnectorServices": { - "methods": [ - "list_client_connector_services" - ] - }, - "UpdateClientConnectorService": { - "methods": [ - "update_client_connector_service" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/gapic_version.py b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
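The ``gapic_metadata.json`` deleted above is a machine-readable map from proto RPC names to the generated client methods, keyed by transport. A small sketch of reading it (the local file path is an assumption):

```python
import json

# Resolve an RPC name to the generated Python method name per transport,
# following the structure of the gapic_metadata.json shown above.
with open("gapic_metadata.json") as f:  # path is an assumption
    meta = json.load(f)

clients = meta["services"]["ClientConnectorServicesService"]["clients"]
for transport in ("grpc", "grpc-async", "rest"):
    methods = clients[transport]["rpcs"]["ListClientConnectorServices"]["methods"]
    print(transport, methods)  # e.g. grpc ['list_client_connector_services']
```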
-# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/py.typed b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/py.typed deleted file mode 100644 index e76239a03894..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-beyondcorp-clientconnectorservices package uses inline types. diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/services/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/services/__init__.py deleted file mode 100644 index 8f6cf068242c..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/__init__.py deleted file mode 100644 index 160850a4e5fc..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
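The ``py.typed`` markers deleted above are PEP 561 indicators that the package ships inline type annotations, so type checkers analyze calls against the generated signatures rather than treating the package as untyped. A hedged illustration (the deliberately bad argument is mine, not from the diff):

```python
# With py.typed present, a checker such as mypy validates arguments against
# the inline annotations; the flattened `name` field is annotated as a str,
# so this call would be flagged before runtime.
from google.cloud import beyondcorp_clientconnectorservices_v1 as bccs

client = bccs.ClientConnectorServicesServiceClient()
client.get_client_connector_service(name=123)  # mypy: incompatible argument type
```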
-# -from .client import ClientConnectorServicesServiceClient -from .async_client import ClientConnectorServicesServiceAsyncClient - -__all__ = ( - 'ClientConnectorServicesServiceClient', - 'ClientConnectorServicesServiceAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/async_client.py b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/async_client.py deleted file mode 100644 index 909cecabdb5e..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/async_client.py +++ /dev/null @@ -1,1535 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.beyondcorp_clientconnectorservices_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.beyondcorp_clientconnectorservices_v1.services.client_connector_services_service import pagers -from google.cloud.beyondcorp_clientconnectorservices_v1.types import client_connector_services_service -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import ClientConnectorServicesServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import ClientConnectorServicesServiceGrpcAsyncIOTransport -from .client import ClientConnectorServicesServiceClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - 
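Every RPC method on the async client defined below follows the same calling convention: pass either a fully formed request object or the flattened keyword arguments, never both; mixing the two raises ``ValueError`` inside the generated method. A hedged usage sketch (the parent resource name is a placeholder):

```python
import asyncio

from google.cloud import beyondcorp_clientconnectorservices_v1 as bccs


async def demo() -> None:
    client = bccs.ClientConnectorServicesServiceAsyncClient()
    parent = "projects/my-project/locations/us-central1"  # placeholder

    # Style 1: a fully formed request object.
    request = bccs.ListClientConnectorServicesRequest(parent=parent)
    pager = await client.list_client_connector_services(request=request)

    # Style 2: flattened keyword arguments; the method builds the request
    # and derives the x-goog-request-params routing header from its fields.
    pager = await client.list_client_connector_services(parent=parent)

    async for service in pager:  # the pager resolves additional pages lazily
        print(service.name)


asyncio.run(demo())
```

The same ``ValueError`` guard and routing-header logic appear verbatim in each method body below.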
-class ClientConnectorServicesServiceAsyncClient: - """API Overview: - - The ``beyondcorp.googleapis.com`` service implements the Google - Cloud BeyondCorp API. - - Data Model: - - The ClientConnectorServicesService exposes the following resources: - - - Client Connector Services, named as follows: - ``projects/{project_id}/locations/{location_id}/client_connector_services/{client_connector_service_id}``. - """ - - _client: ClientConnectorServicesServiceClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = ClientConnectorServicesServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = ClientConnectorServicesServiceClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = ClientConnectorServicesServiceClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = ClientConnectorServicesServiceClient._DEFAULT_UNIVERSE - - client_connector_service_path = staticmethod(ClientConnectorServicesServiceClient.client_connector_service_path) - parse_client_connector_service_path = staticmethod(ClientConnectorServicesServiceClient.parse_client_connector_service_path) - common_billing_account_path = staticmethod(ClientConnectorServicesServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(ClientConnectorServicesServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(ClientConnectorServicesServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(ClientConnectorServicesServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(ClientConnectorServicesServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(ClientConnectorServicesServiceClient.parse_common_organization_path) - common_project_path = staticmethod(ClientConnectorServicesServiceClient.common_project_path) - parse_common_project_path = staticmethod(ClientConnectorServicesServiceClient.parse_common_project_path) - common_location_path = staticmethod(ClientConnectorServicesServiceClient.common_location_path) - parse_common_location_path = staticmethod(ClientConnectorServicesServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - ClientConnectorServicesServiceAsyncClient: The constructed client. - """ - return ClientConnectorServicesServiceClient.from_service_account_info.__func__(ClientConnectorServicesServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - ClientConnectorServicesServiceAsyncClient: The constructed client. 
- """ - return ClientConnectorServicesServiceClient.from_service_account_file.__func__(ClientConnectorServicesServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return ClientConnectorServicesServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> ClientConnectorServicesServiceTransport: - """Returns the transport used by the client instance. - - Returns: - ClientConnectorServicesServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = ClientConnectorServicesServiceClient.get_transport_class - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, ClientConnectorServicesServiceTransport, Callable[..., ClientConnectorServicesServiceTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the client connector services service async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,ClientConnectorServicesServiceTransport,Callable[..., ClientConnectorServicesServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. 
-                If a Callable is given, it will be called with the same set of initialization
-                arguments as used in the ClientConnectorServicesServiceTransport constructor.
-                If set to None, a transport is chosen automatically.
-            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
-                Custom options for the client.
-
-                1. The ``api_endpoint`` property can be used to override the
-                default endpoint provided by the client when ``transport`` is
-                not explicitly provided. Only if this property is not set and
-                ``transport`` was not explicitly provided, the endpoint is
-                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
-                variable, which has one of the following values:
-                "always" (always use the default mTLS endpoint), "never" (always
-                use the default regular endpoint) and "auto" (auto-switch to the
-                default mTLS endpoint if client certificate is present; this is
-                the default value).
-
-                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
-                is "true", then the ``client_cert_source`` property can be used
-                to provide a client certificate for mTLS transport. If
-                not provided, the default SSL client certificate will be used if
-                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
-                set, no client certificate will be used.
-
-                3. The ``universe_domain`` property can be used to override the
-                default "googleapis.com" universe. Note that ``api_endpoint``
-                property still takes precedence; and ``universe_domain`` is
-                currently not supported for mTLS.
-
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
-        """
-        self._client = ClientConnectorServicesServiceClient(
-            credentials=credentials,
-            transport=transport,
-            client_options=client_options,
-            client_info=client_info,
-
-        )
-
-        if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG):  # pragma: NO COVER
-            _LOGGER.debug(
-                "Created client `google.cloud.beyondcorp.clientconnectorservices_v1.ClientConnectorServicesServiceAsyncClient`.",
-                extra = {
-                    "serviceName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService",
-                    "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""),
-                    "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}",
-                    "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(),
-                } if hasattr(self._client._transport, "_credentials") else {
-                    "serviceName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService",
-                    "credentialsType": None,
-                }
-            )
-
-    async def list_client_connector_services(self,
-            request: Optional[Union[client_connector_services_service.ListClientConnectorServicesRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> pagers.ListClientConnectorServicesAsyncPager:
-        r"""Lists ClientConnectorServices in a given project and
-        location.
-
-        ..
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_clientconnectorservices_v1 - - async def sample_list_client_connector_services(): - # Create a client - client = beyondcorp_clientconnectorservices_v1.ClientConnectorServicesServiceAsyncClient() - - # Initialize request argument(s) - request = beyondcorp_clientconnectorservices_v1.ListClientConnectorServicesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_client_connector_services(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.beyondcorp_clientconnectorservices_v1.types.ListClientConnectorServicesRequest, dict]]): - The request object. Message for requesting list of - ClientConnectorServices. - parent (:class:`str`): - Required. Parent value for - ListClientConnectorServicesRequest. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.beyondcorp_clientconnectorservices_v1.services.client_connector_services_service.pagers.ListClientConnectorServicesAsyncPager: - Message for response to listing - ClientConnectorServices. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, client_connector_services_service.ListClientConnectorServicesRequest): - request = client_connector_services_service.ListClientConnectorServicesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_client_connector_services] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListClientConnectorServicesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_client_connector_service(self, - request: Optional[Union[client_connector_services_service.GetClientConnectorServiceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> client_connector_services_service.ClientConnectorService: - r"""Gets details of a single ClientConnectorService. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_clientconnectorservices_v1 - - async def sample_get_client_connector_service(): - # Create a client - client = beyondcorp_clientconnectorservices_v1.ClientConnectorServicesServiceAsyncClient() - - # Initialize request argument(s) - request = beyondcorp_clientconnectorservices_v1.GetClientConnectorServiceRequest( - name="name_value", - ) - - # Make the request - response = await client.get_client_connector_service(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.beyondcorp_clientconnectorservices_v1.types.GetClientConnectorServiceRequest, dict]]): - The request object. Message for getting a - ClientConnectorService. - name (:class:`str`): - Required. Name of the resource. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.beyondcorp_clientconnectorservices_v1.types.ClientConnectorService: - Message describing - ClientConnectorService object. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, client_connector_services_service.GetClientConnectorServiceRequest): - request = client_connector_services_service.GetClientConnectorServiceRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_client_connector_service] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_client_connector_service(self, - request: Optional[Union[client_connector_services_service.CreateClientConnectorServiceRequest, dict]] = None, - *, - parent: Optional[str] = None, - client_connector_service: Optional[client_connector_services_service.ClientConnectorService] = None, - client_connector_service_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates a new ClientConnectorService in a given - project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_clientconnectorservices_v1 - - async def sample_create_client_connector_service(): - # Create a client - client = beyondcorp_clientconnectorservices_v1.ClientConnectorServicesServiceAsyncClient() - - # Initialize request argument(s) - client_connector_service = beyondcorp_clientconnectorservices_v1.ClientConnectorService() - client_connector_service.name = "name_value" - client_connector_service.ingress.config.transport_protocol = "TCP" - client_connector_service.ingress.config.destination_routes.address = "address_value" - client_connector_service.ingress.config.destination_routes.netmask = "netmask_value" - client_connector_service.egress.peered_vpc.network_vpc = "network_vpc_value" - - request = beyondcorp_clientconnectorservices_v1.CreateClientConnectorServiceRequest( - parent="parent_value", - client_connector_service=client_connector_service, - ) - - # Make the request - operation = client.create_client_connector_service(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.beyondcorp_clientconnectorservices_v1.types.CreateClientConnectorServiceRequest, dict]]): - The request object. Message for creating a - ClientConnectorService. - parent (:class:`str`): - Required. Value for parent. - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - client_connector_service (:class:`google.cloud.beyondcorp_clientconnectorservices_v1.types.ClientConnectorService`): - Required. The resource being created. - This corresponds to the ``client_connector_service`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - client_connector_service_id (:class:`str`): - Optional. User-settable client connector service - resource ID. - - - Must start with a letter. - - Must contain between 4-63 characters from - ``/[a-z][0-9]-/``. - - Must end with a number or a letter. - - A random system generated name will be assigned if not - specified by the user. - - This corresponds to the ``client_connector_service_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.beyondcorp_clientconnectorservices_v1.types.ClientConnectorService` - Message describing ClientConnectorService object. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [parent, client_connector_service, client_connector_service_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, client_connector_services_service.CreateClientConnectorServiceRequest): - request = client_connector_services_service.CreateClientConnectorServiceRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if client_connector_service is not None: - request.client_connector_service = client_connector_service - if client_connector_service_id is not None: - request.client_connector_service_id = client_connector_service_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_client_connector_service] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - client_connector_services_service.ClientConnectorService, - metadata_type=client_connector_services_service.ClientConnectorServiceOperationMetadata, - ) - - # Done; return the response. - return response - - async def update_client_connector_service(self, - request: Optional[Union[client_connector_services_service.UpdateClientConnectorServiceRequest, dict]] = None, - *, - client_connector_service: Optional[client_connector_services_service.ClientConnectorService] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Updates the parameters of a single - ClientConnectorService. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_clientconnectorservices_v1 - - async def sample_update_client_connector_service(): - # Create a client - client = beyondcorp_clientconnectorservices_v1.ClientConnectorServicesServiceAsyncClient() - - # Initialize request argument(s) - client_connector_service = beyondcorp_clientconnectorservices_v1.ClientConnectorService() - client_connector_service.name = "name_value" - client_connector_service.ingress.config.transport_protocol = "TCP" - client_connector_service.ingress.config.destination_routes.address = "address_value" - client_connector_service.ingress.config.destination_routes.netmask = "netmask_value" - client_connector_service.egress.peered_vpc.network_vpc = "network_vpc_value" - - request = beyondcorp_clientconnectorservices_v1.UpdateClientConnectorServiceRequest( - client_connector_service=client_connector_service, - ) - - # Make the request - operation = client.update_client_connector_service(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.beyondcorp_clientconnectorservices_v1.types.UpdateClientConnectorServiceRequest, dict]]): - The request object. Message for updating a - ClientConnectorService - client_connector_service (:class:`google.cloud.beyondcorp_clientconnectorservices_v1.types.ClientConnectorService`): - Required. The resource being updated. - This corresponds to the ``client_connector_service`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Field mask is used to specify the fields to be - overwritten in the ClientConnectorService resource by - the update. The fields specified in the update_mask are - relative to the resource, not the full request. A field - will be overwritten if it is in the mask. If the user - does not provide a mask then all fields will be - overwritten. - - Mutable fields: display_name. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.beyondcorp_clientconnectorservices_v1.types.ClientConnectorService` - Message describing ClientConnectorService object. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [client_connector_service, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, client_connector_services_service.UpdateClientConnectorServiceRequest): - request = client_connector_services_service.UpdateClientConnectorServiceRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if client_connector_service is not None: - request.client_connector_service = client_connector_service - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_client_connector_service] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("client_connector_service.name", request.client_connector_service.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - client_connector_services_service.ClientConnectorService, - metadata_type=client_connector_services_service.ClientConnectorServiceOperationMetadata, - ) - - # Done; return the response. - return response - - async def delete_client_connector_service(self, - request: Optional[Union[client_connector_services_service.DeleteClientConnectorServiceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes a single ClientConnectorService. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_clientconnectorservices_v1 - - async def sample_delete_client_connector_service(): - # Create a client - client = beyondcorp_clientconnectorservices_v1.ClientConnectorServicesServiceAsyncClient() - - # Initialize request argument(s) - request = beyondcorp_clientconnectorservices_v1.DeleteClientConnectorServiceRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_client_connector_service(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.beyondcorp_clientconnectorservices_v1.types.DeleteClientConnectorServiceRequest, dict]]): - The request object. Message for deleting a - ClientConnectorService. - name (:class:`str`): - Required. Name of the resource. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, client_connector_services_service.DeleteClientConnectorServiceRequest): - request = client_connector_services_service.DeleteClientConnectorServiceRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_client_connector_service] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. 
- self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=client_connector_services_service.ClientConnectorServiceOperationMetadata, - ) - - # Done; return the response. - return response - - async def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. 
- # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def set_iam_policy( - self, - request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM access control policy on the specified function. - - Replaces any existing policy. - - Args: - request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): - The request object. Request message for `SetIamPolicy` - method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. 
-
-                **JSON Example**
-
-                ::
-
-                    {
-                      "bindings": [
-                        {
-                          "role": "roles/resourcemanager.organizationAdmin",
-                          "members": [
-                            "user:mike@example.com",
-                            "group:admins@example.com",
-                            "domain:google.com",
-                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
-                          ]
-                        },
-                        {
-                          "role": "roles/resourcemanager.organizationViewer",
-                          "members": ["user:eve@example.com"],
-                          "condition": {
-                            "title": "expirable access",
-                            "description": "Does not grant access after Sep 2020",
-                            "expression": "request.time <
-                            timestamp('2020-10-01T00:00:00.000Z')",
-                          }
-                        }
-                      ]
-                    }
-
-                **YAML Example**
-
-                ::
-
-                    bindings:
-                    - members:
-                      - user:mike@example.com
-                      - group:admins@example.com
-                      - domain:google.com
-                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
-                      role: roles/resourcemanager.organizationAdmin
-                    - members:
-                      - user:eve@example.com
-                      role: roles/resourcemanager.organizationViewer
-                      condition:
-                        title: expirable access
-                        description: Does not grant access after Sep 2020
-                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
-
-                For a description of IAM and its features, see the `IAM
-                developer's guide <https://cloud.google.com/iam/docs>`__.
-        """
-        # Create or coerce a protobuf request object.
-
-        # The request isn't a proto-plus wrapped type,
-        # so it must be constructed via keyword expansion.
-        if isinstance(request, dict):
-            request = iam_policy_pb2.SetIamPolicyRequest(**request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self.transport._wrapped_methods[self._client._transport.set_iam_policy]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("resource", request.resource),)),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request, retry=retry, timeout=timeout, metadata=metadata,)
-
-        # Done; return the response.
-        return response
-
-    async def get_iam_policy(
-        self,
-        request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None,
-        *,
-        retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-    ) -> policy_pb2.Policy:
-        r"""Gets the IAM access control policy for a function.
-
-        Returns an empty policy if the function exists and does not have a
-        policy set.
-
-        Args:
-            request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`):
-                The request object. Request message for `GetIamPolicy`
-                method.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if
-                any, should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-        Returns:
-            ~.policy_pb2.Policy:
-                Defines an Identity and Access Management (IAM) policy.
-                It is used to specify access control policies for Cloud
-                Platform resources.
-                A ``Policy`` is a collection of ``bindings``. A
-                ``binding`` binds one or more ``members`` to a single
-                ``role``. Members can be user accounts, service
-                accounts, Google groups, and domains (such as G Suite).
-                A ``role`` is a named list of permissions (defined by
-                IAM or configured by users). A ``binding`` can
-                optionally specify a ``condition``, which is a logic
-                expression that further constrains the role binding
-                based on attributes about the request and/or target
-                resource.
-
-                **JSON Example**
-
-                ::
-
-                    {
-                      "bindings": [
-                        {
-                          "role": "roles/resourcemanager.organizationAdmin",
-                          "members": [
-                            "user:mike@example.com",
-                            "group:admins@example.com",
-                            "domain:google.com",
-                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
-                          ]
-                        },
-                        {
-                          "role": "roles/resourcemanager.organizationViewer",
-                          "members": ["user:eve@example.com"],
-                          "condition": {
-                            "title": "expirable access",
-                            "description": "Does not grant access after Sep 2020",
-                            "expression": "request.time <
-                            timestamp('2020-10-01T00:00:00.000Z')",
-                          }
-                        }
-                      ]
-                    }
-
-                **YAML Example**
-
-                ::
-
-                    bindings:
-                    - members:
-                      - user:mike@example.com
-                      - group:admins@example.com
-                      - domain:google.com
-                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
-                      role: roles/resourcemanager.organizationAdmin
-                    - members:
-                      - user:eve@example.com
-                      role: roles/resourcemanager.organizationViewer
-                      condition:
-                        title: expirable access
-                        description: Does not grant access after Sep 2020
-                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
-
-                For a description of IAM and its features, see the `IAM
-                developer's guide <https://cloud.google.com/iam/docs>`__.
-        """
-        # Create or coerce a protobuf request object.
-
-        # The request isn't a proto-plus wrapped type,
-        # so it must be constructed via keyword expansion.
-        if isinstance(request, dict):
-            request = iam_policy_pb2.GetIamPolicyRequest(**request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self.transport._wrapped_methods[self._client._transport.get_iam_policy]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("resource", request.resource),)),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request, retry=retry, timeout=timeout, metadata=metadata,)
-
-        # Done; return the response.
-        return response
-
-    async def test_iam_permissions(
-        self,
-        request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None,
-        *,
-        retry: OptionalRetry = gapic_v1.method.DEFAULT,
-        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-    ) -> iam_policy_pb2.TestIamPermissionsResponse:
-        r"""Tests the specified IAM permissions against the IAM access control
-        policy for a function.
-
-        If the function does not exist, this will return an empty set
-        of permissions, not a NOT_FOUND error.
-
-        Args:
-            request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`):
-                The request object. Request message for
-                `TestIamPermissions` method.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors,
-                if any, should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-        Returns:
-            ~.iam_policy_pb2.TestIamPermissionsResponse:
-                Response message for ``TestIamPermissions`` method.
-        """
-        # Create or coerce a protobuf request object.
- - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.test_iam_permissions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def __aenter__(self) -> "ClientConnectorServicesServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "ClientConnectorServicesServiceAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/client.py b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/client.py deleted file mode 100644 index 87c0af944b1b..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/client.py +++ /dev/null @@ -1,1915 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.beyondcorp_clientconnectorservices_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.beyondcorp_clientconnectorservices_v1.services.client_connector_services_service import pagers -from google.cloud.beyondcorp_clientconnectorservices_v1.types import client_connector_services_service -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import ClientConnectorServicesServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import ClientConnectorServicesServiceGrpcTransport -from .transports.grpc_asyncio import ClientConnectorServicesServiceGrpcAsyncIOTransport -from .transports.rest import ClientConnectorServicesServiceRestTransport - - -class ClientConnectorServicesServiceClientMeta(type): - """Metaclass for the ClientConnectorServicesService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[ClientConnectorServicesServiceTransport]] - _transport_registry["grpc"] = ClientConnectorServicesServiceGrpcTransport - _transport_registry["grpc_asyncio"] = ClientConnectorServicesServiceGrpcAsyncIOTransport - _transport_registry["rest"] = ClientConnectorServicesServiceRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[ClientConnectorServicesServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. 
-        if label:
-            return cls._transport_registry[label]
-
-        # No transport is requested; return the default (that is, the first one
-        # in the dictionary).
-        return next(iter(cls._transport_registry.values()))
-
-
-class ClientConnectorServicesServiceClient(metaclass=ClientConnectorServicesServiceClientMeta):
-    """API Overview:
-
-    The ``beyondcorp.googleapis.com`` service implements the Google
-    Cloud BeyondCorp API.
-
-    Data Model:
-
-    The ClientConnectorServicesService exposes the following resources:
-
-    -  Client Connector Services, named as follows:
-       ``projects/{project_id}/locations/{location_id}/client_connector_services/{client_connector_service_id}``.
-    """
-
-    @staticmethod
-    def _get_default_mtls_endpoint(api_endpoint):
-        """Converts api endpoint to mTLS endpoint.
-
-        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
-        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
-        Args:
-            api_endpoint (Optional[str]): the api endpoint to convert.
-        Returns:
-            str: converted mTLS api endpoint.
-        """
-        if not api_endpoint:
-            return api_endpoint
-
-        mtls_endpoint_re = re.compile(
-            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
-        )
-
-        m = mtls_endpoint_re.match(api_endpoint)
-        name, mtls, sandbox, googledomain = m.groups()
-        if mtls or not googledomain:
-            return api_endpoint
-
-        if sandbox:
-            return api_endpoint.replace(
-                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
-            )
-
-        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
-
-    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
-    DEFAULT_ENDPOINT = "beyondcorp.googleapis.com"
-    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
-        DEFAULT_ENDPOINT
-    )
-
-    _DEFAULT_ENDPOINT_TEMPLATE = "beyondcorp.{UNIVERSE_DOMAIN}"
-    _DEFAULT_UNIVERSE = "googleapis.com"
-
-    @classmethod
-    def from_service_account_info(cls, info: dict, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            info.
-
-        Args:
-            info (dict): The service account private key info.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            ClientConnectorServicesServiceClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_info(info)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    @classmethod
-    def from_service_account_file(cls, filename: str, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            file.
-
-        Args:
-            filename (str): The path to the service account private key json
-                file.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            ClientConnectorServicesServiceClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_file(
-            filename)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    from_service_account_json = from_service_account_file
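-    # Illustrative sketch; not part of the generated code. It shows how the
-    # helpers above behave; the key-file path below is a hypothetical
-    # placeholder.
-    #
-    #   ClientConnectorServicesServiceClient._get_default_mtls_endpoint(
-    #       "beyondcorp.googleapis.com")
-    #   # -> "beyondcorp.mtls.googleapis.com"
-    #
-    #   client = ClientConnectorServicesServiceClient.from_service_account_file(
-    #       "path/to/service-account.json")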
- """ - return self._transport - - @staticmethod - def client_connector_service_path(project: str,location: str,client_connector_service: str,) -> str: - """Returns a fully-qualified client_connector_service string.""" - return "projects/{project}/locations/{location}/clientConnectorServices/{client_connector_service}".format(project=project, location=location, client_connector_service=client_connector_service, ) - - @staticmethod - def parse_client_connector_service_path(path: str) -> Dict[str,str]: - """Parses a client_connector_service path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/clientConnectorServices/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Deprecated. Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. 
-
-    @classmethod
-    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
-        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
-
-        The client cert source is determined in the following order:
-        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
-        client cert source is None.
-        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
-        default client cert source exists, use the default one; otherwise the client cert
-        source is None.
-
-        The API endpoint is determined in the following order:
-        (1) if `client_options.api_endpoint` is provided, use the provided one.
-        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
-        default mTLS endpoint; if the environment variable is "never", use the default API
-        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
-        use the default API endpoint.
-
-        More details can be found at https://google.aip.dev/auth/4114.
-
-        Args:
-            client_options (google.api_core.client_options.ClientOptions): Custom options for the
-                client. Only the `api_endpoint` and `client_cert_source` properties may be used
-                in this method.
-
-        Returns:
-            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
-                client cert source to use.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
-        """
-
-        warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
-            DeprecationWarning)
-        if client_options is None:
-            client_options = client_options_lib.ClientOptions()
-        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
-        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
-        if use_client_cert not in ("true", "false"):
-            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
-        if use_mtls_endpoint not in ("auto", "never", "always"):
-            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
-
-        # Figure out the client cert source to use.
-        client_cert_source = None
-        if use_client_cert == "true":
-            if client_options.client_cert_source:
-                client_cert_source = client_options.client_cert_source
-            elif mtls.has_default_client_cert_source():
-                client_cert_source = mtls.default_client_cert_source()
-
-        # Figure out which api endpoint to use.
-        if client_options.api_endpoint is not None:
-            api_endpoint = client_options.api_endpoint
-        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
-            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
-        else:
-            api_endpoint = cls.DEFAULT_ENDPOINT
-
-        return api_endpoint, client_cert_source
-
-    @staticmethod
-    def _read_environment_variables():
-        """Returns the environment variables used by the client.
-
-        Returns:
-            Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
-            GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.
-
-        Raises:
-            ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
-                any of ["true", "false"].
-            google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
-                is not any of ["auto", "never", "always"].
- """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = ClientConnectorServicesServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = ClientConnectorServicesServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = ClientConnectorServicesServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. 
- """ - universe_domain = ClientConnectorServicesServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. - """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, ClientConnectorServicesServiceTransport, Callable[..., ClientConnectorServicesServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the client connector services service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,ClientConnectorServicesServiceTransport,Callable[..., ClientConnectorServicesServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the ClientConnectorServicesServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. 
Only if this property is not set and
-                ``transport`` was not explicitly provided, the endpoint is
-                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
-                variable, which can have one of the following values:
-                "always" (always use the default mTLS endpoint), "never" (always
-                use the default regular endpoint) and "auto" (auto-switch to the
-                default mTLS endpoint if a client certificate is present; this is
-                the default value).
-
-                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
-                is "true", then the ``client_cert_source`` property can be used
-                to provide a client certificate for mTLS transport. If
-                not provided, the default SSL client certificate will be used if
-                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
-                set, no client certificate will be used.
-
-                3. The ``universe_domain`` property can be used to override the
-                default "googleapis.com" universe. Note that the ``api_endpoint``
-                property still takes precedence, and ``universe_domain`` is
-                currently not supported for mTLS.
-
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
-        """
-        self._client_options = client_options
-        if isinstance(self._client_options, dict):
-            self._client_options = client_options_lib.from_dict(self._client_options)
-        if self._client_options is None:
-            self._client_options = client_options_lib.ClientOptions()
-        self._client_options = cast(client_options_lib.ClientOptions, self._client_options)
-
-        universe_domain_opt = getattr(self._client_options, 'universe_domain', None)
-
-        self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = ClientConnectorServicesServiceClient._read_environment_variables()
-        self._client_cert_source = ClientConnectorServicesServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert)
-        self._universe_domain = ClientConnectorServicesServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env)
-        self._api_endpoint = None # updated below, depending on `transport`
-
-        # Initialize the universe domain validation.
-        self._is_universe_domain_valid = False
-
-        if CLIENT_LOGGING_SUPPORTED:  # pragma: NO COVER
-            # Setup logging.
-            client_logging.initialize_logging()
-
-        api_key_value = getattr(self._client_options, "api_key", None)
-        if api_key_value and credentials:
-            raise ValueError("client_options.api_key and credentials are mutually exclusive")
-
-        # Save or instantiate the transport.
-        # Ordinarily, we provide the transport, but allowing a custom transport
-        # instance provides an extensibility point for unusual situations.
-        transport_provided = isinstance(transport, ClientConnectorServicesServiceTransport)
-        if transport_provided:
-            # transport is a ClientConnectorServicesServiceTransport instance.
-            if credentials or self._client_options.credentials_file or api_key_value:
-                raise ValueError("When providing a transport instance, "
-                                 "provide its credentials directly.")
-            if self._client_options.scopes:
-                raise ValueError(
-                    "When providing a transport instance, provide its scopes "
-                    "directly."
- ) - self._transport = cast(ClientConnectorServicesServiceTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - ClientConnectorServicesServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[ClientConnectorServicesServiceTransport], Callable[..., ClientConnectorServicesServiceTransport]] = ( - ClientConnectorServicesServiceClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., ClientConnectorServicesServiceTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.beyondcorp.clientconnectorservices_v1.ClientConnectorServicesServiceClient`.", - extra = { - "serviceName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService", - "credentialsType": None, - } - ) - - def list_client_connector_services(self, - request: Optional[Union[client_connector_services_service.ListClientConnectorServicesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListClientConnectorServicesPager: - r"""Lists ClientConnectorServices in a given project and - location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_clientconnectorservices_v1 - - def sample_list_client_connector_services(): - # Create a client - client = beyondcorp_clientconnectorservices_v1.ClientConnectorServicesServiceClient() - - # Initialize request argument(s) - request = beyondcorp_clientconnectorservices_v1.ListClientConnectorServicesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_client_connector_services(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.beyondcorp_clientconnectorservices_v1.types.ListClientConnectorServicesRequest, dict]): - The request object. Message for requesting list of - ClientConnectorServices. - parent (str): - Required. Parent value for - ListClientConnectorServicesRequest. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.beyondcorp_clientconnectorservices_v1.services.client_connector_services_service.pagers.ListClientConnectorServicesPager: - Message for response to listing - ClientConnectorServices. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, client_connector_services_service.ListClientConnectorServicesRequest): - request = client_connector_services_service.ListClientConnectorServicesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_client_connector_services] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. 
- response = pagers.ListClientConnectorServicesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_client_connector_service(self, - request: Optional[Union[client_connector_services_service.GetClientConnectorServiceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> client_connector_services_service.ClientConnectorService: - r"""Gets details of a single ClientConnectorService. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_clientconnectorservices_v1 - - def sample_get_client_connector_service(): - # Create a client - client = beyondcorp_clientconnectorservices_v1.ClientConnectorServicesServiceClient() - - # Initialize request argument(s) - request = beyondcorp_clientconnectorservices_v1.GetClientConnectorServiceRequest( - name="name_value", - ) - - # Make the request - response = client.get_client_connector_service(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.beyondcorp_clientconnectorservices_v1.types.GetClientConnectorServiceRequest, dict]): - The request object. Message for getting a - ClientConnectorService. - name (str): - Required. Name of the resource. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.beyondcorp_clientconnectorservices_v1.types.ClientConnectorService: - Message describing - ClientConnectorService object. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, client_connector_services_service.GetClientConnectorServiceRequest): - request = client_connector_services_service.GetClientConnectorServiceRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_client_connector_service] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_client_connector_service(self, - request: Optional[Union[client_connector_services_service.CreateClientConnectorServiceRequest, dict]] = None, - *, - parent: Optional[str] = None, - client_connector_service: Optional[client_connector_services_service.ClientConnectorService] = None, - client_connector_service_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Creates a new ClientConnectorService in a given - project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_clientconnectorservices_v1 - - def sample_create_client_connector_service(): - # Create a client - client = beyondcorp_clientconnectorservices_v1.ClientConnectorServicesServiceClient() - - # Initialize request argument(s) - client_connector_service = beyondcorp_clientconnectorservices_v1.ClientConnectorService() - client_connector_service.name = "name_value" - client_connector_service.ingress.config.transport_protocol = "TCP" - client_connector_service.ingress.config.destination_routes.address = "address_value" - client_connector_service.ingress.config.destination_routes.netmask = "netmask_value" - client_connector_service.egress.peered_vpc.network_vpc = "network_vpc_value" - - request = beyondcorp_clientconnectorservices_v1.CreateClientConnectorServiceRequest( - parent="parent_value", - client_connector_service=client_connector_service, - ) - - # Make the request - operation = client.create_client_connector_service(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.beyondcorp_clientconnectorservices_v1.types.CreateClientConnectorServiceRequest, dict]): - The request object. Message for creating a - ClientConnectorService. - parent (str): - Required. Value for parent. - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - client_connector_service (google.cloud.beyondcorp_clientconnectorservices_v1.types.ClientConnectorService): - Required. The resource being created. - This corresponds to the ``client_connector_service`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - client_connector_service_id (str): - Optional. 
User-settable client connector service - resource ID. - - - Must start with a letter. - - Must contain between 4-63 characters from - ``/[a-z][0-9]-/``. - - Must end with a number or a letter. - - A random system generated name will be assigned if not - specified by the user. - - This corresponds to the ``client_connector_service_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.beyondcorp_clientconnectorservices_v1.types.ClientConnectorService` - Message describing ClientConnectorService object. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, client_connector_service, client_connector_service_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, client_connector_services_service.CreateClientConnectorServiceRequest): - request = client_connector_services_service.CreateClientConnectorServiceRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if client_connector_service is not None: - request.client_connector_service = client_connector_service - if client_connector_service_id is not None: - request.client_connector_service_id = client_connector_service_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_client_connector_service] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - client_connector_services_service.ClientConnectorService, - metadata_type=client_connector_services_service.ClientConnectorServiceOperationMetadata, - ) - - # Done; return the response. 
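-        # What the caller receives is a google.api_core.operation.Operation
-        # future; a hypothetical way to consume it:
-        #
-        #     operation = client.create_client_connector_service(request=request)
-        #     service = operation.result(timeout=300)  # block until the LRO finishes
-        #     print(operation.metadata)  # ClientConnectorServiceOperationMetadata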
- return response - - def update_client_connector_service(self, - request: Optional[Union[client_connector_services_service.UpdateClientConnectorServiceRequest, dict]] = None, - *, - client_connector_service: Optional[client_connector_services_service.ClientConnectorService] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Updates the parameters of a single - ClientConnectorService. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_clientconnectorservices_v1 - - def sample_update_client_connector_service(): - # Create a client - client = beyondcorp_clientconnectorservices_v1.ClientConnectorServicesServiceClient() - - # Initialize request argument(s) - client_connector_service = beyondcorp_clientconnectorservices_v1.ClientConnectorService() - client_connector_service.name = "name_value" - client_connector_service.ingress.config.transport_protocol = "TCP" - client_connector_service.ingress.config.destination_routes.address = "address_value" - client_connector_service.ingress.config.destination_routes.netmask = "netmask_value" - client_connector_service.egress.peered_vpc.network_vpc = "network_vpc_value" - - request = beyondcorp_clientconnectorservices_v1.UpdateClientConnectorServiceRequest( - client_connector_service=client_connector_service, - ) - - # Make the request - operation = client.update_client_connector_service(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.beyondcorp_clientconnectorservices_v1.types.UpdateClientConnectorServiceRequest, dict]): - The request object. Message for updating a - ClientConnectorService - client_connector_service (google.cloud.beyondcorp_clientconnectorservices_v1.types.ClientConnectorService): - Required. The resource being updated. - This corresponds to the ``client_connector_service`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Field mask is used to specify the fields to be - overwritten in the ClientConnectorService resource by - the update. The fields specified in the update_mask are - relative to the resource, not the full request. A field - will be overwritten if it is in the mask. If the user - does not provide a mask then all fields will be - overwritten. - - Mutable fields: display_name. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.beyondcorp_clientconnectorservices_v1.types.ClientConnectorService` - Message describing ClientConnectorService object. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [client_connector_service, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, client_connector_services_service.UpdateClientConnectorServiceRequest): - request = client_connector_services_service.UpdateClientConnectorServiceRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if client_connector_service is not None: - request.client_connector_service = client_connector_service - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_client_connector_service] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("client_connector_service.name", request.client_connector_service.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - client_connector_services_service.ClientConnectorService, - metadata_type=client_connector_services_service.ClientConnectorServiceOperationMetadata, - ) - - # Done; return the response. - return response - - def delete_client_connector_service(self, - request: Optional[Union[client_connector_services_service.DeleteClientConnectorServiceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Deletes a single ClientConnectorService. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_clientconnectorservices_v1 - - def sample_delete_client_connector_service(): - # Create a client - client = beyondcorp_clientconnectorservices_v1.ClientConnectorServicesServiceClient() - - # Initialize request argument(s) - request = beyondcorp_clientconnectorservices_v1.DeleteClientConnectorServiceRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_client_connector_service(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.beyondcorp_clientconnectorservices_v1.types.DeleteClientConnectorServiceRequest, dict]): - The request object. Message for deleting a - ClientConnectorService. - name (str): - Required. Name of the resource. - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, client_connector_services_service.DeleteClientConnectorServiceRequest): - request = client_connector_services_service.DeleteClientConnectorServiceRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_client_connector_service] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
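-        # Note that `rpc` already carries the service defaults installed by
-        # _prep_wrapped_methods; a caller may still override them per call,
-        # e.g. (hypothetical values):
-        #
-        #     from google.api_core import retry as retries
-        #     client.delete_client_connector_service(
-        #         name="projects/my-proj/locations/us-central1/clientConnectorServices/my-svc",
-        #         timeout=60.0,
-        #         retry=retries.Retry(initial=1.0, maximum=10.0, timeout=120.0),
-        #     )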
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=client_connector_services_service.ClientConnectorServiceOperationMetadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "ClientConnectorServicesServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
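-        # Hypothetical usage, e.g. pruning the record of a completed operation:
-        #
-        #     client.delete_operation({"name": "projects/my-proj/locations/us-central1/operations/operation-123"})
-        #
-        # The server answers with google.protobuf.Empty, so nothing is returned here.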
- rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def set_iam_policy( - self, - request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM access control policy on the specified function. - - Replaces any existing policy. - - Args: - request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): - The request object. Request message for `SetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). 
A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. - - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def get_iam_policy( - self, - request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Gets the IAM access control policy for a function. - - Returns an empty policy if the function exists and does not have a - policy set. - - Args: - request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): - The request object. Request message for `GetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if - any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. 
A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. - - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def test_iam_permissions( - self, - request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Tests the specified IAM permissions against the IAM access control - policy for a function. - - If the function does not exist, this will return an empty set - of permissions, not a NOT_FOUND error. - - Args: - request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): - The request object. Request message for - `TestIamPermissions` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.iam_policy_pb2.TestIamPermissionsResponse: - Response message for ``TestIamPermissions`` method. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. 
- - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "ClientConnectorServicesServiceClient", -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/pagers.py b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/pagers.py deleted file mode 100644 index f4d53387e202..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/pagers.py +++ /dev/null @@ -1,166 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.beyondcorp_clientconnectorservices_v1.types import client_connector_services_service - - -class ListClientConnectorServicesPager: - """A pager for iterating through ``list_client_connector_services`` requests. - - This class thinly wraps an initial - :class:`google.cloud.beyondcorp_clientconnectorservices_v1.types.ListClientConnectorServicesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``client_connector_services`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListClientConnectorServices`` requests and continue to iterate - through the ``client_connector_services`` field on the - corresponding responses. - - All the usual :class:`google.cloud.beyondcorp_clientconnectorservices_v1.types.ListClientConnectorServicesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., client_connector_services_service.ListClientConnectorServicesResponse], - request: client_connector_services_service.ListClientConnectorServicesRequest, - response: client_connector_services_service.ListClientConnectorServicesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.beyondcorp_clientconnectorservices_v1.types.ListClientConnectorServicesRequest): - The initial request object. - response (google.cloud.beyondcorp_clientconnectorservices_v1.types.ListClientConnectorServicesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = client_connector_services_service.ListClientConnectorServicesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[client_connector_services_service.ListClientConnectorServicesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[client_connector_services_service.ClientConnectorService]: - for page in self.pages: - yield from page.client_connector_services - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListClientConnectorServicesAsyncPager: - """A pager for iterating through ``list_client_connector_services`` requests. - - This class thinly wraps an initial - :class:`google.cloud.beyondcorp_clientconnectorservices_v1.types.ListClientConnectorServicesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``client_connector_services`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListClientConnectorServices`` requests and continue to iterate - through the ``client_connector_services`` field on the - corresponding responses. - - All the usual :class:`google.cloud.beyondcorp_clientconnectorservices_v1.types.ListClientConnectorServicesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[client_connector_services_service.ListClientConnectorServicesResponse]], - request: client_connector_services_service.ListClientConnectorServicesRequest, - response: client_connector_services_service.ListClientConnectorServicesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.beyondcorp_clientconnectorservices_v1.types.ListClientConnectorServicesRequest): - The initial request object. - response (google.cloud.beyondcorp_clientconnectorservices_v1.types.ListClientConnectorServicesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = client_connector_services_service.ListClientConnectorServicesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[client_connector_services_service.ListClientConnectorServicesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[client_connector_services_service.ClientConnectorService]: - async def async_generator(): - async for page in self.pages: - for response in page.client_connector_services: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/transports/README.rst b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/transports/README.rst deleted file mode 100644 index e7210f9e9197..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`ClientConnectorServicesServiceTransport` is the ABC for all transports. -- public child `ClientConnectorServicesServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `ClientConnectorServicesServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseClientConnectorServicesServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `ClientConnectorServicesServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/transports/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/transports/__init__.py deleted file mode 100644 index 64ab5e6fff07..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import ClientConnectorServicesServiceTransport -from .grpc import ClientConnectorServicesServiceGrpcTransport -from .grpc_asyncio import ClientConnectorServicesServiceGrpcAsyncIOTransport -from .rest import ClientConnectorServicesServiceRestTransport -from .rest import ClientConnectorServicesServiceRestInterceptor - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[ClientConnectorServicesServiceTransport]] -_transport_registry['grpc'] = ClientConnectorServicesServiceGrpcTransport -_transport_registry['grpc_asyncio'] = ClientConnectorServicesServiceGrpcAsyncIOTransport -_transport_registry['rest'] = ClientConnectorServicesServiceRestTransport - -__all__ = ( - 'ClientConnectorServicesServiceTransport', - 'ClientConnectorServicesServiceGrpcTransport', - 'ClientConnectorServicesServiceGrpcAsyncIOTransport', - 'ClientConnectorServicesServiceRestTransport', - 'ClientConnectorServicesServiceRestInterceptor', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/transports/base.py b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/transports/base.py deleted file mode 100644 index 61a08e951cac..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/transports/base.py +++ /dev/null @@ -1,347 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.beyondcorp_clientconnectorservices_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.beyondcorp_clientconnectorservices_v1.types import client_connector_services_service -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class ClientConnectorServicesServiceTransport(abc.ABC): - """Abstract transport class for ClientConnectorServicesService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'beyondcorp.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'beyondcorp.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. 
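-        # Precedence sketch: an explicit `credentials` object wins; otherwise
-        # `credentials_file` is loaded; otherwise Application Default
-        # Credentials are resolved via google.auth.default(). Passing both
-        # explicit forms is rejected just below. A hypothetical file-based
-        # construction of a concrete transport:
-        #
-        #     transport = ClientConnectorServicesServiceGrpcTransport(
-        #         credentials_file="/path/to/service-account.json")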
- if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.list_client_connector_services: gapic_v1.method.wrap_method( - self.list_client_connector_services, - default_timeout=None, - client_info=client_info, - ), - self.get_client_connector_service: gapic_v1.method.wrap_method( - self.get_client_connector_service, - default_timeout=None, - client_info=client_info, - ), - self.create_client_connector_service: gapic_v1.method.wrap_method( - self.create_client_connector_service, - default_timeout=None, - client_info=client_info, - ), - self.update_client_connector_service: gapic_v1.method.wrap_method( - self.update_client_connector_service, - default_timeout=None, - client_info=client_info, - ), - self.delete_client_connector_service: gapic_v1.method.wrap_method( - self.delete_client_connector_service, - default_timeout=None, - client_info=client_info, - ), - self.get_location: gapic_v1.method.wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - self.list_locations: gapic_v1.method.wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - self.get_iam_policy: gapic_v1.method.wrap_method( - self.get_iam_policy, - default_timeout=None, - client_info=client_info, - ), - self.set_iam_policy: gapic_v1.method.wrap_method( - self.set_iam_policy, - default_timeout=None, - client_info=client_info, - ), - self.test_iam_permissions: gapic_v1.method.wrap_method( - self.test_iam_permissions, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: gapic_v1.method.wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: gapic_v1.method.wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: gapic_v1.method.wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: gapic_v1.method.wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. 
warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! - """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def list_client_connector_services(self) -> Callable[ - [client_connector_services_service.ListClientConnectorServicesRequest], - Union[ - client_connector_services_service.ListClientConnectorServicesResponse, - Awaitable[client_connector_services_service.ListClientConnectorServicesResponse] - ]]: - raise NotImplementedError() - - @property - def get_client_connector_service(self) -> Callable[ - [client_connector_services_service.GetClientConnectorServiceRequest], - Union[ - client_connector_services_service.ClientConnectorService, - Awaitable[client_connector_services_service.ClientConnectorService] - ]]: - raise NotImplementedError() - - @property - def create_client_connector_service(self) -> Callable[ - [client_connector_services_service.CreateClientConnectorServiceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def update_client_connector_service(self) -> Callable[ - [client_connector_services_service.UpdateClientConnectorServiceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_client_connector_service(self) -> Callable[ - [client_connector_services_service.DeleteClientConnectorServiceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def delete_operation( - self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def set_iam_policy( - self, - ) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], - ]: - raise NotImplementedError() - - @property - def get_iam_policy( - self, - ) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], - ]: - raise NotImplementedError() - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Union[ - iam_policy_pb2.TestIamPermissionsResponse, - Awaitable[iam_policy_pb2.TestIamPermissionsResponse], - ], - ]: - raise NotImplementedError() - - @property - def get_location(self, - ) -> Callable[ - [locations_pb2.GetLocationRequest], - Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], - ]: - raise NotImplementedError() - - @property - def list_locations(self, - ) -> Callable[ - [locations_pb2.ListLocationsRequest], - Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], - ]: - raise 
NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'ClientConnectorServicesServiceTransport', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/transports/grpc.py b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/transports/grpc.py deleted file mode 100644 index faee26cf8b1e..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/transports/grpc.py +++ /dev/null @@ -1,676 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json -import logging as std_logging -import pickle -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore - -from google.cloud.beyondcorp_clientconnectorservices_v1.types import client_connector_services_service -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from .base import ClientConnectorServicesServiceTransport, DEFAULT_CLIENT_INFO - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER - def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - 
_LOGGER.debug(
-                f"Sending request for {client_call_details.method}",
-                extra = {
-                    "serviceName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService",
-                    "rpcName": client_call_details.method,
-                    "request": grpc_request,
-                    "metadata": grpc_request["metadata"],
-                },
-            )
-
-        response = continuation(client_call_details, request)
-        if logging_enabled:  # pragma: NO COVER
-            response_metadata = response.trailing_metadata()
-            # Convert gRPC metadata `<class 'grpc.aio.Metadata'>` to list of tuples
-            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
-            result = response.result()
-            if isinstance(result, proto.Message):
-                response_payload = type(result).to_json(result)
-            elif isinstance(result, google.protobuf.message.Message):
-                response_payload = MessageToJson(result)
-            else:
-                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
-            grpc_response = {
-                "payload": response_payload,
-                "metadata": metadata,
-                "status": "OK",
-            }
-            _LOGGER.debug(
-                f"Received response for {client_call_details.method}.",
-                extra = {
-                    "serviceName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService",
-                    "rpcName": client_call_details.method,
-                    "response": grpc_response,
-                    "metadata": grpc_response["metadata"],
-                },
-            )
-        return response
-
-
-class ClientConnectorServicesServiceGrpcTransport(ClientConnectorServicesServiceTransport):
-    """gRPC backend transport for ClientConnectorServicesService.
-
-    API Overview:
-
-    The ``beyondcorp.googleapis.com`` service implements the Google
-    Cloud BeyondCorp API.
-
-    Data Model:
-
-    The ClientConnectorServicesService exposes the following resources:
-
-    -  Client Connector Services, named as follows:
-       ``projects/{project_id}/locations/{location_id}/client_connector_services/{client_connector_service_id}``.
-
-    This class defines the same methods as the primary client, so the
-    primary client can load the underlying transport implementation
-    and call it.
-
-    It sends protocol buffers over the wire using gRPC (which is built on
-    top of HTTP/2); the ``grpcio`` package must be installed.
-    """
-    _stubs: Dict[str, Callable]
-
-    def __init__(self, *,
-            host: str = 'beyondcorp.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None,
-            api_mtls_endpoint: Optional[str] = None,
-            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
-            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            api_audience: Optional[str] = None,
-            ) -> None:
-        """Instantiate the transport.
-
-        Args:
-            host (Optional[str]):
-                 The hostname to connect to (default: 'beyondcorp.googleapis.com').
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-                This argument is ignored if a ``channel`` instance is provided.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if a ``channel`` instance is provided.
-            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
-                ignored if a ``channel`` instance is provided.
-            channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]):
-                A ``Channel`` instance through which to make calls, or a Callable
-                that constructs and returns one. If set to None, ``self.create_channel``
-                is used to create the channel. If a Callable is given, it will be called
-                with the same arguments as used in ``self.create_channel``.
-            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
-                If provided, it overrides the ``host`` argument and tries to create
-                a mutual TLS channel with client SSL credentials from
-                ``client_cert_source`` or application default SSL credentials.
-            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                Deprecated. A callback to provide client SSL certificate bytes and
-                private key bytes, both in PEM format. It is ignored if
-                ``api_mtls_endpoint`` is None.
-            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
-                for the grpc channel. It is ignored if a ``channel`` instance is provided.
-            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                A callback to provide client certificate bytes and private key bytes,
-                both in PEM format. It is used to configure a mutual TLS channel. It is
-                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
-                be used for service account credentials.
-
-        Raises:
-          google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-              creation failed for any reason.
-          google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
-              and ``credentials_file`` are passed.
-        """
-        self._grpc_channel = None
-        self._ssl_channel_credentials = ssl_channel_credentials
-        self._stubs: Dict[str, Callable] = {}
-        self._operations_client: Optional[operations_v1.OperationsClient] = None
-
-        if api_mtls_endpoint:
-            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
-        if client_cert_source:
-            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
-
-        if isinstance(channel, grpc.Channel):
-            # Ignore credentials if a channel was passed.
-            credentials = None
-            self._ignore_credentials = True
-            # If a channel was explicitly provided, set it.
-            self._grpc_channel = channel
-            self._ssl_channel_credentials = None
-
-        else:
-            if api_mtls_endpoint:
-                host = api_mtls_endpoint
-
-                # Create SSL credentials with client_cert_source or application
-                # default SSL credentials.
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) - - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'beyondcorp.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. 
- """ - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def list_client_connector_services(self) -> Callable[ - [client_connector_services_service.ListClientConnectorServicesRequest], - client_connector_services_service.ListClientConnectorServicesResponse]: - r"""Return a callable for the list client connector services method over gRPC. - - Lists ClientConnectorServices in a given project and - location. - - Returns: - Callable[[~.ListClientConnectorServicesRequest], - ~.ListClientConnectorServicesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_client_connector_services' not in self._stubs: - self._stubs['list_client_connector_services'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService/ListClientConnectorServices', - request_serializer=client_connector_services_service.ListClientConnectorServicesRequest.serialize, - response_deserializer=client_connector_services_service.ListClientConnectorServicesResponse.deserialize, - ) - return self._stubs['list_client_connector_services'] - - @property - def get_client_connector_service(self) -> Callable[ - [client_connector_services_service.GetClientConnectorServiceRequest], - client_connector_services_service.ClientConnectorService]: - r"""Return a callable for the get client connector service method over gRPC. - - Gets details of a single ClientConnectorService. - - Returns: - Callable[[~.GetClientConnectorServiceRequest], - ~.ClientConnectorService]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_client_connector_service' not in self._stubs: - self._stubs['get_client_connector_service'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService/GetClientConnectorService', - request_serializer=client_connector_services_service.GetClientConnectorServiceRequest.serialize, - response_deserializer=client_connector_services_service.ClientConnectorService.deserialize, - ) - return self._stubs['get_client_connector_service'] - - @property - def create_client_connector_service(self) -> Callable[ - [client_connector_services_service.CreateClientConnectorServiceRequest], - operations_pb2.Operation]: - r"""Return a callable for the create client connector - service method over gRPC. - - Creates a new ClientConnectorService in a given - project and location. - - Returns: - Callable[[~.CreateClientConnectorServiceRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_client_connector_service' not in self._stubs: - self._stubs['create_client_connector_service'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService/CreateClientConnectorService', - request_serializer=client_connector_services_service.CreateClientConnectorServiceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_client_connector_service'] - - @property - def update_client_connector_service(self) -> Callable[ - [client_connector_services_service.UpdateClientConnectorServiceRequest], - operations_pb2.Operation]: - r"""Return a callable for the update client connector - service method over gRPC. - - Updates the parameters of a single - ClientConnectorService. - - Returns: - Callable[[~.UpdateClientConnectorServiceRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_client_connector_service' not in self._stubs: - self._stubs['update_client_connector_service'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService/UpdateClientConnectorService', - request_serializer=client_connector_services_service.UpdateClientConnectorServiceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_client_connector_service'] - - @property - def delete_client_connector_service(self) -> Callable[ - [client_connector_services_service.DeleteClientConnectorServiceRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete client connector - service method over gRPC. - - Deletes a single ClientConnectorService. - - Returns: - Callable[[~.DeleteClientConnectorServiceRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_client_connector_service' not in self._stubs: - self._stubs['delete_client_connector_service'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService/DeleteClientConnectorService', - request_serializer=client_connector_services_service.DeleteClientConnectorServiceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_client_connector_service'] - - def close(self): - self._logged_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_location" not in self._stubs: - self._stubs["get_location"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - @property - def set_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - Sets the IAM access control policy on the specified - function. Replaces any existing policy. - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( - "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["set_iam_policy"] - - @property - def get_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - Gets the IAM access control policy for a function. - Returns an empty policy if the function exists and does - not have a policy set. - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( - "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["get_iam_policy"] - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse - ]: - r"""Return a callable for the test iam permissions method over gRPC. - Tests the specified permissions against the IAM access control - policy for a function. If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( - "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs["test_iam_permissions"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'ClientConnectorServicesServiceGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/transports/grpc_asyncio.py deleted file mode 100644 index 698e30490483..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,761 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#
-import inspect
-import json
-import pickle
-import logging as std_logging
-import warnings
-from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
-
-from google.api_core import gapic_v1
-from google.api_core import grpc_helpers_async
-from google.api_core import exceptions as core_exceptions
-from google.api_core import retry_async as retries
-from google.api_core import operations_v1
-from google.auth import credentials as ga_credentials   # type: ignore
-from google.auth.transport.grpc import SslCredentials  # type: ignore
-from google.protobuf.json_format import MessageToJson
-import google.protobuf.message
-
-import grpc                        # type: ignore
-import proto                       # type: ignore
-from grpc.experimental import aio  # type: ignore
-
-from google.cloud.beyondcorp_clientconnectorservices_v1.types import client_connector_services_service
-from google.cloud.location import locations_pb2 # type: ignore
-from google.iam.v1 import iam_policy_pb2  # type: ignore
-from google.iam.v1 import policy_pb2  # type: ignore
-from google.longrunning import operations_pb2 # type: ignore
-from .base import ClientConnectorServicesServiceTransport, DEFAULT_CLIENT_INFO
-from .grpc import ClientConnectorServicesServiceGrpcTransport
-
-try:
-    from google.api_core import client_logging  # type: ignore
-    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
-except ImportError:  # pragma: NO COVER
-    CLIENT_LOGGING_SUPPORTED = False
-
-_LOGGER = std_logging.getLogger(__name__)
-
-
-class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor):  # pragma: NO COVER
-    async def intercept_unary_unary(self, continuation, client_call_details, request):
-        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG)
-        if logging_enabled:  # pragma: NO COVER
-            request_metadata = client_call_details.metadata
-            if isinstance(request, proto.Message):
-                request_payload = type(request).to_json(request)
-            elif isinstance(request, google.protobuf.message.Message):
-                request_payload = MessageToJson(request)
-            else:
-                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
-
-            request_metadata = {
-                key: value.decode("utf-8") if isinstance(value, bytes) else value
-                for key, value in request_metadata
-            }
-            grpc_request = {
-                "payload": request_payload,
-                "requestMethod": "grpc",
-                "metadata": dict(request_metadata),
-            }
-            _LOGGER.debug(
-                f"Sending request for {client_call_details.method}",
-                extra = {
-                    "serviceName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService",
-                    "rpcName": str(client_call_details.method),
-                    "request": grpc_request,
-                    "metadata": grpc_request["metadata"],
-                },
-            )
-        response = await continuation(client_call_details, request)
-        if logging_enabled:  # pragma: NO COVER
-            response_metadata = await response.trailing_metadata()
-            # Convert gRPC metadata `<class 'grpc.aio.Metadata'>` to list of tuples
-            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
-            result = await response
-            if isinstance(result, proto.Message):
-                response_payload = type(result).to_json(result)
-            elif isinstance(result, google.protobuf.message.Message):
-                response_payload = MessageToJson(result)
-            else:
-                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
-            grpc_response = {
-                "payload": response_payload,
-                "metadata": metadata,
-                "status": "OK",
-            }
-            _LOGGER.debug(
-                f"Received response to rpc {client_call_details.method}.",
-                extra = {
-                    "serviceName":
"google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService", - "rpcName": str(client_call_details.method), - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class ClientConnectorServicesServiceGrpcAsyncIOTransport(ClientConnectorServicesServiceTransport): - """gRPC AsyncIO backend transport for ClientConnectorServicesService. - - API Overview: - - The ``beyondcorp.googleapis.com`` service implements the Google - Cloud BeyondCorp API. - - Data Model: - - The ClientConnectorServicesService exposes the following resources: - - - Client Connector Services, named as follows: - ``projects/{project_id}/locations/{location_id}/client_connector_services/{client_connector_service_id}``. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'beyondcorp.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'beyondcorp.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'beyondcorp.googleapis.com'). 
- credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, aio.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. 
- self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def list_client_connector_services(self) -> Callable[ - [client_connector_services_service.ListClientConnectorServicesRequest], - Awaitable[client_connector_services_service.ListClientConnectorServicesResponse]]: - r"""Return a callable for the list client connector services method over gRPC. - - Lists ClientConnectorServices in a given project and - location. - - Returns: - Callable[[~.ListClientConnectorServicesRequest], - Awaitable[~.ListClientConnectorServicesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_client_connector_services' not in self._stubs: - self._stubs['list_client_connector_services'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService/ListClientConnectorServices', - request_serializer=client_connector_services_service.ListClientConnectorServicesRequest.serialize, - response_deserializer=client_connector_services_service.ListClientConnectorServicesResponse.deserialize, - ) - return self._stubs['list_client_connector_services'] - - @property - def get_client_connector_service(self) -> Callable[ - [client_connector_services_service.GetClientConnectorServiceRequest], - Awaitable[client_connector_services_service.ClientConnectorService]]: - r"""Return a callable for the get client connector service method over gRPC. - - Gets details of a single ClientConnectorService. - - Returns: - Callable[[~.GetClientConnectorServiceRequest], - Awaitable[~.ClientConnectorService]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_client_connector_service' not in self._stubs: - self._stubs['get_client_connector_service'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService/GetClientConnectorService', - request_serializer=client_connector_services_service.GetClientConnectorServiceRequest.serialize, - response_deserializer=client_connector_services_service.ClientConnectorService.deserialize, - ) - return self._stubs['get_client_connector_service'] - - @property - def create_client_connector_service(self) -> Callable[ - [client_connector_services_service.CreateClientConnectorServiceRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create client connector - service method over gRPC. - - Creates a new ClientConnectorService in a given - project and location. - - Returns: - Callable[[~.CreateClientConnectorServiceRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_client_connector_service' not in self._stubs: - self._stubs['create_client_connector_service'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService/CreateClientConnectorService', - request_serializer=client_connector_services_service.CreateClientConnectorServiceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_client_connector_service'] - - @property - def update_client_connector_service(self) -> Callable[ - [client_connector_services_service.UpdateClientConnectorServiceRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the update client connector - service method over gRPC. - - Updates the parameters of a single - ClientConnectorService. 
- - Returns: - Callable[[~.UpdateClientConnectorServiceRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_client_connector_service' not in self._stubs: - self._stubs['update_client_connector_service'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService/UpdateClientConnectorService', - request_serializer=client_connector_services_service.UpdateClientConnectorServiceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['update_client_connector_service'] - - @property - def delete_client_connector_service(self) -> Callable[ - [client_connector_services_service.DeleteClientConnectorServiceRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete client connector - service method over gRPC. - - Deletes a single ClientConnectorService. - - Returns: - Callable[[~.DeleteClientConnectorServiceRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_client_connector_service' not in self._stubs: - self._stubs['delete_client_connector_service'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService/DeleteClientConnectorService', - request_serializer=client_connector_services_service.DeleteClientConnectorServiceRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_client_connector_service'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.list_client_connector_services: self._wrap_method( - self.list_client_connector_services, - default_timeout=None, - client_info=client_info, - ), - self.get_client_connector_service: self._wrap_method( - self.get_client_connector_service, - default_timeout=None, - client_info=client_info, - ), - self.create_client_connector_service: self._wrap_method( - self.create_client_connector_service, - default_timeout=None, - client_info=client_info, - ), - self.update_client_connector_service: self._wrap_method( - self.update_client_connector_service, - default_timeout=None, - client_info=client_info, - ), - self.delete_client_connector_service: self._wrap_method( - self.delete_client_connector_service, - default_timeout=None, - client_info=client_info, - ), - self.get_location: self._wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - self.list_locations: self._wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - self.get_iam_policy: self._wrap_method( - self.get_iam_policy, - default_timeout=None, - client_info=client_info, - ), - self.set_iam_policy: self._wrap_method( - self.set_iam_policy, - default_timeout=None, - client_info=client_info, - ), - self.test_iam_permissions: self._wrap_method( - self.test_iam_permissions, - default_timeout=None, - 
client_info=client_info, - ), - self.cancel_operation: self._wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: self._wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: self._wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: self._wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_location" not in self._stubs: - self._stubs["get_location"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - @property - def set_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - Sets the IAM access control policy on the specified - function. Replaces any existing policy. - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( - "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["set_iam_policy"] - - @property - def get_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - Gets the IAM access control policy for a function. - Returns an empty policy if the function exists and does - not have a policy set. - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( - "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["get_iam_policy"] - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse - ]: - r"""Return a callable for the test iam permissions method over gRPC. - Tests the specified permissions against the IAM access control - policy for a function. If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( - "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs["test_iam_permissions"] - - -__all__ = ( - 'ClientConnectorServicesServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/transports/rest.py b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/transports/rest.py deleted file mode 100644 index 8af74b9ca55a..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/transports/rest.py +++ /dev/null @@ -1,2328 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#
-import logging
-import json  # type: ignore
-
-from google.auth.transport.requests import AuthorizedSession  # type: ignore
-from google.auth import credentials as ga_credentials  # type: ignore
-from google.api_core import exceptions as core_exceptions
-from google.api_core import retry as retries
-from google.api_core import rest_helpers
-from google.api_core import rest_streaming
-from google.api_core import gapic_v1
-
-from google.protobuf import json_format
-from google.api_core import operations_v1
-from google.iam.v1 import iam_policy_pb2  # type: ignore
-from google.iam.v1 import policy_pb2  # type: ignore
-from google.cloud.location import locations_pb2  # type: ignore
-
-from requests import __version__ as requests_version
-import dataclasses
-from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
-import warnings
-
-
-from google.cloud.beyondcorp_clientconnectorservices_v1.types import client_connector_services_service
-from google.longrunning import operations_pb2  # type: ignore
-
-
-from .rest_base import _BaseClientConnectorServicesServiceRestTransport
-from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO
-
-try:
-    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
-except AttributeError:  # pragma: NO COVER
-    OptionalRetry = Union[retries.Retry, object, None]  # type: ignore
-
-try:
-    from google.api_core import client_logging  # type: ignore
-    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
-except ImportError:  # pragma: NO COVER
-    CLIENT_LOGGING_SUPPORTED = False
-
-_LOGGER = logging.getLogger(__name__)
-
-DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
-    gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
-    grpc_version=None,
-    rest_version=f"requests@{requests_version}",
-)
-
-
-class ClientConnectorServicesServiceRestInterceptor:
-    """Interceptor for ClientConnectorServicesService.
-
-    Interceptors are used to manipulate requests, request metadata, and responses
-    in arbitrary ways.
-    Example use cases include:
-    * Logging
-    * Verifying requests according to service or custom semantics
-    * Stripping extraneous information from responses
-
-    These use cases and more can be enabled by injecting an
-    instance of a custom subclass when constructing the ClientConnectorServicesServiceRestTransport.
-
-    .. code-block:: python
-
-        class MyCustomClientConnectorServicesServiceInterceptor(ClientConnectorServicesServiceRestInterceptor):
-            def pre_create_client_connector_service(self, request, metadata):
-                logging.info(f"Received request: {request}")
-                return request, metadata
-
-            def post_create_client_connector_service(self, response):
-                logging.info(f"Received response: {response}")
-                return response
-
-            def pre_delete_client_connector_service(self, request, metadata):
-                logging.info(f"Received request: {request}")
-                return request, metadata
-
-            def post_delete_client_connector_service(self, response):
-                logging.info(f"Received response: {response}")
-                return response
-
-            def pre_get_client_connector_service(self, request, metadata):
-                logging.info(f"Received request: {request}")
-                return request, metadata
-
-            def post_get_client_connector_service(self, response):
-                logging.info(f"Received response: {response}")
-                return response
-
-            def pre_list_client_connector_services(self, request, metadata):
-                logging.info(f"Received request: {request}")
-                return request, metadata
-
-            def post_list_client_connector_services(self, response):
-                logging.info(f"Received response: {response}")
-                return response
-
-            def pre_update_client_connector_service(self, request, metadata):
-                logging.info(f"Received request: {request}")
-                return request, metadata
-
-            def post_update_client_connector_service(self, response):
-                logging.info(f"Received response: {response}")
-                return response
-
-        transport = ClientConnectorServicesServiceRestTransport(interceptor=MyCustomClientConnectorServicesServiceInterceptor())
-        client = ClientConnectorServicesServiceClient(transport=transport)
-
-
-    """
-    def pre_create_client_connector_service(self, request: client_connector_services_service.CreateClientConnectorServiceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[client_connector_services_service.CreateClientConnectorServiceRequest, Sequence[Tuple[str, Union[str, bytes]]]]:
-        """Pre-rpc interceptor for create_client_connector_service
-
-        Override in a subclass to manipulate the request or metadata
-        before they are sent to the ClientConnectorServicesService server.
-        """
-        return request, metadata
-
-    def post_create_client_connector_service(self, response: operations_pb2.Operation) -> operations_pb2.Operation:
-        """Post-rpc interceptor for create_client_connector_service
-
-        DEPRECATED. Please use the `post_create_client_connector_service_with_metadata`
-        interceptor instead.
-
-        Override in a subclass to read or manipulate the response
-        after it is returned by the ClientConnectorServicesService server but before
-        it is returned to user code. This `post_create_client_connector_service` interceptor runs
-        before the `post_create_client_connector_service_with_metadata` interceptor.
-        """
-        return response
-
-    def post_create_client_connector_service_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]:
-        """Post-rpc interceptor for create_client_connector_service
-
-        Override in a subclass to read or manipulate the response or metadata after it
-        is returned by the ClientConnectorServicesService server but before it is returned to user code.
-
-        We recommend only using this `post_create_client_connector_service_with_metadata`
-        interceptor in new development instead of the `post_create_client_connector_service` interceptor.
- When both interceptors are used, this `post_create_client_connector_service_with_metadata` interceptor runs after the - `post_create_client_connector_service` interceptor. The (possibly modified) response returned by - `post_create_client_connector_service` will be passed to - `post_create_client_connector_service_with_metadata`. - """ - return response, metadata - - def pre_delete_client_connector_service(self, request: client_connector_services_service.DeleteClientConnectorServiceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[client_connector_services_service.DeleteClientConnectorServiceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_client_connector_service - - Override in a subclass to manipulate the request or metadata - before they are sent to the ClientConnectorServicesService server. - """ - return request, metadata - - def post_delete_client_connector_service(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_client_connector_service - - DEPRECATED. Please use the `post_delete_client_connector_service_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the ClientConnectorServicesService server but before - it is returned to user code. This `post_delete_client_connector_service` interceptor runs - before the `post_delete_client_connector_service_with_metadata` interceptor. - """ - return response - - def post_delete_client_connector_service_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for delete_client_connector_service - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the ClientConnectorServicesService server but before it is returned to user code. - - We recommend only using this `post_delete_client_connector_service_with_metadata` - interceptor in new development instead of the `post_delete_client_connector_service` interceptor. - When both interceptors are used, this `post_delete_client_connector_service_with_metadata` interceptor runs after the - `post_delete_client_connector_service` interceptor. The (possibly modified) response returned by - `post_delete_client_connector_service` will be passed to - `post_delete_client_connector_service_with_metadata`. - """ - return response, metadata - - def pre_get_client_connector_service(self, request: client_connector_services_service.GetClientConnectorServiceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[client_connector_services_service.GetClientConnectorServiceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_client_connector_service - - Override in a subclass to manipulate the request or metadata - before they are sent to the ClientConnectorServicesService server. - """ - return request, metadata - - def post_get_client_connector_service(self, response: client_connector_services_service.ClientConnectorService) -> client_connector_services_service.ClientConnectorService: - """Post-rpc interceptor for get_client_connector_service - - DEPRECATED. Please use the `post_get_client_connector_service_with_metadata` - interceptor instead. 
- - Override in a subclass to read or manipulate the response - after it is returned by the ClientConnectorServicesService server but before - it is returned to user code. This `post_get_client_connector_service` interceptor runs - before the `post_get_client_connector_service_with_metadata` interceptor. - """ - return response - - def post_get_client_connector_service_with_metadata(self, response: client_connector_services_service.ClientConnectorService, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[client_connector_services_service.ClientConnectorService, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_client_connector_service - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the ClientConnectorServicesService server but before it is returned to user code. - - We recommend only using this `post_get_client_connector_service_with_metadata` - interceptor in new development instead of the `post_get_client_connector_service` interceptor. - When both interceptors are used, this `post_get_client_connector_service_with_metadata` interceptor runs after the - `post_get_client_connector_service` interceptor. The (possibly modified) response returned by - `post_get_client_connector_service` will be passed to - `post_get_client_connector_service_with_metadata`. - """ - return response, metadata - - def pre_list_client_connector_services(self, request: client_connector_services_service.ListClientConnectorServicesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[client_connector_services_service.ListClientConnectorServicesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_client_connector_services - - Override in a subclass to manipulate the request or metadata - before they are sent to the ClientConnectorServicesService server. - """ - return request, metadata - - def post_list_client_connector_services(self, response: client_connector_services_service.ListClientConnectorServicesResponse) -> client_connector_services_service.ListClientConnectorServicesResponse: - """Post-rpc interceptor for list_client_connector_services - - DEPRECATED. Please use the `post_list_client_connector_services_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the ClientConnectorServicesService server but before - it is returned to user code. This `post_list_client_connector_services` interceptor runs - before the `post_list_client_connector_services_with_metadata` interceptor. - """ - return response - - def post_list_client_connector_services_with_metadata(self, response: client_connector_services_service.ListClientConnectorServicesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[client_connector_services_service.ListClientConnectorServicesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_client_connector_services - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the ClientConnectorServicesService server but before it is returned to user code. - - We recommend only using this `post_list_client_connector_services_with_metadata` - interceptor in new development instead of the `post_list_client_connector_services` interceptor. - When both interceptors are used, this `post_list_client_connector_services_with_metadata` interceptor runs after the - `post_list_client_connector_services` interceptor. 
The (possibly modified) response returned by - `post_list_client_connector_services` will be passed to - `post_list_client_connector_services_with_metadata`. - """ - return response, metadata - - def pre_update_client_connector_service(self, request: client_connector_services_service.UpdateClientConnectorServiceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[client_connector_services_service.UpdateClientConnectorServiceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_client_connector_service - - Override in a subclass to manipulate the request or metadata - before they are sent to the ClientConnectorServicesService server. - """ - return request, metadata - - def post_update_client_connector_service(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for update_client_connector_service - - DEPRECATED. Please use the `post_update_client_connector_service_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the ClientConnectorServicesService server but before - it is returned to user code. This `post_update_client_connector_service` interceptor runs - before the `post_update_client_connector_service_with_metadata` interceptor. - """ - return response - - def post_update_client_connector_service_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_client_connector_service - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the ClientConnectorServicesService server but before it is returned to user code. - - We recommend only using this `post_update_client_connector_service_with_metadata` - interceptor in new development instead of the `post_update_client_connector_service` interceptor. - When both interceptors are used, this `post_update_client_connector_service_with_metadata` interceptor runs after the - `post_update_client_connector_service` interceptor. The (possibly modified) response returned by - `post_update_client_connector_service` will be passed to - `post_update_client_connector_service_with_metadata`. - """ - return response, metadata - - def pre_get_location( - self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_location - - Override in a subclass to manipulate the request or metadata - before they are sent to the ClientConnectorServicesService server. - """ - return request, metadata - - def post_get_location( - self, response: locations_pb2.Location - ) -> locations_pb2.Location: - """Post-rpc interceptor for get_location - - Override in a subclass to manipulate the response - after it is returned by the ClientConnectorServicesService server but before - it is returned to user code. - """ - return response - - def pre_list_locations( - self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_locations - - Override in a subclass to manipulate the request or metadata - before they are sent to the ClientConnectorServicesService server. 
- """ - return request, metadata - - def post_list_locations( - self, response: locations_pb2.ListLocationsResponse - ) -> locations_pb2.ListLocationsResponse: - """Post-rpc interceptor for list_locations - - Override in a subclass to manipulate the response - after it is returned by the ClientConnectorServicesService server but before - it is returned to user code. - """ - return response - - def pre_get_iam_policy( - self, request: iam_policy_pb2.GetIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the ClientConnectorServicesService server. - """ - return request, metadata - - def post_get_iam_policy( - self, response: policy_pb2.Policy - ) -> policy_pb2.Policy: - """Post-rpc interceptor for get_iam_policy - - Override in a subclass to manipulate the response - after it is returned by the ClientConnectorServicesService server but before - it is returned to user code. - """ - return response - - def pre_set_iam_policy( - self, request: iam_policy_pb2.SetIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for set_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the ClientConnectorServicesService server. - """ - return request, metadata - - def post_set_iam_policy( - self, response: policy_pb2.Policy - ) -> policy_pb2.Policy: - """Post-rpc interceptor for set_iam_policy - - Override in a subclass to manipulate the response - after it is returned by the ClientConnectorServicesService server but before - it is returned to user code. - """ - return response - - def pre_test_iam_permissions( - self, request: iam_policy_pb2.TestIamPermissionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for test_iam_permissions - - Override in a subclass to manipulate the request or metadata - before they are sent to the ClientConnectorServicesService server. - """ - return request, metadata - - def post_test_iam_permissions( - self, response: iam_policy_pb2.TestIamPermissionsResponse - ) -> iam_policy_pb2.TestIamPermissionsResponse: - """Post-rpc interceptor for test_iam_permissions - - Override in a subclass to manipulate the response - after it is returned by the ClientConnectorServicesService server but before - it is returned to user code. - """ - return response - - def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the ClientConnectorServicesService server. - """ - return request, metadata - - def post_cancel_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the response - after it is returned by the ClientConnectorServicesService server but before - it is returned to user code. 
- """ - return response - - def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the ClientConnectorServicesService server. - """ - return request, metadata - - def post_delete_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for delete_operation - - Override in a subclass to manipulate the response - after it is returned by the ClientConnectorServicesService server but before - it is returned to user code. - """ - return response - - def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the ClientConnectorServicesService server. - """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the ClientConnectorServicesService server but before - it is returned to user code. - """ - return response - - def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_operations - - Override in a subclass to manipulate the request or metadata - before they are sent to the ClientConnectorServicesService server. - """ - return request, metadata - - def post_list_operations( - self, response: operations_pb2.ListOperationsResponse - ) -> operations_pb2.ListOperationsResponse: - """Post-rpc interceptor for list_operations - - Override in a subclass to manipulate the response - after it is returned by the ClientConnectorServicesService server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class ClientConnectorServicesServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: ClientConnectorServicesServiceRestInterceptor - - -class ClientConnectorServicesServiceRestTransport(_BaseClientConnectorServicesServiceRestTransport): - """REST backend synchronous transport for ClientConnectorServicesService. - - API Overview: - - The ``beyondcorp.googleapis.com`` service implements the Google - Cloud BeyondCorp API. - - Data Model: - - The ClientConnectorServicesService exposes the following resources: - - - Client Connector Services, named as follows: - ``projects/{project_id}/locations/{location_id}/client_connector_services/{client_connector_service_id}``. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'beyondcorp.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[ClientConnectorServicesServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'beyondcorp.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. - # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - url_scheme=url_scheme, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or ClientConnectorServicesServiceRestInterceptor() - self._prep_wrapped_messages(client_info) - - @property - def operations_client(self) -> operations_v1.AbstractOperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Only create a new client if we do not already have one. 
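-        # The http_options mapping below routes the google.longrunning.Operations
-        # mixin RPCs (used to poll the long-running operations returned by
-        # create/update/delete) onto this service's v1 REST paths, so the
-        # operations client speaks the same HTTP/1.1 surface as the transport.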
- if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.CancelOperation': [ - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - ], - 'google.longrunning.Operations.DeleteOperation': [ - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ], - 'google.longrunning.Operations.GetOperation': [ - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ], - 'google.longrunning.Operations.ListOperations': [ - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', - }, - ], - } - - rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1") - - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) - - # Return the client from cache. - return self._operations_client - - class _CreateClientConnectorService(_BaseClientConnectorServicesServiceRestTransport._BaseCreateClientConnectorService, ClientConnectorServicesServiceRestStub): - def __hash__(self): - return hash("ClientConnectorServicesServiceRestTransport.CreateClientConnectorService") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: client_connector_services_service.CreateClientConnectorServiceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the create client connector - service method over HTTP. - - Args: - request (~.client_connector_services_service.CreateClientConnectorServiceRequest): - The request object. Message for creating a - ClientConnectorService. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = _BaseClientConnectorServicesServiceRestTransport._BaseCreateClientConnectorService._get_http_options() - - request, metadata = self._interceptor.pre_create_client_connector_service(request, metadata) - transcoded_request = _BaseClientConnectorServicesServiceRestTransport._BaseCreateClientConnectorService._get_transcoded_request(http_options, request) - - body = _BaseClientConnectorServicesServiceRestTransport._BaseCreateClientConnectorService._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseClientConnectorServicesServiceRestTransport._BaseCreateClientConnectorService._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.clientconnectorservices_v1.ClientConnectorServicesServiceClient.CreateClientConnectorService", - extra = { - "serviceName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService", - "rpcName": "CreateClientConnectorService", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ClientConnectorServicesServiceRestTransport._CreateClientConnectorService._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_client_connector_service(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_client_connector_service_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.beyondcorp.clientconnectorservices_v1.ClientConnectorServicesServiceClient.create_client_connector_service", - extra = { - "serviceName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService", - "rpcName": "CreateClientConnectorService", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteClientConnectorService(_BaseClientConnectorServicesServiceRestTransport._BaseDeleteClientConnectorService, ClientConnectorServicesServiceRestStub): - def __hash__(self): - return hash("ClientConnectorServicesServiceRestTransport.DeleteClientConnectorService") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: client_connector_services_service.DeleteClientConnectorServiceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete client connector - service method over HTTP. - - Args: - request (~.client_connector_services_service.DeleteClientConnectorServiceRequest): - The request object. Message for deleting a - ClientConnectorService. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
- - """ - - http_options = _BaseClientConnectorServicesServiceRestTransport._BaseDeleteClientConnectorService._get_http_options() - - request, metadata = self._interceptor.pre_delete_client_connector_service(request, metadata) - transcoded_request = _BaseClientConnectorServicesServiceRestTransport._BaseDeleteClientConnectorService._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseClientConnectorServicesServiceRestTransport._BaseDeleteClientConnectorService._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.clientconnectorservices_v1.ClientConnectorServicesServiceClient.DeleteClientConnectorService", - extra = { - "serviceName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService", - "rpcName": "DeleteClientConnectorService", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ClientConnectorServicesServiceRestTransport._DeleteClientConnectorService._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_delete_client_connector_service(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_client_connector_service_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.beyondcorp.clientconnectorservices_v1.ClientConnectorServicesServiceClient.delete_client_connector_service", - extra = { - "serviceName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService", - "rpcName": "DeleteClientConnectorService", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetClientConnectorService(_BaseClientConnectorServicesServiceRestTransport._BaseGetClientConnectorService, ClientConnectorServicesServiceRestStub): - def __hash__(self): - return hash("ClientConnectorServicesServiceRestTransport.GetClientConnectorService") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - 
timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: client_connector_services_service.GetClientConnectorServiceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> client_connector_services_service.ClientConnectorService: - r"""Call the get client connector - service method over HTTP. - - Args: - request (~.client_connector_services_service.GetClientConnectorServiceRequest): - The request object. Message for getting a - ClientConnectorService. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.client_connector_services_service.ClientConnectorService: - Message describing - ClientConnectorService object. - - """ - - http_options = _BaseClientConnectorServicesServiceRestTransport._BaseGetClientConnectorService._get_http_options() - - request, metadata = self._interceptor.pre_get_client_connector_service(request, metadata) - transcoded_request = _BaseClientConnectorServicesServiceRestTransport._BaseGetClientConnectorService._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseClientConnectorServicesServiceRestTransport._BaseGetClientConnectorService._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.clientconnectorservices_v1.ClientConnectorServicesServiceClient.GetClientConnectorService", - extra = { - "serviceName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService", - "rpcName": "GetClientConnectorService", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ClientConnectorServicesServiceRestTransport._GetClientConnectorService._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
-            if response.status_code >= 400:
-                raise core_exceptions.from_http_response(response)
-
-            # Return the response
-            resp = client_connector_services_service.ClientConnectorService()
-            pb_resp = client_connector_services_service.ClientConnectorService.pb(resp)
-
-            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
-
-            resp = self._interceptor.post_get_client_connector_service(resp)
-            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
-            resp, _ = self._interceptor.post_get_client_connector_service_with_metadata(resp, response_metadata)
-            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
-                try:
-                    response_payload = client_connector_services_service.ClientConnectorService.to_json(resp)
-                except:
-                    response_payload = None
-                http_response = {
-                    "payload": response_payload,
-                    "headers": dict(response.headers),
-                    "status": response.status_code,
-                }
-                _LOGGER.debug(
-                    "Received response for google.cloud.beyondcorp.clientconnectorservices_v1.ClientConnectorServicesServiceClient.get_client_connector_service",
-                    extra = {
-                        "serviceName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService",
-                        "rpcName": "GetClientConnectorService",
-                        "metadata": http_response["headers"],
-                        "httpResponse": http_response,
-                    },
-                )
-            return resp
-
-    class _ListClientConnectorServices(_BaseClientConnectorServicesServiceRestTransport._BaseListClientConnectorServices, ClientConnectorServicesServiceRestStub):
-        def __hash__(self):
-            return hash("ClientConnectorServicesServiceRestTransport.ListClientConnectorServices")
-
-        @staticmethod
-        def _get_response(
-            host,
-            metadata,
-            query_params,
-            session,
-            timeout,
-            transcoded_request,
-            body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
-            headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
-            response = getattr(session, method)(
-                "{host}{uri}".format(host=host, uri=uri),
-                timeout=timeout,
-                headers=headers,
-                params=rest_helpers.flatten_query_params(query_params, strict=True),
-            )
-            return response
-
-        def __call__(self,
-                request: client_connector_services_service.ListClientConnectorServicesRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
-                ) -> client_connector_services_service.ListClientConnectorServicesResponse:
-            r"""Call the list client connector
-            services method over HTTP.
-
-            Args:
-                request (~.client_connector_services_service.ListClientConnectorServicesRequest):
-                    The request object. Message for requesting list of
-                    ClientConnectorServices.
-                retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                    should be retried.
-                timeout (float): The timeout for this request.
-                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                    sent along with the request as metadata. Normally, each value must be of type `str`,
-                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                    be of type `bytes`.
-
-            Returns:
-                ~.client_connector_services_service.ListClientConnectorServicesResponse:
-                    Message for response to listing
-                    ClientConnectorServices.
- - """ - - http_options = _BaseClientConnectorServicesServiceRestTransport._BaseListClientConnectorServices._get_http_options() - - request, metadata = self._interceptor.pre_list_client_connector_services(request, metadata) - transcoded_request = _BaseClientConnectorServicesServiceRestTransport._BaseListClientConnectorServices._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseClientConnectorServicesServiceRestTransport._BaseListClientConnectorServices._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.clientconnectorservices_v1.ClientConnectorServicesServiceClient.ListClientConnectorServices", - extra = { - "serviceName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService", - "rpcName": "ListClientConnectorServices", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ClientConnectorServicesServiceRestTransport._ListClientConnectorServices._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = client_connector_services_service.ListClientConnectorServicesResponse() - pb_resp = client_connector_services_service.ListClientConnectorServicesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_client_connector_services(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_client_connector_services_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = client_connector_services_service.ListClientConnectorServicesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.beyondcorp.clientconnectorservices_v1.ClientConnectorServicesServiceClient.list_client_connector_services", - extra = { - "serviceName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService", - "rpcName": "ListClientConnectorServices", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateClientConnectorService(_BaseClientConnectorServicesServiceRestTransport._BaseUpdateClientConnectorService, ClientConnectorServicesServiceRestStub): - def __hash__(self): - return hash("ClientConnectorServicesServiceRestTransport.UpdateClientConnectorService") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = 
transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: client_connector_services_service.UpdateClientConnectorServiceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the update client connector - service method over HTTP. - - Args: - request (~.client_connector_services_service.UpdateClientConnectorServiceRequest): - The request object. Message for updating a - ClientConnectorService - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - - """ - - http_options = _BaseClientConnectorServicesServiceRestTransport._BaseUpdateClientConnectorService._get_http_options() - - request, metadata = self._interceptor.pre_update_client_connector_service(request, metadata) - transcoded_request = _BaseClientConnectorServicesServiceRestTransport._BaseUpdateClientConnectorService._get_transcoded_request(http_options, request) - - body = _BaseClientConnectorServicesServiceRestTransport._BaseUpdateClientConnectorService._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseClientConnectorServicesServiceRestTransport._BaseUpdateClientConnectorService._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.clientconnectorservices_v1.ClientConnectorServicesServiceClient.UpdateClientConnectorService", - extra = { - "serviceName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService", - "rpcName": "UpdateClientConnectorService", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ClientConnectorServicesServiceRestTransport._UpdateClientConnectorService._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_client_connector_service(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_client_connector_service_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.beyondcorp.clientconnectorservices_v1.ClientConnectorServicesServiceClient.update_client_connector_service", - extra = { - "serviceName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService", - "rpcName": "UpdateClientConnectorService", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - @property - def create_client_connector_service(self) -> Callable[ - [client_connector_services_service.CreateClientConnectorServiceRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateClientConnectorService(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_client_connector_service(self) -> Callable[ - [client_connector_services_service.DeleteClientConnectorServiceRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteClientConnectorService(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_client_connector_service(self) -> Callable[ - [client_connector_services_service.GetClientConnectorServiceRequest], - client_connector_services_service.ClientConnectorService]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetClientConnectorService(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_client_connector_services(self) -> Callable[ - [client_connector_services_service.ListClientConnectorServicesRequest], - client_connector_services_service.ListClientConnectorServicesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListClientConnectorServices(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_client_connector_service(self) -> Callable[ - [client_connector_services_service.UpdateClientConnectorServiceRequest], - operations_pb2.Operation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._UpdateClientConnectorService(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_location(self): - return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore - - class _GetLocation(_BaseClientConnectorServicesServiceRestTransport._BaseGetLocation, ClientConnectorServicesServiceRestStub): - def __hash__(self): - return hash("ClientConnectorServicesServiceRestTransport.GetLocation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: locations_pb2.GetLocationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.Location: - - r"""Call the get location method over HTTP. - - Args: - request (locations_pb2.GetLocationRequest): - The request object for GetLocation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - locations_pb2.Location: Response from GetLocation method. - """ - - http_options = _BaseClientConnectorServicesServiceRestTransport._BaseGetLocation._get_http_options() - - request, metadata = self._interceptor.pre_get_location(request, metadata) - transcoded_request = _BaseClientConnectorServicesServiceRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseClientConnectorServicesServiceRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.clientconnectorservices_v1.ClientConnectorServicesServiceClient.GetLocation", - extra = { - "serviceName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService", - "rpcName": "GetLocation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ClientConnectorServicesServiceRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = locations_pb2.Location() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_location(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.beyondcorp.clientconnectorservices_v1.ClientConnectorServicesServiceAsyncClient.GetLocation", - extra = { - "serviceName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService", - "rpcName": "GetLocation", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def list_locations(self): - return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore - - class _ListLocations(_BaseClientConnectorServicesServiceRestTransport._BaseListLocations, ClientConnectorServicesServiceRestStub): - def __hash__(self): - return hash("ClientConnectorServicesServiceRestTransport.ListLocations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: locations_pb2.ListLocationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.ListLocationsResponse: - - r"""Call the list locations method over HTTP. - - Args: - request (locations_pb2.ListLocationsRequest): - The request object for ListLocations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - locations_pb2.ListLocationsResponse: Response from ListLocations method. 
- """ - - http_options = _BaseClientConnectorServicesServiceRestTransport._BaseListLocations._get_http_options() - - request, metadata = self._interceptor.pre_list_locations(request, metadata) - transcoded_request = _BaseClientConnectorServicesServiceRestTransport._BaseListLocations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseClientConnectorServicesServiceRestTransport._BaseListLocations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.clientconnectorservices_v1.ClientConnectorServicesServiceClient.ListLocations", - extra = { - "serviceName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService", - "rpcName": "ListLocations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ClientConnectorServicesServiceRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = locations_pb2.ListLocationsResponse() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_list_locations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.beyondcorp.clientconnectorservices_v1.ClientConnectorServicesServiceAsyncClient.ListLocations", - extra = { - "serviceName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService", - "rpcName": "ListLocations", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def get_iam_policy(self): - return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - - class _GetIamPolicy(_BaseClientConnectorServicesServiceRestTransport._BaseGetIamPolicy, ClientConnectorServicesServiceRestStub): - def __hash__(self): - return hash("ClientConnectorServicesServiceRestTransport.GetIamPolicy") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: iam_policy_pb2.GetIamPolicyRequest, *, - retry: 
OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> policy_pb2.Policy: - - r"""Call the get iam policy method over HTTP. - - Args: - request (iam_policy_pb2.GetIamPolicyRequest): - The request object for GetIamPolicy method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - policy_pb2.Policy: Response from GetIamPolicy method. - """ - - http_options = _BaseClientConnectorServicesServiceRestTransport._BaseGetIamPolicy._get_http_options() - - request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) - transcoded_request = _BaseClientConnectorServicesServiceRestTransport._BaseGetIamPolicy._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseClientConnectorServicesServiceRestTransport._BaseGetIamPolicy._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.clientconnectorservices_v1.ClientConnectorServicesServiceClient.GetIamPolicy", - extra = { - "serviceName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService", - "rpcName": "GetIamPolicy", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ClientConnectorServicesServiceRestTransport._GetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
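On success, the handler that follows decodes the JSON body into the protobuf response type with ``json_format.Parse``. The same decode step in isolation, assuming ``grpc-google-iam-v1`` supplies ``policy_pb2`` (the payload is illustrative):

.. code-block:: python

    from google.iam.v1 import policy_pb2
    from google.protobuf import json_format

    # bytes fields such as etag travel base64-encoded in JSON.
    content = '{"version": 1, "etag": "BwWWja0YfJA=", "bindings": []}'
    resp = policy_pb2.Policy()
    json_format.Parse(content, resp)
    print(resp.version)  # 1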
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = policy_pb2.Policy() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_iam_policy(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.beyondcorp.clientconnectorservices_v1.ClientConnectorServicesServiceAsyncClient.GetIamPolicy", - extra = { - "serviceName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService", - "rpcName": "GetIamPolicy", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def set_iam_policy(self): - return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - - class _SetIamPolicy(_BaseClientConnectorServicesServiceRestTransport._BaseSetIamPolicy, ClientConnectorServicesServiceRestStub): - def __hash__(self): - return hash("ClientConnectorServicesServiceRestTransport.SetIamPolicy") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: iam_policy_pb2.SetIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> policy_pb2.Policy: - - r"""Call the set iam policy method over HTTP. - - Args: - request (iam_policy_pb2.SetIamPolicyRequest): - The request object for SetIamPolicy method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - policy_pb2.Policy: Response from SetIamPolicy method. 
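``SetIamPolicy`` is usually paired with ``GetIamPolicy`` in a read-modify-write cycle: carrying the ``etag`` returned by the read into the write makes a concurrent modification fail loudly instead of being silently overwritten. A hedged sketch (the resource name and etag are illustrative):

.. code-block:: python

    from google.iam.v1 import iam_policy_pb2, policy_pb2

    # Reuse the etag obtained from a prior GetIamPolicy response.
    policy = policy_pb2.Policy(version=1, etag=b"BwWWja0YfJA=")
    policy.bindings.add(role="roles/viewer", members=["user:alice@example.com"])

    request = iam_policy_pb2.SetIamPolicyRequest(
        resource="projects/p/locations/l/clientConnectorServices/s",
        policy=policy,
    )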
- """ - - http_options = _BaseClientConnectorServicesServiceRestTransport._BaseSetIamPolicy._get_http_options() - - request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) - transcoded_request = _BaseClientConnectorServicesServiceRestTransport._BaseSetIamPolicy._get_transcoded_request(http_options, request) - - body = _BaseClientConnectorServicesServiceRestTransport._BaseSetIamPolicy._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseClientConnectorServicesServiceRestTransport._BaseSetIamPolicy._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.clientconnectorservices_v1.ClientConnectorServicesServiceClient.SetIamPolicy", - extra = { - "serviceName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService", - "rpcName": "SetIamPolicy", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ClientConnectorServicesServiceRestTransport._SetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = policy_pb2.Policy() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_set_iam_policy(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.beyondcorp.clientconnectorservices_v1.ClientConnectorServicesServiceAsyncClient.SetIamPolicy", - extra = { - "serviceName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService", - "rpcName": "SetIamPolicy", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def test_iam_permissions(self): - return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore - - class _TestIamPermissions(_BaseClientConnectorServicesServiceRestTransport._BaseTestIamPermissions, ClientConnectorServicesServiceRestStub): - def __hash__(self): - return hash("ClientConnectorServicesServiceRestTransport.TestIamPermissions") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - 
data=body, - ) - return response - - def __call__(self, - request: iam_policy_pb2.TestIamPermissionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - - r"""Call the test iam permissions method over HTTP. - - Args: - request (iam_policy_pb2.TestIamPermissionsRequest): - The request object for TestIamPermissions method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. - """ - - http_options = _BaseClientConnectorServicesServiceRestTransport._BaseTestIamPermissions._get_http_options() - - request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) - transcoded_request = _BaseClientConnectorServicesServiceRestTransport._BaseTestIamPermissions._get_transcoded_request(http_options, request) - - body = _BaseClientConnectorServicesServiceRestTransport._BaseTestIamPermissions._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseClientConnectorServicesServiceRestTransport._BaseTestIamPermissions._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.clientconnectorservices_v1.ClientConnectorServicesServiceClient.TestIamPermissions", - extra = { - "serviceName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService", - "rpcName": "TestIamPermissions", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ClientConnectorServicesServiceRestTransport._TestIamPermissions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
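Each ``_get_response`` helper above serializes query parameters with ``rest_helpers.flatten_query_params(..., strict=True)``, which flattens nested dictionaries into dotted keys and expands lists into repeated parameters. A quick illustration; the field names are made up and the exact output shown is indicative:

.. code-block:: python

    from google.api_core import rest_helpers

    query_params = {"pageSize": 10, "labels": {"env": "prod"}, "fields": ["name", "state"]}
    print(rest_helpers.flatten_query_params(query_params, strict=True))
    # e.g. [('pageSize', '10'), ('labels.env', 'prod'), ('fields', 'name'), ('fields', 'state')]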
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = iam_policy_pb2.TestIamPermissionsResponse() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_test_iam_permissions(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.beyondcorp.clientconnectorservices_v1.ClientConnectorServicesServiceAsyncClient.TestIamPermissions", - extra = { - "serviceName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService", - "rpcName": "TestIamPermissions", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - - class _CancelOperation(_BaseClientConnectorServicesServiceRestTransport._BaseCancelOperation, ClientConnectorServicesServiceRestStub): - def __hash__(self): - return hash("ClientConnectorServicesServiceRestTransport.CancelOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - - r"""Call the cancel operation method over HTTP. - - Args: - request (operations_pb2.CancelOperationRequest): - The request object for CancelOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
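As the docstrings here note, metadata values are strings except when the key carries the gRPC ``-bin`` suffix, in which case bytes are expected. For example (the header names are illustrative):

.. code-block:: python

    metadata = (
        ("x-goog-request-params", "name=projects/p/locations/l/operations/op-123"),
        ("x-debug-trace-bin", b"\x00\x01\x02"),  # '-bin' suffix: value must be bytes
    )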
- """ - - http_options = _BaseClientConnectorServicesServiceRestTransport._BaseCancelOperation._get_http_options() - - request, metadata = self._interceptor.pre_cancel_operation(request, metadata) - transcoded_request = _BaseClientConnectorServicesServiceRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) - - body = _BaseClientConnectorServicesServiceRestTransport._BaseCancelOperation._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseClientConnectorServicesServiceRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.clientconnectorservices_v1.ClientConnectorServicesServiceClient.CancelOperation", - extra = { - "serviceName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService", - "rpcName": "CancelOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ClientConnectorServicesServiceRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_cancel_operation(None) - - @property - def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - - class _DeleteOperation(_BaseClientConnectorServicesServiceRestTransport._BaseDeleteOperation, ClientConnectorServicesServiceRestStub): - def __hash__(self): - return hash("ClientConnectorServicesServiceRestTransport.DeleteOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - - r"""Call the delete operation method over HTTP. - - Args: - request (operations_pb2.DeleteOperationRequest): - The request object for DeleteOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - - http_options = _BaseClientConnectorServicesServiceRestTransport._BaseDeleteOperation._get_http_options() - - request, metadata = self._interceptor.pre_delete_operation(request, metadata) - transcoded_request = _BaseClientConnectorServicesServiceRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseClientConnectorServicesServiceRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.clientconnectorservices_v1.ClientConnectorServicesServiceClient.DeleteOperation", - extra = { - "serviceName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService", - "rpcName": "DeleteOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ClientConnectorServicesServiceRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_delete_operation(None) - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(_BaseClientConnectorServicesServiceRestTransport._BaseGetOperation, ClientConnectorServicesServiceRestStub): - def __hash__(self): - return hash("ClientConnectorServicesServiceRestTransport.GetOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.Operation: Response from GetOperation method. - """ - - http_options = _BaseClientConnectorServicesServiceRestTransport._BaseGetOperation._get_http_options() - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - transcoded_request = _BaseClientConnectorServicesServiceRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseClientConnectorServicesServiceRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.clientconnectorservices_v1.ClientConnectorServicesServiceClient.GetOperation", - extra = { - "serviceName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService", - "rpcName": "GetOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ClientConnectorServicesServiceRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
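Every ``__call__`` above starts by transcoding the request onto one of its declared HTTP rules via ``path_template.transcode``, which binds ``{name=...}``-style path variables from request fields and routes any leftover fields into query parameters. A sketch using a plain dict in place of a real request message (the ``filter`` field is added purely for illustration):

.. code-block:: python

    from google.api_core import path_template

    http_options = [
        {"method": "get", "uri": "/v1/{name=projects/*/locations/*/operations/*}"},
    ]
    request = {"name": "projects/p/locations/us-central1/operations/op-123", "filter": "done=true"}

    transcoded = path_template.transcode(http_options, **request)
    print(transcoded["method"])        # get
    print(transcoded["uri"])           # /v1/projects/p/locations/us-central1/operations/op-123
    print(transcoded["query_params"])  # {'filter': 'done=true'}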
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.Operation() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.beyondcorp.clientconnectorservices_v1.ClientConnectorServicesServiceAsyncClient.GetOperation", - extra = { - "serviceName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService", - "rpcName": "GetOperation", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - - class _ListOperations(_BaseClientConnectorServicesServiceRestTransport._BaseListOperations, ClientConnectorServicesServiceRestStub): - def __hash__(self): - return hash("ClientConnectorServicesServiceRestTransport.ListOperations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.ListOperationsResponse: - - r"""Call the list operations method over HTTP. - - Args: - request (operations_pb2.ListOperationsRequest): - The request object for ListOperations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.ListOperationsResponse: Response from ListOperations method. 
- """ - - http_options = _BaseClientConnectorServicesServiceRestTransport._BaseListOperations._get_http_options() - - request, metadata = self._interceptor.pre_list_operations(request, metadata) - transcoded_request = _BaseClientConnectorServicesServiceRestTransport._BaseListOperations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseClientConnectorServicesServiceRestTransport._BaseListOperations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.clientconnectorservices_v1.ClientConnectorServicesServiceClient.ListOperations", - extra = { - "serviceName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService", - "rpcName": "ListOperations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ClientConnectorServicesServiceRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.ListOperationsResponse() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_list_operations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.beyondcorp.clientconnectorservices_v1.ClientConnectorServicesServiceAsyncClient.ListOperations", - extra = { - "serviceName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService", - "rpcName": "ListOperations", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'ClientConnectorServicesServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/transports/rest_base.py b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/transports/rest_base.py deleted file mode 100644 index a9290f4f90d0..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/transports/rest_base.py +++ /dev/null @@ -1,596 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# 
you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from .base import ClientConnectorServicesServiceTransport, DEFAULT_CLIENT_INFO - -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - - -from google.cloud.beyondcorp_clientconnectorservices_v1.types import client_connector_services_service -from google.longrunning import operations_pb2 # type: ignore - - -class _BaseClientConnectorServicesServiceRestTransport(ClientConnectorServicesServiceTransport): - """Base REST backend transport for ClientConnectorServicesService. - - Note: This class is not meant to be used directly. Use its sync and - async sub-classes instead. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'beyondcorp.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - Args: - host (Optional[str]): - The hostname to connect to (default: 'beyondcorp.googleapis.com'). - credentials (Optional[Any]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. 
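The constructor body that follows normalizes ``host`` with exactly this scheme-defaulting rule; a condensed standalone sketch of the same logic:

.. code-block:: python

    import re

    def normalize_host(host: str, url_scheme: str = "https") -> str:
        # Keep an explicit scheme if the caller supplied one; otherwise prepend url_scheme.
        match = re.match(r"^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
        if match is None:
            raise ValueError(f"Unexpected hostname structure: {host}")
        return host if match.group("scheme") else f"{url_scheme}://{host}"

    print(normalize_host("beyondcorp.googleapis.com"))  # https://beyondcorp.googleapis.com
    print(normalize_host("http://localhost:8080"))      # http://localhost:8080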
- """ - # Run the base constructor - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - - class _BaseCreateClientConnectorService: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/clientConnectorServices', - 'body': 'client_connector_service', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = client_connector_services_service.CreateClientConnectorServiceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseClientConnectorServicesServiceRestTransport._BaseCreateClientConnectorService._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteClientConnectorService: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/clientConnectorServices/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = client_connector_services_service.DeleteClientConnectorServiceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseClientConnectorServicesServiceRestTransport._BaseDeleteClientConnectorService._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetClientConnectorService: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def 
_get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/clientConnectorServices/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = client_connector_services_service.GetClientConnectorServiceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseClientConnectorServicesServiceRestTransport._BaseGetClientConnectorService._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListClientConnectorServices: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/clientConnectorServices', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = client_connector_services_service.ListClientConnectorServicesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseClientConnectorServicesServiceRestTransport._BaseListClientConnectorServices._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateClientConnectorService: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{client_connector_service.name=projects/*/locations/*/clientConnectorServices/*}', - 'body': 'client_connector_service', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = client_connector_services_service.UpdateClientConnectorServiceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = 
json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseClientConnectorServicesServiceRestTransport._BaseUpdateClientConnectorService._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetLocation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseListLocations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*}/locations', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseGetIamPolicy: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{resource=projects/*/locations/*/appConnections/*}:getIamPolicy', - }, - { - 'method': 'get', - 'uri': '/v1/{resource=projects/*/locations/*/appConnectors/*}:getIamPolicy', - }, - { - 'method': 'get', - 'uri': '/v1/{resource=projects/*/locations/*/appGateways/*}:getIamPolicy', - }, - { - 'method': 'get', - 'uri': '/v1/{resource=projects/*/locations/*/clientConnectorServices/*}:getIamPolicy', - }, - { - 'method': 'get', - 'uri': '/v1/{resource=projects/*/locations/*/clientGateways/*}:getIamPolicy', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseSetIamPolicy: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/appConnections/*}:setIamPolicy', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/appConnectors/*}:setIamPolicy', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/appGateways/*}:setIamPolicy', - 'body': '*', - }, - { - 'method': 
'post', - 'uri': '/v1/{resource=projects/*/locations/*/clientConnectorServices/*}:setIamPolicy', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/clientGateways/*}:setIamPolicy', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - body = json.dumps(transcoded_request['body']) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseTestIamPermissions: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/appConnections/*}:testIamPermissions', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/appConnectors/*}:testIamPermissions', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/appGateways/*}:testIamPermissions', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/clientConnectorServices/*}:testIamPermissions', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/clientGateways/*}:testIamPermissions', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - body = json.dumps(transcoded_request['body']) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseCancelOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - body = json.dumps(transcoded_request['body']) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseDeleteOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = 
json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseGetOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseListOperations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - -__all__=( - '_BaseClientConnectorServicesServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/types/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/types/__init__.py deleted file mode 100644 index 58e876002f6f..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/types/__init__.py +++ /dev/null @@ -1,36 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .client_connector_services_service import ( - ClientConnectorService, - ClientConnectorServiceOperationMetadata, - CreateClientConnectorServiceRequest, - DeleteClientConnectorServiceRequest, - GetClientConnectorServiceRequest, - ListClientConnectorServicesRequest, - ListClientConnectorServicesResponse, - UpdateClientConnectorServiceRequest, -) - -__all__ = ( - 'ClientConnectorService', - 'ClientConnectorServiceOperationMetadata', - 'CreateClientConnectorServiceRequest', - 'DeleteClientConnectorServiceRequest', - 'GetClientConnectorServiceRequest', - 'ListClientConnectorServicesRequest', - 'ListClientConnectorServicesResponse', - 'UpdateClientConnectorServiceRequest', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/types/client_connector_services_service.py b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/types/client_connector_services_service.py deleted file mode 100644 index 306c746d96a0..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/google/cloud/beyondcorp_clientconnectorservices_v1/types/client_connector_services_service.py +++ /dev/null @@ -1,581 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.beyondcorp.clientconnectorservices.v1', - manifest={ - 'ClientConnectorService', - 'ListClientConnectorServicesRequest', - 'ListClientConnectorServicesResponse', - 'GetClientConnectorServiceRequest', - 'CreateClientConnectorServiceRequest', - 'UpdateClientConnectorServiceRequest', - 'DeleteClientConnectorServiceRequest', - 'ClientConnectorServiceOperationMetadata', - }, -) - - -class ClientConnectorService(proto.Message): - r"""Message describing ClientConnectorService object. - - Attributes: - name (str): - Required. Name of resource. The name is - ignored during creation. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. [Output only] Create time stamp. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. [Output only] Update time stamp. - display_name (str): - Optional. User-provided name. The display name should follow - certain format. - - - Must be 6 to 30 characters in length. - - Can only contain lowercase letters, numbers, and hyphens. - - Must start with a letter. - ingress (google.cloud.beyondcorp_clientconnectorservices_v1.types.ClientConnectorService.Ingress): - Required. The details of the ingress - settings. - egress (google.cloud.beyondcorp_clientconnectorservices_v1.types.ClientConnectorService.Egress): - Required. The details of the egress settings. 
-        state (google.cloud.beyondcorp_clientconnectorservices_v1.types.ClientConnectorService.State):
-            Output only. The operational state of the
-            ClientConnectorService.
-    """
-    class State(proto.Enum):
-        r"""Represents the different states of a ClientConnectorService.
-
-        Values:
-            STATE_UNSPECIFIED (0):
-                Default value. This value is unused.
-            CREATING (1):
-                ClientConnectorService is being created.
-            UPDATING (2):
-                ClientConnectorService is being updated.
-            DELETING (3):
-                ClientConnectorService is being deleted.
-            RUNNING (4):
-                ClientConnectorService is running.
-            DOWN (5):
-                ClientConnectorService is down and may be
-                restored in the future. This happens when CCFE
-                sends ProjectState = OFF.
-            ERROR (6):
-                ClientConnectorService encountered an error
-                and is in an indeterminate state.
-        """
-        STATE_UNSPECIFIED = 0
-        CREATING = 1
-        UPDATING = 2
-        DELETING = 3
-        RUNNING = 4
-        DOWN = 5
-        ERROR = 6
-
-    class Ingress(proto.Message):
-        r"""Settings of how to connect to the ClientGateway.
-        One of the following options should be set.
-
-
-        .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
-
-        Attributes:
-            config (google.cloud.beyondcorp_clientconnectorservices_v1.types.ClientConnectorService.Ingress.Config):
-                The basic ingress config for ClientGateways.
-
-                This field is a member of `oneof`_ ``ingress_config``.
-        """
-
-        class Config(proto.Message):
-            r"""The basic ingress config for ClientGateways.
-
-            Attributes:
-                transport_protocol (google.cloud.beyondcorp_clientconnectorservices_v1.types.ClientConnectorService.Ingress.Config.TransportProtocol):
-                    Required. Immutable. The transport protocol
-                    used between the client and the server.
-                destination_routes (MutableSequence[google.cloud.beyondcorp_clientconnectorservices_v1.types.ClientConnectorService.Ingress.Config.DestinationRoute]):
-                    Required. The settings used to configure
-                    basic ClientGateways.
-            """
-            class TransportProtocol(proto.Enum):
-                r"""The protocol used to connect to the server.
-
-                Values:
-                    TRANSPORT_PROTOCOL_UNSPECIFIED (0):
-                        Default value. This value is unused.
-                    TCP (1):
-                        TCP protocol.
-                """
-                TRANSPORT_PROTOCOL_UNSPECIFIED = 0
-                TCP = 1
-
-            class DestinationRoute(proto.Message):
-                r"""The setting used to configure ClientGateways.
-                It adds routes to the client's routing table
-                after the connection is established.
-
-                Attributes:
-                    address (str):
-                        Required. The network address of the subnet
-                        for which the packet is routed to the
-                        ClientGateway.
-                    netmask (str):
-                        Required. The network mask of the subnet
-                        for which the packet is routed to the
-                        ClientGateway.
-                """
-
-                address: str = proto.Field(
-                    proto.STRING,
-                    number=1,
-                )
-                netmask: str = proto.Field(
-                    proto.STRING,
-                    number=2,
-                )
-
-            transport_protocol: 'ClientConnectorService.Ingress.Config.TransportProtocol' = proto.Field(
-                proto.ENUM,
-                number=1,
-                enum='ClientConnectorService.Ingress.Config.TransportProtocol',
-            )
-            destination_routes: MutableSequence['ClientConnectorService.Ingress.Config.DestinationRoute'] = proto.RepeatedField(
-                proto.MESSAGE,
-                number=2,
-                message='ClientConnectorService.Ingress.Config.DestinationRoute',
-            )
-
-        config: 'ClientConnectorService.Ingress.Config' = proto.Field(
-            proto.MESSAGE,
-            number=1,
-            oneof='ingress_config',
-            message='ClientConnectorService.Ingress.Config',
-        )
-
-    class Egress(proto.Message):
-        r"""The details of the egress info. One of the following options
-        should be set.
-
-
-        .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - peered_vpc (google.cloud.beyondcorp_clientconnectorservices_v1.types.ClientConnectorService.Egress.PeeredVpc): - A VPC from the consumer project. - - This field is a member of `oneof`_ ``destination_type``. - """ - - class PeeredVpc(proto.Message): - r"""The peered VPC owned by the consumer project. - - Attributes: - network_vpc (str): - Required. The name of the peered VPC owned by - the consumer project. - """ - - network_vpc: str = proto.Field( - proto.STRING, - number=1, - ) - - peered_vpc: 'ClientConnectorService.Egress.PeeredVpc' = proto.Field( - proto.MESSAGE, - number=1, - oneof='destination_type', - message='ClientConnectorService.Egress.PeeredVpc', - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - display_name: str = proto.Field( - proto.STRING, - number=4, - ) - ingress: Ingress = proto.Field( - proto.MESSAGE, - number=6, - message=Ingress, - ) - egress: Egress = proto.Field( - proto.MESSAGE, - number=7, - message=Egress, - ) - state: State = proto.Field( - proto.ENUM, - number=8, - enum=State, - ) - - -class ListClientConnectorServicesRequest(proto.Message): - r"""Message for requesting list of ClientConnectorServices. - - Attributes: - parent (str): - Required. Parent value for - ListClientConnectorServicesRequest. - page_size (int): - Optional. Requested page size. Server may - return fewer items than requested. If - unspecified, server will pick an appropriate - default. - page_token (str): - Optional. A token identifying a page of - results the server should return. - filter (str): - Optional. Filtering results. - order_by (str): - Optional. Hint for how to order the results. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListClientConnectorServicesResponse(proto.Message): - r"""Message for response to listing ClientConnectorServices. - - Attributes: - client_connector_services (MutableSequence[google.cloud.beyondcorp_clientconnectorservices_v1.types.ClientConnectorService]): - The list of ClientConnectorService. - next_page_token (str): - A token identifying a page of results the - server should return. - unreachable (MutableSequence[str]): - Locations that could not be reached. - """ - - @property - def raw_page(self): - return self - - client_connector_services: MutableSequence['ClientConnectorService'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='ClientConnectorService', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class GetClientConnectorServiceRequest(proto.Message): - r"""Message for getting a ClientConnectorService. - - Attributes: - name (str): - Required. Name of the resource. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateClientConnectorServiceRequest(proto.Message): - r"""Message for creating a ClientConnectorService. 
-
-    Attributes:
-        parent (str):
-            Required. Value for parent.
-        client_connector_service_id (str):
-            Optional. User-settable client connector service resource
-            ID.
-
-            -  Must start with a letter.
-            -  Must contain 4 to 63 characters from
-               ``/[a-z][0-9]-/``.
-            -  Must end with a number or a letter.
-
-            A random, system-generated name will be assigned if not
-            specified by the user.
-        client_connector_service (google.cloud.beyondcorp_clientconnectorservices_v1.types.ClientConnectorService):
-            Required. The resource being created.
-        request_id (str):
-            Optional. An optional request ID to identify
-            requests. Specify a unique request ID so that if
-            you must retry your request, the server will
-            know to ignore the request if it has already
-            been completed. The server will guarantee this
-            for at least 60 minutes after the first request.
-
-            For example, consider a situation where you make
-            an initial request and the request times out.
-            If you make the request again with the same
-            request ID, the server can check whether the
-            original operation with the same request ID was
-            received, and if so, will ignore the second
-            request. This prevents clients from accidentally
-            creating duplicate commitments.
-
-            The request ID must be a valid UUID with the
-            exception that zero UUID is not supported
-            (00000000-0000-0000-0000-000000000000).
-        validate_only (bool):
-            Optional. If set, validates the request by
-            executing a dry-run which would not alter the
-            resource in any way.
-    """
-
-    parent: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    client_connector_service_id: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-    client_connector_service: 'ClientConnectorService' = proto.Field(
-        proto.MESSAGE,
-        number=3,
-        message='ClientConnectorService',
-    )
-    request_id: str = proto.Field(
-        proto.STRING,
-        number=4,
-    )
-    validate_only: bool = proto.Field(
-        proto.BOOL,
-        number=5,
-    )
-
-
-class UpdateClientConnectorServiceRequest(proto.Message):
-    r"""Message for updating a ClientConnectorService.
-
-    Attributes:
-        update_mask (google.protobuf.field_mask_pb2.FieldMask):
-            Required. Field mask is used to specify the fields to be
-            overwritten in the ClientConnectorService resource by the
-            update. The fields specified in the update_mask are relative
-            to the resource, not the full request. A field will be
-            overwritten if it is in the mask. If the user does not
-            provide a mask then all fields will be overwritten.
-
-            Mutable fields: display_name.
-        client_connector_service (google.cloud.beyondcorp_clientconnectorservices_v1.types.ClientConnectorService):
-            Required. The resource being updated.
-        request_id (str):
-            Optional. An optional request ID to identify
-            requests. Specify a unique request ID so that if
-            you must retry your request, the server will
-            know to ignore the request if it has already
-            been completed. The server will guarantee this
-            for at least 60 minutes after the first request.
-
-            For example, consider a situation where you make
-            an initial request and the request times out.
-            If you make the request again with the same
-            request ID, the server can check whether the
-            original operation with the same request ID was
-            received, and if so, will ignore the second
-            request. This prevents clients from accidentally
-            creating duplicate commitments.
-
-            The request ID must be a valid UUID with the
-            exception that zero UUID is not supported
-            (00000000-0000-0000-0000-000000000000).
-        validate_only (bool):
-            Optional. If set, validates the request by
-            executing a dry-run which would not alter the
-            resource in any way.
-        allow_missing (bool):
-            Optional. If set to true, the resource will
-            be created if it is not found.
-    """
-
-    update_mask: field_mask_pb2.FieldMask = proto.Field(
-        proto.MESSAGE,
-        number=1,
-        message=field_mask_pb2.FieldMask,
-    )
-    client_connector_service: 'ClientConnectorService' = proto.Field(
-        proto.MESSAGE,
-        number=2,
-        message='ClientConnectorService',
-    )
-    request_id: str = proto.Field(
-        proto.STRING,
-        number=3,
-    )
-    validate_only: bool = proto.Field(
-        proto.BOOL,
-        number=4,
-    )
-    allow_missing: bool = proto.Field(
-        proto.BOOL,
-        number=5,
-    )
-
-
-class DeleteClientConnectorServiceRequest(proto.Message):
-    r"""Message for deleting a ClientConnectorService.
-
-    Attributes:
-        name (str):
-            Required. Name of the resource.
-        request_id (str):
-            Optional. An optional request ID to identify
-            requests. Specify a unique request ID so that if
-            you must retry your request, the server will
-            know to ignore the request if it has already
-            been completed. The server will guarantee this
-            for at least 60 minutes after the first request.
-
-            For example, consider a situation where you make
-            an initial request and the request times out.
-            If you make the request again with the same
-            request ID, the server can check whether the
-            original operation with the same request ID was
-            received, and if so, will ignore the second
-            request. This prevents clients from accidentally
-            creating duplicate commitments.
-
-            The request ID must be a valid UUID with the
-            exception that zero UUID is not supported
-            (00000000-0000-0000-0000-000000000000).
-        validate_only (bool):
-            Optional. If set, validates the request by
-            executing a dry-run which would not alter the
-            resource in any way.
-    """
-
-    name: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    request_id: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-    validate_only: bool = proto.Field(
-        proto.BOOL,
-        number=3,
-    )
-
-
-class ClientConnectorServiceOperationMetadata(proto.Message):
-    r"""Represents the metadata of the long-running operation.
-
-    Attributes:
-        create_time (google.protobuf.timestamp_pb2.Timestamp):
-            Output only. The time the operation was
-            created.
-        end_time (google.protobuf.timestamp_pb2.Timestamp):
-            Output only. The time the operation finished
-            running.
-        target (str):
-            Output only. Server-defined resource path for
-            the target of the operation.
-        verb (str):
-            Output only. Name of the verb executed by the
-            operation.
-        status_message (str):
-            Output only. Human-readable status of the
-            operation, if any.
-        requested_cancellation (bool):
-            Output only. Identifies whether the user has requested
-            cancellation of the operation. Operations that have
-            successfully been cancelled have an [Operation.error][]
-            value with a [google.rpc.Status.code][google.rpc.Status.code]
-            of 1, corresponding to ``Code.CANCELLED``.
-        api_version (str):
-            Output only. API version used to start the
-            operation.
-    """
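To make the ``update_mask`` and ``request_id`` semantics documented above concrete, here is a small illustrative sketch (the resource name and values are invented): the field mask limits the write to ``display_name``, and the client-generated UUID lets the server deduplicate a retried call.

.. code-block:: python

    import uuid

    from google.cloud import beyondcorp_clientconnectorservices_v1
    from google.protobuf import field_mask_pb2

    service = beyondcorp_clientconnectorservices_v1.ClientConnectorService(
        name="projects/my-project/locations/us-central1/clientConnectorServices/my-service",
        display_name="renamed-service",
    )
    request = beyondcorp_clientconnectorservices_v1.UpdateClientConnectorServiceRequest(
        # Only display_name is overwritten; all other fields are left untouched.
        update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
        client_connector_service=service,
        # Reuse the same UUID verbatim when retrying this exact request.
        request_id=str(uuid.uuid4()),
    )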
- """ - - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - target: str = proto.Field( - proto.STRING, - number=3, - ) - verb: str = proto.Field( - proto.STRING, - number=4, - ) - status_message: str = proto.Field( - proto.STRING, - number=5, - ) - requested_cancellation: bool = proto.Field( - proto.BOOL, - number=6, - ) - api_version: str = proto.Field( - proto.STRING, - number=7, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/mypy.ini b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/mypy.ini deleted file mode 100644 index 574c5aed394b..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/noxfile.py b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/noxfile.py deleted file mode 100644 index 8c5d00363485..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/noxfile.py +++ /dev/null @@ -1,280 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import pathlib -import re -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", - "3.12", - "3.13", -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = 'google-cloud-beyondcorp-clientconnectorservices' - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.13" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "prerelease_deps", -] - -@nox.session(python=ALL_PYTHON) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def unit(session, protobuf_implementation): - """Run the unit test suite.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") - - # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. - # The 'cpp' implementation requires Protobuf<4. 
-    if protobuf_implementation == "cpp":
-        session.install("protobuf<4")
-
-    session.run(
-        'py.test',
-        '--quiet',
-        '--cov=google/cloud/beyondcorp_clientconnectorservices_v1/',
-        '--cov=tests/',
-        '--cov-config=.coveragerc',
-        '--cov-report=term',
-        '--cov-report=html',
-        os.path.join('tests', 'unit', ''.join(session.posargs)),
-        env={
-            "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
-        },
-    )
-
-@nox.session(python=ALL_PYTHON[-1])
-@nox.parametrize(
-    "protobuf_implementation",
-    ["python", "upb", "cpp"],
-)
-def prerelease_deps(session, protobuf_implementation):
-    """Run the unit test suite against pre-release versions of dependencies."""
-
-    if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
-        session.skip("cpp implementation is not supported in Python 3.11+")
-
-    # Install test environment dependencies
-    session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"')
-
-    # Install the package without dependencies
-    session.install('-e', '.', '--no-deps')
-
-    # We test the minimum dependency versions using the minimum Python
-    # version, so the lowest Python runtime that we test has a corresponding
-    # constraints file, located at `testing/constraints-<minimum python version>.txt`,
-    # which contains all of the dependencies and extras.
-    with open(
-        CURRENT_DIRECTORY
-        / "testing"
-        / f"constraints-{ALL_PYTHON[0]}.txt",
-        encoding="utf-8",
-    ) as constraints_file:
-        constraints_text = constraints_file.read()
-
-    # Ignore leading whitespace and comment lines.
-    constraints_deps = [
-        match.group(1)
-        for match in re.finditer(
-            r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE
-        )
-    ]
-
-    session.install(*constraints_deps)
-
-    prerel_deps = [
-        "googleapis-common-protos",
-        "google-api-core",
-        "google-auth",
-        # Exclude grpcio==1.67.0rc1, which does not support Python 3.13.
-        "grpcio!=1.67.0rc1",
-        "grpcio-status",
-        "protobuf",
-        "proto-plus",
-    ]
-
-    for dep in prerel_deps:
-        session.install("--pre", "--no-deps", "--upgrade", dep)
-
-    # Remaining dependencies
-    other_deps = [
-        "requests",
-    ]
-    session.install(*other_deps)
-
-    # Print out prerelease package versions
-
-    session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)")
-    session.run("python", "-c", "import google.auth; print(google.auth.__version__)")
-    session.run("python", "-c", "import grpc; print(grpc.__version__)")
-    session.run(
-        "python", "-c", "import google.protobuf; print(google.protobuf.__version__)"
-    )
-    session.run(
-        "python", "-c", "import proto; print(proto.__version__)"
-    )
-
-    session.run(
-        'py.test',
-        '--quiet',
-        '--cov=google/cloud/beyondcorp_clientconnectorservices_v1/',
-        '--cov=tests/',
-        '--cov-config=.coveragerc',
-        '--cov-report=term',
-        '--cov-report=html',
-        os.path.join('tests', 'unit', ''.join(session.posargs)),
-        env={
-            "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
-        },
-    )
-
-
-@nox.session(python=DEFAULT_PYTHON_VERSION)
-def cover(session):
-    """Run the final coverage report.
-    This outputs the coverage report aggregating coverage from the unit
-    test runs (not system test runs), and then erases coverage data.
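The constraints-parsing regex in ``prerelease_deps`` above keeps only pinned ``package==version`` lines, skipping comments and unpinned entries. A self-contained demonstration with made-up file contents:

.. code-block:: python

    import re

    constraints_text = """\
    # comment lines and unpinned entries are skipped
    google-api-core==1.34.0
    proto-plus==1.22.3
    requests
    """

    constraints_deps = [
        match.group(1)
        for match in re.finditer(
            r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE
        )
    ]
    print(constraints_deps)  # ['google-api-core', 'proto-plus']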
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/samples/generated_samples/beyondcorp_v1_generated_client_connector_services_service_create_client_connector_service_async.py b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/samples/generated_samples/beyondcorp_v1_generated_client_connector_services_service_create_client_connector_service_async.py deleted file mode 100644 index 57fd7cba55be..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/samples/generated_samples/beyondcorp_v1_generated_client_connector_services_service_create_client_connector_service_async.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-#
-# Snippet for CreateClientConnectorService
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-beyondcorp-clientconnectorservices
-
-
-# [START beyondcorp_v1_generated_ClientConnectorServicesService_CreateClientConnectorService_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import beyondcorp_clientconnectorservices_v1
-
-
-async def sample_create_client_connector_service():
-    # Create a client
-    client = beyondcorp_clientconnectorservices_v1.ClientConnectorServicesServiceAsyncClient()
-
-    # Initialize request argument(s)
-    client_connector_service = beyondcorp_clientconnectorservices_v1.ClientConnectorService()
-    client_connector_service.name = "name_value"
-    client_connector_service.ingress.config.transport_protocol = "TCP"
-    destination_route = beyondcorp_clientconnectorservices_v1.ClientConnectorService.Ingress.Config.DestinationRoute(address="address_value", netmask="netmask_value")
-    client_connector_service.ingress.config.destination_routes = [destination_route]
-    client_connector_service.egress.peered_vpc.network_vpc = "network_vpc_value"
-
-    request = beyondcorp_clientconnectorservices_v1.CreateClientConnectorServiceRequest(
-        parent="parent_value",
-        client_connector_service=client_connector_service,
-    )
-
-    # Make the request
-    operation = await client.create_client_connector_service(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = await operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END beyondcorp_v1_generated_ClientConnectorServicesService_CreateClientConnectorService_async]
diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/samples/generated_samples/beyondcorp_v1_generated_client_connector_services_service_create_client_connector_service_sync.py b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/samples/generated_samples/beyondcorp_v1_generated_client_connector_services_service_create_client_connector_service_sync.py
deleted file mode 100644
index dec93284d152..000000000000
--- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/samples/generated_samples/beyondcorp_v1_generated_client_connector_services_service_create_client_connector_service_sync.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CreateClientConnectorService
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
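The async sample above defines a coroutine but does not execute it; a minimal driver (assuming no event loop is already running) would be:

.. code-block:: python

    import asyncio

    asyncio.run(sample_create_client_connector_service())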
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-beyondcorp-clientconnectorservices
-
-
-# [START beyondcorp_v1_generated_ClientConnectorServicesService_CreateClientConnectorService_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import beyondcorp_clientconnectorservices_v1
-
-
-def sample_create_client_connector_service():
-    # Create a client
-    client = beyondcorp_clientconnectorservices_v1.ClientConnectorServicesServiceClient()
-
-    # Initialize request argument(s)
-    client_connector_service = beyondcorp_clientconnectorservices_v1.ClientConnectorService()
-    client_connector_service.name = "name_value"
-    client_connector_service.ingress.config.transport_protocol = "TCP"
-    destination_route = beyondcorp_clientconnectorservices_v1.ClientConnectorService.Ingress.Config.DestinationRoute(address="address_value", netmask="netmask_value")
-    client_connector_service.ingress.config.destination_routes = [destination_route]
-    client_connector_service.egress.peered_vpc.network_vpc = "network_vpc_value"
-
-    request = beyondcorp_clientconnectorservices_v1.CreateClientConnectorServiceRequest(
-        parent="parent_value",
-        client_connector_service=client_connector_service,
-    )
-
-    # Make the request
-    operation = client.create_client_connector_service(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END beyondcorp_v1_generated_ClientConnectorServicesService_CreateClientConnectorService_sync]
diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/samples/generated_samples/beyondcorp_v1_generated_client_connector_services_service_delete_client_connector_service_async.py b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/samples/generated_samples/beyondcorp_v1_generated_client_connector_services_service_delete_client_connector_service_async.py
deleted file mode 100644
index 0e1d26c2f7f2..000000000000
--- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/samples/generated_samples/beyondcorp_v1_generated_client_connector_services_service_delete_client_connector_service_async.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteClientConnectorService
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
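In the sync sample above, ``operation.result()`` blocks and polls the long-running operation indefinitely. Two common variations on that pattern (the timeout value is arbitrary and shown for illustration only):

.. code-block:: python

    # Bound the wait instead of polling forever.
    response = operation.result(timeout=300)

    # While waiting, the typed LRO metadata documented earlier is available
    # (it may be None if the server has not populated it yet).
    metadata = operation.metadata
    if metadata is not None:
        print(metadata.verb, metadata.target)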
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-beyondcorp-clientconnectorservices
-
-
-# [START beyondcorp_v1_generated_ClientConnectorServicesService_DeleteClientConnectorService_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import beyondcorp_clientconnectorservices_v1
-
-
-async def sample_delete_client_connector_service():
-    # Create a client
-    client = beyondcorp_clientconnectorservices_v1.ClientConnectorServicesServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = beyondcorp_clientconnectorservices_v1.DeleteClientConnectorServiceRequest(
-        name="name_value",
-    )
-
-    # Make the request
-    operation = await client.delete_client_connector_service(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = await operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END beyondcorp_v1_generated_ClientConnectorServicesService_DeleteClientConnectorService_async]
diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/samples/generated_samples/beyondcorp_v1_generated_client_connector_services_service_delete_client_connector_service_sync.py b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/samples/generated_samples/beyondcorp_v1_generated_client_connector_services_service_delete_client_connector_service_sync.py
deleted file mode 100644
index 82f5d48e4cac..000000000000
--- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/samples/generated_samples/beyondcorp_v1_generated_client_connector_services_service_delete_client_connector_service_sync.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for DeleteClientConnectorService
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-beyondcorp-clientconnectorservices
-
-
-# [START beyondcorp_v1_generated_ClientConnectorServicesService_DeleteClientConnectorService_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import beyondcorp_clientconnectorservices_v1 - - -def sample_delete_client_connector_service(): - # Create a client - client = beyondcorp_clientconnectorservices_v1.ClientConnectorServicesServiceClient() - - # Initialize request argument(s) - request = beyondcorp_clientconnectorservices_v1.DeleteClientConnectorServiceRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_client_connector_service(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END beyondcorp_v1_generated_ClientConnectorServicesService_DeleteClientConnectorService_sync] diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/samples/generated_samples/beyondcorp_v1_generated_client_connector_services_service_get_client_connector_service_async.py b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/samples/generated_samples/beyondcorp_v1_generated_client_connector_services_service_get_client_connector_service_async.py deleted file mode 100644 index 4c8a0fc8da0b..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/samples/generated_samples/beyondcorp_v1_generated_client_connector_services_service_get_client_connector_service_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetClientConnectorService -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-beyondcorp-clientconnectorservices - - -# [START beyondcorp_v1_generated_ClientConnectorServicesService_GetClientConnectorService_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import beyondcorp_clientconnectorservices_v1 - - -async def sample_get_client_connector_service(): - # Create a client - client = beyondcorp_clientconnectorservices_v1.ClientConnectorServicesServiceAsyncClient() - - # Initialize request argument(s) - request = beyondcorp_clientconnectorservices_v1.GetClientConnectorServiceRequest( - name="name_value", - ) - - # Make the request - response = await client.get_client_connector_service(request=request) - - # Handle the response - print(response) - -# [END beyondcorp_v1_generated_ClientConnectorServicesService_GetClientConnectorService_async] diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/samples/generated_samples/beyondcorp_v1_generated_client_connector_services_service_get_client_connector_service_sync.py b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/samples/generated_samples/beyondcorp_v1_generated_client_connector_services_service_get_client_connector_service_sync.py deleted file mode 100644 index e2bae66564e3..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/samples/generated_samples/beyondcorp_v1_generated_client_connector_services_service_get_client_connector_service_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetClientConnectorService -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-beyondcorp-clientconnectorservices - - -# [START beyondcorp_v1_generated_ClientConnectorServicesService_GetClientConnectorService_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import beyondcorp_clientconnectorservices_v1 - - -def sample_get_client_connector_service(): - # Create a client - client = beyondcorp_clientconnectorservices_v1.ClientConnectorServicesServiceClient() - - # Initialize request argument(s) - request = beyondcorp_clientconnectorservices_v1.GetClientConnectorServiceRequest( - name="name_value", - ) - - # Make the request - response = client.get_client_connector_service(request=request) - - # Handle the response - print(response) - -# [END beyondcorp_v1_generated_ClientConnectorServicesService_GetClientConnectorService_sync] diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/samples/generated_samples/beyondcorp_v1_generated_client_connector_services_service_list_client_connector_services_async.py b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/samples/generated_samples/beyondcorp_v1_generated_client_connector_services_service_list_client_connector_services_async.py deleted file mode 100644 index 0d17e78eb27e..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/samples/generated_samples/beyondcorp_v1_generated_client_connector_services_service_list_client_connector_services_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListClientConnectorServices -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-beyondcorp-clientconnectorservices - - -# [START beyondcorp_v1_generated_ClientConnectorServicesService_ListClientConnectorServices_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import beyondcorp_clientconnectorservices_v1
-
-
-async def sample_list_client_connector_services():
-    # Create a client
-    client = beyondcorp_clientconnectorservices_v1.ClientConnectorServicesServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = beyondcorp_clientconnectorservices_v1.ListClientConnectorServicesRequest(
-        parent="parent_value",
-    )
-
-    # Make the request
-    page_result = await client.list_client_connector_services(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END beyondcorp_v1_generated_ClientConnectorServicesService_ListClientConnectorServices_async]
diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/samples/generated_samples/beyondcorp_v1_generated_client_connector_services_service_list_client_connector_services_sync.py b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/samples/generated_samples/beyondcorp_v1_generated_client_connector_services_service_list_client_connector_services_sync.py
deleted file mode 100644
index 0c119a8ae840..000000000000
--- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/samples/generated_samples/beyondcorp_v1_generated_client_connector_services_service_list_client_connector_services_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListClientConnectorServices
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-beyondcorp-clientconnectorservices
-
-
-# [START beyondcorp_v1_generated_ClientConnectorServicesService_ListClientConnectorServices_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import beyondcorp_clientconnectorservices_v1 - - -def sample_list_client_connector_services(): - # Create a client - client = beyondcorp_clientconnectorservices_v1.ClientConnectorServicesServiceClient() - - # Initialize request argument(s) - request = beyondcorp_clientconnectorservices_v1.ListClientConnectorServicesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_client_connector_services(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END beyondcorp_v1_generated_ClientConnectorServicesService_ListClientConnectorServices_sync] diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/samples/generated_samples/beyondcorp_v1_generated_client_connector_services_service_update_client_connector_service_async.py b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/samples/generated_samples/beyondcorp_v1_generated_client_connector_services_service_update_client_connector_service_async.py deleted file mode 100644 index a500a59021c5..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/samples/generated_samples/beyondcorp_v1_generated_client_connector_services_service_update_client_connector_service_async.py +++ /dev/null @@ -1,63 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateClientConnectorService -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-beyondcorp-clientconnectorservices - - -# [START beyondcorp_v1_generated_ClientConnectorServicesService_UpdateClientConnectorService_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import beyondcorp_clientconnectorservices_v1
-
-
-async def sample_update_client_connector_service():
-    # Create a client
-    client = beyondcorp_clientconnectorservices_v1.ClientConnectorServicesServiceAsyncClient()
-
-    # Initialize request argument(s)
-    client_connector_service = beyondcorp_clientconnectorservices_v1.ClientConnectorService()
-    client_connector_service.name = "name_value"
-    client_connector_service.ingress.config.transport_protocol = "TCP"
-    destination_route = beyondcorp_clientconnectorservices_v1.ClientConnectorService.Ingress.Config.DestinationRoute(address="address_value", netmask="netmask_value")
-    client_connector_service.ingress.config.destination_routes = [destination_route]
-    client_connector_service.egress.peered_vpc.network_vpc = "network_vpc_value"
-
-    request = beyondcorp_clientconnectorservices_v1.UpdateClientConnectorServiceRequest(
-        client_connector_service=client_connector_service,
-    )
-
-    # Make the request
-    operation = await client.update_client_connector_service(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = await operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END beyondcorp_v1_generated_ClientConnectorServicesService_UpdateClientConnectorService_async]
diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/samples/generated_samples/beyondcorp_v1_generated_client_connector_services_service_update_client_connector_service_sync.py b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/samples/generated_samples/beyondcorp_v1_generated_client_connector_services_service_update_client_connector_service_sync.py
deleted file mode 100644
index 233063683975..000000000000
--- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/samples/generated_samples/beyondcorp_v1_generated_client_connector_services_service_update_client_connector_service_sync.py
+++ /dev/null
@@ -1,63 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for UpdateClientConnectorService
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-beyondcorp-clientconnectorservices
-
-
-# [START beyondcorp_v1_generated_ClientConnectorServicesService_UpdateClientConnectorService_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import beyondcorp_clientconnectorservices_v1
-
-
-def sample_update_client_connector_service():
-    # Create a client
-    client = beyondcorp_clientconnectorservices_v1.ClientConnectorServicesServiceClient()
-
-    # Initialize request argument(s)
-    client_connector_service = beyondcorp_clientconnectorservices_v1.ClientConnectorService()
-    client_connector_service.name = "name_value"
-    client_connector_service.ingress.config.transport_protocol = "TCP"
-    destination_route = beyondcorp_clientconnectorservices_v1.ClientConnectorService.Ingress.Config.DestinationRoute(address="address_value", netmask="netmask_value")
-    client_connector_service.ingress.config.destination_routes = [destination_route]
-    client_connector_service.egress.peered_vpc.network_vpc = "network_vpc_value"
-
-    request = beyondcorp_clientconnectorservices_v1.UpdateClientConnectorServiceRequest(
-        client_connector_service=client_connector_service,
-    )
-
-    # Make the request
-    operation = client.update_client_connector_service(request=request)
-
-    print("Waiting for operation to complete...")
-
-    response = operation.result()
-
-    # Handle the response
-    print(response)
-
-# [END beyondcorp_v1_generated_ClientConnectorServicesService_UpdateClientConnectorService_sync]
diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientconnectorservices.v1.json b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientconnectorservices.v1.json
deleted file mode 100644
index e2e2d8f829d9..000000000000
--- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientconnectorservices.v1.json
+++ /dev/null
@@ -1,844 +0,0 @@
-{
-  "clientLibrary": {
-    "apis": [
-      {
-        "id": "google.cloud.beyondcorp.clientconnectorservices.v1",
-        "version": "v1"
-      }
-    ],
-    "language": "PYTHON",
-    "name": "google-cloud-beyondcorp-clientconnectorservices",
-    "version": "0.1.0"
-  },
-  "snippets": [
-    {
-      "canonical": true,
-      "clientMethod": {
-        "async": true,
-        "client": {
-          "fullName": "google.cloud.beyondcorp_clientconnectorservices_v1.ClientConnectorServicesServiceAsyncClient",
-          "shortName": "ClientConnectorServicesServiceAsyncClient"
-        },
-        "fullName": "google.cloud.beyondcorp_clientconnectorservices_v1.ClientConnectorServicesServiceAsyncClient.create_client_connector_service",
-        "method": {
-          "fullName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService.CreateClientConnectorService",
-          "service": {
-            "fullName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService",
-            "shortName": "ClientConnectorServicesService"
-          },
-          "shortName": "CreateClientConnectorService"
-        },
-        "parameters": [
-          {
-            "name": "request",
-            "type": "google.cloud.beyondcorp_clientconnectorservices_v1.types.CreateClientConnectorServiceRequest"
-          },
-          {
-            "name": "parent",
-            "type": "str"
-          },
-          {
-            "name": "client_connector_service",
-            "type": "google.cloud.beyondcorp_clientconnectorservices_v1.types.ClientConnectorService"
-          },
-          {
-            "name": "client_connector_service_id",
-            "type": "str"
-          },
-          {
-            "name": "retry",
-            "type": "google.api_core.retry.Retry"
-          },
-          {
-            "name": "timeout",
-            "type": "float"
-          },
-          {
-            "name": "metadata",
-            "type": 
"Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_client_connector_service" - }, - "description": "Sample for CreateClientConnectorService", - "file": "beyondcorp_v1_generated_client_connector_services_service_create_client_connector_service_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_ClientConnectorServicesService_CreateClientConnectorService_async", - "segments": [ - { - "end": 63, - "start": 27, - "type": "FULL" - }, - { - "end": 63, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 53, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 60, - "start": 54, - "type": "REQUEST_EXECUTION" - }, - { - "end": 64, - "start": 61, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_client_connector_services_service_create_client_connector_service_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.beyondcorp_clientconnectorservices_v1.ClientConnectorServicesServiceClient", - "shortName": "ClientConnectorServicesServiceClient" - }, - "fullName": "google.cloud.beyondcorp_clientconnectorservices_v1.ClientConnectorServicesServiceClient.create_client_connector_service", - "method": { - "fullName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService.CreateClientConnectorService", - "service": { - "fullName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService", - "shortName": "ClientConnectorServicesService" - }, - "shortName": "CreateClientConnectorService" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_clientconnectorservices_v1.types.CreateClientConnectorServiceRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "client_connector_service", - "type": "google.cloud.beyondcorp_clientconnectorservices_v1.types.ClientConnectorService" - }, - { - "name": "client_connector_service_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_client_connector_service" - }, - "description": "Sample for CreateClientConnectorService", - "file": "beyondcorp_v1_generated_client_connector_services_service_create_client_connector_service_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_ClientConnectorServicesService_CreateClientConnectorService_sync", - "segments": [ - { - "end": 63, - "start": 27, - "type": "FULL" - }, - { - "end": 63, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 53, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 60, - "start": 54, - "type": "REQUEST_EXECUTION" - }, - { - "end": 64, - "start": 61, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_client_connector_services_service_create_client_connector_service_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.beyondcorp_clientconnectorservices_v1.ClientConnectorServicesServiceAsyncClient", - "shortName": 
"ClientConnectorServicesServiceAsyncClient" - }, - "fullName": "google.cloud.beyondcorp_clientconnectorservices_v1.ClientConnectorServicesServiceAsyncClient.delete_client_connector_service", - "method": { - "fullName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService.DeleteClientConnectorService", - "service": { - "fullName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService", - "shortName": "ClientConnectorServicesService" - }, - "shortName": "DeleteClientConnectorService" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_clientconnectorservices_v1.types.DeleteClientConnectorServiceRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_client_connector_service" - }, - "description": "Sample for DeleteClientConnectorService", - "file": "beyondcorp_v1_generated_client_connector_services_service_delete_client_connector_service_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_ClientConnectorServicesService_DeleteClientConnectorService_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_client_connector_services_service_delete_client_connector_service_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.beyondcorp_clientconnectorservices_v1.ClientConnectorServicesServiceClient", - "shortName": "ClientConnectorServicesServiceClient" - }, - "fullName": "google.cloud.beyondcorp_clientconnectorservices_v1.ClientConnectorServicesServiceClient.delete_client_connector_service", - "method": { - "fullName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService.DeleteClientConnectorService", - "service": { - "fullName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService", - "shortName": "ClientConnectorServicesService" - }, - "shortName": "DeleteClientConnectorService" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_clientconnectorservices_v1.types.DeleteClientConnectorServiceRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_client_connector_service" - }, - "description": "Sample for DeleteClientConnectorService", - "file": "beyondcorp_v1_generated_client_connector_services_service_delete_client_connector_service_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_ClientConnectorServicesService_DeleteClientConnectorService_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - 
}, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_client_connector_services_service_delete_client_connector_service_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.beyondcorp_clientconnectorservices_v1.ClientConnectorServicesServiceAsyncClient", - "shortName": "ClientConnectorServicesServiceAsyncClient" - }, - "fullName": "google.cloud.beyondcorp_clientconnectorservices_v1.ClientConnectorServicesServiceAsyncClient.get_client_connector_service", - "method": { - "fullName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService.GetClientConnectorService", - "service": { - "fullName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService", - "shortName": "ClientConnectorServicesService" - }, - "shortName": "GetClientConnectorService" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_clientconnectorservices_v1.types.GetClientConnectorServiceRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.beyondcorp_clientconnectorservices_v1.types.ClientConnectorService", - "shortName": "get_client_connector_service" - }, - "description": "Sample for GetClientConnectorService", - "file": "beyondcorp_v1_generated_client_connector_services_service_get_client_connector_service_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_ClientConnectorServicesService_GetClientConnectorService_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_client_connector_services_service_get_client_connector_service_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.beyondcorp_clientconnectorservices_v1.ClientConnectorServicesServiceClient", - "shortName": "ClientConnectorServicesServiceClient" - }, - "fullName": "google.cloud.beyondcorp_clientconnectorservices_v1.ClientConnectorServicesServiceClient.get_client_connector_service", - "method": { - "fullName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService.GetClientConnectorService", - "service": { - "fullName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService", - "shortName": "ClientConnectorServicesService" - }, - "shortName": "GetClientConnectorService" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_clientconnectorservices_v1.types.GetClientConnectorServiceRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": 
"google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.beyondcorp_clientconnectorservices_v1.types.ClientConnectorService", - "shortName": "get_client_connector_service" - }, - "description": "Sample for GetClientConnectorService", - "file": "beyondcorp_v1_generated_client_connector_services_service_get_client_connector_service_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_ClientConnectorServicesService_GetClientConnectorService_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_client_connector_services_service_get_client_connector_service_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.beyondcorp_clientconnectorservices_v1.ClientConnectorServicesServiceAsyncClient", - "shortName": "ClientConnectorServicesServiceAsyncClient" - }, - "fullName": "google.cloud.beyondcorp_clientconnectorservices_v1.ClientConnectorServicesServiceAsyncClient.list_client_connector_services", - "method": { - "fullName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService.ListClientConnectorServices", - "service": { - "fullName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService", - "shortName": "ClientConnectorServicesService" - }, - "shortName": "ListClientConnectorServices" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_clientconnectorservices_v1.types.ListClientConnectorServicesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.beyondcorp_clientconnectorservices_v1.services.client_connector_services_service.pagers.ListClientConnectorServicesAsyncPager", - "shortName": "list_client_connector_services" - }, - "description": "Sample for ListClientConnectorServices", - "file": "beyondcorp_v1_generated_client_connector_services_service_list_client_connector_services_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_ClientConnectorServicesService_ListClientConnectorServices_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_client_connector_services_service_list_client_connector_services_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.beyondcorp_clientconnectorservices_v1.ClientConnectorServicesServiceClient", - "shortName": 
"ClientConnectorServicesServiceClient" - }, - "fullName": "google.cloud.beyondcorp_clientconnectorservices_v1.ClientConnectorServicesServiceClient.list_client_connector_services", - "method": { - "fullName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService.ListClientConnectorServices", - "service": { - "fullName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService", - "shortName": "ClientConnectorServicesService" - }, - "shortName": "ListClientConnectorServices" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_clientconnectorservices_v1.types.ListClientConnectorServicesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.beyondcorp_clientconnectorservices_v1.services.client_connector_services_service.pagers.ListClientConnectorServicesPager", - "shortName": "list_client_connector_services" - }, - "description": "Sample for ListClientConnectorServices", - "file": "beyondcorp_v1_generated_client_connector_services_service_list_client_connector_services_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_ClientConnectorServicesService_ListClientConnectorServices_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_client_connector_services_service_list_client_connector_services_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.beyondcorp_clientconnectorservices_v1.ClientConnectorServicesServiceAsyncClient", - "shortName": "ClientConnectorServicesServiceAsyncClient" - }, - "fullName": "google.cloud.beyondcorp_clientconnectorservices_v1.ClientConnectorServicesServiceAsyncClient.update_client_connector_service", - "method": { - "fullName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService.UpdateClientConnectorService", - "service": { - "fullName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService", - "shortName": "ClientConnectorServicesService" - }, - "shortName": "UpdateClientConnectorService" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_clientconnectorservices_v1.types.UpdateClientConnectorServiceRequest" - }, - { - "name": "client_connector_service", - "type": "google.cloud.beyondcorp_clientconnectorservices_v1.types.ClientConnectorService" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_client_connector_service" - }, - "description": "Sample for UpdateClientConnectorService", - "file": 
"beyondcorp_v1_generated_client_connector_services_service_update_client_connector_service_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_ClientConnectorServicesService_UpdateClientConnectorService_async", - "segments": [ - { - "end": 62, - "start": 27, - "type": "FULL" - }, - { - "end": 62, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 52, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 59, - "start": 53, - "type": "REQUEST_EXECUTION" - }, - { - "end": 63, - "start": 60, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_client_connector_services_service_update_client_connector_service_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.beyondcorp_clientconnectorservices_v1.ClientConnectorServicesServiceClient", - "shortName": "ClientConnectorServicesServiceClient" - }, - "fullName": "google.cloud.beyondcorp_clientconnectorservices_v1.ClientConnectorServicesServiceClient.update_client_connector_service", - "method": { - "fullName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService.UpdateClientConnectorService", - "service": { - "fullName": "google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesService", - "shortName": "ClientConnectorServicesService" - }, - "shortName": "UpdateClientConnectorService" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_clientconnectorservices_v1.types.UpdateClientConnectorServiceRequest" - }, - { - "name": "client_connector_service", - "type": "google.cloud.beyondcorp_clientconnectorservices_v1.types.ClientConnectorService" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_client_connector_service" - }, - "description": "Sample for UpdateClientConnectorService", - "file": "beyondcorp_v1_generated_client_connector_services_service_update_client_connector_service_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_ClientConnectorServicesService_UpdateClientConnectorService_sync", - "segments": [ - { - "end": 62, - "start": 27, - "type": "FULL" - }, - { - "end": 62, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 52, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 59, - "start": 53, - "type": "REQUEST_EXECUTION" - }, - { - "end": 63, - "start": 60, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_client_connector_services_service_update_client_connector_service_sync.py" - } - ] -} diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/scripts/fixup_beyondcorp_clientconnectorservices_v1_keywords.py b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/scripts/fixup_beyondcorp_clientconnectorservices_v1_keywords.py deleted file mode 100644 index 1cf05fac75f9..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/scripts/fixup_beyondcorp_clientconnectorservices_v1_keywords.py +++ /dev/null @@ -1,180 
+0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class beyondcorp_clientconnectorservicesCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_client_connector_service': ('parent', 'client_connector_service', 'client_connector_service_id', 'request_id', 'validate_only', ), - 'delete_client_connector_service': ('name', 'request_id', 'validate_only', ), - 'get_client_connector_service': ('name', ), - 'list_client_connector_services': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'update_client_connector_service': ('update_mask', 'client_connector_service', 'request_id', 'validate_only', 'allow_missing', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=beyondcorp_clientconnectorservicesCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. 
- - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the beyondcorp_clientconnectorservices client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/setup.py b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/setup.py deleted file mode 100644 index a09090f77e3b..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/setup.py +++ /dev/null @@ -1,99 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
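
Stepping back to the fixup script above: to make its rewrite concrete, here is a hedged before/after sketch of a single call site (the resource name, timeout value, and client construction are illustrative, not taken from the script). Per the METHOD_TO_PARAMS table, every non-control argument is folded into one request dict, while the retry/timeout/metadata control parameters stay as ordinary keywords:

from google.cloud import beyondcorp_clientconnectorservices_v1

client = beyondcorp_clientconnectorservices_v1.ClientConnectorServicesServiceClient()

# Before the fixup, call sites used the flattened convention (kept as a
# comment, since this is exactly the form the script rewrites):
#     client.get_client_connector_service(
#         "projects/p/locations/us-central1/clientConnectorServices/my-service",
#         timeout=30.0)

# After the fixup: arguments are gathered into a single `request` dict and
# the control parameter `timeout` is left as a keyword argument.
response = client.get_client_connector_service(
    request={'name': "projects/p/locations/us-central1/clientConnectorServices/my-service"},
    timeout=30.0,
)

The script is driven from the command line with its -d/--input-directory and -o/--output-directory flags, writing fixed copies to the output directory rather than editing files in place.
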
-# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-beyondcorp-clientconnectorservices' - - -description = "Google Cloud Beyondcorp Clientconnectorservices API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/beyondcorp_clientconnectorservices/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "grpc-google-iam-v1 >= 0.14.0, <1.0.0dev", -] -extras = { -} -url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-clientconnectorservices" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - extras_require=extras, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/testing/constraints-3.10.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/testing/constraints-3.10.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/testing/constraints-3.11.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/testing/constraints-3.11.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/testing/constraints-3.12.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/testing/constraints-3.12.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/testing/constraints-3.13.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/testing/constraints-3.13.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/testing/constraints-3.7.txt deleted file mode 100644 index fb7e93a1b473..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/testing/constraints-3.7.txt +++ /dev/null @@ -1,11 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -proto-plus==1.22.3 -protobuf==3.20.2 -grpc-google-iam-v1==0.14.0 diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/testing/constraints-3.8.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/testing/constraints-3.8.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/testing/constraints-3.9.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/testing/constraints-3.9.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/tests/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/tests/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/tests/unit/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/tests/unit/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/tests/unit/gapic/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/tests/unit/gapic/beyondcorp_clientconnectorservices_v1/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/tests/unit/gapic/beyondcorp_clientconnectorservices_v1/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/tests/unit/gapic/beyondcorp_clientconnectorservices_v1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/tests/unit/gapic/beyondcorp_clientconnectorservices_v1/test_client_connector_services_service.py b/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/tests/unit/gapic/beyondcorp_clientconnectorservices_v1/test_client_connector_services_service.py deleted file mode 100644 index 1db15c0925ad..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientconnectorservices/v1/tests/unit/gapic/beyondcorp_clientconnectorservices_v1/test_client_connector_services_service.py +++ /dev/null @@ -1,7028 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation -from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.beyondcorp_clientconnectorservices_v1.services.client_connector_services_service import ClientConnectorServicesServiceAsyncClient -from google.cloud.beyondcorp_clientconnectorservices_v1.services.client_connector_services_service import ClientConnectorServicesServiceClient -from google.cloud.beyondcorp_clientconnectorservices_v1.services.client_connector_services_service import pagers -from google.cloud.beyondcorp_clientconnectorservices_v1.services.client_connector_services_service import transports -from google.cloud.beyondcorp_clientconnectorservices_v1.types import client_connector_services_service -from google.cloud.location import locations_pb2 -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -import google.auth - - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": "service account credentials", - "principal": "service-account@example.com", -} -CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. 
-# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert ClientConnectorServicesServiceClient._get_default_mtls_endpoint(None) is None - assert ClientConnectorServicesServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert ClientConnectorServicesServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert ClientConnectorServicesServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert ClientConnectorServicesServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert ClientConnectorServicesServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert ClientConnectorServicesServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert ClientConnectorServicesServiceClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert ClientConnectorServicesServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - ClientConnectorServicesServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert ClientConnectorServicesServiceClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert ClientConnectorServicesServiceClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert ClientConnectorServicesServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - ClientConnectorServicesServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert ClientConnectorServicesServiceClient._read_environment_variables() == (False, "auto", "foo.com") - -def 
test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert ClientConnectorServicesServiceClient._get_client_cert_source(None, False) is None - assert ClientConnectorServicesServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert ClientConnectorServicesServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert ClientConnectorServicesServiceClient._get_client_cert_source(None, True) is mock_default_cert_source - assert ClientConnectorServicesServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(ClientConnectorServicesServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ClientConnectorServicesServiceClient)) -@mock.patch.object(ClientConnectorServicesServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ClientConnectorServicesServiceAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = ClientConnectorServicesServiceClient._DEFAULT_UNIVERSE - default_endpoint = ClientConnectorServicesServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = ClientConnectorServicesServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert ClientConnectorServicesServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert ClientConnectorServicesServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == ClientConnectorServicesServiceClient.DEFAULT_MTLS_ENDPOINT - assert ClientConnectorServicesServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert ClientConnectorServicesServiceClient._get_api_endpoint(None, None, default_universe, "always") == ClientConnectorServicesServiceClient.DEFAULT_MTLS_ENDPOINT - assert ClientConnectorServicesServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == ClientConnectorServicesServiceClient.DEFAULT_MTLS_ENDPOINT - assert ClientConnectorServicesServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert ClientConnectorServicesServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - ClientConnectorServicesServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." 
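
The assertions above pin down the endpoint-resolution order. As a minimal standalone sketch (the function and constant names are illustrative, not the client's private API, and the real client raises MutualTLSChannelError where this sketch raises a plain ValueError), the behavior the tests expect is:

_DEFAULT_UNIVERSE = "googleapis.com"
_ENDPOINT_TEMPLATE = "beyondcorp.{UNIVERSE_DOMAIN}"
_MTLS_ENDPOINT = "beyondcorp.mtls.googleapis.com"

def resolve_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint):
    # 1. An explicit api_endpoint override always wins.
    if api_override is not None:
        return api_override
    # 2. "always", or "auto" with a client certificate available, selects the
    #    mTLS endpoint, which is only supported in the default universe.
    if use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
        if universe_domain != _DEFAULT_UNIVERSE:
            raise ValueError("mTLS is not supported in any universe other than googleapis.com.")
        return _MTLS_ENDPOINT
    # 3. Otherwise fall back to the universe-qualified default endpoint.
    return _ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain)

assert resolve_api_endpoint("foo.com", None, _DEFAULT_UNIVERSE, "always") == "foo.com"
assert resolve_api_endpoint(None, None, "bar.com", "never") == "beyondcorp.bar.com"
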
- - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert ClientConnectorServicesServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert ClientConnectorServicesServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert ClientConnectorServicesServiceClient._get_universe_domain(None, None) == ClientConnectorServicesServiceClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - ClientConnectorServicesServiceClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." - -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = ClientConnectorServicesServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - -@pytest.mark.parametrize("error_code", [401,403,404,500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = ClientConnectorServicesServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - -@pytest.mark.parametrize("client_class,transport_name", [ - (ClientConnectorServicesServiceClient, "grpc"), - (ClientConnectorServicesServiceAsyncClient, "grpc_asyncio"), - (ClientConnectorServicesServiceClient, "rest"), -]) -def test_client_connector_services_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'beyondcorp.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://beyondcorp.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.ClientConnectorServicesServiceGrpcTransport, "grpc"), - (transports.ClientConnectorServicesServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.ClientConnectorServicesServiceRestTransport, "rest"), -]) -def test_client_connector_services_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - 
use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (ClientConnectorServicesServiceClient, "grpc"), - (ClientConnectorServicesServiceAsyncClient, "grpc_asyncio"), - (ClientConnectorServicesServiceClient, "rest"), -]) -def test_client_connector_services_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'beyondcorp.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://beyondcorp.googleapis.com' - ) - - -def test_client_connector_services_service_client_get_transport_class(): - transport = ClientConnectorServicesServiceClient.get_transport_class() - available_transports = [ - transports.ClientConnectorServicesServiceGrpcTransport, - transports.ClientConnectorServicesServiceRestTransport, - ] - assert transport in available_transports - - transport = ClientConnectorServicesServiceClient.get_transport_class("grpc") - assert transport == transports.ClientConnectorServicesServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (ClientConnectorServicesServiceClient, transports.ClientConnectorServicesServiceGrpcTransport, "grpc"), - (ClientConnectorServicesServiceAsyncClient, transports.ClientConnectorServicesServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (ClientConnectorServicesServiceClient, transports.ClientConnectorServicesServiceRestTransport, "rest"), -]) -@mock.patch.object(ClientConnectorServicesServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ClientConnectorServicesServiceClient)) -@mock.patch.object(ClientConnectorServicesServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ClientConnectorServicesServiceAsyncClient)) -def test_client_connector_services_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(ClientConnectorServicesServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(ClientConnectorServicesServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. 
- options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (ClientConnectorServicesServiceClient, transports.ClientConnectorServicesServiceGrpcTransport, "grpc", "true"), - (ClientConnectorServicesServiceAsyncClient, transports.ClientConnectorServicesServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (ClientConnectorServicesServiceClient, transports.ClientConnectorServicesServiceGrpcTransport, "grpc", "false"), - (ClientConnectorServicesServiceAsyncClient, transports.ClientConnectorServicesServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (ClientConnectorServicesServiceClient, transports.ClientConnectorServicesServiceRestTransport, "rest", "true"), - (ClientConnectorServicesServiceClient, transports.ClientConnectorServicesServiceRestTransport, "rest", "false"), -]) -@mock.patch.object(ClientConnectorServicesServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ClientConnectorServicesServiceClient)) -@mock.patch.object(ClientConnectorServicesServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ClientConnectorServicesServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_client_connector_services_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - ClientConnectorServicesServiceClient, ClientConnectorServicesServiceAsyncClient -]) -@mock.patch.object(ClientConnectorServicesServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ClientConnectorServicesServiceClient)) -@mock.patch.object(ClientConnectorServicesServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ClientConnectorServicesServiceAsyncClient)) -def test_client_connector_services_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
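The autoswitch cases above reduce to one pairing rule between the client certificate and the host. A condensed illustrative sketch (resolve_cert_and_host is a made-up name; the real decision happens inside the generated client); the get_mtls_endpoint_and_cert_source cases continue below.

.. code-block:: python

    from typing import Callable, Optional, Tuple

    def resolve_cert_and_host(
        use_client_cert: str,             # GOOGLE_API_USE_CLIENT_CERTIFICATE
        cert_source: Optional[Callable],  # explicit callback or ADC-discovered one
        default_host: str,
        mtls_host: str,
    ) -> Tuple[Optional[Callable], str]:
        # Mirrors the expected_client_cert_source / expected_host pairs asserted
        # above: a cert is used only when the env var is "true" AND some cert
        # source exists; otherwise both fall back to the defaults.
        if use_client_cert == "true" and cert_source is not None:
            return cert_source, mtls_host
        return None, default_host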
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - ClientConnectorServicesServiceClient, ClientConnectorServicesServiceAsyncClient -]) -@mock.patch.object(ClientConnectorServicesServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ClientConnectorServicesServiceClient)) -@mock.patch.object(ClientConnectorServicesServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ClientConnectorServicesServiceAsyncClient)) -def test_client_connector_services_service_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = ClientConnectorServicesServiceClient._DEFAULT_UNIVERSE - default_endpoint = ClientConnectorServicesServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = ClientConnectorServicesServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
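Taken together, the cases above amount to a precedence order for the effective endpoint. A condensed sketch under stated assumptions: the helper is hypothetical, and the host names follow the beyondcorp.googleapis.com default asserted later in this file's create_channel test. The universe-domain case itself follows below.

.. code-block:: python

    def effective_endpoint(api_endpoint_override, mtls_mode, universe_domain):
        # 1. An explicit ClientOptions.api_endpoint always wins.
        if api_endpoint_override:
            return api_endpoint_override
        # 2. GOOGLE_API_USE_MTLS_ENDPOINT="always" forces the mTLS host.
        if mtls_mode == "always":
            return "beyondcorp.mtls.googleapis.com"
        # 3. Otherwise the default template is filled with the universe domain
        #    (the Google Default Universe when none is configured).
        return "beyondcorp.{UNIVERSE_DOMAIN}".format(
            UNIVERSE_DOMAIN=universe_domain or "googleapis.com")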
- options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (ClientConnectorServicesServiceClient, transports.ClientConnectorServicesServiceGrpcTransport, "grpc"), - (ClientConnectorServicesServiceAsyncClient, transports.ClientConnectorServicesServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (ClientConnectorServicesServiceClient, transports.ClientConnectorServicesServiceRestTransport, "rest"), -]) -def test_client_connector_services_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (ClientConnectorServicesServiceClient, transports.ClientConnectorServicesServiceGrpcTransport, "grpc", grpc_helpers), - (ClientConnectorServicesServiceAsyncClient, transports.ClientConnectorServicesServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (ClientConnectorServicesServiceClient, transports.ClientConnectorServicesServiceRestTransport, "rest", None), -]) -def test_client_connector_services_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_client_connector_services_service_client_client_options_from_dict(): - with mock.patch('google.cloud.beyondcorp_clientconnectorservices_v1.services.client_connector_services_service.transports.ClientConnectorServicesServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = ClientConnectorServicesServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (ClientConnectorServicesServiceClient, transports.ClientConnectorServicesServiceGrpcTransport, "grpc", grpc_helpers), - (ClientConnectorServicesServiceAsyncClient, transports.ClientConnectorServicesServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_client_connector_services_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
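For orientation before the patched block below: google.auth.load_credentials_from_file, the exact function the next assertions intercept, is the public google-auth entry point that turns a credentials JSON file into a credentials object. A minimal usage sketch:

.. code-block:: python

    import google.auth

    def creds_from_file(path):
        # Returns a (credentials, project_id) tuple; project_id may be None
        # for credential types that do not carry one.
        return google.auth.load_credentials_from_file(path)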
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "beyondcorp.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="beyondcorp.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - client_connector_services_service.ListClientConnectorServicesRequest, - dict, -]) -def test_list_client_connector_services(request_type, transport: str = 'grpc'): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_client_connector_services), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = client_connector_services_service.ListClientConnectorServicesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - response = client.list_client_connector_services(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = client_connector_services_service.ListClientConnectorServicesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListClientConnectorServicesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -def test_list_client_connector_services_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = client_connector_services_service.ListClientConnectorServicesRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_client_connector_services), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.list_client_connector_services(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == client_connector_services_service.ListClientConnectorServicesRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - -def test_list_client_connector_services_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_client_connector_services in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_client_connector_services] = mock_rpc - request = {} - client.list_client_connector_services(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_client_connector_services(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_client_connector_services_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_client_connector_services in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_client_connector_services] = mock_rpc - - request = {} - await client.list_client_connector_services(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_client_connector_services(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_client_connector_services_async(transport: str = 'grpc_asyncio', request_type=client_connector_services_service.ListClientConnectorServicesRequest): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
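The cached-wrapped-RPC tests above assert a simple construction-time pattern: wrap every RPC exactly once, then dispatch through the cache. A self-contained sketch of that shape (names simplified; the real transport wraps with gapic_v1.method.wrap_method to attach retry, timeout, and metadata handling). The async call mock continues below.

.. code-block:: python

    class FakeTransport:
        def __init__(self, rpcs):
            # Build the wrapper cache exactly once, at construction time.
            self._wrapped_methods = dict(rpcs)

        def call(self, name, request):
            # Later calls only look up the cached wrapper; nothing is re-wrapped.
            return self._wrapped_methods[name](request)

    transport = FakeTransport({"list": lambda request: ["page"]})
    assert transport.call("list", {}) == ["page"]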
- with mock.patch.object( - type(client.transport.list_client_connector_services), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(client_connector_services_service.ListClientConnectorServicesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - response = await client.list_client_connector_services(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = client_connector_services_service.ListClientConnectorServicesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListClientConnectorServicesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -@pytest.mark.asyncio -async def test_list_client_connector_services_async_from_dict(): - await test_list_client_connector_services_async(request_type=dict) - -def test_list_client_connector_services_field_headers(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = client_connector_services_service.ListClientConnectorServicesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_client_connector_services), - '__call__') as call: - call.return_value = client_connector_services_service.ListClientConnectorServicesResponse() - client.list_client_connector_services(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_client_connector_services_field_headers_async(): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = client_connector_services_service.ListClientConnectorServicesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_client_connector_services), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(client_connector_services_service.ListClientConnectorServicesResponse()) - await client.list_client_connector_services(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_client_connector_services_flattened(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request.
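The field-header tests above expect metadata of the form ('x-goog-request-params', 'parent=parent_value'). A simplified stand-in for what gapic_v1.routing_header produces (a sketch; the real helper also handles repeated and non-string params). The flattened-argument test continues below.

.. code-block:: python

    from urllib.parse import quote

    def routing_header(params: dict) -> str:
        # URL-encode each value and join key=value pairs with '&'.
        return "&".join(f"{k}={quote(str(v), safe='')}" for k, v in params.items())

    assert routing_header({"parent": "parent_value"}) == "parent=parent_value"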
- with mock.patch.object( - type(client.transport.list_client_connector_services), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = client_connector_services_service.ListClientConnectorServicesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_client_connector_services( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_client_connector_services_flattened_error(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_client_connector_services( - client_connector_services_service.ListClientConnectorServicesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_client_connector_services_flattened_async(): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_client_connector_services), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(client_connector_services_service.ListClientConnectorServicesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_client_connector_services( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_client_connector_services_flattened_error_async(): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_client_connector_services( - client_connector_services_service.ListClientConnectorServicesRequest(), - parent='parent_value', - ) - - -def test_list_client_connector_services_pager(transport_name: str = "grpc"): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_client_connector_services), - '__call__') as call: - # Set the response to a series of pages.
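The pager test below feeds four pages plus a trailing RuntimeError through side_effect. The loop a pager runs is essentially the following, shown as a simplified model with the request as a plain dict:

.. code-block:: python

    def iterate_pages(call, request):
        # Keep issuing the RPC, carrying next_page_token forward, until the
        # service returns an empty token.
        while True:
            page = call(request)
            yield page
            if not page.next_page_token:
                return
            request["page_token"] = page.next_page_token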
- call.side_effect = ( - client_connector_services_service.ListClientConnectorServicesResponse( - client_connector_services=[ - client_connector_services_service.ClientConnectorService(), - client_connector_services_service.ClientConnectorService(), - client_connector_services_service.ClientConnectorService(), - ], - next_page_token='abc', - ), - client_connector_services_service.ListClientConnectorServicesResponse( - client_connector_services=[], - next_page_token='def', - ), - client_connector_services_service.ListClientConnectorServicesResponse( - client_connector_services=[ - client_connector_services_service.ClientConnectorService(), - ], - next_page_token='ghi', - ), - client_connector_services_service.ListClientConnectorServicesResponse( - client_connector_services=[ - client_connector_services_service.ClientConnectorService(), - client_connector_services_service.ClientConnectorService(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_client_connector_services(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, client_connector_services_service.ClientConnectorService) - for i in results) -def test_list_client_connector_services_pages(transport_name: str = "grpc"): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_client_connector_services), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - client_connector_services_service.ListClientConnectorServicesResponse( - client_connector_services=[ - client_connector_services_service.ClientConnectorService(), - client_connector_services_service.ClientConnectorService(), - client_connector_services_service.ClientConnectorService(), - ], - next_page_token='abc', - ), - client_connector_services_service.ListClientConnectorServicesResponse( - client_connector_services=[], - next_page_token='def', - ), - client_connector_services_service.ListClientConnectorServicesResponse( - client_connector_services=[ - client_connector_services_service.ClientConnectorService(), - ], - next_page_token='ghi', - ), - client_connector_services_service.ListClientConnectorServicesResponse( - client_connector_services=[ - client_connector_services_service.ClientConnectorService(), - client_connector_services_service.ClientConnectorService(), - ], - ), - RuntimeError, - ) - pages = list(client.list_client_connector_services(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_client_connector_services_async_pager(): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_client_connector_services), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
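In application code, the async pager exercised below is consumed with async for, mirroring the test body. A minimal sketch assuming an already-constructed ClientConnectorServicesServiceAsyncClient; the page series follows below.

.. code-block:: python

    async def collect_all(client):
        pager = await client.list_client_connector_services(request={})
        # The pager fetches follow-up pages transparently as iteration
        # crosses page boundaries.
        return [service async for service in pager]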
- call.side_effect = ( - client_connector_services_service.ListClientConnectorServicesResponse( - client_connector_services=[ - client_connector_services_service.ClientConnectorService(), - client_connector_services_service.ClientConnectorService(), - client_connector_services_service.ClientConnectorService(), - ], - next_page_token='abc', - ), - client_connector_services_service.ListClientConnectorServicesResponse( - client_connector_services=[], - next_page_token='def', - ), - client_connector_services_service.ListClientConnectorServicesResponse( - client_connector_services=[ - client_connector_services_service.ClientConnectorService(), - ], - next_page_token='ghi', - ), - client_connector_services_service.ListClientConnectorServicesResponse( - client_connector_services=[ - client_connector_services_service.ClientConnectorService(), - client_connector_services_service.ClientConnectorService(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_client_connector_services(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, client_connector_services_service.ClientConnectorService) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_client_connector_services_async_pages(): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_client_connector_services), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - client_connector_services_service.ListClientConnectorServicesResponse( - client_connector_services=[ - client_connector_services_service.ClientConnectorService(), - client_connector_services_service.ClientConnectorService(), - client_connector_services_service.ClientConnectorService(), - ], - next_page_token='abc', - ), - client_connector_services_service.ListClientConnectorServicesResponse( - client_connector_services=[], - next_page_token='def', - ), - client_connector_services_service.ListClientConnectorServicesResponse( - client_connector_services=[ - client_connector_services_service.ClientConnectorService(), - ], - next_page_token='ghi', - ), - client_connector_services_service.ListClientConnectorServicesResponse( - client_connector_services=[ - client_connector_services_service.ClientConnectorService(), - client_connector_services_service.ClientConnectorService(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_client_connector_services(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - client_connector_services_service.GetClientConnectorServiceRequest, - dict, -]) -def test_get_client_connector_service(request_type, transport: str = 'grpc'): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking 
out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_client_connector_service), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = client_connector_services_service.ClientConnectorService( - name='name_value', - display_name='display_name_value', - state=client_connector_services_service.ClientConnectorService.State.CREATING, - ) - response = client.get_client_connector_service(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = client_connector_services_service.GetClientConnectorServiceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, client_connector_services_service.ClientConnectorService) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.state == client_connector_services_service.ClientConnectorService.State.CREATING - - -def test_get_client_connector_service_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = client_connector_services_service.GetClientConnectorServiceRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_client_connector_service), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_client_connector_service(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == client_connector_services_service.GetClientConnectorServiceRequest( - name='name_value', - ) - -def test_get_client_connector_service_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_client_connector_service in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_client_connector_service] = mock_rpc - request = {} - client.get_client_connector_service(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_client_connector_service(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_client_connector_service_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_client_connector_service in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_client_connector_service] = mock_rpc - - request = {} - await client.get_client_connector_service(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_client_connector_service(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_client_connector_service_async(transport: str = 'grpc_asyncio', request_type=client_connector_services_service.GetClientConnectorServiceRequest): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_client_connector_service), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(client_connector_services_service.ClientConnectorService( - name='name_value', - display_name='display_name_value', - state=client_connector_services_service.ClientConnectorService.State.CREATING, - )) - response = await client.get_client_connector_service(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = client_connector_services_service.GetClientConnectorServiceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect.
- assert isinstance(response, client_connector_services_service.ClientConnectorService) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.state == client_connector_services_service.ClientConnectorService.State.CREATING - - -@pytest.mark.asyncio -async def test_get_client_connector_service_async_from_dict(): - await test_get_client_connector_service_async(request_type=dict) - -def test_get_client_connector_service_field_headers(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = client_connector_services_service.GetClientConnectorServiceRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_client_connector_service), - '__call__') as call: - call.return_value = client_connector_services_service.ClientConnectorService() - client.get_client_connector_service(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_client_connector_service_field_headers_async(): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = client_connector_services_service.GetClientConnectorServiceRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_client_connector_service), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(client_connector_services_service.ClientConnectorService()) - await client.get_client_connector_service(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_client_connector_service_flattened(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_client_connector_service), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = client_connector_services_service.ClientConnectorService() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_client_connector_service( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_client_connector_service_flattened_error(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_client_connector_service( - client_connector_services_service.GetClientConnectorServiceRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_client_connector_service_flattened_async(): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_client_connector_service), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(client_connector_services_service.ClientConnectorService()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_client_connector_service( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_client_connector_service_flattened_error_async(): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_client_connector_service( - client_connector_services_service.GetClientConnectorServiceRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - client_connector_services_service.CreateClientConnectorServiceRequest, - dict, -]) -def test_create_client_connector_service(request_type, transport: str = 'grpc'): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_client_connector_service), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.create_client_connector_service(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = client_connector_services_service.CreateClientConnectorServiceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect.
- assert isinstance(response, future.Future) - - -def test_create_client_connector_service_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = client_connector_services_service.CreateClientConnectorServiceRequest( - parent='parent_value', - client_connector_service_id='client_connector_service_id_value', - request_id='request_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_client_connector_service), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_client_connector_service(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == client_connector_services_service.CreateClientConnectorServiceRequest( - parent='parent_value', - client_connector_service_id='client_connector_service_id_value', - request_id='request_id_value', - ) - -def test_create_client_connector_service_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_client_connector_service in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_client_connector_service] = mock_rpc - request = {} - client.create_client_connector_service(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_client_connector_service(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_client_connector_service_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_client_connector_service in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_client_connector_service] = mock_rpc - - request = {} - await client.create_client_connector_service(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_client_connector_service(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_client_connector_service_async(transport: str = 'grpc_asyncio', request_type=client_connector_services_service.CreateClientConnectorServiceRequest): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_client_connector_service), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_client_connector_service(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = client_connector_services_service.CreateClientConnectorServiceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_client_connector_service_async_from_dict(): - await test_create_client_connector_service_async(request_type=dict) - -def test_create_client_connector_service_field_headers(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
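The create tests above assert that the response is a long-running-operation future. In application code that becomes the usual LRO pattern; a hedged usage sketch built from the same flattened arguments the tests use (the timeout value is illustrative). The field-header setup for create continues below.

.. code-block:: python

    def create_and_wait(client, parent, service, service_id):
        operation = client.create_client_connector_service(
            parent=parent,
            client_connector_service=service,
            client_connector_service_id=service_id,
        )
        # result() polls the operations service and blocks until completion.
        return operation.result(timeout=300)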
- request = client_connector_services_service.CreateClientConnectorServiceRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_client_connector_service), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_client_connector_service(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_client_connector_service_field_headers_async(): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = client_connector_services_service.CreateClientConnectorServiceRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_client_connector_service), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_client_connector_service(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_client_connector_service_flattened(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_client_connector_service), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_client_connector_service( - parent='parent_value', - client_connector_service=client_connector_services_service.ClientConnectorService(name='name_value'), - client_connector_service_id='client_connector_service_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].client_connector_service - mock_val = client_connector_services_service.ClientConnectorService(name='name_value') - assert arg == mock_val - arg = args[0].client_connector_service_id - mock_val = 'client_connector_service_id_value' - assert arg == mock_val - - -def test_create_client_connector_service_flattened_error(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
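The flattened_error tests around here all enforce one guard: a full request object and flattened keyword fields are mutually exclusive. Stated on its own as a sketch (the message only approximates the generated client's wording); the ValueError check continues below.

.. code-block:: python

    def guard_flattened(request, **flattened):
        # A request object and flattened keyword fields must not be mixed.
        has_flattened = any(v is not None for v in flattened.values())
        if request is not None and has_flattened:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set.")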
-    with pytest.raises(ValueError):
-        client.create_client_connector_service(
-            client_connector_services_service.CreateClientConnectorServiceRequest(),
-            parent='parent_value',
-            client_connector_service=client_connector_services_service.ClientConnectorService(name='name_value'),
-            client_connector_service_id='client_connector_service_id_value',
-        )
-
-@pytest.mark.asyncio
-async def test_create_client_connector_service_flattened_async():
-    client = ClientConnectorServicesServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_client_connector_service),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.create_client_connector_service(
-            parent='parent_value',
-            client_connector_service=client_connector_services_service.ClientConnectorService(name='name_value'),
-            client_connector_service_id='client_connector_service_id_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].client_connector_service
-        mock_val = client_connector_services_service.ClientConnectorService(name='name_value')
-        assert arg == mock_val
-        arg = args[0].client_connector_service_id
-        mock_val = 'client_connector_service_id_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_client_connector_service_flattened_error_async():
-    client = ClientConnectorServicesServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.create_client_connector_service(
-            client_connector_services_service.CreateClientConnectorServiceRequest(),
-            parent='parent_value',
-            client_connector_service=client_connector_services_service.ClientConnectorService(name='name_value'),
-            client_connector_service_id='client_connector_service_id_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    client_connector_services_service.UpdateClientConnectorServiceRequest,
-    dict,
-])
-def test_update_client_connector_service(request_type, transport: str = 'grpc'):
-    client = ClientConnectorServicesServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_client_connector_service),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/spam')
-        response = client.update_client_connector_service(request)
-
-        # Establish that the underlying gRPC stub method was called.
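-        # mock.Mock records every invocation in mock_calls as a
-        # (name, args, kwargs) triple, so the request that reached the stub
-        # can be recovered with, e.g.:
-        #
-        #     _, args, _ = call.mock_calls[0]   # args[0] is the request message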
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = client_connector_services_service.UpdateClientConnectorServiceRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, future.Future)
-
-
-def test_update_client_connector_service_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = ClientConnectorServicesServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 fields are populated automatically
-    # if they meet the requirements of AIP-4235.
-    request = client_connector_services_service.UpdateClientConnectorServiceRequest(
-        request_id='request_id_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_client_connector_service),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.update_client_connector_service(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == client_connector_services_service.UpdateClientConnectorServiceRequest(
-            request_id='request_id_value',
-        )
-
-def test_update_client_connector_service_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = ClientConnectorServicesServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.update_client_connector_service in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.update_client_connector_service] = mock_rpc
-        request = {}
-        client.update_client_connector_service(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        # Operation methods call wrapper_fn to build a cached
-        # client._transport.operations_client instance on first rpc call.
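-        # (Context, as an assumption about the transport internals: that
-        # lazy operations-client construction also goes through wrap_method,
-        # which is why wrapper_fn is reset before the second call.)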
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_client_connector_service(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_client_connector_service_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_client_connector_service in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_client_connector_service] = mock_rpc - - request = {} - await client.update_client_connector_service(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_client_connector_service(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_client_connector_service_async(transport: str = 'grpc_asyncio', request_type=client_connector_services_service.UpdateClientConnectorServiceRequest): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_client_connector_service), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.update_client_connector_service(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = client_connector_services_service.UpdateClientConnectorServiceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_update_client_connector_service_async_from_dict(): - await test_update_client_connector_service_async(request_type=dict) - -def test_update_client_connector_service_field_headers(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
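-    # Routing parameters can come from nested fields too; here the expected
-    # metadata (asserted below) is, for illustration:
-    #
-    #     ('x-goog-request-params', 'client_connector_service.name=name_value')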
- request = client_connector_services_service.UpdateClientConnectorServiceRequest() - - request.client_connector_service.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_client_connector_service), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_client_connector_service(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'client_connector_service.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_client_connector_service_field_headers_async(): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = client_connector_services_service.UpdateClientConnectorServiceRequest() - - request.client_connector_service.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_client_connector_service), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.update_client_connector_service(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'client_connector_service.name=name_value', - ) in kw['metadata'] - - -def test_update_client_connector_service_flattened(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_client_connector_service), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_client_connector_service( - client_connector_service=client_connector_services_service.ClientConnectorService(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].client_connector_service - mock_val = client_connector_services_service.ClientConnectorService(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_client_connector_service_flattened_error(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
-    with pytest.raises(ValueError):
-        client.update_client_connector_service(
-            client_connector_services_service.UpdateClientConnectorServiceRequest(),
-            client_connector_service=client_connector_services_service.ClientConnectorService(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-@pytest.mark.asyncio
-async def test_update_client_connector_service_flattened_async():
-    client = ClientConnectorServicesServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_client_connector_service),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.update_client_connector_service(
-            client_connector_service=client_connector_services_service.ClientConnectorService(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].client_connector_service
-        mock_val = client_connector_services_service.ClientConnectorService(name='name_value')
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_client_connector_service_flattened_error_async():
-    client = ClientConnectorServicesServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.update_client_connector_service(
-            client_connector_services_service.UpdateClientConnectorServiceRequest(),
-            client_connector_service=client_connector_services_service.ClientConnectorService(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    client_connector_services_service.DeleteClientConnectorServiceRequest,
-    dict,
-])
-def test_delete_client_connector_service(request_type, transport: str = 'grpc'):
-    client = ClientConnectorServicesServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_client_connector_service),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/spam')
-        response = client.delete_client_connector_service(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = client_connector_services_service.DeleteClientConnectorServiceRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
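-        # Long-running methods surface a google.api_core future wrapping an
-        # operations_pb2.Operation rather than the resource itself; typical
-        # (illustrative) caller code would be:
-        #
-        #     operation = client.delete_client_connector_service(request)
-        #     operation.result()   # blocks until the LRO completes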
-        assert isinstance(response, future.Future)
-
-
-def test_delete_client_connector_service_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = ClientConnectorServicesServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 fields are populated automatically
-    # if they meet the requirements of AIP-4235.
-    request = client_connector_services_service.DeleteClientConnectorServiceRequest(
-        name='name_value',
-        request_id='request_id_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_client_connector_service),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.delete_client_connector_service(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == client_connector_services_service.DeleteClientConnectorServiceRequest(
-            name='name_value',
-            request_id='request_id_value',
-        )
-
-def test_delete_client_connector_service_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = ClientConnectorServicesServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.delete_client_connector_service in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.delete_client_connector_service] = mock_rpc
-        request = {}
-        client.delete_client_connector_service(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        # Operation methods call wrapper_fn to build a cached
-        # client._transport.operations_client instance on first rpc call.
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_client_connector_service(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_client_connector_service_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_client_connector_service in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_client_connector_service] = mock_rpc - - request = {} - await client.delete_client_connector_service(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_client_connector_service(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_client_connector_service_async(transport: str = 'grpc_asyncio', request_type=client_connector_services_service.DeleteClientConnectorServiceRequest): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_client_connector_service), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_client_connector_service(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = client_connector_services_service.DeleteClientConnectorServiceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_client_connector_service_async_from_dict(): - await test_delete_client_connector_service_async(request_type=dict) - -def test_delete_client_connector_service_field_headers(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = client_connector_services_service.DeleteClientConnectorServiceRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_client_connector_service), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_client_connector_service(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_client_connector_service_field_headers_async(): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = client_connector_services_service.DeleteClientConnectorServiceRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_client_connector_service), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_client_connector_service(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_client_connector_service_flattened(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_client_connector_service), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_client_connector_service( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_client_connector_service_flattened_error(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_client_connector_service( - client_connector_services_service.DeleteClientConnectorServiceRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_client_connector_service_flattened_async(): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
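-    # Each RPC is exposed on the transport as a gRPC multicallable, so
-    # patching __call__ on its type intercepts the invocation before any
-    # channel traffic occurs; a minimal sketch of the pattern used below:
-    #
-    #     with mock.patch.object(type(stub_callable), '__call__') as call:
-    #         call.return_value = ...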
-    with mock.patch.object(
-            type(client.transport.delete_client_connector_service),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.delete_client_connector_service(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_client_connector_service_flattened_error_async():
-    client = ClientConnectorServicesServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.delete_client_connector_service(
-            client_connector_services_service.DeleteClientConnectorServiceRequest(),
-            name='name_value',
-        )
-
-
-def test_list_client_connector_services_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = ClientConnectorServicesServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.list_client_connector_services in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.list_client_connector_services] = mock_rpc
-
-        request = {}
-        client.list_client_connector_services(request)
-
-        # Establish that the underlying gRPC stub method was called.
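-        # list_client_connector_services is not an LRO, so no extra wrapper
-        # should be built on later calls; the counts below verify that the
-        # cached wrapped rpc is simply reused.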
- assert mock_rpc.call_count == 1 - - client.list_client_connector_services(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_client_connector_services_rest_required_fields(request_type=client_connector_services_service.ListClientConnectorServicesRequest): - transport_class = transports.ClientConnectorServicesServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_client_connector_services._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_client_connector_services._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = client_connector_services_service.ListClientConnectorServicesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
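-            # For context (the URI pattern is taken from the http rule tested
-            # later in this file): real transcoding would bind 'parent' into
-            # a URI such as
-            #
-            #     GET /v1/{parent=projects/*/locations/*}/clientConnectorServices
-            #
-            # so a fieldless URI keeps every field visible in query_params.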
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = client_connector_services_service.ListClientConnectorServicesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_client_connector_services(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_client_connector_services_rest_unset_required_fields(): - transport = transports.ClientConnectorServicesServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_client_connector_services._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_client_connector_services_rest_flattened(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = client_connector_services_service.ListClientConnectorServicesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = client_connector_services_service.ListClientConnectorServicesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_client_connector_services(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/clientConnectorServices" % client.transport._host, args[1]) - - -def test_list_client_connector_services_rest_flattened_error(transport: str = 'rest'): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
-    with pytest.raises(ValueError):
-        client.list_client_connector_services(
-            client_connector_services_service.ListClientConnectorServicesRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_client_connector_services_rest_pager(transport: str = 'rest'):
-    client = ClientConnectorServicesServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # Set the response as a series of pages
-        response = (
-            client_connector_services_service.ListClientConnectorServicesResponse(
-                client_connector_services=[
-                    client_connector_services_service.ClientConnectorService(),
-                    client_connector_services_service.ClientConnectorService(),
-                    client_connector_services_service.ClientConnectorService(),
-                ],
-                next_page_token='abc',
-            ),
-            client_connector_services_service.ListClientConnectorServicesResponse(
-                client_connector_services=[],
-                next_page_token='def',
-            ),
-            client_connector_services_service.ListClientConnectorServicesResponse(
-                client_connector_services=[
-                    client_connector_services_service.ClientConnectorService(),
-                ],
-                next_page_token='ghi',
-            ),
-            client_connector_services_service.ListClientConnectorServicesResponse(
-                client_connector_services=[
-                    client_connector_services_service.ClientConnectorService(),
-                    client_connector_services_service.ClientConnectorService(),
-                ],
-            ),
-        )
-        # Two responses for two calls
-        response = response + response
-
-        # Wrap the values into proper Response objs
-        response = tuple(client_connector_services_service.ListClientConnectorServicesResponse.to_json(x) for x in response)
-        return_values = tuple(Response() for i in response)
-        for return_val, response_val in zip(return_values, response):
-            return_val._content = response_val.encode('UTF-8')
-            return_val.status_code = 200
-        req.side_effect = return_values
-
-        sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
-        pager = client.list_client_connector_services(request=sample_request)
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, client_connector_services_service.ClientConnectorService)
-                for i in results)
-
-        pages = list(client.list_client_connector_services(request=sample_request).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-
-def test_get_client_connector_service_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = ClientConnectorServicesServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.get_client_connector_service in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
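-        # Swapping the entry in _transport._wrapped_methods makes the client
-        # dispatch straight to mock_rpc, so the call counts below measure
-        # reuse of the cached wrapper rather than real HTTP traffic.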
- client._transport._wrapped_methods[client._transport.get_client_connector_service] = mock_rpc - - request = {} - client.get_client_connector_service(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_client_connector_service(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_client_connector_service_rest_required_fields(request_type=client_connector_services_service.GetClientConnectorServiceRequest): - transport_class = transports.ClientConnectorServicesServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_client_connector_service._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_client_connector_service._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = client_connector_services_service.ClientConnectorService() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = client_connector_services_service.ClientConnectorService.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_client_connector_service(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_client_connector_service_rest_unset_required_fields(): - transport = transports.ClientConnectorServicesServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_client_connector_service._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_client_connector_service_rest_flattened(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = client_connector_services_service.ClientConnectorService() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/clientConnectorServices/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = client_connector_services_service.ClientConnectorService.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_client_connector_service(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/clientConnectorServices/*}" % client.transport._host, args[1]) - - -def test_get_client_connector_service_rest_flattened_error(transport: str = 'rest'): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_client_connector_service( - client_connector_services_service.GetClientConnectorServiceRequest(), - name='name_value', - ) - - -def test_create_client_connector_service_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_client_connector_service in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_client_connector_service] = mock_rpc - - request = {} - client.create_client_connector_service(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_client_connector_service(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_client_connector_service_rest_required_fields(request_type=client_connector_services_service.CreateClientConnectorServiceRequest): - transport_class = transports.ClientConnectorServicesServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_client_connector_service._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_client_connector_service._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("client_connector_service_id", "request_id", "validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. 
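-    # A requests.Response can be faked by hand for the session mock; the
-    # pattern used below is, in sketch form:
-    #
-    #     response_value = Response()
-    #     response_value.status_code = 200
-    #     response_value._content = json_return_value.encode('UTF-8')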
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_client_connector_service(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_client_connector_service_rest_unset_required_fields(): - transport = transports.ClientConnectorServicesServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_client_connector_service._get_unset_required_fields({}) - assert set(unset_fields) == (set(("clientConnectorServiceId", "requestId", "validateOnly", )) & set(("parent", "clientConnectorService", ))) - - -def test_create_client_connector_service_rest_flattened(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - client_connector_service=client_connector_services_service.ClientConnectorService(name='name_value'), - client_connector_service_id='client_connector_service_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_client_connector_service(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/clientConnectorServices" % client.transport._host, args[1]) - - -def test_create_client_connector_service_rest_flattened_error(transport: str = 'rest'): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_client_connector_service( - client_connector_services_service.CreateClientConnectorServiceRequest(), - parent='parent_value', - client_connector_service=client_connector_services_service.ClientConnectorService(name='name_value'), - client_connector_service_id='client_connector_service_id_value', - ) - - -def test_update_client_connector_service_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_client_connector_service in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_client_connector_service] = mock_rpc - - request = {} - client.update_client_connector_service(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_client_connector_service(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_client_connector_service_rest_required_fields(request_type=client_connector_services_service.UpdateClientConnectorServiceRequest): - transport_class = transports.ClientConnectorServicesServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_client_connector_service._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_client_connector_service._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("allow_missing", "request_id", "update_mask", "validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_client_connector_service(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_client_connector_service_rest_unset_required_fields(): - transport = transports.ClientConnectorServicesServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_client_connector_service._get_unset_required_fields({}) - assert set(unset_fields) == (set(("allowMissing", "requestId", "updateMask", "validateOnly", )) & set(("updateMask", "clientConnectorService", ))) - - -def test_update_client_connector_service_rest_flattened(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'client_connector_service': {'name': 'projects/sample1/locations/sample2/clientConnectorServices/sample3'}} - - # get truthy value for each flattened field - mock_args = dict( - client_connector_service=client_connector_services_service.ClientConnectorService(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_client_connector_service(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
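-    # path_template.validate() checks the URI actually requested against the
-    # method's http rule; for this RPC the expected pattern (as asserted
-    # below) is
-    #
-    #     /v1/{client_connector_service.name=projects/*/locations/*/clientConnectorServices/*}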
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{client_connector_service.name=projects/*/locations/*/clientConnectorServices/*}" % client.transport._host, args[1]) - - -def test_update_client_connector_service_rest_flattened_error(transport: str = 'rest'): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_client_connector_service( - client_connector_services_service.UpdateClientConnectorServiceRequest(), - client_connector_service=client_connector_services_service.ClientConnectorService(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_delete_client_connector_service_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_client_connector_service in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_client_connector_service] = mock_rpc - - request = {} - client.delete_client_connector_service(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_client_connector_service(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_client_connector_service_rest_required_fields(request_type=client_connector_services_service.DeleteClientConnectorServiceRequest): - transport_class = transports.ClientConnectorServicesServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_client_connector_service._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_client_connector_service._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
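-    # The set difference below expresses the invariant being tested: every
-    # field still unset after defaulting must be an optional query
-    # parameter, i.e.
-    #
-    #     set(unset_fields) - {"request_id", "validate_only"} == set()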
-    assert not set(unset_fields) - set(("request_id", "validate_only", ))
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
-
-    client = ClientConnectorServicesServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = operations_pb2.Operation(name='operations/spam')
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "delete",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.delete_client_connector_service(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_delete_client_connector_service_rest_unset_required_fields():
-    transport = transports.ClientConnectorServicesServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.delete_client_connector_service._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("requestId", "validateOnly", )) & set(("name", )))
-
-
-def test_delete_client_connector_service_rest_flattened():
-    client = ClientConnectorServicesServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'name': 'projects/sample1/locations/sample2/clientConnectorServices/sample3'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            name='name_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.delete_client_connector_service(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/clientConnectorServices/*}" % client.transport._host, args[1]) - - -def test_delete_client_connector_service_rest_flattened_error(transport: str = 'rest'): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_client_connector_service( - client_connector_services_service.DeleteClientConnectorServiceRequest(), - name='name_value', - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.ClientConnectorServicesServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.ClientConnectorServicesServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ClientConnectorServicesServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.ClientConnectorServicesServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ClientConnectorServicesServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ClientConnectorServicesServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.ClientConnectorServicesServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ClientConnectorServicesServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.ClientConnectorServicesServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = ClientConnectorServicesServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.ClientConnectorServicesServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.ClientConnectorServicesServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.ClientConnectorServicesServiceGrpcTransport, - transports.ClientConnectorServicesServiceGrpcAsyncIOTransport, - transports.ClientConnectorServicesServiceRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = ClientConnectorServicesServiceClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_client_connector_services_empty_call_grpc(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_client_connector_services), - '__call__') as call: - call.return_value = client_connector_services_service.ListClientConnectorServicesResponse() - client.list_client_connector_services(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = client_connector_services_service.ListClientConnectorServicesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_client_connector_service_empty_call_grpc(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_client_connector_service), - '__call__') as call: - call.return_value = client_connector_services_service.ClientConnectorService() - client.get_client_connector_service(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = client_connector_services_service.GetClientConnectorServiceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_client_connector_service_empty_call_grpc(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.create_client_connector_service), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_client_connector_service(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = client_connector_services_service.CreateClientConnectorServiceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_client_connector_service_empty_call_grpc(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_client_connector_service), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.update_client_connector_service(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = client_connector_services_service.UpdateClientConnectorServiceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_client_connector_service_empty_call_grpc(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_client_connector_service), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_client_connector_service(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = client_connector_services_service.DeleteClientConnectorServiceRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = ClientConnectorServicesServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_client_connector_services_empty_call_grpc_asyncio(): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_client_connector_services), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(client_connector_services_service.ListClientConnectorServicesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - await client.list_client_connector_services(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = client_connector_services_service.ListClientConnectorServicesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_client_connector_service_empty_call_grpc_asyncio(): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_client_connector_service), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(client_connector_services_service.ClientConnectorService( - name='name_value', - display_name='display_name_value', - state=client_connector_services_service.ClientConnectorService.State.CREATING, - )) - await client.get_client_connector_service(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = client_connector_services_service.GetClientConnectorServiceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_client_connector_service_empty_call_grpc_asyncio(): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_client_connector_service), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_client_connector_service(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = client_connector_services_service.CreateClientConnectorServiceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_client_connector_service_empty_call_grpc_asyncio(): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_client_connector_service), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_client_connector_service(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = client_connector_services_service.UpdateClientConnectorServiceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_delete_client_connector_service_empty_call_grpc_asyncio(): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_client_connector_service), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_client_connector_service(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = client_connector_services_service.DeleteClientConnectorServiceRequest() - - assert args[0] == request_msg - - -def test_transport_kind_rest(): - transport = ClientConnectorServicesServiceClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" - - -def test_list_client_connector_services_rest_bad_request(request_type=client_connector_services_service.ListClientConnectorServicesRequest): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_client_connector_services(request) - - -@pytest.mark.parametrize("request_type", [ - client_connector_services_service.ListClientConnectorServicesRequest, - dict, -]) -def test_list_client_connector_services_rest_call_success(request_type): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = client_connector_services_service.ListClientConnectorServicesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = client_connector_services_service.ListClientConnectorServicesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_client_connector_services(request) - - # Establish that the response is the type that we expect. 
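-    # The REST client wraps the raw ListClientConnectorServicesResponse in a
-    # pager whose __getattr__ proxies to the underlying response, which is why
-    # next_page_token and unreachable can be asserted on the pager directly.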
- assert isinstance(response, pagers.ListClientConnectorServicesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_client_connector_services_rest_interceptors(null_interceptor): - transport = transports.ClientConnectorServicesServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ClientConnectorServicesServiceRestInterceptor(), - ) - client = ClientConnectorServicesServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ClientConnectorServicesServiceRestInterceptor, "post_list_client_connector_services") as post, \ - mock.patch.object(transports.ClientConnectorServicesServiceRestInterceptor, "post_list_client_connector_services_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.ClientConnectorServicesServiceRestInterceptor, "pre_list_client_connector_services") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = client_connector_services_service.ListClientConnectorServicesRequest.pb(client_connector_services_service.ListClientConnectorServicesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = client_connector_services_service.ListClientConnectorServicesResponse.to_json(client_connector_services_service.ListClientConnectorServicesResponse()) - req.return_value.content = return_value - - request = client_connector_services_service.ListClientConnectorServicesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = client_connector_services_service.ListClientConnectorServicesResponse() - post_with_metadata.return_value = client_connector_services_service.ListClientConnectorServicesResponse(), metadata - - client.list_client_connector_services(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_client_connector_service_rest_bad_request(request_type=client_connector_services_service.GetClientConnectorServiceRequest): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/clientConnectorServices/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_client_connector_service(request) - - -@pytest.mark.parametrize("request_type", [ - client_connector_services_service.GetClientConnectorServiceRequest, - dict, -]) -def test_get_client_connector_service_rest_call_success(request_type): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/clientConnectorServices/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = client_connector_services_service.ClientConnectorService( - name='name_value', - display_name='display_name_value', - state=client_connector_services_service.ClientConnectorService.State.CREATING, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = client_connector_services_service.ClientConnectorService.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_client_connector_service(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, client_connector_services_service.ClientConnectorService) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.state == client_connector_services_service.ClientConnectorService.State.CREATING - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_client_connector_service_rest_interceptors(null_interceptor): - transport = transports.ClientConnectorServicesServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ClientConnectorServicesServiceRestInterceptor(), - ) - client = ClientConnectorServicesServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ClientConnectorServicesServiceRestInterceptor, "post_get_client_connector_service") as post, \ - mock.patch.object(transports.ClientConnectorServicesServiceRestInterceptor, "post_get_client_connector_service_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.ClientConnectorServicesServiceRestInterceptor, "pre_get_client_connector_service") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = client_connector_services_service.GetClientConnectorServiceRequest.pb(client_connector_services_service.GetClientConnectorServiceRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = client_connector_services_service.ClientConnectorService.to_json(client_connector_services_service.ClientConnectorService()) - req.return_value.content = return_value - - request = client_connector_services_service.GetClientConnectorServiceRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = client_connector_services_service.ClientConnectorService() - post_with_metadata.return_value = client_connector_services_service.ClientConnectorService(), metadata - - client.get_client_connector_service(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_client_connector_service_rest_bad_request(request_type=client_connector_services_service.CreateClientConnectorServiceRequest): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_client_connector_service(request) - - -@pytest.mark.parametrize("request_type", [ - client_connector_services_service.CreateClientConnectorServiceRequest, - dict, -]) -def test_create_client_connector_service_rest_call_success(request_type): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["client_connector_service"] = {'name': 'name_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'display_name': 'display_name_value', 'ingress': {'config': {'transport_protocol': 1, 'destination_routes': [{'address': 'address_value', 'netmask': 'netmask_value'}]}}, 'egress': {'peered_vpc': {'network_vpc': 'network_vpc_value'}}, 'state': 1} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = client_connector_services_service.CreateClientConnectorServiceRequest.meta.fields["client_connector_service"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
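-        # proto-plus message classes expose their fields via `.meta.fields`,
-        # whereas raw protobuf classes expose them via `.DESCRIPTOR.fields`;
-        # the presence of DESCRIPTOR is what distinguishes the two here.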
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["client_connector_service"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["client_connector_service"][field])): - del request_init["client_connector_service"][field][i][subfield] - else: - del request_init["client_connector_service"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_client_connector_service(request) - - # Establish that the response is the type that we expect. 
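-    # For long-running methods the client wraps the operations_pb2.Operation
-    # proto in a google.api_core.operation.Operation future, whose .operation
-    # property holds the underlying proto.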
-    assert response.operation.name == 'operations/spam'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_create_client_connector_service_rest_interceptors(null_interceptor):
-    transport = transports.ClientConnectorServicesServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.ClientConnectorServicesServiceRestInterceptor(),
-    )
-    client = ClientConnectorServicesServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
-        mock.patch.object(transports.ClientConnectorServicesServiceRestInterceptor, "post_create_client_connector_service") as post, \
-        mock.patch.object(transports.ClientConnectorServicesServiceRestInterceptor, "post_create_client_connector_service_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.ClientConnectorServicesServiceRestInterceptor, "pre_create_client_connector_service") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = client_connector_services_service.CreateClientConnectorServiceRequest.pb(client_connector_services_service.CreateClientConnectorServiceRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = json_format.MessageToJson(operations_pb2.Operation())
-        req.return_value.content = return_value
-
-        request = client_connector_services_service.CreateClientConnectorServiceRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = operations_pb2.Operation()
-        post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
-        client.create_client_connector_service(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_update_client_connector_service_rest_bad_request(request_type=client_connector_services_service.UpdateClientConnectorServiceRequest):
-    client = ClientConnectorServicesServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'client_connector_service': {'name': 'projects/sample1/locations/sample2/clientConnectorServices/sample3'}}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_client_connector_service(request) - - -@pytest.mark.parametrize("request_type", [ - client_connector_services_service.UpdateClientConnectorServiceRequest, - dict, -]) -def test_update_client_connector_service_rest_call_success(request_type): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'client_connector_service': {'name': 'projects/sample1/locations/sample2/clientConnectorServices/sample3'}} - request_init["client_connector_service"] = {'name': 'projects/sample1/locations/sample2/clientConnectorServices/sample3', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'display_name': 'display_name_value', 'ingress': {'config': {'transport_protocol': 1, 'destination_routes': [{'address': 'address_value', 'netmask': 'netmask_value'}]}}, 'egress': {'peered_vpc': {'network_vpc': 'network_vpc_value'}}, 'state': 1} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = client_connector_services_service.UpdateClientConnectorServiceRequest.meta.fields["client_connector_service"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["client_connector_service"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["client_connector_service"][field])): - del request_init["client_connector_service"][field][i][subfield] - else: - del request_init["client_connector_service"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_client_connector_service(request) - - # Establish that the response is the type that we expect. 
-    assert response.operation.name == 'operations/spam'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_update_client_connector_service_rest_interceptors(null_interceptor):
-    transport = transports.ClientConnectorServicesServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.ClientConnectorServicesServiceRestInterceptor(),
-    )
-    client = ClientConnectorServicesServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
-        mock.patch.object(transports.ClientConnectorServicesServiceRestInterceptor, "post_update_client_connector_service") as post, \
-        mock.patch.object(transports.ClientConnectorServicesServiceRestInterceptor, "post_update_client_connector_service_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.ClientConnectorServicesServiceRestInterceptor, "pre_update_client_connector_service") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = client_connector_services_service.UpdateClientConnectorServiceRequest.pb(client_connector_services_service.UpdateClientConnectorServiceRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = json_format.MessageToJson(operations_pb2.Operation())
-        req.return_value.content = return_value
-
-        request = client_connector_services_service.UpdateClientConnectorServiceRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = operations_pb2.Operation()
-        post_with_metadata.return_value = operations_pb2.Operation(), metadata
-
-        client.update_client_connector_service(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_delete_client_connector_service_rest_bad_request(request_type=client_connector_services_service.DeleteClientConnectorServiceRequest):
-    client = ClientConnectorServicesServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/clientConnectorServices/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.delete_client_connector_service(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    client_connector_services_service.DeleteClientConnectorServiceRequest,
-    dict,
-])
-def test_delete_client_connector_service_rest_call_success(request_type):
-    client = ClientConnectorServicesServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/clientConnectorServices/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = operations_pb2.Operation(name='operations/spam')
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.delete_client_connector_service(request)
-
-    # Establish that the response is the type that we expect.
-    assert response.operation.name == 'operations/spam'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_delete_client_connector_service_rest_interceptors(null_interceptor):
-    transport = transports.ClientConnectorServicesServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.ClientConnectorServicesServiceRestInterceptor(),
-    )
-    client = ClientConnectorServicesServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(operation.Operation, "_set_result_from_operation"), \
-        mock.patch.object(transports.ClientConnectorServicesServiceRestInterceptor, "post_delete_client_connector_service") as post, \
-        mock.patch.object(transports.ClientConnectorServicesServiceRestInterceptor, "post_delete_client_connector_service_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.ClientConnectorServicesServiceRestInterceptor, "pre_delete_client_connector_service") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = client_connector_services_service.DeleteClientConnectorServiceRequest.pb(client_connector_services_service.DeleteClientConnectorServiceRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = json_format.MessageToJson(operations_pb2.Operation())
-        req.return_value.content = return_value
-
-        request = 
client_connector_services_service.DeleteClientConnectorServiceRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.delete_client_connector_service(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_location(request) - - -@pytest.mark.parametrize("request_type", [ - locations_pb2.GetLocationRequest, - dict, -]) -def test_get_location_rest(request_type): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.Location() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_location(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocationsRequest): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
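-    # The REST transport maps a 400 status on the mocked Session response to
-    # core_exceptions.BadRequest (typically via
-    # google.api_core.exceptions.from_http_response), hence pytest.raises below.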
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_locations(request) - - -@pytest.mark.parametrize("request_type", [ - locations_pb2.ListLocationsRequest, - dict, -]) -def test_list_locations_rest(request_type): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.ListLocationsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_locations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - - -def test_get_iam_policy_rest_bad_request(request_type=iam_policy_pb2.GetIamPolicyRequest): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/appConnections/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_iam_policy(request) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.GetIamPolicyRequest, - dict, -]) -def test_get_iam_policy_rest(request_type): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'resource': 'projects/sample1/locations/sample2/appConnections/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = policy_pb2.Policy() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_iam_policy(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - -def test_set_iam_policy_rest_bad_request(request_type=iam_policy_pb2.SetIamPolicyRequest): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/appConnections/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.set_iam_policy(request) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.SetIamPolicyRequest, - dict, -]) -def test_set_iam_policy_rest(request_type): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'resource': 'projects/sample1/locations/sample2/appConnections/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.set_iam_policy(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - -def test_test_iam_permissions_rest_bad_request(request_type=iam_policy_pb2.TestIamPermissionsRequest): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/appConnections/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.test_iam_permissions(request) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.TestIamPermissionsRequest, - dict, -]) -def test_test_iam_permissions_rest(request_type): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'resource': 'projects/sample1/locations/sample2/appConnections/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = iam_policy_pb2.TestIamPermissionsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.test_iam_permissions(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - -def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.cancel_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) -def test_cancel_operation_rest(request_type): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. 
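-        # CancelOperation returns google.protobuf.Empty on the wire, which the
-        # client surfaces as None; the empty JSON body ('{}') below models that.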
- return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.DeleteOperationRequest, - dict, -]) -def test_delete_operation_rest(request_type): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) -def test_get_operation_rest(request_type): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_operations(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) -def test_list_operations_rest(request_type): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_operations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_initialize_client_w_rest(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_client_connector_services_empty_call_rest(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_client_connector_services), - '__call__') as call: - client.list_client_connector_services(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = client_connector_services_service.ListClientConnectorServicesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_client_connector_service_empty_call_rest(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_client_connector_service), - '__call__') as call: - client.get_client_connector_service(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = client_connector_services_service.GetClientConnectorServiceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_client_connector_service_empty_call_rest(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_client_connector_service), - '__call__') as call: - client.create_client_connector_service(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = client_connector_services_service.CreateClientConnectorServiceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_client_connector_service_empty_call_rest(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. 
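-    # (Even with request=None, the client is expected to build a default
-    # UpdateClientConnectorServiceRequest and pass it to the transport stub.)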
-    with mock.patch.object(
-        type(client.transport.update_client_connector_service),
-        '__call__') as call:
-        client.update_client_connector_service(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = client_connector_services_service.UpdateClientConnectorServiceRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_delete_client_connector_service_empty_call_rest():
-    client = ClientConnectorServicesServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-        type(client.transport.delete_client_connector_service),
-        '__call__') as call:
-        client.delete_client_connector_service(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = client_connector_services_service.DeleteClientConnectorServiceRequest()
-
-        assert args[0] == request_msg
-
-
-def test_client_connector_services_service_rest_lro_client():
-    client = ClientConnectorServicesServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-    transport = client.transport
-
-    # Ensure that we have an api-core operations client.
-    assert isinstance(
-        transport.operations_client,
-        operations_v1.AbstractOperationsClient,
-    )
-
-    # Ensure that subsequent calls to the property send the exact same object.
-    assert transport.operations_client is transport.operations_client
-
-def test_transport_grpc_default():
-    # A client should use the gRPC transport by default.
-    client = ClientConnectorServicesServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    assert isinstance(
-        client.transport,
-        transports.ClientConnectorServicesServiceGrpcTransport,
-    )
-
-def test_client_connector_services_service_base_transport_error():
-    # Passing both a credentials object and credentials_file should raise an error
-    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
-        transport = transports.ClientConnectorServicesServiceTransport(
-            credentials=ga_credentials.AnonymousCredentials(),
-            credentials_file="credentials.json"
-        )
-
-
-def test_client_connector_services_service_base_transport():
-    # Instantiate the base transport.
-    with mock.patch('google.cloud.beyondcorp_clientconnectorservices_v1.services.client_connector_services_service.transports.ClientConnectorServicesServiceTransport.__init__') as Transport:
-        Transport.return_value = None
-        transport = transports.ClientConnectorServicesServiceTransport(
-            credentials=ga_credentials.AnonymousCredentials(),
-        )
-
-    # Every method on the transport should just blindly
-    # raise NotImplementedError.
-    methods = (
-        'list_client_connector_services',
-        'get_client_connector_service',
-        'create_client_connector_service',
-        'update_client_connector_service',
-        'delete_client_connector_service',
-        'set_iam_policy',
-        'get_iam_policy',
-        'test_iam_permissions',
-        'get_location',
-        'list_locations',
-        'get_operation',
-        'cancel_operation',
-        'delete_operation',
-        'list_operations',
-    )
-    for method in methods:
-        with pytest.raises(NotImplementedError):
-            getattr(transport, method)(request=object())
-
-    with pytest.raises(NotImplementedError):
-        transport.close()
-
-    # Additionally, the LRO client (a property) should
-    # also raise NotImplementedError
-    with pytest.raises(NotImplementedError):
-        transport.operations_client
-
-    # Catch all for all remaining methods and properties
-    remainder = [
-        'kind',
-    ]
-    for r in remainder:
-        with pytest.raises(NotImplementedError):
-            getattr(transport, r)()
-
-
-def test_client_connector_services_service_base_transport_with_credentials_file():
-    # Instantiate the base transport with a credentials file
-    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.beyondcorp_clientconnectorservices_v1.services.client_connector_services_service.transports.ClientConnectorServicesServiceTransport._prep_wrapped_messages') as Transport:
-        Transport.return_value = None
-        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
-        transport = transports.ClientConnectorServicesServiceTransport(
-            credentials_file="credentials.json",
-            quota_project_id="octopus",
-        )
-        load_creds.assert_called_once_with("credentials.json",
-            scopes=None,
-            default_scopes=(
-                'https://www.googleapis.com/auth/cloud-platform',
-            ),
-            quota_project_id="octopus",
-        )
-
-
-def test_client_connector_services_service_base_transport_with_adc():
-    # Test the default credentials are used if credentials and credentials_file are None.
-    with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.beyondcorp_clientconnectorservices_v1.services.client_connector_services_service.transports.ClientConnectorServicesServiceTransport._prep_wrapped_messages') as Transport:
-        Transport.return_value = None
-        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
-        transport = transports.ClientConnectorServicesServiceTransport()
-        adc.assert_called_once()
-
-
-def test_client_connector_services_service_auth_adc():
-    # If no credentials are provided, we should use ADC credentials.
-    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
-        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
-        ClientConnectorServicesServiceClient()
-        adc.assert_called_once_with(
-            scopes=None,
-            default_scopes=(
-                'https://www.googleapis.com/auth/cloud-platform',
-            ),
-            quota_project_id=None,
-        )
-
-
-@pytest.mark.parametrize(
-    "transport_class",
-    [
-        transports.ClientConnectorServicesServiceGrpcTransport,
-        transports.ClientConnectorServicesServiceGrpcAsyncIOTransport,
-    ],
-)
-def test_client_connector_services_service_transport_auth_adc(transport_class):
-    # If credentials and host are not provided, the transport class should use
-    # ADC credentials.
-    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
-        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
-        transport_class(quota_project_id="octopus", scopes=["1", "2"])
-        adc.assert_called_once_with(
-            scopes=["1", "2"],
-            default_scopes=('https://www.googleapis.com/auth/cloud-platform',),
-            quota_project_id="octopus",
-        )
-
-
-@pytest.mark.parametrize(
-    "transport_class",
-    [
-        transports.ClientConnectorServicesServiceGrpcTransport,
-        transports.ClientConnectorServicesServiceGrpcAsyncIOTransport,
-        transports.ClientConnectorServicesServiceRestTransport,
-    ],
-)
-def test_client_connector_services_service_transport_auth_gdch_credentials(transport_class):
-    host = 'https://language.com'
-    api_audience_tests = [None, 'https://language2.com']
-    api_audience_expect = [host, 'https://language2.com']
-    for t, e in zip(api_audience_tests, api_audience_expect):
-        with mock.patch.object(google.auth, 'default', autospec=True) as adc:
-            gdch_mock = mock.MagicMock()
-            type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock)
-            adc.return_value = (gdch_mock, None)
-            transport_class(host=host, api_audience=t)
-            gdch_mock.with_gdch_audience.assert_called_once_with(e)
-
-
-@pytest.mark.parametrize(
-    "transport_class,grpc_helpers",
-    [
-        (transports.ClientConnectorServicesServiceGrpcTransport, grpc_helpers),
-        (transports.ClientConnectorServicesServiceGrpcAsyncIOTransport, grpc_helpers_async)
-    ],
-)
-def test_client_connector_services_service_transport_create_channel(transport_class, grpc_helpers):
-    # If credentials and host are not provided, the transport class should use
-    # ADC credentials.
-    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object(
-        grpc_helpers, "create_channel", autospec=True
-    ) as create_channel:
-        creds = ga_credentials.AnonymousCredentials()
-        adc.return_value = (creds, None)
-        transport_class(
-            quota_project_id="octopus",
-            scopes=["1", "2"]
-        )
-
-        create_channel.assert_called_with(
-            "beyondcorp.googleapis.com:443",
-            credentials=creds,
-            credentials_file=None,
-            quota_project_id="octopus",
-            default_scopes=(
-                'https://www.googleapis.com/auth/cloud-platform',
-            ),
-            scopes=["1", "2"],
-            default_host="beyondcorp.googleapis.com",
-            ssl_credentials=None,
-            options=[
-                ("grpc.max_send_message_length", -1),
-                ("grpc.max_receive_message_length", -1),
-            ],
-        )
-
-
-@pytest.mark.parametrize("transport_class", [transports.ClientConnectorServicesServiceGrpcTransport, transports.ClientConnectorServicesServiceGrpcAsyncIOTransport])
-def test_client_connector_services_service_grpc_transport_client_cert_source_for_mtls(
-    transport_class
-):
-    cred = ga_credentials.AnonymousCredentials()
-
-    # Check ssl_channel_credentials is used if provided.
-    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
-        mock_ssl_channel_creds = mock.Mock()
-        transport_class(
-            host="squid.clam.whelk",
-            credentials=cred,
-            ssl_channel_credentials=mock_ssl_channel_creds
-        )
-        mock_create_channel.assert_called_once_with(
-            "squid.clam.whelk:443",
-            credentials=cred,
-            credentials_file=None,
-            scopes=None,
-            ssl_credentials=mock_ssl_channel_creds,
-            quota_project_id=None,
-            options=[
-                ("grpc.max_send_message_length", -1),
-                ("grpc.max_receive_message_length", -1),
-            ],
-        )
-
-    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
-    # is used.
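-    # (client_cert_source_callback supplies the certificate/key pair that the
-    # transport is expected to forward to grpc.ssl_channel_credentials.)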
-    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
-        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
-            transport_class(
-                credentials=cred,
-                client_cert_source_for_mtls=client_cert_source_callback
-            )
-            expected_cert, expected_key = client_cert_source_callback()
-            mock_ssl_cred.assert_called_once_with(
-                certificate_chain=expected_cert,
-                private_key=expected_key
-            )
-
-def test_client_connector_services_service_http_transport_client_cert_source_for_mtls():
-    cred = ga_credentials.AnonymousCredentials()
-    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
-        transports.ClientConnectorServicesServiceRestTransport(
-            credentials=cred,
-            client_cert_source_for_mtls=client_cert_source_callback
-        )
-        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
-
-
-@pytest.mark.parametrize("transport_name", [
-    "grpc",
-    "grpc_asyncio",
-    "rest",
-])
-def test_client_connector_services_service_host_no_port(transport_name):
-    client = ClientConnectorServicesServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        client_options=client_options.ClientOptions(api_endpoint='beyondcorp.googleapis.com'),
-        transport=transport_name,
-    )
-    assert client.transport._host == (
-        'beyondcorp.googleapis.com:443'
-        if transport_name in ['grpc', 'grpc_asyncio']
-        else 'https://beyondcorp.googleapis.com'
-    )
-
-@pytest.mark.parametrize("transport_name", [
-    "grpc",
-    "grpc_asyncio",
-    "rest",
-])
-def test_client_connector_services_service_host_with_port(transport_name):
-    client = ClientConnectorServicesServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        client_options=client_options.ClientOptions(api_endpoint='beyondcorp.googleapis.com:8000'),
-        transport=transport_name,
-    )
-    assert client.transport._host == (
-        'beyondcorp.googleapis.com:8000'
-        if transport_name in ['grpc', 'grpc_asyncio']
-        else 'https://beyondcorp.googleapis.com:8000'
-    )
-
-@pytest.mark.parametrize("transport_name", [
-    "rest",
-])
-def test_client_connector_services_service_client_transport_session_collision(transport_name):
-    creds1 = ga_credentials.AnonymousCredentials()
-    creds2 = ga_credentials.AnonymousCredentials()
-    client1 = ClientConnectorServicesServiceClient(
-        credentials=creds1,
-        transport=transport_name,
-    )
-    client2 = ClientConnectorServicesServiceClient(
-        credentials=creds2,
-        transport=transport_name,
-    )
-    session1 = client1.transport.list_client_connector_services._session
-    session2 = client2.transport.list_client_connector_services._session
-    assert session1 != session2
-    session1 = client1.transport.get_client_connector_service._session
-    session2 = client2.transport.get_client_connector_service._session
-    assert session1 != session2
-    session1 = client1.transport.create_client_connector_service._session
-    session2 = client2.transport.create_client_connector_service._session
-    assert session1 != session2
-    session1 = client1.transport.update_client_connector_service._session
-    session2 = client2.transport.update_client_connector_service._session
-    assert session1 != session2
-    session1 = client1.transport.delete_client_connector_service._session
-    session2 = client2.transport.delete_client_connector_service._session
-    assert session1 != session2
-
-def test_client_connector_services_service_grpc_transport_channel():
-    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
-    # Check that channel is used if provided.
-    transport = transports.ClientConnectorServicesServiceGrpcTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials is None
-
-
-def test_client_connector_services_service_grpc_asyncio_transport_channel():
-    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
-    # Check that channel is used if provided.
-    transport = transports.ClientConnectorServicesServiceGrpcAsyncIOTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials is None
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.ClientConnectorServicesServiceGrpcTransport, transports.ClientConnectorServicesServiceGrpcAsyncIOTransport])
-def test_client_connector_services_service_transport_channel_mtls_with_client_cert_source(
-    transport_class
-):
-    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
-        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
-            mock_ssl_cred = mock.Mock()
-            grpc_ssl_channel_cred.return_value = mock_ssl_cred
-
-            mock_grpc_channel = mock.Mock()
-            grpc_create_channel.return_value = mock_grpc_channel
-
-            cred = ga_credentials.AnonymousCredentials()
-            with pytest.warns(DeprecationWarning):
-                with mock.patch.object(google.auth, 'default') as adc:
-                    adc.return_value = (cred, None)
-                    transport = transport_class(
-                        host="squid.clam.whelk",
-                        api_mtls_endpoint="mtls.squid.clam.whelk",
-                        client_cert_source=client_cert_source_callback,
-                    )
-                    adc.assert_called_once()
-
-            grpc_ssl_channel_cred.assert_called_once_with(
-                certificate_chain=b"cert bytes", private_key=b"key bytes"
-            )
-            grpc_create_channel.assert_called_once_with(
-                "mtls.squid.clam.whelk:443",
-                credentials=cred,
-                credentials_file=None,
-                scopes=None,
-                ssl_credentials=mock_ssl_cred,
-                quota_project_id=None,
-                options=[
-                    ("grpc.max_send_message_length", -1),
-                    ("grpc.max_receive_message_length", -1),
-                ],
-            )
-            assert transport.grpc_channel == mock_grpc_channel
-            assert transport._ssl_channel_credentials == mock_ssl_cred
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.ClientConnectorServicesServiceGrpcTransport, transports.ClientConnectorServicesServiceGrpcAsyncIOTransport])
-def test_client_connector_services_service_transport_channel_mtls_with_adc(
-    transport_class
-):
-    mock_ssl_cred = mock.Mock()
-    with mock.patch.multiple(
-        "google.auth.transport.grpc.SslCredentials",
-        __init__=mock.Mock(return_value=None),
-        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
-    ):
-        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
-            mock_grpc_channel = mock.Mock()
-            grpc_create_channel.return_value = mock_grpc_channel
-            mock_cred = mock.Mock()
-
-            with pytest.warns(DeprecationWarning):
-                transport = transport_class(
-                    host="squid.clam.whelk",
-                    credentials=mock_cred,
-                    api_mtls_endpoint="mtls.squid.clam.whelk",
-                    client_cert_source=None,
-                )
-
-            grpc_create_channel.assert_called_once_with(
-                "mtls.squid.clam.whelk:443",
-                credentials=mock_cred,
-                credentials_file=None,
-                scopes=None,
-                ssl_credentials=mock_ssl_cred,
-                quota_project_id=None,
-                options=[
-                    ("grpc.max_send_message_length", -1),
-                    ("grpc.max_receive_message_length", -1),
-                ],
-            )
-            assert transport.grpc_channel == mock_grpc_channel
-
-
-def test_client_connector_services_service_grpc_lro_client():
-    client = ClientConnectorServicesServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-    transport = client.transport
-
-    # Ensure that we have an api-core operations client.
-    assert isinstance(
-        transport.operations_client,
-        operations_v1.OperationsClient,
-    )
-
-    # Ensure that subsequent calls to the property send the exact same object.
-    assert transport.operations_client is transport.operations_client
-
-
-def test_client_connector_services_service_grpc_lro_async_client():
-    client = ClientConnectorServicesServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc_asyncio',
-    )
-    transport = client.transport
-
-    # Ensure that we have an api-core operations client.
-    assert isinstance(
-        transport.operations_client,
-        operations_v1.OperationsAsyncClient,
-    )
-
-    # Ensure that subsequent calls to the property send the exact same object.
-    assert transport.operations_client is transport.operations_client
-
-
-def test_client_connector_service_path():
-    project = "squid"
-    location = "clam"
-    client_connector_service = "whelk"
-    expected = "projects/{project}/locations/{location}/clientConnectorServices/{client_connector_service}".format(project=project, location=location, client_connector_service=client_connector_service, )
-    actual = ClientConnectorServicesServiceClient.client_connector_service_path(project, location, client_connector_service)
-    assert expected == actual
-
-
-def test_parse_client_connector_service_path():
-    expected = {
-        "project": "octopus",
-        "location": "oyster",
-        "client_connector_service": "nudibranch",
-    }
-    path = ClientConnectorServicesServiceClient.client_connector_service_path(**expected)
-
-    # Check that the path construction is reversible.
- actual = ClientConnectorServicesServiceClient.parse_client_connector_service_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = ClientConnectorServicesServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "mussel", - } - path = ClientConnectorServicesServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = ClientConnectorServicesServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "winkle" - expected = "folders/{folder}".format(folder=folder, ) - actual = ClientConnectorServicesServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nautilus", - } - path = ClientConnectorServicesServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = ClientConnectorServicesServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "scallop" - expected = "organizations/{organization}".format(organization=organization, ) - actual = ClientConnectorServicesServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "abalone", - } - path = ClientConnectorServicesServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = ClientConnectorServicesServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "squid" - expected = "projects/{project}".format(project=project, ) - actual = ClientConnectorServicesServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "clam", - } - path = ClientConnectorServicesServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = ClientConnectorServicesServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "whelk" - location = "octopus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = ClientConnectorServicesServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - } - path = ClientConnectorServicesServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = ClientConnectorServicesServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.ClientConnectorServicesServiceTransport, '_prep_wrapped_messages') as prep: - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.ClientConnectorServicesServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = ClientConnectorServicesServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_delete_operation(transport: str = "grpc"): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_delete_operation_field_headers(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_delete_operation_from_dict(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_cancel_operation_from_dict(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_list_operations_field_headers(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_operations_from_dict(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_locations(transport: str = "grpc"): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - -def test_list_locations_field_headers(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() - - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
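-    # (The name field below is expected to surface as the
-    # x-goog-request-params routing header asserted at the end of the test.)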
- request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_locations_from_dict(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_location(transport: str = "grpc"): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - -def test_get_location_field_headers(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials()) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = locations_pb2.Location() - - client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_location_field_headers_async(): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials() - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"] - -def test_get_location_from_dict(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - - response = client.get_location( - request={ - "name": "locations/abc", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_location_from_dict_async(): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location( - request={ - "name": "locations/abc", - } - ) - call.assert_called() - - -def test_set_iam_policy(transport: str = "grpc"): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.SetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) - response = client.set_iam_policy(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" -@pytest.mark.asyncio -async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.SetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy(version=774, etag=b"etag_blob",) - ) - response = await client.set_iam_policy(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - -def test_set_iam_policy_field_headers(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - - client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] -@pytest.mark.asyncio -async def test_set_iam_policy_field_headers_async(): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - - await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - -def test_set_iam_policy_from_dict(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - - response = client.set_iam_policy( - request={ - "resource": "resource_value", - "policy": policy_pb2.Policy(version=774), - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_set_iam_policy_from_dict_async(): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy() - ) - - response = await client.set_iam_policy( - request={ - "resource": "resource_value", - "policy": policy_pb2.Policy(version=774), - } - ) - call.assert_called() - -def test_get_iam_policy(transport: str = "grpc"): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.GetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) - - response = client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - - -@pytest.mark.asyncio -async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.GetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy(version=774, etag=b"etag_blob",) - ) - - response = await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - - -def test_get_iam_policy_field_headers(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_iam_policy_field_headers_async(): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - - await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -def test_get_iam_policy_from_dict(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = policy_pb2.Policy() - - response = client.get_iam_policy( - request={ - "resource": "resource_value", - "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_get_iam_policy_from_dict_async(): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy() - ) - - response = await client.get_iam_policy( - request={ - "resource": "resource_value", - "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - -def test_test_iam_permissions(transport: str = "grpc"): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=["permissions_value"], - ) - - response = client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - assert response.permissions == ["permissions_value"] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse(permissions=["permissions_value"],) - ) - - response = await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - assert response.permissions == ["permissions_value"] - - -def test_test_iam_permissions_field_headers(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = iam_policy_pb2.TestIamPermissionsRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_field_headers_async(): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse() - ) - - await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -def test_test_iam_permissions_from_dict(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - response = client.test_iam_permissions( - request={ - "resource": "resource_value", - "permissions": ["permissions_value"], - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_test_iam_permissions_from_dict_async(): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse() - ) - - response = await client.test_iam_permissions( - request={ - "resource": "resource_value", - "permissions": ["permissions_value"], - } - ) - call.assert_called() - - -def test_transport_close_grpc(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = ClientConnectorServicesServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close_rest(): - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = ClientConnectorServicesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. - with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (ClientConnectorServicesServiceClient, transports.ClientConnectorServicesServiceGrpcTransport), - (ClientConnectorServicesServiceAsyncClient, transports.ClientConnectorServicesServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/.coveragerc b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/.coveragerc deleted file mode 100644 index 2093aa9a281d..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/beyondcorp_clientgateways/__init__.py - google/cloud/beyondcorp_clientgateways/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/.flake8 
b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/.flake8 deleted file mode 100644 index 29227d4cf419..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. - **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/MANIFEST.in b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/MANIFEST.in deleted file mode 100644 index af0cde2a5192..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/beyondcorp_clientgateways *.py -recursive-include google/cloud/beyondcorp_clientgateways_v1 *.py diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/README.rst b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/README.rst deleted file mode 100644 index 217e2c6c4636..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/README.rst +++ /dev/null @@ -1,143 +0,0 @@ -Python Client for Google Cloud Beyondcorp Clientgateways API -============================================================= - -Quick Start ----------- - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Beyondcorp Clientgateways API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv <your-env> - source <your-env>/bin/activate - <your-env>/bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. code-block:: console - - python3 -m venv <your-env> - <your-env>\Scripts\activate - <your-env>\Scripts\pip.exe install \path\to\library - - -Logging ------- - -This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes. -Note the following: - -#.
Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging. -#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**. -#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below. - - -Simple, environment-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google -logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged -messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging -event. - -A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log. - -- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc. -- Invalid logging scopes: :code:`foo`, :code:`123`, etc. - -**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers. - - -Examples -^^^^^^^^ - -- Enabling the default handler for all Google-based loggers - -.. code-block:: console - - export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google - -- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`): - -.. code-block:: console - - export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1 - - -Advanced, code-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -You can also configure a valid logging scope using Python's standard `logging` mechanism. - - -Examples -^^^^^^^^ - -- Configuring a handler for all Google-based loggers - -.. code-block:: python - - import logging - - from google.cloud.translate_v3 import translate - - base_logger = logging.getLogger("google") - base_logger.addHandler(logging.StreamHandler()) - base_logger.setLevel(logging.DEBUG) - -- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`): - -.. code-block:: python - - import logging - - from google.cloud.translate_v3 import translate - - base_logger = logging.getLogger("google.cloud.library_v1") - base_logger.addHandler(logging.StreamHandler()) - base_logger.setLevel(logging.DEBUG) - - -Logging details -~~~~~~~~~~~~~~~ - -#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root - logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set - :code:`logging.getLogger("google").propagate = True` in your code. -#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for - one library, but decide you need to also set up environment-based logging configuration for another library. - - #.
If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual - if the code-based configuration gets applied first. - -#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get - executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured. - (This is the reason for 2.i. above.) diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/docs/_static/custom.css b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/docs/_static/custom.css deleted file mode 100644 index 06423be0b592..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/docs/_static/custom.css +++ /dev/null @@ -1,3 +0,0 @@ -dl.field-list > dt { - min-width: 100px -} diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/docs/beyondcorp_clientgateways_v1/client_gateways_service.rst b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/docs/beyondcorp_clientgateways_v1/client_gateways_service.rst deleted file mode 100644 index 3cc76dc49159..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/docs/beyondcorp_clientgateways_v1/client_gateways_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -ClientGatewaysService ---------------------------------------- - -.. automodule:: google.cloud.beyondcorp_clientgateways_v1.services.client_gateways_service - :members: - :inherited-members: - -.. automodule:: google.cloud.beyondcorp_clientgateways_v1.services.client_gateways_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/docs/beyondcorp_clientgateways_v1/services_.rst b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/docs/beyondcorp_clientgateways_v1/services_.rst deleted file mode 100644 index 7b582f7c3fc8..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/docs/beyondcorp_clientgateways_v1/services_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Beyondcorp Clientgateways v1 API -========================================================== -.. toctree:: - :maxdepth: 2 - - client_gateways_service diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/docs/beyondcorp_clientgateways_v1/types_.rst b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/docs/beyondcorp_clientgateways_v1/types_.rst deleted file mode 100644 index 8fc852799935..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/docs/beyondcorp_clientgateways_v1/types_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Beyondcorp Clientgateways v1 API -======================================================= - -.. automodule:: google.cloud.beyondcorp_clientgateways_v1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/docs/conf.py b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/docs/conf.py deleted file mode 100644 index 5c6e26498514..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-beyondcorp-clientgateways documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGELOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffixes as a list of strings: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = u"google-cloud-beyondcorp-clientgateways" -copyright = u"2023, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = 'en' - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files.
-exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. 
-# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-beyondcorp-clientgateways-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-beyondcorp-clientgateways.tex", - u"google-cloud-beyondcorp-clientgateways Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-beyondcorp-clientgateways", - u"Google Cloud Beyondcorp Clientgateways Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. 
-# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-beyondcorp-clientgateways", - u"google-cloud-beyondcorp-clientgateways Documentation", - author, - "google-cloud-beyondcorp-clientgateways", - "GAPIC library for Google Cloud Beyondcorp Clientgateways API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/docs/index.rst b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/docs/index.rst deleted file mode 100644 index 2b2258f6a11e..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - beyondcorp_clientgateways_v1/services_ - beyondcorp_clientgateways_v1/types_ diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways/__init__.py deleted file mode 100644 index 8a0df2e56088..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways/__init__.py +++ /dev/null @@ -1,41 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.beyondcorp_clientgateways import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.beyondcorp_clientgateways_v1.services.client_gateways_service.client import ClientGatewaysServiceClient -from google.cloud.beyondcorp_clientgateways_v1.services.client_gateways_service.async_client import ClientGatewaysServiceAsyncClient - -from google.cloud.beyondcorp_clientgateways_v1.types.client_gateways_service import ClientGateway -from google.cloud.beyondcorp_clientgateways_v1.types.client_gateways_service import ClientGatewayOperationMetadata -from google.cloud.beyondcorp_clientgateways_v1.types.client_gateways_service import CreateClientGatewayRequest -from google.cloud.beyondcorp_clientgateways_v1.types.client_gateways_service import DeleteClientGatewayRequest -from google.cloud.beyondcorp_clientgateways_v1.types.client_gateways_service import GetClientGatewayRequest -from google.cloud.beyondcorp_clientgateways_v1.types.client_gateways_service import ListClientGatewaysRequest -from google.cloud.beyondcorp_clientgateways_v1.types.client_gateways_service import ListClientGatewaysResponse - -__all__ = ('ClientGatewaysServiceClient', - 'ClientGatewaysServiceAsyncClient', - 'ClientGateway', - 'ClientGatewayOperationMetadata', - 'CreateClientGatewayRequest', - 'DeleteClientGatewayRequest', - 'GetClientGatewayRequest', - 'ListClientGatewaysRequest', - 'ListClientGatewaysResponse', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways/gapic_version.py b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways/py.typed b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways/py.typed deleted file mode 100644 index 46bc44a92f73..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-beyondcorp-clientgateways package uses inline types. 
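The package-level ``__init__.py`` above re-exports the generated clients and request/response types, so downstream code can import them directly from ``google.cloud.beyondcorp_clientgateways`` rather than the versioned module. A minimal usage sketch, assuming the package is installed, Application Default Credentials are configured, and the standard flattened ``parent`` argument generated for List RPCs; the project and location IDs are placeholders:

.. code-block:: python

    # Illustrative sketch only; "my-project" and "us-central1" are placeholders.
    from google.cloud.beyondcorp_clientgateways import ClientGatewaysServiceClient

    client = ClientGatewaysServiceClient()
    parent = "projects/my-project/locations/us-central1"
    # list_client_gateways returns a pager that lazily iterates all pages.
    for gateway in client.list_client_gateways(parent=parent):
        print(gateway.name)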
diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/__init__.py deleted file mode 100644 index 9911f4f611df..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/__init__.py +++ /dev/null @@ -1,42 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.beyondcorp_clientgateways_v1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.client_gateways_service import ClientGatewaysServiceClient -from .services.client_gateways_service import ClientGatewaysServiceAsyncClient - -from .types.client_gateways_service import ClientGateway -from .types.client_gateways_service import ClientGatewayOperationMetadata -from .types.client_gateways_service import CreateClientGatewayRequest -from .types.client_gateways_service import DeleteClientGatewayRequest -from .types.client_gateways_service import GetClientGatewayRequest -from .types.client_gateways_service import ListClientGatewaysRequest -from .types.client_gateways_service import ListClientGatewaysResponse - -__all__ = ( - 'ClientGatewaysServiceAsyncClient', -'ClientGateway', -'ClientGatewayOperationMetadata', -'ClientGatewaysServiceClient', -'CreateClientGatewayRequest', -'DeleteClientGatewayRequest', -'GetClientGatewayRequest', -'ListClientGatewaysRequest', -'ListClientGatewaysResponse', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/gapic_metadata.json b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/gapic_metadata.json deleted file mode 100644 index 3e3fd8edcdd8..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/gapic_metadata.json +++ /dev/null @@ -1,88 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.beyondcorp_clientgateways_v1", - "protoPackage": "google.cloud.beyondcorp.clientgateways.v1", - "schema": "1.0", - "services": { - "ClientGatewaysService": { - "clients": { - "grpc": { - "libraryClient": "ClientGatewaysServiceClient", - "rpcs": { - "CreateClientGateway": { - "methods": [ - "create_client_gateway" - ] - }, - "DeleteClientGateway": { - "methods": [ - "delete_client_gateway" - ] - }, - "GetClientGateway": { - "methods": [ - "get_client_gateway" - ] - }, - "ListClientGateways": { - "methods": [ - "list_client_gateways" - ] - } - } - }, - "grpc-async": { - "libraryClient": "ClientGatewaysServiceAsyncClient", - "rpcs": { - "CreateClientGateway": { - "methods": [ - "create_client_gateway" - ] - }, - "DeleteClientGateway": { - "methods": [ - "delete_client_gateway" - ] - }, - 
"GetClientGateway": { - "methods": [ - "get_client_gateway" - ] - }, - "ListClientGateways": { - "methods": [ - "list_client_gateways" - ] - } - } - }, - "rest": { - "libraryClient": "ClientGatewaysServiceClient", - "rpcs": { - "CreateClientGateway": { - "methods": [ - "create_client_gateway" - ] - }, - "DeleteClientGateway": { - "methods": [ - "delete_client_gateway" - ] - }, - "GetClientGateway": { - "methods": [ - "get_client_gateway" - ] - }, - "ListClientGateways": { - "methods": [ - "list_client_gateways" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/gapic_version.py b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/py.typed b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/py.typed deleted file mode 100644 index 46bc44a92f73..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-beyondcorp-clientgateways package uses inline types. diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/services/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/services/__init__.py deleted file mode 100644 index 8f6cf068242c..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/__init__.py deleted file mode 100644 index 8fdbce31e1ab..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import ClientGatewaysServiceClient -from .async_client import ClientGatewaysServiceAsyncClient - -__all__ = ( - 'ClientGatewaysServiceClient', - 'ClientGatewaysServiceAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/async_client.py b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/async_client.py deleted file mode 100644 index 2f03132b5b45..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/async_client.py +++ /dev/null @@ -1,1378 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
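The async_client.py module whose diff begins here is the coroutine-based counterpart of client.py further below; both expose the same RPC surface. A minimal usage sketch, assuming Application Default Credentials are configured (the parent resource name is illustrative):

.. code-block:: python

    import asyncio

    from google.cloud import beyondcorp_clientgateways_v1

    async def main():
        # Every RPC on the async client is awaitable; paged methods
        # resolve to an AsyncPager that supports `async for`.
        client = beyondcorp_clientgateways_v1.ClientGatewaysServiceAsyncClient()
        pager = await client.list_client_gateways(
            parent="projects/my-project/locations/us-central1",  # illustrative
        )
        async for gateway in pager:
            print(gateway.name)

    asyncio.run(main())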
-# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.beyondcorp_clientgateways_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.beyondcorp_clientgateways_v1.services.client_gateways_service import pagers -from google.cloud.beyondcorp_clientgateways_v1.types import client_gateways_service -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import ClientGatewaysServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import ClientGatewaysServiceGrpcAsyncIOTransport -from .client import ClientGatewaysServiceClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -class ClientGatewaysServiceAsyncClient: - """API Overview: - - The ``beyondcorp.googleapis.com`` service implements the Google - Cloud BeyondCorp API. - - Data Model: - - The ClientGatewaysService exposes the following resources: - - - Client Gateways, named as follows: - ``projects/{project_id}/locations/{location_id}/clientGateways/{client_gateway_id}``. - """ - - _client: ClientGatewaysServiceClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
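The deprecation note above points at the endpoint-template mechanism: rather than a fixed hostname, the client formats a template with the configured universe domain. A rough sketch of the substitution, with values mirrored from the synchronous client later in this diff (the custom domain is an illustrative value):

.. code-block:: python

    # Mirrored from ClientGatewaysServiceClient; these are private attributes.
    _DEFAULT_ENDPOINT_TEMPLATE = "beyondcorp.{UNIVERSE_DOMAIN}"
    _DEFAULT_UNIVERSE = "googleapis.com"

    # Default universe -> "beyondcorp.googleapis.com"
    print(_DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=_DEFAULT_UNIVERSE))

    # A custom universe substitutes its own domain.
    print(_DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN="example-universe.example.com"))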
- DEFAULT_ENDPOINT = ClientGatewaysServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = ClientGatewaysServiceClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = ClientGatewaysServiceClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = ClientGatewaysServiceClient._DEFAULT_UNIVERSE - - client_gateway_path = staticmethod(ClientGatewaysServiceClient.client_gateway_path) - parse_client_gateway_path = staticmethod(ClientGatewaysServiceClient.parse_client_gateway_path) - common_billing_account_path = staticmethod(ClientGatewaysServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(ClientGatewaysServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(ClientGatewaysServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(ClientGatewaysServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(ClientGatewaysServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(ClientGatewaysServiceClient.parse_common_organization_path) - common_project_path = staticmethod(ClientGatewaysServiceClient.common_project_path) - parse_common_project_path = staticmethod(ClientGatewaysServiceClient.parse_common_project_path) - common_location_path = staticmethod(ClientGatewaysServiceClient.common_location_path) - parse_common_location_path = staticmethod(ClientGatewaysServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - ClientGatewaysServiceAsyncClient: The constructed client. - """ - return ClientGatewaysServiceClient.from_service_account_info.__func__(ClientGatewaysServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - ClientGatewaysServiceAsyncClient: The constructed client. - """ - return ClientGatewaysServiceClient.from_service_account_file.__func__(ClientGatewaysServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` is provided, use the provided one.
- (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return ClientGatewaysServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> ClientGatewaysServiceTransport: - """Returns the transport used by the client instance. - - Returns: - ClientGatewaysServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = ClientGatewaysServiceClient.get_transport_class - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, ClientGatewaysServiceTransport, Callable[..., ClientGatewaysServiceTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the client gateways service async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,ClientGatewaysServiceTransport,Callable[..., ClientGatewaysServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the ClientGatewaysServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which can have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2.
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = ClientGatewaysServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.beyondcorp.clientgateways_v1.ClientGatewaysServiceAsyncClient`.", - extra = { - "serviceName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { - "serviceName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService", - "credentialsType": None, - } - ) - - async def list_client_gateways(self, - request: Optional[Union[client_gateways_service.ListClientGatewaysRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListClientGatewaysAsyncPager: - r"""Lists ClientGateways in a given project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_clientgateways_v1 - - async def sample_list_client_gateways(): - # Create a client - client = beyondcorp_clientgateways_v1.ClientGatewaysServiceAsyncClient() - - # Initialize request argument(s) - request = beyondcorp_clientgateways_v1.ListClientGatewaysRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_client_gateways(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.beyondcorp_clientgateways_v1.types.ListClientGatewaysRequest, dict]]): - The request object. Message for requesting list of - ClientGateways. - parent (:class:`str`): - Required. 
Parent value for - ListClientGatewaysRequest. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.beyondcorp_clientgateways_v1.services.client_gateways_service.pagers.ListClientGatewaysAsyncPager: - Message for response to listing - ClientGateways. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, client_gateways_service.ListClientGatewaysRequest): - request = client_gateways_service.ListClientGatewaysRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_client_gateways] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListClientGatewaysAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_client_gateway(self, - request: Optional[Union[client_gateways_service.GetClientGatewayRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> client_gateways_service.ClientGateway: - r"""Gets details of a single ClientGateway. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_clientgateways_v1 - - async def sample_get_client_gateway(): - # Create a client - client = beyondcorp_clientgateways_v1.ClientGatewaysServiceAsyncClient() - - # Initialize request argument(s) - request = beyondcorp_clientgateways_v1.GetClientGatewayRequest( - name="name_value", - ) - - # Make the request - response = await client.get_client_gateway(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.beyondcorp_clientgateways_v1.types.GetClientGatewayRequest, dict]]): - The request object. Message for getting a ClientGateway. - name (:class:`str`): - Required. Name of the resource - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.beyondcorp_clientgateways_v1.types.ClientGateway: - Message describing ClientGateway - object. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, client_gateways_service.GetClientGatewayRequest): - request = client_gateways_service.GetClientGatewayRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_client_gateway] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
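The guard clause repeated in these methods enforces that callers pass either a full request object or the flattened fields, never both. A short sketch of the two calling conventions on the synchronous client (the resource name is illustrative):

.. code-block:: python

    from google.cloud import beyondcorp_clientgateways_v1

    client = beyondcorp_clientgateways_v1.ClientGatewaysServiceClient()
    name = "projects/my-project/locations/us-central1/clientGateways/my-gateway"

    # Style 1: a full request object (a dict with the same keys also works).
    request = beyondcorp_clientgateways_v1.GetClientGatewayRequest(name=name)
    gateway = client.get_client_gateway(request=request)

    # Style 2: the flattened field alone.
    gateway = client.get_client_gateway(name=name)

    # Passing both `request` and `name` raises ValueError, per the guard above.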
- return response - - async def create_client_gateway(self, - request: Optional[Union[client_gateways_service.CreateClientGatewayRequest, dict]] = None, - *, - parent: Optional[str] = None, - client_gateway: Optional[client_gateways_service.ClientGateway] = None, - client_gateway_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates a new ClientGateway in a given project and - location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_clientgateways_v1 - - async def sample_create_client_gateway(): - # Create a client - client = beyondcorp_clientgateways_v1.ClientGatewaysServiceAsyncClient() - - # Initialize request argument(s) - client_gateway = beyondcorp_clientgateways_v1.ClientGateway() - client_gateway.name = "name_value" - - request = beyondcorp_clientgateways_v1.CreateClientGatewayRequest( - parent="parent_value", - client_gateway=client_gateway, - ) - - # Make the request - operation = client.create_client_gateway(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.beyondcorp_clientgateways_v1.types.CreateClientGatewayRequest, dict]]): - The request object. Message for creating a ClientGateway. - parent (:class:`str`): - Required. Value for parent. - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - client_gateway (:class:`google.cloud.beyondcorp_clientgateways_v1.types.ClientGateway`): - Required. The resource being created. - This corresponds to the ``client_gateway`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - client_gateway_id (:class:`str`): - Optional. User-settable client gateway resource ID. - - - Must start with a letter. - - Must contain between 4-63 characters from - ``/[a-z][0-9]-/``. - - Must end with a number or a letter. - - This corresponds to the ``client_gateway_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.beyondcorp_clientgateways_v1.types.ClientGateway` - Message describing ClientGateway object. - - """ - # Create or coerce a protobuf request object. 
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, client_gateway, client_gateway_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, client_gateways_service.CreateClientGatewayRequest): - request = client_gateways_service.CreateClientGatewayRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if client_gateway is not None: - request.client_gateway = client_gateway - if client_gateway_id is not None: - request.client_gateway_id = client_gateway_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_client_gateway] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - client_gateways_service.ClientGateway, - metadata_type=client_gateways_service.ClientGatewayOperationMetadata, - ) - - # Done; return the response. - return response - - async def delete_client_gateway(self, - request: Optional[Union[client_gateways_service.DeleteClientGatewayRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes a single ClientGateway. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_clientgateways_v1 - - async def sample_delete_client_gateway(): - # Create a client - client = beyondcorp_clientgateways_v1.ClientGatewaysServiceAsyncClient() - - # Initialize request argument(s) - request = beyondcorp_clientgateways_v1.DeleteClientGatewayRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_client_gateway(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.beyondcorp_clientgateways_v1.types.DeleteClientGatewayRequest, dict]]): - The request object. 
Message for deleting a ClientGateway - name (:class:`str`): - Required. Name of the resource - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, client_gateways_service.DeleteClientGatewayRequest): - request = client_gateways_service.DeleteClientGatewayRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_client_gateway] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=client_gateways_service.ClientGatewayOperationMetadata, - ) - - # Done; return the response. - return response - - async def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. 
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
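As the comments in these mixin methods note, `operations_pb2` requests are not proto-plus types, so a plain dict is accepted and expanded into the proto via keyword arguments. A sketch of polling an operation by name (the operation ID is illustrative):

.. code-block:: python

    import asyncio

    from google.cloud import beyondcorp_clientgateways_v1

    async def check_operation():
        client = beyondcorp_clientgateways_v1.ClientGatewaysServiceAsyncClient()
        # Expanded into operations_pb2.GetOperationRequest(**request).
        op = await client.get_operation(
            {"name": "projects/my-project/locations/us-central1/operations/operation-123"}
        )
        print(op.name, op.done)

    asyncio.run(check_operation())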
- return response - - async def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - async def set_iam_policy( - self, - request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM access control policy on the specified function. - - Replaces any existing policy. - - Args: - request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): - The request object. Request message for `SetIamPolicy` - method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. - - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide <https://cloud.google.com/iam/docs>`__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion.
- if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.set_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_iam_policy( - self, - request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Gets the IAM access control policy for a function. - - Returns an empty policy if the function exists and does not have a - policy set. - - Args: - request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): - The request object. Request message for `GetIamPolicy` - method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if - any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. - - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide <https://cloud.google.com/iam/docs>`__.
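Since `set_iam_policy` replaces any existing policy outright, the usual pattern is read-modify-write. A hedged sketch of granting a role on a gateway, built from the `google.iam.v1` protos imported above (resource, role, and member are illustrative):

.. code-block:: python

    import asyncio

    from google.cloud import beyondcorp_clientgateways_v1
    from google.iam.v1 import iam_policy_pb2, policy_pb2

    async def grant_role():
        client = beyondcorp_clientgateways_v1.ClientGatewaysServiceAsyncClient()
        resource = "projects/my-project/locations/us-central1/clientGateways/my-gateway"

        # Read-modify-write: fetch the policy, append a binding, write it back.
        # The etag returned by get_iam_policy guards against concurrent edits.
        policy = await client.get_iam_policy({"resource": resource})
        policy.bindings.append(
            policy_pb2.Binding(role="roles/viewer", members=["user:eve@example.com"])
        )
        await client.set_iam_policy(
            iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy)
        )

    asyncio.run(grant_role())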
- """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def test_iam_permissions( - self, - request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Tests the specified IAM permissions against the IAM access control - policy for a function. - - If the function does not exist, this will return an empty set - of permissions, not a NOT_FOUND error. - - Args: - request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): - The request object. Request message for - `TestIamPermissions` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.iam_policy_pb2.TestIamPermissionsResponse: - Response message for ``TestIamPermissions`` method. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.test_iam_permissions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. 
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
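The location mixins round out the hand-written surface; immediately below, `__aenter__`/`__aexit__` make the async client usable as an async context manager, so the transport is closed deterministically. A brief sketch (the project name is illustrative):

.. code-block:: python

    import asyncio

    from google.cloud import beyondcorp_clientgateways_v1

    async def main():
        # __aexit__ awaits transport.close(), releasing the gRPC channel.
        async with beyondcorp_clientgateways_v1.ClientGatewaysServiceAsyncClient() as client:
            locations = await client.list_locations({"name": "projects/my-project"})
            for location in locations.locations:
                print(location.location_id)

    asyncio.run(main())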
- return response - - async def __aenter__(self) -> "ClientGatewaysServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "ClientGatewaysServiceAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/client.py b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/client.py deleted file mode 100644 index 6707838da80f..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/client.py +++ /dev/null @@ -1,1759 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.beyondcorp_clientgateways_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.beyondcorp_clientgateways_v1.services.client_gateways_service import pagers -from google.cloud.beyondcorp_clientgateways_v1.types import client_gateways_service -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import ClientGatewaysServiceTransport, DEFAULT_CLIENT_INFO 
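client.py, whose imports begin above, registers three transports in the metaclass shown next; the constructor's `transport` argument selects among them by label. A hedged sketch, assuming default credentials are available:

.. code-block:: python

    from google.cloud import beyondcorp_clientgateways_v1

    # No label: the first registered transport is used, i.e. "grpc".
    grpc_client = beyondcorp_clientgateways_v1.ClientGatewaysServiceClient()

    # An explicit label picks a specific transport from the registry.
    rest_client = beyondcorp_clientgateways_v1.ClientGatewaysServiceClient(
        transport="rest"
    )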
-from .transports.grpc import ClientGatewaysServiceGrpcTransport -from .transports.grpc_asyncio import ClientGatewaysServiceGrpcAsyncIOTransport -from .transports.rest import ClientGatewaysServiceRestTransport - - -class ClientGatewaysServiceClientMeta(type): - """Metaclass for the ClientGatewaysService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[ClientGatewaysServiceTransport]] - _transport_registry["grpc"] = ClientGatewaysServiceGrpcTransport - _transport_registry["grpc_asyncio"] = ClientGatewaysServiceGrpcAsyncIOTransport - _transport_registry["rest"] = ClientGatewaysServiceRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[ClientGatewaysServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class ClientGatewaysServiceClient(metaclass=ClientGatewaysServiceClientMeta): - """API Overview: - - The ``beyondcorp.googleapis.com`` service implements the Google - Cloud BeyondCorp API. - - Data Model: - - The ClientGatewaysService exposes the following resources: - - - Client Gateways, named as follows: - ``projects/{project_id}/locations/{location_id}/clientGateways/{client_gateway_id}``. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = "beyondcorp.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - _DEFAULT_ENDPOINT_TEMPLATE = "beyondcorp.{UNIVERSE_DOMAIN}" - _DEFAULT_UNIVERSE = "googleapis.com" - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - ClientGatewaysServiceClient: The constructed client.
- """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - ClientGatewaysServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> ClientGatewaysServiceTransport: - """Returns the transport used by the client instance. - - Returns: - ClientGatewaysServiceTransport: The transport used by the client - instance. - """ - return self._transport - - @staticmethod - def client_gateway_path(project: str,location: str,client_gateway: str,) -> str: - """Returns a fully-qualified client_gateway string.""" - return "projects/{project}/locations/{location}/clientGateways/{client_gateway}".format(project=project, location=location, client_gateway=client_gateway, ) - - @staticmethod - def parse_client_gateway_path(path: str) -> Dict[str,str]: - """Parses a client_gateway path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/clientGateways/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - 
-    @staticmethod
-    def parse_common_location_path(path: str) -> Dict[str,str]:
-        """Parse a location path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @classmethod
-    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
-        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
-
-        The client cert source is determined in the following order:
-        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
-        client cert source is None.
-        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
-        default client cert source exists, use the default one; otherwise the client cert
-        source is None.
-
-        The API endpoint is determined in the following order:
-        (1) if `client_options.api_endpoint` is provided, use the provided one.
-        (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
-        default mTLS endpoint; if the environment variable is "never", use the default API
-        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
-        use the default API endpoint.
-
-        More details can be found at https://google.aip.dev/auth/4114.
-
-        Args:
-            client_options (google.api_core.client_options.ClientOptions): Custom options for the
-                client. Only the `api_endpoint` and `client_cert_source` properties may be used
-                in this method.
-
-        Returns:
-            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
-                client cert source to use.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
-        """
-
-        warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
-            DeprecationWarning)
-        if client_options is None:
-            client_options = client_options_lib.ClientOptions()
-        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
-        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
-        if use_client_cert not in ("true", "false"):
-            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
-        if use_mtls_endpoint not in ("auto", "never", "always"):
-            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
-
-        # Figure out the client cert source to use.
-        client_cert_source = None
-        if use_client_cert == "true":
-            if client_options.client_cert_source:
-                client_cert_source = client_options.client_cert_source
-            elif mtls.has_default_client_cert_source():
-                client_cert_source = mtls.default_client_cert_source()
-
-        # Figure out which api endpoint to use.
-        if client_options.api_endpoint is not None:
-            api_endpoint = client_options.api_endpoint
-        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
-            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
-        else:
-            api_endpoint = cls.DEFAULT_ENDPOINT
-
-        return api_endpoint, client_cert_source
-
-    @staticmethod
-    def _read_environment_variables():
-        """Returns the environment variables used by the client.
-
-        Returns:
-            Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
-            GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.
-
-        Raises:
-            ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
-                any of ["true", "false"].
- google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"]. - """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = ClientGatewaysServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = ClientGatewaysServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = ClientGatewaysServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. 
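Taken together, ``_read_environment_variables``, ``_get_client_cert_source``, and ``_get_api_endpoint`` implement the endpoint decision described above. A rough sketch of the observable behavior through the public properties, assuming none of the related environment variables are otherwise set; ``AnonymousCredentials`` is used only so the snippet can construct a client without Application Default Credentials:

.. code-block:: python

    import os

    from google.auth.credentials import AnonymousCredentials
    from google.cloud.beyondcorp_clientgateways_v1 import ClientGatewaysServiceClient

    # No overrides: the regular endpoint in the default universe.
    os.environ.pop("GOOGLE_API_USE_MTLS_ENDPOINT", None)
    client = ClientGatewaysServiceClient(credentials=AnonymousCredentials())
    assert client.api_endpoint == "beyondcorp.googleapis.com"
    assert client.universe_domain == "googleapis.com"

    # "always" forces the mTLS endpoint (default universe only).
    os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "always"
    client = ClientGatewaysServiceClient(credentials=AnonymousCredentials())
    assert client.api_endpoint == "beyondcorp.mtls.googleapis.com"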
- """ - universe_domain = ClientGatewaysServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. - """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, ClientGatewaysServiceTransport, Callable[..., ClientGatewaysServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the client gateways service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,ClientGatewaysServiceTransport,Callable[..., ClientGatewaysServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the ClientGatewaysServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. 
Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) - - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = ClientGatewaysServiceClient._read_environment_variables() - self._client_cert_source = ClientGatewaysServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = ClientGatewaysServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER - # Setup logging. - client_logging.initialize_logging() - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, ClientGatewaysServiceTransport) - if transport_provided: - # transport is a ClientGatewaysServiceTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = cast(ClientGatewaysServiceTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - ClientGatewaysServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[ClientGatewaysServiceTransport], Callable[..., ClientGatewaysServiceTransport]] = ( - ClientGatewaysServiceClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., ClientGatewaysServiceTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.beyondcorp.clientgateways_v1.ClientGatewaysServiceClient`.", - extra = { - "serviceName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService", - "credentialsType": None, - } - ) - - def list_client_gateways(self, - request: Optional[Union[client_gateways_service.ListClientGatewaysRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListClientGatewaysPager: - r"""Lists ClientGateways in a given project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_clientgateways_v1 - - def sample_list_client_gateways(): - # Create a client - client = beyondcorp_clientgateways_v1.ClientGatewaysServiceClient() - - # Initialize request argument(s) - request = beyondcorp_clientgateways_v1.ListClientGatewaysRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_client_gateways(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.beyondcorp_clientgateways_v1.types.ListClientGatewaysRequest, dict]): - The request object. Message for requesting list of - ClientGateways. - parent (str): - Required. Parent value for - ListClientGatewaysRequest. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.beyondcorp_clientgateways_v1.services.client_gateways_service.pagers.ListClientGatewaysPager: - Message for response to listing - ClientGateways. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, client_gateways_service.ListClientGatewaysRequest): - request = client_gateways_service.ListClientGatewaysRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_client_gateways] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListClientGatewaysPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
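One behavior worth calling out from the flattened-parameter guard above: a populated ``request`` object and flattened fields such as ``parent`` are mutually exclusive, and mixing them fails fast, before any RPC is attempted. A small sketch with a placeholder project and location; anonymous credentials keep it runnable without ADC:

.. code-block:: python

    from google.auth.credentials import AnonymousCredentials
    from google.cloud import beyondcorp_clientgateways_v1

    client = beyondcorp_clientgateways_v1.ClientGatewaysServiceClient(
        credentials=AnonymousCredentials()
    )
    request = beyondcorp_clientgateways_v1.ListClientGatewaysRequest(
        parent="projects/my-project/locations/us-central1"
    )

    # Passing both the request object and a flattened field raises
    # ValueError before any network I/O happens.
    try:
        client.list_client_gateways(request=request, parent=request.parent)
    except ValueError as exc:
        print(exc)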
- return response - - def get_client_gateway(self, - request: Optional[Union[client_gateways_service.GetClientGatewayRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> client_gateways_service.ClientGateway: - r"""Gets details of a single ClientGateway. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_clientgateways_v1 - - def sample_get_client_gateway(): - # Create a client - client = beyondcorp_clientgateways_v1.ClientGatewaysServiceClient() - - # Initialize request argument(s) - request = beyondcorp_clientgateways_v1.GetClientGatewayRequest( - name="name_value", - ) - - # Make the request - response = client.get_client_gateway(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.beyondcorp_clientgateways_v1.types.GetClientGatewayRequest, dict]): - The request object. Message for getting a ClientGateway. - name (str): - Required. Name of the resource - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.beyondcorp_clientgateways_v1.types.ClientGateway: - Message describing ClientGateway - object. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, client_gateways_service.GetClientGatewayRequest): - request = client_gateways_service.GetClientGatewayRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_client_gateway] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_client_gateway(self, - request: Optional[Union[client_gateways_service.CreateClientGatewayRequest, dict]] = None, - *, - parent: Optional[str] = None, - client_gateway: Optional[client_gateways_service.ClientGateway] = None, - client_gateway_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Creates a new ClientGateway in a given project and - location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_clientgateways_v1 - - def sample_create_client_gateway(): - # Create a client - client = beyondcorp_clientgateways_v1.ClientGatewaysServiceClient() - - # Initialize request argument(s) - client_gateway = beyondcorp_clientgateways_v1.ClientGateway() - client_gateway.name = "name_value" - - request = beyondcorp_clientgateways_v1.CreateClientGatewayRequest( - parent="parent_value", - client_gateway=client_gateway, - ) - - # Make the request - operation = client.create_client_gateway(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.beyondcorp_clientgateways_v1.types.CreateClientGatewayRequest, dict]): - The request object. Message for creating a ClientGateway. - parent (str): - Required. Value for parent. - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - client_gateway (google.cloud.beyondcorp_clientgateways_v1.types.ClientGateway): - Required. The resource being created. - This corresponds to the ``client_gateway`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - client_gateway_id (str): - Optional. User-settable client gateway resource ID. - - - Must start with a letter. - - Must contain between 4-63 characters from - ``/[a-z][0-9]-/``. - - Must end with a number or a letter. - - This corresponds to the ``client_gateway_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.beyondcorp_clientgateways_v1.types.ClientGateway` - Message describing ClientGateway object. - - """ - # Create or coerce a protobuf request object. 
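Because ``create_client_gateway`` returns a long-running operation, callers typically block on ``result()``. A hedged sketch of that flow; the parent, gateway ID, and timeout are arbitrary placeholder values:

.. code-block:: python

    from google.cloud import beyondcorp_clientgateways_v1

    def create_and_wait(client, parent: str):
        """Start the create LRO and block until it finishes (placeholder IDs)."""
        gateway = beyondcorp_clientgateways_v1.ClientGateway(name="name_value")
        operation = client.create_client_gateway(
            parent=parent,
            client_gateway=gateway,
            client_gateway_id="my-gateway-0",
        )
        # While the LRO runs, its ClientGatewayOperationMetadata is
        # available via operation.metadata.
        print("Waiting for operation to complete...")
        # result() polls until done and returns the finished ClientGateway.
        return operation.result(timeout=300)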
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, client_gateway, client_gateway_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, client_gateways_service.CreateClientGatewayRequest): - request = client_gateways_service.CreateClientGatewayRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if client_gateway is not None: - request.client_gateway = client_gateway - if client_gateway_id is not None: - request.client_gateway_id = client_gateway_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_client_gateway] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - client_gateways_service.ClientGateway, - metadata_type=client_gateways_service.ClientGatewayOperationMetadata, - ) - - # Done; return the response. - return response - - def delete_client_gateway(self, - request: Optional[Union[client_gateways_service.DeleteClientGatewayRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Deletes a single ClientGateway. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import beyondcorp_clientgateways_v1 - - def sample_delete_client_gateway(): - # Create a client - client = beyondcorp_clientgateways_v1.ClientGatewaysServiceClient() - - # Initialize request argument(s) - request = beyondcorp_clientgateways_v1.DeleteClientGatewayRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_client_gateway(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.beyondcorp_clientgateways_v1.types.DeleteClientGatewayRequest, dict]): - The request object. Message for deleting a ClientGateway - name (str): - Required. 
Name of the resource - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, client_gateways_service.DeleteClientGatewayRequest): - request = client_gateways_service.DeleteClientGatewayRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_client_gateway] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=client_gateways_service.ClientGatewayOperationMetadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "ClientGatewaysServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! 
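To illustrate that warning: the context-manager form closes the transport on exit, so it suits a short-lived client that is the sole owner of its transport. A minimal sketch, again using anonymous credentials only so it constructs without ADC:

.. code-block:: python

    from google.auth.credentials import AnonymousCredentials
    from google.cloud import beyondcorp_clientgateways_v1

    # The transport is closed when the block exits; do not share it
    # with other clients if you use this pattern.
    with beyondcorp_clientgateways_v1.ClientGatewaysServiceClient(
        credentials=AnonymousCredentials()
    ) as client:
        print(client.api_endpoint)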
- """ - self.transport.close() - - def list_operations( - self, - request: Optional[operations_pb2.ListOperationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.ListOperationsResponse: - r"""Lists operations that match the specified filter in the request. - - Args: - request (:class:`~.operations_pb2.ListOperationsRequest`): - The request object. Request message for - `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.ListOperationsResponse: - Response message for ``ListOperations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.ListOperationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_operations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_operation] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def delete_operation( - self, - request: Optional[operations_pb2.DeleteOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a long-running operation. - - This method indicates that the client is no longer interested - in the operation result. It does not cancel the operation. - If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.DeleteOperationRequest`): - The request object. Request message for - `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.DeleteOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - None - """ - # Create or coerce a protobuf request object. 
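As these operations mixins show, they take raw ``operations_pb2`` requests rather than proto-plus types, with dicts expanded via keyword arguments. A minimal sketch, assuming Application Default Credentials are configured and using a placeholder project and location:

.. code-block:: python

    from google.cloud import beyondcorp_clientgateways_v1
    from google.longrunning import operations_pb2

    client = beyondcorp_clientgateways_v1.ClientGatewaysServiceClient()

    # List the LROs under a location and print their completion state.
    request = operations_pb2.ListOperationsRequest(
        name="projects/my-project/locations/us-central1"
    )
    response = client.list_operations(request=request)
    for op in response.operations:
        print(op.name, op.done)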
- # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.cancel_operation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - def set_iam_policy( - self, - request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM access control policy on the specified function. - - Replaces any existing policy. - - Args: - request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): - The request object. Request message for `SetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. - - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. 
- - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def get_iam_policy( - self, - request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Gets the IAM access control policy for a function. - - Returns an empty policy if the function exists and does not have a - policy set. - - Args: - request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): - The request object. Request message for `GetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if - any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. 
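The IAM mixins are likewise raw-protobuf methods keyed by ``resource``. A sketch using ``test_iam_permissions``; the resource name and the permission string are illustrative placeholders, and ADC is assumed:

.. code-block:: python

    from google.cloud import beyondcorp_clientgateways_v1
    from google.iam.v1 import iam_policy_pb2

    client = beyondcorp_clientgateways_v1.ClientGatewaysServiceClient()
    resource = "projects/my-project/locations/us-central1/clientGateways/my-gateway"

    # Returns the subset of the given permissions that the caller holds
    # on the resource (an empty set if the resource does not exist).
    response = client.test_iam_permissions(
        request=iam_policy_pb2.TestIamPermissionsRequest(
            resource=resource,
            permissions=["beyondcorp.clientGateways.get"],
        )
    )
    print(list(response.permissions))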
- - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def test_iam_permissions( - self, - request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Tests the specified IAM permissions against the IAM access control - policy for a function. - - If the function does not exist, this will return an empty set - of permissions, not a NOT_FOUND error. - - Args: - request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): - The request object. Request message for - `TestIamPermissions` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.iam_policy_pb2.TestIamPermissionsResponse: - Response message for ``TestIamPermissions`` method. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
- if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "ClientGatewaysServiceClient", -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/pagers.py b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/pagers.py deleted file mode 100644 index 8af8931bc425..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/pagers.py +++ /dev/null @@ -1,166 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.beyondcorp_clientgateways_v1.types import client_gateways_service - - -class ListClientGatewaysPager: - """A pager for iterating through ``list_client_gateways`` requests. - - This class thinly wraps an initial - :class:`google.cloud.beyondcorp_clientgateways_v1.types.ListClientGatewaysResponse` object, and - provides an ``__iter__`` method to iterate through its - ``client_gateways`` field. 
- - If there are more pages, the ``__iter__`` method will make additional - ``ListClientGateways`` requests and continue to iterate - through the ``client_gateways`` field on the - corresponding responses. - - All the usual :class:`google.cloud.beyondcorp_clientgateways_v1.types.ListClientGatewaysResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., client_gateways_service.ListClientGatewaysResponse], - request: client_gateways_service.ListClientGatewaysRequest, - response: client_gateways_service.ListClientGatewaysResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.beyondcorp_clientgateways_v1.types.ListClientGatewaysRequest): - The initial request object. - response (google.cloud.beyondcorp_clientgateways_v1.types.ListClientGatewaysResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = client_gateways_service.ListClientGatewaysRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[client_gateways_service.ListClientGatewaysResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[client_gateways_service.ClientGateway]: - for page in self.pages: - yield from page.client_gateways - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListClientGatewaysAsyncPager: - """A pager for iterating through ``list_client_gateways`` requests. - - This class thinly wraps an initial - :class:`google.cloud.beyondcorp_clientgateways_v1.types.ListClientGatewaysResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``client_gateways`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListClientGateways`` requests and continue to iterate - through the ``client_gateways`` field on the - corresponding responses. - - All the usual :class:`google.cloud.beyondcorp_clientgateways_v1.types.ListClientGatewaysResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - def __init__(self, - method: Callable[..., Awaitable[client_gateways_service.ListClientGatewaysResponse]], - request: client_gateways_service.ListClientGatewaysRequest, - response: client_gateways_service.ListClientGatewaysResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.beyondcorp_clientgateways_v1.types.ListClientGatewaysRequest): - The initial request object. - response (google.cloud.beyondcorp_clientgateways_v1.types.ListClientGatewaysResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = client_gateways_service.ListClientGatewaysRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[client_gateways_service.ListClientGatewaysResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[client_gateways_service.ClientGateway]: - async def async_generator(): - async for page in self.pages: - for response in page.client_gateways: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/transports/README.rst b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/transports/README.rst deleted file mode 100644 index 10558ab764b4..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`ClientGatewaysServiceTransport` is the ABC for all transports. -- public child `ClientGatewaysServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `ClientGatewaysServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseClientGatewaysServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `ClientGatewaysServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). 
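As a usage illustration for the pagers and transports described above, here is a minimal sketch (assuming Application Default Credentials are configured; the parent path is hypothetical). The sync client accepts a transport by name, and iterating the returned pager transparently follows ``next_page_token``:

.. code-block:: python

    from google.cloud.beyondcorp_clientgateways_v1 import ClientGatewaysServiceClient

    # "grpc" or "rest" for the sync client; the name resolves through the
    # transport registry defined in transports/__init__.py below.
    client = ClientGatewaysServiceClient(transport="grpc")

    # list_client_gateways returns the ListClientGatewaysPager defined above;
    # iteration fetches follow-up pages on demand.
    for gateway in client.list_client_gateways(
        parent="projects/my-project/locations/us-central1",
    ):
        print(gateway.name)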
diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/transports/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/transports/__init__.py deleted file mode 100644 index 9d8fc332472d..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import ClientGatewaysServiceTransport -from .grpc import ClientGatewaysServiceGrpcTransport -from .grpc_asyncio import ClientGatewaysServiceGrpcAsyncIOTransport -from .rest import ClientGatewaysServiceRestTransport -from .rest import ClientGatewaysServiceRestInterceptor - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[ClientGatewaysServiceTransport]] -_transport_registry['grpc'] = ClientGatewaysServiceGrpcTransport -_transport_registry['grpc_asyncio'] = ClientGatewaysServiceGrpcAsyncIOTransport -_transport_registry['rest'] = ClientGatewaysServiceRestTransport - -__all__ = ( - 'ClientGatewaysServiceTransport', - 'ClientGatewaysServiceGrpcTransport', - 'ClientGatewaysServiceGrpcAsyncIOTransport', - 'ClientGatewaysServiceRestTransport', - 'ClientGatewaysServiceRestInterceptor', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/transports/base.py b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/transports/base.py deleted file mode 100644 index e0ec0dc339b8..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/transports/base.py +++ /dev/null @@ -1,333 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.beyondcorp_clientgateways_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.beyondcorp_clientgateways_v1.types import client_gateways_service -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class ClientGatewaysServiceTransport(abc.ABC): - """Abstract transport class for ClientGatewaysService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'beyondcorp.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'beyondcorp.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. 
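# In effect (sketch of the branches that follow): explicit `credentials` win;
# otherwise a `credentials_file` is loaded via google.auth; otherwise
# google.auth.default() is consulted (e.g. Application Default Credentials).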
- if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.list_client_gateways: gapic_v1.method.wrap_method( - self.list_client_gateways, - default_timeout=None, - client_info=client_info, - ), - self.get_client_gateway: gapic_v1.method.wrap_method( - self.get_client_gateway, - default_timeout=None, - client_info=client_info, - ), - self.create_client_gateway: gapic_v1.method.wrap_method( - self.create_client_gateway, - default_timeout=None, - client_info=client_info, - ), - self.delete_client_gateway: gapic_v1.method.wrap_method( - self.delete_client_gateway, - default_timeout=None, - client_info=client_info, - ), - self.get_location: gapic_v1.method.wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - self.list_locations: gapic_v1.method.wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - self.get_iam_policy: gapic_v1.method.wrap_method( - self.get_iam_policy, - default_timeout=None, - client_info=client_info, - ), - self.set_iam_policy: gapic_v1.method.wrap_method( - self.set_iam_policy, - default_timeout=None, - client_info=client_info, - ), - self.test_iam_permissions: gapic_v1.method.wrap_method( - self.test_iam_permissions, - default_timeout=None, - client_info=client_info, - ), - self.cancel_operation: gapic_v1.method.wrap_method( - self.cancel_operation, - default_timeout=None, - client_info=client_info, - ), - self.delete_operation: gapic_v1.method.wrap_method( - self.delete_operation, - default_timeout=None, - client_info=client_info, - ), - self.get_operation: gapic_v1.method.wrap_method( - self.get_operation, - default_timeout=None, - client_info=client_info, - ), - self.list_operations: gapic_v1.method.wrap_method( - self.list_operations, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
- """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def list_client_gateways(self) -> Callable[ - [client_gateways_service.ListClientGatewaysRequest], - Union[ - client_gateways_service.ListClientGatewaysResponse, - Awaitable[client_gateways_service.ListClientGatewaysResponse] - ]]: - raise NotImplementedError() - - @property - def get_client_gateway(self) -> Callable[ - [client_gateways_service.GetClientGatewayRequest], - Union[ - client_gateways_service.ClientGateway, - Awaitable[client_gateways_service.ClientGateway] - ]]: - raise NotImplementedError() - - @property - def create_client_gateway(self) -> Callable[ - [client_gateways_service.CreateClientGatewayRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def delete_client_gateway(self) -> Callable[ - [client_gateways_service.DeleteClientGatewayRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def list_operations( - self, - ) -> Callable[ - [operations_pb2.ListOperationsRequest], - Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], - ]: - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def delete_operation( - self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: - raise NotImplementedError() - - @property - def set_iam_policy( - self, - ) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], - ]: - raise NotImplementedError() - - @property - def get_iam_policy( - self, - ) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], - ]: - raise NotImplementedError() - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Union[ - iam_policy_pb2.TestIamPermissionsResponse, - Awaitable[iam_policy_pb2.TestIamPermissionsResponse], - ], - ]: - raise NotImplementedError() - - @property - def get_location(self, - ) -> Callable[ - [locations_pb2.GetLocationRequest], - Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], - ]: - raise NotImplementedError() - - @property - def list_locations(self, - ) -> Callable[ - [locations_pb2.ListLocationsRequest], - Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'ClientGatewaysServiceTransport', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/transports/grpc.py b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/transports/grpc.py deleted file mode 100644 index cc300961774a..000000000000 --- 
a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/transports/grpc.py +++ /dev/null @@ -1,645 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json -import logging as std_logging -import pickle -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore - -from google.cloud.beyondcorp_clientgateways_v1.types import client_gateways_service -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from .base import ClientGatewaysServiceTransport, DEFAULT_CLIENT_INFO - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER - def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService", - "rpcName": client_call_details.method, - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - - response = continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = response.trailing_metadata() - # Convert gRPC metadata to a list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = response.result() - if isinstance(result, proto.Message): -
response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response for {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService", - "rpcName": client_call_details.method, - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class ClientGatewaysServiceGrpcTransport(ClientGatewaysServiceTransport): - """gRPC backend transport for ClientGatewaysService. - - API Overview: - - The ``beyondcorp.googleapis.com`` service implements the Google - Cloud BeyondCorp API. - - Data Model: - - The ClientGatewaysService exposes the following resources: - - - Client Gateways, named as follows: - ``projects/{project_id}/locations/{location_id}/clientGateways/{client_gateway_id}``. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'beyondcorp.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'beyondcorp.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional[Sequence[str]]): A list of scopes. This argument is - ignored if a ``channel`` instance is provided. - channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
- If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
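# Hypothetical callback for illustration: client_cert_source must return
# (certificate_bytes, private_key_bytes), both PEM-encoded, e.g.
#   lambda: (Path("client.pem").read_bytes(), Path("client.key").read_bytes())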
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) - - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'beyondcorp.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): An optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service.
- """ - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def list_client_gateways(self) -> Callable[ - [client_gateways_service.ListClientGatewaysRequest], - client_gateways_service.ListClientGatewaysResponse]: - r"""Return a callable for the list client gateways method over gRPC. - - Lists ClientGateways in a given project and location. - - Returns: - Callable[[~.ListClientGatewaysRequest], - ~.ListClientGatewaysResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_client_gateways' not in self._stubs: - self._stubs['list_client_gateways'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService/ListClientGateways', - request_serializer=client_gateways_service.ListClientGatewaysRequest.serialize, - response_deserializer=client_gateways_service.ListClientGatewaysResponse.deserialize, - ) - return self._stubs['list_client_gateways'] - - @property - def get_client_gateway(self) -> Callable[ - [client_gateways_service.GetClientGatewayRequest], - client_gateways_service.ClientGateway]: - r"""Return a callable for the get client gateway method over gRPC. - - Gets details of a single ClientGateway. - - Returns: - Callable[[~.GetClientGatewayRequest], - ~.ClientGateway]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_client_gateway' not in self._stubs: - self._stubs['get_client_gateway'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService/GetClientGateway', - request_serializer=client_gateways_service.GetClientGatewayRequest.serialize, - response_deserializer=client_gateways_service.ClientGateway.deserialize, - ) - return self._stubs['get_client_gateway'] - - @property - def create_client_gateway(self) -> Callable[ - [client_gateways_service.CreateClientGatewayRequest], - operations_pb2.Operation]: - r"""Return a callable for the create client gateway method over gRPC. - - Creates a new ClientGateway in a given project and - location. - - Returns: - Callable[[~.CreateClientGatewayRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
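# Usage sketch: the property below returns a cached unary-unary callable, so
#   operation = transport.create_client_gateway(request)
# issues the RPC directly and returns a google.longrunning
# operations_pb2.Operation; repeated accesses reuse the cached stub.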
- if 'create_client_gateway' not in self._stubs: - self._stubs['create_client_gateway'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService/CreateClientGateway', - request_serializer=client_gateways_service.CreateClientGatewayRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['create_client_gateway'] - - @property - def delete_client_gateway(self) -> Callable[ - [client_gateways_service.DeleteClientGatewayRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete client gateway method over gRPC. - - Deletes a single ClientGateway. - - Returns: - Callable[[~.DeleteClientGatewayRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_client_gateway' not in self._stubs: - self._stubs['delete_client_gateway'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService/DeleteClientGateway', - request_serializer=client_gateways_service.DeleteClientGatewayRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_client_gateway'] - - def close(self): - self._logged_channel.close() - - @property - def delete_operation( - self, - ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/DeleteOperation", - request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["delete_operation"] - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
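# Sketch: an Operation returned by create/delete_client_gateway above can be
# polled through this callable, e.g.
#   transport.get_operation(operations_pb2.GetOperationRequest(name=operation.name))
# or, more conveniently, through transport.operations_client.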
- if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def list_operations( - self, - ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self._logged_channel.unary_unary( - "/google.longrunning.Operations/ListOperations", - request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, - response_deserializer=operations_pb2.ListOperationsResponse.FromString, - ) - return self._stubs["list_operations"] - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the get location method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_location" not in self._stubs: - self._stubs["get_location"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - @property - def set_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - Sets the IAM access control policy on the specified - resource. Replaces any existing policy. - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each.
- if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( - "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["set_iam_policy"] - - @property - def get_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - Gets the IAM access control policy for a resource. - Returns an empty policy if the resource exists and does - not have a policy set. - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( - "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["get_iam_policy"] - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse - ]: - r"""Return a callable for the test iam permissions method over gRPC. - Tests the specified permissions against the IAM access control - policy for a resource. If the resource does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( - "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs["test_iam_permissions"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'ClientGatewaysServiceGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/transports/grpc_asyncio.py deleted file mode 100644 index 298e000523c6..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,725 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import inspect -import json -import pickle -import logging as std_logging -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import exceptions as core_exceptions -from google.api_core import retry_async as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.beyondcorp_clientgateways_v1.types import client_gateways_service -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from .base import ClientGatewaysServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import ClientGatewaysServiceGrpcTransport - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER - async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService", - "rpcName": str(client_call_details.method), - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - response = await continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = await response.trailing_metadata() - # Convert gRPC metadata to a list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = await response - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload =
MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response to rpc {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService", - "rpcName": str(client_call_details.method), - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class ClientGatewaysServiceGrpcAsyncIOTransport(ClientGatewaysServiceTransport): - """gRPC AsyncIO backend transport for ClientGatewaysService. - - API Overview: - - The ``beyondcorp.googleapis.com`` service implements the Google - Cloud BeyondCorp API. - - Data Model: - - The ClientGatewaysService exposes the following resources: - - - Client Gateways, named as follows: - ``projects/{project_id}/locations/{location_id}/clientGateways/{client_gateway_id}``. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'beyondcorp.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - scopes (Optional[Sequence[str]]): An optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object.
- """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'beyondcorp.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'beyondcorp.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional[Sequence[str]]): An optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, aio.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. 
- - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def list_client_gateways(self) -> Callable[ - [client_gateways_service.ListClientGatewaysRequest], - Awaitable[client_gateways_service.ListClientGatewaysResponse]]: - r"""Return a callable for the list client gateways method over gRPC. - - Lists ClientGateways in a given project and location. - - Returns: - Callable[[~.ListClientGatewaysRequest], - Awaitable[~.ListClientGatewaysResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_client_gateways' not in self._stubs: - self._stubs['list_client_gateways'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService/ListClientGateways', - request_serializer=client_gateways_service.ListClientGatewaysRequest.serialize, - response_deserializer=client_gateways_service.ListClientGatewaysResponse.deserialize, - ) - return self._stubs['list_client_gateways'] - - @property - def get_client_gateway(self) -> Callable[ - [client_gateways_service.GetClientGatewayRequest], - Awaitable[client_gateways_service.ClientGateway]]: - r"""Return a callable for the get client gateway method over gRPC. - - Gets details of a single ClientGateway. - - Returns: - Callable[[~.GetClientGatewayRequest], - Awaitable[~.ClientGateway]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_client_gateway' not in self._stubs: - self._stubs['get_client_gateway'] = self._logged_channel.unary_unary( - '/google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService/GetClientGateway', - request_serializer=client_gateways_service.GetClientGatewayRequest.serialize, - response_deserializer=client_gateways_service.ClientGateway.deserialize, - ) - return self._stubs['get_client_gateway'] - - @property - def create_client_gateway(self) -> Callable[ - [client_gateways_service.CreateClientGatewayRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the create client gateway method over gRPC. - - Creates a new ClientGateway in a given project and - location. - - Returns: - Callable[[~.CreateClientGatewayRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
-        if 'create_client_gateway' not in self._stubs:
-            self._stubs['create_client_gateway'] = self._logged_channel.unary_unary(
-                '/google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService/CreateClientGateway',
-                request_serializer=client_gateways_service.CreateClientGatewayRequest.serialize,
-                response_deserializer=operations_pb2.Operation.FromString,
-            )
-        return self._stubs['create_client_gateway']
-
-    @property
-    def delete_client_gateway(self) -> Callable[
-            [client_gateways_service.DeleteClientGatewayRequest],
-            Awaitable[operations_pb2.Operation]]:
-        r"""Return a callable for the delete client gateway method over gRPC.
-
-        Deletes a single ClientGateway.
-
-        Returns:
-            Callable[[~.DeleteClientGatewayRequest],
-                    Awaitable[~.Operation]]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'delete_client_gateway' not in self._stubs:
-            self._stubs['delete_client_gateway'] = self._logged_channel.unary_unary(
-                '/google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService/DeleteClientGateway',
-                request_serializer=client_gateways_service.DeleteClientGatewayRequest.serialize,
-                response_deserializer=operations_pb2.Operation.FromString,
-            )
-        return self._stubs['delete_client_gateway']
-
-    def _prep_wrapped_messages(self, client_info):
-        """Precompute the wrapped methods, overriding the base class method to use async wrappers."""
-        self._wrapped_methods = {
-            self.list_client_gateways: self._wrap_method(
-                self.list_client_gateways,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-            self.get_client_gateway: self._wrap_method(
-                self.get_client_gateway,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-            self.create_client_gateway: self._wrap_method(
-                self.create_client_gateway,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-            self.delete_client_gateway: self._wrap_method(
-                self.delete_client_gateway,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-            self.get_location: self._wrap_method(
-                self.get_location,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-            self.list_locations: self._wrap_method(
-                self.list_locations,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-            self.get_iam_policy: self._wrap_method(
-                self.get_iam_policy,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-            self.set_iam_policy: self._wrap_method(
-                self.set_iam_policy,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-            self.test_iam_permissions: self._wrap_method(
-                self.test_iam_permissions,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-            self.cancel_operation: self._wrap_method(
-                self.cancel_operation,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-            self.delete_operation: self._wrap_method(
-                self.delete_operation,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-            self.get_operation: self._wrap_method(
-                self.get_operation,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-            self.list_operations: self._wrap_method(
-                self.list_operations,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-        }
-
-    def _wrap_method(self, func, *args, **kwargs):
-        if self._wrap_with_kind:  # pragma: NO COVER
-            kwargs["kind"] = self.kind
-        return gapic_v1.method_async.wrap_method(func, *args, **kwargs)
-
-    def close(self):
-        return self._logged_channel.close()
-
-    @property
-    def kind(self) -> str:
-        return "grpc_asyncio"
-
-    @property
-    def delete_operation(
-        self,
-    ) -> Callable[[operations_pb2.DeleteOperationRequest], None]:
-        r"""Return a callable for the delete_operation method over gRPC.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if "delete_operation" not in self._stubs:
-            self._stubs["delete_operation"] = self._logged_channel.unary_unary(
-                "/google.longrunning.Operations/DeleteOperation",
-                request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString,
-                response_deserializer=None,
-            )
-        return self._stubs["delete_operation"]
-
-    @property
-    def cancel_operation(
-        self,
-    ) -> Callable[[operations_pb2.CancelOperationRequest], None]:
-        r"""Return a callable for the cancel_operation method over gRPC.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if "cancel_operation" not in self._stubs:
-            self._stubs["cancel_operation"] = self._logged_channel.unary_unary(
-                "/google.longrunning.Operations/CancelOperation",
-                request_serializer=operations_pb2.CancelOperationRequest.SerializeToString,
-                response_deserializer=None,
-            )
-        return self._stubs["cancel_operation"]
-
-    @property
-    def get_operation(
-        self,
-    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
-        r"""Return a callable for the get_operation method over gRPC.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if "get_operation" not in self._stubs:
-            self._stubs["get_operation"] = self._logged_channel.unary_unary(
-                "/google.longrunning.Operations/GetOperation",
-                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
-                response_deserializer=operations_pb2.Operation.FromString,
-            )
-        return self._stubs["get_operation"]
-
-    @property
-    def list_operations(
-        self,
-    ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]:
-        r"""Return a callable for the list_operations method over gRPC.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if "list_operations" not in self._stubs:
-            self._stubs["list_operations"] = self._logged_channel.unary_unary(
-                "/google.longrunning.Operations/ListOperations",
-                request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
-                response_deserializer=operations_pb2.ListOperationsResponse.FromString,
-            )
-        return self._stubs["list_operations"]
-
-    @property
-    def list_locations(
-        self,
-    ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]:
-        r"""Return a callable for the list locations method over gRPC.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if "list_locations" not in self._stubs:
-            self._stubs["list_locations"] = self._logged_channel.unary_unary(
-                "/google.cloud.location.Locations/ListLocations",
-                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
-                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
-            )
-        return self._stubs["list_locations"]
-
-    @property
-    def get_location(
-        self,
-    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
-        r"""Return a callable for the get location method over gRPC.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if "get_location" not in self._stubs:
-            self._stubs["get_location"] = self._logged_channel.unary_unary(
-                "/google.cloud.location.Locations/GetLocation",
-                request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
-                response_deserializer=locations_pb2.Location.FromString,
-            )
-        return self._stubs["get_location"]
-
-    @property
-    def set_iam_policy(
-        self,
-    ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]:
-        r"""Return a callable for the set iam policy method over gRPC.
-        Sets the IAM access control policy on the specified
-        resource. Replaces any existing policy.
-        Returns:
-            Callable[[~.SetIamPolicyRequest],
-                    ~.Policy]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if "set_iam_policy" not in self._stubs:
-            self._stubs["set_iam_policy"] = self._logged_channel.unary_unary(
-                "/google.iam.v1.IAMPolicy/SetIamPolicy",
-                request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString,
-                response_deserializer=policy_pb2.Policy.FromString,
-            )
-        return self._stubs["set_iam_policy"]
-
-    @property
-    def get_iam_policy(
-        self,
-    ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]:
-        r"""Return a callable for the get iam policy method over gRPC.
-        Gets the IAM access control policy for a resource.
-        Returns an empty policy if the resource exists and does
-        not have a policy set.
-        Returns:
-            Callable[[~.GetIamPolicyRequest],
-                    ~.Policy]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if "get_iam_policy" not in self._stubs:
-            self._stubs["get_iam_policy"] = self._logged_channel.unary_unary(
-                "/google.iam.v1.IAMPolicy/GetIamPolicy",
-                request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString,
-                response_deserializer=policy_pb2.Policy.FromString,
-            )
-        return self._stubs["get_iam_policy"]
-
-    @property
-    def test_iam_permissions(
-        self,
-    ) -> Callable[
-        [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse
-    ]:
-        r"""Return a callable for the test iam permissions method over gRPC.
-        Tests the specified permissions against the IAM access control
-        policy for a resource. If the resource does not exist, this will
-        return an empty set of permissions, not a NOT_FOUND error.
- Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( - "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs["test_iam_permissions"] - - -__all__ = ( - 'ClientGatewaysServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/transports/rest.py b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/transports/rest.py deleted file mode 100644 index 421f7e4a7be1..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/transports/rest.py +++ /dev/null @@ -1,2144 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
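Two of the deleted transports meet at this point in the diff: the gRPC AsyncIO transport above and the REST transport whose module begins below. Both are normally constructed for callers by the generated clients, which select a transport by its registered ``kind`` string. The following is a minimal usage sketch, not part of the deleted sources; it assumes the published ``google-cloud-beyondcorp-clientgateways`` package, application default credentials in the environment, and a placeholder parent path.

.. code-block:: python

    # Sketch only: "rest" and "grpc_asyncio" are the kind strings of the
    # two deleted transport modules; the parent value is a placeholder.
    import asyncio

    from google.cloud.beyondcorp_clientgateways_v1 import (
        ClientGatewaysServiceAsyncClient,
        ClientGatewaysServiceClient,
    )

    PARENT = "projects/my-project/locations/us-central1"


    def list_over_rest():
        # Synchronous client backed by ClientGatewaysServiceRestTransport.
        client = ClientGatewaysServiceClient(transport="rest")
        return [gw.name for gw in client.list_client_gateways(parent=PARENT)]


    async def list_over_grpc_asyncio():
        # Async client backed by ClientGatewaysServiceGrpcAsyncIOTransport;
        # the transport builds its channel via create_channel() and the
        # saved credentials.
        client = ClientGatewaysServiceAsyncClient(transport="grpc_asyncio")
        pager = await client.list_client_gateways(parent=PARENT)
        return [gw.name async for gw in pager]


    print(list_over_rest())
    print(asyncio.run(list_over_grpc_asyncio()))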
-#
-import logging
-import json  # type: ignore
-
-from google.auth.transport.requests import AuthorizedSession  # type: ignore
-from google.auth import credentials as ga_credentials  # type: ignore
-from google.api_core import exceptions as core_exceptions
-from google.api_core import retry as retries
-from google.api_core import rest_helpers
-from google.api_core import rest_streaming
-from google.api_core import gapic_v1
-
-from google.protobuf import json_format
-from google.api_core import operations_v1
-from google.iam.v1 import iam_policy_pb2  # type: ignore
-from google.iam.v1 import policy_pb2  # type: ignore
-from google.cloud.location import locations_pb2  # type: ignore
-
-from requests import __version__ as requests_version
-import dataclasses
-from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
-import warnings
-
-
-from google.cloud.beyondcorp_clientgateways_v1.types import client_gateways_service
-from google.longrunning import operations_pb2  # type: ignore
-
-
-from .rest_base import _BaseClientGatewaysServiceRestTransport
-from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO
-
-try:
-    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
-except AttributeError:  # pragma: NO COVER
-    OptionalRetry = Union[retries.Retry, object, None]  # type: ignore
-
-try:
-    from google.api_core import client_logging  # type: ignore
-    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
-except ImportError:  # pragma: NO COVER
-    CLIENT_LOGGING_SUPPORTED = False
-
-_LOGGER = logging.getLogger(__name__)
-
-DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
-    gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
-    grpc_version=None,
-    rest_version=f"requests@{requests_version}",
-)
-
-
-class ClientGatewaysServiceRestInterceptor:
-    """Interceptor for ClientGatewaysService.
-
-    Interceptors are used to manipulate requests, request metadata, and responses
-    in arbitrary ways.
-    Example use cases include:
-    * Logging
-    * Verifying requests according to service or custom semantics
-    * Stripping extraneous information from responses
-
-    These use cases and more can be enabled by injecting an
-    instance of a custom subclass when constructing the ClientGatewaysServiceRestTransport.
-
-    .. code-block:: python
-        class MyCustomClientGatewaysServiceInterceptor(ClientGatewaysServiceRestInterceptor):
-            def pre_create_client_gateway(self, request, metadata):
-                logging.info(f"Received request: {request}")
-                return request, metadata
-
-            def post_create_client_gateway(self, response):
-                logging.info(f"Received response: {response}")
-                return response
-
-            def pre_delete_client_gateway(self, request, metadata):
-                logging.info(f"Received request: {request}")
-                return request, metadata
-
-            def post_delete_client_gateway(self, response):
-                logging.info(f"Received response: {response}")
-                return response
-
-            def pre_get_client_gateway(self, request, metadata):
-                logging.info(f"Received request: {request}")
-                return request, metadata
-
-            def post_get_client_gateway(self, response):
-                logging.info(f"Received response: {response}")
-                return response
-
-            def pre_list_client_gateways(self, request, metadata):
-                logging.info(f"Received request: {request}")
-                return request, metadata
-
-            def post_list_client_gateways(self, response):
-                logging.info(f"Received response: {response}")
-                return response
-
-        transport = ClientGatewaysServiceRestTransport(interceptor=MyCustomClientGatewaysServiceInterceptor())
-        client = ClientGatewaysServiceClient(transport=transport)
-
-
-    """
-    def pre_create_client_gateway(self, request: client_gateways_service.CreateClientGatewayRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[client_gateways_service.CreateClientGatewayRequest, Sequence[Tuple[str, Union[str, bytes]]]]:
-        """Pre-rpc interceptor for create_client_gateway
-
-        Override in a subclass to manipulate the request or metadata
-        before they are sent to the ClientGatewaysService server.
-        """
-        return request, metadata
-
-    def post_create_client_gateway(self, response: operations_pb2.Operation) -> operations_pb2.Operation:
-        """Post-rpc interceptor for create_client_gateway
-
-        DEPRECATED. Please use the `post_create_client_gateway_with_metadata`
-        interceptor instead.
-
-        Override in a subclass to read or manipulate the response
-        after it is returned by the ClientGatewaysService server but before
-        it is returned to user code. This `post_create_client_gateway` interceptor runs
-        before the `post_create_client_gateway_with_metadata` interceptor.
-        """
-        return response
-
-    def post_create_client_gateway_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]:
-        """Post-rpc interceptor for create_client_gateway
-
-        Override in a subclass to read or manipulate the response or metadata after it
-        is returned by the ClientGatewaysService server but before it is returned to user code.
-
-        We recommend only using this `post_create_client_gateway_with_metadata`
-        interceptor in new development instead of the `post_create_client_gateway` interceptor.
-        When both interceptors are used, this `post_create_client_gateway_with_metadata` interceptor runs after the
-        `post_create_client_gateway` interceptor. The (possibly modified) response returned by
-        `post_create_client_gateway` will be passed to
-        `post_create_client_gateway_with_metadata`.
- """ - return response, metadata - - def pre_delete_client_gateway(self, request: client_gateways_service.DeleteClientGatewayRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[client_gateways_service.DeleteClientGatewayRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_client_gateway - - Override in a subclass to manipulate the request or metadata - before they are sent to the ClientGatewaysService server. - """ - return request, metadata - - def post_delete_client_gateway(self, response: operations_pb2.Operation) -> operations_pb2.Operation: - """Post-rpc interceptor for delete_client_gateway - - DEPRECATED. Please use the `post_delete_client_gateway_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the ClientGatewaysService server but before - it is returned to user code. This `post_delete_client_gateway` interceptor runs - before the `post_delete_client_gateway_with_metadata` interceptor. - """ - return response - - def post_delete_client_gateway_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for delete_client_gateway - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the ClientGatewaysService server but before it is returned to user code. - - We recommend only using this `post_delete_client_gateway_with_metadata` - interceptor in new development instead of the `post_delete_client_gateway` interceptor. - When both interceptors are used, this `post_delete_client_gateway_with_metadata` interceptor runs after the - `post_delete_client_gateway` interceptor. The (possibly modified) response returned by - `post_delete_client_gateway` will be passed to - `post_delete_client_gateway_with_metadata`. - """ - return response, metadata - - def pre_get_client_gateway(self, request: client_gateways_service.GetClientGatewayRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[client_gateways_service.GetClientGatewayRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_client_gateway - - Override in a subclass to manipulate the request or metadata - before they are sent to the ClientGatewaysService server. - """ - return request, metadata - - def post_get_client_gateway(self, response: client_gateways_service.ClientGateway) -> client_gateways_service.ClientGateway: - """Post-rpc interceptor for get_client_gateway - - DEPRECATED. Please use the `post_get_client_gateway_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the ClientGatewaysService server but before - it is returned to user code. This `post_get_client_gateway` interceptor runs - before the `post_get_client_gateway_with_metadata` interceptor. - """ - return response - - def post_get_client_gateway_with_metadata(self, response: client_gateways_service.ClientGateway, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[client_gateways_service.ClientGateway, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_client_gateway - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the ClientGatewaysService server but before it is returned to user code. 
- - We recommend only using this `post_get_client_gateway_with_metadata` - interceptor in new development instead of the `post_get_client_gateway` interceptor. - When both interceptors are used, this `post_get_client_gateway_with_metadata` interceptor runs after the - `post_get_client_gateway` interceptor. The (possibly modified) response returned by - `post_get_client_gateway` will be passed to - `post_get_client_gateway_with_metadata`. - """ - return response, metadata - - def pre_list_client_gateways(self, request: client_gateways_service.ListClientGatewaysRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[client_gateways_service.ListClientGatewaysRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_client_gateways - - Override in a subclass to manipulate the request or metadata - before they are sent to the ClientGatewaysService server. - """ - return request, metadata - - def post_list_client_gateways(self, response: client_gateways_service.ListClientGatewaysResponse) -> client_gateways_service.ListClientGatewaysResponse: - """Post-rpc interceptor for list_client_gateways - - DEPRECATED. Please use the `post_list_client_gateways_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the ClientGatewaysService server but before - it is returned to user code. This `post_list_client_gateways` interceptor runs - before the `post_list_client_gateways_with_metadata` interceptor. - """ - return response - - def post_list_client_gateways_with_metadata(self, response: client_gateways_service.ListClientGatewaysResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[client_gateways_service.ListClientGatewaysResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_client_gateways - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the ClientGatewaysService server but before it is returned to user code. - - We recommend only using this `post_list_client_gateways_with_metadata` - interceptor in new development instead of the `post_list_client_gateways` interceptor. - When both interceptors are used, this `post_list_client_gateways_with_metadata` interceptor runs after the - `post_list_client_gateways` interceptor. The (possibly modified) response returned by - `post_list_client_gateways` will be passed to - `post_list_client_gateways_with_metadata`. - """ - return response, metadata - - def pre_get_location( - self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_location - - Override in a subclass to manipulate the request or metadata - before they are sent to the ClientGatewaysService server. - """ - return request, metadata - - def post_get_location( - self, response: locations_pb2.Location - ) -> locations_pb2.Location: - """Post-rpc interceptor for get_location - - Override in a subclass to manipulate the response - after it is returned by the ClientGatewaysService server but before - it is returned to user code. 
- """ - return response - - def pre_list_locations( - self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_locations - - Override in a subclass to manipulate the request or metadata - before they are sent to the ClientGatewaysService server. - """ - return request, metadata - - def post_list_locations( - self, response: locations_pb2.ListLocationsResponse - ) -> locations_pb2.ListLocationsResponse: - """Post-rpc interceptor for list_locations - - Override in a subclass to manipulate the response - after it is returned by the ClientGatewaysService server but before - it is returned to user code. - """ - return response - - def pre_get_iam_policy( - self, request: iam_policy_pb2.GetIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the ClientGatewaysService server. - """ - return request, metadata - - def post_get_iam_policy( - self, response: policy_pb2.Policy - ) -> policy_pb2.Policy: - """Post-rpc interceptor for get_iam_policy - - Override in a subclass to manipulate the response - after it is returned by the ClientGatewaysService server but before - it is returned to user code. - """ - return response - - def pre_set_iam_policy( - self, request: iam_policy_pb2.SetIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for set_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the ClientGatewaysService server. - """ - return request, metadata - - def post_set_iam_policy( - self, response: policy_pb2.Policy - ) -> policy_pb2.Policy: - """Post-rpc interceptor for set_iam_policy - - Override in a subclass to manipulate the response - after it is returned by the ClientGatewaysService server but before - it is returned to user code. - """ - return response - - def pre_test_iam_permissions( - self, request: iam_policy_pb2.TestIamPermissionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for test_iam_permissions - - Override in a subclass to manipulate the request or metadata - before they are sent to the ClientGatewaysService server. - """ - return request, metadata - - def post_test_iam_permissions( - self, response: iam_policy_pb2.TestIamPermissionsResponse - ) -> iam_policy_pb2.TestIamPermissionsResponse: - """Post-rpc interceptor for test_iam_permissions - - Override in a subclass to manipulate the response - after it is returned by the ClientGatewaysService server but before - it is returned to user code. - """ - return response - - def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the ClientGatewaysService server. 
- """ - return request, metadata - - def post_cancel_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the response - after it is returned by the ClientGatewaysService server but before - it is returned to user code. - """ - return response - - def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the ClientGatewaysService server. - """ - return request, metadata - - def post_delete_operation( - self, response: None - ) -> None: - """Post-rpc interceptor for delete_operation - - Override in a subclass to manipulate the response - after it is returned by the ClientGatewaysService server but before - it is returned to user code. - """ - return response - - def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the ClientGatewaysService server. - """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.Operation - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the ClientGatewaysService server but before - it is returned to user code. - """ - return response - - def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_operations - - Override in a subclass to manipulate the request or metadata - before they are sent to the ClientGatewaysService server. - """ - return request, metadata - - def post_list_operations( - self, response: operations_pb2.ListOperationsResponse - ) -> operations_pb2.ListOperationsResponse: - """Post-rpc interceptor for list_operations - - Override in a subclass to manipulate the response - after it is returned by the ClientGatewaysService server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class ClientGatewaysServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: ClientGatewaysServiceRestInterceptor - - -class ClientGatewaysServiceRestTransport(_BaseClientGatewaysServiceRestTransport): - """REST backend synchronous transport for ClientGatewaysService. - - API Overview: - - The ``beyondcorp.googleapis.com`` service implements the Google - Cloud BeyondCorp API. - - Data Model: - - The ClientGatewaysService exposes the following resources: - - - Client Gateways, named as follows: - ``projects/{project_id}/locations/{location_id}/clientGateways/{client_gateway_id}``. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
-
-    It sends JSON representations of protocol buffers over HTTP/1.1
-    """
-
-    def __init__(self, *,
-            host: str = 'beyondcorp.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            client_cert_source_for_mtls: Optional[Callable[[
-                ], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            url_scheme: str = 'https',
-            interceptor: Optional[ClientGatewaysServiceRestInterceptor] = None,
-            api_audience: Optional[str] = None,
-            ) -> None:
-        """Instantiate the transport.
-
-        Args:
-            host (Optional[str]):
-                 The hostname to connect to (default: 'beyondcorp.googleapis.com').
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if ``channel`` is provided.
-            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
-                ignored if ``channel`` is provided.
-            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
-                certificate to configure mutual TLS HTTP channel. It is ignored
-                if ``channel`` is provided.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you are developing
-                your own client library.
-            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
-                be used for service account credentials.
-            url_scheme: the protocol scheme for the API endpoint. Normally
-                "https", but for testing or local servers,
-                "http" can be specified.
-        """
-        # Run the base constructor
-        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
-        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
-        # credentials object
-        super().__init__(
-            host=host,
-            credentials=credentials,
-            client_info=client_info,
-            always_use_jwt_access=always_use_jwt_access,
-            url_scheme=url_scheme,
-            api_audience=api_audience
-        )
-        self._session = AuthorizedSession(
-            self._credentials, default_host=self.DEFAULT_HOST)
-        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
-        if client_cert_source_for_mtls:
-            self._session.configure_mtls_channel(client_cert_source_for_mtls)
-        self._interceptor = interceptor or ClientGatewaysServiceRestInterceptor()
-        self._prep_wrapped_messages(client_info)
-
-    @property
-    def operations_client(self) -> operations_v1.AbstractOperationsClient:
-        """Create the client designed to process long-running operations.
-
-        This property caches on the instance; repeated calls return the same
-        client.
-        """
-        # Only create a new client if we do not already have one.
- if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.CancelOperation': [ - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - ], - 'google.longrunning.Operations.DeleteOperation': [ - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ], - 'google.longrunning.Operations.GetOperation': [ - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ], - 'google.longrunning.Operations.ListOperations': [ - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', - }, - ], - } - - rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1") - - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) - - # Return the client from cache. - return self._operations_client - - class _CreateClientGateway(_BaseClientGatewaysServiceRestTransport._BaseCreateClientGateway, ClientGatewaysServiceRestStub): - def __hash__(self): - return hash("ClientGatewaysServiceRestTransport.CreateClientGateway") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: client_gateways_service.CreateClientGatewayRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the create client gateway method over HTTP. - - Args: - request (~.client_gateways_service.CreateClientGatewayRequest): - The request object. Message for creating a ClientGateway. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. 
-
-            """
-
-            http_options = _BaseClientGatewaysServiceRestTransport._BaseCreateClientGateway._get_http_options()
-
-            request, metadata = self._interceptor.pre_create_client_gateway(request, metadata)
-            transcoded_request = _BaseClientGatewaysServiceRestTransport._BaseCreateClientGateway._get_transcoded_request(http_options, request)
-
-            body = _BaseClientGatewaysServiceRestTransport._BaseCreateClientGateway._get_request_body_json(transcoded_request)
-
-            # Jsonify the query params
-            query_params = _BaseClientGatewaysServiceRestTransport._BaseCreateClientGateway._get_query_params_json(transcoded_request)
-
-            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
-                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
-                method = transcoded_request['method']
-                try:
-                    request_payload = json_format.MessageToJson(request)
-                except:
-                    request_payload = None
-                http_request = {
-                    "payload": request_payload,
-                    "requestMethod": method,
-                    "requestUrl": request_url,
-                    "headers": dict(metadata),
-                }
-                _LOGGER.debug(
-                    f"Sending request for google.cloud.beyondcorp.clientgateways_v1.ClientGatewaysServiceClient.CreateClientGateway",
-                    extra = {
-                        "serviceName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService",
-                        "rpcName": "CreateClientGateway",
-                        "httpRequest": http_request,
-                        "metadata": http_request["headers"],
-                    },
-                )
-
-            # Send the request
-            response = ClientGatewaysServiceRestTransport._CreateClientGateway._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
-
-            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
-            # subclass.
-            if response.status_code >= 400:
-                raise core_exceptions.from_http_response(response)
-
-            # Return the response
-            resp = operations_pb2.Operation()
-            json_format.Parse(response.content, resp, ignore_unknown_fields=True)
-
-            resp = self._interceptor.post_create_client_gateway(resp)
-            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
-            resp, _ = self._interceptor.post_create_client_gateway_with_metadata(resp, response_metadata)
-            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
-                try:
-                    response_payload = json_format.MessageToJson(resp)
-                except:
-                    response_payload = None
-                http_response = {
-                    "payload": response_payload,
-                    "headers": dict(response.headers),
-                    "status": response.status_code,
-                }
-                _LOGGER.debug(
-                    "Received response for google.cloud.beyondcorp.clientgateways_v1.ClientGatewaysServiceClient.create_client_gateway",
-                    extra = {
-                        "serviceName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService",
-                        "rpcName": "CreateClientGateway",
-                        "metadata": http_response["headers"],
-                        "httpResponse": http_response,
-                    },
-                )
-            return resp
-
-    class _DeleteClientGateway(_BaseClientGatewaysServiceRestTransport._BaseDeleteClientGateway, ClientGatewaysServiceRestStub):
-        def __hash__(self):
-            return hash("ClientGatewaysServiceRestTransport.DeleteClientGateway")
-
-        @staticmethod
-        def _get_response(
-            host,
-            metadata,
-            query_params,
-            session,
-            timeout,
-            transcoded_request,
-            body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
-            headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
-            response = getattr(session, method)(
-                "{host}{uri}".format(host=host, uri=uri),
-                timeout=timeout,
-                headers=headers,
-                params=rest_helpers.flatten_query_params(query_params, strict=True),
-            )
-            return response
-
-        def __call__(self,
-                request: client_gateways_service.DeleteClientGatewayRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
-                ) -> operations_pb2.Operation:
-            r"""Call the delete client gateway method over HTTP.
-
-            Args:
-                request (~.client_gateways_service.DeleteClientGatewayRequest):
-                    The request object. Message for deleting a ClientGateway
-                retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                    should be retried.
-                timeout (float): The timeout for this request.
-                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                    sent along with the request as metadata. Normally, each value must be of type `str`,
-                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                    be of type `bytes`.
-
-            Returns:
-                ~.operations_pb2.Operation:
-                    This resource represents a
-                long-running operation that is the
-                result of a network API call.
-
-            """
-
-            http_options = _BaseClientGatewaysServiceRestTransport._BaseDeleteClientGateway._get_http_options()
-
-            request, metadata = self._interceptor.pre_delete_client_gateway(request, metadata)
-            transcoded_request = _BaseClientGatewaysServiceRestTransport._BaseDeleteClientGateway._get_transcoded_request(http_options, request)
-
-            # Jsonify the query params
-            query_params = _BaseClientGatewaysServiceRestTransport._BaseDeleteClientGateway._get_query_params_json(transcoded_request)
-
-            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
-                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
-                method = transcoded_request['method']
-                try:
-                    request_payload = json_format.MessageToJson(request)
-                except:
-                    request_payload = None
-                http_request = {
-                    "payload": request_payload,
-                    "requestMethod": method,
-                    "requestUrl": request_url,
-                    "headers": dict(metadata),
-                }
-                _LOGGER.debug(
-                    f"Sending request for google.cloud.beyondcorp.clientgateways_v1.ClientGatewaysServiceClient.DeleteClientGateway",
-                    extra = {
-                        "serviceName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService",
-                        "rpcName": "DeleteClientGateway",
-                        "httpRequest": http_request,
-                        "metadata": http_request["headers"],
-                    },
-                )
-
-            # Send the request
-            response = ClientGatewaysServiceRestTransport._DeleteClientGateway._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
-
-            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
-            # subclass.
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_delete_client_gateway(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_client_gateway_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.beyondcorp.clientgateways_v1.ClientGatewaysServiceClient.delete_client_gateway", - extra = { - "serviceName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService", - "rpcName": "DeleteClientGateway", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetClientGateway(_BaseClientGatewaysServiceRestTransport._BaseGetClientGateway, ClientGatewaysServiceRestStub): - def __hash__(self): - return hash("ClientGatewaysServiceRestTransport.GetClientGateway") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: client_gateways_service.GetClientGatewayRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> client_gateways_service.ClientGateway: - r"""Call the get client gateway method over HTTP. - - Args: - request (~.client_gateways_service.GetClientGatewayRequest): - The request object. Message for getting a ClientGateway. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.client_gateways_service.ClientGateway: - Message describing ClientGateway - object. 
-
-            """
-
-            http_options = _BaseClientGatewaysServiceRestTransport._BaseGetClientGateway._get_http_options()
-
-            request, metadata = self._interceptor.pre_get_client_gateway(request, metadata)
-            transcoded_request = _BaseClientGatewaysServiceRestTransport._BaseGetClientGateway._get_transcoded_request(http_options, request)
-
-            # Jsonify the query params
-            query_params = _BaseClientGatewaysServiceRestTransport._BaseGetClientGateway._get_query_params_json(transcoded_request)
-
-            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
-                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
-                method = transcoded_request['method']
-                try:
-                    request_payload = type(request).to_json(request)
-                except:
-                    request_payload = None
-                http_request = {
-                    "payload": request_payload,
-                    "requestMethod": method,
-                    "requestUrl": request_url,
-                    "headers": dict(metadata),
-                }
-                _LOGGER.debug(
-                    f"Sending request for google.cloud.beyondcorp.clientgateways_v1.ClientGatewaysServiceClient.GetClientGateway",
-                    extra = {
-                        "serviceName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService",
-                        "rpcName": "GetClientGateway",
-                        "httpRequest": http_request,
-                        "metadata": http_request["headers"],
-                    },
-                )
-
-            # Send the request
-            response = ClientGatewaysServiceRestTransport._GetClientGateway._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
-
-            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
-            # subclass.
-            if response.status_code >= 400:
-                raise core_exceptions.from_http_response(response)
-
-            # Return the response
-            resp = client_gateways_service.ClientGateway()
-            pb_resp = client_gateways_service.ClientGateway.pb(resp)
-
-            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
-
-            resp = self._interceptor.post_get_client_gateway(resp)
-            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
-            resp, _ = self._interceptor.post_get_client_gateway_with_metadata(resp, response_metadata)
-            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
-                try:
-                    response_payload = client_gateways_service.ClientGateway.to_json(response)
-                except:
-                    response_payload = None
-                http_response = {
-                    "payload": response_payload,
-                    "headers": dict(response.headers),
-                    "status": response.status_code,
-                }
-                _LOGGER.debug(
-                    "Received response for google.cloud.beyondcorp.clientgateways_v1.ClientGatewaysServiceClient.get_client_gateway",
-                    extra = {
-                        "serviceName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService",
-                        "rpcName": "GetClientGateway",
-                        "metadata": http_response["headers"],
-                        "httpResponse": http_response,
-                    },
-                )
-            return resp
-
-    class _ListClientGateways(_BaseClientGatewaysServiceRestTransport._BaseListClientGateways, ClientGatewaysServiceRestStub):
-        def __hash__(self):
-            return hash("ClientGatewaysServiceRestTransport.ListClientGateways")
-
-        @staticmethod
-        def _get_response(
-            host,
-            metadata,
-            query_params,
-            session,
-            timeout,
-            transcoded_request,
-            body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
-            headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
-            response = getattr(session, method)(
-                "{host}{uri}".format(host=host, uri=uri),
-                timeout=timeout,
-                headers=headers,
-                params=rest_helpers.flatten_query_params(query_params, strict=True),
-            )
-            return response
-
-        def __call__(self,
-                request: client_gateways_service.ListClientGatewaysRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
-                ) -> client_gateways_service.ListClientGatewaysResponse:
-            r"""Call the list client gateways method over HTTP.
-
-            Args:
-                request (~.client_gateways_service.ListClientGatewaysRequest):
-                    The request object. Message for requesting list of
-                ClientGateways.
-                retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                    should be retried.
-                timeout (float): The timeout for this request.
-                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                    sent along with the request as metadata. Normally, each value must be of type `str`,
-                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                    be of type `bytes`.
-
-            Returns:
-                ~.client_gateways_service.ListClientGatewaysResponse:
-                    Message for response to listing
-                ClientGateways.
-
-            """
-
-            http_options = _BaseClientGatewaysServiceRestTransport._BaseListClientGateways._get_http_options()
-
-            request, metadata = self._interceptor.pre_list_client_gateways(request, metadata)
-            transcoded_request = _BaseClientGatewaysServiceRestTransport._BaseListClientGateways._get_transcoded_request(http_options, request)
-
-            # Jsonify the query params
-            query_params = _BaseClientGatewaysServiceRestTransport._BaseListClientGateways._get_query_params_json(transcoded_request)
-
-            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
-                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
-                method = transcoded_request['method']
-                try:
-                    request_payload = type(request).to_json(request)
-                except:
-                    request_payload = None
-                http_request = {
-                    "payload": request_payload,
-                    "requestMethod": method,
-                    "requestUrl": request_url,
-                    "headers": dict(metadata),
-                }
-                _LOGGER.debug(
-                    f"Sending request for google.cloud.beyondcorp.clientgateways_v1.ClientGatewaysServiceClient.ListClientGateways",
-                    extra = {
-                        "serviceName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService",
-                        "rpcName": "ListClientGateways",
-                        "httpRequest": http_request,
-                        "metadata": http_request["headers"],
-                    },
-                )
-
-            # Send the request
-            response = ClientGatewaysServiceRestTransport._ListClientGateways._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
-
-            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
-            # subclass.
-            if response.status_code >= 400:
-                raise core_exceptions.from_http_response(response)
-
-            # Return the response
-            resp = client_gateways_service.ListClientGatewaysResponse()
-            pb_resp = client_gateways_service.ListClientGatewaysResponse.pb(resp)
-
-            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
-
-            resp = self._interceptor.post_list_client_gateways(resp)
-            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
-            resp, _ = self._interceptor.post_list_client_gateways_with_metadata(resp, response_metadata)
-            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
-                try:
-                    # Serialize the parsed message (`resp`), not the raw HTTP response object.
-                    response_payload = client_gateways_service.ListClientGatewaysResponse.to_json(resp)
-                except Exception:
-                    response_payload = None
-                http_response = {
-                    "payload": response_payload,
-                    "headers": dict(response.headers),
-                    "status": response.status_code,
-                }
-                _LOGGER.debug(
-                    "Received response for google.cloud.beyondcorp.clientgateways_v1.ClientGatewaysServiceClient.list_client_gateways",
-                    extra={
-                        "serviceName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService",
-                        "rpcName": "ListClientGateways",
-                        "metadata": http_response["headers"],
-                        "httpResponse": http_response,
-                    },
-                )
-            return resp
-
-    @property
-    def create_client_gateway(self) -> Callable[
-            [client_gateways_service.CreateClientGatewayRequest],
-            operations_pb2.Operation]:
-        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
-        # In C++ this would require a dynamic_cast
-        return self._CreateClientGateway(self._session, self._host, self._interceptor)  # type: ignore
-
-    @property
-    def delete_client_gateway(self) -> Callable[
-            [client_gateways_service.DeleteClientGatewayRequest],
-            operations_pb2.Operation]:
-        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
-        # In C++ this would require a dynamic_cast
-        return self._DeleteClientGateway(self._session, self._host, self._interceptor)  # type: ignore
-
-    @property
-    def get_client_gateway(self) -> Callable[
-            [client_gateways_service.GetClientGatewayRequest],
-            client_gateways_service.ClientGateway]:
-        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
-        # In C++ this would require a dynamic_cast
-        return self._GetClientGateway(self._session, self._host, self._interceptor)  # type: ignore
-
-    @property
-    def list_client_gateways(self) -> Callable[
-            [client_gateways_service.ListClientGatewaysRequest],
-            client_gateways_service.ListClientGatewaysResponse]:
-        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
- # In C++ this would require a dynamic_cast - return self._ListClientGateways(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_location(self): - return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore - - class _GetLocation(_BaseClientGatewaysServiceRestTransport._BaseGetLocation, ClientGatewaysServiceRestStub): - def __hash__(self): - return hash("ClientGatewaysServiceRestTransport.GetLocation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: locations_pb2.GetLocationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.Location: - - r"""Call the get location method over HTTP. - - Args: - request (locations_pb2.GetLocationRequest): - The request object for GetLocation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - locations_pb2.Location: Response from GetLocation method. - """ - - http_options = _BaseClientGatewaysServiceRestTransport._BaseGetLocation._get_http_options() - - request, metadata = self._interceptor.pre_get_location(request, metadata) - transcoded_request = _BaseClientGatewaysServiceRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseClientGatewaysServiceRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.clientgateways_v1.ClientGatewaysServiceClient.GetLocation", - extra = { - "serviceName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService", - "rpcName": "GetLocation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ClientGatewaysServiceRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
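-            # Unlike the service RPCs above, the location/operation/IAM mixins
-            # parse the JSON body directly into raw protobuf messages
-            # (locations_pb2, operations_pb2, policy_pb2) via json_format.Parse,
-            # rather than going through the proto-plus wrappers.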
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = locations_pb2.Location() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_location(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.beyondcorp.clientgateways_v1.ClientGatewaysServiceAsyncClient.GetLocation", - extra = { - "serviceName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService", - "rpcName": "GetLocation", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def list_locations(self): - return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore - - class _ListLocations(_BaseClientGatewaysServiceRestTransport._BaseListLocations, ClientGatewaysServiceRestStub): - def __hash__(self): - return hash("ClientGatewaysServiceRestTransport.ListLocations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: locations_pb2.ListLocationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.ListLocationsResponse: - - r"""Call the list locations method over HTTP. - - Args: - request (locations_pb2.ListLocationsRequest): - The request object for ListLocations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - locations_pb2.ListLocationsResponse: Response from ListLocations method. 
- """ - - http_options = _BaseClientGatewaysServiceRestTransport._BaseListLocations._get_http_options() - - request, metadata = self._interceptor.pre_list_locations(request, metadata) - transcoded_request = _BaseClientGatewaysServiceRestTransport._BaseListLocations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseClientGatewaysServiceRestTransport._BaseListLocations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.clientgateways_v1.ClientGatewaysServiceClient.ListLocations", - extra = { - "serviceName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService", - "rpcName": "ListLocations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ClientGatewaysServiceRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = locations_pb2.ListLocationsResponse() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_list_locations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.beyondcorp.clientgateways_v1.ClientGatewaysServiceAsyncClient.ListLocations", - extra = { - "serviceName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService", - "rpcName": "ListLocations", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def get_iam_policy(self): - return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - - class _GetIamPolicy(_BaseClientGatewaysServiceRestTransport._BaseGetIamPolicy, ClientGatewaysServiceRestStub): - def __hash__(self): - return hash("ClientGatewaysServiceRestTransport.GetIamPolicy") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: iam_policy_pb2.GetIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> policy_pb2.Policy: - - r"""Call 
the get iam policy method over HTTP. - - Args: - request (iam_policy_pb2.GetIamPolicyRequest): - The request object for GetIamPolicy method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - policy_pb2.Policy: Response from GetIamPolicy method. - """ - - http_options = _BaseClientGatewaysServiceRestTransport._BaseGetIamPolicy._get_http_options() - - request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) - transcoded_request = _BaseClientGatewaysServiceRestTransport._BaseGetIamPolicy._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseClientGatewaysServiceRestTransport._BaseGetIamPolicy._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.clientgateways_v1.ClientGatewaysServiceClient.GetIamPolicy", - extra = { - "serviceName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService", - "rpcName": "GetIamPolicy", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ClientGatewaysServiceRestTransport._GetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
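-            # After the status check below, the decoded policy_pb2.Policy is
-            # routed through the post_get_iam_policy interceptor hook, which may
-            # replace or augment the response before it is returned.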
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = policy_pb2.Policy() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_iam_policy(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.beyondcorp.clientgateways_v1.ClientGatewaysServiceAsyncClient.GetIamPolicy", - extra = { - "serviceName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService", - "rpcName": "GetIamPolicy", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def set_iam_policy(self): - return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - - class _SetIamPolicy(_BaseClientGatewaysServiceRestTransport._BaseSetIamPolicy, ClientGatewaysServiceRestStub): - def __hash__(self): - return hash("ClientGatewaysServiceRestTransport.SetIamPolicy") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: iam_policy_pb2.SetIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> policy_pb2.Policy: - - r"""Call the set iam policy method over HTTP. - - Args: - request (iam_policy_pb2.SetIamPolicyRequest): - The request object for SetIamPolicy method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - policy_pb2.Policy: Response from SetIamPolicy method. 
- """ - - http_options = _BaseClientGatewaysServiceRestTransport._BaseSetIamPolicy._get_http_options() - - request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) - transcoded_request = _BaseClientGatewaysServiceRestTransport._BaseSetIamPolicy._get_transcoded_request(http_options, request) - - body = _BaseClientGatewaysServiceRestTransport._BaseSetIamPolicy._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseClientGatewaysServiceRestTransport._BaseSetIamPolicy._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.clientgateways_v1.ClientGatewaysServiceClient.SetIamPolicy", - extra = { - "serviceName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService", - "rpcName": "SetIamPolicy", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ClientGatewaysServiceRestTransport._SetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = policy_pb2.Policy() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_set_iam_policy(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.beyondcorp.clientgateways_v1.ClientGatewaysServiceAsyncClient.SetIamPolicy", - extra = { - "serviceName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService", - "rpcName": "SetIamPolicy", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def test_iam_permissions(self): - return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore - - class _TestIamPermissions(_BaseClientGatewaysServiceRestTransport._BaseTestIamPermissions, ClientGatewaysServiceRestStub): - def __hash__(self): - return hash("ClientGatewaysServiceRestTransport.TestIamPermissions") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: iam_policy_pb2.TestIamPermissionsRequest, *, - retry: 
OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - - r"""Call the test iam permissions method over HTTP. - - Args: - request (iam_policy_pb2.TestIamPermissionsRequest): - The request object for TestIamPermissions method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. - """ - - http_options = _BaseClientGatewaysServiceRestTransport._BaseTestIamPermissions._get_http_options() - - request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) - transcoded_request = _BaseClientGatewaysServiceRestTransport._BaseTestIamPermissions._get_transcoded_request(http_options, request) - - body = _BaseClientGatewaysServiceRestTransport._BaseTestIamPermissions._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseClientGatewaysServiceRestTransport._BaseTestIamPermissions._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.clientgateways_v1.ClientGatewaysServiceClient.TestIamPermissions", - extra = { - "serviceName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService", - "rpcName": "TestIamPermissions", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ClientGatewaysServiceRestTransport._TestIamPermissions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
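-            # TestIamPermissions is a POST mixin: the transcoded JSON body was
-            # passed to _get_response via `data=body` above. The parsed response
-            # contains only the subset of requested permissions the caller holds.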
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = iam_policy_pb2.TestIamPermissionsResponse() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_test_iam_permissions(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.beyondcorp.clientgateways_v1.ClientGatewaysServiceAsyncClient.TestIamPermissions", - extra = { - "serviceName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService", - "rpcName": "TestIamPermissions", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - - class _CancelOperation(_BaseClientGatewaysServiceRestTransport._BaseCancelOperation, ClientGatewaysServiceRestStub): - def __hash__(self): - return hash("ClientGatewaysServiceRestTransport.CancelOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - - r"""Call the cancel operation method over HTTP. - - Args: - request (operations_pb2.CancelOperationRequest): - The request object for CancelOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = _BaseClientGatewaysServiceRestTransport._BaseCancelOperation._get_http_options() - - request, metadata = self._interceptor.pre_cancel_operation(request, metadata) - transcoded_request = _BaseClientGatewaysServiceRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) - - body = _BaseClientGatewaysServiceRestTransport._BaseCancelOperation._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseClientGatewaysServiceRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.clientgateways_v1.ClientGatewaysServiceClient.CancelOperation", - extra = { - "serviceName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService", - "rpcName": "CancelOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ClientGatewaysServiceRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_cancel_operation(None) - - @property - def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - - class _DeleteOperation(_BaseClientGatewaysServiceRestTransport._BaseDeleteOperation, ClientGatewaysServiceRestStub): - def __hash__(self): - return hash("ClientGatewaysServiceRestTransport.DeleteOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - - r"""Call the delete operation method over HTTP. - - Args: - request (operations_pb2.DeleteOperationRequest): - The request object for DeleteOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = _BaseClientGatewaysServiceRestTransport._BaseDeleteOperation._get_http_options() - - request, metadata = self._interceptor.pre_delete_operation(request, metadata) - transcoded_request = _BaseClientGatewaysServiceRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseClientGatewaysServiceRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.clientgateways_v1.ClientGatewaysServiceClient.DeleteOperation", - extra = { - "serviceName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService", - "rpcName": "DeleteOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ClientGatewaysServiceRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_delete_operation(None) - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(_BaseClientGatewaysServiceRestTransport._BaseGetOperation, ClientGatewaysServiceRestStub): - def __hash__(self): - return hash("ClientGatewaysServiceRestTransport.GetOperation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - - r"""Call the get operation method over HTTP. - - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.Operation: Response from GetOperation method. 
- """ - - http_options = _BaseClientGatewaysServiceRestTransport._BaseGetOperation._get_http_options() - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - transcoded_request = _BaseClientGatewaysServiceRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseClientGatewaysServiceRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.clientgateways_v1.ClientGatewaysServiceClient.GetOperation", - extra = { - "serviceName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService", - "rpcName": "GetOperation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ClientGatewaysServiceRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.Operation() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.beyondcorp.clientgateways_v1.ClientGatewaysServiceAsyncClient.GetOperation", - extra = { - "serviceName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService", - "rpcName": "GetOperation", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - - class _ListOperations(_BaseClientGatewaysServiceRestTransport._BaseListOperations, ClientGatewaysServiceRestStub): - def __hash__(self): - return hash("ClientGatewaysServiceRestTransport.ListOperations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.ListOperationsResponse: - - 
r"""Call the list operations method over HTTP. - - Args: - request (operations_pb2.ListOperationsRequest): - The request object for ListOperations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - operations_pb2.ListOperationsResponse: Response from ListOperations method. - """ - - http_options = _BaseClientGatewaysServiceRestTransport._BaseListOperations._get_http_options() - - request, metadata = self._interceptor.pre_list_operations(request, metadata) - transcoded_request = _BaseClientGatewaysServiceRestTransport._BaseListOperations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseClientGatewaysServiceRestTransport._BaseListOperations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.beyondcorp.clientgateways_v1.ClientGatewaysServiceClient.ListOperations", - extra = { - "serviceName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService", - "rpcName": "ListOperations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ClientGatewaysServiceRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
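-            # The transport returns a single page of operations; paging via
-            # next_page_token on the parsed ListOperationsResponse is left to
-            # higher layers (e.g. the operations client or the caller).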
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = operations_pb2.ListOperationsResponse() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_list_operations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.beyondcorp.clientgateways_v1.ClientGatewaysServiceAsyncClient.ListOperations", - extra = { - "serviceName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService", - "rpcName": "ListOperations", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'ClientGatewaysServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/transports/rest_base.py b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/transports/rest_base.py deleted file mode 100644 index 9bb53e0a5be6..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/transports/rest_base.py +++ /dev/null @@ -1,549 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from .base import ClientGatewaysServiceTransport, DEFAULT_CLIENT_INFO - -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - - -from google.cloud.beyondcorp_clientgateways_v1.types import client_gateways_service -from google.longrunning import operations_pb2 # type: ignore - - -class _BaseClientGatewaysServiceRestTransport(ClientGatewaysServiceTransport): - """Base REST backend transport for ClientGatewaysService. - - Note: This class is not meant to be used directly. Use its sync and - async sub-classes instead. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'beyondcorp.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - Args: - host (Optional[str]): - The hostname to connect to (default: 'beyondcorp.googleapis.com'). - credentials (Optional[Any]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - - class _BaseCreateClientGateway: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/clientGateways', - 'body': 'client_gateway', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = client_gateways_service.CreateClientGatewayRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseClientGatewaysServiceRestTransport._BaseCreateClientGateway._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteClientGateway: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in 
cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/clientGateways/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = client_gateways_service.DeleteClientGatewayRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseClientGatewaysServiceRestTransport._BaseDeleteClientGateway._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetClientGateway: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/clientGateways/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = client_gateways_service.GetClientGatewayRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseClientGatewaysServiceRestTransport._BaseGetClientGateway._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListClientGateways: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/clientGateways', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = client_gateways_service.ListClientGatewaysRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseClientGatewaysServiceRestTransport._BaseListClientGateways._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetLocation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: 
List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseListLocations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*}/locations', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseGetIamPolicy: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{resource=projects/*/locations/*/appConnections/*}:getIamPolicy', - }, - { - 'method': 'get', - 'uri': '/v1/{resource=projects/*/locations/*/appConnectors/*}:getIamPolicy', - }, - { - 'method': 'get', - 'uri': '/v1/{resource=projects/*/locations/*/appGateways/*}:getIamPolicy', - }, - { - 'method': 'get', - 'uri': '/v1/{resource=projects/*/locations/*/clientConnectorServices/*}:getIamPolicy', - }, - { - 'method': 'get', - 'uri': '/v1/{resource=projects/*/locations/*/clientGateways/*}:getIamPolicy', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseSetIamPolicy: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/appConnections/*}:setIamPolicy', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/appConnectors/*}:setIamPolicy', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/appGateways/*}:setIamPolicy', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/clientConnectorServices/*}:setIamPolicy', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/clientGateways/*}:setIamPolicy', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def 
_get_request_body_json(transcoded_request): - body = json.dumps(transcoded_request['body']) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseTestIamPermissions: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/appConnections/*}:testIamPermissions', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/appConnectors/*}:testIamPermissions', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/appGateways/*}:testIamPermissions', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/clientConnectorServices/*}:testIamPermissions', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/clientGateways/*}:testIamPermissions', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - body = json.dumps(transcoded_request['body']) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseCancelOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - body = json.dumps(transcoded_request['body']) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseDeleteOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseGetOperation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 
'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseListOperations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - -__all__=( - '_BaseClientGatewaysServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/types/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/types/__init__.py deleted file mode 100644 index d146d2aa9599..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/types/__init__.py +++ /dev/null @@ -1,34 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client_gateways_service import ( - ClientGateway, - ClientGatewayOperationMetadata, - CreateClientGatewayRequest, - DeleteClientGatewayRequest, - GetClientGatewayRequest, - ListClientGatewaysRequest, - ListClientGatewaysResponse, -) - -__all__ = ( - 'ClientGateway', - 'ClientGatewayOperationMetadata', - 'CreateClientGatewayRequest', - 'DeleteClientGatewayRequest', - 'GetClientGatewayRequest', - 'ListClientGatewaysRequest', - 'ListClientGatewaysResponse', -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/types/client_gateways_service.py b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/types/client_gateways_service.py deleted file mode 100644 index 27c8421a8194..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/google/cloud/beyondcorp_clientgateways_v1/types/client_gateways_service.py +++ /dev/null @@ -1,382 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.beyondcorp.clientgateways.v1', - manifest={ - 'ClientGateway', - 'ListClientGatewaysRequest', - 'ListClientGatewaysResponse', - 'GetClientGatewayRequest', - 'CreateClientGatewayRequest', - 'DeleteClientGatewayRequest', - 'ClientGatewayOperationMetadata', - }, -) - - -class ClientGateway(proto.Message): - r"""Message describing ClientGateway object. - - Attributes: - name (str): - Required. Name of the resource. The name is - ignored during creation. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Create timestamp. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Update timestamp. - state (google.cloud.beyondcorp_clientgateways_v1.types.ClientGateway.State): - Output only. The operational state of the - gateway. - id (str): - Output only. A unique identifier for the - instance generated by the system. - client_connector_service (str): - Output only. The client connector service name that the - client gateway is associated with. Client Connector Services, - named as follows: - ``projects/{project_id}/locations/{location_id}/client_connector_services/{client_connector_service_id}``. - """ - class State(proto.Enum): - r"""Represents the different states of a gateway. - - Values: - STATE_UNSPECIFIED (0): - Default value. This value is unused. - CREATING (1): - Gateway is being created. - UPDATING (2): - Gateway is being updated. - DELETING (3): - Gateway is being deleted. - RUNNING (4): - Gateway is running. - DOWN (5): - Gateway is down and may be restored in the - future. This happens when CCFE sends - ProjectState = OFF. - ERROR (6): - ClientGateway encountered an error and is in - an indeterminate state. - """ - STATE_UNSPECIFIED = 0 - CREATING = 1 - UPDATING = 2 - DELETING = 3 - RUNNING = 4 - DOWN = 5 - ERROR = 6 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - state: State = proto.Field( - proto.ENUM, - number=4, - enum=State, - ) - id: str = proto.Field( - proto.STRING, - number=5, - ) - client_connector_service: str = proto.Field( - proto.STRING, - number=6, - ) - - -class ListClientGatewaysRequest(proto.Message): - r"""Message for requesting a list of ClientGateways. - - Attributes: - parent (str): - Required. Parent value for - ListClientGatewaysRequest. - page_size (int): - Optional. Requested page size. Server may - return fewer items than requested. If - unspecified, server will pick an appropriate - default. - page_token (str): - Optional. A token identifying a page of - results the server should return. - filter (str): - Optional. Filtering results. - order_by (str): - Optional. Hint for how to order the results. 
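    Example (an editor's illustration, not part of the generated file; the
    parent path and the ordering field are assumptions based on standard
    Google Cloud resource-name and AIP-132 conventions):

        request = ListClientGatewaysRequest(
            parent="projects/my-project/locations/us-central1",
            page_size=50,
            order_by="create_time desc",
        )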
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListClientGatewaysResponse(proto.Message): - r"""Message for response to listing ClientGateways. - - Attributes: - client_gateways (MutableSequence[google.cloud.beyondcorp_clientgateways_v1.types.ClientGateway]): - The list of ClientGateway. - next_page_token (str): - A token identifying a page of results the - server should return. - unreachable (MutableSequence[str]): - Locations that could not be reached. - """ - - @property - def raw_page(self): - return self - - client_gateways: MutableSequence['ClientGateway'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='ClientGateway', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class GetClientGatewayRequest(proto.Message): - r"""Message for getting a ClientGateway. - - Attributes: - name (str): - Required. Name of the resource - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateClientGatewayRequest(proto.Message): - r"""Message for creating a ClientGateway. - - Attributes: - parent (str): - Required. Value for parent. - client_gateway_id (str): - Optional. User-settable client gateway resource ID. - - - Must start with a letter. - - Must contain between 4-63 characters from - ``/[a-z][0-9]-/``. - - Must end with a number or a letter. - client_gateway (google.cloud.beyondcorp_clientgateways_v1.types.ClientGateway): - Required. The resource being created. - request_id (str): - Optional. An optional request ID to identify - requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. The server will guarantee that - for at least 60 minutes since the first request. - - For example, consider a situation where you make - an initial request and t he request times out. - If you make the request again with the same - request ID, the server can check if original - operation with the same request ID was received, - and if so, will ignore the second request. This - prevents clients from accidentally creating - duplicate commitments. - - The request ID must be a valid UUID with the - exception that zero UUID is not supported - (00000000-0000-0000-0000-000000000000). - validate_only (bool): - Optional. If set, validates request by - executing a dry-run which would not alter the - resource in any way. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - client_gateway_id: str = proto.Field( - proto.STRING, - number=2, - ) - client_gateway: 'ClientGateway' = proto.Field( - proto.MESSAGE, - number=3, - message='ClientGateway', - ) - request_id: str = proto.Field( - proto.STRING, - number=4, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=5, - ) - - -class DeleteClientGatewayRequest(proto.Message): - r"""Message for deleting a ClientGateway - - Attributes: - name (str): - Required. Name of the resource - request_id (str): - Optional. An optional request ID to identify - requests. Specify a unique request ID so that if - you must retry your request, the server will - know to ignore the request if it has already - been completed. 
The server will guarantee that - for at least 60 minutes after the first request. - - For example, consider a situation where you make - an initial request and the request times out. - If you make the request again with the same - request ID, the server can check if the original - operation with the same request ID was received, - and if so, will ignore the second request. This - prevents clients from accidentally creating - duplicate commitments. - - The request ID must be a valid UUID with the - exception that zero UUID is not supported - (00000000-0000-0000-0000-000000000000). - validate_only (bool): - Optional. If set, validates request by - executing a dry-run which would not alter the - resource in any way. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - request_id: str = proto.Field( - proto.STRING, - number=2, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class ClientGatewayOperationMetadata(proto.Message): - r"""Represents the metadata of the long-running operation. - - Attributes: - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the operation was - created. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the operation finished - running. - target (str): - Output only. Server-defined resource path for - the target of the operation. - verb (str): - Output only. Name of the verb executed by the - operation. - status_message (str): - Output only. Human-readable status of the - operation, if any. - requested_cancellation (bool): - Output only. Identifies whether the user has requested - cancellation of the operation. Operations that have been - cancelled successfully have [Operation.error][] value with a - [google.rpc.Status.code][google.rpc.Status.code] of 1, - corresponding to ``Code.CANCELLED``. - api_version (str): - Output only. API version used to start the - operation. - """ - - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - target: str = proto.Field( - proto.STRING, - number=3, - ) - verb: str = proto.Field( - proto.STRING, - number=4, - ) - status_message: str = proto.Field( - proto.STRING, - number=5, - ) - requested_cancellation: bool = proto.Field( - proto.BOOL, - number=6, - ) - api_version: str = proto.Field( - proto.STRING, - number=7, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/mypy.ini b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/mypy.ini deleted file mode 100644 index 574c5aed394b..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/noxfile.py b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/noxfile.py deleted file mode 100644 index cc2be6cdb220..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/noxfile.py +++ /dev/null @@ -1,280 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import pathlib -import re -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", - "3.12", - "3.13", -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = 'google-cloud-beyondcorp-clientgateways' - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.13" - -nox.options.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds", - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "prerelease_deps", -] - -@nox.session(python=ALL_PYTHON) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def unit(session, protobuf_implementation): - """Run the unit test suite.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") - - # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. - # The 'cpp' implementation requires Protobuf<4. - if protobuf_implementation == "cpp": - session.install("protobuf<4") - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/beyondcorp_clientgateways_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - -@nox.session(python=ALL_PYTHON[-1]) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def prerelease_deps(session, protobuf_implementation): - """Run the unit test suite against pre-release versions of dependencies.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - # Install test environment dependencies - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - - # Install the package without dependencies - session.install('-e', '.', '--no-deps') - - # We test the minimum dependency versions using the minimum Python - # version so the lowest python runtime that we test has a corresponding constraints - # file, located at `testing/constraints-3.7.txt`, which contains all of the - # dependencies and extras. - with open( - CURRENT_DIRECTORY - / "testing" - / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. 
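    # (Editor's note) The lookahead pattern below keeps only pinned
    # "package==version" lines and captures just the package name: a line
    # such as "google-auth==2.14.1" yields "google-auth", while comments,
    # blank lines, and non-pinned specifiers like "pkg>=1.0" produce no match.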
- constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - session.install(*constraints_deps) - - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 - "grpcio!=1.67.0rc1", - "grpcio-status", - "protobuf", - "proto-plus", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) - - # Remaining dependencies - other_deps = [ - "requests", - ] - session.install(*other_deps) - - # Print out prerelease package versions - - session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") - session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("python", "-c", "import grpc; print(grpc.__version__)") - session.run( - "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" - ) - session.run( - "python", "-c", "import proto; print(proto.__version__)" - ) - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/beyondcorp_clientgateways_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. - """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. 
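    (Editor's note: this session can be run on its own with ``nox -s lint``;
    the formatting check uses the same pinned black version as the blacken
    session below.)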
- """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/samples/generated_samples/beyondcorp_v1_generated_client_gateways_service_create_client_gateway_async.py b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/samples/generated_samples/beyondcorp_v1_generated_client_gateways_service_create_client_gateway_async.py deleted file mode 100644 index 25a9d2d8c903..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/samples/generated_samples/beyondcorp_v1_generated_client_gateways_service_create_client_gateway_async.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateClientGateway -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-beyondcorp-clientgateways - - -# [START beyondcorp_v1_generated_ClientGatewaysService_CreateClientGateway_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import beyondcorp_clientgateways_v1 - - -async def sample_create_client_gateway(): - # Create a client - client = beyondcorp_clientgateways_v1.ClientGatewaysServiceAsyncClient() - - # Initialize request argument(s) - client_gateway = beyondcorp_clientgateways_v1.ClientGateway() - client_gateway.name = "name_value" - - request = beyondcorp_clientgateways_v1.CreateClientGatewayRequest( - parent="parent_value", - client_gateway=client_gateway, - ) - - # Make the request - operation = await client.create_client_gateway(request=request) - - print("Waiting for operation to complete...") - - response = await operation.result() - - # Handle the response - print(response) - -# [END beyondcorp_v1_generated_ClientGatewaysService_CreateClientGateway_async] diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/samples/generated_samples/beyondcorp_v1_generated_client_gateways_service_create_client_gateway_sync.py b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/samples/generated_samples/beyondcorp_v1_generated_client_gateways_service_create_client_gateway_sync.py deleted file mode 100644 index cdea142a2ca9..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/samples/generated_samples/beyondcorp_v1_generated_client_gateways_service_create_client_gateway_sync.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateClientGateway -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-beyondcorp-clientgateways - - -# [START beyondcorp_v1_generated_ClientGatewaysService_CreateClientGateway_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import beyondcorp_clientgateways_v1 - - -def sample_create_client_gateway(): - # Create a client - client = beyondcorp_clientgateways_v1.ClientGatewaysServiceClient() - - # Initialize request argument(s) - client_gateway = beyondcorp_clientgateways_v1.ClientGateway() - client_gateway.name = "name_value" - - request = beyondcorp_clientgateways_v1.CreateClientGatewayRequest( - parent="parent_value", - client_gateway=client_gateway, - ) - - # Make the request - operation = client.create_client_gateway(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END beyondcorp_v1_generated_ClientGatewaysService_CreateClientGateway_sync] diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/samples/generated_samples/beyondcorp_v1_generated_client_gateways_service_delete_client_gateway_async.py b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/samples/generated_samples/beyondcorp_v1_generated_client_gateways_service_delete_client_gateway_async.py deleted file mode 100644 index d4b8d420f755..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/samples/generated_samples/beyondcorp_v1_generated_client_gateways_service_delete_client_gateway_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteClientGateway -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-beyondcorp-clientgateways - - -# [START beyondcorp_v1_generated_ClientGatewaysService_DeleteClientGateway_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import beyondcorp_clientgateways_v1 - - -async def sample_delete_client_gateway(): - # Create a client - client = beyondcorp_clientgateways_v1.ClientGatewaysServiceAsyncClient() - - # Initialize request argument(s) - request = beyondcorp_clientgateways_v1.DeleteClientGatewayRequest( - name="name_value", - ) - - # Make the request - operation = await client.delete_client_gateway(request=request) - - print("Waiting for operation to complete...") - - response = await operation.result() - - # Handle the response - print(response) - -# [END beyondcorp_v1_generated_ClientGatewaysService_DeleteClientGateway_async] diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/samples/generated_samples/beyondcorp_v1_generated_client_gateways_service_delete_client_gateway_sync.py b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/samples/generated_samples/beyondcorp_v1_generated_client_gateways_service_delete_client_gateway_sync.py deleted file mode 100644 index f3b58903084b..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/samples/generated_samples/beyondcorp_v1_generated_client_gateways_service_delete_client_gateway_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteClientGateway -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-beyondcorp-clientgateways - - -# [START beyondcorp_v1_generated_ClientGatewaysService_DeleteClientGateway_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import beyondcorp_clientgateways_v1 - - -def sample_delete_client_gateway(): - # Create a client - client = beyondcorp_clientgateways_v1.ClientGatewaysServiceClient() - - # Initialize request argument(s) - request = beyondcorp_clientgateways_v1.DeleteClientGatewayRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_client_gateway(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END beyondcorp_v1_generated_ClientGatewaysService_DeleteClientGateway_sync] diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/samples/generated_samples/beyondcorp_v1_generated_client_gateways_service_get_client_gateway_async.py b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/samples/generated_samples/beyondcorp_v1_generated_client_gateways_service_get_client_gateway_async.py deleted file mode 100644 index 98fae33232be..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/samples/generated_samples/beyondcorp_v1_generated_client_gateways_service_get_client_gateway_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetClientGateway -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-beyondcorp-clientgateways - - -# [START beyondcorp_v1_generated_ClientGatewaysService_GetClientGateway_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import beyondcorp_clientgateways_v1 - - -async def sample_get_client_gateway(): - # Create a client - client = beyondcorp_clientgateways_v1.ClientGatewaysServiceAsyncClient() - - # Initialize request argument(s) - request = beyondcorp_clientgateways_v1.GetClientGatewayRequest( - name="name_value", - ) - - # Make the request - response = await client.get_client_gateway(request=request) - - # Handle the response - print(response) - -# [END beyondcorp_v1_generated_ClientGatewaysService_GetClientGateway_async] diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/samples/generated_samples/beyondcorp_v1_generated_client_gateways_service_get_client_gateway_sync.py b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/samples/generated_samples/beyondcorp_v1_generated_client_gateways_service_get_client_gateway_sync.py deleted file mode 100644 index 529f78b7aa7c..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/samples/generated_samples/beyondcorp_v1_generated_client_gateways_service_get_client_gateway_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetClientGateway -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-beyondcorp-clientgateways - - -# [START beyondcorp_v1_generated_ClientGatewaysService_GetClientGateway_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import beyondcorp_clientgateways_v1 - - -def sample_get_client_gateway(): - # Create a client - client = beyondcorp_clientgateways_v1.ClientGatewaysServiceClient() - - # Initialize request argument(s) - request = beyondcorp_clientgateways_v1.GetClientGatewayRequest( - name="name_value", - ) - - # Make the request - response = client.get_client_gateway(request=request) - - # Handle the response - print(response) - -# [END beyondcorp_v1_generated_ClientGatewaysService_GetClientGateway_sync] diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/samples/generated_samples/beyondcorp_v1_generated_client_gateways_service_list_client_gateways_async.py b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/samples/generated_samples/beyondcorp_v1_generated_client_gateways_service_list_client_gateways_async.py deleted file mode 100644 index 7fc7f2a48e61..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/samples/generated_samples/beyondcorp_v1_generated_client_gateways_service_list_client_gateways_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListClientGateways -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-beyondcorp-clientgateways - - -# [START beyondcorp_v1_generated_ClientGatewaysService_ListClientGateways_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import beyondcorp_clientgateways_v1 - - -async def sample_list_client_gateways(): - # Create a client - client = beyondcorp_clientgateways_v1.ClientGatewaysServiceAsyncClient() - - # Initialize request argument(s) - request = beyondcorp_clientgateways_v1.ListClientGatewaysRequest( - parent="parent_value", - ) - - # Make the request - page_result = await client.list_client_gateways(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END beyondcorp_v1_generated_ClientGatewaysService_ListClientGateways_async] diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/samples/generated_samples/beyondcorp_v1_generated_client_gateways_service_list_client_gateways_sync.py b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/samples/generated_samples/beyondcorp_v1_generated_client_gateways_service_list_client_gateways_sync.py deleted file mode 100644 index 6a2778c68ed2..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/samples/generated_samples/beyondcorp_v1_generated_client_gateways_service_list_client_gateways_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListClientGateways -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-beyondcorp-clientgateways - - -# [START beyondcorp_v1_generated_ClientGatewaysService_ListClientGateways_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import beyondcorp_clientgateways_v1 - - -def sample_list_client_gateways(): - # Create a client - client = beyondcorp_clientgateways_v1.ClientGatewaysServiceClient() - - # Initialize request argument(s) - request = beyondcorp_clientgateways_v1.ListClientGatewaysRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_client_gateways(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END beyondcorp_v1_generated_ClientGatewaysService_ListClientGateways_sync] diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientgateways.v1.json b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientgateways.v1.json deleted file mode 100644 index 1aa87eed26ed..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientgateways.v1.json +++ /dev/null @@ -1,675 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.beyondcorp.clientgateways.v1", - "version": "v1" - } - ], - "language": "PYTHON", - "name": "google-cloud-beyondcorp-clientgateways", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.beyondcorp_clientgateways_v1.ClientGatewaysServiceAsyncClient", - "shortName": "ClientGatewaysServiceAsyncClient" - }, - "fullName": "google.cloud.beyondcorp_clientgateways_v1.ClientGatewaysServiceAsyncClient.create_client_gateway", - "method": { - "fullName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService.CreateClientGateway", - "service": { - "fullName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService", - "shortName": "ClientGatewaysService" - }, - "shortName": "CreateClientGateway" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_clientgateways_v1.types.CreateClientGatewayRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "client_gateway", - "type": "google.cloud.beyondcorp_clientgateways_v1.types.ClientGateway" - }, - { - "name": "client_gateway_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_client_gateway" - }, - "description": "Sample for CreateClientGateway", - "file": "beyondcorp_v1_generated_client_gateways_service_create_client_gateway_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_ClientGatewaysService_CreateClientGateway_async", - "segments": [ - { - "end": 59, - "start": 27, - "type": "FULL" - }, - { - "end": 59, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 56, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 60, - "start": 57, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"beyondcorp_v1_generated_client_gateways_service_create_client_gateway_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.beyondcorp_clientgateways_v1.ClientGatewaysServiceClient", - "shortName": "ClientGatewaysServiceClient" - }, - "fullName": "google.cloud.beyondcorp_clientgateways_v1.ClientGatewaysServiceClient.create_client_gateway", - "method": { - "fullName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService.CreateClientGateway", - "service": { - "fullName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService", - "shortName": "ClientGatewaysService" - }, - "shortName": "CreateClientGateway" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_clientgateways_v1.types.CreateClientGatewayRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "client_gateway", - "type": "google.cloud.beyondcorp_clientgateways_v1.types.ClientGateway" - }, - { - "name": "client_gateway_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_client_gateway" - }, - "description": "Sample for CreateClientGateway", - "file": "beyondcorp_v1_generated_client_gateways_service_create_client_gateway_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_ClientGatewaysService_CreateClientGateway_sync", - "segments": [ - { - "end": 59, - "start": 27, - "type": "FULL" - }, - { - "end": 59, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 56, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 60, - "start": 57, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_client_gateways_service_create_client_gateway_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.beyondcorp_clientgateways_v1.ClientGatewaysServiceAsyncClient", - "shortName": "ClientGatewaysServiceAsyncClient" - }, - "fullName": "google.cloud.beyondcorp_clientgateways_v1.ClientGatewaysServiceAsyncClient.delete_client_gateway", - "method": { - "fullName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService.DeleteClientGateway", - "service": { - "fullName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService", - "shortName": "ClientGatewaysService" - }, - "shortName": "DeleteClientGateway" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_clientgateways_v1.types.DeleteClientGatewayRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_client_gateway" - }, - "description": "Sample for DeleteClientGateway", - "file": "beyondcorp_v1_generated_client_gateways_service_delete_client_gateway_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_ClientGatewaysService_DeleteClientGateway_async", - "segments": [ - 
{ - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_client_gateways_service_delete_client_gateway_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.beyondcorp_clientgateways_v1.ClientGatewaysServiceClient", - "shortName": "ClientGatewaysServiceClient" - }, - "fullName": "google.cloud.beyondcorp_clientgateways_v1.ClientGatewaysServiceClient.delete_client_gateway", - "method": { - "fullName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService.DeleteClientGateway", - "service": { - "fullName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService", - "shortName": "ClientGatewaysService" - }, - "shortName": "DeleteClientGateway" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_clientgateways_v1.types.DeleteClientGatewayRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_client_gateway" - }, - "description": "Sample for DeleteClientGateway", - "file": "beyondcorp_v1_generated_client_gateways_service_delete_client_gateway_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_ClientGatewaysService_DeleteClientGateway_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_client_gateways_service_delete_client_gateway_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.beyondcorp_clientgateways_v1.ClientGatewaysServiceAsyncClient", - "shortName": "ClientGatewaysServiceAsyncClient" - }, - "fullName": "google.cloud.beyondcorp_clientgateways_v1.ClientGatewaysServiceAsyncClient.get_client_gateway", - "method": { - "fullName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService.GetClientGateway", - "service": { - "fullName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService", - "shortName": "ClientGatewaysService" - }, - "shortName": "GetClientGateway" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_clientgateways_v1.types.GetClientGatewayRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.beyondcorp_clientgateways_v1.types.ClientGateway", - "shortName": "get_client_gateway" - }, - "description": "Sample for GetClientGateway", - "file": 
"beyondcorp_v1_generated_client_gateways_service_get_client_gateway_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_ClientGatewaysService_GetClientGateway_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_client_gateways_service_get_client_gateway_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.beyondcorp_clientgateways_v1.ClientGatewaysServiceClient", - "shortName": "ClientGatewaysServiceClient" - }, - "fullName": "google.cloud.beyondcorp_clientgateways_v1.ClientGatewaysServiceClient.get_client_gateway", - "method": { - "fullName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService.GetClientGateway", - "service": { - "fullName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService", - "shortName": "ClientGatewaysService" - }, - "shortName": "GetClientGateway" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_clientgateways_v1.types.GetClientGatewayRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.beyondcorp_clientgateways_v1.types.ClientGateway", - "shortName": "get_client_gateway" - }, - "description": "Sample for GetClientGateway", - "file": "beyondcorp_v1_generated_client_gateways_service_get_client_gateway_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_ClientGatewaysService_GetClientGateway_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_client_gateways_service_get_client_gateway_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.beyondcorp_clientgateways_v1.ClientGatewaysServiceAsyncClient", - "shortName": "ClientGatewaysServiceAsyncClient" - }, - "fullName": "google.cloud.beyondcorp_clientgateways_v1.ClientGatewaysServiceAsyncClient.list_client_gateways", - "method": { - "fullName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService.ListClientGateways", - "service": { - "fullName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService", - "shortName": "ClientGatewaysService" - }, - "shortName": "ListClientGateways" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_clientgateways_v1.types.ListClientGatewaysRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": 
"Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.beyondcorp_clientgateways_v1.services.client_gateways_service.pagers.ListClientGatewaysAsyncPager", - "shortName": "list_client_gateways" - }, - "description": "Sample for ListClientGateways", - "file": "beyondcorp_v1_generated_client_gateways_service_list_client_gateways_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_ClientGatewaysService_ListClientGateways_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_client_gateways_service_list_client_gateways_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.beyondcorp_clientgateways_v1.ClientGatewaysServiceClient", - "shortName": "ClientGatewaysServiceClient" - }, - "fullName": "google.cloud.beyondcorp_clientgateways_v1.ClientGatewaysServiceClient.list_client_gateways", - "method": { - "fullName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService.ListClientGateways", - "service": { - "fullName": "google.cloud.beyondcorp.clientgateways.v1.ClientGatewaysService", - "shortName": "ClientGatewaysService" - }, - "shortName": "ListClientGateways" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.beyondcorp_clientgateways_v1.types.ListClientGatewaysRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.beyondcorp_clientgateways_v1.services.client_gateways_service.pagers.ListClientGatewaysPager", - "shortName": "list_client_gateways" - }, - "description": "Sample for ListClientGateways", - "file": "beyondcorp_v1_generated_client_gateways_service_list_client_gateways_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "beyondcorp_v1_generated_ClientGatewaysService_ListClientGateways_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "beyondcorp_v1_generated_client_gateways_service_list_client_gateways_sync.py" - } - ] -} diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/scripts/fixup_beyondcorp_clientgateways_v1_keywords.py b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/scripts/fixup_beyondcorp_clientgateways_v1_keywords.py deleted file mode 100644 index 6a56be3f3378..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/scripts/fixup_beyondcorp_clientgateways_v1_keywords.py +++ /dev/null @@ -1,179 +0,0 @@ -#! 
/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class beyondcorp_clientgatewaysCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_client_gateway': ('parent', 'client_gateway', 'client_gateway_id', 'request_id', 'validate_only', ), - 'delete_client_gateway': ('name', 'request_id', 'validate_only', ), - 'get_client_gateway': ('name', ), - 'list_client_gateways': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=beyondcorp_clientgatewaysCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. 
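    For example (an editor's illustration; the resource path is hypothetical),
    a flattened positional call such as

        client.get_client_gateway("projects/p/locations/l/clientGateways/gw")

    is rewritten to the request-object form

        client.get_client_gateway(request={'name': "projects/p/locations/l/clientGateways/gw"})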
- - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the beyondcorp_clientgateways client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/setup.py b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/setup.py deleted file mode 100644 index bf537b691f45..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/setup.py +++ /dev/null @@ -1,99 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
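The keyword fixup script above un-flattens client method calls: positional arguments that map onto the method's request fields are folded into a single request dict, while the control parameters (retry, timeout, metadata) remain keyword arguments. A minimal sketch of the effect, assuming the beyondcorp_clientgatewaysCallTransformer class defined in the script above is in scope:

.. code-block:: python

    import libcst as cst

    # A flattened call as it might appear in pre-existing user code.
    src = "client.get_client_gateway('projects/p/locations/l/clientGateways/g')\n"

    # Parse the source, apply the transformer, and emit the rewritten call.
    tree = cst.parse_module(src)
    print(tree.visit(beyondcorp_clientgatewaysCallTransformer()).code)
    # client.get_client_gateway(request={'name': 'projects/p/locations/l/clientGateways/g'})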
-# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-beyondcorp-clientgateways' - - -description = "Google Cloud Beyondcorp Clientgateways API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/beyondcorp_clientgateways/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "grpc-google-iam-v1 >= 0.14.0, <1.0.0dev", -] -extras = { -} -url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-beyondcorp-clientgateways" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - extras_require=extras, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/testing/constraints-3.10.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/testing/constraints-3.10.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/testing/constraints-3.11.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/testing/constraints-3.11.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/testing/constraints-3.12.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/testing/constraints-3.12.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/testing/constraints-3.13.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/testing/constraints-3.13.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/testing/constraints-3.7.txt deleted file mode 100644 index fb7e93a1b473..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/testing/constraints-3.7.txt +++ /dev/null @@ -1,11 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -proto-plus==1.22.3 -protobuf==3.20.2 -grpc-google-iam-v1==0.14.0 diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/testing/constraints-3.8.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/testing/constraints-3.8.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/testing/constraints-3.9.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/testing/constraints-3.9.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. 
-# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/tests/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/tests/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/tests/unit/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/tests/unit/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/tests/unit/gapic/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
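The constraints files above pin nothing for Python 3.8 through 3.13, so unit tests on those versions run against current releases, while constraints-3.7.txt pins every dependency to the exact lower bound declared in setup.py; installing against that file (for example, pip install -c testing/constraints-3.7.txt .) is what checks that the declared minimum versions actually work.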
-# diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/tests/unit/gapic/beyondcorp_clientgateways_v1/__init__.py b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/tests/unit/gapic/beyondcorp_clientgateways_v1/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/tests/unit/gapic/beyondcorp_clientgateways_v1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/tests/unit/gapic/beyondcorp_clientgateways_v1/test_client_gateways_service.py b/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/tests/unit/gapic/beyondcorp_clientgateways_v1/test_client_gateways_service.py deleted file mode 100644 index 930342739de6..000000000000 --- a/owl-bot-staging/google-cloud-beyondcorp-clientgateways/v1/tests/unit/gapic/beyondcorp_clientgateways_v1/test_client_gateways_service.py +++ /dev/null @@ -1,6300 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation -from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.beyondcorp_clientgateways_v1.services.client_gateways_service import ClientGatewaysServiceAsyncClient -from google.cloud.beyondcorp_clientgateways_v1.services.client_gateways_service import ClientGatewaysServiceClient -from google.cloud.beyondcorp_clientgateways_v1.services.client_gateways_service import pagers -from google.cloud.beyondcorp_clientgateways_v1.services.client_gateways_service import transports -from google.cloud.beyondcorp_clientgateways_v1.types import client_gateways_service -from google.cloud.location import locations_pb2 -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -import google.auth - - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": "service account credentials", - "principal": "service-account@example.com", -} -CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
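# (Concretely: an endpoint containing "localhost" is swapped for
# "foo.googleapis.com" so that a distinct mTLS endpoint can be derived
# from it; any real endpoint is returned unchanged.)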
-def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert ClientGatewaysServiceClient._get_default_mtls_endpoint(None) is None - assert ClientGatewaysServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert ClientGatewaysServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert ClientGatewaysServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert ClientGatewaysServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert ClientGatewaysServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert ClientGatewaysServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert ClientGatewaysServiceClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert ClientGatewaysServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - ClientGatewaysServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert ClientGatewaysServiceClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert ClientGatewaysServiceClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert ClientGatewaysServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - ClientGatewaysServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert ClientGatewaysServiceClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert ClientGatewaysServiceClient._get_client_cert_source(None, False) is None - assert 
ClientGatewaysServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert ClientGatewaysServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert ClientGatewaysServiceClient._get_client_cert_source(None, True) is mock_default_cert_source - assert ClientGatewaysServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(ClientGatewaysServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ClientGatewaysServiceClient)) -@mock.patch.object(ClientGatewaysServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ClientGatewaysServiceAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = ClientGatewaysServiceClient._DEFAULT_UNIVERSE - default_endpoint = ClientGatewaysServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = ClientGatewaysServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert ClientGatewaysServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert ClientGatewaysServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == ClientGatewaysServiceClient.DEFAULT_MTLS_ENDPOINT - assert ClientGatewaysServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert ClientGatewaysServiceClient._get_api_endpoint(None, None, default_universe, "always") == ClientGatewaysServiceClient.DEFAULT_MTLS_ENDPOINT - assert ClientGatewaysServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == ClientGatewaysServiceClient.DEFAULT_MTLS_ENDPOINT - assert ClientGatewaysServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert ClientGatewaysServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - ClientGatewaysServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." - - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert ClientGatewaysServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert ClientGatewaysServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert ClientGatewaysServiceClient._get_universe_domain(None, None) == ClientGatewaysServiceClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - ClientGatewaysServiceClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
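test__get_universe_domain above pins down the resolution order: an explicit client option wins, then the GOOGLE_CLOUD_UNIVERSE_DOMAIN environment value, then the googleapis.com default, and an empty string is rejected outright. A standalone sketch of that order (an assumed simplification, not the client's actual helper):

.. code-block:: python

    def resolve_universe_domain(client_value=None, env_value=None):
        # Mirrors the precedence asserted in test__get_universe_domain.
        if client_value == "":
            raise ValueError("Universe Domain cannot be an empty string.")
        return client_value or env_value or "googleapis.com"

    assert resolve_universe_domain("foo.com", "bar.com") == "foo.com"
    assert resolve_universe_domain(None, "bar.com") == "bar.com"
    assert resolve_universe_domain() == "googleapis.com"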
- -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = ClientGatewaysServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - -@pytest.mark.parametrize("error_code", [401,403,404,500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = ClientGatewaysServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - -@pytest.mark.parametrize("client_class,transport_name", [ - (ClientGatewaysServiceClient, "grpc"), - (ClientGatewaysServiceAsyncClient, "grpc_asyncio"), - (ClientGatewaysServiceClient, "rest"), -]) -def test_client_gateways_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'beyondcorp.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://beyondcorp.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.ClientGatewaysServiceGrpcTransport, "grpc"), - (transports.ClientGatewaysServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.ClientGatewaysServiceRestTransport, "rest"), -]) -def test_client_gateways_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (ClientGatewaysServiceClient, "grpc"), - (ClientGatewaysServiceAsyncClient, "grpc_asyncio"), - (ClientGatewaysServiceClient, "rest"), -]) -def test_client_gateways_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - 
client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'beyondcorp.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://beyondcorp.googleapis.com' - ) - - -def test_client_gateways_service_client_get_transport_class(): - transport = ClientGatewaysServiceClient.get_transport_class() - available_transports = [ - transports.ClientGatewaysServiceGrpcTransport, - transports.ClientGatewaysServiceRestTransport, - ] - assert transport in available_transports - - transport = ClientGatewaysServiceClient.get_transport_class("grpc") - assert transport == transports.ClientGatewaysServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (ClientGatewaysServiceClient, transports.ClientGatewaysServiceGrpcTransport, "grpc"), - (ClientGatewaysServiceAsyncClient, transports.ClientGatewaysServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (ClientGatewaysServiceClient, transports.ClientGatewaysServiceRestTransport, "rest"), -]) -@mock.patch.object(ClientGatewaysServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ClientGatewaysServiceClient)) -@mock.patch.object(ClientGatewaysServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ClientGatewaysServiceAsyncClient)) -def test_client_gateways_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(ClientGatewaysServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(ClientGatewaysServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". 
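    # ("never" always selects the plain endpoint, "always" forces the mTLS
    # endpoint, and "auto", the default, selects the mTLS endpoint only when
    # a client certificate is available; unsupported values raise, as
    # exercised further below.)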
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (ClientGatewaysServiceClient, transports.ClientGatewaysServiceGrpcTransport, "grpc", "true"), - 
(ClientGatewaysServiceAsyncClient, transports.ClientGatewaysServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (ClientGatewaysServiceClient, transports.ClientGatewaysServiceGrpcTransport, "grpc", "false"), - (ClientGatewaysServiceAsyncClient, transports.ClientGatewaysServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (ClientGatewaysServiceClient, transports.ClientGatewaysServiceRestTransport, "rest", "true"), - (ClientGatewaysServiceClient, transports.ClientGatewaysServiceRestTransport, "rest", "false"), -]) -@mock.patch.object(ClientGatewaysServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ClientGatewaysServiceClient)) -@mock.patch.object(ClientGatewaysServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ClientGatewaysServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_client_gateways_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
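    # (The ADC client cert is the default certificate discovered through
    # google.auth.transport.mtls, which the block below simulates by mocking
    # has_default_client_cert_source and default_client_cert_source.)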
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - ClientGatewaysServiceClient, ClientGatewaysServiceAsyncClient -]) -@mock.patch.object(ClientGatewaysServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ClientGatewaysServiceClient)) -@mock.patch.object(ClientGatewaysServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ClientGatewaysServiceAsyncClient)) -def test_client_gateways_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - ClientGatewaysServiceClient, ClientGatewaysServiceAsyncClient -]) -@mock.patch.object(ClientGatewaysServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ClientGatewaysServiceClient)) -@mock.patch.object(ClientGatewaysServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ClientGatewaysServiceAsyncClient)) -def test_client_gateways_service_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = ClientGatewaysServiceClient._DEFAULT_UNIVERSE - default_endpoint = ClientGatewaysServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = ClientGatewaysServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. 
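    # (The cases below exercise the precedence: an explicit api_endpoint
    # override wins, then the GOOGLE_API_USE_MTLS_ENDPOINT setting, then the
    # universe domain from client options, and finally the default endpoint
    # template populated with the googleapis.com universe.)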
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (ClientGatewaysServiceClient, transports.ClientGatewaysServiceGrpcTransport, "grpc"), - (ClientGatewaysServiceAsyncClient, transports.ClientGatewaysServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (ClientGatewaysServiceClient, transports.ClientGatewaysServiceRestTransport, "rest"), -]) -def test_client_gateways_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. 
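    # (Scopes supplied via client options pass through unchanged to the
    # transport constructor, as the scopes=["1", "2"] assertion below
    # confirms; they are distinct from the default_scopes the transport
    # itself requests when creating a channel.)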
- options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (ClientGatewaysServiceClient, transports.ClientGatewaysServiceGrpcTransport, "grpc", grpc_helpers), - (ClientGatewaysServiceAsyncClient, transports.ClientGatewaysServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (ClientGatewaysServiceClient, transports.ClientGatewaysServiceRestTransport, "rest", None), -]) -def test_client_gateways_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_client_gateways_service_client_client_options_from_dict(): - with mock.patch('google.cloud.beyondcorp_clientgateways_v1.services.client_gateways_service.transports.ClientGatewaysServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = ClientGatewaysServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (ClientGatewaysServiceClient, transports.ClientGatewaysServiceGrpcTransport, "grpc", grpc_helpers), - (ClientGatewaysServiceAsyncClient, transports.ClientGatewaysServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_client_gateways_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "beyondcorp.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="beyondcorp.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - client_gateways_service.ListClientGatewaysRequest, - dict, -]) -def test_list_client_gateways(request_type, transport: str = 'grpc'): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_client_gateways), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = client_gateways_service.ListClientGatewaysResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - response = client.list_client_gateways(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = client_gateways_service.ListClientGatewaysRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListClientGatewaysPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -def test_list_client_gateways_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
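    # (Per AIP-4235, fields annotated for auto-population, typically a UUID4
    # request_id, are filled in by the generated client when left unset,
    # which is why only the plain string fields are set explicitly here.)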
- request = client_gateways_service.ListClientGatewaysRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_client_gateways), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_client_gateways(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == client_gateways_service.ListClientGatewaysRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - order_by='order_by_value', - ) - -def test_list_client_gateways_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_client_gateways in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_client_gateways] = mock_rpc - request = {} - client.list_client_gateways(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_client_gateways(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_client_gateways_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ClientGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_client_gateways in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_client_gateways] = mock_rpc - - request = {} - await client.list_client_gateways(request) - - # Establish that the underlying gRPC stub method was called. 
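    # (The call above went through the cached wrapper that was swapped for
    # mock_rpc; the second call below must reuse that same cached wrapper,
    # so wrap_method is not invoked again.)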
-        assert mock_rpc.call_count == 1
-
-        await client.list_client_gateways(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_client_gateways_async(transport: str = 'grpc_asyncio', request_type=client_gateways_service.ListClientGatewaysRequest):
-    client = ClientGatewaysServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_client_gateways),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(client_gateways_service.ListClientGatewaysResponse(
-            next_page_token='next_page_token_value',
-            unreachable=['unreachable_value'],
-        ))
-        response = await client.list_client_gateways(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = client_gateways_service.ListClientGatewaysRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListClientGatewaysAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable == ['unreachable_value']
-
-
-@pytest.mark.asyncio
-async def test_list_client_gateways_async_from_dict():
-    await test_list_client_gateways_async(request_type=dict)
-
-def test_list_client_gateways_field_headers():
-    client = ClientGatewaysServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = client_gateways_service.ListClientGatewaysRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_client_gateways),
-            '__call__') as call:
-        call.return_value = client_gateways_service.ListClientGatewaysResponse()
-        client.list_client_gateways(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_client_gateways_field_headers_async():
-    client = ClientGatewaysServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = client_gateways_service.ListClientGatewaysRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_client_gateways),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(client_gateways_service.ListClientGatewaysResponse())
-        await client.list_client_gateways(request)
-
-        # Establish that the underlying gRPC stub method was called.
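-        # (FakeUnaryUnaryCall wraps the response in an awaitable, so the
-        # mocked stub records the call and its kwargs carry the routing
-        # metadata inspected below.)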
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_list_client_gateways_flattened():
-    client = ClientGatewaysServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_client_gateways),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = client_gateways_service.ListClientGatewaysResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_client_gateways(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_client_gateways_flattened_error():
-    client = ClientGatewaysServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_client_gateways(
-            client_gateways_service.ListClientGatewaysRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_client_gateways_flattened_async():
-    client = ClientGatewaysServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_client_gateways),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(client_gateways_service.ListClientGatewaysResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_client_gateways(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_client_gateways_flattened_error_async():
-    client = ClientGatewaysServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_client_gateways(
-            client_gateways_service.ListClientGatewaysRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_client_gateways_pager(transport_name: str = "grpc"):
-    client = ClientGatewaysServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_client_gateways),
-            '__call__') as call:
-        # Set the response to a series of pages.
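-        # (The four responses below end with one that has no
-        # next_page_token; the trailing RuntimeError is a sentinel that
-        # would fail the test if the pager over-fetched.)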
- call.side_effect = ( - client_gateways_service.ListClientGatewaysResponse( - client_gateways=[ - client_gateways_service.ClientGateway(), - client_gateways_service.ClientGateway(), - client_gateways_service.ClientGateway(), - ], - next_page_token='abc', - ), - client_gateways_service.ListClientGatewaysResponse( - client_gateways=[], - next_page_token='def', - ), - client_gateways_service.ListClientGatewaysResponse( - client_gateways=[ - client_gateways_service.ClientGateway(), - ], - next_page_token='ghi', - ), - client_gateways_service.ListClientGatewaysResponse( - client_gateways=[ - client_gateways_service.ClientGateway(), - client_gateways_service.ClientGateway(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_client_gateways(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, client_gateways_service.ClientGateway) - for i in results) -def test_list_client_gateways_pages(transport_name: str = "grpc"): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_client_gateways), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - client_gateways_service.ListClientGatewaysResponse( - client_gateways=[ - client_gateways_service.ClientGateway(), - client_gateways_service.ClientGateway(), - client_gateways_service.ClientGateway(), - ], - next_page_token='abc', - ), - client_gateways_service.ListClientGatewaysResponse( - client_gateways=[], - next_page_token='def', - ), - client_gateways_service.ListClientGatewaysResponse( - client_gateways=[ - client_gateways_service.ClientGateway(), - ], - next_page_token='ghi', - ), - client_gateways_service.ListClientGatewaysResponse( - client_gateways=[ - client_gateways_service.ClientGateway(), - client_gateways_service.ClientGateway(), - ], - ), - RuntimeError, - ) - pages = list(client.list_client_gateways(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_client_gateways_async_pager(): - client = ClientGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_client_gateways), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - client_gateways_service.ListClientGatewaysResponse( - client_gateways=[ - client_gateways_service.ClientGateway(), - client_gateways_service.ClientGateway(), - client_gateways_service.ClientGateway(), - ], - next_page_token='abc', - ), - client_gateways_service.ListClientGatewaysResponse( - client_gateways=[], - next_page_token='def', - ), - client_gateways_service.ListClientGatewaysResponse( - client_gateways=[ - client_gateways_service.ClientGateway(), - ], - next_page_token='ghi', - ), - client_gateways_service.ListClientGatewaysResponse( - client_gateways=[ - client_gateways_service.ClientGateway(), - client_gateways_service.ClientGateway(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_client_gateways(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, client_gateways_service.ClientGateway) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_client_gateways_async_pages(): - client = ClientGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_client_gateways), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - client_gateways_service.ListClientGatewaysResponse( - client_gateways=[ - client_gateways_service.ClientGateway(), - client_gateways_service.ClientGateway(), - client_gateways_service.ClientGateway(), - ], - next_page_token='abc', - ), - client_gateways_service.ListClientGatewaysResponse( - client_gateways=[], - next_page_token='def', - ), - client_gateways_service.ListClientGatewaysResponse( - client_gateways=[ - client_gateways_service.ClientGateway(), - ], - next_page_token='ghi', - ), - client_gateways_service.ListClientGatewaysResponse( - client_gateways=[ - client_gateways_service.ClientGateway(), - client_gateways_service.ClientGateway(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_client_gateways(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - client_gateways_service.GetClientGatewayRequest, - dict, -]) -def test_get_client_gateway(request_type, transport: str = 'grpc'): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_client_gateway), - '__call__') as call: - # Designate an appropriate return value for the call. 
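-        # (Each field gets a distinct sentinel value so the per-field
-        # assertions below cannot pass by coincidence.)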
- call.return_value = client_gateways_service.ClientGateway( - name='name_value', - state=client_gateways_service.ClientGateway.State.CREATING, - id='id_value', - client_connector_service='client_connector_service_value', - ) - response = client.get_client_gateway(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = client_gateways_service.GetClientGatewayRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, client_gateways_service.ClientGateway) - assert response.name == 'name_value' - assert response.state == client_gateways_service.ClientGateway.State.CREATING - assert response.id == 'id_value' - assert response.client_connector_service == 'client_connector_service_value' - - -def test_get_client_gateway_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = client_gateways_service.GetClientGatewayRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_client_gateway), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_client_gateway(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == client_gateways_service.GetClientGatewayRequest( - name='name_value', - ) - -def test_get_client_gateway_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_client_gateway in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_client_gateway] = mock_rpc - request = {} - client.get_client_gateway(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.get_client_gateway(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_client_gateway_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = ClientGatewaysServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.get_client_gateway in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.get_client_gateway] = mock_rpc
-
-        request = {}
-        await client.get_client_gateway(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.get_client_gateway(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_client_gateway_async(transport: str = 'grpc_asyncio', request_type=client_gateways_service.GetClientGatewayRequest):
-    client = ClientGatewaysServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_client_gateway),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(client_gateways_service.ClientGateway(
-            name='name_value',
-            state=client_gateways_service.ClientGateway.State.CREATING,
-            id='id_value',
-            client_connector_service='client_connector_service_value',
-        ))
-        response = await client.get_client_gateway(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = client_gateways_service.GetClientGatewayRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, client_gateways_service.ClientGateway)
-    assert response.name == 'name_value'
-    assert response.state == client_gateways_service.ClientGateway.State.CREATING
-    assert response.id == 'id_value'
-    assert response.client_connector_service == 'client_connector_service_value'
-
-
-@pytest.mark.asyncio
-async def test_get_client_gateway_async_from_dict():
-    await test_get_client_gateway_async(request_type=dict)
-
-def test_get_client_gateway_field_headers():
-    client = ClientGatewaysServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
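-    # (Over gRPC there is no URI, so implicit routing parameters are
-    # expected to travel in the x-goog-request-params metadata entry
-    # asserted further down.)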
- request = client_gateways_service.GetClientGatewayRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_client_gateway), - '__call__') as call: - call.return_value = client_gateways_service.ClientGateway() - client.get_client_gateway(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_client_gateway_field_headers_async(): - client = ClientGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = client_gateways_service.GetClientGatewayRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_client_gateway), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(client_gateways_service.ClientGateway()) - await client.get_client_gateway(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_client_gateway_flattened(): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_client_gateway), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = client_gateways_service.ClientGateway() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_client_gateway( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_client_gateway_flattened_error(): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_client_gateway( - client_gateways_service.GetClientGatewayRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_client_gateway_flattened_async(): - client = ClientGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_client_gateway), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(client_gateways_service.ClientGateway())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.get_client_gateway(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_client_gateway_flattened_error_async():
-    client = ClientGatewaysServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.get_client_gateway(
-            client_gateways_service.GetClientGatewayRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  client_gateways_service.CreateClientGatewayRequest,
-  dict,
-])
-def test_create_client_gateway(request_type, transport: str = 'grpc'):
-    client = ClientGatewaysServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_client_gateway),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/spam')
-        response = client.create_client_gateway(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = client_gateways_service.CreateClientGatewayRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, future.Future)
-
-
-def test_create_client_gateway_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = ClientGatewaysServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = client_gateways_service.CreateClientGatewayRequest(
-        parent='parent_value',
-        client_gateway_id='client_gateway_id_value',
-        request_id='request_id_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_client_gateway),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client.create_client_gateway(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == client_gateways_service.CreateClientGatewayRequest( - parent='parent_value', - client_gateway_id='client_gateway_id_value', - request_id='request_id_value', - ) - -def test_create_client_gateway_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_client_gateway in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_client_gateway] = mock_rpc - request = {} - client.create_client_gateway(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_client_gateway(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_client_gateway_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ClientGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_client_gateway in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_client_gateway] = mock_rpc - - request = {} - await client.create_client_gateway(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_client_gateway(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_client_gateway_async(transport: str = 'grpc_asyncio', request_type=client_gateways_service.CreateClientGatewayRequest): - client = ClientGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_client_gateway), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.create_client_gateway(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = client_gateways_service.CreateClientGatewayRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_create_client_gateway_async_from_dict(): - await test_create_client_gateway_async(request_type=dict) - -def test_create_client_gateway_field_headers(): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = client_gateways_service.CreateClientGatewayRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_client_gateway), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_client_gateway(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_client_gateway_field_headers_async(): - client = ClientGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = client_gateways_service.CreateClientGatewayRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_client_gateway), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.create_client_gateway(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_create_client_gateway_flattened():
-    client = ClientGatewaysServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_client_gateway),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.create_client_gateway(
-            parent='parent_value',
-            client_gateway=client_gateways_service.ClientGateway(name='name_value'),
-            client_gateway_id='client_gateway_id_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].client_gateway
-        mock_val = client_gateways_service.ClientGateway(name='name_value')
-        assert arg == mock_val
-        arg = args[0].client_gateway_id
-        mock_val = 'client_gateway_id_value'
-        assert arg == mock_val
-
-
-def test_create_client_gateway_flattened_error():
-    client = ClientGatewaysServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.create_client_gateway(
-            client_gateways_service.CreateClientGatewayRequest(),
-            parent='parent_value',
-            client_gateway=client_gateways_service.ClientGateway(name='name_value'),
-            client_gateway_id='client_gateway_id_value',
-        )
-
-@pytest.mark.asyncio
-async def test_create_client_gateway_flattened_async():
-    client = ClientGatewaysServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_client_gateway),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.create_client_gateway(
-            parent='parent_value',
-            client_gateway=client_gateways_service.ClientGateway(name='name_value'),
-            client_gateway_id='client_gateway_id_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].client_gateway
-        mock_val = client_gateways_service.ClientGateway(name='name_value')
-        assert arg == mock_val
-        arg = args[0].client_gateway_id
-        mock_val = 'client_gateway_id_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_client_gateway_flattened_error_async():
-    client = ClientGatewaysServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
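-    # (Mixing a prebuilt request message with flattened keyword arguments
-    # is ambiguous, so the client is expected to raise ValueError before
-    # any RPC is attempted.)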
- with pytest.raises(ValueError): - await client.create_client_gateway( - client_gateways_service.CreateClientGatewayRequest(), - parent='parent_value', - client_gateway=client_gateways_service.ClientGateway(name='name_value'), - client_gateway_id='client_gateway_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - client_gateways_service.DeleteClientGatewayRequest, - dict, -]) -def test_delete_client_gateway(request_type, transport: str = 'grpc'): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_client_gateway), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.delete_client_gateway(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = client_gateways_service.DeleteClientGatewayRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_client_gateway_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = client_gateways_service.DeleteClientGatewayRequest( - name='name_value', - request_id='request_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_client_gateway), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_client_gateway(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == client_gateways_service.DeleteClientGatewayRequest( - name='name_value', - request_id='request_id_value', - ) - -def test_delete_client_gateway_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_client_gateway in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.delete_client_gateway] = mock_rpc - request = {} - client.delete_client_gateway(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_client_gateway(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_client_gateway_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ClientGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_client_gateway in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_client_gateway] = mock_rpc - - request = {} - await client.delete_client_gateway(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_client_gateway(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_client_gateway_async(transport: str = 'grpc_asyncio', request_type=client_gateways_service.DeleteClientGatewayRequest): - client = ClientGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_client_gateway), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_client_gateway(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = client_gateways_service.DeleteClientGatewayRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
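-    # (LRO-returning methods surface as api_core futures; the terminal
-    # Operation is resolved later through the operations client.)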
- assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_client_gateway_async_from_dict(): - await test_delete_client_gateway_async(request_type=dict) - -def test_delete_client_gateway_field_headers(): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = client_gateways_service.DeleteClientGatewayRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_client_gateway), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_client_gateway(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_client_gateway_field_headers_async(): - client = ClientGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = client_gateways_service.DeleteClientGatewayRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_client_gateway), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_client_gateway(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_client_gateway_flattened(): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_client_gateway), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_client_gateway( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_client_gateway_flattened_error(): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
-    with pytest.raises(ValueError):
-        client.delete_client_gateway(
-            client_gateways_service.DeleteClientGatewayRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_delete_client_gateway_flattened_async():
-    client = ClientGatewaysServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_client_gateway),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.delete_client_gateway(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_client_gateway_flattened_error_async():
-    client = ClientGatewaysServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.delete_client_gateway(
-            client_gateways_service.DeleteClientGatewayRequest(),
-            name='name_value',
-        )
-
-
-def test_list_client_gateways_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = ClientGatewaysServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.list_client_gateways in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.list_client_gateways] = mock_rpc
-
-        request = {}
-        client.list_client_gateways(request)
-
-        # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1 - - client.list_client_gateways(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_client_gateways_rest_required_fields(request_type=client_gateways_service.ListClientGatewaysRequest): - transport_class = transports.ClientGatewaysServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_client_gateways._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_client_gateways._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = client_gateways_service.ListClientGatewaysResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
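-            # (Stubbing transcode with a fixed uri/method lets the test
-            # assert on exactly which query params reach the mocked Session.)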
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = client_gateways_service.ListClientGatewaysResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.list_client_gateways(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_list_client_gateways_rest_unset_required_fields():
-    transport = transports.ClientGatewaysServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.list_client_gateways._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", )))
-
-
-def test_list_client_gateways_rest_flattened():
-    client = ClientGatewaysServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = client_gateways_service.ListClientGatewaysResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            parent='parent_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = client_gateways_service.ListClientGatewaysResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.list_client_gateways(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/clientGateways" % client.transport._host, args[1])
-
-
-def test_list_client_gateways_rest_flattened_error(transport: str = 'rest'):
-    client = ClientGatewaysServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_client_gateways(
-            client_gateways_service.ListClientGatewaysRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_client_gateways_rest_pager(transport: str = 'rest'):
-    client = ClientGatewaysServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # TODO(kbandes): remove this mock unless there's a good reason for it.
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - client_gateways_service.ListClientGatewaysResponse( - client_gateways=[ - client_gateways_service.ClientGateway(), - client_gateways_service.ClientGateway(), - client_gateways_service.ClientGateway(), - ], - next_page_token='abc', - ), - client_gateways_service.ListClientGatewaysResponse( - client_gateways=[], - next_page_token='def', - ), - client_gateways_service.ListClientGatewaysResponse( - client_gateways=[ - client_gateways_service.ClientGateway(), - ], - next_page_token='ghi', - ), - client_gateways_service.ListClientGatewaysResponse( - client_gateways=[ - client_gateways_service.ClientGateway(), - client_gateways_service.ClientGateway(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(client_gateways_service.ListClientGatewaysResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - pager = client.list_client_gateways(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, client_gateways_service.ClientGateway) - for i in results) - - pages = list(client.list_client_gateways(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_get_client_gateway_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_client_gateway in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_client_gateway] = mock_rpc - - request = {} - client.get_client_gateway(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.get_client_gateway(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_get_client_gateway_rest_required_fields(request_type=client_gateways_service.GetClientGatewayRequest):
-    transport_class = transports.ClientGatewaysServiceRestTransport
-
-    request_init = {}
-    request_init["name"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_client_gateway._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["name"] = 'name_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_client_gateway._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
-
-    client = ClientGatewaysServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = client_gateways_service.ClientGateway()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = client_gateways_service.ClientGateway.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.get_client_gateway(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_get_client_gateway_rest_unset_required_fields():
-    transport = transports.ClientGatewaysServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.get_client_gateway._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-def test_get_client_gateway_rest_flattened():
-    client = ClientGatewaysServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
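-    # (Patching the session on this client's own transport keeps the fake
-    # response scoped to this test instead of every requests.Session.)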
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = client_gateways_service.ClientGateway() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/clientGateways/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = client_gateways_service.ClientGateway.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_client_gateway(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/clientGateways/*}" % client.transport._host, args[1]) - - -def test_get_client_gateway_rest_flattened_error(transport: str = 'rest'): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_client_gateway( - client_gateways_service.GetClientGatewayRequest(), - name='name_value', - ) - - -def test_create_client_gateway_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_client_gateway in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expects a string. - client._transport._wrapped_methods[client._transport.create_client_gateway] = mock_rpc - - request = {} - client.create_client_gateway(request) - - # Establish that the underlying gRPC stub method was called.
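For context on the helper patched at the top of this test: `google.api_core.gapic_v1.method.wrap_method` is the api_core function that layers default retry/timeout policy onto a bare transport method. A minimal sketch, assuming some `transport_method` callable:

.. code-block:: python

    from google.api_core import gapic_v1

    # Wrap once (normally at client construction) and reuse the result;
    # re-wrapping on every call is what these tests guard against.
    wrapped = gapic_v1.method.wrap_method(transport_method, default_timeout=60.0)

Hence the expectation that `wrapper_fn` is not invoked again after construction.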
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_client_gateway(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_client_gateway_rest_required_fields(request_type=client_gateways_service.CreateClientGatewayRequest): - transport_class = transports.ClientGatewaysServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_client_gateway._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_client_gateway._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixed in. - assert not set(unset_fields) - set(("client_gateway_id", "request_id", "validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params.
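For reference, the real `path_template.transcode` resolves a request against the method's http options and returns a dict carrying the URI, HTTP method, query params, and (when configured) body. A small sketch with a hypothetical GET-style option:

.. code-block:: python

    from google.api_core import path_template

    # Hypothetical http option; the shape mirrors what generated REST
    # transports hand to transcode().
    http_options = [{'method': 'get',
                     'uri': '/v1/{name=projects/*/locations/*/clientGateways/*}'}]
    result = path_template.transcode(
        http_options, name='projects/p/locations/l/clientGateways/g')
    # result['uri'] == '/v1/projects/p/locations/l/clientGateways/g'

Stubbing it, as the next lines do, sidesteps that URI matching when required fields hold only default values.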
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_client_gateway(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_client_gateway_rest_unset_required_fields(): - transport = transports.ClientGatewaysServiceRestTransport(credentials=ga_credentials.AnonymousCredentials()) - - unset_fields = transport.create_client_gateway._get_unset_required_fields({}) - assert set(unset_fields) == (set(("clientGatewayId", "requestId", "validateOnly", )) & set(("parent", "clientGateway", ))) - - -def test_create_client_gateway_rest_flattened(): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - client_gateway=client_gateways_service.ClientGateway(name='name_value'), - client_gateway_id='client_gateway_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_client_gateway(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/clientGateways" % client.transport._host, args[1]) - - -def test_create_client_gateway_rest_flattened_error(transport: str = 'rest'): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error.
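That error comes from a guard of this general shape inside the generated method (a paraphrased sketch, not a verbatim copy; `parent`, `client_gateway`, and `client_gateway_id` are the method's flattened parameters):

.. code-block:: python

    # Sketch: a request object and flattened fields are mutually exclusive.
    has_flattened_params = any([parent, client_gateway, client_gateway_id])
    if request is not None and has_flattened_params:
        raise ValueError('If the `request` argument is set, then none of '
                         'the individual field arguments should be set.')

The `pytest.raises(ValueError)` block that follows pins down exactly this behavior.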
- with pytest.raises(ValueError): - client.create_client_gateway( - client_gateways_service.CreateClientGatewayRequest(), - parent='parent_value', - client_gateway=client_gateways_service.ClientGateway(name='name_value'), - client_gateway_id='client_gateway_id_value', - ) - - -def test_delete_client_gateway_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_client_gateway in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expects a string. - client._transport._wrapped_methods[client._transport.delete_client_gateway] = mock_rpc - - request = {} - client.delete_client_gateway(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_client_gateway(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_client_gateway_rest_required_fields(request_type=client_gateways_service.DeleteClientGatewayRequest): - transport_class = transports.ClientGatewaysServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_client_gateway._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_client_gateway._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixed in. - assert not set(unset_fields) - set(("request_id", "validate_only", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields.
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_client_gateway(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_client_gateway_rest_unset_required_fields(): - transport = transports.ClientGatewaysServiceRestTransport(credentials=ga_credentials.AnonymousCredentials()) - - unset_fields = transport.delete_client_gateway._get_unset_required_fields({}) - assert set(unset_fields) == (set(("requestId", "validateOnly", )) & set(("name", ))) - - -def test_delete_client_gateway_rest_flattened(): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/clientGateways/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_client_gateway(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/clientGateways/*}" % client.transport._host, args[1]) - - -def test_delete_client_gateway_rest_flattened_error(transport: str = 'rest'): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_client_gateway( - client_gateways_service.DeleteClientGatewayRequest(), - name='name_value', - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.ClientGatewaysServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance.
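Each of these cases reduces to a constructor-time guard along these lines (a simplified sketch; the generated validation covers more combinations, and the message text here is invented):

.. code-block:: python

    # Sketch: ambiguous auth configuration is rejected up front.
    if transport is not None and (credentials or client_options.credentials_file):
        raise ValueError('When a transport instance is provided, provide its '
                         'credentials directly to the transport.')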
- transport = transports.ClientGatewaysServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ClientGatewaysServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.ClientGatewaysServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ClientGatewaysServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ClientGatewaysServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.ClientGatewaysServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ClientGatewaysServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.ClientGatewaysServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = ClientGatewaysServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.ClientGatewaysServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.ClientGatewaysServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.ClientGatewaysServiceGrpcTransport, - transports.ClientGatewaysServiceGrpcAsyncIOTransport, - transports.ClientGatewaysServiceRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = ClientGatewaysServiceClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_client_gateways_empty_call_grpc(): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_client_gateways), - '__call__') as call: - call.return_value = client_gateways_service.ListClientGatewaysResponse() - client.list_client_gateways(request=None) - - # Establish that the underlying stub method was called. 
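Because the call above passed `request=None`, the client will first have materialized a default message, in essence:

.. code-block:: python

    # Sketch: an omitted request becomes an empty, defaulted message, which
    # is why args[0] below compares equal to ListClientGatewaysRequest().
    if request is None:
        request = client_gateways_service.ListClientGatewaysRequest()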
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = client_gateways_service.ListClientGatewaysRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_client_gateway_empty_call_grpc(): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_client_gateway), - '__call__') as call: - call.return_value = client_gateways_service.ClientGateway() - client.get_client_gateway(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = client_gateways_service.GetClientGatewayRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_client_gateway_empty_call_grpc(): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_client_gateway), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.create_client_gateway(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = client_gateways_service.CreateClientGatewayRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_client_gateway_empty_call_grpc(): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_client_gateway), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_client_gateway(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = client_gateways_service.DeleteClientGatewayRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = ClientGatewaysServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = ClientGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_client_gateways_empty_call_grpc_asyncio(): - client = ClientGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_client_gateways), - '__call__') as call: - # Designate an appropriate return value for the call. 
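`grpc_helpers_async.FakeUnaryUnaryCall`, used just below, wraps a canned response so the mocked stub behaves like an awaitable grpc.aio call; conceptually something like:

.. code-block:: python

    # Conceptual stand-in for FakeUnaryUnaryCall (illustrative only).
    class FakeCall:
        def __init__(self, response):
            self._response = response

        def __await__(self):
            async def _resolve():
                return self._response
            return _resolve().__await__()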
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(client_gateways_service.ListClientGatewaysResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - await client.list_client_gateways(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = client_gateways_service.ListClientGatewaysRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_client_gateway_empty_call_grpc_asyncio(): - client = ClientGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_client_gateway), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(client_gateways_service.ClientGateway( - name='name_value', - state=client_gateways_service.ClientGateway.State.CREATING, - id='id_value', - client_connector_service='client_connector_service_value', - )) - await client.get_client_gateway(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = client_gateways_service.GetClientGatewayRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_client_gateway_empty_call_grpc_asyncio(): - client = ClientGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_client_gateway), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_client_gateway(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = client_gateways_service.CreateClientGatewayRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_client_gateway_empty_call_grpc_asyncio(): - client = ClientGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_client_gateway), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_client_gateway(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = client_gateways_service.DeleteClientGatewayRequest() - - assert args[0] == request_msg - - -def test_transport_kind_rest(): - transport = ClientGatewaysServiceClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" - - -def test_list_client_gateways_rest_bad_request(request_type=client_gateways_service.ListClientGatewaysRequest): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_client_gateways(request) - - -@pytest.mark.parametrize("request_type", [ - client_gateways_service.ListClientGatewaysRequest, - dict, -]) -def test_list_client_gateways_rest_call_success(request_type): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = client_gateways_service.ListClientGatewaysResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = client_gateways_service.ListClientGatewaysResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_client_gateways(request) - - # Establish that the response is the type that we expect. 
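The expected type here is a pager rather than the raw response: it retains the bound method and the original request so iteration can keep following `next_page_token`. Typical usage, assuming a populated `request`:

.. code-block:: python

    # Items are yielded across page boundaries; the pager refetches as needed.
    for client_gateway in client.list_client_gateways(request=request):
        print(client_gateway.name)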
- assert isinstance(response, pagers.ListClientGatewaysPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_client_gateways_rest_interceptors(null_interceptor): - transport = transports.ClientGatewaysServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ClientGatewaysServiceRestInterceptor(), - ) - client = ClientGatewaysServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ClientGatewaysServiceRestInterceptor, "post_list_client_gateways") as post, \ - mock.patch.object(transports.ClientGatewaysServiceRestInterceptor, "post_list_client_gateways_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.ClientGatewaysServiceRestInterceptor, "pre_list_client_gateways") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = client_gateways_service.ListClientGatewaysRequest.pb(client_gateways_service.ListClientGatewaysRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = client_gateways_service.ListClientGatewaysResponse.to_json(client_gateways_service.ListClientGatewaysResponse()) - req.return_value.content = return_value - - request = client_gateways_service.ListClientGatewaysRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = client_gateways_service.ListClientGatewaysResponse() - post_with_metadata.return_value = client_gateways_service.ListClientGatewaysResponse(), metadata - - client.list_client_gateways(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_client_gateway_rest_bad_request(request_type=client_gateways_service.GetClientGatewayRequest): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/clientGateways/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
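A mocked 400 surfaces as `core_exceptions.BadRequest` because api_core maps HTTP status codes onto typed exceptions; a minimal, self-contained illustration:

.. code-block:: python

    from google.api_core import exceptions as core_exceptions

    # api_core's status-code mapping: 400 -> BadRequest.
    exc = core_exceptions.from_http_status(400, 'bad request')
    assert isinstance(exc, core_exceptions.BadRequest)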
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_client_gateway(request) - - -@pytest.mark.parametrize("request_type", [ - client_gateways_service.GetClientGatewayRequest, - dict, -]) -def test_get_client_gateway_rest_call_success(request_type): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/clientGateways/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = client_gateways_service.ClientGateway( - name='name_value', - state=client_gateways_service.ClientGateway.State.CREATING, - id='id_value', - client_connector_service='client_connector_service_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = client_gateways_service.ClientGateway.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_client_gateway(request) - - # Establish that the response is the type that we expect. 
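A note on the serialization used above: `ClientGateway` is a proto-plus type, and `ClientGateway.pb(...)` exposes the underlying protobuf message so `json_format` can handle it, e.g. (using the `client_gateways_service` module these tests already import):

.. code-block:: python

    from google.protobuf import json_format

    # .pb() yields the raw protobuf message beneath the proto-plus wrapper.
    msg = client_gateways_service.ClientGateway(name='name_value')
    payload = json_format.MessageToJson(client_gateways_service.ClientGateway.pb(msg))

The client performs the inverse conversion when parsing the mocked body, which is what the field assertions that follow rely on.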
- assert isinstance(response, client_gateways_service.ClientGateway) - assert response.name == 'name_value' - assert response.state == client_gateways_service.ClientGateway.State.CREATING - assert response.id == 'id_value' - assert response.client_connector_service == 'client_connector_service_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_client_gateway_rest_interceptors(null_interceptor): - transport = transports.ClientGatewaysServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ClientGatewaysServiceRestInterceptor(), - ) - client = ClientGatewaysServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ClientGatewaysServiceRestInterceptor, "post_get_client_gateway") as post, \ - mock.patch.object(transports.ClientGatewaysServiceRestInterceptor, "post_get_client_gateway_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.ClientGatewaysServiceRestInterceptor, "pre_get_client_gateway") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = client_gateways_service.GetClientGatewayRequest.pb(client_gateways_service.GetClientGatewayRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = client_gateways_service.ClientGateway.to_json(client_gateways_service.ClientGateway()) - req.return_value.content = return_value - - request = client_gateways_service.GetClientGatewayRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = client_gateways_service.ClientGateway() - post_with_metadata.return_value = client_gateways_service.ClientGateway(), metadata - - client.get_client_gateway(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_client_gateway_rest_bad_request(request_type=client_gateways_service.CreateClientGatewayRequest): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
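One aside before the mocked 400 below: the success-path test that follows it prunes sample-request subfields that the runtime protobuf dependency may not define. In miniature (`message_cls` and `sample` are hypothetical names):

.. code-block:: python

    # Sketch: keep only subfields the runtime message definition knows about.
    runtime_fields = {f.name for f in message_cls.DESCRIPTOR.fields}
    sample = {k: v for k, v in sample.items() if k in runtime_fields}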
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_client_gateway(request) - - -@pytest.mark.parametrize("request_type", [ - client_gateways_service.CreateClientGatewayRequest, - dict, -]) -def test_create_client_gateway_rest_call_success(request_type): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["client_gateway"] = {'name': 'name_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'state': 1, 'id': 'id_value', 'client_connector_service': 'client_connector_service_value'} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = client_gateways_service.CreateClientGatewayRequest.meta.fields["client_gateway"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["client_gateway"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if 
subfield: - if field_repeated: - for i in range(0, len(request_init["client_gateway"][field])): - del request_init["client_gateway"][field][i][subfield] - else: - del request_init["client_gateway"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_client_gateway(request) - - # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_client_gateway_rest_interceptors(null_interceptor): - transport = transports.ClientGatewaysServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ClientGatewaysServiceRestInterceptor(), - ) - client = ClientGatewaysServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.ClientGatewaysServiceRestInterceptor, "post_create_client_gateway") as post, \ - mock.patch.object(transports.ClientGatewaysServiceRestInterceptor, "post_create_client_gateway_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.ClientGatewaysServiceRestInterceptor, "pre_create_client_gateway") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = client_gateways_service.CreateClientGatewayRequest.pb(client_gateways_service.CreateClientGatewayRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = client_gateways_service.CreateClientGatewayRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.create_client_gateway(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_client_gateway_rest_bad_request(request_type=client_gateways_service.DeleteClientGatewayRequest): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/clientGateways/sample3'} - request = request_type(**request_init) - - # Mock the 
http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_client_gateway(request) - - -@pytest.mark.parametrize("request_type", [ - client_gateways_service.DeleteClientGatewayRequest, - dict, -]) -def test_delete_client_gateway_rest_call_success(request_type): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/clientGateways/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_client_gateway(request) - - # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_client_gateway_rest_interceptors(null_interceptor): - transport = transports.ClientGatewaysServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ClientGatewaysServiceRestInterceptor(), - ) - client = ClientGatewaysServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.ClientGatewaysServiceRestInterceptor, "post_delete_client_gateway") as post, \ - mock.patch.object(transports.ClientGatewaysServiceRestInterceptor, "post_delete_client_gateway_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.ClientGatewaysServiceRestInterceptor, "pre_delete_client_gateway") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = client_gateways_service.DeleteClientGatewayRequest.pb(client_gateways_service.DeleteClientGatewayRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = client_gateways_service.DeleteClientGatewayRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = 
request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.delete_client_gateway(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_location(request) - - -@pytest.mark.parametrize("request_type", [ - locations_pb2.GetLocationRequest, - dict, -]) -def test_get_location_rest(request_type): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.Location() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_location(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocationsRequest): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
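These location, IAM, and operation tests cover mixin RPCs that ride on the same REST transport as the service methods; they assert only type round-tripping, since the behavior lives in api_core. Note the mixins take plain protobuf requests rather than proto-plus types, e.g.:

.. code-block:: python

    # Mixin calls use *_pb2 request messages directly.
    request = locations_pb2.ListLocationsRequest(name='projects/sample1')
    response = client.list_locations(request)  # locations_pb2.ListLocationsResponse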
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_locations(request) - - -@pytest.mark.parametrize("request_type", [ - locations_pb2.ListLocationsRequest, - dict, -]) -def test_list_locations_rest(request_type): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.ListLocationsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_locations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - - -def test_get_iam_policy_rest_bad_request(request_type=iam_policy_pb2.GetIamPolicyRequest): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/appConnections/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_iam_policy(request) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.GetIamPolicyRequest, - dict, -]) -def test_get_iam_policy_rest(request_type): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'resource': 'projects/sample1/locations/sample2/appConnections/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = policy_pb2.Policy() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_iam_policy(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - -def test_set_iam_policy_rest_bad_request(request_type=iam_policy_pb2.SetIamPolicyRequest): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/appConnections/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.set_iam_policy(request) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.SetIamPolicyRequest, - dict, -]) -def test_set_iam_policy_rest(request_type): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'resource': 'projects/sample1/locations/sample2/appConnections/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.set_iam_policy(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - -def test_test_iam_permissions_rest_bad_request(request_type=iam_policy_pb2.TestIamPermissionsRequest): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/appConnections/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.test_iam_permissions(request) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.TestIamPermissionsRequest, - dict, -]) -def test_test_iam_permissions_rest(request_type): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'resource': 'projects/sample1/locations/sample2/appConnections/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = iam_policy_pb2.TestIamPermissionsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.test_iam_permissions(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - -def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.cancel_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) -def test_cancel_operation_rest(request_type): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. 
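Here the "appropriate" value is `None`: cancel- and delete-operation RPCs return `google.protobuf.Empty`, which the client surfaces as no payload at all; that is why the mocked body below is `'{}'`. A compact restatement of what the test checks:

.. code-block:: python

    # An Empty response ('{}' on the wire) comes back as None.
    assert client.cancel_operation(request) is None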
- return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.DeleteOperationRequest, - dict, -]) -def test_delete_operation_rest(request_type): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '{}' - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_operation(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_operation(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) -def test_get_operation_rest(request_type): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_operations(request) - - -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) -def test_list_operations_rest(request_type): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_operations(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_initialize_client_w_rest(): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_client_gateways_empty_call_rest(): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_client_gateways), - '__call__') as call: - client.list_client_gateways(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = client_gateways_service.ListClientGatewaysRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_client_gateway_empty_call_rest(): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_client_gateway), - '__call__') as call: - client.get_client_gateway(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = client_gateways_service.GetClientGatewayRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_client_gateway_empty_call_rest(): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_client_gateway), - '__call__') as call: - client.create_client_gateway(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = client_gateways_service.CreateClientGatewayRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_client_gateway_empty_call_rest(): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_client_gateway), - '__call__') as call: - client.delete_client_gateway(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = client_gateways_service.DeleteClientGatewayRequest() - - assert args[0] == request_msg - - -def test_client_gateways_service_rest_lro_client(): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - transport = client.transport - - # Ensure that we have an api-core operations client. 
- assert isinstance(
- transport.operations_client,
- operations_v1.AbstractOperationsClient,
- )
-
- # Ensure that subsequent calls to the property send the exact same object.
- assert transport.operations_client is transport.operations_client
-
-def test_transport_grpc_default():
- # A client should use the gRPC transport by default.
- client = ClientGatewaysServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- assert isinstance(
- client.transport,
- transports.ClientGatewaysServiceGrpcTransport,
- )
-
-def test_client_gateways_service_base_transport_error():
- # Passing both a credentials object and credentials_file should raise an error
- with pytest.raises(core_exceptions.DuplicateCredentialArgs):
- transport = transports.ClientGatewaysServiceTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- credentials_file="credentials.json"
- )
-
-
-def test_client_gateways_service_base_transport():
- # Instantiate the base transport.
- with mock.patch('google.cloud.beyondcorp_clientgateways_v1.services.client_gateways_service.transports.ClientGatewaysServiceTransport.__init__') as Transport:
- Transport.return_value = None
- transport = transports.ClientGatewaysServiceTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Every method on the transport should just blindly
- # raise NotImplementedError.
- methods = (
- 'list_client_gateways',
- 'get_client_gateway',
- 'create_client_gateway',
- 'delete_client_gateway',
- 'set_iam_policy',
- 'get_iam_policy',
- 'test_iam_permissions',
- 'get_location',
- 'list_locations',
- 'get_operation',
- 'cancel_operation',
- 'delete_operation',
- 'list_operations',
- )
- for method in methods:
- with pytest.raises(NotImplementedError):
- getattr(transport, method)(request=object())
-
- with pytest.raises(NotImplementedError):
- transport.close()
-
- # Additionally, the LRO client (a property) should
- # also raise NotImplementedError
- with pytest.raises(NotImplementedError):
- transport.operations_client
-
- # Catch all for all remaining methods and properties
- remainder = [
- 'kind',
- ]
- for r in remainder:
- with pytest.raises(NotImplementedError):
- getattr(transport, r)()
-
-
-def test_client_gateways_service_base_transport_with_credentials_file():
- # Instantiate the base transport with a credentials file
- with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.beyondcorp_clientgateways_v1.services.client_gateways_service.transports.ClientGatewaysServiceTransport._prep_wrapped_messages') as Transport:
- Transport.return_value = None
- load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
- transport = transports.ClientGatewaysServiceTransport(
- credentials_file="credentials.json",
- quota_project_id="octopus",
- )
- load_creds.assert_called_once_with("credentials.json",
- scopes=None,
- default_scopes=(
- 'https://www.googleapis.com/auth/cloud-platform',
- ),
- quota_project_id="octopus",
- )
-
-
-def test_client_gateways_service_base_transport_with_adc():
- # Test the default credentials are used if credentials and credentials_file are None.
- with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.beyondcorp_clientgateways_v1.services.client_gateways_service.transports.ClientGatewaysServiceTransport._prep_wrapped_messages') as Transport:
- Transport.return_value = None
- adc.return_value = (ga_credentials.AnonymousCredentials(), None)
- transport = transports.ClientGatewaysServiceTransport()
- adc.assert_called_once()
-
-
-def test_client_gateways_service_auth_adc():
- # If no credentials are provided, we should use ADC credentials.
- with mock.patch.object(google.auth, 'default', autospec=True) as adc:
- adc.return_value = (ga_credentials.AnonymousCredentials(), None)
- ClientGatewaysServiceClient()
- adc.assert_called_once_with(
- scopes=None,
- default_scopes=(
- 'https://www.googleapis.com/auth/cloud-platform',
- ),
- quota_project_id=None,
- )
-
-
-@pytest.mark.parametrize(
- "transport_class",
- [
- transports.ClientGatewaysServiceGrpcTransport,
- transports.ClientGatewaysServiceGrpcAsyncIOTransport,
- ],
-)
-def test_client_gateways_service_transport_auth_adc(transport_class):
- # If credentials and host are not provided, the transport class should use
- # ADC credentials.
- with mock.patch.object(google.auth, 'default', autospec=True) as adc:
- adc.return_value = (ga_credentials.AnonymousCredentials(), None)
- transport_class(quota_project_id="octopus", scopes=["1", "2"])
- adc.assert_called_once_with(
- scopes=["1", "2"],
- default_scopes=('https://www.googleapis.com/auth/cloud-platform',),
- quota_project_id="octopus",
- )
-
-
-@pytest.mark.parametrize(
- "transport_class",
- [
- transports.ClientGatewaysServiceGrpcTransport,
- transports.ClientGatewaysServiceGrpcAsyncIOTransport,
- transports.ClientGatewaysServiceRestTransport,
- ],
-)
-def test_client_gateways_service_transport_auth_gdch_credentials(transport_class):
- host = 'https://language.com'
- api_audience_tests = [None, 'https://language2.com']
- api_audience_expect = [host, 'https://language2.com']
- for t, e in zip(api_audience_tests, api_audience_expect):
- with mock.patch.object(google.auth, 'default', autospec=True) as adc:
- gdch_mock = mock.MagicMock()
- type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock)
- adc.return_value = (gdch_mock, None)
- transport_class(host=host, api_audience=t)
- gdch_mock.with_gdch_audience.assert_called_once_with(
- e
- )
-
-
-@pytest.mark.parametrize(
- "transport_class,grpc_helpers",
- [
- (transports.ClientGatewaysServiceGrpcTransport, grpc_helpers),
- (transports.ClientGatewaysServiceGrpcAsyncIOTransport, grpc_helpers_async)
- ],
-)
-def test_client_gateways_service_transport_create_channel(transport_class, grpc_helpers):
- # If credentials and host are not provided, the transport class should use
- # ADC credentials.
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object(
- grpc_helpers, "create_channel", autospec=True
- ) as create_channel:
- creds = ga_credentials.AnonymousCredentials()
- adc.return_value = (creds, None)
- transport_class(
- quota_project_id="octopus",
- scopes=["1", "2"]
- )
-
- create_channel.assert_called_with(
- "beyondcorp.googleapis.com:443",
- credentials=creds,
- credentials_file=None,
- quota_project_id="octopus",
- default_scopes=(
- 'https://www.googleapis.com/auth/cloud-platform',
- ),
- scopes=["1", "2"],
- default_host="beyondcorp.googleapis.com",
- ssl_credentials=None,
- options=[
- ("grpc.max_send_message_length", -1),
- ("grpc.max_receive_message_length", -1),
- ],
- )
-
-
-@pytest.mark.parametrize("transport_class", [transports.ClientGatewaysServiceGrpcTransport, transports.ClientGatewaysServiceGrpcAsyncIOTransport])
-def test_client_gateways_service_grpc_transport_client_cert_source_for_mtls(
- transport_class
-):
- cred = ga_credentials.AnonymousCredentials()
-
- # Check ssl_channel_credentials is used if provided.
- with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
- mock_ssl_channel_creds = mock.Mock()
- transport_class(
- host="squid.clam.whelk",
- credentials=cred,
- ssl_channel_credentials=mock_ssl_channel_creds
- )
- mock_create_channel.assert_called_once_with(
- "squid.clam.whelk:443",
- credentials=cred,
- credentials_file=None,
- scopes=None,
- ssl_credentials=mock_ssl_channel_creds,
- quota_project_id=None,
- options=[
- ("grpc.max_send_message_length", -1),
- ("grpc.max_receive_message_length", -1),
- ],
- )
-
- # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
- # is used.
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
- with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
- transport_class(
- credentials=cred,
- client_cert_source_for_mtls=client_cert_source_callback
- )
- expected_cert, expected_key = client_cert_source_callback()
- mock_ssl_cred.assert_called_once_with(
- certificate_chain=expected_cert,
- private_key=expected_key
- )
-
-def test_client_gateways_service_http_transport_client_cert_source_for_mtls():
- cred = ga_credentials.AnonymousCredentials()
- with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
- transports.ClientGatewaysServiceRestTransport(
- credentials=cred,
- client_cert_source_for_mtls=client_cert_source_callback
- )
- mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
-
-
-@pytest.mark.parametrize("transport_name", [
- "grpc",
- "grpc_asyncio",
- "rest",
-])
-def test_client_gateways_service_host_no_port(transport_name):
- client = ClientGatewaysServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- client_options=client_options.ClientOptions(api_endpoint='beyondcorp.googleapis.com'),
- transport=transport_name,
- )
- assert client.transport._host == (
- 'beyondcorp.googleapis.com:443'
- if transport_name in ['grpc', 'grpc_asyncio']
- else 'https://beyondcorp.googleapis.com'
- )
-
-@pytest.mark.parametrize("transport_name", [
- "grpc",
- "grpc_asyncio",
- "rest",
-])
-def test_client_gateways_service_host_with_port(transport_name):
- client = ClientGatewaysServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- client_options=client_options.ClientOptions(api_endpoint='beyondcorp.googleapis.com:8000'),
- transport=transport_name,
- )
- assert client.transport._host == (
- 'beyondcorp.googleapis.com:8000'
- if transport_name in ['grpc', 'grpc_asyncio']
- else 'https://beyondcorp.googleapis.com:8000'
- )
-
-@pytest.mark.parametrize("transport_name", [
- "rest",
-])
-def test_client_gateways_service_client_transport_session_collision(transport_name):
- creds1 = ga_credentials.AnonymousCredentials()
- creds2 = ga_credentials.AnonymousCredentials()
- client1 = ClientGatewaysServiceClient(
- credentials=creds1,
- transport=transport_name,
- )
- client2 = ClientGatewaysServiceClient(
- credentials=creds2,
- transport=transport_name,
- )
- session1 = client1.transport.list_client_gateways._session
- session2 = client2.transport.list_client_gateways._session
- assert session1 != session2
- session1 = client1.transport.get_client_gateway._session
- session2 = client2.transport.get_client_gateway._session
- assert session1 != session2
- session1 = client1.transport.create_client_gateway._session
- session2 = client2.transport.create_client_gateway._session
- assert session1 != session2
- session1 = client1.transport.delete_client_gateway._session
- session2 = client2.transport.delete_client_gateway._session
- assert session1 != session2
-def test_client_gateways_service_grpc_transport_channel():
- channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
- # Check that channel is used if provided.
- transport = transports.ClientGatewaysServiceGrpcTransport(
- host="squid.clam.whelk",
- channel=channel,
- )
- assert transport.grpc_channel == channel
- assert transport._host == "squid.clam.whelk:443"
- assert transport._ssl_channel_credentials is None
-
-
-def test_client_gateways_service_grpc_asyncio_transport_channel():
- channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
- # Check that channel is used if provided.
- transport = transports.ClientGatewaysServiceGrpcAsyncIOTransport(
- host="squid.clam.whelk",
- channel=channel,
- )
- assert transport.grpc_channel == channel
- assert transport._host == "squid.clam.whelk:443"
- assert transport._ssl_channel_credentials is None
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.ClientGatewaysServiceGrpcTransport, transports.ClientGatewaysServiceGrpcAsyncIOTransport]) -def test_client_gateways_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.ClientGatewaysServiceGrpcTransport, transports.ClientGatewaysServiceGrpcAsyncIOTransport]) -def test_client_gateways_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_client_gateways_service_grpc_lro_client(): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. 
- assert transport.operations_client is transport.operations_client
-
-
-def test_client_gateways_service_grpc_lro_async_client():
- client = ClientGatewaysServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc_asyncio',
- )
- transport = client.transport
-
- # Ensure that we have an api-core operations client.
- assert isinstance(
- transport.operations_client,
- operations_v1.OperationsAsyncClient,
- )
-
- # Ensure that subsequent calls to the property send the exact same object.
- assert transport.operations_client is transport.operations_client
-
-
-def test_client_gateway_path():
- project = "squid"
- location = "clam"
- client_gateway = "whelk"
- expected = "projects/{project}/locations/{location}/clientGateways/{client_gateway}".format(project=project, location=location, client_gateway=client_gateway, )
- actual = ClientGatewaysServiceClient.client_gateway_path(project, location, client_gateway)
- assert expected == actual
-
-
-def test_parse_client_gateway_path():
- expected = {
- "project": "octopus",
- "location": "oyster",
- "client_gateway": "nudibranch",
- }
- path = ClientGatewaysServiceClient.client_gateway_path(**expected)
-
- # Check that the path construction is reversible.
- actual = ClientGatewaysServiceClient.parse_client_gateway_path(path)
- assert expected == actual
-
-def test_common_billing_account_path():
- billing_account = "cuttlefish"
- expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, )
- actual = ClientGatewaysServiceClient.common_billing_account_path(billing_account)
- assert expected == actual
-
-
-def test_parse_common_billing_account_path():
- expected = {
- "billing_account": "mussel",
- }
- path = ClientGatewaysServiceClient.common_billing_account_path(**expected)
-
- # Check that the path construction is reversible.
- actual = ClientGatewaysServiceClient.parse_common_billing_account_path(path)
- assert expected == actual
-
-def test_common_folder_path():
- folder = "winkle"
- expected = "folders/{folder}".format(folder=folder, )
- actual = ClientGatewaysServiceClient.common_folder_path(folder)
- assert expected == actual
-
-
-def test_parse_common_folder_path():
- expected = {
- "folder": "nautilus",
- }
- path = ClientGatewaysServiceClient.common_folder_path(**expected)
-
- # Check that the path construction is reversible.
- actual = ClientGatewaysServiceClient.parse_common_folder_path(path)
- assert expected == actual
-
-def test_common_organization_path():
- organization = "scallop"
- expected = "organizations/{organization}".format(organization=organization, )
- actual = ClientGatewaysServiceClient.common_organization_path(organization)
- assert expected == actual
-
-
-def test_parse_common_organization_path():
- expected = {
- "organization": "abalone",
- }
- path = ClientGatewaysServiceClient.common_organization_path(**expected)
-
- # Check that the path construction is reversible.
- actual = ClientGatewaysServiceClient.parse_common_organization_path(path)
- assert expected == actual
-
-def test_common_project_path():
- project = "squid"
- expected = "projects/{project}".format(project=project, )
- actual = ClientGatewaysServiceClient.common_project_path(project)
- assert expected == actual
-
-
-def test_parse_common_project_path():
- expected = {
- "project": "clam",
- }
- path = ClientGatewaysServiceClient.common_project_path(**expected)
-
- # Check that the path construction is reversible.
- actual = ClientGatewaysServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "whelk" - location = "octopus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = ClientGatewaysServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - } - path = ClientGatewaysServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = ClientGatewaysServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.ClientGatewaysServiceTransport, '_prep_wrapped_messages') as prep: - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.ClientGatewaysServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = ClientGatewaysServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_delete_operation(transport: str = "grpc"): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc_asyncio"): - client = ClientGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.DeleteOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert response is None - -def test_delete_operation_field_headers(): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = None - - client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_delete_operation_field_headers_async(): - client = ClientGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.DeleteOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.delete_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_delete_operation_from_dict(): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_delete_operation_from_dict_async(): - client = ClientGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.delete_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_cancel_operation(transport: str = "grpc"): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc_asyncio"): - client = ClientGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - -def test_cancel_operation_field_headers(): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = ClientGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_cancel_operation_from_dict(): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = ClientGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - None - ) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc_asyncio"): - client = ClientGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - -def test_get_operation_field_headers(): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = ClientGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_get_operation_from_dict(): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = ClientGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_operations(transport: str = "grpc"): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - response = client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) -@pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc_asyncio"): - client = ClientGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = operations_pb2.ListOperationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) - -def test_list_operations_field_headers(): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = operations_pb2.ListOperationsResponse() - - client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_operations_field_headers_async(): - client = ClientGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.ListOperationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - await client.list_operations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_operations_from_dict(): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.ListOperationsResponse() - - response = client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_operations_from_dict_async(): - client = ClientGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_operations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.ListOperationsResponse() - ) - response = await client.list_operations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_list_locations(transport: str = "grpc"): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = ClientGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - -def test_list_locations_field_headers(): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() - - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = ClientGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_locations_from_dict(): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = ClientGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_location(transport: str = "grpc"): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = ClientGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, locations_pb2.Location)
-
-def test_get_location_field_headers():
- client = ClientGatewaysServiceClient(
- credentials=ga_credentials.AnonymousCredentials())
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = locations_pb2.GetLocationRequest()
- request.name = "locations/abc"
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.get_location), "__call__") as call:
- call.return_value = locations_pb2.Location()
-
- client.get_location(request)
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
-@pytest.mark.asyncio
-async def test_get_location_field_headers_async():
- client = ClientGatewaysServiceAsyncClient(
- credentials=async_anonymous_credentials()
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = locations_pb2.GetLocationRequest()
- request.name = "locations/abc"
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.get_location), "__call__") as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- locations_pb2.Location()
- )
- await client.get_location(request)
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
-
-def test_get_location_from_dict():
- client = ClientGatewaysServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.get_location), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = locations_pb2.Location()
-
- response = client.get_location(
- request={
- "name": "locations/abc",
- }
- )
- call.assert_called()
-@pytest.mark.asyncio
-async def test_get_location_from_dict_async():
- client = ClientGatewaysServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.get_location), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- locations_pb2.Location()
- )
- response = await client.get_location(
- request={
- "name": "locations",
- }
- )
- call.assert_called()
-
-
-def test_set_iam_policy(transport: str = "grpc"):
- client = ClientGatewaysServiceClient(
- credentials=ga_credentials.AnonymousCredentials(), transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = iam_policy_pb2.SetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) - response = client.set_iam_policy(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" -@pytest.mark.asyncio -async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): - client = ClientGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.SetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy(version=774, etag=b"etag_blob",) - ) - response = await client.set_iam_policy(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - -def test_set_iam_policy_field_headers(): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - - client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] -@pytest.mark.asyncio -async def test_set_iam_policy_field_headers_async(): - client = ClientGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - - await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called.
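- # (mock_calls records each stub invocation as a (name, args, kwargs) triple; the request is always args[0].)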
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - -def test_set_iam_policy_from_dict(): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - - response = client.set_iam_policy( - request={ - "resource": "resource_value", - "policy": policy_pb2.Policy(version=774), - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_set_iam_policy_from_dict_async(): - client = ClientGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy() - ) - - response = await client.set_iam_policy( - request={ - "resource": "resource_value", - "policy": policy_pb2.Policy(version=774), - } - ) - call.assert_called() - -def test_get_iam_policy(transport: str = "grpc"): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.GetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) - - response = client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - - -@pytest.mark.asyncio -async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): - client = ClientGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.GetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy(version=774, etag=b"etag_blob",) - ) - - response = await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. 
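- # (policy_pb2.Policy is a raw protobuf message rather than a proto-plus wrapper, so a plain isinstance check suffices.)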
- assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - - -def test_get_iam_policy_field_headers(): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_iam_policy_field_headers_async(): - client = ClientGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - - await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -def test_get_iam_policy_from_dict(): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - - response = client.get_iam_policy( - request={ - "resource": "resource_value", - "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_get_iam_policy_from_dict_async(): - client = ClientGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. 
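- # (The client converts the plain dict request into a GetIamPolicyRequest before invoking the stub.)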
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy() - ) - - response = await client.get_iam_policy( - request={ - "resource": "resource_value", - "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - -def test_test_iam_permissions(transport: str = "grpc"): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=["permissions_value"], - ) - - response = client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - assert response.permissions == ["permissions_value"] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): - client = ClientGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse(permissions=["permissions_value"],) - ) - - response = await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - assert response.permissions == ["permissions_value"] - - -def test_test_iam_permissions_field_headers(): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
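- # (Routing headers are passed to the stub through the "metadata" keyword argument.)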
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_field_headers_async(): - client = ClientGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse() - ) - - await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -def test_test_iam_permissions_from_dict(): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - response = client.test_iam_permissions( - request={ - "resource": "resource_value", - "permissions": ["permissions_value"], - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_test_iam_permissions_from_dict_async(): - client = ClientGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
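- # (An empty TestIamPermissionsResponse is sufficient here; the test only verifies that the stub was invoked.)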
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse() - ) - - response = await client.test_iam_permissions( - request={ - "resource": "resource_value", - "permissions": ["permissions_value"], - } - ) - call.assert_called() - - -def test_transport_close_grpc(): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = ClientGatewaysServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close_rest(): - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = ClientGatewaysServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. - with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (ClientGatewaysServiceClient, transports.ClientGatewaysServiceGrpcTransport), - (ClientGatewaysServiceAsyncClient, transports.ClientGatewaysServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/.coveragerc b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/.coveragerc deleted file mode 100644 index 5430380e1931..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/bigquery_analyticshub/__init__.py - google/cloud/bigquery_analyticshub/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/.flake8 b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/.flake8 deleted file mode 100644 index 29227d4cf419..000000000000 --- 
a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. - **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/MANIFEST.in b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/MANIFEST.in deleted file mode 100644 index 3ff5adf1bfad..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/bigquery_analyticshub *.py -recursive-include google/cloud/bigquery_analyticshub_v1 *.py diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/README.rst b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/README.rst deleted file mode 100644 index d5d608ecec10..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/README.rst +++ /dev/null @@ -1,143 +0,0 @@ -Python Client for Google Cloud Bigquery Analyticshub API -========================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Bigquery Analyticshub API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv <your-env> - source <your-env>/bin/activate - <your-env>/bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. code-block:: console - - python3 -m venv <your-env> - <your-env>\Scripts\activate - <your-env>\Scripts\pip.exe install \path\to\library - - -Logging ------- - -This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes. -Note the following: - -#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging. -#. 
Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**. -#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below. - - -Simple, environment-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google -logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged -messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging -event. - -A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log. - -- Valid logging scopes: :code:`google`, :code:`google.cloud.bigquery_analyticshub_v1`, :code:`google.api`, :code:`google.auth`, etc. -- Invalid logging scopes: :code:`foo`, :code:`123`, etc. - -**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers. - - -Examples -^^^^^^^^ - -- Enabling the default handler for all Google-based loggers - -.. code-block:: console - - export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google - -- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`): - -.. code-block:: console - - export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1 - - -Advanced, code-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -You can also configure a valid logging scope using Python's standard `logging` mechanism. - - -Examples -^^^^^^^^ - -- Configuring a handler for all Google-based loggers - -.. code-block:: python - - import logging - - from google.cloud.translate_v3 import translate - - base_logger = logging.getLogger("google") - base_logger.addHandler(logging.StreamHandler()) - base_logger.setLevel(logging.DEBUG) - -- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`): - -.. code-block:: python - - import logging - - from google.cloud.translate_v3 import translate - - base_logger = logging.getLogger("google.cloud.library_v1") - base_logger.addHandler(logging.StreamHandler()) - base_logger.setLevel(logging.DEBUG) - - -Logging details -~~~~~~~~~~~~~~~ - -#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root - logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set - :code:`logging.getLogger("google").propagate = True` in your code. -#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for - one library, but decide you need to also set up environment-based logging configuration for another library. - - #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual - if the code-based configuration gets applied first. - -#. 
The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get - executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured. - (This is the reason for 2.i. above.) diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/docs/_static/custom.css b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/docs/_static/custom.css deleted file mode 100644 index 06423be0b592..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/docs/_static/custom.css +++ /dev/null @@ -1,3 +0,0 @@ -dl.field-list > dt { - min-width: 100px -} diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/docs/bigquery_analyticshub_v1/analytics_hub_service.rst b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/docs/bigquery_analyticshub_v1/analytics_hub_service.rst deleted file mode 100644 index a523d17403f1..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/docs/bigquery_analyticshub_v1/analytics_hub_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -AnalyticsHubService -------------------------------------- - -.. automodule:: google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service - :members: - :inherited-members: - -.. automodule:: google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/docs/bigquery_analyticshub_v1/services_.rst b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/docs/bigquery_analyticshub_v1/services_.rst deleted file mode 100644 index 97fa91865983..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/docs/bigquery_analyticshub_v1/services_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Bigquery Analyticshub v1 API -====================================================== -.. toctree:: - :maxdepth: 2 - - analytics_hub_service diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/docs/bigquery_analyticshub_v1/types_.rst b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/docs/bigquery_analyticshub_v1/types_.rst deleted file mode 100644 index 9263d7bbb31d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/docs/bigquery_analyticshub_v1/types_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Bigquery Analyticshub v1 API -=================================================== - -.. automodule:: google.cloud.bigquery_analyticshub_v1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/docs/conf.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/docs/conf.py deleted file mode 100644 index 5a3ee6bed769..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-bigquery-analyticshub documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGELOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = u"google-cloud-bigquery-analyticshub" -copyright = u"2023, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = 'en' - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). 
-# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 
-# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-bigquery-analyticshub-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-bigquery-analyticshub.tex", - u"google-cloud-bigquery-analyticshub Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-bigquery-analyticshub", - u"Google Cloud Bigquery Analyticshub Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. 
List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-bigquery-analyticshub", - u"google-cloud-bigquery-analyticshub Documentation", - author, - "google-cloud-bigquery-analyticshub", - "GAPIC library for Google Cloud Bigquery Analyticshub API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/docs/index.rst b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/docs/index.rst deleted file mode 100644 index 7c1f98afda9c..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - bigquery_analyticshub_v1/services_ - bigquery_analyticshub_v1/types_ diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub/__init__.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub/__init__.py deleted file mode 100644 index 8e41e6500c3d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub/__init__.py +++ /dev/null @@ -1,103 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.bigquery_analyticshub import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.client import AnalyticsHubServiceClient -from google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.async_client import AnalyticsHubServiceAsyncClient - -from google.cloud.bigquery_analyticshub_v1.types.analyticshub import CreateDataExchangeRequest -from google.cloud.bigquery_analyticshub_v1.types.analyticshub import CreateListingRequest -from google.cloud.bigquery_analyticshub_v1.types.analyticshub import DataExchange -from google.cloud.bigquery_analyticshub_v1.types.analyticshub import DataProvider -from google.cloud.bigquery_analyticshub_v1.types.analyticshub import DeleteDataExchangeRequest -from google.cloud.bigquery_analyticshub_v1.types.analyticshub import DeleteListingRequest -from google.cloud.bigquery_analyticshub_v1.types.analyticshub import DeleteSubscriptionRequest -from google.cloud.bigquery_analyticshub_v1.types.analyticshub import DestinationDataset -from google.cloud.bigquery_analyticshub_v1.types.analyticshub import DestinationDatasetReference -from google.cloud.bigquery_analyticshub_v1.types.analyticshub import GetDataExchangeRequest -from google.cloud.bigquery_analyticshub_v1.types.analyticshub import GetListingRequest -from google.cloud.bigquery_analyticshub_v1.types.analyticshub import GetSubscriptionRequest -from google.cloud.bigquery_analyticshub_v1.types.analyticshub import ListDataExchangesRequest -from google.cloud.bigquery_analyticshub_v1.types.analyticshub import ListDataExchangesResponse -from google.cloud.bigquery_analyticshub_v1.types.analyticshub import Listing -from google.cloud.bigquery_analyticshub_v1.types.analyticshub import ListListingsRequest -from google.cloud.bigquery_analyticshub_v1.types.analyticshub import ListListingsResponse -from google.cloud.bigquery_analyticshub_v1.types.analyticshub import ListOrgDataExchangesRequest -from google.cloud.bigquery_analyticshub_v1.types.analyticshub import ListOrgDataExchangesResponse -from google.cloud.bigquery_analyticshub_v1.types.analyticshub import ListSharedResourceSubscriptionsRequest -from google.cloud.bigquery_analyticshub_v1.types.analyticshub import ListSharedResourceSubscriptionsResponse -from google.cloud.bigquery_analyticshub_v1.types.analyticshub import ListSubscriptionsRequest -from google.cloud.bigquery_analyticshub_v1.types.analyticshub import ListSubscriptionsResponse -from google.cloud.bigquery_analyticshub_v1.types.analyticshub import OperationMetadata -from google.cloud.bigquery_analyticshub_v1.types.analyticshub import Publisher -from google.cloud.bigquery_analyticshub_v1.types.analyticshub import RefreshSubscriptionRequest -from google.cloud.bigquery_analyticshub_v1.types.analyticshub import RefreshSubscriptionResponse -from google.cloud.bigquery_analyticshub_v1.types.analyticshub import RevokeSubscriptionRequest -from google.cloud.bigquery_analyticshub_v1.types.analyticshub import RevokeSubscriptionResponse -from google.cloud.bigquery_analyticshub_v1.types.analyticshub import SharingEnvironmentConfig -from google.cloud.bigquery_analyticshub_v1.types.analyticshub import SubscribeDataExchangeRequest -from google.cloud.bigquery_analyticshub_v1.types.analyticshub import SubscribeDataExchangeResponse -from google.cloud.bigquery_analyticshub_v1.types.analyticshub import SubscribeListingRequest -from google.cloud.bigquery_analyticshub_v1.types.analyticshub import 
SubscribeListingResponse -from google.cloud.bigquery_analyticshub_v1.types.analyticshub import Subscription -from google.cloud.bigquery_analyticshub_v1.types.analyticshub import UpdateDataExchangeRequest -from google.cloud.bigquery_analyticshub_v1.types.analyticshub import UpdateListingRequest -from google.cloud.bigquery_analyticshub_v1.types.analyticshub import DiscoveryType - -__all__ = ('AnalyticsHubServiceClient', - 'AnalyticsHubServiceAsyncClient', - 'CreateDataExchangeRequest', - 'CreateListingRequest', - 'DataExchange', - 'DataProvider', - 'DeleteDataExchangeRequest', - 'DeleteListingRequest', - 'DeleteSubscriptionRequest', - 'DestinationDataset', - 'DestinationDatasetReference', - 'GetDataExchangeRequest', - 'GetListingRequest', - 'GetSubscriptionRequest', - 'ListDataExchangesRequest', - 'ListDataExchangesResponse', - 'Listing', - 'ListListingsRequest', - 'ListListingsResponse', - 'ListOrgDataExchangesRequest', - 'ListOrgDataExchangesResponse', - 'ListSharedResourceSubscriptionsRequest', - 'ListSharedResourceSubscriptionsResponse', - 'ListSubscriptionsRequest', - 'ListSubscriptionsResponse', - 'OperationMetadata', - 'Publisher', - 'RefreshSubscriptionRequest', - 'RefreshSubscriptionResponse', - 'RevokeSubscriptionRequest', - 'RevokeSubscriptionResponse', - 'SharingEnvironmentConfig', - 'SubscribeDataExchangeRequest', - 'SubscribeDataExchangeResponse', - 'SubscribeListingRequest', - 'SubscribeListingResponse', - 'Subscription', - 'UpdateDataExchangeRequest', - 'UpdateListingRequest', - 'DiscoveryType', -) diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub/gapic_version.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub/py.typed b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub/py.typed deleted file mode 100644 index 77f387cebe7e..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-bigquery-analyticshub package uses inline types. 
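For context on the package surface being deleted above: the top-level :code:`google.cloud.bigquery_analyticshub` module simply re-exports the v1 clients and request/response types, so user code never needs the versioned import path. A minimal, hypothetical usage sketch (the project and location in :code:`parent` are placeholders, and the client assumes application-default credentials are available):

.. code-block:: python

    from google.cloud.bigquery_analyticshub import (
        AnalyticsHubServiceClient,
        ListDataExchangesRequest,
    )

    client = AnalyticsHubServiceClient()
    # list_data_exchanges returns an auto-paginating iterable of DataExchange.
    request = ListDataExchangesRequest(parent="projects/my-project/locations/us")
    for data_exchange in client.list_data_exchanges(request=request):
        print(data_exchange.name)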
diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/__init__.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/__init__.py deleted file mode 100644 index 7c3a0ee29459..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/__init__.py +++ /dev/null @@ -1,104 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.bigquery_analyticshub_v1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.analytics_hub_service import AnalyticsHubServiceClient -from .services.analytics_hub_service import AnalyticsHubServiceAsyncClient - -from .types.analyticshub import CreateDataExchangeRequest -from .types.analyticshub import CreateListingRequest -from .types.analyticshub import DataExchange -from .types.analyticshub import DataProvider -from .types.analyticshub import DeleteDataExchangeRequest -from .types.analyticshub import DeleteListingRequest -from .types.analyticshub import DeleteSubscriptionRequest -from .types.analyticshub import DestinationDataset -from .types.analyticshub import DestinationDatasetReference -from .types.analyticshub import GetDataExchangeRequest -from .types.analyticshub import GetListingRequest -from .types.analyticshub import GetSubscriptionRequest -from .types.analyticshub import ListDataExchangesRequest -from .types.analyticshub import ListDataExchangesResponse -from .types.analyticshub import Listing -from .types.analyticshub import ListListingsRequest -from .types.analyticshub import ListListingsResponse -from .types.analyticshub import ListOrgDataExchangesRequest -from .types.analyticshub import ListOrgDataExchangesResponse -from .types.analyticshub import ListSharedResourceSubscriptionsRequest -from .types.analyticshub import ListSharedResourceSubscriptionsResponse -from .types.analyticshub import ListSubscriptionsRequest -from .types.analyticshub import ListSubscriptionsResponse -from .types.analyticshub import OperationMetadata -from .types.analyticshub import Publisher -from .types.analyticshub import RefreshSubscriptionRequest -from .types.analyticshub import RefreshSubscriptionResponse -from .types.analyticshub import RevokeSubscriptionRequest -from .types.analyticshub import RevokeSubscriptionResponse -from .types.analyticshub import SharingEnvironmentConfig -from .types.analyticshub import SubscribeDataExchangeRequest -from .types.analyticshub import SubscribeDataExchangeResponse -from .types.analyticshub import SubscribeListingRequest -from .types.analyticshub import SubscribeListingResponse -from .types.analyticshub import Subscription -from .types.analyticshub import UpdateDataExchangeRequest -from .types.analyticshub import UpdateListingRequest -from .types.analyticshub import DiscoveryType - -__all__ = ( - 'AnalyticsHubServiceAsyncClient', -'AnalyticsHubServiceClient', 
-'CreateDataExchangeRequest', -'CreateListingRequest', -'DataExchange', -'DataProvider', -'DeleteDataExchangeRequest', -'DeleteListingRequest', -'DeleteSubscriptionRequest', -'DestinationDataset', -'DestinationDatasetReference', -'DiscoveryType', -'GetDataExchangeRequest', -'GetListingRequest', -'GetSubscriptionRequest', -'ListDataExchangesRequest', -'ListDataExchangesResponse', -'ListListingsRequest', -'ListListingsResponse', -'ListOrgDataExchangesRequest', -'ListOrgDataExchangesResponse', -'ListSharedResourceSubscriptionsRequest', -'ListSharedResourceSubscriptionsResponse', -'ListSubscriptionsRequest', -'ListSubscriptionsResponse', -'Listing', -'OperationMetadata', -'Publisher', -'RefreshSubscriptionRequest', -'RefreshSubscriptionResponse', -'RevokeSubscriptionRequest', -'RevokeSubscriptionResponse', -'SharingEnvironmentConfig', -'SubscribeDataExchangeRequest', -'SubscribeDataExchangeResponse', -'SubscribeListingRequest', -'SubscribeListingResponse', -'Subscription', -'UpdateDataExchangeRequest', -'UpdateListingRequest', -) diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/gapic_metadata.json b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/gapic_metadata.json deleted file mode 100644 index 35fc21fe2c76..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/gapic_metadata.json +++ /dev/null @@ -1,243 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.bigquery_analyticshub_v1", - "protoPackage": "google.cloud.bigquery.analyticshub.v1", - "schema": "1.0", - "services": { - "AnalyticsHubService": { - "clients": { - "grpc": { - "libraryClient": "AnalyticsHubServiceClient", - "rpcs": { - "CreateDataExchange": { - "methods": [ - "create_data_exchange" - ] - }, - "CreateListing": { - "methods": [ - "create_listing" - ] - }, - "DeleteDataExchange": { - "methods": [ - "delete_data_exchange" - ] - }, - "DeleteListing": { - "methods": [ - "delete_listing" - ] - }, - "DeleteSubscription": { - "methods": [ - "delete_subscription" - ] - }, - "GetDataExchange": { - "methods": [ - "get_data_exchange" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - "GetListing": { - "methods": [ - "get_listing" - ] - }, - "GetSubscription": { - "methods": [ - "get_subscription" - ] - }, - "ListDataExchanges": { - "methods": [ - "list_data_exchanges" - ] - }, - "ListListings": { - "methods": [ - "list_listings" - ] - }, - "ListOrgDataExchanges": { - "methods": [ - "list_org_data_exchanges" - ] - }, - "ListSharedResourceSubscriptions": { - "methods": [ - "list_shared_resource_subscriptions" - ] - }, - "ListSubscriptions": { - "methods": [ - "list_subscriptions" - ] - }, - "RefreshSubscription": { - "methods": [ - "refresh_subscription" - ] - }, - "RevokeSubscription": { - "methods": [ - "revoke_subscription" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "SubscribeDataExchange": { - "methods": [ - "subscribe_data_exchange" - ] - }, - "SubscribeListing": { - "methods": [ - "subscribe_listing" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UpdateDataExchange": { - "methods": [ - "update_data_exchange" - ] - }, - "UpdateListing": { - "methods": [ - "update_listing" - ] - } - } - }, - "grpc-async": { - "libraryClient": "AnalyticsHubServiceAsyncClient", - "rpcs": { - 
"CreateDataExchange": { - "methods": [ - "create_data_exchange" - ] - }, - "CreateListing": { - "methods": [ - "create_listing" - ] - }, - "DeleteDataExchange": { - "methods": [ - "delete_data_exchange" - ] - }, - "DeleteListing": { - "methods": [ - "delete_listing" - ] - }, - "DeleteSubscription": { - "methods": [ - "delete_subscription" - ] - }, - "GetDataExchange": { - "methods": [ - "get_data_exchange" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - "GetListing": { - "methods": [ - "get_listing" - ] - }, - "GetSubscription": { - "methods": [ - "get_subscription" - ] - }, - "ListDataExchanges": { - "methods": [ - "list_data_exchanges" - ] - }, - "ListListings": { - "methods": [ - "list_listings" - ] - }, - "ListOrgDataExchanges": { - "methods": [ - "list_org_data_exchanges" - ] - }, - "ListSharedResourceSubscriptions": { - "methods": [ - "list_shared_resource_subscriptions" - ] - }, - "ListSubscriptions": { - "methods": [ - "list_subscriptions" - ] - }, - "RefreshSubscription": { - "methods": [ - "refresh_subscription" - ] - }, - "RevokeSubscription": { - "methods": [ - "revoke_subscription" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "SubscribeDataExchange": { - "methods": [ - "subscribe_data_exchange" - ] - }, - "SubscribeListing": { - "methods": [ - "subscribe_listing" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UpdateDataExchange": { - "methods": [ - "update_data_exchange" - ] - }, - "UpdateListing": { - "methods": [ - "update_listing" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/gapic_version.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/py.typed b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/py.typed deleted file mode 100644 index 77f387cebe7e..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-bigquery-analyticshub package uses inline types. 
diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/services/__init__.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/services/__init__.py deleted file mode 100644 index 8f6cf068242c..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/__init__.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/__init__.py deleted file mode 100644 index 0eb53446cbe6..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import AnalyticsHubServiceClient -from .async_client import AnalyticsHubServiceAsyncClient - -__all__ = ( - 'AnalyticsHubServiceClient', - 'AnalyticsHubServiceAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/async_client.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/async_client.py deleted file mode 100644 index 3e014c6db406..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/async_client.py +++ /dev/null @@ -1,2822 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.bigquery_analyticshub_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service import pagers -from google.cloud.bigquery_analyticshub_v1.types import analyticshub -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import AnalyticsHubServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import AnalyticsHubServiceGrpcAsyncIOTransport -from .client import AnalyticsHubServiceClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -class AnalyticsHubServiceAsyncClient: - """The ``AnalyticsHubService`` API facilitates data sharing within and - across organizations. It allows data providers to publish listings - that reference shared datasets. With Analytics Hub, users can - discover and search for listings that they have access to. - Subscribers can view and subscribe to listings. When you subscribe - to a listing, Analytics Hub creates a linked dataset in your - project. - """ - - _client: AnalyticsHubServiceClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
- DEFAULT_ENDPOINT = AnalyticsHubServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = AnalyticsHubServiceClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = AnalyticsHubServiceClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = AnalyticsHubServiceClient._DEFAULT_UNIVERSE - - data_exchange_path = staticmethod(AnalyticsHubServiceClient.data_exchange_path) - parse_data_exchange_path = staticmethod(AnalyticsHubServiceClient.parse_data_exchange_path) - dataset_path = staticmethod(AnalyticsHubServiceClient.dataset_path) - parse_dataset_path = staticmethod(AnalyticsHubServiceClient.parse_dataset_path) - listing_path = staticmethod(AnalyticsHubServiceClient.listing_path) - parse_listing_path = staticmethod(AnalyticsHubServiceClient.parse_listing_path) - subscription_path = staticmethod(AnalyticsHubServiceClient.subscription_path) - parse_subscription_path = staticmethod(AnalyticsHubServiceClient.parse_subscription_path) - table_path = staticmethod(AnalyticsHubServiceClient.table_path) - parse_table_path = staticmethod(AnalyticsHubServiceClient.parse_table_path) - common_billing_account_path = staticmethod(AnalyticsHubServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(AnalyticsHubServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(AnalyticsHubServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(AnalyticsHubServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(AnalyticsHubServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(AnalyticsHubServiceClient.parse_common_organization_path) - common_project_path = staticmethod(AnalyticsHubServiceClient.common_project_path) - parse_common_project_path = staticmethod(AnalyticsHubServiceClient.parse_common_project_path) - common_location_path = staticmethod(AnalyticsHubServiceClient.common_location_path) - parse_common_location_path = staticmethod(AnalyticsHubServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - AnalyticsHubServiceAsyncClient: The constructed client. - """ - return AnalyticsHubServiceClient.from_service_account_info.__func__(AnalyticsHubServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - AnalyticsHubServiceAsyncClient: The constructed client. - """ - return AnalyticsHubServiceClient.from_service_account_file.__func__(AnalyticsHubServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. 
- - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` is provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return AnalyticsHubServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> AnalyticsHubServiceTransport: - """Returns the transport used by the client instance. - - Returns: - AnalyticsHubServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = AnalyticsHubServiceClient.get_transport_class - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, AnalyticsHubServiceTransport, Callable[..., AnalyticsHubServiceTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the analytics hub service async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,AnalyticsHubServiceTransport,Callable[..., AnalyticsHubServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the AnalyticsHubServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided.
Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which has one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = AnalyticsHubServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.bigquery.analyticshub_v1.AnalyticsHubServiceAsyncClient`.", - extra = { - "serviceName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { - "serviceName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "credentialsType": None, - } - ) - - async def list_data_exchanges(self, - request: Optional[Union[analyticshub.ListDataExchangesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListDataExchangesAsyncPager: - r"""Lists all data exchanges in a given project and - location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_analyticshub_v1 - - async def sample_list_data_exchanges(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.ListDataExchangesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_exchanges(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_analyticshub_v1.types.ListDataExchangesRequest, dict]]): - The request object. Message for requesting the list of - data exchanges. - parent (:class:`str`): - Required. The parent resource path of the data - exchanges. e.g. ``projects/myproject/locations/US``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.pagers.ListDataExchangesAsyncPager: - Message for response to the list of - data exchanges. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, analyticshub.ListDataExchangesRequest): - request = analyticshub.ListDataExchangesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_data_exchanges] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. 
- response = pagers.ListDataExchangesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_org_data_exchanges(self, - request: Optional[Union[analyticshub.ListOrgDataExchangesRequest, dict]] = None, - *, - organization: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListOrgDataExchangesAsyncPager: - r"""Lists all data exchanges from projects in a given - organization and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_analyticshub_v1 - - async def sample_list_org_data_exchanges(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.ListOrgDataExchangesRequest( - organization="organization_value", - ) - - # Make the request - page_result = client.list_org_data_exchanges(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_analyticshub_v1.types.ListOrgDataExchangesRequest, dict]]): - The request object. Message for requesting the list of - data exchanges from projects in an - organization and location. - organization (:class:`str`): - Required. The organization resource path of the projects - containing DataExchanges. e.g. - ``organizations/myorg/locations/US``. - - This corresponds to the ``organization`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.pagers.ListOrgDataExchangesAsyncPager: - Message for response to listing data - exchanges in an organization and - location. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [organization] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
- if not isinstance(request, analyticshub.ListOrgDataExchangesRequest): - request = analyticshub.ListOrgDataExchangesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if organization is not None: - request.organization = organization - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_org_data_exchanges] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("organization", request.organization), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListOrgDataExchangesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_data_exchange(self, - request: Optional[Union[analyticshub.GetDataExchangeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> analyticshub.DataExchange: - r"""Gets the details of a data exchange. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_analyticshub_v1 - - async def sample_get_data_exchange(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.GetDataExchangeRequest( - name="name_value", - ) - - # Make the request - response = await client.get_data_exchange(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_analyticshub_v1.types.GetDataExchangeRequest, dict]]): - The request object. Message for getting a data exchange. - name (:class:`str`): - Required. The resource name of the data exchange. e.g. - ``projects/myproject/locations/US/dataExchanges/123``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_analyticshub_v1.types.DataExchange: - A data exchange is a container that - lets you share data. 
Along with the - descriptive information about the data - exchange, it contains listings that - reference shared datasets. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, analyticshub.GetDataExchangeRequest): - request = analyticshub.GetDataExchangeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_data_exchange] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_data_exchange(self, - request: Optional[Union[analyticshub.CreateDataExchangeRequest, dict]] = None, - *, - parent: Optional[str] = None, - data_exchange: Optional[analyticshub.DataExchange] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> analyticshub.DataExchange: - r"""Creates a new data exchange. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_analyticshub_v1 - - async def sample_create_data_exchange(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - data_exchange = bigquery_analyticshub_v1.DataExchange() - data_exchange.display_name = "display_name_value" - - request = bigquery_analyticshub_v1.CreateDataExchangeRequest( - parent="parent_value", - data_exchange_id="data_exchange_id_value", - data_exchange=data_exchange, - ) - - # Make the request - response = await client.create_data_exchange(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_analyticshub_v1.types.CreateDataExchangeRequest, dict]]): - The request object. Message for creating a data exchange. - parent (:class:`str`): - Required. The parent resource path of the data exchange. - e.g. ``projects/myproject/locations/US``. 
- - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - data_exchange (:class:`google.cloud.bigquery_analyticshub_v1.types.DataExchange`): - Required. The data exchange to - create. - - This corresponds to the ``data_exchange`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_analyticshub_v1.types.DataExchange: - A data exchange is a container that - lets you share data. Along with the - descriptive information about the data - exchange, it contains listings that - reference shared datasets. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, data_exchange] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, analyticshub.CreateDataExchangeRequest): - request = analyticshub.CreateDataExchangeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if data_exchange is not None: - request.data_exchange = data_exchange - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_data_exchange] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_data_exchange(self, - request: Optional[Union[analyticshub.UpdateDataExchangeRequest, dict]] = None, - *, - data_exchange: Optional[analyticshub.DataExchange] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> analyticshub.DataExchange: - r"""Updates an existing data exchange. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_analyticshub_v1 - - async def sample_update_data_exchange(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - data_exchange = bigquery_analyticshub_v1.DataExchange() - data_exchange.display_name = "display_name_value" - - request = bigquery_analyticshub_v1.UpdateDataExchangeRequest( - data_exchange=data_exchange, - ) - - # Make the request - response = await client.update_data_exchange(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_analyticshub_v1.types.UpdateDataExchangeRequest, dict]]): - The request object. Message for updating a data exchange. - data_exchange (:class:`google.cloud.bigquery_analyticshub_v1.types.DataExchange`): - Required. The data exchange to - update. - - This corresponds to the ``data_exchange`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Field mask specifies the fields to update in - the data exchange resource. The fields specified in the - ``updateMask`` are relative to the resource and are not - a full request. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_analyticshub_v1.types.DataExchange: - A data exchange is a container that - lets you share data. Along with the - descriptive information about the data - exchange, it contains listings that - reference shared datasets. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [data_exchange, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, analyticshub.UpdateDataExchangeRequest): - request = analyticshub.UpdateDataExchangeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if data_exchange is not None: - request.data_exchange = data_exchange - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_data_exchange] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("data_exchange.name", request.data_exchange.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_data_exchange(self, - request: Optional[Union[analyticshub.DeleteDataExchangeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes an existing data exchange. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_analyticshub_v1 - - async def sample_delete_data_exchange(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.DeleteDataExchangeRequest( - name="name_value", - ) - - # Make the request - await client.delete_data_exchange(request=request) - - Args: - request (Optional[Union[google.cloud.bigquery_analyticshub_v1.types.DeleteDataExchangeRequest, dict]]): - The request object. Message for deleting a data exchange. - name (:class:`str`): - Required. The full name of the data exchange resource - that you want to delete. For example, - ``projects/myproject/locations/US/dataExchanges/123``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, analyticshub.DeleteDataExchangeRequest): - request = analyticshub.DeleteDataExchangeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.delete_data_exchange] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def list_listings(self, - request: Optional[Union[analyticshub.ListListingsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListListingsAsyncPager: - r"""Lists all listings in a given project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_analyticshub_v1 - - async def sample_list_listings(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.ListListingsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_listings(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_analyticshub_v1.types.ListListingsRequest, dict]]): - The request object. Message for requesting the list of - listings. - parent (:class:`str`): - Required. The parent resource path of the listing. e.g. - ``projects/myproject/locations/US/dataExchanges/123``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.pagers.ListListingsAsyncPager: - Message for response to the list of - Listings. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
- if not isinstance(request, analyticshub.ListListingsRequest): - request = analyticshub.ListListingsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_listings] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListListingsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_listing(self, - request: Optional[Union[analyticshub.GetListingRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> analyticshub.Listing: - r"""Gets the details of a listing. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_analyticshub_v1 - - async def sample_get_listing(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.GetListingRequest( - name="name_value", - ) - - # Make the request - response = await client.get_listing(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_analyticshub_v1.types.GetListingRequest, dict]]): - The request object. Message for getting a listing. - name (:class:`str`): - Required. The resource name of the listing. e.g. - ``projects/myproject/locations/US/dataExchanges/123/listings/456``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_analyticshub_v1.types.Listing: - A listing is what gets published into - a data exchange that a subscriber can - subscribe to. 
It contains a reference to - the data source along with descriptive - information that will help subscribers - find and subscribe to the data. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, analyticshub.GetListingRequest): - request = analyticshub.GetListingRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_listing] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_listing(self, - request: Optional[Union[analyticshub.CreateListingRequest, dict]] = None, - *, - parent: Optional[str] = None, - listing: Optional[analyticshub.Listing] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> analyticshub.Listing: - r"""Creates a new listing. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_analyticshub_v1 - - async def sample_create_listing(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - listing = bigquery_analyticshub_v1.Listing() - listing.display_name = "display_name_value" - - request = bigquery_analyticshub_v1.CreateListingRequest( - parent="parent_value", - listing_id="listing_id_value", - listing=listing, - ) - - # Make the request - response = await client.create_listing(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_analyticshub_v1.types.CreateListingRequest, dict]]): - The request object. Message for creating a listing. - parent (:class:`str`): - Required. The parent resource path of the listing. e.g. - ``projects/myproject/locations/US/dataExchanges/123``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set.
- listing (:class:`google.cloud.bigquery_analyticshub_v1.types.Listing`): - Required. The listing to create. - This corresponds to the ``listing`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_analyticshub_v1.types.Listing: - A listing is what gets published into - a data exchange that a subscriber can - subscribe to. It contains a reference to - the data source along with descriptive - information that will help subscribers - find and subscribe to the data. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, listing] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, analyticshub.CreateListingRequest): - request = analyticshub.CreateListingRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if listing is not None: - request.listing = listing - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_listing] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_listing(self, - request: Optional[Union[analyticshub.UpdateListingRequest, dict]] = None, - *, - listing: Optional[analyticshub.Listing] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> analyticshub.Listing: - r"""Updates an existing listing. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_analyticshub_v1 - - async def sample_update_listing(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - listing = bigquery_analyticshub_v1.Listing() - listing.display_name = "display_name_value" - - request = bigquery_analyticshub_v1.UpdateListingRequest( - listing=listing, - ) - - # Make the request - response = await client.update_listing(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_analyticshub_v1.types.UpdateListingRequest, dict]]): - The request object. Message for updating a Listing. - listing (:class:`google.cloud.bigquery_analyticshub_v1.types.Listing`): - Required. The listing to update. - This corresponds to the ``listing`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Field mask specifies the fields to update in - the listing resource. The fields specified in the - ``updateMask`` are relative to the resource and are not - a full request. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_analyticshub_v1.types.Listing: - A listing is what gets published into - a data exchange that a subscriber can - subscribe to. It contains a reference to - the data source along with descriptive - information that will help subscribers - find and subscribe to the data. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [listing, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, analyticshub.UpdateListingRequest): - request = analyticshub.UpdateListingRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if listing is not None: - request.listing = listing - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_listing] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("listing.name", request.listing.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_listing(self, - request: Optional[Union[analyticshub.DeleteListingRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a listing. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_analyticshub_v1 - - async def sample_delete_listing(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.DeleteListingRequest( - name="name_value", - ) - - # Make the request - await client.delete_listing(request=request) - - Args: - request (Optional[Union[google.cloud.bigquery_analyticshub_v1.types.DeleteListingRequest, dict]]): - The request object. Message for deleting a listing. - name (:class:`str`): - Required. Resource name of the listing to delete. e.g. - ``projects/myproject/locations/US/dataExchanges/123/listings/456``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, analyticshub.DeleteListingRequest): - request = analyticshub.DeleteListingRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_listing] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def subscribe_listing(self, - request: Optional[Union[analyticshub.SubscribeListingRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> analyticshub.SubscribeListingResponse: - r"""Subscribes to a listing. - - Currently, with Analytics Hub, you can create listings - that reference only BigQuery datasets. - Upon subscription to a listing for a BigQuery dataset, - Analytics Hub creates a linked dataset in the - subscriber's project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_analyticshub_v1 - - async def sample_subscribe_listing(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - destination_dataset = bigquery_analyticshub_v1.DestinationDataset() - destination_dataset.dataset_reference.dataset_id = "dataset_id_value" - destination_dataset.dataset_reference.project_id = "project_id_value" - destination_dataset.location = "location_value" - - request = bigquery_analyticshub_v1.SubscribeListingRequest( - destination_dataset=destination_dataset, - name="name_value", - ) - - # Make the request - response = await client.subscribe_listing(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_analyticshub_v1.types.SubscribeListingRequest, dict]]): - The request object. Message for subscribing to a listing. - name (:class:`str`): - Required. Resource name of the listing that you want to - subscribe to. e.g. - ``projects/myproject/locations/US/dataExchanges/123/listings/456``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_analyticshub_v1.types.SubscribeListingResponse: - Message for response when you - subscribe to a listing. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, analyticshub.SubscribeListingRequest): - request = analyticshub.SubscribeListingRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.subscribe_listing] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def subscribe_data_exchange(self, - request: Optional[Union[analyticshub.SubscribeDataExchangeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Creates a Subscription to a Data Exchange. This is a - long-running operation as it will create one or more - linked datasets. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_analyticshub_v1 - - async def sample_subscribe_data_exchange(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.SubscribeDataExchangeRequest( - name="name_value", - destination="destination_value", - subscription="subscription_value", - ) - - # Make the request - operation = client.subscribe_data_exchange(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_analyticshub_v1.types.SubscribeDataExchangeRequest, dict]]): - The request object. Message for subscribing to a Data - Exchange. - name (:class:`str`): - Required. Resource name of the Data Exchange. e.g. - ``projects/publisherproject/locations/US/dataExchanges/123`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.bigquery_analyticshub_v1.types.SubscribeDataExchangeResponse` - Message for response when you subscribe to a Data - Exchange. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, analyticshub.SubscribeDataExchangeRequest): - request = analyticshub.SubscribeDataExchangeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.subscribe_data_exchange] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - analyticshub.SubscribeDataExchangeResponse, - metadata_type=analyticshub.OperationMetadata, - ) - - # Done; return the response. - return response - - async def refresh_subscription(self, - request: Optional[Union[analyticshub.RefreshSubscriptionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Refreshes a Subscription to a Data Exchange. A Data - Exchange can become stale when a publisher adds or - removes data. This is a long-running operation as it may - create many linked datasets. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_analyticshub_v1 - - async def sample_refresh_subscription(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.RefreshSubscriptionRequest( - name="name_value", - ) - - # Make the request - operation = client.refresh_subscription(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_analyticshub_v1.types.RefreshSubscriptionRequest, dict]]): - The request object. Message for refreshing a - subscription. - name (:class:`str`): - Required. Resource name of the Subscription to refresh. - e.g. - ``projects/subscriberproject/locations/US/subscriptions/123`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.bigquery_analyticshub_v1.types.RefreshSubscriptionResponse` - Message for response when you refresh a subscription. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, analyticshub.RefreshSubscriptionRequest): - request = analyticshub.RefreshSubscriptionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.refresh_subscription] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. 
- response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - analyticshub.RefreshSubscriptionResponse, - metadata_type=analyticshub.OperationMetadata, - ) - - # Done; return the response. - return response - - async def get_subscription(self, - request: Optional[Union[analyticshub.GetSubscriptionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> analyticshub.Subscription: - r"""Gets the details of a Subscription. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_analyticshub_v1 - - async def sample_get_subscription(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.GetSubscriptionRequest( - name="name_value", - ) - - # Make the request - response = await client.get_subscription(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_analyticshub_v1.types.GetSubscriptionRequest, dict]]): - The request object. Message for getting a subscription. - name (:class:`str`): - Required. Resource name of the - subscription. e.g. - projects/123/locations/US/subscriptions/456 - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_analyticshub_v1.types.Subscription: - A subscription represents a - subscriber's access to a particular set - of published data. It contains - references to associated listings, data - exchanges, and linked datasets. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, analyticshub.GetSubscriptionRequest): - request = analyticshub.GetSubscriptionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_subscription] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_subscriptions(self, - request: Optional[Union[analyticshub.ListSubscriptionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListSubscriptionsAsyncPager: - r"""Lists all subscriptions in a given project and - location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_analyticshub_v1 - - async def sample_list_subscriptions(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.ListSubscriptionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_subscriptions(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_analyticshub_v1.types.ListSubscriptionsRequest, dict]]): - The request object. Message for listing subscriptions. - parent (:class:`str`): - Required. The parent resource path of - the subscription. e.g. - projects/myproject/locations/US - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.pagers.ListSubscriptionsAsyncPager: - Message for response to the listing - of subscriptions. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, analyticshub.ListSubscriptionsRequest): - request = analyticshub.ListSubscriptionsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_subscriptions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListSubscriptionsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_shared_resource_subscriptions(self, - request: Optional[Union[analyticshub.ListSharedResourceSubscriptionsRequest, dict]] = None, - *, - resource: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListSharedResourceSubscriptionsAsyncPager: - r"""Lists all subscriptions on a given Data Exchange or - Listing. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_analyticshub_v1 - - async def sample_list_shared_resource_subscriptions(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.ListSharedResourceSubscriptionsRequest( - resource="resource_value", - ) - - # Make the request - page_result = client.list_shared_resource_subscriptions(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_analyticshub_v1.types.ListSharedResourceSubscriptionsRequest, dict]]): - The request object. Message for listing subscriptions of - a shared resource. - resource (:class:`str`): - Required. Resource name of the - requested target. This resource may be - either a Listing or a DataExchange. e.g. - projects/123/locations/US/dataExchanges/456 - OR e.g. 
- projects/123/locations/US/dataExchanges/456/listings/789 - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.pagers.ListSharedResourceSubscriptionsAsyncPager: - Message for response to the listing - of shared resource subscriptions. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [resource] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, analyticshub.ListSharedResourceSubscriptionsRequest): - request = analyticshub.ListSharedResourceSubscriptionsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if resource is not None: - request.resource = resource - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_shared_resource_subscriptions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListSharedResourceSubscriptionsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def revoke_subscription(self, - request: Optional[Union[analyticshub.RevokeSubscriptionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> analyticshub.RevokeSubscriptionResponse: - r"""Revokes a given subscription. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_analyticshub_v1 - - async def sample_revoke_subscription(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.RevokeSubscriptionRequest( - name="name_value", - ) - - # Make the request - response = await client.revoke_subscription(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_analyticshub_v1.types.RevokeSubscriptionRequest, dict]]): - The request object. Message for revoking a subscription. - name (:class:`str`): - Required. Resource name of the - subscription to revoke. e.g. - projects/123/locations/US/subscriptions/456 - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_analyticshub_v1.types.RevokeSubscriptionResponse: - Message for response when you revoke - a subscription. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, analyticshub.RevokeSubscriptionRequest): - request = analyticshub.RevokeSubscriptionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.revoke_subscription] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_subscription(self, - request: Optional[Union[analyticshub.DeleteSubscriptionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Deletes a subscription. 
- - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_analyticshub_v1 - - async def sample_delete_subscription(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.DeleteSubscriptionRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_subscription(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_analyticshub_v1.types.DeleteSubscriptionRequest, dict]]): - The request object. Message for deleting a subscription. - name (:class:`str`): - Required. Resource name of the - subscription to delete. e.g. - projects/123/locations/US/subscriptions/456 - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, analyticshub.DeleteSubscriptionRequest): - request = analyticshub.DeleteSubscriptionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_subscription] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - empty_pb2.Empty, - metadata_type=analyticshub.OperationMetadata, - ) - - # Done; return the response. - return response - - async def get_iam_policy(self, - request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Gets the IAM policy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_analyticshub_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - async def sample_get_iam_policy(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.get_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]]): - The request object. Request message for ``GetIamPolicy`` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
- - **JSON example:** - - :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](\ https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - elif not request: - request = iam_policy_pb2.GetIamPolicyRequest() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def set_iam_policy(self, - request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM policy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_analyticshub_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - async def sample_set_iam_policy(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.set_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]]): - The request object. 
Request message for ``SetIamPolicy`` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). - - **JSON example:** - - :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](\ https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - elif not request: - request = iam_policy_pb2.SetIamPolicyRequest() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.set_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def test_iam_permissions(self, - request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Returns the permissions that a caller has. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_analyticshub_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - async def sample_test_iam_permissions(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = await client.test_iam_permissions(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]]): - The request object. Request message for ``TestIamPermissions`` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: - Response message for TestIamPermissions method. - """ - # Create or coerce a protobuf request object. - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - elif not request: - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.test_iam_permissions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def __aenter__(self) -> "AnalyticsHubServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "AnalyticsHubServiceAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/client.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/client.py deleted file mode 100644 index 1c9c296d73db..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/client.py +++ /dev/null @@ -1,3206 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.bigquery_analyticshub_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.api_core import operation # type: ignore -from google.api_core import operation_async # type: ignore -from google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service import pagers -from google.cloud.bigquery_analyticshub_v1.types import analyticshub -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import AnalyticsHubServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import AnalyticsHubServiceGrpcTransport 
-from .transports.grpc_asyncio import AnalyticsHubServiceGrpcAsyncIOTransport
-
-
-class AnalyticsHubServiceClientMeta(type):
-    """Metaclass for the AnalyticsHubService client.
-
-    This provides class-level methods for building and retrieving
-    support objects (e.g. transport) without polluting the client instance
-    objects.
-    """
-    _transport_registry = OrderedDict()  # type: Dict[str, Type[AnalyticsHubServiceTransport]]
-    _transport_registry["grpc"] = AnalyticsHubServiceGrpcTransport
-    _transport_registry["grpc_asyncio"] = AnalyticsHubServiceGrpcAsyncIOTransport
-
-    def get_transport_class(cls,
-            label: Optional[str] = None,
-        ) -> Type[AnalyticsHubServiceTransport]:
-        """Returns an appropriate transport class.
-
-        Args:
-            label: The name of the desired transport. If none is
-                provided, then the first transport in the registry is used.
-
-        Returns:
-            The transport class to use.
-        """
-        # If a specific transport is requested, return that one.
-        if label:
-            return cls._transport_registry[label]
-
-        # No transport is requested; return the default (that is, the first one
-        # in the dictionary).
-        return next(iter(cls._transport_registry.values()))
-
-
-class AnalyticsHubServiceClient(metaclass=AnalyticsHubServiceClientMeta):
-    """The ``AnalyticsHubService`` API facilitates data sharing within and
-    across organizations. It allows data providers to publish listings
-    that reference shared datasets. With Analytics Hub, users can
-    discover and search for listings that they have access to.
-    Subscribers can view and subscribe to listings. When you subscribe
-    to a listing, Analytics Hub creates a linked dataset in your
-    project.
-    """
-
-    @staticmethod
-    def _get_default_mtls_endpoint(api_endpoint):
-        """Converts api endpoint to mTLS endpoint.
-
-        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
-        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
-        Args:
-            api_endpoint (Optional[str]): the api endpoint to convert.
-        Returns:
-            str: converted mTLS api endpoint.
-        """
-        if not api_endpoint:
-            return api_endpoint
-
-        mtls_endpoint_re = re.compile(
-            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
-        )
-
-        m = mtls_endpoint_re.match(api_endpoint)
-        name, mtls, sandbox, googledomain = m.groups()
-        if mtls or not googledomain:
-            return api_endpoint
-
-        if sandbox:
-            return api_endpoint.replace(
-                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
-            )
-
-        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
-
-    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
-    DEFAULT_ENDPOINT = "analyticshub.googleapis.com"
-    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
-        DEFAULT_ENDPOINT
-    )
-
-    _DEFAULT_ENDPOINT_TEMPLATE = "analyticshub.{UNIVERSE_DOMAIN}"
-    _DEFAULT_UNIVERSE = "googleapis.com"
-
-    @classmethod
-    def from_service_account_info(cls, info: dict, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            info.
-
-        Args:
-            info (dict): The service account private key info.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            AnalyticsHubServiceClient: The constructed client.
- """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - AnalyticsHubServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> AnalyticsHubServiceTransport: - """Returns the transport used by the client instance. - - Returns: - AnalyticsHubServiceTransport: The transport used by the client - instance. - """ - return self._transport - - @staticmethod - def data_exchange_path(project: str,location: str,data_exchange: str,) -> str: - """Returns a fully-qualified data_exchange string.""" - return "projects/{project}/locations/{location}/dataExchanges/{data_exchange}".format(project=project, location=location, data_exchange=data_exchange, ) - - @staticmethod - def parse_data_exchange_path(path: str) -> Dict[str,str]: - """Parses a data_exchange path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/dataExchanges/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def dataset_path(project: str,dataset: str,) -> str: - """Returns a fully-qualified dataset string.""" - return "projects/{project}/datasets/{dataset}".format(project=project, dataset=dataset, ) - - @staticmethod - def parse_dataset_path(path: str) -> Dict[str,str]: - """Parses a dataset path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/datasets/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def listing_path(project: str,location: str,data_exchange: str,listing: str,) -> str: - """Returns a fully-qualified listing string.""" - return "projects/{project}/locations/{location}/dataExchanges/{data_exchange}/listings/{listing}".format(project=project, location=location, data_exchange=data_exchange, listing=listing, ) - - @staticmethod - def parse_listing_path(path: str) -> Dict[str,str]: - """Parses a listing path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/dataExchanges/(?P.+?)/listings/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def subscription_path(project: str,location: str,subscription: str,) -> str: - """Returns a fully-qualified subscription string.""" - return "projects/{project}/locations/{location}/subscriptions/{subscription}".format(project=project, location=location, subscription=subscription, ) - - @staticmethod - def parse_subscription_path(path: str) -> Dict[str,str]: - """Parses a subscription path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/subscriptions/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def table_path(project: str,dataset: str,table: str,) -> str: - """Returns a fully-qualified table string.""" - return "projects/{project}/datasets/{dataset}/tables/{table}".format(project=project, dataset=dataset, table=table, ) - - @staticmethod - def parse_table_path(path: str) -> 
Dict[str,str]: - """Parses a table path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/datasets/(?P.+?)/tables/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Deprecated. Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. 
Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. - - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"]. - """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. 
- """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = AnalyticsHubServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = AnalyticsHubServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = AnalyticsHubServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. - """ - universe_domain = AnalyticsHubServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
- """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, AnalyticsHubServiceTransport, Callable[..., AnalyticsHubServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the analytics hub service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,AnalyticsHubServiceTransport,Callable[..., AnalyticsHubServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the AnalyticsHubServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
-                Generally, you only need to set this if you're developing
-                your own client library.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
-        """
-        self._client_options = client_options
-        if isinstance(self._client_options, dict):
-            self._client_options = client_options_lib.from_dict(self._client_options)
-        if self._client_options is None:
-            self._client_options = client_options_lib.ClientOptions()
-        self._client_options = cast(client_options_lib.ClientOptions, self._client_options)
-
-        universe_domain_opt = getattr(self._client_options, 'universe_domain', None)
-
-        self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = AnalyticsHubServiceClient._read_environment_variables()
-        self._client_cert_source = AnalyticsHubServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert)
-        self._universe_domain = AnalyticsHubServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env)
-        self._api_endpoint = None # updated below, depending on `transport`
-
-        # Initialize the universe domain validation.
-        self._is_universe_domain_valid = False
-
-        if CLIENT_LOGGING_SUPPORTED:  # pragma: NO COVER
-            # Setup logging.
-            client_logging.initialize_logging()
-
-        api_key_value = getattr(self._client_options, "api_key", None)
-        if api_key_value and credentials:
-            raise ValueError("client_options.api_key and credentials are mutually exclusive")
-
-        # Save or instantiate the transport.
-        # Ordinarily, we provide the transport, but allowing a custom transport
-        # instance provides an extensibility point for unusual situations.
-        transport_provided = isinstance(transport, AnalyticsHubServiceTransport)
-        if transport_provided:
-            # transport is an AnalyticsHubServiceTransport instance.
-            if credentials or self._client_options.credentials_file or api_key_value:
-                raise ValueError("When providing a transport instance, "
-                                 "provide its credentials directly.")
-            if self._client_options.scopes:
-                raise ValueError(
-                    "When providing a transport instance, provide its scopes "
-                    "directly."
- ) - self._transport = cast(AnalyticsHubServiceTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - AnalyticsHubServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[AnalyticsHubServiceTransport], Callable[..., AnalyticsHubServiceTransport]] = ( - AnalyticsHubServiceClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., AnalyticsHubServiceTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.bigquery.analyticshub_v1.AnalyticsHubServiceClient`.", - extra = { - "serviceName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "credentialsType": None, - } - ) - - def list_data_exchanges(self, - request: Optional[Union[analyticshub.ListDataExchangesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListDataExchangesPager: - r"""Lists all data exchanges in a given project and - location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_analyticshub_v1 - - def sample_list_data_exchanges(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.ListDataExchangesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_exchanges(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.bigquery_analyticshub_v1.types.ListDataExchangesRequest, dict]): - The request object. Message for requesting the list of - data exchanges. - parent (str): - Required. The parent resource path of the data - exchanges. e.g. ``projects/myproject/locations/US``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.pagers.ListDataExchangesPager: - Message for response to the list of - data exchanges. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, analyticshub.ListDataExchangesRequest): - request = analyticshub.ListDataExchangesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_data_exchanges] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListDataExchangesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
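# Note: besides flat iteration (shown in the generated sample above), the
# returned pager exposes a `pages` property for page-at-a-time consumption.
# A minimal sketch, with a hypothetical parent resource:
#
#     client = bigquery_analyticshub_v1.AnalyticsHubServiceClient()
#     pager = client.list_data_exchanges(parent="projects/my-project/locations/US")
#     for page in pager.pages:  # one ListDataExchangesResponse per underlying RPC
#         for data_exchange in page.data_exchanges:
#             print(data_exchange.name)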
- return response - - def list_org_data_exchanges(self, - request: Optional[Union[analyticshub.ListOrgDataExchangesRequest, dict]] = None, - *, - organization: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListOrgDataExchangesPager: - r"""Lists all data exchanges from projects in a given - organization and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_analyticshub_v1 - - def sample_list_org_data_exchanges(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.ListOrgDataExchangesRequest( - organization="organization_value", - ) - - # Make the request - page_result = client.list_org_data_exchanges(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.bigquery_analyticshub_v1.types.ListOrgDataExchangesRequest, dict]): - The request object. Message for requesting the list of - data exchanges from projects in an - organization and location. - organization (str): - Required. The organization resource path of the projects - containing DataExchanges. e.g. - ``organizations/myorg/locations/US``. - - This corresponds to the ``organization`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.pagers.ListOrgDataExchangesPager: - Message for response to listing data - exchanges in an organization and - location. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [organization] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, analyticshub.ListOrgDataExchangesRequest): - request = analyticshub.ListOrgDataExchangesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if organization is not None: - request.organization = organization - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_org_data_exchanges] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("organization", request.organization), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListOrgDataExchangesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_data_exchange(self, - request: Optional[Union[analyticshub.GetDataExchangeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> analyticshub.DataExchange: - r"""Gets the details of a data exchange. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_analyticshub_v1 - - def sample_get_data_exchange(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.GetDataExchangeRequest( - name="name_value", - ) - - # Make the request - response = client.get_data_exchange(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_analyticshub_v1.types.GetDataExchangeRequest, dict]): - The request object. Message for getting a data exchange. - name (str): - Required. The resource name of the data exchange. e.g. - ``projects/myproject/locations/US/dataExchanges/123``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_analyticshub_v1.types.DataExchange: - A data exchange is a container that - lets you share data. Along with the - descriptive information about the data - exchange, it contains listings that - reference shared datasets. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
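# Note: the guard below enforces the GAPIC convention that the `request` object
# and the flattened field are mutually exclusive. A minimal sketch, with a
# hypothetical resource name:
#
#     # Either of these is fine on its own:
#     client.get_data_exchange(name="projects/my-project/locations/US/dataExchanges/123")
#     client.get_data_exchange(
#         request=bigquery_analyticshub_v1.GetDataExchangeRequest(
#             name="projects/my-project/locations/US/dataExchanges/123",
#         ),
#     )
#     # Passing both `request=` and `name=` raises ValueError.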
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, analyticshub.GetDataExchangeRequest): - request = analyticshub.GetDataExchangeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_data_exchange] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_data_exchange(self, - request: Optional[Union[analyticshub.CreateDataExchangeRequest, dict]] = None, - *, - parent: Optional[str] = None, - data_exchange: Optional[analyticshub.DataExchange] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> analyticshub.DataExchange: - r"""Creates a new data exchange. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_analyticshub_v1 - - def sample_create_data_exchange(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - data_exchange = bigquery_analyticshub_v1.DataExchange() - data_exchange.display_name = "display_name_value" - - request = bigquery_analyticshub_v1.CreateDataExchangeRequest( - parent="parent_value", - data_exchange_id="data_exchange_id_value", - data_exchange=data_exchange, - ) - - # Make the request - response = client.create_data_exchange(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_analyticshub_v1.types.CreateDataExchangeRequest, dict]): - The request object. Message for creating a data exchange. - parent (str): - Required. The parent resource path of the data exchange. - e.g. ``projects/myproject/locations/US``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - data_exchange (google.cloud.bigquery_analyticshub_v1.types.DataExchange): - Required. The data exchange to - create. - - This corresponds to the ``data_exchange`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_analyticshub_v1.types.DataExchange: - A data exchange is a container that - lets you share data. Along with the - descriptive information about the data - exchange, it contains listings that - reference shared datasets. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, data_exchange] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, analyticshub.CreateDataExchangeRequest): - request = analyticshub.CreateDataExchangeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if data_exchange is not None: - request.data_exchange = data_exchange - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_data_exchange] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_data_exchange(self, - request: Optional[Union[analyticshub.UpdateDataExchangeRequest, dict]] = None, - *, - data_exchange: Optional[analyticshub.DataExchange] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> analyticshub.DataExchange: - r"""Updates an existing data exchange. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_analyticshub_v1 - - def sample_update_data_exchange(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - data_exchange = bigquery_analyticshub_v1.DataExchange() - data_exchange.display_name = "display_name_value" - - request = bigquery_analyticshub_v1.UpdateDataExchangeRequest( - data_exchange=data_exchange, - ) - - # Make the request - response = client.update_data_exchange(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_analyticshub_v1.types.UpdateDataExchangeRequest, dict]): - The request object. Message for updating a data exchange. - data_exchange (google.cloud.bigquery_analyticshub_v1.types.DataExchange): - Required. The data exchange to - update. - - This corresponds to the ``data_exchange`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Field mask specifies the fields to update in - the data exchange resource. The fields specified in the - ``updateMask`` are relative to the resource and are not - a full request. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_analyticshub_v1.types.DataExchange: - A data exchange is a container that - lets you share data. Along with the - descriptive information about the data - exchange, it contains listings that - reference shared datasets. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [data_exchange, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, analyticshub.UpdateDataExchangeRequest): - request = analyticshub.UpdateDataExchangeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if data_exchange is not None: - request.data_exchange = data_exchange - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_data_exchange] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("data_exchange.name", request.data_exchange.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_data_exchange(self, - request: Optional[Union[analyticshub.DeleteDataExchangeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes an existing data exchange. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_analyticshub_v1 - - def sample_delete_data_exchange(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.DeleteDataExchangeRequest( - name="name_value", - ) - - # Make the request - client.delete_data_exchange(request=request) - - Args: - request (Union[google.cloud.bigquery_analyticshub_v1.types.DeleteDataExchangeRequest, dict]): - The request object. Message for deleting a data exchange. - name (str): - Required. The full name of the data exchange resource - that you want to delete. For example, - ``projects/myproject/locations/US/dataExchanges/123``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, analyticshub.DeleteDataExchangeRequest): - request = analyticshub.DeleteDataExchangeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
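# Note: because the wrapped method carries the transport's default retry and
# timeout policies, both can still be overridden per call. A minimal sketch,
# with a hypothetical resource name and policy values:
#
#     from google.api_core import retry as retries
#
#     client.delete_data_exchange(
#         name="projects/my-project/locations/US/dataExchanges/123",
#         retry=retries.Retry(initial=1.0, maximum=10.0, multiplier=2.0),
#         timeout=30.0,
#     )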
- rpc = self._transport._wrapped_methods[self._transport.delete_data_exchange] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def list_listings(self, - request: Optional[Union[analyticshub.ListListingsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListListingsPager: - r"""Lists all listings in a given project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_analyticshub_v1 - - def sample_list_listings(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.ListListingsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_listings(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.bigquery_analyticshub_v1.types.ListListingsRequest, dict]): - The request object. Message for requesting the list of - listings. - parent (str): - Required. The parent resource path of the listing. e.g. - ``projects/myproject/locations/US/dataExchanges/123``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.pagers.ListListingsPager: - Message for response to the list of - Listings. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
- if not isinstance(request, analyticshub.ListListingsRequest): - request = analyticshub.ListListingsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_listings] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListListingsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_listing(self, - request: Optional[Union[analyticshub.GetListingRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> analyticshub.Listing: - r"""Gets the details of a listing. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_analyticshub_v1 - - def sample_get_listing(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.GetListingRequest( - name="name_value", - ) - - # Make the request - response = client.get_listing(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_analyticshub_v1.types.GetListingRequest, dict]): - The request object. Message for getting a listing. - name (str): - Required. The resource name of the listing. e.g. - ``projects/myproject/locations/US/dataExchanges/123/listings/456``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_analyticshub_v1.types.Listing: - A listing is what gets published into - a data exchange that a subscriber can - subscribe to. It contains a reference to - the data source along with descriptive - information that will help subscribers - find and subscribe the data. - - """ - # Create or coerce a protobuf request object. 
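# Note: the `name` argument accepted here is typically built with the
# `listing_path` helper defined earlier on this class, and can be decomposed
# again with `parse_listing_path`. A minimal sketch, with hypothetical IDs:
#
#     name = AnalyticsHubServiceClient.listing_path("my-project", "US", "123", "456")
#     # -> "projects/my-project/locations/US/dataExchanges/123/listings/456"
#     AnalyticsHubServiceClient.parse_listing_path(name)
#     # -> {"project": "my-project", "location": "US",
#     #     "data_exchange": "123", "listing": "456"}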
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, analyticshub.GetListingRequest): - request = analyticshub.GetListingRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_listing] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_listing(self, - request: Optional[Union[analyticshub.CreateListingRequest, dict]] = None, - *, - parent: Optional[str] = None, - listing: Optional[analyticshub.Listing] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> analyticshub.Listing: - r"""Creates a new listing. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_analyticshub_v1 - - def sample_create_listing(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - listing = bigquery_analyticshub_v1.Listing() - listing.display_name = "display_name_value" - - request = bigquery_analyticshub_v1.CreateListingRequest( - parent="parent_value", - listing_id="listing_id_value", - listing=listing, - ) - - # Make the request - response = client.create_listing(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_analyticshub_v1.types.CreateListingRequest, dict]): - The request object. Message for creating a listing. - parent (str): - Required. The parent resource path of the listing. e.g. - ``projects/myproject/locations/US/dataExchanges/123``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - listing (google.cloud.bigquery_analyticshub_v1.types.Listing): - Required. The listing to create. - This corresponds to the ``listing`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.bigquery_analyticshub_v1.types.Listing:
-                A listing is what gets published into
-                a data exchange that a subscriber can
-                subscribe to. It contains a reference to
-                the data source along with descriptive
-                information that will help subscribers
-                find and subscribe to the data.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [parent, listing]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, analyticshub.CreateListingRequest):
-            request = analyticshub.CreateListingRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if parent is not None:
-                request.parent = parent
-            if listing is not None:
-                request.listing = listing
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.create_listing]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def update_listing(self,
-            request: Optional[Union[analyticshub.UpdateListingRequest, dict]] = None,
-            *,
-            listing: Optional[analyticshub.Listing] = None,
-            update_mask: Optional[field_mask_pb2.FieldMask] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> analyticshub.Listing:
-        r"""Updates an existing listing.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import bigquery_analyticshub_v1
-
-            def sample_update_listing():
-                # Create a client
-                client = bigquery_analyticshub_v1.AnalyticsHubServiceClient()
-
-                # Initialize request argument(s)
-                listing = bigquery_analyticshub_v1.Listing()
-                listing.display_name = "display_name_value"
-
-                request = bigquery_analyticshub_v1.UpdateListingRequest(
-                    listing=listing,
-                )
-
-                # Make the request
-                response = client.update_listing(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.bigquery_analyticshub_v1.types.UpdateListingRequest, dict]):
-                The request object. Message for updating a Listing.
-            listing (google.cloud.bigquery_analyticshub_v1.types.Listing):
-                Required. The listing to update.
-                This corresponds to the ``listing`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            update_mask (google.protobuf.field_mask_pb2.FieldMask):
-                Required. Field mask specifies the fields to update in
-                the listing resource. The fields specified in the
-                ``updateMask`` are relative to the resource and are not
-                a full request.
-
-                This corresponds to the ``update_mask`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.bigquery_analyticshub_v1.types.Listing:
-                A listing is what gets published into
-                a data exchange that a subscriber can
-                subscribe to. It contains a reference to
-                the data source along with descriptive
-                information that will help subscribers
-                find and subscribe to the data.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [listing, update_mask]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, analyticshub.UpdateListingRequest):
-            request = analyticshub.UpdateListingRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if listing is not None:
-                request.listing = listing
-            if update_mask is not None:
-                request.update_mask = update_mask
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.update_listing]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("listing.name", request.listing.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_listing(self, - request: Optional[Union[analyticshub.DeleteListingRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a listing. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_analyticshub_v1 - - def sample_delete_listing(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.DeleteListingRequest( - name="name_value", - ) - - # Make the request - client.delete_listing(request=request) - - Args: - request (Union[google.cloud.bigquery_analyticshub_v1.types.DeleteListingRequest, dict]): - The request object. Message for deleting a listing. - name (str): - Required. Resource name of the listing to delete. e.g. - ``projects/myproject/locations/US/dataExchanges/123/listings/456``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, analyticshub.DeleteListingRequest): - request = analyticshub.DeleteListingRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_listing] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def subscribe_listing(self, - request: Optional[Union[analyticshub.SubscribeListingRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> analyticshub.SubscribeListingResponse: - r"""Subscribes to a listing. - - Currently, with Analytics Hub, you can create listings - that reference only BigQuery datasets. - Upon subscription to a listing for a BigQuery dataset, - Analytics Hub creates a linked dataset in the - subscriber's project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_analyticshub_v1 - - def sample_subscribe_listing(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - destination_dataset = bigquery_analyticshub_v1.DestinationDataset() - destination_dataset.dataset_reference.dataset_id = "dataset_id_value" - destination_dataset.dataset_reference.project_id = "project_id_value" - destination_dataset.location = "location_value" - - request = bigquery_analyticshub_v1.SubscribeListingRequest( - destination_dataset=destination_dataset, - name="name_value", - ) - - # Make the request - response = client.subscribe_listing(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_analyticshub_v1.types.SubscribeListingRequest, dict]): - The request object. Message for subscribing to a listing. - name (str): - Required. Resource name of the listing that you want to - subscribe to. e.g. - ``projects/myproject/locations/US/dataExchanges/123/listings/456``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_analyticshub_v1.types.SubscribeListingResponse: - Message for response when you - subscribe to a listing. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, analyticshub.SubscribeListingRequest): - request = analyticshub.SubscribeListingRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.subscribe_listing] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def subscribe_data_exchange(self, - request: Optional[Union[analyticshub.SubscribeDataExchangeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Creates a Subscription to a Data Exchange. This is a - long-running operation as it will create one or more - linked datasets. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_analyticshub_v1 - - def sample_subscribe_data_exchange(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.SubscribeDataExchangeRequest( - name="name_value", - destination="destination_value", - subscription="subscription_value", - ) - - # Make the request - operation = client.subscribe_data_exchange(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_analyticshub_v1.types.SubscribeDataExchangeRequest, dict]): - The request object. Message for subscribing to a Data - Exchange. - name (str): - Required. Resource name of the Data Exchange. e.g. - ``projects/publisherproject/locations/US/dataExchanges/123`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.bigquery_analyticshub_v1.types.SubscribeDataExchangeResponse` - Message for response when you subscribe to a Data - Exchange. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, analyticshub.SubscribeDataExchangeRequest): - request = analyticshub.SubscribeDataExchangeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.subscribe_data_exchange] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - analyticshub.SubscribeDataExchangeResponse, - metadata_type=analyticshub.OperationMetadata, - ) - - # Done; return the response. - return response - - def refresh_subscription(self, - request: Optional[Union[analyticshub.RefreshSubscriptionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Refreshes a Subscription to a Data Exchange. A Data - Exchange can become stale when a publisher adds or - removes data. This is a long-running operation as it may - create many linked datasets. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_analyticshub_v1 - - def sample_refresh_subscription(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.RefreshSubscriptionRequest( - name="name_value", - ) - - # Make the request - operation = client.refresh_subscription(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_analyticshub_v1.types.RefreshSubscriptionRequest, dict]): - The request object. Message for refreshing a - subscription. - name (str): - Required. Resource name of the Subscription to refresh. - e.g. - ``projects/subscriberproject/locations/US/subscriptions/123`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be - :class:`google.cloud.bigquery_analyticshub_v1.types.RefreshSubscriptionResponse` - Message for response when you refresh a subscription. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, analyticshub.RefreshSubscriptionRequest): - request = analyticshub.RefreshSubscriptionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.refresh_subscription] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - analyticshub.RefreshSubscriptionResponse, - metadata_type=analyticshub.OperationMetadata, - ) - - # Done; return the response. 
-        return response
-
-    def get_subscription(self,
-            request: Optional[Union[analyticshub.GetSubscriptionRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> analyticshub.Subscription:
-        r"""Gets the details of a Subscription.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import bigquery_analyticshub_v1
-
-            def sample_get_subscription():
-                # Create a client
-                client = bigquery_analyticshub_v1.AnalyticsHubServiceClient()
-
-                # Initialize request argument(s)
-                request = bigquery_analyticshub_v1.GetSubscriptionRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                response = client.get_subscription(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.bigquery_analyticshub_v1.types.GetSubscriptionRequest, dict]):
-                The request object. Message for getting a subscription.
-            name (str):
-                Required. Resource name of the
-                subscription. e.g.
-                projects/123/locations/US/subscriptions/456
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.bigquery_analyticshub_v1.types.Subscription:
-                A subscription represents a
-                subscriber's access to a particular set
-                of published data. It contains
-                references to associated listings, data
-                exchanges, and linked datasets.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [name]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, analyticshub.GetSubscriptionRequest):
-            request = analyticshub.GetSubscriptionRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if name is not None:
-                request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.get_subscription]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_subscriptions(self, - request: Optional[Union[analyticshub.ListSubscriptionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListSubscriptionsPager: - r"""Lists all subscriptions in a given project and - location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_analyticshub_v1 - - def sample_list_subscriptions(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.ListSubscriptionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_subscriptions(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.bigquery_analyticshub_v1.types.ListSubscriptionsRequest, dict]): - The request object. Message for listing subscriptions. - parent (str): - Required. The parent resource path of - the subscription. e.g. - projects/myproject/locations/US - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.pagers.ListSubscriptionsPager: - Message for response to the listing - of subscriptions. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
-        if not isinstance(request, analyticshub.ListSubscriptionsRequest):
-            request = analyticshub.ListSubscriptionsRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if parent is not None:
-                request.parent = parent
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.list_subscriptions]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # This method is paged; wrap the response in a pager, which provides
-        # an `__iter__` convenience method.
-        response = pagers.ListSubscriptionsPager(
-            method=rpc,
-            request=request,
-            response=response,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def list_shared_resource_subscriptions(self,
-            request: Optional[Union[analyticshub.ListSharedResourceSubscriptionsRequest, dict]] = None,
-            *,
-            resource: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> pagers.ListSharedResourceSubscriptionsPager:
-        r"""Lists all subscriptions on a given Data Exchange or
-        Listing.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import bigquery_analyticshub_v1
-
-            def sample_list_shared_resource_subscriptions():
-                # Create a client
-                client = bigquery_analyticshub_v1.AnalyticsHubServiceClient()
-
-                # Initialize request argument(s)
-                request = bigquery_analyticshub_v1.ListSharedResourceSubscriptionsRequest(
-                    resource="resource_value",
-                )
-
-                # Make the request
-                page_result = client.list_shared_resource_subscriptions(request=request)
-
-                # Handle the response
-                for response in page_result:
-                    print(response)
-
-        Args:
-            request (Union[google.cloud.bigquery_analyticshub_v1.types.ListSharedResourceSubscriptionsRequest, dict]):
-                The request object. Message for listing subscriptions of
-                a shared resource.
-            resource (str):
-                Required. Resource name of the
-                requested target. This resource may be
-                either a Listing or a DataExchange, e.g.
-                projects/123/locations/US/dataExchanges/456
-                or
-                projects/123/locations/US/dataExchanges/456/listings/789
-
-                This corresponds to the ``resource`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.pagers.ListSharedResourceSubscriptionsPager: - Message for response to the listing - of shared resource subscriptions. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [resource] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, analyticshub.ListSharedResourceSubscriptionsRequest): - request = analyticshub.ListSharedResourceSubscriptionsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if resource is not None: - request.resource = resource - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_shared_resource_subscriptions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListSharedResourceSubscriptionsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def revoke_subscription(self, - request: Optional[Union[analyticshub.RevokeSubscriptionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> analyticshub.RevokeSubscriptionResponse: - r"""Revokes a given subscription. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_analyticshub_v1 - - def sample_revoke_subscription(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.RevokeSubscriptionRequest( - name="name_value", - ) - - # Make the request - response = client.revoke_subscription(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_analyticshub_v1.types.RevokeSubscriptionRequest, dict]): - The request object. Message for revoking a subscription. - name (str): - Required. Resource name of the - subscription to revoke. e.g. - projects/123/locations/US/subscriptions/456 - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_analyticshub_v1.types.RevokeSubscriptionResponse: - Message for response when you revoke - a subscription. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, analyticshub.RevokeSubscriptionRequest): - request = analyticshub.RevokeSubscriptionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.revoke_subscription] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_subscription(self, - request: Optional[Union[analyticshub.DeleteSubscriptionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Deletes a subscription. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_analyticshub_v1 - - def sample_delete_subscription(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.DeleteSubscriptionRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_subscription(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_analyticshub_v1.types.DeleteSubscriptionRequest, dict]): - The request object. Message for deleting a subscription. - name (str): - Required. Resource name of the - subscription to delete. e.g. - projects/123/locations/US/subscriptions/456 - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated - empty messages in your APIs. A typical example is to - use it as the request or the response type of an API - method. For instance: - - service Foo { - rpc Bar(google.protobuf.Empty) returns - (google.protobuf.Empty); - - } - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, analyticshub.DeleteSubscriptionRequest): - request = analyticshub.DeleteSubscriptionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_subscription] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. 
- self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Wrap the response in an operation future. - response = operation.from_gapic( - response, - self._transport.operations_client, - empty_pb2.Empty, - metadata_type=analyticshub.OperationMetadata, - ) - - # Done; return the response. - return response - - def get_iam_policy(self, - request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Gets the IAM policy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_analyticshub_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - def sample_get_iam_policy(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.get_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]): - The request object. Request message for ``GetIamPolicy`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
- - **JSON example:** - - :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](\ https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - if isinstance(request, dict): - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.GetIamPolicyRequest(**request) - elif not request: - # Null request, just make one. - request = iam_policy_pb2.GetIamPolicyRequest() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def set_iam_policy(self, - request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM policy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_analyticshub_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - def sample_set_iam_policy(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.set_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]): - The request object. Request message for ``SetIamPolicy`` method. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). - - **JSON example:** - - :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](\ https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - if isinstance(request, dict): - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.SetIamPolicyRequest(**request) - elif not request: - # Null request, just make one. - request = iam_policy_pb2.SetIamPolicyRequest() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def test_iam_permissions(self, - request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Returns the permissions that a caller has. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_analyticshub_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - def sample_test_iam_permissions(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = client.test_iam_permissions(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]): - The request object. Request message for ``TestIamPermissions`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: - Response message for TestIamPermissions method. - """ - # Create or coerce a protobuf request object. - if isinstance(request, dict): - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - elif not request: - # Null request, just make one. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "AnalyticsHubServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! 
- """ - self.transport.close() - - - - - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "AnalyticsHubServiceClient", -) diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/pagers.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/pagers.py deleted file mode 100644 index f310b2642251..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/pagers.py +++ /dev/null @@ -1,722 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.bigquery_analyticshub_v1.types import analyticshub - - -class ListDataExchangesPager: - """A pager for iterating through ``list_data_exchanges`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_analyticshub_v1.types.ListDataExchangesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``data_exchanges`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListDataExchanges`` requests and continue to iterate - through the ``data_exchanges`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_analyticshub_v1.types.ListDataExchangesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., analyticshub.ListDataExchangesResponse], - request: analyticshub.ListDataExchangesRequest, - response: analyticshub.ListDataExchangesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_analyticshub_v1.types.ListDataExchangesRequest): - The initial request object. - response (google.cloud.bigquery_analyticshub_v1.types.ListDataExchangesResponse): - The initial response object. 
- retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = analyticshub.ListDataExchangesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[analyticshub.ListDataExchangesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[analyticshub.DataExchange]: - for page in self.pages: - yield from page.data_exchanges - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDataExchangesAsyncPager: - """A pager for iterating through ``list_data_exchanges`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_analyticshub_v1.types.ListDataExchangesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``data_exchanges`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDataExchanges`` requests and continue to iterate - through the ``data_exchanges`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_analyticshub_v1.types.ListDataExchangesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[analyticshub.ListDataExchangesResponse]], - request: analyticshub.ListDataExchangesRequest, - response: analyticshub.ListDataExchangesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_analyticshub_v1.types.ListDataExchangesRequest): - The initial request object. - response (google.cloud.bigquery_analyticshub_v1.types.ListDataExchangesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
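        This pager is normally obtained from
        ``AnalyticsHubServiceAsyncClient.list_data_exchanges`` rather than
        constructed directly. A minimal usage sketch (the parent below is a
        placeholder value):

        .. code-block:: python

            import asyncio

            from google.cloud import bigquery_analyticshub_v1

            async def main():
                client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient()
                pager = await client.list_data_exchanges(
                    parent="projects/my-project/locations/us",  # placeholder
                )
                # Additional pages are requested lazily during iteration.
                async for data_exchange in pager:
                    print(data_exchange.name)

            asyncio.run(main())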
- """ - self._method = method - self._request = analyticshub.ListDataExchangesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[analyticshub.ListDataExchangesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[analyticshub.DataExchange]: - async def async_generator(): - async for page in self.pages: - for response in page.data_exchanges: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListOrgDataExchangesPager: - """A pager for iterating through ``list_org_data_exchanges`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_analyticshub_v1.types.ListOrgDataExchangesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``data_exchanges`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListOrgDataExchanges`` requests and continue to iterate - through the ``data_exchanges`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_analyticshub_v1.types.ListOrgDataExchangesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., analyticshub.ListOrgDataExchangesResponse], - request: analyticshub.ListOrgDataExchangesRequest, - response: analyticshub.ListOrgDataExchangesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_analyticshub_v1.types.ListOrgDataExchangesRequest): - The initial request object. - response (google.cloud.bigquery_analyticshub_v1.types.ListOrgDataExchangesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = analyticshub.ListOrgDataExchangesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[analyticshub.ListOrgDataExchangesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[analyticshub.DataExchange]: - for page in self.pages: - yield from page.data_exchanges - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListOrgDataExchangesAsyncPager: - """A pager for iterating through ``list_org_data_exchanges`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_analyticshub_v1.types.ListOrgDataExchangesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``data_exchanges`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListOrgDataExchanges`` requests and continue to iterate - through the ``data_exchanges`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_analyticshub_v1.types.ListOrgDataExchangesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[analyticshub.ListOrgDataExchangesResponse]], - request: analyticshub.ListOrgDataExchangesRequest, - response: analyticshub.ListOrgDataExchangesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_analyticshub_v1.types.ListOrgDataExchangesRequest): - The initial request object. - response (google.cloud.bigquery_analyticshub_v1.types.ListOrgDataExchangesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = analyticshub.ListOrgDataExchangesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[analyticshub.ListOrgDataExchangesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[analyticshub.DataExchange]: - async def async_generator(): - async for page in self.pages: - for response in page.data_exchanges: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListListingsPager: - """A pager for iterating through ``list_listings`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_analyticshub_v1.types.ListListingsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``listings`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListListings`` requests and continue to iterate - through the ``listings`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_analyticshub_v1.types.ListListingsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., analyticshub.ListListingsResponse], - request: analyticshub.ListListingsRequest, - response: analyticshub.ListListingsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_analyticshub_v1.types.ListListingsRequest): - The initial request object. - response (google.cloud.bigquery_analyticshub_v1.types.ListListingsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = analyticshub.ListListingsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[analyticshub.ListListingsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[analyticshub.Listing]: - for page in self.pages: - yield from page.listings - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListListingsAsyncPager: - """A pager for iterating through ``list_listings`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_analyticshub_v1.types.ListListingsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``listings`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListListings`` requests and continue to iterate - through the ``listings`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_analyticshub_v1.types.ListListingsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[analyticshub.ListListingsResponse]], - request: analyticshub.ListListingsRequest, - response: analyticshub.ListListingsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_analyticshub_v1.types.ListListingsRequest): - The initial request object. - response (google.cloud.bigquery_analyticshub_v1.types.ListListingsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = analyticshub.ListListingsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[analyticshub.ListListingsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[analyticshub.Listing]: - async def async_generator(): - async for page in self.pages: - for response in page.listings: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListSubscriptionsPager: - """A pager for iterating through ``list_subscriptions`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_analyticshub_v1.types.ListSubscriptionsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``subscriptions`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListSubscriptions`` requests and continue to iterate - through the ``subscriptions`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_analyticshub_v1.types.ListSubscriptionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., analyticshub.ListSubscriptionsResponse], - request: analyticshub.ListSubscriptionsRequest, - response: analyticshub.ListSubscriptionsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_analyticshub_v1.types.ListSubscriptionsRequest): - The initial request object. - response (google.cloud.bigquery_analyticshub_v1.types.ListSubscriptionsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = analyticshub.ListSubscriptionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[analyticshub.ListSubscriptionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[analyticshub.Subscription]: - for page in self.pages: - yield from page.subscriptions - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListSubscriptionsAsyncPager: - """A pager for iterating through ``list_subscriptions`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_analyticshub_v1.types.ListSubscriptionsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``subscriptions`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListSubscriptions`` requests and continue to iterate - through the ``subscriptions`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_analyticshub_v1.types.ListSubscriptionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[analyticshub.ListSubscriptionsResponse]], - request: analyticshub.ListSubscriptionsRequest, - response: analyticshub.ListSubscriptionsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_analyticshub_v1.types.ListSubscriptionsRequest): - The initial request object. - response (google.cloud.bigquery_analyticshub_v1.types.ListSubscriptionsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = analyticshub.ListSubscriptionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[analyticshub.ListSubscriptionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[analyticshub.Subscription]: - async def async_generator(): - async for page in self.pages: - for response in page.subscriptions: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListSharedResourceSubscriptionsPager: - """A pager for iterating through ``list_shared_resource_subscriptions`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_analyticshub_v1.types.ListSharedResourceSubscriptionsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``shared_resource_subscriptions`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListSharedResourceSubscriptions`` requests and continue to iterate - through the ``shared_resource_subscriptions`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_analyticshub_v1.types.ListSharedResourceSubscriptionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., analyticshub.ListSharedResourceSubscriptionsResponse], - request: analyticshub.ListSharedResourceSubscriptionsRequest, - response: analyticshub.ListSharedResourceSubscriptionsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_analyticshub_v1.types.ListSharedResourceSubscriptionsRequest): - The initial request object. - response (google.cloud.bigquery_analyticshub_v1.types.ListSharedResourceSubscriptionsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = analyticshub.ListSharedResourceSubscriptionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[analyticshub.ListSharedResourceSubscriptionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[analyticshub.Subscription]: - for page in self.pages: - yield from page.shared_resource_subscriptions - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListSharedResourceSubscriptionsAsyncPager: - """A pager for iterating through ``list_shared_resource_subscriptions`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_analyticshub_v1.types.ListSharedResourceSubscriptionsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``shared_resource_subscriptions`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListSharedResourceSubscriptions`` requests and continue to iterate - through the ``shared_resource_subscriptions`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_analyticshub_v1.types.ListSharedResourceSubscriptionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[analyticshub.ListSharedResourceSubscriptionsResponse]], - request: analyticshub.ListSharedResourceSubscriptionsRequest, - response: analyticshub.ListSharedResourceSubscriptionsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_analyticshub_v1.types.ListSharedResourceSubscriptionsRequest): - The initial request object. - response (google.cloud.bigquery_analyticshub_v1.types.ListSharedResourceSubscriptionsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = analyticshub.ListSharedResourceSubscriptionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[analyticshub.ListSharedResourceSubscriptionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[analyticshub.Subscription]: - async def async_generator(): - async for page in self.pages: - for response in page.shared_resource_subscriptions: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/transports/README.rst b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/transports/README.rst deleted file mode 100644 index dbc9199b0730..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`AnalyticsHubServiceTransport` is the ABC for all transports. -- public child `AnalyticsHubServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `AnalyticsHubServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseAnalyticsHubServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `AnalyticsHubServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/transports/__init__.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/transports/__init__.py deleted file mode 100644 index 0627131b6136..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/transports/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -from typing import Dict, Type - -from .base import AnalyticsHubServiceTransport -from .grpc import AnalyticsHubServiceGrpcTransport -from .grpc_asyncio import AnalyticsHubServiceGrpcAsyncIOTransport - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[AnalyticsHubServiceTransport]] -_transport_registry['grpc'] = AnalyticsHubServiceGrpcTransport -_transport_registry['grpc_asyncio'] = AnalyticsHubServiceGrpcAsyncIOTransport - -__all__ = ( - 'AnalyticsHubServiceTransport', - 'AnalyticsHubServiceGrpcTransport', - 'AnalyticsHubServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/transports/base.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/transports/base.py deleted file mode 100644 index f91b8b1e2f60..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/transports/base.py +++ /dev/null @@ -1,459 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.bigquery_analyticshub_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.bigquery_analyticshub_v1.types import analyticshub -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class AnalyticsHubServiceTransport(abc.ABC): - """Abstract transport class for AnalyticsHubService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'analyticshub.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. 
- - Args: - host (Optional[str]): - The hostname to connect to (default: 'analyticshub.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. 
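        # Note: gapic_v1.method.wrap_method returns a callable that applies
        # the per-call ``retry``/``timeout`` arguments (falling back to the
        # defaults registered here) and attaches ``client_info`` metadata
        # before invoking the underlying transport method.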
- self._wrapped_methods = { - self.list_data_exchanges: gapic_v1.method.wrap_method( - self.list_data_exchanges, - default_timeout=None, - client_info=client_info, - ), - self.list_org_data_exchanges: gapic_v1.method.wrap_method( - self.list_org_data_exchanges, - default_timeout=None, - client_info=client_info, - ), - self.get_data_exchange: gapic_v1.method.wrap_method( - self.get_data_exchange, - default_timeout=None, - client_info=client_info, - ), - self.create_data_exchange: gapic_v1.method.wrap_method( - self.create_data_exchange, - default_timeout=None, - client_info=client_info, - ), - self.update_data_exchange: gapic_v1.method.wrap_method( - self.update_data_exchange, - default_timeout=None, - client_info=client_info, - ), - self.delete_data_exchange: gapic_v1.method.wrap_method( - self.delete_data_exchange, - default_timeout=None, - client_info=client_info, - ), - self.list_listings: gapic_v1.method.wrap_method( - self.list_listings, - default_timeout=None, - client_info=client_info, - ), - self.get_listing: gapic_v1.method.wrap_method( - self.get_listing, - default_timeout=None, - client_info=client_info, - ), - self.create_listing: gapic_v1.method.wrap_method( - self.create_listing, - default_timeout=None, - client_info=client_info, - ), - self.update_listing: gapic_v1.method.wrap_method( - self.update_listing, - default_timeout=None, - client_info=client_info, - ), - self.delete_listing: gapic_v1.method.wrap_method( - self.delete_listing, - default_timeout=None, - client_info=client_info, - ), - self.subscribe_listing: gapic_v1.method.wrap_method( - self.subscribe_listing, - default_timeout=None, - client_info=client_info, - ), - self.subscribe_data_exchange: gapic_v1.method.wrap_method( - self.subscribe_data_exchange, - default_timeout=None, - client_info=client_info, - ), - self.refresh_subscription: gapic_v1.method.wrap_method( - self.refresh_subscription, - default_timeout=None, - client_info=client_info, - ), - self.get_subscription: gapic_v1.method.wrap_method( - self.get_subscription, - default_timeout=None, - client_info=client_info, - ), - self.list_subscriptions: gapic_v1.method.wrap_method( - self.list_subscriptions, - default_timeout=None, - client_info=client_info, - ), - self.list_shared_resource_subscriptions: gapic_v1.method.wrap_method( - self.list_shared_resource_subscriptions, - default_timeout=None, - client_info=client_info, - ), - self.revoke_subscription: gapic_v1.method.wrap_method( - self.revoke_subscription, - default_timeout=None, - client_info=client_info, - ), - self.delete_subscription: gapic_v1.method.wrap_method( - self.delete_subscription, - default_timeout=None, - client_info=client_info, - ), - self.get_iam_policy: gapic_v1.method.wrap_method( - self.get_iam_policy, - default_timeout=None, - client_info=client_info, - ), - self.set_iam_policy: gapic_v1.method.wrap_method( - self.set_iam_policy, - default_timeout=None, - client_info=client_info, - ), - self.test_iam_permissions: gapic_v1.method.wrap_method( - self.test_iam_permissions, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
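        When the client is not used as a context manager, the same cleanup
        can be done explicitly. A sketch (the resource name below is a
        placeholder value):

        .. code-block:: python

            from google.cloud import bigquery_analyticshub_v1

            client = bigquery_analyticshub_v1.AnalyticsHubServiceClient()
            try:
                client.get_data_exchange(
                    name="projects/my-project/locations/us/dataExchanges/my-exchange",  # placeholder
                )
            finally:
                client.transport.close()  # same effect as leaving a ``with`` block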
- """ - raise NotImplementedError() - - @property - def operations_client(self): - """Return the client designed to process long-running operations.""" - raise NotImplementedError() - - @property - def list_data_exchanges(self) -> Callable[ - [analyticshub.ListDataExchangesRequest], - Union[ - analyticshub.ListDataExchangesResponse, - Awaitable[analyticshub.ListDataExchangesResponse] - ]]: - raise NotImplementedError() - - @property - def list_org_data_exchanges(self) -> Callable[ - [analyticshub.ListOrgDataExchangesRequest], - Union[ - analyticshub.ListOrgDataExchangesResponse, - Awaitable[analyticshub.ListOrgDataExchangesResponse] - ]]: - raise NotImplementedError() - - @property - def get_data_exchange(self) -> Callable[ - [analyticshub.GetDataExchangeRequest], - Union[ - analyticshub.DataExchange, - Awaitable[analyticshub.DataExchange] - ]]: - raise NotImplementedError() - - @property - def create_data_exchange(self) -> Callable[ - [analyticshub.CreateDataExchangeRequest], - Union[ - analyticshub.DataExchange, - Awaitable[analyticshub.DataExchange] - ]]: - raise NotImplementedError() - - @property - def update_data_exchange(self) -> Callable[ - [analyticshub.UpdateDataExchangeRequest], - Union[ - analyticshub.DataExchange, - Awaitable[analyticshub.DataExchange] - ]]: - raise NotImplementedError() - - @property - def delete_data_exchange(self) -> Callable[ - [analyticshub.DeleteDataExchangeRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def list_listings(self) -> Callable[ - [analyticshub.ListListingsRequest], - Union[ - analyticshub.ListListingsResponse, - Awaitable[analyticshub.ListListingsResponse] - ]]: - raise NotImplementedError() - - @property - def get_listing(self) -> Callable[ - [analyticshub.GetListingRequest], - Union[ - analyticshub.Listing, - Awaitable[analyticshub.Listing] - ]]: - raise NotImplementedError() - - @property - def create_listing(self) -> Callable[ - [analyticshub.CreateListingRequest], - Union[ - analyticshub.Listing, - Awaitable[analyticshub.Listing] - ]]: - raise NotImplementedError() - - @property - def update_listing(self) -> Callable[ - [analyticshub.UpdateListingRequest], - Union[ - analyticshub.Listing, - Awaitable[analyticshub.Listing] - ]]: - raise NotImplementedError() - - @property - def delete_listing(self) -> Callable[ - [analyticshub.DeleteListingRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def subscribe_listing(self) -> Callable[ - [analyticshub.SubscribeListingRequest], - Union[ - analyticshub.SubscribeListingResponse, - Awaitable[analyticshub.SubscribeListingResponse] - ]]: - raise NotImplementedError() - - @property - def subscribe_data_exchange(self) -> Callable[ - [analyticshub.SubscribeDataExchangeRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def refresh_subscription(self) -> Callable[ - [analyticshub.RefreshSubscriptionRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def get_subscription(self) -> Callable[ - [analyticshub.GetSubscriptionRequest], - Union[ - analyticshub.Subscription, - Awaitable[analyticshub.Subscription] - ]]: - raise NotImplementedError() - - @property - def list_subscriptions(self) -> Callable[ - [analyticshub.ListSubscriptionsRequest], - Union[ - analyticshub.ListSubscriptionsResponse, - 
Awaitable[analyticshub.ListSubscriptionsResponse] - ]]: - raise NotImplementedError() - - @property - def list_shared_resource_subscriptions(self) -> Callable[ - [analyticshub.ListSharedResourceSubscriptionsRequest], - Union[ - analyticshub.ListSharedResourceSubscriptionsResponse, - Awaitable[analyticshub.ListSharedResourceSubscriptionsResponse] - ]]: - raise NotImplementedError() - - @property - def revoke_subscription(self) -> Callable[ - [analyticshub.RevokeSubscriptionRequest], - Union[ - analyticshub.RevokeSubscriptionResponse, - Awaitable[analyticshub.RevokeSubscriptionResponse] - ]]: - raise NotImplementedError() - - @property - def delete_subscription(self) -> Callable[ - [analyticshub.DeleteSubscriptionRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: - raise NotImplementedError() - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Union[ - policy_pb2.Policy, - Awaitable[policy_pb2.Policy] - ]]: - raise NotImplementedError() - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Union[ - policy_pb2.Policy, - Awaitable[policy_pb2.Policy] - ]]: - raise NotImplementedError() - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Union[ - iam_policy_pb2.TestIamPermissionsResponse, - Awaitable[iam_policy_pb2.TestIamPermissionsResponse] - ]]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'AnalyticsHubServiceTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/transports/grpc.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/transports/grpc.py deleted file mode 100644 index 515b1f2af28a..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/transports/grpc.py +++ /dev/null @@ -1,937 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import json -import logging as std_logging -import pickle -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore - -from google.cloud.bigquery_analyticshub_v1.types import analyticshub -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import AnalyticsHubServiceTransport, DEFAULT_CLIENT_INFO - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER - def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "rpcName": client_call_details.method, - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - - response = continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = response.trailing_metadata() - # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = response.result() - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response for {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "rpcName": client_call_details.method, - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class AnalyticsHubServiceGrpcTransport(AnalyticsHubServiceTransport): - """gRPC backend transport for AnalyticsHubService. 
- - The ``AnalyticsHubService`` API facilitates data sharing within and - across organizations. It allows data providers to publish listings - that reference shared datasets. With Analytics Hub, users can - discover and search for listings that they have access to. - Subscribers can view and subscribe to listings. When you subscribe - to a listing, Analytics Hub creates a linked dataset in your - project. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'analyticshub.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'analyticshub.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if a ``channel`` instance is provided. - channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. 
It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - self._operations_client: Optional[operations_v1.OperationsClient] = None - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) - - # Wrap messages. 
This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'analyticshub.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def list_data_exchanges(self) -> Callable[ - [analyticshub.ListDataExchangesRequest], - analyticshub.ListDataExchangesResponse]: - r"""Return a callable for the list data exchanges method over gRPC. - - Lists all data exchanges in a given project and - location. - - Returns: - Callable[[~.ListDataExchangesRequest], - ~.ListDataExchangesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
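        # Note: the stub is created on first property access and cached in
        # self._stubs, so the serializer/deserializer pair is registered
        # once per channel and reused by every subsequent call.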
- if 'list_data_exchanges' not in self._stubs: - self._stubs['list_data_exchanges'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/ListDataExchanges', - request_serializer=analyticshub.ListDataExchangesRequest.serialize, - response_deserializer=analyticshub.ListDataExchangesResponse.deserialize, - ) - return self._stubs['list_data_exchanges'] - - @property - def list_org_data_exchanges(self) -> Callable[ - [analyticshub.ListOrgDataExchangesRequest], - analyticshub.ListOrgDataExchangesResponse]: - r"""Return a callable for the list org data exchanges method over gRPC. - - Lists all data exchanges from projects in a given - organization and location. - - Returns: - Callable[[~.ListOrgDataExchangesRequest], - ~.ListOrgDataExchangesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_org_data_exchanges' not in self._stubs: - self._stubs['list_org_data_exchanges'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/ListOrgDataExchanges', - request_serializer=analyticshub.ListOrgDataExchangesRequest.serialize, - response_deserializer=analyticshub.ListOrgDataExchangesResponse.deserialize, - ) - return self._stubs['list_org_data_exchanges'] - - @property - def get_data_exchange(self) -> Callable[ - [analyticshub.GetDataExchangeRequest], - analyticshub.DataExchange]: - r"""Return a callable for the get data exchange method over gRPC. - - Gets the details of a data exchange. - - Returns: - Callable[[~.GetDataExchangeRequest], - ~.DataExchange]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_data_exchange' not in self._stubs: - self._stubs['get_data_exchange'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/GetDataExchange', - request_serializer=analyticshub.GetDataExchangeRequest.serialize, - response_deserializer=analyticshub.DataExchange.deserialize, - ) - return self._stubs['get_data_exchange'] - - @property - def create_data_exchange(self) -> Callable[ - [analyticshub.CreateDataExchangeRequest], - analyticshub.DataExchange]: - r"""Return a callable for the create data exchange method over gRPC. - - Creates a new data exchange. - - Returns: - Callable[[~.CreateDataExchangeRequest], - ~.DataExchange]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
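-        # The ``analyticshub`` request and response types are proto-plus
-        # messages, so their ``serialize``/``deserialize`` class methods are
-        # what gets handed to gRPC as the (de)serialization functions here.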
- if 'create_data_exchange' not in self._stubs: - self._stubs['create_data_exchange'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/CreateDataExchange', - request_serializer=analyticshub.CreateDataExchangeRequest.serialize, - response_deserializer=analyticshub.DataExchange.deserialize, - ) - return self._stubs['create_data_exchange'] - - @property - def update_data_exchange(self) -> Callable[ - [analyticshub.UpdateDataExchangeRequest], - analyticshub.DataExchange]: - r"""Return a callable for the update data exchange method over gRPC. - - Updates an existing data exchange. - - Returns: - Callable[[~.UpdateDataExchangeRequest], - ~.DataExchange]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_data_exchange' not in self._stubs: - self._stubs['update_data_exchange'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/UpdateDataExchange', - request_serializer=analyticshub.UpdateDataExchangeRequest.serialize, - response_deserializer=analyticshub.DataExchange.deserialize, - ) - return self._stubs['update_data_exchange'] - - @property - def delete_data_exchange(self) -> Callable[ - [analyticshub.DeleteDataExchangeRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete data exchange method over gRPC. - - Deletes an existing data exchange. - - Returns: - Callable[[~.DeleteDataExchangeRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_data_exchange' not in self._stubs: - self._stubs['delete_data_exchange'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/DeleteDataExchange', - request_serializer=analyticshub.DeleteDataExchangeRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_data_exchange'] - - @property - def list_listings(self) -> Callable[ - [analyticshub.ListListingsRequest], - analyticshub.ListListingsResponse]: - r"""Return a callable for the list listings method over gRPC. - - Lists all listings in a given project and location. - - Returns: - Callable[[~.ListListingsRequest], - ~.ListListingsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_listings' not in self._stubs: - self._stubs['list_listings'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/ListListings', - request_serializer=analyticshub.ListListingsRequest.serialize, - response_deserializer=analyticshub.ListListingsResponse.deserialize, - ) - return self._stubs['list_listings'] - - @property - def get_listing(self) -> Callable[ - [analyticshub.GetListingRequest], - analyticshub.Listing]: - r"""Return a callable for the get listing method over gRPC. - - Gets the details of a listing. 
- - Returns: - Callable[[~.GetListingRequest], - ~.Listing]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_listing' not in self._stubs: - self._stubs['get_listing'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/GetListing', - request_serializer=analyticshub.GetListingRequest.serialize, - response_deserializer=analyticshub.Listing.deserialize, - ) - return self._stubs['get_listing'] - - @property - def create_listing(self) -> Callable[ - [analyticshub.CreateListingRequest], - analyticshub.Listing]: - r"""Return a callable for the create listing method over gRPC. - - Creates a new listing. - - Returns: - Callable[[~.CreateListingRequest], - ~.Listing]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_listing' not in self._stubs: - self._stubs['create_listing'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/CreateListing', - request_serializer=analyticshub.CreateListingRequest.serialize, - response_deserializer=analyticshub.Listing.deserialize, - ) - return self._stubs['create_listing'] - - @property - def update_listing(self) -> Callable[ - [analyticshub.UpdateListingRequest], - analyticshub.Listing]: - r"""Return a callable for the update listing method over gRPC. - - Updates an existing listing. - - Returns: - Callable[[~.UpdateListingRequest], - ~.Listing]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_listing' not in self._stubs: - self._stubs['update_listing'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/UpdateListing', - request_serializer=analyticshub.UpdateListingRequest.serialize, - response_deserializer=analyticshub.Listing.deserialize, - ) - return self._stubs['update_listing'] - - @property - def delete_listing(self) -> Callable[ - [analyticshub.DeleteListingRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete listing method over gRPC. - - Deletes a listing. - - Returns: - Callable[[~.DeleteListingRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_listing' not in self._stubs: - self._stubs['delete_listing'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/DeleteListing', - request_serializer=analyticshub.DeleteListingRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_listing'] - - @property - def subscribe_listing(self) -> Callable[ - [analyticshub.SubscribeListingRequest], - analyticshub.SubscribeListingResponse]: - r"""Return a callable for the subscribe listing method over gRPC. - - Subscribes to a listing. - - Currently, with Analytics Hub, you can create listings - that reference only BigQuery datasets. - Upon subscription to a listing for a BigQuery dataset, - Analytics Hub creates a linked dataset in the - subscriber's project. - - Returns: - Callable[[~.SubscribeListingRequest], - ~.SubscribeListingResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'subscribe_listing' not in self._stubs: - self._stubs['subscribe_listing'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/SubscribeListing', - request_serializer=analyticshub.SubscribeListingRequest.serialize, - response_deserializer=analyticshub.SubscribeListingResponse.deserialize, - ) - return self._stubs['subscribe_listing'] - - @property - def subscribe_data_exchange(self) -> Callable[ - [analyticshub.SubscribeDataExchangeRequest], - operations_pb2.Operation]: - r"""Return a callable for the subscribe data exchange method over gRPC. - - Creates a Subscription to a Data Exchange. This is a - long-running operation as it will create one or more - linked datasets. - - Returns: - Callable[[~.SubscribeDataExchangeRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'subscribe_data_exchange' not in self._stubs: - self._stubs['subscribe_data_exchange'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/SubscribeDataExchange', - request_serializer=analyticshub.SubscribeDataExchangeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['subscribe_data_exchange'] - - @property - def refresh_subscription(self) -> Callable[ - [analyticshub.RefreshSubscriptionRequest], - operations_pb2.Operation]: - r"""Return a callable for the refresh subscription method over gRPC. - - Refreshes a Subscription to a Data Exchange. A Data - Exchange can become stale when a publisher adds or - removes data. This is a long-running operation as it may - create many linked datasets. - - Returns: - Callable[[~.RefreshSubscriptionRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
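-        # Like ``subscribe_data_exchange`` above, this RPC resolves to a
-        # ``google.longrunning`` Operation; callers typically poll it through
-        # the ``operations_client`` property rather than inspecting the raw
-        # ``operations_pb2.Operation`` message directly.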
- if 'refresh_subscription' not in self._stubs: - self._stubs['refresh_subscription'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/RefreshSubscription', - request_serializer=analyticshub.RefreshSubscriptionRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['refresh_subscription'] - - @property - def get_subscription(self) -> Callable[ - [analyticshub.GetSubscriptionRequest], - analyticshub.Subscription]: - r"""Return a callable for the get subscription method over gRPC. - - Gets the details of a Subscription. - - Returns: - Callable[[~.GetSubscriptionRequest], - ~.Subscription]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_subscription' not in self._stubs: - self._stubs['get_subscription'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/GetSubscription', - request_serializer=analyticshub.GetSubscriptionRequest.serialize, - response_deserializer=analyticshub.Subscription.deserialize, - ) - return self._stubs['get_subscription'] - - @property - def list_subscriptions(self) -> Callable[ - [analyticshub.ListSubscriptionsRequest], - analyticshub.ListSubscriptionsResponse]: - r"""Return a callable for the list subscriptions method over gRPC. - - Lists all subscriptions in a given project and - location. - - Returns: - Callable[[~.ListSubscriptionsRequest], - ~.ListSubscriptionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_subscriptions' not in self._stubs: - self._stubs['list_subscriptions'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/ListSubscriptions', - request_serializer=analyticshub.ListSubscriptionsRequest.serialize, - response_deserializer=analyticshub.ListSubscriptionsResponse.deserialize, - ) - return self._stubs['list_subscriptions'] - - @property - def list_shared_resource_subscriptions(self) -> Callable[ - [analyticshub.ListSharedResourceSubscriptionsRequest], - analyticshub.ListSharedResourceSubscriptionsResponse]: - r"""Return a callable for the list shared resource - subscriptions method over gRPC. - - Lists all subscriptions on a given Data Exchange or - Listing. - - Returns: - Callable[[~.ListSharedResourceSubscriptionsRequest], - ~.ListSharedResourceSubscriptionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_shared_resource_subscriptions' not in self._stubs: - self._stubs['list_shared_resource_subscriptions'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/ListSharedResourceSubscriptions', - request_serializer=analyticshub.ListSharedResourceSubscriptionsRequest.serialize, - response_deserializer=analyticshub.ListSharedResourceSubscriptionsResponse.deserialize, - ) - return self._stubs['list_shared_resource_subscriptions'] - - @property - def revoke_subscription(self) -> Callable[ - [analyticshub.RevokeSubscriptionRequest], - analyticshub.RevokeSubscriptionResponse]: - r"""Return a callable for the revoke subscription method over gRPC. - - Revokes a given subscription. - - Returns: - Callable[[~.RevokeSubscriptionRequest], - ~.RevokeSubscriptionResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'revoke_subscription' not in self._stubs: - self._stubs['revoke_subscription'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/RevokeSubscription', - request_serializer=analyticshub.RevokeSubscriptionRequest.serialize, - response_deserializer=analyticshub.RevokeSubscriptionResponse.deserialize, - ) - return self._stubs['revoke_subscription'] - - @property - def delete_subscription(self) -> Callable[ - [analyticshub.DeleteSubscriptionRequest], - operations_pb2.Operation]: - r"""Return a callable for the delete subscription method over gRPC. - - Deletes a subscription. - - Returns: - Callable[[~.DeleteSubscriptionRequest], - ~.Operation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_subscription' not in self._stubs: - self._stubs['delete_subscription'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/DeleteSubscription', - request_serializer=analyticshub.DeleteSubscriptionRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_subscription'] - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - - Gets the IAM policy. - - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_iam_policy' not in self._stubs: - self._stubs['get_iam_policy'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/GetIamPolicy', - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['get_iam_policy'] - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. 
- - Sets the IAM policy. - - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'set_iam_policy' not in self._stubs: - self._stubs['set_iam_policy'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/SetIamPolicy', - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['set_iam_policy'] - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - iam_policy_pb2.TestIamPermissionsResponse]: - r"""Return a callable for the test iam permissions method over gRPC. - - Returns the permissions that a caller has. - - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'test_iam_permissions' not in self._stubs: - self._stubs['test_iam_permissions'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/TestIamPermissions', - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs['test_iam_permissions'] - - def close(self): - self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'AnalyticsHubServiceGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/transports/grpc_asyncio.py deleted file mode 100644 index 5a8d3bc69b8e..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,1062 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
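-#
-# This module mirrors ``grpc.py`` for asyncio: ``aio.Channel`` replaces
-# ``grpc.Channel``, every RPC property returns an Awaitable-producing
-# callable, and request/response logging is attached as an aio unary-unary
-# interceptor instead of a wrapped channel. A minimal usage sketch, assuming
-# application-default credentials are available (illustrative only; resource
-# names are placeholders):
-#
-#     transport = AnalyticsHubServiceGrpcAsyncIOTransport()
-#     response = await transport.list_data_exchanges(
-#         analyticshub.ListDataExchangesRequest(parent="projects/p/locations/l")
-#     )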
-#
-import inspect
-import json
-import pickle
-import logging as std_logging
-import warnings
-from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
-
-from google.api_core import gapic_v1
-from google.api_core import grpc_helpers_async
-from google.api_core import exceptions as core_exceptions
-from google.api_core import retry_async as retries
-from google.api_core import operations_v1
-from google.auth import credentials as ga_credentials  # type: ignore
-from google.auth.transport.grpc import SslCredentials  # type: ignore
-from google.protobuf.json_format import MessageToJson
-import google.protobuf.message
-
-import grpc  # type: ignore
-import proto  # type: ignore
-from grpc.experimental import aio  # type: ignore
-
-from google.cloud.bigquery_analyticshub_v1.types import analyticshub
-from google.iam.v1 import iam_policy_pb2  # type: ignore
-from google.iam.v1 import policy_pb2  # type: ignore
-from google.longrunning import operations_pb2  # type: ignore
-from google.protobuf import empty_pb2  # type: ignore
-from .base import AnalyticsHubServiceTransport, DEFAULT_CLIENT_INFO
-from .grpc import AnalyticsHubServiceGrpcTransport
-
-try:
-    from google.api_core import client_logging  # type: ignore
-    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
-except ImportError:  # pragma: NO COVER
-    CLIENT_LOGGING_SUPPORTED = False
-
-_LOGGER = std_logging.getLogger(__name__)
-
-
-class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor):  # pragma: NO COVER
-    async def intercept_unary_unary(self, continuation, client_call_details, request):
-        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG)
-        if logging_enabled:  # pragma: NO COVER
-            request_metadata = client_call_details.metadata
-            if isinstance(request, proto.Message):
-                request_payload = type(request).to_json(request)
-            elif isinstance(request, google.protobuf.message.Message):
-                request_payload = MessageToJson(request)
-            else:
-                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
-
-            request_metadata = {
-                key: value.decode("utf-8") if isinstance(value, bytes) else value
-                for key, value in request_metadata
-            }
-            grpc_request = {
-                "payload": request_payload,
-                "requestMethod": "grpc",
-                "metadata": dict(request_metadata),
-            }
-            _LOGGER.debug(
-                f"Sending request for {client_call_details.method}",
-                extra = {
-                    "serviceName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService",
-                    "rpcName": str(client_call_details.method),
-                    "request": grpc_request,
-                    "metadata": grpc_request["metadata"],
-                },
-            )
-        response = await continuation(client_call_details, request)
-        if logging_enabled:  # pragma: NO COVER
-            response_metadata = await response.trailing_metadata()
-            # Convert the gRPC trailing metadata (a ``grpc.aio`` ``Metadata`` object) to a dict of strings
-            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
-            result = await response
-            if isinstance(result, proto.Message):
-                response_payload = type(result).to_json(result)
-            elif isinstance(result, google.protobuf.message.Message):
-                response_payload = MessageToJson(result)
-            else:
-                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
-            grpc_response = {
-                "payload": response_payload,
-                "metadata": metadata,
-                "status": "OK",
-            }
-            _LOGGER.debug(
-                f"Received response to rpc {client_call_details.method}.",
-                extra = {
-                    "serviceName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService",
-                    "rpcName": str(client_call_details.method),
-                    "response": grpc_response,
-                    "metadata": grpc_response["metadata"],
-                },
-            )
-        return response
-
-
-class AnalyticsHubServiceGrpcAsyncIOTransport(AnalyticsHubServiceTransport):
-    """gRPC AsyncIO backend transport for AnalyticsHubService.
-
-    The ``AnalyticsHubService`` API facilitates data sharing within and
-    across organizations. It allows data providers to publish listings
-    that reference shared datasets. With Analytics Hub, users can
-    discover and search for listings that they have access to.
-    Subscribers can view and subscribe to listings. When you subscribe
-    to a listing, Analytics Hub creates a linked dataset in your
-    project.
-
-    This class defines the same methods as the primary client, so the
-    primary client can load the underlying transport implementation
-    and call it.
-
-    It sends protocol buffers over the wire using gRPC (which is built on
-    top of HTTP/2); the ``grpcio`` package must be installed.
-    """
-
-    _grpc_channel: aio.Channel
-    _stubs: Dict[str, Callable] = {}
-
-    @classmethod
-    def create_channel(cls,
-                       host: str = 'analyticshub.googleapis.com',
-                       credentials: Optional[ga_credentials.Credentials] = None,
-                       credentials_file: Optional[str] = None,
-                       scopes: Optional[Sequence[str]] = None,
-                       quota_project_id: Optional[str] = None,
-                       **kwargs) -> aio.Channel:
-        """Create and return a gRPC AsyncIO channel object.
-        Args:
-            host (Optional[str]): The host for the channel to use.
-            credentials (Optional[~.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify this application to the service. If
-                none are specified, the client will attempt to ascertain
-                the credentials from the environment.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            kwargs (Optional[dict]): Keyword arguments, which are passed to the
-                channel creation.
-        Returns:
-            aio.Channel: A gRPC AsyncIO channel object.
-        """
-
-        return grpc_helpers_async.create_channel(
-            host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            quota_project_id=quota_project_id,
-            default_scopes=cls.AUTH_SCOPES,
-            scopes=scopes,
-            default_host=cls.DEFAULT_HOST,
-            **kwargs
-        )
-
-    def __init__(self, *,
-            host: str = 'analyticshub.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None,
-            api_mtls_endpoint: Optional[str] = None,
-            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
-            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            api_audience: Optional[str] = None,
-            ) -> None:
-        """Instantiate the transport.
-
-        Args:
-            host (Optional[str]):
-                 The hostname to connect to (default: 'analyticshub.googleapis.com').
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-                This argument is ignored if a ``channel`` instance is provided.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if a ``channel`` instance is provided.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]):
-                A ``Channel`` instance through which to make calls, or a Callable
-                that constructs and returns one. If set to None, ``self.create_channel``
-                is used to create the channel. If a Callable is given, it will be called
-                with the same arguments as used in ``self.create_channel``.
-            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
-                If provided, it overrides the ``host`` argument and tries to create
-                a mutual TLS channel with client SSL credentials from
-                ``client_cert_source`` or application default SSL credentials.
-            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                Deprecated. A callback to provide client SSL certificate bytes and
-                private key bytes, both in PEM format. It is ignored if
-                ``api_mtls_endpoint`` is None.
-            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
-                for the grpc channel. It is ignored if a ``channel`` instance is provided.
-            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                A callback to provide client certificate bytes and private key bytes,
-                both in PEM format. It is used to configure a mutual TLS channel. It is
-                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
-                be used for service account credentials.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
-            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
-                and ``credentials_file`` are passed.
-        """
-        self._grpc_channel = None
-        self._ssl_channel_credentials = ssl_channel_credentials
-        self._stubs: Dict[str, Callable] = {}
-        self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None
-
-        if api_mtls_endpoint:
-            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
-        if client_cert_source:
-            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
-
-        if isinstance(channel, aio.Channel):
-            # Ignore credentials if a channel was passed.
-            credentials = None
-            self._ignore_credentials = True
-            # If a channel was explicitly provided, set it.
-            self._grpc_channel = channel
-            self._ssl_channel_credentials = None
-        else:
-            if api_mtls_endpoint:
-                host = api_mtls_endpoint
-
-                # Create SSL credentials with client_cert_source or application
-                # default SSL credentials.
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def operations_client(self) -> operations_v1.OperationsAsyncClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Quick check: Only create a new client if we do not already have one. - if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self._logged_channel - ) - - # Return the client from cache. - return self._operations_client - - @property - def list_data_exchanges(self) -> Callable[ - [analyticshub.ListDataExchangesRequest], - Awaitable[analyticshub.ListDataExchangesResponse]]: - r"""Return a callable for the list data exchanges method over gRPC. - - Lists all data exchanges in a given project and - location. - - Returns: - Callable[[~.ListDataExchangesRequest], - Awaitable[~.ListDataExchangesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
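-        # Identical to the sync transport's property of the same name, except
-        # that the cached stub, once invoked, returns an awaitable call object
-        # that must be awaited to obtain the response.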
- if 'list_data_exchanges' not in self._stubs: - self._stubs['list_data_exchanges'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/ListDataExchanges', - request_serializer=analyticshub.ListDataExchangesRequest.serialize, - response_deserializer=analyticshub.ListDataExchangesResponse.deserialize, - ) - return self._stubs['list_data_exchanges'] - - @property - def list_org_data_exchanges(self) -> Callable[ - [analyticshub.ListOrgDataExchangesRequest], - Awaitable[analyticshub.ListOrgDataExchangesResponse]]: - r"""Return a callable for the list org data exchanges method over gRPC. - - Lists all data exchanges from projects in a given - organization and location. - - Returns: - Callable[[~.ListOrgDataExchangesRequest], - Awaitable[~.ListOrgDataExchangesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_org_data_exchanges' not in self._stubs: - self._stubs['list_org_data_exchanges'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/ListOrgDataExchanges', - request_serializer=analyticshub.ListOrgDataExchangesRequest.serialize, - response_deserializer=analyticshub.ListOrgDataExchangesResponse.deserialize, - ) - return self._stubs['list_org_data_exchanges'] - - @property - def get_data_exchange(self) -> Callable[ - [analyticshub.GetDataExchangeRequest], - Awaitable[analyticshub.DataExchange]]: - r"""Return a callable for the get data exchange method over gRPC. - - Gets the details of a data exchange. - - Returns: - Callable[[~.GetDataExchangeRequest], - Awaitable[~.DataExchange]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_data_exchange' not in self._stubs: - self._stubs['get_data_exchange'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/GetDataExchange', - request_serializer=analyticshub.GetDataExchangeRequest.serialize, - response_deserializer=analyticshub.DataExchange.deserialize, - ) - return self._stubs['get_data_exchange'] - - @property - def create_data_exchange(self) -> Callable[ - [analyticshub.CreateDataExchangeRequest], - Awaitable[analyticshub.DataExchange]]: - r"""Return a callable for the create data exchange method over gRPC. - - Creates a new data exchange. - - Returns: - Callable[[~.CreateDataExchangeRequest], - Awaitable[~.DataExchange]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_data_exchange' not in self._stubs: - self._stubs['create_data_exchange'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/CreateDataExchange', - request_serializer=analyticshub.CreateDataExchangeRequest.serialize, - response_deserializer=analyticshub.DataExchange.deserialize, - ) - return self._stubs['create_data_exchange'] - - @property - def update_data_exchange(self) -> Callable[ - [analyticshub.UpdateDataExchangeRequest], - Awaitable[analyticshub.DataExchange]]: - r"""Return a callable for the update data exchange method over gRPC. - - Updates an existing data exchange. - - Returns: - Callable[[~.UpdateDataExchangeRequest], - Awaitable[~.DataExchange]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_data_exchange' not in self._stubs: - self._stubs['update_data_exchange'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/UpdateDataExchange', - request_serializer=analyticshub.UpdateDataExchangeRequest.serialize, - response_deserializer=analyticshub.DataExchange.deserialize, - ) - return self._stubs['update_data_exchange'] - - @property - def delete_data_exchange(self) -> Callable[ - [analyticshub.DeleteDataExchangeRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete data exchange method over gRPC. - - Deletes an existing data exchange. - - Returns: - Callable[[~.DeleteDataExchangeRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_data_exchange' not in self._stubs: - self._stubs['delete_data_exchange'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/DeleteDataExchange', - request_serializer=analyticshub.DeleteDataExchangeRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_data_exchange'] - - @property - def list_listings(self) -> Callable[ - [analyticshub.ListListingsRequest], - Awaitable[analyticshub.ListListingsResponse]]: - r"""Return a callable for the list listings method over gRPC. - - Lists all listings in a given project and location. - - Returns: - Callable[[~.ListListingsRequest], - Awaitable[~.ListListingsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_listings' not in self._stubs: - self._stubs['list_listings'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/ListListings', - request_serializer=analyticshub.ListListingsRequest.serialize, - response_deserializer=analyticshub.ListListingsResponse.deserialize, - ) - return self._stubs['list_listings'] - - @property - def get_listing(self) -> Callable[ - [analyticshub.GetListingRequest], - Awaitable[analyticshub.Listing]]: - r"""Return a callable for the get listing method over gRPC. 
- - Gets the details of a listing. - - Returns: - Callable[[~.GetListingRequest], - Awaitable[~.Listing]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_listing' not in self._stubs: - self._stubs['get_listing'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/GetListing', - request_serializer=analyticshub.GetListingRequest.serialize, - response_deserializer=analyticshub.Listing.deserialize, - ) - return self._stubs['get_listing'] - - @property - def create_listing(self) -> Callable[ - [analyticshub.CreateListingRequest], - Awaitable[analyticshub.Listing]]: - r"""Return a callable for the create listing method over gRPC. - - Creates a new listing. - - Returns: - Callable[[~.CreateListingRequest], - Awaitable[~.Listing]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_listing' not in self._stubs: - self._stubs['create_listing'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/CreateListing', - request_serializer=analyticshub.CreateListingRequest.serialize, - response_deserializer=analyticshub.Listing.deserialize, - ) - return self._stubs['create_listing'] - - @property - def update_listing(self) -> Callable[ - [analyticshub.UpdateListingRequest], - Awaitable[analyticshub.Listing]]: - r"""Return a callable for the update listing method over gRPC. - - Updates an existing listing. - - Returns: - Callable[[~.UpdateListingRequest], - Awaitable[~.Listing]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_listing' not in self._stubs: - self._stubs['update_listing'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/UpdateListing', - request_serializer=analyticshub.UpdateListingRequest.serialize, - response_deserializer=analyticshub.Listing.deserialize, - ) - return self._stubs['update_listing'] - - @property - def delete_listing(self) -> Callable[ - [analyticshub.DeleteListingRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete listing method over gRPC. - - Deletes a listing. - - Returns: - Callable[[~.DeleteListingRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
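-        # Illustrative sketch (the ``transport`` name and resource path are
-        # assumed, not part of the generated surface): a caller awaits the
-        # cached stub directly, e.g.
-        #   await transport.delete_listing(analyticshub.DeleteListingRequest(name=...))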
- if 'delete_listing' not in self._stubs: - self._stubs['delete_listing'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/DeleteListing', - request_serializer=analyticshub.DeleteListingRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_listing'] - - @property - def subscribe_listing(self) -> Callable[ - [analyticshub.SubscribeListingRequest], - Awaitable[analyticshub.SubscribeListingResponse]]: - r"""Return a callable for the subscribe listing method over gRPC. - - Subscribes to a listing. - - Currently, with Analytics Hub, you can create listings - that reference only BigQuery datasets. - Upon subscription to a listing for a BigQuery dataset, - Analytics Hub creates a linked dataset in the - subscriber's project. - - Returns: - Callable[[~.SubscribeListingRequest], - Awaitable[~.SubscribeListingResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'subscribe_listing' not in self._stubs: - self._stubs['subscribe_listing'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/SubscribeListing', - request_serializer=analyticshub.SubscribeListingRequest.serialize, - response_deserializer=analyticshub.SubscribeListingResponse.deserialize, - ) - return self._stubs['subscribe_listing'] - - @property - def subscribe_data_exchange(self) -> Callable[ - [analyticshub.SubscribeDataExchangeRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the subscribe data exchange method over gRPC. - - Creates a Subscription to a Data Exchange. This is a - long-running operation as it will create one or more - linked datasets. - - Returns: - Callable[[~.SubscribeDataExchangeRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'subscribe_data_exchange' not in self._stubs: - self._stubs['subscribe_data_exchange'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/SubscribeDataExchange', - request_serializer=analyticshub.SubscribeDataExchangeRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['subscribe_data_exchange'] - - @property - def refresh_subscription(self) -> Callable[ - [analyticshub.RefreshSubscriptionRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the refresh subscription method over gRPC. - - Refreshes a Subscription to a Data Exchange. A Data - Exchange can become stale when a publisher adds or - removes data. This is a long-running operation as it may - create many linked datasets. - - Returns: - Callable[[~.RefreshSubscriptionRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
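-        # Long-running RPCs on this transport resolve to raw
-        # ``operations_pb2.Operation`` messages; the ``operations_client``
-        # property exposes an ``operations_v1.OperationsAsyncClient`` over the
-        # same logged channel for polling them.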
- if 'refresh_subscription' not in self._stubs: - self._stubs['refresh_subscription'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/RefreshSubscription', - request_serializer=analyticshub.RefreshSubscriptionRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['refresh_subscription'] - - @property - def get_subscription(self) -> Callable[ - [analyticshub.GetSubscriptionRequest], - Awaitable[analyticshub.Subscription]]: - r"""Return a callable for the get subscription method over gRPC. - - Gets the details of a Subscription. - - Returns: - Callable[[~.GetSubscriptionRequest], - Awaitable[~.Subscription]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_subscription' not in self._stubs: - self._stubs['get_subscription'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/GetSubscription', - request_serializer=analyticshub.GetSubscriptionRequest.serialize, - response_deserializer=analyticshub.Subscription.deserialize, - ) - return self._stubs['get_subscription'] - - @property - def list_subscriptions(self) -> Callable[ - [analyticshub.ListSubscriptionsRequest], - Awaitable[analyticshub.ListSubscriptionsResponse]]: - r"""Return a callable for the list subscriptions method over gRPC. - - Lists all subscriptions in a given project and - location. - - Returns: - Callable[[~.ListSubscriptionsRequest], - Awaitable[~.ListSubscriptionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_subscriptions' not in self._stubs: - self._stubs['list_subscriptions'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/ListSubscriptions', - request_serializer=analyticshub.ListSubscriptionsRequest.serialize, - response_deserializer=analyticshub.ListSubscriptionsResponse.deserialize, - ) - return self._stubs['list_subscriptions'] - - @property - def list_shared_resource_subscriptions(self) -> Callable[ - [analyticshub.ListSharedResourceSubscriptionsRequest], - Awaitable[analyticshub.ListSharedResourceSubscriptionsResponse]]: - r"""Return a callable for the list shared resource - subscriptions method over gRPC. - - Lists all subscriptions on a given Data Exchange or - Listing. - - Returns: - Callable[[~.ListSharedResourceSubscriptionsRequest], - Awaitable[~.ListSharedResourceSubscriptionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_shared_resource_subscriptions' not in self._stubs: - self._stubs['list_shared_resource_subscriptions'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/ListSharedResourceSubscriptions', - request_serializer=analyticshub.ListSharedResourceSubscriptionsRequest.serialize, - response_deserializer=analyticshub.ListSharedResourceSubscriptionsResponse.deserialize, - ) - return self._stubs['list_shared_resource_subscriptions'] - - @property - def revoke_subscription(self) -> Callable[ - [analyticshub.RevokeSubscriptionRequest], - Awaitable[analyticshub.RevokeSubscriptionResponse]]: - r"""Return a callable for the revoke subscription method over gRPC. - - Revokes a given subscription. - - Returns: - Callable[[~.RevokeSubscriptionRequest], - Awaitable[~.RevokeSubscriptionResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'revoke_subscription' not in self._stubs: - self._stubs['revoke_subscription'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/RevokeSubscription', - request_serializer=analyticshub.RevokeSubscriptionRequest.serialize, - response_deserializer=analyticshub.RevokeSubscriptionResponse.deserialize, - ) - return self._stubs['revoke_subscription'] - - @property - def delete_subscription(self) -> Callable[ - [analyticshub.DeleteSubscriptionRequest], - Awaitable[operations_pb2.Operation]]: - r"""Return a callable for the delete subscription method over gRPC. - - Deletes a subscription. - - Returns: - Callable[[~.DeleteSubscriptionRequest], - Awaitable[~.Operation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_subscription' not in self._stubs: - self._stubs['delete_subscription'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/DeleteSubscription', - request_serializer=analyticshub.DeleteSubscriptionRequest.serialize, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs['delete_subscription'] - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the get iam policy method over gRPC. - - Gets the IAM policy. - - Returns: - Callable[[~.GetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
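-        # The IAM request/response types are plain protobuf messages (not
-        # proto-plus), so the stubs below use ``SerializeToString`` /
-        # ``FromString`` instead of ``serialize``/``deserialize``.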
- if 'get_iam_policy' not in self._stubs: - self._stubs['get_iam_policy'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/GetIamPolicy', - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['get_iam_policy'] - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the set iam policy method over gRPC. - - Sets the IAM policy. - - Returns: - Callable[[~.SetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'set_iam_policy' not in self._stubs: - self._stubs['set_iam_policy'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/SetIamPolicy', - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['set_iam_policy'] - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Awaitable[iam_policy_pb2.TestIamPermissionsResponse]]: - r"""Return a callable for the test iam permissions method over gRPC. - - Returns the permissions that a caller has. - - Returns: - Callable[[~.TestIamPermissionsRequest], - Awaitable[~.TestIamPermissionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'test_iam_permissions' not in self._stubs: - self._stubs['test_iam_permissions'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.analyticshub.v1.AnalyticsHubService/TestIamPermissions', - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs['test_iam_permissions'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.list_data_exchanges: self._wrap_method( - self.list_data_exchanges, - default_timeout=None, - client_info=client_info, - ), - self.list_org_data_exchanges: self._wrap_method( - self.list_org_data_exchanges, - default_timeout=None, - client_info=client_info, - ), - self.get_data_exchange: self._wrap_method( - self.get_data_exchange, - default_timeout=None, - client_info=client_info, - ), - self.create_data_exchange: self._wrap_method( - self.create_data_exchange, - default_timeout=None, - client_info=client_info, - ), - self.update_data_exchange: self._wrap_method( - self.update_data_exchange, - default_timeout=None, - client_info=client_info, - ), - self.delete_data_exchange: self._wrap_method( - self.delete_data_exchange, - default_timeout=None, - client_info=client_info, - ), - self.list_listings: self._wrap_method( - self.list_listings, - default_timeout=None, - client_info=client_info, - ), - self.get_listing: self._wrap_method( - self.get_listing, - default_timeout=None, - client_info=client_info, - ), - self.create_listing: self._wrap_method( - self.create_listing, - default_timeout=None, - client_info=client_info, - ), - self.update_listing: self._wrap_method( - self.update_listing, - default_timeout=None, - client_info=client_info, - ), - self.delete_listing: self._wrap_method( - self.delete_listing, - default_timeout=None, - client_info=client_info, - ), - self.subscribe_listing: self._wrap_method( - self.subscribe_listing, - default_timeout=None, - client_info=client_info, - ), - self.subscribe_data_exchange: self._wrap_method( - self.subscribe_data_exchange, - default_timeout=None, - client_info=client_info, - ), - self.refresh_subscription: self._wrap_method( - self.refresh_subscription, - default_timeout=None, - client_info=client_info, - ), - self.get_subscription: self._wrap_method( - self.get_subscription, - default_timeout=None, - client_info=client_info, - ), - self.list_subscriptions: self._wrap_method( - self.list_subscriptions, - default_timeout=None, - client_info=client_info, - ), - self.list_shared_resource_subscriptions: self._wrap_method( - self.list_shared_resource_subscriptions, - default_timeout=None, - client_info=client_info, - ), - self.revoke_subscription: self._wrap_method( - self.revoke_subscription, - default_timeout=None, - client_info=client_info, - ), - self.delete_subscription: self._wrap_method( - self.delete_subscription, - default_timeout=None, - client_info=client_info, - ), - self.get_iam_policy: self._wrap_method( - self.get_iam_policy, - default_timeout=None, - client_info=client_info, - ), - self.set_iam_policy: self._wrap_method( - self.set_iam_policy, - default_timeout=None, - client_info=client_info, - ), - self.test_iam_permissions: self._wrap_method( - self.test_iam_permissions, - default_timeout=None, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = 
self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - -__all__ = ( - 'AnalyticsHubServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/types/__init__.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/types/__init__.py deleted file mode 100644 index ddbf4c9a5a48..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/types/__init__.py +++ /dev/null @@ -1,96 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .analyticshub import ( - CreateDataExchangeRequest, - CreateListingRequest, - DataExchange, - DataProvider, - DeleteDataExchangeRequest, - DeleteListingRequest, - DeleteSubscriptionRequest, - DestinationDataset, - DestinationDatasetReference, - GetDataExchangeRequest, - GetListingRequest, - GetSubscriptionRequest, - ListDataExchangesRequest, - ListDataExchangesResponse, - Listing, - ListListingsRequest, - ListListingsResponse, - ListOrgDataExchangesRequest, - ListOrgDataExchangesResponse, - ListSharedResourceSubscriptionsRequest, - ListSharedResourceSubscriptionsResponse, - ListSubscriptionsRequest, - ListSubscriptionsResponse, - OperationMetadata, - Publisher, - RefreshSubscriptionRequest, - RefreshSubscriptionResponse, - RevokeSubscriptionRequest, - RevokeSubscriptionResponse, - SharingEnvironmentConfig, - SubscribeDataExchangeRequest, - SubscribeDataExchangeResponse, - SubscribeListingRequest, - SubscribeListingResponse, - Subscription, - UpdateDataExchangeRequest, - UpdateListingRequest, - DiscoveryType, -) - -__all__ = ( - 'CreateDataExchangeRequest', - 'CreateListingRequest', - 'DataExchange', - 'DataProvider', - 'DeleteDataExchangeRequest', - 'DeleteListingRequest', - 'DeleteSubscriptionRequest', - 'DestinationDataset', - 'DestinationDatasetReference', - 'GetDataExchangeRequest', - 'GetListingRequest', - 'GetSubscriptionRequest', - 'ListDataExchangesRequest', - 'ListDataExchangesResponse', - 'Listing', - 'ListListingsRequest', - 'ListListingsResponse', - 'ListOrgDataExchangesRequest', - 'ListOrgDataExchangesResponse', - 'ListSharedResourceSubscriptionsRequest', - 'ListSharedResourceSubscriptionsResponse', - 'ListSubscriptionsRequest', - 'ListSubscriptionsResponse', - 'OperationMetadata', - 'Publisher', - 'RefreshSubscriptionRequest', - 'RefreshSubscriptionResponse', - 'RevokeSubscriptionRequest', - 'RevokeSubscriptionResponse', - 'SharingEnvironmentConfig', - 'SubscribeDataExchangeRequest', - 'SubscribeDataExchangeResponse', - 'SubscribeListingRequest', - 'SubscribeListingResponse', - 'Subscription', - 'UpdateDataExchangeRequest', - 'UpdateListingRequest', - 'DiscoveryType', -) diff --git 
a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/types/analyticshub.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/types/analyticshub.py deleted file mode 100644 index 9112ecce6580..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/google/cloud/bigquery_analyticshub_v1/types/analyticshub.py +++ /dev/null @@ -1,1607 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.protobuf import wrappers_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.bigquery.analyticshub.v1', - manifest={ - 'DiscoveryType', - 'DataExchange', - 'SharingEnvironmentConfig', - 'DataProvider', - 'Publisher', - 'DestinationDatasetReference', - 'DestinationDataset', - 'Listing', - 'Subscription', - 'ListDataExchangesRequest', - 'ListDataExchangesResponse', - 'ListOrgDataExchangesRequest', - 'ListOrgDataExchangesResponse', - 'GetDataExchangeRequest', - 'CreateDataExchangeRequest', - 'UpdateDataExchangeRequest', - 'DeleteDataExchangeRequest', - 'ListListingsRequest', - 'ListListingsResponse', - 'GetListingRequest', - 'CreateListingRequest', - 'UpdateListingRequest', - 'DeleteListingRequest', - 'SubscribeListingRequest', - 'SubscribeListingResponse', - 'SubscribeDataExchangeRequest', - 'SubscribeDataExchangeResponse', - 'RefreshSubscriptionRequest', - 'RefreshSubscriptionResponse', - 'GetSubscriptionRequest', - 'ListSubscriptionsRequest', - 'ListSubscriptionsResponse', - 'ListSharedResourceSubscriptionsRequest', - 'ListSharedResourceSubscriptionsResponse', - 'RevokeSubscriptionRequest', - 'RevokeSubscriptionResponse', - 'DeleteSubscriptionRequest', - 'OperationMetadata', - }, -) - - -class DiscoveryType(proto.Enum): - r"""Specifies the type of discovery on the discovery page. Note - that this does not control the visibility of the - exchange/listing which is defined by IAM permission. - - Values: - DISCOVERY_TYPE_UNSPECIFIED (0): - Unspecified. Defaults to DISCOVERY_TYPE_PRIVATE. - DISCOVERY_TYPE_PRIVATE (1): - The Data exchange/listing can be discovered - in the 'Private' results list. - DISCOVERY_TYPE_PUBLIC (2): - The Data exchange/listing can be discovered - in the 'Public' results list. - """ - DISCOVERY_TYPE_UNSPECIFIED = 0 - DISCOVERY_TYPE_PRIVATE = 1 - DISCOVERY_TYPE_PUBLIC = 2 - - -class DataExchange(proto.Message): - r"""A data exchange is a container that lets you share data. - Along with the descriptive information about the data exchange, - it contains listings that reference shared datasets. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Output only. 
The resource name of the data exchange. e.g. - ``projects/myproject/locations/US/dataExchanges/123``. - display_name (str): - Required. Human-readable display name of the data exchange. - The display name must contain only Unicode letters, numbers - (0-9), underscores (_), dashes (-), spaces ( ), ampersands - (&) and must not start or end with spaces. Default value is - an empty string. Max length: 63 bytes. - description (str): - Optional. Description of the data exchange. - The description must not contain Unicode - non-characters as well as C0 and C1 control - codes except tabs (HT), new lines (LF), carriage - returns (CR), and page breaks (FF). Default - value is an empty string. - Max length: 2000 bytes. - primary_contact (str): - Optional. Email or URL of the primary point - of contact of the data exchange. Max Length: - 1000 bytes. - documentation (str): - Optional. Documentation describing the data - exchange. - listing_count (int): - Output only. Number of listings contained in - the data exchange. - icon (bytes): - Optional. Base64 encoded image representing - the data exchange. Max Size: 3.0MiB Expected - image dimensions are 512x512 pixels, however the - API only performs validation on size of the - encoded data. Note: For byte fields, the content - of the fields are base64-encoded (which - increases the size of the data by 33-36%) when - using JSON on the wire. - sharing_environment_config (google.cloud.bigquery_analyticshub_v1.types.SharingEnvironmentConfig): - Optional. Configurable data sharing - environment option for a data exchange. - discovery_type (google.cloud.bigquery_analyticshub_v1.types.DiscoveryType): - Optional. Type of discovery on the discovery page for all - the listings under this exchange. Updating this field also - updates (overwrites) the discovery_type field for all the - listings under this exchange. - - This field is a member of `oneof`_ ``_discovery_type``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - description: str = proto.Field( - proto.STRING, - number=3, - ) - primary_contact: str = proto.Field( - proto.STRING, - number=4, - ) - documentation: str = proto.Field( - proto.STRING, - number=5, - ) - listing_count: int = proto.Field( - proto.INT32, - number=6, - ) - icon: bytes = proto.Field( - proto.BYTES, - number=7, - ) - sharing_environment_config: 'SharingEnvironmentConfig' = proto.Field( - proto.MESSAGE, - number=8, - message='SharingEnvironmentConfig', - ) - discovery_type: 'DiscoveryType' = proto.Field( - proto.ENUM, - number=9, - optional=True, - enum='DiscoveryType', - ) - - -class SharingEnvironmentConfig(proto.Message): - r"""Sharing environment is a behavior model for sharing data - within a data exchange. This option is configurable for a data - exchange. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - default_exchange_config (google.cloud.bigquery_analyticshub_v1.types.SharingEnvironmentConfig.DefaultExchangeConfig): - Default Analytics Hub data exchange, used for - secured data sharing. - - This field is a member of `oneof`_ ``environment``. 
- dcr_exchange_config (google.cloud.bigquery_analyticshub_v1.types.SharingEnvironmentConfig.DcrExchangeConfig): - Data Clean Room (DCR), used for privacy-safe - and secured data sharing. - - This field is a member of `oneof`_ ``environment``. - """ - - class DefaultExchangeConfig(proto.Message): - r"""Default Analytics Hub data exchange, used for secured data - sharing. - - """ - - class DcrExchangeConfig(proto.Message): - r"""Data Clean Room (DCR), used for privacy-safe and secured data - sharing. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - single_selected_resource_sharing_restriction (bool): - Output only. If True, this DCR restricts the - contributors to sharing only a single resource - in a Listing, and no two resources may have - the same ID, so if a contributor adds a view - with a conflicting name, the CreateListing API - will reject the request. If False, the data - contributor can publish an entire dataset (as - before). This is not configurable, and by - default, all new DCRs will have the restriction - set to True. - - This field is a member of `oneof`_ ``_single_selected_resource_sharing_restriction``. - single_linked_dataset_per_cleanroom (bool): - Output only. If True, when subscribing to - this DCR, it will create only one linked dataset - containing all resources shared within the - cleanroom. If False, when subscribing to this - DCR, it will create one linked dataset per - listing. This is not configurable, and by - default, all new DCRs will have the restriction - set to True. - - This field is a member of `oneof`_ ``_single_linked_dataset_per_cleanroom``. - """ - - single_selected_resource_sharing_restriction: bool = proto.Field( - proto.BOOL, - number=1, - optional=True, - ) - single_linked_dataset_per_cleanroom: bool = proto.Field( - proto.BOOL, - number=2, - optional=True, - ) - - default_exchange_config: DefaultExchangeConfig = proto.Field( - proto.MESSAGE, - number=1, - oneof='environment', - message=DefaultExchangeConfig, - ) - dcr_exchange_config: DcrExchangeConfig = proto.Field( - proto.MESSAGE, - number=2, - oneof='environment', - message=DcrExchangeConfig, - ) - - -class DataProvider(proto.Message): - r"""Contains details of the data provider. - - Attributes: - name (str): - Optional. Name of the data provider. - primary_contact (str): - Optional. Email or URL of the data provider. - Max Length: 1000 bytes. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - primary_contact: str = proto.Field( - proto.STRING, - number=2, - ) - - -class Publisher(proto.Message): - r"""Contains details of the listing publisher. - - Attributes: - name (str): - Optional. Name of the listing publisher. - primary_contact (str): - Optional. Email or URL of the listing - publisher. Max Length: 1000 bytes. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - primary_contact: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DestinationDatasetReference(proto.Message): - r"""Contains the reference that identifies a destination bigquery - dataset. - - Attributes: - dataset_id (str): - Required. A unique ID for this dataset, without the project - name. The ID must contain only letters (a-z, A-Z), numbers - (0-9), or underscores (_). The maximum length is 1,024 - characters. - project_id (str): - Required. The ID of the project containing - this dataset.
- """ - - dataset_id: str = proto.Field( - proto.STRING, - number=1, - ) - project_id: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DestinationDataset(proto.Message): - r"""Defines the destination bigquery dataset. - - Attributes: - dataset_reference (google.cloud.bigquery_analyticshub_v1.types.DestinationDatasetReference): - Required. A reference that identifies the - destination dataset. - friendly_name (google.protobuf.wrappers_pb2.StringValue): - Optional. A descriptive name for the dataset. - description (google.protobuf.wrappers_pb2.StringValue): - Optional. A user-friendly description of the - dataset. - labels (MutableMapping[str, str]): - Optional. The labels associated with this - dataset. You can use these to organize and group - your datasets. You can set this property when - inserting or updating a dataset. See - https://cloud.google.com/resource-manager/docs/creating-managing-labels - for more information. - location (str): - Required. The geographic location where the - dataset should reside. See - https://cloud.google.com/bigquery/docs/locations - for supported locations. - """ - - dataset_reference: 'DestinationDatasetReference' = proto.Field( - proto.MESSAGE, - number=1, - message='DestinationDatasetReference', - ) - friendly_name: wrappers_pb2.StringValue = proto.Field( - proto.MESSAGE, - number=2, - message=wrappers_pb2.StringValue, - ) - description: wrappers_pb2.StringValue = proto.Field( - proto.MESSAGE, - number=3, - message=wrappers_pb2.StringValue, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=4, - ) - location: str = proto.Field( - proto.STRING, - number=5, - ) - - -class Listing(proto.Message): - r"""A listing is what gets published into a data exchange that a - subscriber can subscribe to. It contains a reference to the data - source along with descriptive information that will help - subscribers find and subscribe the data. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - bigquery_dataset (google.cloud.bigquery_analyticshub_v1.types.Listing.BigQueryDatasetSource): - Required. Shared dataset i.e. BigQuery - dataset source. - - This field is a member of `oneof`_ ``source``. - name (str): - Output only. The resource name of the listing. e.g. - ``projects/myproject/locations/US/dataExchanges/123/listings/456`` - display_name (str): - Required. Human-readable display name of the listing. The - display name must contain only Unicode letters, numbers - (0-9), underscores (_), dashes (-), spaces ( ), ampersands - (&) and can't start or end with spaces. Default value is an - empty string. Max length: 63 bytes. - description (str): - Optional. Short description of the listing. - The description must not contain Unicode - non-characters and C0 and C1 control codes - except tabs (HT), new lines (LF), carriage - returns (CR), and page breaks (FF). Default - value is an empty string. Max length: 2000 - bytes. - primary_contact (str): - Optional. Email or URL of the primary point - of contact of the listing. Max Length: 1000 - bytes. - documentation (str): - Optional. Documentation describing the - listing. - state (google.cloud.bigquery_analyticshub_v1.types.Listing.State): - Output only. Current state of the listing. - icon (bytes): - Optional. Base64 encoded image representing - the listing. 
Max Size: 3.0MiB Expected image - dimensions are 512x512 pixels, however the API - only performs validation on size of the encoded - data. Note: For byte fields, the contents of the - field are base64-encoded (which increases the - size of the data by 33-36%) when using JSON on - the wire. - data_provider (google.cloud.bigquery_analyticshub_v1.types.DataProvider): - Optional. Details of the data provider who - owns the source data. - categories (MutableSequence[google.cloud.bigquery_analyticshub_v1.types.Listing.Category]): - Optional. Categories of the listing. Up to - two categories are allowed. - publisher (google.cloud.bigquery_analyticshub_v1.types.Publisher): - Optional. Details of the publisher who owns - the listing and who can share the source data. - request_access (str): - Optional. Email or URL of the request access - of the listing. Subscribers can use this - reference to request access. Max Length: 1000 - bytes. - restricted_export_config (google.cloud.bigquery_analyticshub_v1.types.Listing.RestrictedExportConfig): - Optional. If set, restricted export - configuration will be propagated and enforced on - the linked dataset. - discovery_type (google.cloud.bigquery_analyticshub_v1.types.DiscoveryType): - Optional. Type of discovery of the listing on - the discovery page. - - This field is a member of `oneof`_ ``_discovery_type``. - """ - class State(proto.Enum): - r"""State of the listing. - - Values: - STATE_UNSPECIFIED (0): - Default value. This value is unused. - ACTIVE (1): - Subscribable state. Users with - dataexchange.listings.subscribe permission can - subscribe to this listing. - """ - STATE_UNSPECIFIED = 0 - ACTIVE = 1 - - class Category(proto.Enum): - r"""Listing categories. - - Values: - CATEGORY_UNSPECIFIED (0): - No description available. - CATEGORY_OTHERS (1): - No description available. - CATEGORY_ADVERTISING_AND_MARKETING (2): - No description available. - CATEGORY_COMMERCE (3): - No description available. - CATEGORY_CLIMATE_AND_ENVIRONMENT (4): - No description available. - CATEGORY_DEMOGRAPHICS (5): - No description available. - CATEGORY_ECONOMICS (6): - No description available. - CATEGORY_EDUCATION (7): - No description available. - CATEGORY_ENERGY (8): - No description available. - CATEGORY_FINANCIAL (9): - No description available. - CATEGORY_GAMING (10): - No description available. - CATEGORY_GEOSPATIAL (11): - No description available. - CATEGORY_HEALTHCARE_AND_LIFE_SCIENCE (12): - No description available. - CATEGORY_MEDIA (13): - No description available. - CATEGORY_PUBLIC_SECTOR (14): - No description available. - CATEGORY_RETAIL (15): - No description available. - CATEGORY_SPORTS (16): - No description available. - CATEGORY_SCIENCE_AND_RESEARCH (17): - No description available. - CATEGORY_TRANSPORTATION_AND_LOGISTICS (18): - No description available. - CATEGORY_TRAVEL_AND_TOURISM (19): - No description available. 
- """ - CATEGORY_UNSPECIFIED = 0 - CATEGORY_OTHERS = 1 - CATEGORY_ADVERTISING_AND_MARKETING = 2 - CATEGORY_COMMERCE = 3 - CATEGORY_CLIMATE_AND_ENVIRONMENT = 4 - CATEGORY_DEMOGRAPHICS = 5 - CATEGORY_ECONOMICS = 6 - CATEGORY_EDUCATION = 7 - CATEGORY_ENERGY = 8 - CATEGORY_FINANCIAL = 9 - CATEGORY_GAMING = 10 - CATEGORY_GEOSPATIAL = 11 - CATEGORY_HEALTHCARE_AND_LIFE_SCIENCE = 12 - CATEGORY_MEDIA = 13 - CATEGORY_PUBLIC_SECTOR = 14 - CATEGORY_RETAIL = 15 - CATEGORY_SPORTS = 16 - CATEGORY_SCIENCE_AND_RESEARCH = 17 - CATEGORY_TRANSPORTATION_AND_LOGISTICS = 18 - CATEGORY_TRAVEL_AND_TOURISM = 19 - - class BigQueryDatasetSource(proto.Message): - r"""A reference to a shared dataset. It is an existing BigQuery dataset - with a collection of objects such as tables and views that you want - to share with subscribers. When subscriber's subscribe to a listing, - Analytics Hub creates a linked dataset in the subscriber's project. - A Linked dataset is an opaque, read-only BigQuery dataset that - serves as a *symbolic link* to a shared dataset. - - Attributes: - dataset (str): - Resource name of the dataset source for this listing. e.g. - ``projects/myproject/datasets/123`` - selected_resources (MutableSequence[google.cloud.bigquery_analyticshub_v1.types.Listing.BigQueryDatasetSource.SelectedResource]): - Optional. Resources in this dataset that are - selectively shared. If this field is empty, then - the entire dataset (all resources) are shared. - This field is only valid for data clean room - exchanges. - restricted_export_policy (google.cloud.bigquery_analyticshub_v1.types.Listing.BigQueryDatasetSource.RestrictedExportPolicy): - Optional. If set, restricted export policy - will be propagated and enforced on the linked - dataset. - """ - - class SelectedResource(proto.Message): - r"""Resource in this dataset that are selectively shared. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - table (str): - Optional. Format: For table: - ``projects/{projectId}/datasets/{datasetId}/tables/{tableId}`` - Example:"projects/test_project/datasets/test_dataset/tables/test_table". - - This field is a member of `oneof`_ ``resource``. - """ - - table: str = proto.Field( - proto.STRING, - number=1, - oneof='resource', - ) - - class RestrictedExportPolicy(proto.Message): - r"""Restricted export policy used to configure restricted export - on linked dataset. - - Attributes: - enabled (google.protobuf.wrappers_pb2.BoolValue): - Optional. If true, enable restricted export. - restrict_direct_table_access (google.protobuf.wrappers_pb2.BoolValue): - Optional. If true, restrict direct table - access (read api/tabledata.list) on linked - table. - restrict_query_result (google.protobuf.wrappers_pb2.BoolValue): - Optional. If true, restrict export of query - result derived from restricted linked dataset - table. 
- """ - - enabled: wrappers_pb2.BoolValue = proto.Field( - proto.MESSAGE, - number=1, - message=wrappers_pb2.BoolValue, - ) - restrict_direct_table_access: wrappers_pb2.BoolValue = proto.Field( - proto.MESSAGE, - number=2, - message=wrappers_pb2.BoolValue, - ) - restrict_query_result: wrappers_pb2.BoolValue = proto.Field( - proto.MESSAGE, - number=3, - message=wrappers_pb2.BoolValue, - ) - - dataset: str = proto.Field( - proto.STRING, - number=1, - ) - selected_resources: MutableSequence['Listing.BigQueryDatasetSource.SelectedResource'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='Listing.BigQueryDatasetSource.SelectedResource', - ) - restricted_export_policy: 'Listing.BigQueryDatasetSource.RestrictedExportPolicy' = proto.Field( - proto.MESSAGE, - number=3, - message='Listing.BigQueryDatasetSource.RestrictedExportPolicy', - ) - - class RestrictedExportConfig(proto.Message): - r"""Restricted export config, used to configure restricted export - on linked dataset. - - Attributes: - enabled (bool): - Optional. If true, enable restricted export. - restrict_direct_table_access (bool): - Output only. If true, restrict direct table - access(read api/tabledata.list) on linked table. - restrict_query_result (bool): - Optional. If true, restrict export of query - result derived from restricted linked dataset - table. - """ - - enabled: bool = proto.Field( - proto.BOOL, - number=3, - ) - restrict_direct_table_access: bool = proto.Field( - proto.BOOL, - number=1, - ) - restrict_query_result: bool = proto.Field( - proto.BOOL, - number=2, - ) - - bigquery_dataset: BigQueryDatasetSource = proto.Field( - proto.MESSAGE, - number=6, - oneof='source', - message=BigQueryDatasetSource, - ) - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - description: str = proto.Field( - proto.STRING, - number=3, - ) - primary_contact: str = proto.Field( - proto.STRING, - number=4, - ) - documentation: str = proto.Field( - proto.STRING, - number=5, - ) - state: State = proto.Field( - proto.ENUM, - number=7, - enum=State, - ) - icon: bytes = proto.Field( - proto.BYTES, - number=8, - ) - data_provider: 'DataProvider' = proto.Field( - proto.MESSAGE, - number=9, - message='DataProvider', - ) - categories: MutableSequence[Category] = proto.RepeatedField( - proto.ENUM, - number=10, - enum=Category, - ) - publisher: 'Publisher' = proto.Field( - proto.MESSAGE, - number=11, - message='Publisher', - ) - request_access: str = proto.Field( - proto.STRING, - number=12, - ) - restricted_export_config: RestrictedExportConfig = proto.Field( - proto.MESSAGE, - number=13, - message=RestrictedExportConfig, - ) - discovery_type: 'DiscoveryType' = proto.Field( - proto.ENUM, - number=14, - optional=True, - enum='DiscoveryType', - ) - - -class Subscription(proto.Message): - r"""A subscription represents a subscribers' access to a - particular set of published data. It contains references to - associated listings, data exchanges, and linked datasets. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - listing (str): - Output only. Resource name of the source - Listing. e.g. 
- projects/123/locations/US/dataExchanges/456/listings/789 - - This field is a member of `oneof`_ ``resource_name``. - data_exchange (str): - Output only. Resource name of the source Data - Exchange. e.g. - projects/123/locations/US/dataExchanges/456 - - This field is a member of `oneof`_ ``resource_name``. - name (str): - Output only. The resource name of the subscription. e.g. - ``projects/myproject/locations/US/subscriptions/123``. - creation_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Timestamp when the subscription - was created. - last_modify_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Timestamp when the subscription - was last modified. - organization_id (str): - Output only. Organization of the project this - subscription belongs to. - organization_display_name (str): - Output only. Display name of the project of - this subscription. - state (google.cloud.bigquery_analyticshub_v1.types.Subscription.State): - Output only. Current state of the - subscription. - linked_dataset_map (MutableMapping[str, google.cloud.bigquery_analyticshub_v1.types.Subscription.LinkedResource]): - Output only. Map of listing resource names to associated - linked resource, e.g. - projects/123/locations/US/dataExchanges/456/listings/789 -> - projects/123/datasets/my_dataset - - For listing-level subscriptions, this is a map of size 1. - Only contains values if state == STATE_ACTIVE. - subscriber_contact (str): - Output only. Email of the subscriber. - """ - class State(proto.Enum): - r"""State of the subscription. - - Values: - STATE_UNSPECIFIED (0): - Default value. This value is unused. - STATE_ACTIVE (1): - This subscription is active and the data is - accessible. - STATE_STALE (2): - The data referenced by this subscription is - out of date and should be refreshed. This can - happen when a data provider adds or removes - datasets. - STATE_INACTIVE (3): - This subscription has been cancelled or - revoked and the data is no longer accessible. - """ - STATE_UNSPECIFIED = 0 - STATE_ACTIVE = 1 - STATE_STALE = 2 - STATE_INACTIVE = 3 - - class LinkedResource(proto.Message): - r"""Reference to a linked resource tracked by this Subscription. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - linked_dataset (str): - Output only. Name of the linked dataset, e.g. - projects/subscriberproject/datasets/linked_dataset - - This field is a member of `oneof`_ ``reference``. 
- """ - - linked_dataset: str = proto.Field( - proto.STRING, - number=1, - oneof='reference', - ) - - listing: str = proto.Field( - proto.STRING, - number=5, - oneof='resource_name', - ) - data_exchange: str = proto.Field( - proto.STRING, - number=6, - oneof='resource_name', - ) - name: str = proto.Field( - proto.STRING, - number=1, - ) - creation_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - last_modify_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - organization_id: str = proto.Field( - proto.STRING, - number=4, - ) - organization_display_name: str = proto.Field( - proto.STRING, - number=10, - ) - state: State = proto.Field( - proto.ENUM, - number=7, - enum=State, - ) - linked_dataset_map: MutableMapping[str, LinkedResource] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=8, - message=LinkedResource, - ) - subscriber_contact: str = proto.Field( - proto.STRING, - number=9, - ) - - -class ListDataExchangesRequest(proto.Message): - r"""Message for requesting the list of data exchanges. - - Attributes: - parent (str): - Required. The parent resource path of the data exchanges. - e.g. ``projects/myproject/locations/US``. - page_size (int): - The maximum number of results to return in a - single response page. Leverage the page tokens - to iterate through the entire collection. - page_token (str): - Page token, returned by a previous call, to - request the next page of results. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListDataExchangesResponse(proto.Message): - r"""Message for response to the list of data exchanges. - - Attributes: - data_exchanges (MutableSequence[google.cloud.bigquery_analyticshub_v1.types.DataExchange]): - The list of data exchanges. - next_page_token (str): - A token to request the next page of results. - """ - - @property - def raw_page(self): - return self - - data_exchanges: MutableSequence['DataExchange'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='DataExchange', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ListOrgDataExchangesRequest(proto.Message): - r"""Message for requesting the list of data exchanges from - projects in an organization and location. - - Attributes: - organization (str): - Required. The organization resource path of the projects - containing DataExchanges. e.g. - ``organizations/myorg/locations/US``. - page_size (int): - The maximum number of results to return in a - single response page. Leverage the page tokens - to iterate through the entire collection. - page_token (str): - Page token, returned by a previous call, to - request the next page of results. - """ - - organization: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListOrgDataExchangesResponse(proto.Message): - r"""Message for response to listing data exchanges in an - organization and location. - - Attributes: - data_exchanges (MutableSequence[google.cloud.bigquery_analyticshub_v1.types.DataExchange]): - The list of data exchanges. - next_page_token (str): - A token to request the next page of results. 
- """ - - @property - def raw_page(self): - return self - - data_exchanges: MutableSequence['DataExchange'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='DataExchange', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class GetDataExchangeRequest(proto.Message): - r"""Message for getting a data exchange. - - Attributes: - name (str): - Required. The resource name of the data exchange. e.g. - ``projects/myproject/locations/US/dataExchanges/123``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateDataExchangeRequest(proto.Message): - r"""Message for creating a data exchange. - - Attributes: - parent (str): - Required. The parent resource path of the data exchange. - e.g. ``projects/myproject/locations/US``. - data_exchange_id (str): - Required. The ID of the data exchange. Must contain only - Unicode letters, numbers (0-9), underscores (_). Should not - use characters that require URL-escaping, or characters - outside of ASCII, spaces. Max length: 100 bytes. - data_exchange (google.cloud.bigquery_analyticshub_v1.types.DataExchange): - Required. The data exchange to create. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - data_exchange_id: str = proto.Field( - proto.STRING, - number=2, - ) - data_exchange: 'DataExchange' = proto.Field( - proto.MESSAGE, - number=3, - message='DataExchange', - ) - - -class UpdateDataExchangeRequest(proto.Message): - r"""Message for updating a data exchange. - - Attributes: - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Field mask specifies the fields to update in the - data exchange resource. The fields specified in the - ``updateMask`` are relative to the resource and are not a - full request. - data_exchange (google.cloud.bigquery_analyticshub_v1.types.DataExchange): - Required. The data exchange to update. - """ - - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=1, - message=field_mask_pb2.FieldMask, - ) - data_exchange: 'DataExchange' = proto.Field( - proto.MESSAGE, - number=2, - message='DataExchange', - ) - - -class DeleteDataExchangeRequest(proto.Message): - r"""Message for deleting a data exchange. - - Attributes: - name (str): - Required. The full name of the data exchange resource that - you want to delete. For example, - ``projects/myproject/locations/US/dataExchanges/123``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListListingsRequest(proto.Message): - r"""Message for requesting the list of listings. - - Attributes: - parent (str): - Required. The parent resource path of the listing. e.g. - ``projects/myproject/locations/US/dataExchanges/123``. - page_size (int): - The maximum number of results to return in a - single response page. Leverage the page tokens - to iterate through the entire collection. - page_token (str): - Page token, returned by a previous call, to - request the next page of results. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListListingsResponse(proto.Message): - r"""Message for response to the list of Listings. - - Attributes: - listings (MutableSequence[google.cloud.bigquery_analyticshub_v1.types.Listing]): - The list of Listing. - next_page_token (str): - A token to request the next page of results. 
- """ - - @property - def raw_page(self): - return self - - listings: MutableSequence['Listing'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Listing', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class GetListingRequest(proto.Message): - r"""Message for getting a listing. - - Attributes: - name (str): - Required. The resource name of the listing. e.g. - ``projects/myproject/locations/US/dataExchanges/123/listings/456``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateListingRequest(proto.Message): - r"""Message for creating a listing. - - Attributes: - parent (str): - Required. The parent resource path of the listing. e.g. - ``projects/myproject/locations/US/dataExchanges/123``. - listing_id (str): - Required. The ID of the listing to create. Must contain only - Unicode letters, numbers (0-9), underscores (_). Should not - use characters that require URL-escaping, or characters - outside of ASCII, spaces. Max length: 100 bytes. - listing (google.cloud.bigquery_analyticshub_v1.types.Listing): - Required. The listing to create. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - listing_id: str = proto.Field( - proto.STRING, - number=2, - ) - listing: 'Listing' = proto.Field( - proto.MESSAGE, - number=3, - message='Listing', - ) - - -class UpdateListingRequest(proto.Message): - r"""Message for updating a Listing. - - Attributes: - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Field mask specifies the fields to update in the - listing resource. The fields specified in the ``updateMask`` - are relative to the resource and are not a full request. - listing (google.cloud.bigquery_analyticshub_v1.types.Listing): - Required. The listing to update. - """ - - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=1, - message=field_mask_pb2.FieldMask, - ) - listing: 'Listing' = proto.Field( - proto.MESSAGE, - number=2, - message='Listing', - ) - - -class DeleteListingRequest(proto.Message): - r"""Message for deleting a listing. - - Attributes: - name (str): - Required. Resource name of the listing to delete. e.g. - ``projects/myproject/locations/US/dataExchanges/123/listings/456``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class SubscribeListingRequest(proto.Message): - r"""Message for subscribing to a listing. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - destination_dataset (google.cloud.bigquery_analyticshub_v1.types.DestinationDataset): - Input only. BigQuery destination dataset to - create for the subscriber. - - This field is a member of `oneof`_ ``destination``. - name (str): - Required. Resource name of the listing that you want to - subscribe to. e.g. - ``projects/myproject/locations/US/dataExchanges/123/listings/456``. - """ - - destination_dataset: 'DestinationDataset' = proto.Field( - proto.MESSAGE, - number=3, - oneof='destination', - message='DestinationDataset', - ) - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class SubscribeListingResponse(proto.Message): - r"""Message for response when you subscribe to a listing. - - Attributes: - subscription (google.cloud.bigquery_analyticshub_v1.types.Subscription): - Subscription object created from this - subscribe action. 
- """ - - subscription: 'Subscription' = proto.Field( - proto.MESSAGE, - number=1, - message='Subscription', - ) - - -class SubscribeDataExchangeRequest(proto.Message): - r"""Message for subscribing to a Data Exchange. - - Attributes: - name (str): - Required. Resource name of the Data Exchange. e.g. - ``projects/publisherproject/locations/US/dataExchanges/123`` - destination (str): - Required. The parent resource path of the Subscription. e.g. - ``projects/subscriberproject/locations/US`` - subscription (str): - Required. Name of the subscription to create. e.g. - ``subscription1`` - subscriber_contact (str): - Email of the subscriber. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - destination: str = proto.Field( - proto.STRING, - number=2, - ) - subscription: str = proto.Field( - proto.STRING, - number=4, - ) - subscriber_contact: str = proto.Field( - proto.STRING, - number=3, - ) - - -class SubscribeDataExchangeResponse(proto.Message): - r"""Message for response when you subscribe to a Data Exchange. - - Attributes: - subscription (google.cloud.bigquery_analyticshub_v1.types.Subscription): - Subscription object created from this - subscribe action. - """ - - subscription: 'Subscription' = proto.Field( - proto.MESSAGE, - number=1, - message='Subscription', - ) - - -class RefreshSubscriptionRequest(proto.Message): - r"""Message for refreshing a subscription. - - Attributes: - name (str): - Required. Resource name of the Subscription to refresh. e.g. - ``projects/subscriberproject/locations/US/subscriptions/123`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class RefreshSubscriptionResponse(proto.Message): - r"""Message for response when you refresh a subscription. - - Attributes: - subscription (google.cloud.bigquery_analyticshub_v1.types.Subscription): - The refreshed subscription resource. - """ - - subscription: 'Subscription' = proto.Field( - proto.MESSAGE, - number=1, - message='Subscription', - ) - - -class GetSubscriptionRequest(proto.Message): - r"""Message for getting a subscription. - - Attributes: - name (str): - Required. Resource name of the subscription. - e.g. projects/123/locations/US/subscriptions/456 - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListSubscriptionsRequest(proto.Message): - r"""Message for listing subscriptions. - - Attributes: - parent (str): - Required. The parent resource path of the - subscription. e.g. - projects/myproject/locations/US - filter (str): - An expression for filtering the results of the request. - Eligible fields for filtering are: - - - ``listing`` - - ``data_exchange`` - - Alternatively, a literal wrapped in double quotes may be - provided. This will be checked for an exact match against - both fields above. - - In all cases, the full Data Exchange or Listing resource - name must be provided. Some example of using filters: - - - data_exchange="projects/myproject/locations/us/dataExchanges/123" - - listing="projects/123/locations/us/dataExchanges/456/listings/789" - - "projects/myproject/locations/us/dataExchanges/123". - page_size (int): - The maximum number of results to return in a - single response page. - page_token (str): - Page token, returned by a previous call. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - filter: str = proto.Field( - proto.STRING, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - page_token: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListSubscriptionsResponse(proto.Message): - r"""Message for response to the listing of subscriptions. - - Attributes: - subscriptions (MutableSequence[google.cloud.bigquery_analyticshub_v1.types.Subscription]): - The list of subscriptions. - next_page_token (str): - Next page token. - """ - - @property - def raw_page(self): - return self - - subscriptions: MutableSequence['Subscription'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Subscription', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ListSharedResourceSubscriptionsRequest(proto.Message): - r"""Message for listing subscriptions of a shared resource. - - Attributes: - resource (str): - Required. Resource name of the requested - target. This resource may be either a Listing or - a DataExchange. e.g. - projects/123/locations/US/dataExchanges/456 OR - e.g. - projects/123/locations/US/dataExchanges/456/listings/789 - include_deleted_subscriptions (bool): - If selected, includes deleted subscriptions - in the response (up to 63 days after deletion). - page_size (int): - The maximum number of results to return in a - single response page. - page_token (str): - Page token, returned by a previous call. - """ - - resource: str = proto.Field( - proto.STRING, - number=1, - ) - include_deleted_subscriptions: bool = proto.Field( - proto.BOOL, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - page_token: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListSharedResourceSubscriptionsResponse(proto.Message): - r"""Message for response to the listing of shared resource - subscriptions. - - Attributes: - shared_resource_subscriptions (MutableSequence[google.cloud.bigquery_analyticshub_v1.types.Subscription]): - The list of subscriptions. - next_page_token (str): - Next page token. - """ - - @property - def raw_page(self): - return self - - shared_resource_subscriptions: MutableSequence['Subscription'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Subscription', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class RevokeSubscriptionRequest(proto.Message): - r"""Message for revoking a subscription. - - Attributes: - name (str): - Required. Resource name of the subscription - to revoke. e.g. - projects/123/locations/US/subscriptions/456 - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class RevokeSubscriptionResponse(proto.Message): - r"""Message for response when you revoke a subscription. - """ - - -class DeleteSubscriptionRequest(proto.Message): - r"""Message for deleting a subscription. - - Attributes: - name (str): - Required. Resource name of the subscription - to delete. e.g. - projects/123/locations/US/subscriptions/456 - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class OperationMetadata(proto.Message): - r"""Represents the metadata of a long-running operation in - Analytics Hub. - - Attributes: - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the operation was - created. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the operation finished - running. - target (str): - Output only. 
Server-defined resource path for - the target of the operation. - verb (str): - Output only. Name of the verb executed by the - operation. - status_message (str): - Output only. Human-readable status of the - operation, if any. - requested_cancellation (bool): - Output only. Identifies whether the user has requested - cancellation of the operation. Operations that have - successfully been cancelled have [Operation.error][] value - with a [google.rpc.Status.code][google.rpc.Status.code] of - 1, corresponding to ``Code.CANCELLED``. - api_version (str): - Output only. API version used to start the - operation. - """ - - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - target: str = proto.Field( - proto.STRING, - number=3, - ) - verb: str = proto.Field( - proto.STRING, - number=4, - ) - status_message: str = proto.Field( - proto.STRING, - number=5, - ) - requested_cancellation: bool = proto.Field( - proto.BOOL, - number=6, - ) - api_version: str = proto.Field( - proto.STRING, - number=7, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/mypy.ini b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/mypy.ini deleted file mode 100644 index 574c5aed394b..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/noxfile.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/noxfile.py deleted file mode 100644 index 157a334537a6..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/noxfile.py +++ /dev/null @@ -1,280 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -import pathlib -import re -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", - "3.12", - "3.13", -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = 'google-cloud-bigquery-analyticshub' - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.13" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds", - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "prerelease_deps", -] - -@nox.session(python=ALL_PYTHON) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def unit(session, protobuf_implementation): - """Run the unit test suite.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") - - # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. - # The 'cpp' implementation requires Protobuf<4. - if protobuf_implementation == "cpp": - session.install("protobuf<4") - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/bigquery_analyticshub_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - -@nox.session(python=ALL_PYTHON[-1]) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def prerelease_deps(session, protobuf_implementation): - """Run the unit test suite against pre-release versions of dependencies.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - # Install test environment dependencies - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - - # Install the package without dependencies - session.install('-e', '.', '--no-deps') - - # We test the minimum dependency versions using the minimum Python - # version so the lowest python runtime that we test has a corresponding constraints - # file, located at `testing/constraints-<minimum python version>.txt`, which contains all of the - # dependencies and extras. - with open( - CURRENT_DIRECTORY - / "testing" - / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines.
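    # For example, a constraints file containing
    #
    #     # a comment line
    #     google-api-core==1.34.0
    #     proto-plus==1.22.3
    #     grpcio
    #
    # yields ["google-api-core", "proto-plus"]: the lookahead (?===\S+) in
    # the regex below keeps only names pinned with '==', so the comment line
    # and the unpinned 'grpcio' line are both skipped. (The versions shown
    # here are illustrative, not taken from this repository.)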
- constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - session.install(*constraints_deps) - - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 - "grpcio!=1.67.0rc1", - "grpcio-status", - "protobuf", - "proto-plus", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) - - # Remaining dependencies - other_deps = [ - "requests", - ] - session.install(*other_deps) - - # Print out prerelease package versions - - session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") - session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("python", "-c", "import grpc; print(grpc.__version__)") - session.run( - "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" - ) - session.run( - "python", "-c", "import proto; print(proto.__version__)" - ) - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/bigquery_analyticshub_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. - """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. 
- """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_create_data_exchange_async.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_create_data_exchange_async.py deleted file mode 100644 index a241e481e453..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_create_data_exchange_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDataExchange -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-analyticshub - - -# [START analyticshub_v1_generated_AnalyticsHubService_CreateDataExchange_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_analyticshub_v1 - - -async def sample_create_data_exchange(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - data_exchange = bigquery_analyticshub_v1.DataExchange() - data_exchange.display_name = "display_name_value" - - request = bigquery_analyticshub_v1.CreateDataExchangeRequest( - parent="parent_value", - data_exchange_id="data_exchange_id_value", - data_exchange=data_exchange, - ) - - # Make the request - response = await client.create_data_exchange(request=request) - - # Handle the response - print(response) - -# [END analyticshub_v1_generated_AnalyticsHubService_CreateDataExchange_async] diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_create_data_exchange_sync.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_create_data_exchange_sync.py deleted file mode 100644 index d68be323ec2f..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_create_data_exchange_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDataExchange -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-analyticshub - - -# [START analyticshub_v1_generated_AnalyticsHubService_CreateDataExchange_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
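The async variant above defines a coroutine but never executes it. One way to run it, using only the standard library:

```python
import asyncio

# Run the generated coroutine to completion on a fresh event loop.
asyncio.run(sample_create_data_exchange())
```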
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_analyticshub_v1 - - -def sample_create_data_exchange(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - data_exchange = bigquery_analyticshub_v1.DataExchange() - data_exchange.display_name = "display_name_value" - - request = bigquery_analyticshub_v1.CreateDataExchangeRequest( - parent="parent_value", - data_exchange_id="data_exchange_id_value", - data_exchange=data_exchange, - ) - - # Make the request - response = client.create_data_exchange(request=request) - - # Handle the response - print(response) - -# [END analyticshub_v1_generated_AnalyticsHubService_CreateDataExchange_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_create_listing_async.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_create_listing_async.py deleted file mode 100644 index eb5ed7d8a00e..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_create_listing_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateListing -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-analyticshub - - -# [START analyticshub_v1_generated_AnalyticsHubService_CreateListing_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_analyticshub_v1 - - -async def sample_create_listing(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - listing = bigquery_analyticshub_v1.Listing() - listing.display_name = "display_name_value" - - request = bigquery_analyticshub_v1.CreateListingRequest( - parent="parent_value", - listing_id="listing_id_value", - listing=listing, - ) - - # Make the request - response = await client.create_listing(request=request) - - # Handle the response - print(response) - -# [END analyticshub_v1_generated_AnalyticsHubService_CreateListing_async] diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_create_listing_sync.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_create_listing_sync.py deleted file mode 100644 index 0e4771a69ace..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_create_listing_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateListing -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-analyticshub - - -# [START analyticshub_v1_generated_AnalyticsHubService_CreateListing_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_analyticshub_v1 - - -def sample_create_listing(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - listing = bigquery_analyticshub_v1.Listing() - listing.display_name = "display_name_value" - - request = bigquery_analyticshub_v1.CreateListingRequest( - parent="parent_value", - listing_id="listing_id_value", - listing=listing, - ) - - # Make the request - response = client.create_listing(request=request) - - # Handle the response - print(response) - -# [END analyticshub_v1_generated_AnalyticsHubService_CreateListing_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_delete_data_exchange_async.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_delete_data_exchange_async.py deleted file mode 100644 index 10d73da72078..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_delete_data_exchange_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDataExchange -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-analyticshub - - -# [START analyticshub_v1_generated_AnalyticsHubService_DeleteDataExchange_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_analyticshub_v1 - - -async def sample_delete_data_exchange(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.DeleteDataExchangeRequest( - name="name_value", - ) - - # Make the request - await client.delete_data_exchange(request=request) - - -# [END analyticshub_v1_generated_AnalyticsHubService_DeleteDataExchange_async] diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_delete_data_exchange_sync.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_delete_data_exchange_sync.py deleted file mode 100644 index 4891d56831f6..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_delete_data_exchange_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDataExchange -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-analyticshub - - -# [START analyticshub_v1_generated_AnalyticsHubService_DeleteDataExchange_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_analyticshub_v1 - - -def sample_delete_data_exchange(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.DeleteDataExchangeRequest( - name="name_value", - ) - - # Make the request - client.delete_data_exchange(request=request) - - -# [END analyticshub_v1_generated_AnalyticsHubService_DeleteDataExchange_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_delete_listing_async.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_delete_listing_async.py deleted file mode 100644 index 57db98031f16..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_delete_listing_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteListing -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-analyticshub - - -# [START analyticshub_v1_generated_AnalyticsHubService_DeleteListing_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_analyticshub_v1 - - -async def sample_delete_listing(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.DeleteListingRequest( - name="name_value", - ) - - # Make the request - await client.delete_listing(request=request) - - -# [END analyticshub_v1_generated_AnalyticsHubService_DeleteListing_async] diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_delete_listing_sync.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_delete_listing_sync.py deleted file mode 100644 index ae2080e468de..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_delete_listing_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteListing -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-analyticshub - - -# [START analyticshub_v1_generated_AnalyticsHubService_DeleteListing_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_analyticshub_v1 - - -def sample_delete_listing(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.DeleteListingRequest( - name="name_value", - ) - - # Make the request - client.delete_listing(request=request) - - -# [END analyticshub_v1_generated_AnalyticsHubService_DeleteListing_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_delete_subscription_async.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_delete_subscription_async.py deleted file mode 100644 index e9992694a2c9..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_delete_subscription_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteSubscription -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-analyticshub - - -# [START analyticshub_v1_generated_AnalyticsHubService_DeleteSubscription_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_analyticshub_v1 - - -async def sample_delete_subscription(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.DeleteSubscriptionRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_subscription(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END analyticshub_v1_generated_AnalyticsHubService_DeleteSubscription_async] diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_delete_subscription_sync.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_delete_subscription_sync.py deleted file mode 100644 index 77a5a14b2ec5..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_delete_subscription_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteSubscription -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-analyticshub - - -# [START analyticshub_v1_generated_AnalyticsHubService_DeleteSubscription_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_analyticshub_v1 - - -def sample_delete_subscription(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.DeleteSubscriptionRequest( - name="name_value", - ) - - # Make the request - operation = client.delete_subscription(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END analyticshub_v1_generated_AnalyticsHubService_DeleteSubscription_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_data_exchange_async.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_data_exchange_async.py deleted file mode 100644 index 66108d75a09d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_data_exchange_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDataExchange -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-analyticshub - - -# [START analyticshub_v1_generated_AnalyticsHubService_GetDataExchange_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
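DeleteSubscription (above) returns a long-running operation rather than an immediate response. A sketch of bounding the wait, assuming the standard google.api_core.operation.Operation surface; the 300-second timeout is an arbitrary illustration:

```python
import concurrent.futures

from google.api_core import exceptions
from google.cloud import bigquery_analyticshub_v1

client = bigquery_analyticshub_v1.AnalyticsHubServiceClient()
request = bigquery_analyticshub_v1.DeleteSubscriptionRequest(name="name_value")

operation = client.delete_subscription(request=request)
print("Waiting for operation to complete...")
try:
    # result() blocks, polling the operation until it finishes or the timeout lapses.
    response = operation.result(timeout=300)
    print(response)
except concurrent.futures.TimeoutError:
    print("Timed out waiting for DeleteSubscription to finish.")
except exceptions.GoogleAPICallError as err:
    print(f"DeleteSubscription failed: {err}")
```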
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_analyticshub_v1 - - -async def sample_get_data_exchange(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.GetDataExchangeRequest( - name="name_value", - ) - - # Make the request - response = await client.get_data_exchange(request=request) - - # Handle the response - print(response) - -# [END analyticshub_v1_generated_AnalyticsHubService_GetDataExchange_async] diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_data_exchange_sync.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_data_exchange_sync.py deleted file mode 100644 index ad9edf1373d1..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_data_exchange_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDataExchange -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-analyticshub - - -# [START analyticshub_v1_generated_AnalyticsHubService_GetDataExchange_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_analyticshub_v1 - - -def sample_get_data_exchange(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.GetDataExchangeRequest( - name="name_value", - ) - - # Make the request - response = client.get_data_exchange(request=request) - - # Handle the response - print(response) - -# [END analyticshub_v1_generated_AnalyticsHubService_GetDataExchange_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_iam_policy_async.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_iam_policy_async.py deleted file mode 100644 index 9761cd4d60b3..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_iam_policy_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-analyticshub - - -# [START analyticshub_v1_generated_AnalyticsHubService_GetIamPolicy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_analyticshub_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -async def sample_get_iam_policy(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.get_iam_policy(request=request) - - # Handle the response - print(response) - -# [END analyticshub_v1_generated_AnalyticsHubService_GetIamPolicy_async] diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_iam_policy_sync.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_iam_policy_sync.py deleted file mode 100644 index 9667c2b87df8..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_iam_policy_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-analyticshub - - -# [START analyticshub_v1_generated_AnalyticsHubService_GetIamPolicy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_analyticshub_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -def sample_get_iam_policy(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.get_iam_policy(request=request) - - # Handle the response - print(response) - -# [END analyticshub_v1_generated_AnalyticsHubService_GetIamPolicy_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_listing_async.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_listing_async.py deleted file mode 100644 index 7af59c578b63..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_listing_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetListing -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-analyticshub - - -# [START analyticshub_v1_generated_AnalyticsHubService_GetListing_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
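In the GetIamPolicy samples above, resource must be the full resource name of the data exchange or listing whose policy is being read. The pattern below is an assumption based on the usual Analytics Hub resource naming, not something stated in this diff:

```python
from google.cloud import bigquery_analyticshub_v1
from google.iam.v1 import iam_policy_pb2  # type: ignore

client = bigquery_analyticshub_v1.AnalyticsHubServiceClient()

# Assumed resource-name pattern for a data exchange; replace the
# {placeholders} with real values before calling.
request = iam_policy_pb2.GetIamPolicyRequest(
    resource="projects/{project}/locations/{location}/dataExchanges/{data_exchange}",
)
response = client.get_iam_policy(request=request)
print(response)
```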
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_analyticshub_v1 - - -async def sample_get_listing(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.GetListingRequest( - name="name_value", - ) - - # Make the request - response = await client.get_listing(request=request) - - # Handle the response - print(response) - -# [END analyticshub_v1_generated_AnalyticsHubService_GetListing_async] diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_listing_sync.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_listing_sync.py deleted file mode 100644 index 64c8910e53e3..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_listing_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetListing -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-analyticshub - - -# [START analyticshub_v1_generated_AnalyticsHubService_GetListing_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_analyticshub_v1 - - -def sample_get_listing(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.GetListingRequest( - name="name_value", - ) - - # Make the request - response = client.get_listing(request=request) - - # Handle the response - print(response) - -# [END analyticshub_v1_generated_AnalyticsHubService_GetListing_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_subscription_async.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_subscription_async.py deleted file mode 100644 index 4904c7c8c0ef..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_subscription_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetSubscription -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-analyticshub - - -# [START analyticshub_v1_generated_AnalyticsHubService_GetSubscription_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_analyticshub_v1 - - -async def sample_get_subscription(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.GetSubscriptionRequest( - name="name_value", - ) - - # Make the request - response = await client.get_subscription(request=request) - - # Handle the response - print(response) - -# [END analyticshub_v1_generated_AnalyticsHubService_GetSubscription_async] diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_subscription_sync.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_subscription_sync.py deleted file mode 100644 index 9de9f0a4b9bd..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_subscription_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetSubscription -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-analyticshub - - -# [START analyticshub_v1_generated_AnalyticsHubService_GetSubscription_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_analyticshub_v1 - - -def sample_get_subscription(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.GetSubscriptionRequest( - name="name_value", - ) - - # Make the request - response = client.get_subscription(request=request) - - # Handle the response - print(response) - -# [END analyticshub_v1_generated_AnalyticsHubService_GetSubscription_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_data_exchanges_async.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_data_exchanges_async.py deleted file mode 100644 index f4dc6a776312..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_data_exchanges_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDataExchanges -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-analyticshub - - -# [START analyticshub_v1_generated_AnalyticsHubService_ListDataExchanges_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_analyticshub_v1 - - -async def sample_list_data_exchanges(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.ListDataExchangesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_exchanges(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END analyticshub_v1_generated_AnalyticsHubService_ListDataExchanges_async] diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_data_exchanges_sync.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_data_exchanges_sync.py deleted file mode 100644 index c534dd7e9d0c..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_data_exchanges_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDataExchanges -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-analyticshub - - -# [START analyticshub_v1_generated_AnalyticsHubService_ListDataExchanges_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_analyticshub_v1 - - -def sample_list_data_exchanges(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.ListDataExchangesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_exchanges(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END analyticshub_v1_generated_AnalyticsHubService_ListDataExchanges_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_listings_async.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_listings_async.py deleted file mode 100644 index aabcabce51be..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_listings_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListListings -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-analyticshub - - -# [START analyticshub_v1_generated_AnalyticsHubService_ListListings_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
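The ListDataExchanges samples above iterate results item by item, with the pager fetching pages transparently. The pager also supports page-level iteration; a sketch assuming the standard GAPIC pager interface, where the repeated field name data_exchanges follows the usual generated conventions:

```python
from google.cloud import bigquery_analyticshub_v1

client = bigquery_analyticshub_v1.AnalyticsHubServiceClient()
request = bigquery_analyticshub_v1.ListDataExchangesRequest(parent="parent_value")

page_result = client.list_data_exchanges(request=request)

# Iterate page by page (one RPC per page) instead of item by item.
for page in page_result.pages:
    for data_exchange in page.data_exchanges:
        print(data_exchange.name)
```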
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_analyticshub_v1 - - -async def sample_list_listings(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.ListListingsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_listings(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END analyticshub_v1_generated_AnalyticsHubService_ListListings_async] diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_listings_sync.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_listings_sync.py deleted file mode 100644 index b9fc8332ce3d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_listings_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListListings -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-analyticshub - - -# [START analyticshub_v1_generated_AnalyticsHubService_ListListings_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_analyticshub_v1 - - -def sample_list_listings(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.ListListingsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_listings(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END analyticshub_v1_generated_AnalyticsHubService_ListListings_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_org_data_exchanges_async.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_org_data_exchanges_async.py deleted file mode 100644 index c1c4222414c0..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_org_data_exchanges_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListOrgDataExchanges -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-analyticshub - - -# [START analyticshub_v1_generated_AnalyticsHubService_ListOrgDataExchanges_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_analyticshub_v1 - - -async def sample_list_org_data_exchanges(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.ListOrgDataExchangesRequest( - organization="organization_value", - ) - - # Make the request - page_result = client.list_org_data_exchanges(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END analyticshub_v1_generated_AnalyticsHubService_ListOrgDataExchanges_async] diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_org_data_exchanges_sync.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_org_data_exchanges_sync.py deleted file mode 100644 index 20cee38b26e2..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_org_data_exchanges_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListOrgDataExchanges -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-analyticshub - - -# [START analyticshub_v1_generated_AnalyticsHubService_ListOrgDataExchanges_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_analyticshub_v1 - - -def sample_list_org_data_exchanges(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.ListOrgDataExchangesRequest( - organization="organization_value", - ) - - # Make the request - page_result = client.list_org_data_exchanges(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END analyticshub_v1_generated_AnalyticsHubService_ListOrgDataExchanges_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_shared_resource_subscriptions_async.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_shared_resource_subscriptions_async.py deleted file mode 100644 index c18cc4277468..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_shared_resource_subscriptions_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListSharedResourceSubscriptions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-analyticshub - - -# [START analyticshub_v1_generated_AnalyticsHubService_ListSharedResourceSubscriptions_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
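# These generated headers repeatedly point to client_options for specifying a
# regional endpoint. A minimal sketch of that pattern (editorial addition; the
# endpoint string is a placeholder, not a verified Analytics Hub endpoint):
from google.api_core.client_options import ClientOptions
from google.cloud import bigquery_analyticshub_v1

regional_client = bigquery_analyticshub_v1.AnalyticsHubServiceClient(
    client_options=ClientOptions(api_endpoint="REGION-analyticshub.googleapis.com"),
)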
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_analyticshub_v1 - - -async def sample_list_shared_resource_subscriptions(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.ListSharedResourceSubscriptionsRequest( - resource="resource_value", - ) - - # Make the request - page_result = client.list_shared_resource_subscriptions(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END analyticshub_v1_generated_AnalyticsHubService_ListSharedResourceSubscriptions_async] diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_shared_resource_subscriptions_sync.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_shared_resource_subscriptions_sync.py deleted file mode 100644 index 839d8443b27f..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_shared_resource_subscriptions_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListSharedResourceSubscriptions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-analyticshub - - -# [START analyticshub_v1_generated_AnalyticsHubService_ListSharedResourceSubscriptions_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_analyticshub_v1 - - -def sample_list_shared_resource_subscriptions(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.ListSharedResourceSubscriptionsRequest( - resource="resource_value", - ) - - # Make the request - page_result = client.list_shared_resource_subscriptions(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END analyticshub_v1_generated_AnalyticsHubService_ListSharedResourceSubscriptions_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_subscriptions_async.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_subscriptions_async.py deleted file mode 100644 index f8d5b75f8601..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_subscriptions_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListSubscriptions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-analyticshub - - -# [START analyticshub_v1_generated_AnalyticsHubService_ListSubscriptions_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_analyticshub_v1 - - -async def sample_list_subscriptions(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.ListSubscriptionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_subscriptions(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END analyticshub_v1_generated_AnalyticsHubService_ListSubscriptions_async] diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_subscriptions_sync.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_subscriptions_sync.py deleted file mode 100644 index 0aa8de19a17e..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_subscriptions_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListSubscriptions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-analyticshub - - -# [START analyticshub_v1_generated_AnalyticsHubService_ListSubscriptions_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_analyticshub_v1 - - -def sample_list_subscriptions(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.ListSubscriptionsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_subscriptions(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END analyticshub_v1_generated_AnalyticsHubService_ListSubscriptions_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_refresh_subscription_async.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_refresh_subscription_async.py deleted file mode 100644 index c9b0ca720f3d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_refresh_subscription_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RefreshSubscription -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-analyticshub - - -# [START analyticshub_v1_generated_AnalyticsHubService_RefreshSubscription_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_analyticshub_v1 - - -async def sample_refresh_subscription(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.RefreshSubscriptionRequest( - name="name_value", - ) - - # Make the request - operation = client.refresh_subscription(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END analyticshub_v1_generated_AnalyticsHubService_RefreshSubscription_async] diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_refresh_subscription_sync.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_refresh_subscription_sync.py deleted file mode 100644 index 39335a6a9ede..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_refresh_subscription_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RefreshSubscription -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-analyticshub - - -# [START analyticshub_v1_generated_AnalyticsHubService_RefreshSubscription_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_analyticshub_v1 - - -def sample_refresh_subscription(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.RefreshSubscriptionRequest( - name="name_value", - ) - - # Make the request - operation = client.refresh_subscription(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END analyticshub_v1_generated_AnalyticsHubService_RefreshSubscription_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_revoke_subscription_async.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_revoke_subscription_async.py deleted file mode 100644 index 9fbb6a39ca89..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_revoke_subscription_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RevokeSubscription -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-analyticshub - - -# [START analyticshub_v1_generated_AnalyticsHubService_RevokeSubscription_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
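# A sketch (editorial addition, not generated code) of bounding the wait on
# the RefreshSubscription long-running operation and surfacing failures; the
# 300-second timeout is illustrative.
import concurrent.futures

from google.api_core import exceptions
from google.cloud import bigquery_analyticshub_v1


def refresh_subscription_with_timeout_sketch():
    client = bigquery_analyticshub_v1.AnalyticsHubServiceClient()
    request = bigquery_analyticshub_v1.RefreshSubscriptionRequest(
        name="name_value",
    )
    operation = client.refresh_subscription(request=request)
    try:
        response = operation.result(timeout=300)  # wait at most five minutes
        print(response)
    except concurrent.futures.TimeoutError:
        print("Operation did not complete within five minutes.")
    except exceptions.GoogleAPICallError as err:
        print(f"RefreshSubscription failed: {err}")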
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_analyticshub_v1 - - -async def sample_revoke_subscription(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.RevokeSubscriptionRequest( - name="name_value", - ) - - # Make the request - response = await client.revoke_subscription(request=request) - - # Handle the response - print(response) - -# [END analyticshub_v1_generated_AnalyticsHubService_RevokeSubscription_async] diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_revoke_subscription_sync.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_revoke_subscription_sync.py deleted file mode 100644 index 0cc09faa875b..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_revoke_subscription_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RevokeSubscription -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-analyticshub - - -# [START analyticshub_v1_generated_AnalyticsHubService_RevokeSubscription_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_analyticshub_v1 - - -def sample_revoke_subscription(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.RevokeSubscriptionRequest( - name="name_value", - ) - - # Make the request - response = client.revoke_subscription(request=request) - - # Handle the response - print(response) - -# [END analyticshub_v1_generated_AnalyticsHubService_RevokeSubscription_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_set_iam_policy_async.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_set_iam_policy_async.py deleted file mode 100644 index 126e12d2db09..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_set_iam_policy_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-analyticshub - - -# [START analyticshub_v1_generated_AnalyticsHubService_SetIamPolicy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_analyticshub_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -async def sample_set_iam_policy(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.set_iam_policy(request=request) - - # Handle the response - print(response) - -# [END analyticshub_v1_generated_AnalyticsHubService_SetIamPolicy_async] diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_set_iam_policy_sync.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_set_iam_policy_sync.py deleted file mode 100644 index eb9b08004dfb..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_set_iam_policy_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-analyticshub - - -# [START analyticshub_v1_generated_AnalyticsHubService_SetIamPolicy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_analyticshub_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -def sample_set_iam_policy(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.set_iam_policy(request=request) - - # Handle the response - print(response) - -# [END analyticshub_v1_generated_AnalyticsHubService_SetIamPolicy_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_subscribe_data_exchange_async.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_subscribe_data_exchange_async.py deleted file mode 100644 index fdbd5406eda6..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_subscribe_data_exchange_async.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SubscribeDataExchange -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-analyticshub - - -# [START analyticshub_v1_generated_AnalyticsHubService_SubscribeDataExchange_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
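# The SetIamPolicy samples above send a request without a policy payload; in
# practice the request usually carries a policy with bindings. A sketch
# (editorial addition; the role and member values are illustrative):
from google.cloud import bigquery_analyticshub_v1
from google.iam.v1 import iam_policy_pb2, policy_pb2  # type: ignore


def set_iam_policy_with_binding_sketch():
    client = bigquery_analyticshub_v1.AnalyticsHubServiceClient()

    policy = policy_pb2.Policy()
    binding = policy.bindings.add()
    binding.role = "roles/analyticshub.subscriber"
    binding.members.append("user:subscriber@example.com")

    request = iam_policy_pb2.SetIamPolicyRequest(
        resource="resource_value",
        policy=policy,
    )
    response = client.set_iam_policy(request=request)
    print(response)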
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_analyticshub_v1 - - -async def sample_subscribe_data_exchange(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.SubscribeDataExchangeRequest( - name="name_value", - destination="destination_value", - subscription="subscription_value", - ) - - # Make the request - operation = client.subscribe_data_exchange(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() - - # Handle the response - print(response) - -# [END analyticshub_v1_generated_AnalyticsHubService_SubscribeDataExchange_async] diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_subscribe_data_exchange_sync.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_subscribe_data_exchange_sync.py deleted file mode 100644 index 84dbe0c4c4a6..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_subscribe_data_exchange_sync.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SubscribeDataExchange -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-analyticshub - - -# [START analyticshub_v1_generated_AnalyticsHubService_SubscribeDataExchange_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
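# A sketch (editorial addition, not generated code) of driving the async
# SubscribeDataExchange long-running operation end to end: the RPC returns an
# AsyncOperation once awaited, and its result() coroutine is awaited in turn.
# Field values mirror the sample above; credentials are assumed.
import asyncio

from google.cloud import bigquery_analyticshub_v1


async def subscribe_data_exchange_sketch():
    client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient()
    request = bigquery_analyticshub_v1.SubscribeDataExchangeRequest(
        name="name_value",
        destination="destination_value",
        subscription="subscription_value",
    )
    operation = await client.subscribe_data_exchange(request=request)
    print("Waiting for operation to complete...")
    response = await operation.result()
    print(response)


if __name__ == "__main__":
    asyncio.run(subscribe_data_exchange_sketch())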
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_analyticshub_v1 - - -def sample_subscribe_data_exchange(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = bigquery_analyticshub_v1.SubscribeDataExchangeRequest( - name="name_value", - destination="destination_value", - subscription="subscription_value", - ) - - # Make the request - operation = client.subscribe_data_exchange(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() - - # Handle the response - print(response) - -# [END analyticshub_v1_generated_AnalyticsHubService_SubscribeDataExchange_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_subscribe_listing_async.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_subscribe_listing_async.py deleted file mode 100644 index 18572d930052..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_subscribe_listing_async.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SubscribeListing -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-analyticshub - - -# [START analyticshub_v1_generated_AnalyticsHubService_SubscribeListing_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_analyticshub_v1 - - -async def sample_subscribe_listing(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - destination_dataset = bigquery_analyticshub_v1.DestinationDataset() - destination_dataset.dataset_reference.dataset_id = "dataset_id_value" - destination_dataset.dataset_reference.project_id = "project_id_value" - destination_dataset.location = "location_value" - - request = bigquery_analyticshub_v1.SubscribeListingRequest( - destination_dataset=destination_dataset, - name="name_value", - ) - - # Make the request - response = await client.subscribe_listing(request=request) - - # Handle the response - print(response) - -# [END analyticshub_v1_generated_AnalyticsHubService_SubscribeListing_async] diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_subscribe_listing_sync.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_subscribe_listing_sync.py deleted file mode 100644 index 43b5a21f8378..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_subscribe_listing_sync.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SubscribeListing -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-analyticshub - - -# [START analyticshub_v1_generated_AnalyticsHubService_SubscribeListing_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_analyticshub_v1 - - -def sample_subscribe_listing(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - destination_dataset = bigquery_analyticshub_v1.DestinationDataset() - destination_dataset.dataset_reference.dataset_id = "dataset_id_value" - destination_dataset.dataset_reference.project_id = "project_id_value" - destination_dataset.location = "location_value" - - request = bigquery_analyticshub_v1.SubscribeListingRequest( - destination_dataset=destination_dataset, - name="name_value", - ) - - # Make the request - response = client.subscribe_listing(request=request) - - # Handle the response - print(response) - -# [END analyticshub_v1_generated_AnalyticsHubService_SubscribeListing_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_test_iam_permissions_async.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_test_iam_permissions_async.py deleted file mode 100644 index 1eddfb102903..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_test_iam_permissions_async.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for TestIamPermissions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-analyticshub - - -# [START analyticshub_v1_generated_AnalyticsHubService_TestIamPermissions_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_analyticshub_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -async def sample_test_iam_permissions(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = await client.test_iam_permissions(request=request) - - # Handle the response - print(response) - -# [END analyticshub_v1_generated_AnalyticsHubService_TestIamPermissions_async] diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_test_iam_permissions_sync.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_test_iam_permissions_sync.py deleted file mode 100644 index d4a230393eeb..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_test_iam_permissions_sync.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for TestIamPermissions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-analyticshub - - -# [START analyticshub_v1_generated_AnalyticsHubService_TestIamPermissions_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_analyticshub_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -def sample_test_iam_permissions(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = client.test_iam_permissions(request=request) - - # Handle the response - print(response) - -# [END analyticshub_v1_generated_AnalyticsHubService_TestIamPermissions_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_update_data_exchange_async.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_update_data_exchange_async.py deleted file mode 100644 index 33c77b719cc1..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_update_data_exchange_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDataExchange -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-analyticshub - - -# [START analyticshub_v1_generated_AnalyticsHubService_UpdateDataExchange_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
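# A sketch (editorial addition, not generated code) of acting on a
# TestIamPermissions response: the call returns the subset of the requested
# permissions that the caller actually holds. The permission string is
# illustrative.
from google.cloud import bigquery_analyticshub_v1
from google.iam.v1 import iam_policy_pb2  # type: ignore


def check_permission_sketch():
    client = bigquery_analyticshub_v1.AnalyticsHubServiceClient()
    request = iam_policy_pb2.TestIamPermissionsRequest(
        resource="resource_value",
        permissions=["analyticshub.listings.get"],
    )
    response = client.test_iam_permissions(request=request)
    if "analyticshub.listings.get" in response.permissions:
        print("Caller may read this listing.")
    else:
        print("Permission not granted.")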
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_analyticshub_v1 - - -async def sample_update_data_exchange(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - data_exchange = bigquery_analyticshub_v1.DataExchange() - data_exchange.display_name = "display_name_value" - - request = bigquery_analyticshub_v1.UpdateDataExchangeRequest( - data_exchange=data_exchange, - ) - - # Make the request - response = await client.update_data_exchange(request=request) - - # Handle the response - print(response) - -# [END analyticshub_v1_generated_AnalyticsHubService_UpdateDataExchange_async] diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_update_data_exchange_sync.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_update_data_exchange_sync.py deleted file mode 100644 index 9619895728e2..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_update_data_exchange_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDataExchange -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-analyticshub - - -# [START analyticshub_v1_generated_AnalyticsHubService_UpdateDataExchange_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_analyticshub_v1 - - -def sample_update_data_exchange(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - data_exchange = bigquery_analyticshub_v1.DataExchange() - data_exchange.display_name = "display_name_value" - - request = bigquery_analyticshub_v1.UpdateDataExchangeRequest( - data_exchange=data_exchange, - ) - - # Make the request - response = client.update_data_exchange(request=request) - - # Handle the response - print(response) - -# [END analyticshub_v1_generated_AnalyticsHubService_UpdateDataExchange_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_update_listing_async.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_update_listing_async.py deleted file mode 100644 index efd8c6ee228c..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_update_listing_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateListing -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-analyticshub - - -# [START analyticshub_v1_generated_AnalyticsHubService_UpdateListing_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
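# The Update* samples above send only the modified resource; an update_mask is
# commonly attached so that only the named fields are changed. A sketch
# (editorial addition; assumes UpdateDataExchangeRequest follows the standard
# update_mask convention):
from google.cloud import bigquery_analyticshub_v1
from google.protobuf import field_mask_pb2  # type: ignore


def update_display_name_only_sketch():
    client = bigquery_analyticshub_v1.AnalyticsHubServiceClient()

    data_exchange = bigquery_analyticshub_v1.DataExchange()
    data_exchange.name = "name_value"
    data_exchange.display_name = "display_name_value"

    request = bigquery_analyticshub_v1.UpdateDataExchangeRequest(
        data_exchange=data_exchange,
        update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
    )
    response = client.update_data_exchange(request=request)
    print(response)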
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_analyticshub_v1 - - -async def sample_update_listing(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - listing = bigquery_analyticshub_v1.Listing() - listing.display_name = "display_name_value" - - request = bigquery_analyticshub_v1.UpdateListingRequest( - listing=listing, - ) - - # Make the request - response = await client.update_listing(request=request) - - # Handle the response - print(response) - -# [END analyticshub_v1_generated_AnalyticsHubService_UpdateListing_async] diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_update_listing_sync.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_update_listing_sync.py deleted file mode 100644 index 75f3129c345f..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_update_listing_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateListing -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-analyticshub - - -# [START analyticshub_v1_generated_AnalyticsHubService_UpdateListing_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_analyticshub_v1 - - -def sample_update_listing(): - # Create a client - client = bigquery_analyticshub_v1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - listing = bigquery_analyticshub_v1.Listing() - listing.display_name = "display_name_value" - - request = bigquery_analyticshub_v1.UpdateListingRequest( - listing=listing, - ) - - # Make the request - response = client.update_listing(request=request) - - # Handle the response - print(response) - -# [END analyticshub_v1_generated_AnalyticsHubService_UpdateListing_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/snippet_metadata_google.cloud.bigquery.analyticshub.v1.json b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/snippet_metadata_google.cloud.bigquery.analyticshub.v1.json deleted file mode 100644 index 7d9e59976e72..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/samples/generated_samples/snippet_metadata_google.cloud.bigquery.analyticshub.v1.json +++ /dev/null @@ -1,3553 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.bigquery.analyticshub.v1", - "version": "v1" - } - ], - "language": "PYTHON", - "name": "google-cloud-bigquery-analyticshub", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient", - "shortName": "AnalyticsHubServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient.create_data_exchange", - "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.CreateDataExchange", - "service": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "CreateDataExchange" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_analyticshub_v1.types.CreateDataExchangeRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "data_exchange", - "type": "google.cloud.bigquery_analyticshub_v1.types.DataExchange" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_analyticshub_v1.types.DataExchange", - "shortName": "create_data_exchange" - }, - "description": "Sample for CreateDataExchange", - "file": "analyticshub_v1_generated_analytics_hub_service_create_data_exchange_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_CreateDataExchange_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1_generated_analytics_hub_service_create_data_exchange_async.py" - }, - { - "canonical": true, - "clientMethod": { - 
"client": { - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient", - "shortName": "AnalyticsHubServiceClient" - }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient.create_data_exchange", - "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.CreateDataExchange", - "service": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "CreateDataExchange" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_analyticshub_v1.types.CreateDataExchangeRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "data_exchange", - "type": "google.cloud.bigquery_analyticshub_v1.types.DataExchange" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_analyticshub_v1.types.DataExchange", - "shortName": "create_data_exchange" - }, - "description": "Sample for CreateDataExchange", - "file": "analyticshub_v1_generated_analytics_hub_service_create_data_exchange_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_CreateDataExchange_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1_generated_analytics_hub_service_create_data_exchange_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient", - "shortName": "AnalyticsHubServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient.create_listing", - "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.CreateListing", - "service": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "CreateListing" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_analyticshub_v1.types.CreateListingRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "listing", - "type": "google.cloud.bigquery_analyticshub_v1.types.Listing" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_analyticshub_v1.types.Listing", - "shortName": "create_listing" - }, - "description": "Sample for CreateListing", - "file": "analyticshub_v1_generated_analytics_hub_service_create_listing_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_CreateListing_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - 
"start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1_generated_analytics_hub_service_create_listing_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient", - "shortName": "AnalyticsHubServiceClient" - }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient.create_listing", - "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.CreateListing", - "service": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "CreateListing" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_analyticshub_v1.types.CreateListingRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "listing", - "type": "google.cloud.bigquery_analyticshub_v1.types.Listing" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_analyticshub_v1.types.Listing", - "shortName": "create_listing" - }, - "description": "Sample for CreateListing", - "file": "analyticshub_v1_generated_analytics_hub_service_create_listing_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_CreateListing_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1_generated_analytics_hub_service_create_listing_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient", - "shortName": "AnalyticsHubServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient.delete_data_exchange", - "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.DeleteDataExchange", - "service": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "DeleteDataExchange" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_analyticshub_v1.types.DeleteDataExchangeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_data_exchange" - }, - "description": "Sample for DeleteDataExchange", - "file": "analyticshub_v1_generated_analytics_hub_service_delete_data_exchange_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_DeleteDataExchange_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - 
"end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1_generated_analytics_hub_service_delete_data_exchange_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient", - "shortName": "AnalyticsHubServiceClient" - }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient.delete_data_exchange", - "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.DeleteDataExchange", - "service": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "DeleteDataExchange" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_analyticshub_v1.types.DeleteDataExchangeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_data_exchange" - }, - "description": "Sample for DeleteDataExchange", - "file": "analyticshub_v1_generated_analytics_hub_service_delete_data_exchange_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_DeleteDataExchange_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1_generated_analytics_hub_service_delete_data_exchange_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient", - "shortName": "AnalyticsHubServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient.delete_listing", - "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.DeleteListing", - "service": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "DeleteListing" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_analyticshub_v1.types.DeleteListingRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_listing" - }, - "description": "Sample for DeleteListing", - "file": "analyticshub_v1_generated_analytics_hub_service_delete_listing_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_DeleteListing_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - 
"start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1_generated_analytics_hub_service_delete_listing_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient", - "shortName": "AnalyticsHubServiceClient" - }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient.delete_listing", - "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.DeleteListing", - "service": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "DeleteListing" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_analyticshub_v1.types.DeleteListingRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_listing" - }, - "description": "Sample for DeleteListing", - "file": "analyticshub_v1_generated_analytics_hub_service_delete_listing_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_DeleteListing_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1_generated_analytics_hub_service_delete_listing_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient", - "shortName": "AnalyticsHubServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient.delete_subscription", - "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.DeleteSubscription", - "service": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "DeleteSubscription" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_analyticshub_v1.types.DeleteSubscriptionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_subscription" - }, - "description": "Sample for DeleteSubscription", - "file": "analyticshub_v1_generated_analytics_hub_service_delete_subscription_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_DeleteSubscription_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - 
"type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1_generated_analytics_hub_service_delete_subscription_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient", - "shortName": "AnalyticsHubServiceClient" - }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient.delete_subscription", - "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.DeleteSubscription", - "service": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "DeleteSubscription" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_analyticshub_v1.types.DeleteSubscriptionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_subscription" - }, - "description": "Sample for DeleteSubscription", - "file": "analyticshub_v1_generated_analytics_hub_service_delete_subscription_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_DeleteSubscription_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1_generated_analytics_hub_service_delete_subscription_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient", - "shortName": "AnalyticsHubServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient.get_data_exchange", - "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.GetDataExchange", - "service": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "GetDataExchange" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_analyticshub_v1.types.GetDataExchangeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_analyticshub_v1.types.DataExchange", - "shortName": "get_data_exchange" - }, - "description": "Sample for GetDataExchange", - "file": "analyticshub_v1_generated_analytics_hub_service_get_data_exchange_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_GetDataExchange_async", - "segments": [ - { - "end": 
51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1_generated_analytics_hub_service_get_data_exchange_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient", - "shortName": "AnalyticsHubServiceClient" - }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient.get_data_exchange", - "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.GetDataExchange", - "service": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "GetDataExchange" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_analyticshub_v1.types.GetDataExchangeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_analyticshub_v1.types.DataExchange", - "shortName": "get_data_exchange" - }, - "description": "Sample for GetDataExchange", - "file": "analyticshub_v1_generated_analytics_hub_service_get_data_exchange_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_GetDataExchange_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1_generated_analytics_hub_service_get_data_exchange_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient", - "shortName": "AnalyticsHubServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient.get_iam_policy", - "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.GetIamPolicy", - "service": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "GetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "get_iam_policy" - }, - "description": "Sample for GetIamPolicy", - "file": "analyticshub_v1_generated_analytics_hub_service_get_iam_policy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_GetIamPolicy_async", - 
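Unlike the resource-specific methods, the IAM entries take the stock ``google.iam.v1.iam_policy_pb2.GetIamPolicyRequest`` and return a ``google.iam.v1.policy_pb2.Policy``, exactly as the parameter and result types above state. A minimal sketch of that call shape (the resource name is a placeholder):

.. code-block:: python

    from google.cloud import bigquery_analyticshub_v1
    from google.iam.v1 import iam_policy_pb2


    def print_bindings(resource: str) -> None:
        client = bigquery_analyticshub_v1.AnalyticsHubServiceClient()
        # IAM requests address the data exchange or listing by full resource name.
        request = iam_policy_pb2.GetIamPolicyRequest(resource=resource)
        policy = client.get_iam_policy(request=request)
        for binding in policy.bindings:
            print(binding.role, list(binding.members))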
"segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1_generated_analytics_hub_service_get_iam_policy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient", - "shortName": "AnalyticsHubServiceClient" - }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient.get_iam_policy", - "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.GetIamPolicy", - "service": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "GetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "get_iam_policy" - }, - "description": "Sample for GetIamPolicy", - "file": "analyticshub_v1_generated_analytics_hub_service_get_iam_policy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_GetIamPolicy_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1_generated_analytics_hub_service_get_iam_policy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient", - "shortName": "AnalyticsHubServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient.get_listing", - "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.GetListing", - "service": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "GetListing" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_analyticshub_v1.types.GetListingRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_analyticshub_v1.types.Listing", - "shortName": "get_listing" - }, - "description": "Sample for GetListing", - "file": "analyticshub_v1_generated_analytics_hub_service_get_listing_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_GetListing_async", - "segments": [ - { - "end": 51, - 
"start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1_generated_analytics_hub_service_get_listing_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient", - "shortName": "AnalyticsHubServiceClient" - }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient.get_listing", - "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.GetListing", - "service": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "GetListing" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_analyticshub_v1.types.GetListingRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_analyticshub_v1.types.Listing", - "shortName": "get_listing" - }, - "description": "Sample for GetListing", - "file": "analyticshub_v1_generated_analytics_hub_service_get_listing_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_GetListing_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1_generated_analytics_hub_service_get_listing_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient", - "shortName": "AnalyticsHubServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient.get_subscription", - "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.GetSubscription", - "service": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "GetSubscription" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_analyticshub_v1.types.GetSubscriptionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_analyticshub_v1.types.Subscription", - "shortName": "get_subscription" - }, - "description": "Sample for GetSubscription", - "file": "analyticshub_v1_generated_analytics_hub_service_get_subscription_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"analyticshub_v1_generated_AnalyticsHubService_GetSubscription_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1_generated_analytics_hub_service_get_subscription_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient", - "shortName": "AnalyticsHubServiceClient" - }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient.get_subscription", - "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.GetSubscription", - "service": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "GetSubscription" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_analyticshub_v1.types.GetSubscriptionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_analyticshub_v1.types.Subscription", - "shortName": "get_subscription" - }, - "description": "Sample for GetSubscription", - "file": "analyticshub_v1_generated_analytics_hub_service_get_subscription_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_GetSubscription_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1_generated_analytics_hub_service_get_subscription_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient", - "shortName": "AnalyticsHubServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient.list_data_exchanges", - "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.ListDataExchanges", - "service": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "ListDataExchanges" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_analyticshub_v1.types.ListDataExchangesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.pagers.ListDataExchangesAsyncPager", - "shortName": "list_data_exchanges" - }, - "description": "Sample 
for ListDataExchanges", - "file": "analyticshub_v1_generated_analytics_hub_service_list_data_exchanges_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_ListDataExchanges_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1_generated_analytics_hub_service_list_data_exchanges_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient", - "shortName": "AnalyticsHubServiceClient" - }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient.list_data_exchanges", - "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.ListDataExchanges", - "service": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "ListDataExchanges" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_analyticshub_v1.types.ListDataExchangesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.pagers.ListDataExchangesPager", - "shortName": "list_data_exchanges" - }, - "description": "Sample for ListDataExchanges", - "file": "analyticshub_v1_generated_analytics_hub_service_list_data_exchanges_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_ListDataExchanges_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1_generated_analytics_hub_service_list_data_exchanges_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient", - "shortName": "AnalyticsHubServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient.list_listings", - "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.ListListings", - "service": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "ListListings" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_analyticshub_v1.types.ListListingsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": 
"Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.pagers.ListListingsAsyncPager", - "shortName": "list_listings" - }, - "description": "Sample for ListListings", - "file": "analyticshub_v1_generated_analytics_hub_service_list_listings_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_ListListings_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1_generated_analytics_hub_service_list_listings_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient", - "shortName": "AnalyticsHubServiceClient" - }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient.list_listings", - "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.ListListings", - "service": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "ListListings" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_analyticshub_v1.types.ListListingsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.pagers.ListListingsPager", - "shortName": "list_listings" - }, - "description": "Sample for ListListings", - "file": "analyticshub_v1_generated_analytics_hub_service_list_listings_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_ListListings_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1_generated_analytics_hub_service_list_listings_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient", - "shortName": "AnalyticsHubServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient.list_org_data_exchanges", - "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.ListOrgDataExchanges", - "service": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "ListOrgDataExchanges" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_analyticshub_v1.types.ListOrgDataExchangesRequest" - }, - { - "name": 
"organization", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.pagers.ListOrgDataExchangesAsyncPager", - "shortName": "list_org_data_exchanges" - }, - "description": "Sample for ListOrgDataExchanges", - "file": "analyticshub_v1_generated_analytics_hub_service_list_org_data_exchanges_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_ListOrgDataExchanges_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1_generated_analytics_hub_service_list_org_data_exchanges_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient", - "shortName": "AnalyticsHubServiceClient" - }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient.list_org_data_exchanges", - "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.ListOrgDataExchanges", - "service": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "ListOrgDataExchanges" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_analyticshub_v1.types.ListOrgDataExchangesRequest" - }, - { - "name": "organization", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.pagers.ListOrgDataExchangesPager", - "shortName": "list_org_data_exchanges" - }, - "description": "Sample for ListOrgDataExchanges", - "file": "analyticshub_v1_generated_analytics_hub_service_list_org_data_exchanges_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_ListOrgDataExchanges_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1_generated_analytics_hub_service_list_org_data_exchanges_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient", - "shortName": "AnalyticsHubServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient.list_shared_resource_subscriptions", - "method": { - "fullName": 
"google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.ListSharedResourceSubscriptions", - "service": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "ListSharedResourceSubscriptions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_analyticshub_v1.types.ListSharedResourceSubscriptionsRequest" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.pagers.ListSharedResourceSubscriptionsAsyncPager", - "shortName": "list_shared_resource_subscriptions" - }, - "description": "Sample for ListSharedResourceSubscriptions", - "file": "analyticshub_v1_generated_analytics_hub_service_list_shared_resource_subscriptions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_ListSharedResourceSubscriptions_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1_generated_analytics_hub_service_list_shared_resource_subscriptions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient", - "shortName": "AnalyticsHubServiceClient" - }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient.list_shared_resource_subscriptions", - "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.ListSharedResourceSubscriptions", - "service": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "ListSharedResourceSubscriptions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_analyticshub_v1.types.ListSharedResourceSubscriptionsRequest" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.pagers.ListSharedResourceSubscriptionsPager", - "shortName": "list_shared_resource_subscriptions" - }, - "description": "Sample for ListSharedResourceSubscriptions", - "file": "analyticshub_v1_generated_analytics_hub_service_list_shared_resource_subscriptions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_ListSharedResourceSubscriptions_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - 
}, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1_generated_analytics_hub_service_list_shared_resource_subscriptions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient", - "shortName": "AnalyticsHubServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient.list_subscriptions", - "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.ListSubscriptions", - "service": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "ListSubscriptions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_analyticshub_v1.types.ListSubscriptionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.pagers.ListSubscriptionsAsyncPager", - "shortName": "list_subscriptions" - }, - "description": "Sample for ListSubscriptions", - "file": "analyticshub_v1_generated_analytics_hub_service_list_subscriptions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_ListSubscriptions_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1_generated_analytics_hub_service_list_subscriptions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient", - "shortName": "AnalyticsHubServiceClient" - }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient.list_subscriptions", - "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.ListSubscriptions", - "service": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "ListSubscriptions" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_analyticshub_v1.types.ListSubscriptionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.pagers.ListSubscriptionsPager", - "shortName": "list_subscriptions" - }, - "description": "Sample for ListSubscriptions", - "file": "analyticshub_v1_generated_analytics_hub_service_list_subscriptions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_ListSubscriptions_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, 
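Every ``List*`` entry here resolves to a pager rather than a raw response message: the sync methods return ``...Pager`` objects that are iterated directly, and the async methods return ``...AsyncPager`` objects meant for ``async for``, with follow-up page requests issued lazily inside the loop. A minimal sketch of both forms, with a placeholder parent:

.. code-block:: python

    import asyncio

    from google.cloud import bigquery_analyticshub_v1


    def list_subscriptions_sync(parent: str) -> None:
        client = bigquery_analyticshub_v1.AnalyticsHubServiceClient()
        request = bigquery_analyticshub_v1.ListSubscriptionsRequest(parent=parent)
        # The pager fetches additional pages transparently during iteration.
        for subscription in client.list_subscriptions(request=request):
            print(subscription.name)


    async def list_subscriptions_async(parent: str) -> None:
        client = bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient()
        request = bigquery_analyticshub_v1.ListSubscriptionsRequest(parent=parent)
        # The call itself is awaited; the resulting AsyncPager is iterated with
        # `async for`, mirroring the generated async samples.
        async for subscription in await client.list_subscriptions(request=request):
            print(subscription.name)


    # asyncio.run(list_subscriptions_async("projects/my-project/locations/us"))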
- { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1_generated_analytics_hub_service_list_subscriptions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient", - "shortName": "AnalyticsHubServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient.refresh_subscription", - "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.RefreshSubscription", - "service": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "RefreshSubscription" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_analyticshub_v1.types.RefreshSubscriptionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "refresh_subscription" - }, - "description": "Sample for RefreshSubscription", - "file": "analyticshub_v1_generated_analytics_hub_service_refresh_subscription_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_RefreshSubscription_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1_generated_analytics_hub_service_refresh_subscription_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient", - "shortName": "AnalyticsHubServiceClient" - }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient.refresh_subscription", - "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.RefreshSubscription", - "service": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "RefreshSubscription" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_analyticshub_v1.types.RefreshSubscriptionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "refresh_subscription" - }, - "description": "Sample for RefreshSubscription", - "file": "analyticshub_v1_generated_analytics_hub_service_refresh_subscription_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - 
"regionTag": "analyticshub_v1_generated_AnalyticsHubService_RefreshSubscription_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1_generated_analytics_hub_service_refresh_subscription_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient", - "shortName": "AnalyticsHubServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient.revoke_subscription", - "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.RevokeSubscription", - "service": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "RevokeSubscription" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_analyticshub_v1.types.RevokeSubscriptionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_analyticshub_v1.types.RevokeSubscriptionResponse", - "shortName": "revoke_subscription" - }, - "description": "Sample for RevokeSubscription", - "file": "analyticshub_v1_generated_analytics_hub_service_revoke_subscription_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_RevokeSubscription_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1_generated_analytics_hub_service_revoke_subscription_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient", - "shortName": "AnalyticsHubServiceClient" - }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient.revoke_subscription", - "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.RevokeSubscription", - "service": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "RevokeSubscription" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_analyticshub_v1.types.RevokeSubscriptionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_analyticshub_v1.types.RevokeSubscriptionResponse", - "shortName": "revoke_subscription" 
- }, - "description": "Sample for RevokeSubscription", - "file": "analyticshub_v1_generated_analytics_hub_service_revoke_subscription_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_RevokeSubscription_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1_generated_analytics_hub_service_revoke_subscription_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient", - "shortName": "AnalyticsHubServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient.set_iam_policy", - "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.SetIamPolicy", - "service": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "SetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "set_iam_policy" - }, - "description": "Sample for SetIamPolicy", - "file": "analyticshub_v1_generated_analytics_hub_service_set_iam_policy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_SetIamPolicy_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1_generated_analytics_hub_service_set_iam_policy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient", - "shortName": "AnalyticsHubServiceClient" - }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient.set_iam_policy", - "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.SetIamPolicy", - "service": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "SetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "set_iam_policy" - }, - "description": "Sample for SetIamPolicy", - "file": 
"analyticshub_v1_generated_analytics_hub_service_set_iam_policy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_SetIamPolicy_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1_generated_analytics_hub_service_set_iam_policy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient", - "shortName": "AnalyticsHubServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient.subscribe_data_exchange", - "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.SubscribeDataExchange", - "service": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "SubscribeDataExchange" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_analyticshub_v1.types.SubscribeDataExchangeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "subscribe_data_exchange" - }, - "description": "Sample for SubscribeDataExchange", - "file": "analyticshub_v1_generated_analytics_hub_service_subscribe_data_exchange_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_SubscribeDataExchange_async", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1_generated_analytics_hub_service_subscribe_data_exchange_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient", - "shortName": "AnalyticsHubServiceClient" - }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient.subscribe_data_exchange", - "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.SubscribeDataExchange", - "service": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "SubscribeDataExchange" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_analyticshub_v1.types.SubscribeDataExchangeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, 
bytes]]]" - } - ], - "resultType": "google.api_core.operation.Operation", - "shortName": "subscribe_data_exchange" - }, - "description": "Sample for SubscribeDataExchange", - "file": "analyticshub_v1_generated_analytics_hub_service_subscribe_data_exchange_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_SubscribeDataExchange_sync", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1_generated_analytics_hub_service_subscribe_data_exchange_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient", - "shortName": "AnalyticsHubServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient.subscribe_listing", - "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.SubscribeListing", - "service": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "SubscribeListing" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_analyticshub_v1.types.SubscribeListingRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_analyticshub_v1.types.SubscribeListingResponse", - "shortName": "subscribe_listing" - }, - "description": "Sample for SubscribeListing", - "file": "analyticshub_v1_generated_analytics_hub_service_subscribe_listing_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_SubscribeListing_async", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1_generated_analytics_hub_service_subscribe_listing_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient", - "shortName": "AnalyticsHubServiceClient" - }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient.subscribe_listing", - "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.SubscribeListing", - "service": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "SubscribeListing" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_analyticshub_v1.types.SubscribeListingRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - 
"type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_analyticshub_v1.types.SubscribeListingResponse", - "shortName": "subscribe_listing" - }, - "description": "Sample for SubscribeListing", - "file": "analyticshub_v1_generated_analytics_hub_service_subscribe_listing_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_SubscribeListing_sync", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1_generated_analytics_hub_service_subscribe_listing_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient", - "shortName": "AnalyticsHubServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient.test_iam_permissions", - "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.TestIamPermissions", - "service": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "TestIamPermissions" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", - "shortName": "test_iam_permissions" - }, - "description": "Sample for TestIamPermissions", - "file": "analyticshub_v1_generated_analytics_hub_service_test_iam_permissions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_TestIamPermissions_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1_generated_analytics_hub_service_test_iam_permissions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient", - "shortName": "AnalyticsHubServiceClient" - }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient.test_iam_permissions", - "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.TestIamPermissions", - "service": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "TestIamPermissions" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", - "shortName": "test_iam_permissions" - }, - "description": "Sample for TestIamPermissions", - "file": "analyticshub_v1_generated_analytics_hub_service_test_iam_permissions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_TestIamPermissions_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1_generated_analytics_hub_service_test_iam_permissions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient", - "shortName": "AnalyticsHubServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient.update_data_exchange", - "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.UpdateDataExchange", - "service": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "UpdateDataExchange" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_analyticshub_v1.types.UpdateDataExchangeRequest" - }, - { - "name": "data_exchange", - "type": "google.cloud.bigquery_analyticshub_v1.types.DataExchange" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_analyticshub_v1.types.DataExchange", - "shortName": "update_data_exchange" - }, - "description": "Sample for UpdateDataExchange", - "file": "analyticshub_v1_generated_analytics_hub_service_update_data_exchange_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_UpdateDataExchange_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1_generated_analytics_hub_service_update_data_exchange_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient", - "shortName": "AnalyticsHubServiceClient" - }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient.update_data_exchange", - "method": { - "fullName": 
"google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.UpdateDataExchange", - "service": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "UpdateDataExchange" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_analyticshub_v1.types.UpdateDataExchangeRequest" - }, - { - "name": "data_exchange", - "type": "google.cloud.bigquery_analyticshub_v1.types.DataExchange" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_analyticshub_v1.types.DataExchange", - "shortName": "update_data_exchange" - }, - "description": "Sample for UpdateDataExchange", - "file": "analyticshub_v1_generated_analytics_hub_service_update_data_exchange_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_UpdateDataExchange_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1_generated_analytics_hub_service_update_data_exchange_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient", - "shortName": "AnalyticsHubServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceAsyncClient.update_listing", - "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.UpdateListing", - "service": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "UpdateListing" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_analyticshub_v1.types.UpdateListingRequest" - }, - { - "name": "listing", - "type": "google.cloud.bigquery_analyticshub_v1.types.Listing" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_analyticshub_v1.types.Listing", - "shortName": "update_listing" - }, - "description": "Sample for UpdateListing", - "file": "analyticshub_v1_generated_analytics_hub_service_update_listing_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_UpdateListing_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - 
], - "title": "analyticshub_v1_generated_analytics_hub_service_update_listing_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient", - "shortName": "AnalyticsHubServiceClient" - }, - "fullName": "google.cloud.bigquery_analyticshub_v1.AnalyticsHubServiceClient.update_listing", - "method": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService.UpdateListing", - "service": { - "fullName": "google.cloud.bigquery.analyticshub.v1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "UpdateListing" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_analyticshub_v1.types.UpdateListingRequest" - }, - { - "name": "listing", - "type": "google.cloud.bigquery_analyticshub_v1.types.Listing" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_analyticshub_v1.types.Listing", - "shortName": "update_listing" - }, - "description": "Sample for UpdateListing", - "file": "analyticshub_v1_generated_analytics_hub_service_update_listing_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1_generated_AnalyticsHubService_UpdateListing_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1_generated_analytics_hub_service_update_listing_sync.py" - } - ] -} diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/scripts/fixup_bigquery_analyticshub_v1_keywords.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/scripts/fixup_bigquery_analyticshub_v1_keywords.py deleted file mode 100644 index 257b245483d6..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/scripts/fixup_bigquery_analyticshub_v1_keywords.py +++ /dev/null @@ -1,197 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class bigquery_analyticshubCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_data_exchange': ('parent', 'data_exchange_id', 'data_exchange', ), - 'create_listing': ('parent', 'listing_id', 'listing', ), - 'delete_data_exchange': ('name', ), - 'delete_listing': ('name', ), - 'delete_subscription': ('name', ), - 'get_data_exchange': ('name', ), - 'get_iam_policy': ('resource', 'options', ), - 'get_listing': ('name', ), - 'get_subscription': ('name', ), - 'list_data_exchanges': ('parent', 'page_size', 'page_token', ), - 'list_listings': ('parent', 'page_size', 'page_token', ), - 'list_org_data_exchanges': ('organization', 'page_size', 'page_token', ), - 'list_shared_resource_subscriptions': ('resource', 'include_deleted_subscriptions', 'page_size', 'page_token', ), - 'list_subscriptions': ('parent', 'filter', 'page_size', 'page_token', ), - 'refresh_subscription': ('name', ), - 'revoke_subscription': ('name', ), - 'set_iam_policy': ('resource', 'policy', 'update_mask', ), - 'subscribe_data_exchange': ('name', 'destination', 'subscription', 'subscriber_contact', ), - 'subscribe_listing': ('name', 'destination_dataset', ), - 'test_iam_permissions': ('resource', 'permissions', ), - 'update_data_exchange': ('update_mask', 'data_exchange', ), - 'update_listing': ('update_mask', 'listing', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=bigquery_analyticshubCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. 
- - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the bigquery_analyticshub client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/setup.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/setup.py deleted file mode 100644 index 13ea51f40d29..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/setup.py +++ /dev/null @@ -1,99 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
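The keyword-fixup script above walks every ``.py`` file with a libcst transformer and rewrites flattened, positional client calls into the single ``request``-dict form, keeping ``retry``/``timeout``/``metadata`` as trailing keywords. An illustrative before/after (placeholder values; per the ``METHOD_TO_PARAMS`` table above, ``get_data_exchange`` takes just ``('name',)``):

.. code-block:: python

    # Illustrative only -- `client` and `RETRY` are placeholders.
    # Before the fixup (flattened, positional):
    #   client.get_data_exchange("projects/p/locations/l/dataExchanges/x", retry=RETRY)
    # After the fixup (one request dict; control params preserved as keywords):
    #   client.get_data_exchange(
    #       request={'name': "projects/p/locations/l/dataExchanges/x"},
    #       retry=RETRY,
    #   )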
-# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-bigquery-analyticshub' - - -description = "Google Cloud Bigquery Analyticshub API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/bigquery_analyticshub/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "grpc-google-iam-v1 >= 0.14.0, <1.0.0dev", -] -extras = { -} -url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-analyticshub" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - extras_require=extras, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/testing/constraints-3.10.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/testing/constraints-3.10.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
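Rather than importing the package to learn its version, ``setup.py`` above scrapes ``gapic_version.py`` with a lookbehind/lookahead regex and asserts exactly one match. A self-contained sketch of that extraction (the file content below is a stand-in; note the generated pattern's unescaped dots also match any single character, which the single-candidate assert keeps harmless):

.. code-block:: python

    import re

    # Stand-in for google/cloud/bigquery_analyticshub/gapic_version.py.
    gapic_version_py = '__version__ = "0.4.0"\n'

    # Same pattern as setup.py: a dotted version between double quotes.
    candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", gapic_version_py)
    assert len(candidates) == 1
    print(candidates[0])  # -> 0.4.0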
-google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/testing/constraints-3.11.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/testing/constraints-3.11.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/testing/constraints-3.12.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/testing/constraints-3.12.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/testing/constraints-3.13.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/testing/constraints-3.13.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/testing/constraints-3.7.txt deleted file mode 100644 index fb7e93a1b473..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/testing/constraints-3.7.txt +++ /dev/null @@ -1,11 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -proto-plus==1.22.3 -protobuf==3.20.2 -grpc-google-iam-v1==0.14.0 diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/testing/constraints-3.8.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/testing/constraints-3.8.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/testing/constraints-3.9.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/testing/constraints-3.9.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
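As the header of ``constraints-3.7.txt`` explains, the file for the oldest supported Python pins each dependency to the exact lower bound declared in ``setup.py``, while the files for newer Pythons (3.8 through 3.13) deliberately leave everything unpinned so tests also run against current releases. A deliberately naive sketch of deriving those pins from the setup.py specifiers (requirement strings abridged from above):

.. code-block:: python

    import re

    # Requirement specifiers as declared in setup.py (abridged).
    dependencies = [
        "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0",
        "proto-plus >= 1.22.3, <2.0.0dev",
        "grpc-google-iam-v1 >= 0.14.0, <1.0.0dev",
    ]

    # Pin each package to its declared lower bound, constraints-3.7.txt style.
    for dep in dependencies:
        name = re.split(r"[ ><=!;\[]", dep, maxsplit=1)[0]
        lower = re.search(r">=\s*([\w.]+)", dep).group(1)
        print(f"{name}=={lower}")
    # -> google-auth==2.14.1
    #    proto-plus==1.22.3
    #    grpc-google-iam-v1==0.14.0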
-google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/tests/__init__.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/tests/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/tests/unit/__init__.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/tests/unit/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/tests/unit/gapic/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/tests/unit/gapic/bigquery_analyticshub_v1/__init__.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/tests/unit/gapic/bigquery_analyticshub_v1/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/tests/unit/gapic/bigquery_analyticshub_v1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/tests/unit/gapic/bigquery_analyticshub_v1/test_analytics_hub_service.py b/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/tests/unit/gapic/bigquery_analyticshub_v1/test_analytics_hub_service.py deleted file mode 100644 index 6c3299d86508..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-analyticshub/v1/tests/unit/gapic/bigquery_analyticshub_v1/test_analytics_hub_service.py +++ /dev/null @@ -1,10573 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation -from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service import AnalyticsHubServiceAsyncClient -from google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service import AnalyticsHubServiceClient -from google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service import pagers -from google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service import transports -from google.cloud.bigquery_analyticshub_v1.types import analyticshub -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import empty_pb2 # type: 
ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.protobuf import wrappers_pb2 # type: ignore -from google.type import expr_pb2 # type: ignore -import google.auth - - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": "service account credentials", - "principal": "service-account@example.com", -} -CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert AnalyticsHubServiceClient._get_default_mtls_endpoint(None) is None - assert AnalyticsHubServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert AnalyticsHubServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert AnalyticsHubServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert AnalyticsHubServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert AnalyticsHubServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert AnalyticsHubServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert AnalyticsHubServiceClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert AnalyticsHubServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - AnalyticsHubServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, 
{"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert AnalyticsHubServiceClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert AnalyticsHubServiceClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert AnalyticsHubServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - AnalyticsHubServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert AnalyticsHubServiceClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert AnalyticsHubServiceClient._get_client_cert_source(None, False) is None - assert AnalyticsHubServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert AnalyticsHubServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert AnalyticsHubServiceClient._get_client_cert_source(None, True) is mock_default_cert_source - assert AnalyticsHubServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(AnalyticsHubServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AnalyticsHubServiceClient)) -@mock.patch.object(AnalyticsHubServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AnalyticsHubServiceAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = AnalyticsHubServiceClient._DEFAULT_UNIVERSE - default_endpoint = AnalyticsHubServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = AnalyticsHubServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert AnalyticsHubServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert AnalyticsHubServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == AnalyticsHubServiceClient.DEFAULT_MTLS_ENDPOINT - assert AnalyticsHubServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert AnalyticsHubServiceClient._get_api_endpoint(None, None, default_universe, "always") == AnalyticsHubServiceClient.DEFAULT_MTLS_ENDPOINT - assert AnalyticsHubServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == AnalyticsHubServiceClient.DEFAULT_MTLS_ENDPOINT - assert AnalyticsHubServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert AnalyticsHubServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - 
AnalyticsHubServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." - - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert AnalyticsHubServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert AnalyticsHubServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert AnalyticsHubServiceClient._get_universe_domain(None, None) == AnalyticsHubServiceClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - AnalyticsHubServiceClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." - -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = AnalyticsHubServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - -@pytest.mark.parametrize("error_code", [401,403,404,500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = AnalyticsHubServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - -@pytest.mark.parametrize("client_class,transport_name", [ - (AnalyticsHubServiceClient, "grpc"), - (AnalyticsHubServiceAsyncClient, "grpc_asyncio"), -]) -def test_analytics_hub_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'analyticshub.googleapis.com:443' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.AnalyticsHubServiceGrpcTransport, "grpc"), - (transports.AnalyticsHubServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_analytics_hub_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = 
service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (AnalyticsHubServiceClient, "grpc"), - (AnalyticsHubServiceAsyncClient, "grpc_asyncio"), -]) -def test_analytics_hub_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'analyticshub.googleapis.com:443' - ) - - -def test_analytics_hub_service_client_get_transport_class(): - transport = AnalyticsHubServiceClient.get_transport_class() - available_transports = [ - transports.AnalyticsHubServiceGrpcTransport, - ] - assert transport in available_transports - - transport = AnalyticsHubServiceClient.get_transport_class("grpc") - assert transport == transports.AnalyticsHubServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (AnalyticsHubServiceClient, transports.AnalyticsHubServiceGrpcTransport, "grpc"), - (AnalyticsHubServiceAsyncClient, transports.AnalyticsHubServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(AnalyticsHubServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AnalyticsHubServiceClient)) -@mock.patch.object(AnalyticsHubServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AnalyticsHubServiceAsyncClient)) -def test_analytics_hub_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(AnalyticsHubServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(AnalyticsHubServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (AnalyticsHubServiceClient, transports.AnalyticsHubServiceGrpcTransport, "grpc", "true"), - 
(AnalyticsHubServiceAsyncClient, transports.AnalyticsHubServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (AnalyticsHubServiceClient, transports.AnalyticsHubServiceGrpcTransport, "grpc", "false"), - (AnalyticsHubServiceAsyncClient, transports.AnalyticsHubServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), -]) -@mock.patch.object(AnalyticsHubServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AnalyticsHubServiceClient)) -@mock.patch.object(AnalyticsHubServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AnalyticsHubServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_analytics_hub_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - AnalyticsHubServiceClient, AnalyticsHubServiceAsyncClient -]) -@mock.patch.object(AnalyticsHubServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AnalyticsHubServiceClient)) -@mock.patch.object(AnalyticsHubServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AnalyticsHubServiceAsyncClient)) -def test_analytics_hub_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
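The `never`/`always` branches checked above can be reproduced outside the test harness, since `get_mtls_endpoint_and_cert_source` is a classmethod that needs no credentials; a minimal sketch of the `never` case:

```python
import os

from google.cloud.bigquery_analyticshub_v1 import AnalyticsHubServiceClient

os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "never"
api_endpoint, cert_source = AnalyticsHubServiceClient.get_mtls_endpoint_and_cert_source()

# "never" selects the plain endpoint and disables client certificates.
assert api_endpoint == AnalyticsHubServiceClient.DEFAULT_ENDPOINT
assert cert_source is None
```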
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - AnalyticsHubServiceClient, AnalyticsHubServiceAsyncClient -]) -@mock.patch.object(AnalyticsHubServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AnalyticsHubServiceClient)) -@mock.patch.object(AnalyticsHubServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AnalyticsHubServiceAsyncClient)) -def test_analytics_hub_service_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = AnalyticsHubServiceClient._DEFAULT_UNIVERSE - default_endpoint = AnalyticsHubServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = AnalyticsHubServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (AnalyticsHubServiceClient, transports.AnalyticsHubServiceGrpcTransport, "grpc"), - (AnalyticsHubServiceAsyncClient, transports.AnalyticsHubServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_analytics_hub_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (AnalyticsHubServiceClient, transports.AnalyticsHubServiceGrpcTransport, "grpc", grpc_helpers), - (AnalyticsHubServiceAsyncClient, transports.AnalyticsHubServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_analytics_hub_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_analytics_hub_service_client_client_options_from_dict(): - with mock.patch('google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.transports.AnalyticsHubServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = AnalyticsHubServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (AnalyticsHubServiceClient, transports.AnalyticsHubServiceGrpcTransport, "grpc", grpc_helpers), - (AnalyticsHubServiceAsyncClient, transports.AnalyticsHubServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_analytics_hub_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
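For orientation, the `credentials_file` option patched around above corresponds to the following caller-side configuration; `credentials.json` is a placeholder and must contain service-account or authorized-user JSON for the client to construct:

```python
from google.api_core import client_options
from google.cloud.bigquery_analyticshub_v1 import AnalyticsHubServiceClient

# Placeholder path; the credentials are loaded when the transport is built.
options = client_options.ClientOptions(credentials_file="credentials.json")
client = AnalyticsHubServiceClient(client_options=options)
```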
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "analyticshub.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="analyticshub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - analyticshub.ListDataExchangesRequest, - dict, -]) -def test_list_data_exchanges(request_type, transport: str = 'grpc'): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_exchanges), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = analyticshub.ListDataExchangesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_data_exchanges(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = analyticshub.ListDataExchangesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDataExchangesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_data_exchanges_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = analyticshub.ListDataExchangesRequest( - parent='parent_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_exchanges), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.list_data_exchanges(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == analyticshub.ListDataExchangesRequest( - parent='parent_value', - page_token='page_token_value', - ) - -def test_list_data_exchanges_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_data_exchanges in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_data_exchanges] = mock_rpc - request = {} - client.list_data_exchanges(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_data_exchanges(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_data_exchanges_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_data_exchanges in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_data_exchanges] = mock_rpc - - request = {} - await client.list_data_exchanges(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_data_exchanges(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_data_exchanges_async(transport: str = 'grpc_asyncio', request_type=analyticshub.ListDataExchangesRequest): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_exchanges), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.ListDataExchangesResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_data_exchanges(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = analyticshub.ListDataExchangesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDataExchangesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_data_exchanges_async_from_dict(): - await test_list_data_exchanges_async(request_type=dict) - -def test_list_data_exchanges_field_headers(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = analyticshub.ListDataExchangesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_exchanges), - '__call__') as call: - call.return_value = analyticshub.ListDataExchangesResponse() - client.list_data_exchanges(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_data_exchanges_field_headers_async(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = analyticshub.ListDataExchangesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_exchanges), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.ListDataExchangesResponse()) - await client.list_data_exchanges(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_data_exchanges_flattened(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_exchanges), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = analyticshub.ListDataExchangesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_data_exchanges( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
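The `('x-goog-request-params', 'parent=parent_value')` pair asserted in the field-header tests is built by the `google.api_core` routing-header helper that the tests also use; a standalone check:

```python
from google.api_core import gapic_v1

metadata = gapic_v1.routing_header.to_grpc_metadata((("parent", "parent_value"),))

# The helper URL-encodes the key/value pairs into a single metadata entry.
assert metadata == ("x-goog-request-params", "parent=parent_value")
```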
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_data_exchanges_flattened_error(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_data_exchanges( - analyticshub.ListDataExchangesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_data_exchanges_flattened_async(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_exchanges), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = analyticshub.ListDataExchangesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.ListDataExchangesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_data_exchanges( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_data_exchanges_flattened_error_async(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_data_exchanges( - analyticshub.ListDataExchangesRequest(), - parent='parent_value', - ) - - -def test_list_data_exchanges_pager(transport_name: str = "grpc"): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_exchanges), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analyticshub.ListDataExchangesResponse( - data_exchanges=[ - analyticshub.DataExchange(), - analyticshub.DataExchange(), - analyticshub.DataExchange(), - ], - next_page_token='abc', - ), - analyticshub.ListDataExchangesResponse( - data_exchanges=[], - next_page_token='def', - ), - analyticshub.ListDataExchangesResponse( - data_exchanges=[ - analyticshub.DataExchange(), - ], - next_page_token='ghi', - ), - analyticshub.ListDataExchangesResponse( - data_exchanges=[ - analyticshub.DataExchange(), - analyticshub.DataExchange(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_data_exchanges(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, analyticshub.DataExchange) - for i in results) -def test_list_data_exchanges_pages(transport_name: str = "grpc"): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_exchanges), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - analyticshub.ListDataExchangesResponse( - data_exchanges=[ - analyticshub.DataExchange(), - analyticshub.DataExchange(), - analyticshub.DataExchange(), - ], - next_page_token='abc', - ), - analyticshub.ListDataExchangesResponse( - data_exchanges=[], - next_page_token='def', - ), - analyticshub.ListDataExchangesResponse( - data_exchanges=[ - analyticshub.DataExchange(), - ], - next_page_token='ghi', - ), - analyticshub.ListDataExchangesResponse( - data_exchanges=[ - analyticshub.DataExchange(), - analyticshub.DataExchange(), - ], - ), - RuntimeError, - ) - pages = list(client.list_data_exchanges(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_data_exchanges_async_pager(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_exchanges), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
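In application code the pager hides these page boundaries entirely; a usage sketch, assuming Application Default Credentials are configured and using a placeholder parent:

```python
from google.cloud.bigquery_analyticshub_v1 import AnalyticsHubServiceClient

client = AnalyticsHubServiceClient()  # assumes ADC is configured

# Iterating the pager fetches follow-up pages on demand via next_page_token.
for data_exchange in client.list_data_exchanges(
    parent="projects/my-project/locations/us"  # placeholder
):
    print(data_exchange.name)
```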
- call.side_effect = ( - analyticshub.ListDataExchangesResponse( - data_exchanges=[ - analyticshub.DataExchange(), - analyticshub.DataExchange(), - analyticshub.DataExchange(), - ], - next_page_token='abc', - ), - analyticshub.ListDataExchangesResponse( - data_exchanges=[], - next_page_token='def', - ), - analyticshub.ListDataExchangesResponse( - data_exchanges=[ - analyticshub.DataExchange(), - ], - next_page_token='ghi', - ), - analyticshub.ListDataExchangesResponse( - data_exchanges=[ - analyticshub.DataExchange(), - analyticshub.DataExchange(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_data_exchanges(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, analyticshub.DataExchange) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_data_exchanges_async_pages(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_exchanges), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - analyticshub.ListDataExchangesResponse( - data_exchanges=[ - analyticshub.DataExchange(), - analyticshub.DataExchange(), - analyticshub.DataExchange(), - ], - next_page_token='abc', - ), - analyticshub.ListDataExchangesResponse( - data_exchanges=[], - next_page_token='def', - ), - analyticshub.ListDataExchangesResponse( - data_exchanges=[ - analyticshub.DataExchange(), - ], - next_page_token='ghi', - ), - analyticshub.ListDataExchangesResponse( - data_exchanges=[ - analyticshub.DataExchange(), - analyticshub.DataExchange(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_data_exchanges(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - analyticshub.ListOrgDataExchangesRequest, - dict, -]) -def test_list_org_data_exchanges(request_type, transport: str = 'grpc'): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_org_data_exchanges), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = analyticshub.ListOrgDataExchangesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_org_data_exchanges(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = analyticshub.ListOrgDataExchangesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListOrgDataExchangesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_org_data_exchanges_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = analyticshub.ListOrgDataExchangesRequest( - organization='organization_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_org_data_exchanges), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_org_data_exchanges(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == analyticshub.ListOrgDataExchangesRequest( - organization='organization_value', - page_token='page_token_value', - ) - -def test_list_org_data_exchanges_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_org_data_exchanges in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_org_data_exchanges] = mock_rpc - request = {} - client.list_org_data_exchanges(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_org_data_exchanges(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_org_data_exchanges_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_org_data_exchanges in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_org_data_exchanges] = mock_rpc - - request = {} - await client.list_org_data_exchanges(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_org_data_exchanges(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_org_data_exchanges_async(transport: str = 'grpc_asyncio', request_type=analyticshub.ListOrgDataExchangesRequest): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_org_data_exchanges), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.ListOrgDataExchangesResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_org_data_exchanges(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = analyticshub.ListOrgDataExchangesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListOrgDataExchangesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_org_data_exchanges_async_from_dict(): - await test_list_org_data_exchanges_async(request_type=dict) - -def test_list_org_data_exchanges_field_headers(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = analyticshub.ListOrgDataExchangesRequest() - - request.organization = 'organization_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_org_data_exchanges), - '__call__') as call: - call.return_value = analyticshub.ListOrgDataExchangesResponse() - client.list_org_data_exchanges(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'organization=organization_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_org_data_exchanges_field_headers_async(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = analyticshub.ListOrgDataExchangesRequest() - - request.organization = 'organization_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_org_data_exchanges), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.ListOrgDataExchangesResponse()) - await client.list_org_data_exchanges(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'organization=organization_value', - ) in kw['metadata'] - - -def test_list_org_data_exchanges_flattened(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_org_data_exchanges), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = analyticshub.ListOrgDataExchangesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_org_data_exchanges( - organization='organization_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].organization - mock_val = 'organization_value' - assert arg == mock_val - - -def test_list_org_data_exchanges_flattened_error(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_org_data_exchanges( - analyticshub.ListOrgDataExchangesRequest(), - organization='organization_value', - ) - -@pytest.mark.asyncio -async def test_list_org_data_exchanges_flattened_async(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_org_data_exchanges), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = analyticshub.ListOrgDataExchangesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.ListOrgDataExchangesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_org_data_exchanges( - organization='organization_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].organization - mock_val = 'organization_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_org_data_exchanges_flattened_error_async(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_org_data_exchanges( - analyticshub.ListOrgDataExchangesRequest(), - organization='organization_value', - ) - - -def test_list_org_data_exchanges_pager(transport_name: str = "grpc"): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_org_data_exchanges), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - analyticshub.ListOrgDataExchangesResponse( - data_exchanges=[ - analyticshub.DataExchange(), - analyticshub.DataExchange(), - analyticshub.DataExchange(), - ], - next_page_token='abc', - ), - analyticshub.ListOrgDataExchangesResponse( - data_exchanges=[], - next_page_token='def', - ), - analyticshub.ListOrgDataExchangesResponse( - data_exchanges=[ - analyticshub.DataExchange(), - ], - next_page_token='ghi', - ), - analyticshub.ListOrgDataExchangesResponse( - data_exchanges=[ - analyticshub.DataExchange(), - analyticshub.DataExchange(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('organization', ''), - )), - ) - pager = client.list_org_data_exchanges(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, analyticshub.DataExchange) - for i in results) -def test_list_org_data_exchanges_pages(transport_name: str = "grpc"): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_org_data_exchanges), - '__call__') as call: - # Set the response to a series of pages. 
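The `*_flattened_error` tests above pin down a purely client-side contract: mixing a request object with flattened keyword arguments fails during argument validation, before any RPC is attempted. That makes the sketch below runnable with anonymous credentials and no network:

```python
import pytest
from google.auth.credentials import AnonymousCredentials
from google.cloud import bigquery_analyticshub_v1 as analyticshub_v1

client = analyticshub_v1.AnalyticsHubServiceClient(credentials=AnonymousCredentials())

# Raised client-side, never reaching the transport.
with pytest.raises(ValueError):
    client.list_org_data_exchanges(
        analyticshub_v1.ListOrgDataExchangesRequest(),
        organization="organizations/123",  # placeholder
    )
```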
- call.side_effect = ( - analyticshub.ListOrgDataExchangesResponse( - data_exchanges=[ - analyticshub.DataExchange(), - analyticshub.DataExchange(), - analyticshub.DataExchange(), - ], - next_page_token='abc', - ), - analyticshub.ListOrgDataExchangesResponse( - data_exchanges=[], - next_page_token='def', - ), - analyticshub.ListOrgDataExchangesResponse( - data_exchanges=[ - analyticshub.DataExchange(), - ], - next_page_token='ghi', - ), - analyticshub.ListOrgDataExchangesResponse( - data_exchanges=[ - analyticshub.DataExchange(), - analyticshub.DataExchange(), - ], - ), - RuntimeError, - ) - pages = list(client.list_org_data_exchanges(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_org_data_exchanges_async_pager(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_org_data_exchanges), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - analyticshub.ListOrgDataExchangesResponse( - data_exchanges=[ - analyticshub.DataExchange(), - analyticshub.DataExchange(), - analyticshub.DataExchange(), - ], - next_page_token='abc', - ), - analyticshub.ListOrgDataExchangesResponse( - data_exchanges=[], - next_page_token='def', - ), - analyticshub.ListOrgDataExchangesResponse( - data_exchanges=[ - analyticshub.DataExchange(), - ], - next_page_token='ghi', - ), - analyticshub.ListOrgDataExchangesResponse( - data_exchanges=[ - analyticshub.DataExchange(), - analyticshub.DataExchange(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_org_data_exchanges(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, analyticshub.DataExchange) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_org_data_exchanges_async_pages(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_org_data_exchanges), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analyticshub.ListOrgDataExchangesResponse( - data_exchanges=[ - analyticshub.DataExchange(), - analyticshub.DataExchange(), - analyticshub.DataExchange(), - ], - next_page_token='abc', - ), - analyticshub.ListOrgDataExchangesResponse( - data_exchanges=[], - next_page_token='def', - ), - analyticshub.ListOrgDataExchangesResponse( - data_exchanges=[ - analyticshub.DataExchange(), - ], - next_page_token='ghi', - ), - analyticshub.ListOrgDataExchangesResponse( - data_exchanges=[ - analyticshub.DataExchange(), - analyticshub.DataExchange(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_org_data_exchanges(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - analyticshub.GetDataExchangeRequest, - dict, -]) -def test_get_data_exchange(request_type, transport: str = 'grpc'): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_exchange), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = analyticshub.DataExchange( - name='name_value', - display_name='display_name_value', - description='description_value', - primary_contact='primary_contact_value', - documentation='documentation_value', - listing_count=1410, - icon=b'icon_blob', - discovery_type=analyticshub.DiscoveryType.DISCOVERY_TYPE_PRIVATE, - ) - response = client.get_data_exchange(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = analyticshub.GetDataExchangeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, analyticshub.DataExchange) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.primary_contact == 'primary_contact_value' - assert response.documentation == 'documentation_value' - assert response.listing_count == 1410 - assert response.icon == b'icon_blob' - assert response.discovery_type == analyticshub.DiscoveryType.DISCOVERY_TYPE_PRIVATE - - -def test_get_data_exchange_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = analyticshub.GetDataExchangeRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
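The response fields asserted above map directly onto the `DataExchange` message an application receives; a usage sketch with a placeholder resource name, again assuming ADC:

```python
from google.cloud.bigquery_analyticshub_v1 import AnalyticsHubServiceClient

client = AnalyticsHubServiceClient()  # assumes ADC is configured

data_exchange = client.get_data_exchange(
    name="projects/my-project/locations/us/dataExchanges/my_exchange"  # placeholder
)
print(data_exchange.display_name, data_exchange.listing_count)
```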
- with mock.patch.object( - type(client.transport.get_data_exchange), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_data_exchange(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == analyticshub.GetDataExchangeRequest( - name='name_value', - ) - -def test_get_data_exchange_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_data_exchange in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_data_exchange] = mock_rpc - request = {} - client.get_data_exchange(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_data_exchange(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_data_exchange_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_data_exchange in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_data_exchange] = mock_rpc - - request = {} - await client.get_data_exchange(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_data_exchange(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_data_exchange_async(transport: str = 'grpc_asyncio', request_type=analyticshub.GetDataExchangeRequest): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_exchange), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.DataExchange( - name='name_value', - display_name='display_name_value', - description='description_value', - primary_contact='primary_contact_value', - documentation='documentation_value', - listing_count=1410, - icon=b'icon_blob', - discovery_type=analyticshub.DiscoveryType.DISCOVERY_TYPE_PRIVATE, - )) - response = await client.get_data_exchange(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = analyticshub.GetDataExchangeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, analyticshub.DataExchange) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.primary_contact == 'primary_contact_value' - assert response.documentation == 'documentation_value' - assert response.listing_count == 1410 - assert response.icon == b'icon_blob' - assert response.discovery_type == analyticshub.DiscoveryType.DISCOVERY_TYPE_PRIVATE - - -@pytest.mark.asyncio -async def test_get_data_exchange_async_from_dict(): - await test_get_data_exchange_async(request_type=dict) - -def test_get_data_exchange_field_headers(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = analyticshub.GetDataExchangeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_exchange), - '__call__') as call: - call.return_value = analyticshub.DataExchange() - client.get_data_exchange(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_data_exchange_field_headers_async(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = analyticshub.GetDataExchangeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_exchange), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.DataExchange()) - await client.get_data_exchange(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_data_exchange_flattened(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(
- type(client.transport.get_data_exchange),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = analyticshub.DataExchange()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.get_data_exchange(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_get_data_exchange_flattened_error():
- client = AnalyticsHubServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.get_data_exchange(
- analyticshub.GetDataExchangeRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_get_data_exchange_flattened_async():
- client = AnalyticsHubServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_data_exchange),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.DataExchange())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.get_data_exchange(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_data_exchange_flattened_error_async():
- client = AnalyticsHubServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.get_data_exchange(
- analyticshub.GetDataExchangeRequest(),
- name='name_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- analyticshub.CreateDataExchangeRequest,
- dict,
-])
-def test_create_data_exchange(request_type, transport: str = 'grpc'):
- client = AnalyticsHubServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_data_exchange),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = analyticshub.DataExchange(
- name='name_value',
- display_name='display_name_value',
- description='description_value',
- primary_contact='primary_contact_value',
- documentation='documentation_value',
- listing_count=1410,
- icon=b'icon_blob',
- discovery_type=analyticshub.DiscoveryType.DISCOVERY_TYPE_PRIVATE,
- )
- response = client.create_data_exchange(request)
-
- # Establish that the underlying gRPC stub method was called.
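- # Each entry in call.mock_calls unpacks to a (name, args, kwargs) triple;
- # args[0] is the request proto the client actually sent.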
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = analyticshub.CreateDataExchangeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, analyticshub.DataExchange) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.primary_contact == 'primary_contact_value' - assert response.documentation == 'documentation_value' - assert response.listing_count == 1410 - assert response.icon == b'icon_blob' - assert response.discovery_type == analyticshub.DiscoveryType.DISCOVERY_TYPE_PRIVATE - - -def test_create_data_exchange_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = analyticshub.CreateDataExchangeRequest( - parent='parent_value', - data_exchange_id='data_exchange_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_exchange), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_data_exchange(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == analyticshub.CreateDataExchangeRequest( - parent='parent_value', - data_exchange_id='data_exchange_id_value', - ) - -def test_create_data_exchange_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_data_exchange in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_data_exchange] = mock_rpc - request = {} - client.create_data_exchange(request) - - # Establish that the underlying gRPC stub method was called. 
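- # _prep_wrapped_messages builds each wrapped RPC once at client
- # construction; swapping the _wrapped_methods entry above proves that
- # later calls are served from that cache rather than re-wrapped.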
- assert mock_rpc.call_count == 1 - - client.create_data_exchange(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_data_exchange_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_data_exchange in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_data_exchange] = mock_rpc - - request = {} - await client.create_data_exchange(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_data_exchange(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_data_exchange_async(transport: str = 'grpc_asyncio', request_type=analyticshub.CreateDataExchangeRequest): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_exchange), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.DataExchange( - name='name_value', - display_name='display_name_value', - description='description_value', - primary_contact='primary_contact_value', - documentation='documentation_value', - listing_count=1410, - icon=b'icon_blob', - discovery_type=analyticshub.DiscoveryType.DISCOVERY_TYPE_PRIVATE, - )) - response = await client.create_data_exchange(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = analyticshub.CreateDataExchangeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, analyticshub.DataExchange) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.primary_contact == 'primary_contact_value' - assert response.documentation == 'documentation_value' - assert response.listing_count == 1410 - assert response.icon == b'icon_blob' - assert response.discovery_type == analyticshub.DiscoveryType.DISCOVERY_TYPE_PRIVATE - - -@pytest.mark.asyncio -async def test_create_data_exchange_async_from_dict(): - await test_create_data_exchange_async(request_type=dict) - -def test_create_data_exchange_field_headers(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = analyticshub.CreateDataExchangeRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_exchange), - '__call__') as call: - call.return_value = analyticshub.DataExchange() - client.create_data_exchange(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_data_exchange_field_headers_async(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = analyticshub.CreateDataExchangeRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_exchange), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.DataExchange()) - await client.create_data_exchange(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_data_exchange_flattened(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_exchange), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = analyticshub.DataExchange() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_data_exchange( - parent='parent_value', - data_exchange=analyticshub.DataExchange(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
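- # The flattened kwargs should have been packed into a single
- # CreateDataExchangeRequest; check that each value landed on the proto.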
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].data_exchange - mock_val = analyticshub.DataExchange(name='name_value') - assert arg == mock_val - - -def test_create_data_exchange_flattened_error(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_data_exchange( - analyticshub.CreateDataExchangeRequest(), - parent='parent_value', - data_exchange=analyticshub.DataExchange(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_create_data_exchange_flattened_async(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_exchange), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = analyticshub.DataExchange() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.DataExchange()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_data_exchange( - parent='parent_value', - data_exchange=analyticshub.DataExchange(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].data_exchange - mock_val = analyticshub.DataExchange(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_data_exchange_flattened_error_async(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_data_exchange( - analyticshub.CreateDataExchangeRequest(), - parent='parent_value', - data_exchange=analyticshub.DataExchange(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - analyticshub.UpdateDataExchangeRequest, - dict, -]) -def test_update_data_exchange(request_type, transport: str = 'grpc'): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_exchange), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = analyticshub.DataExchange( - name='name_value', - display_name='display_name_value', - description='description_value', - primary_contact='primary_contact_value', - documentation='documentation_value', - listing_count=1410, - icon=b'icon_blob', - discovery_type=analyticshub.DiscoveryType.DISCOVERY_TYPE_PRIVATE, - ) - response = client.update_data_exchange(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = analyticshub.UpdateDataExchangeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, analyticshub.DataExchange) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.primary_contact == 'primary_contact_value' - assert response.documentation == 'documentation_value' - assert response.listing_count == 1410 - assert response.icon == b'icon_blob' - assert response.discovery_type == analyticshub.DiscoveryType.DISCOVERY_TYPE_PRIVATE - - -def test_update_data_exchange_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = analyticshub.UpdateDataExchangeRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_exchange), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_data_exchange(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == analyticshub.UpdateDataExchangeRequest( - ) - -def test_update_data_exchange_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_data_exchange in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_data_exchange] = mock_rpc - request = {} - client.update_data_exchange(request) - - # Establish that the underlying gRPC stub method was called. 
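- # The plain dict request is coerced into an UpdateDataExchangeRequest by
- # the client surface before it reaches the callable cached in
- # _wrapped_methods (here, our mock).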
- assert mock_rpc.call_count == 1 - - client.update_data_exchange(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_data_exchange_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_data_exchange in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_data_exchange] = mock_rpc - - request = {} - await client.update_data_exchange(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.update_data_exchange(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_data_exchange_async(transport: str = 'grpc_asyncio', request_type=analyticshub.UpdateDataExchangeRequest): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_exchange), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.DataExchange( - name='name_value', - display_name='display_name_value', - description='description_value', - primary_contact='primary_contact_value', - documentation='documentation_value', - listing_count=1410, - icon=b'icon_blob', - discovery_type=analyticshub.DiscoveryType.DISCOVERY_TYPE_PRIVATE, - )) - response = await client.update_data_exchange(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = analyticshub.UpdateDataExchangeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, analyticshub.DataExchange) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.primary_contact == 'primary_contact_value' - assert response.documentation == 'documentation_value' - assert response.listing_count == 1410 - assert response.icon == b'icon_blob' - assert response.discovery_type == analyticshub.DiscoveryType.DISCOVERY_TYPE_PRIVATE - - -@pytest.mark.asyncio -async def test_update_data_exchange_async_from_dict(): - await test_update_data_exchange_async(request_type=dict) - -def test_update_data_exchange_field_headers(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = analyticshub.UpdateDataExchangeRequest() - - request.data_exchange.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_exchange), - '__call__') as call: - call.return_value = analyticshub.DataExchange() - client.update_data_exchange(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'data_exchange.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_data_exchange_field_headers_async(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = analyticshub.UpdateDataExchangeRequest() - - request.data_exchange.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_exchange), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.DataExchange()) - await client.update_data_exchange(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'data_exchange.name=name_value', - ) in kw['metadata'] - - -def test_update_data_exchange_flattened(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_exchange), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = analyticshub.DataExchange() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_data_exchange( - data_exchange=analyticshub.DataExchange(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].data_exchange - mock_val = analyticshub.DataExchange(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_data_exchange_flattened_error(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_data_exchange( - analyticshub.UpdateDataExchangeRequest(), - data_exchange=analyticshub.DataExchange(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_data_exchange_flattened_async(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_exchange), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = analyticshub.DataExchange() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.DataExchange()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_data_exchange( - data_exchange=analyticshub.DataExchange(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].data_exchange - mock_val = analyticshub.DataExchange(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_data_exchange_flattened_error_async(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_data_exchange( - analyticshub.UpdateDataExchangeRequest(), - data_exchange=analyticshub.DataExchange(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - analyticshub.DeleteDataExchangeRequest, - dict, -]) -def test_delete_data_exchange(request_type, transport: str = 'grpc'): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_exchange), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_data_exchange(request) - - # Establish that the underlying gRPC stub method was called. 
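- # DeleteDataExchange maps to google.protobuf.Empty on the wire, so the
- # generated surface effectively returns None to the caller.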
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = analyticshub.DeleteDataExchangeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_data_exchange_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = analyticshub.DeleteDataExchangeRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_exchange), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_data_exchange(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == analyticshub.DeleteDataExchangeRequest( - name='name_value', - ) - -def test_delete_data_exchange_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_data_exchange in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_data_exchange] = mock_rpc - request = {} - client.delete_data_exchange(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_data_exchange(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_data_exchange_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_data_exchange in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_data_exchange] = mock_rpc - - request = {} - await client.delete_data_exchange(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.delete_data_exchange(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_data_exchange_async(transport: str = 'grpc_asyncio', request_type=analyticshub.DeleteDataExchangeRequest): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_exchange), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_data_exchange(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = analyticshub.DeleteDataExchangeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_data_exchange_async_from_dict(): - await test_delete_data_exchange_async(request_type=dict) - -def test_delete_data_exchange_field_headers(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = analyticshub.DeleteDataExchangeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_exchange), - '__call__') as call: - call.return_value = None - client.delete_data_exchange(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_data_exchange_field_headers_async(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = analyticshub.DeleteDataExchangeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_exchange), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_data_exchange(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_data_exchange_flattened(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_data_exchange), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_data_exchange( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_data_exchange_flattened_error(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_data_exchange( - analyticshub.DeleteDataExchangeRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_data_exchange_flattened_async(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_exchange), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_data_exchange( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_data_exchange_flattened_error_async(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_data_exchange( - analyticshub.DeleteDataExchangeRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - analyticshub.ListListingsRequest, - dict, -]) -def test_list_listings(request_type, transport: str = 'grpc'): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_listings), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = analyticshub.ListListingsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_listings(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = analyticshub.ListListingsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
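- # The raw ListListingsResponse is wrapped in a ListListingsPager, which
- # lazily re-invokes the RPC with each next_page_token while iterating.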
- assert isinstance(response, pagers.ListListingsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_listings_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = analyticshub.ListListingsRequest( - parent='parent_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_listings), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_listings(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == analyticshub.ListListingsRequest( - parent='parent_value', - page_token='page_token_value', - ) - -def test_list_listings_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_listings in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_listings] = mock_rpc - request = {} - client.list_listings(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_listings(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_listings_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_listings in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_listings] = mock_rpc - - request = {} - await client.list_listings(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.list_listings(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_listings_async(transport: str = 'grpc_asyncio', request_type=analyticshub.ListListingsRequest): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_listings), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.ListListingsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_listings(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = analyticshub.ListListingsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListListingsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_listings_async_from_dict(): - await test_list_listings_async(request_type=dict) - -def test_list_listings_field_headers(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = analyticshub.ListListingsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_listings), - '__call__') as call: - call.return_value = analyticshub.ListListingsResponse() - client.list_listings(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_listings_field_headers_async(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = analyticshub.ListListingsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_listings), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.ListListingsResponse()) - await client.list_listings(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
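- # Routing happens via the x-goog-request-params metadata entry, built
- # from the URI-bound request fields (here, 'parent').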
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_listings_flattened(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_listings), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = analyticshub.ListListingsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_listings( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_listings_flattened_error(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_listings( - analyticshub.ListListingsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_listings_flattened_async(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_listings), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = analyticshub.ListListingsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.ListListingsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_listings( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_listings_flattened_error_async(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_listings( - analyticshub.ListListingsRequest(), - parent='parent_value', - ) - - -def test_list_listings_pager(transport_name: str = "grpc"): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_listings), - '__call__') as call: - # Set the response to a series of pages. 
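- # With side_effect, each successive stub call pops the next response;
- # the trailing RuntimeError would only fire if the pager over-fetched
- # past the final page (whose next_page_token is empty).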
- call.side_effect = ( - analyticshub.ListListingsResponse( - listings=[ - analyticshub.Listing(), - analyticshub.Listing(), - analyticshub.Listing(), - ], - next_page_token='abc', - ), - analyticshub.ListListingsResponse( - listings=[], - next_page_token='def', - ), - analyticshub.ListListingsResponse( - listings=[ - analyticshub.Listing(), - ], - next_page_token='ghi', - ), - analyticshub.ListListingsResponse( - listings=[ - analyticshub.Listing(), - analyticshub.Listing(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_listings(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, analyticshub.Listing) - for i in results) -def test_list_listings_pages(transport_name: str = "grpc"): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_listings), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - analyticshub.ListListingsResponse( - listings=[ - analyticshub.Listing(), - analyticshub.Listing(), - analyticshub.Listing(), - ], - next_page_token='abc', - ), - analyticshub.ListListingsResponse( - listings=[], - next_page_token='def', - ), - analyticshub.ListListingsResponse( - listings=[ - analyticshub.Listing(), - ], - next_page_token='ghi', - ), - analyticshub.ListListingsResponse( - listings=[ - analyticshub.Listing(), - analyticshub.Listing(), - ], - ), - RuntimeError, - ) - pages = list(client.list_listings(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_listings_async_pager(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_listings), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - analyticshub.ListListingsResponse( - listings=[ - analyticshub.Listing(), - analyticshub.Listing(), - analyticshub.Listing(), - ], - next_page_token='abc', - ), - analyticshub.ListListingsResponse( - listings=[], - next_page_token='def', - ), - analyticshub.ListListingsResponse( - listings=[ - analyticshub.Listing(), - ], - next_page_token='ghi', - ), - analyticshub.ListListingsResponse( - listings=[ - analyticshub.Listing(), - analyticshub.Listing(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_listings(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, analyticshub.Listing) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_listings_async_pages(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
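- # The async surface awaits the stub, so the patch uses
- # new_callable=mock.AsyncMock to make the mocked '__call__' awaitable.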
- with mock.patch.object( - type(client.transport.list_listings), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - analyticshub.ListListingsResponse( - listings=[ - analyticshub.Listing(), - analyticshub.Listing(), - analyticshub.Listing(), - ], - next_page_token='abc', - ), - analyticshub.ListListingsResponse( - listings=[], - next_page_token='def', - ), - analyticshub.ListListingsResponse( - listings=[ - analyticshub.Listing(), - ], - next_page_token='ghi', - ), - analyticshub.ListListingsResponse( - listings=[ - analyticshub.Listing(), - analyticshub.Listing(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_listings(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - analyticshub.GetListingRequest, - dict, -]) -def test_get_listing(request_type, transport: str = 'grpc'): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_listing), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = analyticshub.Listing( - name='name_value', - display_name='display_name_value', - description='description_value', - primary_contact='primary_contact_value', - documentation='documentation_value', - state=analyticshub.Listing.State.ACTIVE, - icon=b'icon_blob', - categories=[analyticshub.Listing.Category.CATEGORY_OTHERS], - request_access='request_access_value', - discovery_type=analyticshub.DiscoveryType.DISCOVERY_TYPE_PRIVATE, - ) - response = client.get_listing(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = analyticshub.GetListingRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, analyticshub.Listing) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.primary_contact == 'primary_contact_value' - assert response.documentation == 'documentation_value' - assert response.state == analyticshub.Listing.State.ACTIVE - assert response.icon == b'icon_blob' - assert response.categories == [analyticshub.Listing.Category.CATEGORY_OTHERS] - assert response.request_access == 'request_access_value' - assert response.discovery_type == analyticshub.DiscoveryType.DISCOVERY_TYPE_PRIVATE - - -def test_get_listing_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
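- # GetListingRequest carries no auto-populated UUID4 field, so in practice
- # this pins down that explicitly set string fields pass through unchanged.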
- client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = analyticshub.GetListingRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_listing), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_listing(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == analyticshub.GetListingRequest( - name='name_value', - ) - -def test_get_listing_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_listing in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_listing] = mock_rpc - request = {} - client.get_listing(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_listing(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_listing_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_listing in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_listing] = mock_rpc - - request = {} - await client.get_listing(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        await client.get_listing(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_listing_async(transport: str = 'grpc_asyncio', request_type=analyticshub.GetListingRequest):
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_listing),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.Listing(
-            name='name_value',
-            display_name='display_name_value',
-            description='description_value',
-            primary_contact='primary_contact_value',
-            documentation='documentation_value',
-            state=analyticshub.Listing.State.ACTIVE,
-            icon=b'icon_blob',
-            categories=[analyticshub.Listing.Category.CATEGORY_OTHERS],
-            request_access='request_access_value',
-            discovery_type=analyticshub.DiscoveryType.DISCOVERY_TYPE_PRIVATE,
-        ))
-        response = await client.get_listing(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = analyticshub.GetListingRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, analyticshub.Listing)
-    assert response.name == 'name_value'
-    assert response.display_name == 'display_name_value'
-    assert response.description == 'description_value'
-    assert response.primary_contact == 'primary_contact_value'
-    assert response.documentation == 'documentation_value'
-    assert response.state == analyticshub.Listing.State.ACTIVE
-    assert response.icon == b'icon_blob'
-    assert response.categories == [analyticshub.Listing.Category.CATEGORY_OTHERS]
-    assert response.request_access == 'request_access_value'
-    assert response.discovery_type == analyticshub.DiscoveryType.DISCOVERY_TYPE_PRIVATE
-
-
-@pytest.mark.asyncio
-async def test_get_listing_async_from_dict():
-    await test_get_listing_async(request_type=dict)
-
-def test_get_listing_field_headers():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = analyticshub.GetListingRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_listing),
-            '__call__') as call:
-        call.return_value = analyticshub.Listing()
-        client.get_listing(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_listing_field_headers_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = analyticshub.GetListingRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_listing),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.Listing())
-        await client.get_listing(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_get_listing_flattened():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_listing),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = analyticshub.Listing()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.get_listing(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_get_listing_flattened_error():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_listing(
-            analyticshub.GetListingRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_get_listing_flattened_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_listing),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.Listing())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.get_listing(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_listing_flattened_error_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
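-    # For example (illustrative), each call below is valid on its own:
-    #     await client.get_listing(analyticshub.GetListingRequest(name='name_value'))
-    #     await client.get_listing(name='name_value')
-    # but combining a request object with flattened keyword arguments, as in
-    # the call under pytest.raises, must raise ValueError.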
- with pytest.raises(ValueError): - await client.get_listing( - analyticshub.GetListingRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - analyticshub.CreateListingRequest, - dict, -]) -def test_create_listing(request_type, transport: str = 'grpc'): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_listing), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = analyticshub.Listing( - name='name_value', - display_name='display_name_value', - description='description_value', - primary_contact='primary_contact_value', - documentation='documentation_value', - state=analyticshub.Listing.State.ACTIVE, - icon=b'icon_blob', - categories=[analyticshub.Listing.Category.CATEGORY_OTHERS], - request_access='request_access_value', - discovery_type=analyticshub.DiscoveryType.DISCOVERY_TYPE_PRIVATE, - ) - response = client.create_listing(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = analyticshub.CreateListingRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, analyticshub.Listing) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.primary_contact == 'primary_contact_value' - assert response.documentation == 'documentation_value' - assert response.state == analyticshub.Listing.State.ACTIVE - assert response.icon == b'icon_blob' - assert response.categories == [analyticshub.Listing.Category.CATEGORY_OTHERS] - assert response.request_access == 'request_access_value' - assert response.discovery_type == analyticshub.DiscoveryType.DISCOVERY_TYPE_PRIVATE - - -def test_create_listing_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = analyticshub.CreateListingRequest( - parent='parent_value', - listing_id='listing_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_listing), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.create_listing(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == analyticshub.CreateListingRequest( - parent='parent_value', - listing_id='listing_id_value', - ) - -def test_create_listing_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_listing in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_listing] = mock_rpc - request = {} - client.create_listing(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_listing(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_listing_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_listing in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_listing] = mock_rpc - - request = {} - await client.create_listing(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_listing(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_listing_async(transport: str = 'grpc_asyncio', request_type=analyticshub.CreateListingRequest): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_listing), - '__call__') as call: - # Designate an appropriate return value for the call. 
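-        # (grpc_helpers_async.FakeUnaryUnaryCall wraps the message in an
-        # awaitable that mimics a grpc.aio unary-unary call, which the async
-        # client awaits; the sync tests assign the bare message instead.)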
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.Listing(
-            name='name_value',
-            display_name='display_name_value',
-            description='description_value',
-            primary_contact='primary_contact_value',
-            documentation='documentation_value',
-            state=analyticshub.Listing.State.ACTIVE,
-            icon=b'icon_blob',
-            categories=[analyticshub.Listing.Category.CATEGORY_OTHERS],
-            request_access='request_access_value',
-            discovery_type=analyticshub.DiscoveryType.DISCOVERY_TYPE_PRIVATE,
-        ))
-        response = await client.create_listing(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = analyticshub.CreateListingRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, analyticshub.Listing)
-    assert response.name == 'name_value'
-    assert response.display_name == 'display_name_value'
-    assert response.description == 'description_value'
-    assert response.primary_contact == 'primary_contact_value'
-    assert response.documentation == 'documentation_value'
-    assert response.state == analyticshub.Listing.State.ACTIVE
-    assert response.icon == b'icon_blob'
-    assert response.categories == [analyticshub.Listing.Category.CATEGORY_OTHERS]
-    assert response.request_access == 'request_access_value'
-    assert response.discovery_type == analyticshub.DiscoveryType.DISCOVERY_TYPE_PRIVATE
-
-
-@pytest.mark.asyncio
-async def test_create_listing_async_from_dict():
-    await test_create_listing_async(request_type=dict)
-
-def test_create_listing_field_headers():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = analyticshub.CreateListingRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_listing),
-            '__call__') as call:
-        call.return_value = analyticshub.Listing()
-        client.create_listing(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_create_listing_field_headers_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = analyticshub.CreateListingRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_listing),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.Listing())
-        await client.create_listing(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_create_listing_flattened():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_listing),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = analyticshub.Listing()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.create_listing(
-            parent='parent_value',
-            listing=analyticshub.Listing(bigquery_dataset=analyticshub.Listing.BigQueryDatasetSource(dataset='dataset_value')),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].listing
-        mock_val = analyticshub.Listing(bigquery_dataset=analyticshub.Listing.BigQueryDatasetSource(dataset='dataset_value'))
-        assert arg == mock_val
-
-
-def test_create_listing_flattened_error():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.create_listing(
-            analyticshub.CreateListingRequest(),
-            parent='parent_value',
-            listing=analyticshub.Listing(bigquery_dataset=analyticshub.Listing.BigQueryDatasetSource(dataset='dataset_value')),
-        )
-
-@pytest.mark.asyncio
-async def test_create_listing_flattened_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_listing),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.Listing())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.create_listing(
-            parent='parent_value',
-            listing=analyticshub.Listing(bigquery_dataset=analyticshub.Listing.BigQueryDatasetSource(dataset='dataset_value')),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].listing
-        mock_val = analyticshub.Listing(bigquery_dataset=analyticshub.Listing.BigQueryDatasetSource(dataset='dataset_value'))
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_listing_flattened_error_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - await client.create_listing( - analyticshub.CreateListingRequest(), - parent='parent_value', - listing=analyticshub.Listing(bigquery_dataset=analyticshub.Listing.BigQueryDatasetSource(dataset='dataset_value')), - ) - - -@pytest.mark.parametrize("request_type", [ - analyticshub.UpdateListingRequest, - dict, -]) -def test_update_listing(request_type, transport: str = 'grpc'): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_listing), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = analyticshub.Listing( - name='name_value', - display_name='display_name_value', - description='description_value', - primary_contact='primary_contact_value', - documentation='documentation_value', - state=analyticshub.Listing.State.ACTIVE, - icon=b'icon_blob', - categories=[analyticshub.Listing.Category.CATEGORY_OTHERS], - request_access='request_access_value', - discovery_type=analyticshub.DiscoveryType.DISCOVERY_TYPE_PRIVATE, - ) - response = client.update_listing(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = analyticshub.UpdateListingRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, analyticshub.Listing) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.primary_contact == 'primary_contact_value' - assert response.documentation == 'documentation_value' - assert response.state == analyticshub.Listing.State.ACTIVE - assert response.icon == b'icon_blob' - assert response.categories == [analyticshub.Listing.Category.CATEGORY_OTHERS] - assert response.request_access == 'request_access_value' - assert response.discovery_type == analyticshub.DiscoveryType.DISCOVERY_TYPE_PRIVATE - - -def test_update_listing_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = analyticshub.UpdateListingRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_listing), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.update_listing(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == analyticshub.UpdateListingRequest( - ) - -def test_update_listing_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_listing in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_listing] = mock_rpc - request = {} - client.update_listing(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_listing(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_listing_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_listing in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_listing] = mock_rpc - - request = {} - await client.update_listing(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.update_listing(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_listing_async(transport: str = 'grpc_asyncio', request_type=analyticshub.UpdateListingRequest): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_listing), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.Listing(
-            name='name_value',
-            display_name='display_name_value',
-            description='description_value',
-            primary_contact='primary_contact_value',
-            documentation='documentation_value',
-            state=analyticshub.Listing.State.ACTIVE,
-            icon=b'icon_blob',
-            categories=[analyticshub.Listing.Category.CATEGORY_OTHERS],
-            request_access='request_access_value',
-            discovery_type=analyticshub.DiscoveryType.DISCOVERY_TYPE_PRIVATE,
-        ))
-        response = await client.update_listing(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = analyticshub.UpdateListingRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, analyticshub.Listing)
-    assert response.name == 'name_value'
-    assert response.display_name == 'display_name_value'
-    assert response.description == 'description_value'
-    assert response.primary_contact == 'primary_contact_value'
-    assert response.documentation == 'documentation_value'
-    assert response.state == analyticshub.Listing.State.ACTIVE
-    assert response.icon == b'icon_blob'
-    assert response.categories == [analyticshub.Listing.Category.CATEGORY_OTHERS]
-    assert response.request_access == 'request_access_value'
-    assert response.discovery_type == analyticshub.DiscoveryType.DISCOVERY_TYPE_PRIVATE
-
-
-@pytest.mark.asyncio
-async def test_update_listing_async_from_dict():
-    await test_update_listing_async(request_type=dict)
-
-def test_update_listing_field_headers():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = analyticshub.UpdateListingRequest()
-
-    request.listing.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_listing),
-            '__call__') as call:
-        call.return_value = analyticshub.Listing()
-        client.update_listing(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'listing.name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_update_listing_field_headers_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = analyticshub.UpdateListingRequest()
-
-    request.listing.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_listing),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.Listing())
-        await client.update_listing(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
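-    # (For update RPCs the routing parameter is the nested resource name, so
-    # the expected header value is 'listing.name=name_value' rather than a
-    # top-level 'name'.)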
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'listing.name=name_value',
-    ) in kw['metadata']
-
-
-def test_update_listing_flattened():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_listing),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = analyticshub.Listing()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.update_listing(
-            listing=analyticshub.Listing(bigquery_dataset=analyticshub.Listing.BigQueryDatasetSource(dataset='dataset_value')),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].listing
-        mock_val = analyticshub.Listing(bigquery_dataset=analyticshub.Listing.BigQueryDatasetSource(dataset='dataset_value'))
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-
-def test_update_listing_flattened_error():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.update_listing(
-            analyticshub.UpdateListingRequest(),
-            listing=analyticshub.Listing(bigquery_dataset=analyticshub.Listing.BigQueryDatasetSource(dataset='dataset_value')),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-@pytest.mark.asyncio
-async def test_update_listing_flattened_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_listing),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.Listing())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.update_listing(
-            listing=analyticshub.Listing(bigquery_dataset=analyticshub.Listing.BigQueryDatasetSource(dataset='dataset_value')),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].listing
-        mock_val = analyticshub.Listing(bigquery_dataset=analyticshub.Listing.BigQueryDatasetSource(dataset='dataset_value'))
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_listing_flattened_error_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - await client.update_listing( - analyticshub.UpdateListingRequest(), - listing=analyticshub.Listing(bigquery_dataset=analyticshub.Listing.BigQueryDatasetSource(dataset='dataset_value')), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - analyticshub.DeleteListingRequest, - dict, -]) -def test_delete_listing(request_type, transport: str = 'grpc'): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_listing), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_listing(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = analyticshub.DeleteListingRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_listing_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = analyticshub.DeleteListingRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_listing), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_listing(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == analyticshub.DeleteListingRequest( - name='name_value', - ) - -def test_delete_listing_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_listing in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_listing] = mock_rpc - request = {} - client.delete_listing(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_listing(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_listing_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_listing in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_listing] = mock_rpc - - request = {} - await client.delete_listing(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_listing(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_listing_async(transport: str = 'grpc_asyncio', request_type=analyticshub.DeleteListingRequest): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_listing), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_listing(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = analyticshub.DeleteListingRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_listing_async_from_dict(): - await test_delete_listing_async(request_type=dict) - -def test_delete_listing_field_headers(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = analyticshub.DeleteListingRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_listing), - '__call__') as call: - call.return_value = None - client.delete_listing(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
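-    # (The x-goog-request-params metadata entry carries URL-style key=value
-    # routing parameters derived from the request, here 'name=name_value',
-    # letting the backend route the call without inspecting the payload.)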
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_delete_listing_field_headers_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = analyticshub.DeleteListingRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_listing),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        await client.delete_listing(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_delete_listing_flattened():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_listing),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.delete_listing(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_delete_listing_flattened_error():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_listing(
-            analyticshub.DeleteListingRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_delete_listing_flattened_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_listing),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.delete_listing(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_listing_flattened_error_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - await client.delete_listing( - analyticshub.DeleteListingRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - analyticshub.SubscribeListingRequest, - dict, -]) -def test_subscribe_listing(request_type, transport: str = 'grpc'): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.subscribe_listing), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = analyticshub.SubscribeListingResponse( - ) - response = client.subscribe_listing(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = analyticshub.SubscribeListingRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, analyticshub.SubscribeListingResponse) - - -def test_subscribe_listing_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = analyticshub.SubscribeListingRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.subscribe_listing), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.subscribe_listing(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == analyticshub.SubscribeListingRequest( - name='name_value', - ) - -def test_subscribe_listing_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.subscribe_listing in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.subscribe_listing] = mock_rpc - request = {} - client.subscribe_listing(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.subscribe_listing(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_subscribe_listing_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = AnalyticsHubServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.subscribe_listing in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.subscribe_listing] = mock_rpc
-
-        request = {}
-        await client.subscribe_listing(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.subscribe_listing(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_subscribe_listing_async(transport: str = 'grpc_asyncio', request_type=analyticshub.SubscribeListingRequest):
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.subscribe_listing),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.SubscribeListingResponse(
-        ))
-        response = await client.subscribe_listing(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = analyticshub.SubscribeListingRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, analyticshub.SubscribeListingResponse)
-
-
-@pytest.mark.asyncio
-async def test_subscribe_listing_async_from_dict():
-    await test_subscribe_listing_async(request_type=dict)
-
-def test_subscribe_listing_field_headers():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = analyticshub.SubscribeListingRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.subscribe_listing),
-            '__call__') as call:
-        call.return_value = analyticshub.SubscribeListingResponse()
-        client.subscribe_listing(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_subscribe_listing_field_headers_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = analyticshub.SubscribeListingRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.subscribe_listing),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.SubscribeListingResponse())
-        await client.subscribe_listing(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_subscribe_listing_flattened():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.subscribe_listing),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = analyticshub.SubscribeListingResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.subscribe_listing(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_subscribe_listing_flattened_error():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.subscribe_listing(
-            analyticshub.SubscribeListingRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_subscribe_listing_flattened_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.subscribe_listing),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.SubscribeListingResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.subscribe_listing(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_subscribe_listing_flattened_error_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - await client.subscribe_listing( - analyticshub.SubscribeListingRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - analyticshub.SubscribeDataExchangeRequest, - dict, -]) -def test_subscribe_data_exchange(request_type, transport: str = 'grpc'): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.subscribe_data_exchange), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.subscribe_data_exchange(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = analyticshub.SubscribeDataExchangeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_subscribe_data_exchange_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = analyticshub.SubscribeDataExchangeRequest( - name='name_value', - destination='destination_value', - subscription='subscription_value', - subscriber_contact='subscriber_contact_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.subscribe_data_exchange), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.subscribe_data_exchange(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == analyticshub.SubscribeDataExchangeRequest( - name='name_value', - destination='destination_value', - subscription='subscription_value', - subscriber_contact='subscriber_contact_value', - ) - -def test_subscribe_data_exchange_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.subscribe_data_exchange in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.subscribe_data_exchange] = mock_rpc - request = {} - client.subscribe_data_exchange(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.subscribe_data_exchange(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_subscribe_data_exchange_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.subscribe_data_exchange in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.subscribe_data_exchange] = mock_rpc - - request = {} - await client.subscribe_data_exchange(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.subscribe_data_exchange(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_subscribe_data_exchange_async(transport: str = 'grpc_asyncio', request_type=analyticshub.SubscribeDataExchangeRequest): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.subscribe_data_exchange), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.subscribe_data_exchange(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = analyticshub.SubscribeDataExchangeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
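-    # (Long-running methods return the operations_pb2.Operation wrapped in an
-    # operation future, so callers await or poll for the terminal result
-    # rather than receiving the final response message directly.)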
- assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_subscribe_data_exchange_async_from_dict(): - await test_subscribe_data_exchange_async(request_type=dict) - -def test_subscribe_data_exchange_field_headers(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = analyticshub.SubscribeDataExchangeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.subscribe_data_exchange), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.subscribe_data_exchange(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_subscribe_data_exchange_field_headers_async(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = analyticshub.SubscribeDataExchangeRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.subscribe_data_exchange), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.subscribe_data_exchange(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_subscribe_data_exchange_flattened(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.subscribe_data_exchange), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.subscribe_data_exchange( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_subscribe_data_exchange_flattened_error(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
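-    # (The ValueError is raised client-side, before any RPC is attempted:
-    # flattened keywords are merged into a freshly built request, so
-    # combining them with a caller-supplied request object would make it
-    # ambiguous which value should win.)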
-    with pytest.raises(ValueError):
-        client.subscribe_data_exchange(
-            analyticshub.SubscribeDataExchangeRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_subscribe_data_exchange_flattened_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.subscribe_data_exchange),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.subscribe_data_exchange(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_subscribe_data_exchange_flattened_error_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.subscribe_data_exchange(
-            analyticshub.SubscribeDataExchangeRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  analyticshub.RefreshSubscriptionRequest,
-  dict,
-])
-def test_refresh_subscription(request_type, transport: str = 'grpc'):
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.refresh_subscription),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/spam')
-        response = client.refresh_subscription(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = analyticshub.RefreshSubscriptionRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, future.Future)
-
-
-def test_refresh_subscription_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = analyticshub.RefreshSubscriptionRequest(
-        name='name_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.refresh_subscription), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.refresh_subscription(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == analyticshub.RefreshSubscriptionRequest( - name='name_value', - ) - -def test_refresh_subscription_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.refresh_subscription in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.refresh_subscription] = mock_rpc - request = {} - client.refresh_subscription(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.refresh_subscription(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_refresh_subscription_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.refresh_subscription in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.refresh_subscription] = mock_rpc - - request = {} - await client.refresh_subscription(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.refresh_subscription(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_refresh_subscription_async(transport: str = 'grpc_asyncio', request_type=analyticshub.RefreshSubscriptionRequest): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.refresh_subscription), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.refresh_subscription(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = analyticshub.RefreshSubscriptionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_refresh_subscription_async_from_dict(): - await test_refresh_subscription_async(request_type=dict) - -def test_refresh_subscription_field_headers(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = analyticshub.RefreshSubscriptionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.refresh_subscription), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.refresh_subscription(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_refresh_subscription_field_headers_async(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = analyticshub.RefreshSubscriptionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.refresh_subscription), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.refresh_subscription(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
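-    # For context: the x-goog-request-params header carries the implicit
-    # routing parameters. The client URL-encodes routing fields from the
-    # request (here `name`) into that header so the backend can route the
-    # call, which is why the assertion inspects the metadata kwarg of the
-    # mocked stub call rather than the request message itself.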
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_refresh_subscription_flattened():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.refresh_subscription),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = operations_pb2.Operation(name='operations/op')
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.refresh_subscription(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_refresh_subscription_flattened_error():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.refresh_subscription(
-            analyticshub.RefreshSubscriptionRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_refresh_subscription_flattened_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.refresh_subscription),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            operations_pb2.Operation(name='operations/spam')
-        )
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.refresh_subscription(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_refresh_subscription_flattened_error_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.refresh_subscription(
-            analyticshub.RefreshSubscriptionRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  analyticshub.GetSubscriptionRequest,
-  dict,
-])
-def test_get_subscription(request_type, transport: str = 'grpc'):
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_subscription),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
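-        # Unlike the long-running methods above, GetSubscription is a plain
-        # unary call: the stub returns the response message directly, with
-        # no operations_pb2.Operation to be wrapped into a future.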
- call.return_value = analyticshub.Subscription( - name='name_value', - organization_id='organization_id_value', - organization_display_name='organization_display_name_value', - state=analyticshub.Subscription.State.STATE_ACTIVE, - subscriber_contact='subscriber_contact_value', - listing='listing_value', - ) - response = client.get_subscription(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = analyticshub.GetSubscriptionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, analyticshub.Subscription) - assert response.name == 'name_value' - assert response.organization_id == 'organization_id_value' - assert response.organization_display_name == 'organization_display_name_value' - assert response.state == analyticshub.Subscription.State.STATE_ACTIVE - assert response.subscriber_contact == 'subscriber_contact_value' - - -def test_get_subscription_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = analyticshub.GetSubscriptionRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_subscription), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_subscription(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == analyticshub.GetSubscriptionRequest( - name='name_value', - ) - -def test_get_subscription_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_subscription in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_subscription] = mock_rpc - request = {} - client.get_subscription(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.get_subscription(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_subscription_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = AnalyticsHubServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.get_subscription in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.get_subscription] = mock_rpc
-
-        request = {}
-        await client.get_subscription(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.get_subscription(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_subscription_async(transport: str = 'grpc_asyncio', request_type=analyticshub.GetSubscriptionRequest):
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_subscription),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.Subscription(
-            name='name_value',
-            organization_id='organization_id_value',
-            organization_display_name='organization_display_name_value',
-            state=analyticshub.Subscription.State.STATE_ACTIVE,
-            subscriber_contact='subscriber_contact_value',
-        ))
-        response = await client.get_subscription(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = analyticshub.GetSubscriptionRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, analyticshub.Subscription)
-    assert response.name == 'name_value'
-    assert response.organization_id == 'organization_id_value'
-    assert response.organization_display_name == 'organization_display_name_value'
-    assert response.state == analyticshub.Subscription.State.STATE_ACTIVE
-    assert response.subscriber_contact == 'subscriber_contact_value'
-
-
-@pytest.mark.asyncio
-async def test_get_subscription_async_from_dict():
-    await test_get_subscription_async(request_type=dict)
-
-def test_get_subscription_field_headers():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = analyticshub.GetSubscriptionRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_subscription),
-            '__call__') as call:
-        call.return_value = analyticshub.Subscription()
-        client.get_subscription(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_subscription_field_headers_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = analyticshub.GetSubscriptionRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_subscription),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.Subscription())
-        await client.get_subscription(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_get_subscription_flattened():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_subscription),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = analyticshub.Subscription()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.get_subscription(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_get_subscription_flattened_error():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_subscription(
-            analyticshub.GetSubscriptionRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_get_subscription_flattened_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_subscription),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.Subscription())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
- response = await client.get_subscription( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_subscription_flattened_error_async(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_subscription( - analyticshub.GetSubscriptionRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - analyticshub.ListSubscriptionsRequest, - dict, -]) -def test_list_subscriptions(request_type, transport: str = 'grpc'): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_subscriptions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = analyticshub.ListSubscriptionsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_subscriptions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = analyticshub.ListSubscriptionsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListSubscriptionsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_subscriptions_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = analyticshub.ListSubscriptionsRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_subscriptions), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
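-        # Per AIP-4235, string fields annotated as auto-populated (commonly
-        # a `request_id`) are filled with a UUID4 when left unset. The
-        # equality assertion below would fail if such a field were added
-        # here, so this request appears to carry no auto-populated field and
-        # the test only verifies that explicit values pass through intact.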
- client.list_subscriptions(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == analyticshub.ListSubscriptionsRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', - ) - -def test_list_subscriptions_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_subscriptions in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_subscriptions] = mock_rpc - request = {} - client.list_subscriptions(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_subscriptions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_subscriptions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_subscriptions in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_subscriptions] = mock_rpc - - request = {} - await client.list_subscriptions(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_subscriptions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_subscriptions_async(transport: str = 'grpc_asyncio', request_type=analyticshub.ListSubscriptionsRequest): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_subscriptions), - '__call__') as call: - # Designate an appropriate return value for the call. 
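-        # The async transport awaits its stub, so a bare message is not a
-        # valid mock return value here. FakeUnaryUnaryCall wraps the
-        # response in an awaitable call object that resolves to it,
-        # mirroring what a real grpc.aio channel would hand back.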
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.ListSubscriptionsResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.list_subscriptions(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = analyticshub.ListSubscriptionsRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListSubscriptionsAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_subscriptions_async_from_dict():
-    await test_list_subscriptions_async(request_type=dict)
-
-def test_list_subscriptions_field_headers():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = analyticshub.ListSubscriptionsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_subscriptions),
-            '__call__') as call:
-        call.return_value = analyticshub.ListSubscriptionsResponse()
-        client.list_subscriptions(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_subscriptions_field_headers_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = analyticshub.ListSubscriptionsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_subscriptions),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.ListSubscriptionsResponse())
-        await client.list_subscriptions(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_list_subscriptions_flattened():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_subscriptions),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = analyticshub.ListSubscriptionsResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_subscriptions(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_subscriptions_flattened_error():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_subscriptions(
-            analyticshub.ListSubscriptionsRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_subscriptions_flattened_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_subscriptions),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.ListSubscriptionsResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_subscriptions(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_subscriptions_flattened_error_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_subscriptions(
-            analyticshub.ListSubscriptionsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_subscriptions_pager(transport_name: str = "grpc"):
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_subscriptions),
-            '__call__') as call:
-        # Set the response to a series of pages.
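-        # Each element of side_effect is consumed by one successive stub
-        # call: the pager follows next_page_token through the four pages
-        # until it sees an empty token. The trailing RuntimeError is a
-        # tripwire that fails the test loudly if a fifth page is requested.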
- call.side_effect = ( - analyticshub.ListSubscriptionsResponse( - subscriptions=[ - analyticshub.Subscription(), - analyticshub.Subscription(), - analyticshub.Subscription(), - ], - next_page_token='abc', - ), - analyticshub.ListSubscriptionsResponse( - subscriptions=[], - next_page_token='def', - ), - analyticshub.ListSubscriptionsResponse( - subscriptions=[ - analyticshub.Subscription(), - ], - next_page_token='ghi', - ), - analyticshub.ListSubscriptionsResponse( - subscriptions=[ - analyticshub.Subscription(), - analyticshub.Subscription(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_subscriptions(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, analyticshub.Subscription) - for i in results) -def test_list_subscriptions_pages(transport_name: str = "grpc"): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_subscriptions), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - analyticshub.ListSubscriptionsResponse( - subscriptions=[ - analyticshub.Subscription(), - analyticshub.Subscription(), - analyticshub.Subscription(), - ], - next_page_token='abc', - ), - analyticshub.ListSubscriptionsResponse( - subscriptions=[], - next_page_token='def', - ), - analyticshub.ListSubscriptionsResponse( - subscriptions=[ - analyticshub.Subscription(), - ], - next_page_token='ghi', - ), - analyticshub.ListSubscriptionsResponse( - subscriptions=[ - analyticshub.Subscription(), - analyticshub.Subscription(), - ], - ), - RuntimeError, - ) - pages = list(client.list_subscriptions(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_subscriptions_async_pager(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_subscriptions), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analyticshub.ListSubscriptionsResponse( - subscriptions=[ - analyticshub.Subscription(), - analyticshub.Subscription(), - analyticshub.Subscription(), - ], - next_page_token='abc', - ), - analyticshub.ListSubscriptionsResponse( - subscriptions=[], - next_page_token='def', - ), - analyticshub.ListSubscriptionsResponse( - subscriptions=[ - analyticshub.Subscription(), - ], - next_page_token='ghi', - ), - analyticshub.ListSubscriptionsResponse( - subscriptions=[ - analyticshub.Subscription(), - analyticshub.Subscription(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_subscriptions(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, analyticshub.Subscription) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_subscriptions_async_pages(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_subscriptions), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - analyticshub.ListSubscriptionsResponse( - subscriptions=[ - analyticshub.Subscription(), - analyticshub.Subscription(), - analyticshub.Subscription(), - ], - next_page_token='abc', - ), - analyticshub.ListSubscriptionsResponse( - subscriptions=[], - next_page_token='def', - ), - analyticshub.ListSubscriptionsResponse( - subscriptions=[ - analyticshub.Subscription(), - ], - next_page_token='ghi', - ), - analyticshub.ListSubscriptionsResponse( - subscriptions=[ - analyticshub.Subscription(), - analyticshub.Subscription(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_subscriptions(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - analyticshub.ListSharedResourceSubscriptionsRequest, - dict, -]) -def test_list_shared_resource_subscriptions(request_type, transport: str = 'grpc'): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_shared_resource_subscriptions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = analyticshub.ListSharedResourceSubscriptionsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_shared_resource_subscriptions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = analyticshub.ListSharedResourceSubscriptionsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListSharedResourceSubscriptionsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_shared_resource_subscriptions_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = analyticshub.ListSharedResourceSubscriptionsRequest( - resource='resource_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_shared_resource_subscriptions), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_shared_resource_subscriptions(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == analyticshub.ListSharedResourceSubscriptionsRequest( - resource='resource_value', - page_token='page_token_value', - ) - -def test_list_shared_resource_subscriptions_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_shared_resource_subscriptions in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_shared_resource_subscriptions] = mock_rpc - request = {} - client.list_shared_resource_subscriptions(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.list_shared_resource_subscriptions(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_shared_resource_subscriptions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = AnalyticsHubServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.list_shared_resource_subscriptions in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.list_shared_resource_subscriptions] = mock_rpc
-
-        request = {}
-        await client.list_shared_resource_subscriptions(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.list_shared_resource_subscriptions(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_shared_resource_subscriptions_async(transport: str = 'grpc_asyncio', request_type=analyticshub.ListSharedResourceSubscriptionsRequest):
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_shared_resource_subscriptions),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.ListSharedResourceSubscriptionsResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.list_shared_resource_subscriptions(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = analyticshub.ListSharedResourceSubscriptionsRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListSharedResourceSubscriptionsAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_shared_resource_subscriptions_async_from_dict():
-    await test_list_shared_resource_subscriptions_async(request_type=dict)
-
-def test_list_shared_resource_subscriptions_field_headers():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = analyticshub.ListSharedResourceSubscriptionsRequest()
-
-    request.resource = 'resource_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.list_shared_resource_subscriptions), - '__call__') as call: - call.return_value = analyticshub.ListSharedResourceSubscriptionsResponse() - client.list_shared_resource_subscriptions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_shared_resource_subscriptions_field_headers_async(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = analyticshub.ListSharedResourceSubscriptionsRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_shared_resource_subscriptions), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.ListSharedResourceSubscriptionsResponse()) - await client.list_shared_resource_subscriptions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - - -def test_list_shared_resource_subscriptions_flattened(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_shared_resource_subscriptions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = analyticshub.ListSharedResourceSubscriptionsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_shared_resource_subscriptions( - resource='resource_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].resource - mock_val = 'resource_value' - assert arg == mock_val - - -def test_list_shared_resource_subscriptions_flattened_error(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_shared_resource_subscriptions( - analyticshub.ListSharedResourceSubscriptionsRequest(), - resource='resource_value', - ) - -@pytest.mark.asyncio -async def test_list_shared_resource_subscriptions_flattened_async(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_shared_resource_subscriptions), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.ListSharedResourceSubscriptionsResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_shared_resource_subscriptions(
-            resource='resource_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].resource
-        mock_val = 'resource_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_shared_resource_subscriptions_flattened_error_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_shared_resource_subscriptions(
-            analyticshub.ListSharedResourceSubscriptionsRequest(),
-            resource='resource_value',
-        )
-
-
-def test_list_shared_resource_subscriptions_pager(transport_name: str = "grpc"):
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_shared_resource_subscriptions),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            analyticshub.ListSharedResourceSubscriptionsResponse(
-                shared_resource_subscriptions=[
-                    analyticshub.Subscription(),
-                    analyticshub.Subscription(),
-                    analyticshub.Subscription(),
-                ],
-                next_page_token='abc',
-            ),
-            analyticshub.ListSharedResourceSubscriptionsResponse(
-                shared_resource_subscriptions=[],
-                next_page_token='def',
-            ),
-            analyticshub.ListSharedResourceSubscriptionsResponse(
-                shared_resource_subscriptions=[
-                    analyticshub.Subscription(),
-                ],
-                next_page_token='ghi',
-            ),
-            analyticshub.ListSharedResourceSubscriptionsResponse(
-                shared_resource_subscriptions=[
-                    analyticshub.Subscription(),
-                    analyticshub.Subscription(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        expected_metadata = ()
-        retry = retries.Retry()
-        timeout = 5
-        expected_metadata = tuple(expected_metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('resource', ''),
-            )),
-        )
-        pager = client.list_shared_resource_subscriptions(request={}, retry=retry, timeout=timeout)
-
-        assert pager._metadata == expected_metadata
-        assert pager._retry == retry
-        assert pager._timeout == timeout
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, analyticshub.Subscription)
-                   for i in results)
-def test_list_shared_resource_subscriptions_pages(transport_name: str = "grpc"):
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_shared_resource_subscriptions),
-            '__call__') as call:
-        # Set the response to a series of pages.
- call.side_effect = ( - analyticshub.ListSharedResourceSubscriptionsResponse( - shared_resource_subscriptions=[ - analyticshub.Subscription(), - analyticshub.Subscription(), - analyticshub.Subscription(), - ], - next_page_token='abc', - ), - analyticshub.ListSharedResourceSubscriptionsResponse( - shared_resource_subscriptions=[], - next_page_token='def', - ), - analyticshub.ListSharedResourceSubscriptionsResponse( - shared_resource_subscriptions=[ - analyticshub.Subscription(), - ], - next_page_token='ghi', - ), - analyticshub.ListSharedResourceSubscriptionsResponse( - shared_resource_subscriptions=[ - analyticshub.Subscription(), - analyticshub.Subscription(), - ], - ), - RuntimeError, - ) - pages = list(client.list_shared_resource_subscriptions(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_shared_resource_subscriptions_async_pager(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_shared_resource_subscriptions), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - analyticshub.ListSharedResourceSubscriptionsResponse( - shared_resource_subscriptions=[ - analyticshub.Subscription(), - analyticshub.Subscription(), - analyticshub.Subscription(), - ], - next_page_token='abc', - ), - analyticshub.ListSharedResourceSubscriptionsResponse( - shared_resource_subscriptions=[], - next_page_token='def', - ), - analyticshub.ListSharedResourceSubscriptionsResponse( - shared_resource_subscriptions=[ - analyticshub.Subscription(), - ], - next_page_token='ghi', - ), - analyticshub.ListSharedResourceSubscriptionsResponse( - shared_resource_subscriptions=[ - analyticshub.Subscription(), - analyticshub.Subscription(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_shared_resource_subscriptions(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, analyticshub.Subscription) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_shared_resource_subscriptions_async_pages(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_shared_resource_subscriptions), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analyticshub.ListSharedResourceSubscriptionsResponse( - shared_resource_subscriptions=[ - analyticshub.Subscription(), - analyticshub.Subscription(), - analyticshub.Subscription(), - ], - next_page_token='abc', - ), - analyticshub.ListSharedResourceSubscriptionsResponse( - shared_resource_subscriptions=[], - next_page_token='def', - ), - analyticshub.ListSharedResourceSubscriptionsResponse( - shared_resource_subscriptions=[ - analyticshub.Subscription(), - ], - next_page_token='ghi', - ), - analyticshub.ListSharedResourceSubscriptionsResponse( - shared_resource_subscriptions=[ - analyticshub.Subscription(), - analyticshub.Subscription(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_shared_resource_subscriptions(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - analyticshub.RevokeSubscriptionRequest, - dict, -]) -def test_revoke_subscription(request_type, transport: str = 'grpc'): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.revoke_subscription), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = analyticshub.RevokeSubscriptionResponse( - ) - response = client.revoke_subscription(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = analyticshub.RevokeSubscriptionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, analyticshub.RevokeSubscriptionResponse) - - -def test_revoke_subscription_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = analyticshub.RevokeSubscriptionRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.revoke_subscription), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.revoke_subscription(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == analyticshub.RevokeSubscriptionRequest( - name='name_value', - ) - -def test_revoke_subscription_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.revoke_subscription in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.revoke_subscription] = mock_rpc - request = {} - client.revoke_subscription(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.revoke_subscription(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_revoke_subscription_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.revoke_subscription in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.revoke_subscription] = mock_rpc - - request = {} - await client.revoke_subscription(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.revoke_subscription(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_revoke_subscription_async(transport: str = 'grpc_asyncio', request_type=analyticshub.RevokeSubscriptionRequest): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.revoke_subscription), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.RevokeSubscriptionResponse( - )) - response = await client.revoke_subscription(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = analyticshub.RevokeSubscriptionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, analyticshub.RevokeSubscriptionResponse) - - -@pytest.mark.asyncio -async def test_revoke_subscription_async_from_dict(): - await test_revoke_subscription_async(request_type=dict) - -def test_revoke_subscription_field_headers(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = analyticshub.RevokeSubscriptionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.revoke_subscription), - '__call__') as call: - call.return_value = analyticshub.RevokeSubscriptionResponse() - client.revoke_subscription(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_revoke_subscription_field_headers_async(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = analyticshub.RevokeSubscriptionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.revoke_subscription), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.RevokeSubscriptionResponse()) - await client.revoke_subscription(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_revoke_subscription_flattened(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.revoke_subscription), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = analyticshub.RevokeSubscriptionResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.revoke_subscription( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_revoke_subscription_flattened_error(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
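The two calling conventions are mutually exclusive by design: a populated request object already carries every field, so flattened keyword arguments could silently conflict with it, and the check happens client-side before any RPC is attempted. A simplified sketch of the guard (hypothetical signature, modeled on what GAPIC clients generate):

    def revoke_subscription(request=None, *, name=None):
        # Reject mixing a request object with flattened field arguments.
        has_flattened_params = name is not None
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')
        ...

The pytest.raises(ValueError) blocks on either side of this point pin down exactly that behavior.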
- with pytest.raises(ValueError):
- client.revoke_subscription(
- analyticshub.RevokeSubscriptionRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_revoke_subscription_flattened_async():
- client = AnalyticsHubServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.revoke_subscription),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.RevokeSubscriptionResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.revoke_subscription(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_revoke_subscription_flattened_error_async():
- client = AnalyticsHubServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.revoke_subscription(
- analyticshub.RevokeSubscriptionRequest(),
- name='name_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- analyticshub.DeleteSubscriptionRequest,
- dict,
-])
-def test_delete_subscription(request_type, transport: str = 'grpc'):
- client = AnalyticsHubServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_subscription),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name='operations/spam')
- response = client.delete_subscription(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = analyticshub.DeleteSubscriptionRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, future.Future)
-
-
-def test_delete_subscription_non_empty_request_with_auto_populated_field():
- # This test is a coverage failsafe to make sure that UUID4 fields are
- # automatically populated, according to AIP-4235, with non-empty requests.
- client = AnalyticsHubServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Populate all string fields in the request which are not UUID4
- # since we want to check that UUID4 are populated automatically
- # if they meet the requirements of AIP 4235.
- request = analyticshub.DeleteSubscriptionRequest(
- name='name_value',
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_subscription),
- '__call__') as call:
- call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client.delete_subscription(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == analyticshub.DeleteSubscriptionRequest( - name='name_value', - ) - -def test_delete_subscription_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_subscription in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_subscription] = mock_rpc - request = {} - client.delete_subscription(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_subscription(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_subscription_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_subscription in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_subscription] = mock_rpc - - request = {} - await client.delete_subscription(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_subscription(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_subscription_async(transport: str = 'grpc_asyncio', request_type=analyticshub.DeleteSubscriptionRequest): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
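Proto3 has no required fields at the wire level: every scalar field carries a zero default (empty string, 0, False), so an empty request message is always constructible and serializable, and any required-ness is enforced by the server rather than the client runtime. For instance, with the message types already imported in this module:

    request = analyticshub.DeleteSubscriptionRequest()
    assert request.name == ''    # proto3 string default, not None
    request.name = 'name_value'  # fields can be filled in afterwards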
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_subscription), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - response = await client.delete_subscription(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = analyticshub.DeleteSubscriptionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -@pytest.mark.asyncio -async def test_delete_subscription_async_from_dict(): - await test_delete_subscription_async(request_type=dict) - -def test_delete_subscription_field_headers(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = analyticshub.DeleteSubscriptionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_subscription), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_subscription(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_subscription_field_headers_async(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = analyticshub.DeleteSubscriptionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_subscription), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - await client.delete_subscription(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_subscription_flattened(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_subscription), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/op') - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_subscription( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_delete_subscription_flattened_error():
- client = AnalyticsHubServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.delete_subscription(
- analyticshub.DeleteSubscriptionRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_delete_subscription_flattened_async():
- client = AnalyticsHubServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_subscription),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name='operations/spam')
- )
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.delete_subscription(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_subscription_flattened_error_async():
- client = AnalyticsHubServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.delete_subscription(
- analyticshub.DeleteSubscriptionRequest(),
- name='name_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- iam_policy_pb2.GetIamPolicyRequest,
- dict,
-])
-def test_get_iam_policy(request_type, transport: str = 'grpc'):
- client = AnalyticsHubServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_iam_policy),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = policy_pb2.Policy(
- version=774,
- etag=b'etag_blob',
- )
- response = client.get_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = iam_policy_pb2.GetIamPolicyRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, policy_pb2.Policy)
- assert response.version == 774
- assert response.etag == b'etag_blob'
-
-
-def test_get_iam_policy_non_empty_request_with_auto_populated_field():
- # This test is a coverage failsafe to make sure that UUID4 fields are
- # automatically populated, according to AIP-4235, with non-empty requests.
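AIP-4235 covers request fields annotated for automatic population, typically a request_id used to make retries idempotent: when the caller leaves such a field empty and the annotation constrains it to UUID4 format, the client fills it in before sending. A rough sketch of that rule (hypothetical field name; the real behavior is driven by annotations in the proto, and the IAM requests here carry no such field, hence the failsafe):

    import uuid

    def autopopulate(request):
        # Only fill the annotated field when the caller left it empty.
        if not request.request_id:
            request.request_id = str(uuid.uuid4())
        return request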
- client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = iam_policy_pb2.GetIamPolicyRequest( - resource='resource_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_iam_policy(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest( - resource='resource_value', - ) - -def test_get_iam_policy_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_iam_policy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc - request = {} - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_iam_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_iam_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_iam_policy in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_iam_policy] = mock_rpc - - request = {} - await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.get_iam_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.GetIamPolicyRequest): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( - version=774, - etag=b'etag_blob', - )) - response = await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.GetIamPolicyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -@pytest.mark.asyncio -async def test_get_iam_policy_async_from_dict(): - await test_get_iam_policy_async(request_type=dict) - -def test_get_iam_policy_field_headers(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_iam_policy_field_headers_async(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
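The x-goog-request-params metadata entry is how URI-bound routing fields travel with a gRPC request: the client percent-encodes the relevant field values into a query-string-like header so that proxies can route the call without parsing the request protobuf. A sketch of the encoding these assertions expect (gapic_v1.routing_header.to_grpc_metadata does the real work):

    from urllib.parse import quote

    def to_request_params(pairs):
        # e.g. [('resource', 'projects/p')] -> 'resource=projects%2Fp'
        return '&'.join(f'{key}={quote(str(value), safe="")}' for key, value in pairs)

    assert to_request_params([('resource', 'resource_value')]) == 'resource=resource_value'

The assertion that follows checks that exactly this tuple landed in the outgoing metadata.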
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - -def test_get_iam_policy_from_dict_foreign(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - response = client.get_iam_policy(request={ - 'resource': 'resource_value', - 'options': options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.SetIamPolicyRequest, - dict, -]) -def test_set_iam_policy(request_type, transport: str = 'grpc'): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b'etag_blob', - ) - response = client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.SetIamPolicyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -def test_set_iam_policy_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = iam_policy_pb2.SetIamPolicyRequest( - resource='resource_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.set_iam_policy(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.SetIamPolicyRequest( - resource='resource_value', - ) - -def test_set_iam_policy_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.set_iam_policy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc - request = {} - client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.set_iam_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_set_iam_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.set_iam_policy in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.set_iam_policy] = mock_rpc - - request = {} - await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.set_iam_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_set_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.SetIamPolicyRequest): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( - version=774, - etag=b'etag_blob', - )) - response = await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.SetIamPolicyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -@pytest.mark.asyncio -async def test_set_iam_policy_async_from_dict(): - await test_set_iam_policy_async(request_type=dict) - -def test_set_iam_policy_field_headers(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_set_iam_policy_field_headers_async(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - -def test_set_iam_policy_from_dict_foreign(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - response = client.set_iam_policy(request={ - 'resource': 'resource_value', - 'policy': policy_pb2.Policy(version=774), - 'update_mask': field_mask_pb2.FieldMask(paths=['paths_value']), - } - ) - call.assert_called() - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.TestIamPermissionsRequest, - dict, -]) -def test_test_iam_permissions(request_type, transport: str = 'grpc'): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
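Note the type(...) indirection used throughout these patches: Python looks implicit special methods such as __call__ up on the class, not the instance, so patching the attribute on the multicallable instance itself would never intercept the RPC. A self-contained sketch of why (the class is a hypothetical stand-in for a gRPC multicallable):

    from unittest import mock

    class FakeMulticallable:
        def __call__(self, request, metadata=None):
            raise AssertionError('the real RPC should not run')

    stub = FakeMulticallable()
    with mock.patch.object(type(stub), '__call__') as call:
        call.return_value = 'response'
        assert stub('request') == 'response'  # intercepted, no self passed
        _, args, _ = call.mock_calls[0]
        assert args[0] == 'request'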
- with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - ) - response = client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.TestIamPermissionsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - assert response.permissions == ['permissions_value'] - - -def test_test_iam_permissions_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = iam_policy_pb2.TestIamPermissionsRequest( - resource='resource_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.test_iam_permissions(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest( - resource='resource_value', - ) - -def test_test_iam_permissions_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.test_iam_permissions in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.test_iam_permissions] = mock_rpc - request = {} - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.test_iam_permissions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_test_iam_permissions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.test_iam_permissions in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.test_iam_permissions] = mock_rpc - - request = {} - await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.test_iam_permissions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_test_iam_permissions_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.TestIamPermissionsRequest): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - )) - response = await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.TestIamPermissionsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - assert response.permissions == ['permissions_value'] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_async_from_dict(): - await test_test_iam_permissions_async(request_type=dict) - -def test_test_iam_permissions_field_headers(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_field_headers_async(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse()) - await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - -def test_test_iam_permissions_from_dict_foreign(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - response = client.test_iam_permissions(request={ - 'resource': 'resource_value', - 'permissions': ['permissions_value'], - } - ) - call.assert_called() - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.AnalyticsHubServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.AnalyticsHubServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = AnalyticsHubServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.AnalyticsHubServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = AnalyticsHubServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. 
- options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = AnalyticsHubServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.AnalyticsHubServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = AnalyticsHubServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.AnalyticsHubServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = AnalyticsHubServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.AnalyticsHubServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.AnalyticsHubServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.AnalyticsHubServiceGrpcTransport, - transports.AnalyticsHubServiceGrpcAsyncIOTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = AnalyticsHubServiceClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_data_exchanges_empty_call_grpc(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_data_exchanges), - '__call__') as call: - call.return_value = analyticshub.ListDataExchangesResponse() - client.list_data_exchanges(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = analyticshub.ListDataExchangesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_org_data_exchanges_empty_call_grpc(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_org_data_exchanges), - '__call__') as call: - call.return_value = analyticshub.ListOrgDataExchangesResponse() - client.list_org_data_exchanges(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = analyticshub.ListOrgDataExchangesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_data_exchange_empty_call_grpc(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_data_exchange), - '__call__') as call: - call.return_value = analyticshub.DataExchange() - client.get_data_exchange(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = analyticshub.GetDataExchangeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_data_exchange_empty_call_grpc(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_data_exchange), - '__call__') as call: - call.return_value = analyticshub.DataExchange() - client.create_data_exchange(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = analyticshub.CreateDataExchangeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_data_exchange_empty_call_grpc(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_data_exchange), - '__call__') as call: - call.return_value = analyticshub.DataExchange() - client.update_data_exchange(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = analyticshub.UpdateDataExchangeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_data_exchange_empty_call_grpc(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_exchange), - '__call__') as call: - call.return_value = None - client.delete_data_exchange(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = analyticshub.DeleteDataExchangeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_listings_empty_call_grpc(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.list_listings), - '__call__') as call: - call.return_value = analyticshub.ListListingsResponse() - client.list_listings(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = analyticshub.ListListingsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_listing_empty_call_grpc(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_listing), - '__call__') as call: - call.return_value = analyticshub.Listing() - client.get_listing(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = analyticshub.GetListingRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_listing_empty_call_grpc(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_listing), - '__call__') as call: - call.return_value = analyticshub.Listing() - client.create_listing(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = analyticshub.CreateListingRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_listing_empty_call_grpc(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_listing), - '__call__') as call: - call.return_value = analyticshub.Listing() - client.update_listing(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = analyticshub.UpdateListingRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_listing_empty_call_grpc(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_listing), - '__call__') as call: - call.return_value = None - client.delete_listing(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = analyticshub.DeleteListingRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_subscribe_listing_empty_call_grpc(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.subscribe_listing), - '__call__') as call: - call.return_value = analyticshub.SubscribeListingResponse() - client.subscribe_listing(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = analyticshub.SubscribeListingRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_subscribe_data_exchange_empty_call_grpc(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.subscribe_data_exchange), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.subscribe_data_exchange(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = analyticshub.SubscribeDataExchangeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_refresh_subscription_empty_call_grpc(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.refresh_subscription), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.refresh_subscription(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = analyticshub.RefreshSubscriptionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_subscription_empty_call_grpc(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_subscription), - '__call__') as call: - call.return_value = analyticshub.Subscription() - client.get_subscription(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = analyticshub.GetSubscriptionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_subscriptions_empty_call_grpc(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_subscriptions), - '__call__') as call: - call.return_value = analyticshub.ListSubscriptionsResponse() - client.list_subscriptions(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = analyticshub.ListSubscriptionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_shared_resource_subscriptions_empty_call_grpc(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_shared_resource_subscriptions), - '__call__') as call: - call.return_value = analyticshub.ListSharedResourceSubscriptionsResponse() - client.list_shared_resource_subscriptions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = analyticshub.ListSharedResourceSubscriptionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_revoke_subscription_empty_call_grpc(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.revoke_subscription), - '__call__') as call: - call.return_value = analyticshub.RevokeSubscriptionResponse() - client.revoke_subscription(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = analyticshub.RevokeSubscriptionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_subscription_empty_call_grpc(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_subscription), - '__call__') as call: - call.return_value = operations_pb2.Operation(name='operations/op') - client.delete_subscription(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = analyticshub.DeleteSubscriptionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_iam_policy_empty_call_grpc(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.get_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.GetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
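#
# Every empty-call test in this file uses the same mocking mechanic:
# mock.patch.object replaces __call__ on the *type* of the transport's bound
# gRPC callable, the client method is invoked with request=None, and
# mock_calls[0] exposes the request message the client synthesized. A minimal,
# self-contained sketch of that mechanic follows; UnaryUnary, Transport, and
# list_widgets are hypothetical stand-ins, not classes from this library.
from unittest import mock

class UnaryUnary:
    """Toy stand-in for a gRPC unary-unary multicallable."""
    def __call__(self, request):
        raise RuntimeError("would hit the network if not mocked")

class Transport:
    def __init__(self):
        self.list_widgets = UnaryUnary()

transport = Transport()
with mock.patch.object(type(transport.list_widgets), '__call__') as call:
    call.return_value = {"widgets": []}       # canned response
    transport.list_widgets({"page_size": 0})  # routed through the mock

# Exactly as in the tests above: the first recorded call carries the request
# as its first positional argument, because the patched class-level mock is
# looked up through the type and invoked without the instance.
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == {"page_size": 0}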
-def test_set_iam_policy_empty_call_grpc(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.set_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.SetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_test_iam_permissions_empty_call_grpc(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - client.test_iam_permissions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.TestIamPermissionsRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = AnalyticsHubServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_data_exchanges_empty_call_grpc_asyncio(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_data_exchanges), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.ListDataExchangesResponse( - next_page_token='next_page_token_value', - )) - await client.list_data_exchanges(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = analyticshub.ListDataExchangesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_org_data_exchanges_empty_call_grpc_asyncio(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_org_data_exchanges), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.ListOrgDataExchangesResponse( - next_page_token='next_page_token_value', - )) - await client.list_org_data_exchanges(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = analyticshub.ListOrgDataExchangesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_data_exchange_empty_call_grpc_asyncio(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_data_exchange), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.DataExchange( - name='name_value', - display_name='display_name_value', - description='description_value', - primary_contact='primary_contact_value', - documentation='documentation_value', - listing_count=1410, - icon=b'icon_blob', - discovery_type=analyticshub.DiscoveryType.DISCOVERY_TYPE_PRIVATE, - )) - await client.get_data_exchange(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = analyticshub.GetDataExchangeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_data_exchange_empty_call_grpc_asyncio(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_data_exchange), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.DataExchange( - name='name_value', - display_name='display_name_value', - description='description_value', - primary_contact='primary_contact_value', - documentation='documentation_value', - listing_count=1410, - icon=b'icon_blob', - discovery_type=analyticshub.DiscoveryType.DISCOVERY_TYPE_PRIVATE, - )) - await client.create_data_exchange(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = analyticshub.CreateDataExchangeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_data_exchange_empty_call_grpc_asyncio(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_data_exchange), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.DataExchange( - name='name_value', - display_name='display_name_value', - description='description_value', - primary_contact='primary_contact_value', - documentation='documentation_value', - listing_count=1410, - icon=b'icon_blob', - discovery_type=analyticshub.DiscoveryType.DISCOVERY_TYPE_PRIVATE, - )) - await client.update_data_exchange(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = analyticshub.UpdateDataExchangeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_data_exchange_empty_call_grpc_asyncio(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_exchange), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_data_exchange(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = analyticshub.DeleteDataExchangeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_listings_empty_call_grpc_asyncio(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_listings), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.ListListingsResponse( - next_page_token='next_page_token_value', - )) - await client.list_listings(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = analyticshub.ListListingsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_listing_empty_call_grpc_asyncio(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_listing), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.Listing( - name='name_value', - display_name='display_name_value', - description='description_value', - primary_contact='primary_contact_value', - documentation='documentation_value', - state=analyticshub.Listing.State.ACTIVE, - icon=b'icon_blob', - categories=[analyticshub.Listing.Category.CATEGORY_OTHERS], - request_access='request_access_value', - discovery_type=analyticshub.DiscoveryType.DISCOVERY_TYPE_PRIVATE, - )) - await client.get_listing(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = analyticshub.GetListingRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_create_listing_empty_call_grpc_asyncio(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_listing), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.Listing( - name='name_value', - display_name='display_name_value', - description='description_value', - primary_contact='primary_contact_value', - documentation='documentation_value', - state=analyticshub.Listing.State.ACTIVE, - icon=b'icon_blob', - categories=[analyticshub.Listing.Category.CATEGORY_OTHERS], - request_access='request_access_value', - discovery_type=analyticshub.DiscoveryType.DISCOVERY_TYPE_PRIVATE, - )) - await client.create_listing(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = analyticshub.CreateListingRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_listing_empty_call_grpc_asyncio(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_listing), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.Listing( - name='name_value', - display_name='display_name_value', - description='description_value', - primary_contact='primary_contact_value', - documentation='documentation_value', - state=analyticshub.Listing.State.ACTIVE, - icon=b'icon_blob', - categories=[analyticshub.Listing.Category.CATEGORY_OTHERS], - request_access='request_access_value', - discovery_type=analyticshub.DiscoveryType.DISCOVERY_TYPE_PRIVATE, - )) - await client.update_listing(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = analyticshub.UpdateListingRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_listing_empty_call_grpc_asyncio(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_listing), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_listing(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = analyticshub.DeleteListingRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
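#
# The async variants below differ from the sync ones in one respect: the async
# client awaits the result of the transport callable, so a bare response
# message cannot serve as the mock's return value. The tests therefore wrap
# each canned response in grpc_helpers_async.FakeUnaryUnaryCall. A toy
# awaitable illustrating the same idea; FakeCall is a hypothetical stand-in,
# not the api_core implementation.
import asyncio

class FakeCall:
    def __init__(self, response):
        self._response = response

    def __await__(self):
        # A generator-based __await__: `await FakeCall(x)` resolves to x.
        if False:
            yield  # never executed; only marks this method as a generator
        return self._response

async def _demo():
    assert await FakeCall("canned response") == "canned response"

asyncio.run(_demo())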
-@pytest.mark.asyncio -async def test_subscribe_listing_empty_call_grpc_asyncio(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.subscribe_listing), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.SubscribeListingResponse( - )) - await client.subscribe_listing(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = analyticshub.SubscribeListingRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_subscribe_data_exchange_empty_call_grpc_asyncio(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.subscribe_data_exchange), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.subscribe_data_exchange(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = analyticshub.SubscribeDataExchangeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_refresh_subscription_empty_call_grpc_asyncio(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.refresh_subscription), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.refresh_subscription(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = analyticshub.RefreshSubscriptionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_subscription_empty_call_grpc_asyncio(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_subscription), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.Subscription( - name='name_value', - organization_id='organization_id_value', - organization_display_name='organization_display_name_value', - state=analyticshub.Subscription.State.STATE_ACTIVE, - subscriber_contact='subscriber_contact_value', - )) - await client.get_subscription(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = analyticshub.GetSubscriptionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_subscriptions_empty_call_grpc_asyncio(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_subscriptions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.ListSubscriptionsResponse( - next_page_token='next_page_token_value', - )) - await client.list_subscriptions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = analyticshub.ListSubscriptionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_shared_resource_subscriptions_empty_call_grpc_asyncio(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_shared_resource_subscriptions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.ListSharedResourceSubscriptionsResponse( - next_page_token='next_page_token_value', - )) - await client.list_shared_resource_subscriptions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = analyticshub.ListSharedResourceSubscriptionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_revoke_subscription_empty_call_grpc_asyncio(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.revoke_subscription), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyticshub.RevokeSubscriptionResponse( - )) - await client.revoke_subscription(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = analyticshub.RevokeSubscriptionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. 
request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_subscription_empty_call_grpc_asyncio(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_subscription), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_subscription(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = analyticshub.DeleteSubscriptionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_iam_policy_empty_call_grpc_asyncio(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( - version=774, - etag=b'etag_blob', - )) - await client.get_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.GetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_set_iam_policy_empty_call_grpc_asyncio(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( - version=774, - etag=b'etag_blob', - )) - await client.set_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.SetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_test_iam_permissions_empty_call_grpc_asyncio(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - )) - await client.test_iam_permissions(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.TestIamPermissionsRequest() - - assert args[0] == request_msg - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.AnalyticsHubServiceGrpcTransport, - ) - -def test_analytics_hub_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.AnalyticsHubServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_analytics_hub_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.transports.AnalyticsHubServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.AnalyticsHubServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'list_data_exchanges', - 'list_org_data_exchanges', - 'get_data_exchange', - 'create_data_exchange', - 'update_data_exchange', - 'delete_data_exchange', - 'list_listings', - 'get_listing', - 'create_listing', - 'update_listing', - 'delete_listing', - 'subscribe_listing', - 'subscribe_data_exchange', - 'refresh_subscription', - 'get_subscription', - 'list_subscriptions', - 'list_shared_resource_subscriptions', - 'revoke_subscription', - 'delete_subscription', - 'get_iam_policy', - 'set_iam_policy', - 'test_iam_permissions', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_analytics_hub_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.transports.AnalyticsHubServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.AnalyticsHubServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_analytics_hub_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.bigquery_analyticshub_v1.services.analytics_hub_service.transports.AnalyticsHubServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.AnalyticsHubServiceTransport() - adc.assert_called_once() - - -def test_analytics_hub_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - AnalyticsHubServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.AnalyticsHubServiceGrpcTransport, - transports.AnalyticsHubServiceGrpcAsyncIOTransport, - ], -) -def test_analytics_hub_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/bigquery', 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.AnalyticsHubServiceGrpcTransport, - transports.AnalyticsHubServiceGrpcAsyncIOTransport, - ], -) -def test_analytics_hub_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.AnalyticsHubServiceGrpcTransport, grpc_helpers), - (transports.AnalyticsHubServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_analytics_hub_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "analyticshub.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="analyticshub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.AnalyticsHubServiceGrpcTransport, transports.AnalyticsHubServiceGrpcAsyncIOTransport]) -def test_analytics_hub_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) -def test_analytics_hub_service_host_no_port(transport_name): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='analyticshub.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'analyticshub.googleapis.com:443' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) -def test_analytics_hub_service_host_with_port(transport_name): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='analyticshub.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'analyticshub.googleapis.com:8000' - ) - -def test_analytics_hub_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.AnalyticsHubServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_analytics_hub_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.AnalyticsHubServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.AnalyticsHubServiceGrpcTransport, transports.AnalyticsHubServiceGrpcAsyncIOTransport]) -def test_analytics_hub_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
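#
# The deprecated-mTLS tests above and below reduce to one conversion: a client
# certificate source yields a (certificate_chain, private_key) byte pair, and
# the transport hands that pair to grpc.ssl_channel_credentials. A minimal
# sketch of that hand-off; the byte literals mirror the fake values returned
# by the tests' client_cert_source_callback and are not real PEM material
# (acceptable here because gRPC does not parse them until a channel is
# actually opened).
import grpc

def cert_source():
    # A real callback would load PEM-encoded material from disk or a keystore.
    return b"cert bytes", b"key bytes"

cert, key = cert_source()
channel_creds = grpc.ssl_channel_credentials(
    certificate_chain=cert,
    private_key=key,
)
# channel_creds could then be passed to a transport as
# ssl_channel_credentials=channel_creds, or to grpc.secure_channel directly.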
-@pytest.mark.parametrize("transport_class", [transports.AnalyticsHubServiceGrpcTransport, transports.AnalyticsHubServiceGrpcAsyncIOTransport]) -def test_analytics_hub_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_analytics_hub_service_grpc_lro_client(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_analytics_hub_service_grpc_lro_async_client(): - client = AnalyticsHubServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - - -def test_data_exchange_path(): - project = "squid" - location = "clam" - data_exchange = "whelk" - expected = "projects/{project}/locations/{location}/dataExchanges/{data_exchange}".format(project=project, location=location, data_exchange=data_exchange, ) - actual = AnalyticsHubServiceClient.data_exchange_path(project, location, data_exchange) - assert expected == actual - - -def test_parse_data_exchange_path(): - expected = { - "project": "octopus", - "location": "oyster", - "data_exchange": "nudibranch", - } - path = AnalyticsHubServiceClient.data_exchange_path(**expected) - - # Check that the path construction is reversible. - actual = AnalyticsHubServiceClient.parse_data_exchange_path(path) - assert expected == actual - -def test_dataset_path(): - project = "cuttlefish" - dataset = "mussel" - expected = "projects/{project}/datasets/{dataset}".format(project=project, dataset=dataset, ) - actual = AnalyticsHubServiceClient.dataset_path(project, dataset) - assert expected == actual - - -def test_parse_dataset_path(): - expected = { - "project": "winkle", - "dataset": "nautilus", - } - path = AnalyticsHubServiceClient.dataset_path(**expected) - - # Check that the path construction is reversible. 
- actual = AnalyticsHubServiceClient.parse_dataset_path(path) - assert expected == actual - -def test_listing_path(): - project = "scallop" - location = "abalone" - data_exchange = "squid" - listing = "clam" - expected = "projects/{project}/locations/{location}/dataExchanges/{data_exchange}/listings/{listing}".format(project=project, location=location, data_exchange=data_exchange, listing=listing, ) - actual = AnalyticsHubServiceClient.listing_path(project, location, data_exchange, listing) - assert expected == actual - - -def test_parse_listing_path(): - expected = { - "project": "whelk", - "location": "octopus", - "data_exchange": "oyster", - "listing": "nudibranch", - } - path = AnalyticsHubServiceClient.listing_path(**expected) - - # Check that the path construction is reversible. - actual = AnalyticsHubServiceClient.parse_listing_path(path) - assert expected == actual - -def test_subscription_path(): - project = "cuttlefish" - location = "mussel" - subscription = "winkle" - expected = "projects/{project}/locations/{location}/subscriptions/{subscription}".format(project=project, location=location, subscription=subscription, ) - actual = AnalyticsHubServiceClient.subscription_path(project, location, subscription) - assert expected == actual - - -def test_parse_subscription_path(): - expected = { - "project": "nautilus", - "location": "scallop", - "subscription": "abalone", - } - path = AnalyticsHubServiceClient.subscription_path(**expected) - - # Check that the path construction is reversible. - actual = AnalyticsHubServiceClient.parse_subscription_path(path) - assert expected == actual - -def test_table_path(): - project = "squid" - dataset = "clam" - table = "whelk" - expected = "projects/{project}/datasets/{dataset}/tables/{table}".format(project=project, dataset=dataset, table=table, ) - actual = AnalyticsHubServiceClient.table_path(project, dataset, table) - assert expected == actual - - -def test_parse_table_path(): - expected = { - "project": "octopus", - "dataset": "oyster", - "table": "nudibranch", - } - path = AnalyticsHubServiceClient.table_path(**expected) - - # Check that the path construction is reversible. - actual = AnalyticsHubServiceClient.parse_table_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = AnalyticsHubServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "mussel", - } - path = AnalyticsHubServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = AnalyticsHubServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "winkle" - expected = "folders/{folder}".format(folder=folder, ) - actual = AnalyticsHubServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nautilus", - } - path = AnalyticsHubServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. 
- actual = AnalyticsHubServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "scallop" - expected = "organizations/{organization}".format(organization=organization, ) - actual = AnalyticsHubServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "abalone", - } - path = AnalyticsHubServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = AnalyticsHubServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "squid" - expected = "projects/{project}".format(project=project, ) - actual = AnalyticsHubServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "clam", - } - path = AnalyticsHubServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = AnalyticsHubServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "whelk" - location = "octopus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = AnalyticsHubServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - } - path = AnalyticsHubServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = AnalyticsHubServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.AnalyticsHubServiceTransport, '_prep_wrapped_messages') as prep: - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.AnalyticsHubServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = AnalyticsHubServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_transport_close_grpc(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'grpc', - ] - for transport in transports: - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (AnalyticsHubServiceClient, transports.AnalyticsHubServiceGrpcTransport), - (AnalyticsHubServiceAsyncClient, transports.AnalyticsHubServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/.coveragerc b/owl-bot-staging/google-cloud-bigquery-biglake/v1/.coveragerc deleted file mode 100644 index a349e5f15a72..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/bigquery_biglake/__init__.py - google/cloud/bigquery_biglake/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/.flake8 b/owl-bot-staging/google-cloud-bigquery-biglake/v1/.flake8 deleted file mode 100644 index 29227d4cf419..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. 
- **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/MANIFEST.in b/owl-bot-staging/google-cloud-bigquery-biglake/v1/MANIFEST.in deleted file mode 100644 index cfaf06e72a80..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/bigquery_biglake *.py -recursive-include google/cloud/bigquery_biglake_v1 *.py diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/README.rst b/owl-bot-staging/google-cloud-bigquery-biglake/v1/README.rst deleted file mode 100644 index c531f2ff7bbd..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/README.rst +++ /dev/null @@ -1,143 +0,0 @@ -Python Client for Google Cloud Bigquery Biglake API -=================================================== - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Bigquery Biglake API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv <your-env> - source <your-env>/bin/activate - <your-env>/bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. code-block:: console - - python3 -m venv <your-env> - <your-env>\Scripts\activate - <your-env>\Scripts\pip.exe install \path\to\library - - -Logging ------- - -This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes. -Note the following: - -#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging. -#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**. -#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below. - - -Simple, environment-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google -logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged -messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging -event.
- -A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log. - - Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc. - - Invalid logging scopes: :code:`foo`, :code:`123`, etc. - - **NOTE**: If the logging scope is invalid, the library does not set up any logging handlers. - - -Examples -^^^^^^^^ - -- Enabling the default handler for all Google-based loggers - -.. code-block:: console - - export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google - -- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`): - -.. code-block:: console - - export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1 - - -Advanced, code-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -You can also configure a valid logging scope using Python's standard `logging` mechanism. - - -Examples -^^^^^^^^ - -- Configuring a handler for all Google-based loggers - -.. code-block:: python - - import logging - - base_logger = logging.getLogger("google") - base_logger.addHandler(logging.StreamHandler()) - base_logger.setLevel(logging.DEBUG) - -- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`): - -.. code-block:: python - - import logging - - base_logger = logging.getLogger("google.cloud.library_v1") - base_logger.addHandler(logging.StreamHandler()) - base_logger.setLevel(logging.DEBUG) - - -Logging details -~~~~~~~~~~~~~~~ - -#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root - logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set - :code:`logging.getLogger("google").propagate = True` in your code. -#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for - one library, but decide you need to also set up environment-based logging configuration for another library. - - #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual - if the code-based configuration gets applied first. - -#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get - executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured. - (This is the reason for 2.i. above.)
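Taken together, the logging guidance in the deleted README above reduces to a few lines of standard :code:`logging` setup. The sketch below is illustrative only and is not part of the generated files; it assumes this library's own module name, :code:`google.cloud.bigquery_biglake_v1`, as the example scope, and shows the propagation opt-in described under "Logging details".

.. code-block:: python

    import logging

    # Code-based configuration: attach a handler and a level to the scoped
    # logger. Any valid "google.*" scope works; this module name is just an
    # example.
    scoped_logger = logging.getLogger("google.cloud.bigquery_biglake_v1")
    scoped_logger.addHandler(logging.StreamHandler())
    scoped_logger.setLevel(logging.DEBUG)

    # By default, events stop at the "google"-level logger. Opt in explicitly
    # if handlers on the root logger should also receive these events.
    logging.getLogger("google").propagate = True

The environment-based equivalent of the handler setup is :code:`export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.bigquery_biglake_v1` in the shell, before the process starts.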
diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/docs/_static/custom.css b/owl-bot-staging/google-cloud-bigquery-biglake/v1/docs/_static/custom.css deleted file mode 100644 index 06423be0b592..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/docs/_static/custom.css +++ /dev/null @@ -1,3 +0,0 @@ -dl.field-list > dt { - min-width: 100px -} diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/docs/bigquery_biglake_v1/metastore_service.rst b/owl-bot-staging/google-cloud-bigquery-biglake/v1/docs/bigquery_biglake_v1/metastore_service.rst deleted file mode 100644 index b9355d3924af..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/docs/bigquery_biglake_v1/metastore_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -MetastoreService ----------------------------------- - -.. automodule:: google.cloud.bigquery_biglake_v1.services.metastore_service - :members: - :inherited-members: - -.. automodule:: google.cloud.bigquery_biglake_v1.services.metastore_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/docs/bigquery_biglake_v1/services_.rst b/owl-bot-staging/google-cloud-bigquery-biglake/v1/docs/bigquery_biglake_v1/services_.rst deleted file mode 100644 index a7c177be5e84..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/docs/bigquery_biglake_v1/services_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Bigquery Biglake v1 API -================================================= -.. toctree:: - :maxdepth: 2 - - metastore_service diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/docs/bigquery_biglake_v1/types_.rst b/owl-bot-staging/google-cloud-bigquery-biglake/v1/docs/bigquery_biglake_v1/types_.rst deleted file mode 100644 index 61cdb31a3637..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/docs/bigquery_biglake_v1/types_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Bigquery Biglake v1 API -============================================== - -.. automodule:: google.cloud.bigquery_biglake_v1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/docs/conf.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/docs/conf.py deleted file mode 100644 index 8ea454bab3d6..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-bigquery-biglake documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. 
- -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGELOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffixes as a list of strings: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = u"google-cloud-bigquery-biglake" -copyright = u"2023, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = 'en' - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents.
-# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. 
-# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-bigquery-biglake-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-bigquery-biglake.tex", - u"google-cloud-bigquery-biglake Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-bigquery-biglake", - u"Google Cloud Bigquery Biglake Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-bigquery-biglake", - u"google-cloud-bigquery-biglake Documentation", - author, - "google-cloud-bigquery-biglake", - "GAPIC library for Google Cloud Bigquery Biglake API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. 
-# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/docs/index.rst b/owl-bot-staging/google-cloud-bigquery-biglake/v1/docs/index.rst deleted file mode 100644 index 7cb8c82be86f..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - bigquery_biglake_v1/services_ - bigquery_biglake_v1/types_ diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake/__init__.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake/__init__.py deleted file mode 100644 index fcae8fe81feb..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake/__init__.py +++ /dev/null @@ -1,75 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.bigquery_biglake import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.bigquery_biglake_v1.services.metastore_service.client import MetastoreServiceClient -from google.cloud.bigquery_biglake_v1.services.metastore_service.async_client import MetastoreServiceAsyncClient - -from google.cloud.bigquery_biglake_v1.types.metastore import Catalog -from google.cloud.bigquery_biglake_v1.types.metastore import CreateCatalogRequest -from google.cloud.bigquery_biglake_v1.types.metastore import CreateDatabaseRequest -from google.cloud.bigquery_biglake_v1.types.metastore import CreateTableRequest -from google.cloud.bigquery_biglake_v1.types.metastore import Database -from google.cloud.bigquery_biglake_v1.types.metastore import DeleteCatalogRequest -from google.cloud.bigquery_biglake_v1.types.metastore import DeleteDatabaseRequest -from google.cloud.bigquery_biglake_v1.types.metastore import DeleteTableRequest -from google.cloud.bigquery_biglake_v1.types.metastore import GetCatalogRequest -from google.cloud.bigquery_biglake_v1.types.metastore import GetDatabaseRequest -from google.cloud.bigquery_biglake_v1.types.metastore import GetTableRequest -from google.cloud.bigquery_biglake_v1.types.metastore import HiveDatabaseOptions -from google.cloud.bigquery_biglake_v1.types.metastore import HiveTableOptions -from google.cloud.bigquery_biglake_v1.types.metastore import ListCatalogsRequest -from google.cloud.bigquery_biglake_v1.types.metastore import ListCatalogsResponse -from google.cloud.bigquery_biglake_v1.types.metastore import ListDatabasesRequest -from google.cloud.bigquery_biglake_v1.types.metastore import ListDatabasesResponse -from google.cloud.bigquery_biglake_v1.types.metastore import ListTablesRequest -from google.cloud.bigquery_biglake_v1.types.metastore import ListTablesResponse -from google.cloud.bigquery_biglake_v1.types.metastore import RenameTableRequest -from google.cloud.bigquery_biglake_v1.types.metastore import Table -from google.cloud.bigquery_biglake_v1.types.metastore import UpdateDatabaseRequest -from google.cloud.bigquery_biglake_v1.types.metastore import UpdateTableRequest -from google.cloud.bigquery_biglake_v1.types.metastore import TableView - -__all__ = ('MetastoreServiceClient', - 'MetastoreServiceAsyncClient', - 'Catalog', - 'CreateCatalogRequest', - 'CreateDatabaseRequest', - 'CreateTableRequest', - 'Database', - 'DeleteCatalogRequest', - 'DeleteDatabaseRequest', - 'DeleteTableRequest', - 'GetCatalogRequest', - 'GetDatabaseRequest', - 'GetTableRequest', - 'HiveDatabaseOptions', - 'HiveTableOptions', - 'ListCatalogsRequest', - 'ListCatalogsResponse', - 'ListDatabasesRequest', - 'ListDatabasesResponse', - 'ListTablesRequest', - 'ListTablesResponse', - 'RenameTableRequest', - 'Table', - 'UpdateDatabaseRequest', - 'UpdateTableRequest', - 'TableView', -) diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake/gapic_version.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake/py.typed b/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake/py.typed deleted file mode 100644 index 70e9a3b83398..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-bigquery-biglake package uses inline types. diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/__init__.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/__init__.py deleted file mode 100644 index 4e983d3b566f..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/__init__.py +++ /dev/null @@ -1,76 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.bigquery_biglake_v1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.metastore_service import MetastoreServiceClient -from .services.metastore_service import MetastoreServiceAsyncClient - -from .types.metastore import Catalog -from .types.metastore import CreateCatalogRequest -from .types.metastore import CreateDatabaseRequest -from .types.metastore import CreateTableRequest -from .types.metastore import Database -from .types.metastore import DeleteCatalogRequest -from .types.metastore import DeleteDatabaseRequest -from .types.metastore import DeleteTableRequest -from .types.metastore import GetCatalogRequest -from .types.metastore import GetDatabaseRequest -from .types.metastore import GetTableRequest -from .types.metastore import HiveDatabaseOptions -from .types.metastore import HiveTableOptions -from .types.metastore import ListCatalogsRequest -from .types.metastore import ListCatalogsResponse -from .types.metastore import ListDatabasesRequest -from .types.metastore import ListDatabasesResponse -from .types.metastore import ListTablesRequest -from .types.metastore import ListTablesResponse -from .types.metastore import RenameTableRequest -from .types.metastore import Table -from .types.metastore import UpdateDatabaseRequest -from .types.metastore import UpdateTableRequest -from .types.metastore import TableView - -__all__ = ( - 'MetastoreServiceAsyncClient', -'Catalog', -'CreateCatalogRequest', -'CreateDatabaseRequest', -'CreateTableRequest', -'Database', -'DeleteCatalogRequest', -'DeleteDatabaseRequest', -'DeleteTableRequest', -'GetCatalogRequest', -'GetDatabaseRequest', -'GetTableRequest', -'HiveDatabaseOptions', -'HiveTableOptions', -'ListCatalogsRequest', -'ListCatalogsResponse', -'ListDatabasesRequest', -'ListDatabasesResponse', -'ListTablesRequest', -'ListTablesResponse', -'MetastoreServiceClient', -'RenameTableRequest', -'Table', -'TableView', -'UpdateDatabaseRequest', -'UpdateTableRequest', -) diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/gapic_metadata.json b/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/gapic_metadata.json deleted file mode 100644 index 9bb7b3706730..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/gapic_metadata.json +++ /dev/null @@ -1,253 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.bigquery_biglake_v1", - "protoPackage": "google.cloud.bigquery.biglake.v1", - "schema": "1.0", - "services": { - "MetastoreService": { - "clients": { - "grpc": { - "libraryClient": "MetastoreServiceClient", - "rpcs": { - "CreateCatalog": { - "methods": [ - "create_catalog" - ] - }, - "CreateDatabase": { - "methods": [ - "create_database" - ] - }, - "CreateTable": { - "methods": [ - "create_table" - ] - }, - "DeleteCatalog": { - "methods": [ - "delete_catalog" - ] - }, - "DeleteDatabase": { - "methods": [ - "delete_database" - ] - }, - "DeleteTable": { - "methods": [ - "delete_table" - ] - }, - "GetCatalog": { - "methods": [ - "get_catalog" - ] - }, - "GetDatabase": { - "methods": [ - "get_database" - ] - }, - "GetTable": { - "methods": [ - "get_table" - ] - }, - "ListCatalogs": { - "methods": [ - "list_catalogs" - ] - }, - "ListDatabases": { - "methods": [ - "list_databases" - ] - }, - "ListTables": { - "methods": [ - "list_tables" - ] - }, - 
"RenameTable": { - "methods": [ - "rename_table" - ] - }, - "UpdateDatabase": { - "methods": [ - "update_database" - ] - }, - "UpdateTable": { - "methods": [ - "update_table" - ] - } - } - }, - "grpc-async": { - "libraryClient": "MetastoreServiceAsyncClient", - "rpcs": { - "CreateCatalog": { - "methods": [ - "create_catalog" - ] - }, - "CreateDatabase": { - "methods": [ - "create_database" - ] - }, - "CreateTable": { - "methods": [ - "create_table" - ] - }, - "DeleteCatalog": { - "methods": [ - "delete_catalog" - ] - }, - "DeleteDatabase": { - "methods": [ - "delete_database" - ] - }, - "DeleteTable": { - "methods": [ - "delete_table" - ] - }, - "GetCatalog": { - "methods": [ - "get_catalog" - ] - }, - "GetDatabase": { - "methods": [ - "get_database" - ] - }, - "GetTable": { - "methods": [ - "get_table" - ] - }, - "ListCatalogs": { - "methods": [ - "list_catalogs" - ] - }, - "ListDatabases": { - "methods": [ - "list_databases" - ] - }, - "ListTables": { - "methods": [ - "list_tables" - ] - }, - "RenameTable": { - "methods": [ - "rename_table" - ] - }, - "UpdateDatabase": { - "methods": [ - "update_database" - ] - }, - "UpdateTable": { - "methods": [ - "update_table" - ] - } - } - }, - "rest": { - "libraryClient": "MetastoreServiceClient", - "rpcs": { - "CreateCatalog": { - "methods": [ - "create_catalog" - ] - }, - "CreateDatabase": { - "methods": [ - "create_database" - ] - }, - "CreateTable": { - "methods": [ - "create_table" - ] - }, - "DeleteCatalog": { - "methods": [ - "delete_catalog" - ] - }, - "DeleteDatabase": { - "methods": [ - "delete_database" - ] - }, - "DeleteTable": { - "methods": [ - "delete_table" - ] - }, - "GetCatalog": { - "methods": [ - "get_catalog" - ] - }, - "GetDatabase": { - "methods": [ - "get_database" - ] - }, - "GetTable": { - "methods": [ - "get_table" - ] - }, - "ListCatalogs": { - "methods": [ - "list_catalogs" - ] - }, - "ListDatabases": { - "methods": [ - "list_databases" - ] - }, - "ListTables": { - "methods": [ - "list_tables" - ] - }, - "RenameTable": { - "methods": [ - "rename_table" - ] - }, - "UpdateDatabase": { - "methods": [ - "update_database" - ] - }, - "UpdateTable": { - "methods": [ - "update_table" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/gapic_version.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/py.typed b/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/py.typed deleted file mode 100644 index 70e9a3b83398..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-bigquery-biglake package uses inline types. diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/services/__init__.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/services/__init__.py deleted file mode 100644 index 8f6cf068242c..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/services/metastore_service/__init__.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/services/metastore_service/__init__.py deleted file mode 100644 index 3e854a1eaae5..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/services/metastore_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import MetastoreServiceClient -from .async_client import MetastoreServiceAsyncClient - -__all__ = ( - 'MetastoreServiceClient', - 'MetastoreServiceAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/services/metastore_service/async_client.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/services/metastore_service/async_client.py deleted file mode 100644 index e116a22e9b1a..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/services/metastore_service/async_client.py +++ /dev/null @@ -1,2016 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.bigquery_biglake_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.cloud.bigquery_biglake_v1.services.metastore_service import pagers -from google.cloud.bigquery_biglake_v1.types import metastore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import MetastoreServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import MetastoreServiceGrpcAsyncIOTransport -from .client import MetastoreServiceClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -class MetastoreServiceAsyncClient: - """BigLake Metastore is a serverless, highly available, multi-tenant - runtime metastore for Google Cloud Data Analytics products. - - The BigLake Metastore API defines the following resource model: - - - A collection of Google Cloud projects: ``/projects/*`` - - Each project has a collection of available locations: - ``/locations/*`` - - Each location has a collection of catalogs: ``/catalogs/*`` - - Each catalog has a collection of databases: ``/databases/*`` - - Each database has a collection of tables: ``/tables/*`` - """ - - _client: MetastoreServiceClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
- DEFAULT_ENDPOINT = MetastoreServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = MetastoreServiceClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = MetastoreServiceClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = MetastoreServiceClient._DEFAULT_UNIVERSE - - catalog_path = staticmethod(MetastoreServiceClient.catalog_path) - parse_catalog_path = staticmethod(MetastoreServiceClient.parse_catalog_path) - database_path = staticmethod(MetastoreServiceClient.database_path) - parse_database_path = staticmethod(MetastoreServiceClient.parse_database_path) - table_path = staticmethod(MetastoreServiceClient.table_path) - parse_table_path = staticmethod(MetastoreServiceClient.parse_table_path) - common_billing_account_path = staticmethod(MetastoreServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(MetastoreServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(MetastoreServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(MetastoreServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(MetastoreServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(MetastoreServiceClient.parse_common_organization_path) - common_project_path = staticmethod(MetastoreServiceClient.common_project_path) - parse_common_project_path = staticmethod(MetastoreServiceClient.parse_common_project_path) - common_location_path = staticmethod(MetastoreServiceClient.common_location_path) - parse_common_location_path = staticmethod(MetastoreServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - MetastoreServiceAsyncClient: The constructed client. - """ - return MetastoreServiceClient.from_service_account_info.__func__(MetastoreServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - MetastoreServiceAsyncClient: The constructed client. - """ - return MetastoreServiceClient.from_service_account_file.__func__(MetastoreServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if the `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` is provided, use the provided one.
- (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return MetastoreServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> MetastoreServiceTransport: - """Returns the transport used by the client instance. - - Returns: - MetastoreServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = MetastoreServiceClient.get_transport_class - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, MetastoreServiceTransport, Callable[..., MetastoreServiceTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the metastore service async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,MetastoreServiceTransport,Callable[..., MetastoreServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the MetastoreServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which can have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2.
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = MetastoreServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.bigquery.biglake_v1.MetastoreServiceAsyncClient`.", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { - "serviceName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "credentialsType": None, - } - ) - - async def create_catalog(self, - request: Optional[Union[metastore.CreateCatalogRequest, dict]] = None, - *, - parent: Optional[str] = None, - catalog: Optional[metastore.Catalog] = None, - catalog_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Catalog: - r"""Creates a new catalog. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1 - - async def sample_create_catalog(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.CreateCatalogRequest( - parent="parent_value", - catalog_id="catalog_id_value", - ) - - # Make the request - response = await client.create_catalog(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_biglake_v1.types.CreateCatalogRequest, dict]]): - The request object. Request message for the CreateCatalog - method. - parent (:class:`str`): - Required. The parent resource where this catalog will be - created.
Format: - projects/{project_id_or_number}/locations/{location_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - catalog (:class:`google.cloud.bigquery_biglake_v1.types.Catalog`): - Required. The catalog to create. The ``name`` field does - not need to be provided. - - This corresponds to the ``catalog`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - catalog_id (:class:`str`): - Required. The ID to use for the - catalog, which will become the final - component of the catalog's resource - name. - - This corresponds to the ``catalog_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1.types.Catalog: - Catalog is the container of - databases. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, catalog, catalog_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.CreateCatalogRequest): - request = metastore.CreateCatalogRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if catalog is not None: - request.catalog = catalog - if catalog_id is not None: - request.catalog_id = catalog_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_catalog] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_catalog(self, - request: Optional[Union[metastore.DeleteCatalogRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Catalog: - r"""Deletes an existing catalog specified by the catalog - ID. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1 - - async def sample_delete_catalog(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.DeleteCatalogRequest( - name="name_value", - ) - - # Make the request - response = await client.delete_catalog(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_biglake_v1.types.DeleteCatalogRequest, dict]]): - The request object. Request message for the DeleteCatalog - method. - name (:class:`str`): - Required. The name of the catalog to delete. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1.types.Catalog: - Catalog is the container of - databases. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.DeleteCatalogRequest): - request = metastore.DeleteCatalogRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_catalog] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def get_catalog(self, - request: Optional[Union[metastore.GetCatalogRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Catalog: - r"""Gets the catalog specified by the resource name. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1 - - async def sample_get_catalog(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.GetCatalogRequest( - name="name_value", - ) - - # Make the request - response = await client.get_catalog(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_biglake_v1.types.GetCatalogRequest, dict]]): - The request object. Request message for the GetCatalog - method. - name (:class:`str`): - Required. The name of the catalog to retrieve. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1.types.Catalog: - Catalog is the container of - databases. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.GetCatalogRequest): - request = metastore.GetCatalogRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_catalog] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. 
- self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_catalogs(self, - request: Optional[Union[metastore.ListCatalogsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListCatalogsAsyncPager: - r"""List all catalogs in a specified project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1 - - async def sample_list_catalogs(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.ListCatalogsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_catalogs(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_biglake_v1.types.ListCatalogsRequest, dict]]): - The request object. Request message for the ListCatalogs - method. - parent (:class:`str`): - Required. The parent, which owns this collection of - catalogs. Format: - projects/{project_id_or_number}/locations/{location_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1.services.metastore_service.pagers.ListCatalogsAsyncPager: - Response message for the ListCatalogs - method. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.ListCatalogsRequest): - request = metastore.ListCatalogsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_catalogs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListCatalogsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_database(self, - request: Optional[Union[metastore.CreateDatabaseRequest, dict]] = None, - *, - parent: Optional[str] = None, - database: Optional[metastore.Database] = None, - database_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Database: - r"""Creates a new database. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1 - - async def sample_create_database(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.CreateDatabaseRequest( - parent="parent_value", - database_id="database_id_value", - ) - - # Make the request - response = await client.create_database(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_biglake_v1.types.CreateDatabaseRequest, dict]]): - The request object. Request message for the - CreateDatabase method. - parent (:class:`str`): - Required. The parent resource where this database will - be created. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - database (:class:`google.cloud.bigquery_biglake_v1.types.Database`): - Required. The database to create. The ``name`` field - does not need to be provided. - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - database_id (:class:`str`): - Required. The ID to use for the - database, which will become the final - component of the database's resource - name. - - This corresponds to the ``database_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1.types.Database: - Database is the container of tables. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, database, database_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.CreateDatabaseRequest): - request = metastore.CreateDatabaseRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if database is not None: - request.database = database - if database_id is not None: - request.database_id = database_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_database] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_database(self, - request: Optional[Union[metastore.DeleteDatabaseRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Database: - r"""Deletes an existing database specified by the - database ID. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1 - - async def sample_delete_database(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.DeleteDatabaseRequest( - name="name_value", - ) - - # Make the request - response = await client.delete_database(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_biglake_v1.types.DeleteDatabaseRequest, dict]]): - The request object. Request message for the - DeleteDatabase method. - name (:class:`str`): - Required. The name of the database to delete. 
Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1.types.Database: - Database is the container of tables. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.DeleteDatabaseRequest): - request = metastore.DeleteDatabaseRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_database] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_database(self, - request: Optional[Union[metastore.UpdateDatabaseRequest, dict]] = None, - *, - database: Optional[metastore.Database] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Database: - r"""Updates an existing database specified by the - database ID. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1 - - async def sample_update_database(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.UpdateDatabaseRequest( - ) - - # Make the request - response = await client.update_database(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_biglake_v1.types.UpdateDatabaseRequest, dict]]): - The request object. Request message for the - UpdateDatabase method. - database (:class:`google.cloud.bigquery_biglake_v1.types.Database`): - Required. The database to update. - - The database's ``name`` field is used to identify the - database to update. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - The list of fields to update. - - For the ``FieldMask`` definition, see - https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask - If not set, defaults to all of the fields that are - allowed to update. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1.types.Database: - Database is the container of tables. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [database, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.UpdateDatabaseRequest): - request = metastore.UpdateDatabaseRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_database] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database.name", request.database.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_database(self, - request: Optional[Union[metastore.GetDatabaseRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Database: - r"""Gets the database specified by the resource name. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1 - - async def sample_get_database(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.GetDatabaseRequest( - name="name_value", - ) - - # Make the request - response = await client.get_database(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_biglake_v1.types.GetDatabaseRequest, dict]]): - The request object. Request message for the GetDatabase - method. - name (:class:`str`): - Required. The name of the database to retrieve. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1.types.Database: - Database is the container of tables. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.GetDatabaseRequest): - request = metastore.GetDatabaseRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_database] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_databases(self, - request: Optional[Union[metastore.ListDatabasesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListDatabasesAsyncPager: - r"""List all databases in a specified catalog. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1 - - async def sample_list_databases(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.ListDatabasesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_databases(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_biglake_v1.types.ListDatabasesRequest, dict]]): - The request object. Request message for the ListDatabases - method. - parent (:class:`str`): - Required. The parent, which owns this collection of - databases. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1.services.metastore_service.pagers.ListDatabasesAsyncPager: - Response message for the - ListDatabases method. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.ListDatabasesRequest): - request = metastore.ListDatabasesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_databases] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListDatabasesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_table(self, - request: Optional[Union[metastore.CreateTableRequest, dict]] = None, - *, - parent: Optional[str] = None, - table: Optional[metastore.Table] = None, - table_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Table: - r"""Creates a new table. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1 - - async def sample_create_table(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.CreateTableRequest( - parent="parent_value", - table_id="table_id_value", - ) - - # Make the request - response = await client.create_table(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_biglake_v1.types.CreateTableRequest, dict]]): - The request object. Request message for the CreateTable - method. - parent (:class:`str`): - Required. The parent resource where this table will be - created. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - table (:class:`google.cloud.bigquery_biglake_v1.types.Table`): - Required. The table to create. 
The ``name`` field does - not need to be provided for the table creation. - - This corresponds to the ``table`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - table_id (:class:`str`): - Required. The ID to use for the - table, which will become the final - component of the table's resource name. - - This corresponds to the ``table_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1.types.Table: - Represents a table. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, table, table_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.CreateTableRequest): - request = metastore.CreateTableRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if table is not None: - request.table = table - if table_id is not None: - request.table_id = table_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_table] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_table(self, - request: Optional[Union[metastore.DeleteTableRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Table: - r"""Deletes an existing table specified by the table ID. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1 - - async def sample_delete_table(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.DeleteTableRequest( - name="name_value", - ) - - # Make the request - response = await client.delete_table(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_biglake_v1.types.DeleteTableRequest, dict]]): - The request object. Request message for the DeleteTable - method. - name (:class:`str`): - Required. The name of the table to delete. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1.types.Table: - Represents a table. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.DeleteTableRequest): - request = metastore.DeleteTableRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_table] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_table(self, - request: Optional[Union[metastore.UpdateTableRequest, dict]] = None, - *, - table: Optional[metastore.Table] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Table: - r"""Updates an existing table specified by the table ID. 
- - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1 - - async def sample_update_table(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.UpdateTableRequest( - ) - - # Make the request - response = await client.update_table(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_biglake_v1.types.UpdateTableRequest, dict]]): - The request object. Request message for the UpdateTable - method. - table (:class:`google.cloud.bigquery_biglake_v1.types.Table`): - Required. The table to update. - - The table's ``name`` field is used to identify the table - to update. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} - - This corresponds to the ``table`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - The list of fields to update. - - For the ``FieldMask`` definition, see - https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask - If not set, defaults to all of the fields that are - allowed to update. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1.types.Table: - Represents a table. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [table, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.UpdateTableRequest): - request = metastore.UpdateTableRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if table is not None: - request.table = table - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport.update_table] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("table.name", request.table.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def rename_table(self, - request: Optional[Union[metastore.RenameTableRequest, dict]] = None, - *, - name: Optional[str] = None, - new_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Table: - r"""Renames an existing table specified by the table ID. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1 - - async def sample_rename_table(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.RenameTableRequest( - name="name_value", - new_name="new_name_value", - ) - - # Make the request - response = await client.rename_table(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_biglake_v1.types.RenameTableRequest, dict]]): - The request object. Request message for the RenameTable - method in MetastoreService - name (:class:`str`): - Required. The table's ``name`` field is used to identify - the table to rename. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - new_name (:class:`str`): - Required. The new ``name`` for the specified table, must - be in the same database. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} - - This corresponds to the ``new_name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1.types.Table: - Represents a table. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name, new_name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.RenameTableRequest): - request = metastore.RenameTableRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if new_name is not None: - request.new_name = new_name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.rename_table] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_table(self, - request: Optional[Union[metastore.GetTableRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Table: - r"""Gets the table specified by the resource name. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1 - - async def sample_get_table(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.GetTableRequest( - name="name_value", - ) - - # Make the request - response = await client.get_table(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_biglake_v1.types.GetTableRequest, dict]]): - The request object. Request message for the GetTable - method. - name (:class:`str`): - Required. The name of the table to retrieve. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- - Returns: - google.cloud.bigquery_biglake_v1.types.Table: - Represents a table. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.GetTableRequest): - request = metastore.GetTableRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_table] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_tables(self, - request: Optional[Union[metastore.ListTablesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListTablesAsyncPager: - r"""List all tables in a specified database. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1 - - async def sample_list_tables(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.ListTablesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_tables(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_biglake_v1.types.ListTablesRequest, dict]]): - The request object. Request message for the ListTables - method. - parent (:class:`str`): - Required. The parent, which owns this collection of - tables. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1.services.metastore_service.pagers.ListTablesAsyncPager: - Response message for the ListTables - method. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.ListTablesRequest): - request = metastore.ListTablesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_tables] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListTablesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def __aenter__(self) -> "MetastoreServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "MetastoreServiceAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/services/metastore_service/client.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/services/metastore_service/client.py deleted file mode 100644 index 64cf6d42368d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/services/metastore_service/client.py +++ /dev/null @@ -1,2381 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.bigquery_biglake_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.cloud.bigquery_biglake_v1.services.metastore_service import pagers -from google.cloud.bigquery_biglake_v1.types import metastore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import MetastoreServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import MetastoreServiceGrpcTransport -from .transports.grpc_asyncio import MetastoreServiceGrpcAsyncIOTransport -from .transports.rest import MetastoreServiceRestTransport - - -class MetastoreServiceClientMeta(type): - """Metaclass for the MetastoreService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[MetastoreServiceTransport]] - _transport_registry["grpc"] = MetastoreServiceGrpcTransport - _transport_registry["grpc_asyncio"] = MetastoreServiceGrpcAsyncIOTransport - _transport_registry["rest"] = MetastoreServiceRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[MetastoreServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). 
- return next(iter(cls._transport_registry.values())) - - -class MetastoreServiceClient(metaclass=MetastoreServiceClientMeta): - """BigLake Metastore is a serverless, highly available, multi-tenant - runtime metastore for Google Cloud Data Analytics products. - - The BigLake Metastore API defines the following resource model: - - - A collection of Google Cloud projects: ``/projects/*`` - - Each project has a collection of available locations: - ``/locations/*`` - - Each location has a collection of catalogs: ``/catalogs/*`` - - Each catalog has a collection of databases: ``/databases/*`` - - Each database has a collection of tables: ``/tables/*`` - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = "biglake.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - _DEFAULT_ENDPOINT_TEMPLATE = "biglake.{UNIVERSE_DOMAIN}" - _DEFAULT_UNIVERSE = "googleapis.com" - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - MetastoreServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - MetastoreServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> MetastoreServiceTransport: - """Returns the transport used by the client instance. - - Returns: - MetastoreServiceTransport: The transport used by the client - instance.
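With the named groups in its regular expression restored (see above), ``_get_default_mtls_endpoint`` is a pure string transformation, so its rules can be spot-checked without any network access. A sketch against a private helper, for illustration only:

.. code-block:: python

    from google.cloud.bigquery_biglake_v1.services.metastore_service import (
        MetastoreServiceClient,
    )

    to_mtls = MetastoreServiceClient._get_default_mtls_endpoint

    # Production endpoints gain an ".mtls" label.
    assert to_mtls("biglake.googleapis.com") == "biglake.mtls.googleapis.com"

    # Sandbox endpoints are rewritten under "mtls.sandbox".
    assert to_mtls("biglake.sandbox.googleapis.com") == "biglake.mtls.sandbox.googleapis.com"

    # Already-mTLS endpoints and non-googleapis.com hosts pass through unchanged.
    assert to_mtls("biglake.mtls.googleapis.com") == "biglake.mtls.googleapis.com"
    assert to_mtls("example.com") == "example.com"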
- """ - return self._transport - - @staticmethod - def catalog_path(project: str,location: str,catalog: str,) -> str: - """Returns a fully-qualified catalog string.""" - return "projects/{project}/locations/{location}/catalogs/{catalog}".format(project=project, location=location, catalog=catalog, ) - - @staticmethod - def parse_catalog_path(path: str) -> Dict[str,str]: - """Parses a catalog path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/catalogs/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def database_path(project: str,location: str,catalog: str,database: str,) -> str: - """Returns a fully-qualified database string.""" - return "projects/{project}/locations/{location}/catalogs/{catalog}/databases/{database}".format(project=project, location=location, catalog=catalog, database=database, ) - - @staticmethod - def parse_database_path(path: str) -> Dict[str,str]: - """Parses a database path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/catalogs/(?P.+?)/databases/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def table_path(project: str,location: str,catalog: str,database: str,table: str,) -> str: - """Returns a fully-qualified table string.""" - return "projects/{project}/locations/{location}/catalogs/{catalog}/databases/{database}/tables/{table}".format(project=project, location=location, catalog=catalog, database=database, table=table, ) - - @staticmethod - def parse_table_path(path: str) -> Dict[str,str]: - """Parses a table path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/catalogs/(?P.+?)/databases/(?P.+?)/tables/(?P
.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Deprecated. Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. 
- - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. - - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"]. - """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. 
- - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = MetastoreServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = MetastoreServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = MetastoreServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. - """ - universe_domain = MetastoreServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. - """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. 
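``_get_api_endpoint`` composes the pieces above (override, client certificate, universe domain, and the mTLS switch), and its decision table can be traced directly. Again a sketch against private helpers, with invented override values:

.. code-block:: python

    from google.auth.exceptions import MutualTLSChannelError

    from google.cloud.bigquery_biglake_v1.services.metastore_service import (
        MetastoreServiceClient,
    )

    resolve = MetastoreServiceClient._get_api_endpoint

    # No override, no client cert, default universe, "auto": templated default.
    assert resolve(None, None, "googleapis.com", "auto") == "biglake.googleapis.com"

    # An explicit override always wins.
    assert resolve("private.example.com", None, "googleapis.com", "auto") == "private.example.com"

    # "always" forces the mTLS endpoint in the default universe...
    assert resolve(None, None, "googleapis.com", "always") == "biglake.mtls.googleapis.com"

    # ...but mTLS outside the default universe is rejected.
    try:
        resolve(None, None, "example.edu", "always")
    except MutualTLSChannelError:
        pass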
- """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, MetastoreServiceTransport, Callable[..., MetastoreServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the metastore service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,MetastoreServiceTransport,Callable[..., MetastoreServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the MetastoreServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. 
- """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) - - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = MetastoreServiceClient._read_environment_variables() - self._client_cert_source = MetastoreServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = MetastoreServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER - # Setup logging. - client_logging.initialize_logging() - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, MetastoreServiceTransport) - if transport_provided: - # transport is a MetastoreServiceTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = cast(MetastoreServiceTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - MetastoreServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[MetastoreServiceTransport], Callable[..., MetastoreServiceTransport]] = ( - MetastoreServiceClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., MetastoreServiceTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.bigquery.biglake_v1.MetastoreServiceClient`.", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "credentialsType": None, - } - ) - - def create_catalog(self, - request: Optional[Union[metastore.CreateCatalogRequest, dict]] = None, - *, - parent: Optional[str] = None, - catalog: Optional[metastore.Catalog] = None, - catalog_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Catalog: - r"""Creates a new catalog. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1 - - def sample_create_catalog(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.CreateCatalogRequest( - parent="parent_value", - catalog_id="catalog_id_value", - ) - - # Make the request - response = client.create_catalog(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_biglake_v1.types.CreateCatalogRequest, dict]): - The request object. Request message for the CreateCatalog - method. 
- parent (str): - Required. The parent resource where this catalog will be - created. Format: - projects/{project_id_or_number}/locations/{location_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - catalog (google.cloud.bigquery_biglake_v1.types.Catalog): - Required. The catalog to create. The ``name`` field does - not need to be provided. - - This corresponds to the ``catalog`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - catalog_id (str): - Required. The ID to use for the - catalog, which will become the final - component of the catalog's resource - name. - - This corresponds to the ``catalog_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1.types.Catalog: - Catalog is the container of - databases. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, catalog, catalog_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.CreateCatalogRequest): - request = metastore.CreateCatalogRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if catalog is not None: - request.catalog = catalog - if catalog_id is not None: - request.catalog_id = catalog_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_catalog] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_catalog(self, - request: Optional[Union[metastore.DeleteCatalogRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Catalog: - r"""Deletes an existing catalog specified by the catalog - ID. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1 - - def sample_delete_catalog(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.DeleteCatalogRequest( - name="name_value", - ) - - # Make the request - response = client.delete_catalog(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_biglake_v1.types.DeleteCatalogRequest, dict]): - The request object. Request message for the DeleteCatalog - method. - name (str): - Required. The name of the catalog to delete. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1.types.Catalog: - Catalog is the container of - databases. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.DeleteCatalogRequest): - request = metastore.DeleteCatalogRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_catalog] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_catalog(self, - request: Optional[Union[metastore.GetCatalogRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Catalog: - r"""Gets the catalog specified by the resource name. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1 - - def sample_get_catalog(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.GetCatalogRequest( - name="name_value", - ) - - # Make the request - response = client.get_catalog(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_biglake_v1.types.GetCatalogRequest, dict]): - The request object. Request message for the GetCatalog - method. - name (str): - Required. The name of the catalog to retrieve. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1.types.Catalog: - Catalog is the container of - databases. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.GetCatalogRequest): - request = metastore.GetCatalogRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_catalog] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response.
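The routing-header step that recurs in every method is observable in isolation: it folds the named resource fields into a single ``x-goog-request-params`` metadata pair. The exact percent-encoding is whatever the installed google-api-core produces, so the commented output below is indicative only:

.. code-block:: python

    from google.api_core import gapic_v1

    header = gapic_v1.routing_header.to_grpc_metadata(
        (("name", "projects/my-project/locations/us/catalogs/my-catalog"),)
    )
    # Indicatively:
    # ("x-goog-request-params",
    #  "name=projects%2Fmy-project%2Flocations%2Fus%2Fcatalogs%2Fmy-catalog")
    print(header)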
- return response - - def list_catalogs(self, - request: Optional[Union[metastore.ListCatalogsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListCatalogsPager: - r"""List all catalogs in a specified project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1 - - def sample_list_catalogs(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.ListCatalogsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_catalogs(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.bigquery_biglake_v1.types.ListCatalogsRequest, dict]): - The request object. Request message for the ListCatalogs - method. - parent (str): - Required. The parent, which owns this collection of - catalogs. Format: - projects/{project_id_or_number}/locations/{location_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1.services.metastore_service.pagers.ListCatalogsPager: - Response message for the ListCatalogs - method. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.ListCatalogsRequest): - request = metastore.ListCatalogsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_catalogs] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListCatalogsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_database(self, - request: Optional[Union[metastore.CreateDatabaseRequest, dict]] = None, - *, - parent: Optional[str] = None, - database: Optional[metastore.Database] = None, - database_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Database: - r"""Creates a new database. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1 - - def sample_create_database(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.CreateDatabaseRequest( - parent="parent_value", - database_id="database_id_value", - ) - - # Make the request - response = client.create_database(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_biglake_v1.types.CreateDatabaseRequest, dict]): - The request object. Request message for the - CreateDatabase method. - parent (str): - Required. The parent resource where this database will - be created. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - database (google.cloud.bigquery_biglake_v1.types.Database): - Required. The database to create. The ``name`` field - does not need to be provided. - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - database_id (str): - Required. The ID to use for the - database, which will become the final - component of the database's resource - name. - - This corresponds to the ``database_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1.types.Database: - Database is the container of tables. - """ - # Create or coerce a protobuf request object. 
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, database, database_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.CreateDatabaseRequest): - request = metastore.CreateDatabaseRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if database is not None: - request.database = database - if database_id is not None: - request.database_id = database_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_database] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_database(self, - request: Optional[Union[metastore.DeleteDatabaseRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Database: - r"""Deletes an existing database specified by the - database ID. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1 - - def sample_delete_database(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.DeleteDatabaseRequest( - name="name_value", - ) - - # Make the request - response = client.delete_database(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_biglake_v1.types.DeleteDatabaseRequest, dict]): - The request object. Request message for the - DeleteDatabase method. - name (str): - Required. The name of the database to delete. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1.types.Database: - Database is the container of tables. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.DeleteDatabaseRequest): - request = metastore.DeleteDatabaseRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_database] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_database(self, - request: Optional[Union[metastore.UpdateDatabaseRequest, dict]] = None, - *, - database: Optional[metastore.Database] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Database: - r"""Updates an existing database specified by the - database ID. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1 - - def sample_update_database(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.UpdateDatabaseRequest( - ) - - # Make the request - response = client.update_database(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_biglake_v1.types.UpdateDatabaseRequest, dict]): - The request object. Request message for the - UpdateDatabase method. - database (google.cloud.bigquery_biglake_v1.types.Database): - Required. The database to update. - - The database's ``name`` field is used to identify the - database to update. 
Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - The list of fields to update. - - For the ``FieldMask`` definition, see - https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask - If not set, defaults to all of the fields that are - allowed to update. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1.types.Database: - Database is the container of tables. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [database, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.UpdateDatabaseRequest): - request = metastore.UpdateDatabaseRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_database] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database.name", request.database.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_database(self, - request: Optional[Union[metastore.GetDatabaseRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Database: - r"""Gets the database specified by the resource name. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
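``update_database`` follows standard protobuf ``FieldMask`` semantics: list only the field paths you intend to change, or omit the mask to update all mutable fields. A sketch using the flattened arguments (the field path and enum value are illustrative; consult the ``Database`` message for valid paths):

.. code-block:: python

    from google.protobuf import field_mask_pb2

    from google.cloud import bigquery_biglake_v1
    from google.cloud.bigquery_biglake_v1.types import metastore

    client = bigquery_biglake_v1.MetastoreServiceClient()

    database = metastore.Database(
        # ``name`` identifies which database to update; it is not itself rewritten.
        name="projects/my-project/locations/us/catalogs/my-catalog/databases/my-db",
        type_=metastore.Database.Type.HIVE,
    )
    # Only the listed paths are written; unlisted fields keep their stored values.
    mask = field_mask_pb2.FieldMask(paths=["type"])

    updated = client.update_database(database=database, update_mask=mask)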
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1 - - def sample_get_database(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.GetDatabaseRequest( - name="name_value", - ) - - # Make the request - response = client.get_database(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_biglake_v1.types.GetDatabaseRequest, dict]): - The request object. Request message for the GetDatabase - method. - name (str): - Required. The name of the database to retrieve. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1.types.Database: - Database is the container of tables. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.GetDatabaseRequest): - request = metastore.GetDatabaseRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_database] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_databases(self, - request: Optional[Union[metastore.ListDatabasesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListDatabasesPager: - r"""List all databases in a specified catalog. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1 - - def sample_list_databases(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.ListDatabasesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_databases(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.bigquery_biglake_v1.types.ListDatabasesRequest, dict]): - The request object. Request message for the ListDatabases - method. - parent (str): - Required. The parent, which owns this collection of - databases. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1.services.metastore_service.pagers.ListDatabasesPager: - Response message for the - ListDatabases method. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.ListDatabasesRequest): - request = metastore.ListDatabasesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_databases] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. 
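The pager wrapper built at this step is what lets callers treat a paged RPC as a plain iterable; pages can also be walked explicitly through the pager's ``pages`` attribute (standard google-api-core pager behaviour). A sketch with an invented parent:

.. code-block:: python

    from google.cloud import bigquery_biglake_v1

    client = bigquery_biglake_v1.MetastoreServiceClient()
    parent = "projects/my-project/locations/us/catalogs/my-catalog"

    # Item-wise: follow-up ListDatabases calls are issued lazily.
    for database in client.list_databases(parent=parent):
        print(database.name)

    # Page-wise: use a fresh pager, since iteration consumes it.
    for page in client.list_databases(parent=parent).pages:
        print(len(page.databases), "databases on this page")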
- response = pagers.ListDatabasesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_table(self, - request: Optional[Union[metastore.CreateTableRequest, dict]] = None, - *, - parent: Optional[str] = None, - table: Optional[metastore.Table] = None, - table_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Table: - r"""Creates a new table. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1 - - def sample_create_table(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.CreateTableRequest( - parent="parent_value", - table_id="table_id_value", - ) - - # Make the request - response = client.create_table(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_biglake_v1.types.CreateTableRequest, dict]): - The request object. Request message for the CreateTable - method. - parent (str): - Required. The parent resource where this table will be - created. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - table (google.cloud.bigquery_biglake_v1.types.Table): - Required. The table to create. The ``name`` field does - not need to be provided for the table creation. - - This corresponds to the ``table`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - table_id (str): - Required. The ID to use for the - table, which will become the final - component of the table's resource name. - - This corresponds to the ``table_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1.types.Table: - Represents a table. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
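The quick check above enforces that the ``request`` object and the flattened field arguments are mutually exclusive. A sketch of the two calling styles for ``create_table`` (resource paths are hypothetical placeholders):

.. code-block:: python

    from google.cloud import bigquery_biglake_v1

    client = bigquery_biglake_v1.MetastoreServiceClient()
    parent = "projects/my-project/locations/us/catalogs/c/databases/d"

    # Style 1: a fully-formed request object.
    request = bigquery_biglake_v1.CreateTableRequest(
        parent=parent,
        table_id="my_table",
    )
    table = client.create_table(request=request)

    # Style 2: flattened fields. Combining a request object with any
    # flattened field raises ValueError. A real call would populate
    # the Table message rather than passing it empty.
    table = client.create_table(
        parent=parent,
        table=bigquery_biglake_v1.Table(),
        table_id="my_table",
    )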
- flattened_params = [parent, table, table_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.CreateTableRequest): - request = metastore.CreateTableRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if table is not None: - request.table = table - if table_id is not None: - request.table_id = table_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_table] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_table(self, - request: Optional[Union[metastore.DeleteTableRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Table: - r"""Deletes an existing table specified by the table ID. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1 - - def sample_delete_table(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.DeleteTableRequest( - name="name_value", - ) - - # Make the request - response = client.delete_table(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_biglake_v1.types.DeleteTableRequest, dict]): - The request object. Request message for the DeleteTable - method. - name (str): - Required. The name of the table to delete. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
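Custom metadata follows the gRPC convention described in the parameter documentation above: plain keys carry ``str`` values, while keys ending in ``-bin`` carry ``bytes``. A brief sketch; both header names are hypothetical placeholders:

.. code-block:: python

    from google.cloud import bigquery_biglake_v1

    client = bigquery_biglake_v1.MetastoreServiceClient()

    response = client.delete_table(
        # Hypothetical resource name.
        name="projects/p/locations/us/catalogs/c/databases/d/tables/t",
        metadata=(
            ("x-custom-trace", "abc123"),
            ("x-custom-payload-bin", b"\x01\x02"),
        ),
    )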
- - Returns: - google.cloud.bigquery_biglake_v1.types.Table: - Represents a table. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.DeleteTableRequest): - request = metastore.DeleteTableRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_table] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_table(self, - request: Optional[Union[metastore.UpdateTableRequest, dict]] = None, - *, - table: Optional[metastore.Table] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Table: - r"""Updates an existing table specified by the table ID. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1 - - def sample_update_table(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.UpdateTableRequest( - ) - - # Make the request - response = client.update_table(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_biglake_v1.types.UpdateTableRequest, dict]): - The request object. Request message for the UpdateTable - method. - table (google.cloud.bigquery_biglake_v1.types.Table): - Required. The table to update. - - The table's ``name`` field is used to identify the table - to update. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} - - This corresponds to the ``table`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - The list of fields to update. 
- - For the ``FieldMask`` definition, see - https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask - If not set, defaults to all of the fields that are - allowed to update. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1.types.Table: - Represents a table. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [table, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.UpdateTableRequest): - request = metastore.UpdateTableRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if table is not None: - request.table = table - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_table] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("table.name", request.table.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def rename_table(self, - request: Optional[Union[metastore.RenameTableRequest, dict]] = None, - *, - name: Optional[str] = None, - new_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Table: - r"""Renames an existing table specified by the table ID. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1 - - def sample_rename_table(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.RenameTableRequest( - name="name_value", - new_name="new_name_value", - ) - - # Make the request - response = client.rename_table(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_biglake_v1.types.RenameTableRequest, dict]): - The request object. Request message for the RenameTable - method in MetastoreService - name (str): - Required. The table's ``name`` field is used to identify - the table to rename. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - new_name (str): - Required. The new ``name`` for the specified table, must - be in the same database. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} - - This corresponds to the ``new_name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1.types.Table: - Represents a table. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name, new_name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.RenameTableRequest): - request = metastore.RenameTableRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if new_name is not None: - request.new_name = new_name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.rename_table] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
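For ``update_table`` above, the ``update_mask`` controls which fields are written; when unset, all updatable fields are applied. A sketch using an assumed field path:

.. code-block:: python

    from google.protobuf import field_mask_pb2

    from google.cloud import bigquery_biglake_v1

    client = bigquery_biglake_v1.MetastoreServiceClient()

    table = bigquery_biglake_v1.Table(
        # Hypothetical resource name; the name field identifies the
        # table to update.
        name="projects/p/locations/us/catalogs/c/databases/d/tables/t",
    )
    updated = client.update_table(
        table=table,
        # "hive_options" is an assumed updatable field path.
        update_mask=field_mask_pb2.FieldMask(paths=["hive_options"]),
    )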
- return response - - def get_table(self, - request: Optional[Union[metastore.GetTableRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Table: - r"""Gets the table specified by the resource name. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1 - - def sample_get_table(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.GetTableRequest( - name="name_value", - ) - - # Make the request - response = client.get_table(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_biglake_v1.types.GetTableRequest, dict]): - The request object. Request message for the GetTable - method. - name (str): - Required. The name of the table to retrieve. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1.types.Table: - Represents a table. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.GetTableRequest): - request = metastore.GetTableRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_table] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_tables(self, - request: Optional[Union[metastore.ListTablesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListTablesPager: - r"""List all tables in a specified database. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1 - - def sample_list_tables(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.ListTablesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_tables(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.bigquery_biglake_v1.types.ListTablesRequest, dict]): - The request object. Request message for the ListTables - method. - parent (str): - Required. The parent, which owns this collection of - tables. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1.services.metastore_service.pagers.ListTablesPager: - Response message for the ListTables - method. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.ListTablesRequest): - request = metastore.ListTablesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
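Because every method is wrapped with retry and timeout plumbing, callers can override the defaults per call. A sketch with an assumed retry policy; the backoff parameters are illustrative, not library defaults:

.. code-block:: python

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries

    from google.cloud import bigquery_biglake_v1

    client = bigquery_biglake_v1.MetastoreServiceClient()

    # Retry transient unavailability with exponential backoff.
    custom_retry = retries.Retry(
        initial=0.25,
        maximum=8.0,
        multiplier=2.0,
        timeout=60.0,
        predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
    )

    for table in client.list_tables(
        parent="projects/p/locations/us/catalogs/c/databases/d",  # hypothetical
        retry=custom_retry,
        timeout=30.0,
    ):
        print(table.name)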
- rpc = self._transport._wrapped_methods[self._transport.list_tables] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListTablesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "MetastoreServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "MetastoreServiceClient", -) diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/services/metastore_service/pagers.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/services/metastore_service/pagers.py deleted file mode 100644 index 1a00856f03f3..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/services/metastore_service/pagers.py +++ /dev/null @@ -1,444 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.bigquery_biglake_v1.types import metastore - - -class ListCatalogsPager: - """A pager for iterating through ``list_catalogs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_biglake_v1.types.ListCatalogsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``catalogs`` field. 
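Because attribute lookups on these pagers fall through to the most recent response (via the ``__getattr__`` shown further below), raw response fields such as ``next_page_token`` remain reachable. A brief sketch with a hypothetical parent path:

.. code-block:: python

    from google.cloud import bigquery_biglake_v1

    client = bigquery_biglake_v1.MetastoreServiceClient()
    pager = client.list_catalogs(
        parent="projects/my-project/locations/us",  # hypothetical
    )

    # Attribute lookups proxy to the latest ListCatalogsResponse.
    print(pager.next_page_token)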
- - If there are more pages, the ``__iter__`` method will make additional - ``ListCatalogs`` requests and continue to iterate - through the ``catalogs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_biglake_v1.types.ListCatalogsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., metastore.ListCatalogsResponse], - request: metastore.ListCatalogsRequest, - response: metastore.ListCatalogsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_biglake_v1.types.ListCatalogsRequest): - The initial request object. - response (google.cloud.bigquery_biglake_v1.types.ListCatalogsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = metastore.ListCatalogsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[metastore.ListCatalogsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[metastore.Catalog]: - for page in self.pages: - yield from page.catalogs - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListCatalogsAsyncPager: - """A pager for iterating through ``list_catalogs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_biglake_v1.types.ListCatalogsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``catalogs`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListCatalogs`` requests and continue to iterate - through the ``catalogs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_biglake_v1.types.ListCatalogsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[metastore.ListCatalogsResponse]], - request: metastore.ListCatalogsRequest, - response: metastore.ListCatalogsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. 
- - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_biglake_v1.types.ListCatalogsRequest): - The initial request object. - response (google.cloud.bigquery_biglake_v1.types.ListCatalogsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = metastore.ListCatalogsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[metastore.ListCatalogsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[metastore.Catalog]: - async def async_generator(): - async for page in self.pages: - for response in page.catalogs: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDatabasesPager: - """A pager for iterating through ``list_databases`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_biglake_v1.types.ListDatabasesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``databases`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListDatabases`` requests and continue to iterate - through the ``databases`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_biglake_v1.types.ListDatabasesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., metastore.ListDatabasesResponse], - request: metastore.ListDatabasesRequest, - response: metastore.ListDatabasesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_biglake_v1.types.ListDatabasesRequest): - The initial request object. - response (google.cloud.bigquery_biglake_v1.types.ListDatabasesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = metastore.ListDatabasesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[metastore.ListDatabasesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[metastore.Database]: - for page in self.pages: - yield from page.databases - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDatabasesAsyncPager: - """A pager for iterating through ``list_databases`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_biglake_v1.types.ListDatabasesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``databases`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDatabases`` requests and continue to iterate - through the ``databases`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_biglake_v1.types.ListDatabasesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[metastore.ListDatabasesResponse]], - request: metastore.ListDatabasesRequest, - response: metastore.ListDatabasesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_biglake_v1.types.ListDatabasesRequest): - The initial request object. - response (google.cloud.bigquery_biglake_v1.types.ListDatabasesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = metastore.ListDatabasesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[metastore.ListDatabasesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[metastore.Database]: - async def async_generator(): - async for page in self.pages: - for response in page.databases: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListTablesPager: - """A pager for iterating through ``list_tables`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_biglake_v1.types.ListTablesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``tables`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListTables`` requests and continue to iterate - through the ``tables`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_biglake_v1.types.ListTablesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., metastore.ListTablesResponse], - request: metastore.ListTablesRequest, - response: metastore.ListTablesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_biglake_v1.types.ListTablesRequest): - The initial request object. - response (google.cloud.bigquery_biglake_v1.types.ListTablesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = metastore.ListTablesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[metastore.ListTablesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[metastore.Table]: - for page in self.pages: - yield from page.tables - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListTablesAsyncPager: - """A pager for iterating through ``list_tables`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_biglake_v1.types.ListTablesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``tables`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListTables`` requests and continue to iterate - through the ``tables`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_biglake_v1.types.ListTablesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[metastore.ListTablesResponse]], - request: metastore.ListTablesRequest, - response: metastore.ListTablesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_biglake_v1.types.ListTablesRequest): - The initial request object. - response (google.cloud.bigquery_biglake_v1.types.ListTablesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = metastore.ListTablesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[metastore.ListTablesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[metastore.Table]: - async def async_generator(): - async for page in self.pages: - for response in page.tables: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/services/metastore_service/transports/README.rst b/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/services/metastore_service/transports/README.rst deleted file mode 100644 index 489d37387a08..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/services/metastore_service/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`MetastoreServiceTransport` is the ABC for all transports. -- public child `MetastoreServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `MetastoreServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseMetastoreServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `MetastoreServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/services/metastore_service/transports/__init__.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/services/metastore_service/transports/__init__.py deleted file mode 100644 index 5bacbdea32ed..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/services/metastore_service/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -from typing import Dict, Type - -from .base import MetastoreServiceTransport -from .grpc import MetastoreServiceGrpcTransport -from .grpc_asyncio import MetastoreServiceGrpcAsyncIOTransport -from .rest import MetastoreServiceRestTransport -from .rest import MetastoreServiceRestInterceptor - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[MetastoreServiceTransport]] -_transport_registry['grpc'] = MetastoreServiceGrpcTransport -_transport_registry['grpc_asyncio'] = MetastoreServiceGrpcAsyncIOTransport -_transport_registry['rest'] = MetastoreServiceRestTransport - -__all__ = ( - 'MetastoreServiceTransport', - 'MetastoreServiceGrpcTransport', - 'MetastoreServiceGrpcAsyncIOTransport', - 'MetastoreServiceRestTransport', - 'MetastoreServiceRestInterceptor', -) diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/services/metastore_service/transports/base.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/services/metastore_service/transports/base.py deleted file mode 100644 index 276d85c704d1..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/services/metastore_service/transports/base.py +++ /dev/null @@ -1,351 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.bigquery_biglake_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.bigquery_biglake_v1.types import metastore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class MetastoreServiceTransport(abc.ABC): - """Abstract transport class for MetastoreService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'biglake.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'biglake.googleapis.com'). 
- credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. 
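The credential plumbing above resolves Application Default Credentials when none are supplied and rejects passing both ``credentials`` and ``credentials_file``. A sketch with an explicit service-account key; the key path is a hypothetical placeholder:

.. code-block:: python

    from google.oauth2 import service_account

    from google.cloud import bigquery_biglake_v1

    credentials = service_account.Credentials.from_service_account_file(
        "/path/to/key.json",  # hypothetical key file
        scopes=[
            "https://www.googleapis.com/auth/bigquery",
            "https://www.googleapis.com/auth/cloud-platform",
        ],
    )
    client = bigquery_biglake_v1.MetastoreServiceClient(credentials=credentials)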
- self._wrapped_methods = { - self.create_catalog: gapic_v1.method.wrap_method( - self.create_catalog, - default_timeout=None, - client_info=client_info, - ), - self.delete_catalog: gapic_v1.method.wrap_method( - self.delete_catalog, - default_timeout=None, - client_info=client_info, - ), - self.get_catalog: gapic_v1.method.wrap_method( - self.get_catalog, - default_timeout=None, - client_info=client_info, - ), - self.list_catalogs: gapic_v1.method.wrap_method( - self.list_catalogs, - default_timeout=None, - client_info=client_info, - ), - self.create_database: gapic_v1.method.wrap_method( - self.create_database, - default_timeout=None, - client_info=client_info, - ), - self.delete_database: gapic_v1.method.wrap_method( - self.delete_database, - default_timeout=None, - client_info=client_info, - ), - self.update_database: gapic_v1.method.wrap_method( - self.update_database, - default_timeout=None, - client_info=client_info, - ), - self.get_database: gapic_v1.method.wrap_method( - self.get_database, - default_timeout=None, - client_info=client_info, - ), - self.list_databases: gapic_v1.method.wrap_method( - self.list_databases, - default_timeout=None, - client_info=client_info, - ), - self.create_table: gapic_v1.method.wrap_method( - self.create_table, - default_timeout=None, - client_info=client_info, - ), - self.delete_table: gapic_v1.method.wrap_method( - self.delete_table, - default_timeout=None, - client_info=client_info, - ), - self.update_table: gapic_v1.method.wrap_method( - self.update_table, - default_timeout=None, - client_info=client_info, - ), - self.rename_table: gapic_v1.method.wrap_method( - self.rename_table, - default_timeout=None, - client_info=client_info, - ), - self.get_table: gapic_v1.method.wrap_method( - self.get_table, - default_timeout=None, - client_info=client_info, - ), - self.list_tables: gapic_v1.method.wrap_method( - self.list_tables, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
- """ - raise NotImplementedError() - - @property - def create_catalog(self) -> Callable[ - [metastore.CreateCatalogRequest], - Union[ - metastore.Catalog, - Awaitable[metastore.Catalog] - ]]: - raise NotImplementedError() - - @property - def delete_catalog(self) -> Callable[ - [metastore.DeleteCatalogRequest], - Union[ - metastore.Catalog, - Awaitable[metastore.Catalog] - ]]: - raise NotImplementedError() - - @property - def get_catalog(self) -> Callable[ - [metastore.GetCatalogRequest], - Union[ - metastore.Catalog, - Awaitable[metastore.Catalog] - ]]: - raise NotImplementedError() - - @property - def list_catalogs(self) -> Callable[ - [metastore.ListCatalogsRequest], - Union[ - metastore.ListCatalogsResponse, - Awaitable[metastore.ListCatalogsResponse] - ]]: - raise NotImplementedError() - - @property - def create_database(self) -> Callable[ - [metastore.CreateDatabaseRequest], - Union[ - metastore.Database, - Awaitable[metastore.Database] - ]]: - raise NotImplementedError() - - @property - def delete_database(self) -> Callable[ - [metastore.DeleteDatabaseRequest], - Union[ - metastore.Database, - Awaitable[metastore.Database] - ]]: - raise NotImplementedError() - - @property - def update_database(self) -> Callable[ - [metastore.UpdateDatabaseRequest], - Union[ - metastore.Database, - Awaitable[metastore.Database] - ]]: - raise NotImplementedError() - - @property - def get_database(self) -> Callable[ - [metastore.GetDatabaseRequest], - Union[ - metastore.Database, - Awaitable[metastore.Database] - ]]: - raise NotImplementedError() - - @property - def list_databases(self) -> Callable[ - [metastore.ListDatabasesRequest], - Union[ - metastore.ListDatabasesResponse, - Awaitable[metastore.ListDatabasesResponse] - ]]: - raise NotImplementedError() - - @property - def create_table(self) -> Callable[ - [metastore.CreateTableRequest], - Union[ - metastore.Table, - Awaitable[metastore.Table] - ]]: - raise NotImplementedError() - - @property - def delete_table(self) -> Callable[ - [metastore.DeleteTableRequest], - Union[ - metastore.Table, - Awaitable[metastore.Table] - ]]: - raise NotImplementedError() - - @property - def update_table(self) -> Callable[ - [metastore.UpdateTableRequest], - Union[ - metastore.Table, - Awaitable[metastore.Table] - ]]: - raise NotImplementedError() - - @property - def rename_table(self) -> Callable[ - [metastore.RenameTableRequest], - Union[ - metastore.Table, - Awaitable[metastore.Table] - ]]: - raise NotImplementedError() - - @property - def get_table(self) -> Callable[ - [metastore.GetTableRequest], - Union[ - metastore.Table, - Awaitable[metastore.Table] - ]]: - raise NotImplementedError() - - @property - def list_tables(self) -> Callable[ - [metastore.ListTablesRequest], - Union[ - metastore.ListTablesResponse, - Awaitable[metastore.ListTablesResponse] - ]]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'MetastoreServiceTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/services/metastore_service/transports/grpc.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/services/metastore_service/transports/grpc.py deleted file mode 100644 index bd00d192acf1..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/services/metastore_service/transports/grpc.py +++ /dev/null @@ -1,724 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed 
under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json -import logging as std_logging -import pickle -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore - -from google.cloud.bigquery_biglake_v1.types import metastore -from .base import MetastoreServiceTransport, DEFAULT_CLIENT_INFO - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER - def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "rpcName": client_call_details.method, - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - - response = continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = response.trailing_metadata() - # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = response.result() - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response for {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "rpcName": client_call_details.method, - "response": grpc_response, - "metadata": 
grpc_response["metadata"], - }, - ) - return response - - -class MetastoreServiceGrpcTransport(MetastoreServiceTransport): - """gRPC backend transport for MetastoreService. - - BigLake Metastore is a serverless, highly available, multi-tenant - runtime metastore for Google Cloud Data Analytics products. - - The BigLake Metastore API defines the following resource model: - - - A collection of Google Cloud projects: ``/projects/*`` - - Each project has a collection of available locations: - ``/locations/*`` - - Each location has a collection of catalogs: ``/catalogs/*`` - - Each catalog has a collection of databases: ``/databases/*`` - - Each database has a collection of tables: ``/tables/*`` - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'biglake.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'biglake.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if a ``channel`` instance is provided. - channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. 
It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) - - # Wrap messages. 
This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'biglake.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def create_catalog(self) -> Callable[ - [metastore.CreateCatalogRequest], - metastore.Catalog]: - r"""Return a callable for the create catalog method over gRPC. - - Creates a new catalog. - - Returns: - Callable[[~.CreateCatalogRequest], - ~.Catalog]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_catalog' not in self._stubs: - self._stubs['create_catalog'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1.MetastoreService/CreateCatalog', - request_serializer=metastore.CreateCatalogRequest.serialize, - response_deserializer=metastore.Catalog.deserialize, - ) - return self._stubs['create_catalog'] - - @property - def delete_catalog(self) -> Callable[ - [metastore.DeleteCatalogRequest], - metastore.Catalog]: - r"""Return a callable for the delete catalog method over gRPC. - - Deletes an existing catalog specified by the catalog - ID. - - Returns: - Callable[[~.DeleteCatalogRequest], - ~.Catalog]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
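# ---- Editorial sketch, not part of the generated file. ----------------------
# Every RPC property in this transport follows the same lazy stub-caching idiom
# the comment above describes: build the gRPC callable once on first access,
# then serve it from self._stubs. The helper below shows that idiom in
# isolation; `make_stub` is a hypothetical stand-in for
# `self._logged_channel.unary_unary(...)`.
from typing import Callable, Dict

def get_or_create_stub(stubs: Dict[str, Callable], name: str,
                       make_stub: Callable[[], Callable]) -> Callable:
    if name not in stubs:
        # First access: construct the callable and cache it for reuse.
        stubs[name] = make_stub()
    return stubs[name]

# Usage (hypothetical channel and method path):
#   stub = get_or_create_stub(
#       self._stubs, 'delete_catalog',
#       lambda: channel.unary_unary(
#           '/google.cloud.bigquery.biglake.v1.MetastoreService/DeleteCatalog'))
# ------------------------------------------------------------------------------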
- if 'delete_catalog' not in self._stubs: - self._stubs['delete_catalog'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1.MetastoreService/DeleteCatalog', - request_serializer=metastore.DeleteCatalogRequest.serialize, - response_deserializer=metastore.Catalog.deserialize, - ) - return self._stubs['delete_catalog'] - - @property - def get_catalog(self) -> Callable[ - [metastore.GetCatalogRequest], - metastore.Catalog]: - r"""Return a callable for the get catalog method over gRPC. - - Gets the catalog specified by the resource name. - - Returns: - Callable[[~.GetCatalogRequest], - ~.Catalog]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_catalog' not in self._stubs: - self._stubs['get_catalog'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1.MetastoreService/GetCatalog', - request_serializer=metastore.GetCatalogRequest.serialize, - response_deserializer=metastore.Catalog.deserialize, - ) - return self._stubs['get_catalog'] - - @property - def list_catalogs(self) -> Callable[ - [metastore.ListCatalogsRequest], - metastore.ListCatalogsResponse]: - r"""Return a callable for the list catalogs method over gRPC. - - List all catalogs in a specified project. - - Returns: - Callable[[~.ListCatalogsRequest], - ~.ListCatalogsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_catalogs' not in self._stubs: - self._stubs['list_catalogs'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1.MetastoreService/ListCatalogs', - request_serializer=metastore.ListCatalogsRequest.serialize, - response_deserializer=metastore.ListCatalogsResponse.deserialize, - ) - return self._stubs['list_catalogs'] - - @property - def create_database(self) -> Callable[ - [metastore.CreateDatabaseRequest], - metastore.Database]: - r"""Return a callable for the create database method over gRPC. - - Creates a new database. - - Returns: - Callable[[~.CreateDatabaseRequest], - ~.Database]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_database' not in self._stubs: - self._stubs['create_database'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1.MetastoreService/CreateDatabase', - request_serializer=metastore.CreateDatabaseRequest.serialize, - response_deserializer=metastore.Database.deserialize, - ) - return self._stubs['create_database'] - - @property - def delete_database(self) -> Callable[ - [metastore.DeleteDatabaseRequest], - metastore.Database]: - r"""Return a callable for the delete database method over gRPC. - - Deletes an existing database specified by the - database ID. - - Returns: - Callable[[~.DeleteDatabaseRequest], - ~.Database]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_database' not in self._stubs: - self._stubs['delete_database'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1.MetastoreService/DeleteDatabase', - request_serializer=metastore.DeleteDatabaseRequest.serialize, - response_deserializer=metastore.Database.deserialize, - ) - return self._stubs['delete_database'] - - @property - def update_database(self) -> Callable[ - [metastore.UpdateDatabaseRequest], - metastore.Database]: - r"""Return a callable for the update database method over gRPC. - - Updates an existing database specified by the - database ID. - - Returns: - Callable[[~.UpdateDatabaseRequest], - ~.Database]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_database' not in self._stubs: - self._stubs['update_database'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1.MetastoreService/UpdateDatabase', - request_serializer=metastore.UpdateDatabaseRequest.serialize, - response_deserializer=metastore.Database.deserialize, - ) - return self._stubs['update_database'] - - @property - def get_database(self) -> Callable[ - [metastore.GetDatabaseRequest], - metastore.Database]: - r"""Return a callable for the get database method over gRPC. - - Gets the database specified by the resource name. - - Returns: - Callable[[~.GetDatabaseRequest], - ~.Database]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_database' not in self._stubs: - self._stubs['get_database'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1.MetastoreService/GetDatabase', - request_serializer=metastore.GetDatabaseRequest.serialize, - response_deserializer=metastore.Database.deserialize, - ) - return self._stubs['get_database'] - - @property - def list_databases(self) -> Callable[ - [metastore.ListDatabasesRequest], - metastore.ListDatabasesResponse]: - r"""Return a callable for the list databases method over gRPC. - - List all databases in a specified catalog. - - Returns: - Callable[[~.ListDatabasesRequest], - ~.ListDatabasesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_databases' not in self._stubs: - self._stubs['list_databases'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1.MetastoreService/ListDatabases', - request_serializer=metastore.ListDatabasesRequest.serialize, - response_deserializer=metastore.ListDatabasesResponse.deserialize, - ) - return self._stubs['list_databases'] - - @property - def create_table(self) -> Callable[ - [metastore.CreateTableRequest], - metastore.Table]: - r"""Return a callable for the create table method over gRPC. - - Creates a new table. 
- - Returns: - Callable[[~.CreateTableRequest], - ~.Table]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_table' not in self._stubs: - self._stubs['create_table'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1.MetastoreService/CreateTable', - request_serializer=metastore.CreateTableRequest.serialize, - response_deserializer=metastore.Table.deserialize, - ) - return self._stubs['create_table'] - - @property - def delete_table(self) -> Callable[ - [metastore.DeleteTableRequest], - metastore.Table]: - r"""Return a callable for the delete table method over gRPC. - - Deletes an existing table specified by the table ID. - - Returns: - Callable[[~.DeleteTableRequest], - ~.Table]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_table' not in self._stubs: - self._stubs['delete_table'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1.MetastoreService/DeleteTable', - request_serializer=metastore.DeleteTableRequest.serialize, - response_deserializer=metastore.Table.deserialize, - ) - return self._stubs['delete_table'] - - @property - def update_table(self) -> Callable[ - [metastore.UpdateTableRequest], - metastore.Table]: - r"""Return a callable for the update table method over gRPC. - - Updates an existing table specified by the table ID. - - Returns: - Callable[[~.UpdateTableRequest], - ~.Table]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_table' not in self._stubs: - self._stubs['update_table'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1.MetastoreService/UpdateTable', - request_serializer=metastore.UpdateTableRequest.serialize, - response_deserializer=metastore.Table.deserialize, - ) - return self._stubs['update_table'] - - @property - def rename_table(self) -> Callable[ - [metastore.RenameTableRequest], - metastore.Table]: - r"""Return a callable for the rename table method over gRPC. - - Renames an existing table specified by the table ID. - - Returns: - Callable[[~.RenameTableRequest], - ~.Table]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'rename_table' not in self._stubs: - self._stubs['rename_table'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1.MetastoreService/RenameTable', - request_serializer=metastore.RenameTableRequest.serialize, - response_deserializer=metastore.Table.deserialize, - ) - return self._stubs['rename_table'] - - @property - def get_table(self) -> Callable[ - [metastore.GetTableRequest], - metastore.Table]: - r"""Return a callable for the get table method over gRPC. - - Gets the table specified by the resource name. 
- - Returns: - Callable[[~.GetTableRequest], - ~.Table]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_table' not in self._stubs: - self._stubs['get_table'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1.MetastoreService/GetTable', - request_serializer=metastore.GetTableRequest.serialize, - response_deserializer=metastore.Table.deserialize, - ) - return self._stubs['get_table'] - - @property - def list_tables(self) -> Callable[ - [metastore.ListTablesRequest], - metastore.ListTablesResponse]: - r"""Return a callable for the list tables method over gRPC. - - List all tables in a specified database. - - Returns: - Callable[[~.ListTablesRequest], - ~.ListTablesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_tables' not in self._stubs: - self._stubs['list_tables'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1.MetastoreService/ListTables', - request_serializer=metastore.ListTablesRequest.serialize, - response_deserializer=metastore.ListTablesResponse.deserialize, - ) - return self._stubs['list_tables'] - - def close(self): - self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'MetastoreServiceGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/services/metastore_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/services/metastore_service/transports/grpc_asyncio.py deleted file mode 100644 index eee6170295cd..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/services/metastore_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,814 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
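# ---- Editorial sketch, not part of either generated module. ------------------
# The synchronous transport above and the asyncio transport defined below expose
# the same RPC surface, so a caller can construct either one directly (normally
# the client class does this). A minimal sync example; the project and location
# values are placeholders, and the request fields follow the BigLake v1 API:
from google.cloud.bigquery_biglake_v1.services.metastore_service.transports.grpc import (
    MetastoreServiceGrpcTransport,
)
from google.cloud.bigquery_biglake_v1.types import metastore

transport = MetastoreServiceGrpcTransport()  # ADC credentials, default host
catalog = transport.create_catalog(metastore.CreateCatalogRequest(
    parent="projects/my-project/locations/us",
    catalog_id="my_catalog",
    catalog=metastore.Catalog(),
))
transport.close()
# ------------------------------------------------------------------------------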
-# -import inspect -import json -import pickle -import logging as std_logging -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import exceptions as core_exceptions -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.bigquery_biglake_v1.types import metastore -from .base import MetastoreServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import MetastoreServiceGrpcTransport - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER - async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "rpcName": str(client_call_details.method), - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - response = await continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = await response.trailing_metadata() - # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = await response - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response to rpc {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "rpcName": str(client_call_details.method), - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class MetastoreServiceGrpcAsyncIOTransport(MetastoreServiceTransport): - """gRPC AsyncIO backend transport for MetastoreService. 
- - BigLake Metastore is a serverless, highly available, multi-tenant - runtime metastore for Google Cloud Data Analytics products. - - The BigLake Metastore API defines the following resource model: - - - A collection of Google Cloud projects: ``/projects/*`` - - Each project has a collection of available locations: - ``/locations/*`` - - Each location has a collection of catalogs: ``/catalogs/*`` - - Each catalog has a collection of databases: ``/databases/*`` - - Each database has a collection of tables: ``/tables/*`` - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'biglake.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'biglake.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'biglake.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. 
These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, aio.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
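# ---- Editorial sketch, not part of the generated file. ----------------------
# The mTLS branch just below expects `client_cert_source` to be a zero-argument
# callable returning PEM-encoded (certificate_bytes, key_bytes); grpc then turns
# that pair into channel credentials. The file paths here are hypothetical.
import grpc

def example_cert_source():
    with open("client_cert.pem", "rb") as cert_file, \
         open("client_key.pem", "rb") as key_file:
        return cert_file.read(), key_file.read()

cert, key = example_cert_source()
channel_creds = grpc.ssl_channel_credentials(
    certificate_chain=cert, private_key=key
)
# ------------------------------------------------------------------------------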
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def create_catalog(self) -> Callable[ - [metastore.CreateCatalogRequest], - Awaitable[metastore.Catalog]]: - r"""Return a callable for the create catalog method over gRPC. - - Creates a new catalog. - - Returns: - Callable[[~.CreateCatalogRequest], - Awaitable[~.Catalog]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_catalog' not in self._stubs: - self._stubs['create_catalog'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1.MetastoreService/CreateCatalog', - request_serializer=metastore.CreateCatalogRequest.serialize, - response_deserializer=metastore.Catalog.deserialize, - ) - return self._stubs['create_catalog'] - - @property - def delete_catalog(self) -> Callable[ - [metastore.DeleteCatalogRequest], - Awaitable[metastore.Catalog]]: - r"""Return a callable for the delete catalog method over gRPC. - - Deletes an existing catalog specified by the catalog - ID. - - Returns: - Callable[[~.DeleteCatalogRequest], - Awaitable[~.Catalog]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_catalog' not in self._stubs: - self._stubs['delete_catalog'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1.MetastoreService/DeleteCatalog', - request_serializer=metastore.DeleteCatalogRequest.serialize, - response_deserializer=metastore.Catalog.deserialize, - ) - return self._stubs['delete_catalog'] - - @property - def get_catalog(self) -> Callable[ - [metastore.GetCatalogRequest], - Awaitable[metastore.Catalog]]: - r"""Return a callable for the get catalog method over gRPC. - - Gets the catalog specified by the resource name. - - Returns: - Callable[[~.GetCatalogRequest], - Awaitable[~.Catalog]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_catalog' not in self._stubs: - self._stubs['get_catalog'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1.MetastoreService/GetCatalog', - request_serializer=metastore.GetCatalogRequest.serialize, - response_deserializer=metastore.Catalog.deserialize, - ) - return self._stubs['get_catalog'] - - @property - def list_catalogs(self) -> Callable[ - [metastore.ListCatalogsRequest], - Awaitable[metastore.ListCatalogsResponse]]: - r"""Return a callable for the list catalogs method over gRPC. - - List all catalogs in a specified project. - - Returns: - Callable[[~.ListCatalogsRequest], - Awaitable[~.ListCatalogsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_catalogs' not in self._stubs: - self._stubs['list_catalogs'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1.MetastoreService/ListCatalogs', - request_serializer=metastore.ListCatalogsRequest.serialize, - response_deserializer=metastore.ListCatalogsResponse.deserialize, - ) - return self._stubs['list_catalogs'] - - @property - def create_database(self) -> Callable[ - [metastore.CreateDatabaseRequest], - Awaitable[metastore.Database]]: - r"""Return a callable for the create database method over gRPC. - - Creates a new database. - - Returns: - Callable[[~.CreateDatabaseRequest], - Awaitable[~.Database]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_database' not in self._stubs: - self._stubs['create_database'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1.MetastoreService/CreateDatabase', - request_serializer=metastore.CreateDatabaseRequest.serialize, - response_deserializer=metastore.Database.deserialize, - ) - return self._stubs['create_database'] - - @property - def delete_database(self) -> Callable[ - [metastore.DeleteDatabaseRequest], - Awaitable[metastore.Database]]: - r"""Return a callable for the delete database method over gRPC. - - Deletes an existing database specified by the - database ID. 
- - Returns: - Callable[[~.DeleteDatabaseRequest], - Awaitable[~.Database]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_database' not in self._stubs: - self._stubs['delete_database'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1.MetastoreService/DeleteDatabase', - request_serializer=metastore.DeleteDatabaseRequest.serialize, - response_deserializer=metastore.Database.deserialize, - ) - return self._stubs['delete_database'] - - @property - def update_database(self) -> Callable[ - [metastore.UpdateDatabaseRequest], - Awaitable[metastore.Database]]: - r"""Return a callable for the update database method over gRPC. - - Updates an existing database specified by the - database ID. - - Returns: - Callable[[~.UpdateDatabaseRequest], - Awaitable[~.Database]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_database' not in self._stubs: - self._stubs['update_database'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1.MetastoreService/UpdateDatabase', - request_serializer=metastore.UpdateDatabaseRequest.serialize, - response_deserializer=metastore.Database.deserialize, - ) - return self._stubs['update_database'] - - @property - def get_database(self) -> Callable[ - [metastore.GetDatabaseRequest], - Awaitable[metastore.Database]]: - r"""Return a callable for the get database method over gRPC. - - Gets the database specified by the resource name. - - Returns: - Callable[[~.GetDatabaseRequest], - Awaitable[~.Database]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_database' not in self._stubs: - self._stubs['get_database'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1.MetastoreService/GetDatabase', - request_serializer=metastore.GetDatabaseRequest.serialize, - response_deserializer=metastore.Database.deserialize, - ) - return self._stubs['get_database'] - - @property - def list_databases(self) -> Callable[ - [metastore.ListDatabasesRequest], - Awaitable[metastore.ListDatabasesResponse]]: - r"""Return a callable for the list databases method over gRPC. - - List all databases in a specified catalog. - - Returns: - Callable[[~.ListDatabasesRequest], - Awaitable[~.ListDatabasesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
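# ---- Editorial sketch, not part of the generated file. ----------------------
# Callables on this asyncio transport return awaitables, so they are driven from
# a coroutine. `transport` and `parent` are hypothetical placeholders;
# `metastore` is the types module imported at the top of this file.
async def show_databases(transport, parent: str):
    # list_databases resolves to the cached stub created just below.
    response = await transport.list_databases(
        metastore.ListDatabasesRequest(parent=parent)
    )
    for database in response.databases:
        print(database.name)
# ------------------------------------------------------------------------------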
- if 'list_databases' not in self._stubs: - self._stubs['list_databases'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1.MetastoreService/ListDatabases', - request_serializer=metastore.ListDatabasesRequest.serialize, - response_deserializer=metastore.ListDatabasesResponse.deserialize, - ) - return self._stubs['list_databases'] - - @property - def create_table(self) -> Callable[ - [metastore.CreateTableRequest], - Awaitable[metastore.Table]]: - r"""Return a callable for the create table method over gRPC. - - Creates a new table. - - Returns: - Callable[[~.CreateTableRequest], - Awaitable[~.Table]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_table' not in self._stubs: - self._stubs['create_table'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1.MetastoreService/CreateTable', - request_serializer=metastore.CreateTableRequest.serialize, - response_deserializer=metastore.Table.deserialize, - ) - return self._stubs['create_table'] - - @property - def delete_table(self) -> Callable[ - [metastore.DeleteTableRequest], - Awaitable[metastore.Table]]: - r"""Return a callable for the delete table method over gRPC. - - Deletes an existing table specified by the table ID. - - Returns: - Callable[[~.DeleteTableRequest], - Awaitable[~.Table]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_table' not in self._stubs: - self._stubs['delete_table'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1.MetastoreService/DeleteTable', - request_serializer=metastore.DeleteTableRequest.serialize, - response_deserializer=metastore.Table.deserialize, - ) - return self._stubs['delete_table'] - - @property - def update_table(self) -> Callable[ - [metastore.UpdateTableRequest], - Awaitable[metastore.Table]]: - r"""Return a callable for the update table method over gRPC. - - Updates an existing table specified by the table ID. - - Returns: - Callable[[~.UpdateTableRequest], - Awaitable[~.Table]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_table' not in self._stubs: - self._stubs['update_table'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1.MetastoreService/UpdateTable', - request_serializer=metastore.UpdateTableRequest.serialize, - response_deserializer=metastore.Table.deserialize, - ) - return self._stubs['update_table'] - - @property - def rename_table(self) -> Callable[ - [metastore.RenameTableRequest], - Awaitable[metastore.Table]]: - r"""Return a callable for the rename table method over gRPC. - - Renames an existing table specified by the table ID. - - Returns: - Callable[[~.RenameTableRequest], - Awaitable[~.Table]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'rename_table' not in self._stubs: - self._stubs['rename_table'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1.MetastoreService/RenameTable', - request_serializer=metastore.RenameTableRequest.serialize, - response_deserializer=metastore.Table.deserialize, - ) - return self._stubs['rename_table'] - - @property - def get_table(self) -> Callable[ - [metastore.GetTableRequest], - Awaitable[metastore.Table]]: - r"""Return a callable for the get table method over gRPC. - - Gets the table specified by the resource name. - - Returns: - Callable[[~.GetTableRequest], - Awaitable[~.Table]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_table' not in self._stubs: - self._stubs['get_table'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1.MetastoreService/GetTable', - request_serializer=metastore.GetTableRequest.serialize, - response_deserializer=metastore.Table.deserialize, - ) - return self._stubs['get_table'] - - @property - def list_tables(self) -> Callable[ - [metastore.ListTablesRequest], - Awaitable[metastore.ListTablesResponse]]: - r"""Return a callable for the list tables method over gRPC. - - List all tables in a specified database. - - Returns: - Callable[[~.ListTablesRequest], - Awaitable[~.ListTablesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
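# ---- Editorial sketch, not part of the generated file. ----------------------
# Transport callables return raw responses, so the page-by-page iteration that
# the client layer's pagers normally provide has to be done by hand when calling
# the transport directly. Names here are placeholders.
async def iter_tables(transport, parent: str):
    page_token = ""
    while True:
        response = await transport.list_tables(
            metastore.ListTablesRequest(parent=parent, page_token=page_token)
        )
        for table in response.tables:
            yield table
        page_token = response.next_page_token
        if not page_token:
            break
# ------------------------------------------------------------------------------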
- if 'list_tables' not in self._stubs: - self._stubs['list_tables'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1.MetastoreService/ListTables', - request_serializer=metastore.ListTablesRequest.serialize, - response_deserializer=metastore.ListTablesResponse.deserialize, - ) - return self._stubs['list_tables'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.create_catalog: self._wrap_method( - self.create_catalog, - default_timeout=None, - client_info=client_info, - ), - self.delete_catalog: self._wrap_method( - self.delete_catalog, - default_timeout=None, - client_info=client_info, - ), - self.get_catalog: self._wrap_method( - self.get_catalog, - default_timeout=None, - client_info=client_info, - ), - self.list_catalogs: self._wrap_method( - self.list_catalogs, - default_timeout=None, - client_info=client_info, - ), - self.create_database: self._wrap_method( - self.create_database, - default_timeout=None, - client_info=client_info, - ), - self.delete_database: self._wrap_method( - self.delete_database, - default_timeout=None, - client_info=client_info, - ), - self.update_database: self._wrap_method( - self.update_database, - default_timeout=None, - client_info=client_info, - ), - self.get_database: self._wrap_method( - self.get_database, - default_timeout=None, - client_info=client_info, - ), - self.list_databases: self._wrap_method( - self.list_databases, - default_timeout=None, - client_info=client_info, - ), - self.create_table: self._wrap_method( - self.create_table, - default_timeout=None, - client_info=client_info, - ), - self.delete_table: self._wrap_method( - self.delete_table, - default_timeout=None, - client_info=client_info, - ), - self.update_table: self._wrap_method( - self.update_table, - default_timeout=None, - client_info=client_info, - ), - self.rename_table: self._wrap_method( - self.rename_table, - default_timeout=None, - client_info=client_info, - ), - self.get_table: self._wrap_method( - self.get_table, - default_timeout=None, - client_info=client_info, - ), - self.list_tables: self._wrap_method( - self.list_tables, - default_timeout=None, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - -__all__ = ( - 'MetastoreServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/services/metastore_service/transports/rest.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/services/metastore_service/transports/rest.py deleted file mode 100644 index dbec8b0551ca..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/services/metastore_service/transports/rest.py +++ /dev/null @@ -1,2796 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import logging -import json # type: ignore - -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 - -from google.protobuf import json_format - -from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - - -from google.cloud.bigquery_biglake_v1.types import metastore - - -from .rest_base import _BaseMetastoreServiceRestTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = logging.getLogger(__name__) - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=f"requests@{requests_version}", -) - - -class MetastoreServiceRestInterceptor: - """Interceptor for MetastoreService. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the MetastoreServiceRestTransport. - - .. 
code-block:: python - class MyCustomMetastoreServiceInterceptor(MetastoreServiceRestInterceptor): - def pre_create_catalog(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_catalog(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_database(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_database(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_table(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_table(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_catalog(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_delete_catalog(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_database(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_delete_database(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_table(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_delete_table(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_catalog(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_catalog(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_database(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_database(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_table(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_table(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_catalogs(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_catalogs(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_databases(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_databases(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_tables(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_tables(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_rename_table(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_rename_table(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_database(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_database(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_table(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_table(self, response): - 
logging.log(f"Received response: {response}") - return response - - transport = MetastoreServiceRestTransport(interceptor=MyCustomMetastoreServiceInterceptor()) - client = MetastoreServiceClient(transport=transport) - - - """ - def pre_create_catalog(self, request: metastore.CreateCatalogRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.CreateCatalogRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_catalog - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetastoreService server. - """ - return request, metadata - - def post_create_catalog(self, response: metastore.Catalog) -> metastore.Catalog: - """Post-rpc interceptor for create_catalog - - DEPRECATED. Please use the `post_create_catalog_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the MetastoreService server but before - it is returned to user code. This `post_create_catalog` interceptor runs - before the `post_create_catalog_with_metadata` interceptor. - """ - return response - - def post_create_catalog_with_metadata(self, response: metastore.Catalog, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.Catalog, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_catalog - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the MetastoreService server but before it is returned to user code. - - We recommend only using this `post_create_catalog_with_metadata` - interceptor in new development instead of the `post_create_catalog` interceptor. - When both interceptors are used, this `post_create_catalog_with_metadata` interceptor runs after the - `post_create_catalog` interceptor. The (possibly modified) response returned by - `post_create_catalog` will be passed to - `post_create_catalog_with_metadata`. - """ - return response, metadata - - def pre_create_database(self, request: metastore.CreateDatabaseRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.CreateDatabaseRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_database - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetastoreService server. - """ - return request, metadata - - def post_create_database(self, response: metastore.Database) -> metastore.Database: - """Post-rpc interceptor for create_database - - DEPRECATED. Please use the `post_create_database_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the MetastoreService server but before - it is returned to user code. This `post_create_database` interceptor runs - before the `post_create_database_with_metadata` interceptor. - """ - return response - - def post_create_database_with_metadata(self, response: metastore.Database, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.Database, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_database - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the MetastoreService server but before it is returned to user code. - - We recommend only using this `post_create_database_with_metadata` - interceptor in new development instead of the `post_create_database` interceptor. 
- When both interceptors are used, this `post_create_database_with_metadata` interceptor runs after the - `post_create_database` interceptor. The (possibly modified) response returned by - `post_create_database` will be passed to - `post_create_database_with_metadata`. - """ - return response, metadata - - def pre_create_table(self, request: metastore.CreateTableRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.CreateTableRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_table - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetastoreService server. - """ - return request, metadata - - def post_create_table(self, response: metastore.Table) -> metastore.Table: - """Post-rpc interceptor for create_table - - DEPRECATED. Please use the `post_create_table_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the MetastoreService server but before - it is returned to user code. This `post_create_table` interceptor runs - before the `post_create_table_with_metadata` interceptor. - """ - return response - - def post_create_table_with_metadata(self, response: metastore.Table, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.Table, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_table - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the MetastoreService server but before it is returned to user code. - - We recommend only using this `post_create_table_with_metadata` - interceptor in new development instead of the `post_create_table` interceptor. - When both interceptors are used, this `post_create_table_with_metadata` interceptor runs after the - `post_create_table` interceptor. The (possibly modified) response returned by - `post_create_table` will be passed to - `post_create_table_with_metadata`. - """ - return response, metadata - - def pre_delete_catalog(self, request: metastore.DeleteCatalogRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.DeleteCatalogRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_catalog - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetastoreService server. - """ - return request, metadata - - def post_delete_catalog(self, response: metastore.Catalog) -> metastore.Catalog: - """Post-rpc interceptor for delete_catalog - - DEPRECATED. Please use the `post_delete_catalog_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the MetastoreService server but before - it is returned to user code. This `post_delete_catalog` interceptor runs - before the `post_delete_catalog_with_metadata` interceptor. - """ - return response - - def post_delete_catalog_with_metadata(self, response: metastore.Catalog, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.Catalog, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for delete_catalog - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the MetastoreService server but before it is returned to user code. - - We recommend only using this `post_delete_catalog_with_metadata` - interceptor in new development instead of the `post_delete_catalog` interceptor. 
- When both interceptors are used, this `post_delete_catalog_with_metadata` interceptor runs after the - `post_delete_catalog` interceptor. The (possibly modified) response returned by - `post_delete_catalog` will be passed to - `post_delete_catalog_with_metadata`. - """ - return response, metadata - - def pre_delete_database(self, request: metastore.DeleteDatabaseRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.DeleteDatabaseRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_database - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetastoreService server. - """ - return request, metadata - - def post_delete_database(self, response: metastore.Database) -> metastore.Database: - """Post-rpc interceptor for delete_database - - DEPRECATED. Please use the `post_delete_database_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the MetastoreService server but before - it is returned to user code. This `post_delete_database` interceptor runs - before the `post_delete_database_with_metadata` interceptor. - """ - return response - - def post_delete_database_with_metadata(self, response: metastore.Database, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.Database, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for delete_database - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the MetastoreService server but before it is returned to user code. - - We recommend only using this `post_delete_database_with_metadata` - interceptor in new development instead of the `post_delete_database` interceptor. - When both interceptors are used, this `post_delete_database_with_metadata` interceptor runs after the - `post_delete_database` interceptor. The (possibly modified) response returned by - `post_delete_database` will be passed to - `post_delete_database_with_metadata`. - """ - return response, metadata - - def pre_delete_table(self, request: metastore.DeleteTableRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.DeleteTableRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_table - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetastoreService server. - """ - return request, metadata - - def post_delete_table(self, response: metastore.Table) -> metastore.Table: - """Post-rpc interceptor for delete_table - - DEPRECATED. Please use the `post_delete_table_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the MetastoreService server but before - it is returned to user code. This `post_delete_table` interceptor runs - before the `post_delete_table_with_metadata` interceptor. - """ - return response - - def post_delete_table_with_metadata(self, response: metastore.Table, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.Table, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for delete_table - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the MetastoreService server but before it is returned to user code. - - We recommend only using this `post_delete_table_with_metadata` - interceptor in new development instead of the `post_delete_table` interceptor. 
- When both interceptors are used, this `post_delete_table_with_metadata` interceptor runs after the - `post_delete_table` interceptor. The (possibly modified) response returned by - `post_delete_table` will be passed to - `post_delete_table_with_metadata`. - """ - return response, metadata - - def pre_get_catalog(self, request: metastore.GetCatalogRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.GetCatalogRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_catalog - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetastoreService server. - """ - return request, metadata - - def post_get_catalog(self, response: metastore.Catalog) -> metastore.Catalog: - """Post-rpc interceptor for get_catalog - - DEPRECATED. Please use the `post_get_catalog_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the MetastoreService server but before - it is returned to user code. This `post_get_catalog` interceptor runs - before the `post_get_catalog_with_metadata` interceptor. - """ - return response - - def post_get_catalog_with_metadata(self, response: metastore.Catalog, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.Catalog, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_catalog - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the MetastoreService server but before it is returned to user code. - - We recommend only using this `post_get_catalog_with_metadata` - interceptor in new development instead of the `post_get_catalog` interceptor. - When both interceptors are used, this `post_get_catalog_with_metadata` interceptor runs after the - `post_get_catalog` interceptor. The (possibly modified) response returned by - `post_get_catalog` will be passed to - `post_get_catalog_with_metadata`. - """ - return response, metadata - - def pre_get_database(self, request: metastore.GetDatabaseRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.GetDatabaseRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_database - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetastoreService server. - """ - return request, metadata - - def post_get_database(self, response: metastore.Database) -> metastore.Database: - """Post-rpc interceptor for get_database - - DEPRECATED. Please use the `post_get_database_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the MetastoreService server but before - it is returned to user code. This `post_get_database` interceptor runs - before the `post_get_database_with_metadata` interceptor. - """ - return response - - def post_get_database_with_metadata(self, response: metastore.Database, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.Database, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_database - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the MetastoreService server but before it is returned to user code. - - We recommend only using this `post_get_database_with_metadata` - interceptor in new development instead of the `post_get_database` interceptor. 
- When both interceptors are used, this `post_get_database_with_metadata` interceptor runs after the - `post_get_database` interceptor. The (possibly modified) response returned by - `post_get_database` will be passed to - `post_get_database_with_metadata`. - """ - return response, metadata - - def pre_get_table(self, request: metastore.GetTableRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.GetTableRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_table - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetastoreService server. - """ - return request, metadata - - def post_get_table(self, response: metastore.Table) -> metastore.Table: - """Post-rpc interceptor for get_table - - DEPRECATED. Please use the `post_get_table_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the MetastoreService server but before - it is returned to user code. This `post_get_table` interceptor runs - before the `post_get_table_with_metadata` interceptor. - """ - return response - - def post_get_table_with_metadata(self, response: metastore.Table, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.Table, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_table - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the MetastoreService server but before it is returned to user code. - - We recommend only using this `post_get_table_with_metadata` - interceptor in new development instead of the `post_get_table` interceptor. - When both interceptors are used, this `post_get_table_with_metadata` interceptor runs after the - `post_get_table` interceptor. The (possibly modified) response returned by - `post_get_table` will be passed to - `post_get_table_with_metadata`. - """ - return response, metadata - - def pre_list_catalogs(self, request: metastore.ListCatalogsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.ListCatalogsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_catalogs - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetastoreService server. - """ - return request, metadata - - def post_list_catalogs(self, response: metastore.ListCatalogsResponse) -> metastore.ListCatalogsResponse: - """Post-rpc interceptor for list_catalogs - - DEPRECATED. Please use the `post_list_catalogs_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the MetastoreService server but before - it is returned to user code. This `post_list_catalogs` interceptor runs - before the `post_list_catalogs_with_metadata` interceptor. - """ - return response - - def post_list_catalogs_with_metadata(self, response: metastore.ListCatalogsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.ListCatalogsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_catalogs - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the MetastoreService server but before it is returned to user code. - - We recommend only using this `post_list_catalogs_with_metadata` - interceptor in new development instead of the `post_list_catalogs` interceptor. 
- When both interceptors are used, this `post_list_catalogs_with_metadata` interceptor runs after the - `post_list_catalogs` interceptor. The (possibly modified) response returned by - `post_list_catalogs` will be passed to - `post_list_catalogs_with_metadata`. - """ - return response, metadata - - def pre_list_databases(self, request: metastore.ListDatabasesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.ListDatabasesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_databases - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetastoreService server. - """ - return request, metadata - - def post_list_databases(self, response: metastore.ListDatabasesResponse) -> metastore.ListDatabasesResponse: - """Post-rpc interceptor for list_databases - - DEPRECATED. Please use the `post_list_databases_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the MetastoreService server but before - it is returned to user code. This `post_list_databases` interceptor runs - before the `post_list_databases_with_metadata` interceptor. - """ - return response - - def post_list_databases_with_metadata(self, response: metastore.ListDatabasesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.ListDatabasesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_databases - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the MetastoreService server but before it is returned to user code. - - We recommend only using this `post_list_databases_with_metadata` - interceptor in new development instead of the `post_list_databases` interceptor. - When both interceptors are used, this `post_list_databases_with_metadata` interceptor runs after the - `post_list_databases` interceptor. The (possibly modified) response returned by - `post_list_databases` will be passed to - `post_list_databases_with_metadata`. - """ - return response, metadata - - def pre_list_tables(self, request: metastore.ListTablesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.ListTablesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_tables - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetastoreService server. - """ - return request, metadata - - def post_list_tables(self, response: metastore.ListTablesResponse) -> metastore.ListTablesResponse: - """Post-rpc interceptor for list_tables - - DEPRECATED. Please use the `post_list_tables_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the MetastoreService server but before - it is returned to user code. This `post_list_tables` interceptor runs - before the `post_list_tables_with_metadata` interceptor. - """ - return response - - def post_list_tables_with_metadata(self, response: metastore.ListTablesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.ListTablesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_tables - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the MetastoreService server but before it is returned to user code. 
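        For illustration, a subclass could use the metadata (the HTTP response
        headers, as key/value pairs) alongside the response. A minimal hedged
        sketch; the class name and logging behavior below are editor-supplied
        examples, not part of the generated surface:

        .. code-block:: python

            class AuditingInterceptor(MetastoreServiceRestInterceptor):
                def post_list_tables_with_metadata(self, response, metadata):
                    # `response` is a metastore.ListTablesResponse; `metadata`
                    # holds the HTTP response headers as (key, value) pairs.
                    logging.info(
                        "ListTables returned %d tables; headers=%r",
                        len(response.tables), dict(metadata),
                    )
                    return response, metadata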
- - We recommend only using this `post_list_tables_with_metadata` - interceptor in new development instead of the `post_list_tables` interceptor. - When both interceptors are used, this `post_list_tables_with_metadata` interceptor runs after the - `post_list_tables` interceptor. The (possibly modified) response returned by - `post_list_tables` will be passed to - `post_list_tables_with_metadata`. - """ - return response, metadata - - def pre_rename_table(self, request: metastore.RenameTableRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.RenameTableRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for rename_table - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetastoreService server. - """ - return request, metadata - - def post_rename_table(self, response: metastore.Table) -> metastore.Table: - """Post-rpc interceptor for rename_table - - DEPRECATED. Please use the `post_rename_table_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the MetastoreService server but before - it is returned to user code. This `post_rename_table` interceptor runs - before the `post_rename_table_with_metadata` interceptor. - """ - return response - - def post_rename_table_with_metadata(self, response: metastore.Table, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.Table, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for rename_table - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the MetastoreService server but before it is returned to user code. - - We recommend only using this `post_rename_table_with_metadata` - interceptor in new development instead of the `post_rename_table` interceptor. - When both interceptors are used, this `post_rename_table_with_metadata` interceptor runs after the - `post_rename_table` interceptor. The (possibly modified) response returned by - `post_rename_table` will be passed to - `post_rename_table_with_metadata`. - """ - return response, metadata - - def pre_update_database(self, request: metastore.UpdateDatabaseRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.UpdateDatabaseRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_database - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetastoreService server. - """ - return request, metadata - - def post_update_database(self, response: metastore.Database) -> metastore.Database: - """Post-rpc interceptor for update_database - - DEPRECATED. Please use the `post_update_database_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the MetastoreService server but before - it is returned to user code. This `post_update_database` interceptor runs - before the `post_update_database_with_metadata` interceptor. - """ - return response - - def post_update_database_with_metadata(self, response: metastore.Database, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.Database, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_database - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the MetastoreService server but before it is returned to user code. 
- - We recommend only using this `post_update_database_with_metadata` - interceptor in new development instead of the `post_update_database` interceptor. - When both interceptors are used, this `post_update_database_with_metadata` interceptor runs after the - `post_update_database` interceptor. The (possibly modified) response returned by - `post_update_database` will be passed to - `post_update_database_with_metadata`. - """ - return response, metadata - - def pre_update_table(self, request: metastore.UpdateTableRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.UpdateTableRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_table - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetastoreService server. - """ - return request, metadata - - def post_update_table(self, response: metastore.Table) -> metastore.Table: - """Post-rpc interceptor for update_table - - DEPRECATED. Please use the `post_update_table_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the MetastoreService server but before - it is returned to user code. This `post_update_table` interceptor runs - before the `post_update_table_with_metadata` interceptor. - """ - return response - - def post_update_table_with_metadata(self, response: metastore.Table, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.Table, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_table - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the MetastoreService server but before it is returned to user code. - - We recommend only using this `post_update_table_with_metadata` - interceptor in new development instead of the `post_update_table` interceptor. - When both interceptors are used, this `post_update_table_with_metadata` interceptor runs after the - `post_update_table` interceptor. The (possibly modified) response returned by - `post_update_table` will be passed to - `post_update_table_with_metadata`. - """ - return response, metadata - - -@dataclasses.dataclass -class MetastoreServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: MetastoreServiceRestInterceptor - - -class MetastoreServiceRestTransport(_BaseMetastoreServiceRestTransport): - """REST backend synchronous transport for MetastoreService. - - BigLake Metastore is a serverless, highly available, multi-tenant - runtime metastore for Google Cloud Data Analytics products. - - The BigLake Metastore API defines the following resource model: - - - A collection of Google Cloud projects: ``/projects/*`` - - Each project has a collection of available locations: - ``/locations/*`` - - Each location has a collection of catalogs: ``/catalogs/*`` - - Each catalog has a collection of databases: ``/databases/*`` - - Each database has a collection of tables: ``/tables/*`` - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
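    For illustration, fully-qualified resource names nest according to this
    model; a hedged sketch in which the project, location, and resource IDs
    are placeholders:

    .. code-block:: python

        catalog = "projects/my-project/locations/us/catalogs/my_catalog"
        database = catalog + "/databases/my_database"
        table = database + "/tables/my_table"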
- - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'biglake.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[MetastoreServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'biglake.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
- # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - url_scheme=url_scheme, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or MetastoreServiceRestInterceptor() - self._prep_wrapped_messages(client_info) - - class _CreateCatalog(_BaseMetastoreServiceRestTransport._BaseCreateCatalog, MetastoreServiceRestStub): - def __hash__(self): - return hash("MetastoreServiceRestTransport.CreateCatalog") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: metastore.CreateCatalogRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> metastore.Catalog: - r"""Call the create catalog method over HTTP. - - Args: - request (~.metastore.CreateCatalogRequest): - The request object. Request message for the CreateCatalog - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.metastore.Catalog: - Catalog is the container of - databases. 
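            For illustration, callers normally reach this RPC through the
            public client rather than the transport stub; a hedged sketch,
            where the parent path and catalog ID are placeholders:

            .. code-block:: python

                from google.cloud.bigquery_biglake_v1 import MetastoreServiceClient
                from google.cloud.bigquery_biglake_v1.types import metastore

                client = MetastoreServiceClient(transport="rest")
                catalog = client.create_catalog(
                    parent="projects/my-project/locations/us",
                    catalog=metastore.Catalog(),
                    catalog_id="my_catalog",
                )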
- - """ - - http_options = _BaseMetastoreServiceRestTransport._BaseCreateCatalog._get_http_options() - - request, metadata = self._interceptor.pre_create_catalog(request, metadata) - transcoded_request = _BaseMetastoreServiceRestTransport._BaseCreateCatalog._get_transcoded_request(http_options, request) - - body = _BaseMetastoreServiceRestTransport._BaseCreateCatalog._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseMetastoreServiceRestTransport._BaseCreateCatalog._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.biglake_v1.MetastoreServiceClient.CreateCatalog", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "rpcName": "CreateCatalog", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetastoreServiceRestTransport._CreateCatalog._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = metastore.Catalog() - pb_resp = metastore.Catalog.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_catalog(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_catalog_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = metastore.Catalog.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.biglake_v1.MetastoreServiceClient.create_catalog", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "rpcName": "CreateCatalog", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateDatabase(_BaseMetastoreServiceRestTransport._BaseCreateDatabase, MetastoreServiceRestStub): - def __hash__(self): - return hash("MetastoreServiceRestTransport.CreateDatabase") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: metastore.CreateDatabaseRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: 
Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> metastore.Database: - r"""Call the create database method over HTTP. - - Args: - request (~.metastore.CreateDatabaseRequest): - The request object. Request message for the - CreateDatabase method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.metastore.Database: - Database is the container of tables. - """ - - http_options = _BaseMetastoreServiceRestTransport._BaseCreateDatabase._get_http_options() - - request, metadata = self._interceptor.pre_create_database(request, metadata) - transcoded_request = _BaseMetastoreServiceRestTransport._BaseCreateDatabase._get_transcoded_request(http_options, request) - - body = _BaseMetastoreServiceRestTransport._BaseCreateDatabase._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseMetastoreServiceRestTransport._BaseCreateDatabase._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.biglake_v1.MetastoreServiceClient.CreateDatabase", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "rpcName": "CreateDatabase", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetastoreServiceRestTransport._CreateDatabase._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = metastore.Database() - pb_resp = metastore.Database.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_database(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_database_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = metastore.Database.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.biglake_v1.MetastoreServiceClient.create_database", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "rpcName": "CreateDatabase", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateTable(_BaseMetastoreServiceRestTransport._BaseCreateTable, MetastoreServiceRestStub): - def __hash__(self): - return hash("MetastoreServiceRestTransport.CreateTable") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: metastore.CreateTableRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> metastore.Table: - r"""Call the create table method over HTTP. - - Args: - request (~.metastore.CreateTableRequest): - The request object. Request message for the CreateTable - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.metastore.Table: - Represents a table. 
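            For illustration, this RPC is normally invoked through the public
            client, either with flattened arguments or an explicit request
            object; a hedged sketch of the request-object form, with
            placeholder resource IDs and a pre-existing ``client``:

            .. code-block:: python

                from google.cloud.bigquery_biglake_v1.types import metastore

                request = metastore.CreateTableRequest(
                    parent="projects/my-project/locations/us/catalogs/my_catalog/databases/my_database",
                    table=metastore.Table(),
                    table_id="my_table",
                )
                table = client.create_table(request=request)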
- """ - - http_options = _BaseMetastoreServiceRestTransport._BaseCreateTable._get_http_options() - - request, metadata = self._interceptor.pre_create_table(request, metadata) - transcoded_request = _BaseMetastoreServiceRestTransport._BaseCreateTable._get_transcoded_request(http_options, request) - - body = _BaseMetastoreServiceRestTransport._BaseCreateTable._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseMetastoreServiceRestTransport._BaseCreateTable._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.biglake_v1.MetastoreServiceClient.CreateTable", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "rpcName": "CreateTable", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetastoreServiceRestTransport._CreateTable._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = metastore.Table() - pb_resp = metastore.Table.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_table(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_table_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = metastore.Table.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.biglake_v1.MetastoreServiceClient.create_table", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "rpcName": "CreateTable", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteCatalog(_BaseMetastoreServiceRestTransport._BaseDeleteCatalog, MetastoreServiceRestStub): - def __hash__(self): - return hash("MetastoreServiceRestTransport.DeleteCatalog") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: metastore.DeleteCatalogRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, 
Union[str, bytes]]]=(), - ) -> metastore.Catalog: - r"""Call the delete catalog method over HTTP. - - Args: - request (~.metastore.DeleteCatalogRequest): - The request object. Request message for the DeleteCatalog - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.metastore.Catalog: - Catalog is the container of - databases. - - """ - - http_options = _BaseMetastoreServiceRestTransport._BaseDeleteCatalog._get_http_options() - - request, metadata = self._interceptor.pre_delete_catalog(request, metadata) - transcoded_request = _BaseMetastoreServiceRestTransport._BaseDeleteCatalog._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseMetastoreServiceRestTransport._BaseDeleteCatalog._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.biglake_v1.MetastoreServiceClient.DeleteCatalog", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "rpcName": "DeleteCatalog", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetastoreServiceRestTransport._DeleteCatalog._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = metastore.Catalog() - pb_resp = metastore.Catalog.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_delete_catalog(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_catalog_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = metastore.Catalog.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.biglake_v1.MetastoreServiceClient.delete_catalog", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "rpcName": "DeleteCatalog", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteDatabase(_BaseMetastoreServiceRestTransport._BaseDeleteDatabase, MetastoreServiceRestStub): - def __hash__(self): - return hash("MetastoreServiceRestTransport.DeleteDatabase") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: metastore.DeleteDatabaseRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> metastore.Database: - r"""Call the delete database method over HTTP. - - Args: - request (~.metastore.DeleteDatabaseRequest): - The request object. Request message for the - DeleteDatabase method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.metastore.Database: - Database is the container of tables. 
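            Since HTTP errors surface as ``google.api_core`` exception
            subclasses (raised from the status-code check further below), a
            caller can handle them in the usual way; a hedged sketch with a
            placeholder resource name and a pre-existing ``client``:

            .. code-block:: python

                from google.api_core import exceptions as core_exceptions

                try:
                    database = client.delete_database(
                        name="projects/my-project/locations/us/catalogs/my_catalog/databases/my_database",
                    )
                except core_exceptions.NotFound:
                    pass  # already deleted; treat as success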
- """ - - http_options = _BaseMetastoreServiceRestTransport._BaseDeleteDatabase._get_http_options() - - request, metadata = self._interceptor.pre_delete_database(request, metadata) - transcoded_request = _BaseMetastoreServiceRestTransport._BaseDeleteDatabase._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseMetastoreServiceRestTransport._BaseDeleteDatabase._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.biglake_v1.MetastoreServiceClient.DeleteDatabase", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "rpcName": "DeleteDatabase", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetastoreServiceRestTransport._DeleteDatabase._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = metastore.Database() - pb_resp = metastore.Database.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_delete_database(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_database_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = metastore.Database.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.biglake_v1.MetastoreServiceClient.delete_database", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "rpcName": "DeleteDatabase", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteTable(_BaseMetastoreServiceRestTransport._BaseDeleteTable, MetastoreServiceRestStub): - def __hash__(self): - return hash("MetastoreServiceRestTransport.DeleteTable") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: metastore.DeleteTableRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> metastore.Table: - r"""Call the delete table method 
over HTTP. - - Args: - request (~.metastore.DeleteTableRequest): - The request object. Request message for the DeleteTable - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.metastore.Table: - Represents a table. - """ - - http_options = _BaseMetastoreServiceRestTransport._BaseDeleteTable._get_http_options() - - request, metadata = self._interceptor.pre_delete_table(request, metadata) - transcoded_request = _BaseMetastoreServiceRestTransport._BaseDeleteTable._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseMetastoreServiceRestTransport._BaseDeleteTable._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.biglake_v1.MetastoreServiceClient.DeleteTable", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "rpcName": "DeleteTable", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetastoreServiceRestTransport._DeleteTable._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = metastore.Table() - pb_resp = metastore.Table.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_delete_table(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_table_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = metastore.Table.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.biglake_v1.MetastoreServiceClient.delete_table", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "rpcName": "DeleteTable", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetCatalog(_BaseMetastoreServiceRestTransport._BaseGetCatalog, MetastoreServiceRestStub): - def __hash__(self): - return hash("MetastoreServiceRestTransport.GetCatalog") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: metastore.GetCatalogRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> metastore.Catalog: - r"""Call the get catalog method over HTTP. - - Args: - request (~.metastore.GetCatalogRequest): - The request object. Request message for the GetCatalog - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.metastore.Catalog: - Catalog is the container of - databases. 
- - """ - - http_options = _BaseMetastoreServiceRestTransport._BaseGetCatalog._get_http_options() - - request, metadata = self._interceptor.pre_get_catalog(request, metadata) - transcoded_request = _BaseMetastoreServiceRestTransport._BaseGetCatalog._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseMetastoreServiceRestTransport._BaseGetCatalog._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.biglake_v1.MetastoreServiceClient.GetCatalog", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "rpcName": "GetCatalog", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetastoreServiceRestTransport._GetCatalog._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = metastore.Catalog() - pb_resp = metastore.Catalog.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_catalog(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_catalog_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = metastore.Catalog.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.biglake_v1.MetastoreServiceClient.get_catalog", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "rpcName": "GetCatalog", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetDatabase(_BaseMetastoreServiceRestTransport._BaseGetDatabase, MetastoreServiceRestStub): - def __hash__(self): - return hash("MetastoreServiceRestTransport.GetDatabase") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: metastore.GetDatabaseRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> metastore.Database: - r"""Call the get database method over HTTP. 
- - Args: - request (~.metastore.GetDatabaseRequest): - The request object. Request message for the GetDatabase - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.metastore.Database: - Database is the container of tables. - """ - - http_options = _BaseMetastoreServiceRestTransport._BaseGetDatabase._get_http_options() - - request, metadata = self._interceptor.pre_get_database(request, metadata) - transcoded_request = _BaseMetastoreServiceRestTransport._BaseGetDatabase._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseMetastoreServiceRestTransport._BaseGetDatabase._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.biglake_v1.MetastoreServiceClient.GetDatabase", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "rpcName": "GetDatabase", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetastoreServiceRestTransport._GetDatabase._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
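- # The server-supplied error message from the JSON body, when present, is attached to the raised exception.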
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = metastore.Database() - pb_resp = metastore.Database.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_database(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_database_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = metastore.Database.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.biglake_v1.MetastoreServiceClient.get_database", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "rpcName": "GetDatabase", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetTable(_BaseMetastoreServiceRestTransport._BaseGetTable, MetastoreServiceRestStub): - def __hash__(self): - return hash("MetastoreServiceRestTransport.GetTable") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: metastore.GetTableRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> metastore.Table: - r"""Call the get table method over HTTP. - - Args: - request (~.metastore.GetTableRequest): - The request object. Request message for the GetTable - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.metastore.Table: - Represents a table. 
- """ - - http_options = _BaseMetastoreServiceRestTransport._BaseGetTable._get_http_options() - - request, metadata = self._interceptor.pre_get_table(request, metadata) - transcoded_request = _BaseMetastoreServiceRestTransport._BaseGetTable._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseMetastoreServiceRestTransport._BaseGetTable._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.biglake_v1.MetastoreServiceClient.GetTable", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "rpcName": "GetTable", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetastoreServiceRestTransport._GetTable._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = metastore.Table() - pb_resp = metastore.Table.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_table(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_table_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = metastore.Table.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.biglake_v1.MetastoreServiceClient.get_table", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "rpcName": "GetTable", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListCatalogs(_BaseMetastoreServiceRestTransport._BaseListCatalogs, MetastoreServiceRestStub): - def __hash__(self): - return hash("MetastoreServiceRestTransport.ListCatalogs") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: metastore.ListCatalogsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> metastore.ListCatalogsResponse: - r"""Call the list catalogs method over HTTP. 
- - Args: - request (~.metastore.ListCatalogsRequest): - The request object. Request message for the ListCatalogs - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.metastore.ListCatalogsResponse: - Response message for the ListCatalogs - method. - - """ - - http_options = _BaseMetastoreServiceRestTransport._BaseListCatalogs._get_http_options() - - request, metadata = self._interceptor.pre_list_catalogs(request, metadata) - transcoded_request = _BaseMetastoreServiceRestTransport._BaseListCatalogs._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseMetastoreServiceRestTransport._BaseListCatalogs._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.biglake_v1.MetastoreServiceClient.ListCatalogs", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "rpcName": "ListCatalogs", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetastoreServiceRestTransport._ListCatalogs._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
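- # Statuses below 400 fall through to the proto parsing below.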
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = metastore.ListCatalogsResponse() - pb_resp = metastore.ListCatalogsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_catalogs(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_catalogs_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = metastore.ListCatalogsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.biglake_v1.MetastoreServiceClient.list_catalogs", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "rpcName": "ListCatalogs", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListDatabases(_BaseMetastoreServiceRestTransport._BaseListDatabases, MetastoreServiceRestStub): - def __hash__(self): - return hash("MetastoreServiceRestTransport.ListDatabases") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: metastore.ListDatabasesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> metastore.ListDatabasesResponse: - r"""Call the list databases method over HTTP. - - Args: - request (~.metastore.ListDatabasesRequest): - The request object. Request message for the ListDatabases - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.metastore.ListDatabasesResponse: - Response message for the - ListDatabases method. 
- - """ - - http_options = _BaseMetastoreServiceRestTransport._BaseListDatabases._get_http_options() - - request, metadata = self._interceptor.pre_list_databases(request, metadata) - transcoded_request = _BaseMetastoreServiceRestTransport._BaseListDatabases._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseMetastoreServiceRestTransport._BaseListDatabases._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.biglake_v1.MetastoreServiceClient.ListDatabases", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "rpcName": "ListDatabases", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetastoreServiceRestTransport._ListDatabases._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = metastore.ListDatabasesResponse() - pb_resp = metastore.ListDatabasesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_databases(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_databases_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = metastore.ListDatabasesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.biglake_v1.MetastoreServiceClient.list_databases", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "rpcName": "ListDatabases", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListTables(_BaseMetastoreServiceRestTransport._BaseListTables, MetastoreServiceRestStub): - def __hash__(self): - return hash("MetastoreServiceRestTransport.ListTables") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: metastore.ListTablesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> 
metastore.ListTablesResponse: - r"""Call the list tables method over HTTP. - - Args: - request (~.metastore.ListTablesRequest): - The request object. Request message for the ListTables - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.metastore.ListTablesResponse: - Response message for the ListTables - method. - - """ - - http_options = _BaseMetastoreServiceRestTransport._BaseListTables._get_http_options() - - request, metadata = self._interceptor.pre_list_tables(request, metadata) - transcoded_request = _BaseMetastoreServiceRestTransport._BaseListTables._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseMetastoreServiceRestTransport._BaseListTables._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.biglake_v1.MetastoreServiceClient.ListTables", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "rpcName": "ListTables", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetastoreServiceRestTransport._ListTables._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
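- # The raise happens before parsing, so callers never see a partially-populated response on error.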
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = metastore.ListTablesResponse() - pb_resp = metastore.ListTablesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_tables(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_tables_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = metastore.ListTablesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.biglake_v1.MetastoreServiceClient.list_tables", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "rpcName": "ListTables", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _RenameTable(_BaseMetastoreServiceRestTransport._BaseRenameTable, MetastoreServiceRestStub): - def __hash__(self): - return hash("MetastoreServiceRestTransport.RenameTable") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: metastore.RenameTableRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> metastore.Table: - r"""Call the rename table method over HTTP. - - Args: - request (~.metastore.RenameTableRequest): - The request object. Request message for the RenameTable - method in MetastoreService - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.metastore.Table: - Represents a table. 
- """ - - http_options = _BaseMetastoreServiceRestTransport._BaseRenameTable._get_http_options() - - request, metadata = self._interceptor.pre_rename_table(request, metadata) - transcoded_request = _BaseMetastoreServiceRestTransport._BaseRenameTable._get_transcoded_request(http_options, request) - - body = _BaseMetastoreServiceRestTransport._BaseRenameTable._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseMetastoreServiceRestTransport._BaseRenameTable._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.biglake_v1.MetastoreServiceClient.RenameTable", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "rpcName": "RenameTable", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetastoreServiceRestTransport._RenameTable._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = metastore.Table() - pb_resp = metastore.Table.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_rename_table(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_rename_table_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = metastore.Table.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.biglake_v1.MetastoreServiceClient.rename_table", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "rpcName": "RenameTable", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateDatabase(_BaseMetastoreServiceRestTransport._BaseUpdateDatabase, MetastoreServiceRestStub): - def __hash__(self): - return hash("MetastoreServiceRestTransport.UpdateDatabase") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: metastore.UpdateDatabaseRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: 
Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> metastore.Database: - r"""Call the update database method over HTTP. - - Args: - request (~.metastore.UpdateDatabaseRequest): - The request object. Request message for the - UpdateDatabase method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.metastore.Database: - Database is the container of tables. - """ - - http_options = _BaseMetastoreServiceRestTransport._BaseUpdateDatabase._get_http_options() - - request, metadata = self._interceptor.pre_update_database(request, metadata) - transcoded_request = _BaseMetastoreServiceRestTransport._BaseUpdateDatabase._get_transcoded_request(http_options, request) - - body = _BaseMetastoreServiceRestTransport._BaseUpdateDatabase._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseMetastoreServiceRestTransport._BaseUpdateDatabase._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.biglake_v1.MetastoreServiceClient.UpdateDatabase", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "rpcName": "UpdateDatabase", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetastoreServiceRestTransport._UpdateDatabase._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
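- # Client (4xx) and server (5xx) errors are surfaced through the same exception hierarchy.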
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = metastore.Database() - pb_resp = metastore.Database.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_database(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_database_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = metastore.Database.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.biglake_v1.MetastoreServiceClient.update_database", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "rpcName": "UpdateDatabase", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateTable(_BaseMetastoreServiceRestTransport._BaseUpdateTable, MetastoreServiceRestStub): - def __hash__(self): - return hash("MetastoreServiceRestTransport.UpdateTable") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: metastore.UpdateTableRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> metastore.Table: - r"""Call the update table method over HTTP. - - Args: - request (~.metastore.UpdateTableRequest): - The request object. Request message for the UpdateTable - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.metastore.Table: - Represents a table. 
- """ - - http_options = _BaseMetastoreServiceRestTransport._BaseUpdateTable._get_http_options() - - request, metadata = self._interceptor.pre_update_table(request, metadata) - transcoded_request = _BaseMetastoreServiceRestTransport._BaseUpdateTable._get_transcoded_request(http_options, request) - - body = _BaseMetastoreServiceRestTransport._BaseUpdateTable._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseMetastoreServiceRestTransport._BaseUpdateTable._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.biglake_v1.MetastoreServiceClient.UpdateTable", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "rpcName": "UpdateTable", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetastoreServiceRestTransport._UpdateTable._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = metastore.Table() - pb_resp = metastore.Table.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_table(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_table_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = metastore.Table.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.biglake_v1.MetastoreServiceClient.update_table", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "rpcName": "UpdateTable", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - @property - def create_catalog(self) -> Callable[ - [metastore.CreateCatalogRequest], - metastore.Catalog]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateCatalog(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_database(self) -> Callable[ - [metastore.CreateDatabaseRequest], - metastore.Database]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._CreateDatabase(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_table(self) -> Callable[ - [metastore.CreateTableRequest], - metastore.Table]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateTable(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_catalog(self) -> Callable[ - [metastore.DeleteCatalogRequest], - metastore.Catalog]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteCatalog(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_database(self) -> Callable[ - [metastore.DeleteDatabaseRequest], - metastore.Database]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteDatabase(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_table(self) -> Callable[ - [metastore.DeleteTableRequest], - metastore.Table]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteTable(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_catalog(self) -> Callable[ - [metastore.GetCatalogRequest], - metastore.Catalog]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetCatalog(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_database(self) -> Callable[ - [metastore.GetDatabaseRequest], - metastore.Database]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetDatabase(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_table(self) -> Callable[ - [metastore.GetTableRequest], - metastore.Table]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetTable(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_catalogs(self) -> Callable[ - [metastore.ListCatalogsRequest], - metastore.ListCatalogsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListCatalogs(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_databases(self) -> Callable[ - [metastore.ListDatabasesRequest], - metastore.ListDatabasesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListDatabases(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_tables(self) -> Callable[ - [metastore.ListTablesRequest], - metastore.ListTablesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._ListTables(self._session, self._host, self._interceptor) # type: ignore - - @property - def rename_table(self) -> Callable[ - [metastore.RenameTableRequest], - metastore.Table]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._RenameTable(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_database(self) -> Callable[ - [metastore.UpdateDatabaseRequest], - metastore.Database]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateDatabase(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_table(self) -> Callable[ - [metastore.UpdateTableRequest], - metastore.Table]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateTable(self._session, self._host, self._interceptor) # type: ignore - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'MetastoreServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/services/metastore_service/transports/rest_base.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/services/metastore_service/transports/rest_base.py deleted file mode 100644 index 30976c0ade55..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/services/metastore_service/transports/rest_base.py +++ /dev/null @@ -1,706 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from .base import MetastoreServiceTransport, DEFAULT_CLIENT_INFO - -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - - -from google.cloud.bigquery_biglake_v1.types import metastore - - -class _BaseMetastoreServiceRestTransport(MetastoreServiceTransport): - """Base REST backend transport for MetastoreService. - - Note: This class is not meant to be used directly. Use its sync and - async sub-classes instead. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- 
- It sends JSON representations of protocol buffers over HTTP/1.1
- """
- 
- def __init__(self, *,
- host: str = 'biglake.googleapis.com',
- credentials: Optional[Any] = None,
- client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
- always_use_jwt_access: Optional[bool] = False,
- url_scheme: str = 'https',
- api_audience: Optional[str] = None,
- ) -> None:
- """Instantiate the transport.
- Args:
- host (Optional[str]):
- The hostname to connect to (default: 'biglake.googleapis.com').
- credentials (Optional[Any]): The
- authorization credentials to attach to requests. These
- credentials identify the application to the service; if none
- are specified, the client will attempt to ascertain the
- credentials from the environment.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you are developing
- your own client library.
- always_use_jwt_access (Optional[bool]): Whether self signed JWT should
- be used for service account credentials.
- url_scheme: the protocol scheme for the API endpoint. Normally
- "https", but for testing or local servers,
- "http" can be specified.
- """
- # Run the base constructor
- maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
- if maybe_url_match is None:
- raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER
- 
- url_match_items = maybe_url_match.groupdict()
- 
- host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
- 
- super().__init__(
- host=host,
- credentials=credentials,
- client_info=client_info,
- always_use_jwt_access=always_use_jwt_access,
- api_audience=api_audience
- )
- 
- class _BaseCreateCatalog:
- def __hash__(self): # pragma: NO COVER
- return NotImplementedError("__hash__ must be implemented.")
- 
- __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
- "catalogId" : "", }
- 
- @classmethod
- def _get_unset_required_fields(cls, message_dict):
- return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
- 
- @staticmethod
- def _get_http_options():
- http_options: List[Dict[str, str]] = [{
- 'method': 'post',
- 'uri': '/v1/{parent=projects/*/locations/*}/catalogs',
- 'body': 'catalog',
- },
- ]
- return http_options
- 
- @staticmethod
- def _get_transcoded_request(http_options, request):
- pb_request = metastore.CreateCatalogRequest.pb(request)
- transcoded_request = path_template.transcode(http_options, pb_request)
- return transcoded_request
- 
- @staticmethod
- def _get_request_body_json(transcoded_request):
- # Jsonify the request body
- 
- body = json_format.MessageToJson(
- transcoded_request['body'],
- use_integers_for_enums=True
- )
- return body
- @staticmethod
- def _get_query_params_json(transcoded_request):
- query_params = json.loads(json_format.MessageToJson(
- transcoded_request['query_params'],
- use_integers_for_enums=True,
- ))
- query_params.update(_BaseMetastoreServiceRestTransport._BaseCreateCatalog._get_unset_required_fields(query_params))
- 
- query_params["$alt"] = "json;enum-encoding=int"
- return query_params
- 
- class _BaseCreateDatabase:
- def __hash__(self): # pragma: NO COVER
- return NotImplementedError("__hash__ must be implemented.")
- 
- __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
- "databaseId" : "", }
- 
- @classmethod
- def _get_unset_required_fields(cls, message_dict):
- return {k: v for k, v in 
cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*/catalogs/*}/databases', - 'body': 'database', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = metastore.CreateDatabaseRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMetastoreServiceRestTransport._BaseCreateDatabase._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateTable: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "tableId" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*/catalogs/*/databases/*}/tables', - 'body': 'table', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = metastore.CreateTableRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMetastoreServiceRestTransport._BaseCreateTable._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteCatalog: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/catalogs/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = metastore.DeleteCatalogRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - 
)) - query_params.update(_BaseMetastoreServiceRestTransport._BaseDeleteCatalog._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteDatabase: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/catalogs/*/databases/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = metastore.DeleteDatabaseRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMetastoreServiceRestTransport._BaseDeleteDatabase._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteTable: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/catalogs/*/databases/*/tables/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = metastore.DeleteTableRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMetastoreServiceRestTransport._BaseDeleteTable._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetCatalog: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/catalogs/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = metastore.GetCatalogRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - 
use_integers_for_enums=True, - )) - query_params.update(_BaseMetastoreServiceRestTransport._BaseGetCatalog._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetDatabase: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/catalogs/*/databases/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = metastore.GetDatabaseRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMetastoreServiceRestTransport._BaseGetDatabase._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetTable: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/catalogs/*/databases/*/tables/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = metastore.GetTableRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMetastoreServiceRestTransport._BaseGetTable._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListCatalogs: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/catalogs', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = metastore.ListCatalogsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - 
use_integers_for_enums=True, - )) - query_params.update(_BaseMetastoreServiceRestTransport._BaseListCatalogs._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListDatabases: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*/catalogs/*}/databases', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = metastore.ListDatabasesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMetastoreServiceRestTransport._BaseListDatabases._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListTables: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*/catalogs/*/databases/*}/tables', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = metastore.ListTablesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMetastoreServiceRestTransport._BaseListTables._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseRenameTable: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/catalogs/*/databases/*/tables/*}:rename', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = metastore.RenameTableRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = 
json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMetastoreServiceRestTransport._BaseRenameTable._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateDatabase: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{database.name=projects/*/locations/*/catalogs/*/databases/*}', - 'body': 'database', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = metastore.UpdateDatabaseRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMetastoreServiceRestTransport._BaseUpdateDatabase._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateTable: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{table.name=projects/*/locations/*/catalogs/*/databases/*/tables/*}', - 'body': 'table', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = metastore.UpdateTableRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMetastoreServiceRestTransport._BaseUpdateTable._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - -__all__=( - '_BaseMetastoreServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/types/__init__.py 
b/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/types/__init__.py deleted file mode 100644 index 010a4e6f09cd..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/types/__init__.py +++ /dev/null @@ -1,68 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .metastore import ( - Catalog, - CreateCatalogRequest, - CreateDatabaseRequest, - CreateTableRequest, - Database, - DeleteCatalogRequest, - DeleteDatabaseRequest, - DeleteTableRequest, - GetCatalogRequest, - GetDatabaseRequest, - GetTableRequest, - HiveDatabaseOptions, - HiveTableOptions, - ListCatalogsRequest, - ListCatalogsResponse, - ListDatabasesRequest, - ListDatabasesResponse, - ListTablesRequest, - ListTablesResponse, - RenameTableRequest, - Table, - UpdateDatabaseRequest, - UpdateTableRequest, - TableView, -) - -__all__ = ( - 'Catalog', - 'CreateCatalogRequest', - 'CreateDatabaseRequest', - 'CreateTableRequest', - 'Database', - 'DeleteCatalogRequest', - 'DeleteDatabaseRequest', - 'DeleteTableRequest', - 'GetCatalogRequest', - 'GetDatabaseRequest', - 'GetTableRequest', - 'HiveDatabaseOptions', - 'HiveTableOptions', - 'ListCatalogsRequest', - 'ListCatalogsResponse', - 'ListDatabasesRequest', - 'ListDatabasesResponse', - 'ListTablesRequest', - 'ListTablesResponse', - 'RenameTableRequest', - 'Table', - 'UpdateDatabaseRequest', - 'UpdateTableRequest', - 'TableView', -) diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/types/metastore.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/types/metastore.py deleted file mode 100644 index 32d8e0a524f9..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/google/cloud/bigquery_biglake_v1/types/metastore.py +++ /dev/null @@ -1,866 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
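A minimal sketch of the request-transcoding flow that the ``_Base*`` helpers in the REST transport above implement, assuming the published ``google-cloud-bigquery-biglake`` and ``google-api-core`` packages are installed; the HTTP rule is copied from ``_BaseGetCatalog`` and the resource name is hypothetical.

.. code-block:: python

    from google.api_core import path_template
    from google.cloud.bigquery_biglake_v1.types import metastore

    # The HTTP rule declared by _BaseGetCatalog._get_http_options().
    http_options = [{
        'method': 'get',
        'uri': '/v1/{name=projects/*/locations/*/catalogs/*}',
    }]

    request = metastore.GetCatalogRequest(
        name='projects/my-project/locations/us/catalogs/my-catalog',  # hypothetical
    )

    # transcode() matches the request against the rule, expands the URI
    # template, and leaves the remaining fields to become query parameters.
    transcoded = path_template.transcode(
        http_options, metastore.GetCatalogRequest.pb(request))
    print(transcoded['method'])  # get
    print(transcoded['uri'])     # /v1/projects/my-project/locations/us/catalogs/my-catalog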
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.bigquery.biglake.v1', - manifest={ - 'TableView', - 'Catalog', - 'Database', - 'Table', - 'CreateCatalogRequest', - 'DeleteCatalogRequest', - 'GetCatalogRequest', - 'ListCatalogsRequest', - 'ListCatalogsResponse', - 'CreateDatabaseRequest', - 'DeleteDatabaseRequest', - 'UpdateDatabaseRequest', - 'GetDatabaseRequest', - 'ListDatabasesRequest', - 'ListDatabasesResponse', - 'CreateTableRequest', - 'DeleteTableRequest', - 'UpdateTableRequest', - 'RenameTableRequest', - 'GetTableRequest', - 'ListTablesRequest', - 'ListTablesResponse', - 'HiveDatabaseOptions', - 'HiveTableOptions', - }, -) - - -class TableView(proto.Enum): - r"""View on Table. Represents which fields will be populated for - calls that return Table objects. - - Values: - TABLE_VIEW_UNSPECIFIED (0): - Default value. The API will default to the - BASIC view. - BASIC (1): - Include only table names. - This is the default value. - FULL (2): - Include everything. - """ - TABLE_VIEW_UNSPECIFIED = 0 - BASIC = 1 - FULL = 2 - - -class Catalog(proto.Message): - r"""Catalog is the container of databases. - - Attributes: - name (str): - Output only. The resource name. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The creation time of the - catalog. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The last modification time of - the catalog. - delete_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The deletion time of the - catalog. Only set after the catalog is deleted. - expire_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when this catalog is - considered expired. Only set after the catalog - is deleted. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - delete_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - expire_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - - -class Database(proto.Message): - r"""Database is the container of tables. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - hive_options (google.cloud.bigquery_biglake_v1.types.HiveDatabaseOptions): - Options of a Hive database. - - This field is a member of `oneof`_ ``options``. - name (str): - Output only. The resource name. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The creation time of the - database. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The last modification time of - the database. - delete_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The deletion time of the - database. Only set after the database is - deleted. 
- expire_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when this database is - considered expired. Only set after the database - is deleted. - type_ (google.cloud.bigquery_biglake_v1.types.Database.Type): - The database type. - """ - class Type(proto.Enum): - r"""The database type. - - Values: - TYPE_UNSPECIFIED (0): - The type is not specified. - HIVE (1): - Represents a database storing tables - compatible with Hive Metastore tables. - """ - TYPE_UNSPECIFIED = 0 - HIVE = 1 - - hive_options: 'HiveDatabaseOptions' = proto.Field( - proto.MESSAGE, - number=7, - oneof='options', - message='HiveDatabaseOptions', - ) - name: str = proto.Field( - proto.STRING, - number=1, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - delete_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - expire_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - type_: Type = proto.Field( - proto.ENUM, - number=6, - enum=Type, - ) - - -class Table(proto.Message): - r"""Represents a table. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - hive_options (google.cloud.bigquery_biglake_v1.types.HiveTableOptions): - Options of a Hive table. - - This field is a member of `oneof`_ ``options``. - name (str): - Output only. The resource name. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The creation time of the table. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The last modification time of - the table. - delete_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The deletion time of the table. - Only set after the table is deleted. - expire_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when this table is - considered expired. Only set after the table is - deleted. - type_ (google.cloud.bigquery_biglake_v1.types.Table.Type): - The table type. - etag (str): - The checksum of a table object computed by - the server based on the value of other fields. - It may be sent on update requests to ensure the - client has an up-to-date value before - proceeding. It is only checked for update table - operations. - """ - class Type(proto.Enum): - r"""The table type. - - Values: - TYPE_UNSPECIFIED (0): - The type is not specified. - HIVE (1): - Represents a table compatible with Hive - Metastore tables. 
- """ - TYPE_UNSPECIFIED = 0 - HIVE = 1 - - hive_options: 'HiveTableOptions' = proto.Field( - proto.MESSAGE, - number=7, - oneof='options', - message='HiveTableOptions', - ) - name: str = proto.Field( - proto.STRING, - number=1, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - delete_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - expire_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - type_: Type = proto.Field( - proto.ENUM, - number=6, - enum=Type, - ) - etag: str = proto.Field( - proto.STRING, - number=8, - ) - - -class CreateCatalogRequest(proto.Message): - r"""Request message for the CreateCatalog method. - - Attributes: - parent (str): - Required. The parent resource where this catalog will be - created. Format: - projects/{project_id_or_number}/locations/{location_id} - catalog (google.cloud.bigquery_biglake_v1.types.Catalog): - Required. The catalog to create. The ``name`` field does not - need to be provided. - catalog_id (str): - Required. The ID to use for the catalog, - which will become the final component of the - catalog's resource name. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - catalog: 'Catalog' = proto.Field( - proto.MESSAGE, - number=2, - message='Catalog', - ) - catalog_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class DeleteCatalogRequest(proto.Message): - r"""Request message for the DeleteCatalog method. - - Attributes: - name (str): - Required. The name of the catalog to delete. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class GetCatalogRequest(proto.Message): - r"""Request message for the GetCatalog method. - - Attributes: - name (str): - Required. The name of the catalog to retrieve. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListCatalogsRequest(proto.Message): - r"""Request message for the ListCatalogs method. - - Attributes: - parent (str): - Required. The parent, which owns this collection of - catalogs. Format: - projects/{project_id_or_number}/locations/{location_id} - page_size (int): - The maximum number of catalogs to return. The - service may return fewer than this value. - If unspecified, at most 50 catalogs will be - returned. The maximum value is 1000; values - above 1000 will be coerced to 1000. - page_token (str): - A page token, received from a previous ``ListCatalogs`` - call. Provide this to retrieve the subsequent page. - - When paginating, all other parameters provided to - ``ListCatalogs`` must match the call that provided the page - token. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListCatalogsResponse(proto.Message): - r"""Response message for the ListCatalogs method. - - Attributes: - catalogs (MutableSequence[google.cloud.bigquery_biglake_v1.types.Catalog]): - The catalogs from the specified project. 
- next_page_token (str): - A token, which can be sent as ``page_token`` to retrieve the - next page. If this field is omitted, there are no subsequent - pages. - """ - - @property - def raw_page(self): - return self - - catalogs: MutableSequence['Catalog'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Catalog', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class CreateDatabaseRequest(proto.Message): - r"""Request message for the CreateDatabase method. - - Attributes: - parent (str): - Required. The parent resource where this database will be - created. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} - database (google.cloud.bigquery_biglake_v1.types.Database): - Required. The database to create. The ``name`` field does - not need to be provided. - database_id (str): - Required. The ID to use for the database, - which will become the final component of the - database's resource name. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - database: 'Database' = proto.Field( - proto.MESSAGE, - number=2, - message='Database', - ) - database_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class DeleteDatabaseRequest(proto.Message): - r"""Request message for the DeleteDatabase method. - - Attributes: - name (str): - Required. The name of the database to delete. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class UpdateDatabaseRequest(proto.Message): - r"""Request message for the UpdateDatabase method. - - Attributes: - database (google.cloud.bigquery_biglake_v1.types.Database): - Required. The database to update. - - The database's ``name`` field is used to identify the - database to update. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} - update_mask (google.protobuf.field_mask_pb2.FieldMask): - The list of fields to update. - - For the ``FieldMask`` definition, see - https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask - If not set, defaults to all of the fields that are allowed - to update. - """ - - database: 'Database' = proto.Field( - proto.MESSAGE, - number=1, - message='Database', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class GetDatabaseRequest(proto.Message): - r"""Request message for the GetDatabase method. - - Attributes: - name (str): - Required. The name of the database to retrieve. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListDatabasesRequest(proto.Message): - r"""Request message for the ListDatabases method. - - Attributes: - parent (str): - Required. The parent, which owns this collection of - databases. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} - page_size (int): - The maximum number of databases to return. - The service may return fewer than this value. If - unspecified, at most 50 databases will be - returned. The maximum value is 1000; values - above 1000 will be coerced to 1000. - page_token (str): - A page token, received from a previous ``ListDatabases`` - call. Provide this to retrieve the subsequent page. 
- - When paginating, all other parameters provided to - ``ListDatabases`` must match the call that provided the page - token. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListDatabasesResponse(proto.Message): - r"""Response message for the ListDatabases method. - - Attributes: - databases (MutableSequence[google.cloud.bigquery_biglake_v1.types.Database]): - The databases from the specified catalog. - next_page_token (str): - A token, which can be sent as ``page_token`` to retrieve the - next page. If this field is omitted, there are no subsequent - pages. - """ - - @property - def raw_page(self): - return self - - databases: MutableSequence['Database'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Database', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class CreateTableRequest(proto.Message): - r"""Request message for the CreateTable method. - - Attributes: - parent (str): - Required. The parent resource where this table will be - created. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} - table (google.cloud.bigquery_biglake_v1.types.Table): - Required. The table to create. The ``name`` field does not - need to be provided for the table creation. - table_id (str): - Required. The ID to use for the table, which - will become the final component of the table's - resource name. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - table: 'Table' = proto.Field( - proto.MESSAGE, - number=2, - message='Table', - ) - table_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class DeleteTableRequest(proto.Message): - r"""Request message for the DeleteTable method. - - Attributes: - name (str): - Required. The name of the table to delete. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class UpdateTableRequest(proto.Message): - r"""Request message for the UpdateTable method. - - Attributes: - table (google.cloud.bigquery_biglake_v1.types.Table): - Required. The table to update. - - The table's ``name`` field is used to identify the table to - update. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} - update_mask (google.protobuf.field_mask_pb2.FieldMask): - The list of fields to update. - - For the ``FieldMask`` definition, see - https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask - If not set, defaults to all of the fields that are allowed - to update. - """ - - table: 'Table' = proto.Field( - proto.MESSAGE, - number=1, - message='Table', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class RenameTableRequest(proto.Message): - r"""Request message for the RenameTable method in - MetastoreService - - Attributes: - name (str): - Required. The table's ``name`` field is used to identify the - table to rename. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} - new_name (str): - Required. The new ``name`` for the specified table, must be - in the same database. 
Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - new_name: str = proto.Field( - proto.STRING, - number=2, - ) - - -class GetTableRequest(proto.Message): - r"""Request message for the GetTable method. - - Attributes: - name (str): - Required. The name of the table to retrieve. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListTablesRequest(proto.Message): - r"""Request message for the ListTables method. - - Attributes: - parent (str): - Required. The parent, which owns this collection of tables. - Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} - page_size (int): - The maximum number of tables to return. The - service may return fewer than this value. - If unspecified, at most 50 tables will be - returned. The maximum value is 1000; values - above 1000 will be coerced to 1000. - page_token (str): - A page token, received from a previous ``ListTables`` call. - Provide this to retrieve the subsequent page. - - When paginating, all other parameters provided to - ``ListTables`` must match the call that provided the page - token. - view (google.cloud.bigquery_biglake_v1.types.TableView): - The view for the returned tables. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - view: 'TableView' = proto.Field( - proto.ENUM, - number=4, - enum='TableView', - ) - - -class ListTablesResponse(proto.Message): - r"""Response message for the ListTables method. - - Attributes: - tables (MutableSequence[google.cloud.bigquery_biglake_v1.types.Table]): - The tables from the specified database. - next_page_token (str): - A token, which can be sent as ``page_token`` to retrieve the - next page. If this field is omitted, there are no subsequent - pages. - """ - - @property - def raw_page(self): - return self - - tables: MutableSequence['Table'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Table', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class HiveDatabaseOptions(proto.Message): - r"""Options of a Hive database. - - Attributes: - location_uri (str): - Cloud Storage folder URI where the database - data is stored, starting with "gs://". - parameters (MutableMapping[str, str]): - Stores user supplied Hive database - parameters. - """ - - location_uri: str = proto.Field( - proto.STRING, - number=1, - ) - parameters: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=2, - ) - - -class HiveTableOptions(proto.Message): - r"""Options of a Hive table. - - Attributes: - parameters (MutableMapping[str, str]): - Stores user supplied Hive table parameters. - table_type (str): - Hive table type. For example, MANAGED_TABLE, EXTERNAL_TABLE. - storage_descriptor (google.cloud.bigquery_biglake_v1.types.HiveTableOptions.StorageDescriptor): - Stores physical storage information of the - data. - """ - - class SerDeInfo(proto.Message): - r"""Serializer and deserializer information. - - Attributes: - serialization_lib (str): - The fully qualified Java class name of the - serialization library. 
- """ - - serialization_lib: str = proto.Field( - proto.STRING, - number=1, - ) - - class StorageDescriptor(proto.Message): - r"""Stores physical storage information of the data. - - Attributes: - location_uri (str): - Cloud Storage folder URI where the table data - is stored, starting with "gs://". - input_format (str): - The fully qualified Java class name of the - input format. - output_format (str): - The fully qualified Java class name of the - output format. - serde_info (google.cloud.bigquery_biglake_v1.types.HiveTableOptions.SerDeInfo): - Serializer and deserializer information. - """ - - location_uri: str = proto.Field( - proto.STRING, - number=1, - ) - input_format: str = proto.Field( - proto.STRING, - number=2, - ) - output_format: str = proto.Field( - proto.STRING, - number=3, - ) - serde_info: 'HiveTableOptions.SerDeInfo' = proto.Field( - proto.MESSAGE, - number=4, - message='HiveTableOptions.SerDeInfo', - ) - - parameters: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=1, - ) - table_type: str = proto.Field( - proto.STRING, - number=2, - ) - storage_descriptor: StorageDescriptor = proto.Field( - proto.MESSAGE, - number=3, - message=StorageDescriptor, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/mypy.ini b/owl-bot-staging/google-cloud-bigquery-biglake/v1/mypy.ini deleted file mode 100644 index 574c5aed394b..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/noxfile.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/noxfile.py deleted file mode 100644 index b53ff787cf69..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/noxfile.py +++ /dev/null @@ -1,280 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -import pathlib -import re -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", - "3.12", - "3.13", -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = 'google-cloud-bigquery-biglake' - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.13" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "prerelease_deps", -] - -@nox.session(python=ALL_PYTHON) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def unit(session, protobuf_implementation): - """Run the unit test suite.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") - - # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. - # The 'cpp' implementation requires Protobuf<4. - if protobuf_implementation == "cpp": - session.install("protobuf<4") - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/bigquery_biglake_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - -@nox.session(python=ALL_PYTHON[-1]) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def prerelease_deps(session, protobuf_implementation): - """Run the unit test suite against pre-release versions of dependencies.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - # Install test environment dependencies - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - - # Install the package without dependencies - session.install('-e', '.', '--no-deps') - - # We test the minimum dependency versions using the minimum Python - # version so the lowest python runtime that we test has a corresponding constraints - # file, located at `testing/constraints-<minimum python version>.txt`, which contains all of the - # dependencies and extras. - with open( - CURRENT_DIRECTORY - / "testing" - / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines.
- constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - session.install(*constraints_deps) - - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 - "grpcio!=1.67.0rc1", - "grpcio-status", - "protobuf", - "proto-plus", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) - - # Remaining dependencies - other_deps = [ - "requests", - ] - session.install(*other_deps) - - # Print out prerelease package versions - - session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") - session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("python", "-c", "import grpc; print(grpc.__version__)") - session.run( - "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" - ) - session.run( - "python", "-c", "import proto; print(proto.__version__)" - ) - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/bigquery_biglake_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. - """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. 
- """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_create_catalog_async.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_create_catalog_async.py deleted file mode 100644 index b94c8a21190e..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_create_catalog_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateCatalog -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1_generated_MetastoreService_CreateCatalog_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1 - - -async def sample_create_catalog(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.CreateCatalogRequest( - parent="parent_value", - catalog_id="catalog_id_value", - ) - - # Make the request - response = await client.create_catalog(request=request) - - # Handle the response - print(response) - -# [END biglake_v1_generated_MetastoreService_CreateCatalog_async] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_create_catalog_sync.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_create_catalog_sync.py deleted file mode 100644 index 4dd00bc3bfda..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_create_catalog_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateCatalog -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1_generated_MetastoreService_CreateCatalog_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1 - - -def sample_create_catalog(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.CreateCatalogRequest( - parent="parent_value", - catalog_id="catalog_id_value", - ) - - # Make the request - response = client.create_catalog(request=request) - - # Handle the response - print(response) - -# [END biglake_v1_generated_MetastoreService_CreateCatalog_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_create_database_async.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_create_database_async.py deleted file mode 100644 index b944c6f9702e..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_create_database_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDatabase -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1_generated_MetastoreService_CreateDatabase_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1 - - -async def sample_create_database(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.CreateDatabaseRequest( - parent="parent_value", - database_id="database_id_value", - ) - - # Make the request - response = await client.create_database(request=request) - - # Handle the response - print(response) - -# [END biglake_v1_generated_MetastoreService_CreateDatabase_async] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_create_database_sync.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_create_database_sync.py deleted file mode 100644 index 09a6fa551cb4..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_create_database_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDatabase -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1_generated_MetastoreService_CreateDatabase_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1 - - -def sample_create_database(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.CreateDatabaseRequest( - parent="parent_value", - database_id="database_id_value", - ) - - # Make the request - response = client.create_database(request=request) - - # Handle the response - print(response) - -# [END biglake_v1_generated_MetastoreService_CreateDatabase_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_create_table_async.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_create_table_async.py deleted file mode 100644 index 65c3d5640731..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_create_table_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateTable -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1_generated_MetastoreService_CreateTable_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1 - - -async def sample_create_table(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.CreateTableRequest( - parent="parent_value", - table_id="table_id_value", - ) - - # Make the request - response = await client.create_table(request=request) - - # Handle the response - print(response) - -# [END biglake_v1_generated_MetastoreService_CreateTable_async] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_create_table_sync.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_create_table_sync.py deleted file mode 100644 index 94702c896529..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_create_table_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateTable -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1_generated_MetastoreService_CreateTable_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1 - - -def sample_create_table(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.CreateTableRequest( - parent="parent_value", - table_id="table_id_value", - ) - - # Make the request - response = client.create_table(request=request) - - # Handle the response - print(response) - -# [END biglake_v1_generated_MetastoreService_CreateTable_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_delete_catalog_async.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_delete_catalog_async.py deleted file mode 100644 index 4bd565a6538e..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_delete_catalog_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteCatalog -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1_generated_MetastoreService_DeleteCatalog_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1 - - -async def sample_delete_catalog(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.DeleteCatalogRequest( - name="name_value", - ) - - # Make the request - response = await client.delete_catalog(request=request) - - # Handle the response - print(response) - -# [END biglake_v1_generated_MetastoreService_DeleteCatalog_async] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_delete_catalog_sync.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_delete_catalog_sync.py deleted file mode 100644 index 0bb4e02e0a2d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_delete_catalog_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteCatalog -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1_generated_MetastoreService_DeleteCatalog_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1 - - -def sample_delete_catalog(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.DeleteCatalogRequest( - name="name_value", - ) - - # Make the request - response = client.delete_catalog(request=request) - - # Handle the response - print(response) - -# [END biglake_v1_generated_MetastoreService_DeleteCatalog_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_delete_database_async.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_delete_database_async.py deleted file mode 100644 index 5846716b0e49..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_delete_database_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDatabase -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1_generated_MetastoreService_DeleteDatabase_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1 - - -async def sample_delete_database(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.DeleteDatabaseRequest( - name="name_value", - ) - - # Make the request - response = await client.delete_database(request=request) - - # Handle the response - print(response) - -# [END biglake_v1_generated_MetastoreService_DeleteDatabase_async] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_delete_database_sync.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_delete_database_sync.py deleted file mode 100644 index cd1242468c2a..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_delete_database_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDatabase -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1_generated_MetastoreService_DeleteDatabase_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1 - - -def sample_delete_database(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.DeleteDatabaseRequest( - name="name_value", - ) - - # Make the request - response = client.delete_database(request=request) - - # Handle the response - print(response) - -# [END biglake_v1_generated_MetastoreService_DeleteDatabase_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_delete_table_async.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_delete_table_async.py deleted file mode 100644 index 331a241afb6d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_delete_table_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteTable -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1_generated_MetastoreService_DeleteTable_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1 - - -async def sample_delete_table(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.DeleteTableRequest( - name="name_value", - ) - - # Make the request - response = await client.delete_table(request=request) - - # Handle the response - print(response) - -# [END biglake_v1_generated_MetastoreService_DeleteTable_async] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_delete_table_sync.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_delete_table_sync.py deleted file mode 100644 index 2dd05dc5d038..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_delete_table_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteTable -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1_generated_MetastoreService_DeleteTable_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1 - - -def sample_delete_table(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.DeleteTableRequest( - name="name_value", - ) - - # Make the request - response = client.delete_table(request=request) - - # Handle the response - print(response) - -# [END biglake_v1_generated_MetastoreService_DeleteTable_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_get_catalog_async.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_get_catalog_async.py deleted file mode 100644 index e00ec68cbcea..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_get_catalog_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetCatalog -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1_generated_MetastoreService_GetCatalog_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1 - - -async def sample_get_catalog(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.GetCatalogRequest( - name="name_value", - ) - - # Make the request - response = await client.get_catalog(request=request) - - # Handle the response - print(response) - -# [END biglake_v1_generated_MetastoreService_GetCatalog_async] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_get_catalog_sync.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_get_catalog_sync.py deleted file mode 100644 index 85ec57038997..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_get_catalog_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetCatalog -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1_generated_MetastoreService_GetCatalog_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1 - - -def sample_get_catalog(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.GetCatalogRequest( - name="name_value", - ) - - # Make the request - response = client.get_catalog(request=request) - - # Handle the response - print(response) - -# [END biglake_v1_generated_MetastoreService_GetCatalog_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_get_database_async.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_get_database_async.py deleted file mode 100644 index 9a9468ff1695..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_get_database_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDatabase -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1_generated_MetastoreService_GetDatabase_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1 - - -async def sample_get_database(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.GetDatabaseRequest( - name="name_value", - ) - - # Make the request - response = await client.get_database(request=request) - - # Handle the response - print(response) - -# [END biglake_v1_generated_MetastoreService_GetDatabase_async] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_get_database_sync.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_get_database_sync.py deleted file mode 100644 index fc4ca86c2256..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_get_database_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDatabase -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1_generated_MetastoreService_GetDatabase_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1 - - -def sample_get_database(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.GetDatabaseRequest( - name="name_value", - ) - - # Make the request - response = client.get_database(request=request) - - # Handle the response - print(response) - -# [END biglake_v1_generated_MetastoreService_GetDatabase_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_get_table_async.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_get_table_async.py deleted file mode 100644 index 0a014a1b4cae..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_get_table_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetTable -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1_generated_MetastoreService_GetTable_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1 - - -async def sample_get_table(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.GetTableRequest( - name="name_value", - ) - - # Make the request - response = await client.get_table(request=request) - - # Handle the response - print(response) - -# [END biglake_v1_generated_MetastoreService_GetTable_async] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_get_table_sync.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_get_table_sync.py deleted file mode 100644 index 6dbc80f95fd9..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_get_table_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetTable -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1_generated_MetastoreService_GetTable_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1 - - -def sample_get_table(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.GetTableRequest( - name="name_value", - ) - - # Make the request - response = client.get_table(request=request) - - # Handle the response - print(response) - -# [END biglake_v1_generated_MetastoreService_GetTable_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_list_catalogs_async.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_list_catalogs_async.py deleted file mode 100644 index 7120f28b7e3c..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_list_catalogs_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListCatalogs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1_generated_MetastoreService_ListCatalogs_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1 - - -async def sample_list_catalogs(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.ListCatalogsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_catalogs(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END biglake_v1_generated_MetastoreService_ListCatalogs_async] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_list_catalogs_sync.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_list_catalogs_sync.py deleted file mode 100644 index f8d32126fa1f..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_list_catalogs_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListCatalogs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1_generated_MetastoreService_ListCatalogs_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1 - - -def sample_list_catalogs(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.ListCatalogsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_catalogs(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END biglake_v1_generated_MetastoreService_ListCatalogs_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_list_databases_async.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_list_databases_async.py deleted file mode 100644 index a9a32c673ee8..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_list_databases_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDatabases -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1_generated_MetastoreService_ListDatabases_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1 - - -async def sample_list_databases(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.ListDatabasesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_databases(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END biglake_v1_generated_MetastoreService_ListDatabases_async] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_list_databases_sync.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_list_databases_sync.py deleted file mode 100644 index 61a37934fdba..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_list_databases_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDatabases -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1_generated_MetastoreService_ListDatabases_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1 - - -def sample_list_databases(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.ListDatabasesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_databases(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END biglake_v1_generated_MetastoreService_ListDatabases_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_list_tables_async.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_list_tables_async.py deleted file mode 100644 index 17d77ab2b400..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_list_tables_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListTables -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1_generated_MetastoreService_ListTables_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1 - - -async def sample_list_tables(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.ListTablesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_tables(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END biglake_v1_generated_MetastoreService_ListTables_async] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_list_tables_sync.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_list_tables_sync.py deleted file mode 100644 index e8f7fe4341b3..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_list_tables_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListTables -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1_generated_MetastoreService_ListTables_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1 - - -def sample_list_tables(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.ListTablesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_tables(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END biglake_v1_generated_MetastoreService_ListTables_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_rename_table_async.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_rename_table_async.py deleted file mode 100644 index 87fbdf0f7067..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_rename_table_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RenameTable -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1_generated_MetastoreService_RenameTable_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1 - - -async def sample_rename_table(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.RenameTableRequest( - name="name_value", - new_name="new_name_value", - ) - - # Make the request - response = await client.rename_table(request=request) - - # Handle the response - print(response) - -# [END biglake_v1_generated_MetastoreService_RenameTable_async] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_rename_table_sync.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_rename_table_sync.py deleted file mode 100644 index 53f62fa2ffa3..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_rename_table_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RenameTable -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1_generated_MetastoreService_RenameTable_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1 - - -def sample_rename_table(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.RenameTableRequest( - name="name_value", - new_name="new_name_value", - ) - - # Make the request - response = client.rename_table(request=request) - - # Handle the response - print(response) - -# [END biglake_v1_generated_MetastoreService_RenameTable_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_update_database_async.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_update_database_async.py deleted file mode 100644 index 05726bdbf3f9..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_update_database_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDatabase -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1_generated_MetastoreService_UpdateDatabase_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1 - - -async def sample_update_database(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.UpdateDatabaseRequest( - ) - - # Make the request - response = await client.update_database(request=request) - - # Handle the response - print(response) - -# [END biglake_v1_generated_MetastoreService_UpdateDatabase_async] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_update_database_sync.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_update_database_sync.py deleted file mode 100644 index 5f64c4651f73..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_update_database_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDatabase -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1_generated_MetastoreService_UpdateDatabase_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1 - - -def sample_update_database(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.UpdateDatabaseRequest( - ) - - # Make the request - response = client.update_database(request=request) - - # Handle the response - print(response) - -# [END biglake_v1_generated_MetastoreService_UpdateDatabase_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_update_table_async.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_update_table_async.py deleted file mode 100644 index ee5f96ff214b..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_update_table_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateTable -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1_generated_MetastoreService_UpdateTable_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1 - - -async def sample_update_table(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.UpdateTableRequest( - ) - - # Make the request - response = await client.update_table(request=request) - - # Handle the response - print(response) - -# [END biglake_v1_generated_MetastoreService_UpdateTable_async] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_update_table_sync.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_update_table_sync.py deleted file mode 100644 index 4321f1656054..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/biglake_v1_generated_metastore_service_update_table_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateTable -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1_generated_MetastoreService_UpdateTable_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1 - - -def sample_update_table(): - # Create a client - client = bigquery_biglake_v1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1.UpdateTableRequest( - ) - - # Make the request - response = client.update_table(request=request) - - # Handle the response - print(response) - -# [END biglake_v1_generated_MetastoreService_UpdateTable_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1.json b/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1.json deleted file mode 100644 index 73ac8861a130..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1.json +++ /dev/null @@ -1,2502 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.bigquery.biglake.v1", - "version": "v1" - } - ], - "language": "PYTHON", - "name": "google-cloud-bigquery-biglake", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceAsyncClient", - "shortName": "MetastoreServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceAsyncClient.create_catalog", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.CreateCatalog", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "CreateCatalog" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1.types.CreateCatalogRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "catalog", - "type": "google.cloud.bigquery_biglake_v1.types.Catalog" - }, - { - "name": "catalog_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1.types.Catalog", - "shortName": "create_catalog" - }, - "description": "Sample for CreateCatalog", - "file": "biglake_v1_generated_metastore_service_create_catalog_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1_generated_MetastoreService_CreateCatalog_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1_generated_metastore_service_create_catalog_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceClient", - "shortName": "MetastoreServiceClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceClient.create_catalog", - "method": { - "fullName": 
"google.cloud.bigquery.biglake.v1.MetastoreService.CreateCatalog", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "CreateCatalog" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1.types.CreateCatalogRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "catalog", - "type": "google.cloud.bigquery_biglake_v1.types.Catalog" - }, - { - "name": "catalog_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1.types.Catalog", - "shortName": "create_catalog" - }, - "description": "Sample for CreateCatalog", - "file": "biglake_v1_generated_metastore_service_create_catalog_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1_generated_MetastoreService_CreateCatalog_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1_generated_metastore_service_create_catalog_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceAsyncClient", - "shortName": "MetastoreServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceAsyncClient.create_database", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.CreateDatabase", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "CreateDatabase" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1.types.CreateDatabaseRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "database", - "type": "google.cloud.bigquery_biglake_v1.types.Database" - }, - { - "name": "database_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1.types.Database", - "shortName": "create_database" - }, - "description": "Sample for CreateDatabase", - "file": "biglake_v1_generated_metastore_service_create_database_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1_generated_MetastoreService_CreateDatabase_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1_generated_metastore_service_create_database_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": 
"google.cloud.bigquery_biglake_v1.MetastoreServiceClient", - "shortName": "MetastoreServiceClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceClient.create_database", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.CreateDatabase", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "CreateDatabase" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1.types.CreateDatabaseRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "database", - "type": "google.cloud.bigquery_biglake_v1.types.Database" - }, - { - "name": "database_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1.types.Database", - "shortName": "create_database" - }, - "description": "Sample for CreateDatabase", - "file": "biglake_v1_generated_metastore_service_create_database_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1_generated_MetastoreService_CreateDatabase_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1_generated_metastore_service_create_database_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceAsyncClient", - "shortName": "MetastoreServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceAsyncClient.create_table", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.CreateTable", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "CreateTable" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1.types.CreateTableRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "table", - "type": "google.cloud.bigquery_biglake_v1.types.Table" - }, - { - "name": "table_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1.types.Table", - "shortName": "create_table" - }, - "description": "Sample for CreateTable", - "file": "biglake_v1_generated_metastore_service_create_table_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1_generated_MetastoreService_CreateTable_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - 
"type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1_generated_metastore_service_create_table_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceClient", - "shortName": "MetastoreServiceClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceClient.create_table", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.CreateTable", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "CreateTable" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1.types.CreateTableRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "table", - "type": "google.cloud.bigquery_biglake_v1.types.Table" - }, - { - "name": "table_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1.types.Table", - "shortName": "create_table" - }, - "description": "Sample for CreateTable", - "file": "biglake_v1_generated_metastore_service_create_table_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1_generated_MetastoreService_CreateTable_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1_generated_metastore_service_create_table_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceAsyncClient", - "shortName": "MetastoreServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceAsyncClient.delete_catalog", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.DeleteCatalog", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "DeleteCatalog" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1.types.DeleteCatalogRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1.types.Catalog", - "shortName": "delete_catalog" - }, - "description": "Sample for DeleteCatalog", - "file": "biglake_v1_generated_metastore_service_delete_catalog_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1_generated_MetastoreService_DeleteCatalog_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, 
- { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1_generated_metastore_service_delete_catalog_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceClient", - "shortName": "MetastoreServiceClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceClient.delete_catalog", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.DeleteCatalog", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "DeleteCatalog" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1.types.DeleteCatalogRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1.types.Catalog", - "shortName": "delete_catalog" - }, - "description": "Sample for DeleteCatalog", - "file": "biglake_v1_generated_metastore_service_delete_catalog_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1_generated_MetastoreService_DeleteCatalog_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1_generated_metastore_service_delete_catalog_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceAsyncClient", - "shortName": "MetastoreServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceAsyncClient.delete_database", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.DeleteDatabase", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "DeleteDatabase" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1.types.DeleteDatabaseRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1.types.Database", - "shortName": "delete_database" - }, - "description": "Sample for DeleteDatabase", - "file": "biglake_v1_generated_metastore_service_delete_database_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1_generated_MetastoreService_DeleteDatabase_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - 
], - "title": "biglake_v1_generated_metastore_service_delete_database_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceClient", - "shortName": "MetastoreServiceClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceClient.delete_database", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.DeleteDatabase", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "DeleteDatabase" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1.types.DeleteDatabaseRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1.types.Database", - "shortName": "delete_database" - }, - "description": "Sample for DeleteDatabase", - "file": "biglake_v1_generated_metastore_service_delete_database_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1_generated_MetastoreService_DeleteDatabase_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1_generated_metastore_service_delete_database_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceAsyncClient", - "shortName": "MetastoreServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceAsyncClient.delete_table", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.DeleteTable", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "DeleteTable" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1.types.DeleteTableRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1.types.Table", - "shortName": "delete_table" - }, - "description": "Sample for DeleteTable", - "file": "biglake_v1_generated_metastore_service_delete_table_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1_generated_MetastoreService_DeleteTable_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1_generated_metastore_service_delete_table_async.py" - }, - 
{ - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceClient", - "shortName": "MetastoreServiceClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceClient.delete_table", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.DeleteTable", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "DeleteTable" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1.types.DeleteTableRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1.types.Table", - "shortName": "delete_table" - }, - "description": "Sample for DeleteTable", - "file": "biglake_v1_generated_metastore_service_delete_table_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1_generated_MetastoreService_DeleteTable_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1_generated_metastore_service_delete_table_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceAsyncClient", - "shortName": "MetastoreServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceAsyncClient.get_catalog", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.GetCatalog", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "GetCatalog" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1.types.GetCatalogRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1.types.Catalog", - "shortName": "get_catalog" - }, - "description": "Sample for GetCatalog", - "file": "biglake_v1_generated_metastore_service_get_catalog_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1_generated_MetastoreService_GetCatalog_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1_generated_metastore_service_get_catalog_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": 
"google.cloud.bigquery_biglake_v1.MetastoreServiceClient", - "shortName": "MetastoreServiceClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceClient.get_catalog", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.GetCatalog", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "GetCatalog" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1.types.GetCatalogRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1.types.Catalog", - "shortName": "get_catalog" - }, - "description": "Sample for GetCatalog", - "file": "biglake_v1_generated_metastore_service_get_catalog_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1_generated_MetastoreService_GetCatalog_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1_generated_metastore_service_get_catalog_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceAsyncClient", - "shortName": "MetastoreServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceAsyncClient.get_database", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.GetDatabase", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "GetDatabase" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1.types.GetDatabaseRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1.types.Database", - "shortName": "get_database" - }, - "description": "Sample for GetDatabase", - "file": "biglake_v1_generated_metastore_service_get_database_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1_generated_MetastoreService_GetDatabase_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1_generated_metastore_service_get_database_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceClient", - "shortName": "MetastoreServiceClient" - }, - "fullName": 
"google.cloud.bigquery_biglake_v1.MetastoreServiceClient.get_database", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.GetDatabase", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "GetDatabase" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1.types.GetDatabaseRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1.types.Database", - "shortName": "get_database" - }, - "description": "Sample for GetDatabase", - "file": "biglake_v1_generated_metastore_service_get_database_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1_generated_MetastoreService_GetDatabase_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1_generated_metastore_service_get_database_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceAsyncClient", - "shortName": "MetastoreServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceAsyncClient.get_table", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.GetTable", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "GetTable" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1.types.GetTableRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1.types.Table", - "shortName": "get_table" - }, - "description": "Sample for GetTable", - "file": "biglake_v1_generated_metastore_service_get_table_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1_generated_MetastoreService_GetTable_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1_generated_metastore_service_get_table_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceClient", - "shortName": "MetastoreServiceClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceClient.get_table", - "method": { - "fullName": 
"google.cloud.bigquery.biglake.v1.MetastoreService.GetTable", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "GetTable" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1.types.GetTableRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1.types.Table", - "shortName": "get_table" - }, - "description": "Sample for GetTable", - "file": "biglake_v1_generated_metastore_service_get_table_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1_generated_MetastoreService_GetTable_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1_generated_metastore_service_get_table_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceAsyncClient", - "shortName": "MetastoreServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceAsyncClient.list_catalogs", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.ListCatalogs", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "ListCatalogs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1.types.ListCatalogsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1.services.metastore_service.pagers.ListCatalogsAsyncPager", - "shortName": "list_catalogs" - }, - "description": "Sample for ListCatalogs", - "file": "biglake_v1_generated_metastore_service_list_catalogs_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1_generated_MetastoreService_ListCatalogs_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1_generated_metastore_service_list_catalogs_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceClient", - "shortName": "MetastoreServiceClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceClient.list_catalogs", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.ListCatalogs", - "service": { - 
"fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "ListCatalogs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1.types.ListCatalogsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1.services.metastore_service.pagers.ListCatalogsPager", - "shortName": "list_catalogs" - }, - "description": "Sample for ListCatalogs", - "file": "biglake_v1_generated_metastore_service_list_catalogs_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1_generated_MetastoreService_ListCatalogs_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1_generated_metastore_service_list_catalogs_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceAsyncClient", - "shortName": "MetastoreServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceAsyncClient.list_databases", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.ListDatabases", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "ListDatabases" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1.types.ListDatabasesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1.services.metastore_service.pagers.ListDatabasesAsyncPager", - "shortName": "list_databases" - }, - "description": "Sample for ListDatabases", - "file": "biglake_v1_generated_metastore_service_list_databases_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1_generated_MetastoreService_ListDatabases_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1_generated_metastore_service_list_databases_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceClient", - "shortName": "MetastoreServiceClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceClient.list_databases", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.ListDatabases", - "service": { 
- "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "ListDatabases" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1.types.ListDatabasesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1.services.metastore_service.pagers.ListDatabasesPager", - "shortName": "list_databases" - }, - "description": "Sample for ListDatabases", - "file": "biglake_v1_generated_metastore_service_list_databases_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1_generated_MetastoreService_ListDatabases_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1_generated_metastore_service_list_databases_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceAsyncClient", - "shortName": "MetastoreServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceAsyncClient.list_tables", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.ListTables", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "ListTables" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1.types.ListTablesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1.services.metastore_service.pagers.ListTablesAsyncPager", - "shortName": "list_tables" - }, - "description": "Sample for ListTables", - "file": "biglake_v1_generated_metastore_service_list_tables_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1_generated_MetastoreService_ListTables_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1_generated_metastore_service_list_tables_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceClient", - "shortName": "MetastoreServiceClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceClient.list_tables", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.ListTables", - "service": { - "fullName": 
"google.cloud.bigquery.biglake.v1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "ListTables" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1.types.ListTablesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1.services.metastore_service.pagers.ListTablesPager", - "shortName": "list_tables" - }, - "description": "Sample for ListTables", - "file": "biglake_v1_generated_metastore_service_list_tables_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1_generated_MetastoreService_ListTables_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1_generated_metastore_service_list_tables_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceAsyncClient", - "shortName": "MetastoreServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceAsyncClient.rename_table", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.RenameTable", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "RenameTable" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1.types.RenameTableRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "new_name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1.types.Table", - "shortName": "rename_table" - }, - "description": "Sample for RenameTable", - "file": "biglake_v1_generated_metastore_service_rename_table_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1_generated_MetastoreService_RenameTable_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1_generated_metastore_service_rename_table_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceClient", - "shortName": "MetastoreServiceClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceClient.rename_table", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.RenameTable", - "service": { - "fullName": 
"google.cloud.bigquery.biglake.v1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "RenameTable" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1.types.RenameTableRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "new_name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1.types.Table", - "shortName": "rename_table" - }, - "description": "Sample for RenameTable", - "file": "biglake_v1_generated_metastore_service_rename_table_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1_generated_MetastoreService_RenameTable_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1_generated_metastore_service_rename_table_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceAsyncClient", - "shortName": "MetastoreServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceAsyncClient.update_database", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.UpdateDatabase", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "UpdateDatabase" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1.types.UpdateDatabaseRequest" - }, - { - "name": "database", - "type": "google.cloud.bigquery_biglake_v1.types.Database" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1.types.Database", - "shortName": "update_database" - }, - "description": "Sample for UpdateDatabase", - "file": "biglake_v1_generated_metastore_service_update_database_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1_generated_MetastoreService_UpdateDatabase_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1_generated_metastore_service_update_database_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceClient", - "shortName": "MetastoreServiceClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceClient.update_database", - "method": { - 
"fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.UpdateDatabase", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "UpdateDatabase" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1.types.UpdateDatabaseRequest" - }, - { - "name": "database", - "type": "google.cloud.bigquery_biglake_v1.types.Database" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1.types.Database", - "shortName": "update_database" - }, - "description": "Sample for UpdateDatabase", - "file": "biglake_v1_generated_metastore_service_update_database_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1_generated_MetastoreService_UpdateDatabase_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1_generated_metastore_service_update_database_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceAsyncClient", - "shortName": "MetastoreServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceAsyncClient.update_table", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.UpdateTable", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "UpdateTable" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1.types.UpdateTableRequest" - }, - { - "name": "table", - "type": "google.cloud.bigquery_biglake_v1.types.Table" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1.types.Table", - "shortName": "update_table" - }, - "description": "Sample for UpdateTable", - "file": "biglake_v1_generated_metastore_service_update_table_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1_generated_MetastoreService_UpdateTable_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1_generated_metastore_service_update_table_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": 
"google.cloud.bigquery_biglake_v1.MetastoreServiceClient", - "shortName": "MetastoreServiceClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1.MetastoreServiceClient.update_table", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService.UpdateTable", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "UpdateTable" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1.types.UpdateTableRequest" - }, - { - "name": "table", - "type": "google.cloud.bigquery_biglake_v1.types.Table" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1.types.Table", - "shortName": "update_table" - }, - "description": "Sample for UpdateTable", - "file": "biglake_v1_generated_metastore_service_update_table_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1_generated_MetastoreService_UpdateTable_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1_generated_metastore_service_update_table_sync.py" - } - ] -} diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/scripts/fixup_bigquery_biglake_v1_keywords.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/scripts/fixup_bigquery_biglake_v1_keywords.py deleted file mode 100644 index f1eb92cf45a9..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/scripts/fixup_bigquery_biglake_v1_keywords.py +++ /dev/null @@ -1,190 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class bigquery_biglakeCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_catalog': ('parent', 'catalog', 'catalog_id', ), - 'create_database': ('parent', 'database', 'database_id', ), - 'create_table': ('parent', 'table', 'table_id', ), - 'delete_catalog': ('name', ), - 'delete_database': ('name', ), - 'delete_table': ('name', ), - 'get_catalog': ('name', ), - 'get_database': ('name', ), - 'get_table': ('name', ), - 'list_catalogs': ('parent', 'page_size', 'page_token', ), - 'list_databases': ('parent', 'page_size', 'page_token', ), - 'list_tables': ('parent', 'page_size', 'page_token', 'view', ), - 'rename_table': ('name', 'new_name', ), - 'update_database': ('database', 'update_mask', ), - 'update_table': ('table', 'update_mask', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=bigquery_biglakeCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. 
- updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the bigquery_biglake client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/setup.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/setup.py deleted file mode 100644 index 196451d4b5ad..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/setup.py +++ /dev/null @@ -1,98 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
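The fix-up script deleted above rewrites user code from the older flattened calling convention to the request-object convention, using its METHOD_TO_PARAMS table to map positional arguments onto named request fields while leaving the control parameters (retry, timeout, metadata) as keyword arguments. A minimal illustration of the rewrite it performs, using the create_catalog mapping from that table — the resource name, catalog object, and catalog ID here are hypothetical placeholders:

.. code-block:: python

    # Before: flattened positional arguments (old calling convention).
    client.create_catalog(
        "projects/my-project/locations/us",  # parent (hypothetical)
        catalog,                             # a bigquery_biglake_v1.types.Catalog
        "my_catalog",                        # catalog_id (hypothetical)
    )

    # After: a single 'request' dict built from METHOD_TO_PARAMS; control
    # parameters such as retry/timeout/metadata would remain keyword args.
    client.create_catalog(
        request={
            "parent": "projects/my-project/locations/us",
            "catalog": catalog,
            "catalog_id": "my_catalog",
        }
    )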
-# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-bigquery-biglake' - - -description = "Google Cloud Bigquery Biglake API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/bigquery_biglake/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", -] -extras = { -} -url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-biglake" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - extras_require=extras, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-bigquery-biglake/v1/testing/constraints-3.10.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/testing/constraints-3.10.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
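The deleted setup.py above derives the package version by scanning gapic_version.py for a quoted semantic version, then picks the PyPI development-status classifier from the leading digit. A small self-contained sketch of that extraction logic, using the same regular expression as the generated file — the `source` string is a stand-in for the real gapic_version.py contents:

.. code-block:: python

    import re

    # Stand-in for google/cloud/bigquery_biglake/gapic_version.py contents.
    source = '__version__ = "0.1.0"'

    # Same pattern as the generated setup.py: a double-quoted X.Y.Z string.
    version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", source)
    assert version_candidates == ["0.1.0"]
    version = version_candidates[0]

    # A leading "0" marks the package as Beta; anything else is Stable.
    release_status = (
        "Development Status :: 4 - Beta"
        if version[0] == "0"
        else "Development Status :: 5 - Production/Stable"
    )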
-google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-bigquery-biglake/v1/testing/constraints-3.11.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/testing/constraints-3.11.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-bigquery-biglake/v1/testing/constraints-3.12.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/testing/constraints-3.12.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-bigquery-biglake/v1/testing/constraints-3.13.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/testing/constraints-3.13.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-bigquery-biglake/v1/testing/constraints-3.7.txt deleted file mode 100644 index fc812592b0ee..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/testing/constraints-3.7.txt +++ /dev/null @@ -1,10 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -proto-plus==1.22.3 -protobuf==3.20.2 diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-bigquery-biglake/v1/testing/constraints-3.8.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/testing/constraints-3.8.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-bigquery-biglake/v1/testing/constraints-3.9.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/testing/constraints-3.9.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
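The constraints files above fall into two groups: the files for Python 3.8 through 3.13 deliberately leave ``google-api-core``, ``proto-plus``, and ``protobuf`` unpinned, while ``constraints-3.7.txt`` pins each dependency to the lower bound declared in ``setup.py`` so that a test run against it proves the floor versions still work. A typical way such a file is consumed (the exact CI invocation may differ; ``-c`` is pip's standard constraints flag):

.. code-block:: console

    pip install -e . -c testing/constraints-3.7.txt
    pytest tests/unit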
-google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/tests/__init__.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/tests/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/tests/unit/__init__.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/tests/unit/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/tests/unit/gapic/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/tests/unit/gapic/bigquery_biglake_v1/__init__.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/tests/unit/gapic/bigquery_biglake_v1/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/tests/unit/gapic/bigquery_biglake_v1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1/tests/unit/gapic/bigquery_biglake_v1/test_metastore_service.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1/tests/unit/gapic/bigquery_biglake_v1/test_metastore_service.py deleted file mode 100644 index b4d551bf59d3..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1/tests/unit/gapic/bigquery_biglake_v1/test_metastore_service.py +++ /dev/null @@ -1,12762 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.bigquery_biglake_v1.services.metastore_service import MetastoreServiceAsyncClient -from google.cloud.bigquery_biglake_v1.services.metastore_service import MetastoreServiceClient -from google.cloud.bigquery_biglake_v1.services.metastore_service import pagers -from google.cloud.bigquery_biglake_v1.services.metastore_service import transports -from google.cloud.bigquery_biglake_v1.types import metastore -from google.oauth2 import service_account -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -import google.auth - - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": "service account credentials", - "principal": 
"service-account@example.com", -} -CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert MetastoreServiceClient._get_default_mtls_endpoint(None) is None - assert MetastoreServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert MetastoreServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert MetastoreServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert MetastoreServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert MetastoreServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert MetastoreServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert MetastoreServiceClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert MetastoreServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - MetastoreServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert MetastoreServiceClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert MetastoreServiceClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert 
MetastoreServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - MetastoreServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert MetastoreServiceClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert MetastoreServiceClient._get_client_cert_source(None, False) is None - assert MetastoreServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert MetastoreServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert MetastoreServiceClient._get_client_cert_source(None, True) is mock_default_cert_source - assert MetastoreServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(MetastoreServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetastoreServiceClient)) -@mock.patch.object(MetastoreServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetastoreServiceAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = MetastoreServiceClient._DEFAULT_UNIVERSE - default_endpoint = MetastoreServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = MetastoreServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert MetastoreServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert MetastoreServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == MetastoreServiceClient.DEFAULT_MTLS_ENDPOINT - assert MetastoreServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert MetastoreServiceClient._get_api_endpoint(None, None, default_universe, "always") == MetastoreServiceClient.DEFAULT_MTLS_ENDPOINT - assert MetastoreServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == MetastoreServiceClient.DEFAULT_MTLS_ENDPOINT - assert MetastoreServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert MetastoreServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - MetastoreServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." 
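Taken together, ``test__read_environment_variables`` and ``test__get_api_endpoint`` above pin down how an explicit endpoint override, ``GOOGLE_API_USE_MTLS_ENDPOINT``, a client certificate source, and the universe domain interact. The following is a condensed restatement of those rules as plain Python, not the library's actual implementation (the exception type is simplified to ``ValueError``; the endpoint template matches this service's default host):

.. code-block:: python

    DEFAULT_TEMPLATE = "biglake.{UNIVERSE_DOMAIN}"
    MTLS_ENDPOINT = "biglake.mtls.googleapis.com"

    def resolve_endpoint(api_override, client_cert_source, universe, use_mtls_endpoint):
        # An explicit ClientOptions.api_endpoint always wins.
        if api_override is not None:
            return api_override
        # "always", or "auto" with a certificate available, selects mTLS --
        # which is only supported in the default googleapis.com universe.
        if use_mtls_endpoint == "always" or (
            use_mtls_endpoint == "auto" and client_cert_source is not None
        ):
            if universe != "googleapis.com":
                raise ValueError(
                    "mTLS is not supported in any universe other than googleapis.com."
                )
            return MTLS_ENDPOINT
        # Otherwise the default endpoint is templated on the universe domain.
        return DEFAULT_TEMPLATE.format(UNIVERSE_DOMAIN=universe)

    assert resolve_endpoint("foo.com", object(), "googleapis.com", "always") == "foo.com"
    assert resolve_endpoint(None, object(), "googleapis.com", "auto") == MTLS_ENDPOINT
    assert resolve_endpoint(None, None, "bar.com", "never") == "biglake.bar.com"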
- - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert MetastoreServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert MetastoreServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert MetastoreServiceClient._get_universe_domain(None, None) == MetastoreServiceClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - MetastoreServiceClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." - -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = MetastoreServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - -@pytest.mark.parametrize("error_code", [401,403,404,500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = MetastoreServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - -@pytest.mark.parametrize("client_class,transport_name", [ - (MetastoreServiceClient, "grpc"), - (MetastoreServiceAsyncClient, "grpc_asyncio"), - (MetastoreServiceClient, "rest"), -]) -def test_metastore_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'biglake.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://biglake.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.MetastoreServiceGrpcTransport, "grpc"), - (transports.MetastoreServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.MetastoreServiceRestTransport, "rest"), -]) -def test_metastore_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = 
transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (MetastoreServiceClient, "grpc"), - (MetastoreServiceAsyncClient, "grpc_asyncio"), - (MetastoreServiceClient, "rest"), -]) -def test_metastore_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'biglake.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://biglake.googleapis.com' - ) - - -def test_metastore_service_client_get_transport_class(): - transport = MetastoreServiceClient.get_transport_class() - available_transports = [ - transports.MetastoreServiceGrpcTransport, - transports.MetastoreServiceRestTransport, - ] - assert transport in available_transports - - transport = MetastoreServiceClient.get_transport_class("grpc") - assert transport == transports.MetastoreServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (MetastoreServiceClient, transports.MetastoreServiceGrpcTransport, "grpc"), - (MetastoreServiceAsyncClient, transports.MetastoreServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (MetastoreServiceClient, transports.MetastoreServiceRestTransport, "rest"), -]) -@mock.patch.object(MetastoreServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetastoreServiceClient)) -@mock.patch.object(MetastoreServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetastoreServiceAsyncClient)) -def test_metastore_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(MetastoreServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(MetastoreServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (MetastoreServiceClient, transports.MetastoreServiceGrpcTransport, "grpc", "true"), - 
(MetastoreServiceAsyncClient, transports.MetastoreServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (MetastoreServiceClient, transports.MetastoreServiceGrpcTransport, "grpc", "false"), - (MetastoreServiceAsyncClient, transports.MetastoreServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (MetastoreServiceClient, transports.MetastoreServiceRestTransport, "rest", "true"), - (MetastoreServiceClient, transports.MetastoreServiceRestTransport, "rest", "false"), -]) -@mock.patch.object(MetastoreServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetastoreServiceClient)) -@mock.patch.object(MetastoreServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetastoreServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_metastore_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - MetastoreServiceClient, MetastoreServiceAsyncClient -]) -@mock.patch.object(MetastoreServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetastoreServiceClient)) -@mock.patch.object(MetastoreServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetastoreServiceAsyncClient)) -def test_metastore_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - MetastoreServiceClient, MetastoreServiceAsyncClient -]) -@mock.patch.object(MetastoreServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetastoreServiceClient)) -@mock.patch.object(MetastoreServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetastoreServiceAsyncClient)) -def test_metastore_service_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = MetastoreServiceClient._DEFAULT_UNIVERSE - default_endpoint = MetastoreServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = MetastoreServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (MetastoreServiceClient, transports.MetastoreServiceGrpcTransport, "grpc"), - (MetastoreServiceAsyncClient, transports.MetastoreServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (MetastoreServiceClient, transports.MetastoreServiceRestTransport, "rest"), -]) -def test_metastore_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (MetastoreServiceClient, transports.MetastoreServiceGrpcTransport, "grpc", grpc_helpers), - (MetastoreServiceAsyncClient, transports.MetastoreServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (MetastoreServiceClient, transports.MetastoreServiceRestTransport, "rest", None), -]) -def test_metastore_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_metastore_service_client_client_options_from_dict(): - with mock.patch('google.cloud.bigquery_biglake_v1.services.metastore_service.transports.MetastoreServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = MetastoreServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (MetastoreServiceClient, transports.MetastoreServiceGrpcTransport, "grpc", grpc_helpers), - (MetastoreServiceAsyncClient, transports.MetastoreServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_metastore_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "biglake.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="biglake.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - metastore.CreateCatalogRequest, - dict, -]) -def test_create_catalog(request_type, transport: str = 'grpc'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_catalog), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Catalog( - name='name_value', - ) - response = client.create_catalog(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = metastore.CreateCatalogRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Catalog) - assert response.name == 'name_value' - - -def test_create_catalog_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = metastore.CreateCatalogRequest( - parent='parent_value', - catalog_id='catalog_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_catalog), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
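Each RPC test here is parametrized over both the proto-plus request class and plain ``dict``, and the empty-request tests simply send ``request_type()``. That works because proto-plus message constructors accept keyword fields or a mapping, and a bare constructor produces an all-defaults message. A small sketch using only the types module (the resource names are placeholders; no client or network is involved):

.. code-block:: python

    from google.cloud.bigquery_biglake_v1.types import metastore

    from_kwargs = metastore.CreateCatalogRequest(
        parent="projects/p/locations/us", catalog_id="my_catalog"
    )
    from_mapping = metastore.CreateCatalogRequest(
        {"parent": "projects/p/locations/us", "catalog_id": "my_catalog"}
    )

    # Both construction styles produce equal messages, and an empty
    # constructor equals an empty mapping.
    assert from_kwargs == from_mapping
    assert metastore.CreateCatalogRequest() == metastore.CreateCatalogRequest({})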
- client.create_catalog(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metastore.CreateCatalogRequest( - parent='parent_value', - catalog_id='catalog_id_value', - ) - -def test_create_catalog_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_catalog in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_catalog] = mock_rpc - request = {} - client.create_catalog(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_catalog(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_catalog_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_catalog in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_catalog] = mock_rpc - - request = {} - await client.create_catalog(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_catalog(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_catalog_async(transport: str = 'grpc_asyncio', request_type=metastore.CreateCatalogRequest): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_catalog), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(metastore.Catalog( - name='name_value', - )) - response = await client.create_catalog(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = metastore.CreateCatalogRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Catalog) - assert response.name == 'name_value' - - -@pytest.mark.asyncio -async def test_create_catalog_async_from_dict(): - await test_create_catalog_async(request_type=dict) - -def test_create_catalog_field_headers(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.CreateCatalogRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_catalog), - '__call__') as call: - call.return_value = metastore.Catalog() - client.create_catalog(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_catalog_field_headers_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.CreateCatalogRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_catalog), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Catalog()) - await client.create_catalog(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_catalog_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_catalog), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Catalog() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_catalog( - parent='parent_value', - catalog=metastore.Catalog(name='name_value'), - catalog_id='catalog_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].catalog - mock_val = metastore.Catalog(name='name_value') - assert arg == mock_val - arg = args[0].catalog_id - mock_val = 'catalog_id_value' - assert arg == mock_val - - -def test_create_catalog_flattened_error(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_catalog( - metastore.CreateCatalogRequest(), - parent='parent_value', - catalog=metastore.Catalog(name='name_value'), - catalog_id='catalog_id_value', - ) - -@pytest.mark.asyncio -async def test_create_catalog_flattened_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_catalog), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Catalog() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Catalog()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_catalog( - parent='parent_value', - catalog=metastore.Catalog(name='name_value'), - catalog_id='catalog_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].catalog - mock_val = metastore.Catalog(name='name_value') - assert arg == mock_val - arg = args[0].catalog_id - mock_val = 'catalog_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_catalog_flattened_error_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_catalog( - metastore.CreateCatalogRequest(), - parent='parent_value', - catalog=metastore.Catalog(name='name_value'), - catalog_id='catalog_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - metastore.DeleteCatalogRequest, - dict, -]) -def test_delete_catalog(request_type, transport: str = 'grpc'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_catalog), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Catalog( - name='name_value', - ) - response = client.delete_catalog(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = metastore.DeleteCatalogRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
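The ``*_flattened`` and ``*_flattened_error`` tests above encode the client's calling convention: each method accepts either a full request object or the flattened keyword fields, and supplying both raises ``ValueError`` before any RPC is attempted. A hedged usage sketch (resource names are placeholders; anonymous credentials keep the example offline):

.. code-block:: python

    from google.auth.credentials import AnonymousCredentials
    from google.cloud import bigquery_biglake_v1

    client = bigquery_biglake_v1.MetastoreServiceClient(
        credentials=AnonymousCredentials()
    )
    request = bigquery_biglake_v1.CreateCatalogRequest(
        parent="projects/p/locations/us", catalog_id="my_catalog"
    )

    # Either style alone is fine (calls commented out to avoid an RPC):
    # client.create_catalog(request=request)
    # client.create_catalog(parent=..., catalog=..., catalog_id=...)

    # Mixing them is rejected locally, before any request is sent:
    try:
        client.create_catalog(request, parent="projects/p/locations/us")
    except ValueError as exc:
        print(exc)  # explains that request and flattened fields are mutually exclusive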
- assert isinstance(response, metastore.Catalog) - assert response.name == 'name_value' - - -def test_delete_catalog_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = metastore.DeleteCatalogRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_catalog), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_catalog(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metastore.DeleteCatalogRequest( - name='name_value', - ) - -def test_delete_catalog_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_catalog in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_catalog] = mock_rpc - request = {} - client.delete_catalog(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_catalog(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_catalog_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_catalog in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_catalog] = mock_rpc - - request = {} - await client.delete_catalog(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.delete_catalog(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_catalog_async(transport: str = 'grpc_asyncio', request_type=metastore.DeleteCatalogRequest): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_catalog), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(metastore.Catalog( - name='name_value', - )) - response = await client.delete_catalog(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = metastore.DeleteCatalogRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Catalog) - assert response.name == 'name_value' - - -@pytest.mark.asyncio -async def test_delete_catalog_async_from_dict(): - await test_delete_catalog_async(request_type=dict) - -def test_delete_catalog_field_headers(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.DeleteCatalogRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_catalog), - '__call__') as call: - call.return_value = metastore.Catalog() - client.delete_catalog(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_catalog_field_headers_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.DeleteCatalogRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_catalog), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Catalog()) - await client.delete_catalog(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_catalog_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
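
# ---- [editor's note] -------------------------------------------------------
# The *_field_headers tests just above assert that any request field that is
# part of the HTTP/1.1 URI (here `name`) is mirrored into the
# `x-goog-request-params` metadata entry, which is how routing information
# travels on gRPC requests. The helper the generated code uses for this is
# public in google-api-core (this module calls it directly in the pager test
# further down):

from google.api_core.gapic_v1 import routing_header

# Returns ('x-goog-request-params', 'name=projects%2Fp%2Fcatalogs%2Fc');
# values are URL-encoded, which is why the tests use plain placeholder
# strings such as 'name_value'.
print(routing_header.to_grpc_metadata([("name", "projects/p/catalogs/c")]))
# ----------------------------------------------------------------------------
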
- with mock.patch.object( - type(client.transport.delete_catalog), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Catalog() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_catalog( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_catalog_flattened_error(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_catalog( - metastore.DeleteCatalogRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_catalog_flattened_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_catalog), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Catalog() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Catalog()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_catalog( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_catalog_flattened_error_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_catalog( - metastore.DeleteCatalogRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - metastore.GetCatalogRequest, - dict, -]) -def test_get_catalog(request_type, transport: str = 'grpc'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_catalog), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Catalog( - name='name_value', - ) - response = client.get_catalog(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = metastore.GetCatalogRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, metastore.Catalog) - assert response.name == 'name_value' - - -def test_get_catalog_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = metastore.GetCatalogRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_catalog), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_catalog(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metastore.GetCatalogRequest( - name='name_value', - ) - -def test_get_catalog_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_catalog in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_catalog] = mock_rpc - request = {} - client.get_catalog(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_catalog(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_catalog_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_catalog in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_catalog] = mock_rpc - - request = {} - await client.get_catalog(request) - - # Establish that the underlying gRPC stub method was called. 
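
# ---- [editor's note] -------------------------------------------------------
# The *_non_empty_request_with_auto_populated_field tests reference AIP-4235:
# request fields annotated as auto-populated (typically a UUID4 `request_id`)
# must be filled in client-side when the caller leaves them empty. The
# catalog requests here carry no such field, so the tests only prove that the
# request passes through unchanged. The guarded client behaviour, sketched
# with a hypothetical field name:

import uuid

def autopopulate(request, fields=("request_id",)):
    # Fill empty auto-populated fields with a fresh UUID4, per AIP-4235.
    for field in fields:
        if hasattr(request, field) and not getattr(request, field):
            setattr(request, field, str(uuid.uuid4()))
    return request
# ----------------------------------------------------------------------------
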
- assert mock_rpc.call_count == 1 - - await client.get_catalog(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_catalog_async(transport: str = 'grpc_asyncio', request_type=metastore.GetCatalogRequest): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_catalog), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(metastore.Catalog( - name='name_value', - )) - response = await client.get_catalog(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = metastore.GetCatalogRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Catalog) - assert response.name == 'name_value' - - -@pytest.mark.asyncio -async def test_get_catalog_async_from_dict(): - await test_get_catalog_async(request_type=dict) - -def test_get_catalog_field_headers(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.GetCatalogRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_catalog), - '__call__') as call: - call.return_value = metastore.Catalog() - client.get_catalog(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_catalog_field_headers_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.GetCatalogRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_catalog), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Catalog()) - await client.get_catalog(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_catalog_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_catalog), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Catalog() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_catalog( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_catalog_flattened_error(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_catalog( - metastore.GetCatalogRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_catalog_flattened_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_catalog), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Catalog() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Catalog()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_catalog( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_catalog_flattened_error_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_catalog( - metastore.GetCatalogRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - metastore.ListCatalogsRequest, - dict, -]) -def test_list_catalogs(request_type, transport: str = 'grpc'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_catalogs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.ListCatalogsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_catalogs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = metastore.ListCatalogsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListCatalogsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_catalogs_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = metastore.ListCatalogsRequest( - parent='parent_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_catalogs), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_catalogs(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metastore.ListCatalogsRequest( - parent='parent_value', - page_token='page_token_value', - ) - -def test_list_catalogs_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_catalogs in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_catalogs] = mock_rpc - request = {} - client.list_catalogs(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_catalogs(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_catalogs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_catalogs in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_catalogs] = mock_rpc - - request = {} - await client.list_catalogs(request) - - # Establish that the underlying gRPC stub method was called. 
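
# ---- [editor's note] -------------------------------------------------------
# Unlike the single-resource RPCs above, list_catalogs hands back a
# pagers.ListCatalogsPager (ListCatalogsAsyncPager on the async client)
# rather than the raw ListCatalogsResponse: the pager re-issues the RPC with
# each next_page_token so callers can iterate items directly. The core loop,
# reduced to an illustrative sketch:

def iterate_pages(rpc, request):
    while True:
        response = rpc(request)           # one ListCatalogsResponse per call
        yield from response.catalogs      # surface items, not pages
        if not response.next_page_token:  # empty token marks the last page
            break
        request.page_token = response.next_page_token
# ----------------------------------------------------------------------------
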
- assert mock_rpc.call_count == 1 - - await client.list_catalogs(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_catalogs_async(transport: str = 'grpc_asyncio', request_type=metastore.ListCatalogsRequest): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_catalogs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(metastore.ListCatalogsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_catalogs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = metastore.ListCatalogsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListCatalogsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_catalogs_async_from_dict(): - await test_list_catalogs_async(request_type=dict) - -def test_list_catalogs_field_headers(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.ListCatalogsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_catalogs), - '__call__') as call: - call.return_value = metastore.ListCatalogsResponse() - client.list_catalogs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_catalogs_field_headers_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.ListCatalogsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_catalogs), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.ListCatalogsResponse()) - await client.list_catalogs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_catalogs_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_catalogs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.ListCatalogsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_catalogs( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_catalogs_flattened_error(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_catalogs( - metastore.ListCatalogsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_catalogs_flattened_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_catalogs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.ListCatalogsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.ListCatalogsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_catalogs( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_catalogs_flattened_error_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_catalogs( - metastore.ListCatalogsRequest(), - parent='parent_value', - ) - - -def test_list_catalogs_pager(transport_name: str = "grpc"): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_catalogs), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - metastore.ListCatalogsResponse( - catalogs=[ - metastore.Catalog(), - metastore.Catalog(), - metastore.Catalog(), - ], - next_page_token='abc', - ), - metastore.ListCatalogsResponse( - catalogs=[], - next_page_token='def', - ), - metastore.ListCatalogsResponse( - catalogs=[ - metastore.Catalog(), - ], - next_page_token='ghi', - ), - metastore.ListCatalogsResponse( - catalogs=[ - metastore.Catalog(), - metastore.Catalog(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_catalogs(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, metastore.Catalog) - for i in results) -def test_list_catalogs_pages(transport_name: str = "grpc"): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_catalogs), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - metastore.ListCatalogsResponse( - catalogs=[ - metastore.Catalog(), - metastore.Catalog(), - metastore.Catalog(), - ], - next_page_token='abc', - ), - metastore.ListCatalogsResponse( - catalogs=[], - next_page_token='def', - ), - metastore.ListCatalogsResponse( - catalogs=[ - metastore.Catalog(), - ], - next_page_token='ghi', - ), - metastore.ListCatalogsResponse( - catalogs=[ - metastore.Catalog(), - metastore.Catalog(), - ], - ), - RuntimeError, - ) - pages = list(client.list_catalogs(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_catalogs_async_pager(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_catalogs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - metastore.ListCatalogsResponse( - catalogs=[ - metastore.Catalog(), - metastore.Catalog(), - metastore.Catalog(), - ], - next_page_token='abc', - ), - metastore.ListCatalogsResponse( - catalogs=[], - next_page_token='def', - ), - metastore.ListCatalogsResponse( - catalogs=[ - metastore.Catalog(), - ], - next_page_token='ghi', - ), - metastore.ListCatalogsResponse( - catalogs=[ - metastore.Catalog(), - metastore.Catalog(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_catalogs(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, metastore.Catalog) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_catalogs_async_pages(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
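
# ---- [editor's note] -------------------------------------------------------
# In these pager tests `call.side_effect` feeds the mock a sequence of canned
# pages: each client call pops the next value, and the trailing RuntimeError
# is a tripwire -- if the pager ever asked for a fifth page after the final
# (token-less) response, the test would blow up instead of looping silently.
# The mock mechanics in isolation:

from unittest import mock

fake = mock.Mock(side_effect=["page1", "page2", RuntimeError])
assert fake() == "page1"
assert fake() == "page2"
# A third call would raise RuntimeError, flagging an over-read.
# ----------------------------------------------------------------------------
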
- with mock.patch.object( - type(client.transport.list_catalogs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - metastore.ListCatalogsResponse( - catalogs=[ - metastore.Catalog(), - metastore.Catalog(), - metastore.Catalog(), - ], - next_page_token='abc', - ), - metastore.ListCatalogsResponse( - catalogs=[], - next_page_token='def', - ), - metastore.ListCatalogsResponse( - catalogs=[ - metastore.Catalog(), - ], - next_page_token='ghi', - ), - metastore.ListCatalogsResponse( - catalogs=[ - metastore.Catalog(), - metastore.Catalog(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_catalogs(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - metastore.CreateDatabaseRequest, - dict, -]) -def test_create_database(request_type, transport: str = 'grpc'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Database( - name='name_value', - type_=metastore.Database.Type.HIVE, - ) - response = client.create_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = metastore.CreateDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Database) - assert response.name == 'name_value' - assert response.type_ == metastore.Database.Type.HIVE - - -def test_create_database_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = metastore.CreateDatabaseRequest( - parent='parent_value', - database_id='database_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.create_database(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metastore.CreateDatabaseRequest( - parent='parent_value', - database_id='database_id_value', - ) - -def test_create_database_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_database in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_database] = mock_rpc - request = {} - client.create_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_database_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_database in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_database] = mock_rpc - - request = {} - await client.create_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_database_async(transport: str = 'grpc_asyncio', request_type=metastore.CreateDatabaseRequest): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database( - name='name_value', - type_=metastore.Database.Type.HIVE, - )) - response = await client.create_database(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = metastore.CreateDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Database) - assert response.name == 'name_value' - assert response.type_ == metastore.Database.Type.HIVE - - -@pytest.mark.asyncio -async def test_create_database_async_from_dict(): - await test_create_database_async(request_type=dict) - -def test_create_database_field_headers(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.CreateDatabaseRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - call.return_value = metastore.Database() - client.create_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_database_field_headers_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.CreateDatabaseRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database()) - await client.create_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_database_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Database() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_database( - parent='parent_value', - database=metastore.Database(hive_options=metastore.HiveDatabaseOptions(location_uri='location_uri_value')), - database_id='database_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
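
# ---- [editor's note] -------------------------------------------------------
# A recurring wrinkle in the async variants: the mocked stub cannot return a
# bare message, because the client awaits the call object. Wrapping the
# response in grpc_helpers_async.FakeUnaryUnaryCall (imported earlier in this
# module) yields an awaitable whose result is the message. The same effect
# with nothing but unittest.mock, as a self-contained sketch:

import asyncio
from unittest import mock

async def main():
    stub = mock.AsyncMock(return_value="response")  # calling it is awaitable
    assert await stub("request") == "response"

asyncio.run(main())
# ----------------------------------------------------------------------------
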
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].database - mock_val = metastore.Database(hive_options=metastore.HiveDatabaseOptions(location_uri='location_uri_value')) - assert arg == mock_val - arg = args[0].database_id - mock_val = 'database_id_value' - assert arg == mock_val - - -def test_create_database_flattened_error(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_database( - metastore.CreateDatabaseRequest(), - parent='parent_value', - database=metastore.Database(hive_options=metastore.HiveDatabaseOptions(location_uri='location_uri_value')), - database_id='database_id_value', - ) - -@pytest.mark.asyncio -async def test_create_database_flattened_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Database() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_database( - parent='parent_value', - database=metastore.Database(hive_options=metastore.HiveDatabaseOptions(location_uri='location_uri_value')), - database_id='database_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].database - mock_val = metastore.Database(hive_options=metastore.HiveDatabaseOptions(location_uri='location_uri_value')) - assert arg == mock_val - arg = args[0].database_id - mock_val = 'database_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_database_flattened_error_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_database( - metastore.CreateDatabaseRequest(), - parent='parent_value', - database=metastore.Database(hive_options=metastore.HiveDatabaseOptions(location_uri='location_uri_value')), - database_id='database_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - metastore.DeleteDatabaseRequest, - dict, -]) -def test_delete_database(request_type, transport: str = 'grpc'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_database), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = metastore.Database( - name='name_value', - type_=metastore.Database.Type.HIVE, - ) - response = client.delete_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = metastore.DeleteDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Database) - assert response.name == 'name_value' - assert response.type_ == metastore.Database.Type.HIVE - - -def test_delete_database_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = metastore.DeleteDatabaseRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_database), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_database(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metastore.DeleteDatabaseRequest( - name='name_value', - ) - -def test_delete_database_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_database in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_database] = mock_rpc - request = {} - client.delete_database(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_database_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_database in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_database] = mock_rpc - - request = {} - await client.delete_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_database_async(transport: str = 'grpc_asyncio', request_type=metastore.DeleteDatabaseRequest): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database( - name='name_value', - type_=metastore.Database.Type.HIVE, - )) - response = await client.delete_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = metastore.DeleteDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Database) - assert response.name == 'name_value' - assert response.type_ == metastore.Database.Type.HIVE - - -@pytest.mark.asyncio -async def test_delete_database_async_from_dict(): - await test_delete_database_async(request_type=dict) - -def test_delete_database_field_headers(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.DeleteDatabaseRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_database), - '__call__') as call: - call.return_value = metastore.Database() - client.delete_database(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_database_field_headers_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.DeleteDatabaseRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_database), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database()) - await client.delete_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_database_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Database() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_database( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_database_flattened_error(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_database( - metastore.DeleteDatabaseRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_database_flattened_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Database() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_database( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_database_flattened_error_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_database( - metastore.DeleteDatabaseRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - metastore.UpdateDatabaseRequest, - dict, -]) -def test_update_database(request_type, transport: str = 'grpc'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Database( - name='name_value', - type_=metastore.Database.Type.HIVE, - ) - response = client.update_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = metastore.UpdateDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Database) - assert response.name == 'name_value' - assert response.type_ == metastore.Database.Type.HIVE - - -def test_update_database_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = metastore.UpdateDatabaseRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_database(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metastore.UpdateDatabaseRequest( - ) - -def test_update_database_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_database in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.update_database] = mock_rpc - request = {} - client.update_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_database_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_database in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_database] = mock_rpc - - request = {} - await client.update_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.update_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_database_async(transport: str = 'grpc_asyncio', request_type=metastore.UpdateDatabaseRequest): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database( - name='name_value', - type_=metastore.Database.Type.HIVE, - )) - response = await client.update_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = metastore.UpdateDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Database) - assert response.name == 'name_value' - assert response.type_ == metastore.Database.Type.HIVE - - -@pytest.mark.asyncio -async def test_update_database_async_from_dict(): - await test_update_database_async(request_type=dict) - -def test_update_database_field_headers(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.UpdateDatabaseRequest() - - request.database.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
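
# ---- [editor's note] -------------------------------------------------------
# Note the routing key in the surrounding update_database field-header tests:
# the resource name lives on the nested message, so the tests set
# request.database.name and expect 'database.name=name_value' in the
# metadata -- nested fields flatten into dotted keys. With the same api-core
# helper shown earlier:

from google.api_core.gapic_v1 import routing_header

# Produces ('x-goog-request-params', 'database.name=name_value').
print(routing_header.to_grpc_metadata([("database.name", "name_value")]))
# ----------------------------------------------------------------------------
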
- with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - call.return_value = metastore.Database() - client.update_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_database_field_headers_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.UpdateDatabaseRequest() - - request.database.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database()) - await client.update_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database.name=name_value', - ) in kw['metadata'] - - -def test_update_database_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Database() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_database( - database=metastore.Database(hive_options=metastore.HiveDatabaseOptions(location_uri='location_uri_value')), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].database - mock_val = metastore.Database(hive_options=metastore.HiveDatabaseOptions(location_uri='location_uri_value')) - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_database_flattened_error(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_database( - metastore.UpdateDatabaseRequest(), - database=metastore.Database(hive_options=metastore.HiveDatabaseOptions(location_uri='location_uri_value')), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_database_flattened_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - # Designate an appropriate return value for the call. 
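The `*_field_headers` tests assert that routing information from the request is mirrored into gRPC metadata as an `x-goog-request-params` entry, e.g. `database.name=name_value` for `UpdateDatabase` (the field path is nested because the resource name lives on `request.database`). A stdlib-only sketch of how such a header can be assembled; the real helper is `gapic_v1.routing_header.to_grpc_metadata`, which also appears in the pager test further down, and this toy mimics only its observable output.

```python
from urllib.parse import quote


def to_grpc_metadata(params):
    """Toy version: percent-encode values and join key=value pairs."""
    value = '&'.join(f'{key}={quote(str(val), safe="")}' for key, val in params)
    return ('x-goog-request-params', value)


header = to_grpc_metadata([('database.name', 'name_value')])
assert header == ('x-goog-request-params', 'database.name=name_value')

# Multiple routing parameters are joined with '&'.
header = to_grpc_metadata([('parent', 'p1'), ('page_size', 10)])
assert header == ('x-goog-request-params', 'parent=p1&page_size=10')
```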
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.update_database(
-            database=metastore.Database(hive_options=metastore.HiveDatabaseOptions(location_uri='location_uri_value')),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].database
-        mock_val = metastore.Database(hive_options=metastore.HiveDatabaseOptions(location_uri='location_uri_value'))
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_database_flattened_error_async():
-    client = MetastoreServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.update_database(
-            metastore.UpdateDatabaseRequest(),
-            database=metastore.Database(hive_options=metastore.HiveDatabaseOptions(location_uri='location_uri_value')),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  metastore.GetDatabaseRequest,
-  dict,
-])
-def test_get_database(request_type, transport: str = 'grpc'):
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_database),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = metastore.Database(
-            name='name_value',
-            type_=metastore.Database.Type.HIVE,
-        )
-        response = client.get_database(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = metastore.GetDatabaseRequest()
-        assert args[0] == request
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, metastore.Database)
-        assert response.name == 'name_value'
-        assert response.type_ == metastore.Database.Type.HIVE
-
-
-def test_get_database_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = metastore.GetDatabaseRequest(
-        name='name_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_database),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
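A small aside on the `_, args, _ = call.mock_calls[0]` unpacking that recurs throughout these tests: each `mock_calls` entry is a `(name, args, kwargs)` triple, so the pattern extracts the positional arguments (the request is `args[0]`) and, where headers are checked, the keyword arguments. A minimal, self-contained demonstration:

```python
from unittest import mock

call = mock.Mock()
call('request_obj', metadata=[('x-goog-request-params', 'name=n1')])

# Each entry unpacks into (name, args, kwargs).
name, args, kwargs = call.mock_calls[0]
assert args[0] == 'request_obj'
assert ('x-goog-request-params', 'name=n1') in kwargs['metadata']
```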
-        client.get_database(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == metastore.GetDatabaseRequest(
-            name='name_value',
-        )
-
-def test_get_database_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = MetastoreServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.get_database in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.get_database] = mock_rpc
-        request = {}
-        client.get_database(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.get_database(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_database_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = MetastoreServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.get_database in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.get_database] = mock_rpc
-
-        request = {}
-        await client.get_database(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.get_database(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_database_async(transport: str = 'grpc_asyncio', request_type=metastore.GetDatabaseRequest):
-    client = MetastoreServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_database),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database(
-            name='name_value',
-            type_=metastore.Database.Type.HIVE,
-        ))
-        response = await client.get_database(request)
-
-        # Establish that the underlying gRPC stub method was called.
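The async cached-RPC tests swap in `mock.AsyncMock` rather than `mock.Mock` because the async client awaits the cached callable, and an `AsyncMock` call returns an awaitable. A quick stdlib illustration of the difference:

```python
import asyncio
from unittest import mock


async def main():
    rpc = mock.AsyncMock(return_value={'name': 'db1'})
    result = await rpc({})          # AsyncMock() yields an awaitable
    assert result == {'name': 'db1'}
    assert rpc.await_count == 1

    plain = mock.Mock(return_value={'name': 'db1'})
    assert plain({}) == {'name': 'db1'}   # plain Mock returns directly

asyncio.run(main())
```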
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = metastore.GetDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Database) - assert response.name == 'name_value' - assert response.type_ == metastore.Database.Type.HIVE - - -@pytest.mark.asyncio -async def test_get_database_async_from_dict(): - await test_get_database_async(request_type=dict) - -def test_get_database_field_headers(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.GetDatabaseRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - call.return_value = metastore.Database() - client.get_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_database_field_headers_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.GetDatabaseRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database()) - await client.get_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_database_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Database() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_database( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_database_flattened_error(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
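Where the sync tests can assign a message straight to `call.return_value`, the async tests wrap it in `grpc_helpers_async.FakeUnaryUnaryCall` because the async client awaits the stub's result. A hedged stand-in showing the shape of such an awaitable; the real class lives in `google.api_core.grpc_helpers_async` and does more (metadata, trailing status), this toy only resolves to a canned response.

```python
import asyncio


class FakeUnaryUnaryCall:
    """Toy awaitable that resolves to a canned response."""

    def __init__(self, response):
        self._response = response

    def __await__(self):
        async def _resolve():
            return self._response
        return _resolve().__await__()


async def client_side(stub):
    # The async client does the equivalent of: response = await rpc(request)
    return await stub('request')


stub = lambda request: FakeUnaryUnaryCall({'name': 'name_value'})
assert asyncio.run(client_side(stub)) == {'name': 'name_value'}
```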
- with pytest.raises(ValueError): - client.get_database( - metastore.GetDatabaseRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_database_flattened_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Database() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_database( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_database_flattened_error_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_database( - metastore.GetDatabaseRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - metastore.ListDatabasesRequest, - dict, -]) -def test_list_databases(request_type, transport: str = 'grpc'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.ListDatabasesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_databases(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = metastore.ListDatabasesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDatabasesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_databases_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = metastore.ListDatabasesRequest( - parent='parent_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
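The `*_flattened_error` tests encode a GAPIC calling convention: a method accepts either a fully-formed request object or individual flattened fields, never both. A stdlib sketch of that guard, with a toy request type; the real check sits inside the generated client methods.

```python
class GetDatabaseRequest:
    def __init__(self, name=''):
        self.name = name


def get_database(request=None, *, name=None):
    has_flattened_params = name is not None
    if request is not None and has_flattened_params:
        raise ValueError('If the `request` argument is set, then none of '
                         'the individual field arguments should be set.')
    request = request if request is not None else GetDatabaseRequest()
    if name is not None:
        request.name = name   # flattened kwargs are copied onto the request
    return request


assert get_database(name='name_value').name == 'name_value'
try:
    get_database(GetDatabaseRequest(), name='name_value')
except ValueError:
    pass
else:
    raise AssertionError('expected ValueError')
```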
- client.list_databases(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metastore.ListDatabasesRequest( - parent='parent_value', - page_token='page_token_value', - ) - -def test_list_databases_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_databases in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_databases] = mock_rpc - request = {} - client.list_databases(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_databases(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_databases_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_databases in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_databases] = mock_rpc - - request = {} - await client.list_databases(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_databases(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_databases_async(transport: str = 'grpc_asyncio', request_type=metastore.ListDatabasesRequest): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(metastore.ListDatabasesResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_databases(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = metastore.ListDatabasesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDatabasesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_databases_async_from_dict(): - await test_list_databases_async(request_type=dict) - -def test_list_databases_field_headers(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.ListDatabasesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - call.return_value = metastore.ListDatabasesResponse() - client.list_databases(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_databases_field_headers_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.ListDatabasesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.ListDatabasesResponse()) - await client.list_databases(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_databases_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.ListDatabasesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_databases( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_databases_flattened_error(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_databases( - metastore.ListDatabasesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_databases_flattened_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.ListDatabasesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.ListDatabasesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_databases( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_databases_flattened_error_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_databases( - metastore.ListDatabasesRequest(), - parent='parent_value', - ) - - -def test_list_databases_pager(transport_name: str = "grpc"): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - metastore.ListDatabasesResponse( - databases=[ - metastore.Database(), - metastore.Database(), - metastore.Database(), - ], - next_page_token='abc', - ), - metastore.ListDatabasesResponse( - databases=[], - next_page_token='def', - ), - metastore.ListDatabasesResponse( - databases=[ - metastore.Database(), - ], - next_page_token='ghi', - ), - metastore.ListDatabasesResponse( - databases=[ - metastore.Database(), - metastore.Database(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_databases(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, metastore.Database) - for i in results) -def test_list_databases_pages(transport_name: str = "grpc"): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - # Set the response to a series of pages. 
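The pager returned by `list_databases` hides the page loop: iterating it fetches successive `ListDatabasesResponse` pages until `next_page_token` comes back empty, which is why the trailing `RuntimeError` in these `side_effect` sequences must never be reached. A stdlib model of that walk, using toy types with the same field names:

```python
class ListDatabasesResponse:
    def __init__(self, databases, next_page_token=''):
        self.databases = databases
        self.next_page_token = next_page_token


def iterate(fetch):
    """Yield items across pages until the token runs out (toy pager)."""
    token = ''
    while True:
        page = fetch(token)
        yield from page.databases
        token = page.next_page_token
        if not token:
            return


responses = iter([
    ListDatabasesResponse(['db1', 'db2', 'db3'], 'abc'),
    ListDatabasesResponse([], 'def'),
    ListDatabasesResponse(['db4'], 'ghi'),
    ListDatabasesResponse(['db5', 'db6']),
    RuntimeError,                      # sentinel: must never be fetched
])
fetch = lambda token: next(responses)
assert list(iterate(fetch)) == ['db1', 'db2', 'db3', 'db4', 'db5', 'db6']
```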
- call.side_effect = ( - metastore.ListDatabasesResponse( - databases=[ - metastore.Database(), - metastore.Database(), - metastore.Database(), - ], - next_page_token='abc', - ), - metastore.ListDatabasesResponse( - databases=[], - next_page_token='def', - ), - metastore.ListDatabasesResponse( - databases=[ - metastore.Database(), - ], - next_page_token='ghi', - ), - metastore.ListDatabasesResponse( - databases=[ - metastore.Database(), - metastore.Database(), - ], - ), - RuntimeError, - ) - pages = list(client.list_databases(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_databases_async_pager(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - metastore.ListDatabasesResponse( - databases=[ - metastore.Database(), - metastore.Database(), - metastore.Database(), - ], - next_page_token='abc', - ), - metastore.ListDatabasesResponse( - databases=[], - next_page_token='def', - ), - metastore.ListDatabasesResponse( - databases=[ - metastore.Database(), - ], - next_page_token='ghi', - ), - metastore.ListDatabasesResponse( - databases=[ - metastore.Database(), - metastore.Database(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_databases(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, metastore.Database) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_databases_async_pages(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - metastore.ListDatabasesResponse( - databases=[ - metastore.Database(), - metastore.Database(), - metastore.Database(), - ], - next_page_token='abc', - ), - metastore.ListDatabasesResponse( - databases=[], - next_page_token='def', - ), - metastore.ListDatabasesResponse( - databases=[ - metastore.Database(), - ], - next_page_token='ghi', - ), - metastore.ListDatabasesResponse( - databases=[ - metastore.Database(), - metastore.Database(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_databases(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - metastore.CreateTableRequest, - dict, -]) -def test_create_table(request_type, transport: str = 'grpc'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_table), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Table( - name='name_value', - type_=metastore.Table.Type.HIVE, - etag='etag_value', - ) - response = client.create_table(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = metastore.CreateTableRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Table) - assert response.name == 'name_value' - assert response.type_ == metastore.Table.Type.HIVE - assert response.etag == 'etag_value' - - -def test_create_table_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = metastore.CreateTableRequest( - parent='parent_value', - table_id='table_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_table), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
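The `*_non_empty_request_with_auto_populated_field` tests guard AIP-4235 behavior: if a request declares an auto-populated UUID4 field (typically `request_id`) and the caller leaves it empty, the client fills it in before sending. None of the metastore requests here declare such a field, which is why each test simply asserts the request went out unchanged. A hedged sketch of what auto-population looks like when a field does qualify; the `request_id` field below is hypothetical, not part of `CreateTableRequest`.

```python
import uuid
from dataclasses import dataclass


@dataclass
class CreateTableRequest:
    parent: str = ''
    table_id: str = ''
    request_id: str = ''   # hypothetical field annotated as auto-populated UUID4


def auto_populate(request):
    # Only fill the field if the caller left it empty.
    if not request.request_id:
        request.request_id = str(uuid.uuid4())
    return request


req = auto_populate(CreateTableRequest(parent='parent_value', table_id='t1'))
assert req.parent == 'parent_value'            # explicit fields untouched
assert uuid.UUID(req.request_id).version == 4  # empty UUID4 field populated
```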
- client.create_table(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metastore.CreateTableRequest( - parent='parent_value', - table_id='table_id_value', - ) - -def test_create_table_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_table in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_table] = mock_rpc - request = {} - client.create_table(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_table(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_table_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_table in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_table] = mock_rpc - - request = {} - await client.create_table(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_table(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_table_async(transport: str = 'grpc_asyncio', request_type=metastore.CreateTableRequest): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_table), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table( - name='name_value', - type_=metastore.Table.Type.HIVE, - etag='etag_value', - )) - response = await client.create_table(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = metastore.CreateTableRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Table) - assert response.name == 'name_value' - assert response.type_ == metastore.Table.Type.HIVE - assert response.etag == 'etag_value' - - -@pytest.mark.asyncio -async def test_create_table_async_from_dict(): - await test_create_table_async(request_type=dict) - -def test_create_table_field_headers(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.CreateTableRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_table), - '__call__') as call: - call.return_value = metastore.Table() - client.create_table(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_table_field_headers_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.CreateTableRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_table), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table()) - await client.create_table(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_table_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_table), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Table() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_table( - parent='parent_value', - table=metastore.Table(hive_options=metastore.HiveTableOptions(parameters={'key_value': 'value_value'})), - table_id='table_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].table - mock_val = metastore.Table(hive_options=metastore.HiveTableOptions(parameters={'key_value': 'value_value'})) - assert arg == mock_val - arg = args[0].table_id - mock_val = 'table_id_value' - assert arg == mock_val - - -def test_create_table_flattened_error(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_table( - metastore.CreateTableRequest(), - parent='parent_value', - table=metastore.Table(hive_options=metastore.HiveTableOptions(parameters={'key_value': 'value_value'})), - table_id='table_id_value', - ) - -@pytest.mark.asyncio -async def test_create_table_flattened_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_table), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Table() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_table( - parent='parent_value', - table=metastore.Table(hive_options=metastore.HiveTableOptions(parameters={'key_value': 'value_value'})), - table_id='table_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].table - mock_val = metastore.Table(hive_options=metastore.HiveTableOptions(parameters={'key_value': 'value_value'})) - assert arg == mock_val - arg = args[0].table_id - mock_val = 'table_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_table_flattened_error_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_table( - metastore.CreateTableRequest(), - parent='parent_value', - table=metastore.Table(hive_options=metastore.HiveTableOptions(parameters={'key_value': 'value_value'})), - table_id='table_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - metastore.DeleteTableRequest, - dict, -]) -def test_delete_table(request_type, transport: str = 'grpc'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_table), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = metastore.Table( - name='name_value', - type_=metastore.Table.Type.HIVE, - etag='etag_value', - ) - response = client.delete_table(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = metastore.DeleteTableRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Table) - assert response.name == 'name_value' - assert response.type_ == metastore.Table.Type.HIVE - assert response.etag == 'etag_value' - - -def test_delete_table_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = metastore.DeleteTableRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_table), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_table(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metastore.DeleteTableRequest( - name='name_value', - ) - -def test_delete_table_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_table in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_table] = mock_rpc - request = {} - client.delete_table(request) - - # Establish that the underlying gRPC stub method was called. 
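One subtlety behind assertions like `assert args[0] == metastore.DeleteTableRequest()`: proto-plus messages compare by field values, so a request rebuilt inside the test matches the one the client actually sent as long as every field agrees, much like dataclass equality. A toy illustration of that value semantics:

```python
from dataclasses import dataclass


@dataclass
class DeleteTableRequest:
    name: str = ''


sent = DeleteTableRequest(name='name_value')
rebuilt = DeleteTableRequest(name='name_value')
assert sent == rebuilt                                # value equality...
assert sent is not rebuilt                            # ...not identity
assert DeleteTableRequest() == DeleteTableRequest()   # empty requests too
```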
- assert mock_rpc.call_count == 1 - - client.delete_table(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_table_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_table in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_table] = mock_rpc - - request = {} - await client.delete_table(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_table(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_table_async(transport: str = 'grpc_asyncio', request_type=metastore.DeleteTableRequest): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_table), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table( - name='name_value', - type_=metastore.Table.Type.HIVE, - etag='etag_value', - )) - response = await client.delete_table(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = metastore.DeleteTableRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Table) - assert response.name == 'name_value' - assert response.type_ == metastore.Table.Type.HIVE - assert response.etag == 'etag_value' - - -@pytest.mark.asyncio -async def test_delete_table_async_from_dict(): - await test_delete_table_async(request_type=dict) - -def test_delete_table_field_headers(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.DeleteTableRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_table), - '__call__') as call: - call.return_value = metastore.Table() - client.delete_table(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_table_field_headers_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.DeleteTableRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_table), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table()) - await client.delete_table(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_table_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_table), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Table() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_table( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_table_flattened_error(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_table( - metastore.DeleteTableRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_table_flattened_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_table), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Table() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_table( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_table_flattened_error_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.delete_table( - metastore.DeleteTableRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - metastore.UpdateTableRequest, - dict, -]) -def test_update_table(request_type, transport: str = 'grpc'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_table), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Table( - name='name_value', - type_=metastore.Table.Type.HIVE, - etag='etag_value', - ) - response = client.update_table(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = metastore.UpdateTableRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Table) - assert response.name == 'name_value' - assert response.type_ == metastore.Table.Type.HIVE - assert response.etag == 'etag_value' - - -def test_update_table_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = metastore.UpdateTableRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_table), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_table(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metastore.UpdateTableRequest( - ) - -def test_update_table_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_table in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_table] = mock_rpc - request = {} - client.update_table(request) - - # Establish that the underlying gRPC stub method was called. 
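`UpdateDatabase` and `UpdateTable` both pair the new resource with a `field_mask_pb2.FieldMask` whose `paths` name the fields to change; everything outside the mask is left alone. A stdlib sketch of applying such a mask; the real `FieldMask` carries a `paths` list exactly like this, though servers also honor nested paths, which this toy omits.

```python
from dataclasses import dataclass, replace


@dataclass
class Table:
    name: str = ''
    etag: str = ''
    description: str = ''


def apply_update(current, patch, paths):
    """Copy only the masked fields from patch onto current (toy semantics)."""
    return replace(current, **{p: getattr(patch, p) for p in paths})


current = Table(name='t1', etag='abc', description='old')
patch = Table(name='ignored', description='new')
updated = apply_update(current, patch, paths=['description'])
assert updated == Table(name='t1', etag='abc', description='new')
```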
- assert mock_rpc.call_count == 1 - - client.update_table(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_table_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_table in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_table] = mock_rpc - - request = {} - await client.update_table(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.update_table(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_table_async(transport: str = 'grpc_asyncio', request_type=metastore.UpdateTableRequest): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_table), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table( - name='name_value', - type_=metastore.Table.Type.HIVE, - etag='etag_value', - )) - response = await client.update_table(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = metastore.UpdateTableRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Table) - assert response.name == 'name_value' - assert response.type_ == metastore.Table.Type.HIVE - assert response.etag == 'etag_value' - - -@pytest.mark.asyncio -async def test_update_table_async_from_dict(): - await test_update_table_async(request_type=dict) - -def test_update_table_field_headers(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.UpdateTableRequest() - - request.table.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_table), - '__call__') as call: - call.return_value = metastore.Table() - client.update_table(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'table.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_table_field_headers_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.UpdateTableRequest() - - request.table.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_table), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table()) - await client.update_table(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'table.name=name_value', - ) in kw['metadata'] - - -def test_update_table_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_table), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Table() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_table( - table=metastore.Table(hive_options=metastore.HiveTableOptions(parameters={'key_value': 'value_value'})), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].table - mock_val = metastore.Table(hive_options=metastore.HiveTableOptions(parameters={'key_value': 'value_value'})) - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_table_flattened_error(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_table( - metastore.UpdateTableRequest(), - table=metastore.Table(hive_options=metastore.HiveTableOptions(parameters={'key_value': 'value_value'})), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_table_flattened_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_table), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Table() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
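All of the flattened tests in this file check the same contract: an RPC may be invoked either with a request object or with individual keyword arguments, never both. A sketch of the guard being exercised, with a hypothetical signature rather than the generated client code:

    def update_table(request=None, *, table=None, update_mask=None):
        # Reject mixing a full request object with flattened fields.
        if request is not None and any(
            arg is not None for arg in (table, update_mask)
        ):
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )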
- response = await client.update_table( - table=metastore.Table(hive_options=metastore.HiveTableOptions(parameters={'key_value': 'value_value'})), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].table - mock_val = metastore.Table(hive_options=metastore.HiveTableOptions(parameters={'key_value': 'value_value'})) - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_table_flattened_error_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_table( - metastore.UpdateTableRequest(), - table=metastore.Table(hive_options=metastore.HiveTableOptions(parameters={'key_value': 'value_value'})), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - metastore.RenameTableRequest, - dict, -]) -def test_rename_table(request_type, transport: str = 'grpc'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_table), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Table( - name='name_value', - type_=metastore.Table.Type.HIVE, - etag='etag_value', - ) - response = client.rename_table(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = metastore.RenameTableRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Table) - assert response.name == 'name_value' - assert response.type_ == metastore.Table.Type.HIVE - assert response.etag == 'etag_value' - - -def test_rename_table_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = metastore.RenameTableRequest( - name='name_value', - new_name='new_name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_table), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
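The auto_populated_field tests cite AIP-4235: request-ID style string fields marked for auto-population are filled with a UUID4 by the client whenever the caller leaves them unset. Roughly, under a hypothetical request_id field:

    import uuid

    def populate_request_id(request: dict) -> dict:
        # Only fill the field when the caller did not set it themselves.
        if not request.get("request_id"):
            request["request_id"] = str(uuid.uuid4())
        return request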
- client.rename_table(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metastore.RenameTableRequest( - name='name_value', - new_name='new_name_value', - ) - -def test_rename_table_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.rename_table in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.rename_table] = mock_rpc - request = {} - client.rename_table(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.rename_table(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_rename_table_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.rename_table in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.rename_table] = mock_rpc - - request = {} - await client.rename_table(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.rename_table(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_rename_table_async(transport: str = 'grpc_asyncio', request_type=metastore.RenameTableRequest): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_table), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table( - name='name_value', - type_=metastore.Table.Type.HIVE, - etag='etag_value', - )) - response = await client.rename_table(request) - - # Establish that the underlying gRPC stub method was called. 
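grpc_helpers_async.FakeUnaryUnaryCall wraps a plain response so a synchronous mock can stand where the async client expects an awaitable gRPC call. A simplified stand-in (not the api_core class) shows the shape being emulated:

    import asyncio

    class FakeCall:
        def __init__(self, response):
            self._response = response

        def __await__(self):
            yield  # hand control to the event loop once, like a real call
            return self._response

    async def main():
        assert await FakeCall("table") == "table"

    asyncio.run(main())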
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = metastore.RenameTableRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Table) - assert response.name == 'name_value' - assert response.type_ == metastore.Table.Type.HIVE - assert response.etag == 'etag_value' - - -@pytest.mark.asyncio -async def test_rename_table_async_from_dict(): - await test_rename_table_async(request_type=dict) - -def test_rename_table_field_headers(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.RenameTableRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_table), - '__call__') as call: - call.return_value = metastore.Table() - client.rename_table(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_rename_table_field_headers_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.RenameTableRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_table), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table()) - await client.rename_table(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_rename_table_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_table), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Table() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.rename_table( - name='name_value', - new_name='new_name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].new_name - mock_val = 'new_name_value' - assert arg == mock_val - - -def test_rename_table_flattened_error(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.rename_table( - metastore.RenameTableRequest(), - name='name_value', - new_name='new_name_value', - ) - -@pytest.mark.asyncio -async def test_rename_table_flattened_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_table), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Table() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.rename_table( - name='name_value', - new_name='new_name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].new_name - mock_val = 'new_name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_rename_table_flattened_error_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.rename_table( - metastore.RenameTableRequest(), - name='name_value', - new_name='new_name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - metastore.GetTableRequest, - dict, -]) -def test_get_table(request_type, transport: str = 'grpc'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_table), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Table( - name='name_value', - type_=metastore.Table.Type.HIVE, - etag='etag_value', - ) - response = client.get_table(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = metastore.GetTableRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Table) - assert response.name == 'name_value' - assert response.type_ == metastore.Table.Type.HIVE - assert response.etag == 'etag_value' - - -def test_get_table_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = metastore.GetTableRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_table), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_table(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metastore.GetTableRequest( - name='name_value', - ) - -def test_get_table_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_table in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_table] = mock_rpc - request = {} - client.get_table(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_table(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_table_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_table in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_table] = mock_rpc - - request = {} - await client.get_table(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_table(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_table_async(transport: str = 'grpc_asyncio', request_type=metastore.GetTableRequest): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_table), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table( - name='name_value', - type_=metastore.Table.Type.HIVE, - etag='etag_value', - )) - response = await client.get_table(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = metastore.GetTableRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Table) - assert response.name == 'name_value' - assert response.type_ == metastore.Table.Type.HIVE - assert response.etag == 'etag_value' - - -@pytest.mark.asyncio -async def test_get_table_async_from_dict(): - await test_get_table_async(request_type=dict) - -def test_get_table_field_headers(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.GetTableRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_table), - '__call__') as call: - call.return_value = metastore.Table() - client.get_table(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_table_field_headers_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.GetTableRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_table), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table()) - await client.get_table(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_table_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_table), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Table() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_table( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
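These tests patch __call__ on type(client.transport.get_table) rather than on the stub instance because Python resolves dunder methods on the class. The distinction in a self-contained example:

    from unittest import mock

    class Stub:
        def __call__(self):
            return "real"

    stub = Stub()
    with mock.patch.object(type(stub), "__call__") as call:
        call.return_value = "fake"
        assert stub() == "fake"   # class-level patch intercepts the call
    assert stub() == "real"       # patch is undone on exit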
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_table_flattened_error(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_table( - metastore.GetTableRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_table_flattened_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_table), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Table() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_table( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_table_flattened_error_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_table( - metastore.GetTableRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - metastore.ListTablesRequest, - dict, -]) -def test_list_tables(request_type, transport: str = 'grpc'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tables), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.ListTablesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_tables(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = metastore.ListTablesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTablesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_tables_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = metastore.ListTablesRequest( - parent='parent_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tables), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_tables(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metastore.ListTablesRequest( - parent='parent_value', - page_token='page_token_value', - ) - -def test_list_tables_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_tables in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_tables] = mock_rpc - request = {} - client.list_tables(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_tables(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_tables_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_tables in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_tables] = mock_rpc - - request = {} - await client.list_tables(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_tables(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_tables_async(transport: str = 'grpc_asyncio', request_type=metastore.ListTablesRequest): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_tables), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(metastore.ListTablesResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_tables(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = metastore.ListTablesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTablesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_tables_async_from_dict(): - await test_list_tables_async(request_type=dict) - -def test_list_tables_field_headers(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.ListTablesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tables), - '__call__') as call: - call.return_value = metastore.ListTablesResponse() - client.list_tables(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_tables_field_headers_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.ListTablesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tables), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.ListTablesResponse()) - await client.list_tables(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_tables_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tables), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.ListTablesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_tables( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_tables_flattened_error(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_tables( - metastore.ListTablesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_tables_flattened_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tables), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.ListTablesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.ListTablesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_tables( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_tables_flattened_error_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_tables( - metastore.ListTablesRequest(), - parent='parent_value', - ) - - -def test_list_tables_pager(transport_name: str = "grpc"): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tables), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - metastore.ListTablesResponse( - tables=[ - metastore.Table(), - metastore.Table(), - metastore.Table(), - ], - next_page_token='abc', - ), - metastore.ListTablesResponse( - tables=[], - next_page_token='def', - ), - metastore.ListTablesResponse( - tables=[ - metastore.Table(), - ], - next_page_token='ghi', - ), - metastore.ListTablesResponse( - tables=[ - metastore.Table(), - metastore.Table(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_tables(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, metastore.Table) - for i in results) -def test_list_tables_pages(transport_name: str = "grpc"): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tables), - '__call__') as call: - # Set the response to a series of pages. 
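The pager tests feed four pages holding 3 + 0 + 1 + 2 tables and expect six results, since a pager chains token-linked pages into a single iterator. The core loop is roughly the following generic sketch, not the generated pagers module:

    def iterate(fetch_page, token=""):
        # fetch_page(token) -> (items, next_page_token); "" means last page.
        while True:
            items, token = fetch_page(token)
            yield from items
            if not token:
                break

    pages = {
        "": (["t1", "t2", "t3"], "abc"),
        "abc": ([], "def"),
        "def": (["t4"], "ghi"),
        "ghi": (["t5", "t6"], ""),
    }
    assert list(iterate(pages.__getitem__)) == [
        "t1", "t2", "t3", "t4", "t5", "t6",
    ]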
- call.side_effect = ( - metastore.ListTablesResponse( - tables=[ - metastore.Table(), - metastore.Table(), - metastore.Table(), - ], - next_page_token='abc', - ), - metastore.ListTablesResponse( - tables=[], - next_page_token='def', - ), - metastore.ListTablesResponse( - tables=[ - metastore.Table(), - ], - next_page_token='ghi', - ), - metastore.ListTablesResponse( - tables=[ - metastore.Table(), - metastore.Table(), - ], - ), - RuntimeError, - ) - pages = list(client.list_tables(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_tables_async_pager(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tables), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - metastore.ListTablesResponse( - tables=[ - metastore.Table(), - metastore.Table(), - metastore.Table(), - ], - next_page_token='abc', - ), - metastore.ListTablesResponse( - tables=[], - next_page_token='def', - ), - metastore.ListTablesResponse( - tables=[ - metastore.Table(), - ], - next_page_token='ghi', - ), - metastore.ListTablesResponse( - tables=[ - metastore.Table(), - metastore.Table(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_tables(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, metastore.Table) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_tables_async_pages(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tables), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - metastore.ListTablesResponse( - tables=[ - metastore.Table(), - metastore.Table(), - metastore.Table(), - ], - next_page_token='abc', - ), - metastore.ListTablesResponse( - tables=[], - next_page_token='def', - ), - metastore.ListTablesResponse( - tables=[ - metastore.Table(), - ], - next_page_token='ghi', - ), - metastore.ListTablesResponse( - tables=[ - metastore.Table(), - metastore.Table(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_tables(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_create_catalog_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_catalog in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_catalog] = mock_rpc - - request = {} - client.create_catalog(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_catalog(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_catalog_rest_required_fields(request_type=metastore.CreateCatalogRequest): - transport_class = transports.MetastoreServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["catalog_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "catalogId" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_catalog._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "catalogId" in jsonified_request - assert jsonified_request["catalogId"] == request_init["catalog_id"] - - jsonified_request["parent"] = 'parent_value' - jsonified_request["catalogId"] = 'catalog_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_catalog._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
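The REST required-fields tests round-trip each request through json_format, which is why the assertions switch from catalog_id to catalogId: MessageToJson emits proto3 JSON names in lowerCamelCase by default. A quick demonstration with a stock proto message:

    import json
    from google.protobuf import descriptor_pb2, json_format

    msg = descriptor_pb2.FileDescriptorProto(
        name="a.proto", public_dependency=[0],
    )
    payload = json.loads(json_format.MessageToJson(msg))
    assert "publicDependency" in payload   # snake_case field, camelCase key
    assert "public_dependency" not in payload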
-    assert not set(unset_fields) - set(("catalog_id", ))
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "parent" in jsonified_request
-    assert jsonified_request["parent"] == 'parent_value'
-    assert "catalogId" in jsonified_request
-    assert jsonified_request["catalogId"] == 'catalog_id_value'
-
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = metastore.Catalog()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = metastore.Catalog.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.create_catalog(request)
-
-            expected_params = [
-                (
-                    "catalogId",
-                    "",
-                ),
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_create_catalog_rest_unset_required_fields():
-    transport = transports.MetastoreServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.create_catalog._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("catalogId", )) & set(("parent", "catalog", "catalogId", )))
-
-
-def test_create_catalog_rest_flattened():
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
- return_value = metastore.Catalog() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - catalog=metastore.Catalog(name='name_value'), - catalog_id='catalog_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = metastore.Catalog.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_catalog(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/catalogs" % client.transport._host, args[1]) - - -def test_create_catalog_rest_flattened_error(transport: str = 'rest'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_catalog( - metastore.CreateCatalogRequest(), - parent='parent_value', - catalog=metastore.Catalog(name='name_value'), - catalog_id='catalog_id_value', - ) - - -def test_delete_catalog_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_catalog in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_catalog] = mock_rpc - - request = {} - client.delete_catalog(request) - - # Establish that the underlying gRPC stub method was called. 
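The *_rest_flattened tests confirm the route with path_template.validate, which matches a concrete URI against the HTTP rule's pattern. It works standalone; the host below is illustrative:

    from google.api_core import path_template

    assert path_template.validate(
        "https://example.googleapis.com/v1/{parent=projects/*/locations/*}/catalogs",
        "https://example.googleapis.com/v1/projects/sample1/locations/sample2/catalogs",
    )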
-    assert mock_rpc.call_count == 1
-
-    client.delete_catalog(request)
-
-    # Establish that a new wrapper was not created for this call
-    assert wrapper_fn.call_count == 0
-    assert mock_rpc.call_count == 2
-
-
-def test_delete_catalog_rest_required_fields(request_type=metastore.DeleteCatalogRequest):
-    transport_class = transports.MetastoreServiceRestTransport
-
-    request_init = {}
-    request_init["name"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_catalog._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["name"] = 'name_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_catalog._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
-
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = metastore.Catalog()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "delete",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = metastore.Catalog.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.delete_catalog(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_delete_catalog_rest_unset_required_fields():
-    transport = transports.MetastoreServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.delete_catalog._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-def test_delete_catalog_rest_flattened():
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
- return_value = metastore.Catalog() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/catalogs/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = metastore.Catalog.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_catalog(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/catalogs/*}" % client.transport._host, args[1]) - - -def test_delete_catalog_rest_flattened_error(transport: str = 'rest'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_catalog( - metastore.DeleteCatalogRequest(), - name='name_value', - ) - - -def test_get_catalog_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_catalog in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_catalog] = mock_rpc - - request = {} - client.get_catalog(request) - - # Establish that the underlying gRPC stub method was called. 
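The REST mocks assemble fake HTTP responses by hand: instantiate requests.Response, set status_code, and write the serialized payload into the private _content attribute, which is enough for .text and .json() to work:

    from requests import Response

    response = Response()
    response.status_code = 200
    response._content = b'{"name": "name_value"}'
    assert response.ok
    assert response.json() == {"name": "name_value"}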
-    assert mock_rpc.call_count == 1
-
-    client.get_catalog(request)
-
-    # Establish that a new wrapper was not created for this call
-    assert wrapper_fn.call_count == 0
-    assert mock_rpc.call_count == 2
-
-
-def test_get_catalog_rest_required_fields(request_type=metastore.GetCatalogRequest):
-    transport_class = transports.MetastoreServiceRestTransport
-
-    request_init = {}
-    request_init["name"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_catalog._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["name"] = 'name_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_catalog._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
-
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = metastore.Catalog()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = metastore.Catalog.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.get_catalog(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_get_catalog_rest_unset_required_fields():
-    transport = transports.MetastoreServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.get_catalog._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-def test_get_catalog_rest_flattened():
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
- return_value = metastore.Catalog() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/catalogs/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = metastore.Catalog.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_catalog(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/catalogs/*}" % client.transport._host, args[1]) - - -def test_get_catalog_rest_flattened_error(transport: str = 'rest'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_catalog( - metastore.GetCatalogRequest(), - name='name_value', - ) - - -def test_list_catalogs_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_catalogs in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_catalogs] = mock_rpc - - request = {} - client.list_catalogs(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_catalogs(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_catalogs_rest_required_fields(request_type=metastore.ListCatalogsRequest): - transport_class = transports.MetastoreServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_catalogs._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_catalogs._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = metastore.ListCatalogsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.ListCatalogsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_catalogs(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_catalogs_rest_unset_required_fields(): - transport = transports.MetastoreServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_catalogs._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_catalogs_rest_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
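- # Patching the session's request() intercepts at the requests layer, so
- # transcoding and protobuf/JSON serialization still run for real.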
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metastore.ListCatalogsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = metastore.ListCatalogsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_catalogs(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/catalogs" % client.transport._host, args[1]) - - -def test_list_catalogs_rest_flattened_error(transport: str = 'rest'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_catalogs( - metastore.ListCatalogsRequest(), - parent='parent_value', - ) - - -def test_list_catalogs_rest_pager(transport: str = 'rest'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - metastore.ListCatalogsResponse( - catalogs=[ - metastore.Catalog(), - metastore.Catalog(), - metastore.Catalog(), - ], - next_page_token='abc', - ), - metastore.ListCatalogsResponse( - catalogs=[], - next_page_token='def', - ), - metastore.ListCatalogsResponse( - catalogs=[ - metastore.Catalog(), - ], - next_page_token='ghi', - ), - metastore.ListCatalogsResponse( - catalogs=[ - metastore.Catalog(), - metastore.Catalog(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(metastore.ListCatalogsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - pager = client.list_catalogs(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, metastore.Catalog) - for i in results) - - pages = list(client.list_catalogs(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_create_database_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_database in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_database] = mock_rpc - - request = {} - client.create_database(request) - - # Establish that the underlying gRPC stub method was called. 
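- # (the empty dict is coerced to a CreateDatabaseRequest by the client
- # before it reaches the mocked RPC)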
- assert mock_rpc.call_count == 1 - - client.create_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_database_rest_required_fields(request_type=metastore.CreateDatabaseRequest): - transport_class = transports.MetastoreServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["database_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "databaseId" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_database._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "databaseId" in jsonified_request - assert jsonified_request["databaseId"] == request_init["database_id"] - - jsonified_request["parent"] = 'parent_value' - jsonified_request["databaseId"] = 'database_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_database._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("database_id", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "databaseId" in jsonified_request - assert jsonified_request["databaseId"] == 'database_id_value' - - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = metastore.Database() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
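- # The canned transcode result below stands in for the real http rule, so
- # expected_params can be asserted against req.call_args directly.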
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.Database.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_database(request) - - expected_params = [ - ( - "databaseId", - "", - ), - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_database_rest_unset_required_fields(): - transport = transports.MetastoreServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_database._get_unset_required_fields({}) - assert set(unset_fields) == (set(("databaseId", )) & set(("parent", "database", "databaseId", ))) - - -def test_create_database_rest_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metastore.Database() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/catalogs/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - database=metastore.Database(hive_options=metastore.HiveDatabaseOptions(location_uri='location_uri_value')), - database_id='database_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = metastore.Database.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_database(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*/catalogs/*}/databases" % client.transport._host, args[1]) - - -def test_create_database_rest_flattened_error(transport: str = 'rest'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
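- # (the client rejects the ambiguous call before any HTTP request is made)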
- with pytest.raises(ValueError): - client.create_database( - metastore.CreateDatabaseRequest(), - parent='parent_value', - database=metastore.Database(hive_options=metastore.HiveDatabaseOptions(location_uri='location_uri_value')), - database_id='database_id_value', - ) - - -def test_delete_database_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_database in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_database] = mock_rpc - - request = {} - client.delete_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_database_rest_required_fields(request_type=metastore.DeleteDatabaseRequest): - transport_class = transports.MetastoreServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_database._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_database._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = metastore.Database() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
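- # Every REST call is expected to carry ('$alt', 'json;enum-encoding=int'),
- # i.e. JSON responses with enums encoded as integers.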
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.Database.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_database(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_database_rest_unset_required_fields(): - transport = transports.MetastoreServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_database._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_delete_database_rest_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metastore.Database() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = metastore.Database.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_database(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/catalogs/*/databases/*}" % client.transport._host, args[1]) - - -def test_delete_database_rest_flattened_error(transport: str = 'rest'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_database( - metastore.DeleteDatabaseRequest(), - name='name_value', - ) - - -def test_update_database_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_database in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_database] = mock_rpc - - request = {} - client.update_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_database_rest_required_fields(request_type=metastore.UpdateDatabaseRequest): - transport_class = transports.MetastoreServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_database._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_database._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = metastore.Database() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
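- # UpdateDatabaseRequest has no required string fields to pre-populate
- # above; only the update_mask query parameter is screened out.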
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.Database.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_database(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_database_rest_unset_required_fields(): - transport = transports.MetastoreServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_database._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", )) & set(("database", ))) - - -def test_update_database_rest_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metastore.Database() - - # get arguments that satisfy an http rule for this method - sample_request = {'database': {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4'}} - - # get truthy value for each flattened field - mock_args = dict( - database=metastore.Database(hive_options=metastore.HiveDatabaseOptions(location_uri='location_uri_value')), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = metastore.Database.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_database(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{database.name=projects/*/locations/*/catalogs/*/databases/*}" % client.transport._host, args[1]) - - -def test_update_database_rest_flattened_error(transport: str = 'rest'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_database( - metastore.UpdateDatabaseRequest(), - database=metastore.Database(hive_options=metastore.HiveDatabaseOptions(location_uri='location_uri_value')), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_get_database_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_database in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_database] = mock_rpc - - request = {} - client.get_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_database_rest_required_fields(request_type=metastore.GetDatabaseRequest): - transport_class = transports.MetastoreServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_database._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_database._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = metastore.Database() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
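- # GET methods attach no 'body' key to the transcode result; compare the
- # "post"/"patch" variants above, which set pb_request as the body.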
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.Database.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_database(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_database_rest_unset_required_fields(): - transport = transports.MetastoreServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_database._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_database_rest_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metastore.Database() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = metastore.Database.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_database(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/catalogs/*/databases/*}" % client.transport._host, args[1]) - - -def test_get_database_rest_flattened_error(transport: str = 'rest'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_database( - metastore.GetDatabaseRequest(), - name='name_value', - ) - - -def test_list_databases_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_databases in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_databases] = mock_rpc - - request = {} - client.list_databases(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_databases(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_databases_rest_required_fields(request_type=metastore.ListDatabasesRequest): - transport_class = transports.MetastoreServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_databases._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_databases._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = metastore.ListDatabasesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
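- # pageSize and pageToken are optional query parameters, which is why the
- # set difference above tolerates them alongside the required 'parent'.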
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.ListDatabasesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_databases(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_databases_rest_unset_required_fields(): - transport = transports.MetastoreServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_databases._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_databases_rest_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metastore.ListDatabasesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/catalogs/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = metastore.ListDatabasesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_databases(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*/catalogs/*}/databases" % client.transport._host, args[1]) - - -def test_list_databases_rest_flattened_error(transport: str = 'rest'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_databases( - metastore.ListDatabasesRequest(), - parent='parent_value', - ) - - -def test_list_databases_rest_pager(transport: str = 'rest'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - metastore.ListDatabasesResponse( - databases=[ - metastore.Database(), - metastore.Database(), - metastore.Database(), - ], - next_page_token='abc', - ), - metastore.ListDatabasesResponse( - databases=[], - next_page_token='def', - ), - metastore.ListDatabasesResponse( - databases=[ - metastore.Database(), - ], - next_page_token='ghi', - ), - metastore.ListDatabasesResponse( - databases=[ - metastore.Database(), - metastore.Database(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(metastore.ListDatabasesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2/catalogs/sample3'} - - pager = client.list_databases(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, metastore.Database) - for i in results) - - pages = list(client.list_databases(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_create_table_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_table in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_table] = mock_rpc - - request = {} - client.create_table(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_table(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_table_rest_required_fields(request_type=metastore.CreateTableRequest): - transport_class = transports.MetastoreServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["table_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "tableId" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_table._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "tableId" in jsonified_request - assert jsonified_request["tableId"] == request_init["table_id"] - - jsonified_request["parent"] = 'parent_value' - jsonified_request["tableId"] = 'table_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_table._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("table_id", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "tableId" in jsonified_request - assert jsonified_request["tableId"] == 'table_id_value' - - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = metastore.Table() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
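- # tableId is a required query parameter: even its default empty string is
- # sent, so expected_params below includes ("tableId", "").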
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.Table.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_table(request) - - expected_params = [ - ( - "tableId", - "", - ), - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_table_rest_unset_required_fields(): - transport = transports.MetastoreServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_table._get_unset_required_fields({}) - assert set(unset_fields) == (set(("tableId", )) & set(("parent", "table", "tableId", ))) - - -def test_create_table_rest_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metastore.Table() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - table=metastore.Table(hive_options=metastore.HiveTableOptions(parameters={'key_value': 'value_value'})), - table_id='table_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = metastore.Table.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_table(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*/catalogs/*/databases/*}/tables" % client.transport._host, args[1]) - - -def test_create_table_rest_flattened_error(transport: str = 'rest'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_table( - metastore.CreateTableRequest(), - parent='parent_value', - table=metastore.Table(hive_options=metastore.HiveTableOptions(parameters={'key_value': 'value_value'})), - table_id='table_id_value', - ) - - -def test_delete_table_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_table in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_table] = mock_rpc - - request = {} - client.delete_table(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_table(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_table_rest_required_fields(request_type=metastore.DeleteTableRequest): - transport_class = transports.MetastoreServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_table._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_table._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = metastore.Table() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.Table.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_table(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_table_rest_unset_required_fields(): - transport = transports.MetastoreServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_table._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_delete_table_rest_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metastore.Table() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = metastore.Table.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_table(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/catalogs/*/databases/*/tables/*}" % client.transport._host, args[1]) - - -def test_delete_table_rest_flattened_error(transport: str = 'rest'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_table( - metastore.DeleteTableRequest(), - name='name_value', - ) - - -def test_update_table_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_table in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_table] = mock_rpc - - request = {} - client.update_table(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_table(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_table_rest_required_fields(request_type=metastore.UpdateTableRequest): - transport_class = transports.MetastoreServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_table._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_table._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = metastore.Table() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "patch",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = metastore.Table.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.update_table(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_update_table_rest_unset_required_fields():
-    transport = transports.MetastoreServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.update_table._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("updateMask", )) & set(("table", )))
-
-
-def test_update_table_rest_flattened():
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = metastore.Table()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'table': {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5'}}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            table=metastore.Table(hive_options=metastore.HiveTableOptions(parameters={'key_value': 'value_value'})),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = metastore.Table.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.update_table(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{table.name=projects/*/locations/*/catalogs/*/databases/*/tables/*}" % client.transport._host, args[1])
-
-
-def test_update_table_rest_flattened_error(transport: str = 'rest'):
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - client.update_table( - metastore.UpdateTableRequest(), - table=metastore.Table(hive_options=metastore.HiveTableOptions(parameters={'key_value': 'value_value'})), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_rename_table_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.rename_table in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.rename_table] = mock_rpc - - request = {} - client.rename_table(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.rename_table(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_rename_table_rest_required_fields(request_type=metastore.RenameTableRequest): - transport_class = transports.MetastoreServiceRestTransport - - request_init = {} - request_init["name"] = "" - request_init["new_name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).rename_table._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - jsonified_request["newName"] = 'new_name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).rename_table._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - assert "newName" in jsonified_request - assert jsonified_request["newName"] == 'new_name_value' - - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = metastore.Table() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = metastore.Table.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.rename_table(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_rename_table_rest_unset_required_fields():
-    transport = transports.MetastoreServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.rename_table._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("name", "newName", )))
-
-
-def test_rename_table_rest_flattened():
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = metastore.Table()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            name='name_value',
-            new_name='new_name_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = metastore.Table.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.rename_table(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{name=projects/*/locations/*/catalogs/*/databases/*/tables/*}:rename" % client.transport._host, args[1])
-
-
-def test_rename_table_rest_flattened_error(transport: str = 'rest'):
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - client.rename_table( - metastore.RenameTableRequest(), - name='name_value', - new_name='new_name_value', - ) - - -def test_get_table_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_table in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_table] = mock_rpc - - request = {} - client.get_table(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_table(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_table_rest_required_fields(request_type=metastore.GetTableRequest): - transport_class = transports.MetastoreServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_table._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_table._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = metastore.Table() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = metastore.Table.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.get_table(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_get_table_rest_unset_required_fields():
-    transport = transports.MetastoreServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.get_table._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-def test_get_table_rest_flattened():
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = metastore.Table()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            name='name_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = metastore.Table.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.get_table(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{name=projects/*/locations/*/catalogs/*/databases/*/tables/*}" % client.transport._host, args[1])
-
-
-def test_get_table_rest_flattened_error(transport: str = 'rest'):
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - client.get_table( - metastore.GetTableRequest(), - name='name_value', - ) - - -def test_list_tables_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_tables in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_tables] = mock_rpc - - request = {} - client.list_tables(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_tables(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_tables_rest_required_fields(request_type=metastore.ListTablesRequest): - transport_class = transports.MetastoreServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_tables._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_tables._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", "view", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = metastore.ListTablesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = metastore.ListTablesResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.list_tables(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_list_tables_rest_unset_required_fields():
-    transport = transports.MetastoreServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.list_tables._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("pageSize", "pageToken", "view", )) & set(("parent", )))
-
-
-def test_list_tables_rest_flattened():
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = metastore.ListTablesResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'parent': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            parent='parent_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = metastore.ListTablesResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.list_tables(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{parent=projects/*/locations/*/catalogs/*/databases/*}/tables" % client.transport._host, args[1])
-
-
-def test_list_tables_rest_flattened_error(transport: str = 'rest'):
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_tables(
-            metastore.ListTablesRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_tables_rest_pager(transport: str = 'rest'):
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
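-        # The pager re-issues the request with each next_page_token until
-        # it sees an empty token, so one canned HTTP response is queued per
-        # expected page fetch.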
-        # Set the response as a series of pages
-        response = (
-            metastore.ListTablesResponse(
-                tables=[
-                    metastore.Table(),
-                    metastore.Table(),
-                    metastore.Table(),
-                ],
-                next_page_token='abc',
-            ),
-            metastore.ListTablesResponse(
-                tables=[],
-                next_page_token='def',
-            ),
-            metastore.ListTablesResponse(
-                tables=[
-                    metastore.Table(),
-                ],
-                next_page_token='ghi',
-            ),
-            metastore.ListTablesResponse(
-                tables=[
-                    metastore.Table(),
-                    metastore.Table(),
-                ],
-            ),
-        )
-        # Two responses for two calls
-        response = response + response
-
-        # Wrap the values into proper Response objs
-        response = tuple(metastore.ListTablesResponse.to_json(x) for x in response)
-        return_values = tuple(Response() for i in response)
-        for return_val, response_val in zip(return_values, response):
-            return_val._content = response_val.encode('UTF-8')
-            return_val.status_code = 200
-        req.side_effect = return_values
-
-        sample_request = {'parent': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4'}
-
-        pager = client.list_tables(request=sample_request)
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, metastore.Table)
-                   for i in results)
-
-        pages = list(client.list_tables(request=sample_request).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-
-def test_credentials_transport_error():
-    # It is an error to provide credentials and a transport instance.
-    transport = transports.MetastoreServiceGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    with pytest.raises(ValueError):
-        client = MetastoreServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport=transport,
-        )
-
-    # It is an error to provide a credentials file and a transport instance.
-    transport = transports.MetastoreServiceGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    with pytest.raises(ValueError):
-        client = MetastoreServiceClient(
-            client_options={"credentials_file": "credentials.json"},
-            transport=transport,
-        )
-
-    # It is an error to provide an api_key and a transport instance.
-    transport = transports.MetastoreServiceGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    options = client_options.ClientOptions()
-    options.api_key = "api_key"
-    with pytest.raises(ValueError):
-        client = MetastoreServiceClient(
-            client_options=options,
-            transport=transport,
-        )
-
-    # It is an error to provide an api_key and a credential.
-    options = client_options.ClientOptions()
-    options.api_key = "api_key"
-    with pytest.raises(ValueError):
-        client = MetastoreServiceClient(
-            client_options=options,
-            credentials=ga_credentials.AnonymousCredentials()
-        )
-
-    # It is an error to provide scopes and a transport instance.
-    transport = transports.MetastoreServiceGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    with pytest.raises(ValueError):
-        client = MetastoreServiceClient(
-            client_options={"scopes": ["1", "2"]},
-            transport=transport,
-        )
-
-
-def test_transport_instance():
-    # A client may be instantiated with a custom transport instance.
-    transport = transports.MetastoreServiceGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    client = MetastoreServiceClient(transport=transport)
-    assert client.transport is transport
-
-def test_transport_get_channel():
-    # A client may be instantiated with a custom transport instance.
- transport = transports.MetastoreServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.MetastoreServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.MetastoreServiceGrpcTransport, - transports.MetastoreServiceGrpcAsyncIOTransport, - transports.MetastoreServiceRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = MetastoreServiceClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_catalog_empty_call_grpc(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_catalog), - '__call__') as call: - call.return_value = metastore.Catalog() - client.create_catalog(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.CreateCatalogRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_catalog_empty_call_grpc(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_catalog), - '__call__') as call: - call.return_value = metastore.Catalog() - client.delete_catalog(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.DeleteCatalogRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_catalog_empty_call_grpc(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_catalog), - '__call__') as call: - call.return_value = metastore.Catalog() - client.get_catalog(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.GetCatalogRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
request == None and no flattened fields passed, work.
-def test_list_catalogs_empty_call_grpc(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_catalogs), - '__call__') as call: - call.return_value = metastore.ListCatalogsResponse() - client.list_catalogs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.ListCatalogsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_database_empty_call_grpc(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - call.return_value = metastore.Database() - client.create_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.CreateDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_database_empty_call_grpc(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_database), - '__call__') as call: - call.return_value = metastore.Database() - client.delete_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.DeleteDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_database_empty_call_grpc(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - call.return_value = metastore.Database() - client.update_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.UpdateDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_database_empty_call_grpc(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - call.return_value = metastore.Database() - client.get_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.GetDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. 
request == None and no flattened fields passed, work. -def test_list_databases_empty_call_grpc(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - call.return_value = metastore.ListDatabasesResponse() - client.list_databases(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.ListDatabasesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_table_empty_call_grpc(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_table), - '__call__') as call: - call.return_value = metastore.Table() - client.create_table(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.CreateTableRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_table_empty_call_grpc(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_table), - '__call__') as call: - call.return_value = metastore.Table() - client.delete_table(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.DeleteTableRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_table_empty_call_grpc(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_table), - '__call__') as call: - call.return_value = metastore.Table() - client.update_table(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.UpdateTableRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_rename_table_empty_call_grpc(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.rename_table), - '__call__') as call: - call.return_value = metastore.Table() - client.rename_table(request=None) - - # Establish that the underlying stub method was called. 
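-        # Entries in mock_calls unpack as (name, args, kwargs); the request
-        # message is passed positionally, so args[0] is the proto that was
-        # sent to the stub.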
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.RenameTableRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_table_empty_call_grpc(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_table), - '__call__') as call: - call.return_value = metastore.Table() - client.get_table(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.GetTableRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_tables_empty_call_grpc(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_tables), - '__call__') as call: - call.return_value = metastore.ListTablesResponse() - client.list_tables(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.ListTablesRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = MetastoreServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_catalog_empty_call_grpc_asyncio(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_catalog), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Catalog( - name='name_value', - )) - await client.create_catalog(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.CreateCatalogRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_catalog_empty_call_grpc_asyncio(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_catalog), - '__call__') as call: - # Designate an appropriate return value for the call. 
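-        # FakeUnaryUnaryCall wraps the message in an awaitable, standing in
-        # for the grpc.aio call object that the async client awaits.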
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Catalog( - name='name_value', - )) - await client.delete_catalog(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.DeleteCatalogRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_catalog_empty_call_grpc_asyncio(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_catalog), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Catalog( - name='name_value', - )) - await client.get_catalog(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.GetCatalogRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_catalogs_empty_call_grpc_asyncio(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_catalogs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.ListCatalogsResponse( - next_page_token='next_page_token_value', - )) - await client.list_catalogs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.ListCatalogsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_database_empty_call_grpc_asyncio(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database( - name='name_value', - type_=metastore.Database.Type.HIVE, - )) - await client.create_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.CreateDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_database_empty_call_grpc_asyncio(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. 
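-    # __call__ is patched on the wrapper's type rather than the instance,
-    # because Python resolves dunder methods on the class when the
-    # multicallable is invoked.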
- with mock.patch.object( - type(client.transport.delete_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database( - name='name_value', - type_=metastore.Database.Type.HIVE, - )) - await client.delete_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.DeleteDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_database_empty_call_grpc_asyncio(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database( - name='name_value', - type_=metastore.Database.Type.HIVE, - )) - await client.update_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.UpdateDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_database_empty_call_grpc_asyncio(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database( - name='name_value', - type_=metastore.Database.Type.HIVE, - )) - await client.get_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.GetDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_databases_empty_call_grpc_asyncio(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.ListDatabasesResponse( - next_page_token='next_page_token_value', - )) - await client.list_databases(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.ListDatabasesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_create_table_empty_call_grpc_asyncio(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_table), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table( - name='name_value', - type_=metastore.Table.Type.HIVE, - etag='etag_value', - )) - await client.create_table(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.CreateTableRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_table_empty_call_grpc_asyncio(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_table), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table( - name='name_value', - type_=metastore.Table.Type.HIVE, - etag='etag_value', - )) - await client.delete_table(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.DeleteTableRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_table_empty_call_grpc_asyncio(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_table), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table( - name='name_value', - type_=metastore.Table.Type.HIVE, - etag='etag_value', - )) - await client.update_table(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.UpdateTableRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_rename_table_empty_call_grpc_asyncio(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.rename_table), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table( - name='name_value', - type_=metastore.Table.Type.HIVE, - etag='etag_value', - )) - await client.rename_table(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.RenameTableRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_table_empty_call_grpc_asyncio(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_table), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table( - name='name_value', - type_=metastore.Table.Type.HIVE, - etag='etag_value', - )) - await client.get_table(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.GetTableRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_tables_empty_call_grpc_asyncio(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_tables), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.ListTablesResponse( - next_page_token='next_page_token_value', - )) - await client.list_tables(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.ListTablesRequest() - - assert args[0] == request_msg - - -def test_transport_kind_rest(): - transport = MetastoreServiceClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" - - -def test_create_catalog_rest_bad_request(request_type=metastore.CreateCatalogRequest): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
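-    # api_core translates a 400 status on the mocked session response into
-    # core_exceptions.BadRequest, which the pytest.raises below expects.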
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_catalog(request) - - -@pytest.mark.parametrize("request_type", [ - metastore.CreateCatalogRequest, - dict, -]) -def test_create_catalog_rest_call_success(request_type): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["catalog"] = {'name': 'name_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'delete_time': {}, 'expire_time': {}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = metastore.CreateCatalogRequest.meta.fields["catalog"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["catalog"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["catalog"][field])): - del 
request_init["catalog"][field][i][subfield] - else: - del request_init["catalog"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metastore.Catalog( - name='name_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.Catalog.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_catalog(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Catalog) - assert response.name == 'name_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_catalog_rest_interceptors(null_interceptor): - transport = transports.MetastoreServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetastoreServiceRestInterceptor(), - ) - client = MetastoreServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_create_catalog") as post, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_create_catalog_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "pre_create_catalog") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = metastore.CreateCatalogRequest.pb(metastore.CreateCatalogRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = metastore.Catalog.to_json(metastore.Catalog()) - req.return_value.content = return_value - - request = metastore.CreateCatalogRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = metastore.Catalog() - post_with_metadata.return_value = metastore.Catalog(), metadata - - client.create_catalog(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_catalog_rest_bad_request(request_type=metastore.DeleteCatalogRequest): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/catalogs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_catalog(request) - - -@pytest.mark.parametrize("request_type", [ - metastore.DeleteCatalogRequest, - dict, -]) -def test_delete_catalog_rest_call_success(request_type): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/catalogs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metastore.Catalog( - name='name_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.Catalog.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_catalog(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Catalog) - assert response.name == 'name_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_catalog_rest_interceptors(null_interceptor): - transport = transports.MetastoreServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetastoreServiceRestInterceptor(), - ) - client = MetastoreServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_delete_catalog") as post, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_delete_catalog_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "pre_delete_catalog") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = metastore.DeleteCatalogRequest.pb(metastore.DeleteCatalogRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = metastore.Catalog.to_json(metastore.Catalog()) - req.return_value.content = return_value - - request = metastore.DeleteCatalogRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = metastore.Catalog() - post_with_metadata.return_value = metastore.Catalog(), metadata - - client.delete_catalog(request, metadata=[("key", "val"), ("cephalopod", 
"squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_catalog_rest_bad_request(request_type=metastore.GetCatalogRequest): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/catalogs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_catalog(request) - - -@pytest.mark.parametrize("request_type", [ - metastore.GetCatalogRequest, - dict, -]) -def test_get_catalog_rest_call_success(request_type): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/catalogs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metastore.Catalog( - name='name_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.Catalog.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_catalog(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, metastore.Catalog) - assert response.name == 'name_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_catalog_rest_interceptors(null_interceptor): - transport = transports.MetastoreServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetastoreServiceRestInterceptor(), - ) - client = MetastoreServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_get_catalog") as post, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_get_catalog_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "pre_get_catalog") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = metastore.GetCatalogRequest.pb(metastore.GetCatalogRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = metastore.Catalog.to_json(metastore.Catalog()) - req.return_value.content = return_value - - request = metastore.GetCatalogRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = metastore.Catalog() - post_with_metadata.return_value = metastore.Catalog(), metadata - - client.get_catalog(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_catalogs_rest_bad_request(request_type=metastore.ListCatalogsRequest): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_catalogs(request) - - -@pytest.mark.parametrize("request_type", [ - metastore.ListCatalogsRequest, - dict, -]) -def test_list_catalogs_rest_call_success(request_type): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
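- # List methods wrap the raw response in a pager, so the test asserts
- # pagers.ListCatalogsPager rather than metastore.ListCatalogsResponse;
- # next_page_token is still surfaced on the pager.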
- return_value = metastore.ListCatalogsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.ListCatalogsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_catalogs(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListCatalogsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_catalogs_rest_interceptors(null_interceptor): - transport = transports.MetastoreServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetastoreServiceRestInterceptor(), - ) - client = MetastoreServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_list_catalogs") as post, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_list_catalogs_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "pre_list_catalogs") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = metastore.ListCatalogsRequest.pb(metastore.ListCatalogsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = metastore.ListCatalogsResponse.to_json(metastore.ListCatalogsResponse()) - req.return_value.content = return_value - - request = metastore.ListCatalogsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = metastore.ListCatalogsResponse() - post_with_metadata.return_value = metastore.ListCatalogsResponse(), metadata - - client.list_catalogs(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_database_rest_bad_request(request_type=metastore.CreateDatabaseRequest): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/catalogs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_database(request) - - -@pytest.mark.parametrize("request_type", [ - metastore.CreateDatabaseRequest, - dict, -]) -def test_create_database_rest_call_success(request_type): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/catalogs/sample3'} - request_init["database"] = {'hive_options': {'location_uri': 'location_uri_value', 'parameters': {}}, 'name': 'name_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'delete_time': {}, 'expire_time': {}, 'type_': 1} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = metastore.CreateDatabaseRequest.meta.fields["database"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["database"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if 
field_repeated: - for i in range(0, len(request_init["database"][field])): - del request_init["database"][field][i][subfield] - else: - del request_init["database"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metastore.Database( - name='name_value', - type_=metastore.Database.Type.HIVE, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.Database.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_database(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Database) - assert response.name == 'name_value' - assert response.type_ == metastore.Database.Type.HIVE - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_database_rest_interceptors(null_interceptor): - transport = transports.MetastoreServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetastoreServiceRestInterceptor(), - ) - client = MetastoreServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_create_database") as post, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_create_database_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "pre_create_database") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = metastore.CreateDatabaseRequest.pb(metastore.CreateDatabaseRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = metastore.Database.to_json(metastore.Database()) - req.return_value.content = return_value - - request = metastore.CreateDatabaseRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = metastore.Database() - post_with_metadata.return_value = metastore.Database(), metadata - - client.create_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_database_rest_bad_request(request_type=metastore.DeleteDatabaseRequest): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a 
BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_database(request) - - -@pytest.mark.parametrize("request_type", [ - metastore.DeleteDatabaseRequest, - dict, -]) -def test_delete_database_rest_call_success(request_type): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metastore.Database( - name='name_value', - type_=metastore.Database.Type.HIVE, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.Database.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_database(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, metastore.Database) - assert response.name == 'name_value' - assert response.type_ == metastore.Database.Type.HIVE - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_database_rest_interceptors(null_interceptor): - transport = transports.MetastoreServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetastoreServiceRestInterceptor(), - ) - client = MetastoreServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_delete_database") as post, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_delete_database_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "pre_delete_database") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = metastore.DeleteDatabaseRequest.pb(metastore.DeleteDatabaseRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = metastore.Database.to_json(metastore.Database()) - req.return_value.content = return_value - - request = metastore.DeleteDatabaseRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = metastore.Database() - post_with_metadata.return_value = metastore.Database(), metadata - - client.delete_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_database_rest_bad_request(request_type=metastore.UpdateDatabaseRequest): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'database': {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
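- # For Update RPCs the resource name lives on the resource itself, so the
- # URI is transcoded from the nested `database.name` field; request_init
- # therefore nests the full resource path under 'database'.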
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_database(request) - - -@pytest.mark.parametrize("request_type", [ - metastore.UpdateDatabaseRequest, - dict, -]) -def test_update_database_rest_call_success(request_type): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'database': {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4'}} - request_init["database"] = {'hive_options': {'location_uri': 'location_uri_value', 'parameters': {}}, 'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'delete_time': {}, 'expire_time': {}, 'type_': 1} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = metastore.UpdateDatabaseRequest.meta.fields["database"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
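- # proto-plus message classes describe their fields via `meta.fields` and
- # lack a DESCRIPTOR attribute, while vanilla protobuf (*_pb2) classes
- # provide one; the hasattr() check below tells the two apart.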
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["database"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["database"][field])): - del request_init["database"][field][i][subfield] - else: - del request_init["database"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metastore.Database( - name='name_value', - type_=metastore.Database.Type.HIVE, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.Database.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_database(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, metastore.Database) - assert response.name == 'name_value' - assert response.type_ == metastore.Database.Type.HIVE - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_database_rest_interceptors(null_interceptor): - transport = transports.MetastoreServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetastoreServiceRestInterceptor(), - ) - client = MetastoreServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_update_database") as post, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_update_database_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "pre_update_database") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = metastore.UpdateDatabaseRequest.pb(metastore.UpdateDatabaseRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = metastore.Database.to_json(metastore.Database()) - req.return_value.content = return_value - - request = metastore.UpdateDatabaseRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = metastore.Database() - post_with_metadata.return_value = metastore.Database(), metadata - - client.update_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_database_rest_bad_request(request_type=metastore.GetDatabaseRequest): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_database(request) - - -@pytest.mark.parametrize("request_type", [ - metastore.GetDatabaseRequest, - dict, -]) -def test_get_database_rest_call_success(request_type): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
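- # The fake payload is built the way a real server response would look: the
- # proto-plus Database is unwrapped with .pb(), rendered with MessageToJson,
- # and attached to the mocked response as UTF-8 bytes for the transport to parse.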
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metastore.Database( - name='name_value', - type_=metastore.Database.Type.HIVE, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.Database.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_database(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Database) - assert response.name == 'name_value' - assert response.type_ == metastore.Database.Type.HIVE - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_database_rest_interceptors(null_interceptor): - transport = transports.MetastoreServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetastoreServiceRestInterceptor(), - ) - client = MetastoreServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_get_database") as post, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_get_database_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "pre_get_database") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = metastore.GetDatabaseRequest.pb(metastore.GetDatabaseRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = metastore.Database.to_json(metastore.Database()) - req.return_value.content = return_value - - request = metastore.GetDatabaseRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = metastore.Database() - post_with_metadata.return_value = metastore.Database(), metadata - - client.get_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_databases_rest_bad_request(request_type=metastore.ListDatabasesRequest): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/catalogs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_databases(request) - - -@pytest.mark.parametrize("request_type", [ - metastore.ListDatabasesRequest, - dict, -]) -def test_list_databases_rest_call_success(request_type): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/catalogs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metastore.ListDatabasesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.ListDatabasesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_databases(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDatabasesPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_databases_rest_interceptors(null_interceptor): - transport = transports.MetastoreServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetastoreServiceRestInterceptor(), - ) - client = MetastoreServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_list_databases") as post, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_list_databases_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "pre_list_databases") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = metastore.ListDatabasesRequest.pb(metastore.ListDatabasesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = metastore.ListDatabasesResponse.to_json(metastore.ListDatabasesResponse()) - req.return_value.content = return_value - - request = metastore.ListDatabasesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = metastore.ListDatabasesResponse() - post_with_metadata.return_value = metastore.ListDatabasesResponse(), metadata - - client.list_databases(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_table_rest_bad_request(request_type=metastore.CreateTableRequest): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_table(request) - - -@pytest.mark.parametrize("request_type", [ - metastore.CreateTableRequest, - dict, -]) -def test_create_table_rest_call_success(request_type): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4'} - request_init["table"] = {'hive_options': {'parameters': {}, 'table_type': 'table_type_value', 'storage_descriptor': {'location_uri': 'location_uri_value', 'input_format': 'input_format_value', 'output_format': 'output_format_value', 'serde_info': {'serialization_lib': 'serialization_lib_value'}}}, 'name': 'name_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'delete_time': {}, 'expire_time': {}, 'type_': 1, 'etag': 'etag_value'} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = metastore.CreateTableRequest.meta.fields["table"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["table"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["table"][field])): - del request_init["table"][field][i][subfield] - else: - del request_init["table"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metastore.Table( - name='name_value', - type_=metastore.Table.Type.HIVE, - etag='etag_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.Table.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_table(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, metastore.Table) - assert response.name == 'name_value' - assert response.type_ == metastore.Table.Type.HIVE - assert response.etag == 'etag_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_table_rest_interceptors(null_interceptor): - transport = transports.MetastoreServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetastoreServiceRestInterceptor(), - ) - client = MetastoreServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_create_table") as post, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_create_table_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "pre_create_table") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = metastore.CreateTableRequest.pb(metastore.CreateTableRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = metastore.Table.to_json(metastore.Table()) - req.return_value.content = return_value - - request = metastore.CreateTableRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = metastore.Table() - post_with_metadata.return_value = metastore.Table(), metadata - - client.create_table(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_table_rest_bad_request(request_type=metastore.DeleteTableRequest): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_table(request) - - -@pytest.mark.parametrize("request_type", [ - metastore.DeleteTableRequest, - dict, -]) -def test_delete_table_rest_call_success(request_type): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
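- # headers is set to a real dict because the transport reads the response
- # headers while handling the result (e.g. for debug logging); a bare Mock
- # attribute would not behave like a mapping.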
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metastore.Table( - name='name_value', - type_=metastore.Table.Type.HIVE, - etag='etag_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.Table.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_table(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Table) - assert response.name == 'name_value' - assert response.type_ == metastore.Table.Type.HIVE - assert response.etag == 'etag_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_table_rest_interceptors(null_interceptor): - transport = transports.MetastoreServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetastoreServiceRestInterceptor(), - ) - client = MetastoreServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_delete_table") as post, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_delete_table_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "pre_delete_table") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = metastore.DeleteTableRequest.pb(metastore.DeleteTableRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = metastore.Table.to_json(metastore.Table()) - req.return_value.content = return_value - - request = metastore.DeleteTableRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = metastore.Table() - post_with_metadata.return_value = metastore.Table(), metadata - - client.delete_table(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_table_rest_bad_request(request_type=metastore.UpdateTableRequest): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'table': {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_table(request) - - -@pytest.mark.parametrize("request_type", [ - metastore.UpdateTableRequest, - dict, -]) -def test_update_table_rest_call_success(request_type): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'table': {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5'}} - request_init["table"] = {'hive_options': {'parameters': {}, 'table_type': 'table_type_value', 'storage_descriptor': {'location_uri': 'location_uri_value', 'input_format': 'input_format_value', 'output_format': 'output_format_value', 'serde_info': {'serialization_lib': 'serialization_lib_value'}}}, 'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'delete_time': {}, 'expire_time': {}, 'type_': 1, 'etag': 'etag_value'} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = metastore.UpdateTableRequest.meta.fields["table"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["table"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["table"][field])): - del request_init["table"][field][i][subfield] - else: - del request_init["table"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metastore.Table( - name='name_value', - type_=metastore.Table.Type.HIVE, - etag='etag_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.Table.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_table(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, metastore.Table) - assert response.name == 'name_value' - assert response.type_ == metastore.Table.Type.HIVE - assert response.etag == 'etag_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_table_rest_interceptors(null_interceptor): - transport = transports.MetastoreServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetastoreServiceRestInterceptor(), - ) - client = MetastoreServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_update_table") as post, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_update_table_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "pre_update_table") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = metastore.UpdateTableRequest.pb(metastore.UpdateTableRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = metastore.Table.to_json(metastore.Table()) - req.return_value.content = return_value - - request = metastore.UpdateTableRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = metastore.Table() - post_with_metadata.return_value = metastore.Table(), metadata - - client.update_table(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_rename_table_rest_bad_request(request_type=metastore.RenameTableRequest): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.rename_table(request) - - -@pytest.mark.parametrize("request_type", [ - metastore.RenameTableRequest, - dict, -]) -def test_rename_table_rest_call_success(request_type): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metastore.Table( - name='name_value', - type_=metastore.Table.Type.HIVE, - etag='etag_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.Table.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.rename_table(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Table) - assert response.name == 'name_value' - assert response.type_ == metastore.Table.Type.HIVE - assert response.etag == 'etag_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_rename_table_rest_interceptors(null_interceptor): - transport = transports.MetastoreServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetastoreServiceRestInterceptor(), - ) - client = MetastoreServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_rename_table") as post, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_rename_table_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "pre_rename_table") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = metastore.RenameTableRequest.pb(metastore.RenameTableRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = metastore.Table.to_json(metastore.Table()) - req.return_value.content = return_value - - request = metastore.RenameTableRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = metastore.Table() - post_with_metadata.return_value = metastore.Table(), metadata - - client.rename_table(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_table_rest_bad_request(request_type=metastore.GetTableRequest): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_table(request) - - -@pytest.mark.parametrize("request_type", [ - metastore.GetTableRequest, - dict, -]) -def test_get_table_rest_call_success(request_type): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metastore.Table( - name='name_value', - type_=metastore.Table.Type.HIVE, - etag='etag_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.Table.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_table(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, metastore.Table) - assert response.name == 'name_value' - assert response.type_ == metastore.Table.Type.HIVE - assert response.etag == 'etag_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_table_rest_interceptors(null_interceptor): - transport = transports.MetastoreServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetastoreServiceRestInterceptor(), - ) - client = MetastoreServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_get_table") as post, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_get_table_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "pre_get_table") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = metastore.GetTableRequest.pb(metastore.GetTableRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = metastore.Table.to_json(metastore.Table()) - req.return_value.content = return_value - - request = metastore.GetTableRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = metastore.Table() - post_with_metadata.return_value = metastore.Table(), metadata - - client.get_table(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_tables_rest_bad_request(request_type=metastore.ListTablesRequest): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_tables(request) - - -@pytest.mark.parametrize("request_type", [ - metastore.ListTablesRequest, - dict, -]) -def test_list_tables_rest_call_success(request_type): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metastore.ListTablesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.ListTablesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_tables(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTablesPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_tables_rest_interceptors(null_interceptor): - transport = transports.MetastoreServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetastoreServiceRestInterceptor(), - ) - client = MetastoreServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_list_tables") as post, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_list_tables_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "pre_list_tables") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = metastore.ListTablesRequest.pb(metastore.ListTablesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = metastore.ListTablesResponse.to_json(metastore.ListTablesResponse()) - req.return_value.content = return_value - - request = metastore.ListTablesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = metastore.ListTablesResponse() - post_with_metadata.return_value = metastore.ListTablesResponse(), metadata - - client.list_tables(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - -def test_initialize_client_w_rest(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_catalog_empty_call_rest(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_catalog), - '__call__') as call: - client.create_catalog(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.CreateCatalogRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_catalog_empty_call_rest(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_catalog), - '__call__') as call: - client.delete_catalog(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.DeleteCatalogRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_catalog_empty_call_rest(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_catalog), - '__call__') as call: - client.get_catalog(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.GetCatalogRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_catalogs_empty_call_rest(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_catalogs), - '__call__') as call: - client.list_catalogs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.ListCatalogsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_database_empty_call_rest(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - client.create_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.CreateDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_database_empty_call_rest(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_database), - '__call__') as call: - client.delete_database(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.DeleteDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_database_empty_call_rest(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - client.update_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.UpdateDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_database_empty_call_rest(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - client.get_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.GetDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_databases_empty_call_rest(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - client.list_databases(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.ListDatabasesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_table_empty_call_rest(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_table), - '__call__') as call: - client.create_table(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.CreateTableRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_table_empty_call_rest(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_table), - '__call__') as call: - client.delete_table(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.DeleteTableRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_table_empty_call_rest(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_table), - '__call__') as call: - client.update_table(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.UpdateTableRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_rename_table_empty_call_rest(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.rename_table), - '__call__') as call: - client.rename_table(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.RenameTableRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_table_empty_call_rest(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_table), - '__call__') as call: - client.get_table(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.GetTableRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_tables_empty_call_rest(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_tables), - '__call__') as call: - client.list_tables(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.ListTablesRequest() - - assert args[0] == request_msg - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.MetastoreServiceGrpcTransport, - ) - -def test_metastore_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.MetastoreServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_metastore_service_base_transport(): - # Instantiate the base transport. 
- with mock.patch('google.cloud.bigquery_biglake_v1.services.metastore_service.transports.MetastoreServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.MetastoreServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'create_catalog', - 'delete_catalog', - 'get_catalog', - 'list_catalogs', - 'create_database', - 'delete_database', - 'update_database', - 'get_database', - 'list_databases', - 'create_table', - 'delete_table', - 'update_table', - 'rename_table', - 'get_table', - 'list_tables', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_metastore_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.bigquery_biglake_v1.services.metastore_service.transports.MetastoreServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.MetastoreServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_metastore_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.bigquery_biglake_v1.services.metastore_service.transports.MetastoreServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.MetastoreServiceTransport() - adc.assert_called_once() - - -def test_metastore_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - MetastoreServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.MetastoreServiceGrpcTransport, - transports.MetastoreServiceGrpcAsyncIOTransport, - ], -) -def test_metastore_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/bigquery', 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.MetastoreServiceGrpcTransport, - transports.MetastoreServiceGrpcAsyncIOTransport, - transports.MetastoreServiceRestTransport, - ], -) -def test_metastore_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.MetastoreServiceGrpcTransport, grpc_helpers), - (transports.MetastoreServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_metastore_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "biglake.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="biglake.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.MetastoreServiceGrpcTransport, transports.MetastoreServiceGrpcAsyncIOTransport]) -def test_metastore_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_metastore_service_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.MetastoreServiceRestTransport( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_metastore_service_host_no_port(transport_name): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='biglake.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'biglake.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://biglake.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_metastore_service_host_with_port(transport_name): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='biglake.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'biglake.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://biglake.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_metastore_service_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = MetastoreServiceClient( - credentials=creds1, - transport=transport_name, - ) - client2 = MetastoreServiceClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.create_catalog._session - session2 = client2.transport.create_catalog._session - assert session1 != session2 - session1 = client1.transport.delete_catalog._session - session2 = client2.transport.delete_catalog._session - assert session1 != session2 - session1 = client1.transport.get_catalog._session - session2 = client2.transport.get_catalog._session - assert session1 != session2 - session1 = client1.transport.list_catalogs._session - session2 = client2.transport.list_catalogs._session - assert session1 != session2 - session1 = client1.transport.create_database._session - session2 = client2.transport.create_database._session - assert session1 != session2 - session1 = client1.transport.delete_database._session - session2 = client2.transport.delete_database._session - assert session1 != session2 - session1 = client1.transport.update_database._session - session2 = client2.transport.update_database._session - assert session1 != session2 - session1 = client1.transport.get_database._session - session2 = client2.transport.get_database._session - assert session1 != session2 - session1 = client1.transport.list_databases._session - session2 = client2.transport.list_databases._session
- assert session1 != session2 - session1 = client1.transport.create_table._session - session2 = client2.transport.create_table._session - assert session1 != session2 - session1 = client1.transport.delete_table._session - session2 = client2.transport.delete_table._session - assert session1 != session2 - session1 = client1.transport.update_table._session - session2 = client2.transport.update_table._session - assert session1 != session2 - session1 = client1.transport.rename_table._session - session2 = client2.transport.rename_table._session - assert session1 != session2 - session1 = client1.transport.get_table._session - session2 = client2.transport.get_table._session - assert session1 != session2 - session1 = client1.transport.list_tables._session - session2 = client2.transport.list_tables._session - assert session1 != session2 -def test_metastore_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.MetastoreServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials is None - - -def test_metastore_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.MetastoreServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials is None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.MetastoreServiceGrpcTransport, transports.MetastoreServiceGrpcAsyncIOTransport]) -def test_metastore_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.MetastoreServiceGrpcTransport, transports.MetastoreServiceGrpcAsyncIOTransport]) -def test_metastore_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_catalog_path(): - project = "squid" - location = "clam" - catalog = "whelk" - expected = "projects/{project}/locations/{location}/catalogs/{catalog}".format(project=project, location=location, catalog=catalog, ) - actual = MetastoreServiceClient.catalog_path(project, location, catalog) - assert expected == actual - - -def test_parse_catalog_path(): - expected = { - "project": "octopus", - "location": "oyster", - "catalog": "nudibranch", - } - path = MetastoreServiceClient.catalog_path(**expected) - - # Check that the path construction is reversible. 
- actual = MetastoreServiceClient.parse_catalog_path(path) - assert expected == actual - -def test_database_path(): - project = "cuttlefish" - location = "mussel" - catalog = "winkle" - database = "nautilus" - expected = "projects/{project}/locations/{location}/catalogs/{catalog}/databases/{database}".format(project=project, location=location, catalog=catalog, database=database, ) - actual = MetastoreServiceClient.database_path(project, location, catalog, database) - assert expected == actual - - -def test_parse_database_path(): - expected = { - "project": "scallop", - "location": "abalone", - "catalog": "squid", - "database": "clam", - } - path = MetastoreServiceClient.database_path(**expected) - - # Check that the path construction is reversible. - actual = MetastoreServiceClient.parse_database_path(path) - assert expected == actual - -def test_table_path(): - project = "whelk" - location = "octopus" - catalog = "oyster" - database = "nudibranch" - table = "cuttlefish" - expected = "projects/{project}/locations/{location}/catalogs/{catalog}/databases/{database}/tables/{table}".format(project=project, location=location, catalog=catalog, database=database, table=table, ) - actual = MetastoreServiceClient.table_path(project, location, catalog, database, table) - assert expected == actual - - -def test_parse_table_path(): - expected = { - "project": "mussel", - "location": "winkle", - "catalog": "nautilus", - "database": "scallop", - "table": "abalone", - } - path = MetastoreServiceClient.table_path(**expected) - - # Check that the path construction is reversible. - actual = MetastoreServiceClient.parse_table_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = MetastoreServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = MetastoreServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = MetastoreServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format(folder=folder, ) - actual = MetastoreServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = MetastoreServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = MetastoreServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format(organization=organization, ) - actual = MetastoreServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = MetastoreServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. 
- actual = MetastoreServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format(project=project, ) - actual = MetastoreServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = MetastoreServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = MetastoreServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = MetastoreServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = MetastoreServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = MetastoreServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.MetastoreServiceTransport, '_prep_wrapped_messages') as prep: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.MetastoreServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = MetastoreServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_transport_close_grpc(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close_rest(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (MetastoreServiceClient, transports.MetastoreServiceGrpcTransport), - (MetastoreServiceAsyncClient, transports.MetastoreServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/.coveragerc b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/.coveragerc deleted file mode 100644 index a349e5f15a72..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/bigquery_biglake/__init__.py - google/cloud/bigquery_biglake/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/.flake8 b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/.flake8 deleted file mode 100644 index 29227d4cf419..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. 
- **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/MANIFEST.in b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/MANIFEST.in deleted file mode 100644 index 823bafea1cf6..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/bigquery_biglake *.py -recursive-include google/cloud/bigquery_biglake_v1alpha1 *.py diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/README.rst b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/README.rst deleted file mode 100644 index c531f2ff7bbd..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/README.rst +++ /dev/null @@ -1,143 +0,0 @@ -Python Client for Google Cloud Bigquery Biglake API -=================================================== - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Bigquery Biglake API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv <your-env> - source <your-env>/bin/activate - <your-env>/bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. code-block:: console - - python3 -m venv <your-env> - <your-env>\Scripts\activate - <your-env>\Scripts\pip.exe install \path\to\library - - -Logging -------- - -This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes. -Note the following: - -#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging. -#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**. -#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below. - - -Simple, environment-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google -logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged -messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging -event. 
- -A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log. - -- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc. -- Invalid logging scopes: :code:`foo`, :code:`123`, etc. - -**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers. - - -Examples -^^^^^^^^ - -- Enabling the default handler for all Google-based loggers - -.. code-block:: console - - export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google - -- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`): - -.. code-block:: console - - export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1 - - -Advanced, code-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -You can also configure a valid logging scope using Python's standard `logging` mechanism. - - -Examples -^^^^^^^^ - -- Configuring a handler for all Google-based loggers - -.. code-block:: python - - import logging - - from google.cloud.translate_v3 import translate - - base_logger = logging.getLogger("google") - base_logger.addHandler(logging.StreamHandler()) - base_logger.setLevel(logging.DEBUG) - -- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`): - -.. code-block:: python - - import logging - - from google.cloud.translate_v3 import translate - - base_logger = logging.getLogger("google.cloud.library_v1") - base_logger.addHandler(logging.StreamHandler()) - base_logger.setLevel(logging.DEBUG) - - -Logging details -~~~~~~~~~~~~~~~ - -#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root - logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set - :code:`logging.getLogger("google").propagate = True` in your code. -#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for - one library, but decide you need to also set up environment-based logging configuration for another library. - - #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual - if the code-based configuration gets applied first. - -#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get - executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured. - (This is the reason for 2.i. above.) 
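A minimal, self-contained sketch of the propagation note above, using only the standard :code:`logging` module; the :code:`StreamHandler` and :code:`DEBUG` level mirror the examples above and are illustrative choices, not requirements:

.. code-block:: python

    import logging

    # Configure the shared "google" logger: attach a handler and capture
    # DEBUG-and-higher events emitted by Google client libraries.
    base_logger = logging.getLogger("google")
    base_logger.addHandler(logging.StreamHandler())
    base_logger.setLevel(logging.DEBUG)

    # Per the note above, these events are not propagated to the root
    # logger by default; opt in explicitly if handlers on the root
    # logger should also receive them.
    base_logger.propagate = True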
diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/docs/_static/custom.css b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/docs/_static/custom.css deleted file mode 100644 index 06423be0b592..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/docs/_static/custom.css +++ /dev/null @@ -1,3 +0,0 @@ -dl.field-list > dt { - min-width: 100px -} diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/docs/bigquery_biglake_v1alpha1/metastore_service.rst b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/docs/bigquery_biglake_v1alpha1/metastore_service.rst deleted file mode 100644 index ad8727cc527d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/docs/bigquery_biglake_v1alpha1/metastore_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -MetastoreService ----------------------------------- - -.. automodule:: google.cloud.bigquery_biglake_v1alpha1.services.metastore_service - :members: - :inherited-members: - -.. automodule:: google.cloud.bigquery_biglake_v1alpha1.services.metastore_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/docs/bigquery_biglake_v1alpha1/services_.rst b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/docs/bigquery_biglake_v1alpha1/services_.rst deleted file mode 100644 index 2364a8885eb9..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/docs/bigquery_biglake_v1alpha1/services_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Bigquery Biglake v1alpha1 API -======================================================= -.. toctree:: - :maxdepth: 2 - - metastore_service diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/docs/bigquery_biglake_v1alpha1/types_.rst b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/docs/bigquery_biglake_v1alpha1/types_.rst deleted file mode 100644 index 03791e4296d6..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/docs/bigquery_biglake_v1alpha1/types_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Bigquery Biglake v1alpha1 API -==================================================== - -.. automodule:: google.cloud.bigquery_biglake_v1alpha1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/docs/conf.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/docs/conf.py deleted file mode 100644 index 8ea454bab3d6..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-bigquery-biglake documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. 
-# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGELOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffixes as a list of strings: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = u"google-cloud-bigquery-biglake" -copyright = u"2023, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as a replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = 'en' - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. 
-# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). 
-# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-bigquery-biglake-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-bigquery-biglake.tex", - u"google-cloud-bigquery-biglake Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-bigquery-biglake", - u"Google Cloud Bigquery Biglake Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-bigquery-biglake", - u"google-cloud-bigquery-biglake Documentation", - author, - "google-cloud-bigquery-biglake", - "GAPIC library for Google Cloud Bigquery Biglake API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. 
-# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/docs/index.rst b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/docs/index.rst deleted file mode 100644 index 0d084b152cf6..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - bigquery_biglake_v1alpha1/services_ - bigquery_biglake_v1alpha1/types_ diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake/__init__.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake/__init__.py deleted file mode 100644 index a70ae27cf383..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake/__init__.py +++ /dev/null @@ -1,87 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.bigquery_biglake import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.bigquery_biglake_v1alpha1.services.metastore_service.client import MetastoreServiceClient -from google.cloud.bigquery_biglake_v1alpha1.services.metastore_service.async_client import MetastoreServiceAsyncClient - -from google.cloud.bigquery_biglake_v1alpha1.types.metastore import Catalog -from google.cloud.bigquery_biglake_v1alpha1.types.metastore import CheckLockRequest -from google.cloud.bigquery_biglake_v1alpha1.types.metastore import CreateCatalogRequest -from google.cloud.bigquery_biglake_v1alpha1.types.metastore import CreateDatabaseRequest -from google.cloud.bigquery_biglake_v1alpha1.types.metastore import CreateLockRequest -from google.cloud.bigquery_biglake_v1alpha1.types.metastore import CreateTableRequest -from google.cloud.bigquery_biglake_v1alpha1.types.metastore import Database -from google.cloud.bigquery_biglake_v1alpha1.types.metastore import DeleteCatalogRequest -from google.cloud.bigquery_biglake_v1alpha1.types.metastore import DeleteDatabaseRequest -from google.cloud.bigquery_biglake_v1alpha1.types.metastore import DeleteLockRequest -from google.cloud.bigquery_biglake_v1alpha1.types.metastore import DeleteTableRequest -from google.cloud.bigquery_biglake_v1alpha1.types.metastore import GetCatalogRequest -from google.cloud.bigquery_biglake_v1alpha1.types.metastore import GetDatabaseRequest -from google.cloud.bigquery_biglake_v1alpha1.types.metastore import GetTableRequest -from google.cloud.bigquery_biglake_v1alpha1.types.metastore import HiveDatabaseOptions -from google.cloud.bigquery_biglake_v1alpha1.types.metastore import HiveTableOptions -from google.cloud.bigquery_biglake_v1alpha1.types.metastore import ListCatalogsRequest -from google.cloud.bigquery_biglake_v1alpha1.types.metastore import ListCatalogsResponse -from google.cloud.bigquery_biglake_v1alpha1.types.metastore import ListDatabasesRequest -from google.cloud.bigquery_biglake_v1alpha1.types.metastore import ListDatabasesResponse -from google.cloud.bigquery_biglake_v1alpha1.types.metastore import ListLocksRequest -from google.cloud.bigquery_biglake_v1alpha1.types.metastore import ListLocksResponse -from google.cloud.bigquery_biglake_v1alpha1.types.metastore import ListTablesRequest -from google.cloud.bigquery_biglake_v1alpha1.types.metastore import ListTablesResponse -from google.cloud.bigquery_biglake_v1alpha1.types.metastore import Lock -from google.cloud.bigquery_biglake_v1alpha1.types.metastore import RenameTableRequest -from google.cloud.bigquery_biglake_v1alpha1.types.metastore import Table -from google.cloud.bigquery_biglake_v1alpha1.types.metastore import UpdateDatabaseRequest -from google.cloud.bigquery_biglake_v1alpha1.types.metastore import UpdateTableRequest -from google.cloud.bigquery_biglake_v1alpha1.types.metastore import TableView - -__all__ = ('MetastoreServiceClient', - 'MetastoreServiceAsyncClient', - 'Catalog', - 'CheckLockRequest', - 'CreateCatalogRequest', - 'CreateDatabaseRequest', - 'CreateLockRequest', - 'CreateTableRequest', - 'Database', - 'DeleteCatalogRequest', - 'DeleteDatabaseRequest', - 'DeleteLockRequest', - 'DeleteTableRequest', - 'GetCatalogRequest', - 'GetDatabaseRequest', - 'GetTableRequest', - 'HiveDatabaseOptions', - 'HiveTableOptions', - 'ListCatalogsRequest', - 'ListCatalogsResponse', - 'ListDatabasesRequest', - 'ListDatabasesResponse', - 'ListLocksRequest', - 'ListLocksResponse', - 'ListTablesRequest', - 'ListTablesResponse', - 
'Lock', - 'RenameTableRequest', - 'Table', - 'UpdateDatabaseRequest', - 'UpdateTableRequest', - 'TableView', -) diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake/gapic_version.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake/py.typed b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake/py.typed deleted file mode 100644 index 70e9a3b83398..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-bigquery-biglake package uses inline types. diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/__init__.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/__init__.py deleted file mode 100644 index 1e115365b185..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/__init__.py +++ /dev/null @@ -1,88 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.bigquery_biglake_v1alpha1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.metastore_service import MetastoreServiceClient -from .services.metastore_service import MetastoreServiceAsyncClient - -from .types.metastore import Catalog -from .types.metastore import CheckLockRequest -from .types.metastore import CreateCatalogRequest -from .types.metastore import CreateDatabaseRequest -from .types.metastore import CreateLockRequest -from .types.metastore import CreateTableRequest -from .types.metastore import Database -from .types.metastore import DeleteCatalogRequest -from .types.metastore import DeleteDatabaseRequest -from .types.metastore import DeleteLockRequest -from .types.metastore import DeleteTableRequest -from .types.metastore import GetCatalogRequest -from .types.metastore import GetDatabaseRequest -from .types.metastore import GetTableRequest -from .types.metastore import HiveDatabaseOptions -from .types.metastore import HiveTableOptions -from .types.metastore import ListCatalogsRequest -from .types.metastore import ListCatalogsResponse -from .types.metastore import ListDatabasesRequest -from .types.metastore import ListDatabasesResponse -from .types.metastore import ListLocksRequest -from .types.metastore import ListLocksResponse -from .types.metastore import ListTablesRequest -from .types.metastore import ListTablesResponse -from .types.metastore import Lock -from .types.metastore import RenameTableRequest -from .types.metastore import Table -from .types.metastore import UpdateDatabaseRequest -from .types.metastore import UpdateTableRequest -from .types.metastore import TableView - -__all__ = ( - 'MetastoreServiceAsyncClient', -'Catalog', -'CheckLockRequest', -'CreateCatalogRequest', -'CreateDatabaseRequest', -'CreateLockRequest', -'CreateTableRequest', -'Database', -'DeleteCatalogRequest', -'DeleteDatabaseRequest', -'DeleteLockRequest', -'DeleteTableRequest', -'GetCatalogRequest', -'GetDatabaseRequest', -'GetTableRequest', -'HiveDatabaseOptions', -'HiveTableOptions', -'ListCatalogsRequest', -'ListCatalogsResponse', -'ListDatabasesRequest', -'ListDatabasesResponse', -'ListLocksRequest', -'ListLocksResponse', -'ListTablesRequest', -'ListTablesResponse', -'Lock', -'MetastoreServiceClient', -'RenameTableRequest', -'Table', -'TableView', -'UpdateDatabaseRequest', -'UpdateTableRequest', -) diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/gapic_metadata.json b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/gapic_metadata.json deleted file mode 100644 index ced0841886ac..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/gapic_metadata.json +++ /dev/null @@ -1,313 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.bigquery_biglake_v1alpha1", - "protoPackage": "google.cloud.bigquery.biglake.v1alpha1", - "schema": "1.0", - "services": { - "MetastoreService": { - "clients": { - "grpc": { - "libraryClient": "MetastoreServiceClient", - "rpcs": { - "CheckLock": { - "methods": [ - "check_lock" - ] - }, - "CreateCatalog": { - "methods": [ - "create_catalog" - ] - }, - "CreateDatabase": { - "methods": [ - "create_database" - ] - }, - "CreateLock": { - "methods": [ - "create_lock" - ] - }, - "CreateTable": { - "methods": [ - "create_table" - ] - 
}, - "DeleteCatalog": { - "methods": [ - "delete_catalog" - ] - }, - "DeleteDatabase": { - "methods": [ - "delete_database" - ] - }, - "DeleteLock": { - "methods": [ - "delete_lock" - ] - }, - "DeleteTable": { - "methods": [ - "delete_table" - ] - }, - "GetCatalog": { - "methods": [ - "get_catalog" - ] - }, - "GetDatabase": { - "methods": [ - "get_database" - ] - }, - "GetTable": { - "methods": [ - "get_table" - ] - }, - "ListCatalogs": { - "methods": [ - "list_catalogs" - ] - }, - "ListDatabases": { - "methods": [ - "list_databases" - ] - }, - "ListLocks": { - "methods": [ - "list_locks" - ] - }, - "ListTables": { - "methods": [ - "list_tables" - ] - }, - "RenameTable": { - "methods": [ - "rename_table" - ] - }, - "UpdateDatabase": { - "methods": [ - "update_database" - ] - }, - "UpdateTable": { - "methods": [ - "update_table" - ] - } - } - }, - "grpc-async": { - "libraryClient": "MetastoreServiceAsyncClient", - "rpcs": { - "CheckLock": { - "methods": [ - "check_lock" - ] - }, - "CreateCatalog": { - "methods": [ - "create_catalog" - ] - }, - "CreateDatabase": { - "methods": [ - "create_database" - ] - }, - "CreateLock": { - "methods": [ - "create_lock" - ] - }, - "CreateTable": { - "methods": [ - "create_table" - ] - }, - "DeleteCatalog": { - "methods": [ - "delete_catalog" - ] - }, - "DeleteDatabase": { - "methods": [ - "delete_database" - ] - }, - "DeleteLock": { - "methods": [ - "delete_lock" - ] - }, - "DeleteTable": { - "methods": [ - "delete_table" - ] - }, - "GetCatalog": { - "methods": [ - "get_catalog" - ] - }, - "GetDatabase": { - "methods": [ - "get_database" - ] - }, - "GetTable": { - "methods": [ - "get_table" - ] - }, - "ListCatalogs": { - "methods": [ - "list_catalogs" - ] - }, - "ListDatabases": { - "methods": [ - "list_databases" - ] - }, - "ListLocks": { - "methods": [ - "list_locks" - ] - }, - "ListTables": { - "methods": [ - "list_tables" - ] - }, - "RenameTable": { - "methods": [ - "rename_table" - ] - }, - "UpdateDatabase": { - "methods": [ - "update_database" - ] - }, - "UpdateTable": { - "methods": [ - "update_table" - ] - } - } - }, - "rest": { - "libraryClient": "MetastoreServiceClient", - "rpcs": { - "CheckLock": { - "methods": [ - "check_lock" - ] - }, - "CreateCatalog": { - "methods": [ - "create_catalog" - ] - }, - "CreateDatabase": { - "methods": [ - "create_database" - ] - }, - "CreateLock": { - "methods": [ - "create_lock" - ] - }, - "CreateTable": { - "methods": [ - "create_table" - ] - }, - "DeleteCatalog": { - "methods": [ - "delete_catalog" - ] - }, - "DeleteDatabase": { - "methods": [ - "delete_database" - ] - }, - "DeleteLock": { - "methods": [ - "delete_lock" - ] - }, - "DeleteTable": { - "methods": [ - "delete_table" - ] - }, - "GetCatalog": { - "methods": [ - "get_catalog" - ] - }, - "GetDatabase": { - "methods": [ - "get_database" - ] - }, - "GetTable": { - "methods": [ - "get_table" - ] - }, - "ListCatalogs": { - "methods": [ - "list_catalogs" - ] - }, - "ListDatabases": { - "methods": [ - "list_databases" - ] - }, - "ListLocks": { - "methods": [ - "list_locks" - ] - }, - "ListTables": { - "methods": [ - "list_tables" - ] - }, - "RenameTable": { - "methods": [ - "rename_table" - ] - }, - "UpdateDatabase": { - "methods": [ - "update_database" - ] - }, - "UpdateTable": { - "methods": [ - "update_table" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/gapic_version.py 
b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/py.typed b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/py.typed deleted file mode 100644 index 70e9a3b83398..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-bigquery-biglake package uses inline types. diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/services/__init__.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/services/__init__.py deleted file mode 100644 index 8f6cf068242c..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/__init__.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/__init__.py deleted file mode 100644 index 3e854a1eaae5..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import MetastoreServiceClient -from .async_client import MetastoreServiceAsyncClient - -__all__ = ( - 'MetastoreServiceClient', - 'MetastoreServiceAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/async_client.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/async_client.py deleted file mode 100644 index e51744ea2c77..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/async_client.py +++ /dev/null @@ -1,2457 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.bigquery_biglake_v1alpha1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.cloud.bigquery_biglake_v1alpha1.services.metastore_service import pagers -from google.cloud.bigquery_biglake_v1alpha1.types import metastore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import MetastoreServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import MetastoreServiceGrpcAsyncIOTransport -from .client import MetastoreServiceClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -class MetastoreServiceAsyncClient: - """BigLake Metastore is a serverless, highly available, multi-tenant - runtime metastore for Google Cloud Data Analytics products. 
- - The BigLake Metastore API defines the following resource model: - - - A collection of Google Cloud projects: ``/projects/*`` - - Each project has a collection of available locations: - ``/locations/*`` - - Each location has a collection of catalogs: ``/catalogs/*`` - - Each catalog has a collection of databases: ``/databases/*`` - - Each database has a collection of tables: ``/tables/*`` - """ - - _client: MetastoreServiceClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = MetastoreServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = MetastoreServiceClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = MetastoreServiceClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = MetastoreServiceClient._DEFAULT_UNIVERSE - - catalog_path = staticmethod(MetastoreServiceClient.catalog_path) - parse_catalog_path = staticmethod(MetastoreServiceClient.parse_catalog_path) - database_path = staticmethod(MetastoreServiceClient.database_path) - parse_database_path = staticmethod(MetastoreServiceClient.parse_database_path) - lock_path = staticmethod(MetastoreServiceClient.lock_path) - parse_lock_path = staticmethod(MetastoreServiceClient.parse_lock_path) - table_path = staticmethod(MetastoreServiceClient.table_path) - parse_table_path = staticmethod(MetastoreServiceClient.parse_table_path) - common_billing_account_path = staticmethod(MetastoreServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(MetastoreServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(MetastoreServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(MetastoreServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(MetastoreServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(MetastoreServiceClient.parse_common_organization_path) - common_project_path = staticmethod(MetastoreServiceClient.common_project_path) - parse_common_project_path = staticmethod(MetastoreServiceClient.parse_common_project_path) - common_location_path = staticmethod(MetastoreServiceClient.common_location_path) - parse_common_location_path = staticmethod(MetastoreServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - MetastoreServiceAsyncClient: The constructed client. - """ - return MetastoreServiceClient.from_service_account_info.__func__(MetastoreServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - MetastoreServiceAsyncClient: The constructed client. 
- """ - return MetastoreServiceClient.from_service_account_file.__func__(MetastoreServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return MetastoreServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> MetastoreServiceTransport: - """Returns the transport used by the client instance. - - Returns: - MetastoreServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = MetastoreServiceClient.get_transport_class - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, MetastoreServiceTransport, Callable[..., MetastoreServiceTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the metastore service async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,MetastoreServiceTransport,Callable[..., MetastoreServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the MetastoreServiceTransport constructor. 
- If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which can have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = MetastoreServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceAsyncClient`.", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { - "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "credentialsType": None, - } - ) - - async def create_catalog(self, - request: Optional[Union[metastore.CreateCatalogRequest, dict]] = None, - *, - parent: Optional[str] = None, - catalog: Optional[metastore.Catalog] = None, - catalog_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Catalog: - r"""Creates a new catalog. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1alpha1 - - async def sample_create_catalog(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.CreateCatalogRequest( - parent="parent_value", - catalog_id="catalog_id_value", - ) - - # Make the request - response = await client.create_catalog(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_biglake_v1alpha1.types.CreateCatalogRequest, dict]]): - The request object. Request message for the CreateCatalog - method. - parent (:class:`str`): - Required. The parent resource where this catalog will be - created. Format: - projects/{project_id_or_number}/locations/{location_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - catalog (:class:`google.cloud.bigquery_biglake_v1alpha1.types.Catalog`): - Required. The catalog to create. The ``name`` field does - not need to be provided. - - This corresponds to the ``catalog`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - catalog_id (:class:`str`): - Required. The ID to use for the - catalog, which will become the final - component of the catalog's resource - name. - - This corresponds to the ``catalog_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1alpha1.types.Catalog: - Catalog is the container of - databases. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, catalog, catalog_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.CreateCatalogRequest): - request = metastore.CreateCatalogRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if catalog is not None: - request.catalog = catalog - if catalog_id is not None: - request.catalog_id = catalog_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_catalog] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_catalog(self, - request: Optional[Union[metastore.DeleteCatalogRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Catalog: - r"""Deletes an existing catalog specified by the catalog - ID. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1alpha1 - - async def sample_delete_catalog(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.DeleteCatalogRequest( - name="name_value", - ) - - # Make the request - response = await client.delete_catalog(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_biglake_v1alpha1.types.DeleteCatalogRequest, dict]]): - The request object. Request message for the DeleteCatalog - method. - name (:class:`str`): - Required. The name of the catalog to delete. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1alpha1.types.Catalog: - Catalog is the container of - databases. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.DeleteCatalogRequest): - request = metastore.DeleteCatalogRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_catalog] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_catalog(self, - request: Optional[Union[metastore.GetCatalogRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Catalog: - r"""Gets the catalog specified by the resource name. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1alpha1 - - async def sample_get_catalog(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.GetCatalogRequest( - name="name_value", - ) - - # Make the request - response = await client.get_catalog(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_biglake_v1alpha1.types.GetCatalogRequest, dict]]): - The request object. Request message for the GetCatalog - method. - name (:class:`str`): - Required. The name of the catalog to retrieve. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1alpha1.types.Catalog: - Catalog is the container of - databases. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
- if not isinstance(request, metastore.GetCatalogRequest): - request = metastore.GetCatalogRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_catalog] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_catalogs(self, - request: Optional[Union[metastore.ListCatalogsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListCatalogsAsyncPager: - r"""List all catalogs in a specified project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1alpha1 - - async def sample_list_catalogs(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.ListCatalogsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_catalogs(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_biglake_v1alpha1.types.ListCatalogsRequest, dict]]): - The request object. Request message for the ListCatalogs - method. - parent (:class:`str`): - Required. The parent, which owns this collection of - catalogs. Format: - projects/{project_id_or_number}/locations/{location_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1alpha1.services.metastore_service.pagers.ListCatalogsAsyncPager: - Response message for the ListCatalogs - method. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.ListCatalogsRequest): - request = metastore.ListCatalogsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_catalogs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListCatalogsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_database(self, - request: Optional[Union[metastore.CreateDatabaseRequest, dict]] = None, - *, - parent: Optional[str] = None, - database: Optional[metastore.Database] = None, - database_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Database: - r"""Creates a new database. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1alpha1 - - async def sample_create_database(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.CreateDatabaseRequest( - parent="parent_value", - database_id="database_id_value", - ) - - # Make the request - response = await client.create_database(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_biglake_v1alpha1.types.CreateDatabaseRequest, dict]]): - The request object. Request message for the - CreateDatabase method. - parent (:class:`str`): - Required. The parent resource where this database will - be created. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - database (:class:`google.cloud.bigquery_biglake_v1alpha1.types.Database`): - Required. 
The database to create. The ``name`` field - does not need to be provided. - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - database_id (:class:`str`): - Required. The ID to use for the - database, which will become the final - component of the database's resource - name. - - This corresponds to the ``database_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1alpha1.types.Database: - Database is the container of tables. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, database, database_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.CreateDatabaseRequest): - request = metastore.CreateDatabaseRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if database is not None: - request.database = database - if database_id is not None: - request.database_id = database_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_database] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_database(self, - request: Optional[Union[metastore.DeleteDatabaseRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Database: - r"""Deletes an existing database specified by the - database ID. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1alpha1 - - async def sample_delete_database(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.DeleteDatabaseRequest( - name="name_value", - ) - - # Make the request - response = await client.delete_database(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_biglake_v1alpha1.types.DeleteDatabaseRequest, dict]]): - The request object. Request message for the - DeleteDatabase method. - name (:class:`str`): - Required. The name of the database to delete. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1alpha1.types.Database: - Database is the container of tables. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.DeleteDatabaseRequest): - request = metastore.DeleteDatabaseRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_database] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
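# (Descriptive note.) Unlike many Delete RPCs, which return Empty,
# DeleteDatabase returns the deleted Database resource, as declared in the
# method signature and Returns section above.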
- return response - - async def update_database(self, - request: Optional[Union[metastore.UpdateDatabaseRequest, dict]] = None, - *, - database: Optional[metastore.Database] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Database: - r"""Updates an existing database specified by the - database ID. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1alpha1 - - async def sample_update_database(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.UpdateDatabaseRequest( - ) - - # Make the request - response = await client.update_database(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_biglake_v1alpha1.types.UpdateDatabaseRequest, dict]]): - The request object. Request message for the - UpdateDatabase method. - database (:class:`google.cloud.bigquery_biglake_v1alpha1.types.Database`): - Required. The database to update. - - The database's ``name`` field is used to identify the - database to update. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - The list of fields to update. - - For the ``FieldMask`` definition, see - https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask - If not set, defaults to all of the fields that are - allowed to update. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1alpha1.types.Database: - Database is the container of tables. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
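# Illustrative sketch (hypothetical values; ``hive_options`` is assumed here
# to be an updatable Database field): building the flattened arguments that
# the checks below validate and copy onto the request. Note the database's
# ``name`` must be set, since it also feeds the routing header.
#
#   from google.protobuf import field_mask_pb2
#
#   database = bigquery_biglake_v1alpha1.Database(
#       name="projects/p/locations/l/catalogs/c/databases/d")
#   mask = field_mask_pb2.FieldMask(paths=["hive_options"])
#   updated = await client.update_database(database=database, update_mask=mask)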
- flattened_params = [database, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.UpdateDatabaseRequest): - request = metastore.UpdateDatabaseRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_database] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database.name", request.database.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_database(self, - request: Optional[Union[metastore.GetDatabaseRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Database: - r"""Gets the database specified by the resource name. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1alpha1 - - async def sample_get_database(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.GetDatabaseRequest( - name="name_value", - ) - - # Make the request - response = await client.get_database(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_biglake_v1alpha1.types.GetDatabaseRequest, dict]]): - The request object. Request message for the GetDatabase - method. - name (:class:`str`): - Required. The name of the database to retrieve. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1alpha1.types.Database: - Database is the container of tables. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.GetDatabaseRequest): - request = metastore.GetDatabaseRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_database] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_databases(self, - request: Optional[Union[metastore.ListDatabasesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListDatabasesAsyncPager: - r"""List all databases in a specified catalog. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1alpha1 - - async def sample_list_databases(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.ListDatabasesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_databases(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_biglake_v1alpha1.types.ListDatabasesRequest, dict]]): - The request object. Request message for the ListDatabases - method. - parent (:class:`str`): - Required. The parent, which owns this collection of - databases. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1alpha1.services.metastore_service.pagers.ListDatabasesAsyncPager: - Response message for the - ListDatabases method. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.ListDatabasesRequest): - request = metastore.ListDatabasesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_databases] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListDatabasesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_table(self, - request: Optional[Union[metastore.CreateTableRequest, dict]] = None, - *, - parent: Optional[str] = None, - table: Optional[metastore.Table] = None, - table_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Table: - r"""Creates a new table. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1alpha1 - - async def sample_create_table(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.CreateTableRequest( - parent="parent_value", - table_id="table_id_value", - ) - - # Make the request - response = await client.create_table(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_biglake_v1alpha1.types.CreateTableRequest, dict]]): - The request object. Request message for the CreateTable - method. - parent (:class:`str`): - Required. The parent resource where this table will be - created. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - table (:class:`google.cloud.bigquery_biglake_v1alpha1.types.Table`): - Required. The table to create. The ``name`` field does - not need to be provided for the table creation. - - This corresponds to the ``table`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - table_id (:class:`str`): - Required. The ID to use for the - table, which will become the final - component of the table's resource name. - - This corresponds to the ``table_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1alpha1.types.Table: - Represents a table. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, table, table_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.CreateTableRequest): - request = metastore.CreateTableRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if table is not None: - request.table = table - if table_id is not None: - request.table_id = table_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_table] - - # Certain fields should be provided within the metadata header; - # add these here. 
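# The tuple appended below is serialized into the ``x-goog-request-params``
# request header; for example, a parent of
# "projects/p/locations/l/catalogs/c/databases/d" yields
#   x-goog-request-params: parent=projects%2Fp%2Flocations%2Fl%2Fcatalogs%2Fc%2Fdatabases%2Fd
# (values are percent-encoded), which the backend uses to route the call.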
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_table(self, - request: Optional[Union[metastore.DeleteTableRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Table: - r"""Deletes an existing table specified by the table ID. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1alpha1 - - async def sample_delete_table(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.DeleteTableRequest( - name="name_value", - ) - - # Make the request - response = await client.delete_table(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_biglake_v1alpha1.types.DeleteTableRequest, dict]]): - The request object. Request message for the DeleteTable - method. - name (:class:`str`): - Required. The name of the table to delete. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1alpha1.types.Table: - Represents a table. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.DeleteTableRequest): - request = metastore.DeleteTableRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_table] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_table(self, - request: Optional[Union[metastore.UpdateTableRequest, dict]] = None, - *, - table: Optional[metastore.Table] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Table: - r"""Updates an existing table specified by the table ID. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1alpha1 - - async def sample_update_table(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.UpdateTableRequest( - ) - - # Make the request - response = await client.update_table(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_biglake_v1alpha1.types.UpdateTableRequest, dict]]): - The request object. Request message for the UpdateTable - method. - table (:class:`google.cloud.bigquery_biglake_v1alpha1.types.Table`): - Required. The table to update. - - The table's ``name`` field is used to identify the table - to update. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} - - This corresponds to the ``table`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - The list of fields to update. - - For the ``FieldMask`` definition, see - https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask - If not set, defaults to all of the fields that are - allowed to update. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1alpha1.types.Table: - Represents a table. 
- """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [table, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.UpdateTableRequest): - request = metastore.UpdateTableRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if table is not None: - request.table = table - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_table] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("table.name", request.table.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def rename_table(self, - request: Optional[Union[metastore.RenameTableRequest, dict]] = None, - *, - name: Optional[str] = None, - new_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Table: - r"""Renames an existing table specified by the table ID. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1alpha1 - - async def sample_rename_table(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.RenameTableRequest( - name="name_value", - new_name="new_name_value", - ) - - # Make the request - response = await client.rename_table(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_biglake_v1alpha1.types.RenameTableRequest, dict]]): - The request object. Request message for the RenameTable - method in MetastoreService - name (:class:`str`): - Required. The table's ``name`` field is used to identify - the table to rename. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - new_name (:class:`str`): - Required. 
The new ``name`` for the specified table, must - be in the same database. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} - - This corresponds to the ``new_name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1alpha1.types.Table: - Represents a table. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name, new_name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.RenameTableRequest): - request = metastore.RenameTableRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if new_name is not None: - request.new_name = new_name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.rename_table] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_table(self, - request: Optional[Union[metastore.GetTableRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Table: - r"""Gets the table specified by the resource name. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1alpha1 - - async def sample_get_table(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.GetTableRequest( - name="name_value", - ) - - # Make the request - response = await client.get_table(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_biglake_v1alpha1.types.GetTableRequest, dict]]): - The request object. Request message for the GetTable - method. - name (:class:`str`): - Required. The name of the table to retrieve. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1alpha1.types.Table: - Represents a table. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.GetTableRequest): - request = metastore.GetTableRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_table] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_tables(self, - request: Optional[Union[metastore.ListTablesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListTablesAsyncPager: - r"""List all tables in a specified database. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1alpha1 - - async def sample_list_tables(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.ListTablesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_tables(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_biglake_v1alpha1.types.ListTablesRequest, dict]]): - The request object. Request message for the ListTables - method. - parent (:class:`str`): - Required. The parent, which owns this collection of - tables. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1alpha1.services.metastore_service.pagers.ListTablesAsyncPager: - Response message for the ListTables - method. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.ListTablesRequest): - request = metastore.ListTablesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_tables] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
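# Note on the docstring sample above: ``list_tables`` is a coroutine, so the
# sample's ``page_result = client.list_tables(request=request)`` must be
# awaited (``page_result = await client.list_tables(request=request)``)
# before it can be iterated with ``async for``.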
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListTablesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_lock(self, - request: Optional[Union[metastore.CreateLockRequest, dict]] = None, - *, - parent: Optional[str] = None, - lock: Optional[metastore.Lock] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Lock: - r"""Creates a new lock. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1alpha1 - - async def sample_create_lock(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - lock = bigquery_biglake_v1alpha1.Lock() - lock.table_id = "table_id_value" - - request = bigquery_biglake_v1alpha1.CreateLockRequest( - parent="parent_value", - lock=lock, - ) - - # Make the request - response = await client.create_lock(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_biglake_v1alpha1.types.CreateLockRequest, dict]]): - The request object. Request message for the CreateLock - method. - parent (:class:`str`): - Required. The parent resource where this lock will be - created. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - lock (:class:`google.cloud.bigquery_biglake_v1alpha1.types.Lock`): - Required. The lock to create. The ``name`` field does - not need to be provided for the lock creation. - - This corresponds to the ``lock`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1alpha1.types.Lock: - Represents a lock. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
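# Flattened-call sketch (placeholder resource names): equivalent to passing a
# full CreateLockRequest, using the ``table_id`` field shown in the docstring
# sample above.
#
#   lock = bigquery_biglake_v1alpha1.Lock(table_id="my_table")
#   created = await client.create_lock(
#       parent="projects/p/locations/l/catalogs/c/databases/d",
#       lock=lock)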
- flattened_params = [parent, lock] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.CreateLockRequest): - request = metastore.CreateLockRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if lock is not None: - request.lock = lock - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_lock] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_lock(self, - request: Optional[Union[metastore.DeleteLockRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes an existing lock specified by the lock ID. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1alpha1 - - async def sample_delete_lock(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.DeleteLockRequest( - name="name_value", - ) - - # Make the request - await client.delete_lock(request=request) - - Args: - request (Optional[Union[google.cloud.bigquery_biglake_v1alpha1.types.DeleteLockRequest, dict]]): - The request object. Request message for the DeleteLock - method. - name (:class:`str`): - Required. The name of the lock to delete. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. 
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.DeleteLockRequest): - request = metastore.DeleteLockRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_lock] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def check_lock(self, - request: Optional[Union[metastore.CheckLockRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Lock: - r"""Checks the state of a lock specified by the lock ID. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1alpha1 - - async def sample_check_lock(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.CheckLockRequest( - name="name_value", - ) - - # Make the request - response = await client.check_lock(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_biglake_v1alpha1.types.CheckLockRequest, dict]]): - The request object. Request message for the CheckLock - method. - name (:class:`str`): - Required. The name of the lock to check. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- - Returns: - google.cloud.bigquery_biglake_v1alpha1.types.Lock: - Represents a lock. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.CheckLockRequest): - request = metastore.CheckLockRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.check_lock] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_locks(self, - request: Optional[Union[metastore.ListLocksRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListLocksAsyncPager: - r"""List all locks in a specified database. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1alpha1 - - async def sample_list_locks(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.ListLocksRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_locks(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_biglake_v1alpha1.types.ListLocksRequest, dict]]): - The request object. Request message for the ListLocks - method. - parent (:class:`str`): - Required. The parent, which owns this collection of - locks. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1alpha1.services.metastore_service.pagers.ListLocksAsyncPager: - Response message for the ListLocks - method. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.ListLocksRequest): - request = metastore.ListLocksRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_locks] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListLocksAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def __aenter__(self) -> "MetastoreServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "MetastoreServiceAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/client.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/client.py deleted file mode 100644 index 363479d9871e..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/client.py +++ /dev/null @@ -1,2827 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.bigquery_biglake_v1alpha1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.cloud.bigquery_biglake_v1alpha1.services.metastore_service import pagers -from google.cloud.bigquery_biglake_v1alpha1.types import metastore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import MetastoreServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import MetastoreServiceGrpcTransport -from .transports.grpc_asyncio import MetastoreServiceGrpcAsyncIOTransport -from .transports.rest import MetastoreServiceRestTransport - - -class MetastoreServiceClientMeta(type): - """Metaclass for the MetastoreService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[MetastoreServiceTransport]] - _transport_registry["grpc"] = MetastoreServiceGrpcTransport - _transport_registry["grpc_asyncio"] = MetastoreServiceGrpcAsyncIOTransport - _transport_registry["rest"] = MetastoreServiceRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[MetastoreServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). 
- return next(iter(cls._transport_registry.values())) - - class MetastoreServiceClient(metaclass=MetastoreServiceClientMeta): - """BigLake Metastore is a serverless, highly available, multi-tenant - runtime metastore for Google Cloud Data Analytics products. - - The BigLake Metastore API defines the following resource model: - - - A collection of Google Cloud projects: ``/projects/*`` - - Each project has a collection of available locations: - ``/locations/*`` - - Each location has a collection of catalogs: ``/catalogs/*`` - - Each catalog has a collection of databases: ``/databases/*`` - - Each database has a collection of tables: ``/tables/*`` - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = "biglake.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - _DEFAULT_ENDPOINT_TEMPLATE = "biglake.{UNIVERSE_DOMAIN}" - _DEFAULT_UNIVERSE = "googleapis.com" - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - MetastoreServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - MetastoreServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file
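The two ``from_service_account_*`` constructors above differ only in how the credentials are loaded; both build ``service_account.Credentials`` and forward them to ``__init__``. A minimal usage sketch (the key-file path is a placeholder):

.. code-block:: python

    from google.cloud import bigquery_biglake_v1alpha1

    # from_service_account_json is an alias of from_service_account_file.
    client = bigquery_biglake_v1alpha1.MetastoreServiceClient.from_service_account_file(
        "/path/to/key.json",  # placeholder path to a service account key
    )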
- """ - return self._transport - - @staticmethod - def catalog_path(project: str,location: str,catalog: str,) -> str: - """Returns a fully-qualified catalog string.""" - return "projects/{project}/locations/{location}/catalogs/{catalog}".format(project=project, location=location, catalog=catalog, ) - - @staticmethod - def parse_catalog_path(path: str) -> Dict[str,str]: - """Parses a catalog path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/catalogs/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def database_path(project: str,location: str,catalog: str,database: str,) -> str: - """Returns a fully-qualified database string.""" - return "projects/{project}/locations/{location}/catalogs/{catalog}/databases/{database}".format(project=project, location=location, catalog=catalog, database=database, ) - - @staticmethod - def parse_database_path(path: str) -> Dict[str,str]: - """Parses a database path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/catalogs/(?P.+?)/databases/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def lock_path(project: str,location: str,catalog: str,database: str,lock: str,) -> str: - """Returns a fully-qualified lock string.""" - return "projects/{project}/locations/{location}/catalogs/{catalog}/databases/{database}/locks/{lock}".format(project=project, location=location, catalog=catalog, database=database, lock=lock, ) - - @staticmethod - def parse_lock_path(path: str) -> Dict[str,str]: - """Parses a lock path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/catalogs/(?P.+?)/databases/(?P.+?)/locks/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def table_path(project: str,location: str,catalog: str,database: str,table: str,) -> str: - """Returns a fully-qualified table string.""" - return "projects/{project}/locations/{location}/catalogs/{catalog}/databases/{database}/tables/{table}".format(project=project, location=location, catalog=catalog, database=database, table=table, ) - - @staticmethod - def parse_table_path(path: str) -> Dict[str,str]: - """Parses a table path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/catalogs/(?P.+?)/databases/(?P.+?)/tables/(?P
.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Deprecated. Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. 
- - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. - - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"]. - """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (Callable[[], Tuple[bytes, bytes]]): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - Callable[[], Tuple[bytes, bytes]] or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client.
- - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (Callable[[], Tuple[bytes, bytes]]): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = MetastoreServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = MetastoreServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = MetastoreServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. - """ - universe_domain = MetastoreServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. - """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance.
- """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, MetastoreServiceTransport, Callable[..., MetastoreServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the metastore service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,MetastoreServiceTransport,Callable[..., MetastoreServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the MetastoreServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. 
- """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) - - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = MetastoreServiceClient._read_environment_variables() - self._client_cert_source = MetastoreServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = MetastoreServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER - # Setup logging. - client_logging.initialize_logging() - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, MetastoreServiceTransport) - if transport_provided: - # transport is a MetastoreServiceTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = cast(MetastoreServiceTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - MetastoreServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[MetastoreServiceTransport], Callable[..., MetastoreServiceTransport]] = ( - MetastoreServiceClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., MetastoreServiceTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient`.", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "credentialsType": None, - } - ) - - def create_catalog(self, - request: Optional[Union[metastore.CreateCatalogRequest, dict]] = None, - *, - parent: Optional[str] = None, - catalog: Optional[metastore.Catalog] = None, - catalog_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Catalog: - r"""Creates a new catalog. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1alpha1 - - def sample_create_catalog(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.CreateCatalogRequest( - parent="parent_value", - catalog_id="catalog_id_value", - ) - - # Make the request - response = client.create_catalog(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_biglake_v1alpha1.types.CreateCatalogRequest, dict]): - The request object. 
Request message for the CreateCatalog - method. - parent (str): - Required. The parent resource where this catalog will be - created. Format: - projects/{project_id_or_number}/locations/{location_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - catalog (google.cloud.bigquery_biglake_v1alpha1.types.Catalog): - Required. The catalog to create. The ``name`` field does - not need to be provided. - - This corresponds to the ``catalog`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - catalog_id (str): - Required. The ID to use for the - catalog, which will become the final - component of the catalog's resource - name. - - This corresponds to the ``catalog_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1alpha1.types.Catalog: - Catalog is the container of - databases. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, catalog, catalog_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.CreateCatalogRequest): - request = metastore.CreateCatalogRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if catalog is not None: - request.catalog = catalog - if catalog_id is not None: - request.catalog_id = catalog_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_catalog] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_catalog(self, - request: Optional[Union[metastore.DeleteCatalogRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Catalog: - r"""Deletes an existing catalog specified by the catalog - ID. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1alpha1 - - def sample_delete_catalog(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.DeleteCatalogRequest( - name="name_value", - ) - - # Make the request - response = client.delete_catalog(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_biglake_v1alpha1.types.DeleteCatalogRequest, dict]): - The request object. Request message for the DeleteCatalog - method. - name (str): - Required. The name of the catalog to delete. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1alpha1.types.Catalog: - Catalog is the container of - databases. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.DeleteCatalogRequest): - request = metastore.DeleteCatalogRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_catalog] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def get_catalog(self, - request: Optional[Union[metastore.GetCatalogRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Catalog: - r"""Gets the catalog specified by the resource name. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1alpha1 - - def sample_get_catalog(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.GetCatalogRequest( - name="name_value", - ) - - # Make the request - response = client.get_catalog(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_biglake_v1alpha1.types.GetCatalogRequest, dict]): - The request object. Request message for the GetCatalog - method. - name (str): - Required. The name of the catalog to retrieve. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1alpha1.types.Catalog: - Catalog is the container of - databases. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.GetCatalogRequest): - request = metastore.GetCatalogRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_catalog] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_catalogs(self, - request: Optional[Union[metastore.ListCatalogsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListCatalogsPager: - r"""List all catalogs in a specified project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1alpha1 - - def sample_list_catalogs(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.ListCatalogsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_catalogs(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.bigquery_biglake_v1alpha1.types.ListCatalogsRequest, dict]): - The request object. Request message for the ListCatalogs - method. - parent (str): - Required. The parent, which owns this collection of - catalogs. Format: - projects/{project_id_or_number}/locations/{location_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1alpha1.services.metastore_service.pagers.ListCatalogsPager: - Response message for the ListCatalogs - method. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.ListCatalogsRequest): - request = metastore.ListCatalogsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.list_catalogs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListCatalogsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_database(self, - request: Optional[Union[metastore.CreateDatabaseRequest, dict]] = None, - *, - parent: Optional[str] = None, - database: Optional[metastore.Database] = None, - database_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Database: - r"""Creates a new database. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1alpha1 - - def sample_create_database(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.CreateDatabaseRequest( - parent="parent_value", - database_id="database_id_value", - ) - - # Make the request - response = client.create_database(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_biglake_v1alpha1.types.CreateDatabaseRequest, dict]): - The request object. Request message for the - CreateDatabase method. - parent (str): - Required. The parent resource where this database will - be created. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - database (google.cloud.bigquery_biglake_v1alpha1.types.Database): - Required. The database to create. The ``name`` field - does not need to be provided. - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - database_id (str): - Required. The ID to use for the - database, which will become the final - component of the database's resource - name. - - This corresponds to the ``database_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1alpha1.types.Database: - Database is the container of tables. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, database, database_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.CreateDatabaseRequest): - request = metastore.CreateDatabaseRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if database is not None: - request.database = database - if database_id is not None: - request.database_id = database_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_database] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_database(self, - request: Optional[Union[metastore.DeleteDatabaseRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Database: - r"""Deletes an existing database specified by the - database ID. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1alpha1 - - def sample_delete_database(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.DeleteDatabaseRequest( - name="name_value", - ) - - # Make the request - response = client.delete_database(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_biglake_v1alpha1.types.DeleteDatabaseRequest, dict]): - The request object. Request message for the - DeleteDatabase method. - name (str): - Required. The name of the database to delete. 
Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1alpha1.types.Database: - Database is the container of tables. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.DeleteDatabaseRequest): - request = metastore.DeleteDatabaseRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_database] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_database(self, - request: Optional[Union[metastore.UpdateDatabaseRequest, dict]] = None, - *, - database: Optional[metastore.Database] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Database: - r"""Updates an existing database specified by the - database ID. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1alpha1 - - def sample_update_database(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.UpdateDatabaseRequest( - ) - - # Make the request - response = client.update_database(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_biglake_v1alpha1.types.UpdateDatabaseRequest, dict]): - The request object. Request message for the - UpdateDatabase method. - database (google.cloud.bigquery_biglake_v1alpha1.types.Database): - Required. The database to update. - - The database's ``name`` field is used to identify the - database to update. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} - - This corresponds to the ``database`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - The list of fields to update. - - For the ``FieldMask`` definition, see - https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask - If not set, defaults to all of the fields that are - allowed to update. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1alpha1.types.Database: - Database is the container of tables. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [database, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.UpdateDatabaseRequest): - request = metastore.UpdateDatabaseRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if database is not None: - request.database = database - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_database] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("database.name", request.database.name), - )), - ) - - # Validate the universe domain. 
- self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_database(self, - request: Optional[Union[metastore.GetDatabaseRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Database: - r"""Gets the database specified by the resource name. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1alpha1 - - def sample_get_database(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.GetDatabaseRequest( - name="name_value", - ) - - # Make the request - response = client.get_database(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_biglake_v1alpha1.types.GetDatabaseRequest, dict]): - The request object. Request message for the GetDatabase - method. - name (str): - Required. The name of the database to retrieve. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1alpha1.types.Database: - Database is the container of tables. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.GetDatabaseRequest): - request = metastore.GetDatabaseRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_database] - - # Certain fields should be provided within the metadata header; - # add these here. 
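        # (gapic_v1.routing_header.to_grpc_metadata folds these key/value
        # pairs into a single "x-goog-request-params" metadata entry, which
        # the backend uses to route the request.)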
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_databases(self, - request: Optional[Union[metastore.ListDatabasesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListDatabasesPager: - r"""List all databases in a specified catalog. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1alpha1 - - def sample_list_databases(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.ListDatabasesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_databases(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.bigquery_biglake_v1alpha1.types.ListDatabasesRequest, dict]): - The request object. Request message for the ListDatabases - method. - parent (str): - Required. The parent, which owns this collection of - databases. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1alpha1.services.metastore_service.pagers.ListDatabasesPager: - Response message for the - ListDatabases method. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
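        # (A plain dict is coerced through the proto-plus constructor below;
        # an already-constructed ListDatabasesRequest instance is used as-is.)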
- if not isinstance(request, metastore.ListDatabasesRequest): - request = metastore.ListDatabasesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_databases] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListDatabasesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_table(self, - request: Optional[Union[metastore.CreateTableRequest, dict]] = None, - *, - parent: Optional[str] = None, - table: Optional[metastore.Table] = None, - table_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Table: - r"""Creates a new table. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1alpha1 - - def sample_create_table(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.CreateTableRequest( - parent="parent_value", - table_id="table_id_value", - ) - - # Make the request - response = client.create_table(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_biglake_v1alpha1.types.CreateTableRequest, dict]): - The request object. Request message for the CreateTable - method. - parent (str): - Required. The parent resource where this table will be - created. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - table (google.cloud.bigquery_biglake_v1alpha1.types.Table): - Required. The table to create. The ``name`` field does - not need to be provided for the table creation. - - This corresponds to the ``table`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - table_id (str): - Required. The ID to use for the - table, which will become the final - component of the table's resource name. - - This corresponds to the ``table_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1alpha1.types.Table: - Represents a table. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, table, table_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.CreateTableRequest): - request = metastore.CreateTableRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if table is not None: - request.table = table - if table_id is not None: - request.table_id = table_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_table] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_table(self, - request: Optional[Union[metastore.DeleteTableRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Table: - r"""Deletes an existing table specified by the table ID. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1alpha1 - - def sample_delete_table(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.DeleteTableRequest( - name="name_value", - ) - - # Make the request - response = client.delete_table(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_biglake_v1alpha1.types.DeleteTableRequest, dict]): - The request object. Request message for the DeleteTable - method. - name (str): - Required. The name of the table to delete. 
Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1alpha1.types.Table: - Represents a table. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.DeleteTableRequest): - request = metastore.DeleteTableRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_table] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_table(self, - request: Optional[Union[metastore.UpdateTableRequest, dict]] = None, - *, - table: Optional[metastore.Table] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Table: - r"""Updates an existing table specified by the table ID. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1alpha1 - - def sample_update_table(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.UpdateTableRequest( - ) - - # Make the request - response = client.update_table(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_biglake_v1alpha1.types.UpdateTableRequest, dict]): - The request object. Request message for the UpdateTable - method. - table (google.cloud.bigquery_biglake_v1alpha1.types.Table): - Required. The table to update. - - The table's ``name`` field is used to identify the table - to update. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} - - This corresponds to the ``table`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - The list of fields to update. - - For the ``FieldMask`` definition, see - https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask - If not set, defaults to all of the fields that are - allowed to update. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1alpha1.types.Table: - Represents a table. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [table, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.UpdateTableRequest): - request = metastore.UpdateTableRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if table is not None: - request.table = table - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_table] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("table.name", request.table.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
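        # (The wrapped method applies the service's default retry and timeout
        # policy whenever the caller leaves these as gapic_v1.method.DEFAULT.)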
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def rename_table(self, - request: Optional[Union[metastore.RenameTableRequest, dict]] = None, - *, - name: Optional[str] = None, - new_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Table: - r"""Renames an existing table specified by the table ID. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1alpha1 - - def sample_rename_table(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.RenameTableRequest( - name="name_value", - new_name="new_name_value", - ) - - # Make the request - response = client.rename_table(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_biglake_v1alpha1.types.RenameTableRequest, dict]): - The request object. Request message for the RenameTable - method in MetastoreService - name (str): - Required. The table's ``name`` field is used to identify - the table to rename. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - new_name (str): - Required. The new ``name`` for the specified table, must - be in the same database. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} - - This corresponds to the ``new_name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1alpha1.types.Table: - Represents a table. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name, new_name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
- if not isinstance(request, metastore.RenameTableRequest): - request = metastore.RenameTableRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if new_name is not None: - request.new_name = new_name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.rename_table] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_table(self, - request: Optional[Union[metastore.GetTableRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Table: - r"""Gets the table specified by the resource name. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1alpha1 - - def sample_get_table(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.GetTableRequest( - name="name_value", - ) - - # Make the request - response = client.get_table(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_biglake_v1alpha1.types.GetTableRequest, dict]): - The request object. Request message for the GetTable - method. - name (str): - Required. The name of the table to retrieve. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1alpha1.types.Table: - Represents a table. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
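        # (For example, client.get_table(request=req, name="...") is rejected
        # with a ValueError by the check below.)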
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.GetTableRequest): - request = metastore.GetTableRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_table] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_tables(self, - request: Optional[Union[metastore.ListTablesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListTablesPager: - r"""List all tables in a specified database. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1alpha1 - - def sample_list_tables(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.ListTablesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_tables(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.bigquery_biglake_v1alpha1.types.ListTablesRequest, dict]): - The request object. Request message for the ListTables - method. - parent (str): - Required. The parent, which owns this collection of - tables. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
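            For illustration, a sketch of walking whole pages via the pager's
            ``pages`` property (resource names are placeholders):

            .. code-block:: python

                pager = client.list_tables(parent="parent_value")
                for page in pager.pages:
                    for table in page.tables:
                        print(table.name)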
- - Returns: - google.cloud.bigquery_biglake_v1alpha1.services.metastore_service.pagers.ListTablesPager: - Response message for the ListTables - method. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.ListTablesRequest): - request = metastore.ListTablesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_tables] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListTablesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_lock(self, - request: Optional[Union[metastore.CreateLockRequest, dict]] = None, - *, - parent: Optional[str] = None, - lock: Optional[metastore.Lock] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Lock: - r"""Creates a new lock. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1alpha1 - - def sample_create_lock(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceClient() - - # Initialize request argument(s) - lock = bigquery_biglake_v1alpha1.Lock() - lock.table_id = "table_id_value" - - request = bigquery_biglake_v1alpha1.CreateLockRequest( - parent="parent_value", - lock=lock, - ) - - # Make the request - response = client.create_lock(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_biglake_v1alpha1.types.CreateLockRequest, dict]): - The request object. Request message for the CreateLock - method. - parent (str): - Required. The parent resource where this lock will be - created. 
Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - lock (google.cloud.bigquery_biglake_v1alpha1.types.Lock): - Required. The lock to create. The ``name`` field does - not need to be provided for the lock creation. - - This corresponds to the ``lock`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1alpha1.types.Lock: - Represents a lock. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, lock] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.CreateLockRequest): - request = metastore.CreateLockRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if lock is not None: - request.lock = lock - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_lock] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_lock(self, - request: Optional[Union[metastore.DeleteLockRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes an existing lock specified by the lock ID. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1alpha1 - - def sample_delete_lock(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.DeleteLockRequest( - name="name_value", - ) - - # Make the request - client.delete_lock(request=request) - - Args: - request (Union[google.cloud.bigquery_biglake_v1alpha1.types.DeleteLockRequest, dict]): - The request object. Request message for the DeleteLock - method. - name (str): - Required. The name of the lock to delete. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.DeleteLockRequest): - request = metastore.DeleteLockRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_lock] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def check_lock(self, - request: Optional[Union[metastore.CheckLockRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> metastore.Lock: - r"""Checks the state of a lock specified by the lock ID. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1alpha1 - - def sample_check_lock(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.CheckLockRequest( - name="name_value", - ) - - # Make the request - response = client.check_lock(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_biglake_v1alpha1.types.CheckLockRequest, dict]): - The request object. Request message for the CheckLock - method. - name (str): - Required. The name of the lock to check. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id} - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1alpha1.types.Lock: - Represents a lock. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.CheckLockRequest): - request = metastore.CheckLockRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.check_lock] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_locks(self, - request: Optional[Union[metastore.ListLocksRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListLocksPager: - r"""List all locks in a specified database. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_biglake_v1alpha1 - - def sample_list_locks(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.ListLocksRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_locks(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.bigquery_biglake_v1alpha1.types.ListLocksRequest, dict]): - The request object. Request message for the ListLocks - method. - parent (str): - Required. The parent, which owns this collection of - locks. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_biglake_v1alpha1.services.metastore_service.pagers.ListLocksPager: - Response message for the ListLocks - method. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, metastore.ListLocksRequest): - request = metastore.ListLocksRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_locks] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. 
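        # (The pager holds this first ListLocksResponse and lazily issues
        # follow-up ListLocks calls, reusing the same retry, timeout, and
        # metadata, each time iteration crosses a page boundary.)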
- response = pagers.ListLocksPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "MetastoreServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "MetastoreServiceClient", -) diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/pagers.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/pagers.py deleted file mode 100644 index 5885c6bccac3..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/pagers.py +++ /dev/null @@ -1,583 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.bigquery_biglake_v1alpha1.types import metastore - - -class ListCatalogsPager: - """A pager for iterating through ``list_catalogs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_biglake_v1alpha1.types.ListCatalogsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``catalogs`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListCatalogs`` requests and continue to iterate - through the ``catalogs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_biglake_v1alpha1.types.ListCatalogsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
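    A usage sketch, assuming a ``MetastoreServiceClient`` constructed as in
    the service samples above:

    .. code-block:: python

        pager = client.list_catalogs(parent="parent_value")
        for catalog in pager:
            # Additional ListCatalogs pages are fetched transparently.
            print(catalog.name)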
- """ - def __init__(self, - method: Callable[..., metastore.ListCatalogsResponse], - request: metastore.ListCatalogsRequest, - response: metastore.ListCatalogsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_biglake_v1alpha1.types.ListCatalogsRequest): - The initial request object. - response (google.cloud.bigquery_biglake_v1alpha1.types.ListCatalogsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = metastore.ListCatalogsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[metastore.ListCatalogsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[metastore.Catalog]: - for page in self.pages: - yield from page.catalogs - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListCatalogsAsyncPager: - """A pager for iterating through ``list_catalogs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_biglake_v1alpha1.types.ListCatalogsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``catalogs`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListCatalogs`` requests and continue to iterate - through the ``catalogs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_biglake_v1alpha1.types.ListCatalogsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[metastore.ListCatalogsResponse]], - request: metastore.ListCatalogsRequest, - response: metastore.ListCatalogsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_biglake_v1alpha1.types.ListCatalogsRequest): - The initial request object. - response (google.cloud.bigquery_biglake_v1alpha1.types.ListCatalogsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = metastore.ListCatalogsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[metastore.ListCatalogsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[metastore.Catalog]: - async def async_generator(): - async for page in self.pages: - for response in page.catalogs: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDatabasesPager: - """A pager for iterating through ``list_databases`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_biglake_v1alpha1.types.ListDatabasesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``databases`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListDatabases`` requests and continue to iterate - through the ``databases`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_biglake_v1alpha1.types.ListDatabasesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., metastore.ListDatabasesResponse], - request: metastore.ListDatabasesRequest, - response: metastore.ListDatabasesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_biglake_v1alpha1.types.ListDatabasesRequest): - The initial request object. - response (google.cloud.bigquery_biglake_v1alpha1.types.ListDatabasesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
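        Note: in normal use this pager is constructed for you by
        ``MetastoreServiceClient.list_databases``; it should rarely be
        instantiated directly.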
- """ - self._method = method - self._request = metastore.ListDatabasesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[metastore.ListDatabasesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[metastore.Database]: - for page in self.pages: - yield from page.databases - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDatabasesAsyncPager: - """A pager for iterating through ``list_databases`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_biglake_v1alpha1.types.ListDatabasesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``databases`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDatabases`` requests and continue to iterate - through the ``databases`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_biglake_v1alpha1.types.ListDatabasesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[metastore.ListDatabasesResponse]], - request: metastore.ListDatabasesRequest, - response: metastore.ListDatabasesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_biglake_v1alpha1.types.ListDatabasesRequest): - The initial request object. - response (google.cloud.bigquery_biglake_v1alpha1.types.ListDatabasesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = metastore.ListDatabasesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[metastore.ListDatabasesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[metastore.Database]: - async def async_generator(): - async for page in self.pages: - for response in page.databases: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListTablesPager: - """A pager for iterating through ``list_tables`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_biglake_v1alpha1.types.ListTablesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``tables`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListTables`` requests and continue to iterate - through the ``tables`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_biglake_v1alpha1.types.ListTablesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., metastore.ListTablesResponse], - request: metastore.ListTablesRequest, - response: metastore.ListTablesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_biglake_v1alpha1.types.ListTablesRequest): - The initial request object. - response (google.cloud.bigquery_biglake_v1alpha1.types.ListTablesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = metastore.ListTablesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[metastore.ListTablesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[metastore.Table]: - for page in self.pages: - yield from page.tables - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListTablesAsyncPager: - """A pager for iterating through ``list_tables`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_biglake_v1alpha1.types.ListTablesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``tables`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListTables`` requests and continue to iterate - through the ``tables`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_biglake_v1alpha1.types.ListTablesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[metastore.ListTablesResponse]], - request: metastore.ListTablesRequest, - response: metastore.ListTablesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_biglake_v1alpha1.types.ListTablesRequest): - The initial request object. - response (google.cloud.bigquery_biglake_v1alpha1.types.ListTablesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = metastore.ListTablesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[metastore.ListTablesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[metastore.Table]: - async def async_generator(): - async for page in self.pages: - for response in page.tables: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListLocksPager: - """A pager for iterating through ``list_locks`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_biglake_v1alpha1.types.ListLocksResponse` object, and - provides an ``__iter__`` method to iterate through its - ``locks`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListLocks`` requests and continue to iterate - through the ``locks`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_biglake_v1alpha1.types.ListLocksResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., metastore.ListLocksResponse], - request: metastore.ListLocksRequest, - response: metastore.ListLocksResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_biglake_v1alpha1.types.ListLocksRequest): - The initial request object. - response (google.cloud.bigquery_biglake_v1alpha1.types.ListLocksResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = metastore.ListLocksRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[metastore.ListLocksResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[metastore.Lock]: - for page in self.pages: - yield from page.locks - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListLocksAsyncPager: - """A pager for iterating through ``list_locks`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_biglake_v1alpha1.types.ListLocksResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``locks`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListLocks`` requests and continue to iterate - through the ``locks`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_biglake_v1alpha1.types.ListLocksResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[metastore.ListLocksResponse]], - request: metastore.ListLocksRequest, - response: metastore.ListLocksResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_biglake_v1alpha1.types.ListLocksRequest): - The initial request object. - response (google.cloud.bigquery_biglake_v1alpha1.types.ListLocksResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = metastore.ListLocksRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[metastore.ListLocksResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[metastore.Lock]: - async def async_generator(): - async for page in self.pages: - for response in page.locks: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/transports/README.rst b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/transports/README.rst deleted file mode 100644 index 489d37387a08..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`MetastoreServiceTransport` is the ABC for all transports. -- public child `MetastoreServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `MetastoreServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseMetastoreServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `MetastoreServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/transports/__init__.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/transports/__init__.py deleted file mode 100644 index 5bacbdea32ed..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -from typing import Dict, Type - -from .base import MetastoreServiceTransport -from .grpc import MetastoreServiceGrpcTransport -from .grpc_asyncio import MetastoreServiceGrpcAsyncIOTransport -from .rest import MetastoreServiceRestTransport -from .rest import MetastoreServiceRestInterceptor - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[MetastoreServiceTransport]] -_transport_registry['grpc'] = MetastoreServiceGrpcTransport -_transport_registry['grpc_asyncio'] = MetastoreServiceGrpcAsyncIOTransport -_transport_registry['rest'] = MetastoreServiceRestTransport - -__all__ = ( - 'MetastoreServiceTransport', - 'MetastoreServiceGrpcTransport', - 'MetastoreServiceGrpcAsyncIOTransport', - 'MetastoreServiceRestTransport', - 'MetastoreServiceRestInterceptor', -) diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/transports/base.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/transports/base.py deleted file mode 100644 index 42bc5f63abcc..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/transports/base.py +++ /dev/null @@ -1,408 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.bigquery_biglake_v1alpha1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.bigquery_biglake_v1alpha1.types import metastore -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class MetastoreServiceTransport(abc.ABC): - """Abstract transport class for MetastoreService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'biglake.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'biglake.googleapis.com'). 
- credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. 
- self._wrapped_methods = { - self.create_catalog: gapic_v1.method.wrap_method( - self.create_catalog, - default_timeout=None, - client_info=client_info, - ), - self.delete_catalog: gapic_v1.method.wrap_method( - self.delete_catalog, - default_timeout=None, - client_info=client_info, - ), - self.get_catalog: gapic_v1.method.wrap_method( - self.get_catalog, - default_timeout=None, - client_info=client_info, - ), - self.list_catalogs: gapic_v1.method.wrap_method( - self.list_catalogs, - default_timeout=None, - client_info=client_info, - ), - self.create_database: gapic_v1.method.wrap_method( - self.create_database, - default_timeout=None, - client_info=client_info, - ), - self.delete_database: gapic_v1.method.wrap_method( - self.delete_database, - default_timeout=None, - client_info=client_info, - ), - self.update_database: gapic_v1.method.wrap_method( - self.update_database, - default_timeout=None, - client_info=client_info, - ), - self.get_database: gapic_v1.method.wrap_method( - self.get_database, - default_timeout=None, - client_info=client_info, - ), - self.list_databases: gapic_v1.method.wrap_method( - self.list_databases, - default_timeout=None, - client_info=client_info, - ), - self.create_table: gapic_v1.method.wrap_method( - self.create_table, - default_timeout=None, - client_info=client_info, - ), - self.delete_table: gapic_v1.method.wrap_method( - self.delete_table, - default_timeout=None, - client_info=client_info, - ), - self.update_table: gapic_v1.method.wrap_method( - self.update_table, - default_timeout=None, - client_info=client_info, - ), - self.rename_table: gapic_v1.method.wrap_method( - self.rename_table, - default_timeout=None, - client_info=client_info, - ), - self.get_table: gapic_v1.method.wrap_method( - self.get_table, - default_timeout=None, - client_info=client_info, - ), - self.list_tables: gapic_v1.method.wrap_method( - self.list_tables, - default_timeout=None, - client_info=client_info, - ), - self.create_lock: gapic_v1.method.wrap_method( - self.create_lock, - default_timeout=None, - client_info=client_info, - ), - self.delete_lock: gapic_v1.method.wrap_method( - self.delete_lock, - default_timeout=None, - client_info=client_info, - ), - self.check_lock: gapic_v1.method.wrap_method( - self.check_lock, - default_timeout=None, - client_info=client_info, - ), - self.list_locks: gapic_v1.method.wrap_method( - self.list_locks, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
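``_prep_wrapped_methods`` above wraps each RPC with ``gapic_v1.method.wrap_method``. A minimal sketch of what such a wrapper contributes (this is not google-api-core's real implementation, which also merges retry policy and user-agent metadata):

.. code-block:: python

    # Sketch: per-RPC defaults baked in at transport setup time,
    # still overridable on each individual call.
    from typing import Any, Callable, Optional

    def wrap_method_sketch(
        func: Callable[..., Any],
        default_timeout: Optional[float] = None,
    ) -> Callable[..., Any]:
        def wrapper(request: Any, *, timeout: Optional[float] = None, **kwargs: Any) -> Any:
            # Fall back to the default configured when the transport was built.
            effective = timeout if timeout is not None else default_timeout
            return func(request, timeout=effective, **kwargs)
        return wrapper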
- """ - raise NotImplementedError() - - @property - def create_catalog(self) -> Callable[ - [metastore.CreateCatalogRequest], - Union[ - metastore.Catalog, - Awaitable[metastore.Catalog] - ]]: - raise NotImplementedError() - - @property - def delete_catalog(self) -> Callable[ - [metastore.DeleteCatalogRequest], - Union[ - metastore.Catalog, - Awaitable[metastore.Catalog] - ]]: - raise NotImplementedError() - - @property - def get_catalog(self) -> Callable[ - [metastore.GetCatalogRequest], - Union[ - metastore.Catalog, - Awaitable[metastore.Catalog] - ]]: - raise NotImplementedError() - - @property - def list_catalogs(self) -> Callable[ - [metastore.ListCatalogsRequest], - Union[ - metastore.ListCatalogsResponse, - Awaitable[metastore.ListCatalogsResponse] - ]]: - raise NotImplementedError() - - @property - def create_database(self) -> Callable[ - [metastore.CreateDatabaseRequest], - Union[ - metastore.Database, - Awaitable[metastore.Database] - ]]: - raise NotImplementedError() - - @property - def delete_database(self) -> Callable[ - [metastore.DeleteDatabaseRequest], - Union[ - metastore.Database, - Awaitable[metastore.Database] - ]]: - raise NotImplementedError() - - @property - def update_database(self) -> Callable[ - [metastore.UpdateDatabaseRequest], - Union[ - metastore.Database, - Awaitable[metastore.Database] - ]]: - raise NotImplementedError() - - @property - def get_database(self) -> Callable[ - [metastore.GetDatabaseRequest], - Union[ - metastore.Database, - Awaitable[metastore.Database] - ]]: - raise NotImplementedError() - - @property - def list_databases(self) -> Callable[ - [metastore.ListDatabasesRequest], - Union[ - metastore.ListDatabasesResponse, - Awaitable[metastore.ListDatabasesResponse] - ]]: - raise NotImplementedError() - - @property - def create_table(self) -> Callable[ - [metastore.CreateTableRequest], - Union[ - metastore.Table, - Awaitable[metastore.Table] - ]]: - raise NotImplementedError() - - @property - def delete_table(self) -> Callable[ - [metastore.DeleteTableRequest], - Union[ - metastore.Table, - Awaitable[metastore.Table] - ]]: - raise NotImplementedError() - - @property - def update_table(self) -> Callable[ - [metastore.UpdateTableRequest], - Union[ - metastore.Table, - Awaitable[metastore.Table] - ]]: - raise NotImplementedError() - - @property - def rename_table(self) -> Callable[ - [metastore.RenameTableRequest], - Union[ - metastore.Table, - Awaitable[metastore.Table] - ]]: - raise NotImplementedError() - - @property - def get_table(self) -> Callable[ - [metastore.GetTableRequest], - Union[ - metastore.Table, - Awaitable[metastore.Table] - ]]: - raise NotImplementedError() - - @property - def list_tables(self) -> Callable[ - [metastore.ListTablesRequest], - Union[ - metastore.ListTablesResponse, - Awaitable[metastore.ListTablesResponse] - ]]: - raise NotImplementedError() - - @property - def create_lock(self) -> Callable[ - [metastore.CreateLockRequest], - Union[ - metastore.Lock, - Awaitable[metastore.Lock] - ]]: - raise NotImplementedError() - - @property - def delete_lock(self) -> Callable[ - [metastore.DeleteLockRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def check_lock(self) -> Callable[ - [metastore.CheckLockRequest], - Union[ - metastore.Lock, - Awaitable[metastore.Lock] - ]]: - raise NotImplementedError() - - @property - def list_locks(self) -> Callable[ - [metastore.ListLocksRequest], - Union[ - metastore.ListLocksResponse, - Awaitable[metastore.ListLocksResponse] 
-            ]]:
-        raise NotImplementedError()
-
-    @property
-    def kind(self) -> str:
-        raise NotImplementedError()
-
-
-__all__ = (
-    'MetastoreServiceTransport',
-)
diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/transports/grpc.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/transports/grpc.py
deleted file mode 100644
index 832c2167bc72..000000000000
--- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/transports/grpc.py
+++ /dev/null
@@ -1,829 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import json
-import logging as std_logging
-import pickle
-import warnings
-from typing import Callable, Dict, Optional, Sequence, Tuple, Union
-
-from google.api_core import grpc_helpers
-from google.api_core import gapic_v1
-import google.auth  # type: ignore
-from google.auth import credentials as ga_credentials  # type: ignore
-from google.auth.transport.grpc import SslCredentials  # type: ignore
-from google.protobuf.json_format import MessageToJson
-import google.protobuf.message
-
-import grpc  # type: ignore
-import proto  # type: ignore
-
-from google.cloud.bigquery_biglake_v1alpha1.types import metastore
-from google.protobuf import empty_pb2  # type: ignore
-from .base import MetastoreServiceTransport, DEFAULT_CLIENT_INFO
-
-try:
-    from google.api_core import client_logging  # type: ignore
-    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
-except ImportError:  # pragma: NO COVER
-    CLIENT_LOGGING_SUPPORTED = False
-
-_LOGGER = std_logging.getLogger(__name__)
-
-
-class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor):  # pragma: NO COVER
-    def intercept_unary_unary(self, continuation, client_call_details, request):
-        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG)
-        if logging_enabled:  # pragma: NO COVER
-            request_metadata = client_call_details.metadata
-            if isinstance(request, proto.Message):
-                request_payload = type(request).to_json(request)
-            elif isinstance(request, google.protobuf.message.Message):
-                request_payload = MessageToJson(request)
-            else:
-                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
-
-            request_metadata = {
-                key: value.decode("utf-8") if isinstance(value, bytes) else value
-                for key, value in request_metadata
-            }
-            grpc_request = {
-                "payload": request_payload,
-                "requestMethod": "grpc",
-                "metadata": dict(request_metadata),
-            }
-            _LOGGER.debug(
-                f"Sending request for {client_call_details.method}",
-                extra = {
-                    "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService",
-                    "rpcName": client_call_details.method,
-                    "request": grpc_request,
-                    "metadata": grpc_request["metadata"],
-                },
-            )
-
-        response = continuation(client_call_details, request)
-        if logging_enabled:  # pragma: NO COVER
-            response_metadata = response.trailing_metadata()
-            # Convert the gRPC trailing metadata to a dict of string key/value pairs
-            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
-            result = response.result()
-            if isinstance(result, proto.Message):
-                response_payload = type(result).to_json(result)
-            elif isinstance(result, google.protobuf.message.Message):
-                response_payload = MessageToJson(result)
-            else:
-                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
-            grpc_response = {
-                "payload": response_payload,
-                "metadata": metadata,
-                "status": "OK",
-            }
-            _LOGGER.debug(
-                f"Received response for {client_call_details.method}.",
-                extra = {
-                    "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService",
-                    "rpcName": client_call_details.method,
-                    "response": grpc_response,
-                    "metadata": grpc_response["metadata"],
-                },
-            )
-        return response
-
-
-class MetastoreServiceGrpcTransport(MetastoreServiceTransport):
-    """gRPC backend transport for MetastoreService.
-
-    BigLake Metastore is a serverless, highly available, multi-tenant
-    runtime metastore for Google Cloud Data Analytics products.
-
-    The BigLake Metastore API defines the following resource model:
-
-    - A collection of Google Cloud projects: ``/projects/*``
-    - Each project has a collection of available locations:
-      ``/locations/*``
-    - Each location has a collection of catalogs: ``/catalogs/*``
-    - Each catalog has a collection of databases: ``/databases/*``
-    - Each database has a collection of tables: ``/tables/*``
-
-    This class defines the same methods as the primary client, so the
-    primary client can load the underlying transport implementation
-    and call it.
-
-    It sends protocol buffers over the wire using gRPC (which is built on
-    top of HTTP/2); the ``grpcio`` package must be installed.
-    """
-    _stubs: Dict[str, Callable]
-
-    def __init__(self, *,
-            host: str = 'biglake.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None,
-            api_mtls_endpoint: Optional[str] = None,
-            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
-            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            api_audience: Optional[str] = None,
-            ) -> None:
-        """Instantiate the transport.
-
-        Args:
-            host (Optional[str]):
-                 The hostname to connect to (default: 'biglake.googleapis.com').
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-                This argument is ignored if a ``channel`` instance is provided.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if a ``channel`` instance is provided.
-            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
-                ignored if a ``channel`` instance is provided.
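The interceptor above attaches its structured fields to each log record via ``extra``. A sketch of a formatter that surfaces them (``RpcRecordFormatter`` is a hypothetical helper, not part of this library; it assumes the ``serviceName``/``rpcName`` attributes set by the interceptor):

.. code-block:: python

    # Sketch: surface the structured attributes attached via ``extra``.
    import logging

    class RpcRecordFormatter(logging.Formatter):
        def format(self, record: logging.LogRecord) -> str:
            # Fall back to "-" for records that did not come from the interceptor.
            service = getattr(record, "serviceName", "-")
            rpc = getattr(record, "rpcName", "-")
            return f"[{service} / {rpc}] {record.getMessage()}"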
- channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) - - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'biglake.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. 
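As the constructor logic above shows, a caller may hand the transport a ready-made channel, in which case the credentials arguments are ignored. A sketch (host and TLS settings are illustrative only):

.. code-block:: python

    # Illustrative sketch only.
    import grpc

    from google.cloud.bigquery_biglake_v1alpha1 import MetastoreServiceClient
    from google.cloud.bigquery_biglake_v1alpha1.services.metastore_service.transports.grpc import (
        MetastoreServiceGrpcTransport,
    )

    # When a ``grpc.Channel`` instance is passed, the transport uses it
    # as-is and sets ``_ignore_credentials``.
    channel = grpc.secure_channel(
        "biglake.googleapis.com:443", grpc.ssl_channel_credentials()
    )
    transport = MetastoreServiceGrpcTransport(channel=channel)
    client = MetastoreServiceClient(transport=transport)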
- """ - return self._grpc_channel - - @property - def create_catalog(self) -> Callable[ - [metastore.CreateCatalogRequest], - metastore.Catalog]: - r"""Return a callable for the create catalog method over gRPC. - - Creates a new catalog. - - Returns: - Callable[[~.CreateCatalogRequest], - ~.Catalog]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_catalog' not in self._stubs: - self._stubs['create_catalog'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/CreateCatalog', - request_serializer=metastore.CreateCatalogRequest.serialize, - response_deserializer=metastore.Catalog.deserialize, - ) - return self._stubs['create_catalog'] - - @property - def delete_catalog(self) -> Callable[ - [metastore.DeleteCatalogRequest], - metastore.Catalog]: - r"""Return a callable for the delete catalog method over gRPC. - - Deletes an existing catalog specified by the catalog - ID. - - Returns: - Callable[[~.DeleteCatalogRequest], - ~.Catalog]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_catalog' not in self._stubs: - self._stubs['delete_catalog'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/DeleteCatalog', - request_serializer=metastore.DeleteCatalogRequest.serialize, - response_deserializer=metastore.Catalog.deserialize, - ) - return self._stubs['delete_catalog'] - - @property - def get_catalog(self) -> Callable[ - [metastore.GetCatalogRequest], - metastore.Catalog]: - r"""Return a callable for the get catalog method over gRPC. - - Gets the catalog specified by the resource name. - - Returns: - Callable[[~.GetCatalogRequest], - ~.Catalog]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_catalog' not in self._stubs: - self._stubs['get_catalog'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/GetCatalog', - request_serializer=metastore.GetCatalogRequest.serialize, - response_deserializer=metastore.Catalog.deserialize, - ) - return self._stubs['get_catalog'] - - @property - def list_catalogs(self) -> Callable[ - [metastore.ListCatalogsRequest], - metastore.ListCatalogsResponse]: - r"""Return a callable for the list catalogs method over gRPC. - - List all catalogs in a specified project. - - Returns: - Callable[[~.ListCatalogsRequest], - ~.ListCatalogsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_catalogs' not in self._stubs: - self._stubs['list_catalogs'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/ListCatalogs', - request_serializer=metastore.ListCatalogsRequest.serialize, - response_deserializer=metastore.ListCatalogsResponse.deserialize, - ) - return self._stubs['list_catalogs'] - - @property - def create_database(self) -> Callable[ - [metastore.CreateDatabaseRequest], - metastore.Database]: - r"""Return a callable for the create database method over gRPC. - - Creates a new database. - - Returns: - Callable[[~.CreateDatabaseRequest], - ~.Database]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_database' not in self._stubs: - self._stubs['create_database'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/CreateDatabase', - request_serializer=metastore.CreateDatabaseRequest.serialize, - response_deserializer=metastore.Database.deserialize, - ) - return self._stubs['create_database'] - - @property - def delete_database(self) -> Callable[ - [metastore.DeleteDatabaseRequest], - metastore.Database]: - r"""Return a callable for the delete database method over gRPC. - - Deletes an existing database specified by the - database ID. - - Returns: - Callable[[~.DeleteDatabaseRequest], - ~.Database]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_database' not in self._stubs: - self._stubs['delete_database'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/DeleteDatabase', - request_serializer=metastore.DeleteDatabaseRequest.serialize, - response_deserializer=metastore.Database.deserialize, - ) - return self._stubs['delete_database'] - - @property - def update_database(self) -> Callable[ - [metastore.UpdateDatabaseRequest], - metastore.Database]: - r"""Return a callable for the update database method over gRPC. - - Updates an existing database specified by the - database ID. - - Returns: - Callable[[~.UpdateDatabaseRequest], - ~.Database]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_database' not in self._stubs: - self._stubs['update_database'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/UpdateDatabase', - request_serializer=metastore.UpdateDatabaseRequest.serialize, - response_deserializer=metastore.Database.deserialize, - ) - return self._stubs['update_database'] - - @property - def get_database(self) -> Callable[ - [metastore.GetDatabaseRequest], - metastore.Database]: - r"""Return a callable for the get database method over gRPC. - - Gets the database specified by the resource name. - - Returns: - Callable[[~.GetDatabaseRequest], - ~.Database]: - A function that, when called, will call the underlying RPC - on the server. 
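Each stub property in this transport repeats the same lazy-caching shape: build the unary-unary callable on first access, then reuse it. Condensed into a hypothetical helper (``_get_stub`` is not part of the generated code):

.. code-block:: python

    # Sketch of the pattern each property above follows.
    def _get_stub(self, name, path, request_serializer, response_deserializer):
        if name not in self._stubs:
            self._stubs[name] = self._logged_channel.unary_unary(
                path,
                request_serializer=request_serializer,
                response_deserializer=response_deserializer,
            )
        return self._stubs[name]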
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_database' not in self._stubs: - self._stubs['get_database'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/GetDatabase', - request_serializer=metastore.GetDatabaseRequest.serialize, - response_deserializer=metastore.Database.deserialize, - ) - return self._stubs['get_database'] - - @property - def list_databases(self) -> Callable[ - [metastore.ListDatabasesRequest], - metastore.ListDatabasesResponse]: - r"""Return a callable for the list databases method over gRPC. - - List all databases in a specified catalog. - - Returns: - Callable[[~.ListDatabasesRequest], - ~.ListDatabasesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_databases' not in self._stubs: - self._stubs['list_databases'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/ListDatabases', - request_serializer=metastore.ListDatabasesRequest.serialize, - response_deserializer=metastore.ListDatabasesResponse.deserialize, - ) - return self._stubs['list_databases'] - - @property - def create_table(self) -> Callable[ - [metastore.CreateTableRequest], - metastore.Table]: - r"""Return a callable for the create table method over gRPC. - - Creates a new table. - - Returns: - Callable[[~.CreateTableRequest], - ~.Table]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_table' not in self._stubs: - self._stubs['create_table'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/CreateTable', - request_serializer=metastore.CreateTableRequest.serialize, - response_deserializer=metastore.Table.deserialize, - ) - return self._stubs['create_table'] - - @property - def delete_table(self) -> Callable[ - [metastore.DeleteTableRequest], - metastore.Table]: - r"""Return a callable for the delete table method over gRPC. - - Deletes an existing table specified by the table ID. - - Returns: - Callable[[~.DeleteTableRequest], - ~.Table]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_table' not in self._stubs: - self._stubs['delete_table'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/DeleteTable', - request_serializer=metastore.DeleteTableRequest.serialize, - response_deserializer=metastore.Table.deserialize, - ) - return self._stubs['delete_table'] - - @property - def update_table(self) -> Callable[ - [metastore.UpdateTableRequest], - metastore.Table]: - r"""Return a callable for the update table method over gRPC. - - Updates an existing table specified by the table ID. 
- - Returns: - Callable[[~.UpdateTableRequest], - ~.Table]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_table' not in self._stubs: - self._stubs['update_table'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/UpdateTable', - request_serializer=metastore.UpdateTableRequest.serialize, - response_deserializer=metastore.Table.deserialize, - ) - return self._stubs['update_table'] - - @property - def rename_table(self) -> Callable[ - [metastore.RenameTableRequest], - metastore.Table]: - r"""Return a callable for the rename table method over gRPC. - - Renames an existing table specified by the table ID. - - Returns: - Callable[[~.RenameTableRequest], - ~.Table]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'rename_table' not in self._stubs: - self._stubs['rename_table'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/RenameTable', - request_serializer=metastore.RenameTableRequest.serialize, - response_deserializer=metastore.Table.deserialize, - ) - return self._stubs['rename_table'] - - @property - def get_table(self) -> Callable[ - [metastore.GetTableRequest], - metastore.Table]: - r"""Return a callable for the get table method over gRPC. - - Gets the table specified by the resource name. - - Returns: - Callable[[~.GetTableRequest], - ~.Table]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_table' not in self._stubs: - self._stubs['get_table'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/GetTable', - request_serializer=metastore.GetTableRequest.serialize, - response_deserializer=metastore.Table.deserialize, - ) - return self._stubs['get_table'] - - @property - def list_tables(self) -> Callable[ - [metastore.ListTablesRequest], - metastore.ListTablesResponse]: - r"""Return a callable for the list tables method over gRPC. - - List all tables in a specified database. - - Returns: - Callable[[~.ListTablesRequest], - ~.ListTablesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_tables' not in self._stubs: - self._stubs['list_tables'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/ListTables', - request_serializer=metastore.ListTablesRequest.serialize, - response_deserializer=metastore.ListTablesResponse.deserialize, - ) - return self._stubs['list_tables'] - - @property - def create_lock(self) -> Callable[ - [metastore.CreateLockRequest], - metastore.Lock]: - r"""Return a callable for the create lock method over gRPC. - - Creates a new lock. 
- - Returns: - Callable[[~.CreateLockRequest], - ~.Lock]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_lock' not in self._stubs: - self._stubs['create_lock'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/CreateLock', - request_serializer=metastore.CreateLockRequest.serialize, - response_deserializer=metastore.Lock.deserialize, - ) - return self._stubs['create_lock'] - - @property - def delete_lock(self) -> Callable[ - [metastore.DeleteLockRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete lock method over gRPC. - - Deletes an existing lock specified by the lock ID. - - Returns: - Callable[[~.DeleteLockRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_lock' not in self._stubs: - self._stubs['delete_lock'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/DeleteLock', - request_serializer=metastore.DeleteLockRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_lock'] - - @property - def check_lock(self) -> Callable[ - [metastore.CheckLockRequest], - metastore.Lock]: - r"""Return a callable for the check lock method over gRPC. - - Checks the state of a lock specified by the lock ID. - - Returns: - Callable[[~.CheckLockRequest], - ~.Lock]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'check_lock' not in self._stubs: - self._stubs['check_lock'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/CheckLock', - request_serializer=metastore.CheckLockRequest.serialize, - response_deserializer=metastore.Lock.deserialize, - ) - return self._stubs['check_lock'] - - @property - def list_locks(self) -> Callable[ - [metastore.ListLocksRequest], - metastore.ListLocksResponse]: - r"""Return a callable for the list locks method over gRPC. - - List all locks in a specified database. - - Returns: - Callable[[~.ListLocksRequest], - ~.ListLocksResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_locks' not in self._stubs: - self._stubs['list_locks'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/ListLocks', - request_serializer=metastore.ListLocksRequest.serialize, - response_deserializer=metastore.ListLocksResponse.deserialize, - ) - return self._stubs['list_locks'] - - def close(self): - self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'MetastoreServiceGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/transports/grpc_asyncio.py deleted file mode 100644 index a3d70cb7f133..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,939 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import inspect -import json -import pickle -import logging as std_logging -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import exceptions as core_exceptions -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.bigquery_biglake_v1alpha1.types import metastore -from google.protobuf import empty_pb2 # type: ignore -from .base import MetastoreServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import MetastoreServiceGrpcTransport - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER - async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "rpcName": str(client_call_details.method), - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - response = await continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = await response.trailing_metadata() - # Convert gRPC metadata to a list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = await response - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response to rpc {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "rpcName": str(client_call_details.method), - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - -
-class MetastoreServiceGrpcAsyncIOTransport(MetastoreServiceTransport): - """gRPC AsyncIO backend transport for MetastoreService. - - BigLake Metastore is a serverless, highly available, multi-tenant - runtime metastore for Google Cloud Data Analytics products. - - The BigLake Metastore API defines the following resource model: - - - A collection of Google Cloud projects: ``/projects/*`` - - Each project has a collection of available locations: - ``/locations/*`` - - Each location has a collection of catalogs: ``/catalogs/*`` - - Each catalog has a collection of databases: ``/databases/*`` - - Each database has a collection of tables: ``/tables/*`` - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'biglake.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - scopes (Optional[Sequence[str]]): An optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. 
- quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'biglake.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'biglake.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. 
- client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, aio.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. 
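# Illustration (not generated code): a caller who needs a custom channel can
# build one with ``type(transport).create_channel(...)`` and pass it through
# the constructor's ``channel`` argument; this property then returns that
# cached instance instead of opening a new connection.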
- return self._grpc_channel - - @property - def create_catalog(self) -> Callable[ - [metastore.CreateCatalogRequest], - Awaitable[metastore.Catalog]]: - r"""Return a callable for the create catalog method over gRPC. - - Creates a new catalog. - - Returns: - Callable[[~.CreateCatalogRequest], - Awaitable[~.Catalog]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_catalog' not in self._stubs: - self._stubs['create_catalog'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/CreateCatalog', - request_serializer=metastore.CreateCatalogRequest.serialize, - response_deserializer=metastore.Catalog.deserialize, - ) - return self._stubs['create_catalog'] - - @property - def delete_catalog(self) -> Callable[ - [metastore.DeleteCatalogRequest], - Awaitable[metastore.Catalog]]: - r"""Return a callable for the delete catalog method over gRPC. - - Deletes an existing catalog specified by the catalog - ID. - - Returns: - Callable[[~.DeleteCatalogRequest], - Awaitable[~.Catalog]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_catalog' not in self._stubs: - self._stubs['delete_catalog'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/DeleteCatalog', - request_serializer=metastore.DeleteCatalogRequest.serialize, - response_deserializer=metastore.Catalog.deserialize, - ) - return self._stubs['delete_catalog'] - - @property - def get_catalog(self) -> Callable[ - [metastore.GetCatalogRequest], - Awaitable[metastore.Catalog]]: - r"""Return a callable for the get catalog method over gRPC. - - Gets the catalog specified by the resource name. - - Returns: - Callable[[~.GetCatalogRequest], - Awaitable[~.Catalog]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_catalog' not in self._stubs: - self._stubs['get_catalog'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/GetCatalog', - request_serializer=metastore.GetCatalogRequest.serialize, - response_deserializer=metastore.Catalog.deserialize, - ) - return self._stubs['get_catalog'] - - @property - def list_catalogs(self) -> Callable[ - [metastore.ListCatalogsRequest], - Awaitable[metastore.ListCatalogsResponse]]: - r"""Return a callable for the list catalogs method over gRPC. - - List all catalogs in a specified project. - - Returns: - Callable[[~.ListCatalogsRequest], - Awaitable[~.ListCatalogsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_catalogs' not in self._stubs: - self._stubs['list_catalogs'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/ListCatalogs', - request_serializer=metastore.ListCatalogsRequest.serialize, - response_deserializer=metastore.ListCatalogsResponse.deserialize, - ) - return self._stubs['list_catalogs'] - - @property - def create_database(self) -> Callable[ - [metastore.CreateDatabaseRequest], - Awaitable[metastore.Database]]: - r"""Return a callable for the create database method over gRPC. - - Creates a new database. - - Returns: - Callable[[~.CreateDatabaseRequest], - Awaitable[~.Database]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_database' not in self._stubs: - self._stubs['create_database'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/CreateDatabase', - request_serializer=metastore.CreateDatabaseRequest.serialize, - response_deserializer=metastore.Database.deserialize, - ) - return self._stubs['create_database'] - - @property - def delete_database(self) -> Callable[ - [metastore.DeleteDatabaseRequest], - Awaitable[metastore.Database]]: - r"""Return a callable for the delete database method over gRPC. - - Deletes an existing database specified by the - database ID. - - Returns: - Callable[[~.DeleteDatabaseRequest], - Awaitable[~.Database]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_database' not in self._stubs: - self._stubs['delete_database'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/DeleteDatabase', - request_serializer=metastore.DeleteDatabaseRequest.serialize, - response_deserializer=metastore.Database.deserialize, - ) - return self._stubs['delete_database'] - - @property - def update_database(self) -> Callable[ - [metastore.UpdateDatabaseRequest], - Awaitable[metastore.Database]]: - r"""Return a callable for the update database method over gRPC. - - Updates an existing database specified by the - database ID. - - Returns: - Callable[[~.UpdateDatabaseRequest], - Awaitable[~.Database]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_database' not in self._stubs: - self._stubs['update_database'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/UpdateDatabase', - request_serializer=metastore.UpdateDatabaseRequest.serialize, - response_deserializer=metastore.Database.deserialize, - ) - return self._stubs['update_database'] - - @property - def get_database(self) -> Callable[ - [metastore.GetDatabaseRequest], - Awaitable[metastore.Database]]: - r"""Return a callable for the get database method over gRPC. - - Gets the database specified by the resource name. 
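A minimal usage sketch (illustrative only; the resource name is a placeholder and ``transport`` is assumed to be an already-constructed ``MetastoreServiceGrpcAsyncIOTransport``), showing that the returned callable yields an awaitable:

.. code-block:: python

    # Calling the property's callable returns an Awaitable[Database].
    request = metastore.GetDatabaseRequest(
        name="projects/p/locations/l/catalogs/c/databases/d",
    )
    database = await transport.get_database(request)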
- - Returns: - Callable[[~.GetDatabaseRequest], - Awaitable[~.Database]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_database' not in self._stubs: - self._stubs['get_database'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/GetDatabase', - request_serializer=metastore.GetDatabaseRequest.serialize, - response_deserializer=metastore.Database.deserialize, - ) - return self._stubs['get_database'] - - @property - def list_databases(self) -> Callable[ - [metastore.ListDatabasesRequest], - Awaitable[metastore.ListDatabasesResponse]]: - r"""Return a callable for the list databases method over gRPC. - - List all databases in a specified catalog. - - Returns: - Callable[[~.ListDatabasesRequest], - Awaitable[~.ListDatabasesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_databases' not in self._stubs: - self._stubs['list_databases'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/ListDatabases', - request_serializer=metastore.ListDatabasesRequest.serialize, - response_deserializer=metastore.ListDatabasesResponse.deserialize, - ) - return self._stubs['list_databases'] - - @property - def create_table(self) -> Callable[ - [metastore.CreateTableRequest], - Awaitable[metastore.Table]]: - r"""Return a callable for the create table method over gRPC. - - Creates a new table. - - Returns: - Callable[[~.CreateTableRequest], - Awaitable[~.Table]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_table' not in self._stubs: - self._stubs['create_table'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/CreateTable', - request_serializer=metastore.CreateTableRequest.serialize, - response_deserializer=metastore.Table.deserialize, - ) - return self._stubs['create_table'] - - @property - def delete_table(self) -> Callable[ - [metastore.DeleteTableRequest], - Awaitable[metastore.Table]]: - r"""Return a callable for the delete table method over gRPC. - - Deletes an existing table specified by the table ID. - - Returns: - Callable[[~.DeleteTableRequest], - Awaitable[~.Table]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_table' not in self._stubs: - self._stubs['delete_table'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/DeleteTable', - request_serializer=metastore.DeleteTableRequest.serialize, - response_deserializer=metastore.Table.deserialize, - ) - return self._stubs['delete_table'] - - @property - def update_table(self) -> Callable[ - [metastore.UpdateTableRequest], - Awaitable[metastore.Table]]: - r"""Return a callable for the update table method over gRPC. - - Updates an existing table specified by the table ID. - - Returns: - Callable[[~.UpdateTableRequest], - Awaitable[~.Table]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_table' not in self._stubs: - self._stubs['update_table'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/UpdateTable', - request_serializer=metastore.UpdateTableRequest.serialize, - response_deserializer=metastore.Table.deserialize, - ) - return self._stubs['update_table'] - - @property - def rename_table(self) -> Callable[ - [metastore.RenameTableRequest], - Awaitable[metastore.Table]]: - r"""Return a callable for the rename table method over gRPC. - - Renames an existing table specified by the table ID. - - Returns: - Callable[[~.RenameTableRequest], - Awaitable[~.Table]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'rename_table' not in self._stubs: - self._stubs['rename_table'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/RenameTable', - request_serializer=metastore.RenameTableRequest.serialize, - response_deserializer=metastore.Table.deserialize, - ) - return self._stubs['rename_table'] - - @property - def get_table(self) -> Callable[ - [metastore.GetTableRequest], - Awaitable[metastore.Table]]: - r"""Return a callable for the get table method over gRPC. - - Gets the table specified by the resource name. - - Returns: - Callable[[~.GetTableRequest], - Awaitable[~.Table]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_table' not in self._stubs: - self._stubs['get_table'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/GetTable', - request_serializer=metastore.GetTableRequest.serialize, - response_deserializer=metastore.Table.deserialize, - ) - return self._stubs['get_table'] - - @property - def list_tables(self) -> Callable[ - [metastore.ListTablesRequest], - Awaitable[metastore.ListTablesResponse]]: - r"""Return a callable for the list tables method over gRPC. - - List all tables in a specified database. - - Returns: - Callable[[~.ListTablesRequest], - Awaitable[~.ListTablesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_tables' not in self._stubs: - self._stubs['list_tables'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/ListTables', - request_serializer=metastore.ListTablesRequest.serialize, - response_deserializer=metastore.ListTablesResponse.deserialize, - ) - return self._stubs['list_tables'] - - @property - def create_lock(self) -> Callable[ - [metastore.CreateLockRequest], - Awaitable[metastore.Lock]]: - r"""Return a callable for the create lock method over gRPC. - - Creates a new lock. - - Returns: - Callable[[~.CreateLockRequest], - Awaitable[~.Lock]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_lock' not in self._stubs: - self._stubs['create_lock'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/CreateLock', - request_serializer=metastore.CreateLockRequest.serialize, - response_deserializer=metastore.Lock.deserialize, - ) - return self._stubs['create_lock'] - - @property - def delete_lock(self) -> Callable[ - [metastore.DeleteLockRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete lock method over gRPC. - - Deletes an existing lock specified by the lock ID. - - Returns: - Callable[[~.DeleteLockRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_lock' not in self._stubs: - self._stubs['delete_lock'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/DeleteLock', - request_serializer=metastore.DeleteLockRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_lock'] - - @property - def check_lock(self) -> Callable[ - [metastore.CheckLockRequest], - Awaitable[metastore.Lock]]: - r"""Return a callable for the check lock method over gRPC. - - Checks the state of a lock specified by the lock ID. - - Returns: - Callable[[~.CheckLockRequest], - Awaitable[~.Lock]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'check_lock' not in self._stubs: - self._stubs['check_lock'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/CheckLock', - request_serializer=metastore.CheckLockRequest.serialize, - response_deserializer=metastore.Lock.deserialize, - ) - return self._stubs['check_lock'] - - @property - def list_locks(self) -> Callable[ - [metastore.ListLocksRequest], - Awaitable[metastore.ListLocksResponse]]: - r"""Return a callable for the list locks method over gRPC. - - List all locks in a specified database. - - Returns: - Callable[[~.ListLocksRequest], - Awaitable[~.ListLocksResponse]]: - A function that, when called, will call the underlying RPC - on the server. 
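A hedged sketch of manual pagination when this callable is used directly (the higher-level client wraps the same RPC in a pager; the parent path is a placeholder and ``transport`` is assumed to exist):

.. code-block:: python

    # Loop over pages by threading next_page_token back into the request.
    request = metastore.ListLocksRequest(
        parent="projects/p/locations/l/catalogs/c/databases/d",
    )
    while True:
        response = await transport.list_locks(request)
        for lock in response.locks:
            print(lock.name)
        if not response.next_page_token:
            break
        request.page_token = response.next_page_token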
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_locks' not in self._stubs: - self._stubs['list_locks'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.biglake.v1alpha1.MetastoreService/ListLocks', - request_serializer=metastore.ListLocksRequest.serialize, - response_deserializer=metastore.ListLocksResponse.deserialize, - ) - return self._stubs['list_locks'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.create_catalog: self._wrap_method( - self.create_catalog, - default_timeout=None, - client_info=client_info, - ), - self.delete_catalog: self._wrap_method( - self.delete_catalog, - default_timeout=None, - client_info=client_info, - ), - self.get_catalog: self._wrap_method( - self.get_catalog, - default_timeout=None, - client_info=client_info, - ), - self.list_catalogs: self._wrap_method( - self.list_catalogs, - default_timeout=None, - client_info=client_info, - ), - self.create_database: self._wrap_method( - self.create_database, - default_timeout=None, - client_info=client_info, - ), - self.delete_database: self._wrap_method( - self.delete_database, - default_timeout=None, - client_info=client_info, - ), - self.update_database: self._wrap_method( - self.update_database, - default_timeout=None, - client_info=client_info, - ), - self.get_database: self._wrap_method( - self.get_database, - default_timeout=None, - client_info=client_info, - ), - self.list_databases: self._wrap_method( - self.list_databases, - default_timeout=None, - client_info=client_info, - ), - self.create_table: self._wrap_method( - self.create_table, - default_timeout=None, - client_info=client_info, - ), - self.delete_table: self._wrap_method( - self.delete_table, - default_timeout=None, - client_info=client_info, - ), - self.update_table: self._wrap_method( - self.update_table, - default_timeout=None, - client_info=client_info, - ), - self.rename_table: self._wrap_method( - self.rename_table, - default_timeout=None, - client_info=client_info, - ), - self.get_table: self._wrap_method( - self.get_table, - default_timeout=None, - client_info=client_info, - ), - self.list_tables: self._wrap_method( - self.list_tables, - default_timeout=None, - client_info=client_info, - ), - self.create_lock: self._wrap_method( - self.create_lock, - default_timeout=None, - client_info=client_info, - ), - self.delete_lock: self._wrap_method( - self.delete_lock, - default_timeout=None, - client_info=client_info, - ), - self.check_lock: self._wrap_method( - self.check_lock, - default_timeout=None, - client_info=client_info, - ), - self.list_locks: self._wrap_method( - self.list_locks, - default_timeout=None, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - -__all__ = ( - 'MetastoreServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/transports/rest.py 
b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/transports/rest.py deleted file mode 100644 index cf829bedcf1c..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/transports/rest.py +++ /dev/null @@ -1,3427 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import logging -import json # type: ignore - -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 - -from google.protobuf import json_format - -from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - - -from google.cloud.bigquery_biglake_v1alpha1.types import metastore -from google.protobuf import empty_pb2 # type: ignore - - -from .rest_base import _BaseMetastoreServiceRestTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = logging.getLogger(__name__) - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=f"requests@{requests_version}", -) - -
-class MetastoreServiceRestInterceptor: - """Interceptor for MetastoreService. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the MetastoreServiceRestTransport. - - .. code-block:: python - class MyCustomMetastoreServiceInterceptor(MetastoreServiceRestInterceptor): - def pre_check_lock(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_check_lock(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_catalog(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_catalog(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_database(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_database(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_lock(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_lock(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_table(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_table(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_catalog(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_delete_catalog(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_database(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_delete_database(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_lock(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_delete_table(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_delete_table(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_catalog(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_catalog(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_database(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_database(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_table(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_table(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_catalogs(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_catalogs(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_databases(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_databases(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_locks(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_locks(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_tables(self, request, metadata): -
logging.log(f"Received request: {request}") - return request, metadata - - def post_list_tables(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_rename_table(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_rename_table(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_database(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_database(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_table(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_table(self, response): - logging.log(f"Received response: {response}") - return response - - transport = MetastoreServiceRestTransport(interceptor=MyCustomMetastoreServiceInterceptor()) - client = MetastoreServiceClient(transport=transport) - - - """ - def pre_check_lock(self, request: metastore.CheckLockRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.CheckLockRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for check_lock - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetastoreService server. - """ - return request, metadata - - def post_check_lock(self, response: metastore.Lock) -> metastore.Lock: - """Post-rpc interceptor for check_lock - - DEPRECATED. Please use the `post_check_lock_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the MetastoreService server but before - it is returned to user code. This `post_check_lock` interceptor runs - before the `post_check_lock_with_metadata` interceptor. - """ - return response - - def post_check_lock_with_metadata(self, response: metastore.Lock, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.Lock, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for check_lock - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the MetastoreService server but before it is returned to user code. - - We recommend only using this `post_check_lock_with_metadata` - interceptor in new development instead of the `post_check_lock` interceptor. - When both interceptors are used, this `post_check_lock_with_metadata` interceptor runs after the - `post_check_lock` interceptor. The (possibly modified) response returned by - `post_check_lock` will be passed to - `post_check_lock_with_metadata`. - """ - return response, metadata - - def pre_create_catalog(self, request: metastore.CreateCatalogRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.CreateCatalogRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_catalog - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetastoreService server. - """ - return request, metadata - - def post_create_catalog(self, response: metastore.Catalog) -> metastore.Catalog: - """Post-rpc interceptor for create_catalog - - DEPRECATED. Please use the `post_create_catalog_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the MetastoreService server but before - it is returned to user code. 
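For example (a sketch only; the class name and printed message are invented for illustration), the deprecated ``post_*`` form can still observe the response before user code sees it:

.. code-block:: python

    class AuditingInterceptor(MetastoreServiceRestInterceptor):
        def post_create_catalog(self, response):
            # Observe the freshly created catalog before returning it.
            print(f"created catalog: {response.name}")
            return response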
This `post_create_catalog` interceptor runs - before the `post_create_catalog_with_metadata` interceptor. - """ - return response - - def post_create_catalog_with_metadata(self, response: metastore.Catalog, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.Catalog, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_catalog - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the MetastoreService server but before it is returned to user code. - - We recommend only using this `post_create_catalog_with_metadata` - interceptor in new development instead of the `post_create_catalog` interceptor. - When both interceptors are used, this `post_create_catalog_with_metadata` interceptor runs after the - `post_create_catalog` interceptor. The (possibly modified) response returned by - `post_create_catalog` will be passed to - `post_create_catalog_with_metadata`. - """ - return response, metadata - - def pre_create_database(self, request: metastore.CreateDatabaseRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.CreateDatabaseRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_database - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetastoreService server. - """ - return request, metadata - - def post_create_database(self, response: metastore.Database) -> metastore.Database: - """Post-rpc interceptor for create_database - - DEPRECATED. Please use the `post_create_database_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the MetastoreService server but before - it is returned to user code. This `post_create_database` interceptor runs - before the `post_create_database_with_metadata` interceptor. - """ - return response - - def post_create_database_with_metadata(self, response: metastore.Database, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.Database, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_database - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the MetastoreService server but before it is returned to user code. - - We recommend only using this `post_create_database_with_metadata` - interceptor in new development instead of the `post_create_database` interceptor. - When both interceptors are used, this `post_create_database_with_metadata` interceptor runs after the - `post_create_database` interceptor. The (possibly modified) response returned by - `post_create_database` will be passed to - `post_create_database_with_metadata`. - """ - return response, metadata - - def pre_create_lock(self, request: metastore.CreateLockRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.CreateLockRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_lock - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetastoreService server. - """ - return request, metadata - - def post_create_lock(self, response: metastore.Lock) -> metastore.Lock: - """Post-rpc interceptor for create_lock - - DEPRECATED. Please use the `post_create_lock_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the MetastoreService server but before - it is returned to user code. 
This `post_create_lock` interceptor runs - before the `post_create_lock_with_metadata` interceptor. - """ - return response - - def post_create_lock_with_metadata(self, response: metastore.Lock, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.Lock, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_lock - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the MetastoreService server but before it is returned to user code. - - We recommend only using this `post_create_lock_with_metadata` - interceptor in new development instead of the `post_create_lock` interceptor. - When both interceptors are used, this `post_create_lock_with_metadata` interceptor runs after the - `post_create_lock` interceptor. The (possibly modified) response returned by - `post_create_lock` will be passed to - `post_create_lock_with_metadata`. - """ - return response, metadata - - def pre_create_table(self, request: metastore.CreateTableRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.CreateTableRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_table - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetastoreService server. - """ - return request, metadata - - def post_create_table(self, response: metastore.Table) -> metastore.Table: - """Post-rpc interceptor for create_table - - DEPRECATED. Please use the `post_create_table_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the MetastoreService server but before - it is returned to user code. This `post_create_table` interceptor runs - before the `post_create_table_with_metadata` interceptor. - """ - return response - - def post_create_table_with_metadata(self, response: metastore.Table, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.Table, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_table - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the MetastoreService server but before it is returned to user code. - - We recommend only using this `post_create_table_with_metadata` - interceptor in new development instead of the `post_create_table` interceptor. - When both interceptors are used, this `post_create_table_with_metadata` interceptor runs after the - `post_create_table` interceptor. The (possibly modified) response returned by - `post_create_table` will be passed to - `post_create_table_with_metadata`. - """ - return response, metadata - - def pre_delete_catalog(self, request: metastore.DeleteCatalogRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.DeleteCatalogRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_catalog - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetastoreService server. - """ - return request, metadata - - def post_delete_catalog(self, response: metastore.Catalog) -> metastore.Catalog: - """Post-rpc interceptor for delete_catalog - - DEPRECATED. Please use the `post_delete_catalog_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the MetastoreService server but before - it is returned to user code. 
This `post_delete_catalog` interceptor runs - before the `post_delete_catalog_with_metadata` interceptor. - """ - return response - - def post_delete_catalog_with_metadata(self, response: metastore.Catalog, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.Catalog, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for delete_catalog - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the MetastoreService server but before it is returned to user code. - - We recommend only using this `post_delete_catalog_with_metadata` - interceptor in new development instead of the `post_delete_catalog` interceptor. - When both interceptors are used, this `post_delete_catalog_with_metadata` interceptor runs after the - `post_delete_catalog` interceptor. The (possibly modified) response returned by - `post_delete_catalog` will be passed to - `post_delete_catalog_with_metadata`. - """ - return response, metadata - - def pre_delete_database(self, request: metastore.DeleteDatabaseRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.DeleteDatabaseRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_database - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetastoreService server. - """ - return request, metadata - - def post_delete_database(self, response: metastore.Database) -> metastore.Database: - """Post-rpc interceptor for delete_database - - DEPRECATED. Please use the `post_delete_database_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the MetastoreService server but before - it is returned to user code. This `post_delete_database` interceptor runs - before the `post_delete_database_with_metadata` interceptor. - """ - return response - - def post_delete_database_with_metadata(self, response: metastore.Database, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.Database, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for delete_database - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the MetastoreService server but before it is returned to user code. - - We recommend only using this `post_delete_database_with_metadata` - interceptor in new development instead of the `post_delete_database` interceptor. - When both interceptors are used, this `post_delete_database_with_metadata` interceptor runs after the - `post_delete_database` interceptor. The (possibly modified) response returned by - `post_delete_database` will be passed to - `post_delete_database_with_metadata`. - """ - return response, metadata - - def pre_delete_lock(self, request: metastore.DeleteLockRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.DeleteLockRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_lock - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetastoreService server. - """ - return request, metadata - - def pre_delete_table(self, request: metastore.DeleteTableRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.DeleteTableRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_table - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetastoreService server. 
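For example (a sketch; the class and header names are invented for illustration), a ``pre_*`` hook can append request metadata before the call goes out:

.. code-block:: python

    class TaggingInterceptor(MetastoreServiceRestInterceptor):
        def pre_delete_table(self, request, metadata):
            # metadata is a sequence of (key, value) tuples.
            return request, list(metadata) + [("x-example-header", "cleanup")]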
- """ - return request, metadata - - def post_delete_table(self, response: metastore.Table) -> metastore.Table: - """Post-rpc interceptor for delete_table - - DEPRECATED. Please use the `post_delete_table_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the MetastoreService server but before - it is returned to user code. This `post_delete_table` interceptor runs - before the `post_delete_table_with_metadata` interceptor. - """ - return response - - def post_delete_table_with_metadata(self, response: metastore.Table, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.Table, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for delete_table - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the MetastoreService server but before it is returned to user code. - - We recommend only using this `post_delete_table_with_metadata` - interceptor in new development instead of the `post_delete_table` interceptor. - When both interceptors are used, this `post_delete_table_with_metadata` interceptor runs after the - `post_delete_table` interceptor. The (possibly modified) response returned by - `post_delete_table` will be passed to - `post_delete_table_with_metadata`. - """ - return response, metadata - - def pre_get_catalog(self, request: metastore.GetCatalogRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.GetCatalogRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_catalog - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetastoreService server. - """ - return request, metadata - - def post_get_catalog(self, response: metastore.Catalog) -> metastore.Catalog: - """Post-rpc interceptor for get_catalog - - DEPRECATED. Please use the `post_get_catalog_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the MetastoreService server but before - it is returned to user code. This `post_get_catalog` interceptor runs - before the `post_get_catalog_with_metadata` interceptor. - """ - return response - - def post_get_catalog_with_metadata(self, response: metastore.Catalog, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.Catalog, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_catalog - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the MetastoreService server but before it is returned to user code. - - We recommend only using this `post_get_catalog_with_metadata` - interceptor in new development instead of the `post_get_catalog` interceptor. - When both interceptors are used, this `post_get_catalog_with_metadata` interceptor runs after the - `post_get_catalog` interceptor. The (possibly modified) response returned by - `post_get_catalog` will be passed to - `post_get_catalog_with_metadata`. - """ - return response, metadata - - def pre_get_database(self, request: metastore.GetDatabaseRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.GetDatabaseRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_database - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetastoreService server. 
- """ - return request, metadata - - def post_get_database(self, response: metastore.Database) -> metastore.Database: - """Post-rpc interceptor for get_database - - DEPRECATED. Please use the `post_get_database_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the MetastoreService server but before - it is returned to user code. This `post_get_database` interceptor runs - before the `post_get_database_with_metadata` interceptor. - """ - return response - - def post_get_database_with_metadata(self, response: metastore.Database, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.Database, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_database - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the MetastoreService server but before it is returned to user code. - - We recommend only using this `post_get_database_with_metadata` - interceptor in new development instead of the `post_get_database` interceptor. - When both interceptors are used, this `post_get_database_with_metadata` interceptor runs after the - `post_get_database` interceptor. The (possibly modified) response returned by - `post_get_database` will be passed to - `post_get_database_with_metadata`. - """ - return response, metadata - - def pre_get_table(self, request: metastore.GetTableRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.GetTableRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_table - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetastoreService server. - """ - return request, metadata - - def post_get_table(self, response: metastore.Table) -> metastore.Table: - """Post-rpc interceptor for get_table - - DEPRECATED. Please use the `post_get_table_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the MetastoreService server but before - it is returned to user code. This `post_get_table` interceptor runs - before the `post_get_table_with_metadata` interceptor. - """ - return response - - def post_get_table_with_metadata(self, response: metastore.Table, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.Table, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_table - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the MetastoreService server but before it is returned to user code. - - We recommend only using this `post_get_table_with_metadata` - interceptor in new development instead of the `post_get_table` interceptor. - When both interceptors are used, this `post_get_table_with_metadata` interceptor runs after the - `post_get_table` interceptor. The (possibly modified) response returned by - `post_get_table` will be passed to - `post_get_table_with_metadata`. - """ - return response, metadata - - def pre_list_catalogs(self, request: metastore.ListCatalogsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.ListCatalogsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_catalogs - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetastoreService server. 
- """ - return request, metadata - - def post_list_catalogs(self, response: metastore.ListCatalogsResponse) -> metastore.ListCatalogsResponse: - """Post-rpc interceptor for list_catalogs - - DEPRECATED. Please use the `post_list_catalogs_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the MetastoreService server but before - it is returned to user code. This `post_list_catalogs` interceptor runs - before the `post_list_catalogs_with_metadata` interceptor. - """ - return response - - def post_list_catalogs_with_metadata(self, response: metastore.ListCatalogsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.ListCatalogsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_catalogs - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the MetastoreService server but before it is returned to user code. - - We recommend only using this `post_list_catalogs_with_metadata` - interceptor in new development instead of the `post_list_catalogs` interceptor. - When both interceptors are used, this `post_list_catalogs_with_metadata` interceptor runs after the - `post_list_catalogs` interceptor. The (possibly modified) response returned by - `post_list_catalogs` will be passed to - `post_list_catalogs_with_metadata`. - """ - return response, metadata - - def pre_list_databases(self, request: metastore.ListDatabasesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.ListDatabasesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_databases - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetastoreService server. - """ - return request, metadata - - def post_list_databases(self, response: metastore.ListDatabasesResponse) -> metastore.ListDatabasesResponse: - """Post-rpc interceptor for list_databases - - DEPRECATED. Please use the `post_list_databases_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the MetastoreService server but before - it is returned to user code. This `post_list_databases` interceptor runs - before the `post_list_databases_with_metadata` interceptor. - """ - return response - - def post_list_databases_with_metadata(self, response: metastore.ListDatabasesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.ListDatabasesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_databases - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the MetastoreService server but before it is returned to user code. - - We recommend only using this `post_list_databases_with_metadata` - interceptor in new development instead of the `post_list_databases` interceptor. - When both interceptors are used, this `post_list_databases_with_metadata` interceptor runs after the - `post_list_databases` interceptor. The (possibly modified) response returned by - `post_list_databases` will be passed to - `post_list_databases_with_metadata`. 
- """ - return response, metadata - - def pre_list_locks(self, request: metastore.ListLocksRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.ListLocksRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_locks - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetastoreService server. - """ - return request, metadata - - def post_list_locks(self, response: metastore.ListLocksResponse) -> metastore.ListLocksResponse: - """Post-rpc interceptor for list_locks - - DEPRECATED. Please use the `post_list_locks_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the MetastoreService server but before - it is returned to user code. This `post_list_locks` interceptor runs - before the `post_list_locks_with_metadata` interceptor. - """ - return response - - def post_list_locks_with_metadata(self, response: metastore.ListLocksResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.ListLocksResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_locks - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the MetastoreService server but before it is returned to user code. - - We recommend only using this `post_list_locks_with_metadata` - interceptor in new development instead of the `post_list_locks` interceptor. - When both interceptors are used, this `post_list_locks_with_metadata` interceptor runs after the - `post_list_locks` interceptor. The (possibly modified) response returned by - `post_list_locks` will be passed to - `post_list_locks_with_metadata`. - """ - return response, metadata - - def pre_list_tables(self, request: metastore.ListTablesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.ListTablesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_tables - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetastoreService server. - """ - return request, metadata - - def post_list_tables(self, response: metastore.ListTablesResponse) -> metastore.ListTablesResponse: - """Post-rpc interceptor for list_tables - - DEPRECATED. Please use the `post_list_tables_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the MetastoreService server but before - it is returned to user code. This `post_list_tables` interceptor runs - before the `post_list_tables_with_metadata` interceptor. - """ - return response - - def post_list_tables_with_metadata(self, response: metastore.ListTablesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.ListTablesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_tables - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the MetastoreService server but before it is returned to user code. - - We recommend only using this `post_list_tables_with_metadata` - interceptor in new development instead of the `post_list_tables` interceptor. - When both interceptors are used, this `post_list_tables_with_metadata` interceptor runs after the - `post_list_tables` interceptor. The (possibly modified) response returned by - `post_list_tables` will be passed to - `post_list_tables_with_metadata`. 
- """ - return response, metadata - - def pre_rename_table(self, request: metastore.RenameTableRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.RenameTableRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for rename_table - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetastoreService server. - """ - return request, metadata - - def post_rename_table(self, response: metastore.Table) -> metastore.Table: - """Post-rpc interceptor for rename_table - - DEPRECATED. Please use the `post_rename_table_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the MetastoreService server but before - it is returned to user code. This `post_rename_table` interceptor runs - before the `post_rename_table_with_metadata` interceptor. - """ - return response - - def post_rename_table_with_metadata(self, response: metastore.Table, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.Table, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for rename_table - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the MetastoreService server but before it is returned to user code. - - We recommend only using this `post_rename_table_with_metadata` - interceptor in new development instead of the `post_rename_table` interceptor. - When both interceptors are used, this `post_rename_table_with_metadata` interceptor runs after the - `post_rename_table` interceptor. The (possibly modified) response returned by - `post_rename_table` will be passed to - `post_rename_table_with_metadata`. - """ - return response, metadata - - def pre_update_database(self, request: metastore.UpdateDatabaseRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.UpdateDatabaseRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_database - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetastoreService server. - """ - return request, metadata - - def post_update_database(self, response: metastore.Database) -> metastore.Database: - """Post-rpc interceptor for update_database - - DEPRECATED. Please use the `post_update_database_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the MetastoreService server but before - it is returned to user code. This `post_update_database` interceptor runs - before the `post_update_database_with_metadata` interceptor. - """ - return response - - def post_update_database_with_metadata(self, response: metastore.Database, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.Database, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_database - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the MetastoreService server but before it is returned to user code. - - We recommend only using this `post_update_database_with_metadata` - interceptor in new development instead of the `post_update_database` interceptor. - When both interceptors are used, this `post_update_database_with_metadata` interceptor runs after the - `post_update_database` interceptor. The (possibly modified) response returned by - `post_update_database` will be passed to - `post_update_database_with_metadata`. 
- """ - return response, metadata - - def pre_update_table(self, request: metastore.UpdateTableRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.UpdateTableRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_table - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetastoreService server. - """ - return request, metadata - - def post_update_table(self, response: metastore.Table) -> metastore.Table: - """Post-rpc interceptor for update_table - - DEPRECATED. Please use the `post_update_table_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the MetastoreService server but before - it is returned to user code. This `post_update_table` interceptor runs - before the `post_update_table_with_metadata` interceptor. - """ - return response - - def post_update_table_with_metadata(self, response: metastore.Table, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[metastore.Table, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_table - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the MetastoreService server but before it is returned to user code. - - We recommend only using this `post_update_table_with_metadata` - interceptor in new development instead of the `post_update_table` interceptor. - When both interceptors are used, this `post_update_table_with_metadata` interceptor runs after the - `post_update_table` interceptor. The (possibly modified) response returned by - `post_update_table` will be passed to - `post_update_table_with_metadata`. - """ - return response, metadata - - -@dataclasses.dataclass -class MetastoreServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: MetastoreServiceRestInterceptor - - -class MetastoreServiceRestTransport(_BaseMetastoreServiceRestTransport): - """REST backend synchronous transport for MetastoreService. - - BigLake Metastore is a serverless, highly available, multi-tenant - runtime metastore for Google Cloud Data Analytics products. - - The BigLake Metastore API defines the following resource model: - - - A collection of Google Cloud projects: ``/projects/*`` - - Each project has a collection of available locations: - ``/locations/*`` - - Each location has a collection of catalogs: ``/catalogs/*`` - - Each catalog has a collection of databases: ``/databases/*`` - - Each database has a collection of tables: ``/tables/*`` - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'biglake.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[MetastoreServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'biglake.googleapis.com'). 
- credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self-signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. - # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - url_scheme=url_scheme, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or MetastoreServiceRestInterceptor() - self._prep_wrapped_messages(client_info) - - class _CheckLock(_BaseMetastoreServiceRestTransport._BaseCheckLock, MetastoreServiceRestStub): - def __hash__(self): - return hash("MetastoreServiceRestTransport.CheckLock") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: metastore.CheckLockRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> metastore.Lock: - r"""Call the check lock method over HTTP. - - Args: - request (~.metastore.CheckLockRequest): - The request object. Request message for the CheckLock - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata.
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.metastore.Lock: - Represents a lock. - """ - - http_options = _BaseMetastoreServiceRestTransport._BaseCheckLock._get_http_options() - - request, metadata = self._interceptor.pre_check_lock(request, metadata) - transcoded_request = _BaseMetastoreServiceRestTransport._BaseCheckLock._get_transcoded_request(http_options, request) - - body = _BaseMetastoreServiceRestTransport._BaseCheckLock._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseMetastoreServiceRestTransport._BaseCheckLock._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.CheckLock", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "rpcName": "CheckLock", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetastoreServiceRestTransport._CheckLock._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
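The comment above refers to the standard ``google.api_core`` error mapping: any response with status 400 or higher is raised as the matching ``GoogleAPICallError`` subclass. Caller-side handling typically looks like this sketch (``check_lock_request`` is assumed to have been built earlier):

.. code-block:: python

    from google.api_core import exceptions as core_exceptions

    try:
        lock = client.check_lock(request=check_lock_request)
    except core_exceptions.NotFound as exc:
        # HTTP 404 maps to NotFound; other statuses map to other subclasses.
        print("lock not found:", exc.message)
    except core_exceptions.GoogleAPICallError as exc:
        print("CheckLock failed:", exc.code, exc.message)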
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = metastore.Lock() - pb_resp = metastore.Lock.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_check_lock(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_check_lock_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = metastore.Lock.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.check_lock", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "rpcName": "CheckLock", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateCatalog(_BaseMetastoreServiceRestTransport._BaseCreateCatalog, MetastoreServiceRestStub): - def __hash__(self): - return hash("MetastoreServiceRestTransport.CreateCatalog") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: metastore.CreateCatalogRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> metastore.Catalog: - r"""Call the create catalog method over HTTP. - - Args: - request (~.metastore.CreateCatalogRequest): - The request object. Request message for the CreateCatalog - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.metastore.Catalog: - Catalog is the container of - databases. 
- - """ - - http_options = _BaseMetastoreServiceRestTransport._BaseCreateCatalog._get_http_options() - - request, metadata = self._interceptor.pre_create_catalog(request, metadata) - transcoded_request = _BaseMetastoreServiceRestTransport._BaseCreateCatalog._get_transcoded_request(http_options, request) - - body = _BaseMetastoreServiceRestTransport._BaseCreateCatalog._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseMetastoreServiceRestTransport._BaseCreateCatalog._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.CreateCatalog", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "rpcName": "CreateCatalog", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetastoreServiceRestTransport._CreateCatalog._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = metastore.Catalog() - pb_resp = metastore.Catalog.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_catalog(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_catalog_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = metastore.Catalog.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.create_catalog", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "rpcName": "CreateCatalog", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateDatabase(_BaseMetastoreServiceRestTransport._BaseCreateDatabase, MetastoreServiceRestStub): - def __hash__(self): - return hash("MetastoreServiceRestTransport.CreateDatabase") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: metastore.CreateDatabaseRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, 
- timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> metastore.Database: - r"""Call the create database method over HTTP. - - Args: - request (~.metastore.CreateDatabaseRequest): - The request object. Request message for the - CreateDatabase method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.metastore.Database: - Database is the container of tables. - """ - - http_options = _BaseMetastoreServiceRestTransport._BaseCreateDatabase._get_http_options() - - request, metadata = self._interceptor.pre_create_database(request, metadata) - transcoded_request = _BaseMetastoreServiceRestTransport._BaseCreateDatabase._get_transcoded_request(http_options, request) - - body = _BaseMetastoreServiceRestTransport._BaseCreateDatabase._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseMetastoreServiceRestTransport._BaseCreateDatabase._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.CreateDatabase", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "rpcName": "CreateDatabase", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetastoreServiceRestTransport._CreateDatabase._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = metastore.Database() - pb_resp = metastore.Database.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_database(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_database_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = metastore.Database.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.create_database", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "rpcName": "CreateDatabase", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateLock(_BaseMetastoreServiceRestTransport._BaseCreateLock, MetastoreServiceRestStub): - def __hash__(self): - return hash("MetastoreServiceRestTransport.CreateLock") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: metastore.CreateLockRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> metastore.Lock: - r"""Call the create lock method over HTTP. - - Args: - request (~.metastore.CreateLockRequest): - The request object. Request message for the CreateLock - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.metastore.Lock: - Represents a lock. 
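Per the metadata convention repeated in these docstrings, each value is a ``str`` unless its key ends in ``-bin``, in which case the value must be ``bytes``. A sketch (the request variable and key names are illustrative):

.. code-block:: python

    metadata = [
        # Plain string value.
        ("x-goog-request-params", "name=projects/p/locations/l/catalogs/c"),
        # The -bin suffix marks the value as raw bytes.
        ("debug-trace-bin", b"\x01\x02\x03"),
    ]
    lock = client.create_lock(request=create_lock_request, metadata=metadata)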
- """ - - http_options = _BaseMetastoreServiceRestTransport._BaseCreateLock._get_http_options() - - request, metadata = self._interceptor.pre_create_lock(request, metadata) - transcoded_request = _BaseMetastoreServiceRestTransport._BaseCreateLock._get_transcoded_request(http_options, request) - - body = _BaseMetastoreServiceRestTransport._BaseCreateLock._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseMetastoreServiceRestTransport._BaseCreateLock._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.CreateLock", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "rpcName": "CreateLock", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetastoreServiceRestTransport._CreateLock._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = metastore.Lock() - pb_resp = metastore.Lock.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_lock(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_lock_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = metastore.Lock.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.create_lock", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "rpcName": "CreateLock", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateTable(_BaseMetastoreServiceRestTransport._BaseCreateTable, MetastoreServiceRestStub): - def __hash__(self): - return hash("MetastoreServiceRestTransport.CreateTable") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: metastore.CreateTableRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: 
Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> metastore.Table: - r"""Call the create table method over HTTP. - - Args: - request (~.metastore.CreateTableRequest): - The request object. Request message for the CreateTable - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.metastore.Table: - Represents a table. - """ - - http_options = _BaseMetastoreServiceRestTransport._BaseCreateTable._get_http_options() - - request, metadata = self._interceptor.pre_create_table(request, metadata) - transcoded_request = _BaseMetastoreServiceRestTransport._BaseCreateTable._get_transcoded_request(http_options, request) - - body = _BaseMetastoreServiceRestTransport._BaseCreateTable._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseMetastoreServiceRestTransport._BaseCreateTable._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.CreateTable", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "rpcName": "CreateTable", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetastoreServiceRestTransport._CreateTable._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = metastore.Table() - pb_resp = metastore.Table.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_table(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_table_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = metastore.Table.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.create_table", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "rpcName": "CreateTable", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteCatalog(_BaseMetastoreServiceRestTransport._BaseDeleteCatalog, MetastoreServiceRestStub): - def __hash__(self): - return hash("MetastoreServiceRestTransport.DeleteCatalog") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: metastore.DeleteCatalogRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> metastore.Catalog: - r"""Call the delete catalog method over HTTP. - - Args: - request (~.metastore.DeleteCatalogRequest): - The request object. Request message for the DeleteCatalog - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.metastore.Catalog: - Catalog is the container of - databases. 
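The deserialization pattern used throughout these handlers parses the JSON body into the underlying protobuf of a proto-plus message; because the protobuf view shares storage with the wrapper, parsing into it updates the wrapper in place. A self-contained sketch using the ``metastore`` types imported at the top of this file:

.. code-block:: python

    from google.protobuf import json_format

    resp = metastore.Catalog()
    # .pb() exposes the wrapped protobuf message that shares storage with resp.
    pb_resp = metastore.Catalog.pb(resp)
    json_format.Parse(
        '{"name": "projects/p/locations/l/catalogs/c"}',
        pb_resp,
        ignore_unknown_fields=True,
    )
    print(resp.name)  # projects/p/locations/l/catalogs/c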
- - """ - - http_options = _BaseMetastoreServiceRestTransport._BaseDeleteCatalog._get_http_options() - - request, metadata = self._interceptor.pre_delete_catalog(request, metadata) - transcoded_request = _BaseMetastoreServiceRestTransport._BaseDeleteCatalog._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseMetastoreServiceRestTransport._BaseDeleteCatalog._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.DeleteCatalog", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "rpcName": "DeleteCatalog", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetastoreServiceRestTransport._DeleteCatalog._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = metastore.Catalog() - pb_resp = metastore.Catalog.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_delete_catalog(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_catalog_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = metastore.Catalog.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.delete_catalog", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "rpcName": "DeleteCatalog", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteDatabase(_BaseMetastoreServiceRestTransport._BaseDeleteDatabase, MetastoreServiceRestStub): - def __hash__(self): - return hash("MetastoreServiceRestTransport.DeleteDatabase") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: metastore.DeleteDatabaseRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> metastore.Database: - r"""Call 
the delete database method over HTTP. - - Args: - request (~.metastore.DeleteDatabaseRequest): - The request object. Request message for the - DeleteDatabase method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.metastore.Database: - Database is the container of tables. - """ - - http_options = _BaseMetastoreServiceRestTransport._BaseDeleteDatabase._get_http_options() - - request, metadata = self._interceptor.pre_delete_database(request, metadata) - transcoded_request = _BaseMetastoreServiceRestTransport._BaseDeleteDatabase._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseMetastoreServiceRestTransport._BaseDeleteDatabase._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.DeleteDatabase", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "rpcName": "DeleteDatabase", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetastoreServiceRestTransport._DeleteDatabase._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = metastore.Database() - pb_resp = metastore.Database.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_delete_database(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_database_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = metastore.Database.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.delete_database", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "rpcName": "DeleteDatabase", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteLock(_BaseMetastoreServiceRestTransport._BaseDeleteLock, MetastoreServiceRestStub): - def __hash__(self): - return hash("MetastoreServiceRestTransport.DeleteLock") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: metastore.DeleteLockRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the delete lock method over HTTP. - - Args: - request (~.metastore.DeleteLockRequest): - The request object. Request message for the DeleteLock - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = _BaseMetastoreServiceRestTransport._BaseDeleteLock._get_http_options() - - request, metadata = self._interceptor.pre_delete_lock(request, metadata) - transcoded_request = _BaseMetastoreServiceRestTransport._BaseDeleteLock._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseMetastoreServiceRestTransport._BaseDeleteLock._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.DeleteLock", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "rpcName": "DeleteLock", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetastoreServiceRestTransport._DeleteLock._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _DeleteTable(_BaseMetastoreServiceRestTransport._BaseDeleteTable, MetastoreServiceRestStub): - def __hash__(self): - return hash("MetastoreServiceRestTransport.DeleteTable") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: metastore.DeleteTableRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> metastore.Table: - r"""Call the delete table method over HTTP. - - Args: - request (~.metastore.DeleteTableRequest): - The request object. Request message for the DeleteTable - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.metastore.Table: - Represents a table. 
- """ - - http_options = _BaseMetastoreServiceRestTransport._BaseDeleteTable._get_http_options() - - request, metadata = self._interceptor.pre_delete_table(request, metadata) - transcoded_request = _BaseMetastoreServiceRestTransport._BaseDeleteTable._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseMetastoreServiceRestTransport._BaseDeleteTable._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.DeleteTable", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "rpcName": "DeleteTable", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetastoreServiceRestTransport._DeleteTable._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = metastore.Table() - pb_resp = metastore.Table.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_delete_table(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_table_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = metastore.Table.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.delete_table", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "rpcName": "DeleteTable", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetCatalog(_BaseMetastoreServiceRestTransport._BaseGetCatalog, MetastoreServiceRestStub): - def __hash__(self): - return hash("MetastoreServiceRestTransport.GetCatalog") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: metastore.GetCatalogRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> metastore.Catalog: - r"""Call the get catalog method over HTTP. 
- - Args: - request (~.metastore.GetCatalogRequest): - The request object. Request message for the GetCatalog - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.metastore.Catalog: - Catalog is the container of - databases. - - """ - - http_options = _BaseMetastoreServiceRestTransport._BaseGetCatalog._get_http_options() - - request, metadata = self._interceptor.pre_get_catalog(request, metadata) - transcoded_request = _BaseMetastoreServiceRestTransport._BaseGetCatalog._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseMetastoreServiceRestTransport._BaseGetCatalog._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.GetCatalog", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "rpcName": "GetCatalog", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetastoreServiceRestTransport._GetCatalog._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = metastore.Catalog() - pb_resp = metastore.Catalog.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_catalog(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_catalog_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = metastore.Catalog.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.get_catalog", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "rpcName": "GetCatalog", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetDatabase(_BaseMetastoreServiceRestTransport._BaseGetDatabase, MetastoreServiceRestStub): - def __hash__(self): - return hash("MetastoreServiceRestTransport.GetDatabase") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: metastore.GetDatabaseRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> metastore.Database: - r"""Call the get database method over HTTP. - - Args: - request (~.metastore.GetDatabaseRequest): - The request object. Request message for the GetDatabase - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.metastore.Database: - Database is the container of tables. 
- """ - - http_options = _BaseMetastoreServiceRestTransport._BaseGetDatabase._get_http_options() - - request, metadata = self._interceptor.pre_get_database(request, metadata) - transcoded_request = _BaseMetastoreServiceRestTransport._BaseGetDatabase._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseMetastoreServiceRestTransport._BaseGetDatabase._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.GetDatabase", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "rpcName": "GetDatabase", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetastoreServiceRestTransport._GetDatabase._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = metastore.Database() - pb_resp = metastore.Database.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_database(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_database_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = metastore.Database.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.get_database", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "rpcName": "GetDatabase", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetTable(_BaseMetastoreServiceRestTransport._BaseGetTable, MetastoreServiceRestStub): - def __hash__(self): - return hash("MetastoreServiceRestTransport.GetTable") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: metastore.GetTableRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> metastore.Table: - r"""Call the get table method over HTTP. 
- - Args: - request (~.metastore.GetTableRequest): - The request object. Request message for the GetTable - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.metastore.Table: - Represents a table. - """ - - http_options = _BaseMetastoreServiceRestTransport._BaseGetTable._get_http_options() - - request, metadata = self._interceptor.pre_get_table(request, metadata) - transcoded_request = _BaseMetastoreServiceRestTransport._BaseGetTable._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseMetastoreServiceRestTransport._BaseGetTable._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.GetTable", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "rpcName": "GetTable", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetastoreServiceRestTransport._GetTable._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = metastore.Table() - pb_resp = metastore.Table.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_table(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_table_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = metastore.Table.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.get_table", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "rpcName": "GetTable", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListCatalogs(_BaseMetastoreServiceRestTransport._BaseListCatalogs, MetastoreServiceRestStub): - def __hash__(self): - return hash("MetastoreServiceRestTransport.ListCatalogs") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: metastore.ListCatalogsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> metastore.ListCatalogsResponse: - r"""Call the list catalogs method over HTTP. - - Args: - request (~.metastore.ListCatalogsRequest): - The request object. Request message for the ListCatalogs - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.metastore.ListCatalogsResponse: - Response message for the ListCatalogs - method. 
- - """ - - http_options = _BaseMetastoreServiceRestTransport._BaseListCatalogs._get_http_options() - - request, metadata = self._interceptor.pre_list_catalogs(request, metadata) - transcoded_request = _BaseMetastoreServiceRestTransport._BaseListCatalogs._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseMetastoreServiceRestTransport._BaseListCatalogs._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.ListCatalogs", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "rpcName": "ListCatalogs", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetastoreServiceRestTransport._ListCatalogs._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = metastore.ListCatalogsResponse() - pb_resp = metastore.ListCatalogsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_catalogs(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_catalogs_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = metastore.ListCatalogsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.list_catalogs", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "rpcName": "ListCatalogs", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListDatabases(_BaseMetastoreServiceRestTransport._BaseListDatabases, MetastoreServiceRestStub): - def __hash__(self): - return hash("MetastoreServiceRestTransport.ListDatabases") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: metastore.ListDatabasesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> 
metastore.ListDatabasesResponse: - r"""Call the list databases method over HTTP. - - Args: - request (~.metastore.ListDatabasesRequest): - The request object. Request message for the ListDatabases - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.metastore.ListDatabasesResponse: - Response message for the - ListDatabases method. - - """ - - http_options = _BaseMetastoreServiceRestTransport._BaseListDatabases._get_http_options() - - request, metadata = self._interceptor.pre_list_databases(request, metadata) - transcoded_request = _BaseMetastoreServiceRestTransport._BaseListDatabases._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseMetastoreServiceRestTransport._BaseListDatabases._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.ListDatabases", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "rpcName": "ListDatabases", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetastoreServiceRestTransport._ListDatabases._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = metastore.ListDatabasesResponse() - pb_resp = metastore.ListDatabasesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_databases(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_databases_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = metastore.ListDatabasesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.list_databases", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "rpcName": "ListDatabases", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListLocks(_BaseMetastoreServiceRestTransport._BaseListLocks, MetastoreServiceRestStub): - def __hash__(self): - return hash("MetastoreServiceRestTransport.ListLocks") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: metastore.ListLocksRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> metastore.ListLocksResponse: - r"""Call the list locks method over HTTP. - - Args: - request (~.metastore.ListLocksRequest): - The request object. Request message for the ListLocks - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.metastore.ListLocksResponse: - Response message for the ListLocks - method. 
- - """ - - http_options = _BaseMetastoreServiceRestTransport._BaseListLocks._get_http_options() - - request, metadata = self._interceptor.pre_list_locks(request, metadata) - transcoded_request = _BaseMetastoreServiceRestTransport._BaseListLocks._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseMetastoreServiceRestTransport._BaseListLocks._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.ListLocks", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "rpcName": "ListLocks", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetastoreServiceRestTransport._ListLocks._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = metastore.ListLocksResponse() - pb_resp = metastore.ListLocksResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_locks(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_locks_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = metastore.ListLocksResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.list_locks", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "rpcName": "ListLocks", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListTables(_BaseMetastoreServiceRestTransport._BaseListTables, MetastoreServiceRestStub): - def __hash__(self): - return hash("MetastoreServiceRestTransport.ListTables") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: metastore.ListTablesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> metastore.ListTablesResponse: - r"""Call the list tables 
method over HTTP. - - Args: - request (~.metastore.ListTablesRequest): - The request object. Request message for the ListTables - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.metastore.ListTablesResponse: - Response message for the ListTables - method. - - """ - - http_options = _BaseMetastoreServiceRestTransport._BaseListTables._get_http_options() - - request, metadata = self._interceptor.pre_list_tables(request, metadata) - transcoded_request = _BaseMetastoreServiceRestTransport._BaseListTables._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseMetastoreServiceRestTransport._BaseListTables._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.ListTables", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "rpcName": "ListTables", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetastoreServiceRestTransport._ListTables._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = metastore.ListTablesResponse() - pb_resp = metastore.ListTablesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_tables(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_tables_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = metastore.ListTablesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.list_tables", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "rpcName": "ListTables", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _RenameTable(_BaseMetastoreServiceRestTransport._BaseRenameTable, MetastoreServiceRestStub): - def __hash__(self): - return hash("MetastoreServiceRestTransport.RenameTable") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: metastore.RenameTableRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> metastore.Table: - r"""Call the rename table method over HTTP. - - Args: - request (~.metastore.RenameTableRequest): - The request object. Request message for the RenameTable - method in MetastoreService - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.metastore.Table: - Represents a table. 
- """ - - http_options = _BaseMetastoreServiceRestTransport._BaseRenameTable._get_http_options() - - request, metadata = self._interceptor.pre_rename_table(request, metadata) - transcoded_request = _BaseMetastoreServiceRestTransport._BaseRenameTable._get_transcoded_request(http_options, request) - - body = _BaseMetastoreServiceRestTransport._BaseRenameTable._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseMetastoreServiceRestTransport._BaseRenameTable._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.RenameTable", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "rpcName": "RenameTable", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetastoreServiceRestTransport._RenameTable._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = metastore.Table() - pb_resp = metastore.Table.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_rename_table(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_rename_table_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = metastore.Table.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.rename_table", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "rpcName": "RenameTable", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateDatabase(_BaseMetastoreServiceRestTransport._BaseUpdateDatabase, MetastoreServiceRestStub): - def __hash__(self): - return hash("MetastoreServiceRestTransport.UpdateDatabase") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: metastore.UpdateDatabaseRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: 
Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> metastore.Database: - r"""Call the update database method over HTTP. - - Args: - request (~.metastore.UpdateDatabaseRequest): - The request object. Request message for the - UpdateDatabase method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.metastore.Database: - Database is the container of tables. - """ - - http_options = _BaseMetastoreServiceRestTransport._BaseUpdateDatabase._get_http_options() - - request, metadata = self._interceptor.pre_update_database(request, metadata) - transcoded_request = _BaseMetastoreServiceRestTransport._BaseUpdateDatabase._get_transcoded_request(http_options, request) - - body = _BaseMetastoreServiceRestTransport._BaseUpdateDatabase._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseMetastoreServiceRestTransport._BaseUpdateDatabase._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.UpdateDatabase", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "rpcName": "UpdateDatabase", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetastoreServiceRestTransport._UpdateDatabase._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = metastore.Database() - pb_resp = metastore.Database.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_database(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_database_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = metastore.Database.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.update_database", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "rpcName": "UpdateDatabase", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateTable(_BaseMetastoreServiceRestTransport._BaseUpdateTable, MetastoreServiceRestStub): - def __hash__(self): - return hash("MetastoreServiceRestTransport.UpdateTable") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: metastore.UpdateTableRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> metastore.Table: - r"""Call the update table method over HTTP. - - Args: - request (~.metastore.UpdateTableRequest): - The request object. Request message for the UpdateTable - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.metastore.Table: - Represents a table. 
- """ - - http_options = _BaseMetastoreServiceRestTransport._BaseUpdateTable._get_http_options() - - request, metadata = self._interceptor.pre_update_table(request, metadata) - transcoded_request = _BaseMetastoreServiceRestTransport._BaseUpdateTable._get_transcoded_request(http_options, request) - - body = _BaseMetastoreServiceRestTransport._BaseUpdateTable._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseMetastoreServiceRestTransport._BaseUpdateTable._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.UpdateTable", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "rpcName": "UpdateTable", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = MetastoreServiceRestTransport._UpdateTable._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = metastore.Table() - pb_resp = metastore.Table.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_table(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_table_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = metastore.Table.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.biglake_v1alpha1.MetastoreServiceClient.update_table", - extra = { - "serviceName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "rpcName": "UpdateTable", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - @property - def check_lock(self) -> Callable[ - [metastore.CheckLockRequest], - metastore.Lock]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CheckLock(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_catalog(self) -> Callable[ - [metastore.CreateCatalogRequest], - metastore.Catalog]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._CreateCatalog(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_database(self) -> Callable[ - [metastore.CreateDatabaseRequest], - metastore.Database]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateDatabase(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_lock(self) -> Callable[ - [metastore.CreateLockRequest], - metastore.Lock]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateLock(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_table(self) -> Callable[ - [metastore.CreateTableRequest], - metastore.Table]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateTable(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_catalog(self) -> Callable[ - [metastore.DeleteCatalogRequest], - metastore.Catalog]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteCatalog(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_database(self) -> Callable[ - [metastore.DeleteDatabaseRequest], - metastore.Database]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteDatabase(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_lock(self) -> Callable[ - [metastore.DeleteLockRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteLock(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_table(self) -> Callable[ - [metastore.DeleteTableRequest], - metastore.Table]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteTable(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_catalog(self) -> Callable[ - [metastore.GetCatalogRequest], - metastore.Catalog]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetCatalog(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_database(self) -> Callable[ - [metastore.GetDatabaseRequest], - metastore.Database]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetDatabase(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_table(self) -> Callable[ - [metastore.GetTableRequest], - metastore.Table]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._GetTable(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_catalogs(self) -> Callable[ - [metastore.ListCatalogsRequest], - metastore.ListCatalogsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListCatalogs(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_databases(self) -> Callable[ - [metastore.ListDatabasesRequest], - metastore.ListDatabasesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListDatabases(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_locks(self) -> Callable[ - [metastore.ListLocksRequest], - metastore.ListLocksResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListLocks(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_tables(self) -> Callable[ - [metastore.ListTablesRequest], - metastore.ListTablesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListTables(self._session, self._host, self._interceptor) # type: ignore - - @property - def rename_table(self) -> Callable[ - [metastore.RenameTableRequest], - metastore.Table]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._RenameTable(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_database(self) -> Callable[ - [metastore.UpdateDatabaseRequest], - metastore.Database]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateDatabase(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_table(self) -> Callable[ - [metastore.UpdateTableRequest], - metastore.Table]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateTable(self._session, self._host, self._interceptor) # type: ignore - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'MetastoreServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/transports/rest_base.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/transports/rest_base.py deleted file mode 100644 index 726695c47291..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/transports/rest_base.py +++ /dev/null @@ -1,875 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from .base import MetastoreServiceTransport, DEFAULT_CLIENT_INFO - -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - - -from google.cloud.bigquery_biglake_v1alpha1.types import metastore -from google.protobuf import empty_pb2 # type: ignore - - -class _BaseMetastoreServiceRestTransport(MetastoreServiceTransport): - """Base REST backend transport for MetastoreService. - - Note: This class is not meant to be used directly. Use its sync and - async sub-classes instead. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'biglake.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - Args: - host (Optional[str]): - The hostname to connect to (default: 'biglake.googleapis.com'). - credentials (Optional[Any]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. 
- """ - # Run the base constructor - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - - class _BaseCheckLock: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1alpha1/{name=projects/*/locations/*/catalogs/*/databases/*/locks/*}:check', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = metastore.CheckLockRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMetastoreServiceRestTransport._BaseCheckLock._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateCatalog: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "catalogId" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1alpha1/{parent=projects/*/locations/*}/catalogs', - 'body': 'catalog', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = metastore.CreateCatalogRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMetastoreServiceRestTransport._BaseCreateCatalog._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateDatabase: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { 
- "databaseId" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1alpha1/{parent=projects/*/locations/*/catalogs/*}/databases', - 'body': 'database', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = metastore.CreateDatabaseRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMetastoreServiceRestTransport._BaseCreateDatabase._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateLock: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1alpha1/{parent=projects/*/locations/*/catalogs/*/databases/*}/locks', - 'body': 'lock', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = metastore.CreateLockRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMetastoreServiceRestTransport._BaseCreateLock._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateTable: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "tableId" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1alpha1/{parent=projects/*/locations/*/catalogs/*/databases/*}/tables', - 'body': 'table', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = metastore.CreateTableRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def 
_get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMetastoreServiceRestTransport._BaseCreateTable._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteCatalog: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1alpha1/{name=projects/*/locations/*/catalogs/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = metastore.DeleteCatalogRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMetastoreServiceRestTransport._BaseDeleteCatalog._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteDatabase: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1alpha1/{name=projects/*/locations/*/catalogs/*/databases/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = metastore.DeleteDatabaseRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMetastoreServiceRestTransport._BaseDeleteDatabase._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteLock: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1alpha1/{name=projects/*/locations/*/catalogs/*/databases/*/locks/*}', - }, - ] - return http_options - - 
@staticmethod - def _get_transcoded_request(http_options, request): - pb_request = metastore.DeleteLockRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMetastoreServiceRestTransport._BaseDeleteLock._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteTable: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1alpha1/{name=projects/*/locations/*/catalogs/*/databases/*/tables/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = metastore.DeleteTableRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMetastoreServiceRestTransport._BaseDeleteTable._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetCatalog: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1alpha1/{name=projects/*/locations/*/catalogs/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = metastore.GetCatalogRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMetastoreServiceRestTransport._BaseGetCatalog._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetDatabase: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1alpha1/{name=projects/*/locations/*/catalogs/*/databases/*}', - }, - ] - return http_options 
- - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = metastore.GetDatabaseRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMetastoreServiceRestTransport._BaseGetDatabase._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetTable: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1alpha1/{name=projects/*/locations/*/catalogs/*/databases/*/tables/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = metastore.GetTableRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMetastoreServiceRestTransport._BaseGetTable._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListCatalogs: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1alpha1/{parent=projects/*/locations/*}/catalogs', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = metastore.ListCatalogsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMetastoreServiceRestTransport._BaseListCatalogs._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListDatabases: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1alpha1/{parent=projects/*/locations/*/catalogs/*}/databases', - }, - ] - return 
http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = metastore.ListDatabasesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMetastoreServiceRestTransport._BaseListDatabases._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListLocks: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1alpha1/{parent=projects/*/locations/*/catalogs/*/databases/*}/locks', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = metastore.ListLocksRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMetastoreServiceRestTransport._BaseListLocks._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListTables: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1alpha1/{parent=projects/*/locations/*/catalogs/*/databases/*}/tables', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = metastore.ListTablesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMetastoreServiceRestTransport._BaseListTables._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseRenameTable: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': 
'/v1alpha1/{name=projects/*/locations/*/catalogs/*/databases/*/tables/*}:rename', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = metastore.RenameTableRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMetastoreServiceRestTransport._BaseRenameTable._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateDatabase: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1alpha1/{database.name=projects/*/locations/*/catalogs/*/databases/*}', - 'body': 'database', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = metastore.UpdateDatabaseRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMetastoreServiceRestTransport._BaseUpdateDatabase._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateTable: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1alpha1/{table.name=projects/*/locations/*/catalogs/*/databases/*/tables/*}', - 'body': 'table', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = metastore.UpdateTableRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = 
json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseMetastoreServiceRestTransport._BaseUpdateTable._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - -__all__=( - '_BaseMetastoreServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/types/__init__.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/types/__init__.py deleted file mode 100644 index d67e50fb503b..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/types/__init__.py +++ /dev/null @@ -1,80 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .metastore import ( - Catalog, - CheckLockRequest, - CreateCatalogRequest, - CreateDatabaseRequest, - CreateLockRequest, - CreateTableRequest, - Database, - DeleteCatalogRequest, - DeleteDatabaseRequest, - DeleteLockRequest, - DeleteTableRequest, - GetCatalogRequest, - GetDatabaseRequest, - GetTableRequest, - HiveDatabaseOptions, - HiveTableOptions, - ListCatalogsRequest, - ListCatalogsResponse, - ListDatabasesRequest, - ListDatabasesResponse, - ListLocksRequest, - ListLocksResponse, - ListTablesRequest, - ListTablesResponse, - Lock, - RenameTableRequest, - Table, - UpdateDatabaseRequest, - UpdateTableRequest, - TableView, -) - -__all__ = ( - 'Catalog', - 'CheckLockRequest', - 'CreateCatalogRequest', - 'CreateDatabaseRequest', - 'CreateLockRequest', - 'CreateTableRequest', - 'Database', - 'DeleteCatalogRequest', - 'DeleteDatabaseRequest', - 'DeleteLockRequest', - 'DeleteTableRequest', - 'GetCatalogRequest', - 'GetDatabaseRequest', - 'GetTableRequest', - 'HiveDatabaseOptions', - 'HiveTableOptions', - 'ListCatalogsRequest', - 'ListCatalogsResponse', - 'ListDatabasesRequest', - 'ListDatabasesResponse', - 'ListLocksRequest', - 'ListLocksResponse', - 'ListTablesRequest', - 'ListTablesResponse', - 'Lock', - 'RenameTableRequest', - 'Table', - 'UpdateDatabaseRequest', - 'UpdateTableRequest', - 'TableView', -) diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/types/metastore.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/types/metastore.py deleted file mode 100644 index 0f63129ab468..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/google/cloud/bigquery_biglake_v1alpha1/types/metastore.py +++ /dev/null @@ -1,1066 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.bigquery.biglake.v1alpha1', - manifest={ - 'TableView', - 'Catalog', - 'Database', - 'Table', - 'Lock', - 'CreateCatalogRequest', - 'DeleteCatalogRequest', - 'GetCatalogRequest', - 'ListCatalogsRequest', - 'ListCatalogsResponse', - 'CreateDatabaseRequest', - 'DeleteDatabaseRequest', - 'UpdateDatabaseRequest', - 'GetDatabaseRequest', - 'ListDatabasesRequest', - 'ListDatabasesResponse', - 'CreateTableRequest', - 'DeleteTableRequest', - 'UpdateTableRequest', - 'RenameTableRequest', - 'GetTableRequest', - 'ListTablesRequest', - 'ListTablesResponse', - 'CreateLockRequest', - 'DeleteLockRequest', - 'CheckLockRequest', - 'ListLocksRequest', - 'ListLocksResponse', - 'HiveDatabaseOptions', - 'HiveTableOptions', - }, -) - - -class TableView(proto.Enum): - r"""View on Table. Represents which fields will be populated for - calls that return Table objects. - - Values: - TABLE_VIEW_UNSPECIFIED (0): - Default value. The API will default to the - BASIC view. - BASIC (1): - Include only table names. - This is the default value. - FULL (2): - Include everything. - """ - TABLE_VIEW_UNSPECIFIED = 0 - BASIC = 1 - FULL = 2 - - -class Catalog(proto.Message): - r"""Catalog is the container of databases. - - Attributes: - name (str): - Output only. The resource name. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The creation time of the - catalog. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The last modification time of - the catalog. - delete_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The deletion time of the - catalog. Only set after the catalog is deleted. - expire_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when this catalog is - considered expired. Only set after the catalog - is deleted. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - delete_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - expire_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - - -class Database(proto.Message): - r"""Database is the container of tables. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - hive_options (google.cloud.bigquery_biglake_v1alpha1.types.HiveDatabaseOptions): - Options of a Hive database. - - This field is a member of `oneof`_ ``options``. 
- name (str): - Output only. The resource name. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The creation time of the - database. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The last modification time of - the database. - delete_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The deletion time of the - database. Only set after the database is - deleted. - expire_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when this database is - considered expired. Only set after the database - is deleted. - type_ (google.cloud.bigquery_biglake_v1alpha1.types.Database.Type): - The database type. - """ - class Type(proto.Enum): - r"""The database type. - - Values: - TYPE_UNSPECIFIED (0): - The type is not specified. - HIVE (1): - Represents a database storing tables - compatible with Hive Metastore tables. - """ - TYPE_UNSPECIFIED = 0 - HIVE = 1 - - hive_options: 'HiveDatabaseOptions' = proto.Field( - proto.MESSAGE, - number=7, - oneof='options', - message='HiveDatabaseOptions', - ) - name: str = proto.Field( - proto.STRING, - number=1, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - delete_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - expire_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - type_: Type = proto.Field( - proto.ENUM, - number=6, - enum=Type, - ) - - -class Table(proto.Message): - r"""Represents a table. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - hive_options (google.cloud.bigquery_biglake_v1alpha1.types.HiveTableOptions): - Options of a Hive table. - - This field is a member of `oneof`_ ``options``. - name (str): - Output only. The resource name. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The creation time of the table. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The last modification time of - the table. - delete_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The deletion time of the table. - Only set after the table is deleted. - expire_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time when this table is - considered expired. Only set after the table is - deleted. - type_ (google.cloud.bigquery_biglake_v1alpha1.types.Table.Type): - The table type. - etag (str): - The checksum of a table object computed by - the server based on the value of other fields. - It may be sent on update requests to ensure the - client has an up-to-date value before - proceeding. It is only checked for update table - operations. - """ - class Type(proto.Enum): - r"""The table type. - - Values: - TYPE_UNSPECIFIED (0): - The type is not specified. - HIVE (1): - Represents a table compatible with Hive - Metastore tables. 
- """ - TYPE_UNSPECIFIED = 0 - HIVE = 1 - - hive_options: 'HiveTableOptions' = proto.Field( - proto.MESSAGE, - number=7, - oneof='options', - message='HiveTableOptions', - ) - name: str = proto.Field( - proto.STRING, - number=1, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - delete_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - expire_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - type_: Type = proto.Field( - proto.ENUM, - number=6, - enum=Type, - ) - etag: str = proto.Field( - proto.STRING, - number=8, - ) - - -class Lock(proto.Message): - r"""Represents a lock. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - table_id (str): - The table ID (not fully qualified name) in - the same database that the lock will be created - on. The table must exist. - - This field is a member of `oneof`_ ``resources``. - name (str): - Output only. The resource name. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id} - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The creation time of the lock. - type_ (google.cloud.bigquery_biglake_v1alpha1.types.Lock.Type): - The lock type. - state (google.cloud.bigquery_biglake_v1alpha1.types.Lock.State): - Output only. The lock state. - """ - class Type(proto.Enum): - r"""The lock type. - - Values: - TYPE_UNSPECIFIED (0): - The type is not specified. - EXCLUSIVE (1): - An exclusive lock prevents another lock from - being created on the same resource. - """ - TYPE_UNSPECIFIED = 0 - EXCLUSIVE = 1 - - class State(proto.Enum): - r"""The lock state. - - Values: - STATE_UNSPECIFIED (0): - The state is not specified. - WAITING (1): - Waiting to acquire the lock. - ACQUIRED (2): - The lock has been acquired. - """ - STATE_UNSPECIFIED = 0 - WAITING = 1 - ACQUIRED = 2 - - table_id: str = proto.Field( - proto.STRING, - number=5, - oneof='resources', - ) - name: str = proto.Field( - proto.STRING, - number=1, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - type_: Type = proto.Field( - proto.ENUM, - number=3, - enum=Type, - ) - state: State = proto.Field( - proto.ENUM, - number=4, - enum=State, - ) - - -class CreateCatalogRequest(proto.Message): - r"""Request message for the CreateCatalog method. - - Attributes: - parent (str): - Required. The parent resource where this catalog will be - created. Format: - projects/{project_id_or_number}/locations/{location_id} - catalog (google.cloud.bigquery_biglake_v1alpha1.types.Catalog): - Required. The catalog to create. The ``name`` field does not - need to be provided. - catalog_id (str): - Required. The ID to use for the catalog, - which will become the final component of the - catalog's resource name. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - catalog: 'Catalog' = proto.Field( - proto.MESSAGE, - number=2, - message='Catalog', - ) - catalog_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class DeleteCatalogRequest(proto.Message): - r"""Request message for the DeleteCatalog method. 
- - Attributes: - name (str): - Required. The name of the catalog to delete. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class GetCatalogRequest(proto.Message): - r"""Request message for the GetCatalog method. - - Attributes: - name (str): - Required. The name of the catalog to retrieve. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListCatalogsRequest(proto.Message): - r"""Request message for the ListCatalogs method. - - Attributes: - parent (str): - Required. The parent, which owns this collection of - catalogs. Format: - projects/{project_id_or_number}/locations/{location_id} - page_size (int): - The maximum number of catalogs to return. The - service may return fewer than this value. - If unspecified, at most 50 catalogs will be - returned. The maximum value is 1000; values - above 1000 will be coerced to 1000. - page_token (str): - A page token, received from a previous ``ListCatalogs`` - call. Provide this to retrieve the subsequent page. - - When paginating, all other parameters provided to - ``ListCatalogs`` must match the call that provided the page - token. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListCatalogsResponse(proto.Message): - r"""Response message for the ListCatalogs method. - - Attributes: - catalogs (MutableSequence[google.cloud.bigquery_biglake_v1alpha1.types.Catalog]): - The catalogs from the specified project. - next_page_token (str): - A token, which can be sent as ``page_token`` to retrieve the - next page. If this field is omitted, there are no subsequent - pages. - """ - - @property - def raw_page(self): - return self - - catalogs: MutableSequence['Catalog'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Catalog', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class CreateDatabaseRequest(proto.Message): - r"""Request message for the CreateDatabase method. - - Attributes: - parent (str): - Required. The parent resource where this database will be - created. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} - database (google.cloud.bigquery_biglake_v1alpha1.types.Database): - Required. The database to create. The ``name`` field does - not need to be provided. - database_id (str): - Required. The ID to use for the database, - which will become the final component of the - database's resource name. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - database: 'Database' = proto.Field( - proto.MESSAGE, - number=2, - message='Database', - ) - database_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class DeleteDatabaseRequest(proto.Message): - r"""Request message for the DeleteDatabase method. - - Attributes: - name (str): - Required. The name of the database to delete. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class UpdateDatabaseRequest(proto.Message): - r"""Request message for the UpdateDatabase method. - - Attributes: - database (google.cloud.bigquery_biglake_v1alpha1.types.Database): - Required. 
The database to update. - - The database's ``name`` field is used to identify the - database to update. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} - update_mask (google.protobuf.field_mask_pb2.FieldMask): - The list of fields to update. - - For the ``FieldMask`` definition, see - https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask - If not set, defaults to all of the fields that are allowed - to update. - """ - - database: 'Database' = proto.Field( - proto.MESSAGE, - number=1, - message='Database', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class GetDatabaseRequest(proto.Message): - r"""Request message for the GetDatabase method. - - Attributes: - name (str): - Required. The name of the database to retrieve. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListDatabasesRequest(proto.Message): - r"""Request message for the ListDatabases method. - - Attributes: - parent (str): - Required. The parent, which owns this collection of - databases. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id} - page_size (int): - The maximum number of databases to return. - The service may return fewer than this value. If - unspecified, at most 50 databases will be - returned. The maximum value is 1000; values - above 1000 will be coerced to 1000. - page_token (str): - A page token, received from a previous ``ListDatabases`` - call. Provide this to retrieve the subsequent page. - - When paginating, all other parameters provided to - ``ListDatabases`` must match the call that provided the page - token. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListDatabasesResponse(proto.Message): - r"""Response message for the ListDatabases method. - - Attributes: - databases (MutableSequence[google.cloud.bigquery_biglake_v1alpha1.types.Database]): - The databases from the specified catalog. - next_page_token (str): - A token, which can be sent as ``page_token`` to retrieve the - next page. If this field is omitted, there are no subsequent - pages. - """ - - @property - def raw_page(self): - return self - - databases: MutableSequence['Database'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Database', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class CreateTableRequest(proto.Message): - r"""Request message for the CreateTable method. - - Attributes: - parent (str): - Required. The parent resource where this table will be - created. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} - table (google.cloud.bigquery_biglake_v1alpha1.types.Table): - Required. The table to create. The ``name`` field does not - need to be provided for the table creation. - table_id (str): - Required. The ID to use for the table, which - will become the final component of the table's - resource name. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - table: 'Table' = proto.Field( - proto.MESSAGE, - number=2, - message='Table', - ) - table_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class DeleteTableRequest(proto.Message): - r"""Request message for the DeleteTable method. - - Attributes: - name (str): - Required. The name of the table to delete. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class UpdateTableRequest(proto.Message): - r"""Request message for the UpdateTable method. - - Attributes: - table (google.cloud.bigquery_biglake_v1alpha1.types.Table): - Required. The table to update. - - The table's ``name`` field is used to identify the table to - update. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} - update_mask (google.protobuf.field_mask_pb2.FieldMask): - The list of fields to update. - - For the ``FieldMask`` definition, see - https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask - If not set, defaults to all of the fields that are allowed - to update. - """ - - table: 'Table' = proto.Field( - proto.MESSAGE, - number=1, - message='Table', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class RenameTableRequest(proto.Message): - r"""Request message for the RenameTable method in - MetastoreService - - Attributes: - name (str): - Required. The table's ``name`` field is used to identify the - table to rename. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} - new_name (str): - Required. The new ``name`` for the specified table, must be - in the same database. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - new_name: str = proto.Field( - proto.STRING, - number=2, - ) - - -class GetTableRequest(proto.Message): - r"""Request message for the GetTable method. - - Attributes: - name (str): - Required. The name of the table to retrieve. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/tables/{table_id} - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListTablesRequest(proto.Message): - r"""Request message for the ListTables method. - - Attributes: - parent (str): - Required. The parent, which owns this collection of tables. - Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} - page_size (int): - The maximum number of tables to return. The - service may return fewer than this value. - If unspecified, at most 50 tables will be - returned. The maximum value is 1000; values - above 1000 will be coerced to 1000. - page_token (str): - A page token, received from a previous ``ListTables`` call. - Provide this to retrieve the subsequent page. - - When paginating, all other parameters provided to - ``ListTables`` must match the call that provided the page - token. - view (google.cloud.bigquery_biglake_v1alpha1.types.TableView): - The view for the returned tables. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - view: 'TableView' = proto.Field( - proto.ENUM, - number=4, - enum='TableView', - ) - - -class ListTablesResponse(proto.Message): - r"""Response message for the ListTables method. - - Attributes: - tables (MutableSequence[google.cloud.bigquery_biglake_v1alpha1.types.Table]): - The tables from the specified database. - next_page_token (str): - A token, which can be sent as ``page_token`` to retrieve the - next page. If this field is omitted, there are no subsequent - pages. - """ - - @property - def raw_page(self): - return self - - tables: MutableSequence['Table'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Table', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class CreateLockRequest(proto.Message): - r"""Request message for the CreateLock method. - - Attributes: - parent (str): - Required. The parent resource where this lock will be - created. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} - lock (google.cloud.bigquery_biglake_v1alpha1.types.Lock): - Required. The lock to create. The ``name`` field does not - need to be provided for the lock creation. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - lock: 'Lock' = proto.Field( - proto.MESSAGE, - number=2, - message='Lock', - ) - - -class DeleteLockRequest(proto.Message): - r"""Request message for the DeleteLock method. - - Attributes: - name (str): - Required. The name of the lock to delete. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id} - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CheckLockRequest(proto.Message): - r"""Request message for the CheckLock method. - - Attributes: - name (str): - Required. The name of the lock to check. Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id}/locks/{lock_id} - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListLocksRequest(proto.Message): - r"""Request message for the ListLocks method. - - Attributes: - parent (str): - Required. The parent, which owns this collection of locks. - Format: - projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}/databases/{database_id} - page_size (int): - The maximum number of locks to return. The - service may return fewer than this value. - If unspecified, at most 50 locks will be - returned. The maximum value is 1000; values - above 1000 will be coerced to 1000. - page_token (str): - A page token, received from a previous ``ListLocks`` call. - Provide this to retrieve the subsequent page. - - When paginating, all other parameters provided to - ``ListLocks`` must match the call that provided the page - token. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListLocksResponse(proto.Message): - r"""Response message for the ListLocks method. - - Attributes: - locks (MutableSequence[google.cloud.bigquery_biglake_v1alpha1.types.Lock]): - The locks from the specified database. 
- next_page_token (str): - A token, which can be sent as ``page_token`` to retrieve the - next page. If this field is omitted, there are no subsequent - pages. - """ - - @property - def raw_page(self): - return self - - locks: MutableSequence['Lock'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Lock', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class HiveDatabaseOptions(proto.Message): - r"""Options of a Hive database. - - Attributes: - location_uri (str): - Cloud Storage folder URI where the database - data is stored, starting with "gs://". - parameters (MutableMapping[str, str]): - Stores user supplied Hive database - parameters. - """ - - location_uri: str = proto.Field( - proto.STRING, - number=1, - ) - parameters: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=2, - ) - - -class HiveTableOptions(proto.Message): - r"""Options of a Hive table. - - Attributes: - parameters (MutableMapping[str, str]): - Stores user supplied Hive table parameters. - table_type (str): - Hive table type. For example, MANAGED_TABLE, EXTERNAL_TABLE. - storage_descriptor (google.cloud.bigquery_biglake_v1alpha1.types.HiveTableOptions.StorageDescriptor): - Stores physical storage information of the - data. - """ - - class SerDeInfo(proto.Message): - r"""Serializer and deserializer information. - - Attributes: - serialization_lib (str): - The fully qualified Java class name of the - serialization library. - """ - - serialization_lib: str = proto.Field( - proto.STRING, - number=1, - ) - - class StorageDescriptor(proto.Message): - r"""Stores physical storage information of the data. - - Attributes: - location_uri (str): - Cloud Storage folder URI where the table data - is stored, starting with "gs://". - input_format (str): - The fully qualified Java class name of the - input format. - output_format (str): - The fully qualified Java class name of the - output format. - serde_info (google.cloud.bigquery_biglake_v1alpha1.types.HiveTableOptions.SerDeInfo): - Serializer and deserializer information. 
- """ - - location_uri: str = proto.Field( - proto.STRING, - number=1, - ) - input_format: str = proto.Field( - proto.STRING, - number=2, - ) - output_format: str = proto.Field( - proto.STRING, - number=3, - ) - serde_info: 'HiveTableOptions.SerDeInfo' = proto.Field( - proto.MESSAGE, - number=4, - message='HiveTableOptions.SerDeInfo', - ) - - parameters: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=1, - ) - table_type: str = proto.Field( - proto.STRING, - number=2, - ) - storage_descriptor: StorageDescriptor = proto.Field( - proto.MESSAGE, - number=3, - message=StorageDescriptor, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/mypy.ini b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/mypy.ini deleted file mode 100644 index 574c5aed394b..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/noxfile.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/noxfile.py deleted file mode 100644 index 4becbc8d36e7..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/noxfile.py +++ /dev/null @@ -1,280 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import pathlib -import re -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", - "3.12", - "3.13", -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = 'google-cloud-bigquery-biglake' - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.13" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "prerelease_deps", -] - -@nox.session(python=ALL_PYTHON) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def unit(session, protobuf_implementation): - """Run the unit test suite.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") - - # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. - # The 'cpp' implementation requires Protobuf<4. 
- if protobuf_implementation == "cpp": - session.install("protobuf<4") - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/bigquery_biglake_v1alpha1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - -@nox.session(python=ALL_PYTHON[-1]) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def prerelease_deps(session, protobuf_implementation): - """Run the unit test suite against pre-release versions of dependencies.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - # Install test environment dependencies - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - - # Install the package without dependencies - session.install('-e', '.', '--no-deps') - - # We test the minimum dependency versions using the minimum Python - # version so the lowest python runtime that we test has a corresponding constraints - # file, located at `testing/constraints--.txt`, which contains all of the - # dependencies and extras. - with open( - CURRENT_DIRECTORY - / "testing" - / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. - constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - session.install(*constraints_deps) - - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 - "grpcio!=1.67.0rc1", - "grpcio-status", - "protobuf", - "proto-plus", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) - - # Remaining dependencies - other_deps = [ - "requests", - ] - session.install(*other_deps) - - # Print out prerelease package versions - - session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") - session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("python", "-c", "import grpc; print(grpc.__version__)") - session.run( - "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" - ) - session.run( - "python", "-c", "import proto; print(proto.__version__)" - ) - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/bigquery_biglake_v1alpha1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. 
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_check_lock_async.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_check_lock_async.py deleted file mode 100644 index 57d79b1854f8..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_check_lock_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CheckLock -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1alpha1_generated_MetastoreService_CheckLock_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1alpha1 - - -async def sample_check_lock(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.CheckLockRequest( - name="name_value", - ) - - # Make the request - response = await client.check_lock(request=request) - - # Handle the response - print(response) - -# [END biglake_v1alpha1_generated_MetastoreService_CheckLock_async] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_check_lock_sync.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_check_lock_sync.py deleted file mode 100644 index cee98b234249..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_check_lock_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CheckLock -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1alpha1_generated_MetastoreService_CheckLock_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1alpha1 - - -def sample_check_lock(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.CheckLockRequest( - name="name_value", - ) - - # Make the request - response = client.check_lock(request=request) - - # Handle the response - print(response) - -# [END biglake_v1alpha1_generated_MetastoreService_CheckLock_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_catalog_async.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_catalog_async.py deleted file mode 100644 index 356b7647f34e..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_catalog_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateCatalog -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1alpha1_generated_MetastoreService_CreateCatalog_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1alpha1 - - -async def sample_create_catalog(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.CreateCatalogRequest( - parent="parent_value", - catalog_id="catalog_id_value", - ) - - # Make the request - response = await client.create_catalog(request=request) - - # Handle the response - print(response) - -# [END biglake_v1alpha1_generated_MetastoreService_CreateCatalog_async] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_catalog_sync.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_catalog_sync.py deleted file mode 100644 index e15a74509d1e..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_catalog_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateCatalog -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1alpha1_generated_MetastoreService_CreateCatalog_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1alpha1 - - -def sample_create_catalog(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.CreateCatalogRequest( - parent="parent_value", - catalog_id="catalog_id_value", - ) - - # Make the request - response = client.create_catalog(request=request) - - # Handle the response - print(response) - -# [END biglake_v1alpha1_generated_MetastoreService_CreateCatalog_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_database_async.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_database_async.py deleted file mode 100644 index 06de9d97c899..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_database_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDatabase -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1alpha1_generated_MetastoreService_CreateDatabase_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1alpha1 - - -async def sample_create_database(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.CreateDatabaseRequest( - parent="parent_value", - database_id="database_id_value", - ) - - # Make the request - response = await client.create_database(request=request) - - # Handle the response - print(response) - -# [END biglake_v1alpha1_generated_MetastoreService_CreateDatabase_async] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_database_sync.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_database_sync.py deleted file mode 100644 index f950f29516a3..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_database_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDatabase -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1alpha1_generated_MetastoreService_CreateDatabase_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1alpha1 - - -def sample_create_database(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.CreateDatabaseRequest( - parent="parent_value", - database_id="database_id_value", - ) - - # Make the request - response = client.create_database(request=request) - - # Handle the response - print(response) - -# [END biglake_v1alpha1_generated_MetastoreService_CreateDatabase_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_lock_async.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_lock_async.py deleted file mode 100644 index 1df4a64fd7b9..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_lock_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateLock -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1alpha1_generated_MetastoreService_CreateLock_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1alpha1 - - -async def sample_create_lock(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - lock = bigquery_biglake_v1alpha1.Lock() - lock.table_id = "table_id_value" - - request = bigquery_biglake_v1alpha1.CreateLockRequest( - parent="parent_value", - lock=lock, - ) - - # Make the request - response = await client.create_lock(request=request) - - # Handle the response - print(response) - -# [END biglake_v1alpha1_generated_MetastoreService_CreateLock_async] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_lock_sync.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_lock_sync.py deleted file mode 100644 index 4e6f3a9d68b7..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_lock_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateLock -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1alpha1_generated_MetastoreService_CreateLock_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1alpha1 - - -def sample_create_lock(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceClient() - - # Initialize request argument(s) - lock = bigquery_biglake_v1alpha1.Lock() - lock.table_id = "table_id_value" - - request = bigquery_biglake_v1alpha1.CreateLockRequest( - parent="parent_value", - lock=lock, - ) - - # Make the request - response = client.create_lock(request=request) - - # Handle the response - print(response) - -# [END biglake_v1alpha1_generated_MetastoreService_CreateLock_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_table_async.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_table_async.py deleted file mode 100644 index 4f1bddf667d1..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_table_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateTable -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1alpha1_generated_MetastoreService_CreateTable_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1alpha1 - - -async def sample_create_table(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.CreateTableRequest( - parent="parent_value", - table_id="table_id_value", - ) - - # Make the request - response = await client.create_table(request=request) - - # Handle the response - print(response) - -# [END biglake_v1alpha1_generated_MetastoreService_CreateTable_async] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_table_sync.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_table_sync.py deleted file mode 100644 index 170db1f528c1..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_create_table_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateTable -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1alpha1_generated_MetastoreService_CreateTable_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1alpha1 - - -def sample_create_table(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.CreateTableRequest( - parent="parent_value", - table_id="table_id_value", - ) - - # Make the request - response = client.create_table(request=request) - - # Handle the response - print(response) - -# [END biglake_v1alpha1_generated_MetastoreService_CreateTable_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_catalog_async.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_catalog_async.py deleted file mode 100644 index 480773ade96a..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_catalog_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteCatalog -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1alpha1_generated_MetastoreService_DeleteCatalog_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1alpha1 - - -async def sample_delete_catalog(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.DeleteCatalogRequest( - name="name_value", - ) - - # Make the request - response = await client.delete_catalog(request=request) - - # Handle the response - print(response) - -# [END biglake_v1alpha1_generated_MetastoreService_DeleteCatalog_async] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_catalog_sync.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_catalog_sync.py deleted file mode 100644 index 0f60661f9293..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_catalog_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteCatalog -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1alpha1_generated_MetastoreService_DeleteCatalog_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1alpha1 - - -def sample_delete_catalog(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.DeleteCatalogRequest( - name="name_value", - ) - - # Make the request - response = client.delete_catalog(request=request) - - # Handle the response - print(response) - -# [END biglake_v1alpha1_generated_MetastoreService_DeleteCatalog_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_database_async.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_database_async.py deleted file mode 100644 index bc1a5ca1856e..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_database_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDatabase -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1alpha1_generated_MetastoreService_DeleteDatabase_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1alpha1 - - -async def sample_delete_database(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.DeleteDatabaseRequest( - name="name_value", - ) - - # Make the request - response = await client.delete_database(request=request) - - # Handle the response - print(response) - -# [END biglake_v1alpha1_generated_MetastoreService_DeleteDatabase_async] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_database_sync.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_database_sync.py deleted file mode 100644 index fb050b5db9d4..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_database_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDatabase -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1alpha1_generated_MetastoreService_DeleteDatabase_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1alpha1 - - -def sample_delete_database(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.DeleteDatabaseRequest( - name="name_value", - ) - - # Make the request - response = client.delete_database(request=request) - - # Handle the response - print(response) - -# [END biglake_v1alpha1_generated_MetastoreService_DeleteDatabase_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_lock_async.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_lock_async.py deleted file mode 100644 index eb47994243cb..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_lock_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteLock -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1alpha1_generated_MetastoreService_DeleteLock_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1alpha1 - - -async def sample_delete_lock(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.DeleteLockRequest( - name="name_value", - ) - - # Make the request - await client.delete_lock(request=request) - - -# [END biglake_v1alpha1_generated_MetastoreService_DeleteLock_async] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_lock_sync.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_lock_sync.py deleted file mode 100644 index b3af1230af72..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_lock_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteLock -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1alpha1_generated_MetastoreService_DeleteLock_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1alpha1 - - -def sample_delete_lock(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.DeleteLockRequest( - name="name_value", - ) - - # Make the request - client.delete_lock(request=request) - - -# [END biglake_v1alpha1_generated_MetastoreService_DeleteLock_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_table_async.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_table_async.py deleted file mode 100644 index 7928567f276b..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_table_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteTable -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1alpha1_generated_MetastoreService_DeleteTable_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1alpha1 - - -async def sample_delete_table(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.DeleteTableRequest( - name="name_value", - ) - - # Make the request - response = await client.delete_table(request=request) - - # Handle the response - print(response) - -# [END biglake_v1alpha1_generated_MetastoreService_DeleteTable_async] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_table_sync.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_table_sync.py deleted file mode 100644 index ddffef07cf31..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_delete_table_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteTable -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1alpha1_generated_MetastoreService_DeleteTable_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1alpha1 - - -def sample_delete_table(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.DeleteTableRequest( - name="name_value", - ) - - # Make the request - response = client.delete_table(request=request) - - # Handle the response - print(response) - -# [END biglake_v1alpha1_generated_MetastoreService_DeleteTable_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_get_catalog_async.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_get_catalog_async.py deleted file mode 100644 index 4a848236452c..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_get_catalog_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetCatalog -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1alpha1_generated_MetastoreService_GetCatalog_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1alpha1 - - -async def sample_get_catalog(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.GetCatalogRequest( - name="name_value", - ) - - # Make the request - response = await client.get_catalog(request=request) - - # Handle the response - print(response) - -# [END biglake_v1alpha1_generated_MetastoreService_GetCatalog_async] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_get_catalog_sync.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_get_catalog_sync.py deleted file mode 100644 index 86a19ee357cd..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_get_catalog_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetCatalog -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1alpha1_generated_MetastoreService_GetCatalog_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1alpha1 - - -def sample_get_catalog(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.GetCatalogRequest( - name="name_value", - ) - - # Make the request - response = client.get_catalog(request=request) - - # Handle the response - print(response) - -# [END biglake_v1alpha1_generated_MetastoreService_GetCatalog_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_get_database_async.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_get_database_async.py deleted file mode 100644 index 2ef459a0df45..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_get_database_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDatabase -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1alpha1_generated_MetastoreService_GetDatabase_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1alpha1 - - -async def sample_get_database(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.GetDatabaseRequest( - name="name_value", - ) - - # Make the request - response = await client.get_database(request=request) - - # Handle the response - print(response) - -# [END biglake_v1alpha1_generated_MetastoreService_GetDatabase_async] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_get_database_sync.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_get_database_sync.py deleted file mode 100644 index a60627aa1da4..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_get_database_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDatabase -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1alpha1_generated_MetastoreService_GetDatabase_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1alpha1 - - -def sample_get_database(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.GetDatabaseRequest( - name="name_value", - ) - - # Make the request - response = client.get_database(request=request) - - # Handle the response - print(response) - -# [END biglake_v1alpha1_generated_MetastoreService_GetDatabase_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_get_table_async.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_get_table_async.py deleted file mode 100644 index a3d700b4a0c5..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_get_table_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetTable -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1alpha1_generated_MetastoreService_GetTable_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1alpha1 - - -async def sample_get_table(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.GetTableRequest( - name="name_value", - ) - - # Make the request - response = await client.get_table(request=request) - - # Handle the response - print(response) - -# [END biglake_v1alpha1_generated_MetastoreService_GetTable_async] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_get_table_sync.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_get_table_sync.py deleted file mode 100644 index 588451aa0634..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_get_table_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetTable -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1alpha1_generated_MetastoreService_GetTable_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1alpha1 - - -def sample_get_table(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.GetTableRequest( - name="name_value", - ) - - # Make the request - response = client.get_table(request=request) - - # Handle the response - print(response) - -# [END biglake_v1alpha1_generated_MetastoreService_GetTable_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_catalogs_async.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_catalogs_async.py deleted file mode 100644 index 6eb584f12dc7..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_catalogs_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListCatalogs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1alpha1_generated_MetastoreService_ListCatalogs_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1alpha1 - - -async def sample_list_catalogs(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.ListCatalogsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_catalogs(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END biglake_v1alpha1_generated_MetastoreService_ListCatalogs_async] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_catalogs_sync.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_catalogs_sync.py deleted file mode 100644 index 841384821c03..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_catalogs_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListCatalogs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1alpha1_generated_MetastoreService_ListCatalogs_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1alpha1 - - -def sample_list_catalogs(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.ListCatalogsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_catalogs(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END biglake_v1alpha1_generated_MetastoreService_ListCatalogs_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_databases_async.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_databases_async.py deleted file mode 100644 index b9a9d8b63457..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_databases_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDatabases -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1alpha1_generated_MetastoreService_ListDatabases_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1alpha1 - - -async def sample_list_databases(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.ListDatabasesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_databases(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END biglake_v1alpha1_generated_MetastoreService_ListDatabases_async] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_databases_sync.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_databases_sync.py deleted file mode 100644 index 0391356baaf7..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_databases_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDatabases -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1alpha1_generated_MetastoreService_ListDatabases_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1alpha1 - - -def sample_list_databases(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.ListDatabasesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_databases(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END biglake_v1alpha1_generated_MetastoreService_ListDatabases_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_locks_async.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_locks_async.py deleted file mode 100644 index ff601d90a0ab..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_locks_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListLocks -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1alpha1_generated_MetastoreService_ListLocks_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1alpha1 - - -async def sample_list_locks(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.ListLocksRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_locks(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END biglake_v1alpha1_generated_MetastoreService_ListLocks_async] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_locks_sync.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_locks_sync.py deleted file mode 100644 index f9ef2d084a43..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_locks_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListLocks -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1alpha1_generated_MetastoreService_ListLocks_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1alpha1 - - -def sample_list_locks(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.ListLocksRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_locks(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END biglake_v1alpha1_generated_MetastoreService_ListLocks_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_tables_async.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_tables_async.py deleted file mode 100644 index 96ecbcbe26c9..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_tables_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListTables -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1alpha1_generated_MetastoreService_ListTables_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1alpha1 - - -async def sample_list_tables(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.ListTablesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_tables(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END biglake_v1alpha1_generated_MetastoreService_ListTables_async] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_tables_sync.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_tables_sync.py deleted file mode 100644 index 1f69c6cb9634..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_list_tables_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListTables -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1alpha1_generated_MetastoreService_ListTables_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1alpha1 - - -def sample_list_tables(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.ListTablesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_tables(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END biglake_v1alpha1_generated_MetastoreService_ListTables_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_rename_table_async.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_rename_table_async.py deleted file mode 100644 index d88210088044..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_rename_table_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RenameTable -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1alpha1_generated_MetastoreService_RenameTable_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1alpha1 - - -async def sample_rename_table(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.RenameTableRequest( - name="name_value", - new_name="new_name_value", - ) - - # Make the request - response = await client.rename_table(request=request) - - # Handle the response - print(response) - -# [END biglake_v1alpha1_generated_MetastoreService_RenameTable_async] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_rename_table_sync.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_rename_table_sync.py deleted file mode 100644 index 86b35df4a543..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_rename_table_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RenameTable -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1alpha1_generated_MetastoreService_RenameTable_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1alpha1 - - -def sample_rename_table(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.RenameTableRequest( - name="name_value", - new_name="new_name_value", - ) - - # Make the request - response = client.rename_table(request=request) - - # Handle the response - print(response) - -# [END biglake_v1alpha1_generated_MetastoreService_RenameTable_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_update_database_async.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_update_database_async.py deleted file mode 100644 index f2dff38a17ed..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_update_database_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDatabase -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1alpha1_generated_MetastoreService_UpdateDatabase_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1alpha1 - - -async def sample_update_database(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.UpdateDatabaseRequest( - ) - - # Make the request - response = await client.update_database(request=request) - - # Handle the response - print(response) - -# [END biglake_v1alpha1_generated_MetastoreService_UpdateDatabase_async] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_update_database_sync.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_update_database_sync.py deleted file mode 100644 index 01ac5dff63a7..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_update_database_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDatabase -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1alpha1_generated_MetastoreService_UpdateDatabase_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1alpha1 - - -def sample_update_database(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.UpdateDatabaseRequest( - ) - - # Make the request - response = client.update_database(request=request) - - # Handle the response - print(response) - -# [END biglake_v1alpha1_generated_MetastoreService_UpdateDatabase_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_update_table_async.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_update_table_async.py deleted file mode 100644 index 27ba54fbb2d1..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_update_table_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateTable -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1alpha1_generated_MetastoreService_UpdateTable_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1alpha1 - - -async def sample_update_table(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.UpdateTableRequest( - ) - - # Make the request - response = await client.update_table(request=request) - - # Handle the response - print(response) - -# [END biglake_v1alpha1_generated_MetastoreService_UpdateTable_async] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_update_table_sync.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_update_table_sync.py deleted file mode 100644 index 2da7d0ef18a8..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/biglake_v1alpha1_generated_metastore_service_update_table_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateTable -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-biglake - - -# [START biglake_v1alpha1_generated_MetastoreService_UpdateTable_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_biglake_v1alpha1 - - -def sample_update_table(): - # Create a client - client = bigquery_biglake_v1alpha1.MetastoreServiceClient() - - # Initialize request argument(s) - request = bigquery_biglake_v1alpha1.UpdateTableRequest( - ) - - # Make the request - response = client.update_table(request=request) - - # Handle the response - print(response) - -# [END biglake_v1alpha1_generated_MetastoreService_UpdateTable_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1alpha1.json b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1alpha1.json deleted file mode 100644 index 69294cf144a2..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1alpha1.json +++ /dev/null @@ -1,3148 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.bigquery.biglake.v1alpha1", - "version": "v1alpha1" - } - ], - "language": "PYTHON", - "name": "google-cloud-bigquery-biglake", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient", - "shortName": "MetastoreServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient.check_lock", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.CheckLock", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "CheckLock" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.CheckLockRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1alpha1.types.Lock", - "shortName": "check_lock" - }, - "description": "Sample for CheckLock", - "file": "biglake_v1alpha1_generated_metastore_service_check_lock_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1alpha1_generated_MetastoreService_CheckLock_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1alpha1_generated_metastore_service_check_lock_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceClient", - "shortName": "MetastoreServiceClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceClient.check_lock", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.CheckLock", - "service": { 
- "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "CheckLock" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.CheckLockRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1alpha1.types.Lock", - "shortName": "check_lock" - }, - "description": "Sample for CheckLock", - "file": "biglake_v1alpha1_generated_metastore_service_check_lock_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1alpha1_generated_MetastoreService_CheckLock_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1alpha1_generated_metastore_service_check_lock_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient", - "shortName": "MetastoreServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient.create_catalog", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.CreateCatalog", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "CreateCatalog" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.CreateCatalogRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "catalog", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.Catalog" - }, - { - "name": "catalog_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1alpha1.types.Catalog", - "shortName": "create_catalog" - }, - "description": "Sample for CreateCatalog", - "file": "biglake_v1alpha1_generated_metastore_service_create_catalog_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1alpha1_generated_MetastoreService_CreateCatalog_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1alpha1_generated_metastore_service_create_catalog_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceClient", - "shortName": "MetastoreServiceClient" - }, - "fullName": 
"google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceClient.create_catalog", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.CreateCatalog", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "CreateCatalog" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.CreateCatalogRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "catalog", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.Catalog" - }, - { - "name": "catalog_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1alpha1.types.Catalog", - "shortName": "create_catalog" - }, - "description": "Sample for CreateCatalog", - "file": "biglake_v1alpha1_generated_metastore_service_create_catalog_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1alpha1_generated_MetastoreService_CreateCatalog_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1alpha1_generated_metastore_service_create_catalog_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient", - "shortName": "MetastoreServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient.create_database", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.CreateDatabase", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "CreateDatabase" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.CreateDatabaseRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "database", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.Database" - }, - { - "name": "database_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1alpha1.types.Database", - "shortName": "create_database" - }, - "description": "Sample for CreateDatabase", - "file": "biglake_v1alpha1_generated_metastore_service_create_database_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1alpha1_generated_MetastoreService_CreateDatabase_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - 
"start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1alpha1_generated_metastore_service_create_database_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceClient", - "shortName": "MetastoreServiceClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceClient.create_database", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.CreateDatabase", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "CreateDatabase" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.CreateDatabaseRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "database", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.Database" - }, - { - "name": "database_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1alpha1.types.Database", - "shortName": "create_database" - }, - "description": "Sample for CreateDatabase", - "file": "biglake_v1alpha1_generated_metastore_service_create_database_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1alpha1_generated_MetastoreService_CreateDatabase_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1alpha1_generated_metastore_service_create_database_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient", - "shortName": "MetastoreServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient.create_lock", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.CreateLock", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "CreateLock" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.CreateLockRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "lock", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.Lock" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1alpha1.types.Lock", - "shortName": "create_lock" - }, - "description": "Sample for CreateLock", - "file": "biglake_v1alpha1_generated_metastore_service_create_lock_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1alpha1_generated_MetastoreService_CreateLock_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 
27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1alpha1_generated_metastore_service_create_lock_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceClient", - "shortName": "MetastoreServiceClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceClient.create_lock", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.CreateLock", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "CreateLock" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.CreateLockRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "lock", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.Lock" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1alpha1.types.Lock", - "shortName": "create_lock" - }, - "description": "Sample for CreateLock", - "file": "biglake_v1alpha1_generated_metastore_service_create_lock_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1alpha1_generated_MetastoreService_CreateLock_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1alpha1_generated_metastore_service_create_lock_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient", - "shortName": "MetastoreServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient.create_table", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.CreateTable", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "CreateTable" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.CreateTableRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "table", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.Table" - }, - { - "name": "table_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1alpha1.types.Table", - "shortName": "create_table" - }, - "description": "Sample for CreateTable", - "file": "biglake_v1alpha1_generated_metastore_service_create_table_async.py", - "language": 
"PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1alpha1_generated_MetastoreService_CreateTable_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1alpha1_generated_metastore_service_create_table_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceClient", - "shortName": "MetastoreServiceClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceClient.create_table", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.CreateTable", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "CreateTable" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.CreateTableRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "table", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.Table" - }, - { - "name": "table_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1alpha1.types.Table", - "shortName": "create_table" - }, - "description": "Sample for CreateTable", - "file": "biglake_v1alpha1_generated_metastore_service_create_table_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1alpha1_generated_MetastoreService_CreateTable_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1alpha1_generated_metastore_service_create_table_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient", - "shortName": "MetastoreServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient.delete_catalog", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.DeleteCatalog", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "DeleteCatalog" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.DeleteCatalogRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1alpha1.types.Catalog", - "shortName": "delete_catalog" 
- }, - "description": "Sample for DeleteCatalog", - "file": "biglake_v1alpha1_generated_metastore_service_delete_catalog_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1alpha1_generated_MetastoreService_DeleteCatalog_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1alpha1_generated_metastore_service_delete_catalog_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceClient", - "shortName": "MetastoreServiceClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceClient.delete_catalog", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.DeleteCatalog", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "DeleteCatalog" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.DeleteCatalogRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1alpha1.types.Catalog", - "shortName": "delete_catalog" - }, - "description": "Sample for DeleteCatalog", - "file": "biglake_v1alpha1_generated_metastore_service_delete_catalog_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1alpha1_generated_MetastoreService_DeleteCatalog_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1alpha1_generated_metastore_service_delete_catalog_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient", - "shortName": "MetastoreServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient.delete_database", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.DeleteDatabase", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "DeleteDatabase" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.DeleteDatabaseRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": 
"google.cloud.bigquery_biglake_v1alpha1.types.Database", - "shortName": "delete_database" - }, - "description": "Sample for DeleteDatabase", - "file": "biglake_v1alpha1_generated_metastore_service_delete_database_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1alpha1_generated_MetastoreService_DeleteDatabase_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1alpha1_generated_metastore_service_delete_database_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceClient", - "shortName": "MetastoreServiceClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceClient.delete_database", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.DeleteDatabase", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "DeleteDatabase" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.DeleteDatabaseRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1alpha1.types.Database", - "shortName": "delete_database" - }, - "description": "Sample for DeleteDatabase", - "file": "biglake_v1alpha1_generated_metastore_service_delete_database_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1alpha1_generated_MetastoreService_DeleteDatabase_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1alpha1_generated_metastore_service_delete_database_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient", - "shortName": "MetastoreServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient.delete_lock", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.DeleteLock", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "DeleteLock" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.DeleteLockRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, 
bytes]]]" - } - ], - "shortName": "delete_lock" - }, - "description": "Sample for DeleteLock", - "file": "biglake_v1alpha1_generated_metastore_service_delete_lock_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1alpha1_generated_MetastoreService_DeleteLock_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1alpha1_generated_metastore_service_delete_lock_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceClient", - "shortName": "MetastoreServiceClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceClient.delete_lock", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.DeleteLock", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "DeleteLock" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.DeleteLockRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_lock" - }, - "description": "Sample for DeleteLock", - "file": "biglake_v1alpha1_generated_metastore_service_delete_lock_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1alpha1_generated_MetastoreService_DeleteLock_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1alpha1_generated_metastore_service_delete_lock_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient", - "shortName": "MetastoreServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient.delete_table", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.DeleteTable", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "DeleteTable" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.DeleteTableRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1alpha1.types.Table", - "shortName": "delete_table" - }, - "description": "Sample for DeleteTable", - "file": 
"biglake_v1alpha1_generated_metastore_service_delete_table_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1alpha1_generated_MetastoreService_DeleteTable_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1alpha1_generated_metastore_service_delete_table_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceClient", - "shortName": "MetastoreServiceClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceClient.delete_table", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.DeleteTable", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "DeleteTable" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.DeleteTableRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1alpha1.types.Table", - "shortName": "delete_table" - }, - "description": "Sample for DeleteTable", - "file": "biglake_v1alpha1_generated_metastore_service_delete_table_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1alpha1_generated_MetastoreService_DeleteTable_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1alpha1_generated_metastore_service_delete_table_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient", - "shortName": "MetastoreServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient.get_catalog", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.GetCatalog", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "GetCatalog" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.GetCatalogRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1alpha1.types.Catalog", - "shortName": "get_catalog" - }, - "description": "Sample for GetCatalog", - "file": 
"biglake_v1alpha1_generated_metastore_service_get_catalog_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1alpha1_generated_MetastoreService_GetCatalog_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1alpha1_generated_metastore_service_get_catalog_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceClient", - "shortName": "MetastoreServiceClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceClient.get_catalog", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.GetCatalog", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "GetCatalog" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.GetCatalogRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1alpha1.types.Catalog", - "shortName": "get_catalog" - }, - "description": "Sample for GetCatalog", - "file": "biglake_v1alpha1_generated_metastore_service_get_catalog_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1alpha1_generated_MetastoreService_GetCatalog_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1alpha1_generated_metastore_service_get_catalog_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient", - "shortName": "MetastoreServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient.get_database", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.GetDatabase", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "GetDatabase" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.GetDatabaseRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1alpha1.types.Database", - "shortName": "get_database" - }, - "description": "Sample for GetDatabase", - "file": 
"biglake_v1alpha1_generated_metastore_service_get_database_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1alpha1_generated_MetastoreService_GetDatabase_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1alpha1_generated_metastore_service_get_database_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceClient", - "shortName": "MetastoreServiceClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceClient.get_database", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.GetDatabase", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "GetDatabase" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.GetDatabaseRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1alpha1.types.Database", - "shortName": "get_database" - }, - "description": "Sample for GetDatabase", - "file": "biglake_v1alpha1_generated_metastore_service_get_database_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1alpha1_generated_MetastoreService_GetDatabase_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1alpha1_generated_metastore_service_get_database_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient", - "shortName": "MetastoreServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient.get_table", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.GetTable", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "GetTable" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.GetTableRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1alpha1.types.Table", - "shortName": "get_table" - }, - "description": "Sample for GetTable", - "file": 
"biglake_v1alpha1_generated_metastore_service_get_table_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1alpha1_generated_MetastoreService_GetTable_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1alpha1_generated_metastore_service_get_table_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceClient", - "shortName": "MetastoreServiceClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceClient.get_table", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.GetTable", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "GetTable" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.GetTableRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1alpha1.types.Table", - "shortName": "get_table" - }, - "description": "Sample for GetTable", - "file": "biglake_v1alpha1_generated_metastore_service_get_table_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1alpha1_generated_MetastoreService_GetTable_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1alpha1_generated_metastore_service_get_table_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient", - "shortName": "MetastoreServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient.list_catalogs", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.ListCatalogs", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "ListCatalogs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.ListCatalogsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1alpha1.services.metastore_service.pagers.ListCatalogsAsyncPager", - "shortName": "list_catalogs" - }, - "description": "Sample for ListCatalogs", - 
"file": "biglake_v1alpha1_generated_metastore_service_list_catalogs_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1alpha1_generated_MetastoreService_ListCatalogs_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1alpha1_generated_metastore_service_list_catalogs_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceClient", - "shortName": "MetastoreServiceClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceClient.list_catalogs", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.ListCatalogs", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "ListCatalogs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.ListCatalogsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1alpha1.services.metastore_service.pagers.ListCatalogsPager", - "shortName": "list_catalogs" - }, - "description": "Sample for ListCatalogs", - "file": "biglake_v1alpha1_generated_metastore_service_list_catalogs_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1alpha1_generated_MetastoreService_ListCatalogs_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1alpha1_generated_metastore_service_list_catalogs_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient", - "shortName": "MetastoreServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient.list_databases", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.ListDatabases", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "ListDatabases" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.ListDatabasesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": 
"google.cloud.bigquery_biglake_v1alpha1.services.metastore_service.pagers.ListDatabasesAsyncPager", - "shortName": "list_databases" - }, - "description": "Sample for ListDatabases", - "file": "biglake_v1alpha1_generated_metastore_service_list_databases_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1alpha1_generated_MetastoreService_ListDatabases_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1alpha1_generated_metastore_service_list_databases_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceClient", - "shortName": "MetastoreServiceClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceClient.list_databases", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.ListDatabases", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "ListDatabases" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.ListDatabasesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1alpha1.services.metastore_service.pagers.ListDatabasesPager", - "shortName": "list_databases" - }, - "description": "Sample for ListDatabases", - "file": "biglake_v1alpha1_generated_metastore_service_list_databases_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1alpha1_generated_MetastoreService_ListDatabases_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1alpha1_generated_metastore_service_list_databases_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient", - "shortName": "MetastoreServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient.list_locks", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.ListLocks", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "ListLocks" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.ListLocksRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - 
{ - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1alpha1.services.metastore_service.pagers.ListLocksAsyncPager", - "shortName": "list_locks" - }, - "description": "Sample for ListLocks", - "file": "biglake_v1alpha1_generated_metastore_service_list_locks_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1alpha1_generated_MetastoreService_ListLocks_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1alpha1_generated_metastore_service_list_locks_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceClient", - "shortName": "MetastoreServiceClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceClient.list_locks", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.ListLocks", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "ListLocks" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.ListLocksRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1alpha1.services.metastore_service.pagers.ListLocksPager", - "shortName": "list_locks" - }, - "description": "Sample for ListLocks", - "file": "biglake_v1alpha1_generated_metastore_service_list_locks_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1alpha1_generated_MetastoreService_ListLocks_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1alpha1_generated_metastore_service_list_locks_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient", - "shortName": "MetastoreServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient.list_tables", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.ListTables", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "ListTables" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.ListTablesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - 
"name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1alpha1.services.metastore_service.pagers.ListTablesAsyncPager", - "shortName": "list_tables" - }, - "description": "Sample for ListTables", - "file": "biglake_v1alpha1_generated_metastore_service_list_tables_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1alpha1_generated_MetastoreService_ListTables_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1alpha1_generated_metastore_service_list_tables_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceClient", - "shortName": "MetastoreServiceClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceClient.list_tables", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.ListTables", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "ListTables" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.ListTablesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1alpha1.services.metastore_service.pagers.ListTablesPager", - "shortName": "list_tables" - }, - "description": "Sample for ListTables", - "file": "biglake_v1alpha1_generated_metastore_service_list_tables_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1alpha1_generated_MetastoreService_ListTables_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1alpha1_generated_metastore_service_list_tables_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient", - "shortName": "MetastoreServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient.rename_table", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.RenameTable", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "RenameTable" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.RenameTableRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": 
"new_name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1alpha1.types.Table", - "shortName": "rename_table" - }, - "description": "Sample for RenameTable", - "file": "biglake_v1alpha1_generated_metastore_service_rename_table_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1alpha1_generated_MetastoreService_RenameTable_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1alpha1_generated_metastore_service_rename_table_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceClient", - "shortName": "MetastoreServiceClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceClient.rename_table", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.RenameTable", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "RenameTable" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.RenameTableRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "new_name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1alpha1.types.Table", - "shortName": "rename_table" - }, - "description": "Sample for RenameTable", - "file": "biglake_v1alpha1_generated_metastore_service_rename_table_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1alpha1_generated_MetastoreService_RenameTable_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1alpha1_generated_metastore_service_rename_table_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient", - "shortName": "MetastoreServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient.update_database", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.UpdateDatabase", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "UpdateDatabase" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.bigquery_biglake_v1alpha1.types.UpdateDatabaseRequest" - }, - { - "name": "database", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.Database" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1alpha1.types.Database", - "shortName": "update_database" - }, - "description": "Sample for UpdateDatabase", - "file": "biglake_v1alpha1_generated_metastore_service_update_database_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1alpha1_generated_MetastoreService_UpdateDatabase_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1alpha1_generated_metastore_service_update_database_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceClient", - "shortName": "MetastoreServiceClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceClient.update_database", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.UpdateDatabase", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "UpdateDatabase" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.UpdateDatabaseRequest" - }, - { - "name": "database", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.Database" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1alpha1.types.Database", - "shortName": "update_database" - }, - "description": "Sample for UpdateDatabase", - "file": "biglake_v1alpha1_generated_metastore_service_update_database_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1alpha1_generated_MetastoreService_UpdateDatabase_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1alpha1_generated_metastore_service_update_database_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient", - "shortName": "MetastoreServiceAsyncClient" - }, - "fullName": 
"google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceAsyncClient.update_table", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.UpdateTable", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "UpdateTable" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.UpdateTableRequest" - }, - { - "name": "table", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.Table" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1alpha1.types.Table", - "shortName": "update_table" - }, - "description": "Sample for UpdateTable", - "file": "biglake_v1alpha1_generated_metastore_service_update_table_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1alpha1_generated_MetastoreService_UpdateTable_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "biglake_v1alpha1_generated_metastore_service_update_table_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceClient", - "shortName": "MetastoreServiceClient" - }, - "fullName": "google.cloud.bigquery_biglake_v1alpha1.MetastoreServiceClient.update_table", - "method": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService.UpdateTable", - "service": { - "fullName": "google.cloud.bigquery.biglake.v1alpha1.MetastoreService", - "shortName": "MetastoreService" - }, - "shortName": "UpdateTable" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.UpdateTableRequest" - }, - { - "name": "table", - "type": "google.cloud.bigquery_biglake_v1alpha1.types.Table" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_biglake_v1alpha1.types.Table", - "shortName": "update_table" - }, - "description": "Sample for UpdateTable", - "file": "biglake_v1alpha1_generated_metastore_service_update_table_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "biglake_v1alpha1_generated_MetastoreService_UpdateTable_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"biglake_v1alpha1_generated_metastore_service_update_table_sync.py" - } - ] -} diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/scripts/fixup_bigquery_biglake_v1alpha1_keywords.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/scripts/fixup_bigquery_biglake_v1alpha1_keywords.py deleted file mode 100644 index 0bdfc3465a45..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/scripts/fixup_bigquery_biglake_v1alpha1_keywords.py +++ /dev/null @@ -1,194 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class bigquery_biglakeCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'check_lock': ('name', ), - 'create_catalog': ('parent', 'catalog', 'catalog_id', ), - 'create_database': ('parent', 'database', 'database_id', ), - 'create_lock': ('parent', 'lock', ), - 'create_table': ('parent', 'table', 'table_id', ), - 'delete_catalog': ('name', ), - 'delete_database': ('name', ), - 'delete_lock': ('name', ), - 'delete_table': ('name', ), - 'get_catalog': ('name', ), - 'get_database': ('name', ), - 'get_table': ('name', ), - 'list_catalogs': ('parent', 'page_size', 'page_token', ), - 'list_databases': ('parent', 'page_size', 'page_token', ), - 'list_locks': ('parent', 'page_size', 'page_token', ), - 'list_tables': ('parent', 'page_size', 'page_token', 'view', ), - 'rename_table': ('name', 'new_name', ), - 'update_database': ('database', 'update_mask', ), - 'update_table': ('table', 'update_mask', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. 
- return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=bigquery_biglakeCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the bigquery_biglake client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. 
-""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/setup.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/setup.py deleted file mode 100644 index 196451d4b5ad..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/setup.py +++ /dev/null @@ -1,98 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-bigquery-biglake' - - -description = "Google Cloud Bigquery Biglake API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/bigquery_biglake/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", -] -extras = { -} -url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-biglake" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - extras_require=extras, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/testing/constraints-3.10.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/testing/constraints-3.10.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
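The version-detection step in the setup.py above is easy to miss; a standalone sketch of just that regex (with a made-up version string) shows what it extracts and why the assert holds:

```python
import re

# A gapic_version.py file contains a line shaped like this
# (the version number here is made up for illustration):
source = '__version__ = "0.4.12"'

# Same pattern as setup.py: digits.digits.digits between double quotes,
# with lookbehind/lookahead so the quotes are not part of the match.
# The dots are unescaped and so match any character, which is harmless
# here because the assert requires exactly one candidate.
candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", source)
assert len(candidates) == 1
version = candidates[0]
print(version)  # -> 0.4.12
```

The extracted string then does double duty: it becomes the package version, and its leading digit selects the Development Status classifier (`0` maps to Beta, anything else to Production/Stable).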
-google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/testing/constraints-3.11.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/testing/constraints-3.11.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/testing/constraints-3.12.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/testing/constraints-3.12.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/testing/constraints-3.13.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/testing/constraints-3.13.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/testing/constraints-3.7.txt deleted file mode 100644 index fc812592b0ee..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/testing/constraints-3.7.txt +++ /dev/null @@ -1,10 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -proto-plus==1.22.3 -protobuf==3.20.2 diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/testing/constraints-3.8.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/testing/constraints-3.8.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/testing/constraints-3.9.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/testing/constraints-3.9.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
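The constraints-3.7.txt file above pins each dependency to the exact lower bound declared in setup.py, so the oldest supported versions are what the unit tests actually exercise. A small sketch of how such pins can be cross-checked against an installed environment (package names and bounds copied from that file):

```python
from importlib import metadata

# Lower bounds as pinned in testing/constraints-3.7.txt.
LOWER_BOUNDS = {
    "google-api-core": "1.34.1",
    "google-auth": "2.14.1",
    "proto-plus": "1.22.3",
    "protobuf": "3.20.2",
}

for package, bound in LOWER_BOUNDS.items():
    try:
        installed = metadata.version(package)
    except metadata.PackageNotFoundError:
        print(f"{package}: not installed")
        continue
    status = "matches lower bound" if installed == bound else f"differs from bound {bound}"
    print(f"{package}: {installed} ({status})")
```

By contrast, the constraints files for Python 3.8 through 3.13 list bare package names with no versions, so those environments effectively test against whatever current releases the resolver picks.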
-google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/tests/__init__.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/tests/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/tests/unit/__init__.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/tests/unit/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/tests/unit/gapic/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/tests/unit/gapic/bigquery_biglake_v1alpha1/__init__.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/tests/unit/gapic/bigquery_biglake_v1alpha1/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/tests/unit/gapic/bigquery_biglake_v1alpha1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/tests/unit/gapic/bigquery_biglake_v1alpha1/test_metastore_service.py b/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/tests/unit/gapic/bigquery_biglake_v1alpha1/test_metastore_service.py deleted file mode 100644 index b7c6afc8fd9c..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-biglake/v1alpha1/tests/unit/gapic/bigquery_biglake_v1alpha1/test_metastore_service.py +++ /dev/null @@ -1,15762 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.bigquery_biglake_v1alpha1.services.metastore_service import MetastoreServiceAsyncClient -from google.cloud.bigquery_biglake_v1alpha1.services.metastore_service import MetastoreServiceClient -from google.cloud.bigquery_biglake_v1alpha1.services.metastore_service import pagers -from google.cloud.bigquery_biglake_v1alpha1.services.metastore_service import transports -from google.cloud.bigquery_biglake_v1alpha1.types import metastore -from google.oauth2 import service_account -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -import google.auth - - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": 
"service account credentials", - "principal": "service-account@example.com", -} -CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert MetastoreServiceClient._get_default_mtls_endpoint(None) is None - assert MetastoreServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert MetastoreServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert MetastoreServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert MetastoreServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert MetastoreServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert MetastoreServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert MetastoreServiceClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert MetastoreServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - MetastoreServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert MetastoreServiceClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert MetastoreServiceClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": 
"auto"}): - assert MetastoreServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - MetastoreServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert MetastoreServiceClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert MetastoreServiceClient._get_client_cert_source(None, False) is None - assert MetastoreServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert MetastoreServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert MetastoreServiceClient._get_client_cert_source(None, True) is mock_default_cert_source - assert MetastoreServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(MetastoreServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetastoreServiceClient)) -@mock.patch.object(MetastoreServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetastoreServiceAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = MetastoreServiceClient._DEFAULT_UNIVERSE - default_endpoint = MetastoreServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = MetastoreServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert MetastoreServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert MetastoreServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == MetastoreServiceClient.DEFAULT_MTLS_ENDPOINT - assert MetastoreServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert MetastoreServiceClient._get_api_endpoint(None, None, default_universe, "always") == MetastoreServiceClient.DEFAULT_MTLS_ENDPOINT - assert MetastoreServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == MetastoreServiceClient.DEFAULT_MTLS_ENDPOINT - assert MetastoreServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert MetastoreServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - MetastoreServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." 
- - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert MetastoreServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert MetastoreServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert MetastoreServiceClient._get_universe_domain(None, None) == MetastoreServiceClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - MetastoreServiceClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." - -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = MetastoreServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - -@pytest.mark.parametrize("error_code", [401,403,404,500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = MetastoreServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - -@pytest.mark.parametrize("client_class,transport_name", [ - (MetastoreServiceClient, "grpc"), - (MetastoreServiceAsyncClient, "grpc_asyncio"), - (MetastoreServiceClient, "rest"), -]) -def test_metastore_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'biglake.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://biglake.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.MetastoreServiceGrpcTransport, "grpc"), - (transports.MetastoreServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.MetastoreServiceRestTransport, "rest"), -]) -def test_metastore_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = 
transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (MetastoreServiceClient, "grpc"), - (MetastoreServiceAsyncClient, "grpc_asyncio"), - (MetastoreServiceClient, "rest"), -]) -def test_metastore_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'biglake.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://biglake.googleapis.com' - ) - - -def test_metastore_service_client_get_transport_class(): - transport = MetastoreServiceClient.get_transport_class() - available_transports = [ - transports.MetastoreServiceGrpcTransport, - transports.MetastoreServiceRestTransport, - ] - assert transport in available_transports - - transport = MetastoreServiceClient.get_transport_class("grpc") - assert transport == transports.MetastoreServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (MetastoreServiceClient, transports.MetastoreServiceGrpcTransport, "grpc"), - (MetastoreServiceAsyncClient, transports.MetastoreServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (MetastoreServiceClient, transports.MetastoreServiceRestTransport, "rest"), -]) -@mock.patch.object(MetastoreServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetastoreServiceClient)) -@mock.patch.object(MetastoreServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetastoreServiceAsyncClient)) -def test_metastore_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(MetastoreServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(MetastoreServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". 
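# With "never", the transport must receive the plain default endpoint and
# no mTLS certificate source.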
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (MetastoreServiceClient, transports.MetastoreServiceGrpcTransport, "grpc", "true"), - 
(MetastoreServiceAsyncClient, transports.MetastoreServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (MetastoreServiceClient, transports.MetastoreServiceGrpcTransport, "grpc", "false"), - (MetastoreServiceAsyncClient, transports.MetastoreServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (MetastoreServiceClient, transports.MetastoreServiceRestTransport, "rest", "true"), - (MetastoreServiceClient, transports.MetastoreServiceRestTransport, "rest", "false"), -]) -@mock.patch.object(MetastoreServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetastoreServiceClient)) -@mock.patch.object(MetastoreServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetastoreServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_metastore_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
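# With no certificate from either source, the client must fall back to the
# default endpoint and leave client_cert_source_for_mtls unset, whatever
# GOOGLE_API_USE_CLIENT_CERTIFICATE says.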
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - MetastoreServiceClient, MetastoreServiceAsyncClient -]) -@mock.patch.object(MetastoreServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetastoreServiceClient)) -@mock.patch.object(MetastoreServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetastoreServiceAsyncClient)) -def test_metastore_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
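# Under "auto" (the default), discovering a default client certificate is
# what flips the result to the mTLS endpoint and surfaces that certificate
# source to the caller.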
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - MetastoreServiceClient, MetastoreServiceAsyncClient -]) -@mock.patch.object(MetastoreServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetastoreServiceClient)) -@mock.patch.object(MetastoreServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetastoreServiceAsyncClient)) -def test_metastore_service_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = MetastoreServiceClient._DEFAULT_UNIVERSE - default_endpoint = MetastoreServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = MetastoreServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
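# (This holds even when no client certificate has been configured at all.)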
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (MetastoreServiceClient, transports.MetastoreServiceGrpcTransport, "grpc"), - (MetastoreServiceAsyncClient, transports.MetastoreServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (MetastoreServiceClient, transports.MetastoreServiceRestTransport, "rest"), -]) -def test_metastore_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (MetastoreServiceClient, transports.MetastoreServiceGrpcTransport, "grpc", grpc_helpers), - (MetastoreServiceAsyncClient, transports.MetastoreServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (MetastoreServiceClient, transports.MetastoreServiceRestTransport, "rest", None), -]) -def test_metastore_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
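# The filename must be forwarded verbatim as credentials_file while
# credentials stays None, leaving the transport to load the file itself.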
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_metastore_service_client_client_options_from_dict(): - with mock.patch('google.cloud.bigquery_biglake_v1alpha1.services.metastore_service.transports.MetastoreServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = MetastoreServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (MetastoreServiceClient, transports.MetastoreServiceGrpcTransport, "grpc", grpc_helpers), - (MetastoreServiceAsyncClient, transports.MetastoreServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_metastore_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
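# Concretely: the gRPC channel must be built with the credentials loaded
# from the file (not the ADC ones), against the canonical biglake host and
# the service's default BigQuery / cloud-platform scopes.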
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "biglake.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="biglake.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - metastore.CreateCatalogRequest, - dict, -]) -def test_create_catalog(request_type, transport: str = 'grpc'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_catalog), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Catalog( - name='name_value', - ) - response = client.create_catalog(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = metastore.CreateCatalogRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Catalog) - assert response.name == 'name_value' - - -def test_create_catalog_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = metastore.CreateCatalogRequest( - parent='parent_value', - catalog_id='catalog_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_catalog), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
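# The request proto seen by the transport must still carry exactly the
# string fields populated above; only UUID4-eligible fields may be filled
# in automatically by the client.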
- client.create_catalog(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metastore.CreateCatalogRequest( - parent='parent_value', - catalog_id='catalog_id_value', - ) - -def test_create_catalog_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_catalog in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_catalog] = mock_rpc - request = {} - client.create_catalog(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_catalog(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_catalog_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_catalog in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_catalog] = mock_rpc - - request = {} - await client.create_catalog(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_catalog(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_catalog_async(transport: str = 'grpc_asyncio', request_type=metastore.CreateCatalogRequest): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_catalog), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(metastore.Catalog( - name='name_value', - )) - response = await client.create_catalog(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = metastore.CreateCatalogRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Catalog) - assert response.name == 'name_value' - - -@pytest.mark.asyncio -async def test_create_catalog_async_from_dict(): - await test_create_catalog_async(request_type=dict) - -def test_create_catalog_field_headers(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.CreateCatalogRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_catalog), - '__call__') as call: - call.return_value = metastore.Catalog() - client.create_catalog(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_catalog_field_headers_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.CreateCatalogRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_catalog), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Catalog()) - await client.create_catalog(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_catalog_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_catalog), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Catalog() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_catalog( - parent='parent_value', - catalog=metastore.Catalog(name='name_value'), - catalog_id='catalog_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
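# Each flattened keyword argument must land on the matching field of the
# CreateCatalogRequest the client builds internally.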
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].catalog - mock_val = metastore.Catalog(name='name_value') - assert arg == mock_val - arg = args[0].catalog_id - mock_val = 'catalog_id_value' - assert arg == mock_val - - -def test_create_catalog_flattened_error(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_catalog( - metastore.CreateCatalogRequest(), - parent='parent_value', - catalog=metastore.Catalog(name='name_value'), - catalog_id='catalog_id_value', - ) - -@pytest.mark.asyncio -async def test_create_catalog_flattened_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_catalog), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Catalog() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Catalog()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_catalog( - parent='parent_value', - catalog=metastore.Catalog(name='name_value'), - catalog_id='catalog_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].catalog - mock_val = metastore.Catalog(name='name_value') - assert arg == mock_val - arg = args[0].catalog_id - mock_val = 'catalog_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_catalog_flattened_error_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_catalog( - metastore.CreateCatalogRequest(), - parent='parent_value', - catalog=metastore.Catalog(name='name_value'), - catalog_id='catalog_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - metastore.DeleteCatalogRequest, - dict, -]) -def test_delete_catalog(request_type, transport: str = 'grpc'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_catalog), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Catalog( - name='name_value', - ) - response = client.delete_catalog(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = metastore.DeleteCatalogRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
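# Note that delete is modeled here to return the (deleted) Catalog resource
# itself rather than Empty, so the same Catalog assertions apply.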
- assert isinstance(response, metastore.Catalog) - assert response.name == 'name_value' - - -def test_delete_catalog_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = metastore.DeleteCatalogRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_catalog), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_catalog(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metastore.DeleteCatalogRequest( - name='name_value', - ) - -def test_delete_catalog_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_catalog in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_catalog] = mock_rpc - request = {} - client.delete_catalog(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_catalog(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_catalog_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_catalog in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_catalog] = mock_rpc - - request = {} - await client.delete_catalog(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.delete_catalog(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_catalog_async(transport: str = 'grpc_asyncio', request_type=metastore.DeleteCatalogRequest): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_catalog), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(metastore.Catalog( - name='name_value', - )) - response = await client.delete_catalog(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = metastore.DeleteCatalogRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Catalog) - assert response.name == 'name_value' - - -@pytest.mark.asyncio -async def test_delete_catalog_async_from_dict(): - await test_delete_catalog_async(request_type=dict) - -def test_delete_catalog_field_headers(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.DeleteCatalogRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_catalog), - '__call__') as call: - call.return_value = metastore.Catalog() - client.delete_catalog(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_catalog_field_headers_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.DeleteCatalogRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_catalog), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Catalog()) - await client.delete_catalog(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_catalog_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_catalog), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Catalog() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_catalog( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_catalog_flattened_error(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_catalog( - metastore.DeleteCatalogRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_catalog_flattened_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_catalog), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Catalog()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_catalog( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_catalog_flattened_error_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_catalog( - metastore.DeleteCatalogRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - metastore.GetCatalogRequest, - dict, -]) -def test_get_catalog(request_type, transport: str = 'grpc'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_catalog), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Catalog( - name='name_value', - ) - response = client.get_catalog(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = metastore.GetCatalogRequest() - assert args[0] == request - - # Establish that the response is the type that we expect.
- assert isinstance(response, metastore.Catalog) - assert response.name == 'name_value' - - -def test_get_catalog_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = metastore.GetCatalogRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_catalog), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_catalog(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metastore.GetCatalogRequest( - name='name_value', - ) - -def test_get_catalog_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_catalog in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_catalog] = mock_rpc - request = {} - client.get_catalog(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_catalog(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_catalog_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_catalog in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_catalog] = mock_rpc - - request = {} - await client.get_catalog(request) - - # Establish that the underlying gRPC stub method was called. 
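The use_cached_wrapped_rpc tests above assert a construction-time invariant: `_prep_wrapped_messages` wraps every RPC once when the client is built, and later calls only look the wrapper up, so `wrap_method` is never invoked again. A rough, simplified sketch of that caching pattern (stand-in names; not the generated transport's actual code):

```python
# Sketch of the cached wrapped-RPC pattern; wrap_method stands in for
# google.api_core.gapic_v1.method.wrap_method.
def wrap_method(fn):
    def wrapped(request, **kwargs):
        return fn(request, **kwargs)
    return wrapped

class Transport:
    def __init__(self):
        # Built once, analogous to _prep_wrapped_messages().
        self._wrapped_methods = {self.get_catalog: wrap_method(self.get_catalog)}

    def get_catalog(self, request, **kwargs):
        return "catalog"

class Client:
    def __init__(self):
        self._transport = Transport()

    def get_catalog(self, request):
        # Every call reuses the cached wrapper; no re-wrapping.
        rpc = self._transport._wrapped_methods[self._transport.get_catalog]
        return rpc(request)

client = Client()
assert client.get_catalog({}) == client.get_catalog({}) == "catalog"
```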
- assert mock_rpc.call_count == 1 - - await client.get_catalog(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_catalog_async(transport: str = 'grpc_asyncio', request_type=metastore.GetCatalogRequest): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_catalog), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Catalog( - name='name_value', - )) - response = await client.get_catalog(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = metastore.GetCatalogRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Catalog) - assert response.name == 'name_value' - - -@pytest.mark.asyncio -async def test_get_catalog_async_from_dict(): - await test_get_catalog_async(request_type=dict) - -def test_get_catalog_field_headers(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.GetCatalogRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_catalog), - '__call__') as call: - call.return_value = metastore.Catalog() - client.get_catalog(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_catalog_field_headers_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.GetCatalogRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_catalog), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Catalog()) - await client.get_catalog(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_catalog_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.get_catalog), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Catalog() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_catalog( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_catalog_flattened_error(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_catalog( - metastore.GetCatalogRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_catalog_flattened_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_catalog), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Catalog()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_catalog( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_catalog_flattened_error_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_catalog( - metastore.GetCatalogRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - metastore.ListCatalogsRequest, - dict, -]) -def test_list_catalogs(request_type, transport: str = 'grpc'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_catalogs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.ListCatalogsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_catalogs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = metastore.ListCatalogsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListCatalogsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_catalogs_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = metastore.ListCatalogsRequest( - parent='parent_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_catalogs), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_catalogs(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metastore.ListCatalogsRequest( - parent='parent_value', - page_token='page_token_value', - ) - -def test_list_catalogs_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_catalogs in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_catalogs] = mock_rpc - request = {} - client.list_catalogs(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_catalogs(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_catalogs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_catalogs in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_catalogs] = mock_rpc - - request = {} - await client.list_catalogs(request) - - # Establish that the underlying gRPC stub method was called. 
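The async variants in this section wrap canned responses in `grpc_helpers_async.FakeUnaryUnaryCall` because the async client awaits the call object returned by the stub; a bare message would not be awaitable. A simplified stand-in showing why that wrapper is needed (not the real helper's implementation):

```python
# Minimal awaitable call object, comparable in spirit to
# grpc_helpers_async.FakeUnaryUnaryCall.
import asyncio

class FakeCall:
    def __init__(self, response):
        self._response = response

    def __await__(self):
        async def _resolve():
            return self._response   # canned response, no network
        return _resolve().__await__()

async def main():
    response = await FakeCall({"name": "name_value"})
    assert response == {"name": "name_value"}

asyncio.run(main())
```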
- assert mock_rpc.call_count == 1 - - await client.list_catalogs(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_catalogs_async(transport: str = 'grpc_asyncio', request_type=metastore.ListCatalogsRequest): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_catalogs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.ListCatalogsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_catalogs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = metastore.ListCatalogsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListCatalogsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_catalogs_async_from_dict(): - await test_list_catalogs_async(request_type=dict) - -def test_list_catalogs_field_headers(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.ListCatalogsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_catalogs), - '__call__') as call: - call.return_value = metastore.ListCatalogsResponse() - client.list_catalogs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_catalogs_field_headers_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.ListCatalogsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_catalogs), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.ListCatalogsResponse()) - await client.list_catalogs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_catalogs_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_catalogs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.ListCatalogsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_catalogs( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_catalogs_flattened_error(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_catalogs( - metastore.ListCatalogsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_catalogs_flattened_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_catalogs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.ListCatalogsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_catalogs( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_catalogs_flattened_error_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_catalogs( - metastore.ListCatalogsRequest(), - parent='parent_value', - ) - - -def test_list_catalogs_pager(transport_name: str = "grpc"): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_catalogs), - '__call__') as call: - # Set the response to a series of pages.
- call.side_effect = ( - metastore.ListCatalogsResponse( - catalogs=[ - metastore.Catalog(), - metastore.Catalog(), - metastore.Catalog(), - ], - next_page_token='abc', - ), - metastore.ListCatalogsResponse( - catalogs=[], - next_page_token='def', - ), - metastore.ListCatalogsResponse( - catalogs=[ - metastore.Catalog(), - ], - next_page_token='ghi', - ), - metastore.ListCatalogsResponse( - catalogs=[ - metastore.Catalog(), - metastore.Catalog(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_catalogs(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, metastore.Catalog) - for i in results) -def test_list_catalogs_pages(transport_name: str = "grpc"): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_catalogs), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - metastore.ListCatalogsResponse( - catalogs=[ - metastore.Catalog(), - metastore.Catalog(), - metastore.Catalog(), - ], - next_page_token='abc', - ), - metastore.ListCatalogsResponse( - catalogs=[], - next_page_token='def', - ), - metastore.ListCatalogsResponse( - catalogs=[ - metastore.Catalog(), - ], - next_page_token='ghi', - ), - metastore.ListCatalogsResponse( - catalogs=[ - metastore.Catalog(), - metastore.Catalog(), - ], - ), - RuntimeError, - ) - pages = list(client.list_catalogs(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_catalogs_async_pager(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_catalogs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - metastore.ListCatalogsResponse( - catalogs=[ - metastore.Catalog(), - metastore.Catalog(), - metastore.Catalog(), - ], - next_page_token='abc', - ), - metastore.ListCatalogsResponse( - catalogs=[], - next_page_token='def', - ), - metastore.ListCatalogsResponse( - catalogs=[ - metastore.Catalog(), - ], - next_page_token='ghi', - ), - metastore.ListCatalogsResponse( - catalogs=[ - metastore.Catalog(), - metastore.Catalog(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_catalogs(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, metastore.Catalog) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_catalogs_async_pages(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
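test_list_catalogs_pager above feeds four pages through `side_effect` and expects six catalogs back, because iterating the pager transparently follows `next_page_token` until it is empty. A minimal sketch of that paging loop (simplified stand-ins for the generated pagers module):

```python
# Sketch of sync pager behavior: iterate items across pages, chasing tokens.
class Page:
    def __init__(self, items, next_page_token=""):
        self.items = items
        self.next_page_token = next_page_token

class ListPager:
    def __init__(self, fetch, request):
        self._fetch, self._request = fetch, request

    @property
    def pages(self):
        while True:
            page = self._fetch(self._request)
            yield page
            if not page.next_page_token:
                return
            self._request["page_token"] = page.next_page_token

    def __iter__(self):
        for page in self.pages:
            yield from page.items

responses = iter([Page([1, 2, 3], "abc"), Page([], "def"), Page([4], "ghi"), Page([5, 6])])
pager = ListPager(lambda _req: next(responses), {})
assert list(pager) == [1, 2, 3, 4, 5, 6]   # 6 items over 4 pages, as in the test
```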
- with mock.patch.object( - type(client.transport.list_catalogs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - metastore.ListCatalogsResponse( - catalogs=[ - metastore.Catalog(), - metastore.Catalog(), - metastore.Catalog(), - ], - next_page_token='abc', - ), - metastore.ListCatalogsResponse( - catalogs=[], - next_page_token='def', - ), - metastore.ListCatalogsResponse( - catalogs=[ - metastore.Catalog(), - ], - next_page_token='ghi', - ), - metastore.ListCatalogsResponse( - catalogs=[ - metastore.Catalog(), - metastore.Catalog(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_catalogs(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - metastore.CreateDatabaseRequest, - dict, -]) -def test_create_database(request_type, transport: str = 'grpc'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Database( - name='name_value', - type_=metastore.Database.Type.HIVE, - ) - response = client.create_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = metastore.CreateDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Database) - assert response.name == 'name_value' - assert response.type_ == metastore.Database.Type.HIVE - - -def test_create_database_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = metastore.CreateDatabaseRequest( - parent='parent_value', - database_id='database_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
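test_list_catalogs_async_pager and test_list_catalogs_async_pages above exercise the same paging contract through `async for`. A simplified async counterpart of the sketch above (stand-in names, not the generated pagers):

```python
# Sketch of async pager behavior: an async generator chases page tokens lazily.
import asyncio

class Page:
    def __init__(self, items, next_page_token=""):
        self.items = items
        self.next_page_token = next_page_token

class AsyncListPager:
    def __init__(self, fetch, request):
        self._fetch, self._request = fetch, request

    async def __aiter__(self):
        while True:
            page = await self._fetch(self._request)
            for item in page.items:
                yield item
            if not page.next_page_token:
                return
            self._request["page_token"] = page.next_page_token

async def main():
    responses = iter([Page(["a", "b"], "abc"), Page(["c"])])

    async def fetch(_req):
        return next(responses)

    assert [item async for item in AsyncListPager(fetch, {})] == ["a", "b", "c"]

asyncio.run(main())
```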
- client.create_database(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metastore.CreateDatabaseRequest( - parent='parent_value', - database_id='database_id_value', - ) - -def test_create_database_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_database in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_database] = mock_rpc - request = {} - client.create_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_database_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_database in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_database] = mock_rpc - - request = {} - await client.create_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_database_async(transport: str = 'grpc_asyncio', request_type=metastore.CreateDatabaseRequest): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database( - name='name_value', - type_=metastore.Database.Type.HIVE, - )) - response = await client.create_database(request) - - # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = metastore.CreateDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Database) - assert response.name == 'name_value' - assert response.type_ == metastore.Database.Type.HIVE - - -@pytest.mark.asyncio -async def test_create_database_async_from_dict(): - await test_create_database_async(request_type=dict) - -def test_create_database_field_headers(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.CreateDatabaseRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - call.return_value = metastore.Database() - client.create_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_database_field_headers_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.CreateDatabaseRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database()) - await client.create_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_database_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Database() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_database( - parent='parent_value', - database=metastore.Database(hive_options=metastore.HiveDatabaseOptions(location_uri='location_uri_value')), - database_id='database_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
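The *_field_headers tests in this section all assert the same routing convention: any request field that appears in the HTTP URI is mirrored into `x-goog-request-params` metadata so the backend can route the call. A hedged sketch of how such a header pair is formed (the real clients use google.api_core.gapic_v1.routing_header):

```python
# Simplified routing-header construction; illustrative only.
def routing_metadata(params: dict) -> tuple:
    value = "&".join(f"{key}={val}" for key, val in params.items())
    return ("x-goog-request-params", value)

assert routing_metadata({"parent": "parent_value"}) == (
    "x-goog-request-params",
    "parent=parent_value",
)
```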
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].database - mock_val = metastore.Database(hive_options=metastore.HiveDatabaseOptions(location_uri='location_uri_value')) - assert arg == mock_val - arg = args[0].database_id - mock_val = 'database_id_value' - assert arg == mock_val - - -def test_create_database_flattened_error(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_database( - metastore.CreateDatabaseRequest(), - parent='parent_value', - database=metastore.Database(hive_options=metastore.HiveDatabaseOptions(location_uri='location_uri_value')), - database_id='database_id_value', - ) - -@pytest.mark.asyncio -async def test_create_database_flattened_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_database( - parent='parent_value', - database=metastore.Database(hive_options=metastore.HiveDatabaseOptions(location_uri='location_uri_value')), - database_id='database_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].database - mock_val = metastore.Database(hive_options=metastore.HiveDatabaseOptions(location_uri='location_uri_value')) - assert arg == mock_val - arg = args[0].database_id - mock_val = 'database_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_database_flattened_error_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_database( - metastore.CreateDatabaseRequest(), - parent='parent_value', - database=metastore.Database(hive_options=metastore.HiveDatabaseOptions(location_uri='location_uri_value')), - database_id='database_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - metastore.DeleteDatabaseRequest, - dict, -]) -def test_delete_database(request_type, transport: str = 'grpc'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_database), - '__call__') as call: - # Designate an appropriate return value for the call.
- call.return_value = metastore.Database( - name='name_value', - type_=metastore.Database.Type.HIVE, - ) - response = client.delete_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = metastore.DeleteDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Database) - assert response.name == 'name_value' - assert response.type_ == metastore.Database.Type.HIVE - - -def test_delete_database_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = metastore.DeleteDatabaseRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_database), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_database(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metastore.DeleteDatabaseRequest( - name='name_value', - ) - -def test_delete_database_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_database in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_database] = mock_rpc - request = {} - client.delete_database(request) - - # Establish that the underlying gRPC stub method was called. 
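The recurring "everything is optional in proto3" comment is what lets these tests send `request_type()` with no arguments: unset proto3 scalars simply read back as their zero values, so an empty request is always constructible. A small illustration with a dataclass standing in for the proto message:

```python
# Dataclass stand-in for a proto3 message: unset fields read as zero values.
from dataclasses import dataclass

@dataclass
class DeleteDatabaseRequest:
    name: str = ""   # proto3 string default

request = DeleteDatabaseRequest()    # empty request is valid
assert request.name == ""
request.name = "name_value"
assert request.name == "name_value"
```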
- assert mock_rpc.call_count == 1 - - client.delete_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_database_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_database in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_database] = mock_rpc - - request = {} - await client.delete_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_database_async(transport: str = 'grpc_asyncio', request_type=metastore.DeleteDatabaseRequest): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database( - name='name_value', - type_=metastore.Database.Type.HIVE, - )) - response = await client.delete_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = metastore.DeleteDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Database) - assert response.name == 'name_value' - assert response.type_ == metastore.Database.Type.HIVE - - -@pytest.mark.asyncio -async def test_delete_database_async_from_dict(): - await test_delete_database_async(request_type=dict) - -def test_delete_database_field_headers(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.DeleteDatabaseRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_database), - '__call__') as call: - call.return_value = metastore.Database() - client.delete_database(request) - - # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_database_field_headers_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.DeleteDatabaseRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_database), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database()) - await client.delete_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_database_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Database() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_database( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_database_flattened_error(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_database( - metastore.DeleteDatabaseRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_database_flattened_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_database( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_database_flattened_error_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_database( - metastore.DeleteDatabaseRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - metastore.UpdateDatabaseRequest, - dict, -]) -def test_update_database(request_type, transport: str = 'grpc'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Database( - name='name_value', - type_=metastore.Database.Type.HIVE, - ) - response = client.update_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = metastore.UpdateDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Database) - assert response.name == 'name_value' - assert response.type_ == metastore.Database.Type.HIVE - - -def test_update_database_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = metastore.UpdateDatabaseRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_database(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metastore.UpdateDatabaseRequest( - ) - -def test_update_database_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_database in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
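The *_flattened_error tests above assert the client-side rule that a request object and flattened keyword fields are mutually exclusive. A sketch of that guard (a hypothetical stand-in for the generated method's argument check):

```python
# Sketch of the flattened-call guard: request object XOR flattened fields.
class DeleteDatabaseRequest:
    def __init__(self, name: str = ""):
        self.name = name

def delete_database(request=None, *, name=None):
    if request is not None and name is not None:
        raise ValueError("If the `request` argument is set, then none of "
                         "the individual field arguments should be set.")
    request = request or DeleteDatabaseRequest()
    if name is not None:
        request.name = name
    return request

assert delete_database(name="name_value").name == "name_value"
try:
    delete_database(DeleteDatabaseRequest(), name="name_value")
    raise AssertionError("expected ValueError")
except ValueError:
    pass   # mixing both calling styles is rejected, as the tests expect
```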
- client._transport._wrapped_methods[client._transport.update_database] = mock_rpc - request = {} - client.update_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_database_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_database in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_database] = mock_rpc - - request = {} - await client.update_database(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.update_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_database_async(transport: str = 'grpc_asyncio', request_type=metastore.UpdateDatabaseRequest): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database( - name='name_value', - type_=metastore.Database.Type.HIVE, - )) - response = await client.update_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = metastore.UpdateDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Database) - assert response.name == 'name_value' - assert response.type_ == metastore.Database.Type.HIVE - - -@pytest.mark.asyncio -async def test_update_database_async_from_dict(): - await test_update_database_async(request_type=dict) - -def test_update_database_field_headers(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.UpdateDatabaseRequest() - - request.database.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - call.return_value = metastore.Database() - client.update_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_database_field_headers_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.UpdateDatabaseRequest() - - request.database.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database()) - await client.update_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'database.name=name_value', - ) in kw['metadata'] - - -def test_update_database_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Database() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_database( - database=metastore.Database(hive_options=metastore.HiveDatabaseOptions(location_uri='location_uri_value')), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].database - mock_val = metastore.Database(hive_options=metastore.HiveDatabaseOptions(location_uri='location_uri_value')) - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_database_flattened_error(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_database( - metastore.UpdateDatabaseRequest(), - database=metastore.Database(hive_options=metastore.HiveDatabaseOptions(location_uri='location_uri_value')), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_database_flattened_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - # Designate an appropriate return value for the call. 
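The update_database flattened tests above pass a `FieldMask` alongside the message; the mask names the fields the server is allowed to overwrite, so partial updates do not clobber unrelated fields. A hedged sketch of the apply step (the mask type is the real protobuf one; the merge logic is a simplified stand-in):

```python
# FieldMask-driven partial update, simplified to flat dict fields.
from google.protobuf import field_mask_pb2

def apply_update(current: dict, patch: dict, mask: field_mask_pb2.FieldMask) -> dict:
    updated = dict(current)
    for path in mask.paths:      # only masked fields are overwritten
        updated[path] = patch[path]
    return updated

mask = field_mask_pb2.FieldMask(paths=["hive_options"])
result = apply_update(
    {"name": "db1", "hive_options": {"location_uri": "old"}},
    {"name": "ignored", "hive_options": {"location_uri": "new"}},
    mask,
)
assert result == {"name": "db1", "hive_options": {"location_uri": "new"}}
```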
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.update_database(
- database=metastore.Database(hive_options=metastore.HiveDatabaseOptions(location_uri='location_uri_value')),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].database
- mock_val = metastore.Database(hive_options=metastore.HiveDatabaseOptions(location_uri='location_uri_value'))
- assert arg == mock_val
- arg = args[0].update_mask
- mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_database_flattened_error_async():
- client = MetastoreServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.update_database(
- metastore.UpdateDatabaseRequest(),
- database=metastore.Database(hive_options=metastore.HiveDatabaseOptions(location_uri='location_uri_value')),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
-
-@pytest.mark.parametrize("request_type", [
- metastore.GetDatabaseRequest,
- dict,
-])
-def test_get_database(request_type, transport: str = 'grpc'):
- client = MetastoreServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_database),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = metastore.Database(
- name='name_value',
- type_=metastore.Database.Type.HIVE,
- )
- response = client.get_database(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = metastore.GetDatabaseRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, metastore.Database)
- assert response.name == 'name_value'
- assert response.type_ == metastore.Database.Type.HIVE
-
-
-def test_get_database_non_empty_request_with_auto_populated_field():
- # This test is a coverage failsafe to make sure that UUID4 fields are
- # automatically populated, according to AIP-4235, with non-empty requests.
- client = MetastoreServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Populate all string fields in the request which are not UUID4
- # since we want to check that UUID4 are populated automatically
- # if they meet the requirements of AIP 4235.
- request = metastore.GetDatabaseRequest(
- name='name_value',
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_database),
- '__call__') as call:
- call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client.get_database(request=request)
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == metastore.GetDatabaseRequest(
- name='name_value',
- )
-
-def test_get_database_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = MetastoreServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.get_database in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.get_database] = mock_rpc
- request = {}
- client.get_database(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- client.get_database(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_database_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = MetastoreServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.get_database in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.get_database] = mock_rpc
-
- request = {}
- await client.get_database(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.get_database(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_database_async(transport: str = 'grpc_asyncio', request_type=metastore.GetDatabaseRequest):
- client = MetastoreServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_database),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database(
- name='name_value',
- type_=metastore.Database.Type.HIVE,
- ))
- response = await client.get_database(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = metastore.GetDatabaseRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Database) - assert response.name == 'name_value' - assert response.type_ == metastore.Database.Type.HIVE - - -@pytest.mark.asyncio -async def test_get_database_async_from_dict(): - await test_get_database_async(request_type=dict) - -def test_get_database_field_headers(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.GetDatabaseRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - call.return_value = metastore.Database() - client.get_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_database_field_headers_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.GetDatabaseRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database()) - await client.get_database(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_database_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Database() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_database( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_database_flattened_error(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
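- # Supplying both forms would make it ambiguous which values should win, so the client rejects the call before any RPC is attempted.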
- with pytest.raises(ValueError):
- client.get_database(
- metastore.GetDatabaseRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_get_database_flattened_async():
- client = MetastoreServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_database),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.get_database(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_database_flattened_error_async():
- client = MetastoreServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.get_database(
- metastore.GetDatabaseRequest(),
- name='name_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- metastore.ListDatabasesRequest,
- dict,
-])
-def test_list_databases(request_type, transport: str = 'grpc'):
- client = MetastoreServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_databases),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = metastore.ListDatabasesResponse(
- next_page_token='next_page_token_value',
- )
- response = client.list_databases(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = metastore.ListDatabasesRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListDatabasesPager)
- assert response.next_page_token == 'next_page_token_value'
-
-
-def test_list_databases_non_empty_request_with_auto_populated_field():
- # This test is a coverage failsafe to make sure that UUID4 fields are
- # automatically populated, according to AIP-4235, with non-empty requests.
- client = MetastoreServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Populate all string fields in the request which are not UUID4
- # since we want to check that UUID4 are populated automatically
- # if they meet the requirements of AIP 4235.
- request = metastore.ListDatabasesRequest(
- parent='parent_value',
- page_token='page_token_value',
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_databases),
- '__call__') as call:
- call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client.list_databases(request=request)
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == metastore.ListDatabasesRequest(
- parent='parent_value',
- page_token='page_token_value',
- )
-
-def test_list_databases_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = MetastoreServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.list_databases in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.list_databases] = mock_rpc
- request = {}
- client.list_databases(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- client.list_databases(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_databases_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = MetastoreServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.list_databases in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.list_databases] = mock_rpc
-
- request = {}
- await client.list_databases(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.list_databases(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_databases_async(transport: str = 'grpc_asyncio', request_type=metastore.ListDatabasesRequest):
- client = MetastoreServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_databases),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.ListDatabasesResponse(
- next_page_token='next_page_token_value',
- ))
- response = await client.list_databases(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = metastore.ListDatabasesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDatabasesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_databases_async_from_dict(): - await test_list_databases_async(request_type=dict) - -def test_list_databases_field_headers(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.ListDatabasesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - call.return_value = metastore.ListDatabasesResponse() - client.list_databases(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_databases_field_headers_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.ListDatabasesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.ListDatabasesResponse()) - await client.list_databases(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_databases_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.ListDatabasesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_databases( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_databases_flattened_error(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError):
- client.list_databases(
- metastore.ListDatabasesRequest(),
- parent='parent_value',
- )
-
-@pytest.mark.asyncio
-async def test_list_databases_flattened_async():
- client = MetastoreServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_databases),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.ListDatabasesResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.list_databases(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_databases_flattened_error_async():
- client = MetastoreServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.list_databases(
- metastore.ListDatabasesRequest(),
- parent='parent_value',
- )
-
-
-def test_list_databases_pager(transport_name: str = "grpc"):
- client = MetastoreServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_databases),
- '__call__') as call:
- # Set the response to a series of pages.
- call.side_effect = (
- metastore.ListDatabasesResponse(
- databases=[
- metastore.Database(),
- metastore.Database(),
- metastore.Database(),
- ],
- next_page_token='abc',
- ),
- metastore.ListDatabasesResponse(
- databases=[],
- next_page_token='def',
- ),
- metastore.ListDatabasesResponse(
- databases=[
- metastore.Database(),
- ],
- next_page_token='ghi',
- ),
- metastore.ListDatabasesResponse(
- databases=[
- metastore.Database(),
- metastore.Database(),
- ],
- ),
- RuntimeError,
- )
-
- expected_metadata = ()
- retry = retries.Retry()
- timeout = 5
- expected_metadata = tuple(expected_metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ('parent', ''),
- )),
- )
- pager = client.list_databases(request={}, retry=retry, timeout=timeout)
-
- assert pager._metadata == expected_metadata
- assert pager._retry == retry
- assert pager._timeout == timeout
-
- results = list(pager)
- assert len(results) == 6
- assert all(isinstance(i, metastore.Database)
- for i in results)
-
-def test_list_databases_pages(transport_name: str = "grpc"):
- client = MetastoreServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_databases),
- '__call__') as call:
- # Set the response to a series of pages.
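- # Pages of 3, 0, 1, and 2 items are mocked; the trailing RuntimeError is a sentinel that fails the test if the pager ever requests a page beyond the final, token-less one.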
- call.side_effect = ( - metastore.ListDatabasesResponse( - databases=[ - metastore.Database(), - metastore.Database(), - metastore.Database(), - ], - next_page_token='abc', - ), - metastore.ListDatabasesResponse( - databases=[], - next_page_token='def', - ), - metastore.ListDatabasesResponse( - databases=[ - metastore.Database(), - ], - next_page_token='ghi', - ), - metastore.ListDatabasesResponse( - databases=[ - metastore.Database(), - metastore.Database(), - ], - ), - RuntimeError, - ) - pages = list(client.list_databases(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_databases_async_pager(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - metastore.ListDatabasesResponse( - databases=[ - metastore.Database(), - metastore.Database(), - metastore.Database(), - ], - next_page_token='abc', - ), - metastore.ListDatabasesResponse( - databases=[], - next_page_token='def', - ), - metastore.ListDatabasesResponse( - databases=[ - metastore.Database(), - ], - next_page_token='ghi', - ), - metastore.ListDatabasesResponse( - databases=[ - metastore.Database(), - metastore.Database(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_databases(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, metastore.Database) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_databases_async_pages(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
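- # As in the sync pager test above, the RuntimeError sentinel guarantees the async pager stops after consuming the final, token-less page.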
- call.side_effect = ( - metastore.ListDatabasesResponse( - databases=[ - metastore.Database(), - metastore.Database(), - metastore.Database(), - ], - next_page_token='abc', - ), - metastore.ListDatabasesResponse( - databases=[], - next_page_token='def', - ), - metastore.ListDatabasesResponse( - databases=[ - metastore.Database(), - ], - next_page_token='ghi', - ), - metastore.ListDatabasesResponse( - databases=[ - metastore.Database(), - metastore.Database(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_databases(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - metastore.CreateTableRequest, - dict, -]) -def test_create_table(request_type, transport: str = 'grpc'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_table), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Table( - name='name_value', - type_=metastore.Table.Type.HIVE, - etag='etag_value', - ) - response = client.create_table(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = metastore.CreateTableRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Table) - assert response.name == 'name_value' - assert response.type_ == metastore.Table.Type.HIVE - assert response.etag == 'etag_value' - - -def test_create_table_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = metastore.CreateTableRequest( - parent='parent_value', - table_id='table_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_table), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.create_table(request=request)
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == metastore.CreateTableRequest(
- parent='parent_value',
- table_id='table_id_value',
- )
-
-def test_create_table_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = MetastoreServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.create_table in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.create_table] = mock_rpc
- request = {}
- client.create_table(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- client.create_table(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_create_table_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = MetastoreServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.create_table in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.create_table] = mock_rpc
-
- request = {}
- await client.create_table(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.create_table(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_create_table_async(transport: str = 'grpc_asyncio', request_type=metastore.CreateTableRequest):
- client = MetastoreServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_table),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table(
- name='name_value',
- type_=metastore.Table.Type.HIVE,
- etag='etag_value',
- ))
- response = await client.create_table(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = metastore.CreateTableRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Table) - assert response.name == 'name_value' - assert response.type_ == metastore.Table.Type.HIVE - assert response.etag == 'etag_value' - - -@pytest.mark.asyncio -async def test_create_table_async_from_dict(): - await test_create_table_async(request_type=dict) - -def test_create_table_field_headers(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.CreateTableRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_table), - '__call__') as call: - call.return_value = metastore.Table() - client.create_table(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_table_field_headers_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.CreateTableRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_table), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table()) - await client.create_table(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_table_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_table), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Table() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_table( - parent='parent_value', - table=metastore.Table(hive_options=metastore.HiveTableOptions(parameters={'key_value': 'value_value'})), - table_id='table_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
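- # Each flattened keyword argument must have been copied into the single request message handed to the stub.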
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
- arg = args[0].table
- mock_val = metastore.Table(hive_options=metastore.HiveTableOptions(parameters={'key_value': 'value_value'}))
- assert arg == mock_val
- arg = args[0].table_id
- mock_val = 'table_id_value'
- assert arg == mock_val
-
-
-def test_create_table_flattened_error():
- client = MetastoreServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.create_table(
- metastore.CreateTableRequest(),
- parent='parent_value',
- table=metastore.Table(hive_options=metastore.HiveTableOptions(parameters={'key_value': 'value_value'})),
- table_id='table_id_value',
- )
-
-@pytest.mark.asyncio
-async def test_create_table_flattened_async():
- client = MetastoreServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_table),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.create_table(
- parent='parent_value',
- table=metastore.Table(hive_options=metastore.HiveTableOptions(parameters={'key_value': 'value_value'})),
- table_id='table_id_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
- arg = args[0].table
- mock_val = metastore.Table(hive_options=metastore.HiveTableOptions(parameters={'key_value': 'value_value'}))
- assert arg == mock_val
- arg = args[0].table_id
- mock_val = 'table_id_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_table_flattened_error_async():
- client = MetastoreServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.create_table(
- metastore.CreateTableRequest(),
- parent='parent_value',
- table=metastore.Table(hive_options=metastore.HiveTableOptions(parameters={'key_value': 'value_value'})),
- table_id='table_id_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- metastore.DeleteTableRequest,
- dict,
-])
-def test_delete_table(request_type, transport: str = 'grpc'):
- client = MetastoreServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_table),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = metastore.Table( - name='name_value', - type_=metastore.Table.Type.HIVE, - etag='etag_value', - ) - response = client.delete_table(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = metastore.DeleteTableRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Table) - assert response.name == 'name_value' - assert response.type_ == metastore.Table.Type.HIVE - assert response.etag == 'etag_value' - - -def test_delete_table_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = metastore.DeleteTableRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_table), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_table(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metastore.DeleteTableRequest( - name='name_value', - ) - -def test_delete_table_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_table in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_table] = mock_rpc - request = {} - client.delete_table(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.delete_table(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_delete_table_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = MetastoreServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.delete_table in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.delete_table] = mock_rpc
-
- request = {}
- await client.delete_table(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.delete_table(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_delete_table_async(transport: str = 'grpc_asyncio', request_type=metastore.DeleteTableRequest):
- client = MetastoreServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_table),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table(
- name='name_value',
- type_=metastore.Table.Type.HIVE,
- etag='etag_value',
- ))
- response = await client.delete_table(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = metastore.DeleteTableRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, metastore.Table)
- assert response.name == 'name_value'
- assert response.type_ == metastore.Table.Type.HIVE
- assert response.etag == 'etag_value'
-
-
-@pytest.mark.asyncio
-async def test_delete_table_async_from_dict():
- await test_delete_table_async(request_type=dict)
-
-def test_delete_table_field_headers():
- client = MetastoreServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = metastore.DeleteTableRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_table),
- '__call__') as call:
- call.return_value = metastore.Table()
- client.delete_table(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_delete_table_field_headers_async():
- client = MetastoreServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = metastore.DeleteTableRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_table),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table())
- await client.delete_table(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_delete_table_flattened():
- client = MetastoreServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_table),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = metastore.Table()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.delete_table(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_delete_table_flattened_error():
- client = MetastoreServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.delete_table(
- metastore.DeleteTableRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_delete_table_flattened_async():
- client = MetastoreServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_table),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.delete_table(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_table_flattened_error_async():
- client = MetastoreServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError): - await client.delete_table( - metastore.DeleteTableRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - metastore.UpdateTableRequest, - dict, -]) -def test_update_table(request_type, transport: str = 'grpc'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_table), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Table( - name='name_value', - type_=metastore.Table.Type.HIVE, - etag='etag_value', - ) - response = client.update_table(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = metastore.UpdateTableRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Table) - assert response.name == 'name_value' - assert response.type_ == metastore.Table.Type.HIVE - assert response.etag == 'etag_value' - - -def test_update_table_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = metastore.UpdateTableRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_table), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_table(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metastore.UpdateTableRequest( - ) - -def test_update_table_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_table in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_table] = mock_rpc - request = {} - client.update_table(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.update_table(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_update_table_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = MetastoreServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.update_table in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.update_table] = mock_rpc
-
- request = {}
- await client.update_table(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.update_table(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_update_table_async(transport: str = 'grpc_asyncio', request_type=metastore.UpdateTableRequest):
- client = MetastoreServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_table),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table(
- name='name_value',
- type_=metastore.Table.Type.HIVE,
- etag='etag_value',
- ))
- response = await client.update_table(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = metastore.UpdateTableRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, metastore.Table)
- assert response.name == 'name_value'
- assert response.type_ == metastore.Table.Type.HIVE
- assert response.etag == 'etag_value'
-
-
-@pytest.mark.asyncio
-async def test_update_table_async_from_dict():
- await test_update_table_async(request_type=dict)
-
-def test_update_table_field_headers():
- client = MetastoreServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = metastore.UpdateTableRequest()
-
- request.table.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_table),
- '__call__') as call:
- call.return_value = metastore.Table()
- client.update_table(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'table.name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_update_table_field_headers_async():
- client = MetastoreServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = metastore.UpdateTableRequest()
-
- request.table.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_table),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table())
- await client.update_table(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'table.name=name_value',
- ) in kw['metadata']
-
-
-def test_update_table_flattened():
- client = MetastoreServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_table),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = metastore.Table()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.update_table(
- table=metastore.Table(hive_options=metastore.HiveTableOptions(parameters={'key_value': 'value_value'})),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].table
- mock_val = metastore.Table(hive_options=metastore.HiveTableOptions(parameters={'key_value': 'value_value'}))
- assert arg == mock_val
- arg = args[0].update_mask
- mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
- assert arg == mock_val
-
-
-def test_update_table_flattened_error():
- client = MetastoreServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.update_table(
- metastore.UpdateTableRequest(),
- table=metastore.Table(hive_options=metastore.HiveTableOptions(parameters={'key_value': 'value_value'})),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
-@pytest.mark.asyncio
-async def test_update_table_flattened_async():
- client = MetastoreServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_table),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.update_table( - table=metastore.Table(hive_options=metastore.HiveTableOptions(parameters={'key_value': 'value_value'})), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].table - mock_val = metastore.Table(hive_options=metastore.HiveTableOptions(parameters={'key_value': 'value_value'})) - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_table_flattened_error_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_table( - metastore.UpdateTableRequest(), - table=metastore.Table(hive_options=metastore.HiveTableOptions(parameters={'key_value': 'value_value'})), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - metastore.RenameTableRequest, - dict, -]) -def test_rename_table(request_type, transport: str = 'grpc'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_table), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Table( - name='name_value', - type_=metastore.Table.Type.HIVE, - etag='etag_value', - ) - response = client.rename_table(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = metastore.RenameTableRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Table) - assert response.name == 'name_value' - assert response.type_ == metastore.Table.Type.HIVE - assert response.etag == 'etag_value' - - -def test_rename_table_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = metastore.RenameTableRequest( - name='name_value', - new_name='new_name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_table), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.rename_table(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metastore.RenameTableRequest( - name='name_value', - new_name='new_name_value', - ) - -def test_rename_table_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.rename_table in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.rename_table] = mock_rpc - request = {} - client.rename_table(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.rename_table(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_rename_table_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.rename_table in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.rename_table] = mock_rpc - - request = {} - await client.rename_table(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.rename_table(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_rename_table_async(transport: str = 'grpc_asyncio', request_type=metastore.RenameTableRequest): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_table), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table( - name='name_value', - type_=metastore.Table.Type.HIVE, - etag='etag_value', - )) - response = await client.rename_table(request) - - # Establish that the underlying gRPC stub method was called. 
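# Editor's note on the mock pattern used throughout this file: the tests patch
# __call__ on type(client.transport.<rpc>) rather than replacing the attribute,
# so the invocation is intercepted while the attribute itself (the key into
# _wrapped_methods) is left intact. Because a plain MagicMock is not a
# descriptor, the instance call reaches it unbound, which is why the tests can
# read the request as args[0]. A reduced, runnable illustration:
from unittest import mock

class Rpc:
    def __call__(self, request):
        return 'real'

rpc = Rpc()
with mock.patch.object(type(rpc), '__call__') as call:
    call.return_value = 'faked'
    # The instance call routes through the patched type-level __call__,
    # and the mock records only the request argument, no self.
    assert rpc(request={}) == 'faked'
    _, args, kwargs = call.mock_calls[0]
    assert kwargs == {'request': {}}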
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = metastore.RenameTableRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Table) - assert response.name == 'name_value' - assert response.type_ == metastore.Table.Type.HIVE - assert response.etag == 'etag_value' - - -@pytest.mark.asyncio -async def test_rename_table_async_from_dict(): - await test_rename_table_async(request_type=dict) - -def test_rename_table_field_headers(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.RenameTableRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_table), - '__call__') as call: - call.return_value = metastore.Table() - client.rename_table(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_rename_table_field_headers_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.RenameTableRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_table), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table()) - await client.rename_table(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_rename_table_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_table), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Table() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.rename_table( - name='name_value', - new_name='new_name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].new_name - mock_val = 'new_name_value' - assert arg == mock_val - - -def test_rename_table_flattened_error(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.rename_table( - metastore.RenameTableRequest(), - name='name_value', - new_name='new_name_value', - ) - -@pytest.mark.asyncio -async def test_rename_table_flattened_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_table), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Table() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.rename_table( - name='name_value', - new_name='new_name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].new_name - mock_val = 'new_name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_rename_table_flattened_error_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.rename_table( - metastore.RenameTableRequest(), - name='name_value', - new_name='new_name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - metastore.GetTableRequest, - dict, -]) -def test_get_table(request_type, transport: str = 'grpc'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_table), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Table( - name='name_value', - type_=metastore.Table.Type.HIVE, - etag='etag_value', - ) - response = client.get_table(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = metastore.GetTableRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Table) - assert response.name == 'name_value' - assert response.type_ == metastore.Table.Type.HIVE - assert response.etag == 'etag_value' - - -def test_get_table_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = metastore.GetTableRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_table), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_table(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metastore.GetTableRequest( - name='name_value', - ) - -def test_get_table_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_table in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_table] = mock_rpc - request = {} - client.get_table(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_table(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_table_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_table in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_table] = mock_rpc - - request = {} - await client.get_table(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_table(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_table_async(transport: str = 'grpc_asyncio', request_type=metastore.GetTableRequest): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_table), - '__call__') as call: - # Designate an appropriate return value for the call. 
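# Editor's note: the *_non_empty_request_with_auto_populated_field tests above
# are coverage failsafes for AIP-4235: request fields annotated as UUID4 ids
# must be filled in automatically when the caller leaves them empty, while
# explicitly set fields pass through untouched. A sketch of that behaviour,
# using a hypothetical request_id field (the metastore requests in this file
# happen to declare no such field, hence "failsafe"):
import uuid

def ensure_request_id(request: dict) -> dict:
    if not request.get('request_id'):
        request['request_id'] = str(uuid.uuid4())
    return request

req = ensure_request_id({'name': 'name_value'})
assert uuid.UUID(req['request_id']).version == 4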
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table( - name='name_value', - type_=metastore.Table.Type.HIVE, - etag='etag_value', - )) - response = await client.get_table(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = metastore.GetTableRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Table) - assert response.name == 'name_value' - assert response.type_ == metastore.Table.Type.HIVE - assert response.etag == 'etag_value' - - -@pytest.mark.asyncio -async def test_get_table_async_from_dict(): - await test_get_table_async(request_type=dict) - -def test_get_table_field_headers(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.GetTableRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_table), - '__call__') as call: - call.return_value = metastore.Table() - client.get_table(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_table_field_headers_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.GetTableRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_table), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table()) - await client.get_table(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_table_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_table), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Table() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_table( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_table_flattened_error(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_table( - metastore.GetTableRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_table_flattened_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_table), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Table() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_table( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_table_flattened_error_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_table( - metastore.GetTableRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - metastore.ListTablesRequest, - dict, -]) -def test_list_tables(request_type, transport: str = 'grpc'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tables), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.ListTablesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_tables(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = metastore.ListTablesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTablesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_tables_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
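# Editor's note: the ValueError asserted by the *_flattened_error tests above
# comes from a mutual-exclusion check in every generated method: callers may
# pass either a request object or flattened keyword fields, never both. A
# schematic version of that check (build_request is hypothetical):
def build_request(request=None, **flattened):
    if request is not None and any(v is not None for v in flattened.values()):
        raise ValueError('If the `request` argument is set, then none of '
                         'the individual field arguments should be set.')
    return request if request is not None else flattened

try:
    build_request({'name': 'n'}, name='name_value')
except ValueError:
    pass  # both forms supplied at once, as in the tests above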
- request = metastore.ListTablesRequest( - parent='parent_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tables), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_tables(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metastore.ListTablesRequest( - parent='parent_value', - page_token='page_token_value', - ) - -def test_list_tables_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_tables in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_tables] = mock_rpc - request = {} - client.list_tables(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_tables(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_tables_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_tables in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_tables] = mock_rpc - - request = {} - await client.list_tables(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_tables(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_tables_async(transport: str = 'grpc_asyncio', request_type=metastore.ListTablesRequest): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_tables), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(metastore.ListTablesResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_tables(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = metastore.ListTablesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTablesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_tables_async_from_dict(): - await test_list_tables_async(request_type=dict) - -def test_list_tables_field_headers(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.ListTablesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tables), - '__call__') as call: - call.return_value = metastore.ListTablesResponse() - client.list_tables(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_tables_field_headers_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.ListTablesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tables), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.ListTablesResponse()) - await client.list_tables(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_tables_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tables), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.ListTablesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_tables( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_tables_flattened_error(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_tables( - metastore.ListTablesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_tables_flattened_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tables), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.ListTablesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.ListTablesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_tables( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_tables_flattened_error_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_tables( - metastore.ListTablesRequest(), - parent='parent_value', - ) - - -def test_list_tables_pager(transport_name: str = "grpc"): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tables), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - metastore.ListTablesResponse( - tables=[ - metastore.Table(), - metastore.Table(), - metastore.Table(), - ], - next_page_token='abc', - ), - metastore.ListTablesResponse( - tables=[], - next_page_token='def', - ), - metastore.ListTablesResponse( - tables=[ - metastore.Table(), - ], - next_page_token='ghi', - ), - metastore.ListTablesResponse( - tables=[ - metastore.Table(), - metastore.Table(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_tables(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, metastore.Table) - for i in results) -def test_list_tables_pages(transport_name: str = "grpc"): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tables), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - metastore.ListTablesResponse( - tables=[ - metastore.Table(), - metastore.Table(), - metastore.Table(), - ], - next_page_token='abc', - ), - metastore.ListTablesResponse( - tables=[], - next_page_token='def', - ), - metastore.ListTablesResponse( - tables=[ - metastore.Table(), - ], - next_page_token='ghi', - ), - metastore.ListTablesResponse( - tables=[ - metastore.Table(), - metastore.Table(), - ], - ), - RuntimeError, - ) - pages = list(client.list_tables(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_tables_async_pager(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tables), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - metastore.ListTablesResponse( - tables=[ - metastore.Table(), - metastore.Table(), - metastore.Table(), - ], - next_page_token='abc', - ), - metastore.ListTablesResponse( - tables=[], - next_page_token='def', - ), - metastore.ListTablesResponse( - tables=[ - metastore.Table(), - ], - next_page_token='ghi', - ), - metastore.ListTablesResponse( - tables=[ - metastore.Table(), - metastore.Table(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_tables(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, metastore.Table) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_tables_async_pages(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_tables), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - metastore.ListTablesResponse( - tables=[ - metastore.Table(), - metastore.Table(), - metastore.Table(), - ], - next_page_token='abc', - ), - metastore.ListTablesResponse( - tables=[], - next_page_token='def', - ), - metastore.ListTablesResponse( - tables=[ - metastore.Table(), - ], - next_page_token='ghi', - ), - metastore.ListTablesResponse( - tables=[ - metastore.Table(), - metastore.Table(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_tables(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - metastore.CreateLockRequest, - dict, -]) -def test_create_lock(request_type, transport: str = 'grpc'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
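# Editor's note: the pager tests above pin down the page-iteration contract:
# re-issue the RPC with the returned next_page_token until the token comes
# back empty, flattening the tables from every page. A dict-based sketch of
# that loop (the real code uses metastore.ListTablesResponse and
# pagers.ListTablesPager):
def iterate_tables(api_call):
    token = ''
    while True:
        page = api_call(page_token=token)
        yield from page.get('tables', [])
        token = page.get('next_page_token', '')
        if not token:
            break

pages = iter([
    {'tables': [1, 2, 3], 'next_page_token': 'abc'},
    {'tables': [], 'next_page_token': 'def'},
    {'tables': [4], 'next_page_token': 'ghi'},
    {'tables': [5, 6]},
])
# Mirrors the fixtures above: four pages, six tables in total.
assert list(iterate_tables(lambda page_token: next(pages))) == [1, 2, 3, 4, 5, 6]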
- with mock.patch.object( - type(client.transport.create_lock), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Lock( - name='name_value', - type_=metastore.Lock.Type.EXCLUSIVE, - state=metastore.Lock.State.WAITING, - table_id='table_id_value', - ) - response = client.create_lock(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = metastore.CreateLockRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Lock) - assert response.name == 'name_value' - assert response.type_ == metastore.Lock.Type.EXCLUSIVE - assert response.state == metastore.Lock.State.WAITING - - -def test_create_lock_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = metastore.CreateLockRequest( - parent='parent_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_lock), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_lock(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metastore.CreateLockRequest( - parent='parent_value', - ) - -def test_create_lock_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_lock in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_lock] = mock_rpc - request = {} - client.create_lock(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_lock(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_lock_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_lock in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_lock] = mock_rpc - - request = {} - await client.create_lock(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_lock(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_lock_async(transport: str = 'grpc_asyncio', request_type=metastore.CreateLockRequest): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_lock), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(metastore.Lock( - name='name_value', - type_=metastore.Lock.Type.EXCLUSIVE, - state=metastore.Lock.State.WAITING, - )) - response = await client.create_lock(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = metastore.CreateLockRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Lock) - assert response.name == 'name_value' - assert response.type_ == metastore.Lock.Type.EXCLUSIVE - assert response.state == metastore.Lock.State.WAITING - - -@pytest.mark.asyncio -async def test_create_lock_async_from_dict(): - await test_create_lock_async(request_type=dict) - -def test_create_lock_field_headers(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.CreateLockRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_lock), - '__call__') as call: - call.return_value = metastore.Lock() - client.create_lock(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_lock_field_headers_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.CreateLockRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_lock), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Lock()) - await client.create_lock(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_lock_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_lock), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Lock() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_lock( - parent='parent_value', - lock=metastore.Lock(table_id='table_id_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].lock - mock_val = metastore.Lock(table_id='table_id_value') - assert arg == mock_val - - -def test_create_lock_flattened_error(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_lock( - metastore.CreateLockRequest(), - parent='parent_value', - lock=metastore.Lock(table_id='table_id_value'), - ) - -@pytest.mark.asyncio -async def test_create_lock_flattened_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_lock), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Lock() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Lock()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_lock( - parent='parent_value', - lock=metastore.Lock(table_id='table_id_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].lock - mock_val = metastore.Lock(table_id='table_id_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_lock_flattened_error_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_lock( - metastore.CreateLockRequest(), - parent='parent_value', - lock=metastore.Lock(table_id='table_id_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - metastore.DeleteLockRequest, - dict, -]) -def test_delete_lock(request_type, transport: str = 'grpc'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_lock), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_lock(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = metastore.DeleteLockRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_lock_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = metastore.DeleteLockRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_lock), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_lock(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metastore.DeleteLockRequest( - name='name_value', - ) - -def test_delete_lock_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_lock in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.delete_lock] = mock_rpc - request = {} - client.delete_lock(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_lock(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_lock_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_lock in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_lock] = mock_rpc - - request = {} - await client.delete_lock(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_lock(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_lock_async(transport: str = 'grpc_asyncio', request_type=metastore.DeleteLockRequest): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_lock), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_lock(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = metastore.DeleteLockRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_lock_async_from_dict(): - await test_delete_lock_async(request_type=dict) - -def test_delete_lock_field_headers(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.DeleteLockRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_lock), - '__call__') as call: - call.return_value = None - client.delete_lock(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
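# Editor's note: in the async variants above, grpc_helpers_async.FakeUnaryUnaryCall
# stands in for a live gRPC call. Conceptually it is just an awaitable that
# resolves to a canned response; a trimmed-down equivalent (FakeCall is
# illustrative, not the real helper):
import asyncio

class FakeCall:
    def __init__(self, response):
        self._response = response

    def __await__(self):
        async def _resolve():
            return self._response
        return _resolve().__await__()

async def _demo():
    # Awaiting the fake yields the canned response, e.g. None for delete_lock.
    assert await FakeCall(None) is None

asyncio.run(_demo())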
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_lock_field_headers_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metastore.DeleteLockRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_lock), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_lock(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_lock_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_lock), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_lock( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_lock_flattened_error(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_lock( - metastore.DeleteLockRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_lock_flattened_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_lock), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_lock( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_lock_flattened_error_async(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.delete_lock( - metastore.DeleteLockRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - metastore.CheckLockRequest, - dict, -]) -def test_check_lock(request_type, transport: str = 'grpc'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.check_lock), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.Lock( - name='name_value', - type_=metastore.Lock.Type.EXCLUSIVE, - state=metastore.Lock.State.WAITING, - table_id='table_id_value', - ) - response = client.check_lock(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = metastore.CheckLockRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Lock) - assert response.name == 'name_value' - assert response.type_ == metastore.Lock.Type.EXCLUSIVE - assert response.state == metastore.Lock.State.WAITING - - -def test_check_lock_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = metastore.CheckLockRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.check_lock), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.check_lock(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metastore.CheckLockRequest( - name='name_value', - ) - -def test_check_lock_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.check_lock in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.check_lock] = mock_rpc - request = {} - client.check_lock(request) - - # Establish that the underlying gRPC stub method was called. 
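- # Sketch of the mechanism under test, assuming the generated transport
- # layout: _prep_wrapped_messages runs once in the constructor and caches
- #     self._wrapped_methods[self.check_lock] = wrap_method(self.check_lock, ...)
- # so every client.check_lock() call dispatches through that cached entry;
- # replacing the entry with mock_rpc above therefore intercepts both calls
- # below without wrap_method ever being invoked again.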
- assert mock_rpc.call_count == 1
-
- client.check_lock(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_check_lock_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = MetastoreServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.check_lock in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.check_lock] = mock_rpc
-
- request = {}
- await client.check_lock(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.check_lock(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_check_lock_async(transport: str = 'grpc_asyncio', request_type=metastore.CheckLockRequest):
- client = MetastoreServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.check_lock),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Lock(
- name='name_value',
- type_=metastore.Lock.Type.EXCLUSIVE,
- state=metastore.Lock.State.WAITING,
- ))
- response = await client.check_lock(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = metastore.CheckLockRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, metastore.Lock)
- assert response.name == 'name_value'
- assert response.type_ == metastore.Lock.Type.EXCLUSIVE
- assert response.state == metastore.Lock.State.WAITING
-
-
-@pytest.mark.asyncio
-async def test_check_lock_async_from_dict():
- await test_check_lock_async(request_type=dict)
-
-def test_check_lock_field_headers():
- client = MetastoreServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = metastore.CheckLockRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.check_lock),
- '__call__') as call:
- call.return_value = metastore.Lock()
- client.check_lock(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_check_lock_field_headers_async():
- client = MetastoreServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = metastore.CheckLockRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.check_lock),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Lock())
- await client.check_lock(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_check_lock_flattened():
- client = MetastoreServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.check_lock),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = metastore.Lock()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.check_lock(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_check_lock_flattened_error():
- client = MetastoreServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.check_lock(
- metastore.CheckLockRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_check_lock_flattened_async():
- client = MetastoreServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.check_lock),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Lock())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.check_lock(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_check_lock_flattened_error_async():
- client = MetastoreServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError): - await client.check_lock( - metastore.CheckLockRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - metastore.ListLocksRequest, - dict, -]) -def test_list_locks(request_type, transport: str = 'grpc'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_locks), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metastore.ListLocksResponse( - next_page_token='next_page_token_value', - ) - response = client.list_locks(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = metastore.ListLocksRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListLocksPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_locks_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = metastore.ListLocksRequest( - parent='parent_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_locks), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_locks(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metastore.ListLocksRequest( - parent='parent_value', - page_token='page_token_value', - ) - -def test_list_locks_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_locks in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_locks] = mock_rpc - request = {} - client.list_locks(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.list_locks(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_locks_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = MetastoreServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.list_locks in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.list_locks] = mock_rpc
-
- request = {}
- await client.list_locks(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.list_locks(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_locks_async(transport: str = 'grpc_asyncio', request_type=metastore.ListLocksRequest):
- client = MetastoreServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_locks),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.ListLocksResponse(
- next_page_token='next_page_token_value',
- ))
- response = await client.list_locks(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = metastore.ListLocksRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListLocksAsyncPager)
- assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_locks_async_from_dict():
- await test_list_locks_async(request_type=dict)
-
-def test_list_locks_field_headers():
- client = MetastoreServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = metastore.ListLocksRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_locks),
- '__call__') as call:
- call.return_value = metastore.ListLocksResponse()
- client.list_locks(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_locks_field_headers_async():
- client = MetastoreServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = metastore.ListLocksRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_locks),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.ListLocksResponse())
- await client.list_locks(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-def test_list_locks_flattened():
- client = MetastoreServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_locks),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = metastore.ListLocksResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.list_locks(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-
-def test_list_locks_flattened_error():
- client = MetastoreServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.list_locks(
- metastore.ListLocksRequest(),
- parent='parent_value',
- )
-
-@pytest.mark.asyncio
-async def test_list_locks_flattened_async():
- client = MetastoreServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_locks),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.ListLocksResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.list_locks(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_locks_flattened_error_async():
- client = MetastoreServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError): - await client.list_locks( - metastore.ListLocksRequest(), - parent='parent_value', - ) - - -def test_list_locks_pager(transport_name: str = "grpc"): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_locks), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - metastore.ListLocksResponse( - locks=[ - metastore.Lock(), - metastore.Lock(), - metastore.Lock(), - ], - next_page_token='abc', - ), - metastore.ListLocksResponse( - locks=[], - next_page_token='def', - ), - metastore.ListLocksResponse( - locks=[ - metastore.Lock(), - ], - next_page_token='ghi', - ), - metastore.ListLocksResponse( - locks=[ - metastore.Lock(), - metastore.Lock(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_locks(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, metastore.Lock) - for i in results) -def test_list_locks_pages(transport_name: str = "grpc"): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_locks), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - metastore.ListLocksResponse( - locks=[ - metastore.Lock(), - metastore.Lock(), - metastore.Lock(), - ], - next_page_token='abc', - ), - metastore.ListLocksResponse( - locks=[], - next_page_token='def', - ), - metastore.ListLocksResponse( - locks=[ - metastore.Lock(), - ], - next_page_token='ghi', - ), - metastore.ListLocksResponse( - locks=[ - metastore.Lock(), - metastore.Lock(), - ], - ), - RuntimeError, - ) - pages = list(client.list_locks(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_locks_async_pager(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_locks), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
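- # Iterating the pager issues one RPC per page, consuming side_effect in
- # order; the trailing RuntimeError below acts as a tripwire that fails
- # the test if the pager ever requests a page beyond the four provided.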
- call.side_effect = ( - metastore.ListLocksResponse( - locks=[ - metastore.Lock(), - metastore.Lock(), - metastore.Lock(), - ], - next_page_token='abc', - ), - metastore.ListLocksResponse( - locks=[], - next_page_token='def', - ), - metastore.ListLocksResponse( - locks=[ - metastore.Lock(), - ], - next_page_token='ghi', - ), - metastore.ListLocksResponse( - locks=[ - metastore.Lock(), - metastore.Lock(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_locks(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, metastore.Lock) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_locks_async_pages(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_locks), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - metastore.ListLocksResponse( - locks=[ - metastore.Lock(), - metastore.Lock(), - metastore.Lock(), - ], - next_page_token='abc', - ), - metastore.ListLocksResponse( - locks=[], - next_page_token='def', - ), - metastore.ListLocksResponse( - locks=[ - metastore.Lock(), - ], - next_page_token='ghi', - ), - metastore.ListLocksResponse( - locks=[ - metastore.Lock(), - metastore.Lock(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_locks(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_create_catalog_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_catalog in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_catalog] = mock_rpc - - request = {} - client.create_catalog(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_catalog(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_catalog_rest_required_fields(request_type=metastore.CreateCatalogRequest): - transport_class = transports.MetastoreServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["catalog_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "catalogId" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_catalog._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "catalogId" in jsonified_request - assert jsonified_request["catalogId"] == request_init["catalog_id"] - - jsonified_request["parent"] = 'parent_value' - jsonified_request["catalogId"] = 'catalog_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_catalog._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("catalog_id", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "catalogId" in jsonified_request - assert jsonified_request["catalogId"] == 'catalog_id_value' - - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = metastore.Catalog() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
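- # Sketch of the assumed transcode() contract (google.api_core.path_template):
- # for a real http rule it returns a dict shaped like
- #     {'uri': ..., 'method': ..., 'query_params': ..., 'body': ...}
- # The canned result below reproduces that shape with a dummy URI so the
- # test stays independent of the method's actual http_options.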
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.Catalog.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_catalog(request) - - expected_params = [ - ( - "catalogId", - "", - ), - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_catalog_rest_unset_required_fields(): - transport = transports.MetastoreServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_catalog._get_unset_required_fields({}) - assert set(unset_fields) == (set(("catalogId", )) & set(("parent", "catalog", "catalogId", ))) - - -def test_create_catalog_rest_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metastore.Catalog() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - catalog=metastore.Catalog(name='name_value'), - catalog_id='catalog_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = metastore.Catalog.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_catalog(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1alpha1/{parent=projects/*/locations/*}/catalogs" % client.transport._host, args[1]) - - -def test_create_catalog_rest_flattened_error(transport: str = 'rest'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_catalog( - metastore.CreateCatalogRequest(), - parent='parent_value', - catalog=metastore.Catalog(name='name_value'), - catalog_id='catalog_id_value', - ) - - -def test_delete_catalog_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_catalog in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_catalog] = mock_rpc - - request = {} - client.delete_catalog(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_catalog(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_catalog_rest_required_fields(request_type=metastore.DeleteCatalogRequest): - transport_class = transports.MetastoreServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_catalog._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_catalog._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = metastore.Catalog() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.Catalog.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_catalog(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_catalog_rest_unset_required_fields(): - transport = transports.MetastoreServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_catalog._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_delete_catalog_rest_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metastore.Catalog() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/catalogs/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = metastore.Catalog.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_catalog(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1alpha1/{name=projects/*/locations/*/catalogs/*}" % client.transport._host, args[1]) - - -def test_delete_catalog_rest_flattened_error(transport: str = 'rest'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_catalog( - metastore.DeleteCatalogRequest(), - name='name_value', - ) - - -def test_get_catalog_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_catalog in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_catalog] = mock_rpc - - request = {} - client.get_catalog(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_catalog(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_catalog_rest_required_fields(request_type=metastore.GetCatalogRequest): - transport_class = transports.MetastoreServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_catalog._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_catalog._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = metastore.Catalog() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.Catalog.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_catalog(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_catalog_rest_unset_required_fields(): - transport = transports.MetastoreServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_catalog._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_catalog_rest_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metastore.Catalog() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/catalogs/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = metastore.Catalog.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_catalog(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1alpha1/{name=projects/*/locations/*/catalogs/*}" % client.transport._host, args[1]) - - -def test_get_catalog_rest_flattened_error(transport: str = 'rest'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_catalog( - metastore.GetCatalogRequest(), - name='name_value', - ) - - -def test_list_catalogs_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_catalogs in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_catalogs] = mock_rpc - - request = {} - client.list_catalogs(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_catalogs(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_catalogs_rest_required_fields(request_type=metastore.ListCatalogsRequest): - transport_class = transports.MetastoreServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_catalogs._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_catalogs._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = metastore.ListCatalogsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.ListCatalogsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_catalogs(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_catalogs_rest_unset_required_fields(): - transport = transports.MetastoreServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_catalogs._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_catalogs_rest_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metastore.ListCatalogsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = metastore.ListCatalogsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_catalogs(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1alpha1/{parent=projects/*/locations/*}/catalogs" % client.transport._host, args[1]) - - -def test_list_catalogs_rest_flattened_error(transport: str = 'rest'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_catalogs( - metastore.ListCatalogsRequest(), - parent='parent_value', - ) - - -def test_list_catalogs_rest_pager(transport: str = 'rest'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
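- # Note on the pager flow below, assuming requests.Session semantics: each
- # page is serialized with ListCatalogsResponse.to_json, wrapped in a
- # requests.Response whose _content carries the encoded payload, and handed
- # to the mocked session via side_effect, one HTTP response per page fetch.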
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - metastore.ListCatalogsResponse( - catalogs=[ - metastore.Catalog(), - metastore.Catalog(), - metastore.Catalog(), - ], - next_page_token='abc', - ), - metastore.ListCatalogsResponse( - catalogs=[], - next_page_token='def', - ), - metastore.ListCatalogsResponse( - catalogs=[ - metastore.Catalog(), - ], - next_page_token='ghi', - ), - metastore.ListCatalogsResponse( - catalogs=[ - metastore.Catalog(), - metastore.Catalog(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(metastore.ListCatalogsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - pager = client.list_catalogs(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, metastore.Catalog) - for i in results) - - pages = list(client.list_catalogs(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_create_database_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_database in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_database] = mock_rpc - - request = {} - client.create_database(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_database(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_database_rest_required_fields(request_type=metastore.CreateDatabaseRequest): - transport_class = transports.MetastoreServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["database_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "databaseId" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_database._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "databaseId" in jsonified_request - assert jsonified_request["databaseId"] == request_init["database_id"] - - jsonified_request["parent"] = 'parent_value' - jsonified_request["databaseId"] = 'database_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_database._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("database_id", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "databaseId" in jsonified_request - assert jsonified_request["databaseId"] == 'database_id_value' - - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = metastore.Database() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.Database.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_database(request) - - expected_params = [ - ( - "databaseId", - "", - ), - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_database_rest_unset_required_fields(): - transport = transports.MetastoreServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_database._get_unset_required_fields({}) - assert set(unset_fields) == (set(("databaseId", )) & set(("parent", "database", "databaseId", ))) - - -def test_create_database_rest_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metastore.Database() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/catalogs/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - database=metastore.Database(hive_options=metastore.HiveDatabaseOptions(location_uri='location_uri_value')), - database_id='database_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = metastore.Database.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_database(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1alpha1/{parent=projects/*/locations/*/catalogs/*}/databases" % client.transport._host, args[1]) - - -def test_create_database_rest_flattened_error(transport: str = 'rest'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
-    with pytest.raises(ValueError):
-        client.create_database(
-            metastore.CreateDatabaseRequest(),
-            parent='parent_value',
-            database=metastore.Database(hive_options=metastore.HiveDatabaseOptions(location_uri='location_uri_value')),
-            database_id='database_id_value',
-        )
-
-
-def test_delete_database_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = MetastoreServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.delete_database in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expects a string.
-        client._transport._wrapped_methods[client._transport.delete_database] = mock_rpc
-
-        request = {}
-        client.delete_database(request)
-
-        # Establish that the underlying stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.delete_database(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_delete_database_rest_required_fields(request_type=metastore.DeleteDatabaseRequest):
-    transport_class = transports.MetastoreServiceRestTransport
-
-    request_init = {}
-    request_init["name"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_database._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["name"] = 'name_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_database._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
-
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = metastore.Database()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
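-            # Note: DELETE methods carry no request body, so unlike the
-            # create/update stubs the transcode result below has no 'body'
-            # key and the request fields travel solely as query_params.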
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "delete",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = metastore.Database.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.delete_database(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_delete_database_rest_unset_required_fields():
-    transport = transports.MetastoreServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.delete_database._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-def test_delete_database_rest_flattened():
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = metastore.Database()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            name='name_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = metastore.Database.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.delete_database(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1alpha1/{name=projects/*/locations/*/catalogs/*/databases/*}" % client.transport._host, args[1])
-
-
-def test_delete_database_rest_flattened_error(transport: str = 'rest'):
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_database(
-            metastore.DeleteDatabaseRequest(),
-            name='name_value',
-        )
-
-
-def test_update_database_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = MetastoreServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.update_database in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expects a string.
-        client._transport._wrapped_methods[client._transport.update_database] = mock_rpc
-
-        request = {}
-        client.update_database(request)
-
-        # Establish that the underlying stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.update_database(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_update_database_rest_required_fields(request_type=metastore.UpdateDatabaseRequest):
-    transport_class = transports.MetastoreServiceRestTransport
-
-    request_init = {}
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_database._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_database._get_unset_required_fields(jsonified_request)
-    # Check that path parameters and body parameters are not mixing in.
-    assert not set(unset_fields) - set(("update_mask", ))
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = metastore.Database()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
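-            # UpdateDatabase has no required path parameters (request_init
-            # stays empty above); only update_mask may surface as a query
-            # parameter, while the database itself rides in the PATCH body.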
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "patch",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = metastore.Database.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.update_database(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_update_database_rest_unset_required_fields():
-    transport = transports.MetastoreServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.update_database._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("updateMask", )) & set(("database", )))
-
-
-def test_update_database_rest_flattened():
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = metastore.Database()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'database': {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4'}}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            database=metastore.Database(hive_options=metastore.HiveDatabaseOptions(location_uri='location_uri_value')),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = metastore.Database.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.update_database(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1alpha1/{database.name=projects/*/locations/*/catalogs/*/databases/*}" % client.transport._host, args[1])
-
-
-def test_update_database_rest_flattened_error(transport: str = 'rest'):
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.update_database(
-            metastore.UpdateDatabaseRequest(),
-            database=metastore.Database(hive_options=metastore.HiveDatabaseOptions(location_uri='location_uri_value')),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-
-def test_get_database_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = MetastoreServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.get_database in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expects a string.
-        client._transport._wrapped_methods[client._transport.get_database] = mock_rpc
-
-        request = {}
-        client.get_database(request)
-
-        # Establish that the underlying stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.get_database(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_get_database_rest_required_fields(request_type=metastore.GetDatabaseRequest):
-    transport_class = transports.MetastoreServiceRestTransport
-
-    request_init = {}
-    request_init["name"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_database._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["name"] = 'name_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_database._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
-
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = metastore.Database()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = metastore.Database.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.get_database(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_get_database_rest_unset_required_fields():
-    transport = transports.MetastoreServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.get_database._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-def test_get_database_rest_flattened():
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = metastore.Database()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            name='name_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = metastore.Database.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.get_database(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1alpha1/{name=projects/*/locations/*/catalogs/*/databases/*}" % client.transport._host, args[1])
-
-
-def test_get_database_rest_flattened_error(transport: str = 'rest'):
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_database(
-            metastore.GetDatabaseRequest(),
-            name='name_value',
-        )
-
-
-def test_list_databases_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = MetastoreServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.list_databases in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expects a string.
-        client._transport._wrapped_methods[client._transport.list_databases] = mock_rpc
-
-        request = {}
-        client.list_databases(request)
-
-        # Establish that the underlying stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.list_databases(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_list_databases_rest_required_fields(request_type=metastore.ListDatabasesRequest):
-    transport_class = transports.MetastoreServiceRestTransport
-
-    request_init = {}
-    request_init["parent"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_databases._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["parent"] = 'parent_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_databases._get_unset_required_fields(jsonified_request)
-    # Check that path parameters and body parameters are not mixing in.
-    assert not set(unset_fields) - set(("page_size", "page_token", ))
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "parent" in jsonified_request
-    assert jsonified_request["parent"] == 'parent_value'
-
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = metastore.ListDatabasesResponse()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = metastore.ListDatabasesResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.list_databases(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_list_databases_rest_unset_required_fields():
-    transport = transports.MetastoreServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.list_databases._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", )))
-
-
-def test_list_databases_rest_flattened():
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = metastore.ListDatabasesResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'parent': 'projects/sample1/locations/sample2/catalogs/sample3'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            parent='parent_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = metastore.ListDatabasesResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.list_databases(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1alpha1/{parent=projects/*/locations/*/catalogs/*}/databases" % client.transport._host, args[1])
-
-
-def test_list_databases_rest_flattened_error(transport: str = 'rest'):
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_databases(
-            metastore.ListDatabasesRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_databases_rest_pager(transport: str = 'rest'):
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
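-        # transcode() is not mocked here: the pager exercises real request
-        # transcoding, and paging is driven entirely by the faked HTTP
-        # responses below, ending at the page whose next_page_token is ''.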
-        # Set the response as a series of pages
-        response = (
-            metastore.ListDatabasesResponse(
-                databases=[
-                    metastore.Database(),
-                    metastore.Database(),
-                    metastore.Database(),
-                ],
-                next_page_token='abc',
-            ),
-            metastore.ListDatabasesResponse(
-                databases=[],
-                next_page_token='def',
-            ),
-            metastore.ListDatabasesResponse(
-                databases=[
-                    metastore.Database(),
-                ],
-                next_page_token='ghi',
-            ),
-            metastore.ListDatabasesResponse(
-                databases=[
-                    metastore.Database(),
-                    metastore.Database(),
-                ],
-            ),
-        )
-        # Two responses for two calls
-        response = response + response
-
-        # Wrap the values into proper Response objs
-        response = tuple(metastore.ListDatabasesResponse.to_json(x) for x in response)
-        return_values = tuple(Response() for i in response)
-        for return_val, response_val in zip(return_values, response):
-            return_val._content = response_val.encode('UTF-8')
-            return_val.status_code = 200
-        req.side_effect = return_values
-
-        sample_request = {'parent': 'projects/sample1/locations/sample2/catalogs/sample3'}
-
-        pager = client.list_databases(request=sample_request)
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, metastore.Database)
-                   for i in results)
-
-        pages = list(client.list_databases(request=sample_request).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-
-def test_create_table_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = MetastoreServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.create_table in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expects a string.
-        client._transport._wrapped_methods[client._transport.create_table] = mock_rpc
-
-        request = {}
-        client.create_table(request)
-
-        # Establish that the underlying stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.create_table(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_create_table_rest_required_fields(request_type=metastore.CreateTableRequest):
-    transport_class = transports.MetastoreServiceRestTransport
-
-    request_init = {}
-    request_init["parent"] = ""
-    request_init["table_id"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-    assert "tableId" not in jsonified_request
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_table._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-    assert "tableId" in jsonified_request
-    assert jsonified_request["tableId"] == request_init["table_id"]
-
-    jsonified_request["parent"] = 'parent_value'
-    jsonified_request["tableId"] = 'table_id_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_table._get_unset_required_fields(jsonified_request)
-    # Check that path parameters and body parameters are not mixing in.
-    assert not set(unset_fields) - set(("table_id", ))
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "parent" in jsonified_request
-    assert jsonified_request["parent"] == 'parent_value'
-    assert "tableId" in jsonified_request
-    assert jsonified_request["tableId"] == 'table_id_value'
-
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = metastore.Table()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = metastore.Table.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.create_table(request)
-
-            expected_params = [
-                (
-                    "tableId",
-                    "",
-                ),
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_create_table_rest_unset_required_fields():
-    transport = transports.MetastoreServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.create_table._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("tableId", )) & set(("parent", "table", "tableId", )))
-
-
-def test_create_table_rest_flattened():
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = metastore.Table()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'parent': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            parent='parent_value',
-            table=metastore.Table(hive_options=metastore.HiveTableOptions(parameters={'key_value': 'value_value'})),
-            table_id='table_id_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = metastore.Table.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.create_table(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1alpha1/{parent=projects/*/locations/*/catalogs/*/databases/*}/tables" % client.transport._host, args[1])
-
-
-def test_create_table_rest_flattened_error(transport: str = 'rest'):
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.create_table(
-            metastore.CreateTableRequest(),
-            parent='parent_value',
-            table=metastore.Table(hive_options=metastore.HiveTableOptions(parameters={'key_value': 'value_value'})),
-            table_id='table_id_value',
-        )
-
-
-def test_delete_table_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = MetastoreServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.delete_table in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expects a string.
-        client._transport._wrapped_methods[client._transport.delete_table] = mock_rpc
-
-        request = {}
-        client.delete_table(request)
-
-        # Establish that the underlying stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.delete_table(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_delete_table_rest_required_fields(request_type=metastore.DeleteTableRequest):
-    transport_class = transports.MetastoreServiceRestTransport
-
-    request_init = {}
-    request_init["name"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_table._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["name"] = 'name_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_table._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
-
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = metastore.Table()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "delete",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = metastore.Table.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.delete_table(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_delete_table_rest_unset_required_fields():
-    transport = transports.MetastoreServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.delete_table._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-def test_delete_table_rest_flattened():
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = metastore.Table()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            name='name_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = metastore.Table.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.delete_table(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1alpha1/{name=projects/*/locations/*/catalogs/*/databases/*/tables/*}" % client.transport._host, args[1])
-
-
-def test_delete_table_rest_flattened_error(transport: str = 'rest'):
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_table(
-            metastore.DeleteTableRequest(),
-            name='name_value',
-        )
-
-
-def test_update_table_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = MetastoreServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.update_table in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expects a string.
-        client._transport._wrapped_methods[client._transport.update_table] = mock_rpc
-
-        request = {}
-        client.update_table(request)
-
-        # Establish that the underlying stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.update_table(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_update_table_rest_required_fields(request_type=metastore.UpdateTableRequest):
-    transport_class = transports.MetastoreServiceRestTransport
-
-    request_init = {}
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_table._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_table._get_unset_required_fields(jsonified_request)
-    # Check that path parameters and body parameters are not mixing in.
-    assert not set(unset_fields) - set(("update_mask", ))
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = metastore.Table()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "patch",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = metastore.Table.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.update_table(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_update_table_rest_unset_required_fields():
-    transport = transports.MetastoreServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.update_table._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("updateMask", )) & set(("table", )))
-
-
-def test_update_table_rest_flattened():
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = metastore.Table()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'table': {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5'}}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            table=metastore.Table(hive_options=metastore.HiveTableOptions(parameters={'key_value': 'value_value'})),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = metastore.Table.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.update_table(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1alpha1/{table.name=projects/*/locations/*/catalogs/*/databases/*/tables/*}" % client.transport._host, args[1])
-
-
-def test_update_table_rest_flattened_error(transport: str = 'rest'):
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.update_table(
-            metastore.UpdateTableRequest(),
-            table=metastore.Table(hive_options=metastore.HiveTableOptions(parameters={'key_value': 'value_value'})),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-
-def test_rename_table_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = MetastoreServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.rename_table in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expects a string.
-        client._transport._wrapped_methods[client._transport.rename_table] = mock_rpc
-
-        request = {}
-        client.rename_table(request)
-
-        # Establish that the underlying stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.rename_table(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_rename_table_rest_required_fields(request_type=metastore.RenameTableRequest):
-    transport_class = transports.MetastoreServiceRestTransport
-
-    request_init = {}
-    request_init["name"] = ""
-    request_init["new_name"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).rename_table._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["name"] = 'name_value'
-    jsonified_request["newName"] = 'new_name_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).rename_table._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
-    assert "newName" in jsonified_request
-    assert jsonified_request["newName"] == 'new_name_value'
-
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = metastore.Table()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = metastore.Table.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.rename_table(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_rename_table_rest_unset_required_fields():
-    transport = transports.MetastoreServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.rename_table._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("name", "newName", )))
-
-
-def test_rename_table_rest_flattened():
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = metastore.Table()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            name='name_value',
-            new_name='new_name_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = metastore.Table.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.rename_table(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1alpha1/{name=projects/*/locations/*/catalogs/*/databases/*/tables/*}:rename" % client.transport._host, args[1])
-
-
-def test_rename_table_rest_flattened_error(transport: str = 'rest'):
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.rename_table(
-            metastore.RenameTableRequest(),
-            name='name_value',
-            new_name='new_name_value',
-        )
-
-
-def test_get_table_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = MetastoreServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.get_table in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expects a string.
-        client._transport._wrapped_methods[client._transport.get_table] = mock_rpc
-
-        request = {}
-        client.get_table(request)
-
-        # Establish that the underlying stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.get_table(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_get_table_rest_required_fields(request_type=metastore.GetTableRequest):
-    transport_class = transports.MetastoreServiceRestTransport
-
-    request_init = {}
-    request_init["name"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_table._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["name"] = 'name_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_table._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
-
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = metastore.Table()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = metastore.Table.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.get_table(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_get_table_rest_unset_required_fields():
-    transport = transports.MetastoreServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.get_table._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-def test_get_table_rest_flattened():
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = metastore.Table()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            name='name_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = metastore.Table.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.get_table(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1alpha1/{name=projects/*/locations/*/catalogs/*/databases/*/tables/*}" % client.transport._host, args[1])
-
-
-def test_get_table_rest_flattened_error(transport: str = 'rest'):
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - client.get_table( - metastore.GetTableRequest(), - name='name_value', - ) - - -def test_list_tables_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_tables in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_tables] = mock_rpc - - request = {} - client.list_tables(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_tables(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_tables_rest_required_fields(request_type=metastore.ListTablesRequest): - transport_class = transports.MetastoreServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_tables._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_tables._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", "view", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = metastore.ListTablesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = metastore.ListTablesResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.list_tables(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_list_tables_rest_unset_required_fields():
-    transport = transports.MetastoreServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.list_tables._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("pageSize", "pageToken", "view", )) & set(("parent", )))
-
-
-def test_list_tables_rest_flattened():
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = metastore.ListTablesResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'parent': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            parent='parent_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = metastore.ListTablesResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.list_tables(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1alpha1/{parent=projects/*/locations/*/catalogs/*/databases/*}/tables" % client.transport._host, args[1])
-
-
-def test_list_tables_rest_flattened_error(transport: str = 'rest'):
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_tables(
-            metastore.ListTablesRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_tables_rest_pager(transport: str = 'rest'):
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
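-        # Each mocked HTTP response below becomes one page; the pager keeps
-        # fetching while next_page_token is non-empty, so the four pages
-        # yield six Table results per pass.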
-        # Set the response as a series of pages
-        response = (
-            metastore.ListTablesResponse(
-                tables=[
-                    metastore.Table(),
-                    metastore.Table(),
-                    metastore.Table(),
-                ],
-                next_page_token='abc',
-            ),
-            metastore.ListTablesResponse(
-                tables=[],
-                next_page_token='def',
-            ),
-            metastore.ListTablesResponse(
-                tables=[
-                    metastore.Table(),
-                ],
-                next_page_token='ghi',
-            ),
-            metastore.ListTablesResponse(
-                tables=[
-                    metastore.Table(),
-                    metastore.Table(),
-                ],
-            ),
-        )
-        # Two responses for two calls
-        response = response + response
-
-        # Wrap the values into proper Response objs
-        response = tuple(metastore.ListTablesResponse.to_json(x) for x in response)
-        return_values = tuple(Response() for i in response)
-        for return_val, response_val in zip(return_values, response):
-            return_val._content = response_val.encode('UTF-8')
-            return_val.status_code = 200
-        req.side_effect = return_values
-
-        sample_request = {'parent': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4'}
-
-        pager = client.list_tables(request=sample_request)
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, metastore.Table)
-                   for i in results)
-
-        pages = list(client.list_tables(request=sample_request).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-
-def test_create_lock_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = MetastoreServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.create_lock in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.create_lock] = mock_rpc
-
-        request = {}
-        client.create_lock(request)
-
-        # Establish that the underlying gRPC stub method was called.
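-        # (_wrapped_methods maps each bound transport method to its wrapped
-        # callable, so swapping in mock_rpc above intercepts the client call.)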
- assert mock_rpc.call_count == 1 - - client.create_lock(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_lock_rest_required_fields(request_type=metastore.CreateLockRequest): - transport_class = transports.MetastoreServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_lock._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_lock._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = metastore.Lock() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.Lock.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_lock(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_lock_rest_unset_required_fields(): - transport = transports.MetastoreServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_lock._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "lock", ))) - - -def test_create_lock_rest_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = metastore.Lock() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - lock=metastore.Lock(table_id='table_id_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = metastore.Lock.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_lock(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1alpha1/{parent=projects/*/locations/*/catalogs/*/databases/*}/locks" % client.transport._host, args[1]) - - -def test_create_lock_rest_flattened_error(transport: str = 'rest'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_lock( - metastore.CreateLockRequest(), - parent='parent_value', - lock=metastore.Lock(table_id='table_id_value'), - ) - - -def test_delete_lock_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_lock in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_lock] = mock_rpc - - request = {} - client.delete_lock(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_lock(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_lock_rest_required_fields(request_type=metastore.DeleteLockRequest): - transport_class = transports.MetastoreServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_lock._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_lock._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_lock(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_lock_rest_unset_required_fields(): - transport = transports.MetastoreServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_lock._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_delete_lock_rest_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
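-        # DeleteLock returns google.protobuf.Empty, which the client surfaces
-        # as None; the fake response body below is accordingly an empty string.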
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/locks/sample5'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_lock(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1alpha1/{name=projects/*/locations/*/catalogs/*/databases/*/locks/*}" % client.transport._host, args[1]) - - -def test_delete_lock_rest_flattened_error(transport: str = 'rest'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_lock( - metastore.DeleteLockRequest(), - name='name_value', - ) - - -def test_check_lock_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.check_lock in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.check_lock] = mock_rpc - - request = {} - client.check_lock(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.check_lock(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_check_lock_rest_required_fields(request_type=metastore.CheckLockRequest): - transport_class = transports.MetastoreServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).check_lock._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).check_lock._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = metastore.Lock() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.Lock.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.check_lock(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_check_lock_rest_unset_required_fields(): - transport = transports.MetastoreServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.check_lock._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_check_lock_rest_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
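-        # CheckLock POSTs to the ':check' custom verb on the lock resource,
-        # as the path_template assertion below verifies.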
- return_value = metastore.Lock() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/locks/sample5'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = metastore.Lock.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.check_lock(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1alpha1/{name=projects/*/locations/*/catalogs/*/databases/*/locks/*}:check" % client.transport._host, args[1]) - - -def test_check_lock_rest_flattened_error(transport: str = 'rest'): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.check_lock( - metastore.CheckLockRequest(), - name='name_value', - ) - - -def test_list_locks_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_locks in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_locks] = mock_rpc - - request = {} - client.list_locks(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_locks(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_locks_rest_required_fields(request_type=metastore.ListLocksRequest): - transport_class = transports.MetastoreServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_locks._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_locks._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = metastore.ListLocksResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.ListLocksResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_locks(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_locks_rest_unset_required_fields(): - transport = transports.MetastoreServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_locks._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_locks_rest_flattened(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = metastore.ListLocksResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'parent': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            parent='parent_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = metastore.ListLocksResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.list_locks(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1alpha1/{parent=projects/*/locations/*/catalogs/*/databases/*}/locks" % client.transport._host, args[1])
-
-
-def test_list_locks_rest_flattened_error(transport: str = 'rest'):
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_locks(
-            metastore.ListLocksRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_locks_rest_pager(transport: str = 'rest'):
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # Set the response as a series of pages
-        response = (
-            metastore.ListLocksResponse(
-                locks=[
-                    metastore.Lock(),
-                    metastore.Lock(),
-                    metastore.Lock(),
-                ],
-                next_page_token='abc',
-            ),
-            metastore.ListLocksResponse(
-                locks=[],
-                next_page_token='def',
-            ),
-            metastore.ListLocksResponse(
-                locks=[
-                    metastore.Lock(),
-                ],
-                next_page_token='ghi',
-            ),
-            metastore.ListLocksResponse(
-                locks=[
-                    metastore.Lock(),
-                    metastore.Lock(),
-                ],
-            ),
-        )
-        # Two responses for two calls
-        response = response + response
-
-        # Wrap the values into proper Response objs
-        response = tuple(metastore.ListLocksResponse.to_json(x) for x in response)
-        return_values = tuple(Response() for i in response)
-        for return_val, response_val in zip(return_values, response):
-            return_val._content = response_val.encode('UTF-8')
-            return_val.status_code = 200
-        req.side_effect = return_values
-
-        sample_request = {'parent': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4'}
-
-        pager = client.list_locks(request=sample_request)
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, metastore.Lock)
-                   for i in results)
-
-        pages = list(client.list_locks(request=sample_request).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-
-def test_credentials_transport_error():
-    # It is an error to provide credentials and a transport instance.
-    transport = transports.MetastoreServiceGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    with pytest.raises(ValueError):
-        client = MetastoreServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport=transport,
-        )
-
-    # It is an error to provide a credentials file and a transport instance.
-    transport = transports.MetastoreServiceGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    with pytest.raises(ValueError):
-        client = MetastoreServiceClient(
-            client_options={"credentials_file": "credentials.json"},
-            transport=transport,
-        )
-
-    # It is an error to provide an api_key and a transport instance.
-    transport = transports.MetastoreServiceGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    options = client_options.ClientOptions()
-    options.api_key = "api_key"
-    with pytest.raises(ValueError):
-        client = MetastoreServiceClient(
-            client_options=options,
-            transport=transport,
-        )
-
-    # It is an error to provide an api_key and a credential.
-    options = client_options.ClientOptions()
-    options.api_key = "api_key"
-    with pytest.raises(ValueError):
-        client = MetastoreServiceClient(
-            client_options=options,
-            credentials=ga_credentials.AnonymousCredentials()
-        )
-
-    # It is an error to provide scopes and a transport instance.
-    transport = transports.MetastoreServiceGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    with pytest.raises(ValueError):
-        client = MetastoreServiceClient(
-            client_options={"scopes": ["1", "2"]},
-            transport=transport,
-        )
-
-
-def test_transport_instance():
-    # A client may be instantiated with a custom transport instance.
-    transport = transports.MetastoreServiceGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    client = MetastoreServiceClient(transport=transport)
-    assert client.transport is transport
-
-def test_transport_get_channel():
-    # A client may be instantiated with a custom transport instance.
- transport = transports.MetastoreServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.MetastoreServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.MetastoreServiceGrpcTransport, - transports.MetastoreServiceGrpcAsyncIOTransport, - transports.MetastoreServiceRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = MetastoreServiceClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_catalog_empty_call_grpc(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_catalog), - '__call__') as call: - call.return_value = metastore.Catalog() - client.create_catalog(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.CreateCatalogRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_catalog_empty_call_grpc(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_catalog), - '__call__') as call: - call.return_value = metastore.Catalog() - client.delete_catalog(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.DeleteCatalogRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_catalog_empty_call_grpc(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_catalog), - '__call__') as call: - call.return_value = metastore.Catalog() - client.get_catalog(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.GetCatalogRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_list_catalogs_empty_call_grpc(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_catalogs), - '__call__') as call: - call.return_value = metastore.ListCatalogsResponse() - client.list_catalogs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.ListCatalogsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_database_empty_call_grpc(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - call.return_value = metastore.Database() - client.create_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.CreateDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_database_empty_call_grpc(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_database), - '__call__') as call: - call.return_value = metastore.Database() - client.delete_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.DeleteDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_database_empty_call_grpc(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - call.return_value = metastore.Database() - client.update_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.UpdateDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_database_empty_call_grpc(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - call.return_value = metastore.Database() - client.get_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.GetDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. 
request == None and no flattened fields passed, work. -def test_list_databases_empty_call_grpc(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - call.return_value = metastore.ListDatabasesResponse() - client.list_databases(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.ListDatabasesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_table_empty_call_grpc(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_table), - '__call__') as call: - call.return_value = metastore.Table() - client.create_table(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.CreateTableRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_table_empty_call_grpc(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_table), - '__call__') as call: - call.return_value = metastore.Table() - client.delete_table(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.DeleteTableRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_table_empty_call_grpc(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_table), - '__call__') as call: - call.return_value = metastore.Table() - client.update_table(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.UpdateTableRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_rename_table_empty_call_grpc(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.rename_table), - '__call__') as call: - call.return_value = metastore.Table() - client.rename_table(request=None) - - # Establish that the underlying stub method was called. 
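-    # mock_calls[0] unpacks to (name, args, kwargs); args[0] is the request
-    # message the client built from request=None.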
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.RenameTableRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_table_empty_call_grpc(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_table), - '__call__') as call: - call.return_value = metastore.Table() - client.get_table(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.GetTableRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_tables_empty_call_grpc(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_tables), - '__call__') as call: - call.return_value = metastore.ListTablesResponse() - client.list_tables(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.ListTablesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_lock_empty_call_grpc(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_lock), - '__call__') as call: - call.return_value = metastore.Lock() - client.create_lock(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.CreateLockRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_lock_empty_call_grpc(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_lock), - '__call__') as call: - call.return_value = None - client.delete_lock(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.DeleteLockRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_check_lock_empty_call_grpc(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.check_lock), - '__call__') as call: - call.return_value = metastore.Lock() - client.check_lock(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.CheckLockRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_locks_empty_call_grpc(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_locks), - '__call__') as call: - call.return_value = metastore.ListLocksResponse() - client.list_locks(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.ListLocksRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = MetastoreServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_catalog_empty_call_grpc_asyncio(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_catalog), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Catalog( - name='name_value', - )) - await client.create_catalog(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.CreateCatalogRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_catalog_empty_call_grpc_asyncio(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_catalog), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Catalog( - name='name_value', - )) - await client.delete_catalog(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.DeleteCatalogRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_catalog_empty_call_grpc_asyncio(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_catalog), - '__call__') as call: - # Designate an appropriate return value for the call. 
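-        # FakeUnaryUnaryCall wraps the message in an awaitable that mimics a
-        # grpc.aio unary-unary call, so the awaited client method can resolve it.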
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Catalog( - name='name_value', - )) - await client.get_catalog(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.GetCatalogRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_catalogs_empty_call_grpc_asyncio(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_catalogs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.ListCatalogsResponse( - next_page_token='next_page_token_value', - )) - await client.list_catalogs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.ListCatalogsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_database_empty_call_grpc_asyncio(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database( - name='name_value', - type_=metastore.Database.Type.HIVE, - )) - await client.create_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.CreateDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_database_empty_call_grpc_asyncio(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database( - name='name_value', - type_=metastore.Database.Type.HIVE, - )) - await client.delete_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.DeleteDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_database_empty_call_grpc_asyncio(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database( - name='name_value', - type_=metastore.Database.Type.HIVE, - )) - await client.update_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.UpdateDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_database_empty_call_grpc_asyncio(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Database( - name='name_value', - type_=metastore.Database.Type.HIVE, - )) - await client.get_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.GetDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_databases_empty_call_grpc_asyncio(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.ListDatabasesResponse( - next_page_token='next_page_token_value', - )) - await client.list_databases(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.ListDatabasesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_table_empty_call_grpc_asyncio(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_table), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table( - name='name_value', - type_=metastore.Table.Type.HIVE, - etag='etag_value', - )) - await client.create_table(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.CreateTableRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_delete_table_empty_call_grpc_asyncio(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_table), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table( - name='name_value', - type_=metastore.Table.Type.HIVE, - etag='etag_value', - )) - await client.delete_table(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.DeleteTableRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_table_empty_call_grpc_asyncio(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_table), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table( - name='name_value', - type_=metastore.Table.Type.HIVE, - etag='etag_value', - )) - await client.update_table(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.UpdateTableRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_rename_table_empty_call_grpc_asyncio(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.rename_table), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table( - name='name_value', - type_=metastore.Table.Type.HIVE, - etag='etag_value', - )) - await client.rename_table(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.RenameTableRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_table_empty_call_grpc_asyncio(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_table), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Table( - name='name_value', - type_=metastore.Table.Type.HIVE, - etag='etag_value', - )) - await client.get_table(request=None) - - # Establish that the underlying stub method was called. 
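- # (Each entry in mock_calls is a (name, args, kwargs) triple, so args[0]
- # below is the request message the client handed to the stub.)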
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.GetTableRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_tables_empty_call_grpc_asyncio(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_tables), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.ListTablesResponse( - next_page_token='next_page_token_value', - )) - await client.list_tables(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.ListTablesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_lock_empty_call_grpc_asyncio(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_lock), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Lock( - name='name_value', - type_=metastore.Lock.Type.EXCLUSIVE, - state=metastore.Lock.State.WAITING, - )) - await client.create_lock(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.CreateLockRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_lock_empty_call_grpc_asyncio(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_lock), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_lock(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.DeleteLockRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_check_lock_empty_call_grpc_asyncio(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.check_lock), - '__call__') as call: - # Designate an appropriate return value for the call. 
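- # CheckLock returns the Lock resource itself, so a populated message is
- # designated here, unlike delete_lock above, which wraps None for its
- # Empty response.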
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.Lock( - name='name_value', - type_=metastore.Lock.Type.EXCLUSIVE, - state=metastore.Lock.State.WAITING, - )) - await client.check_lock(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.CheckLockRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_locks_empty_call_grpc_asyncio(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_locks), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metastore.ListLocksResponse( - next_page_token='next_page_token_value', - )) - await client.list_locks(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.ListLocksRequest() - - assert args[0] == request_msg - - -def test_transport_kind_rest(): - transport = MetastoreServiceClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" - - -def test_create_catalog_rest_bad_request(request_type=metastore.CreateCatalogRequest): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_catalog(request) - - -@pytest.mark.parametrize("request_type", [ - metastore.CreateCatalogRequest, - dict, -]) -def test_create_catalog_rest_call_success(request_type): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["catalog"] = {'name': 'name_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'delete_time': {}, 'expire_time': {}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = metastore.CreateCatalogRequest.meta.fields["catalog"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
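- # proto-plus message classes expose their fields through `.meta.fields`,
- # while vanilla protobuf classes expose them through `.DESCRIPTOR.fields`;
- # the absence of a DESCRIPTOR attribute is what identifies a proto-plus
- # type below.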
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["catalog"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["catalog"][field])): - del request_init["catalog"][field][i][subfield] - else: - del request_init["catalog"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metastore.Catalog( - name='name_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.Catalog.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_catalog(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, metastore.Catalog) - assert response.name == 'name_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_catalog_rest_interceptors(null_interceptor): - transport = transports.MetastoreServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetastoreServiceRestInterceptor(), - ) - client = MetastoreServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_create_catalog") as post, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_create_catalog_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "pre_create_catalog") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = metastore.CreateCatalogRequest.pb(metastore.CreateCatalogRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = metastore.Catalog.to_json(metastore.Catalog()) - req.return_value.content = return_value - - request = metastore.CreateCatalogRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = metastore.Catalog() - post_with_metadata.return_value = metastore.Catalog(), metadata - - client.create_catalog(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_catalog_rest_bad_request(request_type=metastore.DeleteCatalogRequest): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/catalogs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_catalog(request) - - -@pytest.mark.parametrize("request_type", [ - metastore.DeleteCatalogRequest, - dict, -]) -def test_delete_catalog_rest_call_success(request_type): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/catalogs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
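- # Only the fields inspected by the assertions below need explicit values;
- # every other field keeps its proto default.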
- return_value = metastore.Catalog( - name='name_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.Catalog.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_catalog(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Catalog) - assert response.name == 'name_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_catalog_rest_interceptors(null_interceptor): - transport = transports.MetastoreServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetastoreServiceRestInterceptor(), - ) - client = MetastoreServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_delete_catalog") as post, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_delete_catalog_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "pre_delete_catalog") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = metastore.DeleteCatalogRequest.pb(metastore.DeleteCatalogRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = metastore.Catalog.to_json(metastore.Catalog()) - req.return_value.content = return_value - - request = metastore.DeleteCatalogRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = metastore.Catalog() - post_with_metadata.return_value = metastore.Catalog(), metadata - - client.delete_catalog(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_catalog_rest_bad_request(request_type=metastore.GetCatalogRequest): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/catalogs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
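- # google.api_core maps an HTTP 400 status onto core_exceptions.BadRequest,
- # which is what pytest.raises expects here.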
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_catalog(request) - - -@pytest.mark.parametrize("request_type", [ - metastore.GetCatalogRequest, - dict, -]) -def test_get_catalog_rest_call_success(request_type): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/catalogs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metastore.Catalog( - name='name_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.Catalog.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_catalog(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Catalog) - assert response.name == 'name_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_catalog_rest_interceptors(null_interceptor): - transport = transports.MetastoreServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetastoreServiceRestInterceptor(), - ) - client = MetastoreServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_get_catalog") as post, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_get_catalog_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "pre_get_catalog") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = metastore.GetCatalogRequest.pb(metastore.GetCatalogRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = metastore.Catalog.to_json(metastore.Catalog()) - req.return_value.content = return_value - - request = metastore.GetCatalogRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = metastore.Catalog() - post_with_metadata.return_value = metastore.Catalog(), metadata - - client.get_catalog(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - 
pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_catalogs_rest_bad_request(request_type=metastore.ListCatalogsRequest): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_catalogs(request) - - -@pytest.mark.parametrize("request_type", [ - metastore.ListCatalogsRequest, - dict, -]) -def test_list_catalogs_rest_call_success(request_type): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metastore.ListCatalogsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.ListCatalogsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_catalogs(request) - - # Establish that the response is the type that we expect. 
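- # List methods return a pager rather than the raw response; the pager
- # proxies attributes such as next_page_token to the underlying
- # ListCatalogsResponse and fetches further pages lazily on iteration.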
- assert isinstance(response, pagers.ListCatalogsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_catalogs_rest_interceptors(null_interceptor): - transport = transports.MetastoreServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetastoreServiceRestInterceptor(), - ) - client = MetastoreServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_list_catalogs") as post, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_list_catalogs_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "pre_list_catalogs") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = metastore.ListCatalogsRequest.pb(metastore.ListCatalogsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = metastore.ListCatalogsResponse.to_json(metastore.ListCatalogsResponse()) - req.return_value.content = return_value - - request = metastore.ListCatalogsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = metastore.ListCatalogsResponse() - post_with_metadata.return_value = metastore.ListCatalogsResponse(), metadata - - client.list_catalogs(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_database_rest_bad_request(request_type=metastore.CreateDatabaseRequest): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/catalogs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_database(request) - - -@pytest.mark.parametrize("request_type", [ - metastore.CreateDatabaseRequest, - dict, -]) -def test_create_database_rest_call_success(request_type): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/catalogs/sample3'} - request_init["database"] = {'hive_options': {'location_uri': 'location_uri_value', 'parameters': {}}, 'name': 'name_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'delete_time': {}, 'expire_time': {}, 'type_': 1} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = metastore.CreateDatabaseRequest.meta.fields["database"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["database"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if 
field_repeated: - for i in range(0, len(request_init["database"][field])): - del request_init["database"][field][i][subfield] - else: - del request_init["database"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metastore.Database( - name='name_value', - type_=metastore.Database.Type.HIVE, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.Database.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_database(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Database) - assert response.name == 'name_value' - assert response.type_ == metastore.Database.Type.HIVE - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_database_rest_interceptors(null_interceptor): - transport = transports.MetastoreServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetastoreServiceRestInterceptor(), - ) - client = MetastoreServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_create_database") as post, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_create_database_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "pre_create_database") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = metastore.CreateDatabaseRequest.pb(metastore.CreateDatabaseRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = metastore.Database.to_json(metastore.Database()) - req.return_value.content = return_value - - request = metastore.CreateDatabaseRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = metastore.Database() - post_with_metadata.return_value = metastore.Database(), metadata - - client.create_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_database_rest_bad_request(request_type=metastore.DeleteDatabaseRequest): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a 
BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_database(request) - - -@pytest.mark.parametrize("request_type", [ - metastore.DeleteDatabaseRequest, - dict, -]) -def test_delete_database_rest_call_success(request_type): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metastore.Database( - name='name_value', - type_=metastore.Database.Type.HIVE, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.Database.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_database(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, metastore.Database) - assert response.name == 'name_value' - assert response.type_ == metastore.Database.Type.HIVE - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_database_rest_interceptors(null_interceptor): - transport = transports.MetastoreServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetastoreServiceRestInterceptor(), - ) - client = MetastoreServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_delete_database") as post, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_delete_database_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "pre_delete_database") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = metastore.DeleteDatabaseRequest.pb(metastore.DeleteDatabaseRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = metastore.Database.to_json(metastore.Database()) - req.return_value.content = return_value - - request = metastore.DeleteDatabaseRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = metastore.Database() - post_with_metadata.return_value = metastore.Database(), metadata - - client.delete_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_database_rest_bad_request(request_type=metastore.UpdateDatabaseRequest): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'database': {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_database(request) - - -@pytest.mark.parametrize("request_type", [ - metastore.UpdateDatabaseRequest, - dict, -]) -def test_update_database_rest_call_success(request_type): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'database': {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4'}} - request_init["database"] = {'hive_options': {'location_uri': 'location_uri_value', 'parameters': {}}, 'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'delete_time': {}, 'expire_time': {}, 'type_': 1} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = metastore.UpdateDatabaseRequest.meta.fields["database"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
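- # (The pruning below guards against version skew: sample sub-fields
- # generated against a newer proto may be absent from the installed runtime,
- # and passing them to request_type(**request_init) would raise.)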
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["database"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["database"][field])): - del request_init["database"][field][i][subfield] - else: - del request_init["database"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metastore.Database( - name='name_value', - type_=metastore.Database.Type.HIVE, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.Database.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_database(request) - - # Establish that the response is the type that we expect. 
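- # (`type` would shadow the Python builtin, so proto-plus exposes the field
- # as `type_`.)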
- assert isinstance(response, metastore.Database) - assert response.name == 'name_value' - assert response.type_ == metastore.Database.Type.HIVE - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_database_rest_interceptors(null_interceptor): - transport = transports.MetastoreServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetastoreServiceRestInterceptor(), - ) - client = MetastoreServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_update_database") as post, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_update_database_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "pre_update_database") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = metastore.UpdateDatabaseRequest.pb(metastore.UpdateDatabaseRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = metastore.Database.to_json(metastore.Database()) - req.return_value.content = return_value - - request = metastore.UpdateDatabaseRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = metastore.Database() - post_with_metadata.return_value = metastore.Database(), metadata - - client.update_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_database_rest_bad_request(request_type=metastore.GetDatabaseRequest): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_database(request) - - -@pytest.mark.parametrize("request_type", [ - metastore.GetDatabaseRequest, - dict, -]) -def test_get_database_rest_call_success(request_type): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
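- # type(client.transport._session) resolves to the AuthorizedSession class
- # the REST transport created, so patching its `request` method intercepts
- # the HTTP layer without touching credentials.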
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metastore.Database( - name='name_value', - type_=metastore.Database.Type.HIVE, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.Database.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_database(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Database) - assert response.name == 'name_value' - assert response.type_ == metastore.Database.Type.HIVE - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_database_rest_interceptors(null_interceptor): - transport = transports.MetastoreServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetastoreServiceRestInterceptor(), - ) - client = MetastoreServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_get_database") as post, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_get_database_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "pre_get_database") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = metastore.GetDatabaseRequest.pb(metastore.GetDatabaseRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = metastore.Database.to_json(metastore.Database()) - req.return_value.content = return_value - - request = metastore.GetDatabaseRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = metastore.Database() - post_with_metadata.return_value = metastore.Database(), metadata - - client.get_database(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_databases_rest_bad_request(request_type=metastore.ListDatabasesRequest): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/catalogs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_databases(request) - - -@pytest.mark.parametrize("request_type", [ - metastore.ListDatabasesRequest, - dict, -]) -def test_list_databases_rest_call_success(request_type): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/catalogs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metastore.ListDatabasesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.ListDatabasesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_databases(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDatabasesPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_databases_rest_interceptors(null_interceptor): - transport = transports.MetastoreServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetastoreServiceRestInterceptor(), - ) - client = MetastoreServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_list_databases") as post, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_list_databases_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "pre_list_databases") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = metastore.ListDatabasesRequest.pb(metastore.ListDatabasesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = metastore.ListDatabasesResponse.to_json(metastore.ListDatabasesResponse()) - req.return_value.content = return_value - - request = metastore.ListDatabasesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = metastore.ListDatabasesResponse() - post_with_metadata.return_value = metastore.ListDatabasesResponse(), metadata - - client.list_databases(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_table_rest_bad_request(request_type=metastore.CreateTableRequest): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_table(request) - - -@pytest.mark.parametrize("request_type", [ - metastore.CreateTableRequest, - dict, -]) -def test_create_table_rest_call_success(request_type): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4'} - request_init["table"] = {'hive_options': {'parameters': {}, 'table_type': 'table_type_value', 'storage_descriptor': {'location_uri': 'location_uri_value', 'input_format': 'input_format_value', 'output_format': 'output_format_value', 'serde_info': {'serialization_lib': 'serialization_lib_value'}}}, 'name': 'name_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'delete_time': {}, 'expire_time': {}, 'type_': 1, 'etag': 'etag_value'} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = metastore.CreateTableRequest.meta.fields["table"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
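- # Note that runtime_nested_fields below only pairs a field with its direct
- # sub-fields, matching the pruning loop, which deletes just one level of
- # nested keys.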
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["table"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["table"][field])): - del request_init["table"][field][i][subfield] - else: - del request_init["table"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metastore.Table( - name='name_value', - type_=metastore.Table.Type.HIVE, - etag='etag_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.Table.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_table(request) - - # Establish that the response is the type that we expect. 
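The `get_message_fields` helper above hinges on one check: raw protobuf classes carry a `DESCRIPTOR`, while proto-plus wrappers instead expose class-level `meta.fields`. The same check in isolation, as a minimal sketch (the `describe_message` name is ours):

.. code-block:: python

    def describe_message(msg_cls):
        """Return field names for either a proto-plus or a raw protobuf class."""
        if hasattr(msg_cls, "DESCRIPTOR"):
            # Raw protobuf class: fields hang off the descriptor.
            return [f.name for f in msg_cls.DESCRIPTOR.fields]
        # proto-plus wrapper: fields live in the class-level metadata.
        return list(msg_cls.meta.fields)

    # e.g. describe_message(metastore.Table) and
    # describe_message(type(metastore.Table.pb(metastore.Table())))
    # should agree on the field names.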
- assert isinstance(response, metastore.Table) - assert response.name == 'name_value' - assert response.type_ == metastore.Table.Type.HIVE - assert response.etag == 'etag_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_table_rest_interceptors(null_interceptor): - transport = transports.MetastoreServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetastoreServiceRestInterceptor(), - ) - client = MetastoreServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_create_table") as post, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_create_table_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "pre_create_table") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = metastore.CreateTableRequest.pb(metastore.CreateTableRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = metastore.Table.to_json(metastore.Table()) - req.return_value.content = return_value - - request = metastore.CreateTableRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = metastore.Table() - post_with_metadata.return_value = metastore.Table(), metadata - - client.create_table(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_table_rest_bad_request(request_type=metastore.DeleteTableRequest): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_table(request) - - -@pytest.mark.parametrize("request_type", [ - metastore.DeleteTableRequest, - dict, -]) -def test_delete_table_rest_call_success(request_type): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metastore.Table( - name='name_value', - type_=metastore.Table.Type.HIVE, - etag='etag_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.Table.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_table(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Table) - assert response.name == 'name_value' - assert response.type_ == metastore.Table.Type.HIVE - assert response.etag == 'etag_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_table_rest_interceptors(null_interceptor): - transport = transports.MetastoreServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetastoreServiceRestInterceptor(), - ) - client = MetastoreServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_delete_table") as post, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_delete_table_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "pre_delete_table") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = metastore.DeleteTableRequest.pb(metastore.DeleteTableRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = metastore.Table.to_json(metastore.Table()) - req.return_value.content = return_value - - request = metastore.DeleteTableRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = metastore.Table() - post_with_metadata.return_value = metastore.Table(), metadata - - client.delete_table(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_table_rest_bad_request(request_type=metastore.UpdateTableRequest): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'table': {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_table(request) - - -@pytest.mark.parametrize("request_type", [ - metastore.UpdateTableRequest, - dict, -]) -def test_update_table_rest_call_success(request_type): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'table': {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5'}} - request_init["table"] = {'hive_options': {'parameters': {}, 'table_type': 'table_type_value', 'storage_descriptor': {'location_uri': 'location_uri_value', 'input_format': 'input_format_value', 'output_format': 'output_format_value', 'serde_info': {'serialization_lib': 'serialization_lib_value'}}}, 'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'delete_time': {}, 'expire_time': {}, 'type_': 1, 'etag': 'etag_value'} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = metastore.UpdateTableRequest.meta.fields["table"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["table"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["table"][field])): - del request_init["table"][field][i][subfield] - else: - del request_init["table"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metastore.Table( - name='name_value', - type_=metastore.Table.Type.HIVE, - etag='etag_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.Table.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_table(request) - - # Establish that the response is the type that we expect. 
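The pruning loop above keeps the sample request parseable when the runtime protobuf dependency is older than the one used at generation time. The same idea, reduced to a standalone sketch with a hypothetical field set:

.. code-block:: python

    def prune_unknown_subfields(payload, known_pairs):
        """Drop (field, subfield) pairs absent from known_pairs, in place."""
        for field, value in payload.items():
            entries = value if isinstance(value, list) else [value]
            for entry in entries:
                if isinstance(entry, dict):
                    for subfield in list(entry):
                        if (field, subfield) not in known_pairs:
                            del entry[subfield]

    payload = {"hive_options": {"parameters": {}, "dropped_in_runtime": 1}}
    prune_unknown_subfields(payload, {("hive_options", "parameters")})
    assert payload == {"hive_options": {"parameters": {}}}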
- assert isinstance(response, metastore.Table) - assert response.name == 'name_value' - assert response.type_ == metastore.Table.Type.HIVE - assert response.etag == 'etag_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_table_rest_interceptors(null_interceptor): - transport = transports.MetastoreServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetastoreServiceRestInterceptor(), - ) - client = MetastoreServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_update_table") as post, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_update_table_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "pre_update_table") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = metastore.UpdateTableRequest.pb(metastore.UpdateTableRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = metastore.Table.to_json(metastore.Table()) - req.return_value.content = return_value - - request = metastore.UpdateTableRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = metastore.Table() - post_with_metadata.return_value = metastore.Table(), metadata - - client.update_table(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_rename_table_rest_bad_request(request_type=metastore.RenameTableRequest): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.rename_table(request) - - -@pytest.mark.parametrize("request_type", [ - metastore.RenameTableRequest, - dict, -]) -def test_rename_table_rest_call_success(request_type): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
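Each interceptor test stubs `path_template.transcode` with a dict in the shape the REST transport consumes. Outside of tests, `google.api_core.path_template.transcode` builds that dict from the method's HTTP rule and the request fields; roughly as below (the URI template is illustrative, not this service's actual rule):

.. code-block:: python

    from google.api_core import path_template

    http_options = [{
        "method": "get",
        "uri": "/v1alpha1/{name=projects/*/locations/*/catalogs/*}",
    }]
    transcoded = path_template.transcode(
        http_options, name="projects/p/locations/l/catalogs/c")
    # Expected shape, approximately:
    # {"method": "get",
    #  "uri": "/v1alpha1/projects/p/locations/l/catalogs/c",
    #  "query_params": {}}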
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metastore.Table( - name='name_value', - type_=metastore.Table.Type.HIVE, - etag='etag_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.Table.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.rename_table(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, metastore.Table) - assert response.name == 'name_value' - assert response.type_ == metastore.Table.Type.HIVE - assert response.etag == 'etag_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_rename_table_rest_interceptors(null_interceptor): - transport = transports.MetastoreServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetastoreServiceRestInterceptor(), - ) - client = MetastoreServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_rename_table") as post, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_rename_table_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "pre_rename_table") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = metastore.RenameTableRequest.pb(metastore.RenameTableRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = metastore.Table.to_json(metastore.Table()) - req.return_value.content = return_value - - request = metastore.RenameTableRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = metastore.Table() - post_with_metadata.return_value = metastore.Table(), metadata - - client.rename_table(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_table_rest_bad_request(request_type=metastore.GetTableRequest): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_table(request) - - -@pytest.mark.parametrize("request_type", [ - metastore.GetTableRequest, - dict, -]) -def test_get_table_rest_call_success(request_type): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/tables/sample5'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metastore.Table( - name='name_value', - type_=metastore.Table.Type.HIVE, - etag='etag_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.Table.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_table(request) - - # Establish that the response is the type that we expect. 
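The bad-request tests lean on google-api-core mapping an HTTP 400 response onto `core_exceptions.BadRequest`. That mapping can be exercised directly through the public `from_http_status` helper; a minimal sketch:

.. code-block:: python

    from google.api_core import exceptions as core_exceptions

    exc = core_exceptions.from_http_status(400, "name is malformed")
    assert isinstance(exc, core_exceptions.BadRequest)
    assert exc.code == 400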
- assert isinstance(response, metastore.Table) - assert response.name == 'name_value' - assert response.type_ == metastore.Table.Type.HIVE - assert response.etag == 'etag_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_table_rest_interceptors(null_interceptor): - transport = transports.MetastoreServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetastoreServiceRestInterceptor(), - ) - client = MetastoreServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_get_table") as post, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_get_table_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "pre_get_table") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = metastore.GetTableRequest.pb(metastore.GetTableRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = metastore.Table.to_json(metastore.Table()) - req.return_value.content = return_value - - request = metastore.GetTableRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = metastore.Table() - post_with_metadata.return_value = metastore.Table(), metadata - - client.get_table(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_tables_rest_bad_request(request_type=metastore.ListTablesRequest): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_tables(request) - - -@pytest.mark.parametrize("request_type", [ - metastore.ListTablesRequest, - dict, -]) -def test_list_tables_rest_call_success(request_type): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metastore.ListTablesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.ListTablesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_tables(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTablesPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_tables_rest_interceptors(null_interceptor): - transport = transports.MetastoreServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetastoreServiceRestInterceptor(), - ) - client = MetastoreServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_list_tables") as post, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_list_tables_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "pre_list_tables") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = metastore.ListTablesRequest.pb(metastore.ListTablesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = metastore.ListTablesResponse.to_json(metastore.ListTablesResponse()) - req.return_value.content = return_value - - request = metastore.ListTablesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = metastore.ListTablesResponse() - post_with_metadata.return_value = metastore.ListTablesResponse(), metadata - - client.list_tables(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_lock_rest_bad_request(request_type=metastore.CreateLockRequest): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
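`list_tables` (like `list_databases` and `list_locks`) returns a pager that re-issues the RPC as iteration crosses page boundaries. Typical use against a real backend, sketched with placeholder resource names and the import path assumed from these tests:

.. code-block:: python

    from google.cloud.bigquery_biglake_v1alpha1.services.metastore_service import (
        MetastoreServiceClient,
    )

    client = MetastoreServiceClient(transport="rest")
    pager = client.list_tables(
        parent="projects/p/locations/l/catalogs/c/databases/d")
    for table in pager:
        # Transparently fetches follow-up pages as needed.
        print(table.name)
    # Alternatively, pager.pages yields whole ListTablesResponse messages.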
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.create_lock(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    metastore.CreateLockRequest,
-    dict,
-])
-def test_create_lock_rest_call_success(request_type):
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4'}
-    request_init["lock"] = {'table_id': 'table_id_value', 'name': 'name_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'type_': 1, 'state': 1}
-    # The version of a generated dependency at test runtime may differ from the version used during generation.
-    # Delete any fields which are not present in the current runtime dependency
-    # See https://github.com/googleapis/gapic-generator-python/issues/1748
-
-    # Determine if the message type is proto-plus or protobuf
-    test_field = metastore.CreateLockRequest.meta.fields["lock"]
-
-    def get_message_fields(field):
-        # Given a field which is a message (composite type), return a list with
-        # all the fields of the message.
-        # If the field is not a composite type, return an empty list.
-        message_fields = []
-
-        if hasattr(field, "message") and field.message:
-            is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR")
-
-            if is_field_type_proto_plus_type:
-                message_fields = field.message.meta.fields.values()
-            # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types
-            else: # pragma: NO COVER
-                message_fields = field.message.DESCRIPTOR.fields
-        return message_fields
-
-    runtime_nested_fields = [
-        (field.name, nested_field.name)
-        for field in get_message_fields(test_field)
-        for nested_field in get_message_fields(field)
-    ]
-
-    subfields_not_in_runtime = []
-
-    # For each item in the sample request, create a list of sub fields which are not present at runtime
-    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
-    for field, value in request_init["lock"].items(): # pragma: NO COVER
-        result = None
-        is_repeated = False
-        # For repeated fields
-        if isinstance(value, list) and len(value):
-            is_repeated = True
-            result = value[0]
-        # For fields where the type is another message
-        if isinstance(value, dict):
-            result = value
-
-        if result and hasattr(result, "keys"):
-            for subfield in result.keys():
-                if (field, subfield) not in runtime_nested_fields:
-                    subfields_not_in_runtime.append(
-                        {"field": field, "subfield": subfield, "is_repeated": is_repeated}
-                    )
-
-    # Remove fields from the sample request which are not present in the runtime version of the dependency
-    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
-    for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER
-        field = subfield_to_delete.get("field")
-        field_repeated = subfield_to_delete.get("is_repeated")
-        subfield = subfield_to_delete.get("subfield")
-        if subfield:
-            if field_repeated:
-                for i in range(0, len(request_init["lock"][field])):
-                    del request_init["lock"][field][i][subfield]
-            else:
-                del request_init["lock"][field][subfield]
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = metastore.Lock(
-            name='name_value',
-            type_=metastore.Lock.Type.EXCLUSIVE,
-            state=metastore.Lock.State.WAITING,
-            table_id='table_id_value',
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-
-        # Convert return value to protobuf type
-        return_value = metastore.Lock.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.create_lock(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, metastore.Lock)
-    assert response.name == 'name_value'
-    assert response.type_ == metastore.Lock.Type.EXCLUSIVE
-    assert response.state == metastore.Lock.State.WAITING
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_create_lock_rest_interceptors(null_interceptor):
-    transport = transports.MetastoreServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.MetastoreServiceRestInterceptor(),
-    )
-    client = MetastoreServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_create_lock") as post, \
-        mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_create_lock_with_metadata") as post_with_metadata, \
-        mock.patch.object(transports.MetastoreServiceRestInterceptor, "pre_create_lock") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = metastore.CreateLockRequest.pb(metastore.CreateLockRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = metastore.Lock.to_json(metastore.Lock())
-        req.return_value.content = return_value
-
-        request = metastore.CreateLockRequest()
-        metadata =[
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = metastore.Lock()
-        post_with_metadata.return_value = metastore.Lock(), metadata
-
-        client.create_lock(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_delete_lock_rest_bad_request(request_type=metastore.DeleteLockRequest):
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/locks/sample5'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_lock(request) - - -@pytest.mark.parametrize("request_type", [ - metastore.DeleteLockRequest, - dict, -]) -def test_delete_lock_rest_call_success(request_type): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/locks/sample5'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_lock(request) - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_lock_rest_interceptors(null_interceptor): - transport = transports.MetastoreServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetastoreServiceRestInterceptor(), - ) - client = MetastoreServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "pre_delete_lock") as pre: - pre.assert_not_called() - pb_message = metastore.DeleteLockRequest.pb(metastore.DeleteLockRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = metastore.DeleteLockRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_lock(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_check_lock_rest_bad_request(request_type=metastore.CheckLockRequest): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/locks/sample5'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
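`delete_lock` maps to `google.protobuf.Empty`, so the client returns `None` and the interceptor only exposes a `pre_delete_lock` hook; with no response to post-process, the test above patches no `post_*` methods. An interceptor for such a method can still adjust the outbound call, e.g. (a sketch under the same assumed class and import names as the earlier interceptor example):

.. code-block:: python

    from google.cloud.bigquery_biglake_v1alpha1.services.metastore_service import (
        transports,
    )

    class AuditInterceptor(transports.MetastoreServiceRestInterceptor):
        def pre_delete_lock(self, request, metadata):
            # Append a metadata entry before the request goes out.
            return request, list(metadata) + [("x-audit", "delete-lock")]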
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.check_lock(request) - - -@pytest.mark.parametrize("request_type", [ - metastore.CheckLockRequest, - dict, -]) -def test_check_lock_rest_call_success(request_type): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4/locks/sample5'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metastore.Lock( - name='name_value', - type_=metastore.Lock.Type.EXCLUSIVE, - state=metastore.Lock.State.WAITING, - table_id='table_id_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.Lock.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.check_lock(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, metastore.Lock) - assert response.name == 'name_value' - assert response.type_ == metastore.Lock.Type.EXCLUSIVE - assert response.state == metastore.Lock.State.WAITING - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_check_lock_rest_interceptors(null_interceptor): - transport = transports.MetastoreServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetastoreServiceRestInterceptor(), - ) - client = MetastoreServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_check_lock") as post, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_check_lock_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "pre_check_lock") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = metastore.CheckLockRequest.pb(metastore.CheckLockRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = metastore.Lock.to_json(metastore.Lock()) - req.return_value.content = return_value - - request = metastore.CheckLockRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = metastore.Lock() - post_with_metadata.return_value = metastore.Lock(), metadata - - client.check_lock(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_locks_rest_bad_request(request_type=metastore.ListLocksRequest): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_locks(request) - - -@pytest.mark.parametrize("request_type", [ - metastore.ListLocksRequest, - dict, -]) -def test_list_locks_rest_call_success(request_type): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/catalogs/sample3/databases/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metastore.ListLocksResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = metastore.ListLocksResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_locks(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListLocksPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_locks_rest_interceptors(null_interceptor): - transport = transports.MetastoreServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetastoreServiceRestInterceptor(), - ) - client = MetastoreServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_list_locks") as post, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "post_list_locks_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.MetastoreServiceRestInterceptor, "pre_list_locks") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = metastore.ListLocksRequest.pb(metastore.ListLocksRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = metastore.ListLocksResponse.to_json(metastore.ListLocksResponse()) - req.return_value.content = return_value - - request = metastore.ListLocksRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = metastore.ListLocksResponse() - post_with_metadata.return_value = metastore.ListLocksResponse(), metadata - - client.list_locks(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - -def test_initialize_client_w_rest(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_catalog_empty_call_rest(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_catalog), - '__call__') as call: - client.create_catalog(request=None) - - # Establish that the underlying stub method was called. 
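The `*_empty_call_rest` tests below patch `__call__` on the *type* of the wrapped transport method: an instance call such as `rpc(request)` dispatches through `type(rpc).__call__`, so patching at the class level intercepts the invocation while leaving the client's coercion of `request=None` into a default request message intact. The mechanism in miniature (the `Wrapped` class is hypothetical):

.. code-block:: python

    from unittest import mock

    class Wrapped:
        def __call__(self, request):
            return "real rpc"

    w = Wrapped()
    with mock.patch.object(Wrapped, "__call__") as call:
        w(None)  # dispatches through type(w).__call__, i.e. the mock
    call.assert_called_once()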
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.CreateCatalogRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_catalog_empty_call_rest(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_catalog), - '__call__') as call: - client.delete_catalog(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.DeleteCatalogRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_catalog_empty_call_rest(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_catalog), - '__call__') as call: - client.get_catalog(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.GetCatalogRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_catalogs_empty_call_rest(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_catalogs), - '__call__') as call: - client.list_catalogs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.ListCatalogsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_database_empty_call_rest(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_database), - '__call__') as call: - client.create_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.CreateDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_database_empty_call_rest(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_database), - '__call__') as call: - client.delete_database(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.DeleteDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_database_empty_call_rest(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_database), - '__call__') as call: - client.update_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.UpdateDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_database_empty_call_rest(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_database), - '__call__') as call: - client.get_database(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.GetDatabaseRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_databases_empty_call_rest(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_databases), - '__call__') as call: - client.list_databases(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.ListDatabasesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_table_empty_call_rest(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_table), - '__call__') as call: - client.create_table(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = metastore.CreateTableRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_table_empty_call_rest(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_table), - '__call__') as call: - client.delete_table(request=None) - - # Establish that the underlying stub method was called. 
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = metastore.DeleteTableRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_update_table_empty_call_rest():
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_table),
-            '__call__') as call:
-        client.update_table(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = metastore.UpdateTableRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_rename_table_empty_call_rest():
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.rename_table),
-            '__call__') as call:
-        client.rename_table(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = metastore.RenameTableRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_get_table_empty_call_rest():
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_table),
-            '__call__') as call:
-        client.get_table(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = metastore.GetTableRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_list_tables_empty_call_rest():
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_tables),
-            '__call__') as call:
-        client.list_tables(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = metastore.ListTablesRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_create_lock_empty_call_rest():
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_lock),
-            '__call__') as call:
-        client.create_lock(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = metastore.CreateLockRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_delete_lock_empty_call_rest():
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_lock),
-            '__call__') as call:
-        client.delete_lock(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = metastore.DeleteLockRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_check_lock_empty_call_rest():
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.check_lock),
-            '__call__') as call:
-        client.check_lock(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = metastore.CheckLockRequest()
-
-        assert args[0] == request_msg
-
-
-# This test is a coverage failsafe to make sure that totally empty calls,
-# i.e. request == None and no flattened fields passed, work.
-def test_list_locks_empty_call_rest():
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the actual call, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_locks),
-            '__call__') as call:
-        client.list_locks(request=None)
-
-        # Establish that the underlying stub method was called.
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        request_msg = metastore.ListLocksRequest()
-
-        assert args[0] == request_msg
-
-
-def test_transport_grpc_default():
-    # A client should use the gRPC transport by default.
-    client = MetastoreServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    assert isinstance(
-        client.transport,
-        transports.MetastoreServiceGrpcTransport,
-    )
-
-def test_metastore_service_base_transport_error():
-    # Passing both a credentials object and credentials_file should raise an error
-    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
-        transport = transports.MetastoreServiceTransport(
-            credentials=ga_credentials.AnonymousCredentials(),
-            credentials_file="credentials.json"
-        )
-
-
-def test_metastore_service_base_transport():
-    # Instantiate the base transport.
-    with mock.patch('google.cloud.bigquery_biglake_v1alpha1.services.metastore_service.transports.MetastoreServiceTransport.__init__') as Transport:
-        Transport.return_value = None
-        transport = transports.MetastoreServiceTransport(
-            credentials=ga_credentials.AnonymousCredentials(),
-        )
-
-    # Every method on the transport should just blindly
-    # raise NotImplementedError.
- methods = ( - 'create_catalog', - 'delete_catalog', - 'get_catalog', - 'list_catalogs', - 'create_database', - 'delete_database', - 'update_database', - 'get_database', - 'list_databases', - 'create_table', - 'delete_table', - 'update_table', - 'rename_table', - 'get_table', - 'list_tables', - 'create_lock', - 'delete_lock', - 'check_lock', - 'list_locks', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_metastore_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.bigquery_biglake_v1alpha1.services.metastore_service.transports.MetastoreServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.MetastoreServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_metastore_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.bigquery_biglake_v1alpha1.services.metastore_service.transports.MetastoreServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.MetastoreServiceTransport() - adc.assert_called_once() - - -def test_metastore_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - MetastoreServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.MetastoreServiceGrpcTransport, - transports.MetastoreServiceGrpcAsyncIOTransport, - ], -) -def test_metastore_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/bigquery', 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.MetastoreServiceGrpcTransport, - transports.MetastoreServiceGrpcAsyncIOTransport, - transports.MetastoreServiceRestTransport, - ], -) -def test_metastore_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.MetastoreServiceGrpcTransport, grpc_helpers), - (transports.MetastoreServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_metastore_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "biglake.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="biglake.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.MetastoreServiceGrpcTransport, transports.MetastoreServiceGrpcAsyncIOTransport]) -def test_metastore_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_metastore_service_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.MetastoreServiceRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_metastore_service_host_no_port(transport_name): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='biglake.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'biglake.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://biglake.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_metastore_service_host_with_port(transport_name): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='biglake.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'biglake.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://biglake.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_metastore_service_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = MetastoreServiceClient( - credentials=creds1, - transport=transport_name, - ) - client2 = MetastoreServiceClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.create_catalog._session - session2 = client2.transport.create_catalog._session - assert session1 != session2 - session1 = client1.transport.delete_catalog._session - session2 = client2.transport.delete_catalog._session - assert session1 != session2 - session1 = client1.transport.get_catalog._session - session2 = client2.transport.get_catalog._session - assert session1 != session2 - session1 = client1.transport.list_catalogs._session - session2 = client2.transport.list_catalogs._session - assert session1 != session2 - session1 = client1.transport.create_database._session - session2 = client2.transport.create_database._session - assert session1 != session2 - session1 = client1.transport.delete_database._session - session2 = client2.transport.delete_database._session - assert session1 != session2 - session1 = client1.transport.update_database._session - session2 = client2.transport.update_database._session - assert session1 != session2 - session1 = client1.transport.get_database._session - session2 = client2.transport.get_database._session - assert session1 != session2 - session1 = client1.transport.list_databases._session - session2 = 
client2.transport.list_databases._session - assert session1 != session2 - session1 = client1.transport.create_table._session - session2 = client2.transport.create_table._session - assert session1 != session2 - session1 = client1.transport.delete_table._session - session2 = client2.transport.delete_table._session - assert session1 != session2 - session1 = client1.transport.update_table._session - session2 = client2.transport.update_table._session - assert session1 != session2 - session1 = client1.transport.rename_table._session - session2 = client2.transport.rename_table._session - assert session1 != session2 - session1 = client1.transport.get_table._session - session2 = client2.transport.get_table._session - assert session1 != session2 - session1 = client1.transport.list_tables._session - session2 = client2.transport.list_tables._session - assert session1 != session2 - session1 = client1.transport.create_lock._session - session2 = client2.transport.create_lock._session - assert session1 != session2 - session1 = client1.transport.delete_lock._session - session2 = client2.transport.delete_lock._session - assert session1 != session2 - session1 = client1.transport.check_lock._session - session2 = client2.transport.check_lock._session - assert session1 != session2 - session1 = client1.transport.list_locks._session - session2 = client2.transport.list_locks._session - assert session1 != session2 -def test_metastore_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.MetastoreServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_metastore_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.MetastoreServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.MetastoreServiceGrpcTransport, transports.MetastoreServiceGrpcAsyncIOTransport]) -def test_metastore_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.MetastoreServiceGrpcTransport, transports.MetastoreServiceGrpcAsyncIOTransport]) -def test_metastore_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_catalog_path(): - project = "squid" - location = "clam" - catalog = "whelk" - expected = "projects/{project}/locations/{location}/catalogs/{catalog}".format(project=project, location=location, catalog=catalog, ) - actual = MetastoreServiceClient.catalog_path(project, location, catalog) - assert expected == actual - - -def test_parse_catalog_path(): - expected = { - "project": "octopus", - "location": "oyster", - "catalog": "nudibranch", - } - path = MetastoreServiceClient.catalog_path(**expected) - - # Check that the path construction is reversible. 
- actual = MetastoreServiceClient.parse_catalog_path(path) - assert expected == actual - -def test_database_path(): - project = "cuttlefish" - location = "mussel" - catalog = "winkle" - database = "nautilus" - expected = "projects/{project}/locations/{location}/catalogs/{catalog}/databases/{database}".format(project=project, location=location, catalog=catalog, database=database, ) - actual = MetastoreServiceClient.database_path(project, location, catalog, database) - assert expected == actual - - -def test_parse_database_path(): - expected = { - "project": "scallop", - "location": "abalone", - "catalog": "squid", - "database": "clam", - } - path = MetastoreServiceClient.database_path(**expected) - - # Check that the path construction is reversible. - actual = MetastoreServiceClient.parse_database_path(path) - assert expected == actual - -def test_lock_path(): - project = "whelk" - location = "octopus" - catalog = "oyster" - database = "nudibranch" - lock = "cuttlefish" - expected = "projects/{project}/locations/{location}/catalogs/{catalog}/databases/{database}/locks/{lock}".format(project=project, location=location, catalog=catalog, database=database, lock=lock, ) - actual = MetastoreServiceClient.lock_path(project, location, catalog, database, lock) - assert expected == actual - - -def test_parse_lock_path(): - expected = { - "project": "mussel", - "location": "winkle", - "catalog": "nautilus", - "database": "scallop", - "lock": "abalone", - } - path = MetastoreServiceClient.lock_path(**expected) - - # Check that the path construction is reversible. - actual = MetastoreServiceClient.parse_lock_path(path) - assert expected == actual - -def test_table_path(): - project = "squid" - location = "clam" - catalog = "whelk" - database = "octopus" - table = "oyster" - expected = "projects/{project}/locations/{location}/catalogs/{catalog}/databases/{database}/tables/{table}".format(project=project, location=location, catalog=catalog, database=database, table=table, ) - actual = MetastoreServiceClient.table_path(project, location, catalog, database, table) - assert expected == actual - - -def test_parse_table_path(): - expected = { - "project": "nudibranch", - "location": "cuttlefish", - "catalog": "mussel", - "database": "winkle", - "table": "nautilus", - } - path = MetastoreServiceClient.table_path(**expected) - - # Check that the path construction is reversible. - actual = MetastoreServiceClient.parse_table_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "scallop" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = MetastoreServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "abalone", - } - path = MetastoreServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = MetastoreServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "squid" - expected = "folders/{folder}".format(folder=folder, ) - actual = MetastoreServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "clam", - } - path = MetastoreServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. 
- actual = MetastoreServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "whelk" - expected = "organizations/{organization}".format(organization=organization, ) - actual = MetastoreServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "octopus", - } - path = MetastoreServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = MetastoreServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "oyster" - expected = "projects/{project}".format(project=project, ) - actual = MetastoreServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "nudibranch", - } - path = MetastoreServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = MetastoreServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "cuttlefish" - location = "mussel" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = MetastoreServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "winkle", - "location": "nautilus", - } - path = MetastoreServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = MetastoreServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.MetastoreServiceTransport, '_prep_wrapped_messages') as prep: - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.MetastoreServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = MetastoreServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_transport_close_grpc(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = MetastoreServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close_rest(): - client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - 
client = MetastoreServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. - with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (MetastoreServiceClient, transports.MetastoreServiceGrpcTransport), - (MetastoreServiceAsyncClient, transports.MetastoreServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/.coveragerc b/owl-bot-staging/google-cloud-bigquery-connection/v1/.coveragerc deleted file mode 100644 index 6431327e975a..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/bigquery_connection/__init__.py - google/cloud/bigquery_connection/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/.flake8 b/owl-bot-staging/google-cloud-bigquery-connection/v1/.flake8 deleted file mode 100644 index 29227d4cf419..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. 
- **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/MANIFEST.in b/owl-bot-staging/google-cloud-bigquery-connection/v1/MANIFEST.in deleted file mode 100644 index c1b6698a3c53..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/bigquery_connection *.py -recursive-include google/cloud/bigquery_connection_v1 *.py diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/README.rst b/owl-bot-staging/google-cloud-bigquery-connection/v1/README.rst deleted file mode 100644 index 356a1c2d2950..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/README.rst +++ /dev/null @@ -1,143 +0,0 @@ -Python Client for Google Cloud Bigquery Connection API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Bigquery Connection API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv <your-env> - source <your-env>/bin/activate - <your-env>/bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. code-block:: console - - python3 -m venv <your-env> - <your-env>\Scripts\activate - <your-env>\Scripts\pip.exe install \path\to\library - - -Logging ------- - -This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes. -Note the following: - -#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging. -#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**. -#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below. - - -Simple, environment-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google -logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged -messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging -event.
- -A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log. - -- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc. - -- Invalid logging scopes: :code:`foo`, :code:`123`, etc. - -**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers. - - -Examples -^^^^^^^^ - -- Enabling the default handler for all Google-based loggers - -.. code-block:: console - - export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google - -- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`): - -.. code-block:: console - - export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1 - - -Advanced, code-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -You can also configure a valid logging scope using Python's standard `logging` mechanism. - - -Examples -^^^^^^^^ - -- Configuring a handler for all Google-based loggers - -.. code-block:: python - - import logging - - from google.cloud.translate_v3 import translate - - base_logger = logging.getLogger("google") - base_logger.addHandler(logging.StreamHandler()) - base_logger.setLevel(logging.DEBUG) - -- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`): - -.. code-block:: python - - import logging - - from google.cloud.translate_v3 import translate - - base_logger = logging.getLogger("google.cloud.library_v1") - base_logger.addHandler(logging.StreamHandler()) - base_logger.setLevel(logging.DEBUG) - - -Logging details -~~~~~~~~~~~~~~~ - -#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root - logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set - :code:`logging.getLogger("google").propagate = True` in your code. -#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for - one library, but decide you need to also set up environment-based logging configuration for another library. - - #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual - if the code-based configuration gets applied first. - -#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get - executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured. - (This is the reason for 2.i. above.)
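The propagation notes above can be exercised with nothing beyond the standard :code:`logging` module. A minimal sketch, assuming only the standard library; the root-handler setup and level choices are illustrative, not prescribed by the README:

.. code-block:: python

    import logging

    # Attach a handler to the root logger. Once any client library has been
    # instantiated, the "google" logger stops propagating to the root logger,
    # so this handler will not see client-library events on its own.
    logging.basicConfig(level=logging.DEBUG)

    # Explicitly opt back in to propagation, as described in "Logging details".
    google_logger = logging.getLogger("google")
    google_logger.setLevel(logging.DEBUG)
    google_logger.propagate = True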
diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/docs/_static/custom.css b/owl-bot-staging/google-cloud-bigquery-connection/v1/docs/_static/custom.css deleted file mode 100644 index 06423be0b592..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/docs/_static/custom.css +++ /dev/null @@ -1,3 +0,0 @@ -dl.field-list > dt { - min-width: 100px -} diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/docs/bigquery_connection_v1/connection_service.rst b/owl-bot-staging/google-cloud-bigquery-connection/v1/docs/bigquery_connection_v1/connection_service.rst deleted file mode 100644 index 905920529de7..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/docs/bigquery_connection_v1/connection_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -ConnectionService ------------------------------------ - -.. automodule:: google.cloud.bigquery_connection_v1.services.connection_service - :members: - :inherited-members: - -.. automodule:: google.cloud.bigquery_connection_v1.services.connection_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/docs/bigquery_connection_v1/services_.rst b/owl-bot-staging/google-cloud-bigquery-connection/v1/docs/bigquery_connection_v1/services_.rst deleted file mode 100644 index 317815a9deef..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/docs/bigquery_connection_v1/services_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Bigquery Connection v1 API -==================================================== -.. toctree:: - :maxdepth: 2 - - connection_service diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/docs/bigquery_connection_v1/types_.rst b/owl-bot-staging/google-cloud-bigquery-connection/v1/docs/bigquery_connection_v1/types_.rst deleted file mode 100644 index 9c90aa6e8782..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/docs/bigquery_connection_v1/types_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Bigquery Connection v1 API -================================================= - -.. automodule:: google.cloud.bigquery_connection_v1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/docs/conf.py b/owl-bot-staging/google-cloud-bigquery-connection/v1/docs/conf.py deleted file mode 100644 index fb2e821db8f6..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-bigquery-connection documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. 
- -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = u"google-cloud-bigquery-connection" -copyright = u"2023, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = 'en' - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. 
-# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. 
-# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-bigquery-connection-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-bigquery-connection.tex", - u"google-cloud-bigquery-connection Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-bigquery-connection", - u"Google Cloud Bigquery Connection Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-bigquery-connection", - u"google-cloud-bigquery-connection Documentation", - author, - "google-cloud-bigquery-connection", - "GAPIC library for Google Cloud Bigquery Connection API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. 
-# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/docs/index.rst b/owl-bot-staging/google-cloud-bigquery-connection/v1/docs/index.rst deleted file mode 100644 index 4bb25914466f..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - bigquery_connection_v1/services_ - bigquery_connection_v1/types_ diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection/__init__.py b/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection/__init__.py deleted file mode 100644 index 167dcc60a02b..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection/__init__.py +++ /dev/null @@ -1,65 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.bigquery_connection import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.bigquery_connection_v1.services.connection_service.client import ConnectionServiceClient -from google.cloud.bigquery_connection_v1.services.connection_service.async_client import ConnectionServiceAsyncClient - -from google.cloud.bigquery_connection_v1.types.connection import AwsAccessRole -from google.cloud.bigquery_connection_v1.types.connection import AwsCrossAccountRole -from google.cloud.bigquery_connection_v1.types.connection import AwsProperties -from google.cloud.bigquery_connection_v1.types.connection import AzureProperties -from google.cloud.bigquery_connection_v1.types.connection import CloudResourceProperties -from google.cloud.bigquery_connection_v1.types.connection import CloudSpannerProperties -from google.cloud.bigquery_connection_v1.types.connection import CloudSqlCredential -from google.cloud.bigquery_connection_v1.types.connection import CloudSqlProperties -from google.cloud.bigquery_connection_v1.types.connection import Connection -from google.cloud.bigquery_connection_v1.types.connection import CreateConnectionRequest -from google.cloud.bigquery_connection_v1.types.connection import DeleteConnectionRequest -from google.cloud.bigquery_connection_v1.types.connection import GetConnectionRequest -from google.cloud.bigquery_connection_v1.types.connection import ListConnectionsRequest -from google.cloud.bigquery_connection_v1.types.connection import ListConnectionsResponse -from google.cloud.bigquery_connection_v1.types.connection import MetastoreServiceConfig -from google.cloud.bigquery_connection_v1.types.connection import SalesforceDataCloudProperties -from google.cloud.bigquery_connection_v1.types.connection import SparkHistoryServerConfig -from google.cloud.bigquery_connection_v1.types.connection import SparkProperties -from google.cloud.bigquery_connection_v1.types.connection import UpdateConnectionRequest - -__all__ = ('ConnectionServiceClient', - 'ConnectionServiceAsyncClient', - 'AwsAccessRole', - 'AwsCrossAccountRole', - 'AwsProperties', - 'AzureProperties', - 'CloudResourceProperties', - 'CloudSpannerProperties', - 'CloudSqlCredential', - 'CloudSqlProperties', - 'Connection', - 'CreateConnectionRequest', - 'DeleteConnectionRequest', - 'GetConnectionRequest', - 'ListConnectionsRequest', - 'ListConnectionsResponse', - 'MetastoreServiceConfig', - 'SalesforceDataCloudProperties', - 'SparkHistoryServerConfig', - 'SparkProperties', - 'UpdateConnectionRequest', -) diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection/gapic_version.py b/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection/py.typed b/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection/py.typed deleted file mode 100644 index cf21dff2cc5c..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-bigquery-connection package uses inline types. diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/__init__.py b/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/__init__.py deleted file mode 100644 index 3f15dfa4c0d2..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/__init__.py +++ /dev/null @@ -1,66 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.bigquery_connection_v1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.connection_service import ConnectionServiceClient -from .services.connection_service import ConnectionServiceAsyncClient - -from .types.connection import AwsAccessRole -from .types.connection import AwsCrossAccountRole -from .types.connection import AwsProperties -from .types.connection import AzureProperties -from .types.connection import CloudResourceProperties -from .types.connection import CloudSpannerProperties -from .types.connection import CloudSqlCredential -from .types.connection import CloudSqlProperties -from .types.connection import Connection -from .types.connection import CreateConnectionRequest -from .types.connection import DeleteConnectionRequest -from .types.connection import GetConnectionRequest -from .types.connection import ListConnectionsRequest -from .types.connection import ListConnectionsResponse -from .types.connection import MetastoreServiceConfig -from .types.connection import SalesforceDataCloudProperties -from .types.connection import SparkHistoryServerConfig -from .types.connection import SparkProperties -from .types.connection import UpdateConnectionRequest - -__all__ = ( - 'ConnectionServiceAsyncClient', -'AwsAccessRole', -'AwsCrossAccountRole', -'AwsProperties', -'AzureProperties', -'CloudResourceProperties', -'CloudSpannerProperties', -'CloudSqlCredential', -'CloudSqlProperties', -'Connection', -'ConnectionServiceClient', -'CreateConnectionRequest', -'DeleteConnectionRequest', -'GetConnectionRequest', -'ListConnectionsRequest', -'ListConnectionsResponse', -'MetastoreServiceConfig', -'SalesforceDataCloudProperties', -'SparkHistoryServerConfig', -'SparkProperties', -'UpdateConnectionRequest', -) diff --git 
a/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/gapic_metadata.json b/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/gapic_metadata.json deleted file mode 100644 index df674495dc33..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/gapic_metadata.json +++ /dev/null @@ -1,148 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.bigquery_connection_v1", - "protoPackage": "google.cloud.bigquery.connection.v1", - "schema": "1.0", - "services": { - "ConnectionService": { - "clients": { - "grpc": { - "libraryClient": "ConnectionServiceClient", - "rpcs": { - "CreateConnection": { - "methods": [ - "create_connection" - ] - }, - "DeleteConnection": { - "methods": [ - "delete_connection" - ] - }, - "GetConnection": { - "methods": [ - "get_connection" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - "ListConnections": { - "methods": [ - "list_connections" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UpdateConnection": { - "methods": [ - "update_connection" - ] - } - } - }, - "grpc-async": { - "libraryClient": "ConnectionServiceAsyncClient", - "rpcs": { - "CreateConnection": { - "methods": [ - "create_connection" - ] - }, - "DeleteConnection": { - "methods": [ - "delete_connection" - ] - }, - "GetConnection": { - "methods": [ - "get_connection" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - "ListConnections": { - "methods": [ - "list_connections" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UpdateConnection": { - "methods": [ - "update_connection" - ] - } - } - }, - "rest": { - "libraryClient": "ConnectionServiceClient", - "rpcs": { - "CreateConnection": { - "methods": [ - "create_connection" - ] - }, - "DeleteConnection": { - "methods": [ - "delete_connection" - ] - }, - "GetConnection": { - "methods": [ - "get_connection" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - "ListConnections": { - "methods": [ - "list_connections" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UpdateConnection": { - "methods": [ - "update_connection" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/gapic_version.py b/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/py.typed b/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/py.typed deleted file mode 100644 index cf21dff2cc5c..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-bigquery-connection package uses inline types. diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/services/__init__.py b/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/services/__init__.py deleted file mode 100644 index 8f6cf068242c..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/services/connection_service/__init__.py b/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/services/connection_service/__init__.py deleted file mode 100644 index bedfac3b152f..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/services/connection_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
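A note on the gapic_metadata.json removed above: it maps each proto RPC of ConnectionService to the corresponding Python method name per transport ("grpc", "grpc-async", "rest"), so tooling can resolve an RPC to a client method without importing the library. A minimal sketch of consuming that mapping (the file path is illustrative; the script is not part of the package):

.. code-block:: python

    import json

    # Load the metadata file whose contents appear in the diff above.
    with open("gapic_metadata.json") as f:
        metadata = json.load(f)

    # Resolve the CreateConnection RPC on the grpc transport to the
    # method name exposed by ConnectionServiceClient.
    rpcs = metadata["services"]["ConnectionService"]["clients"]["grpc"]["rpcs"]
    print(rpcs["CreateConnection"]["methods"])  # ['create_connection']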
-# -from .client import ConnectionServiceClient -from .async_client import ConnectionServiceAsyncClient - -__all__ = ( - 'ConnectionServiceClient', - 'ConnectionServiceAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/services/connection_service/async_client.py b/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/services/connection_service/async_client.py deleted file mode 100644 index c2330bd048f3..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/services/connection_service/async_client.py +++ /dev/null @@ -1,1238 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.bigquery_connection_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.cloud.bigquery_connection_v1.services.connection_service import pagers -from google.cloud.bigquery_connection_v1.types import connection -from google.cloud.bigquery_connection_v1.types import connection as gcbc_connection -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from .transports.base import ConnectionServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import ConnectionServiceGrpcAsyncIOTransport -from .client import ConnectionServiceClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -class ConnectionServiceAsyncClient: - """Manages external data source connections and credentials.""" - - _client: ConnectionServiceClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
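For orientation before the class body continues: the async client delegates to the synchronous ConnectionServiceClient and offers the same construction paths. A minimal sketch, assuming Application Default Credentials are available (the key-file path is a placeholder):

.. code-block:: python

    from google.cloud import bigquery_connection_v1

    # Default construction: credentials are resolved from the environment
    # (Application Default Credentials).
    client = bigquery_connection_v1.ConnectionServiceAsyncClient()

    # Alternative: construct from a service-account key file via the
    # classmethod defined below (path is a placeholder).
    client = bigquery_connection_v1.ConnectionServiceAsyncClient.from_service_account_file(
        "/path/to/key.json")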
- DEFAULT_ENDPOINT = ConnectionServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = ConnectionServiceClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = ConnectionServiceClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = ConnectionServiceClient._DEFAULT_UNIVERSE - - cluster_path = staticmethod(ConnectionServiceClient.cluster_path) - parse_cluster_path = staticmethod(ConnectionServiceClient.parse_cluster_path) - connection_path = staticmethod(ConnectionServiceClient.connection_path) - parse_connection_path = staticmethod(ConnectionServiceClient.parse_connection_path) - service_path = staticmethod(ConnectionServiceClient.service_path) - parse_service_path = staticmethod(ConnectionServiceClient.parse_service_path) - common_billing_account_path = staticmethod(ConnectionServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(ConnectionServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(ConnectionServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(ConnectionServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(ConnectionServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(ConnectionServiceClient.parse_common_organization_path) - common_project_path = staticmethod(ConnectionServiceClient.common_project_path) - parse_common_project_path = staticmethod(ConnectionServiceClient.parse_common_project_path) - common_location_path = staticmethod(ConnectionServiceClient.common_location_path) - parse_common_location_path = staticmethod(ConnectionServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - ConnectionServiceAsyncClient: The constructed client. - """ - return ConnectionServiceClient.from_service_account_info.__func__(ConnectionServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - ConnectionServiceAsyncClient: The constructed client. - """ - return ConnectionServiceClient.from_service_account_file.__func__(ConnectionServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` is provided, use the provided one.
- (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return ConnectionServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> ConnectionServiceTransport: - """Returns the transport used by the client instance. - - Returns: - ConnectionServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = ConnectionServiceClient.get_transport_class - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, ConnectionServiceTransport, Callable[..., ConnectionServiceTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the connection service async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,ConnectionServiceTransport,Callable[..., ConnectionServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the ConnectionServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which can have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2.
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = ConnectionServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.bigquery.connection_v1.ConnectionServiceAsyncClient`.", - extra = { - "serviceName": "google.cloud.bigquery.connection.v1.ConnectionService", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { - "serviceName": "google.cloud.bigquery.connection.v1.ConnectionService", - "credentialsType": None, - } - ) - - async def create_connection(self, - request: Optional[Union[gcbc_connection.CreateConnectionRequest, dict]] = None, - *, - parent: Optional[str] = None, - connection: Optional[gcbc_connection.Connection] = None, - connection_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gcbc_connection.Connection: - r"""Creates a new connection. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_connection_v1 - - async def sample_create_connection(): - # Create a client - client = bigquery_connection_v1.ConnectionServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_connection_v1.CreateConnectionRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_connection(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_connection_v1.types.CreateConnectionRequest, dict]]): - The request object. The request for - [ConnectionService.CreateConnection][google.cloud.bigquery.connection.v1.ConnectionService.CreateConnection]. 
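As the method body below makes explicit, create_connection accepts either a prebuilt request object or the flattened fields (parent, connection, connection_id), and raises ValueError if both are supplied. A hedged sketch of the flattened calling convention (resource names are placeholders):

.. code-block:: python

    import asyncio

    from google.cloud import bigquery_connection_v1

    async def demo_create_connection():
        client = bigquery_connection_v1.ConnectionServiceAsyncClient()

        # Flattened-field form; the client copies these onto the request.
        # Do not combine flattened fields with `request=`.
        return await client.create_connection(
            parent="projects/my-project/locations/us",   # placeholder
            connection=bigquery_connection_v1.Connection(),
            connection_id="my-connection-id",            # placeholder
        )

    asyncio.run(demo_create_connection())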
- parent (:class:`str`): - Required. Parent resource name. Must be in the format - ``projects/{project_id}/locations/{location_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - connection (:class:`google.cloud.bigquery_connection_v1.types.Connection`): - Required. Connection to create. - This corresponds to the ``connection`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - connection_id (:class:`str`): - Optional. Connection id that should - be assigned to the created connection. - - This corresponds to the ``connection_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_connection_v1.types.Connection: - Configuration parameters to establish - connection with an external data source, - except the credential attributes. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, connection, connection_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gcbc_connection.CreateConnectionRequest): - request = gcbc_connection.CreateConnectionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if connection is not None: - request.connection = connection - if connection_id is not None: - request.connection_id = connection_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_connection] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_connection(self, - request: Optional[Union[connection.GetConnectionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> connection.Connection: - r"""Returns specified connection. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_connection_v1 - - async def sample_get_connection(): - # Create a client - client = bigquery_connection_v1.ConnectionServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_connection_v1.GetConnectionRequest( - name="name_value", - ) - - # Make the request - response = await client.get_connection(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_connection_v1.types.GetConnectionRequest, dict]]): - The request object. The request for - [ConnectionService.GetConnection][google.cloud.bigquery.connection.v1.ConnectionService.GetConnection]. - name (:class:`str`): - Required. Name of the requested connection, for example: - ``projects/{project_id}/locations/{location_id}/connections/{connection_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_connection_v1.types.Connection: - Configuration parameters to establish - connection with an external data source, - except the credential attributes. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, connection.GetConnectionRequest): - request = connection.GetConnectionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_connection] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def list_connections(self, - request: Optional[Union[connection.ListConnectionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListConnectionsAsyncPager: - r"""Returns a list of connections in the given project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_connection_v1 - - async def sample_list_connections(): - # Create a client - client = bigquery_connection_v1.ConnectionServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_connection_v1.ListConnectionsRequest( - parent="parent_value", - page_size=951, - ) - - # Make the request - page_result = client.list_connections(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_connection_v1.types.ListConnectionsRequest, dict]]): - The request object. The request for - [ConnectionService.ListConnections][google.cloud.bigquery.connection.v1.ConnectionService.ListConnections]. - parent (:class:`str`): - Required. Parent resource name. Must be in the form: - ``projects/{project_id}/locations/{location_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_connection_v1.services.connection_service.pagers.ListConnectionsAsyncPager: - The response for - [ConnectionService.ListConnections][google.cloud.bigquery.connection.v1.ConnectionService.ListConnections]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, connection.ListConnectionsRequest): - request = connection.ListConnectionsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_connections] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListConnectionsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_connection(self, - request: Optional[Union[gcbc_connection.UpdateConnectionRequest, dict]] = None, - *, - name: Optional[str] = None, - connection: Optional[gcbc_connection.Connection] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gcbc_connection.Connection: - r"""Updates the specified connection. For security - reasons, also resets credential if connection properties - are in the update field mask. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_connection_v1 - - async def sample_update_connection(): - # Create a client - client = bigquery_connection_v1.ConnectionServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_connection_v1.UpdateConnectionRequest( - name="name_value", - ) - - # Make the request - response = await client.update_connection(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_connection_v1.types.UpdateConnectionRequest, dict]]): - The request object. The request for - [ConnectionService.UpdateConnection][google.cloud.bigquery.connection.v1.ConnectionService.UpdateConnection]. - name (:class:`str`): - Required. Name of the connection to update, for example: - ``projects/{project_id}/locations/{location_id}/connections/{connection_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - connection (:class:`google.cloud.bigquery_connection_v1.types.Connection`): - Required. Connection containing the - updated fields. - - This corresponds to the ``connection`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Update mask for the - connection fields to be updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
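The update_mask documented above is consequential: per this method's description, putting connection properties in the mask also resets the stored credential. A minimal sketch of a masked update (``friendly_name`` is an assumed Connection field used only for illustration; resource names are placeholders):

.. code-block:: python

    import asyncio

    from google.cloud import bigquery_connection_v1
    from google.protobuf import field_mask_pb2

    async def demo_update_connection():
        client = bigquery_connection_v1.ConnectionServiceAsyncClient()
        # Only fields named in the mask are written.
        return await client.update_connection(
            name="projects/my-project/locations/us/connections/my-conn",
            connection=bigquery_connection_v1.Connection(friendly_name="new name"),
            update_mask=field_mask_pb2.FieldMask(paths=["friendly_name"]),
        )

    asyncio.run(demo_update_connection())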
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_connection_v1.types.Connection: - Configuration parameters to establish - connection with an external data source, - except the credential attributes. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name, connection, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gcbc_connection.UpdateConnectionRequest): - request = gcbc_connection.UpdateConnectionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if connection is not None: - request.connection = connection - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_connection] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_connection(self, - request: Optional[Union[connection.DeleteConnectionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes connection and associated credential. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_connection_v1 - - async def sample_delete_connection(): - # Create a client - client = bigquery_connection_v1.ConnectionServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_connection_v1.DeleteConnectionRequest( - name="name_value", - ) - - # Make the request - await client.delete_connection(request=request) - - Args: - request (Optional[Union[google.cloud.bigquery_connection_v1.types.DeleteConnectionRequest, dict]]): - The request object. The request for - [ConnectionService.DeleteConnectionRequest][]. - name (:class:`str`): - Required. Name of the deleted connection, for example: - ``projects/{project_id}/locations/{location_id}/connections/{connection_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, connection.DeleteConnectionRequest): - request = connection.DeleteConnectionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_connection] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def get_iam_policy(self, - request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, - *, - resource: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Gets the access control policy for a resource. - Returns an empty policy if the resource exists and does - not have a policy set. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_connection_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - async def sample_get_iam_policy(): - # Create a client - client = bigquery_connection_v1.ConnectionServiceAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.get_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]]): - The request object. Request message for ``GetIamPolicy`` method. - resource (:class:`str`): - REQUIRED: The resource for which the - policy is being requested. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
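Since ``resource`` here is a fully-qualified connection name, the path helpers re-exported on this client pair naturally with the IAM methods. A brief sketch (project, location, and connection IDs are placeholders):

.. code-block:: python

    import asyncio

    from google.cloud import bigquery_connection_v1

    async def demo_get_iam_policy():
        client = bigquery_connection_v1.ConnectionServiceAsyncClient()
        # Build the fully-qualified resource name with the path helper.
        resource = client.connection_path("my-project", "us", "my-connection")
        # Dict requests are coerced via keyword expansion (see the method body).
        return await client.get_iam_policy(request={"resource": resource})

    asyncio.run(demo_get_iam_policy())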
- - **JSON example:** - - :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](\ https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [resource] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - elif not request: - request = iam_policy_pb2.GetIamPolicyRequest(resource=resource) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def set_iam_policy(self, - request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, - *, - resource: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Sets the access control policy on the specified resource. - Replaces any existing policy. - - Can return ``NOT_FOUND``, ``INVALID_ARGUMENT``, and - ``PERMISSION_DENIED`` errors. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_connection_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - async def sample_set_iam_policy(): - # Create a client - client = bigquery_connection_v1.ConnectionServiceAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.set_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]]): - The request object. Request message for ``SetIamPolicy`` method. - resource (:class:`str`): - REQUIRED: The resource for which the - policy is being specified. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
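Because SetIamPolicy replaces the existing policy outright, the usual pattern is read-modify-write, with the policy's etag guarding against concurrent edits. A hedged sketch (role and member are placeholders):

.. code-block:: python

    import asyncio

    from google.cloud import bigquery_connection_v1

    async def demo_grant_role():
        client = bigquery_connection_v1.ConnectionServiceAsyncClient()
        resource = client.connection_path("my-project", "us", "my-connection")

        # Read the current policy; its etag rides back on the write.
        policy = await client.get_iam_policy(request={"resource": resource})

        # Modify: append a binding.
        policy.bindings.add(
            role="roles/bigquery.connectionUser",
            members=["user:someone@example.com"],
        )

        # Write the whole policy back, replacing the previous one.
        return await client.set_iam_policy(
            request={"resource": resource, "policy": policy})

    asyncio.run(demo_grant_role())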
- - **JSON example:** - - :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](\ https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [resource] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - elif not request: - request = iam_policy_pb2.SetIamPolicyRequest(resource=resource) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.set_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def test_iam_permissions(self, - request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, - *, - resource: Optional[str] = None, - permissions: Optional[MutableSequence[str]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Returns permissions that a caller has on the specified resource. - If the resource does not exist, this will return an empty set of - permissions, not a ``NOT_FOUND`` error. - - Note: This operation is designed to be used for building - permission-aware UIs and command-line tools, not for - authorization checking. This operation may "fail open" without - warning. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_connection_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - async def sample_test_iam_permissions(): - # Create a client - client = bigquery_connection_v1.ConnectionServiceAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = await client.test_iam_permissions(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]]): - The request object. Request message for ``TestIamPermissions`` method. - resource (:class:`str`): - REQUIRED: The resource for which the - policy detail is being requested. See - the operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - permissions (:class:`MutableSequence[str]`): - The set of permissions to check for the ``resource``. - Permissions with wildcards (such as '*' or 'storage.*') - are not allowed. For more information see `IAM - Overview <https://cloud.google.com/iam/docs/overview#permissions>`__. - - This corresponds to the ``permissions`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: - Response message for TestIamPermissions method. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [resource, permissions] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - elif not request: - request = iam_policy_pb2.TestIamPermissionsRequest(resource=resource, permissions=permissions) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.test_iam_permissions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain.
- self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def __aenter__(self) -> "ConnectionServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "ConnectionServiceAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/services/connection_service/client.py b/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/services/connection_service/client.py deleted file mode 100644 index 6375e84be66f..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/services/connection_service/client.py +++ /dev/null @@ -1,1624 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.bigquery_connection_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.cloud.bigquery_connection_v1.services.connection_service import pagers -from google.cloud.bigquery_connection_v1.types import connection -from google.cloud.bigquery_connection_v1.types import connection as gcbc_connection -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from .transports.base import ConnectionServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import ConnectionServiceGrpcTransport -from .transports.grpc_asyncio import 
ConnectionServiceGrpcAsyncIOTransport
-from .transports.rest import ConnectionServiceRestTransport
-
-
-class ConnectionServiceClientMeta(type):
-    """Metaclass for the ConnectionService client.
-
-    This provides class-level methods for building and retrieving
-    support objects (e.g. transport) without polluting the client instance
-    objects.
-    """
-    _transport_registry = OrderedDict()  # type: Dict[str, Type[ConnectionServiceTransport]]
-    _transport_registry["grpc"] = ConnectionServiceGrpcTransport
-    _transport_registry["grpc_asyncio"] = ConnectionServiceGrpcAsyncIOTransport
-    _transport_registry["rest"] = ConnectionServiceRestTransport
-
-    def get_transport_class(cls,
-            label: Optional[str] = None,
-        ) -> Type[ConnectionServiceTransport]:
-        """Returns an appropriate transport class.
-
-        Args:
-            label: The name of the desired transport. If none is
-                provided, then the first transport in the registry is used.
-
-        Returns:
-            The transport class to use.
-        """
-        # If a specific transport is requested, return that one.
-        if label:
-            return cls._transport_registry[label]
-
-        # No transport is requested; return the default (that is, the first one
-        # in the dictionary).
-        return next(iter(cls._transport_registry.values()))
-
-
-class ConnectionServiceClient(metaclass=ConnectionServiceClientMeta):
-    """Manages external data source connections and credentials."""
-
-    @staticmethod
-    def _get_default_mtls_endpoint(api_endpoint):
-        """Converts api endpoint to mTLS endpoint.
-
-        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
-        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
-        Args:
-            api_endpoint (Optional[str]): the api endpoint to convert.
-        Returns:
-            str: converted mTLS api endpoint.
-        """
-        if not api_endpoint:
-            return api_endpoint
-
-        mtls_endpoint_re = re.compile(
-            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
-        )
-
-        m = mtls_endpoint_re.match(api_endpoint)
-        name, mtls, sandbox, googledomain = m.groups()
-        if mtls or not googledomain:
-            return api_endpoint
-
-        if sandbox:
-            return api_endpoint.replace(
-                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
-            )
-
-        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
-
-    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
-    DEFAULT_ENDPOINT = "bigqueryconnection.googleapis.com"
-    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
-        DEFAULT_ENDPOINT
-    )
-
-    _DEFAULT_ENDPOINT_TEMPLATE = "bigqueryconnection.{UNIVERSE_DOMAIN}"
-    _DEFAULT_UNIVERSE = "googleapis.com"
-
-    @classmethod
-    def from_service_account_info(cls, info: dict, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            info.
-
-        Args:
-            info (dict): The service account private key info.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            ConnectionServiceClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_info(info)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    @classmethod
-    def from_service_account_file(cls, filename: str, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            file.
-
-        Args:
-            filename (str): The path to the service account private key json
-                file.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            ConnectionServiceClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_file(
-            filename)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    from_service_account_json = from_service_account_file
-
-    @property
-    def transport(self) -> ConnectionServiceTransport:
-        """Returns the transport used by the client instance.
-
-        Returns:
-            ConnectionServiceTransport: The transport used by the client
-                instance.
-        """
-        return self._transport
-
-    @staticmethod
-    def cluster_path(project: str, region: str, cluster: str, ) -> str:
-        """Returns a fully-qualified cluster string."""
-        return "projects/{project}/regions/{region}/clusters/{cluster}".format(project=project, region=region, cluster=cluster, )
-
-    @staticmethod
-    def parse_cluster_path(path: str) -> Dict[str, str]:
-        """Parses a cluster path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/regions/(?P<region>.+?)/clusters/(?P<cluster>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def connection_path(project: str, location: str, connection: str, ) -> str:
-        """Returns a fully-qualified connection string."""
-        return "projects/{project}/locations/{location}/connections/{connection}".format(project=project, location=location, connection=connection, )
-
-    @staticmethod
-    def parse_connection_path(path: str) -> Dict[str, str]:
-        """Parses a connection path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/connections/(?P<connection>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def service_path(project: str, location: str, service: str, ) -> str:
-        """Returns a fully-qualified service string."""
-        return "projects/{project}/locations/{location}/services/{service}".format(project=project, location=location, service=service, )
-
-    @staticmethod
-    def parse_service_path(path: str) -> Dict[str, str]:
-        """Parses a service path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/services/(?P<service>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_billing_account_path(billing_account: str, ) -> str:
-        """Returns a fully-qualified billing_account string."""
-        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
-
-    @staticmethod
-    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
-        """Parse a billing_account path into its component segments."""
-        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_folder_path(folder: str, ) -> str:
-        """Returns a fully-qualified folder string."""
-        return "folders/{folder}".format(folder=folder, )
-
-    @staticmethod
-    def parse_common_folder_path(path: str) -> Dict[str, str]:
-        """Parse a folder path into its component segments."""
-        m = re.match(r"^folders/(?P<folder>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_organization_path(organization: str, ) -> str:
-        """Returns a fully-qualified organization string."""
-        return "organizations/{organization}".format(organization=organization, )
-
-    @staticmethod
-    def parse_common_organization_path(path: str) -> Dict[str, str]:
-        """Parse an organization path into its component segments."""
-        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_project_path(project: str, ) -> str:
-        """Returns a fully-qualified project string."""
-        return "projects/{project}".format(project=project, )
-
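-    # Illustrative sketch: the path helpers above are inverses of each other;
-    # the project/location/connection values here are hypothetical.
-    #
-    #   path = ConnectionServiceClient.connection_path(
-    #       "my-project", "us-central1", "my-connection")
-    #   # -> "projects/my-project/locations/us-central1/connections/my-connection"
-    #   ConnectionServiceClient.parse_connection_path(path)
-    #   # -> {"project": "my-project", "location": "us-central1",
-    #   #     "connection": "my-connection"}
-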
-    @staticmethod
-    def parse_common_project_path(path: str) -> Dict[str, str]:
-        """Parse a project path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_location_path(project: str, location: str, ) -> str:
-        """Returns a fully-qualified location string."""
-        return "projects/{project}/locations/{location}".format(project=project, location=location, )
-
-    @staticmethod
-    def parse_common_location_path(path: str) -> Dict[str, str]:
-        """Parse a location path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @classmethod
-    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
-        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
-
-        The client cert source is determined in the following order:
-        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
-        client cert source is None.
-        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
-        default client cert source exists, use the default one; otherwise the client cert
-        source is None.
-
-        The API endpoint is determined in the following order:
-        (1) if `client_options.api_endpoint` is provided, use the provided one.
-        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
-        default mTLS endpoint; if the environment variable is "never", use the default API
-        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
-        use the default API endpoint.
-
-        More details can be found at https://google.aip.dev/auth/4114.
-
-        Args:
-            client_options (google.api_core.client_options.ClientOptions): Custom options for the
-                client. Only the `api_endpoint` and `client_cert_source` properties may be used
-                in this method.
-
-        Returns:
-            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
-                client cert source to use.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
-        """
-
-        warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
-            DeprecationWarning)
-        if client_options is None:
-            client_options = client_options_lib.ClientOptions()
-        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
-        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
-        if use_client_cert not in ("true", "false"):
-            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
-        if use_mtls_endpoint not in ("auto", "never", "always"):
-            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
-
-        # Figure out the client cert source to use.
-        client_cert_source = None
-        if use_client_cert == "true":
-            if client_options.client_cert_source:
-                client_cert_source = client_options.client_cert_source
-            elif mtls.has_default_client_cert_source():
-                client_cert_source = mtls.default_client_cert_source()
-
-        # Figure out which api endpoint to use.
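-        # Summarizing the precedence implemented below:
-        #   1. an explicit client_options.api_endpoint always wins;
-        #   2. otherwise the mTLS endpoint is used when GOOGLE_API_USE_MTLS_ENDPOINT
-        #      is "always", or is "auto" and a client certificate is available;
-        #   3. otherwise the default API endpoint is used.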
- if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. - - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"]. - """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. 
- """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = ConnectionServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = ConnectionServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = ConnectionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. - """ - universe_domain = ConnectionServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. - """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, ConnectionServiceTransport, Callable[..., ConnectionServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the connection service client. 
-
-        Args:
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-            transport (Optional[Union[str,ConnectionServiceTransport,Callable[..., ConnectionServiceTransport]]]):
-                The transport to use, or a Callable that constructs and returns a new transport.
-                If a Callable is given, it will be called with the same set of initialization
-                arguments as used in the ConnectionServiceTransport constructor.
-                If set to None, a transport is chosen automatically.
-            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
-                Custom options for the client.
-
-                1. The ``api_endpoint`` property can be used to override the
-                default endpoint provided by the client when ``transport`` is
-                not explicitly provided. Only if this property is not set and
-                ``transport`` was not explicitly provided, the endpoint is
-                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
-                variable, which can have one of the following values:
-                "always" (always use the default mTLS endpoint), "never" (always
-                use the default regular endpoint) and "auto" (auto-switch to the
-                default mTLS endpoint if client certificate is present; this is
-                the default value).
-
-                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
-                is "true", then the ``client_cert_source`` property can be used
-                to provide a client certificate for mTLS transport. If
-                not provided, the default SSL client certificate will be used if
-                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
-                set, no client certificate will be used.
-
-                3. The ``universe_domain`` property can be used to override the
-                default "googleapis.com" universe. Note that the ``api_endpoint``
-                property still takes precedence; and ``universe_domain`` is
-                currently not supported for mTLS.
-
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
-        """
-        self._client_options = client_options
-        if isinstance(self._client_options, dict):
-            self._client_options = client_options_lib.from_dict(self._client_options)
-        if self._client_options is None:
-            self._client_options = client_options_lib.ClientOptions()
-        self._client_options = cast(client_options_lib.ClientOptions, self._client_options)
-
-        universe_domain_opt = getattr(self._client_options, 'universe_domain', None)
-
-        self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = ConnectionServiceClient._read_environment_variables()
-        self._client_cert_source = ConnectionServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert)
-        self._universe_domain = ConnectionServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env)
-        self._api_endpoint = None  # updated below, depending on `transport`
-
-        # Initialize the universe domain validation.
-        self._is_universe_domain_valid = False
-
-        if CLIENT_LOGGING_SUPPORTED:  # pragma: NO COVER
-            # Setup logging.
- client_logging.initialize_logging() - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, ConnectionServiceTransport) - if transport_provided: - # transport is a ConnectionServiceTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." - ) - self._transport = cast(ConnectionServiceTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - ConnectionServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[ConnectionServiceTransport], Callable[..., ConnectionServiceTransport]] = ( - ConnectionServiceClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., ConnectionServiceTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.bigquery.connection_v1.ConnectionServiceClient`.", - extra = { - "serviceName": "google.cloud.bigquery.connection.v1.ConnectionService", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.cloud.bigquery.connection.v1.ConnectionService", - "credentialsType": None, - } - ) - - def create_connection(self, - request: Optional[Union[gcbc_connection.CreateConnectionRequest, dict]] = None, - *, - parent: Optional[str] = None, - connection: Optional[gcbc_connection.Connection] = None, - connection_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gcbc_connection.Connection: - r"""Creates a new connection. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_connection_v1 - - def sample_create_connection(): - # Create a client - client = bigquery_connection_v1.ConnectionServiceClient() - - # Initialize request argument(s) - request = bigquery_connection_v1.CreateConnectionRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_connection(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_connection_v1.types.CreateConnectionRequest, dict]): - The request object. The request for - [ConnectionService.CreateConnection][google.cloud.bigquery.connection.v1.ConnectionService.CreateConnection]. - parent (str): - Required. Parent resource name. Must be in the format - ``projects/{project_id}/locations/{location_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - connection (google.cloud.bigquery_connection_v1.types.Connection): - Required. Connection to create. - This corresponds to the ``connection`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - connection_id (str): - Optional. Connection id that should - be assigned to the created connection. - - This corresponds to the ``connection_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_connection_v1.types.Connection: - Configuration parameters to establish - connection with an external data source, - except the credential attributes. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, connection, connection_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gcbc_connection.CreateConnectionRequest): - request = gcbc_connection.CreateConnectionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if parent is not None: - request.parent = parent - if connection is not None: - request.connection = connection - if connection_id is not None: - request.connection_id = connection_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_connection] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_connection(self, - request: Optional[Union[connection.GetConnectionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> connection.Connection: - r"""Returns specified connection. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_connection_v1 - - def sample_get_connection(): - # Create a client - client = bigquery_connection_v1.ConnectionServiceClient() - - # Initialize request argument(s) - request = bigquery_connection_v1.GetConnectionRequest( - name="name_value", - ) - - # Make the request - response = client.get_connection(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_connection_v1.types.GetConnectionRequest, dict]): - The request object. The request for - [ConnectionService.GetConnection][google.cloud.bigquery.connection.v1.ConnectionService.GetConnection]. - name (str): - Required. Name of the requested connection, for example: - ``projects/{project_id}/locations/{location_id}/connections/{connection_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_connection_v1.types.Connection: - Configuration parameters to establish - connection with an external data source, - except the credential attributes. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, connection.GetConnectionRequest): - request = connection.GetConnectionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_connection] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_connections(self, - request: Optional[Union[connection.ListConnectionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListConnectionsPager: - r"""Returns a list of connections in the given project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_connection_v1 - - def sample_list_connections(): - # Create a client - client = bigquery_connection_v1.ConnectionServiceClient() - - # Initialize request argument(s) - request = bigquery_connection_v1.ListConnectionsRequest( - parent="parent_value", - page_size=951, - ) - - # Make the request - page_result = client.list_connections(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.bigquery_connection_v1.types.ListConnectionsRequest, dict]): - The request object. The request for - [ConnectionService.ListConnections][google.cloud.bigquery.connection.v1.ConnectionService.ListConnections]. - parent (str): - Required. Parent resource name. Must be in the form: - ``projects/{project_id}/locations/{location_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- - Returns: - google.cloud.bigquery_connection_v1.services.connection_service.pagers.ListConnectionsPager: - The response for - [ConnectionService.ListConnections][google.cloud.bigquery.connection.v1.ConnectionService.ListConnections]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, connection.ListConnectionsRequest): - request = connection.ListConnectionsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_connections] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListConnectionsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_connection(self, - request: Optional[Union[gcbc_connection.UpdateConnectionRequest, dict]] = None, - *, - name: Optional[str] = None, - connection: Optional[gcbc_connection.Connection] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gcbc_connection.Connection: - r"""Updates the specified connection. For security - reasons, also resets credential if connection properties - are in the update field mask. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_connection_v1 - - def sample_update_connection(): - # Create a client - client = bigquery_connection_v1.ConnectionServiceClient() - - # Initialize request argument(s) - request = bigquery_connection_v1.UpdateConnectionRequest( - name="name_value", - ) - - # Make the request - response = client.update_connection(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_connection_v1.types.UpdateConnectionRequest, dict]): - The request object. The request for - [ConnectionService.UpdateConnection][google.cloud.bigquery.connection.v1.ConnectionService.UpdateConnection]. - name (str): - Required. Name of the connection to update, for example: - ``projects/{project_id}/locations/{location_id}/connections/{connection_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - connection (google.cloud.bigquery_connection_v1.types.Connection): - Required. Connection containing the - updated fields. - - This corresponds to the ``connection`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Update mask for the - connection fields to be updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_connection_v1.types.Connection: - Configuration parameters to establish - connection with an external data source, - except the credential attributes. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name, connection, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gcbc_connection.UpdateConnectionRequest): - request = gcbc_connection.UpdateConnectionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if connection is not None: - request.connection = connection - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_connection] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_connection(self, - request: Optional[Union[connection.DeleteConnectionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes connection and associated credential. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_connection_v1 - - def sample_delete_connection(): - # Create a client - client = bigquery_connection_v1.ConnectionServiceClient() - - # Initialize request argument(s) - request = bigquery_connection_v1.DeleteConnectionRequest( - name="name_value", - ) - - # Make the request - client.delete_connection(request=request) - - Args: - request (Union[google.cloud.bigquery_connection_v1.types.DeleteConnectionRequest, dict]): - The request object. The request for - [ConnectionService.DeleteConnectionRequest][]. - name (str): - Required. Name of the deleted connection, for example: - ``projects/{project_id}/locations/{location_id}/connections/{connection_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, connection.DeleteConnectionRequest): - request = connection.DeleteConnectionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_connection] - - # Certain fields should be provided within the metadata header; - # add these here. 
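-        # (to_grpc_metadata renders these pairs as the ``x-goog-request-params``
-        # request header, e.g. ``name=projects/.../connections/...``, which the
-        # backend uses to route the call.)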
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def get_iam_policy(self, - request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, - *, - resource: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Gets the access control policy for a resource. - Returns an empty policy if the resource exists and does - not have a policy set. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_connection_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - def sample_get_iam_policy(): - # Create a client - client = bigquery_connection_v1.ConnectionServiceClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.get_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]): - The request object. Request message for ``GetIamPolicy`` method. - resource (str): - REQUIRED: The resource for which the - policy is being requested. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
- - **JSON example:** - - :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](\ https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [resource] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - if isinstance(request, dict): - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.GetIamPolicyRequest(**request) - elif not request: - # Null request, just make one. - request = iam_policy_pb2.GetIamPolicyRequest() - if resource is not None: - request.resource = resource - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def set_iam_policy(self, - request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, - *, - resource: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Sets the access control policy on the specified resource. - Replaces any existing policy. - - Can return ``NOT_FOUND``, ``INVALID_ARGUMENT``, and - ``PERMISSION_DENIED`` errors. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_connection_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - def sample_set_iam_policy(): - # Create a client - client = bigquery_connection_v1.ConnectionServiceClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.set_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]): - The request object. Request message for ``SetIamPolicy`` method. - resource (str): - REQUIRED: The resource for which the - policy is being specified. See the - operation documentation for the - appropriate value for this field. - - This corresponds to the ``resource`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
- - **JSON example:** - - :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](\ https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [resource] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - if isinstance(request, dict): - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.SetIamPolicyRequest(**request) - elif not request: - # Null request, just make one. - request = iam_policy_pb2.SetIamPolicyRequest() - if resource is not None: - request.resource = resource - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def test_iam_permissions(self, - request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, - *, - resource: Optional[str] = None, - permissions: Optional[MutableSequence[str]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Returns permissions that a caller has on the specified resource. - If the resource does not exist, this will return an empty set of - permissions, not a ``NOT_FOUND`` error. - - Note: This operation is designed to be used for building - permission-aware UIs and command-line tools, not for - authorization checking. This operation may "fail open" without - warning. - - .. 
code-block:: python

-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            # client as shown in:
-            # https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import bigquery_connection_v1
-            from google.iam.v1 import iam_policy_pb2  # type: ignore
-
-            def sample_test_iam_permissions():
-                # Create a client
-                client = bigquery_connection_v1.ConnectionServiceClient()
-
-                # Initialize request argument(s)
-                request = iam_policy_pb2.TestIamPermissionsRequest(
-                    resource="resource_value",
-                    permissions=['permissions_value1', 'permissions_value2'],
-                )
-
-                # Make the request
-                response = client.test_iam_permissions(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]):
-                The request object. Request message for ``TestIamPermissions`` method.
-            resource (str):
-                REQUIRED: The resource for which the
-                policy detail is being requested. See
-                the operation documentation for the
-                appropriate value for this field.
-
-                This corresponds to the ``resource`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            permissions (MutableSequence[str]):
-                The set of permissions to check for the ``resource``.
-                Permissions with wildcards (such as '*' or 'storage.*')
-                are not allowed. For more information see `IAM
-                Overview <https://cloud.google.com/iam/docs/overview#permissions>`__.
-
-                This corresponds to the ``permissions`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse:
-                Response message for TestIamPermissions method.
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [resource, permissions]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        if isinstance(request, dict):
-            # - The request isn't a proto-plus wrapped type,
-            #   so it must be constructed via keyword expansion.
-            request = iam_policy_pb2.TestIamPermissionsRequest(**request)
-        elif not request:
-            # Null request, just make one.
-            request = iam_policy_pb2.TestIamPermissionsRequest()
-            if resource is not None:
-                request.resource = resource
-            if permissions:
-                request.permissions.extend(permissions)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "ConnectionServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "ConnectionServiceClient", -) diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/services/connection_service/pagers.py b/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/services/connection_service/pagers.py deleted file mode 100644 index 01ba233221e3..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/services/connection_service/pagers.py +++ /dev/null @@ -1,166 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.bigquery_connection_v1.types import connection - - -class ListConnectionsPager: - """A pager for iterating through ``list_connections`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_connection_v1.types.ListConnectionsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``connections`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListConnections`` requests and continue to iterate - through the ``connections`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_connection_v1.types.ListConnectionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
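-
-    A minimal iteration sketch (``client`` is assumed to be an existing
-    ``ConnectionServiceClient`` and the ``parent`` value is a placeholder;
-    ``client.list_connections`` constructs this pager for you):
-
-    .. code-block:: python
-
-        pager = client.list_connections(parent="projects/my-project/locations/US")
-        for conn in pager:
-            # Additional pages are fetched transparently as needed.
-            print(conn.name)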
- """ - def __init__(self, - method: Callable[..., connection.ListConnectionsResponse], - request: connection.ListConnectionsRequest, - response: connection.ListConnectionsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_connection_v1.types.ListConnectionsRequest): - The initial request object. - response (google.cloud.bigquery_connection_v1.types.ListConnectionsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = connection.ListConnectionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[connection.ListConnectionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[connection.Connection]: - for page in self.pages: - yield from page.connections - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListConnectionsAsyncPager: - """A pager for iterating through ``list_connections`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_connection_v1.types.ListConnectionsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``connections`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListConnections`` requests and continue to iterate - through the ``connections`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_connection_v1.types.ListConnectionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[connection.ListConnectionsResponse]], - request: connection.ListConnectionsRequest, - response: connection.ListConnectionsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_connection_v1.types.ListConnectionsRequest): - The initial request object. - response (google.cloud.bigquery_connection_v1.types.ListConnectionsResponse): - The initial response object. 
- retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = connection.ListConnectionsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[connection.ListConnectionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[connection.Connection]: - async def async_generator(): - async for page in self.pages: - for response in page.connections: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/services/connection_service/transports/README.rst b/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/services/connection_service/transports/README.rst deleted file mode 100644 index 78c1c64f4d94..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/services/connection_service/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`ConnectionServiceTransport` is the ABC for all transports. -- public child `ConnectionServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `ConnectionServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseConnectionServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `ConnectionServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/services/connection_service/transports/__init__.py b/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/services/connection_service/transports/__init__.py deleted file mode 100644 index 7b9f61965e5a..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/services/connection_service/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import ConnectionServiceTransport -from .grpc import ConnectionServiceGrpcTransport -from .grpc_asyncio import ConnectionServiceGrpcAsyncIOTransport -from .rest import ConnectionServiceRestTransport -from .rest import ConnectionServiceRestInterceptor - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[ConnectionServiceTransport]] -_transport_registry['grpc'] = ConnectionServiceGrpcTransport -_transport_registry['grpc_asyncio'] = ConnectionServiceGrpcAsyncIOTransport -_transport_registry['rest'] = ConnectionServiceRestTransport - -__all__ = ( - 'ConnectionServiceTransport', - 'ConnectionServiceGrpcTransport', - 'ConnectionServiceGrpcAsyncIOTransport', - 'ConnectionServiceRestTransport', - 'ConnectionServiceRestInterceptor', -) diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/services/connection_service/transports/base.py b/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/services/connection_service/transports/base.py deleted file mode 100644 index 2c2890691a78..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/services/connection_service/transports/base.py +++ /dev/null @@ -1,287 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.bigquery_connection_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.bigquery_connection_v1.types import connection -from google.cloud.bigquery_connection_v1.types import connection as gcbc_connection -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class ConnectionServiceTransport(abc.ABC): - """Abstract transport class for ConnectionService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'bigqueryconnection.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'bigqueryconnection.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. 
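-        # (Either an explicit ``credentials`` object or a ``credentials_file``
-        # may be given, never both; when neither is given, application
-        # default credentials (ADC) are looked up from the environment.)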
- if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.create_connection: gapic_v1.method.wrap_method( - self.create_connection, - default_timeout=60.0, - client_info=client_info, - ), - self.get_connection: gapic_v1.method.wrap_method( - self.get_connection, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_connections: gapic_v1.method.wrap_method( - self.list_connections, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.update_connection: gapic_v1.method.wrap_method( - self.update_connection, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_connection: gapic_v1.method.wrap_method( - self.delete_connection, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_iam_policy: gapic_v1.method.wrap_method( - self.get_iam_policy, - default_timeout=60.0, - client_info=client_info, - ), - self.set_iam_policy: gapic_v1.method.wrap_method( - self.set_iam_policy, - default_timeout=60.0, - client_info=client_info, - ), - self.test_iam_permissions: gapic_v1.method.wrap_method( - self.test_iam_permissions, - default_timeout=60.0, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
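-
-        A sketch of the usual safe pattern (assuming the transport is
-        owned by a single client):
-
-        .. code-block:: python
-
-            client = ConnectionServiceClient()
-            try:
-                ...  # make calls
-            finally:
-                client.transport.close()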
- """ - raise NotImplementedError() - - @property - def create_connection(self) -> Callable[ - [gcbc_connection.CreateConnectionRequest], - Union[ - gcbc_connection.Connection, - Awaitable[gcbc_connection.Connection] - ]]: - raise NotImplementedError() - - @property - def get_connection(self) -> Callable[ - [connection.GetConnectionRequest], - Union[ - connection.Connection, - Awaitable[connection.Connection] - ]]: - raise NotImplementedError() - - @property - def list_connections(self) -> Callable[ - [connection.ListConnectionsRequest], - Union[ - connection.ListConnectionsResponse, - Awaitable[connection.ListConnectionsResponse] - ]]: - raise NotImplementedError() - - @property - def update_connection(self) -> Callable[ - [gcbc_connection.UpdateConnectionRequest], - Union[ - gcbc_connection.Connection, - Awaitable[gcbc_connection.Connection] - ]]: - raise NotImplementedError() - - @property - def delete_connection(self) -> Callable[ - [connection.DeleteConnectionRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Union[ - policy_pb2.Policy, - Awaitable[policy_pb2.Policy] - ]]: - raise NotImplementedError() - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Union[ - policy_pb2.Policy, - Awaitable[policy_pb2.Policy] - ]]: - raise NotImplementedError() - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Union[ - iam_policy_pb2.TestIamPermissionsResponse, - Awaitable[iam_policy_pb2.TestIamPermissionsResponse] - ]]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'ConnectionServiceTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/services/connection_service/transports/grpc.py b/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/services/connection_service/transports/grpc.py deleted file mode 100644 index 5937e2658cd8..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/services/connection_service/transports/grpc.py +++ /dev/null @@ -1,548 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#
-import json
-import logging as std_logging
-import pickle
-import warnings
-from typing import Callable, Dict, Optional, Sequence, Tuple, Union
-
-from google.api_core import grpc_helpers
-from google.api_core import gapic_v1
-import google.auth  # type: ignore
-from google.auth import credentials as ga_credentials  # type: ignore
-from google.auth.transport.grpc import SslCredentials  # type: ignore
-from google.protobuf.json_format import MessageToJson
-import google.protobuf.message
-
-import grpc  # type: ignore
-import proto  # type: ignore
-
-from google.cloud.bigquery_connection_v1.types import connection
-from google.cloud.bigquery_connection_v1.types import connection as gcbc_connection
-from google.iam.v1 import iam_policy_pb2  # type: ignore
-from google.iam.v1 import policy_pb2  # type: ignore
-from google.protobuf import empty_pb2  # type: ignore
-from .base import ConnectionServiceTransport, DEFAULT_CLIENT_INFO
-
-try:
-    from google.api_core import client_logging  # type: ignore
-    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
-except ImportError:  # pragma: NO COVER
-    CLIENT_LOGGING_SUPPORTED = False
-
-_LOGGER = std_logging.getLogger(__name__)
-
-
-class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor):  # pragma: NO COVER
-    def intercept_unary_unary(self, continuation, client_call_details, request):
-        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG)
-        if logging_enabled:  # pragma: NO COVER
-            request_metadata = client_call_details.metadata
-            if isinstance(request, proto.Message):
-                request_payload = type(request).to_json(request)
-            elif isinstance(request, google.protobuf.message.Message):
-                request_payload = MessageToJson(request)
-            else:
-                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
-
-            request_metadata = {
-                key: value.decode("utf-8") if isinstance(value, bytes) else value
-                for key, value in request_metadata
-            }
-            grpc_request = {
-                "payload": request_payload,
-                "requestMethod": "grpc",
-                "metadata": dict(request_metadata),
-            }
-            _LOGGER.debug(
-                f"Sending request for {client_call_details.method}",
-                extra = {
-                    "serviceName": "google.cloud.bigquery.connection.v1.ConnectionService",
-                    "rpcName": client_call_details.method,
-                    "request": grpc_request,
-                    "metadata": grpc_request["metadata"],
-                },
-            )
-
-        response = continuation(client_call_details, request)
-        if logging_enabled:  # pragma: NO COVER
-            response_metadata = response.trailing_metadata()
-            # Convert the gRPC trailing metadata into a plain dict of str -> str.
-            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
-            result = response.result()
-            if isinstance(result, proto.Message):
-                response_payload = type(result).to_json(result)
-            elif isinstance(result, google.protobuf.message.Message):
-                response_payload = MessageToJson(result)
-            else:
-                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
-            grpc_response = {
-                "payload": response_payload,
-                "metadata": metadata,
-                "status": "OK",
-            }
-            _LOGGER.debug(
-                f"Received response for {client_call_details.method}.",
-                extra = {
-                    "serviceName": "google.cloud.bigquery.connection.v1.ConnectionService",
-                    "rpcName": client_call_details.method,
-                    "response": grpc_response,
-                    "metadata": grpc_response["metadata"],
-                },
-            )
-        return response
-
-
-class ConnectionServiceGrpcTransport(ConnectionServiceTransport):
-    """gRPC backend transport for ConnectionService.
-
-    Manages external data source connections and credentials.
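-
-    A minimal construction sketch (assuming application default
-    credentials are available in the environment; normally the client
-    builds this transport for you via ``transport="grpc"``):
-
-    .. code-block:: python
-
-        from google.cloud.bigquery_connection_v1.services.connection_service import (
-            transports,
-        )
-
-        # Uses ADC and the default endpoint; __init__ creates the
-        # underlying gRPC channel via create_channel().
-        transport = transports.ConnectionServiceGrpcTransport()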
-
-    This class defines the same methods as the primary client, so the
-    primary client can load the underlying transport implementation
-    and call it.
-
-    It sends protocol buffers over the wire using gRPC (which is built on
-    top of HTTP/2); the ``grpcio`` package must be installed.
-    """
-    _stubs: Dict[str, Callable]
-
-    def __init__(self, *,
-            host: str = 'bigqueryconnection.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None,
-            api_mtls_endpoint: Optional[str] = None,
-            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
-            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            api_audience: Optional[str] = None,
-            ) -> None:
-        """Instantiate the transport.
-
-        Args:
-            host (Optional[str]):
-                The hostname to connect to (default: 'bigqueryconnection.googleapis.com').
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-                This argument is ignored if a ``channel`` instance is provided.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if a ``channel`` instance is provided.
-            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
-                ignored if a ``channel`` instance is provided.
-            channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]):
-                A ``Channel`` instance through which to make calls, or a Callable
-                that constructs and returns one. If set to None, ``self.create_channel``
-                is used to create the channel. If a Callable is given, it will be called
-                with the same arguments as used in ``self.create_channel``.
-            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
-                If provided, it overrides the ``host`` argument and tries to create
-                a mutual TLS channel with client SSL credentials from
-                ``client_cert_source`` or application default SSL credentials.
-            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                Deprecated. A callback to provide client SSL certificate bytes and
-                private key bytes, both in PEM format. It is ignored if
-                ``api_mtls_endpoint`` is None.
-            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
-                for the grpc channel. It is ignored if a ``channel`` instance is provided.
-            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                A callback to provide client certificate bytes and private key bytes,
-                both in PEM format. It is used to configure a mutual TLS channel. It is
-                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) - - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'bigqueryconnection.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. 
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is mutually exclusive with credentials.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            kwargs (Optional[dict]): Keyword arguments, which are passed to the
-                channel creation.
-        Returns:
-            grpc.Channel: A gRPC channel object.
-
-        Raises:
-            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
-                and ``credentials_file`` are passed.
-        """
-
-        return grpc_helpers.create_channel(
-            host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            quota_project_id=quota_project_id,
-            default_scopes=cls.AUTH_SCOPES,
-            scopes=scopes,
-            default_host=cls.DEFAULT_HOST,
-            **kwargs
-        )
-
-    @property
-    def grpc_channel(self) -> grpc.Channel:
-        """Return the channel designed to connect to this service.
-        """
-        return self._grpc_channel
-
-    @property
-    def create_connection(self) -> Callable[
-            [gcbc_connection.CreateConnectionRequest],
-            gcbc_connection.Connection]:
-        r"""Return a callable for the create connection method over gRPC.
-
-        Creates a new connection.
-
-        Returns:
-            Callable[[~.CreateConnectionRequest],
-                    ~.Connection]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'create_connection' not in self._stubs:
-            self._stubs['create_connection'] = self._logged_channel.unary_unary(
-                '/google.cloud.bigquery.connection.v1.ConnectionService/CreateConnection',
-                request_serializer=gcbc_connection.CreateConnectionRequest.serialize,
-                response_deserializer=gcbc_connection.Connection.deserialize,
-            )
-        return self._stubs['create_connection']
-
-    @property
-    def get_connection(self) -> Callable[
-            [connection.GetConnectionRequest],
-            connection.Connection]:
-        r"""Return a callable for the get connection method over gRPC.
-
-        Returns specified connection.
-
-        Returns:
-            Callable[[~.GetConnectionRequest],
-                    ~.Connection]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_connections' not in self._stubs: - self._stubs['list_connections'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.connection.v1.ConnectionService/ListConnections', - request_serializer=connection.ListConnectionsRequest.serialize, - response_deserializer=connection.ListConnectionsResponse.deserialize, - ) - return self._stubs['list_connections'] - - @property - def update_connection(self) -> Callable[ - [gcbc_connection.UpdateConnectionRequest], - gcbc_connection.Connection]: - r"""Return a callable for the update connection method over gRPC. - - Updates the specified connection. For security - reasons, also resets credential if connection properties - are in the update field mask. - - Returns: - Callable[[~.UpdateConnectionRequest], - ~.Connection]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_connection' not in self._stubs: - self._stubs['update_connection'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.connection.v1.ConnectionService/UpdateConnection', - request_serializer=gcbc_connection.UpdateConnectionRequest.serialize, - response_deserializer=gcbc_connection.Connection.deserialize, - ) - return self._stubs['update_connection'] - - @property - def delete_connection(self) -> Callable[ - [connection.DeleteConnectionRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete connection method over gRPC. - - Deletes connection and associated credential. - - Returns: - Callable[[~.DeleteConnectionRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_connection' not in self._stubs: - self._stubs['delete_connection'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.connection.v1.ConnectionService/DeleteConnection', - request_serializer=connection.DeleteConnectionRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_connection'] - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - - Gets the access control policy for a resource. - Returns an empty policy if the resource exists and does - not have a policy set. - - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
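-        # (The stub is created once and cached in ``self._stubs``, so
-        # repeated property access reuses the same callable.)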
- if 'get_iam_policy' not in self._stubs: - self._stubs['get_iam_policy'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.connection.v1.ConnectionService/GetIamPolicy', - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['get_iam_policy'] - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - - Sets the access control policy on the specified resource. - Replaces any existing policy. - - Can return ``NOT_FOUND``, ``INVALID_ARGUMENT``, and - ``PERMISSION_DENIED`` errors. - - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'set_iam_policy' not in self._stubs: - self._stubs['set_iam_policy'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.connection.v1.ConnectionService/SetIamPolicy', - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['set_iam_policy'] - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - iam_policy_pb2.TestIamPermissionsResponse]: - r"""Return a callable for the test iam permissions method over gRPC. - - Returns permissions that a caller has on the specified resource. - If the resource does not exist, this will return an empty set of - permissions, not a ``NOT_FOUND`` error. - - Note: This operation is designed to be used for building - permission-aware UIs and command-line tools, not for - authorization checking. This operation may "fail open" without - warning. - - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
-        if 'test_iam_permissions' not in self._stubs:
-            self._stubs['test_iam_permissions'] = self._logged_channel.unary_unary(
-                '/google.cloud.bigquery.connection.v1.ConnectionService/TestIamPermissions',
-                request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString,
-                response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString,
-            )
-        return self._stubs['test_iam_permissions']
-
-    def close(self):
-        self._logged_channel.close()
-
-    @property
-    def kind(self) -> str:
-        return "grpc"
-
-
-__all__ = (
-    'ConnectionServiceGrpcTransport',
-)
diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/services/connection_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/services/connection_service/transports/grpc_asyncio.py
deleted file mode 100644
index 5647918296b7..000000000000
--- a/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/services/connection_service/transports/grpc_asyncio.py
+++ /dev/null
@@ -1,633 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import inspect
-import json
-import pickle
-import logging as std_logging
-import warnings
-from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
-
-from google.api_core import gapic_v1
-from google.api_core import grpc_helpers_async
-from google.api_core import exceptions as core_exceptions
-from google.api_core import retry_async as retries
-from google.auth import credentials as ga_credentials  # type: ignore
-from google.auth.transport.grpc import SslCredentials  # type: ignore
-from google.protobuf.json_format import MessageToJson
-import google.protobuf.message
-
-import grpc  # type: ignore
-import proto  # type: ignore
-from grpc.experimental import aio  # type: ignore
-
-from google.cloud.bigquery_connection_v1.types import connection
-from google.cloud.bigquery_connection_v1.types import connection as gcbc_connection
-from google.iam.v1 import iam_policy_pb2  # type: ignore
-from google.iam.v1 import policy_pb2  # type: ignore
-from google.protobuf import empty_pb2  # type: ignore
-from .base import ConnectionServiceTransport, DEFAULT_CLIENT_INFO
-from .grpc import ConnectionServiceGrpcTransport
-
-try:
-    from google.api_core import client_logging  # type: ignore
-    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
-except ImportError:  # pragma: NO COVER
-    CLIENT_LOGGING_SUPPORTED = False
-
-_LOGGER = std_logging.getLogger(__name__)
-
-
-class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor):  # pragma: NO COVER
-    async def intercept_unary_unary(self, continuation, client_call_details, request):
-        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG)
-        if logging_enabled:  # pragma: NO COVER
-            request_metadata = client_call_details.metadata
-            if isinstance(request, proto.Message):
-                request_payload = type(request).to_json(request)
-            elif isinstance(request, google.protobuf.message.Message):
-                request_payload = MessageToJson(request)
-            else:
-                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
-
-            request_metadata = {
-                key: value.decode("utf-8") if isinstance(value, bytes) else value
-                for key, value in request_metadata
-            }
-            grpc_request = {
-                "payload": request_payload,
-                "requestMethod": "grpc",
-                "metadata": dict(request_metadata),
-            }
-            _LOGGER.debug(
-                f"Sending request for {client_call_details.method}",
-                extra = {
-                    "serviceName": "google.cloud.bigquery.connection.v1.ConnectionService",
-                    "rpcName": str(client_call_details.method),
-                    "request": grpc_request,
-                    "metadata": grpc_request["metadata"],
-                },
-            )
-        response = await continuation(client_call_details, request)
-        if logging_enabled:  # pragma: NO COVER
-            response_metadata = await response.trailing_metadata()
-            # Convert the gRPC trailing metadata into a plain dict of str -> str.
-            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
-            result = await response
-            if isinstance(result, proto.Message):
-                response_payload = type(result).to_json(result)
-            elif isinstance(result, google.protobuf.message.Message):
-                response_payload = MessageToJson(result)
-            else:
-                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
-            grpc_response = {
-                "payload": response_payload,
-                "metadata": metadata,
-                "status": "OK",
-            }
-            _LOGGER.debug(
-                f"Received response to rpc {client_call_details.method}.",
-                extra = {
-                    "serviceName": "google.cloud.bigquery.connection.v1.ConnectionService",
-                    "rpcName": str(client_call_details.method),
-                    "response": grpc_response,
-                    "metadata": grpc_response["metadata"],
-                },
-            )
-        return response
-
-
-class ConnectionServiceGrpcAsyncIOTransport(ConnectionServiceTransport):
-    """gRPC AsyncIO backend transport for ConnectionService.
-
-    Manages external data source connections and credentials.
-
-    This class defines the same methods as the primary client, so the
-    primary client can load the underlying transport implementation
-    and call it.
-
-    It sends protocol buffers over the wire using gRPC (which is built on
-    top of HTTP/2); the ``grpcio`` package must be installed.
-    """
-
-    _grpc_channel: aio.Channel
-    _stubs: Dict[str, Callable] = {}
-
-    @classmethod
-    def create_channel(cls,
-            host: str = 'bigqueryconnection.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            quota_project_id: Optional[str] = None,
-            **kwargs) -> aio.Channel:
-        """Create and return a gRPC AsyncIO channel object.
-        Args:
-            host (Optional[str]): The host for the channel to use.
-            credentials (Optional[~.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify this application to the service. If
-                none are specified, the client will attempt to ascertain
-                the credentials from the environment.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            kwargs (Optional[dict]): Keyword arguments, which are passed to the
-                channel creation.
-        Returns:
-            aio.Channel: A gRPC AsyncIO channel object.
-        """
-
-        return grpc_helpers_async.create_channel(
-            host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            quota_project_id=quota_project_id,
-            default_scopes=cls.AUTH_SCOPES,
-            scopes=scopes,
-            default_host=cls.DEFAULT_HOST,
-            **kwargs
-        )
-
-    def __init__(self, *,
-            host: str = 'bigqueryconnection.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None,
-            api_mtls_endpoint: Optional[str] = None,
-            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
-            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            api_audience: Optional[str] = None,
-            ) -> None:
-        """Instantiate the transport.
-
-        Args:
-            host (Optional[str]):
-                The hostname to connect to (default: 'bigqueryconnection.googleapis.com').
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-                This argument is ignored if a ``channel`` instance is provided.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if a ``channel`` instance is provided.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]):
-                A ``Channel`` instance through which to make calls, or a Callable
-                that constructs and returns one. If set to None, ``self.create_channel``
-                is used to create the channel. If a Callable is given, it will be called
-                with the same arguments as used in ``self.create_channel``.
-            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
-                If provided, it overrides the ``host`` argument and tries to create
-                a mutual TLS channel with client SSL credentials from
-                ``client_cert_source`` or application default SSL credentials.
-            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                Deprecated. A callback to provide client SSL certificate bytes and
-                private key bytes, both in PEM format. It is ignored if
-                ``api_mtls_endpoint`` is None.
-            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
-                for the grpc channel. It is ignored if a ``channel`` instance is provided.
-            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                A callback to provide client certificate bytes and private key bytes,
-                both in PEM format. It is used to configure a mutual TLS channel. It is
-                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
-                be used for service account credentials.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
-            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
-                and ``credentials_file`` are passed.
-        """
-        self._grpc_channel = None
-        self._ssl_channel_credentials = ssl_channel_credentials
-        self._stubs: Dict[str, Callable] = {}
-
-        if api_mtls_endpoint:
-            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
-        if client_cert_source:
-            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
-
-        if isinstance(channel, aio.Channel):
-            # Ignore credentials if a channel was passed.
-            credentials = None
-            self._ignore_credentials = True
-            # If a channel was explicitly provided, set it.
-            self._grpc_channel = channel
-            self._ssl_channel_credentials = None
-        else:
-            if api_mtls_endpoint:
-                host = api_mtls_endpoint
-
-                # Create SSL credentials with client_cert_source or application
-                # default SSL credentials.
-                if client_cert_source:
-                    cert, key = client_cert_source()
-                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
-                        certificate_chain=cert, private_key=key
-                    )
-                else:
-                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
-
-            else:
-                if client_cert_source_for_mtls and not ssl_channel_credentials:
-                    cert, key = client_cert_source_for_mtls()
-                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
-                        certificate_chain=cert, private_key=key
-                    )
-
-        # The base transport sets the host, credentials and scopes
-        super().__init__(
-            host=host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            scopes=scopes,
-            quota_project_id=quota_project_id,
-            client_info=client_info,
-            always_use_jwt_access=always_use_jwt_access,
-            api_audience=api_audience,
-        )
-
-        if not self._grpc_channel:
-            # initialize with the provided callable or the default channel
-            channel_init = channel or type(self).create_channel
-            self._grpc_channel = channel_init(
-                self._host,
-                # use the credentials which are saved
-                credentials=self._credentials,
-                # Set ``credentials_file`` to ``None`` here as
-                # the credentials that we saved earlier should be used.
-                credentials_file=None,
-                scopes=self._scopes,
-                ssl_credentials=self._ssl_channel_credentials,
-                quota_project_id=quota_project_id,
-                options=[
-                    ("grpc.max_send_message_length", -1),
-                    ("grpc.max_receive_message_length", -1),
-                ],
-            )
-
-        self._interceptor = _LoggingClientAIOInterceptor()
-        self._grpc_channel._unary_unary_interceptors.append(self._interceptor)
-        self._logged_channel = self._grpc_channel
-        self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters
-        # Wrap messages. This must be done after self._logged_channel exists
-        self._prep_wrapped_messages(client_info)
-
-    @property
-    def grpc_channel(self) -> aio.Channel:
-        """Create the channel designed to connect to this service.
-
-        This property caches on the instance; repeated calls return
-        the same channel.
-        """
-        # Return the channel from cache.
-        return self._grpc_channel
-
-    @property
-    def create_connection(self) -> Callable[
-            [gcbc_connection.CreateConnectionRequest],
-            Awaitable[gcbc_connection.Connection]]:
-        r"""Return a callable for the create connection method over gRPC.
-
-        Creates a new connection.
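-
-        A minimal call sketch (``transport`` is assumed to be an instance
-        of this class and ``request`` a populated ``CreateConnectionRequest``):
-
-        .. code-block:: python
-
-            # The aio stub returns an awaitable call object.
-            conn = await transport.create_connection(request)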
- - Returns: - Callable[[~.CreateConnectionRequest], - Awaitable[~.Connection]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_connection' not in self._stubs: - self._stubs['create_connection'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.connection.v1.ConnectionService/CreateConnection', - request_serializer=gcbc_connection.CreateConnectionRequest.serialize, - response_deserializer=gcbc_connection.Connection.deserialize, - ) - return self._stubs['create_connection'] - - @property - def get_connection(self) -> Callable[ - [connection.GetConnectionRequest], - Awaitable[connection.Connection]]: - r"""Return a callable for the get connection method over gRPC. - - Returns specified connection. - - Returns: - Callable[[~.GetConnectionRequest], - Awaitable[~.Connection]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_connection' not in self._stubs: - self._stubs['get_connection'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.connection.v1.ConnectionService/GetConnection', - request_serializer=connection.GetConnectionRequest.serialize, - response_deserializer=connection.Connection.deserialize, - ) - return self._stubs['get_connection'] - - @property - def list_connections(self) -> Callable[ - [connection.ListConnectionsRequest], - Awaitable[connection.ListConnectionsResponse]]: - r"""Return a callable for the list connections method over gRPC. - - Returns a list of connections in the given project. - - Returns: - Callable[[~.ListConnectionsRequest], - Awaitable[~.ListConnectionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_connections' not in self._stubs: - self._stubs['list_connections'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.connection.v1.ConnectionService/ListConnections', - request_serializer=connection.ListConnectionsRequest.serialize, - response_deserializer=connection.ListConnectionsResponse.deserialize, - ) - return self._stubs['list_connections'] - - @property - def update_connection(self) -> Callable[ - [gcbc_connection.UpdateConnectionRequest], - Awaitable[gcbc_connection.Connection]]: - r"""Return a callable for the update connection method over gRPC. - - Updates the specified connection. For security - reasons, also resets credential if connection properties - are in the update field mask. - - Returns: - Callable[[~.UpdateConnectionRequest], - Awaitable[~.Connection]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'update_connection' not in self._stubs: - self._stubs['update_connection'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.connection.v1.ConnectionService/UpdateConnection', - request_serializer=gcbc_connection.UpdateConnectionRequest.serialize, - response_deserializer=gcbc_connection.Connection.deserialize, - ) - return self._stubs['update_connection'] - - @property - def delete_connection(self) -> Callable[ - [connection.DeleteConnectionRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete connection method over gRPC. - - Deletes connection and associated credential. - - Returns: - Callable[[~.DeleteConnectionRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_connection' not in self._stubs: - self._stubs['delete_connection'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.connection.v1.ConnectionService/DeleteConnection', - request_serializer=connection.DeleteConnectionRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_connection'] - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the get iam policy method over gRPC. - - Gets the access control policy for a resource. - Returns an empty policy if the resource exists and does - not have a policy set. - - Returns: - Callable[[~.GetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_iam_policy' not in self._stubs: - self._stubs['get_iam_policy'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.connection.v1.ConnectionService/GetIamPolicy', - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['get_iam_policy'] - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the set iam policy method over gRPC. - - Sets the access control policy on the specified resource. - Replaces any existing policy. - - Can return ``NOT_FOUND``, ``INVALID_ARGUMENT``, and - ``PERMISSION_DENIED`` errors. - - Returns: - Callable[[~.SetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
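-        # Note the serializer pair used here: ``iam_policy_pb2`` messages are
-        # plain protobuf classes, so these stubs pass ``SerializeToString`` and
-        # ``FromString``, whereas the proto-plus ``connection`` types above use
-        # their ``serialize``/``deserialize`` wrappers.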
- if 'set_iam_policy' not in self._stubs: - self._stubs['set_iam_policy'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.connection.v1.ConnectionService/SetIamPolicy', - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['set_iam_policy'] - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Awaitable[iam_policy_pb2.TestIamPermissionsResponse]]: - r"""Return a callable for the test iam permissions method over gRPC. - - Returns permissions that a caller has on the specified resource. - If the resource does not exist, this will return an empty set of - permissions, not a ``NOT_FOUND`` error. - - Note: This operation is designed to be used for building - permission-aware UIs and command-line tools, not for - authorization checking. This operation may "fail open" without - warning. - - Returns: - Callable[[~.TestIamPermissionsRequest], - Awaitable[~.TestIamPermissionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'test_iam_permissions' not in self._stubs: - self._stubs['test_iam_permissions'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.connection.v1.ConnectionService/TestIamPermissions', - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs['test_iam_permissions'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.create_connection: self._wrap_method( - self.create_connection, - default_timeout=60.0, - client_info=client_info, - ), - self.get_connection: self._wrap_method( - self.get_connection, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_connections: self._wrap_method( - self.list_connections, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.update_connection: self._wrap_method( - self.update_connection, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_connection: self._wrap_method( - self.delete_connection, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_iam_policy: self._wrap_method( - self.get_iam_policy, - default_timeout=60.0, - client_info=client_info, - ), - self.set_iam_policy: self._wrap_method( - self.set_iam_policy, - default_timeout=60.0, - client_info=client_info, - ), - self.test_iam_permissions: self._wrap_method( - self.test_iam_permissions, - 
default_timeout=60.0, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - -__all__ = ( - 'ConnectionServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/services/connection_service/transports/rest.py b/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/services/connection_service/transports/rest.py deleted file mode 100644 index 64781ff3f12c..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/services/connection_service/transports/rest.py +++ /dev/null @@ -1,1663 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import logging -import json # type: ignore - -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 - -from google.protobuf import json_format - -from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - - -from google.cloud.bigquery_connection_v1.types import connection -from google.cloud.bigquery_connection_v1.types import connection as gcbc_connection -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - - -from .rest_base import _BaseConnectionServiceRestTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = logging.getLogger(__name__) - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=f"requests@{requests_version}", -) - - -class ConnectionServiceRestInterceptor: - """Interceptor for ConnectionService. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. 
- Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the ConnectionServiceRestTransport. - - .. code-block:: python - class MyCustomConnectionServiceInterceptor(ConnectionServiceRestInterceptor): - def pre_create_connection(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_connection(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_connection(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_get_connection(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_connection(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_iam_policy(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_iam_policy(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_connections(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_connections(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_set_iam_policy(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_set_iam_policy(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_test_iam_permissions(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_test_iam_permissions(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_connection(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_connection(self, response): - logging.log(f"Received response: {response}") - return response - - transport = ConnectionServiceRestTransport(interceptor=MyCustomConnectionServiceInterceptor()) - client = ConnectionServiceClient(transport=transport) - - - """ - def pre_create_connection(self, request: gcbc_connection.CreateConnectionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gcbc_connection.CreateConnectionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_connection - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConnectionService server. - """ - return request, metadata - - def post_create_connection(self, response: gcbc_connection.Connection) -> gcbc_connection.Connection: - """Post-rpc interceptor for create_connection - - DEPRECATED. Please use the `post_create_connection_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the ConnectionService server but before - it is returned to user code. This `post_create_connection` interceptor runs - before the `post_create_connection_with_metadata` interceptor. 
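-
-        For example, a minimal override might log the created resource before
-        handing it back (a sketch only; new code should prefer the
-        ``_with_metadata`` variant)::
-
-            def post_create_connection(self, response):
-                logging.debug("created connection %s", response.name)
-                return response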
- """ - return response - - def post_create_connection_with_metadata(self, response: gcbc_connection.Connection, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gcbc_connection.Connection, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_connection - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the ConnectionService server but before it is returned to user code. - - We recommend only using this `post_create_connection_with_metadata` - interceptor in new development instead of the `post_create_connection` interceptor. - When both interceptors are used, this `post_create_connection_with_metadata` interceptor runs after the - `post_create_connection` interceptor. The (possibly modified) response returned by - `post_create_connection` will be passed to - `post_create_connection_with_metadata`. - """ - return response, metadata - - def pre_delete_connection(self, request: connection.DeleteConnectionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[connection.DeleteConnectionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_connection - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConnectionService server. - """ - return request, metadata - - def pre_get_connection(self, request: connection.GetConnectionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[connection.GetConnectionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_connection - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConnectionService server. - """ - return request, metadata - - def post_get_connection(self, response: connection.Connection) -> connection.Connection: - """Post-rpc interceptor for get_connection - - DEPRECATED. Please use the `post_get_connection_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the ConnectionService server but before - it is returned to user code. This `post_get_connection` interceptor runs - before the `post_get_connection_with_metadata` interceptor. - """ - return response - - def post_get_connection_with_metadata(self, response: connection.Connection, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[connection.Connection, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_connection - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the ConnectionService server but before it is returned to user code. - - We recommend only using this `post_get_connection_with_metadata` - interceptor in new development instead of the `post_get_connection` interceptor. - When both interceptors are used, this `post_get_connection_with_metadata` interceptor runs after the - `post_get_connection` interceptor. The (possibly modified) response returned by - `post_get_connection` will be passed to - `post_get_connection_with_metadata`. - """ - return response, metadata - - def pre_get_iam_policy(self, request: iam_policy_pb2.GetIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConnectionService server. 
- """ - return request, metadata - - def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: - """Post-rpc interceptor for get_iam_policy - - DEPRECATED. Please use the `post_get_iam_policy_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the ConnectionService server but before - it is returned to user code. This `post_get_iam_policy` interceptor runs - before the `post_get_iam_policy_with_metadata` interceptor. - """ - return response - - def post_get_iam_policy_with_metadata(self, response: policy_pb2.Policy, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[policy_pb2.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_iam_policy - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the ConnectionService server but before it is returned to user code. - - We recommend only using this `post_get_iam_policy_with_metadata` - interceptor in new development instead of the `post_get_iam_policy` interceptor. - When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the - `post_get_iam_policy` interceptor. The (possibly modified) response returned by - `post_get_iam_policy` will be passed to - `post_get_iam_policy_with_metadata`. - """ - return response, metadata - - def pre_list_connections(self, request: connection.ListConnectionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[connection.ListConnectionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_connections - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConnectionService server. - """ - return request, metadata - - def post_list_connections(self, response: connection.ListConnectionsResponse) -> connection.ListConnectionsResponse: - """Post-rpc interceptor for list_connections - - DEPRECATED. Please use the `post_list_connections_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the ConnectionService server but before - it is returned to user code. This `post_list_connections` interceptor runs - before the `post_list_connections_with_metadata` interceptor. - """ - return response - - def post_list_connections_with_metadata(self, response: connection.ListConnectionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[connection.ListConnectionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_connections - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the ConnectionService server but before it is returned to user code. - - We recommend only using this `post_list_connections_with_metadata` - interceptor in new development instead of the `post_list_connections` interceptor. - When both interceptors are used, this `post_list_connections_with_metadata` interceptor runs after the - `post_list_connections` interceptor. The (possibly modified) response returned by - `post_list_connections` will be passed to - `post_list_connections_with_metadata`. 
- """ - return response, metadata - - def pre_set_iam_policy(self, request: iam_policy_pb2.SetIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for set_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConnectionService server. - """ - return request, metadata - - def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: - """Post-rpc interceptor for set_iam_policy - - DEPRECATED. Please use the `post_set_iam_policy_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the ConnectionService server but before - it is returned to user code. This `post_set_iam_policy` interceptor runs - before the `post_set_iam_policy_with_metadata` interceptor. - """ - return response - - def post_set_iam_policy_with_metadata(self, response: policy_pb2.Policy, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[policy_pb2.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for set_iam_policy - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the ConnectionService server but before it is returned to user code. - - We recommend only using this `post_set_iam_policy_with_metadata` - interceptor in new development instead of the `post_set_iam_policy` interceptor. - When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the - `post_set_iam_policy` interceptor. The (possibly modified) response returned by - `post_set_iam_policy` will be passed to - `post_set_iam_policy_with_metadata`. - """ - return response, metadata - - def pre_test_iam_permissions(self, request: iam_policy_pb2.TestIamPermissionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for test_iam_permissions - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConnectionService server. - """ - return request, metadata - - def post_test_iam_permissions(self, response: iam_policy_pb2.TestIamPermissionsResponse) -> iam_policy_pb2.TestIamPermissionsResponse: - """Post-rpc interceptor for test_iam_permissions - - DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the ConnectionService server but before - it is returned to user code. This `post_test_iam_permissions` interceptor runs - before the `post_test_iam_permissions_with_metadata` interceptor. - """ - return response - - def post_test_iam_permissions_with_metadata(self, response: iam_policy_pb2.TestIamPermissionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.TestIamPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for test_iam_permissions - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the ConnectionService server but before it is returned to user code. - - We recommend only using this `post_test_iam_permissions_with_metadata` - interceptor in new development instead of the `post_test_iam_permissions` interceptor. 
- When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the - `post_test_iam_permissions` interceptor. The (possibly modified) response returned by - `post_test_iam_permissions` will be passed to - `post_test_iam_permissions_with_metadata`. - """ - return response, metadata - - def pre_update_connection(self, request: gcbc_connection.UpdateConnectionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gcbc_connection.UpdateConnectionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_connection - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConnectionService server. - """ - return request, metadata - - def post_update_connection(self, response: gcbc_connection.Connection) -> gcbc_connection.Connection: - """Post-rpc interceptor for update_connection - - DEPRECATED. Please use the `post_update_connection_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the ConnectionService server but before - it is returned to user code. This `post_update_connection` interceptor runs - before the `post_update_connection_with_metadata` interceptor. - """ - return response - - def post_update_connection_with_metadata(self, response: gcbc_connection.Connection, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gcbc_connection.Connection, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_connection - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the ConnectionService server but before it is returned to user code. - - We recommend only using this `post_update_connection_with_metadata` - interceptor in new development instead of the `post_update_connection` interceptor. - When both interceptors are used, this `post_update_connection_with_metadata` interceptor runs after the - `post_update_connection` interceptor. The (possibly modified) response returned by - `post_update_connection` will be passed to - `post_update_connection_with_metadata`. - """ - return response, metadata - - -@dataclasses.dataclass -class ConnectionServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: ConnectionServiceRestInterceptor - - -class ConnectionServiceRestTransport(_BaseConnectionServiceRestTransport): - """REST backend synchronous transport for ConnectionService. - - Manages external data source connections and credentials. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'bigqueryconnection.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[ConnectionServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'bigqueryconnection.googleapis.com'). 
- credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. - # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - url_scheme=url_scheme, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or ConnectionServiceRestInterceptor() - self._prep_wrapped_messages(client_info) - - class _CreateConnection(_BaseConnectionServiceRestTransport._BaseCreateConnection, ConnectionServiceRestStub): - def __hash__(self): - return hash("ConnectionServiceRestTransport.CreateConnection") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: gcbc_connection.CreateConnectionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> gcbc_connection.Connection: - r"""Call the create connection method over HTTP. - - Args: - request (~.gcbc_connection.CreateConnectionRequest): - The request object. The request for - [ConnectionService.CreateConnection][google.cloud.bigquery.connection.v1.ConnectionService.CreateConnection]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.gcbc_connection.Connection: - Configuration parameters to establish - connection with an external data source, - except the credential attributes. - - """ - - http_options = _BaseConnectionServiceRestTransport._BaseCreateConnection._get_http_options() - - request, metadata = self._interceptor.pre_create_connection(request, metadata) - transcoded_request = _BaseConnectionServiceRestTransport._BaseCreateConnection._get_transcoded_request(http_options, request) - - body = _BaseConnectionServiceRestTransport._BaseCreateConnection._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseConnectionServiceRestTransport._BaseCreateConnection._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.connection_v1.ConnectionServiceClient.CreateConnection", - extra = { - "serviceName": "google.cloud.bigquery.connection.v1.ConnectionService", - "rpcName": "CreateConnection", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ConnectionServiceRestTransport._CreateConnection._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
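-            # (``from_http_response`` picks the matching subclass from the
-            # status code, e.g. a 404 surfaces as ``core_exceptions.NotFound``.)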
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = gcbc_connection.Connection() - pb_resp = gcbc_connection.Connection.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_connection(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_connection_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = gcbc_connection.Connection.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.connection_v1.ConnectionServiceClient.create_connection", - extra = { - "serviceName": "google.cloud.bigquery.connection.v1.ConnectionService", - "rpcName": "CreateConnection", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteConnection(_BaseConnectionServiceRestTransport._BaseDeleteConnection, ConnectionServiceRestStub): - def __hash__(self): - return hash("ConnectionServiceRestTransport.DeleteConnection") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: connection.DeleteConnectionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the delete connection method over HTTP. - - Args: - request (~.connection.DeleteConnectionRequest): - The request object. The request for - [ConnectionService.DeleteConnectionRequest][]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
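-
-                For example (a sketch using the public client rather than this
-                transport class directly; the connection name is illustrative)::
-
-                    from google.cloud import bigquery_connection_v1
-
-                    client = bigquery_connection_v1.ConnectionServiceClient()
-                    client.delete_connection(
-                        name="projects/my-project/locations/us/connections/my-conn"
-                    )
-
-                The RPC returns ``google.protobuf.Empty``, which is why this
-                method has no ``Returns`` section.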
- """ - - http_options = _BaseConnectionServiceRestTransport._BaseDeleteConnection._get_http_options() - - request, metadata = self._interceptor.pre_delete_connection(request, metadata) - transcoded_request = _BaseConnectionServiceRestTransport._BaseDeleteConnection._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseConnectionServiceRestTransport._BaseDeleteConnection._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.connection_v1.ConnectionServiceClient.DeleteConnection", - extra = { - "serviceName": "google.cloud.bigquery.connection.v1.ConnectionService", - "rpcName": "DeleteConnection", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ConnectionServiceRestTransport._DeleteConnection._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _GetConnection(_BaseConnectionServiceRestTransport._BaseGetConnection, ConnectionServiceRestStub): - def __hash__(self): - return hash("ConnectionServiceRestTransport.GetConnection") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: connection.GetConnectionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> connection.Connection: - r"""Call the get connection method over HTTP. - - Args: - request (~.connection.GetConnectionRequest): - The request object. The request for - [ConnectionService.GetConnection][google.cloud.bigquery.connection.v1.ConnectionService.GetConnection]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.connection.Connection: - Configuration parameters to establish - connection with an external data source, - except the credential attributes. 
- - """ - - http_options = _BaseConnectionServiceRestTransport._BaseGetConnection._get_http_options() - - request, metadata = self._interceptor.pre_get_connection(request, metadata) - transcoded_request = _BaseConnectionServiceRestTransport._BaseGetConnection._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseConnectionServiceRestTransport._BaseGetConnection._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.connection_v1.ConnectionServiceClient.GetConnection", - extra = { - "serviceName": "google.cloud.bigquery.connection.v1.ConnectionService", - "rpcName": "GetConnection", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ConnectionServiceRestTransport._GetConnection._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = connection.Connection() - pb_resp = connection.Connection.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_connection(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_connection_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = connection.Connection.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.connection_v1.ConnectionServiceClient.get_connection", - extra = { - "serviceName": "google.cloud.bigquery.connection.v1.ConnectionService", - "rpcName": "GetConnection", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetIamPolicy(_BaseConnectionServiceRestTransport._BaseGetIamPolicy, ConnectionServiceRestStub): - def __hash__(self): - return hash("ConnectionServiceRestTransport.GetIamPolicy") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: iam_policy_pb2.GetIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> 
policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. - - Args: - request (~.iam_policy_pb2.GetIamPolicyRequest): - The request object. Request message for ``GetIamPolicy`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which - specifies access controls for Google Cloud resources. - - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members``, or - principals, to a single ``role``. Principals can be user - accounts, service accounts, Google groups, and domains - (such as G Suite). A ``role`` is a named list of - permissions; each ``role`` can be an IAM predefined role - or a user-created custom role. - - For some types of Google Cloud resources, a ``binding`` - can also specify a ``condition``, which is a logical - expression that allows access to a resource only if the - expression evaluates to ``true``. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the `IAM - documentation <https://cloud.google.com/iam/help/conditions/resource-policies>`__. - - **JSON example:** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": [ - "user:eve@example.com" - ], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ], - "etag": "BwWWja0YfJA=", - "version": 3 - } - - **YAML example:** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - etag: BwWWja0YfJA= - version: 3 - - For a description of IAM and its features, see the `IAM - documentation <https://cloud.google.com/iam/docs/>`__. 
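-
-                For example, to have conditional role bindings included in the
-                response, request the version-3 policy format (a sketch; the
-                resource path is illustrative)::
-
-                    from google.iam.v1 import iam_policy_pb2, options_pb2
-
-                    request = iam_policy_pb2.GetIamPolicyRequest(
-                        resource="projects/my-project/locations/us/connections/my-conn",
-                        options=options_pb2.GetPolicyOptions(requested_policy_version=3),
-                    )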
- - """ - - http_options = _BaseConnectionServiceRestTransport._BaseGetIamPolicy._get_http_options() - - request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) - transcoded_request = _BaseConnectionServiceRestTransport._BaseGetIamPolicy._get_transcoded_request(http_options, request) - - body = _BaseConnectionServiceRestTransport._BaseGetIamPolicy._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseConnectionServiceRestTransport._BaseGetIamPolicy._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.connection_v1.ConnectionServiceClient.GetIamPolicy", - extra = { - "serviceName": "google.cloud.bigquery.connection.v1.ConnectionService", - "rpcName": "GetIamPolicy", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ConnectionServiceRestTransport._GetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = policy_pb2.Policy() - pb_resp = resp - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_iam_policy(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_iam_policy_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.connection_v1.ConnectionServiceClient.get_iam_policy", - extra = { - "serviceName": "google.cloud.bigquery.connection.v1.ConnectionService", - "rpcName": "GetIamPolicy", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListConnections(_BaseConnectionServiceRestTransport._BaseListConnections, ConnectionServiceRestStub): - def __hash__(self): - return hash("ConnectionServiceRestTransport.ListConnections") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: connection.ListConnectionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - 
metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> connection.ListConnectionsResponse: - r"""Call the list connections method over HTTP. - - Args: - request (~.connection.ListConnectionsRequest): - The request object. The request for - [ConnectionService.ListConnections][google.cloud.bigquery.connection.v1.ConnectionService.ListConnections]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.connection.ListConnectionsResponse: - The response for - [ConnectionService.ListConnections][google.cloud.bigquery.connection.v1.ConnectionService.ListConnections]. - - """ - - http_options = _BaseConnectionServiceRestTransport._BaseListConnections._get_http_options() - - request, metadata = self._interceptor.pre_list_connections(request, metadata) - transcoded_request = _BaseConnectionServiceRestTransport._BaseListConnections._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseConnectionServiceRestTransport._BaseListConnections._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.connection_v1.ConnectionServiceClient.ListConnections", - extra = { - "serviceName": "google.cloud.bigquery.connection.v1.ConnectionService", - "rpcName": "ListConnections", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ConnectionServiceRestTransport._ListConnections._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
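-            # (Pagination is handled one layer up: this method returns a single
-            # ListConnectionsResponse, and the client-side pager follows
-            # ``next_page_token`` across successive calls.)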
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = connection.ListConnectionsResponse() - pb_resp = connection.ListConnectionsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_connections(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_connections_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = connection.ListConnectionsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.connection_v1.ConnectionServiceClient.list_connections", - extra = { - "serviceName": "google.cloud.bigquery.connection.v1.ConnectionService", - "rpcName": "ListConnections", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _SetIamPolicy(_BaseConnectionServiceRestTransport._BaseSetIamPolicy, ConnectionServiceRestStub): - def __hash__(self): - return hash("ConnectionServiceRestTransport.SetIamPolicy") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: iam_policy_pb2.SetIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. - - Args: - request (~.iam_policy_pb2.SetIamPolicyRequest): - The request object. Request message for ``SetIamPolicy`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which - specifies access controls for Google Cloud resources. - - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members``, or - principals, to a single ``role``. Principals can be user - accounts, service accounts, Google groups, and domains - (such as G Suite). A ``role`` is a named list of - permissions; each ``role`` can be an IAM predefined role - or a user-created custom role. - - For some types of Google Cloud resources, a ``binding`` - can also specify a ``condition``, which is a logical - expression that allows access to a resource only if the - expression evaluates to ``true``. A condition can add - constraints based on attributes of the request, the - resource, or both. 
To learn which resources support - conditions in their IAM policies, see the `IAM - documentation <https://cloud.google.com/iam/help/conditions/resource-policies>`__. - - **JSON example:** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": [ - "user:eve@example.com" - ], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ], - "etag": "BwWWja0YfJA=", - "version": 3 - } - - **YAML example:** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - etag: BwWWja0YfJA= - version: 3 - - For a description of IAM and its features, see the `IAM - documentation <https://cloud.google.com/iam/docs/>`__. - - """ - - http_options = _BaseConnectionServiceRestTransport._BaseSetIamPolicy._get_http_options() - - request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) - transcoded_request = _BaseConnectionServiceRestTransport._BaseSetIamPolicy._get_transcoded_request(http_options, request) - - body = _BaseConnectionServiceRestTransport._BaseSetIamPolicy._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseConnectionServiceRestTransport._BaseSetIamPolicy._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.connection_v1.ConnectionServiceClient.SetIamPolicy", - extra = { - "serviceName": "google.cloud.bigquery.connection.v1.ConnectionService", - "rpcName": "SetIamPolicy", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ConnectionServiceRestTransport._SetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
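-            # (A common pattern with ``SetIamPolicy`` is read-modify-write:
-            # fetch the current policy, edit its bindings, and send it back
-            # with the original ``etag`` so that concurrent modifications fail
-            # cleanly instead of overwriting each other.)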
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = policy_pb2.Policy() - pb_resp = resp - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_set_iam_policy(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_set_iam_policy_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.connection_v1.ConnectionServiceClient.set_iam_policy", - extra = { - "serviceName": "google.cloud.bigquery.connection.v1.ConnectionService", - "rpcName": "SetIamPolicy", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _TestIamPermissions(_BaseConnectionServiceRestTransport._BaseTestIamPermissions, ConnectionServiceRestStub): - def __hash__(self): - return hash("ConnectionServiceRestTransport.TestIamPermissions") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: iam_policy_pb2.TestIamPermissionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. - - Args: - request (~.iam_policy_pb2.TestIamPermissionsRequest): - The request object. Request message for ``TestIamPermissions`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.iam_policy_pb2.TestIamPermissionsResponse: - Response message for ``TestIamPermissions`` method. 
- """ - - http_options = _BaseConnectionServiceRestTransport._BaseTestIamPermissions._get_http_options() - - request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) - transcoded_request = _BaseConnectionServiceRestTransport._BaseTestIamPermissions._get_transcoded_request(http_options, request) - - body = _BaseConnectionServiceRestTransport._BaseTestIamPermissions._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseConnectionServiceRestTransport._BaseTestIamPermissions._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.connection_v1.ConnectionServiceClient.TestIamPermissions", - extra = { - "serviceName": "google.cloud.bigquery.connection.v1.ConnectionService", - "rpcName": "TestIamPermissions", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ConnectionServiceRestTransport._TestIamPermissions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = iam_policy_pb2.TestIamPermissionsResponse() - pb_resp = resp - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_test_iam_permissions(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_test_iam_permissions_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.connection_v1.ConnectionServiceClient.test_iam_permissions", - extra = { - "serviceName": "google.cloud.bigquery.connection.v1.ConnectionService", - "rpcName": "TestIamPermissions", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateConnection(_BaseConnectionServiceRestTransport._BaseUpdateConnection, ConnectionServiceRestStub): - def __hash__(self): - return hash("ConnectionServiceRestTransport.UpdateConnection") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: 
gcbc_connection.UpdateConnectionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> gcbc_connection.Connection: - r"""Call the update connection method over HTTP. - - Args: - request (~.gcbc_connection.UpdateConnectionRequest): - The request object. The request for - [ConnectionService.UpdateConnection][google.cloud.bigquery.connection.v1.ConnectionService.UpdateConnection]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.gcbc_connection.Connection: - Configuration parameters to establish - connection with an external data source, - except the credential attributes. - - """ - - http_options = _BaseConnectionServiceRestTransport._BaseUpdateConnection._get_http_options() - - request, metadata = self._interceptor.pre_update_connection(request, metadata) - transcoded_request = _BaseConnectionServiceRestTransport._BaseUpdateConnection._get_transcoded_request(http_options, request) - - body = _BaseConnectionServiceRestTransport._BaseUpdateConnection._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseConnectionServiceRestTransport._BaseUpdateConnection._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.connection_v1.ConnectionServiceClient.UpdateConnection", - extra = { - "serviceName": "google.cloud.bigquery.connection.v1.ConnectionService", - "rpcName": "UpdateConnection", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ConnectionServiceRestTransport._UpdateConnection._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
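- # (from_http_response chooses the subclass from the HTTP status code, falling - # back to a plain GoogleAPICallError for codes without a dedicated mapping.)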
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = gcbc_connection.Connection() - pb_resp = gcbc_connection.Connection.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_connection(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_connection_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = gcbc_connection.Connection.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.connection_v1.ConnectionServiceClient.update_connection", - extra = { - "serviceName": "google.cloud.bigquery.connection.v1.ConnectionService", - "rpcName": "UpdateConnection", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - @property - def create_connection(self) -> Callable[ - [gcbc_connection.CreateConnectionRequest], - gcbc_connection.Connection]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateConnection(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_connection(self) -> Callable[ - [connection.DeleteConnectionRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteConnection(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_connection(self) -> Callable[ - [connection.GetConnectionRequest], - connection.Connection]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetConnection(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - policy_pb2.Policy]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_connections(self) -> Callable[ - [connection.ListConnectionsRequest], - connection.ListConnectionsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListConnections(self._session, self._host, self._interceptor) # type: ignore - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - policy_pb2.Policy]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - iam_policy_pb2.TestIamPermissionsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_connection(self) -> Callable[ - [gcbc_connection.UpdateConnectionRequest], - gcbc_connection.Connection]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateConnection(self._session, self._host, self._interceptor) # type: ignore - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'ConnectionServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/services/connection_service/transports/rest_base.py b/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/services/connection_service/transports/rest_base.py deleted file mode 100644 index 5200f9718447..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/services/connection_service/transports/rest_base.py +++ /dev/null @@ -1,441 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from .base import ConnectionServiceTransport, DEFAULT_CLIENT_INFO - -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - - -from google.cloud.bigquery_connection_v1.types import connection -from google.cloud.bigquery_connection_v1.types import connection as gcbc_connection -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - - -class _BaseConnectionServiceRestTransport(ConnectionServiceTransport): - """Base REST backend transport for ConnectionService. - - Note: This class is not meant to be used directly. Use its sync and - async sub-classes instead. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'bigqueryconnection.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - Args: - host (Optional[str]): - The hostname to connect to (default: 'bigqueryconnection.googleapis.com'). - credentials (Optional[Any]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - - class _BaseCreateConnection: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/connections', - 'body': 'connection', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = gcbc_connection.CreateConnectionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseConnectionServiceRestTransport._BaseCreateConnection._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteConnection: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in
cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/connections/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = connection.DeleteConnectionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseConnectionServiceRestTransport._BaseDeleteConnection._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetConnection: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/connections/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = connection.GetConnectionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseConnectionServiceRestTransport._BaseGetConnection._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetIamPolicy: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/connections/*}:getIamPolicy', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = request - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseConnectionServiceRestTransport._BaseGetIamPolicy._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListConnections: - def __hash__(self): # pragma: NO COVER - return 
NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "pageSize" : 0, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/connections', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = connection.ListConnectionsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseConnectionServiceRestTransport._BaseListConnections._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseSetIamPolicy: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/connections/*}:setIamPolicy', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = request - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseConnectionServiceRestTransport._BaseSetIamPolicy._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseTestIamPermissions: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/connections/*}:testIamPermissions', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = request - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def 
_get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseConnectionServiceRestTransport._BaseTestIamPermissions._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateConnection: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{name=projects/*/locations/*/connections/*}', - 'body': 'connection', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = gcbc_connection.UpdateConnectionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseConnectionServiceRestTransport._BaseUpdateConnection._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - -__all__=( - '_BaseConnectionServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/types/__init__.py b/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/types/__init__.py deleted file mode 100644 index 6be074b77fd0..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/types/__init__.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .connection import ( - AwsAccessRole, - AwsCrossAccountRole, - AwsProperties, - AzureProperties, - CloudResourceProperties, - CloudSpannerProperties, - CloudSqlCredential, - CloudSqlProperties, - Connection, - CreateConnectionRequest, - DeleteConnectionRequest, - GetConnectionRequest, - ListConnectionsRequest, - ListConnectionsResponse, - MetastoreServiceConfig, - SalesforceDataCloudProperties, - SparkHistoryServerConfig, - SparkProperties, - UpdateConnectionRequest, -) - -__all__ = ( - 'AwsAccessRole', - 'AwsCrossAccountRole', - 'AwsProperties', - 'AzureProperties', - 'CloudResourceProperties', - 'CloudSpannerProperties', - 'CloudSqlCredential', - 'CloudSqlProperties', - 'Connection', - 'CreateConnectionRequest', - 'DeleteConnectionRequest', - 'GetConnectionRequest', - 'ListConnectionsRequest', - 'ListConnectionsResponse', - 'MetastoreServiceConfig', - 'SalesforceDataCloudProperties', - 'SparkHistoryServerConfig', - 'SparkProperties', - 'UpdateConnectionRequest', -) diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/types/connection.py b/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/types/connection.py deleted file mode 100644 index bd46116d4689..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/google/cloud/bigquery_connection_v1/types/connection.py +++ /dev/null @@ -1,779 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import field_mask_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.bigquery.connection.v1', - manifest={ - 'CreateConnectionRequest', - 'GetConnectionRequest', - 'ListConnectionsRequest', - 'ListConnectionsResponse', - 'UpdateConnectionRequest', - 'DeleteConnectionRequest', - 'Connection', - 'CloudSqlProperties', - 'CloudSqlCredential', - 'CloudSpannerProperties', - 'AwsProperties', - 'AwsCrossAccountRole', - 'AwsAccessRole', - 'AzureProperties', - 'CloudResourceProperties', - 'MetastoreServiceConfig', - 'SparkHistoryServerConfig', - 'SparkProperties', - 'SalesforceDataCloudProperties', - }, -) - - -class CreateConnectionRequest(proto.Message): - r"""The request for - [ConnectionService.CreateConnection][google.cloud.bigquery.connection.v1.ConnectionService.CreateConnection]. - - Attributes: - parent (str): - Required. Parent resource name. Must be in the format - ``projects/{project_id}/locations/{location_id}`` - connection_id (str): - Optional. Connection id that should be - assigned to the created connection. - connection (google.cloud.bigquery_connection_v1.types.Connection): - Required. Connection to create. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - connection_id: str = proto.Field( - proto.STRING, - number=2, - ) - connection: 'Connection' = proto.Field( - proto.MESSAGE, - number=3, - message='Connection', - ) - - -class GetConnectionRequest(proto.Message): - r"""The request for - [ConnectionService.GetConnection][google.cloud.bigquery.connection.v1.ConnectionService.GetConnection]. - - Attributes: - name (str): - Required. Name of the requested connection, for example: - ``projects/{project_id}/locations/{location_id}/connections/{connection_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListConnectionsRequest(proto.Message): - r"""The request for - [ConnectionService.ListConnections][google.cloud.bigquery.connection.v1.ConnectionService.ListConnections]. - - Attributes: - parent (str): - Required. Parent resource name. Must be in the form: - ``projects/{project_id}/locations/{location_id}`` - page_size (int): - Required. Page size. - page_token (str): - Page token. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=4, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListConnectionsResponse(proto.Message): - r"""The response for - [ConnectionService.ListConnections][google.cloud.bigquery.connection.v1.ConnectionService.ListConnections]. - - Attributes: - next_page_token (str): - Next page token. - connections (MutableSequence[google.cloud.bigquery_connection_v1.types.Connection]): - List of connections. - """ - - @property - def raw_page(self): - return self - - next_page_token: str = proto.Field( - proto.STRING, - number=1, - ) - connections: MutableSequence['Connection'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='Connection', - ) - - -class UpdateConnectionRequest(proto.Message): - r"""The request for - [ConnectionService.UpdateConnection][google.cloud.bigquery.connection.v1.ConnectionService.UpdateConnection]. - - Attributes: - name (str): - Required. Name of the connection to update, for example: - ``projects/{project_id}/locations/{location_id}/connections/{connection_id}`` - connection (google.cloud.bigquery_connection_v1.types.Connection): - Required. Connection containing the updated - fields. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Update mask for the connection - fields to be updated. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - connection: 'Connection' = proto.Field( - proto.MESSAGE, - number=2, - message='Connection', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=3, - message=field_mask_pb2.FieldMask, - ) - - -class DeleteConnectionRequest(proto.Message): - r"""The request for [ConnectionService.DeleteConnectionRequest][]. - - Attributes: - name (str): - Required. Name of the deleted connection, for example: - ``projects/{project_id}/locations/{location_id}/connections/{connection_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class Connection(proto.Message): - r"""Configuration parameters to establish connection with an - external data source, except the credential attributes. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - The resource name of the connection in the form of: - ``projects/{project_id}/locations/{location_id}/connections/{connection_id}`` - friendly_name (str): - User provided display name for the - connection. - description (str): - User provided description. - cloud_sql (google.cloud.bigquery_connection_v1.types.CloudSqlProperties): - Cloud SQL properties. - - This field is a member of `oneof`_ ``properties``. - aws (google.cloud.bigquery_connection_v1.types.AwsProperties): - Amazon Web Services (AWS) properties. - - This field is a member of `oneof`_ ``properties``. - azure (google.cloud.bigquery_connection_v1.types.AzureProperties): - Azure properties. - - This field is a member of `oneof`_ ``properties``. - cloud_spanner (google.cloud.bigquery_connection_v1.types.CloudSpannerProperties): - Cloud Spanner properties. - - This field is a member of `oneof`_ ``properties``. - cloud_resource (google.cloud.bigquery_connection_v1.types.CloudResourceProperties): - Cloud Resource properties. - - This field is a member of `oneof`_ ``properties``. - spark (google.cloud.bigquery_connection_v1.types.SparkProperties): - Spark properties. - - This field is a member of `oneof`_ ``properties``. - salesforce_data_cloud (google.cloud.bigquery_connection_v1.types.SalesforceDataCloudProperties): - Optional. Salesforce DataCloud properties. - This field is intended for use only by - Salesforce partner projects. This field contains - properties for your Salesforce DataCloud - connection. - - This field is a member of `oneof`_ ``properties``. - creation_time (int): - Output only. The creation timestamp of the - connection. - last_modified_time (int): - Output only. The last update timestamp of the - connection. - has_credential (bool): - Output only. True, if credential is - configured for this connection. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - friendly_name: str = proto.Field( - proto.STRING, - number=2, - ) - description: str = proto.Field( - proto.STRING, - number=3, - ) - cloud_sql: 'CloudSqlProperties' = proto.Field( - proto.MESSAGE, - number=4, - oneof='properties', - message='CloudSqlProperties', - ) - aws: 'AwsProperties' = proto.Field( - proto.MESSAGE, - number=8, - oneof='properties', - message='AwsProperties', - ) - azure: 'AzureProperties' = proto.Field( - proto.MESSAGE, - number=11, - oneof='properties', - message='AzureProperties', - ) - cloud_spanner: 'CloudSpannerProperties' = proto.Field( - proto.MESSAGE, - number=21, - oneof='properties', - message='CloudSpannerProperties', - ) - cloud_resource: 'CloudResourceProperties' = proto.Field( - proto.MESSAGE, - number=22, - oneof='properties', - message='CloudResourceProperties', - ) - spark: 'SparkProperties' = proto.Field( - proto.MESSAGE, - number=23, - oneof='properties', - message='SparkProperties', - ) - salesforce_data_cloud: 'SalesforceDataCloudProperties' = proto.Field( - proto.MESSAGE, - number=24, - oneof='properties', - message='SalesforceDataCloudProperties', - ) - creation_time: int = proto.Field( - proto.INT64, - number=5, - ) - last_modified_time: int = proto.Field( - proto.INT64, - number=6, - ) - has_credential: bool = proto.Field( - proto.BOOL, - number=7, - ) - - -class CloudSqlProperties(proto.Message): - r"""Connection properties specific to the Cloud SQL. - - Attributes: - instance_id (str): - Cloud SQL instance ID in the form - ``project:location:instance``. 
- database (str): - Database name. - type_ (google.cloud.bigquery_connection_v1.types.CloudSqlProperties.DatabaseType): - Type of the Cloud SQL database. - credential (google.cloud.bigquery_connection_v1.types.CloudSqlCredential): - Input only. Cloud SQL credential. - service_account_id (str): - Output only. The account ID of the service - used for the purpose of this connection. - - When the connection is used in the context of an - operation in BigQuery, this service account will - serve as the identity being used for connecting - to the CloudSQL instance specified in this - connection. - """ - class DatabaseType(proto.Enum): - r"""Supported Cloud SQL database types. - - Values: - DATABASE_TYPE_UNSPECIFIED (0): - Unspecified database type. - POSTGRES (1): - Cloud SQL for PostgreSQL. - MYSQL (2): - Cloud SQL for MySQL. - """ - DATABASE_TYPE_UNSPECIFIED = 0 - POSTGRES = 1 - MYSQL = 2 - - instance_id: str = proto.Field( - proto.STRING, - number=1, - ) - database: str = proto.Field( - proto.STRING, - number=2, - ) - type_: DatabaseType = proto.Field( - proto.ENUM, - number=3, - enum=DatabaseType, - ) - credential: 'CloudSqlCredential' = proto.Field( - proto.MESSAGE, - number=4, - message='CloudSqlCredential', - ) - service_account_id: str = proto.Field( - proto.STRING, - number=5, - ) - - -class CloudSqlCredential(proto.Message): - r"""Credential info for the Cloud SQL. - - Attributes: - username (str): - The username for the credential. - password (str): - The password for the credential. - """ - - username: str = proto.Field( - proto.STRING, - number=1, - ) - password: str = proto.Field( - proto.STRING, - number=2, - ) - - -class CloudSpannerProperties(proto.Message): - r"""Connection properties specific to Cloud Spanner. - - Attributes: - database (str): - Cloud Spanner database in the form - \`project/instance/database' - use_parallelism (bool): - If parallelism should be used when reading - from Cloud Spanner - max_parallelism (int): - Allows setting max parallelism per query when executing on - Spanner independent compute resources. If unspecified, - default values of parallelism are chosen that are dependent - on the Cloud Spanner instance configuration. - - REQUIRES: ``use_parallelism`` must be set. REQUIRES: Either - ``use_data_boost`` or ``use_serverless_analytics`` must be - set. - use_serverless_analytics (bool): - If the serverless analytics service should be used to read - data from Cloud Spanner. Note: ``use_parallelism`` must be - set when using serverless analytics. - use_data_boost (bool): - If set, the request will be executed via Spanner independent - compute resources. REQUIRES: ``use_parallelism`` must be - set. - - NOTE: ``use_serverless_analytics`` will be deprecated. - Prefer ``use_data_boost`` over ``use_serverless_analytics``. - database_role (str): - Optional. Cloud Spanner database role for fine-grained - access control. The Cloud Spanner admin should have - provisioned the database role with appropriate permissions, - such as ``SELECT`` and ``INSERT``. Other users should only - use roles provided by their Cloud Spanner admins. - - For more details, see [About fine-grained access control] - (https://cloud.google.com/spanner/docs/fgac-about). - - REQUIRES: The database role name must start with a letter, - and can only contain letters, numbers, and underscores. 
- """ - - database: str = proto.Field( - proto.STRING, - number=1, - ) - use_parallelism: bool = proto.Field( - proto.BOOL, - number=2, - ) - max_parallelism: int = proto.Field( - proto.INT32, - number=5, - ) - use_serverless_analytics: bool = proto.Field( - proto.BOOL, - number=3, - ) - use_data_boost: bool = proto.Field( - proto.BOOL, - number=6, - ) - database_role: str = proto.Field( - proto.STRING, - number=4, - ) - - -class AwsProperties(proto.Message): - r"""Connection properties specific to Amazon Web Services (AWS). - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - cross_account_role (google.cloud.bigquery_connection_v1.types.AwsCrossAccountRole): - Authentication using Google owned AWS IAM - user's access key to assume into customer's AWS - IAM Role. Deprecated, do not use. - - This field is a member of `oneof`_ ``authentication_method``. - access_role (google.cloud.bigquery_connection_v1.types.AwsAccessRole): - Authentication using Google owned service - account to assume into customer's AWS IAM Role. - - This field is a member of `oneof`_ ``authentication_method``. - """ - - cross_account_role: 'AwsCrossAccountRole' = proto.Field( - proto.MESSAGE, - number=2, - oneof='authentication_method', - message='AwsCrossAccountRole', - ) - access_role: 'AwsAccessRole' = proto.Field( - proto.MESSAGE, - number=3, - oneof='authentication_method', - message='AwsAccessRole', - ) - - -class AwsCrossAccountRole(proto.Message): - r"""Authentication method for Amazon Web Services (AWS) that uses - Google owned AWS IAM user's access key to assume into customer's - AWS IAM Role. - - Attributes: - iam_role_id (str): - The user’s AWS IAM Role that trusts the - Google-owned AWS IAM user Connection. - iam_user_id (str): - Output only. Google-owned AWS IAM User for a - Connection. - external_id (str): - Output only. A Google-generated id for representing - Connection’s identity in AWS. External Id is also used for - preventing the Confused Deputy Problem. See - https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_create_for-user_externalid.html - """ - - iam_role_id: str = proto.Field( - proto.STRING, - number=1, - ) - iam_user_id: str = proto.Field( - proto.STRING, - number=2, - ) - external_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class AwsAccessRole(proto.Message): - r"""Authentication method for Amazon Web Services (AWS) that uses - Google owned Google service account to assume into customer's - AWS IAM Role. - - Attributes: - iam_role_id (str): - The user’s AWS IAM Role that trusts the - Google-owned AWS IAM user Connection. - identity (str): - A unique Google-owned and Google-generated - identity for the Connection. This identity will - be used to access the user's AWS IAM Role. - """ - - iam_role_id: str = proto.Field( - proto.STRING, - number=1, - ) - identity: str = proto.Field( - proto.STRING, - number=2, - ) - - -class AzureProperties(proto.Message): - r"""Container for connection properties specific to Azure. - - Attributes: - application (str): - Output only. The name of the Azure Active - Directory Application. - client_id (str): - Output only. The client id of the Azure - Active Directory Application. - object_id (str): - Output only. 
The object id of the Azure - Active Directory Application. - customer_tenant_id (str): - The id of customer's directory that hosts the - data. - redirect_uri (str): - The URL user will be redirected to after - granting consent during connection setup. - federated_application_client_id (str): - The client ID of the user's Azure Active - Directory Application used for a federated - connection. - identity (str): - Output only. A unique Google-owned and - Google-generated identity for the Connection. - This identity will be used to access the user's - Azure Active Directory Application. - """ - - application: str = proto.Field( - proto.STRING, - number=1, - ) - client_id: str = proto.Field( - proto.STRING, - number=2, - ) - object_id: str = proto.Field( - proto.STRING, - number=3, - ) - customer_tenant_id: str = proto.Field( - proto.STRING, - number=4, - ) - redirect_uri: str = proto.Field( - proto.STRING, - number=5, - ) - federated_application_client_id: str = proto.Field( - proto.STRING, - number=6, - ) - identity: str = proto.Field( - proto.STRING, - number=7, - ) - - -class CloudResourceProperties(proto.Message): - r"""Container for connection properties for delegation of access - to GCP resources. - - Attributes: - service_account_id (str): - Output only. The account ID of the service - created for the purpose of this connection. - - The service account does not have any - permissions associated with it when it is - created. After creation, customers delegate - permissions to the service account. When the - connection is used in the context of an - operation in BigQuery, the service account will - be used to connect to the desired resources in - GCP. - - The account ID is in the form of: - - <service-1234>@gcp-sa-bigquery-cloudresource.iam.gserviceaccount.com - """ - - service_account_id: str = proto.Field( - proto.STRING, - number=1, - ) - - -class MetastoreServiceConfig(proto.Message): - r"""Configuration of the Dataproc Metastore Service. - - Attributes: - metastore_service (str): - Optional. Resource name of an existing Dataproc Metastore - service. - - Example: - - - ``projects/[project_id]/locations/[region]/services/[service_id]`` - """ - - metastore_service: str = proto.Field( - proto.STRING, - number=1, - ) - - -class SparkHistoryServerConfig(proto.Message): - r"""Configuration of the Spark History Server. - - Attributes: - dataproc_cluster (str): - Optional. Resource name of an existing Dataproc Cluster to - act as a Spark History Server for the connection. - - Example: - - - ``projects/[project_id]/regions/[region]/clusters/[cluster_name]`` - """ - - dataproc_cluster: str = proto.Field( - proto.STRING, - number=1, - ) - - -class SparkProperties(proto.Message): - r"""Container for connection properties to execute stored - procedures for Apache Spark. - - Attributes: - service_account_id (str): - Output only. The account ID of the service - created for the purpose of this connection. - - The service account does not have any - permissions associated with it when it is - created. After creation, customers delegate - permissions to the service account. When the - connection is used in the context of a stored - procedure for Apache Spark in BigQuery, the - service account is used to connect to the - desired resources in Google Cloud. - - The account ID is in the form of: - - bqcx-<project-number>-<unique-id>@gcp-sa-bigquery-consp.iam.gserviceaccount.com - metastore_service_config (google.cloud.bigquery_connection_v1.types.MetastoreServiceConfig): - Optional. Dataproc Metastore Service - configuration for the connection.
- spark_history_server_config (google.cloud.bigquery_connection_v1.types.SparkHistoryServerConfig): - Optional. Spark History Server configuration - for the connection. - """ - - service_account_id: str = proto.Field( - proto.STRING, - number=1, - ) - metastore_service_config: 'MetastoreServiceConfig' = proto.Field( - proto.MESSAGE, - number=3, - message='MetastoreServiceConfig', - ) - spark_history_server_config: 'SparkHistoryServerConfig' = proto.Field( - proto.MESSAGE, - number=4, - message='SparkHistoryServerConfig', - ) - - -class SalesforceDataCloudProperties(proto.Message): - r"""Connection properties specific to Salesforce DataCloud. This - is intended for use only by Salesforce partner projects. - - Attributes: - instance_uri (str): - The URL to the user's Salesforce DataCloud - instance. - identity (str): - Output only. A unique Google-owned and - Google-generated service account identity for - the connection. - tenant_id (str): - The ID of the user's Salesforce tenant. - """ - - instance_uri: str = proto.Field( - proto.STRING, - number=1, - ) - identity: str = proto.Field( - proto.STRING, - number=2, - ) - tenant_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/mypy.ini b/owl-bot-staging/google-cloud-bigquery-connection/v1/mypy.ini deleted file mode 100644 index 574c5aed394b..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/noxfile.py b/owl-bot-staging/google-cloud-bigquery-connection/v1/noxfile.py deleted file mode 100644 index b48463a5bc19..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/noxfile.py +++ /dev/null @@ -1,280 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -import pathlib -import re -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", - "3.12", - "3.13", -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = 'google-cloud-bigquery-connection' - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.13" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds", - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "prerelease_deps", -] - -@nox.session(python=ALL_PYTHON) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def unit(session, protobuf_implementation): - """Run the unit test suite.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") - - # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. - # The 'cpp' implementation requires Protobuf<4. - if protobuf_implementation == "cpp": - session.install("protobuf<4") - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/bigquery_connection_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - -@nox.session(python=ALL_PYTHON[-1]) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def prerelease_deps(session, protobuf_implementation): - """Run the unit test suite against pre-release versions of dependencies.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - # Install test environment dependencies - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - - # Install the package without dependencies - session.install('-e', '.', '--no-deps') - - # We test the minimum dependency versions using the minimum Python - # version so the lowest python runtime that we test has a corresponding constraints - # file, located at `testing/constraints-<minimum python version>.txt`, which contains all of the - # dependencies and extras. - with open( - CURRENT_DIRECTORY - / "testing" - / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines.
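- # (The lookahead below keeps only pinned requirements: a line such as - # "google-api-core==1.34.0" yields the package name "google-api-core".)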
- constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - session.install(*constraints_deps) - - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 - "grpcio!=1.67.0rc1", - "grpcio-status", - "protobuf", - "proto-plus", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) - - # Remaining dependencies - other_deps = [ - "requests", - ] - session.install(*other_deps) - - # Print out prerelease package versions - - session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") - session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("python", "-c", "import grpc; print(grpc.__version__)") - session.run( - "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" - ) - session.run( - "python", "-c", "import proto; print(proto.__version__)" - ) - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/bigquery_connection_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. - """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. 
-    """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to a uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_create_connection_async.py b/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_create_connection_async.py deleted file mode 100644 index e006c43ac3f9..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_create_connection_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateConnection -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-connection - - -# [START bigqueryconnection_v1_generated_ConnectionService_CreateConnection_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_connection_v1 - - -async def sample_create_connection(): - # Create a client - client = bigquery_connection_v1.ConnectionServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_connection_v1.CreateConnectionRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_connection(request=request) - - # Handle the response - print(response) - -# [END bigqueryconnection_v1_generated_ConnectionService_CreateConnection_async] diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_create_connection_sync.py b/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_create_connection_sync.py deleted file mode 100644 index 8f77b2b55fac..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_create_connection_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateConnection -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-connection - - -# [START bigqueryconnection_v1_generated_ConnectionService_CreateConnection_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_connection_v1 - - -def sample_create_connection(): - # Create a client - client = bigquery_connection_v1.ConnectionServiceClient() - - # Initialize request argument(s) - request = bigquery_connection_v1.CreateConnectionRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_connection(request=request) - - # Handle the response - print(response) - -# [END bigqueryconnection_v1_generated_ConnectionService_CreateConnection_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_delete_connection_async.py b/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_delete_connection_async.py deleted file mode 100644 index 0afb14278d30..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_delete_connection_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteConnection -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-connection - - -# [START bigqueryconnection_v1_generated_ConnectionService_DeleteConnection_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_connection_v1 - - -async def sample_delete_connection(): - # Create a client - client = bigquery_connection_v1.ConnectionServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_connection_v1.DeleteConnectionRequest( - name="name_value", - ) - - # Make the request - await client.delete_connection(request=request) - - -# [END bigqueryconnection_v1_generated_ConnectionService_DeleteConnection_async] diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_delete_connection_sync.py b/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_delete_connection_sync.py deleted file mode 100644 index b18ae28494ad..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_delete_connection_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteConnection -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-connection - - -# [START bigqueryconnection_v1_generated_ConnectionService_DeleteConnection_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_connection_v1 - - -def sample_delete_connection(): - # Create a client - client = bigquery_connection_v1.ConnectionServiceClient() - - # Initialize request argument(s) - request = bigquery_connection_v1.DeleteConnectionRequest( - name="name_value", - ) - - # Make the request - client.delete_connection(request=request) - - -# [END bigqueryconnection_v1_generated_ConnectionService_DeleteConnection_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_get_connection_async.py b/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_get_connection_async.py deleted file mode 100644 index 6cfb21511ada..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_get_connection_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetConnection -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-connection - - -# [START bigqueryconnection_v1_generated_ConnectionService_GetConnection_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_connection_v1 - - -async def sample_get_connection(): - # Create a client - client = bigquery_connection_v1.ConnectionServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_connection_v1.GetConnectionRequest( - name="name_value", - ) - - # Make the request - response = await client.get_connection(request=request) - - # Handle the response - print(response) - -# [END bigqueryconnection_v1_generated_ConnectionService_GetConnection_async] diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_get_connection_sync.py b/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_get_connection_sync.py deleted file mode 100644 index 0662e6d09adb..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_get_connection_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetConnection -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-connection - - -# [START bigqueryconnection_v1_generated_ConnectionService_GetConnection_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_connection_v1 - - -def sample_get_connection(): - # Create a client - client = bigquery_connection_v1.ConnectionServiceClient() - - # Initialize request argument(s) - request = bigquery_connection_v1.GetConnectionRequest( - name="name_value", - ) - - # Make the request - response = client.get_connection(request=request) - - # Handle the response - print(response) - -# [END bigqueryconnection_v1_generated_ConnectionService_GetConnection_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_get_iam_policy_async.py b/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_get_iam_policy_async.py deleted file mode 100644 index 3cfd6eb8f5dc..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_get_iam_policy_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-connection - - -# [START bigqueryconnection_v1_generated_ConnectionService_GetIamPolicy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_connection_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -async def sample_get_iam_policy(): - # Create a client - client = bigquery_connection_v1.ConnectionServiceAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.get_iam_policy(request=request) - - # Handle the response - print(response) - -# [END bigqueryconnection_v1_generated_ConnectionService_GetIamPolicy_async] diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_get_iam_policy_sync.py b/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_get_iam_policy_sync.py deleted file mode 100644 index 00ec9e2b66f3..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_get_iam_policy_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-connection - - -# [START bigqueryconnection_v1_generated_ConnectionService_GetIamPolicy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_connection_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -def sample_get_iam_policy(): - # Create a client - client = bigquery_connection_v1.ConnectionServiceClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.get_iam_policy(request=request) - - # Handle the response - print(response) - -# [END bigqueryconnection_v1_generated_ConnectionService_GetIamPolicy_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_list_connections_async.py b/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_list_connections_async.py deleted file mode 100644 index 8fbb2a541ad7..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_list_connections_async.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListConnections -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-connection - - -# [START bigqueryconnection_v1_generated_ConnectionService_ListConnections_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_connection_v1 - - -async def sample_list_connections(): - # Create a client - client = bigquery_connection_v1.ConnectionServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_connection_v1.ListConnectionsRequest( - parent="parent_value", - page_size=951, - ) - - # Make the request (the async client method is a coroutine, so it must be awaited - # to obtain the pager before iterating with `async for`) - page_result = await client.list_connections(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END bigqueryconnection_v1_generated_ConnectionService_ListConnections_async] diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_list_connections_sync.py b/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_list_connections_sync.py deleted file mode 100644 index 57a1702e88a1..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_list_connections_sync.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListConnections -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-connection - - -# [START bigqueryconnection_v1_generated_ConnectionService_ListConnections_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_connection_v1 - - -def sample_list_connections(): - # Create a client - client = bigquery_connection_v1.ConnectionServiceClient() - - # Initialize request argument(s) - request = bigquery_connection_v1.ListConnectionsRequest( - parent="parent_value", - page_size=951, - ) - - # Make the request - page_result = client.list_connections(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END bigqueryconnection_v1_generated_ConnectionService_ListConnections_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_set_iam_policy_async.py b/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_set_iam_policy_async.py deleted file mode 100644 index 2d56938d7c31..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_set_iam_policy_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-connection - - -# [START bigqueryconnection_v1_generated_ConnectionService_SetIamPolicy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_connection_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -async def sample_set_iam_policy(): - # Create a client - client = bigquery_connection_v1.ConnectionServiceAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.set_iam_policy(request=request) - - # Handle the response - print(response) - -# [END bigqueryconnection_v1_generated_ConnectionService_SetIamPolicy_async] diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_set_iam_policy_sync.py b/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_set_iam_policy_sync.py deleted file mode 100644 index 6b43cc583d53..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_set_iam_policy_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-connection - - -# [START bigqueryconnection_v1_generated_ConnectionService_SetIamPolicy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_connection_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -def sample_set_iam_policy(): - # Create a client - client = bigquery_connection_v1.ConnectionServiceClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.set_iam_policy(request=request) - - # Handle the response - print(response) - -# [END bigqueryconnection_v1_generated_ConnectionService_SetIamPolicy_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_test_iam_permissions_async.py b/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_test_iam_permissions_async.py deleted file mode 100644 index ebae8b59a640..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_test_iam_permissions_async.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for TestIamPermissions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-connection - - -# [START bigqueryconnection_v1_generated_ConnectionService_TestIamPermissions_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_connection_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -async def sample_test_iam_permissions(): - # Create a client - client = bigquery_connection_v1.ConnectionServiceAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = await client.test_iam_permissions(request=request) - - # Handle the response - print(response) - -# [END bigqueryconnection_v1_generated_ConnectionService_TestIamPermissions_async] diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_test_iam_permissions_sync.py b/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_test_iam_permissions_sync.py deleted file mode 100644 index 81ac3bc3a6b1..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_test_iam_permissions_sync.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for TestIamPermissions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-connection - - -# [START bigqueryconnection_v1_generated_ConnectionService_TestIamPermissions_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_connection_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -def sample_test_iam_permissions(): - # Create a client - client = bigquery_connection_v1.ConnectionServiceClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = client.test_iam_permissions(request=request) - - # Handle the response - print(response) - -# [END bigqueryconnection_v1_generated_ConnectionService_TestIamPermissions_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_update_connection_async.py b/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_update_connection_async.py deleted file mode 100644 index 6c1a686f4c92..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_update_connection_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateConnection -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-connection - - -# [START bigqueryconnection_v1_generated_ConnectionService_UpdateConnection_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_connection_v1 - - -async def sample_update_connection(): - # Create a client - client = bigquery_connection_v1.ConnectionServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_connection_v1.UpdateConnectionRequest( - name="name_value", - ) - - # Make the request - response = await client.update_connection(request=request) - - # Handle the response - print(response) - -# [END bigqueryconnection_v1_generated_ConnectionService_UpdateConnection_async] diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_update_connection_sync.py b/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_update_connection_sync.py deleted file mode 100644 index b898996edd72..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/bigqueryconnection_v1_generated_connection_service_update_connection_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateConnection -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-connection - - -# [START bigqueryconnection_v1_generated_ConnectionService_UpdateConnection_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_connection_v1 - - -def sample_update_connection(): - # Create a client - client = bigquery_connection_v1.ConnectionServiceClient() - - # Initialize request argument(s) - request = bigquery_connection_v1.UpdateConnectionRequest( - name="name_value", - ) - - # Make the request - response = client.update_connection(request=request) - - # Handle the response - print(response) - -# [END bigqueryconnection_v1_generated_ConnectionService_UpdateConnection_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/snippet_metadata_google.cloud.bigquery.connection.v1.json b/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/snippet_metadata_google.cloud.bigquery.connection.v1.json deleted file mode 100644 index a25b5a29d478..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/samples/generated_samples/snippet_metadata_google.cloud.bigquery.connection.v1.json +++ /dev/null @@ -1,1337 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.bigquery.connection.v1", - "version": "v1" - } - ], - "language": "PYTHON", - "name": "google-cloud-bigquery-connection", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_connection_v1.ConnectionServiceAsyncClient", - "shortName": "ConnectionServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_connection_v1.ConnectionServiceAsyncClient.create_connection", - "method": { - "fullName": "google.cloud.bigquery.connection.v1.ConnectionService.CreateConnection", - "service": { - "fullName": "google.cloud.bigquery.connection.v1.ConnectionService", - "shortName": "ConnectionService" - }, - "shortName": "CreateConnection" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_connection_v1.types.CreateConnectionRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "connection", - "type": "google.cloud.bigquery_connection_v1.types.Connection" - }, - { - "name": "connection_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_connection_v1.types.Connection", - "shortName": "create_connection" - }, - "description": "Sample for CreateConnection", - "file": "bigqueryconnection_v1_generated_connection_service_create_connection_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryconnection_v1_generated_ConnectionService_CreateConnection_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryconnection_v1_generated_connection_service_create_connection_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_connection_v1.ConnectionServiceClient", - 
"shortName": "ConnectionServiceClient" - }, - "fullName": "google.cloud.bigquery_connection_v1.ConnectionServiceClient.create_connection", - "method": { - "fullName": "google.cloud.bigquery.connection.v1.ConnectionService.CreateConnection", - "service": { - "fullName": "google.cloud.bigquery.connection.v1.ConnectionService", - "shortName": "ConnectionService" - }, - "shortName": "CreateConnection" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_connection_v1.types.CreateConnectionRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "connection", - "type": "google.cloud.bigquery_connection_v1.types.Connection" - }, - { - "name": "connection_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_connection_v1.types.Connection", - "shortName": "create_connection" - }, - "description": "Sample for CreateConnection", - "file": "bigqueryconnection_v1_generated_connection_service_create_connection_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryconnection_v1_generated_ConnectionService_CreateConnection_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryconnection_v1_generated_connection_service_create_connection_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_connection_v1.ConnectionServiceAsyncClient", - "shortName": "ConnectionServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_connection_v1.ConnectionServiceAsyncClient.delete_connection", - "method": { - "fullName": "google.cloud.bigquery.connection.v1.ConnectionService.DeleteConnection", - "service": { - "fullName": "google.cloud.bigquery.connection.v1.ConnectionService", - "shortName": "ConnectionService" - }, - "shortName": "DeleteConnection" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_connection_v1.types.DeleteConnectionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_connection" - }, - "description": "Sample for DeleteConnection", - "file": "bigqueryconnection_v1_generated_connection_service_delete_connection_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryconnection_v1_generated_ConnectionService_DeleteConnection_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"bigqueryconnection_v1_generated_connection_service_delete_connection_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_connection_v1.ConnectionServiceClient", - "shortName": "ConnectionServiceClient" - }, - "fullName": "google.cloud.bigquery_connection_v1.ConnectionServiceClient.delete_connection", - "method": { - "fullName": "google.cloud.bigquery.connection.v1.ConnectionService.DeleteConnection", - "service": { - "fullName": "google.cloud.bigquery.connection.v1.ConnectionService", - "shortName": "ConnectionService" - }, - "shortName": "DeleteConnection" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_connection_v1.types.DeleteConnectionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_connection" - }, - "description": "Sample for DeleteConnection", - "file": "bigqueryconnection_v1_generated_connection_service_delete_connection_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryconnection_v1_generated_ConnectionService_DeleteConnection_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryconnection_v1_generated_connection_service_delete_connection_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_connection_v1.ConnectionServiceAsyncClient", - "shortName": "ConnectionServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_connection_v1.ConnectionServiceAsyncClient.get_connection", - "method": { - "fullName": "google.cloud.bigquery.connection.v1.ConnectionService.GetConnection", - "service": { - "fullName": "google.cloud.bigquery.connection.v1.ConnectionService", - "shortName": "ConnectionService" - }, - "shortName": "GetConnection" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_connection_v1.types.GetConnectionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_connection_v1.types.Connection", - "shortName": "get_connection" - }, - "description": "Sample for GetConnection", - "file": "bigqueryconnection_v1_generated_connection_service_get_connection_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryconnection_v1_generated_ConnectionService_GetConnection_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"bigqueryconnection_v1_generated_connection_service_get_connection_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_connection_v1.ConnectionServiceClient", - "shortName": "ConnectionServiceClient" - }, - "fullName": "google.cloud.bigquery_connection_v1.ConnectionServiceClient.get_connection", - "method": { - "fullName": "google.cloud.bigquery.connection.v1.ConnectionService.GetConnection", - "service": { - "fullName": "google.cloud.bigquery.connection.v1.ConnectionService", - "shortName": "ConnectionService" - }, - "shortName": "GetConnection" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_connection_v1.types.GetConnectionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_connection_v1.types.Connection", - "shortName": "get_connection" - }, - "description": "Sample for GetConnection", - "file": "bigqueryconnection_v1_generated_connection_service_get_connection_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryconnection_v1_generated_ConnectionService_GetConnection_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryconnection_v1_generated_connection_service_get_connection_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_connection_v1.ConnectionServiceAsyncClient", - "shortName": "ConnectionServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_connection_v1.ConnectionServiceAsyncClient.get_iam_policy", - "method": { - "fullName": "google.cloud.bigquery.connection.v1.ConnectionService.GetIamPolicy", - "service": { - "fullName": "google.cloud.bigquery.connection.v1.ConnectionService", - "shortName": "ConnectionService" - }, - "shortName": "GetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "get_iam_policy" - }, - "description": "Sample for GetIamPolicy", - "file": "bigqueryconnection_v1_generated_connection_service_get_iam_policy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryconnection_v1_generated_ConnectionService_GetIamPolicy_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - 
], - "title": "bigqueryconnection_v1_generated_connection_service_get_iam_policy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_connection_v1.ConnectionServiceClient", - "shortName": "ConnectionServiceClient" - }, - "fullName": "google.cloud.bigquery_connection_v1.ConnectionServiceClient.get_iam_policy", - "method": { - "fullName": "google.cloud.bigquery.connection.v1.ConnectionService.GetIamPolicy", - "service": { - "fullName": "google.cloud.bigquery.connection.v1.ConnectionService", - "shortName": "ConnectionService" - }, - "shortName": "GetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "get_iam_policy" - }, - "description": "Sample for GetIamPolicy", - "file": "bigqueryconnection_v1_generated_connection_service_get_iam_policy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryconnection_v1_generated_ConnectionService_GetIamPolicy_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryconnection_v1_generated_connection_service_get_iam_policy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_connection_v1.ConnectionServiceAsyncClient", - "shortName": "ConnectionServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_connection_v1.ConnectionServiceAsyncClient.list_connections", - "method": { - "fullName": "google.cloud.bigquery.connection.v1.ConnectionService.ListConnections", - "service": { - "fullName": "google.cloud.bigquery.connection.v1.ConnectionService", - "shortName": "ConnectionService" - }, - "shortName": "ListConnections" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_connection_v1.types.ListConnectionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_connection_v1.services.connection_service.pagers.ListConnectionsAsyncPager", - "shortName": "list_connections" - }, - "description": "Sample for ListConnections", - "file": "bigqueryconnection_v1_generated_connection_service_list_connections_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryconnection_v1_generated_ConnectionService_ListConnections_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": 
"REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryconnection_v1_generated_connection_service_list_connections_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_connection_v1.ConnectionServiceClient", - "shortName": "ConnectionServiceClient" - }, - "fullName": "google.cloud.bigquery_connection_v1.ConnectionServiceClient.list_connections", - "method": { - "fullName": "google.cloud.bigquery.connection.v1.ConnectionService.ListConnections", - "service": { - "fullName": "google.cloud.bigquery.connection.v1.ConnectionService", - "shortName": "ConnectionService" - }, - "shortName": "ListConnections" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_connection_v1.types.ListConnectionsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_connection_v1.services.connection_service.pagers.ListConnectionsPager", - "shortName": "list_connections" - }, - "description": "Sample for ListConnections", - "file": "bigqueryconnection_v1_generated_connection_service_list_connections_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryconnection_v1_generated_ConnectionService_ListConnections_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryconnection_v1_generated_connection_service_list_connections_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_connection_v1.ConnectionServiceAsyncClient", - "shortName": "ConnectionServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_connection_v1.ConnectionServiceAsyncClient.set_iam_policy", - "method": { - "fullName": "google.cloud.bigquery.connection.v1.ConnectionService.SetIamPolicy", - "service": { - "fullName": "google.cloud.bigquery.connection.v1.ConnectionService", - "shortName": "ConnectionService" - }, - "shortName": "SetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "set_iam_policy" - }, - "description": "Sample for SetIamPolicy", - "file": "bigqueryconnection_v1_generated_connection_service_set_iam_policy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryconnection_v1_generated_ConnectionService_SetIamPolicy_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - 
"type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryconnection_v1_generated_connection_service_set_iam_policy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_connection_v1.ConnectionServiceClient", - "shortName": "ConnectionServiceClient" - }, - "fullName": "google.cloud.bigquery_connection_v1.ConnectionServiceClient.set_iam_policy", - "method": { - "fullName": "google.cloud.bigquery.connection.v1.ConnectionService.SetIamPolicy", - "service": { - "fullName": "google.cloud.bigquery.connection.v1.ConnectionService", - "shortName": "ConnectionService" - }, - "shortName": "SetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "set_iam_policy" - }, - "description": "Sample for SetIamPolicy", - "file": "bigqueryconnection_v1_generated_connection_service_set_iam_policy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryconnection_v1_generated_ConnectionService_SetIamPolicy_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryconnection_v1_generated_connection_service_set_iam_policy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_connection_v1.ConnectionServiceAsyncClient", - "shortName": "ConnectionServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_connection_v1.ConnectionServiceAsyncClient.test_iam_permissions", - "method": { - "fullName": "google.cloud.bigquery.connection.v1.ConnectionService.TestIamPermissions", - "service": { - "fullName": "google.cloud.bigquery.connection.v1.ConnectionService", - "shortName": "ConnectionService" - }, - "shortName": "TestIamPermissions" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "permissions", - "type": "MutableSequence[str]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", - "shortName": "test_iam_permissions" - }, - "description": "Sample for TestIamPermissions", - "file": "bigqueryconnection_v1_generated_connection_service_test_iam_permissions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryconnection_v1_generated_ConnectionService_TestIamPermissions_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": 
"SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryconnection_v1_generated_connection_service_test_iam_permissions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_connection_v1.ConnectionServiceClient", - "shortName": "ConnectionServiceClient" - }, - "fullName": "google.cloud.bigquery_connection_v1.ConnectionServiceClient.test_iam_permissions", - "method": { - "fullName": "google.cloud.bigquery.connection.v1.ConnectionService.TestIamPermissions", - "service": { - "fullName": "google.cloud.bigquery.connection.v1.ConnectionService", - "shortName": "ConnectionService" - }, - "shortName": "TestIamPermissions" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "permissions", - "type": "MutableSequence[str]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", - "shortName": "test_iam_permissions" - }, - "description": "Sample for TestIamPermissions", - "file": "bigqueryconnection_v1_generated_connection_service_test_iam_permissions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryconnection_v1_generated_ConnectionService_TestIamPermissions_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryconnection_v1_generated_connection_service_test_iam_permissions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_connection_v1.ConnectionServiceAsyncClient", - "shortName": "ConnectionServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_connection_v1.ConnectionServiceAsyncClient.update_connection", - "method": { - "fullName": "google.cloud.bigquery.connection.v1.ConnectionService.UpdateConnection", - "service": { - "fullName": "google.cloud.bigquery.connection.v1.ConnectionService", - "shortName": "ConnectionService" - }, - "shortName": "UpdateConnection" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_connection_v1.types.UpdateConnectionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "connection", - "type": "google.cloud.bigquery_connection_v1.types.Connection" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_connection_v1.types.Connection", - "shortName": "update_connection" - }, - "description": 
"Sample for UpdateConnection", - "file": "bigqueryconnection_v1_generated_connection_service_update_connection_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryconnection_v1_generated_ConnectionService_UpdateConnection_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryconnection_v1_generated_connection_service_update_connection_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_connection_v1.ConnectionServiceClient", - "shortName": "ConnectionServiceClient" - }, - "fullName": "google.cloud.bigquery_connection_v1.ConnectionServiceClient.update_connection", - "method": { - "fullName": "google.cloud.bigquery.connection.v1.ConnectionService.UpdateConnection", - "service": { - "fullName": "google.cloud.bigquery.connection.v1.ConnectionService", - "shortName": "ConnectionService" - }, - "shortName": "UpdateConnection" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_connection_v1.types.UpdateConnectionRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "connection", - "type": "google.cloud.bigquery_connection_v1.types.Connection" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_connection_v1.types.Connection", - "shortName": "update_connection" - }, - "description": "Sample for UpdateConnection", - "file": "bigqueryconnection_v1_generated_connection_service_update_connection_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryconnection_v1_generated_ConnectionService_UpdateConnection_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryconnection_v1_generated_connection_service_update_connection_sync.py" - } - ] -} diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/scripts/fixup_bigquery_connection_v1_keywords.py b/owl-bot-staging/google-cloud-bigquery-connection/v1/scripts/fixup_bigquery_connection_v1_keywords.py deleted file mode 100644 index 464c50426b9e..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/scripts/fixup_bigquery_connection_v1_keywords.py +++ /dev/null @@ -1,183 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class bigquery_connectionCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_connection': ('parent', 'connection', 'connection_id', ), - 'delete_connection': ('name', ), - 'get_connection': ('name', ), - 'get_iam_policy': ('resource', 'options', ), - 'list_connections': ('parent', 'page_size', 'page_token', ), - 'set_iam_policy': ('resource', 'policy', 'update_mask', ), - 'test_iam_permissions': ('resource', 'permissions', ), - 'update_connection': ('name', 'connection', 'update_mask', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=bigquery_connectionCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. 
- tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the bigquery_connection client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/setup.py b/owl-bot-staging/google-cloud-bigquery-connection/v1/setup.py deleted file mode 100644 index 77ea204ea819..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/setup.py +++ /dev/null @@ -1,99 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
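The fixup script above is a migration aid for call sites written against the older, positional client surface: its transformer folds positional arguments into a single ``request`` dict (keyed by ``METHOD_TO_PARAMS``) while keeping the control parameters ``retry``, ``timeout``, and ``metadata`` as keywords. A minimal sketch of the rewrite it performs, assuming the script's directory is on ``sys.path`` so the transformer class can be imported:

.. code-block:: python

   import libcst as cst

   # Assumes the scripts/ directory is importable; the module name matches
   # the file shown above.
   from fixup_bigquery_connection_v1_keywords import (
       bigquery_connectionCallTransformer,
   )

   # Old-style call: flattened positional argument plus a control keyword.
   before = 'client.get_connection("projects/p/locations/l/connections/c", timeout=30.0)\n'

   after = cst.parse_module(before).visit(bigquery_connectionCallTransformer()).code
   print(after)
   # Roughly:
   # client.get_connection(request={'name': "projects/p/locations/l/connections/c"}, timeout=30.0)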
-# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-bigquery-connection' - - -description = "Google Cloud Bigquery Connection API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/bigquery_connection/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "grpc-google-iam-v1 >= 0.14.0, <1.0.0dev", -] -extras = { -} -url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-connection" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - extras_require=extras, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-bigquery-connection/v1/testing/constraints-3.10.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/testing/constraints-3.10.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
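The setup.py above derives the package version by scanning ``gapic_version.py`` with a lookbehind/lookahead regex rather than importing the module. A small sketch of that extraction; the file contents and version number here are placeholders:

.. code-block:: python

   import re

   # Placeholder stand-in for google/cloud/bigquery_connection/gapic_version.py.
   sample = '__version__ = "1.2.3"'

   # The same pattern used in setup.py above: capture MAJOR.MINOR.PATCH between
   # double quotes. (The dots are unescaped, so each matches any character,
   # which is harmless for well-formed version strings.)
   candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", sample)
   assert candidates == ["1.2.3"]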
-google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-bigquery-connection/v1/testing/constraints-3.11.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/testing/constraints-3.11.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-bigquery-connection/v1/testing/constraints-3.12.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/testing/constraints-3.12.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-bigquery-connection/v1/testing/constraints-3.13.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/testing/constraints-3.13.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-bigquery-connection/v1/testing/constraints-3.7.txt deleted file mode 100644 index fb7e93a1b473..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/testing/constraints-3.7.txt +++ /dev/null @@ -1,11 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -proto-plus==1.22.3 -protobuf==3.20.2 -grpc-google-iam-v1==0.14.0 diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-bigquery-connection/v1/testing/constraints-3.8.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/testing/constraints-3.8.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-bigquery-connection/v1/testing/constraints-3.9.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/testing/constraints-3.9.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/tests/__init__.py b/owl-bot-staging/google-cloud-bigquery-connection/v1/tests/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/tests/unit/__init__.py b/owl-bot-staging/google-cloud-bigquery-connection/v1/tests/unit/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-bigquery-connection/v1/tests/unit/gapic/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/tests/unit/gapic/bigquery_connection_v1/__init__.py b/owl-bot-staging/google-cloud-bigquery-connection/v1/tests/unit/gapic/bigquery_connection_v1/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/tests/unit/gapic/bigquery_connection_v1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
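The constraints files above come in two flavors: constraints-3.7.txt pins every dependency to the exact lower bound declared in setup.py, while the files for newer Python versions leave dependencies unpinned. One way such a file is commonly consumed (a hypothetical invocation; the project's actual test sessions may differ) is via pip's constraints flag, which verifies that the declared minimums actually install and work:

.. code-block:: console

   pip install -e . -c testing/constraints-3.7.txt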
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-connection/v1/tests/unit/gapic/bigquery_connection_v1/test_connection_service.py b/owl-bot-staging/google-cloud-bigquery-connection/v1/tests/unit/gapic/bigquery_connection_v1/test_connection_service.py deleted file mode 100644 index be5628a2c489..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-connection/v1/tests/unit/gapic/bigquery_connection_v1/test_connection_service.py +++ /dev/null @@ -1,7391 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.bigquery_connection_v1.services.connection_service import ConnectionServiceAsyncClient -from google.cloud.bigquery_connection_v1.services.connection_service import ConnectionServiceClient -from google.cloud.bigquery_connection_v1.services.connection_service import pagers -from google.cloud.bigquery_connection_v1.services.connection_service import transports -from google.cloud.bigquery_connection_v1.types import connection -from google.cloud.bigquery_connection_v1.types import connection as gcbc_connection -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.oauth2 import service_account 
-from google.protobuf import field_mask_pb2 # type: ignore -from google.type import expr_pb2 # type: ignore -import google.auth - - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": "service account credentials", - "principal": "service-account@example.com", -} -CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert ConnectionServiceClient._get_default_mtls_endpoint(None) is None - assert ConnectionServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert ConnectionServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert ConnectionServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert ConnectionServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert ConnectionServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert ConnectionServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert ConnectionServiceClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert ConnectionServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - ConnectionServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert ConnectionServiceClient._read_environment_variables() == (False, "never", None) - - with 
mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert ConnectionServiceClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert ConnectionServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - ConnectionServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert ConnectionServiceClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert ConnectionServiceClient._get_client_cert_source(None, False) is None - assert ConnectionServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert ConnectionServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert ConnectionServiceClient._get_client_cert_source(None, True) is mock_default_cert_source - assert ConnectionServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(ConnectionServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ConnectionServiceClient)) -@mock.patch.object(ConnectionServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ConnectionServiceAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = ConnectionServiceClient._DEFAULT_UNIVERSE - default_endpoint = ConnectionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = ConnectionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert ConnectionServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert ConnectionServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == ConnectionServiceClient.DEFAULT_MTLS_ENDPOINT - assert ConnectionServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert ConnectionServiceClient._get_api_endpoint(None, None, default_universe, "always") == ConnectionServiceClient.DEFAULT_MTLS_ENDPOINT - assert ConnectionServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == ConnectionServiceClient.DEFAULT_MTLS_ENDPOINT - assert ConnectionServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert ConnectionServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - ConnectionServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." 
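The endpoint tests above pin down the resolution order: an explicit ``api_endpoint`` override always wins; otherwise ``GOOGLE_API_USE_MTLS_ENDPOINT`` ("never", "auto", or "always") together with the availability of a client certificate chooses between the regular and mTLS endpoints, and the universe domain fills in the endpoint template. A minimal sketch of those knobs from the caller's side, using anonymous credentials so it runs without ADC:

.. code-block:: python

   import os

   from google.api_core import client_options
   from google.auth.credentials import AnonymousCredentials
   from google.cloud.bigquery_connection_v1 import ConnectionServiceClient

   # With GOOGLE_API_USE_MTLS_ENDPOINT="never", the plain endpoint is used
   # even if a client certificate is available.
   os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "never"
   client = ConnectionServiceClient(credentials=AnonymousCredentials())
   assert client.api_endpoint == "bigqueryconnection.googleapis.com"

   # An explicit api_endpoint override always wins over the environment.
   options = client_options.ClientOptions(api_endpoint="foo.com")
   client = ConnectionServiceClient(
       credentials=AnonymousCredentials(),
       client_options=options,
   )
   assert client.api_endpoint == "foo.com"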
- - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert ConnectionServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert ConnectionServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert ConnectionServiceClient._get_universe_domain(None, None) == ConnectionServiceClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - ConnectionServiceClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." - -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = ConnectionServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - -@pytest.mark.parametrize("error_code", [401,403,404,500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = ConnectionServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - -@pytest.mark.parametrize("client_class,transport_name", [ - (ConnectionServiceClient, "grpc"), - (ConnectionServiceAsyncClient, "grpc_asyncio"), - (ConnectionServiceClient, "rest"), -]) -def test_connection_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'bigqueryconnection.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://bigqueryconnection.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.ConnectionServiceGrpcTransport, "grpc"), - (transports.ConnectionServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.ConnectionServiceRestTransport, "rest"), -]) -def test_connection_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = 
service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (ConnectionServiceClient, "grpc"), - (ConnectionServiceAsyncClient, "grpc_asyncio"), - (ConnectionServiceClient, "rest"), -]) -def test_connection_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'bigqueryconnection.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://bigqueryconnection.googleapis.com' - ) - - -def test_connection_service_client_get_transport_class(): - transport = ConnectionServiceClient.get_transport_class() - available_transports = [ - transports.ConnectionServiceGrpcTransport, - transports.ConnectionServiceRestTransport, - ] - assert transport in available_transports - - transport = ConnectionServiceClient.get_transport_class("grpc") - assert transport == transports.ConnectionServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (ConnectionServiceClient, transports.ConnectionServiceGrpcTransport, "grpc"), - (ConnectionServiceAsyncClient, transports.ConnectionServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (ConnectionServiceClient, transports.ConnectionServiceRestTransport, "rest"), -]) -@mock.patch.object(ConnectionServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ConnectionServiceClient)) -@mock.patch.object(ConnectionServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ConnectionServiceAsyncClient)) -def test_connection_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(ConnectionServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(ConnectionServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (ConnectionServiceClient, transports.ConnectionServiceGrpcTransport, "grpc", "true"), - 
(ConnectionServiceAsyncClient, transports.ConnectionServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (ConnectionServiceClient, transports.ConnectionServiceGrpcTransport, "grpc", "false"), - (ConnectionServiceAsyncClient, transports.ConnectionServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (ConnectionServiceClient, transports.ConnectionServiceRestTransport, "rest", "true"), - (ConnectionServiceClient, transports.ConnectionServiceRestTransport, "rest", "false"), -]) -@mock.patch.object(ConnectionServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ConnectionServiceClient)) -@mock.patch.object(ConnectionServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ConnectionServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_connection_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - ConnectionServiceClient, ConnectionServiceAsyncClient -]) -@mock.patch.object(ConnectionServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ConnectionServiceClient)) -@mock.patch.object(ConnectionServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ConnectionServiceAsyncClient)) -def test_connection_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - ConnectionServiceClient, ConnectionServiceAsyncClient -]) -@mock.patch.object(ConnectionServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ConnectionServiceClient)) -@mock.patch.object(ConnectionServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ConnectionServiceAsyncClient)) -def test_connection_service_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = ConnectionServiceClient._DEFAULT_UNIVERSE - default_endpoint = ConnectionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = ConnectionServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
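The classmethod exercised above can also be driven directly. A minimal sketch, assuming a hypothetical `client_cert_source` callback (a real one returns PEM `(cert_bytes, key_bytes)`):

.. code-block:: python

    import os

    from google.api_core import client_options
    from google.cloud.bigquery_connection_v1 import ConnectionServiceClient

    def client_cert_source():
        # Hypothetical placeholder callback for illustration.
        return b"cert", b"key"

    os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "true"
    options = client_options.ClientOptions(client_cert_source=client_cert_source)

    # No api_endpoint override, cert available, default "auto" mode: per the
    # cases asserted above, this resolves to the DEFAULT_MTLS_ENDPOINT and the
    # callback supplied in the options.
    endpoint, cert_source = ConnectionServiceClient.get_mtls_endpoint_and_cert_source(options)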
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (ConnectionServiceClient, transports.ConnectionServiceGrpcTransport, "grpc"), - (ConnectionServiceAsyncClient, transports.ConnectionServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (ConnectionServiceClient, transports.ConnectionServiceRestTransport, "rest"), -]) -def test_connection_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (ConnectionServiceClient, transports.ConnectionServiceGrpcTransport, "grpc", grpc_helpers), - (ConnectionServiceAsyncClient, transports.ConnectionServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (ConnectionServiceClient, transports.ConnectionServiceRestTransport, "rest", None), -]) -def test_connection_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
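Where the installed google-api-core supports `ClientOptions.universe_domain` (the `hasattr` guard above covers older versions), the endpoint template is filled with that domain. A sketch using the same placeholder domain; the expected endpoint value is inferred from the service's default endpoint, not taken from this diff:

.. code-block:: python

    from google.api_core import client_options
    from google.auth.credentials import AnonymousCredentials
    from google.cloud.bigquery_connection_v1 import ConnectionServiceClient

    options = client_options.ClientOptions(universe_domain="bar.com")
    client = ConnectionServiceClient(
        client_options=options, credentials=AnonymousCredentials()
    )
    # Expected: client.api_endpoint == "bigqueryconnection.bar.com"
    # (the default endpoint template populated with the custom universe domain).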
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_connection_service_client_client_options_from_dict(): - with mock.patch('google.cloud.bigquery_connection_v1.services.connection_service.transports.ConnectionServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = ConnectionServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (ConnectionServiceClient, transports.ConnectionServiceGrpcTransport, "grpc", grpc_helpers), - (ConnectionServiceAsyncClient, transports.ConnectionServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_connection_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
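The credentials-file cases above map to this public usage; `credentials.json` is the same placeholder path the tests use, and the file's credentials take the place of Application Default Credentials:

.. code-block:: python

    from google.api_core import client_options
    from google.cloud.bigquery_connection_v1 import ConnectionServiceClient

    options = client_options.ClientOptions(credentials_file="credentials.json")
    # The transport loads credentials from the named file instead of ADC.
    client = ConnectionServiceClient(client_options=options)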
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "bigqueryconnection.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="bigqueryconnection.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - gcbc_connection.CreateConnectionRequest, - dict, -]) -def test_create_connection(request_type, transport: str = 'grpc'): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gcbc_connection.Connection( - name='name_value', - friendly_name='friendly_name_value', - description='description_value', - creation_time=1379, - last_modified_time=1890, - has_credential=True, - ) - response = client.create_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = gcbc_connection.CreateConnectionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, gcbc_connection.Connection) - assert response.name == 'name_value' - assert response.friendly_name == 'friendly_name_value' - assert response.description == 'description_value' - assert response.creation_time == 1379 - assert response.last_modified_time == 1890 - assert response.has_credential is True - - -def test_create_connection_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = gcbc_connection.CreateConnectionRequest( - parent='parent_value', - connection_id='connection_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_connection), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
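The `create_channel` assertion above pins the service's default OAuth scopes. A sketch of requesting those same scopes explicitly when obtaining Application Default Credentials:

.. code-block:: python

    import google.auth

    # The two scopes match the default_scopes asserted in the test above.
    credentials, project_id = google.auth.default(
        scopes=[
            "https://www.googleapis.com/auth/bigquery",
            "https://www.googleapis.com/auth/cloud-platform",
        ]
    )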
- client.create_connection(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == gcbc_connection.CreateConnectionRequest( - parent='parent_value', - connection_id='connection_id_value', - ) - -def test_create_connection_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_connection in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_connection] = mock_rpc - request = {} - client.create_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_connection(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_connection_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ConnectionServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_connection in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_connection] = mock_rpc - - request = {} - await client.create_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_connection(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_connection_async(transport: str = 'grpc_asyncio', request_type=gcbc_connection.CreateConnectionRequest): - client = ConnectionServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_connection), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(gcbc_connection.Connection( - name='name_value', - friendly_name='friendly_name_value', - description='description_value', - creation_time=1379, - last_modified_time=1890, - has_credential=True, - )) - response = await client.create_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = gcbc_connection.CreateConnectionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, gcbc_connection.Connection) - assert response.name == 'name_value' - assert response.friendly_name == 'friendly_name_value' - assert response.description == 'description_value' - assert response.creation_time == 1379 - assert response.last_modified_time == 1890 - assert response.has_credential is True - - -@pytest.mark.asyncio -async def test_create_connection_async_from_dict(): - await test_create_connection_async(request_type=dict) - -def test_create_connection_field_headers(): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gcbc_connection.CreateConnectionRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_connection), - '__call__') as call: - call.return_value = gcbc_connection.Connection() - client.create_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_connection_field_headers_async(): - client = ConnectionServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gcbc_connection.CreateConnectionRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_connection), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcbc_connection.Connection()) - await client.create_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_connection_flattened(): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gcbc_connection.Connection() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.create_connection( - parent='parent_value', - connection=gcbc_connection.Connection(name='name_value'), - connection_id='connection_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].connection - mock_val = gcbc_connection.Connection(name='name_value') - assert arg == mock_val - arg = args[0].connection_id - mock_val = 'connection_id_value' - assert arg == mock_val - - -def test_create_connection_flattened_error(): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_connection( - gcbc_connection.CreateConnectionRequest(), - parent='parent_value', - connection=gcbc_connection.Connection(name='name_value'), - connection_id='connection_id_value', - ) - -@pytest.mark.asyncio -async def test_create_connection_flattened_async(): - client = ConnectionServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gcbc_connection.Connection() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcbc_connection.Connection()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_connection( - parent='parent_value', - connection=gcbc_connection.Connection(name='name_value'), - connection_id='connection_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].connection - mock_val = gcbc_connection.Connection(name='name_value') - assert arg == mock_val - arg = args[0].connection_id - mock_val = 'connection_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_connection_flattened_error_async(): - client = ConnectionServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_connection( - gcbc_connection.CreateConnectionRequest(), - parent='parent_value', - connection=gcbc_connection.Connection(name='name_value'), - connection_id='connection_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - connection.GetConnectionRequest, - dict, -]) -def test_get_connection(request_type, transport: str = 'grpc'): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_connection), - '__call__') as call: - # Designate an appropriate return value for the call. 
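The flattened tests above pin the generated surface's two mutually exclusive call shapes. A sketch (resource names are placeholders; running it for real requires credentials and a reachable service):

.. code-block:: python

    from google.cloud.bigquery_connection_v1 import ConnectionServiceClient, types

    client = ConnectionServiceClient()
    parent = "projects/my-project/locations/US"  # placeholder

    # Either pass a request object...
    request = types.CreateConnectionRequest(parent=parent, connection_id="conn1")
    client.create_connection(request=request)

    # ...or flattened keyword arguments. Mixing both raises ValueError,
    # exactly as the *_flattened_error tests assert.
    client.create_connection(
        parent=parent, connection=types.Connection(), connection_id="conn1"
    )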
- call.return_value = connection.Connection( - name='name_value', - friendly_name='friendly_name_value', - description='description_value', - creation_time=1379, - last_modified_time=1890, - has_credential=True, - ) - response = client.get_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = connection.GetConnectionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, connection.Connection) - assert response.name == 'name_value' - assert response.friendly_name == 'friendly_name_value' - assert response.description == 'description_value' - assert response.creation_time == 1379 - assert response.last_modified_time == 1890 - assert response.has_credential is True - - -def test_get_connection_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = connection.GetConnectionRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_connection), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_connection(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == connection.GetConnectionRequest( - name='name_value', - ) - -def test_get_connection_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_connection in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_connection] = mock_rpc - request = {} - client.get_connection(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_connection(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_connection_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ConnectionServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_connection in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_connection] = mock_rpc - - request = {} - await client.get_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_connection(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_connection_async(transport: str = 'grpc_asyncio', request_type=connection.GetConnectionRequest): - client = ConnectionServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(connection.Connection( - name='name_value', - friendly_name='friendly_name_value', - description='description_value', - creation_time=1379, - last_modified_time=1890, - has_credential=True, - )) - response = await client.get_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = connection.GetConnectionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, connection.Connection) - assert response.name == 'name_value' - assert response.friendly_name == 'friendly_name_value' - assert response.description == 'description_value' - assert response.creation_time == 1379 - assert response.last_modified_time == 1890 - assert response.has_credential is True - - -@pytest.mark.asyncio -async def test_get_connection_async_from_dict(): - await test_get_connection_async(request_type=dict) - -def test_get_connection_field_headers(): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = connection.GetConnectionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_connection), - '__call__') as call: - call.return_value = connection.Connection() - client.get_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_connection_field_headers_async(): - client = ConnectionServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = connection.GetConnectionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_connection), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(connection.Connection()) - await client.get_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_connection_flattened(): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = connection.Connection() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_connection( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_connection_flattened_error(): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_connection( - connection.GetConnectionRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_connection_flattened_async(): - client = ConnectionServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = connection.Connection() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(connection.Connection()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_connection( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
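The `x-goog-request-params` tuples asserted in the field-header tests above are built by a google.api_core helper (the same one the pager test further below calls directly). A quick sketch:

.. code-block:: python

    from google.api_core import gapic_v1

    # URL-encodes the routing parameters into a single metadata tuple.
    metadata = gapic_v1.routing_header.to_grpc_metadata((("name", "name_value"),))
    # metadata == ("x-goog-request-params", "name=name_value")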
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_connection_flattened_error_async(): - client = ConnectionServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_connection( - connection.GetConnectionRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - connection.ListConnectionsRequest, - dict, -]) -def test_list_connections(request_type, transport: str = 'grpc'): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = connection.ListConnectionsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_connections(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = connection.ListConnectionsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListConnectionsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_connections_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = connection.ListConnectionsRequest( - parent='parent_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.list_connections(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == connection.ListConnectionsRequest( - parent='parent_value', - page_token='page_token_value', - ) - -def test_list_connections_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_connections in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_connections] = mock_rpc - request = {} - client.list_connections(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_connections(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_connections_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ConnectionServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_connections in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_connections] = mock_rpc - - request = {} - await client.list_connections(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_connections(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_connections_async(transport: str = 'grpc_asyncio', request_type=connection.ListConnectionsRequest): - client = ConnectionServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(connection.ListConnectionsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_connections(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = connection.ListConnectionsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListConnectionsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_connections_async_from_dict(): - await test_list_connections_async(request_type=dict) - -def test_list_connections_field_headers(): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = connection.ListConnectionsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__') as call: - call.return_value = connection.ListConnectionsResponse() - client.list_connections(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_connections_field_headers_async(): - client = ConnectionServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = connection.ListConnectionsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(connection.ListConnectionsResponse()) - await client.list_connections(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_connections_flattened(): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = connection.ListConnectionsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_connections( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_connections_flattened_error(): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_connections( - connection.ListConnectionsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_connections_flattened_async(): - client = ConnectionServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = connection.ListConnectionsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(connection.ListConnectionsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_connections( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_connections_flattened_error_async(): - client = ConnectionServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_connections( - connection.ListConnectionsRequest(), - parent='parent_value', - ) - - -def test_list_connections_pager(transport_name: str = "grpc"): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - connection.ListConnectionsResponse( - connections=[ - connection.Connection(), - connection.Connection(), - connection.Connection(), - ], - next_page_token='abc', - ), - connection.ListConnectionsResponse( - connections=[], - next_page_token='def', - ), - connection.ListConnectionsResponse( - connections=[ - connection.Connection(), - ], - next_page_token='ghi', - ), - connection.ListConnectionsResponse( - connections=[ - connection.Connection(), - connection.Connection(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_connections(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, connection.Connection) - for i in results) -def test_list_connections_pages(transport_name: str = "grpc"): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - connection.ListConnectionsResponse( - connections=[ - connection.Connection(), - connection.Connection(), - connection.Connection(), - ], - next_page_token='abc', - ), - connection.ListConnectionsResponse( - connections=[], - next_page_token='def', - ), - connection.ListConnectionsResponse( - connections=[ - connection.Connection(), - ], - next_page_token='ghi', - ), - connection.ListConnectionsResponse( - connections=[ - connection.Connection(), - connection.Connection(), - ], - ), - RuntimeError, - ) - pages = list(client.list_connections(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_connections_async_pager(): - client = ConnectionServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - connection.ListConnectionsResponse( - connections=[ - connection.Connection(), - connection.Connection(), - connection.Connection(), - ], - next_page_token='abc', - ), - connection.ListConnectionsResponse( - connections=[], - next_page_token='def', - ), - connection.ListConnectionsResponse( - connections=[ - connection.Connection(), - ], - next_page_token='ghi', - ), - connection.ListConnectionsResponse( - connections=[ - connection.Connection(), - connection.Connection(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_connections(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, connection.Connection) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_connections_async_pages(): - client = ConnectionServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
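Against a real backend, the pagers these mocks simulate are consumed as below; `parent` is a placeholder. Item iteration fetches follow-up pages transparently, while `.pages` yields the raw responses:

.. code-block:: python

    from google.cloud.bigquery_connection_v1 import ConnectionServiceClient

    client = ConnectionServiceClient()
    parent = "projects/my-project/locations/US"  # placeholder

    # Item iteration: the pager issues subsequent page requests lazily.
    for conn in client.list_connections(parent=parent):
        print(conn.name)

    # Page iteration: each page is a raw ListConnectionsResponse.
    for page in client.list_connections(parent=parent).pages:
        print(page.next_page_token)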
- call.side_effect = ( - connection.ListConnectionsResponse( - connections=[ - connection.Connection(), - connection.Connection(), - connection.Connection(), - ], - next_page_token='abc', - ), - connection.ListConnectionsResponse( - connections=[], - next_page_token='def', - ), - connection.ListConnectionsResponse( - connections=[ - connection.Connection(), - ], - next_page_token='ghi', - ), - connection.ListConnectionsResponse( - connections=[ - connection.Connection(), - connection.Connection(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_connections(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - gcbc_connection.UpdateConnectionRequest, - dict, -]) -def test_update_connection(request_type, transport: str = 'grpc'): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gcbc_connection.Connection( - name='name_value', - friendly_name='friendly_name_value', - description='description_value', - creation_time=1379, - last_modified_time=1890, - has_credential=True, - ) - response = client.update_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = gcbc_connection.UpdateConnectionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, gcbc_connection.Connection) - assert response.name == 'name_value' - assert response.friendly_name == 'friendly_name_value' - assert response.description == 'description_value' - assert response.creation_time == 1379 - assert response.last_modified_time == 1890 - assert response.has_credential is True - - -def test_update_connection_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = gcbc_connection.UpdateConnectionRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_connection), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.update_connection(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == gcbc_connection.UpdateConnectionRequest( - name='name_value', - ) - -def test_update_connection_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_connection in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_connection] = mock_rpc - request = {} - client.update_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_connection(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_connection_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ConnectionServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_connection in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_connection] = mock_rpc - - request = {} - await client.update_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.update_connection(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_connection_async(transport: str = 'grpc_asyncio', request_type=gcbc_connection.UpdateConnectionRequest): - client = ConnectionServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_connection), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(gcbc_connection.Connection( - name='name_value', - friendly_name='friendly_name_value', - description='description_value', - creation_time=1379, - last_modified_time=1890, - has_credential=True, - )) - response = await client.update_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = gcbc_connection.UpdateConnectionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, gcbc_connection.Connection) - assert response.name == 'name_value' - assert response.friendly_name == 'friendly_name_value' - assert response.description == 'description_value' - assert response.creation_time == 1379 - assert response.last_modified_time == 1890 - assert response.has_credential is True - - -@pytest.mark.asyncio -async def test_update_connection_async_from_dict(): - await test_update_connection_async(request_type=dict) - -def test_update_connection_field_headers(): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gcbc_connection.UpdateConnectionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_connection), - '__call__') as call: - call.return_value = gcbc_connection.Connection() - client.update_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_connection_field_headers_async(): - client = ConnectionServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gcbc_connection.UpdateConnectionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_connection), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcbc_connection.Connection()) - await client.update_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_update_connection_flattened(): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gcbc_connection.Connection() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.update_connection(
- name='name_value',
- connection=gcbc_connection.Connection(name='name_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
- arg = args[0].connection
- mock_val = gcbc_connection.Connection(name='name_value')
- assert arg == mock_val
- arg = args[0].update_mask
- mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
- assert arg == mock_val
-
-
-def test_update_connection_flattened_error():
- client = ConnectionServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.update_connection(
- gcbc_connection.UpdateConnectionRequest(),
- name='name_value',
- connection=gcbc_connection.Connection(name='name_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
-@pytest.mark.asyncio
-async def test_update_connection_flattened_async():
- client = ConnectionServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_connection),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcbc_connection.Connection())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.update_connection(
- name='name_value',
- connection=gcbc_connection.Connection(name='name_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
- arg = args[0].connection
- mock_val = gcbc_connection.Connection(name='name_value')
- assert arg == mock_val
- arg = args[0].update_mask
- mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_connection_flattened_error_async():
- client = ConnectionServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.update_connection(
- gcbc_connection.UpdateConnectionRequest(),
- name='name_value',
- connection=gcbc_connection.Connection(name='name_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
-
-@pytest.mark.parametrize("request_type", [
- connection.DeleteConnectionRequest,
- dict,
-])
-def test_delete_connection(request_type, transport: str = 'grpc'):
- client = ConnectionServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
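- # Patching __call__ on the type of the bound multicallable intercepts the
- # request at the transport layer without opening a real channel.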
- with mock.patch.object( - type(client.transport.delete_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = connection.DeleteConnectionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_connection_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = connection.DeleteConnectionRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_connection), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_connection(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == connection.DeleteConnectionRequest( - name='name_value', - ) - -def test_delete_connection_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_connection in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_connection] = mock_rpc - request = {} - client.delete_connection(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_connection(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_connection_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ConnectionServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_connection in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_connection] = mock_rpc - - request = {} - await client.delete_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_connection(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_connection_async(transport: str = 'grpc_asyncio', request_type=connection.DeleteConnectionRequest): - client = ConnectionServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = connection.DeleteConnectionRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_connection_async_from_dict(): - await test_delete_connection_async(request_type=dict) - -def test_delete_connection_field_headers(): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = connection.DeleteConnectionRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_connection), - '__call__') as call: - call.return_value = None - client.delete_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
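- # The x-goog-request-params metadata entry mirrors the URI path fields so
- # the backend can route the request without parsing the message body.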
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_delete_connection_field_headers_async():
- client = ConnectionServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = connection.DeleteConnectionRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_connection),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
- await client.delete_connection(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_delete_connection_flattened():
- client = ConnectionServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_connection),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = None
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.delete_connection(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_delete_connection_flattened_error():
- client = ConnectionServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.delete_connection(
- connection.DeleteConnectionRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_delete_connection_flattened_async():
- client = ConnectionServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_connection),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.delete_connection(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_connection_flattened_error_async():
- client = ConnectionServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
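- # Passing both forms is ambiguous (which value should win?), so the client
- # raises ValueError instead of silently merging them.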
- with pytest.raises(ValueError): - await client.delete_connection( - connection.DeleteConnectionRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.GetIamPolicyRequest, - dict, -]) -def test_get_iam_policy(request_type, transport: str = 'grpc'): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b'etag_blob', - ) - response = client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.GetIamPolicyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -def test_get_iam_policy_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = iam_policy_pb2.GetIamPolicyRequest( - resource='resource_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_iam_policy(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest( - resource='resource_value', - ) - -def test_get_iam_policy_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_iam_policy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc - request = {} - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. 
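- # The call above went through the wrapper cached at construction time;
- # the repeat call below must not trigger wrap_method again.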
- assert mock_rpc.call_count == 1
-
- client.get_iam_policy(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_iam_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = ConnectionServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.get_iam_policy in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.get_iam_policy] = mock_rpc
-
- request = {}
- await client.get_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.get_iam_policy(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.GetIamPolicyRequest):
- client = ConnectionServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_iam_policy),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy(
- version=774,
- etag=b'etag_blob',
- ))
- response = await client.get_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = iam_policy_pb2.GetIamPolicyRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, policy_pb2.Policy)
- assert response.version == 774
- assert response.etag == b'etag_blob'
-
-
-@pytest.mark.asyncio
-async def test_get_iam_policy_async_from_dict():
- await test_get_iam_policy_async(request_type=dict)
-
-def test_get_iam_policy_field_headers():
- client = ConnectionServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = iam_policy_pb2.GetIamPolicyRequest()
-
- request.resource = 'resource_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_iam_policy),
- '__call__') as call:
- call.return_value = policy_pb2.Policy()
- client.get_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_iam_policy_field_headers_async(): - client = ConnectionServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - -def test_get_iam_policy_from_dict_foreign(): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - response = client.get_iam_policy(request={ - 'resource': 'resource_value', - 'options': options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - - -def test_get_iam_policy_flattened(): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_iam_policy( - resource='resource_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].resource - mock_val = 'resource_value' - assert arg == mock_val - - -def test_get_iam_policy_flattened_error(): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_iam_policy( - iam_policy_pb2.GetIamPolicyRequest(), - resource='resource_value', - ) - -@pytest.mark.asyncio -async def test_get_iam_policy_flattened_async(): - client = ConnectionServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.get_iam_policy(
- resource='resource_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].resource
- mock_val = 'resource_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_iam_policy_flattened_error_async():
- client = ConnectionServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.get_iam_policy(
- iam_policy_pb2.GetIamPolicyRequest(),
- resource='resource_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- iam_policy_pb2.SetIamPolicyRequest,
- dict,
-])
-def test_set_iam_policy(request_type, transport: str = 'grpc'):
- client = ConnectionServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.set_iam_policy),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = policy_pb2.Policy(
- version=774,
- etag=b'etag_blob',
- )
- response = client.set_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = iam_policy_pb2.SetIamPolicyRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, policy_pb2.Policy)
- assert response.version == 774
- assert response.etag == b'etag_blob'
-
-
-def test_set_iam_policy_non_empty_request_with_auto_populated_field():
- # This test is a coverage failsafe to make sure that UUID4 fields are
- # automatically populated, according to AIP-4235, with non-empty requests.
- client = ConnectionServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Populate all string fields in the request which are not UUID4
- # since we want to check that UUID4 are populated automatically
- # if they meet the requirements of AIP 4235.
- request = iam_policy_pb2.SetIamPolicyRequest(
- resource='resource_value',
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.set_iam_policy),
- '__call__') as call:
- call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client.set_iam_policy(request=request)
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == iam_policy_pb2.SetIamPolicyRequest(
- resource='resource_value',
- )
-
-def test_set_iam_policy_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = ConnectionServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.set_iam_policy in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc
- request = {}
- client.set_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- client.set_iam_policy(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_set_iam_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = ConnectionServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.set_iam_policy in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.set_iam_policy] = mock_rpc
-
- request = {}
- await client.set_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.set_iam_policy(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_set_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.SetIamPolicyRequest):
- client = ConnectionServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.set_iam_policy),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy(
- version=774,
- etag=b'etag_blob',
- ))
- response = await client.set_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.SetIamPolicyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -@pytest.mark.asyncio -async def test_set_iam_policy_async_from_dict(): - await test_set_iam_policy_async(request_type=dict) - -def test_set_iam_policy_field_headers(): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_set_iam_policy_field_headers_async(): - client = ConnectionServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - -def test_set_iam_policy_from_dict_foreign(): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - response = client.set_iam_policy(request={ - 'resource': 'resource_value', - 'policy': policy_pb2.Policy(version=774), - 'update_mask': field_mask_pb2.FieldMask(paths=['paths_value']), - } - ) - call.assert_called() - - -def test_set_iam_policy_flattened(): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.set_iam_policy(
- resource='resource_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].resource
- mock_val = 'resource_value'
- assert arg == mock_val
-
-
-def test_set_iam_policy_flattened_error():
- client = ConnectionServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.set_iam_policy(
- iam_policy_pb2.SetIamPolicyRequest(),
- resource='resource_value',
- )
-
-@pytest.mark.asyncio
-async def test_set_iam_policy_flattened_async():
- client = ConnectionServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.set_iam_policy),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.set_iam_policy(
- resource='resource_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].resource
- mock_val = 'resource_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_set_iam_policy_flattened_error_async():
- client = ConnectionServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.set_iam_policy(
- iam_policy_pb2.SetIamPolicyRequest(),
- resource='resource_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- iam_policy_pb2.TestIamPermissionsRequest,
- dict,
-])
-def test_test_iam_permissions(request_type, transport: str = 'grpc'):
- client = ConnectionServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.test_iam_permissions),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = iam_policy_pb2.TestIamPermissionsResponse(
- permissions=['permissions_value'],
- )
- response = client.test_iam_permissions(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = iam_policy_pb2.TestIamPermissionsRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse)
- assert response.permissions == ['permissions_value']
-
-
-def test_test_iam_permissions_non_empty_request_with_auto_populated_field():
- # This test is a coverage failsafe to make sure that UUID4 fields are
- # automatically populated, according to AIP-4235, with non-empty requests.
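- # Under AIP-4235, request fields annotated as auto-populated (UUID4 format)
- # are filled in with a generated UUID by the client when left unset.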
- client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = iam_policy_pb2.TestIamPermissionsRequest( - resource='resource_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.test_iam_permissions(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest( - resource='resource_value', - ) - -def test_test_iam_permissions_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.test_iam_permissions in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.test_iam_permissions] = mock_rpc - request = {} - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.test_iam_permissions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_test_iam_permissions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ConnectionServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.test_iam_permissions in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.test_iam_permissions] = mock_rpc - - request = {} - await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- await client.test_iam_permissions(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_test_iam_permissions_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.TestIamPermissionsRequest):
- client = ConnectionServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.test_iam_permissions),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse(
- permissions=['permissions_value'],
- ))
- response = await client.test_iam_permissions(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = iam_policy_pb2.TestIamPermissionsRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse)
- assert response.permissions == ['permissions_value']
-
-
-@pytest.mark.asyncio
-async def test_test_iam_permissions_async_from_dict():
- await test_test_iam_permissions_async(request_type=dict)
-
-def test_test_iam_permissions_field_headers():
- client = ConnectionServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = iam_policy_pb2.TestIamPermissionsRequest()
-
- request.resource = 'resource_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.test_iam_permissions),
- '__call__') as call:
- call.return_value = iam_policy_pb2.TestIamPermissionsResponse()
- client.test_iam_permissions(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'resource=resource_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_test_iam_permissions_field_headers_async():
- client = ConnectionServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = iam_policy_pb2.TestIamPermissionsRequest()
-
- request.resource = 'resource_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.test_iam_permissions),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse())
- await client.test_iam_permissions(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'resource=resource_value',
- ) in kw['metadata']
-
-def test_test_iam_permissions_from_dict_foreign():
- client = ConnectionServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.test_iam_permissions),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = iam_policy_pb2.TestIamPermissionsResponse()
- response = client.test_iam_permissions(request={
- 'resource': 'resource_value',
- 'permissions': ['permissions_value'],
- }
- )
- call.assert_called()
-
-
-def test_test_iam_permissions_flattened():
- client = ConnectionServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.test_iam_permissions),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = iam_policy_pb2.TestIamPermissionsResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.test_iam_permissions(
- resource='resource_value',
- permissions=['permissions_value'],
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].resource
- mock_val = 'resource_value'
- assert arg == mock_val
- arg = args[0].permissions
- mock_val = ['permissions_value']
- assert arg == mock_val
-
-
-def test_test_iam_permissions_flattened_error():
- client = ConnectionServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.test_iam_permissions(
- iam_policy_pb2.TestIamPermissionsRequest(),
- resource='resource_value',
- permissions=['permissions_value'],
- )
-
-@pytest.mark.asyncio
-async def test_test_iam_permissions_flattened_async():
- client = ConnectionServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.test_iam_permissions),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.test_iam_permissions(
- resource='resource_value',
- permissions=['permissions_value'],
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].resource
- mock_val = 'resource_value'
- assert arg == mock_val
- arg = args[0].permissions
- mock_val = ['permissions_value']
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_test_iam_permissions_flattened_error_async():
- client = ConnectionServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError): - await client.test_iam_permissions( - iam_policy_pb2.TestIamPermissionsRequest(), - resource='resource_value', - permissions=['permissions_value'], - ) - - -def test_create_connection_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_connection in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_connection] = mock_rpc - - request = {} - client.create_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_connection(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_connection_rest_required_fields(request_type=gcbc_connection.CreateConnectionRequest): - transport_class = transports.ConnectionServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_connection._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_connection._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("connection_id", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = gcbc_connection.Connection() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
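- # transcode() normally maps the request onto the RPC's http rule; stubbing
- # it with a fixed uri/method/body keeps this test independent of the real
- # URL template.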
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "post",
- 'query_params': pb_request,
- }
- transcode_result['body'] = pb_request
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- # Convert return value to protobuf type
- return_value = gcbc_connection.Connection.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.create_connection(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_create_connection_rest_unset_required_fields():
- transport = transports.ConnectionServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.create_connection._get_unset_required_fields({})
- assert set(unset_fields) == (set(("connectionId", )) & set(("parent", "connection", )))
-
-
-def test_create_connection_rest_flattened():
- client = ConnectionServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = gcbc_connection.Connection()
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
- # get truthy value for each flattened field
- mock_args = dict(
- parent='parent_value',
- connection=gcbc_connection.Connection(name='name_value'),
- connection_id='connection_id_value',
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = gcbc_connection.Connection.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- client.create_connection(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/connections" % client.transport._host, args[1])
-
-
-def test_create_connection_rest_flattened_error(transport: str = 'rest'):
- client = ConnectionServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError): - client.create_connection( - gcbc_connection.CreateConnectionRequest(), - parent='parent_value', - connection=gcbc_connection.Connection(name='name_value'), - connection_id='connection_id_value', - ) - - -def test_get_connection_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_connection in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_connection] = mock_rpc - - request = {} - client.get_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_connection(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_connection_rest_required_fields(request_type=connection.GetConnectionRequest): - transport_class = transports.ConnectionServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_connection._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_connection._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = connection.Connection() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "get",
- 'query_params': pb_request,
- }
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- # Convert return value to protobuf type
- return_value = connection.Connection.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.get_connection(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_get_connection_rest_unset_required_fields():
- transport = transports.ConnectionServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.get_connection._get_unset_required_fields({})
- assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-def test_get_connection_rest_flattened():
- client = ConnectionServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = connection.Connection()
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'name': 'projects/sample1/locations/sample2/connections/sample3'}
-
- # get truthy value for each flattened field
- mock_args = dict(
- name='name_value',
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = connection.Connection.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- client.get_connection(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1/{name=projects/*/locations/*/connections/*}" % client.transport._host, args[1])
-
-
-def test_get_connection_rest_flattened_error(transport: str = 'rest'):
- client = ConnectionServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError): - client.get_connection( - connection.GetConnectionRequest(), - name='name_value', - ) - - -def test_list_connections_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_connections in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_connections] = mock_rpc - - request = {} - client.list_connections(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_connections(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_connections_rest_required_fields(request_type=connection.ListConnectionsRequest): - transport_class = transports.ConnectionServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["page_size"] = 0 - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "pageSize" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_connections._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "pageSize" in jsonified_request - assert jsonified_request["pageSize"] == request_init["page_size"] - - jsonified_request["parent"] = 'parent_value' - jsonified_request["pageSize"] = 951 - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_connections._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "pageSize" in jsonified_request - assert jsonified_request["pageSize"] == 951 - - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = connection.ListConnectionsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
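- # (pageSize has a proto default of 0, so the assertion further below
- # expects it to appear in the query string serialized as the string "0".)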
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = connection.ListConnectionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_connections(request) - - expected_params = [ - ( - "pageSize", - str(0), - ), - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_connections_rest_unset_required_fields(): - transport = transports.ConnectionServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_connections._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", "pageSize", ))) - - -def test_list_connections_rest_flattened(): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = connection.ListConnectionsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = connection.ListConnectionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_connections(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/connections" % client.transport._host, args[1]) - - -def test_list_connections_rest_flattened_error(transport: str = 'rest'): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_connections( - connection.ListConnectionsRequest(), - parent='parent_value', - ) - - -def test_list_connections_rest_pager(transport: str = 'rest'): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. 
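- # Four fake pages (3 + 0 + 1 + 2 connections) are fed through the
- # transport; the pager should yield all 6 items and stop once the
- # final page carries no next_page_token.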
- with mock.patch.object(Session, 'request') as req:
- # Set the response as a series of pages
- response = (
- connection.ListConnectionsResponse(
- connections=[
- connection.Connection(),
- connection.Connection(),
- connection.Connection(),
- ],
- next_page_token='abc',
- ),
- connection.ListConnectionsResponse(
- connections=[],
- next_page_token='def',
- ),
- connection.ListConnectionsResponse(
- connections=[
- connection.Connection(),
- ],
- next_page_token='ghi',
- ),
- connection.ListConnectionsResponse(
- connections=[
- connection.Connection(),
- connection.Connection(),
- ],
- ),
- )
- # Two responses for two calls
- response = response + response
-
- # Wrap the values into proper Response objs
- response = tuple(connection.ListConnectionsResponse.to_json(x) for x in response)
- return_values = tuple(Response() for _ in response)
- for return_val, response_val in zip(return_values, response):
- return_val._content = response_val.encode('UTF-8')
- return_val.status_code = 200
- req.side_effect = return_values
-
- sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
- pager = client.list_connections(request=sample_request)
-
- results = list(pager)
- assert len(results) == 6
- assert all(isinstance(i, connection.Connection)
- for i in results)
-
- pages = list(client.list_connections(request=sample_request).pages)
- for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
- assert page_.raw_page.next_page_token == token
-
-
-def test_update_connection_rest_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = ConnectionServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.update_connection in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.update_connection] = mock_rpc
-
- request = {}
- client.update_connection(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1 - - client.update_connection(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_connection_rest_required_fields(request_type=gcbc_connection.UpdateConnectionRequest): - transport_class = transports.ConnectionServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_connection._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_connection._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = gcbc_connection.Connection() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
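- # (UpdateConnection maps to HTTP PATCH, so the stubbed transcode result
- # below also carries the request as the HTTP body.)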
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gcbc_connection.Connection.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_connection(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_connection_rest_unset_required_fields(): - transport = transports.ConnectionServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_connection._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", )) & set(("name", "connection", "updateMask", ))) - - -def test_update_connection_rest_flattened(): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = gcbc_connection.Connection() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/connections/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - connection=gcbc_connection.Connection(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gcbc_connection.Connection.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_connection(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/connections/*}" % client.transport._host, args[1]) - - -def test_update_connection_rest_flattened_error(transport: str = 'rest'): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_connection( - gcbc_connection.UpdateConnectionRequest(), - name='name_value', - connection=gcbc_connection.Connection(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_delete_connection_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_connection in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_connection] = mock_rpc - - request = {} - client.delete_connection(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_connection(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_connection_rest_required_fields(request_type=connection.DeleteConnectionRequest): - transport_class = transports.ConnectionServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_connection._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_connection._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
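- # (DeleteConnection returns google.protobuf.Empty, so the mocked HTTP
- # response below has an empty body.)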
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_connection(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_connection_rest_unset_required_fields(): - transport = transports.ConnectionServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_connection._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_delete_connection_rest_flattened(): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/connections/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_connection(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/connections/*}" % client.transport._host, args[1]) - - -def test_delete_connection_rest_flattened_error(transport: str = 'rest'): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_connection( - connection.DeleteConnectionRequest(), - name='name_value', - ) - - -def test_get_iam_policy_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_iam_policy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc - - request = {} - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_iam_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_iam_policy_rest_required_fields(request_type=iam_policy_pb2.GetIamPolicyRequest): - transport_class = transports.ConnectionServiceRestTransport - - request_init = {} - request_init["resource"] = "" - request = request_type(**request_init) - pb_request = request - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["resource"] = 'resource_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "resource" in jsonified_request - assert jsonified_request["resource"] == 'resource_value' - - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_iam_policy(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_iam_policy_rest_unset_required_fields(): - transport = transports.ConnectionServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_iam_policy._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("resource", ))) - - -def test_get_iam_policy_rest_flattened(): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
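- # (GetIamPolicy is exposed over REST as a POST to the ':getIamPolicy'
- # custom verb, which the path assertion below reflects.)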
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - - # get arguments that satisfy an http rule for this method - sample_request = {'resource': 'projects/sample1/locations/sample2/connections/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - resource='resource_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_iam_policy(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{resource=projects/*/locations/*/connections/*}:getIamPolicy" % client.transport._host, args[1]) - - -def test_get_iam_policy_rest_flattened_error(transport: str = 'rest'): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_iam_policy( - iam_policy_pb2.GetIamPolicyRequest(), - resource='resource_value', - ) - - -def test_set_iam_policy_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.set_iam_policy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc - - request = {} - client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.set_iam_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_set_iam_policy_rest_required_fields(request_type=iam_policy_pb2.SetIamPolicyRequest): - transport_class = transports.ConnectionServiceRestTransport - - request_init = {} - request_init["resource"] = "" - request = request_type(**request_init) - pb_request = request - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["resource"] = 'resource_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "resource" in jsonified_request - assert jsonified_request["resource"] == 'resource_value' - - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.set_iam_policy(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_set_iam_policy_rest_unset_required_fields(): - transport = transports.ConnectionServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.set_iam_policy._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("resource", "policy", ))) - - -def test_set_iam_policy_rest_flattened(): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
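- # policy_pb2.Policy is a plain protobuf message rather than a proto-plus
- # type, so it can be serialized with MessageToJson directly, without the
- # .pb() conversion used for Connection responses.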
- return_value = policy_pb2.Policy() - - # get arguments that satisfy an http rule for this method - sample_request = {'resource': 'projects/sample1/locations/sample2/connections/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - resource='resource_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.set_iam_policy(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{resource=projects/*/locations/*/connections/*}:setIamPolicy" % client.transport._host, args[1]) - - -def test_set_iam_policy_rest_flattened_error(transport: str = 'rest'): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.set_iam_policy( - iam_policy_pb2.SetIamPolicyRequest(), - resource='resource_value', - ) - - -def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.test_iam_permissions in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.test_iam_permissions] = mock_rpc - - request = {} - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.test_iam_permissions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_test_iam_permissions_rest_required_fields(request_type=iam_policy_pb2.TestIamPermissionsRequest): - transport_class = transports.ConnectionServiceRestTransport - - request_init = {} - request_init["resource"] = "" - request_init["permissions"] = "" - request = request_type(**request_init) - pb_request = request - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["resource"] = 'resource_value' - jsonified_request["permissions"] = 'permissions_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "resource" in jsonified_request - assert jsonified_request["resource"] == 'resource_value' - assert "permissions" in jsonified_request - assert jsonified_request["permissions"] == 'permissions_value' - - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = iam_policy_pb2.TestIamPermissionsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.test_iam_permissions(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_test_iam_permissions_rest_unset_required_fields(): - transport = transports.ConnectionServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("resource", "permissions", ))) - - -def test_test_iam_permissions_rest_flattened(): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = iam_policy_pb2.TestIamPermissionsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'resource': 'projects/sample1/locations/sample2/connections/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - resource='resource_value', - permissions=['permissions_value'], - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.test_iam_permissions(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{resource=projects/*/locations/*/connections/*}:testIamPermissions" % client.transport._host, args[1]) - - -def test_test_iam_permissions_rest_flattened_error(transport: str = 'rest'): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.test_iam_permissions( - iam_policy_pb2.TestIamPermissionsRequest(), - resource='resource_value', - permissions=['permissions_value'], - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.ConnectionServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. 
- transport = transports.ConnectionServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ConnectionServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.ConnectionServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ConnectionServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ConnectionServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.ConnectionServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ConnectionServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.ConnectionServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = ConnectionServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.ConnectionServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.ConnectionServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.ConnectionServiceGrpcTransport, - transports.ConnectionServiceGrpcAsyncIOTransport, - transports.ConnectionServiceRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = ConnectionServiceClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_connection_empty_call_grpc(): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_connection), - '__call__') as call: - call.return_value = gcbc_connection.Connection() - client.create_connection(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcbc_connection.CreateConnectionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_connection_empty_call_grpc(): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_connection), - '__call__') as call: - call.return_value = connection.Connection() - client.get_connection(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = connection.GetConnectionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_connections_empty_call_grpc(): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__') as call: - call.return_value = connection.ListConnectionsResponse() - client.list_connections(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = connection.ListConnectionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_connection_empty_call_grpc(): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_connection), - '__call__') as call: - call.return_value = gcbc_connection.Connection() - client.update_connection(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcbc_connection.UpdateConnectionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_connection_empty_call_grpc(): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_connection), - '__call__') as call: - call.return_value = None - client.delete_connection(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = connection.DeleteConnectionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_iam_policy_empty_call_grpc(): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.get_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.GetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_set_iam_policy_empty_call_grpc(): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.set_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.SetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_test_iam_permissions_empty_call_grpc(): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - client.test_iam_permissions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.TestIamPermissionsRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = ConnectionServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = ConnectionServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_connection_empty_call_grpc_asyncio(): - client = ConnectionServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcbc_connection.Connection( - name='name_value', - friendly_name='friendly_name_value', - description='description_value', - creation_time=1379, - last_modified_time=1890, - has_credential=True, - )) - await client.create_connection(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcbc_connection.CreateConnectionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
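-# (The asyncio variants wrap each canned response in
-# grpc_helpers_async.FakeUnaryUnaryCall so the mocked stub can be awaited
-# like a real unary-unary gRPC call.)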
-@pytest.mark.asyncio -async def test_get_connection_empty_call_grpc_asyncio(): - client = ConnectionServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(connection.Connection( - name='name_value', - friendly_name='friendly_name_value', - description='description_value', - creation_time=1379, - last_modified_time=1890, - has_credential=True, - )) - await client.get_connection(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = connection.GetConnectionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_connections_empty_call_grpc_asyncio(): - client = ConnectionServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(connection.ListConnectionsResponse( - next_page_token='next_page_token_value', - )) - await client.list_connections(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = connection.ListConnectionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_connection_empty_call_grpc_asyncio(): - client = ConnectionServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcbc_connection.Connection( - name='name_value', - friendly_name='friendly_name_value', - description='description_value', - creation_time=1379, - last_modified_time=1890, - has_credential=True, - )) - await client.update_connection(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcbc_connection.UpdateConnectionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_connection_empty_call_grpc_asyncio(): - client = ConnectionServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_connection), - '__call__') as call: - # Designate an appropriate return value for the call. 
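- # DeleteConnection resolves to Empty, represented here as None.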
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_connection(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = connection.DeleteConnectionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_iam_policy_empty_call_grpc_asyncio(): - client = ConnectionServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( - version=774, - etag=b'etag_blob', - )) - await client.get_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.GetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_set_iam_policy_empty_call_grpc_asyncio(): - client = ConnectionServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( - version=774, - etag=b'etag_blob', - )) - await client.set_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.SetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_test_iam_permissions_empty_call_grpc_asyncio(): - client = ConnectionServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - )) - await client.test_iam_permissions(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.TestIamPermissionsRequest() - - assert args[0] == request_msg - - -def test_transport_kind_rest(): - transport = ConnectionServiceClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" - - -def test_create_connection_rest_bad_request(request_type=gcbc_connection.CreateConnectionRequest): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_connection(request) - - -@pytest.mark.parametrize("request_type", [ - gcbc_connection.CreateConnectionRequest, - dict, -]) -def test_create_connection_rest_call_success(request_type): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["connection"] = {'name': 'name_value', 'friendly_name': 'friendly_name_value', 'description': 'description_value', 'cloud_sql': {'instance_id': 'instance_id_value', 'database': 'database_value', 'type_': 1, 'credential': {'username': 'username_value', 'password': 'password_value'}, 'service_account_id': 'service_account_id_value'}, 'aws': {'cross_account_role': {'iam_role_id': 'iam_role_id_value', 'iam_user_id': 'iam_user_id_value', 'external_id': 'external_id_value'}, 'access_role': {'iam_role_id': 'iam_role_id_value', 'identity': 'identity_value'}}, 'azure': {'application': 'application_value', 'client_id': 'client_id_value', 'object_id': 'object_id_value', 'customer_tenant_id': 'customer_tenant_id_value', 'redirect_uri': 'redirect_uri_value', 'federated_application_client_id': 'federated_application_client_id_value', 'identity': 'identity_value'}, 'cloud_spanner': {'database': 'database_value', 'use_parallelism': True, 'max_parallelism': 1595, 'use_serverless_analytics': True, 'use_data_boost': True, 'database_role': 'database_role_value'}, 'cloud_resource': {'service_account_id': 'service_account_id_value'}, 'spark': {'service_account_id': 'service_account_id_value', 'metastore_service_config': {'metastore_service': 'metastore_service_value'}, 'spark_history_server_config': {'dataproc_cluster': 'dataproc_cluster_value'}}, 'salesforce_data_cloud': {'instance_uri': 'instance_uri_value', 'identity': 'identity_value', 'tenant_id': 'tenant_id_value'}, 'creation_time': 1379, 'last_modified_time': 1890, 'has_credential': True} - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = gcbc_connection.CreateConnectionRequest.meta.fields["connection"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["connection"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["connection"][field])): - del request_init["connection"][field][i][subfield] - else: - del request_init["connection"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
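- # (Aside: creation_time and last_modified_time appear to be plain int64
- # proto fields, so the sample values 1379 and 1890 are arbitrary integers
- # rather than timestamp messages; that is why they survive the JSON round
- # trip below and can be compared with a simple ==.)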
- return_value = gcbc_connection.Connection( - name='name_value', - friendly_name='friendly_name_value', - description='description_value', - creation_time=1379, - last_modified_time=1890, - has_credential=True, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gcbc_connection.Connection.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_connection(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, gcbc_connection.Connection) - assert response.name == 'name_value' - assert response.friendly_name == 'friendly_name_value' - assert response.description == 'description_value' - assert response.creation_time == 1379 - assert response.last_modified_time == 1890 - assert response.has_credential is True - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_connection_rest_interceptors(null_interceptor): - transport = transports.ConnectionServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConnectionServiceRestInterceptor(), - ) - client = ConnectionServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConnectionServiceRestInterceptor, "post_create_connection") as post, \ - mock.patch.object(transports.ConnectionServiceRestInterceptor, "post_create_connection_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.ConnectionServiceRestInterceptor, "pre_create_connection") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = gcbc_connection.CreateConnectionRequest.pb(gcbc_connection.CreateConnectionRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = gcbc_connection.Connection.to_json(gcbc_connection.Connection()) - req.return_value.content = return_value - - request = gcbc_connection.CreateConnectionRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = gcbc_connection.Connection() - post_with_metadata.return_value = gcbc_connection.Connection(), metadata - - client.create_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_connection_rest_bad_request(request_type=connection.GetConnectionRequest): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
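- # (Aside: the REST transport raises on non-2xx responses; a 400 status is
- # mapped to core_exceptions.BadRequest, most likely via
- # google.api_core.exceptions.from_http_response, which is why pytest.raises
- # fires below even though the mocked body is empty.)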
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_connection(request) - - -@pytest.mark.parametrize("request_type", [ - connection.GetConnectionRequest, - dict, -]) -def test_get_connection_rest_call_success(request_type): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = connection.Connection( - name='name_value', - friendly_name='friendly_name_value', - description='description_value', - creation_time=1379, - last_modified_time=1890, - has_credential=True, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = connection.Connection.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_connection(request) - - # Establish that the response is the type that we expect. 
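- # (Aside: by this point the mocked JSON payload has been deserialized back
- # into a proto-plus Connection by the transport, so the field asserts below
- # exercise the full serialize/parse round trip rather than just the mock.)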
- assert isinstance(response, connection.Connection) - assert response.name == 'name_value' - assert response.friendly_name == 'friendly_name_value' - assert response.description == 'description_value' - assert response.creation_time == 1379 - assert response.last_modified_time == 1890 - assert response.has_credential is True - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_connection_rest_interceptors(null_interceptor): - transport = transports.ConnectionServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConnectionServiceRestInterceptor(), - ) - client = ConnectionServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConnectionServiceRestInterceptor, "post_get_connection") as post, \ - mock.patch.object(transports.ConnectionServiceRestInterceptor, "post_get_connection_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.ConnectionServiceRestInterceptor, "pre_get_connection") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = connection.GetConnectionRequest.pb(connection.GetConnectionRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = connection.Connection.to_json(connection.Connection()) - req.return_value.content = return_value - - request = connection.GetConnectionRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = connection.Connection() - post_with_metadata.return_value = connection.Connection(), metadata - - client.get_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_connections_rest_bad_request(request_type=connection.ListConnectionsRequest): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_connections(request) - - -@pytest.mark.parametrize("request_type", [ - connection.ListConnectionsRequest, - dict, -]) -def test_list_connections_rest_call_success(request_type): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = connection.ListConnectionsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = connection.ListConnectionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_connections(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListConnectionsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_connections_rest_interceptors(null_interceptor): - transport = transports.ConnectionServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConnectionServiceRestInterceptor(), - ) - client = ConnectionServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConnectionServiceRestInterceptor, "post_list_connections") as post, \ - mock.patch.object(transports.ConnectionServiceRestInterceptor, "post_list_connections_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.ConnectionServiceRestInterceptor, "pre_list_connections") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = connection.ListConnectionsRequest.pb(connection.ListConnectionsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = connection.ListConnectionsResponse.to_json(connection.ListConnectionsResponse()) - req.return_value.content = return_value - - request = connection.ListConnectionsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = 
connection.ListConnectionsResponse() - post_with_metadata.return_value = connection.ListConnectionsResponse(), metadata - - client.list_connections(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_connection_rest_bad_request(request_type=gcbc_connection.UpdateConnectionRequest): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_connection(request) - - -@pytest.mark.parametrize("request_type", [ - gcbc_connection.UpdateConnectionRequest, - dict, -]) -def test_update_connection_rest_call_success(request_type): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3'} - request_init["connection"] = {'name': 'name_value', 'friendly_name': 'friendly_name_value', 'description': 'description_value', 'cloud_sql': {'instance_id': 'instance_id_value', 'database': 'database_value', 'type_': 1, 'credential': {'username': 'username_value', 'password': 'password_value'}, 'service_account_id': 'service_account_id_value'}, 'aws': {'cross_account_role': {'iam_role_id': 'iam_role_id_value', 'iam_user_id': 'iam_user_id_value', 'external_id': 'external_id_value'}, 'access_role': {'iam_role_id': 'iam_role_id_value', 'identity': 'identity_value'}}, 'azure': {'application': 'application_value', 'client_id': 'client_id_value', 'object_id': 'object_id_value', 'customer_tenant_id': 'customer_tenant_id_value', 'redirect_uri': 'redirect_uri_value', 'federated_application_client_id': 'federated_application_client_id_value', 'identity': 'identity_value'}, 'cloud_spanner': {'database': 'database_value', 'use_parallelism': True, 'max_parallelism': 1595, 'use_serverless_analytics': True, 'use_data_boost': True, 'database_role': 'database_role_value'}, 'cloud_resource': {'service_account_id': 'service_account_id_value'}, 'spark': {'service_account_id': 'service_account_id_value', 'metastore_service_config': {'metastore_service': 'metastore_service_value'}, 'spark_history_server_config': {'dataproc_cluster': 'dataproc_cluster_value'}}, 'salesforce_data_cloud': {'instance_uri': 'instance_uri_value', 'identity': 'identity_value', 'tenant_id': 'tenant_id_value'}, 'creation_time': 1379, 'last_modified_time': 1890, 'has_credential': True} - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = gcbc_connection.UpdateConnectionRequest.meta.fields["connection"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["connection"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["connection"][field])): - del request_init["connection"][field][i][subfield] - else: - del request_init["connection"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = gcbc_connection.Connection( - name='name_value', - friendly_name='friendly_name_value', - description='description_value', - creation_time=1379, - last_modified_time=1890, - has_credential=True, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gcbc_connection.Connection.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_connection(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, gcbc_connection.Connection) - assert response.name == 'name_value' - assert response.friendly_name == 'friendly_name_value' - assert response.description == 'description_value' - assert response.creation_time == 1379 - assert response.last_modified_time == 1890 - assert response.has_credential is True - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_connection_rest_interceptors(null_interceptor): - transport = transports.ConnectionServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConnectionServiceRestInterceptor(), - ) - client = ConnectionServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConnectionServiceRestInterceptor, "post_update_connection") as post, \ - mock.patch.object(transports.ConnectionServiceRestInterceptor, "post_update_connection_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.ConnectionServiceRestInterceptor, "pre_update_connection") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = gcbc_connection.UpdateConnectionRequest.pb(gcbc_connection.UpdateConnectionRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = gcbc_connection.Connection.to_json(gcbc_connection.Connection()) - req.return_value.content = return_value - - request = gcbc_connection.UpdateConnectionRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = gcbc_connection.Connection() - post_with_metadata.return_value = gcbc_connection.Connection(), metadata - - client.update_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_connection_rest_bad_request(request_type=connection.DeleteConnectionRequest): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_connection(request) - - -@pytest.mark.parametrize("request_type", [ - connection.DeleteConnectionRequest, - dict, -]) -def test_delete_connection_rest_call_success(request_type): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/connections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_connection(request) - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_connection_rest_interceptors(null_interceptor): - transport = transports.ConnectionServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConnectionServiceRestInterceptor(), - ) - client = ConnectionServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConnectionServiceRestInterceptor, "pre_delete_connection") as pre: - pre.assert_not_called() - pb_message = connection.DeleteConnectionRequest.pb(connection.DeleteConnectionRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = connection.DeleteConnectionRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_get_iam_policy_rest_bad_request(request_type=iam_policy_pb2.GetIamPolicyRequest): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'resource': 'projects/sample1/locations/sample2/connections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_iam_policy(request) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.GetIamPolicyRequest, - dict, -]) -def test_get_iam_policy_rest_call_success(request_type): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'resource': 'projects/sample1/locations/sample2/connections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy( - version=774, - etag=b'etag_blob', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_iam_policy(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_iam_policy_rest_interceptors(null_interceptor): - transport = transports.ConnectionServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConnectionServiceRestInterceptor(), - ) - client = ConnectionServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConnectionServiceRestInterceptor, "post_get_iam_policy") as post, \ - mock.patch.object(transports.ConnectionServiceRestInterceptor, "post_get_iam_policy_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.ConnectionServiceRestInterceptor, "pre_get_iam_policy") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = iam_policy_pb2.GetIamPolicyRequest() - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(policy_pb2.Policy()) - req.return_value.content = return_value - - request = iam_policy_pb2.GetIamPolicyRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = policy_pb2.Policy() - post_with_metadata.return_value = policy_pb2.Policy(), metadata - - client.get_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - 
post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_set_iam_policy_rest_bad_request(request_type=iam_policy_pb2.SetIamPolicyRequest): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'resource': 'projects/sample1/locations/sample2/connections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.set_iam_policy(request) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.SetIamPolicyRequest, - dict, -]) -def test_set_iam_policy_rest_call_success(request_type): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'resource': 'projects/sample1/locations/sample2/connections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy( - version=774, - etag=b'etag_blob', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.set_iam_policy(request) - - # Establish that the response is the type that we expect. 
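- # (Aside: policy_pb2.Policy is a raw protobuf message rather than a
- # proto-plus wrapper, which is why the fake response above was serialized
- # with json_format.MessageToJson directly, with no intermediate .pb() call.)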
- assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_set_iam_policy_rest_interceptors(null_interceptor): - transport = transports.ConnectionServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConnectionServiceRestInterceptor(), - ) - client = ConnectionServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConnectionServiceRestInterceptor, "post_set_iam_policy") as post, \ - mock.patch.object(transports.ConnectionServiceRestInterceptor, "post_set_iam_policy_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.ConnectionServiceRestInterceptor, "pre_set_iam_policy") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = iam_policy_pb2.SetIamPolicyRequest() - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(policy_pb2.Policy()) - req.return_value.content = return_value - - request = iam_policy_pb2.SetIamPolicyRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = policy_pb2.Policy() - post_with_metadata.return_value = policy_pb2.Policy(), metadata - - client.set_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_test_iam_permissions_rest_bad_request(request_type=iam_policy_pb2.TestIamPermissionsRequest): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'resource': 'projects/sample1/locations/sample2/connections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.test_iam_permissions(request) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.TestIamPermissionsRequest, - dict, -]) -def test_test_iam_permissions_rest_call_success(request_type): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'resource': 'projects/sample1/locations/sample2/connections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
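- # (Aside: TestIamPermissionsResponse.permissions is a repeated string
- # field, so the fake below seeds it with a one-element list and the final
- # assertion compares it against ['permissions_value'].)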
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.test_iam_permissions(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - assert response.permissions == ['permissions_value'] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_test_iam_permissions_rest_interceptors(null_interceptor): - transport = transports.ConnectionServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConnectionServiceRestInterceptor(), - ) - client = ConnectionServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConnectionServiceRestInterceptor, "post_test_iam_permissions") as post, \ - mock.patch.object(transports.ConnectionServiceRestInterceptor, "post_test_iam_permissions_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.ConnectionServiceRestInterceptor, "pre_test_iam_permissions") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = iam_policy_pb2.TestIamPermissionsRequest() - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(iam_policy_pb2.TestIamPermissionsResponse()) - req.return_value.content = return_value - - request = iam_policy_pb2.TestIamPermissionsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = iam_policy_pb2.TestIamPermissionsResponse() - post_with_metadata.return_value = iam_policy_pb2.TestIamPermissionsResponse(), metadata - - client.test_iam_permissions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - -def test_initialize_client_w_rest(): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_connection_empty_call_rest(): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_connection), - '__call__') as call: - client.create_connection(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcbc_connection.CreateConnectionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_connection_empty_call_rest(): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_connection), - '__call__') as call: - client.get_connection(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = connection.GetConnectionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_connections_empty_call_rest(): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_connections), - '__call__') as call: - client.list_connections(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = connection.ListConnectionsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_connection_empty_call_rest(): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_connection), - '__call__') as call: - client.update_connection(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcbc_connection.UpdateConnectionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_connection_empty_call_rest(): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_connection), - '__call__') as call: - client.delete_connection(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = connection.DeleteConnectionRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_iam_policy_empty_call_rest(): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - client.get_iam_policy(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.GetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_set_iam_policy_empty_call_rest(): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - client.set_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.SetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_test_iam_permissions_empty_call_rest(): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - client.test_iam_permissions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.TestIamPermissionsRequest() - - assert args[0] == request_msg - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.ConnectionServiceGrpcTransport, - ) - -def test_connection_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.ConnectionServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_connection_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.bigquery_connection_v1.services.connection_service.transports.ConnectionServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.ConnectionServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
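- # (Aside: the generated base transport is an abstract interface; every RPC
- # on it raises NotImplementedError until a concrete transport such as grpc,
- # grpc_asyncio, or rest overrides it, and the sweep below simply proves
- # that contract for each method name.)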
- methods = ( - 'create_connection', - 'get_connection', - 'list_connections', - 'update_connection', - 'delete_connection', - 'get_iam_policy', - 'set_iam_policy', - 'test_iam_permissions', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_connection_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.bigquery_connection_v1.services.connection_service.transports.ConnectionServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ConnectionServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_connection_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.bigquery_connection_v1.services.connection_service.transports.ConnectionServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ConnectionServiceTransport() - adc.assert_called_once() - - -def test_connection_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - ConnectionServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ConnectionServiceGrpcTransport, - transports.ConnectionServiceGrpcAsyncIOTransport, - ], -) -def test_connection_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
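- # (Aside: ADC means Application Default Credentials. google.auth.default()
- # resolves them, roughly in this order: the GOOGLE_APPLICATION_CREDENTIALS
- # env var, the gcloud application-default login cache, then the GCE/GKE
- # metadata server. Patching it here keeps the test hermetic while still
- # letting us assert on the scopes it receives.)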
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/bigquery', 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ConnectionServiceGrpcTransport, - transports.ConnectionServiceGrpcAsyncIOTransport, - transports.ConnectionServiceRestTransport, - ], -) -def test_connection_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.ConnectionServiceGrpcTransport, grpc_helpers), - (transports.ConnectionServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_connection_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "bigqueryconnection.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="bigqueryconnection.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.ConnectionServiceGrpcTransport, transports.ConnectionServiceGrpcAsyncIOTransport]) -def test_connection_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
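- # (Aside: client_cert_source_callback is the module-level test helper used
- # throughout this file; it returns a (certificate_chain, private_key) byte
- # pair that the transport is expected to hand to grpc.ssl_channel_credentials,
- # which is exactly what the mock assertion below verifies.)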
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
- with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
- transport_class(
- credentials=cred,
- client_cert_source_for_mtls=client_cert_source_callback
- )
- expected_cert, expected_key = client_cert_source_callback()
- mock_ssl_cred.assert_called_once_with(
- certificate_chain=expected_cert,
- private_key=expected_key
- )
-
-def test_connection_service_http_transport_client_cert_source_for_mtls():
- cred = ga_credentials.AnonymousCredentials()
- with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
- transports.ConnectionServiceRestTransport(
- credentials=cred,
- client_cert_source_for_mtls=client_cert_source_callback
- )
- mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
-
-
-@pytest.mark.parametrize("transport_name", [
- "grpc",
- "grpc_asyncio",
- "rest",
-])
-def test_connection_service_host_no_port(transport_name):
- client = ConnectionServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- client_options=client_options.ClientOptions(api_endpoint='bigqueryconnection.googleapis.com'),
- transport=transport_name,
- )
- assert client.transport._host == (
- 'bigqueryconnection.googleapis.com:443'
- if transport_name in ['grpc', 'grpc_asyncio']
- else 'https://bigqueryconnection.googleapis.com'
- )
-
-@pytest.mark.parametrize("transport_name", [
- "grpc",
- "grpc_asyncio",
- "rest",
-])
-def test_connection_service_host_with_port(transport_name):
- client = ConnectionServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- client_options=client_options.ClientOptions(api_endpoint='bigqueryconnection.googleapis.com:8000'),
- transport=transport_name,
- )
- assert client.transport._host == (
- 'bigqueryconnection.googleapis.com:8000'
- if transport_name in ['grpc', 'grpc_asyncio']
- else 'https://bigqueryconnection.googleapis.com:8000'
- )
-
-@pytest.mark.parametrize("transport_name", [
- "rest",
-])
-def test_connection_service_client_transport_session_collision(transport_name):
- creds1 = ga_credentials.AnonymousCredentials()
- creds2 = ga_credentials.AnonymousCredentials()
- client1 = ConnectionServiceClient(
- credentials=creds1,
- transport=transport_name,
- )
- client2 = ConnectionServiceClient(
- credentials=creds2,
- transport=transport_name,
- )
- session1 = client1.transport.create_connection._session
- session2 = client2.transport.create_connection._session
- assert session1 != session2
- session1 = client1.transport.get_connection._session
- session2 = client2.transport.get_connection._session
- assert session1 != session2
- session1 = client1.transport.list_connections._session
- session2 = client2.transport.list_connections._session
- assert session1 != session2
- session1 = client1.transport.update_connection._session
- session2 = client2.transport.update_connection._session
- assert session1 != session2
- session1 = client1.transport.delete_connection._session
- session2 = client2.transport.delete_connection._session
- assert session1 != session2
- session1 = client1.transport.get_iam_policy._session
- session2 = client2.transport.get_iam_policy._session
- assert session1 != session2
- session1 = client1.transport.set_iam_policy._session
- session2 = client2.transport.set_iam_policy._session
- assert session1 != session2
- session1 = client1.transport.test_iam_permissions._session
- session2 = client2.transport.test_iam_permissions._session
- assert session1 != session2
-def test_connection_service_grpc_transport_channel():
- channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
- # Check that channel is used if provided.
- transport = transports.ConnectionServiceGrpcTransport(
- host="squid.clam.whelk",
- channel=channel,
- )
- assert transport.grpc_channel == channel
- assert transport._host == "squid.clam.whelk:443"
- assert transport._ssl_channel_credentials is None
-
-
-def test_connection_service_grpc_asyncio_transport_channel():
- channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
- # Check that channel is used if provided.
- transport = transports.ConnectionServiceGrpcAsyncIOTransport(
- host="squid.clam.whelk",
- channel=channel,
- )
- assert transport.grpc_channel == channel
- assert transport._host == "squid.clam.whelk:443"
- assert transport._ssl_channel_credentials is None
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.ConnectionServiceGrpcTransport, transports.ConnectionServiceGrpcAsyncIOTransport])
-def test_connection_service_transport_channel_mtls_with_client_cert_source(
- transport_class
-):
- with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
- with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
- mock_ssl_cred = mock.Mock()
- grpc_ssl_channel_cred.return_value = mock_ssl_cred
-
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
-
- cred = ga_credentials.AnonymousCredentials()
- with pytest.warns(DeprecationWarning):
- with mock.patch.object(google.auth, 'default') as adc:
- adc.return_value = (cred, None)
- transport = transport_class(
- host="squid.clam.whelk",
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=client_cert_source_callback,
- )
- adc.assert_called_once()
-
- grpc_ssl_channel_cred.assert_called_once_with(
- certificate_chain=b"cert bytes", private_key=b"key bytes"
- )
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=cred,
- credentials_file=None,
- scopes=None,
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
- options=[
- ("grpc.max_send_message_length", -1),
- ("grpc.max_receive_message_length", -1),
- ],
- )
- assert transport.grpc_channel == mock_grpc_channel
- assert transport._ssl_channel_credentials == mock_ssl_cred
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.ConnectionServiceGrpcTransport, transports.ConnectionServiceGrpcAsyncIOTransport]) -def test_connection_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_cluster_path(): - project = "squid" - region = "clam" - cluster = "whelk" - expected = "projects/{project}/regions/{region}/clusters/{cluster}".format(project=project, region=region, cluster=cluster, ) - actual = ConnectionServiceClient.cluster_path(project, region, cluster) - assert expected == actual - - -def test_parse_cluster_path(): - expected = { - "project": "octopus", - "region": "oyster", - "cluster": "nudibranch", - } - path = ConnectionServiceClient.cluster_path(**expected) - - # Check that the path construction is reversible. - actual = ConnectionServiceClient.parse_cluster_path(path) - assert expected == actual - -def test_connection_path(): - project = "cuttlefish" - location = "mussel" - connection = "winkle" - expected = "projects/{project}/locations/{location}/connections/{connection}".format(project=project, location=location, connection=connection, ) - actual = ConnectionServiceClient.connection_path(project, location, connection) - assert expected == actual - - -def test_parse_connection_path(): - expected = { - "project": "nautilus", - "location": "scallop", - "connection": "abalone", - } - path = ConnectionServiceClient.connection_path(**expected) - - # Check that the path construction is reversible. - actual = ConnectionServiceClient.parse_connection_path(path) - assert expected == actual - -def test_service_path(): - project = "squid" - location = "clam" - service = "whelk" - expected = "projects/{project}/locations/{location}/services/{service}".format(project=project, location=location, service=service, ) - actual = ConnectionServiceClient.service_path(project, location, service) - assert expected == actual - - -def test_parse_service_path(): - expected = { - "project": "octopus", - "location": "oyster", - "service": "nudibranch", - } - path = ConnectionServiceClient.service_path(**expected) - - # Check that the path construction is reversible. 
- actual = ConnectionServiceClient.parse_service_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = ConnectionServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "mussel", - } - path = ConnectionServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = ConnectionServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "winkle" - expected = "folders/{folder}".format(folder=folder, ) - actual = ConnectionServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nautilus", - } - path = ConnectionServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = ConnectionServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "scallop" - expected = "organizations/{organization}".format(organization=organization, ) - actual = ConnectionServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "abalone", - } - path = ConnectionServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = ConnectionServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "squid" - expected = "projects/{project}".format(project=project, ) - actual = ConnectionServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "clam", - } - path = ConnectionServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = ConnectionServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "whelk" - location = "octopus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = ConnectionServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - } - path = ConnectionServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = ConnectionServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.ConnectionServiceTransport, '_prep_wrapped_messages') as prep: - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.ConnectionServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = ConnectionServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_transport_close_grpc(): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = ConnectionServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close_rest(): - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = ConnectionServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (ConnectionServiceClient, transports.ConnectionServiceGrpcTransport), - (ConnectionServiceAsyncClient, transports.ConnectionServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/.coveragerc b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/.coveragerc deleted file mode 100644 index 8a6e1114e730..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/bigquery_data_exchange/__init__.py - google/cloud/bigquery_data_exchange/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/.flake8 b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/.flake8 deleted file mode 100644 index 29227d4cf419..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. 
- **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/MANIFEST.in b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/MANIFEST.in deleted file mode 100644 index fe09cf39b079..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/bigquery_data_exchange *.py -recursive-include google/cloud/bigquery_data_exchange_v1beta1 *.py diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/README.rst b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/README.rst deleted file mode 100644 index da666a8f9320..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/README.rst +++ /dev/null @@ -1,143 +0,0 @@ -Python Client for Google Cloud Bigquery Data Exchange API -========================================================== - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Bigquery Data Exchange API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv <your-env> - source <your-env>/bin/activate - <your-env>/bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. code-block:: console - - python3 -m venv <your-env> - <your-env>\Scripts\activate - <your-env>\Scripts\pip.exe install \path\to\library - - -Logging -------- - -This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes. -Note the following: - -#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging. -#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**. -#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below. - - -Simple, environment-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google -logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged -messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc.
used for any logging -event. - -A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log. - -- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc. - -- Invalid logging scopes: :code:`foo`, :code:`123`, etc. - -**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers. - - -Examples -^^^^^^^^ - -- Enabling the default handler for all Google-based loggers - -.. code-block:: console - - export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google - -- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`): - -.. code-block:: console - - export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1 - - -Advanced, code-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -You can also configure a valid logging scope using Python's standard `logging` mechanism. - - -Examples -^^^^^^^^ - -- Configuring a handler for all Google-based loggers - -.. code-block:: python - - import logging - - from google.cloud.translate_v3 import translate - - base_logger = logging.getLogger("google") - base_logger.addHandler(logging.StreamHandler()) - base_logger.setLevel(logging.DEBUG) - -- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`): - -.. code-block:: python - - import logging - - from google.cloud.translate_v3 import translate - - base_logger = logging.getLogger("google.cloud.library_v1") - base_logger.addHandler(logging.StreamHandler()) - base_logger.setLevel(logging.DEBUG) - - -Logging details -~~~~~~~~~~~~~~~ - -#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root - logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set - :code:`logging.getLogger("google").propagate = True` in your code. -#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for - one library, but decide you need to also set up environment-based logging configuration for another library. - - #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual - if the code-based configuration gets applied first. - -#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get - executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured. - (This is the reason for 2.i. above.)
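As an editorial aside: the two behaviors described under "Logging details" above can be combined in a few lines. The following is a minimal sketch using only the standard :code:`logging` module; the scope :code:`google.cloud.bigquery_data_exchange_v1beta1` is chosen here purely as an illustration.

.. code-block:: python

    import logging

    # Attach a handler for DEBUG-level events from one Google module only.
    module_logger = logging.getLogger("google.cloud.bigquery_data_exchange_v1beta1")
    module_logger.addHandler(logging.StreamHandler())
    module_logger.setLevel(logging.DEBUG)

    # By default, events stop at the "google"-level logger; opt in explicitly
    # if handlers on the root logger should also receive them.
    logging.getLogger("google").propagate = True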
diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/docs/_static/custom.css b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/docs/_static/custom.css deleted file mode 100644 index 06423be0b592..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/docs/_static/custom.css +++ /dev/null @@ -1,3 +0,0 @@ -dl.field-list > dt { - min-width: 100px -} diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/docs/bigquery_data_exchange_v1beta1/analytics_hub_service.rst b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/docs/bigquery_data_exchange_v1beta1/analytics_hub_service.rst deleted file mode 100644 index e90653067bdf..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/docs/bigquery_data_exchange_v1beta1/analytics_hub_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -AnalyticsHubService -------------------------------------- - -.. automodule:: google.cloud.bigquery_data_exchange_v1beta1.services.analytics_hub_service - :members: - :inherited-members: - -.. automodule:: google.cloud.bigquery_data_exchange_v1beta1.services.analytics_hub_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/docs/bigquery_data_exchange_v1beta1/services_.rst b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/docs/bigquery_data_exchange_v1beta1/services_.rst deleted file mode 100644 index bce322913769..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/docs/bigquery_data_exchange_v1beta1/services_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Bigquery Data Exchange v1beta1 API -============================================================ -.. toctree:: - :maxdepth: 2 - - analytics_hub_service diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/docs/bigquery_data_exchange_v1beta1/types_.rst b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/docs/bigquery_data_exchange_v1beta1/types_.rst deleted file mode 100644 index 00ddd3bab79d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/docs/bigquery_data_exchange_v1beta1/types_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Bigquery Data Exchange v1beta1 API -========================================================= - -.. automodule:: google.cloud.bigquery_data_exchange_v1beta1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/docs/conf.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/docs/conf.py deleted file mode 100644 index 30a2a1c58ceb..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -# -# google-cloud-bigquery-data-exchange documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGELOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffixes as a list of strings: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = u"google-cloud-bigquery-data-exchange" -copyright = u"2023, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = 'en' - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output.
They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. 
The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-bigquery-data-exchange-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-bigquery-data-exchange.tex", - u"google-cloud-bigquery-data-exchange Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-bigquery-data-exchange", - u"Google Cloud Bigquery Data Exchange Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. 
List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-bigquery-data-exchange", - u"google-cloud-bigquery-data-exchange Documentation", - author, - "google-cloud-bigquery-data-exchange", - "GAPIC library for Google Cloud Bigquery Data Exchange API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/docs/index.rst b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/docs/index.rst deleted file mode 100644 index eebe613206f7..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - bigquery_data_exchange_v1beta1/services_ - bigquery_data_exchange_v1beta1/types_ diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange/__init__.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange/__init__.py deleted file mode 100644 index 3c38d41985a3..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange/__init__.py +++ /dev/null @@ -1,71 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.bigquery_data_exchange import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.bigquery_data_exchange_v1beta1.services.analytics_hub_service.client import AnalyticsHubServiceClient -from google.cloud.bigquery_data_exchange_v1beta1.services.analytics_hub_service.async_client import AnalyticsHubServiceAsyncClient - -from google.cloud.bigquery_data_exchange_v1beta1.types.dataexchange import CreateDataExchangeRequest -from google.cloud.bigquery_data_exchange_v1beta1.types.dataexchange import CreateListingRequest -from google.cloud.bigquery_data_exchange_v1beta1.types.dataexchange import DataExchange -from google.cloud.bigquery_data_exchange_v1beta1.types.dataexchange import DataProvider -from google.cloud.bigquery_data_exchange_v1beta1.types.dataexchange import DeleteDataExchangeRequest -from google.cloud.bigquery_data_exchange_v1beta1.types.dataexchange import DeleteListingRequest -from google.cloud.bigquery_data_exchange_v1beta1.types.dataexchange import DestinationDataset -from google.cloud.bigquery_data_exchange_v1beta1.types.dataexchange import DestinationDatasetReference -from google.cloud.bigquery_data_exchange_v1beta1.types.dataexchange import GetDataExchangeRequest -from google.cloud.bigquery_data_exchange_v1beta1.types.dataexchange import GetListingRequest -from google.cloud.bigquery_data_exchange_v1beta1.types.dataexchange import ListDataExchangesRequest -from google.cloud.bigquery_data_exchange_v1beta1.types.dataexchange import ListDataExchangesResponse -from google.cloud.bigquery_data_exchange_v1beta1.types.dataexchange import Listing -from google.cloud.bigquery_data_exchange_v1beta1.types.dataexchange import ListListingsRequest -from google.cloud.bigquery_data_exchange_v1beta1.types.dataexchange import ListListingsResponse -from google.cloud.bigquery_data_exchange_v1beta1.types.dataexchange import ListOrgDataExchangesRequest -from google.cloud.bigquery_data_exchange_v1beta1.types.dataexchange import ListOrgDataExchangesResponse -from google.cloud.bigquery_data_exchange_v1beta1.types.dataexchange import Publisher -from google.cloud.bigquery_data_exchange_v1beta1.types.dataexchange import SubscribeListingRequest -from google.cloud.bigquery_data_exchange_v1beta1.types.dataexchange import SubscribeListingResponse -from google.cloud.bigquery_data_exchange_v1beta1.types.dataexchange import UpdateDataExchangeRequest -from google.cloud.bigquery_data_exchange_v1beta1.types.dataexchange import UpdateListingRequest - -__all__ = ('AnalyticsHubServiceClient', - 'AnalyticsHubServiceAsyncClient', - 'CreateDataExchangeRequest', - 'CreateListingRequest', - 'DataExchange', - 'DataProvider', - 'DeleteDataExchangeRequest', - 'DeleteListingRequest', - 'DestinationDataset', - 'DestinationDatasetReference', - 'GetDataExchangeRequest', - 'GetListingRequest', - 'ListDataExchangesRequest', - 'ListDataExchangesResponse', - 'Listing', - 'ListListingsRequest', - 'ListListingsResponse', - 'ListOrgDataExchangesRequest', - 'ListOrgDataExchangesResponse', - 'Publisher', - 'SubscribeListingRequest', - 'SubscribeListingResponse', - 'UpdateDataExchangeRequest', - 'UpdateListingRequest', -) diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange/gapic_version.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- 
a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange/py.typed b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange/py.typed deleted file mode 100644 index 16312a296a41..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-bigquery-data-exchange package uses inline types. diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/__init__.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/__init__.py deleted file mode 100644 index 738f852f3360..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/__init__.py +++ /dev/null @@ -1,72 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.bigquery_data_exchange_v1beta1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.analytics_hub_service import AnalyticsHubServiceClient -from .services.analytics_hub_service import AnalyticsHubServiceAsyncClient - -from .types.dataexchange import CreateDataExchangeRequest -from .types.dataexchange import CreateListingRequest -from .types.dataexchange import DataExchange -from .types.dataexchange import DataProvider -from .types.dataexchange import DeleteDataExchangeRequest -from .types.dataexchange import DeleteListingRequest -from .types.dataexchange import DestinationDataset -from .types.dataexchange import DestinationDatasetReference -from .types.dataexchange import GetDataExchangeRequest -from .types.dataexchange import GetListingRequest -from .types.dataexchange import ListDataExchangesRequest -from .types.dataexchange import ListDataExchangesResponse -from .types.dataexchange import Listing -from .types.dataexchange import ListListingsRequest -from .types.dataexchange import ListListingsResponse -from .types.dataexchange import ListOrgDataExchangesRequest -from .types.dataexchange import ListOrgDataExchangesResponse -from .types.dataexchange import Publisher -from .types.dataexchange import SubscribeListingRequest -from .types.dataexchange import SubscribeListingResponse -from .types.dataexchange import UpdateDataExchangeRequest -from .types.dataexchange import UpdateListingRequest - -__all__ = ( - 'AnalyticsHubServiceAsyncClient', -'AnalyticsHubServiceClient', -'CreateDataExchangeRequest', -'CreateListingRequest', -'DataExchange', -'DataProvider', -'DeleteDataExchangeRequest', -'DeleteListingRequest', -'DestinationDataset', -'DestinationDatasetReference', -'GetDataExchangeRequest', -'GetListingRequest', -'ListDataExchangesRequest', -'ListDataExchangesResponse', -'ListListingsRequest', -'ListListingsResponse', -'ListOrgDataExchangesRequest', -'ListOrgDataExchangesResponse', -'Listing', -'Publisher', -'SubscribeListingRequest', -'SubscribeListingResponse', -'UpdateDataExchangeRequest', -'UpdateListingRequest', -) diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/gapic_metadata.json b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/gapic_metadata.json deleted file mode 100644 index 1d0bb00ec817..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/gapic_metadata.json +++ /dev/null @@ -1,173 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.bigquery_data_exchange_v1beta1", - "protoPackage": "google.cloud.bigquery.dataexchange.v1beta1", - "schema": "1.0", - "services": { - "AnalyticsHubService": { - "clients": { - "grpc": { - "libraryClient": "AnalyticsHubServiceClient", - "rpcs": { - "CreateDataExchange": { - "methods": [ - "create_data_exchange" - ] - }, - "CreateListing": { - "methods": [ - "create_listing" - ] - }, - "DeleteDataExchange": { - "methods": [ - "delete_data_exchange" - ] - }, - "DeleteListing": { - "methods": [ - "delete_listing" - ] - }, - "GetDataExchange": { - "methods": [ - "get_data_exchange" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - "GetListing": { - "methods": [ - "get_listing" - ] - }, - "ListDataExchanges": { - "methods": [ - "list_data_exchanges" - ] 
- }, - "ListListings": { - "methods": [ - "list_listings" - ] - }, - "ListOrgDataExchanges": { - "methods": [ - "list_org_data_exchanges" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "SubscribeListing": { - "methods": [ - "subscribe_listing" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UpdateDataExchange": { - "methods": [ - "update_data_exchange" - ] - }, - "UpdateListing": { - "methods": [ - "update_listing" - ] - } - } - }, - "grpc-async": { - "libraryClient": "AnalyticsHubServiceAsyncClient", - "rpcs": { - "CreateDataExchange": { - "methods": [ - "create_data_exchange" - ] - }, - "CreateListing": { - "methods": [ - "create_listing" - ] - }, - "DeleteDataExchange": { - "methods": [ - "delete_data_exchange" - ] - }, - "DeleteListing": { - "methods": [ - "delete_listing" - ] - }, - "GetDataExchange": { - "methods": [ - "get_data_exchange" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - "GetListing": { - "methods": [ - "get_listing" - ] - }, - "ListDataExchanges": { - "methods": [ - "list_data_exchanges" - ] - }, - "ListListings": { - "methods": [ - "list_listings" - ] - }, - "ListOrgDataExchanges": { - "methods": [ - "list_org_data_exchanges" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "SubscribeListing": { - "methods": [ - "subscribe_listing" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UpdateDataExchange": { - "methods": [ - "update_data_exchange" - ] - }, - "UpdateListing": { - "methods": [ - "update_listing" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/gapic_version.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/py.typed b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/py.typed deleted file mode 100644 index 16312a296a41..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-bigquery-data-exchange package uses inline types. 
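As the ``comment`` field in the ``gapic_metadata.json`` above notes, the file maps proto services/RPCs to the corresponding library clients/methods. A minimal sketch of resolving an RPC name to its Python method name from that mapping follows; the local file path is hypothetical, but the key structure matches the JSON shown above.

.. code-block:: python

    import json

    # Hypothetical location; in the package the file ships alongside
    # google/cloud/bigquery_data_exchange_v1beta1/__init__.py.
    with open("gapic_metadata.json") as fp:
        metadata = json.load(fp)

    # Walk services -> AnalyticsHubService -> clients -> grpc -> rpcs.
    rpcs = metadata["services"]["AnalyticsHubService"]["clients"]["grpc"]["rpcs"]

    # Prints ['create_data_exchange'], the method on AnalyticsHubServiceClient.
    print(rpcs["CreateDataExchange"]["methods"])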
diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/services/__init__.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/services/__init__.py deleted file mode 100644 index 8f6cf068242c..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/__init__.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/__init__.py deleted file mode 100644 index 0eb53446cbe6..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import AnalyticsHubServiceClient -from .async_client import AnalyticsHubServiceAsyncClient - -__all__ = ( - 'AnalyticsHubServiceClient', - 'AnalyticsHubServiceAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/async_client.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/async_client.py deleted file mode 100644 index ef4bf4362bbd..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/async_client.py +++ /dev/null @@ -1,2076 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.bigquery_data_exchange_v1beta1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.cloud.bigquery_data_exchange_v1beta1.services.analytics_hub_service import pagers -from google.cloud.bigquery_data_exchange_v1beta1.types import dataexchange -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from .transports.base import AnalyticsHubServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import AnalyticsHubServiceGrpcAsyncIOTransport -from .client import AnalyticsHubServiceClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -class AnalyticsHubServiceAsyncClient: - """The ``AnalyticsHubService`` API facilitates data sharing within and - across organizations. It allows data providers to publish listings - that reference shared datasets. With Analytics Hub, users can - discover and search for listings that they have access to. - Subscribers can view and subscribe to listings. When you subscribe - to a listing, Analytics Hub creates a linked dataset in your - project. - """ - - _client: AnalyticsHubServiceClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
- DEFAULT_ENDPOINT = AnalyticsHubServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = AnalyticsHubServiceClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = AnalyticsHubServiceClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = AnalyticsHubServiceClient._DEFAULT_UNIVERSE - - data_exchange_path = staticmethod(AnalyticsHubServiceClient.data_exchange_path) - parse_data_exchange_path = staticmethod(AnalyticsHubServiceClient.parse_data_exchange_path) - dataset_path = staticmethod(AnalyticsHubServiceClient.dataset_path) - parse_dataset_path = staticmethod(AnalyticsHubServiceClient.parse_dataset_path) - listing_path = staticmethod(AnalyticsHubServiceClient.listing_path) - parse_listing_path = staticmethod(AnalyticsHubServiceClient.parse_listing_path) - common_billing_account_path = staticmethod(AnalyticsHubServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(AnalyticsHubServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(AnalyticsHubServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(AnalyticsHubServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(AnalyticsHubServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(AnalyticsHubServiceClient.parse_common_organization_path) - common_project_path = staticmethod(AnalyticsHubServiceClient.common_project_path) - parse_common_project_path = staticmethod(AnalyticsHubServiceClient.parse_common_project_path) - common_location_path = staticmethod(AnalyticsHubServiceClient.common_location_path) - parse_common_location_path = staticmethod(AnalyticsHubServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - AnalyticsHubServiceAsyncClient: The constructed client. - """ - return AnalyticsHubServiceClient.from_service_account_info.__func__(AnalyticsHubServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - AnalyticsHubServiceAsyncClient: The constructed client. - """ - return AnalyticsHubServiceClient.from_service_account_file.__func__(AnalyticsHubServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. 
- - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` is provided, use the provided one. - (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return AnalyticsHubServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> AnalyticsHubServiceTransport: - """Returns the transport used by the client instance. - - Returns: - AnalyticsHubServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = AnalyticsHubServiceClient.get_transport_class - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, AnalyticsHubServiceTransport, Callable[..., AnalyticsHubServiceTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the analytics hub service async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,AnalyticsHubServiceTransport,Callable[..., AnalyticsHubServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the AnalyticsHubServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided.
Only if this property is not set and
-                ``transport`` was not explicitly provided, the endpoint is
-                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
-                variable, which can have one of the following values:
-                "always" (always use the default mTLS endpoint), "never" (always
-                use the default regular endpoint) and "auto" (auto-switch to the
-                default mTLS endpoint if a client certificate is present; this is
-                the default value).
-
-                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
-                is "true", then the ``client_cert_source`` property can be used
-                to provide a client certificate for mTLS transport. If
-                not provided, the default SSL client certificate will be used if
-                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
-                set, no client certificate will be used.
-
-                3. The ``universe_domain`` property can be used to override the
-                default "googleapis.com" universe. Note that the ``api_endpoint``
-                property still takes precedence; and ``universe_domain`` is
-                currently not supported for mTLS.
-
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
-        """
-        self._client = AnalyticsHubServiceClient(
-            credentials=credentials,
-            transport=transport,
-            client_options=client_options,
-            client_info=client_info,
-        )
-
-        if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG):  # pragma: NO COVER
-            _LOGGER.debug(
-                "Created client `google.cloud.bigquery.dataexchange_v1beta1.AnalyticsHubServiceAsyncClient`.",
-                extra={
-                    "serviceName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService",
-                    "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""),
-                    "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}",
-                    "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(),
-                } if hasattr(self._client._transport, "_credentials") else {
-                    "serviceName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService",
-                    "credentialsType": None,
-                }
-            )
-
-    async def list_data_exchanges(self,
-            request: Optional[Union[dataexchange.ListDataExchangesRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> pagers.ListDataExchangesAsyncPager:
-        r"""Lists all data exchanges in a given project and
-        location.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import bigquery_data_exchange_v1beta1
-
-            async def sample_list_data_exchanges():
-                # Create a client
-                client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient()
-
-                # Initialize request argument(s)
-                request = bigquery_data_exchange_v1beta1.ListDataExchangesRequest(
-                    parent="parent_value",
-                )
-
-                # Make the request (the paged method must be awaited to obtain the pager)
-                page_result = await client.list_data_exchanges(request=request)
-
-                # Handle the response
-                async for response in page_result:
-                    print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.bigquery_data_exchange_v1beta1.types.ListDataExchangesRequest, dict]]):
-                The request object. Message for requesting the list of
-                data exchanges.
-            parent (:class:`str`):
-                Required. The parent resource path of the data
-                exchanges. e.g. ``projects/myproject/locations/US``.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.bigquery_data_exchange_v1beta1.services.analytics_hub_service.pagers.ListDataExchangesAsyncPager:
-                Message for response to the list of
-                data exchanges.
-                Iterating over this object will yield
-                results and resolve additional pages
-                automatically.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [parent]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, dataexchange.ListDataExchangesRequest):
-            request = dataexchange.ListDataExchangesRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if parent is not None:
-            request.parent = parent
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.list_data_exchanges]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # This method is paged; wrap the response in a pager, which provides
-        # an `__aiter__` convenience method.
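-        # The pager stores the wrapped RPC and the final request, so it can
-        # transparently re-issue the call with each page's `next_page_token`
-        # as iteration proceeds.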
-        response = pagers.ListDataExchangesAsyncPager(
-            method=rpc,
-            request=request,
-            response=response,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def list_org_data_exchanges(self,
-            request: Optional[Union[dataexchange.ListOrgDataExchangesRequest, dict]] = None,
-            *,
-            organization: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> pagers.ListOrgDataExchangesAsyncPager:
-        r"""Lists all data exchanges from projects in a given
-        organization and location.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import bigquery_data_exchange_v1beta1
-
-            async def sample_list_org_data_exchanges():
-                # Create a client
-                client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient()
-
-                # Initialize request argument(s)
-                request = bigquery_data_exchange_v1beta1.ListOrgDataExchangesRequest(
-                    organization="organization_value",
-                )
-
-                # Make the request (the paged method must be awaited to obtain the pager)
-                page_result = await client.list_org_data_exchanges(request=request)
-
-                # Handle the response
-                async for response in page_result:
-                    print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.bigquery_data_exchange_v1beta1.types.ListOrgDataExchangesRequest, dict]]):
-                The request object. Message for requesting the list of
-                data exchanges from projects in an
-                organization and location.
-            organization (:class:`str`):
-                Required. The organization resource path of the projects
-                containing DataExchanges. e.g.
-                ``organizations/myorg/locations/US``.
-
-                This corresponds to the ``organization`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.bigquery_data_exchange_v1beta1.services.analytics_hub_service.pagers.ListOrgDataExchangesAsyncPager:
-                Message for response to listing data
-                exchanges in an organization and
-                location.
-
-                Iterating over this object will yield
-                results and resolve additional pages
-                automatically.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [organization]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
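-        # Note: a proto-plus message constructor accepts a dict or another
-        # message of the same type, so a dict request is coerced here in one
-        # step rather than copied field by field.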
- if not isinstance(request, dataexchange.ListOrgDataExchangesRequest): - request = dataexchange.ListOrgDataExchangesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if organization is not None: - request.organization = organization - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_org_data_exchanges] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("organization", request.organization), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListOrgDataExchangesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_data_exchange(self, - request: Optional[Union[dataexchange.GetDataExchangeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dataexchange.DataExchange: - r"""Gets the details of a data exchange. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_data_exchange_v1beta1 - - async def sample_get_data_exchange(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_data_exchange_v1beta1.GetDataExchangeRequest( - name="name_value", - ) - - # Make the request - response = await client.get_data_exchange(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_data_exchange_v1beta1.types.GetDataExchangeRequest, dict]]): - The request object. Message for getting a data exchange. - name (:class:`str`): - Required. The resource name of the data exchange. e.g. - ``projects/myproject/locations/US/dataExchanges/123``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_data_exchange_v1beta1.types.DataExchange: - A data exchange is a container that - lets you share data. 
Along with the - descriptive information about the data - exchange, it contains listings that - reference shared datasets. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dataexchange.GetDataExchangeRequest): - request = dataexchange.GetDataExchangeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_data_exchange] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_data_exchange(self, - request: Optional[Union[dataexchange.CreateDataExchangeRequest, dict]] = None, - *, - parent: Optional[str] = None, - data_exchange: Optional[dataexchange.DataExchange] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dataexchange.DataExchange: - r"""Creates a new data exchange. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_data_exchange_v1beta1 - - async def sample_create_data_exchange(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - data_exchange = bigquery_data_exchange_v1beta1.DataExchange() - data_exchange.display_name = "display_name_value" - - request = bigquery_data_exchange_v1beta1.CreateDataExchangeRequest( - parent="parent_value", - data_exchange_id="data_exchange_id_value", - data_exchange=data_exchange, - ) - - # Make the request - response = await client.create_data_exchange(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_data_exchange_v1beta1.types.CreateDataExchangeRequest, dict]]): - The request object. Message for creating a data exchange. - parent (:class:`str`): - Required. The parent resource path of the data exchange. - e.g. ``projects/myproject/locations/US``. 
- - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - data_exchange (:class:`google.cloud.bigquery_data_exchange_v1beta1.types.DataExchange`): - Required. The data exchange to - create. - - This corresponds to the ``data_exchange`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_data_exchange_v1beta1.types.DataExchange: - A data exchange is a container that - lets you share data. Along with the - descriptive information about the data - exchange, it contains listings that - reference shared datasets. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, data_exchange] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dataexchange.CreateDataExchangeRequest): - request = dataexchange.CreateDataExchangeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if data_exchange is not None: - request.data_exchange = data_exchange - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_data_exchange] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_data_exchange(self, - request: Optional[Union[dataexchange.UpdateDataExchangeRequest, dict]] = None, - *, - data_exchange: Optional[dataexchange.DataExchange] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dataexchange.DataExchange: - r"""Updates an existing data exchange. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_data_exchange_v1beta1 - - async def sample_update_data_exchange(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - data_exchange = bigquery_data_exchange_v1beta1.DataExchange() - data_exchange.display_name = "display_name_value" - - request = bigquery_data_exchange_v1beta1.UpdateDataExchangeRequest( - data_exchange=data_exchange, - ) - - # Make the request - response = await client.update_data_exchange(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_data_exchange_v1beta1.types.UpdateDataExchangeRequest, dict]]): - The request object. Message for updating a data exchange. - data_exchange (:class:`google.cloud.bigquery_data_exchange_v1beta1.types.DataExchange`): - Required. The data exchange to - update. - - This corresponds to the ``data_exchange`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Field mask specifies the fields to update in - the data exchange resource. The fields specified in the - ``updateMask`` are relative to the resource and are not - a full request. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_data_exchange_v1beta1.types.DataExchange: - A data exchange is a container that - lets you share data. Along with the - descriptive information about the data - exchange, it contains listings that - reference shared datasets. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [data_exchange, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dataexchange.UpdateDataExchangeRequest): - request = dataexchange.UpdateDataExchangeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if data_exchange is not None: - request.data_exchange = data_exchange - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
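-        # `_wrapped_methods` maps each raw transport stub to a callable that
-        # already carries the default retry/timeout policy for that RPC.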
- rpc = self._client._transport._wrapped_methods[self._client._transport.update_data_exchange] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("data_exchange.name", request.data_exchange.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_data_exchange(self, - request: Optional[Union[dataexchange.DeleteDataExchangeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes an existing data exchange. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_data_exchange_v1beta1 - - async def sample_delete_data_exchange(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_data_exchange_v1beta1.DeleteDataExchangeRequest( - name="name_value", - ) - - # Make the request - await client.delete_data_exchange(request=request) - - Args: - request (Optional[Union[google.cloud.bigquery_data_exchange_v1beta1.types.DeleteDataExchangeRequest, dict]]): - The request object. Message for deleting a data exchange. - name (:class:`str`): - Required. The full name of the data exchange resource - that you want to delete. For example, - ``projects/myproject/locations/US/dataExchanges/123``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dataexchange.DeleteDataExchangeRequest): - request = dataexchange.DeleteDataExchangeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
-        if name is not None:
-            request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.delete_data_exchange]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-    async def list_listings(self,
-            request: Optional[Union[dataexchange.ListListingsRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> pagers.ListListingsAsyncPager:
-        r"""Lists all listings in a given project and location.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import bigquery_data_exchange_v1beta1
-
-            async def sample_list_listings():
-                # Create a client
-                client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient()
-
-                # Initialize request argument(s)
-                request = bigquery_data_exchange_v1beta1.ListListingsRequest(
-                    parent="parent_value",
-                )
-
-                # Make the request (the paged method must be awaited to obtain the pager)
-                page_result = await client.list_listings(request=request)
-
-                # Handle the response
-                async for response in page_result:
-                    print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.bigquery_data_exchange_v1beta1.types.ListListingsRequest, dict]]):
-                The request object. Message for requesting the list of
-                listings.
-            parent (:class:`str`):
-                Required. The parent resource path of the listing. e.g.
-                ``projects/myproject/locations/US/dataExchanges/123``.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.bigquery_data_exchange_v1beta1.services.analytics_hub_service.pagers.ListListingsAsyncPager:
-                Message for response to the list of
-                listings.
-                Iterating over this object will yield
-                results and resolve additional pages
-                automatically.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
- flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dataexchange.ListListingsRequest): - request = dataexchange.ListListingsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_listings] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListListingsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_listing(self, - request: Optional[Union[dataexchange.GetListingRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dataexchange.Listing: - r"""Gets the details of a listing. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_data_exchange_v1beta1 - - async def sample_get_listing(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_data_exchange_v1beta1.GetListingRequest( - name="name_value", - ) - - # Make the request - response = await client.get_listing(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_data_exchange_v1beta1.types.GetListingRequest, dict]]): - The request object. Message for getting a listing. - name (:class:`str`): - Required. The resource name of the listing. e.g. - ``projects/myproject/locations/US/dataExchanges/123/listings/456``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.bigquery_data_exchange_v1beta1.types.Listing:
-                A listing is what gets published into
-                a data exchange that a subscriber can
-                subscribe to. It contains a reference to
-                the data source along with descriptive
-                information that will help subscribers
-                find and subscribe to the data.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [name]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, dataexchange.GetListingRequest):
-            request = dataexchange.GetListingRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if name is not None:
-            request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.get_listing]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def create_listing(self,
-            request: Optional[Union[dataexchange.CreateListingRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            listing: Optional[dataexchange.Listing] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> dataexchange.Listing:
-        r"""Creates a new listing.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import bigquery_data_exchange_v1beta1
-
-            async def sample_create_listing():
-                # Create a client
-                client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient()
-
-                # Initialize request argument(s)
-                listing = bigquery_data_exchange_v1beta1.Listing()
-                listing.display_name = "display_name_value"
-
-                request = bigquery_data_exchange_v1beta1.CreateListingRequest(
-                    parent="parent_value",
-                    listing_id="listing_id_value",
-                    listing=listing,
-                )
-
-                # Make the request
-                response = await client.create_listing(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.bigquery_data_exchange_v1beta1.types.CreateListingRequest, dict]]):
-                The request object. Message for creating a listing.
-            parent (:class:`str`):
-                Required. The parent resource path of the listing. e.g.
-                ``projects/myproject/locations/US/dataExchanges/123``.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            listing (:class:`google.cloud.bigquery_data_exchange_v1beta1.types.Listing`):
-                Required. The listing to create.
-                This corresponds to the ``listing`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.bigquery_data_exchange_v1beta1.types.Listing:
-                A listing is what gets published into
-                a data exchange that a subscriber can
-                subscribe to. It contains a reference to
-                the data source along with descriptive
-                information that will help subscribers
-                find and subscribe to the data.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [parent, listing]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, dataexchange.CreateListingRequest):
-            request = dataexchange.CreateListingRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if parent is not None:
-            request.parent = parent
-        if listing is not None:
-            request.listing = listing
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.create_listing]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
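-        # `to_grpc_metadata` renders these fields as the
-        # "x-goog-request-params" header, which the backend uses for routing.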
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def update_listing(self,
-            request: Optional[Union[dataexchange.UpdateListingRequest, dict]] = None,
-            *,
-            listing: Optional[dataexchange.Listing] = None,
-            update_mask: Optional[field_mask_pb2.FieldMask] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> dataexchange.Listing:
-        r"""Updates an existing listing.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import bigquery_data_exchange_v1beta1
-
-            async def sample_update_listing():
-                # Create a client
-                client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient()
-
-                # Initialize request argument(s)
-                listing = bigquery_data_exchange_v1beta1.Listing()
-                listing.display_name = "display_name_value"
-
-                request = bigquery_data_exchange_v1beta1.UpdateListingRequest(
-                    listing=listing,
-                )
-
-                # Make the request
-                response = await client.update_listing(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.bigquery_data_exchange_v1beta1.types.UpdateListingRequest, dict]]):
-                The request object. Message for updating a Listing.
-            listing (:class:`google.cloud.bigquery_data_exchange_v1beta1.types.Listing`):
-                Required. The listing to update.
-                This corresponds to the ``listing`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
-                Required. Field mask specifies the fields to update in
-                the listing resource. The fields specified in the
-                ``updateMask`` are relative to the resource and are not
-                a full request.
-
-                This corresponds to the ``update_mask`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.bigquery_data_exchange_v1beta1.types.Listing:
-                A listing is what gets published into
-                a data exchange that a subscriber can
-                subscribe to. It contains a reference to
-                the data source along with descriptive
-                information that will help subscribers
-                find and subscribe to the data.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
- flattened_params = [listing, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dataexchange.UpdateListingRequest): - request = dataexchange.UpdateListingRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if listing is not None: - request.listing = listing - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_listing] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("listing.name", request.listing.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_listing(self, - request: Optional[Union[dataexchange.DeleteListingRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a listing. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_data_exchange_v1beta1 - - async def sample_delete_listing(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_data_exchange_v1beta1.DeleteListingRequest( - name="name_value", - ) - - # Make the request - await client.delete_listing(request=request) - - Args: - request (Optional[Union[google.cloud.bigquery_data_exchange_v1beta1.types.DeleteListingRequest, dict]]): - The request object. Message for deleting a listing. - name (:class:`str`): - Required. Resource name of the listing to delete. e.g. - ``projects/myproject/locations/US/dataExchanges/123/listings/456``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. 
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dataexchange.DeleteListingRequest): - request = dataexchange.DeleteListingRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_listing] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def subscribe_listing(self, - request: Optional[Union[dataexchange.SubscribeListingRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dataexchange.SubscribeListingResponse: - r"""Subscribes to a listing. - - Currently, with Analytics Hub, you can create listings - that reference only BigQuery datasets. - Upon subscription to a listing for a BigQuery dataset, - Analytics Hub creates a linked dataset in the - subscriber's project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_data_exchange_v1beta1 - - async def sample_subscribe_listing(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - destination_dataset = bigquery_data_exchange_v1beta1.DestinationDataset() - destination_dataset.dataset_reference.dataset_id = "dataset_id_value" - destination_dataset.dataset_reference.project_id = "project_id_value" - destination_dataset.location = "location_value" - - request = bigquery_data_exchange_v1beta1.SubscribeListingRequest( - destination_dataset=destination_dataset, - name="name_value", - ) - - # Make the request - response = await client.subscribe_listing(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_data_exchange_v1beta1.types.SubscribeListingRequest, dict]]): - The request object. Message for subscribing to a listing. - name (:class:`str`): - Required. Resource name of the listing that you want to - subscribe to. e.g. 
- ``projects/myproject/locations/US/dataExchanges/123/listings/456``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_data_exchange_v1beta1.types.SubscribeListingResponse: - Message for response when you - subscribe to a listing. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dataexchange.SubscribeListingRequest): - request = dataexchange.SubscribeListingRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.subscribe_listing] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_iam_policy(self, - request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Gets the IAM policy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import bigquery_data_exchange_v1beta1
-            from google.iam.v1 import iam_policy_pb2  # type: ignore
-
-            async def sample_get_iam_policy():
-                # Create a client
-                client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient()
-
-                # Initialize request argument(s)
-                request = iam_policy_pb2.GetIamPolicyRequest(
-                    resource="resource_value",
-                )
-
-                # Make the request
-                response = await client.get_iam_policy(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]]):
-                The request object. Request message for ``GetIamPolicy`` method.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.iam.v1.policy_pb2.Policy:
-                An Identity and Access Management (IAM) policy, which specifies access
-                controls for Google Cloud resources.
-
-                A Policy is a collection of bindings. A binding binds
-                one or more members, or principals, to a single role.
-                Principals can be user accounts, service accounts,
-                Google groups, and domains (such as G Suite). A role
-                is a named list of permissions; each role can be an
-                IAM predefined role or a user-created custom role.
-
-                For some types of Google Cloud resources, a binding
-                can also specify a condition, which is a logical
-                expression that allows access to a resource only if
-                the expression evaluates to true. A condition can add
-                constraints based on attributes of the request, the
-                resource, or both. To learn which resources support
-                conditions in their IAM policies, see the [IAM
-                documentation](https://cloud.google.com/iam/help/conditions/resource-policies).
-
-                **JSON example:**
-
-                .. code-block:: json
-
-                   {
-                     "bindings": [
-                       {
-                         "role": "roles/resourcemanager.organizationAdmin",
-                         "members": [
-                           "user:mike@example.com",
-                           "group:admins@example.com",
-                           "domain:google.com",
-                           "serviceAccount:my-project-id@appspot.gserviceaccount.com"
-                         ]
-                       },
-                       {
-                         "role": "roles/resourcemanager.organizationViewer",
-                         "members": ["user:eve@example.com"],
-                         "condition": {
-                           "title": "expirable access",
-                           "description": "Does not grant access after Sep 2020",
-                           "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')"
-                         }
-                       }
-                     ],
-                     "etag": "BwWWja0YfJA=",
-                     "version": 3
-                   }
-
-                **YAML example:**
-
-                .. code-block:: yaml
-
-                   bindings:
-                   - members:
-                     - user:mike@example.com
-                     - group:admins@example.com
-                     - domain:google.com
-                     - serviceAccount:my-project-id@appspot.gserviceaccount.com
-                     role: roles/resourcemanager.organizationAdmin
-                   - members:
-                     - user:eve@example.com
-                     role: roles/resourcemanager.organizationViewer
-                     condition:
-                       title: expirable access
-                       description: Does not grant access after Sep 2020
-                       expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
-                   etag: BwWWja0YfJA=
-                   version: 3
-
-                For a description of IAM and its features, see the
-                [IAM documentation](https://cloud.google.com/iam/docs/).
-
-        """
-        # Create or coerce a protobuf request object.
- # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - elif not request: - request = iam_policy_pb2.GetIamPolicyRequest() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def set_iam_policy(self, - request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM policy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_data_exchange_v1beta1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - async def sample_set_iam_policy(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.set_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]]): - The request object. Request message for ``SetIamPolicy`` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. 
To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). - - **JSON example:** - - :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](\ https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - elif not request: - request = iam_policy_pb2.SetIamPolicyRequest() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.set_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def test_iam_permissions(self, - request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Returns the permissions that a caller has. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_data_exchange_v1beta1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - async def sample_test_iam_permissions(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = await client.test_iam_permissions(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]]): - The request object. Request message for ``TestIamPermissions`` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: - Response message for TestIamPermissions method. - """ - # Create or coerce a protobuf request object. - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - elif not request: - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.test_iam_permissions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. 
- # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - async def __aenter__(self) -> "AnalyticsHubServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "AnalyticsHubServiceAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/client.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/client.py deleted file mode 100644 index 993036dd2599..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/client.py +++ /dev/null @@ -1,2448 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.bigquery_data_exchange_v1beta1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.cloud.bigquery_data_exchange_v1beta1.services.analytics_hub_service import pagers -from google.cloud.bigquery_data_exchange_v1beta1.types import dataexchange -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from .transports.base import AnalyticsHubServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import AnalyticsHubServiceGrpcTransport -from .transports.grpc_asyncio import AnalyticsHubServiceGrpcAsyncIOTransport - - -class AnalyticsHubServiceClientMeta(type): - """Metaclass for the AnalyticsHubService 
-    client.
-
-    This provides class-level methods for building and retrieving
-    support objects (e.g. transport) without polluting the client instance
-    objects.
-    """
-    _transport_registry = OrderedDict()  # type: Dict[str, Type[AnalyticsHubServiceTransport]]
-    _transport_registry["grpc"] = AnalyticsHubServiceGrpcTransport
-    _transport_registry["grpc_asyncio"] = AnalyticsHubServiceGrpcAsyncIOTransport
-
-    def get_transport_class(cls,
-            label: Optional[str] = None,
-        ) -> Type[AnalyticsHubServiceTransport]:
-        """Returns an appropriate transport class.
-
-        Args:
-            label: The name of the desired transport. If none is
-                provided, then the first transport in the registry is used.
-
-        Returns:
-            The transport class to use.
-        """
-        # If a specific transport is requested, return that one.
-        if label:
-            return cls._transport_registry[label]
-
-        # No transport is requested; return the default (that is, the first one
-        # in the dictionary).
-        return next(iter(cls._transport_registry.values()))
-
-
-class AnalyticsHubServiceClient(metaclass=AnalyticsHubServiceClientMeta):
-    """The ``AnalyticsHubService`` API facilitates data sharing within and
-    across organizations. It allows data providers to publish listings
-    that reference shared datasets. With Analytics Hub, users can
-    discover and search for listings that they have access to.
-    Subscribers can view and subscribe to listings. When you subscribe
-    to a listing, Analytics Hub creates a linked dataset in your
-    project.
-    """
-
-    @staticmethod
-    def _get_default_mtls_endpoint(api_endpoint):
-        """Converts api endpoint to mTLS endpoint.
-
-        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
-        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
-        Args:
-            api_endpoint (Optional[str]): the api endpoint to convert.
-        Returns:
-            str: converted mTLS api endpoint.
-        """
-        if not api_endpoint:
-            return api_endpoint
-
-        mtls_endpoint_re = re.compile(
-            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
-        )
-
-        m = mtls_endpoint_re.match(api_endpoint)
-        name, mtls, sandbox, googledomain = m.groups()
-        if mtls or not googledomain:
-            return api_endpoint
-
-        if sandbox:
-            return api_endpoint.replace(
-                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
-            )
-
-        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
-
-    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
-    DEFAULT_ENDPOINT = "analyticshub.googleapis.com"
-    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
-        DEFAULT_ENDPOINT
-    )
-
-    _DEFAULT_ENDPOINT_TEMPLATE = "analyticshub.{UNIVERSE_DOMAIN}"
-    _DEFAULT_UNIVERSE = "googleapis.com"
-
-    @classmethod
-    def from_service_account_info(cls, info: dict, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            info.
-
-        Args:
-            info (dict): The service account private key info.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            AnalyticsHubServiceClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_info(info)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    @classmethod
-    def from_service_account_file(cls, filename: str, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            file.
-
-        Args:
-            filename (str): The path to the service account private key json
-                file.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            AnalyticsHubServiceClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_file(
-            filename)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    from_service_account_json = from_service_account_file
-
-    @property
-    def transport(self) -> AnalyticsHubServiceTransport:
-        """Returns the transport used by the client instance.
-
-        Returns:
-            AnalyticsHubServiceTransport: The transport used by the client
-                instance.
-        """
-        return self._transport
-
-    @staticmethod
-    def data_exchange_path(project: str,location: str,data_exchange: str,) -> str:
-        """Returns a fully-qualified data_exchange string."""
-        return "projects/{project}/locations/{location}/dataExchanges/{data_exchange}".format(project=project, location=location, data_exchange=data_exchange, )
-
-    @staticmethod
-    def parse_data_exchange_path(path: str) -> Dict[str,str]:
-        """Parses a data_exchange path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/dataExchanges/(?P<data_exchange>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def dataset_path(project: str,dataset: str,) -> str:
-        """Returns a fully-qualified dataset string."""
-        return "projects/{project}/datasets/{dataset}".format(project=project, dataset=dataset, )
-
-    @staticmethod
-    def parse_dataset_path(path: str) -> Dict[str,str]:
-        """Parses a dataset path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/datasets/(?P<dataset>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def listing_path(project: str,location: str,data_exchange: str,listing: str,) -> str:
-        """Returns a fully-qualified listing string."""
-        return "projects/{project}/locations/{location}/dataExchanges/{data_exchange}/listings/{listing}".format(project=project, location=location, data_exchange=data_exchange, listing=listing, )
-
-    @staticmethod
-    def parse_listing_path(path: str) -> Dict[str,str]:
-        """Parses a listing path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/dataExchanges/(?P<data_exchange>.+?)/listings/(?P<listing>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_billing_account_path(billing_account: str, ) -> str:
-        """Returns a fully-qualified billing_account string."""
-        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
-
-    @staticmethod
-    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
-        """Parse a billing_account path into its component segments."""
-        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_folder_path(folder: str, ) -> str:
-        """Returns a fully-qualified folder string."""
-        return "folders/{folder}".format(folder=folder, )
-
-    @staticmethod
-    def parse_common_folder_path(path: str) -> Dict[str,str]:
-        """Parse a folder path into its component segments."""
-        m = re.match(r"^folders/(?P<folder>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_organization_path(organization: str, ) -> str:
-        """Returns a fully-qualified organization string."""
-        return "organizations/{organization}".format(organization=organization, )
-
-    @staticmethod
-    def parse_common_organization_path(path: str) -> Dict[str,str]:
-        """Parse an organization path into its component segments."""
-        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_project_path(project: str, ) -> str:
-        """Returns a fully-qualified project string."""
-        return "projects/{project}".format(project=project, )
-
-    @staticmethod
-    def parse_common_project_path(path: str) -> Dict[str,str]:
-        """Parse a project path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_location_path(project: str, location: str, ) -> str:
-        """Returns a fully-qualified location string."""
-        return "projects/{project}/locations/{location}".format(project=project, location=location, )
-
-    @staticmethod
-    def parse_common_location_path(path: str) -> Dict[str,str]:
-        """Parse a location path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @classmethod
-    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
-        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
-
-        The client cert source is determined in the following order:
-        (1) if the `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
-        client cert source is None.
-        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
-        default client cert source exists, use the default one; otherwise the client cert
-        source is None.
-
-        The API endpoint is determined in the following order:
-        (1) if `client_options.api_endpoint` is provided, use the provided one.
-        (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
-        default mTLS endpoint; if the environment variable is "never", use the default API
-        endpoint; otherwise, if a client cert source exists, use the default mTLS endpoint,
-        and fall back to the default API endpoint if it does not.
-
-        More details can be found at https://google.aip.dev/auth/4114.
-
-        Args:
-            client_options (google.api_core.client_options.ClientOptions): Custom options for the
-                client. Only the `api_endpoint` and `client_cert_source` properties may be used
-                in this method.
-
-        Returns:
-            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
-                client cert source to use.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
-        """
-
-        warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
-            DeprecationWarning)
-        if client_options is None:
-            client_options = client_options_lib.ClientOptions()
-        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
-        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
-        if use_client_cert not in ("true", "false"):
-            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
-        if use_mtls_endpoint not in ("auto", "never", "always"):
-            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
-
-        # Figure out the client cert source to use.
-        client_cert_source = None
-        if use_client_cert == "true":
-            if client_options.client_cert_source:
-                client_cert_source = client_options.client_cert_source
-            elif mtls.has_default_client_cert_source():
-                client_cert_source = mtls.default_client_cert_source()
-
-        # Figure out which api endpoint to use.
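-        # (Summary of the branch below, for reference: an explicit
-        # ``client_options.api_endpoint`` always wins; otherwise "always", or
-        # "auto" together with a client cert source, selects
-        # DEFAULT_MTLS_ENDPOINT; any other combination falls back to
-        # DEFAULT_ENDPOINT.)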
- if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. - - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"]. - """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. 
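-
-        For example (illustrative call; assumes the default universe and no
-        overrides)::
-
-            _get_api_endpoint(None, None, "googleapis.com", "auto")
-            # -> "analyticshub.googleapis.com"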
- """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = AnalyticsHubServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = AnalyticsHubServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = AnalyticsHubServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. - """ - universe_domain = AnalyticsHubServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. - """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, AnalyticsHubServiceTransport, Callable[..., AnalyticsHubServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the analytics hub service client. 
-
-        Args:
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-            transport (Optional[Union[str,AnalyticsHubServiceTransport,Callable[..., AnalyticsHubServiceTransport]]]):
-                The transport to use, or a Callable that constructs and returns a new transport.
-                If a Callable is given, it will be called with the same set of initialization
-                arguments as used in the AnalyticsHubServiceTransport constructor.
-                If set to None, a transport is chosen automatically.
-            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
-                Custom options for the client.
-
-                1. The ``api_endpoint`` property can be used to override the
-                default endpoint provided by the client when ``transport`` is
-                not explicitly provided. If this property is not set and
-                ``transport`` is also not provided, the endpoint is
-                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
-                variable, which has one of the following values:
-                "always" (always use the default mTLS endpoint), "never" (always
-                use the default regular endpoint) and "auto" (auto-switch to the
-                default mTLS endpoint if client certificate is present; this is
-                the default value).
-
-                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
-                is "true", then the ``client_cert_source`` property can be used
-                to provide a client certificate for mTLS transport. If
-                not provided, the default SSL client certificate will be used if
-                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
-                set, no client certificate will be used.
-
-                3. The ``universe_domain`` property can be used to override the
-                default "googleapis.com" universe. Note that the ``api_endpoint``
-                property still takes precedence, and ``universe_domain`` is
-                currently not supported for mTLS.
-
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
-        """
-        self._client_options = client_options
-        if isinstance(self._client_options, dict):
-            self._client_options = client_options_lib.from_dict(self._client_options)
-        if self._client_options is None:
-            self._client_options = client_options_lib.ClientOptions()
-        self._client_options = cast(client_options_lib.ClientOptions, self._client_options)
-
-        universe_domain_opt = getattr(self._client_options, 'universe_domain', None)
-
-        self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = AnalyticsHubServiceClient._read_environment_variables()
-        self._client_cert_source = AnalyticsHubServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert)
-        self._universe_domain = AnalyticsHubServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env)
-        self._api_endpoint = None  # updated below, depending on `transport`
-
-        # Initialize the universe domain validation.
-        self._is_universe_domain_valid = False
-
-        if CLIENT_LOGGING_SUPPORTED:  # pragma: NO COVER
-            # Setup logging.
- client_logging.initialize_logging() - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, AnalyticsHubServiceTransport) - if transport_provided: - # transport is a AnalyticsHubServiceTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." - ) - self._transport = cast(AnalyticsHubServiceTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - AnalyticsHubServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[AnalyticsHubServiceTransport], Callable[..., AnalyticsHubServiceTransport]] = ( - AnalyticsHubServiceClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., AnalyticsHubServiceTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.bigquery.dataexchange_v1beta1.AnalyticsHubServiceClient`.", - extra = { - "serviceName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService", - "credentialsType": None, - } - ) - - def list_data_exchanges(self, - request: Optional[Union[dataexchange.ListDataExchangesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListDataExchangesPager: - r"""Lists all data exchanges in a given project and - location. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_data_exchange_v1beta1 - - def sample_list_data_exchanges(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = bigquery_data_exchange_v1beta1.ListDataExchangesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_exchanges(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.bigquery_data_exchange_v1beta1.types.ListDataExchangesRequest, dict]): - The request object. Message for requesting the list of - data exchanges. - parent (str): - Required. The parent resource path of the data - exchanges. e.g. ``projects/myproject/locations/US``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_data_exchange_v1beta1.services.analytics_hub_service.pagers.ListDataExchangesPager: - Message for response to the list of - data exchanges. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dataexchange.ListDataExchangesRequest): - request = dataexchange.ListDataExchangesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_data_exchanges] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListDataExchangesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_org_data_exchanges(self, - request: Optional[Union[dataexchange.ListOrgDataExchangesRequest, dict]] = None, - *, - organization: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListOrgDataExchangesPager: - r"""Lists all data exchanges from projects in a given - organization and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_data_exchange_v1beta1 - - def sample_list_org_data_exchanges(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = bigquery_data_exchange_v1beta1.ListOrgDataExchangesRequest( - organization="organization_value", - ) - - # Make the request - page_result = client.list_org_data_exchanges(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.bigquery_data_exchange_v1beta1.types.ListOrgDataExchangesRequest, dict]): - The request object. Message for requesting the list of - data exchanges from projects in an - organization and location. - organization (str): - Required. The organization resource path of the projects - containing DataExchanges. e.g. - ``organizations/myorg/locations/US``. - - This corresponds to the ``organization`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_data_exchange_v1beta1.services.analytics_hub_service.pagers.ListOrgDataExchangesPager: - Message for response to listing data - exchanges in an organization and - location. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
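-        #   For example (illustrative), passing both ``request=...`` and
-        #   ``organization=...`` to this method trips the ValueError below.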
- flattened_params = [organization] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dataexchange.ListOrgDataExchangesRequest): - request = dataexchange.ListOrgDataExchangesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if organization is not None: - request.organization = organization - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_org_data_exchanges] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("organization", request.organization), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListOrgDataExchangesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_data_exchange(self, - request: Optional[Union[dataexchange.GetDataExchangeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dataexchange.DataExchange: - r"""Gets the details of a data exchange. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_data_exchange_v1beta1 - - def sample_get_data_exchange(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = bigquery_data_exchange_v1beta1.GetDataExchangeRequest( - name="name_value", - ) - - # Make the request - response = client.get_data_exchange(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_data_exchange_v1beta1.types.GetDataExchangeRequest, dict]): - The request object. Message for getting a data exchange. - name (str): - Required. The resource name of the data exchange. e.g. - ``projects/myproject/locations/US/dataExchanges/123``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_data_exchange_v1beta1.types.DataExchange: - A data exchange is a container that - lets you share data. Along with the - descriptive information about the data - exchange, it contains listings that - reference shared datasets. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dataexchange.GetDataExchangeRequest): - request = dataexchange.GetDataExchangeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_data_exchange] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_data_exchange(self, - request: Optional[Union[dataexchange.CreateDataExchangeRequest, dict]] = None, - *, - parent: Optional[str] = None, - data_exchange: Optional[dataexchange.DataExchange] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dataexchange.DataExchange: - r"""Creates a new data exchange. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_data_exchange_v1beta1 - - def sample_create_data_exchange(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - data_exchange = bigquery_data_exchange_v1beta1.DataExchange() - data_exchange.display_name = "display_name_value" - - request = bigquery_data_exchange_v1beta1.CreateDataExchangeRequest( - parent="parent_value", - data_exchange_id="data_exchange_id_value", - data_exchange=data_exchange, - ) - - # Make the request - response = client.create_data_exchange(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_data_exchange_v1beta1.types.CreateDataExchangeRequest, dict]): - The request object. Message for creating a data exchange. - parent (str): - Required. The parent resource path of the data exchange. - e.g. ``projects/myproject/locations/US``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - data_exchange (google.cloud.bigquery_data_exchange_v1beta1.types.DataExchange): - Required. The data exchange to - create. - - This corresponds to the ``data_exchange`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_data_exchange_v1beta1.types.DataExchange: - A data exchange is a container that - lets you share data. Along with the - descriptive information about the data - exchange, it contains listings that - reference shared datasets. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, data_exchange] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dataexchange.CreateDataExchangeRequest): - request = dataexchange.CreateDataExchangeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if data_exchange is not None: - request.data_exchange = data_exchange - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_data_exchange] - - # Certain fields should be provided within the metadata header; - # add these here. 
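-        # (This attaches the ``parent`` value as an "x-goog-request-params"
-        # gRPC metadata entry so the backend can route the request.)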
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_data_exchange(self, - request: Optional[Union[dataexchange.UpdateDataExchangeRequest, dict]] = None, - *, - data_exchange: Optional[dataexchange.DataExchange] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dataexchange.DataExchange: - r"""Updates an existing data exchange. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_data_exchange_v1beta1 - - def sample_update_data_exchange(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - data_exchange = bigquery_data_exchange_v1beta1.DataExchange() - data_exchange.display_name = "display_name_value" - - request = bigquery_data_exchange_v1beta1.UpdateDataExchangeRequest( - data_exchange=data_exchange, - ) - - # Make the request - response = client.update_data_exchange(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_data_exchange_v1beta1.types.UpdateDataExchangeRequest, dict]): - The request object. Message for updating a data exchange. - data_exchange (google.cloud.bigquery_data_exchange_v1beta1.types.DataExchange): - Required. The data exchange to - update. - - This corresponds to the ``data_exchange`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Field mask specifies the fields to update in - the data exchange resource. The fields specified in the - ``updateMask`` are relative to the resource and are not - a full request. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_data_exchange_v1beta1.types.DataExchange: - A data exchange is a container that - lets you share data. Along with the - descriptive information about the data - exchange, it contains listings that - reference shared datasets. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
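-        #   A typical flattened call looks like (illustrative values only):
-        #
-        #       client.update_data_exchange(
-        #           data_exchange=exchange,
-        #           update_mask=field_mask_pb2.FieldMask(paths=["display_name"]))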
- flattened_params = [data_exchange, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dataexchange.UpdateDataExchangeRequest): - request = dataexchange.UpdateDataExchangeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if data_exchange is not None: - request.data_exchange = data_exchange - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_data_exchange] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("data_exchange.name", request.data_exchange.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_data_exchange(self, - request: Optional[Union[dataexchange.DeleteDataExchangeRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes an existing data exchange. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_data_exchange_v1beta1 - - def sample_delete_data_exchange(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = bigquery_data_exchange_v1beta1.DeleteDataExchangeRequest( - name="name_value", - ) - - # Make the request - client.delete_data_exchange(request=request) - - Args: - request (Union[google.cloud.bigquery_data_exchange_v1beta1.types.DeleteDataExchangeRequest, dict]): - The request object. Message for deleting a data exchange. - name (str): - Required. The full name of the data exchange resource - that you want to delete. For example, - ``projects/myproject/locations/US/dataExchanges/123``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dataexchange.DeleteDataExchangeRequest): - request = dataexchange.DeleteDataExchangeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_data_exchange] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def list_listings(self, - request: Optional[Union[dataexchange.ListListingsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListListingsPager: - r"""Lists all listings in a given project and location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_data_exchange_v1beta1 - - def sample_list_listings(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = bigquery_data_exchange_v1beta1.ListListingsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_listings(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.bigquery_data_exchange_v1beta1.types.ListListingsRequest, dict]): - The request object. Message for requesting the list of - listings. - parent (str): - Required. The parent resource path of the listing. e.g. - ``projects/myproject/locations/US/dataExchanges/123``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_data_exchange_v1beta1.services.analytics_hub_service.pagers.ListListingsPager: - Message for response to the list of - Listings. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dataexchange.ListListingsRequest): - request = dataexchange.ListListingsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_listings] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListListingsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_listing(self, - request: Optional[Union[dataexchange.GetListingRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dataexchange.Listing: - r"""Gets the details of a listing. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_data_exchange_v1beta1 - - def sample_get_listing(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = bigquery_data_exchange_v1beta1.GetListingRequest( - name="name_value", - ) - - # Make the request - response = client.get_listing(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_data_exchange_v1beta1.types.GetListingRequest, dict]): - The request object. Message for getting a listing. - name (str): - Required. 
The resource name of the listing. e.g.
-                ``projects/myproject/locations/US/dataExchanges/123/listings/456``.
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.bigquery_data_exchange_v1beta1.types.Listing:
-                A listing is what gets published into
-                a data exchange that a subscriber can
-                subscribe to. It contains a reference to
-                the data source along with descriptive
-                information that will help subscribers
-                find and subscribe to the data.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [name]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, dataexchange.GetListingRequest):
-            request = dataexchange.GetListingRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if name is not None:
-                request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.get_listing]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def create_listing(self,
-            request: Optional[Union[dataexchange.CreateListingRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            listing: Optional[dataexchange.Listing] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> dataexchange.Listing:
-        r"""Creates a new listing.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import bigquery_data_exchange_v1beta1
-
-            def sample_create_listing():
-                # Create a client
-                client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient()
-
-                # Initialize request argument(s)
-                listing = bigquery_data_exchange_v1beta1.Listing()
-                listing.display_name = "display_name_value"
-
-                request = bigquery_data_exchange_v1beta1.CreateListingRequest(
-                    parent="parent_value",
-                    listing_id="listing_id_value",
-                    listing=listing,
-                )
-
-                # Make the request
-                response = client.create_listing(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.bigquery_data_exchange_v1beta1.types.CreateListingRequest, dict]):
-                The request object. Message for creating a listing.
-            parent (str):
-                Required. The parent resource path of the listing. e.g.
-                ``projects/myproject/locations/US/dataExchanges/123``.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            listing (google.cloud.bigquery_data_exchange_v1beta1.types.Listing):
-                Required. The listing to create.
-                This corresponds to the ``listing`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.bigquery_data_exchange_v1beta1.types.Listing:
-                A listing is what gets published into
-                a data exchange that a subscriber can
-                subscribe to. It contains a reference to
-                the data source along with descriptive
-                information that will help subscribers
-                find and subscribe to the data.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [parent, listing]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, dataexchange.CreateListingRequest):
-            request = dataexchange.CreateListingRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-            if parent is not None:
-                request.parent = parent
-            if listing is not None:
-                request.listing = listing
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.create_listing]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def update_listing(self,
-            request: Optional[Union[dataexchange.UpdateListingRequest, dict]] = None,
-            *,
-            listing: Optional[dataexchange.Listing] = None,
-            update_mask: Optional[field_mask_pb2.FieldMask] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> dataexchange.Listing:
-        r"""Updates an existing listing.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import bigquery_data_exchange_v1beta1
-
-            def sample_update_listing():
-                # Create a client
-                client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient()
-
-                # Initialize request argument(s)
-                listing = bigquery_data_exchange_v1beta1.Listing()
-                listing.display_name = "display_name_value"
-
-                request = bigquery_data_exchange_v1beta1.UpdateListingRequest(
-                    listing=listing,
-                )
-
-                # Make the request
-                response = client.update_listing(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.bigquery_data_exchange_v1beta1.types.UpdateListingRequest, dict]):
-                The request object. Message for updating a listing.
-            listing (google.cloud.bigquery_data_exchange_v1beta1.types.Listing):
-                Required. The listing to update.
-                This corresponds to the ``listing`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            update_mask (google.protobuf.field_mask_pb2.FieldMask):
-                Required. Field mask specifies the fields to update in
-                the listing resource. The fields specified in the
-                ``updateMask`` are relative to the resource and are not
-                a full request.
-
-                This corresponds to the ``update_mask`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.bigquery_data_exchange_v1beta1.types.Listing:
-                A listing is what gets published into
-                a data exchange that a subscriber can
-                subscribe to. It contains a reference to
-                the data source along with descriptive
-                information that will help subscribers
-                find and subscribe to the data.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
- flattened_params = [listing, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dataexchange.UpdateListingRequest): - request = dataexchange.UpdateListingRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if listing is not None: - request.listing = listing - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_listing] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("listing.name", request.listing.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_listing(self, - request: Optional[Union[dataexchange.DeleteListingRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a listing. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_data_exchange_v1beta1 - - def sample_delete_listing(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = bigquery_data_exchange_v1beta1.DeleteListingRequest( - name="name_value", - ) - - # Make the request - client.delete_listing(request=request) - - Args: - request (Union[google.cloud.bigquery_data_exchange_v1beta1.types.DeleteListingRequest, dict]): - The request object. Message for deleting a listing. - name (str): - Required. Resource name of the listing to delete. e.g. - ``projects/myproject/locations/US/dataExchanges/123/listings/456``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. 
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dataexchange.DeleteListingRequest): - request = dataexchange.DeleteListingRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_listing] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def subscribe_listing(self, - request: Optional[Union[dataexchange.SubscribeListingRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dataexchange.SubscribeListingResponse: - r"""Subscribes to a listing. - - Currently, with Analytics Hub, you can create listings - that reference only BigQuery datasets. - Upon subscription to a listing for a BigQuery dataset, - Analytics Hub creates a linked dataset in the - subscriber's project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_data_exchange_v1beta1 - - def sample_subscribe_listing(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - destination_dataset = bigquery_data_exchange_v1beta1.DestinationDataset() - destination_dataset.dataset_reference.dataset_id = "dataset_id_value" - destination_dataset.dataset_reference.project_id = "project_id_value" - destination_dataset.location = "location_value" - - request = bigquery_data_exchange_v1beta1.SubscribeListingRequest( - destination_dataset=destination_dataset, - name="name_value", - ) - - # Make the request - response = client.subscribe_listing(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_data_exchange_v1beta1.types.SubscribeListingRequest, dict]): - The request object. Message for subscribing to a listing. - name (str): - Required. Resource name of the listing that you want to - subscribe to. e.g. - ``projects/myproject/locations/US/dataExchanges/123/listings/456``. 
- - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_data_exchange_v1beta1.types.SubscribeListingResponse: - Message for response when you - subscribe to a listing. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, dataexchange.SubscribeListingRequest): - request = dataexchange.SubscribeListingRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.subscribe_listing] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_iam_policy(self, - request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Gets the IAM policy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_data_exchange_v1beta1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - def sample_get_iam_policy(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.get_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]): - The request object. 
Request message for ``GetIamPolicy`` method.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.iam.v1.policy_pb2.Policy:
-                An Identity and Access Management (IAM) policy, which specifies access
-                controls for Google Cloud resources.
-
-                A Policy is a collection of bindings. A binding binds
-                one or more members, or principals, to a single role.
-                Principals can be user accounts, service accounts,
-                Google groups, and domains (such as G Suite). A role
-                is a named list of permissions; each role can be an
-                IAM predefined role or a user-created custom role.
-
-                For some types of Google Cloud resources, a binding
-                can also specify a condition, which is a logical
-                expression that allows access to a resource only if
-                the expression evaluates to true. A condition can add
-                constraints based on attributes of the request, the
-                resource, or both. To learn which resources support
-                conditions in their IAM policies, see the [IAM
-                documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies).
-
-                **JSON example:**
-
-                .. code-block:: json
-
-                    {
-                      "bindings": [
-                        {
-                          "role": "roles/resourcemanager.organizationAdmin",
-                          "members": [
-                            "user:mike@example.com",
-                            "group:admins@example.com",
-                            "domain:google.com",
-                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
-                          ]
-                        },
-                        {
-                          "role": "roles/resourcemanager.organizationViewer",
-                          "members": [
-                            "user:eve@example.com"
-                          ],
-                          "condition": {
-                            "title": "expirable access",
-                            "description": "Does not grant access after Sep 2020",
-                            "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')"
-                          }
-                        }
-                      ],
-                      "etag": "BwWWja0YfJA=",
-                      "version": 3
-                    }
-
-                **YAML example:**
-
-                .. code-block:: yaml
-
-                    bindings:
-                    - members:
-                      - user:mike@example.com
-                      - group:admins@example.com
-                      - domain:google.com
-                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
-                      role: roles/resourcemanager.organizationAdmin
-                    - members:
-                      - user:eve@example.com
-                      role: roles/resourcemanager.organizationViewer
-                      condition:
-                        title: expirable access
-                        description: Does not grant access after Sep 2020
-                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
-                    etag: BwWWja0YfJA=
-                    version: 3
-
-                For a description of IAM and its features, see the
-                [IAM
-                documentation](\ https://cloud.google.com/iam/docs/).
-
-        """
-        # Create or coerce a protobuf request object.
-        if isinstance(request, dict):
-            # - The request isn't a proto-plus wrapped type,
-            #   so it must be constructed via keyword expansion.
-            request = iam_policy_pb2.GetIamPolicyRequest(**request)
-        elif not request:
-            # Null request, just make one.
-            request = iam_policy_pb2.GetIamPolicyRequest()
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.get_iam_policy]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("resource", request.resource),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def set_iam_policy(self, - request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM policy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_data_exchange_v1beta1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - def sample_set_iam_policy(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.set_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]): - The request object. Request message for ``SetIamPolicy`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
-
-                **JSON example:**
-
-                .. code-block:: json
-
-                    {
-                      "bindings": [
-                        {
-                          "role": "roles/resourcemanager.organizationAdmin",
-                          "members": [
-                            "user:mike@example.com",
-                            "group:admins@example.com",
-                            "domain:google.com",
-                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
-                          ]
-                        },
-                        {
-                          "role": "roles/resourcemanager.organizationViewer",
-                          "members": [
-                            "user:eve@example.com"
-                          ],
-                          "condition": {
-                            "title": "expirable access",
-                            "description": "Does not grant access after Sep 2020",
-                            "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')"
-                          }
-                        }
-                      ],
-                      "etag": "BwWWja0YfJA=",
-                      "version": 3
-                    }
-
-                **YAML example:**
-
-                .. code-block:: yaml
-
-                    bindings:
-                    - members:
-                      - user:mike@example.com
-                      - group:admins@example.com
-                      - domain:google.com
-                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
-                      role: roles/resourcemanager.organizationAdmin
-                    - members:
-                      - user:eve@example.com
-                      role: roles/resourcemanager.organizationViewer
-                      condition:
-                        title: expirable access
-                        description: Does not grant access after Sep 2020
-                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
-                    etag: BwWWja0YfJA=
-                    version: 3
-
-                For a description of IAM and its features, see the
-                [IAM
-                documentation](\ https://cloud.google.com/iam/docs/).
-
-        """
-        # Create or coerce a protobuf request object.
-        if isinstance(request, dict):
-            # - The request isn't a proto-plus wrapped type,
-            #   so it must be constructed via keyword expansion.
-            request = iam_policy_pb2.SetIamPolicyRequest(**request)
-        elif not request:
-            # Null request, just make one.
-            request = iam_policy_pb2.SetIamPolicyRequest()
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.set_iam_policy]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("resource", request.resource),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def test_iam_permissions(self,
-            request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None,
-            *,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> iam_policy_pb2.TestIamPermissionsResponse:
-        r"""Returns the permissions that a caller has.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_data_exchange_v1beta1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - def sample_test_iam_permissions(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = client.test_iam_permissions(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]): - The request object. Request message for ``TestIamPermissions`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: - Response message for TestIamPermissions method. - """ - # Create or coerce a protobuf request object. - if isinstance(request, dict): - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - elif not request: - # Null request, just make one. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "AnalyticsHubServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "AnalyticsHubServiceClient", -) diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/pagers.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/pagers.py deleted file mode 100644 index 684df45274cd..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/pagers.py +++ /dev/null @@ -1,444 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.bigquery_data_exchange_v1beta1.types import dataexchange - - -class ListDataExchangesPager: - """A pager for iterating through ``list_data_exchanges`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_data_exchange_v1beta1.types.ListDataExchangesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``data_exchanges`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListDataExchanges`` requests and continue to iterate - through the ``data_exchanges`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_data_exchange_v1beta1.types.ListDataExchangesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., dataexchange.ListDataExchangesResponse], - request: dataexchange.ListDataExchangesRequest, - response: dataexchange.ListDataExchangesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_data_exchange_v1beta1.types.ListDataExchangesRequest): - The initial request object. 
- response (google.cloud.bigquery_data_exchange_v1beta1.types.ListDataExchangesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = dataexchange.ListDataExchangesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[dataexchange.ListDataExchangesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[dataexchange.DataExchange]: - for page in self.pages: - yield from page.data_exchanges - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDataExchangesAsyncPager: - """A pager for iterating through ``list_data_exchanges`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_data_exchange_v1beta1.types.ListDataExchangesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``data_exchanges`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDataExchanges`` requests and continue to iterate - through the ``data_exchanges`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_data_exchange_v1beta1.types.ListDataExchangesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[dataexchange.ListDataExchangesResponse]], - request: dataexchange.ListDataExchangesRequest, - response: dataexchange.ListDataExchangesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_data_exchange_v1beta1.types.ListDataExchangesRequest): - The initial request object. - response (google.cloud.bigquery_data_exchange_v1beta1.types.ListDataExchangesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = dataexchange.ListDataExchangesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[dataexchange.ListDataExchangesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[dataexchange.DataExchange]: - async def async_generator(): - async for page in self.pages: - for response in page.data_exchanges: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListOrgDataExchangesPager: - """A pager for iterating through ``list_org_data_exchanges`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_data_exchange_v1beta1.types.ListOrgDataExchangesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``data_exchanges`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListOrgDataExchanges`` requests and continue to iterate - through the ``data_exchanges`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_data_exchange_v1beta1.types.ListOrgDataExchangesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., dataexchange.ListOrgDataExchangesResponse], - request: dataexchange.ListOrgDataExchangesRequest, - response: dataexchange.ListOrgDataExchangesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_data_exchange_v1beta1.types.ListOrgDataExchangesRequest): - The initial request object. - response (google.cloud.bigquery_data_exchange_v1beta1.types.ListOrgDataExchangesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = dataexchange.ListOrgDataExchangesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[dataexchange.ListOrgDataExchangesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[dataexchange.DataExchange]: - for page in self.pages: - yield from page.data_exchanges - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListOrgDataExchangesAsyncPager: - """A pager for iterating through ``list_org_data_exchanges`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_data_exchange_v1beta1.types.ListOrgDataExchangesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``data_exchanges`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListOrgDataExchanges`` requests and continue to iterate - through the ``data_exchanges`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_data_exchange_v1beta1.types.ListOrgDataExchangesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[dataexchange.ListOrgDataExchangesResponse]], - request: dataexchange.ListOrgDataExchangesRequest, - response: dataexchange.ListOrgDataExchangesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_data_exchange_v1beta1.types.ListOrgDataExchangesRequest): - The initial request object. - response (google.cloud.bigquery_data_exchange_v1beta1.types.ListOrgDataExchangesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = dataexchange.ListOrgDataExchangesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[dataexchange.ListOrgDataExchangesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[dataexchange.DataExchange]: - async def async_generator(): - async for page in self.pages: - for response in page.data_exchanges: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListListingsPager: - """A pager for iterating through ``list_listings`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_data_exchange_v1beta1.types.ListListingsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``listings`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListListings`` requests and continue to iterate - through the ``listings`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_data_exchange_v1beta1.types.ListListingsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., dataexchange.ListListingsResponse], - request: dataexchange.ListListingsRequest, - response: dataexchange.ListListingsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_data_exchange_v1beta1.types.ListListingsRequest): - The initial request object. - response (google.cloud.bigquery_data_exchange_v1beta1.types.ListListingsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = dataexchange.ListListingsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[dataexchange.ListListingsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[dataexchange.Listing]: - for page in self.pages: - yield from page.listings - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListListingsAsyncPager: - """A pager for iterating through ``list_listings`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_data_exchange_v1beta1.types.ListListingsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``listings`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListListings`` requests and continue to iterate - through the ``listings`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_data_exchange_v1beta1.types.ListListingsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[dataexchange.ListListingsResponse]], - request: dataexchange.ListListingsRequest, - response: dataexchange.ListListingsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_data_exchange_v1beta1.types.ListListingsRequest): - The initial request object. - response (google.cloud.bigquery_data_exchange_v1beta1.types.ListListingsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = dataexchange.ListListingsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[dataexchange.ListListingsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[dataexchange.Listing]: - async def async_generator(): - async for page in self.pages: - for response in page.listings: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/transports/README.rst b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/transports/README.rst deleted file mode 100644 index dbc9199b0730..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`AnalyticsHubServiceTransport` is the ABC for all transports. -- public child `AnalyticsHubServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `AnalyticsHubServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseAnalyticsHubServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `AnalyticsHubServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/transports/__init__.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/transports/__init__.py deleted file mode 100644 index 0627131b6136..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/transports/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -from typing import Dict, Type - -from .base import AnalyticsHubServiceTransport -from .grpc import AnalyticsHubServiceGrpcTransport -from .grpc_asyncio import AnalyticsHubServiceGrpcAsyncIOTransport - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[AnalyticsHubServiceTransport]] -_transport_registry['grpc'] = AnalyticsHubServiceGrpcTransport -_transport_registry['grpc_asyncio'] = AnalyticsHubServiceGrpcAsyncIOTransport - -__all__ = ( - 'AnalyticsHubServiceTransport', - 'AnalyticsHubServiceGrpcTransport', - 'AnalyticsHubServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/transports/base.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/transports/base.py deleted file mode 100644 index 95299926cb02..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/transports/base.py +++ /dev/null @@ -1,381 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.bigquery_data_exchange_v1beta1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.bigquery_data_exchange_v1beta1.types import dataexchange -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class AnalyticsHubServiceTransport(abc.ABC): - """Abstract transport class for AnalyticsHubService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'analyticshub.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. 
- - Args: - host (Optional[str]): - The hostname to connect to (default: 'analyticshub.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. 
- self._wrapped_methods = { - self.list_data_exchanges: gapic_v1.method.wrap_method( - self.list_data_exchanges, - default_timeout=None, - client_info=client_info, - ), - self.list_org_data_exchanges: gapic_v1.method.wrap_method( - self.list_org_data_exchanges, - default_timeout=None, - client_info=client_info, - ), - self.get_data_exchange: gapic_v1.method.wrap_method( - self.get_data_exchange, - default_timeout=None, - client_info=client_info, - ), - self.create_data_exchange: gapic_v1.method.wrap_method( - self.create_data_exchange, - default_timeout=None, - client_info=client_info, - ), - self.update_data_exchange: gapic_v1.method.wrap_method( - self.update_data_exchange, - default_timeout=None, - client_info=client_info, - ), - self.delete_data_exchange: gapic_v1.method.wrap_method( - self.delete_data_exchange, - default_timeout=None, - client_info=client_info, - ), - self.list_listings: gapic_v1.method.wrap_method( - self.list_listings, - default_timeout=None, - client_info=client_info, - ), - self.get_listing: gapic_v1.method.wrap_method( - self.get_listing, - default_timeout=None, - client_info=client_info, - ), - self.create_listing: gapic_v1.method.wrap_method( - self.create_listing, - default_timeout=None, - client_info=client_info, - ), - self.update_listing: gapic_v1.method.wrap_method( - self.update_listing, - default_timeout=None, - client_info=client_info, - ), - self.delete_listing: gapic_v1.method.wrap_method( - self.delete_listing, - default_timeout=None, - client_info=client_info, - ), - self.subscribe_listing: gapic_v1.method.wrap_method( - self.subscribe_listing, - default_timeout=None, - client_info=client_info, - ), - self.get_iam_policy: gapic_v1.method.wrap_method( - self.get_iam_policy, - default_timeout=None, - client_info=client_info, - ), - self.set_iam_policy: gapic_v1.method.wrap_method( - self.set_iam_policy, - default_timeout=None, - client_info=client_info, - ), - self.test_iam_permissions: gapic_v1.method.wrap_method( - self.test_iam_permissions, - default_timeout=None, - client_info=client_info, - ), - self.get_location: gapic_v1.method.wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - self.list_locations: gapic_v1.method.wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
- """ - raise NotImplementedError() - - @property - def list_data_exchanges(self) -> Callable[ - [dataexchange.ListDataExchangesRequest], - Union[ - dataexchange.ListDataExchangesResponse, - Awaitable[dataexchange.ListDataExchangesResponse] - ]]: - raise NotImplementedError() - - @property - def list_org_data_exchanges(self) -> Callable[ - [dataexchange.ListOrgDataExchangesRequest], - Union[ - dataexchange.ListOrgDataExchangesResponse, - Awaitable[dataexchange.ListOrgDataExchangesResponse] - ]]: - raise NotImplementedError() - - @property - def get_data_exchange(self) -> Callable[ - [dataexchange.GetDataExchangeRequest], - Union[ - dataexchange.DataExchange, - Awaitable[dataexchange.DataExchange] - ]]: - raise NotImplementedError() - - @property - def create_data_exchange(self) -> Callable[ - [dataexchange.CreateDataExchangeRequest], - Union[ - dataexchange.DataExchange, - Awaitable[dataexchange.DataExchange] - ]]: - raise NotImplementedError() - - @property - def update_data_exchange(self) -> Callable[ - [dataexchange.UpdateDataExchangeRequest], - Union[ - dataexchange.DataExchange, - Awaitable[dataexchange.DataExchange] - ]]: - raise NotImplementedError() - - @property - def delete_data_exchange(self) -> Callable[ - [dataexchange.DeleteDataExchangeRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def list_listings(self) -> Callable[ - [dataexchange.ListListingsRequest], - Union[ - dataexchange.ListListingsResponse, - Awaitable[dataexchange.ListListingsResponse] - ]]: - raise NotImplementedError() - - @property - def get_listing(self) -> Callable[ - [dataexchange.GetListingRequest], - Union[ - dataexchange.Listing, - Awaitable[dataexchange.Listing] - ]]: - raise NotImplementedError() - - @property - def create_listing(self) -> Callable[ - [dataexchange.CreateListingRequest], - Union[ - dataexchange.Listing, - Awaitable[dataexchange.Listing] - ]]: - raise NotImplementedError() - - @property - def update_listing(self) -> Callable[ - [dataexchange.UpdateListingRequest], - Union[ - dataexchange.Listing, - Awaitable[dataexchange.Listing] - ]]: - raise NotImplementedError() - - @property - def delete_listing(self) -> Callable[ - [dataexchange.DeleteListingRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def subscribe_listing(self) -> Callable[ - [dataexchange.SubscribeListingRequest], - Union[ - dataexchange.SubscribeListingResponse, - Awaitable[dataexchange.SubscribeListingResponse] - ]]: - raise NotImplementedError() - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Union[ - policy_pb2.Policy, - Awaitable[policy_pb2.Policy] - ]]: - raise NotImplementedError() - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Union[ - policy_pb2.Policy, - Awaitable[policy_pb2.Policy] - ]]: - raise NotImplementedError() - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Union[ - iam_policy_pb2.TestIamPermissionsResponse, - Awaitable[iam_policy_pb2.TestIamPermissionsResponse] - ]]: - raise NotImplementedError() - - @property - def get_location(self, - ) -> Callable[ - [locations_pb2.GetLocationRequest], - Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], - ]: - raise NotImplementedError() - - @property - def list_locations(self, - ) -> Callable[ - [locations_pb2.ListLocationsRequest], - 
Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'AnalyticsHubServiceTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/transports/grpc.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/transports/grpc.py deleted file mode 100644 index 3cfbe9cb2e83..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/transports/grpc.py +++ /dev/null @@ -1,765 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json -import logging as std_logging -import pickle -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore - -from google.cloud.bigquery_data_exchange_v1beta1.types import dataexchange -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import AnalyticsHubServiceTransport, DEFAULT_CLIENT_INFO - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER - def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - 
extra = {
-                    "serviceName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService",
-                    "rpcName": client_call_details.method,
-                    "request": grpc_request,
-                    "metadata": grpc_request["metadata"],
-                },
-            )
-
-        response = continuation(client_call_details, request)
-        if logging_enabled:  # pragma: NO COVER
-            response_metadata = response.trailing_metadata()
-            # Convert the gRPC metadata object to a dict of string key/value pairs.
-            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
-            result = response.result()
-            if isinstance(result, proto.Message):
-                response_payload = type(result).to_json(result)
-            elif isinstance(result, google.protobuf.message.Message):
-                response_payload = MessageToJson(result)
-            else:
-                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
-            grpc_response = {
-                "payload": response_payload,
-                "metadata": metadata,
-                "status": "OK",
-            }
-            _LOGGER.debug(
-                f"Received response for {client_call_details.method}.",
-                extra = {
-                    "serviceName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService",
-                    "rpcName": client_call_details.method,
-                    "response": grpc_response,
-                    "metadata": grpc_response["metadata"],
-                },
-            )
-        return response
-
-
-class AnalyticsHubServiceGrpcTransport(AnalyticsHubServiceTransport):
-    """gRPC backend transport for AnalyticsHubService.
-
-    The ``AnalyticsHubService`` API facilitates data sharing within and
-    across organizations. It allows data providers to publish listings
-    that reference shared datasets. With Analytics Hub, users can
-    discover and search for listings that they have access to.
-    Subscribers can view and subscribe to listings. When you subscribe
-    to a listing, Analytics Hub creates a linked dataset in your
-    project.
-
-    This class defines the same methods as the primary client, so the
-    primary client can load the underlying transport implementation
-    and call it.
-
-    It sends protocol buffers over the wire using gRPC (which is built on
-    top of HTTP/2); the ``grpcio`` package must be installed.
-    """
-    _stubs: Dict[str, Callable]
-
-    def __init__(self, *,
-            host: str = 'analyticshub.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None,
-            api_mtls_endpoint: Optional[str] = None,
-            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
-            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            api_audience: Optional[str] = None,
-            ) -> None:
-        """Instantiate the transport.
-
-        Args:
-            host (Optional[str]):
-                 The hostname to connect to (default: 'analyticshub.googleapis.com').
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-                This argument is ignored if a ``channel`` instance is provided.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if a ``channel`` instance is provided.
-            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
-                ignored if a ``channel`` instance is provided.
-            channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]):
-                A ``Channel`` instance through which to make calls, or a Callable
-                that constructs and returns one. If set to None, ``self.create_channel``
-                is used to create the channel. If a Callable is given, it will be called
-                with the same arguments as used in ``self.create_channel``.
-            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
-                If provided, it overrides the ``host`` argument and tries to create
-                a mutual TLS channel with client SSL credentials from
-                ``client_cert_source`` or application default SSL credentials.
-            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                Deprecated. A callback to provide client SSL certificate bytes and
-                private key bytes, both in PEM format. It is ignored if
-                ``api_mtls_endpoint`` is None.
-            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
-                for the grpc channel. It is ignored if a ``channel`` instance is provided.
-            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                A callback to provide client certificate bytes and private key bytes,
-                both in PEM format. It is used to configure a mutual TLS channel. It is
-                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
-                be used for service account credentials.
-
-        Raises:
-          google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-              creation failed for any reason.
-          google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
-              and ``credentials_file`` are passed.
-        """
-        self._grpc_channel = None
-        self._ssl_channel_credentials = ssl_channel_credentials
-        self._stubs: Dict[str, Callable] = {}
-
-        if api_mtls_endpoint:
-            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
-        if client_cert_source:
-            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
-
-        if isinstance(channel, grpc.Channel):
-            # Ignore credentials if a channel was passed.
-            credentials = None
-            self._ignore_credentials = True
-            # If a channel was explicitly provided, set it.
-            self._grpc_channel = channel
-            self._ssl_channel_credentials = None
-
-        else:
-            if api_mtls_endpoint:
-                host = api_mtls_endpoint
-
-            # Create SSL credentials with client_cert_source or application
-            # default SSL credentials.
-            if client_cert_source:
-                cert, key = client_cert_source()
-                self._ssl_channel_credentials = grpc.ssl_channel_credentials(
-                    certificate_chain=cert, private_key=key
-                )
-            else:
-                self._ssl_channel_credentials = SslCredentials().ssl_credentials
-
-        else:
-            if client_cert_source_for_mtls and not ssl_channel_credentials:
-                cert, key = client_cert_source_for_mtls()
-                self._ssl_channel_credentials = grpc.ssl_channel_credentials(
-                    certificate_chain=cert, private_key=key
-                )
-
-        # The base transport sets the host, credentials and scopes
-        super().__init__(
-            host=host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            scopes=scopes,
-            quota_project_id=quota_project_id,
-            client_info=client_info,
-            always_use_jwt_access=always_use_jwt_access,
-            api_audience=api_audience,
-        )
-
-        if not self._grpc_channel:
-            # initialize with the provided callable or the default channel
-            channel_init = channel or type(self).create_channel
-            self._grpc_channel = channel_init(
-                self._host,
-                # use the credentials which are saved
-                credentials=self._credentials,
-                # Set ``credentials_file`` to ``None`` here as
-                # the credentials that we saved earlier should be used.
-                credentials_file=None,
-                scopes=self._scopes,
-                ssl_credentials=self._ssl_channel_credentials,
-                quota_project_id=quota_project_id,
-                options=[
-                    ("grpc.max_send_message_length", -1),
-                    ("grpc.max_receive_message_length", -1),
-                ],
-            )
-
-        self._interceptor = _LoggingClientInterceptor()
-        self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor)
-
-        # Wrap messages. This must be done after self._logged_channel exists
-        self._prep_wrapped_messages(client_info)
-
-    @classmethod
-    def create_channel(cls,
-                       host: str = 'analyticshub.googleapis.com',
-                       credentials: Optional[ga_credentials.Credentials] = None,
-                       credentials_file: Optional[str] = None,
-                       scopes: Optional[Sequence[str]] = None,
-                       quota_project_id: Optional[str] = None,
-                       **kwargs) -> grpc.Channel:
-        """Create and return a gRPC channel object.
-        Args:
-            host (Optional[str]): The host for the channel to use.
-            credentials (Optional[~.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify this application to the service. If
-                none are specified, the client will attempt to ascertain
-                the credentials from the environment.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is mutually exclusive with credentials.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            kwargs (Optional[dict]): Keyword arguments, which are passed to the
-                channel creation.
-        Returns:
-            grpc.Channel: A gRPC channel object.
-
-        Raises:
-            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
-                and ``credentials_file`` are passed.
-        """
-
-        return grpc_helpers.create_channel(
-            host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            quota_project_id=quota_project_id,
-            default_scopes=cls.AUTH_SCOPES,
-            scopes=scopes,
-            default_host=cls.DEFAULT_HOST,
-            **kwargs
-        )
-
-    @property
-    def grpc_channel(self) -> grpc.Channel:
-        """Return the channel designed to connect to this service.
- """ - return self._grpc_channel - - @property - def list_data_exchanges(self) -> Callable[ - [dataexchange.ListDataExchangesRequest], - dataexchange.ListDataExchangesResponse]: - r"""Return a callable for the list data exchanges method over gRPC. - - Lists all data exchanges in a given project and - location. - - Returns: - Callable[[~.ListDataExchangesRequest], - ~.ListDataExchangesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_data_exchanges' not in self._stubs: - self._stubs['list_data_exchanges'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService/ListDataExchanges', - request_serializer=dataexchange.ListDataExchangesRequest.serialize, - response_deserializer=dataexchange.ListDataExchangesResponse.deserialize, - ) - return self._stubs['list_data_exchanges'] - - @property - def list_org_data_exchanges(self) -> Callable[ - [dataexchange.ListOrgDataExchangesRequest], - dataexchange.ListOrgDataExchangesResponse]: - r"""Return a callable for the list org data exchanges method over gRPC. - - Lists all data exchanges from projects in a given - organization and location. - - Returns: - Callable[[~.ListOrgDataExchangesRequest], - ~.ListOrgDataExchangesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_org_data_exchanges' not in self._stubs: - self._stubs['list_org_data_exchanges'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService/ListOrgDataExchanges', - request_serializer=dataexchange.ListOrgDataExchangesRequest.serialize, - response_deserializer=dataexchange.ListOrgDataExchangesResponse.deserialize, - ) - return self._stubs['list_org_data_exchanges'] - - @property - def get_data_exchange(self) -> Callable[ - [dataexchange.GetDataExchangeRequest], - dataexchange.DataExchange]: - r"""Return a callable for the get data exchange method over gRPC. - - Gets the details of a data exchange. - - Returns: - Callable[[~.GetDataExchangeRequest], - ~.DataExchange]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_data_exchange' not in self._stubs: - self._stubs['get_data_exchange'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService/GetDataExchange', - request_serializer=dataexchange.GetDataExchangeRequest.serialize, - response_deserializer=dataexchange.DataExchange.deserialize, - ) - return self._stubs['get_data_exchange'] - - @property - def create_data_exchange(self) -> Callable[ - [dataexchange.CreateDataExchangeRequest], - dataexchange.DataExchange]: - r"""Return a callable for the create data exchange method over gRPC. - - Creates a new data exchange. - - Returns: - Callable[[~.CreateDataExchangeRequest], - ~.DataExchange]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_data_exchange' not in self._stubs: - self._stubs['create_data_exchange'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService/CreateDataExchange', - request_serializer=dataexchange.CreateDataExchangeRequest.serialize, - response_deserializer=dataexchange.DataExchange.deserialize, - ) - return self._stubs['create_data_exchange'] - - @property - def update_data_exchange(self) -> Callable[ - [dataexchange.UpdateDataExchangeRequest], - dataexchange.DataExchange]: - r"""Return a callable for the update data exchange method over gRPC. - - Updates an existing data exchange. - - Returns: - Callable[[~.UpdateDataExchangeRequest], - ~.DataExchange]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_data_exchange' not in self._stubs: - self._stubs['update_data_exchange'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService/UpdateDataExchange', - request_serializer=dataexchange.UpdateDataExchangeRequest.serialize, - response_deserializer=dataexchange.DataExchange.deserialize, - ) - return self._stubs['update_data_exchange'] - - @property - def delete_data_exchange(self) -> Callable[ - [dataexchange.DeleteDataExchangeRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete data exchange method over gRPC. - - Deletes an existing data exchange. - - Returns: - Callable[[~.DeleteDataExchangeRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_data_exchange' not in self._stubs: - self._stubs['delete_data_exchange'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService/DeleteDataExchange', - request_serializer=dataexchange.DeleteDataExchangeRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_data_exchange'] - - @property - def list_listings(self) -> Callable[ - [dataexchange.ListListingsRequest], - dataexchange.ListListingsResponse]: - r"""Return a callable for the list listings method over gRPC. - - Lists all listings in a given project and location. - - Returns: - Callable[[~.ListListingsRequest], - ~.ListListingsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_listings' not in self._stubs: - self._stubs['list_listings'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService/ListListings', - request_serializer=dataexchange.ListListingsRequest.serialize, - response_deserializer=dataexchange.ListListingsResponse.deserialize, - ) - return self._stubs['list_listings'] - - @property - def get_listing(self) -> Callable[ - [dataexchange.GetListingRequest], - dataexchange.Listing]: - r"""Return a callable for the get listing method over gRPC. - - Gets the details of a listing. - - Returns: - Callable[[~.GetListingRequest], - ~.Listing]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_listing' not in self._stubs: - self._stubs['get_listing'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService/GetListing', - request_serializer=dataexchange.GetListingRequest.serialize, - response_deserializer=dataexchange.Listing.deserialize, - ) - return self._stubs['get_listing'] - - @property - def create_listing(self) -> Callable[ - [dataexchange.CreateListingRequest], - dataexchange.Listing]: - r"""Return a callable for the create listing method over gRPC. - - Creates a new listing. - - Returns: - Callable[[~.CreateListingRequest], - ~.Listing]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_listing' not in self._stubs: - self._stubs['create_listing'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService/CreateListing', - request_serializer=dataexchange.CreateListingRequest.serialize, - response_deserializer=dataexchange.Listing.deserialize, - ) - return self._stubs['create_listing'] - - @property - def update_listing(self) -> Callable[ - [dataexchange.UpdateListingRequest], - dataexchange.Listing]: - r"""Return a callable for the update listing method over gRPC. - - Updates an existing listing. - - Returns: - Callable[[~.UpdateListingRequest], - ~.Listing]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_listing' not in self._stubs: - self._stubs['update_listing'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService/UpdateListing', - request_serializer=dataexchange.UpdateListingRequest.serialize, - response_deserializer=dataexchange.Listing.deserialize, - ) - return self._stubs['update_listing'] - - @property - def delete_listing(self) -> Callable[ - [dataexchange.DeleteListingRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete listing method over gRPC. - - Deletes a listing. - - Returns: - Callable[[~.DeleteListingRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_listing' not in self._stubs: - self._stubs['delete_listing'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService/DeleteListing', - request_serializer=dataexchange.DeleteListingRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_listing'] - - @property - def subscribe_listing(self) -> Callable[ - [dataexchange.SubscribeListingRequest], - dataexchange.SubscribeListingResponse]: - r"""Return a callable for the subscribe listing method over gRPC. - - Subscribes to a listing. - - Currently, with Analytics Hub, you can create listings - that reference only BigQuery datasets. - Upon subscription to a listing for a BigQuery dataset, - Analytics Hub creates a linked dataset in the - subscriber's project. - - Returns: - Callable[[~.SubscribeListingRequest], - ~.SubscribeListingResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'subscribe_listing' not in self._stubs: - self._stubs['subscribe_listing'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService/SubscribeListing', - request_serializer=dataexchange.SubscribeListingRequest.serialize, - response_deserializer=dataexchange.SubscribeListingResponse.deserialize, - ) - return self._stubs['subscribe_listing'] - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - - Gets the IAM policy. - - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_iam_policy' not in self._stubs: - self._stubs['get_iam_policy'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService/GetIamPolicy', - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['get_iam_policy'] - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - - Sets the IAM policy. - - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
-        if 'set_iam_policy' not in self._stubs:
-            self._stubs['set_iam_policy'] = self._logged_channel.unary_unary(
-                '/google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService/SetIamPolicy',
-                request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString,
-                response_deserializer=policy_pb2.Policy.FromString,
-            )
-        return self._stubs['set_iam_policy']
-
-    @property
-    def test_iam_permissions(self) -> Callable[
-            [iam_policy_pb2.TestIamPermissionsRequest],
-            iam_policy_pb2.TestIamPermissionsResponse]:
-        r"""Return a callable for the test iam permissions method over gRPC.
-
-        Returns the permissions that a caller has.
-
-        Returns:
-            Callable[[~.TestIamPermissionsRequest],
-                ~.TestIamPermissionsResponse]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'test_iam_permissions' not in self._stubs:
-            self._stubs['test_iam_permissions'] = self._logged_channel.unary_unary(
-                '/google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService/TestIamPermissions',
-                request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString,
-                response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString,
-            )
-        return self._stubs['test_iam_permissions']
-
-    def close(self):
-        self._logged_channel.close()
-
-    @property
-    def list_locations(
-        self,
-    ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]:
-        r"""Return a callable for the list locations method over gRPC.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if "list_locations" not in self._stubs:
-            self._stubs["list_locations"] = self._logged_channel.unary_unary(
-                "/google.cloud.location.Locations/ListLocations",
-                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
-                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
-            )
-        return self._stubs["list_locations"]
-
-    @property
-    def get_location(
-        self,
-    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
-        r"""Return a callable for the get location method over gRPC.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
- if "get_location" not in self._stubs: - self._stubs["get_location"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'AnalyticsHubServiceGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/transports/grpc_asyncio.py deleted file mode 100644 index 6bb7bfdc787d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,865 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import inspect -import json -import pickle -import logging as std_logging -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import exceptions as core_exceptions -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.bigquery_data_exchange_v1beta1.types import dataexchange -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import AnalyticsHubServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import AnalyticsHubServiceGrpcTransport - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER - async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - 
request_payload = MessageToJson(request)
-            else:
-                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
-
-            request_metadata = {
-                key: value.decode("utf-8") if isinstance(value, bytes) else value
-                for key, value in request_metadata
-            }
-            grpc_request = {
-                "payload": request_payload,
-                "requestMethod": "grpc",
-                "metadata": dict(request_metadata),
-            }
-            _LOGGER.debug(
-                f"Sending request for {client_call_details.method}",
-                extra = {
-                    "serviceName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService",
-                    "rpcName": str(client_call_details.method),
-                    "request": grpc_request,
-                    "metadata": grpc_request["metadata"],
-                },
-            )
-        response = await continuation(client_call_details, request)
-        if logging_enabled:  # pragma: NO COVER
-            response_metadata = await response.trailing_metadata()
-            # Convert the gRPC metadata object to a dict of string key/value pairs.
-            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
-            result = await response
-            if isinstance(result, proto.Message):
-                response_payload = type(result).to_json(result)
-            elif isinstance(result, google.protobuf.message.Message):
-                response_payload = MessageToJson(result)
-            else:
-                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
-            grpc_response = {
-                "payload": response_payload,
-                "metadata": metadata,
-                "status": "OK",
-            }
-            _LOGGER.debug(
-                f"Received response to rpc {client_call_details.method}.",
-                extra = {
-                    "serviceName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService",
-                    "rpcName": str(client_call_details.method),
-                    "response": grpc_response,
-                    "metadata": grpc_response["metadata"],
-                },
-            )
-        return response
-
-
-class AnalyticsHubServiceGrpcAsyncIOTransport(AnalyticsHubServiceTransport):
-    """gRPC AsyncIO backend transport for AnalyticsHubService.
-
-    The ``AnalyticsHubService`` API facilitates data sharing within and
-    across organizations. It allows data providers to publish listings
-    that reference shared datasets. With Analytics Hub, users can
-    discover and search for listings that they have access to.
-    Subscribers can view and subscribe to listings. When you subscribe
-    to a listing, Analytics Hub creates a linked dataset in your
-    project.
-
-    This class defines the same methods as the primary client, so the
-    primary client can load the underlying transport implementation
-    and call it.
-
-    It sends protocol buffers over the wire using gRPC (which is built on
-    top of HTTP/2); the ``grpcio`` package must be installed.
-    """
-
-    _grpc_channel: aio.Channel
-    _stubs: Dict[str, Callable] = {}
-
-    @classmethod
-    def create_channel(cls,
-                       host: str = 'analyticshub.googleapis.com',
-                       credentials: Optional[ga_credentials.Credentials] = None,
-                       credentials_file: Optional[str] = None,
-                       scopes: Optional[Sequence[str]] = None,
-                       quota_project_id: Optional[str] = None,
-                       **kwargs) -> aio.Channel:
-        """Create and return a gRPC AsyncIO channel object.
-        Args:
-            host (Optional[str]): The host for the channel to use.
-            credentials (Optional[~.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify this application to the service. If
-                none are specified, the client will attempt to ascertain
-                the credentials from the environment.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'analyticshub.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'analyticshub.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. 
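
Since the ``channel`` argument described above accepts a callable in place of a ready channel, a minimal sketch of that pattern may be useful (the helper name is illustrative, and Application Default Credentials are assumed to be configured):

.. code-block:: python

    from google.cloud.bigquery_data_exchange_v1beta1.services.analytics_hub_service.transports import (
        AnalyticsHubServiceGrpcAsyncIOTransport,
    )

    def make_channel(host, **kwargs):
        # The transport invokes this callable with the same arguments it
        # would otherwise pass to create_channel().
        return AnalyticsHubServiceGrpcAsyncIOTransport.create_channel(host, **kwargs)

    transport = AnalyticsHubServiceGrpcAsyncIOTransport(channel=make_channel)
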
- quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, aio.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. 
- return self._grpc_channel - - @property - def list_data_exchanges(self) -> Callable[ - [dataexchange.ListDataExchangesRequest], - Awaitable[dataexchange.ListDataExchangesResponse]]: - r"""Return a callable for the list data exchanges method over gRPC. - - Lists all data exchanges in a given project and - location. - - Returns: - Callable[[~.ListDataExchangesRequest], - Awaitable[~.ListDataExchangesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_data_exchanges' not in self._stubs: - self._stubs['list_data_exchanges'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService/ListDataExchanges', - request_serializer=dataexchange.ListDataExchangesRequest.serialize, - response_deserializer=dataexchange.ListDataExchangesResponse.deserialize, - ) - return self._stubs['list_data_exchanges'] - - @property - def list_org_data_exchanges(self) -> Callable[ - [dataexchange.ListOrgDataExchangesRequest], - Awaitable[dataexchange.ListOrgDataExchangesResponse]]: - r"""Return a callable for the list org data exchanges method over gRPC. - - Lists all data exchanges from projects in a given - organization and location. - - Returns: - Callable[[~.ListOrgDataExchangesRequest], - Awaitable[~.ListOrgDataExchangesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_org_data_exchanges' not in self._stubs: - self._stubs['list_org_data_exchanges'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService/ListOrgDataExchanges', - request_serializer=dataexchange.ListOrgDataExchangesRequest.serialize, - response_deserializer=dataexchange.ListOrgDataExchangesResponse.deserialize, - ) - return self._stubs['list_org_data_exchanges'] - - @property - def get_data_exchange(self) -> Callable[ - [dataexchange.GetDataExchangeRequest], - Awaitable[dataexchange.DataExchange]]: - r"""Return a callable for the get data exchange method over gRPC. - - Gets the details of a data exchange. - - Returns: - Callable[[~.GetDataExchangeRequest], - Awaitable[~.DataExchange]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_data_exchange' not in self._stubs: - self._stubs['get_data_exchange'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService/GetDataExchange', - request_serializer=dataexchange.GetDataExchangeRequest.serialize, - response_deserializer=dataexchange.DataExchange.deserialize, - ) - return self._stubs['get_data_exchange'] - - @property - def create_data_exchange(self) -> Callable[ - [dataexchange.CreateDataExchangeRequest], - Awaitable[dataexchange.DataExchange]]: - r"""Return a callable for the create data exchange method over gRPC. - - Creates a new data exchange. 
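
Every RPC property on this transport repeats the same memoized-stub idiom; distilled with generic names (a sketch, not code from this file), the pattern is:

.. code-block:: python

    class _StubCache:
        """Sketch of the lazy stub-caching idiom used by these properties."""

        def __init__(self, channel):
            self._channel = channel
            self._stubs = {}

        def unary_unary(self, name, path, serializer, deserializer):
            # Build the stub on first access only; later calls reuse it.
            if name not in self._stubs:
                self._stubs[name] = self._channel.unary_unary(
                    path,
                    request_serializer=serializer,
                    response_deserializer=deserializer,
                )
            return self._stubs[name]
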
- - Returns: - Callable[[~.CreateDataExchangeRequest], - Awaitable[~.DataExchange]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_data_exchange' not in self._stubs: - self._stubs['create_data_exchange'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService/CreateDataExchange', - request_serializer=dataexchange.CreateDataExchangeRequest.serialize, - response_deserializer=dataexchange.DataExchange.deserialize, - ) - return self._stubs['create_data_exchange'] - - @property - def update_data_exchange(self) -> Callable[ - [dataexchange.UpdateDataExchangeRequest], - Awaitable[dataexchange.DataExchange]]: - r"""Return a callable for the update data exchange method over gRPC. - - Updates an existing data exchange. - - Returns: - Callable[[~.UpdateDataExchangeRequest], - Awaitable[~.DataExchange]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_data_exchange' not in self._stubs: - self._stubs['update_data_exchange'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService/UpdateDataExchange', - request_serializer=dataexchange.UpdateDataExchangeRequest.serialize, - response_deserializer=dataexchange.DataExchange.deserialize, - ) - return self._stubs['update_data_exchange'] - - @property - def delete_data_exchange(self) -> Callable[ - [dataexchange.DeleteDataExchangeRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete data exchange method over gRPC. - - Deletes an existing data exchange. - - Returns: - Callable[[~.DeleteDataExchangeRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_data_exchange' not in self._stubs: - self._stubs['delete_data_exchange'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService/DeleteDataExchange', - request_serializer=dataexchange.DeleteDataExchangeRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_data_exchange'] - - @property - def list_listings(self) -> Callable[ - [dataexchange.ListListingsRequest], - Awaitable[dataexchange.ListListingsResponse]]: - r"""Return a callable for the list listings method over gRPC. - - Lists all listings in a given project and location. - - Returns: - Callable[[~.ListListingsRequest], - Awaitable[~.ListListingsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_listings' not in self._stubs: - self._stubs['list_listings'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService/ListListings', - request_serializer=dataexchange.ListListingsRequest.serialize, - response_deserializer=dataexchange.ListListingsResponse.deserialize, - ) - return self._stubs['list_listings'] - - @property - def get_listing(self) -> Callable[ - [dataexchange.GetListingRequest], - Awaitable[dataexchange.Listing]]: - r"""Return a callable for the get listing method over gRPC. - - Gets the details of a listing. - - Returns: - Callable[[~.GetListingRequest], - Awaitable[~.Listing]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_listing' not in self._stubs: - self._stubs['get_listing'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService/GetListing', - request_serializer=dataexchange.GetListingRequest.serialize, - response_deserializer=dataexchange.Listing.deserialize, - ) - return self._stubs['get_listing'] - - @property - def create_listing(self) -> Callable[ - [dataexchange.CreateListingRequest], - Awaitable[dataexchange.Listing]]: - r"""Return a callable for the create listing method over gRPC. - - Creates a new listing. - - Returns: - Callable[[~.CreateListingRequest], - Awaitable[~.Listing]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_listing' not in self._stubs: - self._stubs['create_listing'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService/CreateListing', - request_serializer=dataexchange.CreateListingRequest.serialize, - response_deserializer=dataexchange.Listing.deserialize, - ) - return self._stubs['create_listing'] - - @property - def update_listing(self) -> Callable[ - [dataexchange.UpdateListingRequest], - Awaitable[dataexchange.Listing]]: - r"""Return a callable for the update listing method over gRPC. - - Updates an existing listing. - - Returns: - Callable[[~.UpdateListingRequest], - Awaitable[~.Listing]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_listing' not in self._stubs: - self._stubs['update_listing'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService/UpdateListing', - request_serializer=dataexchange.UpdateListingRequest.serialize, - response_deserializer=dataexchange.Listing.deserialize, - ) - return self._stubs['update_listing'] - - @property - def delete_listing(self) -> Callable[ - [dataexchange.DeleteListingRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete listing method over gRPC. - - Deletes a listing. - - Returns: - Callable[[~.DeleteListingRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_listing' not in self._stubs: - self._stubs['delete_listing'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService/DeleteListing', - request_serializer=dataexchange.DeleteListingRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_listing'] - - @property - def subscribe_listing(self) -> Callable[ - [dataexchange.SubscribeListingRequest], - Awaitable[dataexchange.SubscribeListingResponse]]: - r"""Return a callable for the subscribe listing method over gRPC. - - Subscribes to a listing. - - Currently, with Analytics Hub, you can create listings - that reference only BigQuery datasets. - Upon subscription to a listing for a BigQuery dataset, - Analytics Hub creates a linked dataset in the - subscriber's project. - - Returns: - Callable[[~.SubscribeListingRequest], - Awaitable[~.SubscribeListingResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'subscribe_listing' not in self._stubs: - self._stubs['subscribe_listing'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService/SubscribeListing', - request_serializer=dataexchange.SubscribeListingRequest.serialize, - response_deserializer=dataexchange.SubscribeListingResponse.deserialize, - ) - return self._stubs['subscribe_listing'] - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the get iam policy method over gRPC. - - Gets the IAM policy. - - Returns: - Callable[[~.GetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_iam_policy' not in self._stubs: - self._stubs['get_iam_policy'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService/GetIamPolicy', - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['get_iam_policy'] - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the set iam policy method over gRPC. - - Sets the IAM policy. - - Returns: - Callable[[~.SetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
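
The IAM methods here take the standard ``iam_policy_pb2`` request types; a hedged usage sketch through the async client (the resource name is hypothetical):

.. code-block:: python

    from google.cloud import bigquery_data_exchange_v1beta1
    from google.iam.v1 import iam_policy_pb2

    async def show_policy():
        client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient()
        request = iam_policy_pb2.GetIamPolicyRequest(
            resource="projects/myproject/locations/US/dataExchanges/123",  # hypothetical
        )
        policy = await client.get_iam_policy(request=request)
        print(policy.bindings)
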
- if 'set_iam_policy' not in self._stubs: - self._stubs['set_iam_policy'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService/SetIamPolicy', - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['set_iam_policy'] - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Awaitable[iam_policy_pb2.TestIamPermissionsResponse]]: - r"""Return a callable for the test iam permissions method over gRPC. - - Returns the permissions that a caller has. - - Returns: - Callable[[~.TestIamPermissionsRequest], - Awaitable[~.TestIamPermissionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'test_iam_permissions' not in self._stubs: - self._stubs['test_iam_permissions'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService/TestIamPermissions', - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs['test_iam_permissions'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.list_data_exchanges: self._wrap_method( - self.list_data_exchanges, - default_timeout=None, - client_info=client_info, - ), - self.list_org_data_exchanges: self._wrap_method( - self.list_org_data_exchanges, - default_timeout=None, - client_info=client_info, - ), - self.get_data_exchange: self._wrap_method( - self.get_data_exchange, - default_timeout=None, - client_info=client_info, - ), - self.create_data_exchange: self._wrap_method( - self.create_data_exchange, - default_timeout=None, - client_info=client_info, - ), - self.update_data_exchange: self._wrap_method( - self.update_data_exchange, - default_timeout=None, - client_info=client_info, - ), - self.delete_data_exchange: self._wrap_method( - self.delete_data_exchange, - default_timeout=None, - client_info=client_info, - ), - self.list_listings: self._wrap_method( - self.list_listings, - default_timeout=None, - client_info=client_info, - ), - self.get_listing: self._wrap_method( - self.get_listing, - default_timeout=None, - client_info=client_info, - ), - self.create_listing: self._wrap_method( - self.create_listing, - default_timeout=None, - client_info=client_info, - ), - self.update_listing: self._wrap_method( - self.update_listing, - default_timeout=None, - client_info=client_info, - ), - self.delete_listing: self._wrap_method( - self.delete_listing, - default_timeout=None, - client_info=client_info, - ), - self.subscribe_listing: self._wrap_method( - self.subscribe_listing, - default_timeout=None, - client_info=client_info, - ), - self.get_iam_policy: self._wrap_method( - self.get_iam_policy, - default_timeout=None, - client_info=client_info, - ), - self.set_iam_policy: self._wrap_method( - self.set_iam_policy, - default_timeout=None, - client_info=client_info, - ), - self.test_iam_permissions: self._wrap_method( - self.test_iam_permissions, - default_timeout=None, - client_info=client_info, - ), - self.get_location: self._wrap_method( - 
self.get_location,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-            self.list_locations: self._wrap_method(
-                self.list_locations,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-        }
-
-    def _wrap_method(self, func, *args, **kwargs):
-        if self._wrap_with_kind: # pragma: NO COVER
-            kwargs["kind"] = self.kind
-        return gapic_v1.method_async.wrap_method(func, *args, **kwargs)
-
-    def close(self):
-        return self._logged_channel.close()
-
-    @property
-    def kind(self) -> str:
-        return "grpc_asyncio"
-
-    @property
-    def list_locations(
-        self,
-    ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]:
-        r"""Return a callable for the list locations method over gRPC.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if "list_locations" not in self._stubs:
-            self._stubs["list_locations"] = self._logged_channel.unary_unary(
-                "/google.cloud.location.Locations/ListLocations",
-                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
-                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
-            )
-        return self._stubs["list_locations"]
-
-    @property
-    def get_location(
-        self,
-    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
-        r"""Return a callable for the get location method over gRPC.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if "get_location" not in self._stubs:
-            self._stubs["get_location"] = self._logged_channel.unary_unary(
-                "/google.cloud.location.Locations/GetLocation",
-                request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
-                response_deserializer=locations_pb2.Location.FromString,
-            )
-        return self._stubs["get_location"]
-
-
-__all__ = (
-    'AnalyticsHubServiceGrpcAsyncIOTransport',
-)
diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/types/__init__.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/types/__init__.py
deleted file mode 100644
index 19d17e7730a7..000000000000
--- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/types/__init__.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
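
The ``types`` package below defines the request and response messages used by the transports above; for orientation, a typical paginated call through the async client looks roughly like this (the parent value is hypothetical and default credentials are assumed):

.. code-block:: python

    from google.cloud import bigquery_data_exchange_v1beta1

    async def list_exchanges():
        client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient()
        request = bigquery_data_exchange_v1beta1.ListDataExchangesRequest(
            parent="projects/myproject/locations/US",  # hypothetical
            page_size=50,
        )
        # The pager resolves next_page_token transparently across pages.
        pager = await client.list_data_exchanges(request=request)
        async for data_exchange in pager:
            print(data_exchange.name)
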
-# -from .dataexchange import ( - CreateDataExchangeRequest, - CreateListingRequest, - DataExchange, - DataProvider, - DeleteDataExchangeRequest, - DeleteListingRequest, - DestinationDataset, - DestinationDatasetReference, - GetDataExchangeRequest, - GetListingRequest, - ListDataExchangesRequest, - ListDataExchangesResponse, - Listing, - ListListingsRequest, - ListListingsResponse, - ListOrgDataExchangesRequest, - ListOrgDataExchangesResponse, - Publisher, - SubscribeListingRequest, - SubscribeListingResponse, - UpdateDataExchangeRequest, - UpdateListingRequest, -) - -__all__ = ( - 'CreateDataExchangeRequest', - 'CreateListingRequest', - 'DataExchange', - 'DataProvider', - 'DeleteDataExchangeRequest', - 'DeleteListingRequest', - 'DestinationDataset', - 'DestinationDatasetReference', - 'GetDataExchangeRequest', - 'GetListingRequest', - 'ListDataExchangesRequest', - 'ListDataExchangesResponse', - 'Listing', - 'ListListingsRequest', - 'ListListingsResponse', - 'ListOrgDataExchangesRequest', - 'ListOrgDataExchangesResponse', - 'Publisher', - 'SubscribeListingRequest', - 'SubscribeListingResponse', - 'UpdateDataExchangeRequest', - 'UpdateListingRequest', -) diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/types/dataexchange.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/types/dataexchange.py deleted file mode 100644 index beaf3a5d0047..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/google/cloud/bigquery_data_exchange_v1beta1/types/dataexchange.py +++ /dev/null @@ -1,842 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import wrappers_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.bigquery.dataexchange.v1beta1', - manifest={ - 'DataExchange', - 'DataProvider', - 'Publisher', - 'DestinationDatasetReference', - 'DestinationDataset', - 'Listing', - 'ListDataExchangesRequest', - 'ListDataExchangesResponse', - 'ListOrgDataExchangesRequest', - 'ListOrgDataExchangesResponse', - 'GetDataExchangeRequest', - 'CreateDataExchangeRequest', - 'UpdateDataExchangeRequest', - 'DeleteDataExchangeRequest', - 'ListListingsRequest', - 'ListListingsResponse', - 'GetListingRequest', - 'CreateListingRequest', - 'UpdateListingRequest', - 'DeleteListingRequest', - 'SubscribeListingRequest', - 'SubscribeListingResponse', - }, -) - - -class DataExchange(proto.Message): - r"""A data exchange is a container that lets you share data. - Along with the descriptive information about the data exchange, - it contains listings that reference shared datasets. - - Attributes: - name (str): - Output only. The resource name of the data exchange. e.g. 
- ``projects/myproject/locations/US/dataExchanges/123``. - display_name (str): - Required. Human-readable display name of the data exchange. - The display name must contain only Unicode letters, numbers - (0-9), underscores (_), dashes (-), spaces ( ), ampersands - (&) and must not start or end with spaces. Default value is - an empty string. Max length: 63 bytes. - description (str): - Optional. Description of the data exchange. - The description must not contain Unicode - non-characters as well as C0 and C1 control - codes except tabs (HT), new lines (LF), carriage - returns (CR), and page breaks (FF). Default - value is an empty string. - Max length: 2000 bytes. - primary_contact (str): - Optional. Email or URL of the primary point - of contact of the data exchange. Max Length: - 1000 bytes. - documentation (str): - Optional. Documentation describing the data - exchange. - listing_count (int): - Output only. Number of listings contained in - the data exchange. - icon (bytes): - Optional. Base64 encoded image representing - the data exchange. Max Size: 3.0MiB Expected - image dimensions are 512x512 pixels, however the - API only performs validation on size of the - encoded data. Note: For byte fields, the content - of the fields are base64-encoded (which - increases the size of the data by 33-36%) when - using JSON on the wire. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - description: str = proto.Field( - proto.STRING, - number=3, - ) - primary_contact: str = proto.Field( - proto.STRING, - number=4, - ) - documentation: str = proto.Field( - proto.STRING, - number=5, - ) - listing_count: int = proto.Field( - proto.INT32, - number=6, - ) - icon: bytes = proto.Field( - proto.BYTES, - number=7, - ) - - -class DataProvider(proto.Message): - r"""Contains details of the data provider. - - Attributes: - name (str): - Optional. Name of the data provider. - primary_contact (str): - Optional. Email or URL of the data provider. - Max Length: 1000 bytes. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - primary_contact: str = proto.Field( - proto.STRING, - number=2, - ) - - -class Publisher(proto.Message): - r"""Contains details of the listing publisher. - - Attributes: - name (str): - Optional. Name of the listing publisher. - primary_contact (str): - Optional. Email or URL of the listing - publisher. Max Length: 1000 bytes. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - primary_contact: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DestinationDatasetReference(proto.Message): - r"""Contains the reference that identifies a destination bigquery - dataset. - - Attributes: - dataset_id (str): - Required. A unique ID for this dataset, without the project - name. The ID must contain only letters (a-z, A-Z), numbers - (0-9), or underscores (_). The maximum length is 1,024 - characters. - project_id (str): - Required. The ID of the project containing - this dataset. - """ - - dataset_id: str = proto.Field( - proto.STRING, - number=1, - ) - project_id: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DestinationDataset(proto.Message): - r"""Defines the destination bigquery dataset. - - Attributes: - dataset_reference (google.cloud.bigquery_data_exchange_v1beta1.types.DestinationDatasetReference): - Required. A reference that identifies the - destination dataset. - friendly_name (google.protobuf.wrappers_pb2.StringValue): - Optional. 
A descriptive name for the dataset. - description (google.protobuf.wrappers_pb2.StringValue): - Optional. A user-friendly description of the - dataset. - labels (MutableMapping[str, str]): - Optional. The labels associated with this - dataset. You can use these to organize and group - your datasets. You can set this property when - inserting or updating a dataset. See - https://cloud.google.com/resource-manager/docs/creating-managing-labels - for more information. - location (str): - Required. The geographic location where the - dataset should reside. See - https://cloud.google.com/bigquery/docs/locations - for supported locations. - """ - - dataset_reference: 'DestinationDatasetReference' = proto.Field( - proto.MESSAGE, - number=1, - message='DestinationDatasetReference', - ) - friendly_name: wrappers_pb2.StringValue = proto.Field( - proto.MESSAGE, - number=2, - message=wrappers_pb2.StringValue, - ) - description: wrappers_pb2.StringValue = proto.Field( - proto.MESSAGE, - number=3, - message=wrappers_pb2.StringValue, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=4, - ) - location: str = proto.Field( - proto.STRING, - number=5, - ) - - -class Listing(proto.Message): - r"""A listing is what gets published into a data exchange that a - subscriber can subscribe to. It contains a reference to the data - source along with descriptive information that will help - subscribers find and subscribe the data. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - bigquery_dataset (google.cloud.bigquery_data_exchange_v1beta1.types.Listing.BigQueryDatasetSource): - Required. Shared dataset i.e. BigQuery - dataset source. - - This field is a member of `oneof`_ ``source``. - name (str): - Output only. The resource name of the listing. e.g. - ``projects/myproject/locations/US/dataExchanges/123/listings/456`` - display_name (str): - Required. Human-readable display name of the listing. The - display name must contain only Unicode letters, numbers - (0-9), underscores (_), dashes (-), spaces ( ), ampersands - (&) and can't start or end with spaces. Default value is an - empty string. Max length: 63 bytes. - description (str): - Optional. Short description of the listing. - The description must not contain Unicode - non-characters and C0 and C1 control codes - except tabs (HT), new lines (LF), carriage - returns (CR), and page breaks (FF). Default - value is an empty string. - Max length: 2000 bytes. - primary_contact (str): - Optional. Email or URL of the primary point - of contact of the listing. Max Length: 1000 - bytes. - documentation (str): - Optional. Documentation describing the - listing. - state (google.cloud.bigquery_data_exchange_v1beta1.types.Listing.State): - Output only. Current state of the listing. - icon (bytes): - Optional. Base64 encoded image representing - the listing. Max Size: 3.0MiB Expected image - dimensions are 512x512 pixels, however the API - only performs validation on size of the encoded - data. Note: For byte fields, the contents of the - field are base64-encoded (which increases the - size of the data by 33-36%) when using JSON on - the wire. - data_provider (google.cloud.bigquery_data_exchange_v1beta1.types.DataProvider): - Optional. Details of the data provider who - owns the source data. - categories (MutableSequence[google.cloud.bigquery_data_exchange_v1beta1.types.Listing.Category]): - Optional. Categories of the listing. 
Up to
-            two categories are allowed.
-        publisher (google.cloud.bigquery_data_exchange_v1beta1.types.Publisher):
-            Optional. Details of the publisher who owns
-            the listing and who can share the source data.
-        request_access (str):
-            Optional. Email or URL of the request access
-            of the listing. Subscribers can use this
-            reference to request access. Max Length: 1000
-            bytes.
-    """
-    class State(proto.Enum):
-        r"""State of the listing.
-
-        Values:
-            STATE_UNSPECIFIED (0):
-                Default value. This value is unused.
-            ACTIVE (1):
-                Subscribable state. Users with
-                dataexchange.listings.subscribe permission can
-                subscribe to this listing.
-        """
-        STATE_UNSPECIFIED = 0
-        ACTIVE = 1
-
-    class Category(proto.Enum):
-        r"""Listing categories.
-
-        Values:
-            CATEGORY_UNSPECIFIED (0):
-                No description available.
-            CATEGORY_OTHERS (1):
-                No description available.
-            CATEGORY_ADVERTISING_AND_MARKETING (2):
-                No description available.
-            CATEGORY_COMMERCE (3):
-                No description available.
-            CATEGORY_CLIMATE_AND_ENVIRONMENT (4):
-                No description available.
-            CATEGORY_DEMOGRAPHICS (5):
-                No description available.
-            CATEGORY_ECONOMICS (6):
-                No description available.
-            CATEGORY_EDUCATION (7):
-                No description available.
-            CATEGORY_ENERGY (8):
-                No description available.
-            CATEGORY_FINANCIAL (9):
-                No description available.
-            CATEGORY_GAMING (10):
-                No description available.
-            CATEGORY_GEOSPATIAL (11):
-                No description available.
-            CATEGORY_HEALTHCARE_AND_LIFE_SCIENCE (12):
-                No description available.
-            CATEGORY_MEDIA (13):
-                No description available.
-            CATEGORY_PUBLIC_SECTOR (14):
-                No description available.
-            CATEGORY_RETAIL (15):
-                No description available.
-            CATEGORY_SPORTS (16):
-                No description available.
-            CATEGORY_SCIENCE_AND_RESEARCH (17):
-                No description available.
-            CATEGORY_TRANSPORTATION_AND_LOGISTICS (18):
-                No description available.
-            CATEGORY_TRAVEL_AND_TOURISM (19):
-                No description available.
-        """
-        CATEGORY_UNSPECIFIED = 0
-        CATEGORY_OTHERS = 1
-        CATEGORY_ADVERTISING_AND_MARKETING = 2
-        CATEGORY_COMMERCE = 3
-        CATEGORY_CLIMATE_AND_ENVIRONMENT = 4
-        CATEGORY_DEMOGRAPHICS = 5
-        CATEGORY_ECONOMICS = 6
-        CATEGORY_EDUCATION = 7
-        CATEGORY_ENERGY = 8
-        CATEGORY_FINANCIAL = 9
-        CATEGORY_GAMING = 10
-        CATEGORY_GEOSPATIAL = 11
-        CATEGORY_HEALTHCARE_AND_LIFE_SCIENCE = 12
-        CATEGORY_MEDIA = 13
-        CATEGORY_PUBLIC_SECTOR = 14
-        CATEGORY_RETAIL = 15
-        CATEGORY_SPORTS = 16
-        CATEGORY_SCIENCE_AND_RESEARCH = 17
-        CATEGORY_TRANSPORTATION_AND_LOGISTICS = 18
-        CATEGORY_TRAVEL_AND_TOURISM = 19
-
-    class BigQueryDatasetSource(proto.Message):
-        r"""A reference to a shared dataset. It is an existing BigQuery dataset
-        with a collection of objects such as tables and views that you want
-        to share with subscribers. When subscribers subscribe to a listing,
-        Analytics Hub creates a linked dataset in the subscriber's project.
-        A linked dataset is an opaque, read-only BigQuery dataset that
-        serves as a *symbolic link* to a shared dataset.
-
-        Attributes:
-            dataset (str):
-                Resource name of the dataset source for this listing. e.g.
- ``projects/myproject/datasets/123`` - """ - - dataset: str = proto.Field( - proto.STRING, - number=1, - ) - - bigquery_dataset: BigQueryDatasetSource = proto.Field( - proto.MESSAGE, - number=6, - oneof='source', - message=BigQueryDatasetSource, - ) - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - description: str = proto.Field( - proto.STRING, - number=3, - ) - primary_contact: str = proto.Field( - proto.STRING, - number=4, - ) - documentation: str = proto.Field( - proto.STRING, - number=5, - ) - state: State = proto.Field( - proto.ENUM, - number=7, - enum=State, - ) - icon: bytes = proto.Field( - proto.BYTES, - number=8, - ) - data_provider: 'DataProvider' = proto.Field( - proto.MESSAGE, - number=9, - message='DataProvider', - ) - categories: MutableSequence[Category] = proto.RepeatedField( - proto.ENUM, - number=10, - enum=Category, - ) - publisher: 'Publisher' = proto.Field( - proto.MESSAGE, - number=11, - message='Publisher', - ) - request_access: str = proto.Field( - proto.STRING, - number=12, - ) - - -class ListDataExchangesRequest(proto.Message): - r"""Message for requesting the list of data exchanges. - - Attributes: - parent (str): - Required. The parent resource path of the data exchanges. - e.g. ``projects/myproject/locations/US``. - page_size (int): - The maximum number of results to return in a - single response page. Leverage the page tokens - to iterate through the entire collection. - page_token (str): - Page token, returned by a previous call, to - request the next page of results. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListDataExchangesResponse(proto.Message): - r"""Message for response to the list of data exchanges. - - Attributes: - data_exchanges (MutableSequence[google.cloud.bigquery_data_exchange_v1beta1.types.DataExchange]): - The list of data exchanges. - next_page_token (str): - A token to request the next page of results. - """ - - @property - def raw_page(self): - return self - - data_exchanges: MutableSequence['DataExchange'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='DataExchange', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ListOrgDataExchangesRequest(proto.Message): - r"""Message for requesting the list of data exchanges from - projects in an organization and location. - - Attributes: - organization (str): - Required. The organization resource path of the projects - containing DataExchanges. e.g. - ``organizations/myorg/locations/US``. - page_size (int): - The maximum number of results to return in a - single response page. Leverage the page tokens - to iterate through the entire collection. - page_token (str): - Page token, returned by a previous call, to - request the next page of results. - """ - - organization: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListOrgDataExchangesResponse(proto.Message): - r"""Message for response to listing data exchanges in an - organization and location. - - Attributes: - data_exchanges (MutableSequence[google.cloud.bigquery_data_exchange_v1beta1.types.DataExchange]): - The list of data exchanges. - next_page_token (str): - A token to request the next page of results. 
- """ - - @property - def raw_page(self): - return self - - data_exchanges: MutableSequence['DataExchange'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='DataExchange', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class GetDataExchangeRequest(proto.Message): - r"""Message for getting a data exchange. - - Attributes: - name (str): - Required. The resource name of the data exchange. e.g. - ``projects/myproject/locations/US/dataExchanges/123``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateDataExchangeRequest(proto.Message): - r"""Message for creating a data exchange. - - Attributes: - parent (str): - Required. The parent resource path of the data exchange. - e.g. ``projects/myproject/locations/US``. - data_exchange_id (str): - Required. The ID of the data exchange. Must contain only - Unicode letters, numbers (0-9), underscores (_). Should not - use characters that require URL-escaping, or characters - outside of ASCII, spaces. Max length: 100 bytes. - data_exchange (google.cloud.bigquery_data_exchange_v1beta1.types.DataExchange): - Required. The data exchange to create. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - data_exchange_id: str = proto.Field( - proto.STRING, - number=2, - ) - data_exchange: 'DataExchange' = proto.Field( - proto.MESSAGE, - number=3, - message='DataExchange', - ) - - -class UpdateDataExchangeRequest(proto.Message): - r"""Message for updating a data exchange. - - Attributes: - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Field mask specifies the fields to update in the - data exchange resource. The fields specified in the - ``updateMask`` are relative to the resource and are not a - full request. - data_exchange (google.cloud.bigquery_data_exchange_v1beta1.types.DataExchange): - Required. The data exchange to update. - """ - - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=1, - message=field_mask_pb2.FieldMask, - ) - data_exchange: 'DataExchange' = proto.Field( - proto.MESSAGE, - number=2, - message='DataExchange', - ) - - -class DeleteDataExchangeRequest(proto.Message): - r"""Message for deleting a data exchange. - - Attributes: - name (str): - Required. The full name of the data exchange resource that - you want to delete. For example, - ``projects/myproject/locations/US/dataExchanges/123``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListListingsRequest(proto.Message): - r"""Message for requesting the list of listings. - - Attributes: - parent (str): - Required. The parent resource path of the listing. e.g. - ``projects/myproject/locations/US/dataExchanges/123``. - page_size (int): - The maximum number of results to return in a - single response page. Leverage the page tokens - to iterate through the entire collection. - page_token (str): - Page token, returned by a previous call, to - request the next page of results. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListListingsResponse(proto.Message): - r"""Message for response to the list of Listings. - - Attributes: - listings (MutableSequence[google.cloud.bigquery_data_exchange_v1beta1.types.Listing]): - The list of Listing. - next_page_token (str): - A token to request the next page of results. 
- """ - - @property - def raw_page(self): - return self - - listings: MutableSequence['Listing'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Listing', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class GetListingRequest(proto.Message): - r"""Message for getting a listing. - - Attributes: - name (str): - Required. The resource name of the listing. e.g. - ``projects/myproject/locations/US/dataExchanges/123/listings/456``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateListingRequest(proto.Message): - r"""Message for creating a listing. - - Attributes: - parent (str): - Required. The parent resource path of the listing. e.g. - ``projects/myproject/locations/US/dataExchanges/123``. - listing_id (str): - Required. The ID of the listing to create. Must contain only - Unicode letters, numbers (0-9), underscores (_). Should not - use characters that require URL-escaping, or characters - outside of ASCII, spaces. Max length: 100 bytes. - listing (google.cloud.bigquery_data_exchange_v1beta1.types.Listing): - Required. The listing to create. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - listing_id: str = proto.Field( - proto.STRING, - number=2, - ) - listing: 'Listing' = proto.Field( - proto.MESSAGE, - number=3, - message='Listing', - ) - - -class UpdateListingRequest(proto.Message): - r"""Message for updating a Listing. - - Attributes: - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Field mask specifies the fields to update in the - listing resource. The fields specified in the ``updateMask`` - are relative to the resource and are not a full request. - listing (google.cloud.bigquery_data_exchange_v1beta1.types.Listing): - Required. The listing to update. - """ - - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=1, - message=field_mask_pb2.FieldMask, - ) - listing: 'Listing' = proto.Field( - proto.MESSAGE, - number=2, - message='Listing', - ) - - -class DeleteListingRequest(proto.Message): - r"""Message for deleting a listing. - - Attributes: - name (str): - Required. Resource name of the listing to delete. e.g. - ``projects/myproject/locations/US/dataExchanges/123/listings/456``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class SubscribeListingRequest(proto.Message): - r"""Message for subscribing to a listing. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - destination_dataset (google.cloud.bigquery_data_exchange_v1beta1.types.DestinationDataset): - BigQuery destination dataset to create for - the subscriber. - - This field is a member of `oneof`_ ``destination``. - name (str): - Required. Resource name of the listing that you want to - subscribe to. e.g. - ``projects/myproject/locations/US/dataExchanges/123/listings/456``. - """ - - destination_dataset: 'DestinationDataset' = proto.Field( - proto.MESSAGE, - number=3, - oneof='destination', - message='DestinationDataset', - ) - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class SubscribeListingResponse(proto.Message): - r"""Message for response when you subscribe to a listing. 
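
Putting ``SubscribeListingRequest`` and its ``destination`` oneof together, a hedged sketch of a subscription call (resource names and IDs are hypothetical):

.. code-block:: python

    from google.cloud import bigquery_data_exchange_v1beta1

    async def subscribe(client):
        destination = bigquery_data_exchange_v1beta1.DestinationDataset(
            dataset_reference=bigquery_data_exchange_v1beta1.DestinationDatasetReference(
                project_id="my-subscriber-project",  # hypothetical
                dataset_id="linked_dataset",  # hypothetical
            ),
            location="US",
        )
        request = bigquery_data_exchange_v1beta1.SubscribeListingRequest(
            name="projects/myproject/locations/US/dataExchanges/123/listings/456",  # hypothetical
            destination_dataset=destination,
        )
        return await client.subscribe_listing(request=request)
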
-    """
-
-
-__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/mypy.ini b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/mypy.ini
deleted file mode 100644
index 574c5aed394b..000000000000
--- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/mypy.ini
+++ /dev/null
@@ -1,3 +0,0 @@
-[mypy]
-python_version = 3.7
-namespace_packages = True
diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/noxfile.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/noxfile.py
deleted file mode 100644
index bb4db0bd84b9..000000000000
--- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/noxfile.py
+++ /dev/null
@@ -1,280 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import os
-import pathlib
-import re
-import shutil
-import subprocess
-import sys
-
-
-import nox # type: ignore
-
-ALL_PYTHON = [
-    "3.7",
-    "3.8",
-    "3.9",
-    "3.10",
-    "3.11",
-    "3.12",
-    "3.13",
-]
-
-CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
-
-LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt"
-PACKAGE_NAME = 'google-cloud-bigquery-data-exchange'
-
-BLACK_VERSION = "black==22.3.0"
-BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"]
-DEFAULT_PYTHON_VERSION = "3.13"
-
-nox.sessions = [
-    "unit",
-    "cover",
-    "mypy",
-    "check_lower_bounds",
-    # exclude update_lower_bounds from default
-    "docs",
-    "blacken",
-    "lint",
-    "prerelease_deps",
-]
-
-@nox.session(python=ALL_PYTHON)
-@nox.parametrize(
-    "protobuf_implementation",
-    [ "python", "upb", "cpp" ],
-)
-def unit(session, protobuf_implementation):
-    """Run the unit test suite."""
-
-    if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
-        session.skip("cpp implementation is not supported in python 3.11+")
-
-    session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"')
-    session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt")
-
-    # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped.
-    # The 'cpp' implementation requires Protobuf<4.
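
Note that the ``PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION`` variable passed via ``env=`` below only takes effect if it is set before ``google.protobuf`` is first imported; a quick way to verify which backend is active (a sketch, not part of the noxfile):

.. code-block:: python

    import os

    # Must be set before google.protobuf is imported anywhere in the process.
    os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "upb"

    from google.protobuf.internal import api_implementation

    print(api_implementation.Type())  # expected to print "upb"
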
-    if protobuf_implementation == "cpp":
-        session.install("protobuf<4")
-
-    session.run(
-        'py.test',
-        '--quiet',
-        '--cov=google/cloud/bigquery_data_exchange_v1beta1/',
-        '--cov=tests/',
-        '--cov-config=.coveragerc',
-        '--cov-report=term',
-        '--cov-report=html',
-        os.path.join('tests', 'unit', ''.join(session.posargs)),
-        env={
-            "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
-        },
-    )
-
-@nox.session(python=ALL_PYTHON[-1])
-@nox.parametrize(
-    "protobuf_implementation",
-    [ "python", "upb", "cpp" ],
-)
-def prerelease_deps(session, protobuf_implementation):
-    """Run the unit test suite against pre-release versions of dependencies."""
-
-    if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
-        session.skip("cpp implementation is not supported in python 3.11+")
-
-    # Install test environment dependencies
-    session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"')
-
-    # Install the package without dependencies
-    session.install('-e', '.', '--no-deps')
-
-    # We test the minimum dependency versions using the minimum Python
-    # version so the lowest python runtime that we test has a corresponding constraints
-    # file, located at `testing/constraints-<minimum python version>.txt`, which contains all of the
-    # dependencies and extras.
-    with open(
-        CURRENT_DIRECTORY
-        / "testing"
-        / f"constraints-{ALL_PYTHON[0]}.txt",
-        encoding="utf-8",
-    ) as constraints_file:
-        constraints_text = constraints_file.read()
-
-    # Ignore leading whitespace and comment lines.
-    constraints_deps = [
-        match.group(1)
-        for match in re.finditer(
-            r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE
-        )
-    ]
-
-    session.install(*constraints_deps)
-
-    prerel_deps = [
-        "googleapis-common-protos",
-        "google-api-core",
-        "google-auth",
-        # Exclude grpcio==1.67.0rc1 which does not support python 3.13
-        "grpcio!=1.67.0rc1",
-        "grpcio-status",
-        "protobuf",
-        "proto-plus",
-    ]
-
-    for dep in prerel_deps:
-        session.install("--pre", "--no-deps", "--upgrade", dep)
-
-    # Remaining dependencies
-    other_deps = [
-        "requests",
-    ]
-    session.install(*other_deps)
-
-    # Print out prerelease package versions
-
-    session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)")
-    session.run("python", "-c", "import google.auth; print(google.auth.__version__)")
-    session.run("python", "-c", "import grpc; print(grpc.__version__)")
-    session.run(
-        "python", "-c", "import google.protobuf; print(google.protobuf.__version__)"
-    )
-    session.run(
-        "python", "-c", "import proto; print(proto.__version__)"
    )
-
-    session.run(
-        'py.test',
-        '--quiet',
-        '--cov=google/cloud/bigquery_data_exchange_v1beta1/',
-        '--cov=tests/',
-        '--cov-config=.coveragerc',
-        '--cov-report=term',
-        '--cov-report=html',
-        os.path.join('tests', 'unit', ''.join(session.posargs)),
-        env={
-            "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
-        },
-    )
-
-
-@nox.session(python=DEFAULT_PYTHON_VERSION)
-def cover(session):
-    """Run the final coverage report.
-    This outputs the coverage report aggregating coverage from the unit
-    test runs (not system test runs), and then erases coverage data.
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_create_data_exchange_async.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_create_data_exchange_async.py deleted file mode 100644 index 83fc805202d2..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_create_data_exchange_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-# -# Snippet for CreateDataExchange -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-data-exchange - - -# [START analyticshub_v1beta1_generated_AnalyticsHubService_CreateDataExchange_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_data_exchange_v1beta1 - - -async def sample_create_data_exchange(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - data_exchange = bigquery_data_exchange_v1beta1.DataExchange() - data_exchange.display_name = "display_name_value" - - request = bigquery_data_exchange_v1beta1.CreateDataExchangeRequest( - parent="parent_value", - data_exchange_id="data_exchange_id_value", - data_exchange=data_exchange, - ) - - # Make the request - response = await client.create_data_exchange(request=request) - - # Handle the response - print(response) - -# [END analyticshub_v1beta1_generated_AnalyticsHubService_CreateDataExchange_async] diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_create_data_exchange_sync.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_create_data_exchange_sync.py deleted file mode 100644 index 03485f075624..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_create_data_exchange_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDataExchange -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-data-exchange - - -# [START analyticshub_v1beta1_generated_AnalyticsHubService_CreateDataExchange_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_data_exchange_v1beta1 - - -def sample_create_data_exchange(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - data_exchange = bigquery_data_exchange_v1beta1.DataExchange() - data_exchange.display_name = "display_name_value" - - request = bigquery_data_exchange_v1beta1.CreateDataExchangeRequest( - parent="parent_value", - data_exchange_id="data_exchange_id_value", - data_exchange=data_exchange, - ) - - # Make the request - response = client.create_data_exchange(request=request) - - # Handle the response - print(response) - -# [END analyticshub_v1beta1_generated_AnalyticsHubService_CreateDataExchange_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_create_listing_async.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_create_listing_async.py deleted file mode 100644 index 599c3aa26162..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_create_listing_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateListing -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-data-exchange - - -# [START analyticshub_v1beta1_generated_AnalyticsHubService_CreateListing_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_data_exchange_v1beta1 - - -async def sample_create_listing(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - listing = bigquery_data_exchange_v1beta1.Listing() - listing.display_name = "display_name_value" - - request = bigquery_data_exchange_v1beta1.CreateListingRequest( - parent="parent_value", - listing_id="listing_id_value", - listing=listing, - ) - - # Make the request - response = await client.create_listing(request=request) - - # Handle the response - print(response) - -# [END analyticshub_v1beta1_generated_AnalyticsHubService_CreateListing_async] diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_create_listing_sync.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_create_listing_sync.py deleted file mode 100644 index bdc98f457076..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_create_listing_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateListing -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-data-exchange - - -# [START analyticshub_v1beta1_generated_AnalyticsHubService_CreateListing_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_data_exchange_v1beta1 - - -def sample_create_listing(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - listing = bigquery_data_exchange_v1beta1.Listing() - listing.display_name = "display_name_value" - - request = bigquery_data_exchange_v1beta1.CreateListingRequest( - parent="parent_value", - listing_id="listing_id_value", - listing=listing, - ) - - # Make the request - response = client.create_listing(request=request) - - # Handle the response - print(response) - -# [END analyticshub_v1beta1_generated_AnalyticsHubService_CreateListing_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_delete_data_exchange_async.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_delete_data_exchange_async.py deleted file mode 100644 index 9f02192a96fb..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_delete_data_exchange_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDataExchange -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-data-exchange - - -# [START analyticshub_v1beta1_generated_AnalyticsHubService_DeleteDataExchange_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_data_exchange_v1beta1 - - -async def sample_delete_data_exchange(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_data_exchange_v1beta1.DeleteDataExchangeRequest( - name="name_value", - ) - - # Make the request - await client.delete_data_exchange(request=request) - - -# [END analyticshub_v1beta1_generated_AnalyticsHubService_DeleteDataExchange_async] diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_delete_data_exchange_sync.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_delete_data_exchange_sync.py deleted file mode 100644 index 1a232576d900..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_delete_data_exchange_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDataExchange -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-data-exchange - - -# [START analyticshub_v1beta1_generated_AnalyticsHubService_DeleteDataExchange_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_data_exchange_v1beta1 - - -def sample_delete_data_exchange(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = bigquery_data_exchange_v1beta1.DeleteDataExchangeRequest( - name="name_value", - ) - - # Make the request - client.delete_data_exchange(request=request) - - -# [END analyticshub_v1beta1_generated_AnalyticsHubService_DeleteDataExchange_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_delete_listing_async.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_delete_listing_async.py deleted file mode 100644 index 266aa2c401ec..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_delete_listing_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteListing -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-data-exchange - - -# [START analyticshub_v1beta1_generated_AnalyticsHubService_DeleteListing_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_data_exchange_v1beta1 - - -async def sample_delete_listing(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_data_exchange_v1beta1.DeleteListingRequest( - name="name_value", - ) - - # Make the request - await client.delete_listing(request=request) - - -# [END analyticshub_v1beta1_generated_AnalyticsHubService_DeleteListing_async] diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_delete_listing_sync.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_delete_listing_sync.py deleted file mode 100644 index 8440f222b98f..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_delete_listing_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteListing -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-data-exchange - - -# [START analyticshub_v1beta1_generated_AnalyticsHubService_DeleteListing_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_data_exchange_v1beta1 - - -def sample_delete_listing(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = bigquery_data_exchange_v1beta1.DeleteListingRequest( - name="name_value", - ) - - # Make the request - client.delete_listing(request=request) - - -# [END analyticshub_v1beta1_generated_AnalyticsHubService_DeleteListing_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_get_data_exchange_async.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_get_data_exchange_async.py deleted file mode 100644 index 08e40222d91e..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_get_data_exchange_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDataExchange -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-data-exchange - - -# [START analyticshub_v1beta1_generated_AnalyticsHubService_GetDataExchange_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_data_exchange_v1beta1 - - -async def sample_get_data_exchange(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_data_exchange_v1beta1.GetDataExchangeRequest( - name="name_value", - ) - - # Make the request - response = await client.get_data_exchange(request=request) - - # Handle the response - print(response) - -# [END analyticshub_v1beta1_generated_AnalyticsHubService_GetDataExchange_async] diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_get_data_exchange_sync.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_get_data_exchange_sync.py deleted file mode 100644 index 86e30c9b2ddf..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_get_data_exchange_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDataExchange -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-data-exchange - - -# [START analyticshub_v1beta1_generated_AnalyticsHubService_GetDataExchange_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_data_exchange_v1beta1 - - -def sample_get_data_exchange(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = bigquery_data_exchange_v1beta1.GetDataExchangeRequest( - name="name_value", - ) - - # Make the request - response = client.get_data_exchange(request=request) - - # Handle the response - print(response) - -# [END analyticshub_v1beta1_generated_AnalyticsHubService_GetDataExchange_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_get_iam_policy_async.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_get_iam_policy_async.py deleted file mode 100644 index 45564f571187..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_get_iam_policy_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-data-exchange - - -# [START analyticshub_v1beta1_generated_AnalyticsHubService_GetIamPolicy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_data_exchange_v1beta1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -async def sample_get_iam_policy(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.get_iam_policy(request=request) - - # Handle the response - print(response) - -# [END analyticshub_v1beta1_generated_AnalyticsHubService_GetIamPolicy_async] diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_get_iam_policy_sync.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_get_iam_policy_sync.py deleted file mode 100644 index 771de309ddd6..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_get_iam_policy_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-data-exchange - - -# [START analyticshub_v1beta1_generated_AnalyticsHubService_GetIamPolicy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_data_exchange_v1beta1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -def sample_get_iam_policy(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.get_iam_policy(request=request) - - # Handle the response - print(response) - -# [END analyticshub_v1beta1_generated_AnalyticsHubService_GetIamPolicy_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_get_listing_async.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_get_listing_async.py deleted file mode 100644 index 93b33d5ed576..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_get_listing_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetListing -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-data-exchange - - -# [START analyticshub_v1beta1_generated_AnalyticsHubService_GetListing_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_data_exchange_v1beta1 - - -async def sample_get_listing(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_data_exchange_v1beta1.GetListingRequest( - name="name_value", - ) - - # Make the request - response = await client.get_listing(request=request) - - # Handle the response - print(response) - -# [END analyticshub_v1beta1_generated_AnalyticsHubService_GetListing_async] diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_get_listing_sync.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_get_listing_sync.py deleted file mode 100644 index 9f72fd3d4c15..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_get_listing_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetListing -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-data-exchange - - -# [START analyticshub_v1beta1_generated_AnalyticsHubService_GetListing_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_data_exchange_v1beta1 - - -def sample_get_listing(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = bigquery_data_exchange_v1beta1.GetListingRequest( - name="name_value", - ) - - # Make the request - response = client.get_listing(request=request) - - # Handle the response - print(response) - -# [END analyticshub_v1beta1_generated_AnalyticsHubService_GetListing_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_list_data_exchanges_async.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_list_data_exchanges_async.py deleted file mode 100644 index 8056dddda96a..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_list_data_exchanges_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDataExchanges -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-data-exchange - - -# [START analyticshub_v1beta1_generated_AnalyticsHubService_ListDataExchanges_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_data_exchange_v1beta1 - - -async def sample_list_data_exchanges(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_data_exchange_v1beta1.ListDataExchangesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_exchanges(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END analyticshub_v1beta1_generated_AnalyticsHubService_ListDataExchanges_async] diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_list_data_exchanges_sync.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_list_data_exchanges_sync.py deleted file mode 100644 index 537489f407b8..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_list_data_exchanges_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDataExchanges -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-data-exchange - - -# [START analyticshub_v1beta1_generated_AnalyticsHubService_ListDataExchanges_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_data_exchange_v1beta1 - - -def sample_list_data_exchanges(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = bigquery_data_exchange_v1beta1.ListDataExchangesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_exchanges(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END analyticshub_v1beta1_generated_AnalyticsHubService_ListDataExchanges_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_list_listings_async.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_list_listings_async.py deleted file mode 100644 index 9e8064c72fbd..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_list_listings_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListListings -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-data-exchange - - -# [START analyticshub_v1beta1_generated_AnalyticsHubService_ListListings_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_data_exchange_v1beta1 - - -async def sample_list_listings(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_data_exchange_v1beta1.ListListingsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_listings(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END analyticshub_v1beta1_generated_AnalyticsHubService_ListListings_async] diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_list_listings_sync.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_list_listings_sync.py deleted file mode 100644 index b02f6e437b52..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_list_listings_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListListings -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-data-exchange - - -# [START analyticshub_v1beta1_generated_AnalyticsHubService_ListListings_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_data_exchange_v1beta1 - - -def sample_list_listings(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = bigquery_data_exchange_v1beta1.ListListingsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_listings(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END analyticshub_v1beta1_generated_AnalyticsHubService_ListListings_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_list_org_data_exchanges_async.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_list_org_data_exchanges_async.py deleted file mode 100644 index b35818d4c168..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_list_org_data_exchanges_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListOrgDataExchanges -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-data-exchange - - -# [START analyticshub_v1beta1_generated_AnalyticsHubService_ListOrgDataExchanges_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_data_exchange_v1beta1 - - -async def sample_list_org_data_exchanges(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_data_exchange_v1beta1.ListOrgDataExchangesRequest( - organization="organization_value", - ) - - # Make the request - page_result = client.list_org_data_exchanges(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END analyticshub_v1beta1_generated_AnalyticsHubService_ListOrgDataExchanges_async] diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_list_org_data_exchanges_sync.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_list_org_data_exchanges_sync.py deleted file mode 100644 index b100ede70cfb..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_list_org_data_exchanges_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListOrgDataExchanges -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-data-exchange - - -# [START analyticshub_v1beta1_generated_AnalyticsHubService_ListOrgDataExchanges_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_data_exchange_v1beta1 - - -def sample_list_org_data_exchanges(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = bigquery_data_exchange_v1beta1.ListOrgDataExchangesRequest( - organization="organization_value", - ) - - # Make the request - page_result = client.list_org_data_exchanges(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END analyticshub_v1beta1_generated_AnalyticsHubService_ListOrgDataExchanges_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_set_iam_policy_async.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_set_iam_policy_async.py deleted file mode 100644 index 923da3db4a26..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_set_iam_policy_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-data-exchange - - -# [START analyticshub_v1beta1_generated_AnalyticsHubService_SetIamPolicy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_data_exchange_v1beta1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -async def sample_set_iam_policy(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.set_iam_policy(request=request) - - # Handle the response - print(response) - -# [END analyticshub_v1beta1_generated_AnalyticsHubService_SetIamPolicy_async] diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_set_iam_policy_sync.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_set_iam_policy_sync.py deleted file mode 100644 index fa9e477eaf3a..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_set_iam_policy_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-data-exchange - - -# [START analyticshub_v1beta1_generated_AnalyticsHubService_SetIamPolicy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_data_exchange_v1beta1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -def sample_set_iam_policy(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.set_iam_policy(request=request) - - # Handle the response - print(response) - -# [END analyticshub_v1beta1_generated_AnalyticsHubService_SetIamPolicy_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_subscribe_listing_async.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_subscribe_listing_async.py deleted file mode 100644 index a69963316cc8..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_subscribe_listing_async.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SubscribeListing -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-data-exchange - - -# [START analyticshub_v1beta1_generated_AnalyticsHubService_SubscribeListing_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_data_exchange_v1beta1 - - -async def sample_subscribe_listing(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - destination_dataset = bigquery_data_exchange_v1beta1.DestinationDataset() - destination_dataset.dataset_reference.dataset_id = "dataset_id_value" - destination_dataset.dataset_reference.project_id = "project_id_value" - destination_dataset.location = "location_value" - - request = bigquery_data_exchange_v1beta1.SubscribeListingRequest( - destination_dataset=destination_dataset, - name="name_value", - ) - - # Make the request - response = await client.subscribe_listing(request=request) - - # Handle the response - print(response) - -# [END analyticshub_v1beta1_generated_AnalyticsHubService_SubscribeListing_async] diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_subscribe_listing_sync.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_subscribe_listing_sync.py deleted file mode 100644 index bb10db3d1b83..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_subscribe_listing_sync.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SubscribeListing -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-data-exchange - - -# [START analyticshub_v1beta1_generated_AnalyticsHubService_SubscribeListing_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_data_exchange_v1beta1 - - -def sample_subscribe_listing(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - destination_dataset = bigquery_data_exchange_v1beta1.DestinationDataset() - destination_dataset.dataset_reference.dataset_id = "dataset_id_value" - destination_dataset.dataset_reference.project_id = "project_id_value" - destination_dataset.location = "location_value" - - request = bigquery_data_exchange_v1beta1.SubscribeListingRequest( - destination_dataset=destination_dataset, - name="name_value", - ) - - # Make the request - response = client.subscribe_listing(request=request) - - # Handle the response - print(response) - -# [END analyticshub_v1beta1_generated_AnalyticsHubService_SubscribeListing_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_test_iam_permissions_async.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_test_iam_permissions_async.py deleted file mode 100644 index ac7f54dfeccb..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_test_iam_permissions_async.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for TestIamPermissions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-data-exchange - - -# [START analyticshub_v1beta1_generated_AnalyticsHubService_TestIamPermissions_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_data_exchange_v1beta1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -async def sample_test_iam_permissions(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = await client.test_iam_permissions(request=request) - - # Handle the response - print(response) - -# [END analyticshub_v1beta1_generated_AnalyticsHubService_TestIamPermissions_async] diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_test_iam_permissions_sync.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_test_iam_permissions_sync.py deleted file mode 100644 index e67b6a61df40..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_test_iam_permissions_sync.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for TestIamPermissions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-data-exchange - - -# [START analyticshub_v1beta1_generated_AnalyticsHubService_TestIamPermissions_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_data_exchange_v1beta1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -def sample_test_iam_permissions(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = client.test_iam_permissions(request=request) - - # Handle the response - print(response) - -# [END analyticshub_v1beta1_generated_AnalyticsHubService_TestIamPermissions_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_update_data_exchange_async.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_update_data_exchange_async.py deleted file mode 100644 index bc8bebe53368..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_update_data_exchange_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDataExchange -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-data-exchange - - -# [START analyticshub_v1beta1_generated_AnalyticsHubService_UpdateDataExchange_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_data_exchange_v1beta1 - - -async def sample_update_data_exchange(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - data_exchange = bigquery_data_exchange_v1beta1.DataExchange() - data_exchange.display_name = "display_name_value" - - request = bigquery_data_exchange_v1beta1.UpdateDataExchangeRequest( - data_exchange=data_exchange, - ) - - # Make the request - response = await client.update_data_exchange(request=request) - - # Handle the response - print(response) - -# [END analyticshub_v1beta1_generated_AnalyticsHubService_UpdateDataExchange_async] diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_update_data_exchange_sync.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_update_data_exchange_sync.py deleted file mode 100644 index eaa5e7f3a1b3..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_update_data_exchange_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDataExchange -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-data-exchange - - -# [START analyticshub_v1beta1_generated_AnalyticsHubService_UpdateDataExchange_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_data_exchange_v1beta1 - - -def sample_update_data_exchange(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - data_exchange = bigquery_data_exchange_v1beta1.DataExchange() - data_exchange.display_name = "display_name_value" - - request = bigquery_data_exchange_v1beta1.UpdateDataExchangeRequest( - data_exchange=data_exchange, - ) - - # Make the request - response = client.update_data_exchange(request=request) - - # Handle the response - print(response) - -# [END analyticshub_v1beta1_generated_AnalyticsHubService_UpdateDataExchange_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_update_listing_async.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_update_listing_async.py deleted file mode 100644 index a6f14e1c2eb4..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_update_listing_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateListing -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-data-exchange - - -# [START analyticshub_v1beta1_generated_AnalyticsHubService_UpdateListing_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_data_exchange_v1beta1 - - -async def sample_update_listing(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient() - - # Initialize request argument(s) - listing = bigquery_data_exchange_v1beta1.Listing() - listing.display_name = "display_name_value" - - request = bigquery_data_exchange_v1beta1.UpdateListingRequest( - listing=listing, - ) - - # Make the request - response = await client.update_listing(request=request) - - # Handle the response - print(response) - -# [END analyticshub_v1beta1_generated_AnalyticsHubService_UpdateListing_async] diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_update_listing_sync.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_update_listing_sync.py deleted file mode 100644 index eee544ee11eb..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/analyticshub_v1beta1_generated_analytics_hub_service_update_listing_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateListing -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-data-exchange - - -# [START analyticshub_v1beta1_generated_AnalyticsHubService_UpdateListing_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_data_exchange_v1beta1 - - -def sample_update_listing(): - # Create a client - client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient() - - # Initialize request argument(s) - listing = bigquery_data_exchange_v1beta1.Listing() - listing.display_name = "display_name_value" - - request = bigquery_data_exchange_v1beta1.UpdateListingRequest( - listing=listing, - ) - - # Make the request - response = client.update_listing(request=request) - - # Handle the response - print(response) - -# [END analyticshub_v1beta1_generated_AnalyticsHubService_UpdateListing_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/snippet_metadata_google.cloud.bigquery.dataexchange.v1beta1.json b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/snippet_metadata_google.cloud.bigquery.dataexchange.v1beta1.json deleted file mode 100644 index 0103e35cd88a..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/samples/generated_samples/snippet_metadata_google.cloud.bigquery.dataexchange.v1beta1.json +++ /dev/null @@ -1,2426 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.bigquery.dataexchange.v1beta1", - "version": "v1beta1" - } - ], - "language": "PYTHON", - "name": "google-cloud-bigquery-data-exchange", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient", - "shortName": "AnalyticsHubServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient.create_data_exchange", - "method": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService.CreateDataExchange", - "service": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "CreateDataExchange" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_data_exchange_v1beta1.types.CreateDataExchangeRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "data_exchange", - "type": "google.cloud.bigquery_data_exchange_v1beta1.types.DataExchange" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_data_exchange_v1beta1.types.DataExchange", - "shortName": "create_data_exchange" - }, - "description": "Sample for CreateDataExchange", - "file": "analyticshub_v1beta1_generated_analytics_hub_service_create_data_exchange_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1beta1_generated_AnalyticsHubService_CreateDataExchange_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"analyticshub_v1beta1_generated_analytics_hub_service_create_data_exchange_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient", - "shortName": "AnalyticsHubServiceClient" - }, - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient.create_data_exchange", - "method": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService.CreateDataExchange", - "service": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "CreateDataExchange" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_data_exchange_v1beta1.types.CreateDataExchangeRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "data_exchange", - "type": "google.cloud.bigquery_data_exchange_v1beta1.types.DataExchange" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_data_exchange_v1beta1.types.DataExchange", - "shortName": "create_data_exchange" - }, - "description": "Sample for CreateDataExchange", - "file": "analyticshub_v1beta1_generated_analytics_hub_service_create_data_exchange_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1beta1_generated_AnalyticsHubService_CreateDataExchange_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1beta1_generated_analytics_hub_service_create_data_exchange_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient", - "shortName": "AnalyticsHubServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient.create_listing", - "method": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService.CreateListing", - "service": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "CreateListing" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_data_exchange_v1beta1.types.CreateListingRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "listing", - "type": "google.cloud.bigquery_data_exchange_v1beta1.types.Listing" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_data_exchange_v1beta1.types.Listing", - "shortName": "create_listing" - }, - "description": "Sample for CreateListing", - "file": "analyticshub_v1beta1_generated_analytics_hub_service_create_listing_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"analyticshub_v1beta1_generated_AnalyticsHubService_CreateListing_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1beta1_generated_analytics_hub_service_create_listing_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient", - "shortName": "AnalyticsHubServiceClient" - }, - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient.create_listing", - "method": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService.CreateListing", - "service": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "CreateListing" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_data_exchange_v1beta1.types.CreateListingRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "listing", - "type": "google.cloud.bigquery_data_exchange_v1beta1.types.Listing" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_data_exchange_v1beta1.types.Listing", - "shortName": "create_listing" - }, - "description": "Sample for CreateListing", - "file": "analyticshub_v1beta1_generated_analytics_hub_service_create_listing_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1beta1_generated_AnalyticsHubService_CreateListing_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1beta1_generated_analytics_hub_service_create_listing_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient", - "shortName": "AnalyticsHubServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient.delete_data_exchange", - "method": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService.DeleteDataExchange", - "service": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "DeleteDataExchange" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_data_exchange_v1beta1.types.DeleteDataExchangeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": 
"delete_data_exchange" - }, - "description": "Sample for DeleteDataExchange", - "file": "analyticshub_v1beta1_generated_analytics_hub_service_delete_data_exchange_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1beta1_generated_AnalyticsHubService_DeleteDataExchange_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1beta1_generated_analytics_hub_service_delete_data_exchange_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient", - "shortName": "AnalyticsHubServiceClient" - }, - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient.delete_data_exchange", - "method": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService.DeleteDataExchange", - "service": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "DeleteDataExchange" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_data_exchange_v1beta1.types.DeleteDataExchangeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_data_exchange" - }, - "description": "Sample for DeleteDataExchange", - "file": "analyticshub_v1beta1_generated_analytics_hub_service_delete_data_exchange_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1beta1_generated_AnalyticsHubService_DeleteDataExchange_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1beta1_generated_analytics_hub_service_delete_data_exchange_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient", - "shortName": "AnalyticsHubServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient.delete_listing", - "method": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService.DeleteListing", - "service": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "DeleteListing" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_data_exchange_v1beta1.types.DeleteListingRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, 
Union[str, bytes]]]" - } - ], - "shortName": "delete_listing" - }, - "description": "Sample for DeleteListing", - "file": "analyticshub_v1beta1_generated_analytics_hub_service_delete_listing_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1beta1_generated_AnalyticsHubService_DeleteListing_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1beta1_generated_analytics_hub_service_delete_listing_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient", - "shortName": "AnalyticsHubServiceClient" - }, - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient.delete_listing", - "method": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService.DeleteListing", - "service": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "DeleteListing" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_data_exchange_v1beta1.types.DeleteListingRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_listing" - }, - "description": "Sample for DeleteListing", - "file": "analyticshub_v1beta1_generated_analytics_hub_service_delete_listing_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1beta1_generated_AnalyticsHubService_DeleteListing_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1beta1_generated_analytics_hub_service_delete_listing_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient", - "shortName": "AnalyticsHubServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient.get_data_exchange", - "method": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService.GetDataExchange", - "service": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "GetDataExchange" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_data_exchange_v1beta1.types.GetDataExchangeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - 
"resultType": "google.cloud.bigquery_data_exchange_v1beta1.types.DataExchange", - "shortName": "get_data_exchange" - }, - "description": "Sample for GetDataExchange", - "file": "analyticshub_v1beta1_generated_analytics_hub_service_get_data_exchange_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1beta1_generated_AnalyticsHubService_GetDataExchange_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1beta1_generated_analytics_hub_service_get_data_exchange_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient", - "shortName": "AnalyticsHubServiceClient" - }, - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient.get_data_exchange", - "method": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService.GetDataExchange", - "service": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "GetDataExchange" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_data_exchange_v1beta1.types.GetDataExchangeRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_data_exchange_v1beta1.types.DataExchange", - "shortName": "get_data_exchange" - }, - "description": "Sample for GetDataExchange", - "file": "analyticshub_v1beta1_generated_analytics_hub_service_get_data_exchange_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1beta1_generated_AnalyticsHubService_GetDataExchange_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1beta1_generated_analytics_hub_service_get_data_exchange_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient", - "shortName": "AnalyticsHubServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient.get_iam_policy", - "method": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService.GetIamPolicy", - "service": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "GetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" - }, - { - "name": "retry", - "type": 
"google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "get_iam_policy" - }, - "description": "Sample for GetIamPolicy", - "file": "analyticshub_v1beta1_generated_analytics_hub_service_get_iam_policy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1beta1_generated_AnalyticsHubService_GetIamPolicy_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1beta1_generated_analytics_hub_service_get_iam_policy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient", - "shortName": "AnalyticsHubServiceClient" - }, - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient.get_iam_policy", - "method": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService.GetIamPolicy", - "service": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "GetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "get_iam_policy" - }, - "description": "Sample for GetIamPolicy", - "file": "analyticshub_v1beta1_generated_analytics_hub_service_get_iam_policy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1beta1_generated_AnalyticsHubService_GetIamPolicy_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1beta1_generated_analytics_hub_service_get_iam_policy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient", - "shortName": "AnalyticsHubServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient.get_listing", - "method": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService.GetListing", - "service": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "GetListing" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_data_exchange_v1beta1.types.GetListingRequest" - }, - { - "name": "name", - "type": "str" - }, 
- { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_data_exchange_v1beta1.types.Listing", - "shortName": "get_listing" - }, - "description": "Sample for GetListing", - "file": "analyticshub_v1beta1_generated_analytics_hub_service_get_listing_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1beta1_generated_AnalyticsHubService_GetListing_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1beta1_generated_analytics_hub_service_get_listing_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient", - "shortName": "AnalyticsHubServiceClient" - }, - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient.get_listing", - "method": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService.GetListing", - "service": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "GetListing" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_data_exchange_v1beta1.types.GetListingRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_data_exchange_v1beta1.types.Listing", - "shortName": "get_listing" - }, - "description": "Sample for GetListing", - "file": "analyticshub_v1beta1_generated_analytics_hub_service_get_listing_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1beta1_generated_AnalyticsHubService_GetListing_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1beta1_generated_analytics_hub_service_get_listing_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient", - "shortName": "AnalyticsHubServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient.list_data_exchanges", - "method": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService.ListDataExchanges", - "service": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "ListDataExchanges" - }, - "parameters": [ - { - "name": 
"request", - "type": "google.cloud.bigquery_data_exchange_v1beta1.types.ListDataExchangesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_data_exchange_v1beta1.services.analytics_hub_service.pagers.ListDataExchangesAsyncPager", - "shortName": "list_data_exchanges" - }, - "description": "Sample for ListDataExchanges", - "file": "analyticshub_v1beta1_generated_analytics_hub_service_list_data_exchanges_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1beta1_generated_AnalyticsHubService_ListDataExchanges_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1beta1_generated_analytics_hub_service_list_data_exchanges_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient", - "shortName": "AnalyticsHubServiceClient" - }, - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient.list_data_exchanges", - "method": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService.ListDataExchanges", - "service": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "ListDataExchanges" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_data_exchange_v1beta1.types.ListDataExchangesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_data_exchange_v1beta1.services.analytics_hub_service.pagers.ListDataExchangesPager", - "shortName": "list_data_exchanges" - }, - "description": "Sample for ListDataExchanges", - "file": "analyticshub_v1beta1_generated_analytics_hub_service_list_data_exchanges_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1beta1_generated_AnalyticsHubService_ListDataExchanges_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1beta1_generated_analytics_hub_service_list_data_exchanges_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient", - "shortName": "AnalyticsHubServiceAsyncClient" - }, - "fullName": 
"google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient.list_listings", - "method": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService.ListListings", - "service": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "ListListings" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_data_exchange_v1beta1.types.ListListingsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_data_exchange_v1beta1.services.analytics_hub_service.pagers.ListListingsAsyncPager", - "shortName": "list_listings" - }, - "description": "Sample for ListListings", - "file": "analyticshub_v1beta1_generated_analytics_hub_service_list_listings_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1beta1_generated_AnalyticsHubService_ListListings_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1beta1_generated_analytics_hub_service_list_listings_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient", - "shortName": "AnalyticsHubServiceClient" - }, - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient.list_listings", - "method": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService.ListListings", - "service": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "ListListings" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_data_exchange_v1beta1.types.ListListingsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_data_exchange_v1beta1.services.analytics_hub_service.pagers.ListListingsPager", - "shortName": "list_listings" - }, - "description": "Sample for ListListings", - "file": "analyticshub_v1beta1_generated_analytics_hub_service_list_listings_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1beta1_generated_AnalyticsHubService_ListListings_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"analyticshub_v1beta1_generated_analytics_hub_service_list_listings_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient", - "shortName": "AnalyticsHubServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient.list_org_data_exchanges", - "method": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService.ListOrgDataExchanges", - "service": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "ListOrgDataExchanges" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_data_exchange_v1beta1.types.ListOrgDataExchangesRequest" - }, - { - "name": "organization", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_data_exchange_v1beta1.services.analytics_hub_service.pagers.ListOrgDataExchangesAsyncPager", - "shortName": "list_org_data_exchanges" - }, - "description": "Sample for ListOrgDataExchanges", - "file": "analyticshub_v1beta1_generated_analytics_hub_service_list_org_data_exchanges_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1beta1_generated_AnalyticsHubService_ListOrgDataExchanges_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1beta1_generated_analytics_hub_service_list_org_data_exchanges_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient", - "shortName": "AnalyticsHubServiceClient" - }, - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient.list_org_data_exchanges", - "method": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService.ListOrgDataExchanges", - "service": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "ListOrgDataExchanges" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_data_exchange_v1beta1.types.ListOrgDataExchangesRequest" - }, - { - "name": "organization", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_data_exchange_v1beta1.services.analytics_hub_service.pagers.ListOrgDataExchangesPager", - "shortName": "list_org_data_exchanges" - }, - "description": "Sample for ListOrgDataExchanges", - "file": "analyticshub_v1beta1_generated_analytics_hub_service_list_org_data_exchanges_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"analyticshub_v1beta1_generated_AnalyticsHubService_ListOrgDataExchanges_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1beta1_generated_analytics_hub_service_list_org_data_exchanges_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient", - "shortName": "AnalyticsHubServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient.set_iam_policy", - "method": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService.SetIamPolicy", - "service": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "SetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "set_iam_policy" - }, - "description": "Sample for SetIamPolicy", - "file": "analyticshub_v1beta1_generated_analytics_hub_service_set_iam_policy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1beta1_generated_AnalyticsHubService_SetIamPolicy_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1beta1_generated_analytics_hub_service_set_iam_policy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient", - "shortName": "AnalyticsHubServiceClient" - }, - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient.set_iam_policy", - "method": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService.SetIamPolicy", - "service": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "SetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "set_iam_policy" - }, - "description": "Sample for SetIamPolicy", - "file": "analyticshub_v1beta1_generated_analytics_hub_service_set_iam_policy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"analyticshub_v1beta1_generated_AnalyticsHubService_SetIamPolicy_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1beta1_generated_analytics_hub_service_set_iam_policy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient", - "shortName": "AnalyticsHubServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient.subscribe_listing", - "method": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService.SubscribeListing", - "service": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "SubscribeListing" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_data_exchange_v1beta1.types.SubscribeListingRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_data_exchange_v1beta1.types.SubscribeListingResponse", - "shortName": "subscribe_listing" - }, - "description": "Sample for SubscribeListing", - "file": "analyticshub_v1beta1_generated_analytics_hub_service_subscribe_listing_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1beta1_generated_AnalyticsHubService_SubscribeListing_async", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1beta1_generated_analytics_hub_service_subscribe_listing_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient", - "shortName": "AnalyticsHubServiceClient" - }, - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient.subscribe_listing", - "method": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService.SubscribeListing", - "service": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "SubscribeListing" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_data_exchange_v1beta1.types.SubscribeListingRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_data_exchange_v1beta1.types.SubscribeListingResponse", 
- "shortName": "subscribe_listing" - }, - "description": "Sample for SubscribeListing", - "file": "analyticshub_v1beta1_generated_analytics_hub_service_subscribe_listing_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1beta1_generated_AnalyticsHubService_SubscribeListing_sync", - "segments": [ - { - "end": 57, - "start": 27, - "type": "FULL" - }, - { - "end": 57, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 51, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 54, - "start": 52, - "type": "REQUEST_EXECUTION" - }, - { - "end": 58, - "start": 55, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1beta1_generated_analytics_hub_service_subscribe_listing_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient", - "shortName": "AnalyticsHubServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient.test_iam_permissions", - "method": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService.TestIamPermissions", - "service": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "TestIamPermissions" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", - "shortName": "test_iam_permissions" - }, - "description": "Sample for TestIamPermissions", - "file": "analyticshub_v1beta1_generated_analytics_hub_service_test_iam_permissions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1beta1_generated_AnalyticsHubService_TestIamPermissions_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1beta1_generated_analytics_hub_service_test_iam_permissions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient", - "shortName": "AnalyticsHubServiceClient" - }, - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient.test_iam_permissions", - "method": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService.TestIamPermissions", - "service": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "TestIamPermissions" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": 
"Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", - "shortName": "test_iam_permissions" - }, - "description": "Sample for TestIamPermissions", - "file": "analyticshub_v1beta1_generated_analytics_hub_service_test_iam_permissions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1beta1_generated_AnalyticsHubService_TestIamPermissions_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1beta1_generated_analytics_hub_service_test_iam_permissions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient", - "shortName": "AnalyticsHubServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient.update_data_exchange", - "method": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService.UpdateDataExchange", - "service": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "UpdateDataExchange" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_data_exchange_v1beta1.types.UpdateDataExchangeRequest" - }, - { - "name": "data_exchange", - "type": "google.cloud.bigquery_data_exchange_v1beta1.types.DataExchange" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_data_exchange_v1beta1.types.DataExchange", - "shortName": "update_data_exchange" - }, - "description": "Sample for UpdateDataExchange", - "file": "analyticshub_v1beta1_generated_analytics_hub_service_update_data_exchange_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1beta1_generated_AnalyticsHubService_UpdateDataExchange_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1beta1_generated_analytics_hub_service_update_data_exchange_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient", - "shortName": "AnalyticsHubServiceClient" - }, - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient.update_data_exchange", - "method": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService.UpdateDataExchange", - "service": { - "fullName": 
"google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "UpdateDataExchange" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_data_exchange_v1beta1.types.UpdateDataExchangeRequest" - }, - { - "name": "data_exchange", - "type": "google.cloud.bigquery_data_exchange_v1beta1.types.DataExchange" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_data_exchange_v1beta1.types.DataExchange", - "shortName": "update_data_exchange" - }, - "description": "Sample for UpdateDataExchange", - "file": "analyticshub_v1beta1_generated_analytics_hub_service_update_data_exchange_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1beta1_generated_AnalyticsHubService_UpdateDataExchange_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1beta1_generated_analytics_hub_service_update_data_exchange_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient", - "shortName": "AnalyticsHubServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient.update_listing", - "method": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService.UpdateListing", - "service": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "UpdateListing" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_data_exchange_v1beta1.types.UpdateListingRequest" - }, - { - "name": "listing", - "type": "google.cloud.bigquery_data_exchange_v1beta1.types.Listing" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_data_exchange_v1beta1.types.Listing", - "shortName": "update_listing" - }, - "description": "Sample for UpdateListing", - "file": "analyticshub_v1beta1_generated_analytics_hub_service_update_listing_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1beta1_generated_AnalyticsHubService_UpdateListing_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"analyticshub_v1beta1_generated_analytics_hub_service_update_listing_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient", - "shortName": "AnalyticsHubServiceClient" - }, - "fullName": "google.cloud.bigquery_data_exchange_v1beta1.AnalyticsHubServiceClient.update_listing", - "method": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService.UpdateListing", - "service": { - "fullName": "google.cloud.bigquery.dataexchange.v1beta1.AnalyticsHubService", - "shortName": "AnalyticsHubService" - }, - "shortName": "UpdateListing" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_data_exchange_v1beta1.types.UpdateListingRequest" - }, - { - "name": "listing", - "type": "google.cloud.bigquery_data_exchange_v1beta1.types.Listing" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_data_exchange_v1beta1.types.Listing", - "shortName": "update_listing" - }, - "description": "Sample for UpdateListing", - "file": "analyticshub_v1beta1_generated_analytics_hub_service_update_listing_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "analyticshub_v1beta1_generated_AnalyticsHubService_UpdateListing_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "analyticshub_v1beta1_generated_analytics_hub_service_update_listing_sync.py" - } - ] -} diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/scripts/fixup_bigquery_data_exchange_v1beta1_keywords.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/scripts/fixup_bigquery_data_exchange_v1beta1_keywords.py deleted file mode 100644 index 99a68d3550d4..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/scripts/fixup_bigquery_data_exchange_v1beta1_keywords.py +++ /dev/null @@ -1,190 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class bigquery_data_exchangeCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_data_exchange': ('parent', 'data_exchange_id', 'data_exchange', ), - 'create_listing': ('parent', 'listing_id', 'listing', ), - 'delete_data_exchange': ('name', ), - 'delete_listing': ('name', ), - 'get_data_exchange': ('name', ), - 'get_iam_policy': ('resource', 'options', ), - 'get_listing': ('name', ), - 'list_data_exchanges': ('parent', 'page_size', 'page_token', ), - 'list_listings': ('parent', 'page_size', 'page_token', ), - 'list_org_data_exchanges': ('organization', 'page_size', 'page_token', ), - 'set_iam_policy': ('resource', 'policy', 'update_mask', ), - 'subscribe_listing': ('name', 'destination_dataset', ), - 'test_iam_permissions': ('resource', 'permissions', ), - 'update_data_exchange': ('update_mask', 'data_exchange', ), - 'update_listing': ('update_mask', 'listing', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=bigquery_data_exchangeCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. 
- tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the bigquery_data_exchange client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/setup.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/setup.py deleted file mode 100644 index e1df8135efd5..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/setup.py +++ /dev/null @@ -1,99 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
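The fixup script deleted above walks every ``.py`` file and, for each recognized client-method call, folds the positional arguments into a single ``request`` dict while re-attaching ``retry``/``timeout``/``metadata`` as keyword arguments. A hedged illustration of the rewrite, reusing the ``bigquery_data_exchangeCallTransformer`` class defined in that script (the sample call string is illustrative, and the class is assumed to be in scope):

.. code-block:: python

    import libcst as cst

    # Before: a flattened, positional call of the old style.
    before = "client.get_data_exchange('projects/p/locations/l/dataExchanges/x')\n"

    tree = cst.parse_module(before)
    after = tree.visit(bigquery_data_exchangeCallTransformer())
    print(after.code)
    # Roughly:
    #   client.get_data_exchange(request={'name': 'projects/p/locations/l/dataExchanges/x'})
    # because METHOD_TO_PARAMS above maps 'get_data_exchange' to ('name',).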
-# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-bigquery-data-exchange' - - -description = "Google Cloud Bigquery Data Exchange API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/bigquery_data_exchange/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "grpc-google-iam-v1 >= 0.14.0, <1.0.0dev", -] -extras = { -} -url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-data-exchange" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - extras_require=extras, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/testing/constraints-3.10.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/testing/constraints-3.10.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
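One detail of the setup.py deleted above is worth noting: the version regex ``r"(?<=\")\d+.\d+.\d+(?=\")"`` leaves its dots unescaped, so each ``.`` matches any character; it works against ``gapic_version.py`` in practice but is looser than intended. A tightened sketch (the ``__version__ = "..."`` assignment form is an assumption about ``gapic_version.py``):

.. code-block:: python

    import re

    _VERSION_RE = re.compile(r'__version__\s*=\s*"(\d+\.\d+\.\d+)"')

    def read_gapic_version(path):
        # Escaped dots plus an anchor on the assignment avoid accidental
        # matches such as "1a2b3", which the original pattern accepts.
        with open(path) as fp:
            match = _VERSION_RE.search(fp.read())
        if match is None:
            raise ValueError(f"no semantic version found in {path}")
        return match.group(1)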
-google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/testing/constraints-3.11.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/testing/constraints-3.11.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/testing/constraints-3.12.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/testing/constraints-3.12.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/testing/constraints-3.13.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/testing/constraints-3.13.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/testing/constraints-3.7.txt deleted file mode 100644 index fb7e93a1b473..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/testing/constraints-3.7.txt +++ /dev/null @@ -1,11 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -proto-plus==1.22.3 -protobuf==3.20.2 -grpc-google-iam-v1==0.14.0 diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/testing/constraints-3.8.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/testing/constraints-3.8.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/testing/constraints-3.9.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/testing/constraints-3.9.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/tests/__init__.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/tests/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/tests/unit/__init__.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/tests/unit/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/tests/unit/gapic/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/tests/unit/gapic/bigquery_data_exchange_v1beta1/__init__.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/tests/unit/gapic/bigquery_data_exchange_v1beta1/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/tests/unit/gapic/bigquery_data_exchange_v1beta1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/tests/unit/gapic/bigquery_data_exchange_v1beta1/test_analytics_hub_service.py b/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/tests/unit/gapic/bigquery_data_exchange_v1beta1/test_analytics_hub_service.py deleted file mode 100644 index 70c72b23ba3a..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-data-exchange/v1beta1/tests/unit/gapic/bigquery_data_exchange_v1beta1/test_analytics_hub_service.py +++ /dev/null @@ -1,7751 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
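The constraints files deleted above serve two roles: ``constraints-3.7.txt`` pins every dependency to the exact lower bound declared in ``setup.py``, so CI proves the stated minimums still work, while the 3.8–3.13 files leave versions floating to exercise the latest releases. A hedged sketch of checking the 3.7 invariant (the parsing helpers are illustrative and keyed to the first ``>=`` bound per package):

.. code-block:: python

    import re

    def lower_bounds(setup_py_text):
        # First ">=" bound per package; extras in brackets are dropped, so
        # "google-api-core[grpc] >= 1.34.1, ..." yields {"google-api-core": "1.34.1"}.
        pattern = re.compile(r'"([A-Za-z0-9_.\[\]-]+)\s*>=\s*([0-9][^,;"]*)')
        bounds = {}
        for name, bound in pattern.findall(setup_py_text):
            bounds.setdefault(name.split("[")[0], bound.strip())
        return bounds

    def pins(constraints_text):
        # "pkg==1.2.3" lines from a constraints file.
        return dict(re.findall(r"^([A-Za-z0-9_.-]+)==(.+)$", constraints_text, re.MULTILINE))

    def check_lower_bound_pins(setup_py_text, constraints_text):
        bounds = lower_bounds(setup_py_text)
        for name, pinned in pins(constraints_text).items():
            assert bounds.get(name) == pinned, (
                f"{name}: pin {pinned} != setup.py lower bound {bounds.get(name)}")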
-# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.bigquery_data_exchange_v1beta1.services.analytics_hub_service import AnalyticsHubServiceAsyncClient -from google.cloud.bigquery_data_exchange_v1beta1.services.analytics_hub_service import AnalyticsHubServiceClient -from google.cloud.bigquery_data_exchange_v1beta1.services.analytics_hub_service import pagers -from google.cloud.bigquery_data_exchange_v1beta1.services.analytics_hub_service import transports -from google.cloud.bigquery_data_exchange_v1beta1.types import dataexchange -from google.cloud.location import locations_pb2 -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import wrappers_pb2 # type: ignore -from google.type import expr_pb2 # type: ignore -import google.auth - - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": "service account credentials", - "principal": "service-account@example.com", -} -CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert AnalyticsHubServiceClient._get_default_mtls_endpoint(None) is None - assert AnalyticsHubServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert AnalyticsHubServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert AnalyticsHubServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert AnalyticsHubServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert AnalyticsHubServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert AnalyticsHubServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert AnalyticsHubServiceClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert AnalyticsHubServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - AnalyticsHubServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert AnalyticsHubServiceClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert AnalyticsHubServiceClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert AnalyticsHubServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - AnalyticsHubServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert AnalyticsHubServiceClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert AnalyticsHubServiceClient._get_client_cert_source(None, False) is None - assert AnalyticsHubServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert AnalyticsHubServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert 
AnalyticsHubServiceClient._get_client_cert_source(None, True) is mock_default_cert_source - assert AnalyticsHubServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(AnalyticsHubServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AnalyticsHubServiceClient)) -@mock.patch.object(AnalyticsHubServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AnalyticsHubServiceAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = AnalyticsHubServiceClient._DEFAULT_UNIVERSE - default_endpoint = AnalyticsHubServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = AnalyticsHubServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert AnalyticsHubServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert AnalyticsHubServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == AnalyticsHubServiceClient.DEFAULT_MTLS_ENDPOINT - assert AnalyticsHubServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert AnalyticsHubServiceClient._get_api_endpoint(None, None, default_universe, "always") == AnalyticsHubServiceClient.DEFAULT_MTLS_ENDPOINT - assert AnalyticsHubServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == AnalyticsHubServiceClient.DEFAULT_MTLS_ENDPOINT - assert AnalyticsHubServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert AnalyticsHubServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - AnalyticsHubServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." - - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert AnalyticsHubServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert AnalyticsHubServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert AnalyticsHubServiceClient._get_universe_domain(None, None) == AnalyticsHubServiceClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - AnalyticsHubServiceClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
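The assertions above fix the precedence for resolving the universe domain: an explicit client value wins, then the ``GOOGLE_CLOUD_UNIVERSE_DOMAIN`` environment variable, then the built-in default, and an empty client value is rejected outright. A minimal sketch mirroring exactly what the tests require (not the library's actual implementation):

.. code-block:: python

    _DEFAULT_UNIVERSE = "googleapis.com"

    def resolve_universe_domain(client_value=None, env_value=None):
        # Empty string is an error, per test__get_universe_domain above.
        if client_value == "":
            raise ValueError("Universe Domain cannot be an empty string.")
        # Precedence: client option > environment variable > default.
        return client_value or env_value or _DEFAULT_UNIVERSE

For example, ``resolve_universe_domain("foo.com", "bar.com")`` returns ``"foo.com"``, ``resolve_universe_domain(None, "bar.com")`` returns ``"bar.com"``, and ``resolve_universe_domain()`` falls back to the default universe.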
- -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = AnalyticsHubServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - -@pytest.mark.parametrize("error_code", [401,403,404,500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = AnalyticsHubServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - -@pytest.mark.parametrize("client_class,transport_name", [ - (AnalyticsHubServiceClient, "grpc"), - (AnalyticsHubServiceAsyncClient, "grpc_asyncio"), -]) -def test_analytics_hub_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'analyticshub.googleapis.com:443' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.AnalyticsHubServiceGrpcTransport, "grpc"), - (transports.AnalyticsHubServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_analytics_hub_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (AnalyticsHubServiceClient, "grpc"), - (AnalyticsHubServiceAsyncClient, "grpc_asyncio"), -]) -def test_analytics_hub_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = 
client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'analyticshub.googleapis.com:443' - ) - - -def test_analytics_hub_service_client_get_transport_class(): - transport = AnalyticsHubServiceClient.get_transport_class() - available_transports = [ - transports.AnalyticsHubServiceGrpcTransport, - ] - assert transport in available_transports - - transport = AnalyticsHubServiceClient.get_transport_class("grpc") - assert transport == transports.AnalyticsHubServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (AnalyticsHubServiceClient, transports.AnalyticsHubServiceGrpcTransport, "grpc"), - (AnalyticsHubServiceAsyncClient, transports.AnalyticsHubServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(AnalyticsHubServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AnalyticsHubServiceClient)) -@mock.patch.object(AnalyticsHubServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AnalyticsHubServiceAsyncClient)) -def test_analytics_hub_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(AnalyticsHubServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(AnalyticsHubServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (AnalyticsHubServiceClient, transports.AnalyticsHubServiceGrpcTransport, "grpc", "true"), - (AnalyticsHubServiceAsyncClient, transports.AnalyticsHubServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (AnalyticsHubServiceClient, transports.AnalyticsHubServiceGrpcTransport, "grpc", "false"), - (AnalyticsHubServiceAsyncClient, transports.AnalyticsHubServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), -]) -@mock.patch.object(AnalyticsHubServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AnalyticsHubServiceClient)) -@mock.patch.object(AnalyticsHubServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AnalyticsHubServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def 
test_analytics_hub_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
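-    # A sketch of the switch under test: the mTLS endpoint is chosen only when
-    # both the env var and a certificate source are present, i.e. roughly
-    #   use_cert = os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE") == "true"
-    #   cert = options.client_cert_source or mtls.default_client_cert_source()
-    #   host = DEFAULT_MTLS_ENDPOINT if (use_cert and cert) else the default host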
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - AnalyticsHubServiceClient, AnalyticsHubServiceAsyncClient -]) -@mock.patch.object(AnalyticsHubServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AnalyticsHubServiceClient)) -@mock.patch.object(AnalyticsHubServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AnalyticsHubServiceAsyncClient)) -def test_analytics_hub_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
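-    # The classmethod applies the same resolution without constructing a client;
-    # a minimal usage sketch:
-    #   endpoint, cert_source = AnalyticsHubServiceClient.get_mtls_endpoint_and_cert_source()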
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - AnalyticsHubServiceClient, AnalyticsHubServiceAsyncClient -]) -@mock.patch.object(AnalyticsHubServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AnalyticsHubServiceClient)) -@mock.patch.object(AnalyticsHubServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AnalyticsHubServiceAsyncClient)) -def test_analytics_hub_service_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = AnalyticsHubServiceClient._DEFAULT_UNIVERSE - default_endpoint = AnalyticsHubServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = AnalyticsHubServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
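-    # Precedence asserted by this test, roughly: an explicit
-    # ClientOptions.api_endpoint always wins; otherwise
-    # GOOGLE_API_USE_MTLS_ENDPOINT ("never"/"auto"/"always") decides; otherwise
-    # the universe-domain template supplies the default host.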
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (AnalyticsHubServiceClient, transports.AnalyticsHubServiceGrpcTransport, "grpc"), - (AnalyticsHubServiceAsyncClient, transports.AnalyticsHubServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_analytics_hub_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (AnalyticsHubServiceClient, transports.AnalyticsHubServiceGrpcTransport, "grpc", grpc_helpers), - (AnalyticsHubServiceAsyncClient, transports.AnalyticsHubServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_analytics_hub_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
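-    # For reference, the equivalent end-user configuration is roughly:
-    #   options = client_options.ClientOptions(credentials_file="credentials.json")
-    #   client = AnalyticsHubServiceClient(client_options=options)
-    # where "credentials.json" stands in for a real service account key path.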
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_analytics_hub_service_client_client_options_from_dict(): - with mock.patch('google.cloud.bigquery_data_exchange_v1beta1.services.analytics_hub_service.transports.AnalyticsHubServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = AnalyticsHubServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (AnalyticsHubServiceClient, transports.AnalyticsHubServiceGrpcTransport, "grpc", grpc_helpers), - (AnalyticsHubServiceAsyncClient, transports.AnalyticsHubServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_analytics_hub_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
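-    # The flow checked below, roughly: google.auth.load_credentials_from_file()
-    # loads the key, and those credentials (rather than ADC) are what
-    # create_channel receives, together with the default scopes and host.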
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "analyticshub.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="analyticshub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - dataexchange.ListDataExchangesRequest, - dict, -]) -def test_list_data_exchanges(request_type, transport: str = 'grpc'): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_exchanges), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dataexchange.ListDataExchangesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_data_exchanges(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dataexchange.ListDataExchangesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDataExchangesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_data_exchanges_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dataexchange.ListDataExchangesRequest( - parent='parent_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_exchanges), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
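-        # Invoke with the populated request; any AIP-4235 auto-populated fields
-        # (e.g. a UUID4 request_id, where the API defines one) are exactly the
-        # fields deliberately left unset above.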
- client.list_data_exchanges(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dataexchange.ListDataExchangesRequest( - parent='parent_value', - page_token='page_token_value', - ) - -def test_list_data_exchanges_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_data_exchanges in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_data_exchanges] = mock_rpc - request = {} - client.list_data_exchanges(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_data_exchanges(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_data_exchanges_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_data_exchanges in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_data_exchanges] = mock_rpc - - request = {} - await client.list_data_exchanges(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_data_exchanges(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_data_exchanges_async(transport: str = 'grpc_asyncio', request_type=dataexchange.ListDataExchangesRequest): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_exchanges), - '__call__') as call: - # Designate an appropriate return value for the call. 
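-        # FakeUnaryUnaryCall (from google.api_core.grpc_helpers_async) makes the
-        # mocked stub behave like a real grpc.aio call: awaiting it yields the
-        # wrapped response message below.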
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataexchange.ListDataExchangesResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.list_data_exchanges(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = dataexchange.ListDataExchangesRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListDataExchangesAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_data_exchanges_async_from_dict():
-    await test_list_data_exchanges_async(request_type=dict)
-
-def test_list_data_exchanges_field_headers():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = dataexchange.ListDataExchangesRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_data_exchanges),
-            '__call__') as call:
-        call.return_value = dataexchange.ListDataExchangesResponse()
-        client.list_data_exchanges(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_data_exchanges_field_headers_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = dataexchange.ListDataExchangesRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_data_exchanges),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataexchange.ListDataExchangesResponse())
-        await client.list_data_exchanges(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_list_data_exchanges_flattened():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_data_exchanges),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = dataexchange.ListDataExchangesResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_data_exchanges(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
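-        # That is, the flattened kwarg parent='parent_value' should have been
-        # copied into the request proto's `parent` field before the RPC was sent.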
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_data_exchanges_flattened_error():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_data_exchanges(
-            dataexchange.ListDataExchangesRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_data_exchanges_flattened_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_data_exchanges),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataexchange.ListDataExchangesResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_data_exchanges(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_data_exchanges_flattened_error_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_data_exchanges(
-            dataexchange.ListDataExchangesRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_data_exchanges_pager(transport_name: str = "grpc"):
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_data_exchanges),
-            '__call__') as call:
-        # Set the response to a series of pages.
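-        # The pager fetches lazily: each next_page_token feeds the next request,
-        # so the four fake pages below yield 3 + 0 + 1 + 2 = 6 items in total; the
-        # trailing RuntimeError would only surface on a fifth, unexpected fetch.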
- call.side_effect = ( - dataexchange.ListDataExchangesResponse( - data_exchanges=[ - dataexchange.DataExchange(), - dataexchange.DataExchange(), - dataexchange.DataExchange(), - ], - next_page_token='abc', - ), - dataexchange.ListDataExchangesResponse( - data_exchanges=[], - next_page_token='def', - ), - dataexchange.ListDataExchangesResponse( - data_exchanges=[ - dataexchange.DataExchange(), - ], - next_page_token='ghi', - ), - dataexchange.ListDataExchangesResponse( - data_exchanges=[ - dataexchange.DataExchange(), - dataexchange.DataExchange(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_data_exchanges(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dataexchange.DataExchange) - for i in results) -def test_list_data_exchanges_pages(transport_name: str = "grpc"): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_exchanges), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - dataexchange.ListDataExchangesResponse( - data_exchanges=[ - dataexchange.DataExchange(), - dataexchange.DataExchange(), - dataexchange.DataExchange(), - ], - next_page_token='abc', - ), - dataexchange.ListDataExchangesResponse( - data_exchanges=[], - next_page_token='def', - ), - dataexchange.ListDataExchangesResponse( - data_exchanges=[ - dataexchange.DataExchange(), - ], - next_page_token='ghi', - ), - dataexchange.ListDataExchangesResponse( - data_exchanges=[ - dataexchange.DataExchange(), - dataexchange.DataExchange(), - ], - ), - RuntimeError, - ) - pages = list(client.list_data_exchanges(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_data_exchanges_async_pager(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_exchanges), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
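-        # Async variant of the same paging contract; a minimal usage sketch:
-        #   pager = await client.list_data_exchanges(request={})
-        #   items = [i async for i in pager]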
- call.side_effect = ( - dataexchange.ListDataExchangesResponse( - data_exchanges=[ - dataexchange.DataExchange(), - dataexchange.DataExchange(), - dataexchange.DataExchange(), - ], - next_page_token='abc', - ), - dataexchange.ListDataExchangesResponse( - data_exchanges=[], - next_page_token='def', - ), - dataexchange.ListDataExchangesResponse( - data_exchanges=[ - dataexchange.DataExchange(), - ], - next_page_token='ghi', - ), - dataexchange.ListDataExchangesResponse( - data_exchanges=[ - dataexchange.DataExchange(), - dataexchange.DataExchange(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_data_exchanges(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, dataexchange.DataExchange) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_data_exchanges_async_pages(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_exchanges), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - dataexchange.ListDataExchangesResponse( - data_exchanges=[ - dataexchange.DataExchange(), - dataexchange.DataExchange(), - dataexchange.DataExchange(), - ], - next_page_token='abc', - ), - dataexchange.ListDataExchangesResponse( - data_exchanges=[], - next_page_token='def', - ), - dataexchange.ListDataExchangesResponse( - data_exchanges=[ - dataexchange.DataExchange(), - ], - next_page_token='ghi', - ), - dataexchange.ListDataExchangesResponse( - data_exchanges=[ - dataexchange.DataExchange(), - dataexchange.DataExchange(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_data_exchanges(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - dataexchange.ListOrgDataExchangesRequest, - dict, -]) -def test_list_org_data_exchanges(request_type, transport: str = 'grpc'): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_org_data_exchanges), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dataexchange.ListOrgDataExchangesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_org_data_exchanges(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dataexchange.ListOrgDataExchangesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListOrgDataExchangesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_org_data_exchanges_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dataexchange.ListOrgDataExchangesRequest( - organization='organization_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_org_data_exchanges), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_org_data_exchanges(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dataexchange.ListOrgDataExchangesRequest( - organization='organization_value', - page_token='page_token_value', - ) - -def test_list_org_data_exchanges_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_org_data_exchanges in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_org_data_exchanges] = mock_rpc - request = {} - client.list_org_data_exchanges(request) - - # Establish that the underlying gRPC stub method was called. 
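-        # Two invocations, one wrap: _prep_wrapped_messages runs once at client
-        # construction, so later calls reuse the entry cached in _wrapped_methods
-        # instead of invoking gapic_v1.method.wrap_method again.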
-        assert mock_rpc.call_count == 1
-
-        client.list_org_data_exchanges(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_org_data_exchanges_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = AnalyticsHubServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.list_org_data_exchanges in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.list_org_data_exchanges] = mock_rpc
-
-        request = {}
-        await client.list_org_data_exchanges(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.list_org_data_exchanges(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_org_data_exchanges_async(transport: str = 'grpc_asyncio', request_type=dataexchange.ListOrgDataExchangesRequest):
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_org_data_exchanges),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataexchange.ListOrgDataExchangesResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.list_org_data_exchanges(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = dataexchange.ListOrgDataExchangesRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListOrgDataExchangesAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_org_data_exchanges_async_from_dict():
-    await test_list_org_data_exchanges_async(request_type=dict)
-
-def test_list_org_data_exchanges_field_headers():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = dataexchange.ListOrgDataExchangesRequest()
-
-    request.organization = 'organization_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
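-    # On the wire, the routing header asserted below looks roughly like:
-    #   x-goog-request-params: organization=organization_value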
- with mock.patch.object( - type(client.transport.list_org_data_exchanges), - '__call__') as call: - call.return_value = dataexchange.ListOrgDataExchangesResponse() - client.list_org_data_exchanges(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'organization=organization_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_org_data_exchanges_field_headers_async(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dataexchange.ListOrgDataExchangesRequest() - - request.organization = 'organization_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_org_data_exchanges), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataexchange.ListOrgDataExchangesResponse()) - await client.list_org_data_exchanges(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'organization=organization_value', - ) in kw['metadata'] - - -def test_list_org_data_exchanges_flattened(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_org_data_exchanges), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dataexchange.ListOrgDataExchangesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_org_data_exchanges( - organization='organization_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].organization - mock_val = 'organization_value' - assert arg == mock_val - - -def test_list_org_data_exchanges_flattened_error(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_org_data_exchanges( - dataexchange.ListOrgDataExchangesRequest(), - organization='organization_value', - ) - -@pytest.mark.asyncio -async def test_list_org_data_exchanges_flattened_async(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_org_data_exchanges), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataexchange.ListOrgDataExchangesResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_org_data_exchanges(
-            organization='organization_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].organization
-        mock_val = 'organization_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_org_data_exchanges_flattened_error_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_org_data_exchanges(
-            dataexchange.ListOrgDataExchangesRequest(),
-            organization='organization_value',
-        )
-
-
-def test_list_org_data_exchanges_pager(transport_name: str = "grpc"):
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_org_data_exchanges),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            dataexchange.ListOrgDataExchangesResponse(
-                data_exchanges=[
-                    dataexchange.DataExchange(),
-                    dataexchange.DataExchange(),
-                    dataexchange.DataExchange(),
-                ],
-                next_page_token='abc',
-            ),
-            dataexchange.ListOrgDataExchangesResponse(
-                data_exchanges=[],
-                next_page_token='def',
-            ),
-            dataexchange.ListOrgDataExchangesResponse(
-                data_exchanges=[
-                    dataexchange.DataExchange(),
-                ],
-                next_page_token='ghi',
-            ),
-            dataexchange.ListOrgDataExchangesResponse(
-                data_exchanges=[
-                    dataexchange.DataExchange(),
-                    dataexchange.DataExchange(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        expected_metadata = ()
-        retry = retries.Retry()
-        timeout = 5
-        expected_metadata = tuple(expected_metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('organization', ''),
-            )),
-        )
-        pager = client.list_org_data_exchanges(request={}, retry=retry, timeout=timeout)
-
-        assert pager._metadata == expected_metadata
-        assert pager._retry == retry
-        assert pager._timeout == timeout
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, dataexchange.DataExchange)
-                   for i in results)
-def test_list_org_data_exchanges_pages(transport_name: str = "grpc"):
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_org_data_exchanges),
-            '__call__') as call:
-        # Set the response to a series of pages.
- call.side_effect = ( - dataexchange.ListOrgDataExchangesResponse( - data_exchanges=[ - dataexchange.DataExchange(), - dataexchange.DataExchange(), - dataexchange.DataExchange(), - ], - next_page_token='abc', - ), - dataexchange.ListOrgDataExchangesResponse( - data_exchanges=[], - next_page_token='def', - ), - dataexchange.ListOrgDataExchangesResponse( - data_exchanges=[ - dataexchange.DataExchange(), - ], - next_page_token='ghi', - ), - dataexchange.ListOrgDataExchangesResponse( - data_exchanges=[ - dataexchange.DataExchange(), - dataexchange.DataExchange(), - ], - ), - RuntimeError, - ) - pages = list(client.list_org_data_exchanges(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_org_data_exchanges_async_pager(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_org_data_exchanges), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - dataexchange.ListOrgDataExchangesResponse( - data_exchanges=[ - dataexchange.DataExchange(), - dataexchange.DataExchange(), - dataexchange.DataExchange(), - ], - next_page_token='abc', - ), - dataexchange.ListOrgDataExchangesResponse( - data_exchanges=[], - next_page_token='def', - ), - dataexchange.ListOrgDataExchangesResponse( - data_exchanges=[ - dataexchange.DataExchange(), - ], - next_page_token='ghi', - ), - dataexchange.ListOrgDataExchangesResponse( - data_exchanges=[ - dataexchange.DataExchange(), - dataexchange.DataExchange(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_org_data_exchanges(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, dataexchange.DataExchange) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_org_data_exchanges_async_pages(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_org_data_exchanges), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - dataexchange.ListOrgDataExchangesResponse( - data_exchanges=[ - dataexchange.DataExchange(), - dataexchange.DataExchange(), - dataexchange.DataExchange(), - ], - next_page_token='abc', - ), - dataexchange.ListOrgDataExchangesResponse( - data_exchanges=[], - next_page_token='def', - ), - dataexchange.ListOrgDataExchangesResponse( - data_exchanges=[ - dataexchange.DataExchange(), - ], - next_page_token='ghi', - ), - dataexchange.ListOrgDataExchangesResponse( - data_exchanges=[ - dataexchange.DataExchange(), - dataexchange.DataExchange(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_org_data_exchanges(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - dataexchange.GetDataExchangeRequest, - dict, -]) -def test_get_data_exchange(request_type, transport: str = 'grpc'): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_exchange), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dataexchange.DataExchange( - name='name_value', - display_name='display_name_value', - description='description_value', - primary_contact='primary_contact_value', - documentation='documentation_value', - listing_count=1410, - icon=b'icon_blob', - ) - response = client.get_data_exchange(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = dataexchange.GetDataExchangeRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, dataexchange.DataExchange) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.primary_contact == 'primary_contact_value' - assert response.documentation == 'documentation_value' - assert response.listing_count == 1410 - assert response.icon == b'icon_blob' - - -def test_get_data_exchange_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = dataexchange.GetDataExchangeRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_data_exchange), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_data_exchange(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dataexchange.GetDataExchangeRequest( - name='name_value', - ) - -def test_get_data_exchange_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_data_exchange in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_data_exchange] = mock_rpc - request = {} - client.get_data_exchange(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_data_exchange(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_data_exchange_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_data_exchange in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_data_exchange] = mock_rpc - - request = {} - await client.get_data_exchange(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_data_exchange(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_data_exchange_async(transport: str = 'grpc_asyncio', request_type=dataexchange.GetDataExchangeRequest): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_exchange), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataexchange.DataExchange(
-            name='name_value',
-            display_name='display_name_value',
-            description='description_value',
-            primary_contact='primary_contact_value',
-            documentation='documentation_value',
-            listing_count=1410,
-            icon=b'icon_blob',
-        ))
-        response = await client.get_data_exchange(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = dataexchange.GetDataExchangeRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, dataexchange.DataExchange)
-    assert response.name == 'name_value'
-    assert response.display_name == 'display_name_value'
-    assert response.description == 'description_value'
-    assert response.primary_contact == 'primary_contact_value'
-    assert response.documentation == 'documentation_value'
-    assert response.listing_count == 1410
-    assert response.icon == b'icon_blob'
-
-
-@pytest.mark.asyncio
-async def test_get_data_exchange_async_from_dict():
-    await test_get_data_exchange_async(request_type=dict)
-
-def test_get_data_exchange_field_headers():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = dataexchange.GetDataExchangeRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_data_exchange),
-            '__call__') as call:
-        call.return_value = dataexchange.DataExchange()
-        client.get_data_exchange(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_data_exchange_field_headers_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = dataexchange.GetDataExchangeRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_data_exchange),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataexchange.DataExchange())
-        await client.get_data_exchange(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_get_data_exchange_flattened():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_data_exchange),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = dataexchange.DataExchange()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.get_data_exchange(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_get_data_exchange_flattened_error():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_data_exchange(
-            dataexchange.GetDataExchangeRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_get_data_exchange_flattened_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_data_exchange),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataexchange.DataExchange())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.get_data_exchange(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_data_exchange_flattened_error_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.get_data_exchange(
-            dataexchange.GetDataExchangeRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  dataexchange.CreateDataExchangeRequest,
-  dict,
-])
-def test_create_data_exchange(request_type, transport: str = 'grpc'):
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_data_exchange),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = dataexchange.DataExchange(
-            name='name_value',
-            display_name='display_name_value',
-            description='description_value',
-            primary_contact='primary_contact_value',
-            documentation='documentation_value',
-            listing_count=1410,
-            icon=b'icon_blob',
-        )
-        response = client.create_data_exchange(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = dataexchange.CreateDataExchangeRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, dataexchange.DataExchange)
-    assert response.name == 'name_value'
-    assert response.display_name == 'display_name_value'
-    assert response.description == 'description_value'
-    assert response.primary_contact == 'primary_contact_value'
-    assert response.documentation == 'documentation_value'
-    assert response.listing_count == 1410
-    assert response.icon == b'icon_blob'
-
-
-def test_create_data_exchange_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 fields are populated automatically
-    # if they meet the requirements of AIP-4235.
-    request = dataexchange.CreateDataExchangeRequest(
-        parent='parent_value',
-        data_exchange_id='data_exchange_id_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_data_exchange),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.create_data_exchange(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == dataexchange.CreateDataExchangeRequest(
-            parent='parent_value',
-            data_exchange_id='data_exchange_id_value',
-        )
-
-def test_create_data_exchange_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = AnalyticsHubServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.create_data_exchange in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.create_data_exchange] = mock_rpc
-        request = {}
-        client.create_data_exchange(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.create_data_exchange(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_create_data_exchange_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = AnalyticsHubServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.create_data_exchange in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.create_data_exchange] = mock_rpc
-
-        request = {}
-        await client.create_data_exchange(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.create_data_exchange(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_create_data_exchange_async(transport: str = 'grpc_asyncio', request_type=dataexchange.CreateDataExchangeRequest):
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_data_exchange),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataexchange.DataExchange(
-            name='name_value',
-            display_name='display_name_value',
-            description='description_value',
-            primary_contact='primary_contact_value',
-            documentation='documentation_value',
-            listing_count=1410,
-            icon=b'icon_blob',
-        ))
-        response = await client.create_data_exchange(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = dataexchange.CreateDataExchangeRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, dataexchange.DataExchange)
-    assert response.name == 'name_value'
-    assert response.display_name == 'display_name_value'
-    assert response.description == 'description_value'
-    assert response.primary_contact == 'primary_contact_value'
-    assert response.documentation == 'documentation_value'
-    assert response.listing_count == 1410
-    assert response.icon == b'icon_blob'
-
-
-@pytest.mark.asyncio
-async def test_create_data_exchange_async_from_dict():
-    await test_create_data_exchange_async(request_type=dict)
-
-def test_create_data_exchange_field_headers():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = dataexchange.CreateDataExchangeRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_data_exchange),
-            '__call__') as call:
-        call.return_value = dataexchange.DataExchange()
-        client.create_data_exchange(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_create_data_exchange_field_headers_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = dataexchange.CreateDataExchangeRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_data_exchange),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataexchange.DataExchange())
-        await client.create_data_exchange(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_create_data_exchange_flattened():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_data_exchange),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = dataexchange.DataExchange()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.create_data_exchange(
-            parent='parent_value',
-            data_exchange=dataexchange.DataExchange(name='name_value'),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].data_exchange
-        mock_val = dataexchange.DataExchange(name='name_value')
-        assert arg == mock_val
-
-
-def test_create_data_exchange_flattened_error():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.create_data_exchange(
-            dataexchange.CreateDataExchangeRequest(),
-            parent='parent_value',
-            data_exchange=dataexchange.DataExchange(name='name_value'),
-        )
-
-@pytest.mark.asyncio
-async def test_create_data_exchange_flattened_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_data_exchange),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataexchange.DataExchange())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.create_data_exchange(
-            parent='parent_value',
-            data_exchange=dataexchange.DataExchange(name='name_value'),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].data_exchange
-        mock_val = dataexchange.DataExchange(name='name_value')
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_data_exchange_flattened_error_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.create_data_exchange(
-            dataexchange.CreateDataExchangeRequest(),
-            parent='parent_value',
-            data_exchange=dataexchange.DataExchange(name='name_value'),
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  dataexchange.UpdateDataExchangeRequest,
-  dict,
-])
-def test_update_data_exchange(request_type, transport: str = 'grpc'):
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_data_exchange),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = dataexchange.DataExchange(
-            name='name_value',
-            display_name='display_name_value',
-            description='description_value',
-            primary_contact='primary_contact_value',
-            documentation='documentation_value',
-            listing_count=1410,
-            icon=b'icon_blob',
-        )
-        response = client.update_data_exchange(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = dataexchange.UpdateDataExchangeRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, dataexchange.DataExchange)
-    assert response.name == 'name_value'
-    assert response.display_name == 'display_name_value'
-    assert response.description == 'description_value'
-    assert response.primary_contact == 'primary_contact_value'
-    assert response.documentation == 'documentation_value'
-    assert response.listing_count == 1410
-    assert response.icon == b'icon_blob'
-
-
-def test_update_data_exchange_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 fields are populated automatically
-    # if they meet the requirements of AIP-4235.
-    request = dataexchange.UpdateDataExchangeRequest()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_data_exchange),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.update_data_exchange(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == dataexchange.UpdateDataExchangeRequest()
-
-def test_update_data_exchange_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = AnalyticsHubServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.update_data_exchange in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.update_data_exchange] = mock_rpc
-        request = {}
-        client.update_data_exchange(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.update_data_exchange(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_update_data_exchange_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = AnalyticsHubServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.update_data_exchange in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.update_data_exchange] = mock_rpc
-
-        request = {}
-        await client.update_data_exchange(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.update_data_exchange(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_update_data_exchange_async(transport: str = 'grpc_asyncio', request_type=dataexchange.UpdateDataExchangeRequest):
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_data_exchange),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataexchange.DataExchange(
-            name='name_value',
-            display_name='display_name_value',
-            description='description_value',
-            primary_contact='primary_contact_value',
-            documentation='documentation_value',
-            listing_count=1410,
-            icon=b'icon_blob',
-        ))
-        response = await client.update_data_exchange(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = dataexchange.UpdateDataExchangeRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, dataexchange.DataExchange)
-    assert response.name == 'name_value'
-    assert response.display_name == 'display_name_value'
-    assert response.description == 'description_value'
-    assert response.primary_contact == 'primary_contact_value'
-    assert response.documentation == 'documentation_value'
-    assert response.listing_count == 1410
-    assert response.icon == b'icon_blob'
-
-
-@pytest.mark.asyncio
-async def test_update_data_exchange_async_from_dict():
-    await test_update_data_exchange_async(request_type=dict)
-
-def test_update_data_exchange_field_headers():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = dataexchange.UpdateDataExchangeRequest()
-
-    request.data_exchange.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_data_exchange),
-            '__call__') as call:
-        call.return_value = dataexchange.DataExchange()
-        client.update_data_exchange(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'data_exchange.name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_update_data_exchange_field_headers_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = dataexchange.UpdateDataExchangeRequest()
-
-    request.data_exchange.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_data_exchange),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataexchange.DataExchange())
-        await client.update_data_exchange(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'data_exchange.name=name_value',
-    ) in kw['metadata']
-
-
-def test_update_data_exchange_flattened():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_data_exchange),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = dataexchange.DataExchange()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.update_data_exchange(
-            data_exchange=dataexchange.DataExchange(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].data_exchange
-        mock_val = dataexchange.DataExchange(name='name_value')
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-
-def test_update_data_exchange_flattened_error():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.update_data_exchange(
-            dataexchange.UpdateDataExchangeRequest(),
-            data_exchange=dataexchange.DataExchange(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-@pytest.mark.asyncio
-async def test_update_data_exchange_flattened_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_data_exchange),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataexchange.DataExchange())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.update_data_exchange(
-            data_exchange=dataexchange.DataExchange(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].data_exchange
-        mock_val = dataexchange.DataExchange(name='name_value')
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_data_exchange_flattened_error_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.update_data_exchange(
-            dataexchange.UpdateDataExchangeRequest(),
-            data_exchange=dataexchange.DataExchange(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  dataexchange.DeleteDataExchangeRequest,
-  dict,
-])
-def test_delete_data_exchange(request_type, transport: str = 'grpc'):
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_data_exchange),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        response = client.delete_data_exchange(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = dataexchange.DeleteDataExchangeRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-def test_delete_data_exchange_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 fields are populated automatically
-    # if they meet the requirements of AIP-4235.
-    request = dataexchange.DeleteDataExchangeRequest(
-        name='name_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_data_exchange),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.delete_data_exchange(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == dataexchange.DeleteDataExchangeRequest(
-            name='name_value',
-        )
-
-def test_delete_data_exchange_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = AnalyticsHubServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.delete_data_exchange in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.delete_data_exchange] = mock_rpc
-        request = {}
-        client.delete_data_exchange(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.delete_data_exchange(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_delete_data_exchange_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = AnalyticsHubServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.delete_data_exchange in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.delete_data_exchange] = mock_rpc
-
-        request = {}
-        await client.delete_data_exchange(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.delete_data_exchange(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_delete_data_exchange_async(transport: str = 'grpc_asyncio', request_type=dataexchange.DeleteDataExchangeRequest):
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_data_exchange),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        response = await client.delete_data_exchange(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = dataexchange.DeleteDataExchangeRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-@pytest.mark.asyncio
-async def test_delete_data_exchange_async_from_dict():
-    await test_delete_data_exchange_async(request_type=dict)
-
-def test_delete_data_exchange_field_headers():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = dataexchange.DeleteDataExchangeRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_data_exchange),
-            '__call__') as call:
-        call.return_value = None
-        client.delete_data_exchange(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_delete_data_exchange_field_headers_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = dataexchange.DeleteDataExchangeRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_data_exchange),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        await client.delete_data_exchange(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_delete_data_exchange_flattened():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_data_exchange),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.delete_data_exchange(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_delete_data_exchange_flattened_error():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_data_exchange(
-            dataexchange.DeleteDataExchangeRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_delete_data_exchange_flattened_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_data_exchange),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.delete_data_exchange(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_data_exchange_flattened_error_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.delete_data_exchange(
-            dataexchange.DeleteDataExchangeRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  dataexchange.ListListingsRequest,
-  dict,
-])
-def test_list_listings(request_type, transport: str = 'grpc'):
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_listings),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = dataexchange.ListListingsResponse(
-            next_page_token='next_page_token_value',
-        )
-        response = client.list_listings(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = dataexchange.ListListingsRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListListingsPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-def test_list_listings_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 fields are populated automatically
-    # if they meet the requirements of AIP-4235.
-    request = dataexchange.ListListingsRequest(
-        parent='parent_value',
-        page_token='page_token_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_listings),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.list_listings(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == dataexchange.ListListingsRequest(
-            parent='parent_value',
-            page_token='page_token_value',
-        )
-
-def test_list_listings_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = AnalyticsHubServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.list_listings in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.list_listings] = mock_rpc
-        request = {}
-        client.list_listings(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.list_listings(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_listings_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = AnalyticsHubServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.list_listings in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.list_listings] = mock_rpc
-
-        request = {}
-        await client.list_listings(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.list_listings(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_listings_async(transport: str = 'grpc_asyncio', request_type=dataexchange.ListListingsRequest):
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_listings),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataexchange.ListListingsResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.list_listings(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = dataexchange.ListListingsRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListListingsAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_listings_async_from_dict():
-    await test_list_listings_async(request_type=dict)
-
-def test_list_listings_field_headers():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = dataexchange.ListListingsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_listings),
-            '__call__') as call:
-        call.return_value = dataexchange.ListListingsResponse()
-        client.list_listings(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_listings_field_headers_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = dataexchange.ListListingsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_listings),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataexchange.ListListingsResponse())
-        await client.list_listings(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_list_listings_flattened():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_listings),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = dataexchange.ListListingsResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_listings(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_listings_flattened_error():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_listings(
-            dataexchange.ListListingsRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_listings_flattened_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_listings),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataexchange.ListListingsResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_listings(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_listings_flattened_error_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_listings(
-            dataexchange.ListListingsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_listings_pager(transport_name: str = "grpc"):
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_listings),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            dataexchange.ListListingsResponse(
-                listings=[
-                    dataexchange.Listing(),
-                    dataexchange.Listing(),
-                    dataexchange.Listing(),
-                ],
-                next_page_token='abc',
-            ),
-            dataexchange.ListListingsResponse(
-                listings=[],
-                next_page_token='def',
-            ),
-            dataexchange.ListListingsResponse(
-                listings=[
-                    dataexchange.Listing(),
-                ],
-                next_page_token='ghi',
-            ),
-            dataexchange.ListListingsResponse(
-                listings=[
-                    dataexchange.Listing(),
-                    dataexchange.Listing(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        expected_metadata = ()
-        retry = retries.Retry()
-        timeout = 5
-        expected_metadata = tuple(expected_metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('parent', ''),
-            )),
-        )
-        pager = client.list_listings(request={}, retry=retry, timeout=timeout)
-
-        assert pager._metadata == expected_metadata
-        assert pager._retry == retry
-        assert pager._timeout == timeout
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, dataexchange.Listing)
-                   for i in results)
-
-
-def test_list_listings_pages(transport_name: str = "grpc"):
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_listings),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            dataexchange.ListListingsResponse(
-                listings=[
-                    dataexchange.Listing(),
-                    dataexchange.Listing(),
-                    dataexchange.Listing(),
-                ],
-                next_page_token='abc',
-            ),
-            dataexchange.ListListingsResponse(
-                listings=[],
-                next_page_token='def',
-            ),
-            dataexchange.ListListingsResponse(
-                listings=[
-                    dataexchange.Listing(),
-                ],
-                next_page_token='ghi',
-            ),
-            dataexchange.ListListingsResponse(
-                listings=[
-                    dataexchange.Listing(),
-                    dataexchange.Listing(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = list(client.list_listings(request={}).pages)
-        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.asyncio
-async def test_list_listings_async_pager():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_listings),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            dataexchange.ListListingsResponse(
-                listings=[
-                    dataexchange.Listing(),
-                    dataexchange.Listing(),
-                    dataexchange.Listing(),
-                ],
-                next_page_token='abc',
-            ),
-            dataexchange.ListListingsResponse(
-                listings=[],
-                next_page_token='def',
-            ),
-            dataexchange.ListListingsResponse(
-                listings=[
-                    dataexchange.Listing(),
-                ],
-                next_page_token='ghi',
-            ),
-            dataexchange.ListListingsResponse(
-                listings=[
-                    dataexchange.Listing(),
-                    dataexchange.Listing(),
-                ],
-            ),
-            RuntimeError,
-        )
-        async_pager = await client.list_listings(request={},)
-        assert async_pager.next_page_token == 'abc'
-        responses = []
-        async for response in async_pager: # pragma: no branch
-            responses.append(response)
-
-        assert len(responses) == 6
-        assert all(isinstance(i, dataexchange.Listing)
-                   for i in responses)
-
-
-@pytest.mark.asyncio
-async def test_list_listings_async_pages():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_listings),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            dataexchange.ListListingsResponse(
-                listings=[
-                    dataexchange.Listing(),
-                    dataexchange.Listing(),
-                    dataexchange.Listing(),
-                ],
-                next_page_token='abc',
-            ),
-            dataexchange.ListListingsResponse(
-                listings=[],
-                next_page_token='def',
-            ),
-            dataexchange.ListListingsResponse(
-                listings=[
-                    dataexchange.Listing(),
-                ],
-                next_page_token='ghi',
-            ),
-            dataexchange.ListListingsResponse(
-                listings=[
-                    dataexchange.Listing(),
-                    dataexchange.Listing(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = []
-        # Workaround issue in Python 3.9 related to code coverage by adding `# pragma: no branch`
-        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
-        async for page_ in ( # pragma: no branch
-            await client.list_listings(request={})
-        ).pages:
-            pages.append(page_)
-        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.parametrize("request_type", [
-  dataexchange.GetListingRequest,
-  dict,
-])
-def test_get_listing(request_type, transport: str = 'grpc'):
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_listing),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = dataexchange.Listing(
-            name='name_value',
-            display_name='display_name_value',
-            description='description_value',
-            primary_contact='primary_contact_value',
-            documentation='documentation_value',
-            state=dataexchange.Listing.State.ACTIVE,
-            icon=b'icon_blob',
-            categories=[dataexchange.Listing.Category.CATEGORY_OTHERS],
-            request_access='request_access_value',
-        )
-        response = client.get_listing(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = dataexchange.GetListingRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, dataexchange.Listing)
-    assert response.name == 'name_value'
-    assert response.display_name == 'display_name_value'
-    assert response.description == 'description_value'
-    assert response.primary_contact == 'primary_contact_value'
-    assert response.documentation == 'documentation_value'
-    assert response.state == dataexchange.Listing.State.ACTIVE
-    assert response.icon == b'icon_blob'
-    assert response.categories == [dataexchange.Listing.Category.CATEGORY_OTHERS]
-    assert response.request_access == 'request_access_value'
-
-
-def test_get_listing_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 fields are populated automatically
-    # if they meet the requirements of AIP-4235.
-    request = dataexchange.GetListingRequest(
-        name='name_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-def test_get_listing_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = dataexchange.GetListingRequest(
-        name='name_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_listing),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.get_listing(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == dataexchange.GetListingRequest(
-            name='name_value',
-        )
-
-def test_get_listing_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = AnalyticsHubServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.get_listing in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.get_listing] = mock_rpc
-        request = {}
-        client.get_listing(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.get_listing(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_listing_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = AnalyticsHubServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.get_listing in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.get_listing] = mock_rpc
-
-        request = {}
-        await client.get_listing(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.get_listing(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
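The `*_use_cached_wrapped_rpc` tests assert a construction-time invariant: every RPC is wrapped exactly once when the client is built, and later calls reuse the cached wrapper. A hedged stand-in for that scheme (the real transport internals differ):

    from unittest import mock

    class FakeTransport:
        def __init__(self):
            self.get_listing = object()  # stands in for the raw gRPC stub method
            # Wrapped once, at construction time, and cached by stub identity.
            self._wrapped_methods = {self.get_listing: mock.Mock(return_value='ok')}

        def call_get_listing(self, request):
            # Every call reuses the cached wrapper; nothing is re-wrapped per call.
            return self._wrapped_methods[self.get_listing](request)

    transport = FakeTransport()
    assert transport.call_get_listing({}) == 'ok'
    assert transport.call_get_listing({}) == 'ok'
    assert transport._wrapped_methods[transport.get_listing].call_count == 2
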
-@pytest.mark.asyncio
-async def test_get_listing_async(transport: str = 'grpc_asyncio', request_type=dataexchange.GetListingRequest):
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_listing),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataexchange.Listing(
-            name='name_value',
-            display_name='display_name_value',
-            description='description_value',
-            primary_contact='primary_contact_value',
-            documentation='documentation_value',
-            state=dataexchange.Listing.State.ACTIVE,
-            icon=b'icon_blob',
-            categories=[dataexchange.Listing.Category.CATEGORY_OTHERS],
-            request_access='request_access_value',
-        ))
-        response = await client.get_listing(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = dataexchange.GetListingRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, dataexchange.Listing)
-    assert response.name == 'name_value'
-    assert response.display_name == 'display_name_value'
-    assert response.description == 'description_value'
-    assert response.primary_contact == 'primary_contact_value'
-    assert response.documentation == 'documentation_value'
-    assert response.state == dataexchange.Listing.State.ACTIVE
-    assert response.icon == b'icon_blob'
-    assert response.categories == [dataexchange.Listing.Category.CATEGORY_OTHERS]
-    assert response.request_access == 'request_access_value'
-
-
-@pytest.mark.asyncio
-async def test_get_listing_async_from_dict():
-    await test_get_listing_async(request_type=dict)
-
-def test_get_listing_field_headers():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = dataexchange.GetListingRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_listing),
-            '__call__') as call:
-        call.return_value = dataexchange.Listing()
-        client.get_listing(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_listing_field_headers_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = dataexchange.GetListingRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_listing),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataexchange.Listing())
-        await client.get_listing(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
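The field-header tests pin down the routing convention: values of URI-bound request fields must be mirrored into a single `x-goog-request-params` metadata entry. An illustrative (not library-accurate) encoder:

    def routing_metadata(fields: dict) -> list:
        # Joins routing fields into the one metadata entry the tests assert on.
        value = '&'.join(f'{key}={val}' for key, val in fields.items())
        return [('x-goog-request-params', value)]

    assert ('x-goog-request-params', 'name=name_value') in routing_metadata({'name': 'name_value'})
    assert ('x-goog-request-params', 'listing.name=name_value') in routing_metadata({'listing.name': 'name_value'})
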
-def test_get_listing_flattened():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_listing),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = dataexchange.Listing()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.get_listing(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_get_listing_flattened_error():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_listing(
-            dataexchange.GetListingRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_get_listing_flattened_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_listing),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataexchange.Listing())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.get_listing(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_listing_flattened_error_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.get_listing(
-            dataexchange.GetListingRequest(),
-            name='name_value',
-        )
-
-
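The `*_flattened_error` tests encode the GAPIC rule that flattened keyword fields and a full request object are mutually exclusive. A hypothetical method skeleton showing the guard:

    def get_listing(request=None, *, name=None):
        # Hypothetical client-method shape: flattened fields are only legal
        # when no request object is supplied, mirroring the ValueError tests.
        if request is not None and name is not None:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')
        return request if request is not None else {'name': name}

    assert get_listing(name='name_value') == {'name': 'name_value'}
    try:
        get_listing(request={'name': 'x'}, name='name_value')
        raise AssertionError('expected ValueError')
    except ValueError:
        pass
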
-@pytest.mark.parametrize("request_type", [
-    dataexchange.CreateListingRequest,
-    dict,
-])
-def test_create_listing(request_type, transport: str = 'grpc'):
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_listing),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = dataexchange.Listing(
-            name='name_value',
-            display_name='display_name_value',
-            description='description_value',
-            primary_contact='primary_contact_value',
-            documentation='documentation_value',
-            state=dataexchange.Listing.State.ACTIVE,
-            icon=b'icon_blob',
-            categories=[dataexchange.Listing.Category.CATEGORY_OTHERS],
-            request_access='request_access_value',
-        )
-        response = client.create_listing(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = dataexchange.CreateListingRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, dataexchange.Listing)
-    assert response.name == 'name_value'
-    assert response.display_name == 'display_name_value'
-    assert response.description == 'description_value'
-    assert response.primary_contact == 'primary_contact_value'
-    assert response.documentation == 'documentation_value'
-    assert response.state == dataexchange.Listing.State.ACTIVE
-    assert response.icon == b'icon_blob'
-    assert response.categories == [dataexchange.Listing.Category.CATEGORY_OTHERS]
-    assert response.request_access == 'request_access_value'
-
-
-def test_create_listing_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = dataexchange.CreateListingRequest(
-        parent='parent_value',
-        listing_id='listing_id_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_listing),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.create_listing(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == dataexchange.CreateListingRequest(
-            parent='parent_value',
-            listing_id='listing_id_value',
-        )
-
-def test_create_listing_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = AnalyticsHubServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.create_listing in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.create_listing] = mock_rpc
-        request = {}
-        client.create_listing(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.create_listing(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_create_listing_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = AnalyticsHubServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.create_listing in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.create_listing] = mock_rpc
-
-        request = {}
-        await client.create_listing(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.create_listing(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_create_listing_async(transport: str = 'grpc_asyncio', request_type=dataexchange.CreateListingRequest):
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_listing),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataexchange.Listing(
-            name='name_value',
-            display_name='display_name_value',
-            description='description_value',
-            primary_contact='primary_contact_value',
-            documentation='documentation_value',
-            state=dataexchange.Listing.State.ACTIVE,
-            icon=b'icon_blob',
-            categories=[dataexchange.Listing.Category.CATEGORY_OTHERS],
-            request_access='request_access_value',
-        ))
-        response = await client.create_listing(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = dataexchange.CreateListingRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, dataexchange.Listing)
-    assert response.name == 'name_value'
-    assert response.display_name == 'display_name_value'
-    assert response.description == 'description_value'
-    assert response.primary_contact == 'primary_contact_value'
-    assert response.documentation == 'documentation_value'
-    assert response.state == dataexchange.Listing.State.ACTIVE
-    assert response.icon == b'icon_blob'
-    assert response.categories == [dataexchange.Listing.Category.CATEGORY_OTHERS]
-    assert response.request_access == 'request_access_value'
-
-
-@pytest.mark.asyncio
-async def test_create_listing_async_from_dict():
-    await test_create_listing_async(request_type=dict)
-
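The async variants wrap each canned response in `grpc_helpers_async.FakeUnaryUnaryCall` so that awaiting the mocked stub yields the message. A minimal awaitable stand-in, assuming only that `await call` returns the wrapped response:

    import asyncio

    class FakeCall:
        # Awaitable wrapper: `await FakeCall(x)` evaluates to x, which is all
        # the mocked async stubs in these tests rely on.
        def __init__(self, response):
            self._response = response

        def __await__(self):
            async def _resolve():
                return self._response
            return _resolve().__await__()

    async def main():
        assert await FakeCall({'name': 'name_value'}) == {'name': 'name_value'}

    asyncio.run(main())
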
-def test_create_listing_field_headers():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = dataexchange.CreateListingRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_listing),
-            '__call__') as call:
-        call.return_value = dataexchange.Listing()
-        client.create_listing(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_create_listing_field_headers_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = dataexchange.CreateListingRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_listing),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataexchange.Listing())
-        await client.create_listing(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_create_listing_flattened():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_listing),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = dataexchange.Listing()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.create_listing(
-            parent='parent_value',
-            listing=dataexchange.Listing(bigquery_dataset=dataexchange.Listing.BigQueryDatasetSource(dataset='dataset_value')),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].listing
-        mock_val = dataexchange.Listing(bigquery_dataset=dataexchange.Listing.BigQueryDatasetSource(dataset='dataset_value'))
-        assert arg == mock_val
-
-
-def test_create_listing_flattened_error():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.create_listing(
-            dataexchange.CreateListingRequest(),
-            parent='parent_value',
-            listing=dataexchange.Listing(bigquery_dataset=dataexchange.Listing.BigQueryDatasetSource(dataset='dataset_value')),
-        )
-
-@pytest.mark.asyncio
-async def test_create_listing_flattened_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_listing),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataexchange.Listing())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.create_listing(
-            parent='parent_value',
-            listing=dataexchange.Listing(bigquery_dataset=dataexchange.Listing.BigQueryDatasetSource(dataset='dataset_value')),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].listing
-        mock_val = dataexchange.Listing(bigquery_dataset=dataexchange.Listing.BigQueryDatasetSource(dataset='dataset_value'))
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_listing_flattened_error_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.create_listing(
-            dataexchange.CreateListingRequest(),
-            parent='parent_value',
-            listing=dataexchange.Listing(bigquery_dataset=dataexchange.Listing.BigQueryDatasetSource(dataset='dataset_value')),
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    dataexchange.UpdateListingRequest,
-    dict,
-])
-def test_update_listing(request_type, transport: str = 'grpc'):
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_listing),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = dataexchange.Listing(
-            name='name_value',
-            display_name='display_name_value',
-            description='description_value',
-            primary_contact='primary_contact_value',
-            documentation='documentation_value',
-            state=dataexchange.Listing.State.ACTIVE,
-            icon=b'icon_blob',
-            categories=[dataexchange.Listing.Category.CATEGORY_OTHERS],
-            request_access='request_access_value',
-        )
-        response = client.update_listing(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = dataexchange.UpdateListingRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, dataexchange.Listing)
-    assert response.name == 'name_value'
-    assert response.display_name == 'display_name_value'
-    assert response.description == 'description_value'
-    assert response.primary_contact == 'primary_contact_value'
-    assert response.documentation == 'documentation_value'
-    assert response.state == dataexchange.Listing.State.ACTIVE
-    assert response.icon == b'icon_blob'
-    assert response.categories == [dataexchange.Listing.Category.CATEGORY_OTHERS]
-    assert response.request_access == 'request_access_value'
-
-
-def test_update_listing_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = dataexchange.UpdateListingRequest(
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_listing),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.update_listing(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == dataexchange.UpdateListingRequest(
-        )
-
-def test_update_listing_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = AnalyticsHubServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.update_listing in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.update_listing] = mock_rpc
-        request = {}
-        client.update_listing(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.update_listing(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_update_listing_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = AnalyticsHubServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.update_listing in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.update_listing] = mock_rpc
-
-        request = {}
-        await client.update_listing(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.update_listing(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_update_listing_async(transport: str = 'grpc_asyncio', request_type=dataexchange.UpdateListingRequest):
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_listing),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataexchange.Listing(
-            name='name_value',
-            display_name='display_name_value',
-            description='description_value',
-            primary_contact='primary_contact_value',
-            documentation='documentation_value',
-            state=dataexchange.Listing.State.ACTIVE,
-            icon=b'icon_blob',
-            categories=[dataexchange.Listing.Category.CATEGORY_OTHERS],
-            request_access='request_access_value',
-        ))
-        response = await client.update_listing(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = dataexchange.UpdateListingRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, dataexchange.Listing)
-    assert response.name == 'name_value'
-    assert response.display_name == 'display_name_value'
-    assert response.description == 'description_value'
-    assert response.primary_contact == 'primary_contact_value'
-    assert response.documentation == 'documentation_value'
-    assert response.state == dataexchange.Listing.State.ACTIVE
-    assert response.icon == b'icon_blob'
-    assert response.categories == [dataexchange.Listing.Category.CATEGORY_OTHERS]
-    assert response.request_access == 'request_access_value'
-
-
-@pytest.mark.asyncio
-async def test_update_listing_async_from_dict():
-    await test_update_listing_async(request_type=dict)
-
-def test_update_listing_field_headers():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = dataexchange.UpdateListingRequest()
-
-    request.listing.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_listing),
-            '__call__') as call:
-        call.return_value = dataexchange.Listing()
-        client.update_listing(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'listing.name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_update_listing_field_headers_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = dataexchange.UpdateListingRequest()
-
-    request.listing.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_listing),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataexchange.Listing())
-        await client.update_listing(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'listing.name=name_value',
-    ) in kw['metadata']
-
-
-def test_update_listing_flattened():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_listing),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = dataexchange.Listing()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.update_listing(
-            listing=dataexchange.Listing(bigquery_dataset=dataexchange.Listing.BigQueryDatasetSource(dataset='dataset_value')),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].listing
-        mock_val = dataexchange.Listing(bigquery_dataset=dataexchange.Listing.BigQueryDatasetSource(dataset='dataset_value'))
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-
-def test_update_listing_flattened_error():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.update_listing(
-            dataexchange.UpdateListingRequest(),
-            listing=dataexchange.Listing(bigquery_dataset=dataexchange.Listing.BigQueryDatasetSource(dataset='dataset_value')),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-@pytest.mark.asyncio
-async def test_update_listing_flattened_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_listing),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataexchange.Listing())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.update_listing(
-            listing=dataexchange.Listing(bigquery_dataset=dataexchange.Listing.BigQueryDatasetSource(dataset='dataset_value')),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].listing
-        mock_val = dataexchange.Listing(bigquery_dataset=dataexchange.Listing.BigQueryDatasetSource(dataset='dataset_value'))
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_listing_flattened_error_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.update_listing(
-            dataexchange.UpdateListingRequest(),
-            listing=dataexchange.Listing(bigquery_dataset=dataexchange.Listing.BigQueryDatasetSource(dataset='dataset_value')),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-
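The update_listing tests pass a `FieldMask` naming the fields to change; only masked paths should be applied to the stored resource (the authoritative merge happens server-side). A toy client-side illustration, with `apply_mask` as a hypothetical helper:

    from google.protobuf import field_mask_pb2

    def apply_mask(current: dict, patch: dict, mask: field_mask_pb2.FieldMask) -> dict:
        # Copy only the masked top-level paths from the patch onto the resource.
        updated = dict(current)
        for path in mask.paths:
            if path in patch:
                updated[path] = patch[path]
        return updated

    listing = {'display_name': 'old', 'description': 'unchanged'}
    patch = {'display_name': 'new', 'description': 'would be ignored'}
    mask = field_mask_pb2.FieldMask(paths=['display_name'])
    assert apply_mask(listing, patch, mask) == {'display_name': 'new', 'description': 'unchanged'}
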
-@pytest.mark.parametrize("request_type", [
-    dataexchange.DeleteListingRequest,
-    dict,
-])
-def test_delete_listing(request_type, transport: str = 'grpc'):
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_listing),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        response = client.delete_listing(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = dataexchange.DeleteListingRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-def test_delete_listing_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = dataexchange.DeleteListingRequest(
-        name='name_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_listing),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.delete_listing(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == dataexchange.DeleteListingRequest(
-            name='name_value',
-        )
-
-def test_delete_listing_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = AnalyticsHubServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.delete_listing in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.delete_listing] = mock_rpc
-        request = {}
-        client.delete_listing(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.delete_listing(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_delete_listing_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = AnalyticsHubServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.delete_listing in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.delete_listing] = mock_rpc
-
-        request = {}
-        await client.delete_listing(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.delete_listing(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_delete_listing_async(transport: str = 'grpc_asyncio', request_type=dataexchange.DeleteListingRequest):
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_listing),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        response = await client.delete_listing(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = dataexchange.DeleteListingRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-@pytest.mark.asyncio
-async def test_delete_listing_async_from_dict():
-    await test_delete_listing_async(request_type=dict)
-
-def test_delete_listing_field_headers():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = dataexchange.DeleteListingRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_listing),
-            '__call__') as call:
-        call.return_value = None
-        client.delete_listing(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_delete_listing_field_headers_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = dataexchange.DeleteListingRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_listing),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        await client.delete_listing(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
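`delete_listing` asserts `response is None` because the RPC's response type is `google.protobuf.Empty`, which the generated surface maps to `None`. A sketch of that convention, assuming only the protobuf runtime and an illustrative `unwrap` helper:

    from google.protobuf import empty_pb2

    def unwrap(response):
        # GAPIC-style convention: an Empty response surfaces to callers as None.
        return None if isinstance(response, empty_pb2.Empty) else response

    assert unwrap(empty_pb2.Empty()) is None
    assert unwrap({'name': 'name_value'}) == {'name': 'name_value'}
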
-def test_delete_listing_flattened():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_listing),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.delete_listing(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_delete_listing_flattened_error():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_listing(
-            dataexchange.DeleteListingRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_delete_listing_flattened_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_listing),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.delete_listing(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_listing_flattened_error_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.delete_listing(
-            dataexchange.DeleteListingRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    dataexchange.SubscribeListingRequest,
-    dict,
-])
-def test_subscribe_listing(request_type, transport: str = 'grpc'):
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.subscribe_listing),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = dataexchange.SubscribeListingResponse(
-        )
-        response = client.subscribe_listing(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = dataexchange.SubscribeListingRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, dataexchange.SubscribeListingResponse)
-
-
-def test_subscribe_listing_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = dataexchange.SubscribeListingRequest(
-        name='name_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.subscribe_listing),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.subscribe_listing(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == dataexchange.SubscribeListingRequest(
-            name='name_value',
-        )
-
-def test_subscribe_listing_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = AnalyticsHubServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.subscribe_listing in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.subscribe_listing] = mock_rpc
-        request = {}
-        client.subscribe_listing(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.subscribe_listing(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
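The `*_non_empty_request_with_auto_populated_field` tests are coverage failsafes for AIP-4235-style auto-population: fields the caller sets explicitly must survive, while an eligible empty UUID4 field would be filled in by the client before sending. A sketch of the idea, with `autopopulate` and the `request_id` field as hypothetical stand-ins:

    import uuid

    def autopopulate(request: dict) -> dict:
        # Fill an empty request_id with a UUID4; leave caller-set fields alone.
        if not request.get('request_id'):
            request = {**request, 'request_id': str(uuid.uuid4())}
        return request

    request = autopopulate({'name': 'name_value'})
    assert request['name'] == 'name_value'                # explicit field preserved
    assert uuid.UUID(request['request_id']).version == 4  # eligible field auto-filled
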
-@pytest.mark.asyncio
-async def test_subscribe_listing_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = AnalyticsHubServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.subscribe_listing in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.subscribe_listing] = mock_rpc
-
-        request = {}
-        await client.subscribe_listing(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.subscribe_listing(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_subscribe_listing_async(transport: str = 'grpc_asyncio', request_type=dataexchange.SubscribeListingRequest):
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.subscribe_listing),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataexchange.SubscribeListingResponse(
-        ))
-        response = await client.subscribe_listing(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = dataexchange.SubscribeListingRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, dataexchange.SubscribeListingResponse)
-
-
-@pytest.mark.asyncio
-async def test_subscribe_listing_async_from_dict():
-    await test_subscribe_listing_async(request_type=dict)
-
-def test_subscribe_listing_field_headers():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = dataexchange.SubscribeListingRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.subscribe_listing),
-            '__call__') as call:
-        call.return_value = dataexchange.SubscribeListingResponse()
-        client.subscribe_listing(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_subscribe_listing_field_headers_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = dataexchange.SubscribeListingRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.subscribe_listing),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataexchange.SubscribeListingResponse())
-        await client.subscribe_listing(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_subscribe_listing_flattened():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.subscribe_listing),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = dataexchange.SubscribeListingResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.subscribe_listing(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_subscribe_listing_flattened_error():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.subscribe_listing(
-            dataexchange.SubscribeListingRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_subscribe_listing_flattened_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.subscribe_listing),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataexchange.SubscribeListingResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.subscribe_listing(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_subscribe_listing_flattened_error_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.subscribe_listing(
-            dataexchange.SubscribeListingRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    iam_policy_pb2.GetIamPolicyRequest,
-    dict,
-])
-def test_get_iam_policy(request_type, transport: str = 'grpc'):
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_iam_policy),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = policy_pb2.Policy(
-            version=774,
-            etag=b'etag_blob',
-        )
-        response = client.get_iam_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = iam_policy_pb2.GetIamPolicyRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, policy_pb2.Policy)
-    assert response.version == 774
-    assert response.etag == b'etag_blob'
-
-
-def test_get_iam_policy_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = iam_policy_pb2.GetIamPolicyRequest(
-        resource='resource_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_iam_policy),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.get_iam_policy(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == iam_policy_pb2.GetIamPolicyRequest(
-            resource='resource_value',
-        )
-
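Besides protobuf request objects, these methods accept plain dicts, which the client coerces into the request type before dispatch (exercised by test_get_iam_policy_from_dict_foreign further below). A simplified illustration of that coercion, with both names hypothetical:

    class FakeGetIamPolicyRequest:
        # Simplified stand-in for iam_policy_pb2.GetIamPolicyRequest.
        def __init__(self, resource='', options=None):
            self.resource = resource
            self.options = options

    def coerce(request, request_cls):
        # Dict requests become the message type; messages pass through as-is.
        return request_cls(**request) if isinstance(request, dict) else request

    request = coerce({'resource': 'resource_value'}, FakeGetIamPolicyRequest)
    assert request.resource == 'resource_value'
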
-        assert mock_rpc.call_count == 1
-
-        client.get_iam_policy(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_iam_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = AnalyticsHubServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.get_iam_policy in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.get_iam_policy] = mock_rpc
-
-        request = {}
-        await client.get_iam_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.get_iam_policy(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.GetIamPolicyRequest):
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_iam_policy),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy(
-            version=774,
-            etag=b'etag_blob',
-        ))
-        response = await client.get_iam_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = iam_policy_pb2.GetIamPolicyRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, policy_pb2.Policy)
-    assert response.version == 774
-    assert response.etag == b'etag_blob'
-
-
-@pytest.mark.asyncio
-async def test_get_iam_policy_async_from_dict():
-    await test_get_iam_policy_async(request_type=dict)
-
-def test_get_iam_policy_field_headers():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = iam_policy_pb2.GetIamPolicyRequest()
-
-    request.resource = 'resource_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_iam_policy),
-            '__call__') as call:
-        call.return_value = policy_pb2.Policy()
-        client.get_iam_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_iam_policy_field_headers_async(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - -def test_get_iam_policy_from_dict_foreign(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - response = client.get_iam_policy(request={ - 'resource': 'resource_value', - 'options': options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.SetIamPolicyRequest, - dict, -]) -def test_set_iam_policy(request_type, transport: str = 'grpc'): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b'etag_blob', - ) - response = client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.SetIamPolicyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -def test_set_iam_policy_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = iam_policy_pb2.SetIamPolicyRequest( - resource='resource_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.set_iam_policy(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.SetIamPolicyRequest( - resource='resource_value', - ) - -def test_set_iam_policy_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.set_iam_policy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc - request = {} - client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.set_iam_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_set_iam_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.set_iam_policy in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.set_iam_policy] = mock_rpc - - request = {} - await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.set_iam_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_set_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.SetIamPolicyRequest): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
-    with mock.patch.object(
-            type(client.transport.set_iam_policy),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy(
-            version=774,
-            etag=b'etag_blob',
-        ))
-        response = await client.set_iam_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = iam_policy_pb2.SetIamPolicyRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, policy_pb2.Policy)
-    assert response.version == 774
-    assert response.etag == b'etag_blob'
-
-
-@pytest.mark.asyncio
-async def test_set_iam_policy_async_from_dict():
-    await test_set_iam_policy_async(request_type=dict)
-
-def test_set_iam_policy_field_headers():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = iam_policy_pb2.SetIamPolicyRequest()
-
-    request.resource = 'resource_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.set_iam_policy),
-            '__call__') as call:
-        call.return_value = policy_pb2.Policy()
-        client.set_iam_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'resource=resource_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_set_iam_policy_field_headers_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = iam_policy_pb2.SetIamPolicyRequest()
-
-    request.resource = 'resource_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.set_iam_policy),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
-        await client.set_iam_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'resource=resource_value',
-    ) in kw['metadata']
-
-def test_set_iam_policy_from_dict_foreign():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.set_iam_policy),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
- call.return_value = policy_pb2.Policy() - response = client.set_iam_policy(request={ - 'resource': 'resource_value', - 'policy': policy_pb2.Policy(version=774), - 'update_mask': field_mask_pb2.FieldMask(paths=['paths_value']), - } - ) - call.assert_called() - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.TestIamPermissionsRequest, - dict, -]) -def test_test_iam_permissions(request_type, transport: str = 'grpc'): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - ) - response = client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.TestIamPermissionsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - assert response.permissions == ['permissions_value'] - - -def test_test_iam_permissions_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = iam_policy_pb2.TestIamPermissionsRequest( - resource='resource_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.test_iam_permissions(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest( - resource='resource_value', - ) - -def test_test_iam_permissions_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.test_iam_permissions in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
-        client._transport._wrapped_methods[client._transport.test_iam_permissions] = mock_rpc
-        request = {}
-        client.test_iam_permissions(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.test_iam_permissions(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_test_iam_permissions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = AnalyticsHubServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.test_iam_permissions in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.test_iam_permissions] = mock_rpc
-
-        request = {}
-        await client.test_iam_permissions(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.test_iam_permissions(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_test_iam_permissions_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.TestIamPermissionsRequest):
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.test_iam_permissions),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse(
-            permissions=['permissions_value'],
-        ))
-        response = await client.test_iam_permissions(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = iam_policy_pb2.TestIamPermissionsRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse)
-    assert response.permissions == ['permissions_value']
-
-
-@pytest.mark.asyncio
-async def test_test_iam_permissions_async_from_dict():
-    await test_test_iam_permissions_async(request_type=dict)
-
-def test_test_iam_permissions_field_headers():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = iam_policy_pb2.TestIamPermissionsRequest()
-
-    request.resource = 'resource_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_field_headers_async(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse()) - await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - -def test_test_iam_permissions_from_dict_foreign(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - response = client.test_iam_permissions(request={ - 'resource': 'resource_value', - 'permissions': ['permissions_value'], - } - ) - call.assert_called() - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.AnalyticsHubServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.AnalyticsHubServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = AnalyticsHubServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.AnalyticsHubServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = AnalyticsHubServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. 
- options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = AnalyticsHubServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.AnalyticsHubServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = AnalyticsHubServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.AnalyticsHubServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = AnalyticsHubServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.AnalyticsHubServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.AnalyticsHubServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.AnalyticsHubServiceGrpcTransport, - transports.AnalyticsHubServiceGrpcAsyncIOTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = AnalyticsHubServiceClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_data_exchanges_empty_call_grpc(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_data_exchanges), - '__call__') as call: - call.return_value = dataexchange.ListDataExchangesResponse() - client.list_data_exchanges(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dataexchange.ListDataExchangesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_org_data_exchanges_empty_call_grpc(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_org_data_exchanges), - '__call__') as call: - call.return_value = dataexchange.ListOrgDataExchangesResponse() - client.list_org_data_exchanges(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dataexchange.ListOrgDataExchangesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_data_exchange_empty_call_grpc(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_data_exchange), - '__call__') as call: - call.return_value = dataexchange.DataExchange() - client.get_data_exchange(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dataexchange.GetDataExchangeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_data_exchange_empty_call_grpc(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_data_exchange), - '__call__') as call: - call.return_value = dataexchange.DataExchange() - client.create_data_exchange(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dataexchange.CreateDataExchangeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_data_exchange_empty_call_grpc(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_data_exchange), - '__call__') as call: - call.return_value = dataexchange.DataExchange() - client.update_data_exchange(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dataexchange.UpdateDataExchangeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_data_exchange_empty_call_grpc(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_exchange), - '__call__') as call: - call.return_value = None - client.delete_data_exchange(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dataexchange.DeleteDataExchangeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_listings_empty_call_grpc(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.list_listings), - '__call__') as call: - call.return_value = dataexchange.ListListingsResponse() - client.list_listings(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dataexchange.ListListingsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_listing_empty_call_grpc(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_listing), - '__call__') as call: - call.return_value = dataexchange.Listing() - client.get_listing(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dataexchange.GetListingRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_listing_empty_call_grpc(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_listing), - '__call__') as call: - call.return_value = dataexchange.Listing() - client.create_listing(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dataexchange.CreateListingRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_listing_empty_call_grpc(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_listing), - '__call__') as call: - call.return_value = dataexchange.Listing() - client.update_listing(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dataexchange.UpdateListingRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_listing_empty_call_grpc(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_listing), - '__call__') as call: - call.return_value = None - client.delete_listing(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dataexchange.DeleteListingRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_subscribe_listing_empty_call_grpc(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.subscribe_listing), - '__call__') as call: - call.return_value = dataexchange.SubscribeListingResponse() - client.subscribe_listing(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dataexchange.SubscribeListingRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_iam_policy_empty_call_grpc(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.get_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.GetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_set_iam_policy_empty_call_grpc(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.set_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.SetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_test_iam_permissions_empty_call_grpc(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - client.test_iam_permissions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.TestIamPermissionsRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = AnalyticsHubServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_list_data_exchanges_empty_call_grpc_asyncio(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_data_exchanges), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataexchange.ListDataExchangesResponse( - next_page_token='next_page_token_value', - )) - await client.list_data_exchanges(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dataexchange.ListDataExchangesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_org_data_exchanges_empty_call_grpc_asyncio(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_org_data_exchanges), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataexchange.ListOrgDataExchangesResponse( - next_page_token='next_page_token_value', - )) - await client.list_org_data_exchanges(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dataexchange.ListOrgDataExchangesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_data_exchange_empty_call_grpc_asyncio(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_data_exchange), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataexchange.DataExchange( - name='name_value', - display_name='display_name_value', - description='description_value', - primary_contact='primary_contact_value', - documentation='documentation_value', - listing_count=1410, - icon=b'icon_blob', - )) - await client.get_data_exchange(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dataexchange.GetDataExchangeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_data_exchange_empty_call_grpc_asyncio(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_data_exchange), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataexchange.DataExchange( - name='name_value', - display_name='display_name_value', - description='description_value', - primary_contact='primary_contact_value', - documentation='documentation_value', - listing_count=1410, - icon=b'icon_blob', - )) - await client.create_data_exchange(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dataexchange.CreateDataExchangeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_data_exchange_empty_call_grpc_asyncio(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_data_exchange), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataexchange.DataExchange( - name='name_value', - display_name='display_name_value', - description='description_value', - primary_contact='primary_contact_value', - documentation='documentation_value', - listing_count=1410, - icon=b'icon_blob', - )) - await client.update_data_exchange(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dataexchange.UpdateDataExchangeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_data_exchange_empty_call_grpc_asyncio(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_exchange), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_data_exchange(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dataexchange.DeleteDataExchangeRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_listings_empty_call_grpc_asyncio(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_listings), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataexchange.ListListingsResponse( - next_page_token='next_page_token_value', - )) - await client.list_listings(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dataexchange.ListListingsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_listing_empty_call_grpc_asyncio(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_listing), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataexchange.Listing( - name='name_value', - display_name='display_name_value', - description='description_value', - primary_contact='primary_contact_value', - documentation='documentation_value', - state=dataexchange.Listing.State.ACTIVE, - icon=b'icon_blob', - categories=[dataexchange.Listing.Category.CATEGORY_OTHERS], - request_access='request_access_value', - )) - await client.get_listing(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dataexchange.GetListingRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_listing_empty_call_grpc_asyncio(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_listing), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataexchange.Listing( - name='name_value', - display_name='display_name_value', - description='description_value', - primary_contact='primary_contact_value', - documentation='documentation_value', - state=dataexchange.Listing.State.ACTIVE, - icon=b'icon_blob', - categories=[dataexchange.Listing.Category.CATEGORY_OTHERS], - request_access='request_access_value', - )) - await client.create_listing(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dataexchange.CreateListingRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_listing_empty_call_grpc_asyncio(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_listing), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataexchange.Listing( - name='name_value', - display_name='display_name_value', - description='description_value', - primary_contact='primary_contact_value', - documentation='documentation_value', - state=dataexchange.Listing.State.ACTIVE, - icon=b'icon_blob', - categories=[dataexchange.Listing.Category.CATEGORY_OTHERS], - request_access='request_access_value', - )) - await client.update_listing(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dataexchange.UpdateListingRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_listing_empty_call_grpc_asyncio(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_listing), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_listing(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dataexchange.DeleteListingRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_subscribe_listing_empty_call_grpc_asyncio(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.subscribe_listing), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dataexchange.SubscribeListingResponse( - )) - await client.subscribe_listing(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dataexchange.SubscribeListingRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_iam_policy_empty_call_grpc_asyncio(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( - version=774, - etag=b'etag_blob', - )) - await client.get_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.GetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_set_iam_policy_empty_call_grpc_asyncio(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( - version=774, - etag=b'etag_blob', - )) - await client.set_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.SetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_test_iam_permissions_empty_call_grpc_asyncio(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - )) - await client.test_iam_permissions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.TestIamPermissionsRequest() - - assert args[0] == request_msg - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.AnalyticsHubServiceGrpcTransport, - ) - -def test_analytics_hub_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.AnalyticsHubServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_analytics_hub_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.bigquery_data_exchange_v1beta1.services.analytics_hub_service.transports.AnalyticsHubServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.AnalyticsHubServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - 'list_data_exchanges', - 'list_org_data_exchanges', - 'get_data_exchange', - 'create_data_exchange', - 'update_data_exchange', - 'delete_data_exchange', - 'list_listings', - 'get_listing', - 'create_listing', - 'update_listing', - 'delete_listing', - 'subscribe_listing', - 'get_iam_policy', - 'set_iam_policy', - 'test_iam_permissions', - 'get_location', - 'list_locations', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_analytics_hub_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.bigquery_data_exchange_v1beta1.services.analytics_hub_service.transports.AnalyticsHubServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.AnalyticsHubServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_analytics_hub_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.bigquery_data_exchange_v1beta1.services.analytics_hub_service.transports.AnalyticsHubServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.AnalyticsHubServiceTransport() - adc.assert_called_once() - - -def test_analytics_hub_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - AnalyticsHubServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.AnalyticsHubServiceGrpcTransport, - transports.AnalyticsHubServiceGrpcAsyncIOTransport, - ], -) -def test_analytics_hub_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/bigquery', 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.AnalyticsHubServiceGrpcTransport, - transports.AnalyticsHubServiceGrpcAsyncIOTransport, - ], -) -def test_analytics_hub_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.AnalyticsHubServiceGrpcTransport, grpc_helpers), - (transports.AnalyticsHubServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_analytics_hub_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "analyticshub.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="analyticshub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.AnalyticsHubServiceGrpcTransport, transports.AnalyticsHubServiceGrpcAsyncIOTransport]) -def test_analytics_hub_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) -def test_analytics_hub_service_host_no_port(transport_name): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='analyticshub.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'analyticshub.googleapis.com:443' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) -def test_analytics_hub_service_host_with_port(transport_name): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='analyticshub.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'analyticshub.googleapis.com:8000' - ) - -def test_analytics_hub_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.AnalyticsHubServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_analytics_hub_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.AnalyticsHubServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.AnalyticsHubServiceGrpcTransport, transports.AnalyticsHubServiceGrpcAsyncIOTransport]) -def test_analytics_hub_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.AnalyticsHubServiceGrpcTransport, transports.AnalyticsHubServiceGrpcAsyncIOTransport]) -def test_analytics_hub_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_data_exchange_path(): - project = "squid" - location = "clam" - data_exchange = "whelk" - expected = "projects/{project}/locations/{location}/dataExchanges/{data_exchange}".format(project=project, location=location, data_exchange=data_exchange, ) - actual = AnalyticsHubServiceClient.data_exchange_path(project, location, data_exchange) - assert expected == actual - - -def test_parse_data_exchange_path(): - expected = { - "project": "octopus", - "location": "oyster", - "data_exchange": "nudibranch", - } - path = AnalyticsHubServiceClient.data_exchange_path(**expected) - - # Check that the path construction is reversible. 
- actual = AnalyticsHubServiceClient.parse_data_exchange_path(path) - assert expected == actual - -def test_dataset_path(): - project = "cuttlefish" - dataset = "mussel" - expected = "projects/{project}/datasets/{dataset}".format(project=project, dataset=dataset, ) - actual = AnalyticsHubServiceClient.dataset_path(project, dataset) - assert expected == actual - - -def test_parse_dataset_path(): - expected = { - "project": "winkle", - "dataset": "nautilus", - } - path = AnalyticsHubServiceClient.dataset_path(**expected) - - # Check that the path construction is reversible. - actual = AnalyticsHubServiceClient.parse_dataset_path(path) - assert expected == actual - -def test_listing_path(): - project = "scallop" - location = "abalone" - data_exchange = "squid" - listing = "clam" - expected = "projects/{project}/locations/{location}/dataExchanges/{data_exchange}/listings/{listing}".format(project=project, location=location, data_exchange=data_exchange, listing=listing, ) - actual = AnalyticsHubServiceClient.listing_path(project, location, data_exchange, listing) - assert expected == actual - - -def test_parse_listing_path(): - expected = { - "project": "whelk", - "location": "octopus", - "data_exchange": "oyster", - "listing": "nudibranch", - } - path = AnalyticsHubServiceClient.listing_path(**expected) - - # Check that the path construction is reversible. - actual = AnalyticsHubServiceClient.parse_listing_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = AnalyticsHubServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "mussel", - } - path = AnalyticsHubServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = AnalyticsHubServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "winkle" - expected = "folders/{folder}".format(folder=folder, ) - actual = AnalyticsHubServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nautilus", - } - path = AnalyticsHubServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = AnalyticsHubServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "scallop" - expected = "organizations/{organization}".format(organization=organization, ) - actual = AnalyticsHubServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "abalone", - } - path = AnalyticsHubServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. 
- actual = AnalyticsHubServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "squid" - expected = "projects/{project}".format(project=project, ) - actual = AnalyticsHubServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "clam", - } - path = AnalyticsHubServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = AnalyticsHubServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "whelk" - location = "octopus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = AnalyticsHubServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - } - path = AnalyticsHubServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = AnalyticsHubServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.AnalyticsHubServiceTransport, '_prep_wrapped_messages') as prep: - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.AnalyticsHubServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = AnalyticsHubServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_list_locations(transport: str = "grpc"): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - -def test_list_locations_field_headers(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() - - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_locations_from_dict(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_location(transport: str = "grpc"): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - -def test_get_location_field_headers(): - client = AnalyticsHubServiceClient( - credentials=ga_credentials.AnonymousCredentials()) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = locations_pb2.Location() - - client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_location_field_headers_async(): - client = AnalyticsHubServiceAsyncClient( - credentials=async_anonymous_credentials() - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. 
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            locations_pb2.Location()
-        )
-        await client.get_location(request)
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-        # Establish that the field header was sent.
-        _, _, kw = call.mock_calls[0]
-        assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
-
-def test_get_location_from_dict():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    # Note: patch the get_location stub (not list_locations) so the mock
-    # actually intercepts the call under test.
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = locations_pb2.Location()
-
-        response = client.get_location(
-            request={
-                "name": "locations/abc",
-            }
-        )
-        call.assert_called()
-@pytest.mark.asyncio
-async def test_get_location_from_dict_async():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
-            locations_pb2.Location()
-        )
-        response = await client.get_location(
-            request={
-                "name": "locations/abc",
-            }
-        )
-        call.assert_called()
-
-
-def test_transport_close_grpc():
-    client = AnalyticsHubServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc"
-    )
-    with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
-        with client:
-            close.assert_not_called()
-        close.assert_called_once()
-
-
-@pytest.mark.asyncio
-async def test_transport_close_grpc_asyncio():
-    client = AnalyticsHubServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport="grpc_asyncio"
-    )
-    with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
-        async with client:
-            close.assert_not_called()
-        close.assert_called_once()
-
-
-def test_client_ctx():
-    transports = [
-        'grpc',
-    ]
-    for transport in transports:
-        client = AnalyticsHubServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport=transport
-        )
-        # Test client calls underlying transport.
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (AnalyticsHubServiceClient, transports.AnalyticsHubServiceGrpcTransport), - (AnalyticsHubServiceAsyncClient, transports.AnalyticsHubServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/.coveragerc b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/.coveragerc deleted file mode 100644 index 0c3effcc3668..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/bigquery_datapolicies/__init__.py - google/cloud/bigquery_datapolicies/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/.flake8 b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/.flake8 deleted file mode 100644 index 29227d4cf419..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. 
-  **/.nox/**
-  __pycache__,
-  .git,
-  *.pyc,
-  conf.py
diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/MANIFEST.in b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/MANIFEST.in
deleted file mode 100644
index cd0d6efd1151..000000000000
--- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/MANIFEST.in
+++ /dev/null
@@ -1,2 +0,0 @@
-recursive-include google/cloud/bigquery_datapolicies *.py
-recursive-include google/cloud/bigquery_datapolicies_v1 *.py
diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/README.rst b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/README.rst
deleted file mode 100644
index 074e1245b6d1..000000000000
--- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/README.rst
+++ /dev/null
@@ -1,143 +0,0 @@
-Python Client for Google Cloud Bigquery Datapolicies API
-=========================================================
-
-Quick Start
------------
-
-In order to use this library, you first need to go through the following steps:
-
-1. `Select or create a Cloud Platform project.`_
-2. `Enable billing for your project.`_
-3. Enable the Google Cloud Bigquery Datapolicies API.
-4. `Setup Authentication.`_
-
-.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
-.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
-.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
-
-Installation
-~~~~~~~~~~~~
-
-Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
-create isolated Python environments. The basic problem it addresses is one of
-dependencies and versions, and indirectly permissions.
-
-With `virtualenv`_, it's possible to install this library without needing system
-install permissions, and without clashing with the installed system
-dependencies.
-
-.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
-
-
-Mac/Linux
-^^^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    source <your-env>/bin/activate
-    <your-env>/bin/pip install /path/to/library
-
-
-Windows
-^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    <your-env>\Scripts\activate
-    <your-env>\Scripts\pip.exe install \path\to\library
-
-
-Logging
--------
-
-This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes.
-Note the following:
-
-#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging.
-#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**.
-#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below.
-
-
-Simple, environment-based configuration
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google
-logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged
-messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging
-event.
-
-A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log.
-
-- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc.
-- Invalid logging scopes: :code:`foo`, :code:`123`, etc.
-
-**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers.
-
-
-Examples
-^^^^^^^^
-
-- Enabling the default handler for all Google-based loggers
-
-.. code-block:: console
-
-   export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google
-
-- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`):
-
-.. code-block:: console
-
-   export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1
-
-
-Advanced, code-based configuration
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-You can also configure a valid logging scope using Python's standard `logging` mechanism.
-
-
-Examples
-^^^^^^^^
-
-- Configuring a handler for all Google-based loggers
-
-.. code-block:: python
-
-    import logging
-
-    from google.cloud.translate_v3 import translate
-
-    base_logger = logging.getLogger("google")
-    base_logger.addHandler(logging.StreamHandler())
-    base_logger.setLevel(logging.DEBUG)
-
-- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`):
-
-.. code-block:: python
-
-    import logging
-
-    from google.cloud.translate_v3 import translate
-
-    base_logger = logging.getLogger("google.cloud.library_v1")
-    base_logger.addHandler(logging.StreamHandler())
-    base_logger.setLevel(logging.DEBUG)
-
-
-Logging details
-~~~~~~~~~~~~~~~
-
-#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root
-   logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set
-   :code:`logging.getLogger("google").propagate = True` in your code.
-#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for
-   one library, but decide you need to also set up environment-based logging configuration for another library.
-
-   #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual
-      if the code-based configuration gets applied first.
-
-#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get
-   executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured.
-   (This is the reason for 2.i. above.)
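The propagation rule in item 1 above is easy to miss, so here is a minimal sketch of opting back in. It is an editorial illustration, not part of the deleted sources: only the standard library and the behavior described above are assumed, and the choice of :code:`basicConfig` as the root-logger setup is illustrative rather than required.

.. code-block:: python

    import logging

    # Give the root logger a handler; any handler configuration works here.
    logging.basicConfig(level=logging.DEBUG)

    # By default this library's events stop at the "google" logger; enable
    # propagation so the root logger's handler receives them as well.
    logging.getLogger("google").propagate = True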
diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/docs/_static/custom.css b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/docs/_static/custom.css deleted file mode 100644 index 06423be0b592..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/docs/_static/custom.css +++ /dev/null @@ -1,3 +0,0 @@ -dl.field-list > dt { - min-width: 100px -} diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/docs/bigquery_datapolicies_v1/data_policy_service.rst b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/docs/bigquery_datapolicies_v1/data_policy_service.rst deleted file mode 100644 index 9c23456d5fe3..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/docs/bigquery_datapolicies_v1/data_policy_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -DataPolicyService ------------------------------------ - -.. automodule:: google.cloud.bigquery_datapolicies_v1.services.data_policy_service - :members: - :inherited-members: - -.. automodule:: google.cloud.bigquery_datapolicies_v1.services.data_policy_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/docs/bigquery_datapolicies_v1/services_.rst b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/docs/bigquery_datapolicies_v1/services_.rst deleted file mode 100644 index 7c9f6284928f..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/docs/bigquery_datapolicies_v1/services_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Bigquery Datapolicies v1 API -====================================================== -.. toctree:: - :maxdepth: 2 - - data_policy_service diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/docs/bigquery_datapolicies_v1/types_.rst b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/docs/bigquery_datapolicies_v1/types_.rst deleted file mode 100644 index d9b14f8bac0c..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/docs/bigquery_datapolicies_v1/types_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Bigquery Datapolicies v1 API -=================================================== - -.. automodule:: google.cloud.bigquery_datapolicies_v1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/docs/conf.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/docs/conf.py deleted file mode 100644 index ffabfdc34894..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-bigquery-datapolicies documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. 
-#
-# All configuration values have a default; values that are commented out
-# serve to show the default.
-
-import sys
-import os
-import shlex
-
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
-sys.path.insert(0, os.path.abspath(".."))
-
-__version__ = "0.1.0"
-
-# -- General configuration ------------------------------------------------
-
-# If your documentation needs a minimal Sphinx version, state it here.
-needs_sphinx = "4.0.1"
-
-# Add any Sphinx extension module names here, as strings. They can be
-# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
-# ones.
-extensions = [
-    "sphinx.ext.autodoc",
-    "sphinx.ext.autosummary",
-    "sphinx.ext.intersphinx",
-    "sphinx.ext.coverage",
-    "sphinx.ext.napoleon",
-    "sphinx.ext.todo",
-    "sphinx.ext.viewcode",
-]
-
-# autodoc/autosummary flags
-autoclass_content = "both"
-autodoc_default_flags = ["members"]
-autosummary_generate = True
-
-
-# Add any paths that contain templates here, relative to this directory.
-templates_path = ["_templates"]
-
-# Allow markdown includes (so releases.md can include CHANGELOG.md)
-# http://www.sphinx-doc.org/en/master/markdown.html
-source_parsers = {".md": "recommonmark.parser.CommonMarkParser"}
-
-# The suffix(es) of source filenames.
-# You can specify multiple suffixes as a list of strings:
-source_suffix = [".rst", ".md"]
-
-# The encoding of source files.
-# source_encoding = 'utf-8-sig'
-
-# The root toctree document.
-root_doc = "index"
-
-# General information about the project.
-project = u"google-cloud-bigquery-datapolicies"
-copyright = u"2023, Google, LLC"
-author = u"Google APIs" # TODO: autogenerate this bit
-
-# The version info for the project you're documenting, acts as replacement for
-# |version| and |release|, also used in various other places throughout the
-# built documents.
-#
-# The full version, including alpha/beta/rc tags.
-release = __version__
-# The short X.Y version.
-version = ".".join(release.split(".")[0:2])
-
-# The language for content autogenerated by Sphinx. Refer to documentation
-# for a list of supported languages.
-#
-# This is also used if you do content translation via gettext catalogs.
-# Usually you set "language" from the command line for these cases.
-language = 'en'
-
-# There are two options for replacing |today|: either, you set today to some
-# non-false value, then it is used:
-# today = ''
-# Else, today_fmt is used as the format for a strftime call.
-# today_fmt = '%B %d, %Y'
-
-# List of patterns, relative to source directory, that match files and
-# directories to ignore when looking for source files.
-exclude_patterns = ["_build"]
-
-# The reST default role (used for this markup: `text`) to use for all
-# documents.
-# default_role = None
-
-# If true, '()' will be appended to :func: etc. cross-reference text.
-# add_function_parentheses = True
-
-# If true, the current module name will be prepended to all description
-# unit titles (such as .. function::).
-# add_module_names = True
-
-# If true, sectionauthor and moduleauthor directives will be shown in the
-# output. They are ignored by default.
-# show_authors = False
-
-# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = "sphinx"
-
-# A list of ignored prefixes for module index sorting.
-# modindex_common_prefix = []
-
-# If true, keep warnings as "system message" paragraphs in the built documents.
-# keep_warnings = False
-
-# If true, `todo` and `todoList` produce output, else they produce nothing.
-todo_include_todos = True
-
-
-# -- Options for HTML output ----------------------------------------------
-
-# The theme to use for HTML and HTML Help pages. See the documentation for
-# a list of builtin themes.
-html_theme = "alabaster"
-
-# Theme options are theme-specific and customize the look and feel of a theme
-# further. For a list of options available for each theme, see the
-# documentation.
-html_theme_options = {
-    "description": "Google Cloud Client Libraries for Python",
-    "github_user": "googleapis",
-    "github_repo": "google-cloud-python",
-    "github_banner": True,
-    "font_family": "'Roboto', Georgia, sans",
-    "head_font_family": "'Roboto', Georgia, serif",
-    "code_font_family": "'Roboto Mono', 'Consolas', monospace",
-}
-
-# Add any paths that contain custom themes here, relative to this directory.
-# html_theme_path = []
-
-# The name for this set of Sphinx documents. If None, it defaults to
-# "<project> v<release> documentation".
-# html_title = None
-
-# A shorter title for the navigation bar. Default is the same as html_title.
-# html_short_title = None
-
-# The name of an image file (relative to this directory) to place at the top
-# of the sidebar.
-# html_logo = None
-
-# The name of an image file (within the static path) to use as favicon of the
-# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
-# pixels large.
-# html_favicon = None
-
-# Add any paths that contain custom static files (such as style sheets) here,
-# relative to this directory. They are copied after the builtin static files,
-# so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ["_static"]
-
-# Add any extra paths that contain custom files (such as robots.txt or
-# .htaccess) here, relative to this directory. These files are copied
-# directly to the root of the documentation.
-# html_extra_path = []
-
-# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
-# using the given strftime format.
-# html_last_updated_fmt = '%b %d, %Y'
-
-# If true, SmartyPants will be used to convert quotes and dashes to
-# typographically correct entities.
-# html_use_smartypants = True
-
-# Custom sidebar templates, maps document names to template names.
-# html_sidebars = {}
-
-# Additional templates that should be rendered to pages, maps page names to
-# template names.
-# html_additional_pages = {}
-
-# If false, no module index is generated.
-# html_domain_indices = True
-
-# If false, no index is generated.
-# html_use_index = True
-
-# If true, the index is split into individual pages for each letter.
-# html_split_index = False
-
-# If true, links to the reST sources are added to the pages.
-# html_show_sourcelink = True
-
-# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
-# html_show_sphinx = True
-
-# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
-# html_show_copyright = True
-
-# If true, an OpenSearch description file will be output, and all pages will
-# contain a <link> tag referring to it. The value of this option must be the
-# base URL from which the finished HTML is served.
-# html_use_opensearch = ''
-
-# This is the file name suffix for HTML files (e.g. ".xhtml").
-# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-bigquery-datapolicies-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-bigquery-datapolicies.tex", - u"google-cloud-bigquery-datapolicies Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-bigquery-datapolicies", - u"Google Cloud Bigquery Datapolicies Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-bigquery-datapolicies", - u"google-cloud-bigquery-datapolicies Documentation", - author, - "google-cloud-bigquery-datapolicies", - "GAPIC library for Google Cloud Bigquery Datapolicies API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. 
-# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/docs/index.rst b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/docs/index.rst deleted file mode 100644 index 188149101323..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - bigquery_datapolicies_v1/services_ - bigquery_datapolicies_v1/types_ diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies/__init__.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies/__init__.py deleted file mode 100644 index f3e875736da0..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies/__init__.py +++ /dev/null @@ -1,45 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.bigquery_datapolicies import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.bigquery_datapolicies_v1.services.data_policy_service.client import DataPolicyServiceClient -from google.cloud.bigquery_datapolicies_v1.services.data_policy_service.async_client import DataPolicyServiceAsyncClient - -from google.cloud.bigquery_datapolicies_v1.types.datapolicy import CreateDataPolicyRequest -from google.cloud.bigquery_datapolicies_v1.types.datapolicy import DataMaskingPolicy -from google.cloud.bigquery_datapolicies_v1.types.datapolicy import DataPolicy -from google.cloud.bigquery_datapolicies_v1.types.datapolicy import DeleteDataPolicyRequest -from google.cloud.bigquery_datapolicies_v1.types.datapolicy import GetDataPolicyRequest -from google.cloud.bigquery_datapolicies_v1.types.datapolicy import ListDataPoliciesRequest -from google.cloud.bigquery_datapolicies_v1.types.datapolicy import ListDataPoliciesResponse -from google.cloud.bigquery_datapolicies_v1.types.datapolicy import RenameDataPolicyRequest -from google.cloud.bigquery_datapolicies_v1.types.datapolicy import UpdateDataPolicyRequest - -__all__ = ('DataPolicyServiceClient', - 'DataPolicyServiceAsyncClient', - 'CreateDataPolicyRequest', - 'DataMaskingPolicy', - 'DataPolicy', - 'DeleteDataPolicyRequest', - 'GetDataPolicyRequest', - 'ListDataPoliciesRequest', - 'ListDataPoliciesResponse', - 'RenameDataPolicyRequest', - 'UpdateDataPolicyRequest', -) diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies/gapic_version.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies/py.typed b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies/py.typed deleted file mode 100644 index ff02bfbdb46b..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-bigquery-datapolicies package uses inline types. 
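For orientation, the re-exports in the :code:`__init__.py` above are what a typical caller imports. The following is a hedged usage sketch, not part of the deleted sources: it assumes Application Default Credentials are configured, :code:`projects/my-project/locations/us` is a placeholder parent, and the flattened :code:`parent` argument follows the usual GAPIC pattern for :code:`ListDataPoliciesRequest`.

.. code-block:: python

    from google.cloud.bigquery_datapolicies_v1 import DataPolicyServiceClient

    # Assumes Application Default Credentials are configured;
    # "my-project" and "us" are placeholder values.
    client = DataPolicyServiceClient()

    # list_data_policies returns a pager that lazily fetches each page.
    for data_policy in client.list_data_policies(
        parent="projects/my-project/locations/us"
    ):
        print(data_policy.name)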
diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/__init__.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/__init__.py deleted file mode 100644 index cc4afc306077..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/__init__.py +++ /dev/null @@ -1,46 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.bigquery_datapolicies_v1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.data_policy_service import DataPolicyServiceClient -from .services.data_policy_service import DataPolicyServiceAsyncClient - -from .types.datapolicy import CreateDataPolicyRequest -from .types.datapolicy import DataMaskingPolicy -from .types.datapolicy import DataPolicy -from .types.datapolicy import DeleteDataPolicyRequest -from .types.datapolicy import GetDataPolicyRequest -from .types.datapolicy import ListDataPoliciesRequest -from .types.datapolicy import ListDataPoliciesResponse -from .types.datapolicy import RenameDataPolicyRequest -from .types.datapolicy import UpdateDataPolicyRequest - -__all__ = ( - 'DataPolicyServiceAsyncClient', -'CreateDataPolicyRequest', -'DataMaskingPolicy', -'DataPolicy', -'DataPolicyServiceClient', -'DeleteDataPolicyRequest', -'GetDataPolicyRequest', -'ListDataPoliciesRequest', -'ListDataPoliciesResponse', -'RenameDataPolicyRequest', -'UpdateDataPolicyRequest', -) diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/gapic_metadata.json b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/gapic_metadata.json deleted file mode 100644 index 1ce28d57fccc..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/gapic_metadata.json +++ /dev/null @@ -1,163 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.bigquery_datapolicies_v1", - "protoPackage": "google.cloud.bigquery.datapolicies.v1", - "schema": "1.0", - "services": { - "DataPolicyService": { - "clients": { - "grpc": { - "libraryClient": "DataPolicyServiceClient", - "rpcs": { - "CreateDataPolicy": { - "methods": [ - "create_data_policy" - ] - }, - "DeleteDataPolicy": { - "methods": [ - "delete_data_policy" - ] - }, - "GetDataPolicy": { - "methods": [ - "get_data_policy" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - "ListDataPolicies": { - "methods": [ - "list_data_policies" - ] - }, - "RenameDataPolicy": { - "methods": [ - "rename_data_policy" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UpdateDataPolicy": { - "methods": [ - "update_data_policy" - ] - } - } - 
}, - "grpc-async": { - "libraryClient": "DataPolicyServiceAsyncClient", - "rpcs": { - "CreateDataPolicy": { - "methods": [ - "create_data_policy" - ] - }, - "DeleteDataPolicy": { - "methods": [ - "delete_data_policy" - ] - }, - "GetDataPolicy": { - "methods": [ - "get_data_policy" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - "ListDataPolicies": { - "methods": [ - "list_data_policies" - ] - }, - "RenameDataPolicy": { - "methods": [ - "rename_data_policy" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UpdateDataPolicy": { - "methods": [ - "update_data_policy" - ] - } - } - }, - "rest": { - "libraryClient": "DataPolicyServiceClient", - "rpcs": { - "CreateDataPolicy": { - "methods": [ - "create_data_policy" - ] - }, - "DeleteDataPolicy": { - "methods": [ - "delete_data_policy" - ] - }, - "GetDataPolicy": { - "methods": [ - "get_data_policy" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - "ListDataPolicies": { - "methods": [ - "list_data_policies" - ] - }, - "RenameDataPolicy": { - "methods": [ - "rename_data_policy" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UpdateDataPolicy": { - "methods": [ - "update_data_policy" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/gapic_version.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/py.typed b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/py.typed deleted file mode 100644 index ff02bfbdb46b..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-bigquery-datapolicies package uses inline types. 
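As its :code:`comment` field notes, :code:`gapic_metadata.json` maps proto services and RPCs to the corresponding client methods per transport. A small sketch of consuming it follows; the local file path is hypothetical, and the key layout simply mirrors the JSON shown above.

.. code-block:: python

    import json

    # Hypothetical local path; the file ships inside the generated package.
    with open("gapic_metadata.json") as fp:
        metadata = json.load(fp)

    # Map each RPC to its gRPC client method name(s).
    rpcs = metadata["services"]["DataPolicyService"]["clients"]["grpc"]["rpcs"]
    for rpc_name, binding in rpcs.items():
        # e.g. CreateDataPolicy -> ['create_data_policy']
        print(rpc_name, "->", binding["methods"])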
diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/services/__init__.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/services/__init__.py deleted file mode 100644 index 8f6cf068242c..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/__init__.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/__init__.py deleted file mode 100644 index 45491be42598..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import DataPolicyServiceClient -from .async_client import DataPolicyServiceAsyncClient - -__all__ = ( - 'DataPolicyServiceClient', - 'DataPolicyServiceAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/async_client.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/async_client.py deleted file mode 100644 index 3a1109630e4f..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/async_client.py +++ /dev/null @@ -1,1273 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.bigquery_datapolicies_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.cloud.bigquery_datapolicies_v1.services.data_policy_service import pagers -from google.cloud.bigquery_datapolicies_v1.types import datapolicy -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from .transports.base import DataPolicyServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import DataPolicyServiceGrpcAsyncIOTransport -from .client import DataPolicyServiceClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -class DataPolicyServiceAsyncClient: - """Data Policy Service provides APIs for managing the - label-policy bindings. - """ - - _client: DataPolicyServiceClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = DataPolicyServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = DataPolicyServiceClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = DataPolicyServiceClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = DataPolicyServiceClient._DEFAULT_UNIVERSE - - data_policy_path = staticmethod(DataPolicyServiceClient.data_policy_path) - parse_data_policy_path = staticmethod(DataPolicyServiceClient.parse_data_policy_path) - common_billing_account_path = staticmethod(DataPolicyServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(DataPolicyServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(DataPolicyServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(DataPolicyServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(DataPolicyServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(DataPolicyServiceClient.parse_common_organization_path) - common_project_path = staticmethod(DataPolicyServiceClient.common_project_path) - parse_common_project_path = staticmethod(DataPolicyServiceClient.parse_common_project_path) - common_location_path = staticmethod(DataPolicyServiceClient.common_location_path) - parse_common_location_path = staticmethod(DataPolicyServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. 
- args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DataPolicyServiceAsyncClient: The constructed client. - """ - return DataPolicyServiceClient.from_service_account_info.__func__(DataPolicyServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DataPolicyServiceAsyncClient: The constructed client. - """ - return DataPolicyServiceClient.from_service_account_file.__func__(DataPolicyServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` is provided, use the provided one. - (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return DataPolicyServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> DataPolicyServiceTransport: - """Returns the transport used by the client instance. - - Returns: - DataPolicyServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance.
- """ - return self._client._universe_domain - - get_transport_class = DataPolicyServiceClient.get_transport_class - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, DataPolicyServiceTransport, Callable[..., DataPolicyServiceTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the data policy service async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,DataPolicyServiceTransport,Callable[..., DataPolicyServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the DataPolicyServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. 
- """ - self._client = DataPolicyServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.bigquery.datapolicies_v1.DataPolicyServiceAsyncClient`.", - extra = { - "serviceName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { - "serviceName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService", - "credentialsType": None, - } - ) - - async def create_data_policy(self, - request: Optional[Union[datapolicy.CreateDataPolicyRequest, dict]] = None, - *, - parent: Optional[str] = None, - data_policy: Optional[datapolicy.DataPolicy] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> datapolicy.DataPolicy: - r"""Creates a new data policy under a project with the given - ``dataPolicyId`` (used as the display name), policy tag, and - data policy type. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datapolicies_v1 - - async def sample_create_data_policy(): - # Create a client - client = bigquery_datapolicies_v1.DataPolicyServiceAsyncClient() - - # Initialize request argument(s) - data_policy = bigquery_datapolicies_v1.DataPolicy() - data_policy.policy_tag = "policy_tag_value" - data_policy.data_masking_policy.predefined_expression = "DATE_YEAR_MASK" - - request = bigquery_datapolicies_v1.CreateDataPolicyRequest( - parent="parent_value", - data_policy=data_policy, - ) - - # Make the request - response = await client.create_data_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_datapolicies_v1.types.CreateDataPolicyRequest, dict]]): - The request object. Request message for the - CreateDataPolicy method. - parent (:class:`str`): - Required. Resource name of the project that the data - policy will belong to. The format is - ``projects/{project_number}/locations/{location_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - data_policy (:class:`google.cloud.bigquery_datapolicies_v1.types.DataPolicy`): - Required. The data policy to create. The ``name`` field - does not need to be provided for the data policy - creation. - - This corresponds to the ``data_policy`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_datapolicies_v1.types.DataPolicy: - Represents the label-policy binding. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, data_policy] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datapolicy.CreateDataPolicyRequest): - request = datapolicy.CreateDataPolicyRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if data_policy is not None: - request.data_policy = data_policy - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_data_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_data_policy(self, - request: Optional[Union[datapolicy.UpdateDataPolicyRequest, dict]] = None, - *, - data_policy: Optional[datapolicy.DataPolicy] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> datapolicy.DataPolicy: - r"""Updates the metadata for an existing data policy. The - target data policy can be specified by the resource - name. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datapolicies_v1 - - async def sample_update_data_policy(): - # Create a client - client = bigquery_datapolicies_v1.DataPolicyServiceAsyncClient() - - # Initialize request argument(s) - data_policy = bigquery_datapolicies_v1.DataPolicy() - data_policy.policy_tag = "policy_tag_value" - data_policy.data_masking_policy.predefined_expression = "DATE_YEAR_MASK" - - request = bigquery_datapolicies_v1.UpdateDataPolicyRequest( - data_policy=data_policy, - ) - - # Make the request - response = await client.update_data_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_datapolicies_v1.types.UpdateDataPolicyRequest, dict]]): - The request object. Request message for the - UpdateDataPolicy method. - data_policy (:class:`google.cloud.bigquery_datapolicies_v1.types.DataPolicy`): - Required. Update the data policy's metadata. - - The target data policy is determined by the ``name`` - field. Other fields are updated to the specified values - based on the field masks. - - This corresponds to the ``data_policy`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - The update mask applies to the resource. For the - ``FieldMask`` definition, see - https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask - If not set, defaults to all of the fields that are - allowed to update. - - Updates to the ``name`` and ``dataPolicyId`` fields are - not allowed. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_datapolicies_v1.types.DataPolicy: - Represents the label-policy binding. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [data_policy, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datapolicy.UpdateDataPolicyRequest): - request = datapolicy.UpdateDataPolicyRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if data_policy is not None: - request.data_policy = data_policy - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling.
- rpc = self._client._transport._wrapped_methods[self._client._transport.update_data_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("data_policy.name", request.data_policy.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def rename_data_policy(self, - request: Optional[Union[datapolicy.RenameDataPolicyRequest, dict]] = None, - *, - name: Optional[str] = None, - new_data_policy_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> datapolicy.DataPolicy: - r"""Renames the id (display name) of the specified data - policy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datapolicies_v1 - - async def sample_rename_data_policy(): - # Create a client - client = bigquery_datapolicies_v1.DataPolicyServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_datapolicies_v1.RenameDataPolicyRequest( - name="name_value", - new_data_policy_id="new_data_policy_id_value", - ) - - # Make the request - response = await client.rename_data_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_datapolicies_v1.types.RenameDataPolicyRequest, dict]]): - The request object. Request message for the - RenameDataPolicy method. - name (:class:`str`): - Required. Resource name of the data policy to rename. - The format is - ``projects/{project_number}/locations/{location_id}/dataPolicies/{data_policy_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - new_data_policy_id (:class:`str`): - Required. The new data policy id. - This corresponds to the ``new_data_policy_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_datapolicies_v1.types.DataPolicy: - Represents the label-policy binding. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name, new_data_policy_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datapolicy.RenameDataPolicyRequest): - request = datapolicy.RenameDataPolicyRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if new_data_policy_id is not None: - request.new_data_policy_id = new_data_policy_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.rename_data_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_data_policy(self, - request: Optional[Union[datapolicy.DeleteDataPolicyRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes the data policy specified by its resource - name. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datapolicies_v1 - - async def sample_delete_data_policy(): - # Create a client - client = bigquery_datapolicies_v1.DataPolicyServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_datapolicies_v1.DeleteDataPolicyRequest( - name="name_value", - ) - - # Make the request - await client.delete_data_policy(request=request) - - Args: - request (Optional[Union[google.cloud.bigquery_datapolicies_v1.types.DeleteDataPolicyRequest, dict]]): - The request object. Request message for the - DeleteDataPolicy method. - name (:class:`str`): - Required. Resource name of the data policy to delete. - Format is - ``projects/{project_number}/locations/{location_id}/dataPolicies/{data_policy_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datapolicy.DeleteDataPolicyRequest): - request = datapolicy.DeleteDataPolicyRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_data_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def get_data_policy(self, - request: Optional[Union[datapolicy.GetDataPolicyRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> datapolicy.DataPolicy: - r"""Gets the data policy specified by its resource name. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datapolicies_v1 - - async def sample_get_data_policy(): - # Create a client - client = bigquery_datapolicies_v1.DataPolicyServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_datapolicies_v1.GetDataPolicyRequest( - name="name_value", - ) - - # Make the request - response = await client.get_data_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_datapolicies_v1.types.GetDataPolicyRequest, dict]]): - The request object. Request message for the GetDataPolicy - method. - name (:class:`str`): - Required. Resource name of the requested data policy. - Format is - ``projects/{project_number}/locations/{location_id}/dataPolicies/{data_policy_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_datapolicies_v1.types.DataPolicy: - Represents the label-policy binding. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datapolicy.GetDataPolicyRequest): - request = datapolicy.GetDataPolicyRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_data_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_data_policies(self, - request: Optional[Union[datapolicy.ListDataPoliciesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListDataPoliciesAsyncPager: - r"""List all of the data policies in the specified parent - project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datapolicies_v1 - - async def sample_list_data_policies(): - # Create a client - client = bigquery_datapolicies_v1.DataPolicyServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_datapolicies_v1.ListDataPoliciesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_policies(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_datapolicies_v1.types.ListDataPoliciesRequest, dict]]): - The request object. Request message for the - ListDataPolicies method. - parent (:class:`str`): - Required. Resource name of the project for which to list - data policies. 
Format is - ``projects/{project_number}/locations/{location_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_datapolicies_v1.services.data_policy_service.pagers.ListDataPoliciesAsyncPager: - Response message for the - ListDataPolicies method. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datapolicy.ListDataPoliciesRequest): - request = datapolicy.ListDataPoliciesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_data_policies] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListDataPoliciesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_iam_policy(self, - request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Gets the IAM policy for the specified data policy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datapolicies_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - async def sample_get_iam_policy(): - # Create a client - client = bigquery_datapolicies_v1.DataPolicyServiceAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.get_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]]): - The request object. Request message for ``GetIamPolicy`` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). - - **JSON example:** - - :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](\ https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
- if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - elif not request: - request = iam_policy_pb2.GetIamPolicyRequest() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def set_iam_policy(self, - request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM policy for the specified data policy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datapolicies_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - async def sample_set_iam_policy(): - # Create a client - client = bigquery_datapolicies_v1.DataPolicyServiceAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.set_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]]): - The request object. Request message for ``SetIamPolicy`` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. 
To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). - - **JSON example:** - - :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](\ https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - elif not request: - request = iam_policy_pb2.SetIamPolicyRequest() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.set_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def test_iam_permissions(self, - request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Returns the caller's permission on the specified data - policy resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datapolicies_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - async def sample_test_iam_permissions(): - # Create a client - client = bigquery_datapolicies_v1.DataPolicyServiceAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = await client.test_iam_permissions(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]]): - The request object. Request message for ``TestIamPermissions`` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: - Response message for TestIamPermissions method. - """ - # Create or coerce a protobuf request object. - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - elif not request: - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.test_iam_permissions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def __aenter__(self) -> "DataPolicyServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "DataPolicyServiceAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/client.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/client.py deleted file mode 100644 index fe6baf050d75..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/client.py +++ /dev/null @@ -1,1632 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.bigquery_datapolicies_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.cloud.bigquery_datapolicies_v1.services.data_policy_service import pagers -from google.cloud.bigquery_datapolicies_v1.types import datapolicy -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from .transports.base import DataPolicyServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import DataPolicyServiceGrpcTransport -from .transports.grpc_asyncio import DataPolicyServiceGrpcAsyncIOTransport -from .transports.rest import DataPolicyServiceRestTransport - - -class DataPolicyServiceClientMeta(type): - """Metaclass for the DataPolicyService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[DataPolicyServiceTransport]] - _transport_registry["grpc"] = DataPolicyServiceGrpcTransport - _transport_registry["grpc_asyncio"] = DataPolicyServiceGrpcAsyncIOTransport - _transport_registry["rest"] = DataPolicyServiceRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[DataPolicyServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). 
- return next(iter(cls._transport_registry.values())) - - -class DataPolicyServiceClient(metaclass=DataPolicyServiceClientMeta): - """Data Policy Service provides APIs for managing the - label-policy bindings. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = "bigquerydatapolicy.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - _DEFAULT_ENDPOINT_TEMPLATE = "bigquerydatapolicy.{UNIVERSE_DOMAIN}" - _DEFAULT_UNIVERSE = "googleapis.com" - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DataPolicyServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DataPolicyServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> DataPolicyServiceTransport: - """Returns the transport used by the client instance. - - Returns: - DataPolicyServiceTransport: The transport used by the client - instance.
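            Note that this is the live transport object rather than a
            copy: closing it (for example via the client's context
            manager) shuts down this client's underlying channel or
            session.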
- """ - return self._transport - - @staticmethod - def data_policy_path(project: str,location: str,data_policy: str,) -> str: - """Returns a fully-qualified data_policy string.""" - return "projects/{project}/locations/{location}/dataPolicies/{data_policy}".format(project=project, location=location, data_policy=data_policy, ) - - @staticmethod - def parse_data_policy_path(path: str) -> Dict[str,str]: - """Parses a data_policy path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/dataPolicies/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Deprecated. Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. 
- (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. - - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"]. - """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided.
- use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = DataPolicyServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = DataPolicyServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = DataPolicyServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. - """ - universe_domain = DataPolicyServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
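                When available, the credential info is appended to the
                error's ``_details`` as a JSON string (see the method
                body below).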
- """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, DataPolicyServiceTransport, Callable[..., DataPolicyServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the data policy service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,DataPolicyServiceTransport,Callable[..., DataPolicyServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the DataPolicyServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) - - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = DataPolicyServiceClient._read_environment_variables() - self._client_cert_source = DataPolicyServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = DataPolicyServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER - # Setup logging. - client_logging.initialize_logging() - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, DataPolicyServiceTransport) - if transport_provided: - # transport is a DataPolicyServiceTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = cast(DataPolicyServiceTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - DataPolicyServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[DataPolicyServiceTransport], Callable[..., DataPolicyServiceTransport]] = ( - DataPolicyServiceClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., DataPolicyServiceTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.bigquery.datapolicies_v1.DataPolicyServiceClient`.", - extra = { - "serviceName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService", - "credentialsType": None, - } - ) - - def create_data_policy(self, - request: Optional[Union[datapolicy.CreateDataPolicyRequest, dict]] = None, - *, - parent: Optional[str] = None, - data_policy: Optional[datapolicy.DataPolicy] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> datapolicy.DataPolicy: - r"""Creates a new data policy under a project with the given - ``dataPolicyId`` (used as the display name), policy tag, and - data policy type. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datapolicies_v1 - - def sample_create_data_policy(): - # Create a client - client = bigquery_datapolicies_v1.DataPolicyServiceClient() - - # Initialize request argument(s) - data_policy = bigquery_datapolicies_v1.DataPolicy() - data_policy.policy_tag = "policy_tag_value" - data_policy.data_masking_policy.predefined_expression = "DATE_YEAR_MASK" - - request = bigquery_datapolicies_v1.CreateDataPolicyRequest( - parent="parent_value", - data_policy=data_policy, - ) - - # Make the request - response = client.create_data_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_datapolicies_v1.types.CreateDataPolicyRequest, dict]): - The request object. Request message for the - CreateDataPolicy method. - parent (str): - Required. Resource name of the project that the data - policy will belong to. The format is - ``projects/{project_number}/locations/{location_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - data_policy (google.cloud.bigquery_datapolicies_v1.types.DataPolicy): - Required. The data policy to create. The ``name`` field - does not need to be provided for the data policy - creation. - - This corresponds to the ``data_policy`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_datapolicies_v1.types.DataPolicy: - Represents the label-policy binding. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, data_policy] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datapolicy.CreateDataPolicyRequest): - request = datapolicy.CreateDataPolicyRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if data_policy is not None: - request.data_policy = data_policy - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_data_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. 
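        # (Currently a no-op that always returns True; see the
        # b/349488459 note in _validate_universe_domain above.)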
- self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_data_policy(self, - request: Optional[Union[datapolicy.UpdateDataPolicyRequest, dict]] = None, - *, - data_policy: Optional[datapolicy.DataPolicy] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> datapolicy.DataPolicy: - r"""Updates the metadata for an existing data policy. The - target data policy can be specified by the resource - name. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datapolicies_v1 - - def sample_update_data_policy(): - # Create a client - client = bigquery_datapolicies_v1.DataPolicyServiceClient() - - # Initialize request argument(s) - data_policy = bigquery_datapolicies_v1.DataPolicy() - data_policy.policy_tag = "policy_tag_value" - data_policy.data_masking_policy.predefined_expression = "DATE_YEAR_MASK" - - request = bigquery_datapolicies_v1.UpdateDataPolicyRequest( - data_policy=data_policy, - ) - - # Make the request - response = client.update_data_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_datapolicies_v1.types.UpdateDataPolicyRequest, dict]): - The request object. Request message for the - UpdateDataPolicy method. - data_policy (google.cloud.bigquery_datapolicies_v1.types.DataPolicy): - Required. Update the data policy's metadata. - - The target data policy is determined by the ``name`` - field. Other fields are updated to the specified values - based on the field masks. - - This corresponds to the ``data_policy`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - The update mask applies to the resource. For the - ``FieldMask`` definition, see - https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask - If not set, defaults to all of the fields that are - allowed to be updated. - - Updates to the ``name`` and ``dataPolicyId`` fields are - not allowed. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_datapolicies_v1.types.DataPolicy: - Represents the label-policy binding. - """ - # Create or coerce a protobuf request object.
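        # (GAPIC convention, noted here for clarity: callers pass either a
        # fully-formed request message or the flattened fields, never both;
        # the quick check below enforces this.)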
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [data_policy, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datapolicy.UpdateDataPolicyRequest): - request = datapolicy.UpdateDataPolicyRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if data_policy is not None: - request.data_policy = data_policy - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_data_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("data_policy.name", request.data_policy.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def rename_data_policy(self, - request: Optional[Union[datapolicy.RenameDataPolicyRequest, dict]] = None, - *, - name: Optional[str] = None, - new_data_policy_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> datapolicy.DataPolicy: - r"""Renames the id (display name) of the specified data - policy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datapolicies_v1 - - def sample_rename_data_policy(): - # Create a client - client = bigquery_datapolicies_v1.DataPolicyServiceClient() - - # Initialize request argument(s) - request = bigquery_datapolicies_v1.RenameDataPolicyRequest( - name="name_value", - new_data_policy_id="new_data_policy_id_value", - ) - - # Make the request - response = client.rename_data_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_datapolicies_v1.types.RenameDataPolicyRequest, dict]): - The request object. Request message for the - RenameDataPolicy method. - name (str): - Required. Resource name of the data policy to rename. - The format is - ``projects/{project_number}/locations/{location_id}/dataPolicies/{data_policy_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - new_data_policy_id (str): - Required. The new data policy id. 
- This corresponds to the ``new_data_policy_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_datapolicies_v1.types.DataPolicy: - Represents the label-policy binding. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name, new_data_policy_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datapolicy.RenameDataPolicyRequest): - request = datapolicy.RenameDataPolicyRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if new_data_policy_id is not None: - request.new_data_policy_id = new_data_policy_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.rename_data_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_data_policy(self, - request: Optional[Union[datapolicy.DeleteDataPolicyRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes the data policy specified by its resource - name. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datapolicies_v1 - - def sample_delete_data_policy(): - # Create a client - client = bigquery_datapolicies_v1.DataPolicyServiceClient() - - # Initialize request argument(s) - request = bigquery_datapolicies_v1.DeleteDataPolicyRequest( - name="name_value", - ) - - # Make the request - client.delete_data_policy(request=request) - - Args: - request (Union[google.cloud.bigquery_datapolicies_v1.types.DeleteDataPolicyRequest, dict]): - The request object. Request message for the - DeleteDataPolicy method. - name (str): - Required. Resource name of the data policy to delete. - Format is - ``projects/{project_number}/locations/{location_id}/dataPolicies/{data_policy_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datapolicy.DeleteDataPolicyRequest): - request = datapolicy.DeleteDataPolicyRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_data_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def get_data_policy(self, - request: Optional[Union[datapolicy.GetDataPolicyRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> datapolicy.DataPolicy: - r"""Gets the data policy specified by its resource name. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datapolicies_v1 - - def sample_get_data_policy(): - # Create a client - client = bigquery_datapolicies_v1.DataPolicyServiceClient() - - # Initialize request argument(s) - request = bigquery_datapolicies_v1.GetDataPolicyRequest( - name="name_value", - ) - - # Make the request - response = client.get_data_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_datapolicies_v1.types.GetDataPolicyRequest, dict]): - The request object. Request message for the GetDataPolicy - method. - name (str): - Required. Resource name of the requested data policy. - Format is - ``projects/{project_number}/locations/{location_id}/dataPolicies/{data_policy_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_datapolicies_v1.types.DataPolicy: - Represents the label-policy binding. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datapolicy.GetDataPolicyRequest): - request = datapolicy.GetDataPolicyRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_data_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_data_policies(self, - request: Optional[Union[datapolicy.ListDataPoliciesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListDataPoliciesPager: - r"""List all of the data policies in the specified parent - project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datapolicies_v1 - - def sample_list_data_policies(): - # Create a client - client = bigquery_datapolicies_v1.DataPolicyServiceClient() - - # Initialize request argument(s) - request = bigquery_datapolicies_v1.ListDataPoliciesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_policies(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.bigquery_datapolicies_v1.types.ListDataPoliciesRequest, dict]): - The request object. Request message for the - ListDataPolicies method. - parent (str): - Required. Resource name of the project for which to list - data policies. Format is - ``projects/{project_number}/locations/{location_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_datapolicies_v1.services.data_policy_service.pagers.ListDataPoliciesPager: - Response message for the - ListDataPolicies method. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datapolicy.ListDataPoliciesRequest): - request = datapolicy.ListDataPoliciesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_data_policies] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request.
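        # (The raw ListDataPoliciesResponse from this call is wrapped in a
        # ListDataPoliciesPager below, so callers can iterate across pages
        # without handling page tokens themselves.)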
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListDataPoliciesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_iam_policy(self, - request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Gets the IAM policy for the specified data policy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datapolicies_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - def sample_get_iam_policy(): - # Create a client - client = bigquery_datapolicies_v1.DataPolicyServiceClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.get_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]): - The request object. Request message for ``GetIamPolicy`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). 
- - **JSON example:** - - :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](\ https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - if isinstance(request, dict): - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.GetIamPolicyRequest(**request) - elif not request: - # Null request, just make one. - request = iam_policy_pb2.GetIamPolicyRequest() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def set_iam_policy(self, - request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM policy for the specified data policy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datapolicies_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - def sample_set_iam_policy(): - # Create a client - client = bigquery_datapolicies_v1.DataPolicyServiceClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.set_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]): - The request object. 
Request message for ``SetIamPolicy`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). - - **JSON example:** - - :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](\ https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - if isinstance(request, dict): - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.SetIamPolicyRequest(**request) - elif not request: - # Null request, just make one. - request = iam_policy_pb2.SetIamPolicyRequest() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
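        # (retry and timeout arrive as gapic_v1.method.DEFAULT sentinels
        # unless the caller overrides them; the wrapped method resolves
        # those sentinels to its configured per-method defaults.)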
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def test_iam_permissions(self, - request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Returns the caller's permission on the specified data - policy resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datapolicies_v1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - def sample_test_iam_permissions(): - # Create a client - client = bigquery_datapolicies_v1.DataPolicyServiceClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = client.test_iam_permissions(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]): - The request object. Request message for ``TestIamPermissions`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: - Response message for TestIamPermissions method. - """ - # Create or coerce a protobuf request object. - if isinstance(request, dict): - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - elif not request: - # Null request, just make one. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "DataPolicyServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! 
Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "DataPolicyServiceClient", -) diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/pagers.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/pagers.py deleted file mode 100644 index 438a9bcf80b8..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/pagers.py +++ /dev/null @@ -1,166 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.bigquery_datapolicies_v1.types import datapolicy - - -class ListDataPoliciesPager: - """A pager for iterating through ``list_data_policies`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_datapolicies_v1.types.ListDataPoliciesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``data_policies`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListDataPolicies`` requests and continue to iterate - through the ``data_policies`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_datapolicies_v1.types.ListDataPoliciesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., datapolicy.ListDataPoliciesResponse], - request: datapolicy.ListDataPoliciesRequest, - response: datapolicy.ListDataPoliciesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_datapolicies_v1.types.ListDataPoliciesRequest): - The initial request object. 
- response (google.cloud.bigquery_datapolicies_v1.types.ListDataPoliciesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = datapolicy.ListDataPoliciesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[datapolicy.ListDataPoliciesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[datapolicy.DataPolicy]: - for page in self.pages: - yield from page.data_policies - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDataPoliciesAsyncPager: - """A pager for iterating through ``list_data_policies`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_datapolicies_v1.types.ListDataPoliciesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``data_policies`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDataPolicies`` requests and continue to iterate - through the ``data_policies`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_datapolicies_v1.types.ListDataPoliciesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[datapolicy.ListDataPoliciesResponse]], - request: datapolicy.ListDataPoliciesRequest, - response: datapolicy.ListDataPoliciesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_datapolicies_v1.types.ListDataPoliciesRequest): - The initial request object. - response (google.cloud.bigquery_datapolicies_v1.types.ListDataPoliciesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = datapolicy.ListDataPoliciesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[datapolicy.ListDataPoliciesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[datapolicy.DataPolicy]: - async def async_generator(): - async for page in self.pages: - for response in page.data_policies: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/transports/README.rst b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/transports/README.rst deleted file mode 100644 index b83d156af245..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`DataPolicyServiceTransport` is the ABC for all transports. -- public child `DataPolicyServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `DataPolicyServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseDataPolicyServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `DataPolicyServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/transports/__init__.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/transports/__init__.py deleted file mode 100644 index cb00703f283e..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -from typing import Dict, Type - -from .base import DataPolicyServiceTransport -from .grpc import DataPolicyServiceGrpcTransport -from .grpc_asyncio import DataPolicyServiceGrpcAsyncIOTransport -from .rest import DataPolicyServiceRestTransport -from .rest import DataPolicyServiceRestInterceptor - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[DataPolicyServiceTransport]] -_transport_registry['grpc'] = DataPolicyServiceGrpcTransport -_transport_registry['grpc_asyncio'] = DataPolicyServiceGrpcAsyncIOTransport -_transport_registry['rest'] = DataPolicyServiceRestTransport - -__all__ = ( - 'DataPolicyServiceTransport', - 'DataPolicyServiceGrpcTransport', - 'DataPolicyServiceGrpcAsyncIOTransport', - 'DataPolicyServiceRestTransport', - 'DataPolicyServiceRestInterceptor', -) diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/transports/base.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/transports/base.py deleted file mode 100644 index 6bc00cebb899..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/transports/base.py +++ /dev/null @@ -1,351 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
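The ``OrderedDict`` registry above is how a ``transport`` string passed to the client resolves to a concrete transport class. A minimal sketch of selecting the REST transport through that lookup, assuming default credentials:

.. code-block:: python

    from google.cloud import bigquery_datapolicies_v1

    # The client resolves the string through the registry shown above;
    # "rest" selects DataPolicyServiceRestTransport instead of gRPC.
    client = bigquery_datapolicies_v1.DataPolicyServiceClient(transport="rest")
    print(type(client.transport).__name__)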
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.bigquery_datapolicies_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.bigquery_datapolicies_v1.types import datapolicy -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class DataPolicyServiceTransport(abc.ABC): - """Abstract transport class for DataPolicyService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'bigquerydatapolicy.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'bigquerydatapolicy.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply the audience if a credentials file was passed by the user.
- if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.create_data_policy: gapic_v1.method.wrap_method( - self.create_data_policy, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.update_data_policy: gapic_v1.method.wrap_method( - self.update_data_policy, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.rename_data_policy: gapic_v1.method.wrap_method( - self.rename_data_policy, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.delete_data_policy: gapic_v1.method.wrap_method( - self.delete_data_policy, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_data_policy: gapic_v1.method.wrap_method( - self.get_data_policy, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_data_policies: gapic_v1.method.wrap_method( - self.list_data_policies, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_iam_policy: gapic_v1.method.wrap_method( - self.get_iam_policy, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.set_iam_policy: gapic_v1.method.wrap_method( - self.set_iam_policy, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.test_iam_permissions: gapic_v1.method.wrap_method( - self.test_iam_permissions, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - 
predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! - """ - raise NotImplementedError() - - @property - def create_data_policy(self) -> Callable[ - [datapolicy.CreateDataPolicyRequest], - Union[ - datapolicy.DataPolicy, - Awaitable[datapolicy.DataPolicy] - ]]: - raise NotImplementedError() - - @property - def update_data_policy(self) -> Callable[ - [datapolicy.UpdateDataPolicyRequest], - Union[ - datapolicy.DataPolicy, - Awaitable[datapolicy.DataPolicy] - ]]: - raise NotImplementedError() - - @property - def rename_data_policy(self) -> Callable[ - [datapolicy.RenameDataPolicyRequest], - Union[ - datapolicy.DataPolicy, - Awaitable[datapolicy.DataPolicy] - ]]: - raise NotImplementedError() - - @property - def delete_data_policy(self) -> Callable[ - [datapolicy.DeleteDataPolicyRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def get_data_policy(self) -> Callable[ - [datapolicy.GetDataPolicyRequest], - Union[ - datapolicy.DataPolicy, - Awaitable[datapolicy.DataPolicy] - ]]: - raise NotImplementedError() - - @property - def list_data_policies(self) -> Callable[ - [datapolicy.ListDataPoliciesRequest], - Union[ - datapolicy.ListDataPoliciesResponse, - Awaitable[datapolicy.ListDataPoliciesResponse] - ]]: - raise NotImplementedError() - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Union[ - policy_pb2.Policy, - Awaitable[policy_pb2.Policy] - ]]: - raise NotImplementedError() - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Union[ - policy_pb2.Policy, - Awaitable[policy_pb2.Policy] - ]]: - raise NotImplementedError() - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Union[ - iam_policy_pb2.TestIamPermissionsResponse, - Awaitable[iam_policy_pb2.TestIamPermissionsResponse] - ]]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'DataPolicyServiceTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/transports/grpc.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/transports/grpc.py deleted file mode 100644 index 5d10c8971b09..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/transports/grpc.py +++ /dev/null @@ -1,567 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
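Every method in ``_prep_wrapped_messages`` above gets the same exponential retry policy: a first delay of about 1.0 s, a 1.3x growth factor, individual delays capped at 10.0 s, retries only on ``ServiceUnavailable``, and a 60-second overall budget. A standalone sketch of an equivalent policy built with ``google.api_core``; the decorated function is a placeholder:

.. code-block:: python

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries

    # Equivalent to the defaults wired into _prep_wrapped_messages above.
    policy = retries.Retry(
        initial=1.0,     # first sleep: ~1.0 s (jittered)
        maximum=10.0,    # no single sleep exceeds 10 s
        multiplier=1.3,  # each sleep grows by 30%
        predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
        deadline=60.0,   # total retry budget in seconds
    )

    @policy
    def flaky_call():
        # Placeholder for an RPC that may raise ServiceUnavailable.
        ...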
-# -import json -import logging as std_logging -import pickle -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore - -from google.cloud.bigquery_datapolicies_v1.types import datapolicy -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import DataPolicyServiceTransport, DEFAULT_CLIENT_INFO - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER - def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService", - "rpcName": client_call_details.method, - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - - response = continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = response.trailing_metadata() - # Convert the gRPC trailing metadata into a dict of printable values. - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = response.result() - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response for {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService", - "rpcName": client_call_details.method, - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class DataPolicyServiceGrpcTransport(DataPolicyServiceTransport): - """gRPC backend transport for DataPolicyService. - - Data Policy Service provides APIs for managing the - label-policy bindings.
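The interceptor above only observes traffic; the transport attaches it later by wrapping the raw channel with ``grpc.intercept_channel``. A minimal sketch of that same attachment pattern with a trivial interceptor; the endpoint is a placeholder for illustration:

.. code-block:: python

    import grpc

    class CountingInterceptor(grpc.UnaryUnaryClientInterceptor):
        """Toy interceptor: counts unary-unary calls passing through."""

        def __init__(self):
            self.calls = 0

        def intercept_unary_unary(self, continuation, client_call_details, request):
            self.calls += 1
            # Delegate to the next handler in the chain unchanged.
            return continuation(client_call_details, request)

    interceptor = CountingInterceptor()
    channel = grpc.insecure_channel("localhost:50051")  # illustrative endpoint
    logged_channel = grpc.intercept_channel(channel, interceptor)
    # Stubs created against `logged_channel` now pass through the interceptor.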
- - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'bigquerydatapolicy.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'bigquerydatapolicy.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional[Sequence[str]]): A list of scopes. This argument is - ignored if a ``channel`` instance is provided. - channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) - - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'bigquerydatapolicy.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. 
- credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): An optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def create_data_policy(self) -> Callable[ - [datapolicy.CreateDataPolicyRequest], - datapolicy.DataPolicy]: - r"""Return a callable for the create data policy method over gRPC. - - Creates a new data policy under a project with the given - ``dataPolicyId`` (used as the display name), policy tag, and - data policy type. - - Returns: - Callable[[~.CreateDataPolicyRequest], - ~.DataPolicy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_data_policy' not in self._stubs: - self._stubs['create_data_policy'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datapolicies.v1.DataPolicyService/CreateDataPolicy', - request_serializer=datapolicy.CreateDataPolicyRequest.serialize, - response_deserializer=datapolicy.DataPolicy.deserialize, - ) - return self._stubs['create_data_policy'] - - @property - def update_data_policy(self) -> Callable[ - [datapolicy.UpdateDataPolicyRequest], - datapolicy.DataPolicy]: - r"""Return a callable for the update data policy method over gRPC. - - Updates the metadata for an existing data policy. The - target data policy can be specified by the resource - name. - - Returns: - Callable[[~.UpdateDataPolicyRequest], - ~.DataPolicy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_data_policy' not in self._stubs: - self._stubs['update_data_policy'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datapolicies.v1.DataPolicyService/UpdateDataPolicy', - request_serializer=datapolicy.UpdateDataPolicyRequest.serialize, - response_deserializer=datapolicy.DataPolicy.deserialize, - ) - return self._stubs['update_data_policy'] - - @property - def rename_data_policy(self) -> Callable[ - [datapolicy.RenameDataPolicyRequest], - datapolicy.DataPolicy]: - r"""Return a callable for the rename data policy method over gRPC. - - Renames the id (display name) of the specified data - policy.
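Each RPC property above builds its stub on first access and memoizes it in ``self._stubs``, so a transport only pays channel-binding costs for the methods it actually uses. The same lazy-caching pattern in isolation, with hypothetical names throughout:

.. code-block:: python

    from typing import Any, Callable, Dict

    class LazyStubs:
        """Creates an expensive per-method callable once and reuses it."""

        def __init__(self, channel: Any):
            self._channel = channel
            self._stubs: Dict[str, Callable] = {}

        @property
        def create_thing(self) -> Callable:
            # First access builds the stub; later accesses hit the cache.
            if 'create_thing' not in self._stubs:
                self._stubs['create_thing'] = self._channel.unary_unary(
                    '/example.v1.ThingService/CreateThing',  # hypothetical RPC path
                )
            return self._stubs['create_thing']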
- - Returns: - Callable[[~.RenameDataPolicyRequest], - ~.DataPolicy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'rename_data_policy' not in self._stubs: - self._stubs['rename_data_policy'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datapolicies.v1.DataPolicyService/RenameDataPolicy', - request_serializer=datapolicy.RenameDataPolicyRequest.serialize, - response_deserializer=datapolicy.DataPolicy.deserialize, - ) - return self._stubs['rename_data_policy'] - - @property - def delete_data_policy(self) -> Callable[ - [datapolicy.DeleteDataPolicyRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete data policy method over gRPC. - - Deletes the data policy specified by its resource - name. - - Returns: - Callable[[~.DeleteDataPolicyRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_data_policy' not in self._stubs: - self._stubs['delete_data_policy'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datapolicies.v1.DataPolicyService/DeleteDataPolicy', - request_serializer=datapolicy.DeleteDataPolicyRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_data_policy'] - - @property - def get_data_policy(self) -> Callable[ - [datapolicy.GetDataPolicyRequest], - datapolicy.DataPolicy]: - r"""Return a callable for the get data policy method over gRPC. - - Gets the data policy specified by its resource name. - - Returns: - Callable[[~.GetDataPolicyRequest], - ~.DataPolicy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_data_policy' not in self._stubs: - self._stubs['get_data_policy'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datapolicies.v1.DataPolicyService/GetDataPolicy', - request_serializer=datapolicy.GetDataPolicyRequest.serialize, - response_deserializer=datapolicy.DataPolicy.deserialize, - ) - return self._stubs['get_data_policy'] - - @property - def list_data_policies(self) -> Callable[ - [datapolicy.ListDataPoliciesRequest], - datapolicy.ListDataPoliciesResponse]: - r"""Return a callable for the list data policies method over gRPC. - - List all of the data policies in the specified parent - project. - - Returns: - Callable[[~.ListDataPoliciesRequest], - ~.ListDataPoliciesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_data_policies' not in self._stubs: - self._stubs['list_data_policies'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datapolicies.v1.DataPolicyService/ListDataPolicies', - request_serializer=datapolicy.ListDataPoliciesRequest.serialize, - response_deserializer=datapolicy.ListDataPoliciesResponse.deserialize, - ) - return self._stubs['list_data_policies'] - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - - Gets the IAM policy for the specified data policy. - - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_iam_policy' not in self._stubs: - self._stubs['get_iam_policy'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datapolicies.v1.DataPolicyService/GetIamPolicy', - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['get_iam_policy'] - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - - Sets the IAM policy for the specified data policy. - - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'set_iam_policy' not in self._stubs: - self._stubs['set_iam_policy'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datapolicies.v1.DataPolicyService/SetIamPolicy', - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['set_iam_policy'] - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - iam_policy_pb2.TestIamPermissionsResponse]: - r"""Return a callable for the test iam permissions method over gRPC. - - Returns the caller's permission on the specified data - policy resource. - - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'test_iam_permissions' not in self._stubs: - self._stubs['test_iam_permissions'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datapolicies.v1.DataPolicyService/TestIamPermissions', - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs['test_iam_permissions'] - - def close(self): - self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'DataPolicyServiceGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/transports/grpc_asyncio.py deleted file mode 100644 index c06b75a4eb3c..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,708 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import inspect -import json -import pickle -import logging as std_logging -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import exceptions as core_exceptions -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.bigquery_datapolicies_v1.types import datapolicy -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import DataPolicyServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import DataPolicyServiceGrpcTransport - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER - async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): 
- request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService", - "rpcName": str(client_call_details.method), - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - response = await continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = await response.trailing_metadata() - # Convert the gRPC trailing metadata into a dict of printable values. - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = await response - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response to rpc {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService", - "rpcName": str(client_call_details.method), - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class DataPolicyServiceGrpcAsyncIOTransport(DataPolicyServiceTransport): - """gRPC AsyncIO backend transport for DataPolicyService. - - Data Policy Service provides APIs for managing the - label-policy bindings. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'bigquerydatapolicy.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - scopes (Optional[Sequence[str]]): An optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object.
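The AsyncIO transport mirrors the sync one, but every RPC returns an awaitable. A minimal sketch of driving it through the async client, assuming default credentials; the parent value is illustrative only:

.. code-block:: python

    import asyncio

    from google.cloud import bigquery_datapolicies_v1

    async def main():
        client = bigquery_datapolicies_v1.DataPolicyServiceAsyncClient()
        # list_data_policies is a coroutine returning an async pager.
        pager = await client.list_data_policies(
            parent="projects/my-project/locations/us"
        )
        async for policy in pager:
            print(policy.name)

    asyncio.run(main())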
- """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'bigquerydatapolicy.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'bigquerydatapolicy.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, aio.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def create_data_policy(self) -> Callable[ - [datapolicy.CreateDataPolicyRequest], - Awaitable[datapolicy.DataPolicy]]: - r"""Return a callable for the create data policy method over gRPC.
- - Creates a new data policy under a project with the given - ``dataPolicyId`` (used as the display name), policy tag, and - data policy type. - - Returns: - Callable[[~.CreateDataPolicyRequest], - Awaitable[~.DataPolicy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_data_policy' not in self._stubs: - self._stubs['create_data_policy'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datapolicies.v1.DataPolicyService/CreateDataPolicy', - request_serializer=datapolicy.CreateDataPolicyRequest.serialize, - response_deserializer=datapolicy.DataPolicy.deserialize, - ) - return self._stubs['create_data_policy'] - - @property - def update_data_policy(self) -> Callable[ - [datapolicy.UpdateDataPolicyRequest], - Awaitable[datapolicy.DataPolicy]]: - r"""Return a callable for the update data policy method over gRPC. - - Updates the metadata for an existing data policy. The - target data policy can be specified by the resource - name. - - Returns: - Callable[[~.UpdateDataPolicyRequest], - Awaitable[~.DataPolicy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_data_policy' not in self._stubs: - self._stubs['update_data_policy'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datapolicies.v1.DataPolicyService/UpdateDataPolicy', - request_serializer=datapolicy.UpdateDataPolicyRequest.serialize, - response_deserializer=datapolicy.DataPolicy.deserialize, - ) - return self._stubs['update_data_policy'] - - @property - def rename_data_policy(self) -> Callable[ - [datapolicy.RenameDataPolicyRequest], - Awaitable[datapolicy.DataPolicy]]: - r"""Return a callable for the rename data policy method over gRPC. - - Renames the id (display name) of the specified data - policy. - - Returns: - Callable[[~.RenameDataPolicyRequest], - Awaitable[~.DataPolicy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'rename_data_policy' not in self._stubs: - self._stubs['rename_data_policy'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datapolicies.v1.DataPolicyService/RenameDataPolicy', - request_serializer=datapolicy.RenameDataPolicyRequest.serialize, - response_deserializer=datapolicy.DataPolicy.deserialize, - ) - return self._stubs['rename_data_policy'] - - @property - def delete_data_policy(self) -> Callable[ - [datapolicy.DeleteDataPolicyRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete data policy method over gRPC. - - Deletes the data policy specified by its resource - name. - - Returns: - Callable[[~.DeleteDataPolicyRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_data_policy' not in self._stubs: - self._stubs['delete_data_policy'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datapolicies.v1.DataPolicyService/DeleteDataPolicy', - request_serializer=datapolicy.DeleteDataPolicyRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_data_policy'] - - @property - def get_data_policy(self) -> Callable[ - [datapolicy.GetDataPolicyRequest], - Awaitable[datapolicy.DataPolicy]]: - r"""Return a callable for the get data policy method over gRPC. - - Gets the data policy specified by its resource name. - - Returns: - Callable[[~.GetDataPolicyRequest], - Awaitable[~.DataPolicy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_data_policy' not in self._stubs: - self._stubs['get_data_policy'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datapolicies.v1.DataPolicyService/GetDataPolicy', - request_serializer=datapolicy.GetDataPolicyRequest.serialize, - response_deserializer=datapolicy.DataPolicy.deserialize, - ) - return self._stubs['get_data_policy'] - - @property - def list_data_policies(self) -> Callable[ - [datapolicy.ListDataPoliciesRequest], - Awaitable[datapolicy.ListDataPoliciesResponse]]: - r"""Return a callable for the list data policies method over gRPC. - - List all of the data policies in the specified parent - project. - - Returns: - Callable[[~.ListDataPoliciesRequest], - Awaitable[~.ListDataPoliciesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_data_policies' not in self._stubs: - self._stubs['list_data_policies'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datapolicies.v1.DataPolicyService/ListDataPolicies', - request_serializer=datapolicy.ListDataPoliciesRequest.serialize, - response_deserializer=datapolicy.ListDataPoliciesResponse.deserialize, - ) - return self._stubs['list_data_policies'] - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the get iam policy method over gRPC. - - Gets the IAM policy for the specified data policy. - - Returns: - Callable[[~.GetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_iam_policy' not in self._stubs: - self._stubs['get_iam_policy'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datapolicies.v1.DataPolicyService/GetIamPolicy', - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['get_iam_policy'] - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the set iam policy method over gRPC. 
- - Sets the IAM policy for the specified data policy. - - Returns: - Callable[[~.SetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'set_iam_policy' not in self._stubs: - self._stubs['set_iam_policy'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datapolicies.v1.DataPolicyService/SetIamPolicy', - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['set_iam_policy'] - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Awaitable[iam_policy_pb2.TestIamPermissionsResponse]]: - r"""Return a callable for the test iam permissions method over gRPC. - - Returns the caller's permission on the specified data - policy resource. - - Returns: - Callable[[~.TestIamPermissionsRequest], - Awaitable[~.TestIamPermissionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'test_iam_permissions' not in self._stubs: - self._stubs['test_iam_permissions'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datapolicies.v1.DataPolicyService/TestIamPermissions', - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs['test_iam_permissions'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.create_data_policy: self._wrap_method( - self.create_data_policy, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.update_data_policy: self._wrap_method( - self.update_data_policy, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.rename_data_policy: self._wrap_method( - self.rename_data_policy, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.delete_data_policy: self._wrap_method( - self.delete_data_policy, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_data_policy: self._wrap_method( - self.get_data_policy, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, 
- ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_data_policies: self._wrap_method( - self.list_data_policies, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_iam_policy: self._wrap_method( - self.get_iam_policy, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.set_iam_policy: self._wrap_method( - self.set_iam_policy, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.test_iam_permissions: self._wrap_method( - self.test_iam_permissions, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - -__all__ = ( - 'DataPolicyServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/transports/rest.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/transports/rest.py deleted file mode 100644 index bcbb9ffe6b25..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/transports/rest.py +++ /dev/null @@ -1,1829 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
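For reference, every method wrapped in ``_prep_wrapped_messages`` above uses the same retry policy. Built standalone it would look like the sketch below, assuming the same ``google.api_core`` imports this module relies on; individual calls can still override it via their ``retry=`` argument.

.. code-block:: python

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries

    # Exponential backoff: wait 1.0s, then 1.3x longer after each attempt,
    # capped at 10.0s between attempts; retry only UNAVAILABLE errors; and
    # stop retrying 60s after the first attempt.
    retry_policy = retries.AsyncRetry(
        initial=1.0,
        maximum=10.0,
        multiplier=1.3,
        predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
        deadline=60.0,
    )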
-# -import logging -import json # type: ignore - -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 - -from google.protobuf import json_format - -from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - - -from google.cloud.bigquery_datapolicies_v1.types import datapolicy -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - - -from .rest_base import _BaseDataPolicyServiceRestTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = logging.getLogger(__name__) - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=f"requests@{requests_version}", -) - - -class DataPolicyServiceRestInterceptor: - """Interceptor for DataPolicyService. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the DataPolicyServiceRestTransport. - - .. 
code-block:: python - class MyCustomDataPolicyServiceInterceptor(DataPolicyServiceRestInterceptor): - def pre_create_data_policy(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_data_policy(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_data_policy(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_get_data_policy(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_data_policy(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_iam_policy(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_iam_policy(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_data_policies(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_data_policies(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_rename_data_policy(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_rename_data_policy(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_set_iam_policy(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_set_iam_policy(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_test_iam_permissions(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_test_iam_permissions(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_data_policy(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_data_policy(self, response): - logging.log(f"Received response: {response}") - return response - - transport = DataPolicyServiceRestTransport(interceptor=MyCustomDataPolicyServiceInterceptor()) - client = DataPolicyServiceClient(transport=transport) - - - """ - def pre_create_data_policy(self, request: datapolicy.CreateDataPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datapolicy.CreateDataPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_data_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataPolicyService server. - """ - return request, metadata - - def post_create_data_policy(self, response: datapolicy.DataPolicy) -> datapolicy.DataPolicy: - """Post-rpc interceptor for create_data_policy - - DEPRECATED. Please use the `post_create_data_policy_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataPolicyService server but before - it is returned to user code. This `post_create_data_policy` interceptor runs - before the `post_create_data_policy_with_metadata` interceptor. 
- """ - return response - - def post_create_data_policy_with_metadata(self, response: datapolicy.DataPolicy, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datapolicy.DataPolicy, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_data_policy - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataPolicyService server but before it is returned to user code. - - We recommend only using this `post_create_data_policy_with_metadata` - interceptor in new development instead of the `post_create_data_policy` interceptor. - When both interceptors are used, this `post_create_data_policy_with_metadata` interceptor runs after the - `post_create_data_policy` interceptor. The (possibly modified) response returned by - `post_create_data_policy` will be passed to - `post_create_data_policy_with_metadata`. - """ - return response, metadata - - def pre_delete_data_policy(self, request: datapolicy.DeleteDataPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datapolicy.DeleteDataPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_data_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataPolicyService server. - """ - return request, metadata - - def pre_get_data_policy(self, request: datapolicy.GetDataPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datapolicy.GetDataPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_data_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataPolicyService server. - """ - return request, metadata - - def post_get_data_policy(self, response: datapolicy.DataPolicy) -> datapolicy.DataPolicy: - """Post-rpc interceptor for get_data_policy - - DEPRECATED. Please use the `post_get_data_policy_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataPolicyService server but before - it is returned to user code. This `post_get_data_policy` interceptor runs - before the `post_get_data_policy_with_metadata` interceptor. - """ - return response - - def post_get_data_policy_with_metadata(self, response: datapolicy.DataPolicy, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datapolicy.DataPolicy, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_data_policy - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataPolicyService server but before it is returned to user code. - - We recommend only using this `post_get_data_policy_with_metadata` - interceptor in new development instead of the `post_get_data_policy` interceptor. - When both interceptors are used, this `post_get_data_policy_with_metadata` interceptor runs after the - `post_get_data_policy` interceptor. The (possibly modified) response returned by - `post_get_data_policy` will be passed to - `post_get_data_policy_with_metadata`. - """ - return response, metadata - - def pre_get_iam_policy(self, request: iam_policy_pb2.GetIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataPolicyService server. 
- """ - return request, metadata - - def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: - """Post-rpc interceptor for get_iam_policy - - DEPRECATED. Please use the `post_get_iam_policy_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataPolicyService server but before - it is returned to user code. This `post_get_iam_policy` interceptor runs - before the `post_get_iam_policy_with_metadata` interceptor. - """ - return response - - def post_get_iam_policy_with_metadata(self, response: policy_pb2.Policy, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[policy_pb2.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_iam_policy - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataPolicyService server but before it is returned to user code. - - We recommend only using this `post_get_iam_policy_with_metadata` - interceptor in new development instead of the `post_get_iam_policy` interceptor. - When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the - `post_get_iam_policy` interceptor. The (possibly modified) response returned by - `post_get_iam_policy` will be passed to - `post_get_iam_policy_with_metadata`. - """ - return response, metadata - - def pre_list_data_policies(self, request: datapolicy.ListDataPoliciesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datapolicy.ListDataPoliciesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_data_policies - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataPolicyService server. - """ - return request, metadata - - def post_list_data_policies(self, response: datapolicy.ListDataPoliciesResponse) -> datapolicy.ListDataPoliciesResponse: - """Post-rpc interceptor for list_data_policies - - DEPRECATED. Please use the `post_list_data_policies_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataPolicyService server but before - it is returned to user code. This `post_list_data_policies` interceptor runs - before the `post_list_data_policies_with_metadata` interceptor. - """ - return response - - def post_list_data_policies_with_metadata(self, response: datapolicy.ListDataPoliciesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datapolicy.ListDataPoliciesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_data_policies - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataPolicyService server but before it is returned to user code. - - We recommend only using this `post_list_data_policies_with_metadata` - interceptor in new development instead of the `post_list_data_policies` interceptor. - When both interceptors are used, this `post_list_data_policies_with_metadata` interceptor runs after the - `post_list_data_policies` interceptor. The (possibly modified) response returned by - `post_list_data_policies` will be passed to - `post_list_data_policies_with_metadata`. 
- """ - return response, metadata - - def pre_rename_data_policy(self, request: datapolicy.RenameDataPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datapolicy.RenameDataPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for rename_data_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataPolicyService server. - """ - return request, metadata - - def post_rename_data_policy(self, response: datapolicy.DataPolicy) -> datapolicy.DataPolicy: - """Post-rpc interceptor for rename_data_policy - - DEPRECATED. Please use the `post_rename_data_policy_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataPolicyService server but before - it is returned to user code. This `post_rename_data_policy` interceptor runs - before the `post_rename_data_policy_with_metadata` interceptor. - """ - return response - - def post_rename_data_policy_with_metadata(self, response: datapolicy.DataPolicy, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datapolicy.DataPolicy, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for rename_data_policy - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataPolicyService server but before it is returned to user code. - - We recommend only using this `post_rename_data_policy_with_metadata` - interceptor in new development instead of the `post_rename_data_policy` interceptor. - When both interceptors are used, this `post_rename_data_policy_with_metadata` interceptor runs after the - `post_rename_data_policy` interceptor. The (possibly modified) response returned by - `post_rename_data_policy` will be passed to - `post_rename_data_policy_with_metadata`. - """ - return response, metadata - - def pre_set_iam_policy(self, request: iam_policy_pb2.SetIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for set_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataPolicyService server. - """ - return request, metadata - - def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: - """Post-rpc interceptor for set_iam_policy - - DEPRECATED. Please use the `post_set_iam_policy_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataPolicyService server but before - it is returned to user code. This `post_set_iam_policy` interceptor runs - before the `post_set_iam_policy_with_metadata` interceptor. - """ - return response - - def post_set_iam_policy_with_metadata(self, response: policy_pb2.Policy, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[policy_pb2.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for set_iam_policy - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataPolicyService server but before it is returned to user code. - - We recommend only using this `post_set_iam_policy_with_metadata` - interceptor in new development instead of the `post_set_iam_policy` interceptor. - When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the - `post_set_iam_policy` interceptor. 
The (possibly modified) response returned by - `post_set_iam_policy` will be passed to - `post_set_iam_policy_with_metadata`. - """ - return response, metadata - - def pre_test_iam_permissions(self, request: iam_policy_pb2.TestIamPermissionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for test_iam_permissions - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataPolicyService server. - """ - return request, metadata - - def post_test_iam_permissions(self, response: iam_policy_pb2.TestIamPermissionsResponse) -> iam_policy_pb2.TestIamPermissionsResponse: - """Post-rpc interceptor for test_iam_permissions - - DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataPolicyService server but before - it is returned to user code. This `post_test_iam_permissions` interceptor runs - before the `post_test_iam_permissions_with_metadata` interceptor. - """ - return response - - def post_test_iam_permissions_with_metadata(self, response: iam_policy_pb2.TestIamPermissionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[iam_policy_pb2.TestIamPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for test_iam_permissions - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataPolicyService server but before it is returned to user code. - - We recommend only using this `post_test_iam_permissions_with_metadata` - interceptor in new development instead of the `post_test_iam_permissions` interceptor. - When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the - `post_test_iam_permissions` interceptor. The (possibly modified) response returned by - `post_test_iam_permissions` will be passed to - `post_test_iam_permissions_with_metadata`. - """ - return response, metadata - - def pre_update_data_policy(self, request: datapolicy.UpdateDataPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datapolicy.UpdateDataPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_data_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataPolicyService server. - """ - return request, metadata - - def post_update_data_policy(self, response: datapolicy.DataPolicy) -> datapolicy.DataPolicy: - """Post-rpc interceptor for update_data_policy - - DEPRECATED. Please use the `post_update_data_policy_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataPolicyService server but before - it is returned to user code. This `post_update_data_policy` interceptor runs - before the `post_update_data_policy_with_metadata` interceptor. - """ - return response - - def post_update_data_policy_with_metadata(self, response: datapolicy.DataPolicy, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datapolicy.DataPolicy, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_data_policy - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataPolicyService server but before it is returned to user code. 
- - We recommend only using this `post_update_data_policy_with_metadata` - interceptor in new development instead of the `post_update_data_policy` interceptor. - When both interceptors are used, this `post_update_data_policy_with_metadata` interceptor runs after the - `post_update_data_policy` interceptor. The (possibly modified) response returned by - `post_update_data_policy` will be passed to - `post_update_data_policy_with_metadata`. - """ - return response, metadata - - -@dataclasses.dataclass -class DataPolicyServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: DataPolicyServiceRestInterceptor - - -class DataPolicyServiceRestTransport(_BaseDataPolicyServiceRestTransport): - """REST backend synchronous transport for DataPolicyService. - - Data Policy Service provides APIs for managing the - label-policy bindings. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'bigquerydatapolicy.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[DataPolicyServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'bigquerydatapolicy.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
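Tying the interceptor hooks above together: a subclass that uses the newer ``*_with_metadata`` variant to inspect response headers might look like the following sketch (the class name and the logging it does are illustrative):

.. code-block:: python

    import logging

    class HeaderInspectingInterceptor(DataPolicyServiceRestInterceptor):
        def post_get_data_policy_with_metadata(self, response, metadata):
            # `metadata` carries the HTTP response headers as (key, value) pairs.
            logging.getLogger(__name__).debug("response headers: %s", dict(metadata))
            return response, metadata

    # transport = DataPolicyServiceRestTransport(interceptor=HeaderInspectingInterceptor())
    # client = DataPolicyServiceClient(transport=transport)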
- # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - url_scheme=url_scheme, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or DataPolicyServiceRestInterceptor() - self._prep_wrapped_messages(client_info) - - class _CreateDataPolicy(_BaseDataPolicyServiceRestTransport._BaseCreateDataPolicy, DataPolicyServiceRestStub): - def __hash__(self): - return hash("DataPolicyServiceRestTransport.CreateDataPolicy") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: datapolicy.CreateDataPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> datapolicy.DataPolicy: - r"""Call the create data policy method over HTTP. - - Args: - request (~.datapolicy.CreateDataPolicyRequest): - The request object. Request message for the - CreateDataPolicy method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.datapolicy.DataPolicy: - Represents the label-policy binding. 
- """ - - http_options = _BaseDataPolicyServiceRestTransport._BaseCreateDataPolicy._get_http_options() - - request, metadata = self._interceptor.pre_create_data_policy(request, metadata) - transcoded_request = _BaseDataPolicyServiceRestTransport._BaseCreateDataPolicy._get_transcoded_request(http_options, request) - - body = _BaseDataPolicyServiceRestTransport._BaseCreateDataPolicy._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDataPolicyServiceRestTransport._BaseCreateDataPolicy._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.datapolicies_v1.DataPolicyServiceClient.CreateDataPolicy", - extra = { - "serviceName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService", - "rpcName": "CreateDataPolicy", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataPolicyServiceRestTransport._CreateDataPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = datapolicy.DataPolicy() - pb_resp = datapolicy.DataPolicy.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_data_policy(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_data_policy_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = datapolicy.DataPolicy.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.datapolicies_v1.DataPolicyServiceClient.create_data_policy", - extra = { - "serviceName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService", - "rpcName": "CreateDataPolicy", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteDataPolicy(_BaseDataPolicyServiceRestTransport._BaseDeleteDataPolicy, DataPolicyServiceRestStub): - def __hash__(self): - return hash("DataPolicyServiceRestTransport.DeleteDataPolicy") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: 
datapolicy.DeleteDataPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the delete data policy method over HTTP. - - Args: - request (~.datapolicy.DeleteDataPolicyRequest): - The request object. Request message for the - DeleteDataPolicy method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - - http_options = _BaseDataPolicyServiceRestTransport._BaseDeleteDataPolicy._get_http_options() - - request, metadata = self._interceptor.pre_delete_data_policy(request, metadata) - transcoded_request = _BaseDataPolicyServiceRestTransport._BaseDeleteDataPolicy._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataPolicyServiceRestTransport._BaseDeleteDataPolicy._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.datapolicies_v1.DataPolicyServiceClient.DeleteDataPolicy", - extra = { - "serviceName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService", - "rpcName": "DeleteDataPolicy", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataPolicyServiceRestTransport._DeleteDataPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _GetDataPolicy(_BaseDataPolicyServiceRestTransport._BaseGetDataPolicy, DataPolicyServiceRestStub): - def __hash__(self): - return hash("DataPolicyServiceRestTransport.GetDataPolicy") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: datapolicy.GetDataPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> datapolicy.DataPolicy: - r"""Call the get data policy method over HTTP. - - Args: - request (~.datapolicy.GetDataPolicyRequest): - The request object. Request message for the GetDataPolicy - method. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.datapolicy.DataPolicy: - Represents the label-policy binding. - """ - - http_options = _BaseDataPolicyServiceRestTransport._BaseGetDataPolicy._get_http_options() - - request, metadata = self._interceptor.pre_get_data_policy(request, metadata) - transcoded_request = _BaseDataPolicyServiceRestTransport._BaseGetDataPolicy._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataPolicyServiceRestTransport._BaseGetDataPolicy._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.datapolicies_v1.DataPolicyServiceClient.GetDataPolicy", - extra = { - "serviceName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService", - "rpcName": "GetDataPolicy", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataPolicyServiceRestTransport._GetDataPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
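Because ``core_exceptions.from_http_response`` maps HTTP status codes onto ``GoogleAPICallError`` subclasses, callers can catch specific failures rather than inspecting status codes themselves. A sketch, assuming ``client`` is an already-constructed ``DataPolicyServiceClient`` and the resource name is illustrative:

.. code-block:: python

    from google.api_core import exceptions as core_exceptions

    try:
        policy = client.get_data_policy(
            name="projects/my-project/locations/us/dataPolicies/my-policy")
    except core_exceptions.NotFound:
        policy = None  # 404: the data policy does not exist
    except core_exceptions.PermissionDenied:
        raise  # 403: surface missing-IAM-permission errors to the caller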
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = datapolicy.DataPolicy() - pb_resp = datapolicy.DataPolicy.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_data_policy(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_data_policy_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = datapolicy.DataPolicy.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.datapolicies_v1.DataPolicyServiceClient.get_data_policy", - extra = { - "serviceName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService", - "rpcName": "GetDataPolicy", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetIamPolicy(_BaseDataPolicyServiceRestTransport._BaseGetIamPolicy, DataPolicyServiceRestStub): - def __hash__(self): - return hash("DataPolicyServiceRestTransport.GetIamPolicy") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: iam_policy_pb2.GetIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. - - Args: - request (~.iam_policy_pb2.GetIamPolicyRequest): - The request object. Request message for ``GetIamPolicy`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which - specifies access controls for Google Cloud resources. - - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members``, or - principals, to a single ``role``. Principals can be user - accounts, service accounts, Google groups, and domains - (such as G Suite). A ``role`` is a named list of - permissions; each ``role`` can be an IAM predefined role - or a user-created custom role. - - For some types of Google Cloud resources, a ``binding`` - can also specify a ``condition``, which is a logical - expression that allows access to a resource only if the - expression evaluates to ``true``. A condition can add - constraints based on attributes of the request, the - resource, or both. 
To learn which resources support - conditions in their IAM policies, see the `IAM - documentation `__. - - **JSON example:** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": [ - "user:eve@example.com" - ], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ], - "etag": "BwWWja0YfJA=", - "version": 3 - } - - **YAML example:** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - etag: BwWWja0YfJA= - version: 3 - - For a description of IAM and its features, see the `IAM - documentation `__. - - """ - - http_options = _BaseDataPolicyServiceRestTransport._BaseGetIamPolicy._get_http_options() - - request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) - transcoded_request = _BaseDataPolicyServiceRestTransport._BaseGetIamPolicy._get_transcoded_request(http_options, request) - - body = _BaseDataPolicyServiceRestTransport._BaseGetIamPolicy._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDataPolicyServiceRestTransport._BaseGetIamPolicy._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.datapolicies_v1.DataPolicyServiceClient.GetIamPolicy", - extra = { - "serviceName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService", - "rpcName": "GetIamPolicy", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataPolicyServiceRestTransport._GetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
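The JSON example in the docstring above corresponds to the following ``policy_pb2`` messages. This sketch builds just the first binding; the conditional second binding is omitted for brevity:

.. code-block:: python

    from google.iam.v1 import policy_pb2

    policy = policy_pb2.Policy(
        bindings=[
            policy_pb2.Binding(
                role="roles/resourcemanager.organizationAdmin",
                members=[
                    "user:mike@example.com",
                    "group:admins@example.com",
                    "domain:google.com",
                    "serviceAccount:my-project-id@appspot.gserviceaccount.com",
                ],
            ),
        ],
        etag=b"BwWWja0YfJA=",
        version=3,
    )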
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = policy_pb2.Policy() - pb_resp = resp - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_iam_policy(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_iam_policy_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.datapolicies_v1.DataPolicyServiceClient.get_iam_policy", - extra = { - "serviceName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService", - "rpcName": "GetIamPolicy", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListDataPolicies(_BaseDataPolicyServiceRestTransport._BaseListDataPolicies, DataPolicyServiceRestStub): - def __hash__(self): - return hash("DataPolicyServiceRestTransport.ListDataPolicies") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: datapolicy.ListDataPoliciesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> datapolicy.ListDataPoliciesResponse: - r"""Call the list data policies method over HTTP. - - Args: - request (~.datapolicy.ListDataPoliciesRequest): - The request object. Request message for the - ListDataPolicies method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.datapolicy.ListDataPoliciesResponse: - Response message for the - ListDataPolicies method. 
- - """ - - http_options = _BaseDataPolicyServiceRestTransport._BaseListDataPolicies._get_http_options() - - request, metadata = self._interceptor.pre_list_data_policies(request, metadata) - transcoded_request = _BaseDataPolicyServiceRestTransport._BaseListDataPolicies._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataPolicyServiceRestTransport._BaseListDataPolicies._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.datapolicies_v1.DataPolicyServiceClient.ListDataPolicies", - extra = { - "serviceName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService", - "rpcName": "ListDataPolicies", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataPolicyServiceRestTransport._ListDataPolicies._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = datapolicy.ListDataPoliciesResponse() - pb_resp = datapolicy.ListDataPoliciesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_data_policies(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_data_policies_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = datapolicy.ListDataPoliciesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.datapolicies_v1.DataPolicyServiceClient.list_data_policies", - extra = { - "serviceName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService", - "rpcName": "ListDataPolicies", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _RenameDataPolicy(_BaseDataPolicyServiceRestTransport._BaseRenameDataPolicy, DataPolicyServiceRestStub): - def __hash__(self): - return hash("DataPolicyServiceRestTransport.RenameDataPolicy") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: datapolicy.RenameDataPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - 
timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> datapolicy.DataPolicy: - r"""Call the rename data policy method over HTTP. - - Args: - request (~.datapolicy.RenameDataPolicyRequest): - The request object. Request message for the - RenameDataPolicy method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.datapolicy.DataPolicy: - Represents the label-policy binding. - """ - - http_options = _BaseDataPolicyServiceRestTransport._BaseRenameDataPolicy._get_http_options() - - request, metadata = self._interceptor.pre_rename_data_policy(request, metadata) - transcoded_request = _BaseDataPolicyServiceRestTransport._BaseRenameDataPolicy._get_transcoded_request(http_options, request) - - body = _BaseDataPolicyServiceRestTransport._BaseRenameDataPolicy._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDataPolicyServiceRestTransport._BaseRenameDataPolicy._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.datapolicies_v1.DataPolicyServiceClient.RenameDataPolicy", - extra = { - "serviceName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService", - "rpcName": "RenameDataPolicy", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataPolicyServiceRestTransport._RenameDataPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
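A note on the ``_get_response`` helpers above: they dispatch on the HTTP verb by attribute lookup on the ``requests`` session, which works because the transcoded request stores the verb as a lowercase method name. A minimal sketch (the URI and host are illustrative):

.. code-block:: python

    import requests

    session = requests.Session()
    transcoded_request = {
        'method': 'get',
        'uri': '/v1/projects/p/locations/l/dataPolicies',
    }

    # getattr(session, 'get') -> session.get; the same lookup covers
    # 'post', 'patch', 'delete', etc. without a chain of if/elif branches.
    send = getattr(session, transcoded_request['method'])
    # response = send("https://bigquerydatapolicy.googleapis.com" + transcoded_request['uri'])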
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = datapolicy.DataPolicy() - pb_resp = datapolicy.DataPolicy.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_rename_data_policy(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_rename_data_policy_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = datapolicy.DataPolicy.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.datapolicies_v1.DataPolicyServiceClient.rename_data_policy", - extra = { - "serviceName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService", - "rpcName": "RenameDataPolicy", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _SetIamPolicy(_BaseDataPolicyServiceRestTransport._BaseSetIamPolicy, DataPolicyServiceRestStub): - def __hash__(self): - return hash("DataPolicyServiceRestTransport.SetIamPolicy") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: iam_policy_pb2.SetIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. - - Args: - request (~.iam_policy_pb2.SetIamPolicyRequest): - The request object. Request message for ``SetIamPolicy`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which - specifies access controls for Google Cloud resources. - - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members``, or - principals, to a single ``role``. Principals can be user - accounts, service accounts, Google groups, and domains - (such as G Suite). A ``role`` is a named list of - permissions; each ``role`` can be an IAM predefined role - or a user-created custom role. - - For some types of Google Cloud resources, a ``binding`` - can also specify a ``condition``, which is a logical - expression that allows access to a resource only if the - expression evaluates to ``true``. A condition can add - constraints based on attributes of the request, the - resource, or both. 
To learn which resources support - conditions in their IAM policies, see the `IAM - documentation `__. - - **JSON example:** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": [ - "user:eve@example.com" - ], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ], - "etag": "BwWWja0YfJA=", - "version": 3 - } - - **YAML example:** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - etag: BwWWja0YfJA= - version: 3 - - For a description of IAM and its features, see the `IAM - documentation `__. - - """ - - http_options = _BaseDataPolicyServiceRestTransport._BaseSetIamPolicy._get_http_options() - - request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) - transcoded_request = _BaseDataPolicyServiceRestTransport._BaseSetIamPolicy._get_transcoded_request(http_options, request) - - body = _BaseDataPolicyServiceRestTransport._BaseSetIamPolicy._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDataPolicyServiceRestTransport._BaseSetIamPolicy._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.datapolicies_v1.DataPolicyServiceClient.SetIamPolicy", - extra = { - "serviceName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService", - "rpcName": "SetIamPolicy", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataPolicyServiceRestTransport._SetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
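# (Editor's note, not part of the generated file: SetIamPolicy replaces the
# entire policy, so the usual pattern is read-modify-write, with the policy
# `etag` guarding against concurrent edits. A hedged sketch -- the resource
# name and member are hypothetical:
#
#     from google.cloud import bigquery_datapolicies_v1
#
#     client = bigquery_datapolicies_v1.DataPolicyServiceClient()
#     resource = "projects/p/locations/us/dataPolicies/my_policy"
#     policy = client.get_iam_policy(request={"resource": resource})
#     policy.bindings.add(
#         role="roles/bigquerydatapolicy.maskedReader",
#         members=["user:alice@example.com"],
#     )
#     client.set_iam_policy(request={"resource": resource, "policy": policy})
#
# If the stored etag changed between the read and the write, the server
# rejects the write and the read-modify-write cycle should be retried.)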
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = policy_pb2.Policy() - pb_resp = resp - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_set_iam_policy(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_set_iam_policy_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.datapolicies_v1.DataPolicyServiceClient.set_iam_policy", - extra = { - "serviceName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService", - "rpcName": "SetIamPolicy", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _TestIamPermissions(_BaseDataPolicyServiceRestTransport._BaseTestIamPermissions, DataPolicyServiceRestStub): - def __hash__(self): - return hash("DataPolicyServiceRestTransport.TestIamPermissions") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: iam_policy_pb2.TestIamPermissionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. - - Args: - request (~.iam_policy_pb2.TestIamPermissionsRequest): - The request object. Request message for ``TestIamPermissions`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.iam_policy_pb2.TestIamPermissionsResponse: - Response message for ``TestIamPermissions`` method. 
- """ - - http_options = _BaseDataPolicyServiceRestTransport._BaseTestIamPermissions._get_http_options() - - request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) - transcoded_request = _BaseDataPolicyServiceRestTransport._BaseTestIamPermissions._get_transcoded_request(http_options, request) - - body = _BaseDataPolicyServiceRestTransport._BaseTestIamPermissions._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDataPolicyServiceRestTransport._BaseTestIamPermissions._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.datapolicies_v1.DataPolicyServiceClient.TestIamPermissions", - extra = { - "serviceName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService", - "rpcName": "TestIamPermissions", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataPolicyServiceRestTransport._TestIamPermissions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = iam_policy_pb2.TestIamPermissionsResponse() - pb_resp = resp - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_test_iam_permissions(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_test_iam_permissions_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.datapolicies_v1.DataPolicyServiceClient.test_iam_permissions", - extra = { - "serviceName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService", - "rpcName": "TestIamPermissions", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateDataPolicy(_BaseDataPolicyServiceRestTransport._BaseUpdateDataPolicy, DataPolicyServiceRestStub): - def __hash__(self): - return hash("DataPolicyServiceRestTransport.UpdateDataPolicy") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: 
datapolicy.UpdateDataPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> datapolicy.DataPolicy: - r"""Call the update data policy method over HTTP. - - Args: - request (~.datapolicy.UpdateDataPolicyRequest): - The request object. Request message for the - UpdateDataPolicy method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.datapolicy.DataPolicy: - Represents the label-policy binding. - """ - - http_options = _BaseDataPolicyServiceRestTransport._BaseUpdateDataPolicy._get_http_options() - - request, metadata = self._interceptor.pre_update_data_policy(request, metadata) - transcoded_request = _BaseDataPolicyServiceRestTransport._BaseUpdateDataPolicy._get_transcoded_request(http_options, request) - - body = _BaseDataPolicyServiceRestTransport._BaseUpdateDataPolicy._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDataPolicyServiceRestTransport._BaseUpdateDataPolicy._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.datapolicies_v1.DataPolicyServiceClient.UpdateDataPolicy", - extra = { - "serviceName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService", - "rpcName": "UpdateDataPolicy", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataPolicyServiceRestTransport._UpdateDataPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass.
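# (Editor's note, not part of the generated file: this RPC maps to HTTP PATCH,
# so only the fields named in `update_mask` are written. A hedged sketch that
# swaps the masking rule on an existing policy -- the policy name is
# hypothetical:
#
#     from google.cloud import bigquery_datapolicies_v1
#     from google.protobuf import field_mask_pb2
#
#     client = bigquery_datapolicies_v1.DataPolicyServiceClient()
#     data_policy = bigquery_datapolicies_v1.DataPolicy(
#         name="projects/p/locations/us/dataPolicies/my_policy",
#         data_masking_policy={"predefined_expression": "ALWAYS_NULL"},
#     )
#     client.update_data_policy(request={
#         "data_policy": data_policy,
#         "update_mask": field_mask_pb2.FieldMask(paths=["data_masking_policy"]),
#     })
# )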
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = datapolicy.DataPolicy() - pb_resp = datapolicy.DataPolicy.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_data_policy(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_data_policy_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = datapolicy.DataPolicy.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.datapolicies_v1.DataPolicyServiceClient.update_data_policy", - extra = { - "serviceName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService", - "rpcName": "UpdateDataPolicy", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - @property - def create_data_policy(self) -> Callable[ - [datapolicy.CreateDataPolicyRequest], - datapolicy.DataPolicy]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateDataPolicy(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_data_policy(self) -> Callable[ - [datapolicy.DeleteDataPolicyRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteDataPolicy(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_data_policy(self) -> Callable[ - [datapolicy.GetDataPolicyRequest], - datapolicy.DataPolicy]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetDataPolicy(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - policy_pb2.Policy]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_data_policies(self) -> Callable[ - [datapolicy.ListDataPoliciesRequest], - datapolicy.ListDataPoliciesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListDataPolicies(self._session, self._host, self._interceptor) # type: ignore - - @property - def rename_data_policy(self) -> Callable[ - [datapolicy.RenameDataPolicyRequest], - datapolicy.DataPolicy]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._RenameDataPolicy(self._session, self._host, self._interceptor) # type: ignore - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - policy_pb2.Policy]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
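# (Editor's note, not part of the generated file: each property in this block
# returns a per-RPC callable stub, giving the transport one uniform Callable
# per method. A hedged sketch of direct use -- `creds`, `resource`, and
# `policy` are hypothetical, and normally DataPolicyServiceClient invokes
# these stubs for you:
#
#     transport = DataPolicyServiceRestTransport(credentials=creds)
#     updated = transport.set_iam_policy(
#         iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy))
# )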
- # In C++ this would require a dynamic_cast - return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - iam_policy_pb2.TestIamPermissionsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_data_policy(self) -> Callable[ - [datapolicy.UpdateDataPolicyRequest], - datapolicy.DataPolicy]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateDataPolicy(self._session, self._host, self._interceptor) # type: ignore - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'DataPolicyServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/transports/rest_base.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/transports/rest_base.py deleted file mode 100644 index 11b8ab0ef92a..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/transports/rest_base.py +++ /dev/null @@ -1,487 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from .base import DataPolicyServiceTransport, DEFAULT_CLIENT_INFO - -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - - -from google.cloud.bigquery_datapolicies_v1.types import datapolicy -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - - -class _BaseDataPolicyServiceRestTransport(DataPolicyServiceTransport): - """Base REST backend transport for DataPolicyService. - - Note: This class is not meant to be used directly. Use its sync and - async sub-classes instead. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'bigquerydatapolicy.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. 
- Args: - host (Optional[str]): - The hostname to connect to (default: 'bigquerydatapolicy.googleapis.com'). - credentials (Optional[Any]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - - class _BaseCreateDataPolicy: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/dataPolicies', - 'body': 'data_policy', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = datapolicy.CreateDataPolicyRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataPolicyServiceRestTransport._BaseCreateDataPolicy._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteDataPolicy: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/dataPolicies/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = datapolicy.DeleteDataPolicyRequest.pb(request) - transcoded_request =
path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataPolicyServiceRestTransport._BaseDeleteDataPolicy._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetDataPolicy: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/dataPolicies/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = datapolicy.GetDataPolicyRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataPolicyServiceRestTransport._BaseGetDataPolicy._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetIamPolicy: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/dataPolicies/*}:getIamPolicy', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = request - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataPolicyServiceRestTransport._BaseGetIamPolicy._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListDataPolicies: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': 
'/v1/{parent=projects/*/locations/*}/dataPolicies', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = datapolicy.ListDataPoliciesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataPolicyServiceRestTransport._BaseListDataPolicies._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseRenameDataPolicy: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/dataPolicies/*}:rename', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = datapolicy.RenameDataPolicyRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataPolicyServiceRestTransport._BaseRenameDataPolicy._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseSetIamPolicy: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/dataPolicies/*}:setIamPolicy', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = request - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataPolicyServiceRestTransport._BaseSetIamPolicy._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class 
_BaseTestIamPermissions: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/dataPolicies/*}:testIamPermissions', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = request - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataPolicyServiceRestTransport._BaseTestIamPermissions._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateDataPolicy: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{data_policy.name=projects/*/locations/*/dataPolicies/*}', - 'body': 'data_policy', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = datapolicy.UpdateDataPolicyRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataPolicyServiceRestTransport._BaseUpdateDataPolicy._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - -__all__=( - '_BaseDataPolicyServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/types/__init__.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/types/__init__.py deleted file mode 100644 index feaae1902ef7..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/types/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .datapolicy import ( - CreateDataPolicyRequest, - DataMaskingPolicy, - DataPolicy, - DeleteDataPolicyRequest, - GetDataPolicyRequest, - ListDataPoliciesRequest, - ListDataPoliciesResponse, - RenameDataPolicyRequest, - UpdateDataPolicyRequest, -) - -__all__ = ( - 'CreateDataPolicyRequest', - 'DataMaskingPolicy', - 'DataPolicy', - 'DeleteDataPolicyRequest', - 'GetDataPolicyRequest', - 'ListDataPoliciesRequest', - 'ListDataPoliciesResponse', - 'RenameDataPolicyRequest', - 'UpdateDataPolicyRequest', -) diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/types/datapolicy.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/types/datapolicy.py deleted file mode 100644 index 64edf05388a3..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/google/cloud/bigquery_datapolicies_v1/types/datapolicy.py +++ /dev/null @@ -1,413 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import field_mask_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.bigquery.datapolicies.v1', - manifest={ - 'CreateDataPolicyRequest', - 'UpdateDataPolicyRequest', - 'RenameDataPolicyRequest', - 'DeleteDataPolicyRequest', - 'GetDataPolicyRequest', - 'ListDataPoliciesRequest', - 'ListDataPoliciesResponse', - 'DataPolicy', - 'DataMaskingPolicy', - }, -) - - -class CreateDataPolicyRequest(proto.Message): - r"""Request message for the CreateDataPolicy method. - - Attributes: - parent (str): - Required. Resource name of the project that the data policy - will belong to. The format is - ``projects/{project_number}/locations/{location_id}``. - data_policy (google.cloud.bigquery_datapolicies_v1.types.DataPolicy): - Required. The data policy to create. The ``name`` field does - not need to be provided for the data policy creation. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - data_policy: 'DataPolicy' = proto.Field( - proto.MESSAGE, - number=2, - message='DataPolicy', - ) - - -class UpdateDataPolicyRequest(proto.Message): - r"""Response message for the UpdateDataPolicy method. - - Attributes: - data_policy (google.cloud.bigquery_datapolicies_v1.types.DataPolicy): - Required. Update the data policy's metadata. - - The target data policy is determined by the ``name`` field. - Other fields are updated to the specified values based on - the field masks. 
- update_mask (google.protobuf.field_mask_pb2.FieldMask): - The update mask applies to the resource. For the - ``FieldMask`` definition, see - https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask - If not set, defaults to all of the fields that are allowed - to update. - - Updates to the ``name`` and ``dataPolicyId`` fields are not - allowed. - """ - - data_policy: 'DataPolicy' = proto.Field( - proto.MESSAGE, - number=1, - message='DataPolicy', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class RenameDataPolicyRequest(proto.Message): - r"""Request message for the RenameDataPolicy method. - - Attributes: - name (str): - Required. Resource name of the data policy to rename. The - format is - ``projects/{project_number}/locations/{location_id}/dataPolicies/{data_policy_id}`` - new_data_policy_id (str): - Required. The new data policy id. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - new_data_policy_id: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteDataPolicyRequest(proto.Message): - r"""Request message for the DeleteDataPolicy method. - - Attributes: - name (str): - Required. Resource name of the data policy to delete. Format - is - ``projects/{project_number}/locations/{location_id}/dataPolicies/{data_policy_id}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class GetDataPolicyRequest(proto.Message): - r"""Request message for the GetDataPolicy method. - - Attributes: - name (str): - Required. Resource name of the requested data policy. Format - is - ``projects/{project_number}/locations/{location_id}/dataPolicies/{data_policy_id}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListDataPoliciesRequest(proto.Message): - r"""Request message for the ListDataPolicies method. - - Attributes: - parent (str): - Required. Resource name of the project for which to list - data policies. Format is - ``projects/{project_number}/locations/{location_id}``. - page_size (int): - The maximum number of data policies to - return. Must be a value between 1 and 1000. - If not set, defaults to 50. - page_token (str): - The ``nextPageToken`` value returned from a previous list - request, if any. If not set, defaults to an empty string. - filter (str): - Filters the data policies by policy tags that they are - associated with. Currently filter only supports - "policy\_tag" based filtering and OR based predicates. - Sample filter can be "policy\_tag: - projects/1/locations/us/taxonomies/2/policyTags/3". You may - also use wildcard such as "policy\_tag: - projects/1/locations/us/taxonomies/2*". Please note that OR - predicates cannot be used with wildcard filters. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListDataPoliciesResponse(proto.Message): - r"""Response message for the ListDataPolicies method. - - Attributes: - data_policies (MutableSequence[google.cloud.bigquery_datapolicies_v1.types.DataPolicy]): - Data policies that belong to the requested - project. - next_page_token (str): - Token used to retrieve the next page of - results, or empty if there are no more results. 
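(Editor's note, not part of the generated docstring: when this response is
obtained through ``DataPolicyServiceClient.list_data_policies``, it is
wrapped in a pager that follows ``next_page_token`` automatically; a hedged
sketch, with a hypothetical parent::

    for policy in client.list_data_policies(parent="projects/p/locations/us"):
        print(policy.name)
)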
- """ - - @property - def raw_page(self): - return self - - data_policies: MutableSequence['DataPolicy'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='DataPolicy', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DataPolicy(proto.Message): - r"""Represents the label-policy binding. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - policy_tag (str): - Policy tag resource name, in the format of - ``projects/{project_number}/locations/{location_id}/taxonomies/{taxonomy_id}/policyTags/{policyTag_id}``. - - This field is a member of `oneof`_ ``matching_label``. - data_masking_policy (google.cloud.bigquery_datapolicies_v1.types.DataMaskingPolicy): - The data masking policy that specifies the - data masking rule to use. - - This field is a member of `oneof`_ ``policy``. - name (str): - Output only. Resource name of this data policy, in the - format of - ``projects/{project_number}/locations/{location_id}/dataPolicies/{data_policy_id}``. - data_policy_type (google.cloud.bigquery_datapolicies_v1.types.DataPolicy.DataPolicyType): - Type of data policy. - data_policy_id (str): - User-assigned (human readable) ID of the data policy that - needs to be unique within a project. Used as - {data_policy_id} in part of the resource name. - """ - class DataPolicyType(proto.Enum): - r"""A list of supported data policy types. - - Values: - DATA_POLICY_TYPE_UNSPECIFIED (0): - Default value for the data policy type. This - should not be used. - COLUMN_LEVEL_SECURITY_POLICY (3): - Used to create a data policy for column-level - security, without data masking. - DATA_MASKING_POLICY (2): - Used to create a data policy for data - masking. - """ - DATA_POLICY_TYPE_UNSPECIFIED = 0 - COLUMN_LEVEL_SECURITY_POLICY = 3 - DATA_MASKING_POLICY = 2 - - policy_tag: str = proto.Field( - proto.STRING, - number=4, - oneof='matching_label', - ) - data_masking_policy: 'DataMaskingPolicy' = proto.Field( - proto.MESSAGE, - number=5, - oneof='policy', - message='DataMaskingPolicy', - ) - name: str = proto.Field( - proto.STRING, - number=1, - ) - data_policy_type: DataPolicyType = proto.Field( - proto.ENUM, - number=2, - enum=DataPolicyType, - ) - data_policy_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class DataMaskingPolicy(proto.Message): - r"""The data masking policy that is used to specify data masking - rule. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - predefined_expression (google.cloud.bigquery_datapolicies_v1.types.DataMaskingPolicy.PredefinedExpression): - A predefined masking expression. - - This field is a member of `oneof`_ ``masking_expression``. - routine (str): - The name of the BigQuery routine that contains the custom - masking routine, in the format of - ``projects/{project_number}/datasets/{dataset_id}/routines/{routine_id}``. - - This field is a member of `oneof`_ ``masking_expression``. - """ - class PredefinedExpression(proto.Enum): - r"""The available masking rules. Learn more here: - https://cloud.google.com/bigquery/docs/column-data-masking-intro#masking_options. - - Values: - PREDEFINED_EXPRESSION_UNSPECIFIED (0): - Default, unspecified predefined expression. 
- No masking will take place since no expression - is specified. - SHA256 (3): - Masking expression to replace data with - SHA-256 hash. - ALWAYS_NULL (5): - Masking expression to replace data with - NULLs. - DEFAULT_MASKING_VALUE (7): - Masking expression to replace data with their default - masking values. The default masking values for each type - listed as below: - - - STRING: "" - - BYTES: b'' - - INTEGER: 0 - - FLOAT: 0.0 - - NUMERIC: 0 - - BOOLEAN: FALSE - - TIMESTAMP: 1970-01-01 00:00:00 UTC - - DATE: 1970-01-01 - - TIME: 00:00:00 - - DATETIME: 1970-01-01T00:00:00 - - GEOGRAPHY: POINT(0 0) - - BIGNUMERIC: 0 - - ARRAY: [] - - STRUCT: NOT_APPLICABLE - - JSON: NULL - LAST_FOUR_CHARACTERS (9): - Masking expression shows the last four characters of text. - The masking behavior is as follows: - - - If text length > 4 characters: Replace text with XXXXX, - append last four characters of original text. - - If text length <= 4 characters: Apply SHA-256 hash. - FIRST_FOUR_CHARACTERS (10): - Masking expression shows the first four characters of text. - The masking behavior is as follows: - - - If text length > 4 characters: Replace text with XXXXX, - prepend first four characters of original text. - - If text length <= 4 characters: Apply SHA-256 hash. - EMAIL_MASK (12): - Masking expression for email addresses. The masking behavior - is as follows: - - - Syntax-valid email address: Replace username with XXXXX. - For example, cloudysanfrancisco@gmail.com becomes - XXXXX@gmail.com. - - Syntax-invalid email address: Apply SHA-256 hash. - - For more information, see `Email - mask `__. - DATE_YEAR_MASK (13): - Masking expression to only show the year of ``Date``, - ``DateTime`` and ``TimeStamp``. For example, with the year - 2076: - - - DATE : 2076-01-01 - - DATETIME : 2076-01-01T00:00:00 - - TIMESTAMP : 2076-01-01 00:00:00 UTC - - Truncation occurs according to the UTC time zone. To change - this, adjust the default time zone using the ``time_zone`` - system variable. For more information, see the System - variables reference. - """ - PREDEFINED_EXPRESSION_UNSPECIFIED = 0 - SHA256 = 3 - ALWAYS_NULL = 5 - DEFAULT_MASKING_VALUE = 7 - LAST_FOUR_CHARACTERS = 9 - FIRST_FOUR_CHARACTERS = 10 - EMAIL_MASK = 12 - DATE_YEAR_MASK = 13 - - predefined_expression: PredefinedExpression = proto.Field( - proto.ENUM, - number=1, - oneof='masking_expression', - enum=PredefinedExpression, - ) - routine: str = proto.Field( - proto.STRING, - number=3, - oneof='masking_expression', - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/mypy.ini b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/mypy.ini deleted file mode 100644 index 574c5aed394b..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/noxfile.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/noxfile.py deleted file mode 100644 index ef8c241efb1c..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/noxfile.py +++ /dev/null @@ -1,280 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import pathlib -import re -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", - "3.12", - "3.13", -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = 'google-cloud-bigquery-datapolicies' - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.13" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "prerelease_deps", -] - -@nox.session(python=ALL_PYTHON) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def unit(session, protobuf_implementation): - """Run the unit test suite.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") - - # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. - # The 'cpp' implementation requires Protobuf<4. - if protobuf_implementation == "cpp": - session.install("protobuf<4") - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/bigquery_datapolicies_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - -@nox.session(python=ALL_PYTHON[-1]) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def prerelease_deps(session, protobuf_implementation): - """Run the unit test suite against pre-release versions of dependencies.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - # Install test environment dependencies - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - - # Install the package without dependencies - session.install('-e', '.', '--no-deps') - - # We test the minimum dependency versions using the minimum Python - # version so the lowest python runtime that we test has a corresponding constraints - # file, located at `testing/constraints--.txt`, which contains all of the - # dependencies and extras. - with open( - CURRENT_DIRECTORY - / "testing" - / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. 
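# (Editor's note, not part of the generated file: the regex below keeps only
# the package name from each `pkg==version` pin; the lookahead `(?===\S+)`
# requires `==` immediately after the captured name. A hedged sketch of the
# same extraction:
#
#     import re
#     text = "google-auth==2.14.1\n# a comment\nproto-plus==1.22.3\n"
#     re.findall(r"^\s*(\S+)(?===\S+)", text, flags=re.MULTILINE)
#     # -> ['google-auth', 'proto-plus']
# )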
- constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - session.install(*constraints_deps) - - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 - "grpcio!=1.67.0rc1", - "grpcio-status", - "protobuf", - "proto-plus", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) - - # Remaining dependencies - other_deps = [ - "requests", - ] - session.install(*other_deps) - - # Print out prerelease package versions - - session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") - session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("python", "-c", "import grpc; print(grpc.__version__)") - session.run( - "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" - ) - session.run( - "python", "-c", "import proto; print(proto.__version__)" - ) - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/bigquery_datapolicies_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. - """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. 
-    """
-    session.install("flake8", BLACK_VERSION)
-    session.run(
-        "black",
-        "--check",
-        *BLACK_PATHS,
-    )
-    session.run("flake8", "google", "tests", "samples")
-
-
-@nox.session(python=DEFAULT_PYTHON_VERSION)
-def blacken(session):
-    """Run black. Format code to a uniform standard."""
-    session.install(BLACK_VERSION)
-    session.run(
-        "black",
-        *BLACK_PATHS,
-    )
diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_create_data_policy_async.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_create_data_policy_async.py
deleted file mode 100644
index 9da9b3446150..000000000000
--- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_create_data_policy_async.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for CreateDataPolicy
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-# python3 -m pip install google-cloud-bigquery-datapolicies
-
-
-# [START bigquerydatapolicy_v1_generated_DataPolicyService_CreateDataPolicy_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datapolicies_v1 - - -async def sample_create_data_policy(): - # Create a client - client = bigquery_datapolicies_v1.DataPolicyServiceAsyncClient() - - # Initialize request argument(s) - data_policy = bigquery_datapolicies_v1.DataPolicy() - data_policy.policy_tag = "policy_tag_value" - data_policy.data_masking_policy.predefined_expression = "DATE_YEAR_MASK" - - request = bigquery_datapolicies_v1.CreateDataPolicyRequest( - parent="parent_value", - data_policy=data_policy, - ) - - # Make the request - response = await client.create_data_policy(request=request) - - # Handle the response - print(response) - -# [END bigquerydatapolicy_v1_generated_DataPolicyService_CreateDataPolicy_async] diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_create_data_policy_sync.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_create_data_policy_sync.py deleted file mode 100644 index 14a350a3d733..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_create_data_policy_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDataPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datapolicies - - -# [START bigquerydatapolicy_v1_generated_DataPolicyService_CreateDataPolicy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datapolicies_v1 - - -def sample_create_data_policy(): - # Create a client - client = bigquery_datapolicies_v1.DataPolicyServiceClient() - - # Initialize request argument(s) - data_policy = bigquery_datapolicies_v1.DataPolicy() - data_policy.policy_tag = "policy_tag_value" - data_policy.data_masking_policy.predefined_expression = "DATE_YEAR_MASK" - - request = bigquery_datapolicies_v1.CreateDataPolicyRequest( - parent="parent_value", - data_policy=data_policy, - ) - - # Make the request - response = client.create_data_policy(request=request) - - # Handle the response - print(response) - -# [END bigquerydatapolicy_v1_generated_DataPolicyService_CreateDataPolicy_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_delete_data_policy_async.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_delete_data_policy_async.py deleted file mode 100644 index b6c297f119b8..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_delete_data_policy_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDataPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datapolicies - - -# [START bigquerydatapolicy_v1_generated_DataPolicyService_DeleteDataPolicy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datapolicies_v1 - - -async def sample_delete_data_policy(): - # Create a client - client = bigquery_datapolicies_v1.DataPolicyServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_datapolicies_v1.DeleteDataPolicyRequest( - name="name_value", - ) - - # Make the request - await client.delete_data_policy(request=request) - - -# [END bigquerydatapolicy_v1_generated_DataPolicyService_DeleteDataPolicy_async] diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_delete_data_policy_sync.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_delete_data_policy_sync.py deleted file mode 100644 index b3b24d752f61..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_delete_data_policy_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDataPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datapolicies - - -# [START bigquerydatapolicy_v1_generated_DataPolicyService_DeleteDataPolicy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datapolicies_v1 - - -def sample_delete_data_policy(): - # Create a client - client = bigquery_datapolicies_v1.DataPolicyServiceClient() - - # Initialize request argument(s) - request = bigquery_datapolicies_v1.DeleteDataPolicyRequest( - name="name_value", - ) - - # Make the request - client.delete_data_policy(request=request) - - -# [END bigquerydatapolicy_v1_generated_DataPolicyService_DeleteDataPolicy_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_get_data_policy_async.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_get_data_policy_async.py deleted file mode 100644 index 6ed331e71579..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_get_data_policy_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDataPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datapolicies - - -# [START bigquerydatapolicy_v1_generated_DataPolicyService_GetDataPolicy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datapolicies_v1 - - -async def sample_get_data_policy(): - # Create a client - client = bigquery_datapolicies_v1.DataPolicyServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_datapolicies_v1.GetDataPolicyRequest( - name="name_value", - ) - - # Make the request - response = await client.get_data_policy(request=request) - - # Handle the response - print(response) - -# [END bigquerydatapolicy_v1_generated_DataPolicyService_GetDataPolicy_async] diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_get_data_policy_sync.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_get_data_policy_sync.py deleted file mode 100644 index 55f9ec3db9f4..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_get_data_policy_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDataPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datapolicies - - -# [START bigquerydatapolicy_v1_generated_DataPolicyService_GetDataPolicy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datapolicies_v1 - - -def sample_get_data_policy(): - # Create a client - client = bigquery_datapolicies_v1.DataPolicyServiceClient() - - # Initialize request argument(s) - request = bigquery_datapolicies_v1.GetDataPolicyRequest( - name="name_value", - ) - - # Make the request - response = client.get_data_policy(request=request) - - # Handle the response - print(response) - -# [END bigquerydatapolicy_v1_generated_DataPolicyService_GetDataPolicy_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_get_iam_policy_async.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_get_iam_policy_async.py deleted file mode 100644 index e2ec7ce3b1e8..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_get_iam_policy_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datapolicies - - -# [START bigquerydatapolicy_v1_generated_DataPolicyService_GetIamPolicy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datapolicies_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -async def sample_get_iam_policy(): - # Create a client - client = bigquery_datapolicies_v1.DataPolicyServiceAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.get_iam_policy(request=request) - - # Handle the response - print(response) - -# [END bigquerydatapolicy_v1_generated_DataPolicyService_GetIamPolicy_async] diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_get_iam_policy_sync.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_get_iam_policy_sync.py deleted file mode 100644 index c2aa760fad46..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_get_iam_policy_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datapolicies - - -# [START bigquerydatapolicy_v1_generated_DataPolicyService_GetIamPolicy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datapolicies_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -def sample_get_iam_policy(): - # Create a client - client = bigquery_datapolicies_v1.DataPolicyServiceClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.get_iam_policy(request=request) - - # Handle the response - print(response) - -# [END bigquerydatapolicy_v1_generated_DataPolicyService_GetIamPolicy_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_list_data_policies_async.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_list_data_policies_async.py deleted file mode 100644 index b03963b16b01..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_list_data_policies_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDataPolicies -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datapolicies - - -# [START bigquerydatapolicy_v1_generated_DataPolicyService_ListDataPolicies_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service
-# client as shown in:
-# https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import bigquery_datapolicies_v1
-
-
-async def sample_list_data_policies():
-    # Create a client
-    client = bigquery_datapolicies_v1.DataPolicyServiceAsyncClient()
-
-    # Initialize request argument(s)
-    request = bigquery_datapolicies_v1.ListDataPoliciesRequest(
-        parent="parent_value",
-    )
-
-    # Make the request (await the async call to obtain the pager)
-    page_result = await client.list_data_policies(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END bigquerydatapolicy_v1_generated_DataPolicyService_ListDataPolicies_async]
diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_list_data_policies_sync.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_list_data_policies_sync.py
deleted file mode 100644
index 7444fd6824c9..000000000000
--- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_list_data_policies_sync.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListDataPolicies
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-# python3 -m pip install google-cloud-bigquery-datapolicies
-
-
-# [START bigquerydatapolicy_v1_generated_DataPolicyService_ListDataPolicies_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datapolicies_v1 - - -def sample_list_data_policies(): - # Create a client - client = bigquery_datapolicies_v1.DataPolicyServiceClient() - - # Initialize request argument(s) - request = bigquery_datapolicies_v1.ListDataPoliciesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_policies(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END bigquerydatapolicy_v1_generated_DataPolicyService_ListDataPolicies_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_rename_data_policy_async.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_rename_data_policy_async.py deleted file mode 100644 index ed145a1f090d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_rename_data_policy_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RenameDataPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datapolicies - - -# [START bigquerydatapolicy_v1_generated_DataPolicyService_RenameDataPolicy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datapolicies_v1 - - -async def sample_rename_data_policy(): - # Create a client - client = bigquery_datapolicies_v1.DataPolicyServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_datapolicies_v1.RenameDataPolicyRequest( - name="name_value", - new_data_policy_id="new_data_policy_id_value", - ) - - # Make the request - response = await client.rename_data_policy(request=request) - - # Handle the response - print(response) - -# [END bigquerydatapolicy_v1_generated_DataPolicyService_RenameDataPolicy_async] diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_rename_data_policy_sync.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_rename_data_policy_sync.py deleted file mode 100644 index dd7020b0c9b7..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_rename_data_policy_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for RenameDataPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datapolicies - - -# [START bigquerydatapolicy_v1_generated_DataPolicyService_RenameDataPolicy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datapolicies_v1 - - -def sample_rename_data_policy(): - # Create a client - client = bigquery_datapolicies_v1.DataPolicyServiceClient() - - # Initialize request argument(s) - request = bigquery_datapolicies_v1.RenameDataPolicyRequest( - name="name_value", - new_data_policy_id="new_data_policy_id_value", - ) - - # Make the request - response = client.rename_data_policy(request=request) - - # Handle the response - print(response) - -# [END bigquerydatapolicy_v1_generated_DataPolicyService_RenameDataPolicy_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_set_iam_policy_async.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_set_iam_policy_async.py deleted file mode 100644 index 0ced9dc1b660..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_set_iam_policy_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datapolicies - - -# [START bigquerydatapolicy_v1_generated_DataPolicyService_SetIamPolicy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datapolicies_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -async def sample_set_iam_policy(): - # Create a client - client = bigquery_datapolicies_v1.DataPolicyServiceAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.set_iam_policy(request=request) - - # Handle the response - print(response) - -# [END bigquerydatapolicy_v1_generated_DataPolicyService_SetIamPolicy_async] diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_set_iam_policy_sync.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_set_iam_policy_sync.py deleted file mode 100644 index 89ffc46f5217..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_set_iam_policy_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datapolicies - - -# [START bigquerydatapolicy_v1_generated_DataPolicyService_SetIamPolicy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datapolicies_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -def sample_set_iam_policy(): - # Create a client - client = bigquery_datapolicies_v1.DataPolicyServiceClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.set_iam_policy(request=request) - - # Handle the response - print(response) - -# [END bigquerydatapolicy_v1_generated_DataPolicyService_SetIamPolicy_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_test_iam_permissions_async.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_test_iam_permissions_async.py deleted file mode 100644 index 721d9bf4f363..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_test_iam_permissions_async.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for TestIamPermissions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datapolicies - - -# [START bigquerydatapolicy_v1_generated_DataPolicyService_TestIamPermissions_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datapolicies_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -async def sample_test_iam_permissions(): - # Create a client - client = bigquery_datapolicies_v1.DataPolicyServiceAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = await client.test_iam_permissions(request=request) - - # Handle the response - print(response) - -# [END bigquerydatapolicy_v1_generated_DataPolicyService_TestIamPermissions_async] diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_test_iam_permissions_sync.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_test_iam_permissions_sync.py deleted file mode 100644 index 9891c9c73640..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_test_iam_permissions_sync.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for TestIamPermissions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datapolicies - - -# [START bigquerydatapolicy_v1_generated_DataPolicyService_TestIamPermissions_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datapolicies_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -def sample_test_iam_permissions(): - # Create a client - client = bigquery_datapolicies_v1.DataPolicyServiceClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = client.test_iam_permissions(request=request) - - # Handle the response - print(response) - -# [END bigquerydatapolicy_v1_generated_DataPolicyService_TestIamPermissions_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_update_data_policy_async.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_update_data_policy_async.py deleted file mode 100644 index 19423ba797d5..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_update_data_policy_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDataPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datapolicies - - -# [START bigquerydatapolicy_v1_generated_DataPolicyService_UpdateDataPolicy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datapolicies_v1 - - -async def sample_update_data_policy(): - # Create a client - client = bigquery_datapolicies_v1.DataPolicyServiceAsyncClient() - - # Initialize request argument(s) - data_policy = bigquery_datapolicies_v1.DataPolicy() - data_policy.policy_tag = "policy_tag_value" - data_policy.data_masking_policy.predefined_expression = "DATE_YEAR_MASK" - - request = bigquery_datapolicies_v1.UpdateDataPolicyRequest( - data_policy=data_policy, - ) - - # Make the request - response = await client.update_data_policy(request=request) - - # Handle the response - print(response) - -# [END bigquerydatapolicy_v1_generated_DataPolicyService_UpdateDataPolicy_async] diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_update_data_policy_sync.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_update_data_policy_sync.py deleted file mode 100644 index 520eeb1793d5..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/bigquerydatapolicy_v1_generated_data_policy_service_update_data_policy_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDataPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datapolicies - - -# [START bigquerydatapolicy_v1_generated_DataPolicyService_UpdateDataPolicy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datapolicies_v1 - - -def sample_update_data_policy(): - # Create a client - client = bigquery_datapolicies_v1.DataPolicyServiceClient() - - # Initialize request argument(s) - data_policy = bigquery_datapolicies_v1.DataPolicy() - data_policy.policy_tag = "policy_tag_value" - data_policy.data_masking_policy.predefined_expression = "DATE_YEAR_MASK" - - request = bigquery_datapolicies_v1.UpdateDataPolicyRequest( - data_policy=data_policy, - ) - - # Make the request - response = client.update_data_policy(request=request) - - # Handle the response - print(response) - -# [END bigquerydatapolicy_v1_generated_DataPolicyService_UpdateDataPolicy_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1.json b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1.json deleted file mode 100644 index 8f3adb520dfb..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1.json +++ /dev/null @@ -1,1458 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.bigquery.datapolicies.v1", - "version": "v1" - } - ], - "language": "PYTHON", - "name": "google-cloud-bigquery-datapolicies", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_datapolicies_v1.DataPolicyServiceAsyncClient", - "shortName": "DataPolicyServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_datapolicies_v1.DataPolicyServiceAsyncClient.create_data_policy", - "method": { - "fullName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService.CreateDataPolicy", - "service": { - "fullName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService", - "shortName": "DataPolicyService" - }, - "shortName": "CreateDataPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datapolicies_v1.types.CreateDataPolicyRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "data_policy", - "type": "google.cloud.bigquery_datapolicies_v1.types.DataPolicy" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_datapolicies_v1.types.DataPolicy", - "shortName": "create_data_policy" - }, - "description": "Sample for CreateDataPolicy", - "file": "bigquerydatapolicy_v1_generated_data_policy_service_create_data_policy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatapolicy_v1_generated_DataPolicyService_CreateDataPolicy_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"bigquerydatapolicy_v1_generated_data_policy_service_create_data_policy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_datapolicies_v1.DataPolicyServiceClient", - "shortName": "DataPolicyServiceClient" - }, - "fullName": "google.cloud.bigquery_datapolicies_v1.DataPolicyServiceClient.create_data_policy", - "method": { - "fullName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService.CreateDataPolicy", - "service": { - "fullName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService", - "shortName": "DataPolicyService" - }, - "shortName": "CreateDataPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datapolicies_v1.types.CreateDataPolicyRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "data_policy", - "type": "google.cloud.bigquery_datapolicies_v1.types.DataPolicy" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_datapolicies_v1.types.DataPolicy", - "shortName": "create_data_policy" - }, - "description": "Sample for CreateDataPolicy", - "file": "bigquerydatapolicy_v1_generated_data_policy_service_create_data_policy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatapolicy_v1_generated_DataPolicyService_CreateDataPolicy_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatapolicy_v1_generated_data_policy_service_create_data_policy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_datapolicies_v1.DataPolicyServiceAsyncClient", - "shortName": "DataPolicyServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_datapolicies_v1.DataPolicyServiceAsyncClient.delete_data_policy", - "method": { - "fullName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService.DeleteDataPolicy", - "service": { - "fullName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService", - "shortName": "DataPolicyService" - }, - "shortName": "DeleteDataPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datapolicies_v1.types.DeleteDataPolicyRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_data_policy" - }, - "description": "Sample for DeleteDataPolicy", - "file": "bigquerydatapolicy_v1_generated_data_policy_service_delete_data_policy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatapolicy_v1_generated_DataPolicyService_DeleteDataPolicy_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": 
"REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatapolicy_v1_generated_data_policy_service_delete_data_policy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_datapolicies_v1.DataPolicyServiceClient", - "shortName": "DataPolicyServiceClient" - }, - "fullName": "google.cloud.bigquery_datapolicies_v1.DataPolicyServiceClient.delete_data_policy", - "method": { - "fullName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService.DeleteDataPolicy", - "service": { - "fullName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService", - "shortName": "DataPolicyService" - }, - "shortName": "DeleteDataPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datapolicies_v1.types.DeleteDataPolicyRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_data_policy" - }, - "description": "Sample for DeleteDataPolicy", - "file": "bigquerydatapolicy_v1_generated_data_policy_service_delete_data_policy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatapolicy_v1_generated_DataPolicyService_DeleteDataPolicy_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatapolicy_v1_generated_data_policy_service_delete_data_policy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_datapolicies_v1.DataPolicyServiceAsyncClient", - "shortName": "DataPolicyServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_datapolicies_v1.DataPolicyServiceAsyncClient.get_data_policy", - "method": { - "fullName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService.GetDataPolicy", - "service": { - "fullName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService", - "shortName": "DataPolicyService" - }, - "shortName": "GetDataPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datapolicies_v1.types.GetDataPolicyRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_datapolicies_v1.types.DataPolicy", - "shortName": "get_data_policy" - }, - "description": "Sample for GetDataPolicy", - "file": "bigquerydatapolicy_v1_generated_data_policy_service_get_data_policy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatapolicy_v1_generated_DataPolicyService_GetDataPolicy_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": 
"REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatapolicy_v1_generated_data_policy_service_get_data_policy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_datapolicies_v1.DataPolicyServiceClient", - "shortName": "DataPolicyServiceClient" - }, - "fullName": "google.cloud.bigquery_datapolicies_v1.DataPolicyServiceClient.get_data_policy", - "method": { - "fullName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService.GetDataPolicy", - "service": { - "fullName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService", - "shortName": "DataPolicyService" - }, - "shortName": "GetDataPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datapolicies_v1.types.GetDataPolicyRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_datapolicies_v1.types.DataPolicy", - "shortName": "get_data_policy" - }, - "description": "Sample for GetDataPolicy", - "file": "bigquerydatapolicy_v1_generated_data_policy_service_get_data_policy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatapolicy_v1_generated_DataPolicyService_GetDataPolicy_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatapolicy_v1_generated_data_policy_service_get_data_policy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_datapolicies_v1.DataPolicyServiceAsyncClient", - "shortName": "DataPolicyServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_datapolicies_v1.DataPolicyServiceAsyncClient.get_iam_policy", - "method": { - "fullName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService.GetIamPolicy", - "service": { - "fullName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService", - "shortName": "DataPolicyService" - }, - "shortName": "GetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "get_iam_policy" - }, - "description": "Sample for GetIamPolicy", - "file": "bigquerydatapolicy_v1_generated_data_policy_service_get_iam_policy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatapolicy_v1_generated_DataPolicyService_GetIamPolicy_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": 
"REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatapolicy_v1_generated_data_policy_service_get_iam_policy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_datapolicies_v1.DataPolicyServiceClient", - "shortName": "DataPolicyServiceClient" - }, - "fullName": "google.cloud.bigquery_datapolicies_v1.DataPolicyServiceClient.get_iam_policy", - "method": { - "fullName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService.GetIamPolicy", - "service": { - "fullName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService", - "shortName": "DataPolicyService" - }, - "shortName": "GetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "get_iam_policy" - }, - "description": "Sample for GetIamPolicy", - "file": "bigquerydatapolicy_v1_generated_data_policy_service_get_iam_policy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatapolicy_v1_generated_DataPolicyService_GetIamPolicy_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatapolicy_v1_generated_data_policy_service_get_iam_policy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_datapolicies_v1.DataPolicyServiceAsyncClient", - "shortName": "DataPolicyServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_datapolicies_v1.DataPolicyServiceAsyncClient.list_data_policies", - "method": { - "fullName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService.ListDataPolicies", - "service": { - "fullName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService", - "shortName": "DataPolicyService" - }, - "shortName": "ListDataPolicies" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datapolicies_v1.types.ListDataPoliciesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_datapolicies_v1.services.data_policy_service.pagers.ListDataPoliciesAsyncPager", - "shortName": "list_data_policies" - }, - "description": "Sample for ListDataPolicies", - "file": "bigquerydatapolicy_v1_generated_data_policy_service_list_data_policies_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatapolicy_v1_generated_DataPolicyService_ListDataPolicies_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": 
"CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatapolicy_v1_generated_data_policy_service_list_data_policies_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_datapolicies_v1.DataPolicyServiceClient", - "shortName": "DataPolicyServiceClient" - }, - "fullName": "google.cloud.bigquery_datapolicies_v1.DataPolicyServiceClient.list_data_policies", - "method": { - "fullName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService.ListDataPolicies", - "service": { - "fullName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService", - "shortName": "DataPolicyService" - }, - "shortName": "ListDataPolicies" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datapolicies_v1.types.ListDataPoliciesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_datapolicies_v1.services.data_policy_service.pagers.ListDataPoliciesPager", - "shortName": "list_data_policies" - }, - "description": "Sample for ListDataPolicies", - "file": "bigquerydatapolicy_v1_generated_data_policy_service_list_data_policies_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatapolicy_v1_generated_DataPolicyService_ListDataPolicies_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatapolicy_v1_generated_data_policy_service_list_data_policies_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_datapolicies_v1.DataPolicyServiceAsyncClient", - "shortName": "DataPolicyServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_datapolicies_v1.DataPolicyServiceAsyncClient.rename_data_policy", - "method": { - "fullName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService.RenameDataPolicy", - "service": { - "fullName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService", - "shortName": "DataPolicyService" - }, - "shortName": "RenameDataPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datapolicies_v1.types.RenameDataPolicyRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "new_data_policy_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_datapolicies_v1.types.DataPolicy", - "shortName": "rename_data_policy" - }, - "description": "Sample for RenameDataPolicy", - "file": "bigquerydatapolicy_v1_generated_data_policy_service_rename_data_policy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"bigquerydatapolicy_v1_generated_DataPolicyService_RenameDataPolicy_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatapolicy_v1_generated_data_policy_service_rename_data_policy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_datapolicies_v1.DataPolicyServiceClient", - "shortName": "DataPolicyServiceClient" - }, - "fullName": "google.cloud.bigquery_datapolicies_v1.DataPolicyServiceClient.rename_data_policy", - "method": { - "fullName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService.RenameDataPolicy", - "service": { - "fullName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService", - "shortName": "DataPolicyService" - }, - "shortName": "RenameDataPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datapolicies_v1.types.RenameDataPolicyRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "new_data_policy_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_datapolicies_v1.types.DataPolicy", - "shortName": "rename_data_policy" - }, - "description": "Sample for RenameDataPolicy", - "file": "bigquerydatapolicy_v1_generated_data_policy_service_rename_data_policy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatapolicy_v1_generated_DataPolicyService_RenameDataPolicy_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatapolicy_v1_generated_data_policy_service_rename_data_policy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_datapolicies_v1.DataPolicyServiceAsyncClient", - "shortName": "DataPolicyServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_datapolicies_v1.DataPolicyServiceAsyncClient.set_iam_policy", - "method": { - "fullName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService.SetIamPolicy", - "service": { - "fullName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService", - "shortName": "DataPolicyService" - }, - "shortName": "SetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "set_iam_policy" - }, - "description": "Sample for SetIamPolicy", - "file": 
"bigquerydatapolicy_v1_generated_data_policy_service_set_iam_policy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatapolicy_v1_generated_DataPolicyService_SetIamPolicy_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatapolicy_v1_generated_data_policy_service_set_iam_policy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_datapolicies_v1.DataPolicyServiceClient", - "shortName": "DataPolicyServiceClient" - }, - "fullName": "google.cloud.bigquery_datapolicies_v1.DataPolicyServiceClient.set_iam_policy", - "method": { - "fullName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService.SetIamPolicy", - "service": { - "fullName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService", - "shortName": "DataPolicyService" - }, - "shortName": "SetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "set_iam_policy" - }, - "description": "Sample for SetIamPolicy", - "file": "bigquerydatapolicy_v1_generated_data_policy_service_set_iam_policy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatapolicy_v1_generated_DataPolicyService_SetIamPolicy_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatapolicy_v1_generated_data_policy_service_set_iam_policy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_datapolicies_v1.DataPolicyServiceAsyncClient", - "shortName": "DataPolicyServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_datapolicies_v1.DataPolicyServiceAsyncClient.test_iam_permissions", - "method": { - "fullName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService.TestIamPermissions", - "service": { - "fullName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService", - "shortName": "DataPolicyService" - }, - "shortName": "TestIamPermissions" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", - "shortName": "test_iam_permissions" - }, - "description": "Sample for TestIamPermissions", - "file": 
"bigquerydatapolicy_v1_generated_data_policy_service_test_iam_permissions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatapolicy_v1_generated_DataPolicyService_TestIamPermissions_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatapolicy_v1_generated_data_policy_service_test_iam_permissions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_datapolicies_v1.DataPolicyServiceClient", - "shortName": "DataPolicyServiceClient" - }, - "fullName": "google.cloud.bigquery_datapolicies_v1.DataPolicyServiceClient.test_iam_permissions", - "method": { - "fullName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService.TestIamPermissions", - "service": { - "fullName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService", - "shortName": "DataPolicyService" - }, - "shortName": "TestIamPermissions" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", - "shortName": "test_iam_permissions" - }, - "description": "Sample for TestIamPermissions", - "file": "bigquerydatapolicy_v1_generated_data_policy_service_test_iam_permissions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatapolicy_v1_generated_DataPolicyService_TestIamPermissions_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatapolicy_v1_generated_data_policy_service_test_iam_permissions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_datapolicies_v1.DataPolicyServiceAsyncClient", - "shortName": "DataPolicyServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_datapolicies_v1.DataPolicyServiceAsyncClient.update_data_policy", - "method": { - "fullName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService.UpdateDataPolicy", - "service": { - "fullName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService", - "shortName": "DataPolicyService" - }, - "shortName": "UpdateDataPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datapolicies_v1.types.UpdateDataPolicyRequest" - }, - { - "name": "data_policy", - "type": "google.cloud.bigquery_datapolicies_v1.types.DataPolicy" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - 
"name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_datapolicies_v1.types.DataPolicy", - "shortName": "update_data_policy" - }, - "description": "Sample for UpdateDataPolicy", - "file": "bigquerydatapolicy_v1_generated_data_policy_service_update_data_policy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatapolicy_v1_generated_DataPolicyService_UpdateDataPolicy_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatapolicy_v1_generated_data_policy_service_update_data_policy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_datapolicies_v1.DataPolicyServiceClient", - "shortName": "DataPolicyServiceClient" - }, - "fullName": "google.cloud.bigquery_datapolicies_v1.DataPolicyServiceClient.update_data_policy", - "method": { - "fullName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService.UpdateDataPolicy", - "service": { - "fullName": "google.cloud.bigquery.datapolicies.v1.DataPolicyService", - "shortName": "DataPolicyService" - }, - "shortName": "UpdateDataPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datapolicies_v1.types.UpdateDataPolicyRequest" - }, - { - "name": "data_policy", - "type": "google.cloud.bigquery_datapolicies_v1.types.DataPolicy" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_datapolicies_v1.types.DataPolicy", - "shortName": "update_data_policy" - }, - "description": "Sample for UpdateDataPolicy", - "file": "bigquerydatapolicy_v1_generated_data_policy_service_update_data_policy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatapolicy_v1_generated_DataPolicyService_UpdateDataPolicy_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatapolicy_v1_generated_data_policy_service_update_data_policy_sync.py" - } - ] -} diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/scripts/fixup_bigquery_datapolicies_v1_keywords.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/scripts/fixup_bigquery_datapolicies_v1_keywords.py deleted file mode 100644 index 1d693e278a21..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/scripts/fixup_bigquery_datapolicies_v1_keywords.py +++ /dev/null @@ -1,184 +0,0 @@ -#! 
/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class bigquery_datapoliciesCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_data_policy': ('parent', 'data_policy', ), - 'delete_data_policy': ('name', ), - 'get_data_policy': ('name', ), - 'get_iam_policy': ('resource', 'options', ), - 'list_data_policies': ('parent', 'page_size', 'page_token', 'filter', ), - 'rename_data_policy': ('name', 'new_data_policy_id', ), - 'set_iam_policy': ('resource', 'policy', 'update_mask', ), - 'test_iam_permissions': ('resource', 'permissions', ), - 'update_data_policy': ('data_policy', 'update_mask', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=bigquery_datapoliciesCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. 
- - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the bigquery_datapolicies client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/setup.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/setup.py deleted file mode 100644 index 474139ddf62e..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/setup.py +++ /dev/null @@ -1,99 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
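For context on the fixup script above: it rewrites flattened, positional call sites into the single-``request`` style, keeping the control parameters (``retry``, ``timeout``, ``metadata``) as keyword arguments. Below is a hypothetical before/after for ``create_data_policy``, whose keyword order (``parent``, ``data_policy``) comes from ``METHOD_TO_PARAMS``; the project and location values are invented, and running the calls for real would require ADC credentials.

.. code-block:: python

    from google.cloud import bigquery_datapolicies_v1

    client = bigquery_datapolicies_v1.DataPolicyServiceClient()

    # Before fixup: flattened, positional arguments.
    policy = client.create_data_policy(
        "projects/my-project/locations/us",     # parent
        bigquery_datapolicies_v1.DataPolicy(),  # data_policy
        timeout=30.0,
    )

    # After fixup: positional arguments are folded into a single
    # `request` dict; the control parameter `timeout` stays a keyword.
    policy = client.create_data_policy(
        request={
            "parent": "projects/my-project/locations/us",
            "data_policy": bigquery_datapolicies_v1.DataPolicy(),
        },
        timeout=30.0,
    )

The script itself would be invoked as, e.g., ``python3 fixup_bigquery_datapolicies_v1_keywords.py -d ./src -o ./out``, where the output directory must already exist and be empty.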
-# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-bigquery-datapolicies' - - -description = "Google Cloud Bigquery Datapolicies API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/bigquery_datapolicies/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "grpc-google-iam-v1 >= 0.14.0, <1.0.0dev", -] -extras = { -} -url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-datapolicies" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - extras_require=extras, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/testing/constraints-3.10.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/testing/constraints-3.10.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/testing/constraints-3.11.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/testing/constraints-3.11.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/testing/constraints-3.12.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/testing/constraints-3.12.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/testing/constraints-3.13.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/testing/constraints-3.13.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/testing/constraints-3.7.txt deleted file mode 100644 index fb7e93a1b473..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/testing/constraints-3.7.txt +++ /dev/null @@ -1,11 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -proto-plus==1.22.3 -protobuf==3.20.2 -grpc-google-iam-v1==0.14.0 diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/testing/constraints-3.8.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/testing/constraints-3.8.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/testing/constraints-3.9.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/testing/constraints-3.9.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/tests/__init__.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/tests/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/tests/unit/__init__.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/tests/unit/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/tests/unit/gapic/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/tests/unit/gapic/bigquery_datapolicies_v1/__init__.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/tests/unit/gapic/bigquery_datapolicies_v1/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/tests/unit/gapic/bigquery_datapolicies_v1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/tests/unit/gapic/bigquery_datapolicies_v1/test_data_policy_service.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/tests/unit/gapic/bigquery_datapolicies_v1/test_data_policy_service.py deleted file mode 100644 index 7fc634488efe..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1/tests/unit/gapic/bigquery_datapolicies_v1/test_data_policy_service.py +++ /dev/null @@ -1,7526 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.bigquery_datapolicies_v1.services.data_policy_service import DataPolicyServiceAsyncClient -from google.cloud.bigquery_datapolicies_v1.services.data_policy_service import DataPolicyServiceClient -from google.cloud.bigquery_datapolicies_v1.services.data_policy_service import pagers -from google.cloud.bigquery_datapolicies_v1.services.data_policy_service import transports -from google.cloud.bigquery_datapolicies_v1.types import datapolicy -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import field_mask_pb2 # type: 
ignore -from google.type import expr_pb2 # type: ignore -import google.auth - - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": "service account credentials", - "principal": "service-account@example.com", -} -CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert DataPolicyServiceClient._get_default_mtls_endpoint(None) is None - assert DataPolicyServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert DataPolicyServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert DataPolicyServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert DataPolicyServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert DataPolicyServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert DataPolicyServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert DataPolicyServiceClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert DataPolicyServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - DataPolicyServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert DataPolicyServiceClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": 
"always"}): - assert DataPolicyServiceClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert DataPolicyServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - DataPolicyServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert DataPolicyServiceClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert DataPolicyServiceClient._get_client_cert_source(None, False) is None - assert DataPolicyServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert DataPolicyServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert DataPolicyServiceClient._get_client_cert_source(None, True) is mock_default_cert_source - assert DataPolicyServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(DataPolicyServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataPolicyServiceClient)) -@mock.patch.object(DataPolicyServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataPolicyServiceAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = DataPolicyServiceClient._DEFAULT_UNIVERSE - default_endpoint = DataPolicyServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = DataPolicyServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert DataPolicyServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert DataPolicyServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == DataPolicyServiceClient.DEFAULT_MTLS_ENDPOINT - assert DataPolicyServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert DataPolicyServiceClient._get_api_endpoint(None, None, default_universe, "always") == DataPolicyServiceClient.DEFAULT_MTLS_ENDPOINT - assert DataPolicyServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == DataPolicyServiceClient.DEFAULT_MTLS_ENDPOINT - assert DataPolicyServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert DataPolicyServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - DataPolicyServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." 
- - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert DataPolicyServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert DataPolicyServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert DataPolicyServiceClient._get_universe_domain(None, None) == DataPolicyServiceClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - DataPolicyServiceClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." - -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = DataPolicyServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - -@pytest.mark.parametrize("error_code", [401,403,404,500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = DataPolicyServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - -@pytest.mark.parametrize("client_class,transport_name", [ - (DataPolicyServiceClient, "grpc"), - (DataPolicyServiceAsyncClient, "grpc_asyncio"), - (DataPolicyServiceClient, "rest"), -]) -def test_data_policy_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'bigquerydatapolicy.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://bigquerydatapolicy.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.DataPolicyServiceGrpcTransport, "grpc"), - (transports.DataPolicyServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.DataPolicyServiceRestTransport, "rest"), -]) -def test_data_policy_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = 
service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (DataPolicyServiceClient, "grpc"), - (DataPolicyServiceAsyncClient, "grpc_asyncio"), - (DataPolicyServiceClient, "rest"), -]) -def test_data_policy_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'bigquerydatapolicy.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://bigquerydatapolicy.googleapis.com' - ) - - -def test_data_policy_service_client_get_transport_class(): - transport = DataPolicyServiceClient.get_transport_class() - available_transports = [ - transports.DataPolicyServiceGrpcTransport, - transports.DataPolicyServiceRestTransport, - ] - assert transport in available_transports - - transport = DataPolicyServiceClient.get_transport_class("grpc") - assert transport == transports.DataPolicyServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (DataPolicyServiceClient, transports.DataPolicyServiceGrpcTransport, "grpc"), - (DataPolicyServiceAsyncClient, transports.DataPolicyServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (DataPolicyServiceClient, transports.DataPolicyServiceRestTransport, "rest"), -]) -@mock.patch.object(DataPolicyServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataPolicyServiceClient)) -@mock.patch.object(DataPolicyServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataPolicyServiceAsyncClient)) -def test_data_policy_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(DataPolicyServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(DataPolicyServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (DataPolicyServiceClient, transports.DataPolicyServiceGrpcTransport, "grpc", "true"), - 
(DataPolicyServiceAsyncClient, transports.DataPolicyServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (DataPolicyServiceClient, transports.DataPolicyServiceGrpcTransport, "grpc", "false"), - (DataPolicyServiceAsyncClient, transports.DataPolicyServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (DataPolicyServiceClient, transports.DataPolicyServiceRestTransport, "rest", "true"), - (DataPolicyServiceClient, transports.DataPolicyServiceRestTransport, "rest", "false"), -]) -@mock.patch.object(DataPolicyServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataPolicyServiceClient)) -@mock.patch.object(DataPolicyServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataPolicyServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_data_policy_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
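Across all of these auto-switch branches the decision is the same: the client only adopts the mTLS endpoint and a client certificate when GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and a certificate source (explicit or ADC-provided) is actually available. A condensed model of that decision, with an assumed mTLS endpoint name:

.. code-block:: python

    import os

    DEFAULT_ENDPOINT = "bigquerydatapolicy.googleapis.com"
    MTLS_ENDPOINT = "bigquerydatapolicy.mtls.googleapis.com"  # assumed naming

    def pick_host(cert_source):
        """Return (host, cert_source_for_mtls) under GOOGLE_API_USE_MTLS_ENDPOINT=auto."""
        if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" and cert_source:
            return MTLS_ENDPOINT, cert_source
        return DEFAULT_ENDPOINT, None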
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - DataPolicyServiceClient, DataPolicyServiceAsyncClient -]) -@mock.patch.object(DataPolicyServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataPolicyServiceClient)) -@mock.patch.object(DataPolicyServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataPolicyServiceAsyncClient)) -def test_data_policy_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - DataPolicyServiceClient, DataPolicyServiceAsyncClient -]) -@mock.patch.object(DataPolicyServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataPolicyServiceClient)) -@mock.patch.object(DataPolicyServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataPolicyServiceAsyncClient)) -def test_data_policy_service_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = DataPolicyServiceClient._DEFAULT_UNIVERSE - default_endpoint = DataPolicyServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = DataPolicyServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (DataPolicyServiceClient, transports.DataPolicyServiceGrpcTransport, "grpc"), - (DataPolicyServiceAsyncClient, transports.DataPolicyServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (DataPolicyServiceClient, transports.DataPolicyServiceRestTransport, "rest"), -]) -def test_data_policy_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (DataPolicyServiceClient, transports.DataPolicyServiceGrpcTransport, "grpc", grpc_helpers), - (DataPolicyServiceAsyncClient, transports.DataPolicyServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (DataPolicyServiceClient, transports.DataPolicyServiceRestTransport, "rest", None), -]) -def test_data_policy_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_data_policy_service_client_client_options_from_dict(): - with mock.patch('google.cloud.bigquery_datapolicies_v1.services.data_policy_service.transports.DataPolicyServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = DataPolicyServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (DataPolicyServiceClient, transports.DataPolicyServiceGrpcTransport, "grpc", grpc_helpers), - (DataPolicyServiceAsyncClient, transports.DataPolicyServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_data_policy_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
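One detail worth noting in the channel assertion that follows: the -1 option values disable gRPC's message-size limits (by default gRPC caps received messages at 4 MiB). The same options apply to any gRPC channel outside the gapic layer, e.g.:

.. code-block:: python

    import grpc

    channel = grpc.insecure_channel(
        "localhost:50051",  # placeholder target
        options=[
            ("grpc.max_send_message_length", -1),     # -1 means no limit
            ("grpc.max_receive_message_length", -1),
        ],
    )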
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "bigquerydatapolicy.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="bigquerydatapolicy.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - datapolicy.CreateDataPolicyRequest, - dict, -]) -def test_create_data_policy(request_type, transport: str = 'grpc'): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datapolicy.DataPolicy( - name='name_value', - data_policy_type=datapolicy.DataPolicy.DataPolicyType.COLUMN_LEVEL_SECURITY_POLICY, - data_policy_id='data_policy_id_value', - policy_tag='policy_tag_value', - ) - response = client.create_data_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = datapolicy.CreateDataPolicyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, datapolicy.DataPolicy) - assert response.name == 'name_value' - assert response.data_policy_type == datapolicy.DataPolicy.DataPolicyType.COLUMN_LEVEL_SECURITY_POLICY - assert response.data_policy_id == 'data_policy_id_value' - - -def test_create_data_policy_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = datapolicy.CreateDataPolicyRequest( - parent='parent_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_policy), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
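Outside of tests, the RPC mocked above is called directly on the client. A hedged usage sketch with placeholder resource names, mirroring the flattened form exercised later in this file:

.. code-block:: python

    from google.cloud.bigquery_datapolicies_v1 import DataPolicyServiceClient, types

    client = DataPolicyServiceClient()  # assumes Application Default Credentials
    policy = client.create_data_policy(
        parent="projects/my-project/locations/us",  # placeholder
        data_policy=types.DataPolicy(
            policy_tag="projects/my-project/locations/us/taxonomies/1/policyTags/2",
        ),
    )
    print(policy.name)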
- client.create_data_policy(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datapolicy.CreateDataPolicyRequest( - parent='parent_value', - ) - -def test_create_data_policy_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_data_policy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_data_policy] = mock_rpc - request = {} - client.create_data_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_data_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_data_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_data_policy in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_data_policy] = mock_rpc - - request = {} - await client.create_data_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_data_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_data_policy_async(transport: str = 'grpc_asyncio', request_type=datapolicy.CreateDataPolicyRequest): - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_policy), - '__call__') as call: - # Designate an appropriate return value for the call. 
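The caching asserted in these use_cached_wrapped_rpc tests is straightforward memoization: every transport method is wrapped exactly once at client construction (that is where retries, timeouts, and metadata get attached), and all later calls dispatch through the cached wrapper. In miniature (illustrative only, not the gapic internals):

.. code-block:: python

    class MiniTransport:
        def __init__(self, rpcs):
            # Wrap each RPC once, up front; no per-call wrapper construction.
            self._wrapped_methods = {rpc: self._wrap(rpc) for rpc in rpcs}

        @staticmethod
        def _wrap(rpc):
            def wrapped(request):
                # real code would apply retry/timeout/metadata here
                return rpc(request)
            return wrapped

        def call(self, rpc, request):
            return self._wrapped_methods[rpc](request)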
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datapolicy.DataPolicy( - name='name_value', - data_policy_type=datapolicy.DataPolicy.DataPolicyType.COLUMN_LEVEL_SECURITY_POLICY, - data_policy_id='data_policy_id_value', - )) - response = await client.create_data_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = datapolicy.CreateDataPolicyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, datapolicy.DataPolicy) - assert response.name == 'name_value' - assert response.data_policy_type == datapolicy.DataPolicy.DataPolicyType.COLUMN_LEVEL_SECURITY_POLICY - assert response.data_policy_id == 'data_policy_id_value' - - -@pytest.mark.asyncio -async def test_create_data_policy_async_from_dict(): - await test_create_data_policy_async(request_type=dict) - -def test_create_data_policy_field_headers(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datapolicy.CreateDataPolicyRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_policy), - '__call__') as call: - call.return_value = datapolicy.DataPolicy() - client.create_data_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_data_policy_field_headers_async(): - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datapolicy.CreateDataPolicyRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datapolicy.DataPolicy()) - await client.create_data_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_data_policy_flattened(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datapolicy.DataPolicy() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
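The field-header assertions above exercise implicit routing: whichever request fields appear in the resource path are echoed into x-goog-request-params metadata so the backend can route the call. A tiny standalone sketch of building that header (the helper name is hypothetical):

.. code-block:: python

    from urllib.parse import quote

    def routing_metadata(**fields):
        """Build the metadata entry the field-header tests look for."""
        params = "&".join(f"{k}={quote(str(v))}" for k, v in sorted(fields.items()))
        return ("x-goog-request-params", params)

    assert routing_metadata(parent="parent_value") == (
        "x-goog-request-params", "parent=parent_value")

For nested routing keys such as data_policy.name (asserted by the update tests below), the key can be passed via dict unpacking, e.g. routing_metadata(**{"data_policy.name": "name_value"}).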
- client.create_data_policy( - parent='parent_value', - data_policy=datapolicy.DataPolicy(policy_tag='policy_tag_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].data_policy - mock_val = datapolicy.DataPolicy(policy_tag='policy_tag_value') - assert arg == mock_val - - -def test_create_data_policy_flattened_error(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_data_policy( - datapolicy.CreateDataPolicyRequest(), - parent='parent_value', - data_policy=datapolicy.DataPolicy(policy_tag='policy_tag_value'), - ) - -@pytest.mark.asyncio -async def test_create_data_policy_flattened_async(): - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_data_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datapolicy.DataPolicy() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datapolicy.DataPolicy()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_data_policy( - parent='parent_value', - data_policy=datapolicy.DataPolicy(policy_tag='policy_tag_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].data_policy - mock_val = datapolicy.DataPolicy(policy_tag='policy_tag_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_data_policy_flattened_error_async(): - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_data_policy( - datapolicy.CreateDataPolicyRequest(), - parent='parent_value', - data_policy=datapolicy.DataPolicy(policy_tag='policy_tag_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - datapolicy.UpdateDataPolicyRequest, - dict, -]) -def test_update_data_policy(request_type, transport: str = 'grpc'): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_policy), - '__call__') as call: - # Designate an appropriate return value for the call. 
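The flattened_error cases pin down one API rule: a full request object and the flattened keyword shortcuts are mutually exclusive. Schematically (a toy signature, not the generated one):

.. code-block:: python

    def create_data_policy(request=None, *, parent=None, data_policy=None):
        has_flattened = any(arg is not None for arg in (parent, data_policy))
        if request is not None and has_flattened:
            raise ValueError("If the `request` argument is set, then none of "
                             "the individual field arguments should be set.")
        return request or {"parent": parent, "data_policy": data_policy}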
- call.return_value = datapolicy.DataPolicy( - name='name_value', - data_policy_type=datapolicy.DataPolicy.DataPolicyType.COLUMN_LEVEL_SECURITY_POLICY, - data_policy_id='data_policy_id_value', - policy_tag='policy_tag_value', - ) - response = client.update_data_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = datapolicy.UpdateDataPolicyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, datapolicy.DataPolicy) - assert response.name == 'name_value' - assert response.data_policy_type == datapolicy.DataPolicy.DataPolicyType.COLUMN_LEVEL_SECURITY_POLICY - assert response.data_policy_id == 'data_policy_id_value' - - -def test_update_data_policy_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = datapolicy.UpdateDataPolicyRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_policy), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_data_policy(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datapolicy.UpdateDataPolicyRequest( - ) - -def test_update_data_policy_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_data_policy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_data_policy] = mock_rpc - request = {} - client.update_data_policy(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.update_data_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_data_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_data_policy in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_data_policy] = mock_rpc - - request = {} - await client.update_data_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.update_data_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_data_policy_async(transport: str = 'grpc_asyncio', request_type=datapolicy.UpdateDataPolicyRequest): - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datapolicy.DataPolicy( - name='name_value', - data_policy_type=datapolicy.DataPolicy.DataPolicyType.COLUMN_LEVEL_SECURITY_POLICY, - data_policy_id='data_policy_id_value', - )) - response = await client.update_data_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = datapolicy.UpdateDataPolicyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, datapolicy.DataPolicy) - assert response.name == 'name_value' - assert response.data_policy_type == datapolicy.DataPolicy.DataPolicyType.COLUMN_LEVEL_SECURITY_POLICY - assert response.data_policy_id == 'data_policy_id_value' - - -@pytest.mark.asyncio -async def test_update_data_policy_async_from_dict(): - await test_update_data_policy_async(request_type=dict) - -def test_update_data_policy_field_headers(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datapolicy.UpdateDataPolicyRequest() - - request.data_policy.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_data_policy), - '__call__') as call: - call.return_value = datapolicy.DataPolicy() - client.update_data_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'data_policy.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_data_policy_field_headers_async(): - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datapolicy.UpdateDataPolicyRequest() - - request.data_policy.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datapolicy.DataPolicy()) - await client.update_data_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'data_policy.name=name_value', - ) in kw['metadata'] - - -def test_update_data_policy_flattened(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datapolicy.DataPolicy() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_data_policy( - data_policy=datapolicy.DataPolicy(policy_tag='policy_tag_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].data_policy - mock_val = datapolicy.DataPolicy(policy_tag='policy_tag_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_data_policy_flattened_error(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_data_policy( - datapolicy.UpdateDataPolicyRequest(), - data_policy=datapolicy.DataPolicy(policy_tag='policy_tag_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_data_policy_flattened_async(): - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_data_policy), - '__call__') as call: - # Designate an appropriate return value for the call. 
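In application code the update call pairs the modified resource with a FieldMask naming exactly the fields to change; fields not named in the mask are left untouched. A hedged usage sketch with placeholder names, mirroring the flattened form tested here:

.. code-block:: python

    from google.protobuf import field_mask_pb2
    from google.cloud.bigquery_datapolicies_v1 import DataPolicyServiceClient, types

    client = DataPolicyServiceClient()  # assumes Application Default Credentials
    updated = client.update_data_policy(
        data_policy=types.DataPolicy(
            name="projects/my-project/locations/us/dataPolicies/pii_mask",  # placeholder
            policy_tag="projects/my-project/locations/us/taxonomies/1/policyTags/2",
        ),
        update_mask=field_mask_pb2.FieldMask(paths=["policy_tag"]),
    )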
- call.return_value = datapolicy.DataPolicy() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datapolicy.DataPolicy()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_data_policy( - data_policy=datapolicy.DataPolicy(policy_tag='policy_tag_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].data_policy - mock_val = datapolicy.DataPolicy(policy_tag='policy_tag_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_data_policy_flattened_error_async(): - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_data_policy( - datapolicy.UpdateDataPolicyRequest(), - data_policy=datapolicy.DataPolicy(policy_tag='policy_tag_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - datapolicy.RenameDataPolicyRequest, - dict, -]) -def test_rename_data_policy(request_type, transport: str = 'grpc'): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_data_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datapolicy.DataPolicy( - name='name_value', - data_policy_type=datapolicy.DataPolicy.DataPolicyType.COLUMN_LEVEL_SECURITY_POLICY, - data_policy_id='data_policy_id_value', - policy_tag='policy_tag_value', - ) - response = client.rename_data_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = datapolicy.RenameDataPolicyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, datapolicy.DataPolicy) - assert response.name == 'name_value' - assert response.data_policy_type == datapolicy.DataPolicy.DataPolicyType.COLUMN_LEVEL_SECURITY_POLICY - assert response.data_policy_id == 'data_policy_id_value' - - -def test_rename_data_policy_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = datapolicy.RenameDataPolicyRequest( - name='name_value', - new_data_policy_id='new_data_policy_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.rename_data_policy), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.rename_data_policy(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datapolicy.RenameDataPolicyRequest( - name='name_value', - new_data_policy_id='new_data_policy_id_value', - ) - -def test_rename_data_policy_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.rename_data_policy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.rename_data_policy] = mock_rpc - request = {} - client.rename_data_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.rename_data_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_rename_data_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.rename_data_policy in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.rename_data_policy] = mock_rpc - - request = {} - await client.rename_data_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.rename_data_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_rename_data_policy_async(transport: str = 'grpc_asyncio', request_type=datapolicy.RenameDataPolicyRequest): - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_data_policy), - '__call__') as call: - # Designate an appropriate return value for the call. 
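The rename RPC takes the current resource name plus the new data-policy id and returns the renamed policy. A usage sketch (placeholders again, assuming Application Default Credentials):

.. code-block:: python

    from google.cloud.bigquery_datapolicies_v1 import DataPolicyServiceClient

    client = DataPolicyServiceClient()
    renamed = client.rename_data_policy(
        name="projects/my-project/locations/us/dataPolicies/pii_mask",  # placeholder
        new_data_policy_id="pii_mask_v2",                               # placeholder
    )
    print(renamed.name)  # the renamed policy's resource name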
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datapolicy.DataPolicy( - name='name_value', - data_policy_type=datapolicy.DataPolicy.DataPolicyType.COLUMN_LEVEL_SECURITY_POLICY, - data_policy_id='data_policy_id_value', - )) - response = await client.rename_data_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = datapolicy.RenameDataPolicyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, datapolicy.DataPolicy) - assert response.name == 'name_value' - assert response.data_policy_type == datapolicy.DataPolicy.DataPolicyType.COLUMN_LEVEL_SECURITY_POLICY - assert response.data_policy_id == 'data_policy_id_value' - - -@pytest.mark.asyncio -async def test_rename_data_policy_async_from_dict(): - await test_rename_data_policy_async(request_type=dict) - -def test_rename_data_policy_field_headers(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datapolicy.RenameDataPolicyRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_data_policy), - '__call__') as call: - call.return_value = datapolicy.DataPolicy() - client.rename_data_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_rename_data_policy_field_headers_async(): - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datapolicy.RenameDataPolicyRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_data_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datapolicy.DataPolicy()) - await client.rename_data_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_rename_data_policy_flattened(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_data_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datapolicy.DataPolicy() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.rename_data_policy( - name='name_value', - new_data_policy_id='new_data_policy_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].new_data_policy_id - mock_val = 'new_data_policy_id_value' - assert arg == mock_val - - -def test_rename_data_policy_flattened_error(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.rename_data_policy( - datapolicy.RenameDataPolicyRequest(), - name='name_value', - new_data_policy_id='new_data_policy_id_value', - ) - -@pytest.mark.asyncio -async def test_rename_data_policy_flattened_async(): - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.rename_data_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datapolicy.DataPolicy() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datapolicy.DataPolicy()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.rename_data_policy( - name='name_value', - new_data_policy_id='new_data_policy_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].new_data_policy_id - mock_val = 'new_data_policy_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_rename_data_policy_flattened_error_async(): - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.rename_data_policy( - datapolicy.RenameDataPolicyRequest(), - name='name_value', - new_data_policy_id='new_data_policy_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - datapolicy.DeleteDataPolicyRequest, - dict, -]) -def test_delete_data_policy(request_type, transport: str = 'grpc'): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_data_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = datapolicy.DeleteDataPolicyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_data_policy_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = datapolicy.DeleteDataPolicyRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_policy), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_data_policy(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datapolicy.DeleteDataPolicyRequest( - name='name_value', - ) - -def test_delete_data_policy_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_data_policy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_data_policy] = mock_rpc - request = {} - client.delete_data_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_data_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_data_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_data_policy in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_data_policy] = mock_rpc - - request = {} - await client.delete_data_policy(request) - - # Establish that the underlying gRPC stub method was called. 
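DeleteDataPolicy maps to an empty message on the wire (google.protobuf.Empty), which the generated client surfaces as None; hence the `assert response is None` checks in this block. Usage is correspondingly minimal (a hedged sketch with a placeholder name):

.. code-block:: python

    from google.cloud.bigquery_datapolicies_v1 import DataPolicyServiceClient

    client = DataPolicyServiceClient()  # assumes Application Default Credentials
    client.delete_data_policy(
        name="projects/my-project/locations/us/dataPolicies/pii_mask",  # placeholder
    )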
- assert mock_rpc.call_count == 1 - - await client.delete_data_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_data_policy_async(transport: str = 'grpc_asyncio', request_type=datapolicy.DeleteDataPolicyRequest): - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_data_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = datapolicy.DeleteDataPolicyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_data_policy_async_from_dict(): - await test_delete_data_policy_async(request_type=dict) - -def test_delete_data_policy_field_headers(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datapolicy.DeleteDataPolicyRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_policy), - '__call__') as call: - call.return_value = None - client.delete_data_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_data_policy_field_headers_async(): - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datapolicy.DeleteDataPolicyRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_data_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_data_policy_flattened(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(
- type(client.transport.delete_data_policy),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = None
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.delete_data_policy(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_delete_data_policy_flattened_error():
- client = DataPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.delete_data_policy(
- datapolicy.DeleteDataPolicyRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_delete_data_policy_flattened_async():
- client = DataPolicyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_data_policy),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.delete_data_policy(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_data_policy_flattened_error_async():
- client = DataPolicyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.delete_data_policy(
- datapolicy.DeleteDataPolicyRequest(),
- name='name_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- datapolicy.GetDataPolicyRequest,
- dict,
-])
-def test_get_data_policy(request_type, transport: str = 'grpc'):
- client = DataPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_data_policy),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = datapolicy.DataPolicy(
- name='name_value',
- data_policy_type=datapolicy.DataPolicy.DataPolicyType.COLUMN_LEVEL_SECURITY_POLICY,
- data_policy_id='data_policy_id_value',
- policy_tag='policy_tag_value',
- )
- response = client.get_data_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = datapolicy.GetDataPolicyRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, datapolicy.DataPolicy) - assert response.name == 'name_value' - assert response.data_policy_type == datapolicy.DataPolicy.DataPolicyType.COLUMN_LEVEL_SECURITY_POLICY - assert response.data_policy_id == 'data_policy_id_value' - - -def test_get_data_policy_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = datapolicy.GetDataPolicyRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_policy), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_data_policy(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datapolicy.GetDataPolicyRequest( - name='name_value', - ) - -def test_get_data_policy_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_data_policy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_data_policy] = mock_rpc - request = {} - client.get_data_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_data_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_data_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_data_policy in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_data_policy] = mock_rpc - - request = {} - await client.get_data_policy(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- await client.get_data_policy(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_data_policy_async(transport: str = 'grpc_asyncio', request_type=datapolicy.GetDataPolicyRequest):
- client = DataPolicyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_data_policy),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datapolicy.DataPolicy(
- name='name_value',
- data_policy_type=datapolicy.DataPolicy.DataPolicyType.COLUMN_LEVEL_SECURITY_POLICY,
- data_policy_id='data_policy_id_value',
- ))
- response = await client.get_data_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = datapolicy.GetDataPolicyRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, datapolicy.DataPolicy)
- assert response.name == 'name_value'
- assert response.data_policy_type == datapolicy.DataPolicy.DataPolicyType.COLUMN_LEVEL_SECURITY_POLICY
- assert response.data_policy_id == 'data_policy_id_value'
-
-
-@pytest.mark.asyncio
-async def test_get_data_policy_async_from_dict():
- await test_get_data_policy_async(request_type=dict)
-
-def test_get_data_policy_field_headers():
- client = DataPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = datapolicy.GetDataPolicyRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_data_policy),
- '__call__') as call:
- call.return_value = datapolicy.DataPolicy()
- client.get_data_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_data_policy_field_headers_async():
- client = DataPolicyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = datapolicy.GetDataPolicyRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_data_policy),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datapolicy.DataPolicy())
- await client.get_data_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
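-# Illustrative sketch only: the async tests above assign
-# grpc_helpers_async.FakeUnaryUnaryCall because the async transport awaits
-# the gRPC call object, so a mocked stub must return an awaitable that
-# resolves to the response message. A hypothetical stand-in (not the real
-# api_core helper) could be as small as this:
-class _IllustrativeFakeCall:
-    def __init__(self, response):
-        self._response = response
-
-    def __await__(self):
-        async def _resolve():
-            return self._response
-        return _resolve().__await__()
-
-
-@pytest.mark.asyncio
-async def test__illustrative_fake_call_resolves_to_response():
-    response = await _IllustrativeFakeCall(datapolicy.DataPolicy(name='name_value'))
-    assert response.name == 'name_value'
-
-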
-def test_get_data_policy_flattened():
- client = DataPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_data_policy),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = datapolicy.DataPolicy()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.get_data_policy(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_get_data_policy_flattened_error():
- client = DataPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.get_data_policy(
- datapolicy.GetDataPolicyRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_get_data_policy_flattened_async():
- client = DataPolicyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_data_policy),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datapolicy.DataPolicy())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.get_data_policy(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_data_policy_flattened_error_async():
- client = DataPolicyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.get_data_policy(
- datapolicy.GetDataPolicyRequest(),
- name='name_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- datapolicy.ListDataPoliciesRequest,
- dict,
-])
-def test_list_data_policies(request_type, transport: str = 'grpc'):
- client = DataPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_data_policies),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = datapolicy.ListDataPoliciesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_data_policies(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = datapolicy.ListDataPoliciesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDataPoliciesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_data_policies_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = datapolicy.ListDataPoliciesRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_policies), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_data_policies(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datapolicy.ListDataPoliciesRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - ) - -def test_list_data_policies_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_data_policies in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_data_policies] = mock_rpc - request = {} - client.list_data_policies(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.list_data_policies(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_data_policies_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = DataPolicyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.list_data_policies in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.list_data_policies] = mock_rpc
-
- request = {}
- await client.list_data_policies(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.list_data_policies(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_data_policies_async(transport: str = 'grpc_asyncio', request_type=datapolicy.ListDataPoliciesRequest):
- client = DataPolicyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_data_policies),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datapolicy.ListDataPoliciesResponse(
- next_page_token='next_page_token_value',
- ))
- response = await client.list_data_policies(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = datapolicy.ListDataPoliciesRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListDataPoliciesAsyncPager)
- assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_data_policies_async_from_dict():
- await test_list_data_policies_async(request_type=dict)
-
-def test_list_data_policies_field_headers():
- client = DataPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = datapolicy.ListDataPoliciesRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_data_policies),
- '__call__') as call:
- call.return_value = datapolicy.ListDataPoliciesResponse()
- client.list_data_policies(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
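-# Illustrative sketch only: the ('x-goog-request-params', ...) tuple asserted
-# in the field-header tests is produced by gapic_v1.routing_header, which
-# serializes the URI-bound request fields; the values below are hypothetical.
-def test__illustrative_routing_header_serialization():
-    metadata = gapic_v1.routing_header.to_grpc_metadata((
-        ('parent', 'parent_value'),
-    ))
-    assert metadata == ('x-goog-request-params', 'parent=parent_value')
-
-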
-@pytest.mark.asyncio
-async def test_list_data_policies_field_headers_async():
- client = DataPolicyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = datapolicy.ListDataPoliciesRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_data_policies),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datapolicy.ListDataPoliciesResponse())
- await client.list_data_policies(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-def test_list_data_policies_flattened():
- client = DataPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_data_policies),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = datapolicy.ListDataPoliciesResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.list_data_policies(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-
-def test_list_data_policies_flattened_error():
- client = DataPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.list_data_policies(
- datapolicy.ListDataPoliciesRequest(),
- parent='parent_value',
- )
-
-@pytest.mark.asyncio
-async def test_list_data_policies_flattened_async():
- client = DataPolicyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_data_policies),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datapolicy.ListDataPoliciesResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.list_data_policies(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_data_policies_flattened_error_async():
- client = DataPolicyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.list_data_policies(
- datapolicy.ListDataPoliciesRequest(),
- parent='parent_value',
- )
-
-
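-# Illustrative sketch only (hypothetical helper, not part of the generated
-# client): the pager tests below exercise this contract, where the pager
-# re-issues the RPC with each next_page_token and flattens items across
-# pages until the token comes back empty.
-def _illustrative_iterate_all_pages(rpc, request):
-    results = []
-    while True:
-        response = rpc(request)
-        results.extend(response.data_policies)
-        if not response.next_page_token:
-            break
-        request.page_token = response.next_page_token
-    return results
-
-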
-def test_list_data_policies_pager(transport_name: str = "grpc"):
- client = DataPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_data_policies),
- '__call__') as call:
- # Set the response to a series of pages.
- call.side_effect = (
- datapolicy.ListDataPoliciesResponse(
- data_policies=[
- datapolicy.DataPolicy(),
- datapolicy.DataPolicy(),
- datapolicy.DataPolicy(),
- ],
- next_page_token='abc',
- ),
- datapolicy.ListDataPoliciesResponse(
- data_policies=[],
- next_page_token='def',
- ),
- datapolicy.ListDataPoliciesResponse(
- data_policies=[
- datapolicy.DataPolicy(),
- ],
- next_page_token='ghi',
- ),
- datapolicy.ListDataPoliciesResponse(
- data_policies=[
- datapolicy.DataPolicy(),
- datapolicy.DataPolicy(),
- ],
- ),
- RuntimeError,
- )
-
- retry = retries.Retry()
- timeout = 5
- expected_metadata = (
- gapic_v1.routing_header.to_grpc_metadata((
- ('parent', ''),
- )),
- )
- pager = client.list_data_policies(request={}, retry=retry, timeout=timeout)
-
- assert pager._metadata == expected_metadata
- assert pager._retry == retry
- assert pager._timeout == timeout
-
- results = list(pager)
- assert len(results) == 6
- assert all(isinstance(i, datapolicy.DataPolicy)
- for i in results)
-
-
-def test_list_data_policies_pages(transport_name: str = "grpc"):
- client = DataPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_data_policies),
- '__call__') as call:
- # Set the response to a series of pages.
- call.side_effect = (
- datapolicy.ListDataPoliciesResponse(
- data_policies=[
- datapolicy.DataPolicy(),
- datapolicy.DataPolicy(),
- datapolicy.DataPolicy(),
- ],
- next_page_token='abc',
- ),
- datapolicy.ListDataPoliciesResponse(
- data_policies=[],
- next_page_token='def',
- ),
- datapolicy.ListDataPoliciesResponse(
- data_policies=[
- datapolicy.DataPolicy(),
- ],
- next_page_token='ghi',
- ),
- datapolicy.ListDataPoliciesResponse(
- data_policies=[
- datapolicy.DataPolicy(),
- datapolicy.DataPolicy(),
- ],
- ),
- RuntimeError,
- )
- pages = list(client.list_data_policies(request={}).pages)
- for page_, token in zip(pages, ['abc','def','ghi', '']):
- assert page_.raw_page.next_page_token == token
-
-@pytest.mark.asyncio
-async def test_list_data_policies_async_pager():
- client = DataPolicyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_data_policies),
- '__call__', new_callable=mock.AsyncMock) as call:
- # Set the response to a series of pages.
- call.side_effect = ( - datapolicy.ListDataPoliciesResponse( - data_policies=[ - datapolicy.DataPolicy(), - datapolicy.DataPolicy(), - datapolicy.DataPolicy(), - ], - next_page_token='abc', - ), - datapolicy.ListDataPoliciesResponse( - data_policies=[], - next_page_token='def', - ), - datapolicy.ListDataPoliciesResponse( - data_policies=[ - datapolicy.DataPolicy(), - ], - next_page_token='ghi', - ), - datapolicy.ListDataPoliciesResponse( - data_policies=[ - datapolicy.DataPolicy(), - datapolicy.DataPolicy(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_data_policies(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, datapolicy.DataPolicy) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_data_policies_async_pages(): - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_policies), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - datapolicy.ListDataPoliciesResponse( - data_policies=[ - datapolicy.DataPolicy(), - datapolicy.DataPolicy(), - datapolicy.DataPolicy(), - ], - next_page_token='abc', - ), - datapolicy.ListDataPoliciesResponse( - data_policies=[], - next_page_token='def', - ), - datapolicy.ListDataPoliciesResponse( - data_policies=[ - datapolicy.DataPolicy(), - ], - next_page_token='ghi', - ), - datapolicy.ListDataPoliciesResponse( - data_policies=[ - datapolicy.DataPolicy(), - datapolicy.DataPolicy(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_data_policies(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.GetIamPolicyRequest, - dict, -]) -def test_get_iam_policy(request_type, transport: str = 'grpc'): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b'etag_blob', - ) - response = client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.GetIamPolicyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -def test_get_iam_policy_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = iam_policy_pb2.GetIamPolicyRequest( - resource='resource_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_iam_policy(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest( - resource='resource_value', - ) - -def test_get_iam_policy_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_iam_policy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc - request = {} - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_iam_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_iam_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_iam_policy in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_iam_policy] = mock_rpc - - request = {} - await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- await client.get_iam_policy(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.GetIamPolicyRequest):
- client = DataPolicyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_iam_policy),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy(
- version=774,
- etag=b'etag_blob',
- ))
- response = await client.get_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = iam_policy_pb2.GetIamPolicyRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, policy_pb2.Policy)
- assert response.version == 774
- assert response.etag == b'etag_blob'
-
-
-@pytest.mark.asyncio
-async def test_get_iam_policy_async_from_dict():
- await test_get_iam_policy_async(request_type=dict)
-
-def test_get_iam_policy_field_headers():
- client = DataPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = iam_policy_pb2.GetIamPolicyRequest()
-
- request.resource = 'resource_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_iam_policy),
- '__call__') as call:
- call.return_value = policy_pb2.Policy()
- client.get_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'resource=resource_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_iam_policy_field_headers_async():
- client = DataPolicyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = iam_policy_pb2.GetIamPolicyRequest()
-
- request.resource = 'resource_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_iam_policy),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
- await client.get_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - -def test_get_iam_policy_from_dict_foreign(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - response = client.get_iam_policy(request={ - 'resource': 'resource_value', - 'options': options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.SetIamPolicyRequest, - dict, -]) -def test_set_iam_policy(request_type, transport: str = 'grpc'): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b'etag_blob', - ) - response = client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.SetIamPolicyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -def test_set_iam_policy_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = iam_policy_pb2.SetIamPolicyRequest( - resource='resource_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.set_iam_policy(request=request)
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == iam_policy_pb2.SetIamPolicyRequest(
- resource='resource_value',
- )
-
-def test_set_iam_policy_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = DataPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.set_iam_policy in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc
- request = {}
- client.set_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- client.set_iam_policy(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_set_iam_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = DataPolicyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.set_iam_policy in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.set_iam_policy] = mock_rpc
-
- request = {}
- await client.set_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.set_iam_policy(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_set_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.SetIamPolicyRequest):
- client = DataPolicyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.set_iam_policy),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy(
- version=774,
- etag=b'etag_blob',
- ))
- response = await client.set_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.SetIamPolicyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -@pytest.mark.asyncio -async def test_set_iam_policy_async_from_dict(): - await test_set_iam_policy_async(request_type=dict) - -def test_set_iam_policy_field_headers(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_set_iam_policy_field_headers_async(): - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - -def test_set_iam_policy_from_dict_foreign(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - response = client.set_iam_policy(request={ - 'resource': 'resource_value', - 'policy': policy_pb2.Policy(version=774), - 'update_mask': field_mask_pb2.FieldMask(paths=['paths_value']), - } - ) - call.assert_called() - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.TestIamPermissionsRequest, - dict, -]) -def test_test_iam_permissions(request_type, transport: str = 'grpc'): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - ) - response = client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.TestIamPermissionsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - assert response.permissions == ['permissions_value'] - - -def test_test_iam_permissions_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = iam_policy_pb2.TestIamPermissionsRequest( - resource='resource_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.test_iam_permissions(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest( - resource='resource_value', - ) - -def test_test_iam_permissions_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.test_iam_permissions in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.test_iam_permissions] = mock_rpc - request = {} - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.test_iam_permissions(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_test_iam_permissions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = DataPolicyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.test_iam_permissions in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.test_iam_permissions] = mock_rpc
-
- request = {}
- await client.test_iam_permissions(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.test_iam_permissions(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_test_iam_permissions_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.TestIamPermissionsRequest):
- client = DataPolicyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.test_iam_permissions),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse(
- permissions=['permissions_value'],
- ))
- response = await client.test_iam_permissions(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = iam_policy_pb2.TestIamPermissionsRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse)
- assert response.permissions == ['permissions_value']
-
-
-@pytest.mark.asyncio
-async def test_test_iam_permissions_async_from_dict():
- await test_test_iam_permissions_async(request_type=dict)
-
-def test_test_iam_permissions_field_headers():
- client = DataPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = iam_policy_pb2.TestIamPermissionsRequest()
-
- request.resource = 'resource_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_field_headers_async(): - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse()) - await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - -def test_test_iam_permissions_from_dict_foreign(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - response = client.test_iam_permissions(request={ - 'resource': 'resource_value', - 'permissions': ['permissions_value'], - } - ) - call.assert_called() - - -def test_create_data_policy_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_data_policy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_data_policy] = mock_rpc - - request = {} - client.create_data_policy(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_data_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_data_policy_rest_required_fields(request_type=datapolicy.CreateDataPolicyRequest): - transport_class = transports.DataPolicyServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_data_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_data_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = datapolicy.DataPolicy() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = datapolicy.DataPolicy.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_data_policy(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_data_policy_rest_unset_required_fields(): - transport = transports.DataPolicyServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_data_policy._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "dataPolicy", ))) - - -def test_create_data_policy_rest_flattened(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
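- # The flattened kwargs below are folded into a CreateDataPolicyRequest
- # before transcoding, so the resulting URI can be checked against the
- # '{parent=projects/*/locations/*}/dataPolicies' template.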
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = datapolicy.DataPolicy() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - data_policy=datapolicy.DataPolicy(policy_tag='policy_tag_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = datapolicy.DataPolicy.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_data_policy(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/dataPolicies" % client.transport._host, args[1]) - - -def test_create_data_policy_rest_flattened_error(transport: str = 'rest'): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_data_policy( - datapolicy.CreateDataPolicyRequest(), - parent='parent_value', - data_policy=datapolicy.DataPolicy(policy_tag='policy_tag_value'), - ) - - -def test_update_data_policy_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_data_policy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_data_policy] = mock_rpc - - request = {} - client.update_data_policy(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.update_data_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_data_policy_rest_required_fields(request_type=datapolicy.UpdateDataPolicyRequest): - transport_class = transports.DataPolicyServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_data_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_data_policy._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = datapolicy.DataPolicy() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = datapolicy.DataPolicy.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_data_policy(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_data_policy_rest_unset_required_fields(): - transport = transports.DataPolicyServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_data_policy._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", )) & set(("dataPolicy", ))) - - -def test_update_data_policy_rest_flattened(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = datapolicy.DataPolicy() - - # get arguments that satisfy an http rule for this method - sample_request = {'data_policy': {'name': 'projects/sample1/locations/sample2/dataPolicies/sample3'}} - - # get truthy value for each flattened field - mock_args = dict( - data_policy=datapolicy.DataPolicy(policy_tag='policy_tag_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = datapolicy.DataPolicy.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_data_policy(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{data_policy.name=projects/*/locations/*/dataPolicies/*}" % client.transport._host, args[1]) - - -def test_update_data_policy_rest_flattened_error(transport: str = 'rest'): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_data_policy( - datapolicy.UpdateDataPolicyRequest(), - data_policy=datapolicy.DataPolicy(policy_tag='policy_tag_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_rename_data_policy_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.rename_data_policy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.rename_data_policy] = mock_rpc - - request = {} - client.rename_data_policy(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.rename_data_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_rename_data_policy_rest_required_fields(request_type=datapolicy.RenameDataPolicyRequest): - transport_class = transports.DataPolicyServiceRestTransport - - request_init = {} - request_init["name"] = "" - request_init["new_data_policy_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).rename_data_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - jsonified_request["newDataPolicyId"] = 'new_data_policy_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).rename_data_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - assert "newDataPolicyId" in jsonified_request - assert jsonified_request["newDataPolicyId"] == 'new_data_policy_id_value' - - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = datapolicy.DataPolicy() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
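- # Stubbing transcode() with a fixed uri/method/body triple keeps the
- # test independent of the real HTTP rule while still exercising the
- # query-parameter encoding asserted at the bottom
- # ($alt=json;enum-encoding=int).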
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = datapolicy.DataPolicy.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.rename_data_policy(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_rename_data_policy_rest_unset_required_fields(): - transport = transports.DataPolicyServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.rename_data_policy._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", "newDataPolicyId", ))) - - -def test_rename_data_policy_rest_flattened(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = datapolicy.DataPolicy() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/dataPolicies/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - new_data_policy_id='new_data_policy_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = datapolicy.DataPolicy.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.rename_data_policy(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/dataPolicies/*}:rename" % client.transport._host, args[1]) - - -def test_rename_data_policy_rest_flattened_error(transport: str = 'rest'): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
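- # (The mix is ambiguous about which value wins, so the client raises
- # before any transport work happens.)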
- with pytest.raises(ValueError): - client.rename_data_policy( - datapolicy.RenameDataPolicyRequest(), - name='name_value', - new_data_policy_id='new_data_policy_id_value', - ) - - -def test_delete_data_policy_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_data_policy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_data_policy] = mock_rpc - - request = {} - client.delete_data_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_data_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_data_policy_rest_required_fields(request_type=datapolicy.DeleteDataPolicyRequest): - transport_class = transports.DataPolicyServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_data_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_data_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
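- # delete_data_policy yields no response payload over REST, so the faked
- # body below is an empty string and the method returns None.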
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_data_policy(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_data_policy_rest_unset_required_fields(): - transport = transports.DataPolicyServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_data_policy._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_delete_data_policy_rest_flattened(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/dataPolicies/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_data_policy(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/dataPolicies/*}" % client.transport._host, args[1]) - - -def test_delete_data_policy_rest_flattened_error(transport: str = 'rest'): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_data_policy( - datapolicy.DeleteDataPolicyRequest(), - name='name_value', - ) - - -def test_get_data_policy_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_data_policy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.get_data_policy] = mock_rpc - - request = {} - client.get_data_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_data_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_data_policy_rest_required_fields(request_type=datapolicy.GetDataPolicyRequest): - transport_class = transports.DataPolicyServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_data_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_data_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = datapolicy.DataPolicy() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = datapolicy.DataPolicy.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_data_policy(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_data_policy_rest_unset_required_fields(): - transport = transports.DataPolicyServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_data_policy._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_data_policy_rest_flattened(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = datapolicy.DataPolicy() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/dataPolicies/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = datapolicy.DataPolicy.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_data_policy(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/dataPolicies/*}" % client.transport._host, args[1]) - - -def test_get_data_policy_rest_flattened_error(transport: str = 'rest'): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_data_policy( - datapolicy.GetDataPolicyRequest(), - name='name_value', - ) - - -def test_list_data_policies_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_data_policies in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_data_policies] = mock_rpc - - request = {} - client.list_data_policies(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_data_policies(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_data_policies_rest_required_fields(request_type=datapolicy.ListDataPoliciesRequest): - transport_class = transports.DataPolicyServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_data_policies._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_data_policies._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = datapolicy.ListDataPoliciesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = datapolicy.ListDataPoliciesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_data_policies(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_data_policies_rest_unset_required_fields(): - transport = transports.DataPolicyServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_data_policies._get_unset_required_fields({}) - assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_data_policies_rest_flattened(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = datapolicy.ListDataPoliciesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = datapolicy.ListDataPoliciesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_data_policies(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/dataPolicies" % client.transport._host, args[1]) - - -def test_list_data_policies_rest_flattened_error(transport: str = 'rest'): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_data_policies( - datapolicy.ListDataPoliciesRequest(), - parent='parent_value', - ) - - -def test_list_data_policies_rest_pager(transport: str = 'rest'): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
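- # Four fake pages are prepared below: tokens 'abc'/'def'/'ghi' chain
- # them together and the final page's empty token stops iteration; the
- # sequence is doubled so both list(pager) and .pages get fresh replies.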
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - datapolicy.ListDataPoliciesResponse( - data_policies=[ - datapolicy.DataPolicy(), - datapolicy.DataPolicy(), - datapolicy.DataPolicy(), - ], - next_page_token='abc', - ), - datapolicy.ListDataPoliciesResponse( - data_policies=[], - next_page_token='def', - ), - datapolicy.ListDataPoliciesResponse( - data_policies=[ - datapolicy.DataPolicy(), - ], - next_page_token='ghi', - ), - datapolicy.ListDataPoliciesResponse( - data_policies=[ - datapolicy.DataPolicy(), - datapolicy.DataPolicy(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(datapolicy.ListDataPoliciesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - pager = client.list_data_policies(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, datapolicy.DataPolicy) - for i in results) - - pages = list(client.list_data_policies(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_get_iam_policy_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_iam_policy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc - - request = {} - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_iam_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_iam_policy_rest_required_fields(request_type=iam_policy_pb2.GetIamPolicyRequest): - transport_class = transports.DataPolicyServiceRestTransport - - request_init = {} - request_init["resource"] = "" - request = request_type(**request_init) - pb_request = request - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["resource"] = 'resource_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_iam_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "resource" in jsonified_request - assert jsonified_request["resource"] == 'resource_value' - - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
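- # iam_policy_pb2 requests are plain protobuf messages rather than
- # proto-plus wrappers, so there is no request_type.pb() unwrapping here.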
- pb_request = request - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_iam_policy(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_iam_policy_rest_unset_required_fields(): - transport = transports.DataPolicyServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_iam_policy._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("resource", ))) - - -def test_set_iam_policy_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.set_iam_policy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc - - request = {} - client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.set_iam_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_set_iam_policy_rest_required_fields(request_type=iam_policy_pb2.SetIamPolicyRequest): - transport_class = transports.DataPolicyServiceRestTransport - - request_init = {} - request_init["resource"] = "" - request = request_type(**request_init) - pb_request = request - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["resource"] = 'resource_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).set_iam_policy._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "resource" in jsonified_request - assert jsonified_request["resource"] == 'resource_value' - - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. 
- return_value = policy_pb2.Policy() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.set_iam_policy(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_set_iam_policy_rest_unset_required_fields(): - transport = transports.DataPolicyServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.set_iam_policy._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("resource", "policy", ))) - - -def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.test_iam_permissions in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.test_iam_permissions] = mock_rpc - - request = {} - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.test_iam_permissions(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_test_iam_permissions_rest_required_fields(request_type=iam_policy_pb2.TestIamPermissionsRequest): - transport_class = transports.DataPolicyServiceRestTransport - - request_init = {} - request_init["resource"] = "" - request_init["permissions"] = "" - request = request_type(**request_init) - pb_request = request - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["resource"] = 'resource_value' - jsonified_request["permissions"] = 'permissions_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).test_iam_permissions._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "resource" in jsonified_request - assert jsonified_request["resource"] == 'resource_value' - assert "permissions" in jsonified_request - assert jsonified_request["permissions"] == 'permissions_value' - - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = iam_policy_pb2.TestIamPermissionsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.test_iam_permissions(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_test_iam_permissions_rest_unset_required_fields(): - transport = transports.DataPolicyServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("resource", "permissions", ))) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. 
- transport = transports.DataPolicyServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.DataPolicyServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DataPolicyServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.DataPolicyServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DataPolicyServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DataPolicyServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.DataPolicyServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DataPolicyServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.DataPolicyServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = DataPolicyServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.DataPolicyServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.DataPolicyServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.DataPolicyServiceGrpcTransport, - transports.DataPolicyServiceGrpcAsyncIOTransport, - transports.DataPolicyServiceRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = DataPolicyServiceClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_data_policy_empty_call_grpc(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
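- # With request=None the client substitutes a default-constructed
- # CreateDataPolicyRequest, so the stub still receives a well-formed
- # (all-defaults) message.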
- with mock.patch.object( - type(client.transport.create_data_policy), - '__call__') as call: - call.return_value = datapolicy.DataPolicy() - client.create_data_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datapolicy.CreateDataPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_data_policy_empty_call_grpc(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_data_policy), - '__call__') as call: - call.return_value = datapolicy.DataPolicy() - client.update_data_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datapolicy.UpdateDataPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_rename_data_policy_empty_call_grpc(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.rename_data_policy), - '__call__') as call: - call.return_value = datapolicy.DataPolicy() - client.rename_data_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datapolicy.RenameDataPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_data_policy_empty_call_grpc(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_policy), - '__call__') as call: - call.return_value = None - client.delete_data_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datapolicy.DeleteDataPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_data_policy_empty_call_grpc(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_data_policy), - '__call__') as call: - call.return_value = datapolicy.DataPolicy() - client.get_data_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datapolicy.GetDataPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_list_data_policies_empty_call_grpc(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_data_policies), - '__call__') as call: - call.return_value = datapolicy.ListDataPoliciesResponse() - client.list_data_policies(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datapolicy.ListDataPoliciesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_iam_policy_empty_call_grpc(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.get_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.GetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_set_iam_policy_empty_call_grpc(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.set_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.SetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_test_iam_permissions_empty_call_grpc(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - client.test_iam_permissions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.TestIamPermissionsRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = DataPolicyServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
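-# The asyncio variants below mirror the sync tests, but the mocked stub
-# must return an awaitable, so each response is wrapped in
-# grpc_helpers_async.FakeUnaryUnaryCall before being awaited.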
-@pytest.mark.asyncio -async def test_create_data_policy_empty_call_grpc_asyncio(): - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_data_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datapolicy.DataPolicy( - name='name_value', - data_policy_type=datapolicy.DataPolicy.DataPolicyType.COLUMN_LEVEL_SECURITY_POLICY, - data_policy_id='data_policy_id_value', - )) - await client.create_data_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datapolicy.CreateDataPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_data_policy_empty_call_grpc_asyncio(): - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_data_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datapolicy.DataPolicy( - name='name_value', - data_policy_type=datapolicy.DataPolicy.DataPolicyType.COLUMN_LEVEL_SECURITY_POLICY, - data_policy_id='data_policy_id_value', - )) - await client.update_data_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datapolicy.UpdateDataPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_rename_data_policy_empty_call_grpc_asyncio(): - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.rename_data_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datapolicy.DataPolicy( - name='name_value', - data_policy_type=datapolicy.DataPolicy.DataPolicyType.COLUMN_LEVEL_SECURITY_POLICY, - data_policy_id='data_policy_id_value', - )) - await client.rename_data_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datapolicy.RenameDataPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_data_policy_empty_call_grpc_asyncio(): - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_policy), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_data_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datapolicy.DeleteDataPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_data_policy_empty_call_grpc_asyncio(): - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_data_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datapolicy.DataPolicy( - name='name_value', - data_policy_type=datapolicy.DataPolicy.DataPolicyType.COLUMN_LEVEL_SECURITY_POLICY, - data_policy_id='data_policy_id_value', - )) - await client.get_data_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datapolicy.GetDataPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_data_policies_empty_call_grpc_asyncio(): - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_data_policies), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datapolicy.ListDataPoliciesResponse( - next_page_token='next_page_token_value', - )) - await client.list_data_policies(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datapolicy.ListDataPoliciesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_iam_policy_empty_call_grpc_asyncio(): - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( - version=774, - etag=b'etag_blob', - )) - await client.get_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.GetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_set_iam_policy_empty_call_grpc_asyncio(): - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( - version=774, - etag=b'etag_blob', - )) - await client.set_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.SetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_test_iam_permissions_empty_call_grpc_asyncio(): - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - )) - await client.test_iam_permissions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.TestIamPermissionsRequest() - - assert args[0] == request_msg - - -def test_transport_kind_rest(): - transport = DataPolicyServiceClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" - - -def test_create_data_policy_rest_bad_request(request_type=datapolicy.CreateDataPolicyRequest): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
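- # Patching Session.request at the HTTP layer (rather than the transport
- # method) exercises the full REST error path: the fabricated 400
- # response below should surface as core_exceptions.BadRequest.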
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- json_return_value = ''
- response_value.json = mock.Mock(return_value={})
- response_value.status_code = 400
- response_value.request = mock.Mock()
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- client.create_data_policy(request)
-
-
-@pytest.mark.parametrize("request_type", [
- datapolicy.CreateDataPolicyRequest,
- dict,
-])
-def test_create_data_policy_rest_call_success(request_type):
- client = DataPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
-
- # send a request that will satisfy transcoding
- request_init = {'parent': 'projects/sample1/locations/sample2'}
- request_init["data_policy"] = {'policy_tag': 'policy_tag_value', 'data_masking_policy': {'predefined_expression': 3, 'routine': 'routine_value'}, 'name': 'name_value', 'data_policy_type': 3, 'data_policy_id': 'data_policy_id_value'}
- # The version of a generated dependency at test runtime may differ from the version used during generation.
- # Delete any fields which are not present in the current runtime dependency
- # See https://github.com/googleapis/gapic-generator-python/issues/1748
-
- # Determine if the message type is proto-plus or protobuf
- test_field = datapolicy.CreateDataPolicyRequest.meta.fields["data_policy"]
-
- def get_message_fields(field):
- # Given a field which is a message (composite type), return a list with
- # all the fields of the message.
- # If the field is not a composite type, return an empty list.
- message_fields = []
-
- if hasattr(field, "message") and field.message:
- is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR")
-
- if is_field_type_proto_plus_type:
- message_fields = field.message.meta.fields.values()
- # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types
- else: # pragma: NO COVER
- message_fields = field.message.DESCRIPTOR.fields
- return message_fields
-
- runtime_nested_fields = [
- (field.name, nested_field.name)
- for field in get_message_fields(test_field)
- for nested_field in get_message_fields(field)
- ]
-
- subfields_not_in_runtime = []
-
- # For each item in the sample request, create a list of sub fields which are not present at runtime
- # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
- for field, value in request_init["data_policy"].items(): # pragma: NO COVER
- result = None
- is_repeated = False
- # For repeated fields
- if isinstance(value, list) and len(value):
- is_repeated = True
- result = value[0]
- # For fields where the type is another message
- if isinstance(value, dict):
- result = value
-
- if result and hasattr(result, "keys"):
- for subfield in result.keys():
- if (field, subfield) not in runtime_nested_fields:
- subfields_not_in_runtime.append(
- {"field": field, "subfield": subfield, "is_repeated": is_repeated}
- )
-
- # Remove fields from the sample request which are not present in the runtime version of the dependency
- # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
- for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER
- field = subfield_to_delete.get("field")
- field_repeated = subfield_to_delete.get("is_repeated")
- subfield = subfield_to_delete.get("subfield")
- if subfield:
- if field_repeated:
- for i in range(0, len(request_init["data_policy"][field])):
- del request_init["data_policy"][field][i][subfield]
- else:
- del request_init["data_policy"][field][subfield]
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = datapolicy.DataPolicy(
- name='name_value',
- data_policy_type=datapolicy.DataPolicy.DataPolicyType.COLUMN_LEVEL_SECURITY_POLICY,
- data_policy_id='data_policy_id_value',
- policy_tag='policy_tag_value',
- )
-
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- response_value.status_code = 200
-
- # Convert return value to protobuf type
- return_value = datapolicy.DataPolicy.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
- response_value.content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- response = client.create_data_policy(request)
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, datapolicy.DataPolicy)
- assert response.name == 'name_value'
- assert response.data_policy_type == datapolicy.DataPolicy.DataPolicyType.COLUMN_LEVEL_SECURITY_POLICY
- assert response.data_policy_id == 'data_policy_id_value'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_create_data_policy_rest_interceptors(null_interceptor):
- transport = transports.DataPolicyServiceRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.DataPolicyServiceRestInterceptor(),
- )
- client = DataPolicyServiceClient(transport=transport)
-
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(transports.DataPolicyServiceRestInterceptor, "post_create_data_policy") as post, \
- mock.patch.object(transports.DataPolicyServiceRestInterceptor, "post_create_data_policy_with_metadata") as post_with_metadata, \
- mock.patch.object(transports.DataPolicyServiceRestInterceptor, "pre_create_data_policy") as pre:
- pre.assert_not_called()
- post.assert_not_called()
- post_with_metadata.assert_not_called()
- pb_message = datapolicy.CreateDataPolicyRequest.pb(datapolicy.CreateDataPolicyRequest())
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = mock.Mock()
- req.return_value.status_code = 200
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- return_value = datapolicy.DataPolicy.to_json(datapolicy.DataPolicy())
- req.return_value.content = return_value
-
- request = datapolicy.CreateDataPolicyRequest()
- metadata =[
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = datapolicy.DataPolicy()
- post_with_metadata.return_value = datapolicy.DataPolicy(), metadata
-
- client.create_data_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
- pre.assert_called_once()
- post.assert_called_once()
- post_with_metadata.assert_called_once()
-
-
-def test_update_data_policy_rest_bad_request(request_type=datapolicy.UpdateDataPolicyRequest):
- client = DataPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
- # send a request that will satisfy transcoding
- request_init = {'data_policy': {'name': 'projects/sample1/locations/sample2/dataPolicies/sample3'}}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- json_return_value = ''
- response_value.json = mock.Mock(return_value={})
- response_value.status_code = 400
- response_value.request = mock.Mock()
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- client.update_data_policy(request)
-
-
-@pytest.mark.parametrize("request_type", [
- datapolicy.UpdateDataPolicyRequest,
- dict,
-])
-def test_update_data_policy_rest_call_success(request_type):
- client = DataPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
-
- # send a request that will satisfy transcoding
- request_init = {'data_policy': {'name': 'projects/sample1/locations/sample2/dataPolicies/sample3'}}
- request_init["data_policy"] = {'policy_tag': 'policy_tag_value', 'data_masking_policy': {'predefined_expression': 3, 'routine': 'routine_value'}, 'name': 'projects/sample1/locations/sample2/dataPolicies/sample3', 'data_policy_type': 3, 'data_policy_id': 'data_policy_id_value'}
- # The version of a generated dependency at test runtime may differ from the version used during generation.
- # Delete any fields which are not present in the current runtime dependency
- # See https://github.com/googleapis/gapic-generator-python/issues/1748
-
- # Determine if the message type is proto-plus or protobuf
- test_field = datapolicy.UpdateDataPolicyRequest.meta.fields["data_policy"]
-
- def get_message_fields(field):
- # Given a field which is a message (composite type), return a list with
- # all the fields of the message.
- # If the field is not a composite type, return an empty list.
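- # proto-plus messages expose their fields via `meta.fields`, whereas
- # vanilla protobuf messages expose them via `DESCRIPTOR.fields`; the
- # presence of a `DESCRIPTOR` attribute distinguishes the two below.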
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["data_policy"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["data_policy"][field])): - del request_init["data_policy"][field][i][subfield] - else: - del request_init["data_policy"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = datapolicy.DataPolicy( - name='name_value', - data_policy_type=datapolicy.DataPolicy.DataPolicyType.COLUMN_LEVEL_SECURITY_POLICY, - data_policy_id='data_policy_id_value', - policy_tag='policy_tag_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = datapolicy.DataPolicy.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_data_policy(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, datapolicy.DataPolicy) - assert response.name == 'name_value' - assert response.data_policy_type == datapolicy.DataPolicy.DataPolicyType.COLUMN_LEVEL_SECURITY_POLICY - assert response.data_policy_id == 'data_policy_id_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_data_policy_rest_interceptors(null_interceptor): - transport = transports.DataPolicyServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataPolicyServiceRestInterceptor(), - ) - client = DataPolicyServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DataPolicyServiceRestInterceptor, "post_update_data_policy") as post, \ - mock.patch.object(transports.DataPolicyServiceRestInterceptor, "post_update_data_policy_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataPolicyServiceRestInterceptor, "pre_update_data_policy") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = datapolicy.UpdateDataPolicyRequest.pb(datapolicy.UpdateDataPolicyRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = datapolicy.DataPolicy.to_json(datapolicy.DataPolicy()) - req.return_value.content = return_value - - request = datapolicy.UpdateDataPolicyRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = datapolicy.DataPolicy() - post_with_metadata.return_value = datapolicy.DataPolicy(), metadata - - client.update_data_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_rename_data_policy_rest_bad_request(request_type=datapolicy.RenameDataPolicyRequest): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dataPolicies/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.rename_data_policy(request) - - -@pytest.mark.parametrize("request_type", [ - datapolicy.RenameDataPolicyRequest, - dict, -]) -def test_rename_data_policy_rest_call_success(request_type): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dataPolicies/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = datapolicy.DataPolicy( - name='name_value', - data_policy_type=datapolicy.DataPolicy.DataPolicyType.COLUMN_LEVEL_SECURITY_POLICY, - data_policy_id='data_policy_id_value', - policy_tag='policy_tag_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = datapolicy.DataPolicy.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.rename_data_policy(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, datapolicy.DataPolicy) - assert response.name == 'name_value' - assert response.data_policy_type == datapolicy.DataPolicy.DataPolicyType.COLUMN_LEVEL_SECURITY_POLICY - assert response.data_policy_id == 'data_policy_id_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_rename_data_policy_rest_interceptors(null_interceptor): - transport = transports.DataPolicyServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataPolicyServiceRestInterceptor(), - ) - client = DataPolicyServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DataPolicyServiceRestInterceptor, "post_rename_data_policy") as post, \ - mock.patch.object(transports.DataPolicyServiceRestInterceptor, "post_rename_data_policy_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataPolicyServiceRestInterceptor, "pre_rename_data_policy") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = datapolicy.RenameDataPolicyRequest.pb(datapolicy.RenameDataPolicyRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = datapolicy.DataPolicy.to_json(datapolicy.DataPolicy()) - req.return_value.content = return_value - - request = datapolicy.RenameDataPolicyRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = datapolicy.DataPolicy() - post_with_metadata.return_value = datapolicy.DataPolicy(), metadata - - client.rename_data_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_data_policy_rest_bad_request(request_type=datapolicy.DeleteDataPolicyRequest): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dataPolicies/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_data_policy(request) - - -@pytest.mark.parametrize("request_type", [ - datapolicy.DeleteDataPolicyRequest, - dict, -]) -def test_delete_data_policy_rest_call_success(request_type): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dataPolicies/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_data_policy(request) - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_data_policy_rest_interceptors(null_interceptor): - transport = transports.DataPolicyServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataPolicyServiceRestInterceptor(), - ) - client = DataPolicyServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DataPolicyServiceRestInterceptor, "pre_delete_data_policy") as pre: - pre.assert_not_called() - pb_message = datapolicy.DeleteDataPolicyRequest.pb(datapolicy.DeleteDataPolicyRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = datapolicy.DeleteDataPolicyRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_data_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_get_data_policy_rest_bad_request(request_type=datapolicy.GetDataPolicyRequest): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dataPolicies/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_data_policy(request) - - -@pytest.mark.parametrize("request_type", [ - datapolicy.GetDataPolicyRequest, - dict, -]) -def test_get_data_policy_rest_call_success(request_type): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dataPolicies/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = datapolicy.DataPolicy( - name='name_value', - data_policy_type=datapolicy.DataPolicy.DataPolicyType.COLUMN_LEVEL_SECURITY_POLICY, - data_policy_id='data_policy_id_value', - policy_tag='policy_tag_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = datapolicy.DataPolicy.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_data_policy(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, datapolicy.DataPolicy) - assert response.name == 'name_value' - assert response.data_policy_type == datapolicy.DataPolicy.DataPolicyType.COLUMN_LEVEL_SECURITY_POLICY - assert response.data_policy_id == 'data_policy_id_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_data_policy_rest_interceptors(null_interceptor): - transport = transports.DataPolicyServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataPolicyServiceRestInterceptor(), - ) - client = DataPolicyServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DataPolicyServiceRestInterceptor, "post_get_data_policy") as post, \ - mock.patch.object(transports.DataPolicyServiceRestInterceptor, "post_get_data_policy_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataPolicyServiceRestInterceptor, "pre_get_data_policy") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = datapolicy.GetDataPolicyRequest.pb(datapolicy.GetDataPolicyRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = datapolicy.DataPolicy.to_json(datapolicy.DataPolicy()) - req.return_value.content = return_value - - request = datapolicy.GetDataPolicyRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = datapolicy.DataPolicy() - post_with_metadata.return_value = datapolicy.DataPolicy(), metadata - - client.get_data_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_data_policies_rest_bad_request(request_type=datapolicy.ListDataPoliciesRequest): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_data_policies(request) - - -@pytest.mark.parametrize("request_type", [ - datapolicy.ListDataPoliciesRequest, - dict, -]) -def test_list_data_policies_rest_call_success(request_type): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
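- # The fake payload is encoded with json_format.MessageToJson, i.e. the
- # same JSON representation the REST transport expects on the wire, so
- # the client's response-parsing path is exercised realistically.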
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = datapolicy.ListDataPoliciesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = datapolicy.ListDataPoliciesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_data_policies(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDataPoliciesPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_data_policies_rest_interceptors(null_interceptor): - transport = transports.DataPolicyServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataPolicyServiceRestInterceptor(), - ) - client = DataPolicyServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DataPolicyServiceRestInterceptor, "post_list_data_policies") as post, \ - mock.patch.object(transports.DataPolicyServiceRestInterceptor, "post_list_data_policies_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataPolicyServiceRestInterceptor, "pre_list_data_policies") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = datapolicy.ListDataPoliciesRequest.pb(datapolicy.ListDataPoliciesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = datapolicy.ListDataPoliciesResponse.to_json(datapolicy.ListDataPoliciesResponse()) - req.return_value.content = return_value - - request = datapolicy.ListDataPoliciesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = datapolicy.ListDataPoliciesResponse() - post_with_metadata.return_value = datapolicy.ListDataPoliciesResponse(), metadata - - client.list_data_policies(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_iam_policy_rest_bad_request(request_type=iam_policy_pb2.GetIamPolicyRequest): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'resource': 'projects/sample1/locations/sample2/dataPolicies/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- json_return_value = ''
- response_value.json = mock.Mock(return_value={})
- response_value.status_code = 400
- response_value.request = mock.Mock()
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- client.get_iam_policy(request)
-
-
-@pytest.mark.parametrize("request_type", [
- iam_policy_pb2.GetIamPolicyRequest,
- dict,
-])
-def test_get_iam_policy_rest_call_success(request_type):
- client = DataPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
-
- # send a request that will satisfy transcoding
- request_init = {'resource': 'projects/sample1/locations/sample2/dataPolicies/sample3'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = policy_pb2.Policy(
- version=774,
- etag=b'etag_blob',
- )
-
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
- response_value.content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- response = client.get_iam_policy(request)
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, policy_pb2.Policy)
- assert response.version == 774
- assert response.etag == b'etag_blob'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_get_iam_policy_rest_interceptors(null_interceptor):
- transport = transports.DataPolicyServiceRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- interceptor=None if null_interceptor else transports.DataPolicyServiceRestInterceptor(),
- )
- client = DataPolicyServiceClient(transport=transport)
-
- with mock.patch.object(type(client.transport._session), "request") as req, \
- mock.patch.object(path_template, "transcode") as transcode, \
- mock.patch.object(transports.DataPolicyServiceRestInterceptor, "post_get_iam_policy") as post, \
- mock.patch.object(transports.DataPolicyServiceRestInterceptor, "post_get_iam_policy_with_metadata") as post_with_metadata, \
- mock.patch.object(transports.DataPolicyServiceRestInterceptor, "pre_get_iam_policy") as pre:
- pre.assert_not_called()
- post.assert_not_called()
- post_with_metadata.assert_not_called()
- pb_message = iam_policy_pb2.GetIamPolicyRequest()
- transcode.return_value = {
- "method": "post",
- "uri": "my_uri",
- "body": pb_message,
- "query_params": pb_message,
- }
-
- req.return_value = mock.Mock()
- req.return_value.status_code = 200
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- return_value = json_format.MessageToJson(policy_pb2.Policy())
- req.return_value.content = return_value
-
- request = iam_policy_pb2.GetIamPolicyRequest()
- metadata =[
- ("key", "val"),
- ("cephalopod", "squid"),
- ]
- pre.return_value = request, metadata
- post.return_value = policy_pb2.Policy()
- post_with_metadata.return_value = policy_pb2.Policy(), metadata
-
- client.get_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
- pre.assert_called_once()
- post.assert_called_once()
- post_with_metadata.assert_called_once()
-
-
-def test_set_iam_policy_rest_bad_request(request_type=iam_policy_pb2.SetIamPolicyRequest):
- client = DataPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
- # send a request that will satisfy transcoding
- request_init = {'resource': 'projects/sample1/locations/sample2/dataPolicies/sample3'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- json_return_value = ''
- response_value.json = mock.Mock(return_value={})
- response_value.status_code = 400
- response_value.request = mock.Mock()
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- client.set_iam_policy(request)
-
-
-@pytest.mark.parametrize("request_type", [
- iam_policy_pb2.SetIamPolicyRequest,
- dict,
-])
-def test_set_iam_policy_rest_call_success(request_type):
- client = DataPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest"
- )
-
- # send a request that will satisfy transcoding
- request_init = {'resource': 'projects/sample1/locations/sample2/dataPolicies/sample3'}
- request = request_type(**request_init)
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = policy_pb2.Policy(
- version=774,
- etag=b'etag_blob',
- )
-
- # Wrap the value into a proper Response obj
- response_value = mock.Mock()
- response_value.status_code = 200
- json_return_value = json_format.MessageToJson(return_value)
- response_value.content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
- response = client.set_iam_policy(request)
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_set_iam_policy_rest_interceptors(null_interceptor): - transport = transports.DataPolicyServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataPolicyServiceRestInterceptor(), - ) - client = DataPolicyServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DataPolicyServiceRestInterceptor, "post_set_iam_policy") as post, \ - mock.patch.object(transports.DataPolicyServiceRestInterceptor, "post_set_iam_policy_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataPolicyServiceRestInterceptor, "pre_set_iam_policy") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = iam_policy_pb2.SetIamPolicyRequest() - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(policy_pb2.Policy()) - req.return_value.content = return_value - - request = iam_policy_pb2.SetIamPolicyRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = policy_pb2.Policy() - post_with_metadata.return_value = policy_pb2.Policy(), metadata - - client.set_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_test_iam_permissions_rest_bad_request(request_type=iam_policy_pb2.TestIamPermissionsRequest): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'resource': 'projects/sample1/locations/sample2/dataPolicies/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.test_iam_permissions(request) - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.TestIamPermissionsRequest, - dict, -]) -def test_test_iam_permissions_rest_call_success(request_type): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'resource': 'projects/sample1/locations/sample2/dataPolicies/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.test_iam_permissions(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - assert response.permissions == ['permissions_value'] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_test_iam_permissions_rest_interceptors(null_interceptor): - transport = transports.DataPolicyServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataPolicyServiceRestInterceptor(), - ) - client = DataPolicyServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DataPolicyServiceRestInterceptor, "post_test_iam_permissions") as post, \ - mock.patch.object(transports.DataPolicyServiceRestInterceptor, "post_test_iam_permissions_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataPolicyServiceRestInterceptor, "pre_test_iam_permissions") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = iam_policy_pb2.TestIamPermissionsRequest() - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(iam_policy_pb2.TestIamPermissionsResponse()) - req.return_value.content = return_value - - request = iam_policy_pb2.TestIamPermissionsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = iam_policy_pb2.TestIamPermissionsResponse() - post_with_metadata.return_value = iam_policy_pb2.TestIamPermissionsResponse(), metadata - - client.test_iam_permissions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - -def test_initialize_client_w_rest(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_data_policy_empty_call_rest(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_data_policy), - '__call__') as call: - client.create_data_policy(request=None) - - # Establish that the underlying stub method was called. 
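- # args[0] of the recorded call is the request the client built;
- # comparing it to a freshly constructed default request verifies that
- # request=None is coerced to an empty request message.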
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datapolicy.CreateDataPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_data_policy_empty_call_rest(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_data_policy), - '__call__') as call: - client.update_data_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datapolicy.UpdateDataPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_rename_data_policy_empty_call_rest(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.rename_data_policy), - '__call__') as call: - client.rename_data_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datapolicy.RenameDataPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_data_policy_empty_call_rest(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_policy), - '__call__') as call: - client.delete_data_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datapolicy.DeleteDataPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_data_policy_empty_call_rest(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_data_policy), - '__call__') as call: - client.get_data_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datapolicy.GetDataPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_data_policies_empty_call_rest(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_data_policies), - '__call__') as call: - client.list_data_policies(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datapolicy.ListDataPoliciesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_iam_policy_empty_call_rest(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - client.get_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.GetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_set_iam_policy_empty_call_rest(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - client.set_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.SetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_test_iam_permissions_empty_call_rest(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - client.test_iam_permissions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.TestIamPermissionsRequest() - - assert args[0] == request_msg - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.DataPolicyServiceGrpcTransport, - ) - -def test_data_policy_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.DataPolicyServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_data_policy_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.bigquery_datapolicies_v1.services.data_policy_service.transports.DataPolicyServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.DataPolicyServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - 'create_data_policy', - 'update_data_policy', - 'rename_data_policy', - 'delete_data_policy', - 'get_data_policy', - 'list_data_policies', - 'get_iam_policy', - 'set_iam_policy', - 'test_iam_permissions', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_data_policy_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.bigquery_datapolicies_v1.services.data_policy_service.transports.DataPolicyServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DataPolicyServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_data_policy_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.bigquery_datapolicies_v1.services.data_policy_service.transports.DataPolicyServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DataPolicyServiceTransport() - adc.assert_called_once() - - -def test_data_policy_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - DataPolicyServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.DataPolicyServiceGrpcTransport, - transports.DataPolicyServiceGrpcAsyncIOTransport, - ], -) -def test_data_policy_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/bigquery', 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.DataPolicyServiceGrpcTransport, - transports.DataPolicyServiceGrpcAsyncIOTransport, - transports.DataPolicyServiceRestTransport, - ], -) -def test_data_policy_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.DataPolicyServiceGrpcTransport, grpc_helpers), - (transports.DataPolicyServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_data_policy_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "bigquerydatapolicy.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="bigquerydatapolicy.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.DataPolicyServiceGrpcTransport, transports.DataPolicyServiceGrpcAsyncIOTransport]) -def test_data_policy_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_data_policy_service_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.DataPolicyServiceRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_data_policy_service_host_no_port(transport_name): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='bigquerydatapolicy.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'bigquerydatapolicy.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://bigquerydatapolicy.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_data_policy_service_host_with_port(transport_name): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='bigquerydatapolicy.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'bigquerydatapolicy.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://bigquerydatapolicy.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_data_policy_service_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = DataPolicyServiceClient( - credentials=creds1, - transport=transport_name, - ) - client2 = DataPolicyServiceClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.create_data_policy._session - session2 = client2.transport.create_data_policy._session - assert session1 != session2 - session1 = client1.transport.update_data_policy._session - session2 = client2.transport.update_data_policy._session - assert session1 != session2 - session1 = client1.transport.rename_data_policy._session - session2 = client2.transport.rename_data_policy._session - assert session1 != session2 - session1 = client1.transport.delete_data_policy._session - session2 = client2.transport.delete_data_policy._session - assert session1 != session2 - session1 = client1.transport.get_data_policy._session - session2 = client2.transport.get_data_policy._session - assert session1 != session2 - session1 = client1.transport.list_data_policies._session - session2 = client2.transport.list_data_policies._session - assert session1 != session2 - session1 = client1.transport.get_iam_policy._session - session2 = client2.transport.get_iam_policy._session - assert session1 != session2 - session1 = client1.transport.set_iam_policy._session - session2 = 
client2.transport.set_iam_policy._session
- assert session1 != session2
- session1 = client1.transport.test_iam_permissions._session
- session2 = client2.transport.test_iam_permissions._session
- assert session1 != session2
-def test_data_policy_service_grpc_transport_channel():
- channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
- # Check that channel is used if provided.
- transport = transports.DataPolicyServiceGrpcTransport(
- host="squid.clam.whelk",
- channel=channel,
- )
- assert transport.grpc_channel == channel
- assert transport._host == "squid.clam.whelk:443"
- assert transport._ssl_channel_credentials is None
-
-
-def test_data_policy_service_grpc_asyncio_transport_channel():
- channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
- # Check that channel is used if provided.
- transport = transports.DataPolicyServiceGrpcAsyncIOTransport(
- host="squid.clam.whelk",
- channel=channel,
- )
- assert transport.grpc_channel == channel
- assert transport._host == "squid.clam.whelk:443"
- assert transport._ssl_channel_credentials is None
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.DataPolicyServiceGrpcTransport, transports.DataPolicyServiceGrpcAsyncIOTransport])
-def test_data_policy_service_transport_channel_mtls_with_client_cert_source(
- transport_class
-):
- with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
- with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
- mock_ssl_cred = mock.Mock()
- grpc_ssl_channel_cred.return_value = mock_ssl_cred
-
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
-
- cred = ga_credentials.AnonymousCredentials()
- with pytest.warns(DeprecationWarning):
- with mock.patch.object(google.auth, 'default') as adc:
- adc.return_value = (cred, None)
- transport = transport_class(
- host="squid.clam.whelk",
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=client_cert_source_callback,
- )
- adc.assert_called_once()
-
- grpc_ssl_channel_cred.assert_called_once_with(
- certificate_chain=b"cert bytes", private_key=b"key bytes"
- )
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=cred,
- credentials_file=None,
- scopes=None,
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
- options=[
- ("grpc.max_send_message_length", -1),
- ("grpc.max_receive_message_length", -1),
- ],
- )
- assert transport.grpc_channel == mock_grpc_channel
- assert transport._ssl_channel_credentials == mock_ssl_cred
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.DataPolicyServiceGrpcTransport, transports.DataPolicyServiceGrpcAsyncIOTransport]) -def test_data_policy_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_data_policy_path(): - project = "squid" - location = "clam" - data_policy = "whelk" - expected = "projects/{project}/locations/{location}/dataPolicies/{data_policy}".format(project=project, location=location, data_policy=data_policy, ) - actual = DataPolicyServiceClient.data_policy_path(project, location, data_policy) - assert expected == actual - - -def test_parse_data_policy_path(): - expected = { - "project": "octopus", - "location": "oyster", - "data_policy": "nudibranch", - } - path = DataPolicyServiceClient.data_policy_path(**expected) - - # Check that the path construction is reversible. - actual = DataPolicyServiceClient.parse_data_policy_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = DataPolicyServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "mussel", - } - path = DataPolicyServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = DataPolicyServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "winkle" - expected = "folders/{folder}".format(folder=folder, ) - actual = DataPolicyServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nautilus", - } - path = DataPolicyServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = DataPolicyServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "scallop" - expected = "organizations/{organization}".format(organization=organization, ) - actual = DataPolicyServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "abalone", - } - path = DataPolicyServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. 
- actual = DataPolicyServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "squid" - expected = "projects/{project}".format(project=project, ) - actual = DataPolicyServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "clam", - } - path = DataPolicyServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = DataPolicyServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "whelk" - location = "octopus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = DataPolicyServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - } - path = DataPolicyServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = DataPolicyServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.DataPolicyServiceTransport, '_prep_wrapped_messages') as prep: - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.DataPolicyServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = DataPolicyServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_transport_close_grpc(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close_rest(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (DataPolicyServiceClient, transports.DataPolicyServiceGrpcTransport), - (DataPolicyServiceAsyncClient, transports.DataPolicyServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/.coveragerc b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/.coveragerc deleted file mode 100644 index 0c3effcc3668..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/bigquery_datapolicies/__init__.py - google/cloud/bigquery_datapolicies/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/.flake8 b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/.flake8 deleted file mode 100644 index 29227d4cf419..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. 
- **/.nox/**
- __pycache__,
- .git,
- *.pyc,
- conf.py
diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/MANIFEST.in b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/MANIFEST.in
deleted file mode 100644
index ddda09949792..000000000000
--- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/MANIFEST.in
+++ /dev/null
@@ -1,2 +0,0 @@
-recursive-include google/cloud/bigquery_datapolicies *.py
-recursive-include google/cloud/bigquery_datapolicies_v1beta1 *.py
diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/README.rst b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/README.rst
deleted file mode 100644
index 074e1245b6d1..000000000000
--- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/README.rst
+++ /dev/null
@@ -1,143 +0,0 @@
-Python Client for Google Cloud Bigquery Datapolicies API
-=========================================================
-
-Quick Start
------------
-
-In order to use this library, you first need to go through the following steps:
-
-1. `Select or create a Cloud Platform project.`_
-2. `Enable billing for your project.`_
-3. Enable the Google Cloud Bigquery Datapolicies API.
-4. `Setup Authentication.`_
-
-.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
-.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
-.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
-
-Installation
-~~~~~~~~~~~~
-
-Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
-create isolated Python environments. The basic problem it addresses is one of
-dependencies and versions, and indirectly permissions.
-
-With `virtualenv`_, it's possible to install this library without needing system
-install permissions, and without clashing with the installed system
-dependencies.
-
-.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
-
-
-Mac/Linux
-^^^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    source <your-env>/bin/activate
-    <your-env>/bin/pip install /path/to/library
-
-
-Windows
-^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    <your-env>\Scripts\activate
-    <your-env>\Scripts\pip.exe install \path\to\library
-
-
-Logging
--------
-
-This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes.
-Note the following:
-
-#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging.
-#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**.
-#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below.
-
-
-Simple, environment-based configuration
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google
-logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged
-messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging
-event.
-
-A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log.
-
-- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc.
-- Invalid logging scopes: :code:`foo`, :code:`123`, etc.
-
-**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers.
-
-
-Examples
-^^^^^^^^
-
-- Enabling the default handler for all Google-based loggers
-
-.. code-block:: console
-
-    export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google
-
-- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`):
-
-.. code-block:: console
-
-    export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1
-
-
-Advanced, code-based configuration
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-You can also configure a valid logging scope using Python's standard `logging` mechanism.
-
-
-Examples
-^^^^^^^^
-
-- Configuring a handler for all Google-based loggers
-
-.. code-block:: python
-
-    import logging
-
-    from google.cloud.translate_v3 import translate
-
-    base_logger = logging.getLogger("google")
-    base_logger.addHandler(logging.StreamHandler())
-    base_logger.setLevel(logging.DEBUG)
-
-- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`):
-
-.. code-block:: python
-
-    import logging
-
-    from google.cloud.translate_v3 import translate
-
-    base_logger = logging.getLogger("google.cloud.library_v1")
-    base_logger.addHandler(logging.StreamHandler())
-    base_logger.setLevel(logging.DEBUG)
-
-
-Logging details
-~~~~~~~~~~~~~~~
-
-#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root
-   logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set
-   :code:`logging.getLogger("google").propagate = True` in your code.
-#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for
-   one library, but decide you need to also set up environment-based logging configuration for another library.
-
-   #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual
-      if the code-based configuration gets applied first.
-
-#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get
-   executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured.
-   (This is the reason for 2.i. above.)
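A minimal sketch tying the points above together, assuming the package-level logger name :code:`google.cloud.bigquery_datapolicies_v1beta1` from this library (the handler choice is illustrative, not prescribed):

.. code-block:: python

    import logging

    # Capture this library's DEBUG-level RPC events with an explicit handler.
    pkg_logger = logging.getLogger("google.cloud.bigquery_datapolicies_v1beta1")
    pkg_logger.addHandler(logging.StreamHandler())
    pkg_logger.setLevel(logging.DEBUG)

    # Opt in to propagation so the same events also reach any root-level handlers.
    logging.getLogger("google").propagate = True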
diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/docs/_static/custom.css b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/docs/_static/custom.css deleted file mode 100644 index 06423be0b592..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/docs/_static/custom.css +++ /dev/null @@ -1,3 +0,0 @@ -dl.field-list > dt { - min-width: 100px -} diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/docs/bigquery_datapolicies_v1beta1/data_policy_service.rst b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/docs/bigquery_datapolicies_v1beta1/data_policy_service.rst deleted file mode 100644 index e4198c590cde..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/docs/bigquery_datapolicies_v1beta1/data_policy_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -DataPolicyService ------------------------------------ - -.. automodule:: google.cloud.bigquery_datapolicies_v1beta1.services.data_policy_service - :members: - :inherited-members: - -.. automodule:: google.cloud.bigquery_datapolicies_v1beta1.services.data_policy_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/docs/bigquery_datapolicies_v1beta1/services_.rst b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/docs/bigquery_datapolicies_v1beta1/services_.rst deleted file mode 100644 index 38e5512e1111..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/docs/bigquery_datapolicies_v1beta1/services_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Bigquery Datapolicies v1beta1 API -=========================================================== -.. toctree:: - :maxdepth: 2 - - data_policy_service diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/docs/bigquery_datapolicies_v1beta1/types_.rst b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/docs/bigquery_datapolicies_v1beta1/types_.rst deleted file mode 100644 index 2322c0ee11d4..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/docs/bigquery_datapolicies_v1beta1/types_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Bigquery Datapolicies v1beta1 API -======================================================== - -.. automodule:: google.cloud.bigquery_datapolicies_v1beta1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/docs/conf.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/docs/conf.py deleted file mode 100644 index ffabfdc34894..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-bigquery-datapolicies documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. 
-#
-# Note that not all possible configuration values are present in this
-# autogenerated file.
-#
-# All configuration values have a default; values that are commented out
-# serve to show the default.
-
-import sys
-import os
-import shlex
-
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
-sys.path.insert(0, os.path.abspath(".."))
-
-__version__ = "0.1.0"
-
-# -- General configuration ------------------------------------------------
-
-# If your documentation needs a minimal Sphinx version, state it here.
-needs_sphinx = "4.0.1"
-
-# Add any Sphinx extension module names here, as strings. They can be
-# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
-# ones.
-extensions = [
- "sphinx.ext.autodoc",
- "sphinx.ext.autosummary",
- "sphinx.ext.intersphinx",
- "sphinx.ext.coverage",
- "sphinx.ext.napoleon",
- "sphinx.ext.todo",
- "sphinx.ext.viewcode",
-]
-
-# autodoc/autosummary flags
-autoclass_content = "both"
-autodoc_default_flags = ["members"]
-autosummary_generate = True
-
-
-# Add any paths that contain templates here, relative to this directory.
-templates_path = ["_templates"]
-
-# Allow markdown includes (so releases.md can include CHANGELOG.md)
-# http://www.sphinx-doc.org/en/master/markdown.html
-source_parsers = {".md": "recommonmark.parser.CommonMarkParser"}
-
-# The suffix(es) of source filenames.
-# You can specify multiple suffixes as a list of strings:
-source_suffix = [".rst", ".md"]
-
-# The encoding of source files.
-# source_encoding = 'utf-8-sig'
-
-# The root toctree document.
-root_doc = "index"
-
-# General information about the project.
-project = u"google-cloud-bigquery-datapolicies"
-copyright = u"2023, Google, LLC"
-author = u"Google APIs" # TODO: autogenerate this bit
-
-# The version info for the project you're documenting, acts as replacement for
-# |version| and |release|, also used in various other places throughout the
-# built documents.
-#
-# The full version, including alpha/beta/rc tags.
-release = __version__
-# The short X.Y version.
-version = ".".join(release.split(".")[0:2])
-
-# The language for content autogenerated by Sphinx. Refer to documentation
-# for a list of supported languages.
-#
-# This is also used if you do content translation via gettext catalogs.
-# Usually you set "language" from the command line for these cases.
-language = 'en'
-
-# There are two options for replacing |today|: either, you set today to some
-# non-false value, then it is used:
-# today = ''
-# Else, today_fmt is used as the format for a strftime call.
-# today_fmt = '%B %d, %Y'
-
-# List of patterns, relative to source directory, that match files and
-# directories to ignore when looking for source files.
-exclude_patterns = ["_build"]
-
-# The reST default role (used for this markup: `text`) to use for all
-# documents.
-# default_role = None
-
-# If true, '()' will be appended to :func: etc. cross-reference text.
-# add_function_parentheses = True
-
-# If true, the current module name will be prepended to all description
-# unit titles (such as .. function::).
-# add_module_names = True
-
-# If true, sectionauthor and moduleauthor directives will be shown in the
-# output. They are ignored by default.
-# show_authors = False
-
-# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). 
-# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-bigquery-datapolicies-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-bigquery-datapolicies.tex", - u"google-cloud-bigquery-datapolicies Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-bigquery-datapolicies", - u"Google Cloud Bigquery Datapolicies Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-bigquery-datapolicies", - u"google-cloud-bigquery-datapolicies Documentation", - author, - "google-cloud-bigquery-datapolicies", - "GAPIC library for Google Cloud Bigquery Datapolicies API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. 
-# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/docs/index.rst b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/docs/index.rst deleted file mode 100644 index 7df22abbcc9b..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - bigquery_datapolicies_v1beta1/services_ - bigquery_datapolicies_v1beta1/types_ diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies/__init__.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies/__init__.py deleted file mode 100644 index beff5119c030..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies/__init__.py +++ /dev/null @@ -1,43 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.bigquery_datapolicies import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.bigquery_datapolicies_v1beta1.services.data_policy_service.client import DataPolicyServiceClient -from google.cloud.bigquery_datapolicies_v1beta1.services.data_policy_service.async_client import DataPolicyServiceAsyncClient - -from google.cloud.bigquery_datapolicies_v1beta1.types.datapolicy import CreateDataPolicyRequest -from google.cloud.bigquery_datapolicies_v1beta1.types.datapolicy import DataMaskingPolicy -from google.cloud.bigquery_datapolicies_v1beta1.types.datapolicy import DataPolicy -from google.cloud.bigquery_datapolicies_v1beta1.types.datapolicy import DeleteDataPolicyRequest -from google.cloud.bigquery_datapolicies_v1beta1.types.datapolicy import GetDataPolicyRequest -from google.cloud.bigquery_datapolicies_v1beta1.types.datapolicy import ListDataPoliciesRequest -from google.cloud.bigquery_datapolicies_v1beta1.types.datapolicy import ListDataPoliciesResponse -from google.cloud.bigquery_datapolicies_v1beta1.types.datapolicy import UpdateDataPolicyRequest - -__all__ = ('DataPolicyServiceClient', - 'DataPolicyServiceAsyncClient', - 'CreateDataPolicyRequest', - 'DataMaskingPolicy', - 'DataPolicy', - 'DeleteDataPolicyRequest', - 'GetDataPolicyRequest', - 'ListDataPoliciesRequest', - 'ListDataPoliciesResponse', - 'UpdateDataPolicyRequest', -) diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies/gapic_version.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies/py.typed b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies/py.typed deleted file mode 100644 index ff02bfbdb46b..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-bigquery-datapolicies package uses inline types. 
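Because the versionless package above re-exports the v1beta1 client and request types, callers can work entirely from the :code:`google.cloud.bigquery_datapolicies` namespace. A minimal usage sketch against those re-exports (the parent value below is a hypothetical placeholder, and Application Default Credentials are assumed):

.. code-block:: python

    from google.cloud import bigquery_datapolicies

    client = bigquery_datapolicies.DataPolicyServiceClient()

    # Page through data policies under a hypothetical project/location.
    request = bigquery_datapolicies.ListDataPoliciesRequest(
        parent="projects/my-project/locations/us",
    )
    for policy in client.list_data_policies(request=request):
        print(policy.name)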
diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/__init__.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/__init__.py deleted file mode 100644 index 221ce8cb5548..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/__init__.py +++ /dev/null @@ -1,44 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.bigquery_datapolicies_v1beta1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.data_policy_service import DataPolicyServiceClient -from .services.data_policy_service import DataPolicyServiceAsyncClient - -from .types.datapolicy import CreateDataPolicyRequest -from .types.datapolicy import DataMaskingPolicy -from .types.datapolicy import DataPolicy -from .types.datapolicy import DeleteDataPolicyRequest -from .types.datapolicy import GetDataPolicyRequest -from .types.datapolicy import ListDataPoliciesRequest -from .types.datapolicy import ListDataPoliciesResponse -from .types.datapolicy import UpdateDataPolicyRequest - -__all__ = ( - 'DataPolicyServiceAsyncClient', -'CreateDataPolicyRequest', -'DataMaskingPolicy', -'DataPolicy', -'DataPolicyServiceClient', -'DeleteDataPolicyRequest', -'GetDataPolicyRequest', -'ListDataPoliciesRequest', -'ListDataPoliciesResponse', -'UpdateDataPolicyRequest', -) diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/gapic_metadata.json b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/gapic_metadata.json deleted file mode 100644 index 229de39247fe..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/gapic_metadata.json +++ /dev/null @@ -1,103 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.bigquery_datapolicies_v1beta1", - "protoPackage": "google.cloud.bigquery.datapolicies.v1beta1", - "schema": "1.0", - "services": { - "DataPolicyService": { - "clients": { - "grpc": { - "libraryClient": "DataPolicyServiceClient", - "rpcs": { - "CreateDataPolicy": { - "methods": [ - "create_data_policy" - ] - }, - "DeleteDataPolicy": { - "methods": [ - "delete_data_policy" - ] - }, - "GetDataPolicy": { - "methods": [ - "get_data_policy" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - "ListDataPolicies": { - "methods": [ - "list_data_policies" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UpdateDataPolicy": { - "methods": [ - "update_data_policy" - ] - } - } - }, - "grpc-async": { - "libraryClient": "DataPolicyServiceAsyncClient", - 
"rpcs": { - "CreateDataPolicy": { - "methods": [ - "create_data_policy" - ] - }, - "DeleteDataPolicy": { - "methods": [ - "delete_data_policy" - ] - }, - "GetDataPolicy": { - "methods": [ - "get_data_policy" - ] - }, - "GetIamPolicy": { - "methods": [ - "get_iam_policy" - ] - }, - "ListDataPolicies": { - "methods": [ - "list_data_policies" - ] - }, - "SetIamPolicy": { - "methods": [ - "set_iam_policy" - ] - }, - "TestIamPermissions": { - "methods": [ - "test_iam_permissions" - ] - }, - "UpdateDataPolicy": { - "methods": [ - "update_data_policy" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/gapic_version.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/py.typed b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/py.typed deleted file mode 100644 index ff02bfbdb46b..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-bigquery-datapolicies package uses inline types. diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/services/__init__.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/services/__init__.py deleted file mode 100644 index 8f6cf068242c..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/__init__.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/__init__.py deleted file mode 100644 index 45491be42598..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import DataPolicyServiceClient -from .async_client import DataPolicyServiceAsyncClient - -__all__ = ( - 'DataPolicyServiceClient', - 'DataPolicyServiceAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/async_client.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/async_client.py deleted file mode 100644 index c3e92ee549e6..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/async_client.py +++ /dev/null @@ -1,1158 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.bigquery_datapolicies_v1beta1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.cloud.bigquery_datapolicies_v1beta1.services.data_policy_service import pagers -from google.cloud.bigquery_datapolicies_v1beta1.types import datapolicy -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from .transports.base import DataPolicyServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import DataPolicyServiceGrpcAsyncIOTransport -from .client import DataPolicyServiceClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -class DataPolicyServiceAsyncClient: - """Data Policy Service provides APIs for managing the - label-policy bindings. - """ - - _client: DataPolicyServiceClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = DataPolicyServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = DataPolicyServiceClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = DataPolicyServiceClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = DataPolicyServiceClient._DEFAULT_UNIVERSE - - data_policy_path = staticmethod(DataPolicyServiceClient.data_policy_path) - parse_data_policy_path = staticmethod(DataPolicyServiceClient.parse_data_policy_path) - common_billing_account_path = staticmethod(DataPolicyServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(DataPolicyServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(DataPolicyServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(DataPolicyServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(DataPolicyServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(DataPolicyServiceClient.parse_common_organization_path) - common_project_path = staticmethod(DataPolicyServiceClient.common_project_path) - parse_common_project_path = staticmethod(DataPolicyServiceClient.parse_common_project_path) - common_location_path = staticmethod(DataPolicyServiceClient.common_location_path) - parse_common_location_path = staticmethod(DataPolicyServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. 
- args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DataPolicyServiceAsyncClient: The constructed client. - """ - return DataPolicyServiceClient.from_service_account_info.__func__(DataPolicyServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DataPolicyServiceAsyncClient: The constructed client. - """ - return DataPolicyServiceClient.from_service_account_file.__func__(DataPolicyServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` is provided, use the provided one. - (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return DataPolicyServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> DataPolicyServiceTransport: - """Returns the transport used by the client instance. - - Returns: - DataPolicyServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance.
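When ambient credentials are not available, the ``from_service_account_file`` constructor above (or its ``from_service_account_json`` alias) is the usual alternative; a minimal sketch, with a placeholder key path:

.. code-block:: python

    from google.cloud import bigquery_datapolicies_v1beta1

    # Placeholder path to a service account private key file.
    client = bigquery_datapolicies_v1beta1.DataPolicyServiceAsyncClient.from_service_account_file(
        "/path/to/service-account.json"
    )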
- """ - return self._client._universe_domain - - get_transport_class = DataPolicyServiceClient.get_transport_class - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, DataPolicyServiceTransport, Callable[..., DataPolicyServiceTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the data policy service async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,DataPolicyServiceTransport,Callable[..., DataPolicyServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the DataPolicyServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. 
- """ - self._client = DataPolicyServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.bigquery.datapolicies_v1beta1.DataPolicyServiceAsyncClient`.", - extra = { - "serviceName": "google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { - "serviceName": "google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService", - "credentialsType": None, - } - ) - - async def create_data_policy(self, - request: Optional[Union[datapolicy.CreateDataPolicyRequest, dict]] = None, - *, - parent: Optional[str] = None, - data_policy: Optional[datapolicy.DataPolicy] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> datapolicy.DataPolicy: - r"""Creates a new data policy under a project with the given - ``dataPolicyId`` (used as the display name), policy tag, and - data policy type. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datapolicies_v1beta1 - - async def sample_create_data_policy(): - # Create a client - client = bigquery_datapolicies_v1beta1.DataPolicyServiceAsyncClient() - - # Initialize request argument(s) - data_policy = bigquery_datapolicies_v1beta1.DataPolicy() - data_policy.policy_tag = "policy_tag_value" - data_policy.data_masking_policy.predefined_expression = "DEFAULT_MASKING_VALUE" - - request = bigquery_datapolicies_v1beta1.CreateDataPolicyRequest( - parent="parent_value", - data_policy=data_policy, - ) - - # Make the request - response = await client.create_data_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_datapolicies_v1beta1.types.CreateDataPolicyRequest, dict]]): - The request object. Request message for the - CreateDataPolicy method. - parent (:class:`str`): - Required. Resource name of the project that the data - policy will belong to. The format is - ``projects/{project_number}/locations/{location_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - data_policy (:class:`google.cloud.bigquery_datapolicies_v1beta1.types.DataPolicy`): - Required. The data policy to create. The ``name`` field - does not need to be provided for the data policy - creation. - - This corresponds to the ``data_policy`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_datapolicies_v1beta1.types.DataPolicy: - Represents the label-policy binding. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, data_policy] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datapolicy.CreateDataPolicyRequest): - request = datapolicy.CreateDataPolicyRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if data_policy is not None: - request.data_policy = data_policy - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_data_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_data_policy(self, - request: Optional[Union[datapolicy.UpdateDataPolicyRequest, dict]] = None, - *, - data_policy: Optional[datapolicy.DataPolicy] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> datapolicy.DataPolicy: - r"""Updates the metadata for an existing data policy. The - target data policy can be specified by the resource - name. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datapolicies_v1beta1 - - async def sample_update_data_policy(): - # Create a client - client = bigquery_datapolicies_v1beta1.DataPolicyServiceAsyncClient() - - # Initialize request argument(s) - data_policy = bigquery_datapolicies_v1beta1.DataPolicy() - data_policy.policy_tag = "policy_tag_value" - data_policy.data_masking_policy.predefined_expression = "DEFAULT_MASKING_VALUE" - - request = bigquery_datapolicies_v1beta1.UpdateDataPolicyRequest( - data_policy=data_policy, - ) - - # Make the request - response = await client.update_data_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_datapolicies_v1beta1.types.UpdateDataPolicyRequest, dict]]): - The request object. Request message for the - UpdateDataPolicy method. - data_policy (:class:`google.cloud.bigquery_datapolicies_v1beta1.types.DataPolicy`): - Required. Update the data policy's metadata. - - The target data policy is determined by the ``name`` - field. Other fields are updated to the specified values - based on the field masks. - - This corresponds to the ``data_policy`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - The update mask applies to the resource. For the - ``FieldMask`` definition, see - https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask - If not set, defaults to all of the fields that are - allowed to be updated. - - Updates to the ``name`` and ``dataPolicyId`` fields are - not allowed. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_datapolicies_v1beta1.types.DataPolicy: - Represents the label-policy binding. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [data_policy, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datapolicy.UpdateDataPolicyRequest): - request = datapolicy.UpdateDataPolicyRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these.
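As the argument check above implies, ``update_data_policy`` can also be called with the flattened ``data_policy``/``update_mask`` arguments instead of a request object. A hedged sketch of that form, restricting the update to the policy tag (field names follow the ``DataPolicy`` message; the helper function itself is hypothetical):

.. code-block:: python

    from google.cloud import bigquery_datapolicies_v1beta1
    from google.protobuf import field_mask_pb2

    async def update_policy_tag(client, name: str, policy_tag: str):
        # Flattened-argument form: the request object is assembled by the
        # client and the routing header is derived from data_policy.name.
        data_policy = bigquery_datapolicies_v1beta1.DataPolicy(
            name=name,
            policy_tag=policy_tag,
        )
        # Only the listed field is updated; all other fields are untouched.
        update_mask = field_mask_pb2.FieldMask(paths=["policy_tag"])
        return await client.update_data_policy(
            data_policy=data_policy,
            update_mask=update_mask,
        )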
- if data_policy is not None: - request.data_policy = data_policy - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_data_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("data_policy.name", request.data_policy.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_data_policy(self, - request: Optional[Union[datapolicy.DeleteDataPolicyRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes the data policy specified by its resource - name. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datapolicies_v1beta1 - - async def sample_delete_data_policy(): - # Create a client - client = bigquery_datapolicies_v1beta1.DataPolicyServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_datapolicies_v1beta1.DeleteDataPolicyRequest( - name="name_value", - ) - - # Make the request - await client.delete_data_policy(request=request) - - Args: - request (Optional[Union[google.cloud.bigquery_datapolicies_v1beta1.types.DeleteDataPolicyRequest, dict]]): - The request object. Request message for the - DeleteDataPolicy method. - name (:class:`str`): - Required. Resource name of the data policy to delete. - Format is - ``projects/{project_number}/locations/{location_id}/dataPolicies/{data_policy_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datapolicy.DeleteDataPolicyRequest): - request = datapolicy.DeleteDataPolicyRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_data_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def get_data_policy(self, - request: Optional[Union[datapolicy.GetDataPolicyRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> datapolicy.DataPolicy: - r"""Gets the data policy specified by its resource name. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datapolicies_v1beta1 - - async def sample_get_data_policy(): - # Create a client - client = bigquery_datapolicies_v1beta1.DataPolicyServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_datapolicies_v1beta1.GetDataPolicyRequest( - name="name_value", - ) - - # Make the request - response = await client.get_data_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_datapolicies_v1beta1.types.GetDataPolicyRequest, dict]]): - The request object. Request message for the GetDataPolicy - method. - name (:class:`str`): - Required. Resource name of the requested data policy. - Format is - ``projects/{project_number}/locations/{location_id}/dataPolicies/{data_policy_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- - Returns: - google.cloud.bigquery_datapolicies_v1beta1.types.DataPolicy: - Represents the label-policy binding. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datapolicy.GetDataPolicyRequest): - request = datapolicy.GetDataPolicyRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_data_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_data_policies(self, - request: Optional[Union[datapolicy.ListDataPoliciesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListDataPoliciesAsyncPager: - r"""List all of the data policies in the specified parent - project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datapolicies_v1beta1 - - async def sample_list_data_policies(): - # Create a client - client = bigquery_datapolicies_v1beta1.DataPolicyServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_datapolicies_v1beta1.ListDataPoliciesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_policies(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_datapolicies_v1beta1.types.ListDataPoliciesRequest, dict]]): - The request object. Request message for the - ListDataPolicies method. - parent (:class:`str`): - Required. Resource name of the project for which to list - data policies. Format is - ``projects/{project_number}/locations/{location_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
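Every method in this client accepts the ``retry`` and ``timeout`` arguments described above. A minimal sketch of overriding them per call, assuming transient ``UNAVAILABLE`` errors should be retried (all numbers are illustrative, not recommendations; ``client`` is an async client created as shown earlier):

.. code-block:: python

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry_async as retries

    # Retry only on UNAVAILABLE, with exponential backoff capped at 10s and a
    # 60s overall budget.
    custom_retry = retries.AsyncRetry(
        predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
        initial=0.5,
        maximum=10.0,
        multiplier=2.0,
        timeout=60.0,
    )

    response = await client.get_data_policy(
        name="projects/my-project/locations/us/dataPolicies/my-policy",
        retry=custom_retry,
        timeout=30.0,
    )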
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_datapolicies_v1beta1.services.data_policy_service.pagers.ListDataPoliciesAsyncPager: - Response message for the - ListDataPolicies method. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datapolicy.ListDataPoliciesRequest): - request = datapolicy.ListDataPoliciesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_data_policies] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListDataPoliciesAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_iam_policy(self, - request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Gets the IAM policy for the specified data policy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
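Beyond flat ``async for`` iteration, the pager returned by ``list_data_policies`` above also exposes page-level access, which is useful when page boundaries matter (resource names below are placeholders):

.. code-block:: python

    from google.cloud import bigquery_datapolicies_v1beta1

    request = bigquery_datapolicies_v1beta1.ListDataPoliciesRequest(
        parent="projects/my-project/locations/us",
        page_size=50,
    )
    pager = await client.list_data_policies(request=request)

    # Each page is a full ListDataPoliciesResponse; additional pages are
    # fetched lazily as the loop advances.
    async for page in pager.pages:
        for data_policy in page.data_policies:
            print(data_policy.name)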
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datapolicies_v1beta1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - async def sample_get_iam_policy(): - # Create a client - client = bigquery_datapolicies_v1beta1.DataPolicyServiceAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.get_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]]): - The request object. Request message for ``GetIamPolicy`` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). - - **JSON example:** - - :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](\ https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. 
- # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - elif not request: - request = iam_policy_pb2.GetIamPolicyRequest() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def set_iam_policy(self, - request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM policy for the specified data policy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datapolicies_v1beta1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - async def sample_set_iam_policy(): - # Create a client - client = bigquery_datapolicies_v1beta1.DataPolicyServiceAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.set_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]]): - The request object. Request message for ``SetIamPolicy`` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. 
A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). - - **JSON example:** - - :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](\ https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - elif not request: - request = iam_policy_pb2.SetIamPolicyRequest() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.set_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def test_iam_permissions(self, - request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Returns the caller's permission on the specified data - policy resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
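``get_iam_policy`` and ``set_iam_policy`` above are typically combined into an etag-guarded read-modify-write cycle. A hedged sketch (the role and member are illustrative; consult the BigQuery data policy IAM documentation for the roles that actually apply):

.. code-block:: python

    from google.iam.v1 import iam_policy_pb2

    resource = "projects/my-project/locations/us/dataPolicies/my-policy"

    # Read the current policy (including its etag), add a binding, and write
    # it back; the etag lets the service reject concurrent modifications.
    policy = await client.get_iam_policy(
        request=iam_policy_pb2.GetIamPolicyRequest(resource=resource)
    )
    policy.bindings.add(
        role="roles/bigquerydatapolicy.maskedReader",  # illustrative role
        members=["user:eve@example.com"],
    )
    await client.set_iam_policy(
        request=iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy)
    )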
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datapolicies_v1beta1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - async def sample_test_iam_permissions(): - # Create a client - client = bigquery_datapolicies_v1beta1.DataPolicyServiceAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = await client.test_iam_permissions(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]]): - The request object. Request message for ``TestIamPermissions`` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: - Response message for TestIamPermissions method. - """ - # Create or coerce a protobuf request object. - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - elif not request: - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.test_iam_permissions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def __aenter__(self) -> "DataPolicyServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "DataPolicyServiceAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/client.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/client.py deleted file mode 100644 index 214cd00e6f9f..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/client.py +++ /dev/null @@ -1,1516 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.bigquery_datapolicies_v1beta1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.cloud.bigquery_datapolicies_v1beta1.services.data_policy_service import pagers -from google.cloud.bigquery_datapolicies_v1beta1.types import datapolicy -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from .transports.base import DataPolicyServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import DataPolicyServiceGrpcTransport -from .transports.grpc_asyncio import DataPolicyServiceGrpcAsyncIOTransport - - -class DataPolicyServiceClientMeta(type): - """Metaclass for the DataPolicyService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[DataPolicyServiceTransport]] - _transport_registry["grpc"] = DataPolicyServiceGrpcTransport - _transport_registry["grpc_asyncio"] = DataPolicyServiceGrpcAsyncIOTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[DataPolicyServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class DataPolicyServiceClient(metaclass=DataPolicyServiceClientMeta): - """Data Policy Service provides APIs for managing the - label-policy bindings. 
- """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = "bigquerydatapolicy.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - _DEFAULT_ENDPOINT_TEMPLATE = "bigquerydatapolicy.{UNIVERSE_DOMAIN}" - _DEFAULT_UNIVERSE = "googleapis.com" - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DataPolicyServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DataPolicyServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> DataPolicyServiceTransport: - """Returns the transport used by the client instance. - - Returns: - DataPolicyServiceTransport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def data_policy_path(project: str,location: str,data_policy: str,) -> str: - """Returns a fully-qualified data_policy string.""" - return "projects/{project}/locations/{location}/dataPolicies/{data_policy}".format(project=project, location=location, data_policy=data_policy, ) - - @staticmethod - def parse_data_policy_path(path: str) -> Dict[str,str]: - """Parses a data_policy path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/dataPolicies/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Deprecated. Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. 
-        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
-        default mTLS endpoint; if the environment variable is "never", use the default API
-        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
-        use the default API endpoint.
-
-        More details can be found at https://google.aip.dev/auth/4114.
-
-        Args:
-            client_options (google.api_core.client_options.ClientOptions): Custom options for the
-                client. Only the `api_endpoint` and `client_cert_source` properties may be used
-                in this method.
-
-        Returns:
-            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
-                client cert source to use.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
-        """
-
-        warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
-            DeprecationWarning)
-        if client_options is None:
-            client_options = client_options_lib.ClientOptions()
-        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
-        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
-        if use_client_cert not in ("true", "false"):
-            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
-        if use_mtls_endpoint not in ("auto", "never", "always"):
-            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
-
-        # Figure out the client cert source to use.
-        client_cert_source = None
-        if use_client_cert == "true":
-            if client_options.client_cert_source:
-                client_cert_source = client_options.client_cert_source
-            elif mtls.has_default_client_cert_source():
-                client_cert_source = mtls.default_client_cert_source()
-
-        # Figure out which api endpoint to use.
-        if client_options.api_endpoint is not None:
-            api_endpoint = client_options.api_endpoint
-        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
-            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
-        else:
-            api_endpoint = cls.DEFAULT_ENDPOINT
-
-        return api_endpoint, client_cert_source
-
-    @staticmethod
-    def _read_environment_variables():
-        """Returns the environment variables used by the client.
-
-        Returns:
-            Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
-            GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.
-
-        Raises:
-            ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
-                any of ["true", "false"].
-            google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
-                is not any of ["auto", "never", "always"].
-        """
-        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower()
-        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower()
-        universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN")
-        if use_client_cert not in ("true", "false"):
-            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
-        if use_mtls_endpoint not in ("auto", "never", "always"):
-            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
-        return use_client_cert == "true", use_mtls_endpoint, universe_domain_env
-
-    @staticmethod
-    def _get_client_cert_source(provided_cert_source, use_cert_flag):
-        """Return the client cert source to be used by the client.
-
-        Args:
-            provided_cert_source (bytes): The client certificate source provided.
- use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = DataPolicyServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = DataPolicyServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = DataPolicyServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. - """ - universe_domain = DataPolicyServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
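The three resolution helpers above feed the public ``api_endpoint`` and ``universe_domain`` properties defined just below. A sketch of the default flow, assuming no client certificate and no endpoint-related environment variables are set:

.. code-block:: python

    import os

    from google.cloud import bigquery_datapolicies_v1beta1

    # With GOOGLE_API_USE_MTLS_ENDPOINT unset (treated as "auto") and no client
    # certificate, _get_api_endpoint falls through to the universe-domain template.
    os.environ.pop("GOOGLE_API_USE_MTLS_ENDPOINT", None)
    client = bigquery_datapolicies_v1beta1.DataPolicyServiceClient()
    print(client.api_endpoint)     # expected: "bigquerydatapolicy.googleapis.com"
    print(client.universe_domain)  # expected: "googleapis.com"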
- """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, DataPolicyServiceTransport, Callable[..., DataPolicyServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the data policy service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,DataPolicyServiceTransport,Callable[..., DataPolicyServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the DataPolicyServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) - - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = DataPolicyServiceClient._read_environment_variables() - self._client_cert_source = DataPolicyServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = DataPolicyServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER - # Setup logging. - client_logging.initialize_logging() - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, DataPolicyServiceTransport) - if transport_provided: - # transport is a DataPolicyServiceTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = cast(DataPolicyServiceTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - DataPolicyServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[DataPolicyServiceTransport], Callable[..., DataPolicyServiceTransport]] = ( - DataPolicyServiceClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., DataPolicyServiceTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.bigquery.datapolicies_v1beta1.DataPolicyServiceClient`.", - extra = { - "serviceName": "google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService", - "credentialsType": None, - } - ) - - def create_data_policy(self, - request: Optional[Union[datapolicy.CreateDataPolicyRequest, dict]] = None, - *, - parent: Optional[str] = None, - data_policy: Optional[datapolicy.DataPolicy] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> datapolicy.DataPolicy: - r"""Creates a new data policy under a project with the given - ``dataPolicyId`` (used as the display name), policy tag, and - data policy type. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datapolicies_v1beta1 - - def sample_create_data_policy(): - # Create a client - client = bigquery_datapolicies_v1beta1.DataPolicyServiceClient() - - # Initialize request argument(s) - data_policy = bigquery_datapolicies_v1beta1.DataPolicy() - data_policy.policy_tag = "policy_tag_value" - data_policy.data_masking_policy.predefined_expression = "DEFAULT_MASKING_VALUE" - - request = bigquery_datapolicies_v1beta1.CreateDataPolicyRequest( - parent="parent_value", - data_policy=data_policy, - ) - - # Make the request - response = client.create_data_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_datapolicies_v1beta1.types.CreateDataPolicyRequest, dict]): - The request object. Request message for the - CreateDataPolicy method. - parent (str): - Required. Resource name of the project that the data - policy will belong to. The format is - ``projects/{project_number}/locations/{location_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - data_policy (google.cloud.bigquery_datapolicies_v1beta1.types.DataPolicy): - Required. The data policy to create. The ``name`` field - does not need to be provided for the data policy - creation. - - This corresponds to the ``data_policy`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_datapolicies_v1beta1.types.DataPolicy: - Represents the label-policy binding. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, data_policy] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datapolicy.CreateDataPolicyRequest): - request = datapolicy.CreateDataPolicyRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if data_policy is not None: - request.data_policy = data_policy - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_data_policy] - - # Certain fields should be provided within the metadata header; - # add these here. 
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def update_data_policy(self,
-            request: Optional[Union[datapolicy.UpdateDataPolicyRequest, dict]] = None,
-            *,
-            data_policy: Optional[datapolicy.DataPolicy] = None,
-            update_mask: Optional[field_mask_pb2.FieldMask] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> datapolicy.DataPolicy:
-        r"""Updates the metadata for an existing data policy. The
-        target data policy can be specified by the resource
-        name.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import bigquery_datapolicies_v1beta1
-
-            def sample_update_data_policy():
-                # Create a client
-                client = bigquery_datapolicies_v1beta1.DataPolicyServiceClient()
-
-                # Initialize request argument(s)
-                data_policy = bigquery_datapolicies_v1beta1.DataPolicy()
-                data_policy.policy_tag = "policy_tag_value"
-                data_policy.data_masking_policy.predefined_expression = "DEFAULT_MASKING_VALUE"
-
-                request = bigquery_datapolicies_v1beta1.UpdateDataPolicyRequest(
-                    data_policy=data_policy,
-                )
-
-                # Make the request
-                response = client.update_data_policy(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.bigquery_datapolicies_v1beta1.types.UpdateDataPolicyRequest, dict]):
-                The request object. Request message for the
-                UpdateDataPolicy method.
-            data_policy (google.cloud.bigquery_datapolicies_v1beta1.types.DataPolicy):
-                Required. Update the data policy's metadata.
-
-                The target data policy is determined by the ``name``
-                field. Other fields are updated to the specified values
-                based on the field masks.
-
-                This corresponds to the ``data_policy`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            update_mask (google.protobuf.field_mask_pb2.FieldMask):
-                The update mask applies to the resource. For the
-                ``FieldMask`` definition, see
-                https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
-                If not set, defaults to all of the fields that are
-                allowed to be updated.
-
-                Updates to the ``name`` and ``dataPolicyId`` fields are
-                not allowed.
-
-                This corresponds to the ``update_mask`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
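The generated sample above does not set ``update_mask``; a sketch of a masked update follows, with a hypothetical resource name and tag value:

.. code-block:: python

    from google.cloud import bigquery_datapolicies_v1beta1
    from google.protobuf import field_mask_pb2

    client = bigquery_datapolicies_v1beta1.DataPolicyServiceClient()

    data_policy = bigquery_datapolicies_v1beta1.DataPolicy()
    data_policy.name = "projects/123/locations/us/dataPolicies/my_policy"  # hypothetical
    data_policy.policy_tag = "new_policy_tag_value"  # hypothetical

    # Only policy_tag is written; all other fields on the stored policy are kept.
    response = client.update_data_policy(
        data_policy=data_policy,
        update_mask=field_mask_pb2.FieldMask(paths=["policy_tag"]),
    )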
- - Returns: - google.cloud.bigquery_datapolicies_v1beta1.types.DataPolicy: - Represents the label-policy binding. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [data_policy, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datapolicy.UpdateDataPolicyRequest): - request = datapolicy.UpdateDataPolicyRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if data_policy is not None: - request.data_policy = data_policy - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_data_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("data_policy.name", request.data_policy.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_data_policy(self, - request: Optional[Union[datapolicy.DeleteDataPolicyRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes the data policy specified by its resource - name. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datapolicies_v1beta1 - - def sample_delete_data_policy(): - # Create a client - client = bigquery_datapolicies_v1beta1.DataPolicyServiceClient() - - # Initialize request argument(s) - request = bigquery_datapolicies_v1beta1.DeleteDataPolicyRequest( - name="name_value", - ) - - # Make the request - client.delete_data_policy(request=request) - - Args: - request (Union[google.cloud.bigquery_datapolicies_v1beta1.types.DeleteDataPolicyRequest, dict]): - The request object. Request message for the - DeleteDataPolicy method. - name (str): - Required. Resource name of the data policy to delete. - Format is - ``projects/{project_number}/locations/{location_id}/dataPolicies/{data_policy_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datapolicy.DeleteDataPolicyRequest): - request = datapolicy.DeleteDataPolicyRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_data_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def get_data_policy(self, - request: Optional[Union[datapolicy.GetDataPolicyRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> datapolicy.DataPolicy: - r"""Gets the data policy specified by its resource name. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datapolicies_v1beta1 - - def sample_get_data_policy(): - # Create a client - client = bigquery_datapolicies_v1beta1.DataPolicyServiceClient() - - # Initialize request argument(s) - request = bigquery_datapolicies_v1beta1.GetDataPolicyRequest( - name="name_value", - ) - - # Make the request - response = client.get_data_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_datapolicies_v1beta1.types.GetDataPolicyRequest, dict]): - The request object. Request message for the GetDataPolicy - method. - name (str): - Required. Resource name of the requested data policy. - Format is - ``projects/{project_number}/locations/{location_id}/dataPolicies/{data_policy_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_datapolicies_v1beta1.types.DataPolicy: - Represents the label-policy binding. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datapolicy.GetDataPolicyRequest): - request = datapolicy.GetDataPolicyRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_data_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_data_policies(self, - request: Optional[Union[datapolicy.ListDataPoliciesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListDataPoliciesPager: - r"""List all of the data policies in the specified parent - project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datapolicies_v1beta1 - - def sample_list_data_policies(): - # Create a client - client = bigquery_datapolicies_v1beta1.DataPolicyServiceClient() - - # Initialize request argument(s) - request = bigquery_datapolicies_v1beta1.ListDataPoliciesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_policies(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.bigquery_datapolicies_v1beta1.types.ListDataPoliciesRequest, dict]): - The request object. Request message for the - ListDataPolicies method. - parent (str): - Required. Resource name of the project for which to list - data policies. 
Format is - ``projects/{project_number}/locations/{location_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_datapolicies_v1beta1.services.data_policy_service.pagers.ListDataPoliciesPager: - Response message for the - ListDataPolicies method. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datapolicy.ListDataPoliciesRequest): - request = datapolicy.ListDataPoliciesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_data_policies] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListDataPoliciesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_iam_policy(self, - request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Gets the IAM policy for the specified data policy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datapolicies_v1beta1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - def sample_get_iam_policy(): - # Create a client - client = bigquery_datapolicies_v1beta1.DataPolicyServiceClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.get_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]): - The request object. Request message for ``GetIamPolicy`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). - - **JSON example:** - - :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](\ https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. 
- if isinstance(request, dict): - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.GetIamPolicyRequest(**request) - elif not request: - # Null request, just make one. - request = iam_policy_pb2.GetIamPolicyRequest() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def set_iam_policy(self, - request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM policy for the specified data policy. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datapolicies_v1beta1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - def sample_set_iam_policy(): - # Create a client - client = bigquery_datapolicies_v1beta1.DataPolicyServiceClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.set_iam_policy(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]): - The request object. Request message for ``SetIamPolicy`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.policy_pb2.Policy: - An Identity and Access Management (IAM) policy, which specifies access - controls for Google Cloud resources. - - A Policy is a collection of bindings. A binding binds - one or more members, or principals, to a single role. - Principals can be user accounts, service accounts, - Google groups, and domains (such as G Suite). A role - is a named list of permissions; each role can be an - IAM predefined role or a user-created custom role. - - For some types of Google Cloud resources, a binding - can also specify a condition, which is a logical - expression that allows access to a resource only if - the expression evaluates to true. A condition can add - constraints based on attributes of the request, the - resource, or both. 
To learn which resources support - conditions in their IAM policies, see the [IAM - documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). - - **JSON example:** - - :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` - - **YAML example:** - - :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` - - For a description of IAM and its features, see the - [IAM - documentation](\ https://cloud.google.com/iam/docs/). - - """ - # Create or coerce a protobuf request object. - if isinstance(request, dict): - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.SetIamPolicyRequest(**request) - elif not request: - # Null request, just make one. - request = iam_policy_pb2.SetIamPolicyRequest() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def test_iam_permissions(self, - request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Returns the caller's permission on the specified data - policy resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datapolicies_v1beta1 - from google.iam.v1 import iam_policy_pb2 # type: ignore - - def sample_test_iam_permissions(): - # Create a client - client = bigquery_datapolicies_v1beta1.DataPolicyServiceClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = client.test_iam_permissions(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]): - The request object. Request message for ``TestIamPermissions`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: - Response message for TestIamPermissions method. - """ - # Create or coerce a protobuf request object. - if isinstance(request, dict): - # - The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - elif not request: - # Null request, just make one. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("resource", request.resource), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "DataPolicyServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! 
- """ - self.transport.close() - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "DataPolicyServiceClient", -) diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/pagers.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/pagers.py deleted file mode 100644 index c4d1cd7804e3..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/pagers.py +++ /dev/null @@ -1,166 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.bigquery_datapolicies_v1beta1.types import datapolicy - - -class ListDataPoliciesPager: - """A pager for iterating through ``list_data_policies`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_datapolicies_v1beta1.types.ListDataPoliciesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``data_policies`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListDataPolicies`` requests and continue to iterate - through the ``data_policies`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_datapolicies_v1beta1.types.ListDataPoliciesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., datapolicy.ListDataPoliciesResponse], - request: datapolicy.ListDataPoliciesRequest, - response: datapolicy.ListDataPoliciesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_datapolicies_v1beta1.types.ListDataPoliciesRequest): - The initial request object. - response (google.cloud.bigquery_datapolicies_v1beta1.types.ListDataPoliciesResponse): - The initial response object. 
- retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = datapolicy.ListDataPoliciesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[datapolicy.ListDataPoliciesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[datapolicy.DataPolicy]: - for page in self.pages: - yield from page.data_policies - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDataPoliciesAsyncPager: - """A pager for iterating through ``list_data_policies`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_datapolicies_v1beta1.types.ListDataPoliciesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``data_policies`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDataPolicies`` requests and continue to iterate - through the ``data_policies`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_datapolicies_v1beta1.types.ListDataPoliciesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[datapolicy.ListDataPoliciesResponse]], - request: datapolicy.ListDataPoliciesRequest, - response: datapolicy.ListDataPoliciesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_datapolicies_v1beta1.types.ListDataPoliciesRequest): - The initial request object. - response (google.cloud.bigquery_datapolicies_v1beta1.types.ListDataPoliciesResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
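Both pagers also expose the underlying responses through the ``pages`` property, in addition to flat element iteration. A usage sketch for the synchronous pager defined above; the parent value is hypothetical:

.. code-block:: python

    from google.cloud import bigquery_datapolicies_v1beta1

    client = bigquery_datapolicies_v1beta1.DataPolicyServiceClient()
    pager = client.list_data_policies(parent="projects/my-project/locations/us")

    # Flat iteration resolves additional pages transparently...
    for policy in pager:
        print(policy.name)

    # ...or walk page by page; each page is a full ListDataPoliciesResponse.
    for page in pager.pages:
        print(len(page.data_policies), page.next_page_token)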
- """ - self._method = method - self._request = datapolicy.ListDataPoliciesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[datapolicy.ListDataPoliciesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[datapolicy.DataPolicy]: - async def async_generator(): - async for page in self.pages: - for response in page.data_policies: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/transports/README.rst b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/transports/README.rst deleted file mode 100644 index b83d156af245..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`DataPolicyServiceTransport` is the ABC for all transports. -- public child `DataPolicyServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `DataPolicyServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseDataPolicyServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `DataPolicyServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/transports/__init__.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/transports/__init__.py deleted file mode 100644 index 0973a0dc70ee..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/transports/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -from typing import Dict, Type - -from .base import DataPolicyServiceTransport -from .grpc import DataPolicyServiceGrpcTransport -from .grpc_asyncio import DataPolicyServiceGrpcAsyncIOTransport - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[DataPolicyServiceTransport]] -_transport_registry['grpc'] = DataPolicyServiceGrpcTransport -_transport_registry['grpc_asyncio'] = DataPolicyServiceGrpcAsyncIOTransport - -__all__ = ( - 'DataPolicyServiceTransport', - 'DataPolicyServiceGrpcTransport', - 'DataPolicyServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/transports/base.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/transports/base.py deleted file mode 100644 index 6c9ce399ad0d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/transports/base.py +++ /dev/null @@ -1,328 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.bigquery_datapolicies_v1beta1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.bigquery_datapolicies_v1beta1.types import datapolicy -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class DataPolicyServiceTransport(abc.ABC): - """Abstract transport class for DataPolicyService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'bigquerydatapolicy.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'bigquerydatapolicy.googleapis.com'). 
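The ``OrderedDict`` registry above mirrors how GAPIC clients resolve a ``transport="grpc"`` string argument into a concrete transport class. A lookup sketch using only names defined in this package:

.. code-block:: python

    from google.cloud.bigquery_datapolicies_v1beta1.services.data_policy_service import (
        transports,
    )

    registry = {
        "grpc": transports.DataPolicyServiceGrpcTransport,
        "grpc_asyncio": transports.DataPolicyServiceGrpcAsyncIOTransport,
    }
    transport_cls = registry["grpc"]  # -> DataPolicyServiceGrpcTransport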
- credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. 
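The constructor above resolves credentials in a fixed order: explicit ``credentials`` win, then ``credentials_file``, then application default credentials, and passing both explicit forms raises ``DuplicateCredentialArgs``. A condensed standalone sketch of that order (not part of the generated class):

.. code-block:: python

    import google.auth
    from google.api_core import exceptions as core_exceptions

    def resolve(credentials=None, credentials_file=None, scopes=None):
        """Mimic the transport's credential-resolution order."""
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs(
                "'credentials_file' and 'credentials' are mutually exclusive")
        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file, scopes=scopes)
        elif credentials is None:
            credentials, _ = google.auth.default(scopes=scopes)
        return credentials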
- self._wrapped_methods = { - self.create_data_policy: gapic_v1.method.wrap_method( - self.create_data_policy, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.update_data_policy: gapic_v1.method.wrap_method( - self.update_data_policy, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.delete_data_policy: gapic_v1.method.wrap_method( - self.delete_data_policy, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_data_policy: gapic_v1.method.wrap_method( - self.get_data_policy, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_data_policies: gapic_v1.method.wrap_method( - self.list_data_policies, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_iam_policy: gapic_v1.method.wrap_method( - self.get_iam_policy, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.set_iam_policy: gapic_v1.method.wrap_method( - self.set_iam_policy, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.test_iam_permissions: gapic_v1.method.wrap_method( - self.test_iam_permissions, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
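Every RPC wrapped above shares one default policy: exponential backoff starting at 1.0s, capped at 10.0s, growing by 1.3x, retrying only ``ServiceUnavailable``, under a 60s deadline and timeout. Callers can override it per call; a sketch, with the client call left commented since it assumes a live client:

.. code-block:: python

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries

    custom_retry = retries.Retry(
        initial=1.0,     # first backoff, seconds
        maximum=10.0,    # backoff cap
        multiplier=1.3,  # growth factor between attempts
        predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
        deadline=60.0,   # total time budget across attempts
    )
    # policy = client.get_data_policy(name=..., retry=custom_retry, timeout=30.0)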
- """ - raise NotImplementedError() - - @property - def create_data_policy(self) -> Callable[ - [datapolicy.CreateDataPolicyRequest], - Union[ - datapolicy.DataPolicy, - Awaitable[datapolicy.DataPolicy] - ]]: - raise NotImplementedError() - - @property - def update_data_policy(self) -> Callable[ - [datapolicy.UpdateDataPolicyRequest], - Union[ - datapolicy.DataPolicy, - Awaitable[datapolicy.DataPolicy] - ]]: - raise NotImplementedError() - - @property - def delete_data_policy(self) -> Callable[ - [datapolicy.DeleteDataPolicyRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def get_data_policy(self) -> Callable[ - [datapolicy.GetDataPolicyRequest], - Union[ - datapolicy.DataPolicy, - Awaitable[datapolicy.DataPolicy] - ]]: - raise NotImplementedError() - - @property - def list_data_policies(self) -> Callable[ - [datapolicy.ListDataPoliciesRequest], - Union[ - datapolicy.ListDataPoliciesResponse, - Awaitable[datapolicy.ListDataPoliciesResponse] - ]]: - raise NotImplementedError() - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Union[ - policy_pb2.Policy, - Awaitable[policy_pb2.Policy] - ]]: - raise NotImplementedError() - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Union[ - policy_pb2.Policy, - Awaitable[policy_pb2.Policy] - ]]: - raise NotImplementedError() - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Union[ - iam_policy_pb2.TestIamPermissionsResponse, - Awaitable[iam_policy_pb2.TestIamPermissionsResponse] - ]]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'DataPolicyServiceTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/transports/grpc.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/transports/grpc.py deleted file mode 100644 index 1d74301fcef9..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/transports/grpc.py +++ /dev/null @@ -1,540 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import json -import logging as std_logging -import pickle -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore - -from google.cloud.bigquery_datapolicies_v1beta1.types import datapolicy -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import DataPolicyServiceTransport, DEFAULT_CLIENT_INFO - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER - def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService", - "rpcName": client_call_details.method, - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - - response = continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = response.trailing_metadata() - # Convert gRPC response metadata to a dict of str values - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = response.result() - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response for {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService", - "rpcName": client_call_details.method, - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class DataPolicyServiceGrpcTransport(DataPolicyServiceTransport): - """gRPC backend transport for DataPolicyService. - - Data Policy Service provides APIs for managing the - label-policy bindings.
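``_LoggingClientInterceptor`` above follows the standard ``grpc.UnaryUnaryClientInterceptor`` shape: inspect the outgoing request, delegate through ``continuation``, then inspect the returned call object. A stripped-down sketch of the same pattern (the channel target is a placeholder):

.. code-block:: python

    import grpc

    class CountingInterceptor(grpc.UnaryUnaryClientInterceptor):
        """Counts unary-unary calls crossing a channel."""

        def __init__(self):
            self.calls = 0

        def intercept_unary_unary(self, continuation, client_call_details, request):
            self.calls += 1  # observe the request before it goes on the wire
            return continuation(client_call_details, request)

    channel = grpc.intercept_channel(
        grpc.insecure_channel("localhost:50051"), CountingInterceptor())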
- - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'bigquerydatapolicy.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'bigquerydatapolicy.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if a ``channel`` instance is provided. - channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) - - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'bigquerydatapolicy.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. 
- credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def create_data_policy(self) -> Callable[ - [datapolicy.CreateDataPolicyRequest], - datapolicy.DataPolicy]: - r"""Return a callable for the create data policy method over gRPC. - - Creates a new data policy under a project with the given - ``dataPolicyId`` (used as the display name), policy tag, and - data policy type. - - Returns: - Callable[[~.CreateDataPolicyRequest], - ~.DataPolicy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_data_policy' not in self._stubs: - self._stubs['create_data_policy'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService/CreateDataPolicy', - request_serializer=datapolicy.CreateDataPolicyRequest.serialize, - response_deserializer=datapolicy.DataPolicy.deserialize, - ) - return self._stubs['create_data_policy'] - - @property - def update_data_policy(self) -> Callable[ - [datapolicy.UpdateDataPolicyRequest], - datapolicy.DataPolicy]: - r"""Return a callable for the update data policy method over gRPC. - - Updates the metadata for an existing data policy. The - target data policy can be specified by the resource - name. - - Returns: - Callable[[~.UpdateDataPolicyRequest], - ~.DataPolicy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_data_policy' not in self._stubs: - self._stubs['update_data_policy'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService/UpdateDataPolicy', - request_serializer=datapolicy.UpdateDataPolicyRequest.serialize, - response_deserializer=datapolicy.DataPolicy.deserialize, - ) - return self._stubs['update_data_policy'] - - @property - def delete_data_policy(self) -> Callable[ - [datapolicy.DeleteDataPolicyRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete data policy method over gRPC. - - Deletes the data policy specified by its resource - name. 
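``create_channel`` above can also be called directly: it injects the default scopes and host before delegating to ``grpc_helpers.create_channel``. A sketch, assuming application default credentials are available in the environment:

.. code-block:: python

    from google.cloud.bigquery_datapolicies_v1beta1.services.data_policy_service.transports import (
        DataPolicyServiceGrpcTransport,
    )

    channel = DataPolicyServiceGrpcTransport.create_channel(
        "bigquerydatapolicy.googleapis.com",
        scopes=["https://www.googleapis.com/auth/cloud-platform"],
    )
    # A transport built on an explicit channel ignores credential arguments.
    transport = DataPolicyServiceGrpcTransport(channel=channel)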
- - Returns: - Callable[[~.DeleteDataPolicyRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_data_policy' not in self._stubs: - self._stubs['delete_data_policy'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService/DeleteDataPolicy', - request_serializer=datapolicy.DeleteDataPolicyRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_data_policy'] - - @property - def get_data_policy(self) -> Callable[ - [datapolicy.GetDataPolicyRequest], - datapolicy.DataPolicy]: - r"""Return a callable for the get data policy method over gRPC. - - Gets the data policy specified by its resource name. - - Returns: - Callable[[~.GetDataPolicyRequest], - ~.DataPolicy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_data_policy' not in self._stubs: - self._stubs['get_data_policy'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService/GetDataPolicy', - request_serializer=datapolicy.GetDataPolicyRequest.serialize, - response_deserializer=datapolicy.DataPolicy.deserialize, - ) - return self._stubs['get_data_policy'] - - @property - def list_data_policies(self) -> Callable[ - [datapolicy.ListDataPoliciesRequest], - datapolicy.ListDataPoliciesResponse]: - r"""Return a callable for the list data policies method over gRPC. - - List all of the data policies in the specified parent - project. - - Returns: - Callable[[~.ListDataPoliciesRequest], - ~.ListDataPoliciesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_data_policies' not in self._stubs: - self._stubs['list_data_policies'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService/ListDataPolicies', - request_serializer=datapolicy.ListDataPoliciesRequest.serialize, - response_deserializer=datapolicy.ListDataPoliciesResponse.deserialize, - ) - return self._stubs['list_data_policies'] - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - - Gets the IAM policy for the specified data policy. - - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
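The IAM stubs wired up next take their request types from ``google.iam.v1`` rather than this package's own ``types`` module. A client-surface sketch (the resource name is a placeholder):

.. code-block:: python

    from google.cloud import bigquery_datapolicies_v1beta1
    from google.iam.v1 import iam_policy_pb2

    client = bigquery_datapolicies_v1beta1.DataPolicyServiceClient()
    request = iam_policy_pb2.GetIamPolicyRequest(
        resource="projects/123/locations/us/dataPolicies/my_policy",
    )
    policy = client.get_iam_policy(request=request)
    for binding in policy.bindings:
        print(binding.role, list(binding.members))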
- if 'get_iam_policy' not in self._stubs: - self._stubs['get_iam_policy'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService/GetIamPolicy', - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['get_iam_policy'] - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - - Sets the IAM policy for the specified data policy. - - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'set_iam_policy' not in self._stubs: - self._stubs['set_iam_policy'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService/SetIamPolicy', - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['set_iam_policy'] - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - iam_policy_pb2.TestIamPermissionsResponse]: - r"""Return a callable for the test iam permissions method over gRPC. - - Returns the caller's permission on the specified data - policy resource. - - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'test_iam_permissions' not in self._stubs: - self._stubs['test_iam_permissions'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService/TestIamPermissions', - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs['test_iam_permissions'] - - def close(self): - self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'DataPolicyServiceGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/transports/grpc_asyncio.py deleted file mode 100644 index 149ef997d395..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,667 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import inspect -import json -import pickle -import logging as std_logging -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import exceptions as core_exceptions -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.bigquery_datapolicies_v1beta1.types import datapolicy -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import DataPolicyServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import DataPolicyServiceGrpcTransport - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER - async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService", - "rpcName": str(client_call_details.method), - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - response = await continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = await response.trailing_metadata() - # Convert gRPC response metadata to a dict of str values - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = await response - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, -
"metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response to rpc {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService", - "rpcName": str(client_call_details.method), - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class DataPolicyServiceGrpcAsyncIOTransport(DataPolicyServiceTransport): - """gRPC AsyncIO backend transport for DataPolicyService. - - Data Policy Service provides APIs for managing the - label-policy bindings. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'bigquerydatapolicy.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'bigquerydatapolicy.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'bigquerydatapolicy.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. 
These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, aio.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def create_data_policy(self) -> Callable[ - [datapolicy.CreateDataPolicyRequest], - Awaitable[datapolicy.DataPolicy]]: - r"""Return a callable for the create data policy method over gRPC. - - Creates a new data policy under a project with the given - ``dataPolicyId`` (used as the display name), policy tag, and - data policy type. - - Returns: - Callable[[~.CreateDataPolicyRequest], - Awaitable[~.DataPolicy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_data_policy' not in self._stubs: - self._stubs['create_data_policy'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService/CreateDataPolicy', - request_serializer=datapolicy.CreateDataPolicyRequest.serialize, - response_deserializer=datapolicy.DataPolicy.deserialize, - ) - return self._stubs['create_data_policy'] - - @property - def update_data_policy(self) -> Callable[ - [datapolicy.UpdateDataPolicyRequest], - Awaitable[datapolicy.DataPolicy]]: - r"""Return a callable for the update data policy method over gRPC. - - Updates the metadata for an existing data policy. The - target data policy can be specified by the resource - name. 
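As the docstring above notes, an update targets the policy named in ``data_policy.name`` and, when a mask is given, touches only the listed fields. A sketch of a mask-restricted update (resource names are placeholders):

.. code-block:: python

    from google.protobuf import field_mask_pb2
    from google.cloud.bigquery_datapolicies_v1beta1.types import datapolicy

    request = datapolicy.UpdateDataPolicyRequest(
        data_policy=datapolicy.DataPolicy(
            name="projects/123/locations/us/dataPolicies/my_policy",
            policy_tag="projects/123/locations/us/taxonomies/45/policyTags/67",
        ),
        update_mask=field_mask_pb2.FieldMask(paths=["policy_tag"]),
    )
    # updated = client.update_data_policy(request=request)  # needs a live client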
- - Returns: - Callable[[~.UpdateDataPolicyRequest], - Awaitable[~.DataPolicy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_data_policy' not in self._stubs: - self._stubs['update_data_policy'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService/UpdateDataPolicy', - request_serializer=datapolicy.UpdateDataPolicyRequest.serialize, - response_deserializer=datapolicy.DataPolicy.deserialize, - ) - return self._stubs['update_data_policy'] - - @property - def delete_data_policy(self) -> Callable[ - [datapolicy.DeleteDataPolicyRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete data policy method over gRPC. - - Deletes the data policy specified by its resource - name. - - Returns: - Callable[[~.DeleteDataPolicyRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_data_policy' not in self._stubs: - self._stubs['delete_data_policy'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService/DeleteDataPolicy', - request_serializer=datapolicy.DeleteDataPolicyRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_data_policy'] - - @property - def get_data_policy(self) -> Callable[ - [datapolicy.GetDataPolicyRequest], - Awaitable[datapolicy.DataPolicy]]: - r"""Return a callable for the get data policy method over gRPC. - - Gets the data policy specified by its resource name. - - Returns: - Callable[[~.GetDataPolicyRequest], - Awaitable[~.DataPolicy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_data_policy' not in self._stubs: - self._stubs['get_data_policy'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService/GetDataPolicy', - request_serializer=datapolicy.GetDataPolicyRequest.serialize, - response_deserializer=datapolicy.DataPolicy.deserialize, - ) - return self._stubs['get_data_policy'] - - @property - def list_data_policies(self) -> Callable[ - [datapolicy.ListDataPoliciesRequest], - Awaitable[datapolicy.ListDataPoliciesResponse]]: - r"""Return a callable for the list data policies method over gRPC. - - List all of the data policies in the specified parent - project. - - Returns: - Callable[[~.ListDataPoliciesRequest], - Awaitable[~.ListDataPoliciesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_data_policies' not in self._stubs: - self._stubs['list_data_policies'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService/ListDataPolicies', - request_serializer=datapolicy.ListDataPoliciesRequest.serialize, - response_deserializer=datapolicy.ListDataPoliciesResponse.deserialize, - ) - return self._stubs['list_data_policies'] - - @property - def get_iam_policy(self) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the get iam policy method over gRPC. - - Gets the IAM policy for the specified data policy. - - Returns: - Callable[[~.GetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_iam_policy' not in self._stubs: - self._stubs['get_iam_policy'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService/GetIamPolicy', - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['get_iam_policy'] - - @property - def set_iam_policy(self) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the set iam policy method over gRPC. - - Sets the IAM policy for the specified data policy. - - Returns: - Callable[[~.SetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'set_iam_policy' not in self._stubs: - self._stubs['set_iam_policy'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService/SetIamPolicy', - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs['set_iam_policy'] - - @property - def test_iam_permissions(self) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Awaitable[iam_policy_pb2.TestIamPermissionsResponse]]: - r"""Return a callable for the test iam permissions method over gRPC. - - Returns the caller's permission on the specified data - policy resource. - - Returns: - Callable[[~.TestIamPermissionsRequest], - Awaitable[~.TestIamPermissionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
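``TestIamPermissions``, wired up just below, reports which of the named permissions the caller actually holds, which is useful for greying out actions in a UI. A sketch (the permission string follows the service's IAM naming and is an assumption here):

.. code-block:: python

    from google.cloud import bigquery_datapolicies_v1beta1
    from google.iam.v1 import iam_policy_pb2

    client = bigquery_datapolicies_v1beta1.DataPolicyServiceClient()
    request = iam_policy_pb2.TestIamPermissionsRequest(
        resource="projects/123/locations/us/dataPolicies/my_policy",
        permissions=["bigquery.dataPolicies.get"],  # assumed permission id
    )
    response = client.test_iam_permissions(request=request)
    print(response.permissions)  # the subset the caller holds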
- if 'test_iam_permissions' not in self._stubs: - self._stubs['test_iam_permissions'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService/TestIamPermissions', - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs['test_iam_permissions'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.create_data_policy: self._wrap_method( - self.create_data_policy, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.update_data_policy: self._wrap_method( - self.update_data_policy, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.delete_data_policy: self._wrap_method( - self.delete_data_policy, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_data_policy: self._wrap_method( - self.get_data_policy, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_data_policies: self._wrap_method( - self.list_data_policies, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_iam_policy: self._wrap_method( - self.get_iam_policy, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.set_iam_policy: self._wrap_method( - self.set_iam_policy, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.test_iam_permissions: self._wrap_method( - self.test_iam_permissions, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - -__all__ = ( - 'DataPolicyServiceGrpcAsyncIOTransport', -) diff --git 
a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/types/__init__.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/types/__init__.py deleted file mode 100644 index 602ecf6e5e6d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/types/__init__.py +++ /dev/null @@ -1,36 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .datapolicy import ( - CreateDataPolicyRequest, - DataMaskingPolicy, - DataPolicy, - DeleteDataPolicyRequest, - GetDataPolicyRequest, - ListDataPoliciesRequest, - ListDataPoliciesResponse, - UpdateDataPolicyRequest, -) - -__all__ = ( - 'CreateDataPolicyRequest', - 'DataMaskingPolicy', - 'DataPolicy', - 'DeleteDataPolicyRequest', - 'GetDataPolicyRequest', - 'ListDataPoliciesRequest', - 'ListDataPoliciesResponse', - 'UpdateDataPolicyRequest', -) diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/types/datapolicy.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/types/datapolicy.py deleted file mode 100644 index 4bb922a09583..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/google/cloud/bigquery_datapolicies_v1beta1/types/datapolicy.py +++ /dev/null @@ -1,320 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import field_mask_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.bigquery.datapolicies.v1beta1', - manifest={ - 'CreateDataPolicyRequest', - 'UpdateDataPolicyRequest', - 'DeleteDataPolicyRequest', - 'GetDataPolicyRequest', - 'ListDataPoliciesRequest', - 'ListDataPoliciesResponse', - 'DataPolicy', - 'DataMaskingPolicy', - }, -) - - -class CreateDataPolicyRequest(proto.Message): - r"""Request message for the CreateDataPolicy method. - - Attributes: - parent (str): - Required. Resource name of the project that the data policy - will belong to. The format is - ``projects/{project_number}/locations/{location_id}``. - data_policy (google.cloud.bigquery_datapolicies_v1beta1.types.DataPolicy): - Required. The data policy to create. 
The ``name`` field does
-            not need to be provided for the data policy creation.
-    """
-
-    parent: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    data_policy: 'DataPolicy' = proto.Field(
-        proto.MESSAGE,
-        number=2,
-        message='DataPolicy',
-    )
-
-
-class UpdateDataPolicyRequest(proto.Message):
-    r"""Request message for the UpdateDataPolicy method.
-
-    Attributes:
-        data_policy (google.cloud.bigquery_datapolicies_v1beta1.types.DataPolicy):
-            Required. Update the data policy's metadata.
-
-            The target data policy is determined by the ``name`` field.
-            Other fields are updated to the specified values based on
-            the field masks.
-        update_mask (google.protobuf.field_mask_pb2.FieldMask):
-            The update mask applies to the resource. For the
-            ``FieldMask`` definition, see
-            https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
-            If not set, defaults to all of the fields that are allowed
-            to be updated.
-
-            Updates to the ``name`` and ``dataPolicyId`` fields are not
-            allowed.
-    """
-
-    data_policy: 'DataPolicy' = proto.Field(
-        proto.MESSAGE,
-        number=1,
-        message='DataPolicy',
-    )
-    update_mask: field_mask_pb2.FieldMask = proto.Field(
-        proto.MESSAGE,
-        number=2,
-        message=field_mask_pb2.FieldMask,
-    )
-
-
-class DeleteDataPolicyRequest(proto.Message):
-    r"""Request message for the DeleteDataPolicy method.
-
-    Attributes:
-        name (str):
-            Required. Resource name of the data policy to delete. Format
-            is
-            ``projects/{project_number}/locations/{location_id}/dataPolicies/{data_policy_id}``.
-    """
-
-    name: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-
-
-class GetDataPolicyRequest(proto.Message):
-    r"""Request message for the GetDataPolicy method.
-
-    Attributes:
-        name (str):
-            Required. Resource name of the requested data policy. Format
-            is
-            ``projects/{project_number}/locations/{location_id}/dataPolicies/{data_policy_id}``.
-    """
-
-    name: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-
-
-class ListDataPoliciesRequest(proto.Message):
-    r"""Request message for the ListDataPolicies method.
-
-    Attributes:
-        parent (str):
-            Required. Resource name of the project for which to list
-            data policies. Format is
-            ``projects/{project_number}/locations/{location_id}``.
-        page_size (int):
-            The maximum number of data policies to
-            return. Must be a value between 1 and 1000.
-            If not set, defaults to 50.
-        page_token (str):
-            The ``nextPageToken`` value returned from a previous list
-            request, if any. If not set, defaults to an empty string.
-    """
-
-    parent: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    page_size: int = proto.Field(
-        proto.INT32,
-        number=2,
-    )
-    page_token: str = proto.Field(
-        proto.STRING,
-        number=3,
-    )
-
-
-class ListDataPoliciesResponse(proto.Message):
-    r"""Response message for the ListDataPolicies method.
-
-    Attributes:
-        data_policies (MutableSequence[google.cloud.bigquery_datapolicies_v1beta1.types.DataPolicy]):
-            Data policies that belong to the requested
-            project.
-        next_page_token (str):
-            Token used to retrieve the next page of
-            results, or empty if there are no more results.
-    """
-
-    @property
-    def raw_page(self):
-        return self
-
-    data_policies: MutableSequence['DataPolicy'] = proto.RepeatedField(
-        proto.MESSAGE,
-        number=1,
-        message='DataPolicy',
-    )
-    next_page_token: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-
-
-class DataPolicy(proto.Message):
-    r"""Represents the label-policy binding.
-
-    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
-
-    Attributes:
-        policy_tag (str):
-            Policy tag resource name, in the format of
-            ``projects/{project_number}/locations/{location_id}/taxonomies/{taxonomy_id}/policyTags/{policyTag_id}``.
-
-            This field is a member of `oneof`_ ``matching_label``.
-        data_masking_policy (google.cloud.bigquery_datapolicies_v1beta1.types.DataMaskingPolicy):
-            The data masking policy that specifies the
-            data masking rule to use.
-
-            This field is a member of `oneof`_ ``policy``.
-        name (str):
-            Output only. Resource name of this data policy, in the
-            format of
-            ``projects/{project_number}/locations/{location_id}/dataPolicies/{data_policy_id}``.
-        data_policy_type (google.cloud.bigquery_datapolicies_v1beta1.types.DataPolicy.DataPolicyType):
-            Type of data policy.
-        data_policy_id (str):
-            User-assigned (human-readable) ID of the data policy that
-            needs to be unique within a project. Used as
-            {data_policy_id} in part of the resource name.
-    """
-    class DataPolicyType(proto.Enum):
-        r"""A list of supported data policy types.
-
-        Values:
-            DATA_POLICY_TYPE_UNSPECIFIED (0):
-                Default value for the data policy type. This
-                should not be used.
-            COLUMN_LEVEL_SECURITY_POLICY (3):
-                Used to create a data policy for column-level
-                security, without data masking.
-            DATA_MASKING_POLICY (2):
-                Used to create a data policy for data
-                masking.
-        """
-        DATA_POLICY_TYPE_UNSPECIFIED = 0
-        COLUMN_LEVEL_SECURITY_POLICY = 3
-        DATA_MASKING_POLICY = 2
-
-    policy_tag: str = proto.Field(
-        proto.STRING,
-        number=4,
-        oneof='matching_label',
-    )
-    data_masking_policy: 'DataMaskingPolicy' = proto.Field(
-        proto.MESSAGE,
-        number=5,
-        oneof='policy',
-        message='DataMaskingPolicy',
-    )
-    name: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    data_policy_type: DataPolicyType = proto.Field(
-        proto.ENUM,
-        number=2,
-        enum=DataPolicyType,
-    )
-    data_policy_id: str = proto.Field(
-        proto.STRING,
-        number=3,
-    )
-
-
-class DataMaskingPolicy(proto.Message):
-    r"""The data masking policy that is used to specify the data
-    masking rule.
-
-
-    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
-
-    Attributes:
-        predefined_expression (google.cloud.bigquery_datapolicies_v1beta1.types.DataMaskingPolicy.PredefinedExpression):
-            A predefined masking expression.
-
-            This field is a member of `oneof`_ ``masking_expression``.
-    """
-    class PredefinedExpression(proto.Enum):
-        r"""The available masking rules. Learn more here:
-        https://cloud.google.com/bigquery/docs/column-data-masking-intro#masking_options.
-
-        Values:
-            PREDEFINED_EXPRESSION_UNSPECIFIED (0):
-                Default, unspecified predefined expression.
-                No masking will take place since no expression
-                is specified.
-            SHA256 (3):
-                Masking expression to replace data with a
-                SHA-256 hash.
-            ALWAYS_NULL (5):
-                Masking expression to replace data with
-                NULLs.
-            DEFAULT_MASKING_VALUE (7):
-                Masking expression to replace data with their default
-                masking values. The default masking values for each type
-                are listed below:
-
-                -  STRING: ""
-                -  BYTES: b''
-                -  INTEGER: 0
-                -  FLOAT: 0.0
-                -  NUMERIC: 0
-                -  BOOLEAN: FALSE
-                -  TIMESTAMP: 0001-01-01 00:00:00 UTC
-                -  DATE: 0001-01-01
-                -  TIME: 00:00:00
-                -  DATETIME: 0001-01-01T00:00:00
-                -  GEOGRAPHY: POINT(0 0)
-                -  BIGNUMERIC: 0
-                -  ARRAY: []
-                -  STRUCT: NOT_APPLICABLE
-                -  JSON: NULL
-        """
-        PREDEFINED_EXPRESSION_UNSPECIFIED = 0
-        SHA256 = 3
-        ALWAYS_NULL = 5
-        DEFAULT_MASKING_VALUE = 7
-
-    predefined_expression: PredefinedExpression = proto.Field(
-        proto.ENUM,
-        number=1,
-        oneof='masking_expression',
-        enum=PredefinedExpression,
-    )
-
-
-__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/mypy.ini b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/mypy.ini
deleted file mode 100644
index 574c5aed394b..000000000000
--- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/mypy.ini
+++ /dev/null
@@ -1,3 +0,0 @@
-[mypy]
-python_version = 3.7
-namespace_packages = True
diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/noxfile.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/noxfile.py
deleted file mode 100644
index 5c43409ff3e6..000000000000
--- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/noxfile.py
+++ /dev/null
@@ -1,280 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import os
-import pathlib
-import re
-import shutil
-import subprocess
-import sys
-
-
-import nox  # type: ignore
-
-ALL_PYTHON = [
-    "3.7",
-    "3.8",
-    "3.9",
-    "3.10",
-    "3.11",
-    "3.12",
-    "3.13",
-]
-
-CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
-
-LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt"
-PACKAGE_NAME = 'google-cloud-bigquery-datapolicies'
-
-BLACK_VERSION = "black==22.3.0"
-BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"]
-DEFAULT_PYTHON_VERSION = "3.13"
-
-nox.sessions = [
-    "unit",
-    "cover",
-    "mypy",
-    "check_lower_bounds",
-    # exclude update_lower_bounds from default
-    "docs",
-    "blacken",
-    "lint",
-    "prerelease_deps",
-]
-
-@nox.session(python=ALL_PYTHON)
-@nox.parametrize(
-    "protobuf_implementation",
-    [ "python", "upb", "cpp" ],
-)
-def unit(session, protobuf_implementation):
-    """Run the unit test suite."""
-
-    if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
-        session.skip("cpp implementation is not supported in python 3.11+")
-
-    session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"')
-    session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt")
-
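Aside on the ``protobuf_implementation`` parametrization above: protobuf selects its runtime backend at import time from the ``PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION`` environment variable, which is why the test invocation below passes it via ``env=`` when running pytest. As a minimal sketch, a session could confirm which backend the interpreter under test actually loaded; this relies on protobuf's internal ``api_implementation`` module, which is not a public API, so treat it as illustrative only:

    # Hypothetical diagnostic step, not part of the generated noxfile:
    # report the active protobuf backend ('python', 'upb', or 'cpp') as
    # seen by the interpreter under test.
    session.run(
        "python", "-c",
        "from google.protobuf.internal import api_implementation; "
        "print(api_implementation.Type())",
        env={"PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation},
    )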
-    # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped.
-    # The 'cpp' implementation requires Protobuf<4.
-    if protobuf_implementation == "cpp":
-        session.install("protobuf<4")
-
-    session.run(
-        'py.test',
-        '--quiet',
-        '--cov=google/cloud/bigquery_datapolicies_v1beta1/',
-        '--cov=tests/',
-        '--cov-config=.coveragerc',
-        '--cov-report=term',
-        '--cov-report=html',
-        os.path.join('tests', 'unit', ''.join(session.posargs)),
-        env={
-            "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
-        },
-    )
-
-@nox.session(python=ALL_PYTHON[-1])
-@nox.parametrize(
-    "protobuf_implementation",
-    [ "python", "upb", "cpp" ],
-)
-def prerelease_deps(session, protobuf_implementation):
-    """Run the unit test suite against pre-release versions of dependencies."""
-
-    if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
-        session.skip("cpp implementation is not supported in python 3.11+")
-
-    # Install test environment dependencies
-    session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"')
-
-    # Install the package without dependencies
-    session.install('-e', '.', '--no-deps')
-
-    # We test the minimum dependency versions using the minimum Python
-    # version, so the lowest Python runtime that we test has a corresponding
-    # constraints file, located at `testing/constraints-<python version>.txt`,
-    # which contains all of the dependencies and extras.
-    with open(
-        CURRENT_DIRECTORY
-        / "testing"
-        / f"constraints-{ALL_PYTHON[0]}.txt",
-        encoding="utf-8",
-    ) as constraints_file:
-        constraints_text = constraints_file.read()
-
-    # Ignore leading whitespace and comment lines.
-    constraints_deps = [
-        match.group(1)
-        for match in re.finditer(
-            r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE
-        )
-    ]
-
-    session.install(*constraints_deps)
-
-    prerel_deps = [
-        "googleapis-common-protos",
-        "google-api-core",
-        "google-auth",
-        # Exclude grpcio==1.67.0rc1, which does not support Python 3.13.
-        "grpcio!=1.67.0rc1",
-        "grpcio-status",
-        "protobuf",
-        "proto-plus",
-    ]
-
-    for dep in prerel_deps:
-        session.install("--pre", "--no-deps", "--upgrade", dep)
-
-    # Remaining dependencies
-    other_deps = [
-        "requests",
-    ]
-    session.install(*other_deps)
-
-    # Print out prerelease package versions
-
-    session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)")
-    session.run("python", "-c", "import google.auth; print(google.auth.__version__)")
-    session.run("python", "-c", "import grpc; print(grpc.__version__)")
-    session.run(
-        "python", "-c", "import google.protobuf; print(google.protobuf.__version__)"
-    )
-    session.run(
-        "python", "-c", "import proto; print(proto.__version__)"
-    )
-
-    session.run(
-        'py.test',
-        '--quiet',
-        '--cov=google/cloud/bigquery_datapolicies_v1beta1/',
-        '--cov=tests/',
-        '--cov-config=.coveragerc',
-        '--cov-report=term',
-        '--cov-report=html',
-        os.path.join('tests', 'unit', ''.join(session.posargs)),
-        env={
-            "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
-        },
-    )
-
-
-@nox.session(python=DEFAULT_PYTHON_VERSION)
-def cover(session):
-    """Run the final coverage report.
-    This outputs the coverage report aggregating coverage from the unit
-    test runs (not system test runs), and then erases coverage data.
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_create_data_policy_async.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_create_data_policy_async.py deleted file mode 100644 index 633d3361157b..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_create_data_policy_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-# -# Snippet for CreateDataPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datapolicies - - -# [START bigquerydatapolicy_v1beta1_generated_DataPolicyService_CreateDataPolicy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datapolicies_v1beta1 - - -async def sample_create_data_policy(): - # Create a client - client = bigquery_datapolicies_v1beta1.DataPolicyServiceAsyncClient() - - # Initialize request argument(s) - data_policy = bigquery_datapolicies_v1beta1.DataPolicy() - data_policy.policy_tag = "policy_tag_value" - data_policy.data_masking_policy.predefined_expression = "DEFAULT_MASKING_VALUE" - - request = bigquery_datapolicies_v1beta1.CreateDataPolicyRequest( - parent="parent_value", - data_policy=data_policy, - ) - - # Make the request - response = await client.create_data_policy(request=request) - - # Handle the response - print(response) - -# [END bigquerydatapolicy_v1beta1_generated_DataPolicyService_CreateDataPolicy_async] diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_create_data_policy_sync.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_create_data_policy_sync.py deleted file mode 100644 index 3a173e343af4..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_create_data_policy_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateDataPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datapolicies - - -# [START bigquerydatapolicy_v1beta1_generated_DataPolicyService_CreateDataPolicy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datapolicies_v1beta1 - - -def sample_create_data_policy(): - # Create a client - client = bigquery_datapolicies_v1beta1.DataPolicyServiceClient() - - # Initialize request argument(s) - data_policy = bigquery_datapolicies_v1beta1.DataPolicy() - data_policy.policy_tag = "policy_tag_value" - data_policy.data_masking_policy.predefined_expression = "DEFAULT_MASKING_VALUE" - - request = bigquery_datapolicies_v1beta1.CreateDataPolicyRequest( - parent="parent_value", - data_policy=data_policy, - ) - - # Make the request - response = client.create_data_policy(request=request) - - # Handle the response - print(response) - -# [END bigquerydatapolicy_v1beta1_generated_DataPolicyService_CreateDataPolicy_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_delete_data_policy_async.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_delete_data_policy_async.py deleted file mode 100644 index 3c443a9099df..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_delete_data_policy_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDataPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datapolicies - - -# [START bigquerydatapolicy_v1beta1_generated_DataPolicyService_DeleteDataPolicy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datapolicies_v1beta1 - - -async def sample_delete_data_policy(): - # Create a client - client = bigquery_datapolicies_v1beta1.DataPolicyServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_datapolicies_v1beta1.DeleteDataPolicyRequest( - name="name_value", - ) - - # Make the request - await client.delete_data_policy(request=request) - - -# [END bigquerydatapolicy_v1beta1_generated_DataPolicyService_DeleteDataPolicy_async] diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_delete_data_policy_sync.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_delete_data_policy_sync.py deleted file mode 100644 index 98843535092a..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_delete_data_policy_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteDataPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datapolicies - - -# [START bigquerydatapolicy_v1beta1_generated_DataPolicyService_DeleteDataPolicy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datapolicies_v1beta1 - - -def sample_delete_data_policy(): - # Create a client - client = bigquery_datapolicies_v1beta1.DataPolicyServiceClient() - - # Initialize request argument(s) - request = bigquery_datapolicies_v1beta1.DeleteDataPolicyRequest( - name="name_value", - ) - - # Make the request - client.delete_data_policy(request=request) - - -# [END bigquerydatapolicy_v1beta1_generated_DataPolicyService_DeleteDataPolicy_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_get_data_policy_async.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_get_data_policy_async.py deleted file mode 100644 index 1bac652383dd..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_get_data_policy_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDataPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datapolicies - - -# [START bigquerydatapolicy_v1beta1_generated_DataPolicyService_GetDataPolicy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datapolicies_v1beta1 - - -async def sample_get_data_policy(): - # Create a client - client = bigquery_datapolicies_v1beta1.DataPolicyServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_datapolicies_v1beta1.GetDataPolicyRequest( - name="name_value", - ) - - # Make the request - response = await client.get_data_policy(request=request) - - # Handle the response - print(response) - -# [END bigquerydatapolicy_v1beta1_generated_DataPolicyService_GetDataPolicy_async] diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_get_data_policy_sync.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_get_data_policy_sync.py deleted file mode 100644 index b98b52e770de..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_get_data_policy_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDataPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datapolicies - - -# [START bigquerydatapolicy_v1beta1_generated_DataPolicyService_GetDataPolicy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datapolicies_v1beta1 - - -def sample_get_data_policy(): - # Create a client - client = bigquery_datapolicies_v1beta1.DataPolicyServiceClient() - - # Initialize request argument(s) - request = bigquery_datapolicies_v1beta1.GetDataPolicyRequest( - name="name_value", - ) - - # Make the request - response = client.get_data_policy(request=request) - - # Handle the response - print(response) - -# [END bigquerydatapolicy_v1beta1_generated_DataPolicyService_GetDataPolicy_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_get_iam_policy_async.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_get_iam_policy_async.py deleted file mode 100644 index 6854228d52e7..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_get_iam_policy_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datapolicies - - -# [START bigquerydatapolicy_v1beta1_generated_DataPolicyService_GetIamPolicy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datapolicies_v1beta1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -async def sample_get_iam_policy(): - # Create a client - client = bigquery_datapolicies_v1beta1.DataPolicyServiceAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.get_iam_policy(request=request) - - # Handle the response - print(response) - -# [END bigquerydatapolicy_v1beta1_generated_DataPolicyService_GetIamPolicy_async] diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_get_iam_policy_sync.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_get_iam_policy_sync.py deleted file mode 100644 index c007d04afc09..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_get_iam_policy_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datapolicies - - -# [START bigquerydatapolicy_v1beta1_generated_DataPolicyService_GetIamPolicy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datapolicies_v1beta1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -def sample_get_iam_policy(): - # Create a client - client = bigquery_datapolicies_v1beta1.DataPolicyServiceClient() - - # Initialize request argument(s) - request = iam_policy_pb2.GetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.get_iam_policy(request=request) - - # Handle the response - print(response) - -# [END bigquerydatapolicy_v1beta1_generated_DataPolicyService_GetIamPolicy_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_list_data_policies_async.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_list_data_policies_async.py deleted file mode 100644 index 585021b546e5..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_list_data_policies_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDataPolicies -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datapolicies - - -# [START bigquerydatapolicy_v1beta1_generated_DataPolicyService_ListDataPolicies_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datapolicies_v1beta1 - - -async def sample_list_data_policies(): - # Create a client - client = bigquery_datapolicies_v1beta1.DataPolicyServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_datapolicies_v1beta1.ListDataPoliciesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_policies(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END bigquerydatapolicy_v1beta1_generated_DataPolicyService_ListDataPolicies_async] diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_list_data_policies_sync.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_list_data_policies_sync.py deleted file mode 100644 index 763b5739bc74..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_list_data_policies_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDataPolicies -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datapolicies - - -# [START bigquerydatapolicy_v1beta1_generated_DataPolicyService_ListDataPolicies_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datapolicies_v1beta1 - - -def sample_list_data_policies(): - # Create a client - client = bigquery_datapolicies_v1beta1.DataPolicyServiceClient() - - # Initialize request argument(s) - request = bigquery_datapolicies_v1beta1.ListDataPoliciesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_policies(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END bigquerydatapolicy_v1beta1_generated_DataPolicyService_ListDataPolicies_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_set_iam_policy_async.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_set_iam_policy_async.py deleted file mode 100644 index 378ddb6ffbc8..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_set_iam_policy_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datapolicies - - -# [START bigquerydatapolicy_v1beta1_generated_DataPolicyService_SetIamPolicy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datapolicies_v1beta1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -async def sample_set_iam_policy(): - # Create a client - client = bigquery_datapolicies_v1beta1.DataPolicyServiceAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = await client.set_iam_policy(request=request) - - # Handle the response - print(response) - -# [END bigquerydatapolicy_v1beta1_generated_DataPolicyService_SetIamPolicy_async] diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_set_iam_policy_sync.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_set_iam_policy_sync.py deleted file mode 100644 index 9e4c387cbc53..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_set_iam_policy_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SetIamPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datapolicies - - -# [START bigquerydatapolicy_v1beta1_generated_DataPolicyService_SetIamPolicy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datapolicies_v1beta1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -def sample_set_iam_policy(): - # Create a client - client = bigquery_datapolicies_v1beta1.DataPolicyServiceClient() - - # Initialize request argument(s) - request = iam_policy_pb2.SetIamPolicyRequest( - resource="resource_value", - ) - - # Make the request - response = client.set_iam_policy(request=request) - - # Handle the response - print(response) - -# [END bigquerydatapolicy_v1beta1_generated_DataPolicyService_SetIamPolicy_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_test_iam_permissions_async.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_test_iam_permissions_async.py deleted file mode 100644 index 5fa546e07ccc..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_test_iam_permissions_async.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for TestIamPermissions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datapolicies - - -# [START bigquerydatapolicy_v1beta1_generated_DataPolicyService_TestIamPermissions_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datapolicies_v1beta1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -async def sample_test_iam_permissions(): - # Create a client - client = bigquery_datapolicies_v1beta1.DataPolicyServiceAsyncClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = await client.test_iam_permissions(request=request) - - # Handle the response - print(response) - -# [END bigquerydatapolicy_v1beta1_generated_DataPolicyService_TestIamPermissions_async] diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_test_iam_permissions_sync.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_test_iam_permissions_sync.py deleted file mode 100644 index 890bf34cbf1e..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_test_iam_permissions_sync.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for TestIamPermissions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datapolicies - - -# [START bigquerydatapolicy_v1beta1_generated_DataPolicyService_TestIamPermissions_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datapolicies_v1beta1 -from google.iam.v1 import iam_policy_pb2 # type: ignore - - -def sample_test_iam_permissions(): - # Create a client - client = bigquery_datapolicies_v1beta1.DataPolicyServiceClient() - - # Initialize request argument(s) - request = iam_policy_pb2.TestIamPermissionsRequest( - resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], - ) - - # Make the request - response = client.test_iam_permissions(request=request) - - # Handle the response - print(response) - -# [END bigquerydatapolicy_v1beta1_generated_DataPolicyService_TestIamPermissions_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_update_data_policy_async.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_update_data_policy_async.py deleted file mode 100644 index 15e8beadbdd3..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_update_data_policy_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDataPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datapolicies - - -# [START bigquerydatapolicy_v1beta1_generated_DataPolicyService_UpdateDataPolicy_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datapolicies_v1beta1 - - -async def sample_update_data_policy(): - # Create a client - client = bigquery_datapolicies_v1beta1.DataPolicyServiceAsyncClient() - - # Initialize request argument(s) - data_policy = bigquery_datapolicies_v1beta1.DataPolicy() - data_policy.policy_tag = "policy_tag_value" - data_policy.data_masking_policy.predefined_expression = "DEFAULT_MASKING_VALUE" - - request = bigquery_datapolicies_v1beta1.UpdateDataPolicyRequest( - data_policy=data_policy, - ) - - # Make the request - response = await client.update_data_policy(request=request) - - # Handle the response - print(response) - -# [END bigquerydatapolicy_v1beta1_generated_DataPolicyService_UpdateDataPolicy_async] diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_update_data_policy_sync.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_update_data_policy_sync.py deleted file mode 100644 index 38d06f4c80f3..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/bigquerydatapolicy_v1beta1_generated_data_policy_service_update_data_policy_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateDataPolicy -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datapolicies - - -# [START bigquerydatapolicy_v1beta1_generated_DataPolicyService_UpdateDataPolicy_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datapolicies_v1beta1 - - -def sample_update_data_policy(): - # Create a client - client = bigquery_datapolicies_v1beta1.DataPolicyServiceClient() - - # Initialize request argument(s) - data_policy = bigquery_datapolicies_v1beta1.DataPolicy() - data_policy.policy_tag = "policy_tag_value" - data_policy.data_masking_policy.predefined_expression = "DEFAULT_MASKING_VALUE" - - request = bigquery_datapolicies_v1beta1.UpdateDataPolicyRequest( - data_policy=data_policy, - ) - - # Make the request - response = client.update_data_policy(request=request) - - # Handle the response - print(response) - -# [END bigquerydatapolicy_v1beta1_generated_DataPolicyService_UpdateDataPolicy_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1beta1.json b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1beta1.json deleted file mode 100644 index f5896d1aac3d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1beta1.json +++ /dev/null @@ -1,1289 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.bigquery.datapolicies.v1beta1", - "version": "v1beta1" - } - ], - "language": "PYTHON", - "name": "google-cloud-bigquery-datapolicies", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_datapolicies_v1beta1.DataPolicyServiceAsyncClient", - "shortName": "DataPolicyServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_datapolicies_v1beta1.DataPolicyServiceAsyncClient.create_data_policy", - "method": { - "fullName": "google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService.CreateDataPolicy", - "service": { - "fullName": "google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService", - "shortName": "DataPolicyService" - }, - "shortName": "CreateDataPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datapolicies_v1beta1.types.CreateDataPolicyRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "data_policy", - "type": "google.cloud.bigquery_datapolicies_v1beta1.types.DataPolicy" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_datapolicies_v1beta1.types.DataPolicy", - "shortName": "create_data_policy" - }, - "description": "Sample for CreateDataPolicy", - "file": "bigquerydatapolicy_v1beta1_generated_data_policy_service_create_data_policy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatapolicy_v1beta1_generated_DataPolicyService_CreateDataPolicy_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - 
"start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatapolicy_v1beta1_generated_data_policy_service_create_data_policy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_datapolicies_v1beta1.DataPolicyServiceClient", - "shortName": "DataPolicyServiceClient" - }, - "fullName": "google.cloud.bigquery_datapolicies_v1beta1.DataPolicyServiceClient.create_data_policy", - "method": { - "fullName": "google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService.CreateDataPolicy", - "service": { - "fullName": "google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService", - "shortName": "DataPolicyService" - }, - "shortName": "CreateDataPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datapolicies_v1beta1.types.CreateDataPolicyRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "data_policy", - "type": "google.cloud.bigquery_datapolicies_v1beta1.types.DataPolicy" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_datapolicies_v1beta1.types.DataPolicy", - "shortName": "create_data_policy" - }, - "description": "Sample for CreateDataPolicy", - "file": "bigquerydatapolicy_v1beta1_generated_data_policy_service_create_data_policy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatapolicy_v1beta1_generated_DataPolicyService_CreateDataPolicy_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 50, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 53, - "start": 51, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 54, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatapolicy_v1beta1_generated_data_policy_service_create_data_policy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_datapolicies_v1beta1.DataPolicyServiceAsyncClient", - "shortName": "DataPolicyServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_datapolicies_v1beta1.DataPolicyServiceAsyncClient.delete_data_policy", - "method": { - "fullName": "google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService.DeleteDataPolicy", - "service": { - "fullName": "google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService", - "shortName": "DataPolicyService" - }, - "shortName": "DeleteDataPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datapolicies_v1beta1.types.DeleteDataPolicyRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_data_policy" - }, - "description": "Sample for DeleteDataPolicy", - "file": "bigquerydatapolicy_v1beta1_generated_data_policy_service_delete_data_policy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatapolicy_v1beta1_generated_DataPolicyService_DeleteDataPolicy_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - 
"start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatapolicy_v1beta1_generated_data_policy_service_delete_data_policy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_datapolicies_v1beta1.DataPolicyServiceClient", - "shortName": "DataPolicyServiceClient" - }, - "fullName": "google.cloud.bigquery_datapolicies_v1beta1.DataPolicyServiceClient.delete_data_policy", - "method": { - "fullName": "google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService.DeleteDataPolicy", - "service": { - "fullName": "google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService", - "shortName": "DataPolicyService" - }, - "shortName": "DeleteDataPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datapolicies_v1beta1.types.DeleteDataPolicyRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_data_policy" - }, - "description": "Sample for DeleteDataPolicy", - "file": "bigquerydatapolicy_v1beta1_generated_data_policy_service_delete_data_policy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatapolicy_v1beta1_generated_DataPolicyService_DeleteDataPolicy_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatapolicy_v1beta1_generated_data_policy_service_delete_data_policy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_datapolicies_v1beta1.DataPolicyServiceAsyncClient", - "shortName": "DataPolicyServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_datapolicies_v1beta1.DataPolicyServiceAsyncClient.get_data_policy", - "method": { - "fullName": "google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService.GetDataPolicy", - "service": { - "fullName": "google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService", - "shortName": "DataPolicyService" - }, - "shortName": "GetDataPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datapolicies_v1beta1.types.GetDataPolicyRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_datapolicies_v1beta1.types.DataPolicy", - "shortName": "get_data_policy" - }, - "description": "Sample for GetDataPolicy", - "file": "bigquerydatapolicy_v1beta1_generated_data_policy_service_get_data_policy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatapolicy_v1beta1_generated_DataPolicyService_GetDataPolicy_async", - "segments": [ - { 
- "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatapolicy_v1beta1_generated_data_policy_service_get_data_policy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_datapolicies_v1beta1.DataPolicyServiceClient", - "shortName": "DataPolicyServiceClient" - }, - "fullName": "google.cloud.bigquery_datapolicies_v1beta1.DataPolicyServiceClient.get_data_policy", - "method": { - "fullName": "google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService.GetDataPolicy", - "service": { - "fullName": "google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService", - "shortName": "DataPolicyService" - }, - "shortName": "GetDataPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datapolicies_v1beta1.types.GetDataPolicyRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_datapolicies_v1beta1.types.DataPolicy", - "shortName": "get_data_policy" - }, - "description": "Sample for GetDataPolicy", - "file": "bigquerydatapolicy_v1beta1_generated_data_policy_service_get_data_policy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatapolicy_v1beta1_generated_DataPolicyService_GetDataPolicy_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatapolicy_v1beta1_generated_data_policy_service_get_data_policy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_datapolicies_v1beta1.DataPolicyServiceAsyncClient", - "shortName": "DataPolicyServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_datapolicies_v1beta1.DataPolicyServiceAsyncClient.get_iam_policy", - "method": { - "fullName": "google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService.GetIamPolicy", - "service": { - "fullName": "google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService", - "shortName": "DataPolicyService" - }, - "shortName": "GetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "get_iam_policy" - }, - "description": "Sample for GetIamPolicy", - "file": "bigquerydatapolicy_v1beta1_generated_data_policy_service_get_iam_policy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"bigquerydatapolicy_v1beta1_generated_DataPolicyService_GetIamPolicy_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatapolicy_v1beta1_generated_data_policy_service_get_iam_policy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_datapolicies_v1beta1.DataPolicyServiceClient", - "shortName": "DataPolicyServiceClient" - }, - "fullName": "google.cloud.bigquery_datapolicies_v1beta1.DataPolicyServiceClient.get_iam_policy", - "method": { - "fullName": "google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService.GetIamPolicy", - "service": { - "fullName": "google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService", - "shortName": "DataPolicyService" - }, - "shortName": "GetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "get_iam_policy" - }, - "description": "Sample for GetIamPolicy", - "file": "bigquerydatapolicy_v1beta1_generated_data_policy_service_get_iam_policy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatapolicy_v1beta1_generated_DataPolicyService_GetIamPolicy_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatapolicy_v1beta1_generated_data_policy_service_get_iam_policy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_datapolicies_v1beta1.DataPolicyServiceAsyncClient", - "shortName": "DataPolicyServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_datapolicies_v1beta1.DataPolicyServiceAsyncClient.list_data_policies", - "method": { - "fullName": "google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService.ListDataPolicies", - "service": { - "fullName": "google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService", - "shortName": "DataPolicyService" - }, - "shortName": "ListDataPolicies" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datapolicies_v1beta1.types.ListDataPoliciesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_datapolicies_v1beta1.services.data_policy_service.pagers.ListDataPoliciesAsyncPager", - "shortName": "list_data_policies" - }, - "description": "Sample for ListDataPolicies", - "file": 
"bigquerydatapolicy_v1beta1_generated_data_policy_service_list_data_policies_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatapolicy_v1beta1_generated_DataPolicyService_ListDataPolicies_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatapolicy_v1beta1_generated_data_policy_service_list_data_policies_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_datapolicies_v1beta1.DataPolicyServiceClient", - "shortName": "DataPolicyServiceClient" - }, - "fullName": "google.cloud.bigquery_datapolicies_v1beta1.DataPolicyServiceClient.list_data_policies", - "method": { - "fullName": "google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService.ListDataPolicies", - "service": { - "fullName": "google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService", - "shortName": "DataPolicyService" - }, - "shortName": "ListDataPolicies" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datapolicies_v1beta1.types.ListDataPoliciesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_datapolicies_v1beta1.services.data_policy_service.pagers.ListDataPoliciesPager", - "shortName": "list_data_policies" - }, - "description": "Sample for ListDataPolicies", - "file": "bigquerydatapolicy_v1beta1_generated_data_policy_service_list_data_policies_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatapolicy_v1beta1_generated_DataPolicyService_ListDataPolicies_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatapolicy_v1beta1_generated_data_policy_service_list_data_policies_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_datapolicies_v1beta1.DataPolicyServiceAsyncClient", - "shortName": "DataPolicyServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_datapolicies_v1beta1.DataPolicyServiceAsyncClient.set_iam_policy", - "method": { - "fullName": "google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService.SetIamPolicy", - "service": { - "fullName": "google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService", - "shortName": "DataPolicyService" - }, - "shortName": "SetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, 
bytes]]]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "set_iam_policy" - }, - "description": "Sample for SetIamPolicy", - "file": "bigquerydatapolicy_v1beta1_generated_data_policy_service_set_iam_policy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatapolicy_v1beta1_generated_DataPolicyService_SetIamPolicy_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatapolicy_v1beta1_generated_data_policy_service_set_iam_policy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_datapolicies_v1beta1.DataPolicyServiceClient", - "shortName": "DataPolicyServiceClient" - }, - "fullName": "google.cloud.bigquery_datapolicies_v1beta1.DataPolicyServiceClient.set_iam_policy", - "method": { - "fullName": "google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService.SetIamPolicy", - "service": { - "fullName": "google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService", - "shortName": "DataPolicyService" - }, - "shortName": "SetIamPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.policy_pb2.Policy", - "shortName": "set_iam_policy" - }, - "description": "Sample for SetIamPolicy", - "file": "bigquerydatapolicy_v1beta1_generated_data_policy_service_set_iam_policy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatapolicy_v1beta1_generated_DataPolicyService_SetIamPolicy_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatapolicy_v1beta1_generated_data_policy_service_set_iam_policy_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_datapolicies_v1beta1.DataPolicyServiceAsyncClient", - "shortName": "DataPolicyServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_datapolicies_v1beta1.DataPolicyServiceAsyncClient.test_iam_permissions", - "method": { - "fullName": "google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService.TestIamPermissions", - "service": { - "fullName": "google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService", - "shortName": "DataPolicyService" - }, - "shortName": "TestIamPermissions" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, 
bytes]]]" - } - ], - "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", - "shortName": "test_iam_permissions" - }, - "description": "Sample for TestIamPermissions", - "file": "bigquerydatapolicy_v1beta1_generated_data_policy_service_test_iam_permissions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatapolicy_v1beta1_generated_DataPolicyService_TestIamPermissions_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatapolicy_v1beta1_generated_data_policy_service_test_iam_permissions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_datapolicies_v1beta1.DataPolicyServiceClient", - "shortName": "DataPolicyServiceClient" - }, - "fullName": "google.cloud.bigquery_datapolicies_v1beta1.DataPolicyServiceClient.test_iam_permissions", - "method": { - "fullName": "google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService.TestIamPermissions", - "service": { - "fullName": "google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService", - "shortName": "DataPolicyService" - }, - "shortName": "TestIamPermissions" - }, - "parameters": [ - { - "name": "request", - "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", - "shortName": "test_iam_permissions" - }, - "description": "Sample for TestIamPermissions", - "file": "bigquerydatapolicy_v1beta1_generated_data_policy_service_test_iam_permissions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatapolicy_v1beta1_generated_DataPolicyService_TestIamPermissions_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 41, - "start": 39, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 42, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatapolicy_v1beta1_generated_data_policy_service_test_iam_permissions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_datapolicies_v1beta1.DataPolicyServiceAsyncClient", - "shortName": "DataPolicyServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_datapolicies_v1beta1.DataPolicyServiceAsyncClient.update_data_policy", - "method": { - "fullName": "google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService.UpdateDataPolicy", - "service": { - "fullName": "google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService", - "shortName": "DataPolicyService" - }, - "shortName": "UpdateDataPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datapolicies_v1beta1.types.UpdateDataPolicyRequest" - }, - { - "name": "data_policy", - "type": 
"google.cloud.bigquery_datapolicies_v1beta1.types.DataPolicy" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_datapolicies_v1beta1.types.DataPolicy", - "shortName": "update_data_policy" - }, - "description": "Sample for UpdateDataPolicy", - "file": "bigquerydatapolicy_v1beta1_generated_data_policy_service_update_data_policy_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatapolicy_v1beta1_generated_DataPolicyService_UpdateDataPolicy_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatapolicy_v1beta1_generated_data_policy_service_update_data_policy_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_datapolicies_v1beta1.DataPolicyServiceClient", - "shortName": "DataPolicyServiceClient" - }, - "fullName": "google.cloud.bigquery_datapolicies_v1beta1.DataPolicyServiceClient.update_data_policy", - "method": { - "fullName": "google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService.UpdateDataPolicy", - "service": { - "fullName": "google.cloud.bigquery.datapolicies.v1beta1.DataPolicyService", - "shortName": "DataPolicyService" - }, - "shortName": "UpdateDataPolicy" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datapolicies_v1beta1.types.UpdateDataPolicyRequest" - }, - { - "name": "data_policy", - "type": "google.cloud.bigquery_datapolicies_v1beta1.types.DataPolicy" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_datapolicies_v1beta1.types.DataPolicy", - "shortName": "update_data_policy" - }, - "description": "Sample for UpdateDataPolicy", - "file": "bigquerydatapolicy_v1beta1_generated_data_policy_service_update_data_policy_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatapolicy_v1beta1_generated_DataPolicyService_UpdateDataPolicy_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatapolicy_v1beta1_generated_data_policy_service_update_data_policy_sync.py" - } - ] -} diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/scripts/fixup_bigquery_datapolicies_v1beta1_keywords.py 
b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/scripts/fixup_bigquery_datapolicies_v1beta1_keywords.py deleted file mode 100644 index ad86e1273c44..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/scripts/fixup_bigquery_datapolicies_v1beta1_keywords.py +++ /dev/null @@ -1,183 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class bigquery_datapoliciesCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_data_policy': ('parent', 'data_policy', ), - 'delete_data_policy': ('name', ), - 'get_data_policy': ('name', ), - 'get_iam_policy': ('resource', 'options', ), - 'list_data_policies': ('parent', 'page_size', 'page_token', ), - 'set_iam_policy': ('resource', 'policy', 'update_mask', ), - 'test_iam_permissions': ('resource', 'permissions', ), - 'update_data_policy': ('data_policy', 'update_mask', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. 
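-                # Illustrative example of the overall rewrite this transformer
-                # produces (the variable names here are hypothetical, not taken
-                # from the generated sources):
-                #   client.create_data_policy(parent, data_policy, retry=retry)
-                # becomes
-                #   client.create_data_policy(
-                #       request={'parent': parent, 'data_policy': data_policy},
-                #       retry=retry)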
- for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=bigquery_datapoliciesCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the bigquery_datapolicies client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/setup.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/setup.py deleted file mode 100644 index 474139ddf62e..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/setup.py +++ /dev/null @@ -1,99 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-bigquery-datapolicies' - - -description = "Google Cloud Bigquery Datapolicies API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/bigquery_datapolicies/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "grpc-google-iam-v1 >= 0.14.0, <1.0.0dev", -] -extras = { -} -url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-datapolicies" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - extras_require=extras, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/testing/constraints-3.10.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/testing/constraints-3.10.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit 
tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/testing/constraints-3.11.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/testing/constraints-3.11.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/testing/constraints-3.12.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/testing/constraints-3.12.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/testing/constraints-3.13.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/testing/constraints-3.13.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/testing/constraints-3.7.txt deleted file mode 100644 index fb7e93a1b473..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/testing/constraints-3.7.txt +++ /dev/null @@ -1,11 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -proto-plus==1.22.3 -protobuf==3.20.2 -grpc-google-iam-v1==0.14.0 diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/testing/constraints-3.8.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/testing/constraints-3.8.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
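-# (Entries are left unpinned here, so unit tests exercise the newest
-# compatible releases; the lower bounds declared in setup.py are checked
-# separately via the pinned constraints-3.7.txt.)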
-google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/testing/constraints-3.9.txt deleted file mode 100644 index ad3f0fa58e2d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/testing/constraints-3.9.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/tests/__init__.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/tests/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/tests/unit/__init__.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/tests/unit/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/tests/unit/gapic/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/tests/unit/gapic/bigquery_datapolicies_v1beta1/__init__.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/tests/unit/gapic/bigquery_datapolicies_v1beta1/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/tests/unit/gapic/bigquery_datapolicies_v1beta1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/tests/unit/gapic/bigquery_datapolicies_v1beta1/test_data_policy_service.py b/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/tests/unit/gapic/bigquery_datapolicies_v1beta1/test_data_policy_service.py deleted file mode 100644 index 7a8638d9e886..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datapolicies/v1beta1/tests/unit/gapic/bigquery_datapolicies_v1beta1/test_data_policy_service.py +++ /dev/null @@ -1,4346 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.bigquery_datapolicies_v1beta1.services.data_policy_service import DataPolicyServiceAsyncClient -from google.cloud.bigquery_datapolicies_v1beta1.services.data_policy_service import DataPolicyServiceClient -from google.cloud.bigquery_datapolicies_v1beta1.services.data_policy_service import pagers -from google.cloud.bigquery_datapolicies_v1beta1.services.data_policy_service import transports -from google.cloud.bigquery_datapolicies_v1beta1.types import datapolicy -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import field_mask_pb2 # type: ignore -from google.type import expr_pb2 # type: ignore -import google.auth - - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": "service account credentials", - "principal": "service-account@example.com", -} -CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert DataPolicyServiceClient._get_default_mtls_endpoint(None) is None - assert DataPolicyServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert DataPolicyServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert DataPolicyServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert DataPolicyServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert DataPolicyServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert DataPolicyServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert DataPolicyServiceClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert DataPolicyServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - DataPolicyServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert DataPolicyServiceClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert DataPolicyServiceClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert DataPolicyServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - DataPolicyServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert DataPolicyServiceClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert DataPolicyServiceClient._get_client_cert_source(None, False) is None - assert DataPolicyServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert DataPolicyServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert DataPolicyServiceClient._get_client_cert_source(None, True) is 
-            assert DataPolicyServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source
-
-@mock.patch.object(DataPolicyServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataPolicyServiceClient))
-@mock.patch.object(DataPolicyServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataPolicyServiceAsyncClient))
-def test__get_api_endpoint():
-    api_override = "foo.com"
-    mock_client_cert_source = mock.Mock()
-    default_universe = DataPolicyServiceClient._DEFAULT_UNIVERSE
-    default_endpoint = DataPolicyServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe)
-    mock_universe = "bar.com"
-    mock_endpoint = DataPolicyServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe)
-
-    assert DataPolicyServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override
-    assert DataPolicyServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == DataPolicyServiceClient.DEFAULT_MTLS_ENDPOINT
-    assert DataPolicyServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint
-    assert DataPolicyServiceClient._get_api_endpoint(None, None, default_universe, "always") == DataPolicyServiceClient.DEFAULT_MTLS_ENDPOINT
-    assert DataPolicyServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == DataPolicyServiceClient.DEFAULT_MTLS_ENDPOINT
-    assert DataPolicyServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint
-    assert DataPolicyServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint
-
-    with pytest.raises(MutualTLSChannelError) as excinfo:
-        DataPolicyServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto")
-    assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com."
-
-
-def test__get_universe_domain():
-    client_universe_domain = "foo.com"
-    universe_domain_env = "bar.com"
-
-    assert DataPolicyServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain
-    assert DataPolicyServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env
-    assert DataPolicyServiceClient._get_universe_domain(None, None) == DataPolicyServiceClient._DEFAULT_UNIVERSE
-
-    with pytest.raises(ValueError) as excinfo:
-        DataPolicyServiceClient._get_universe_domain("", None)
-    assert str(excinfo.value) == "Universe Domain cannot be an empty string."
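
The two `_add_cred_info_for_auth_errors` tests that follow fix the intended behavior: for auth-flavored status codes (401, 403, 404) the client appends the JSON-serialized credential info to the error's `details`, and it does nothing when the credentials cannot describe themselves or when the code is something else (e.g. 500). A sketch of that rule, assuming `get_cred_info` returns a JSON-serializable dict (the function name here is illustrative, not the client's private API):

import json

_AUTH_ERROR_CODES = (401, 403, 404)

def sketch_add_cred_info_for_auth_errors(error, credentials):
    # Append credential info only for auth-related errors; no-op otherwise.
    get_cred_info = getattr(credentials, "get_cred_info", None)
    cred_info = get_cred_info() if get_cred_info else None
    if cred_info and error.code in _AUTH_ERROR_CODES:
        error.details.append(json.dumps(cred_info))
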
-
-@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [
-    (401, CRED_INFO_JSON, True),
-    (403, CRED_INFO_JSON, True),
-    (404, CRED_INFO_JSON, True),
-    (500, CRED_INFO_JSON, False),
-    (401, None, False),
-    (403, None, False),
-    (404, None, False),
-    (500, None, False)
-])
-def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info):
-    cred = mock.Mock(["get_cred_info"])
-    cred.get_cred_info = mock.Mock(return_value=cred_info_json)
-    client = DataPolicyServiceClient(credentials=cred)
-    client._transport._credentials = cred
-
-    error = core_exceptions.GoogleAPICallError("message", details=["foo"])
-    error.code = error_code
-
-    client._add_cred_info_for_auth_errors(error)
-    if show_cred_info:
-        assert error.details == ["foo", CRED_INFO_STRING]
-    else:
-        assert error.details == ["foo"]
-
-@pytest.mark.parametrize("error_code", [401,403,404,500])
-def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code):
-    cred = mock.Mock([])
-    assert not hasattr(cred, "get_cred_info")
-    client = DataPolicyServiceClient(credentials=cred)
-    client._transport._credentials = cred
-
-    error = core_exceptions.GoogleAPICallError("message", details=[])
-    error.code = error_code
-
-    client._add_cred_info_for_auth_errors(error)
-    assert error.details == []
-
-@pytest.mark.parametrize("client_class,transport_name", [
-    (DataPolicyServiceClient, "grpc"),
-    (DataPolicyServiceAsyncClient, "grpc_asyncio"),
-])
-def test_data_policy_service_client_from_service_account_info(client_class, transport_name):
-    creds = ga_credentials.AnonymousCredentials()
-    with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
-        factory.return_value = creds
-        info = {"valid": True}
-        client = client_class.from_service_account_info(info, transport=transport_name)
-        assert client.transport._credentials == creds
-        assert isinstance(client, client_class)
-
-    assert client.transport._host == (
-        'bigquerydatapolicy.googleapis.com:443'
-    )
-
-
-@pytest.mark.parametrize("transport_class,transport_name", [
-    (transports.DataPolicyServiceGrpcTransport, "grpc"),
-    (transports.DataPolicyServiceGrpcAsyncIOTransport, "grpc_asyncio"),
-])
-def test_data_policy_service_client_service_account_always_use_jwt(transport_class, transport_name):
-    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
-        creds = service_account.Credentials(None, None, None)
-        transport = transport_class(credentials=creds, always_use_jwt_access=True)
-        use_jwt.assert_called_once_with(True)
-
-    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
-        creds = service_account.Credentials(None, None, None)
-        transport = transport_class(credentials=creds, always_use_jwt_access=False)
-        use_jwt.assert_not_called()
-
-
-@pytest.mark.parametrize("client_class,transport_name", [
-    (DataPolicyServiceClient, "grpc"),
-    (DataPolicyServiceAsyncClient, "grpc_asyncio"),
-])
-def test_data_policy_service_client_from_service_account_file(client_class, transport_name):
-    creds = ga_credentials.AnonymousCredentials()
-    with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory:
-        factory.return_value = creds
-        client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name)
-        assert client.transport._credentials == creds
-        assert isinstance(client, client_class)
-
-        client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name)
-        assert client.transport._credentials == creds
-        assert isinstance(client, client_class)
-
-    assert client.transport._host == (
-        'bigquerydatapolicy.googleapis.com:443'
-    )
-
-
-def test_data_policy_service_client_get_transport_class():
-    transport = DataPolicyServiceClient.get_transport_class()
-    available_transports = [
-        transports.DataPolicyServiceGrpcTransport,
-    ]
-    assert transport in available_transports
-
-    transport = DataPolicyServiceClient.get_transport_class("grpc")
-    assert transport == transports.DataPolicyServiceGrpcTransport
-
-
-@pytest.mark.parametrize("client_class,transport_class,transport_name", [
-    (DataPolicyServiceClient, transports.DataPolicyServiceGrpcTransport, "grpc"),
-    (DataPolicyServiceAsyncClient, transports.DataPolicyServiceGrpcAsyncIOTransport, "grpc_asyncio"),
-])
-@mock.patch.object(DataPolicyServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataPolicyServiceClient))
-@mock.patch.object(DataPolicyServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataPolicyServiceAsyncClient))
-def test_data_policy_service_client_client_options(client_class, transport_class, transport_name):
-    # Check that if channel is provided we won't create a new one.
-    with mock.patch.object(DataPolicyServiceClient, 'get_transport_class') as gtc:
-        transport = transport_class(
-            credentials=ga_credentials.AnonymousCredentials()
-        )
-        client = client_class(transport=transport)
-        gtc.assert_not_called()
-
-    # Check that if channel is provided via str we will create a new one.
-    with mock.patch.object(DataPolicyServiceClient, 'get_transport_class') as gtc:
-        client = client_class(transport=transport_name)
-        gtc.assert_called()
-
-    # Check the case api_endpoint is provided.
-    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(transport=transport_name, client_options=options)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file=None,
-            host="squid.clam.whelk",
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id=None,
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience=None,
-        )
-
-    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
-    # "never".
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
-        with mock.patch.object(transport_class, '__init__') as patched:
-            patched.return_value = None
-            client = client_class(transport=transport_name)
-            patched.assert_called_once_with(
-                credentials=None,
-                credentials_file=None,
-                host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-                scopes=None,
-                client_cert_source_for_mtls=None,
-                quota_project_id=None,
-                client_info=transports.base.DEFAULT_CLIENT_INFO,
-                always_use_jwt_access=True,
-                api_audience=None,
-            )
-
-    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
-    # "always".
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
-        with mock.patch.object(transport_class, '__init__') as patched:
-            patched.return_value = None
-            client = client_class(transport=transport_name)
-            patched.assert_called_once_with(
-                credentials=None,
-                credentials_file=None,
-                host=client.DEFAULT_MTLS_ENDPOINT,
-                scopes=None,
-                client_cert_source_for_mtls=None,
-                quota_project_id=None,
-                client_info=transports.base.DEFAULT_CLIENT_INFO,
-                always_use_jwt_access=True,
-                api_audience=None,
-            )
-
-    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
-    # unsupported value.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
-        with pytest.raises(MutualTLSChannelError) as excinfo:
-            client = client_class(transport=transport_name)
-        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
-
-    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
-        with pytest.raises(ValueError) as excinfo:
-            client = client_class(transport=transport_name)
-        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
-
-    # Check the case quota_project_id is provided.
-    options = client_options.ClientOptions(quota_project_id="octopus")
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(client_options=options, transport=transport_name)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file=None,
-            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id="octopus",
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience=None,
-        )
-    # Check the case api_audience is provided.
-    options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(client_options=options, transport=transport_name)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file=None,
-            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id=None,
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience="https://language.googleapis.com"
-        )
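
Before the mTLS autoswitch test, it may help to restate the endpoint rule the preceding cases encode: an explicit `ClientOptions.api_endpoint` always wins; otherwise `GOOGLE_API_USE_MTLS_ENDPOINT=always` forces the mTLS endpoint, `never` forces the plain endpoint, and `auto` (the default) picks mTLS only when a client certificate is actually available. A compact sketch of that decision (parameter names are illustrative, not the client's real `_get_api_endpoint` signature):

def sketch_pick_endpoint(api_override, use_mtls_env, have_client_cert,
                         plain_endpoint, mtls_endpoint):
    # An explicit override from ClientOptions.api_endpoint wins unconditionally.
    if api_override:
        return api_override
    if use_mtls_env == "always":
        return mtls_endpoint
    if use_mtls_env == "auto" and have_client_cert:
        return mtls_endpoint
    # "never", or "auto" without a certificate, falls back to the plain endpoint.
    return plain_endpoint
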
-
-@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
-    (DataPolicyServiceClient, transports.DataPolicyServiceGrpcTransport, "grpc", "true"),
-    (DataPolicyServiceAsyncClient, transports.DataPolicyServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"),
-    (DataPolicyServiceClient, transports.DataPolicyServiceGrpcTransport, "grpc", "false"),
-    (DataPolicyServiceAsyncClient, transports.DataPolicyServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"),
-])
-@mock.patch.object(DataPolicyServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataPolicyServiceClient))
-@mock.patch.object(DataPolicyServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataPolicyServiceAsyncClient))
-@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
-def test_data_policy_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
-    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
-    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
-
-    # Check the case client_cert_source is provided. Whether client cert is used depends on
-    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
-        options = client_options.ClientOptions(client_cert_source=client_cert_source_callback)
-        with mock.patch.object(transport_class, '__init__') as patched:
-            patched.return_value = None
-            client = client_class(client_options=options, transport=transport_name)
-
-            if use_client_cert_env == "false":
-                expected_client_cert_source = None
-                expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE)
-            else:
-                expected_client_cert_source = client_cert_source_callback
-                expected_host = client.DEFAULT_MTLS_ENDPOINT
-
-            patched.assert_called_once_with(
-                credentials=None,
-                credentials_file=None,
-                host=expected_host,
-                scopes=None,
-                client_cert_source_for_mtls=expected_client_cert_source,
-                quota_project_id=None,
-                client_info=transports.base.DEFAULT_CLIENT_INFO,
-                always_use_jwt_access=True,
-                api_audience=None,
-            )
-
-    # Check the case ADC client cert is provided. Whether client cert is used depends on
-    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
-        with mock.patch.object(transport_class, '__init__') as patched:
-            with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
-                with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback):
-                    if use_client_cert_env == "false":
-                        expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE)
-                        expected_client_cert_source = None
-                    else:
-                        expected_host = client.DEFAULT_MTLS_ENDPOINT
-                        expected_client_cert_source = client_cert_source_callback
-
-                    patched.return_value = None
-                    client = client_class(transport=transport_name)
-                    patched.assert_called_once_with(
-                        credentials=None,
-                        credentials_file=None,
-                        host=expected_host,
-                        scopes=None,
-                        client_cert_source_for_mtls=expected_client_cert_source,
-                        quota_project_id=None,
-                        client_info=transports.base.DEFAULT_CLIENT_INFO,
-                        always_use_jwt_access=True,
-                        api_audience=None,
-                    )
-
-    # Check the case client_cert_source and ADC client cert are not provided.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
-        with mock.patch.object(transport_class, '__init__') as patched:
-            with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False):
-                patched.return_value = None
-                client = client_class(transport=transport_name)
-                patched.assert_called_once_with(
-                    credentials=None,
-                    credentials_file=None,
-                    host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-                    scopes=None,
-                    client_cert_source_for_mtls=None,
-                    quota_project_id=None,
-                    client_info=transports.base.DEFAULT_CLIENT_INFO,
-                    always_use_jwt_access=True,
-                    api_audience=None,
-                )
-
-
-@pytest.mark.parametrize("client_class", [
-    DataPolicyServiceClient, DataPolicyServiceAsyncClient
-])
-@mock.patch.object(DataPolicyServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataPolicyServiceClient))
-@mock.patch.object(DataPolicyServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataPolicyServiceAsyncClient))
-def test_data_policy_service_client_get_mtls_endpoint_and_cert_source(client_class):
-    mock_client_cert_source = mock.Mock()
-
-    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true".
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
-        mock_api_endpoint = "foo"
-        options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint)
-        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options)
-        assert api_endpoint == mock_api_endpoint
-        assert cert_source == mock_client_cert_source
-
-    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false".
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
-        mock_client_cert_source = mock.Mock()
-        mock_api_endpoint = "foo"
-        options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint)
-        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options)
-        assert api_endpoint == mock_api_endpoint
-        assert cert_source is None
-
-    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never".
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
-        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
-        assert api_endpoint == client_class.DEFAULT_ENDPOINT
-        assert cert_source is None
-
-    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always".
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
-        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
-        assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
-        assert cert_source is None
-
-    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
-        with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False):
-            api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
-            assert api_endpoint == client_class.DEFAULT_ENDPOINT
-            assert cert_source is None
-
-    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
-        with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
-            with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source):
-                api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
-                assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
-                assert cert_source == mock_client_cert_source
-
-    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
-    # unsupported value.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
-        with pytest.raises(MutualTLSChannelError) as excinfo:
-            client_class.get_mtls_endpoint_and_cert_source()
-
-        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
-
-    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
-        with pytest.raises(ValueError) as excinfo:
-            client_class.get_mtls_endpoint_and_cert_source()
-
-        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
-
-@pytest.mark.parametrize("client_class", [
-    DataPolicyServiceClient, DataPolicyServiceAsyncClient
-])
-@mock.patch.object(DataPolicyServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataPolicyServiceClient))
-@mock.patch.object(DataPolicyServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataPolicyServiceAsyncClient))
-def test_data_policy_service_client_client_api_endpoint(client_class):
-    mock_client_cert_source = client_cert_source_callback
-    api_override = "foo.com"
-    default_universe = DataPolicyServiceClient._DEFAULT_UNIVERSE
-    default_endpoint = DataPolicyServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe)
-    mock_universe = "bar.com"
-    mock_endpoint = DataPolicyServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe)
-
-    # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true",
-    # use ClientOptions.api_endpoint as the api endpoint regardless.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
-        with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"):
-            options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override)
-            client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
-            assert client.api_endpoint == api_override
-
-    # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never",
-    # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
-        client = client_class(credentials=ga_credentials.AnonymousCredentials())
-        assert client.api_endpoint == default_endpoint
-
-    # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always",
-    # use the DEFAULT_MTLS_ENDPOINT as the api endpoint.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
-        client = client_class(credentials=ga_credentials.AnonymousCredentials())
-        assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
-
-    # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default),
-    # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist,
-    # and ClientOptions.universe_domain="bar.com",
-    # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint.
-    options = client_options.ClientOptions()
-    universe_exists = hasattr(options, "universe_domain")
-    if universe_exists:
-        options = client_options.ClientOptions(universe_domain=mock_universe)
-        client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
-    else:
-        client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
-    assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint)
-    assert client.universe_domain == (mock_universe if universe_exists else default_universe)
-
-    # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never",
-    # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint.
-    options = client_options.ClientOptions()
-    if hasattr(options, "universe_domain"):
-        delattr(options, "universe_domain")
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
-        client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials())
-        assert client.api_endpoint == default_endpoint
-
-
-@pytest.mark.parametrize("client_class,transport_class,transport_name", [
-    (DataPolicyServiceClient, transports.DataPolicyServiceGrpcTransport, "grpc"),
-    (DataPolicyServiceAsyncClient, transports.DataPolicyServiceGrpcAsyncIOTransport, "grpc_asyncio"),
-])
-def test_data_policy_service_client_client_options_scopes(client_class, transport_class, transport_name):
-    # Check the case scopes are provided.
-    options = client_options.ClientOptions(
-        scopes=["1", "2"],
-    )
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(client_options=options, transport=transport_name)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file=None,
-            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-            scopes=["1", "2"],
-            client_cert_source_for_mtls=None,
-            quota_project_id=None,
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience=None,
-        )
-
-@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [
-    (DataPolicyServiceClient, transports.DataPolicyServiceGrpcTransport, "grpc", grpc_helpers),
-    (DataPolicyServiceAsyncClient, transports.DataPolicyServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async),
-])
-def test_data_policy_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers):
-    # Check the case credentials file is provided.
-    options = client_options.ClientOptions(
-        credentials_file="credentials.json"
-    )
-
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(client_options=options, transport=transport_name)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file="credentials.json",
-            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id=None,
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience=None,
-        )
-
-def test_data_policy_service_client_client_options_from_dict():
-    with mock.patch('google.cloud.bigquery_datapolicies_v1beta1.services.data_policy_service.transports.DataPolicyServiceGrpcTransport.__init__') as grpc_transport:
-        grpc_transport.return_value = None
-        client = DataPolicyServiceClient(
-            client_options={'api_endpoint': 'squid.clam.whelk'}
-        )
-        grpc_transport.assert_called_once_with(
-            credentials=None,
-            credentials_file=None,
-            host="squid.clam.whelk",
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id=None,
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience=None,
-        )
-
-
-@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [
-    (DataPolicyServiceClient, transports.DataPolicyServiceGrpcTransport, "grpc", grpc_helpers),
-    (DataPolicyServiceAsyncClient, transports.DataPolicyServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async),
-])
-def test_data_policy_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers):
-    # Check the case credentials file is provided.
-    options = client_options.ClientOptions(
-        credentials_file="credentials.json"
-    )
-
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(client_options=options, transport=transport_name)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file="credentials.json",
-            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id=None,
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience=None,
-        )
-
-    # test that the credentials from file are saved and used as the credentials.
-    with mock.patch.object(
-        google.auth, "load_credentials_from_file", autospec=True
-    ) as load_creds, mock.patch.object(
-        google.auth, "default", autospec=True
-    ) as adc, mock.patch.object(
-        grpc_helpers, "create_channel"
-    ) as create_channel:
-        creds = ga_credentials.AnonymousCredentials()
-        file_creds = ga_credentials.AnonymousCredentials()
-        load_creds.return_value = (file_creds, None)
-        adc.return_value = (creds, None)
-        client = client_class(client_options=options, transport=transport_name)
-        create_channel.assert_called_with(
-            "bigquerydatapolicy.googleapis.com:443",
-            credentials=file_creds,
-            credentials_file=None,
-            quota_project_id=None,
-            default_scopes=(
-                'https://www.googleapis.com/auth/bigquery',
-                'https://www.googleapis.com/auth/cloud-platform',
-            ),
-            scopes=None,
-            default_host="bigquerydatapolicy.googleapis.com",
-            ssl_credentials=None,
-            options=[
-                ("grpc.max_send_message_length", -1),
-                ("grpc.max_receive_message_length", -1),
-            ],
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    datapolicy.CreateDataPolicyRequest,
-    dict,
-])
-def test_create_data_policy(request_type, transport: str = 'grpc'):
-    client = DataPolicyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_data_policy),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = datapolicy.DataPolicy(
-            name='name_value',
-            data_policy_type=datapolicy.DataPolicy.DataPolicyType.COLUMN_LEVEL_SECURITY_POLICY,
-            data_policy_id='data_policy_id_value',
-            policy_tag='policy_tag_value',
-        )
-        response = client.create_data_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = datapolicy.CreateDataPolicyRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, datapolicy.DataPolicy)
-    assert response.name == 'name_value'
-    assert response.data_policy_type == datapolicy.DataPolicy.DataPolicyType.COLUMN_LEVEL_SECURITY_POLICY
-    assert response.data_policy_id == 'data_policy_id_value'
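
For contrast with the mocked transport above, this is roughly what a real invocation of the same RPC looks like; the resource names below are placeholders and the snippet assumes Application Default Credentials are configured:

from google.cloud.bigquery_datapolicies_v1beta1 import DataPolicyServiceClient
from google.cloud.bigquery_datapolicies_v1beta1.types import datapolicy

client = DataPolicyServiceClient()  # picks up Application Default Credentials
policy = client.create_data_policy(
    parent="projects/my-project/locations/us",  # placeholder resource name
    data_policy=datapolicy.DataPolicy(
        data_policy_id="pii_mask",  # placeholder policy id
        policy_tag="projects/my-project/locations/us/taxonomies/1/policyTags/2",  # placeholder
        data_policy_type=datapolicy.DataPolicy.DataPolicyType.COLUMN_LEVEL_SECURITY_POLICY,
    ),
)
print(policy.name)
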
-
-
-def test_create_data_policy_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = DataPolicyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 fields are populated automatically
-    # if they meet the requirements of AIP-4235.
-    request = datapolicy.CreateDataPolicyRequest(
-        parent='parent_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_data_policy),
-            '__call__') as call:
-        call.return_value.name = "foo"  # operation_request.operation in compute client(s) expects a string.
-        client.create_data_policy(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == datapolicy.CreateDataPolicyRequest(
-            parent='parent_value',
-        )
-
-def test_create_data_policy_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call.
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataPolicyServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.create_data_policy in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expects a string.
-        client._transport._wrapped_methods[client._transport.create_data_policy] = mock_rpc
-        request = {}
-        client.create_data_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.create_data_policy(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_create_data_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call.
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = DataPolicyServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.create_data_policy in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.create_data_policy] = mock_rpc
-
-        request = {}
-        await client.create_data_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.create_data_policy(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_create_data_policy_async(transport: str = 'grpc_asyncio', request_type=datapolicy.CreateDataPolicyRequest):
-    client = DataPolicyServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_data_policy),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datapolicy.DataPolicy(
-            name='name_value',
-            data_policy_type=datapolicy.DataPolicy.DataPolicyType.COLUMN_LEVEL_SECURITY_POLICY,
-            data_policy_id='data_policy_id_value',
-        ))
-        response = await client.create_data_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = datapolicy.CreateDataPolicyRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, datapolicy.DataPolicy)
-    assert response.name == 'name_value'
-    assert response.data_policy_type == datapolicy.DataPolicy.DataPolicyType.COLUMN_LEVEL_SECURITY_POLICY
-    assert response.data_policy_id == 'data_policy_id_value'
-
-
-@pytest.mark.asyncio
-async def test_create_data_policy_async_from_dict():
-    await test_create_data_policy_async(request_type=dict)
-
-def test_create_data_policy_field_headers():
-    client = DataPolicyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = datapolicy.CreateDataPolicyRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_data_policy),
-            '__call__') as call:
-        call.return_value = datapolicy.DataPolicy()
-        client.create_data_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_create_data_policy_field_headers_async():
-    client = DataPolicyServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = datapolicy.CreateDataPolicyRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_data_policy),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datapolicy.DataPolicy())
-        await client.create_data_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
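
The field-header tests above assert on the implicit `x-goog-request-params` metadata entry, which carries the request fields that appear in the request URI so the server can route the call. A toy illustration of how such an entry could be assembled (this is not the GAPIC helper itself):

from urllib.parse import quote

def sketch_routing_metadata(**routing_fields):
    # Build the ('x-goog-request-params', ...) entry from URI-relevant fields.
    params = "&".join(
        "{}={}".format(field, quote(str(value), safe=""))
        for field, value in routing_fields.items()
    )
    return ("x-goog-request-params", params)

# sketch_routing_metadata(parent="parent_value")
# -> ('x-goog-request-params', 'parent=parent_value')
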
-
-
-def test_create_data_policy_flattened():
-    client = DataPolicyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_data_policy),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = datapolicy.DataPolicy()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.create_data_policy(
-            parent='parent_value',
-            data_policy=datapolicy.DataPolicy(policy_tag='policy_tag_value'),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].data_policy
-        mock_val = datapolicy.DataPolicy(policy_tag='policy_tag_value')
-        assert arg == mock_val
-
-
-def test_create_data_policy_flattened_error():
-    client = DataPolicyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.create_data_policy(
-            datapolicy.CreateDataPolicyRequest(),
-            parent='parent_value',
-            data_policy=datapolicy.DataPolicy(policy_tag='policy_tag_value'),
-        )
-
-@pytest.mark.asyncio
-async def test_create_data_policy_flattened_async():
-    client = DataPolicyServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_data_policy),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datapolicy.DataPolicy())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.create_data_policy(
-            parent='parent_value',
-            data_policy=datapolicy.DataPolicy(policy_tag='policy_tag_value'),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].data_policy
-        mock_val = datapolicy.DataPolicy(policy_tag='policy_tag_value')
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_data_policy_flattened_error_async():
-    client = DataPolicyServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.create_data_policy(
-            datapolicy.CreateDataPolicyRequest(),
-            parent='parent_value',
-            data_policy=datapolicy.DataPolicy(policy_tag='policy_tag_value'),
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    datapolicy.UpdateDataPolicyRequest,
-    dict,
-])
-def test_update_data_policy(request_type, transport: str = 'grpc'):
-    client = DataPolicyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_data_policy),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = datapolicy.DataPolicy(
-            name='name_value',
-            data_policy_type=datapolicy.DataPolicy.DataPolicyType.COLUMN_LEVEL_SECURITY_POLICY,
-            data_policy_id='data_policy_id_value',
-            policy_tag='policy_tag_value',
-        )
-        response = client.update_data_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = datapolicy.UpdateDataPolicyRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, datapolicy.DataPolicy)
-    assert response.name == 'name_value'
-    assert response.data_policy_type == datapolicy.DataPolicy.DataPolicyType.COLUMN_LEVEL_SECURITY_POLICY
-    assert response.data_policy_id == 'data_policy_id_value'
-
-
-def test_update_data_policy_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = DataPolicyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 fields are populated automatically
-    # if they meet the requirements of AIP-4235.
-    request = datapolicy.UpdateDataPolicyRequest()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_data_policy),
-            '__call__') as call:
-        call.return_value.name = "foo"  # operation_request.operation in compute client(s) expects a string.
-        client.update_data_policy(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == datapolicy.UpdateDataPolicyRequest()
-
-def test_update_data_policy_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call.
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataPolicyServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.update_data_policy in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expects a string.
-        client._transport._wrapped_methods[client._transport.update_data_policy] = mock_rpc
-        request = {}
-        client.update_data_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.update_data_policy(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_update_data_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call.
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = DataPolicyServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.update_data_policy in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.update_data_policy] = mock_rpc
-
-        request = {}
-        await client.update_data_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.update_data_policy(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_update_data_policy_async(transport: str = 'grpc_asyncio', request_type=datapolicy.UpdateDataPolicyRequest):
-    client = DataPolicyServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_data_policy),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datapolicy.DataPolicy(
-            name='name_value',
-            data_policy_type=datapolicy.DataPolicy.DataPolicyType.COLUMN_LEVEL_SECURITY_POLICY,
-            data_policy_id='data_policy_id_value',
-        ))
-        response = await client.update_data_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = datapolicy.UpdateDataPolicyRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, datapolicy.DataPolicy)
-    assert response.name == 'name_value'
-    assert response.data_policy_type == datapolicy.DataPolicy.DataPolicyType.COLUMN_LEVEL_SECURITY_POLICY
-    assert response.data_policy_id == 'data_policy_id_value'
-
-
-@pytest.mark.asyncio
-async def test_update_data_policy_async_from_dict():
-    await test_update_data_policy_async(request_type=dict)
-
-def test_update_data_policy_field_headers():
-    client = DataPolicyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = datapolicy.UpdateDataPolicyRequest()
-
-    request.data_policy.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_data_policy),
-            '__call__') as call:
-        call.return_value = datapolicy.DataPolicy()
-        client.update_data_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'data_policy.name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_update_data_policy_field_headers_async():
-    client = DataPolicyServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = datapolicy.UpdateDataPolicyRequest()
-
-    request.data_policy.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_data_policy),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datapolicy.DataPolicy())
-        await client.update_data_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'data_policy.name=name_value',
-    ) in kw['metadata']
-
-
-def test_update_data_policy_flattened():
-    client = DataPolicyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_data_policy),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = datapolicy.DataPolicy()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.update_data_policy(
-            data_policy=datapolicy.DataPolicy(policy_tag='policy_tag_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].data_policy
-        mock_val = datapolicy.DataPolicy(policy_tag='policy_tag_value')
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-
-def test_update_data_policy_flattened_error():
-    client = DataPolicyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.update_data_policy(
-            datapolicy.UpdateDataPolicyRequest(),
-            data_policy=datapolicy.DataPolicy(policy_tag='policy_tag_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-@pytest.mark.asyncio
-async def test_update_data_policy_flattened_async():
-    client = DataPolicyServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_data_policy),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datapolicy.DataPolicy())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.update_data_policy(
-            data_policy=datapolicy.DataPolicy(policy_tag='policy_tag_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].data_policy
-        mock_val = datapolicy.DataPolicy(policy_tag='policy_tag_value')
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_data_policy_flattened_error_async():
-    client = DataPolicyServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.update_data_policy(
-            datapolicy.UpdateDataPolicyRequest(),
-            data_policy=datapolicy.DataPolicy(policy_tag='policy_tag_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    datapolicy.DeleteDataPolicyRequest,
-    dict,
-])
-def test_delete_data_policy(request_type, transport: str = 'grpc'):
-    client = DataPolicyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_data_policy),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        response = client.delete_data_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = datapolicy.DeleteDataPolicyRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-def test_delete_data_policy_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = DataPolicyServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 fields are populated automatically
-    # if they meet the requirements of AIP-4235.
-    request = datapolicy.DeleteDataPolicyRequest(
-        name='name_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_data_policy),
-            '__call__') as call:
-        call.return_value.name = "foo"  # operation_request.operation in compute client(s) expects a string.
-        client.delete_data_policy(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == datapolicy.DeleteDataPolicyRequest(
-            name='name_value',
-        )
-
-def test_delete_data_policy_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call.
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataPolicyServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.delete_data_policy in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expects a string.
-        client._transport._wrapped_methods[client._transport.delete_data_policy] = mock_rpc
-        request = {}
-        client.delete_data_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.delete_data_policy(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_delete_data_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call.
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = DataPolicyServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.delete_data_policy in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.delete_data_policy] = mock_rpc
-
-        request = {}
-        await client.delete_data_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.delete_data_policy(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_delete_data_policy_async(transport: str = 'grpc_asyncio', request_type=datapolicy.DeleteDataPolicyRequest):
-    client = DataPolicyServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_data_policy),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        response = await client.delete_data_policy(request)
-
-        # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = datapolicy.DeleteDataPolicyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_data_policy_async_from_dict(): - await test_delete_data_policy_async(request_type=dict) - -def test_delete_data_policy_field_headers(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datapolicy.DeleteDataPolicyRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_policy), - '__call__') as call: - call.return_value = None - client.delete_data_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_data_policy_field_headers_async(): - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datapolicy.DeleteDataPolicyRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_data_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_data_policy_flattened(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_data_policy( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_data_policy_flattened_error(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
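- # The generated clients build the request either from a request object or
- # from the flattened keyword arguments, never both, so mixing the two
- # styles is rejected with a ValueError before any RPC is attempted.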
- with pytest.raises(ValueError):
- client.delete_data_policy(
- datapolicy.DeleteDataPolicyRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_delete_data_policy_flattened_async():
- client = DataPolicyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_data_policy),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.delete_data_policy(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_data_policy_flattened_error_async():
- client = DataPolicyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.delete_data_policy(
- datapolicy.DeleteDataPolicyRequest(),
- name='name_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- datapolicy.GetDataPolicyRequest,
- dict,
-])
-def test_get_data_policy(request_type, transport: str = 'grpc'):
- client = DataPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_data_policy),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = datapolicy.DataPolicy(
- name='name_value',
- data_policy_type=datapolicy.DataPolicy.DataPolicyType.COLUMN_LEVEL_SECURITY_POLICY,
- data_policy_id='data_policy_id_value',
- policy_tag='policy_tag_value',
- )
- response = client.get_data_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = datapolicy.GetDataPolicyRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, datapolicy.DataPolicy)
- assert response.name == 'name_value'
- assert response.data_policy_type == datapolicy.DataPolicy.DataPolicyType.COLUMN_LEVEL_SECURITY_POLICY
- assert response.data_policy_id == 'data_policy_id_value'
-
-
-def test_get_data_policy_non_empty_request_with_auto_populated_field():
- # This test is a coverage failsafe to make sure that UUID4 fields are
- # automatically populated, according to AIP-4235, with non-empty requests.
- client = DataPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Populate all string fields in the request which are not UUID4
- # since we want to check that UUID4 are populated automatically
- # if they meet the requirements of AIP 4235.
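- # (Auto-population applies to request fields annotated as UUID4 under
- # AIP-4235: when the caller leaves such a field unset, the client fills it
- # with a fresh uuid4 string; ordinary fields like `name` must pass through
- # unchanged, which is what the echo assertion below pins down.)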
- request = datapolicy.GetDataPolicyRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_policy), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_data_policy(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datapolicy.GetDataPolicyRequest( - name='name_value', - ) - -def test_get_data_policy_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_data_policy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_data_policy] = mock_rpc - request = {} - client.get_data_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_data_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_data_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_data_policy in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_data_policy] = mock_rpc - - request = {} - await client.get_data_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_data_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_data_policy_async(transport: str = 'grpc_asyncio', request_type=datapolicy.GetDataPolicyRequest): - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(
- type(client.transport.get_data_policy),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datapolicy.DataPolicy(
- name='name_value',
- data_policy_type=datapolicy.DataPolicy.DataPolicyType.COLUMN_LEVEL_SECURITY_POLICY,
- data_policy_id='data_policy_id_value',
- ))
- response = await client.get_data_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = datapolicy.GetDataPolicyRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, datapolicy.DataPolicy)
- assert response.name == 'name_value'
- assert response.data_policy_type == datapolicy.DataPolicy.DataPolicyType.COLUMN_LEVEL_SECURITY_POLICY
- assert response.data_policy_id == 'data_policy_id_value'
-
-
-@pytest.mark.asyncio
-async def test_get_data_policy_async_from_dict():
- await test_get_data_policy_async(request_type=dict)
-
-def test_get_data_policy_field_headers():
- client = DataPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = datapolicy.GetDataPolicyRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_data_policy),
- '__call__') as call:
- call.return_value = datapolicy.DataPolicy()
- client.get_data_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_data_policy_field_headers_async():
- client = DataPolicyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = datapolicy.GetDataPolicyRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_data_policy),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datapolicy.DataPolicy())
- await client.get_data_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_get_data_policy_flattened():
- client = DataPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_data_policy),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = datapolicy.DataPolicy()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
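- # (Truthy values matter: the client detects flattened usage with a
- # truthiness check over the provided arguments, so falsy placeholders
- # would look unset and skip the code path under test.)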
- client.get_data_policy(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_get_data_policy_flattened_error():
- client = DataPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.get_data_policy(
- datapolicy.GetDataPolicyRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_get_data_policy_flattened_async():
- client = DataPolicyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_data_policy),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datapolicy.DataPolicy())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.get_data_policy(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_data_policy_flattened_error_async():
- client = DataPolicyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.get_data_policy(
- datapolicy.GetDataPolicyRequest(),
- name='name_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- datapolicy.ListDataPoliciesRequest,
- dict,
-])
-def test_list_data_policies(request_type, transport: str = 'grpc'):
- client = DataPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_data_policies),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = datapolicy.ListDataPoliciesResponse(
- next_page_token='next_page_token_value',
- )
- response = client.list_data_policies(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = datapolicy.ListDataPoliciesRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListDataPoliciesPager)
- assert response.next_page_token == 'next_page_token_value'
-
-
-def test_list_data_policies_non_empty_request_with_auto_populated_field():
- # This test is a coverage failsafe to make sure that UUID4 fields are
- # automatically populated, according to AIP-4235, with non-empty requests.
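- # A non-empty request is used so the echo assertion below can confirm
- # that caller-supplied fields survive the client plumbing unchanged.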
- client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = datapolicy.ListDataPoliciesRequest( - parent='parent_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_policies), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_data_policies(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datapolicy.ListDataPoliciesRequest( - parent='parent_value', - page_token='page_token_value', - ) - -def test_list_data_policies_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_data_policies in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_data_policies] = mock_rpc - request = {} - client.list_data_policies(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_data_policies(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_data_policies_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_data_policies in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_data_policies] = mock_rpc - - request = {} - await client.list_data_policies(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- await client.list_data_policies(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_data_policies_async(transport: str = 'grpc_asyncio', request_type=datapolicy.ListDataPoliciesRequest):
- client = DataPolicyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_data_policies),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datapolicy.ListDataPoliciesResponse(
- next_page_token='next_page_token_value',
- ))
- response = await client.list_data_policies(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = datapolicy.ListDataPoliciesRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListDataPoliciesAsyncPager)
- assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_data_policies_async_from_dict():
- await test_list_data_policies_async(request_type=dict)
-
-def test_list_data_policies_field_headers():
- client = DataPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = datapolicy.ListDataPoliciesRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_data_policies),
- '__call__') as call:
- call.return_value = datapolicy.ListDataPoliciesResponse()
- client.list_data_policies(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_data_policies_field_headers_async():
- client = DataPolicyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = datapolicy.ListDataPoliciesRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_data_policies),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datapolicy.ListDataPoliciesResponse())
- await client.list_data_policies(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-def test_list_data_policies_flattened():
- client = DataPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_data_policies),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = datapolicy.ListDataPoliciesResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.list_data_policies(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-
-def test_list_data_policies_flattened_error():
- client = DataPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.list_data_policies(
- datapolicy.ListDataPoliciesRequest(),
- parent='parent_value',
- )
-
-@pytest.mark.asyncio
-async def test_list_data_policies_flattened_async():
- client = DataPolicyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_data_policies),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datapolicy.ListDataPoliciesResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.list_data_policies(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_data_policies_flattened_error_async():
- client = DataPolicyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.list_data_policies(
- datapolicy.ListDataPoliciesRequest(),
- parent='parent_value',
- )
-
-
-def test_list_data_policies_pager(transport_name: str = "grpc"):
- client = DataPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_data_policies),
- '__call__') as call:
- # Set the response to a series of pages.
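- # Each stub invocation consumes the next element of `side_effect`, so the
- # pager walks tokens 'abc' -> 'def' -> 'ghi' and stops at the tokenless
- # final page; the trailing RuntimeError would only fire on over-iteration.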
- call.side_effect = ( - datapolicy.ListDataPoliciesResponse( - data_policies=[ - datapolicy.DataPolicy(), - datapolicy.DataPolicy(), - datapolicy.DataPolicy(), - ], - next_page_token='abc', - ), - datapolicy.ListDataPoliciesResponse( - data_policies=[], - next_page_token='def', - ), - datapolicy.ListDataPoliciesResponse( - data_policies=[ - datapolicy.DataPolicy(), - ], - next_page_token='ghi', - ), - datapolicy.ListDataPoliciesResponse( - data_policies=[ - datapolicy.DataPolicy(), - datapolicy.DataPolicy(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_data_policies(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, datapolicy.DataPolicy) - for i in results) -def test_list_data_policies_pages(transport_name: str = "grpc"): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_policies), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - datapolicy.ListDataPoliciesResponse( - data_policies=[ - datapolicy.DataPolicy(), - datapolicy.DataPolicy(), - datapolicy.DataPolicy(), - ], - next_page_token='abc', - ), - datapolicy.ListDataPoliciesResponse( - data_policies=[], - next_page_token='def', - ), - datapolicy.ListDataPoliciesResponse( - data_policies=[ - datapolicy.DataPolicy(), - ], - next_page_token='ghi', - ), - datapolicy.ListDataPoliciesResponse( - data_policies=[ - datapolicy.DataPolicy(), - datapolicy.DataPolicy(), - ], - ), - RuntimeError, - ) - pages = list(client.list_data_policies(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_data_policies_async_pager(): - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_policies), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
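- # Same page sequence as the sync pager test above; `new_callable=mock.AsyncMock`
- # makes each awaited stub call pop the next response off `side_effect`.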
- call.side_effect = ( - datapolicy.ListDataPoliciesResponse( - data_policies=[ - datapolicy.DataPolicy(), - datapolicy.DataPolicy(), - datapolicy.DataPolicy(), - ], - next_page_token='abc', - ), - datapolicy.ListDataPoliciesResponse( - data_policies=[], - next_page_token='def', - ), - datapolicy.ListDataPoliciesResponse( - data_policies=[ - datapolicy.DataPolicy(), - ], - next_page_token='ghi', - ), - datapolicy.ListDataPoliciesResponse( - data_policies=[ - datapolicy.DataPolicy(), - datapolicy.DataPolicy(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_data_policies(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, datapolicy.DataPolicy) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_data_policies_async_pages(): - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_policies), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - datapolicy.ListDataPoliciesResponse( - data_policies=[ - datapolicy.DataPolicy(), - datapolicy.DataPolicy(), - datapolicy.DataPolicy(), - ], - next_page_token='abc', - ), - datapolicy.ListDataPoliciesResponse( - data_policies=[], - next_page_token='def', - ), - datapolicy.ListDataPoliciesResponse( - data_policies=[ - datapolicy.DataPolicy(), - ], - next_page_token='ghi', - ), - datapolicy.ListDataPoliciesResponse( - data_policies=[ - datapolicy.DataPolicy(), - datapolicy.DataPolicy(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_data_policies(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.GetIamPolicyRequest, - dict, -]) -def test_get_iam_policy(request_type, transport: str = 'grpc'): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b'etag_blob', - ) - response = client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.GetIamPolicyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -def test_get_iam_policy_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = iam_policy_pb2.GetIamPolicyRequest( - resource='resource_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_iam_policy(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.GetIamPolicyRequest( - resource='resource_value', - ) - -def test_get_iam_policy_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_iam_policy in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc - request = {} - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_iam_policy(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_iam_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_iam_policy in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_iam_policy] = mock_rpc - - request = {} - await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- await client.get_iam_policy(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.GetIamPolicyRequest):
- client = DataPolicyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_iam_policy),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy(
- version=774,
- etag=b'etag_blob',
- ))
- response = await client.get_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = iam_policy_pb2.GetIamPolicyRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, policy_pb2.Policy)
- assert response.version == 774
- assert response.etag == b'etag_blob'
-
-
-@pytest.mark.asyncio
-async def test_get_iam_policy_async_from_dict():
- await test_get_iam_policy_async(request_type=dict)
-
-def test_get_iam_policy_field_headers():
- client = DataPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = iam_policy_pb2.GetIamPolicyRequest()
-
- request.resource = 'resource_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_iam_policy),
- '__call__') as call:
- call.return_value = policy_pb2.Policy()
- client.get_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'resource=resource_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_iam_policy_field_headers_async():
- client = DataPolicyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = iam_policy_pb2.GetIamPolicyRequest()
-
- request.resource = 'resource_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_iam_policy),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
- await client.get_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - -def test_get_iam_policy_from_dict_foreign(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - response = client.get_iam_policy(request={ - 'resource': 'resource_value', - 'options': options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.SetIamPolicyRequest, - dict, -]) -def test_set_iam_policy(request_type, transport: str = 'grpc'): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b'etag_blob', - ) - response = client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.SetIamPolicyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -def test_set_iam_policy_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = iam_policy_pb2.SetIamPolicyRequest( - resource='resource_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.set_iam_policy(request=request)
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == iam_policy_pb2.SetIamPolicyRequest(
- resource='resource_value',
- )
-
-def test_set_iam_policy_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = DataPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.set_iam_policy in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc
- request = {}
- client.set_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- client.set_iam_policy(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_set_iam_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = DataPolicyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.set_iam_policy in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.set_iam_policy] = mock_rpc
-
- request = {}
- await client.set_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.set_iam_policy(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_set_iam_policy_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.SetIamPolicyRequest):
- client = DataPolicyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.set_iam_policy),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy(
- version=774,
- etag=b'etag_blob',
- ))
- response = await client.set_iam_policy(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.SetIamPolicyRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - assert response.version == 774 - assert response.etag == b'etag_blob' - - -@pytest.mark.asyncio -async def test_set_iam_policy_async_from_dict(): - await test_set_iam_policy_async(request_type=dict) - -def test_set_iam_policy_field_headers(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_set_iam_policy_field_headers_async(): - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - -def test_set_iam_policy_from_dict_foreign(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - response = client.set_iam_policy(request={ - 'resource': 'resource_value', - 'policy': policy_pb2.Policy(version=774), - 'update_mask': field_mask_pb2.FieldMask(paths=['paths_value']), - } - ) - call.assert_called() - - -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.TestIamPermissionsRequest, - dict, -]) -def test_test_iam_permissions(request_type, transport: str = 'grpc'): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - ) - response = client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = iam_policy_pb2.TestIamPermissionsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - assert response.permissions == ['permissions_value'] - - -def test_test_iam_permissions_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = iam_policy_pb2.TestIamPermissionsRequest( - resource='resource_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.test_iam_permissions(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == iam_policy_pb2.TestIamPermissionsRequest( - resource='resource_value', - ) - -def test_test_iam_permissions_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.test_iam_permissions in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.test_iam_permissions] = mock_rpc - request = {} - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.test_iam_permissions(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_test_iam_permissions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = DataPolicyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.test_iam_permissions in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.test_iam_permissions] = mock_rpc
-
- request = {}
- await client.test_iam_permissions(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.test_iam_permissions(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_test_iam_permissions_async(transport: str = 'grpc_asyncio', request_type=iam_policy_pb2.TestIamPermissionsRequest):
- client = DataPolicyServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.test_iam_permissions),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse(
- permissions=['permissions_value'],
- ))
- response = await client.test_iam_permissions(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = iam_policy_pb2.TestIamPermissionsRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse)
- assert response.permissions == ['permissions_value']
-
-
-@pytest.mark.asyncio
-async def test_test_iam_permissions_async_from_dict():
- await test_test_iam_permissions_async(request_type=dict)
-
-def test_test_iam_permissions_field_headers():
- client = DataPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = iam_policy_pb2.TestIamPermissionsRequest()
-
- request.resource = 'resource_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_field_headers_async(): - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - - request.resource = 'resource_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse()) - await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'resource=resource_value', - ) in kw['metadata'] - -def test_test_iam_permissions_from_dict_foreign(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - response = client.test_iam_permissions(request={ - 'resource': 'resource_value', - 'permissions': ['permissions_value'], - } - ) - call.assert_called() - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.DataPolicyServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.DataPolicyServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DataPolicyServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.DataPolicyServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DataPolicyServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. 
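- # (An API key and explicit credentials are mutually exclusive: when
- # `api_key` is set, the client derives its own credentials from the key.)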
- options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DataPolicyServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.DataPolicyServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DataPolicyServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.DataPolicyServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = DataPolicyServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.DataPolicyServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.DataPolicyServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.DataPolicyServiceGrpcTransport, - transports.DataPolicyServiceGrpcAsyncIOTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = DataPolicyServiceClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_data_policy_empty_call_grpc(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_data_policy), - '__call__') as call: - call.return_value = datapolicy.DataPolicy() - client.create_data_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datapolicy.CreateDataPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_data_policy_empty_call_grpc(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_data_policy), - '__call__') as call: - call.return_value = datapolicy.DataPolicy() - client.update_data_policy(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datapolicy.UpdateDataPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_data_policy_empty_call_grpc(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_policy), - '__call__') as call: - call.return_value = None - client.delete_data_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datapolicy.DeleteDataPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_data_policy_empty_call_grpc(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_data_policy), - '__call__') as call: - call.return_value = datapolicy.DataPolicy() - client.get_data_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datapolicy.GetDataPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_data_policies_empty_call_grpc(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_data_policies), - '__call__') as call: - call.return_value = datapolicy.ListDataPoliciesResponse() - client.list_data_policies(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datapolicy.ListDataPoliciesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_iam_policy_empty_call_grpc(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.get_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.GetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_set_iam_policy_empty_call_grpc(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - call.return_value = policy_pb2.Policy() - client.set_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.SetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_test_iam_permissions_empty_call_grpc(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - client.test_iam_permissions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.TestIamPermissionsRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = DataPolicyServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_data_policy_empty_call_grpc_asyncio(): - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_data_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datapolicy.DataPolicy( - name='name_value', - data_policy_type=datapolicy.DataPolicy.DataPolicyType.COLUMN_LEVEL_SECURITY_POLICY, - data_policy_id='data_policy_id_value', - )) - await client.create_data_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datapolicy.CreateDataPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_data_policy_empty_call_grpc_asyncio(): - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_data_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datapolicy.DataPolicy( - name='name_value', - data_policy_type=datapolicy.DataPolicy.DataPolicyType.COLUMN_LEVEL_SECURITY_POLICY, - data_policy_id='data_policy_id_value', - )) - await client.update_data_policy(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datapolicy.UpdateDataPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_data_policy_empty_call_grpc_asyncio(): - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_data_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_data_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datapolicy.DeleteDataPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_data_policy_empty_call_grpc_asyncio(): - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_data_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datapolicy.DataPolicy( - name='name_value', - data_policy_type=datapolicy.DataPolicy.DataPolicyType.COLUMN_LEVEL_SECURITY_POLICY, - data_policy_id='data_policy_id_value', - )) - await client.get_data_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datapolicy.GetDataPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_data_policies_empty_call_grpc_asyncio(): - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_data_policies), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datapolicy.ListDataPoliciesResponse( - next_page_token='next_page_token_value', - )) - await client.list_data_policies(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datapolicy.ListDataPoliciesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_iam_policy_empty_call_grpc_asyncio(): - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( - version=774, - etag=b'etag_blob', - )) - await client.get_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.GetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_set_iam_policy_empty_call_grpc_asyncio(): - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy( - version=774, - etag=b'etag_blob', - )) - await client.set_iam_policy(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.SetIamPolicyRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_test_iam_permissions_empty_call_grpc_asyncio(): - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(iam_policy_pb2.TestIamPermissionsResponse( - permissions=['permissions_value'], - )) - await client.test_iam_permissions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = iam_policy_pb2.TestIamPermissionsRequest() - - assert args[0] == request_msg - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.DataPolicyServiceGrpcTransport, - ) - -def test_data_policy_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.DataPolicyServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_data_policy_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.bigquery_datapolicies_v1beta1.services.data_policy_service.transports.DataPolicyServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.DataPolicyServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - 'create_data_policy', - 'update_data_policy', - 'delete_data_policy', - 'get_data_policy', - 'list_data_policies', - 'get_iam_policy', - 'set_iam_policy', - 'test_iam_permissions', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_data_policy_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.bigquery_datapolicies_v1beta1.services.data_policy_service.transports.DataPolicyServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DataPolicyServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_data_policy_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.bigquery_datapolicies_v1beta1.services.data_policy_service.transports.DataPolicyServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DataPolicyServiceTransport() - adc.assert_called_once() - - -def test_data_policy_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - DataPolicyServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.DataPolicyServiceGrpcTransport, - transports.DataPolicyServiceGrpcAsyncIOTransport, - ], -) -def test_data_policy_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/bigquery', 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.DataPolicyServiceGrpcTransport, - transports.DataPolicyServiceGrpcAsyncIOTransport, - ], -) -def test_data_policy_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.DataPolicyServiceGrpcTransport, grpc_helpers), - (transports.DataPolicyServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_data_policy_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "bigquerydatapolicy.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="bigquerydatapolicy.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.DataPolicyServiceGrpcTransport, transports.DataPolicyServiceGrpcAsyncIOTransport]) -def test_data_policy_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
- with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
- transport_class(
- credentials=cred,
- client_cert_source_for_mtls=client_cert_source_callback
- )
- expected_cert, expected_key = client_cert_source_callback()
- mock_ssl_cred.assert_called_once_with(
- certificate_chain=expected_cert,
- private_key=expected_key
- )
-
-
-@pytest.mark.parametrize("transport_name", [
- "grpc",
- "grpc_asyncio",
-])
-def test_data_policy_service_host_no_port(transport_name):
- client = DataPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- client_options=client_options.ClientOptions(api_endpoint='bigquerydatapolicy.googleapis.com'),
- transport=transport_name,
- )
- assert client.transport._host == (
- 'bigquerydatapolicy.googleapis.com:443'
- )
-
-@pytest.mark.parametrize("transport_name", [
- "grpc",
- "grpc_asyncio",
-])
-def test_data_policy_service_host_with_port(transport_name):
- client = DataPolicyServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- client_options=client_options.ClientOptions(api_endpoint='bigquerydatapolicy.googleapis.com:8000'),
- transport=transport_name,
- )
- assert client.transport._host == (
- 'bigquerydatapolicy.googleapis.com:8000'
- )
-
-def test_data_policy_service_grpc_transport_channel():
- channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
- # Check that channel is used if provided.
- transport = transports.DataPolicyServiceGrpcTransport(
- host="squid.clam.whelk",
- channel=channel,
- )
- assert transport.grpc_channel == channel
- assert transport._host == "squid.clam.whelk:443"
- assert transport._ssl_channel_credentials is None
-
-
-def test_data_policy_service_grpc_asyncio_transport_channel():
- channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
- # Check that channel is used if provided.
- transport = transports.DataPolicyServiceGrpcAsyncIOTransport(
- host="squid.clam.whelk",
- channel=channel,
- )
- assert transport.grpc_channel == channel
- assert transport._host == "squid.clam.whelk:443"
- assert transport._ssl_channel_credentials is None
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.DataPolicyServiceGrpcTransport, transports.DataPolicyServiceGrpcAsyncIOTransport]) -def test_data_policy_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.DataPolicyServiceGrpcTransport, transports.DataPolicyServiceGrpcAsyncIOTransport]) -def test_data_policy_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_data_policy_path(): - project = "squid" - location = "clam" - data_policy = "whelk" - expected = "projects/{project}/locations/{location}/dataPolicies/{data_policy}".format(project=project, location=location, data_policy=data_policy, ) - actual = DataPolicyServiceClient.data_policy_path(project, location, data_policy) - assert expected == actual - - -def test_parse_data_policy_path(): - expected = { - "project": "octopus", - "location": "oyster", - "data_policy": "nudibranch", - } - path = DataPolicyServiceClient.data_policy_path(**expected) - - # Check that the path construction is reversible. 
- actual = DataPolicyServiceClient.parse_data_policy_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = DataPolicyServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "mussel", - } - path = DataPolicyServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = DataPolicyServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "winkle" - expected = "folders/{folder}".format(folder=folder, ) - actual = DataPolicyServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nautilus", - } - path = DataPolicyServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = DataPolicyServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "scallop" - expected = "organizations/{organization}".format(organization=organization, ) - actual = DataPolicyServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "abalone", - } - path = DataPolicyServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = DataPolicyServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "squid" - expected = "projects/{project}".format(project=project, ) - actual = DataPolicyServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "clam", - } - path = DataPolicyServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = DataPolicyServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "whelk" - location = "octopus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = DataPolicyServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - } - path = DataPolicyServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = DataPolicyServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.DataPolicyServiceTransport, '_prep_wrapped_messages') as prep: - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.DataPolicyServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = DataPolicyServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_transport_close_grpc(): - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = DataPolicyServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'grpc', - ] - for transport in transports: - client = DataPolicyServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. - with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (DataPolicyServiceClient, transports.DataPolicyServiceGrpcTransport), - (DataPolicyServiceAsyncClient, transports.DataPolicyServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/.coveragerc b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/.coveragerc deleted file mode 100644 index 691a23ba5e55..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/bigquery_datatransfer/__init__.py - google/cloud/bigquery_datatransfer/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git 
a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/.flake8 b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/.flake8 deleted file mode 100644 index 29227d4cf419..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/.flake8 +++ /dev/null @@ -1,33 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Generated by synthtool. DO NOT EDIT!
-[flake8]
-ignore = E203, E266, E501, W503
-exclude =
- # Exclude generated code.
- **/proto/**
- **/gapic/**
- **/services/**
- **/types/**
- *_pb2.py
-
- # Standard linting exemptions.
- **/.nox/**
- __pycache__,
- .git,
- *.pyc,
- conf.py
diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/MANIFEST.in b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/MANIFEST.in deleted file mode 100644 index c429b83c0669..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@
-recursive-include google/cloud/bigquery_datatransfer *.py
-recursive-include google/cloud/bigquery_datatransfer_v1 *.py
diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/README.rst b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/README.rst deleted file mode 100644 index 502259ac5bbf..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/README.rst +++ /dev/null @@ -1,143 +0,0 @@
-Python Client for Google Cloud Bigquery Datatransfer API
-=========================================================
-
-Quick Start
------------
-
-In order to use this library, you first need to go through the following steps:
-
-1. `Select or create a Cloud Platform project.`_
-2. `Enable billing for your project.`_
-3. Enable the Google Cloud Bigquery Datatransfer API.
-4. `Setup Authentication.`_
-
-.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
-.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
-.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
-
-Installation
-~~~~~~~~~~~~
-
-Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
-create isolated Python environments. The basic problem it addresses is one of
-dependencies and versions, and indirectly permissions.
-
-With `virtualenv`_, it's possible to install this library without needing system
-install permissions, and without clashing with the installed system
-dependencies.
-
-.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
-
-
-Mac/Linux
-^^^^^^^^^
-
-.. code-block:: console
-
- python3 -m venv <your-env>
- source <your-env>/bin/activate
- <your-env>/bin/pip install /path/to/library
-
-
-Windows
-^^^^^^^
-
-.. code-block:: console
-
- python3 -m venv <your-env>
- <your-env>\Scripts\activate
- <your-env>\Scripts\pip.exe install \path\to\library
-
-
-Logging
--------
-
-This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes.
-Note the following:
-
-#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging.
-#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**.
-#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below.
-
-
-Simple, environment-based configuration
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google
-logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged
-messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging
-event.
-
-A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log.
-
-- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc.
-- Invalid logging scopes: :code:`foo`, :code:`123`, etc.
-
-**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers.
-
-
-Examples
-^^^^^^^^
-
-- Enabling the default handler for all Google-based loggers
-
-.. code-block:: console
-
- export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google
-
-- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`):
-
-.. code-block:: console
-
- export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1
-
-
-Advanced, code-based configuration
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-You can also configure a valid logging scope using Python's standard `logging` mechanism.
-
-
-Examples
-^^^^^^^^
-
-- Configuring a handler for all Google-based loggers
-
-.. code-block:: python
-
- import logging
-
- from google.cloud.translate_v3 import translate
-
- base_logger = logging.getLogger("google")
- base_logger.addHandler(logging.StreamHandler())
- base_logger.setLevel(logging.DEBUG)
-
-- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`):
-
-.. code-block:: python
-
- import logging
-
- from google.cloud.translate_v3 import translate
-
- base_logger = logging.getLogger("google.cloud.library_v1")
- base_logger.addHandler(logging.StreamHandler())
- base_logger.setLevel(logging.DEBUG)
-
-
-Logging details
-~~~~~~~~~~~~~~~
-
-#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root
- logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set
- :code:`logging.getLogger("google").propagate = True` in your code.
-#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for
- one library, but decide you need to also set up environment-based logging configuration for another library.
-
- #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual
- if the code-based configuration gets applied first.
-
-#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get
- executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured.
- (This is the reason for 2.i. above.)
diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/docs/_static/custom.css b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/docs/_static/custom.css deleted file mode 100644 index 06423be0b592..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/docs/_static/custom.css +++ /dev/null @@ -1,3 +0,0 @@
-dl.field-list > dt {
- min-width: 100px
-}
diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/docs/bigquery_datatransfer_v1/data_transfer_service.rst b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/docs/bigquery_datatransfer_v1/data_transfer_service.rst deleted file mode 100644 index 480f43ed3d30..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/docs/bigquery_datatransfer_v1/data_transfer_service.rst +++ /dev/null @@ -1,10 +0,0 @@
-DataTransferService
--------------------------------------
-
-.. automodule:: google.cloud.bigquery_datatransfer_v1.services.data_transfer_service
- :members:
- :inherited-members:
-
-.. automodule:: google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.pagers
- :members:
- :inherited-members:
diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/docs/bigquery_datatransfer_v1/services_.rst b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/docs/bigquery_datatransfer_v1/services_.rst deleted file mode 100644 index 37a71a43a4ea..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/docs/bigquery_datatransfer_v1/services_.rst +++ /dev/null @@ -1,6 +0,0 @@
-Services for Google Cloud Bigquery Datatransfer v1 API
-======================================================
-.. toctree::
- :maxdepth: 2
-
- data_transfer_service
diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/docs/bigquery_datatransfer_v1/types_.rst b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/docs/bigquery_datatransfer_v1/types_.rst deleted file mode 100644 index ccda83a5d7b2..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/docs/bigquery_datatransfer_v1/types_.rst +++ /dev/null @@ -1,6 +0,0 @@
-Types for Google Cloud Bigquery Datatransfer v1 API
-===================================================
-
-.. automodule:: google.cloud.bigquery_datatransfer_v1.types
- :members:
- :show-inheritance:
diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/docs/conf.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/docs/conf.py deleted file mode 100644 index 14d2c649cae4..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-#
-# google-cloud-bigquery-datatransfer documentation build configuration file
-#
-# This file is execfile()d with the current directory set to its
-# containing dir.
-#
-# Note that not all possible configuration values are present in this
-# autogenerated file.
-#
-# All configuration values have a default; values that are commented out
-# serve to show the default.
-
-import sys
-import os
-import shlex
-
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
-sys.path.insert(0, os.path.abspath(".."))
-
-__version__ = "0.1.0"
-
-# -- General configuration ------------------------------------------------
-
-# If your documentation needs a minimal Sphinx version, state it here.
-needs_sphinx = "4.0.1"
-
-# Add any Sphinx extension module names here, as strings. They can be
-# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
-# ones.
-extensions = [
- "sphinx.ext.autodoc",
- "sphinx.ext.autosummary",
- "sphinx.ext.intersphinx",
- "sphinx.ext.coverage",
- "sphinx.ext.napoleon",
- "sphinx.ext.todo",
- "sphinx.ext.viewcode",
-]
-
-# autodoc/autosummary flags
-autoclass_content = "both"
-autodoc_default_flags = ["members"]
-autosummary_generate = True
-
-
-# Add any paths that contain templates here, relative to this directory.
-templates_path = ["_templates"]
-
-# Allow markdown includes (so releases.md can include CHANGELOG.md)
-# http://www.sphinx-doc.org/en/master/markdown.html
-source_parsers = {".md": "recommonmark.parser.CommonMarkParser"}
-
-# The suffix(es) of source filenames.
-# You can specify multiple suffixes as a list of strings:
-source_suffix = [".rst", ".md"]
-
-# The encoding of source files.
-# source_encoding = 'utf-8-sig'
-
-# The root toctree document.
-root_doc = "index"
-
-# General information about the project.
-project = u"google-cloud-bigquery-datatransfer"
-copyright = u"2023, Google, LLC"
-author = u"Google APIs" # TODO: autogenerate this bit
-
-# The version info for the project you're documenting, acts as replacement for
-# |version| and |release|, also used in various other places throughout the
-# built documents.
-#
-# The full version, including alpha/beta/rc tags.
-release = __version__
-# The short X.Y version.
-version = ".".join(release.split(".")[0:2])
-
-# The language for content autogenerated by Sphinx. Refer to documentation
-# for a list of supported languages.
-#
-# This is also used if you do content translation via gettext catalogs.
-# Usually you set "language" from the command line for these cases.
-language = 'en'
-
-# There are two options for replacing |today|: either, you set today to some
-# non-false value, then it is used:
-# today = ''
-# Else, today_fmt is used as the format for a strftime call.
-# today_fmt = '%B %d, %Y'
-
-# List of patterns, relative to source directory, that match files and
-# directories to ignore when looking for source files.
-exclude_patterns = ["_build"]
-
-# The reST default role (used for this markup: `text`) to use for all
-# documents.
-# default_role = None
-
-# If true, '()' will be appended to :func: etc. cross-reference text.
-# add_function_parentheses = True
-
-# If true, the current module name will be prepended to all description
-# unit titles (such as .. function::).
-# add_module_names = True
-
-# If true, sectionauthor and moduleauthor directives will be shown in the
-# output. They are ignored by default.
-# show_authors = False
-
-# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = "sphinx"
-
-# A list of ignored prefixes for module index sorting.
-# modindex_common_prefix = []
-
-# If true, keep warnings as "system message" paragraphs in the built documents.
-# keep_warnings = False
-
-# If true, `todo` and `todoList` produce output, else they produce nothing.
-todo_include_todos = True
-
-
-# -- Options for HTML output ----------------------------------------------
-
-# The theme to use for HTML and HTML Help pages. See the documentation for
-# a list of builtin themes.
-html_theme = "alabaster"
-
-# Theme options are theme-specific and customize the look and feel of a theme
-# further. For a list of options available for each theme, see the
-# documentation.
-html_theme_options = {
- "description": "Google Cloud Client Libraries for Python",
- "github_user": "googleapis",
- "github_repo": "google-cloud-python",
- "github_banner": True,
- "font_family": "'Roboto', Georgia, sans",
- "head_font_family": "'Roboto', Georgia, serif",
- "code_font_family": "'Roboto Mono', 'Consolas', monospace",
-}
-
-# Add any paths that contain custom themes here, relative to this directory.
-# html_theme_path = []
-
-# The name for this set of Sphinx documents. If None, it defaults to
-# "<project> v<release> documentation".
-# html_title = None
-
-# A shorter title for the navigation bar. Default is the same as html_title.
-# html_short_title = None
-
-# The name of an image file (relative to this directory) to place at the top
-# of the sidebar.
-# html_logo = None
-
-# The name of an image file (within the static path) to use as favicon of the
-# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
-# pixels large.
-# html_favicon = None
-
-# Add any paths that contain custom static files (such as style sheets) here,
-# relative to this directory. They are copied after the builtin static files,
-# so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ["_static"]
-
-# Add any extra paths that contain custom files (such as robots.txt or
-# .htaccess) here, relative to this directory. These files are copied
-# directly to the root of the documentation.
-# html_extra_path = []
-
-# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
-# using the given strftime format.
-# html_last_updated_fmt = '%b %d, %Y'
-
-# If true, SmartyPants will be used to convert quotes and dashes to
-# typographically correct entities.
-# html_use_smartypants = True
-
-# Custom sidebar templates, maps document names to template names.
-# html_sidebars = {}
-
-# Additional templates that should be rendered to pages, maps page names to
-# template names.
-# html_additional_pages = {}
-
-# If false, no module index is generated.
-# html_domain_indices = True
-
-# If false, no index is generated.
-# html_use_index = True
-
-# If true, the index is split into individual pages for each letter.
-# html_split_index = False
-
-# If true, links to the reST sources are added to the pages.
-# html_show_sourcelink = True
-
-# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
-# html_show_sphinx = True
-
-# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
-# html_show_copyright = True
-
-# If true, an OpenSearch description file will be output, and all pages will
-# contain a <link> tag referring to it. The value of this option must be the
-# base URL from which the finished HTML is served.
-# html_use_opensearch = ''
-
-# This is the file name suffix for HTML files (e.g. ".xhtml").
-# html_file_suffix = None
-
-# Language to be used for generating the HTML full-text search index.
-# Sphinx supports the following languages:
-# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
-# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
-# html_search_language = 'en'
-
-# A dictionary with options for the search language support, empty by default.
-# Now only 'ja' uses this config value
-# html_search_options = {'type': 'default'}
-
-# The name of a javascript file (relative to the configuration directory) that
-# implements a search results scorer. If empty, the default will be used.
-# html_search_scorer = 'scorer.js'
-
-# Output file base name for HTML help builder.
-htmlhelp_basename = "google-cloud-bigquery-datatransfer-doc"
-
-# -- Options for warnings ------------------------------------------------------
-
-
-suppress_warnings = [
- # Temporarily suppress this to avoid "more than one target found for
- # cross-reference" warnings, which are intractable for us to avoid while in
- # a mono-repo.
- # See https://github.com/sphinx-doc/sphinx/blob
- # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843
- "ref.python"
-]
-
-# -- Options for LaTeX output ---------------------------------------------
-
-latex_elements = {
- # The paper size ('letterpaper' or 'a4paper').
- # 'papersize': 'letterpaper',
- # The font size ('10pt', '11pt' or '12pt').
- # 'pointsize': '10pt',
- # Additional stuff for the LaTeX preamble.
- # 'preamble': '',
- # Latex figure (float) alignment
- # 'figure_align': 'htbp',
-}
-
-# Grouping the document tree into LaTeX files. List of tuples
-# (source start file, target name, title,
-# author, documentclass [howto, manual, or own class]).
-latex_documents = [
- (
- root_doc,
- "google-cloud-bigquery-datatransfer.tex",
- u"google-cloud-bigquery-datatransfer Documentation",
- author,
- "manual",
- )
-]
-
-# The name of an image file (relative to this directory) to place at the top of
-# the title page.
-# latex_logo = None
-
-# For "manual" documents, if this is true, then toplevel headings are parts,
-# not chapters.
-# latex_use_parts = False
-
-# If true, show page references after internal links.
-# latex_show_pagerefs = False
-
-# If true, show URL addresses after external links.
-# latex_show_urls = False
-
-# Documents to append as an appendix to all manuals.
-# latex_appendices = []
-
-# If false, no module index is generated.
-# latex_domain_indices = True
-
-
-# -- Options for manual page output ---------------------------------------
-
-# One entry per manual page. List of tuples
-# (source start file, name, description, authors, manual section).
-man_pages = [
- (
- root_doc,
- "google-cloud-bigquery-datatransfer",
- u"Google Cloud Bigquery Datatransfer Documentation",
- [author],
- 1,
- )
-]
-
-# If true, show URL addresses after external links.
-# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-bigquery-datatransfer", - u"google-cloud-bigquery-datatransfer Documentation", - author, - "google-cloud-bigquery-datatransfer", - "GAPIC library for Google Cloud Bigquery Datatransfer API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/docs/index.rst b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/docs/index.rst deleted file mode 100644 index 25313b46857a..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - bigquery_datatransfer_v1/services_ - bigquery_datatransfer_v1/types_ diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer/__init__.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer/__init__.py deleted file mode 100644 index 00ce36925f18..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer/__init__.py +++ /dev/null @@ -1,103 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.bigquery_datatransfer import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.client import DataTransferServiceClient -from google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.async_client import DataTransferServiceAsyncClient - -from google.cloud.bigquery_datatransfer_v1.types.datatransfer import CheckValidCredsRequest -from google.cloud.bigquery_datatransfer_v1.types.datatransfer import CheckValidCredsResponse -from google.cloud.bigquery_datatransfer_v1.types.datatransfer import CreateTransferConfigRequest -from google.cloud.bigquery_datatransfer_v1.types.datatransfer import DataSource -from google.cloud.bigquery_datatransfer_v1.types.datatransfer import DataSourceParameter -from google.cloud.bigquery_datatransfer_v1.types.datatransfer import DeleteTransferConfigRequest -from google.cloud.bigquery_datatransfer_v1.types.datatransfer import DeleteTransferRunRequest -from google.cloud.bigquery_datatransfer_v1.types.datatransfer import EnrollDataSourcesRequest -from google.cloud.bigquery_datatransfer_v1.types.datatransfer import GetDataSourceRequest -from google.cloud.bigquery_datatransfer_v1.types.datatransfer import GetTransferConfigRequest -from google.cloud.bigquery_datatransfer_v1.types.datatransfer import GetTransferRunRequest -from google.cloud.bigquery_datatransfer_v1.types.datatransfer import ListDataSourcesRequest -from google.cloud.bigquery_datatransfer_v1.types.datatransfer import ListDataSourcesResponse -from google.cloud.bigquery_datatransfer_v1.types.datatransfer import ListTransferConfigsRequest -from google.cloud.bigquery_datatransfer_v1.types.datatransfer import ListTransferConfigsResponse -from google.cloud.bigquery_datatransfer_v1.types.datatransfer import ListTransferLogsRequest -from google.cloud.bigquery_datatransfer_v1.types.datatransfer import ListTransferLogsResponse -from google.cloud.bigquery_datatransfer_v1.types.datatransfer import ListTransferRunsRequest -from google.cloud.bigquery_datatransfer_v1.types.datatransfer import ListTransferRunsResponse -from google.cloud.bigquery_datatransfer_v1.types.datatransfer import ScheduleTransferRunsRequest -from google.cloud.bigquery_datatransfer_v1.types.datatransfer import ScheduleTransferRunsResponse -from google.cloud.bigquery_datatransfer_v1.types.datatransfer import StartManualTransferRunsRequest -from google.cloud.bigquery_datatransfer_v1.types.datatransfer import StartManualTransferRunsResponse -from google.cloud.bigquery_datatransfer_v1.types.datatransfer import UnenrollDataSourcesRequest -from google.cloud.bigquery_datatransfer_v1.types.datatransfer import UpdateTransferConfigRequest -from google.cloud.bigquery_datatransfer_v1.types.transfer import EmailPreferences -from google.cloud.bigquery_datatransfer_v1.types.transfer import EncryptionConfiguration -from google.cloud.bigquery_datatransfer_v1.types.transfer import EventDrivenSchedule -from google.cloud.bigquery_datatransfer_v1.types.transfer import ManualSchedule -from google.cloud.bigquery_datatransfer_v1.types.transfer import ScheduleOptions -from google.cloud.bigquery_datatransfer_v1.types.transfer import ScheduleOptionsV2 -from google.cloud.bigquery_datatransfer_v1.types.transfer import TimeBasedSchedule -from google.cloud.bigquery_datatransfer_v1.types.transfer import TransferConfig -from google.cloud.bigquery_datatransfer_v1.types.transfer import TransferMessage -from 
google.cloud.bigquery_datatransfer_v1.types.transfer import TransferRun -from google.cloud.bigquery_datatransfer_v1.types.transfer import UserInfo -from google.cloud.bigquery_datatransfer_v1.types.transfer import TransferState -from google.cloud.bigquery_datatransfer_v1.types.transfer import TransferType - -__all__ = ('DataTransferServiceClient', - 'DataTransferServiceAsyncClient', - 'CheckValidCredsRequest', - 'CheckValidCredsResponse', - 'CreateTransferConfigRequest', - 'DataSource', - 'DataSourceParameter', - 'DeleteTransferConfigRequest', - 'DeleteTransferRunRequest', - 'EnrollDataSourcesRequest', - 'GetDataSourceRequest', - 'GetTransferConfigRequest', - 'GetTransferRunRequest', - 'ListDataSourcesRequest', - 'ListDataSourcesResponse', - 'ListTransferConfigsRequest', - 'ListTransferConfigsResponse', - 'ListTransferLogsRequest', - 'ListTransferLogsResponse', - 'ListTransferRunsRequest', - 'ListTransferRunsResponse', - 'ScheduleTransferRunsRequest', - 'ScheduleTransferRunsResponse', - 'StartManualTransferRunsRequest', - 'StartManualTransferRunsResponse', - 'UnenrollDataSourcesRequest', - 'UpdateTransferConfigRequest', - 'EmailPreferences', - 'EncryptionConfiguration', - 'EventDrivenSchedule', - 'ManualSchedule', - 'ScheduleOptions', - 'ScheduleOptionsV2', - 'TimeBasedSchedule', - 'TransferConfig', - 'TransferMessage', - 'TransferRun', - 'UserInfo', - 'TransferState', - 'TransferType', -) diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer/gapic_version.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer/py.typed b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer/py.typed deleted file mode 100644 index 1bd9d383cee6..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-bigquery-datatransfer package uses inline types. 
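The package-level ``__init__.py`` deleted above exists so that callers can use one flat ``google.cloud.bigquery_datatransfer`` namespace instead of importing from the ``bigquery_datatransfer_v1`` submodules. A minimal usage sketch of that surface, assuming the published ``google-cloud-bigquery-datatransfer`` package; the parent path is a placeholder:

.. code-block:: python

    # Minimal sketch (not part of this diff) of the flat namespace
    # re-exported by the deleted __init__.py.
    # "projects/my-project" is a placeholder parent value.
    from google.cloud import bigquery_datatransfer

    client = bigquery_datatransfer.DataTransferServiceClient()
    request = bigquery_datatransfer.ListTransferConfigsRequest(
        parent="projects/my-project",
    )
    # list_transfer_configs returns a pager that resolves additional
    # pages automatically as it is iterated.
    for transfer_config in client.list_transfer_configs(request=request):
        print(transfer_config.display_name)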
diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/__init__.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/__init__.py deleted file mode 100644 index 19a67aed2401..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/__init__.py +++ /dev/null @@ -1,104 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.bigquery_datatransfer_v1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.data_transfer_service import DataTransferServiceClient -from .services.data_transfer_service import DataTransferServiceAsyncClient - -from .types.datatransfer import CheckValidCredsRequest -from .types.datatransfer import CheckValidCredsResponse -from .types.datatransfer import CreateTransferConfigRequest -from .types.datatransfer import DataSource -from .types.datatransfer import DataSourceParameter -from .types.datatransfer import DeleteTransferConfigRequest -from .types.datatransfer import DeleteTransferRunRequest -from .types.datatransfer import EnrollDataSourcesRequest -from .types.datatransfer import GetDataSourceRequest -from .types.datatransfer import GetTransferConfigRequest -from .types.datatransfer import GetTransferRunRequest -from .types.datatransfer import ListDataSourcesRequest -from .types.datatransfer import ListDataSourcesResponse -from .types.datatransfer import ListTransferConfigsRequest -from .types.datatransfer import ListTransferConfigsResponse -from .types.datatransfer import ListTransferLogsRequest -from .types.datatransfer import ListTransferLogsResponse -from .types.datatransfer import ListTransferRunsRequest -from .types.datatransfer import ListTransferRunsResponse -from .types.datatransfer import ScheduleTransferRunsRequest -from .types.datatransfer import ScheduleTransferRunsResponse -from .types.datatransfer import StartManualTransferRunsRequest -from .types.datatransfer import StartManualTransferRunsResponse -from .types.datatransfer import UnenrollDataSourcesRequest -from .types.datatransfer import UpdateTransferConfigRequest -from .types.transfer import EmailPreferences -from .types.transfer import EncryptionConfiguration -from .types.transfer import EventDrivenSchedule -from .types.transfer import ManualSchedule -from .types.transfer import ScheduleOptions -from .types.transfer import ScheduleOptionsV2 -from .types.transfer import TimeBasedSchedule -from .types.transfer import TransferConfig -from .types.transfer import TransferMessage -from .types.transfer import TransferRun -from .types.transfer import UserInfo -from .types.transfer import TransferState -from .types.transfer import TransferType - -__all__ = ( - 'DataTransferServiceAsyncClient', -'CheckValidCredsRequest', -'CheckValidCredsResponse', -'CreateTransferConfigRequest', -'DataSource', -'DataSourceParameter', 
-'DataTransferServiceClient', -'DeleteTransferConfigRequest', -'DeleteTransferRunRequest', -'EmailPreferences', -'EncryptionConfiguration', -'EnrollDataSourcesRequest', -'EventDrivenSchedule', -'GetDataSourceRequest', -'GetTransferConfigRequest', -'GetTransferRunRequest', -'ListDataSourcesRequest', -'ListDataSourcesResponse', -'ListTransferConfigsRequest', -'ListTransferConfigsResponse', -'ListTransferLogsRequest', -'ListTransferLogsResponse', -'ListTransferRunsRequest', -'ListTransferRunsResponse', -'ManualSchedule', -'ScheduleOptions', -'ScheduleOptionsV2', -'ScheduleTransferRunsRequest', -'ScheduleTransferRunsResponse', -'StartManualTransferRunsRequest', -'StartManualTransferRunsResponse', -'TimeBasedSchedule', -'TransferConfig', -'TransferMessage', -'TransferRun', -'TransferState', -'TransferType', -'UnenrollDataSourcesRequest', -'UpdateTransferConfigRequest', -'UserInfo', -) diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/gapic_metadata.json b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/gapic_metadata.json deleted file mode 100644 index 8edf32e89665..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/gapic_metadata.json +++ /dev/null @@ -1,268 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.bigquery_datatransfer_v1", - "protoPackage": "google.cloud.bigquery.datatransfer.v1", - "schema": "1.0", - "services": { - "DataTransferService": { - "clients": { - "grpc": { - "libraryClient": "DataTransferServiceClient", - "rpcs": { - "CheckValidCreds": { - "methods": [ - "check_valid_creds" - ] - }, - "CreateTransferConfig": { - "methods": [ - "create_transfer_config" - ] - }, - "DeleteTransferConfig": { - "methods": [ - "delete_transfer_config" - ] - }, - "DeleteTransferRun": { - "methods": [ - "delete_transfer_run" - ] - }, - "EnrollDataSources": { - "methods": [ - "enroll_data_sources" - ] - }, - "GetDataSource": { - "methods": [ - "get_data_source" - ] - }, - "GetTransferConfig": { - "methods": [ - "get_transfer_config" - ] - }, - "GetTransferRun": { - "methods": [ - "get_transfer_run" - ] - }, - "ListDataSources": { - "methods": [ - "list_data_sources" - ] - }, - "ListTransferConfigs": { - "methods": [ - "list_transfer_configs" - ] - }, - "ListTransferLogs": { - "methods": [ - "list_transfer_logs" - ] - }, - "ListTransferRuns": { - "methods": [ - "list_transfer_runs" - ] - }, - "ScheduleTransferRuns": { - "methods": [ - "schedule_transfer_runs" - ] - }, - "StartManualTransferRuns": { - "methods": [ - "start_manual_transfer_runs" - ] - }, - "UnenrollDataSources": { - "methods": [ - "unenroll_data_sources" - ] - }, - "UpdateTransferConfig": { - "methods": [ - "update_transfer_config" - ] - } - } - }, - "grpc-async": { - "libraryClient": "DataTransferServiceAsyncClient", - "rpcs": { - "CheckValidCreds": { - "methods": [ - "check_valid_creds" - ] - }, - "CreateTransferConfig": { - "methods": [ - "create_transfer_config" - ] - }, - "DeleteTransferConfig": { - "methods": [ - "delete_transfer_config" - ] - }, - "DeleteTransferRun": { - "methods": [ - "delete_transfer_run" - ] - }, - "EnrollDataSources": { - "methods": [ - "enroll_data_sources" - ] - }, - "GetDataSource": { - "methods": [ - "get_data_source" - ] - }, - "GetTransferConfig": { - "methods": [ - "get_transfer_config" - ] - }, - "GetTransferRun": { - 
"methods": [ - "get_transfer_run" - ] - }, - "ListDataSources": { - "methods": [ - "list_data_sources" - ] - }, - "ListTransferConfigs": { - "methods": [ - "list_transfer_configs" - ] - }, - "ListTransferLogs": { - "methods": [ - "list_transfer_logs" - ] - }, - "ListTransferRuns": { - "methods": [ - "list_transfer_runs" - ] - }, - "ScheduleTransferRuns": { - "methods": [ - "schedule_transfer_runs" - ] - }, - "StartManualTransferRuns": { - "methods": [ - "start_manual_transfer_runs" - ] - }, - "UnenrollDataSources": { - "methods": [ - "unenroll_data_sources" - ] - }, - "UpdateTransferConfig": { - "methods": [ - "update_transfer_config" - ] - } - } - }, - "rest": { - "libraryClient": "DataTransferServiceClient", - "rpcs": { - "CheckValidCreds": { - "methods": [ - "check_valid_creds" - ] - }, - "CreateTransferConfig": { - "methods": [ - "create_transfer_config" - ] - }, - "DeleteTransferConfig": { - "methods": [ - "delete_transfer_config" - ] - }, - "DeleteTransferRun": { - "methods": [ - "delete_transfer_run" - ] - }, - "EnrollDataSources": { - "methods": [ - "enroll_data_sources" - ] - }, - "GetDataSource": { - "methods": [ - "get_data_source" - ] - }, - "GetTransferConfig": { - "methods": [ - "get_transfer_config" - ] - }, - "GetTransferRun": { - "methods": [ - "get_transfer_run" - ] - }, - "ListDataSources": { - "methods": [ - "list_data_sources" - ] - }, - "ListTransferConfigs": { - "methods": [ - "list_transfer_configs" - ] - }, - "ListTransferLogs": { - "methods": [ - "list_transfer_logs" - ] - }, - "ListTransferRuns": { - "methods": [ - "list_transfer_runs" - ] - }, - "ScheduleTransferRuns": { - "methods": [ - "schedule_transfer_runs" - ] - }, - "StartManualTransferRuns": { - "methods": [ - "start_manual_transfer_runs" - ] - }, - "UnenrollDataSources": { - "methods": [ - "unenroll_data_sources" - ] - }, - "UpdateTransferConfig": { - "methods": [ - "update_transfer_config" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/gapic_version.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/py.typed b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/py.typed deleted file mode 100644 index 1bd9d383cee6..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-bigquery-datatransfer package uses inline types. 
diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/services/__init__.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/services/__init__.py deleted file mode 100644 index 8f6cf068242c..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/__init__.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/__init__.py deleted file mode 100644 index 7725abaa21aa..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import DataTransferServiceClient -from .async_client import DataTransferServiceAsyncClient - -__all__ = ( - 'DataTransferServiceClient', - 'DataTransferServiceAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py deleted file mode 100644 index ccda5d05db70..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py +++ /dev/null @@ -1,2176 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union -import warnings - -from google.cloud.bigquery_datatransfer_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.cloud.bigquery_datatransfer_v1.services.data_transfer_service import pagers -from google.cloud.bigquery_datatransfer_v1.types import datatransfer -from google.cloud.bigquery_datatransfer_v1.types import transfer -from google.cloud.location import locations_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import struct_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -from .transports.base import DataTransferServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import DataTransferServiceGrpcAsyncIOTransport -from .client import DataTransferServiceClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -class DataTransferServiceAsyncClient: - """This API allows users to manage their data transfers into - BigQuery. - """ - - _client: DataTransferServiceClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
- DEFAULT_ENDPOINT = DataTransferServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = DataTransferServiceClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = DataTransferServiceClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = DataTransferServiceClient._DEFAULT_UNIVERSE - - data_source_path = staticmethod(DataTransferServiceClient.data_source_path) - parse_data_source_path = staticmethod(DataTransferServiceClient.parse_data_source_path) - run_path = staticmethod(DataTransferServiceClient.run_path) - parse_run_path = staticmethod(DataTransferServiceClient.parse_run_path) - transfer_config_path = staticmethod(DataTransferServiceClient.transfer_config_path) - parse_transfer_config_path = staticmethod(DataTransferServiceClient.parse_transfer_config_path) - common_billing_account_path = staticmethod(DataTransferServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(DataTransferServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(DataTransferServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(DataTransferServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(DataTransferServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(DataTransferServiceClient.parse_common_organization_path) - common_project_path = staticmethod(DataTransferServiceClient.common_project_path) - parse_common_project_path = staticmethod(DataTransferServiceClient.parse_common_project_path) - common_location_path = staticmethod(DataTransferServiceClient.common_location_path) - parse_common_location_path = staticmethod(DataTransferServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DataTransferServiceAsyncClient: The constructed client. - """ - return DataTransferServiceClient.from_service_account_info.__func__(DataTransferServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DataTransferServiceAsyncClient: The constructed client. - """ - return DataTransferServiceClient.from_service_account_file.__func__(DataTransferServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. 
-
-        The API endpoint is determined in the following order:
-        (1) if `client_options.api_endpoint` is provided, use the provided one.
-        (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
-        default mTLS endpoint; if the environment variable is "never", use the default API
-        endpoint; otherwise, if a client cert source exists, use the default mTLS endpoint,
-        otherwise use the default API endpoint.
-
-        More details can be found at https://google.aip.dev/auth/4114.
-
-        Args:
-            client_options (google.api_core.client_options.ClientOptions): Custom options for the
-                client. Only the `api_endpoint` and `client_cert_source` properties may be used
-                in this method.
-
-        Returns:
-            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
-                client cert source to use.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
-        """
-        return DataTransferServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
-
-    @property
-    def transport(self) -> DataTransferServiceTransport:
-        """Returns the transport used by the client instance.
-
-        Returns:
-            DataTransferServiceTransport: The transport used by the client instance.
-        """
-        return self._client.transport
-
-    @property
-    def api_endpoint(self):
-        """Return the API endpoint used by the client instance.
-
-        Returns:
-            str: The API endpoint used by the client instance.
-        """
-        return self._client._api_endpoint
-
-    @property
-    def universe_domain(self) -> str:
-        """Return the universe domain used by the client instance.
-
-        Returns:
-            str: The universe domain used by the client instance.
-        """
-        return self._client._universe_domain
-
-    get_transport_class = DataTransferServiceClient.get_transport_class
-
-    def __init__(self, *,
-            credentials: Optional[ga_credentials.Credentials] = None,
-            transport: Optional[Union[str, DataTransferServiceTransport, Callable[..., DataTransferServiceTransport]]] = "grpc_asyncio",
-            client_options: Optional[ClientOptions] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            ) -> None:
-        """Instantiates the data transfer service async client.
-
-        Args:
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-            transport (Optional[Union[str, DataTransferServiceTransport, Callable[..., DataTransferServiceTransport]]]):
-                The transport to use, or a Callable that constructs and returns a new transport to use.
-                If a Callable is given, it will be called with the same set of initialization
-                arguments as used in the DataTransferServiceTransport constructor.
-                If set to None, a transport is chosen automatically.
-            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
-                Custom options for the client.
-
-                1. The ``api_endpoint`` property can be used to override the
-                   default endpoint provided by the client when ``transport`` is
-                   not explicitly provided. Only if this property is not set and
-                   ``transport`` was not explicitly provided, the endpoint is
-                   determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
-                   variable, which has one of the following values:
-                   "always" (always use the default mTLS endpoint), "never" (always
-                   use the default regular endpoint) and "auto" (auto-switch to the
-                   default mTLS endpoint if a client certificate is present; this is
-                   the default value).
-
-                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
-                   is "true", then the ``client_cert_source`` property can be used
-                   to provide a client certificate for mTLS transport. If
-                   not provided, the default SSL client certificate will be used if
-                   present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
-                   set, no client certificate will be used.
-
-                3. The ``universe_domain`` property can be used to override the
-                   default "googleapis.com" universe. Note that the ``api_endpoint``
-                   property still takes precedence, and ``universe_domain`` is
-                   currently not supported for mTLS.
-
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
-        """
-        self._client = DataTransferServiceClient(
-            credentials=credentials,
-            transport=transport,
-            client_options=client_options,
-            client_info=client_info,
-        )
-
-        if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG):  # pragma: NO COVER
-            _LOGGER.debug(
-                "Created client `google.cloud.bigquery.datatransfer_v1.DataTransferServiceAsyncClient`.",
-                extra={
-                    "serviceName": "google.cloud.bigquery.datatransfer.v1.DataTransferService",
-                    "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""),
-                    "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}",
-                    "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(),
-                } if hasattr(self._client._transport, "_credentials") else {
-                    "serviceName": "google.cloud.bigquery.datatransfer.v1.DataTransferService",
-                    "credentialsType": None,
-                },
-            )
-
-    async def get_data_source(self,
-            request: Optional[Union[datatransfer.GetDataSourceRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> datatransfer.DataSource:
-        r"""Retrieves a supported data source and returns its
-        settings.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datatransfer_v1 - - async def sample_get_data_source(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.GetDataSourceRequest( - name="name_value", - ) - - # Make the request - response = await client.get_data_source(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_datatransfer_v1.types.GetDataSourceRequest, dict]]): - The request object. A request to get data source info. - name (:class:`str`): - Required. The field will contain name of the resource - requested, for example: - ``projects/{project_id}/dataSources/{data_source_id}`` - or - ``projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_datatransfer_v1.types.DataSource: - Defines the properties and custom - parameters for a data source. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datatransfer.GetDataSourceRequest): - request = datatransfer.GetDataSourceRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_data_source] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def list_data_sources(self, - request: Optional[Union[datatransfer.ListDataSourcesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListDataSourcesAsyncPager: - r"""Lists supported data sources and returns their - settings. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datatransfer_v1 - - async def sample_list_data_sources(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.ListDataSourcesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_sources(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_datatransfer_v1.types.ListDataSourcesRequest, dict]]): - The request object. Request to list supported data - sources and their data transfer - settings. - parent (:class:`str`): - Required. The BigQuery project id for which data sources - should be returned. Must be in the form: - ``projects/{project_id}`` or - ``projects/{project_id}/locations/{location_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.pagers.ListDataSourcesAsyncPager: - Returns list of supported data - sources and their metadata. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datatransfer.ListDataSourcesRequest): - request = datatransfer.ListDataSourcesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
-        if parent is not None:
-            request.parent = parent
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.list_data_sources]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # This method is paged; wrap the response in a pager, which provides
-        # an `__aiter__` convenience method.
-        response = pagers.ListDataSourcesAsyncPager(
-            method=rpc,
-            request=request,
-            response=response,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def create_transfer_config(self,
-            request: Optional[Union[datatransfer.CreateTransferConfigRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            transfer_config: Optional[transfer.TransferConfig] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> transfer.TransferConfig:
-        r"""Creates a new data transfer configuration.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import bigquery_datatransfer_v1
-
-            async def sample_create_transfer_config():
-                # Create a client
-                client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient()
-
-                # Initialize request argument(s)
-                transfer_config = bigquery_datatransfer_v1.TransferConfig()
-                transfer_config.destination_dataset_id = "destination_dataset_id_value"
-
-                request = bigquery_datatransfer_v1.CreateTransferConfigRequest(
-                    parent="parent_value",
-                    transfer_config=transfer_config,
-                )
-
-                # Make the request
-                response = await client.create_transfer_config(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.bigquery_datatransfer_v1.types.CreateTransferConfigRequest, dict]]):
-                The request object. A request to create a data transfer configuration. If
-                new credentials are needed for this transfer
-                configuration, authorization info must be provided. If
-                authorization info is provided, the transfer
-                configuration will be associated with the user id
-                corresponding to the authorization info. Otherwise, the
-                transfer configuration will be associated with the
-                calling user.
-
-                When using a cross-project service account for creating
-                a transfer config, you must enable cross-project service
-                account usage. For more information, see `Disable
-                attachment of service accounts to resources in other
-                projects <https://cloud.google.com/resource-manager/docs/organization-policy/restricting-service-accounts#disable_cross_project_service_accounts>`__.
-            parent (:class:`str`):
-                Required. The BigQuery project id where the transfer
-                configuration should be created. Must be in the format
-                projects/{project_id}/locations/{location_id} or
-                projects/{project_id}.
If specified location and - location of the destination bigquery dataset do not - match - the request will fail. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - transfer_config (:class:`google.cloud.bigquery_datatransfer_v1.types.TransferConfig`): - Required. Data transfer configuration - to create. - - This corresponds to the ``transfer_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_datatransfer_v1.types.TransferConfig: - Represents a data transfer configuration. A transfer configuration - contains all metadata needed to perform a data - transfer. For example, destination_dataset_id - specifies where data should be stored. When a new - transfer configuration is created, the specified - destination_dataset_id is created when needed and - shared with the appropriate data source service - account. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, transfer_config] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datatransfer.CreateTransferConfigRequest): - request = datatransfer.CreateTransferConfigRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if transfer_config is not None: - request.transfer_config = transfer_config - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_transfer_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_transfer_config(self, - request: Optional[Union[datatransfer.UpdateTransferConfigRequest, dict]] = None, - *, - transfer_config: Optional[transfer.TransferConfig] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> transfer.TransferConfig: - r"""Updates a data transfer configuration. 
-        All fields must be set, even if they are not updated.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import bigquery_datatransfer_v1
-
-            async def sample_update_transfer_config():
-                # Create a client
-                client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient()
-
-                # Initialize request argument(s)
-                transfer_config = bigquery_datatransfer_v1.TransferConfig()
-                transfer_config.destination_dataset_id = "destination_dataset_id_value"
-
-                request = bigquery_datatransfer_v1.UpdateTransferConfigRequest(
-                    transfer_config=transfer_config,
-                )
-
-                # Make the request
-                response = await client.update_transfer_config(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.bigquery_datatransfer_v1.types.UpdateTransferConfigRequest, dict]]):
-                The request object. A request to update a transfer configuration. To update
-                the user id of the transfer configuration, authorization
-                info needs to be provided.
-
-                When using a cross-project service account for updating
-                a transfer config, you must enable cross-project service
-                account usage. For more information, see `Disable
-                attachment of service accounts to resources in other
-                projects <https://cloud.google.com/resource-manager/docs/organization-policy/restricting-service-accounts#disable_cross_project_service_accounts>`__.
-            transfer_config (:class:`google.cloud.bigquery_datatransfer_v1.types.TransferConfig`):
-                Required. Data transfer configuration
-                to update.
-
-                This corresponds to the ``transfer_config`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
-                Required. List of fields to
-                be updated in this request.
-
-                This corresponds to the ``update_mask`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.bigquery_datatransfer_v1.types.TransferConfig:
-                Represents a data transfer configuration. A transfer configuration
-                contains all metadata needed to perform a data
-                transfer. For example, destination_dataset_id
-                specifies where data should be stored. When a new
-                transfer configuration is created, the specified
-                destination_dataset_id is created when needed and
-                shared with the appropriate data source service
-                account.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
- flattened_params = [transfer_config, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datatransfer.UpdateTransferConfigRequest): - request = datatransfer.UpdateTransferConfigRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if transfer_config is not None: - request.transfer_config = transfer_config - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_transfer_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("transfer_config.name", request.transfer_config.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_transfer_config(self, - request: Optional[Union[datatransfer.DeleteTransferConfigRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a data transfer configuration, including any - associated transfer runs and logs. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datatransfer_v1 - - async def sample_delete_transfer_config(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.DeleteTransferConfigRequest( - name="name_value", - ) - - # Make the request - await client.delete_transfer_config(request=request) - - Args: - request (Optional[Union[google.cloud.bigquery_datatransfer_v1.types.DeleteTransferConfigRequest, dict]]): - The request object. A request to delete data transfer - information. All associated transfer - runs and log messages will be deleted as - well. - name (:class:`str`): - Required. The field will contain name of the resource - requested, for example: - ``projects/{project_id}/transferConfigs/{config_id}`` or - ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datatransfer.DeleteTransferConfigRequest): - request = datatransfer.DeleteTransferConfigRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_transfer_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def get_transfer_config(self, - request: Optional[Union[datatransfer.GetTransferConfigRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> transfer.TransferConfig: - r"""Returns information about a data transfer config. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datatransfer_v1 - - async def sample_get_transfer_config(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.GetTransferConfigRequest( - name="name_value", - ) - - # Make the request - response = await client.get_transfer_config(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_datatransfer_v1.types.GetTransferConfigRequest, dict]]): - The request object. A request to get data transfer - information. - name (:class:`str`): - Required. 
The field will contain name of the resource - requested, for example: - ``projects/{project_id}/transferConfigs/{config_id}`` or - ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_datatransfer_v1.types.TransferConfig: - Represents a data transfer configuration. A transfer configuration - contains all metadata needed to perform a data - transfer. For example, destination_dataset_id - specifies where data should be stored. When a new - transfer configuration is created, the specified - destination_dataset_id is created when needed and - shared with the appropriate data source service - account. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datatransfer.GetTransferConfigRequest): - request = datatransfer.GetTransferConfigRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_transfer_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_transfer_configs(self, - request: Optional[Union[datatransfer.ListTransferConfigsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListTransferConfigsAsyncPager: - r"""Returns information about all transfer configs owned - by a project in the specified location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datatransfer_v1 - - async def sample_list_transfer_configs(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.ListTransferConfigsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_transfer_configs(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_datatransfer_v1.types.ListTransferConfigsRequest, dict]]): - The request object. A request to list data transfers - configured for a BigQuery project. - parent (:class:`str`): - Required. The BigQuery project id for which transfer - configs should be returned: ``projects/{project_id}`` or - ``projects/{project_id}/locations/{location_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.pagers.ListTransferConfigsAsyncPager: - The returned list of pipelines in the - project. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datatransfer.ListTransferConfigsRequest): - request = datatransfer.ListTransferConfigsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_transfer_configs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. 
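- # Illustrative only (not part of the generated surface): a typical way
- # to consume the pager, where "projects/my-project" is a placeholder:
- #
- #     pager = await client.list_transfer_configs(parent="projects/my-project")
- #     async for config in pager:
- #         print(config.name)
- #
- # Additional pages are fetched lazily as the async iteration proceeds.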
- response = pagers.ListTransferConfigsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def schedule_transfer_runs(self, - request: Optional[Union[datatransfer.ScheduleTransferRunsRequest, dict]] = None, - *, - parent: Optional[str] = None, - start_time: Optional[timestamp_pb2.Timestamp] = None, - end_time: Optional[timestamp_pb2.Timestamp] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> datatransfer.ScheduleTransferRunsResponse: - r"""Creates transfer runs for a time range [start_time, end_time]. - For each date - or whatever granularity the data source supports - - in the range, one transfer run is created. Note that runs are - created per UTC time in the time range. DEPRECATED: use - StartManualTransferRuns instead. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datatransfer_v1 - - async def sample_schedule_transfer_runs(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.ScheduleTransferRunsRequest( - parent="parent_value", - ) - - # Make the request - response = await client.schedule_transfer_runs(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_datatransfer_v1.types.ScheduleTransferRunsRequest, dict]]): - The request object. A request to schedule transfer runs - for a time range. - parent (:class:`str`): - Required. Transfer configuration name in the form: - ``projects/{project_id}/transferConfigs/{config_id}`` or - ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - start_time (:class:`google.protobuf.timestamp_pb2.Timestamp`): - Required. Start time of the range of transfer runs. For - example, ``"2017-05-25T00:00:00+00:00"``. - - This corresponds to the ``start_time`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - end_time (:class:`google.protobuf.timestamp_pb2.Timestamp`): - Required. End time of the range of transfer runs. For - example, ``"2017-05-30T00:00:00+00:00"``. - - This corresponds to the ``end_time`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
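-
- Note that ``start_time`` and ``end_time`` are
- ``google.protobuf.timestamp_pb2.Timestamp`` messages rather than strings;
- for example, ``timestamp_pb2.Timestamp(seconds=1495670400)`` represents
- ``2017-05-25T00:00:00+00:00``, and ``Timestamp.FromJsonString()`` can
- parse the RFC 3339 form shown above directly.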
- - Returns: - google.cloud.bigquery_datatransfer_v1.types.ScheduleTransferRunsResponse: - A response to schedule transfer runs - for a time range. - - """ - warnings.warn("DataTransferServiceAsyncClient.schedule_transfer_runs is deprecated", - DeprecationWarning) - - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, start_time, end_time] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datatransfer.ScheduleTransferRunsRequest): - request = datatransfer.ScheduleTransferRunsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if start_time is not None: - request.start_time = start_time - if end_time is not None: - request.end_time = end_time - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.schedule_transfer_runs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def start_manual_transfer_runs(self, - request: Optional[Union[datatransfer.StartManualTransferRunsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> datatransfer.StartManualTransferRunsResponse: - r"""Start manual transfer runs to be executed now with schedule_time - equal to current time. The transfer runs can be created for a - time range where the run_time is between start_time (inclusive) - and end_time (exclusive), or for a specific run_time. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datatransfer_v1 - - async def sample_start_manual_transfer_runs(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.StartManualTransferRunsRequest( - parent="parent_value", - ) - - # Make the request - response = await client.start_manual_transfer_runs(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_datatransfer_v1.types.StartManualTransferRunsRequest, dict]]): - The request object. A request to start manual transfer - runs. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_datatransfer_v1.types.StartManualTransferRunsResponse: - A response to start manual transfer - runs. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datatransfer.StartManualTransferRunsRequest): - request = datatransfer.StartManualTransferRunsRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.start_manual_transfer_runs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_transfer_run(self, - request: Optional[Union[datatransfer.GetTransferRunRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> transfer.TransferRun: - r"""Returns information about the particular transfer - run. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datatransfer_v1 - - async def sample_get_transfer_run(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.GetTransferRunRequest( - name="name_value", - ) - - # Make the request - response = await client.get_transfer_run(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_datatransfer_v1.types.GetTransferRunRequest, dict]]): - The request object. A request to get data transfer run - information. - name (:class:`str`): - Required. The field will contain name of the resource - requested, for example: - ``projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`` - or - ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_datatransfer_v1.types.TransferRun: - Represents a data transfer run. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datatransfer.GetTransferRunRequest): - request = datatransfer.GetTransferRunRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_transfer_run] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
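- # The response is a proto-plus ``transfer.TransferRun`` message; fields
- # such as ``response.state`` and ``response.run_time`` can be read
- # directly from it.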
- return response - - async def delete_transfer_run(self, - request: Optional[Union[datatransfer.DeleteTransferRunRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes the specified transfer run. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datatransfer_v1 - - async def sample_delete_transfer_run(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.DeleteTransferRunRequest( - name="name_value", - ) - - # Make the request - await client.delete_transfer_run(request=request) - - Args: - request (Optional[Union[google.cloud.bigquery_datatransfer_v1.types.DeleteTransferRunRequest, dict]]): - The request object. A request to delete data transfer run - information. - name (:class:`str`): - Required. The field will contain name of the resource - requested, for example: - ``projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`` - or - ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datatransfer.DeleteTransferRunRequest): - request = datatransfer.DeleteTransferRunRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_transfer_run] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. 
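- # (Per the note in ``_validate_universe_domain`` below, this check is
- # currently a no-op; see b/349488459.)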
- self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def list_transfer_runs(self, - request: Optional[Union[datatransfer.ListTransferRunsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListTransferRunsAsyncPager: - r"""Returns information about running and completed - transfer runs. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datatransfer_v1 - - async def sample_list_transfer_runs(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.ListTransferRunsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_transfer_runs(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_datatransfer_v1.types.ListTransferRunsRequest, dict]]): - The request object. A request to list data transfer runs. - parent (:class:`str`): - Required. Name of transfer configuration for which - transfer runs should be retrieved. Format of transfer - configuration resource name is: - ``projects/{project_id}/transferConfigs/{config_id}`` or - ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.pagers.ListTransferRunsAsyncPager: - The returned list of pipelines in the - project. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
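- # A plain dict also works here: proto-plus coerces a mapping, so e.g.
- # ``{"parent": "projects/my-project"}`` (placeholder id) is equivalent to
- # ``ListTransferRunsRequest(parent="projects/my-project")``.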
- if not isinstance(request, datatransfer.ListTransferRunsRequest): - request = datatransfer.ListTransferRunsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_transfer_runs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListTransferRunsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_transfer_logs(self, - request: Optional[Union[datatransfer.ListTransferLogsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListTransferLogsAsyncPager: - r"""Returns log messages for the transfer run. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datatransfer_v1 - - async def sample_list_transfer_logs(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.ListTransferLogsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_transfer_logs(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_datatransfer_v1.types.ListTransferLogsRequest, dict]]): - The request object. A request to get user facing log - messages associated with data transfer - run. - parent (:class:`str`): - Required. Transfer run name in the form: - ``projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`` - or - ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- - Returns: - google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.pagers.ListTransferLogsAsyncPager: - The returned list transfer run - messages. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datatransfer.ListTransferLogsRequest): - request = datatransfer.ListTransferLogsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_transfer_logs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListTransferLogsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def check_valid_creds(self, - request: Optional[Union[datatransfer.CheckValidCredsRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> datatransfer.CheckValidCredsResponse: - r"""Returns true if valid credentials exist for the given - data source and requesting user. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datatransfer_v1 - - async def sample_check_valid_creds(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.CheckValidCredsRequest( - name="name_value", - ) - - # Make the request - response = await client.check_valid_creds(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_datatransfer_v1.types.CheckValidCredsRequest, dict]]): - The request object. 
A request to determine whether the - user has valid credentials. This method - is used to limit the number of OAuth - popups in the user interface. The user - id is inferred from the API call - context. If the data source has the - Google+ authorization type, this method - returns false, as it cannot be - determined whether the credentials are - already valid merely based on the user - id. - name (:class:`str`): - Required. The data source in the form: - ``projects/{project_id}/dataSources/{data_source_id}`` - or - ``projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_datatransfer_v1.types.CheckValidCredsResponse: - A response indicating whether the - credentials exist and are valid. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datatransfer.CheckValidCredsRequest): - request = datatransfer.CheckValidCredsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.check_valid_creds] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def enroll_data_sources(self, - request: Optional[Union[datatransfer.EnrollDataSourcesRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Enroll data sources in a user project. This allows users to - create transfer configurations for these data sources. They will - also appear in the ListDataSources RPC and as such, will appear - in the `BigQuery - UI `__, and the - documents can be found in the public guide for `BigQuery Web - UI `__ and - `Data Transfer - Service `__. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datatransfer_v1 - - async def sample_enroll_data_sources(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.EnrollDataSourcesRequest( - name="name_value", - ) - - # Make the request - await client.enroll_data_sources(request=request) - - Args: - request (Optional[Union[google.cloud.bigquery_datatransfer_v1.types.EnrollDataSourcesRequest, dict]]): - The request object. A request to enroll a set of data sources so they are - visible in the BigQuery UI's ``Transfer`` tab. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datatransfer.EnrollDataSourcesRequest): - request = datatransfer.EnrollDataSourcesRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.enroll_data_sources] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def unenroll_data_sources(self, - request: Optional[Union[datatransfer.UnenrollDataSourcesRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Unenroll data sources in a user project. This allows users to - remove transfer configurations for these data sources. They will - no longer appear in the ListDataSources RPC and will also no - longer appear in the `BigQuery - UI `__. Data - transfers configurations of unenrolled data sources will not be - scheduled. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datatransfer_v1 - - async def sample_unenroll_data_sources(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.UnenrollDataSourcesRequest( - name="name_value", - ) - - # Make the request - await client.unenroll_data_sources(request=request) - - Args: - request (Optional[Union[google.cloud.bigquery_datatransfer_v1.types.UnenrollDataSourcesRequest, dict]]): - The request object. A request to unenroll a set of data sources so they are - no longer visible in the BigQuery UI's ``Transfer`` tab. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datatransfer.UnenrollDataSourcesRequest): - request = datatransfer.UnenrollDataSourcesRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.unenroll_data_sources] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def get_location( - self, - request: Optional[locations_pb2.GetLocationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.Location: - r"""Gets information about a location. - - Args: - request (:class:`~.location_pb2.GetLocationRequest`): - The request object. Request message for - `GetLocation` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.Location: - Location object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.GetLocationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
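- # ("Wrapped" means the bare transport stub is combined with the default
- # retry and timeout policy registered for this method.)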
- rpc = self.transport._wrapped_methods[self._client._transport.get_location] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def list_locations( - self, - request: Optional[locations_pb2.ListLocationsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> locations_pb2.ListLocationsResponse: - r"""Lists information about the supported locations for this service. - - Args: - request (:class:`~.location_pb2.ListLocationsRequest`): - The request object. Request message for - `ListLocations` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - Returns: - ~.location_pb2.ListLocationsResponse: - Response message for ``ListLocations`` method. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = locations_pb2.ListLocationsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self.transport._wrapped_methods[self._client._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def __aenter__(self) -> "DataTransferServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "DataTransferServiceAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py deleted file mode 100644 index b8abaeed93e5..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py +++ /dev/null @@ -1,2545 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.bigquery_datatransfer_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.cloud.bigquery_datatransfer_v1.services.data_transfer_service import pagers -from google.cloud.bigquery_datatransfer_v1.types import datatransfer -from google.cloud.bigquery_datatransfer_v1.types import transfer -from google.cloud.location import locations_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import struct_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -from .transports.base import DataTransferServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import DataTransferServiceGrpcTransport -from .transports.grpc_asyncio import DataTransferServiceGrpcAsyncIOTransport -from .transports.rest import DataTransferServiceRestTransport - - -class DataTransferServiceClientMeta(type): - """Metaclass for the DataTransferService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[DataTransferServiceTransport]] - _transport_registry["grpc"] = DataTransferServiceGrpcTransport - _transport_registry["grpc_asyncio"] = DataTransferServiceGrpcAsyncIOTransport - _transport_registry["rest"] = DataTransferServiceRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[DataTransferServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. 
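- # (Valid labels are the registry keys above: "grpc", "grpc_asyncio",
- # and "rest"; an unrecognized label raises KeyError.)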
- if label:
- return cls._transport_registry[label]
-
- # No transport is requested; return the default (that is, the first one
- # in the dictionary).
- return next(iter(cls._transport_registry.values()))
-
-
-class DataTransferServiceClient(metaclass=DataTransferServiceClientMeta):
- """This API allows users to manage their data transfers into
- BigQuery.
- """
-
- @staticmethod
- def _get_default_mtls_endpoint(api_endpoint):
- """Converts api endpoint to mTLS endpoint.
-
- Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
- "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
- Args:
- api_endpoint (Optional[str]): the api endpoint to convert.
- Returns:
- str: converted mTLS api endpoint.
- """
- if not api_endpoint:
- return api_endpoint
-
- mtls_endpoint_re = re.compile(
- r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
- )
-
- m = mtls_endpoint_re.match(api_endpoint)
- name, mtls, sandbox, googledomain = m.groups()
- if mtls or not googledomain:
- return api_endpoint
-
- if sandbox:
- return api_endpoint.replace(
- "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
- )
-
- return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
-
- # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
- DEFAULT_ENDPOINT = "bigquerydatatransfer.googleapis.com"
- DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
- DEFAULT_ENDPOINT
- )
-
- _DEFAULT_ENDPOINT_TEMPLATE = "bigquerydatatransfer.{UNIVERSE_DOMAIN}"
- _DEFAULT_UNIVERSE = "googleapis.com"
-
- @classmethod
- def from_service_account_info(cls, info: dict, *args, **kwargs):
- """Creates an instance of this client using the provided credentials
- info.
-
- Args:
- info (dict): The service account private key info.
- args: Additional arguments to pass to the constructor.
- kwargs: Additional arguments to pass to the constructor.
-
- Returns:
- DataTransferServiceClient: The constructed client.
- """
- credentials = service_account.Credentials.from_service_account_info(info)
- kwargs["credentials"] = credentials
- return cls(*args, **kwargs)
-
- @classmethod
- def from_service_account_file(cls, filename: str, *args, **kwargs):
- """Creates an instance of this client using the provided credentials
- file.
-
- Args:
- filename (str): The path to the service account private key json
- file.
- args: Additional arguments to pass to the constructor.
- kwargs: Additional arguments to pass to the constructor.
-
- Returns:
- DataTransferServiceClient: The constructed client.
- """
- credentials = service_account.Credentials.from_service_account_file(
- filename)
- kwargs["credentials"] = credentials
- return cls(*args, **kwargs)
-
- from_service_account_json = from_service_account_file
-
- @property
- def transport(self) -> DataTransferServiceTransport:
- """Returns the transport used by the client instance.
-
- Returns:
- DataTransferServiceTransport: The transport used by the client
- instance. 
- """ - return self._transport - - @staticmethod - def data_source_path(project: str,data_source: str,) -> str: - """Returns a fully-qualified data_source string.""" - return "projects/{project}/dataSources/{data_source}".format(project=project, data_source=data_source, ) - - @staticmethod - def parse_data_source_path(path: str) -> Dict[str,str]: - """Parses a data_source path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/dataSources/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def run_path(project: str,transfer_config: str,run: str,) -> str: - """Returns a fully-qualified run string.""" - return "projects/{project}/transferConfigs/{transfer_config}/runs/{run}".format(project=project, transfer_config=transfer_config, run=run, ) - - @staticmethod - def parse_run_path(path: str) -> Dict[str,str]: - """Parses a run path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/transferConfigs/(?P.+?)/runs/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def transfer_config_path(project: str,transfer_config: str,) -> str: - """Returns a fully-qualified transfer_config string.""" - return "projects/{project}/transferConfigs/{transfer_config}".format(project=project, transfer_config=transfer_config, ) - - @staticmethod - def parse_transfer_config_path(path: str) -> Dict[str,str]: - """Parses a transfer_config path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/transferConfigs/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a 
- m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
- return m.groupdict() if m else {}
-
- @classmethod
- def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
- """Deprecated. Return the API endpoint and client cert source for mutual TLS.
-
- The client cert source is determined in the following order:
- (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
- client cert source is None.
- (2) if `client_options.client_cert_source` is provided, use the provided one; if the
- default client cert source exists, use the default one; otherwise the client cert
- source is None.
-
- The API endpoint is determined in the following order:
- (1) if `client_options.api_endpoint` is provided, use the provided one.
- (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
- default mTLS endpoint; if the environment variable is "never", use the default API
- endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
- use the default API endpoint.
-
- More details can be found at https://google.aip.dev/auth/4114.
-
- Args:
- client_options (google.api_core.client_options.ClientOptions): Custom options for the
- client. Only the `api_endpoint` and `client_cert_source` properties may be used
- in this method.
-
- Returns:
- Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
- client cert source to use.
-
- Raises:
- google.auth.exceptions.MutualTLSChannelError: If any errors happen.
- """
-
- warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
- DeprecationWarning)
- if client_options is None:
- client_options = client_options_lib.ClientOptions()
- use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
- use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
- if use_client_cert not in ("true", "false"):
- raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
- if use_mtls_endpoint not in ("auto", "never", "always"):
- raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
-
- # Figure out the client cert source to use.
- client_cert_source = None
- if use_client_cert == "true":
- if client_options.client_cert_source:
- client_cert_source = client_options.client_cert_source
- elif mtls.has_default_client_cert_source():
- client_cert_source = mtls.default_client_cert_source()
-
- # Figure out which api endpoint to use.
- if client_options.api_endpoint is not None:
- api_endpoint = client_options.api_endpoint
- elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
- api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
- else:
- api_endpoint = cls.DEFAULT_ENDPOINT
-
- return api_endpoint, client_cert_source
-
- @staticmethod
- def _read_environment_variables():
- """Returns the environment variables used by the client.
-
- Returns:
- Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
- GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.
-
- Raises:
- ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
- any of ["true", "false"].
- google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
- is not any of ["auto", "never", "always"]. 
- """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = DataTransferServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = DataTransferServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = DataTransferServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. 
- """ - universe_domain = DataTransferServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. - """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, DataTransferServiceTransport, Callable[..., DataTransferServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the data transfer service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,DataTransferServiceTransport,Callable[..., DataTransferServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the DataTransferServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. 
Only if this property is not set and
                ``transport`` was not explicitly provided, the endpoint is
                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
                variable, which can have one of the following values:
                "always" (always use the default mTLS endpoint), "never" (always
                use the default regular endpoint) and "auto" (auto-switch to the
                default mTLS endpoint if client certificate is present; this is
                the default value).
-
-                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
-                is "true", then the ``client_cert_source`` property can be used
-                to provide a client certificate for mTLS transport. If
-                not provided, the default SSL client certificate will be used if
-                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
-                set, no client certificate will be used.
-
-                3. The ``universe_domain`` property can be used to override the
-                default "googleapis.com" universe. Note that the ``api_endpoint``
-                property still takes precedence; and ``universe_domain`` is
-                currently not supported for mTLS.
-
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
-        """
-        self._client_options = client_options
-        if isinstance(self._client_options, dict):
-            self._client_options = client_options_lib.from_dict(self._client_options)
-        if self._client_options is None:
-            self._client_options = client_options_lib.ClientOptions()
-        self._client_options = cast(client_options_lib.ClientOptions, self._client_options)
-
-        universe_domain_opt = getattr(self._client_options, 'universe_domain', None)
-
-        self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = DataTransferServiceClient._read_environment_variables()
-        self._client_cert_source = DataTransferServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert)
-        self._universe_domain = DataTransferServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env)
-        self._api_endpoint = None # updated below, depending on `transport`
-
-        # Initialize the universe domain validation.
-        self._is_universe_domain_valid = False
-
-        if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER
-            # Setup logging.
-            client_logging.initialize_logging()
-
-        api_key_value = getattr(self._client_options, "api_key", None)
-        if api_key_value and credentials:
-            raise ValueError("client_options.api_key and credentials are mutually exclusive")
-
-        # Save or instantiate the transport.
-        # Ordinarily, we provide the transport, but allowing a custom transport
-        # instance provides an extensibility point for unusual situations.
-        transport_provided = isinstance(transport, DataTransferServiceTransport)
-        if transport_provided:
-            # transport is a DataTransferServiceTransport instance.
-            if credentials or self._client_options.credentials_file or api_key_value:
-                raise ValueError("When providing a transport instance, "
-                                 "provide its credentials directly.")
-            if self._client_options.scopes:
-                raise ValueError(
-                    "When providing a transport instance, provide its scopes "
-                    "directly."
- ) - self._transport = cast(DataTransferServiceTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - DataTransferServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[DataTransferServiceTransport], Callable[..., DataTransferServiceTransport]] = ( - DataTransferServiceClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., DataTransferServiceTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.bigquery.datatransfer_v1.DataTransferServiceClient`.", - extra = { - "serviceName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "credentialsType": None, - } - ) - - def get_data_source(self, - request: Optional[Union[datatransfer.GetDataSourceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> datatransfer.DataSource: - r"""Retrieves a supported data source and returns its - settings. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datatransfer_v1 - - def sample_get_data_source(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.GetDataSourceRequest( - name="name_value", - ) - - # Make the request - response = client.get_data_source(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_datatransfer_v1.types.GetDataSourceRequest, dict]): - The request object. A request to get data source info. 
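Because ``name`` is a flattened field, the call can be combined with the ``data_source_path`` helper shown earlier. A minimal sketch with placeholder identifiers:

.. code-block:: python

    from google.cloud import bigquery_datatransfer_v1

    client = bigquery_datatransfer_v1.DataTransferServiceClient()

    # "my-project" and "scheduled_query" are placeholders.
    name = client.data_source_path("my-project", "scheduled_query")
    data_source = client.get_data_source(name=name)
    print(data_source.display_name)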
- name (str): - Required. The field will contain name of the resource - requested, for example: - ``projects/{project_id}/dataSources/{data_source_id}`` - or - ``projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_datatransfer_v1.types.DataSource: - Defines the properties and custom - parameters for a data source. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datatransfer.GetDataSourceRequest): - request = datatransfer.GetDataSourceRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_data_source] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_data_sources(self, - request: Optional[Union[datatransfer.ListDataSourcesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListDataSourcesPager: - r"""Lists supported data sources and returns their - settings. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datatransfer_v1 - - def sample_list_data_sources(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.ListDataSourcesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_sources(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.bigquery_datatransfer_v1.types.ListDataSourcesRequest, dict]): - The request object. Request to list supported data - sources and their data transfer - settings. - parent (str): - Required. The BigQuery project id for which data sources - should be returned. Must be in the form: - ``projects/{project_id}`` or - ``projects/{project_id}/locations/{location_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.pagers.ListDataSourcesPager: - Returns list of supported data - sources and their metadata. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datatransfer.ListDataSourcesRequest): - request = datatransfer.ListDataSourcesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_data_sources] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. 
- response = pagers.ListDataSourcesPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_transfer_config(self, - request: Optional[Union[datatransfer.CreateTransferConfigRequest, dict]] = None, - *, - parent: Optional[str] = None, - transfer_config: Optional[transfer.TransferConfig] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> transfer.TransferConfig: - r"""Creates a new data transfer configuration. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datatransfer_v1 - - def sample_create_transfer_config(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Initialize request argument(s) - transfer_config = bigquery_datatransfer_v1.TransferConfig() - transfer_config.destination_dataset_id = "destination_dataset_id_value" - - request = bigquery_datatransfer_v1.CreateTransferConfigRequest( - parent="parent_value", - transfer_config=transfer_config, - ) - - # Make the request - response = client.create_transfer_config(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_datatransfer_v1.types.CreateTransferConfigRequest, dict]): - The request object. A request to create a data transfer configuration. If - new credentials are needed for this transfer - configuration, authorization info must be provided. If - authorization info is provided, the transfer - configuration will be associated with the user id - corresponding to the authorization info. Otherwise, the - transfer configuration will be associated with the - calling user. - - When using a cross project service account for creating - a transfer config, you must enable cross project service - account usage. For more information, see `Disable - attachment of service accounts to resources in other - projects `__. - parent (str): - Required. The BigQuery project id where the transfer - configuration should be created. Must be in the format - projects/{project_id}/locations/{location_id} or - projects/{project_id}. If specified location and - location of the destination bigquery dataset do not - match - the request will fail. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - transfer_config (google.cloud.bigquery_datatransfer_v1.types.TransferConfig): - Required. Data transfer configuration - to create. - - This corresponds to the ``transfer_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
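As an alternative to passing a full request object, the flattened ``parent``/``transfer_config`` arguments can be used directly. A hedged sketch; all identifiers and parameter values are placeholders:

.. code-block:: python

    from google.cloud import bigquery_datatransfer_v1

    client = bigquery_datatransfer_v1.DataTransferServiceClient()

    transfer_config = bigquery_datatransfer_v1.TransferConfig(
        destination_dataset_id="my_dataset",   # placeholder
        display_name="nightly load",           # placeholder
        data_source_id="scheduled_query",      # placeholder
        params={"query": "SELECT 1"},          # placeholder query
    )
    created = client.create_transfer_config(
        parent="projects/my-project",          # placeholder
        transfer_config=transfer_config,
    )
    print(created.name)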
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_datatransfer_v1.types.TransferConfig: - Represents a data transfer configuration. A transfer configuration - contains all metadata needed to perform a data - transfer. For example, destination_dataset_id - specifies where data should be stored. When a new - transfer configuration is created, the specified - destination_dataset_id is created when needed and - shared with the appropriate data source service - account. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, transfer_config] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datatransfer.CreateTransferConfigRequest): - request = datatransfer.CreateTransferConfigRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if transfer_config is not None: - request.transfer_config = transfer_config - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_transfer_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_transfer_config(self, - request: Optional[Union[datatransfer.UpdateTransferConfigRequest, dict]] = None, - *, - transfer_config: Optional[transfer.TransferConfig] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> transfer.TransferConfig: - r"""Updates a data transfer configuration. - All fields must be set, even if they are not updated. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datatransfer_v1 - - def sample_update_transfer_config(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Initialize request argument(s) - transfer_config = bigquery_datatransfer_v1.TransferConfig() - transfer_config.destination_dataset_id = "destination_dataset_id_value" - - request = bigquery_datatransfer_v1.UpdateTransferConfigRequest( - transfer_config=transfer_config, - ) - - # Make the request - response = client.update_transfer_config(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_datatransfer_v1.types.UpdateTransferConfigRequest, dict]): - The request object. A request to update a transfer configuration. To update - the user id of the transfer configuration, authorization - info needs to be provided. - - When using a cross project service account for updating - a transfer config, you must enable cross project service - account usage. For more information, see `Disable - attachment of service accounts to resources in other - projects `__. - transfer_config (google.cloud.bigquery_datatransfer_v1.types.TransferConfig): - Required. Data transfer configuration - to create. - - This corresponds to the ``transfer_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Required list of fields to - be updated in this request. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_datatransfer_v1.types.TransferConfig: - Represents a data transfer configuration. A transfer configuration - contains all metadata needed to perform a data - transfer. For example, destination_dataset_id - specifies where data should be stored. When a new - transfer configuration is created, the specified - destination_dataset_id is created when needed and - shared with the appropriate data source service - account. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [transfer_config, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datatransfer.UpdateTransferConfigRequest): - request = datatransfer.UpdateTransferConfigRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if transfer_config is not None: - request.transfer_config = transfer_config - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_transfer_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("transfer_config.name", request.transfer_config.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_transfer_config(self, - request: Optional[Union[datatransfer.DeleteTransferConfigRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a data transfer configuration, including any - associated transfer runs and logs. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datatransfer_v1 - - def sample_delete_transfer_config(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.DeleteTransferConfigRequest( - name="name_value", - ) - - # Make the request - client.delete_transfer_config(request=request) - - Args: - request (Union[google.cloud.bigquery_datatransfer_v1.types.DeleteTransferConfigRequest, dict]): - The request object. A request to delete data transfer - information. All associated transfer - runs and log messages will be deleted as - well. - name (str): - Required. The field will contain name of the resource - requested, for example: - ``projects/{project_id}/transferConfigs/{config_id}`` or - ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
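A hedged sketch of the flattened ``update_transfer_config`` call, pairing the config with a ``FieldMask`` that names only the fields to change (the resource name is a placeholder):

.. code-block:: python

    from google.cloud import bigquery_datatransfer_v1
    from google.protobuf import field_mask_pb2

    client = bigquery_datatransfer_v1.DataTransferServiceClient()

    transfer_config = bigquery_datatransfer_v1.TransferConfig(
        name="projects/my-project/locations/us/transferConfigs/my-config",  # placeholder
        display_name="nightly load",
    )
    update_mask = field_mask_pb2.FieldMask(paths=["display_name"])

    updated = client.update_transfer_config(
        transfer_config=transfer_config,
        update_mask=update_mask,
    )
    print(updated.display_name)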
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datatransfer.DeleteTransferConfigRequest): - request = datatransfer.DeleteTransferConfigRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_transfer_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def get_transfer_config(self, - request: Optional[Union[datatransfer.GetTransferConfigRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> transfer.TransferConfig: - r"""Returns information about a data transfer config. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datatransfer_v1 - - def sample_get_transfer_config(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.GetTransferConfigRequest( - name="name_value", - ) - - # Make the request - response = client.get_transfer_config(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_datatransfer_v1.types.GetTransferConfigRequest, dict]): - The request object. A request to get data transfer - information. - name (str): - Required. The field will contain name of the resource - requested, for example: - ``projects/{project_id}/transferConfigs/{config_id}`` or - ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
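All of these methods accept explicit ``retry`` and ``timeout`` values in place of ``gapic_v1.method.DEFAULT``. A sketch of overriding both on a single call, using only standard ``google.api_core`` retry primitives (the resource name is a placeholder):

.. code-block:: python

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries
    from google.cloud import bigquery_datatransfer_v1

    client = bigquery_datatransfer_v1.DataTransferServiceClient()

    # Retry only on transient unavailability, with exponential backoff.
    custom_retry = retries.Retry(
        initial=1.0,
        maximum=10.0,
        multiplier=2.0,
        timeout=60.0,
        predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
    )
    config = client.get_transfer_config(
        name="projects/my-project/transferConfigs/my-config",  # placeholder
        retry=custom_retry,
        timeout=30.0,
    )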
- - Returns: - google.cloud.bigquery_datatransfer_v1.types.TransferConfig: - Represents a data transfer configuration. A transfer configuration - contains all metadata needed to perform a data - transfer. For example, destination_dataset_id - specifies where data should be stored. When a new - transfer configuration is created, the specified - destination_dataset_id is created when needed and - shared with the appropriate data source service - account. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datatransfer.GetTransferConfigRequest): - request = datatransfer.GetTransferConfigRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_transfer_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_transfer_configs(self, - request: Optional[Union[datatransfer.ListTransferConfigsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListTransferConfigsPager: - r"""Returns information about all transfer configs owned - by a project in the specified location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datatransfer_v1 - - def sample_list_transfer_configs(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.ListTransferConfigsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_transfer_configs(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.bigquery_datatransfer_v1.types.ListTransferConfigsRequest, dict]): - The request object. A request to list data transfers - configured for a BigQuery project. - parent (str): - Required. 
The BigQuery project id for which transfer - configs should be returned: ``projects/{project_id}`` or - ``projects/{project_id}/locations/{location_id}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.pagers.ListTransferConfigsPager: - The returned list of pipelines in the - project. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datatransfer.ListTransferConfigsRequest): - request = datatransfer.ListTransferConfigsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_transfer_configs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListTransferConfigsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def schedule_transfer_runs(self, - request: Optional[Union[datatransfer.ScheduleTransferRunsRequest, dict]] = None, - *, - parent: Optional[str] = None, - start_time: Optional[timestamp_pb2.Timestamp] = None, - end_time: Optional[timestamp_pb2.Timestamp] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> datatransfer.ScheduleTransferRunsResponse: - r"""Creates transfer runs for a time range [start_time, end_time]. - For each date - or whatever granularity the data source supports - - in the range, one transfer run is created. Note that runs are - created per UTC time in the time range. DEPRECATED: use - StartManualTransferRuns instead. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datatransfer_v1 - - def sample_schedule_transfer_runs(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.ScheduleTransferRunsRequest( - parent="parent_value", - ) - - # Make the request - response = client.schedule_transfer_runs(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_datatransfer_v1.types.ScheduleTransferRunsRequest, dict]): - The request object. A request to schedule transfer runs - for a time range. - parent (str): - Required. Transfer configuration name in the form: - ``projects/{project_id}/transferConfigs/{config_id}`` or - ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - start_time (google.protobuf.timestamp_pb2.Timestamp): - Required. Start time of the range of transfer runs. For - example, ``"2017-05-25T00:00:00+00:00"``. - - This corresponds to the ``start_time`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Required. End time of the range of transfer runs. For - example, ``"2017-05-30T00:00:00+00:00"``. - - This corresponds to the ``end_time`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_datatransfer_v1.types.ScheduleTransferRunsResponse: - A response to schedule transfer runs - for a time range. - - """ - warnings.warn("DataTransferServiceClient.schedule_transfer_runs is deprecated", - DeprecationWarning) - - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, start_time, end_time] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datatransfer.ScheduleTransferRunsRequest): - request = datatransfer.ScheduleTransferRunsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if parent is not None: - request.parent = parent - if start_time is not None: - request.start_time = start_time - if end_time is not None: - request.end_time = end_time - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.schedule_transfer_runs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def start_manual_transfer_runs(self, - request: Optional[Union[datatransfer.StartManualTransferRunsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> datatransfer.StartManualTransferRunsResponse: - r"""Start manual transfer runs to be executed now with schedule_time - equal to current time. The transfer runs can be created for a - time range where the run_time is between start_time (inclusive) - and end_time (exclusive), or for a specific run_time. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datatransfer_v1 - - def sample_start_manual_transfer_runs(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.StartManualTransferRunsRequest( - parent="parent_value", - ) - - # Make the request - response = client.start_manual_transfer_runs(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_datatransfer_v1.types.StartManualTransferRunsRequest, dict]): - The request object. A request to start manual transfer - runs. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_datatransfer_v1.types.StartManualTransferRunsResponse: - A response to start manual transfer - runs. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, datatransfer.StartManualTransferRunsRequest): - request = datatransfer.StartManualTransferRunsRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
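``start_manual_transfer_runs`` has no flattened fields, so the request object carries everything; setting ``requested_run_time`` (one arm of the request's oneof) triggers a single run. A sketch with placeholder names:

.. code-block:: python

    from google.cloud import bigquery_datatransfer_v1
    from google.protobuf import timestamp_pb2

    client = bigquery_datatransfer_v1.DataTransferServiceClient()

    run_time = timestamp_pb2.Timestamp()
    run_time.GetCurrentTime()

    request = bigquery_datatransfer_v1.StartManualTransferRunsRequest(
        parent="projects/my-project/transferConfigs/my-config",  # placeholder
        requested_run_time=run_time,
    )
    response = client.start_manual_transfer_runs(request=request)
    for run in response.runs:
        print(run.name)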
- rpc = self._transport._wrapped_methods[self._transport.start_manual_transfer_runs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_transfer_run(self, - request: Optional[Union[datatransfer.GetTransferRunRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> transfer.TransferRun: - r"""Returns information about the particular transfer - run. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datatransfer_v1 - - def sample_get_transfer_run(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.GetTransferRunRequest( - name="name_value", - ) - - # Make the request - response = client.get_transfer_run(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_datatransfer_v1.types.GetTransferRunRequest, dict]): - The request object. A request to get data transfer run - information. - name (str): - Required. The field will contain name of the resource - requested, for example: - ``projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`` - or - ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_datatransfer_v1.types.TransferRun: - Represents a data transfer run. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
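The recurring "metadata header" block implements explicit routing: the field value is URL-encoded into the ``x-goog-request-params`` header. A small illustration of the helper in isolation (the exact encoded output is what I'd expect from URL-encoding, shown here as a comment):

.. code-block:: python

    from google.api_core import gapic_v1

    key, value = gapic_v1.routing_header.to_grpc_metadata(
        (("name", "projects/p/transferConfigs/c"),)
    )
    print(key)    # x-goog-request-params
    print(value)  # name=projects%2Fp%2FtransferConfigs%2Fc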
- if not isinstance(request, datatransfer.GetTransferRunRequest): - request = datatransfer.GetTransferRunRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_transfer_run] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_transfer_run(self, - request: Optional[Union[datatransfer.DeleteTransferRunRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes the specified transfer run. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datatransfer_v1 - - def sample_delete_transfer_run(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.DeleteTransferRunRequest( - name="name_value", - ) - - # Make the request - client.delete_transfer_run(request=request) - - Args: - request (Union[google.cloud.bigquery_datatransfer_v1.types.DeleteTransferRunRequest, dict]): - The request object. A request to delete data transfer run - information. - name (str): - Required. The field will contain name of the resource - requested, for example: - ``projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`` - or - ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError('If the `request` argument is set, then none of '
- 'the individual field arguments should be set.')
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, datatransfer.DeleteTransferRunRequest):
- request = datatransfer.DeleteTransferRunRequest(request)
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if name is not None:
- request.name = name
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.delete_transfer_run]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("name", request.name),
- )),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- # Send the request.
- rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- def list_transfer_runs(self,
- request: Optional[Union[datatransfer.ListTransferRunsRequest, dict]] = None,
- *,
- parent: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> pagers.ListTransferRunsPager:
- r"""Returns information about running and completed
- transfer runs.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import bigquery_datatransfer_v1
-
- def sample_list_transfer_runs():
- # Create a client
- client = bigquery_datatransfer_v1.DataTransferServiceClient()
-
- # Initialize request argument(s)
- request = bigquery_datatransfer_v1.ListTransferRunsRequest(
- parent="parent_value",
- )
-
- # Make the request
- page_result = client.list_transfer_runs(request=request)
-
- # Handle the response
- for response in page_result:
- print(response)
-
- Args:
- request (Union[google.cloud.bigquery_datatransfer_v1.types.ListTransferRunsRequest, dict]):
- The request object. A request to list data transfer runs.
- parent (str):
- Required. Name of transfer configuration for which
- transfer runs should be retrieved. Format of transfer
- configuration resource name is:
- ``projects/{project_id}/transferConfigs/{config_id}`` or
- ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}``.
-
- This corresponds to the ``parent`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.pagers.ListTransferRunsPager:
- The returned list of transfer runs in the
- project.
- Iterating over this object will yield
- results and resolve additional pages
- automatically.
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [parent]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError('If the `request` argument is set, then none of '
- 'the individual field arguments should be set.')
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, datatransfer.ListTransferRunsRequest):
- request = datatransfer.ListTransferRunsRequest(request)
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if parent is not None:
- request.parent = parent
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.list_transfer_runs]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("parent", request.parent),
- )),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # This method is paged; wrap the response in a pager, which provides
- # an `__iter__` convenience method.
- response = pagers.ListTransferRunsPager(
- method=rpc,
- request=request,
- response=response,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- def list_transfer_logs(self,
- request: Optional[Union[datatransfer.ListTransferLogsRequest, dict]] = None,
- *,
- parent: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> pagers.ListTransferLogsPager:
- r"""Returns log messages for the transfer run.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import bigquery_datatransfer_v1
-
- def sample_list_transfer_logs():
- # Create a client
- client = bigquery_datatransfer_v1.DataTransferServiceClient()
-
- # Initialize request argument(s)
- request = bigquery_datatransfer_v1.ListTransferLogsRequest(
- parent="parent_value",
- )
-
- # Make the request
- page_result = client.list_transfer_logs(request=request)
-
- # Handle the response
- for response in page_result:
- print(response)
-
- Args:
- request (Union[google.cloud.bigquery_datatransfer_v1.types.ListTransferLogsRequest, dict]):
- The request object. A request to get user-facing log
- messages associated with a data transfer
- run.
- parent (str):
- Required. Transfer run name in the form:
- ``projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}``
- or
- ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}``
-
- This corresponds to the ``parent`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.pagers.ListTransferLogsPager:
- The returned list of transfer run
- messages.
- Iterating over this object will yield
- results and resolve additional pages
- automatically.
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [parent]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError('If the `request` argument is set, then none of '
- 'the individual field arguments should be set.')
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, datatransfer.ListTransferLogsRequest):
- request = datatransfer.ListTransferLogsRequest(request)
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if parent is not None:
- request.parent = parent
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.list_transfer_logs]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("parent", request.parent),
- )),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # This method is paged; wrap the response in a pager, which provides
- # an `__iter__` convenience method.
- response = pagers.ListTransferLogsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def check_valid_creds(self, - request: Optional[Union[datatransfer.CheckValidCredsRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> datatransfer.CheckValidCredsResponse: - r"""Returns true if valid credentials exist for the given - data source and requesting user. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_datatransfer_v1 - - def sample_check_valid_creds(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.CheckValidCredsRequest( - name="name_value", - ) - - # Make the request - response = client.check_valid_creds(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_datatransfer_v1.types.CheckValidCredsRequest, dict]): - The request object. A request to determine whether the - user has valid credentials. This method - is used to limit the number of OAuth - popups in the user interface. The user - id is inferred from the API call - context. If the data source has the - Google+ authorization type, this method - returns false, as it cannot be - determined whether the credentials are - already valid merely based on the user - id. - name (str): - Required. The data source in the form: - ``projects/{project_id}/dataSources/{data_source_id}`` - or - ``projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_datatransfer_v1.types.CheckValidCredsResponse: - A response indicating whether the - credentials exist and are valid. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
- if not isinstance(request, datatransfer.CheckValidCredsRequest):
- request = datatransfer.CheckValidCredsRequest(request)
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if name is not None:
- request.name = name
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.check_valid_creds]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("name", request.name),
- )),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- def enroll_data_sources(self,
- request: Optional[Union[datatransfer.EnrollDataSourcesRequest, dict]] = None,
- *,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> None:
- r"""Enroll data sources in a user project. This allows users to
- create transfer configurations for these data sources. They will
- also appear in the ListDataSources RPC and as such, will appear
- in the `BigQuery
- UI <https://console.cloud.google.com/bigquery>`__, and the
- documents can be found in the public guide for `BigQuery Web
- UI <https://cloud.google.com/bigquery/bigquery-web-ui>`__ and
- `Data Transfer
- Service <https://cloud.google.com/bigquery/docs/working-with-transfers>`__.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import bigquery_datatransfer_v1
-
- def sample_enroll_data_sources():
- # Create a client
- client = bigquery_datatransfer_v1.DataTransferServiceClient()
-
- # Initialize request argument(s)
- request = bigquery_datatransfer_v1.EnrollDataSourcesRequest(
- name="name_value",
- )
-
- # Make the request
- client.enroll_data_sources(request=request)
-
- Args:
- request (Union[google.cloud.bigquery_datatransfer_v1.types.EnrollDataSourcesRequest, dict]):
- The request object. A request to enroll a set of data sources so they are
- visible in the BigQuery UI's ``Transfer`` tab.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
- """
- # Create or coerce a protobuf request object.
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, datatransfer.EnrollDataSourcesRequest):
- request = datatransfer.EnrollDataSourcesRequest(request)
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.enroll_data_sources]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("name", request.name),
- )),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- # Send the request.
- rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- def unenroll_data_sources(self,
- request: Optional[Union[datatransfer.UnenrollDataSourcesRequest, dict]] = None,
- *,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> None:
- r"""Unenroll data sources in a user project. This allows users to
- remove transfer configurations for these data sources. They will
- no longer appear in the ListDataSources RPC and will also no
- longer appear in the `BigQuery
- UI <https://console.cloud.google.com/bigquery>`__. Data
- transfer configurations of unenrolled data sources will not be
- scheduled.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import bigquery_datatransfer_v1
-
- def sample_unenroll_data_sources():
- # Create a client
- client = bigquery_datatransfer_v1.DataTransferServiceClient()
-
- # Initialize request argument(s)
- request = bigquery_datatransfer_v1.UnenrollDataSourcesRequest(
- name="name_value",
- )
-
- # Make the request
- client.unenroll_data_sources(request=request)
-
- Args:
- request (Union[google.cloud.bigquery_datatransfer_v1.types.UnenrollDataSourcesRequest, dict]):
- The request object. A request to unenroll a set of data sources so they are
- no longer visible in the BigQuery UI's ``Transfer`` tab.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
- """
- # Create or coerce a protobuf request object.
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, datatransfer.UnenrollDataSourcesRequest):
- request = datatransfer.UnenrollDataSourcesRequest(request)
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.unenroll_data_sources]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("name", request.name),
- )),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- # Send the request.
- rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- def __enter__(self) -> "DataTransferServiceClient":
- return self
-
- def __exit__(self, type, value, traceback):
- """Releases underlying transport's resources.
-
- .. warning::
- ONLY use as a context manager if the transport is NOT shared
- with other clients! Exiting the with block will CLOSE the transport
- and may cause errors in other clients!
- """
- self.transport.close()
-
- def get_location(
- self,
- request: Optional[locations_pb2.GetLocationRequest] = None,
- *,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> locations_pb2.Location:
- r"""Gets information about a location.
-
- Args:
- request (:class:`~.location_pb2.GetLocationRequest`):
- The request object. Request message for
- `GetLocation` method.
- retry (google.api_core.retry.Retry): Designation of what errors,
- if any, should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
- Returns:
- ~.location_pb2.Location:
- Location object.
- """
- # Create or coerce a protobuf request object.
- # The request isn't a proto-plus wrapped type,
- # so it must be constructed via keyword expansion.
- if isinstance(request, dict):
- request = locations_pb2.GetLocationRequest(**request)
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.get_location]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata(
- (("name", request.name),)),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- try:
- # Send the request.
- response = rpc(
- request, retry=retry, timeout=timeout, metadata=metadata,)
-
- # Done; return the response.
- return response
- except core_exceptions.GoogleAPICallError as e:
- self._add_cred_info_for_auth_errors(e)
- raise e
-
- def list_locations(
- self,
- request: Optional[locations_pb2.ListLocationsRequest] = None,
- *,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> locations_pb2.ListLocationsResponse:
- r"""Lists information about the supported locations for this service.
-
- Args:
- request (:class:`~.location_pb2.ListLocationsRequest`):
- The request object. Request message for
- `ListLocations` method.
- retry (google.api_core.retry.Retry): Designation of what errors,
- if any, should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
- Returns:
- ~.location_pb2.ListLocationsResponse:
- Response message for ``ListLocations`` method.
- """
- # Create or coerce a protobuf request object.
- # The request isn't a proto-plus wrapped type,
- # so it must be constructed via keyword expansion.
- if isinstance(request, dict):
- request = locations_pb2.ListLocationsRequest(**request)
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.list_locations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - try: - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - except core_exceptions.GoogleAPICallError as e: - self._add_cred_info_for_auth_errors(e) - raise e - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "DataTransferServiceClient", -) diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/pagers.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/pagers.py deleted file mode 100644 index 57a2d08f8ba0..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/pagers.py +++ /dev/null @@ -1,584 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.bigquery_datatransfer_v1.types import datatransfer -from google.cloud.bigquery_datatransfer_v1.types import transfer - - -class ListDataSourcesPager: - """A pager for iterating through ``list_data_sources`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_datatransfer_v1.types.ListDataSourcesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``data_sources`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListDataSources`` requests and continue to iterate - through the ``data_sources`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_datatransfer_v1.types.ListDataSourcesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - def __init__(self, - method: Callable[..., datatransfer.ListDataSourcesResponse], - request: datatransfer.ListDataSourcesRequest, - response: datatransfer.ListDataSourcesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_datatransfer_v1.types.ListDataSourcesRequest): - The initial request object. - response (google.cloud.bigquery_datatransfer_v1.types.ListDataSourcesResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = datatransfer.ListDataSourcesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[datatransfer.ListDataSourcesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[datatransfer.DataSource]: - for page in self.pages: - yield from page.data_sources - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDataSourcesAsyncPager: - """A pager for iterating through ``list_data_sources`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_datatransfer_v1.types.ListDataSourcesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``data_sources`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDataSources`` requests and continue to iterate - through the ``data_sources`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_datatransfer_v1.types.ListDataSourcesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[datatransfer.ListDataSourcesResponse]], - request: datatransfer.ListDataSourcesRequest, - response: datatransfer.ListDataSourcesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_datatransfer_v1.types.ListDataSourcesRequest): - The initial request object. - response (google.cloud.bigquery_datatransfer_v1.types.ListDataSourcesResponse): - The initial response object. 
- retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = datatransfer.ListDataSourcesRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[datatransfer.ListDataSourcesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[datatransfer.DataSource]: - async def async_generator(): - async for page in self.pages: - for response in page.data_sources: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListTransferConfigsPager: - """A pager for iterating through ``list_transfer_configs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_datatransfer_v1.types.ListTransferConfigsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``transfer_configs`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListTransferConfigs`` requests and continue to iterate - through the ``transfer_configs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_datatransfer_v1.types.ListTransferConfigsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., datatransfer.ListTransferConfigsResponse], - request: datatransfer.ListTransferConfigsRequest, - response: datatransfer.ListTransferConfigsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_datatransfer_v1.types.ListTransferConfigsRequest): - The initial request object. - response (google.cloud.bigquery_datatransfer_v1.types.ListTransferConfigsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = datatransfer.ListTransferConfigsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[datatransfer.ListTransferConfigsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[transfer.TransferConfig]: - for page in self.pages: - yield from page.transfer_configs - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListTransferConfigsAsyncPager: - """A pager for iterating through ``list_transfer_configs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_datatransfer_v1.types.ListTransferConfigsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``transfer_configs`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListTransferConfigs`` requests and continue to iterate - through the ``transfer_configs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_datatransfer_v1.types.ListTransferConfigsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[datatransfer.ListTransferConfigsResponse]], - request: datatransfer.ListTransferConfigsRequest, - response: datatransfer.ListTransferConfigsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_datatransfer_v1.types.ListTransferConfigsRequest): - The initial request object. - response (google.cloud.bigquery_datatransfer_v1.types.ListTransferConfigsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = datatransfer.ListTransferConfigsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[datatransfer.ListTransferConfigsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[transfer.TransferConfig]: - async def async_generator(): - async for page in self.pages: - for response in page.transfer_configs: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListTransferRunsPager: - """A pager for iterating through ``list_transfer_runs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_datatransfer_v1.types.ListTransferRunsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``transfer_runs`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListTransferRuns`` requests and continue to iterate - through the ``transfer_runs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_datatransfer_v1.types.ListTransferRunsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., datatransfer.ListTransferRunsResponse], - request: datatransfer.ListTransferRunsRequest, - response: datatransfer.ListTransferRunsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_datatransfer_v1.types.ListTransferRunsRequest): - The initial request object. - response (google.cloud.bigquery_datatransfer_v1.types.ListTransferRunsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = datatransfer.ListTransferRunsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[datatransfer.ListTransferRunsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[transfer.TransferRun]: - for page in self.pages: - yield from page.transfer_runs - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListTransferRunsAsyncPager: - """A pager for iterating through ``list_transfer_runs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_datatransfer_v1.types.ListTransferRunsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``transfer_runs`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListTransferRuns`` requests and continue to iterate - through the ``transfer_runs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_datatransfer_v1.types.ListTransferRunsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[datatransfer.ListTransferRunsResponse]], - request: datatransfer.ListTransferRunsRequest, - response: datatransfer.ListTransferRunsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_datatransfer_v1.types.ListTransferRunsRequest): - The initial request object. - response (google.cloud.bigquery_datatransfer_v1.types.ListTransferRunsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = datatransfer.ListTransferRunsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[datatransfer.ListTransferRunsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[transfer.TransferRun]: - async def async_generator(): - async for page in self.pages: - for response in page.transfer_runs: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListTransferLogsPager: - """A pager for iterating through ``list_transfer_logs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_datatransfer_v1.types.ListTransferLogsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``transfer_messages`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListTransferLogs`` requests and continue to iterate - through the ``transfer_messages`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_datatransfer_v1.types.ListTransferLogsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., datatransfer.ListTransferLogsResponse], - request: datatransfer.ListTransferLogsRequest, - response: datatransfer.ListTransferLogsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_datatransfer_v1.types.ListTransferLogsRequest): - The initial request object. - response (google.cloud.bigquery_datatransfer_v1.types.ListTransferLogsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = datatransfer.ListTransferLogsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[datatransfer.ListTransferLogsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[transfer.TransferMessage]: - for page in self.pages: - yield from page.transfer_messages - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListTransferLogsAsyncPager: - """A pager for iterating through ``list_transfer_logs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_datatransfer_v1.types.ListTransferLogsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``transfer_messages`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListTransferLogs`` requests and continue to iterate - through the ``transfer_messages`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_datatransfer_v1.types.ListTransferLogsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[datatransfer.ListTransferLogsResponse]], - request: datatransfer.ListTransferLogsRequest, - response: datatransfer.ListTransferLogsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_datatransfer_v1.types.ListTransferLogsRequest): - The initial request object. - response (google.cloud.bigquery_datatransfer_v1.types.ListTransferLogsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = datatransfer.ListTransferLogsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[datatransfer.ListTransferLogsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[transfer.TransferMessage]: - async def async_generator(): - async for page in self.pages: - for response in page.transfer_messages: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/README.rst b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/README.rst deleted file mode 100644 index 6f9ef878ffff..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`DataTransferServiceTransport` is the ABC for all transports. -- public child `DataTransferServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `DataTransferServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseDataTransferServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `DataTransferServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/__init__.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/__init__.py deleted file mode 100644 index ea4d79f83c85..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -from typing import Dict, Type - -from .base import DataTransferServiceTransport -from .grpc import DataTransferServiceGrpcTransport -from .grpc_asyncio import DataTransferServiceGrpcAsyncIOTransport -from .rest import DataTransferServiceRestTransport -from .rest import DataTransferServiceRestInterceptor - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[DataTransferServiceTransport]] -_transport_registry['grpc'] = DataTransferServiceGrpcTransport -_transport_registry['grpc_asyncio'] = DataTransferServiceGrpcAsyncIOTransport -_transport_registry['rest'] = DataTransferServiceRestTransport - -__all__ = ( - 'DataTransferServiceTransport', - 'DataTransferServiceGrpcTransport', - 'DataTransferServiceGrpcAsyncIOTransport', - 'DataTransferServiceRestTransport', - 'DataTransferServiceRestInterceptor', -) diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/base.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/base.py deleted file mode 100644 index 5c8e4dfff2b5..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/base.py +++ /dev/null @@ -1,493 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.bigquery_datatransfer_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.bigquery_datatransfer_v1.types import datatransfer -from google.cloud.bigquery_datatransfer_v1.types import transfer -from google.cloud.location import locations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class DataTransferServiceTransport(abc.ABC): - """Abstract transport class for DataTransferService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'bigquerydatatransfer.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'bigquerydatatransfer.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. 
- if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.get_data_source: gapic_v1.method.wrap_method( - self.get_data_source, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=client_info, - ), - self.list_data_sources: gapic_v1.method.wrap_method( - self.list_data_sources, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=client_info, - ), - self.create_transfer_config: gapic_v1.method.wrap_method( - self.create_transfer_config, - default_timeout=30.0, - client_info=client_info, - ), - self.update_transfer_config: gapic_v1.method.wrap_method( - self.update_transfer_config, - default_timeout=30.0, - client_info=client_info, - ), - self.delete_transfer_config: gapic_v1.method.wrap_method( - self.delete_transfer_config, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=client_info, - ), - self.get_transfer_config: gapic_v1.method.wrap_method( - self.get_transfer_config, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=client_info, - ), - self.list_transfer_configs: gapic_v1.method.wrap_method( - self.list_transfer_configs, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=client_info, - ), - self.schedule_transfer_runs: gapic_v1.method.wrap_method( - self.schedule_transfer_runs, - default_timeout=30.0, - client_info=client_info, - ), - self.start_manual_transfer_runs: gapic_v1.method.wrap_method( - self.start_manual_transfer_runs, - default_timeout=None, - client_info=client_info, - ), - self.get_transfer_run: gapic_v1.method.wrap_method( - self.get_transfer_run, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - 
client_info=client_info, - ), - self.delete_transfer_run: gapic_v1.method.wrap_method( - self.delete_transfer_run, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=client_info, - ), - self.list_transfer_runs: gapic_v1.method.wrap_method( - self.list_transfer_runs, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=client_info, - ), - self.list_transfer_logs: gapic_v1.method.wrap_method( - self.list_transfer_logs, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=client_info, - ), - self.check_valid_creds: gapic_v1.method.wrap_method( - self.check_valid_creds, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=20.0, - ), - default_timeout=20.0, - client_info=client_info, - ), - self.enroll_data_sources: gapic_v1.method.wrap_method( - self.enroll_data_sources, - default_timeout=None, - client_info=client_info, - ), - self.unenroll_data_sources: gapic_v1.method.wrap_method( - self.unenroll_data_sources, - default_timeout=None, - client_info=client_info, - ), - self.get_location: gapic_v1.method.wrap_method( - self.get_location, - default_timeout=None, - client_info=client_info, - ), - self.list_locations: gapic_v1.method.wrap_method( - self.list_locations, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
- """ - raise NotImplementedError() - - @property - def get_data_source(self) -> Callable[ - [datatransfer.GetDataSourceRequest], - Union[ - datatransfer.DataSource, - Awaitable[datatransfer.DataSource] - ]]: - raise NotImplementedError() - - @property - def list_data_sources(self) -> Callable[ - [datatransfer.ListDataSourcesRequest], - Union[ - datatransfer.ListDataSourcesResponse, - Awaitable[datatransfer.ListDataSourcesResponse] - ]]: - raise NotImplementedError() - - @property - def create_transfer_config(self) -> Callable[ - [datatransfer.CreateTransferConfigRequest], - Union[ - transfer.TransferConfig, - Awaitable[transfer.TransferConfig] - ]]: - raise NotImplementedError() - - @property - def update_transfer_config(self) -> Callable[ - [datatransfer.UpdateTransferConfigRequest], - Union[ - transfer.TransferConfig, - Awaitable[transfer.TransferConfig] - ]]: - raise NotImplementedError() - - @property - def delete_transfer_config(self) -> Callable[ - [datatransfer.DeleteTransferConfigRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def get_transfer_config(self) -> Callable[ - [datatransfer.GetTransferConfigRequest], - Union[ - transfer.TransferConfig, - Awaitable[transfer.TransferConfig] - ]]: - raise NotImplementedError() - - @property - def list_transfer_configs(self) -> Callable[ - [datatransfer.ListTransferConfigsRequest], - Union[ - datatransfer.ListTransferConfigsResponse, - Awaitable[datatransfer.ListTransferConfigsResponse] - ]]: - raise NotImplementedError() - - @property - def schedule_transfer_runs(self) -> Callable[ - [datatransfer.ScheduleTransferRunsRequest], - Union[ - datatransfer.ScheduleTransferRunsResponse, - Awaitable[datatransfer.ScheduleTransferRunsResponse] - ]]: - raise NotImplementedError() - - @property - def start_manual_transfer_runs(self) -> Callable[ - [datatransfer.StartManualTransferRunsRequest], - Union[ - datatransfer.StartManualTransferRunsResponse, - Awaitable[datatransfer.StartManualTransferRunsResponse] - ]]: - raise NotImplementedError() - - @property - def get_transfer_run(self) -> Callable[ - [datatransfer.GetTransferRunRequest], - Union[ - transfer.TransferRun, - Awaitable[transfer.TransferRun] - ]]: - raise NotImplementedError() - - @property - def delete_transfer_run(self) -> Callable[ - [datatransfer.DeleteTransferRunRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def list_transfer_runs(self) -> Callable[ - [datatransfer.ListTransferRunsRequest], - Union[ - datatransfer.ListTransferRunsResponse, - Awaitable[datatransfer.ListTransferRunsResponse] - ]]: - raise NotImplementedError() - - @property - def list_transfer_logs(self) -> Callable[ - [datatransfer.ListTransferLogsRequest], - Union[ - datatransfer.ListTransferLogsResponse, - Awaitable[datatransfer.ListTransferLogsResponse] - ]]: - raise NotImplementedError() - - @property - def check_valid_creds(self) -> Callable[ - [datatransfer.CheckValidCredsRequest], - Union[ - datatransfer.CheckValidCredsResponse, - Awaitable[datatransfer.CheckValidCredsResponse] - ]]: - raise NotImplementedError() - - @property - def enroll_data_sources(self) -> Callable[ - [datatransfer.EnrollDataSourcesRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def unenroll_data_sources(self) -> Callable[ - [datatransfer.UnenrollDataSourcesRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - 
raise NotImplementedError() - - @property - def get_location(self, - ) -> Callable[ - [locations_pb2.GetLocationRequest], - Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], - ]: - raise NotImplementedError() - - @property - def list_locations(self, - ) -> Callable[ - [locations_pb2.ListLocationsRequest], - Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'DataTransferServiceTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc.py deleted file mode 100644 index 7fee25987026..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc.py +++ /dev/null @@ -1,806 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json -import logging as std_logging -import pickle -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore - -from google.cloud.bigquery_datatransfer_v1.types import datatransfer -from google.cloud.bigquery_datatransfer_v1.types import transfer -from google.cloud.location import locations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import DataTransferServiceTransport, DEFAULT_CLIENT_INFO - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER - def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else 
value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "rpcName": client_call_details.method, - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - - response = continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = response.trailing_metadata() - # Convert the gRPC trailing metadata to a serializable dict of string pairs - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = response.result() - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response for {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "rpcName": client_call_details.method, - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class DataTransferServiceGrpcTransport(DataTransferServiceTransport): - """gRPC backend transport for DataTransferService. - - This API allows users to manage their data transfers into - BigQuery. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'bigquerydatatransfer.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'bigquerydatatransfer.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional[Sequence[str]]): A list of scopes. This argument is - ignored if a ``channel`` instance is provided.
- channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) - - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'bigquerydatatransfer.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. 
- """ - return self._grpc_channel - - @property - def get_data_source(self) -> Callable[ - [datatransfer.GetDataSourceRequest], - datatransfer.DataSource]: - r"""Return a callable for the get data source method over gRPC. - - Retrieves a supported data source and returns its - settings. - - Returns: - Callable[[~.GetDataSourceRequest], - ~.DataSource]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_data_source' not in self._stubs: - self._stubs['get_data_source'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datatransfer.v1.DataTransferService/GetDataSource', - request_serializer=datatransfer.GetDataSourceRequest.serialize, - response_deserializer=datatransfer.DataSource.deserialize, - ) - return self._stubs['get_data_source'] - - @property - def list_data_sources(self) -> Callable[ - [datatransfer.ListDataSourcesRequest], - datatransfer.ListDataSourcesResponse]: - r"""Return a callable for the list data sources method over gRPC. - - Lists supported data sources and returns their - settings. - - Returns: - Callable[[~.ListDataSourcesRequest], - ~.ListDataSourcesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_data_sources' not in self._stubs: - self._stubs['list_data_sources'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListDataSources', - request_serializer=datatransfer.ListDataSourcesRequest.serialize, - response_deserializer=datatransfer.ListDataSourcesResponse.deserialize, - ) - return self._stubs['list_data_sources'] - - @property - def create_transfer_config(self) -> Callable[ - [datatransfer.CreateTransferConfigRequest], - transfer.TransferConfig]: - r"""Return a callable for the create transfer config method over gRPC. - - Creates a new data transfer configuration. - - Returns: - Callable[[~.CreateTransferConfigRequest], - ~.TransferConfig]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_transfer_config' not in self._stubs: - self._stubs['create_transfer_config'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datatransfer.v1.DataTransferService/CreateTransferConfig', - request_serializer=datatransfer.CreateTransferConfigRequest.serialize, - response_deserializer=transfer.TransferConfig.deserialize, - ) - return self._stubs['create_transfer_config'] - - @property - def update_transfer_config(self) -> Callable[ - [datatransfer.UpdateTransferConfigRequest], - transfer.TransferConfig]: - r"""Return a callable for the update transfer config method over gRPC. - - Updates a data transfer configuration. - All fields must be set, even if they are not updated. - - Returns: - Callable[[~.UpdateTransferConfigRequest], - ~.TransferConfig]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_transfer_config' not in self._stubs: - self._stubs['update_transfer_config'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datatransfer.v1.DataTransferService/UpdateTransferConfig', - request_serializer=datatransfer.UpdateTransferConfigRequest.serialize, - response_deserializer=transfer.TransferConfig.deserialize, - ) - return self._stubs['update_transfer_config'] - - @property - def delete_transfer_config(self) -> Callable[ - [datatransfer.DeleteTransferConfigRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete transfer config method over gRPC. - - Deletes a data transfer configuration, including any - associated transfer runs and logs. - - Returns: - Callable[[~.DeleteTransferConfigRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_transfer_config' not in self._stubs: - self._stubs['delete_transfer_config'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datatransfer.v1.DataTransferService/DeleteTransferConfig', - request_serializer=datatransfer.DeleteTransferConfigRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_transfer_config'] - - @property - def get_transfer_config(self) -> Callable[ - [datatransfer.GetTransferConfigRequest], - transfer.TransferConfig]: - r"""Return a callable for the get transfer config method over gRPC. - - Returns information about a data transfer config. - - Returns: - Callable[[~.GetTransferConfigRequest], - ~.TransferConfig]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_transfer_config' not in self._stubs: - self._stubs['get_transfer_config'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datatransfer.v1.DataTransferService/GetTransferConfig', - request_serializer=datatransfer.GetTransferConfigRequest.serialize, - response_deserializer=transfer.TransferConfig.deserialize, - ) - return self._stubs['get_transfer_config'] - - @property - def list_transfer_configs(self) -> Callable[ - [datatransfer.ListTransferConfigsRequest], - datatransfer.ListTransferConfigsResponse]: - r"""Return a callable for the list transfer configs method over gRPC. - - Returns information about all transfer configs owned - by a project in the specified location. - - Returns: - Callable[[~.ListTransferConfigsRequest], - ~.ListTransferConfigsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_transfer_configs' not in self._stubs: - self._stubs['list_transfer_configs'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListTransferConfigs', - request_serializer=datatransfer.ListTransferConfigsRequest.serialize, - response_deserializer=datatransfer.ListTransferConfigsResponse.deserialize, - ) - return self._stubs['list_transfer_configs'] - - @property - def schedule_transfer_runs(self) -> Callable[ - [datatransfer.ScheduleTransferRunsRequest], - datatransfer.ScheduleTransferRunsResponse]: - r"""Return a callable for the schedule transfer runs method over gRPC. - - Creates transfer runs for a time range [start_time, end_time]. - For each date - or whatever granularity the data source supports - - in the range, one transfer run is created. Note that runs are - created per UTC time in the time range. DEPRECATED: use - StartManualTransferRuns instead. - - Returns: - Callable[[~.ScheduleTransferRunsRequest], - ~.ScheduleTransferRunsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'schedule_transfer_runs' not in self._stubs: - self._stubs['schedule_transfer_runs'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datatransfer.v1.DataTransferService/ScheduleTransferRuns', - request_serializer=datatransfer.ScheduleTransferRunsRequest.serialize, - response_deserializer=datatransfer.ScheduleTransferRunsResponse.deserialize, - ) - return self._stubs['schedule_transfer_runs'] - - @property - def start_manual_transfer_runs(self) -> Callable[ - [datatransfer.StartManualTransferRunsRequest], - datatransfer.StartManualTransferRunsResponse]: - r"""Return a callable for the start manual transfer runs method over gRPC. - - Start manual transfer runs to be executed now with schedule_time - equal to current time. The transfer runs can be created for a - time range where the run_time is between start_time (inclusive) - and end_time (exclusive), or for a specific run_time. - - Returns: - Callable[[~.StartManualTransferRunsRequest], - ~.StartManualTransferRunsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'start_manual_transfer_runs' not in self._stubs: - self._stubs['start_manual_transfer_runs'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datatransfer.v1.DataTransferService/StartManualTransferRuns', - request_serializer=datatransfer.StartManualTransferRunsRequest.serialize, - response_deserializer=datatransfer.StartManualTransferRunsResponse.deserialize, - ) - return self._stubs['start_manual_transfer_runs'] - - @property - def get_transfer_run(self) -> Callable[ - [datatransfer.GetTransferRunRequest], - transfer.TransferRun]: - r"""Return a callable for the get transfer run method over gRPC. - - Returns information about the particular transfer - run. - - Returns: - Callable[[~.GetTransferRunRequest], - ~.TransferRun]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_transfer_run' not in self._stubs: - self._stubs['get_transfer_run'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datatransfer.v1.DataTransferService/GetTransferRun', - request_serializer=datatransfer.GetTransferRunRequest.serialize, - response_deserializer=transfer.TransferRun.deserialize, - ) - return self._stubs['get_transfer_run'] - - @property - def delete_transfer_run(self) -> Callable[ - [datatransfer.DeleteTransferRunRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete transfer run method over gRPC. - - Deletes the specified transfer run. - - Returns: - Callable[[~.DeleteTransferRunRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_transfer_run' not in self._stubs: - self._stubs['delete_transfer_run'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datatransfer.v1.DataTransferService/DeleteTransferRun', - request_serializer=datatransfer.DeleteTransferRunRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_transfer_run'] - - @property - def list_transfer_runs(self) -> Callable[ - [datatransfer.ListTransferRunsRequest], - datatransfer.ListTransferRunsResponse]: - r"""Return a callable for the list transfer runs method over gRPC. - - Returns information about running and completed - transfer runs. - - Returns: - Callable[[~.ListTransferRunsRequest], - ~.ListTransferRunsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_transfer_runs' not in self._stubs: - self._stubs['list_transfer_runs'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListTransferRuns', - request_serializer=datatransfer.ListTransferRunsRequest.serialize, - response_deserializer=datatransfer.ListTransferRunsResponse.deserialize, - ) - return self._stubs['list_transfer_runs'] - - @property - def list_transfer_logs(self) -> Callable[ - [datatransfer.ListTransferLogsRequest], - datatransfer.ListTransferLogsResponse]: - r"""Return a callable for the list transfer logs method over gRPC. - - Returns log messages for the transfer run. - - Returns: - Callable[[~.ListTransferLogsRequest], - ~.ListTransferLogsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_transfer_logs' not in self._stubs: - self._stubs['list_transfer_logs'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListTransferLogs', - request_serializer=datatransfer.ListTransferLogsRequest.serialize, - response_deserializer=datatransfer.ListTransferLogsResponse.deserialize, - ) - return self._stubs['list_transfer_logs'] - - @property - def check_valid_creds(self) -> Callable[ - [datatransfer.CheckValidCredsRequest], - datatransfer.CheckValidCredsResponse]: - r"""Return a callable for the check valid creds method over gRPC. - - Returns true if valid credentials exist for the given - data source and requesting user. - - Returns: - Callable[[~.CheckValidCredsRequest], - ~.CheckValidCredsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'check_valid_creds' not in self._stubs: - self._stubs['check_valid_creds'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datatransfer.v1.DataTransferService/CheckValidCreds', - request_serializer=datatransfer.CheckValidCredsRequest.serialize, - response_deserializer=datatransfer.CheckValidCredsResponse.deserialize, - ) - return self._stubs['check_valid_creds'] - - @property - def enroll_data_sources(self) -> Callable[ - [datatransfer.EnrollDataSourcesRequest], - empty_pb2.Empty]: - r"""Return a callable for the enroll data sources method over gRPC. - - Enroll data sources in a user project. This allows users to - create transfer configurations for these data sources. They will - also appear in the ListDataSources RPC and as such, will appear - in the `BigQuery UI <https://console.cloud.google.com/bigquery>`__, - and the documents can be found in the public guide for `BigQuery - Web UI <https://cloud.google.com/bigquery/docs/bigquery-web-ui>`__ - and `Data Transfer Service - <https://cloud.google.com/bigquery/docs/working-with-transfers>`__. - - Returns: - Callable[[~.EnrollDataSourcesRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'enroll_data_sources' not in self._stubs: - self._stubs['enroll_data_sources'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datatransfer.v1.DataTransferService/EnrollDataSources', - request_serializer=datatransfer.EnrollDataSourcesRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['enroll_data_sources'] - - @property - def unenroll_data_sources(self) -> Callable[ - [datatransfer.UnenrollDataSourcesRequest], - empty_pb2.Empty]: - r"""Return a callable for the unenroll data sources method over gRPC. - - Unenroll data sources in a user project. This allows users to - remove transfer configurations for these data sources. They will - no longer appear in the ListDataSources RPC and will also no - longer appear in the `BigQuery UI - <https://console.cloud.google.com/bigquery>`__. Transfer - configurations of unenrolled data sources will not be - scheduled. - - Returns: - Callable[[~.UnenrollDataSourcesRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each.
- if 'unenroll_data_sources' not in self._stubs: - self._stubs['unenroll_data_sources'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datatransfer.v1.DataTransferService/UnenrollDataSources', - request_serializer=datatransfer.UnenrollDataSourcesRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['unenroll_data_sources'] - - def close(self): - self._logged_channel.close() - - @property - def list_locations( - self, - ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/ListLocations", - request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, - response_deserializer=locations_pb2.ListLocationsResponse.FromString, - ) - return self._stubs["list_locations"] - - @property - def get_location( - self, - ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_location" not in self._stubs: - self._stubs["get_location"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'DataTransferServiceGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc_asyncio.py deleted file mode 100644 index ab125782318b..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,1011 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
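Every RPC property in the ``grpc.py`` removed above follows the same memoized-stub shape: the ``unary_unary`` callable for a fully qualified method path is built on first access and cached in ``self._stubs``, so repeated property reads reuse one stub per channel. A rough, self-contained sketch of that shape, assuming only that ``grpcio`` is installed; the class name, local target address, and the omission of serializers are all illustrative choices, not the generated implementation:

.. code-block:: python

    from typing import Callable, Dict

    import grpc  # assumes grpcio is installed


    class StubCacheSketch:
        """Illustrative stand-in showing the transport's stub caching."""

        def __init__(self, channel: grpc.Channel) -> None:
            self._channel = channel
            self._stubs: Dict[str, Callable] = {}

        @property
        def get_data_source(self) -> Callable:
            # Build the unary-unary callable on first access, then
            # reuse it; this is the shape of every RPC property above.
            if 'get_data_source' not in self._stubs:
                self._stubs['get_data_source'] = self._channel.unary_unary(
                    '/google.cloud.bigquery.datatransfer.v1.DataTransferService/GetDataSource',
                )
            return self._stubs['get_data_source']


    # Creating a channel does not connect; 'localhost:50051' is
    # purely illustrative.
    channel = grpc.insecure_channel('localhost:50051')
    stub = StubCacheSketch(channel)
    assert stub.get_data_source is stub.get_data_source  # memoized

Caching matters because stub creation registers serializers with the channel; doing it once per method keeps property access cheap while leaving all wire-level concerns to gRPC.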
-# -import inspect -import json -import pickle -import logging as std_logging -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import exceptions as core_exceptions -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.bigquery_datatransfer_v1.types import datatransfer -from google.cloud.bigquery_datatransfer_v1.types import transfer -from google.cloud.location import locations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from .base import DataTransferServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import DataTransferServiceGrpcTransport - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER - async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "rpcName": str(client_call_details.method), - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - response = await continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = await response.trailing_metadata() - # Convert the gRPC trailing metadata to a serializable dict of string pairs - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = await response - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response to rpc {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "rpcName": str(client_call_details.method), - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class 
DataTransferServiceGrpcAsyncIOTransport(DataTransferServiceTransport): - """gRPC AsyncIO backend transport for DataTransferService. - - This API allows users to manage their data transfers into - BigQuery. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'bigquerydatatransfer.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'bigquerydatatransfer.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'bigquerydatatransfer.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. 
- scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, aio.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def get_data_source(self) -> Callable[ - [datatransfer.GetDataSourceRequest], - Awaitable[datatransfer.DataSource]]: - r"""Return a callable for the get data source method over gRPC. - - Retrieves a supported data source and returns its - settings. - - Returns: - Callable[[~.GetDataSourceRequest], - Awaitable[~.DataSource]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_data_source' not in self._stubs: - self._stubs['get_data_source'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datatransfer.v1.DataTransferService/GetDataSource', - request_serializer=datatransfer.GetDataSourceRequest.serialize, - response_deserializer=datatransfer.DataSource.deserialize, - ) - return self._stubs['get_data_source'] - - @property - def list_data_sources(self) -> Callable[ - [datatransfer.ListDataSourcesRequest], - Awaitable[datatransfer.ListDataSourcesResponse]]: - r"""Return a callable for the list data sources method over gRPC. - - Lists supported data sources and returns their - settings. - - Returns: - Callable[[~.ListDataSourcesRequest], - Awaitable[~.ListDataSourcesResponse]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_data_sources' not in self._stubs: - self._stubs['list_data_sources'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListDataSources', - request_serializer=datatransfer.ListDataSourcesRequest.serialize, - response_deserializer=datatransfer.ListDataSourcesResponse.deserialize, - ) - return self._stubs['list_data_sources'] - - @property - def create_transfer_config(self) -> Callable[ - [datatransfer.CreateTransferConfigRequest], - Awaitable[transfer.TransferConfig]]: - r"""Return a callable for the create transfer config method over gRPC. - - Creates a new data transfer configuration. - - Returns: - Callable[[~.CreateTransferConfigRequest], - Awaitable[~.TransferConfig]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_transfer_config' not in self._stubs: - self._stubs['create_transfer_config'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datatransfer.v1.DataTransferService/CreateTransferConfig', - request_serializer=datatransfer.CreateTransferConfigRequest.serialize, - response_deserializer=transfer.TransferConfig.deserialize, - ) - return self._stubs['create_transfer_config'] - - @property - def update_transfer_config(self) -> Callable[ - [datatransfer.UpdateTransferConfigRequest], - Awaitable[transfer.TransferConfig]]: - r"""Return a callable for the update transfer config method over gRPC. - - Updates a data transfer configuration. - All fields must be set, even if they are not updated. - - Returns: - Callable[[~.UpdateTransferConfigRequest], - Awaitable[~.TransferConfig]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_transfer_config' not in self._stubs: - self._stubs['update_transfer_config'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datatransfer.v1.DataTransferService/UpdateTransferConfig', - request_serializer=datatransfer.UpdateTransferConfigRequest.serialize, - response_deserializer=transfer.TransferConfig.deserialize, - ) - return self._stubs['update_transfer_config'] - - @property - def delete_transfer_config(self) -> Callable[ - [datatransfer.DeleteTransferConfigRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete transfer config method over gRPC. - - Deletes a data transfer configuration, including any - associated transfer runs and logs. - - Returns: - Callable[[~.DeleteTransferConfigRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_transfer_config' not in self._stubs: - self._stubs['delete_transfer_config'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datatransfer.v1.DataTransferService/DeleteTransferConfig', - request_serializer=datatransfer.DeleteTransferConfigRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_transfer_config'] - - @property - def get_transfer_config(self) -> Callable[ - [datatransfer.GetTransferConfigRequest], - Awaitable[transfer.TransferConfig]]: - r"""Return a callable for the get transfer config method over gRPC. - - Returns information about a data transfer config. - - Returns: - Callable[[~.GetTransferConfigRequest], - Awaitable[~.TransferConfig]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_transfer_config' not in self._stubs: - self._stubs['get_transfer_config'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datatransfer.v1.DataTransferService/GetTransferConfig', - request_serializer=datatransfer.GetTransferConfigRequest.serialize, - response_deserializer=transfer.TransferConfig.deserialize, - ) - return self._stubs['get_transfer_config'] - - @property - def list_transfer_configs(self) -> Callable[ - [datatransfer.ListTransferConfigsRequest], - Awaitable[datatransfer.ListTransferConfigsResponse]]: - r"""Return a callable for the list transfer configs method over gRPC. - - Returns information about all transfer configs owned - by a project in the specified location. - - Returns: - Callable[[~.ListTransferConfigsRequest], - Awaitable[~.ListTransferConfigsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_transfer_configs' not in self._stubs: - self._stubs['list_transfer_configs'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListTransferConfigs', - request_serializer=datatransfer.ListTransferConfigsRequest.serialize, - response_deserializer=datatransfer.ListTransferConfigsResponse.deserialize, - ) - return self._stubs['list_transfer_configs'] - - @property - def schedule_transfer_runs(self) -> Callable[ - [datatransfer.ScheduleTransferRunsRequest], - Awaitable[datatransfer.ScheduleTransferRunsResponse]]: - r"""Return a callable for the schedule transfer runs method over gRPC. - - Creates transfer runs for a time range [start_time, end_time]. - For each date - or whatever granularity the data source supports - - in the range, one transfer run is created. Note that runs are - created per UTC time in the time range. DEPRECATED: use - StartManualTransferRuns instead. - - Returns: - Callable[[~.ScheduleTransferRunsRequest], - Awaitable[~.ScheduleTransferRunsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'schedule_transfer_runs' not in self._stubs: - self._stubs['schedule_transfer_runs'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datatransfer.v1.DataTransferService/ScheduleTransferRuns', - request_serializer=datatransfer.ScheduleTransferRunsRequest.serialize, - response_deserializer=datatransfer.ScheduleTransferRunsResponse.deserialize, - ) - return self._stubs['schedule_transfer_runs'] - - @property - def start_manual_transfer_runs(self) -> Callable[ - [datatransfer.StartManualTransferRunsRequest], - Awaitable[datatransfer.StartManualTransferRunsResponse]]: - r"""Return a callable for the start manual transfer runs method over gRPC. - - Start manual transfer runs to be executed now with schedule_time - equal to current time. The transfer runs can be created for a - time range where the run_time is between start_time (inclusive) - and end_time (exclusive), or for a specific run_time. - - Returns: - Callable[[~.StartManualTransferRunsRequest], - Awaitable[~.StartManualTransferRunsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'start_manual_transfer_runs' not in self._stubs: - self._stubs['start_manual_transfer_runs'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datatransfer.v1.DataTransferService/StartManualTransferRuns', - request_serializer=datatransfer.StartManualTransferRunsRequest.serialize, - response_deserializer=datatransfer.StartManualTransferRunsResponse.deserialize, - ) - return self._stubs['start_manual_transfer_runs'] - - @property - def get_transfer_run(self) -> Callable[ - [datatransfer.GetTransferRunRequest], - Awaitable[transfer.TransferRun]]: - r"""Return a callable for the get transfer run method over gRPC. - - Returns information about the particular transfer - run. - - Returns: - Callable[[~.GetTransferRunRequest], - Awaitable[~.TransferRun]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_transfer_run' not in self._stubs: - self._stubs['get_transfer_run'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datatransfer.v1.DataTransferService/GetTransferRun', - request_serializer=datatransfer.GetTransferRunRequest.serialize, - response_deserializer=transfer.TransferRun.deserialize, - ) - return self._stubs['get_transfer_run'] - - @property - def delete_transfer_run(self) -> Callable[ - [datatransfer.DeleteTransferRunRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete transfer run method over gRPC. - - Deletes the specified transfer run. - - Returns: - Callable[[~.DeleteTransferRunRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_transfer_run' not in self._stubs: - self._stubs['delete_transfer_run'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datatransfer.v1.DataTransferService/DeleteTransferRun', - request_serializer=datatransfer.DeleteTransferRunRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_transfer_run'] - - @property - def list_transfer_runs(self) -> Callable[ - [datatransfer.ListTransferRunsRequest], - Awaitable[datatransfer.ListTransferRunsResponse]]: - r"""Return a callable for the list transfer runs method over gRPC. - - Returns information about running and completed - transfer runs. - - Returns: - Callable[[~.ListTransferRunsRequest], - Awaitable[~.ListTransferRunsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_transfer_runs' not in self._stubs: - self._stubs['list_transfer_runs'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListTransferRuns', - request_serializer=datatransfer.ListTransferRunsRequest.serialize, - response_deserializer=datatransfer.ListTransferRunsResponse.deserialize, - ) - return self._stubs['list_transfer_runs'] - - @property - def list_transfer_logs(self) -> Callable[ - [datatransfer.ListTransferLogsRequest], - Awaitable[datatransfer.ListTransferLogsResponse]]: - r"""Return a callable for the list transfer logs method over gRPC. - - Returns log messages for the transfer run. - - Returns: - Callable[[~.ListTransferLogsRequest], - Awaitable[~.ListTransferLogsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_transfer_logs' not in self._stubs: - self._stubs['list_transfer_logs'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListTransferLogs', - request_serializer=datatransfer.ListTransferLogsRequest.serialize, - response_deserializer=datatransfer.ListTransferLogsResponse.deserialize, - ) - return self._stubs['list_transfer_logs'] - - @property - def check_valid_creds(self) -> Callable[ - [datatransfer.CheckValidCredsRequest], - Awaitable[datatransfer.CheckValidCredsResponse]]: - r"""Return a callable for the check valid creds method over gRPC. - - Returns true if valid credentials exist for the given - data source and requesting user. - - Returns: - Callable[[~.CheckValidCredsRequest], - Awaitable[~.CheckValidCredsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
-        if 'check_valid_creds' not in self._stubs:
-            self._stubs['check_valid_creds'] = self._logged_channel.unary_unary(
-                '/google.cloud.bigquery.datatransfer.v1.DataTransferService/CheckValidCreds',
-                request_serializer=datatransfer.CheckValidCredsRequest.serialize,
-                response_deserializer=datatransfer.CheckValidCredsResponse.deserialize,
-            )
-        return self._stubs['check_valid_creds']
-
-    @property
-    def enroll_data_sources(self) -> Callable[
-            [datatransfer.EnrollDataSourcesRequest],
-            Awaitable[empty_pb2.Empty]]:
-        r"""Return a callable for the enroll data sources method over gRPC.
-
-        Enroll data sources in a user project. This allows users to
-        create transfer configurations for these data sources. They will
-        also appear in the ListDataSources RPC and as such, will appear
-        in the `BigQuery
-        UI <https://console.cloud.google.com/bigquery>`__, and the
-        documents can be found in the public guide for `BigQuery Web
-        UI <https://cloud.google.com/bigquery/bigquery-web-ui>`__ and
-        `Data Transfer
-        Service <https://cloud.google.com/bigquery/docs/working-with-transfers>`__.
-
-        Returns:
-            Callable[[~.EnrollDataSourcesRequest],
-                    Awaitable[~.Empty]]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'enroll_data_sources' not in self._stubs:
-            self._stubs['enroll_data_sources'] = self._logged_channel.unary_unary(
-                '/google.cloud.bigquery.datatransfer.v1.DataTransferService/EnrollDataSources',
-                request_serializer=datatransfer.EnrollDataSourcesRequest.serialize,
-                response_deserializer=empty_pb2.Empty.FromString,
-            )
-        return self._stubs['enroll_data_sources']
-
-    @property
-    def unenroll_data_sources(self) -> Callable[
-            [datatransfer.UnenrollDataSourcesRequest],
-            Awaitable[empty_pb2.Empty]]:
-        r"""Return a callable for the unenroll data sources method over gRPC.
-
-        Unenroll data sources in a user project. This allows users to
-        remove transfer configurations for these data sources. They will
-        no longer appear in the ListDataSources RPC and will also no
-        longer appear in the `BigQuery
-        UI <https://console.cloud.google.com/bigquery>`__. Data
-        transfer configurations of unenrolled data sources will not be
-        scheduled.
-
-        Returns:
-            Callable[[~.UnenrollDataSourcesRequest],
-                    Awaitable[~.Empty]]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'unenroll_data_sources' not in self._stubs:
-            self._stubs['unenroll_data_sources'] = self._logged_channel.unary_unary(
-                '/google.cloud.bigquery.datatransfer.v1.DataTransferService/UnenrollDataSources',
-                request_serializer=datatransfer.UnenrollDataSourcesRequest.serialize,
-                response_deserializer=empty_pb2.Empty.FromString,
-            )
-        return self._stubs['unenroll_data_sources']
-
-    def _prep_wrapped_messages(self, client_info):
-        """ Precompute the wrapped methods, overriding the base class method to use async wrappers."""
-        self._wrapped_methods = {
-            self.get_data_source: self._wrap_method(
-                self.get_data_source,
-                default_retry=retries.AsyncRetry(
-                    initial=0.1,
-                    maximum=60.0,
-                    multiplier=1.3,
-                    predicate=retries.if_exception_type(
-                        core_exceptions.DeadlineExceeded,
-                        core_exceptions.ServiceUnavailable,
-                    ),
-                    deadline=20.0,
-                ),
-                default_timeout=20.0,
-                client_info=client_info,
-            ),
-            self.list_data_sources: self._wrap_method(
-                self.list_data_sources,
-                default_retry=retries.AsyncRetry(
-                    initial=0.1,
-                    maximum=60.0,
-                    multiplier=1.3,
-                    predicate=retries.if_exception_type(
-                        core_exceptions.DeadlineExceeded,
-                        core_exceptions.ServiceUnavailable,
-                    ),
-                    deadline=20.0,
-                ),
-                default_timeout=20.0,
-                client_info=client_info,
-            ),
-            self.create_transfer_config: self._wrap_method(
-                self.create_transfer_config,
-                default_timeout=30.0,
-                client_info=client_info,
-            ),
-            self.update_transfer_config: self._wrap_method(
-                self.update_transfer_config,
-                default_timeout=30.0,
-                client_info=client_info,
-            ),
-            self.delete_transfer_config: self._wrap_method(
-                self.delete_transfer_config,
-                default_retry=retries.AsyncRetry(
-                    initial=0.1,
-                    maximum=60.0,
-                    multiplier=1.3,
-                    predicate=retries.if_exception_type(
-                        core_exceptions.DeadlineExceeded,
-                        core_exceptions.ServiceUnavailable,
-                    ),
-                    deadline=20.0,
-                ),
-                default_timeout=20.0,
-                client_info=client_info,
-            ),
-            self.get_transfer_config: self._wrap_method(
-                self.get_transfer_config,
-                default_retry=retries.AsyncRetry(
-                    initial=0.1,
-                    maximum=60.0,
-                    multiplier=1.3,
-                    predicate=retries.if_exception_type(
-                        core_exceptions.DeadlineExceeded,
-                        core_exceptions.ServiceUnavailable,
-                    ),
-                    deadline=20.0,
-                ),
-                default_timeout=20.0,
-                client_info=client_info,
-            ),
-            self.list_transfer_configs: self._wrap_method(
-                self.list_transfer_configs,
-                default_retry=retries.AsyncRetry(
-                    initial=0.1,
-                    maximum=60.0,
-                    multiplier=1.3,
-                    predicate=retries.if_exception_type(
-                        core_exceptions.DeadlineExceeded,
-                        core_exceptions.ServiceUnavailable,
-                    ),
-                    deadline=20.0,
-                ),
-                default_timeout=20.0,
-                client_info=client_info,
-            ),
-            self.schedule_transfer_runs: self._wrap_method(
-                self.schedule_transfer_runs,
-                default_timeout=30.0,
-                client_info=client_info,
-            ),
-            self.start_manual_transfer_runs: self._wrap_method(
-                self.start_manual_transfer_runs,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-            self.get_transfer_run: self._wrap_method(
-                self.get_transfer_run,
-                default_retry=retries.AsyncRetry(
-                    initial=0.1,
-                    maximum=60.0,
-                    multiplier=1.3,
-                    predicate=retries.if_exception_type(
-                        core_exceptions.DeadlineExceeded,
-                        core_exceptions.ServiceUnavailable,
-                    ),
-                    deadline=20.0,
-                ),
-                default_timeout=20.0,
-                client_info=client_info,
-            ),
-            self.delete_transfer_run: self._wrap_method(
-                self.delete_transfer_run,
-                default_retry=retries.AsyncRetry(
-                    initial=0.1,
-                    maximum=60.0,
-                    multiplier=1.3,
-                    predicate=retries.if_exception_type(
-                        core_exceptions.DeadlineExceeded,
-                        core_exceptions.ServiceUnavailable,
-                    ),
-                    deadline=20.0,
-                ),
-                default_timeout=20.0,
-                client_info=client_info,
-            ),
-            self.list_transfer_runs: self._wrap_method(
-                self.list_transfer_runs,
-                default_retry=retries.AsyncRetry(
-                    initial=0.1,
-                    maximum=60.0,
-                    multiplier=1.3,
-                    predicate=retries.if_exception_type(
-                        core_exceptions.DeadlineExceeded,
-                        core_exceptions.ServiceUnavailable,
-                    ),
-                    deadline=20.0,
-                ),
-                default_timeout=20.0,
-                client_info=client_info,
-            ),
-            self.list_transfer_logs: self._wrap_method(
-                self.list_transfer_logs,
-                default_retry=retries.AsyncRetry(
-                    initial=0.1,
-                    maximum=60.0,
-                    multiplier=1.3,
-                    predicate=retries.if_exception_type(
-                        core_exceptions.DeadlineExceeded,
-                        core_exceptions.ServiceUnavailable,
-                    ),
-                    deadline=20.0,
-                ),
-                default_timeout=20.0,
-                client_info=client_info,
-            ),
-            self.check_valid_creds: self._wrap_method(
-                self.check_valid_creds,
-                default_retry=retries.AsyncRetry(
-                    initial=0.1,
-                    maximum=60.0,
-                    multiplier=1.3,
-                    predicate=retries.if_exception_type(
-                        core_exceptions.DeadlineExceeded,
-                        core_exceptions.ServiceUnavailable,
-                    ),
-                    deadline=20.0,
-                ),
-                default_timeout=20.0,
-                client_info=client_info,
-            ),
-            self.enroll_data_sources: self._wrap_method(
-                self.enroll_data_sources,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-            self.unenroll_data_sources: self._wrap_method(
-                self.unenroll_data_sources,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-            self.get_location: self._wrap_method(
-                self.get_location,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-            self.list_locations: self._wrap_method(
-                self.list_locations,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-        }
-
-    def _wrap_method(self, func, *args, **kwargs):
-        if self._wrap_with_kind: # pragma: NO COVER
-            kwargs["kind"] = self.kind
-        return gapic_v1.method_async.wrap_method(func, *args, **kwargs)
-
-    def close(self):
-        return self._logged_channel.close()
-
-    @property
-    def kind(self) -> str:
-        return "grpc_asyncio"
-
-    @property
-    def list_locations(
-        self,
-    ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]:
-        r"""Return a callable for the list locations method over gRPC.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if "list_locations" not in self._stubs:
-            self._stubs["list_locations"] = self._logged_channel.unary_unary(
-                "/google.cloud.location.Locations/ListLocations",
-                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
-                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
-            )
-        return self._stubs["list_locations"]
-
-    @property
-    def get_location(
-        self,
-    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
-        r"""Return a callable for the get location method over gRPC.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
- if "get_location" not in self._stubs: - self._stubs["get_location"] = self._logged_channel.unary_unary( - "/google.cloud.location.Locations/GetLocation", - request_serializer=locations_pb2.GetLocationRequest.SerializeToString, - response_deserializer=locations_pb2.Location.FromString, - ) - return self._stubs["get_location"] - - -__all__ = ( - 'DataTransferServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/rest.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/rest.py deleted file mode 100644 index 44fa1b9a52e6..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/rest.py +++ /dev/null @@ -1,3035 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import logging -import json # type: ignore - -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.cloud.location import locations_pb2 # type: ignore - -from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - - -from google.cloud.bigquery_datatransfer_v1.types import datatransfer -from google.cloud.bigquery_datatransfer_v1.types import transfer -from google.protobuf import empty_pb2 # type: ignore - - -from .rest_base import _BaseDataTransferServiceRestTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = logging.getLogger(__name__) - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=f"requests@{requests_version}", -) - - -class DataTransferServiceRestInterceptor: - """Interceptor for DataTransferService. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. 
-    Example use cases include:
-    * Logging
-    * Verifying requests according to service or custom semantics
-    * Stripping extraneous information from responses
-
-    These use cases and more can be enabled by injecting an
-    instance of a custom subclass when constructing the DataTransferServiceRestTransport.
-
-    .. code-block:: python
-        class MyCustomDataTransferServiceInterceptor(DataTransferServiceRestInterceptor):
-            def pre_check_valid_creds(self, request, metadata):
-                logging.log(f"Received request: {request}")
-                return request, metadata
-
-            def post_check_valid_creds(self, response):
-                logging.log(f"Received response: {response}")
-                return response
-
-            def pre_create_transfer_config(self, request, metadata):
-                logging.log(f"Received request: {request}")
-                return request, metadata
-
-            def post_create_transfer_config(self, response):
-                logging.log(f"Received response: {response}")
-                return response
-
-            def pre_delete_transfer_config(self, request, metadata):
-                logging.log(f"Received request: {request}")
-                return request, metadata
-
-            def pre_delete_transfer_run(self, request, metadata):
-                logging.log(f"Received request: {request}")
-                return request, metadata
-
-            def pre_enroll_data_sources(self, request, metadata):
-                logging.log(f"Received request: {request}")
-                return request, metadata
-
-            def pre_get_data_source(self, request, metadata):
-                logging.log(f"Received request: {request}")
-                return request, metadata
-
-            def post_get_data_source(self, response):
-                logging.log(f"Received response: {response}")
-                return response
-
-            def pre_get_transfer_config(self, request, metadata):
-                logging.log(f"Received request: {request}")
-                return request, metadata
-
-            def post_get_transfer_config(self, response):
-                logging.log(f"Received response: {response}")
-                return response
-
-            def pre_get_transfer_run(self, request, metadata):
-                logging.log(f"Received request: {request}")
-                return request, metadata
-
-            def post_get_transfer_run(self, response):
-                logging.log(f"Received response: {response}")
-                return response
-
-            def pre_list_data_sources(self, request, metadata):
-                logging.log(f"Received request: {request}")
-                return request, metadata
-
-            def post_list_data_sources(self, response):
-                logging.log(f"Received response: {response}")
-                return response
-
-            def pre_list_transfer_configs(self, request, metadata):
-                logging.log(f"Received request: {request}")
-                return request, metadata
-
-            def post_list_transfer_configs(self, response):
-                logging.log(f"Received response: {response}")
-                return response
-
-            def pre_list_transfer_logs(self, request, metadata):
-                logging.log(f"Received request: {request}")
-                return request, metadata
-
-            def post_list_transfer_logs(self, response):
-                logging.log(f"Received response: {response}")
-                return response
-
-            def pre_list_transfer_runs(self, request, metadata):
-                logging.log(f"Received request: {request}")
-                return request, metadata
-
-            def post_list_transfer_runs(self, response):
-                logging.log(f"Received response: {response}")
-                return response
-
-            def pre_schedule_transfer_runs(self, request, metadata):
-                logging.log(f"Received request: {request}")
-                return request, metadata
-
-            def post_schedule_transfer_runs(self, response):
-                logging.log(f"Received response: {response}")
-                return response
-
-            def pre_start_manual_transfer_runs(self, request, metadata):
-                logging.log(f"Received request: {request}")
-                return request, metadata
-
-            def post_start_manual_transfer_runs(self, response):
-                logging.log(f"Received response: {response}")
-                return response
-
-            def pre_unenroll_data_sources(self, request, metadata):
-                logging.log(f"Received request: {request}")
-                return request, metadata
-
-            def pre_update_transfer_config(self, request, metadata):
-                logging.log(f"Received request: {request}")
-                return request, metadata
-
-            def post_update_transfer_config(self, response):
-                logging.log(f"Received response: {response}")
-                return response
-
-        transport = DataTransferServiceRestTransport(interceptor=MyCustomDataTransferServiceInterceptor())
-        client = DataTransferServiceClient(transport=transport)
-
-
-    """
-    def pre_check_valid_creds(self, request: datatransfer.CheckValidCredsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datatransfer.CheckValidCredsRequest, Sequence[Tuple[str, Union[str, bytes]]]]:
-        """Pre-rpc interceptor for check_valid_creds
-
-        Override in a subclass to manipulate the request or metadata
-        before they are sent to the DataTransferService server.
-        """
-        return request, metadata
-
-    def post_check_valid_creds(self, response: datatransfer.CheckValidCredsResponse) -> datatransfer.CheckValidCredsResponse:
-        """Post-rpc interceptor for check_valid_creds
-
-        DEPRECATED. Please use the `post_check_valid_creds_with_metadata`
-        interceptor instead.
-
-        Override in a subclass to read or manipulate the response
-        after it is returned by the DataTransferService server but before
-        it is returned to user code. This `post_check_valid_creds` interceptor runs
-        before the `post_check_valid_creds_with_metadata` interceptor.
-        """
-        return response
-
-    def post_check_valid_creds_with_metadata(self, response: datatransfer.CheckValidCredsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datatransfer.CheckValidCredsResponse, Sequence[Tuple[str, Union[str, bytes]]]]:
-        """Post-rpc interceptor for check_valid_creds
-
-        Override in a subclass to read or manipulate the response or metadata after it
-        is returned by the DataTransferService server but before it is returned to user code.
-
-        We recommend only using this `post_check_valid_creds_with_metadata`
-        interceptor in new development instead of the `post_check_valid_creds` interceptor.
-        When both interceptors are used, this `post_check_valid_creds_with_metadata` interceptor runs after the
-        `post_check_valid_creds` interceptor. The (possibly modified) response returned by
-        `post_check_valid_creds` will be passed to
-        `post_check_valid_creds_with_metadata`.
-        """
-        return response, metadata
-
-    def pre_create_transfer_config(self, request: datatransfer.CreateTransferConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datatransfer.CreateTransferConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]:
-        """Pre-rpc interceptor for create_transfer_config
-
-        Override in a subclass to manipulate the request or metadata
-        before they are sent to the DataTransferService server.
-        """
-        return request, metadata
-
-    def post_create_transfer_config(self, response: transfer.TransferConfig) -> transfer.TransferConfig:
-        """Post-rpc interceptor for create_transfer_config
-
-        DEPRECATED. Please use the `post_create_transfer_config_with_metadata`
-        interceptor instead.
-
-        Override in a subclass to read or manipulate the response
-        after it is returned by the DataTransferService server but before
-        it is returned to user code. This `post_create_transfer_config` interceptor runs
-        before the `post_create_transfer_config_with_metadata` interceptor.
- """ - return response - - def post_create_transfer_config_with_metadata(self, response: transfer.TransferConfig, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[transfer.TransferConfig, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_transfer_config - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataTransferService server but before it is returned to user code. - - We recommend only using this `post_create_transfer_config_with_metadata` - interceptor in new development instead of the `post_create_transfer_config` interceptor. - When both interceptors are used, this `post_create_transfer_config_with_metadata` interceptor runs after the - `post_create_transfer_config` interceptor. The (possibly modified) response returned by - `post_create_transfer_config` will be passed to - `post_create_transfer_config_with_metadata`. - """ - return response, metadata - - def pre_delete_transfer_config(self, request: datatransfer.DeleteTransferConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datatransfer.DeleteTransferConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_transfer_config - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataTransferService server. - """ - return request, metadata - - def pre_delete_transfer_run(self, request: datatransfer.DeleteTransferRunRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datatransfer.DeleteTransferRunRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_transfer_run - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataTransferService server. - """ - return request, metadata - - def pre_enroll_data_sources(self, request: datatransfer.EnrollDataSourcesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datatransfer.EnrollDataSourcesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for enroll_data_sources - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataTransferService server. - """ - return request, metadata - - def pre_get_data_source(self, request: datatransfer.GetDataSourceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datatransfer.GetDataSourceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_data_source - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataTransferService server. - """ - return request, metadata - - def post_get_data_source(self, response: datatransfer.DataSource) -> datatransfer.DataSource: - """Post-rpc interceptor for get_data_source - - DEPRECATED. Please use the `post_get_data_source_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataTransferService server but before - it is returned to user code. This `post_get_data_source` interceptor runs - before the `post_get_data_source_with_metadata` interceptor. 
- """ - return response - - def post_get_data_source_with_metadata(self, response: datatransfer.DataSource, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datatransfer.DataSource, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_data_source - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataTransferService server but before it is returned to user code. - - We recommend only using this `post_get_data_source_with_metadata` - interceptor in new development instead of the `post_get_data_source` interceptor. - When both interceptors are used, this `post_get_data_source_with_metadata` interceptor runs after the - `post_get_data_source` interceptor. The (possibly modified) response returned by - `post_get_data_source` will be passed to - `post_get_data_source_with_metadata`. - """ - return response, metadata - - def pre_get_transfer_config(self, request: datatransfer.GetTransferConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datatransfer.GetTransferConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_transfer_config - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataTransferService server. - """ - return request, metadata - - def post_get_transfer_config(self, response: transfer.TransferConfig) -> transfer.TransferConfig: - """Post-rpc interceptor for get_transfer_config - - DEPRECATED. Please use the `post_get_transfer_config_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataTransferService server but before - it is returned to user code. This `post_get_transfer_config` interceptor runs - before the `post_get_transfer_config_with_metadata` interceptor. - """ - return response - - def post_get_transfer_config_with_metadata(self, response: transfer.TransferConfig, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[transfer.TransferConfig, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_transfer_config - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataTransferService server but before it is returned to user code. - - We recommend only using this `post_get_transfer_config_with_metadata` - interceptor in new development instead of the `post_get_transfer_config` interceptor. - When both interceptors are used, this `post_get_transfer_config_with_metadata` interceptor runs after the - `post_get_transfer_config` interceptor. The (possibly modified) response returned by - `post_get_transfer_config` will be passed to - `post_get_transfer_config_with_metadata`. - """ - return response, metadata - - def pre_get_transfer_run(self, request: datatransfer.GetTransferRunRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datatransfer.GetTransferRunRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_transfer_run - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataTransferService server. - """ - return request, metadata - - def post_get_transfer_run(self, response: transfer.TransferRun) -> transfer.TransferRun: - """Post-rpc interceptor for get_transfer_run - - DEPRECATED. Please use the `post_get_transfer_run_with_metadata` - interceptor instead. 
- - Override in a subclass to read or manipulate the response - after it is returned by the DataTransferService server but before - it is returned to user code. This `post_get_transfer_run` interceptor runs - before the `post_get_transfer_run_with_metadata` interceptor. - """ - return response - - def post_get_transfer_run_with_metadata(self, response: transfer.TransferRun, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[transfer.TransferRun, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_transfer_run - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataTransferService server but before it is returned to user code. - - We recommend only using this `post_get_transfer_run_with_metadata` - interceptor in new development instead of the `post_get_transfer_run` interceptor. - When both interceptors are used, this `post_get_transfer_run_with_metadata` interceptor runs after the - `post_get_transfer_run` interceptor. The (possibly modified) response returned by - `post_get_transfer_run` will be passed to - `post_get_transfer_run_with_metadata`. - """ - return response, metadata - - def pre_list_data_sources(self, request: datatransfer.ListDataSourcesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datatransfer.ListDataSourcesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_data_sources - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataTransferService server. - """ - return request, metadata - - def post_list_data_sources(self, response: datatransfer.ListDataSourcesResponse) -> datatransfer.ListDataSourcesResponse: - """Post-rpc interceptor for list_data_sources - - DEPRECATED. Please use the `post_list_data_sources_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataTransferService server but before - it is returned to user code. This `post_list_data_sources` interceptor runs - before the `post_list_data_sources_with_metadata` interceptor. - """ - return response - - def post_list_data_sources_with_metadata(self, response: datatransfer.ListDataSourcesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datatransfer.ListDataSourcesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_data_sources - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataTransferService server but before it is returned to user code. - - We recommend only using this `post_list_data_sources_with_metadata` - interceptor in new development instead of the `post_list_data_sources` interceptor. - When both interceptors are used, this `post_list_data_sources_with_metadata` interceptor runs after the - `post_list_data_sources` interceptor. The (possibly modified) response returned by - `post_list_data_sources` will be passed to - `post_list_data_sources_with_metadata`. - """ - return response, metadata - - def pre_list_transfer_configs(self, request: datatransfer.ListTransferConfigsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datatransfer.ListTransferConfigsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_transfer_configs - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataTransferService server. 
- """ - return request, metadata - - def post_list_transfer_configs(self, response: datatransfer.ListTransferConfigsResponse) -> datatransfer.ListTransferConfigsResponse: - """Post-rpc interceptor for list_transfer_configs - - DEPRECATED. Please use the `post_list_transfer_configs_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataTransferService server but before - it is returned to user code. This `post_list_transfer_configs` interceptor runs - before the `post_list_transfer_configs_with_metadata` interceptor. - """ - return response - - def post_list_transfer_configs_with_metadata(self, response: datatransfer.ListTransferConfigsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datatransfer.ListTransferConfigsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_transfer_configs - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataTransferService server but before it is returned to user code. - - We recommend only using this `post_list_transfer_configs_with_metadata` - interceptor in new development instead of the `post_list_transfer_configs` interceptor. - When both interceptors are used, this `post_list_transfer_configs_with_metadata` interceptor runs after the - `post_list_transfer_configs` interceptor. The (possibly modified) response returned by - `post_list_transfer_configs` will be passed to - `post_list_transfer_configs_with_metadata`. - """ - return response, metadata - - def pre_list_transfer_logs(self, request: datatransfer.ListTransferLogsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datatransfer.ListTransferLogsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_transfer_logs - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataTransferService server. - """ - return request, metadata - - def post_list_transfer_logs(self, response: datatransfer.ListTransferLogsResponse) -> datatransfer.ListTransferLogsResponse: - """Post-rpc interceptor for list_transfer_logs - - DEPRECATED. Please use the `post_list_transfer_logs_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataTransferService server but before - it is returned to user code. This `post_list_transfer_logs` interceptor runs - before the `post_list_transfer_logs_with_metadata` interceptor. - """ - return response - - def post_list_transfer_logs_with_metadata(self, response: datatransfer.ListTransferLogsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datatransfer.ListTransferLogsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_transfer_logs - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataTransferService server but before it is returned to user code. - - We recommend only using this `post_list_transfer_logs_with_metadata` - interceptor in new development instead of the `post_list_transfer_logs` interceptor. - When both interceptors are used, this `post_list_transfer_logs_with_metadata` interceptor runs after the - `post_list_transfer_logs` interceptor. The (possibly modified) response returned by - `post_list_transfer_logs` will be passed to - `post_list_transfer_logs_with_metadata`. 
- """ - return response, metadata - - def pre_list_transfer_runs(self, request: datatransfer.ListTransferRunsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datatransfer.ListTransferRunsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_transfer_runs - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataTransferService server. - """ - return request, metadata - - def post_list_transfer_runs(self, response: datatransfer.ListTransferRunsResponse) -> datatransfer.ListTransferRunsResponse: - """Post-rpc interceptor for list_transfer_runs - - DEPRECATED. Please use the `post_list_transfer_runs_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataTransferService server but before - it is returned to user code. This `post_list_transfer_runs` interceptor runs - before the `post_list_transfer_runs_with_metadata` interceptor. - """ - return response - - def post_list_transfer_runs_with_metadata(self, response: datatransfer.ListTransferRunsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datatransfer.ListTransferRunsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_transfer_runs - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataTransferService server but before it is returned to user code. - - We recommend only using this `post_list_transfer_runs_with_metadata` - interceptor in new development instead of the `post_list_transfer_runs` interceptor. - When both interceptors are used, this `post_list_transfer_runs_with_metadata` interceptor runs after the - `post_list_transfer_runs` interceptor. The (possibly modified) response returned by - `post_list_transfer_runs` will be passed to - `post_list_transfer_runs_with_metadata`. - """ - return response, metadata - - def pre_schedule_transfer_runs(self, request: datatransfer.ScheduleTransferRunsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datatransfer.ScheduleTransferRunsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for schedule_transfer_runs - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataTransferService server. - """ - return request, metadata - - def post_schedule_transfer_runs(self, response: datatransfer.ScheduleTransferRunsResponse) -> datatransfer.ScheduleTransferRunsResponse: - """Post-rpc interceptor for schedule_transfer_runs - - DEPRECATED. Please use the `post_schedule_transfer_runs_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataTransferService server but before - it is returned to user code. This `post_schedule_transfer_runs` interceptor runs - before the `post_schedule_transfer_runs_with_metadata` interceptor. - """ - return response - - def post_schedule_transfer_runs_with_metadata(self, response: datatransfer.ScheduleTransferRunsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datatransfer.ScheduleTransferRunsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for schedule_transfer_runs - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataTransferService server but before it is returned to user code. 
- - We recommend only using this `post_schedule_transfer_runs_with_metadata` - interceptor in new development instead of the `post_schedule_transfer_runs` interceptor. - When both interceptors are used, this `post_schedule_transfer_runs_with_metadata` interceptor runs after the - `post_schedule_transfer_runs` interceptor. The (possibly modified) response returned by - `post_schedule_transfer_runs` will be passed to - `post_schedule_transfer_runs_with_metadata`. - """ - return response, metadata - - def pre_start_manual_transfer_runs(self, request: datatransfer.StartManualTransferRunsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datatransfer.StartManualTransferRunsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for start_manual_transfer_runs - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataTransferService server. - """ - return request, metadata - - def post_start_manual_transfer_runs(self, response: datatransfer.StartManualTransferRunsResponse) -> datatransfer.StartManualTransferRunsResponse: - """Post-rpc interceptor for start_manual_transfer_runs - - DEPRECATED. Please use the `post_start_manual_transfer_runs_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataTransferService server but before - it is returned to user code. This `post_start_manual_transfer_runs` interceptor runs - before the `post_start_manual_transfer_runs_with_metadata` interceptor. - """ - return response - - def post_start_manual_transfer_runs_with_metadata(self, response: datatransfer.StartManualTransferRunsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datatransfer.StartManualTransferRunsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for start_manual_transfer_runs - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataTransferService server but before it is returned to user code. - - We recommend only using this `post_start_manual_transfer_runs_with_metadata` - interceptor in new development instead of the `post_start_manual_transfer_runs` interceptor. - When both interceptors are used, this `post_start_manual_transfer_runs_with_metadata` interceptor runs after the - `post_start_manual_transfer_runs` interceptor. The (possibly modified) response returned by - `post_start_manual_transfer_runs` will be passed to - `post_start_manual_transfer_runs_with_metadata`. - """ - return response, metadata - - def pre_unenroll_data_sources(self, request: datatransfer.UnenrollDataSourcesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datatransfer.UnenrollDataSourcesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for unenroll_data_sources - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataTransferService server. - """ - return request, metadata - - def pre_update_transfer_config(self, request: datatransfer.UpdateTransferConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[datatransfer.UpdateTransferConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_transfer_config - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataTransferService server. 
- """ - return request, metadata - - def post_update_transfer_config(self, response: transfer.TransferConfig) -> transfer.TransferConfig: - """Post-rpc interceptor for update_transfer_config - - DEPRECATED. Please use the `post_update_transfer_config_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the DataTransferService server but before - it is returned to user code. This `post_update_transfer_config` interceptor runs - before the `post_update_transfer_config_with_metadata` interceptor. - """ - return response - - def post_update_transfer_config_with_metadata(self, response: transfer.TransferConfig, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[transfer.TransferConfig, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_transfer_config - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the DataTransferService server but before it is returned to user code. - - We recommend only using this `post_update_transfer_config_with_metadata` - interceptor in new development instead of the `post_update_transfer_config` interceptor. - When both interceptors are used, this `post_update_transfer_config_with_metadata` interceptor runs after the - `post_update_transfer_config` interceptor. The (possibly modified) response returned by - `post_update_transfer_config` will be passed to - `post_update_transfer_config_with_metadata`. - """ - return response, metadata - - def pre_get_location( - self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_location - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataTransferService server. - """ - return request, metadata - - def post_get_location( - self, response: locations_pb2.Location - ) -> locations_pb2.Location: - """Post-rpc interceptor for get_location - - Override in a subclass to manipulate the response - after it is returned by the DataTransferService server but before - it is returned to user code. - """ - return response - - def pre_list_locations( - self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] - ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_locations - - Override in a subclass to manipulate the request or metadata - before they are sent to the DataTransferService server. - """ - return request, metadata - - def post_list_locations( - self, response: locations_pb2.ListLocationsResponse - ) -> locations_pb2.ListLocationsResponse: - """Post-rpc interceptor for list_locations - - Override in a subclass to manipulate the response - after it is returned by the DataTransferService server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class DataTransferServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: DataTransferServiceRestInterceptor - - -class DataTransferServiceRestTransport(_BaseDataTransferServiceRestTransport): - """REST backend synchronous transport for DataTransferService. - - This API allows users to manage their data transfers into - BigQuery. 
-
-    This class defines the same methods as the primary client, so the
-    primary client can load the underlying transport implementation
-    and call it.
-
-    It sends JSON representations of protocol buffers over HTTP/1.1
-    """
-
-    def __init__(self, *,
-            host: str = 'bigquerydatatransfer.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            client_cert_source_for_mtls: Optional[Callable[[
-                ], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            url_scheme: str = 'https',
-            interceptor: Optional[DataTransferServiceRestInterceptor] = None,
-            api_audience: Optional[str] = None,
-            ) -> None:
-        """Instantiate the transport.
-
-        Args:
-            host (Optional[str]):
-                The hostname to connect to (default: 'bigquerydatatransfer.googleapis.com').
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if ``channel`` is provided.
-            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
-                ignored if ``channel`` is provided.
-            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
-                certificate to configure mutual TLS HTTP channel. It is ignored
-                if ``channel`` is provided.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you are developing
-                your own client library.
-            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
-                be used for service account credentials.
-            url_scheme: the protocol scheme for the API endpoint. Normally
-                "https", but for testing or local servers,
-                "http" can be specified.
-        """
-        # Run the base constructor
-        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
- # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - url_scheme=url_scheme, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or DataTransferServiceRestInterceptor() - self._prep_wrapped_messages(client_info) - - class _CheckValidCreds(_BaseDataTransferServiceRestTransport._BaseCheckValidCreds, DataTransferServiceRestStub): - def __hash__(self): - return hash("DataTransferServiceRestTransport.CheckValidCreds") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: datatransfer.CheckValidCredsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> datatransfer.CheckValidCredsResponse: - r"""Call the check valid creds method over HTTP. - - Args: - request (~.datatransfer.CheckValidCredsRequest): - The request object. A request to determine whether the - user has valid credentials. This method - is used to limit the number of OAuth - popups in the user interface. The user - id is inferred from the API call - context. If the data source has the - Google+ authorization type, this method - returns false, as it cannot be - determined whether the credentials are - already valid merely based on the user - id. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.datatransfer.CheckValidCredsResponse: - A response indicating whether the - credentials exist and are valid. 
- - """ - - http_options = _BaseDataTransferServiceRestTransport._BaseCheckValidCreds._get_http_options() - - request, metadata = self._interceptor.pre_check_valid_creds(request, metadata) - transcoded_request = _BaseDataTransferServiceRestTransport._BaseCheckValidCreds._get_transcoded_request(http_options, request) - - body = _BaseDataTransferServiceRestTransport._BaseCheckValidCreds._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDataTransferServiceRestTransport._BaseCheckValidCreds._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.datatransfer_v1.DataTransferServiceClient.CheckValidCreds", - extra = { - "serviceName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "rpcName": "CheckValidCreds", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataTransferServiceRestTransport._CheckValidCreds._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = datatransfer.CheckValidCredsResponse() - pb_resp = datatransfer.CheckValidCredsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_check_valid_creds(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_check_valid_creds_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = datatransfer.CheckValidCredsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.datatransfer_v1.DataTransferServiceClient.check_valid_creds", - extra = { - "serviceName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "rpcName": "CheckValidCreds", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateTransferConfig(_BaseDataTransferServiceRestTransport._BaseCreateTransferConfig, DataTransferServiceRestStub): - def __hash__(self): - return hash("DataTransferServiceRestTransport.CreateTransferConfig") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) 
- return response - - def __call__(self, - request: datatransfer.CreateTransferConfigRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> transfer.TransferConfig: - r"""Call the create transfer config method over HTTP. - - Args: - request (~.datatransfer.CreateTransferConfigRequest): - The request object. A request to create a data transfer configuration. If - new credentials are needed for this transfer - configuration, authorization info must be provided. If - authorization info is provided, the transfer - configuration will be associated with the user id - corresponding to the authorization info. Otherwise, the - transfer configuration will be associated with the - calling user. - - When using a cross project service account for creating - a transfer config, you must enable cross project service - account usage. For more information, see `Disable - attachment of service accounts to resources in other - projects `__. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.transfer.TransferConfig: - Represents a data transfer configuration. A transfer - configuration contains all metadata needed to perform a - data transfer. For example, ``destination_dataset_id`` - specifies where data should be stored. When a new - transfer configuration is created, the specified - ``destination_dataset_id`` is created when needed and - shared with the appropriate data source service account. 
- - """ - - http_options = _BaseDataTransferServiceRestTransport._BaseCreateTransferConfig._get_http_options() - - request, metadata = self._interceptor.pre_create_transfer_config(request, metadata) - transcoded_request = _BaseDataTransferServiceRestTransport._BaseCreateTransferConfig._get_transcoded_request(http_options, request) - - body = _BaseDataTransferServiceRestTransport._BaseCreateTransferConfig._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDataTransferServiceRestTransport._BaseCreateTransferConfig._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.datatransfer_v1.DataTransferServiceClient.CreateTransferConfig", - extra = { - "serviceName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "rpcName": "CreateTransferConfig", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataTransferServiceRestTransport._CreateTransferConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = transfer.TransferConfig() - pb_resp = transfer.TransferConfig.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_transfer_config(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_transfer_config_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = transfer.TransferConfig.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.datatransfer_v1.DataTransferServiceClient.create_transfer_config", - extra = { - "serviceName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "rpcName": "CreateTransferConfig", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteTransferConfig(_BaseDataTransferServiceRestTransport._BaseDeleteTransferConfig, DataTransferServiceRestStub): - def __hash__(self): - return hash("DataTransferServiceRestTransport.DeleteTransferConfig") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, 
strict=True), - ) - return response - - def __call__(self, - request: datatransfer.DeleteTransferConfigRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the delete transfer config method over HTTP. - - Args: - request (~.datatransfer.DeleteTransferConfigRequest): - The request object. A request to delete data transfer - information. All associated transfer - runs and log messages will be deleted as - well. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - - http_options = _BaseDataTransferServiceRestTransport._BaseDeleteTransferConfig._get_http_options() - - request, metadata = self._interceptor.pre_delete_transfer_config(request, metadata) - transcoded_request = _BaseDataTransferServiceRestTransport._BaseDeleteTransferConfig._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataTransferServiceRestTransport._BaseDeleteTransferConfig._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.datatransfer_v1.DataTransferServiceClient.DeleteTransferConfig", - extra = { - "serviceName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "rpcName": "DeleteTransferConfig", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataTransferServiceRestTransport._DeleteTransferConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
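- # from_http_response maps the status code and error payload onto the
- # matching GoogleAPICallError subclass (e.g. NotFound for a 404).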
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _DeleteTransferRun(_BaseDataTransferServiceRestTransport._BaseDeleteTransferRun, DataTransferServiceRestStub): - def __hash__(self): - return hash("DataTransferServiceRestTransport.DeleteTransferRun") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: datatransfer.DeleteTransferRunRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the delete transfer run method over HTTP. - - Args: - request (~.datatransfer.DeleteTransferRunRequest): - The request object. A request to delete data transfer run - information. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - - http_options = _BaseDataTransferServiceRestTransport._BaseDeleteTransferRun._get_http_options() - - request, metadata = self._interceptor.pre_delete_transfer_run(request, metadata) - transcoded_request = _BaseDataTransferServiceRestTransport._BaseDeleteTransferRun._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataTransferServiceRestTransport._BaseDeleteTransferRun._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.datatransfer_v1.DataTransferServiceClient.DeleteTransferRun", - extra = { - "serviceName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "rpcName": "DeleteTransferRun", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataTransferServiceRestTransport._DeleteTransferRun._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
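- # Like DeleteTransferConfig above, this RPC evidently maps to
- # google.protobuf.Empty: on success nothing is parsed and the call
- # simply returns None.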
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _EnrollDataSources(_BaseDataTransferServiceRestTransport._BaseEnrollDataSources, DataTransferServiceRestStub): - def __hash__(self): - return hash("DataTransferServiceRestTransport.EnrollDataSources") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: datatransfer.EnrollDataSourcesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the enroll data sources method over HTTP. - - Args: - request (~.datatransfer.EnrollDataSourcesRequest): - The request object. A request to enroll a set of data sources so they are - visible in the BigQuery UI's ``Transfer`` tab. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - - http_options = _BaseDataTransferServiceRestTransport._BaseEnrollDataSources._get_http_options() - - request, metadata = self._interceptor.pre_enroll_data_sources(request, metadata) - transcoded_request = _BaseDataTransferServiceRestTransport._BaseEnrollDataSources._get_transcoded_request(http_options, request) - - body = _BaseDataTransferServiceRestTransport._BaseEnrollDataSources._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDataTransferServiceRestTransport._BaseEnrollDataSources._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.datatransfer_v1.DataTransferServiceClient.EnrollDataSources", - extra = { - "serviceName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "rpcName": "EnrollDataSources", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataTransferServiceRestTransport._EnrollDataSources._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
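- # Unlike the delete RPCs above, EnrollDataSources carries a JSON
- # request body, so the send above passes data=body to the session.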
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _GetDataSource(_BaseDataTransferServiceRestTransport._BaseGetDataSource, DataTransferServiceRestStub): - def __hash__(self): - return hash("DataTransferServiceRestTransport.GetDataSource") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: datatransfer.GetDataSourceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> datatransfer.DataSource: - r"""Call the get data source method over HTTP. - - Args: - request (~.datatransfer.GetDataSourceRequest): - The request object. A request to get data source info. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.datatransfer.DataSource: - Defines the properties and custom - parameters for a data source. - - """ - - http_options = _BaseDataTransferServiceRestTransport._BaseGetDataSource._get_http_options() - - request, metadata = self._interceptor.pre_get_data_source(request, metadata) - transcoded_request = _BaseDataTransferServiceRestTransport._BaseGetDataSource._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataTransferServiceRestTransport._BaseGetDataSource._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.datatransfer_v1.DataTransferServiceClient.GetDataSource", - extra = { - "serviceName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "rpcName": "GetDataSource", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataTransferServiceRestTransport._GetDataSource._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
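- # On success the JSON payload is parsed into a fresh DataSource proto
- # below; ignore_unknown_fields=True keeps older clients tolerant of
- # fields added by newer servers.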
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = datatransfer.DataSource() - pb_resp = datatransfer.DataSource.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_data_source(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_data_source_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = datatransfer.DataSource.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.datatransfer_v1.DataTransferServiceClient.get_data_source", - extra = { - "serviceName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "rpcName": "GetDataSource", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetTransferConfig(_BaseDataTransferServiceRestTransport._BaseGetTransferConfig, DataTransferServiceRestStub): - def __hash__(self): - return hash("DataTransferServiceRestTransport.GetTransferConfig") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: datatransfer.GetTransferConfigRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> transfer.TransferConfig: - r"""Call the get transfer config method over HTTP. - - Args: - request (~.datatransfer.GetTransferConfigRequest): - The request object. A request to get data transfer - information. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.transfer.TransferConfig: - Represents a data transfer configuration. A transfer - configuration contains all metadata needed to perform a - data transfer. For example, ``destination_dataset_id`` - specifies where data should be stored. When a new - transfer configuration is created, the specified - ``destination_dataset_id`` is created when needed and - shared with the appropriate data source service account. 
- - """ - - http_options = _BaseDataTransferServiceRestTransport._BaseGetTransferConfig._get_http_options() - - request, metadata = self._interceptor.pre_get_transfer_config(request, metadata) - transcoded_request = _BaseDataTransferServiceRestTransport._BaseGetTransferConfig._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataTransferServiceRestTransport._BaseGetTransferConfig._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.datatransfer_v1.DataTransferServiceClient.GetTransferConfig", - extra = { - "serviceName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "rpcName": "GetTransferConfig", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataTransferServiceRestTransport._GetTransferConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = transfer.TransferConfig() - pb_resp = transfer.TransferConfig.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_transfer_config(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_transfer_config_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = transfer.TransferConfig.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.datatransfer_v1.DataTransferServiceClient.get_transfer_config", - extra = { - "serviceName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "rpcName": "GetTransferConfig", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetTransferRun(_BaseDataTransferServiceRestTransport._BaseGetTransferRun, DataTransferServiceRestStub): - def __hash__(self): - return hash("DataTransferServiceRestTransport.GetTransferRun") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: datatransfer.GetTransferRunRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: 
Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> transfer.TransferRun: - r"""Call the get transfer run method over HTTP. - - Args: - request (~.datatransfer.GetTransferRunRequest): - The request object. A request to get data transfer run - information. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.transfer.TransferRun: - Represents a data transfer run. - """ - - http_options = _BaseDataTransferServiceRestTransport._BaseGetTransferRun._get_http_options() - - request, metadata = self._interceptor.pre_get_transfer_run(request, metadata) - transcoded_request = _BaseDataTransferServiceRestTransport._BaseGetTransferRun._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataTransferServiceRestTransport._BaseGetTransferRun._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.datatransfer_v1.DataTransferServiceClient.GetTransferRun", - extra = { - "serviceName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "rpcName": "GetTransferRun", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataTransferServiceRestTransport._GetTransferRun._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
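- # After parsing, the post_get_transfer_run and
- # post_get_transfer_run_with_metadata interceptor hooks run, letting a
- # custom DataTransferServiceRestInterceptor rewrite the response before
- # the caller sees it.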
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = transfer.TransferRun() - pb_resp = transfer.TransferRun.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_transfer_run(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_transfer_run_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = transfer.TransferRun.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.datatransfer_v1.DataTransferServiceClient.get_transfer_run", - extra = { - "serviceName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "rpcName": "GetTransferRun", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListDataSources(_BaseDataTransferServiceRestTransport._BaseListDataSources, DataTransferServiceRestStub): - def __hash__(self): - return hash("DataTransferServiceRestTransport.ListDataSources") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: datatransfer.ListDataSourcesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> datatransfer.ListDataSourcesResponse: - r"""Call the list data sources method over HTTP. - - Args: - request (~.datatransfer.ListDataSourcesRequest): - The request object. Request to list supported data - sources and their data transfer - settings. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.datatransfer.ListDataSourcesResponse: - Returns list of supported data - sources and their metadata. 
- - """ - - http_options = _BaseDataTransferServiceRestTransport._BaseListDataSources._get_http_options() - - request, metadata = self._interceptor.pre_list_data_sources(request, metadata) - transcoded_request = _BaseDataTransferServiceRestTransport._BaseListDataSources._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataTransferServiceRestTransport._BaseListDataSources._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.datatransfer_v1.DataTransferServiceClient.ListDataSources", - extra = { - "serviceName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "rpcName": "ListDataSources", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataTransferServiceRestTransport._ListDataSources._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = datatransfer.ListDataSourcesResponse() - pb_resp = datatransfer.ListDataSourcesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_data_sources(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_data_sources_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = datatransfer.ListDataSourcesResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.datatransfer_v1.DataTransferServiceClient.list_data_sources", - extra = { - "serviceName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "rpcName": "ListDataSources", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListTransferConfigs(_BaseDataTransferServiceRestTransport._BaseListTransferConfigs, DataTransferServiceRestStub): - def __hash__(self): - return hash("DataTransferServiceRestTransport.ListTransferConfigs") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: datatransfer.ListTransferConfigsRequest, *, - retry: 
OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> datatransfer.ListTransferConfigsResponse: - r"""Call the list transfer configs method over HTTP. - - Args: - request (~.datatransfer.ListTransferConfigsRequest): - The request object. A request to list data transfers - configured for a BigQuery project. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.datatransfer.ListTransferConfigsResponse: - The returned list of pipelines in the - project. - - """ - - http_options = _BaseDataTransferServiceRestTransport._BaseListTransferConfigs._get_http_options() - - request, metadata = self._interceptor.pre_list_transfer_configs(request, metadata) - transcoded_request = _BaseDataTransferServiceRestTransport._BaseListTransferConfigs._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataTransferServiceRestTransport._BaseListTransferConfigs._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.datatransfer_v1.DataTransferServiceClient.ListTransferConfigs", - extra = { - "serviceName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "rpcName": "ListTransferConfigs", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataTransferServiceRestTransport._ListTransferConfigs._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
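- # This call returns a single page of results; at the client layer it is
- # typically wrapped in a pager that follows next_page_token across pages.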
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = datatransfer.ListTransferConfigsResponse() - pb_resp = datatransfer.ListTransferConfigsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_transfer_configs(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_transfer_configs_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = datatransfer.ListTransferConfigsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.datatransfer_v1.DataTransferServiceClient.list_transfer_configs", - extra = { - "serviceName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "rpcName": "ListTransferConfigs", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListTransferLogs(_BaseDataTransferServiceRestTransport._BaseListTransferLogs, DataTransferServiceRestStub): - def __hash__(self): - return hash("DataTransferServiceRestTransport.ListTransferLogs") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: datatransfer.ListTransferLogsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> datatransfer.ListTransferLogsResponse: - r"""Call the list transfer logs method over HTTP. - - Args: - request (~.datatransfer.ListTransferLogsRequest): - The request object. A request to get user facing log - messages associated with data transfer - run. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.datatransfer.ListTransferLogsResponse: - The returned list transfer run - messages. 
- - """ - - http_options = _BaseDataTransferServiceRestTransport._BaseListTransferLogs._get_http_options() - - request, metadata = self._interceptor.pre_list_transfer_logs(request, metadata) - transcoded_request = _BaseDataTransferServiceRestTransport._BaseListTransferLogs._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataTransferServiceRestTransport._BaseListTransferLogs._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.datatransfer_v1.DataTransferServiceClient.ListTransferLogs", - extra = { - "serviceName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "rpcName": "ListTransferLogs", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataTransferServiceRestTransport._ListTransferLogs._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = datatransfer.ListTransferLogsResponse() - pb_resp = datatransfer.ListTransferLogsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_transfer_logs(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_transfer_logs_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = datatransfer.ListTransferLogsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.datatransfer_v1.DataTransferServiceClient.list_transfer_logs", - extra = { - "serviceName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "rpcName": "ListTransferLogs", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListTransferRuns(_BaseDataTransferServiceRestTransport._BaseListTransferRuns, DataTransferServiceRestStub): - def __hash__(self): - return hash("DataTransferServiceRestTransport.ListTransferRuns") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: datatransfer.ListTransferRunsRequest, *, - retry: 
OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> datatransfer.ListTransferRunsResponse: - r"""Call the list transfer runs method over HTTP. - - Args: - request (~.datatransfer.ListTransferRunsRequest): - The request object. A request to list data transfer runs. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.datatransfer.ListTransferRunsResponse: - The returned list of pipelines in the - project. - - """ - - http_options = _BaseDataTransferServiceRestTransport._BaseListTransferRuns._get_http_options() - - request, metadata = self._interceptor.pre_list_transfer_runs(request, metadata) - transcoded_request = _BaseDataTransferServiceRestTransport._BaseListTransferRuns._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataTransferServiceRestTransport._BaseListTransferRuns._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.datatransfer_v1.DataTransferServiceClient.ListTransferRuns", - extra = { - "serviceName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "rpcName": "ListTransferRuns", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataTransferServiceRestTransport._ListTransferRuns._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
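- # The DEBUG logging above is gated on CLIENT_LOGGING_SUPPORTED and the
- # logger's effective level, so it adds no overhead unless logging for
- # this module has been explicitly enabled.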
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = datatransfer.ListTransferRunsResponse() - pb_resp = datatransfer.ListTransferRunsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_transfer_runs(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_transfer_runs_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = datatransfer.ListTransferRunsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.datatransfer_v1.DataTransferServiceClient.list_transfer_runs", - extra = { - "serviceName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "rpcName": "ListTransferRuns", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ScheduleTransferRuns(_BaseDataTransferServiceRestTransport._BaseScheduleTransferRuns, DataTransferServiceRestStub): - def __hash__(self): - return hash("DataTransferServiceRestTransport.ScheduleTransferRuns") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: datatransfer.ScheduleTransferRunsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> datatransfer.ScheduleTransferRunsResponse: - r"""Call the schedule transfer runs method over HTTP. - - Args: - request (~.datatransfer.ScheduleTransferRunsRequest): - The request object. A request to schedule transfer runs - for a time range. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.datatransfer.ScheduleTransferRunsResponse: - A response to schedule transfer runs - for a time range. 
- - """ - - http_options = _BaseDataTransferServiceRestTransport._BaseScheduleTransferRuns._get_http_options() - - request, metadata = self._interceptor.pre_schedule_transfer_runs(request, metadata) - transcoded_request = _BaseDataTransferServiceRestTransport._BaseScheduleTransferRuns._get_transcoded_request(http_options, request) - - body = _BaseDataTransferServiceRestTransport._BaseScheduleTransferRuns._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDataTransferServiceRestTransport._BaseScheduleTransferRuns._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.datatransfer_v1.DataTransferServiceClient.ScheduleTransferRuns", - extra = { - "serviceName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "rpcName": "ScheduleTransferRuns", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataTransferServiceRestTransport._ScheduleTransferRuns._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = datatransfer.ScheduleTransferRunsResponse() - pb_resp = datatransfer.ScheduleTransferRunsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_schedule_transfer_runs(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_schedule_transfer_runs_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = datatransfer.ScheduleTransferRunsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.datatransfer_v1.DataTransferServiceClient.schedule_transfer_runs", - extra = { - "serviceName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "rpcName": "ScheduleTransferRuns", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _StartManualTransferRuns(_BaseDataTransferServiceRestTransport._BaseStartManualTransferRuns, DataTransferServiceRestStub): - def __hash__(self): - return hash("DataTransferServiceRestTransport.StartManualTransferRuns") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - 
params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: datatransfer.StartManualTransferRunsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> datatransfer.StartManualTransferRunsResponse: - r"""Call the start manual transfer - runs method over HTTP. - - Args: - request (~.datatransfer.StartManualTransferRunsRequest): - The request object. A request to start manual transfer - runs. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.datatransfer.StartManualTransferRunsResponse: - A response to start manual transfer - runs. - - """ - - http_options = _BaseDataTransferServiceRestTransport._BaseStartManualTransferRuns._get_http_options() - - request, metadata = self._interceptor.pre_start_manual_transfer_runs(request, metadata) - transcoded_request = _BaseDataTransferServiceRestTransport._BaseStartManualTransferRuns._get_transcoded_request(http_options, request) - - body = _BaseDataTransferServiceRestTransport._BaseStartManualTransferRuns._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDataTransferServiceRestTransport._BaseStartManualTransferRuns._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.datatransfer_v1.DataTransferServiceClient.StartManualTransferRuns", - extra = { - "serviceName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "rpcName": "StartManualTransferRuns", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataTransferServiceRestTransport._StartManualTransferRuns._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
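- # The raw response headers are later flattened to (str, str) pairs and
- # passed, together with the parsed response, to the
- # post_start_manual_transfer_runs_with_metadata hook.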
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = datatransfer.StartManualTransferRunsResponse() - pb_resp = datatransfer.StartManualTransferRunsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_start_manual_transfer_runs(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_start_manual_transfer_runs_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = datatransfer.StartManualTransferRunsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.datatransfer_v1.DataTransferServiceClient.start_manual_transfer_runs", - extra = { - "serviceName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "rpcName": "StartManualTransferRuns", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UnenrollDataSources(_BaseDataTransferServiceRestTransport._BaseUnenrollDataSources, DataTransferServiceRestStub): - def __hash__(self): - return hash("DataTransferServiceRestTransport.UnenrollDataSources") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: datatransfer.UnenrollDataSourcesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the unenroll data sources method over HTTP. - - Args: - request (~.datatransfer.UnenrollDataSourcesRequest): - The request object. A request to unenroll a set of data sources so they are - no longer visible in the BigQuery UI's ``Transfer`` tab. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - - http_options = _BaseDataTransferServiceRestTransport._BaseUnenrollDataSources._get_http_options() - - request, metadata = self._interceptor.pre_unenroll_data_sources(request, metadata) - transcoded_request = _BaseDataTransferServiceRestTransport._BaseUnenrollDataSources._get_transcoded_request(http_options, request) - - body = _BaseDataTransferServiceRestTransport._BaseUnenrollDataSources._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDataTransferServiceRestTransport._BaseUnenrollDataSources._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.datatransfer_v1.DataTransferServiceClient.UnenrollDataSources", - extra = { - "serviceName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "rpcName": "UnenrollDataSources", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataTransferServiceRestTransport._UnenrollDataSources._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _UpdateTransferConfig(_BaseDataTransferServiceRestTransport._BaseUpdateTransferConfig, DataTransferServiceRestStub): - def __hash__(self): - return hash("DataTransferServiceRestTransport.UpdateTransferConfig") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: datatransfer.UpdateTransferConfigRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> transfer.TransferConfig: - r"""Call the update transfer config method over HTTP. - - Args: - request (~.datatransfer.UpdateTransferConfigRequest): - The request object. A request to update a transfer configuration. To update - the user id of the transfer configuration, authorization - info needs to be provided. - - When using a cross project service account for updating - a transfer config, you must enable cross project service - account usage. For more information, see `Disable - attachment of service accounts to resources in other - projects `__. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.transfer.TransferConfig: - Represents a data transfer configuration. A transfer - configuration contains all metadata needed to perform a - data transfer. For example, ``destination_dataset_id`` - specifies where data should be stored. When a new - transfer configuration is created, the specified - ``destination_dataset_id`` is created when needed and - shared with the appropriate data source service account. - - """ - - http_options = _BaseDataTransferServiceRestTransport._BaseUpdateTransferConfig._get_http_options() - - request, metadata = self._interceptor.pre_update_transfer_config(request, metadata) - transcoded_request = _BaseDataTransferServiceRestTransport._BaseUpdateTransferConfig._get_transcoded_request(http_options, request) - - body = _BaseDataTransferServiceRestTransport._BaseUpdateTransferConfig._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseDataTransferServiceRestTransport._BaseUpdateTransferConfig._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.datatransfer_v1.DataTransferServiceClient.UpdateTransferConfig", - extra = { - "serviceName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "rpcName": "UpdateTransferConfig", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataTransferServiceRestTransport._UpdateTransferConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
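- # A non-2xx status short-circuits here; on success the JSON body is parsed into a TransferConfig proto below and routed through the post-RPC interceptor hooks before being returned.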
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = transfer.TransferConfig() - pb_resp = transfer.TransferConfig.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_transfer_config(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_transfer_config_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = transfer.TransferConfig.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.datatransfer_v1.DataTransferServiceClient.update_transfer_config", - extra = { - "serviceName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "rpcName": "UpdateTransferConfig", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - @property - def check_valid_creds(self) -> Callable[ - [datatransfer.CheckValidCredsRequest], - datatransfer.CheckValidCredsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CheckValidCreds(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_transfer_config(self) -> Callable[ - [datatransfer.CreateTransferConfigRequest], - transfer.TransferConfig]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateTransferConfig(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_transfer_config(self) -> Callable[ - [datatransfer.DeleteTransferConfigRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteTransferConfig(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_transfer_run(self) -> Callable[ - [datatransfer.DeleteTransferRunRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteTransferRun(self._session, self._host, self._interceptor) # type: ignore - - @property - def enroll_data_sources(self) -> Callable[ - [datatransfer.EnrollDataSourcesRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._EnrollDataSources(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_data_source(self) -> Callable[ - [datatransfer.GetDataSourceRequest], - datatransfer.DataSource]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._GetDataSource(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_transfer_config(self) -> Callable[ - [datatransfer.GetTransferConfigRequest], - transfer.TransferConfig]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetTransferConfig(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_transfer_run(self) -> Callable[ - [datatransfer.GetTransferRunRequest], - transfer.TransferRun]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetTransferRun(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_data_sources(self) -> Callable[ - [datatransfer.ListDataSourcesRequest], - datatransfer.ListDataSourcesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListDataSources(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_transfer_configs(self) -> Callable[ - [datatransfer.ListTransferConfigsRequest], - datatransfer.ListTransferConfigsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListTransferConfigs(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_transfer_logs(self) -> Callable[ - [datatransfer.ListTransferLogsRequest], - datatransfer.ListTransferLogsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListTransferLogs(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_transfer_runs(self) -> Callable[ - [datatransfer.ListTransferRunsRequest], - datatransfer.ListTransferRunsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListTransferRuns(self._session, self._host, self._interceptor) # type: ignore - - @property - def schedule_transfer_runs(self) -> Callable[ - [datatransfer.ScheduleTransferRunsRequest], - datatransfer.ScheduleTransferRunsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ScheduleTransferRuns(self._session, self._host, self._interceptor) # type: ignore - - @property - def start_manual_transfer_runs(self) -> Callable[ - [datatransfer.StartManualTransferRunsRequest], - datatransfer.StartManualTransferRunsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._StartManualTransferRuns(self._session, self._host, self._interceptor) # type: ignore - - @property - def unenroll_data_sources(self) -> Callable[ - [datatransfer.UnenrollDataSourcesRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._UnenrollDataSources(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_transfer_config(self) -> Callable[ - [datatransfer.UpdateTransferConfigRequest], - transfer.TransferConfig]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateTransferConfig(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_location(self): - return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore - - class _GetLocation(_BaseDataTransferServiceRestTransport._BaseGetLocation, DataTransferServiceRestStub): - def __hash__(self): - return hash("DataTransferServiceRestTransport.GetLocation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: locations_pb2.GetLocationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.Location: - - r"""Call the get location method over HTTP. - - Args: - request (locations_pb2.GetLocationRequest): - The request object for GetLocation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - locations_pb2.Location: Response from GetLocation method. 
- """ - - http_options = _BaseDataTransferServiceRestTransport._BaseGetLocation._get_http_options() - - request, metadata = self._interceptor.pre_get_location(request, metadata) - transcoded_request = _BaseDataTransferServiceRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataTransferServiceRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.datatransfer_v1.DataTransferServiceClient.GetLocation", - extra = { - "serviceName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "rpcName": "GetLocation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataTransferServiceRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = locations_pb2.Location() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_get_location(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.datatransfer_v1.DataTransferServiceAsyncClient.GetLocation", - extra = { - "serviceName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "rpcName": "GetLocation", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def list_locations(self): - return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore - - class _ListLocations(_BaseDataTransferServiceRestTransport._BaseListLocations, DataTransferServiceRestStub): - def __hash__(self): - return hash("DataTransferServiceRestTransport.ListLocations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: locations_pb2.ListLocationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.ListLocationsResponse: - - r"""Call the list locations method over HTTP. 
- - Args: - request (locations_pb2.ListLocationsRequest): - The request object for ListLocations method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - locations_pb2.ListLocationsResponse: Response from ListLocations method. - """ - - http_options = _BaseDataTransferServiceRestTransport._BaseListLocations._get_http_options() - - request, metadata = self._interceptor.pre_list_locations(request, metadata) - transcoded_request = _BaseDataTransferServiceRestTransport._BaseListLocations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseDataTransferServiceRestTransport._BaseListLocations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.datatransfer_v1.DataTransferServiceClient.ListLocations", - extra = { - "serviceName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "rpcName": "ListLocations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = DataTransferServiceRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
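- # locations_pb2 messages are plain protobuf (not proto-plus wrappers), so the response body below is decoded and parsed with json_format.Parse rather than through a pb() helper.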
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - content = response.content.decode("utf-8") - resp = locations_pb2.ListLocationsResponse() - resp = json_format.Parse(content, resp) - resp = self._interceptor.post_list_locations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = json_format.MessageToJson(resp) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.datatransfer_v1.DataTransferServiceAsyncClient.ListLocations", - extra = { - "serviceName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "rpcName": "ListLocations", - "httpResponse": http_response, - "metadata": http_response["headers"], - }, - ) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'DataTransferServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/rest_base.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/rest_base.py deleted file mode 100644 index 68f5017245d3..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/rest_base.py +++ /dev/null @@ -1,872 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.cloud.location import locations_pb2 # type: ignore -from .base import DataTransferServiceTransport, DEFAULT_CLIENT_INFO - -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - - -from google.cloud.bigquery_datatransfer_v1.types import datatransfer -from google.cloud.bigquery_datatransfer_v1.types import transfer -from google.protobuf import empty_pb2 # type: ignore - - -class _BaseDataTransferServiceRestTransport(DataTransferServiceTransport): - """Base REST backend transport for DataTransferService. - - Note: This class is not meant to be used directly. Use its sync and - async sub-classes instead. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'bigquerydatatransfer.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - Args: - host (Optional[str]): - The hostname to connect to (default: 'bigquerydatatransfer.googleapis.com'). - credentials (Optional[Any]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - - class _BaseCheckValidCreds: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/dataSources/*}:checkValidCreds', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{name=projects/*/dataSources/*}:checkValidCreds', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = datatransfer.CheckValidCredsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataTransferServiceRestTransport._BaseCheckValidCreds._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateTransferConfig: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =
{ - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/transferConfigs', - 'body': 'transfer_config', - }, - { - 'method': 'post', - 'uri': '/v1/{parent=projects/*}/transferConfigs', - 'body': 'transfer_config', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = datatransfer.CreateTransferConfigRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataTransferServiceRestTransport._BaseCreateTransferConfig._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteTransferConfig: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/transferConfigs/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/transferConfigs/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = datatransfer.DeleteTransferConfigRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataTransferServiceRestTransport._BaseDeleteTransferConfig._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteTransferRun: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}', - }, - { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/transferConfigs/*/runs/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = datatransfer.DeleteTransferRunRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return 
transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataTransferServiceRestTransport._BaseDeleteTransferRun._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseEnrollDataSources: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*}:enrollDataSources', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{name=projects/*}:enrollDataSources', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = datatransfer.EnrollDataSourcesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataTransferServiceRestTransport._BaseEnrollDataSources._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetDataSource: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/dataSources/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/dataSources/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = datatransfer.GetDataSourceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataTransferServiceRestTransport._BaseGetDataSource._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetTransferConfig: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in 
message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/transferConfigs/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/transferConfigs/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = datatransfer.GetTransferConfigRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataTransferServiceRestTransport._BaseGetTransferConfig._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetTransferRun: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}', - }, - { - 'method': 'get', - 'uri': '/v1/{name=projects/*/transferConfigs/*/runs/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = datatransfer.GetTransferRunRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataTransferServiceRestTransport._BaseGetTransferRun._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListDataSources: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/dataSources', - }, - { - 'method': 'get', - 'uri': '/v1/{parent=projects/*}/dataSources', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = datatransfer.ListDataSourcesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataTransferServiceRestTransport._BaseListDataSources._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListTransferConfigs: - def __hash__(self): # pragma: NO 
COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/transferConfigs', - }, - { - 'method': 'get', - 'uri': '/v1/{parent=projects/*}/transferConfigs', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = datatransfer.ListTransferConfigsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataTransferServiceRestTransport._BaseListTransferConfigs._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListTransferLogs: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*/transferConfigs/*/runs/*}/transferLogs', - }, - { - 'method': 'get', - 'uri': '/v1/{parent=projects/*/transferConfigs/*/runs/*}/transferLogs', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = datatransfer.ListTransferLogsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataTransferServiceRestTransport._BaseListTransferLogs._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListTransferRuns: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*/transferConfigs/*}/runs', - }, - { - 'method': 'get', - 'uri': '/v1/{parent=projects/*/transferConfigs/*}/runs', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = datatransfer.ListTransferRunsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = 
json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataTransferServiceRestTransport._BaseListTransferRuns._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseScheduleTransferRuns: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*/transferConfigs/*}:scheduleRuns', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{parent=projects/*/transferConfigs/*}:scheduleRuns', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = datatransfer.ScheduleTransferRunsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataTransferServiceRestTransport._BaseScheduleTransferRuns._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseStartManualTransferRuns: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*/transferConfigs/*}:startManualRuns', - 'body': '*', - }, - { - 'method': 'post', - 'uri': '/v1/{parent=projects/*/transferConfigs/*}:startManualRuns', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = datatransfer.StartManualTransferRunsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataTransferServiceRestTransport._BaseStartManualTransferRuns._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUnenrollDataSources: - def __hash__(self): # pragma: NO COVER - return 
NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*}:unenrollDataSources', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = datatransfer.UnenrollDataSourcesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataTransferServiceRestTransport._BaseUnenrollDataSources._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateTransferConfig: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{transfer_config.name=projects/*/locations/*/transferConfigs/*}', - 'body': 'transfer_config', - }, - { - 'method': 'patch', - 'uri': '/v1/{transfer_config.name=projects/*/transferConfigs/*}', - 'body': 'transfer_config', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = datatransfer.UpdateTransferConfigRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseDataTransferServiceRestTransport._BaseUpdateTransferConfig._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetLocation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def 
_get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - class _BaseListLocations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*}/locations', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - return query_params - - -__all__=( - '_BaseDataTransferServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/types/__init__.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/types/__init__.py deleted file mode 100644 index df6aaacc0279..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/types/__init__.py +++ /dev/null @@ -1,98 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
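The ``types/__init__.py`` module removed below only re-exports the generated message classes. As a quick orientation, a minimal sketch of how those re-exports are typically consumed (the project and location IDs are placeholders):

.. code-block:: python

    from google.cloud.bigquery_datatransfer_v1 import types

    # Request messages are proto-plus objects; field names mirror the proto.
    request = types.ListDataSourcesRequest(
        parent="projects/my-project/locations/us",
    )
    print(request.parent)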
-# -from .datatransfer import ( - CheckValidCredsRequest, - CheckValidCredsResponse, - CreateTransferConfigRequest, - DataSource, - DataSourceParameter, - DeleteTransferConfigRequest, - DeleteTransferRunRequest, - EnrollDataSourcesRequest, - GetDataSourceRequest, - GetTransferConfigRequest, - GetTransferRunRequest, - ListDataSourcesRequest, - ListDataSourcesResponse, - ListTransferConfigsRequest, - ListTransferConfigsResponse, - ListTransferLogsRequest, - ListTransferLogsResponse, - ListTransferRunsRequest, - ListTransferRunsResponse, - ScheduleTransferRunsRequest, - ScheduleTransferRunsResponse, - StartManualTransferRunsRequest, - StartManualTransferRunsResponse, - UnenrollDataSourcesRequest, - UpdateTransferConfigRequest, -) -from .transfer import ( - EmailPreferences, - EncryptionConfiguration, - EventDrivenSchedule, - ManualSchedule, - ScheduleOptions, - ScheduleOptionsV2, - TimeBasedSchedule, - TransferConfig, - TransferMessage, - TransferRun, - UserInfo, - TransferState, - TransferType, -) - -__all__ = ( - 'CheckValidCredsRequest', - 'CheckValidCredsResponse', - 'CreateTransferConfigRequest', - 'DataSource', - 'DataSourceParameter', - 'DeleteTransferConfigRequest', - 'DeleteTransferRunRequest', - 'EnrollDataSourcesRequest', - 'GetDataSourceRequest', - 'GetTransferConfigRequest', - 'GetTransferRunRequest', - 'ListDataSourcesRequest', - 'ListDataSourcesResponse', - 'ListTransferConfigsRequest', - 'ListTransferConfigsResponse', - 'ListTransferLogsRequest', - 'ListTransferLogsResponse', - 'ListTransferRunsRequest', - 'ListTransferRunsResponse', - 'ScheduleTransferRunsRequest', - 'ScheduleTransferRunsResponse', - 'StartManualTransferRunsRequest', - 'StartManualTransferRunsResponse', - 'UnenrollDataSourcesRequest', - 'UpdateTransferConfigRequest', - 'EmailPreferences', - 'EncryptionConfiguration', - 'EventDrivenSchedule', - 'ManualSchedule', - 'ScheduleOptions', - 'ScheduleOptionsV2', - 'TimeBasedSchedule', - 'TransferConfig', - 'TransferMessage', - 'TransferRun', - 'UserInfo', - 'TransferState', - 'TransferType', -) diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py deleted file mode 100644 index 4ad97957d8ec..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py +++ /dev/null @@ -1,1218 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
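The ``types/datatransfer.py`` module that follows defines its messages with proto-plus, registering them in a single ``proto.module`` manifest. A minimal sketch of the round-trip these wrappers support, using the same ``pb()`` and ``to_json()`` helpers the transport code above relies on (the resource name is a placeholder):

.. code-block:: python

    from google.cloud.bigquery_datatransfer_v1.types import datatransfer

    msg = datatransfer.CheckValidCredsRequest(
        name="projects/my-project/dataSources/my-source",
    )

    # Access the underlying protobuf message and a JSON rendering of it.
    raw_pb = datatransfer.CheckValidCredsRequest.pb(msg)
    as_json = datatransfer.CheckValidCredsRequest.to_json(msg)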
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.bigquery_datatransfer_v1.types import transfer -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.protobuf import wrappers_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.bigquery.datatransfer.v1', - manifest={ - 'DataSourceParameter', - 'DataSource', - 'GetDataSourceRequest', - 'ListDataSourcesRequest', - 'ListDataSourcesResponse', - 'CreateTransferConfigRequest', - 'UpdateTransferConfigRequest', - 'GetTransferConfigRequest', - 'DeleteTransferConfigRequest', - 'GetTransferRunRequest', - 'DeleteTransferRunRequest', - 'ListTransferConfigsRequest', - 'ListTransferConfigsResponse', - 'ListTransferRunsRequest', - 'ListTransferRunsResponse', - 'ListTransferLogsRequest', - 'ListTransferLogsResponse', - 'CheckValidCredsRequest', - 'CheckValidCredsResponse', - 'ScheduleTransferRunsRequest', - 'ScheduleTransferRunsResponse', - 'StartManualTransferRunsRequest', - 'StartManualTransferRunsResponse', - 'EnrollDataSourcesRequest', - 'UnenrollDataSourcesRequest', - }, -) - - -class DataSourceParameter(proto.Message): - r"""A parameter used to define custom fields in a data source - definition. - - Attributes: - param_id (str): - Parameter identifier. - display_name (str): - Parameter display name in the user interface. - description (str): - Parameter description. - type_ (google.cloud.bigquery_datatransfer_v1.types.DataSourceParameter.Type): - Parameter type. - required (bool): - Is parameter required. - repeated (bool): - Deprecated. This field has no effect. - validation_regex (str): - Regular expression which can be used for - parameter validation. - allowed_values (MutableSequence[str]): - All possible values for the parameter. - min_value (google.protobuf.wrappers_pb2.DoubleValue): - For integer and double values specifies - minimum allowed value. - max_value (google.protobuf.wrappers_pb2.DoubleValue): - For integer and double values specifies - maximum allowed value. - fields (MutableSequence[google.cloud.bigquery_datatransfer_v1.types.DataSourceParameter]): - Deprecated. This field has no effect. - validation_description (str): - Description of the requirements for this - field, in case the user input does not fulfill - the regex pattern or min/max values. - validation_help_url (str): - URL to a help document to further explain the - naming requirements. - immutable (bool): - Cannot be changed after initial creation. - recurse (bool): - Deprecated. This field has no effect. - deprecated (bool): - If true, it should not be used in new - transfers, and it should not be visible to - users. - """ - class Type(proto.Enum): - r"""Parameter type. - - Values: - TYPE_UNSPECIFIED (0): - Type unspecified. - STRING (1): - String parameter. - INTEGER (2): - Integer parameter (64-bits). - Will be serialized to json as string. - DOUBLE (3): - Double precision floating point parameter. - BOOLEAN (4): - Boolean parameter. - RECORD (5): - Deprecated. This field has no effect. - PLUS_PAGE (6): - Page ID for a Google+ Page. - LIST (7): - List of strings parameter. 
- """ - TYPE_UNSPECIFIED = 0 - STRING = 1 - INTEGER = 2 - DOUBLE = 3 - BOOLEAN = 4 - RECORD = 5 - PLUS_PAGE = 6 - LIST = 7 - - param_id: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - description: str = proto.Field( - proto.STRING, - number=3, - ) - type_: Type = proto.Field( - proto.ENUM, - number=4, - enum=Type, - ) - required: bool = proto.Field( - proto.BOOL, - number=5, - ) - repeated: bool = proto.Field( - proto.BOOL, - number=6, - ) - validation_regex: str = proto.Field( - proto.STRING, - number=7, - ) - allowed_values: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=8, - ) - min_value: wrappers_pb2.DoubleValue = proto.Field( - proto.MESSAGE, - number=9, - message=wrappers_pb2.DoubleValue, - ) - max_value: wrappers_pb2.DoubleValue = proto.Field( - proto.MESSAGE, - number=10, - message=wrappers_pb2.DoubleValue, - ) - fields: MutableSequence['DataSourceParameter'] = proto.RepeatedField( - proto.MESSAGE, - number=11, - message='DataSourceParameter', - ) - validation_description: str = proto.Field( - proto.STRING, - number=12, - ) - validation_help_url: str = proto.Field( - proto.STRING, - number=13, - ) - immutable: bool = proto.Field( - proto.BOOL, - number=14, - ) - recurse: bool = proto.Field( - proto.BOOL, - number=15, - ) - deprecated: bool = proto.Field( - proto.BOOL, - number=20, - ) - - -class DataSource(proto.Message): - r"""Defines the properties and custom parameters for a data - source. - - Attributes: - name (str): - Output only. Data source resource name. - data_source_id (str): - Data source id. - display_name (str): - User friendly data source name. - description (str): - User friendly data source description string. - client_id (str): - Data source client id which should be used to - receive refresh token. - scopes (MutableSequence[str]): - Api auth scopes for which refresh token needs - to be obtained. These are scopes needed by a - data source to prepare data and ingest them into - BigQuery, e.g., - https://www.googleapis.com/auth/bigquery - transfer_type (google.cloud.bigquery_datatransfer_v1.types.TransferType): - Deprecated. This field has no effect. - supports_multiple_transfers (bool): - Deprecated. This field has no effect. - update_deadline_seconds (int): - The number of seconds to wait for an update - from the data source before the Data Transfer - Service marks the transfer as FAILED. - default_schedule (str): - Default data transfer schedule. Examples of valid schedules - include: ``1st,3rd monday of month 15:30``, - ``every wed,fri of jan,jun 13:15``, and - ``first sunday of quarter 00:00``. - supports_custom_schedule (bool): - Specifies whether the data source supports a user defined - schedule, or operates on the default schedule. When set to - ``true``, user can override default schedule. - parameters (MutableSequence[google.cloud.bigquery_datatransfer_v1.types.DataSourceParameter]): - Data source parameters. - help_url (str): - Url for the help document for this data - source. - authorization_type (google.cloud.bigquery_datatransfer_v1.types.DataSource.AuthorizationType): - Indicates the type of authorization. - data_refresh_type (google.cloud.bigquery_datatransfer_v1.types.DataSource.DataRefreshType): - Specifies whether the data source supports - automatic data refresh for the past few days, - and how it's supported. For some data sources, - data might not be complete until a few days - later, so it's useful to refresh data - automatically. 
- default_data_refresh_window_days (int): - Default data refresh window on days. Only meaningful when - ``data_refresh_type`` = ``SLIDING_WINDOW``. - manual_runs_disabled (bool): - Disables backfilling and manual run - scheduling for the data source. - minimum_schedule_interval (google.protobuf.duration_pb2.Duration): - The minimum interval for scheduler to - schedule runs. - """ - class AuthorizationType(proto.Enum): - r"""The type of authorization needed for this data source. - - Values: - AUTHORIZATION_TYPE_UNSPECIFIED (0): - Type unspecified. - AUTHORIZATION_CODE (1): - Use OAuth 2 authorization codes that can be - exchanged for a refresh token on the backend. - GOOGLE_PLUS_AUTHORIZATION_CODE (2): - Return an authorization code for a given - Google+ page that can then be exchanged for a - refresh token on the backend. - FIRST_PARTY_OAUTH (3): - Use First Party OAuth. - """ - AUTHORIZATION_TYPE_UNSPECIFIED = 0 - AUTHORIZATION_CODE = 1 - GOOGLE_PLUS_AUTHORIZATION_CODE = 2 - FIRST_PARTY_OAUTH = 3 - - class DataRefreshType(proto.Enum): - r"""Represents how the data source supports data auto refresh. - - Values: - DATA_REFRESH_TYPE_UNSPECIFIED (0): - The data source won't support data auto - refresh, which is default value. - SLIDING_WINDOW (1): - The data source supports data auto refresh, - and runs will be scheduled for the past few - days. Does not allow custom values to be set for - each transfer config. - CUSTOM_SLIDING_WINDOW (2): - The data source supports data auto refresh, - and runs will be scheduled for the past few - days. Allows custom values to be set for each - transfer config. - """ - DATA_REFRESH_TYPE_UNSPECIFIED = 0 - SLIDING_WINDOW = 1 - CUSTOM_SLIDING_WINDOW = 2 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - data_source_id: str = proto.Field( - proto.STRING, - number=2, - ) - display_name: str = proto.Field( - proto.STRING, - number=3, - ) - description: str = proto.Field( - proto.STRING, - number=4, - ) - client_id: str = proto.Field( - proto.STRING, - number=5, - ) - scopes: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=6, - ) - transfer_type: transfer.TransferType = proto.Field( - proto.ENUM, - number=7, - enum=transfer.TransferType, - ) - supports_multiple_transfers: bool = proto.Field( - proto.BOOL, - number=8, - ) - update_deadline_seconds: int = proto.Field( - proto.INT32, - number=9, - ) - default_schedule: str = proto.Field( - proto.STRING, - number=10, - ) - supports_custom_schedule: bool = proto.Field( - proto.BOOL, - number=11, - ) - parameters: MutableSequence['DataSourceParameter'] = proto.RepeatedField( - proto.MESSAGE, - number=12, - message='DataSourceParameter', - ) - help_url: str = proto.Field( - proto.STRING, - number=13, - ) - authorization_type: AuthorizationType = proto.Field( - proto.ENUM, - number=14, - enum=AuthorizationType, - ) - data_refresh_type: DataRefreshType = proto.Field( - proto.ENUM, - number=15, - enum=DataRefreshType, - ) - default_data_refresh_window_days: int = proto.Field( - proto.INT32, - number=16, - ) - manual_runs_disabled: bool = proto.Field( - proto.BOOL, - number=17, - ) - minimum_schedule_interval: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=18, - message=duration_pb2.Duration, - ) - - -class GetDataSourceRequest(proto.Message): - r"""A request to get data source info. - - Attributes: - name (str): - Required. 
The field will contain name of the resource - requested, for example: - ``projects/{project_id}/dataSources/{data_source_id}`` or - ``projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListDataSourcesRequest(proto.Message): - r"""Request to list supported data sources and their data - transfer settings. - - Attributes: - parent (str): - Required. The BigQuery project id for which data sources - should be returned. Must be in the form: - ``projects/{project_id}`` or - ``projects/{project_id}/locations/{location_id}`` - page_token (str): - Pagination token, which can be used to request a specific - page of ``ListDataSourcesRequest`` list results. For - multiple-page results, ``ListDataSourcesResponse`` outputs a - ``next_page`` token, which can be used as the ``page_token`` - value to request the next page of list results. - page_size (int): - Page size. The default page size is the - maximum value of 1000 results. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - page_size: int = proto.Field( - proto.INT32, - number=4, - ) - - -class ListDataSourcesResponse(proto.Message): - r"""Returns list of supported data sources and their metadata. - - Attributes: - data_sources (MutableSequence[google.cloud.bigquery_datatransfer_v1.types.DataSource]): - List of supported data sources and their - transfer settings. - next_page_token (str): - Output only. The next-pagination token. For multiple-page - list results, this token can be used as the - ``ListDataSourcesRequest.page_token`` to request the next - page of list results. - """ - - @property - def raw_page(self): - return self - - data_sources: MutableSequence['DataSource'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='DataSource', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class CreateTransferConfigRequest(proto.Message): - r"""A request to create a data transfer configuration. If new - credentials are needed for this transfer configuration, - authorization info must be provided. If authorization info is - provided, the transfer configuration will be associated with the - user id corresponding to the authorization info. Otherwise, the - transfer configuration will be associated with the calling user. - - When using a cross project service account for creating a transfer - config, you must enable cross project service account usage. For - more information, see `Disable attachment of service accounts to - resources in other - projects `__. - - Attributes: - parent (str): - Required. The BigQuery project id where the transfer - configuration should be created. Must be in the format - projects/{project_id}/locations/{location_id} or - projects/{project_id}. If specified location and location of - the destination bigquery dataset do not match - the request - will fail. - transfer_config (google.cloud.bigquery_datatransfer_v1.types.TransferConfig): - Required. Data transfer configuration to - create. - authorization_code (str): - Deprecated: Authorization code was required when - ``transferConfig.dataSourceId`` is 'youtube_channel' but it - is no longer used in any data sources. Use ``version_info`` - instead. - - Optional OAuth2 authorization code to use with this transfer - configuration. 
This is required only if - ``transferConfig.dataSourceId`` is 'youtube_channel' and new - credentials are needed, as indicated by ``CheckValidCreds``. - In order to obtain authorization_code, make a request to the - following URL: - - .. raw:: html - -
-                <pre class="prettyprint" suppresswarning="true">
-                https://bigquery.cloud.google.com/datatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=authorization_code&client_id=client_id&scope=data_source_scopes
-                </pre>
- - - The client_id is the OAuth client_id of the data source - as returned by ListDataSources method. - - data_source_scopes are the scopes returned by - ListDataSources method. - - Note that this should not be set when - ``service_account_name`` is used to create the transfer - config. - version_info (str): - Optional version info. This parameter replaces - ``authorization_code`` which is no longer used in any data - sources. This is required only if - ``transferConfig.dataSourceId`` is 'youtube_channel' *or* - new credentials are needed, as indicated by - ``CheckValidCreds``. In order to obtain version info, make a - request to the following URL: - - .. raw:: html - -
-                <pre class="prettyprint" suppresswarning="true">
-                https://bigquery.cloud.google.com/datatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=version_info&client_id=client_id&scope=data_source_scopes
-                </pre>
- - - The client_id is the OAuth client_id of the data source - as returned by ListDataSources method. - - data_source_scopes are the scopes returned by - ListDataSources method. - - Note that this should not be set when - ``service_account_name`` is used to create the transfer - config. - service_account_name (str): - Optional service account email. If this field is set, the - transfer config will be created with this service account's - credentials. It requires that the requesting user calling - this API has permissions to act as this service account. - - Note that not all data sources support service account - credentials when creating a transfer config. For the latest - list of data sources, read about `using service - accounts `__. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - transfer_config: transfer.TransferConfig = proto.Field( - proto.MESSAGE, - number=2, - message=transfer.TransferConfig, - ) - authorization_code: str = proto.Field( - proto.STRING, - number=3, - ) - version_info: str = proto.Field( - proto.STRING, - number=5, - ) - service_account_name: str = proto.Field( - proto.STRING, - number=6, - ) - - -class UpdateTransferConfigRequest(proto.Message): - r"""A request to update a transfer configuration. To update the user id - of the transfer configuration, authorization info needs to be - provided. - - When using a cross project service account for updating a transfer - config, you must enable cross project service account usage. For - more information, see `Disable attachment of service accounts to - resources in other - projects `__. - - Attributes: - transfer_config (google.cloud.bigquery_datatransfer_v1.types.TransferConfig): - Required. Data transfer configuration to - create. - authorization_code (str): - Deprecated: Authorization code was required when - ``transferConfig.dataSourceId`` is 'youtube_channel' but it - is no longer used in any data sources. Use ``version_info`` - instead. - - Optional OAuth2 authorization code to use with this transfer - configuration. This is required only if - ``transferConfig.dataSourceId`` is 'youtube_channel' and new - credentials are needed, as indicated by ``CheckValidCreds``. - In order to obtain authorization_code, make a request to the - following URL: - - .. raw:: html - -
-                <pre class="prettyprint" suppresswarning="true">
-                https://bigquery.cloud.google.com/datatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=authorization_code&client_id=client_id&scope=data_source_scopes
-                </pre>
- - - The client_id is the OAuth client_id of the data source - as returned by ListDataSources method. - - data_source_scopes are the scopes returned by - ListDataSources method. - - Note that this should not be set when - ``service_account_name`` is used to update the transfer - config. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Required list of fields to be - updated in this request. - version_info (str): - Optional version info. This parameter replaces - ``authorization_code`` which is no longer used in any data - sources. This is required only if - ``transferConfig.dataSourceId`` is 'youtube_channel' *or* - new credentials are needed, as indicated by - ``CheckValidCreds``. In order to obtain version info, make a - request to the following URL: - - .. raw:: html - -
-                <pre class="prettyprint" suppresswarning="true">
-                https://bigquery.cloud.google.com/datatransfer/oauthz/auth?redirect_uri=urn:ietf:wg:oauth:2.0:oob&response_type=version_info&client_id=client_id&scope=data_source_scopes
-                </pre>
- - - The client_id is the OAuth client_id of the data source - as returned by ListDataSources method. - - data_source_scopes are the scopes returned by - ListDataSources method. - - Note that this should not be set when - ``service_account_name`` is used to update the transfer - config. - service_account_name (str): - Optional service account email. If this field is set, the - transfer config will be created with this service account's - credentials. It requires that the requesting user calling - this API has permissions to act as this service account. - - Note that not all data sources support service account - credentials when creating a transfer config. For the latest - list of data sources, read about `using service - accounts `__. - """ - - transfer_config: transfer.TransferConfig = proto.Field( - proto.MESSAGE, - number=1, - message=transfer.TransferConfig, - ) - authorization_code: str = proto.Field( - proto.STRING, - number=3, - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=4, - message=field_mask_pb2.FieldMask, - ) - version_info: str = proto.Field( - proto.STRING, - number=5, - ) - service_account_name: str = proto.Field( - proto.STRING, - number=6, - ) - - -class GetTransferConfigRequest(proto.Message): - r"""A request to get data transfer information. - - Attributes: - name (str): - Required. The field will contain name of the resource - requested, for example: - ``projects/{project_id}/transferConfigs/{config_id}`` or - ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class DeleteTransferConfigRequest(proto.Message): - r"""A request to delete data transfer information. All associated - transfer runs and log messages will be deleted as well. - - Attributes: - name (str): - Required. The field will contain name of the resource - requested, for example: - ``projects/{project_id}/transferConfigs/{config_id}`` or - ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class GetTransferRunRequest(proto.Message): - r"""A request to get data transfer run information. - - Attributes: - name (str): - Required. The field will contain name of the resource - requested, for example: - ``projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`` - or - ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class DeleteTransferRunRequest(proto.Message): - r"""A request to delete data transfer run information. - - Attributes: - name (str): - Required. The field will contain name of the resource - requested, for example: - ``projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`` - or - ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListTransferConfigsRequest(proto.Message): - r"""A request to list data transfers configured for a BigQuery - project. - - Attributes: - parent (str): - Required. The BigQuery project id for which transfer configs - should be returned: ``projects/{project_id}`` or - ``projects/{project_id}/locations/{location_id}`` - data_source_ids (MutableSequence[str]): - When specified, only configurations of - requested data sources are returned. 
- page_token (str): - Pagination token, which can be used to request a specific - page of ``ListTransfersRequest`` list results. For - multiple-page results, ``ListTransfersResponse`` outputs a - ``next_page`` token, which can be used as the ``page_token`` - value to request the next page of list results. - page_size (int): - Page size. The default page size is the - maximum value of 1000 results. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - data_source_ids: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - page_size: int = proto.Field( - proto.INT32, - number=4, - ) - - -class ListTransferConfigsResponse(proto.Message): - r"""The returned list of pipelines in the project. - - Attributes: - transfer_configs (MutableSequence[google.cloud.bigquery_datatransfer_v1.types.TransferConfig]): - Output only. The stored pipeline transfer - configurations. - next_page_token (str): - Output only. The next-pagination token. For multiple-page - list results, this token can be used as the - ``ListTransferConfigsRequest.page_token`` to request the - next page of list results. - """ - - @property - def raw_page(self): - return self - - transfer_configs: MutableSequence[transfer.TransferConfig] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=transfer.TransferConfig, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ListTransferRunsRequest(proto.Message): - r"""A request to list data transfer runs. - - Attributes: - parent (str): - Required. Name of transfer configuration for which transfer - runs should be retrieved. Format of transfer configuration - resource name is: - ``projects/{project_id}/transferConfigs/{config_id}`` or - ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}``. - states (MutableSequence[google.cloud.bigquery_datatransfer_v1.types.TransferState]): - When specified, only transfer runs with - requested states are returned. - page_token (str): - Pagination token, which can be used to request a specific - page of ``ListTransferRunsRequest`` list results. For - multiple-page results, ``ListTransferRunsResponse`` outputs - a ``next_page`` token, which can be used as the - ``page_token`` value to request the next page of list - results. - page_size (int): - Page size. The default page size is the - maximum value of 1000 results. - run_attempt (google.cloud.bigquery_datatransfer_v1.types.ListTransferRunsRequest.RunAttempt): - Indicates how run attempts are to be pulled. - """ - class RunAttempt(proto.Enum): - r"""Represents which runs should be pulled. - - Values: - RUN_ATTEMPT_UNSPECIFIED (0): - All runs should be returned. - LATEST (1): - Only latest run per day should be returned. - """ - RUN_ATTEMPT_UNSPECIFIED = 0 - LATEST = 1 - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - states: MutableSequence[transfer.TransferState] = proto.RepeatedField( - proto.ENUM, - number=2, - enum=transfer.TransferState, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - page_size: int = proto.Field( - proto.INT32, - number=4, - ) - run_attempt: RunAttempt = proto.Field( - proto.ENUM, - number=5, - enum=RunAttempt, - ) - - -class ListTransferRunsResponse(proto.Message): - r"""The returned list of pipelines in the project. - - Attributes: - transfer_runs (MutableSequence[google.cloud.bigquery_datatransfer_v1.types.TransferRun]): - Output only. The stored pipeline transfer - runs. 
- next_page_token (str): - Output only. The next-pagination token. For multiple-page - list results, this token can be used as the - ``ListTransferRunsRequest.page_token`` to request the next - page of list results. - """ - - @property - def raw_page(self): - return self - - transfer_runs: MutableSequence[transfer.TransferRun] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=transfer.TransferRun, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ListTransferLogsRequest(proto.Message): - r"""A request to get user facing log messages associated with - data transfer run. - - Attributes: - parent (str): - Required. Transfer run name in the form: - ``projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`` - or - ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}`` - page_token (str): - Pagination token, which can be used to request a specific - page of ``ListTransferLogsRequest`` list results. For - multiple-page results, ``ListTransferLogsResponse`` outputs - a ``next_page`` token, which can be used as the - ``page_token`` value to request the next page of list - results. - page_size (int): - Page size. The default page size is the - maximum value of 1000 results. - message_types (MutableSequence[google.cloud.bigquery_datatransfer_v1.types.TransferMessage.MessageSeverity]): - Message types to return. If not populated - - INFO, WARNING and ERROR messages are returned. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_token: str = proto.Field( - proto.STRING, - number=4, - ) - page_size: int = proto.Field( - proto.INT32, - number=5, - ) - message_types: MutableSequence[transfer.TransferMessage.MessageSeverity] = proto.RepeatedField( - proto.ENUM, - number=6, - enum=transfer.TransferMessage.MessageSeverity, - ) - - -class ListTransferLogsResponse(proto.Message): - r"""The returned list transfer run messages. - - Attributes: - transfer_messages (MutableSequence[google.cloud.bigquery_datatransfer_v1.types.TransferMessage]): - Output only. The stored pipeline transfer - messages. - next_page_token (str): - Output only. The next-pagination token. For multiple-page - list results, this token can be used as the - ``GetTransferRunLogRequest.page_token`` to request the next - page of list results. - """ - - @property - def raw_page(self): - return self - - transfer_messages: MutableSequence[transfer.TransferMessage] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=transfer.TransferMessage, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class CheckValidCredsRequest(proto.Message): - r"""A request to determine whether the user has valid - credentials. This method is used to limit the number of OAuth - popups in the user interface. The user id is inferred from the - API call context. - If the data source has the Google+ authorization type, this - method returns false, as it cannot be determined whether the - credentials are already valid merely based on the user id. - - Attributes: - name (str): - Required. The data source in the form: - ``projects/{project_id}/dataSources/{data_source_id}`` or - ``projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CheckValidCredsResponse(proto.Message): - r"""A response indicating whether the credentials exist and are - valid. 
- - Attributes: - has_valid_creds (bool): - If set to ``true``, the credentials exist and are valid. - """ - - has_valid_creds: bool = proto.Field( - proto.BOOL, - number=1, - ) - - -class ScheduleTransferRunsRequest(proto.Message): - r"""A request to schedule transfer runs for a time range. - - Attributes: - parent (str): - Required. Transfer configuration name in the form: - ``projects/{project_id}/transferConfigs/{config_id}`` or - ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}``. - start_time (google.protobuf.timestamp_pb2.Timestamp): - Required. Start time of the range of transfer runs. For - example, ``"2017-05-25T00:00:00+00:00"``. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Required. End time of the range of transfer runs. For - example, ``"2017-05-30T00:00:00+00:00"``. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - - -class ScheduleTransferRunsResponse(proto.Message): - r"""A response to schedule transfer runs for a time range. - - Attributes: - runs (MutableSequence[google.cloud.bigquery_datatransfer_v1.types.TransferRun]): - The transfer runs that were scheduled. - """ - - runs: MutableSequence[transfer.TransferRun] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=transfer.TransferRun, - ) - - -class StartManualTransferRunsRequest(proto.Message): - r"""A request to start manual transfer runs. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - parent (str): - Required. Transfer configuration name in the form: - ``projects/{project_id}/transferConfigs/{config_id}`` or - ``projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}``. - requested_time_range (google.cloud.bigquery_datatransfer_v1.types.StartManualTransferRunsRequest.TimeRange): - A time_range start and end timestamp for historical data - files or reports that are scheduled to be transferred by the - scheduled transfer run. requested_time_range must be a past - time and cannot include future time values. - - This field is a member of `oneof`_ ``time``. - requested_run_time (google.protobuf.timestamp_pb2.Timestamp): - A run_time timestamp for historical data files or reports - that are scheduled to be transferred by the scheduled - transfer run. requested_run_time must be a past time and - cannot include future time values. - - This field is a member of `oneof`_ ``time``. - """ - - class TimeRange(proto.Message): - r"""A specification for a time range, this will request transfer runs - with run_time between start_time (inclusive) and end_time - (exclusive). - - Attributes: - start_time (google.protobuf.timestamp_pb2.Timestamp): - Start time of the range of transfer runs. For example, - ``"2017-05-25T00:00:00+00:00"``. The start_time must be - strictly less than the end_time. Creates transfer runs where - run_time is in the range between start_time (inclusive) and - end_time (exclusive). - end_time (google.protobuf.timestamp_pb2.Timestamp): - End time of the range of transfer runs. 
For example, - ``"2017-05-30T00:00:00+00:00"``. The end_time must not be in - the future. Creates transfer runs where run_time is in the - range between start_time (inclusive) and end_time - (exclusive). - """ - - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - requested_time_range: TimeRange = proto.Field( - proto.MESSAGE, - number=3, - oneof='time', - message=TimeRange, - ) - requested_run_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - oneof='time', - message=timestamp_pb2.Timestamp, - ) - - -class StartManualTransferRunsResponse(proto.Message): - r"""A response to start manual transfer runs. - - Attributes: - runs (MutableSequence[google.cloud.bigquery_datatransfer_v1.types.TransferRun]): - The transfer runs that were created. - """ - - runs: MutableSequence[transfer.TransferRun] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=transfer.TransferRun, - ) - - -class EnrollDataSourcesRequest(proto.Message): - r"""A request to enroll a set of data sources so they are visible in the - BigQuery UI's ``Transfer`` tab. - - Attributes: - name (str): - Required. The name of the project resource in the form: - ``projects/{project_id}`` - data_source_ids (MutableSequence[str]): - Data sources that are enrolled. It is - required to provide at least one data source id. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - data_source_ids: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - - -class UnenrollDataSourcesRequest(proto.Message): - r"""A request to unenroll a set of data sources so they are no longer - visible in the BigQuery UI's ``Transfer`` tab. - - Attributes: - name (str): - Required. The name of the project resource in the form: - ``projects/{project_id}`` - data_source_ids (MutableSequence[str]): - Data sources that are unenrolled. It is - required to provide at least one data source id. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - data_source_ids: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/types/transfer.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/types/transfer.py deleted file mode 100644 index d6d0ca06df58..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/google/cloud/bigquery_datatransfer_v1/types/transfer.py +++ /dev/null @@ -1,716 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
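The ``StartManualTransferRunsRequest`` message defined above places ``requested_time_range`` and ``requested_run_time`` in a ``time`` oneof, so exactly one of the two may be set and setting one clears the other. A minimal backfill sketch, assuming a hypothetical transfer config name; proto-plus marshals ``datetime`` values into ``Timestamp`` fields:

.. code-block:: python

    import datetime

    from google.cloud import bigquery_datatransfer_v1

    client = bigquery_datatransfer_v1.DataTransferServiceClient()

    request = bigquery_datatransfer_v1.StartManualTransferRunsRequest(
        parent="projects/my-project/transferConfigs/my-config",  # hypothetical
        # Runs are created for run_time values in [start_time, end_time);
        # end_time must not be in the future.
        requested_time_range=bigquery_datatransfer_v1.StartManualTransferRunsRequest.TimeRange(
            start_time=datetime.datetime(2017, 5, 25, tzinfo=datetime.timezone.utc),
            end_time=datetime.datetime(2017, 5, 30, tzinfo=datetime.timezone.utc),
        ),
    )

    response = client.start_manual_transfer_runs(request=request)
    for run in response.runs:
        print(run.name)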
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import struct_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.protobuf import wrappers_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.bigquery.datatransfer.v1', - manifest={ - 'TransferType', - 'TransferState', - 'EmailPreferences', - 'ScheduleOptions', - 'ScheduleOptionsV2', - 'TimeBasedSchedule', - 'ManualSchedule', - 'EventDrivenSchedule', - 'UserInfo', - 'TransferConfig', - 'EncryptionConfiguration', - 'TransferRun', - 'TransferMessage', - }, -) - - -class TransferType(proto.Enum): - r"""DEPRECATED. Represents data transfer type. - - Values: - TRANSFER_TYPE_UNSPECIFIED (0): - Invalid or Unknown transfer type placeholder. - BATCH (1): - Batch data transfer. - STREAMING (2): - Streaming data transfer. Streaming data - source currently doesn't support multiple - transfer configs per project. - """ - _pb_options = {'deprecated': True} - TRANSFER_TYPE_UNSPECIFIED = 0 - BATCH = 1 - STREAMING = 2 - - -class TransferState(proto.Enum): - r"""Represents data transfer run state. - - Values: - TRANSFER_STATE_UNSPECIFIED (0): - State placeholder (0). - PENDING (2): - Data transfer is scheduled and is waiting to - be picked up by data transfer backend (2). - RUNNING (3): - Data transfer is in progress (3). - SUCCEEDED (4): - Data transfer completed successfully (4). - FAILED (5): - Data transfer failed (5). - CANCELLED (6): - Data transfer is cancelled (6). - """ - TRANSFER_STATE_UNSPECIFIED = 0 - PENDING = 2 - RUNNING = 3 - SUCCEEDED = 4 - FAILED = 5 - CANCELLED = 6 - - -class EmailPreferences(proto.Message): - r"""Represents preferences for sending email notifications for - transfer run events. - - Attributes: - enable_failure_email (bool): - If true, email notifications will be sent on - transfer run failures. - """ - - enable_failure_email: bool = proto.Field( - proto.BOOL, - number=1, - ) - - -class ScheduleOptions(proto.Message): - r"""Options customizing the data transfer schedule. - - Attributes: - disable_auto_scheduling (bool): - If true, automatic scheduling of data - transfer runs for this configuration will be - disabled. The runs can be started on ad-hoc - basis using StartManualTransferRuns API. When - automatic scheduling is disabled, the - TransferConfig.schedule field will be ignored. - start_time (google.protobuf.timestamp_pb2.Timestamp): - Specifies time to start scheduling transfer - runs. The first run will be scheduled at or - after the start time according to a recurrence - pattern defined in the schedule string. The - start time can be changed at any moment. The - time when a data transfer can be triggered - manually is not limited by this option. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Defines time to stop scheduling transfer - runs. A transfer run cannot be scheduled at or - after the end time. The end time can be changed - at any moment. The time when a data transfer can - be triggered manually is not limited by this - option. 
- """ - - disable_auto_scheduling: bool = proto.Field( - proto.BOOL, - number=3, - ) - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - - -class ScheduleOptionsV2(proto.Message): - r"""V2 options customizing different types of data transfer - schedule. This field supports existing time-based and manual - transfer schedule. Also supports Event-Driven transfer schedule. - ScheduleOptionsV2 cannot be used together with - ScheduleOptions/Schedule. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - time_based_schedule (google.cloud.bigquery_datatransfer_v1.types.TimeBasedSchedule): - Time based transfer schedule options. This is - the default schedule option. - - This field is a member of `oneof`_ ``schedule``. - manual_schedule (google.cloud.bigquery_datatransfer_v1.types.ManualSchedule): - Manual transfer schedule. If set, the transfer run will not - be auto-scheduled by the system, unless the client invokes - StartManualTransferRuns. This is equivalent to - disable_auto_scheduling = true. - - This field is a member of `oneof`_ ``schedule``. - event_driven_schedule (google.cloud.bigquery_datatransfer_v1.types.EventDrivenSchedule): - Event driven transfer schedule options. If - set, the transfer will be scheduled upon events - arrial. - - This field is a member of `oneof`_ ``schedule``. - """ - - time_based_schedule: 'TimeBasedSchedule' = proto.Field( - proto.MESSAGE, - number=1, - oneof='schedule', - message='TimeBasedSchedule', - ) - manual_schedule: 'ManualSchedule' = proto.Field( - proto.MESSAGE, - number=2, - oneof='schedule', - message='ManualSchedule', - ) - event_driven_schedule: 'EventDrivenSchedule' = proto.Field( - proto.MESSAGE, - number=3, - oneof='schedule', - message='EventDrivenSchedule', - ) - - -class TimeBasedSchedule(proto.Message): - r"""Options customizing the time based transfer schedule. - Options are migrated from the original ScheduleOptions message. - - Attributes: - schedule (str): - Data transfer schedule. If the data source does not support - a custom schedule, this should be empty. If it is empty, the - default value for the data source will be used. The - specified times are in UTC. Examples of valid format: - ``1st,3rd monday of month 15:30``, - ``every wed,fri of jan,jun 13:15``, and - ``first sunday of quarter 00:00``. See more explanation - about the format here: - https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format - - NOTE: The minimum interval time between recurring transfers - depends on the data source; refer to the documentation for - your data source. - start_time (google.protobuf.timestamp_pb2.Timestamp): - Specifies time to start scheduling transfer - runs. The first run will be scheduled at or - after the start time according to a recurrence - pattern defined in the schedule string. The - start time can be changed at any moment. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Defines time to stop scheduling transfer - runs. A transfer run cannot be scheduled at or - after the end time. 
The end time can be changed - at any moment. - """ - - schedule: str = proto.Field( - proto.STRING, - number=1, - ) - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - - -class ManualSchedule(proto.Message): - r"""Options customizing manual transfers schedule. - """ - - -class EventDrivenSchedule(proto.Message): - r"""Options customizing EventDriven transfers schedule. - - Attributes: - pubsub_subscription (str): - Pub/Sub subscription name used to receive - events. Only Google Cloud Storage data source - support this option. Format: - projects/{project}/subscriptions/{subscription} - """ - - pubsub_subscription: str = proto.Field( - proto.STRING, - number=1, - ) - - -class UserInfo(proto.Message): - r"""Information about a user. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - email (str): - E-mail address of the user. - - This field is a member of `oneof`_ ``_email``. - """ - - email: str = proto.Field( - proto.STRING, - number=1, - optional=True, - ) - - -class TransferConfig(proto.Message): - r"""Represents a data transfer configuration. A transfer configuration - contains all metadata needed to perform a data transfer. For - example, ``destination_dataset_id`` specifies where data should be - stored. When a new transfer configuration is created, the specified - ``destination_dataset_id`` is created when needed and shared with - the appropriate data source service account. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Identifier. The resource name of the transfer config. - Transfer config names have the form either - ``projects/{project_id}/locations/{region}/transferConfigs/{config_id}`` - or ``projects/{project_id}/transferConfigs/{config_id}``, - where ``config_id`` is usually a UUID, even though it is not - guaranteed or required. The name is ignored when creating a - transfer config. - destination_dataset_id (str): - The BigQuery target dataset id. - - This field is a member of `oneof`_ ``destination``. - display_name (str): - User specified display name for the data - transfer. - data_source_id (str): - Data source ID. This cannot be changed once - data transfer is created. The full list of - available data source IDs can be returned - through an API call: - - https://cloud.google.com/bigquery-transfer/docs/reference/datatransfer/rest/v1/projects.locations.dataSources/list - params (google.protobuf.struct_pb2.Struct): - Parameters specific to each data source. For - more information see the bq tab in the 'Setting - up a data transfer' section for each data - source. For example the parameters for Cloud - Storage transfers are listed here: - - https://cloud.google.com/bigquery-transfer/docs/cloud-storage-transfer#bq - schedule (str): - Data transfer schedule. If the data source does not support - a custom schedule, this should be empty. If it is empty, the - default value for the data source will be used. The - specified times are in UTC. Examples of valid format: - ``1st,3rd monday of month 15:30``, - ``every wed,fri of jan,jun 13:15``, and - ``first sunday of quarter 00:00``. 
See more explanation - about the format here: - https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format - - NOTE: The minimum interval time between recurring transfers - depends on the data source; refer to the documentation for - your data source. - schedule_options (google.cloud.bigquery_datatransfer_v1.types.ScheduleOptions): - Options customizing the data transfer - schedule. - schedule_options_v2 (google.cloud.bigquery_datatransfer_v1.types.ScheduleOptionsV2): - Options customizing different types of data transfer - schedule. This field replaces "schedule" and - "schedule_options" fields. ScheduleOptionsV2 cannot be used - together with ScheduleOptions/Schedule. - data_refresh_window_days (int): - The number of days to look back to automatically refresh the - data. For example, if ``data_refresh_window_days = 10``, - then every day BigQuery reingests data for [today-10, - today-1], rather than ingesting data for just [today-1]. - Only valid if the data source supports the feature. Set the - value to 0 to use the default value. - disabled (bool): - Is this config disabled. When set to true, no - runs will be scheduled for this transfer config. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Data transfer modification time. - Ignored by server on input. - next_run_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Next time when data transfer - will run. - state (google.cloud.bigquery_datatransfer_v1.types.TransferState): - Output only. State of the most recently - updated transfer run. - user_id (int): - Deprecated. Unique ID of the user on whose - behalf transfer is done. - dataset_region (str): - Output only. Region in which BigQuery dataset - is located. - notification_pubsub_topic (str): - Pub/Sub topic where notifications will be sent after - transfer runs associated with this transfer config finish. - - The format for specifying a pubsub topic is: - ``projects/{project_id}/topics/{topic_id}`` - email_preferences (google.cloud.bigquery_datatransfer_v1.types.EmailPreferences): - Email notifications will be sent according to - these preferences to the email address of the - user who owns this transfer config. - owner_info (google.cloud.bigquery_datatransfer_v1.types.UserInfo): - Output only. Information about the user whose credentials - are used to transfer data. Populated only for - ``transferConfigs.get`` requests. In case the user - information is not available, this field will not be - populated. - - This field is a member of `oneof`_ ``_owner_info``. - encryption_configuration (google.cloud.bigquery_datatransfer_v1.types.EncryptionConfiguration): - The encryption configuration part. Currently, - it is only used for the optional KMS key name. - The BigQuery service account of your project - must be granted permissions to use the key. Read - methods will return the key name applied in - effect. Write methods will apply the key if it - is present, or otherwise try to apply project - default keys if it is absent. - error (google.rpc.status_pb2.Status): - Output only. Error code with detailed - information about reason of the latest config - failure. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - destination_dataset_id: str = proto.Field( - proto.STRING, - number=2, - oneof='destination', - ) - display_name: str = proto.Field( - proto.STRING, - number=3, - ) - data_source_id: str = proto.Field( - proto.STRING, - number=5, - ) - params: struct_pb2.Struct = proto.Field( - proto.MESSAGE, - number=9, - message=struct_pb2.Struct, - ) - schedule: str = proto.Field( - proto.STRING, - number=7, - ) - schedule_options: 'ScheduleOptions' = proto.Field( - proto.MESSAGE, - number=24, - message='ScheduleOptions', - ) - schedule_options_v2: 'ScheduleOptionsV2' = proto.Field( - proto.MESSAGE, - number=31, - message='ScheduleOptionsV2', - ) - data_refresh_window_days: int = proto.Field( - proto.INT32, - number=12, - ) - disabled: bool = proto.Field( - proto.BOOL, - number=13, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - next_run_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=8, - message=timestamp_pb2.Timestamp, - ) - state: 'TransferState' = proto.Field( - proto.ENUM, - number=10, - enum='TransferState', - ) - user_id: int = proto.Field( - proto.INT64, - number=11, - ) - dataset_region: str = proto.Field( - proto.STRING, - number=14, - ) - notification_pubsub_topic: str = proto.Field( - proto.STRING, - number=15, - ) - email_preferences: 'EmailPreferences' = proto.Field( - proto.MESSAGE, - number=18, - message='EmailPreferences', - ) - owner_info: 'UserInfo' = proto.Field( - proto.MESSAGE, - number=27, - optional=True, - message='UserInfo', - ) - encryption_configuration: 'EncryptionConfiguration' = proto.Field( - proto.MESSAGE, - number=28, - message='EncryptionConfiguration', - ) - error: status_pb2.Status = proto.Field( - proto.MESSAGE, - number=32, - message=status_pb2.Status, - ) - - -class EncryptionConfiguration(proto.Message): - r"""Represents the encryption configuration for a transfer. - - Attributes: - kms_key_name (google.protobuf.wrappers_pb2.StringValue): - The name of the KMS key used for encrypting - BigQuery data. - """ - - kms_key_name: wrappers_pb2.StringValue = proto.Field( - proto.MESSAGE, - number=1, - message=wrappers_pb2.StringValue, - ) - - -class TransferRun(proto.Message): - r"""Represents a data transfer run. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - Identifier. The resource name of the transfer run. Transfer - run names have the form - ``projects/{project_id}/locations/{location}/transferConfigs/{config_id}/runs/{run_id}``. - The name is ignored when creating a transfer run. - schedule_time (google.protobuf.timestamp_pb2.Timestamp): - Minimum time after which a transfer run can - be started. - run_time (google.protobuf.timestamp_pb2.Timestamp): - For batch transfer runs, specifies the date - and time of the data should be ingested. - error_status (google.rpc.status_pb2.Status): - Status of the transfer run. - start_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Time when transfer run was - started. Parameter ignored by server for input - requests. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Time when transfer run ended. - Parameter ignored by server for input requests. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Last time the data transfer run - state was updated. - params (google.protobuf.struct_pb2.Struct): - Output only. 
Parameters specific to each data - source. For more information see the bq tab in - the 'Setting up a data transfer' section for - each data source. For example the parameters for - Cloud Storage transfers are listed here: - - https://cloud.google.com/bigquery-transfer/docs/cloud-storage-transfer#bq - destination_dataset_id (str): - Output only. The BigQuery target dataset id. - - This field is a member of `oneof`_ ``destination``. - data_source_id (str): - Output only. Data source id. - state (google.cloud.bigquery_datatransfer_v1.types.TransferState): - Data transfer run state. Ignored for input - requests. - user_id (int): - Deprecated. Unique ID of the user on whose - behalf transfer is done. - schedule (str): - Output only. Describes the schedule of this transfer run if - it was created as part of a regular schedule. For batch - transfer runs that are scheduled manually, this is empty. - NOTE: the system might choose to delay the schedule - depending on the current load, so ``schedule_time`` doesn't - always match this. - notification_pubsub_topic (str): - Output only. Pub/Sub topic where a notification will be sent - after this transfer run finishes. - - The format for specifying a pubsub topic is: - ``projects/{project_id}/topics/{topic_id}`` - email_preferences (google.cloud.bigquery_datatransfer_v1.types.EmailPreferences): - Output only. Email notifications will be sent - according to these preferences to the email - address of the user who owns the transfer config - this run was derived from. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - schedule_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - run_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=10, - message=timestamp_pb2.Timestamp, - ) - error_status: status_pb2.Status = proto.Field( - proto.MESSAGE, - number=21, - message=status_pb2.Status, - ) - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - params: struct_pb2.Struct = proto.Field( - proto.MESSAGE, - number=9, - message=struct_pb2.Struct, - ) - destination_dataset_id: str = proto.Field( - proto.STRING, - number=2, - oneof='destination', - ) - data_source_id: str = proto.Field( - proto.STRING, - number=7, - ) - state: 'TransferState' = proto.Field( - proto.ENUM, - number=8, - enum='TransferState', - ) - user_id: int = proto.Field( - proto.INT64, - number=11, - ) - schedule: str = proto.Field( - proto.STRING, - number=12, - ) - notification_pubsub_topic: str = proto.Field( - proto.STRING, - number=23, - ) - email_preferences: 'EmailPreferences' = proto.Field( - proto.MESSAGE, - number=25, - message='EmailPreferences', - ) - - -class TransferMessage(proto.Message): - r"""Represents a user facing message for a particular data - transfer run. - - Attributes: - message_time (google.protobuf.timestamp_pb2.Timestamp): - Time when message was logged. - severity (google.cloud.bigquery_datatransfer_v1.types.TransferMessage.MessageSeverity): - Message severity. - message_text (str): - Message text. - """ - class MessageSeverity(proto.Enum): - r"""Represents data transfer user facing message severity. 
- - Values: - MESSAGE_SEVERITY_UNSPECIFIED (0): - No severity specified. - INFO (1): - Informational message. - WARNING (2): - Warning message. - ERROR (3): - Error message. - """ - MESSAGE_SEVERITY_UNSPECIFIED = 0 - INFO = 1 - WARNING = 2 - ERROR = 3 - - message_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - severity: MessageSeverity = proto.Field( - proto.ENUM, - number=2, - enum=MessageSeverity, - ) - message_text: str = proto.Field( - proto.STRING, - number=3, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/mypy.ini b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/mypy.ini deleted file mode 100644 index 574c5aed394b..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/noxfile.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/noxfile.py deleted file mode 100644 index a78c46bdad9e..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/noxfile.py +++ /dev/null @@ -1,280 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import pathlib -import re -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", - "3.12", - "3.13", -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = 'google-cloud-bigquery-datatransfer' - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.13" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "prerelease_deps", -] - -@nox.session(python=ALL_PYTHON) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def unit(session, protobuf_implementation): - """Run the unit test suite.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") - - # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. - # The 'cpp' implementation requires Protobuf<4. 
- if protobuf_implementation == "cpp": - session.install("protobuf<4") - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/bigquery_datatransfer_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - -@nox.session(python=ALL_PYTHON[-1]) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def prerelease_deps(session, protobuf_implementation): - """Run the unit test suite against pre-release versions of dependencies.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - # Install test environment dependencies - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - - # Install the package without dependencies - session.install('-e', '.', '--no-deps') - - # We test the minimum dependency versions using the minimum Python - # version so the lowest python runtime that we test has a corresponding constraints - # file, located at `testing/constraints--.txt`, which contains all of the - # dependencies and extras. - with open( - CURRENT_DIRECTORY - / "testing" - / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. - constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - session.install(*constraints_deps) - - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 - "grpcio!=1.67.0rc1", - "grpcio-status", - "protobuf", - "proto-plus", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) - - # Remaining dependencies - other_deps = [ - "requests", - ] - session.install(*other_deps) - - # Print out prerelease package versions - - session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") - session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("python", "-c", "import grpc; print(grpc.__version__)") - session.run( - "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" - ) - session.run( - "python", "-c", "import proto; print(proto.__version__)" - ) - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/bigquery_datatransfer_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. 
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_check_valid_creds_async.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_check_valid_creds_async.py deleted file mode 100644 index 8d603dc21e95..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_check_valid_creds_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CheckValidCreds -# NOTE: This snippet has been automatically generated for illustrative purposes only. 
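-# To execute this async sample, run the coroutine with asyncio, e.g.
-# `asyncio.run(sample_check_valid_creds())` (add `import asyncio` first).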
-# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datatransfer - - -# [START bigquerydatatransfer_v1_generated_DataTransferService_CheckValidCreds_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datatransfer_v1 - - -async def sample_check_valid_creds(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.CheckValidCredsRequest( - name="name_value", - ) - - # Make the request - response = await client.check_valid_creds(request=request) - - # Handle the response - print(response) - -# [END bigquerydatatransfer_v1_generated_DataTransferService_CheckValidCreds_async] diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_check_valid_creds_sync.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_check_valid_creds_sync.py deleted file mode 100644 index ca81b699f0c1..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_check_valid_creds_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CheckValidCreds -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datatransfer - - -# [START bigquerydatatransfer_v1_generated_DataTransferService_CheckValidCreds_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datatransfer_v1 - - -def sample_check_valid_creds(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.CheckValidCredsRequest( - name="name_value", - ) - - # Make the request - response = client.check_valid_creds(request=request) - - # Handle the response - print(response) - -# [END bigquerydatatransfer_v1_generated_DataTransferService_CheckValidCreds_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_create_transfer_config_async.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_create_transfer_config_async.py deleted file mode 100644 index 4ef38313bf16..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_create_transfer_config_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateTransferConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datatransfer - - -# [START bigquerydatatransfer_v1_generated_DataTransferService_CreateTransferConfig_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
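-# - A real TransferConfig will typically also need `data_source_id` and
-#   source-specific `params` set, beyond the destination dataset shown here.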
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datatransfer_v1 - - -async def sample_create_transfer_config(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient() - - # Initialize request argument(s) - transfer_config = bigquery_datatransfer_v1.TransferConfig() - transfer_config.destination_dataset_id = "destination_dataset_id_value" - - request = bigquery_datatransfer_v1.CreateTransferConfigRequest( - parent="parent_value", - transfer_config=transfer_config, - ) - - # Make the request - response = await client.create_transfer_config(request=request) - - # Handle the response - print(response) - -# [END bigquerydatatransfer_v1_generated_DataTransferService_CreateTransferConfig_async] diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_create_transfer_config_sync.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_create_transfer_config_sync.py deleted file mode 100644 index 3e0d576109e6..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_create_transfer_config_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateTransferConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datatransfer - - -# [START bigquerydatatransfer_v1_generated_DataTransferService_CreateTransferConfig_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datatransfer_v1 - - -def sample_create_transfer_config(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Initialize request argument(s) - transfer_config = bigquery_datatransfer_v1.TransferConfig() - transfer_config.destination_dataset_id = "destination_dataset_id_value" - - request = bigquery_datatransfer_v1.CreateTransferConfigRequest( - parent="parent_value", - transfer_config=transfer_config, - ) - - # Make the request - response = client.create_transfer_config(request=request) - - # Handle the response - print(response) - -# [END bigquerydatatransfer_v1_generated_DataTransferService_CreateTransferConfig_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_delete_transfer_config_async.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_delete_transfer_config_async.py deleted file mode 100644 index c3fc25a35b04..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_delete_transfer_config_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteTransferConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datatransfer - - -# [START bigquerydatatransfer_v1_generated_DataTransferService_DeleteTransferConfig_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
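-# - This RPC returns no payload (protobuf Empty), so the sample has no
-#   response to print.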
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datatransfer_v1 - - -async def sample_delete_transfer_config(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.DeleteTransferConfigRequest( - name="name_value", - ) - - # Make the request - await client.delete_transfer_config(request=request) - - -# [END bigquerydatatransfer_v1_generated_DataTransferService_DeleteTransferConfig_async] diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_delete_transfer_config_sync.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_delete_transfer_config_sync.py deleted file mode 100644 index de10c5662546..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_delete_transfer_config_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteTransferConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datatransfer - - -# [START bigquerydatatransfer_v1_generated_DataTransferService_DeleteTransferConfig_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datatransfer_v1 - - -def sample_delete_transfer_config(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.DeleteTransferConfigRequest( - name="name_value", - ) - - # Make the request - client.delete_transfer_config(request=request) - - -# [END bigquerydatatransfer_v1_generated_DataTransferService_DeleteTransferConfig_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_delete_transfer_run_async.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_delete_transfer_run_async.py deleted file mode 100644 index 094cd51bee63..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_delete_transfer_run_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteTransferRun -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datatransfer - - -# [START bigquerydatatransfer_v1_generated_DataTransferService_DeleteTransferRun_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datatransfer_v1 - - -async def sample_delete_transfer_run(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.DeleteTransferRunRequest( - name="name_value", - ) - - # Make the request - await client.delete_transfer_run(request=request) - - -# [END bigquerydatatransfer_v1_generated_DataTransferService_DeleteTransferRun_async] diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_delete_transfer_run_sync.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_delete_transfer_run_sync.py deleted file mode 100644 index 787bd775ec38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_delete_transfer_run_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteTransferRun -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datatransfer - - -# [START bigquerydatatransfer_v1_generated_DataTransferService_DeleteTransferRun_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datatransfer_v1 - - -def sample_delete_transfer_run(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.DeleteTransferRunRequest( - name="name_value", - ) - - # Make the request - client.delete_transfer_run(request=request) - - -# [END bigquerydatatransfer_v1_generated_DataTransferService_DeleteTransferRun_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_enroll_data_sources_async.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_enroll_data_sources_async.py deleted file mode 100644 index f23505b62edc..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_enroll_data_sources_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for EnrollDataSources -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datatransfer - - -# [START bigquerydatatransfer_v1_generated_DataTransferService_EnrollDataSources_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datatransfer_v1 - - -async def sample_enroll_data_sources(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.EnrollDataSourcesRequest( - name="name_value", - ) - - # Make the request - await client.enroll_data_sources(request=request) - - -# [END bigquerydatatransfer_v1_generated_DataTransferService_EnrollDataSources_async] diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_enroll_data_sources_sync.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_enroll_data_sources_sync.py deleted file mode 100644 index 8f098f4a0c4b..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_enroll_data_sources_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for EnrollDataSources -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datatransfer - - -# [START bigquerydatatransfer_v1_generated_DataTransferService_EnrollDataSources_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datatransfer_v1 - - -def sample_enroll_data_sources(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.EnrollDataSourcesRequest( - name="name_value", - ) - - # Make the request - client.enroll_data_sources(request=request) - - -# [END bigquerydatatransfer_v1_generated_DataTransferService_EnrollDataSources_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_get_data_source_async.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_get_data_source_async.py deleted file mode 100644 index 757cf2c86174..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_get_data_source_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDataSource -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datatransfer - - -# [START bigquerydatatransfer_v1_generated_DataTransferService_GetDataSource_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datatransfer_v1 - - -async def sample_get_data_source(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.GetDataSourceRequest( - name="name_value", - ) - - # Make the request - response = await client.get_data_source(request=request) - - # Handle the response - print(response) - -# [END bigquerydatatransfer_v1_generated_DataTransferService_GetDataSource_async] diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_get_data_source_sync.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_get_data_source_sync.py deleted file mode 100644 index ac1103fe8880..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_get_data_source_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetDataSource -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datatransfer - - -# [START bigquerydatatransfer_v1_generated_DataTransferService_GetDataSource_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datatransfer_v1 - - -def sample_get_data_source(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.GetDataSourceRequest( - name="name_value", - ) - - # Make the request - response = client.get_data_source(request=request) - - # Handle the response - print(response) - -# [END bigquerydatatransfer_v1_generated_DataTransferService_GetDataSource_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_get_transfer_config_async.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_get_transfer_config_async.py deleted file mode 100644 index 5e6a469d0f8b..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_get_transfer_config_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetTransferConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datatransfer - - -# [START bigquerydatatransfer_v1_generated_DataTransferService_GetTransferConfig_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datatransfer_v1 - - -async def sample_get_transfer_config(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.GetTransferConfigRequest( - name="name_value", - ) - - # Make the request - response = await client.get_transfer_config(request=request) - - # Handle the response - print(response) - -# [END bigquerydatatransfer_v1_generated_DataTransferService_GetTransferConfig_async] diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_get_transfer_config_sync.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_get_transfer_config_sync.py deleted file mode 100644 index f1e377a717db..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_get_transfer_config_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetTransferConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datatransfer - - -# [START bigquerydatatransfer_v1_generated_DataTransferService_GetTransferConfig_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datatransfer_v1 - - -def sample_get_transfer_config(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.GetTransferConfigRequest( - name="name_value", - ) - - # Make the request - response = client.get_transfer_config(request=request) - - # Handle the response - print(response) - -# [END bigquerydatatransfer_v1_generated_DataTransferService_GetTransferConfig_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_get_transfer_run_async.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_get_transfer_run_async.py deleted file mode 100644 index d12fc2a99581..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_get_transfer_run_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetTransferRun -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datatransfer - - -# [START bigquerydatatransfer_v1_generated_DataTransferService_GetTransferRun_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datatransfer_v1 - - -async def sample_get_transfer_run(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.GetTransferRunRequest( - name="name_value", - ) - - # Make the request - response = await client.get_transfer_run(request=request) - - # Handle the response - print(response) - -# [END bigquerydatatransfer_v1_generated_DataTransferService_GetTransferRun_async] diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_get_transfer_run_sync.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_get_transfer_run_sync.py deleted file mode 100644 index cc07aca3a201..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_get_transfer_run_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetTransferRun -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datatransfer - - -# [START bigquerydatatransfer_v1_generated_DataTransferService_GetTransferRun_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datatransfer_v1 - - -def sample_get_transfer_run(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.GetTransferRunRequest( - name="name_value", - ) - - # Make the request - response = client.get_transfer_run(request=request) - - # Handle the response - print(response) - -# [END bigquerydatatransfer_v1_generated_DataTransferService_GetTransferRun_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_list_data_sources_async.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_list_data_sources_async.py deleted file mode 100644 index a905166f554a..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_list_data_sources_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDataSources -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datatransfer - - -# [START bigquerydatatransfer_v1_generated_DataTransferService_ListDataSources_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
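-# - The call returns a pager; iterating it (here with `async for`) fetches
-#   successive result pages lazily.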
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datatransfer_v1 - - -async def sample_list_data_sources(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.ListDataSourcesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_sources(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END bigquerydatatransfer_v1_generated_DataTransferService_ListDataSources_async] diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_list_data_sources_sync.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_list_data_sources_sync.py deleted file mode 100644 index 474991b4c394..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_list_data_sources_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListDataSources -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datatransfer - - -# [START bigquerydatatransfer_v1_generated_DataTransferService_ListDataSources_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datatransfer_v1 - - -def sample_list_data_sources(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.ListDataSourcesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_data_sources(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END bigquerydatatransfer_v1_generated_DataTransferService_ListDataSources_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_list_transfer_configs_async.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_list_transfer_configs_async.py deleted file mode 100644 index 418500f15cf6..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_list_transfer_configs_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListTransferConfigs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datatransfer - - -# [START bigquerydatatransfer_v1_generated_DataTransferService_ListTransferConfigs_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datatransfer_v1 - - -async def sample_list_transfer_configs(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.ListTransferConfigsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_transfer_configs(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END bigquerydatatransfer_v1_generated_DataTransferService_ListTransferConfigs_async] diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_list_transfer_configs_sync.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_list_transfer_configs_sync.py deleted file mode 100644 index 3144d4934159..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_list_transfer_configs_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListTransferConfigs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datatransfer - - -# [START bigquerydatatransfer_v1_generated_DataTransferService_ListTransferConfigs_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datatransfer_v1 - - -def sample_list_transfer_configs(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.ListTransferConfigsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_transfer_configs(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END bigquerydatatransfer_v1_generated_DataTransferService_ListTransferConfigs_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_list_transfer_logs_async.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_list_transfer_logs_async.py deleted file mode 100644 index 9103454ae078..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_list_transfer_logs_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListTransferLogs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datatransfer - - -# [START bigquerydatatransfer_v1_generated_DataTransferService_ListTransferLogs_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datatransfer_v1 - - -async def sample_list_transfer_logs(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.ListTransferLogsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_transfer_logs(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END bigquerydatatransfer_v1_generated_DataTransferService_ListTransferLogs_async] diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_list_transfer_logs_sync.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_list_transfer_logs_sync.py deleted file mode 100644 index 5f242050e8cd..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_list_transfer_logs_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListTransferLogs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datatransfer - - -# [START bigquerydatatransfer_v1_generated_DataTransferService_ListTransferLogs_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datatransfer_v1 - - -def sample_list_transfer_logs(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.ListTransferLogsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_transfer_logs(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END bigquerydatatransfer_v1_generated_DataTransferService_ListTransferLogs_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_list_transfer_runs_async.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_list_transfer_runs_async.py deleted file mode 100644 index 2093d8ac6819..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_list_transfer_runs_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListTransferRuns -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datatransfer - - -# [START bigquerydatatransfer_v1_generated_DataTransferService_ListTransferRuns_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datatransfer_v1 - - -async def sample_list_transfer_runs(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.ListTransferRunsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_transfer_runs(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END bigquerydatatransfer_v1_generated_DataTransferService_ListTransferRuns_async] diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_list_transfer_runs_sync.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_list_transfer_runs_sync.py deleted file mode 100644 index 95a8a962936a..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_list_transfer_runs_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListTransferRuns -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datatransfer - - -# [START bigquerydatatransfer_v1_generated_DataTransferService_ListTransferRuns_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datatransfer_v1 - - -def sample_list_transfer_runs(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.ListTransferRunsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_transfer_runs(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END bigquerydatatransfer_v1_generated_DataTransferService_ListTransferRuns_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_schedule_transfer_runs_async.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_schedule_transfer_runs_async.py deleted file mode 100644 index 4cc42268ee84..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_schedule_transfer_runs_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ScheduleTransferRuns -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datatransfer - - -# [START bigquerydatatransfer_v1_generated_DataTransferService_ScheduleTransferRuns_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datatransfer_v1 - - -async def sample_schedule_transfer_runs(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.ScheduleTransferRunsRequest( - parent="parent_value", - ) - - # Make the request - response = await client.schedule_transfer_runs(request=request) - - # Handle the response - print(response) - -# [END bigquerydatatransfer_v1_generated_DataTransferService_ScheduleTransferRuns_async] diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_schedule_transfer_runs_sync.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_schedule_transfer_runs_sync.py deleted file mode 100644 index 88badd334f39..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_schedule_transfer_runs_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ScheduleTransferRuns -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datatransfer - - -# [START bigquerydatatransfer_v1_generated_DataTransferService_ScheduleTransferRuns_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datatransfer_v1 - - -def sample_schedule_transfer_runs(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.ScheduleTransferRunsRequest( - parent="parent_value", - ) - - # Make the request - response = client.schedule_transfer_runs(request=request) - - # Handle the response - print(response) - -# [END bigquerydatatransfer_v1_generated_DataTransferService_ScheduleTransferRuns_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_start_manual_transfer_runs_async.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_start_manual_transfer_runs_async.py deleted file mode 100644 index aac3bbfa671f..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_start_manual_transfer_runs_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for StartManualTransferRuns -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datatransfer - - -# [START bigquerydatatransfer_v1_generated_DataTransferService_StartManualTransferRuns_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datatransfer_v1 - - -async def sample_start_manual_transfer_runs(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.StartManualTransferRunsRequest( - parent="parent_value", - ) - - # Make the request - response = await client.start_manual_transfer_runs(request=request) - - # Handle the response - print(response) - -# [END bigquerydatatransfer_v1_generated_DataTransferService_StartManualTransferRuns_async] diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_start_manual_transfer_runs_sync.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_start_manual_transfer_runs_sync.py deleted file mode 100644 index e7114bb54c8d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_start_manual_transfer_runs_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for StartManualTransferRuns -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datatransfer - - -# [START bigquerydatatransfer_v1_generated_DataTransferService_StartManualTransferRuns_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datatransfer_v1 - - -def sample_start_manual_transfer_runs(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.StartManualTransferRunsRequest( - parent="parent_value", - ) - - # Make the request - response = client.start_manual_transfer_runs(request=request) - - # Handle the response - print(response) - -# [END bigquerydatatransfer_v1_generated_DataTransferService_StartManualTransferRuns_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_unenroll_data_sources_async.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_unenroll_data_sources_async.py deleted file mode 100644 index f85eea5c3d48..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_unenroll_data_sources_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UnenrollDataSources -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datatransfer - - -# [START bigquerydatatransfer_v1_generated_DataTransferService_UnenrollDataSources_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datatransfer_v1 - - -async def sample_unenroll_data_sources(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.UnenrollDataSourcesRequest( - name="name_value", - ) - - # Make the request - await client.unenroll_data_sources(request=request) - - -# [END bigquerydatatransfer_v1_generated_DataTransferService_UnenrollDataSources_async] diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_unenroll_data_sources_sync.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_unenroll_data_sources_sync.py deleted file mode 100644 index 9f7ba1c0cc86..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_unenroll_data_sources_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UnenrollDataSources -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datatransfer - - -# [START bigquerydatatransfer_v1_generated_DataTransferService_UnenrollDataSources_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datatransfer_v1 - - -def sample_unenroll_data_sources(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Initialize request argument(s) - request = bigquery_datatransfer_v1.UnenrollDataSourcesRequest( - name="name_value", - ) - - # Make the request - client.unenroll_data_sources(request=request) - - -# [END bigquerydatatransfer_v1_generated_DataTransferService_UnenrollDataSources_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_update_transfer_config_async.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_update_transfer_config_async.py deleted file mode 100644 index 4cb4820ced71..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_update_transfer_config_async.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateTransferConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datatransfer - - -# [START bigquerydatatransfer_v1_generated_DataTransferService_UpdateTransferConfig_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datatransfer_v1 - - -async def sample_update_transfer_config(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceAsyncClient() - - # Initialize request argument(s) - transfer_config = bigquery_datatransfer_v1.TransferConfig() - transfer_config.destination_dataset_id = "destination_dataset_id_value" - - request = bigquery_datatransfer_v1.UpdateTransferConfigRequest( - transfer_config=transfer_config, - ) - - # Make the request - response = await client.update_transfer_config(request=request) - - # Handle the response - print(response) - -# [END bigquerydatatransfer_v1_generated_DataTransferService_UpdateTransferConfig_async] diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_update_transfer_config_sync.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_update_transfer_config_sync.py deleted file mode 100644 index 984701abb0b3..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/bigquerydatatransfer_v1_generated_data_transfer_service_update_transfer_config_sync.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateTransferConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-datatransfer - - -# [START bigquerydatatransfer_v1_generated_DataTransferService_UpdateTransferConfig_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_datatransfer_v1 - - -def sample_update_transfer_config(): - # Create a client - client = bigquery_datatransfer_v1.DataTransferServiceClient() - - # Initialize request argument(s) - transfer_config = bigquery_datatransfer_v1.TransferConfig() - transfer_config.destination_dataset_id = "destination_dataset_id_value" - - request = bigquery_datatransfer_v1.UpdateTransferConfigRequest( - transfer_config=transfer_config, - ) - - # Make the request - response = client.update_transfer_config(request=request) - - # Handle the response - print(response) - -# [END bigquerydatatransfer_v1_generated_DataTransferService_UpdateTransferConfig_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json deleted file mode 100644 index b0668de9f1d8..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json +++ /dev/null @@ -1,2575 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.bigquery.datatransfer.v1", - "version": "v1" - } - ], - "language": "PYTHON", - "name": "google-cloud-bigquery-datatransfer", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceAsyncClient", - "shortName": "DataTransferServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceAsyncClient.check_valid_creds", - "method": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService.CheckValidCreds", - "service": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "shortName": "DataTransferService" - }, - "shortName": "CheckValidCreds" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datatransfer_v1.types.CheckValidCredsRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_datatransfer_v1.types.CheckValidCredsResponse", - "shortName": "check_valid_creds" - }, - "description": "Sample for CheckValidCreds", - "file": "bigquerydatatransfer_v1_generated_data_transfer_service_check_valid_creds_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatatransfer_v1_generated_DataTransferService_CheckValidCreds_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatatransfer_v1_generated_data_transfer_service_check_valid_creds_async.py" - }, - { - "canonical": true, - "clientMethod": { - 
"client": { - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceClient", - "shortName": "DataTransferServiceClient" - }, - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceClient.check_valid_creds", - "method": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService.CheckValidCreds", - "service": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "shortName": "DataTransferService" - }, - "shortName": "CheckValidCreds" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datatransfer_v1.types.CheckValidCredsRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_datatransfer_v1.types.CheckValidCredsResponse", - "shortName": "check_valid_creds" - }, - "description": "Sample for CheckValidCreds", - "file": "bigquerydatatransfer_v1_generated_data_transfer_service_check_valid_creds_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatatransfer_v1_generated_DataTransferService_CheckValidCreds_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatatransfer_v1_generated_data_transfer_service_check_valid_creds_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceAsyncClient", - "shortName": "DataTransferServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceAsyncClient.create_transfer_config", - "method": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService.CreateTransferConfig", - "service": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "shortName": "DataTransferService" - }, - "shortName": "CreateTransferConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datatransfer_v1.types.CreateTransferConfigRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "transfer_config", - "type": "google.cloud.bigquery_datatransfer_v1.types.TransferConfig" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_datatransfer_v1.types.TransferConfig", - "shortName": "create_transfer_config" - }, - "description": "Sample for CreateTransferConfig", - "file": "bigquerydatatransfer_v1_generated_data_transfer_service_create_transfer_config_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatatransfer_v1_generated_DataTransferService_CreateTransferConfig_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - 
"start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatatransfer_v1_generated_data_transfer_service_create_transfer_config_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceClient", - "shortName": "DataTransferServiceClient" - }, - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceClient.create_transfer_config", - "method": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService.CreateTransferConfig", - "service": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "shortName": "DataTransferService" - }, - "shortName": "CreateTransferConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datatransfer_v1.types.CreateTransferConfigRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "transfer_config", - "type": "google.cloud.bigquery_datatransfer_v1.types.TransferConfig" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_datatransfer_v1.types.TransferConfig", - "shortName": "create_transfer_config" - }, - "description": "Sample for CreateTransferConfig", - "file": "bigquerydatatransfer_v1_generated_data_transfer_service_create_transfer_config_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatatransfer_v1_generated_DataTransferService_CreateTransferConfig_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatatransfer_v1_generated_data_transfer_service_create_transfer_config_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceAsyncClient", - "shortName": "DataTransferServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceAsyncClient.delete_transfer_config", - "method": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService.DeleteTransferConfig", - "service": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "shortName": "DataTransferService" - }, - "shortName": "DeleteTransferConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datatransfer_v1.types.DeleteTransferConfigRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_transfer_config" - }, - "description": "Sample for DeleteTransferConfig", - "file": "bigquerydatatransfer_v1_generated_data_transfer_service_delete_transfer_config_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"bigquerydatatransfer_v1_generated_DataTransferService_DeleteTransferConfig_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatatransfer_v1_generated_data_transfer_service_delete_transfer_config_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceClient", - "shortName": "DataTransferServiceClient" - }, - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceClient.delete_transfer_config", - "method": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService.DeleteTransferConfig", - "service": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "shortName": "DataTransferService" - }, - "shortName": "DeleteTransferConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datatransfer_v1.types.DeleteTransferConfigRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_transfer_config" - }, - "description": "Sample for DeleteTransferConfig", - "file": "bigquerydatatransfer_v1_generated_data_transfer_service_delete_transfer_config_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatatransfer_v1_generated_DataTransferService_DeleteTransferConfig_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatatransfer_v1_generated_data_transfer_service_delete_transfer_config_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceAsyncClient", - "shortName": "DataTransferServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceAsyncClient.delete_transfer_run", - "method": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService.DeleteTransferRun", - "service": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "shortName": "DataTransferService" - }, - "shortName": "DeleteTransferRun" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datatransfer_v1.types.DeleteTransferRunRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_transfer_run" - }, - "description": "Sample for DeleteTransferRun", - "file": "bigquerydatatransfer_v1_generated_data_transfer_service_delete_transfer_run_async.py", - "language": "PYTHON", - "origin": 
"API_DEFINITION", - "regionTag": "bigquerydatatransfer_v1_generated_DataTransferService_DeleteTransferRun_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatatransfer_v1_generated_data_transfer_service_delete_transfer_run_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceClient", - "shortName": "DataTransferServiceClient" - }, - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceClient.delete_transfer_run", - "method": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService.DeleteTransferRun", - "service": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "shortName": "DataTransferService" - }, - "shortName": "DeleteTransferRun" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datatransfer_v1.types.DeleteTransferRunRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_transfer_run" - }, - "description": "Sample for DeleteTransferRun", - "file": "bigquerydatatransfer_v1_generated_data_transfer_service_delete_transfer_run_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatatransfer_v1_generated_DataTransferService_DeleteTransferRun_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatatransfer_v1_generated_data_transfer_service_delete_transfer_run_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceAsyncClient", - "shortName": "DataTransferServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceAsyncClient.enroll_data_sources", - "method": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService.EnrollDataSources", - "service": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "shortName": "DataTransferService" - }, - "shortName": "EnrollDataSources" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datatransfer_v1.types.EnrollDataSourcesRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "enroll_data_sources" - }, - "description": "Sample for EnrollDataSources", - "file": "bigquerydatatransfer_v1_generated_data_transfer_service_enroll_data_sources_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"bigquerydatatransfer_v1_generated_DataTransferService_EnrollDataSources_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatatransfer_v1_generated_data_transfer_service_enroll_data_sources_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceClient", - "shortName": "DataTransferServiceClient" - }, - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceClient.enroll_data_sources", - "method": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService.EnrollDataSources", - "service": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "shortName": "DataTransferService" - }, - "shortName": "EnrollDataSources" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datatransfer_v1.types.EnrollDataSourcesRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "enroll_data_sources" - }, - "description": "Sample for EnrollDataSources", - "file": "bigquerydatatransfer_v1_generated_data_transfer_service_enroll_data_sources_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatatransfer_v1_generated_DataTransferService_EnrollDataSources_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatatransfer_v1_generated_data_transfer_service_enroll_data_sources_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceAsyncClient", - "shortName": "DataTransferServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceAsyncClient.get_data_source", - "method": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService.GetDataSource", - "service": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "shortName": "DataTransferService" - }, - "shortName": "GetDataSource" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datatransfer_v1.types.GetDataSourceRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_datatransfer_v1.types.DataSource", - "shortName": "get_data_source" - }, - "description": "Sample for GetDataSource", - "file": "bigquerydatatransfer_v1_generated_data_transfer_service_get_data_source_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - 
"regionTag": "bigquerydatatransfer_v1_generated_DataTransferService_GetDataSource_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatatransfer_v1_generated_data_transfer_service_get_data_source_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceClient", - "shortName": "DataTransferServiceClient" - }, - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceClient.get_data_source", - "method": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService.GetDataSource", - "service": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "shortName": "DataTransferService" - }, - "shortName": "GetDataSource" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datatransfer_v1.types.GetDataSourceRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_datatransfer_v1.types.DataSource", - "shortName": "get_data_source" - }, - "description": "Sample for GetDataSource", - "file": "bigquerydatatransfer_v1_generated_data_transfer_service_get_data_source_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatatransfer_v1_generated_DataTransferService_GetDataSource_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatatransfer_v1_generated_data_transfer_service_get_data_source_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceAsyncClient", - "shortName": "DataTransferServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceAsyncClient.get_transfer_config", - "method": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService.GetTransferConfig", - "service": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "shortName": "DataTransferService" - }, - "shortName": "GetTransferConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datatransfer_v1.types.GetTransferConfigRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_datatransfer_v1.types.TransferConfig", - "shortName": "get_transfer_config" - }, - "description": "Sample for 
GetTransferConfig", - "file": "bigquerydatatransfer_v1_generated_data_transfer_service_get_transfer_config_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatatransfer_v1_generated_DataTransferService_GetTransferConfig_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatatransfer_v1_generated_data_transfer_service_get_transfer_config_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceClient", - "shortName": "DataTransferServiceClient" - }, - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceClient.get_transfer_config", - "method": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService.GetTransferConfig", - "service": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "shortName": "DataTransferService" - }, - "shortName": "GetTransferConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datatransfer_v1.types.GetTransferConfigRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_datatransfer_v1.types.TransferConfig", - "shortName": "get_transfer_config" - }, - "description": "Sample for GetTransferConfig", - "file": "bigquerydatatransfer_v1_generated_data_transfer_service_get_transfer_config_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatatransfer_v1_generated_DataTransferService_GetTransferConfig_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatatransfer_v1_generated_data_transfer_service_get_transfer_config_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceAsyncClient", - "shortName": "DataTransferServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceAsyncClient.get_transfer_run", - "method": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService.GetTransferRun", - "service": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "shortName": "DataTransferService" - }, - "shortName": "GetTransferRun" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datatransfer_v1.types.GetTransferRunRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": 
"Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_datatransfer_v1.types.TransferRun", - "shortName": "get_transfer_run" - }, - "description": "Sample for GetTransferRun", - "file": "bigquerydatatransfer_v1_generated_data_transfer_service_get_transfer_run_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatatransfer_v1_generated_DataTransferService_GetTransferRun_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatatransfer_v1_generated_data_transfer_service_get_transfer_run_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceClient", - "shortName": "DataTransferServiceClient" - }, - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceClient.get_transfer_run", - "method": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService.GetTransferRun", - "service": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "shortName": "DataTransferService" - }, - "shortName": "GetTransferRun" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datatransfer_v1.types.GetTransferRunRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_datatransfer_v1.types.TransferRun", - "shortName": "get_transfer_run" - }, - "description": "Sample for GetTransferRun", - "file": "bigquerydatatransfer_v1_generated_data_transfer_service_get_transfer_run_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatatransfer_v1_generated_DataTransferService_GetTransferRun_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatatransfer_v1_generated_data_transfer_service_get_transfer_run_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceAsyncClient", - "shortName": "DataTransferServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceAsyncClient.list_data_sources", - "method": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService.ListDataSources", - "service": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "shortName": "DataTransferService" - }, - "shortName": "ListDataSources" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datatransfer_v1.types.ListDataSourcesRequest" - }, - { - "name": "parent", - "type": 
"str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.pagers.ListDataSourcesAsyncPager", - "shortName": "list_data_sources" - }, - "description": "Sample for ListDataSources", - "file": "bigquerydatatransfer_v1_generated_data_transfer_service_list_data_sources_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatatransfer_v1_generated_DataTransferService_ListDataSources_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatatransfer_v1_generated_data_transfer_service_list_data_sources_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceClient", - "shortName": "DataTransferServiceClient" - }, - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceClient.list_data_sources", - "method": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService.ListDataSources", - "service": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "shortName": "DataTransferService" - }, - "shortName": "ListDataSources" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datatransfer_v1.types.ListDataSourcesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.pagers.ListDataSourcesPager", - "shortName": "list_data_sources" - }, - "description": "Sample for ListDataSources", - "file": "bigquerydatatransfer_v1_generated_data_transfer_service_list_data_sources_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatatransfer_v1_generated_DataTransferService_ListDataSources_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatatransfer_v1_generated_data_transfer_service_list_data_sources_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceAsyncClient", - "shortName": "DataTransferServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceAsyncClient.list_transfer_configs", - "method": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService.ListTransferConfigs", - "service": { - "fullName": 
"google.cloud.bigquery.datatransfer.v1.DataTransferService", - "shortName": "DataTransferService" - }, - "shortName": "ListTransferConfigs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datatransfer_v1.types.ListTransferConfigsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.pagers.ListTransferConfigsAsyncPager", - "shortName": "list_transfer_configs" - }, - "description": "Sample for ListTransferConfigs", - "file": "bigquerydatatransfer_v1_generated_data_transfer_service_list_transfer_configs_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatatransfer_v1_generated_DataTransferService_ListTransferConfigs_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatatransfer_v1_generated_data_transfer_service_list_transfer_configs_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceClient", - "shortName": "DataTransferServiceClient" - }, - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceClient.list_transfer_configs", - "method": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService.ListTransferConfigs", - "service": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "shortName": "DataTransferService" - }, - "shortName": "ListTransferConfigs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datatransfer_v1.types.ListTransferConfigsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.pagers.ListTransferConfigsPager", - "shortName": "list_transfer_configs" - }, - "description": "Sample for ListTransferConfigs", - "file": "bigquerydatatransfer_v1_generated_data_transfer_service_list_transfer_configs_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatatransfer_v1_generated_DataTransferService_ListTransferConfigs_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatatransfer_v1_generated_data_transfer_service_list_transfer_configs_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": 
"google.cloud.bigquery_datatransfer_v1.DataTransferServiceAsyncClient", - "shortName": "DataTransferServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceAsyncClient.list_transfer_logs", - "method": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService.ListTransferLogs", - "service": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "shortName": "DataTransferService" - }, - "shortName": "ListTransferLogs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datatransfer_v1.types.ListTransferLogsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.pagers.ListTransferLogsAsyncPager", - "shortName": "list_transfer_logs" - }, - "description": "Sample for ListTransferLogs", - "file": "bigquerydatatransfer_v1_generated_data_transfer_service_list_transfer_logs_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatatransfer_v1_generated_DataTransferService_ListTransferLogs_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatatransfer_v1_generated_data_transfer_service_list_transfer_logs_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceClient", - "shortName": "DataTransferServiceClient" - }, - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceClient.list_transfer_logs", - "method": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService.ListTransferLogs", - "service": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "shortName": "DataTransferService" - }, - "shortName": "ListTransferLogs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datatransfer_v1.types.ListTransferLogsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.pagers.ListTransferLogsPager", - "shortName": "list_transfer_logs" - }, - "description": "Sample for ListTransferLogs", - "file": "bigquerydatatransfer_v1_generated_data_transfer_service_list_transfer_logs_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatatransfer_v1_generated_DataTransferService_ListTransferLogs_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": 
"REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatatransfer_v1_generated_data_transfer_service_list_transfer_logs_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceAsyncClient", - "shortName": "DataTransferServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceAsyncClient.list_transfer_runs", - "method": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService.ListTransferRuns", - "service": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "shortName": "DataTransferService" - }, - "shortName": "ListTransferRuns" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datatransfer_v1.types.ListTransferRunsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.pagers.ListTransferRunsAsyncPager", - "shortName": "list_transfer_runs" - }, - "description": "Sample for ListTransferRuns", - "file": "bigquerydatatransfer_v1_generated_data_transfer_service_list_transfer_runs_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatatransfer_v1_generated_DataTransferService_ListTransferRuns_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatatransfer_v1_generated_data_transfer_service_list_transfer_runs_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceClient", - "shortName": "DataTransferServiceClient" - }, - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceClient.list_transfer_runs", - "method": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService.ListTransferRuns", - "service": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "shortName": "DataTransferService" - }, - "shortName": "ListTransferRuns" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datatransfer_v1.types.ListTransferRunsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.pagers.ListTransferRunsPager", - "shortName": "list_transfer_runs" - }, - "description": "Sample for ListTransferRuns", - "file": "bigquerydatatransfer_v1_generated_data_transfer_service_list_transfer_runs_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatatransfer_v1_generated_DataTransferService_ListTransferRuns_sync", - "segments": [ - { - "end": 
52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatatransfer_v1_generated_data_transfer_service_list_transfer_runs_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceAsyncClient", - "shortName": "DataTransferServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceAsyncClient.schedule_transfer_runs", - "method": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService.ScheduleTransferRuns", - "service": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "shortName": "DataTransferService" - }, - "shortName": "ScheduleTransferRuns" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datatransfer_v1.types.ScheduleTransferRunsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "start_time", - "type": "google.protobuf.timestamp_pb2.Timestamp" - }, - { - "name": "end_time", - "type": "google.protobuf.timestamp_pb2.Timestamp" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_datatransfer_v1.types.ScheduleTransferRunsResponse", - "shortName": "schedule_transfer_runs" - }, - "description": "Sample for ScheduleTransferRuns", - "file": "bigquerydatatransfer_v1_generated_data_transfer_service_schedule_transfer_runs_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatatransfer_v1_generated_DataTransferService_ScheduleTransferRuns_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatatransfer_v1_generated_data_transfer_service_schedule_transfer_runs_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceClient", - "shortName": "DataTransferServiceClient" - }, - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceClient.schedule_transfer_runs", - "method": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService.ScheduleTransferRuns", - "service": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "shortName": "DataTransferService" - }, - "shortName": "ScheduleTransferRuns" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datatransfer_v1.types.ScheduleTransferRunsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "start_time", - "type": "google.protobuf.timestamp_pb2.Timestamp" - }, - { - "name": "end_time", - "type": "google.protobuf.timestamp_pb2.Timestamp" - }, - { - "name": "retry", - "type": 
"google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_datatransfer_v1.types.ScheduleTransferRunsResponse", - "shortName": "schedule_transfer_runs" - }, - "description": "Sample for ScheduleTransferRuns", - "file": "bigquerydatatransfer_v1_generated_data_transfer_service_schedule_transfer_runs_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatatransfer_v1_generated_DataTransferService_ScheduleTransferRuns_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatatransfer_v1_generated_data_transfer_service_schedule_transfer_runs_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceAsyncClient", - "shortName": "DataTransferServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceAsyncClient.start_manual_transfer_runs", - "method": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService.StartManualTransferRuns", - "service": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "shortName": "DataTransferService" - }, - "shortName": "StartManualTransferRuns" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datatransfer_v1.types.StartManualTransferRunsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_datatransfer_v1.types.StartManualTransferRunsResponse", - "shortName": "start_manual_transfer_runs" - }, - "description": "Sample for StartManualTransferRuns", - "file": "bigquerydatatransfer_v1_generated_data_transfer_service_start_manual_transfer_runs_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatatransfer_v1_generated_DataTransferService_StartManualTransferRuns_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatatransfer_v1_generated_data_transfer_service_start_manual_transfer_runs_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceClient", - "shortName": "DataTransferServiceClient" - }, - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceClient.start_manual_transfer_runs", - "method": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService.StartManualTransferRuns", - "service": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", 
- "shortName": "DataTransferService" - }, - "shortName": "StartManualTransferRuns" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datatransfer_v1.types.StartManualTransferRunsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_datatransfer_v1.types.StartManualTransferRunsResponse", - "shortName": "start_manual_transfer_runs" - }, - "description": "Sample for StartManualTransferRuns", - "file": "bigquerydatatransfer_v1_generated_data_transfer_service_start_manual_transfer_runs_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatatransfer_v1_generated_DataTransferService_StartManualTransferRuns_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatatransfer_v1_generated_data_transfer_service_start_manual_transfer_runs_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceAsyncClient", - "shortName": "DataTransferServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceAsyncClient.unenroll_data_sources", - "method": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService.UnenrollDataSources", - "service": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "shortName": "DataTransferService" - }, - "shortName": "UnenrollDataSources" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datatransfer_v1.types.UnenrollDataSourcesRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "unenroll_data_sources" - }, - "description": "Sample for UnenrollDataSources", - "file": "bigquerydatatransfer_v1_generated_data_transfer_service_unenroll_data_sources_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatatransfer_v1_generated_DataTransferService_UnenrollDataSources_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatatransfer_v1_generated_data_transfer_service_unenroll_data_sources_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceClient", - "shortName": "DataTransferServiceClient" - }, - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceClient.unenroll_data_sources", - "method": { - "fullName": 
"google.cloud.bigquery.datatransfer.v1.DataTransferService.UnenrollDataSources", - "service": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "shortName": "DataTransferService" - }, - "shortName": "UnenrollDataSources" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datatransfer_v1.types.UnenrollDataSourcesRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "unenroll_data_sources" - }, - "description": "Sample for UnenrollDataSources", - "file": "bigquerydatatransfer_v1_generated_data_transfer_service_unenroll_data_sources_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatatransfer_v1_generated_DataTransferService_UnenrollDataSources_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatatransfer_v1_generated_data_transfer_service_unenroll_data_sources_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceAsyncClient", - "shortName": "DataTransferServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceAsyncClient.update_transfer_config", - "method": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService.UpdateTransferConfig", - "service": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "shortName": "DataTransferService" - }, - "shortName": "UpdateTransferConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datatransfer_v1.types.UpdateTransferConfigRequest" - }, - { - "name": "transfer_config", - "type": "google.cloud.bigquery_datatransfer_v1.types.TransferConfig" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_datatransfer_v1.types.TransferConfig", - "shortName": "update_transfer_config" - }, - "description": "Sample for UpdateTransferConfig", - "file": "bigquerydatatransfer_v1_generated_data_transfer_service_update_transfer_config_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatatransfer_v1_generated_DataTransferService_UpdateTransferConfig_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatatransfer_v1_generated_data_transfer_service_update_transfer_config_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": 
{ - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceClient", - "shortName": "DataTransferServiceClient" - }, - "fullName": "google.cloud.bigquery_datatransfer_v1.DataTransferServiceClient.update_transfer_config", - "method": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService.UpdateTransferConfig", - "service": { - "fullName": "google.cloud.bigquery.datatransfer.v1.DataTransferService", - "shortName": "DataTransferService" - }, - "shortName": "UpdateTransferConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_datatransfer_v1.types.UpdateTransferConfigRequest" - }, - { - "name": "transfer_config", - "type": "google.cloud.bigquery_datatransfer_v1.types.TransferConfig" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_datatransfer_v1.types.TransferConfig", - "shortName": "update_transfer_config" - }, - "description": "Sample for UpdateTransferConfig", - "file": "bigquerydatatransfer_v1_generated_data_transfer_service_update_transfer_config_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerydatatransfer_v1_generated_DataTransferService_UpdateTransferConfig_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerydatatransfer_v1_generated_data_transfer_service_update_transfer_config_sync.py" - } - ] -} diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/scripts/fixup_bigquery_datatransfer_v1_keywords.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/scripts/fixup_bigquery_datatransfer_v1_keywords.py deleted file mode 100644 index b168248b36ab..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/scripts/fixup_bigquery_datatransfer_v1_keywords.py +++ /dev/null @@ -1,191 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class bigquery_datatransferCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'check_valid_creds': ('name', ), - 'create_transfer_config': ('parent', 'transfer_config', 'authorization_code', 'version_info', 'service_account_name', ), - 'delete_transfer_config': ('name', ), - 'delete_transfer_run': ('name', ), - 'enroll_data_sources': ('name', 'data_source_ids', ), - 'get_data_source': ('name', ), - 'get_transfer_config': ('name', ), - 'get_transfer_run': ('name', ), - 'list_data_sources': ('parent', 'page_token', 'page_size', ), - 'list_transfer_configs': ('parent', 'data_source_ids', 'page_token', 'page_size', ), - 'list_transfer_logs': ('parent', 'page_token', 'page_size', 'message_types', ), - 'list_transfer_runs': ('parent', 'states', 'page_token', 'page_size', 'run_attempt', ), - 'schedule_transfer_runs': ('parent', 'start_time', 'end_time', ), - 'start_manual_transfer_runs': ('parent', 'requested_time_range', 'requested_run_time', ), - 'unenroll_data_sources': ('name', 'data_source_ids', ), - 'update_transfer_config': ('transfer_config', 'update_mask', 'authorization_code', 'version_info', 'service_account_name', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=bigquery_datatransferCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. 
- - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the bigquery_datatransfer client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/setup.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/setup.py deleted file mode 100644 index 1b737d4f2193..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/setup.py +++ /dev/null @@ -1,98 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
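The fixup script deleted above rewrites call sites from the old flattened convention to the request-object convention: positional and method-specific keyword arguments are folded into a single ``request`` dict (keyed by the names in ``METHOD_TO_PARAMS``), while the control parameters ``retry``, ``timeout``, and ``metadata`` are preserved as keyword arguments. It is run as ``python fixup_bigquery_datatransfer_v1_keywords.py -d <input_dir> -o <output_dir>``, and the output directory must already exist and be empty. A hypothetical before/after pair (illustrative input, not taken from any deleted file):

.. code-block:: python

    # Before fix-up: flattened positional argument plus a control parameter.
    client.get_transfer_config(
        "projects/my-project/transferConfigs/my-config", timeout=30.0
    )

    # After fix-up: the positional arg is mapped to its keyword name ('name',
    # per METHOD_TO_PARAMS['get_transfer_config']) inside a single request
    # dict; timeout survives as a control keyword argument.
    client.get_transfer_config(
        request={"name": "projects/my-project/transferConfigs/my-config"},
        timeout=30.0,
    )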
-# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-bigquery-datatransfer' - - -description = "Google Cloud Bigquery Datatransfer API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/bigquery_datatransfer/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", -] -extras = { -} -url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-datatransfer" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - extras_require=extras, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/testing/constraints-3.10.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/testing/constraints-3.10.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/testing/constraints-3.11.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/testing/constraints-3.11.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/testing/constraints-3.12.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/testing/constraints-3.12.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/testing/constraints-3.13.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/testing/constraints-3.13.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/testing/constraints-3.7.txt deleted file mode 100644 index fc812592b0ee..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/testing/constraints-3.7.txt +++ /dev/null @@ -1,10 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -proto-plus==1.22.3 -protobuf==3.20.2 diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/testing/constraints-3.8.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/testing/constraints-3.8.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/testing/constraints-3.9.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/testing/constraints-3.9.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/tests/__init__.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/tests/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/tests/unit/__init__.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/tests/unit/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/tests/unit/gapic/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/tests/unit/gapic/bigquery_datatransfer_v1/__init__.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/tests/unit/gapic/bigquery_datatransfer_v1/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/tests/unit/gapic/bigquery_datatransfer_v1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py b/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py deleted file mode 100644 index b6dd93923fef..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-datatransfer/v1/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py +++ /dev/null @@ -1,13453 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.bigquery_datatransfer_v1.services.data_transfer_service import DataTransferServiceAsyncClient -from google.cloud.bigquery_datatransfer_v1.services.data_transfer_service import DataTransferServiceClient -from google.cloud.bigquery_datatransfer_v1.services.data_transfer_service import pagers -from google.cloud.bigquery_datatransfer_v1.services.data_transfer_service import transports -from google.cloud.bigquery_datatransfer_v1.types import datatransfer -from google.cloud.bigquery_datatransfer_v1.types import transfer -from google.cloud.location import locations_pb2 -from google.oauth2 import service_account -from google.protobuf import any_pb2 # type: ignore -from google.protobuf import 
duration_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import struct_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.protobuf import wrappers_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -import google.auth - - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": "service account credentials", - "principal": "service-account@example.com", -} -CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert DataTransferServiceClient._get_default_mtls_endpoint(None) is None - assert DataTransferServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert DataTransferServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert DataTransferServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert DataTransferServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert DataTransferServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert DataTransferServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert DataTransferServiceClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert DataTransferServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - DataTransferServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must 
be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert DataTransferServiceClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert DataTransferServiceClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert DataTransferServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - DataTransferServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert DataTransferServiceClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert DataTransferServiceClient._get_client_cert_source(None, False) is None - assert DataTransferServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert DataTransferServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert DataTransferServiceClient._get_client_cert_source(None, True) is mock_default_cert_source - assert DataTransferServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(DataTransferServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataTransferServiceClient)) -@mock.patch.object(DataTransferServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataTransferServiceAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = DataTransferServiceClient._DEFAULT_UNIVERSE - default_endpoint = DataTransferServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = DataTransferServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert DataTransferServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert DataTransferServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == DataTransferServiceClient.DEFAULT_MTLS_ENDPOINT - assert DataTransferServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert DataTransferServiceClient._get_api_endpoint(None, None, default_universe, "always") == DataTransferServiceClient.DEFAULT_MTLS_ENDPOINT - assert DataTransferServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == DataTransferServiceClient.DEFAULT_MTLS_ENDPOINT - assert DataTransferServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert DataTransferServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - 
with pytest.raises(MutualTLSChannelError) as excinfo: - DataTransferServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." - - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert DataTransferServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert DataTransferServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert DataTransferServiceClient._get_universe_domain(None, None) == DataTransferServiceClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - DataTransferServiceClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." - -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = DataTransferServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - -@pytest.mark.parametrize("error_code", [401,403,404,500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = DataTransferServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - -@pytest.mark.parametrize("client_class,transport_name", [ - (DataTransferServiceClient, "grpc"), - (DataTransferServiceAsyncClient, "grpc_asyncio"), - (DataTransferServiceClient, "rest"), -]) -def test_data_transfer_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'bigquerydatatransfer.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://bigquerydatatransfer.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.DataTransferServiceGrpcTransport, "grpc"), - (transports.DataTransferServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.DataTransferServiceRestTransport, "rest"), -]) -def test_data_transfer_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = 
service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (DataTransferServiceClient, "grpc"), - (DataTransferServiceAsyncClient, "grpc_asyncio"), - (DataTransferServiceClient, "rest"), -]) -def test_data_transfer_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'bigquerydatatransfer.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://bigquerydatatransfer.googleapis.com' - ) - - -def test_data_transfer_service_client_get_transport_class(): - transport = DataTransferServiceClient.get_transport_class() - available_transports = [ - transports.DataTransferServiceGrpcTransport, - transports.DataTransferServiceRestTransport, - ] - assert transport in available_transports - - transport = DataTransferServiceClient.get_transport_class("grpc") - assert transport == transports.DataTransferServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (DataTransferServiceClient, transports.DataTransferServiceGrpcTransport, "grpc"), - (DataTransferServiceAsyncClient, transports.DataTransferServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (DataTransferServiceClient, transports.DataTransferServiceRestTransport, "rest"), -]) -@mock.patch.object(DataTransferServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataTransferServiceClient)) -@mock.patch.object(DataTransferServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataTransferServiceAsyncClient)) -def test_data_transfer_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(DataTransferServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(DataTransferServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. 
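- # An explicit api_endpoint in ClientOptions reaches the transport verbatim as
- # its host argument; a condensed sketch of the pattern exercised below:
- #
- #   with mock.patch.object(transport_class, '__init__', return_value=None) as patched:
- #       client_class(transport=transport_name,
- #                    client_options=client_options.ClientOptions(api_endpoint="squid.clam.whelk"))
- #       assert patched.call_args.kwargs["host"] == "squid.clam.whelk"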
- options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (DataTransferServiceClient, transports.DataTransferServiceGrpcTransport, "grpc", "true"), - (DataTransferServiceAsyncClient, transports.DataTransferServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (DataTransferServiceClient, transports.DataTransferServiceGrpcTransport, "grpc", "false"), - (DataTransferServiceAsyncClient, transports.DataTransferServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (DataTransferServiceClient, transports.DataTransferServiceRestTransport, "rest", "true"), - (DataTransferServiceClient, transports.DataTransferServiceRestTransport, "rest", "false"), -]) -@mock.patch.object(DataTransferServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataTransferServiceClient)) -@mock.patch.object(DataTransferServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataTransferServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_data_transfer_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
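- # A client_cert_source is any zero-argument callable returning a
- # (cert_bytes, key_bytes) tuple, like client_cert_source_callback above; a
- # sketch of supplying a real certificate pair (file names are placeholders):
- #
- #   def my_cert_source():
- #       with open("client.pem", "rb") as crt, open("client.key", "rb") as key:
- #           return crt.read(), key.read()
- #
- #   options = client_options.ClientOptions(client_cert_source=my_cert_source)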
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - DataTransferServiceClient, DataTransferServiceAsyncClient -]) -@mock.patch.object(DataTransferServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataTransferServiceClient)) -@mock.patch.object(DataTransferServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataTransferServiceAsyncClient)) -def test_data_transfer_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - DataTransferServiceClient, DataTransferServiceAsyncClient -]) -@mock.patch.object(DataTransferServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataTransferServiceClient)) -@mock.patch.object(DataTransferServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(DataTransferServiceAsyncClient)) -def test_data_transfer_service_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = DataTransferServiceClient._DEFAULT_UNIVERSE - default_endpoint = DataTransferServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = DataTransferServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
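- # Outside tests the same mechanism points the client at a non-default
- # universe; a sketch assuming the unpatched endpoint template
- # "bigquerydatatransfer.{UNIVERSE_DOMAIN}" ("bar.com" is a stand-in domain):
- #
- #   options = client_options.ClientOptions(universe_domain="bar.com")
- #   client = DataTransferServiceClient(client_options=options)
- #   assert client.api_endpoint == "bigquerydatatransfer.bar.com"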
- options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (DataTransferServiceClient, transports.DataTransferServiceGrpcTransport, "grpc"), - (DataTransferServiceAsyncClient, transports.DataTransferServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (DataTransferServiceClient, transports.DataTransferServiceRestTransport, "rest"), -]) -def test_data_transfer_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (DataTransferServiceClient, transports.DataTransferServiceGrpcTransport, "grpc", grpc_helpers), - (DataTransferServiceAsyncClient, transports.DataTransferServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (DataTransferServiceClient, transports.DataTransferServiceRestTransport, "rest", None), -]) -def test_data_transfer_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
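- # credentials_file and an in-memory credentials object are mutually exclusive
- # arguments (supplying both raises an error); typical usage is just the path,
- # roughly:
- #
- #   options = client_options.ClientOptions(credentials_file="credentials.json")
- #   client = DataTransferServiceClient(client_options=options)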
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_data_transfer_service_client_client_options_from_dict(): - with mock.patch('google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.transports.DataTransferServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = DataTransferServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (DataTransferServiceClient, transports.DataTransferServiceGrpcTransport, "grpc", grpc_helpers), - (DataTransferServiceAsyncClient, transports.DataTransferServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_data_transfer_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "bigquerydatatransfer.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="bigquerydatatransfer.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - datatransfer.GetDataSourceRequest, - dict, -]) -def test_get_data_source(request_type, transport: str = 'grpc'): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_source), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datatransfer.DataSource( - name='name_value', - data_source_id='data_source_id_value', - display_name='display_name_value', - description='description_value', - client_id='client_id_value', - scopes=['scopes_value'], - transfer_type=transfer.TransferType.BATCH, - supports_multiple_transfers=True, - update_deadline_seconds=2406, - default_schedule='default_schedule_value', - supports_custom_schedule=True, - help_url='help_url_value', - authorization_type=datatransfer.DataSource.AuthorizationType.AUTHORIZATION_CODE, - data_refresh_type=datatransfer.DataSource.DataRefreshType.SLIDING_WINDOW, - default_data_refresh_window_days=3379, - manual_runs_disabled=True, - ) - response = client.get_data_source(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = datatransfer.GetDataSourceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
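- # The pattern above patches __call__ on the bound stub method's type, so the
- # client runs its full request path with no network I/O; distilled (sketch):
- #
- #   client = DataTransferServiceClient(credentials=ga_credentials.AnonymousCredentials())
- #   with mock.patch.object(type(client.transport.get_data_source), '__call__') as call:
- #       call.return_value = datatransfer.DataSource(name='name_value')
- #       response = client.get_data_source(datatransfer.GetDataSourceRequest())
- #       assert response.name == 'name_value'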
- assert isinstance(response, datatransfer.DataSource) - assert response.name == 'name_value' - assert response.data_source_id == 'data_source_id_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.client_id == 'client_id_value' - assert response.scopes == ['scopes_value'] - assert response.transfer_type == transfer.TransferType.BATCH - assert response.supports_multiple_transfers is True - assert response.update_deadline_seconds == 2406 - assert response.default_schedule == 'default_schedule_value' - assert response.supports_custom_schedule is True - assert response.help_url == 'help_url_value' - assert response.authorization_type == datatransfer.DataSource.AuthorizationType.AUTHORIZATION_CODE - assert response.data_refresh_type == datatransfer.DataSource.DataRefreshType.SLIDING_WINDOW - assert response.default_data_refresh_window_days == 3379 - assert response.manual_runs_disabled is True - - -def test_get_data_source_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = datatransfer.GetDataSourceRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_source), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_data_source(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datatransfer.GetDataSourceRequest( - name='name_value', - ) - -def test_get_data_source_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_data_source in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_data_source] = mock_rpc - request = {} - client.get_data_source(request) - - # Establish that the underlying gRPC stub method was called. 
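- # _wrapped_methods maps each transport stub to the wrapper produced by
- # gapic_v1.method.wrap_method (which layers retry, timeout and metadata onto
- # the raw stub); the test swapped one entry for a mock, roughly (sketch):
- #
- #   transport = client._transport
- #   wrapped = transport._wrapped_methods[transport.get_data_source]
- #   # wrapped(request, retry=..., timeout=..., metadata=...) -> response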
- assert mock_rpc.call_count == 1 - - client.get_data_source(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_data_source_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_data_source in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_data_source] = mock_rpc - - request = {} - await client.get_data_source(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_data_source(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_data_source_async(transport: str = 'grpc_asyncio', request_type=datatransfer.GetDataSourceRequest): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_source), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datatransfer.DataSource( - name='name_value', - data_source_id='data_source_id_value', - display_name='display_name_value', - description='description_value', - client_id='client_id_value', - scopes=['scopes_value'], - transfer_type=transfer.TransferType.BATCH, - supports_multiple_transfers=True, - update_deadline_seconds=2406, - default_schedule='default_schedule_value', - supports_custom_schedule=True, - help_url='help_url_value', - authorization_type=datatransfer.DataSource.AuthorizationType.AUTHORIZATION_CODE, - data_refresh_type=datatransfer.DataSource.DataRefreshType.SLIDING_WINDOW, - default_data_refresh_window_days=3379, - manual_runs_disabled=True, - )) - response = await client.get_data_source(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = datatransfer.GetDataSourceRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
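- # grpc_helpers_async.FakeUnaryUnaryCall wraps the response so the mocked stub
- # returns an awaitable, matching the real async call object; a minimal
- # stand-in with the same contract (sketch):
- #
- #   class FakeCall:
- #       def __init__(self, response):
- #           self._response = response
- #       def __await__(self):
- #           if False:
- #               yield   # generator trick: makes __await__ return an iterator
- #           return self._response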
- assert isinstance(response, datatransfer.DataSource) - assert response.name == 'name_value' - assert response.data_source_id == 'data_source_id_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.client_id == 'client_id_value' - assert response.scopes == ['scopes_value'] - assert response.transfer_type == transfer.TransferType.BATCH - assert response.supports_multiple_transfers is True - assert response.update_deadline_seconds == 2406 - assert response.default_schedule == 'default_schedule_value' - assert response.supports_custom_schedule is True - assert response.help_url == 'help_url_value' - assert response.authorization_type == datatransfer.DataSource.AuthorizationType.AUTHORIZATION_CODE - assert response.data_refresh_type == datatransfer.DataSource.DataRefreshType.SLIDING_WINDOW - assert response.default_data_refresh_window_days == 3379 - assert response.manual_runs_disabled is True - - -@pytest.mark.asyncio -async def test_get_data_source_async_from_dict(): - await test_get_data_source_async(request_type=dict) - -def test_get_data_source_field_headers(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datatransfer.GetDataSourceRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_source), - '__call__') as call: - call.return_value = datatransfer.DataSource() - client.get_data_source(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_data_source_field_headers_async(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datatransfer.GetDataSourceRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_source), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datatransfer.DataSource()) - await client.get_data_source(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_data_source_flattened(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_source), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datatransfer.DataSource() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
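- # Flattened keyword arguments are sugar for building the request message; the
- # two forms below are equivalent, and mixing them raises ValueError (sketch):
- #
- #   client.get_data_source(name='name_value')
- #   client.get_data_source(request=datatransfer.GetDataSourceRequest(name='name_value'))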
- client.get_data_source( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_data_source_flattened_error(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_data_source( - datatransfer.GetDataSourceRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_data_source_flattened_async(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_data_source), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datatransfer.DataSource() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datatransfer.DataSource()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_data_source( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_data_source_flattened_error_async(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_data_source( - datatransfer.GetDataSourceRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - datatransfer.ListDataSourcesRequest, - dict, -]) -def test_list_data_sources(request_type, transport: str = 'grpc'): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_sources), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datatransfer.ListDataSourcesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_data_sources(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = datatransfer.ListDataSourcesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDataSourcesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_data_sources_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
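- # Auto-population per AIP-4235 means the library fills designated UUID4
- # request fields when the caller leaves them empty; the idea, roughly (the
- # field name request_id is a generic illustration, not from this API):
- #
- #   import uuid
- #   if not request.request_id:
- #       request.request_id = str(uuid.uuid4())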
- client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = datatransfer.ListDataSourcesRequest( - parent='parent_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_sources), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_data_sources(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datatransfer.ListDataSourcesRequest( - parent='parent_value', - page_token='page_token_value', - ) - -def test_list_data_sources_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_data_sources in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_data_sources] = mock_rpc - request = {} - client.list_data_sources(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_data_sources(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_data_sources_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_data_sources in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_data_sources] = mock_rpc - - request = {} - await client.list_data_sources(request) - - # Establish that the underlying gRPC stub method was called. 
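- # mock.AsyncMock, unlike mock.Mock, yields an awaitable when called, which is
- # what the async transport's wrapped methods must return; minimal contrast
- # (the await form must run inside a coroutine):
- #
- #   m = mock.AsyncMock(return_value=42)
- #   assert await m() == 42                       # must be awaited
- #   assert mock.Mock(return_value=42)() == 42    # plain call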
- assert mock_rpc.call_count == 1 - - await client.list_data_sources(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_data_sources_async(transport: str = 'grpc_asyncio', request_type=datatransfer.ListDataSourcesRequest): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_sources), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(datatransfer.ListDataSourcesResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_data_sources(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = datatransfer.ListDataSourcesRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDataSourcesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_data_sources_async_from_dict(): - await test_list_data_sources_async(request_type=dict) - -def test_list_data_sources_field_headers(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datatransfer.ListDataSourcesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_sources), - '__call__') as call: - call.return_value = datatransfer.ListDataSourcesResponse() - client.list_data_sources(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_data_sources_field_headers_async(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datatransfer.ListDataSourcesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_sources), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datatransfer.ListDataSourcesResponse()) - await client.list_data_sources(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
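- # The x-goog-request-params header is built from URI path fields with the
- # same helper used later in this file; sketch:
- #
- #   from google.api_core.gapic_v1 import routing_header
- #   routing_header.to_grpc_metadata([('parent', 'parent_value')])
- #   # -> ('x-goog-request-params', 'parent=parent_value')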
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_data_sources_flattened(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_sources), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datatransfer.ListDataSourcesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_data_sources( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_data_sources_flattened_error(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_data_sources( - datatransfer.ListDataSourcesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_data_sources_flattened_async(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_sources), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datatransfer.ListDataSourcesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datatransfer.ListDataSourcesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_data_sources( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_data_sources_flattened_error_async(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_data_sources( - datatransfer.ListDataSourcesRequest(), - parent='parent_value', - ) - - -def test_list_data_sources_pager(transport_name: str = "grpc"): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_sources), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - datatransfer.ListDataSourcesResponse( - data_sources=[ - datatransfer.DataSource(), - datatransfer.DataSource(), - datatransfer.DataSource(), - ], - next_page_token='abc', - ), - datatransfer.ListDataSourcesResponse( - data_sources=[], - next_page_token='def', - ), - datatransfer.ListDataSourcesResponse( - data_sources=[ - datatransfer.DataSource(), - ], - next_page_token='ghi', - ), - datatransfer.ListDataSourcesResponse( - data_sources=[ - datatransfer.DataSource(), - datatransfer.DataSource(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_data_sources(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, datatransfer.DataSource) - for i in results) -def test_list_data_sources_pages(transport_name: str = "grpc"): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_sources), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - datatransfer.ListDataSourcesResponse( - data_sources=[ - datatransfer.DataSource(), - datatransfer.DataSource(), - datatransfer.DataSource(), - ], - next_page_token='abc', - ), - datatransfer.ListDataSourcesResponse( - data_sources=[], - next_page_token='def', - ), - datatransfer.ListDataSourcesResponse( - data_sources=[ - datatransfer.DataSource(), - ], - next_page_token='ghi', - ), - datatransfer.ListDataSourcesResponse( - data_sources=[ - datatransfer.DataSource(), - datatransfer.DataSource(), - ], - ), - RuntimeError, - ) - pages = list(client.list_data_sources(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_data_sources_async_pager(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_sources), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
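- # Both pagers hide page boundaries: iterating yields DataSource items and
- # fetches further pages on demand (driven by next_page_token), while .pages
- # yields whole responses; usage sketch ('projects/p' is a placeholder parent):
- #
- #   for ds in client.list_data_sources(parent='projects/p'):            # sync
- #       ...
- #   async for ds in await async_client.list_data_sources(parent='projects/p'):
- #       ...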
- call.side_effect = ( - datatransfer.ListDataSourcesResponse( - data_sources=[ - datatransfer.DataSource(), - datatransfer.DataSource(), - datatransfer.DataSource(), - ], - next_page_token='abc', - ), - datatransfer.ListDataSourcesResponse( - data_sources=[], - next_page_token='def', - ), - datatransfer.ListDataSourcesResponse( - data_sources=[ - datatransfer.DataSource(), - ], - next_page_token='ghi', - ), - datatransfer.ListDataSourcesResponse( - data_sources=[ - datatransfer.DataSource(), - datatransfer.DataSource(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_data_sources(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, datatransfer.DataSource) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_data_sources_async_pages(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_data_sources), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - datatransfer.ListDataSourcesResponse( - data_sources=[ - datatransfer.DataSource(), - datatransfer.DataSource(), - datatransfer.DataSource(), - ], - next_page_token='abc', - ), - datatransfer.ListDataSourcesResponse( - data_sources=[], - next_page_token='def', - ), - datatransfer.ListDataSourcesResponse( - data_sources=[ - datatransfer.DataSource(), - ], - next_page_token='ghi', - ), - datatransfer.ListDataSourcesResponse( - data_sources=[ - datatransfer.DataSource(), - datatransfer.DataSource(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_data_sources(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - datatransfer.CreateTransferConfigRequest, - dict, -]) -def test_create_transfer_config(request_type, transport: str = 'grpc'): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_transfer_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = transfer.TransferConfig( - name='name_value', - display_name='display_name_value', - data_source_id='data_source_id_value', - schedule='schedule_value', - data_refresh_window_days=2543, - disabled=True, - state=transfer.TransferState.PENDING, - user_id=747, - dataset_region='dataset_region_value', - notification_pubsub_topic='notification_pubsub_topic_value', - destination_dataset_id='destination_dataset_id_value', - ) - response = client.create_transfer_config(request) - - # Establish that the underlying gRPC stub method was called. 
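- # transfer.TransferConfig is a proto-plus message: keyword construction, enum
- # fields (transfer.TransferState.PENDING) and attribute access all round-trip
- # through the underlying protobuf; e.g. (field values are illustrative):
- #
- #   config = transfer.TransferConfig(display_name='nightly load',
- #                                    schedule='every 24 hours')
- #   assert config.display_name == 'nightly load'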
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = datatransfer.CreateTransferConfigRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, transfer.TransferConfig) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.data_source_id == 'data_source_id_value' - assert response.schedule == 'schedule_value' - assert response.data_refresh_window_days == 2543 - assert response.disabled is True - assert response.state == transfer.TransferState.PENDING - assert response.user_id == 747 - assert response.dataset_region == 'dataset_region_value' - assert response.notification_pubsub_topic == 'notification_pubsub_topic_value' - - -def test_create_transfer_config_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = datatransfer.CreateTransferConfigRequest( - parent='parent_value', - authorization_code='authorization_code_value', - version_info='version_info_value', - service_account_name='service_account_name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_transfer_config), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_transfer_config(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datatransfer.CreateTransferConfigRequest( - parent='parent_value', - authorization_code='authorization_code_value', - version_info='version_info_value', - service_account_name='service_account_name_value', - ) - -def test_create_transfer_config_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_transfer_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_transfer_config] = mock_rpc - request = {} - client.create_transfer_config(request) - - # Establish that the underlying gRPC stub method was called. 
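- # Invoking the method twice demonstrates the cache: the mocked RPC records both calls below, while wrap_method is never invoked again.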
- assert mock_rpc.call_count == 1 - - client.create_transfer_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_transfer_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_transfer_config in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_transfer_config] = mock_rpc - - request = {} - await client.create_transfer_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_transfer_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_transfer_config_async(transport: str = 'grpc_asyncio', request_type=datatransfer.CreateTransferConfigRequest): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_transfer_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(transfer.TransferConfig( - name='name_value', - display_name='display_name_value', - data_source_id='data_source_id_value', - schedule='schedule_value', - data_refresh_window_days=2543, - disabled=True, - state=transfer.TransferState.PENDING, - user_id=747, - dataset_region='dataset_region_value', - notification_pubsub_topic='notification_pubsub_topic_value', - )) - response = await client.create_transfer_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = datatransfer.CreateTransferConfigRequest() - assert args[0] == request - - # Establish that the response is the type that we expect.
- assert isinstance(response, transfer.TransferConfig) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.data_source_id == 'data_source_id_value' - assert response.schedule == 'schedule_value' - assert response.data_refresh_window_days == 2543 - assert response.disabled is True - assert response.state == transfer.TransferState.PENDING - assert response.user_id == 747 - assert response.dataset_region == 'dataset_region_value' - assert response.notification_pubsub_topic == 'notification_pubsub_topic_value' - - -@pytest.mark.asyncio -async def test_create_transfer_config_async_from_dict(): - await test_create_transfer_config_async(request_type=dict) - -def test_create_transfer_config_field_headers(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datatransfer.CreateTransferConfigRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_transfer_config), - '__call__') as call: - call.return_value = transfer.TransferConfig() - client.create_transfer_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_transfer_config_field_headers_async(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datatransfer.CreateTransferConfigRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_transfer_config), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(transfer.TransferConfig()) - await client.create_transfer_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_transfer_config_flattened(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_transfer_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = transfer.TransferConfig() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_transfer_config( - parent='parent_value', - transfer_config=transfer.TransferConfig(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].transfer_config - mock_val = transfer.TransferConfig(name='name_value') - assert arg == mock_val - - -def test_create_transfer_config_flattened_error(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_transfer_config( - datatransfer.CreateTransferConfigRequest(), - parent='parent_value', - transfer_config=transfer.TransferConfig(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_create_transfer_config_flattened_async(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_transfer_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(transfer.TransferConfig()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_transfer_config( - parent='parent_value', - transfer_config=transfer.TransferConfig(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].transfer_config - mock_val = transfer.TransferConfig(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_transfer_config_flattened_error_async(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_transfer_config( - datatransfer.CreateTransferConfigRequest(), - parent='parent_value', - transfer_config=transfer.TransferConfig(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - datatransfer.UpdateTransferConfigRequest, - dict, -]) -def test_update_transfer_config(request_type, transport: str = 'grpc'): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_transfer_config), - '__call__') as call: - # Designate an appropriate return value for the call.
- call.return_value = transfer.TransferConfig( - name='name_value', - display_name='display_name_value', - data_source_id='data_source_id_value', - schedule='schedule_value', - data_refresh_window_days=2543, - disabled=True, - state=transfer.TransferState.PENDING, - user_id=747, - dataset_region='dataset_region_value', - notification_pubsub_topic='notification_pubsub_topic_value', - destination_dataset_id='destination_dataset_id_value', - ) - response = client.update_transfer_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = datatransfer.UpdateTransferConfigRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, transfer.TransferConfig) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.data_source_id == 'data_source_id_value' - assert response.schedule == 'schedule_value' - assert response.data_refresh_window_days == 2543 - assert response.disabled is True - assert response.state == transfer.TransferState.PENDING - assert response.user_id == 747 - assert response.dataset_region == 'dataset_region_value' - assert response.notification_pubsub_topic == 'notification_pubsub_topic_value' - - -def test_update_transfer_config_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = datatransfer.UpdateTransferConfigRequest( - authorization_code='authorization_code_value', - version_info='version_info_value', - service_account_name='service_account_name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_transfer_config), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_transfer_config(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datatransfer.UpdateTransferConfigRequest( - authorization_code='authorization_code_value', - version_info='version_info_value', - service_account_name='service_account_name_value', - ) - -def test_update_transfer_config_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_transfer_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
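- # _wrapped_methods maps each transport stub to its retry/timeout-wrapped callable; replacing the entry with a mock bypasses the real gRPC channel.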
- client._transport._wrapped_methods[client._transport.update_transfer_config] = mock_rpc - request = {} - client.update_transfer_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_transfer_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_transfer_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_transfer_config in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_transfer_config] = mock_rpc - - request = {} - await client.update_transfer_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.update_transfer_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_transfer_config_async(transport: str = 'grpc_asyncio', request_type=datatransfer.UpdateTransferConfigRequest): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_transfer_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(transfer.TransferConfig( - name='name_value', - display_name='display_name_value', - data_source_id='data_source_id_value', - schedule='schedule_value', - data_refresh_window_days=2543, - disabled=True, - state=transfer.TransferState.PENDING, - user_id=747, - dataset_region='dataset_region_value', - notification_pubsub_topic='notification_pubsub_topic_value', - )) - response = await client.update_transfer_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = datatransfer.UpdateTransferConfigRequest() - assert args[0] == request - - # Establish that the response is the type that we expect.
- assert isinstance(response, transfer.TransferConfig) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.data_source_id == 'data_source_id_value' - assert response.schedule == 'schedule_value' - assert response.data_refresh_window_days == 2543 - assert response.disabled is True - assert response.state == transfer.TransferState.PENDING - assert response.user_id == 747 - assert response.dataset_region == 'dataset_region_value' - assert response.notification_pubsub_topic == 'notification_pubsub_topic_value' - - -@pytest.mark.asyncio -async def test_update_transfer_config_async_from_dict(): - await test_update_transfer_config_async(request_type=dict) - -def test_update_transfer_config_field_headers(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datatransfer.UpdateTransferConfigRequest() - - request.transfer_config.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_transfer_config), - '__call__') as call: - call.return_value = transfer.TransferConfig() - client.update_transfer_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'transfer_config.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_transfer_config_field_headers_async(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datatransfer.UpdateTransferConfigRequest() - - request.transfer_config.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_transfer_config), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(transfer.TransferConfig()) - await client.update_transfer_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'transfer_config.name=name_value', - ) in kw['metadata'] - - -def test_update_transfer_config_flattened(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_transfer_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = transfer.TransferConfig() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_transfer_config( - transfer_config=transfer.TransferConfig(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
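- # For update RPCs the flattened helper sets both transfer_config and update_mask on the request; the FieldMask names which fields the server should overwrite.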
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].transfer_config - mock_val = transfer.TransferConfig(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_transfer_config_flattened_error(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_transfer_config( - datatransfer.UpdateTransferConfigRequest(), - transfer_config=transfer.TransferConfig(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_transfer_config_flattened_async(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_transfer_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(transfer.TransferConfig()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_transfer_config( - transfer_config=transfer.TransferConfig(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].transfer_config - mock_val = transfer.TransferConfig(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_transfer_config_flattened_error_async(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_transfer_config( - datatransfer.UpdateTransferConfigRequest(), - transfer_config=transfer.TransferConfig(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - datatransfer.DeleteTransferConfigRequest, - dict, -]) -def test_delete_transfer_config(request_type, transport: str = 'grpc'): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_transfer_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_transfer_config(request) - - # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = datatransfer.DeleteTransferConfigRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_transfer_config_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = datatransfer.DeleteTransferConfigRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_transfer_config), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_transfer_config(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datatransfer.DeleteTransferConfigRequest( - name='name_value', - ) - -def test_delete_transfer_config_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_transfer_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_transfer_config] = mock_rpc - request = {} - client.delete_transfer_config(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_transfer_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_transfer_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_transfer_config in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_transfer_config] = mock_rpc - - request = {} - await client.delete_transfer_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_transfer_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_transfer_config_async(transport: str = 'grpc_asyncio', request_type=datatransfer.DeleteTransferConfigRequest): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_transfer_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_transfer_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = datatransfer.DeleteTransferConfigRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_transfer_config_async_from_dict(): - await test_delete_transfer_config_async(request_type=dict) - -def test_delete_transfer_config_field_headers(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datatransfer.DeleteTransferConfigRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_transfer_config), - '__call__') as call: - call.return_value = None - client.delete_transfer_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
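- # Routing data travels as gRPC metadata: the client folds the relevant request fields into a single x-goog-request-params entry, asserted below.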
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_transfer_config_field_headers_async(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datatransfer.DeleteTransferConfigRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_transfer_config), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_transfer_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_transfer_config_flattened(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_transfer_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_transfer_config( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_transfer_config_flattened_error(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_transfer_config( - datatransfer.DeleteTransferConfigRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_transfer_config_flattened_async(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_transfer_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_transfer_config( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_transfer_config_flattened_error_async(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error.
- with pytest.raises(ValueError): - await client.delete_transfer_config( - datatransfer.DeleteTransferConfigRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - datatransfer.GetTransferConfigRequest, - dict, -]) -def test_get_transfer_config(request_type, transport: str = 'grpc'): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_transfer_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = transfer.TransferConfig( - name='name_value', - display_name='display_name_value', - data_source_id='data_source_id_value', - schedule='schedule_value', - data_refresh_window_days=2543, - disabled=True, - state=transfer.TransferState.PENDING, - user_id=747, - dataset_region='dataset_region_value', - notification_pubsub_topic='notification_pubsub_topic_value', - destination_dataset_id='destination_dataset_id_value', - ) - response = client.get_transfer_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = datatransfer.GetTransferConfigRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, transfer.TransferConfig) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.data_source_id == 'data_source_id_value' - assert response.schedule == 'schedule_value' - assert response.data_refresh_window_days == 2543 - assert response.disabled is True - assert response.state == transfer.TransferState.PENDING - assert response.user_id == 747 - assert response.dataset_region == 'dataset_region_value' - assert response.notification_pubsub_topic == 'notification_pubsub_topic_value' - - -def test_get_transfer_config_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = datatransfer.GetTransferConfigRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_transfer_config), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.get_transfer_config(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datatransfer.GetTransferConfigRequest( - name='name_value', - ) - -def test_get_transfer_config_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_transfer_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_transfer_config] = mock_rpc - request = {} - client.get_transfer_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_transfer_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_transfer_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_transfer_config in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_transfer_config] = mock_rpc - - request = {} - await client.get_transfer_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_transfer_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_transfer_config_async(transport: str = 'grpc_asyncio', request_type=datatransfer.GetTransferConfigRequest): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_transfer_config), - '__call__') as call: - # Designate an appropriate return value for the call. 
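- # grpc_helpers_async.FakeUnaryUnaryCall wraps a response so it can be awaited like the call object a real async unary-unary stub returns.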
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(transfer.TransferConfig( - name='name_value', - display_name='display_name_value', - data_source_id='data_source_id_value', - schedule='schedule_value', - data_refresh_window_days=2543, - disabled=True, - state=transfer.TransferState.PENDING, - user_id=747, - dataset_region='dataset_region_value', - notification_pubsub_topic='notification_pubsub_topic_value', - )) - response = await client.get_transfer_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = datatransfer.GetTransferConfigRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, transfer.TransferConfig) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.data_source_id == 'data_source_id_value' - assert response.schedule == 'schedule_value' - assert response.data_refresh_window_days == 2543 - assert response.disabled is True - assert response.state == transfer.TransferState.PENDING - assert response.user_id == 747 - assert response.dataset_region == 'dataset_region_value' - assert response.notification_pubsub_topic == 'notification_pubsub_topic_value' - - -@pytest.mark.asyncio -async def test_get_transfer_config_async_from_dict(): - await test_get_transfer_config_async(request_type=dict) - -def test_get_transfer_config_field_headers(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datatransfer.GetTransferConfigRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_transfer_config), - '__call__') as call: - call.return_value = transfer.TransferConfig() - client.get_transfer_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_transfer_config_field_headers_async(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datatransfer.GetTransferConfigRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_transfer_config), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(transfer.TransferConfig()) - await client.get_transfer_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_transfer_config_flattened(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_transfer_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = transfer.TransferConfig() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_transfer_config( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_transfer_config_flattened_error(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_transfer_config( - datatransfer.GetTransferConfigRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_transfer_config_flattened_async(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_transfer_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(transfer.TransferConfig()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_transfer_config( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_transfer_config_flattened_error_async(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_transfer_config( - datatransfer.GetTransferConfigRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - datatransfer.ListTransferConfigsRequest, - dict, -]) -def test_list_transfer_configs(request_type, transport: str = 'grpc'): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_transfer_configs), - '__call__') as call: - # Designate an appropriate return value for the call.
- call.return_value = datatransfer.ListTransferConfigsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_transfer_configs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = datatransfer.ListTransferConfigsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTransferConfigsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_transfer_configs_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = datatransfer.ListTransferConfigsRequest( - parent='parent_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_transfer_configs), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_transfer_configs(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datatransfer.ListTransferConfigsRequest( - parent='parent_value', - page_token='page_token_value', - ) - -def test_list_transfer_configs_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_transfer_configs in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_transfer_configs] = mock_rpc - request = {} - client.list_transfer_configs(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_transfer_configs(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_transfer_configs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_transfer_configs in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_transfer_configs] = mock_rpc - - request = {} - await client.list_transfer_configs(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_transfer_configs(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_transfer_configs_async(transport: str = 'grpc_asyncio', request_type=datatransfer.ListTransferConfigsRequest): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_transfer_configs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datatransfer.ListTransferConfigsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_transfer_configs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = datatransfer.ListTransferConfigsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTransferConfigsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_transfer_configs_async_from_dict(): - await test_list_transfer_configs_async(request_type=dict) - -def test_list_transfer_configs_field_headers(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datatransfer.ListTransferConfigsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.list_transfer_configs), - '__call__') as call: - call.return_value = datatransfer.ListTransferConfigsResponse() - client.list_transfer_configs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_transfer_configs_field_headers_async(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datatransfer.ListTransferConfigsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_transfer_configs), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datatransfer.ListTransferConfigsResponse()) - await client.list_transfer_configs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_transfer_configs_flattened(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_transfer_configs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datatransfer.ListTransferConfigsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_transfer_configs( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_transfer_configs_flattened_error(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_transfer_configs( - datatransfer.ListTransferConfigsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_transfer_configs_flattened_async(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_transfer_configs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datatransfer.ListTransferConfigsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method.
- response = await client.list_transfer_configs( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_transfer_configs_flattened_error_async(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_transfer_configs( - datatransfer.ListTransferConfigsRequest(), - parent='parent_value', - ) - - -def test_list_transfer_configs_pager(transport_name: str = "grpc"): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_transfer_configs), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - datatransfer.ListTransferConfigsResponse( - transfer_configs=[ - transfer.TransferConfig(), - transfer.TransferConfig(), - transfer.TransferConfig(), - ], - next_page_token='abc', - ), - datatransfer.ListTransferConfigsResponse( - transfer_configs=[], - next_page_token='def', - ), - datatransfer.ListTransferConfigsResponse( - transfer_configs=[ - transfer.TransferConfig(), - ], - next_page_token='ghi', - ), - datatransfer.ListTransferConfigsResponse( - transfer_configs=[ - transfer.TransferConfig(), - transfer.TransferConfig(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_transfer_configs(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, transfer.TransferConfig) - for i in results) -def test_list_transfer_configs_pages(transport_name: str = "grpc"): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_transfer_configs), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - datatransfer.ListTransferConfigsResponse( - transfer_configs=[ - transfer.TransferConfig(), - transfer.TransferConfig(), - transfer.TransferConfig(), - ], - next_page_token='abc', - ), - datatransfer.ListTransferConfigsResponse( - transfer_configs=[], - next_page_token='def', - ), - datatransfer.ListTransferConfigsResponse( - transfer_configs=[ - transfer.TransferConfig(), - ], - next_page_token='ghi', - ), - datatransfer.ListTransferConfigsResponse( - transfer_configs=[ - transfer.TransferConfig(), - transfer.TransferConfig(), - ], - ), - RuntimeError, - ) - pages = list(client.list_transfer_configs(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_transfer_configs_async_pager(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_transfer_configs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - datatransfer.ListTransferConfigsResponse( - transfer_configs=[ - transfer.TransferConfig(), - transfer.TransferConfig(), - transfer.TransferConfig(), - ], - next_page_token='abc', - ), - datatransfer.ListTransferConfigsResponse( - transfer_configs=[], - next_page_token='def', - ), - datatransfer.ListTransferConfigsResponse( - transfer_configs=[ - transfer.TransferConfig(), - ], - next_page_token='ghi', - ), - datatransfer.ListTransferConfigsResponse( - transfer_configs=[ - transfer.TransferConfig(), - transfer.TransferConfig(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_transfer_configs(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, transfer.TransferConfig) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_transfer_configs_async_pages(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_transfer_configs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - datatransfer.ListTransferConfigsResponse( - transfer_configs=[ - transfer.TransferConfig(), - transfer.TransferConfig(), - transfer.TransferConfig(), - ], - next_page_token='abc', - ), - datatransfer.ListTransferConfigsResponse( - transfer_configs=[], - next_page_token='def', - ), - datatransfer.ListTransferConfigsResponse( - transfer_configs=[ - transfer.TransferConfig(), - ], - next_page_token='ghi', - ), - datatransfer.ListTransferConfigsResponse( - transfer_configs=[ - transfer.TransferConfig(), - transfer.TransferConfig(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_transfer_configs(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - datatransfer.ScheduleTransferRunsRequest, - dict, -]) -def test_schedule_transfer_runs(request_type, transport: str = 'grpc'): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.schedule_transfer_runs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datatransfer.ScheduleTransferRunsResponse( - ) - response = client.schedule_transfer_runs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = datatransfer.ScheduleTransferRunsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, datatransfer.ScheduleTransferRunsResponse) - - -def test_schedule_transfer_runs_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = datatransfer.ScheduleTransferRunsRequest( - parent='parent_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.schedule_transfer_runs), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.schedule_transfer_runs(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datatransfer.ScheduleTransferRunsRequest( - parent='parent_value', - ) - -def test_schedule_transfer_runs_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.schedule_transfer_runs in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.schedule_transfer_runs] = mock_rpc - request = {} - client.schedule_transfer_runs(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.schedule_transfer_runs(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_schedule_transfer_runs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.schedule_transfer_runs in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.schedule_transfer_runs] = mock_rpc - - request = {} - await client.schedule_transfer_runs(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.schedule_transfer_runs(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_schedule_transfer_runs_async(transport: str = 'grpc_asyncio', request_type=datatransfer.ScheduleTransferRunsRequest): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.schedule_transfer_runs), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datatransfer.ScheduleTransferRunsResponse(
- ))
- response = await client.schedule_transfer_runs(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = datatransfer.ScheduleTransferRunsRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, datatransfer.ScheduleTransferRunsResponse)
-
-
-@pytest.mark.asyncio
-async def test_schedule_transfer_runs_async_from_dict():
- await test_schedule_transfer_runs_async(request_type=dict)
-
-def test_schedule_transfer_runs_field_headers():
- client = DataTransferServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = datatransfer.ScheduleTransferRunsRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.schedule_transfer_runs),
- '__call__') as call:
- call.return_value = datatransfer.ScheduleTransferRunsResponse()
- client.schedule_transfer_runs(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_schedule_transfer_runs_field_headers_async():
- client = DataTransferServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = datatransfer.ScheduleTransferRunsRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.schedule_transfer_runs),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datatransfer.ScheduleTransferRunsResponse())
- await client.schedule_transfer_runs(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-def test_schedule_transfer_runs_flattened():
- client = DataTransferServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.schedule_transfer_runs),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = datatransfer.ScheduleTransferRunsResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.schedule_transfer_runs(
- parent='parent_value',
- start_time=timestamp_pb2.Timestamp(seconds=751),
- end_time=timestamp_pb2.Timestamp(seconds=751),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
- assert TimestampRule().to_proto(args[0].start_time) == timestamp_pb2.Timestamp(seconds=751)
- assert TimestampRule().to_proto(args[0].end_time) == timestamp_pb2.Timestamp(seconds=751)
-
-
-def test_schedule_transfer_runs_flattened_error():
- client = DataTransferServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.schedule_transfer_runs(
- datatransfer.ScheduleTransferRunsRequest(),
- parent='parent_value',
- start_time=timestamp_pb2.Timestamp(seconds=751),
- end_time=timestamp_pb2.Timestamp(seconds=751),
- )
-
-@pytest.mark.asyncio
-async def test_schedule_transfer_runs_flattened_async():
- client = DataTransferServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.schedule_transfer_runs),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datatransfer.ScheduleTransferRunsResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.schedule_transfer_runs(
- parent='parent_value',
- start_time=timestamp_pb2.Timestamp(seconds=751),
- end_time=timestamp_pb2.Timestamp(seconds=751),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
- assert TimestampRule().to_proto(args[0].start_time) == timestamp_pb2.Timestamp(seconds=751)
- assert TimestampRule().to_proto(args[0].end_time) == timestamp_pb2.Timestamp(seconds=751)
-
-@pytest.mark.asyncio
-async def test_schedule_transfer_runs_flattened_error_async():
- client = DataTransferServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.schedule_transfer_runs(
- datatransfer.ScheduleTransferRunsRequest(),
- parent='parent_value',
- start_time=timestamp_pb2.Timestamp(seconds=751),
- end_time=timestamp_pb2.Timestamp(seconds=751),
- )
-
-
-@pytest.mark.parametrize("request_type", [
- datatransfer.StartManualTransferRunsRequest,
- dict,
-])
-def test_start_manual_transfer_runs(request_type, transport: str = 'grpc'):
- client = DataTransferServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.start_manual_transfer_runs),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = datatransfer.StartManualTransferRunsResponse(
- )
- response = client.start_manual_transfer_runs(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = datatransfer.StartManualTransferRunsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, datatransfer.StartManualTransferRunsResponse) - - -def test_start_manual_transfer_runs_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = datatransfer.StartManualTransferRunsRequest( - parent='parent_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.start_manual_transfer_runs), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.start_manual_transfer_runs(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datatransfer.StartManualTransferRunsRequest( - parent='parent_value', - ) - -def test_start_manual_transfer_runs_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.start_manual_transfer_runs in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.start_manual_transfer_runs] = mock_rpc - request = {} - client.start_manual_transfer_runs(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.start_manual_transfer_runs(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_start_manual_transfer_runs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = DataTransferServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.start_manual_transfer_runs in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.start_manual_transfer_runs] = mock_rpc
-
- request = {}
- await client.start_manual_transfer_runs(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.start_manual_transfer_runs(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_start_manual_transfer_runs_async(transport: str = 'grpc_asyncio', request_type=datatransfer.StartManualTransferRunsRequest):
- client = DataTransferServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.start_manual_transfer_runs),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datatransfer.StartManualTransferRunsResponse(
- ))
- response = await client.start_manual_transfer_runs(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = datatransfer.StartManualTransferRunsRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, datatransfer.StartManualTransferRunsResponse)
-
-
-@pytest.mark.asyncio
-async def test_start_manual_transfer_runs_async_from_dict():
- await test_start_manual_transfer_runs_async(request_type=dict)
-
-def test_start_manual_transfer_runs_field_headers():
- client = DataTransferServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = datatransfer.StartManualTransferRunsRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.start_manual_transfer_runs), - '__call__') as call: - call.return_value = datatransfer.StartManualTransferRunsResponse() - client.start_manual_transfer_runs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_start_manual_transfer_runs_field_headers_async(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datatransfer.StartManualTransferRunsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.start_manual_transfer_runs), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datatransfer.StartManualTransferRunsResponse()) - await client.start_manual_transfer_runs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - datatransfer.GetTransferRunRequest, - dict, -]) -def test_get_transfer_run(request_type, transport: str = 'grpc'): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_transfer_run), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = transfer.TransferRun( - name='name_value', - data_source_id='data_source_id_value', - state=transfer.TransferState.PENDING, - user_id=747, - schedule='schedule_value', - notification_pubsub_topic='notification_pubsub_topic_value', - destination_dataset_id='destination_dataset_id_value', - ) - response = client.get_transfer_run(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = datatransfer.GetTransferRunRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, transfer.TransferRun) - assert response.name == 'name_value' - assert response.data_source_id == 'data_source_id_value' - assert response.state == transfer.TransferState.PENDING - assert response.user_id == 747 - assert response.schedule == 'schedule_value' - assert response.notification_pubsub_topic == 'notification_pubsub_topic_value' - - -def test_get_transfer_run_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
- client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = datatransfer.GetTransferRunRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_transfer_run), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_transfer_run(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datatransfer.GetTransferRunRequest( - name='name_value', - ) - -def test_get_transfer_run_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_transfer_run in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_transfer_run] = mock_rpc - request = {} - client.get_transfer_run(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_transfer_run(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_transfer_run_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_transfer_run in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_transfer_run] = mock_rpc - - request = {} - await client.get_transfer_run(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- await client.get_transfer_run(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_transfer_run_async(transport: str = 'grpc_asyncio', request_type=datatransfer.GetTransferRunRequest):
- client = DataTransferServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_transfer_run),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(transfer.TransferRun(
- name='name_value',
- data_source_id='data_source_id_value',
- state=transfer.TransferState.PENDING,
- user_id=747,
- schedule='schedule_value',
- notification_pubsub_topic='notification_pubsub_topic_value',
- ))
- response = await client.get_transfer_run(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = datatransfer.GetTransferRunRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, transfer.TransferRun)
- assert response.name == 'name_value'
- assert response.data_source_id == 'data_source_id_value'
- assert response.state == transfer.TransferState.PENDING
- assert response.user_id == 747
- assert response.schedule == 'schedule_value'
- assert response.notification_pubsub_topic == 'notification_pubsub_topic_value'
-
-
-@pytest.mark.asyncio
-async def test_get_transfer_run_async_from_dict():
- await test_get_transfer_run_async(request_type=dict)
-
-def test_get_transfer_run_field_headers():
- client = DataTransferServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = datatransfer.GetTransferRunRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_transfer_run),
- '__call__') as call:
- call.return_value = transfer.TransferRun()
- client.get_transfer_run(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_transfer_run_field_headers_async():
- client = DataTransferServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = datatransfer.GetTransferRunRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_transfer_run),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(transfer.TransferRun())
- await client.get_transfer_run(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_get_transfer_run_flattened():
- client = DataTransferServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_transfer_run),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = transfer.TransferRun()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.get_transfer_run(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_get_transfer_run_flattened_error():
- client = DataTransferServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.get_transfer_run(
- datatransfer.GetTransferRunRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_get_transfer_run_flattened_async():
- client = DataTransferServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_transfer_run),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(transfer.TransferRun())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.get_transfer_run(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_transfer_run_flattened_error_async():
- client = DataTransferServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.get_transfer_run(
- datatransfer.GetTransferRunRequest(),
- name='name_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- datatransfer.DeleteTransferRunRequest,
- dict,
-])
-def test_delete_transfer_run(request_type, transport: str = 'grpc'):
- client = DataTransferServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_transfer_run), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_transfer_run(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = datatransfer.DeleteTransferRunRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_transfer_run_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = datatransfer.DeleteTransferRunRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_transfer_run), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_transfer_run(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datatransfer.DeleteTransferRunRequest( - name='name_value', - ) - -def test_delete_transfer_run_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_transfer_run in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_transfer_run] = mock_rpc - request = {} - client.delete_transfer_run(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_transfer_run(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_transfer_run_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_transfer_run in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_transfer_run] = mock_rpc - - request = {} - await client.delete_transfer_run(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_transfer_run(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_transfer_run_async(transport: str = 'grpc_asyncio', request_type=datatransfer.DeleteTransferRunRequest): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_transfer_run), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_transfer_run(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = datatransfer.DeleteTransferRunRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_transfer_run_async_from_dict(): - await test_delete_transfer_run_async(request_type=dict) - -def test_delete_transfer_run_field_headers(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = datatransfer.DeleteTransferRunRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_transfer_run), - '__call__') as call: - call.return_value = None - client.delete_transfer_run(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_delete_transfer_run_field_headers_async():
- client = DataTransferServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = datatransfer.DeleteTransferRunRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_transfer_run),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
- await client.delete_transfer_run(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_delete_transfer_run_flattened():
- client = DataTransferServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_transfer_run),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = None
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.delete_transfer_run(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_delete_transfer_run_flattened_error():
- client = DataTransferServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.delete_transfer_run(
- datatransfer.DeleteTransferRunRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_delete_transfer_run_flattened_async():
- client = DataTransferServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_transfer_run),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.delete_transfer_run(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_transfer_run_flattened_error_async():
- client = DataTransferServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError): - await client.delete_transfer_run( - datatransfer.DeleteTransferRunRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - datatransfer.ListTransferRunsRequest, - dict, -]) -def test_list_transfer_runs(request_type, transport: str = 'grpc'): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_transfer_runs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = datatransfer.ListTransferRunsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_transfer_runs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = datatransfer.ListTransferRunsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTransferRunsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_transfer_runs_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = datatransfer.ListTransferRunsRequest( - parent='parent_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_transfer_runs), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_transfer_runs(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == datatransfer.ListTransferRunsRequest( - parent='parent_value', - page_token='page_token_value', - ) - -def test_list_transfer_runs_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_transfer_runs in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_transfer_runs] = mock_rpc - request = {} - client.list_transfer_runs(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.list_transfer_runs(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_transfer_runs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = DataTransferServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.list_transfer_runs in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.list_transfer_runs] = mock_rpc
-
- request = {}
- await client.list_transfer_runs(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.list_transfer_runs(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_transfer_runs_async(transport: str = 'grpc_asyncio', request_type=datatransfer.ListTransferRunsRequest):
- client = DataTransferServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_transfer_runs),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datatransfer.ListTransferRunsResponse(
- next_page_token='next_page_token_value',
- ))
- response = await client.list_transfer_runs(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = datatransfer.ListTransferRunsRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListTransferRunsAsyncPager)
- assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_transfer_runs_async_from_dict():
- await test_list_transfer_runs_async(request_type=dict)
-
-def test_list_transfer_runs_field_headers():
- client = DataTransferServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = datatransfer.ListTransferRunsRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_transfer_runs),
- '__call__') as call:
- call.return_value = datatransfer.ListTransferRunsResponse()
- client.list_transfer_runs(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_transfer_runs_field_headers_async():
- client = DataTransferServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = datatransfer.ListTransferRunsRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_transfer_runs),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datatransfer.ListTransferRunsResponse())
- await client.list_transfer_runs(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-def test_list_transfer_runs_flattened():
- client = DataTransferServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_transfer_runs),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = datatransfer.ListTransferRunsResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.list_transfer_runs(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-
-def test_list_transfer_runs_flattened_error():
- client = DataTransferServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.list_transfer_runs(
- datatransfer.ListTransferRunsRequest(),
- parent='parent_value',
- )
-
-@pytest.mark.asyncio
-async def test_list_transfer_runs_flattened_async():
- client = DataTransferServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_transfer_runs),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datatransfer.ListTransferRunsResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.list_transfer_runs(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_transfer_runs_flattened_error_async():
-    client = DataTransferServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_transfer_runs(
-            datatransfer.ListTransferRunsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_transfer_runs_pager(transport_name: str = "grpc"):
-    client = DataTransferServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_transfer_runs),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            datatransfer.ListTransferRunsResponse(
-                transfer_runs=[
-                    transfer.TransferRun(),
-                    transfer.TransferRun(),
-                    transfer.TransferRun(),
-                ],
-                next_page_token='abc',
-            ),
-            datatransfer.ListTransferRunsResponse(
-                transfer_runs=[],
-                next_page_token='def',
-            ),
-            datatransfer.ListTransferRunsResponse(
-                transfer_runs=[
-                    transfer.TransferRun(),
-                ],
-                next_page_token='ghi',
-            ),
-            datatransfer.ListTransferRunsResponse(
-                transfer_runs=[
-                    transfer.TransferRun(),
-                    transfer.TransferRun(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        expected_metadata = ()
-        retry = retries.Retry()
-        timeout = 5
-        expected_metadata = tuple(expected_metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('parent', ''),
-            )),
-        )
-        pager = client.list_transfer_runs(request={}, retry=retry, timeout=timeout)
-
-        assert pager._metadata == expected_metadata
-        assert pager._retry == retry
-        assert pager._timeout == timeout
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, transfer.TransferRun)
-                   for i in results)
-
-
-def test_list_transfer_runs_pages(transport_name: str = "grpc"):
-    client = DataTransferServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_transfer_runs),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            datatransfer.ListTransferRunsResponse(
-                transfer_runs=[
-                    transfer.TransferRun(),
-                    transfer.TransferRun(),
-                    transfer.TransferRun(),
-                ],
-                next_page_token='abc',
-            ),
-            datatransfer.ListTransferRunsResponse(
-                transfer_runs=[],
-                next_page_token='def',
-            ),
-            datatransfer.ListTransferRunsResponse(
-                transfer_runs=[
-                    transfer.TransferRun(),
-                ],
-                next_page_token='ghi',
-            ),
-            datatransfer.ListTransferRunsResponse(
-                transfer_runs=[
-                    transfer.TransferRun(),
-                    transfer.TransferRun(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = list(client.list_transfer_runs(request={}).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.asyncio
-async def test_list_transfer_runs_async_pager():
-    client = DataTransferServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_transfer_runs),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
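-        # The four pages below carry 3 + 0 + 1 + 2 = 6 items; the final page
-        # has no next_page_token, and the trailing RuntimeError would only
-        # surface if the pager mistakenly requested a page past the end.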
-        call.side_effect = (
-            datatransfer.ListTransferRunsResponse(
-                transfer_runs=[
-                    transfer.TransferRun(),
-                    transfer.TransferRun(),
-                    transfer.TransferRun(),
-                ],
-                next_page_token='abc',
-            ),
-            datatransfer.ListTransferRunsResponse(
-                transfer_runs=[],
-                next_page_token='def',
-            ),
-            datatransfer.ListTransferRunsResponse(
-                transfer_runs=[
-                    transfer.TransferRun(),
-                ],
-                next_page_token='ghi',
-            ),
-            datatransfer.ListTransferRunsResponse(
-                transfer_runs=[
-                    transfer.TransferRun(),
-                    transfer.TransferRun(),
-                ],
-            ),
-            RuntimeError,
-        )
-        async_pager = await client.list_transfer_runs(request={},)
-        assert async_pager.next_page_token == 'abc'
-        responses = []
-        async for response in async_pager: # pragma: no branch
-            responses.append(response)
-
-        assert len(responses) == 6
-        assert all(isinstance(i, transfer.TransferRun)
-                   for i in responses)
-
-
-@pytest.mark.asyncio
-async def test_list_transfer_runs_async_pages():
-    client = DataTransferServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_transfer_runs),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            datatransfer.ListTransferRunsResponse(
-                transfer_runs=[
-                    transfer.TransferRun(),
-                    transfer.TransferRun(),
-                    transfer.TransferRun(),
-                ],
-                next_page_token='abc',
-            ),
-            datatransfer.ListTransferRunsResponse(
-                transfer_runs=[],
-                next_page_token='def',
-            ),
-            datatransfer.ListTransferRunsResponse(
-                transfer_runs=[
-                    transfer.TransferRun(),
-                ],
-                next_page_token='ghi',
-            ),
-            datatransfer.ListTransferRunsResponse(
-                transfer_runs=[
-                    transfer.TransferRun(),
-                    transfer.TransferRun(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = []
-        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
-        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
-        async for page_ in ( # pragma: no branch
-            await client.list_transfer_runs(request={})
-        ).pages:
-            pages.append(page_)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.parametrize("request_type", [
-    datatransfer.ListTransferLogsRequest,
-    dict,
-])
-def test_list_transfer_logs(request_type, transport: str = 'grpc'):
-    client = DataTransferServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_transfer_logs),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = datatransfer.ListTransferLogsResponse(
-            next_page_token='next_page_token_value',
-        )
-        response = client.list_transfer_logs(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = datatransfer.ListTransferLogsRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListTransferLogsPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-def test_list_transfer_logs_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = DataTransferServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = datatransfer.ListTransferLogsRequest(
-        parent='parent_value',
-        page_token='page_token_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_transfer_logs),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.list_transfer_logs(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == datatransfer.ListTransferLogsRequest(
-            parent='parent_value',
-            page_token='page_token_value',
-        )
-
-def test_list_transfer_logs_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataTransferServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.list_transfer_logs in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.list_transfer_logs] = mock_rpc
-        request = {}
-        client.list_transfer_logs(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.list_transfer_logs(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_transfer_logs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = DataTransferServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.list_transfer_logs in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.list_transfer_logs] = mock_rpc
-
-        request = {}
-        await client.list_transfer_logs(request)
-
-        # Establish that the underlying gRPC stub method was called.
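-        # The same request is sent twice below: wrapper_fn.call_count staying
-        # at zero shows the cached wrapper was reused rather than rebuilt,
-        # while mock_rpc.call_count tracks both underlying invocations.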
-        assert mock_rpc.call_count == 1
-
-        await client.list_transfer_logs(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_transfer_logs_async(transport: str = 'grpc_asyncio', request_type=datatransfer.ListTransferLogsRequest):
-    client = DataTransferServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_transfer_logs),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datatransfer.ListTransferLogsResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.list_transfer_logs(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = datatransfer.ListTransferLogsRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListTransferLogsAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_transfer_logs_async_from_dict():
-    await test_list_transfer_logs_async(request_type=dict)
-
-def test_list_transfer_logs_field_headers():
-    client = DataTransferServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = datatransfer.ListTransferLogsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_transfer_logs),
-            '__call__') as call:
-        call.return_value = datatransfer.ListTransferLogsResponse()
-        client.list_transfer_logs(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_transfer_logs_field_headers_async():
-    client = DataTransferServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = datatransfer.ListTransferLogsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_transfer_logs),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datatransfer.ListTransferLogsResponse())
-        await client.list_transfer_logs(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
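-    # The routing values travel in the ``x-goog-request-params`` metadata
-    # entry, which is how URI-bound fields such as ``parent`` reach the
-    # backend for request routing.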
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_list_transfer_logs_flattened():
-    client = DataTransferServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_transfer_logs),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = datatransfer.ListTransferLogsResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_transfer_logs(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_transfer_logs_flattened_error():
-    client = DataTransferServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_transfer_logs(
-            datatransfer.ListTransferLogsRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_transfer_logs_flattened_async():
-    client = DataTransferServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_transfer_logs),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datatransfer.ListTransferLogsResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_transfer_logs(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_transfer_logs_flattened_error_async():
-    client = DataTransferServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_transfer_logs(
-            datatransfer.ListTransferLogsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_transfer_logs_pager(transport_name: str = "grpc"):
-    client = DataTransferServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_transfer_logs),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            datatransfer.ListTransferLogsResponse(
-                transfer_messages=[
-                    transfer.TransferMessage(),
-                    transfer.TransferMessage(),
-                    transfer.TransferMessage(),
-                ],
-                next_page_token='abc',
-            ),
-            datatransfer.ListTransferLogsResponse(
-                transfer_messages=[],
-                next_page_token='def',
-            ),
-            datatransfer.ListTransferLogsResponse(
-                transfer_messages=[
-                    transfer.TransferMessage(),
-                ],
-                next_page_token='ghi',
-            ),
-            datatransfer.ListTransferLogsResponse(
-                transfer_messages=[
-                    transfer.TransferMessage(),
-                    transfer.TransferMessage(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        expected_metadata = ()
-        retry = retries.Retry()
-        timeout = 5
-        expected_metadata = tuple(expected_metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('parent', ''),
-            )),
-        )
-        pager = client.list_transfer_logs(request={}, retry=retry, timeout=timeout)
-
-        assert pager._metadata == expected_metadata
-        assert pager._retry == retry
-        assert pager._timeout == timeout
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, transfer.TransferMessage)
-                   for i in results)
-
-
-def test_list_transfer_logs_pages(transport_name: str = "grpc"):
-    client = DataTransferServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_transfer_logs),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            datatransfer.ListTransferLogsResponse(
-                transfer_messages=[
-                    transfer.TransferMessage(),
-                    transfer.TransferMessage(),
-                    transfer.TransferMessage(),
-                ],
-                next_page_token='abc',
-            ),
-            datatransfer.ListTransferLogsResponse(
-                transfer_messages=[],
-                next_page_token='def',
-            ),
-            datatransfer.ListTransferLogsResponse(
-                transfer_messages=[
-                    transfer.TransferMessage(),
-                ],
-                next_page_token='ghi',
-            ),
-            datatransfer.ListTransferLogsResponse(
-                transfer_messages=[
-                    transfer.TransferMessage(),
-                    transfer.TransferMessage(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = list(client.list_transfer_logs(request={}).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.asyncio
-async def test_list_transfer_logs_async_pager():
-    client = DataTransferServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_transfer_logs),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
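-        # Same page layout as the sync pager above: six TransferMessage items
-        # across four pages, with the trailing RuntimeError guarding against
-        # a fetch beyond the final (token-less) page.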
-        call.side_effect = (
-            datatransfer.ListTransferLogsResponse(
-                transfer_messages=[
-                    transfer.TransferMessage(),
-                    transfer.TransferMessage(),
-                    transfer.TransferMessage(),
-                ],
-                next_page_token='abc',
-            ),
-            datatransfer.ListTransferLogsResponse(
-                transfer_messages=[],
-                next_page_token='def',
-            ),
-            datatransfer.ListTransferLogsResponse(
-                transfer_messages=[
-                    transfer.TransferMessage(),
-                ],
-                next_page_token='ghi',
-            ),
-            datatransfer.ListTransferLogsResponse(
-                transfer_messages=[
-                    transfer.TransferMessage(),
-                    transfer.TransferMessage(),
-                ],
-            ),
-            RuntimeError,
-        )
-        async_pager = await client.list_transfer_logs(request={},)
-        assert async_pager.next_page_token == 'abc'
-        responses = []
-        async for response in async_pager: # pragma: no branch
-            responses.append(response)
-
-        assert len(responses) == 6
-        assert all(isinstance(i, transfer.TransferMessage)
-                   for i in responses)
-
-
-@pytest.mark.asyncio
-async def test_list_transfer_logs_async_pages():
-    client = DataTransferServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_transfer_logs),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            datatransfer.ListTransferLogsResponse(
-                transfer_messages=[
-                    transfer.TransferMessage(),
-                    transfer.TransferMessage(),
-                    transfer.TransferMessage(),
-                ],
-                next_page_token='abc',
-            ),
-            datatransfer.ListTransferLogsResponse(
-                transfer_messages=[],
-                next_page_token='def',
-            ),
-            datatransfer.ListTransferLogsResponse(
-                transfer_messages=[
-                    transfer.TransferMessage(),
-                ],
-                next_page_token='ghi',
-            ),
-            datatransfer.ListTransferLogsResponse(
-                transfer_messages=[
-                    transfer.TransferMessage(),
-                    transfer.TransferMessage(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = []
-        # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch`
-        # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372
-        async for page_ in ( # pragma: no branch
-            await client.list_transfer_logs(request={})
-        ).pages:
-            pages.append(page_)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.parametrize("request_type", [
-    datatransfer.CheckValidCredsRequest,
-    dict,
-])
-def test_check_valid_creds(request_type, transport: str = 'grpc'):
-    client = DataTransferServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.check_valid_creds),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = datatransfer.CheckValidCredsResponse(
-            has_valid_creds=True,
-        )
-        response = client.check_valid_creds(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = datatransfer.CheckValidCredsRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, datatransfer.CheckValidCredsResponse)
-    assert response.has_valid_creds is True
-
-
-def test_check_valid_creds_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = DataTransferServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = datatransfer.CheckValidCredsRequest(
-        name='name_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.check_valid_creds),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.check_valid_creds(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == datatransfer.CheckValidCredsRequest(
-            name='name_value',
-        )
-
-def test_check_valid_creds_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataTransferServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.check_valid_creds in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.check_valid_creds] = mock_rpc
-        request = {}
-        client.check_valid_creds(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.check_valid_creds(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_check_valid_creds_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = DataTransferServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.check_valid_creds in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.check_valid_creds] = mock_rpc
-
-        request = {}
-        await client.check_valid_creds(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.check_valid_creds(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_check_valid_creds_async(transport: str = 'grpc_asyncio', request_type=datatransfer.CheckValidCredsRequest):
-    client = DataTransferServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.check_valid_creds),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datatransfer.CheckValidCredsResponse(
-            has_valid_creds=True,
-        ))
-        response = await client.check_valid_creds(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = datatransfer.CheckValidCredsRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, datatransfer.CheckValidCredsResponse)
-    assert response.has_valid_creds is True
-
-
-@pytest.mark.asyncio
-async def test_check_valid_creds_async_from_dict():
-    await test_check_valid_creds_async(request_type=dict)
-
-def test_check_valid_creds_field_headers():
-    client = DataTransferServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = datatransfer.CheckValidCredsRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.check_valid_creds),
-            '__call__') as call:
-        call.return_value = datatransfer.CheckValidCredsResponse()
-        client.check_valid_creds(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_check_valid_creds_field_headers_async():
-    client = DataTransferServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = datatransfer.CheckValidCredsRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.check_valid_creds),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datatransfer.CheckValidCredsResponse())
-        await client.check_valid_creds(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_check_valid_creds_flattened():
-    client = DataTransferServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.check_valid_creds),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = datatransfer.CheckValidCredsResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.check_valid_creds(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_check_valid_creds_flattened_error():
-    client = DataTransferServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.check_valid_creds(
-            datatransfer.CheckValidCredsRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_check_valid_creds_flattened_async():
-    client = DataTransferServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.check_valid_creds),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datatransfer.CheckValidCredsResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.check_valid_creds(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_check_valid_creds_flattened_error_async():
-    client = DataTransferServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.check_valid_creds(
-            datatransfer.CheckValidCredsRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    datatransfer.EnrollDataSourcesRequest,
-    dict,
-])
-def test_enroll_data_sources(request_type, transport: str = 'grpc'):
-    client = DataTransferServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.enroll_data_sources),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        response = client.enroll_data_sources(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = datatransfer.EnrollDataSourcesRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-def test_enroll_data_sources_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = DataTransferServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = datatransfer.EnrollDataSourcesRequest(
-        name='name_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.enroll_data_sources),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.enroll_data_sources(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == datatransfer.EnrollDataSourcesRequest(
-            name='name_value',
-        )
-
-def test_enroll_data_sources_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataTransferServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.enroll_data_sources in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.enroll_data_sources] = mock_rpc
-        request = {}
-        client.enroll_data_sources(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.enroll_data_sources(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_enroll_data_sources_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = DataTransferServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.enroll_data_sources in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.enroll_data_sources] = mock_rpc
-
-        request = {}
-        await client.enroll_data_sources(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.enroll_data_sources(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_enroll_data_sources_async(transport: str = 'grpc_asyncio', request_type=datatransfer.EnrollDataSourcesRequest):
-    client = DataTransferServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.enroll_data_sources),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        response = await client.enroll_data_sources(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = datatransfer.EnrollDataSourcesRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-@pytest.mark.asyncio
-async def test_enroll_data_sources_async_from_dict():
-    await test_enroll_data_sources_async(request_type=dict)
-
-def test_enroll_data_sources_field_headers():
-    client = DataTransferServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = datatransfer.EnrollDataSourcesRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.enroll_data_sources),
-            '__call__') as call:
-        call.return_value = None
-        client.enroll_data_sources(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_enroll_data_sources_field_headers_async():
-    client = DataTransferServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = datatransfer.EnrollDataSourcesRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.enroll_data_sources),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        await client.enroll_data_sources(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.parametrize("request_type", [
-    datatransfer.UnenrollDataSourcesRequest,
-    dict,
-])
-def test_unenroll_data_sources(request_type, transport: str = 'grpc'):
-    client = DataTransferServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.unenroll_data_sources),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        response = client.unenroll_data_sources(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = datatransfer.UnenrollDataSourcesRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-def test_unenroll_data_sources_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = DataTransferServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = datatransfer.UnenrollDataSourcesRequest(
-        name='name_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.unenroll_data_sources),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client.unenroll_data_sources(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == datatransfer.UnenrollDataSourcesRequest(
-            name='name_value',
-        )
-
-def test_unenroll_data_sources_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataTransferServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.unenroll_data_sources in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.unenroll_data_sources] = mock_rpc
-        request = {}
-        client.unenroll_data_sources(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.unenroll_data_sources(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_unenroll_data_sources_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = DataTransferServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.unenroll_data_sources in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.unenroll_data_sources] = mock_rpc
-
-        request = {}
-        await client.unenroll_data_sources(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.unenroll_data_sources(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_unenroll_data_sources_async(transport: str = 'grpc_asyncio', request_type=datatransfer.UnenrollDataSourcesRequest):
-    client = DataTransferServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.unenroll_data_sources),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        response = await client.unenroll_data_sources(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = datatransfer.UnenrollDataSourcesRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-@pytest.mark.asyncio
-async def test_unenroll_data_sources_async_from_dict():
-    await test_unenroll_data_sources_async(request_type=dict)
-
-def test_unenroll_data_sources_field_headers():
-    client = DataTransferServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = datatransfer.UnenrollDataSourcesRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.unenroll_data_sources),
-            '__call__') as call:
-        call.return_value = None
-        client.unenroll_data_sources(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_unenroll_data_sources_field_headers_async():
-    client = DataTransferServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = datatransfer.UnenrollDataSourcesRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.unenroll_data_sources),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        await client.unenroll_data_sources(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_get_data_source_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataTransferServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.get_data_source in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.get_data_source] = mock_rpc
-
-        request = {}
-        client.get_data_source(request)
-
-        # Establish that the underlying gRPC stub method was called.
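-        # The REST transport caches wrapped methods the same way the gRPC
-        # transport does; the repeated call below must hit the mock installed
-        # in _wrapped_methods instead of a freshly wrapped RPC.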
-        assert mock_rpc.call_count == 1
-
-        client.get_data_source(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_get_data_source_rest_required_fields(request_type=datatransfer.GetDataSourceRequest):
-    transport_class = transports.DataTransferServiceRestTransport
-
-    request_init = {}
-    request_init["name"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_data_source._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["name"] = 'name_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_data_source._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
-
-    client = DataTransferServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = datatransfer.DataSource()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = datatransfer.DataSource.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.get_data_source(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_get_data_source_rest_unset_required_fields():
-    transport = transports.DataTransferServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.get_data_source._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-def test_get_data_source_rest_flattened():
-    client = DataTransferServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = datatransfer.DataSource()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'name': 'projects/sample1/locations/sample2/dataSources/sample3'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            name='name_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = datatransfer.DataSource.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.get_data_source(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{name=projects/*/locations/*/dataSources/*}" % client.transport._host, args[1])
-
-
-def test_get_data_source_rest_flattened_error(transport: str = 'rest'):
-    client = DataTransferServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_data_source(
-            datatransfer.GetDataSourceRequest(),
-            name='name_value',
-        )
-
-
-def test_list_data_sources_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataTransferServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.list_data_sources in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.list_data_sources] = mock_rpc
-
-        request = {}
-        client.list_data_sources(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.list_data_sources(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_list_data_sources_rest_required_fields(request_type=datatransfer.ListDataSourcesRequest):
-    transport_class = transports.DataTransferServiceRestTransport
-
-    request_init = {}
-    request_init["parent"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_data_sources._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["parent"] = 'parent_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_data_sources._get_unset_required_fields(jsonified_request)
-    # Check that path parameters and body parameters are not mixing in.
-    assert not set(unset_fields) - set(("page_size", "page_token", ))
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "parent" in jsonified_request
-    assert jsonified_request["parent"] == 'parent_value'
-
-    client = DataTransferServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = datatransfer.ListDataSourcesResponse()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = datatransfer.ListDataSourcesResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.list_data_sources(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_list_data_sources_rest_unset_required_fields():
-    transport = transports.DataTransferServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.list_data_sources._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", )))
-
-
-def test_list_data_sources_rest_flattened():
-    client = DataTransferServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = datatransfer.ListDataSourcesResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            parent='parent_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = datatransfer.ListDataSourcesResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.list_data_sources(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/dataSources" % client.transport._host, args[1])
-
-
-def test_list_data_sources_rest_flattened_error(transport: str = 'rest'):
-    client = DataTransferServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_data_sources(
-            datatransfer.ListDataSourcesRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_data_sources_rest_pager(transport: str = 'rest'):
-    client = DataTransferServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # Set the response as a series of pages
-        response = (
-            datatransfer.ListDataSourcesResponse(
-                data_sources=[
-                    datatransfer.DataSource(),
-                    datatransfer.DataSource(),
-                    datatransfer.DataSource(),
-                ],
-                next_page_token='abc',
-            ),
-            datatransfer.ListDataSourcesResponse(
-                data_sources=[],
-                next_page_token='def',
-            ),
-            datatransfer.ListDataSourcesResponse(
-                data_sources=[
-                    datatransfer.DataSource(),
-                ],
-                next_page_token='ghi',
-            ),
-            datatransfer.ListDataSourcesResponse(
-                data_sources=[
-                    datatransfer.DataSource(),
-                    datatransfer.DataSource(),
-                ],
-            ),
-        )
-        # Two responses for two calls
-        response = response + response
-
-        # Wrap the values into proper Response objs
-        response = tuple(datatransfer.ListDataSourcesResponse.to_json(x) for x in response)
-        return_values = tuple(Response() for i in response)
-        for return_val, response_val in zip(return_values, response):
-            return_val._content = response_val.encode('UTF-8')
-            return_val.status_code = 200
-        req.side_effect = return_values
-
-        sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
-        pager = client.list_data_sources(request=sample_request)
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, datatransfer.DataSource)
-                   for i in results)
-
-        pages = list(client.list_data_sources(request=sample_request).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-
-def test_create_transfer_config_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataTransferServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.create_transfer_config in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.create_transfer_config] = mock_rpc
-
-        request = {}
-        client.create_transfer_config(request)
-
-        # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1 - - client.create_transfer_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_transfer_config_rest_required_fields(request_type=datatransfer.CreateTransferConfigRequest): - transport_class = transports.DataTransferServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_transfer_config._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_transfer_config._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("authorization_code", "service_account_name", "version_info", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = transfer.TransferConfig() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = transfer.TransferConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_transfer_config(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_transfer_config_rest_unset_required_fields(): - transport = transports.DataTransferServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_transfer_config._get_unset_required_fields({}) - assert set(unset_fields) == (set(("authorizationCode", "serviceAccountName", "versionInfo", )) & set(("parent", "transferConfig", ))) - - -def test_create_transfer_config_rest_flattened(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = transfer.TransferConfig() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - transfer_config=transfer.TransferConfig(name='name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = transfer.TransferConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_transfer_config(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/transferConfigs" % client.transport._host, args[1]) - - -def test_create_transfer_config_rest_flattened_error(transport: str = 'rest'): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_transfer_config( - datatransfer.CreateTransferConfigRequest(), - parent='parent_value', - transfer_config=transfer.TransferConfig(name='name_value'), - ) - - -def test_update_transfer_config_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_transfer_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_transfer_config] = mock_rpc - - request = {} - client.update_transfer_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_transfer_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_transfer_config_rest_required_fields(request_type=datatransfer.UpdateTransferConfigRequest): - transport_class = transports.DataTransferServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_transfer_config._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_transfer_config._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("authorization_code", "service_account_name", "update_mask", "version_info", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = transfer.TransferConfig() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = transfer.TransferConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_transfer_config(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_transfer_config_rest_unset_required_fields(): - transport = transports.DataTransferServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_transfer_config._get_unset_required_fields({}) - assert set(unset_fields) == (set(("authorizationCode", "serviceAccountName", "updateMask", "versionInfo", )) & set(("transferConfig", "updateMask", ))) - - -def test_update_transfer_config_rest_flattened(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = transfer.TransferConfig() - - # get arguments that satisfy an http rule for this method - sample_request = {'transfer_config': {'name': 'projects/sample1/locations/sample2/transferConfigs/sample3'}} - - # get truthy value for each flattened field - mock_args = dict( - transfer_config=transfer.TransferConfig(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = transfer.TransferConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_transfer_config(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{transfer_config.name=projects/*/locations/*/transferConfigs/*}" % client.transport._host, args[1]) - - -def test_update_transfer_config_rest_flattened_error(transport: str = 'rest'): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_transfer_config( - datatransfer.UpdateTransferConfigRequest(), - transfer_config=transfer.TransferConfig(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_delete_transfer_config_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_transfer_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_transfer_config] = mock_rpc - - request = {} - client.delete_transfer_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_transfer_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_transfer_config_rest_required_fields(request_type=datatransfer.DeleteTransferConfigRequest): - transport_class = transports.DataTransferServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_transfer_config._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_transfer_config._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_transfer_config(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_transfer_config_rest_unset_required_fields(): - transport = transports.DataTransferServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_transfer_config._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_delete_transfer_config_rest_flattened(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/transferConfigs/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_transfer_config(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/transferConfigs/*}" % client.transport._host, args[1]) - - -def test_delete_transfer_config_rest_flattened_error(transport: str = 'rest'): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_transfer_config( - datatransfer.DeleteTransferConfigRequest(), - name='name_value', - ) - - -def test_get_transfer_config_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_transfer_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.get_transfer_config] = mock_rpc - - request = {} - client.get_transfer_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_transfer_config(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_transfer_config_rest_required_fields(request_type=datatransfer.GetTransferConfigRequest): - transport_class = transports.DataTransferServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_transfer_config._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_transfer_config._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = transfer.TransferConfig() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = transfer.TransferConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_transfer_config(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_transfer_config_rest_unset_required_fields(): - transport = transports.DataTransferServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_transfer_config._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_transfer_config_rest_flattened(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = transfer.TransferConfig() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/transferConfigs/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = transfer.TransferConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_transfer_config(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/transferConfigs/*}" % client.transport._host, args[1]) - - -def test_get_transfer_config_rest_flattened_error(transport: str = 'rest'): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_transfer_config( - datatransfer.GetTransferConfigRequest(), - name='name_value', - ) - - -def test_list_transfer_configs_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_transfer_configs in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_transfer_configs] = mock_rpc - - request = {} - client.list_transfer_configs(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_transfer_configs(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_transfer_configs_rest_required_fields(request_type=datatransfer.ListTransferConfigsRequest): - transport_class = transports.DataTransferServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_transfer_configs._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_transfer_configs._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("data_source_ids", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = datatransfer.ListTransferConfigsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = datatransfer.ListTransferConfigsResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.list_transfer_configs(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_list_transfer_configs_rest_unset_required_fields():
-    transport = transports.DataTransferServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
-    unset_fields = transport.list_transfer_configs._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("dataSourceIds", "pageSize", "pageToken", )) & set(("parent", )))
-
-
-def test_list_transfer_configs_rest_flattened():
-    client = DataTransferServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = datatransfer.ListTransferConfigsResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            parent='parent_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = datatransfer.ListTransferConfigsResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.list_transfer_configs(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/transferConfigs" % client.transport._host, args[1])
-
-
-def test_list_transfer_configs_rest_flattened_error(transport: str = 'rest'):
-    client = DataTransferServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_transfer_configs(
-            datatransfer.ListTransferConfigsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_transfer_configs_rest_pager(transport: str = 'rest'):
-    client = DataTransferServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # Set the response as a series of pages
-        response = (
-            datatransfer.ListTransferConfigsResponse(
-                transfer_configs=[
-                    transfer.TransferConfig(),
-                    transfer.TransferConfig(),
-                    transfer.TransferConfig(),
-                ],
-                next_page_token='abc',
-            ),
-            datatransfer.ListTransferConfigsResponse(
-                transfer_configs=[],
-                next_page_token='def',
-            ),
-            datatransfer.ListTransferConfigsResponse(
-                transfer_configs=[
-                    transfer.TransferConfig(),
-                ],
-                next_page_token='ghi',
-            ),
-            datatransfer.ListTransferConfigsResponse(
-                transfer_configs=[
-                    transfer.TransferConfig(),
-                    transfer.TransferConfig(),
-                ],
-            ),
-        )
-        # Two responses for two calls
-        response = response + response
-
-        # Wrap the values into proper Response objs
-        response = tuple(datatransfer.ListTransferConfigsResponse.to_json(x) for x in response)
-        return_values = tuple(Response() for i in response)
-        for return_val, response_val in zip(return_values, response):
-            return_val._content = response_val.encode('UTF-8')
-            return_val.status_code = 200
-        req.side_effect = return_values
-
-        sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
-        pager = client.list_transfer_configs(request=sample_request)
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, transfer.TransferConfig)
-                   for i in results)
-
-        pages = list(client.list_transfer_configs(request=sample_request).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-
-def test_schedule_transfer_runs_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = DataTransferServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.schedule_transfer_runs in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.schedule_transfer_runs] = mock_rpc
-
-        request = {}
-        client.schedule_transfer_runs(request)
-
-        # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1 - - client.schedule_transfer_runs(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_schedule_transfer_runs_rest_required_fields(request_type=datatransfer.ScheduleTransferRunsRequest): - transport_class = transports.DataTransferServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).schedule_transfer_runs._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).schedule_transfer_runs._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = datatransfer.ScheduleTransferRunsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = datatransfer.ScheduleTransferRunsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.schedule_transfer_runs(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_schedule_transfer_runs_rest_unset_required_fields(): - transport = transports.DataTransferServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.schedule_transfer_runs._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "startTime", "endTime", ))) - - -def test_schedule_transfer_runs_rest_flattened(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = datatransfer.ScheduleTransferRunsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/transferConfigs/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - start_time=timestamp_pb2.Timestamp(seconds=751), - end_time=timestamp_pb2.Timestamp(seconds=751), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = datatransfer.ScheduleTransferRunsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.schedule_transfer_runs(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*/transferConfigs/*}:scheduleRuns" % client.transport._host, args[1]) - - -def test_schedule_transfer_runs_rest_flattened_error(transport: str = 'rest'): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.schedule_transfer_runs( - datatransfer.ScheduleTransferRunsRequest(), - parent='parent_value', - start_time=timestamp_pb2.Timestamp(seconds=751), - end_time=timestamp_pb2.Timestamp(seconds=751), - ) - - -def test_start_manual_transfer_runs_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.start_manual_transfer_runs in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.start_manual_transfer_runs] = mock_rpc - - request = {} - client.start_manual_transfer_runs(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.start_manual_transfer_runs(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_start_manual_transfer_runs_rest_required_fields(request_type=datatransfer.StartManualTransferRunsRequest): - transport_class = transports.DataTransferServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).start_manual_transfer_runs._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).start_manual_transfer_runs._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = datatransfer.StartManualTransferRunsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = datatransfer.StartManualTransferRunsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.start_manual_transfer_runs(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_start_manual_transfer_runs_rest_unset_required_fields(): - transport = transports.DataTransferServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.start_manual_transfer_runs._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", ))) - - -def test_get_transfer_run_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_transfer_run in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_transfer_run] = mock_rpc - - request = {} - client.get_transfer_run(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_transfer_run(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_transfer_run_rest_required_fields(request_type=datatransfer.GetTransferRunRequest): - transport_class = transports.DataTransferServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_transfer_run._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_transfer_run._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = transfer.TransferRun() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = transfer.TransferRun.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_transfer_run(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_transfer_run_rest_unset_required_fields(): - transport = transports.DataTransferServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_transfer_run._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_transfer_run_rest_flattened(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = transfer.TransferRun() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/transferConfigs/sample3/runs/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = transfer.TransferRun.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_transfer_run(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}" % client.transport._host, args[1]) - - -def test_get_transfer_run_rest_flattened_error(transport: str = 'rest'): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_transfer_run( - datatransfer.GetTransferRunRequest(), - name='name_value', - ) - - -def test_delete_transfer_run_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_transfer_run in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_transfer_run] = mock_rpc - - request = {} - client.delete_transfer_run(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_transfer_run(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_transfer_run_rest_required_fields(request_type=datatransfer.DeleteTransferRunRequest): - transport_class = transports.DataTransferServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_transfer_run._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_transfer_run._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_transfer_run(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_transfer_run_rest_unset_required_fields(): - transport = transports.DataTransferServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_transfer_run._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_delete_transfer_run_rest_flattened(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/transferConfigs/sample3/runs/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_transfer_run(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}" % client.transport._host, args[1]) - - -def test_delete_transfer_run_rest_flattened_error(transport: str = 'rest'): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_transfer_run( - datatransfer.DeleteTransferRunRequest(), - name='name_value', - ) - - -def test_list_transfer_runs_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_transfer_runs in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_transfer_runs] = mock_rpc - - request = {} - client.list_transfer_runs(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_transfer_runs(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_transfer_runs_rest_required_fields(request_type=datatransfer.ListTransferRunsRequest): - transport_class = transports.DataTransferServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_transfer_runs._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_transfer_runs._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", "run_attempt", "states", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = datatransfer.ListTransferRunsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
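- # transcode() normally maps the proto request onto the http rule's URI,
- # method, query params, and body; returning a canned result keeps this
- # test independent of the real URI template matching.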
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = datatransfer.ListTransferRunsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_transfer_runs(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_transfer_runs_rest_unset_required_fields(): - transport = transports.DataTransferServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_transfer_runs._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", "runAttempt", "states", )) & set(("parent", ))) - - -def test_list_transfer_runs_rest_flattened(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = datatransfer.ListTransferRunsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/transferConfigs/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = datatransfer.ListTransferRunsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_transfer_runs(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*/transferConfigs/*}/runs" % client.transport._host, args[1]) - - -def test_list_transfer_runs_rest_flattened_error(transport: str = 'rest'): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_transfer_runs( - datatransfer.ListTransferRunsRequest(), - parent='parent_value', - ) - - -def test_list_transfer_runs_rest_pager(transport: str = 'rest'): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
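- # The commented-out transcode patch below is unused; the pager assertions
- # only need the mocked Session to return the canned pages in order.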
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - datatransfer.ListTransferRunsResponse( - transfer_runs=[ - transfer.TransferRun(), - transfer.TransferRun(), - transfer.TransferRun(), - ], - next_page_token='abc', - ), - datatransfer.ListTransferRunsResponse( - transfer_runs=[], - next_page_token='def', - ), - datatransfer.ListTransferRunsResponse( - transfer_runs=[ - transfer.TransferRun(), - ], - next_page_token='ghi', - ), - datatransfer.ListTransferRunsResponse( - transfer_runs=[ - transfer.TransferRun(), - transfer.TransferRun(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(datatransfer.ListTransferRunsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2/transferConfigs/sample3'} - - pager = client.list_transfer_runs(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, transfer.TransferRun) - for i in results) - - pages = list(client.list_transfer_runs(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_list_transfer_logs_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_transfer_logs in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_transfer_logs] = mock_rpc - - request = {} - client.list_transfer_logs(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_transfer_logs(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_transfer_logs_rest_required_fields(request_type=datatransfer.ListTransferLogsRequest): - transport_class = transports.DataTransferServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_transfer_logs._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_transfer_logs._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("message_types", "page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = datatransfer.ListTransferLogsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = datatransfer.ListTransferLogsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_transfer_logs(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_transfer_logs_rest_unset_required_fields(): - transport = transports.DataTransferServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_transfer_logs._get_unset_required_fields({}) - assert set(unset_fields) == (set(("messageTypes", "pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_transfer_logs_rest_flattened(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = datatransfer.ListTransferLogsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/transferConfigs/sample3/runs/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = datatransfer.ListTransferLogsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_transfer_logs(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*/transferConfigs/*/runs/*}/transferLogs" % client.transport._host, args[1]) - - -def test_list_transfer_logs_rest_flattened_error(transport: str = 'rest'): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_transfer_logs( - datatransfer.ListTransferLogsRequest(), - parent='parent_value', - ) - - -def test_list_transfer_logs_rest_pager(transport: str = 'rest'): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - datatransfer.ListTransferLogsResponse( - transfer_messages=[ - transfer.TransferMessage(), - transfer.TransferMessage(), - transfer.TransferMessage(), - ], - next_page_token='abc', - ), - datatransfer.ListTransferLogsResponse( - transfer_messages=[], - next_page_token='def', - ), - datatransfer.ListTransferLogsResponse( - transfer_messages=[ - transfer.TransferMessage(), - ], - next_page_token='ghi', - ), - datatransfer.ListTransferLogsResponse( - transfer_messages=[ - transfer.TransferMessage(), - transfer.TransferMessage(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(datatransfer.ListTransferLogsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2/transferConfigs/sample3/runs/sample4'} - - pager = client.list_transfer_logs(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, transfer.TransferMessage) - for i in results) - - pages = list(client.list_transfer_logs(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_check_valid_creds_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.check_valid_creds in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.check_valid_creds] = mock_rpc - - request = {} - client.check_valid_creds(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.check_valid_creds(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_check_valid_creds_rest_required_fields(request_type=datatransfer.CheckValidCredsRequest): - transport_class = transports.DataTransferServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).check_valid_creds._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).check_valid_creds._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = datatransfer.CheckValidCredsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = datatransfer.CheckValidCredsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.check_valid_creds(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_check_valid_creds_rest_unset_required_fields(): - transport = transports.DataTransferServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.check_valid_creds._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_check_valid_creds_rest_flattened(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
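- # Patching the transport session's request method lets the flattened-call
- # test inspect the final request URI without any network traffic.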
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = datatransfer.CheckValidCredsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/dataSources/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = datatransfer.CheckValidCredsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.check_valid_creds(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/dataSources/*}:checkValidCreds" % client.transport._host, args[1]) - - -def test_check_valid_creds_rest_flattened_error(transport: str = 'rest'): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.check_valid_creds( - datatransfer.CheckValidCredsRequest(), - name='name_value', - ) - - -def test_enroll_data_sources_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.enroll_data_sources in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.enroll_data_sources] = mock_rpc - - request = {} - client.enroll_data_sources(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.enroll_data_sources(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_enroll_data_sources_rest_required_fields(request_type=datatransfer.EnrollDataSourcesRequest): - transport_class = transports.DataTransferServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).enroll_data_sources._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).enroll_data_sources._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.enroll_data_sources(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_enroll_data_sources_rest_unset_required_fields(): - transport = transports.DataTransferServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.enroll_data_sources._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_unenroll_data_sources_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.unenroll_data_sources in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.unenroll_data_sources] = mock_rpc - - request = {} - client.unenroll_data_sources(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.unenroll_data_sources(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_unenroll_data_sources_rest_required_fields(request_type=datatransfer.UnenrollDataSourcesRequest): - transport_class = transports.DataTransferServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).unenroll_data_sources._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).unenroll_data_sources._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.unenroll_data_sources(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_unenroll_data_sources_rest_unset_required_fields(): - transport = transports.DataTransferServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.unenroll_data_sources._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.DataTransferServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. 
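- # Likewise, combining a transport instance with any other credential
- # source (credentials file, API key, scopes), or an api_key with explicit
- # credentials, must raise ValueError.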
- transport = transports.DataTransferServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DataTransferServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.DataTransferServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DataTransferServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DataTransferServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.DataTransferServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DataTransferServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.DataTransferServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = DataTransferServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.DataTransferServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.DataTransferServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.DataTransferServiceGrpcTransport, - transports.DataTransferServiceGrpcAsyncIOTransport, - transports.DataTransferServiceRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = DataTransferServiceClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_data_source_empty_call_grpc(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_data_source), - '__call__') as call: - call.return_value = datatransfer.DataSource() - client.get_data_source(request=None) - - # Establish that the underlying stub method was called. 
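- # With request=None the client builds a default request message, so the
- # stub still receives a well-formed GetDataSourceRequest.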
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.GetDataSourceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_data_sources_empty_call_grpc(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_data_sources), - '__call__') as call: - call.return_value = datatransfer.ListDataSourcesResponse() - client.list_data_sources(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.ListDataSourcesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_transfer_config_empty_call_grpc(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_transfer_config), - '__call__') as call: - call.return_value = transfer.TransferConfig() - client.create_transfer_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.CreateTransferConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_transfer_config_empty_call_grpc(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_transfer_config), - '__call__') as call: - call.return_value = transfer.TransferConfig() - client.update_transfer_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.UpdateTransferConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_transfer_config_empty_call_grpc(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_transfer_config), - '__call__') as call: - call.return_value = None - client.delete_transfer_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.DeleteTransferConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_transfer_config_empty_call_grpc(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.get_transfer_config), - '__call__') as call: - call.return_value = transfer.TransferConfig() - client.get_transfer_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.GetTransferConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_transfer_configs_empty_call_grpc(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_transfer_configs), - '__call__') as call: - call.return_value = datatransfer.ListTransferConfigsResponse() - client.list_transfer_configs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.ListTransferConfigsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_schedule_transfer_runs_empty_call_grpc(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.schedule_transfer_runs), - '__call__') as call: - call.return_value = datatransfer.ScheduleTransferRunsResponse() - client.schedule_transfer_runs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.ScheduleTransferRunsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_start_manual_transfer_runs_empty_call_grpc(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.start_manual_transfer_runs), - '__call__') as call: - call.return_value = datatransfer.StartManualTransferRunsResponse() - client.start_manual_transfer_runs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.StartManualTransferRunsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_transfer_run_empty_call_grpc(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_transfer_run), - '__call__') as call: - call.return_value = transfer.TransferRun() - client.get_transfer_run(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.GetTransferRunRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_transfer_run_empty_call_grpc(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_transfer_run), - '__call__') as call: - call.return_value = None - client.delete_transfer_run(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.DeleteTransferRunRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_transfer_runs_empty_call_grpc(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_transfer_runs), - '__call__') as call: - call.return_value = datatransfer.ListTransferRunsResponse() - client.list_transfer_runs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.ListTransferRunsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_transfer_logs_empty_call_grpc(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_transfer_logs), - '__call__') as call: - call.return_value = datatransfer.ListTransferLogsResponse() - client.list_transfer_logs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.ListTransferLogsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_check_valid_creds_empty_call_grpc(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.check_valid_creds), - '__call__') as call: - call.return_value = datatransfer.CheckValidCredsResponse() - client.check_valid_creds(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.CheckValidCredsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_enroll_data_sources_empty_call_grpc(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.enroll_data_sources), - '__call__') as call: - call.return_value = None - client.enroll_data_sources(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.EnrollDataSourcesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_unenroll_data_sources_empty_call_grpc(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.unenroll_data_sources), - '__call__') as call: - call.return_value = None - client.unenroll_data_sources(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.UnenrollDataSourcesRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = DataTransferServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_data_source_empty_call_grpc_asyncio(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_data_source), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datatransfer.DataSource( - name='name_value', - data_source_id='data_source_id_value', - display_name='display_name_value', - description='description_value', - client_id='client_id_value', - scopes=['scopes_value'], - transfer_type=transfer.TransferType.BATCH, - supports_multiple_transfers=True, - update_deadline_seconds=2406, - default_schedule='default_schedule_value', - supports_custom_schedule=True, - help_url='help_url_value', - authorization_type=datatransfer.DataSource.AuthorizationType.AUTHORIZATION_CODE, - data_refresh_type=datatransfer.DataSource.DataRefreshType.SLIDING_WINDOW, - default_data_refresh_window_days=3379, - manual_runs_disabled=True, - )) - await client.get_data_source(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.GetDataSourceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_data_sources_empty_call_grpc_asyncio(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. 
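- # The async transport awaits its stub, so each canned response below is
- # wrapped in grpc_helpers_async.FakeUnaryUnaryCall to make it awaitable.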
- with mock.patch.object( - type(client.transport.list_data_sources), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datatransfer.ListDataSourcesResponse( - next_page_token='next_page_token_value', - )) - await client.list_data_sources(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.ListDataSourcesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_transfer_config_empty_call_grpc_asyncio(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_transfer_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(transfer.TransferConfig( - name='name_value', - display_name='display_name_value', - data_source_id='data_source_id_value', - schedule='schedule_value', - data_refresh_window_days=2543, - disabled=True, - state=transfer.TransferState.PENDING, - user_id=747, - dataset_region='dataset_region_value', - notification_pubsub_topic='notification_pubsub_topic_value', - )) - await client.create_transfer_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.CreateTransferConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_transfer_config_empty_call_grpc_asyncio(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_transfer_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(transfer.TransferConfig( - name='name_value', - display_name='display_name_value', - data_source_id='data_source_id_value', - schedule='schedule_value', - data_refresh_window_days=2543, - disabled=True, - state=transfer.TransferState.PENDING, - user_id=747, - dataset_region='dataset_region_value', - notification_pubsub_topic='notification_pubsub_topic_value', - )) - await client.update_transfer_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.UpdateTransferConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_transfer_config_empty_call_grpc_asyncio(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_transfer_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_transfer_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.DeleteTransferConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_transfer_config_empty_call_grpc_asyncio(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_transfer_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(transfer.TransferConfig( - name='name_value', - display_name='display_name_value', - data_source_id='data_source_id_value', - schedule='schedule_value', - data_refresh_window_days=2543, - disabled=True, - state=transfer.TransferState.PENDING, - user_id=747, - dataset_region='dataset_region_value', - notification_pubsub_topic='notification_pubsub_topic_value', - )) - await client.get_transfer_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.GetTransferConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_transfer_configs_empty_call_grpc_asyncio(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_transfer_configs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datatransfer.ListTransferConfigsResponse( - next_page_token='next_page_token_value', - )) - await client.list_transfer_configs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.ListTransferConfigsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_schedule_transfer_runs_empty_call_grpc_asyncio(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.schedule_transfer_runs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datatransfer.ScheduleTransferRunsResponse( - )) - await client.schedule_transfer_runs(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.ScheduleTransferRunsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_start_manual_transfer_runs_empty_call_grpc_asyncio(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.start_manual_transfer_runs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datatransfer.StartManualTransferRunsResponse( - )) - await client.start_manual_transfer_runs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.StartManualTransferRunsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_transfer_run_empty_call_grpc_asyncio(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_transfer_run), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(transfer.TransferRun( - name='name_value', - data_source_id='data_source_id_value', - state=transfer.TransferState.PENDING, - user_id=747, - schedule='schedule_value', - notification_pubsub_topic='notification_pubsub_topic_value', - )) - await client.get_transfer_run(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.GetTransferRunRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_transfer_run_empty_call_grpc_asyncio(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_transfer_run), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_transfer_run(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.DeleteTransferRunRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_transfer_runs_empty_call_grpc_asyncio(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.list_transfer_runs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datatransfer.ListTransferRunsResponse( - next_page_token='next_page_token_value', - )) - await client.list_transfer_runs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.ListTransferRunsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_transfer_logs_empty_call_grpc_asyncio(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_transfer_logs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datatransfer.ListTransferLogsResponse( - next_page_token='next_page_token_value', - )) - await client.list_transfer_logs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.ListTransferLogsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_check_valid_creds_empty_call_grpc_asyncio(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.check_valid_creds), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datatransfer.CheckValidCredsResponse( - has_valid_creds=True, - )) - await client.check_valid_creds(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.CheckValidCredsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_enroll_data_sources_empty_call_grpc_asyncio(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.enroll_data_sources), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.enroll_data_sources(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.EnrollDataSourcesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_unenroll_data_sources_empty_call_grpc_asyncio(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.unenroll_data_sources), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.unenroll_data_sources(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.UnenrollDataSourcesRequest() - - assert args[0] == request_msg - - -def test_transport_kind_rest(): - transport = DataTransferServiceClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" - - -def test_get_data_source_rest_bad_request(request_type=datatransfer.GetDataSourceRequest): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dataSources/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_data_source(request) - - -@pytest.mark.parametrize("request_type", [ - datatransfer.GetDataSourceRequest, - dict, -]) -def test_get_data_source_rest_call_success(request_type): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dataSources/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
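- # The field values below are arbitrary but deliberately non-default, so
- # the assertions further down would catch any field dropped or mangled
- # in the protobuf-to-JSON-to-protobuf round trip performed by the REST
- # transport.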
- return_value = datatransfer.DataSource( - name='name_value', - data_source_id='data_source_id_value', - display_name='display_name_value', - description='description_value', - client_id='client_id_value', - scopes=['scopes_value'], - transfer_type=transfer.TransferType.BATCH, - supports_multiple_transfers=True, - update_deadline_seconds=2406, - default_schedule='default_schedule_value', - supports_custom_schedule=True, - help_url='help_url_value', - authorization_type=datatransfer.DataSource.AuthorizationType.AUTHORIZATION_CODE, - data_refresh_type=datatransfer.DataSource.DataRefreshType.SLIDING_WINDOW, - default_data_refresh_window_days=3379, - manual_runs_disabled=True, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = datatransfer.DataSource.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_data_source(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, datatransfer.DataSource) - assert response.name == 'name_value' - assert response.data_source_id == 'data_source_id_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.client_id == 'client_id_value' - assert response.scopes == ['scopes_value'] - assert response.transfer_type == transfer.TransferType.BATCH - assert response.supports_multiple_transfers is True - assert response.update_deadline_seconds == 2406 - assert response.default_schedule == 'default_schedule_value' - assert response.supports_custom_schedule is True - assert response.help_url == 'help_url_value' - assert response.authorization_type == datatransfer.DataSource.AuthorizationType.AUTHORIZATION_CODE - assert response.data_refresh_type == datatransfer.DataSource.DataRefreshType.SLIDING_WINDOW - assert response.default_data_refresh_window_days == 3379 - assert response.manual_runs_disabled is True - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_data_source_rest_interceptors(null_interceptor): - transport = transports.DataTransferServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataTransferServiceRestInterceptor(), - ) - client = DataTransferServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DataTransferServiceRestInterceptor, "post_get_data_source") as post, \ - mock.patch.object(transports.DataTransferServiceRestInterceptor, "post_get_data_source_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataTransferServiceRestInterceptor, "pre_get_data_source") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = datatransfer.GetDataSourceRequest.pb(datatransfer.GetDataSourceRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} 
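- # With path_template.transcode and the HTTP session both mocked, no real
- # request is built or sent; the call below exercises only the
- # interceptor hooks. pre_* may rewrite the request and metadata, post_*
- # receives the decoded response, and post_*_with_metadata is also handed
- # the response metadata.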
- return_value = datatransfer.DataSource.to_json(datatransfer.DataSource()) - req.return_value.content = return_value - - request = datatransfer.GetDataSourceRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = datatransfer.DataSource() - post_with_metadata.return_value = datatransfer.DataSource(), metadata - - client.get_data_source(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_data_sources_rest_bad_request(request_type=datatransfer.ListDataSourcesRequest): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_data_sources(request) - - -@pytest.mark.parametrize("request_type", [ - datatransfer.ListDataSourcesRequest, - dict, -]) -def test_list_data_sources_rest_call_success(request_type): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = datatransfer.ListDataSourcesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = datatransfer.ListDataSourcesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_data_sources(request) - - # Establish that the response is the type that we expect. 
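- # list_data_sources wraps the raw response in a pager; presumably the
- # pager would re-issue the mocked HTTP request to fetch further pages,
- # but only the first page and its token are checked here.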
- assert isinstance(response, pagers.ListDataSourcesPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_data_sources_rest_interceptors(null_interceptor): - transport = transports.DataTransferServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataTransferServiceRestInterceptor(), - ) - client = DataTransferServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DataTransferServiceRestInterceptor, "post_list_data_sources") as post, \ - mock.patch.object(transports.DataTransferServiceRestInterceptor, "post_list_data_sources_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataTransferServiceRestInterceptor, "pre_list_data_sources") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = datatransfer.ListDataSourcesRequest.pb(datatransfer.ListDataSourcesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = datatransfer.ListDataSourcesResponse.to_json(datatransfer.ListDataSourcesResponse()) - req.return_value.content = return_value - - request = datatransfer.ListDataSourcesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = datatransfer.ListDataSourcesResponse() - post_with_metadata.return_value = datatransfer.ListDataSourcesResponse(), metadata - - client.list_data_sources(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_transfer_config_rest_bad_request(request_type=datatransfer.CreateTransferConfigRequest): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
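- # The request_init dict above is shaped to match the method's URL path
- # template so that transcoding succeeds; the mocked 400 status then
- # causes the transport to raise core_exceptions.BadRequest, with the
- # empty JSON body standing in for the error payload.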
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_transfer_config(request) - - -@pytest.mark.parametrize("request_type", [ - datatransfer.CreateTransferConfigRequest, - dict, -]) -def test_create_transfer_config_rest_call_success(request_type): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["transfer_config"] = {'name': 'name_value', 'destination_dataset_id': 'destination_dataset_id_value', 'display_name': 'display_name_value', 'data_source_id': 'data_source_id_value', 'params': {'fields': {}}, 'schedule': 'schedule_value', 'schedule_options': {'disable_auto_scheduling': True, 'start_time': {'seconds': 751, 'nanos': 543}, 'end_time': {}}, 'schedule_options_v2': {'time_based_schedule': {'schedule': 'schedule_value', 'start_time': {}, 'end_time': {}}, 'manual_schedule': {}, 'event_driven_schedule': {'pubsub_subscription': 'pubsub_subscription_value'}}, 'data_refresh_window_days': 2543, 'disabled': True, 'update_time': {}, 'next_run_time': {}, 'state': 2, 'user_id': 747, 'dataset_region': 'dataset_region_value', 'notification_pubsub_topic': 'notification_pubsub_topic_value', 'email_preferences': {'enable_failure_email': True}, 'owner_info': {'email': 'email_value'}, 'encryption_configuration': {'kms_key_name': {'value': 'value_value'}}, 'error': {'code': 411, 'message': 'message_value', 'details': [{'type_url': 'type.googleapis.com/google.protobuf.Duration', 'value': b'\x08\x0c\x10\xdb\x07'}]}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = datatransfer.CreateTransferConfigRequest.meta.fields["transfer_config"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
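- # proto-plus message classes expose their schema via `.meta.fields`,
- # while vanilla protobuf classes expose a DESCRIPTOR attribute; the
- # check below uses the absence of DESCRIPTOR to tell the two apart. As a
- # hypothetical example, a composite field like "schedule_options" would
- # yield the fields of ScheduleOptions, while a scalar field such as
- # "schedule" would yield an empty list.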
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["transfer_config"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["transfer_config"][field])): - del request_init["transfer_config"][field][i][subfield] - else: - del request_init["transfer_config"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = transfer.TransferConfig( - name='name_value', - display_name='display_name_value', - data_source_id='data_source_id_value', - schedule='schedule_value', - data_refresh_window_days=2543, - disabled=True, - state=transfer.TransferState.PENDING, - user_id=747, - dataset_region='dataset_region_value', - notification_pubsub_topic='notification_pubsub_topic_value', - destination_dataset_id='destination_dataset_id_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = transfer.TransferConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_transfer_config(request) - - # Establish that the response is the type that we expect. 
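- # Note that destination_dataset_id is set on the fake response above but
- # is not asserted below, presumably because it belongs to a oneof; the
- # generated assertions cover only the plain scalar fields.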
- assert isinstance(response, transfer.TransferConfig) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.data_source_id == 'data_source_id_value' - assert response.schedule == 'schedule_value' - assert response.data_refresh_window_days == 2543 - assert response.disabled is True - assert response.state == transfer.TransferState.PENDING - assert response.user_id == 747 - assert response.dataset_region == 'dataset_region_value' - assert response.notification_pubsub_topic == 'notification_pubsub_topic_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_transfer_config_rest_interceptors(null_interceptor): - transport = transports.DataTransferServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataTransferServiceRestInterceptor(), - ) - client = DataTransferServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DataTransferServiceRestInterceptor, "post_create_transfer_config") as post, \ - mock.patch.object(transports.DataTransferServiceRestInterceptor, "post_create_transfer_config_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataTransferServiceRestInterceptor, "pre_create_transfer_config") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = datatransfer.CreateTransferConfigRequest.pb(datatransfer.CreateTransferConfigRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = transfer.TransferConfig.to_json(transfer.TransferConfig()) - req.return_value.content = return_value - - request = datatransfer.CreateTransferConfigRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = transfer.TransferConfig() - post_with_metadata.return_value = transfer.TransferConfig(), metadata - - client.create_transfer_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_transfer_config_rest_bad_request(request_type=datatransfer.UpdateTransferConfigRequest): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'transfer_config': {'name': 'projects/sample1/locations/sample2/transferConfigs/sample3'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_transfer_config(request) - - -@pytest.mark.parametrize("request_type", [ - datatransfer.UpdateTransferConfigRequest, - dict, -]) -def test_update_transfer_config_rest_call_success(request_type): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'transfer_config': {'name': 'projects/sample1/locations/sample2/transferConfigs/sample3'}} - request_init["transfer_config"] = {'name': 'projects/sample1/locations/sample2/transferConfigs/sample3', 'destination_dataset_id': 'destination_dataset_id_value', 'display_name': 'display_name_value', 'data_source_id': 'data_source_id_value', 'params': {'fields': {}}, 'schedule': 'schedule_value', 'schedule_options': {'disable_auto_scheduling': True, 'start_time': {'seconds': 751, 'nanos': 543}, 'end_time': {}}, 'schedule_options_v2': {'time_based_schedule': {'schedule': 'schedule_value', 'start_time': {}, 'end_time': {}}, 'manual_schedule': {}, 'event_driven_schedule': {'pubsub_subscription': 'pubsub_subscription_value'}}, 'data_refresh_window_days': 2543, 'disabled': True, 'update_time': {}, 'next_run_time': {}, 'state': 2, 'user_id': 747, 'dataset_region': 'dataset_region_value', 'notification_pubsub_topic': 'notification_pubsub_topic_value', 'email_preferences': {'enable_failure_email': True}, 'owner_info': {'email': 'email_value'}, 'encryption_configuration': {'kms_key_name': {'value': 'value_value'}}, 'error': {'code': 411, 'message': 'message_value', 'details': [{'type_url': 'type.googleapis.com/google.protobuf.Duration', 'value': b'\x08\x0c\x10\xdb\x07'}]}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = datatransfer.UpdateTransferConfigRequest.meta.fields["transfer_config"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["transfer_config"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["transfer_config"][field])): - del request_init["transfer_config"][field][i][subfield] - else: - del request_init["transfer_config"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = transfer.TransferConfig( - name='name_value', - display_name='display_name_value', - data_source_id='data_source_id_value', - schedule='schedule_value', - data_refresh_window_days=2543, - disabled=True, - state=transfer.TransferState.PENDING, - user_id=747, - dataset_region='dataset_region_value', - notification_pubsub_topic='notification_pubsub_topic_value', - destination_dataset_id='destination_dataset_id_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = transfer.TransferConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_transfer_config(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, transfer.TransferConfig) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.data_source_id == 'data_source_id_value' - assert response.schedule == 'schedule_value' - assert response.data_refresh_window_days == 2543 - assert response.disabled is True - assert response.state == transfer.TransferState.PENDING - assert response.user_id == 747 - assert response.dataset_region == 'dataset_region_value' - assert response.notification_pubsub_topic == 'notification_pubsub_topic_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_transfer_config_rest_interceptors(null_interceptor): - transport = transports.DataTransferServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataTransferServiceRestInterceptor(), - ) - client = DataTransferServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DataTransferServiceRestInterceptor, "post_update_transfer_config") as post, \ - mock.patch.object(transports.DataTransferServiceRestInterceptor, "post_update_transfer_config_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataTransferServiceRestInterceptor, "pre_update_transfer_config") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = datatransfer.UpdateTransferConfigRequest.pb(datatransfer.UpdateTransferConfigRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = transfer.TransferConfig.to_json(transfer.TransferConfig()) - req.return_value.content = return_value - - request = datatransfer.UpdateTransferConfigRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = transfer.TransferConfig() - post_with_metadata.return_value = transfer.TransferConfig(), metadata - - client.update_transfer_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_transfer_config_rest_bad_request(request_type=datatransfer.DeleteTransferConfigRequest): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/transferConfigs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_transfer_config(request) - - -@pytest.mark.parametrize("request_type", [ - datatransfer.DeleteTransferConfigRequest, - dict, -]) -def test_delete_transfer_config_rest_call_success(request_type): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/transferConfigs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_transfer_config(request) - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_transfer_config_rest_interceptors(null_interceptor): - transport = transports.DataTransferServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataTransferServiceRestInterceptor(), - ) - client = DataTransferServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DataTransferServiceRestInterceptor, "pre_delete_transfer_config") as pre: - pre.assert_not_called() - pb_message = datatransfer.DeleteTransferConfigRequest.pb(datatransfer.DeleteTransferConfigRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = datatransfer.DeleteTransferConfigRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_transfer_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_get_transfer_config_rest_bad_request(request_type=datatransfer.GetTransferConfigRequest): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/transferConfigs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_transfer_config(request) - - -@pytest.mark.parametrize("request_type", [ - datatransfer.GetTransferConfigRequest, - dict, -]) -def test_get_transfer_config_rest_call_success(request_type): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/transferConfigs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = transfer.TransferConfig( - name='name_value', - display_name='display_name_value', - data_source_id='data_source_id_value', - schedule='schedule_value', - data_refresh_window_days=2543, - disabled=True, - state=transfer.TransferState.PENDING, - user_id=747, - dataset_region='dataset_region_value', - notification_pubsub_topic='notification_pubsub_topic_value', - destination_dataset_id='destination_dataset_id_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = transfer.TransferConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_transfer_config(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, transfer.TransferConfig) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.data_source_id == 'data_source_id_value' - assert response.schedule == 'schedule_value' - assert response.data_refresh_window_days == 2543 - assert response.disabled is True - assert response.state == transfer.TransferState.PENDING - assert response.user_id == 747 - assert response.dataset_region == 'dataset_region_value' - assert response.notification_pubsub_topic == 'notification_pubsub_topic_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_transfer_config_rest_interceptors(null_interceptor): - transport = transports.DataTransferServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataTransferServiceRestInterceptor(), - ) - client = DataTransferServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DataTransferServiceRestInterceptor, "post_get_transfer_config") as post, \ - mock.patch.object(transports.DataTransferServiceRestInterceptor, "post_get_transfer_config_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataTransferServiceRestInterceptor, "pre_get_transfer_config") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = datatransfer.GetTransferConfigRequest.pb(datatransfer.GetTransferConfigRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = transfer.TransferConfig.to_json(transfer.TransferConfig()) - req.return_value.content = return_value - - request = datatransfer.GetTransferConfigRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = transfer.TransferConfig() - post_with_metadata.return_value = transfer.TransferConfig(), metadata - - client.get_transfer_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_transfer_configs_rest_bad_request(request_type=datatransfer.ListTransferConfigsRequest): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_transfer_configs(request) - - -@pytest.mark.parametrize("request_type", [ - datatransfer.ListTransferConfigsRequest, - dict, -]) -def test_list_transfer_configs_rest_call_success(request_type): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = datatransfer.ListTransferConfigsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = datatransfer.ListTransferConfigsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_transfer_configs(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListTransferConfigsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_transfer_configs_rest_interceptors(null_interceptor): - transport = transports.DataTransferServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataTransferServiceRestInterceptor(), - ) - client = DataTransferServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DataTransferServiceRestInterceptor, "post_list_transfer_configs") as post, \ - mock.patch.object(transports.DataTransferServiceRestInterceptor, "post_list_transfer_configs_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataTransferServiceRestInterceptor, "pre_list_transfer_configs") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = datatransfer.ListTransferConfigsRequest.pb(datatransfer.ListTransferConfigsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = datatransfer.ListTransferConfigsResponse.to_json(datatransfer.ListTransferConfigsResponse()) - req.return_value.content = return_value - - request = datatransfer.ListTransferConfigsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = datatransfer.ListTransferConfigsResponse() - post_with_metadata.return_value = datatransfer.ListTransferConfigsResponse(), metadata - - client.list_transfer_configs(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_schedule_transfer_runs_rest_bad_request(request_type=datatransfer.ScheduleTransferRunsRequest): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/transferConfigs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.schedule_transfer_runs(request) - - -@pytest.mark.parametrize("request_type", [ - datatransfer.ScheduleTransferRunsRequest, - dict, -]) -def test_schedule_transfer_runs_rest_call_success(request_type): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/transferConfigs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = datatransfer.ScheduleTransferRunsResponse( - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = datatransfer.ScheduleTransferRunsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.schedule_transfer_runs(request) - - # Establish that the response is the type that we expect. 
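- # The fake response above is constructed with no fields set, so beyond
- # the isinstance check there is nothing further to assert; any repeated
- # fields on the response simply default to empty.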
- assert isinstance(response, datatransfer.ScheduleTransferRunsResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_schedule_transfer_runs_rest_interceptors(null_interceptor): - transport = transports.DataTransferServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataTransferServiceRestInterceptor(), - ) - client = DataTransferServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DataTransferServiceRestInterceptor, "post_schedule_transfer_runs") as post, \ - mock.patch.object(transports.DataTransferServiceRestInterceptor, "post_schedule_transfer_runs_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataTransferServiceRestInterceptor, "pre_schedule_transfer_runs") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = datatransfer.ScheduleTransferRunsRequest.pb(datatransfer.ScheduleTransferRunsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = datatransfer.ScheduleTransferRunsResponse.to_json(datatransfer.ScheduleTransferRunsResponse()) - req.return_value.content = return_value - - request = datatransfer.ScheduleTransferRunsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = datatransfer.ScheduleTransferRunsResponse() - post_with_metadata.return_value = datatransfer.ScheduleTransferRunsResponse(), metadata - - client.schedule_transfer_runs(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_start_manual_transfer_runs_rest_bad_request(request_type=datatransfer.StartManualTransferRunsRequest): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/transferConfigs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.start_manual_transfer_runs(request) - - -@pytest.mark.parametrize("request_type", [ - datatransfer.StartManualTransferRunsRequest, - dict, -]) -def test_start_manual_transfer_runs_rest_call_success(request_type): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/transferConfigs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = datatransfer.StartManualTransferRunsResponse( - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = datatransfer.StartManualTransferRunsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.start_manual_transfer_runs(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, datatransfer.StartManualTransferRunsResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_start_manual_transfer_runs_rest_interceptors(null_interceptor): - transport = transports.DataTransferServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.DataTransferServiceRestInterceptor(), - ) - client = DataTransferServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.DataTransferServiceRestInterceptor, "post_start_manual_transfer_runs") as post, \ - mock.patch.object(transports.DataTransferServiceRestInterceptor, "post_start_manual_transfer_runs_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.DataTransferServiceRestInterceptor, "pre_start_manual_transfer_runs") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = datatransfer.StartManualTransferRunsRequest.pb(datatransfer.StartManualTransferRunsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = datatransfer.StartManualTransferRunsResponse.to_json(datatransfer.StartManualTransferRunsResponse()) - req.return_value.content = return_value - - request = datatransfer.StartManualTransferRunsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = datatransfer.StartManualTransferRunsResponse() - post_with_metadata.return_value = datatransfer.StartManualTransferRunsResponse(), metadata - - client.start_manual_transfer_runs(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_transfer_run_rest_bad_request(request_type=datatransfer.GetTransferRunRequest): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/transferConfigs/sample3/runs/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_transfer_run(request) - - -@pytest.mark.parametrize("request_type", [ - datatransfer.GetTransferRunRequest, - dict, -]) -def test_get_transfer_run_rest_call_success(request_type): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/transferConfigs/sample3/runs/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = transfer.TransferRun( - name='name_value', - data_source_id='data_source_id_value', - state=transfer.TransferState.PENDING, - user_id=747, - schedule='schedule_value', - notification_pubsub_topic='notification_pubsub_topic_value', - destination_dataset_id='destination_dataset_id_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = transfer.TransferRun.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_transfer_run(request) - - # Establish that the response is the type that we expect. 
-    assert isinstance(response, transfer.TransferRun)
-    assert response.name == 'name_value'
-    assert response.data_source_id == 'data_source_id_value'
-    assert response.state == transfer.TransferState.PENDING
-    assert response.user_id == 747
-    assert response.schedule == 'schedule_value'
-    assert response.notification_pubsub_topic == 'notification_pubsub_topic_value'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_get_transfer_run_rest_interceptors(null_interceptor):
-    transport = transports.DataTransferServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.DataTransferServiceRestInterceptor(),
-    )
-    client = DataTransferServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-         mock.patch.object(path_template, "transcode") as transcode, \
-         mock.patch.object(transports.DataTransferServiceRestInterceptor, "post_get_transfer_run") as post, \
-         mock.patch.object(transports.DataTransferServiceRestInterceptor, "post_get_transfer_run_with_metadata") as post_with_metadata, \
-         mock.patch.object(transports.DataTransferServiceRestInterceptor, "pre_get_transfer_run") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = datatransfer.GetTransferRunRequest.pb(datatransfer.GetTransferRunRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = transfer.TransferRun.to_json(transfer.TransferRun())
-        req.return_value.content = return_value
-
-        request = datatransfer.GetTransferRunRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = transfer.TransferRun()
-        post_with_metadata.return_value = transfer.TransferRun(), metadata
-
-        client.get_transfer_run(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_delete_transfer_run_rest_bad_request(request_type=datatransfer.DeleteTransferRunRequest):
-    client = DataTransferServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/transferConfigs/sample3/runs/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.delete_transfer_run(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    datatransfer.DeleteTransferRunRequest,
-    dict,
-])
-def test_delete_transfer_run_rest_call_success(request_type):
-    client = DataTransferServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/transferConfigs/sample3/runs/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = None
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-        json_return_value = ''
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.delete_transfer_run(request)
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_delete_transfer_run_rest_interceptors(null_interceptor):
-    transport = transports.DataTransferServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.DataTransferServiceRestInterceptor(),
-    )
-    client = DataTransferServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-         mock.patch.object(path_template, "transcode") as transcode, \
-         mock.patch.object(transports.DataTransferServiceRestInterceptor, "pre_delete_transfer_run") as pre:
-        pre.assert_not_called()
-        pb_message = datatransfer.DeleteTransferRunRequest.pb(datatransfer.DeleteTransferRunRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        request = datatransfer.DeleteTransferRunRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-
-        client.delete_transfer_run(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-
-
-def test_list_transfer_runs_rest_bad_request(request_type=datatransfer.ListTransferRunsRequest):
-    client = DataTransferServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'projects/sample1/locations/sample2/transferConfigs/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_transfer_runs(request) - - -@pytest.mark.parametrize("request_type", [ - datatransfer.ListTransferRunsRequest, - dict, -]) -def test_list_transfer_runs_rest_call_success(request_type): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/transferConfigs/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = datatransfer.ListTransferRunsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = datatransfer.ListTransferRunsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_transfer_runs(request) - - # Establish that the response is the type that we expect. 
-    assert isinstance(response, pagers.ListTransferRunsPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_list_transfer_runs_rest_interceptors(null_interceptor):
-    transport = transports.DataTransferServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.DataTransferServiceRestInterceptor(),
-    )
-    client = DataTransferServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-         mock.patch.object(path_template, "transcode") as transcode, \
-         mock.patch.object(transports.DataTransferServiceRestInterceptor, "post_list_transfer_runs") as post, \
-         mock.patch.object(transports.DataTransferServiceRestInterceptor, "post_list_transfer_runs_with_metadata") as post_with_metadata, \
-         mock.patch.object(transports.DataTransferServiceRestInterceptor, "pre_list_transfer_runs") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = datatransfer.ListTransferRunsRequest.pb(datatransfer.ListTransferRunsRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = datatransfer.ListTransferRunsResponse.to_json(datatransfer.ListTransferRunsResponse())
-        req.return_value.content = return_value
-
-        request = datatransfer.ListTransferRunsRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = datatransfer.ListTransferRunsResponse()
-        post_with_metadata.return_value = datatransfer.ListTransferRunsResponse(), metadata
-
-        client.list_transfer_runs(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_list_transfer_logs_rest_bad_request(request_type=datatransfer.ListTransferLogsRequest):
-    client = DataTransferServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'parent': 'projects/sample1/locations/sample2/transferConfigs/sample3/runs/sample4'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_transfer_logs(request) - - -@pytest.mark.parametrize("request_type", [ - datatransfer.ListTransferLogsRequest, - dict, -]) -def test_list_transfer_logs_rest_call_success(request_type): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/transferConfigs/sample3/runs/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = datatransfer.ListTransferLogsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = datatransfer.ListTransferLogsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_transfer_logs(request) - - # Establish that the response is the type that we expect. 
-    assert isinstance(response, pagers.ListTransferLogsPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_list_transfer_logs_rest_interceptors(null_interceptor):
-    transport = transports.DataTransferServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.DataTransferServiceRestInterceptor(),
-    )
-    client = DataTransferServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-         mock.patch.object(path_template, "transcode") as transcode, \
-         mock.patch.object(transports.DataTransferServiceRestInterceptor, "post_list_transfer_logs") as post, \
-         mock.patch.object(transports.DataTransferServiceRestInterceptor, "post_list_transfer_logs_with_metadata") as post_with_metadata, \
-         mock.patch.object(transports.DataTransferServiceRestInterceptor, "pre_list_transfer_logs") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = datatransfer.ListTransferLogsRequest.pb(datatransfer.ListTransferLogsRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = datatransfer.ListTransferLogsResponse.to_json(datatransfer.ListTransferLogsResponse())
-        req.return_value.content = return_value
-
-        request = datatransfer.ListTransferLogsRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = datatransfer.ListTransferLogsResponse()
-        post_with_metadata.return_value = datatransfer.ListTransferLogsResponse(), metadata
-
-        client.list_transfer_logs(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_check_valid_creds_rest_bad_request(request_type=datatransfer.CheckValidCredsRequest):
-    client = DataTransferServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2/dataSources/sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.check_valid_creds(request) - - -@pytest.mark.parametrize("request_type", [ - datatransfer.CheckValidCredsRequest, - dict, -]) -def test_check_valid_creds_rest_call_success(request_type): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/dataSources/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = datatransfer.CheckValidCredsResponse( - has_valid_creds=True, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = datatransfer.CheckValidCredsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.check_valid_creds(request) - - # Establish that the response is the type that we expect. 
-    assert isinstance(response, datatransfer.CheckValidCredsResponse)
-    assert response.has_valid_creds is True
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_check_valid_creds_rest_interceptors(null_interceptor):
-    transport = transports.DataTransferServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.DataTransferServiceRestInterceptor(),
-    )
-    client = DataTransferServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-         mock.patch.object(path_template, "transcode") as transcode, \
-         mock.patch.object(transports.DataTransferServiceRestInterceptor, "post_check_valid_creds") as post, \
-         mock.patch.object(transports.DataTransferServiceRestInterceptor, "post_check_valid_creds_with_metadata") as post_with_metadata, \
-         mock.patch.object(transports.DataTransferServiceRestInterceptor, "pre_check_valid_creds") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        post_with_metadata.assert_not_called()
-        pb_message = datatransfer.CheckValidCredsRequest.pb(datatransfer.CheckValidCredsRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        return_value = datatransfer.CheckValidCredsResponse.to_json(datatransfer.CheckValidCredsResponse())
-        req.return_value.content = return_value
-
-        request = datatransfer.CheckValidCredsRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = datatransfer.CheckValidCredsResponse()
-        post_with_metadata.return_value = datatransfer.CheckValidCredsResponse(), metadata
-
-        client.check_valid_creds(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-        post_with_metadata.assert_called_once()
-
-
-def test_enroll_data_sources_rest_bad_request(request_type=datatransfer.EnrollDataSourcesRequest):
-    client = DataTransferServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.enroll_data_sources(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    datatransfer.EnrollDataSourcesRequest,
-    dict,
-])
-def test_enroll_data_sources_rest_call_success(request_type):
-    client = DataTransferServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = None
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-        json_return_value = ''
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.enroll_data_sources(request)
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_enroll_data_sources_rest_interceptors(null_interceptor):
-    transport = transports.DataTransferServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.DataTransferServiceRestInterceptor(),
-    )
-    client = DataTransferServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-         mock.patch.object(path_template, "transcode") as transcode, \
-         mock.patch.object(transports.DataTransferServiceRestInterceptor, "pre_enroll_data_sources") as pre:
-        pre.assert_not_called()
-        pb_message = datatransfer.EnrollDataSourcesRequest.pb(datatransfer.EnrollDataSourcesRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        request = datatransfer.EnrollDataSourcesRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-
-        client.enroll_data_sources(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-
-
-def test_unenroll_data_sources_rest_bad_request(request_type=datatransfer.UnenrollDataSourcesRequest):
-    client = DataTransferServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = mock.Mock()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.unenroll_data_sources(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    datatransfer.UnenrollDataSourcesRequest,
-    dict,
-])
-def test_unenroll_data_sources_rest_call_success(request_type):
-    client = DataTransferServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest"
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'name': 'projects/sample1/locations/sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = None
-
-        # Wrap the value into a proper Response obj
-        response_value = mock.Mock()
-        response_value.status_code = 200
-        json_return_value = ''
-        response_value.content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        response = client.unenroll_data_sources(request)
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_unenroll_data_sources_rest_interceptors(null_interceptor):
-    transport = transports.DataTransferServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.DataTransferServiceRestInterceptor(),
-    )
-    client = DataTransferServiceClient(transport=transport)
-
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-         mock.patch.object(path_template, "transcode") as transcode, \
-         mock.patch.object(transports.DataTransferServiceRestInterceptor, "pre_unenroll_data_sources") as pre:
-        pre.assert_not_called()
-        pb_message = datatransfer.UnenrollDataSourcesRequest.pb(datatransfer.UnenrollDataSourcesRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = mock.Mock()
-        req.return_value.status_code = 200
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        request = datatransfer.UnenrollDataSourcesRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-
-        client.unenroll_data_sources(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-
-
-def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest):
-    client = DataTransferServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-    request = request_type()
-    request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        json_return_value = ''
-        response_value.json = mock.Mock(return_value={})
-        response_value.status_code = 400
-        response_value.request = Request()
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-        client.get_location(request)
-
-
-@pytest.mark.parametrize("request_type", [
-    locations_pb2.GetLocationRequest,
-    dict,
-])
-def test_get_location_rest(request_type):
-    client = DataTransferServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    request_init = {'name': 'projects/sample1/locations/sample2'}
-    request = request_type(**request_init)
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # Designate an appropriate value for the returned response.
- return_value = locations_pb2.Location() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_location(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - - -def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocationsRequest): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_locations(request) - - -@pytest.mark.parametrize("request_type", [ - locations_pb2.ListLocationsRequest, - dict, -]) -def test_list_locations_rest(request_type): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - request_init = {'name': 'projects/sample1'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.ListLocationsResponse() - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_locations(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) - -def test_initialize_client_w_rest(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_data_source_empty_call_rest(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_data_source), - '__call__') as call: - client.get_data_source(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.GetDataSourceRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_list_data_sources_empty_call_rest(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_data_sources), - '__call__') as call: - client.list_data_sources(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.ListDataSourcesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_transfer_config_empty_call_rest(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_transfer_config), - '__call__') as call: - client.create_transfer_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.CreateTransferConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_transfer_config_empty_call_rest(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_transfer_config), - '__call__') as call: - client.update_transfer_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.UpdateTransferConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_transfer_config_empty_call_rest(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_transfer_config), - '__call__') as call: - client.delete_transfer_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.DeleteTransferConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_transfer_config_empty_call_rest(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_transfer_config), - '__call__') as call: - client.get_transfer_config(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.GetTransferConfigRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_list_transfer_configs_empty_call_rest(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_transfer_configs), - '__call__') as call: - client.list_transfer_configs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.ListTransferConfigsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_schedule_transfer_runs_empty_call_rest(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.schedule_transfer_runs), - '__call__') as call: - client.schedule_transfer_runs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.ScheduleTransferRunsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_start_manual_transfer_runs_empty_call_rest(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.start_manual_transfer_runs), - '__call__') as call: - client.start_manual_transfer_runs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.StartManualTransferRunsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_transfer_run_empty_call_rest(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_transfer_run), - '__call__') as call: - client.get_transfer_run(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.GetTransferRunRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_transfer_run_empty_call_rest(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_transfer_run), - '__call__') as call: - client.delete_transfer_run(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.DeleteTransferRunRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_list_transfer_runs_empty_call_rest(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_transfer_runs), - '__call__') as call: - client.list_transfer_runs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.ListTransferRunsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_transfer_logs_empty_call_rest(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_transfer_logs), - '__call__') as call: - client.list_transfer_logs(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.ListTransferLogsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_check_valid_creds_empty_call_rest(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.check_valid_creds), - '__call__') as call: - client.check_valid_creds(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.CheckValidCredsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_enroll_data_sources_empty_call_rest(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.enroll_data_sources), - '__call__') as call: - client.enroll_data_sources(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.EnrollDataSourcesRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_unenroll_data_sources_empty_call_rest(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.unenroll_data_sources), - '__call__') as call: - client.unenroll_data_sources(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datatransfer.UnenrollDataSourcesRequest() - - assert args[0] == request_msg - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. 
-    client = DataTransferServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    assert isinstance(
-        client.transport,
-        transports.DataTransferServiceGrpcTransport,
-    )
-
-def test_data_transfer_service_base_transport_error():
-    # Passing both a credentials object and credentials_file should raise an error
-    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
-        transport = transports.DataTransferServiceTransport(
-            credentials=ga_credentials.AnonymousCredentials(),
-            credentials_file="credentials.json"
-        )
-
-
-def test_data_transfer_service_base_transport():
-    # Instantiate the base transport.
-    with mock.patch('google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.transports.DataTransferServiceTransport.__init__') as Transport:
-        Transport.return_value = None
-        transport = transports.DataTransferServiceTransport(
-            credentials=ga_credentials.AnonymousCredentials(),
-        )
-
-    # Every method on the transport should just blindly
-    # raise NotImplementedError.
-    methods = (
-        'get_data_source',
-        'list_data_sources',
-        'create_transfer_config',
-        'update_transfer_config',
-        'delete_transfer_config',
-        'get_transfer_config',
-        'list_transfer_configs',
-        'schedule_transfer_runs',
-        'start_manual_transfer_runs',
-        'get_transfer_run',
-        'delete_transfer_run',
-        'list_transfer_runs',
-        'list_transfer_logs',
-        'check_valid_creds',
-        'enroll_data_sources',
-        'unenroll_data_sources',
-        'get_location',
-        'list_locations',
-    )
-    for method in methods:
-        with pytest.raises(NotImplementedError):
-            getattr(transport, method)(request=object())
-
-    with pytest.raises(NotImplementedError):
-        transport.close()
-
-    # Catch all for all remaining methods and properties
-    remainder = [
-        'kind',
-    ]
-    for r in remainder:
-        with pytest.raises(NotImplementedError):
-            getattr(transport, r)()
-
-
-def test_data_transfer_service_base_transport_with_credentials_file():
-    # Instantiate the base transport with a credentials file
-    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.transports.DataTransferServiceTransport._prep_wrapped_messages') as Transport:
-        Transport.return_value = None
-        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
-        transport = transports.DataTransferServiceTransport(
-            credentials_file="credentials.json",
-            quota_project_id="octopus",
-        )
-        load_creds.assert_called_once_with("credentials.json",
-            scopes=None,
-            default_scopes=(
-                'https://www.googleapis.com/auth/cloud-platform',
-            ),
-            quota_project_id="octopus",
-        )
-
-
-def test_data_transfer_service_base_transport_with_adc():
-    # Test the default credentials are used if credentials and credentials_file are None.
-    with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.transports.DataTransferServiceTransport._prep_wrapped_messages') as Transport:
-        Transport.return_value = None
-        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
-        transport = transports.DataTransferServiceTransport()
-        adc.assert_called_once()
-
-
-def test_data_transfer_service_auth_adc():
-    # If no credentials are provided, we should use ADC credentials.
-    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
-        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
-        DataTransferServiceClient()
-        adc.assert_called_once_with(
-            scopes=None,
-            default_scopes=(
-                'https://www.googleapis.com/auth/cloud-platform',
-            ),
-            quota_project_id=None,
-        )
-
-
-@pytest.mark.parametrize(
-    "transport_class",
-    [
-        transports.DataTransferServiceGrpcTransport,
-        transports.DataTransferServiceGrpcAsyncIOTransport,
-    ],
-)
-def test_data_transfer_service_transport_auth_adc(transport_class):
-    # If credentials and host are not provided, the transport class should use
-    # ADC credentials.
-    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
-        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
-        transport_class(quota_project_id="octopus", scopes=["1", "2"])
-        adc.assert_called_once_with(
-            scopes=["1", "2"],
-            default_scopes=('https://www.googleapis.com/auth/cloud-platform',),
-            quota_project_id="octopus",
-        )
-
-
-@pytest.mark.parametrize(
-    "transport_class",
-    [
-        transports.DataTransferServiceGrpcTransport,
-        transports.DataTransferServiceGrpcAsyncIOTransport,
-        transports.DataTransferServiceRestTransport,
-    ],
-)
-def test_data_transfer_service_transport_auth_gdch_credentials(transport_class):
-    host = 'https://language.com'
-    api_audience_tests = [None, 'https://language2.com']
-    api_audience_expect = [host, 'https://language2.com']
-    for t, e in zip(api_audience_tests, api_audience_expect):
-        with mock.patch.object(google.auth, 'default', autospec=True) as adc:
-            gdch_mock = mock.MagicMock()
-            type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock)
-            adc.return_value = (gdch_mock, None)
-            transport_class(host=host, api_audience=t)
-            gdch_mock.with_gdch_audience.assert_called_once_with(
-                e
-            )
-
-
-@pytest.mark.parametrize(
-    "transport_class,grpc_helpers",
-    [
-        (transports.DataTransferServiceGrpcTransport, grpc_helpers),
-        (transports.DataTransferServiceGrpcAsyncIOTransport, grpc_helpers_async)
-    ],
-)
-def test_data_transfer_service_transport_create_channel(transport_class, grpc_helpers):
-    # If credentials and host are not provided, the transport class should use
-    # ADC credentials.
-    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object(
-        grpc_helpers, "create_channel", autospec=True
-    ) as create_channel:
-        creds = ga_credentials.AnonymousCredentials()
-        adc.return_value = (creds, None)
-        transport_class(
-            quota_project_id="octopus",
-            scopes=["1", "2"]
-        )
-
-        create_channel.assert_called_with(
-            "bigquerydatatransfer.googleapis.com:443",
-            credentials=creds,
-            credentials_file=None,
-            quota_project_id="octopus",
-            default_scopes=(
-                'https://www.googleapis.com/auth/cloud-platform',
-            ),
-            scopes=["1", "2"],
-            default_host="bigquerydatatransfer.googleapis.com",
-            ssl_credentials=None,
-            options=[
-                ("grpc.max_send_message_length", -1),
-                ("grpc.max_receive_message_length", -1),
-            ],
-        )
-
-
-@pytest.mark.parametrize("transport_class", [transports.DataTransferServiceGrpcTransport, transports.DataTransferServiceGrpcAsyncIOTransport])
-def test_data_transfer_service_grpc_transport_client_cert_source_for_mtls(
-    transport_class
-):
-    cred = ga_credentials.AnonymousCredentials()
-
-    # Check ssl_channel_credentials is used if provided.
-    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
-        mock_ssl_channel_creds = mock.Mock()
-        transport_class(
-            host="squid.clam.whelk",
-            credentials=cred,
-            ssl_channel_credentials=mock_ssl_channel_creds
-        )
-        mock_create_channel.assert_called_once_with(
-            "squid.clam.whelk:443",
-            credentials=cred,
-            credentials_file=None,
-            scopes=None,
-            ssl_credentials=mock_ssl_channel_creds,
-            quota_project_id=None,
-            options=[
-                ("grpc.max_send_message_length", -1),
-                ("grpc.max_receive_message_length", -1),
-            ],
-        )
-
-    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
-    # is used.
-    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
-        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
-            transport_class(
-                credentials=cred,
-                client_cert_source_for_mtls=client_cert_source_callback
-            )
-            expected_cert, expected_key = client_cert_source_callback()
-            mock_ssl_cred.assert_called_once_with(
-                certificate_chain=expected_cert,
-                private_key=expected_key
-            )
-
-def test_data_transfer_service_http_transport_client_cert_source_for_mtls():
-    cred = ga_credentials.AnonymousCredentials()
-    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
-        transports.DataTransferServiceRestTransport(
-            credentials=cred,
-            client_cert_source_for_mtls=client_cert_source_callback
-        )
-        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
-
-
-@pytest.mark.parametrize("transport_name", [
-    "grpc",
-    "grpc_asyncio",
-    "rest",
-])
-def test_data_transfer_service_host_no_port(transport_name):
-    client = DataTransferServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        client_options=client_options.ClientOptions(api_endpoint='bigquerydatatransfer.googleapis.com'),
-        transport=transport_name,
-    )
-    assert client.transport._host == (
-        'bigquerydatatransfer.googleapis.com:443'
-        if transport_name in ['grpc', 'grpc_asyncio']
-        else 'https://bigquerydatatransfer.googleapis.com'
-    )
-
-@pytest.mark.parametrize("transport_name", [
-    "grpc",
-    "grpc_asyncio",
-    "rest",
-])
-def test_data_transfer_service_host_with_port(transport_name):
-    client = DataTransferServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        client_options=client_options.ClientOptions(api_endpoint='bigquerydatatransfer.googleapis.com:8000'),
-        transport=transport_name,
-    )
-    assert client.transport._host == (
-        'bigquerydatatransfer.googleapis.com:8000'
-        if transport_name in ['grpc', 'grpc_asyncio']
-        else 'https://bigquerydatatransfer.googleapis.com:8000'
-    )
-
-@pytest.mark.parametrize("transport_name", [
-    "rest",
-])
-def test_data_transfer_service_client_transport_session_collision(transport_name):
-    creds1 = ga_credentials.AnonymousCredentials()
-    creds2 = ga_credentials.AnonymousCredentials()
-    client1 = DataTransferServiceClient(
-        credentials=creds1,
-        transport=transport_name,
-    )
-    client2 = DataTransferServiceClient(
-        credentials=creds2,
-        transport=transport_name,
-    )
-    session1 = client1.transport.get_data_source._session
-    session2 = client2.transport.get_data_source._session
-    assert session1 != session2
-    session1 = client1.transport.list_data_sources._session
-    session2 = client2.transport.list_data_sources._session
-    assert session1 != session2
-    session1 = client1.transport.create_transfer_config._session
-    session2 = client2.transport.create_transfer_config._session
-    assert session1 != session2
-    session1 = client1.transport.update_transfer_config._session
-    session2 = client2.transport.update_transfer_config._session
-    assert session1 != session2
-    session1 = client1.transport.delete_transfer_config._session
-    session2 = client2.transport.delete_transfer_config._session
-    assert session1 != session2
-    session1 = client1.transport.get_transfer_config._session
-    session2 = client2.transport.get_transfer_config._session
-    assert session1 != session2
-    session1 = client1.transport.list_transfer_configs._session
-    session2 = client2.transport.list_transfer_configs._session
-    assert session1 != session2
-    session1 = client1.transport.schedule_transfer_runs._session
-    session2 = client2.transport.schedule_transfer_runs._session
-    assert session1 != session2
-    session1 = client1.transport.start_manual_transfer_runs._session
-    session2 = client2.transport.start_manual_transfer_runs._session
-    assert session1 != session2
-    session1 = client1.transport.get_transfer_run._session
-    session2 = client2.transport.get_transfer_run._session
-    assert session1 != session2
-    session1 = client1.transport.delete_transfer_run._session
-    session2 = client2.transport.delete_transfer_run._session
-    assert session1 != session2
-    session1 = client1.transport.list_transfer_runs._session
-    session2 = client2.transport.list_transfer_runs._session
-    assert session1 != session2
-    session1 = client1.transport.list_transfer_logs._session
-    session2 = client2.transport.list_transfer_logs._session
-    assert session1 != session2
-    session1 = client1.transport.check_valid_creds._session
-    session2 = client2.transport.check_valid_creds._session
-    assert session1 != session2
-    session1 = client1.transport.enroll_data_sources._session
-    session2 = client2.transport.enroll_data_sources._session
-    assert session1 != session2
-    session1 = client1.transport.unenroll_data_sources._session
-    session2 = client2.transport.unenroll_data_sources._session
-    assert session1 != session2
-
-
-def test_data_transfer_service_grpc_transport_channel():
-    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
-    # Check that channel is used if provided.
-    transport = transports.DataTransferServiceGrpcTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials is None
-
-
-def test_data_transfer_service_grpc_asyncio_transport_channel():
-    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
-    # Check that channel is used if provided.
-    transport = transports.DataTransferServiceGrpcAsyncIOTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials is None
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.DataTransferServiceGrpcTransport, transports.DataTransferServiceGrpcAsyncIOTransport]) -def test_data_transfer_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.DataTransferServiceGrpcTransport, transports.DataTransferServiceGrpcAsyncIOTransport]) -def test_data_transfer_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_data_source_path(): - project = "squid" - data_source = "clam" - expected = "projects/{project}/dataSources/{data_source}".format(project=project, data_source=data_source, ) - actual = DataTransferServiceClient.data_source_path(project, data_source) - assert expected == actual - - -def test_parse_data_source_path(): - expected = { - "project": "whelk", - "data_source": "octopus", - } - path = DataTransferServiceClient.data_source_path(**expected) - - # Check that the path construction is reversible. 
- actual = DataTransferServiceClient.parse_data_source_path(path) - assert expected == actual - -def test_run_path(): - project = "oyster" - transfer_config = "nudibranch" - run = "cuttlefish" - expected = "projects/{project}/transferConfigs/{transfer_config}/runs/{run}".format(project=project, transfer_config=transfer_config, run=run, ) - actual = DataTransferServiceClient.run_path(project, transfer_config, run) - assert expected == actual - - -def test_parse_run_path(): - expected = { - "project": "mussel", - "transfer_config": "winkle", - "run": "nautilus", - } - path = DataTransferServiceClient.run_path(**expected) - - # Check that the path construction is reversible. - actual = DataTransferServiceClient.parse_run_path(path) - assert expected == actual - -def test_transfer_config_path(): - project = "scallop" - transfer_config = "abalone" - expected = "projects/{project}/transferConfigs/{transfer_config}".format(project=project, transfer_config=transfer_config, ) - actual = DataTransferServiceClient.transfer_config_path(project, transfer_config) - assert expected == actual - - -def test_parse_transfer_config_path(): - expected = { - "project": "squid", - "transfer_config": "clam", - } - path = DataTransferServiceClient.transfer_config_path(**expected) - - # Check that the path construction is reversible. - actual = DataTransferServiceClient.parse_transfer_config_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "whelk" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = DataTransferServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "octopus", - } - path = DataTransferServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = DataTransferServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "oyster" - expected = "folders/{folder}".format(folder=folder, ) - actual = DataTransferServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nudibranch", - } - path = DataTransferServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = DataTransferServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "cuttlefish" - expected = "organizations/{organization}".format(organization=organization, ) - actual = DataTransferServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "mussel", - } - path = DataTransferServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = DataTransferServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "winkle" - expected = "projects/{project}".format(project=project, ) - actual = DataTransferServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "nautilus", - } - path = DataTransferServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. 
- actual = DataTransferServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "scallop" - location = "abalone" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = DataTransferServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "squid", - "location": "clam", - } - path = DataTransferServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = DataTransferServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.DataTransferServiceTransport, '_prep_wrapped_messages') as prep: - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.DataTransferServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = DataTransferServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_list_locations(transport: str = "grpc"): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - response = client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.ListLocationsResponse) -@pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc_asyncio"): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.ListLocationsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, locations_pb2.ListLocationsResponse) - -def test_list_locations_field_headers(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = locations_pb2.ListLocationsResponse() - - client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] -@pytest.mark.asyncio -async def test_list_locations_field_headers_async(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.ListLocationsRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - await client.list_locations(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations",) in kw["metadata"] - -def test_list_locations_from_dict(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.ListLocationsResponse() - - response = client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_list_locations_from_dict_async(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_locations), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.ListLocationsResponse() - ) - response = await client.list_locations( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_location(transport: str = "grpc"): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - response = client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) -@pytest.mark.asyncio -async def test_get_location_async(transport: str = "grpc_asyncio"): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = locations_pb2.GetLocationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) - -def test_get_location_field_headers(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials()) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = locations_pb2.Location() - - client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"] -@pytest.mark.asyncio -async def test_get_location_field_headers_async(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials() - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = locations_pb2.GetLocationRequest() - request.name = "locations/abc" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - await client.get_location(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"] - -def test_get_location_from_dict(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = locations_pb2.Location() - - response = client.get_location( - request={ - "name": "locations/abc", - } - ) - call.assert_called() -@pytest.mark.asyncio -async def test_get_location_from_dict_async(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_location), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - locations_pb2.Location() - ) - response = await client.get_location( - request={ - "name": "locations/abc", - } - ) - call.assert_called() - - -def test_transport_close_grpc(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = DataTransferServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close_rest(): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport.
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (DataTransferServiceClient, transports.DataTransferServiceGrpcTransport), - (DataTransferServiceAsyncClient, transports.DataTransferServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/.coveragerc b/owl-bot-staging/google-cloud-bigquery-migration/v2/.coveragerc deleted file mode 100644 index c16fcf7df5d3..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/bigquery_migration/__init__.py - google/cloud/bigquery_migration/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/.flake8 b/owl-bot-staging/google-cloud-bigquery-migration/v2/.flake8 deleted file mode 100644 index 29227d4cf419..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. 
- **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/MANIFEST.in b/owl-bot-staging/google-cloud-bigquery-migration/v2/MANIFEST.in deleted file mode 100644 index 02b23d7afabb..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/bigquery_migration *.py -recursive-include google/cloud/bigquery_migration_v2 *.py diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/README.rst b/owl-bot-staging/google-cloud-bigquery-migration/v2/README.rst deleted file mode 100644 index 0df3dd4690ca..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/README.rst +++ /dev/null @@ -1,143 +0,0 @@ -Python Client for Google Cloud Bigquery Migration API -====================================================== - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Bigquery Migration API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv - source /bin/activate - /bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. code-block:: console - - python3 -m venv - \Scripts\activate - \Scripts\pip.exe install \path\to\library - - -Logging ------- - -This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes. -Note the following: - -#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging. -#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**. -#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below. - - -Simple, environment-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google -logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged -messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging -event.
- -A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log. - -- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc. -- Invalid logging scopes: :code:`foo`, :code:`123`, etc. - -**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers. - - -Examples -^^^^^^^^ - -- Enabling the default handler for all Google-based loggers - -.. code-block:: console - - export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google - -- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`): - -.. code-block:: console - - export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1 - - -Advanced, code-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -You can also configure a valid logging scope using Python's standard `logging` mechanism. - - -Examples -^^^^^^^^ - -- Configuring a handler for all Google-based loggers - -.. code-block:: python - - import logging - - from google.cloud.translate_v3 import translate - - base_logger = logging.getLogger("google") - base_logger.addHandler(logging.StreamHandler()) - base_logger.setLevel(logging.DEBUG) - -- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`): - -.. code-block:: python - - import logging - - from google.cloud.translate_v3 import translate - - base_logger = logging.getLogger("google.cloud.library_v1") - base_logger.addHandler(logging.StreamHandler()) - base_logger.setLevel(logging.DEBUG) - - -Logging details -~~~~~~~~~~~~~~~ - -#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root - logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set - :code:`logging.getLogger("google").propagate = True` in your code. -#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for - one library, but decide you need to also set up environment-based logging configuration for another library. - - #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual - if the code-based configuration gets applied first. - -#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get - executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured. - (This is the reason for 2.i. above.)
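- -For example, a minimal sketch that combines the code-based configuration above with explicit propagation to the root logger; the handler and level choices here are illustrative, not required: - -.. code-block:: python - -    import logging - -    # Attach a handler and level to all Google-based loggers. -    base_logger = logging.getLogger("google") -    base_logger.addHandler(logging.StreamHandler()) -    base_logger.setLevel(logging.DEBUG) - -    # Propagation is off by default; opt in so these events also reach -    # any handlers configured on the root logger. -    base_logger.propagate = True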
diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/docs/_static/custom.css b/owl-bot-staging/google-cloud-bigquery-migration/v2/docs/_static/custom.css deleted file mode 100644 index 06423be0b592..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/docs/_static/custom.css +++ /dev/null @@ -1,3 +0,0 @@ -dl.field-list > dt { - min-width: 100px -} diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/docs/bigquery_migration_v2/migration_service.rst b/owl-bot-staging/google-cloud-bigquery-migration/v2/docs/bigquery_migration_v2/migration_service.rst deleted file mode 100644 index 513891e3e8c9..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/docs/bigquery_migration_v2/migration_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -MigrationService ----------------------------------- - -.. automodule:: google.cloud.bigquery_migration_v2.services.migration_service - :members: - :inherited-members: - -.. automodule:: google.cloud.bigquery_migration_v2.services.migration_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/docs/bigquery_migration_v2/services_.rst b/owl-bot-staging/google-cloud-bigquery-migration/v2/docs/bigquery_migration_v2/services_.rst deleted file mode 100644 index 74ebb7ac08f8..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/docs/bigquery_migration_v2/services_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Bigquery Migration v2 API -=================================================== -.. toctree:: - :maxdepth: 2 - - migration_service diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/docs/bigquery_migration_v2/types_.rst b/owl-bot-staging/google-cloud-bigquery-migration/v2/docs/bigquery_migration_v2/types_.rst deleted file mode 100644 index 60d74a5de948..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/docs/bigquery_migration_v2/types_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Bigquery Migration v2 API -================================================ - -.. automodule:: google.cloud.bigquery_migration_v2.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/docs/conf.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/docs/conf.py deleted file mode 100644 index 88db521b0d7e..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-bigquery-migration documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. 
- -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGELOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffixes as a list of strings: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = u"google-cloud-bigquery-migration" -copyright = u"2023, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as a replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = 'en' - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents.
-# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. 
-# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-bigquery-migration-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-bigquery-migration.tex", - u"google-cloud-bigquery-migration Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-bigquery-migration", - u"Google Cloud Bigquery Migration Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-bigquery-migration", - u"google-cloud-bigquery-migration Documentation", - author, - "google-cloud-bigquery-migration", - "GAPIC library for Google Cloud Bigquery Migration API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. 
-# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/docs/index.rst b/owl-bot-staging/google-cloud-bigquery-migration/v2/docs/index.rst deleted file mode 100644 index fe6757d0c22a..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - bigquery_migration_v2/services_ - bigquery_migration_v2/types_ diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration/__init__.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration/__init__.py deleted file mode 100644 index f3f76e3fc6bc..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration/__init__.py +++ /dev/null @@ -1,133 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.bigquery_migration import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.bigquery_migration_v2.services.migration_service.client import MigrationServiceClient -from google.cloud.bigquery_migration_v2.services.migration_service.async_client import MigrationServiceAsyncClient - -from google.cloud.bigquery_migration_v2.types.migration_entities import MigrationSubtask -from google.cloud.bigquery_migration_v2.types.migration_entities import MigrationTask -from google.cloud.bigquery_migration_v2.types.migration_entities import MigrationTaskResult -from google.cloud.bigquery_migration_v2.types.migration_entities import MigrationWorkflow -from google.cloud.bigquery_migration_v2.types.migration_entities import TranslationTaskResult -from google.cloud.bigquery_migration_v2.types.migration_error_details import ErrorDetail -from google.cloud.bigquery_migration_v2.types.migration_error_details import ErrorLocation -from google.cloud.bigquery_migration_v2.types.migration_error_details import ResourceErrorDetail -from google.cloud.bigquery_migration_v2.types.migration_metrics import Point -from google.cloud.bigquery_migration_v2.types.migration_metrics import TimeInterval -from google.cloud.bigquery_migration_v2.types.migration_metrics import TimeSeries -from google.cloud.bigquery_migration_v2.types.migration_metrics import TypedValue -from google.cloud.bigquery_migration_v2.types.migration_service import CreateMigrationWorkflowRequest -from google.cloud.bigquery_migration_v2.types.migration_service import DeleteMigrationWorkflowRequest -from google.cloud.bigquery_migration_v2.types.migration_service import GetMigrationSubtaskRequest -from google.cloud.bigquery_migration_v2.types.migration_service import GetMigrationWorkflowRequest -from google.cloud.bigquery_migration_v2.types.migration_service import ListMigrationSubtasksRequest -from google.cloud.bigquery_migration_v2.types.migration_service import ListMigrationSubtasksResponse -from google.cloud.bigquery_migration_v2.types.migration_service import ListMigrationWorkflowsRequest -from google.cloud.bigquery_migration_v2.types.migration_service import ListMigrationWorkflowsResponse -from google.cloud.bigquery_migration_v2.types.migration_service import StartMigrationWorkflowRequest -from google.cloud.bigquery_migration_v2.types.translation_config import AzureSynapseDialect -from google.cloud.bigquery_migration_v2.types.translation_config import BigQueryDialect -from google.cloud.bigquery_migration_v2.types.translation_config import DB2Dialect -from google.cloud.bigquery_migration_v2.types.translation_config import Dialect -from google.cloud.bigquery_migration_v2.types.translation_config import GreenplumDialect -from google.cloud.bigquery_migration_v2.types.translation_config import HiveQLDialect -from google.cloud.bigquery_migration_v2.types.translation_config import MySQLDialect -from google.cloud.bigquery_migration_v2.types.translation_config import NameMappingKey -from google.cloud.bigquery_migration_v2.types.translation_config import NameMappingValue -from google.cloud.bigquery_migration_v2.types.translation_config import NetezzaDialect -from google.cloud.bigquery_migration_v2.types.translation_config import ObjectNameMapping -from google.cloud.bigquery_migration_v2.types.translation_config import ObjectNameMappingList -from google.cloud.bigquery_migration_v2.types.translation_config import OracleDialect -from google.cloud.bigquery_migration_v2.types.translation_config import 
PostgresqlDialect -from google.cloud.bigquery_migration_v2.types.translation_config import PrestoDialect -from google.cloud.bigquery_migration_v2.types.translation_config import RedshiftDialect -from google.cloud.bigquery_migration_v2.types.translation_config import SnowflakeDialect -from google.cloud.bigquery_migration_v2.types.translation_config import SourceEnv -from google.cloud.bigquery_migration_v2.types.translation_config import SparkSQLDialect -from google.cloud.bigquery_migration_v2.types.translation_config import SQLiteDialect -from google.cloud.bigquery_migration_v2.types.translation_config import SQLServerDialect -from google.cloud.bigquery_migration_v2.types.translation_config import TeradataDialect -from google.cloud.bigquery_migration_v2.types.translation_config import TranslationConfigDetails -from google.cloud.bigquery_migration_v2.types.translation_config import VerticaDialect -from google.cloud.bigquery_migration_v2.types.translation_details import Literal -from google.cloud.bigquery_migration_v2.types.translation_details import SourceEnvironment -from google.cloud.bigquery_migration_v2.types.translation_details import SourceSpec -from google.cloud.bigquery_migration_v2.types.translation_details import SourceTargetMapping -from google.cloud.bigquery_migration_v2.types.translation_details import TargetSpec -from google.cloud.bigquery_migration_v2.types.translation_details import TranslationDetails -from google.cloud.bigquery_migration_v2.types.translation_suggestion import TranslationReportRecord -from google.cloud.bigquery_migration_v2.types.translation_usability import GcsReportLogMessage - -__all__ = ('MigrationServiceClient', - 'MigrationServiceAsyncClient', - 'MigrationSubtask', - 'MigrationTask', - 'MigrationTaskResult', - 'MigrationWorkflow', - 'TranslationTaskResult', - 'ErrorDetail', - 'ErrorLocation', - 'ResourceErrorDetail', - 'Point', - 'TimeInterval', - 'TimeSeries', - 'TypedValue', - 'CreateMigrationWorkflowRequest', - 'DeleteMigrationWorkflowRequest', - 'GetMigrationSubtaskRequest', - 'GetMigrationWorkflowRequest', - 'ListMigrationSubtasksRequest', - 'ListMigrationSubtasksResponse', - 'ListMigrationWorkflowsRequest', - 'ListMigrationWorkflowsResponse', - 'StartMigrationWorkflowRequest', - 'AzureSynapseDialect', - 'BigQueryDialect', - 'DB2Dialect', - 'Dialect', - 'GreenplumDialect', - 'HiveQLDialect', - 'MySQLDialect', - 'NameMappingKey', - 'NameMappingValue', - 'NetezzaDialect', - 'ObjectNameMapping', - 'ObjectNameMappingList', - 'OracleDialect', - 'PostgresqlDialect', - 'PrestoDialect', - 'RedshiftDialect', - 'SnowflakeDialect', - 'SourceEnv', - 'SparkSQLDialect', - 'SQLiteDialect', - 'SQLServerDialect', - 'TeradataDialect', - 'TranslationConfigDetails', - 'VerticaDialect', - 'Literal', - 'SourceEnvironment', - 'SourceSpec', - 'SourceTargetMapping', - 'TargetSpec', - 'TranslationDetails', - 'TranslationReportRecord', - 'GcsReportLogMessage', -) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration/gapic_version.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration/py.typed b/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration/py.typed deleted file mode 100644 index fcbd0b586dd2..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-bigquery-migration package uses inline types. diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/__init__.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/__init__.py deleted file mode 100644 index 394c7c11064f..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/__init__.py +++ /dev/null @@ -1,134 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.bigquery_migration_v2 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.migration_service import MigrationServiceClient -from .services.migration_service import MigrationServiceAsyncClient - -from .types.migration_entities import MigrationSubtask -from .types.migration_entities import MigrationTask -from .types.migration_entities import MigrationTaskResult -from .types.migration_entities import MigrationWorkflow -from .types.migration_entities import TranslationTaskResult -from .types.migration_error_details import ErrorDetail -from .types.migration_error_details import ErrorLocation -from .types.migration_error_details import ResourceErrorDetail -from .types.migration_metrics import Point -from .types.migration_metrics import TimeInterval -from .types.migration_metrics import TimeSeries -from .types.migration_metrics import TypedValue -from .types.migration_service import CreateMigrationWorkflowRequest -from .types.migration_service import DeleteMigrationWorkflowRequest -from .types.migration_service import GetMigrationSubtaskRequest -from .types.migration_service import GetMigrationWorkflowRequest -from .types.migration_service import ListMigrationSubtasksRequest -from .types.migration_service import ListMigrationSubtasksResponse -from .types.migration_service import ListMigrationWorkflowsRequest -from .types.migration_service import ListMigrationWorkflowsResponse -from .types.migration_service import StartMigrationWorkflowRequest -from .types.translation_config import AzureSynapseDialect -from .types.translation_config import BigQueryDialect -from .types.translation_config import DB2Dialect -from .types.translation_config import Dialect -from .types.translation_config import GreenplumDialect -from .types.translation_config import HiveQLDialect -from .types.translation_config import MySQLDialect -from .types.translation_config import NameMappingKey -from .types.translation_config import NameMappingValue -from .types.translation_config import NetezzaDialect -from .types.translation_config import ObjectNameMapping -from .types.translation_config import ObjectNameMappingList -from .types.translation_config import OracleDialect -from .types.translation_config import PostgresqlDialect -from .types.translation_config import PrestoDialect -from .types.translation_config import RedshiftDialect -from .types.translation_config import SnowflakeDialect -from .types.translation_config import SourceEnv -from .types.translation_config import SparkSQLDialect -from .types.translation_config import SQLiteDialect -from .types.translation_config import SQLServerDialect -from .types.translation_config import TeradataDialect -from .types.translation_config import TranslationConfigDetails -from .types.translation_config import VerticaDialect -from .types.translation_details import Literal -from .types.translation_details import SourceEnvironment -from .types.translation_details import SourceSpec -from .types.translation_details import SourceTargetMapping -from .types.translation_details import TargetSpec -from .types.translation_details import TranslationDetails -from .types.translation_suggestion import TranslationReportRecord -from .types.translation_usability import GcsReportLogMessage - -__all__ = ( - 'MigrationServiceAsyncClient', -'AzureSynapseDialect', -'BigQueryDialect', -'CreateMigrationWorkflowRequest', -'DB2Dialect', -'DeleteMigrationWorkflowRequest', -'Dialect', -'ErrorDetail', -'ErrorLocation', -'GcsReportLogMessage', 
-'GetMigrationSubtaskRequest', -'GetMigrationWorkflowRequest', -'GreenplumDialect', -'HiveQLDialect', -'ListMigrationSubtasksRequest', -'ListMigrationSubtasksResponse', -'ListMigrationWorkflowsRequest', -'ListMigrationWorkflowsResponse', -'Literal', -'MigrationServiceClient', -'MigrationSubtask', -'MigrationTask', -'MigrationTaskResult', -'MigrationWorkflow', -'MySQLDialect', -'NameMappingKey', -'NameMappingValue', -'NetezzaDialect', -'ObjectNameMapping', -'ObjectNameMappingList', -'OracleDialect', -'Point', -'PostgresqlDialect', -'PrestoDialect', -'RedshiftDialect', -'ResourceErrorDetail', -'SQLServerDialect', -'SQLiteDialect', -'SnowflakeDialect', -'SourceEnv', -'SourceEnvironment', -'SourceSpec', -'SourceTargetMapping', -'SparkSQLDialect', -'StartMigrationWorkflowRequest', -'TargetSpec', -'TeradataDialect', -'TimeInterval', -'TimeSeries', -'TranslationConfigDetails', -'TranslationDetails', -'TranslationReportRecord', -'TranslationTaskResult', -'TypedValue', -'VerticaDialect', -) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/gapic_metadata.json b/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/gapic_metadata.json deleted file mode 100644 index f841186e6e8d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/gapic_metadata.json +++ /dev/null @@ -1,93 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.bigquery_migration_v2", - "protoPackage": "google.cloud.bigquery.migration.v2", - "schema": "1.0", - "services": { - "MigrationService": { - "clients": { - "grpc": { - "libraryClient": "MigrationServiceClient", - "rpcs": { - "CreateMigrationWorkflow": { - "methods": [ - "create_migration_workflow" - ] - }, - "DeleteMigrationWorkflow": { - "methods": [ - "delete_migration_workflow" - ] - }, - "GetMigrationSubtask": { - "methods": [ - "get_migration_subtask" - ] - }, - "GetMigrationWorkflow": { - "methods": [ - "get_migration_workflow" - ] - }, - "ListMigrationSubtasks": { - "methods": [ - "list_migration_subtasks" - ] - }, - "ListMigrationWorkflows": { - "methods": [ - "list_migration_workflows" - ] - }, - "StartMigrationWorkflow": { - "methods": [ - "start_migration_workflow" - ] - } - } - }, - "grpc-async": { - "libraryClient": "MigrationServiceAsyncClient", - "rpcs": { - "CreateMigrationWorkflow": { - "methods": [ - "create_migration_workflow" - ] - }, - "DeleteMigrationWorkflow": { - "methods": [ - "delete_migration_workflow" - ] - }, - "GetMigrationSubtask": { - "methods": [ - "get_migration_subtask" - ] - }, - "GetMigrationWorkflow": { - "methods": [ - "get_migration_workflow" - ] - }, - "ListMigrationSubtasks": { - "methods": [ - "list_migration_subtasks" - ] - }, - "ListMigrationWorkflows": { - "methods": [ - "list_migration_workflows" - ] - }, - "StartMigrationWorkflow": { - "methods": [ - "start_migration_workflow" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/gapic_version.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed 
under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/py.typed b/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/py.typed deleted file mode 100644 index fcbd0b586dd2..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-bigquery-migration package uses inline types. diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/services/__init__.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/services/__init__.py deleted file mode 100644 index 8f6cf068242c..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/services/migration_service/__init__.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/services/migration_service/__init__.py deleted file mode 100644 index 9a438d91f688..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/services/migration_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
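(Editorial note: the package-level ``__init__.py`` at the top of this diff re-exports both client classes and all request/response types, so callers never need the deeper module paths. A minimal usage sketch, not part of the generated sources, assuming the library is installed and Application Default Credentials are configured:)

.. code-block:: python

    from google.cloud import bigquery_migration_v2

    def list_workflow_names(project_id: str, location: str = "us"):
        # Both names below are re-exported by the package __init__.
        client = bigquery_migration_v2.MigrationServiceClient()
        request = bigquery_migration_v2.ListMigrationWorkflowsRequest(
            parent=f"projects/{project_id}/locations/{location}",
        )
        # The returned pager fetches additional pages transparently.
        return [wf.name for wf in client.list_migration_workflows(request=request)]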
-# -from .client import MigrationServiceClient -from .async_client import MigrationServiceAsyncClient - -__all__ = ( - 'MigrationServiceClient', - 'MigrationServiceAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/services/migration_service/async_client.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/services/migration_service/async_client.py deleted file mode 100644 index 5b158c04b5ad..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/services/migration_service/async_client.py +++ /dev/null @@ -1,1039 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.bigquery_migration_v2 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.cloud.bigquery_migration_v2.services.migration_service import pagers -from google.cloud.bigquery_migration_v2.types import migration_entities -from google.cloud.bigquery_migration_v2.types import migration_error_details -from google.cloud.bigquery_migration_v2.types import migration_metrics -from google.cloud.bigquery_migration_v2.types import migration_service -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import error_details_pb2 # type: ignore -from .transports.base import MigrationServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import MigrationServiceGrpcAsyncIOTransport -from .client import MigrationServiceClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -class MigrationServiceAsyncClient: - """Service to handle EDW migrations.""" - - _client: MigrationServiceClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
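(Editorial note: the ``OptionalRetry`` alias above falls back to ``object`` on older ``google-api-core`` releases that lack ``retry_async``. A hedged sketch of passing an explicit ``AsyncRetry`` policy to one of the calls defined below; the numeric values are illustrative, not recommendations:)

.. code-block:: python

    from google.api_core import retry_async
    from google.cloud import bigquery_migration_v2

    async def get_workflow_with_retry(name: str):
        client = bigquery_migration_v2.MigrationServiceAsyncClient()
        # Retry transient failures for up to 60s, backing off from 1s to 10s.
        retry = retry_async.AsyncRetry(initial=1.0, maximum=10.0, timeout=60.0)
        return await client.get_migration_workflow(name=name, retry=retry)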
- DEFAULT_ENDPOINT = MigrationServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = MigrationServiceClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = MigrationServiceClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = MigrationServiceClient._DEFAULT_UNIVERSE - - migration_subtask_path = staticmethod(MigrationServiceClient.migration_subtask_path) - parse_migration_subtask_path = staticmethod(MigrationServiceClient.parse_migration_subtask_path) - migration_workflow_path = staticmethod(MigrationServiceClient.migration_workflow_path) - parse_migration_workflow_path = staticmethod(MigrationServiceClient.parse_migration_workflow_path) - common_billing_account_path = staticmethod(MigrationServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(MigrationServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(MigrationServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(MigrationServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(MigrationServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(MigrationServiceClient.parse_common_organization_path) - common_project_path = staticmethod(MigrationServiceClient.common_project_path) - parse_common_project_path = staticmethod(MigrationServiceClient.parse_common_project_path) - common_location_path = staticmethod(MigrationServiceClient.common_location_path) - parse_common_location_path = staticmethod(MigrationServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - MigrationServiceAsyncClient: The constructed client. - """ - return MigrationServiceClient.from_service_account_info.__func__(MigrationServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - MigrationServiceAsyncClient: The constructed client. - """ - return MigrationServiceClient.from_service_account_file.__func__(MigrationServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` is provided, use the provided one.
- (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return MigrationServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> MigrationServiceTransport: - """Returns the transport used by the client instance. - - Returns: - MigrationServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = MigrationServiceClient.get_transport_class - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, MigrationServiceTransport, Callable[..., MigrationServiceTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the migration service async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,MigrationServiceTransport,Callable[..., MigrationServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the MigrationServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which can have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2.
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = MigrationServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.bigquery.migration_v2.MigrationServiceAsyncClient`.", - extra = { - "serviceName": "google.cloud.bigquery.migration.v2.MigrationService", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { - "serviceName": "google.cloud.bigquery.migration.v2.MigrationService", - "credentialsType": None, - } - ) - - async def create_migration_workflow(self, - request: Optional[Union[migration_service.CreateMigrationWorkflowRequest, dict]] = None, - *, - parent: Optional[str] = None, - migration_workflow: Optional[migration_entities.MigrationWorkflow] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> migration_entities.MigrationWorkflow: - r"""Creates a migration workflow. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_migration_v2 - - async def sample_create_migration_workflow(): - # Create a client - client = bigquery_migration_v2.MigrationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_migration_v2.CreateMigrationWorkflowRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_migration_workflow(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_migration_v2.types.CreateMigrationWorkflowRequest, dict]]): - The request object. Request to create a migration - workflow resource. - parent (:class:`str`): - Required. 
The name of the project to which this - migration workflow belongs. Example: - ``projects/foo/locations/bar`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - migration_workflow (:class:`google.cloud.bigquery_migration_v2.types.MigrationWorkflow`): - Required. The migration workflow to - create. - - This corresponds to the ``migration_workflow`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_migration_v2.types.MigrationWorkflow: - A migration workflow which specifies - what needs to be done for an EDW - migration. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, migration_workflow] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, migration_service.CreateMigrationWorkflowRequest): - request = migration_service.CreateMigrationWorkflowRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if migration_workflow is not None: - request.migration_workflow = migration_workflow - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_migration_workflow] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_migration_workflow(self, - request: Optional[Union[migration_service.GetMigrationWorkflowRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> migration_entities.MigrationWorkflow: - r"""Gets a previously created migration workflow. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_migration_v2 - - async def sample_get_migration_workflow(): - # Create a client - client = bigquery_migration_v2.MigrationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_migration_v2.GetMigrationWorkflowRequest( - name="name_value", - ) - - # Make the request - response = await client.get_migration_workflow(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_migration_v2.types.GetMigrationWorkflowRequest, dict]]): - The request object. A request to get a previously created - migration workflow. - name (:class:`str`): - Required. The unique identifier for the migration - workflow. Example: - ``projects/123/locations/us/workflows/1234`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_migration_v2.types.MigrationWorkflow: - A migration workflow which specifies - what needs to be done for an EDW - migration. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, migration_service.GetMigrationWorkflowRequest): - request = migration_service.GetMigrationWorkflowRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_migration_workflow] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def list_migration_workflows(self, - request: Optional[Union[migration_service.ListMigrationWorkflowsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListMigrationWorkflowsAsyncPager: - r"""Lists previously created migration workflows. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_migration_v2 - - async def sample_list_migration_workflows(): - # Create a client - client = bigquery_migration_v2.MigrationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_migration_v2.ListMigrationWorkflowsRequest( - parent="parent_value", - ) - - # Make the request - page_result = await client.list_migration_workflows(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_migration_v2.types.ListMigrationWorkflowsRequest, dict]]): - The request object. A request to list previously created - migration workflows. - parent (:class:`str`): - Required. The project and location of the migration - workflows to list. Example: - ``projects/123/locations/us`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_migration_v2.services.migration_service.pagers.ListMigrationWorkflowsAsyncPager: - Response object for a ListMigrationWorkflows call. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, migration_service.ListMigrationWorkflowsRequest): - request = migration_service.ListMigrationWorkflowsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling.
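(Editorial note: the "wrapped" method fetched below applies the retry/timeout defaults registered on the transport, and the metadata tuple added next is the routing header that lets the backend route by resource. A hedged illustration of the helper in question:)

.. code-block:: python

    from google.api_core import gapic_v1

    # Produces the single "x-goog-request-params" metadata entry;
    # values are percent-encoded as needed.
    md = gapic_v1.routing_header.to_grpc_metadata(
        (("parent", "projects/123/locations/us"),)
    )
    print(md)  # e.g. ('x-goog-request-params', 'parent=projects/123/locations/us')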
- rpc = self._client._transport._wrapped_methods[self._client._transport.list_migration_workflows] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListMigrationWorkflowsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_migration_workflow(self, - request: Optional[Union[migration_service.DeleteMigrationWorkflowRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a migration workflow by name. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_migration_v2 - - async def sample_delete_migration_workflow(): - # Create a client - client = bigquery_migration_v2.MigrationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_migration_v2.DeleteMigrationWorkflowRequest( - name="name_value", - ) - - # Make the request - await client.delete_migration_workflow(request=request) - - Args: - request (Optional[Union[google.cloud.bigquery_migration_v2.types.DeleteMigrationWorkflowRequest, dict]]): - The request object. A request to delete a previously - created migration workflow. - name (:class:`str`): - Required. The unique identifier for the migration - workflow. Example: - ``projects/123/locations/us/workflows/1234`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
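(Editorial note: the mutual-exclusion check described above means each call accepts either a ``request`` object/dict or the flattened field, never both. A hedged illustration using the delete call that follows:)

.. code-block:: python

    async def delete_by_request(client):
        await client.delete_migration_workflow(
            request={"name": "projects/123/locations/us/workflows/1234"}
        )

    async def delete_by_field(client):
        # Same effect; passing both `request=` and `name=` raises ValueError.
        await client.delete_migration_workflow(
            name="projects/123/locations/us/workflows/1234"
        )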
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, migration_service.DeleteMigrationWorkflowRequest): - request = migration_service.DeleteMigrationWorkflowRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_migration_workflow] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def start_migration_workflow(self, - request: Optional[Union[migration_service.StartMigrationWorkflowRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts a previously created migration workflow. I.e., - the state transitions from DRAFT to RUNNING. This is a - no-op if the state is already RUNNING. An error will be - signaled if the state is anything other than DRAFT or - RUNNING. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_migration_v2 - - async def sample_start_migration_workflow(): - # Create a client - client = bigquery_migration_v2.MigrationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_migration_v2.StartMigrationWorkflowRequest( - name="name_value", - ) - - # Make the request - await client.start_migration_workflow(request=request) - - Args: - request (Optional[Union[google.cloud.bigquery_migration_v2.types.StartMigrationWorkflowRequest, dict]]): - The request object. A request to start a previously - created migration workflow. - name (:class:`str`): - Required. The unique identifier for the migration - workflow. Example: - ``projects/123/locations/us/workflows/1234`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, migration_service.StartMigrationWorkflowRequest): - request = migration_service.StartMigrationWorkflowRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.start_migration_workflow] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def get_migration_subtask(self, - request: Optional[Union[migration_service.GetMigrationSubtaskRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> migration_entities.MigrationSubtask: - r"""Gets a previously created migration subtask. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_migration_v2 - - async def sample_get_migration_subtask(): - # Create a client - client = bigquery_migration_v2.MigrationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_migration_v2.GetMigrationSubtaskRequest( - name="name_value", - ) - - # Make the request - response = await client.get_migration_subtask(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_migration_v2.types.GetMigrationSubtaskRequest, dict]]): - The request object. A request to get a previously created - migration subtask. - name (:class:`str`): - Required. The unique identifier for the migration - subtask. Example: - ``projects/123/locations/us/workflows/1234/subtasks/543`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_migration_v2.types.MigrationSubtask: - A subtask for a migration which - carries details about the configuration - of the subtask. The content of the - details should not matter to the end - user, but is a contract between the - subtask creator and subtask worker. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, migration_service.GetMigrationSubtaskRequest): - request = migration_service.GetMigrationSubtaskRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_migration_subtask] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_migration_subtasks(self, - request: Optional[Union[migration_service.ListMigrationSubtasksRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListMigrationSubtasksAsyncPager: - r"""Lists previously created migration subtasks. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_migration_v2 - - async def sample_list_migration_subtasks(): - # Create a client - client = bigquery_migration_v2.MigrationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_migration_v2.ListMigrationSubtasksRequest( - parent="parent_value", - ) - - # Make the request - page_result = await client.list_migration_subtasks(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_migration_v2.types.ListMigrationSubtasksRequest, dict]]): - The request object. A request to list previously created - migration subtasks. - parent (:class:`str`): - Required. The migration task of the subtasks to list. - Example: ``projects/123/locations/us/workflows/1234`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_migration_v2.services.migration_service.pagers.ListMigrationSubtasksAsyncPager: - Response object for a ListMigrationSubtasks call. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, migration_service.ListMigrationSubtasksRequest): - request = migration_service.ListMigrationSubtasksRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_migration_subtasks] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method.
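(Editorial note: a hedged sketch of consuming the async pager constructed below, either item by item or page by page:)

.. code-block:: python

    async def print_subtasks(pager):
        # Item by item; additional pages are fetched transparently.
        async for subtask in pager:
            print(subtask.name)

    async def print_page_sizes(pager):
        # Page by page, via the pager's `pages` async generator.
        async for page in pager.pages:
            print(len(page.migration_subtasks))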
- response = pagers.ListMigrationSubtasksAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def __aenter__(self) -> "MigrationServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "MigrationServiceAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/services/migration_service/client.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/services/migration_service/client.py deleted file mode 100644 index 9d4f53137297..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/services/migration_service/client.py +++ /dev/null @@ -1,1401 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.bigquery_migration_v2 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.cloud.bigquery_migration_v2.services.migration_service import pagers -from google.cloud.bigquery_migration_v2.types import migration_entities -from google.cloud.bigquery_migration_v2.types import migration_error_details -from google.cloud.bigquery_migration_v2.types import migration_metrics -from google.cloud.bigquery_migration_v2.types import migration_service -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import error_details_pb2 # type: ignore -from .transports.base import MigrationServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import 
MigrationServiceGrpcTransport -from .transports.grpc_asyncio import MigrationServiceGrpcAsyncIOTransport - - -class MigrationServiceClientMeta(type): - """Metaclass for the MigrationService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[MigrationServiceTransport]] - _transport_registry["grpc"] = MigrationServiceGrpcTransport - _transport_registry["grpc_asyncio"] = MigrationServiceGrpcAsyncIOTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[MigrationServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class MigrationServiceClient(metaclass=MigrationServiceClientMeta): - """Service to handle EDW migrations.""" - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = "bigquerymigration.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - _DEFAULT_ENDPOINT_TEMPLATE = "bigquerymigration.{UNIVERSE_DOMAIN}" - _DEFAULT_UNIVERSE = "googleapis.com" - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - MigrationServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - MigrationServiceClient: The constructed client.
- """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> MigrationServiceTransport: - """Returns the transport used by the client instance. - - Returns: - MigrationServiceTransport: The transport used by the client - instance. - """ - return self._transport - - @staticmethod - def migration_subtask_path(project: str,location: str,workflow: str,subtask: str,) -> str: - """Returns a fully-qualified migration_subtask string.""" - return "projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}".format(project=project, location=location, workflow=workflow, subtask=subtask, ) - - @staticmethod - def parse_migration_subtask_path(path: str) -> Dict[str,str]: - """Parses a migration_subtask path into its component segments.""" - m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/workflows/(?P<workflow>.+?)/subtasks/(?P<subtask>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def migration_workflow_path(project: str,location: str,workflow: str,) -> str: - """Returns a fully-qualified migration_workflow string.""" - return "projects/{project}/locations/{location}/workflows/{workflow}".format(project=project, location=location, workflow=workflow, ) - - @staticmethod - def parse_migration_workflow_path(path: str) -> Dict[str,str]: - """Parses a migration_workflow path into its component segments.""" - m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/workflows/(?P<workflow>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P<folder>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse an organization path into its component segments.""" - m = re.match(r"^organizations/(?P<organization>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P<project>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project,
location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Deprecated. Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` is provided, use the provided one. - (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. - - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"].
- google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"]. - """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = MigrationServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = MigrationServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = MigrationServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. 
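-
-        A minimal sketch of the resolution order (illustrative only; it assumes
-        the default universe is "googleapis.com", per ``_DEFAULT_UNIVERSE``)::
-
-            # The client option wins over the environment variable; both fall
-            # back to the default universe.
-            MigrationServiceClient._get_universe_domain("example.com", None)   # -> "example.com"
-            MigrationServiceClient._get_universe_domain(None, "env.example")   # -> "env.example"
-            MigrationServiceClient._get_universe_domain(None, None)            # -> "googleapis.com"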
- """ - universe_domain = MigrationServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. - """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, MigrationServiceTransport, Callable[..., MigrationServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the migration service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,MigrationServiceTransport,Callable[..., MigrationServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the MigrationServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. 
Only if this property is not set and
-                ``transport`` was not explicitly provided, the endpoint is
-                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
-                variable, which can have one of the following values:
-                "always" (always use the default mTLS endpoint), "never" (always
-                use the default regular endpoint) and "auto" (auto-switch to the
-                default mTLS endpoint if a client certificate is present; this is
-                the default value).
-
-                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
-                is "true", then the ``client_cert_source`` property can be used
-                to provide a client certificate for mTLS transport. If
-                not provided, the default SSL client certificate will be used if
-                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
-                set, no client certificate will be used.
-
-                3. The ``universe_domain`` property can be used to override the
-                default "googleapis.com" universe. Note that the ``api_endpoint``
-                property still takes precedence, and ``universe_domain`` is
-                currently not supported for mTLS.
-
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
-        """
-        self._client_options = client_options
-        if isinstance(self._client_options, dict):
-            self._client_options = client_options_lib.from_dict(self._client_options)
-        if self._client_options is None:
-            self._client_options = client_options_lib.ClientOptions()
-        self._client_options = cast(client_options_lib.ClientOptions, self._client_options)
-
-        universe_domain_opt = getattr(self._client_options, 'universe_domain', None)
-
-        self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = MigrationServiceClient._read_environment_variables()
-        self._client_cert_source = MigrationServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert)
-        self._universe_domain = MigrationServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env)
-        self._api_endpoint = None  # updated below, depending on `transport`
-
-        # Initialize the universe domain validation.
-        self._is_universe_domain_valid = False
-
-        if CLIENT_LOGGING_SUPPORTED:  # pragma: NO COVER
-            # Setup logging.
-            client_logging.initialize_logging()
-
-        api_key_value = getattr(self._client_options, "api_key", None)
-        if api_key_value and credentials:
-            raise ValueError("client_options.api_key and credentials are mutually exclusive")
-
-        # Save or instantiate the transport.
-        # Ordinarily, we provide the transport, but allowing a custom transport
-        # instance provides an extensibility point for unusual situations.
-        transport_provided = isinstance(transport, MigrationServiceTransport)
-        if transport_provided:
-            # transport is a MigrationServiceTransport instance.
-            if credentials or self._client_options.credentials_file or api_key_value:
-                raise ValueError("When providing a transport instance, "
-                                 "provide its credentials directly.")
-            if self._client_options.scopes:
-                raise ValueError(
-                    "When providing a transport instance, provide its scopes "
-                    "directly."
- ) - self._transport = cast(MigrationServiceTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - MigrationServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[MigrationServiceTransport], Callable[..., MigrationServiceTransport]] = ( - MigrationServiceClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., MigrationServiceTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.bigquery.migration_v2.MigrationServiceClient`.", - extra = { - "serviceName": "google.cloud.bigquery.migration.v2.MigrationService", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.cloud.bigquery.migration.v2.MigrationService", - "credentialsType": None, - } - ) - - def create_migration_workflow(self, - request: Optional[Union[migration_service.CreateMigrationWorkflowRequest, dict]] = None, - *, - parent: Optional[str] = None, - migration_workflow: Optional[migration_entities.MigrationWorkflow] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> migration_entities.MigrationWorkflow: - r"""Creates a migration workflow. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_migration_v2 - - def sample_create_migration_workflow(): - # Create a client - client = bigquery_migration_v2.MigrationServiceClient() - - # Initialize request argument(s) - request = bigquery_migration_v2.CreateMigrationWorkflowRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_migration_workflow(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_migration_v2.types.CreateMigrationWorkflowRequest, dict]): - The request object. Request to create a migration - workflow resource. - parent (str): - Required. The name of the project to which this - migration workflow belongs. Example: - ``projects/foo/locations/bar`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - migration_workflow (google.cloud.bigquery_migration_v2.types.MigrationWorkflow): - Required. The migration workflow to - create. - - This corresponds to the ``migration_workflow`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_migration_v2.types.MigrationWorkflow: - A migration workflow which specifies - what needs to be done for an EDW - migration. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, migration_workflow] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, migration_service.CreateMigrationWorkflowRequest): - request = migration_service.CreateMigrationWorkflowRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if migration_workflow is not None: - request.migration_workflow = migration_workflow - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_migration_workflow] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def get_migration_workflow(self, - request: Optional[Union[migration_service.GetMigrationWorkflowRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> migration_entities.MigrationWorkflow: - r"""Gets a previously created migration workflow. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_migration_v2 - - def sample_get_migration_workflow(): - # Create a client - client = bigquery_migration_v2.MigrationServiceClient() - - # Initialize request argument(s) - request = bigquery_migration_v2.GetMigrationWorkflowRequest( - name="name_value", - ) - - # Make the request - response = client.get_migration_workflow(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_migration_v2.types.GetMigrationWorkflowRequest, dict]): - The request object. A request to get a previously created - migration workflow. - name (str): - Required. The unique identifier for the migration - workflow. Example: - ``projects/123/locations/us/workflows/1234`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_migration_v2.types.MigrationWorkflow: - A migration workflow which specifies - what needs to be done for an EDW - migration. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, migration_service.GetMigrationWorkflowRequest): - request = migration_service.GetMigrationWorkflowRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_migration_workflow] - - # Certain fields should be provided within the metadata header; - # add these here. 
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def list_migration_workflows(self,
-            request: Optional[Union[migration_service.ListMigrationWorkflowsRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> pagers.ListMigrationWorkflowsPager:
-        r"""Lists previously created migration workflows.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import bigquery_migration_v2
-
-            def sample_list_migration_workflows():
-                # Create a client
-                client = bigquery_migration_v2.MigrationServiceClient()
-
-                # Initialize request argument(s)
-                request = bigquery_migration_v2.ListMigrationWorkflowsRequest(
-                    parent="parent_value",
-                )
-
-                # Make the request
-                page_result = client.list_migration_workflows(request=request)
-
-                # Handle the response
-                for response in page_result:
-                    print(response)
-
-        Args:
-            request (Union[google.cloud.bigquery_migration_v2.types.ListMigrationWorkflowsRequest, dict]):
-                The request object. A request to list previously created
-                migration workflows.
-            parent (str):
-                Required. The project and location of the migration
-                workflows to list. Example:
-                ``projects/123/locations/us``
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.bigquery_migration_v2.services.migration_service.pagers.ListMigrationWorkflowsPager:
-                Response object for a ListMigrationWorkflows call.
-
-                Iterating over this object will yield results and
-                resolve additional pages automatically.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [parent]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
- if not isinstance(request, migration_service.ListMigrationWorkflowsRequest): - request = migration_service.ListMigrationWorkflowsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_migration_workflows] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListMigrationWorkflowsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_migration_workflow(self, - request: Optional[Union[migration_service.DeleteMigrationWorkflowRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a migration workflow by name. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_migration_v2 - - def sample_delete_migration_workflow(): - # Create a client - client = bigquery_migration_v2.MigrationServiceClient() - - # Initialize request argument(s) - request = bigquery_migration_v2.DeleteMigrationWorkflowRequest( - name="name_value", - ) - - # Make the request - client.delete_migration_workflow(request=request) - - Args: - request (Union[google.cloud.bigquery_migration_v2.types.DeleteMigrationWorkflowRequest, dict]): - The request object. A request to delete a previously - created migration workflow. - name (str): - Required. The unique identifier for the migration - workflow. Example: - ``projects/123/locations/us/workflows/1234`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, migration_service.DeleteMigrationWorkflowRequest): - request = migration_service.DeleteMigrationWorkflowRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_migration_workflow] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def start_migration_workflow(self, - request: Optional[Union[migration_service.StartMigrationWorkflowRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts a previously created migration workflow. I.e., - the state transitions from DRAFT to RUNNING. This is a - no-op if the state is already RUNNING. An error will be - signaled if the state is anything other than DRAFT or - RUNNING. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_migration_v2 - - def sample_start_migration_workflow(): - # Create a client - client = bigquery_migration_v2.MigrationServiceClient() - - # Initialize request argument(s) - request = bigquery_migration_v2.StartMigrationWorkflowRequest( - name="name_value", - ) - - # Make the request - client.start_migration_workflow(request=request) - - Args: - request (Union[google.cloud.bigquery_migration_v2.types.StartMigrationWorkflowRequest, dict]): - The request object. A request to start a previously - created migration workflow. - name (str): - Required. The unique identifier for the migration - workflow. Example: - ``projects/123/locations/us/workflows/1234`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [name]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, migration_service.StartMigrationWorkflowRequest):
-            request = migration_service.StartMigrationWorkflowRequest(request)
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if name is not None:
-            request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.start_migration_workflow]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-    def get_migration_subtask(self,
-            request: Optional[Union[migration_service.GetMigrationSubtaskRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> migration_entities.MigrationSubtask:
-        r"""Gets a previously created migration subtask.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import bigquery_migration_v2
-
-            def sample_get_migration_subtask():
-                # Create a client
-                client = bigquery_migration_v2.MigrationServiceClient()
-
-                # Initialize request argument(s)
-                request = bigquery_migration_v2.GetMigrationSubtaskRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                response = client.get_migration_subtask(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.bigquery_migration_v2.types.GetMigrationSubtaskRequest, dict]):
-                The request object. A request to get a previously created
-                migration subtask.
-            name (str):
-                Required. The unique identifier for the migration
-                subtask. Example:
-                ``projects/123/locations/us/workflows/1234/subtasks/543``
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata.
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_migration_v2.types.MigrationSubtask: - A subtask for a migration which - carries details about the configuration - of the subtask. The content of the - details should not matter to the end - user, but is a contract between the - subtask creator and subtask worker. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, migration_service.GetMigrationSubtaskRequest): - request = migration_service.GetMigrationSubtaskRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_migration_subtask] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_migration_subtasks(self, - request: Optional[Union[migration_service.ListMigrationSubtasksRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListMigrationSubtasksPager: - r"""Lists previously created migration subtasks. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_migration_v2 - - def sample_list_migration_subtasks(): - # Create a client - client = bigquery_migration_v2.MigrationServiceClient() - - # Initialize request argument(s) - request = bigquery_migration_v2.ListMigrationSubtasksRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_migration_subtasks(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.bigquery_migration_v2.types.ListMigrationSubtasksRequest, dict]): - The request object. A request to list previously created - migration subtasks. - parent (str): - Required. The migration task of the subtasks to list. 
- Example: ``projects/123/locations/us/workflows/1234`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_migration_v2.services.migration_service.pagers.ListMigrationSubtasksPager: - Response object for a ListMigrationSubtasks call. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, migration_service.ListMigrationSubtasksRequest): - request = migration_service.ListMigrationSubtasksRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_migration_subtasks] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListMigrationSubtasksPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "MigrationServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! 
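-
-        A minimal usage sketch (illustrative only)::
-
-            from google.cloud import bigquery_migration_v2
-
-            # The transport is closed automatically when the block exits.
-            with bigquery_migration_v2.MigrationServiceClient() as client:
-                client.list_migration_workflows(parent="projects/123/locations/us")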
- """ - self.transport.close() - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "MigrationServiceClient", -) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/services/migration_service/pagers.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/services/migration_service/pagers.py deleted file mode 100644 index 66db45fcd438..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/services/migration_service/pagers.py +++ /dev/null @@ -1,306 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.bigquery_migration_v2.types import migration_entities -from google.cloud.bigquery_migration_v2.types import migration_service - - -class ListMigrationWorkflowsPager: - """A pager for iterating through ``list_migration_workflows`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_migration_v2.types.ListMigrationWorkflowsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``migration_workflows`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListMigrationWorkflows`` requests and continue to iterate - through the ``migration_workflows`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_migration_v2.types.ListMigrationWorkflowsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., migration_service.ListMigrationWorkflowsResponse], - request: migration_service.ListMigrationWorkflowsRequest, - response: migration_service.ListMigrationWorkflowsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_migration_v2.types.ListMigrationWorkflowsRequest): - The initial request object. 
- response (google.cloud.bigquery_migration_v2.types.ListMigrationWorkflowsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = migration_service.ListMigrationWorkflowsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[migration_service.ListMigrationWorkflowsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[migration_entities.MigrationWorkflow]: - for page in self.pages: - yield from page.migration_workflows - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListMigrationWorkflowsAsyncPager: - """A pager for iterating through ``list_migration_workflows`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_migration_v2.types.ListMigrationWorkflowsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``migration_workflows`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListMigrationWorkflows`` requests and continue to iterate - through the ``migration_workflows`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_migration_v2.types.ListMigrationWorkflowsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[migration_service.ListMigrationWorkflowsResponse]], - request: migration_service.ListMigrationWorkflowsRequest, - response: migration_service.ListMigrationWorkflowsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_migration_v2.types.ListMigrationWorkflowsRequest): - The initial request object. - response (google.cloud.bigquery_migration_v2.types.ListMigrationWorkflowsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
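-
-        A minimal iteration sketch (illustrative; in practice the pager is
-        returned by ``list_migration_workflows`` on the async client)::
-
-            pager = await client.list_migration_workflows(parent="projects/123/locations/us")
-            async for workflow in pager:
-                print(workflow.name)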
- """ - self._method = method - self._request = migration_service.ListMigrationWorkflowsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[migration_service.ListMigrationWorkflowsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[migration_entities.MigrationWorkflow]: - async def async_generator(): - async for page in self.pages: - for response in page.migration_workflows: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListMigrationSubtasksPager: - """A pager for iterating through ``list_migration_subtasks`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_migration_v2.types.ListMigrationSubtasksResponse` object, and - provides an ``__iter__`` method to iterate through its - ``migration_subtasks`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListMigrationSubtasks`` requests and continue to iterate - through the ``migration_subtasks`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_migration_v2.types.ListMigrationSubtasksResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., migration_service.ListMigrationSubtasksResponse], - request: migration_service.ListMigrationSubtasksRequest, - response: migration_service.ListMigrationSubtasksResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_migration_v2.types.ListMigrationSubtasksRequest): - The initial request object. - response (google.cloud.bigquery_migration_v2.types.ListMigrationSubtasksResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
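-
-        A minimal page-by-page sketch (illustrative; in practice the pager is
-        returned by ``list_migration_subtasks`` on the sync client)::
-
-            pager = client.list_migration_subtasks(parent="projects/123/locations/us/workflows/1234")
-            for page in pager.pages:
-                print(len(page.migration_subtasks))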
- """ - self._method = method - self._request = migration_service.ListMigrationSubtasksRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[migration_service.ListMigrationSubtasksResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[migration_entities.MigrationSubtask]: - for page in self.pages: - yield from page.migration_subtasks - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListMigrationSubtasksAsyncPager: - """A pager for iterating through ``list_migration_subtasks`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_migration_v2.types.ListMigrationSubtasksResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``migration_subtasks`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListMigrationSubtasks`` requests and continue to iterate - through the ``migration_subtasks`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_migration_v2.types.ListMigrationSubtasksResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[migration_service.ListMigrationSubtasksResponse]], - request: migration_service.ListMigrationSubtasksRequest, - response: migration_service.ListMigrationSubtasksResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_migration_v2.types.ListMigrationSubtasksRequest): - The initial request object. - response (google.cloud.bigquery_migration_v2.types.ListMigrationSubtasksResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = migration_service.ListMigrationSubtasksRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[migration_service.ListMigrationSubtasksResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[migration_entities.MigrationSubtask]: - async def async_generator(): - async for page in self.pages: - for response in page.migration_subtasks: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/services/migration_service/transports/README.rst b/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/services/migration_service/transports/README.rst deleted file mode 100644 index ef5e0cb2c853..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/services/migration_service/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`MigrationServiceTransport` is the ABC for all transports. -- public child `MigrationServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `MigrationServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseMigrationServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `MigrationServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/services/migration_service/transports/__init__.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/services/migration_service/transports/__init__.py deleted file mode 100644 index fc01e4daba77..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/services/migration_service/transports/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
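-#
-# Illustrative lookup against the registry defined below (a sketch only; the
-# supported entry point is MigrationServiceClient.get_transport_class("grpc")):
-#
-#     transport_cls = _transport_registry["grpc"]  # MigrationServiceGrpcTransport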
-# -from collections import OrderedDict -from typing import Dict, Type - -from .base import MigrationServiceTransport -from .grpc import MigrationServiceGrpcTransport -from .grpc_asyncio import MigrationServiceGrpcAsyncIOTransport - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[MigrationServiceTransport]] -_transport_registry['grpc'] = MigrationServiceGrpcTransport -_transport_registry['grpc_asyncio'] = MigrationServiceGrpcAsyncIOTransport - -__all__ = ( - 'MigrationServiceTransport', - 'MigrationServiceGrpcTransport', - 'MigrationServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/services/migration_service/transports/base.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/services/migration_service/transports/base.py deleted file mode 100644 index fb2ef62b2d75..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/services/migration_service/transports/base.py +++ /dev/null @@ -1,267 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.bigquery_migration_v2 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.bigquery_migration_v2.types import migration_entities -from google.cloud.bigquery_migration_v2.types import migration_service -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class MigrationServiceTransport(abc.ABC): - """Abstract transport class for MigrationService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'bigquerymigration.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'bigquerymigration.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. 
These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. 
- self._wrapped_methods = { - self.create_migration_workflow: gapic_v1.method.wrap_method( - self.create_migration_workflow, - default_timeout=60.0, - client_info=client_info, - ), - self.get_migration_workflow: gapic_v1.method.wrap_method( - self.get_migration_workflow, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_migration_workflows: gapic_v1.method.wrap_method( - self.list_migration_workflows, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.delete_migration_workflow: gapic_v1.method.wrap_method( - self.delete_migration_workflow, - default_timeout=60.0, - client_info=client_info, - ), - self.start_migration_workflow: gapic_v1.method.wrap_method( - self.start_migration_workflow, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_migration_subtask: gapic_v1.method.wrap_method( - self.get_migration_subtask, - default_timeout=None, - client_info=client_info, - ), - self.list_migration_subtasks: gapic_v1.method.wrap_method( - self.list_migration_subtasks, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
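The table above wraps ``get_migration_workflow``, ``list_migration_workflows``, and ``start_migration_workflow`` with a default exponential retry (1.0 s initial delay, 1.3x multiplier, 10 s maximum delay, 60 s overall deadline) triggered only by ``ServiceUnavailable``. Those defaults can be overridden per call; a hedged sketch, assuming ``client`` is an already-constructed ``MigrationServiceClient``:

.. code-block:: python

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries

    # Same shape as the generated defaults, but with a wider predicate
    # and a longer overall deadline.
    custom_retry = retries.Retry(
        initial=1.0,
        maximum=10.0,
        multiplier=1.3,
        predicate=retries.if_exception_type(
            core_exceptions.ServiceUnavailable,
            core_exceptions.DeadlineExceeded,
        ),
        deadline=120.0,
    )

    workflow = client.get_migration_workflow(
        name="projects/123/locations/us/workflows/345",
        retry=custom_retry,
        timeout=120.0,
    )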
- """ - raise NotImplementedError() - - @property - def create_migration_workflow(self) -> Callable[ - [migration_service.CreateMigrationWorkflowRequest], - Union[ - migration_entities.MigrationWorkflow, - Awaitable[migration_entities.MigrationWorkflow] - ]]: - raise NotImplementedError() - - @property - def get_migration_workflow(self) -> Callable[ - [migration_service.GetMigrationWorkflowRequest], - Union[ - migration_entities.MigrationWorkflow, - Awaitable[migration_entities.MigrationWorkflow] - ]]: - raise NotImplementedError() - - @property - def list_migration_workflows(self) -> Callable[ - [migration_service.ListMigrationWorkflowsRequest], - Union[ - migration_service.ListMigrationWorkflowsResponse, - Awaitable[migration_service.ListMigrationWorkflowsResponse] - ]]: - raise NotImplementedError() - - @property - def delete_migration_workflow(self) -> Callable[ - [migration_service.DeleteMigrationWorkflowRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def start_migration_workflow(self) -> Callable[ - [migration_service.StartMigrationWorkflowRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def get_migration_subtask(self) -> Callable[ - [migration_service.GetMigrationSubtaskRequest], - Union[ - migration_entities.MigrationSubtask, - Awaitable[migration_entities.MigrationSubtask] - ]]: - raise NotImplementedError() - - @property - def list_migration_subtasks(self) -> Callable[ - [migration_service.ListMigrationSubtasksRequest], - Union[ - migration_service.ListMigrationSubtasksResponse, - Awaitable[migration_service.ListMigrationSubtasksResponse] - ]]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'MigrationServiceTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/services/migration_service/transports/grpc.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/services/migration_service/transports/grpc.py deleted file mode 100644 index e50fad846465..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/services/migration_service/transports/grpc.py +++ /dev/null @@ -1,509 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import json -import logging as std_logging -import pickle -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore - -from google.cloud.bigquery_migration_v2.types import migration_entities -from google.cloud.bigquery_migration_v2.types import migration_service -from google.protobuf import empty_pb2 # type: ignore -from .base import MigrationServiceTransport, DEFAULT_CLIENT_INFO - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER - def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.bigquery.migration.v2.MigrationService", - "rpcName": client_call_details.method, - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - - response = continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = response.trailing_metadata() - # Convert the gRPC response metadata to a dict of string keys and values - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = response.result() - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response for {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.bigquery.migration.v2.MigrationService", - "rpcName": client_call_details.method, - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class MigrationServiceGrpcTransport(MigrationServiceTransport): - """gRPC backend transport for MigrationService. - - Service to handle EDW migrations. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it.
- - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'bigquerymigration.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'bigquerymigration.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if a ``channel`` instance is provided. - channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. 
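When the constructor defaults above are not enough, for example to reuse one channel across several clients, the transport can be built explicitly and handed to the client. A rough sketch under that assumption (application default credentials must be resolvable; the import path matches the transports package in this diff):

.. code-block:: python

    from google.cloud import bigquery_migration_v2
    from google.cloud.bigquery_migration_v2.services.migration_service.transports import (
        MigrationServiceGrpcTransport,
    )

    # create_channel() pulls credentials from the environment and applies
    # the cloud-platform scope declared on the base transport.
    channel = MigrationServiceGrpcTransport.create_channel(
        "bigquerymigration.googleapis.com",
    )
    transport = MigrationServiceGrpcTransport(channel=channel)
    client = bigquery_migration_v2.MigrationServiceClient(transport=transport)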
- - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) - - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'bigquerymigration.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. 
- scopes (Optional[Sequence[str]]): An optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def create_migration_workflow(self) -> Callable[ - [migration_service.CreateMigrationWorkflowRequest], - migration_entities.MigrationWorkflow]: - r"""Return a callable for the create migration workflow method over gRPC. - - Creates a migration workflow. - - Returns: - Callable[[~.CreateMigrationWorkflowRequest], - ~.MigrationWorkflow]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_migration_workflow' not in self._stubs: - self._stubs['create_migration_workflow'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.migration.v2.MigrationService/CreateMigrationWorkflow', - request_serializer=migration_service.CreateMigrationWorkflowRequest.serialize, - response_deserializer=migration_entities.MigrationWorkflow.deserialize, - ) - return self._stubs['create_migration_workflow'] - - @property - def get_migration_workflow(self) -> Callable[ - [migration_service.GetMigrationWorkflowRequest], - migration_entities.MigrationWorkflow]: - r"""Return a callable for the get migration workflow method over gRPC. - - Gets a previously created migration workflow. - - Returns: - Callable[[~.GetMigrationWorkflowRequest], - ~.MigrationWorkflow]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_migration_workflow' not in self._stubs: - self._stubs['get_migration_workflow'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.migration.v2.MigrationService/GetMigrationWorkflow', - request_serializer=migration_service.GetMigrationWorkflowRequest.serialize, - response_deserializer=migration_entities.MigrationWorkflow.deserialize, - ) - return self._stubs['get_migration_workflow'] - - @property - def list_migration_workflows(self) -> Callable[ - [migration_service.ListMigrationWorkflowsRequest], - migration_service.ListMigrationWorkflowsResponse]: - r"""Return a callable for the list migration workflows method over gRPC. - - Lists previously created migration workflows. - - Returns: - Callable[[~.ListMigrationWorkflowsRequest], - ~.ListMigrationWorkflowsResponse]: - A function that, when called, will call the underlying RPC - on the server.
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_migration_workflows' not in self._stubs: - self._stubs['list_migration_workflows'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.migration.v2.MigrationService/ListMigrationWorkflows', - request_serializer=migration_service.ListMigrationWorkflowsRequest.serialize, - response_deserializer=migration_service.ListMigrationWorkflowsResponse.deserialize, - ) - return self._stubs['list_migration_workflows'] - - @property - def delete_migration_workflow(self) -> Callable[ - [migration_service.DeleteMigrationWorkflowRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete migration workflow method over gRPC. - - Deletes a migration workflow by name. - - Returns: - Callable[[~.DeleteMigrationWorkflowRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_migration_workflow' not in self._stubs: - self._stubs['delete_migration_workflow'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.migration.v2.MigrationService/DeleteMigrationWorkflow', - request_serializer=migration_service.DeleteMigrationWorkflowRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_migration_workflow'] - - @property - def start_migration_workflow(self) -> Callable[ - [migration_service.StartMigrationWorkflowRequest], - empty_pb2.Empty]: - r"""Return a callable for the start migration workflow method over gRPC. - - Starts a previously created migration workflow. I.e., - the state transitions from DRAFT to RUNNING. This is a - no-op if the state is already RUNNING. An error will be - signaled if the state is anything other than DRAFT or - RUNNING. - - Returns: - Callable[[~.StartMigrationWorkflowRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'start_migration_workflow' not in self._stubs: - self._stubs['start_migration_workflow'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.migration.v2.MigrationService/StartMigrationWorkflow', - request_serializer=migration_service.StartMigrationWorkflowRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['start_migration_workflow'] - - @property - def get_migration_subtask(self) -> Callable[ - [migration_service.GetMigrationSubtaskRequest], - migration_entities.MigrationSubtask]: - r"""Return a callable for the get migration subtask method over gRPC. - - Gets a previously created migration subtask. - - Returns: - Callable[[~.GetMigrationSubtaskRequest], - ~.MigrationSubtask]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_migration_subtask' not in self._stubs: - self._stubs['get_migration_subtask'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.migration.v2.MigrationService/GetMigrationSubtask', - request_serializer=migration_service.GetMigrationSubtaskRequest.serialize, - response_deserializer=migration_entities.MigrationSubtask.deserialize, - ) - return self._stubs['get_migration_subtask'] - - @property - def list_migration_subtasks(self) -> Callable[ - [migration_service.ListMigrationSubtasksRequest], - migration_service.ListMigrationSubtasksResponse]: - r"""Return a callable for the list migration subtasks method over gRPC. - - Lists previously created migration subtasks. - - Returns: - Callable[[~.ListMigrationSubtasksRequest], - ~.ListMigrationSubtasksResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_migration_subtasks' not in self._stubs: - self._stubs['list_migration_subtasks'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.migration.v2.MigrationService/ListMigrationSubtasks', - request_serializer=migration_service.ListMigrationSubtasksRequest.serialize, - response_deserializer=migration_service.ListMigrationSubtasksResponse.deserialize, - ) - return self._stubs['list_migration_subtasks'] - - def close(self): - self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'MigrationServiceGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/services/migration_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/services/migration_service/transports/grpc_asyncio.py deleted file mode 100644 index d33d18cce5bb..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/services/migration_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,586 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
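The AsyncIO transport that follows backs ``MigrationServiceAsyncClient``, whose list methods return the async pager shown at the top of this diff; the pager keeps following ``next_page_token`` transparently. A brief, hedged usage sketch, assuming default credentials are available:

.. code-block:: python

    import asyncio

    from google.cloud import bigquery_migration_v2

    async def main() -> None:
        client = bigquery_migration_v2.MigrationServiceAsyncClient()
        pager = await client.list_migration_subtasks(
            parent="projects/123/locations/us/workflows/345",
        )
        # Iterating the pager fetches additional pages lazily.
        async for subtask in pager:
            print(subtask.name, subtask.state)

    asyncio.run(main())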
-# -import inspect -import json -import pickle -import logging as std_logging -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import exceptions as core_exceptions -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.bigquery_migration_v2.types import migration_entities -from google.cloud.bigquery_migration_v2.types import migration_service -from google.protobuf import empty_pb2 # type: ignore -from .base import MigrationServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import MigrationServiceGrpcTransport - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER - async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.bigquery.migration.v2.MigrationService", - "rpcName": str(client_call_details.method), - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - response = await continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = await response.trailing_metadata() - # Convert the gRPC response metadata to a dict of string keys and values - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = await response - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response to rpc {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.bigquery.migration.v2.MigrationService", - "rpcName": str(client_call_details.method), - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class MigrationServiceGrpcAsyncIOTransport(MigrationServiceTransport): - """gRPC AsyncIO backend transport for
MigrationService. - - Service to handle EDW migrations. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'bigquerymigration.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - scopes (Optional[Sequence[str]]): An optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'bigquerymigration.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'bigquerymigration.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional[Sequence[str]]): An optional list of scopes needed for this - service.
These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, aio.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def create_migration_workflow(self) -> Callable[ - [migration_service.CreateMigrationWorkflowRequest], - Awaitable[migration_entities.MigrationWorkflow]]: - r"""Return a callable for the create migration workflow method over gRPC. - - Creates a migration workflow. - - Returns: - Callable[[~.CreateMigrationWorkflowRequest], - Awaitable[~.MigrationWorkflow]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_migration_workflow' not in self._stubs: - self._stubs['create_migration_workflow'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.migration.v2.MigrationService/CreateMigrationWorkflow', - request_serializer=migration_service.CreateMigrationWorkflowRequest.serialize, - response_deserializer=migration_entities.MigrationWorkflow.deserialize, - ) - return self._stubs['create_migration_workflow'] - - @property - def get_migration_workflow(self) -> Callable[ - [migration_service.GetMigrationWorkflowRequest], - Awaitable[migration_entities.MigrationWorkflow]]: - r"""Return a callable for the get migration workflow method over gRPC. - - Gets a previously created migration workflow. 
- - Returns: - Callable[[~.GetMigrationWorkflowRequest], - Awaitable[~.MigrationWorkflow]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_migration_workflow' not in self._stubs: - self._stubs['get_migration_workflow'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.migration.v2.MigrationService/GetMigrationWorkflow', - request_serializer=migration_service.GetMigrationWorkflowRequest.serialize, - response_deserializer=migration_entities.MigrationWorkflow.deserialize, - ) - return self._stubs['get_migration_workflow'] - - @property - def list_migration_workflows(self) -> Callable[ - [migration_service.ListMigrationWorkflowsRequest], - Awaitable[migration_service.ListMigrationWorkflowsResponse]]: - r"""Return a callable for the list migration workflows method over gRPC. - - Lists previously created migration workflows. - - Returns: - Callable[[~.ListMigrationWorkflowsRequest], - Awaitable[~.ListMigrationWorkflowsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_migration_workflows' not in self._stubs: - self._stubs['list_migration_workflows'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.migration.v2.MigrationService/ListMigrationWorkflows', - request_serializer=migration_service.ListMigrationWorkflowsRequest.serialize, - response_deserializer=migration_service.ListMigrationWorkflowsResponse.deserialize, - ) - return self._stubs['list_migration_workflows'] - - @property - def delete_migration_workflow(self) -> Callable[ - [migration_service.DeleteMigrationWorkflowRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete migration workflow method over gRPC. - - Deletes a migration workflow by name. - - Returns: - Callable[[~.DeleteMigrationWorkflowRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_migration_workflow' not in self._stubs: - self._stubs['delete_migration_workflow'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.migration.v2.MigrationService/DeleteMigrationWorkflow', - request_serializer=migration_service.DeleteMigrationWorkflowRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_migration_workflow'] - - @property - def start_migration_workflow(self) -> Callable[ - [migration_service.StartMigrationWorkflowRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the start migration workflow method over gRPC. - - Starts a previously created migration workflow. I.e., - the state transitions from DRAFT to RUNNING. This is a - no-op if the state is already RUNNING. An error will be - signaled if the state is anything other than DRAFT or - RUNNING. - - Returns: - Callable[[~.StartMigrationWorkflowRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server.
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'start_migration_workflow' not in self._stubs: - self._stubs['start_migration_workflow'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.migration.v2.MigrationService/StartMigrationWorkflow', - request_serializer=migration_service.StartMigrationWorkflowRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['start_migration_workflow'] - - @property - def get_migration_subtask(self) -> Callable[ - [migration_service.GetMigrationSubtaskRequest], - Awaitable[migration_entities.MigrationSubtask]]: - r"""Return a callable for the get migration subtask method over gRPC. - - Gets a previously created migration subtask. - - Returns: - Callable[[~.GetMigrationSubtaskRequest], - Awaitable[~.MigrationSubtask]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_migration_subtask' not in self._stubs: - self._stubs['get_migration_subtask'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.migration.v2.MigrationService/GetMigrationSubtask', - request_serializer=migration_service.GetMigrationSubtaskRequest.serialize, - response_deserializer=migration_entities.MigrationSubtask.deserialize, - ) - return self._stubs['get_migration_subtask'] - - @property - def list_migration_subtasks(self) -> Callable[ - [migration_service.ListMigrationSubtasksRequest], - Awaitable[migration_service.ListMigrationSubtasksResponse]]: - r"""Return a callable for the list migration subtasks method over gRPC. - - Lists previously created migration subtasks. - - Returns: - Callable[[~.ListMigrationSubtasksRequest], - Awaitable[~.ListMigrationSubtasksResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_migration_subtasks' not in self._stubs: - self._stubs['list_migration_subtasks'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.migration.v2.MigrationService/ListMigrationSubtasks', - request_serializer=migration_service.ListMigrationSubtasksRequest.serialize, - response_deserializer=migration_service.ListMigrationSubtasksResponse.deserialize, - ) - return self._stubs['list_migration_subtasks'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.create_migration_workflow: self._wrap_method( - self.create_migration_workflow, - default_timeout=60.0, - client_info=client_info, - ), - self.get_migration_workflow: self._wrap_method( - self.get_migration_workflow, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_migration_workflows: self._wrap_method( - self.list_migration_workflows, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.delete_migration_workflow: self._wrap_method( - self.delete_migration_workflow, - default_timeout=60.0, - client_info=client_info, - ), - self.start_migration_workflow: self._wrap_method( - self.start_migration_workflow, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_migration_subtask: self._wrap_method( - self.get_migration_subtask, - default_timeout=None, - client_info=client_info, - ), - self.list_migration_subtasks: self._wrap_method( - self.list_migration_subtasks, - default_timeout=None, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - -__all__ = ( - 'MigrationServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/types/__init__.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/types/__init__.py deleted file mode 100644 index 7969a7fee118..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/types/__init__.py +++ /dev/null @@ -1,140 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from .migration_entities import ( - MigrationSubtask, - MigrationTask, - MigrationTaskResult, - MigrationWorkflow, - TranslationTaskResult, -) -from .migration_error_details import ( - ErrorDetail, - ErrorLocation, - ResourceErrorDetail, -) -from .migration_metrics import ( - Point, - TimeInterval, - TimeSeries, - TypedValue, -) -from .migration_service import ( - CreateMigrationWorkflowRequest, - DeleteMigrationWorkflowRequest, - GetMigrationSubtaskRequest, - GetMigrationWorkflowRequest, - ListMigrationSubtasksRequest, - ListMigrationSubtasksResponse, - ListMigrationWorkflowsRequest, - ListMigrationWorkflowsResponse, - StartMigrationWorkflowRequest, -) -from .translation_config import ( - AzureSynapseDialect, - BigQueryDialect, - DB2Dialect, - Dialect, - GreenplumDialect, - HiveQLDialect, - MySQLDialect, - NameMappingKey, - NameMappingValue, - NetezzaDialect, - ObjectNameMapping, - ObjectNameMappingList, - OracleDialect, - PostgresqlDialect, - PrestoDialect, - RedshiftDialect, - SnowflakeDialect, - SourceEnv, - SparkSQLDialect, - SQLiteDialect, - SQLServerDialect, - TeradataDialect, - TranslationConfigDetails, - VerticaDialect, -) -from .translation_details import ( - Literal, - SourceEnvironment, - SourceSpec, - SourceTargetMapping, - TargetSpec, - TranslationDetails, -) -from .translation_suggestion import ( - TranslationReportRecord, -) -from .translation_usability import ( - GcsReportLogMessage, -) - -__all__ = ( - 'MigrationSubtask', - 'MigrationTask', - 'MigrationTaskResult', - 'MigrationWorkflow', - 'TranslationTaskResult', - 'ErrorDetail', - 'ErrorLocation', - 'ResourceErrorDetail', - 'Point', - 'TimeInterval', - 'TimeSeries', - 'TypedValue', - 'CreateMigrationWorkflowRequest', - 'DeleteMigrationWorkflowRequest', - 'GetMigrationSubtaskRequest', - 'GetMigrationWorkflowRequest', - 'ListMigrationSubtasksRequest', - 'ListMigrationSubtasksResponse', - 'ListMigrationWorkflowsRequest', - 'ListMigrationWorkflowsResponse', - 'StartMigrationWorkflowRequest', - 'AzureSynapseDialect', - 'BigQueryDialect', - 'DB2Dialect', - 'Dialect', - 'GreenplumDialect', - 'HiveQLDialect', - 'MySQLDialect', - 'NameMappingKey', - 'NameMappingValue', - 'NetezzaDialect', - 'ObjectNameMapping', - 'ObjectNameMappingList', - 'OracleDialect', - 'PostgresqlDialect', - 'PrestoDialect', - 'RedshiftDialect', - 'SnowflakeDialect', - 'SourceEnv', - 'SparkSQLDialect', - 'SQLiteDialect', - 'SQLServerDialect', - 'TeradataDialect', - 'TranslationConfigDetails', - 'VerticaDialect', - 'Literal', - 'SourceEnvironment', - 'SourceSpec', - 'SourceTargetMapping', - 'TargetSpec', - 'TranslationDetails', - 'TranslationReportRecord', - 'GcsReportLogMessage', -) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/types/migration_entities.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/types/migration_entities.py deleted file mode 100644 index 897ae9c7f2bc..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/types/migration_entities.py +++ /dev/null @@ -1,463 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
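The re-exports above make every request and resource message importable straight from ``google.cloud.bigquery_migration_v2``. A small sketch of constructing one of the listed request types:

.. code-block:: python

    from google.cloud import bigquery_migration_v2

    # Messages are proto-plus wrappers; fields are plain keyword arguments.
    request = bigquery_migration_v2.ListMigrationWorkflowsRequest(
        parent="projects/123/locations/us",
        page_size=50,
    )
    print(request.parent)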
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.bigquery_migration_v2.types import migration_error_details -from google.cloud.bigquery_migration_v2.types import migration_metrics -from google.cloud.bigquery_migration_v2.types import translation_config -from google.cloud.bigquery_migration_v2.types import translation_details as gcbm_translation_details -from google.cloud.bigquery_migration_v2.types import translation_usability -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import error_details_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.bigquery.migration.v2', - manifest={ - 'MigrationWorkflow', - 'MigrationTask', - 'MigrationSubtask', - 'MigrationTaskResult', - 'TranslationTaskResult', - }, -) - - -class MigrationWorkflow(proto.Message): - r"""A migration workflow which specifies what needs to be done - for an EDW migration. - - Attributes: - name (str): - Output only. Immutable. Identifier. The unique identifier - for the migration workflow. The ID is server-generated. - - Example: ``projects/123/locations/us/workflows/345`` - display_name (str): - The display name of the workflow. This can be - set to give a workflow a descriptive name. There - is no guarantee or enforcement of uniqueness. - tasks (MutableMapping[str, google.cloud.bigquery_migration_v2.types.MigrationTask]): - The tasks in a workflow in a named map. The - name (i.e. key) has no meaning and is merely a - convenient way to address a specific task in a - workflow. - state (google.cloud.bigquery_migration_v2.types.MigrationWorkflow.State): - Output only. The status of the workflow. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Time when the workflow was created. - last_update_time (google.protobuf.timestamp_pb2.Timestamp): - Time when the workflow was last updated. - """ - class State(proto.Enum): - r"""Possible migration workflow states. - - Values: - STATE_UNSPECIFIED (0): - Workflow state is unspecified. - DRAFT (1): - Workflow is in draft status, i.e. tasks are - not yet eligible for execution. - RUNNING (2): - Workflow is running (i.e. tasks are eligible - for execution). - PAUSED (3): - Workflow is paused. Tasks currently in - progress may continue, but no further tasks will - be scheduled. - COMPLETED (4): - Workflow is complete. There should not be any - task in a non-terminal state, but if they are - (e.g. forced termination), they will not be - scheduled.
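As the state list above describes, a workflow sits in DRAFT until it is explicitly started and then moves to RUNNING. A hedged sketch of that create-then-start sequence (it assumes an existing ``client`` and an already-prepared ``workflow_message``):

.. code-block:: python

    # Create the workflow (a new workflow typically begins in DRAFT),
    # then make its tasks eligible for execution; starting is a no-op
    # if the workflow is already RUNNING.
    workflow = client.create_migration_workflow(
        parent="projects/123/locations/us",
        migration_workflow=workflow_message,
    )
    client.start_migration_workflow(name=workflow.name)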
- """ - STATE_UNSPECIFIED = 0 - DRAFT = 1 - RUNNING = 2 - PAUSED = 3 - COMPLETED = 4 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=6, - ) - tasks: MutableMapping[str, 'MigrationTask'] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=2, - message='MigrationTask', - ) - state: State = proto.Field( - proto.ENUM, - number=3, - enum=State, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - last_update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - - -class MigrationTask(proto.Message): - r"""A single task for a migration which has details about the - configuration of the task. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - translation_config_details (google.cloud.bigquery_migration_v2.types.TranslationConfigDetails): - Task configuration for CW Batch/Offline SQL - Translation. - - This field is a member of `oneof`_ ``task_details``. - translation_details (google.cloud.bigquery_migration_v2.types.TranslationDetails): - Task details for unified SQL Translation. - - This field is a member of `oneof`_ ``task_details``. - id (str): - Output only. Immutable. The unique identifier - for the migration task. The ID is - server-generated. - type_ (str): - The type of the task. This must be one of the supported task - types: Translation_Teradata2BQ, Translation_Redshift2BQ, - Translation_Bteq2BQ, Translation_Oracle2BQ, - Translation_HiveQL2BQ, Translation_SparkSQL2BQ, - Translation_Snowflake2BQ, Translation_Netezza2BQ, - Translation_AzureSynapse2BQ, Translation_Vertica2BQ, - Translation_SQLServer2BQ, Translation_Presto2BQ, - Translation_MySQL2BQ, Translation_Postgresql2BQ, - Translation_SQLite2BQ, Translation_Greenplum2BQ. - state (google.cloud.bigquery_migration_v2.types.MigrationTask.State): - Output only. The current state of the task. - processing_error (google.rpc.error_details_pb2.ErrorInfo): - Output only. An explanation that may be - populated when the task is in FAILED state. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Time when the task was created. - last_update_time (google.protobuf.timestamp_pb2.Timestamp): - Time when the task was last updated. - resource_error_details (MutableSequence[google.cloud.bigquery_migration_v2.types.ResourceErrorDetail]): - Output only. Provides details to errors and - issues encountered while processing the task. - Presence of error details does not mean that the - task failed. - resource_error_count (int): - The number or resources with errors. Note: This is not the - total number of errors as each resource can have more than - one error. This is used to indicate truncation by having a - ``resource_error_count`` that is higher than the size of - ``resource_error_details``. - metrics (MutableSequence[google.cloud.bigquery_migration_v2.types.TimeSeries]): - The metrics for the task. - task_result (google.cloud.bigquery_migration_v2.types.MigrationTaskResult): - Output only. The result of the task. - total_processing_error_count (int): - Count of all the processing errors in this - task and its subtasks. 
- total_resource_error_count (int): - Count of all the resource errors in this task - and its subtasks. - """ - class State(proto.Enum): - r"""Possible states of a migration task. - - Values: - STATE_UNSPECIFIED (0): - The state is unspecified. - PENDING (1): - The task is waiting for orchestration. - ORCHESTRATING (2): - The task is assigned to an orchestrator. - RUNNING (3): - The task is running, i.e. its subtasks are - ready for execution. - PAUSED (4): - The task is paused. Assigned subtasks can - continue, but no new subtasks will be scheduled. - SUCCEEDED (5): - The task finished successfully. - FAILED (6): - The task finished unsuccessfully. - """ - STATE_UNSPECIFIED = 0 - PENDING = 1 - ORCHESTRATING = 2 - RUNNING = 3 - PAUSED = 4 - SUCCEEDED = 5 - FAILED = 6 - - translation_config_details: translation_config.TranslationConfigDetails = proto.Field( - proto.MESSAGE, - number=14, - oneof='task_details', - message=translation_config.TranslationConfigDetails, - ) - translation_details: gcbm_translation_details.TranslationDetails = proto.Field( - proto.MESSAGE, - number=16, - oneof='task_details', - message=gcbm_translation_details.TranslationDetails, - ) - id: str = proto.Field( - proto.STRING, - number=1, - ) - type_: str = proto.Field( - proto.STRING, - number=2, - ) - state: State = proto.Field( - proto.ENUM, - number=4, - enum=State, - ) - processing_error: error_details_pb2.ErrorInfo = proto.Field( - proto.MESSAGE, - number=5, - message=error_details_pb2.ErrorInfo, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - last_update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=7, - message=timestamp_pb2.Timestamp, - ) - resource_error_details: MutableSequence[migration_error_details.ResourceErrorDetail] = proto.RepeatedField( - proto.MESSAGE, - number=17, - message=migration_error_details.ResourceErrorDetail, - ) - resource_error_count: int = proto.Field( - proto.INT32, - number=18, - ) - metrics: MutableSequence[migration_metrics.TimeSeries] = proto.RepeatedField( - proto.MESSAGE, - number=19, - message=migration_metrics.TimeSeries, - ) - task_result: 'MigrationTaskResult' = proto.Field( - proto.MESSAGE, - number=20, - message='MigrationTaskResult', - ) - total_processing_error_count: int = proto.Field( - proto.INT32, - number=21, - ) - total_resource_error_count: int = proto.Field( - proto.INT32, - number=22, - ) - - -class MigrationSubtask(proto.Message): - r"""A subtask for a migration which carries details about the - configuration of the subtask. The content of the details should - not matter to the end user, but is a contract between the - subtask creator and subtask worker. - - Attributes: - name (str): - Output only. Immutable. The resource name for the migration - subtask. The ID is server-generated. - - Example: - ``projects/123/locations/us/workflows/345/subtasks/678`` - task_id (str): - The unique ID of the task to which this - subtask belongs. - type_ (str): - The type of the Subtask. The migration - service does not check whether this is a known - type. It is up to the task creator (i.e. - orchestrator or worker) to ensure it only - creates subtasks for which there are compatible - workers polling for Subtasks. - state (google.cloud.bigquery_migration_v2.types.MigrationSubtask.State): - Output only. The current state of the - subtask. - processing_error (google.rpc.error_details_pb2.ErrorInfo): - Output only.
An explanation that may be - populated when the task is in FAILED state. - resource_error_details (MutableSequence[google.cloud.bigquery_migration_v2.types.ResourceErrorDetail]): - Output only. Provides details to errors and - issues encountered while processing the subtask. - Presence of error details does not mean that the - subtask failed. - resource_error_count (int): - The number of resources with errors. Note: This is not the - total number of errors as each resource can have more than - one error. This is used to indicate truncation by having a - ``resource_error_count`` that is higher than the size of - ``resource_error_details``. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Time when the subtask was created. - last_update_time (google.protobuf.timestamp_pb2.Timestamp): - Time when the subtask was last updated. - metrics (MutableSequence[google.cloud.bigquery_migration_v2.types.TimeSeries]): - The metrics for the subtask. - """ - class State(proto.Enum): - r"""Possible states of a migration subtask. - - Values: - STATE_UNSPECIFIED (0): - The state is unspecified. - ACTIVE (1): - The subtask is ready, i.e. it is ready for - execution. - RUNNING (2): - The subtask is running, i.e. it is assigned - to a worker for execution. - SUCCEEDED (3): - The subtask finished successfully. - FAILED (4): - The subtask finished unsuccessfully. - PAUSED (5): - The subtask is paused, i.e., it will not be - scheduled. If it was already assigned, it might - still finish but no new lease renewals will be - granted. - PENDING_DEPENDENCY (6): - The subtask is pending a dependency. It will - be scheduled once its dependencies are done. - """ - STATE_UNSPECIFIED = 0 - ACTIVE = 1 - RUNNING = 2 - SUCCEEDED = 3 - FAILED = 4 - PAUSED = 5 - PENDING_DEPENDENCY = 6 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - task_id: str = proto.Field( - proto.STRING, - number=2, - ) - type_: str = proto.Field( - proto.STRING, - number=3, - ) - state: State = proto.Field( - proto.ENUM, - number=5, - enum=State, - ) - processing_error: error_details_pb2.ErrorInfo = proto.Field( - proto.MESSAGE, - number=6, - message=error_details_pb2.ErrorInfo, - ) - resource_error_details: MutableSequence[migration_error_details.ResourceErrorDetail] = proto.RepeatedField( - proto.MESSAGE, - number=12, - message=migration_error_details.ResourceErrorDetail, - ) - resource_error_count: int = proto.Field( - proto.INT32, - number=13, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=7, - message=timestamp_pb2.Timestamp, - ) - last_update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=8, - message=timestamp_pb2.Timestamp, - ) - metrics: MutableSequence[migration_metrics.TimeSeries] = proto.RepeatedField( - proto.MESSAGE, - number=11, - message=migration_metrics.TimeSeries, - ) - - -class MigrationTaskResult(proto.Message): - r"""The migration task result. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - translation_task_result (google.cloud.bigquery_migration_v2.types.TranslationTaskResult): - Details specific to translation task types. - - This field is a member of `oneof`_ ``details``. - """ - - translation_task_result: 'TranslationTaskResult' = proto.Field( - proto.MESSAGE, - number=2, - oneof='details', - message='TranslationTaskResult', - ) - - -class TranslationTaskResult(proto.Message): - r"""Translation specific result details from the migration task.
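The workflow, task, and subtask messages above are plain proto-plus types and can be constructed directly. A minimal sketch, assuming placeholder names and IDs (none of the values below come from this module):

.. code-block:: python

    from google.cloud.bigquery_migration_v2 import types

    # Hypothetical sketch: a draft workflow with one named task. The map
    # key ("translate-step") is only a handle for addressing the task;
    # it carries no meaning of its own.
    workflow = types.MigrationWorkflow(
        display_name="demo-workflow",
        tasks={
            "translate-step": types.MigrationTask(
                type_="Translation_Teradata2BQ",
            ),
        },
    )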
- - Attributes: - translated_literals (MutableSequence[google.cloud.bigquery_migration_v2.types.Literal]): - The list of the translated literals. - report_log_messages (MutableSequence[google.cloud.bigquery_migration_v2.types.GcsReportLogMessage]): - The records from the aggregate CSV report for - a migration workflow. - """ - - translated_literals: MutableSequence[gcbm_translation_details.Literal] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=gcbm_translation_details.Literal, - ) - report_log_messages: MutableSequence[translation_usability.GcsReportLogMessage] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=translation_usability.GcsReportLogMessage, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/types/migration_error_details.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/types/migration_error_details.py deleted file mode 100644 index eefa55c92ad8..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/types/migration_error_details.py +++ /dev/null @@ -1,116 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.rpc import error_details_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.bigquery.migration.v2', - manifest={ - 'ResourceErrorDetail', - 'ErrorDetail', - 'ErrorLocation', - }, -) - - -class ResourceErrorDetail(proto.Message): - r"""Provides details for errors and the corresponding resources. - - Attributes: - resource_info (google.rpc.error_details_pb2.ResourceInfo): - Required. Information about the resource - where the error is located. - error_details (MutableSequence[google.cloud.bigquery_migration_v2.types.ErrorDetail]): - Required. The error details for the resource. - error_count (int): - Required. How many errors there are in total for the - resource. Truncation can be indicated by having an - ``error_count`` that is higher than the size of - ``error_details``. - """ - - resource_info: error_details_pb2.ResourceInfo = proto.Field( - proto.MESSAGE, - number=1, - message=error_details_pb2.ResourceInfo, - ) - error_details: MutableSequence['ErrorDetail'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='ErrorDetail', - ) - error_count: int = proto.Field( - proto.INT32, - number=3, - ) - - -class ErrorDetail(proto.Message): - r"""Provides details for errors, e.g. issues that were - encountered when processing a subtask. - - Attributes: - location (google.cloud.bigquery_migration_v2.types.ErrorLocation): - Optional. The exact location within the - resource (if applicable). - error_info (google.rpc.error_details_pb2.ErrorInfo): - Required. Describes the cause of the error - with structured detail.
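Since ``error_count`` may exceed the number of returned ``error_details``, truncation can be detected with a simple comparison. A sketch under that assumption (the helper name is hypothetical):

.. code-block:: python

    from google.cloud.bigquery_migration_v2 import types

    # Hypothetical helper: True when some errors were omitted from
    # error_details, per the error_count semantics documented above.
    def errors_truncated(detail: types.ResourceErrorDetail) -> bool:
        return detail.error_count > len(detail.error_details)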
- """ - - location: 'ErrorLocation' = proto.Field( - proto.MESSAGE, - number=1, - message='ErrorLocation', - ) - error_info: error_details_pb2.ErrorInfo = proto.Field( - proto.MESSAGE, - number=2, - message=error_details_pb2.ErrorInfo, - ) - - -class ErrorLocation(proto.Message): - r"""Holds information about where the error is located. - - Attributes: - line (int): - Optional. If applicable, denotes the line - where the error occurred. A zero value means - that there is no line information. - column (int): - Optional. If applicable, denotes the column - where the error occurred. A zero value means - that there is no columns information. - """ - - line: int = proto.Field( - proto.INT32, - number=1, - ) - column: int = proto.Field( - proto.INT32, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/types/migration_metrics.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/types/migration_metrics.py deleted file mode 100644 index fe2ae7fb9077..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/types/migration_metrics.py +++ /dev/null @@ -1,213 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.api import distribution_pb2 # type: ignore -from google.api import metric_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.bigquery.migration.v2', - manifest={ - 'TimeSeries', - 'Point', - 'TimeInterval', - 'TypedValue', - }, -) - - -class TimeSeries(proto.Message): - r"""The metrics object for a SubTask. - - Attributes: - metric (str): - Required. The name of the metric. - - If the metric is not known by the service yet, - it will be auto-created. - value_type (google.api.metric_pb2.ValueType): - Required. The value type of the time series. - metric_kind (google.api.metric_pb2.MetricKind): - Optional. The metric kind of the time series. - - If present, it must be the same as the metric kind of the - associated metric. If the associated metric's descriptor - must be auto-created, then this field specifies the metric - kind of the new descriptor and must be either ``GAUGE`` (the - default) or ``CUMULATIVE``. - points (MutableSequence[google.cloud.bigquery_migration_v2.types.Point]): - Required. The data points of this time series. When listing - time series, points are returned in reverse time order. - - When creating a time series, this field must contain exactly - one point and the point's type must be the same as the value - type of the associated metric. 
If the associated metric's - descriptor must be auto-created, then the value type of the - descriptor is determined by the point's type, which must be - ``BOOL``, ``INT64``, ``DOUBLE``, or ``DISTRIBUTION``. - """ - - metric: str = proto.Field( - proto.STRING, - number=1, - ) - value_type: metric_pb2.MetricDescriptor.ValueType = proto.Field( - proto.ENUM, - number=2, - enum=metric_pb2.MetricDescriptor.ValueType, - ) - metric_kind: metric_pb2.MetricDescriptor.MetricKind = proto.Field( - proto.ENUM, - number=3, - enum=metric_pb2.MetricDescriptor.MetricKind, - ) - points: MutableSequence['Point'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='Point', - ) - - -class Point(proto.Message): - r"""A single data point in a time series. - - Attributes: - interval (google.cloud.bigquery_migration_v2.types.TimeInterval): - The time interval to which the data point applies. For - ``GAUGE`` metrics, the start time does not need to be - supplied, but if it is supplied, it must equal the end time. - For ``DELTA`` metrics, the start and end time should specify - a non-zero interval, with subsequent points specifying - contiguous and non-overlapping intervals. For ``CUMULATIVE`` - metrics, the start and end time should specify a non-zero - interval, with subsequent points specifying the same start - time and increasing end times, until an event resets the - cumulative value to zero and sets a new start time for the - following points. - value (google.cloud.bigquery_migration_v2.types.TypedValue): - The value of the data point. - """ - - interval: 'TimeInterval' = proto.Field( - proto.MESSAGE, - number=1, - message='TimeInterval', - ) - value: 'TypedValue' = proto.Field( - proto.MESSAGE, - number=2, - message='TypedValue', - ) - - -class TimeInterval(proto.Message): - r"""A time interval extending just after a start time through an - end time. If the start time is the same as the end time, then - the interval represents a single point in time. - - Attributes: - start_time (google.protobuf.timestamp_pb2.Timestamp): - Optional. The beginning of the time interval. - The default value for the start time is the end - time. The start time must not be later than the - end time. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Required. The end of the time interval. - """ - - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - - -class TypedValue(proto.Message): - r"""A single strongly-typed value. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - bool_value (bool): - A Boolean value: ``true`` or ``false``. - - This field is a member of `oneof`_ ``value``. - int64_value (int): - A 64-bit integer. Its range is approximately - ``+/-9.2x10^18``. - - This field is a member of `oneof`_ ``value``. - double_value (float): - A 64-bit double-precision floating-point number. Its - magnitude is approximately ``+/-10^(+/-300)`` and it has 16 - significant digits of precision. - - This field is a member of `oneof`_ ``value``. - string_value (str): - A variable-length string value. - - This field is a member of `oneof`_ ``value``. 
- distribution_value (google.api.distribution_pb2.Distribution): - A distribution value. - - This field is a member of `oneof`_ ``value``. - """ - - bool_value: bool = proto.Field( - proto.BOOL, - number=1, - oneof='value', - ) - int64_value: int = proto.Field( - proto.INT64, - number=2, - oneof='value', - ) - double_value: float = proto.Field( - proto.DOUBLE, - number=3, - oneof='value', - ) - string_value: str = proto.Field( - proto.STRING, - number=4, - oneof='value', - ) - distribution_value: distribution_pb2.Distribution = proto.Field( - proto.MESSAGE, - number=5, - oneof='value', - message=distribution_pb2.Distribution, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/types/migration_service.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/types/migration_service.py deleted file mode 100644 index 7d2c07adf4c1..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/types/migration_service.py +++ /dev/null @@ -1,288 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.bigquery_migration_v2.types import migration_entities -from google.protobuf import field_mask_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.bigquery.migration.v2', - manifest={ - 'CreateMigrationWorkflowRequest', - 'GetMigrationWorkflowRequest', - 'ListMigrationWorkflowsRequest', - 'ListMigrationWorkflowsResponse', - 'DeleteMigrationWorkflowRequest', - 'StartMigrationWorkflowRequest', - 'GetMigrationSubtaskRequest', - 'ListMigrationSubtasksRequest', - 'ListMigrationSubtasksResponse', - }, -) - - -class CreateMigrationWorkflowRequest(proto.Message): - r"""Request to create a migration workflow resource. - - Attributes: - parent (str): - Required. The name of the project to which this migration - workflow belongs. Example: ``projects/foo/locations/bar`` - migration_workflow (google.cloud.bigquery_migration_v2.types.MigrationWorkflow): - Required. The migration workflow to create. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - migration_workflow: migration_entities.MigrationWorkflow = proto.Field( - proto.MESSAGE, - number=2, - message=migration_entities.MigrationWorkflow, - ) - - -class GetMigrationWorkflowRequest(proto.Message): - r"""A request to get a previously created migration workflow. - - Attributes: - name (str): - Required. The unique identifier for the migration workflow. - Example: ``projects/123/locations/us/workflows/1234`` - read_mask (google.protobuf.field_mask_pb2.FieldMask): - The list of fields to be retrieved. 
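Putting the metric types together: for a ``GAUGE``-style point the start time may be omitted, so only ``end_time`` is set below. A sketch with a placeholder metric name:

.. code-block:: python

    from google.api import metric_pb2
    from google.cloud.bigquery_migration_v2 import types
    from google.protobuf import timestamp_pb2

    # Hypothetical sketch: one GAUGE-style INT64 data point; the metric
    # name "num_translated_statements" is a placeholder.
    now = timestamp_pb2.Timestamp()
    now.GetCurrentTime()
    series = types.TimeSeries(
        metric="num_translated_statements",
        value_type=metric_pb2.MetricDescriptor.ValueType.INT64,
        points=[
            types.Point(
                interval=types.TimeInterval(end_time=now),
                value=types.TypedValue(int64_value=42),
            )
        ],
    )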
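These request messages are typically passed to the generated ``MigrationServiceClient`` rather than used on their own. A hedged sketch (project and location values are placeholders):

.. code-block:: python

    from google.cloud import bigquery_migration_v2

    # Hypothetical sketch: create a workflow, then fetch it back by name.
    client = bigquery_migration_v2.MigrationServiceClient()
    created = client.create_migration_workflow(
        parent="projects/my-project/locations/us",  # placeholder
        migration_workflow=bigquery_migration_v2.MigrationWorkflow(
            display_name="demo-workflow",
        ),
    )
    fetched = client.get_migration_workflow(name=created.name)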
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - read_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class ListMigrationWorkflowsRequest(proto.Message): - r"""A request to list previously created migration workflows. - - Attributes: - parent (str): - Required. The project and location of the migration - workflows to list. Example: ``projects/123/locations/us`` - read_mask (google.protobuf.field_mask_pb2.FieldMask): - The list of fields to be retrieved. - page_size (int): - The maximum number of migration workflows to - return. The service may return fewer than this - number. - page_token (str): - A page token, received from previous - ``ListMigrationWorkflows`` call. Provide this to retrieve - the subsequent page. - - When paginating, all other parameters provided to - ``ListMigrationWorkflows`` must match the call that provided - the page token. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - read_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - page_token: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListMigrationWorkflowsResponse(proto.Message): - r"""Response object for a ``ListMigrationWorkflows`` call. - - Attributes: - migration_workflows (MutableSequence[google.cloud.bigquery_migration_v2.types.MigrationWorkflow]): - The migration workflows for the specified - project / location. - next_page_token (str): - A token, which can be sent as ``page_token`` to retrieve the - next page. If this field is omitted, there are no subsequent - pages. - """ - - @property - def raw_page(self): - return self - - migration_workflows: MutableSequence[migration_entities.MigrationWorkflow] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=migration_entities.MigrationWorkflow, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteMigrationWorkflowRequest(proto.Message): - r"""A request to delete a previously created migration workflow. - - Attributes: - name (str): - Required. The unique identifier for the migration workflow. - Example: ``projects/123/locations/us/workflows/1234`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class StartMigrationWorkflowRequest(proto.Message): - r"""A request to start a previously created migration workflow. - - Attributes: - name (str): - Required. The unique identifier for the migration workflow. - Example: ``projects/123/locations/us/workflows/1234`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class GetMigrationSubtaskRequest(proto.Message): - r"""A request to get a previously created migration subtasks. - - Attributes: - name (str): - Required. The unique identifier for the migration subtask. - Example: - ``projects/123/locations/us/workflows/1234/subtasks/543`` - read_mask (google.protobuf.field_mask_pb2.FieldMask): - Optional. The list of fields to be retrieved. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - read_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class ListMigrationSubtasksRequest(proto.Message): - r"""A request to list previously created migration subtasks. - - Attributes: - parent (str): - Required. The migration task of the subtasks to list. 
- Example: ``projects/123/locations/us/workflows/1234`` - read_mask (google.protobuf.field_mask_pb2.FieldMask): - Optional. The list of fields to be retrieved. - page_size (int): - Optional. The maximum number of migration - tasks to return. The service may return fewer - than this number. - page_token (str): - Optional. A page token, received from a previous - ``ListMigrationSubtasks`` call. Provide this to retrieve the - subsequent page. - - When paginating, all other parameters provided to - ``ListMigrationSubtasks`` must match the call that provided - the page token. - filter (str): - Optional. The filter to apply. This can be used to get the - subtasks of a specific task in a workflow, e.g. - ``migration_task = "ab012"`` where ``"ab012"`` is the task - ID (not the name in the named map). - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - read_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - page_token: str = proto.Field( - proto.STRING, - number=4, - ) - filter: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListMigrationSubtasksResponse(proto.Message): - r"""Response object for a ``ListMigrationSubtasks`` call. - - Attributes: - migration_subtasks (MutableSequence[google.cloud.bigquery_migration_v2.types.MigrationSubtask]): - The migration subtasks for the specified - task. - next_page_token (str): - A token, which can be sent as ``page_token`` to retrieve the - next page. If this field is omitted, there are no subsequent - pages. - """ - - @property - def raw_page(self): - return self - - migration_subtasks: MutableSequence[migration_entities.MigrationSubtask] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=migration_entities.MigrationSubtask, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/types/translation_config.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/types/translation_config.py deleted file mode 100644 index 8cb7546352ff..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/types/translation_config.py +++ /dev/null @@ -1,622 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
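The ``page_token`` / ``next_page_token`` pair documented above is normally handled by the client's pager, so iterating the response transparently fetches subsequent pages. A sketch with a placeholder parent:

.. code-block:: python

    from google.cloud import bigquery_migration_v2

    # Hypothetical sketch: list every workflow under a location; the
    # returned pager follows next_page_token behind the scenes.
    client = bigquery_migration_v2.MigrationServiceClient()
    for workflow in client.list_migration_workflows(
        parent="projects/my-project/locations/us"  # placeholder
    ):
        print(workflow.name, workflow.state)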
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.bigquery.migration.v2', - manifest={ - 'TranslationConfigDetails', - 'Dialect', - 'BigQueryDialect', - 'HiveQLDialect', - 'RedshiftDialect', - 'TeradataDialect', - 'OracleDialect', - 'SparkSQLDialect', - 'SnowflakeDialect', - 'NetezzaDialect', - 'AzureSynapseDialect', - 'VerticaDialect', - 'SQLServerDialect', - 'PostgresqlDialect', - 'PrestoDialect', - 'MySQLDialect', - 'DB2Dialect', - 'SQLiteDialect', - 'GreenplumDialect', - 'ObjectNameMappingList', - 'ObjectNameMapping', - 'NameMappingKey', - 'NameMappingValue', - 'SourceEnv', - }, -) - - -class TranslationConfigDetails(proto.Message): - r"""The translation config to capture necessary settings for a - translation task and subtask. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - gcs_source_path (str): - The Cloud Storage path for a directory of - files to translate in a task. - - This field is a member of `oneof`_ ``source_location``. - gcs_target_path (str): - The Cloud Storage path to write back the - corresponding input files to. - - This field is a member of `oneof`_ ``target_location``. - name_mapping_list (google.cloud.bigquery_migration_v2.types.ObjectNameMappingList): - The mapping of objects to their desired - output names in list form. - - This field is a member of `oneof`_ ``output_name_mapping``. - source_dialect (google.cloud.bigquery_migration_v2.types.Dialect): - The dialect of the input files. - target_dialect (google.cloud.bigquery_migration_v2.types.Dialect): - The target dialect for the engine to - translate the input to. - source_env (google.cloud.bigquery_migration_v2.types.SourceEnv): - The default source environment values for the - translation. - request_source (str): - The indicator to show translation request - initiator. - target_types (MutableSequence[str]): - The types of output to generate, e.g. sql, - metadata etc. If not specified, a default set of - targets will be generated. Some additional - target types may be slower to generate. See the - documentation for the set of available target - types. - """ - - gcs_source_path: str = proto.Field( - proto.STRING, - number=1, - oneof='source_location', - ) - gcs_target_path: str = proto.Field( - proto.STRING, - number=2, - oneof='target_location', - ) - name_mapping_list: 'ObjectNameMappingList' = proto.Field( - proto.MESSAGE, - number=5, - oneof='output_name_mapping', - message='ObjectNameMappingList', - ) - source_dialect: 'Dialect' = proto.Field( - proto.MESSAGE, - number=3, - message='Dialect', - ) - target_dialect: 'Dialect' = proto.Field( - proto.MESSAGE, - number=4, - message='Dialect', - ) - source_env: 'SourceEnv' = proto.Field( - proto.MESSAGE, - number=6, - message='SourceEnv', - ) - request_source: str = proto.Field( - proto.STRING, - number=8, - ) - target_types: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=9, - ) - - -class Dialect(proto.Message): - r"""The possible dialect options for translation. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - bigquery_dialect (google.cloud.bigquery_migration_v2.types.BigQueryDialect): - The BigQuery dialect - - This field is a member of `oneof`_ ``dialect_value``. - hiveql_dialect (google.cloud.bigquery_migration_v2.types.HiveQLDialect): - The HiveQL dialect - - This field is a member of `oneof`_ ``dialect_value``. - redshift_dialect (google.cloud.bigquery_migration_v2.types.RedshiftDialect): - The Redshift dialect - - This field is a member of `oneof`_ ``dialect_value``. - teradata_dialect (google.cloud.bigquery_migration_v2.types.TeradataDialect): - The Teradata dialect - - This field is a member of `oneof`_ ``dialect_value``. - oracle_dialect (google.cloud.bigquery_migration_v2.types.OracleDialect): - The Oracle dialect - - This field is a member of `oneof`_ ``dialect_value``. - sparksql_dialect (google.cloud.bigquery_migration_v2.types.SparkSQLDialect): - The SparkSQL dialect - - This field is a member of `oneof`_ ``dialect_value``. - snowflake_dialect (google.cloud.bigquery_migration_v2.types.SnowflakeDialect): - The Snowflake dialect - - This field is a member of `oneof`_ ``dialect_value``. - netezza_dialect (google.cloud.bigquery_migration_v2.types.NetezzaDialect): - The Netezza dialect - - This field is a member of `oneof`_ ``dialect_value``. - azure_synapse_dialect (google.cloud.bigquery_migration_v2.types.AzureSynapseDialect): - The Azure Synapse dialect - - This field is a member of `oneof`_ ``dialect_value``. - vertica_dialect (google.cloud.bigquery_migration_v2.types.VerticaDialect): - The Vertica dialect - - This field is a member of `oneof`_ ``dialect_value``. - sql_server_dialect (google.cloud.bigquery_migration_v2.types.SQLServerDialect): - The SQL Server dialect - - This field is a member of `oneof`_ ``dialect_value``. - postgresql_dialect (google.cloud.bigquery_migration_v2.types.PostgresqlDialect): - The Postgresql dialect - - This field is a member of `oneof`_ ``dialect_value``. - presto_dialect (google.cloud.bigquery_migration_v2.types.PrestoDialect): - The Presto dialect - - This field is a member of `oneof`_ ``dialect_value``. - mysql_dialect (google.cloud.bigquery_migration_v2.types.MySQLDialect): - The MySQL dialect - - This field is a member of `oneof`_ ``dialect_value``. - db2_dialect (google.cloud.bigquery_migration_v2.types.DB2Dialect): - DB2 dialect - - This field is a member of `oneof`_ ``dialect_value``. - sqlite_dialect (google.cloud.bigquery_migration_v2.types.SQLiteDialect): - SQLite dialect - - This field is a member of `oneof`_ ``dialect_value``. - greenplum_dialect (google.cloud.bigquery_migration_v2.types.GreenplumDialect): - Greenplum dialect - - This field is a member of `oneof`_ ``dialect_value``. 
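Because ``dialect_value`` is a oneof, assigning one dialect member clears any other. A sketch pairing a Teradata source with a BigQuery target (the variable names are illustrative only):

.. code-block:: python

    from google.cloud.bigquery_migration_v2 import types

    # Hypothetical sketch: only the last assigned oneof member is kept
    # on each Dialect message.
    source = types.Dialect(
        teradata_dialect=types.TeradataDialect(
            mode=types.TeradataDialect.Mode.SQL,
        ),
    )
    target = types.Dialect(bigquery_dialect=types.BigQueryDialect())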
- """ - - bigquery_dialect: 'BigQueryDialect' = proto.Field( - proto.MESSAGE, - number=1, - oneof='dialect_value', - message='BigQueryDialect', - ) - hiveql_dialect: 'HiveQLDialect' = proto.Field( - proto.MESSAGE, - number=2, - oneof='dialect_value', - message='HiveQLDialect', - ) - redshift_dialect: 'RedshiftDialect' = proto.Field( - proto.MESSAGE, - number=3, - oneof='dialect_value', - message='RedshiftDialect', - ) - teradata_dialect: 'TeradataDialect' = proto.Field( - proto.MESSAGE, - number=4, - oneof='dialect_value', - message='TeradataDialect', - ) - oracle_dialect: 'OracleDialect' = proto.Field( - proto.MESSAGE, - number=5, - oneof='dialect_value', - message='OracleDialect', - ) - sparksql_dialect: 'SparkSQLDialect' = proto.Field( - proto.MESSAGE, - number=6, - oneof='dialect_value', - message='SparkSQLDialect', - ) - snowflake_dialect: 'SnowflakeDialect' = proto.Field( - proto.MESSAGE, - number=7, - oneof='dialect_value', - message='SnowflakeDialect', - ) - netezza_dialect: 'NetezzaDialect' = proto.Field( - proto.MESSAGE, - number=8, - oneof='dialect_value', - message='NetezzaDialect', - ) - azure_synapse_dialect: 'AzureSynapseDialect' = proto.Field( - proto.MESSAGE, - number=9, - oneof='dialect_value', - message='AzureSynapseDialect', - ) - vertica_dialect: 'VerticaDialect' = proto.Field( - proto.MESSAGE, - number=10, - oneof='dialect_value', - message='VerticaDialect', - ) - sql_server_dialect: 'SQLServerDialect' = proto.Field( - proto.MESSAGE, - number=11, - oneof='dialect_value', - message='SQLServerDialect', - ) - postgresql_dialect: 'PostgresqlDialect' = proto.Field( - proto.MESSAGE, - number=12, - oneof='dialect_value', - message='PostgresqlDialect', - ) - presto_dialect: 'PrestoDialect' = proto.Field( - proto.MESSAGE, - number=13, - oneof='dialect_value', - message='PrestoDialect', - ) - mysql_dialect: 'MySQLDialect' = proto.Field( - proto.MESSAGE, - number=14, - oneof='dialect_value', - message='MySQLDialect', - ) - db2_dialect: 'DB2Dialect' = proto.Field( - proto.MESSAGE, - number=15, - oneof='dialect_value', - message='DB2Dialect', - ) - sqlite_dialect: 'SQLiteDialect' = proto.Field( - proto.MESSAGE, - number=16, - oneof='dialect_value', - message='SQLiteDialect', - ) - greenplum_dialect: 'GreenplumDialect' = proto.Field( - proto.MESSAGE, - number=17, - oneof='dialect_value', - message='GreenplumDialect', - ) - - -class BigQueryDialect(proto.Message): - r"""The dialect definition for BigQuery. - """ - - -class HiveQLDialect(proto.Message): - r"""The dialect definition for HiveQL. - """ - - -class RedshiftDialect(proto.Message): - r"""The dialect definition for Redshift. - """ - - -class TeradataDialect(proto.Message): - r"""The dialect definition for Teradata. - - Attributes: - mode (google.cloud.bigquery_migration_v2.types.TeradataDialect.Mode): - Which Teradata sub-dialect mode the user - specifies. - """ - class Mode(proto.Enum): - r"""The sub-dialect options for Teradata. - - Values: - MODE_UNSPECIFIED (0): - Unspecified mode. - SQL (1): - Teradata SQL mode. - BTEQ (2): - BTEQ mode (which includes SQL). - """ - MODE_UNSPECIFIED = 0 - SQL = 1 - BTEQ = 2 - - mode: Mode = proto.Field( - proto.ENUM, - number=1, - enum=Mode, - ) - - -class OracleDialect(proto.Message): - r"""The dialect definition for Oracle. - """ - - -class SparkSQLDialect(proto.Message): - r"""The dialect definition for SparkSQL. - """ - - -class SnowflakeDialect(proto.Message): - r"""The dialect definition for Snowflake. 
- """ - - -class NetezzaDialect(proto.Message): - r"""The dialect definition for Netezza. - """ - - -class AzureSynapseDialect(proto.Message): - r"""The dialect definition for Azure Synapse. - """ - - -class VerticaDialect(proto.Message): - r"""The dialect definition for Vertica. - """ - - -class SQLServerDialect(proto.Message): - r"""The dialect definition for SQL Server. - """ - - -class PostgresqlDialect(proto.Message): - r"""The dialect definition for Postgresql. - """ - - -class PrestoDialect(proto.Message): - r"""The dialect definition for Presto. - """ - - -class MySQLDialect(proto.Message): - r"""The dialect definition for MySQL. - """ - - -class DB2Dialect(proto.Message): - r"""The dialect definition for DB2. - """ - - -class SQLiteDialect(proto.Message): - r"""The dialect definition for SQLite. - """ - - -class GreenplumDialect(proto.Message): - r"""The dialect definition for Greenplum. - """ - - -class ObjectNameMappingList(proto.Message): - r"""Represents a map of name mappings using a list of key:value - proto messages of existing name to desired output name. - - Attributes: - name_map (MutableSequence[google.cloud.bigquery_migration_v2.types.ObjectNameMapping]): - The elements of the object name map. - """ - - name_map: MutableSequence['ObjectNameMapping'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='ObjectNameMapping', - ) - - -class ObjectNameMapping(proto.Message): - r"""Represents a key-value pair of NameMappingKey to - NameMappingValue to represent the mapping of SQL names from the - input value to desired output. - - Attributes: - source (google.cloud.bigquery_migration_v2.types.NameMappingKey): - The name of the object in source that is - being mapped. - target (google.cloud.bigquery_migration_v2.types.NameMappingValue): - The desired target name of the object that is - being mapped. - """ - - source: 'NameMappingKey' = proto.Field( - proto.MESSAGE, - number=1, - message='NameMappingKey', - ) - target: 'NameMappingValue' = proto.Field( - proto.MESSAGE, - number=2, - message='NameMappingValue', - ) - - -class NameMappingKey(proto.Message): - r"""The potential components of a full name mapping that will be - mapped during translation in the source data warehouse. - - Attributes: - type_ (google.cloud.bigquery_migration_v2.types.NameMappingKey.Type): - The type of object that is being mapped. - database (str): - The database name (BigQuery project ID - equivalent in the source data warehouse). - schema (str): - The schema name (BigQuery dataset equivalent - in the source data warehouse). - relation (str): - The relation name (BigQuery table or view - equivalent in the source data warehouse). - attribute (str): - The attribute name (BigQuery column - equivalent in the source data warehouse). - """ - class Type(proto.Enum): - r"""The type of the object that is being mapped. - - Values: - TYPE_UNSPECIFIED (0): - Unspecified name mapping type. - DATABASE (1): - The object being mapped is a database. - SCHEMA (2): - The object being mapped is a schema. - RELATION (3): - The object being mapped is a relation. - ATTRIBUTE (4): - The object being mapped is an attribute. - RELATION_ALIAS (5): - The object being mapped is a relation alias. - ATTRIBUTE_ALIAS (6): - The object being mapped is a an attribute - alias. - FUNCTION (7): - The object being mapped is a function. 
- """ - TYPE_UNSPECIFIED = 0 - DATABASE = 1 - SCHEMA = 2 - RELATION = 3 - ATTRIBUTE = 4 - RELATION_ALIAS = 5 - ATTRIBUTE_ALIAS = 6 - FUNCTION = 7 - - type_: Type = proto.Field( - proto.ENUM, - number=1, - enum=Type, - ) - database: str = proto.Field( - proto.STRING, - number=2, - ) - schema: str = proto.Field( - proto.STRING, - number=3, - ) - relation: str = proto.Field( - proto.STRING, - number=4, - ) - attribute: str = proto.Field( - proto.STRING, - number=5, - ) - - -class NameMappingValue(proto.Message): - r"""The potential components of a full name mapping that will be - mapped during translation in the target data warehouse. - - Attributes: - database (str): - The database name (BigQuery project ID - equivalent in the target data warehouse). - schema (str): - The schema name (BigQuery dataset equivalent - in the target data warehouse). - relation (str): - The relation name (BigQuery table or view - equivalent in the target data warehouse). - attribute (str): - The attribute name (BigQuery column - equivalent in the target data warehouse). - """ - - database: str = proto.Field( - proto.STRING, - number=1, - ) - schema: str = proto.Field( - proto.STRING, - number=2, - ) - relation: str = proto.Field( - proto.STRING, - number=3, - ) - attribute: str = proto.Field( - proto.STRING, - number=4, - ) - - -class SourceEnv(proto.Message): - r"""Represents the default source environment values for the - translation. - - Attributes: - default_database (str): - The default database name to fully qualify - SQL objects when their database name is missing. - schema_search_path (MutableSequence[str]): - The schema search path. When SQL objects are - missing schema name, translation engine will - search through this list to find the value. - metadata_store_dataset (str): - Optional. Expects a valid BigQuery dataset ID that exists, - e.g., project-123.metadata_store_123. If specified, - translation will search and read the required schema - information from a metadata store in this dataset. If - metadata store doesn't exist, translation will parse the - metadata file and upload the schema info to a temp table in - the dataset to speed up future translation jobs. - """ - - default_database: str = proto.Field( - proto.STRING, - number=1, - ) - schema_search_path: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - metadata_store_dataset: str = proto.Field( - proto.STRING, - number=3, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/types/translation_details.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/types/translation_details.py deleted file mode 100644 index 3c9314c979cf..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/types/translation_details.py +++ /dev/null @@ -1,242 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.bigquery.migration.v2', - manifest={ - 'TranslationDetails', - 'SourceTargetMapping', - 'SourceSpec', - 'TargetSpec', - 'Literal', - 'SourceEnvironment', - }, -) - - -class TranslationDetails(proto.Message): - r"""The translation details to capture the necessary settings for - a translation job. - - Attributes: - source_target_mapping (MutableSequence[google.cloud.bigquery_migration_v2.types.SourceTargetMapping]): - The mapping from source to target SQL. - target_base_uri (str): - The base URI for all writes to persistent - storage. - source_environment (google.cloud.bigquery_migration_v2.types.SourceEnvironment): - The default source environment values for the - translation. - target_return_literals (MutableSequence[str]): - The list of literal targets that will be directly returned - to the response. Each entry consists of the constructed - path, EXCLUDING the base path. Not providing a - target_base_uri will prevent writing to persistent storage. - target_types (MutableSequence[str]): - The types of output to generate, e.g. sql, metadata, - lineage_from_sql_scripts, etc. If not specified, a default - set of targets will be generated. Some additional target - types may be slower to generate. See the documentation for - the set of available target types. - """ - - source_target_mapping: MutableSequence['SourceTargetMapping'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='SourceTargetMapping', - ) - target_base_uri: str = proto.Field( - proto.STRING, - number=2, - ) - source_environment: 'SourceEnvironment' = proto.Field( - proto.MESSAGE, - number=3, - message='SourceEnvironment', - ) - target_return_literals: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=4, - ) - target_types: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=5, - ) - - -class SourceTargetMapping(proto.Message): - r"""Represents one mapping from a source SQL to a target SQL. - - Attributes: - source_spec (google.cloud.bigquery_migration_v2.types.SourceSpec): - The source SQL or the path to it. - target_spec (google.cloud.bigquery_migration_v2.types.TargetSpec): - The target SQL or the path for it. - """ - - source_spec: 'SourceSpec' = proto.Field( - proto.MESSAGE, - number=1, - message='SourceSpec', - ) - target_spec: 'TargetSpec' = proto.Field( - proto.MESSAGE, - number=2, - message='TargetSpec', - ) - - -class SourceSpec(proto.Message): - r"""Represents one path to the location that holds source data. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - base_uri (str): - The base URI for all files to be read in as - sources for translation. - - This field is a member of `oneof`_ ``source``. - literal (google.cloud.bigquery_migration_v2.types.Literal): - Source literal. - - This field is a member of `oneof`_ ``source``. - encoding (str): - Optional. Specifies the encoding of the SQL - bytes.
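The ``source`` oneof accepts either a Cloud Storage ``base_uri`` or an inline ``literal``. A sketch of the inline form (the path, SQL, and encoding values are placeholders):

.. code-block:: python

    from google.cloud.bigquery_migration_v2 import types

    # Hypothetical sketch: inline SQL as the translation source.
    spec = types.SourceSpec(
        literal=types.Literal(
            relative_path="query.sql",  # placeholder identifier
            literal_string="SELECT 1;",
        ),
        encoding="UTF-8",
    )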
- """ - - base_uri: str = proto.Field( - proto.STRING, - number=1, - oneof='source', - ) - literal: 'Literal' = proto.Field( - proto.MESSAGE, - number=2, - oneof='source', - message='Literal', - ) - encoding: str = proto.Field( - proto.STRING, - number=3, - ) - - -class TargetSpec(proto.Message): - r"""Represents one path to the location that holds target data. - - Attributes: - relative_path (str): - The relative path for the target data. Given source file - ``base_uri/input/sql``, the output would be - ``target_base_uri/sql/relative_path/input.sql``. - """ - - relative_path: str = proto.Field( - proto.STRING, - number=1, - ) - - -class Literal(proto.Message): - r"""Literal data. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - literal_string (str): - Literal string data. - - This field is a member of `oneof`_ ``literal_data``. - literal_bytes (bytes): - Literal byte data. - - This field is a member of `oneof`_ ``literal_data``. - relative_path (str): - Required. The identifier of the literal - entry. - """ - - literal_string: str = proto.Field( - proto.STRING, - number=2, - oneof='literal_data', - ) - literal_bytes: bytes = proto.Field( - proto.BYTES, - number=3, - oneof='literal_data', - ) - relative_path: str = proto.Field( - proto.STRING, - number=1, - ) - - -class SourceEnvironment(proto.Message): - r"""Represents the default source environment values for the - translation. - - Attributes: - default_database (str): - The default database name to fully qualify - SQL objects when their database name is missing. - schema_search_path (MutableSequence[str]): - The schema search path. When SQL objects are - missing schema name, translation engine will - search through this list to find the value. - metadata_store_dataset (str): - Optional. Expects a validQ BigQuery dataset ID that exists, - e.g., project-123.metadata_store_123. If specified, - translation will search and read the required schema - information from a metadata store in this dataset. If - metadata store doesn't exist, translation will parse the - metadata file and upload the schema info to a temp table in - the dataset to speed up future translation jobs. - """ - - default_database: str = proto.Field( - proto.STRING, - number=1, - ) - schema_search_path: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - metadata_store_dataset: str = proto.Field( - proto.STRING, - number=3, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/types/translation_suggestion.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/types/translation_suggestion.py deleted file mode 100644 index 6623516d71eb..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/types/translation_suggestion.py +++ /dev/null @@ -1,92 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.bigquery.migration.v2', - manifest={ - 'TranslationReportRecord', - }, -) - - -class TranslationReportRecord(proto.Message): - r"""Details about a record. - - Attributes: - severity (google.cloud.bigquery_migration_v2.types.TranslationReportRecord.Severity): - Severity of the translation record. - script_line (int): - Specifies the row from the source text where - the error occurred (0 based). Example: 2 - script_column (int): - Specifies the column from the source texts - where the error occurred. (0 based) example: 6 - category (str): - Category of the error/warning. Example: - SyntaxError - message (str): - Detailed message of the record. - """ - class Severity(proto.Enum): - r"""The severity type of the record. - - Values: - SEVERITY_UNSPECIFIED (0): - SeverityType not specified. - INFO (1): - INFO type. - WARNING (2): - WARNING type. The translated query may still - provide useful information if all the report - records are WARNING. - ERROR (3): - ERROR type. Translation failed. - """ - SEVERITY_UNSPECIFIED = 0 - INFO = 1 - WARNING = 2 - ERROR = 3 - - severity: Severity = proto.Field( - proto.ENUM, - number=1, - enum=Severity, - ) - script_line: int = proto.Field( - proto.INT32, - number=2, - ) - script_column: int = proto.Field( - proto.INT32, - number=3, - ) - category: str = proto.Field( - proto.STRING, - number=4, - ) - message: str = proto.Field( - proto.STRING, - number=5, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/types/translation_usability.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/types/translation_usability.py deleted file mode 100644 index 047c80d93b13..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/google/cloud/bigquery_migration_v2/types/translation_usability.py +++ /dev/null @@ -1,114 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
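Per the ``Severity`` semantics above, a translated query may still be usable when no record is ``ERROR``. A sketch of that check (the helper name is hypothetical):

.. code-block:: python

    from google.cloud.bigquery_migration_v2 import types

    # Hypothetical helper: usable when every record is WARNING or INFO.
    def translation_usable(records) -> bool:
        error = types.TranslationReportRecord.Severity.ERROR
        return all(record.severity != error for record in records)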
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.bigquery.migration.v2', - manifest={ - 'GcsReportLogMessage', - }, -) - - -class GcsReportLogMessage(proto.Message): - r"""A record in the aggregate CSV report for a migration workflow - - Attributes: - severity (str): - Severity of the translation record. - category (str): - Category of the error/warning. Example: - SyntaxError - file_path (str): - The file path in which the error occurred - filename (str): - The file name in which the error occurred - source_script_line (int): - Specifies the row from the source text where - the error occurred (0 based, -1 for messages - without line location). Example: 2 - source_script_column (int): - Specifies the column from the source texts - where the error occurred. (0 based, -1 for - messages without column location) example: 6 - message (str): - Detailed message of the record. - script_context (str): - The script context (obfuscated) in which the - error occurred - action (str): - Category of the error/warning. Example: - SyntaxError - effect (str): - Effect of the error/warning. Example: - COMPATIBILITY - object_name (str): - Name of the affected object in the log - message. - """ - - severity: str = proto.Field( - proto.STRING, - number=1, - ) - category: str = proto.Field( - proto.STRING, - number=2, - ) - file_path: str = proto.Field( - proto.STRING, - number=3, - ) - filename: str = proto.Field( - proto.STRING, - number=4, - ) - source_script_line: int = proto.Field( - proto.INT32, - number=5, - ) - source_script_column: int = proto.Field( - proto.INT32, - number=6, - ) - message: str = proto.Field( - proto.STRING, - number=7, - ) - script_context: str = proto.Field( - proto.STRING, - number=8, - ) - action: str = proto.Field( - proto.STRING, - number=9, - ) - effect: str = proto.Field( - proto.STRING, - number=10, - ) - object_name: str = proto.Field( - proto.STRING, - number=11, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/mypy.ini b/owl-bot-staging/google-cloud-bigquery-migration/v2/mypy.ini deleted file mode 100644 index 574c5aed394b..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/noxfile.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/noxfile.py deleted file mode 100644 index c7b3d6e229ed..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/noxfile.py +++ /dev/null @@ -1,280 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -import pathlib -import re -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", - "3.12", - "3.13", -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = 'google-cloud-bigquery-migration' - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.13" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "prerelease_deps", -] - -@nox.session(python=ALL_PYTHON) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def unit(session, protobuf_implementation): - """Run the unit test suite.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") - - # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. - # The 'cpp' implementation requires Protobuf<4. - if protobuf_implementation == "cpp": - session.install("protobuf<4") - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/bigquery_migration_v2/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - -@nox.session(python=ALL_PYTHON[-1]) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def prerelease_deps(session, protobuf_implementation): - """Run the unit test suite against pre-release versions of dependencies.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - # Install test environment dependencies - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - - # Install the package without dependencies - session.install('-e', '.', '--no-deps') - - # We test the minimum dependency versions using the minimum Python - # version so the lowest python runtime that we test has a corresponding constraints - # file, located at `testing/constraints--.txt`, which contains all of the - # dependencies and extras. - with open( - CURRENT_DIRECTORY - / "testing" - / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. 
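The comprehension that follows applies a lookahead regex to the constraints text so that only dependencies pinned with '==' are collected. A standalone check of that exact pattern, with made-up constraints content:

import re

constraints_text = """\
# a comment line
  google-api-core==1.34.0
proto-plus==1.22.3
unpinned-package
"""

# Capture a package name only when it is immediately followed by '==<version>'.
deps = [
    m.group(1)
    for m in re.finditer(r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE)
]
print(deps)  # ['google-api-core', 'proto-plus']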
- constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - session.install(*constraints_deps) - - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 - "grpcio!=1.67.0rc1", - "grpcio-status", - "protobuf", - "proto-plus", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) - - # Remaining dependencies - other_deps = [ - "requests", - ] - session.install(*other_deps) - - # Print out prerelease package versions - - session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") - session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("python", "-c", "import grpc; print(grpc.__version__)") - session.run( - "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" - ) - session.run( - "python", "-c", "import proto; print(proto.__version__)" - ) - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/bigquery_migration_v2/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. - """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. 
- """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/bigquerymigration_v2_generated_migration_service_create_migration_workflow_async.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/bigquerymigration_v2_generated_migration_service_create_migration_workflow_async.py deleted file mode 100644 index c2eefb7ed8a0..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/bigquerymigration_v2_generated_migration_service_create_migration_workflow_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateMigrationWorkflow -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-migration - - -# [START bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_migration_v2 - - -async def sample_create_migration_workflow(): - # Create a client - client = bigquery_migration_v2.MigrationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_migration_v2.CreateMigrationWorkflowRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_migration_workflow(request=request) - - # Handle the response - print(response) - -# [END bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async] diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/bigquerymigration_v2_generated_migration_service_create_migration_workflow_sync.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/bigquerymigration_v2_generated_migration_service_create_migration_workflow_sync.py deleted file mode 100644 index 25a4f6f525e9..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/bigquerymigration_v2_generated_migration_service_create_migration_workflow_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateMigrationWorkflow -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-migration - - -# [START bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_migration_v2 - - -def sample_create_migration_workflow(): - # Create a client - client = bigquery_migration_v2.MigrationServiceClient() - - # Initialize request argument(s) - request = bigquery_migration_v2.CreateMigrationWorkflowRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_migration_workflow(request=request) - - # Handle the response - print(response) - -# [END bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/bigquerymigration_v2_generated_migration_service_delete_migration_workflow_async.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/bigquerymigration_v2_generated_migration_service_delete_migration_workflow_async.py deleted file mode 100644 index 4d1e97a3e0a7..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/bigquerymigration_v2_generated_migration_service_delete_migration_workflow_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteMigrationWorkflow -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-migration - - -# [START bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_migration_v2 - - -async def sample_delete_migration_workflow(): - # Create a client - client = bigquery_migration_v2.MigrationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_migration_v2.DeleteMigrationWorkflowRequest( - name="name_value", - ) - - # Make the request - await client.delete_migration_workflow(request=request) - - -# [END bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async] diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/bigquerymigration_v2_generated_migration_service_delete_migration_workflow_sync.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/bigquerymigration_v2_generated_migration_service_delete_migration_workflow_sync.py deleted file mode 100644 index 0237d6acbd10..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/bigquerymigration_v2_generated_migration_service_delete_migration_workflow_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteMigrationWorkflow -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-migration - - -# [START bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_migration_v2 - - -def sample_delete_migration_workflow(): - # Create a client - client = bigquery_migration_v2.MigrationServiceClient() - - # Initialize request argument(s) - request = bigquery_migration_v2.DeleteMigrationWorkflowRequest( - name="name_value", - ) - - # Make the request - client.delete_migration_workflow(request=request) - - -# [END bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/bigquerymigration_v2_generated_migration_service_get_migration_subtask_async.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/bigquerymigration_v2_generated_migration_service_get_migration_subtask_async.py deleted file mode 100644 index 815e87e9aa6d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/bigquerymigration_v2_generated_migration_service_get_migration_subtask_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetMigrationSubtask -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-migration - - -# [START bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_migration_v2 - - -async def sample_get_migration_subtask(): - # Create a client - client = bigquery_migration_v2.MigrationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_migration_v2.GetMigrationSubtaskRequest( - name="name_value", - ) - - # Make the request - response = await client.get_migration_subtask(request=request) - - # Handle the response - print(response) - -# [END bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async] diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/bigquerymigration_v2_generated_migration_service_get_migration_subtask_sync.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/bigquerymigration_v2_generated_migration_service_get_migration_subtask_sync.py deleted file mode 100644 index 5b9b19a3679b..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/bigquerymigration_v2_generated_migration_service_get_migration_subtask_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetMigrationSubtask -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-migration - - -# [START bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_migration_v2 - - -def sample_get_migration_subtask(): - # Create a client - client = bigquery_migration_v2.MigrationServiceClient() - - # Initialize request argument(s) - request = bigquery_migration_v2.GetMigrationSubtaskRequest( - name="name_value", - ) - - # Make the request - response = client.get_migration_subtask(request=request) - - # Handle the response - print(response) - -# [END bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/bigquerymigration_v2_generated_migration_service_get_migration_workflow_async.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/bigquerymigration_v2_generated_migration_service_get_migration_workflow_async.py deleted file mode 100644 index a05ff64dab0e..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/bigquerymigration_v2_generated_migration_service_get_migration_workflow_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetMigrationWorkflow -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-migration - - -# [START bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_migration_v2 - - -async def sample_get_migration_workflow(): - # Create a client - client = bigquery_migration_v2.MigrationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_migration_v2.GetMigrationWorkflowRequest( - name="name_value", - ) - - # Make the request - response = await client.get_migration_workflow(request=request) - - # Handle the response - print(response) - -# [END bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async] diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/bigquerymigration_v2_generated_migration_service_get_migration_workflow_sync.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/bigquerymigration_v2_generated_migration_service_get_migration_workflow_sync.py deleted file mode 100644 index 75a214df8290..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/bigquerymigration_v2_generated_migration_service_get_migration_workflow_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetMigrationWorkflow -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-migration - - -# [START bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_migration_v2 - - -def sample_get_migration_workflow(): - # Create a client - client = bigquery_migration_v2.MigrationServiceClient() - - # Initialize request argument(s) - request = bigquery_migration_v2.GetMigrationWorkflowRequest( - name="name_value", - ) - - # Make the request - response = client.get_migration_workflow(request=request) - - # Handle the response - print(response) - -# [END bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/bigquerymigration_v2_generated_migration_service_list_migration_subtasks_async.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/bigquerymigration_v2_generated_migration_service_list_migration_subtasks_async.py deleted file mode 100644 index 45bf77c9be8a..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/bigquerymigration_v2_generated_migration_service_list_migration_subtasks_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListMigrationSubtasks -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-migration - - -# [START bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_migration_v2 - - -async def sample_list_migration_subtasks(): - # Create a client - client = bigquery_migration_v2.MigrationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_migration_v2.ListMigrationSubtasksRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_migration_subtasks(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async] diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/bigquerymigration_v2_generated_migration_service_list_migration_subtasks_sync.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/bigquerymigration_v2_generated_migration_service_list_migration_subtasks_sync.py deleted file mode 100644 index 3347d21c2b01..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/bigquerymigration_v2_generated_migration_service_list_migration_subtasks_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListMigrationSubtasks -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-migration - - -# [START bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_migration_v2 - - -def sample_list_migration_subtasks(): - # Create a client - client = bigquery_migration_v2.MigrationServiceClient() - - # Initialize request argument(s) - request = bigquery_migration_v2.ListMigrationSubtasksRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_migration_subtasks(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/bigquerymigration_v2_generated_migration_service_list_migration_workflows_async.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/bigquerymigration_v2_generated_migration_service_list_migration_workflows_async.py deleted file mode 100644 index 8acebf58b624..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/bigquerymigration_v2_generated_migration_service_list_migration_workflows_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListMigrationWorkflows -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-migration - - -# [START bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_migration_v2 - - -async def sample_list_migration_workflows(): - # Create a client - client = bigquery_migration_v2.MigrationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_migration_v2.ListMigrationWorkflowsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_migration_workflows(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async] diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/bigquerymigration_v2_generated_migration_service_list_migration_workflows_sync.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/bigquerymigration_v2_generated_migration_service_list_migration_workflows_sync.py deleted file mode 100644 index bd91de11fa16..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/bigquerymigration_v2_generated_migration_service_list_migration_workflows_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListMigrationWorkflows -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-migration - - -# [START bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_migration_v2 - - -def sample_list_migration_workflows(): - # Create a client - client = bigquery_migration_v2.MigrationServiceClient() - - # Initialize request argument(s) - request = bigquery_migration_v2.ListMigrationWorkflowsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_migration_workflows(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/bigquerymigration_v2_generated_migration_service_start_migration_workflow_async.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/bigquerymigration_v2_generated_migration_service_start_migration_workflow_async.py deleted file mode 100644 index 915f9076db50..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/bigquerymigration_v2_generated_migration_service_start_migration_workflow_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for StartMigrationWorkflow -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-migration - - -# [START bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_migration_v2 - - -async def sample_start_migration_workflow(): - # Create a client - client = bigquery_migration_v2.MigrationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_migration_v2.StartMigrationWorkflowRequest( - name="name_value", - ) - - # Make the request - await client.start_migration_workflow(request=request) - - -# [END bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async] diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/bigquerymigration_v2_generated_migration_service_start_migration_workflow_sync.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/bigquerymigration_v2_generated_migration_service_start_migration_workflow_sync.py deleted file mode 100644 index 6809593e52f8..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/bigquerymigration_v2_generated_migration_service_start_migration_workflow_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for StartMigrationWorkflow -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-migration - - -# [START bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_migration_v2 - - -def sample_start_migration_workflow(): - # Create a client - client = bigquery_migration_v2.MigrationServiceClient() - - # Initialize request argument(s) - request = bigquery_migration_v2.StartMigrationWorkflowRequest( - name="name_value", - ) - - # Make the request - client.start_migration_workflow(request=request) - - -# [END bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2.json b/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2.json deleted file mode 100644 index 07e44d919e85..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2.json +++ /dev/null @@ -1,1138 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.bigquery.migration.v2", - "version": "v2" - } - ], - "language": "PYTHON", - "name": "google-cloud-bigquery-migration", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceAsyncClient", - "shortName": "MigrationServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceAsyncClient.create_migration_workflow", - "method": { - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.CreateMigrationWorkflow", - "service": { - "fullName": "google.cloud.bigquery.migration.v2.MigrationService", - "shortName": "MigrationService" - }, - "shortName": "CreateMigrationWorkflow" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_migration_v2.types.CreateMigrationWorkflowRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "migration_workflow", - "type": "google.cloud.bigquery_migration_v2.types.MigrationWorkflow" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_migration_v2.types.MigrationWorkflow", - "shortName": "create_migration_workflow" - }, - "description": "Sample for CreateMigrationWorkflow", - "file": "bigquerymigration_v2_generated_migration_service_create_migration_workflow_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerymigration_v2_generated_migration_service_create_migration_workflow_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceClient", - "shortName": 
"MigrationServiceClient" - }, - "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceClient.create_migration_workflow", - "method": { - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.CreateMigrationWorkflow", - "service": { - "fullName": "google.cloud.bigquery.migration.v2.MigrationService", - "shortName": "MigrationService" - }, - "shortName": "CreateMigrationWorkflow" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_migration_v2.types.CreateMigrationWorkflowRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "migration_workflow", - "type": "google.cloud.bigquery_migration_v2.types.MigrationWorkflow" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_migration_v2.types.MigrationWorkflow", - "shortName": "create_migration_workflow" - }, - "description": "Sample for CreateMigrationWorkflow", - "file": "bigquerymigration_v2_generated_migration_service_create_migration_workflow_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerymigration_v2_generated_MigrationService_CreateMigrationWorkflow_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerymigration_v2_generated_migration_service_create_migration_workflow_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceAsyncClient", - "shortName": "MigrationServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceAsyncClient.delete_migration_workflow", - "method": { - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.DeleteMigrationWorkflow", - "service": { - "fullName": "google.cloud.bigquery.migration.v2.MigrationService", - "shortName": "MigrationService" - }, - "shortName": "DeleteMigrationWorkflow" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_migration_v2.types.DeleteMigrationWorkflowRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_migration_workflow" - }, - "description": "Sample for DeleteMigrationWorkflow", - "file": "bigquerymigration_v2_generated_migration_service_delete_migration_workflow_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"bigquerymigration_v2_generated_migration_service_delete_migration_workflow_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceClient", - "shortName": "MigrationServiceClient" - }, - "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceClient.delete_migration_workflow", - "method": { - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.DeleteMigrationWorkflow", - "service": { - "fullName": "google.cloud.bigquery.migration.v2.MigrationService", - "shortName": "MigrationService" - }, - "shortName": "DeleteMigrationWorkflow" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_migration_v2.types.DeleteMigrationWorkflowRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_migration_workflow" - }, - "description": "Sample for DeleteMigrationWorkflow", - "file": "bigquerymigration_v2_generated_migration_service_delete_migration_workflow_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerymigration_v2_generated_MigrationService_DeleteMigrationWorkflow_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerymigration_v2_generated_migration_service_delete_migration_workflow_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceAsyncClient", - "shortName": "MigrationServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceAsyncClient.get_migration_subtask", - "method": { - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationSubtask", - "service": { - "fullName": "google.cloud.bigquery.migration.v2.MigrationService", - "shortName": "MigrationService" - }, - "shortName": "GetMigrationSubtask" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_migration_v2.types.GetMigrationSubtaskRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_migration_v2.types.MigrationSubtask", - "shortName": "get_migration_subtask" - }, - "description": "Sample for GetMigrationSubtask", - "file": "bigquerymigration_v2_generated_migration_service_get_migration_subtask_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - 
"end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerymigration_v2_generated_migration_service_get_migration_subtask_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceClient", - "shortName": "MigrationServiceClient" - }, - "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceClient.get_migration_subtask", - "method": { - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationSubtask", - "service": { - "fullName": "google.cloud.bigquery.migration.v2.MigrationService", - "shortName": "MigrationService" - }, - "shortName": "GetMigrationSubtask" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_migration_v2.types.GetMigrationSubtaskRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_migration_v2.types.MigrationSubtask", - "shortName": "get_migration_subtask" - }, - "description": "Sample for GetMigrationSubtask", - "file": "bigquerymigration_v2_generated_migration_service_get_migration_subtask_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerymigration_v2_generated_MigrationService_GetMigrationSubtask_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerymigration_v2_generated_migration_service_get_migration_subtask_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceAsyncClient", - "shortName": "MigrationServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceAsyncClient.get_migration_workflow", - "method": { - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationWorkflow", - "service": { - "fullName": "google.cloud.bigquery.migration.v2.MigrationService", - "shortName": "MigrationService" - }, - "shortName": "GetMigrationWorkflow" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_migration_v2.types.GetMigrationWorkflowRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_migration_v2.types.MigrationWorkflow", - "shortName": "get_migration_workflow" - }, - "description": "Sample for GetMigrationWorkflow", - "file": "bigquerymigration_v2_generated_migration_service_get_migration_workflow_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" 
- }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerymigration_v2_generated_migration_service_get_migration_workflow_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceClient", - "shortName": "MigrationServiceClient" - }, - "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceClient.get_migration_workflow", - "method": { - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.GetMigrationWorkflow", - "service": { - "fullName": "google.cloud.bigquery.migration.v2.MigrationService", - "shortName": "MigrationService" - }, - "shortName": "GetMigrationWorkflow" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_migration_v2.types.GetMigrationWorkflowRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_migration_v2.types.MigrationWorkflow", - "shortName": "get_migration_workflow" - }, - "description": "Sample for GetMigrationWorkflow", - "file": "bigquerymigration_v2_generated_migration_service_get_migration_workflow_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerymigration_v2_generated_MigrationService_GetMigrationWorkflow_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerymigration_v2_generated_migration_service_get_migration_workflow_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceAsyncClient", - "shortName": "MigrationServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceAsyncClient.list_migration_subtasks", - "method": { - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationSubtasks", - "service": { - "fullName": "google.cloud.bigquery.migration.v2.MigrationService", - "shortName": "MigrationService" - }, - "shortName": "ListMigrationSubtasks" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_migration_v2.types.ListMigrationSubtasksRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_migration_v2.services.migration_service.pagers.ListMigrationSubtasksAsyncPager", - "shortName": "list_migration_subtasks" - }, - "description": "Sample for ListMigrationSubtasks", - "file": "bigquerymigration_v2_generated_migration_service_list_migration_subtasks_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerymigration_v2_generated_migration_service_list_migration_subtasks_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceClient", - "shortName": "MigrationServiceClient" - }, - "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceClient.list_migration_subtasks", - "method": { - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationSubtasks", - "service": { - "fullName": "google.cloud.bigquery.migration.v2.MigrationService", - "shortName": "MigrationService" - }, - "shortName": "ListMigrationSubtasks" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_migration_v2.types.ListMigrationSubtasksRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_migration_v2.services.migration_service.pagers.ListMigrationSubtasksPager", - "shortName": "list_migration_subtasks" - }, - "description": "Sample for ListMigrationSubtasks", - "file": "bigquerymigration_v2_generated_migration_service_list_migration_subtasks_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerymigration_v2_generated_MigrationService_ListMigrationSubtasks_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerymigration_v2_generated_migration_service_list_migration_subtasks_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceAsyncClient", - "shortName": "MigrationServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceAsyncClient.list_migration_workflows", - "method": { - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationWorkflows", - "service": { - "fullName": "google.cloud.bigquery.migration.v2.MigrationService", - "shortName": "MigrationService" - }, - "shortName": "ListMigrationWorkflows" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_migration_v2.types.ListMigrationWorkflowsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": 
"google.cloud.bigquery_migration_v2.services.migration_service.pagers.ListMigrationWorkflowsAsyncPager", - "shortName": "list_migration_workflows" - }, - "description": "Sample for ListMigrationWorkflows", - "file": "bigquerymigration_v2_generated_migration_service_list_migration_workflows_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerymigration_v2_generated_migration_service_list_migration_workflows_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceClient", - "shortName": "MigrationServiceClient" - }, - "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceClient.list_migration_workflows", - "method": { - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.ListMigrationWorkflows", - "service": { - "fullName": "google.cloud.bigquery.migration.v2.MigrationService", - "shortName": "MigrationService" - }, - "shortName": "ListMigrationWorkflows" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_migration_v2.types.ListMigrationWorkflowsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_migration_v2.services.migration_service.pagers.ListMigrationWorkflowsPager", - "shortName": "list_migration_workflows" - }, - "description": "Sample for ListMigrationWorkflows", - "file": "bigquerymigration_v2_generated_migration_service_list_migration_workflows_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerymigration_v2_generated_MigrationService_ListMigrationWorkflows_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerymigration_v2_generated_migration_service_list_migration_workflows_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceAsyncClient", - "shortName": "MigrationServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceAsyncClient.start_migration_workflow", - "method": { - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.StartMigrationWorkflow", - "service": { - "fullName": "google.cloud.bigquery.migration.v2.MigrationService", - "shortName": "MigrationService" - }, - "shortName": "StartMigrationWorkflow" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.bigquery_migration_v2.types.StartMigrationWorkflowRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "start_migration_workflow" - }, - "description": "Sample for StartMigrationWorkflow", - "file": "bigquerymigration_v2_generated_migration_service_start_migration_workflow_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerymigration_v2_generated_migration_service_start_migration_workflow_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceClient", - "shortName": "MigrationServiceClient" - }, - "fullName": "google.cloud.bigquery_migration_v2.MigrationServiceClient.start_migration_workflow", - "method": { - "fullName": "google.cloud.bigquery.migration.v2.MigrationService.StartMigrationWorkflow", - "service": { - "fullName": "google.cloud.bigquery.migration.v2.MigrationService", - "shortName": "MigrationService" - }, - "shortName": "StartMigrationWorkflow" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_migration_v2.types.StartMigrationWorkflowRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "start_migration_workflow" - }, - "description": "Sample for StartMigrationWorkflow", - "file": "bigquerymigration_v2_generated_migration_service_start_migration_workflow_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerymigration_v2_generated_MigrationService_StartMigrationWorkflow_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerymigration_v2_generated_migration_service_start_migration_workflow_sync.py" - } - ] -} diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/scripts/fixup_bigquery_migration_v2_keywords.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/scripts/fixup_bigquery_migration_v2_keywords.py deleted file mode 100644 index c0eca0ff15fe..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/scripts/fixup_bigquery_migration_v2_keywords.py +++ /dev/null @@ -1,182 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class bigquery_migrationCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_migration_workflow': ('parent', 'migration_workflow', ), - 'delete_migration_workflow': ('name', ), - 'get_migration_subtask': ('name', 'read_mask', ), - 'get_migration_workflow': ('name', 'read_mask', ), - 'list_migration_subtasks': ('parent', 'read_mask', 'page_size', 'page_token', 'filter', ), - 'list_migration_workflows': ('parent', 'read_mask', 'page_size', 'page_token', ), - 'start_migration_workflow': ('name', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=bigquery_migrationCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. 
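- # For example (hypothetical input), a flattened positional call such as - #   client.get_migration_workflow('projects/p/locations/l/workflows/w') - # is rewritten to the request-object form - #   client.get_migration_workflow(request={'name': 'projects/p/locations/l/workflows/w'}) - # per the METHOD_TO_PARAMS mapping above.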
- tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the bigquery_migration client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/setup.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/setup.py deleted file mode 100644 index 142b19d72660..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/setup.py +++ /dev/null @@ -1,98 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-bigquery-migration' - - -description = "Google Cloud Bigquery Migration API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/bigquery_migration/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", -] -extras = { -} -url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-migration" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - extras_require=extras, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-bigquery-migration/v2/testing/constraints-3.10.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/testing/constraints-3.10.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-bigquery-migration/v2/testing/constraints-3.11.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/testing/constraints-3.11.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-bigquery-migration/v2/testing/constraints-3.12.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/testing/constraints-3.12.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-bigquery-migration/v2/testing/constraints-3.13.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/testing/constraints-3.13.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-bigquery-migration/v2/testing/constraints-3.7.txt deleted file mode 100644 index fc812592b0ee..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/testing/constraints-3.7.txt +++ /dev/null @@ -1,10 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -proto-plus==1.22.3 -protobuf==3.20.2 diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-bigquery-migration/v2/testing/constraints-3.8.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/testing/constraints-3.8.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-bigquery-migration/v2/testing/constraints-3.9.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/testing/constraints-3.9.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/tests/__init__.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/tests/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/tests/unit/__init__.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/tests/unit/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/tests/unit/gapic/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/tests/unit/gapic/bigquery_migration_v2/__init__.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/tests/unit/gapic/bigquery_migration_v2/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/tests/unit/gapic/bigquery_migration_v2/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2/tests/unit/gapic/bigquery_migration_v2/test_migration_service.py b/owl-bot-staging/google-cloud-bigquery-migration/v2/tests/unit/gapic/bigquery_migration_v2/test_migration_service.py deleted file mode 100644 index 2a1a89974719..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2/tests/unit/gapic/bigquery_migration_v2/test_migration_service.py +++ /dev/null @@ -1,4388 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api import distribution_pb2 # type: ignore -from google.api import metric_pb2 # type: ignore -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.bigquery_migration_v2.services.migration_service import MigrationServiceAsyncClient -from google.cloud.bigquery_migration_v2.services.migration_service import MigrationServiceClient -from google.cloud.bigquery_migration_v2.services.migration_service import pagers -from google.cloud.bigquery_migration_v2.services.migration_service import transports -from google.cloud.bigquery_migration_v2.types import migration_entities -from google.cloud.bigquery_migration_v2.types import migration_error_details -from google.cloud.bigquery_migration_v2.types import migration_metrics -from google.cloud.bigquery_migration_v2.types import migration_service -from google.cloud.bigquery_migration_v2.types import translation_config -from google.cloud.bigquery_migration_v2.types import translation_details -from google.cloud.bigquery_migration_v2.types import translation_usability -from 
google.oauth2 import service_account -from google.protobuf import any_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import error_details_pb2 # type: ignore -import google.auth - - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": "service account credentials", - "principal": "service-account@example.com", -} -CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert MigrationServiceClient._get_default_mtls_endpoint(None) is None - assert MigrationServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert MigrationServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert MigrationServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert MigrationServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert MigrationServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert MigrationServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert MigrationServiceClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert MigrationServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - MigrationServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, 
{"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert MigrationServiceClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert MigrationServiceClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert MigrationServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - MigrationServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert MigrationServiceClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert MigrationServiceClient._get_client_cert_source(None, False) is None - assert MigrationServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert MigrationServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert MigrationServiceClient._get_client_cert_source(None, True) is mock_default_cert_source - assert MigrationServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(MigrationServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MigrationServiceClient)) -@mock.patch.object(MigrationServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MigrationServiceAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = MigrationServiceClient._DEFAULT_UNIVERSE - default_endpoint = MigrationServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = MigrationServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert MigrationServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert MigrationServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == MigrationServiceClient.DEFAULT_MTLS_ENDPOINT - assert MigrationServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert MigrationServiceClient._get_api_endpoint(None, None, default_universe, "always") == MigrationServiceClient.DEFAULT_MTLS_ENDPOINT - assert MigrationServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == MigrationServiceClient.DEFAULT_MTLS_ENDPOINT - assert MigrationServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert MigrationServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - MigrationServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") 
- assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." - - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert MigrationServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert MigrationServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert MigrationServiceClient._get_universe_domain(None, None) == MigrationServiceClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - MigrationServiceClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." - -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = MigrationServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - -@pytest.mark.parametrize("error_code", [401,403,404,500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = MigrationServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - -@pytest.mark.parametrize("client_class,transport_name", [ - (MigrationServiceClient, "grpc"), - (MigrationServiceAsyncClient, "grpc_asyncio"), -]) -def test_migration_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'bigquerymigration.googleapis.com:443' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.MigrationServiceGrpcTransport, "grpc"), - (transports.MigrationServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_migration_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - 
use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (MigrationServiceClient, "grpc"), - (MigrationServiceAsyncClient, "grpc_asyncio"), -]) -def test_migration_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'bigquerymigration.googleapis.com:443' - ) - - -def test_migration_service_client_get_transport_class(): - transport = MigrationServiceClient.get_transport_class() - available_transports = [ - transports.MigrationServiceGrpcTransport, - ] - assert transport in available_transports - - transport = MigrationServiceClient.get_transport_class("grpc") - assert transport == transports.MigrationServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (MigrationServiceClient, transports.MigrationServiceGrpcTransport, "grpc"), - (MigrationServiceAsyncClient, transports.MigrationServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(MigrationServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MigrationServiceClient)) -@mock.patch.object(MigrationServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MigrationServiceAsyncClient)) -def test_migration_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(MigrationServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(MigrationServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". 
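- # (GOOGLE_API_USE_MTLS_ENDPOINT accepts "never", "auto", or "always"; with - # "auto", the mTLS endpoint is used only when a client certificate is available.)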
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (MigrationServiceClient, transports.MigrationServiceGrpcTransport, "grpc", "true"), - 
(MigrationServiceAsyncClient, transports.MigrationServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (MigrationServiceClient, transports.MigrationServiceGrpcTransport, "grpc", "false"), - (MigrationServiceAsyncClient, transports.MigrationServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), -]) -@mock.patch.object(MigrationServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MigrationServiceClient)) -@mock.patch.object(MigrationServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MigrationServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_migration_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
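- # With neither source available, the default endpoint is expected and no - # client certificate is configured on the transport, regardless of the env value.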
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - MigrationServiceClient, MigrationServiceAsyncClient -]) -@mock.patch.object(MigrationServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MigrationServiceClient)) -@mock.patch.object(MigrationServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MigrationServiceAsyncClient)) -def test_migration_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - MigrationServiceClient, MigrationServiceAsyncClient -]) -@mock.patch.object(MigrationServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MigrationServiceClient)) -@mock.patch.object(MigrationServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MigrationServiceAsyncClient)) -def test_migration_service_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = MigrationServiceClient._DEFAULT_UNIVERSE - default_endpoint = MigrationServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = MigrationServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (MigrationServiceClient, transports.MigrationServiceGrpcTransport, "grpc"), - (MigrationServiceAsyncClient, transports.MigrationServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_migration_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (MigrationServiceClient, transports.MigrationServiceGrpcTransport, "grpc", grpc_helpers), - (MigrationServiceAsyncClient, transports.MigrationServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_migration_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_migration_service_client_client_options_from_dict(): - with mock.patch('google.cloud.bigquery_migration_v2.services.migration_service.transports.MigrationServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = MigrationServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (MigrationServiceClient, transports.MigrationServiceGrpcTransport, "grpc", grpc_helpers), - (MigrationServiceAsyncClient, transports.MigrationServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_migration_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
-    with mock.patch.object(
-        google.auth, "load_credentials_from_file", autospec=True
-    ) as load_creds, mock.patch.object(
-        google.auth, "default", autospec=True
-    ) as adc, mock.patch.object(
-        grpc_helpers, "create_channel"
-    ) as create_channel:
-        creds = ga_credentials.AnonymousCredentials()
-        file_creds = ga_credentials.AnonymousCredentials()
-        load_creds.return_value = (file_creds, None)
-        adc.return_value = (creds, None)
-        client = client_class(client_options=options, transport=transport_name)
-        create_channel.assert_called_with(
-            "bigquerymigration.googleapis.com:443",
-            credentials=file_creds,
-            credentials_file=None,
-            quota_project_id=None,
-            default_scopes=(
-                'https://www.googleapis.com/auth/cloud-platform',
-            ),
-            scopes=None,
-            default_host="bigquerymigration.googleapis.com",
-            ssl_credentials=None,
-            options=[
-                ("grpc.max_send_message_length", -1),
-                ("grpc.max_receive_message_length", -1),
-            ],
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  migration_service.CreateMigrationWorkflowRequest,
-  dict,
-])
-def test_create_migration_workflow(request_type, transport: str = 'grpc'):
-    client = MigrationServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_migration_workflow),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = migration_entities.MigrationWorkflow(
-            name='name_value',
-            display_name='display_name_value',
-            state=migration_entities.MigrationWorkflow.State.DRAFT,
-        )
-        response = client.create_migration_workflow(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = migration_service.CreateMigrationWorkflowRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, migration_entities.MigrationWorkflow)
-    assert response.name == 'name_value'
-    assert response.display_name == 'display_name_value'
-    assert response.state == migration_entities.MigrationWorkflow.State.DRAFT
-
-
-def test_create_migration_workflow_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = MigrationServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = migration_service.CreateMigrationWorkflowRequest(
-        parent='parent_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_migration_workflow),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client.create_migration_workflow(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == migration_service.CreateMigrationWorkflowRequest( - parent='parent_value', - ) - -def test_create_migration_workflow_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_migration_workflow in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_migration_workflow] = mock_rpc - request = {} - client.create_migration_workflow(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_migration_workflow(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_migration_workflow_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_migration_workflow in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_migration_workflow] = mock_rpc - - request = {} - await client.create_migration_workflow(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_migration_workflow(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_migration_workflow_async(transport: str = 'grpc_asyncio', request_type=migration_service.CreateMigrationWorkflowRequest): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_migration_workflow), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(migration_entities.MigrationWorkflow(
-            name='name_value',
-            display_name='display_name_value',
-            state=migration_entities.MigrationWorkflow.State.DRAFT,
-        ))
-        response = await client.create_migration_workflow(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = migration_service.CreateMigrationWorkflowRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, migration_entities.MigrationWorkflow)
-    assert response.name == 'name_value'
-    assert response.display_name == 'display_name_value'
-    assert response.state == migration_entities.MigrationWorkflow.State.DRAFT
-
-
-@pytest.mark.asyncio
-async def test_create_migration_workflow_async_from_dict():
-    await test_create_migration_workflow_async(request_type=dict)
-
-def test_create_migration_workflow_field_headers():
-    client = MigrationServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = migration_service.CreateMigrationWorkflowRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_migration_workflow),
-            '__call__') as call:
-        call.return_value = migration_entities.MigrationWorkflow()
-        client.create_migration_workflow(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_create_migration_workflow_field_headers_async():
-    client = MigrationServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = migration_service.CreateMigrationWorkflowRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_migration_workflow),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(migration_entities.MigrationWorkflow())
-        await client.create_migration_workflow(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_create_migration_workflow_flattened():
-    client = MigrationServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_migration_workflow),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = migration_entities.MigrationWorkflow()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.create_migration_workflow(
-            parent='parent_value',
-            migration_workflow=migration_entities.MigrationWorkflow(name='name_value'),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].migration_workflow
-        mock_val = migration_entities.MigrationWorkflow(name='name_value')
-        assert arg == mock_val
-
-
-def test_create_migration_workflow_flattened_error():
-    client = MigrationServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.create_migration_workflow(
-            migration_service.CreateMigrationWorkflowRequest(),
-            parent='parent_value',
-            migration_workflow=migration_entities.MigrationWorkflow(name='name_value'),
-        )
-
-@pytest.mark.asyncio
-async def test_create_migration_workflow_flattened_async():
-    client = MigrationServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_migration_workflow),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(migration_entities.MigrationWorkflow())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.create_migration_workflow(
-            parent='parent_value',
-            migration_workflow=migration_entities.MigrationWorkflow(name='name_value'),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].migration_workflow
-        mock_val = migration_entities.MigrationWorkflow(name='name_value')
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_migration_workflow_flattened_error_async():
-    client = MigrationServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.create_migration_workflow(
-            migration_service.CreateMigrationWorkflowRequest(),
-            parent='parent_value',
-            migration_workflow=migration_entities.MigrationWorkflow(name='name_value'),
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  migration_service.GetMigrationWorkflowRequest,
-  dict,
-])
-def test_get_migration_workflow(request_type, transport: str = 'grpc'):
-    client = MigrationServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_migration_workflow),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
- call.return_value = migration_entities.MigrationWorkflow( - name='name_value', - display_name='display_name_value', - state=migration_entities.MigrationWorkflow.State.DRAFT, - ) - response = client.get_migration_workflow(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = migration_service.GetMigrationWorkflowRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, migration_entities.MigrationWorkflow) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.state == migration_entities.MigrationWorkflow.State.DRAFT - - -def test_get_migration_workflow_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = migration_service.GetMigrationWorkflowRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_migration_workflow), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_migration_workflow(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == migration_service.GetMigrationWorkflowRequest( - name='name_value', - ) - -def test_get_migration_workflow_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_migration_workflow in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_migration_workflow] = mock_rpc - request = {} - client.get_migration_workflow(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.get_migration_workflow(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_migration_workflow_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = MigrationServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.get_migration_workflow in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.get_migration_workflow] = mock_rpc
-
-        request = {}
-        await client.get_migration_workflow(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.get_migration_workflow(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_migration_workflow_async(transport: str = 'grpc_asyncio', request_type=migration_service.GetMigrationWorkflowRequest):
-    client = MigrationServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_migration_workflow),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(migration_entities.MigrationWorkflow(
-            name='name_value',
-            display_name='display_name_value',
-            state=migration_entities.MigrationWorkflow.State.DRAFT,
-        ))
-        response = await client.get_migration_workflow(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = migration_service.GetMigrationWorkflowRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, migration_entities.MigrationWorkflow)
-    assert response.name == 'name_value'
-    assert response.display_name == 'display_name_value'
-    assert response.state == migration_entities.MigrationWorkflow.State.DRAFT
-
-
-@pytest.mark.asyncio
-async def test_get_migration_workflow_async_from_dict():
-    await test_get_migration_workflow_async(request_type=dict)
-
-def test_get_migration_workflow_field_headers():
-    client = MigrationServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = migration_service.GetMigrationWorkflowRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_migration_workflow),
-            '__call__') as call:
-        call.return_value = migration_entities.MigrationWorkflow()
-        client.get_migration_workflow(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_migration_workflow_field_headers_async():
-    client = MigrationServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = migration_service.GetMigrationWorkflowRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_migration_workflow),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(migration_entities.MigrationWorkflow())
-        await client.get_migration_workflow(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_get_migration_workflow_flattened():
-    client = MigrationServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_migration_workflow),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = migration_entities.MigrationWorkflow()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.get_migration_workflow(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_get_migration_workflow_flattened_error():
-    client = MigrationServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_migration_workflow(
-            migration_service.GetMigrationWorkflowRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_get_migration_workflow_flattened_async():
-    client = MigrationServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_migration_workflow),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(migration_entities.MigrationWorkflow())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
- response = await client.get_migration_workflow( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_migration_workflow_flattened_error_async(): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_migration_workflow( - migration_service.GetMigrationWorkflowRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - migration_service.ListMigrationWorkflowsRequest, - dict, -]) -def test_list_migration_workflows(request_type, transport: str = 'grpc'): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_workflows), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = migration_service.ListMigrationWorkflowsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_migration_workflows(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = migration_service.ListMigrationWorkflowsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListMigrationWorkflowsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_migration_workflows_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = migration_service.ListMigrationWorkflowsRequest( - parent='parent_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_workflows), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.list_migration_workflows(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == migration_service.ListMigrationWorkflowsRequest( - parent='parent_value', - page_token='page_token_value', - ) - -def test_list_migration_workflows_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_migration_workflows in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_migration_workflows] = mock_rpc - request = {} - client.list_migration_workflows(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_migration_workflows(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_migration_workflows_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_migration_workflows in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_migration_workflows] = mock_rpc - - request = {} - await client.list_migration_workflows(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_migration_workflows(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_migration_workflows_async(transport: str = 'grpc_asyncio', request_type=migration_service.ListMigrationWorkflowsRequest): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_workflows), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(migration_service.ListMigrationWorkflowsResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.list_migration_workflows(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = migration_service.ListMigrationWorkflowsRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListMigrationWorkflowsAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_migration_workflows_async_from_dict():
-    await test_list_migration_workflows_async(request_type=dict)
-
-def test_list_migration_workflows_field_headers():
-    client = MigrationServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = migration_service.ListMigrationWorkflowsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_migration_workflows),
-            '__call__') as call:
-        call.return_value = migration_service.ListMigrationWorkflowsResponse()
-        client.list_migration_workflows(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_migration_workflows_field_headers_async():
-    client = MigrationServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = migration_service.ListMigrationWorkflowsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_migration_workflows),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(migration_service.ListMigrationWorkflowsResponse())
-        await client.list_migration_workflows(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_list_migration_workflows_flattened():
-    client = MigrationServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_migration_workflows),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = migration_service.ListMigrationWorkflowsResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_migration_workflows(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_migration_workflows_flattened_error():
-    client = MigrationServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_migration_workflows(
-            migration_service.ListMigrationWorkflowsRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_migration_workflows_flattened_async():
-    client = MigrationServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_migration_workflows),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(migration_service.ListMigrationWorkflowsResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_migration_workflows(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_migration_workflows_flattened_error_async():
-    client = MigrationServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_migration_workflows(
-            migration_service.ListMigrationWorkflowsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_migration_workflows_pager(transport_name: str = "grpc"):
-    client = MigrationServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_migration_workflows),
-            '__call__') as call:
-        # Set the response to a series of pages.
- call.side_effect = ( - migration_service.ListMigrationWorkflowsResponse( - migration_workflows=[ - migration_entities.MigrationWorkflow(), - migration_entities.MigrationWorkflow(), - migration_entities.MigrationWorkflow(), - ], - next_page_token='abc', - ), - migration_service.ListMigrationWorkflowsResponse( - migration_workflows=[], - next_page_token='def', - ), - migration_service.ListMigrationWorkflowsResponse( - migration_workflows=[ - migration_entities.MigrationWorkflow(), - ], - next_page_token='ghi', - ), - migration_service.ListMigrationWorkflowsResponse( - migration_workflows=[ - migration_entities.MigrationWorkflow(), - migration_entities.MigrationWorkflow(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_migration_workflows(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, migration_entities.MigrationWorkflow) - for i in results) -def test_list_migration_workflows_pages(transport_name: str = "grpc"): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_workflows), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - migration_service.ListMigrationWorkflowsResponse( - migration_workflows=[ - migration_entities.MigrationWorkflow(), - migration_entities.MigrationWorkflow(), - migration_entities.MigrationWorkflow(), - ], - next_page_token='abc', - ), - migration_service.ListMigrationWorkflowsResponse( - migration_workflows=[], - next_page_token='def', - ), - migration_service.ListMigrationWorkflowsResponse( - migration_workflows=[ - migration_entities.MigrationWorkflow(), - ], - next_page_token='ghi', - ), - migration_service.ListMigrationWorkflowsResponse( - migration_workflows=[ - migration_entities.MigrationWorkflow(), - migration_entities.MigrationWorkflow(), - ], - ), - RuntimeError, - ) - pages = list(client.list_migration_workflows(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_migration_workflows_async_pager(): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_workflows), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - migration_service.ListMigrationWorkflowsResponse( - migration_workflows=[ - migration_entities.MigrationWorkflow(), - migration_entities.MigrationWorkflow(), - migration_entities.MigrationWorkflow(), - ], - next_page_token='abc', - ), - migration_service.ListMigrationWorkflowsResponse( - migration_workflows=[], - next_page_token='def', - ), - migration_service.ListMigrationWorkflowsResponse( - migration_workflows=[ - migration_entities.MigrationWorkflow(), - ], - next_page_token='ghi', - ), - migration_service.ListMigrationWorkflowsResponse( - migration_workflows=[ - migration_entities.MigrationWorkflow(), - migration_entities.MigrationWorkflow(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_migration_workflows(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, migration_entities.MigrationWorkflow) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_migration_workflows_async_pages(): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_workflows), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - migration_service.ListMigrationWorkflowsResponse( - migration_workflows=[ - migration_entities.MigrationWorkflow(), - migration_entities.MigrationWorkflow(), - migration_entities.MigrationWorkflow(), - ], - next_page_token='abc', - ), - migration_service.ListMigrationWorkflowsResponse( - migration_workflows=[], - next_page_token='def', - ), - migration_service.ListMigrationWorkflowsResponse( - migration_workflows=[ - migration_entities.MigrationWorkflow(), - ], - next_page_token='ghi', - ), - migration_service.ListMigrationWorkflowsResponse( - migration_workflows=[ - migration_entities.MigrationWorkflow(), - migration_entities.MigrationWorkflow(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_migration_workflows(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - migration_service.DeleteMigrationWorkflowRequest, - dict, -]) -def test_delete_migration_workflow(request_type, transport: str = 'grpc'): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_migration_workflow), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_migration_workflow(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = migration_service.DeleteMigrationWorkflowRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_migration_workflow_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = migration_service.DeleteMigrationWorkflowRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_migration_workflow), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_migration_workflow(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == migration_service.DeleteMigrationWorkflowRequest( - name='name_value', - ) - -def test_delete_migration_workflow_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_migration_workflow in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_migration_workflow] = mock_rpc - request = {} - client.delete_migration_workflow(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_migration_workflow(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_migration_workflow_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_migration_workflow in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_migration_workflow] = mock_rpc - - request = {} - await client.delete_migration_workflow(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_migration_workflow(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_migration_workflow_async(transport: str = 'grpc_asyncio', request_type=migration_service.DeleteMigrationWorkflowRequest): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_migration_workflow), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_migration_workflow(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = migration_service.DeleteMigrationWorkflowRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_migration_workflow_async_from_dict(): - await test_delete_migration_workflow_async(request_type=dict) - -def test_delete_migration_workflow_field_headers(): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = migration_service.DeleteMigrationWorkflowRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_migration_workflow), - '__call__') as call: - call.return_value = None - client.delete_migration_workflow(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_delete_migration_workflow_field_headers_async():
-    client = MigrationServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = migration_service.DeleteMigrationWorkflowRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_migration_workflow),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        await client.delete_migration_workflow(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_delete_migration_workflow_flattened():
-    client = MigrationServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_migration_workflow),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.delete_migration_workflow(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_delete_migration_workflow_flattened_error():
-    client = MigrationServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_migration_workflow(
-            migration_service.DeleteMigrationWorkflowRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_delete_migration_workflow_flattened_async():
-    client = MigrationServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_migration_workflow),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.delete_migration_workflow(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_migration_workflow_flattened_error_async(): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_migration_workflow( - migration_service.DeleteMigrationWorkflowRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - migration_service.StartMigrationWorkflowRequest, - dict, -]) -def test_start_migration_workflow(request_type, transport: str = 'grpc'): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.start_migration_workflow), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.start_migration_workflow(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = migration_service.StartMigrationWorkflowRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_start_migration_workflow_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = migration_service.StartMigrationWorkflowRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.start_migration_workflow), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.start_migration_workflow(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == migration_service.StartMigrationWorkflowRequest( - name='name_value', - ) - -def test_start_migration_workflow_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.start_migration_workflow in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.start_migration_workflow] = mock_rpc - request = {} - client.start_migration_workflow(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.start_migration_workflow(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_start_migration_workflow_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.start_migration_workflow in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.start_migration_workflow] = mock_rpc - - request = {} - await client.start_migration_workflow(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.start_migration_workflow(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_start_migration_workflow_async(transport: str = 'grpc_asyncio', request_type=migration_service.StartMigrationWorkflowRequest): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.start_migration_workflow), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.start_migration_workflow(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = migration_service.StartMigrationWorkflowRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_start_migration_workflow_async_from_dict(): - await test_start_migration_workflow_async(request_type=dict) - -def test_start_migration_workflow_field_headers(): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = migration_service.StartMigrationWorkflowRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
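
The ``*_use_cached_wrapped_rpc`` tests above rely on the transport keeping a ``_wrapped_methods`` dictionary that is filled once at client construction. A hedged sketch of that caching idea (the class and names here are illustrative, not the generated transport's actual internals):

.. code-block:: python

    import functools

    class FakeTransport:
        """Hypothetical stand-in for the generated transport's method cache."""

        def __init__(self, stubs):
            # Wrap every raw stub exactly once, at construction time.
            self._wrapped_methods = {stub: self._wrap(stub) for stub in stubs}

        @staticmethod
        def _wrap(stub):
            @functools.wraps(stub)
            def wrapped(request, **kwargs):
                # The real wrapper layers retries, timeouts and metadata here.
                return stub(request, **kwargs)
            return wrapped

    def start_migration_workflow(request):
        return None

    transport = FakeTransport([start_migration_workflow])
    # Later calls look the cached wrapper up instead of re-wrapping:
    assert transport._wrapped_methods[start_migration_workflow]({}) is None
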
- with mock.patch.object(
- type(client.transport.start_migration_workflow),
- '__call__') as call:
- call.return_value = None
- client.start_migration_workflow(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_start_migration_workflow_field_headers_async():
- client = MigrationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = migration_service.StartMigrationWorkflowRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.start_migration_workflow),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
- await client.start_migration_workflow(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_start_migration_workflow_flattened():
- client = MigrationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.start_migration_workflow),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = None
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.start_migration_workflow(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_start_migration_workflow_flattened_error():
- client = MigrationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.start_migration_workflow(
- migration_service.StartMigrationWorkflowRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_start_migration_workflow_flattened_async():
- client = MigrationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.start_migration_workflow),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.start_migration_workflow(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_start_migration_workflow_flattened_error_async(): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.start_migration_workflow( - migration_service.StartMigrationWorkflowRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - migration_service.GetMigrationSubtaskRequest, - dict, -]) -def test_get_migration_subtask(request_type, transport: str = 'grpc'): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_migration_subtask), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = migration_entities.MigrationSubtask( - name='name_value', - task_id='task_id_value', - type_='type__value', - state=migration_entities.MigrationSubtask.State.ACTIVE, - resource_error_count=2169, - ) - response = client.get_migration_subtask(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = migration_service.GetMigrationSubtaskRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, migration_entities.MigrationSubtask) - assert response.name == 'name_value' - assert response.task_id == 'task_id_value' - assert response.type_ == 'type__value' - assert response.state == migration_entities.MigrationSubtask.State.ACTIVE - assert response.resource_error_count == 2169 - - -def test_get_migration_subtask_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = migration_service.GetMigrationSubtaskRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_migration_subtask), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
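
The ``*_non_empty_request_with_auto_populated_field`` tests reference AIP-4235, under which string fields annotated as auto-populated UUID4 are filled in when the caller leaves them empty. An illustrative sketch of that behavior (the field name here is a hypothetical example, not one from this service's protos):

.. code-block:: python

    import uuid

    # Hypothetical set of auto-populated fields; the real set comes from the
    # service's proto annotations.
    AUTO_POPULATED_FIELDS = {"request_id"}

    def populate_request_ids(request: dict) -> dict:
        for field in AUTO_POPULATED_FIELDS:
            if not request.get(field):
                request[field] = str(uuid.uuid4())
        return request

    req = populate_request_ids({"name": "name_value", "request_id": ""})
    assert req["name"] == "name_value"      # user-supplied fields are untouched
    assert len(req["request_id"]) == 36     # an RFC 4122 UUID4 was filled in
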
- client.get_migration_subtask(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == migration_service.GetMigrationSubtaskRequest( - name='name_value', - ) - -def test_get_migration_subtask_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_migration_subtask in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_migration_subtask] = mock_rpc - request = {} - client.get_migration_subtask(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_migration_subtask(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_migration_subtask_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_migration_subtask in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_migration_subtask] = mock_rpc - - request = {} - await client.get_migration_subtask(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_migration_subtask(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_migration_subtask_async(transport: str = 'grpc_asyncio', request_type=migration_service.GetMigrationSubtaskRequest): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_migration_subtask), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(migration_entities.MigrationSubtask(
- name='name_value',
- task_id='task_id_value',
- type_='type__value',
- state=migration_entities.MigrationSubtask.State.ACTIVE,
- resource_error_count=2169,
- ))
- response = await client.get_migration_subtask(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = migration_service.GetMigrationSubtaskRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, migration_entities.MigrationSubtask)
- assert response.name == 'name_value'
- assert response.task_id == 'task_id_value'
- assert response.type_ == 'type__value'
- assert response.state == migration_entities.MigrationSubtask.State.ACTIVE
- assert response.resource_error_count == 2169
-
-
-@pytest.mark.asyncio
-async def test_get_migration_subtask_async_from_dict():
- await test_get_migration_subtask_async(request_type=dict)
-
-def test_get_migration_subtask_field_headers():
- client = MigrationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = migration_service.GetMigrationSubtaskRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_migration_subtask),
- '__call__') as call:
- call.return_value = migration_entities.MigrationSubtask()
- client.get_migration_subtask(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_migration_subtask_field_headers_async():
- client = MigrationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = migration_service.GetMigrationSubtaskRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_migration_subtask),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(migration_entities.MigrationSubtask())
- await client.get_migration_subtask(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_get_migration_subtask_flattened():
- client = MigrationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_migration_subtask),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = migration_entities.MigrationSubtask()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
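
The async tests above stand in for real gRPC calls with ``grpc_helpers_async.FakeUnaryUnaryCall``. A hedged sketch of the minimal contract such a fake needs (my own reduced version, not the ``google.api_core`` implementation): an object that can be awaited and resolves to the designated response.

.. code-block:: python

    import asyncio

    class FakeCall:
        """Awaitable that resolves to a canned response."""

        def __init__(self, response=None):
            self._response = response

        def __await__(self):
            async def _resolve():
                return self._response
            return _resolve().__await__()

    async def main():
        assert await FakeCall("response") == "response"
        assert await FakeCall() is None  # e.g. for Empty-returning RPCs

    asyncio.run(main())
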
- client.get_migration_subtask(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_get_migration_subtask_flattened_error():
- client = MigrationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.get_migration_subtask(
- migration_service.GetMigrationSubtaskRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_get_migration_subtask_flattened_async():
- client = MigrationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_migration_subtask),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(migration_entities.MigrationSubtask())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.get_migration_subtask(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_migration_subtask_flattened_error_async():
- client = MigrationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.get_migration_subtask(
- migration_service.GetMigrationSubtaskRequest(),
- name='name_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- migration_service.ListMigrationSubtasksRequest,
- dict,
-])
-def test_list_migration_subtasks(request_type, transport: str = 'grpc'):
- client = MigrationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_migration_subtasks),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = migration_service.ListMigrationSubtasksResponse(
- next_page_token='next_page_token_value',
- )
- response = client.list_migration_subtasks(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = migration_service.ListMigrationSubtasksRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListMigrationSubtasksPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_migration_subtasks_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = migration_service.ListMigrationSubtasksRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_subtasks), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_migration_subtasks(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == migration_service.ListMigrationSubtasksRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - ) - -def test_list_migration_subtasks_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_migration_subtasks in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_migration_subtasks] = mock_rpc - request = {} - client.list_migration_subtasks(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.list_migration_subtasks(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_migration_subtasks_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = MigrationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.list_migration_subtasks in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.list_migration_subtasks] = mock_rpc
-
- request = {}
- await client.list_migration_subtasks(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.list_migration_subtasks(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_migration_subtasks_async(transport: str = 'grpc_asyncio', request_type=migration_service.ListMigrationSubtasksRequest):
- client = MigrationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_migration_subtasks),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(migration_service.ListMigrationSubtasksResponse(
- next_page_token='next_page_token_value',
- ))
- response = await client.list_migration_subtasks(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = migration_service.ListMigrationSubtasksRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListMigrationSubtasksAsyncPager)
- assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_migration_subtasks_async_from_dict():
- await test_list_migration_subtasks_async(request_type=dict)
-
-def test_list_migration_subtasks_field_headers():
- client = MigrationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = migration_service.ListMigrationSubtasksRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_migration_subtasks),
- '__call__') as call:
- call.return_value = migration_service.ListMigrationSubtasksResponse()
- client.list_migration_subtasks(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_migration_subtasks_field_headers_async():
- client = MigrationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = migration_service.ListMigrationSubtasksRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_migration_subtasks),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(migration_service.ListMigrationSubtasksResponse())
- await client.list_migration_subtasks(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-def test_list_migration_subtasks_flattened():
- client = MigrationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_migration_subtasks),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = migration_service.ListMigrationSubtasksResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.list_migration_subtasks(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-
-def test_list_migration_subtasks_flattened_error():
- client = MigrationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.list_migration_subtasks(
- migration_service.ListMigrationSubtasksRequest(),
- parent='parent_value',
- )
-
-@pytest.mark.asyncio
-async def test_list_migration_subtasks_flattened_async():
- client = MigrationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_migration_subtasks),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(migration_service.ListMigrationSubtasksResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.list_migration_subtasks( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_migration_subtasks_flattened_error_async(): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_migration_subtasks( - migration_service.ListMigrationSubtasksRequest(), - parent='parent_value', - ) - - -def test_list_migration_subtasks_pager(transport_name: str = "grpc"): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_subtasks), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - migration_service.ListMigrationSubtasksResponse( - migration_subtasks=[ - migration_entities.MigrationSubtask(), - migration_entities.MigrationSubtask(), - migration_entities.MigrationSubtask(), - ], - next_page_token='abc', - ), - migration_service.ListMigrationSubtasksResponse( - migration_subtasks=[], - next_page_token='def', - ), - migration_service.ListMigrationSubtasksResponse( - migration_subtasks=[ - migration_entities.MigrationSubtask(), - ], - next_page_token='ghi', - ), - migration_service.ListMigrationSubtasksResponse( - migration_subtasks=[ - migration_entities.MigrationSubtask(), - migration_entities.MigrationSubtask(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_migration_subtasks(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, migration_entities.MigrationSubtask) - for i in results) -def test_list_migration_subtasks_pages(transport_name: str = "grpc"): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_subtasks), - '__call__') as call: - # Set the response to a series of pages. 
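
The pager tests above exercise the core pagination contract: issue the RPC, yield items from the page, then re-issue the request with ``next_page_token`` until an empty token comes back. A condensed sketch of that loop (my own reduction, not the generated ``pagers`` module):

.. code-block:: python

    from dataclasses import dataclass
    from typing import Callable, Iterator, List

    @dataclass
    class Page:
        items: List[str]
        next_page_token: str = ""

    def iterate(method: Callable[[str], Page]) -> Iterator[str]:
        token = ""
        while True:
            page = method(token)          # re-issue the RPC with the last token
            yield from page.items
            token = page.next_page_token
            if not token:                 # an empty token ends the iteration
                return

    pages = iter([Page(["a", "b"], "t1"), Page(["c"])])
    assert list(iterate(lambda token: next(pages))) == ["a", "b", "c"]
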
- call.side_effect = ( - migration_service.ListMigrationSubtasksResponse( - migration_subtasks=[ - migration_entities.MigrationSubtask(), - migration_entities.MigrationSubtask(), - migration_entities.MigrationSubtask(), - ], - next_page_token='abc', - ), - migration_service.ListMigrationSubtasksResponse( - migration_subtasks=[], - next_page_token='def', - ), - migration_service.ListMigrationSubtasksResponse( - migration_subtasks=[ - migration_entities.MigrationSubtask(), - ], - next_page_token='ghi', - ), - migration_service.ListMigrationSubtasksResponse( - migration_subtasks=[ - migration_entities.MigrationSubtask(), - migration_entities.MigrationSubtask(), - ], - ), - RuntimeError, - ) - pages = list(client.list_migration_subtasks(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_migration_subtasks_async_pager(): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_subtasks), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - migration_service.ListMigrationSubtasksResponse( - migration_subtasks=[ - migration_entities.MigrationSubtask(), - migration_entities.MigrationSubtask(), - migration_entities.MigrationSubtask(), - ], - next_page_token='abc', - ), - migration_service.ListMigrationSubtasksResponse( - migration_subtasks=[], - next_page_token='def', - ), - migration_service.ListMigrationSubtasksResponse( - migration_subtasks=[ - migration_entities.MigrationSubtask(), - ], - next_page_token='ghi', - ), - migration_service.ListMigrationSubtasksResponse( - migration_subtasks=[ - migration_entities.MigrationSubtask(), - migration_entities.MigrationSubtask(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_migration_subtasks(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, migration_entities.MigrationSubtask) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_migration_subtasks_async_pages(): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_subtasks), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - migration_service.ListMigrationSubtasksResponse( - migration_subtasks=[ - migration_entities.MigrationSubtask(), - migration_entities.MigrationSubtask(), - migration_entities.MigrationSubtask(), - ], - next_page_token='abc', - ), - migration_service.ListMigrationSubtasksResponse( - migration_subtasks=[], - next_page_token='def', - ), - migration_service.ListMigrationSubtasksResponse( - migration_subtasks=[ - migration_entities.MigrationSubtask(), - ], - next_page_token='ghi', - ), - migration_service.ListMigrationSubtasksResponse( - migration_subtasks=[ - migration_entities.MigrationSubtask(), - migration_entities.MigrationSubtask(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_migration_subtasks(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.MigrationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.MigrationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = MigrationServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.MigrationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = MigrationServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = MigrationServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.MigrationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = MigrationServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.MigrationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = MigrationServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. 
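
``test_credentials_transport_error`` above pins down the constructor's argument validation: a prebuilt transport already carries its own credentials, so combining it with ``credentials``, ``credentials_file``, ``api_key``, or ``scopes`` must raise. A hedged sketch of that guard (``make_client`` is a hypothetical stand-in, not the generated constructor):

.. code-block:: python

    import pytest  # same test dependency the surrounding module uses

    def make_client(transport=None, credentials=None, credentials_file=None, api_key=None):
        """Hypothetical constructor guard mirroring the ValueError cases above."""
        if transport is not None and (credentials or credentials_file or api_key):
            raise ValueError("transport already carries credentials; pass only one")
        if api_key and credentials:
            raise ValueError("api_key and credentials are mutually exclusive")
        return object()

    with pytest.raises(ValueError):
        make_client(transport=object(), credentials=object())
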
- transport = transports.MigrationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.MigrationServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.MigrationServiceGrpcTransport, - transports.MigrationServiceGrpcAsyncIOTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = MigrationServiceClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_migration_workflow_empty_call_grpc(): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_migration_workflow), - '__call__') as call: - call.return_value = migration_entities.MigrationWorkflow() - client.create_migration_workflow(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = migration_service.CreateMigrationWorkflowRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_migration_workflow_empty_call_grpc(): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_migration_workflow), - '__call__') as call: - call.return_value = migration_entities.MigrationWorkflow() - client.get_migration_workflow(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = migration_service.GetMigrationWorkflowRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_migration_workflows_empty_call_grpc(): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_workflows), - '__call__') as call: - call.return_value = migration_service.ListMigrationWorkflowsResponse() - client.list_migration_workflows(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = migration_service.ListMigrationWorkflowsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_migration_workflow_empty_call_grpc(): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_migration_workflow), - '__call__') as call: - call.return_value = None - client.delete_migration_workflow(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = migration_service.DeleteMigrationWorkflowRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_start_migration_workflow_empty_call_grpc(): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.start_migration_workflow), - '__call__') as call: - call.return_value = None - client.start_migration_workflow(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = migration_service.StartMigrationWorkflowRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_migration_subtask_empty_call_grpc(): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_migration_subtask), - '__call__') as call: - call.return_value = migration_entities.MigrationSubtask() - client.get_migration_subtask(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = migration_service.GetMigrationSubtaskRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_migration_subtasks_empty_call_grpc(): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_subtasks), - '__call__') as call: - call.return_value = migration_service.ListMigrationSubtasksResponse() - client.list_migration_subtasks(request=None) - - # Establish that the underlying stub method was called. 
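
The ``*_empty_call_grpc`` tests above all assert the same coercion: calling a method with ``request=None`` and no flattened fields still sends a default-constructed request message. Sketched in isolation (with a hypothetical request class):

.. code-block:: python

    def coerce_request(request, request_type):
        """request=None becomes a default-constructed request message."""
        return request_type() if request is None else request

    class FakeRequest:
        def __eq__(self, other):
            return isinstance(other, FakeRequest)

    assert coerce_request(None, FakeRequest) == FakeRequest()
    assert coerce_request("explicit", FakeRequest) == "explicit"
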
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = migration_service.ListMigrationSubtasksRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = MigrationServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_migration_workflow_empty_call_grpc_asyncio(): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_migration_workflow), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(migration_entities.MigrationWorkflow( - name='name_value', - display_name='display_name_value', - state=migration_entities.MigrationWorkflow.State.DRAFT, - )) - await client.create_migration_workflow(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = migration_service.CreateMigrationWorkflowRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_migration_workflow_empty_call_grpc_asyncio(): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_migration_workflow), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(migration_entities.MigrationWorkflow( - name='name_value', - display_name='display_name_value', - state=migration_entities.MigrationWorkflow.State.DRAFT, - )) - await client.get_migration_workflow(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = migration_service.GetMigrationWorkflowRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_migration_workflows_empty_call_grpc_asyncio(): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_workflows), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(migration_service.ListMigrationWorkflowsResponse( - next_page_token='next_page_token_value', - )) - await client.list_migration_workflows(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = migration_service.ListMigrationWorkflowsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_migration_workflow_empty_call_grpc_asyncio(): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_migration_workflow), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_migration_workflow(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = migration_service.DeleteMigrationWorkflowRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_start_migration_workflow_empty_call_grpc_asyncio(): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.start_migration_workflow), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.start_migration_workflow(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = migration_service.StartMigrationWorkflowRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_migration_subtask_empty_call_grpc_asyncio(): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_migration_subtask), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(migration_entities.MigrationSubtask( - name='name_value', - task_id='task_id_value', - type_='type__value', - state=migration_entities.MigrationSubtask.State.ACTIVE, - resource_error_count=2169, - )) - await client.get_migration_subtask(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = migration_service.GetMigrationSubtaskRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_migration_subtasks_empty_call_grpc_asyncio(): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.list_migration_subtasks), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(migration_service.ListMigrationSubtasksResponse( - next_page_token='next_page_token_value', - )) - await client.list_migration_subtasks(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = migration_service.ListMigrationSubtasksRequest() - - assert args[0] == request_msg - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.MigrationServiceGrpcTransport, - ) - -def test_migration_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.MigrationServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_migration_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.bigquery_migration_v2.services.migration_service.transports.MigrationServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.MigrationServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'create_migration_workflow', - 'get_migration_workflow', - 'list_migration_workflows', - 'delete_migration_workflow', - 'start_migration_workflow', - 'get_migration_subtask', - 'list_migration_subtasks', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_migration_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.bigquery_migration_v2.services.migration_service.transports.MigrationServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.MigrationServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_migration_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. 
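
``test_migration_service_base_transport`` above verifies that the abstract transport raises ``NotImplementedError`` for every RPC until a concrete gRPC or gRPC-asyncio transport overrides it. A minimal sketch of that contract, assuming only ``pytest`` as in the surrounding module:

.. code-block:: python

    import pytest

    class BaseTransport:
        """Sketch of the abstract transport contract the test asserts."""

        @property
        def create_migration_workflow(self):
            raise NotImplementedError()

        def close(self):
            raise NotImplementedError()

    with pytest.raises(NotImplementedError):
        BaseTransport().create_migration_workflow
    with pytest.raises(NotImplementedError):
        BaseTransport().close()
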
- with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.bigquery_migration_v2.services.migration_service.transports.MigrationServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.MigrationServiceTransport() - adc.assert_called_once() - - -def test_migration_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - MigrationServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.MigrationServiceGrpcTransport, - transports.MigrationServiceGrpcAsyncIOTransport, - ], -) -def test_migration_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.MigrationServiceGrpcTransport, - transports.MigrationServiceGrpcAsyncIOTransport, - ], -) -def test_migration_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.MigrationServiceGrpcTransport, grpc_helpers), - (transports.MigrationServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_migration_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
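
The ADC tests above mock ``google.auth.default`` to confirm the credential lookup order. As a hedged sketch of that precedence (a hypothetical helper; the real resolution lives inside the transport base class): explicit credentials win, then a credentials file, then Application Default Credentials.

.. code-block:: python

    import google.auth

    def resolve_credentials(credentials=None, credentials_file=None, scopes=None):
        """Hypothetical helper mirroring the precedence the tests mock out."""
        if credentials is not None:
            return credentials
        if credentials_file is not None:
            creds, _ = google.auth.load_credentials_from_file(
                credentials_file, scopes=scopes)
            return creds
        creds, _ = google.auth.default(scopes=scopes)  # ADC lookup
        return creds
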
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "bigquerymigration.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="bigquerymigration.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.MigrationServiceGrpcTransport, transports.MigrationServiceGrpcAsyncIOTransport]) -def test_migration_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) -def test_migration_service_host_no_port(transport_name): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='bigquerymigration.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'bigquerymigration.googleapis.com:443' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) -def test_migration_service_host_with_port(transport_name): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='bigquerymigration.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'bigquerymigration.googleapis.com:8000' - ) - -def test_migration_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.MigrationServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials is None - - -def test_migration_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.MigrationServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials is None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.MigrationServiceGrpcTransport, transports.MigrationServiceGrpcAsyncIOTransport]) -def test_migration_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.MigrationServiceGrpcTransport, transports.MigrationServiceGrpcAsyncIOTransport]) -def test_migration_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_migration_subtask_path(): - project = "squid" - location = "clam" - workflow = "whelk" - subtask = "octopus" - expected = "projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}".format(project=project, location=location, workflow=workflow, subtask=subtask, ) - actual = MigrationServiceClient.migration_subtask_path(project, location, workflow, subtask) - assert expected == actual - - -def test_parse_migration_subtask_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - "workflow": "cuttlefish", - "subtask": "mussel", - } - path = MigrationServiceClient.migration_subtask_path(**expected) - - # Check that the path construction is reversible. - actual = MigrationServiceClient.parse_migration_subtask_path(path) - assert expected == actual - -def test_migration_workflow_path(): - project = "winkle" - location = "nautilus" - workflow = "scallop" - expected = "projects/{project}/locations/{location}/workflows/{workflow}".format(project=project, location=location, workflow=workflow, ) - actual = MigrationServiceClient.migration_workflow_path(project, location, workflow) - assert expected == actual - - -def test_parse_migration_workflow_path(): - expected = { - "project": "abalone", - "location": "squid", - "workflow": "clam", - } - path = MigrationServiceClient.migration_workflow_path(**expected) - - # Check that the path construction is reversible. - actual = MigrationServiceClient.parse_migration_workflow_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "whelk" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = MigrationServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "octopus", - } - path = MigrationServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. 
- actual = MigrationServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "oyster" - expected = "folders/{folder}".format(folder=folder, ) - actual = MigrationServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nudibranch", - } - path = MigrationServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = MigrationServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "cuttlefish" - expected = "organizations/{organization}".format(organization=organization, ) - actual = MigrationServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "mussel", - } - path = MigrationServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = MigrationServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "winkle" - expected = "projects/{project}".format(project=project, ) - actual = MigrationServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "nautilus", - } - path = MigrationServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = MigrationServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "scallop" - location = "abalone" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = MigrationServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "squid", - "location": "clam", - } - path = MigrationServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = MigrationServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.MigrationServiceTransport, '_prep_wrapped_messages') as prep: - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.MigrationServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = MigrationServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_transport_close_grpc(): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'grpc', - ] - for transport in transports: - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. - with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (MigrationServiceClient, transports.MigrationServiceGrpcTransport), - (MigrationServiceAsyncClient, transports.MigrationServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/.coveragerc b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/.coveragerc deleted file mode 100644 index c16fcf7df5d3..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/bigquery_migration/__init__.py - google/cloud/bigquery_migration/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git 
a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/.flake8 b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/.flake8 deleted file mode 100644 index 29227d4cf419..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. - **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/MANIFEST.in b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/MANIFEST.in deleted file mode 100644 index 34c2eb3c2daa..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/bigquery_migration *.py -recursive-include google/cloud/bigquery_migration_v2alpha *.py diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/README.rst b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/README.rst deleted file mode 100644 index 0df3dd4690ca..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/README.rst +++ /dev/null @@ -1,143 +0,0 @@ -Python Client for Google Cloud Bigquery Migration API -====================================================== - -Quick Start ----------- - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Bigquery Migration API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv <your-env> - source <your-env>/bin/activate - <your-env>/bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. code-block:: console - - python3 -m venv <your-env> - <your-env>\Scripts\activate - <your-env>\Scripts\pip.exe install \path\to\library - - -Logging ------- - -This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes.
-Note the following: - -#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging. -#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**. -#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below. - - -Simple, environment-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google -logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged -messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging -event. - -A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log. - -- Valid logging scopes: :code:`google`, :code:`google.cloud.bigquery_migration_v2alpha`, :code:`google.api`, :code:`google.auth`, etc. -- Invalid logging scopes: :code:`foo`, :code:`123`, etc. - -**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers. - - -Examples -^^^^^^^^ - -- Enabling the default handler for all Google-based loggers - -.. code-block:: console - - export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google - -- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`): - -.. code-block:: console - - export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1 - - -Advanced, code-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -You can also configure a valid logging scope using Python's standard `logging` mechanism. - - -Examples -^^^^^^^^ - -- Configuring a handler for all Google-based loggers - -.. code-block:: python - - import logging - - from google.cloud import bigquery_migration_v2alpha - - base_logger = logging.getLogger("google") - base_logger.addHandler(logging.StreamHandler()) - base_logger.setLevel(logging.DEBUG) - -- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`): - -.. code-block:: python - - import logging - - from google.cloud import bigquery_migration_v2alpha - - base_logger = logging.getLogger("google.cloud.library_v1") - base_logger.addHandler(logging.StreamHandler()) - base_logger.setLevel(logging.DEBUG) - - -Logging details -~~~~~~~~~~~~~~~ - -#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root - logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set - :code:`logging.getLogger("google").propagate = True` in your code. -#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for - one library, but decide you need to also set up environment-based logging configuration for another library. - - #.
If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual - if the code-based configuration gets applied first. - -#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get - executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured. - (This is the reason for 2.i. above.) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/docs/_static/custom.css b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/docs/_static/custom.css deleted file mode 100644 index 06423be0b592..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/docs/_static/custom.css +++ /dev/null @@ -1,3 +0,0 @@ -dl.field-list > dt { - min-width: 100px -} diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/docs/bigquery_migration_v2alpha/migration_service.rst b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/docs/bigquery_migration_v2alpha/migration_service.rst deleted file mode 100644 index e676817164a0..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/docs/bigquery_migration_v2alpha/migration_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -MigrationService ---------------------------------- - -.. automodule:: google.cloud.bigquery_migration_v2alpha.services.migration_service - :members: - :inherited-members: - -.. automodule:: google.cloud.bigquery_migration_v2alpha.services.migration_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/docs/bigquery_migration_v2alpha/services_.rst b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/docs/bigquery_migration_v2alpha/services_.rst deleted file mode 100644 index 717bf41602e1..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/docs/bigquery_migration_v2alpha/services_.rst +++ /dev/null @@ -1,7 +0,0 @@ -Services for Google Cloud Bigquery Migration v2alpha API -======================================================== -.. toctree:: - :maxdepth: 2 - - migration_service - sql_translation_service diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/docs/bigquery_migration_v2alpha/sql_translation_service.rst b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/docs/bigquery_migration_v2alpha/sql_translation_service.rst deleted file mode 100644 index 4d89dd21f8e2..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/docs/bigquery_migration_v2alpha/sql_translation_service.rst +++ /dev/null @@ -1,6 +0,0 @@ -SqlTranslationService ---------------------------------------- - -.. automodule:: google.cloud.bigquery_migration_v2alpha.services.sql_translation_service - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/docs/bigquery_migration_v2alpha/types_.rst b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/docs/bigquery_migration_v2alpha/types_.rst deleted file mode 100644 index 0260c71e2abb..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/docs/bigquery_migration_v2alpha/types_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Bigquery Migration v2alpha API -===================================================== - -..
automodule:: google.cloud.bigquery_migration_v2alpha.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/docs/conf.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/docs/conf.py deleted file mode 100644 index 88db521b0d7e..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-bigquery-migration documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGELOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffixes as a list of strings: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = u"google-cloud-bigquery-migration" -copyright = u"2023, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx.
Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = 'en' - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. 
-# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-bigquery-migration-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-bigquery-migration.tex", - u"google-cloud-bigquery-migration Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. 
-# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-bigquery-migration", - u"Google Cloud Bigquery Migration Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-bigquery-migration", - u"google-cloud-bigquery-migration Documentation", - author, - "google-cloud-bigquery-migration", - "GAPIC library for Google Cloud Bigquery Migration API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/docs/index.rst b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/docs/index.rst deleted file mode 100644 index 8e9ae1b00ea9..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. 
toctree:: - :maxdepth: 2 - - bigquery_migration_v2alpha/services_ - bigquery_migration_v2alpha/types_ diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration/__init__.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration/__init__.py deleted file mode 100644 index c73d69ad3681..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration/__init__.py +++ /dev/null @@ -1,99 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.bigquery_migration import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.bigquery_migration_v2alpha.services.migration_service.client import MigrationServiceClient -from google.cloud.bigquery_migration_v2alpha.services.migration_service.async_client import MigrationServiceAsyncClient -from google.cloud.bigquery_migration_v2alpha.services.sql_translation_service.client import SqlTranslationServiceClient -from google.cloud.bigquery_migration_v2alpha.services.sql_translation_service.async_client import SqlTranslationServiceAsyncClient - -from google.cloud.bigquery_migration_v2alpha.types.assessment_task import AssessmentOrchestrationResultDetails -from google.cloud.bigquery_migration_v2alpha.types.assessment_task import AssessmentTaskDetails -from google.cloud.bigquery_migration_v2alpha.types.migration_entities import MigrationSubtask -from google.cloud.bigquery_migration_v2alpha.types.migration_entities import MigrationTask -from google.cloud.bigquery_migration_v2alpha.types.migration_entities import MigrationTaskOrchestrationResult -from google.cloud.bigquery_migration_v2alpha.types.migration_entities import MigrationWorkflow -from google.cloud.bigquery_migration_v2alpha.types.migration_error_details import ErrorDetail -from google.cloud.bigquery_migration_v2alpha.types.migration_error_details import ErrorLocation -from google.cloud.bigquery_migration_v2alpha.types.migration_error_details import ResourceErrorDetail -from google.cloud.bigquery_migration_v2alpha.types.migration_metrics import Point -from google.cloud.bigquery_migration_v2alpha.types.migration_metrics import TimeInterval -from google.cloud.bigquery_migration_v2alpha.types.migration_metrics import TimeSeries -from google.cloud.bigquery_migration_v2alpha.types.migration_metrics import TypedValue -from google.cloud.bigquery_migration_v2alpha.types.migration_service import CreateMigrationWorkflowRequest -from google.cloud.bigquery_migration_v2alpha.types.migration_service import DeleteMigrationWorkflowRequest -from google.cloud.bigquery_migration_v2alpha.types.migration_service import GetMigrationSubtaskRequest -from google.cloud.bigquery_migration_v2alpha.types.migration_service import GetMigrationWorkflowRequest -from google.cloud.bigquery_migration_v2alpha.types.migration_service import ListMigrationSubtasksRequest -from 
google.cloud.bigquery_migration_v2alpha.types.migration_service import ListMigrationSubtasksResponse -from google.cloud.bigquery_migration_v2alpha.types.migration_service import ListMigrationWorkflowsRequest -from google.cloud.bigquery_migration_v2alpha.types.migration_service import ListMigrationWorkflowsResponse -from google.cloud.bigquery_migration_v2alpha.types.migration_service import StartMigrationWorkflowRequest -from google.cloud.bigquery_migration_v2alpha.types.translation_service import SqlTranslationError -from google.cloud.bigquery_migration_v2alpha.types.translation_service import SqlTranslationErrorDetail -from google.cloud.bigquery_migration_v2alpha.types.translation_service import SqlTranslationWarning -from google.cloud.bigquery_migration_v2alpha.types.translation_service import TranslateQueryRequest -from google.cloud.bigquery_migration_v2alpha.types.translation_service import TranslateQueryResponse -from google.cloud.bigquery_migration_v2alpha.types.translation_task import BteqOptions -from google.cloud.bigquery_migration_v2alpha.types.translation_task import DatasetReference -from google.cloud.bigquery_migration_v2alpha.types.translation_task import Filter -from google.cloud.bigquery_migration_v2alpha.types.translation_task import IdentifierSettings -from google.cloud.bigquery_migration_v2alpha.types.translation_task import TeradataOptions -from google.cloud.bigquery_migration_v2alpha.types.translation_task import TranslationFileMapping -from google.cloud.bigquery_migration_v2alpha.types.translation_task import TranslationTaskDetails - -__all__ = ('MigrationServiceClient', - 'MigrationServiceAsyncClient', - 'SqlTranslationServiceClient', - 'SqlTranslationServiceAsyncClient', - 'AssessmentOrchestrationResultDetails', - 'AssessmentTaskDetails', - 'MigrationSubtask', - 'MigrationTask', - 'MigrationTaskOrchestrationResult', - 'MigrationWorkflow', - 'ErrorDetail', - 'ErrorLocation', - 'ResourceErrorDetail', - 'Point', - 'TimeInterval', - 'TimeSeries', - 'TypedValue', - 'CreateMigrationWorkflowRequest', - 'DeleteMigrationWorkflowRequest', - 'GetMigrationSubtaskRequest', - 'GetMigrationWorkflowRequest', - 'ListMigrationSubtasksRequest', - 'ListMigrationSubtasksResponse', - 'ListMigrationWorkflowsRequest', - 'ListMigrationWorkflowsResponse', - 'StartMigrationWorkflowRequest', - 'SqlTranslationError', - 'SqlTranslationErrorDetail', - 'SqlTranslationWarning', - 'TranslateQueryRequest', - 'TranslateQueryResponse', - 'BteqOptions', - 'DatasetReference', - 'Filter', - 'IdentifierSettings', - 'TeradataOptions', - 'TranslationFileMapping', - 'TranslationTaskDetails', -) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration/gapic_version.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration/py.typed b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration/py.typed deleted file mode 100644 index fcbd0b586dd2..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-bigquery-migration package uses inline types. diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/__init__.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/__init__.py deleted file mode 100644 index 96265b3e5fd3..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/__init__.py +++ /dev/null @@ -1,100 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.bigquery_migration_v2alpha import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.migration_service import MigrationServiceClient -from .services.migration_service import MigrationServiceAsyncClient -from .services.sql_translation_service import SqlTranslationServiceClient -from .services.sql_translation_service import SqlTranslationServiceAsyncClient - -from .types.assessment_task import AssessmentOrchestrationResultDetails -from .types.assessment_task import AssessmentTaskDetails -from .types.migration_entities import MigrationSubtask -from .types.migration_entities import MigrationTask -from .types.migration_entities import MigrationTaskOrchestrationResult -from .types.migration_entities import MigrationWorkflow -from .types.migration_error_details import ErrorDetail -from .types.migration_error_details import ErrorLocation -from .types.migration_error_details import ResourceErrorDetail -from .types.migration_metrics import Point -from .types.migration_metrics import TimeInterval -from .types.migration_metrics import TimeSeries -from .types.migration_metrics import TypedValue -from .types.migration_service import CreateMigrationWorkflowRequest -from .types.migration_service import DeleteMigrationWorkflowRequest -from .types.migration_service import GetMigrationSubtaskRequest -from .types.migration_service import GetMigrationWorkflowRequest -from .types.migration_service import ListMigrationSubtasksRequest -from .types.migration_service import ListMigrationSubtasksResponse -from .types.migration_service import ListMigrationWorkflowsRequest -from .types.migration_service import ListMigrationWorkflowsResponse -from .types.migration_service import StartMigrationWorkflowRequest -from .types.translation_service import SqlTranslationError -from 
.types.translation_service import SqlTranslationErrorDetail -from .types.translation_service import SqlTranslationWarning -from .types.translation_service import TranslateQueryRequest -from .types.translation_service import TranslateQueryResponse -from .types.translation_task import BteqOptions -from .types.translation_task import DatasetReference -from .types.translation_task import Filter -from .types.translation_task import IdentifierSettings -from .types.translation_task import TeradataOptions -from .types.translation_task import TranslationFileMapping -from .types.translation_task import TranslationTaskDetails - -__all__ = ( - 'MigrationServiceAsyncClient', - 'SqlTranslationServiceAsyncClient', -'AssessmentOrchestrationResultDetails', -'AssessmentTaskDetails', -'BteqOptions', -'CreateMigrationWorkflowRequest', -'DatasetReference', -'DeleteMigrationWorkflowRequest', -'ErrorDetail', -'ErrorLocation', -'Filter', -'GetMigrationSubtaskRequest', -'GetMigrationWorkflowRequest', -'IdentifierSettings', -'ListMigrationSubtasksRequest', -'ListMigrationSubtasksResponse', -'ListMigrationWorkflowsRequest', -'ListMigrationWorkflowsResponse', -'MigrationServiceClient', -'MigrationSubtask', -'MigrationTask', -'MigrationTaskOrchestrationResult', -'MigrationWorkflow', -'Point', -'ResourceErrorDetail', -'SqlTranslationError', -'SqlTranslationErrorDetail', -'SqlTranslationServiceClient', -'SqlTranslationWarning', -'StartMigrationWorkflowRequest', -'TeradataOptions', -'TimeInterval', -'TimeSeries', -'TranslateQueryRequest', -'TranslateQueryResponse', -'TranslationFileMapping', -'TranslationTaskDetails', -'TypedValue', -) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/gapic_metadata.json b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/gapic_metadata.json deleted file mode 100644 index 9eb915ecf56d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/gapic_metadata.json +++ /dev/null @@ -1,117 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.bigquery_migration_v2alpha", - "protoPackage": "google.cloud.bigquery.migration.v2alpha", - "schema": "1.0", - "services": { - "MigrationService": { - "clients": { - "grpc": { - "libraryClient": "MigrationServiceClient", - "rpcs": { - "CreateMigrationWorkflow": { - "methods": [ - "create_migration_workflow" - ] - }, - "DeleteMigrationWorkflow": { - "methods": [ - "delete_migration_workflow" - ] - }, - "GetMigrationSubtask": { - "methods": [ - "get_migration_subtask" - ] - }, - "GetMigrationWorkflow": { - "methods": [ - "get_migration_workflow" - ] - }, - "ListMigrationSubtasks": { - "methods": [ - "list_migration_subtasks" - ] - }, - "ListMigrationWorkflows": { - "methods": [ - "list_migration_workflows" - ] - }, - "StartMigrationWorkflow": { - "methods": [ - "start_migration_workflow" - ] - } - } - }, - "grpc-async": { - "libraryClient": "MigrationServiceAsyncClient", - "rpcs": { - "CreateMigrationWorkflow": { - "methods": [ - "create_migration_workflow" - ] - }, - "DeleteMigrationWorkflow": { - "methods": [ - "delete_migration_workflow" - ] - }, - "GetMigrationSubtask": { - "methods": [ - "get_migration_subtask" - ] - }, - "GetMigrationWorkflow": { - "methods": [ - "get_migration_workflow" - ] - }, - "ListMigrationSubtasks": { - "methods": [ - "list_migration_subtasks" - ] - }, - 
"ListMigrationWorkflows": { - "methods": [ - "list_migration_workflows" - ] - }, - "StartMigrationWorkflow": { - "methods": [ - "start_migration_workflow" - ] - } - } - } - } - }, - "SqlTranslationService": { - "clients": { - "grpc": { - "libraryClient": "SqlTranslationServiceClient", - "rpcs": { - "TranslateQuery": { - "methods": [ - "translate_query" - ] - } - } - }, - "grpc-async": { - "libraryClient": "SqlTranslationServiceAsyncClient", - "rpcs": { - "TranslateQuery": { - "methods": [ - "translate_query" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/gapic_version.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/py.typed b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/py.typed deleted file mode 100644 index fcbd0b586dd2..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-bigquery-migration package uses inline types. diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/__init__.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/__init__.py deleted file mode 100644 index 8f6cf068242c..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/migration_service/__init__.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/migration_service/__init__.py deleted file mode 100644 index 9a438d91f688..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/migration_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import MigrationServiceClient -from .async_client import MigrationServiceAsyncClient - -__all__ = ( - 'MigrationServiceClient', - 'MigrationServiceAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/migration_service/async_client.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/migration_service/async_client.py deleted file mode 100644 index 2f10f6778218..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/migration_service/async_client.py +++ /dev/null @@ -1,1039 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.bigquery_migration_v2alpha import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.cloud.bigquery_migration_v2alpha.services.migration_service import pagers -from google.cloud.bigquery_migration_v2alpha.types import migration_entities -from google.cloud.bigquery_migration_v2alpha.types import migration_error_details -from google.cloud.bigquery_migration_v2alpha.types import migration_metrics -from google.cloud.bigquery_migration_v2alpha.types import migration_service -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import error_details_pb2 # type: ignore -from .transports.base import MigrationServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import MigrationServiceGrpcAsyncIOTransport -from .client import MigrationServiceClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -class MigrationServiceAsyncClient: - """Service to handle EDW migrations.""" - - _client: MigrationServiceClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
- DEFAULT_ENDPOINT = MigrationServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = MigrationServiceClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = MigrationServiceClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = MigrationServiceClient._DEFAULT_UNIVERSE - - migration_subtask_path = staticmethod(MigrationServiceClient.migration_subtask_path) - parse_migration_subtask_path = staticmethod(MigrationServiceClient.parse_migration_subtask_path) - migration_workflow_path = staticmethod(MigrationServiceClient.migration_workflow_path) - parse_migration_workflow_path = staticmethod(MigrationServiceClient.parse_migration_workflow_path) - common_billing_account_path = staticmethod(MigrationServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(MigrationServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(MigrationServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(MigrationServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(MigrationServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(MigrationServiceClient.parse_common_organization_path) - common_project_path = staticmethod(MigrationServiceClient.common_project_path) - parse_common_project_path = staticmethod(MigrationServiceClient.parse_common_project_path) - common_location_path = staticmethod(MigrationServiceClient.common_location_path) - parse_common_location_path = staticmethod(MigrationServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - MigrationServiceAsyncClient: The constructed client. - """ - return MigrationServiceClient.from_service_account_info.__func__(MigrationServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - MigrationServiceAsyncClient: The constructed client. - """ - return MigrationServiceClient.from_service_account_file.__func__(MigrationServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` is provided, use the provided one.
- (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return MigrationServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> MigrationServiceTransport: - """Returns the transport used by the client instance. - - Returns: - MigrationServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = MigrationServiceClient.get_transport_class - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, MigrationServiceTransport, Callable[..., MigrationServiceTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the migration service async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,MigrationServiceTransport,Callable[..., MigrationServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the MigrationServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which can have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2.
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = MigrationServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.bigquery.migration_v2alpha.MigrationServiceAsyncClient`.", - extra = { - "serviceName": "google.cloud.bigquery.migration.v2alpha.MigrationService", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { - "serviceName": "google.cloud.bigquery.migration.v2alpha.MigrationService", - "credentialsType": None, - } - ) - - async def create_migration_workflow(self, - request: Optional[Union[migration_service.CreateMigrationWorkflowRequest, dict]] = None, - *, - parent: Optional[str] = None, - migration_workflow: Optional[migration_entities.MigrationWorkflow] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> migration_entities.MigrationWorkflow: - r"""Creates a migration workflow. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_migration_v2alpha - - async def sample_create_migration_workflow(): - # Create a client - client = bigquery_migration_v2alpha.MigrationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_migration_v2alpha.CreateMigrationWorkflowRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_migration_workflow(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_migration_v2alpha.types.CreateMigrationWorkflowRequest, dict]]): - The request object. Request to create a migration - workflow resource.
- parent (:class:`str`): - Required. The name of the project to which this - migration workflow belongs. Example: - ``projects/foo/locations/bar`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - migration_workflow (:class:`google.cloud.bigquery_migration_v2alpha.types.MigrationWorkflow`): - Required. The migration workflow to - create. - - This corresponds to the ``migration_workflow`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_migration_v2alpha.types.MigrationWorkflow: - A migration workflow which specifies - what needs to be done for an EDW - migration. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, migration_workflow] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, migration_service.CreateMigrationWorkflowRequest): - request = migration_service.CreateMigrationWorkflowRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if migration_workflow is not None: - request.migration_workflow = migration_workflow - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_migration_workflow] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_migration_workflow(self, - request: Optional[Union[migration_service.GetMigrationWorkflowRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> migration_entities.MigrationWorkflow: - r"""Gets a previously created migration workflow. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_migration_v2alpha - - async def sample_get_migration_workflow(): - # Create a client - client = bigquery_migration_v2alpha.MigrationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_migration_v2alpha.GetMigrationWorkflowRequest( - name="name_value", - ) - - # Make the request - response = await client.get_migration_workflow(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_migration_v2alpha.types.GetMigrationWorkflowRequest, dict]]): - The request object. A request to get a previously created - migration workflow. - name (:class:`str`): - Required. The unique identifier for the migration - workflow. Example: - ``projects/123/locations/us/workflows/1234`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_migration_v2alpha.types.MigrationWorkflow: - A migration workflow which specifies - what needs to be done for an EDW - migration. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, migration_service.GetMigrationWorkflowRequest): - request = migration_service.GetMigrationWorkflowRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_migration_workflow] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def list_migration_workflows(self, - request: Optional[Union[migration_service.ListMigrationWorkflowsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListMigrationWorkflowsAsyncPager: - r"""Lists previously created migration workflows. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_migration_v2alpha - - async def sample_list_migration_workflows(): - # Create a client - client = bigquery_migration_v2alpha.MigrationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_migration_v2alpha.ListMigrationWorkflowsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_migration_workflows(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_migration_v2alpha.types.ListMigrationWorkflowsRequest, dict]]): - The request object. A request to list previously created - migration workflows. - parent (:class:`str`): - Required. The project and location of the migration - workflows to list. Example: - ``projects/123/locations/us`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_migration_v2alpha.services.migration_service.pagers.ListMigrationWorkflowsAsyncPager: - Response object for a ListMigrationWorkflows call. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, migration_service.ListMigrationWorkflowsRequest): - request = migration_service.ListMigrationWorkflowsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these.
- if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_migration_workflows] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListMigrationWorkflowsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_migration_workflow(self, - request: Optional[Union[migration_service.DeleteMigrationWorkflowRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a migration workflow by name. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_migration_v2alpha - - async def sample_delete_migration_workflow(): - # Create a client - client = bigquery_migration_v2alpha.MigrationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_migration_v2alpha.DeleteMigrationWorkflowRequest( - name="name_value", - ) - - # Make the request - await client.delete_migration_workflow(request=request) - - Args: - request (Optional[Union[google.cloud.bigquery_migration_v2alpha.types.DeleteMigrationWorkflowRequest, dict]]): - The request object. A request to delete a previously - created migration workflow. - name (:class:`str`): - Required. The unique identifier for the migration - workflow. Example: - ``projects/123/locations/us/workflows/1234`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, migration_service.DeleteMigrationWorkflowRequest): - request = migration_service.DeleteMigrationWorkflowRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_migration_workflow] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def start_migration_workflow(self, - request: Optional[Union[migration_service.StartMigrationWorkflowRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts a previously created migration workflow. I.e., - the state transitions from DRAFT to RUNNING. This is a - no-op if the state is already RUNNING. An error will be - signaled if the state is anything other than DRAFT or - RUNNING. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_migration_v2alpha - - async def sample_start_migration_workflow(): - # Create a client - client = bigquery_migration_v2alpha.MigrationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_migration_v2alpha.StartMigrationWorkflowRequest( - name="name_value", - ) - - # Make the request - await client.start_migration_workflow(request=request) - - Args: - request (Optional[Union[google.cloud.bigquery_migration_v2alpha.types.StartMigrationWorkflowRequest, dict]]): - The request object. A request to start a previously - created migration workflow. - name (:class:`str`): - Required. The unique identifier for the migration - workflow. Example: - ``projects/123/locations/us/workflows/1234`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, migration_service.StartMigrationWorkflowRequest): - request = migration_service.StartMigrationWorkflowRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.start_migration_workflow] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def get_migration_subtask(self, - request: Optional[Union[migration_service.GetMigrationSubtaskRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> migration_entities.MigrationSubtask: - r"""Gets a previously created migration subtask. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_migration_v2alpha - - async def sample_get_migration_subtask(): - # Create a client - client = bigquery_migration_v2alpha.MigrationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_migration_v2alpha.GetMigrationSubtaskRequest( - name="name_value", - ) - - # Make the request - response = await client.get_migration_subtask(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_migration_v2alpha.types.GetMigrationSubtaskRequest, dict]]): - The request object. A request to get a previously created - migration subtask. - name (:class:`str`): - Required. The unique identifier for the migration - subtask. Example: - ``projects/123/locations/us/workflows/1234/subtasks/543`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried.
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_migration_v2alpha.types.MigrationSubtask: - A subtask for a migration which - carries details about the configuration - of the subtask. The content of the - details should not matter to the end - user, but is a contract between the - subtask creator and subtask worker. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, migration_service.GetMigrationSubtaskRequest): - request = migration_service.GetMigrationSubtaskRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_migration_subtask] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_migration_subtasks(self, - request: Optional[Union[migration_service.ListMigrationSubtasksRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListMigrationSubtasksAsyncPager: - r"""Lists previously created migration subtasks. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_migration_v2alpha - - async def sample_list_migration_subtasks(): - # Create a client - client = bigquery_migration_v2alpha.MigrationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_migration_v2alpha.ListMigrationSubtasksRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_migration_subtasks(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_migration_v2alpha.types.ListMigrationSubtasksRequest, dict]]): - The request object. A request to list previously created - migration subtasks. - parent (:class:`str`): - Required. The migration task of the subtasks to list. - Example: ``projects/123/locations/us/workflows/1234`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_migration_v2alpha.services.migration_service.pagers.ListMigrationSubtasksAsyncPager: - Response object for a ListMigrationSubtasks call. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, migration_service.ListMigrationSubtasksRequest): - request = migration_service.ListMigrationSubtasksRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_migration_subtasks] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. 
- response = pagers.ListMigrationSubtasksAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def __aenter__(self) -> "MigrationServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "MigrationServiceAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py deleted file mode 100644 index 6221b8fab10c..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py +++ /dev/null @@ -1,1401 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.bigquery_migration_v2alpha import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.cloud.bigquery_migration_v2alpha.services.migration_service import pagers -from google.cloud.bigquery_migration_v2alpha.types import migration_entities -from google.cloud.bigquery_migration_v2alpha.types import migration_error_details -from google.cloud.bigquery_migration_v2alpha.types import migration_metrics -from google.cloud.bigquery_migration_v2alpha.types import migration_service -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import error_details_pb2 # type: ignore -from .transports.base import 
MigrationServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import MigrationServiceGrpcTransport -from .transports.grpc_asyncio import MigrationServiceGrpcAsyncIOTransport - - -class MigrationServiceClientMeta(type): - """Metaclass for the MigrationService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[MigrationServiceTransport]] - _transport_registry["grpc"] = MigrationServiceGrpcTransport - _transport_registry["grpc_asyncio"] = MigrationServiceGrpcAsyncIOTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[MigrationServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class MigrationServiceClient(metaclass=MigrationServiceClientMeta): - """Service to handle EDW migrations.""" - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = "bigquerymigration.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - _DEFAULT_ENDPOINT_TEMPLATE = "bigquerymigration.{UNIVERSE_DOMAIN}" - _DEFAULT_UNIVERSE = "googleapis.com" - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - MigrationServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - MigrationServiceClient: The constructed client.
- """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> MigrationServiceTransport: - """Returns the transport used by the client instance. - - Returns: - MigrationServiceTransport: The transport used by the client - instance. - """ - return self._transport - - @staticmethod - def migration_subtask_path(project: str,location: str,workflow: str,subtask: str,) -> str: - """Returns a fully-qualified migration_subtask string.""" - return "projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}".format(project=project, location=location, workflow=workflow, subtask=subtask, ) - - @staticmethod - def parse_migration_subtask_path(path: str) -> Dict[str,str]: - """Parses a migration_subtask path into its component segments.""" - m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/workflows/(?P<workflow>.+?)/subtasks/(?P<subtask>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def migration_workflow_path(project: str,location: str,workflow: str,) -> str: - """Returns a fully-qualified migration_workflow string.""" - return "projects/{project}/locations/{location}/workflows/{workflow}".format(project=project, location=location, workflow=workflow, ) - - @staticmethod - def parse_migration_workflow_path(path: str) -> Dict[str,str]: - """Parses a migration_workflow path into its component segments.""" - m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/workflows/(?P<workflow>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P<folder>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse an organization path into its component segments.""" - m = re.match(r"^organizations/(?P<organization>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P<project>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project,
location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Deprecated. Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` is provided, use the provided one. - (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. - - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"].
- google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"]. - """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = MigrationServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = MigrationServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = MigrationServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. 
- """ - universe_domain = MigrationServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. - """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, MigrationServiceTransport, Callable[..., MigrationServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the migration service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,MigrationServiceTransport,Callable[..., MigrationServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the MigrationServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. 
Only if this property is not set and
-                ``transport`` was not explicitly provided, the endpoint is
-                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
-                variable, which can have one of the following values:
-                "always" (always use the default mTLS endpoint), "never" (always
-                use the default regular endpoint) and "auto" (auto-switch to the
-                default mTLS endpoint if client certificate is present; this is
-                the default value).
-
-                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
-                is "true", then the ``client_cert_source`` property can be used
-                to provide a client certificate for mTLS transport. If
-                not provided, the default SSL client certificate will be used if
-                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
-                set, no client certificate will be used.
-
-                3. The ``universe_domain`` property can be used to override the
-                default "googleapis.com" universe. Note that the ``api_endpoint``
-                property still takes precedence, and ``universe_domain`` is
-                currently not supported for mTLS.
-
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
-        """
-        self._client_options = client_options
-        if isinstance(self._client_options, dict):
-            self._client_options = client_options_lib.from_dict(self._client_options)
-        if self._client_options is None:
-            self._client_options = client_options_lib.ClientOptions()
-        self._client_options = cast(client_options_lib.ClientOptions, self._client_options)
-
-        universe_domain_opt = getattr(self._client_options, 'universe_domain', None)
-
-        self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = MigrationServiceClient._read_environment_variables()
-        self._client_cert_source = MigrationServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert)
-        self._universe_domain = MigrationServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env)
-        self._api_endpoint = None # updated below, depending on `transport`
-
-        # Initialize the universe domain validation.
-        self._is_universe_domain_valid = False
-
-        if CLIENT_LOGGING_SUPPORTED:  # pragma: NO COVER
-            # Setup logging.
-            client_logging.initialize_logging()
-
-        api_key_value = getattr(self._client_options, "api_key", None)
-        if api_key_value and credentials:
-            raise ValueError("client_options.api_key and credentials are mutually exclusive")
-
-        # Save or instantiate the transport.
-        # Ordinarily, we provide the transport, but allowing a custom transport
-        # instance provides an extensibility point for unusual situations.
-        transport_provided = isinstance(transport, MigrationServiceTransport)
-        if transport_provided:
-            # transport is a MigrationServiceTransport instance.
-            if credentials or self._client_options.credentials_file or api_key_value:
-                raise ValueError("When providing a transport instance, "
-                                 "provide its credentials directly.")
-            if self._client_options.scopes:
-                raise ValueError(
-                    "When providing a transport instance, provide its scopes "
-                    "directly."
- ) - self._transport = cast(MigrationServiceTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - MigrationServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[MigrationServiceTransport], Callable[..., MigrationServiceTransport]] = ( - MigrationServiceClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., MigrationServiceTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.bigquery.migration_v2alpha.MigrationServiceClient`.", - extra = { - "serviceName": "google.cloud.bigquery.migration.v2alpha.MigrationService", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.cloud.bigquery.migration.v2alpha.MigrationService", - "credentialsType": None, - } - ) - - def create_migration_workflow(self, - request: Optional[Union[migration_service.CreateMigrationWorkflowRequest, dict]] = None, - *, - parent: Optional[str] = None, - migration_workflow: Optional[migration_entities.MigrationWorkflow] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> migration_entities.MigrationWorkflow: - r"""Creates a migration workflow. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_migration_v2alpha - - def sample_create_migration_workflow(): - # Create a client - client = bigquery_migration_v2alpha.MigrationServiceClient() - - # Initialize request argument(s) - request = bigquery_migration_v2alpha.CreateMigrationWorkflowRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_migration_workflow(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_migration_v2alpha.types.CreateMigrationWorkflowRequest, dict]): - The request object. Request to create a migration - workflow resource. - parent (str): - Required. The name of the project to which this - migration workflow belongs. Example: - ``projects/foo/locations/bar`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - migration_workflow (google.cloud.bigquery_migration_v2alpha.types.MigrationWorkflow): - Required. The migration workflow to - create. - - This corresponds to the ``migration_workflow`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_migration_v2alpha.types.MigrationWorkflow: - A migration workflow which specifies - what needs to be done for an EDW - migration. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, migration_workflow] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, migration_service.CreateMigrationWorkflowRequest): - request = migration_service.CreateMigrationWorkflowRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if migration_workflow is not None: - request.migration_workflow = migration_workflow - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_migration_workflow] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. 
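-        # ``retry`` and ``timeout`` fall back to the wrapped method's defaults
-        # when left as ``gapic_v1.method.DEFAULT``.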
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_migration_workflow(self, - request: Optional[Union[migration_service.GetMigrationWorkflowRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> migration_entities.MigrationWorkflow: - r"""Gets a previously created migration workflow. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_migration_v2alpha - - def sample_get_migration_workflow(): - # Create a client - client = bigquery_migration_v2alpha.MigrationServiceClient() - - # Initialize request argument(s) - request = bigquery_migration_v2alpha.GetMigrationWorkflowRequest( - name="name_value", - ) - - # Make the request - response = client.get_migration_workflow(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_migration_v2alpha.types.GetMigrationWorkflowRequest, dict]): - The request object. A request to get a previously created - migration workflow. - name (str): - Required. The unique identifier for the migration - workflow. Example: - ``projects/123/locations/us/workflows/1234`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_migration_v2alpha.types.MigrationWorkflow: - A migration workflow which specifies - what needs to be done for an EDW - migration. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, migration_service.GetMigrationWorkflowRequest): - request = migration_service.GetMigrationWorkflowRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
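-        # ``_wrapped_methods`` is populated by the transport's
-        # ``_prep_wrapped_methods``, which attaches each method's default
-        # retry and timeout configuration.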
-        rpc = self._transport._wrapped_methods[self._transport.get_migration_workflow]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def list_migration_workflows(self,
-            request: Optional[Union[migration_service.ListMigrationWorkflowsRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> pagers.ListMigrationWorkflowsPager:
-        r"""Lists previously created migration workflows.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import bigquery_migration_v2alpha
-
-            def sample_list_migration_workflows():
-                # Create a client
-                client = bigquery_migration_v2alpha.MigrationServiceClient()
-
-                # Initialize request argument(s)
-                request = bigquery_migration_v2alpha.ListMigrationWorkflowsRequest(
-                    parent="parent_value",
-                )
-
-                # Make the request
-                page_result = client.list_migration_workflows(request=request)
-
-                # Handle the response
-                for response in page_result:
-                    print(response)
-
-        Args:
-            request (Union[google.cloud.bigquery_migration_v2alpha.types.ListMigrationWorkflowsRequest, dict]):
-                The request object. A request to list previously created
-                migration workflows.
-            parent (str):
-                Required. The project and location of the migration
-                workflows to list. Example:
-                ``projects/123/locations/us``
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.bigquery_migration_v2alpha.services.migration_service.pagers.ListMigrationWorkflowsPager:
-                Response object for a ListMigrationWorkflows call.
-
-                Iterating over this object will yield results and
-                resolve additional pages automatically.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
- flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, migration_service.ListMigrationWorkflowsRequest): - request = migration_service.ListMigrationWorkflowsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_migration_workflows] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListMigrationWorkflowsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_migration_workflow(self, - request: Optional[Union[migration_service.DeleteMigrationWorkflowRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a migration workflow by name. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_migration_v2alpha - - def sample_delete_migration_workflow(): - # Create a client - client = bigquery_migration_v2alpha.MigrationServiceClient() - - # Initialize request argument(s) - request = bigquery_migration_v2alpha.DeleteMigrationWorkflowRequest( - name="name_value", - ) - - # Make the request - client.delete_migration_workflow(request=request) - - Args: - request (Union[google.cloud.bigquery_migration_v2alpha.types.DeleteMigrationWorkflowRequest, dict]): - The request object. A request to delete a previously - created migration workflow. - name (str): - Required. The unique identifier for the migration - workflow. Example: - ``projects/123/locations/us/workflows/1234`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, migration_service.DeleteMigrationWorkflowRequest): - request = migration_service.DeleteMigrationWorkflowRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_migration_workflow] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def start_migration_workflow(self, - request: Optional[Union[migration_service.StartMigrationWorkflowRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Starts a previously created migration workflow. I.e., - the state transitions from DRAFT to RUNNING. This is a - no-op if the state is already RUNNING. An error will be - signaled if the state is anything other than DRAFT or - RUNNING. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_migration_v2alpha - - def sample_start_migration_workflow(): - # Create a client - client = bigquery_migration_v2alpha.MigrationServiceClient() - - # Initialize request argument(s) - request = bigquery_migration_v2alpha.StartMigrationWorkflowRequest( - name="name_value", - ) - - # Make the request - client.start_migration_workflow(request=request) - - Args: - request (Union[google.cloud.bigquery_migration_v2alpha.types.StartMigrationWorkflowRequest, dict]): - The request object. A request to start a previously - created migration workflow. - name (str): - Required. The unique identifier for the migration - workflow. Example: - ``projects/123/locations/us/workflows/1234`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [name]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, migration_service.StartMigrationWorkflowRequest):
-            request = migration_service.StartMigrationWorkflowRequest(request)
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if name is not None:
-            request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.start_migration_workflow]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-    def get_migration_subtask(self,
-            request: Optional[Union[migration_service.GetMigrationSubtaskRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> migration_entities.MigrationSubtask:
-        r"""Gets a previously created migration subtask.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import bigquery_migration_v2alpha
-
-            def sample_get_migration_subtask():
-                # Create a client
-                client = bigquery_migration_v2alpha.MigrationServiceClient()
-
-                # Initialize request argument(s)
-                request = bigquery_migration_v2alpha.GetMigrationSubtaskRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                response = client.get_migration_subtask(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.bigquery_migration_v2alpha.types.GetMigrationSubtaskRequest, dict]):
-                The request object. A request to get a previously created
-                migration subtask.
-            name (str):
-                Required. The unique identifier for the migration
-                subtask. Example:
-                ``projects/123/locations/us/workflows/1234/subtasks/543``
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_migration_v2alpha.types.MigrationSubtask: - A subtask for a migration which - carries details about the configuration - of the subtask. The content of the - details should not matter to the end - user, but is a contract between the - subtask creator and subtask worker. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, migration_service.GetMigrationSubtaskRequest): - request = migration_service.GetMigrationSubtaskRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_migration_subtask] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_migration_subtasks(self, - request: Optional[Union[migration_service.ListMigrationSubtasksRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListMigrationSubtasksPager: - r"""Lists previously created migration subtasks. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_migration_v2alpha - - def sample_list_migration_subtasks(): - # Create a client - client = bigquery_migration_v2alpha.MigrationServiceClient() - - # Initialize request argument(s) - request = bigquery_migration_v2alpha.ListMigrationSubtasksRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_migration_subtasks(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.bigquery_migration_v2alpha.types.ListMigrationSubtasksRequest, dict]): - The request object. A request to list previously created - migration subtasks. - parent (str): - Required. The migration task of the subtasks to list. - Example: ``projects/123/locations/us/workflows/1234`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_migration_v2alpha.services.migration_service.pagers.ListMigrationSubtasksPager: - Response object for a ListMigrationSubtasks call. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, migration_service.ListMigrationSubtasksRequest): - request = migration_service.ListMigrationSubtasksRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_migration_subtasks] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. 
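-        # The pager lazily re-issues ListMigrationSubtasks calls as iteration
-        # crosses page boundaries, reusing the retry, timeout, and metadata
-        # captured here for each follow-up request.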
- response = pagers.ListMigrationSubtasksPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "MigrationServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "MigrationServiceClient", -) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/migration_service/pagers.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/migration_service/pagers.py deleted file mode 100644 index 495608927a51..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/migration_service/pagers.py +++ /dev/null @@ -1,306 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.bigquery_migration_v2alpha.types import migration_entities -from google.cloud.bigquery_migration_v2alpha.types import migration_service - - -class ListMigrationWorkflowsPager: - """A pager for iterating through ``list_migration_workflows`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_migration_v2alpha.types.ListMigrationWorkflowsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``migration_workflows`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListMigrationWorkflows`` requests and continue to iterate - through the ``migration_workflows`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_migration_v2alpha.types.ListMigrationWorkflowsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
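-
-    A minimal usage sketch (the ``parent`` value below is a placeholder):
-
-    .. code-block:: python
-
-        from google.cloud import bigquery_migration_v2alpha
-
-        client = bigquery_migration_v2alpha.MigrationServiceClient()
-        pager = client.list_migration_workflows(parent="projects/123/locations/us")
-
-        # Each iteration may trigger a further ListMigrationWorkflows call.
-        for workflow in pager:
-            print(workflow)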
- """ - def __init__(self, - method: Callable[..., migration_service.ListMigrationWorkflowsResponse], - request: migration_service.ListMigrationWorkflowsRequest, - response: migration_service.ListMigrationWorkflowsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_migration_v2alpha.types.ListMigrationWorkflowsRequest): - The initial request object. - response (google.cloud.bigquery_migration_v2alpha.types.ListMigrationWorkflowsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = migration_service.ListMigrationWorkflowsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[migration_service.ListMigrationWorkflowsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[migration_entities.MigrationWorkflow]: - for page in self.pages: - yield from page.migration_workflows - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListMigrationWorkflowsAsyncPager: - """A pager for iterating through ``list_migration_workflows`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_migration_v2alpha.types.ListMigrationWorkflowsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``migration_workflows`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListMigrationWorkflows`` requests and continue to iterate - through the ``migration_workflows`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_migration_v2alpha.types.ListMigrationWorkflowsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[migration_service.ListMigrationWorkflowsResponse]], - request: migration_service.ListMigrationWorkflowsRequest, - response: migration_service.ListMigrationWorkflowsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. 
- request (google.cloud.bigquery_migration_v2alpha.types.ListMigrationWorkflowsRequest): - The initial request object. - response (google.cloud.bigquery_migration_v2alpha.types.ListMigrationWorkflowsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = migration_service.ListMigrationWorkflowsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[migration_service.ListMigrationWorkflowsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[migration_entities.MigrationWorkflow]: - async def async_generator(): - async for page in self.pages: - for response in page.migration_workflows: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListMigrationSubtasksPager: - """A pager for iterating through ``list_migration_subtasks`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_migration_v2alpha.types.ListMigrationSubtasksResponse` object, and - provides an ``__iter__`` method to iterate through its - ``migration_subtasks`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListMigrationSubtasks`` requests and continue to iterate - through the ``migration_subtasks`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_migration_v2alpha.types.ListMigrationSubtasksResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., migration_service.ListMigrationSubtasksResponse], - request: migration_service.ListMigrationSubtasksRequest, - response: migration_service.ListMigrationSubtasksResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_migration_v2alpha.types.ListMigrationSubtasksRequest): - The initial request object. - response (google.cloud.bigquery_migration_v2alpha.types.ListMigrationSubtasksResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = migration_service.ListMigrationSubtasksRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[migration_service.ListMigrationSubtasksResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[migration_entities.MigrationSubtask]: - for page in self.pages: - yield from page.migration_subtasks - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListMigrationSubtasksAsyncPager: - """A pager for iterating through ``list_migration_subtasks`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_migration_v2alpha.types.ListMigrationSubtasksResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``migration_subtasks`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListMigrationSubtasks`` requests and continue to iterate - through the ``migration_subtasks`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_migration_v2alpha.types.ListMigrationSubtasksResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[migration_service.ListMigrationSubtasksResponse]], - request: migration_service.ListMigrationSubtasksRequest, - response: migration_service.ListMigrationSubtasksResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_migration_v2alpha.types.ListMigrationSubtasksRequest): - The initial request object. - response (google.cloud.bigquery_migration_v2alpha.types.ListMigrationSubtasksResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = migration_service.ListMigrationSubtasksRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[migration_service.ListMigrationSubtasksResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[migration_entities.MigrationSubtask]: - async def async_generator(): - async for page in self.pages: - for response in page.migration_subtasks: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/README.rst b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/README.rst deleted file mode 100644 index ef5e0cb2c853..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`MigrationServiceTransport` is the ABC for all transports. -- public child `MigrationServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `MigrationServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseMigrationServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `MigrationServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/__init__.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/__init__.py deleted file mode 100644 index fc01e4daba77..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -from typing import Dict, Type - -from .base import MigrationServiceTransport -from .grpc import MigrationServiceGrpcTransport -from .grpc_asyncio import MigrationServiceGrpcAsyncIOTransport - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[MigrationServiceTransport]] -_transport_registry['grpc'] = MigrationServiceGrpcTransport -_transport_registry['grpc_asyncio'] = MigrationServiceGrpcAsyncIOTransport - -__all__ = ( - 'MigrationServiceTransport', - 'MigrationServiceGrpcTransport', - 'MigrationServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/base.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/base.py deleted file mode 100644 index c0ae33f541a7..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/base.py +++ /dev/null @@ -1,285 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.bigquery_migration_v2alpha import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.bigquery_migration_v2alpha.types import migration_entities -from google.cloud.bigquery_migration_v2alpha.types import migration_service -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class MigrationServiceTransport(abc.ABC): - """Abstract transport class for MigrationService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'bigquerymigration.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'bigquerymigration.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. 
These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is mutually exclusive with credentials.
-            scopes (Optional[Sequence[str]]): A list of scopes.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-            always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
-                be used for service account credentials.
-        """
-
-        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
-
-        # Save the scopes.
-        self._scopes = scopes
-        if not hasattr(self, "_ignore_credentials"):
-            self._ignore_credentials: bool = False
-
-        # If no credentials are provided, then determine the appropriate
-        # defaults.
-        if credentials and credentials_file:
-            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")
-
-        if credentials_file is not None:
-            credentials, _ = google.auth.load_credentials_from_file(
-                credentials_file,
-                **scopes_kwargs,
-                quota_project_id=quota_project_id
-            )
-        elif credentials is None and not self._ignore_credentials:
-            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
-            # Don't apply the audience if the credentials file was passed by the user.
-            if hasattr(credentials, "with_gdch_audience"):
-                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)
-
-        # If the credentials are service account credentials, then always try to use self-signed JWT.
-        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
-            credentials = credentials.with_always_use_jwt_access(True)
-
-        # Save the credentials.
-        self._credentials = credentials
-
-        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
-        if ':' not in host:
-            host += ':443'
-        self._host = host
-
-    @property
-    def host(self):
-        return self._host
-
-    def _prep_wrapped_methods(self, client_info):
-        # Precompute the wrapped methods.
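-        # These defaults come from the generated service configuration:
-        # read-style calls retry on ServiceUnavailable with exponential
-        # backoff (1.0s initial delay, 1.3x multiplier, 10.0s cap) under a
-        # 120.0s deadline, while create/delete use a 60.0s timeout and are
-        # not retried.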
- self._wrapped_methods = { - self.create_migration_workflow: gapic_v1.method.wrap_method( - self.create_migration_workflow, - default_timeout=60.0, - client_info=client_info, - ), - self.get_migration_workflow: gapic_v1.method.wrap_method( - self.get_migration_workflow, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=120.0, - ), - default_timeout=120.0, - client_info=client_info, - ), - self.list_migration_workflows: gapic_v1.method.wrap_method( - self.list_migration_workflows, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=120.0, - ), - default_timeout=120.0, - client_info=client_info, - ), - self.delete_migration_workflow: gapic_v1.method.wrap_method( - self.delete_migration_workflow, - default_timeout=60.0, - client_info=client_info, - ), - self.start_migration_workflow: gapic_v1.method.wrap_method( - self.start_migration_workflow, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=120.0, - ), - default_timeout=120.0, - client_info=client_info, - ), - self.get_migration_subtask: gapic_v1.method.wrap_method( - self.get_migration_subtask, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=120.0, - ), - default_timeout=120.0, - client_info=client_info, - ), - self.list_migration_subtasks: gapic_v1.method.wrap_method( - self.list_migration_subtasks, - default_retry=retries.Retry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=120.0, - ), - default_timeout=120.0, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
- """ - raise NotImplementedError() - - @property - def create_migration_workflow(self) -> Callable[ - [migration_service.CreateMigrationWorkflowRequest], - Union[ - migration_entities.MigrationWorkflow, - Awaitable[migration_entities.MigrationWorkflow] - ]]: - raise NotImplementedError() - - @property - def get_migration_workflow(self) -> Callable[ - [migration_service.GetMigrationWorkflowRequest], - Union[ - migration_entities.MigrationWorkflow, - Awaitable[migration_entities.MigrationWorkflow] - ]]: - raise NotImplementedError() - - @property - def list_migration_workflows(self) -> Callable[ - [migration_service.ListMigrationWorkflowsRequest], - Union[ - migration_service.ListMigrationWorkflowsResponse, - Awaitable[migration_service.ListMigrationWorkflowsResponse] - ]]: - raise NotImplementedError() - - @property - def delete_migration_workflow(self) -> Callable[ - [migration_service.DeleteMigrationWorkflowRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def start_migration_workflow(self) -> Callable[ - [migration_service.StartMigrationWorkflowRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def get_migration_subtask(self) -> Callable[ - [migration_service.GetMigrationSubtaskRequest], - Union[ - migration_entities.MigrationSubtask, - Awaitable[migration_entities.MigrationSubtask] - ]]: - raise NotImplementedError() - - @property - def list_migration_subtasks(self) -> Callable[ - [migration_service.ListMigrationSubtasksRequest], - Union[ - migration_service.ListMigrationSubtasksResponse, - Awaitable[migration_service.ListMigrationSubtasksResponse] - ]]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'MigrationServiceTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/grpc.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/grpc.py deleted file mode 100644 index 727203e466f5..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/grpc.py +++ /dev/null @@ -1,509 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#
-import json
-import logging as std_logging
-import pickle
-import warnings
-from typing import Callable, Dict, Optional, Sequence, Tuple, Union
-
-from google.api_core import grpc_helpers
-from google.api_core import gapic_v1
-import google.auth  # type: ignore
-from google.auth import credentials as ga_credentials  # type: ignore
-from google.auth.transport.grpc import SslCredentials  # type: ignore
-from google.protobuf.json_format import MessageToJson
-import google.protobuf.message
-
-import grpc  # type: ignore
-import proto  # type: ignore
-
-from google.cloud.bigquery_migration_v2alpha.types import migration_entities
-from google.cloud.bigquery_migration_v2alpha.types import migration_service
-from google.protobuf import empty_pb2  # type: ignore
-from .base import MigrationServiceTransport, DEFAULT_CLIENT_INFO
-
-try:
-    from google.api_core import client_logging  # type: ignore
-    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
-except ImportError:  # pragma: NO COVER
-    CLIENT_LOGGING_SUPPORTED = False
-
-_LOGGER = std_logging.getLogger(__name__)
-
-
-class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor):  # pragma: NO COVER
-    def intercept_unary_unary(self, continuation, client_call_details, request):
-        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG)
-        if logging_enabled:  # pragma: NO COVER
-            request_metadata = client_call_details.metadata
-            if isinstance(request, proto.Message):
-                request_payload = type(request).to_json(request)
-            elif isinstance(request, google.protobuf.message.Message):
-                request_payload = MessageToJson(request)
-            else:
-                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
-
-            request_metadata = {
-                key: value.decode("utf-8") if isinstance(value, bytes) else value
-                for key, value in request_metadata
-            }
-            grpc_request = {
-                "payload": request_payload,
-                "requestMethod": "grpc",
-                "metadata": dict(request_metadata),
-            }
-            _LOGGER.debug(
-                f"Sending request for {client_call_details.method}",
-                extra = {
-                    "serviceName": "google.cloud.bigquery.migration.v2alpha.MigrationService",
-                    "rpcName": client_call_details.method,
-                    "request": grpc_request,
-                    "metadata": grpc_request["metadata"],
-                },
-            )
-
-        response = continuation(client_call_details, request)
-        if logging_enabled:  # pragma: NO COVER
-            response_metadata = response.trailing_metadata()
-            # Convert the gRPC response metadata into a dict of strings.
-            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
-            result = response.result()
-            if isinstance(result, proto.Message):
-                response_payload = type(result).to_json(result)
-            elif isinstance(result, google.protobuf.message.Message):
-                response_payload = MessageToJson(result)
-            else:
-                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
-            grpc_response = {
-                "payload": response_payload,
-                "metadata": metadata,
-                "status": "OK",
-            }
-            _LOGGER.debug(
-                f"Received response for {client_call_details.method}.",
-                extra = {
-                    "serviceName": "google.cloud.bigquery.migration.v2alpha.MigrationService",
-                    "rpcName": client_call_details.method,
-                    "response": grpc_response,
-                    "metadata": grpc_response["metadata"],
-                },
-            )
-        return response
-
-
-class MigrationServiceGrpcTransport(MigrationServiceTransport):
-    """gRPC backend transport for MigrationService.
-
-    Service to handle EDW migrations.
-
-    This class defines the same methods as the primary client, so the
-    primary client can load the underlying transport implementation
-    and call it.
-
-    It sends protocol buffers over the wire using gRPC (which is built on
-    top of HTTP/2); the ``grpcio`` package must be installed.
-    """
-    _stubs: Dict[str, Callable]
-
-    def __init__(self, *,
-            host: str = 'bigquerymigration.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None,
-            api_mtls_endpoint: Optional[str] = None,
-            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
-            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            api_audience: Optional[str] = None,
-            ) -> None:
-        """Instantiate the transport.
-
-        Args:
-            host (Optional[str]):
-                The hostname to connect to (default: 'bigquerymigration.googleapis.com').
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-                This argument is ignored if a ``channel`` instance is provided.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if a ``channel`` instance is provided.
-            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
-                ignored if a ``channel`` instance is provided.
-            channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]):
-                A ``Channel`` instance through which to make calls, or a Callable
-                that constructs and returns one. If set to None, ``self.create_channel``
-                is used to create the channel. If a Callable is given, it will be called
-                with the same arguments as used in ``self.create_channel``.
-            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
-                If provided, it overrides the ``host`` argument and tries to create
-                a mutual TLS channel with client SSL credentials from
-                ``client_cert_source`` or application default SSL credentials.
-            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                Deprecated. A callback to provide client SSL certificate bytes and
-                private key bytes, both in PEM format. It is ignored if
-                ``api_mtls_endpoint`` is None.
-            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
-                for the grpc channel. It is ignored if a ``channel`` instance is provided.
-            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                A callback to provide client certificate bytes and private key bytes,
-                both in PEM format. It is used to configure a mutual TLS channel. It is
-                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
-                be used for service account credentials.
- - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) - - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'bigquerymigration.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. 
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            kwargs (Optional[dict]): Keyword arguments, which are passed to the
-                channel creation.
-        Returns:
-            grpc.Channel: A gRPC channel object.
-
-        Raises:
-            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
-                and ``credentials_file`` are passed.
-        """
-
-        return grpc_helpers.create_channel(
-            host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            quota_project_id=quota_project_id,
-            default_scopes=cls.AUTH_SCOPES,
-            scopes=scopes,
-            default_host=cls.DEFAULT_HOST,
-            **kwargs
-        )
-
-    @property
-    def grpc_channel(self) -> grpc.Channel:
-        """Return the channel designed to connect to this service.
-        """
-        return self._grpc_channel
-
-    @property
-    def create_migration_workflow(self) -> Callable[
-            [migration_service.CreateMigrationWorkflowRequest],
-            migration_entities.MigrationWorkflow]:
-        r"""Return a callable for the create migration workflow method over gRPC.
-
-        Creates a migration workflow.
-
-        Returns:
-            Callable[[~.CreateMigrationWorkflowRequest],
-                    ~.MigrationWorkflow]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'create_migration_workflow' not in self._stubs:
-            self._stubs['create_migration_workflow'] = self._logged_channel.unary_unary(
-                '/google.cloud.bigquery.migration.v2alpha.MigrationService/CreateMigrationWorkflow',
-                request_serializer=migration_service.CreateMigrationWorkflowRequest.serialize,
-                response_deserializer=migration_entities.MigrationWorkflow.deserialize,
-            )
-        return self._stubs['create_migration_workflow']
-
-    @property
-    def get_migration_workflow(self) -> Callable[
-            [migration_service.GetMigrationWorkflowRequest],
-            migration_entities.MigrationWorkflow]:
-        r"""Return a callable for the get migration workflow method over gRPC.
-
-        Gets a previously created migration workflow.
-
-        Returns:
-            Callable[[~.GetMigrationWorkflowRequest],
-                    ~.MigrationWorkflow]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'get_migration_workflow' not in self._stubs:
-            self._stubs['get_migration_workflow'] = self._logged_channel.unary_unary(
-                '/google.cloud.bigquery.migration.v2alpha.MigrationService/GetMigrationWorkflow',
-                request_serializer=migration_service.GetMigrationWorkflowRequest.serialize,
-                response_deserializer=migration_entities.MigrationWorkflow.deserialize,
-            )
-        return self._stubs['get_migration_workflow']
-
-    @property
-    def list_migration_workflows(self) -> Callable[
-            [migration_service.ListMigrationWorkflowsRequest],
-            migration_service.ListMigrationWorkflowsResponse]:
-        r"""Return a callable for the list migration workflows method over gRPC.
-
-        Lists previously created migration workflows.
-
-        Returns:
-            Callable[[~.ListMigrationWorkflowsRequest],
-                    ~.ListMigrationWorkflowsResponse]:
-                A function that, when called, will call the underlying RPC
-                on the server.
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_migration_workflows' not in self._stubs: - self._stubs['list_migration_workflows'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.migration.v2alpha.MigrationService/ListMigrationWorkflows', - request_serializer=migration_service.ListMigrationWorkflowsRequest.serialize, - response_deserializer=migration_service.ListMigrationWorkflowsResponse.deserialize, - ) - return self._stubs['list_migration_workflows'] - - @property - def delete_migration_workflow(self) -> Callable[ - [migration_service.DeleteMigrationWorkflowRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete migration workflow method over gRPC. - - Deletes a migration workflow by name. - - Returns: - Callable[[~.DeleteMigrationWorkflowRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_migration_workflow' not in self._stubs: - self._stubs['delete_migration_workflow'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.migration.v2alpha.MigrationService/DeleteMigrationWorkflow', - request_serializer=migration_service.DeleteMigrationWorkflowRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_migration_workflow'] - - @property - def start_migration_workflow(self) -> Callable[ - [migration_service.StartMigrationWorkflowRequest], - empty_pb2.Empty]: - r"""Return a callable for the start migration workflow method over gRPC. - - Starts a previously created migration workflow. I.e., - the state transitions from DRAFT to RUNNING. This is a - no-op if the state is already RUNNING. An error will be - signaled if the state is anything other than DRAFT or - RUNNING. - - Returns: - Callable[[~.StartMigrationWorkflowRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'start_migration_workflow' not in self._stubs: - self._stubs['start_migration_workflow'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.migration.v2alpha.MigrationService/StartMigrationWorkflow', - request_serializer=migration_service.StartMigrationWorkflowRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['start_migration_workflow'] - - @property - def get_migration_subtask(self) -> Callable[ - [migration_service.GetMigrationSubtaskRequest], - migration_entities.MigrationSubtask]: - r"""Return a callable for the get migration subtask method over gRPC. - - Gets a previously created migration subtask. - - Returns: - Callable[[~.GetMigrationSubtaskRequest], - ~.MigrationSubtask]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_migration_subtask' not in self._stubs: - self._stubs['get_migration_subtask'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.migration.v2alpha.MigrationService/GetMigrationSubtask', - request_serializer=migration_service.GetMigrationSubtaskRequest.serialize, - response_deserializer=migration_entities.MigrationSubtask.deserialize, - ) - return self._stubs['get_migration_subtask'] - - @property - def list_migration_subtasks(self) -> Callable[ - [migration_service.ListMigrationSubtasksRequest], - migration_service.ListMigrationSubtasksResponse]: - r"""Return a callable for the list migration subtasks method over gRPC. - - Lists previously created migration subtasks. - - Returns: - Callable[[~.ListMigrationSubtasksRequest], - ~.ListMigrationSubtasksResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_migration_subtasks' not in self._stubs: - self._stubs['list_migration_subtasks'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.migration.v2alpha.MigrationService/ListMigrationSubtasks', - request_serializer=migration_service.ListMigrationSubtasksRequest.serialize, - response_deserializer=migration_service.ListMigrationSubtasksResponse.deserialize, - ) - return self._stubs['list_migration_subtasks'] - - def close(self): - self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'MigrationServiceGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/grpc_asyncio.py deleted file mode 100644 index 91cf62bf654f..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,604 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
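The asyncio transport removed below uses the same logging pattern, but registers its interceptor on the channel itself rather than wrapping the channel. A minimal sketch of the ``grpc.aio`` variant; names and the localhost target are again illustrative:

.. code-block:: python

    import asyncio
    import grpc

    class AsyncEchoInterceptor(grpc.aio.UnaryUnaryClientInterceptor):
        """Print the RPC method name before each async unary call."""

        async def intercept_unary_unary(self, continuation, client_call_details, request):
            print(f"Sending request for {client_call_details.method}")
            # The continuation returns a call object that can be awaited for the response.
            return await continuation(client_call_details, request)

    async def main():
        # grpc.aio channels accept interceptors at construction time.
        channel = grpc.aio.insecure_channel(
            "localhost:50051", interceptors=[AsyncEchoInterceptor()]
        )
        await channel.close()

    asyncio.run(main())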
-#
-import inspect
-import json
-import pickle
-import logging as std_logging
-import warnings
-from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
-
-from google.api_core import gapic_v1
-from google.api_core import grpc_helpers_async
-from google.api_core import exceptions as core_exceptions
-from google.api_core import retry_async as retries
-from google.auth import credentials as ga_credentials  # type: ignore
-from google.auth.transport.grpc import SslCredentials  # type: ignore
-from google.protobuf.json_format import MessageToJson
-import google.protobuf.message
-
-import grpc  # type: ignore
-import proto  # type: ignore
-from grpc.experimental import aio  # type: ignore
-
-from google.cloud.bigquery_migration_v2alpha.types import migration_entities
-from google.cloud.bigquery_migration_v2alpha.types import migration_service
-from google.protobuf import empty_pb2  # type: ignore
-from .base import MigrationServiceTransport, DEFAULT_CLIENT_INFO
-from .grpc import MigrationServiceGrpcTransport
-
-try:
-    from google.api_core import client_logging  # type: ignore
-    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
-except ImportError:  # pragma: NO COVER
-    CLIENT_LOGGING_SUPPORTED = False
-
-_LOGGER = std_logging.getLogger(__name__)
-
-
-class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor):  # pragma: NO COVER
-    async def intercept_unary_unary(self, continuation, client_call_details, request):
-        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG)
-        if logging_enabled:  # pragma: NO COVER
-            request_metadata = client_call_details.metadata
-            if isinstance(request, proto.Message):
-                request_payload = type(request).to_json(request)
-            elif isinstance(request, google.protobuf.message.Message):
-                request_payload = MessageToJson(request)
-            else:
-                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
-
-            request_metadata = {
-                key: value.decode("utf-8") if isinstance(value, bytes) else value
-                for key, value in request_metadata
-            }
-            grpc_request = {
-                "payload": request_payload,
-                "requestMethod": "grpc",
-                "metadata": dict(request_metadata),
-            }
-            _LOGGER.debug(
-                f"Sending request for {client_call_details.method}",
-                extra = {
-                    "serviceName": "google.cloud.bigquery.migration.v2alpha.MigrationService",
-                    "rpcName": str(client_call_details.method),
-                    "request": grpc_request,
-                    "metadata": grpc_request["metadata"],
-                },
-            )
-        response = await continuation(client_call_details, request)
-        if logging_enabled:  # pragma: NO COVER
-            response_metadata = await response.trailing_metadata()
-            # Convert the gRPC response metadata into a dict of strings.
-            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
-            result = await response
-            if isinstance(result, proto.Message):
-                response_payload = type(result).to_json(result)
-            elif isinstance(result, google.protobuf.message.Message):
-                response_payload = MessageToJson(result)
-            else:
-                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
-            grpc_response = {
-                "payload": response_payload,
-                "metadata": metadata,
-                "status": "OK",
-            }
-            _LOGGER.debug(
-                f"Received response to rpc {client_call_details.method}.",
-                extra = {
-                    "serviceName": "google.cloud.bigquery.migration.v2alpha.MigrationService",
-                    "rpcName": str(client_call_details.method),
-                    "response": grpc_response,
-                    "metadata": grpc_response["metadata"],
-                },
-            )
-        return response
-
-
-class MigrationServiceGrpcAsyncIOTransport(MigrationServiceTransport):
-    """gRPC AsyncIO backend transport for MigrationService.
-
-    Service to handle EDW migrations.
-
-    This class defines the same methods as the primary client, so the
-    primary client can load the underlying transport implementation
-    and call it.
-
-    It sends protocol buffers over the wire using gRPC (which is built on
-    top of HTTP/2); the ``grpcio`` package must be installed.
-    """
-
-    _grpc_channel: aio.Channel
-    _stubs: Dict[str, Callable] = {}
-
-    @classmethod
-    def create_channel(cls,
-            host: str = 'bigquerymigration.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            quota_project_id: Optional[str] = None,
-            **kwargs) -> aio.Channel:
-        """Create and return a gRPC AsyncIO channel object.
-        Args:
-            host (Optional[str]): The host for the channel to use.
-            credentials (Optional[~.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify this application to the service. If
-                none are specified, the client will attempt to ascertain
-                the credentials from the environment.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            kwargs (Optional[dict]): Keyword arguments, which are passed to the
-                channel creation.
-        Returns:
-            aio.Channel: A gRPC AsyncIO channel object.
-        """
-
-        return grpc_helpers_async.create_channel(
-            host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            quota_project_id=quota_project_id,
-            default_scopes=cls.AUTH_SCOPES,
-            scopes=scopes,
-            default_host=cls.DEFAULT_HOST,
-            **kwargs
-        )
-
-    def __init__(self, *,
-            host: str = 'bigquerymigration.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None,
-            api_mtls_endpoint: Optional[str] = None,
-            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
-            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            api_audience: Optional[str] = None,
-            ) -> None:
-        """Instantiate the transport.
-
-        Args:
-            host (Optional[str]):
-                The hostname to connect to (default: 'bigquerymigration.googleapis.com').
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-                This argument is ignored if a ``channel`` instance is provided.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if a ``channel`` instance is provided.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service.
These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, aio.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def create_migration_workflow(self) -> Callable[ - [migration_service.CreateMigrationWorkflowRequest], - Awaitable[migration_entities.MigrationWorkflow]]: - r"""Return a callable for the create migration workflow method over gRPC. - - Creates a migration workflow. - - Returns: - Callable[[~.CreateMigrationWorkflowRequest], - Awaitable[~.MigrationWorkflow]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_migration_workflow' not in self._stubs: - self._stubs['create_migration_workflow'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.migration.v2alpha.MigrationService/CreateMigrationWorkflow', - request_serializer=migration_service.CreateMigrationWorkflowRequest.serialize, - response_deserializer=migration_entities.MigrationWorkflow.deserialize, - ) - return self._stubs['create_migration_workflow'] - - @property - def get_migration_workflow(self) -> Callable[ - [migration_service.GetMigrationWorkflowRequest], - Awaitable[migration_entities.MigrationWorkflow]]: - r"""Return a callable for the get migration workflow method over gRPC. - - Gets a previously created migration workflow. 
-
-        Returns:
-            Callable[[~.GetMigrationWorkflowRequest],
-                    Awaitable[~.MigrationWorkflow]]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'get_migration_workflow' not in self._stubs:
-            self._stubs['get_migration_workflow'] = self._logged_channel.unary_unary(
-                '/google.cloud.bigquery.migration.v2alpha.MigrationService/GetMigrationWorkflow',
-                request_serializer=migration_service.GetMigrationWorkflowRequest.serialize,
-                response_deserializer=migration_entities.MigrationWorkflow.deserialize,
-            )
-        return self._stubs['get_migration_workflow']
-
-    @property
-    def list_migration_workflows(self) -> Callable[
-            [migration_service.ListMigrationWorkflowsRequest],
-            Awaitable[migration_service.ListMigrationWorkflowsResponse]]:
-        r"""Return a callable for the list migration workflows method over gRPC.
-
-        Lists previously created migration workflows.
-
-        Returns:
-            Callable[[~.ListMigrationWorkflowsRequest],
-                    Awaitable[~.ListMigrationWorkflowsResponse]]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'list_migration_workflows' not in self._stubs:
-            self._stubs['list_migration_workflows'] = self._logged_channel.unary_unary(
-                '/google.cloud.bigquery.migration.v2alpha.MigrationService/ListMigrationWorkflows',
-                request_serializer=migration_service.ListMigrationWorkflowsRequest.serialize,
-                response_deserializer=migration_service.ListMigrationWorkflowsResponse.deserialize,
-            )
-        return self._stubs['list_migration_workflows']
-
-    @property
-    def delete_migration_workflow(self) -> Callable[
-            [migration_service.DeleteMigrationWorkflowRequest],
-            Awaitable[empty_pb2.Empty]]:
-        r"""Return a callable for the delete migration workflow method over gRPC.
-
-        Deletes a migration workflow by name.
-
-        Returns:
-            Callable[[~.DeleteMigrationWorkflowRequest],
-                    Awaitable[~.Empty]]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'delete_migration_workflow' not in self._stubs:
-            self._stubs['delete_migration_workflow'] = self._logged_channel.unary_unary(
-                '/google.cloud.bigquery.migration.v2alpha.MigrationService/DeleteMigrationWorkflow',
-                request_serializer=migration_service.DeleteMigrationWorkflowRequest.serialize,
-                response_deserializer=empty_pb2.Empty.FromString,
-            )
-        return self._stubs['delete_migration_workflow']
-
-    @property
-    def start_migration_workflow(self) -> Callable[
-            [migration_service.StartMigrationWorkflowRequest],
-            Awaitable[empty_pb2.Empty]]:
-        r"""Return a callable for the start migration workflow method over gRPC.
-
-        Starts a previously created migration workflow. I.e.,
-        the state transitions from DRAFT to RUNNING. This is a
-        no-op if the state is already RUNNING. An error will be
-        signaled if the state is anything other than DRAFT or
-        RUNNING.
- - Returns: - Callable[[~.StartMigrationWorkflowRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'start_migration_workflow' not in self._stubs: - self._stubs['start_migration_workflow'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.migration.v2alpha.MigrationService/StartMigrationWorkflow', - request_serializer=migration_service.StartMigrationWorkflowRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['start_migration_workflow'] - - @property - def get_migration_subtask(self) -> Callable[ - [migration_service.GetMigrationSubtaskRequest], - Awaitable[migration_entities.MigrationSubtask]]: - r"""Return a callable for the get migration subtask method over gRPC. - - Gets a previously created migration subtask. - - Returns: - Callable[[~.GetMigrationSubtaskRequest], - Awaitable[~.MigrationSubtask]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_migration_subtask' not in self._stubs: - self._stubs['get_migration_subtask'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.migration.v2alpha.MigrationService/GetMigrationSubtask', - request_serializer=migration_service.GetMigrationSubtaskRequest.serialize, - response_deserializer=migration_entities.MigrationSubtask.deserialize, - ) - return self._stubs['get_migration_subtask'] - - @property - def list_migration_subtasks(self) -> Callable[ - [migration_service.ListMigrationSubtasksRequest], - Awaitable[migration_service.ListMigrationSubtasksResponse]]: - r"""Return a callable for the list migration subtasks method over gRPC. - - Lists previously created migration subtasks. - - Returns: - Callable[[~.ListMigrationSubtasksRequest], - Awaitable[~.ListMigrationSubtasksResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_migration_subtasks' not in self._stubs: - self._stubs['list_migration_subtasks'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.migration.v2alpha.MigrationService/ListMigrationSubtasks', - request_serializer=migration_service.ListMigrationSubtasksRequest.serialize, - response_deserializer=migration_service.ListMigrationSubtasksResponse.deserialize, - ) - return self._stubs['list_migration_subtasks'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.create_migration_workflow: self._wrap_method( - self.create_migration_workflow, - default_timeout=60.0, - client_info=client_info, - ), - self.get_migration_workflow: self._wrap_method( - self.get_migration_workflow, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=120.0, - ), - default_timeout=120.0, - client_info=client_info, - ), - self.list_migration_workflows: self._wrap_method( - self.list_migration_workflows, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=120.0, - ), - default_timeout=120.0, - client_info=client_info, - ), - self.delete_migration_workflow: self._wrap_method( - self.delete_migration_workflow, - default_timeout=60.0, - client_info=client_info, - ), - self.start_migration_workflow: self._wrap_method( - self.start_migration_workflow, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=120.0, - ), - default_timeout=120.0, - client_info=client_info, - ), - self.get_migration_subtask: self._wrap_method( - self.get_migration_subtask, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=120.0, - ), - default_timeout=120.0, - client_info=client_info, - ), - self.list_migration_subtasks: self._wrap_method( - self.list_migration_subtasks, - default_retry=retries.AsyncRetry( - initial=1.0, - maximum=10.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=120.0, - ), - default_timeout=120.0, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - -__all__ = ( - 'MigrationServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/__init__.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/__init__.py deleted file mode 100644 index 42cdca346f76..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 
2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-from .client import SqlTranslationServiceClient
-from .async_client import SqlTranslationServiceAsyncClient
-
-__all__ = (
-    'SqlTranslationServiceClient',
-    'SqlTranslationServiceAsyncClient',
-)
diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/async_client.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/async_client.py
deleted file mode 100644
index ae548242f463..000000000000
--- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/async_client.py
+++ /dev/null
@@ -1,385 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import logging as std_logging
-from collections import OrderedDict
-import re
-from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union
-
-from google.cloud.bigquery_migration_v2alpha import gapic_version as package_version
-
-from google.api_core.client_options import ClientOptions
-from google.api_core import exceptions as core_exceptions
-from google.api_core import gapic_v1
-from google.api_core import retry_async as retries
-from google.auth import credentials as ga_credentials  # type: ignore
-from google.oauth2 import service_account  # type: ignore
-
-
-try:
-    OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None]
-except AttributeError:  # pragma: NO COVER
-    OptionalRetry = Union[retries.AsyncRetry, object, None]  # type: ignore
-
-from google.cloud.bigquery_migration_v2alpha.types import translation_service
-from .transports.base import SqlTranslationServiceTransport, DEFAULT_CLIENT_INFO
-from .transports.grpc_asyncio import SqlTranslationServiceGrpcAsyncIOTransport
-from .client import SqlTranslationServiceClient
-
-try:
-    from google.api_core import client_logging  # type: ignore
-    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
-except ImportError:  # pragma: NO COVER
-    CLIENT_LOGGING_SUPPORTED = False
-
-_LOGGER = std_logging.getLogger(__name__)
-
-class SqlTranslationServiceAsyncClient:
-    """Provides translation operations from other SQL dialects
-    to GoogleSQL.
-    """
-
-    _client: SqlTranslationServiceClient
-
-    # Copy defaults from the synchronous client for use here.
-    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
-    DEFAULT_ENDPOINT = SqlTranslationServiceClient.DEFAULT_ENDPOINT
-    DEFAULT_MTLS_ENDPOINT = SqlTranslationServiceClient.DEFAULT_MTLS_ENDPOINT
-    _DEFAULT_ENDPOINT_TEMPLATE = SqlTranslationServiceClient._DEFAULT_ENDPOINT_TEMPLATE
-    _DEFAULT_UNIVERSE = SqlTranslationServiceClient._DEFAULT_UNIVERSE
-
-    common_billing_account_path = staticmethod(SqlTranslationServiceClient.common_billing_account_path)
-    parse_common_billing_account_path = staticmethod(SqlTranslationServiceClient.parse_common_billing_account_path)
-    common_folder_path = staticmethod(SqlTranslationServiceClient.common_folder_path)
-    parse_common_folder_path = staticmethod(SqlTranslationServiceClient.parse_common_folder_path)
-    common_organization_path = staticmethod(SqlTranslationServiceClient.common_organization_path)
-    parse_common_organization_path = staticmethod(SqlTranslationServiceClient.parse_common_organization_path)
-    common_project_path = staticmethod(SqlTranslationServiceClient.common_project_path)
-    parse_common_project_path = staticmethod(SqlTranslationServiceClient.parse_common_project_path)
-    common_location_path = staticmethod(SqlTranslationServiceClient.common_location_path)
-    parse_common_location_path = staticmethod(SqlTranslationServiceClient.parse_common_location_path)
-
-    @classmethod
-    def from_service_account_info(cls, info: dict, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            info.
-
-        Args:
-            info (dict): The service account private key info.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            SqlTranslationServiceAsyncClient: The constructed client.
-        """
-        return SqlTranslationServiceClient.from_service_account_info.__func__(SqlTranslationServiceAsyncClient, info, *args, **kwargs)  # type: ignore
-
-    @classmethod
-    def from_service_account_file(cls, filename: str, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            file.
-
-        Args:
-            filename (str): The path to the service account private key json
-                file.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            SqlTranslationServiceAsyncClient: The constructed client.
-        """
-        return SqlTranslationServiceClient.from_service_account_file.__func__(SqlTranslationServiceAsyncClient, filename, *args, **kwargs)  # type: ignore
-
-    from_service_account_json = from_service_account_file
-
-    @classmethod
-    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None):
-        """Return the API endpoint and client cert source for mutual TLS.
-
-        The client cert source is determined in the following order:
-        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
-        client cert source is None.
-        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
-        default client cert source exists, use the default one; otherwise the client cert
-        source is None.
-
-        The API endpoint is determined in the following order:
-        (1) if `client_options.api_endpoint` is provided, use the provided one.
-        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
-        default mTLS endpoint; if the environment variable is "never", use the default API
-        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
-        use the default API endpoint.
-
-        More details can be found at https://google.aip.dev/auth/4114.
-
-        Args:
-            client_options (google.api_core.client_options.ClientOptions): Custom options for the
-                client. Only the `api_endpoint` and `client_cert_source` properties may be used
-                in this method.
-
-        Returns:
-            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
-                client cert source to use.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
-        """
-        return SqlTranslationServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
-
-    @property
-    def transport(self) -> SqlTranslationServiceTransport:
-        """Returns the transport used by the client instance.
-
-        Returns:
-            SqlTranslationServiceTransport: The transport used by the client instance.
-        """
-        return self._client.transport
-
-    @property
-    def api_endpoint(self):
-        """Return the API endpoint used by the client instance.
-
-        Returns:
-            str: The API endpoint used by the client instance.
-        """
-        return self._client._api_endpoint
-
-    @property
-    def universe_domain(self) -> str:
-        """Return the universe domain used by the client instance.
-
-        Returns:
-            str: The universe domain used
-                by the client instance.
-        """
-        return self._client._universe_domain
-
-    get_transport_class = SqlTranslationServiceClient.get_transport_class
-
-    def __init__(self, *,
-            credentials: Optional[ga_credentials.Credentials] = None,
-            transport: Optional[Union[str, SqlTranslationServiceTransport, Callable[..., SqlTranslationServiceTransport]]] = "grpc_asyncio",
-            client_options: Optional[ClientOptions] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            ) -> None:
-        """Instantiates the SQL translation service async client.
-
-        Args:
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-            transport (Optional[Union[str,SqlTranslationServiceTransport,Callable[..., SqlTranslationServiceTransport]]]):
-                The transport to use, or a Callable that constructs and returns a new transport to use.
-                If a Callable is given, it will be called with the same set of initialization
-                arguments as used in the SqlTranslationServiceTransport constructor.
-                If set to None, a transport is chosen automatically.
-            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
-                Custom options for the client.
-
-                1. The ``api_endpoint`` property can be used to override the
-                default endpoint provided by the client when ``transport`` is
-                not explicitly provided. Only if this property is not set and
-                ``transport`` was not explicitly provided, the endpoint is
-                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
-                variable, which can have one of the following values:
-                "always" (always use the default mTLS endpoint), "never" (always
-                use the default regular endpoint) and "auto" (auto-switch to the
-                default mTLS endpoint if client certificate is present; this is
-                the default value).
-
-                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
-                is "true", then the ``client_cert_source`` property can be used
-                to provide a client certificate for mTLS transport. If
-                not provided, the default SSL client certificate will be used if
-                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
-                set, no client certificate will be used.
-
-                3.
The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = SqlTranslationServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.bigquery.migration_v2alpha.SqlTranslationServiceAsyncClient`.", - extra = { - "serviceName": "google.cloud.bigquery.migration.v2alpha.SqlTranslationService", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { - "serviceName": "google.cloud.bigquery.migration.v2alpha.SqlTranslationService", - "credentialsType": None, - } - ) - - async def translate_query(self, - request: Optional[Union[translation_service.TranslateQueryRequest, dict]] = None, - *, - parent: Optional[str] = None, - source_dialect: Optional[translation_service.TranslateQueryRequest.SqlTranslationSourceDialect] = None, - query: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> translation_service.TranslateQueryResponse: - r"""Translates input queries from source dialects to - GoogleSQL. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_migration_v2alpha - - async def sample_translate_query(): - # Create a client - client = bigquery_migration_v2alpha.SqlTranslationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_migration_v2alpha.TranslateQueryRequest( - parent="parent_value", - source_dialect="TERADATA", - query="query_value", - ) - - # Make the request - response = await client.translate_query(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_migration_v2alpha.types.TranslateQueryRequest, dict]]): - The request object. The request of translating a SQL - query to Standard SQL. - parent (:class:`str`): - Required. The name of the project to which this - translation request belongs. 
Example: - ``projects/foo/locations/bar`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - source_dialect (:class:`google.cloud.bigquery_migration_v2alpha.types.TranslateQueryRequest.SqlTranslationSourceDialect`): - Required. The source SQL dialect of ``queries``. - This corresponds to the ``source_dialect`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - query (:class:`str`): - Required. The query to be translated. - This corresponds to the ``query`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_migration_v2alpha.types.TranslateQueryResponse: - The response of translating a SQL - query to Standard SQL. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, source_dialect, query] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, translation_service.TranslateQueryRequest): - request = translation_service.TranslateQueryRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if source_dialect is not None: - request.source_dialect = source_dialect - if query is not None: - request.query = query - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.translate_query] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def __aenter__(self) -> "SqlTranslationServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "SqlTranslationServiceAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/client.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/client.py deleted file mode 100644 index 7f0a49980874..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/client.py +++ /dev/null @@ -1,735 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.bigquery_migration_v2alpha import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.cloud.bigquery_migration_v2alpha.types import translation_service -from .transports.base import SqlTranslationServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import SqlTranslationServiceGrpcTransport -from .transports.grpc_asyncio import SqlTranslationServiceGrpcAsyncIOTransport - - -class SqlTranslationServiceClientMeta(type): - """Metaclass for the SqlTranslationService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. 
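- - For illustration only, transport selection behaves roughly like the sketch - below ("grpc" is the default because it is registered first): - - .. code-block:: python - - # Returns SqlTranslationServiceGrpcTransport from the registry. - transport_cls = SqlTranslationServiceClient.get_transport_class("grpc")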
- """ - _transport_registry = OrderedDict() # type: Dict[str, Type[SqlTranslationServiceTransport]] - _transport_registry["grpc"] = SqlTranslationServiceGrpcTransport - _transport_registry["grpc_asyncio"] = SqlTranslationServiceGrpcAsyncIOTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[SqlTranslationServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class SqlTranslationServiceClient(metaclass=SqlTranslationServiceClientMeta): - """Provides other SQL dialects to GoogleSQL translation - operations. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = "bigquerymigration.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - _DEFAULT_ENDPOINT_TEMPLATE = "bigquerymigration.{UNIVERSE_DOMAIN}" - _DEFAULT_UNIVERSE = "googleapis.com" - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - SqlTranslationServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - SqlTranslationServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> SqlTranslationServiceTransport: - """Returns the transport used by the client instance.
- - Returns: - SqlTranslationServiceTransport: The transport used by the client - instance. - """ - return self._transport - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P<folder>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse an organization path into its component segments.""" - m = re.match(r"^organizations/(?P<organization>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P<project>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Deprecated. Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` is provided, use the provided one. - (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method.
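- - For illustration (a sketch of one resolution path): with - GOOGLE_API_USE_CLIENT_CERTIFICATE="true", a default client certificate - available, and GOOGLE_API_USE_MTLS_ENDPOINT="auto", the method resolves - to the default mTLS endpoint: - - .. code-block:: python - - endpoint, cert_source = SqlTranslationServiceClient.get_mtls_endpoint_and_cert_source() - # endpoint == SqlTranslationServiceClient.DEFAULT_MTLS_ENDPOINT - # cert_source is the default client cert source callback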
- - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. - - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"]. - """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. 
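- - For illustration (a sketch): with no override, no client certificate, and - the default universe, the endpoint comes from _DEFAULT_ENDPOINT_TEMPLATE: - - .. code-block:: python - - # -> "bigquerymigration.googleapis.com" - SqlTranslationServiceClient._get_api_endpoint(None, None, "googleapis.com", "auto")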
- - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = SqlTranslationServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = SqlTranslationServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = SqlTranslationServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. - """ - universe_domain = SqlTranslationServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. - """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. 
- """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, SqlTranslationServiceTransport, Callable[..., SqlTranslationServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the SQL translation service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,SqlTranslationServiceTransport,Callable[..., SqlTranslationServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the SqlTranslationServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which can have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason.
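- - For illustration, a minimal construction, assuming Application Default - Credentials are available in the environment: - - .. code-block:: python - - from google.cloud import bigquery_migration_v2alpha - - # Credentials, endpoint, and transport are all resolved automatically. - client = bigquery_migration_v2alpha.SqlTranslationServiceClient()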
- """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) - - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = SqlTranslationServiceClient._read_environment_variables() - self._client_cert_source = SqlTranslationServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = SqlTranslationServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER - # Setup logging. - client_logging.initialize_logging() - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, SqlTranslationServiceTransport) - if transport_provided: - # transport is a SqlTranslationServiceTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = cast(SqlTranslationServiceTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - SqlTranslationServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[SqlTranslationServiceTransport], Callable[..., SqlTranslationServiceTransport]] = ( - SqlTranslationServiceClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., SqlTranslationServiceTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.bigquery.migration_v2alpha.SqlTranslationServiceClient`.", - extra = { - "serviceName": "google.cloud.bigquery.migration.v2alpha.SqlTranslationService", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.cloud.bigquery.migration.v2alpha.SqlTranslationService", - "credentialsType": None, - } - ) - - def translate_query(self, - request: Optional[Union[translation_service.TranslateQueryRequest, dict]] = None, - *, - parent: Optional[str] = None, - source_dialect: Optional[translation_service.TranslateQueryRequest.SqlTranslationSourceDialect] = None, - query: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> translation_service.TranslateQueryResponse: - r"""Translates input queries from source dialects to - GoogleSQL. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_migration_v2alpha - - def sample_translate_query(): - # Create a client - client = bigquery_migration_v2alpha.SqlTranslationServiceClient() - - # Initialize request argument(s) - request = bigquery_migration_v2alpha.TranslateQueryRequest( - parent="parent_value", - source_dialect="TERADATA", - query="query_value", - ) - - # Make the request - response = client.translate_query(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_migration_v2alpha.types.TranslateQueryRequest, dict]): - The request object. The request of translating a SQL - query to Standard SQL. - parent (str): - Required. The name of the project to which this - translation request belongs. Example: - ``projects/foo/locations/bar`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - source_dialect (google.cloud.bigquery_migration_v2alpha.types.TranslateQueryRequest.SqlTranslationSourceDialect): - Required. The source SQL dialect of ``queries``. - This corresponds to the ``source_dialect`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - query (str): - Required. The query to be translated. - This corresponds to the ``query`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_migration_v2alpha.types.TranslateQueryResponse: - The response of translating a SQL - query to Standard SQL. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, source_dialect, query] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, translation_service.TranslateQueryRequest): - request = translation_service.TranslateQueryRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if source_dialect is not None: - request.source_dialect = source_dialect - if query is not None: - request.query = query - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.translate_query] - - # Certain fields should be provided within the metadata header; - # add these here. 
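- # (The routing header below is sent as the "x-goog-request-params" - # metadata entry, e.g. "parent=...", which the backend uses to route - # the request.)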
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "SqlTranslationServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "SqlTranslationServiceClient", -) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/transports/README.rst b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/transports/README.rst deleted file mode 100644 index 67b67fcd4e6d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`SqlTranslationServiceTransport` is the ABC for all transports. -- public child `SqlTranslationServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `SqlTranslationServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseSqlTranslationServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `SqlTranslationServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/transports/__init__.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/transports/__init__.py deleted file mode 100644 index 183b9439ec86..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/transports/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -from typing import Dict, Type - -from .base import SqlTranslationServiceTransport -from .grpc import SqlTranslationServiceGrpcTransport -from .grpc_asyncio import SqlTranslationServiceGrpcAsyncIOTransport - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[SqlTranslationServiceTransport]] -_transport_registry['grpc'] = SqlTranslationServiceGrpcTransport -_transport_registry['grpc_asyncio'] = SqlTranslationServiceGrpcAsyncIOTransport - -__all__ = ( - 'SqlTranslationServiceTransport', - 'SqlTranslationServiceGrpcTransport', - 'SqlTranslationServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/transports/base.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/transports/base.py deleted file mode 100644 index 41dcafcf0be4..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/transports/base.py +++ /dev/null @@ -1,154 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.bigquery_migration_v2alpha import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.bigquery_migration_v2alpha.types import translation_service - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class SqlTranslationServiceTransport(abc.ABC): - """Abstract transport class for SqlTranslationService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'bigquerymigration.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'bigquerymigration.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. 
These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.translate_query: gapic_v1.method.wrap_method( - self.translate_query, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
- """ - raise NotImplementedError() - - @property - def translate_query(self) -> Callable[ - [translation_service.TranslateQueryRequest], - Union[ - translation_service.TranslateQueryResponse, - Awaitable[translation_service.TranslateQueryResponse] - ]]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'SqlTranslationServiceTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/transports/grpc.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/transports/grpc.py deleted file mode 100644 index fd9c4bf7eab1..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/transports/grpc.py +++ /dev/null @@ -1,349 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json -import logging as std_logging -import pickle -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore - -from google.cloud.bigquery_migration_v2alpha.types import translation_service -from .base import SqlTranslationServiceTransport, DEFAULT_CLIENT_INFO - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER - def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": 
"google.cloud.bigquery.migration.v2alpha.SqlTranslationService", - "rpcName": client_call_details.method, - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - - response = continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = response.trailing_metadata() - # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = response.result() - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response for {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.bigquery.migration.v2alpha.SqlTranslationService", - "rpcName": client_call_details.method, - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class SqlTranslationServiceGrpcTransport(SqlTranslationServiceTransport): - """gRPC backend transport for SqlTranslationService. - - Provides other SQL dialects to GoogleSQL translation - operations. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'bigquerymigration.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'bigquerymigration.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if a ``channel`` instance is provided. - channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. 
If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
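- # (If the deprecated client_cert_source callback is not supplied, - # SslCredentials() below falls back to the application default SSL - # client credentials.)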
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) - - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'bigquerymigration.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. 
- """ - return self._grpc_channel - - @property - def translate_query(self) -> Callable[ - [translation_service.TranslateQueryRequest], - translation_service.TranslateQueryResponse]: - r"""Return a callable for the translate query method over gRPC. - - Translates input queries from source dialects to - GoogleSQL. - - Returns: - Callable[[~.TranslateQueryRequest], - ~.TranslateQueryResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'translate_query' not in self._stubs: - self._stubs['translate_query'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.migration.v2alpha.SqlTranslationService/TranslateQuery', - request_serializer=translation_service.TranslateQueryRequest.serialize, - response_deserializer=translation_service.TranslateQueryResponse.deserialize, - ) - return self._stubs['translate_query'] - - def close(self): - self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'SqlTranslationServiceGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/transports/grpc_asyncio.py deleted file mode 100644 index 62183093d568..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,369 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import inspect -import json -import pickle -import logging as std_logging -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import exceptions as core_exceptions -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.bigquery_migration_v2alpha.types import translation_service -from .base import SqlTranslationServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import SqlTranslationServiceGrpcTransport - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER - async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.bigquery.migration.v2alpha.SqlTranslationService", - "rpcName": str(client_call_details.method), - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - response = await continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = await response.trailing_metadata() - # Convert the gRPC trailing metadata to a dict of strings - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = await response - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response to rpc {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.bigquery.migration.v2alpha.SqlTranslationService", - "rpcName": str(client_call_details.method), - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class SqlTranslationServiceGrpcAsyncIOTransport(SqlTranslationServiceTransport): - """gRPC AsyncIO backend transport for SqlTranslationService.
-
- Provides other SQL dialects to GoogleSQL translation
- operations.
-
- This class defines the same methods as the primary client, so the
- primary client can load the underlying transport implementation
- and call it.
-
- It sends protocol buffers over the wire using gRPC (which is built on
- top of HTTP/2); the ``grpcio`` package must be installed.
- """
-
- _grpc_channel: aio.Channel
- _stubs: Dict[str, Callable] = {}
-
- @classmethod
- def create_channel(cls,
- host: str = 'bigquerymigration.googleapis.com',
- credentials: Optional[ga_credentials.Credentials] = None,
- credentials_file: Optional[str] = None,
- scopes: Optional[Sequence[str]] = None,
- quota_project_id: Optional[str] = None,
- **kwargs) -> aio.Channel:
- """Create and return a gRPC AsyncIO channel object.
- Args:
- host (Optional[str]): The host for the channel to use.
- credentials (Optional[~.Credentials]): The
- authorization credentials to attach to requests. These
- credentials identify this application to the service. If
- none are specified, the client will attempt to ascertain
- the credentials from the environment.
- credentials_file (Optional[str]): A file with credentials that can
- be loaded with :func:`google.auth.load_credentials_from_file`.
- scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
- service. These are only used when credentials are not specified and
- are passed to :func:`google.auth.default`.
- quota_project_id (Optional[str]): An optional project to use for billing
- and quota.
- kwargs (Optional[dict]): Keyword arguments, which are passed to the
- channel creation.
- Returns:
- aio.Channel: A gRPC AsyncIO channel object.
- """
-
- return grpc_helpers_async.create_channel(
- host,
- credentials=credentials,
- credentials_file=credentials_file,
- quota_project_id=quota_project_id,
- default_scopes=cls.AUTH_SCOPES,
- scopes=scopes,
- default_host=cls.DEFAULT_HOST,
- **kwargs
- )
-
- def __init__(self, *,
- host: str = 'bigquerymigration.googleapis.com',
- credentials: Optional[ga_credentials.Credentials] = None,
- credentials_file: Optional[str] = None,
- scopes: Optional[Sequence[str]] = None,
- channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None,
- api_mtls_endpoint: Optional[str] = None,
- client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
- ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
- client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
- quota_project_id: Optional[str] = None,
- client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
- always_use_jwt_access: Optional[bool] = False,
- api_audience: Optional[str] = None,
- ) -> None:
- """Instantiate the transport.
-
- Args:
- host (Optional[str]):
- The hostname to connect to (default: 'bigquerymigration.googleapis.com').
- credentials (Optional[google.auth.credentials.Credentials]): The
- authorization credentials to attach to requests. These
- credentials identify the application to the service; if none
- are specified, the client will attempt to ascertain the
- credentials from the environment.
- This argument is ignored if a ``channel`` instance is provided.
- credentials_file (Optional[str]): A file with credentials that can
- be loaded with :func:`google.auth.load_credentials_from_file`.
- This argument is ignored if a ``channel`` instance is provided.
- scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
- service.
These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, aio.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def translate_query(self) -> Callable[ - [translation_service.TranslateQueryRequest], - Awaitable[translation_service.TranslateQueryResponse]]: - r"""Return a callable for the translate query method over gRPC. - - Translates input queries from source dialects to - GoogleSQL. - - Returns: - Callable[[~.TranslateQueryRequest], - Awaitable[~.TranslateQueryResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'translate_query' not in self._stubs: - self._stubs['translate_query'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.migration.v2alpha.SqlTranslationService/TranslateQuery', - request_serializer=translation_service.TranslateQueryRequest.serialize, - response_deserializer=translation_service.TranslateQueryResponse.deserialize, - ) - return self._stubs['translate_query'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.translate_query: self._wrap_method( - self.translate_query, - default_timeout=None, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - -__all__ = ( - 'SqlTranslationServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/types/__init__.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/types/__init__.py deleted file mode 100644 index 52a84eb0e535..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/types/__init__.py +++ /dev/null @@ -1,100 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
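The AsyncIO transport defined above is selected automatically by the async client. A minimal sketch of the same RPC over ``grpc_asyncio``, assuming the same package-level exports (the project path is a placeholder):

.. code-block:: python

    import asyncio

    from google.cloud.bigquery_migration_v2alpha import SqlTranslationServiceAsyncClient
    from google.cloud.bigquery_migration_v2alpha.types import TranslateQueryRequest

    async def translate() -> str:
        # The async client defaults to SqlTranslationServiceGrpcAsyncIOTransport.
        client = SqlTranslationServiceAsyncClient()
        response = await client.translate_query(
            request=TranslateQueryRequest(
                parent="projects/my-project/locations/us",  # placeholder
                source_dialect=TranslateQueryRequest.SqlTranslationSourceDialect.TERADATA,
                query="SEL 1;",
            )
        )
        return response.translated_query

    print(asyncio.run(translate()))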
-# -from .assessment_task import ( - AssessmentOrchestrationResultDetails, - AssessmentTaskDetails, -) -from .migration_entities import ( - MigrationSubtask, - MigrationTask, - MigrationTaskOrchestrationResult, - MigrationWorkflow, -) -from .migration_error_details import ( - ErrorDetail, - ErrorLocation, - ResourceErrorDetail, -) -from .migration_metrics import ( - Point, - TimeInterval, - TimeSeries, - TypedValue, -) -from .migration_service import ( - CreateMigrationWorkflowRequest, - DeleteMigrationWorkflowRequest, - GetMigrationSubtaskRequest, - GetMigrationWorkflowRequest, - ListMigrationSubtasksRequest, - ListMigrationSubtasksResponse, - ListMigrationWorkflowsRequest, - ListMigrationWorkflowsResponse, - StartMigrationWorkflowRequest, -) -from .translation_service import ( - SqlTranslationError, - SqlTranslationErrorDetail, - SqlTranslationWarning, - TranslateQueryRequest, - TranslateQueryResponse, -) -from .translation_task import ( - BteqOptions, - DatasetReference, - Filter, - IdentifierSettings, - TeradataOptions, - TranslationFileMapping, - TranslationTaskDetails, -) - -__all__ = ( - 'AssessmentOrchestrationResultDetails', - 'AssessmentTaskDetails', - 'MigrationSubtask', - 'MigrationTask', - 'MigrationTaskOrchestrationResult', - 'MigrationWorkflow', - 'ErrorDetail', - 'ErrorLocation', - 'ResourceErrorDetail', - 'Point', - 'TimeInterval', - 'TimeSeries', - 'TypedValue', - 'CreateMigrationWorkflowRequest', - 'DeleteMigrationWorkflowRequest', - 'GetMigrationSubtaskRequest', - 'GetMigrationWorkflowRequest', - 'ListMigrationSubtasksRequest', - 'ListMigrationSubtasksResponse', - 'ListMigrationWorkflowsRequest', - 'ListMigrationWorkflowsResponse', - 'StartMigrationWorkflowRequest', - 'SqlTranslationError', - 'SqlTranslationErrorDetail', - 'SqlTranslationWarning', - 'TranslateQueryRequest', - 'TranslateQueryResponse', - 'BteqOptions', - 'DatasetReference', - 'Filter', - 'IdentifierSettings', - 'TeradataOptions', - 'TranslationFileMapping', - 'TranslationTaskDetails', -) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/types/assessment_task.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/types/assessment_task.py deleted file mode 100644 index ad303a5c8027..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/types/assessment_task.py +++ /dev/null @@ -1,84 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.bigquery.migration.v2alpha', - manifest={ - 'AssessmentTaskDetails', - 'AssessmentOrchestrationResultDetails', - }, -) - - -class AssessmentTaskDetails(proto.Message): - r"""Assessment task config. - - Attributes: - input_path (str): - Required. 
The Cloud Storage path for
- assessment input files.
- output_dataset (str):
- Required. The BigQuery dataset for output.
- querylogs_path (str):
- Optional. An optional Cloud Storage path to
- write the query logs (which is then used as an
- input path on the translation task).
- data_source (str):
- Required. The data source or data warehouse
- type (e.g. TERADATA/REDSHIFT) from which the
- input data is extracted.
- """
-
- input_path: str = proto.Field(
- proto.STRING,
- number=1,
- )
- output_dataset: str = proto.Field(
- proto.STRING,
- number=2,
- )
- querylogs_path: str = proto.Field(
- proto.STRING,
- number=3,
- )
- data_source: str = proto.Field(
- proto.STRING,
- number=4,
- )
-
-
-class AssessmentOrchestrationResultDetails(proto.Message):
- r"""Details for an assessment task orchestration result.
-
- Attributes:
- output_tables_schema_version (str):
- Optional. The version used for the output
- table schemas.
- """
-
- output_tables_schema_version: str = proto.Field(
- proto.STRING,
- number=1,
- )
-
-
-__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/types/migration_entities.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/types/migration_entities.py
deleted file mode 100644
index d7abae5caed3..000000000000
--- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/types/migration_entities.py
+++ /dev/null
@@ -1,399 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-from __future__ import annotations
-
-from typing import MutableMapping, MutableSequence
-
-import proto # type: ignore
-
-from google.cloud.bigquery_migration_v2alpha.types import assessment_task
-from google.cloud.bigquery_migration_v2alpha.types import migration_error_details
-from google.cloud.bigquery_migration_v2alpha.types import migration_metrics
-from google.cloud.bigquery_migration_v2alpha.types import translation_task
-from google.protobuf import any_pb2 # type: ignore
-from google.protobuf import timestamp_pb2 # type: ignore
-from google.rpc import error_details_pb2 # type: ignore
-
-
-__protobuf__ = proto.module(
- package='google.cloud.bigquery.migration.v2alpha',
- manifest={
- 'MigrationWorkflow',
- 'MigrationTask',
- 'MigrationSubtask',
- 'MigrationTaskOrchestrationResult',
- },
-)
-
-
-class MigrationWorkflow(proto.Message):
- r"""A migration workflow which specifies what needs to be done
- for an EDW migration.
-
- Attributes:
- name (str):
- Output only. Immutable. The unique identifier for the
- migration workflow. The ID is server-generated.
-
- Example: ``projects/123/locations/us/workflows/345``
- display_name (str):
- The display name of the workflow. This can be
- set to give a workflow a descriptive name. There
- is no guarantee or enforcement of uniqueness.
- tasks (MutableMapping[str, google.cloud.bigquery_migration_v2alpha.types.MigrationTask]):
- The tasks in a workflow in a named map. The
- name (i.e. key) has no meaning and is merely a
- convenient way to address a specific task in a
- workflow.
- state (google.cloud.bigquery_migration_v2alpha.types.MigrationWorkflow.State):
- Output only. The status of the workflow.
- create_time (google.protobuf.timestamp_pb2.Timestamp):
- Time when the workflow was created.
- last_update_time (google.protobuf.timestamp_pb2.Timestamp):
- Time when the workflow was last updated.
- """
- class State(proto.Enum):
- r"""Possible migration workflow states.
-
- Values:
- STATE_UNSPECIFIED (0):
- Workflow state is unspecified.
- DRAFT (1):
- Workflow is in draft status, i.e. tasks are
- not yet eligible for execution.
- RUNNING (2):
- Workflow is running (i.e. tasks are eligible
- for execution).
- PAUSED (3):
- Workflow is paused. Tasks currently in
- progress may continue, but no further tasks will
- be scheduled.
- COMPLETED (4):
- Workflow is complete. There should not be any
- task in a non-terminal state, but if they are
- (e.g. forced termination), they will not be
- scheduled.
- """
- STATE_UNSPECIFIED = 0
- DRAFT = 1
- RUNNING = 2
- PAUSED = 3
- COMPLETED = 4
-
- name: str = proto.Field(
- proto.STRING,
- number=1,
- )
- display_name: str = proto.Field(
- proto.STRING,
- number=6,
- )
- tasks: MutableMapping[str, 'MigrationTask'] = proto.MapField(
- proto.STRING,
- proto.MESSAGE,
- number=2,
- message='MigrationTask',
- )
- state: State = proto.Field(
- proto.ENUM,
- number=3,
- enum=State,
- )
- create_time: timestamp_pb2.Timestamp = proto.Field(
- proto.MESSAGE,
- number=4,
- message=timestamp_pb2.Timestamp,
- )
- last_update_time: timestamp_pb2.Timestamp = proto.Field(
- proto.MESSAGE,
- number=5,
- message=timestamp_pb2.Timestamp,
- )
-
-
-class MigrationTask(proto.Message):
- r"""A single task for a migration which has details about the
- configuration of the task.
-
- This message has `oneof`_ fields (mutually exclusive fields).
- For each oneof, at most one member field can be set at the same time.
- Setting any member of the oneof automatically clears all other
- members.
-
- .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
-
- Attributes:
- assessment_task_details (google.cloud.bigquery_migration_v2alpha.types.AssessmentTaskDetails):
- Task configuration for Assessment.
-
- This field is a member of `oneof`_ ``task_details``.
- translation_task_details (google.cloud.bigquery_migration_v2alpha.types.TranslationTaskDetails):
- Task configuration for Batch/Offline SQL
- Translation.
-
- This field is a member of `oneof`_ ``task_details``.
- id (str):
- Output only. Immutable. The unique identifier
- for the migration task. The ID is
- server-generated.
- type_ (str):
- The type of the task. This must be a
- supported task type.
- details (google.protobuf.any_pb2.Any):
- DEPRECATED! Use one of the task_details below. The details
- of the task. The type URL must be one of the supported task
- details messages and correspond to the Task's type.
- state (google.cloud.bigquery_migration_v2alpha.types.MigrationTask.State):
- Output only. The current state of the task.
- processing_error (google.rpc.error_details_pb2.ErrorInfo):
- Output only. An explanation that may be
- populated when the task is in FAILED state.
- create_time (google.protobuf.timestamp_pb2.Timestamp):
- Time when the task was created.
- last_update_time (google.protobuf.timestamp_pb2.Timestamp):
- Time when the task was last updated.
- orchestration_result (google.cloud.bigquery_migration_v2alpha.types.MigrationTaskOrchestrationResult):
- Output only. Additional information about the
- orchestration.
- """
- class State(proto.Enum):
- r"""Possible states of a migration task.
-
- Values:
- STATE_UNSPECIFIED (0):
- The state is unspecified.
- PENDING (1):
- The task is waiting for orchestration.
- ORCHESTRATING (2):
- The task is assigned to an orchestrator.
- RUNNING (3):
- The task is running, i.e. its subtasks are
- ready for execution.
- PAUSED (4):
- The task is paused. Assigned subtasks can
- continue, but no new subtasks will be scheduled.
- SUCCEEDED (5):
- The task finished successfully.
- FAILED (6):
- The task finished unsuccessfully.
- """
- STATE_UNSPECIFIED = 0
- PENDING = 1
- ORCHESTRATING = 2
- RUNNING = 3
- PAUSED = 4
- SUCCEEDED = 5
- FAILED = 6
-
- assessment_task_details: assessment_task.AssessmentTaskDetails = proto.Field(
- proto.MESSAGE,
- number=12,
- oneof='task_details',
- message=assessment_task.AssessmentTaskDetails,
- )
- translation_task_details: translation_task.TranslationTaskDetails = proto.Field(
- proto.MESSAGE,
- number=13,
- oneof='task_details',
- message=translation_task.TranslationTaskDetails,
- )
- id: str = proto.Field(
- proto.STRING,
- number=1,
- )
- type_: str = proto.Field(
- proto.STRING,
- number=2,
- )
- details: any_pb2.Any = proto.Field(
- proto.MESSAGE,
- number=3,
- message=any_pb2.Any,
- )
- state: State = proto.Field(
- proto.ENUM,
- number=4,
- enum=State,
- )
- processing_error: error_details_pb2.ErrorInfo = proto.Field(
- proto.MESSAGE,
- number=5,
- message=error_details_pb2.ErrorInfo,
- )
- create_time: timestamp_pb2.Timestamp = proto.Field(
- proto.MESSAGE,
- number=6,
- message=timestamp_pb2.Timestamp,
- )
- last_update_time: timestamp_pb2.Timestamp = proto.Field(
- proto.MESSAGE,
- number=7,
- message=timestamp_pb2.Timestamp,
- )
- orchestration_result: 'MigrationTaskOrchestrationResult' = proto.Field(
- proto.MESSAGE,
- number=10,
- message='MigrationTaskOrchestrationResult',
- )
-
-
-class MigrationSubtask(proto.Message):
- r"""A subtask for a migration which carries details about the
- configuration of the subtask. The content of the details should
- not matter to the end user, but is a contract between the
- subtask creator and subtask worker.
-
- Attributes:
- name (str):
- Output only. Immutable. The resource name for the migration
- subtask. The ID is server-generated.
-
- Example:
- ``projects/123/locations/us/workflows/345/subtasks/678``
- task_id (str):
- The unique ID of the task to which this
- subtask belongs.
- type_ (str):
- The type of the Subtask. The migration
- service does not check whether this is a known
- type. It is up to the task creator (i.e.
- orchestrator or worker) to ensure it only
- creates subtasks for which there are compatible
- workers polling for Subtasks.
- state (google.cloud.bigquery_migration_v2alpha.types.MigrationSubtask.State):
- Output only. The current state of the
- subtask.
- processing_error (google.rpc.error_details_pb2.ErrorInfo):
- Output only. An explanation that may be
- populated when the task is in FAILED state.
- resource_error_details (MutableSequence[google.cloud.bigquery_migration_v2alpha.types.ResourceErrorDetail]):
- Output only. Provides details of errors and
- issues encountered while processing the subtask.
- Presence of error details does not mean that the
- subtask failed.
- resource_error_count (int):
- The number of resources with errors. Note: This is not the
- total number of errors as each resource can have more than
- one error. This is used to indicate truncation by having a
- ``resource_error_count`` that is higher than the size of
- ``resource_error_details``.
- create_time (google.protobuf.timestamp_pb2.Timestamp):
- Time when the subtask was created.
- last_update_time (google.protobuf.timestamp_pb2.Timestamp):
- Time when the subtask was last updated.
- metrics (MutableSequence[google.cloud.bigquery_migration_v2alpha.types.TimeSeries]):
- The metrics for the subtask.
- """
- class State(proto.Enum):
- r"""Possible states of a migration subtask.
-
- Values:
- STATE_UNSPECIFIED (0):
- The state is unspecified.
- ACTIVE (1):
- The subtask is ready, i.e. it is ready for
- execution.
- RUNNING (2):
- The subtask is running, i.e. it is assigned
- to a worker for execution.
- SUCCEEDED (3):
- The subtask finished successfully.
- FAILED (4):
- The subtask finished unsuccessfully.
- PAUSED (5):
- The subtask is paused, i.e., it will not be
- scheduled. If it was already assigned, it might
- still finish but no new lease renewals will be
- granted.
- """
- STATE_UNSPECIFIED = 0
- ACTIVE = 1
- RUNNING = 2
- SUCCEEDED = 3
- FAILED = 4
- PAUSED = 5
-
- name: str = proto.Field(
- proto.STRING,
- number=1,
- )
- task_id: str = proto.Field(
- proto.STRING,
- number=2,
- )
- type_: str = proto.Field(
- proto.STRING,
- number=3,
- )
- state: State = proto.Field(
- proto.ENUM,
- number=5,
- enum=State,
- )
- processing_error: error_details_pb2.ErrorInfo = proto.Field(
- proto.MESSAGE,
- number=6,
- message=error_details_pb2.ErrorInfo,
- )
- resource_error_details: MutableSequence[migration_error_details.ResourceErrorDetail] = proto.RepeatedField(
- proto.MESSAGE,
- number=12,
- message=migration_error_details.ResourceErrorDetail,
- )
- resource_error_count: int = proto.Field(
- proto.INT32,
- number=13,
- )
- create_time: timestamp_pb2.Timestamp = proto.Field(
- proto.MESSAGE,
- number=7,
- message=timestamp_pb2.Timestamp,
- )
- last_update_time: timestamp_pb2.Timestamp = proto.Field(
- proto.MESSAGE,
- number=8,
- message=timestamp_pb2.Timestamp,
- )
- metrics: MutableSequence[migration_metrics.TimeSeries] = proto.RepeatedField(
- proto.MESSAGE,
- number=11,
- message=migration_metrics.TimeSeries,
- )
-
-
-class MigrationTaskOrchestrationResult(proto.Message):
- r"""Additional information from the orchestrator when it is done
- with the task orchestration.
-
-
- .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
-
- Attributes:
- assessment_details (google.cloud.bigquery_migration_v2alpha.types.AssessmentOrchestrationResultDetails):
- Details specific to assessment task types.
-
- This field is a member of `oneof`_ ``details``.
- """
-
- assessment_details: assessment_task.AssessmentOrchestrationResultDetails = proto.Field(
- proto.MESSAGE,
- number=1,
- oneof='details',
- message=assessment_task.AssessmentOrchestrationResultDetails,
- )
-
-
-__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/types/migration_error_details.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/types/migration_error_details.py
deleted file mode 100644
index 340e34cef26d..000000000000
--- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/types/migration_error_details.py
+++ /dev/null
@@ -1,116 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-from __future__ import annotations
-
-from typing import MutableMapping, MutableSequence
-
-import proto # type: ignore
-
-from google.rpc import error_details_pb2 # type: ignore
-
-
-__protobuf__ = proto.module(
- package='google.cloud.bigquery.migration.v2alpha',
- manifest={
- 'ResourceErrorDetail',
- 'ErrorDetail',
- 'ErrorLocation',
- },
-)
-
-
-class ResourceErrorDetail(proto.Message):
- r"""Provides details for errors and the corresponding resources.
-
- Attributes:
- resource_info (google.rpc.error_details_pb2.ResourceInfo):
- Required. Information about the resource
- where the error is located.
- error_details (MutableSequence[google.cloud.bigquery_migration_v2alpha.types.ErrorDetail]):
- Required. The error details for the resource.
- error_count (int):
- Required. How many errors there are in total for the
- resource. Truncation can be indicated by having an
- ``error_count`` that is higher than the size of
- ``error_details``.
- """
-
- resource_info: error_details_pb2.ResourceInfo = proto.Field(
- proto.MESSAGE,
- number=1,
- message=error_details_pb2.ResourceInfo,
- )
- error_details: MutableSequence['ErrorDetail'] = proto.RepeatedField(
- proto.MESSAGE,
- number=2,
- message='ErrorDetail',
- )
- error_count: int = proto.Field(
- proto.INT32,
- number=3,
- )
-
-
-class ErrorDetail(proto.Message):
- r"""Provides details for errors, e.g. issues that were
- encountered when processing a subtask.
-
- Attributes:
- location (google.cloud.bigquery_migration_v2alpha.types.ErrorLocation):
- Optional. The exact location within the
- resource (if applicable).
- error_info (google.rpc.error_details_pb2.ErrorInfo):
- Required. Describes the cause of the error
- with structured detail.
- """
-
- location: 'ErrorLocation' = proto.Field(
- proto.MESSAGE,
- number=1,
- message='ErrorLocation',
- )
- error_info: error_details_pb2.ErrorInfo = proto.Field(
- proto.MESSAGE,
- number=2,
- message=error_details_pb2.ErrorInfo,
- )
-
-
-class ErrorLocation(proto.Message):
- r"""Holds information about where the error is located.
-
- Attributes:
- line (int):
- Optional. If applicable, denotes the line
- where the error occurred. A zero value means
- that there is no line information.
- column (int):
- Optional. If applicable, denotes the column
- where the error occurred. A zero value means
- that there is no column information.
- """
-
- line: int = proto.Field(
- proto.INT32,
- number=1,
- )
- column: int = proto.Field(
- proto.INT32,
- number=2,
- )
-
-
-__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/types/migration_metrics.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/types/migration_metrics.py
deleted file mode 100644
index a124807b5e6a..000000000000
--- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/types/migration_metrics.py
+++ /dev/null
@@ -1,214 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-from __future__ import annotations
-
-from typing import MutableMapping, MutableSequence
-
-import proto # type: ignore
-
-from google.api import distribution_pb2 # type: ignore
-from google.api import metric_pb2 # type: ignore
-from google.protobuf import timestamp_pb2 # type: ignore
-
-
-__protobuf__ = proto.module(
- package='google.cloud.bigquery.migration.v2alpha',
- manifest={
- 'TimeSeries',
- 'Point',
- 'TimeInterval',
- 'TypedValue',
- },
-)
-
-
-class TimeSeries(proto.Message):
- r"""The metrics object for a SubTask.
-
- Attributes:
- metric (str):
- Required. The name of the metric.
-
- If the metric is not known by the service yet,
- it will be auto-created.
- value_type (google.api.metric_pb2.ValueType):
- Required. The value type of the time series.
- metric_kind (google.api.metric_pb2.MetricKind):
- Optional. The metric kind of the time series.
-
- If present, it must be the same as the metric kind of the
- associated metric. If the associated metric's descriptor
- must be auto-created, then this field specifies the metric
- kind of the new descriptor and must be either ``GAUGE`` (the
- default) or ``CUMULATIVE``.
- points (MutableSequence[google.cloud.bigquery_migration_v2alpha.types.Point]):
- Required. The data points of this time series. When listing
- time series, points are returned in reverse time order.
-
- When creating a time series, this field must contain exactly
- one point and the point's type must be the same as the value
- type of the associated metric. If the associated metric's
- descriptor must be auto-created, then the value type of the
- descriptor is determined by the point's type, which must be
- ``BOOL``, ``INT64``, ``DOUBLE``, or ``DISTRIBUTION``.
- """ - - metric: str = proto.Field( - proto.STRING, - number=1, - ) - value_type: metric_pb2.MetricDescriptor.ValueType = proto.Field( - proto.ENUM, - number=2, - enum=metric_pb2.MetricDescriptor.ValueType, - ) - metric_kind: metric_pb2.MetricDescriptor.MetricKind = proto.Field( - proto.ENUM, - number=3, - enum=metric_pb2.MetricDescriptor.MetricKind, - ) - points: MutableSequence['Point'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='Point', - ) - - -class Point(proto.Message): - r"""A single data point in a time series. - - Attributes: - interval (google.cloud.bigquery_migration_v2alpha.types.TimeInterval): - The time interval to which the data point applies. For - ``GAUGE`` metrics, the start time does not need to be - supplied, but if it is supplied, it must equal the end time. - For ``DELTA`` metrics, the start and end time should specify - a non-zero interval, with subsequent points specifying - contiguous and non-overlapping intervals. For ``CUMULATIVE`` - metrics, the start and end time should specify a non-zero - interval, with subsequent points specifying the same start - time and increasing end times, until an event resets the - cumulative value to zero and sets a new start time for the - following points. - value (google.cloud.bigquery_migration_v2alpha.types.TypedValue): - The value of the data point. - """ - - interval: 'TimeInterval' = proto.Field( - proto.MESSAGE, - number=1, - message='TimeInterval', - ) - value: 'TypedValue' = proto.Field( - proto.MESSAGE, - number=2, - message='TypedValue', - ) - - -class TimeInterval(proto.Message): - r"""A time interval extending just after a start time through an - end time. If the start time is the same as the end time, then - the interval represents a single point in time. - - Attributes: - start_time (google.protobuf.timestamp_pb2.Timestamp): - Optional. The beginning of the time interval. - The default value for the start time is the end - time. The start time must not be later than the - end time. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Required. The end of the time interval. - """ - - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - - -class TypedValue(proto.Message): - r"""A single strongly-typed value. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - bool_value (bool): - A Boolean value: ``true`` or ``false``. - - This field is a member of `oneof`_ ``value``. - int64_value (int): - A 64-bit integer. Its range is approximately - +/-9.2x10^18. - - This field is a member of `oneof`_ ``value``. - double_value (float): - A 64-bit double-precision floating-point - number. Its magnitude is approximately - +/-10^(+/-300) and it has 16 significant digits - of precision. - - This field is a member of `oneof`_ ``value``. - string_value (str): - A variable-length string value. - - This field is a member of `oneof`_ ``value``. - distribution_value (google.api.distribution_pb2.Distribution): - A distribution value. - - This field is a member of `oneof`_ ``value``. 
- """ - - bool_value: bool = proto.Field( - proto.BOOL, - number=1, - oneof='value', - ) - int64_value: int = proto.Field( - proto.INT64, - number=2, - oneof='value', - ) - double_value: float = proto.Field( - proto.DOUBLE, - number=3, - oneof='value', - ) - string_value: str = proto.Field( - proto.STRING, - number=4, - oneof='value', - ) - distribution_value: distribution_pb2.Distribution = proto.Field( - proto.MESSAGE, - number=5, - oneof='value', - message=distribution_pb2.Distribution, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/types/migration_service.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/types/migration_service.py deleted file mode 100644 index cd2840675ffb..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/types/migration_service.py +++ /dev/null @@ -1,288 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.bigquery_migration_v2alpha.types import migration_entities -from google.protobuf import field_mask_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.bigquery.migration.v2alpha', - manifest={ - 'CreateMigrationWorkflowRequest', - 'GetMigrationWorkflowRequest', - 'ListMigrationWorkflowsRequest', - 'ListMigrationWorkflowsResponse', - 'DeleteMigrationWorkflowRequest', - 'StartMigrationWorkflowRequest', - 'GetMigrationSubtaskRequest', - 'ListMigrationSubtasksRequest', - 'ListMigrationSubtasksResponse', - }, -) - - -class CreateMigrationWorkflowRequest(proto.Message): - r"""Request to create a migration workflow resource. - - Attributes: - parent (str): - Required. The name of the project to which this migration - workflow belongs. Example: ``projects/foo/locations/bar`` - migration_workflow (google.cloud.bigquery_migration_v2alpha.types.MigrationWorkflow): - Required. The migration workflow to create. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - migration_workflow: migration_entities.MigrationWorkflow = proto.Field( - proto.MESSAGE, - number=2, - message=migration_entities.MigrationWorkflow, - ) - - -class GetMigrationWorkflowRequest(proto.Message): - r"""A request to get a previously created migration workflow. - - Attributes: - name (str): - Required. The unique identifier for the migration workflow. - Example: ``projects/123/locations/us/workflows/1234`` - read_mask (google.protobuf.field_mask_pb2.FieldMask): - The list of fields to be retrieved. 
- """
-
- name: str = proto.Field(
- proto.STRING,
- number=1,
- )
- read_mask: field_mask_pb2.FieldMask = proto.Field(
- proto.MESSAGE,
- number=2,
- message=field_mask_pb2.FieldMask,
- )
-
-
-class ListMigrationWorkflowsRequest(proto.Message):
- r"""A request to list previously created migration workflows.
-
- Attributes:
- parent (str):
- Required. The project and location of the migration
- workflows to list. Example: ``projects/123/locations/us``
- read_mask (google.protobuf.field_mask_pb2.FieldMask):
- The list of fields to be retrieved.
- page_size (int):
- The maximum number of migration workflows to
- return. The service may return fewer than this
- number.
- page_token (str):
- A page token, received from a previous
- ``ListMigrationWorkflows`` call. Provide this to retrieve
- the subsequent page.
-
- When paginating, all other parameters provided to
- ``ListMigrationWorkflows`` must match the call that provided
- the page token.
- """
-
- parent: str = proto.Field(
- proto.STRING,
- number=1,
- )
- read_mask: field_mask_pb2.FieldMask = proto.Field(
- proto.MESSAGE,
- number=2,
- message=field_mask_pb2.FieldMask,
- )
- page_size: int = proto.Field(
- proto.INT32,
- number=3,
- )
- page_token: str = proto.Field(
- proto.STRING,
- number=4,
- )
-
-
-class ListMigrationWorkflowsResponse(proto.Message):
- r"""Response object for a ``ListMigrationWorkflows`` call.
-
- Attributes:
- migration_workflows (MutableSequence[google.cloud.bigquery_migration_v2alpha.types.MigrationWorkflow]):
- The migration workflows for the specified
- project / location.
- next_page_token (str):
- A token, which can be sent as ``page_token`` to retrieve the
- next page. If this field is omitted, there are no subsequent
- pages.
- """
-
- @property
- def raw_page(self):
- return self
-
- migration_workflows: MutableSequence[migration_entities.MigrationWorkflow] = proto.RepeatedField(
- proto.MESSAGE,
- number=1,
- message=migration_entities.MigrationWorkflow,
- )
- next_page_token: str = proto.Field(
- proto.STRING,
- number=2,
- )
-
-
-class DeleteMigrationWorkflowRequest(proto.Message):
- r"""A request to delete a previously created migration workflow.
-
- Attributes:
- name (str):
- Required. The unique identifier for the migration workflow.
- Example: ``projects/123/locations/us/workflows/1234``
- """
-
- name: str = proto.Field(
- proto.STRING,
- number=1,
- )
-
-
-class StartMigrationWorkflowRequest(proto.Message):
- r"""A request to start a previously created migration workflow.
-
- Attributes:
- name (str):
- Required. The unique identifier for the migration workflow.
- Example: ``projects/123/locations/us/workflows/1234``
- """
-
- name: str = proto.Field(
- proto.STRING,
- number=1,
- )
-
-
-class GetMigrationSubtaskRequest(proto.Message):
- r"""A request to get a previously created migration subtask.
-
- Attributes:
- name (str):
- Required. The unique identifier for the migration subtask.
- Example:
- ``projects/123/locations/us/workflows/1234/subtasks/543``
- read_mask (google.protobuf.field_mask_pb2.FieldMask):
- Optional. The list of fields to be retrieved.
- """
-
- name: str = proto.Field(
- proto.STRING,
- number=1,
- )
- read_mask: field_mask_pb2.FieldMask = proto.Field(
- proto.MESSAGE,
- number=2,
- message=field_mask_pb2.FieldMask,
- )
-
-
-class ListMigrationSubtasksRequest(proto.Message):
- r"""A request to list previously created migration subtasks.
-
- Attributes:
- parent (str):
- Required. The migration task of the subtasks to list.
- Example: ``projects/123/locations/us/workflows/1234``
- read_mask (google.protobuf.field_mask_pb2.FieldMask):
- Optional. The list of fields to be retrieved.
- page_size (int):
- Optional. The maximum number of migration
- tasks to return. The service may return fewer
- than this number.
- page_token (str):
- Optional. A page token, received from a previous
- ``ListMigrationSubtasks`` call. Provide this to retrieve the
- subsequent page.
-
- When paginating, all other parameters provided to
- ``ListMigrationSubtasks`` must match the call that provided
- the page token.
- filter (str):
- Optional. The filter to apply. This can be used to get the
- subtasks of a specific task in a workflow, e.g.
- ``migration_task = "ab012"`` where ``"ab012"`` is the task
- ID (not the name in the named map).
- """
-
- parent: str = proto.Field(
- proto.STRING,
- number=1,
- )
- read_mask: field_mask_pb2.FieldMask = proto.Field(
- proto.MESSAGE,
- number=2,
- message=field_mask_pb2.FieldMask,
- )
- page_size: int = proto.Field(
- proto.INT32,
- number=3,
- )
- page_token: str = proto.Field(
- proto.STRING,
- number=4,
- )
- filter: str = proto.Field(
- proto.STRING,
- number=5,
- )
-
-
-class ListMigrationSubtasksResponse(proto.Message):
- r"""Response object for a ``ListMigrationSubtasks`` call.
-
- Attributes:
- migration_subtasks (MutableSequence[google.cloud.bigquery_migration_v2alpha.types.MigrationSubtask]):
- The migration subtasks for the specified
- task.
- next_page_token (str):
- A token, which can be sent as ``page_token`` to retrieve the
- next page. If this field is omitted, there are no subsequent
- pages.
- """
-
- @property
- def raw_page(self):
- return self
-
- migration_subtasks: MutableSequence[migration_entities.MigrationSubtask] = proto.RepeatedField(
- proto.MESSAGE,
- number=1,
- message=migration_entities.MigrationSubtask,
- )
- next_page_token: str = proto.Field(
- proto.STRING,
- number=2,
- )
-
-
-__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/types/translation_service.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/types/translation_service.py
deleted file mode 100644
index a39cdf5acc91..000000000000
--- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/types/translation_service.py
+++ /dev/null
@@ -1,201 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
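The request and response messages above correspond one-to-one to methods on the generated MigrationServiceClient, with list responses wrapped in auto-paging iterators. A hedged sketch of the create/start/list flow, assuming the standard generated surface and placeholder resource names:

.. code-block:: python

    from google.cloud.bigquery_migration_v2alpha import MigrationServiceClient
    from google.cloud.bigquery_migration_v2alpha.types import (
        CreateMigrationWorkflowRequest,
        MigrationWorkflow,
    )

    client = MigrationServiceClient()

    # Create a workflow (it starts in DRAFT state), then make its tasks
    # eligible for execution.
    workflow = client.create_migration_workflow(
        request=CreateMigrationWorkflowRequest(
            parent="projects/my-project/locations/us",  # placeholder
            migration_workflow=MigrationWorkflow(display_name="demo workflow"),
        )
    )
    client.start_migration_workflow(name=workflow.name)

    # The returned pager follows next_page_token across pages transparently.
    for subtask in client.list_migration_subtasks(parent=workflow.name):
        print(subtask.name, subtask.state)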
-#
-from __future__ import annotations
-
-from typing import MutableMapping, MutableSequence
-
-import proto # type: ignore
-
-
-__protobuf__ = proto.module(
- package='google.cloud.bigquery.migration.v2alpha',
- manifest={
- 'TranslateQueryRequest',
- 'TranslateQueryResponse',
- 'SqlTranslationErrorDetail',
- 'SqlTranslationError',
- 'SqlTranslationWarning',
- },
-)
-
-
-class TranslateQueryRequest(proto.Message):
- r"""A request to translate a SQL query to Standard SQL.
-
- Attributes:
- parent (str):
- Required. The name of the project to which this translation
- request belongs. Example: ``projects/foo/locations/bar``
- source_dialect (google.cloud.bigquery_migration_v2alpha.types.TranslateQueryRequest.SqlTranslationSourceDialect):
- Required. The source SQL dialect of ``queries``.
- query (str):
- Required. The query to be translated.
- """
- class SqlTranslationSourceDialect(proto.Enum):
- r"""Supported SQL translation source dialects.
-
- Values:
- SQL_TRANSLATION_SOURCE_DIALECT_UNSPECIFIED (0):
- SqlTranslationSourceDialect not specified.
- TERADATA (1):
- Teradata SQL.
- """
- SQL_TRANSLATION_SOURCE_DIALECT_UNSPECIFIED = 0
- TERADATA = 1
-
- parent: str = proto.Field(
- proto.STRING,
- number=1,
- )
- source_dialect: SqlTranslationSourceDialect = proto.Field(
- proto.ENUM,
- number=2,
- enum=SqlTranslationSourceDialect,
- )
- query: str = proto.Field(
- proto.STRING,
- number=3,
- )
-
-
-class TranslateQueryResponse(proto.Message):
- r"""The response from translating a SQL query to Standard SQL.
-
- Attributes:
- translation_job (str):
- Output only. Immutable. The unique identifier for the SQL
- translation job. Example:
- ``projects/123/locations/us/translation/1234``
- translated_query (str):
- The translated result. This will be empty if
- the translation fails.
- errors (MutableSequence[google.cloud.bigquery_migration_v2alpha.types.SqlTranslationError]):
- The list of errors encountered during the
- translation, if present.
- warnings (MutableSequence[google.cloud.bigquery_migration_v2alpha.types.SqlTranslationWarning]):
- The list of warnings encountered during the
- translation; if present, they indicate that the
- translation is not semantically correct.
- """
-
- translation_job: str = proto.Field(
- proto.STRING,
- number=4,
- )
- translated_query: str = proto.Field(
- proto.STRING,
- number=1,
- )
- errors: MutableSequence['SqlTranslationError'] = proto.RepeatedField(
- proto.MESSAGE,
- number=2,
- message='SqlTranslationError',
- )
- warnings: MutableSequence['SqlTranslationWarning'] = proto.RepeatedField(
- proto.MESSAGE,
- number=3,
- message='SqlTranslationWarning',
- )
-
-
-class SqlTranslationErrorDetail(proto.Message):
- r"""Structured error object capturing the error message and the
- location in the source text where the error occurs.
-
- Attributes:
- row (int):
- Specifies the row from the source text where
- the error occurred.
- column (int):
- Specifies the column from the source text
- where the error occurred.
- message (str):
- A human-readable description of the error.
- """
-
- row: int = proto.Field(
- proto.INT64,
- number=1,
- )
- column: int = proto.Field(
- proto.INT64,
- number=2,
- )
- message: str = proto.Field(
- proto.STRING,
- number=3,
- )
-
-
-class SqlTranslationError(proto.Message):
- r"""The detailed error object if the SQL translation job fails.
-
- Attributes:
- error_type (google.cloud.bigquery_migration_v2alpha.types.SqlTranslationError.SqlTranslationErrorType):
- The type of SQL translation error.
- error_detail (google.cloud.bigquery_migration_v2alpha.types.SqlTranslationErrorDetail):
- Specifies the details of the error, including
- the error message and location from the source
- text.
- """
- class SqlTranslationErrorType(proto.Enum):
- r"""The error type of the SQL translation job.
-
- Values:
- SQL_TRANSLATION_ERROR_TYPE_UNSPECIFIED (0):
- SqlTranslationErrorType not specified.
- SQL_PARSE_ERROR (1):
- Failed to parse the input text as a SQL
- query.
- UNSUPPORTED_SQL_FUNCTION (2):
- Found unsupported functions in the input SQL
- query that cannot be translated.
- """
- SQL_TRANSLATION_ERROR_TYPE_UNSPECIFIED = 0
- SQL_PARSE_ERROR = 1
- UNSUPPORTED_SQL_FUNCTION = 2
-
- error_type: SqlTranslationErrorType = proto.Field(
- proto.ENUM,
- number=1,
- enum=SqlTranslationErrorType,
- )
- error_detail: 'SqlTranslationErrorDetail' = proto.Field(
- proto.MESSAGE,
- number=2,
- message='SqlTranslationErrorDetail',
- )
-
-
-class SqlTranslationWarning(proto.Message):
- r"""The detailed warning object if the SQL translation job is
- completed but not semantically correct.
-
- Attributes:
- warning_detail (google.cloud.bigquery_migration_v2alpha.types.SqlTranslationErrorDetail):
- Specifies the details of the warning,
- including the warning message and location from
- the source text.
- """
-
- warning_detail: 'SqlTranslationErrorDetail' = proto.Field(
- proto.MESSAGE,
- number=1,
- message='SqlTranslationErrorDetail',
- )
-
-
-__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/types/translation_task.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/types/translation_task.py
deleted file mode 100644
index 11f894fa575d..000000000000
--- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/google/cloud/bigquery_migration_v2alpha/types/translation_task.py
+++ /dev/null
@@ -1,368 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-from __future__ import annotations
-
-from typing import MutableMapping, MutableSequence
-
-import proto # type: ignore
-
-
-__protobuf__ = proto.module(
- package='google.cloud.bigquery.migration.v2alpha',
- manifest={
- 'TranslationFileMapping',
- 'TranslationTaskDetails',
- 'Filter',
- 'IdentifierSettings',
- 'TeradataOptions',
- 'BteqOptions',
- 'DatasetReference',
- },
-)
-
-
-class TranslationFileMapping(proto.Message):
- r"""Mapping between an input and output file to be translated in
- a subtask.
-
- Attributes:
- input_path (str):
- The Cloud Storage path for a file to be
- translated in a subtask.
- output_path (str):
- The Cloud Storage path to write back the
- corresponding input file to.
- """ - - input_path: str = proto.Field( - proto.STRING, - number=1, - ) - output_path: str = proto.Field( - proto.STRING, - number=2, - ) - - -class TranslationTaskDetails(proto.Message): - r"""The translation task config to capture necessary settings for - a translation task and subtask. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - teradata_options (google.cloud.bigquery_migration_v2alpha.types.TeradataOptions): - The Teradata SQL specific settings for the - translation task. - - This field is a member of `oneof`_ ``language_options``. - bteq_options (google.cloud.bigquery_migration_v2alpha.types.BteqOptions): - The BTEQ specific settings for the - translation task. - - This field is a member of `oneof`_ ``language_options``. - input_path (str): - The Cloud Storage path for translation input - files. - output_path (str): - The Cloud Storage path for translation output - files. - file_paths (MutableSequence[google.cloud.bigquery_migration_v2alpha.types.TranslationFileMapping]): - Cloud Storage files to be processed for - translation. - schema_path (str): - The Cloud Storage path to DDL files as table - schema to assist semantic translation. - file_encoding (google.cloud.bigquery_migration_v2alpha.types.TranslationTaskDetails.FileEncoding): - The file encoding type. - identifier_settings (google.cloud.bigquery_migration_v2alpha.types.IdentifierSettings): - The settings for SQL identifiers. - special_token_map (MutableMapping[str, google.cloud.bigquery_migration_v2alpha.types.TranslationTaskDetails.TokenType]): - The map capturing special tokens to be - replaced during translation. The key is special - token in string. The value is the token data - type. This is used to translate SQL query - template which contains special token as place - holder. The special token makes a query invalid - to parse. This map will be applied to annotate - those special token with types to let parser - understand how to parse them into proper - structure with type information. - filter (google.cloud.bigquery_migration_v2alpha.types.Filter): - The filter applied to translation details. - translation_exception_table (str): - Specifies the exact name of the bigquery - table ("dataset.table") to be used for surfacing - raw translation errors. If the table does not - exist, we will create it. If it already exists - and the schema is the same, we will re-use. If - the table exists and the schema is different, we - will throw an error. - """ - class FileEncoding(proto.Enum): - r"""The file encoding types. - - Values: - FILE_ENCODING_UNSPECIFIED (0): - File encoding setting is not specified. - UTF_8 (1): - File encoding is UTF_8. - ISO_8859_1 (2): - File encoding is ISO_8859_1. - US_ASCII (3): - File encoding is US_ASCII. - UTF_16 (4): - File encoding is UTF_16. - UTF_16LE (5): - File encoding is UTF_16LE. - UTF_16BE (6): - File encoding is UTF_16BE. - """ - FILE_ENCODING_UNSPECIFIED = 0 - UTF_8 = 1 - ISO_8859_1 = 2 - US_ASCII = 3 - UTF_16 = 4 - UTF_16LE = 5 - UTF_16BE = 6 - - class TokenType(proto.Enum): - r"""The special token data type. - - Values: - TOKEN_TYPE_UNSPECIFIED (0): - Token type is not specified. - STRING (1): - Token type as string. - INT64 (2): - Token type as integer. - NUMERIC (3): - Token type as numeric. 
-            BOOL (4):
-                Token type as boolean.
-            FLOAT64 (5):
-                Token type as float.
-            DATE (6):
-                Token type as date.
-            TIMESTAMP (7):
-                Token type as timestamp.
-        """
-        TOKEN_TYPE_UNSPECIFIED = 0
-        STRING = 1
-        INT64 = 2
-        NUMERIC = 3
-        BOOL = 4
-        FLOAT64 = 5
-        DATE = 6
-        TIMESTAMP = 7
-
-    teradata_options: 'TeradataOptions' = proto.Field(
-        proto.MESSAGE,
-        number=10,
-        oneof='language_options',
-        message='TeradataOptions',
-    )
-    bteq_options: 'BteqOptions' = proto.Field(
-        proto.MESSAGE,
-        number=11,
-        oneof='language_options',
-        message='BteqOptions',
-    )
-    input_path: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    output_path: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-    file_paths: MutableSequence['TranslationFileMapping'] = proto.RepeatedField(
-        proto.MESSAGE,
-        number=12,
-        message='TranslationFileMapping',
-    )
-    schema_path: str = proto.Field(
-        proto.STRING,
-        number=3,
-    )
-    file_encoding: FileEncoding = proto.Field(
-        proto.ENUM,
-        number=4,
-        enum=FileEncoding,
-    )
-    identifier_settings: 'IdentifierSettings' = proto.Field(
-        proto.MESSAGE,
-        number=5,
-        message='IdentifierSettings',
-    )
-    special_token_map: MutableMapping[str, TokenType] = proto.MapField(
-        proto.STRING,
-        proto.ENUM,
-        number=6,
-        enum=TokenType,
-    )
-    filter: 'Filter' = proto.Field(
-        proto.MESSAGE,
-        number=7,
-        message='Filter',
-    )
-    translation_exception_table: str = proto.Field(
-        proto.STRING,
-        number=13,
-    )
-
-
-class Filter(proto.Message):
-    r"""The filter applied to fields of translation details.
-
-    Attributes:
-        input_file_exclusion_prefixes (MutableSequence[str]):
-            The list of prefixes used to exclude
-            processing for input files.
-    """
-
-    input_file_exclusion_prefixes: MutableSequence[str] = proto.RepeatedField(
-        proto.STRING,
-        number=1,
-    )
-
-
-class IdentifierSettings(proto.Message):
-    r"""Settings related to SQL identifiers.
-
-    Attributes:
-        output_identifier_case (google.cloud.bigquery_migration_v2alpha.types.IdentifierSettings.IdentifierCase):
-            The setting to control output queries'
-            identifier case.
-        identifier_rewrite_mode (google.cloud.bigquery_migration_v2alpha.types.IdentifierSettings.IdentifierRewriteMode):
-            Specifies the rewrite mode for SQL
-            identifiers.
-    """
-    class IdentifierCase(proto.Enum):
-        r"""The identifier case type.
-
-        Values:
-            IDENTIFIER_CASE_UNSPECIFIED (0):
-                The identifier case is not specified.
-            ORIGINAL (1):
-                Identifiers will be kept in their original
-                case.
-            UPPER (2):
-                Identifiers will be in upper case.
-            LOWER (3):
-                Identifiers will be in lower case.
-        """
-        IDENTIFIER_CASE_UNSPECIFIED = 0
-        ORIGINAL = 1
-        UPPER = 2
-        LOWER = 3
-
-    class IdentifierRewriteMode(proto.Enum):
-        r"""The SQL identifier rewrite mode.
-
-        Values:
-            IDENTIFIER_REWRITE_MODE_UNSPECIFIED (0):
-                SQL identifier rewrite mode is unspecified.
-            NONE (1):
-                SQL identifiers won't be rewritten.
-            REWRITE_ALL (2):
-                All SQL identifiers will be rewritten.
-        """
-        IDENTIFIER_REWRITE_MODE_UNSPECIFIED = 0
-        NONE = 1
-        REWRITE_ALL = 2
-
-    output_identifier_case: IdentifierCase = proto.Field(
-        proto.ENUM,
-        number=1,
-        enum=IdentifierCase,
-    )
-    identifier_rewrite_mode: IdentifierRewriteMode = proto.Field(
-        proto.ENUM,
-        number=2,
-        enum=IdentifierRewriteMode,
-    )
-
-
-class TeradataOptions(proto.Message):
-    r"""Teradata SQL specific translation task related settings.
-    """
-
-
-class BteqOptions(proto.Message):
-    r"""BTEQ translation task related settings.
-
-    Attributes:
-        project_dataset (google.cloud.bigquery_migration_v2alpha.types.DatasetReference):
-            Specifies the project and dataset in BigQuery
-            that will be used for external table creation
-            during the translation.
-        default_path_uri (str):
-            The Cloud Storage location to be used as the
-            default path for files that are not otherwise
-            specified in the file replacement map.
-        file_replacement_map (MutableMapping[str, str]):
-            Maps the local paths that are used in BTEQ
-            scripts (the keys) to the paths in Cloud Storage
-            that should be used in their stead in the
-            translation (the values).
-    """
-
-    project_dataset: 'DatasetReference' = proto.Field(
-        proto.MESSAGE,
-        number=1,
-        message='DatasetReference',
-    )
-    default_path_uri: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-    file_replacement_map: MutableMapping[str, str] = proto.MapField(
-        proto.STRING,
-        proto.STRING,
-        number=3,
-    )
-
-
-class DatasetReference(proto.Message):
-    r"""Reference to a BigQuery dataset.
-
-    Attributes:
-        dataset_id (str):
-            A unique ID for this dataset, without the project name. The
-            ID must contain only letters (a-z, A-Z), numbers (0-9), or
-            underscores (_). The maximum length is 1,024 characters.
-        project_id (str):
-            The ID of the project containing this
-            dataset.
-    """
-
-    dataset_id: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    project_id: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-
-
-__all__ = tuple(sorted(__protobuf__.manifest))
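The types deleted above are ordinary proto-plus messages, so a translation task config can be assembled by composing them as keyword arguments. A minimal illustrative sketch follows; the bucket paths and the "${run_date}" token are hypothetical placeholders, not values taken from the generated code:

    from google.cloud.bigquery_migration_v2alpha import types

    # Hypothetical task config: translate Teradata SQL templates held in
    # Cloud Storage. `teradata_options` and `bteq_options` share the
    # `language_options` oneof, so setting one clears the other.
    task_details = types.TranslationTaskDetails(
        teradata_options=types.TeradataOptions(),
        input_path="gs://example-bucket/input",
        output_path="gs://example-bucket/output",
        file_encoding=types.TranslationTaskDetails.FileEncoding.UTF_8,
        # Rewrite all identifiers to upper case in the translated SQL.
        identifier_settings=types.IdentifierSettings(
            output_identifier_case=types.IdentifierSettings.IdentifierCase.UPPER,
            identifier_rewrite_mode=types.IdentifierSettings.IdentifierRewriteMode.REWRITE_ALL,
        ),
        # Annotate the "${run_date}" placeholder so the parser accepts it.
        special_token_map={
            "${run_date}": types.TranslationTaskDetails.TokenType.DATE,
        },
    )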
diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/mypy.ini b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/mypy.ini
deleted file mode 100644
index 574c5aed394b..000000000000
--- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/mypy.ini
+++ /dev/null
@@ -1,3 +0,0 @@
-[mypy]
-python_version = 3.7
-namespace_packages = True
diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/noxfile.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/noxfile.py
deleted file mode 100644
index 8dd565a981ae..000000000000
--- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/noxfile.py
+++ /dev/null
@@ -1,280 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import os
-import pathlib
-import re
-import shutil
-import subprocess
-import sys
-
-
-import nox  # type: ignore
-
-ALL_PYTHON = [
-    "3.7",
-    "3.8",
-    "3.9",
-    "3.10",
-    "3.11",
-    "3.12",
-    "3.13",
-]
-
-CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
-
-LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt"
-PACKAGE_NAME = 'google-cloud-bigquery-migration'
-
-BLACK_VERSION = "black==22.3.0"
-BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"]
-DEFAULT_PYTHON_VERSION = "3.13"
-
-nox.options.sessions = [
-    "unit",
-    "cover",
-    "mypy",
-    "check_lower_bounds",
-    # exclude update_lower_bounds from default
-    "docs",
-    "blacken",
-    "lint",
-    "prerelease_deps",
-]
-
-@nox.session(python=ALL_PYTHON)
-@nox.parametrize(
-    "protobuf_implementation",
-    [ "python", "upb", "cpp" ],
-)
-def unit(session, protobuf_implementation):
-    """Run the unit test suite."""
-
-    if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
-        session.skip("cpp implementation is not supported in python 3.11+")
-
-    session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"')
-    session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt")
-
-    # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped.
-    # The 'cpp' implementation requires Protobuf<4.
-    if protobuf_implementation == "cpp":
-        session.install("protobuf<4")
-
-    session.run(
-        'py.test',
-        '--quiet',
-        '--cov=google/cloud/bigquery_migration_v2alpha/',
-        '--cov=tests/',
-        '--cov-config=.coveragerc',
-        '--cov-report=term',
-        '--cov-report=html',
-        os.path.join('tests', 'unit', ''.join(session.posargs)),
-        env={
-            "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
-        },
-    )
-
-@nox.session(python=ALL_PYTHON[-1])
-@nox.parametrize(
-    "protobuf_implementation",
-    [ "python", "upb", "cpp" ],
-)
-def prerelease_deps(session, protobuf_implementation):
-    """Run the unit test suite against pre-release versions of dependencies."""
-
-    if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
-        session.skip("cpp implementation is not supported in python 3.11+")
-
-    # Install test environment dependencies
-    session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"')
-
-    # Install the package without dependencies
-    session.install('-e', '.', '--no-deps')
-
-    # We test the minimum dependency versions using the minimum Python
-    # version, so the lowest Python runtime that we test has a corresponding
-    # constraints file, located at `testing/constraints-{ALL_PYTHON[0]}.txt`,
-    # which contains all of the dependencies and extras.
-    with open(
-        CURRENT_DIRECTORY
-        / "testing"
-        / f"constraints-{ALL_PYTHON[0]}.txt",
-        encoding="utf-8",
-    ) as constraints_file:
-        constraints_text = constraints_file.read()
-
-    # Ignore leading whitespace and comment lines.
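-    # Each pinned requirement in that file has the form `package==x.y.z`;
-    # the lookahead regex below captures just the package names, so the
-    # full dependency set can be installed by name before selected
-    # packages are upgraded to pre-release versions further down.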
-    constraints_deps = [
-        match.group(1)
-        for match in re.finditer(
-            r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE
-        )
-    ]
-
-    session.install(*constraints_deps)
-
-    prerel_deps = [
-        "googleapis-common-protos",
-        "google-api-core",
-        "google-auth",
-        # Exclude grpcio==1.67.0rc1, which does not support python 3.13
-        "grpcio!=1.67.0rc1",
-        "grpcio-status",
-        "protobuf",
-        "proto-plus",
-    ]
-
-    for dep in prerel_deps:
-        session.install("--pre", "--no-deps", "--upgrade", dep)
-
-    # Remaining dependencies
-    other_deps = [
-        "requests",
-    ]
-    session.install(*other_deps)
-
-    # Print out prerelease package versions
-
-    session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)")
-    session.run("python", "-c", "import google.auth; print(google.auth.__version__)")
-    session.run("python", "-c", "import grpc; print(grpc.__version__)")
-    session.run(
-        "python", "-c", "import google.protobuf; print(google.protobuf.__version__)"
-    )
-    session.run(
-        "python", "-c", "import proto; print(proto.__version__)"
-    )
-
-    session.run(
-        'py.test',
-        '--quiet',
-        '--cov=google/cloud/bigquery_migration_v2alpha/',
-        '--cov=tests/',
-        '--cov-config=.coveragerc',
-        '--cov-report=term',
-        '--cov-report=html',
-        os.path.join('tests', 'unit', ''.join(session.posargs)),
-        env={
-            "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
-        },
-    )
-
-
-@nox.session(python=DEFAULT_PYTHON_VERSION)
-def cover(session):
-    """Run the final coverage report.
-    This outputs the coverage report aggregating coverage from the unit
-    test runs (not system test runs), and then erases coverage data.
-    """
-    session.install("coverage", "pytest-cov")
-    session.run("coverage", "report", "--show-missing", "--fail-under=100")
-
-    session.run("coverage", "erase")
-
-
-@nox.session(python=ALL_PYTHON)
-def mypy(session):
-    """Run the type checker."""
-    session.install(
-        'mypy',
-        'types-requests',
-        'types-protobuf'
-    )
-    session.install('.')
-    session.run(
-        'mypy',
-        '-p',
-        'google',
-    )
-
-
-@nox.session
-def update_lower_bounds(session):
-    """Update lower bounds in constraints.txt to match setup.py."""
-    session.install('google-cloud-testutils')
-    session.install('.')
-
-    session.run(
-        'lower-bound-checker',
-        'update',
-        '--package-name',
-        PACKAGE_NAME,
-        '--constraints-file',
-        str(LOWER_BOUND_CONSTRAINTS_FILE),
-    )
-
-
-@nox.session
-def check_lower_bounds(session):
-    """Check that the lower bounds in setup.py are reflected in the constraints file."""
-    session.install('google-cloud-testutils')
-    session.install('.')
-
-    session.run(
-        'lower-bound-checker',
-        'check',
-        '--package-name',
-        PACKAGE_NAME,
-        '--constraints-file',
-        str(LOWER_BOUND_CONSTRAINTS_FILE),
-    )
-
-@nox.session(python=DEFAULT_PYTHON_VERSION)
-def docs(session):
-    """Build the docs for this library."""
-
-    session.install("-e", ".")
-    session.install("sphinx==7.0.1", "alabaster", "recommonmark")
-
-    shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
-    session.run(
-        "sphinx-build",
-        "-W",  # warnings as errors
-        "-T",  # show full traceback on exception
-        "-N",  # no colors
-        "-b",
-        "html",
-        "-d",
-        os.path.join("docs", "_build", "doctrees", ""),
-        os.path.join("docs", ""),
-        os.path.join("docs", "_build", "html", ""),
-    )
-
-
-@nox.session(python=DEFAULT_PYTHON_VERSION)
-def lint(session):
-    """Run linters.
-
-    Returns a failure if the linters find linting errors or sufficiently
-    serious code quality issues.
- """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_create_migration_workflow_async.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_create_migration_workflow_async.py deleted file mode 100644 index 86b9776664c7..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_create_migration_workflow_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateMigrationWorkflow -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-migration - - -# [START bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_migration_v2alpha - - -async def sample_create_migration_workflow(): - # Create a client - client = bigquery_migration_v2alpha.MigrationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_migration_v2alpha.CreateMigrationWorkflowRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_migration_workflow(request=request) - - # Handle the response - print(response) - -# [END bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async] diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_create_migration_workflow_sync.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_create_migration_workflow_sync.py deleted file mode 100644 index 27afbfcdbd49..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_create_migration_workflow_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateMigrationWorkflow -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-migration - - -# [START bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_migration_v2alpha - - -def sample_create_migration_workflow(): - # Create a client - client = bigquery_migration_v2alpha.MigrationServiceClient() - - # Initialize request argument(s) - request = bigquery_migration_v2alpha.CreateMigrationWorkflowRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_migration_workflow(request=request) - - # Handle the response - print(response) - -# [END bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_delete_migration_workflow_async.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_delete_migration_workflow_async.py deleted file mode 100644 index 31de497a4a4f..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_delete_migration_workflow_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteMigrationWorkflow -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-migration - - -# [START bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_migration_v2alpha - - -async def sample_delete_migration_workflow(): - # Create a client - client = bigquery_migration_v2alpha.MigrationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_migration_v2alpha.DeleteMigrationWorkflowRequest( - name="name_value", - ) - - # Make the request - await client.delete_migration_workflow(request=request) - - -# [END bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async] diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_delete_migration_workflow_sync.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_delete_migration_workflow_sync.py deleted file mode 100644 index db98d4e5cd51..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_delete_migration_workflow_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteMigrationWorkflow -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-migration - - -# [START bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_migration_v2alpha - - -def sample_delete_migration_workflow(): - # Create a client - client = bigquery_migration_v2alpha.MigrationServiceClient() - - # Initialize request argument(s) - request = bigquery_migration_v2alpha.DeleteMigrationWorkflowRequest( - name="name_value", - ) - - # Make the request - client.delete_migration_workflow(request=request) - - -# [END bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_get_migration_subtask_async.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_get_migration_subtask_async.py deleted file mode 100644 index 8b3fb302accd..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_get_migration_subtask_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetMigrationSubtask -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-migration - - -# [START bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_migration_v2alpha - - -async def sample_get_migration_subtask(): - # Create a client - client = bigquery_migration_v2alpha.MigrationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_migration_v2alpha.GetMigrationSubtaskRequest( - name="name_value", - ) - - # Make the request - response = await client.get_migration_subtask(request=request) - - # Handle the response - print(response) - -# [END bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async] diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_get_migration_subtask_sync.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_get_migration_subtask_sync.py deleted file mode 100644 index e61027040273..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_get_migration_subtask_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetMigrationSubtask -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-migration - - -# [START bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_migration_v2alpha - - -def sample_get_migration_subtask(): - # Create a client - client = bigquery_migration_v2alpha.MigrationServiceClient() - - # Initialize request argument(s) - request = bigquery_migration_v2alpha.GetMigrationSubtaskRequest( - name="name_value", - ) - - # Make the request - response = client.get_migration_subtask(request=request) - - # Handle the response - print(response) - -# [END bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_get_migration_workflow_async.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_get_migration_workflow_async.py deleted file mode 100644 index 7f7876b98e3c..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_get_migration_workflow_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetMigrationWorkflow -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-migration - - -# [START bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_migration_v2alpha - - -async def sample_get_migration_workflow(): - # Create a client - client = bigquery_migration_v2alpha.MigrationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_migration_v2alpha.GetMigrationWorkflowRequest( - name="name_value", - ) - - # Make the request - response = await client.get_migration_workflow(request=request) - - # Handle the response - print(response) - -# [END bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async] diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_get_migration_workflow_sync.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_get_migration_workflow_sync.py deleted file mode 100644 index 37610f9512f5..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_get_migration_workflow_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetMigrationWorkflow -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-migration - - -# [START bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_migration_v2alpha - - -def sample_get_migration_workflow(): - # Create a client - client = bigquery_migration_v2alpha.MigrationServiceClient() - - # Initialize request argument(s) - request = bigquery_migration_v2alpha.GetMigrationWorkflowRequest( - name="name_value", - ) - - # Make the request - response = client.get_migration_workflow(request=request) - - # Handle the response - print(response) - -# [END bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_list_migration_subtasks_async.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_list_migration_subtasks_async.py deleted file mode 100644 index fc03407a3046..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_list_migration_subtasks_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListMigrationSubtasks -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-migration - - -# [START bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_migration_v2alpha - - -async def sample_list_migration_subtasks(): - # Create a client - client = bigquery_migration_v2alpha.MigrationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_migration_v2alpha.ListMigrationSubtasksRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_migration_subtasks(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async] diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_list_migration_subtasks_sync.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_list_migration_subtasks_sync.py deleted file mode 100644 index 548bd77db346..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_list_migration_subtasks_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListMigrationSubtasks -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-migration - - -# [START bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_migration_v2alpha - - -def sample_list_migration_subtasks(): - # Create a client - client = bigquery_migration_v2alpha.MigrationServiceClient() - - # Initialize request argument(s) - request = bigquery_migration_v2alpha.ListMigrationSubtasksRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_migration_subtasks(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_list_migration_workflows_async.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_list_migration_workflows_async.py deleted file mode 100644 index 6d3b354b9f0e..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_list_migration_workflows_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListMigrationWorkflows -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-migration - - -# [START bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_migration_v2alpha - - -async def sample_list_migration_workflows(): - # Create a client - client = bigquery_migration_v2alpha.MigrationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_migration_v2alpha.ListMigrationWorkflowsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_migration_workflows(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async] diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_list_migration_workflows_sync.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_list_migration_workflows_sync.py deleted file mode 100644 index db67f3ebf515..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_list_migration_workflows_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListMigrationWorkflows -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-migration - - -# [START bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_migration_v2alpha - - -def sample_list_migration_workflows(): - # Create a client - client = bigquery_migration_v2alpha.MigrationServiceClient() - - # Initialize request argument(s) - request = bigquery_migration_v2alpha.ListMigrationWorkflowsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_migration_workflows(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_start_migration_workflow_async.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_start_migration_workflow_async.py deleted file mode 100644 index fde5b4c7d709..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_start_migration_workflow_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for StartMigrationWorkflow -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-migration - - -# [START bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_migration_v2alpha - - -async def sample_start_migration_workflow(): - # Create a client - client = bigquery_migration_v2alpha.MigrationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_migration_v2alpha.StartMigrationWorkflowRequest( - name="name_value", - ) - - # Make the request - await client.start_migration_workflow(request=request) - - -# [END bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async] diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_start_migration_workflow_sync.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_start_migration_workflow_sync.py deleted file mode 100644 index cfca82290289..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_migration_service_start_migration_workflow_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for StartMigrationWorkflow -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-migration - - -# [START bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_migration_v2alpha - - -def sample_start_migration_workflow(): - # Create a client - client = bigquery_migration_v2alpha.MigrationServiceClient() - - # Initialize request argument(s) - request = bigquery_migration_v2alpha.StartMigrationWorkflowRequest( - name="name_value", - ) - - # Make the request - client.start_migration_workflow(request=request) - - -# [END bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_sql_translation_service_translate_query_async.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_sql_translation_service_translate_query_async.py deleted file mode 100644 index 43ccfe6d661a..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_sql_translation_service_translate_query_async.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for TranslateQuery -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-migration - - -# [START bigquerymigration_v2alpha_generated_SqlTranslationService_TranslateQuery_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_migration_v2alpha - - -async def sample_translate_query(): - # Create a client - client = bigquery_migration_v2alpha.SqlTranslationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_migration_v2alpha.TranslateQueryRequest( - parent="parent_value", - source_dialect="TERADATA", - query="query_value", - ) - - # Make the request - response = await client.translate_query(request=request) - - # Handle the response - print(response) - -# [END bigquerymigration_v2alpha_generated_SqlTranslationService_TranslateQuery_async] diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_sql_translation_service_translate_query_sync.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_sql_translation_service_translate_query_sync.py deleted file mode 100644 index edd20f95dfad..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/bigquerymigration_v2alpha_generated_sql_translation_service_translate_query_sync.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for TranslateQuery -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-migration - - -# [START bigquerymigration_v2alpha_generated_SqlTranslationService_TranslateQuery_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_migration_v2alpha - - -def sample_translate_query(): - # Create a client - client = bigquery_migration_v2alpha.SqlTranslationServiceClient() - - # Initialize request argument(s) - request = bigquery_migration_v2alpha.TranslateQueryRequest( - parent="parent_value", - source_dialect="TERADATA", - query="query_value", - ) - - # Make the request - response = client.translate_query(request=request) - - # Handle the response - print(response) - -# [END bigquerymigration_v2alpha_generated_SqlTranslationService_TranslateQuery_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2alpha.json b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2alpha.json deleted file mode 100644 index d849f516e6d6..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2alpha.json +++ /dev/null @@ -1,1315 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.bigquery.migration.v2alpha", - "version": "v2alpha" - } - ], - "language": "PYTHON", - "name": "google-cloud-bigquery-migration", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceAsyncClient", - "shortName": "MigrationServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceAsyncClient.create_migration_workflow", - "method": { - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.CreateMigrationWorkflow", - "service": { - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService", - "shortName": "MigrationService" - }, - "shortName": "CreateMigrationWorkflow" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_migration_v2alpha.types.CreateMigrationWorkflowRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "migration_workflow", - "type": "google.cloud.bigquery_migration_v2alpha.types.MigrationWorkflow" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_migration_v2alpha.types.MigrationWorkflow", - "shortName": "create_migration_workflow" - }, - "description": "Sample for CreateMigrationWorkflow", - "file": "bigquerymigration_v2alpha_generated_migration_service_create_migration_workflow_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"bigquerymigration_v2alpha_generated_migration_service_create_migration_workflow_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceClient", - "shortName": "MigrationServiceClient" - }, - "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceClient.create_migration_workflow", - "method": { - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.CreateMigrationWorkflow", - "service": { - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService", - "shortName": "MigrationService" - }, - "shortName": "CreateMigrationWorkflow" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_migration_v2alpha.types.CreateMigrationWorkflowRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "migration_workflow", - "type": "google.cloud.bigquery_migration_v2alpha.types.MigrationWorkflow" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_migration_v2alpha.types.MigrationWorkflow", - "shortName": "create_migration_workflow" - }, - "description": "Sample for CreateMigrationWorkflow", - "file": "bigquerymigration_v2alpha_generated_migration_service_create_migration_workflow_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_CreateMigrationWorkflow_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerymigration_v2alpha_generated_migration_service_create_migration_workflow_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceAsyncClient", - "shortName": "MigrationServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceAsyncClient.delete_migration_workflow", - "method": { - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.DeleteMigrationWorkflow", - "service": { - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService", - "shortName": "MigrationService" - }, - "shortName": "DeleteMigrationWorkflow" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_migration_v2alpha.types.DeleteMigrationWorkflowRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_migration_workflow" - }, - "description": "Sample for DeleteMigrationWorkflow", - "file": "bigquerymigration_v2alpha_generated_migration_service_delete_migration_workflow_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - 
"end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerymigration_v2alpha_generated_migration_service_delete_migration_workflow_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceClient", - "shortName": "MigrationServiceClient" - }, - "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceClient.delete_migration_workflow", - "method": { - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.DeleteMigrationWorkflow", - "service": { - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService", - "shortName": "MigrationService" - }, - "shortName": "DeleteMigrationWorkflow" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_migration_v2alpha.types.DeleteMigrationWorkflowRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_migration_workflow" - }, - "description": "Sample for DeleteMigrationWorkflow", - "file": "bigquerymigration_v2alpha_generated_migration_service_delete_migration_workflow_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_DeleteMigrationWorkflow_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerymigration_v2alpha_generated_migration_service_delete_migration_workflow_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceAsyncClient", - "shortName": "MigrationServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceAsyncClient.get_migration_subtask", - "method": { - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationSubtask", - "service": { - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService", - "shortName": "MigrationService" - }, - "shortName": "GetMigrationSubtask" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_migration_v2alpha.types.GetMigrationSubtaskRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_migration_v2alpha.types.MigrationSubtask", - "shortName": "get_migration_subtask" - }, - "description": "Sample for GetMigrationSubtask", - "file": "bigquerymigration_v2alpha_generated_migration_service_get_migration_subtask_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerymigration_v2alpha_generated_migration_service_get_migration_subtask_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceClient", - "shortName": "MigrationServiceClient" - }, - "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceClient.get_migration_subtask", - "method": { - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationSubtask", - "service": { - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService", - "shortName": "MigrationService" - }, - "shortName": "GetMigrationSubtask" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_migration_v2alpha.types.GetMigrationSubtaskRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_migration_v2alpha.types.MigrationSubtask", - "shortName": "get_migration_subtask" - }, - "description": "Sample for GetMigrationSubtask", - "file": "bigquerymigration_v2alpha_generated_migration_service_get_migration_subtask_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_GetMigrationSubtask_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerymigration_v2alpha_generated_migration_service_get_migration_subtask_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceAsyncClient", - "shortName": "MigrationServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceAsyncClient.get_migration_workflow", - "method": { - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationWorkflow", - "service": { - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService", - "shortName": "MigrationService" - }, - "shortName": "GetMigrationWorkflow" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_migration_v2alpha.types.GetMigrationWorkflowRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_migration_v2alpha.types.MigrationWorkflow", - "shortName": 
"get_migration_workflow" - }, - "description": "Sample for GetMigrationWorkflow", - "file": "bigquerymigration_v2alpha_generated_migration_service_get_migration_workflow_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerymigration_v2alpha_generated_migration_service_get_migration_workflow_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceClient", - "shortName": "MigrationServiceClient" - }, - "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceClient.get_migration_workflow", - "method": { - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.GetMigrationWorkflow", - "service": { - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService", - "shortName": "MigrationService" - }, - "shortName": "GetMigrationWorkflow" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_migration_v2alpha.types.GetMigrationWorkflowRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_migration_v2alpha.types.MigrationWorkflow", - "shortName": "get_migration_workflow" - }, - "description": "Sample for GetMigrationWorkflow", - "file": "bigquerymigration_v2alpha_generated_migration_service_get_migration_workflow_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_GetMigrationWorkflow_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerymigration_v2alpha_generated_migration_service_get_migration_workflow_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceAsyncClient", - "shortName": "MigrationServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceAsyncClient.list_migration_subtasks", - "method": { - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationSubtasks", - "service": { - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService", - "shortName": "MigrationService" - }, - "shortName": "ListMigrationSubtasks" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_migration_v2alpha.types.ListMigrationSubtasksRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": 
"google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_migration_v2alpha.services.migration_service.pagers.ListMigrationSubtasksAsyncPager", - "shortName": "list_migration_subtasks" - }, - "description": "Sample for ListMigrationSubtasks", - "file": "bigquerymigration_v2alpha_generated_migration_service_list_migration_subtasks_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerymigration_v2alpha_generated_migration_service_list_migration_subtasks_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceClient", - "shortName": "MigrationServiceClient" - }, - "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceClient.list_migration_subtasks", - "method": { - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationSubtasks", - "service": { - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService", - "shortName": "MigrationService" - }, - "shortName": "ListMigrationSubtasks" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_migration_v2alpha.types.ListMigrationSubtasksRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_migration_v2alpha.services.migration_service.pagers.ListMigrationSubtasksPager", - "shortName": "list_migration_subtasks" - }, - "description": "Sample for ListMigrationSubtasks", - "file": "bigquerymigration_v2alpha_generated_migration_service_list_migration_subtasks_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_ListMigrationSubtasks_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerymigration_v2alpha_generated_migration_service_list_migration_subtasks_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceAsyncClient", - "shortName": "MigrationServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceAsyncClient.list_migration_workflows", - "method": { - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationWorkflows", - "service": { - "fullName": 
"google.cloud.bigquery.migration.v2alpha.MigrationService", - "shortName": "MigrationService" - }, - "shortName": "ListMigrationWorkflows" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_migration_v2alpha.types.ListMigrationWorkflowsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_migration_v2alpha.services.migration_service.pagers.ListMigrationWorkflowsAsyncPager", - "shortName": "list_migration_workflows" - }, - "description": "Sample for ListMigrationWorkflows", - "file": "bigquerymigration_v2alpha_generated_migration_service_list_migration_workflows_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerymigration_v2alpha_generated_migration_service_list_migration_workflows_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceClient", - "shortName": "MigrationServiceClient" - }, - "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceClient.list_migration_workflows", - "method": { - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.ListMigrationWorkflows", - "service": { - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService", - "shortName": "MigrationService" - }, - "shortName": "ListMigrationWorkflows" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_migration_v2alpha.types.ListMigrationWorkflowsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_migration_v2alpha.services.migration_service.pagers.ListMigrationWorkflowsPager", - "shortName": "list_migration_workflows" - }, - "description": "Sample for ListMigrationWorkflows", - "file": "bigquerymigration_v2alpha_generated_migration_service_list_migration_workflows_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_ListMigrationWorkflows_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerymigration_v2alpha_generated_migration_service_list_migration_workflows_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": 
"google.cloud.bigquery_migration_v2alpha.MigrationServiceAsyncClient", - "shortName": "MigrationServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceAsyncClient.start_migration_workflow", - "method": { - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.StartMigrationWorkflow", - "service": { - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService", - "shortName": "MigrationService" - }, - "shortName": "StartMigrationWorkflow" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_migration_v2alpha.types.StartMigrationWorkflowRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "start_migration_workflow" - }, - "description": "Sample for StartMigrationWorkflow", - "file": "bigquerymigration_v2alpha_generated_migration_service_start_migration_workflow_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerymigration_v2alpha_generated_migration_service_start_migration_workflow_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceClient", - "shortName": "MigrationServiceClient" - }, - "fullName": "google.cloud.bigquery_migration_v2alpha.MigrationServiceClient.start_migration_workflow", - "method": { - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService.StartMigrationWorkflow", - "service": { - "fullName": "google.cloud.bigquery.migration.v2alpha.MigrationService", - "shortName": "MigrationService" - }, - "shortName": "StartMigrationWorkflow" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_migration_v2alpha.types.StartMigrationWorkflowRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "start_migration_workflow" - }, - "description": "Sample for StartMigrationWorkflow", - "file": "bigquerymigration_v2alpha_generated_migration_service_start_migration_workflow_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerymigration_v2alpha_generated_MigrationService_StartMigrationWorkflow_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerymigration_v2alpha_generated_migration_service_start_migration_workflow_sync.py" - }, - { - "canonical": true, 
- "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_migration_v2alpha.SqlTranslationServiceAsyncClient", - "shortName": "SqlTranslationServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_migration_v2alpha.SqlTranslationServiceAsyncClient.translate_query", - "method": { - "fullName": "google.cloud.bigquery.migration.v2alpha.SqlTranslationService.TranslateQuery", - "service": { - "fullName": "google.cloud.bigquery.migration.v2alpha.SqlTranslationService", - "shortName": "SqlTranslationService" - }, - "shortName": "TranslateQuery" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_migration_v2alpha.types.TranslateQueryRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "source_dialect", - "type": "google.cloud.bigquery_migration_v2alpha.types.TranslateQueryRequest.SqlTranslationSourceDialect" - }, - { - "name": "query", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_migration_v2alpha.types.TranslateQueryResponse", - "shortName": "translate_query" - }, - "description": "Sample for TranslateQuery", - "file": "bigquerymigration_v2alpha_generated_sql_translation_service_translate_query_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigquerymigration_v2alpha_generated_SqlTranslationService_TranslateQuery_async", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerymigration_v2alpha_generated_sql_translation_service_translate_query_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_migration_v2alpha.SqlTranslationServiceClient", - "shortName": "SqlTranslationServiceClient" - }, - "fullName": "google.cloud.bigquery_migration_v2alpha.SqlTranslationServiceClient.translate_query", - "method": { - "fullName": "google.cloud.bigquery.migration.v2alpha.SqlTranslationService.TranslateQuery", - "service": { - "fullName": "google.cloud.bigquery.migration.v2alpha.SqlTranslationService", - "shortName": "SqlTranslationService" - }, - "shortName": "TranslateQuery" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_migration_v2alpha.types.TranslateQueryRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "source_dialect", - "type": "google.cloud.bigquery_migration_v2alpha.types.TranslateQueryRequest.SqlTranslationSourceDialect" - }, - { - "name": "query", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_migration_v2alpha.types.TranslateQueryResponse", - "shortName": "translate_query" - }, - "description": "Sample for TranslateQuery", - "file": "bigquerymigration_v2alpha_generated_sql_translation_service_translate_query_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - 
"regionTag": "bigquerymigration_v2alpha_generated_SqlTranslationService_TranslateQuery_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigquerymigration_v2alpha_generated_sql_translation_service_translate_query_sync.py" - } - ] -} diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/scripts/fixup_bigquery_migration_v2alpha_keywords.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/scripts/fixup_bigquery_migration_v2alpha_keywords.py deleted file mode 100644 index 9b26ac91e4a2..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/scripts/fixup_bigquery_migration_v2alpha_keywords.py +++ /dev/null @@ -1,183 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class bigquery_migrationCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_migration_workflow': ('parent', 'migration_workflow', ), - 'delete_migration_workflow': ('name', ), - 'get_migration_subtask': ('name', 'read_mask', ), - 'get_migration_workflow': ('name', 'read_mask', ), - 'list_migration_subtasks': ('parent', 'read_mask', 'page_size', 'page_token', 'filter', ), - 'list_migration_workflows': ('parent', 'read_mask', 'page_size', 'page_token', ), - 'start_migration_workflow': ('name', ), - 'translate_query': ('parent', 'source_dialect', 'query', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. 
- return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=bigquery_migrationCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the bigquery_migration client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. 
-""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/setup.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/setup.py deleted file mode 100644 index 142b19d72660..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/setup.py +++ /dev/null @@ -1,98 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-bigquery-migration' - - -description = "Google Cloud Bigquery Migration API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/bigquery_migration/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", -] -extras = { -} -url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-migration" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - extras_require=extras, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/testing/constraints-3.10.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/testing/constraints-3.10.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/testing/constraints-3.11.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/testing/constraints-3.11.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/testing/constraints-3.12.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/testing/constraints-3.12.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/testing/constraints-3.13.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/testing/constraints-3.13.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/testing/constraints-3.7.txt deleted file mode 100644 index fc812592b0ee..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/testing/constraints-3.7.txt +++ /dev/null @@ -1,10 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -proto-plus==1.22.3 -protobuf==3.20.2 diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/testing/constraints-3.8.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/testing/constraints-3.8.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/testing/constraints-3.9.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/testing/constraints-3.9.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/tests/__init__.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/tests/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/tests/unit/__init__.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/tests/unit/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/tests/unit/gapic/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/tests/unit/gapic/bigquery_migration_v2alpha/__init__.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/tests/unit/gapic/bigquery_migration_v2alpha/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/tests/unit/gapic/bigquery_migration_v2alpha/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py deleted file mode 100644 index 20b56a034edf..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py +++ /dev/null @@ -1,4385 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.bigquery_migration_v2alpha.services.migration_service import MigrationServiceAsyncClient -from google.cloud.bigquery_migration_v2alpha.services.migration_service import MigrationServiceClient -from google.cloud.bigquery_migration_v2alpha.services.migration_service import pagers -from google.cloud.bigquery_migration_v2alpha.services.migration_service import transports -from google.cloud.bigquery_migration_v2alpha.types import assessment_task -from google.cloud.bigquery_migration_v2alpha.types import migration_entities -from google.cloud.bigquery_migration_v2alpha.types import migration_error_details -from google.cloud.bigquery_migration_v2alpha.types import migration_metrics -from google.cloud.bigquery_migration_v2alpha.types import migration_service -from google.cloud.bigquery_migration_v2alpha.types import translation_task -from google.oauth2 import service_account -from google.protobuf import any_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import error_details_pb2 # type: ignore -import google.auth - - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": "service account credentials", - "principal": "service-account@example.com", -} -CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. 
-# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert MigrationServiceClient._get_default_mtls_endpoint(None) is None - assert MigrationServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert MigrationServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert MigrationServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert MigrationServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert MigrationServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert MigrationServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert MigrationServiceClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert MigrationServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - MigrationServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert MigrationServiceClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert MigrationServiceClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert MigrationServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - MigrationServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert MigrationServiceClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert MigrationServiceClient._get_client_cert_source(None, False) is None - assert MigrationServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert MigrationServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with 
mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert MigrationServiceClient._get_client_cert_source(None, True) is mock_default_cert_source - assert MigrationServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(MigrationServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MigrationServiceClient)) -@mock.patch.object(MigrationServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MigrationServiceAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = MigrationServiceClient._DEFAULT_UNIVERSE - default_endpoint = MigrationServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = MigrationServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert MigrationServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert MigrationServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == MigrationServiceClient.DEFAULT_MTLS_ENDPOINT - assert MigrationServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert MigrationServiceClient._get_api_endpoint(None, None, default_universe, "always") == MigrationServiceClient.DEFAULT_MTLS_ENDPOINT - assert MigrationServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == MigrationServiceClient.DEFAULT_MTLS_ENDPOINT - assert MigrationServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert MigrationServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - MigrationServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." - - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert MigrationServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert MigrationServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert MigrationServiceClient._get_universe_domain(None, None) == MigrationServiceClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - MigrationServiceClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
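# The tests above pin down the precedence used when resolving the universe
# domain: an explicit client value wins, then the value read from the
# GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variable, then the
# googleapis.com default, with an empty client value rejected. The helper
# below is a standalone sketch of that precedence for illustration only;
# _resolve_universe_domain_sketch is not part of this library.
from typing import Optional


def _resolve_universe_domain_sketch(client_value: Optional[str], env_value: Optional[str]) -> str:
    if client_value is not None:
        if not client_value:
            raise ValueError("Universe Domain cannot be an empty string.")
        return client_value  # explicit configuration wins
    if env_value is not None:
        return env_value  # environment variable comes next
    return "googleapis.com"  # library default


def test__resolve_universe_domain_sketch():
    assert _resolve_universe_domain_sketch("foo.com", "bar.com") == "foo.com"
    assert _resolve_universe_domain_sketch(None, "bar.com") == "bar.com"
    assert _resolve_universe_domain_sketch(None, None) == "googleapis.com"
    with pytest.raises(ValueError):
        _resolve_universe_domain_sketch("", None)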
- -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = MigrationServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - -@pytest.mark.parametrize("error_code", [401,403,404,500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = MigrationServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - -@pytest.mark.parametrize("client_class,transport_name", [ - (MigrationServiceClient, "grpc"), - (MigrationServiceAsyncClient, "grpc_asyncio"), -]) -def test_migration_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'bigquerymigration.googleapis.com:443' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.MigrationServiceGrpcTransport, "grpc"), - (transports.MigrationServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_migration_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (MigrationServiceClient, "grpc"), - (MigrationServiceAsyncClient, "grpc_asyncio"), -]) -def test_migration_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) 
- assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'bigquerymigration.googleapis.com:443' - ) - - -def test_migration_service_client_get_transport_class(): - transport = MigrationServiceClient.get_transport_class() - available_transports = [ - transports.MigrationServiceGrpcTransport, - ] - assert transport in available_transports - - transport = MigrationServiceClient.get_transport_class("grpc") - assert transport == transports.MigrationServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (MigrationServiceClient, transports.MigrationServiceGrpcTransport, "grpc"), - (MigrationServiceAsyncClient, transports.MigrationServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(MigrationServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MigrationServiceClient)) -@mock.patch.object(MigrationServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MigrationServiceAsyncClient)) -def test_migration_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(MigrationServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(MigrationServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". 
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
-        with mock.patch.object(transport_class, '__init__') as patched:
-            patched.return_value = None
-            client = client_class(transport=transport_name)
-            patched.assert_called_once_with(
-                credentials=None,
-                credentials_file=None,
-                host=client.DEFAULT_MTLS_ENDPOINT,
-                scopes=None,
-                client_cert_source_for_mtls=None,
-                quota_project_id=None,
-                client_info=transports.base.DEFAULT_CLIENT_INFO,
-                always_use_jwt_access=True,
-                api_audience=None,
-            )
-
-    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
-    # unsupported value.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
-        with pytest.raises(MutualTLSChannelError) as excinfo:
-            client = client_class(transport=transport_name)
-        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
-
-    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
-        with pytest.raises(ValueError) as excinfo:
-            client = client_class(transport=transport_name)
-        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
-
-    # Check the case quota_project_id is provided
-    options = client_options.ClientOptions(quota_project_id="octopus")
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(client_options=options, transport=transport_name)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file=None,
-            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id="octopus",
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience=None,
-        )
-    # Check the case api_audience is provided
-    options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(client_options=options, transport=transport_name)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file=None,
-            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id=None,
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience="https://language.googleapis.com"
-        )
-
-@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
-    (MigrationServiceClient, transports.MigrationServiceGrpcTransport, "grpc", "true"),
-    (MigrationServiceAsyncClient, transports.MigrationServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"),
-    (MigrationServiceClient, transports.MigrationServiceGrpcTransport, "grpc", "false"),
-    (MigrationServiceAsyncClient, transports.MigrationServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"),
-])
-@mock.patch.object(MigrationServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MigrationServiceClient))
-@mock.patch.object(MigrationServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MigrationServiceAsyncClient))
-@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
-def 
test_migration_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
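-    # (Editorial note, not generated code: with no explicit cert source and
-    # has_default_client_cert_source patched to return False below, the client
-    # must ignore GOOGLE_API_USE_CLIENT_CERTIFICATE and fall back to the plain
-    # endpoint with client_cert_source_for_mtls=None, as asserted next.)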
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - MigrationServiceClient, MigrationServiceAsyncClient -]) -@mock.patch.object(MigrationServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MigrationServiceClient)) -@mock.patch.object(MigrationServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MigrationServiceAsyncClient)) -def test_migration_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - MigrationServiceClient, MigrationServiceAsyncClient -]) -@mock.patch.object(MigrationServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MigrationServiceClient)) -@mock.patch.object(MigrationServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MigrationServiceAsyncClient)) -def test_migration_service_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = MigrationServiceClient._DEFAULT_UNIVERSE - default_endpoint = MigrationServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = MigrationServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (MigrationServiceClient, transports.MigrationServiceGrpcTransport, "grpc"), - (MigrationServiceAsyncClient, transports.MigrationServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_migration_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (MigrationServiceClient, transports.MigrationServiceGrpcTransport, "grpc", grpc_helpers), - (MigrationServiceAsyncClient, transports.MigrationServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_migration_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_migration_service_client_client_options_from_dict(): - with mock.patch('google.cloud.bigquery_migration_v2alpha.services.migration_service.transports.MigrationServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = MigrationServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (MigrationServiceClient, transports.MigrationServiceGrpcTransport, "grpc", grpc_helpers), - (MigrationServiceAsyncClient, transports.MigrationServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_migration_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
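-    # (Editorial note, not generated code: the block below patches
-    # google.auth.load_credentials_from_file, google.auth.default, and the
-    # grpc_helpers create_channel helper so the assertion can confirm that the
-    # file-based credentials, not ADC, are the ones handed to the channel.)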
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "bigquerymigration.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="bigquerymigration.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - migration_service.CreateMigrationWorkflowRequest, - dict, -]) -def test_create_migration_workflow(request_type, transport: str = 'grpc'): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_migration_workflow), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = migration_entities.MigrationWorkflow( - name='name_value', - display_name='display_name_value', - state=migration_entities.MigrationWorkflow.State.DRAFT, - ) - response = client.create_migration_workflow(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = migration_service.CreateMigrationWorkflowRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, migration_entities.MigrationWorkflow) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.state == migration_entities.MigrationWorkflow.State.DRAFT - - -def test_create_migration_workflow_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = migration_service.CreateMigrationWorkflowRequest( - parent='parent_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_migration_workflow), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.create_migration_workflow(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == migration_service.CreateMigrationWorkflowRequest( - parent='parent_value', - ) - -def test_create_migration_workflow_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_migration_workflow in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_migration_workflow] = mock_rpc - request = {} - client.create_migration_workflow(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_migration_workflow(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_migration_workflow_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_migration_workflow in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_migration_workflow] = mock_rpc - - request = {} - await client.create_migration_workflow(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_migration_workflow(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_migration_workflow_async(transport: str = 'grpc_asyncio', request_type=migration_service.CreateMigrationWorkflowRequest): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_migration_workflow), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(migration_entities.MigrationWorkflow(
-            name='name_value',
-            display_name='display_name_value',
-            state=migration_entities.MigrationWorkflow.State.DRAFT,
-        ))
-        response = await client.create_migration_workflow(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = migration_service.CreateMigrationWorkflowRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, migration_entities.MigrationWorkflow)
-    assert response.name == 'name_value'
-    assert response.display_name == 'display_name_value'
-    assert response.state == migration_entities.MigrationWorkflow.State.DRAFT
-
-
-@pytest.mark.asyncio
-async def test_create_migration_workflow_async_from_dict():
-    await test_create_migration_workflow_async(request_type=dict)
-
-def test_create_migration_workflow_field_headers():
-    client = MigrationServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = migration_service.CreateMigrationWorkflowRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_migration_workflow),
-            '__call__') as call:
-        call.return_value = migration_entities.MigrationWorkflow()
-        client.create_migration_workflow(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_create_migration_workflow_field_headers_async():
-    client = MigrationServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = migration_service.CreateMigrationWorkflowRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_migration_workflow),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(migration_entities.MigrationWorkflow())
-        await client.create_migration_workflow(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_create_migration_workflow_flattened():
-    client = MigrationServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_migration_workflow),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = migration_entities.MigrationWorkflow()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.create_migration_workflow(
-            parent='parent_value',
-            migration_workflow=migration_entities.MigrationWorkflow(name='name_value'),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].migration_workflow
-        mock_val = migration_entities.MigrationWorkflow(name='name_value')
-        assert arg == mock_val
-
-
-def test_create_migration_workflow_flattened_error():
-    client = MigrationServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.create_migration_workflow(
-            migration_service.CreateMigrationWorkflowRequest(),
-            parent='parent_value',
-            migration_workflow=migration_entities.MigrationWorkflow(name='name_value'),
-        )
-
-@pytest.mark.asyncio
-async def test_create_migration_workflow_flattened_async():
-    client = MigrationServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_migration_workflow),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(migration_entities.MigrationWorkflow())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.create_migration_workflow(
-            parent='parent_value',
-            migration_workflow=migration_entities.MigrationWorkflow(name='name_value'),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].migration_workflow
-        mock_val = migration_entities.MigrationWorkflow(name='name_value')
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_migration_workflow_flattened_error_async():
-    client = MigrationServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.create_migration_workflow(
-            migration_service.CreateMigrationWorkflowRequest(),
-            parent='parent_value',
-            migration_workflow=migration_entities.MigrationWorkflow(name='name_value'),
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-  migration_service.GetMigrationWorkflowRequest,
-  dict,
-])
-def test_get_migration_workflow(request_type, transport: str = 'grpc'):
-    client = MigrationServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_migration_workflow),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
- call.return_value = migration_entities.MigrationWorkflow( - name='name_value', - display_name='display_name_value', - state=migration_entities.MigrationWorkflow.State.DRAFT, - ) - response = client.get_migration_workflow(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = migration_service.GetMigrationWorkflowRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, migration_entities.MigrationWorkflow) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.state == migration_entities.MigrationWorkflow.State.DRAFT - - -def test_get_migration_workflow_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = migration_service.GetMigrationWorkflowRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_migration_workflow), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_migration_workflow(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == migration_service.GetMigrationWorkflowRequest( - name='name_value', - ) - -def test_get_migration_workflow_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_migration_workflow in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_migration_workflow] = mock_rpc - request = {} - client.get_migration_workflow(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.get_migration_workflow(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_migration_workflow_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = MigrationServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.get_migration_workflow in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.get_migration_workflow] = mock_rpc
-
-        request = {}
-        await client.get_migration_workflow(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.get_migration_workflow(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_migration_workflow_async(transport: str = 'grpc_asyncio', request_type=migration_service.GetMigrationWorkflowRequest):
-    client = MigrationServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_migration_workflow),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(migration_entities.MigrationWorkflow(
-            name='name_value',
-            display_name='display_name_value',
-            state=migration_entities.MigrationWorkflow.State.DRAFT,
-        ))
-        response = await client.get_migration_workflow(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = migration_service.GetMigrationWorkflowRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, migration_entities.MigrationWorkflow)
-    assert response.name == 'name_value'
-    assert response.display_name == 'display_name_value'
-    assert response.state == migration_entities.MigrationWorkflow.State.DRAFT
-
-
-@pytest.mark.asyncio
-async def test_get_migration_workflow_async_from_dict():
-    await test_get_migration_workflow_async(request_type=dict)
-
-def test_get_migration_workflow_field_headers():
-    client = MigrationServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = migration_service.GetMigrationWorkflowRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_migration_workflow),
-            '__call__') as call:
-        call.return_value = migration_entities.MigrationWorkflow()
-        client.get_migration_workflow(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_migration_workflow_field_headers_async():
-    client = MigrationServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = migration_service.GetMigrationWorkflowRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_migration_workflow),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(migration_entities.MigrationWorkflow())
-        await client.get_migration_workflow(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_get_migration_workflow_flattened():
-    client = MigrationServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_migration_workflow),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = migration_entities.MigrationWorkflow()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.get_migration_workflow(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_get_migration_workflow_flattened_error():
-    client = MigrationServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_migration_workflow(
-            migration_service.GetMigrationWorkflowRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_get_migration_workflow_flattened_async():
-    client = MigrationServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_migration_workflow),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(migration_entities.MigrationWorkflow())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
- response = await client.get_migration_workflow( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_migration_workflow_flattened_error_async(): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_migration_workflow( - migration_service.GetMigrationWorkflowRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - migration_service.ListMigrationWorkflowsRequest, - dict, -]) -def test_list_migration_workflows(request_type, transport: str = 'grpc'): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_workflows), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = migration_service.ListMigrationWorkflowsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_migration_workflows(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = migration_service.ListMigrationWorkflowsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListMigrationWorkflowsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_migration_workflows_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = migration_service.ListMigrationWorkflowsRequest( - parent='parent_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_workflows), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.list_migration_workflows(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == migration_service.ListMigrationWorkflowsRequest( - parent='parent_value', - page_token='page_token_value', - ) - -def test_list_migration_workflows_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_migration_workflows in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_migration_workflows] = mock_rpc - request = {} - client.list_migration_workflows(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_migration_workflows(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_migration_workflows_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_migration_workflows in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_migration_workflows] = mock_rpc - - request = {} - await client.list_migration_workflows(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_migration_workflows(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_migration_workflows_async(transport: str = 'grpc_asyncio', request_type=migration_service.ListMigrationWorkflowsRequest): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_workflows), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(migration_service.ListMigrationWorkflowsResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.list_migration_workflows(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = migration_service.ListMigrationWorkflowsRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListMigrationWorkflowsAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_migration_workflows_async_from_dict():
-    await test_list_migration_workflows_async(request_type=dict)
-
-def test_list_migration_workflows_field_headers():
-    client = MigrationServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = migration_service.ListMigrationWorkflowsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_migration_workflows),
-            '__call__') as call:
-        call.return_value = migration_service.ListMigrationWorkflowsResponse()
-        client.list_migration_workflows(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_migration_workflows_field_headers_async():
-    client = MigrationServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = migration_service.ListMigrationWorkflowsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_migration_workflows),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(migration_service.ListMigrationWorkflowsResponse())
-        await client.list_migration_workflows(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_list_migration_workflows_flattened():
-    client = MigrationServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_migration_workflows),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = migration_service.ListMigrationWorkflowsResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_migration_workflows(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_migration_workflows_flattened_error(): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_migration_workflows( - migration_service.ListMigrationWorkflowsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_migration_workflows_flattened_async(): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_workflows), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = migration_service.ListMigrationWorkflowsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(migration_service.ListMigrationWorkflowsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_migration_workflows( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_migration_workflows_flattened_error_async(): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_migration_workflows( - migration_service.ListMigrationWorkflowsRequest(), - parent='parent_value', - ) - - -def test_list_migration_workflows_pager(transport_name: str = "grpc"): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_workflows), - '__call__') as call: - # Set the response to a series of pages. 
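- # Each element of side_effect is consumed by one stub call; the trailing
- # RuntimeError makes the test fail loudly if the pager requests more
- # pages than the four defined here.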
- call.side_effect = ( - migration_service.ListMigrationWorkflowsResponse( - migration_workflows=[ - migration_entities.MigrationWorkflow(), - migration_entities.MigrationWorkflow(), - migration_entities.MigrationWorkflow(), - ], - next_page_token='abc', - ), - migration_service.ListMigrationWorkflowsResponse( - migration_workflows=[], - next_page_token='def', - ), - migration_service.ListMigrationWorkflowsResponse( - migration_workflows=[ - migration_entities.MigrationWorkflow(), - ], - next_page_token='ghi', - ), - migration_service.ListMigrationWorkflowsResponse( - migration_workflows=[ - migration_entities.MigrationWorkflow(), - migration_entities.MigrationWorkflow(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_migration_workflows(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, migration_entities.MigrationWorkflow) - for i in results) -def test_list_migration_workflows_pages(transport_name: str = "grpc"): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_workflows), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - migration_service.ListMigrationWorkflowsResponse( - migration_workflows=[ - migration_entities.MigrationWorkflow(), - migration_entities.MigrationWorkflow(), - migration_entities.MigrationWorkflow(), - ], - next_page_token='abc', - ), - migration_service.ListMigrationWorkflowsResponse( - migration_workflows=[], - next_page_token='def', - ), - migration_service.ListMigrationWorkflowsResponse( - migration_workflows=[ - migration_entities.MigrationWorkflow(), - ], - next_page_token='ghi', - ), - migration_service.ListMigrationWorkflowsResponse( - migration_workflows=[ - migration_entities.MigrationWorkflow(), - migration_entities.MigrationWorkflow(), - ], - ), - RuntimeError, - ) - pages = list(client.list_migration_workflows(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_migration_workflows_async_pager(): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_workflows), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - migration_service.ListMigrationWorkflowsResponse( - migration_workflows=[ - migration_entities.MigrationWorkflow(), - migration_entities.MigrationWorkflow(), - migration_entities.MigrationWorkflow(), - ], - next_page_token='abc', - ), - migration_service.ListMigrationWorkflowsResponse( - migration_workflows=[], - next_page_token='def', - ), - migration_service.ListMigrationWorkflowsResponse( - migration_workflows=[ - migration_entities.MigrationWorkflow(), - ], - next_page_token='ghi', - ), - migration_service.ListMigrationWorkflowsResponse( - migration_workflows=[ - migration_entities.MigrationWorkflow(), - migration_entities.MigrationWorkflow(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_migration_workflows(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, migration_entities.MigrationWorkflow) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_migration_workflows_async_pages(): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_workflows), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - migration_service.ListMigrationWorkflowsResponse( - migration_workflows=[ - migration_entities.MigrationWorkflow(), - migration_entities.MigrationWorkflow(), - migration_entities.MigrationWorkflow(), - ], - next_page_token='abc', - ), - migration_service.ListMigrationWorkflowsResponse( - migration_workflows=[], - next_page_token='def', - ), - migration_service.ListMigrationWorkflowsResponse( - migration_workflows=[ - migration_entities.MigrationWorkflow(), - ], - next_page_token='ghi', - ), - migration_service.ListMigrationWorkflowsResponse( - migration_workflows=[ - migration_entities.MigrationWorkflow(), - migration_entities.MigrationWorkflow(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_migration_workflows(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - migration_service.DeleteMigrationWorkflowRequest, - dict, -]) -def test_delete_migration_workflow(request_type, transport: str = 'grpc'): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_migration_workflow), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_migration_workflow(request) - - # Establish that the underlying gRPC stub method was called. 
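- # Entries in call.mock_calls unpack as (name, args, kwargs), so args[0]
- # below is the request message the client handed to the transport.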
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = migration_service.DeleteMigrationWorkflowRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_migration_workflow_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = migration_service.DeleteMigrationWorkflowRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_migration_workflow), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_migration_workflow(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == migration_service.DeleteMigrationWorkflowRequest( - name='name_value', - ) - -def test_delete_migration_workflow_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_migration_workflow in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_migration_workflow] = mock_rpc - request = {} - client.delete_migration_workflow(request) - - # Establish that the underlying gRPC stub method was called. 
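- # Two invocations with wrap_method left untouched prove that the client
- # reuses the wrapper cached in _wrapped_methods rather than re-wrapping
- # the RPC on every call.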
- assert mock_rpc.call_count == 1 - - client.delete_migration_workflow(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_migration_workflow_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_migration_workflow in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_migration_workflow] = mock_rpc - - request = {} - await client.delete_migration_workflow(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_migration_workflow(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_migration_workflow_async(transport: str = 'grpc_asyncio', request_type=migration_service.DeleteMigrationWorkflowRequest): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_migration_workflow), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_migration_workflow(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = migration_service.DeleteMigrationWorkflowRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_migration_workflow_async_from_dict(): - await test_delete_migration_workflow_async(request_type=dict) - -def test_delete_migration_workflow_field_headers(): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = migration_service.DeleteMigrationWorkflowRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_migration_workflow), - '__call__') as call: - call.return_value = None - client.delete_migration_workflow(request) - - # Establish that the underlying gRPC stub method was called. 
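- # The x-goog-request-params entry asserted below is the routing header
- # the client builds from request.name so the backend can route the call.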
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_migration_workflow_field_headers_async(): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = migration_service.DeleteMigrationWorkflowRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_migration_workflow), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_migration_workflow(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_migration_workflow_flattened(): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_migration_workflow), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_migration_workflow( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_migration_workflow_flattened_error(): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_migration_workflow( - migration_service.DeleteMigrationWorkflowRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_migration_workflow_flattened_async(): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_migration_workflow), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_migration_workflow( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_migration_workflow_flattened_error_async(): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_migration_workflow( - migration_service.DeleteMigrationWorkflowRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - migration_service.StartMigrationWorkflowRequest, - dict, -]) -def test_start_migration_workflow(request_type, transport: str = 'grpc'): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.start_migration_workflow), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.start_migration_workflow(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = migration_service.StartMigrationWorkflowRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_start_migration_workflow_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = migration_service.StartMigrationWorkflowRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.start_migration_workflow), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.start_migration_workflow(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == migration_service.StartMigrationWorkflowRequest( - name='name_value', - ) - -def test_start_migration_workflow_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.start_migration_workflow in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
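- # _wrapped_methods maps each raw transport stub to its cached
- # retry/timeout wrapper; replacing the entry intercepts subsequent calls
- # without rebuilding the wrapper.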
- client._transport._wrapped_methods[client._transport.start_migration_workflow] = mock_rpc - request = {} - client.start_migration_workflow(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.start_migration_workflow(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_start_migration_workflow_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.start_migration_workflow in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.start_migration_workflow] = mock_rpc - - request = {} - await client.start_migration_workflow(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.start_migration_workflow(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_start_migration_workflow_async(transport: str = 'grpc_asyncio', request_type=migration_service.StartMigrationWorkflowRequest): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.start_migration_workflow), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.start_migration_workflow(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = migration_service.StartMigrationWorkflowRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_start_migration_workflow_async_from_dict(): - await test_start_migration_workflow_async(request_type=dict) - -def test_start_migration_workflow_field_headers(): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = migration_service.StartMigrationWorkflowRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
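- # Patching __call__ on the type of the transport attribute intercepts the
- # gRPC multicallable itself, so no channel traffic ever occurs.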
- with mock.patch.object( - type(client.transport.start_migration_workflow), - '__call__') as call: - call.return_value = None - client.start_migration_workflow(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_start_migration_workflow_field_headers_async(): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = migration_service.StartMigrationWorkflowRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.start_migration_workflow), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.start_migration_workflow(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_start_migration_workflow_flattened(): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.start_migration_workflow), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.start_migration_workflow( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_start_migration_workflow_flattened_error(): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.start_migration_workflow( - migration_service.StartMigrationWorkflowRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_start_migration_workflow_flattened_async(): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.start_migration_workflow), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.start_migration_workflow( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_start_migration_workflow_flattened_error_async(): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.start_migration_workflow( - migration_service.StartMigrationWorkflowRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - migration_service.GetMigrationSubtaskRequest, - dict, -]) -def test_get_migration_subtask(request_type, transport: str = 'grpc'): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_migration_subtask), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = migration_entities.MigrationSubtask( - name='name_value', - task_id='task_id_value', - type_='type__value', - state=migration_entities.MigrationSubtask.State.ACTIVE, - resource_error_count=2169, - ) - response = client.get_migration_subtask(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = migration_service.GetMigrationSubtaskRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, migration_entities.MigrationSubtask) - assert response.name == 'name_value' - assert response.task_id == 'task_id_value' - assert response.type_ == 'type__value' - assert response.state == migration_entities.MigrationSubtask.State.ACTIVE - assert response.resource_error_count == 2169 - - -def test_get_migration_subtask_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = migration_service.GetMigrationSubtaskRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_migration_subtask), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.get_migration_subtask(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == migration_service.GetMigrationSubtaskRequest( - name='name_value', - ) - -def test_get_migration_subtask_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_migration_subtask in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_migration_subtask] = mock_rpc - request = {} - client.get_migration_subtask(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_migration_subtask(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_migration_subtask_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_migration_subtask in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_migration_subtask] = mock_rpc - - request = {} - await client.get_migration_subtask(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_migration_subtask(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_migration_subtask_async(transport: str = 'grpc_asyncio', request_type=migration_service.GetMigrationSubtaskRequest): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_migration_subtask), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(migration_entities.MigrationSubtask(
- name='name_value',
- task_id='task_id_value',
- type_='type__value',
- state=migration_entities.MigrationSubtask.State.ACTIVE,
- resource_error_count=2169,
- ))
- response = await client.get_migration_subtask(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = migration_service.GetMigrationSubtaskRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, migration_entities.MigrationSubtask)
- assert response.name == 'name_value'
- assert response.task_id == 'task_id_value'
- assert response.type_ == 'type__value'
- assert response.state == migration_entities.MigrationSubtask.State.ACTIVE
- assert response.resource_error_count == 2169
-
-
-@pytest.mark.asyncio
-async def test_get_migration_subtask_async_from_dict():
- await test_get_migration_subtask_async(request_type=dict)
-
-def test_get_migration_subtask_field_headers():
- client = MigrationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = migration_service.GetMigrationSubtaskRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_migration_subtask),
- '__call__') as call:
- call.return_value = migration_entities.MigrationSubtask()
- client.get_migration_subtask(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_migration_subtask_field_headers_async():
- client = MigrationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = migration_service.GetMigrationSubtaskRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_migration_subtask),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(migration_entities.MigrationSubtask())
- await client.get_migration_subtask(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_get_migration_subtask_flattened():
- client = MigrationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_migration_subtask),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = migration_entities.MigrationSubtask()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.get_migration_subtask( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_migration_subtask_flattened_error(): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_migration_subtask( - migration_service.GetMigrationSubtaskRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_migration_subtask_flattened_async(): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_migration_subtask), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = migration_entities.MigrationSubtask() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(migration_entities.MigrationSubtask()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_migration_subtask( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_migration_subtask_flattened_error_async(): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_migration_subtask( - migration_service.GetMigrationSubtaskRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - migration_service.ListMigrationSubtasksRequest, - dict, -]) -def test_list_migration_subtasks(request_type, transport: str = 'grpc'): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_subtasks), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = migration_service.ListMigrationSubtasksResponse( - next_page_token='next_page_token_value', - ) - response = client.list_migration_subtasks(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = migration_service.ListMigrationSubtasksRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
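- # The client wraps the raw ListMigrationSubtasksResponse in a pager that
- # fetches follow-up pages on demand, hence the pager type asserted below.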
- assert isinstance(response, pagers.ListMigrationSubtasksPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_migration_subtasks_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = migration_service.ListMigrationSubtasksRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_subtasks), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_migration_subtasks(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == migration_service.ListMigrationSubtasksRequest( - parent='parent_value', - page_token='page_token_value', - filter='filter_value', - ) - -def test_list_migration_subtasks_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_migration_subtasks in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_migration_subtasks] = mock_rpc - request = {} - client.list_migration_subtasks(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.list_migration_subtasks(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_migration_subtasks_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = MigrationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.list_migration_subtasks in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.list_migration_subtasks] = mock_rpc
-
- request = {}
- await client.list_migration_subtasks(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.list_migration_subtasks(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_migration_subtasks_async(transport: str = 'grpc_asyncio', request_type=migration_service.ListMigrationSubtasksRequest):
- client = MigrationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_migration_subtasks),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(migration_service.ListMigrationSubtasksResponse(
- next_page_token='next_page_token_value',
- ))
- response = await client.list_migration_subtasks(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = migration_service.ListMigrationSubtasksRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListMigrationSubtasksAsyncPager)
- assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_migration_subtasks_async_from_dict():
- await test_list_migration_subtasks_async(request_type=dict)
-
-def test_list_migration_subtasks_field_headers():
- client = MigrationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = migration_service.ListMigrationSubtasksRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.list_migration_subtasks), - '__call__') as call: - call.return_value = migration_service.ListMigrationSubtasksResponse() - client.list_migration_subtasks(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_migration_subtasks_field_headers_async(): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = migration_service.ListMigrationSubtasksRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_subtasks), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(migration_service.ListMigrationSubtasksResponse()) - await client.list_migration_subtasks(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_migration_subtasks_flattened(): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_subtasks), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = migration_service.ListMigrationSubtasksResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_migration_subtasks( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_migration_subtasks_flattened_error(): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_migration_subtasks( - migration_service.ListMigrationSubtasksRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_migration_subtasks_flattened_async(): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_subtasks), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = migration_service.ListMigrationSubtasksResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(migration_service.ListMigrationSubtasksResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.list_migration_subtasks( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_migration_subtasks_flattened_error_async(): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_migration_subtasks( - migration_service.ListMigrationSubtasksRequest(), - parent='parent_value', - ) - - -def test_list_migration_subtasks_pager(transport_name: str = "grpc"): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_subtasks), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - migration_service.ListMigrationSubtasksResponse( - migration_subtasks=[ - migration_entities.MigrationSubtask(), - migration_entities.MigrationSubtask(), - migration_entities.MigrationSubtask(), - ], - next_page_token='abc', - ), - migration_service.ListMigrationSubtasksResponse( - migration_subtasks=[], - next_page_token='def', - ), - migration_service.ListMigrationSubtasksResponse( - migration_subtasks=[ - migration_entities.MigrationSubtask(), - ], - next_page_token='ghi', - ), - migration_service.ListMigrationSubtasksResponse( - migration_subtasks=[ - migration_entities.MigrationSubtask(), - migration_entities.MigrationSubtask(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_migration_subtasks(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, migration_entities.MigrationSubtask) - for i in results) -def test_list_migration_subtasks_pages(transport_name: str = "grpc"): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_subtasks), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - migration_service.ListMigrationSubtasksResponse( - migration_subtasks=[ - migration_entities.MigrationSubtask(), - migration_entities.MigrationSubtask(), - migration_entities.MigrationSubtask(), - ], - next_page_token='abc', - ), - migration_service.ListMigrationSubtasksResponse( - migration_subtasks=[], - next_page_token='def', - ), - migration_service.ListMigrationSubtasksResponse( - migration_subtasks=[ - migration_entities.MigrationSubtask(), - ], - next_page_token='ghi', - ), - migration_service.ListMigrationSubtasksResponse( - migration_subtasks=[ - migration_entities.MigrationSubtask(), - migration_entities.MigrationSubtask(), - ], - ), - RuntimeError, - ) - pages = list(client.list_migration_subtasks(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_migration_subtasks_async_pager(): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_subtasks), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - migration_service.ListMigrationSubtasksResponse( - migration_subtasks=[ - migration_entities.MigrationSubtask(), - migration_entities.MigrationSubtask(), - migration_entities.MigrationSubtask(), - ], - next_page_token='abc', - ), - migration_service.ListMigrationSubtasksResponse( - migration_subtasks=[], - next_page_token='def', - ), - migration_service.ListMigrationSubtasksResponse( - migration_subtasks=[ - migration_entities.MigrationSubtask(), - ], - next_page_token='ghi', - ), - migration_service.ListMigrationSubtasksResponse( - migration_subtasks=[ - migration_entities.MigrationSubtask(), - migration_entities.MigrationSubtask(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_migration_subtasks(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, migration_entities.MigrationSubtask) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_migration_subtasks_async_pages(): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_subtasks), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - migration_service.ListMigrationSubtasksResponse( - migration_subtasks=[ - migration_entities.MigrationSubtask(), - migration_entities.MigrationSubtask(), - migration_entities.MigrationSubtask(), - ], - next_page_token='abc', - ), - migration_service.ListMigrationSubtasksResponse( - migration_subtasks=[], - next_page_token='def', - ), - migration_service.ListMigrationSubtasksResponse( - migration_subtasks=[ - migration_entities.MigrationSubtask(), - ], - next_page_token='ghi', - ), - migration_service.ListMigrationSubtasksResponse( - migration_subtasks=[ - migration_entities.MigrationSubtask(), - migration_entities.MigrationSubtask(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_migration_subtasks(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.MigrationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.MigrationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = MigrationServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.MigrationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = MigrationServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = MigrationServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.MigrationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = MigrationServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.MigrationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = MigrationServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. 
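- # Both the sync and asyncio transports expose their channel through the
- # grpc_channel property; the channel is typically created when the
- # transport is constructed without an explicit one supplied.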
- transport = transports.MigrationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.MigrationServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.MigrationServiceGrpcTransport, - transports.MigrationServiceGrpcAsyncIOTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = MigrationServiceClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_migration_workflow_empty_call_grpc(): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_migration_workflow), - '__call__') as call: - call.return_value = migration_entities.MigrationWorkflow() - client.create_migration_workflow(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = migration_service.CreateMigrationWorkflowRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_migration_workflow_empty_call_grpc(): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_migration_workflow), - '__call__') as call: - call.return_value = migration_entities.MigrationWorkflow() - client.get_migration_workflow(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = migration_service.GetMigrationWorkflowRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_migration_workflows_empty_call_grpc(): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_workflows), - '__call__') as call: - call.return_value = migration_service.ListMigrationWorkflowsResponse() - client.list_migration_workflows(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = migration_service.ListMigrationWorkflowsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_migration_workflow_empty_call_grpc(): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_migration_workflow), - '__call__') as call: - call.return_value = None - client.delete_migration_workflow(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = migration_service.DeleteMigrationWorkflowRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_start_migration_workflow_empty_call_grpc(): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.start_migration_workflow), - '__call__') as call: - call.return_value = None - client.start_migration_workflow(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = migration_service.StartMigrationWorkflowRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_migration_subtask_empty_call_grpc(): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_migration_subtask), - '__call__') as call: - call.return_value = migration_entities.MigrationSubtask() - client.get_migration_subtask(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = migration_service.GetMigrationSubtaskRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_migration_subtasks_empty_call_grpc(): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_subtasks), - '__call__') as call: - call.return_value = migration_service.ListMigrationSubtasksResponse() - client.list_migration_subtasks(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = migration_service.ListMigrationSubtasksRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = MigrationServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_migration_workflow_empty_call_grpc_asyncio(): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_migration_workflow), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(migration_entities.MigrationWorkflow( - name='name_value', - display_name='display_name_value', - state=migration_entities.MigrationWorkflow.State.DRAFT, - )) - await client.create_migration_workflow(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = migration_service.CreateMigrationWorkflowRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_migration_workflow_empty_call_grpc_asyncio(): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_migration_workflow), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(migration_entities.MigrationWorkflow( - name='name_value', - display_name='display_name_value', - state=migration_entities.MigrationWorkflow.State.DRAFT, - )) - await client.get_migration_workflow(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = migration_service.GetMigrationWorkflowRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_migration_workflows_empty_call_grpc_asyncio(): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_migration_workflows), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(migration_service.ListMigrationWorkflowsResponse( - next_page_token='next_page_token_value', - )) - await client.list_migration_workflows(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = migration_service.ListMigrationWorkflowsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_migration_workflow_empty_call_grpc_asyncio(): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_migration_workflow), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_migration_workflow(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = migration_service.DeleteMigrationWorkflowRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_start_migration_workflow_empty_call_grpc_asyncio(): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.start_migration_workflow), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.start_migration_workflow(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = migration_service.StartMigrationWorkflowRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_migration_subtask_empty_call_grpc_asyncio(): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_migration_subtask), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(migration_entities.MigrationSubtask( - name='name_value', - task_id='task_id_value', - type_='type__value', - state=migration_entities.MigrationSubtask.State.ACTIVE, - resource_error_count=2169, - )) - await client.get_migration_subtask(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = migration_service.GetMigrationSubtaskRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_migration_subtasks_empty_call_grpc_asyncio(): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.list_migration_subtasks), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(migration_service.ListMigrationSubtasksResponse( - next_page_token='next_page_token_value', - )) - await client.list_migration_subtasks(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = migration_service.ListMigrationSubtasksRequest() - - assert args[0] == request_msg - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.MigrationServiceGrpcTransport, - ) - -def test_migration_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.MigrationServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_migration_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.bigquery_migration_v2alpha.services.migration_service.transports.MigrationServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.MigrationServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'create_migration_workflow', - 'get_migration_workflow', - 'list_migration_workflows', - 'delete_migration_workflow', - 'start_migration_workflow', - 'get_migration_subtask', - 'list_migration_subtasks', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_migration_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.bigquery_migration_v2alpha.services.migration_service.transports.MigrationServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.MigrationServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_migration_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.bigquery_migration_v2alpha.services.migration_service.transports.MigrationServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.MigrationServiceTransport() - adc.assert_called_once() - - -def test_migration_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - MigrationServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.MigrationServiceGrpcTransport, - transports.MigrationServiceGrpcAsyncIOTransport, - ], -) -def test_migration_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.MigrationServiceGrpcTransport, - transports.MigrationServiceGrpcAsyncIOTransport, - ], -) -def test_migration_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.MigrationServiceGrpcTransport, grpc_helpers), - (transports.MigrationServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_migration_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "bigquerymigration.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="bigquerymigration.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.MigrationServiceGrpcTransport, transports.MigrationServiceGrpcAsyncIOTransport]) -def test_migration_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) -def test_migration_service_host_no_port(transport_name): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='bigquerymigration.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'bigquerymigration.googleapis.com:443' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) -def test_migration_service_host_with_port(transport_name): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='bigquerymigration.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'bigquerymigration.googleapis.com:8000' - ) - -def test_migration_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
-    transport = transports.MigrationServiceGrpcTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials is None
-
-
-def test_migration_service_grpc_asyncio_transport_channel():
-    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
-    # Check that channel is used if provided.
-    transport = transports.MigrationServiceGrpcAsyncIOTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials is None
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.MigrationServiceGrpcTransport, transports.MigrationServiceGrpcAsyncIOTransport])
-def test_migration_service_transport_channel_mtls_with_client_cert_source(
-    transport_class
-):
-    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
-        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
-            mock_ssl_cred = mock.Mock()
-            grpc_ssl_channel_cred.return_value = mock_ssl_cred
-
-            mock_grpc_channel = mock.Mock()
-            grpc_create_channel.return_value = mock_grpc_channel
-
-            cred = ga_credentials.AnonymousCredentials()
-            with pytest.warns(DeprecationWarning):
-                with mock.patch.object(google.auth, 'default') as adc:
-                    adc.return_value = (cred, None)
-                    transport = transport_class(
-                        host="squid.clam.whelk",
-                        api_mtls_endpoint="mtls.squid.clam.whelk",
-                        client_cert_source=client_cert_source_callback,
-                    )
-                    adc.assert_called_once()
-
-            grpc_ssl_channel_cred.assert_called_once_with(
-                certificate_chain=b"cert bytes", private_key=b"key bytes"
-            )
-            grpc_create_channel.assert_called_once_with(
-                "mtls.squid.clam.whelk:443",
-                credentials=cred,
-                credentials_file=None,
-                scopes=None,
-                ssl_credentials=mock_ssl_cred,
-                quota_project_id=None,
-                options=[
-                    ("grpc.max_send_message_length", -1),
-                    ("grpc.max_receive_message_length", -1),
-                ],
-            )
-            assert transport.grpc_channel == mock_grpc_channel
-            assert transport._ssl_channel_credentials == mock_ssl_cred
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.MigrationServiceGrpcTransport, transports.MigrationServiceGrpcAsyncIOTransport]) -def test_migration_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_migration_subtask_path(): - project = "squid" - location = "clam" - workflow = "whelk" - subtask = "octopus" - expected = "projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}".format(project=project, location=location, workflow=workflow, subtask=subtask, ) - actual = MigrationServiceClient.migration_subtask_path(project, location, workflow, subtask) - assert expected == actual - - -def test_parse_migration_subtask_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - "workflow": "cuttlefish", - "subtask": "mussel", - } - path = MigrationServiceClient.migration_subtask_path(**expected) - - # Check that the path construction is reversible. - actual = MigrationServiceClient.parse_migration_subtask_path(path) - assert expected == actual - -def test_migration_workflow_path(): - project = "winkle" - location = "nautilus" - workflow = "scallop" - expected = "projects/{project}/locations/{location}/workflows/{workflow}".format(project=project, location=location, workflow=workflow, ) - actual = MigrationServiceClient.migration_workflow_path(project, location, workflow) - assert expected == actual - - -def test_parse_migration_workflow_path(): - expected = { - "project": "abalone", - "location": "squid", - "workflow": "clam", - } - path = MigrationServiceClient.migration_workflow_path(**expected) - - # Check that the path construction is reversible. - actual = MigrationServiceClient.parse_migration_workflow_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "whelk" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = MigrationServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "octopus", - } - path = MigrationServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. 
- actual = MigrationServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "oyster" - expected = "folders/{folder}".format(folder=folder, ) - actual = MigrationServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "nudibranch", - } - path = MigrationServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = MigrationServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "cuttlefish" - expected = "organizations/{organization}".format(organization=organization, ) - actual = MigrationServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "mussel", - } - path = MigrationServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = MigrationServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "winkle" - expected = "projects/{project}".format(project=project, ) - actual = MigrationServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "nautilus", - } - path = MigrationServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = MigrationServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "scallop" - location = "abalone" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = MigrationServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "squid", - "location": "clam", - } - path = MigrationServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = MigrationServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.MigrationServiceTransport, '_prep_wrapped_messages') as prep: - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.MigrationServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = MigrationServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_transport_close_grpc(): - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = MigrationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'grpc', - ] - for transport in transports: - client = MigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. - with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (MigrationServiceClient, transports.MigrationServiceGrpcTransport), - (MigrationServiceAsyncClient, transports.MigrationServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/tests/unit/gapic/bigquery_migration_v2alpha/test_sql_translation_service.py b/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/tests/unit/gapic/bigquery_migration_v2alpha/test_sql_translation_service.py deleted file mode 100644 index 08eaba13172b..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-migration/v2alpha/tests/unit/gapic/bigquery_migration_v2alpha/test_sql_translation_service.py +++ /dev/null @@ -1,1765 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use 
this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import os
-# try/except added for compatibility with python < 3.8
-try:
-    from unittest import mock
-    from unittest.mock import AsyncMock  # pragma: NO COVER
-except ImportError:  # pragma: NO COVER
-    import mock
-
-import grpc
-from grpc.experimental import aio
-import json
-import math
-import pytest
-from google.api_core import api_core_version
-from proto.marshal.rules.dates import DurationRule, TimestampRule
-from proto.marshal.rules import wrappers
-
-try:
-    from google.auth.aio import credentials as ga_credentials_async
-    HAS_GOOGLE_AUTH_AIO = True
-except ImportError: # pragma: NO COVER
-    HAS_GOOGLE_AUTH_AIO = False
-
-from google.api_core import client_options
-from google.api_core import exceptions as core_exceptions
-from google.api_core import gapic_v1
-from google.api_core import grpc_helpers
-from google.api_core import grpc_helpers_async
-from google.api_core import path_template
-from google.api_core import retry as retries
-from google.auth import credentials as ga_credentials
-from google.auth.exceptions import MutualTLSChannelError
-from google.cloud.bigquery_migration_v2alpha.services.sql_translation_service import SqlTranslationServiceAsyncClient
-from google.cloud.bigquery_migration_v2alpha.services.sql_translation_service import SqlTranslationServiceClient
-from google.cloud.bigquery_migration_v2alpha.services.sql_translation_service import transports
-from google.cloud.bigquery_migration_v2alpha.types import translation_service
-from google.oauth2 import service_account
-import google.auth
-
-
-
-CRED_INFO_JSON = {
-    "credential_source": "/path/to/file",
-    "credential_type": "service account credentials",
-    "principal": "service-account@example.com",
-}
-CRED_INFO_STRING = json.dumps(CRED_INFO_JSON)
-
-
-async def mock_async_gen(data, chunk_size=1):
-    for i in range(0, len(data), chunk_size):  # pragma: NO COVER
-        chunk = data[i : i + chunk_size]
-        yield chunk.encode("utf-8")
-
-def client_cert_source_callback():
-    return b"cert bytes", b"key bytes"
-
-# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded.
-# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107.
-def async_anonymous_credentials():
-    if HAS_GOOGLE_AUTH_AIO:
-        return ga_credentials_async.AnonymousCredentials()
-    return ga_credentials.AnonymousCredentials()
-
-# If default endpoint is localhost, then default mtls endpoint will be the same.
-# This method modifies the default endpoint so the client can produce a different
-# mtls endpoint for endpoint testing purposes.
-def modify_default_endpoint(client):
-    return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT
-
-# If default endpoint template is localhost, then default mtls endpoint will be the same.
-# This method modifies the default endpoint template so the client can produce a different
-# mtls endpoint for endpoint testing purposes.
-def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert SqlTranslationServiceClient._get_default_mtls_endpoint(None) is None - assert SqlTranslationServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert SqlTranslationServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert SqlTranslationServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert SqlTranslationServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert SqlTranslationServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert SqlTranslationServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert SqlTranslationServiceClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert SqlTranslationServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - SqlTranslationServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert SqlTranslationServiceClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert SqlTranslationServiceClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert SqlTranslationServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - SqlTranslationServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert SqlTranslationServiceClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert SqlTranslationServiceClient._get_client_cert_source(None, False) is None - assert SqlTranslationServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert SqlTranslationServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - 
assert SqlTranslationServiceClient._get_client_cert_source(None, True) is mock_default_cert_source - assert SqlTranslationServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(SqlTranslationServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SqlTranslationServiceClient)) -@mock.patch.object(SqlTranslationServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SqlTranslationServiceAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = SqlTranslationServiceClient._DEFAULT_UNIVERSE - default_endpoint = SqlTranslationServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = SqlTranslationServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert SqlTranslationServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert SqlTranslationServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == SqlTranslationServiceClient.DEFAULT_MTLS_ENDPOINT - assert SqlTranslationServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert SqlTranslationServiceClient._get_api_endpoint(None, None, default_universe, "always") == SqlTranslationServiceClient.DEFAULT_MTLS_ENDPOINT - assert SqlTranslationServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == SqlTranslationServiceClient.DEFAULT_MTLS_ENDPOINT - assert SqlTranslationServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert SqlTranslationServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - SqlTranslationServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." - - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert SqlTranslationServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert SqlTranslationServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert SqlTranslationServiceClient._get_universe_domain(None, None) == SqlTranslationServiceClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - SqlTranslationServiceClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
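The `test__get_universe_domain` case above pins down the client's resolution order for the universe domain: an explicit client option wins, then the environment value (`GOOGLE_CLOUD_UNIVERSE_DOMAIN`), then the `googleapis.com` default, with an empty string rejected outright. A minimal sketch of that precedence follows; `resolve_universe_domain` is a hypothetical helper mirroring the assertions above, not the generated client's actual internals:

.. code-block:: python

    _DEFAULT_UNIVERSE = "googleapis.com"  # default Google universe domain

    def resolve_universe_domain(client_option, env_value):
        # Hypothetical helper: same precedence the test asserts,
        # not the client library's real implementation.
        if client_option is not None:
            if not client_option:
                raise ValueError("Universe Domain cannot be an empty string.")
            return client_option  # explicit ClientOptions value wins
        if env_value is not None:
            return env_value  # then the environment variable
        return _DEFAULT_UNIVERSE  # otherwise fall back to the default

    assert resolve_universe_domain("foo.com", "bar.com") == "foo.com"
    assert resolve_universe_domain(None, "bar.com") == "bar.com"
    assert resolve_universe_domain(None, None) == "googleapis.com"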
- -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = SqlTranslationServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - -@pytest.mark.parametrize("error_code", [401,403,404,500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = SqlTranslationServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - -@pytest.mark.parametrize("client_class,transport_name", [ - (SqlTranslationServiceClient, "grpc"), - (SqlTranslationServiceAsyncClient, "grpc_asyncio"), -]) -def test_sql_translation_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'bigquerymigration.googleapis.com:443' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.SqlTranslationServiceGrpcTransport, "grpc"), - (transports.SqlTranslationServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_sql_translation_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (SqlTranslationServiceClient, "grpc"), - (SqlTranslationServiceAsyncClient, "grpc_asyncio"), -]) -def test_sql_translation_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = 
client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'bigquerymigration.googleapis.com:443' - ) - - -def test_sql_translation_service_client_get_transport_class(): - transport = SqlTranslationServiceClient.get_transport_class() - available_transports = [ - transports.SqlTranslationServiceGrpcTransport, - ] - assert transport in available_transports - - transport = SqlTranslationServiceClient.get_transport_class("grpc") - assert transport == transports.SqlTranslationServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (SqlTranslationServiceClient, transports.SqlTranslationServiceGrpcTransport, "grpc"), - (SqlTranslationServiceAsyncClient, transports.SqlTranslationServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(SqlTranslationServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SqlTranslationServiceClient)) -@mock.patch.object(SqlTranslationServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SqlTranslationServiceAsyncClient)) -def test_sql_translation_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(SqlTranslationServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(SqlTranslationServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". 
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
-        with mock.patch.object(transport_class, '__init__') as patched:
-            patched.return_value = None
-            client = client_class(transport=transport_name)
-            patched.assert_called_once_with(
-                credentials=None,
-                credentials_file=None,
-                host=client.DEFAULT_MTLS_ENDPOINT,
-                scopes=None,
-                client_cert_source_for_mtls=None,
-                quota_project_id=None,
-                client_info=transports.base.DEFAULT_CLIENT_INFO,
-                always_use_jwt_access=True,
-                api_audience=None,
-            )
-
-    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
-    # unsupported value.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
-        with pytest.raises(MutualTLSChannelError) as excinfo:
-            client = client_class(transport=transport_name)
-        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
-
-    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
-        with pytest.raises(ValueError) as excinfo:
-            client = client_class(transport=transport_name)
-        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
-
-    # Check the case quota_project_id is provided
-    options = client_options.ClientOptions(quota_project_id="octopus")
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(client_options=options, transport=transport_name)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file=None,
-            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id="octopus",
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience=None,
-        )
-    # Check the case api_audience is provided
-    options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(client_options=options, transport=transport_name)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file=None,
-            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id=None,
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience="https://language.googleapis.com"
-        )
-
-@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
-    (SqlTranslationServiceClient, transports.SqlTranslationServiceGrpcTransport, "grpc", "true"),
-    (SqlTranslationServiceAsyncClient, transports.SqlTranslationServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"),
-    (SqlTranslationServiceClient, transports.SqlTranslationServiceGrpcTransport, "grpc", "false"),
-    (SqlTranslationServiceAsyncClient, transports.SqlTranslationServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"),
-])
-@mock.patch.object(SqlTranslationServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SqlTranslationServiceClient))
-@mock.patch.object(SqlTranslationServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SqlTranslationServiceAsyncClient))
-@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT":
"auto"}) -def test_sql_translation_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - SqlTranslationServiceClient, SqlTranslationServiceAsyncClient -]) -@mock.patch.object(SqlTranslationServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SqlTranslationServiceClient)) -@mock.patch.object(SqlTranslationServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SqlTranslationServiceAsyncClient)) -def test_sql_translation_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - SqlTranslationServiceClient, SqlTranslationServiceAsyncClient -]) -@mock.patch.object(SqlTranslationServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SqlTranslationServiceClient)) -@mock.patch.object(SqlTranslationServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(SqlTranslationServiceAsyncClient)) -def test_sql_translation_service_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = SqlTranslationServiceClient._DEFAULT_UNIVERSE - default_endpoint = SqlTranslationServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = SqlTranslationServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
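# A sketch of the endpoint-resolution rules asserted in this test, assuming a
# google-api-core recent enough to expose ClientOptions.universe_domain (the
# test guards on hasattr for exactly this reason); hosts are placeholders.
from google.api_core.client_options import ClientOptions
from google.auth.credentials import AnonymousCredentials
from google.cloud.bigquery_migration_v2alpha import SqlTranslationServiceClient

# An explicit api_endpoint always wins, regardless of other settings.
client = SqlTranslationServiceClient(
    credentials=AnonymousCredentials(),
    client_options=ClientOptions(api_endpoint="private.example.com"),
)
assert client.api_endpoint == "private.example.com"

# A non-default universe domain is substituted into the endpoint template.
client = SqlTranslationServiceClient(
    credentials=AnonymousCredentials(),
    client_options=ClientOptions(universe_domain="example-universe.net"),
)
assert client.universe_domain == "example-universe.net"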
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (SqlTranslationServiceClient, transports.SqlTranslationServiceGrpcTransport, "grpc"), - (SqlTranslationServiceAsyncClient, transports.SqlTranslationServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_sql_translation_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (SqlTranslationServiceClient, transports.SqlTranslationServiceGrpcTransport, "grpc", grpc_helpers), - (SqlTranslationServiceAsyncClient, transports.SqlTranslationServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_sql_translation_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
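# A sketch of the two options exercised here, assuming a service-account key
# exists at the placeholder path "credentials.json": the client then loads it
# with the requested scopes instead of calling google.auth.default().
from google.api_core.client_options import ClientOptions
from google.cloud.bigquery_migration_v2alpha import SqlTranslationServiceClient

options = ClientOptions(
    credentials_file="credentials.json",  # placeholder path
    scopes=["https://www.googleapis.com/auth/cloud-platform"],
)
client = SqlTranslationServiceClient(client_options=options)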
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_sql_translation_service_client_client_options_from_dict(): - with mock.patch('google.cloud.bigquery_migration_v2alpha.services.sql_translation_service.transports.SqlTranslationServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = SqlTranslationServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (SqlTranslationServiceClient, transports.SqlTranslationServiceGrpcTransport, "grpc", grpc_helpers), - (SqlTranslationServiceAsyncClient, transports.SqlTranslationServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_sql_translation_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
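# A sketch mirroring test_sql_translation_service_client_client_options_from_dict
# above: client_options may be a plain dict, and a bare host gains the :443
# default port; "squid.clam.whelk" is the same placeholder host the tests use.
from google.auth.credentials import AnonymousCredentials
from google.cloud.bigquery_migration_v2alpha import SqlTranslationServiceClient

client = SqlTranslationServiceClient(
    credentials=AnonymousCredentials(),
    client_options={"api_endpoint": "squid.clam.whelk"},
)
assert client.transport._host == "squid.clam.whelk:443"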
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "bigquerymigration.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="bigquerymigration.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - translation_service.TranslateQueryRequest, - dict, -]) -def test_translate_query(request_type, transport: str = 'grpc'): - client = SqlTranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.translate_query), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = translation_service.TranslateQueryResponse( - translation_job='translation_job_value', - translated_query='translated_query_value', - ) - response = client.translate_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = translation_service.TranslateQueryRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, translation_service.TranslateQueryResponse) - assert response.translation_job == 'translation_job_value' - assert response.translated_query == 'translated_query_value' - - -def test_translate_query_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = SqlTranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = translation_service.TranslateQueryRequest( - parent='parent_value', - query='query_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.translate_query), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
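# A sketch of a real TranslateQuery call, assuming Application Default
# Credentials and an existing BigQuery Migration location; the parent value is
# a placeholder resource name.
from google.cloud.bigquery_migration_v2alpha import SqlTranslationServiceClient
from google.cloud.bigquery_migration_v2alpha.types import translation_service

client = SqlTranslationServiceClient()
request = translation_service.TranslateQueryRequest(
    parent="projects/my-project/locations/us",  # placeholder
    source_dialect=translation_service.TranslateQueryRequest.SqlTranslationSourceDialect.TERADATA,
    query="SELECT 1",
)
response = client.translate_query(request=request)
print(response.translated_query)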
- client.translate_query(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == translation_service.TranslateQueryRequest( - parent='parent_value', - query='query_value', - ) - -def test_translate_query_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SqlTranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.translate_query in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.translate_query] = mock_rpc - request = {} - client.translate_query(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.translate_query(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_translate_query_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = SqlTranslationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.translate_query in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.translate_query] = mock_rpc - - request = {} - await client.translate_query(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.translate_query(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_translate_query_async(transport: str = 'grpc_asyncio', request_type=translation_service.TranslateQueryRequest): - client = SqlTranslationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.translate_query), - '__call__') as call: - # Designate an appropriate return value for the call. 
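# The async surface mirrors the sync one; a sketch assuming Application
# Default Credentials and a placeholder parent resource.
import asyncio

from google.cloud.bigquery_migration_v2alpha import SqlTranslationServiceAsyncClient
from google.cloud.bigquery_migration_v2alpha.types import translation_service


async def main() -> None:
    client = SqlTranslationServiceAsyncClient()
    response = await client.translate_query(
        request=translation_service.TranslateQueryRequest(
            parent="projects/my-project/locations/us",  # placeholder
            source_dialect=translation_service.TranslateQueryRequest.SqlTranslationSourceDialect.TERADATA,
            query="SELECT 1",
        )
    )
    print(response.translated_query)


asyncio.run(main())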
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(translation_service.TranslateQueryResponse( - translation_job='translation_job_value', - translated_query='translated_query_value', - )) - response = await client.translate_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = translation_service.TranslateQueryRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, translation_service.TranslateQueryResponse) - assert response.translation_job == 'translation_job_value' - assert response.translated_query == 'translated_query_value' - - -@pytest.mark.asyncio -async def test_translate_query_async_from_dict(): - await test_translate_query_async(request_type=dict) - -def test_translate_query_field_headers(): - client = SqlTranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = translation_service.TranslateQueryRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.translate_query), - '__call__') as call: - call.return_value = translation_service.TranslateQueryResponse() - client.translate_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_translate_query_field_headers_async(): - client = SqlTranslationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = translation_service.TranslateQueryRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.translate_query), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(translation_service.TranslateQueryResponse()) - await client.translate_query(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_translate_query_flattened(): - client = SqlTranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.translate_query), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = translation_service.TranslateQueryResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.translate_query( - parent='parent_value', - source_dialect=translation_service.TranslateQueryRequest.SqlTranslationSourceDialect.TERADATA, - query='query_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].source_dialect - mock_val = translation_service.TranslateQueryRequest.SqlTranslationSourceDialect.TERADATA - assert arg == mock_val - arg = args[0].query - mock_val = 'query_value' - assert arg == mock_val - - -def test_translate_query_flattened_error(): - client = SqlTranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.translate_query( - translation_service.TranslateQueryRequest(), - parent='parent_value', - source_dialect=translation_service.TranslateQueryRequest.SqlTranslationSourceDialect.TERADATA, - query='query_value', - ) - -@pytest.mark.asyncio -async def test_translate_query_flattened_async(): - client = SqlTranslationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.translate_query), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = translation_service.TranslateQueryResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(translation_service.TranslateQueryResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.translate_query( - parent='parent_value', - source_dialect=translation_service.TranslateQueryRequest.SqlTranslationSourceDialect.TERADATA, - query='query_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].source_dialect - mock_val = translation_service.TranslateQueryRequest.SqlTranslationSourceDialect.TERADATA - assert arg == mock_val - arg = args[0].query - mock_val = 'query_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_translate_query_flattened_error_async(): - client = SqlTranslationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.translate_query( - translation_service.TranslateQueryRequest(), - parent='parent_value', - source_dialect=translation_service.TranslateQueryRequest.SqlTranslationSourceDialect.TERADATA, - query='query_value', - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.SqlTranslationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = SqlTranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. 
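# A sketch of the flattened convention tested above: request fields go in as
# keyword arguments instead of a request object, and supplying both at once
# raises ValueError. Assumes ADC and a placeholder parent resource.
from google.cloud.bigquery_migration_v2alpha import SqlTranslationServiceClient
from google.cloud.bigquery_migration_v2alpha.types import translation_service

client = SqlTranslationServiceClient()
response = client.translate_query(
    parent="projects/my-project/locations/us",  # placeholder
    source_dialect=translation_service.TranslateQueryRequest.SqlTranslationSourceDialect.TERADATA,
    query="SELECT 1",
)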
- transport = transports.SqlTranslationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = SqlTranslationServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.SqlTranslationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = SqlTranslationServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = SqlTranslationServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.SqlTranslationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = SqlTranslationServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.SqlTranslationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = SqlTranslationServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.SqlTranslationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.SqlTranslationServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.SqlTranslationServiceGrpcTransport, - transports.SqlTranslationServiceGrpcAsyncIOTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = SqlTranslationServiceClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = SqlTranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_translate_query_empty_call_grpc(): - client = SqlTranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.translate_query), - '__call__') as call: - call.return_value = translation_service.TranslateQueryResponse() - client.translate_query(request=None) - - # Establish that the underlying stub method was called. 
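# A sketch of injecting a prebuilt transport, as test_transport_instance does
# above; when a transport is supplied, credentials, credentials_file, scopes,
# and api_key must not also be passed (each combination raises ValueError).
from google.auth.credentials import AnonymousCredentials
from google.cloud.bigquery_migration_v2alpha import SqlTranslationServiceClient
from google.cloud.bigquery_migration_v2alpha.services.sql_translation_service import (
    transports,
)

transport = transports.SqlTranslationServiceGrpcTransport(
    credentials=AnonymousCredentials(),
)
client = SqlTranslationServiceClient(transport=transport)
assert client.transport is transport
assert client.transport.kind == "grpc"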
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = translation_service.TranslateQueryRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = SqlTranslationServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = SqlTranslationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_translate_query_empty_call_grpc_asyncio(): - client = SqlTranslationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.translate_query), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(translation_service.TranslateQueryResponse( - translation_job='translation_job_value', - translated_query='translated_query_value', - )) - await client.translate_query(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = translation_service.TranslateQueryRequest() - - assert args[0] == request_msg - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = SqlTranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.SqlTranslationServiceGrpcTransport, - ) - -def test_sql_translation_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.SqlTranslationServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_sql_translation_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.bigquery_migration_v2alpha.services.sql_translation_service.transports.SqlTranslationServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.SqlTranslationServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - 'translate_query', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_sql_translation_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.bigquery_migration_v2alpha.services.sql_translation_service.transports.SqlTranslationServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.SqlTranslationServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_sql_translation_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.bigquery_migration_v2alpha.services.sql_translation_service.transports.SqlTranslationServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.SqlTranslationServiceTransport() - adc.assert_called_once() - - -def test_sql_translation_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - SqlTranslationServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.SqlTranslationServiceGrpcTransport, - transports.SqlTranslationServiceGrpcAsyncIOTransport, - ], -) -def test_sql_translation_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
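# A sketch of the ADC fallback asserted in test_sql_translation_service_auth_adc:
# with no credentials or credentials_file supplied, the client resolves
# Application Default Credentials with the cloud-platform default scope.
# Assumes ADC is configured (e.g. `gcloud auth application-default login`).
from google.cloud.bigquery_migration_v2alpha import SqlTranslationServiceClient

client = SqlTranslationServiceClient()  # google.auth.default() runs under the hood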
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.SqlTranslationServiceGrpcTransport, - transports.SqlTranslationServiceGrpcAsyncIOTransport, - ], -) -def test_sql_translation_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.SqlTranslationServiceGrpcTransport, grpc_helpers), - (transports.SqlTranslationServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_sql_translation_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "bigquerymigration.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="bigquerymigration.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.SqlTranslationServiceGrpcTransport, transports.SqlTranslationServiceGrpcAsyncIOTransport]) -def test_sql_translation_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) -def test_sql_translation_service_host_no_port(transport_name): - client = SqlTranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='bigquerymigration.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'bigquerymigration.googleapis.com:443' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) -def test_sql_translation_service_host_with_port(transport_name): - client = SqlTranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='bigquerymigration.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'bigquerymigration.googleapis.com:8000' - ) - -def test_sql_translation_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.SqlTranslationServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_sql_translation_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.SqlTranslationServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
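# A sketch of the host handling tested just above: a bare api_endpoint gains
# the :443 default port, while an explicitly given port is preserved.
from google.api_core.client_options import ClientOptions
from google.auth.credentials import AnonymousCredentials
from google.cloud.bigquery_migration_v2alpha import SqlTranslationServiceClient

client = SqlTranslationServiceClient(
    credentials=AnonymousCredentials(),
    client_options=ClientOptions(api_endpoint="bigquerymigration.googleapis.com:8000"),
)
assert client.transport._host == "bigquerymigration.googleapis.com:8000"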
-@pytest.mark.parametrize("transport_class", [transports.SqlTranslationServiceGrpcTransport, transports.SqlTranslationServiceGrpcAsyncIOTransport]) -def test_sql_translation_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.SqlTranslationServiceGrpcTransport, transports.SqlTranslationServiceGrpcAsyncIOTransport]) -def test_sql_translation_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = SqlTranslationServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = SqlTranslationServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. 
- actual = SqlTranslationServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format(folder=folder, ) - actual = SqlTranslationServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = SqlTranslationServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = SqlTranslationServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format(organization=organization, ) - actual = SqlTranslationServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = SqlTranslationServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = SqlTranslationServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format(project=project, ) - actual = SqlTranslationServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = SqlTranslationServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = SqlTranslationServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = SqlTranslationServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = SqlTranslationServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
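# A sketch of the resource-path helpers exercised above; every builder has a
# matching parser, so construction is reversible. Values are placeholders.
from google.cloud.bigquery_migration_v2alpha import SqlTranslationServiceClient

path = SqlTranslationServiceClient.common_location_path("my-project", "us")
assert path == "projects/my-project/locations/us"
assert SqlTranslationServiceClient.parse_common_location_path(path) == {
    "project": "my-project",
    "location": "us",
}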
- actual = SqlTranslationServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.SqlTranslationServiceTransport, '_prep_wrapped_messages') as prep: - client = SqlTranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.SqlTranslationServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = SqlTranslationServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_transport_close_grpc(): - client = SqlTranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = SqlTranslationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'grpc', - ] - for transport in transports: - client = SqlTranslationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. - with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (SqlTranslationServiceClient, transports.SqlTranslationServiceGrpcTransport), - (SqlTranslationServiceAsyncClient, transports.SqlTranslationServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/.coveragerc b/owl-bot-staging/google-cloud-bigquery-reservation/v1/.coveragerc deleted file mode 100644 index 24e124ab08de..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/bigquery_reservation/__init__.py - google/cloud/bigquery_reservation/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git 
a/owl-bot-staging/google-cloud-bigquery-reservation/v1/.flake8 b/owl-bot-staging/google-cloud-bigquery-reservation/v1/.flake8 deleted file mode 100644 index 29227d4cf419..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. - **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/MANIFEST.in b/owl-bot-staging/google-cloud-bigquery-reservation/v1/MANIFEST.in deleted file mode 100644 index c1c2e1b5d84e..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/bigquery_reservation *.py -recursive-include google/cloud/bigquery_reservation_v1 *.py diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/README.rst b/owl-bot-staging/google-cloud-bigquery-reservation/v1/README.rst deleted file mode 100644 index 1cfbfbc32063..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/README.rst +++ /dev/null @@ -1,143 +0,0 @@ -Python Client for Google Cloud Bigquery Reservation API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Bigquery Reservation API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv - source /bin/activate - /bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. code-block:: console - - python3 -m venv - \Scripts\activate - \Scripts\pip.exe install \path\to\library - - -Logging -------- - -This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes. -Note the following: - -#. 
Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging. -#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**. -#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below. - - -Simple, environment-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google -logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged -messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging -event. - -A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log. - -- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc. -- Invalid logging scopes: :code:`foo`, :code:`123`, etc. - -**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers. - - -Examples -^^^^^^^^ - -- Enabling the default handler for all Google-based loggers - -.. code-block:: console - - export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google - -- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`): - -.. code-block:: console - - export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1 - - -Advanced, code-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -You can also configure a valid logging scope using Python's standard `logging` mechanism. - - -Examples -^^^^^^^^ - -- Configuring a handler for all Google-based loggers - -.. code-block:: python - - import logging - - from google.cloud.translate_v3 import translate - - base_logger = logging.getLogger("google") - base_logger.addHandler(logging.StreamHandler()) - base_logger.setLevel(logging.DEBUG) - -- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`): - -.. code-block:: python - - import logging - - from google.cloud.translate_v3 import translate - - base_logger = logging.getLogger("google.cloud.library_v1") - base_logger.addHandler(logging.StreamHandler()) - base_logger.setLevel(logging.DEBUG) - - -Logging details -~~~~~~~~~~~~~~~ - -#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root - logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set - :code:`logging.getLogger("google").propagate = True` in your code. -#. You can mix the different logging configurations above for different Google modules. For example, you may want use a code-based logging configuration for - one library, but decide you need to also set up environment-based logging configuration for another library. - - #. 
If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual - if the code -based configuration gets applied first. - -#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get - executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured. - (This is the reason for 2.i. above.) diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/docs/_static/custom.css b/owl-bot-staging/google-cloud-bigquery-reservation/v1/docs/_static/custom.css deleted file mode 100644 index 06423be0b592..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/docs/_static/custom.css +++ /dev/null @@ -1,3 +0,0 @@ -dl.field-list > dt { - min-width: 100px -} diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/docs/bigquery_reservation_v1/reservation_service.rst b/owl-bot-staging/google-cloud-bigquery-reservation/v1/docs/bigquery_reservation_v1/reservation_service.rst deleted file mode 100644 index 35e6b3cfd676..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/docs/bigquery_reservation_v1/reservation_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -ReservationService ------------------------------------- - -.. automodule:: google.cloud.bigquery_reservation_v1.services.reservation_service - :members: - :inherited-members: - -.. automodule:: google.cloud.bigquery_reservation_v1.services.reservation_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/docs/bigquery_reservation_v1/services_.rst b/owl-bot-staging/google-cloud-bigquery-reservation/v1/docs/bigquery_reservation_v1/services_.rst deleted file mode 100644 index 3f9e9577f53c..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/docs/bigquery_reservation_v1/services_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Bigquery Reservation v1 API -===================================================== -.. toctree:: - :maxdepth: 2 - - reservation_service diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/docs/bigquery_reservation_v1/types_.rst b/owl-bot-staging/google-cloud-bigquery-reservation/v1/docs/bigquery_reservation_v1/types_.rst deleted file mode 100644 index 96a1583d3f20..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/docs/bigquery_reservation_v1/types_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Bigquery Reservation v1 API -================================================== - -.. automodule:: google.cloud.bigquery_reservation_v1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/docs/conf.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/docs/conf.py deleted file mode 100644 index 21b01cdf2bf4..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-bigquery-reservation documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = u"google-cloud-bigquery-reservation" -copyright = u"2023, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = 'en' - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. 
-exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. 
-# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-bigquery-reservation-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-bigquery-reservation.tex", - u"google-cloud-bigquery-reservation Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-bigquery-reservation", - u"Google Cloud Bigquery Reservation Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. 
-# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-bigquery-reservation", - u"google-cloud-bigquery-reservation Documentation", - author, - "google-cloud-bigquery-reservation", - "GAPIC library for Google Cloud Bigquery Reservation API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/docs/index.rst b/owl-bot-staging/google-cloud-bigquery-reservation/v1/docs/index.rst deleted file mode 100644 index 9392372a2e66..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - bigquery_reservation_v1/services_ - bigquery_reservation_v1/types_ diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation/__init__.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation/__init__.py deleted file mode 100644 index e7db23e924ee..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation/__init__.py +++ /dev/null @@ -1,95 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
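The napoleon settings at the tail of this conf.py (``napoleon_google_docstring``, ``napoleon_use_param``, ``napoleon_use_rtype``) determine how docstrings render. A small sketch, using a hypothetical function, of the Google-style docstring shape these settings target:

.. code-block:: python

    def repeat(name: str, count: int = 1) -> str:
        """Repeat ``name`` the given number of times.

        Args:
            name: The value to repeat.
            count: Number of repetitions.

        Returns:
            The concatenated string.
        """
        # sphinx.ext.napoleon converts the Args/Returns sections into
        # :param:/:returns:/:rtype: fields because napoleon_use_param and
        # napoleon_use_rtype are enabled in the configuration above.
        return name * count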
-# -from google.cloud.bigquery_reservation import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.bigquery_reservation_v1.services.reservation_service.client import ReservationServiceClient -from google.cloud.bigquery_reservation_v1.services.reservation_service.async_client import ReservationServiceAsyncClient - -from google.cloud.bigquery_reservation_v1.types.reservation import Assignment -from google.cloud.bigquery_reservation_v1.types.reservation import BiReservation -from google.cloud.bigquery_reservation_v1.types.reservation import CapacityCommitment -from google.cloud.bigquery_reservation_v1.types.reservation import CreateAssignmentRequest -from google.cloud.bigquery_reservation_v1.types.reservation import CreateCapacityCommitmentRequest -from google.cloud.bigquery_reservation_v1.types.reservation import CreateReservationRequest -from google.cloud.bigquery_reservation_v1.types.reservation import DeleteAssignmentRequest -from google.cloud.bigquery_reservation_v1.types.reservation import DeleteCapacityCommitmentRequest -from google.cloud.bigquery_reservation_v1.types.reservation import DeleteReservationRequest -from google.cloud.bigquery_reservation_v1.types.reservation import FailoverReservationRequest -from google.cloud.bigquery_reservation_v1.types.reservation import GetBiReservationRequest -from google.cloud.bigquery_reservation_v1.types.reservation import GetCapacityCommitmentRequest -from google.cloud.bigquery_reservation_v1.types.reservation import GetReservationRequest -from google.cloud.bigquery_reservation_v1.types.reservation import ListAssignmentsRequest -from google.cloud.bigquery_reservation_v1.types.reservation import ListAssignmentsResponse -from google.cloud.bigquery_reservation_v1.types.reservation import ListCapacityCommitmentsRequest -from google.cloud.bigquery_reservation_v1.types.reservation import ListCapacityCommitmentsResponse -from google.cloud.bigquery_reservation_v1.types.reservation import ListReservationsRequest -from google.cloud.bigquery_reservation_v1.types.reservation import ListReservationsResponse -from google.cloud.bigquery_reservation_v1.types.reservation import MergeCapacityCommitmentsRequest -from google.cloud.bigquery_reservation_v1.types.reservation import MoveAssignmentRequest -from google.cloud.bigquery_reservation_v1.types.reservation import Reservation -from google.cloud.bigquery_reservation_v1.types.reservation import SearchAllAssignmentsRequest -from google.cloud.bigquery_reservation_v1.types.reservation import SearchAllAssignmentsResponse -from google.cloud.bigquery_reservation_v1.types.reservation import SearchAssignmentsRequest -from google.cloud.bigquery_reservation_v1.types.reservation import SearchAssignmentsResponse -from google.cloud.bigquery_reservation_v1.types.reservation import SplitCapacityCommitmentRequest -from google.cloud.bigquery_reservation_v1.types.reservation import SplitCapacityCommitmentResponse -from google.cloud.bigquery_reservation_v1.types.reservation import TableReference -from google.cloud.bigquery_reservation_v1.types.reservation import UpdateAssignmentRequest -from google.cloud.bigquery_reservation_v1.types.reservation import UpdateBiReservationRequest -from google.cloud.bigquery_reservation_v1.types.reservation import UpdateCapacityCommitmentRequest -from google.cloud.bigquery_reservation_v1.types.reservation import UpdateReservationRequest -from google.cloud.bigquery_reservation_v1.types.reservation import Edition - -__all__ = 
('ReservationServiceClient', - 'ReservationServiceAsyncClient', - 'Assignment', - 'BiReservation', - 'CapacityCommitment', - 'CreateAssignmentRequest', - 'CreateCapacityCommitmentRequest', - 'CreateReservationRequest', - 'DeleteAssignmentRequest', - 'DeleteCapacityCommitmentRequest', - 'DeleteReservationRequest', - 'FailoverReservationRequest', - 'GetBiReservationRequest', - 'GetCapacityCommitmentRequest', - 'GetReservationRequest', - 'ListAssignmentsRequest', - 'ListAssignmentsResponse', - 'ListCapacityCommitmentsRequest', - 'ListCapacityCommitmentsResponse', - 'ListReservationsRequest', - 'ListReservationsResponse', - 'MergeCapacityCommitmentsRequest', - 'MoveAssignmentRequest', - 'Reservation', - 'SearchAllAssignmentsRequest', - 'SearchAllAssignmentsResponse', - 'SearchAssignmentsRequest', - 'SearchAssignmentsResponse', - 'SplitCapacityCommitmentRequest', - 'SplitCapacityCommitmentResponse', - 'TableReference', - 'UpdateAssignmentRequest', - 'UpdateBiReservationRequest', - 'UpdateCapacityCommitmentRequest', - 'UpdateReservationRequest', - 'Edition', -) diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation/gapic_version.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation/py.typed b/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation/py.typed deleted file mode 100644 index ff261ee1d848..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-bigquery-reservation package uses inline types. diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/__init__.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/__init__.py deleted file mode 100644 index 4c6822c0a1c0..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/__init__.py +++ /dev/null @@ -1,96 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
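As the two ``__init__.py`` files show, the versionless ``google.cloud.bigquery_reservation`` package simply re-exports the names defined in the versioned ``google.cloud.bigquery_reservation_v1`` package. A brief sketch of what that implies for callers:

.. code-block:: python

    # Both import surfaces expose the same objects; the versionless
    # package re-exports from the versioned one.
    from google.cloud import bigquery_reservation
    from google.cloud import bigquery_reservation_v1

    assert (
        bigquery_reservation.ReservationServiceClient
        is bigquery_reservation_v1.ReservationServiceClient
    )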
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.bigquery_reservation_v1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.reservation_service import ReservationServiceClient -from .services.reservation_service import ReservationServiceAsyncClient - -from .types.reservation import Assignment -from .types.reservation import BiReservation -from .types.reservation import CapacityCommitment -from .types.reservation import CreateAssignmentRequest -from .types.reservation import CreateCapacityCommitmentRequest -from .types.reservation import CreateReservationRequest -from .types.reservation import DeleteAssignmentRequest -from .types.reservation import DeleteCapacityCommitmentRequest -from .types.reservation import DeleteReservationRequest -from .types.reservation import FailoverReservationRequest -from .types.reservation import GetBiReservationRequest -from .types.reservation import GetCapacityCommitmentRequest -from .types.reservation import GetReservationRequest -from .types.reservation import ListAssignmentsRequest -from .types.reservation import ListAssignmentsResponse -from .types.reservation import ListCapacityCommitmentsRequest -from .types.reservation import ListCapacityCommitmentsResponse -from .types.reservation import ListReservationsRequest -from .types.reservation import ListReservationsResponse -from .types.reservation import MergeCapacityCommitmentsRequest -from .types.reservation import MoveAssignmentRequest -from .types.reservation import Reservation -from .types.reservation import SearchAllAssignmentsRequest -from .types.reservation import SearchAllAssignmentsResponse -from .types.reservation import SearchAssignmentsRequest -from .types.reservation import SearchAssignmentsResponse -from .types.reservation import SplitCapacityCommitmentRequest -from .types.reservation import SplitCapacityCommitmentResponse -from .types.reservation import TableReference -from .types.reservation import UpdateAssignmentRequest -from .types.reservation import UpdateBiReservationRequest -from .types.reservation import UpdateCapacityCommitmentRequest -from .types.reservation import UpdateReservationRequest -from .types.reservation import Edition - -__all__ = ( - 'ReservationServiceAsyncClient', -'Assignment', -'BiReservation', -'CapacityCommitment', -'CreateAssignmentRequest', -'CreateCapacityCommitmentRequest', -'CreateReservationRequest', -'DeleteAssignmentRequest', -'DeleteCapacityCommitmentRequest', -'DeleteReservationRequest', -'Edition', -'FailoverReservationRequest', -'GetBiReservationRequest', -'GetCapacityCommitmentRequest', -'GetReservationRequest', -'ListAssignmentsRequest', -'ListAssignmentsResponse', -'ListCapacityCommitmentsRequest', -'ListCapacityCommitmentsResponse', -'ListReservationsRequest', -'ListReservationsResponse', -'MergeCapacityCommitmentsRequest', -'MoveAssignmentRequest', -'Reservation', -'ReservationServiceClient', -'SearchAllAssignmentsRequest', -'SearchAllAssignmentsResponse', -'SearchAssignmentsRequest', -'SearchAssignmentsResponse', -'SplitCapacityCommitmentRequest', -'SplitCapacityCommitmentResponse', -'TableReference', 
-'UpdateAssignmentRequest', -'UpdateBiReservationRequest', -'UpdateCapacityCommitmentRequest', -'UpdateReservationRequest', -) diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/gapic_metadata.json b/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/gapic_metadata.json deleted file mode 100644 index 385c3265ef89..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/gapic_metadata.json +++ /dev/null @@ -1,358 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.bigquery_reservation_v1", - "protoPackage": "google.cloud.bigquery.reservation.v1", - "schema": "1.0", - "services": { - "ReservationService": { - "clients": { - "grpc": { - "libraryClient": "ReservationServiceClient", - "rpcs": { - "CreateAssignment": { - "methods": [ - "create_assignment" - ] - }, - "CreateCapacityCommitment": { - "methods": [ - "create_capacity_commitment" - ] - }, - "CreateReservation": { - "methods": [ - "create_reservation" - ] - }, - "DeleteAssignment": { - "methods": [ - "delete_assignment" - ] - }, - "DeleteCapacityCommitment": { - "methods": [ - "delete_capacity_commitment" - ] - }, - "DeleteReservation": { - "methods": [ - "delete_reservation" - ] - }, - "FailoverReservation": { - "methods": [ - "failover_reservation" - ] - }, - "GetBiReservation": { - "methods": [ - "get_bi_reservation" - ] - }, - "GetCapacityCommitment": { - "methods": [ - "get_capacity_commitment" - ] - }, - "GetReservation": { - "methods": [ - "get_reservation" - ] - }, - "ListAssignments": { - "methods": [ - "list_assignments" - ] - }, - "ListCapacityCommitments": { - "methods": [ - "list_capacity_commitments" - ] - }, - "ListReservations": { - "methods": [ - "list_reservations" - ] - }, - "MergeCapacityCommitments": { - "methods": [ - "merge_capacity_commitments" - ] - }, - "MoveAssignment": { - "methods": [ - "move_assignment" - ] - }, - "SearchAllAssignments": { - "methods": [ - "search_all_assignments" - ] - }, - "SearchAssignments": { - "methods": [ - "search_assignments" - ] - }, - "SplitCapacityCommitment": { - "methods": [ - "split_capacity_commitment" - ] - }, - "UpdateAssignment": { - "methods": [ - "update_assignment" - ] - }, - "UpdateBiReservation": { - "methods": [ - "update_bi_reservation" - ] - }, - "UpdateCapacityCommitment": { - "methods": [ - "update_capacity_commitment" - ] - }, - "UpdateReservation": { - "methods": [ - "update_reservation" - ] - } - } - }, - "grpc-async": { - "libraryClient": "ReservationServiceAsyncClient", - "rpcs": { - "CreateAssignment": { - "methods": [ - "create_assignment" - ] - }, - "CreateCapacityCommitment": { - "methods": [ - "create_capacity_commitment" - ] - }, - "CreateReservation": { - "methods": [ - "create_reservation" - ] - }, - "DeleteAssignment": { - "methods": [ - "delete_assignment" - ] - }, - "DeleteCapacityCommitment": { - "methods": [ - "delete_capacity_commitment" - ] - }, - "DeleteReservation": { - "methods": [ - "delete_reservation" - ] - }, - "FailoverReservation": { - "methods": [ - "failover_reservation" - ] - }, - "GetBiReservation": { - "methods": [ - "get_bi_reservation" - ] - }, - "GetCapacityCommitment": { - "methods": [ - "get_capacity_commitment" - ] - }, - "GetReservation": { - "methods": [ - "get_reservation" - ] - }, - "ListAssignments": { - "methods": [ - "list_assignments" - ] - }, - 
"ListCapacityCommitments": { - "methods": [ - "list_capacity_commitments" - ] - }, - "ListReservations": { - "methods": [ - "list_reservations" - ] - }, - "MergeCapacityCommitments": { - "methods": [ - "merge_capacity_commitments" - ] - }, - "MoveAssignment": { - "methods": [ - "move_assignment" - ] - }, - "SearchAllAssignments": { - "methods": [ - "search_all_assignments" - ] - }, - "SearchAssignments": { - "methods": [ - "search_assignments" - ] - }, - "SplitCapacityCommitment": { - "methods": [ - "split_capacity_commitment" - ] - }, - "UpdateAssignment": { - "methods": [ - "update_assignment" - ] - }, - "UpdateBiReservation": { - "methods": [ - "update_bi_reservation" - ] - }, - "UpdateCapacityCommitment": { - "methods": [ - "update_capacity_commitment" - ] - }, - "UpdateReservation": { - "methods": [ - "update_reservation" - ] - } - } - }, - "rest": { - "libraryClient": "ReservationServiceClient", - "rpcs": { - "CreateAssignment": { - "methods": [ - "create_assignment" - ] - }, - "CreateCapacityCommitment": { - "methods": [ - "create_capacity_commitment" - ] - }, - "CreateReservation": { - "methods": [ - "create_reservation" - ] - }, - "DeleteAssignment": { - "methods": [ - "delete_assignment" - ] - }, - "DeleteCapacityCommitment": { - "methods": [ - "delete_capacity_commitment" - ] - }, - "DeleteReservation": { - "methods": [ - "delete_reservation" - ] - }, - "FailoverReservation": { - "methods": [ - "failover_reservation" - ] - }, - "GetBiReservation": { - "methods": [ - "get_bi_reservation" - ] - }, - "GetCapacityCommitment": { - "methods": [ - "get_capacity_commitment" - ] - }, - "GetReservation": { - "methods": [ - "get_reservation" - ] - }, - "ListAssignments": { - "methods": [ - "list_assignments" - ] - }, - "ListCapacityCommitments": { - "methods": [ - "list_capacity_commitments" - ] - }, - "ListReservations": { - "methods": [ - "list_reservations" - ] - }, - "MergeCapacityCommitments": { - "methods": [ - "merge_capacity_commitments" - ] - }, - "MoveAssignment": { - "methods": [ - "move_assignment" - ] - }, - "SearchAllAssignments": { - "methods": [ - "search_all_assignments" - ] - }, - "SearchAssignments": { - "methods": [ - "search_assignments" - ] - }, - "SplitCapacityCommitment": { - "methods": [ - "split_capacity_commitment" - ] - }, - "UpdateAssignment": { - "methods": [ - "update_assignment" - ] - }, - "UpdateBiReservation": { - "methods": [ - "update_bi_reservation" - ] - }, - "UpdateCapacityCommitment": { - "methods": [ - "update_capacity_commitment" - ] - }, - "UpdateReservation": { - "methods": [ - "update_reservation" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/gapic_version.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/py.typed b/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/py.typed deleted file mode 100644 index ff261ee1d848..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-bigquery-reservation package uses inline types. diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/services/__init__.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/services/__init__.py deleted file mode 100644 index 8f6cf068242c..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/services/reservation_service/__init__.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/services/reservation_service/__init__.py deleted file mode 100644 index 83529dfc2cbd..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/services/reservation_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .client import ReservationServiceClient -from .async_client import ReservationServiceAsyncClient - -__all__ = ( - 'ReservationServiceClient', - 'ReservationServiceAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/services/reservation_service/async_client.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/services/reservation_service/async_client.py deleted file mode 100644 index 5bcc1b4cf36d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/services/reservation_service/async_client.py +++ /dev/null @@ -1,3017 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union -import warnings - -from google.cloud.bigquery_reservation_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.cloud.bigquery_reservation_v1.services.reservation_service import pagers -from google.cloud.bigquery_reservation_v1.types import reservation -from google.cloud.bigquery_reservation_v1.types import reservation as gcbr_reservation -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -from .transports.base import ReservationServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import ReservationServiceGrpcAsyncIOTransport -from .client import ReservationServiceClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -class ReservationServiceAsyncClient: - """This API allows users to manage their BigQuery reservations. - - A reservation provides computational resource guarantees, in the - form of `slots `__, to - users. A slot is a unit of computational power in BigQuery, and - serves as the basic unit of parallelism. In a scan of a - multi-partitioned table, a single slot operates on a single - partition of the table. 
A reservation resource exists as a child - resource of the admin project and location, e.g.: - ``projects/myproject/locations/US/reservations/reservationName``. - - A capacity commitment is a way to purchase compute capacity for - BigQuery jobs (in the form of slots) with some committed period of - usage. A capacity commitment resource exists as a child resource of - the admin project and location, e.g.: - ``projects/myproject/locations/US/capacityCommitments/id``. - """ - - _client: ReservationServiceClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = ReservationServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = ReservationServiceClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = ReservationServiceClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = ReservationServiceClient._DEFAULT_UNIVERSE - - assignment_path = staticmethod(ReservationServiceClient.assignment_path) - parse_assignment_path = staticmethod(ReservationServiceClient.parse_assignment_path) - bi_reservation_path = staticmethod(ReservationServiceClient.bi_reservation_path) - parse_bi_reservation_path = staticmethod(ReservationServiceClient.parse_bi_reservation_path) - capacity_commitment_path = staticmethod(ReservationServiceClient.capacity_commitment_path) - parse_capacity_commitment_path = staticmethod(ReservationServiceClient.parse_capacity_commitment_path) - reservation_path = staticmethod(ReservationServiceClient.reservation_path) - parse_reservation_path = staticmethod(ReservationServiceClient.parse_reservation_path) - common_billing_account_path = staticmethod(ReservationServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(ReservationServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(ReservationServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(ReservationServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(ReservationServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(ReservationServiceClient.parse_common_organization_path) - common_project_path = staticmethod(ReservationServiceClient.common_project_path) - parse_common_project_path = staticmethod(ReservationServiceClient.parse_common_project_path) - common_location_path = staticmethod(ReservationServiceClient.common_location_path) - parse_common_location_path = staticmethod(ReservationServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - ReservationServiceAsyncClient: The constructed client. - """ - return ReservationServiceClient.from_service_account_info.__func__(ReservationServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. 
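Both factory classmethods above forward to the synchronous client's implementations. A minimal usage sketch (the key-file path is a placeholder):

.. code-block:: python

    from google.cloud import bigquery_reservation_v1

    # Build the async client from a service account key file.
    client = bigquery_reservation_v1.ReservationServiceAsyncClient.from_service_account_file(
        "service-account.json",  # placeholder path
    )
    # from_service_account_json (defined just below) is an alias for
    # from_service_account_file.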
- - Returns: - ReservationServiceAsyncClient: The constructed client. - """ - return ReservationServiceClient.from_service_account_file.__func__(ReservationServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` is provided, use the provided one. - (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return ReservationServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> ReservationServiceTransport: - """Returns the transport used by the client instance. - - Returns: - ReservationServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = ReservationServiceClient.get_transport_class - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, ReservationServiceTransport, Callable[..., ReservationServiceTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the reservation service async client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,ReservationServiceTransport,Callable[..., ReservationServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport to use.
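As the constructor docstring here explains, an explicit ``api_endpoint`` on ``client_options`` takes precedence over the GOOGLE_API_USE_MTLS_ENDPOINT-driven selection. A hedged sketch (the endpoint value is assumed to be the service's public default, shown only for illustration):

.. code-block:: python

    from google.api_core.client_options import ClientOptions
    from google.cloud import bigquery_reservation_v1

    # An explicit api_endpoint bypasses the environment-variable-based
    # endpoint selection described in the docstring above.
    options = ClientOptions(api_endpoint="bigqueryreservation.googleapis.com")
    client = bigquery_reservation_v1.ReservationServiceAsyncClient(
        client_options=options,
    )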
- If a Callable is given, it will be called with the same set of initialization - arguments as used in the ReservationServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = ReservationServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.bigquery.reservation_v1.ReservationServiceAsyncClient`.", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "credentialsType": None, - } - ) - - async def create_reservation(self, - request: Optional[Union[gcbr_reservation.CreateReservationRequest, dict]] = None, - *, - parent: Optional[str] = None, - reservation: Optional[gcbr_reservation.Reservation] = None, - reservation_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gcbr_reservation.Reservation: - r"""Creates a new reservation resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_reservation_v1 - - async def sample_create_reservation(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.CreateReservationRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_reservation(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_reservation_v1.types.CreateReservationRequest, dict]]): - The request object. The request for - [ReservationService.CreateReservation][google.cloud.bigquery.reservation.v1.ReservationService.CreateReservation]. - parent (:class:`str`): - Required. Project, location. E.g., - ``projects/myproject/locations/US`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - reservation (:class:`google.cloud.bigquery_reservation_v1.types.Reservation`): - Definition of the new reservation to - create. - - This corresponds to the ``reservation`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - reservation_id (:class:`str`): - The reservation ID. It must only - contain lower case alphanumeric - characters or dashes. It must start with - a letter and must not end with a dash. - Its maximum length is 64 characters. - - This corresponds to the ``reservation_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_reservation_v1.types.Reservation: - A reservation is a mechanism used to - guarantee slots to users. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, reservation, reservation_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gcbr_reservation.CreateReservationRequest): - request = gcbr_reservation.CreateReservationRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if parent is not None: - request.parent = parent - if reservation is not None: - request.reservation = reservation - if reservation_id is not None: - request.reservation_id = reservation_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_reservation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_reservations(self, - request: Optional[Union[reservation.ListReservationsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListReservationsAsyncPager: - r"""Lists all the reservations for the project in the - specified location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_reservation_v1 - - async def sample_list_reservations(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.ListReservationsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_reservations(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_reservation_v1.types.ListReservationsRequest, dict]]): - The request object. The request for - [ReservationService.ListReservations][google.cloud.bigquery.reservation.v1.ReservationService.ListReservations]. - parent (:class:`str`): - Required. The parent resource name containing project - and location, e.g.: ``projects/myproject/locations/US`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_reservation_v1.services.reservation_service.pagers.ListReservationsAsyncPager: - The response for - [ReservationService.ListReservations][google.cloud.bigquery.reservation.v1.ReservationService.ListReservations]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. 
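The ``has_flattened_params`` guard above makes the ``request`` object and the flattened keyword arguments mutually exclusive. A sketch of the two supported calling styles, assuming an already-constructed async client:

.. code-block:: python

    from google.cloud import bigquery_reservation_v1

    async def demo(client: bigquery_reservation_v1.ReservationServiceAsyncClient):
        parent = "projects/myproject/locations/US"

        # Style 1: pass a full request object.
        request = bigquery_reservation_v1.CreateReservationRequest(parent=parent)
        await client.create_reservation(request=request)

        # Style 2: pass flattened fields; the client builds the request.
        await client.create_reservation(parent=parent)

        # Passing both `request` and a flattened field raises ValueError,
        # per the guard in the method body above.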
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, reservation.ListReservationsRequest): - request = reservation.ListReservationsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_reservations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListReservationsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_reservation(self, - request: Optional[Union[reservation.GetReservationRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> reservation.Reservation: - r"""Returns information about the reservation. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_reservation_v1 - - async def sample_get_reservation(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.GetReservationRequest( - name="name_value", - ) - - # Make the request - response = await client.get_reservation(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_reservation_v1.types.GetReservationRequest, dict]]): - The request object. The request for - [ReservationService.GetReservation][google.cloud.bigquery.reservation.v1.ReservationService.GetReservation]. - name (:class:`str`): - Required. Resource name of the reservation to retrieve. - E.g., - ``projects/myproject/locations/US/reservations/team1-prod`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
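The pager wrapper above resolves additional pages transparently. Besides the ``async for`` over individual items shown in the generated sample, a per-page view is also available; a sketch, assuming a constructed async client:

.. code-block:: python

    from google.cloud import bigquery_reservation_v1

    async def list_pages(client: bigquery_reservation_v1.ReservationServiceAsyncClient):
        pager = await client.list_reservations(
            parent="projects/myproject/locations/US",
        )
        # Iterate page by page instead of item by item; each page is a
        # ListReservationsResponse with its own `reservations` field.
        async for page in pager.pages:
            for reservation in page.reservations:
                print(reservation.name)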
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_reservation_v1.types.Reservation: - A reservation is a mechanism used to - guarantee slots to users. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, reservation.GetReservationRequest): - request = reservation.GetReservationRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_reservation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_reservation(self, - request: Optional[Union[reservation.DeleteReservationRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a reservation. Returns - ``google.rpc.Code.FAILED_PRECONDITION`` when reservation has - assignments. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_reservation_v1 - - async def sample_delete_reservation(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.DeleteReservationRequest( - name="name_value", - ) - - # Make the request - await client.delete_reservation(request=request) - - Args: - request (Optional[Union[google.cloud.bigquery_reservation_v1.types.DeleteReservationRequest, dict]]): - The request object. 
The request for
- [ReservationService.DeleteReservation][google.cloud.bigquery.reservation.v1.ReservationService.DeleteReservation].
- name (:class:`str`):
- Required. Resource name of the reservation to delete.
- E.g.,
- ``projects/myproject/locations/US/reservations/team1-prod``
-
- This corresponds to the ``name`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [name]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError("If the `request` argument is set, then none of "
- "the individual field arguments should be set.")
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, reservation.DeleteReservationRequest):
- request = reservation.DeleteReservationRequest(request)
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if name is not None:
- request.name = name
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._client._transport._wrapped_methods[self._client._transport.delete_reservation]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("name", request.name),
- )),
- )
-
- # Validate the universe domain.
- self._client._validate_universe_domain()
-
- # Send the request.
- await rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- async def update_reservation(self,
- request: Optional[Union[gcbr_reservation.UpdateReservationRequest, dict]] = None,
- *,
- reservation: Optional[gcbr_reservation.Reservation] = None,
- update_mask: Optional[field_mask_pb2.FieldMask] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> gcbr_reservation.Reservation:
- r"""Updates an existing reservation resource.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_reservation_v1 - - async def sample_update_reservation(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.UpdateReservationRequest( - ) - - # Make the request - response = await client.update_reservation(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_reservation_v1.types.UpdateReservationRequest, dict]]): - The request object. The request for - [ReservationService.UpdateReservation][google.cloud.bigquery.reservation.v1.ReservationService.UpdateReservation]. - reservation (:class:`google.cloud.bigquery_reservation_v1.types.Reservation`): - Content of the reservation to update. - This corresponds to the ``reservation`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Standard field mask for the set of - fields to be updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_reservation_v1.types.Reservation: - A reservation is a mechanism used to - guarantee slots to users. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [reservation, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, gcbr_reservation.UpdateReservationRequest): - request = gcbr_reservation.UpdateReservationRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if reservation is not None: - request.reservation = reservation - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_reservation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("reservation.name", request.reservation.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
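- # The wrapped call applies the transport's default retry/timeout when the
- # `retry`/`timeout` arguments are left as DEFAULT; failures surface as
- # google.api_core exceptions once any configured retries are exhausted.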
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def failover_reservation(self, - request: Optional[Union[reservation.FailoverReservationRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> reservation.Reservation: - r"""Fail over a reservation to the secondary location. The operation - should be done in the current secondary location, which will be - promoted to the new primary location for the reservation. - Attempting to failover a reservation in the current primary - location will fail with the error code - ``google.rpc.Code.FAILED_PRECONDITION``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_reservation_v1 - - async def sample_failover_reservation(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.FailoverReservationRequest( - name="name_value", - ) - - # Make the request - response = await client.failover_reservation(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_reservation_v1.types.FailoverReservationRequest, dict]]): - The request object. The request for - ReservationService.FailoverReservation. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_reservation_v1.types.Reservation: - A reservation is a mechanism used to - guarantee slots to users. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, reservation.FailoverReservationRequest): - request = reservation.FailoverReservationRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.failover_reservation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def create_capacity_commitment(self, - request: Optional[Union[reservation.CreateCapacityCommitmentRequest, dict]] = None, - *, - parent: Optional[str] = None, - capacity_commitment: Optional[reservation.CapacityCommitment] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> reservation.CapacityCommitment: - r"""Creates a new capacity commitment resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_reservation_v1 - - async def sample_create_capacity_commitment(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.CreateCapacityCommitmentRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_capacity_commitment(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_reservation_v1.types.CreateCapacityCommitmentRequest, dict]]): - The request object. The request for - [ReservationService.CreateCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.CreateCapacityCommitment]. - parent (:class:`str`): - Required. Resource name of the parent reservation. E.g., - ``projects/myproject/locations/US`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - capacity_commitment (:class:`google.cloud.bigquery_reservation_v1.types.CapacityCommitment`): - Content of the capacity commitment to - create. - - This corresponds to the ``capacity_commitment`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_reservation_v1.types.CapacityCommitment: - Capacity commitment is a way to - purchase compute capacity for BigQuery - jobs (in the form of slots) with some - committed period of usage. Annual - commitments renew by default. - Commitments can be removed after their - commitment end time passes. - - In order to remove annual commitment, - its plan needs to be changed to monthly - or flex first. - - A capacity commitment resource exists as - a child resource of the admin project. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
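- # (Passing both styles at once would be ambiguous, so it is rejected below.)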
- flattened_params = [parent, capacity_commitment] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, reservation.CreateCapacityCommitmentRequest): - request = reservation.CreateCapacityCommitmentRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if capacity_commitment is not None: - request.capacity_commitment = capacity_commitment - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_capacity_commitment] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_capacity_commitments(self, - request: Optional[Union[reservation.ListCapacityCommitmentsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListCapacityCommitmentsAsyncPager: - r"""Lists all the capacity commitments for the admin - project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_reservation_v1 - - async def sample_list_capacity_commitments(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.ListCapacityCommitmentsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_capacity_commitments(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_reservation_v1.types.ListCapacityCommitmentsRequest, dict]]): - The request object. The request for - [ReservationService.ListCapacityCommitments][google.cloud.bigquery.reservation.v1.ReservationService.ListCapacityCommitments]. - parent (:class:`str`): - Required. Resource name of the parent reservation. E.g., - ``projects/myproject/locations/US`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_reservation_v1.services.reservation_service.pagers.ListCapacityCommitmentsAsyncPager: - The response for - [ReservationService.ListCapacityCommitments][google.cloud.bigquery.reservation.v1.ReservationService.ListCapacityCommitments]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, reservation.ListCapacityCommitmentsRequest): - request = reservation.ListCapacityCommitmentsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_capacity_commitments] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListCapacityCommitmentsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_capacity_commitment(self, - request: Optional[Union[reservation.GetCapacityCommitmentRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> reservation.CapacityCommitment: - r"""Returns information about the capacity commitment. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_reservation_v1 - - async def sample_get_capacity_commitment(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.GetCapacityCommitmentRequest( - name="name_value", - ) - - # Make the request - response = await client.get_capacity_commitment(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_reservation_v1.types.GetCapacityCommitmentRequest, dict]]): - The request object. The request for - [ReservationService.GetCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.GetCapacityCommitment]. - name (:class:`str`): - Required. Resource name of the capacity commitment to - retrieve. E.g., - ``projects/myproject/locations/US/capacityCommitments/123`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_reservation_v1.types.CapacityCommitment: - Capacity commitment is a way to - purchase compute capacity for BigQuery - jobs (in the form of slots) with some - committed period of usage. Annual - commitments renew by default. - Commitments can be removed after their - commitment end time passes. - - In order to remove annual commitment, - its plan needs to be changed to monthly - or flex first. - - A capacity commitment resource exists as - a child resource of the admin project. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, reservation.GetCapacityCommitmentRequest): - request = reservation.GetCapacityCommitmentRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.get_capacity_commitment] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_capacity_commitment(self, - request: Optional[Union[reservation.DeleteCapacityCommitmentRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a capacity commitment. Attempting to delete capacity - commitment before its commitment_end_time will fail with the - error code ``google.rpc.Code.FAILED_PRECONDITION``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_reservation_v1 - - async def sample_delete_capacity_commitment(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.DeleteCapacityCommitmentRequest( - name="name_value", - ) - - # Make the request - await client.delete_capacity_commitment(request=request) - - Args: - request (Optional[Union[google.cloud.bigquery_reservation_v1.types.DeleteCapacityCommitmentRequest, dict]]): - The request object. The request for - [ReservationService.DeleteCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.DeleteCapacityCommitment]. - name (:class:`str`): - Required. Resource name of the capacity commitment to - delete. E.g., - ``projects/myproject/locations/US/capacityCommitments/123`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, reservation.DeleteCapacityCommitmentRequest): - request = reservation.DeleteCapacityCommitmentRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
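- # `_wrapped_methods` holds one pre-wrapped callable per RPC, so per-call
- # `retry`/`timeout` values only override the defaults configured there.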
- rpc = self._client._transport._wrapped_methods[self._client._transport.delete_capacity_commitment] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def update_capacity_commitment(self, - request: Optional[Union[reservation.UpdateCapacityCommitmentRequest, dict]] = None, - *, - capacity_commitment: Optional[reservation.CapacityCommitment] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> reservation.CapacityCommitment: - r"""Updates an existing capacity commitment. - - Only ``plan`` and ``renewal_plan`` fields can be updated. - - Plan can only be changed to a plan of a longer commitment - period. Attempting to change to a plan with shorter commitment - period will fail with the error code - ``google.rpc.Code.FAILED_PRECONDITION``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_reservation_v1 - - async def sample_update_capacity_commitment(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.UpdateCapacityCommitmentRequest( - ) - - # Make the request - response = await client.update_capacity_commitment(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_reservation_v1.types.UpdateCapacityCommitmentRequest, dict]]): - The request object. The request for - [ReservationService.UpdateCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.UpdateCapacityCommitment]. - capacity_commitment (:class:`google.cloud.bigquery_reservation_v1.types.CapacityCommitment`): - Content of the capacity commitment to - update. - - This corresponds to the ``capacity_commitment`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Standard field mask for the set of - fields to be updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
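-
- As a minimal sketch, a mask updating only the ``renewal_plan``
- field could be built as follows (the resource name and the
- ``FLEX`` plan value are illustrative):
-
- .. code-block:: python
-
- from google.protobuf import field_mask_pb2
-
- commitment = bigquery_reservation_v1.CapacityCommitment(
- name="projects/myproject/locations/US/capacityCommitments/123",
- renewal_plan=bigquery_reservation_v1.CapacityCommitment.CommitmentPlan.FLEX,
- )
- update_mask = field_mask_pb2.FieldMask(paths=["renewal_plan"])
-
- response = await client.update_capacity_commitment(
- capacity_commitment=commitment,
- update_mask=update_mask,
- )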
-
- Returns:
- google.cloud.bigquery_reservation_v1.types.CapacityCommitment:
- Capacity commitment is a way to
- purchase compute capacity for BigQuery
- jobs (in the form of slots) with some
- committed period of usage. Annual
- commitments renew by default.
- Commitments can be removed after their
- commitment end time passes.
-
- In order to remove annual commitment,
- its plan needs to be changed to monthly
- or flex first.
-
- A capacity commitment resource exists as
- a child resource of the admin project.
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [capacity_commitment, update_mask]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError("If the `request` argument is set, then none of "
- "the individual field arguments should be set.")
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, reservation.UpdateCapacityCommitmentRequest):
- request = reservation.UpdateCapacityCommitmentRequest(request)
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if capacity_commitment is not None:
- request.capacity_commitment = capacity_commitment
- if update_mask is not None:
- request.update_mask = update_mask
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._client._transport._wrapped_methods[self._client._transport.update_capacity_commitment]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("capacity_commitment.name", request.capacity_commitment.name),
- )),
- )
-
- # Validate the universe domain.
- self._client._validate_universe_domain()
-
- # Send the request.
- response = await rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- async def split_capacity_commitment(self,
- request: Optional[Union[reservation.SplitCapacityCommitmentRequest, dict]] = None,
- *,
- name: Optional[str] = None,
- slot_count: Optional[int] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> reservation.SplitCapacityCommitmentResponse:
- r"""Splits a capacity commitment into two commitments of the same plan
- and ``commitment_end_time``.
-
- A common use case is to enable downgrading commitments.
-
- For example, in order to downgrade from 10000 slots to 8000, you
- might split a 10000 capacity commitment into commitments of 2000
- and 8000. Then, you delete the first one after the commitment
- end time passes.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_reservation_v1 - - async def sample_split_capacity_commitment(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.SplitCapacityCommitmentRequest( - name="name_value", - ) - - # Make the request - response = await client.split_capacity_commitment(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_reservation_v1.types.SplitCapacityCommitmentRequest, dict]]): - The request object. The request for - [ReservationService.SplitCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.SplitCapacityCommitment]. - name (:class:`str`): - Required. The resource name e.g.,: - ``projects/myproject/locations/US/capacityCommitments/123`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - slot_count (:class:`int`): - Number of slots in the capacity - commitment after the split. - - This corresponds to the ``slot_count`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_reservation_v1.types.SplitCapacityCommitmentResponse: - The response for - [ReservationService.SplitCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.SplitCapacityCommitment]. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name, slot_count] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, reservation.SplitCapacityCommitmentRequest): - request = reservation.SplitCapacityCommitmentRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if slot_count is not None: - request.slot_count = slot_count - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.split_capacity_commitment] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
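- # The response message carries the two resulting commitments in its
- # `first` and `second` fields.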
- response = await rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- async def merge_capacity_commitments(self,
- request: Optional[Union[reservation.MergeCapacityCommitmentsRequest, dict]] = None,
- *,
- parent: Optional[str] = None,
- capacity_commitment_ids: Optional[MutableSequence[str]] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> reservation.CapacityCommitment:
- r"""Merges capacity commitments of the same plan into a single
- commitment.
-
- The resulting capacity commitment has the greater
- commitment_end_time out of the to-be-merged capacity
- commitments.
-
- Attempting to merge capacity commitments of different plans will
- fail with the error code
- ``google.rpc.Code.FAILED_PRECONDITION``.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import bigquery_reservation_v1
-
- async def sample_merge_capacity_commitments():
- # Create a client
- client = bigquery_reservation_v1.ReservationServiceAsyncClient()
-
- # Initialize request argument(s)
- request = bigquery_reservation_v1.MergeCapacityCommitmentsRequest(
- )
-
- # Make the request
- response = await client.merge_capacity_commitments(request=request)
-
- # Handle the response
- print(response)
-
- Args:
- request (Optional[Union[google.cloud.bigquery_reservation_v1.types.MergeCapacityCommitmentsRequest, dict]]):
- The request object. The request for
- [ReservationService.MergeCapacityCommitments][google.cloud.bigquery.reservation.v1.ReservationService.MergeCapacityCommitments].
- parent (:class:`str`):
- Parent resource that identifies admin project and
- location e.g., ``projects/myproject/locations/us``
-
- This corresponds to the ``parent`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- capacity_commitment_ids (:class:`MutableSequence[str]`):
- Ids of capacity commitments to merge.
- These capacity commitments must exist
- under admin project and location
- specified in the parent.
- ID is the last portion of capacity
- commitment name e.g., 'abc' for
- projects/myproject/locations/US/capacityCommitments/abc
-
- This corresponds to the ``capacity_commitment_ids`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.bigquery_reservation_v1.types.CapacityCommitment:
- Capacity commitment is a way to
- purchase compute capacity for BigQuery
- jobs (in the form of slots) with some
- committed period of usage. Annual
- commitments renew by default.
- Commitments can be removed after their
- commitment end time passes.
-
- In order to remove annual commitment,
- its plan needs to be changed to monthly
- or flex first.
-
- A capacity commitment resource exists as
- a child resource of the admin project.
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [parent, capacity_commitment_ids]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError("If the `request` argument is set, then none of "
- "the individual field arguments should be set.")
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, reservation.MergeCapacityCommitmentsRequest):
- request = reservation.MergeCapacityCommitmentsRequest(request)
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if parent is not None:
- request.parent = parent
- if capacity_commitment_ids:
- request.capacity_commitment_ids.extend(capacity_commitment_ids)
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._client._transport._wrapped_methods[self._client._transport.merge_capacity_commitments]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("parent", request.parent),
- )),
- )
-
- # Validate the universe domain.
- self._client._validate_universe_domain()
-
- # Send the request.
- response = await rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- async def create_assignment(self,
- request: Optional[Union[reservation.CreateAssignmentRequest, dict]] = None,
- *,
- parent: Optional[str] = None,
- assignment: Optional[reservation.Assignment] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> reservation.Assignment:
- r"""Creates an assignment object which allows the given project to
- submit jobs of a certain type using slots from the specified
- reservation.
-
- Currently a resource (project, folder, organization) can only
- have one assignment for each (job_type, location) combination,
- and that reservation will be used for all jobs of the matching
- type.
-
- Different assignments can be created on different levels of the
- projects, folders or organization hierarchy. During query
- execution, the assignment is looked up at the project, folder
- and organization levels in that order. The first assignment
- found is applied to the query.
-
- When creating assignments, it does not matter if other
- assignments exist at higher levels.
-
- Example:
-
- - The organization ``organizationA`` contains two projects,
- ``project1`` and ``project2``.
- - Assignments for all three entities (``organizationA``,
- ``project1``, and ``project2``) could all be created and
- mapped to the same or different reservations.
-
- "None" assignments represent an absence of the assignment.
- Projects assigned to None use on-demand pricing. To create a
- "None" assignment, use "none" as a reservation_id in the parent.
- Example parent: - ``projects/myproject/locations/US/reservations/none``. - - Returns ``google.rpc.Code.PERMISSION_DENIED`` if user does not - have 'bigquery.admin' permissions on the project using the - reservation and the project that owns this reservation. - - Returns ``google.rpc.Code.INVALID_ARGUMENT`` when location of - the assignment does not match location of the reservation. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_reservation_v1 - - async def sample_create_assignment(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.CreateAssignmentRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_assignment(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_reservation_v1.types.CreateAssignmentRequest, dict]]): - The request object. The request for - [ReservationService.CreateAssignment][google.cloud.bigquery.reservation.v1.ReservationService.CreateAssignment]. - Note: "bigquery.reservationAssignments.create" - permission is required on the related assignee. - parent (:class:`str`): - Required. The parent resource name of the assignment - E.g. - ``projects/myproject/locations/US/reservations/team1-prod`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - assignment (:class:`google.cloud.bigquery_reservation_v1.types.Assignment`): - Assignment resource to create. - This corresponds to the ``assignment`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_reservation_v1.types.Assignment: - An assignment allows a project to - submit jobs of a certain type using - slots from the specified reservation. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, assignment] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
- if not isinstance(request, reservation.CreateAssignmentRequest):
- request = reservation.CreateAssignmentRequest(request)
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if parent is not None:
- request.parent = parent
- if assignment is not None:
- request.assignment = assignment
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._client._transport._wrapped_methods[self._client._transport.create_assignment]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("parent", request.parent),
- )),
- )
-
- # Validate the universe domain.
- self._client._validate_universe_domain()
-
- # Send the request.
- response = await rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- async def list_assignments(self,
- request: Optional[Union[reservation.ListAssignmentsRequest, dict]] = None,
- *,
- parent: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> pagers.ListAssignmentsAsyncPager:
- r"""Lists assignments.
-
- Only explicitly created assignments will be returned.
-
- Example:
-
- - Organization ``organizationA`` contains two projects,
- ``project1`` and ``project2``.
- - Reservation ``res1`` exists and was created previously.
- - CreateAssignment was used previously to define the following
- associations between entities and reservations:
- ``<organizationA, res1>`` and ``<project1, res1>``
-
- In this example, ListAssignments will just return the above two
- assignments for reservation ``res1``, and no expansion/merge
- will happen.
-
- The wildcard "-" can be used for reservations in the request. In
- that case all assignments belonging to the specified project and
- location will be listed.
-
- **Note** "-" cannot be used for projects nor locations.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import bigquery_reservation_v1
-
- async def sample_list_assignments():
- # Create a client
- client = bigquery_reservation_v1.ReservationServiceAsyncClient()
-
- # Initialize request argument(s)
- request = bigquery_reservation_v1.ListAssignmentsRequest(
- parent="parent_value",
- )
-
- # Make the request
- page_result = client.list_assignments(request=request)
-
- # Handle the response
- async for response in page_result:
- print(response)
-
- Args:
- request (Optional[Union[google.cloud.bigquery_reservation_v1.types.ListAssignmentsRequest, dict]]):
- The request object. The request for
- [ReservationService.ListAssignments][google.cloud.bigquery.reservation.v1.ReservationService.ListAssignments].
- parent (:class:`str`):
- Required. The parent resource name e.g.:
-
- ``projects/myproject/locations/US/reservations/team1-prod``
-
- Or:
-
- ``projects/myproject/locations/US/reservations/-``
-
- This corresponds to the ``parent`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.bigquery_reservation_v1.services.reservation_service.pagers.ListAssignmentsAsyncPager:
- The response for
- [ReservationService.ListAssignments][google.cloud.bigquery.reservation.v1.ReservationService.ListAssignments].
-
- Iterating over this object will yield results and
- resolve additional pages automatically.
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [parent]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError("If the `request` argument is set, then none of "
- "the individual field arguments should be set.")
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, reservation.ListAssignmentsRequest):
- request = reservation.ListAssignmentsRequest(request)
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if parent is not None:
- request.parent = parent
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._client._transport._wrapped_methods[self._client._transport.list_assignments]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("parent", request.parent),
- )),
- )
-
- # Validate the universe domain.
- self._client._validate_universe_domain()
-
- # Send the request.
- response = await rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # This method is paged; wrap the response in a pager, which provides
- # an `__aiter__` convenience method.
- response = pagers.ListAssignmentsAsyncPager(
- method=rpc,
- request=request,
- response=response,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- async def delete_assignment(self,
- request: Optional[Union[reservation.DeleteAssignmentRequest, dict]] = None,
- *,
- name: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> None:
- r"""Deletes an assignment. No expansion will happen.
-
- Example:
-
- - Organization ``organizationA`` contains two projects,
- ``project1`` and ``project2``.
- - Reservation ``res1`` exists and was created previously.
- - CreateAssignment was used previously to define the following
- associations between entities and reservations:
- ``<organizationA, res1>`` and ``<project1, res1>``
-
- In this example, deletion of the ``<organizationA, res1>``
- assignment won't affect the other assignment
- ``<project1, res1>``. After said deletion, queries from
- ``project1`` will still use ``res1`` while queries from
- ``project2`` will switch to use on-demand mode.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import bigquery_reservation_v1
-
- async def sample_delete_assignment():
- # Create a client
- client = bigquery_reservation_v1.ReservationServiceAsyncClient()
-
- # Initialize request argument(s)
- request = bigquery_reservation_v1.DeleteAssignmentRequest(
- name="name_value",
- )
-
- # Make the request
- await client.delete_assignment(request=request)
-
- Args:
- request (Optional[Union[google.cloud.bigquery_reservation_v1.types.DeleteAssignmentRequest, dict]]):
- The request object. The request for
- [ReservationService.DeleteAssignment][google.cloud.bigquery.reservation.v1.ReservationService.DeleteAssignment].
- Note: "bigquery.reservationAssignments.delete"
- permission is required on the related assignee.
- name (:class:`str`):
- Required. Name of the resource, e.g.
- ``projects/myproject/locations/US/reservations/team1-prod/assignments/123``
-
- This corresponds to the ``name`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [name]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError("If the `request` argument is set, then none of "
- "the individual field arguments should be set.")
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, reservation.DeleteAssignmentRequest):
- request = reservation.DeleteAssignmentRequest(request)
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if name is not None:
- request.name = name
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._client._transport._wrapped_methods[self._client._transport.delete_assignment]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("name", request.name),
- )),
- )
-
- # Validate the universe domain.
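- # (Confirms the client's configured universe domain matches the
- # credentials' universe domain before the request is sent.)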
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-    async def search_assignments(self,
-            request: Optional[Union[reservation.SearchAssignmentsRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            query: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> pagers.SearchAssignmentsAsyncPager:
-        r"""Deprecated: Looks up assignments for a specified resource for a
-        particular region. If the request is about a project:
-
-        1. Assignments created on the project will be returned if they
-           exist.
-        2. Otherwise assignments created on the closest ancestor will be
-           returned.
-        3. Assignments for different JobTypes will all be returned.
-
-        The same logic applies if the request is about a folder.
-
-        If the request is about an organization, then assignments
-        created on the organization will be returned (organization
-        doesn't have ancestors).
-
-        Compared to ListAssignments, there are some behavior
-        differences:
-
-        1. Permission on the assignee will be verified in this API.
-        2. Hierarchy lookup (project->folder->organization) happens in
-           this API.
-        3. Parent here is ``projects/*/locations/*``, instead of
-           ``projects/*/locations/*/reservations/*``.
-
-        **Note** "-" cannot be used for projects or locations.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import bigquery_reservation_v1
-
-            async def sample_search_assignments():
-                # Create a client
-                client = bigquery_reservation_v1.ReservationServiceAsyncClient()
-
-                # Initialize request argument(s)
-                request = bigquery_reservation_v1.SearchAssignmentsRequest(
-                    parent="parent_value",
-                )
-
-                # Make the request
-                page_result = await client.search_assignments(request=request)
-
-                # Handle the response
-                async for response in page_result:
-                    print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.bigquery_reservation_v1.types.SearchAssignmentsRequest, dict]]):
-                The request object. The request for
-                [ReservationService.SearchAssignments][google.cloud.bigquery.reservation.v1.ReservationService.SearchAssignments].
-                Note: "bigquery.reservationAssignments.search"
-                permission is required on the related assignee.
-            parent (:class:`str`):
-                Required. The resource name of the admin
-                project (containing project and location), e.g.:
-                ``projects/myproject/locations/US``.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            query (:class:`str`):
-                Please specify the resource name as assignee in the query.
-
-                Examples:
-
-                -  ``assignee=projects/myproject``
-                -  ``assignee=folders/123``
-                -  ``assignee=organizations/456``
-
-                This corresponds to the ``query`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_reservation_v1.services.reservation_service.pagers.SearchAssignmentsAsyncPager: - The response for - [ReservationService.SearchAssignments][google.cloud.bigquery.reservation.v1.ReservationService.SearchAssignments]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - warnings.warn("ReservationServiceAsyncClient.search_assignments is deprecated", - DeprecationWarning) - - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, query] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, reservation.SearchAssignmentsRequest): - request = reservation.SearchAssignmentsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if query is not None: - request.query = query - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.search_assignments] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.SearchAssignmentsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def search_all_assignments(self, - request: Optional[Union[reservation.SearchAllAssignmentsRequest, dict]] = None, - *, - parent: Optional[str] = None, - query: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.SearchAllAssignmentsAsyncPager: - r"""Looks up assignments for a specified resource for a particular - region. If the request is about a project: - - 1. Assignments created on the project will be returned if they - exist. - 2. Otherwise assignments created on the closest ancestor will be - returned. - 3. Assignments for different JobTypes will all be returned. - - The same logic applies if the request is about a folder. - - If the request is about an organization, then assignments - created on the organization will be returned (organization - doesn't have ancestors). 
-
-        Compared to ListAssignments, there are some behavior
-        differences:
-
-        1. Permission on the assignee will be verified in this API.
-        2. Hierarchy lookup (project->folder->organization) happens in
-           this API.
-        3. Parent here is ``projects/*/locations/*``, instead of
-           ``projects/*/locations/*/reservations/*``.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import bigquery_reservation_v1
-
-            async def sample_search_all_assignments():
-                # Create a client
-                client = bigquery_reservation_v1.ReservationServiceAsyncClient()
-
-                # Initialize request argument(s)
-                request = bigquery_reservation_v1.SearchAllAssignmentsRequest(
-                    parent="parent_value",
-                )
-
-                # Make the request
-                page_result = await client.search_all_assignments(request=request)
-
-                # Handle the response
-                async for response in page_result:
-                    print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.bigquery_reservation_v1.types.SearchAllAssignmentsRequest, dict]]):
-                The request object. The request for
-                [ReservationService.SearchAllAssignments][google.cloud.bigquery.reservation.v1.ReservationService.SearchAllAssignments].
-                Note: "bigquery.reservationAssignments.search"
-                permission is required on the related assignee.
-            parent (:class:`str`):
-                Required. The resource name with location (project name
-                could be the wildcard '-'), e.g.:
-                ``projects/-/locations/US``.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            query (:class:`str`):
-                Please specify the resource name as assignee in the query.
-
-                Examples:
-
-                -  ``assignee=projects/myproject``
-                -  ``assignee=folders/123``
-                -  ``assignee=organizations/456``
-
-                This corresponds to the ``query`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.bigquery_reservation_v1.services.reservation_service.pagers.SearchAllAssignmentsAsyncPager:
-                The response for
-                [ReservationService.SearchAllAssignments][google.cloud.bigquery.reservation.v1.ReservationService.SearchAllAssignments].
-
-                Iterating over this object will yield results and
-                resolve additional pages automatically.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
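-        # (Callers may pass either a fully formed request object or the
-        # flattened ``parent``/``query`` fields, never both; mixing the two
-        # raises the ValueError below.)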
- flattened_params = [parent, query] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, reservation.SearchAllAssignmentsRequest): - request = reservation.SearchAllAssignmentsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if query is not None: - request.query = query - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.search_all_assignments] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.SearchAllAssignmentsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def move_assignment(self, - request: Optional[Union[reservation.MoveAssignmentRequest, dict]] = None, - *, - name: Optional[str] = None, - destination_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> reservation.Assignment: - r"""Moves an assignment under a new reservation. - - This differs from removing an existing assignment and - recreating a new one by providing a transactional change - that ensures an assignee always has an associated - reservation. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_reservation_v1 - - async def sample_move_assignment(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.MoveAssignmentRequest( - name="name_value", - ) - - # Make the request - response = await client.move_assignment(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_reservation_v1.types.MoveAssignmentRequest, dict]]): - The request object. The request for - [ReservationService.MoveAssignment][google.cloud.bigquery.reservation.v1.ReservationService.MoveAssignment]. - - **Note**: "bigquery.reservationAssignments.create" - permission is required on the destination_id. 
- - **Note**: "bigquery.reservationAssignments.create" and - "bigquery.reservationAssignments.delete" permission are - required on the related assignee. - name (:class:`str`): - Required. The resource name of the assignment, e.g. - ``projects/myproject/locations/US/reservations/team1-prod/assignments/123`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - destination_id (:class:`str`): - The new reservation ID, e.g.: - ``projects/myotherproject/locations/US/reservations/team2-prod`` - - This corresponds to the ``destination_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_reservation_v1.types.Assignment: - An assignment allows a project to - submit jobs of a certain type using - slots from the specified reservation. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name, destination_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, reservation.MoveAssignmentRequest): - request = reservation.MoveAssignmentRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if destination_id is not None: - request.destination_id = destination_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.move_assignment] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_assignment(self, - request: Optional[Union[reservation.UpdateAssignmentRequest, dict]] = None, - *, - assignment: Optional[reservation.Assignment] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> reservation.Assignment: - r"""Updates an existing assignment. - - Only the ``priority`` field can be updated. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_reservation_v1 - - async def sample_update_assignment(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.UpdateAssignmentRequest( - ) - - # Make the request - response = await client.update_assignment(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_reservation_v1.types.UpdateAssignmentRequest, dict]]): - The request object. The request for - [ReservationService.UpdateAssignment][google.cloud.bigquery.reservation.v1.ReservationService.UpdateAssignment]. - assignment (:class:`google.cloud.bigquery_reservation_v1.types.Assignment`): - Content of the assignment to update. - This corresponds to the ``assignment`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Standard field mask for the set of - fields to be updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_reservation_v1.types.Assignment: - An assignment allows a project to - submit jobs of a certain type using - slots from the specified reservation. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [assignment, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, reservation.UpdateAssignmentRequest): - request = reservation.UpdateAssignmentRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if assignment is not None: - request.assignment = assignment - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_assignment] - - # Certain fields should be provided within the metadata header; - # add these here. 
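-        # The routing header tells the backend which resource the call
-        # targets; for updates it is keyed on ``assignment.name`` rather
-        # than a top-level ``name`` field.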
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("assignment.name", request.assignment.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_bi_reservation(self, - request: Optional[Union[reservation.GetBiReservationRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> reservation.BiReservation: - r"""Retrieves a BI reservation. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_reservation_v1 - - async def sample_get_bi_reservation(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.GetBiReservationRequest( - name="name_value", - ) - - # Make the request - response = await client.get_bi_reservation(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.bigquery_reservation_v1.types.GetBiReservationRequest, dict]]): - The request object. A request to get a singleton BI - reservation. - name (:class:`str`): - Required. Name of the requested reservation, for - example: - ``projects/{project_id}/locations/{location_id}/biReservation`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_reservation_v1.types.BiReservation: - Represents a BI Reservation. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, reservation.GetBiReservationRequest): - request = reservation.GetBiReservationRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
-        if name is not None:
-            request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.get_bi_reservation]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def update_bi_reservation(self,
-            request: Optional[Union[reservation.UpdateBiReservationRequest, dict]] = None,
-            *,
-            bi_reservation: Optional[reservation.BiReservation] = None,
-            update_mask: Optional[field_mask_pb2.FieldMask] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> reservation.BiReservation:
-        r"""Updates a BI reservation.
-
-        Only fields specified in the ``field_mask`` are updated.
-
-        A singleton BI reservation always exists with default size 0. To
-        reserve BI capacity, update the reservation to a size greater
-        than 0. To release BI capacity, set the reservation size back
-        to 0.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import bigquery_reservation_v1
-
-            async def sample_update_bi_reservation():
-                # Create a client
-                client = bigquery_reservation_v1.ReservationServiceAsyncClient()
-
-                # Initialize request argument(s)
-                request = bigquery_reservation_v1.UpdateBiReservationRequest(
-                )
-
-                # Make the request
-                response = await client.update_bi_reservation(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.bigquery_reservation_v1.types.UpdateBiReservationRequest, dict]]):
-                The request object. A request to update a BI reservation.
-            bi_reservation (:class:`google.cloud.bigquery_reservation_v1.types.BiReservation`):
-                A reservation to update.
-                This corresponds to the ``bi_reservation`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
-                A list of fields to be updated in
-                this request.
-
-                This corresponds to the ``update_mask`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.bigquery_reservation_v1.types.BiReservation:
-                Represents a BI Reservation.
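-
-        A rough sketch of reserving BI capacity by updating ``size`` under a
-        field mask (the resource name and byte count below are illustrative,
-        not prescribed values):
-
-        .. code-block:: python
-
-            from google.protobuf import field_mask_pb2
-
-            bi_reservation = bigquery_reservation_v1.BiReservation(
-                name="projects/myproject/locations/US/biReservation",
-                size=100 * 2**30,  # 100 GiB, illustrative
-            )
-            response = await client.update_bi_reservation(
-                bi_reservation=bi_reservation,
-                update_mask=field_mask_pb2.FieldMask(paths=["size"]),
-            )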
- """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [bi_reservation, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, reservation.UpdateBiReservationRequest): - request = reservation.UpdateBiReservationRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if bi_reservation is not None: - request.bi_reservation = bi_reservation - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.update_bi_reservation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("bi_reservation.name", request.bi_reservation.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def __aenter__(self) -> "ReservationServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "ReservationServiceAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/services/reservation_service/client.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/services/reservation_service/client.py deleted file mode 100644 index f9c3059a06de..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/services/reservation_service/client.py +++ /dev/null @@ -1,3384 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#
-from collections import OrderedDict
-from http import HTTPStatus
-import json
-import logging as std_logging
-import os
-import re
-from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast
-import warnings
-
-from google.cloud.bigquery_reservation_v1 import gapic_version as package_version
-
-from google.api_core import client_options as client_options_lib
-from google.api_core import exceptions as core_exceptions
-from google.api_core import gapic_v1
-from google.api_core import retry as retries
-from google.auth import credentials as ga_credentials  # type: ignore
-from google.auth.transport import mtls  # type: ignore
-from google.auth.transport.grpc import SslCredentials  # type: ignore
-from google.auth.exceptions import MutualTLSChannelError  # type: ignore
-from google.oauth2 import service_account  # type: ignore
-
-try:
-    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
-except AttributeError:  # pragma: NO COVER
-    OptionalRetry = Union[retries.Retry, object, None]  # type: ignore
-
-try:
-    from google.api_core import client_logging  # type: ignore
-    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
-except ImportError:  # pragma: NO COVER
-    CLIENT_LOGGING_SUPPORTED = False
-
-_LOGGER = std_logging.getLogger(__name__)
-
-from google.cloud.bigquery_reservation_v1.services.reservation_service import pagers
-from google.cloud.bigquery_reservation_v1.types import reservation
-from google.cloud.bigquery_reservation_v1.types import reservation as gcbr_reservation
-from google.protobuf import field_mask_pb2  # type: ignore
-from google.protobuf import timestamp_pb2  # type: ignore
-from google.rpc import status_pb2  # type: ignore
-from .transports.base import ReservationServiceTransport, DEFAULT_CLIENT_INFO
-from .transports.grpc import ReservationServiceGrpcTransport
-from .transports.grpc_asyncio import ReservationServiceGrpcAsyncIOTransport
-from .transports.rest import ReservationServiceRestTransport
-
-
-class ReservationServiceClientMeta(type):
-    """Metaclass for the ReservationService client.
-
-    This provides class-level methods for building and retrieving
-    support objects (e.g. transport) without polluting the client instance
-    objects.
-    """
-    _transport_registry = OrderedDict()  # type: Dict[str, Type[ReservationServiceTransport]]
-    _transport_registry["grpc"] = ReservationServiceGrpcTransport
-    _transport_registry["grpc_asyncio"] = ReservationServiceGrpcAsyncIOTransport
-    _transport_registry["rest"] = ReservationServiceRestTransport
-
-    def get_transport_class(cls,
-            label: Optional[str] = None,
-        ) -> Type[ReservationServiceTransport]:
-        """Returns an appropriate transport class.
-
-        Args:
-            label: The name of the desired transport. If none is
-                provided, then the first transport in the registry is used.
-
-        Returns:
-            The transport class to use.
-        """
-        # If a specific transport is requested, return that one.
-        if label:
-            return cls._transport_registry[label]
-
-        # No transport is requested; return the default (that is, the first one
-        # in the dictionary).
-        return next(iter(cls._transport_registry.values()))
-
-
-class ReservationServiceClient(metaclass=ReservationServiceClientMeta):
-    """This API allows users to manage their BigQuery reservations.
-
-    A reservation provides computational resource guarantees, in the
-    form of `slots <https://cloud.google.com/bigquery/docs/slots>`__, to
-    users. A slot is a unit of computational power in BigQuery, and
-    serves as the basic unit of parallelism.
-    In a scan of a multi-partitioned table, a single slot operates on a
-    single partition of the table. A reservation resource exists as a
-    child resource of the admin project and location, e.g.:
-    ``projects/myproject/locations/US/reservations/reservationName``.
-
-    A capacity commitment is a way to purchase compute capacity for
-    BigQuery jobs (in the form of slots) with some committed period of
-    usage. A capacity commitment resource exists as a child resource of
-    the admin project and location, e.g.:
-    ``projects/myproject/locations/US/capacityCommitments/id``.
-    """
-
-    @staticmethod
-    def _get_default_mtls_endpoint(api_endpoint):
-        """Converts api endpoint to mTLS endpoint.
-
-        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
-        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
-        Args:
-            api_endpoint (Optional[str]): the api endpoint to convert.
-        Returns:
-            str: converted mTLS api endpoint.
-        """
-        if not api_endpoint:
-            return api_endpoint
-
-        mtls_endpoint_re = re.compile(
-            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
-        )
-
-        m = mtls_endpoint_re.match(api_endpoint)
-        name, mtls, sandbox, googledomain = m.groups()
-        if mtls or not googledomain:
-            return api_endpoint
-
-        if sandbox:
-            return api_endpoint.replace(
-                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
-            )
-
-        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
-
-    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
-    DEFAULT_ENDPOINT = "bigqueryreservation.googleapis.com"
-    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
-        DEFAULT_ENDPOINT
-    )
-
-    _DEFAULT_ENDPOINT_TEMPLATE = "bigqueryreservation.{UNIVERSE_DOMAIN}"
-    _DEFAULT_UNIVERSE = "googleapis.com"
-
-    @classmethod
-    def from_service_account_info(cls, info: dict, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            info.
-
-        Args:
-            info (dict): The service account private key info.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            ReservationServiceClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_info(info)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    @classmethod
-    def from_service_account_file(cls, filename: str, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            file.
-
-        Args:
-            filename (str): The path to the service account private key json
-                file.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            ReservationServiceClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_file(
-            filename)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    from_service_account_json = from_service_account_file
-
-    @property
-    def transport(self) -> ReservationServiceTransport:
-        """Returns the transport used by the client instance.
-
-        Returns:
-            ReservationServiceTransport: The transport used by the client
-                instance.
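-
-        A quick sketch (assumes default credentials resolve; the transport
-        label follows the registry above):
-
-        .. code-block:: python
-
-            client = ReservationServiceClient(transport="rest")
-            assert isinstance(client.transport, ReservationServiceRestTransport)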
- """ - return self._transport - - @staticmethod - def assignment_path(project: str,location: str,reservation: str,assignment: str,) -> str: - """Returns a fully-qualified assignment string.""" - return "projects/{project}/locations/{location}/reservations/{reservation}/assignments/{assignment}".format(project=project, location=location, reservation=reservation, assignment=assignment, ) - - @staticmethod - def parse_assignment_path(path: str) -> Dict[str,str]: - """Parses a assignment path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/reservations/(?P.+?)/assignments/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def bi_reservation_path(project: str,location: str,) -> str: - """Returns a fully-qualified bi_reservation string.""" - return "projects/{project}/locations/{location}/biReservation".format(project=project, location=location, ) - - @staticmethod - def parse_bi_reservation_path(path: str) -> Dict[str,str]: - """Parses a bi_reservation path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/biReservation$", path) - return m.groupdict() if m else {} - - @staticmethod - def capacity_commitment_path(project: str,location: str,capacity_commitment: str,) -> str: - """Returns a fully-qualified capacity_commitment string.""" - return "projects/{project}/locations/{location}/capacityCommitments/{capacity_commitment}".format(project=project, location=location, capacity_commitment=capacity_commitment, ) - - @staticmethod - def parse_capacity_commitment_path(path: str) -> Dict[str,str]: - """Parses a capacity_commitment path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/capacityCommitments/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def reservation_path(project: str,location: str,reservation: str,) -> str: - """Returns a fully-qualified reservation string.""" - return "projects/{project}/locations/{location}/reservations/{reservation}".format(project=project, location=location, reservation=reservation, ) - - @staticmethod - def parse_reservation_path(path: str) -> Dict[str,str]: - """Parses a reservation path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/reservations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component 
segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Deprecated. Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. 
- if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. - - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"]. - """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. - use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. 
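-
-        For instance (illustrative arguments; ``cert_source`` stands in for
-        any truthy client certificate source), "auto" with a certificate
-        resolves to the mTLS endpoint:
-
-        .. code-block:: python
-
-            ReservationServiceClient._get_api_endpoint(
-                None, cert_source, "googleapis.com", "auto")
-            # -> "bigqueryreservation.mtls.googleapis.com"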
- """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = ReservationServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = ReservationServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = ReservationServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. - """ - universe_domain = ReservationServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. - """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, ReservationServiceTransport, Callable[..., ReservationServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the reservation service client. 
-
-        Args:
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-            transport (Optional[Union[str,ReservationServiceTransport,Callable[..., ReservationServiceTransport]]]):
-                The transport to use, or a Callable that constructs and returns a new transport.
-                If a Callable is given, it will be called with the same set of initialization
-                arguments as used in the ReservationServiceTransport constructor.
-                If set to None, a transport is chosen automatically.
-            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
-                Custom options for the client.
-
-                1. The ``api_endpoint`` property can be used to override the
-                default endpoint provided by the client when ``transport`` is
-                not explicitly provided. Only if this property is not set and
-                ``transport`` was not explicitly provided, the endpoint is
-                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
-                variable, which can have one of the following values:
-                "always" (always use the default mTLS endpoint), "never" (always
-                use the default regular endpoint) and "auto" (auto-switch to the
-                default mTLS endpoint if client certificate is present; this is
-                the default value).
-
-                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
-                is "true", then the ``client_cert_source`` property can be used
-                to provide a client certificate for mTLS transport. If
-                not provided, the default SSL client certificate will be used if
-                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
-                set, no client certificate will be used.
-
-                3. The ``universe_domain`` property can be used to override the
-                default "googleapis.com" universe. Note that the ``api_endpoint``
-                property still takes precedence; and ``universe_domain`` is
-                currently not supported for mTLS.
-
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
-        """
-        self._client_options = client_options
-        if isinstance(self._client_options, dict):
-            self._client_options = client_options_lib.from_dict(self._client_options)
-        if self._client_options is None:
-            self._client_options = client_options_lib.ClientOptions()
-        self._client_options = cast(client_options_lib.ClientOptions, self._client_options)
-
-        universe_domain_opt = getattr(self._client_options, 'universe_domain', None)
-
-        self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = ReservationServiceClient._read_environment_variables()
-        self._client_cert_source = ReservationServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert)
-        self._universe_domain = ReservationServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env)
-        self._api_endpoint = None  # updated below, depending on `transport`
-
-        # Initialize the universe domain validation.
-        self._is_universe_domain_valid = False
-
-        if CLIENT_LOGGING_SUPPORTED:  # pragma: NO COVER
-            # Setup logging.
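-            # ``client_logging`` ships with newer google-api-core releases;
-            # when present, this installs any environment-configured handlers
-            # for this library's loggers.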
- client_logging.initialize_logging()
-
- api_key_value = getattr(self._client_options, "api_key", None)
- if api_key_value and credentials:
- raise ValueError("client_options.api_key and credentials are mutually exclusive")
-
- # Save or instantiate the transport.
- # Ordinarily, we provide the transport, but allowing a custom transport
- # instance provides an extensibility point for unusual situations.
- transport_provided = isinstance(transport, ReservationServiceTransport)
- if transport_provided:
- # transport is a ReservationServiceTransport instance.
- if credentials or self._client_options.credentials_file or api_key_value:
- raise ValueError("When providing a transport instance, "
- "provide its credentials directly.")
- if self._client_options.scopes:
- raise ValueError(
- "When providing a transport instance, provide its scopes "
- "directly."
- )
- self._transport = cast(ReservationServiceTransport, transport)
- self._api_endpoint = self._transport.host
-
- self._api_endpoint = (self._api_endpoint or
- ReservationServiceClient._get_api_endpoint(
- self._client_options.api_endpoint,
- self._client_cert_source,
- self._universe_domain,
- self._use_mtls_endpoint))
-
- if not transport_provided:
- import google.auth._default # type: ignore
-
- if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"):
- credentials = google.auth._default.get_api_key_credentials(api_key_value)
-
- transport_init: Union[Type[ReservationServiceTransport], Callable[..., ReservationServiceTransport]] = (
- ReservationServiceClient.get_transport_class(transport)
- if isinstance(transport, str) or transport is None
- else cast(Callable[..., ReservationServiceTransport], transport)
- )
- # Initialize with the provided callable or the passed-in class.
- self._transport = transport_init(
- credentials=credentials,
- credentials_file=self._client_options.credentials_file,
- host=self._api_endpoint,
- scopes=self._client_options.scopes,
- client_cert_source_for_mtls=self._client_cert_source,
- quota_project_id=self._client_options.quota_project_id,
- client_info=client_info,
- always_use_jwt_access=True,
- api_audience=self._client_options.api_audience,
- )
-
- if "async" not in str(self._transport):
- if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER
- _LOGGER.debug(
- "Created client `google.cloud.bigquery.reservation_v1.ReservationServiceClient`.",
- extra = {
- "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService",
- "universeDomain": getattr(self._transport._credentials, "universe_domain", ""),
- "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}",
- "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(),
- } if hasattr(self._transport, "_credentials") else {
- "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService",
- "credentialsType": None,
- }
- )
-
- def create_reservation(self,
- request: Optional[Union[gcbr_reservation.CreateReservationRequest, dict]] = None,
- *,
- parent: Optional[str] = None,
- reservation: Optional[gcbr_reservation.Reservation] = None,
- reservation_id: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> gcbr_reservation.Reservation:
- r"""Creates a new reservation resource.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import bigquery_reservation_v1
-
- def sample_create_reservation():
- # Create a client
- client = bigquery_reservation_v1.ReservationServiceClient()
-
- # Initialize request argument(s)
- request = bigquery_reservation_v1.CreateReservationRequest(
- parent="parent_value",
- )
-
- # Make the request
- response = client.create_reservation(request=request)
-
- # Handle the response
- print(response)
-
- Args:
- request (Union[google.cloud.bigquery_reservation_v1.types.CreateReservationRequest, dict]):
- The request object. The request for
- [ReservationService.CreateReservation][google.cloud.bigquery.reservation.v1.ReservationService.CreateReservation].
- parent (str):
- Required. The parent resource name containing the project and
- location. E.g., ``projects/myproject/locations/US``
-
- This corresponds to the ``parent`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- reservation (google.cloud.bigquery_reservation_v1.types.Reservation):
- Definition of the new reservation to
- create.
-
- This corresponds to the ``reservation`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- reservation_id (str):
- The reservation ID. It must only
- contain lower case alphanumeric
- characters or dashes. It must start with
- a letter and must not end with a dash.
- Its maximum length is 64 characters.
-
- This corresponds to the ``reservation_id`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.bigquery_reservation_v1.types.Reservation:
- A reservation is a mechanism used to
- guarantee slots to users.
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [parent, reservation, reservation_id]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError('If the `request` argument is set, then none of '
- 'the individual field arguments should be set.')
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, gcbr_reservation.CreateReservationRequest):
- request = gcbr_reservation.CreateReservationRequest(request)
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
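- # (Illustrative) The flattened arguments applied below are a convenience
- # over a pre-built request; assuming a valid parent, these are equivalent:
- #   client.create_reservation(request={"parent": "projects/myproject/locations/US"})
- #   client.create_reservation(parent="projects/myproject/locations/US")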
- if parent is not None: - request.parent = parent - if reservation is not None: - request.reservation = reservation - if reservation_id is not None: - request.reservation_id = reservation_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_reservation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_reservations(self, - request: Optional[Union[reservation.ListReservationsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListReservationsPager: - r"""Lists all the reservations for the project in the - specified location. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_reservation_v1 - - def sample_list_reservations(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.ListReservationsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_reservations(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.bigquery_reservation_v1.types.ListReservationsRequest, dict]): - The request object. The request for - [ReservationService.ListReservations][google.cloud.bigquery.reservation.v1.ReservationService.ListReservations]. - parent (str): - Required. The parent resource name containing project - and location, e.g.: ``projects/myproject/locations/US`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_reservation_v1.services.reservation_service.pagers.ListReservationsPager: - The response for - [ReservationService.ListReservations][google.cloud.bigquery.reservation.v1.ReservationService.ListReservations]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. 
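- # (Illustrative) `request` may also be a plain dict matching the proto
- # fields, e.g. {"parent": "projects/myproject/locations/US", "page_size": 100},
- # which the constructor below coerces into a ListReservationsRequest.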
- # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, reservation.ListReservationsRequest): - request = reservation.ListReservationsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_reservations] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListReservationsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_reservation(self, - request: Optional[Union[reservation.GetReservationRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> reservation.Reservation: - r"""Returns information about the reservation. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_reservation_v1 - - def sample_get_reservation(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.GetReservationRequest( - name="name_value", - ) - - # Make the request - response = client.get_reservation(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_reservation_v1.types.GetReservationRequest, dict]): - The request object. The request for - [ReservationService.GetReservation][google.cloud.bigquery.reservation.v1.ReservationService.GetReservation]. - name (str): - Required. Resource name of the reservation to retrieve. - E.g., - ``projects/myproject/locations/US/reservations/team1-prod`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.bigquery_reservation_v1.types.Reservation:
- A reservation is a mechanism used to
- guarantee slots to users.
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [name]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError('If the `request` argument is set, then none of '
- 'the individual field arguments should be set.')
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, reservation.GetReservationRequest):
- request = reservation.GetReservationRequest(request)
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if name is not None:
- request.name = name
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.get_reservation]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("name", request.name),
- )),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- def delete_reservation(self,
- request: Optional[Union[reservation.DeleteReservationRequest, dict]] = None,
- *,
- name: Optional[str] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> None:
- r"""Deletes a reservation. Returns
- ``google.rpc.Code.FAILED_PRECONDITION`` when the reservation has
- assignments.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import bigquery_reservation_v1
-
- def sample_delete_reservation():
- # Create a client
- client = bigquery_reservation_v1.ReservationServiceClient()
-
- # Initialize request argument(s)
- request = bigquery_reservation_v1.DeleteReservationRequest(
- name="name_value",
- )
-
- # Make the request
- client.delete_reservation(request=request)
-
- Args:
- request (Union[google.cloud.bigquery_reservation_v1.types.DeleteReservationRequest, dict]):
- The request object. The request for
- [ReservationService.DeleteReservation][google.cloud.bigquery.reservation.v1.ReservationService.DeleteReservation].
- name (str):
- Required. Resource name of the reservation to delete.
- E.g., - ``projects/myproject/locations/US/reservations/team1-prod`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, reservation.DeleteReservationRequest): - request = reservation.DeleteReservationRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_reservation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def update_reservation(self, - request: Optional[Union[gcbr_reservation.UpdateReservationRequest, dict]] = None, - *, - reservation: Optional[gcbr_reservation.Reservation] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gcbr_reservation.Reservation: - r"""Updates an existing reservation resource. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_reservation_v1 - - def sample_update_reservation(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.UpdateReservationRequest( - ) - - # Make the request - response = client.update_reservation(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_reservation_v1.types.UpdateReservationRequest, dict]): - The request object. 
- The request for
- [ReservationService.UpdateReservation][google.cloud.bigquery.reservation.v1.ReservationService.UpdateReservation].
- reservation (google.cloud.bigquery_reservation_v1.types.Reservation):
- Content of the reservation to update.
- This corresponds to the ``reservation`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- update_mask (google.protobuf.field_mask_pb2.FieldMask):
- Standard field mask for the set of
- fields to be updated.
-
- This corresponds to the ``update_mask`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.bigquery_reservation_v1.types.Reservation:
- A reservation is a mechanism used to
- guarantee slots to users.
-
- """
- # Create or coerce a protobuf request object.
- # - Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- flattened_params = [reservation, update_mask]
- has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
- if request is not None and has_flattened_params:
- raise ValueError('If the `request` argument is set, then none of '
- 'the individual field arguments should be set.')
-
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, gcbr_reservation.UpdateReservationRequest):
- request = gcbr_reservation.UpdateReservationRequest(request)
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if reservation is not None:
- request.reservation = reservation
- if update_mask is not None:
- request.update_mask = update_mask
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.update_reservation]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("reservation.name", request.reservation.name),
- )),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- def failover_reservation(self,
- request: Optional[Union[reservation.FailoverReservationRequest, dict]] = None,
- *,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> reservation.Reservation:
- r"""Fail over a reservation to the secondary location. The operation
- should be done in the current secondary location, which will be
- promoted to the new primary location for the reservation.
- Attempting to fail over a reservation in the current primary
- location will fail with the error code
- ``google.rpc.Code.FAILED_PRECONDITION``.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service
- # client as shown in:
- # https://googleapis.dev/python/google-api-core/latest/client_options.html
- from google.cloud import bigquery_reservation_v1
-
- def sample_failover_reservation():
- # Create a client
- client = bigquery_reservation_v1.ReservationServiceClient()
-
- # Initialize request argument(s)
- request = bigquery_reservation_v1.FailoverReservationRequest(
- name="name_value",
- )
-
- # Make the request
- response = client.failover_reservation(request=request)
-
- # Handle the response
- print(response)
-
- Args:
- request (Union[google.cloud.bigquery_reservation_v1.types.FailoverReservationRequest, dict]):
- The request object. The request for
- ReservationService.FailoverReservation.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
- sent along with the request as metadata. Normally, each value must be of type `str`,
- but for metadata keys ending with the suffix `-bin`, the corresponding values must
- be of type `bytes`.
-
- Returns:
- google.cloud.bigquery_reservation_v1.types.Reservation:
- A reservation is a mechanism used to
- guarantee slots to users.
-
- """
- # Create or coerce a protobuf request object.
- # - Use the request object if provided (there's no risk of modifying the input as
- # there are no flattened fields), or create one.
- if not isinstance(request, reservation.FailoverReservationRequest):
- request = reservation.FailoverReservationRequest(request)
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.failover_reservation]
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("name", request.name),
- )),
- )
-
- # Validate the universe domain.
- self._validate_universe_domain()
-
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- def create_capacity_commitment(self,
- request: Optional[Union[reservation.CreateCapacityCommitmentRequest, dict]] = None,
- *,
- parent: Optional[str] = None,
- capacity_commitment: Optional[reservation.CapacityCommitment] = None,
- retry: OptionalRetry = gapic_v1.method.DEFAULT,
- timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
- ) -> reservation.CapacityCommitment:
- r"""Creates a new capacity commitment resource.
-
- .. code-block:: python
-
- # This snippet has been automatically generated and should be regarded as a
- # code template only.
- # It will require modifications to work:
- # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_reservation_v1 - - def sample_create_capacity_commitment(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.CreateCapacityCommitmentRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_capacity_commitment(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_reservation_v1.types.CreateCapacityCommitmentRequest, dict]): - The request object. The request for - [ReservationService.CreateCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.CreateCapacityCommitment]. - parent (str): - Required. Resource name of the parent reservation. E.g., - ``projects/myproject/locations/US`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - capacity_commitment (google.cloud.bigquery_reservation_v1.types.CapacityCommitment): - Content of the capacity commitment to - create. - - This corresponds to the ``capacity_commitment`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_reservation_v1.types.CapacityCommitment: - Capacity commitment is a way to - purchase compute capacity for BigQuery - jobs (in the form of slots) with some - committed period of usage. Annual - commitments renew by default. - Commitments can be removed after their - commitment end time passes. - - In order to remove annual commitment, - its plan needs to be changed to monthly - or flex first. - - A capacity commitment resource exists as - a child resource of the admin project. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, capacity_commitment] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, reservation.CreateCapacityCommitmentRequest): - request = reservation.CreateCapacityCommitmentRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if capacity_commitment is not None: - request.capacity_commitment = capacity_commitment - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
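- # (Illustrative) A minimal flattened call matching the docstring above,
- # with assumed field values ("flex" is one of the plans mentioned there):
- #   client.create_capacity_commitment(
- #       parent="projects/myproject/locations/US",
- #       capacity_commitment=reservation.CapacityCommitment(plan="FLEX", slot_count=100),
- #   )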
- rpc = self._transport._wrapped_methods[self._transport.create_capacity_commitment] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_capacity_commitments(self, - request: Optional[Union[reservation.ListCapacityCommitmentsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListCapacityCommitmentsPager: - r"""Lists all the capacity commitments for the admin - project. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_reservation_v1 - - def sample_list_capacity_commitments(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.ListCapacityCommitmentsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_capacity_commitments(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.bigquery_reservation_v1.types.ListCapacityCommitmentsRequest, dict]): - The request object. The request for - [ReservationService.ListCapacityCommitments][google.cloud.bigquery.reservation.v1.ReservationService.ListCapacityCommitments]. - parent (str): - Required. Resource name of the parent reservation. E.g., - ``projects/myproject/locations/US`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_reservation_v1.services.reservation_service.pagers.ListCapacityCommitmentsPager: - The response for - [ReservationService.ListCapacityCommitments][google.cloud.bigquery.reservation.v1.ReservationService.ListCapacityCommitments]. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
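- # (Illustrative) Mixing the two calling styles raises a ValueError, e.g.:
- #   client.list_capacity_commitments(
- #       request={"parent": "projects/myproject/locations/US"},
- #       parent="projects/myproject/locations/US",
- #   )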
- flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, reservation.ListCapacityCommitmentsRequest): - request = reservation.ListCapacityCommitmentsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_capacity_commitments] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListCapacityCommitmentsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_capacity_commitment(self, - request: Optional[Union[reservation.GetCapacityCommitmentRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> reservation.CapacityCommitment: - r"""Returns information about the capacity commitment. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_reservation_v1 - - def sample_get_capacity_commitment(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.GetCapacityCommitmentRequest( - name="name_value", - ) - - # Make the request - response = client.get_capacity_commitment(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_reservation_v1.types.GetCapacityCommitmentRequest, dict]): - The request object. The request for - [ReservationService.GetCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.GetCapacityCommitment]. - name (str): - Required. Resource name of the capacity commitment to - retrieve. E.g., - ``projects/myproject/locations/US/capacityCommitments/123`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_reservation_v1.types.CapacityCommitment: - Capacity commitment is a way to - purchase compute capacity for BigQuery - jobs (in the form of slots) with some - committed period of usage. Annual - commitments renew by default. - Commitments can be removed after their - commitment end time passes. - - In order to remove annual commitment, - its plan needs to be changed to monthly - or flex first. - - A capacity commitment resource exists as - a child resource of the admin project. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, reservation.GetCapacityCommitmentRequest): - request = reservation.GetCapacityCommitmentRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_capacity_commitment] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_capacity_commitment(self, - request: Optional[Union[reservation.DeleteCapacityCommitmentRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a capacity commitment. Attempting to delete capacity - commitment before its commitment_end_time will fail with the - error code ``google.rpc.Code.FAILED_PRECONDITION``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_reservation_v1 - - def sample_delete_capacity_commitment(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.DeleteCapacityCommitmentRequest( - name="name_value", - ) - - # Make the request - client.delete_capacity_commitment(request=request) - - Args: - request (Union[google.cloud.bigquery_reservation_v1.types.DeleteCapacityCommitmentRequest, dict]): - The request object. The request for - [ReservationService.DeleteCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.DeleteCapacityCommitment]. - name (str): - Required. Resource name of the capacity commitment to - delete. E.g., - ``projects/myproject/locations/US/capacityCommitments/123`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, reservation.DeleteCapacityCommitmentRequest): - request = reservation.DeleteCapacityCommitmentRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_capacity_commitment] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def update_capacity_commitment(self, - request: Optional[Union[reservation.UpdateCapacityCommitmentRequest, dict]] = None, - *, - capacity_commitment: Optional[reservation.CapacityCommitment] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> reservation.CapacityCommitment: - r"""Updates an existing capacity commitment. 
- - Only ``plan`` and ``renewal_plan`` fields can be updated. - - Plan can only be changed to a plan of a longer commitment - period. Attempting to change to a plan with shorter commitment - period will fail with the error code - ``google.rpc.Code.FAILED_PRECONDITION``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_reservation_v1 - - def sample_update_capacity_commitment(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.UpdateCapacityCommitmentRequest( - ) - - # Make the request - response = client.update_capacity_commitment(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_reservation_v1.types.UpdateCapacityCommitmentRequest, dict]): - The request object. The request for - [ReservationService.UpdateCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.UpdateCapacityCommitment]. - capacity_commitment (google.cloud.bigquery_reservation_v1.types.CapacityCommitment): - Content of the capacity commitment to - update. - - This corresponds to the ``capacity_commitment`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Standard field mask for the set of - fields to be updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_reservation_v1.types.CapacityCommitment: - Capacity commitment is a way to - purchase compute capacity for BigQuery - jobs (in the form of slots) with some - committed period of usage. Annual - commitments renew by default. - Commitments can be removed after their - commitment end time passes. - - In order to remove annual commitment, - its plan needs to be changed to monthly - or flex first. - - A capacity commitment resource exists as - a child resource of the admin project. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [capacity_commitment, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
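- # (Illustrative) Updating only `renewal_plan`, one of the two fields the
- # docstring above notes as mutable; the name and values are assumed:
- #   client.update_capacity_commitment(
- #       capacity_commitment=reservation.CapacityCommitment(
- #           name="projects/myproject/locations/US/capacityCommitments/123",
- #           renewal_plan="MONTHLY",
- #       ),
- #       update_mask=field_mask_pb2.FieldMask(paths=["renewal_plan"]),
- #   )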
- if not isinstance(request, reservation.UpdateCapacityCommitmentRequest): - request = reservation.UpdateCapacityCommitmentRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if capacity_commitment is not None: - request.capacity_commitment = capacity_commitment - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_capacity_commitment] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("capacity_commitment.name", request.capacity_commitment.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def split_capacity_commitment(self, - request: Optional[Union[reservation.SplitCapacityCommitmentRequest, dict]] = None, - *, - name: Optional[str] = None, - slot_count: Optional[int] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> reservation.SplitCapacityCommitmentResponse: - r"""Splits capacity commitment to two commitments of the same plan - and ``commitment_end_time``. - - A common use case is to enable downgrading commitments. - - For example, in order to downgrade from 10000 slots to 8000, you - might split a 10000 capacity commitment into commitments of 2000 - and 8000. Then, you delete the first one after the commitment - end time passes. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_reservation_v1 - - def sample_split_capacity_commitment(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.SplitCapacityCommitmentRequest( - name="name_value", - ) - - # Make the request - response = client.split_capacity_commitment(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_reservation_v1.types.SplitCapacityCommitmentRequest, dict]): - The request object. The request for - [ReservationService.SplitCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.SplitCapacityCommitment]. - name (str): - Required. The resource name e.g.,: - ``projects/myproject/locations/US/capacityCommitments/123`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - slot_count (int): - Number of slots in the capacity - commitment after the split. - - This corresponds to the ``slot_count`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_reservation_v1.types.SplitCapacityCommitmentResponse: - The response for - [ReservationService.SplitCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.SplitCapacityCommitment]. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name, slot_count] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, reservation.SplitCapacityCommitmentRequest): - request = reservation.SplitCapacityCommitmentRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if slot_count is not None: - request.slot_count = slot_count - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.split_capacity_commitment] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def merge_capacity_commitments(self, - request: Optional[Union[reservation.MergeCapacityCommitmentsRequest, dict]] = None, - *, - parent: Optional[str] = None, - capacity_commitment_ids: Optional[MutableSequence[str]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> reservation.CapacityCommitment: - r"""Merges capacity commitments of the same plan into a single - commitment. - - The resulting capacity commitment has the greater - commitment_end_time out of the to-be-merged capacity - commitments. - - Attempting to merge capacity commitments of different plan will - fail with the error code - ``google.rpc.Code.FAILED_PRECONDITION``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_reservation_v1 - - def sample_merge_capacity_commitments(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.MergeCapacityCommitmentsRequest( - ) - - # Make the request - response = client.merge_capacity_commitments(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_reservation_v1.types.MergeCapacityCommitmentsRequest, dict]): - The request object. The request for - [ReservationService.MergeCapacityCommitments][google.cloud.bigquery.reservation.v1.ReservationService.MergeCapacityCommitments]. - parent (str): - Parent resource that identifies admin project and - location e.g., ``projects/myproject/locations/us`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - capacity_commitment_ids (MutableSequence[str]): - Ids of capacity commitments to merge. - These capacity commitments must exist - under admin project and location - specified in the parent. - ID is the last portion of capacity - commitment name e.g., 'abc' for - projects/myproject/locations/US/capacityCommitments/abc - - This corresponds to the ``capacity_commitment_ids`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_reservation_v1.types.CapacityCommitment: - Capacity commitment is a way to - purchase compute capacity for BigQuery - jobs (in the form of slots) with some - committed period of usage. Annual - commitments renew by default. - Commitments can be removed after their - commitment end time passes. - - In order to remove annual commitment, - its plan needs to be changed to monthly - or flex first. - - A capacity commitment resource exists as - a child resource of the admin project. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, capacity_commitment_ids] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, reservation.MergeCapacityCommitmentsRequest): - request = reservation.MergeCapacityCommitmentsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
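- # (Illustrative) Merging two commitments by ID as described above; the IDs
- # are bare last path segments, not full resource names (values assumed):
- #   client.merge_capacity_commitments(
- #       parent="projects/myproject/locations/us",
- #       capacity_commitment_ids=["abc", "def"],
- #   )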
- if parent is not None: - request.parent = parent - if capacity_commitment_ids is not None: - request.capacity_commitment_ids = capacity_commitment_ids - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.merge_capacity_commitments] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_assignment(self, - request: Optional[Union[reservation.CreateAssignmentRequest, dict]] = None, - *, - parent: Optional[str] = None, - assignment: Optional[reservation.Assignment] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> reservation.Assignment: - r"""Creates an assignment object which allows the given project to - submit jobs of a certain type using slots from the specified - reservation. - - Currently a resource (project, folder, organization) can only - have one assignment per each (job_type, location) combination, - and that reservation will be used for all jobs of the matching - type. - - Different assignments can be created on different levels of the - projects, folders or organization hierarchy. During query - execution, the assignment is looked up at the project, folder - and organization levels in that order. The first assignment - found is applied to the query. - - When creating assignments, it does not matter if other - assignments exist at higher levels. - - Example: - - - The organization ``organizationA`` contains two projects, - ``project1`` and ``project2``. - - Assignments for all three entities (``organizationA``, - ``project1``, and ``project2``) could all be created and - mapped to the same or different reservations. - - "None" assignments represent an absence of the assignment. - Projects assigned to None use on-demand pricing. To create a - "None" assignment, use "none" as a reservation_id in the parent. - Example parent: - ``projects/myproject/locations/US/reservations/none``. - - Returns ``google.rpc.Code.PERMISSION_DENIED`` if user does not - have 'bigquery.admin' permissions on the project using the - reservation and the project that owns this reservation. - - Returns ``google.rpc.Code.INVALID_ARGUMENT`` when location of - the assignment does not match location of the reservation. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import bigquery_reservation_v1
-
-            def sample_create_assignment():
-                # Create a client
-                client = bigquery_reservation_v1.ReservationServiceClient()
-
-                # Initialize request argument(s)
-                request = bigquery_reservation_v1.CreateAssignmentRequest(
-                    parent="parent_value",
-                )
-
-                # Make the request
-                response = client.create_assignment(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.bigquery_reservation_v1.types.CreateAssignmentRequest, dict]):
-                The request object. The request for
-                [ReservationService.CreateAssignment][google.cloud.bigquery.reservation.v1.ReservationService.CreateAssignment].
-                Note: "bigquery.reservationAssignments.create"
-                permission is required on the related assignee.
-            parent (str):
-                Required. The parent resource name of the assignment,
-                e.g.
-                ``projects/myproject/locations/US/reservations/team1-prod``
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            assignment (google.cloud.bigquery_reservation_v1.types.Assignment):
-                Assignment resource to create.
-                This corresponds to the ``assignment`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.bigquery_reservation_v1.types.Assignment:
-                An assignment allows a project to
-                submit jobs of a certain type using
-                slots from the specified reservation.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [parent, assignment]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, reservation.CreateAssignmentRequest):
-            request = reservation.CreateAssignmentRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-        if parent is not None:
-            request.parent = parent
-        if assignment is not None:
-            request.assignment = assignment
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.create_assignment]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def list_assignments(self,
-            request: Optional[Union[reservation.ListAssignmentsRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> pagers.ListAssignmentsPager:
-        r"""Lists assignments.
-
-        Only explicitly created assignments will be returned.
-
-        Example:
-
-        -  Organization ``organizationA`` contains two projects,
-           ``project1`` and ``project2``.
-        -  Reservation ``res1`` exists and was created previously.
-        -  CreateAssignment was used previously to define the following
-           associations between entities and reservations:
-           ``<organizationA, res1>`` and ``<project1, res1>``
-
-        In this example, ListAssignments will just return the above two
-        assignments for reservation ``res1``, and no expansion/merge
-        will happen.
-
-        The wildcard "-" can be used for reservations in the request. In
-        that case all assignments belonging to the specified project and
-        location will be listed.
-
-        **Note** "-" cannot be used for projects or locations.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import bigquery_reservation_v1
-
-            def sample_list_assignments():
-                # Create a client
-                client = bigquery_reservation_v1.ReservationServiceClient()
-
-                # Initialize request argument(s)
-                request = bigquery_reservation_v1.ListAssignmentsRequest(
-                    parent="parent_value",
-                )
-
-                # Make the request
-                page_result = client.list_assignments(request=request)
-
-                # Handle the response
-                for response in page_result:
-                    print(response)
-
-        Args:
-            request (Union[google.cloud.bigquery_reservation_v1.types.ListAssignmentsRequest, dict]):
-                The request object. The request for
-                [ReservationService.ListAssignments][google.cloud.bigquery.reservation.v1.ReservationService.ListAssignments].
-            parent (str):
-                Required. The parent resource name, e.g.:
-
-                ``projects/myproject/locations/US/reservations/team1-prod``
-
-                Or:
-
-                ``projects/myproject/locations/US/reservations/-``
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.bigquery_reservation_v1.services.reservation_service.pagers.ListAssignmentsPager:
-                The response for
-                [ReservationService.ListAssignments][google.cloud.bigquery.reservation.v1.ReservationService.ListAssignments].
-
-                Iterating over this object will yield results and
-                resolve additional pages automatically.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [parent]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, reservation.ListAssignmentsRequest):
-            request = reservation.ListAssignmentsRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-        if parent is not None:
-            request.parent = parent
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.list_assignments]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # This method is paged; wrap the response in a pager, which provides
-        # an `__iter__` convenience method.
-        response = pagers.ListAssignmentsPager(
-            method=rpc,
-            request=request,
-            response=response,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def delete_assignment(self,
-            request: Optional[Union[reservation.DeleteAssignmentRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> None:
-        r"""Deletes an assignment. No expansion will happen.
-
-        Example:
-
-        -  Organization ``organizationA`` contains two projects,
-           ``project1`` and ``project2``.
-        -  Reservation ``res1`` exists and was created previously.
-        -  CreateAssignment was used previously to define the following
-           associations between entities and reservations:
-           ``<organizationA, res1>`` and ``<project1, res1>``
-
-        In this example, deletion of the ``<organizationA, res1>``
-        assignment won't affect the other assignment
-        ``<project1, res1>``. After said deletion, queries from
-        ``project1`` will still use ``res1`` while queries from
-        ``project2`` will switch to use on-demand mode.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import bigquery_reservation_v1
-
-            def sample_delete_assignment():
-                # Create a client
-                client = bigquery_reservation_v1.ReservationServiceClient()
-
-                # Initialize request argument(s)
-                request = bigquery_reservation_v1.DeleteAssignmentRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                client.delete_assignment(request=request)
-
-        Args:
-            request (Union[google.cloud.bigquery_reservation_v1.types.DeleteAssignmentRequest, dict]):
-                The request object. The request for
-                [ReservationService.DeleteAssignment][google.cloud.bigquery.reservation.v1.ReservationService.DeleteAssignment].
-                Note: "bigquery.reservationAssignments.delete"
-                permission is required on the related assignee.
-            name (str):
-                Required. Name of the resource, e.g.
-                ``projects/myproject/locations/US/reservations/team1-prod/assignments/123``
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [name]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, reservation.DeleteAssignmentRequest):
-            request = reservation.DeleteAssignmentRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-        if name is not None:
-            request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.delete_assignment]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-    def search_assignments(self,
-            request: Optional[Union[reservation.SearchAssignmentsRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            query: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> pagers.SearchAssignmentsPager:
-        r"""Deprecated: Looks up assignments for a specified resource for a
-        particular region. If the request is about a project:
-
-        1. Assignments created on the project will be returned if they
-           exist.
-        2. Otherwise assignments created on the closest ancestor will be
-           returned.
-        3. Assignments for different JobTypes will all be returned.
-
-        The same logic applies if the request is about a folder.
-
-        If the request is about an organization, then assignments
-        created on the organization will be returned (an organization
-        doesn't have ancestors).
-
-        Compared to ListAssignments, there are some behavioral
-        differences:
-
-        1. Permission on the assignee will be verified in this API.
-        2. Hierarchy lookup (project->folder->organization) happens in
-           this API.
-        3. Parent here is ``projects/*/locations/*``, instead of
-           ``projects/*/locations/*/reservations/*``.
-
-        **Note** "-" cannot be used for projects or locations.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import bigquery_reservation_v1
-
-            def sample_search_assignments():
-                # Create a client
-                client = bigquery_reservation_v1.ReservationServiceClient()
-
-                # Initialize request argument(s)
-                request = bigquery_reservation_v1.SearchAssignmentsRequest(
-                    parent="parent_value",
-                )
-
-                # Make the request
-                page_result = client.search_assignments(request=request)
-
-                # Handle the response
-                for response in page_result:
-                    print(response)
-
-        Args:
-            request (Union[google.cloud.bigquery_reservation_v1.types.SearchAssignmentsRequest, dict]):
-                The request object. The request for
-                [ReservationService.SearchAssignments][google.cloud.bigquery.reservation.v1.ReservationService.SearchAssignments].
-                Note: "bigquery.reservationAssignments.search"
-                permission is required on the related assignee.
-            parent (str):
-                Required. The resource name of the admin
-                project (containing project and location), e.g.:
-                ``projects/myproject/locations/US``.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            query (str):
-                Please specify the resource name as the assignee in the query.
-
-                Examples:
-
-                -  ``assignee=projects/myproject``
-                -  ``assignee=folders/123``
-                -  ``assignee=organizations/456``
-
-                This corresponds to the ``query`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.bigquery_reservation_v1.services.reservation_service.pagers.SearchAssignmentsPager:
-                The response for
-                [ReservationService.SearchAssignments][google.cloud.bigquery.reservation.v1.ReservationService.SearchAssignments].
-
-                Iterating over this object will yield results and
-                resolve additional pages automatically.
-
-        """
-        warnings.warn("ReservationServiceClient.search_assignments is deprecated",
-            DeprecationWarning)
-
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [parent, query]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, reservation.SearchAssignmentsRequest):
-            request = reservation.SearchAssignmentsRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-        if parent is not None:
-            request.parent = parent
-        if query is not None:
-            request.query = query
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.search_assignments]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # This method is paged; wrap the response in a pager, which provides
-        # an `__iter__` convenience method.
-        response = pagers.SearchAssignmentsPager(
-            method=rpc,
-            request=request,
-            response=response,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def search_all_assignments(self,
-            request: Optional[Union[reservation.SearchAllAssignmentsRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            query: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> pagers.SearchAllAssignmentsPager:
-        r"""Looks up assignments for a specified resource for a particular
-        region. If the request is about a project:
-
-        1. Assignments created on the project will be returned if they
-           exist.
-        2. Otherwise assignments created on the closest ancestor will be
-           returned.
-        3. Assignments for different JobTypes will all be returned.
-
-        The same logic applies if the request is about a folder.
-
-        If the request is about an organization, then assignments
-        created on the organization will be returned (an organization
-        doesn't have ancestors).
-
-        Compared to ListAssignments, there are some behavioral
-        differences:
-
-        1. Permission on the assignee will be verified in this API.
-        2. Hierarchy lookup (project->folder->organization) happens in
-           this API.
-        3. Parent here is ``projects/*/locations/*``, instead of
-           ``projects/*/locations/*/reservations/*``.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import bigquery_reservation_v1
-
-            def sample_search_all_assignments():
-                # Create a client
-                client = bigquery_reservation_v1.ReservationServiceClient()
-
-                # Initialize request argument(s)
-                request = bigquery_reservation_v1.SearchAllAssignmentsRequest(
-                    parent="parent_value",
-                )
-
-                # Make the request
-                page_result = client.search_all_assignments(request=request)
-
-                # Handle the response
-                for response in page_result:
-                    print(response)
-
-        Args:
-            request (Union[google.cloud.bigquery_reservation_v1.types.SearchAllAssignmentsRequest, dict]):
-                The request object. The request for
-                [ReservationService.SearchAllAssignments][google.cloud.bigquery.reservation.v1.ReservationService.SearchAllAssignments].
-                Note: "bigquery.reservationAssignments.search"
-                permission is required on the related assignee.
-            parent (str):
-                Required. The resource name with location (project name
-                could be the wildcard '-'), e.g.:
-                ``projects/-/locations/US``.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            query (str):
-                Please specify the resource name as the assignee in the query.
-
-                Examples:
-
-                -  ``assignee=projects/myproject``
-                -  ``assignee=folders/123``
-                -  ``assignee=organizations/456``
-
-                This corresponds to the ``query`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.bigquery_reservation_v1.services.reservation_service.pagers.SearchAllAssignmentsPager:
-                The response for
-                [ReservationService.SearchAllAssignments][google.cloud.bigquery.reservation.v1.ReservationService.SearchAllAssignments].
-
-                Iterating over this object will yield results and
-                resolve additional pages automatically.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [parent, query]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, reservation.SearchAllAssignmentsRequest):
-            request = reservation.SearchAllAssignmentsRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-        if parent is not None:
-            request.parent = parent
-        if query is not None:
-            request.query = query
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.search_all_assignments]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._validate_universe_domain()
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # This method is paged; wrap the response in a pager, which provides
-        # an `__iter__` convenience method.
-        response = pagers.SearchAllAssignmentsPager(
-            method=rpc,
-            request=request,
-            response=response,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def move_assignment(self,
-            request: Optional[Union[reservation.MoveAssignmentRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            destination_id: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> reservation.Assignment:
-        r"""Moves an assignment under a new reservation.
-
-        This differs from removing an existing assignment and
-        recreating a new one by providing a transactional change
-        that ensures an assignee always has an associated
-        reservation.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import bigquery_reservation_v1
-
-            def sample_move_assignment():
-                # Create a client
-                client = bigquery_reservation_v1.ReservationServiceClient()
-
-                # Initialize request argument(s)
-                request = bigquery_reservation_v1.MoveAssignmentRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                response = client.move_assignment(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.bigquery_reservation_v1.types.MoveAssignmentRequest, dict]):
-                The request object. The request for
-                [ReservationService.MoveAssignment][google.cloud.bigquery.reservation.v1.ReservationService.MoveAssignment].
-
-                **Note**: "bigquery.reservationAssignments.create"
-                permission is required on the destination_id.
-
-                **Note**: "bigquery.reservationAssignments.create" and
-                "bigquery.reservationAssignments.delete" permissions are
-                required on the related assignee.
-            name (str):
-                Required. The resource name of the assignment, e.g.
-                ``projects/myproject/locations/US/reservations/team1-prod/assignments/123``
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            destination_id (str):
-                The new reservation ID, e.g.:
-                ``projects/myotherproject/locations/US/reservations/team2-prod``
-
-                This corresponds to the ``destination_id`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.bigquery_reservation_v1.types.Assignment:
-                An assignment allows a project to
-                submit jobs of a certain type using
-                slots from the specified reservation.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
- flattened_params = [name, destination_id] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, reservation.MoveAssignmentRequest): - request = reservation.MoveAssignmentRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if destination_id is not None: - request.destination_id = destination_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.move_assignment] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_assignment(self, - request: Optional[Union[reservation.UpdateAssignmentRequest, dict]] = None, - *, - assignment: Optional[reservation.Assignment] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> reservation.Assignment: - r"""Updates an existing assignment. - - Only the ``priority`` field can be updated. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_reservation_v1 - - def sample_update_assignment(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.UpdateAssignmentRequest( - ) - - # Make the request - response = client.update_assignment(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_reservation_v1.types.UpdateAssignmentRequest, dict]): - The request object. The request for - [ReservationService.UpdateAssignment][google.cloud.bigquery.reservation.v1.ReservationService.UpdateAssignment]. - assignment (google.cloud.bigquery_reservation_v1.types.Assignment): - Content of the assignment to update. - This corresponds to the ``assignment`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Standard field mask for the set of - fields to be updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_reservation_v1.types.Assignment: - An assignment allows a project to - submit jobs of a certain type using - slots from the specified reservation. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [assignment, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, reservation.UpdateAssignmentRequest): - request = reservation.UpdateAssignmentRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if assignment is not None: - request.assignment = assignment - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_assignment] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("assignment.name", request.assignment.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_bi_reservation(self, - request: Optional[Union[reservation.GetBiReservationRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> reservation.BiReservation: - r"""Retrieves a BI reservation. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import bigquery_reservation_v1 - - def sample_get_bi_reservation(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.GetBiReservationRequest( - name="name_value", - ) - - # Make the request - response = client.get_bi_reservation(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.bigquery_reservation_v1.types.GetBiReservationRequest, dict]): - The request object. A request to get a singleton BI - reservation. - name (str): - Required. Name of the requested reservation, for - example: - ``projects/{project_id}/locations/{location_id}/biReservation`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.bigquery_reservation_v1.types.BiReservation: - Represents a BI Reservation. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, reservation.GetBiReservationRequest): - request = reservation.GetBiReservationRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_bi_reservation] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_bi_reservation(self, - request: Optional[Union[reservation.UpdateBiReservationRequest, dict]] = None, - *, - bi_reservation: Optional[reservation.BiReservation] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> reservation.BiReservation: - r"""Updates a BI reservation. 
-
-        Only fields specified in the ``field_mask`` are updated.
-
-        A singleton BI reservation always exists with default size 0. In
-        order to reserve BI capacity, it needs to be updated to an amount
-        greater than 0. In order to release BI capacity, the reservation
-        size must be set to 0.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import bigquery_reservation_v1
-
-            def sample_update_bi_reservation():
-                # Create a client
-                client = bigquery_reservation_v1.ReservationServiceClient()
-
-                # Initialize request argument(s)
-                request = bigquery_reservation_v1.UpdateBiReservationRequest(
-                )
-
-                # Make the request
-                response = client.update_bi_reservation(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.bigquery_reservation_v1.types.UpdateBiReservationRequest, dict]):
-                The request object. A request to update a BI reservation.
-            bi_reservation (google.cloud.bigquery_reservation_v1.types.BiReservation):
-                A reservation to update.
-                This corresponds to the ``bi_reservation`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            update_mask (google.protobuf.field_mask_pb2.FieldMask):
-                A list of fields to be updated in
-                this request.
-
-                This corresponds to the ``update_mask`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.bigquery_reservation_v1.types.BiReservation:
-                Represents a BI Reservation.
-        """
-        # Create or coerce a protobuf request object.
-        # - Quick check: If we got a request object, we should *not* have
-        #   gotten any keyword arguments that map to the request.
-        flattened_params = [bi_reservation, update_mask]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, reservation.UpdateBiReservationRequest):
-            request = reservation.UpdateBiReservationRequest(request)
-            # If we have keyword arguments corresponding to fields on the
-            # request, apply these.
-        if bi_reservation is not None:
-            request.bi_reservation = bi_reservation
-        if update_mask is not None:
-            request.update_mask = update_mask
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.update_bi_reservation]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("bi_reservation.name", request.bi_reservation.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self) -> "ReservationServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "ReservationServiceClient", -) diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/services/reservation_service/pagers.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/services/reservation_service/pagers.py deleted file mode 100644 index d1b535de7d36..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/services/reservation_service/pagers.py +++ /dev/null @@ -1,722 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.bigquery_reservation_v1.types import reservation - - -class ListReservationsPager: - """A pager for iterating through ``list_reservations`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_reservation_v1.types.ListReservationsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``reservations`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListReservations`` requests and continue to iterate - through the ``reservations`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_reservation_v1.types.ListReservationsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - def __init__(self, - method: Callable[..., reservation.ListReservationsResponse], - request: reservation.ListReservationsRequest, - response: reservation.ListReservationsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_reservation_v1.types.ListReservationsRequest): - The initial request object. - response (google.cloud.bigquery_reservation_v1.types.ListReservationsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = reservation.ListReservationsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[reservation.ListReservationsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[reservation.Reservation]: - for page in self.pages: - yield from page.reservations - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListReservationsAsyncPager: - """A pager for iterating through ``list_reservations`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_reservation_v1.types.ListReservationsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``reservations`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListReservations`` requests and continue to iterate - through the ``reservations`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_reservation_v1.types.ListReservationsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[reservation.ListReservationsResponse]], - request: reservation.ListReservationsRequest, - response: reservation.ListReservationsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_reservation_v1.types.ListReservationsRequest): - The initial request object. - response (google.cloud.bigquery_reservation_v1.types.ListReservationsResponse): - The initial response object. 
- retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = reservation.ListReservationsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[reservation.ListReservationsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[reservation.Reservation]: - async def async_generator(): - async for page in self.pages: - for response in page.reservations: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListCapacityCommitmentsPager: - """A pager for iterating through ``list_capacity_commitments`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_reservation_v1.types.ListCapacityCommitmentsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``capacity_commitments`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListCapacityCommitments`` requests and continue to iterate - through the ``capacity_commitments`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_reservation_v1.types.ListCapacityCommitmentsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., reservation.ListCapacityCommitmentsResponse], - request: reservation.ListCapacityCommitmentsRequest, - response: reservation.ListCapacityCommitmentsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_reservation_v1.types.ListCapacityCommitmentsRequest): - The initial request object. - response (google.cloud.bigquery_reservation_v1.types.ListCapacityCommitmentsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = reservation.ListCapacityCommitmentsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[reservation.ListCapacityCommitmentsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[reservation.CapacityCommitment]: - for page in self.pages: - yield from page.capacity_commitments - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListCapacityCommitmentsAsyncPager: - """A pager for iterating through ``list_capacity_commitments`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_reservation_v1.types.ListCapacityCommitmentsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``capacity_commitments`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListCapacityCommitments`` requests and continue to iterate - through the ``capacity_commitments`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_reservation_v1.types.ListCapacityCommitmentsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[reservation.ListCapacityCommitmentsResponse]], - request: reservation.ListCapacityCommitmentsRequest, - response: reservation.ListCapacityCommitmentsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_reservation_v1.types.ListCapacityCommitmentsRequest): - The initial request object. - response (google.cloud.bigquery_reservation_v1.types.ListCapacityCommitmentsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = reservation.ListCapacityCommitmentsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[reservation.ListCapacityCommitmentsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[reservation.CapacityCommitment]: - async def async_generator(): - async for page in self.pages: - for response in page.capacity_commitments: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListAssignmentsPager: - """A pager for iterating through ``list_assignments`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_reservation_v1.types.ListAssignmentsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``assignments`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListAssignments`` requests and continue to iterate - through the ``assignments`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_reservation_v1.types.ListAssignmentsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., reservation.ListAssignmentsResponse], - request: reservation.ListAssignmentsRequest, - response: reservation.ListAssignmentsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_reservation_v1.types.ListAssignmentsRequest): - The initial request object. - response (google.cloud.bigquery_reservation_v1.types.ListAssignmentsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = reservation.ListAssignmentsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[reservation.ListAssignmentsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[reservation.Assignment]: - for page in self.pages: - yield from page.assignments - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListAssignmentsAsyncPager: - """A pager for iterating through ``list_assignments`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_reservation_v1.types.ListAssignmentsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``assignments`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListAssignments`` requests and continue to iterate - through the ``assignments`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_reservation_v1.types.ListAssignmentsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[reservation.ListAssignmentsResponse]], - request: reservation.ListAssignmentsRequest, - response: reservation.ListAssignmentsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_reservation_v1.types.ListAssignmentsRequest): - The initial request object. - response (google.cloud.bigquery_reservation_v1.types.ListAssignmentsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = reservation.ListAssignmentsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[reservation.ListAssignmentsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[reservation.Assignment]: - async def async_generator(): - async for page in self.pages: - for response in page.assignments: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class SearchAssignmentsPager: - """A pager for iterating through ``search_assignments`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_reservation_v1.types.SearchAssignmentsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``assignments`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``SearchAssignments`` requests and continue to iterate - through the ``assignments`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_reservation_v1.types.SearchAssignmentsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., reservation.SearchAssignmentsResponse], - request: reservation.SearchAssignmentsRequest, - response: reservation.SearchAssignmentsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_reservation_v1.types.SearchAssignmentsRequest): - The initial request object. - response (google.cloud.bigquery_reservation_v1.types.SearchAssignmentsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = reservation.SearchAssignmentsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[reservation.SearchAssignmentsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[reservation.Assignment]: - for page in self.pages: - yield from page.assignments - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class SearchAssignmentsAsyncPager: - """A pager for iterating through ``search_assignments`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_reservation_v1.types.SearchAssignmentsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``assignments`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``SearchAssignments`` requests and continue to iterate - through the ``assignments`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_reservation_v1.types.SearchAssignmentsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[reservation.SearchAssignmentsResponse]], - request: reservation.SearchAssignmentsRequest, - response: reservation.SearchAssignmentsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_reservation_v1.types.SearchAssignmentsRequest): - The initial request object. - response (google.cloud.bigquery_reservation_v1.types.SearchAssignmentsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = reservation.SearchAssignmentsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[reservation.SearchAssignmentsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[reservation.Assignment]: - async def async_generator(): - async for page in self.pages: - for response in page.assignments: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class SearchAllAssignmentsPager: - """A pager for iterating through ``search_all_assignments`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_reservation_v1.types.SearchAllAssignmentsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``assignments`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``SearchAllAssignments`` requests and continue to iterate - through the ``assignments`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_reservation_v1.types.SearchAllAssignmentsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., reservation.SearchAllAssignmentsResponse], - request: reservation.SearchAllAssignmentsRequest, - response: reservation.SearchAllAssignmentsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_reservation_v1.types.SearchAllAssignmentsRequest): - The initial request object. - response (google.cloud.bigquery_reservation_v1.types.SearchAllAssignmentsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = reservation.SearchAllAssignmentsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[reservation.SearchAllAssignmentsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[reservation.Assignment]: - for page in self.pages: - yield from page.assignments - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class SearchAllAssignmentsAsyncPager: - """A pager for iterating through ``search_all_assignments`` requests. - - This class thinly wraps an initial - :class:`google.cloud.bigquery_reservation_v1.types.SearchAllAssignmentsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``assignments`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``SearchAllAssignments`` requests and continue to iterate - through the ``assignments`` field on the - corresponding responses. - - All the usual :class:`google.cloud.bigquery_reservation_v1.types.SearchAllAssignmentsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[reservation.SearchAllAssignmentsResponse]], - request: reservation.SearchAllAssignmentsRequest, - response: reservation.SearchAllAssignmentsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.bigquery_reservation_v1.types.SearchAllAssignmentsRequest): - The initial request object. - response (google.cloud.bigquery_reservation_v1.types.SearchAllAssignmentsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = reservation.SearchAllAssignmentsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[reservation.SearchAllAssignmentsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[reservation.Assignment]: - async def async_generator(): - async for page in self.pages: - for response in page.assignments: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/README.rst b/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/README.rst deleted file mode 100644 index b69ca178e2ae..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`ReservationServiceTransport` is the ABC for all transports. -- public child `ReservationServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `ReservationServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseReservationServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `ReservationServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/__init__.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/__init__.py deleted file mode 100644 index bdbb03f004fd..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -from typing import Dict, Type - -from .base import ReservationServiceTransport -from .grpc import ReservationServiceGrpcTransport -from .grpc_asyncio import ReservationServiceGrpcAsyncIOTransport -from .rest import ReservationServiceRestTransport -from .rest import ReservationServiceRestInterceptor - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[ReservationServiceTransport]] -_transport_registry['grpc'] = ReservationServiceGrpcTransport -_transport_registry['grpc_asyncio'] = ReservationServiceGrpcAsyncIOTransport -_transport_registry['rest'] = ReservationServiceRestTransport - -__all__ = ( - 'ReservationServiceTransport', - 'ReservationServiceGrpcTransport', - 'ReservationServiceGrpcAsyncIOTransport', - 'ReservationServiceRestTransport', - 'ReservationServiceRestInterceptor', -) diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/base.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/base.py deleted file mode 100644 index 2e5c3bbf4d92..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/base.py +++ /dev/null @@ -1,551 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
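[Editor's note] The ``_transport_registry`` above is how a transport name passed to the client resolves to a concrete transport class; passing the string is equivalent to instantiating the class yourself. A small sketch:

.. code-block:: python

    from google.cloud import bigquery_reservation_v1

    # Same registry lookup the client performs internally.
    grpc_client = bigquery_reservation_v1.ReservationServiceClient(transport="grpc")
    rest_client = bigquery_reservation_v1.ReservationServiceClient(transport="rest")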
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.bigquery_reservation_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.bigquery_reservation_v1.types import reservation -from google.cloud.bigquery_reservation_v1.types import reservation as gcbr_reservation -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class ReservationServiceTransport(abc.ABC): - """Abstract transport class for ReservationService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'bigqueryreservation.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'bigqueryreservation.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. 
- if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.create_reservation: gapic_v1.method.wrap_method( - self.create_reservation, - default_timeout=300.0, - client_info=client_info, - ), - self.list_reservations: gapic_v1.method.wrap_method( - self.list_reservations, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.get_reservation: gapic_v1.method.wrap_method( - self.get_reservation, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.delete_reservation: gapic_v1.method.wrap_method( - self.delete_reservation, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.update_reservation: gapic_v1.method.wrap_method( - self.update_reservation, - default_timeout=300.0, - client_info=client_info, - ), - self.failover_reservation: gapic_v1.method.wrap_method( - self.failover_reservation, - default_timeout=300.0, - client_info=client_info, - ), - self.create_capacity_commitment: gapic_v1.method.wrap_method( - self.create_capacity_commitment, - default_timeout=300.0, - client_info=client_info, - ), - self.list_capacity_commitments: gapic_v1.method.wrap_method( - self.list_capacity_commitments, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.get_capacity_commitment: gapic_v1.method.wrap_method( - self.get_capacity_commitment, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.delete_capacity_commitment: gapic_v1.method.wrap_method( - self.delete_capacity_commitment, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - 
default_timeout=300.0, - client_info=client_info, - ), - self.update_capacity_commitment: gapic_v1.method.wrap_method( - self.update_capacity_commitment, - default_timeout=300.0, - client_info=client_info, - ), - self.split_capacity_commitment: gapic_v1.method.wrap_method( - self.split_capacity_commitment, - default_timeout=300.0, - client_info=client_info, - ), - self.merge_capacity_commitments: gapic_v1.method.wrap_method( - self.merge_capacity_commitments, - default_timeout=300.0, - client_info=client_info, - ), - self.create_assignment: gapic_v1.method.wrap_method( - self.create_assignment, - default_timeout=300.0, - client_info=client_info, - ), - self.list_assignments: gapic_v1.method.wrap_method( - self.list_assignments, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.delete_assignment: gapic_v1.method.wrap_method( - self.delete_assignment, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.search_assignments: gapic_v1.method.wrap_method( - self.search_assignments, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.search_all_assignments: gapic_v1.method.wrap_method( - self.search_all_assignments, - default_timeout=None, - client_info=client_info, - ), - self.move_assignment: gapic_v1.method.wrap_method( - self.move_assignment, - default_timeout=300.0, - client_info=client_info, - ), - self.update_assignment: gapic_v1.method.wrap_method( - self.update_assignment, - default_timeout=None, - client_info=client_info, - ), - self.get_bi_reservation: gapic_v1.method.wrap_method( - self.get_bi_reservation, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.update_bi_reservation: gapic_v1.method.wrap_method( - self.update_bi_reservation, - default_timeout=300.0, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
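[Editor's note] The ``_prep_wrapped_messages`` table above only supplies defaults; every generated method also accepts per-call ``retry`` and ``timeout`` arguments that take precedence over them. A sketch of overriding both (values are illustrative):

.. code-block:: python

    from google.api_core import retry as retries
    from google.cloud import bigquery_reservation_v1

    client = bigquery_reservation_v1.ReservationServiceClient()
    pager = client.list_reservations(
        parent="projects/my-project/locations/US",
        # Overrides the default Retry/timeout wrapped into the transport.
        retry=retries.Retry(initial=0.2, maximum=30.0, multiplier=2.0, timeout=120.0),
        timeout=120.0,
    )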
- """ - raise NotImplementedError() - - @property - def create_reservation(self) -> Callable[ - [gcbr_reservation.CreateReservationRequest], - Union[ - gcbr_reservation.Reservation, - Awaitable[gcbr_reservation.Reservation] - ]]: - raise NotImplementedError() - - @property - def list_reservations(self) -> Callable[ - [reservation.ListReservationsRequest], - Union[ - reservation.ListReservationsResponse, - Awaitable[reservation.ListReservationsResponse] - ]]: - raise NotImplementedError() - - @property - def get_reservation(self) -> Callable[ - [reservation.GetReservationRequest], - Union[ - reservation.Reservation, - Awaitable[reservation.Reservation] - ]]: - raise NotImplementedError() - - @property - def delete_reservation(self) -> Callable[ - [reservation.DeleteReservationRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def update_reservation(self) -> Callable[ - [gcbr_reservation.UpdateReservationRequest], - Union[ - gcbr_reservation.Reservation, - Awaitable[gcbr_reservation.Reservation] - ]]: - raise NotImplementedError() - - @property - def failover_reservation(self) -> Callable[ - [reservation.FailoverReservationRequest], - Union[ - reservation.Reservation, - Awaitable[reservation.Reservation] - ]]: - raise NotImplementedError() - - @property - def create_capacity_commitment(self) -> Callable[ - [reservation.CreateCapacityCommitmentRequest], - Union[ - reservation.CapacityCommitment, - Awaitable[reservation.CapacityCommitment] - ]]: - raise NotImplementedError() - - @property - def list_capacity_commitments(self) -> Callable[ - [reservation.ListCapacityCommitmentsRequest], - Union[ - reservation.ListCapacityCommitmentsResponse, - Awaitable[reservation.ListCapacityCommitmentsResponse] - ]]: - raise NotImplementedError() - - @property - def get_capacity_commitment(self) -> Callable[ - [reservation.GetCapacityCommitmentRequest], - Union[ - reservation.CapacityCommitment, - Awaitable[reservation.CapacityCommitment] - ]]: - raise NotImplementedError() - - @property - def delete_capacity_commitment(self) -> Callable[ - [reservation.DeleteCapacityCommitmentRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def update_capacity_commitment(self) -> Callable[ - [reservation.UpdateCapacityCommitmentRequest], - Union[ - reservation.CapacityCommitment, - Awaitable[reservation.CapacityCommitment] - ]]: - raise NotImplementedError() - - @property - def split_capacity_commitment(self) -> Callable[ - [reservation.SplitCapacityCommitmentRequest], - Union[ - reservation.SplitCapacityCommitmentResponse, - Awaitable[reservation.SplitCapacityCommitmentResponse] - ]]: - raise NotImplementedError() - - @property - def merge_capacity_commitments(self) -> Callable[ - [reservation.MergeCapacityCommitmentsRequest], - Union[ - reservation.CapacityCommitment, - Awaitable[reservation.CapacityCommitment] - ]]: - raise NotImplementedError() - - @property - def create_assignment(self) -> Callable[ - [reservation.CreateAssignmentRequest], - Union[ - reservation.Assignment, - Awaitable[reservation.Assignment] - ]]: - raise NotImplementedError() - - @property - def list_assignments(self) -> Callable[ - [reservation.ListAssignmentsRequest], - Union[ - reservation.ListAssignmentsResponse, - Awaitable[reservation.ListAssignmentsResponse] - ]]: - raise NotImplementedError() - - @property - def delete_assignment(self) -> Callable[ - [reservation.DeleteAssignmentRequest], - Union[ - 
empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def search_assignments(self) -> Callable[ - [reservation.SearchAssignmentsRequest], - Union[ - reservation.SearchAssignmentsResponse, - Awaitable[reservation.SearchAssignmentsResponse] - ]]: - raise NotImplementedError() - - @property - def search_all_assignments(self) -> Callable[ - [reservation.SearchAllAssignmentsRequest], - Union[ - reservation.SearchAllAssignmentsResponse, - Awaitable[reservation.SearchAllAssignmentsResponse] - ]]: - raise NotImplementedError() - - @property - def move_assignment(self) -> Callable[ - [reservation.MoveAssignmentRequest], - Union[ - reservation.Assignment, - Awaitable[reservation.Assignment] - ]]: - raise NotImplementedError() - - @property - def update_assignment(self) -> Callable[ - [reservation.UpdateAssignmentRequest], - Union[ - reservation.Assignment, - Awaitable[reservation.Assignment] - ]]: - raise NotImplementedError() - - @property - def get_bi_reservation(self) -> Callable[ - [reservation.GetBiReservationRequest], - Union[ - reservation.BiReservation, - Awaitable[reservation.BiReservation] - ]]: - raise NotImplementedError() - - @property - def update_bi_reservation(self) -> Callable[ - [reservation.UpdateBiReservationRequest], - Union[ - reservation.BiReservation, - Awaitable[reservation.BiReservation] - ]]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'ReservationServiceTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/grpc.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/grpc.py deleted file mode 100644 index 25e8ed1772cf..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/grpc.py +++ /dev/null @@ -1,1078 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
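[Editor's note] Since the ABC above only declares each RPC as an abstract property, a concrete transport such as the gRPC one that follows can also be constructed explicitly and handed to the client, which is useful for custom channel options. A sketch assuming Application Default Credentials are available:

.. code-block:: python

    from google.cloud import bigquery_reservation_v1
    from google.cloud.bigquery_reservation_v1.services.reservation_service.transports import (
        ReservationServiceGrpcTransport,
    )

    transport = ReservationServiceGrpcTransport(
        host="bigqueryreservation.googleapis.com",
    )
    client = bigquery_reservation_v1.ReservationServiceClient(transport=transport)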
-# -import json -import logging as std_logging -import pickle -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore - -from google.cloud.bigquery_reservation_v1.types import reservation -from google.cloud.bigquery_reservation_v1.types import reservation as gcbr_reservation -from google.protobuf import empty_pb2 # type: ignore -from .base import ReservationServiceTransport, DEFAULT_CLIENT_INFO - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER - def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": client_call_details.method, - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - - response = continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = response.trailing_metadata() - # Convert the gRPC trailing metadata into a plain dict for logging - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = response.result() - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response for {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": client_call_details.method, - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class ReservationServiceGrpcTransport(ReservationServiceTransport): - """gRPC backend transport for ReservationService. - - This API allows users to manage their BigQuery reservations. - - A reservation provides computational resource guarantees, in the - form of `slots <https://cloud.google.com/bigquery/docs/slots>`__, to - users.
A slot is a unit of computational power in BigQuery, and - serves as the basic unit of parallelism. In a scan of a - multi-partitioned table, a single slot operates on a single - partition of the table. A reservation resource exists as a child - resource of the admin project and location, e.g.: - ``projects/myproject/locations/US/reservations/reservationName``. - - A capacity commitment is a way to purchase compute capacity for - BigQuery jobs (in the form of slots) with some committed period of - usage. A capacity commitment resource exists as a child resource of - the admin project and location, e.g.: - ``projects/myproject/locations/US/capacityCommitments/id``. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'bigqueryreservation.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'bigqueryreservation.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if a ``channel`` instance is provided. - channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. 
It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) - - # Wrap messages. 
This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'bigqueryreservation.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def create_reservation(self) -> Callable[ - [gcbr_reservation.CreateReservationRequest], - gcbr_reservation.Reservation]: - r"""Return a callable for the create reservation method over gRPC. - - Creates a new reservation resource. - - Returns: - Callable[[~.CreateReservationRequest], - ~.Reservation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_reservation' not in self._stubs: - self._stubs['create_reservation'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.reservation.v1.ReservationService/CreateReservation', - request_serializer=gcbr_reservation.CreateReservationRequest.serialize, - response_deserializer=gcbr_reservation.Reservation.deserialize, - ) - return self._stubs['create_reservation'] - - @property - def list_reservations(self) -> Callable[ - [reservation.ListReservationsRequest], - reservation.ListReservationsResponse]: - r"""Return a callable for the list reservations method over gRPC. - - Lists all the reservations for the project in the - specified location. - - Returns: - Callable[[~.ListReservationsRequest], - ~.ListReservationsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_reservations' not in self._stubs: - self._stubs['list_reservations'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.reservation.v1.ReservationService/ListReservations', - request_serializer=reservation.ListReservationsRequest.serialize, - response_deserializer=reservation.ListReservationsResponse.deserialize, - ) - return self._stubs['list_reservations'] - - @property - def get_reservation(self) -> Callable[ - [reservation.GetReservationRequest], - reservation.Reservation]: - r"""Return a callable for the get reservation method over gRPC. - - Returns information about the reservation. - - Returns: - Callable[[~.GetReservationRequest], - ~.Reservation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_reservation' not in self._stubs: - self._stubs['get_reservation'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.reservation.v1.ReservationService/GetReservation', - request_serializer=reservation.GetReservationRequest.serialize, - response_deserializer=reservation.Reservation.deserialize, - ) - return self._stubs['get_reservation'] - - @property - def delete_reservation(self) -> Callable[ - [reservation.DeleteReservationRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete reservation method over gRPC. - - Deletes a reservation. Returns - ``google.rpc.Code.FAILED_PRECONDITION`` when reservation has - assignments. - - Returns: - Callable[[~.DeleteReservationRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_reservation' not in self._stubs: - self._stubs['delete_reservation'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.reservation.v1.ReservationService/DeleteReservation', - request_serializer=reservation.DeleteReservationRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_reservation'] - - @property - def update_reservation(self) -> Callable[ - [gcbr_reservation.UpdateReservationRequest], - gcbr_reservation.Reservation]: - r"""Return a callable for the update reservation method over gRPC. - - Updates an existing reservation resource. - - Returns: - Callable[[~.UpdateReservationRequest], - ~.Reservation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_reservation' not in self._stubs: - self._stubs['update_reservation'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.reservation.v1.ReservationService/UpdateReservation', - request_serializer=gcbr_reservation.UpdateReservationRequest.serialize, - response_deserializer=gcbr_reservation.Reservation.deserialize, - ) - return self._stubs['update_reservation'] - - @property - def failover_reservation(self) -> Callable[ - [reservation.FailoverReservationRequest], - reservation.Reservation]: - r"""Return a callable for the failover reservation method over gRPC. 
- - Fail over a reservation to the secondary location. The operation - should be done in the current secondary location, which will be - promoted to the new primary location for the reservation. - Attempting to failover a reservation in the current primary - location will fail with the error code - ``google.rpc.Code.FAILED_PRECONDITION``. - - Returns: - Callable[[~.FailoverReservationRequest], - ~.Reservation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'failover_reservation' not in self._stubs: - self._stubs['failover_reservation'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.reservation.v1.ReservationService/FailoverReservation', - request_serializer=reservation.FailoverReservationRequest.serialize, - response_deserializer=reservation.Reservation.deserialize, - ) - return self._stubs['failover_reservation'] - - @property - def create_capacity_commitment(self) -> Callable[ - [reservation.CreateCapacityCommitmentRequest], - reservation.CapacityCommitment]: - r"""Return a callable for the create capacity commitment method over gRPC. - - Creates a new capacity commitment resource. - - Returns: - Callable[[~.CreateCapacityCommitmentRequest], - ~.CapacityCommitment]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_capacity_commitment' not in self._stubs: - self._stubs['create_capacity_commitment'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.reservation.v1.ReservationService/CreateCapacityCommitment', - request_serializer=reservation.CreateCapacityCommitmentRequest.serialize, - response_deserializer=reservation.CapacityCommitment.deserialize, - ) - return self._stubs['create_capacity_commitment'] - - @property - def list_capacity_commitments(self) -> Callable[ - [reservation.ListCapacityCommitmentsRequest], - reservation.ListCapacityCommitmentsResponse]: - r"""Return a callable for the list capacity commitments method over gRPC. - - Lists all the capacity commitments for the admin - project. - - Returns: - Callable[[~.ListCapacityCommitmentsRequest], - ~.ListCapacityCommitmentsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_capacity_commitments' not in self._stubs: - self._stubs['list_capacity_commitments'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.reservation.v1.ReservationService/ListCapacityCommitments', - request_serializer=reservation.ListCapacityCommitmentsRequest.serialize, - response_deserializer=reservation.ListCapacityCommitmentsResponse.deserialize, - ) - return self._stubs['list_capacity_commitments'] - - @property - def get_capacity_commitment(self) -> Callable[ - [reservation.GetCapacityCommitmentRequest], - reservation.CapacityCommitment]: - r"""Return a callable for the get capacity commitment method over gRPC. - - Returns information about the capacity commitment. 
- - Returns: - Callable[[~.GetCapacityCommitmentRequest], - ~.CapacityCommitment]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_capacity_commitment' not in self._stubs: - self._stubs['get_capacity_commitment'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.reservation.v1.ReservationService/GetCapacityCommitment', - request_serializer=reservation.GetCapacityCommitmentRequest.serialize, - response_deserializer=reservation.CapacityCommitment.deserialize, - ) - return self._stubs['get_capacity_commitment'] - - @property - def delete_capacity_commitment(self) -> Callable[ - [reservation.DeleteCapacityCommitmentRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete capacity commitment method over gRPC. - - Deletes a capacity commitment. Attempting to delete capacity - commitment before its commitment_end_time will fail with the - error code ``google.rpc.Code.FAILED_PRECONDITION``. - - Returns: - Callable[[~.DeleteCapacityCommitmentRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_capacity_commitment' not in self._stubs: - self._stubs['delete_capacity_commitment'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.reservation.v1.ReservationService/DeleteCapacityCommitment', - request_serializer=reservation.DeleteCapacityCommitmentRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_capacity_commitment'] - - @property - def update_capacity_commitment(self) -> Callable[ - [reservation.UpdateCapacityCommitmentRequest], - reservation.CapacityCommitment]: - r"""Return a callable for the update capacity commitment method over gRPC. - - Updates an existing capacity commitment. - - Only ``plan`` and ``renewal_plan`` fields can be updated. - - Plan can only be changed to a plan of a longer commitment - period. Attempting to change to a plan with shorter commitment - period will fail with the error code - ``google.rpc.Code.FAILED_PRECONDITION``. - - Returns: - Callable[[~.UpdateCapacityCommitmentRequest], - ~.CapacityCommitment]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_capacity_commitment' not in self._stubs: - self._stubs['update_capacity_commitment'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.reservation.v1.ReservationService/UpdateCapacityCommitment', - request_serializer=reservation.UpdateCapacityCommitmentRequest.serialize, - response_deserializer=reservation.CapacityCommitment.deserialize, - ) - return self._stubs['update_capacity_commitment'] - - @property - def split_capacity_commitment(self) -> Callable[ - [reservation.SplitCapacityCommitmentRequest], - reservation.SplitCapacityCommitmentResponse]: - r"""Return a callable for the split capacity commitment method over gRPC. 
- - Splits capacity commitment into two commitments of the same plan - and ``commitment_end_time``. - - A common use case is to enable downgrading commitments. - - For example, in order to downgrade from 10000 slots to 8000, you - might split a 10000 capacity commitment into commitments of 2000 - and 8000. Then, you delete the first one after the commitment - end time passes. - - Returns: - Callable[[~.SplitCapacityCommitmentRequest], - ~.SplitCapacityCommitmentResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'split_capacity_commitment' not in self._stubs: - self._stubs['split_capacity_commitment'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.reservation.v1.ReservationService/SplitCapacityCommitment', - request_serializer=reservation.SplitCapacityCommitmentRequest.serialize, - response_deserializer=reservation.SplitCapacityCommitmentResponse.deserialize, - ) - return self._stubs['split_capacity_commitment'] - - @property - def merge_capacity_commitments(self) -> Callable[ - [reservation.MergeCapacityCommitmentsRequest], - reservation.CapacityCommitment]: - r"""Return a callable for the merge capacity commitments method over gRPC. - - Merges capacity commitments of the same plan into a single - commitment. - - The resulting capacity commitment has the greater - commitment_end_time out of the to-be-merged capacity - commitments. - - Attempting to merge capacity commitments of different plans will - fail with the error code - ``google.rpc.Code.FAILED_PRECONDITION``. - - Returns: - Callable[[~.MergeCapacityCommitmentsRequest], - ~.CapacityCommitment]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'merge_capacity_commitments' not in self._stubs: - self._stubs['merge_capacity_commitments'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.reservation.v1.ReservationService/MergeCapacityCommitments', - request_serializer=reservation.MergeCapacityCommitmentsRequest.serialize, - response_deserializer=reservation.CapacityCommitment.deserialize, - ) - return self._stubs['merge_capacity_commitments'] - - @property - def create_assignment(self) -> Callable[ - [reservation.CreateAssignmentRequest], - reservation.Assignment]: - r"""Return a callable for the create assignment method over gRPC. - - Creates an assignment object which allows the given project to - submit jobs of a certain type using slots from the specified - reservation. - - Currently a resource (project, folder, organization) can only - have one assignment per each (job_type, location) combination, - and that reservation will be used for all jobs of the matching - type. - - Different assignments can be created on different levels of the - projects, folders or organization hierarchy. During query - execution, the assignment is looked up at the project, folder - and organization levels in that order. The first assignment - found is applied to the query. - - When creating assignments, it does not matter if other - assignments exist at higher levels.
- - Example: - - - The organization ``organizationA`` contains two projects, - ``project1`` and ``project2``. - - Assignments for all three entities (``organizationA``, - ``project1``, and ``project2``) could all be created and - mapped to the same or different reservations. - - "None" assignments represent an absence of the assignment. - Projects assigned to None use on-demand pricing. To create a - "None" assignment, use "none" as a reservation_id in the parent. - Example parent: - ``projects/myproject/locations/US/reservations/none``. - - Returns ``google.rpc.Code.PERMISSION_DENIED`` if user does not - have 'bigquery.admin' permissions on the project using the - reservation and the project that owns this reservation. - - Returns ``google.rpc.Code.INVALID_ARGUMENT`` when location of - the assignment does not match location of the reservation. - - Returns: - Callable[[~.CreateAssignmentRequest], - ~.Assignment]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_assignment' not in self._stubs: - self._stubs['create_assignment'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.reservation.v1.ReservationService/CreateAssignment', - request_serializer=reservation.CreateAssignmentRequest.serialize, - response_deserializer=reservation.Assignment.deserialize, - ) - return self._stubs['create_assignment'] - - @property - def list_assignments(self) -> Callable[ - [reservation.ListAssignmentsRequest], - reservation.ListAssignmentsResponse]: - r"""Return a callable for the list assignments method over gRPC. - - Lists assignments. - - Only explicitly created assignments will be returned. - - Example: - - - Organization ``organizationA`` contains two projects, - ``project1`` and ``project2``. - - Reservation ``res1`` exists and was created previously. - - CreateAssignment was used previously to define the following - associations between entities and reservations: - ``<organizationA, res1>`` and ``<project1, res1>`` - - In this example, ListAssignments will just return the above two - assignments for reservation ``res1``, and no expansion/merge - will happen. - - The wildcard "-" can be used for reservations in the request. In - that case all assignments belonging to the specified project and - location will be listed. - - **Note** "-" cannot be used for projects or locations. - - Returns: - Callable[[~.ListAssignmentsRequest], - ~.ListAssignmentsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_assignments' not in self._stubs: - self._stubs['list_assignments'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.reservation.v1.ReservationService/ListAssignments', - request_serializer=reservation.ListAssignmentsRequest.serialize, - response_deserializer=reservation.ListAssignmentsResponse.deserialize, - ) - return self._stubs['list_assignments'] - - @property - def delete_assignment(self) -> Callable[ - [reservation.DeleteAssignmentRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete assignment method over gRPC. - - Deletes an assignment. No expansion will happen.
- - Example: - - - Organization ``organizationA`` contains two projects, - ``project1`` and ``project2``. - - Reservation ``res1`` exists and was created previously. - - CreateAssignment was used previously to define the following - associations between entities and reservations: - ``<organizationA, res1>`` and ``<project1, res1>`` - - In this example, deletion of the ``<organizationA, res1>`` - assignment won't affect the other assignment - ``<project1, res1>``. After said deletion, queries from - ``project1`` will still use ``res1`` while queries from - ``project2`` will switch to use on-demand mode. - - Returns: - Callable[[~.DeleteAssignmentRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_assignment' not in self._stubs: - self._stubs['delete_assignment'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.reservation.v1.ReservationService/DeleteAssignment', - request_serializer=reservation.DeleteAssignmentRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_assignment'] - - @property - def search_assignments(self) -> Callable[ - [reservation.SearchAssignmentsRequest], - reservation.SearchAssignmentsResponse]: - r"""Return a callable for the search assignments method over gRPC. - - Deprecated: Looks up assignments for a specified resource for a - particular region. If the request is about a project: - - 1. Assignments created on the project will be returned if they - exist. - 2. Otherwise assignments created on the closest ancestor will be - returned. - 3. Assignments for different JobTypes will all be returned. - - The same logic applies if the request is about a folder. - - If the request is about an organization, then assignments - created on the organization will be returned (organization - doesn't have ancestors). - - Compared to ListAssignments, there are some behavior - differences: - - 1. Permission on the assignee will be verified in this API. - 2. Hierarchy lookup (project->folder->organization) happens in - this API. - 3. Parent here is ``projects/*/locations/*``, instead of - ``projects/*/locations/*/reservations/*``. - - **Note** "-" cannot be used for projects or locations. - - Returns: - Callable[[~.SearchAssignmentsRequest], - ~.SearchAssignmentsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'search_assignments' not in self._stubs: - self._stubs['search_assignments'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.reservation.v1.ReservationService/SearchAssignments', - request_serializer=reservation.SearchAssignmentsRequest.serialize, - response_deserializer=reservation.SearchAssignmentsResponse.deserialize, - ) - return self._stubs['search_assignments'] - - @property - def search_all_assignments(self) -> Callable[ - [reservation.SearchAllAssignmentsRequest], - reservation.SearchAllAssignmentsResponse]: - r"""Return a callable for the search all assignments method over gRPC. - - Looks up assignments for a specified resource for a particular - region. If the request is about a project: - - 1. Assignments created on the project will be returned if they - exist. - 2.
Otherwise assignments created on the closest ancestor will be - returned. - 3. Assignments for different JobTypes will all be returned. - - The same logic applies if the request is about a folder. - - If the request is about an organization, then assignments - created on the organization will be returned (organization - doesn't have ancestors). - - Compared to ListAssignments, there are some behavior - differences: - - 1. Permission on the assignee will be verified in this API. - 2. Hierarchy lookup (project->folder->organization) happens in - this API. - 3. Parent here is ``projects/*/locations/*``, instead of - ``projects/*/locations/*/reservations/*``. - - Returns: - Callable[[~.SearchAllAssignmentsRequest], - ~.SearchAllAssignmentsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'search_all_assignments' not in self._stubs: - self._stubs['search_all_assignments'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.reservation.v1.ReservationService/SearchAllAssignments', - request_serializer=reservation.SearchAllAssignmentsRequest.serialize, - response_deserializer=reservation.SearchAllAssignmentsResponse.deserialize, - ) - return self._stubs['search_all_assignments'] - - @property - def move_assignment(self) -> Callable[ - [reservation.MoveAssignmentRequest], - reservation.Assignment]: - r"""Return a callable for the move assignment method over gRPC. - - Moves an assignment under a new reservation. - - This differs from removing an existing assignment and - recreating a new one by providing a transactional change - that ensures an assignee always has an associated - reservation. - - Returns: - Callable[[~.MoveAssignmentRequest], - ~.Assignment]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'move_assignment' not in self._stubs: - self._stubs['move_assignment'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.reservation.v1.ReservationService/MoveAssignment', - request_serializer=reservation.MoveAssignmentRequest.serialize, - response_deserializer=reservation.Assignment.deserialize, - ) - return self._stubs['move_assignment'] - - @property - def update_assignment(self) -> Callable[ - [reservation.UpdateAssignmentRequest], - reservation.Assignment]: - r"""Return a callable for the update assignment method over gRPC. - - Updates an existing assignment. - - Only the ``priority`` field can be updated. - - Returns: - Callable[[~.UpdateAssignmentRequest], - ~.Assignment]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each.
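- # Editor's note (illustrative, not generated code): the pattern below - # memoizes one multicallable per RPC, so repeated attribute access is cheap: - # - #   stub = transport.update_assignment          # built and cached on first access - #   assert stub is transport.update_assignment  # later accesses reuse the cache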
- if 'update_assignment' not in self._stubs: - self._stubs['update_assignment'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.reservation.v1.ReservationService/UpdateAssignment', - request_serializer=reservation.UpdateAssignmentRequest.serialize, - response_deserializer=reservation.Assignment.deserialize, - ) - return self._stubs['update_assignment'] - - @property - def get_bi_reservation(self) -> Callable[ - [reservation.GetBiReservationRequest], - reservation.BiReservation]: - r"""Return a callable for the get bi reservation method over gRPC. - - Retrieves a BI reservation. - - Returns: - Callable[[~.GetBiReservationRequest], - ~.BiReservation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_bi_reservation' not in self._stubs: - self._stubs['get_bi_reservation'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.reservation.v1.ReservationService/GetBiReservation', - request_serializer=reservation.GetBiReservationRequest.serialize, - response_deserializer=reservation.BiReservation.deserialize, - ) - return self._stubs['get_bi_reservation'] - - @property - def update_bi_reservation(self) -> Callable[ - [reservation.UpdateBiReservationRequest], - reservation.BiReservation]: - r"""Return a callable for the update bi reservation method over gRPC. - - Updates a BI reservation. - - Only fields specified in the ``field_mask`` are updated. - - A singleton BI reservation always exists with default size 0. In - order to reserve BI capacity it needs to be updated to an amount - greater than 0. In order to release BI capacity reservation size - must be set to 0. - - Returns: - Callable[[~.UpdateBiReservationRequest], - ~.BiReservation]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_bi_reservation' not in self._stubs: - self._stubs['update_bi_reservation'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.reservation.v1.ReservationService/UpdateBiReservation', - request_serializer=reservation.UpdateBiReservationRequest.serialize, - response_deserializer=reservation.BiReservation.deserialize, - ) - return self._stubs['update_bi_reservation'] - - def close(self): - self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'ReservationServiceGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/grpc_asyncio.py deleted file mode 100644 index f0dd6939280e..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,1303 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import inspect -import json -import pickle -import logging as std_logging -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import exceptions as core_exceptions -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.bigquery_reservation_v1.types import reservation -from google.cloud.bigquery_reservation_v1.types import reservation as gcbr_reservation -from google.protobuf import empty_pb2 # type: ignore -from .base import ReservationServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import ReservationServiceGrpcTransport - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER - async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": str(client_call_details.method), - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - response = await continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = await response.trailing_metadata() - # Convert the gRPC trailing metadata into a plain dict of str values - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = await response - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, -
"status": "OK", - } - _LOGGER.debug( - f"Received response to rpc {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": str(client_call_details.method), - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class ReservationServiceGrpcAsyncIOTransport(ReservationServiceTransport): - """gRPC AsyncIO backend transport for ReservationService. - - This API allows users to manage their BigQuery reservations. - - A reservation provides computational resource guarantees, in the - form of `slots `__, to - users. A slot is a unit of computational power in BigQuery, and - serves as the basic unit of parallelism. In a scan of a - multi-partitioned table, a single slot operates on a single - partition of the table. A reservation resource exists as a child - resource of the admin project and location, e.g.: - ``projects/myproject/locations/US/reservations/reservationName``. - - A capacity commitment is a way to purchase compute capacity for - BigQuery jobs (in the form of slots) with some committed period of - usage. A capacity commitment resource exists as a child resource of - the admin project and location, e.g.: - ``projects/myproject/locations/US/capacityCommitments/id``. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'bigqueryreservation.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. 
- """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'bigqueryreservation.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'bigqueryreservation.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, aio.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def create_reservation(self) -> Callable[ - [gcbr_reservation.CreateReservationRequest], - Awaitable[gcbr_reservation.Reservation]]: - r"""Return a callable for the create reservation method over gRPC. - - Creates a new reservation resource. 
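- - Example (editor's illustrative sketch; resource names are assumptions): - - request = gcbr_reservation.CreateReservationRequest( - parent='projects/myproject/locations/US', - reservation_id='prod', - ) - created = await transport.create_reservation(request)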
- - Returns: - Callable[[~.CreateReservationRequest], - Awaitable[~.Reservation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_reservation' not in self._stubs: - self._stubs['create_reservation'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.reservation.v1.ReservationService/CreateReservation', - request_serializer=gcbr_reservation.CreateReservationRequest.serialize, - response_deserializer=gcbr_reservation.Reservation.deserialize, - ) - return self._stubs['create_reservation'] - - @property - def list_reservations(self) -> Callable[ - [reservation.ListReservationsRequest], - Awaitable[reservation.ListReservationsResponse]]: - r"""Return a callable for the list reservations method over gRPC. - - Lists all the reservations for the project in the - specified location. - - Returns: - Callable[[~.ListReservationsRequest], - Awaitable[~.ListReservationsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_reservations' not in self._stubs: - self._stubs['list_reservations'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.reservation.v1.ReservationService/ListReservations', - request_serializer=reservation.ListReservationsRequest.serialize, - response_deserializer=reservation.ListReservationsResponse.deserialize, - ) - return self._stubs['list_reservations'] - - @property - def get_reservation(self) -> Callable[ - [reservation.GetReservationRequest], - Awaitable[reservation.Reservation]]: - r"""Return a callable for the get reservation method over gRPC. - - Returns information about the reservation. - - Returns: - Callable[[~.GetReservationRequest], - Awaitable[~.Reservation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_reservation' not in self._stubs: - self._stubs['get_reservation'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.reservation.v1.ReservationService/GetReservation', - request_serializer=reservation.GetReservationRequest.serialize, - response_deserializer=reservation.Reservation.deserialize, - ) - return self._stubs['get_reservation'] - - @property - def delete_reservation(self) -> Callable[ - [reservation.DeleteReservationRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete reservation method over gRPC. - - Deletes a reservation. Returns - ``google.rpc.Code.FAILED_PRECONDITION`` when reservation has - assignments. - - Returns: - Callable[[~.DeleteReservationRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
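- # Editor's illustrative sketch (not generated code): the FAILED_PRECONDITION - # documented above can be handled at the wrapped-client level, e.g.: - # - #   from google.api_core import exceptions as core_exceptions - #   try: - #       await client.delete_reservation(name=reservation_name) - #   except core_exceptions.FailedPrecondition: - #       ...  # remove the reservation's assignments first, then retry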
- if 'delete_reservation' not in self._stubs: - self._stubs['delete_reservation'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.reservation.v1.ReservationService/DeleteReservation', - request_serializer=reservation.DeleteReservationRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_reservation'] - - @property - def update_reservation(self) -> Callable[ - [gcbr_reservation.UpdateReservationRequest], - Awaitable[gcbr_reservation.Reservation]]: - r"""Return a callable for the update reservation method over gRPC. - - Updates an existing reservation resource. - - Returns: - Callable[[~.UpdateReservationRequest], - Awaitable[~.Reservation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_reservation' not in self._stubs: - self._stubs['update_reservation'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.reservation.v1.ReservationService/UpdateReservation', - request_serializer=gcbr_reservation.UpdateReservationRequest.serialize, - response_deserializer=gcbr_reservation.Reservation.deserialize, - ) - return self._stubs['update_reservation'] - - @property - def failover_reservation(self) -> Callable[ - [reservation.FailoverReservationRequest], - Awaitable[reservation.Reservation]]: - r"""Return a callable for the failover reservation method over gRPC. - - Fail over a reservation to the secondary location. The operation - should be done in the current secondary location, which will be - promoted to the new primary location for the reservation. - Attempting to fail over a reservation in the current primary - location will fail with the error code - ``google.rpc.Code.FAILED_PRECONDITION``. - - Returns: - Callable[[~.FailoverReservationRequest], - Awaitable[~.Reservation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'failover_reservation' not in self._stubs: - self._stubs['failover_reservation'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.reservation.v1.ReservationService/FailoverReservation', - request_serializer=reservation.FailoverReservationRequest.serialize, - response_deserializer=reservation.Reservation.deserialize, - ) - return self._stubs['failover_reservation'] - - @property - def create_capacity_commitment(self) -> Callable[ - [reservation.CreateCapacityCommitmentRequest], - Awaitable[reservation.CapacityCommitment]]: - r"""Return a callable for the create capacity commitment method over gRPC. - - Creates a new capacity commitment resource. - - Returns: - Callable[[~.CreateCapacityCommitmentRequest], - Awaitable[~.CapacityCommitment]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each.
- if 'create_capacity_commitment' not in self._stubs: - self._stubs['create_capacity_commitment'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.reservation.v1.ReservationService/CreateCapacityCommitment', - request_serializer=reservation.CreateCapacityCommitmentRequest.serialize, - response_deserializer=reservation.CapacityCommitment.deserialize, - ) - return self._stubs['create_capacity_commitment'] - - @property - def list_capacity_commitments(self) -> Callable[ - [reservation.ListCapacityCommitmentsRequest], - Awaitable[reservation.ListCapacityCommitmentsResponse]]: - r"""Return a callable for the list capacity commitments method over gRPC. - - Lists all the capacity commitments for the admin - project. - - Returns: - Callable[[~.ListCapacityCommitmentsRequest], - Awaitable[~.ListCapacityCommitmentsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_capacity_commitments' not in self._stubs: - self._stubs['list_capacity_commitments'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.reservation.v1.ReservationService/ListCapacityCommitments', - request_serializer=reservation.ListCapacityCommitmentsRequest.serialize, - response_deserializer=reservation.ListCapacityCommitmentsResponse.deserialize, - ) - return self._stubs['list_capacity_commitments'] - - @property - def get_capacity_commitment(self) -> Callable[ - [reservation.GetCapacityCommitmentRequest], - Awaitable[reservation.CapacityCommitment]]: - r"""Return a callable for the get capacity commitment method over gRPC. - - Returns information about the capacity commitment. - - Returns: - Callable[[~.GetCapacityCommitmentRequest], - Awaitable[~.CapacityCommitment]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_capacity_commitment' not in self._stubs: - self._stubs['get_capacity_commitment'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.reservation.v1.ReservationService/GetCapacityCommitment', - request_serializer=reservation.GetCapacityCommitmentRequest.serialize, - response_deserializer=reservation.CapacityCommitment.deserialize, - ) - return self._stubs['get_capacity_commitment'] - - @property - def delete_capacity_commitment(self) -> Callable[ - [reservation.DeleteCapacityCommitmentRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete capacity commitment method over gRPC. - - Deletes a capacity commitment. Attempting to delete capacity - commitment before its commitment_end_time will fail with the - error code ``google.rpc.Code.FAILED_PRECONDITION``. - - Returns: - Callable[[~.DeleteCapacityCommitmentRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_capacity_commitment' not in self._stubs: - self._stubs['delete_capacity_commitment'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.reservation.v1.ReservationService/DeleteCapacityCommitment', - request_serializer=reservation.DeleteCapacityCommitmentRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_capacity_commitment'] - - @property - def update_capacity_commitment(self) -> Callable[ - [reservation.UpdateCapacityCommitmentRequest], - Awaitable[reservation.CapacityCommitment]]: - r"""Return a callable for the update capacity commitment method over gRPC. - - Updates an existing capacity commitment. - - Only ``plan`` and ``renewal_plan`` fields can be updated. - - Plan can only be changed to a plan of a longer commitment - period. Attempting to change to a plan with shorter commitment - period will fail with the error code - ``google.rpc.Code.FAILED_PRECONDITION``. - - Returns: - Callable[[~.UpdateCapacityCommitmentRequest], - Awaitable[~.CapacityCommitment]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_capacity_commitment' not in self._stubs: - self._stubs['update_capacity_commitment'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.reservation.v1.ReservationService/UpdateCapacityCommitment', - request_serializer=reservation.UpdateCapacityCommitmentRequest.serialize, - response_deserializer=reservation.CapacityCommitment.deserialize, - ) - return self._stubs['update_capacity_commitment'] - - @property - def split_capacity_commitment(self) -> Callable[ - [reservation.SplitCapacityCommitmentRequest], - Awaitable[reservation.SplitCapacityCommitmentResponse]]: - r"""Return a callable for the split capacity commitment method over gRPC. - - Splits capacity commitment into two commitments of the same plan - and ``commitment_end_time``. - - A common use case is to enable downgrading commitments. - - For example, in order to downgrade from 10000 slots to 8000, you - might split a 10000 capacity commitment into commitments of 2000 - and 8000. Then, you delete the first one after the commitment - end time passes. - - Returns: - Callable[[~.SplitCapacityCommitmentRequest], - Awaitable[~.SplitCapacityCommitmentResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'split_capacity_commitment' not in self._stubs: - self._stubs['split_capacity_commitment'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.reservation.v1.ReservationService/SplitCapacityCommitment', - request_serializer=reservation.SplitCapacityCommitmentRequest.serialize, - response_deserializer=reservation.SplitCapacityCommitmentResponse.deserialize, - ) - return self._stubs['split_capacity_commitment'] - - @property - def merge_capacity_commitments(self) -> Callable[ - [reservation.MergeCapacityCommitmentsRequest], - Awaitable[reservation.CapacityCommitment]]: - r"""Return a callable for the merge capacity commitments method over gRPC. - - Merges capacity commitments of the same plan into a single - commitment.
- - The resulting capacity commitment has the greater - commitment_end_time out of the to-be-merged capacity - commitments. - - Attempting to merge capacity commitments of different plans will - fail with the error code - ``google.rpc.Code.FAILED_PRECONDITION``. - - Returns: - Callable[[~.MergeCapacityCommitmentsRequest], - Awaitable[~.CapacityCommitment]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'merge_capacity_commitments' not in self._stubs: - self._stubs['merge_capacity_commitments'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.reservation.v1.ReservationService/MergeCapacityCommitments', - request_serializer=reservation.MergeCapacityCommitmentsRequest.serialize, - response_deserializer=reservation.CapacityCommitment.deserialize, - ) - return self._stubs['merge_capacity_commitments'] - - @property - def create_assignment(self) -> Callable[ - [reservation.CreateAssignmentRequest], - Awaitable[reservation.Assignment]]: - r"""Return a callable for the create assignment method over gRPC. - - Creates an assignment object which allows the given project to - submit jobs of a certain type using slots from the specified - reservation. - - Currently a resource (project, folder, organization) can only - have one assignment per each (job_type, location) combination, - and that reservation will be used for all jobs of the matching - type. - - Different assignments can be created on different levels of the - projects, folders or organization hierarchy. During query - execution, the assignment is looked up at the project, folder - and organization levels in that order. The first assignment - found is applied to the query. - - When creating assignments, it does not matter if other - assignments exist at higher levels. - - Example: - - - The organization ``organizationA`` contains two projects, - ``project1`` and ``project2``. - - Assignments for all three entities (``organizationA``, - ``project1``, and ``project2``) could all be created and - mapped to the same or different reservations. - - "None" assignments represent an absence of the assignment. - Projects assigned to None use on-demand pricing. To create a - "None" assignment, use "none" as a reservation_id in the parent. - Example parent: - ``projects/myproject/locations/US/reservations/none``. - - Returns ``google.rpc.Code.PERMISSION_DENIED`` if user does not - have 'bigquery.admin' permissions on the project using the - reservation and the project that owns this reservation. - - Returns ``google.rpc.Code.INVALID_ARGUMENT`` when location of - the assignment does not match location of the reservation. - - Returns: - Callable[[~.CreateAssignmentRequest], - Awaitable[~.Assignment]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each.
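- # Editor's illustrative sketch (not generated code) of the "None" assignment - # described above; identifiers are assumptions: - # - #   request = reservation.CreateAssignmentRequest( - #       parent='projects/myproject/locations/US/reservations/none', - #       assignment=reservation.Assignment(assignee='projects/project1'), - #   ) - #   assignment = await transport.create_assignment(request)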
- if 'create_assignment' not in self._stubs: - self._stubs['create_assignment'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.reservation.v1.ReservationService/CreateAssignment', - request_serializer=reservation.CreateAssignmentRequest.serialize, - response_deserializer=reservation.Assignment.deserialize, - ) - return self._stubs['create_assignment'] - - @property - def list_assignments(self) -> Callable[ - [reservation.ListAssignmentsRequest], - Awaitable[reservation.ListAssignmentsResponse]]: - r"""Return a callable for the list assignments method over gRPC. - - Lists assignments. - - Only explicitly created assignments will be returned. - - Example: - - - Organization ``organizationA`` contains two projects, - ``project1`` and ``project2``. - - Reservation ``res1`` exists and was created previously. - - CreateAssignment was used previously to define the following - associations between entities and reservations: - ``<organizationA, res1>`` and ``<project1, res1>`` - - In this example, ListAssignments will just return the above two - assignments for reservation ``res1``, and no expansion/merge - will happen. - - The wildcard "-" can be used for reservations in the request. In - that case all assignments belonging to the specified project and - location will be listed. - - **Note** "-" cannot be used for projects or locations. - - Returns: - Callable[[~.ListAssignmentsRequest], - Awaitable[~.ListAssignmentsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_assignments' not in self._stubs: - self._stubs['list_assignments'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.reservation.v1.ReservationService/ListAssignments', - request_serializer=reservation.ListAssignmentsRequest.serialize, - response_deserializer=reservation.ListAssignmentsResponse.deserialize, - ) - return self._stubs['list_assignments'] - - @property - def delete_assignment(self) -> Callable[ - [reservation.DeleteAssignmentRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete assignment method over gRPC. - - Deletes an assignment. No expansion will happen. - - Example: - - - Organization ``organizationA`` contains two projects, - ``project1`` and ``project2``. - - Reservation ``res1`` exists and was created previously. - - CreateAssignment was used previously to define the following - associations between entities and reservations: - ``<organizationA, res1>`` and ``<project1, res1>`` - - In this example, deletion of the ``<organizationA, res1>`` - assignment won't affect the other assignment - ``<project1, res1>``. After said deletion, queries from - ``project1`` will still use ``res1`` while queries from - ``project2`` will switch to use on-demand mode. - - Returns: - Callable[[~.DeleteAssignmentRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each.
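- # Editor's illustrative sketch (not generated code): the "-" wildcard - # documented for ``list_assignments`` above lists assignments across every - # reservation in a location; identifiers are assumptions: - # - #   request = reservation.ListAssignmentsRequest( - #       parent='projects/myproject/locations/US/reservations/-') - #   response = await transport.list_assignments(request)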
- if 'delete_assignment' not in self._stubs: - self._stubs['delete_assignment'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.reservation.v1.ReservationService/DeleteAssignment', - request_serializer=reservation.DeleteAssignmentRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_assignment'] - - @property - def search_assignments(self) -> Callable[ - [reservation.SearchAssignmentsRequest], - Awaitable[reservation.SearchAssignmentsResponse]]: - r"""Return a callable for the search assignments method over gRPC. - - Deprecated: Looks up assignments for a specified resource for a - particular region. If the request is about a project: - - 1. Assignments created on the project will be returned if they - exist. - 2. Otherwise assignments created on the closest ancestor will be - returned. - 3. Assignments for different JobTypes will all be returned. - - The same logic applies if the request is about a folder. - - If the request is about an organization, then assignments - created on the organization will be returned (organization - doesn't have ancestors). - - Compared to ListAssignments, there are some behavior - differences: - - 1. Permission on the assignee will be verified in this API. - 2. Hierarchy lookup (project->folder->organization) happens in - this API. - 3. Parent here is ``projects/*/locations/*``, instead of - ``projects/*/locations/*/reservations/*``. - - **Note** "-" cannot be used for projects or locations. - - Returns: - Callable[[~.SearchAssignmentsRequest], - Awaitable[~.SearchAssignmentsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'search_assignments' not in self._stubs: - self._stubs['search_assignments'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.reservation.v1.ReservationService/SearchAssignments', - request_serializer=reservation.SearchAssignmentsRequest.serialize, - response_deserializer=reservation.SearchAssignmentsResponse.deserialize, - ) - return self._stubs['search_assignments'] - - @property - def search_all_assignments(self) -> Callable[ - [reservation.SearchAllAssignmentsRequest], - Awaitable[reservation.SearchAllAssignmentsResponse]]: - r"""Return a callable for the search all assignments method over gRPC. - - Looks up assignments for a specified resource for a particular - region. If the request is about a project: - - 1. Assignments created on the project will be returned if they - exist. - 2. Otherwise assignments created on the closest ancestor will be - returned. - 3. Assignments for different JobTypes will all be returned. - - The same logic applies if the request is about a folder. - - If the request is about an organization, then assignments - created on the organization will be returned (organization - doesn't have ancestors). - - Compared to ListAssignments, there are some behavior - differences: - - 1. Permission on the assignee will be verified in this API. - 2. Hierarchy lookup (project->folder->organization) happens in - this API. - 3. Parent here is ``projects/*/locations/*``, instead of - ``projects/*/locations/*/reservations/*``. - - Returns: - Callable[[~.SearchAllAssignmentsRequest], - Awaitable[~.SearchAllAssignmentsResponse]]: - A function that, when called, will call the underlying RPC - on the server.
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'search_all_assignments' not in self._stubs: - self._stubs['search_all_assignments'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.reservation.v1.ReservationService/SearchAllAssignments', - request_serializer=reservation.SearchAllAssignmentsRequest.serialize, - response_deserializer=reservation.SearchAllAssignmentsResponse.deserialize, - ) - return self._stubs['search_all_assignments'] - - @property - def move_assignment(self) -> Callable[ - [reservation.MoveAssignmentRequest], - Awaitable[reservation.Assignment]]: - r"""Return a callable for the move assignment method over gRPC. - - Moves an assignment under a new reservation. - - This differs from removing an existing assignment and - recreating a new one by providing a transactional change - that ensures an assignee always has an associated - reservation. - - Returns: - Callable[[~.MoveAssignmentRequest], - Awaitable[~.Assignment]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'move_assignment' not in self._stubs: - self._stubs['move_assignment'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.reservation.v1.ReservationService/MoveAssignment', - request_serializer=reservation.MoveAssignmentRequest.serialize, - response_deserializer=reservation.Assignment.deserialize, - ) - return self._stubs['move_assignment'] - - @property - def update_assignment(self) -> Callable[ - [reservation.UpdateAssignmentRequest], - Awaitable[reservation.Assignment]]: - r"""Return a callable for the update assignment method over gRPC. - - Updates an existing assignment. - - Only the ``priority`` field can be updated. - - Returns: - Callable[[~.UpdateAssignmentRequest], - Awaitable[~.Assignment]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_assignment' not in self._stubs: - self._stubs['update_assignment'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.reservation.v1.ReservationService/UpdateAssignment', - request_serializer=reservation.UpdateAssignmentRequest.serialize, - response_deserializer=reservation.Assignment.deserialize, - ) - return self._stubs['update_assignment'] - - @property - def get_bi_reservation(self) -> Callable[ - [reservation.GetBiReservationRequest], - Awaitable[reservation.BiReservation]]: - r"""Return a callable for the get bi reservation method over gRPC. - - Retrieves a BI reservation. - - Returns: - Callable[[~.GetBiReservationRequest], - Awaitable[~.BiReservation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_bi_reservation' not in self._stubs: - self._stubs['get_bi_reservation'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.reservation.v1.ReservationService/GetBiReservation', - request_serializer=reservation.GetBiReservationRequest.serialize, - response_deserializer=reservation.BiReservation.deserialize, - ) - return self._stubs['get_bi_reservation'] - - @property - def update_bi_reservation(self) -> Callable[ - [reservation.UpdateBiReservationRequest], - Awaitable[reservation.BiReservation]]: - r"""Return a callable for the update bi reservation method over gRPC. - - Updates a BI reservation. - - Only fields specified in the ``field_mask`` are updated. - - A singleton BI reservation always exists with default size 0. In - order to reserve BI capacity it needs to be updated to an amount - greater than 0. In order to release BI capacity reservation size - must be set to 0. - - Returns: - Callable[[~.UpdateBiReservationRequest], - Awaitable[~.BiReservation]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_bi_reservation' not in self._stubs: - self._stubs['update_bi_reservation'] = self._logged_channel.unary_unary( - '/google.cloud.bigquery.reservation.v1.ReservationService/UpdateBiReservation', - request_serializer=reservation.UpdateBiReservationRequest.serialize, - response_deserializer=reservation.BiReservation.deserialize, - ) - return self._stubs['update_bi_reservation'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.create_reservation: self._wrap_method( - self.create_reservation, - default_timeout=300.0, - client_info=client_info, - ), - self.list_reservations: self._wrap_method( - self.list_reservations, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.get_reservation: self._wrap_method( - self.get_reservation, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.delete_reservation: self._wrap_method( - self.delete_reservation, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.update_reservation: self._wrap_method( - self.update_reservation, - default_timeout=300.0, - client_info=client_info, - ), - self.failover_reservation: self._wrap_method( - self.failover_reservation, - default_timeout=300.0, - client_info=client_info, - ), - self.create_capacity_commitment: self._wrap_method( - self.create_capacity_commitment, - default_timeout=300.0, - client_info=client_info, - ), - self.list_capacity_commitments: self._wrap_method( - self.list_capacity_commitments, - 
default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.get_capacity_commitment: self._wrap_method( - self.get_capacity_commitment, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.delete_capacity_commitment: self._wrap_method( - self.delete_capacity_commitment, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.update_capacity_commitment: self._wrap_method( - self.update_capacity_commitment, - default_timeout=300.0, - client_info=client_info, - ), - self.split_capacity_commitment: self._wrap_method( - self.split_capacity_commitment, - default_timeout=300.0, - client_info=client_info, - ), - self.merge_capacity_commitments: self._wrap_method( - self.merge_capacity_commitments, - default_timeout=300.0, - client_info=client_info, - ), - self.create_assignment: self._wrap_method( - self.create_assignment, - default_timeout=300.0, - client_info=client_info, - ), - self.list_assignments: self._wrap_method( - self.list_assignments, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.delete_assignment: self._wrap_method( - self.delete_assignment, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.search_assignments: self._wrap_method( - self.search_assignments, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.search_all_assignments: self._wrap_method( - self.search_all_assignments, - default_timeout=None, - client_info=client_info, - ), - self.move_assignment: self._wrap_method( - self.move_assignment, - default_timeout=300.0, - client_info=client_info, - ), - self.update_assignment: self._wrap_method( - self.update_assignment, - default_timeout=None, - client_info=client_info, - ), - self.get_bi_reservation: self._wrap_method( - self.get_bi_reservation, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.update_bi_reservation: self._wrap_method( - self.update_bi_reservation, - default_timeout=300.0, - client_info=client_info, - ), - } - - def 
_wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - -__all__ = ( - 'ReservationServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/rest.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/rest.py deleted file mode 100644 index 3d9f2b3bcb0c..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/rest.py +++ /dev/null @@ -1,3915 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import logging -import json # type: ignore - -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 - -from google.protobuf import json_format - -from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - - -from google.cloud.bigquery_reservation_v1.types import reservation -from google.cloud.bigquery_reservation_v1.types import reservation as gcbr_reservation -from google.protobuf import empty_pb2 # type: ignore - - -from .rest_base import _BaseReservationServiceRestTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = logging.getLogger(__name__) - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=f"requests@{requests_version}", -) - - -class ReservationServiceRestInterceptor: - """Interceptor for ReservationService. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. 
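Before the REST interceptor discussion continues: the `_prep_wrapped_messages` table that closes the async transport above pins the same retry policy onto most read methods. A sketch of what that policy amounts to, and how a caller can override it per invocation; imports mirror the ones the async transport module already uses.

.. code-block:: python

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry_async as retries

    # Exponential backoff: 0.1s first delay, x1.3 growth, capped at 60s
    # per sleep, giving up after a 300s overall deadline; retried only on
    # DeadlineExceeded and ServiceUnavailable, matching the table above.
    default_retry = retries.AsyncRetry(
        initial=0.1,
        maximum=60.0,
        multiplier=1.3,
        predicate=retries.if_exception_type(
            core_exceptions.DeadlineExceeded,
            core_exceptions.ServiceUnavailable,
        ),
        deadline=300.0,
    )

    # Per-call override through any generated client method, e.g.:
    #   await client.get_reservation(request=req, retry=default_retry, timeout=120.0)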
- Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the ReservationServiceRestTransport. - - .. code-block:: python - class MyCustomReservationServiceInterceptor(ReservationServiceRestInterceptor): - def pre_create_assignment(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_assignment(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_capacity_commitment(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_capacity_commitment(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_reservation(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_reservation(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_assignment(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_delete_capacity_commitment(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_delete_reservation(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_failover_reservation(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_failover_reservation(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_bi_reservation(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_bi_reservation(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_capacity_commitment(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_capacity_commitment(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_reservation(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_reservation(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_assignments(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_assignments(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_capacity_commitments(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_capacity_commitments(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_reservations(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_reservations(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_merge_capacity_commitments(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_merge_capacity_commitments(self, response): - logging.log(f"Received response: {response}") - return response - - 
def pre_move_assignment(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_move_assignment(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_search_all_assignments(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_search_all_assignments(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_search_assignments(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_search_assignments(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_split_capacity_commitment(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_split_capacity_commitment(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_assignment(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_assignment(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_bi_reservation(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_bi_reservation(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_capacity_commitment(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_capacity_commitment(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_reservation(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_reservation(self, response): - logging.log(f"Received response: {response}") - return response - - transport = ReservationServiceRestTransport(interceptor=MyCustomReservationServiceInterceptor()) - client = ReservationServiceClient(transport=transport) - - - """ - def pre_create_assignment(self, request: reservation.CreateAssignmentRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[reservation.CreateAssignmentRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_assignment - - Override in a subclass to manipulate the request or metadata - before they are sent to the ReservationService server. - """ - return request, metadata - - def post_create_assignment(self, response: reservation.Assignment) -> reservation.Assignment: - """Post-rpc interceptor for create_assignment - - DEPRECATED. Please use the `post_create_assignment_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the ReservationService server but before - it is returned to user code. This `post_create_assignment` interceptor runs - before the `post_create_assignment_with_metadata` interceptor. - """ - return response - - def post_create_assignment_with_metadata(self, response: reservation.Assignment, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[reservation.Assignment, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_assignment - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the ReservationService server but before it is returned to user code. 
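Complementing the docstring example above, a short sketch of an interceptor that adds a tracing header on the way out and inspects response metadata on the way back; the header name and values are invented.

.. code-block:: python

    class TracingInterceptor(ReservationServiceRestInterceptor):
        def pre_create_reservation(self, request, metadata):
            # Metadata is a sequence of (key, value) pairs; extend a copy
            # rather than mutating the sequence passed in.
            metadata = list(metadata) + [("x-example-trace", "create-reservation")]
            return request, metadata

        def post_create_reservation_with_metadata(self, response, metadata):
            # Response headers arrive here as (key, value) pairs.
            for key, value in metadata:
                if key.lower() == "content-type":
                    print(f"server replied with {value}")
            return response, metadata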
- - We recommend only using this `post_create_assignment_with_metadata` - interceptor in new development instead of the `post_create_assignment` interceptor. - When both interceptors are used, this `post_create_assignment_with_metadata` interceptor runs after the - `post_create_assignment` interceptor. The (possibly modified) response returned by - `post_create_assignment` will be passed to - `post_create_assignment_with_metadata`. - """ - return response, metadata - - def pre_create_capacity_commitment(self, request: reservation.CreateCapacityCommitmentRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[reservation.CreateCapacityCommitmentRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_capacity_commitment - - Override in a subclass to manipulate the request or metadata - before they are sent to the ReservationService server. - """ - return request, metadata - - def post_create_capacity_commitment(self, response: reservation.CapacityCommitment) -> reservation.CapacityCommitment: - """Post-rpc interceptor for create_capacity_commitment - - DEPRECATED. Please use the `post_create_capacity_commitment_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the ReservationService server but before - it is returned to user code. This `post_create_capacity_commitment` interceptor runs - before the `post_create_capacity_commitment_with_metadata` interceptor. - """ - return response - - def post_create_capacity_commitment_with_metadata(self, response: reservation.CapacityCommitment, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[reservation.CapacityCommitment, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_capacity_commitment - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the ReservationService server but before it is returned to user code. - - We recommend only using this `post_create_capacity_commitment_with_metadata` - interceptor in new development instead of the `post_create_capacity_commitment` interceptor. - When both interceptors are used, this `post_create_capacity_commitment_with_metadata` interceptor runs after the - `post_create_capacity_commitment` interceptor. The (possibly modified) response returned by - `post_create_capacity_commitment` will be passed to - `post_create_capacity_commitment_with_metadata`. - """ - return response, metadata - - def pre_create_reservation(self, request: gcbr_reservation.CreateReservationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gcbr_reservation.CreateReservationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_reservation - - Override in a subclass to manipulate the request or metadata - before they are sent to the ReservationService server. - """ - return request, metadata - - def post_create_reservation(self, response: gcbr_reservation.Reservation) -> gcbr_reservation.Reservation: - """Post-rpc interceptor for create_reservation - - DEPRECATED. Please use the `post_create_reservation_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the ReservationService server but before - it is returned to user code. This `post_create_reservation` interceptor runs - before the `post_create_reservation_with_metadata` interceptor. 
- """ - return response - - def post_create_reservation_with_metadata(self, response: gcbr_reservation.Reservation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gcbr_reservation.Reservation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_reservation - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the ReservationService server but before it is returned to user code. - - We recommend only using this `post_create_reservation_with_metadata` - interceptor in new development instead of the `post_create_reservation` interceptor. - When both interceptors are used, this `post_create_reservation_with_metadata` interceptor runs after the - `post_create_reservation` interceptor. The (possibly modified) response returned by - `post_create_reservation` will be passed to - `post_create_reservation_with_metadata`. - """ - return response, metadata - - def pre_delete_assignment(self, request: reservation.DeleteAssignmentRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[reservation.DeleteAssignmentRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_assignment - - Override in a subclass to manipulate the request or metadata - before they are sent to the ReservationService server. - """ - return request, metadata - - def pre_delete_capacity_commitment(self, request: reservation.DeleteCapacityCommitmentRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[reservation.DeleteCapacityCommitmentRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_capacity_commitment - - Override in a subclass to manipulate the request or metadata - before they are sent to the ReservationService server. - """ - return request, metadata - - def pre_delete_reservation(self, request: reservation.DeleteReservationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[reservation.DeleteReservationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_reservation - - Override in a subclass to manipulate the request or metadata - before they are sent to the ReservationService server. - """ - return request, metadata - - def pre_failover_reservation(self, request: reservation.FailoverReservationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[reservation.FailoverReservationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for failover_reservation - - Override in a subclass to manipulate the request or metadata - before they are sent to the ReservationService server. - """ - return request, metadata - - def post_failover_reservation(self, response: reservation.Reservation) -> reservation.Reservation: - """Post-rpc interceptor for failover_reservation - - DEPRECATED. Please use the `post_failover_reservation_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the ReservationService server but before - it is returned to user code. This `post_failover_reservation` interceptor runs - before the `post_failover_reservation_with_metadata` interceptor. 
- """ - return response - - def post_failover_reservation_with_metadata(self, response: reservation.Reservation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[reservation.Reservation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for failover_reservation - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the ReservationService server but before it is returned to user code. - - We recommend only using this `post_failover_reservation_with_metadata` - interceptor in new development instead of the `post_failover_reservation` interceptor. - When both interceptors are used, this `post_failover_reservation_with_metadata` interceptor runs after the - `post_failover_reservation` interceptor. The (possibly modified) response returned by - `post_failover_reservation` will be passed to - `post_failover_reservation_with_metadata`. - """ - return response, metadata - - def pre_get_bi_reservation(self, request: reservation.GetBiReservationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[reservation.GetBiReservationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_bi_reservation - - Override in a subclass to manipulate the request or metadata - before they are sent to the ReservationService server. - """ - return request, metadata - - def post_get_bi_reservation(self, response: reservation.BiReservation) -> reservation.BiReservation: - """Post-rpc interceptor for get_bi_reservation - - DEPRECATED. Please use the `post_get_bi_reservation_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the ReservationService server but before - it is returned to user code. This `post_get_bi_reservation` interceptor runs - before the `post_get_bi_reservation_with_metadata` interceptor. - """ - return response - - def post_get_bi_reservation_with_metadata(self, response: reservation.BiReservation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[reservation.BiReservation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_bi_reservation - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the ReservationService server but before it is returned to user code. - - We recommend only using this `post_get_bi_reservation_with_metadata` - interceptor in new development instead of the `post_get_bi_reservation` interceptor. - When both interceptors are used, this `post_get_bi_reservation_with_metadata` interceptor runs after the - `post_get_bi_reservation` interceptor. The (possibly modified) response returned by - `post_get_bi_reservation` will be passed to - `post_get_bi_reservation_with_metadata`. - """ - return response, metadata - - def pre_get_capacity_commitment(self, request: reservation.GetCapacityCommitmentRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[reservation.GetCapacityCommitmentRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_capacity_commitment - - Override in a subclass to manipulate the request or metadata - before they are sent to the ReservationService server. - """ - return request, metadata - - def post_get_capacity_commitment(self, response: reservation.CapacityCommitment) -> reservation.CapacityCommitment: - """Post-rpc interceptor for get_capacity_commitment - - DEPRECATED. Please use the `post_get_capacity_commitment_with_metadata` - interceptor instead. 
- - Override in a subclass to read or manipulate the response - after it is returned by the ReservationService server but before - it is returned to user code. This `post_get_capacity_commitment` interceptor runs - before the `post_get_capacity_commitment_with_metadata` interceptor. - """ - return response - - def post_get_capacity_commitment_with_metadata(self, response: reservation.CapacityCommitment, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[reservation.CapacityCommitment, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_capacity_commitment - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the ReservationService server but before it is returned to user code. - - We recommend only using this `post_get_capacity_commitment_with_metadata` - interceptor in new development instead of the `post_get_capacity_commitment` interceptor. - When both interceptors are used, this `post_get_capacity_commitment_with_metadata` interceptor runs after the - `post_get_capacity_commitment` interceptor. The (possibly modified) response returned by - `post_get_capacity_commitment` will be passed to - `post_get_capacity_commitment_with_metadata`. - """ - return response, metadata - - def pre_get_reservation(self, request: reservation.GetReservationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[reservation.GetReservationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_reservation - - Override in a subclass to manipulate the request or metadata - before they are sent to the ReservationService server. - """ - return request, metadata - - def post_get_reservation(self, response: reservation.Reservation) -> reservation.Reservation: - """Post-rpc interceptor for get_reservation - - DEPRECATED. Please use the `post_get_reservation_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the ReservationService server but before - it is returned to user code. This `post_get_reservation` interceptor runs - before the `post_get_reservation_with_metadata` interceptor. - """ - return response - - def post_get_reservation_with_metadata(self, response: reservation.Reservation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[reservation.Reservation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_reservation - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the ReservationService server but before it is returned to user code. - - We recommend only using this `post_get_reservation_with_metadata` - interceptor in new development instead of the `post_get_reservation` interceptor. - When both interceptors are used, this `post_get_reservation_with_metadata` interceptor runs after the - `post_get_reservation` interceptor. The (possibly modified) response returned by - `post_get_reservation` will be passed to - `post_get_reservation_with_metadata`. - """ - return response, metadata - - def pre_list_assignments(self, request: reservation.ListAssignmentsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[reservation.ListAssignmentsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_assignments - - Override in a subclass to manipulate the request or metadata - before they are sent to the ReservationService server. 
- """ - return request, metadata - - def post_list_assignments(self, response: reservation.ListAssignmentsResponse) -> reservation.ListAssignmentsResponse: - """Post-rpc interceptor for list_assignments - - DEPRECATED. Please use the `post_list_assignments_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the ReservationService server but before - it is returned to user code. This `post_list_assignments` interceptor runs - before the `post_list_assignments_with_metadata` interceptor. - """ - return response - - def post_list_assignments_with_metadata(self, response: reservation.ListAssignmentsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[reservation.ListAssignmentsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_assignments - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the ReservationService server but before it is returned to user code. - - We recommend only using this `post_list_assignments_with_metadata` - interceptor in new development instead of the `post_list_assignments` interceptor. - When both interceptors are used, this `post_list_assignments_with_metadata` interceptor runs after the - `post_list_assignments` interceptor. The (possibly modified) response returned by - `post_list_assignments` will be passed to - `post_list_assignments_with_metadata`. - """ - return response, metadata - - def pre_list_capacity_commitments(self, request: reservation.ListCapacityCommitmentsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[reservation.ListCapacityCommitmentsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_capacity_commitments - - Override in a subclass to manipulate the request or metadata - before they are sent to the ReservationService server. - """ - return request, metadata - - def post_list_capacity_commitments(self, response: reservation.ListCapacityCommitmentsResponse) -> reservation.ListCapacityCommitmentsResponse: - """Post-rpc interceptor for list_capacity_commitments - - DEPRECATED. Please use the `post_list_capacity_commitments_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the ReservationService server but before - it is returned to user code. This `post_list_capacity_commitments` interceptor runs - before the `post_list_capacity_commitments_with_metadata` interceptor. - """ - return response - - def post_list_capacity_commitments_with_metadata(self, response: reservation.ListCapacityCommitmentsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[reservation.ListCapacityCommitmentsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_capacity_commitments - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the ReservationService server but before it is returned to user code. - - We recommend only using this `post_list_capacity_commitments_with_metadata` - interceptor in new development instead of the `post_list_capacity_commitments` interceptor. - When both interceptors are used, this `post_list_capacity_commitments_with_metadata` interceptor runs after the - `post_list_capacity_commitments` interceptor. The (possibly modified) response returned by - `post_list_capacity_commitments` will be passed to - `post_list_capacity_commitments_with_metadata`. 
- """ - return response, metadata - - def pre_list_reservations(self, request: reservation.ListReservationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[reservation.ListReservationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_reservations - - Override in a subclass to manipulate the request or metadata - before they are sent to the ReservationService server. - """ - return request, metadata - - def post_list_reservations(self, response: reservation.ListReservationsResponse) -> reservation.ListReservationsResponse: - """Post-rpc interceptor for list_reservations - - DEPRECATED. Please use the `post_list_reservations_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the ReservationService server but before - it is returned to user code. This `post_list_reservations` interceptor runs - before the `post_list_reservations_with_metadata` interceptor. - """ - return response - - def post_list_reservations_with_metadata(self, response: reservation.ListReservationsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[reservation.ListReservationsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_reservations - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the ReservationService server but before it is returned to user code. - - We recommend only using this `post_list_reservations_with_metadata` - interceptor in new development instead of the `post_list_reservations` interceptor. - When both interceptors are used, this `post_list_reservations_with_metadata` interceptor runs after the - `post_list_reservations` interceptor. The (possibly modified) response returned by - `post_list_reservations` will be passed to - `post_list_reservations_with_metadata`. - """ - return response, metadata - - def pre_merge_capacity_commitments(self, request: reservation.MergeCapacityCommitmentsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[reservation.MergeCapacityCommitmentsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for merge_capacity_commitments - - Override in a subclass to manipulate the request or metadata - before they are sent to the ReservationService server. - """ - return request, metadata - - def post_merge_capacity_commitments(self, response: reservation.CapacityCommitment) -> reservation.CapacityCommitment: - """Post-rpc interceptor for merge_capacity_commitments - - DEPRECATED. Please use the `post_merge_capacity_commitments_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the ReservationService server but before - it is returned to user code. This `post_merge_capacity_commitments` interceptor runs - before the `post_merge_capacity_commitments_with_metadata` interceptor. - """ - return response - - def post_merge_capacity_commitments_with_metadata(self, response: reservation.CapacityCommitment, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[reservation.CapacityCommitment, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for merge_capacity_commitments - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the ReservationService server but before it is returned to user code. 
- - We recommend only using this `post_merge_capacity_commitments_with_metadata` - interceptor in new development instead of the `post_merge_capacity_commitments` interceptor. - When both interceptors are used, this `post_merge_capacity_commitments_with_metadata` interceptor runs after the - `post_merge_capacity_commitments` interceptor. The (possibly modified) response returned by - `post_merge_capacity_commitments` will be passed to - `post_merge_capacity_commitments_with_metadata`. - """ - return response, metadata - - def pre_move_assignment(self, request: reservation.MoveAssignmentRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[reservation.MoveAssignmentRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for move_assignment - - Override in a subclass to manipulate the request or metadata - before they are sent to the ReservationService server. - """ - return request, metadata - - def post_move_assignment(self, response: reservation.Assignment) -> reservation.Assignment: - """Post-rpc interceptor for move_assignment - - DEPRECATED. Please use the `post_move_assignment_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the ReservationService server but before - it is returned to user code. This `post_move_assignment` interceptor runs - before the `post_move_assignment_with_metadata` interceptor. - """ - return response - - def post_move_assignment_with_metadata(self, response: reservation.Assignment, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[reservation.Assignment, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for move_assignment - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the ReservationService server but before it is returned to user code. - - We recommend only using this `post_move_assignment_with_metadata` - interceptor in new development instead of the `post_move_assignment` interceptor. - When both interceptors are used, this `post_move_assignment_with_metadata` interceptor runs after the - `post_move_assignment` interceptor. The (possibly modified) response returned by - `post_move_assignment` will be passed to - `post_move_assignment_with_metadata`. - """ - return response, metadata - - def pre_search_all_assignments(self, request: reservation.SearchAllAssignmentsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[reservation.SearchAllAssignmentsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for search_all_assignments - - Override in a subclass to manipulate the request or metadata - before they are sent to the ReservationService server. - """ - return request, metadata - - def post_search_all_assignments(self, response: reservation.SearchAllAssignmentsResponse) -> reservation.SearchAllAssignmentsResponse: - """Post-rpc interceptor for search_all_assignments - - DEPRECATED. Please use the `post_search_all_assignments_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the ReservationService server but before - it is returned to user code. This `post_search_all_assignments` interceptor runs - before the `post_search_all_assignments_with_metadata` interceptor. 
- """ - return response - - def post_search_all_assignments_with_metadata(self, response: reservation.SearchAllAssignmentsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[reservation.SearchAllAssignmentsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for search_all_assignments - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the ReservationService server but before it is returned to user code. - - We recommend only using this `post_search_all_assignments_with_metadata` - interceptor in new development instead of the `post_search_all_assignments` interceptor. - When both interceptors are used, this `post_search_all_assignments_with_metadata` interceptor runs after the - `post_search_all_assignments` interceptor. The (possibly modified) response returned by - `post_search_all_assignments` will be passed to - `post_search_all_assignments_with_metadata`. - """ - return response, metadata - - def pre_search_assignments(self, request: reservation.SearchAssignmentsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[reservation.SearchAssignmentsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for search_assignments - - Override in a subclass to manipulate the request or metadata - before they are sent to the ReservationService server. - """ - return request, metadata - - def post_search_assignments(self, response: reservation.SearchAssignmentsResponse) -> reservation.SearchAssignmentsResponse: - """Post-rpc interceptor for search_assignments - - DEPRECATED. Please use the `post_search_assignments_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the ReservationService server but before - it is returned to user code. This `post_search_assignments` interceptor runs - before the `post_search_assignments_with_metadata` interceptor. - """ - return response - - def post_search_assignments_with_metadata(self, response: reservation.SearchAssignmentsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[reservation.SearchAssignmentsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for search_assignments - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the ReservationService server but before it is returned to user code. - - We recommend only using this `post_search_assignments_with_metadata` - interceptor in new development instead of the `post_search_assignments` interceptor. - When both interceptors are used, this `post_search_assignments_with_metadata` interceptor runs after the - `post_search_assignments` interceptor. The (possibly modified) response returned by - `post_search_assignments` will be passed to - `post_search_assignments_with_metadata`. - """ - return response, metadata - - def pre_split_capacity_commitment(self, request: reservation.SplitCapacityCommitmentRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[reservation.SplitCapacityCommitmentRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for split_capacity_commitment - - Override in a subclass to manipulate the request or metadata - before they are sent to the ReservationService server. 
- """ - return request, metadata - - def post_split_capacity_commitment(self, response: reservation.SplitCapacityCommitmentResponse) -> reservation.SplitCapacityCommitmentResponse: - """Post-rpc interceptor for split_capacity_commitment - - DEPRECATED. Please use the `post_split_capacity_commitment_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the ReservationService server but before - it is returned to user code. This `post_split_capacity_commitment` interceptor runs - before the `post_split_capacity_commitment_with_metadata` interceptor. - """ - return response - - def post_split_capacity_commitment_with_metadata(self, response: reservation.SplitCapacityCommitmentResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[reservation.SplitCapacityCommitmentResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for split_capacity_commitment - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the ReservationService server but before it is returned to user code. - - We recommend only using this `post_split_capacity_commitment_with_metadata` - interceptor in new development instead of the `post_split_capacity_commitment` interceptor. - When both interceptors are used, this `post_split_capacity_commitment_with_metadata` interceptor runs after the - `post_split_capacity_commitment` interceptor. The (possibly modified) response returned by - `post_split_capacity_commitment` will be passed to - `post_split_capacity_commitment_with_metadata`. - """ - return response, metadata - - def pre_update_assignment(self, request: reservation.UpdateAssignmentRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[reservation.UpdateAssignmentRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_assignment - - Override in a subclass to manipulate the request or metadata - before they are sent to the ReservationService server. - """ - return request, metadata - - def post_update_assignment(self, response: reservation.Assignment) -> reservation.Assignment: - """Post-rpc interceptor for update_assignment - - DEPRECATED. Please use the `post_update_assignment_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the ReservationService server but before - it is returned to user code. This `post_update_assignment` interceptor runs - before the `post_update_assignment_with_metadata` interceptor. - """ - return response - - def post_update_assignment_with_metadata(self, response: reservation.Assignment, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[reservation.Assignment, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_assignment - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the ReservationService server but before it is returned to user code. - - We recommend only using this `post_update_assignment_with_metadata` - interceptor in new development instead of the `post_update_assignment` interceptor. - When both interceptors are used, this `post_update_assignment_with_metadata` interceptor runs after the - `post_update_assignment` interceptor. The (possibly modified) response returned by - `post_update_assignment` will be passed to - `post_update_assignment_with_metadata`. 
- """ - return response, metadata - - def pre_update_bi_reservation(self, request: reservation.UpdateBiReservationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[reservation.UpdateBiReservationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_bi_reservation - - Override in a subclass to manipulate the request or metadata - before they are sent to the ReservationService server. - """ - return request, metadata - - def post_update_bi_reservation(self, response: reservation.BiReservation) -> reservation.BiReservation: - """Post-rpc interceptor for update_bi_reservation - - DEPRECATED. Please use the `post_update_bi_reservation_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the ReservationService server but before - it is returned to user code. This `post_update_bi_reservation` interceptor runs - before the `post_update_bi_reservation_with_metadata` interceptor. - """ - return response - - def post_update_bi_reservation_with_metadata(self, response: reservation.BiReservation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[reservation.BiReservation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_bi_reservation - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the ReservationService server but before it is returned to user code. - - We recommend only using this `post_update_bi_reservation_with_metadata` - interceptor in new development instead of the `post_update_bi_reservation` interceptor. - When both interceptors are used, this `post_update_bi_reservation_with_metadata` interceptor runs after the - `post_update_bi_reservation` interceptor. The (possibly modified) response returned by - `post_update_bi_reservation` will be passed to - `post_update_bi_reservation_with_metadata`. - """ - return response, metadata - - def pre_update_capacity_commitment(self, request: reservation.UpdateCapacityCommitmentRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[reservation.UpdateCapacityCommitmentRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_capacity_commitment - - Override in a subclass to manipulate the request or metadata - before they are sent to the ReservationService server. - """ - return request, metadata - - def post_update_capacity_commitment(self, response: reservation.CapacityCommitment) -> reservation.CapacityCommitment: - """Post-rpc interceptor for update_capacity_commitment - - DEPRECATED. Please use the `post_update_capacity_commitment_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the ReservationService server but before - it is returned to user code. This `post_update_capacity_commitment` interceptor runs - before the `post_update_capacity_commitment_with_metadata` interceptor. - """ - return response - - def post_update_capacity_commitment_with_metadata(self, response: reservation.CapacityCommitment, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[reservation.CapacityCommitment, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_capacity_commitment - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the ReservationService server but before it is returned to user code. 
-
-        We recommend only using this `post_update_capacity_commitment_with_metadata`
-        interceptor in new development instead of the `post_update_capacity_commitment` interceptor.
-        When both interceptors are used, this `post_update_capacity_commitment_with_metadata` interceptor runs after the
-        `post_update_capacity_commitment` interceptor. The (possibly modified) response returned by
-        `post_update_capacity_commitment` will be passed to
-        `post_update_capacity_commitment_with_metadata`.
-        """
-        return response, metadata
-
-    def pre_update_reservation(self, request: gcbr_reservation.UpdateReservationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gcbr_reservation.UpdateReservationRequest, Sequence[Tuple[str, Union[str, bytes]]]]:
-        """Pre-rpc interceptor for update_reservation
-
-        Override in a subclass to manipulate the request or metadata
-        before they are sent to the ReservationService server.
-        """
-        return request, metadata
-
-    def post_update_reservation(self, response: gcbr_reservation.Reservation) -> gcbr_reservation.Reservation:
-        """Post-rpc interceptor for update_reservation
-
-        DEPRECATED. Please use the `post_update_reservation_with_metadata`
-        interceptor instead.
-
-        Override in a subclass to read or manipulate the response
-        after it is returned by the ReservationService server but before
-        it is returned to user code. This `post_update_reservation` interceptor runs
-        before the `post_update_reservation_with_metadata` interceptor.
-        """
-        return response
-
-    def post_update_reservation_with_metadata(self, response: gcbr_reservation.Reservation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gcbr_reservation.Reservation, Sequence[Tuple[str, Union[str, bytes]]]]:
-        """Post-rpc interceptor for update_reservation
-
-        Override in a subclass to read or manipulate the response or metadata after it
-        is returned by the ReservationService server but before it is returned to user code.
-
-        We recommend only using this `post_update_reservation_with_metadata`
-        interceptor in new development instead of the `post_update_reservation` interceptor.
-        When both interceptors are used, this `post_update_reservation_with_metadata` interceptor runs after the
-        `post_update_reservation` interceptor. The (possibly modified) response returned by
-        `post_update_reservation` will be passed to
-        `post_update_reservation_with_metadata`.
-        """
-        return response, metadata
-
-
-@dataclasses.dataclass
-class ReservationServiceRestStub:
-    _session: AuthorizedSession
-    _host: str
-    _interceptor: ReservationServiceRestInterceptor
-
-
-class ReservationServiceRestTransport(_BaseReservationServiceRestTransport):
-    """REST backend synchronous transport for ReservationService.
-
-    This API allows users to manage their BigQuery reservations.
-
-    A reservation provides computational resource guarantees, in the
-    form of `slots <https://cloud.google.com/bigquery/docs/slots>`__, to
-    users. A slot is a unit of computational power in BigQuery, and
-    serves as the basic unit of parallelism. In a scan of a
-    multi-partitioned table, a single slot operates on a single
-    partition of the table. A reservation resource exists as a child
-    resource of the admin project and location, e.g.:
-    ``projects/myproject/locations/US/reservations/reservationName``.
-
-    A capacity commitment is a way to purchase compute capacity for
-    BigQuery jobs (in the form of slots) with some committed period of
-    usage.
A capacity commitment resource exists as a child resource of - the admin project and location, e.g.: - ``projects/myproject/locations/US/capacityCommitments/id``. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'bigqueryreservation.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[ReservationServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'bigqueryreservation.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
- # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - url_scheme=url_scheme, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or ReservationServiceRestInterceptor() - self._prep_wrapped_messages(client_info) - - class _CreateAssignment(_BaseReservationServiceRestTransport._BaseCreateAssignment, ReservationServiceRestStub): - def __hash__(self): - return hash("ReservationServiceRestTransport.CreateAssignment") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: reservation.CreateAssignmentRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> reservation.Assignment: - r"""Call the create assignment method over HTTP. - - Args: - request (~.reservation.CreateAssignmentRequest): - The request object. The request for - [ReservationService.CreateAssignment][google.cloud.bigquery.reservation.v1.ReservationService.CreateAssignment]. - Note: "bigquery.reservationAssignments.create" - permission is required on the related assignee. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.reservation.Assignment: - An assignment allows a project to - submit jobs of a certain type using - slots from the specified reservation. 
- - """ - - http_options = _BaseReservationServiceRestTransport._BaseCreateAssignment._get_http_options() - - request, metadata = self._interceptor.pre_create_assignment(request, metadata) - transcoded_request = _BaseReservationServiceRestTransport._BaseCreateAssignment._get_transcoded_request(http_options, request) - - body = _BaseReservationServiceRestTransport._BaseCreateAssignment._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseReservationServiceRestTransport._BaseCreateAssignment._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.CreateAssignment", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "CreateAssignment", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ReservationServiceRestTransport._CreateAssignment._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = reservation.Assignment() - pb_resp = reservation.Assignment.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_assignment(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_assignment_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = reservation.Assignment.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.reservation_v1.ReservationServiceClient.create_assignment", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "CreateAssignment", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateCapacityCommitment(_BaseReservationServiceRestTransport._BaseCreateCapacityCommitment, ReservationServiceRestStub): - def __hash__(self): - return hash("ReservationServiceRestTransport.CreateCapacityCommitment") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def 
__call__(self, - request: reservation.CreateCapacityCommitmentRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> reservation.CapacityCommitment: - r"""Call the create capacity - commitment method over HTTP. - - Args: - request (~.reservation.CreateCapacityCommitmentRequest): - The request object. The request for - [ReservationService.CreateCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.CreateCapacityCommitment]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.reservation.CapacityCommitment: - Capacity commitment is a way to - purchase compute capacity for BigQuery - jobs (in the form of slots) with some - committed period of usage. Annual - commitments renew by default. - Commitments can be removed after their - commitment end time passes. - - In order to remove annual commitment, - its plan needs to be changed to monthly - or flex first. - - A capacity commitment resource exists as - a child resource of the admin project. - - """ - - http_options = _BaseReservationServiceRestTransport._BaseCreateCapacityCommitment._get_http_options() - - request, metadata = self._interceptor.pre_create_capacity_commitment(request, metadata) - transcoded_request = _BaseReservationServiceRestTransport._BaseCreateCapacityCommitment._get_transcoded_request(http_options, request) - - body = _BaseReservationServiceRestTransport._BaseCreateCapacityCommitment._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseReservationServiceRestTransport._BaseCreateCapacityCommitment._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.CreateCapacityCommitment", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "CreateCapacityCommitment", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ReservationServiceRestTransport._CreateCapacityCommitment._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
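# An illustrative caller-side sketch: HTTP statuses >= 400 from this
# transport surface as google.api_core exception subclasses (e.g. 403 maps
# to PermissionDenied). Resource names are placeholders, and the snippet
# assumes default credentials.
from google.api_core import exceptions as core_exceptions
from google.cloud import bigquery_reservation_v1

client = bigquery_reservation_v1.ReservationServiceClient(transport="rest")
try:
    commitment = client.create_capacity_commitment(
        parent="projects/my-project/locations/US",
        capacity_commitment=bigquery_reservation_v1.CapacityCommitment(
            plan=bigquery_reservation_v1.CapacityCommitment.CommitmentPlan.FLEX,
            slot_count=100,
        ),
    )
    print(commitment.name)
except core_exceptions.GoogleAPICallError as exc:
    print(f"CreateCapacityCommitment failed: {exc.code} {exc.message}")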
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = reservation.CapacityCommitment() - pb_resp = reservation.CapacityCommitment.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_capacity_commitment(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_capacity_commitment_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = reservation.CapacityCommitment.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.reservation_v1.ReservationServiceClient.create_capacity_commitment", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "CreateCapacityCommitment", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _CreateReservation(_BaseReservationServiceRestTransport._BaseCreateReservation, ReservationServiceRestStub): - def __hash__(self): - return hash("ReservationServiceRestTransport.CreateReservation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: gcbr_reservation.CreateReservationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> gcbr_reservation.Reservation: - r"""Call the create reservation method over HTTP. - - Args: - request (~.gcbr_reservation.CreateReservationRequest): - The request object. The request for - [ReservationService.CreateReservation][google.cloud.bigquery.reservation.v1.ReservationService.CreateReservation]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.gcbr_reservation.Reservation: - A reservation is a mechanism used to - guarantee slots to users. 
- - """ - - http_options = _BaseReservationServiceRestTransport._BaseCreateReservation._get_http_options() - - request, metadata = self._interceptor.pre_create_reservation(request, metadata) - transcoded_request = _BaseReservationServiceRestTransport._BaseCreateReservation._get_transcoded_request(http_options, request) - - body = _BaseReservationServiceRestTransport._BaseCreateReservation._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseReservationServiceRestTransport._BaseCreateReservation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.CreateReservation", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "CreateReservation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ReservationServiceRestTransport._CreateReservation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = gcbr_reservation.Reservation() - pb_resp = gcbr_reservation.Reservation.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_reservation(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_reservation_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = gcbr_reservation.Reservation.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.reservation_v1.ReservationServiceClient.create_reservation", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "CreateReservation", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteAssignment(_BaseReservationServiceRestTransport._BaseDeleteAssignment, ReservationServiceRestStub): - def __hash__(self): - return hash("ReservationServiceRestTransport.DeleteAssignment") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - 
request: reservation.DeleteAssignmentRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the delete assignment method over HTTP. - - Args: - request (~.reservation.DeleteAssignmentRequest): - The request object. The request for - [ReservationService.DeleteAssignment][google.cloud.bigquery.reservation.v1.ReservationService.DeleteAssignment]. - Note: "bigquery.reservationAssignments.delete" - permission is required on the related assignee. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - - http_options = _BaseReservationServiceRestTransport._BaseDeleteAssignment._get_http_options() - - request, metadata = self._interceptor.pre_delete_assignment(request, metadata) - transcoded_request = _BaseReservationServiceRestTransport._BaseDeleteAssignment._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseReservationServiceRestTransport._BaseDeleteAssignment._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.DeleteAssignment", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "DeleteAssignment", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ReservationServiceRestTransport._DeleteAssignment._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
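# An illustrative sketch: DeleteAssignment returns an empty body, so the
# transport stops after this status check. A missing assignment surfaces as
# NotFound, which callers can swallow to make deletes idempotent. The
# resource name is a placeholder.
from google.api_core import exceptions as core_exceptions
from google.cloud import bigquery_reservation_v1

client = bigquery_reservation_v1.ReservationServiceClient(transport="rest")
try:
    client.delete_assignment(
        name="projects/my-project/locations/US/reservations/prod/assignments/123"
    )
except core_exceptions.NotFound:
    pass  # Already gone; treat the delete as idempotent.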
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _DeleteCapacityCommitment(_BaseReservationServiceRestTransport._BaseDeleteCapacityCommitment, ReservationServiceRestStub): - def __hash__(self): - return hash("ReservationServiceRestTransport.DeleteCapacityCommitment") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: reservation.DeleteCapacityCommitmentRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the delete capacity - commitment method over HTTP. - - Args: - request (~.reservation.DeleteCapacityCommitmentRequest): - The request object. The request for - [ReservationService.DeleteCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.DeleteCapacityCommitment]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - - http_options = _BaseReservationServiceRestTransport._BaseDeleteCapacityCommitment._get_http_options() - - request, metadata = self._interceptor.pre_delete_capacity_commitment(request, metadata) - transcoded_request = _BaseReservationServiceRestTransport._BaseDeleteCapacityCommitment._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseReservationServiceRestTransport._BaseDeleteCapacityCommitment._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.DeleteCapacityCommitment", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "DeleteCapacityCommitment", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ReservationServiceRestTransport._DeleteCapacityCommitment._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
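# A hedged sketch: only `name` is flattened on the client method, so
# optional request fields such as `force` go through an explicit request
# object. The resource name is a placeholder.
from google.cloud import bigquery_reservation_v1

client = bigquery_reservation_v1.ReservationServiceClient(transport="rest")
client.delete_capacity_commitment(
    request=bigquery_reservation_v1.DeleteCapacityCommitmentRequest(
        name="projects/my-project/locations/US/capacityCommitments/456",
        force=True,  # Delete even if the commitment is still in use.
    )
)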
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _DeleteReservation(_BaseReservationServiceRestTransport._BaseDeleteReservation, ReservationServiceRestStub): - def __hash__(self): - return hash("ReservationServiceRestTransport.DeleteReservation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: reservation.DeleteReservationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the delete reservation method over HTTP. - - Args: - request (~.reservation.DeleteReservationRequest): - The request object. The request for - [ReservationService.DeleteReservation][google.cloud.bigquery.reservation.v1.ReservationService.DeleteReservation]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - - http_options = _BaseReservationServiceRestTransport._BaseDeleteReservation._get_http_options() - - request, metadata = self._interceptor.pre_delete_reservation(request, metadata) - transcoded_request = _BaseReservationServiceRestTransport._BaseDeleteReservation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseReservationServiceRestTransport._BaseDeleteReservation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.DeleteReservation", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "DeleteReservation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ReservationServiceRestTransport._DeleteReservation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
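# An illustrative sketch: the delete RPCs map to HTTP DELETE and return an
# empty message, so on success the client method yields None. The resource
# name is a placeholder.
from google.cloud import bigquery_reservation_v1

client = bigquery_reservation_v1.ReservationServiceClient(transport="rest")
result = client.delete_reservation(
    name="projects/my-project/locations/US/reservations/prod"
)
assert result is None  # Empty response body on success.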
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _FailoverReservation(_BaseReservationServiceRestTransport._BaseFailoverReservation, ReservationServiceRestStub): - def __hash__(self): - return hash("ReservationServiceRestTransport.FailoverReservation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: reservation.FailoverReservationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> reservation.Reservation: - r"""Call the failover reservation method over HTTP. - - Args: - request (~.reservation.FailoverReservationRequest): - The request object. The request for - ReservationService.FailoverReservation. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.reservation.Reservation: - A reservation is a mechanism used to - guarantee slots to users. - - """ - - http_options = _BaseReservationServiceRestTransport._BaseFailoverReservation._get_http_options() - - request, metadata = self._interceptor.pre_failover_reservation(request, metadata) - transcoded_request = _BaseReservationServiceRestTransport._BaseFailoverReservation._get_transcoded_request(http_options, request) - - body = _BaseReservationServiceRestTransport._BaseFailoverReservation._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseReservationServiceRestTransport._BaseFailoverReservation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.FailoverReservation", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "FailoverReservation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ReservationServiceRestTransport._FailoverReservation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
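# An illustrative sketch: FailoverReservation exposes no flattened
# parameters on the client surface, so the request is passed whole (a dict
# is coerced to the request type). The reservation name is a placeholder.
from google.cloud import bigquery_reservation_v1

client = bigquery_reservation_v1.ReservationServiceClient(transport="rest")
reservation = client.failover_reservation(
    request={"name": "projects/my-project/locations/US/reservations/dr-primary"}
)
print(reservation.name)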
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = reservation.Reservation() - pb_resp = reservation.Reservation.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_failover_reservation(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_failover_reservation_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = reservation.Reservation.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.reservation_v1.ReservationServiceClient.failover_reservation", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "FailoverReservation", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetBiReservation(_BaseReservationServiceRestTransport._BaseGetBiReservation, ReservationServiceRestStub): - def __hash__(self): - return hash("ReservationServiceRestTransport.GetBiReservation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: reservation.GetBiReservationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> reservation.BiReservation: - r"""Call the get bi reservation method over HTTP. - - Args: - request (~.reservation.GetBiReservationRequest): - The request object. A request to get a singleton BI - reservation. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.reservation.BiReservation: - Represents a BI Reservation. 
- """ - - http_options = _BaseReservationServiceRestTransport._BaseGetBiReservation._get_http_options() - - request, metadata = self._interceptor.pre_get_bi_reservation(request, metadata) - transcoded_request = _BaseReservationServiceRestTransport._BaseGetBiReservation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseReservationServiceRestTransport._BaseGetBiReservation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.GetBiReservation", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "GetBiReservation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ReservationServiceRestTransport._GetBiReservation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = reservation.BiReservation() - pb_resp = reservation.BiReservation.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_bi_reservation(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_bi_reservation_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = reservation.BiReservation.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.reservation_v1.ReservationServiceClient.get_bi_reservation", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "GetBiReservation", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetCapacityCommitment(_BaseReservationServiceRestTransport._BaseGetCapacityCommitment, ReservationServiceRestStub): - def __hash__(self): - return hash("ReservationServiceRestTransport.GetCapacityCommitment") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: reservation.GetCapacityCommitmentRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: 
Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> reservation.CapacityCommitment: - r"""Call the get capacity commitment method over HTTP. - - Args: - request (~.reservation.GetCapacityCommitmentRequest): - The request object. The request for - [ReservationService.GetCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.GetCapacityCommitment]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.reservation.CapacityCommitment: - Capacity commitment is a way to - purchase compute capacity for BigQuery - jobs (in the form of slots) with some - committed period of usage. Annual - commitments renew by default. - Commitments can be removed after their - commitment end time passes. - - In order to remove annual commitment, - its plan needs to be changed to monthly - or flex first. - - A capacity commitment resource exists as - a child resource of the admin project. - - """ - - http_options = _BaseReservationServiceRestTransport._BaseGetCapacityCommitment._get_http_options() - - request, metadata = self._interceptor.pre_get_capacity_commitment(request, metadata) - transcoded_request = _BaseReservationServiceRestTransport._BaseGetCapacityCommitment._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseReservationServiceRestTransport._BaseGetCapacityCommitment._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.GetCapacityCommitment", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "GetCapacityCommitment", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ReservationServiceRestTransport._GetCapacityCommitment._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
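# An illustrative sketch of reading a commitment back; `slot_count`, `plan`,
# and `state` are fields of CapacityCommitment. The resource name is a
# placeholder.
from google.cloud import bigquery_reservation_v1

client = bigquery_reservation_v1.ReservationServiceClient(transport="rest")
commitment = client.get_capacity_commitment(
    name="projects/my-project/locations/US/capacityCommitments/456"
)
print(commitment.slot_count, commitment.plan, commitment.state)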
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = reservation.CapacityCommitment() - pb_resp = reservation.CapacityCommitment.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_capacity_commitment(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_capacity_commitment_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = reservation.CapacityCommitment.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.reservation_v1.ReservationServiceClient.get_capacity_commitment", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "GetCapacityCommitment", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _GetReservation(_BaseReservationServiceRestTransport._BaseGetReservation, ReservationServiceRestStub): - def __hash__(self): - return hash("ReservationServiceRestTransport.GetReservation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: reservation.GetReservationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> reservation.Reservation: - r"""Call the get reservation method over HTTP. - - Args: - request (~.reservation.GetReservationRequest): - The request object. The request for - [ReservationService.GetReservation][google.cloud.bigquery.reservation.v1.ReservationService.GetReservation]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.reservation.Reservation: - A reservation is a mechanism used to - guarantee slots to users. 
- - """ - - http_options = _BaseReservationServiceRestTransport._BaseGetReservation._get_http_options() - - request, metadata = self._interceptor.pre_get_reservation(request, metadata) - transcoded_request = _BaseReservationServiceRestTransport._BaseGetReservation._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseReservationServiceRestTransport._BaseGetReservation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.GetReservation", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "GetReservation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ReservationServiceRestTransport._GetReservation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = reservation.Reservation() - pb_resp = reservation.Reservation.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_reservation(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_reservation_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = reservation.Reservation.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.reservation_v1.ReservationServiceClient.get_reservation", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "GetReservation", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListAssignments(_BaseReservationServiceRestTransport._BaseListAssignments, ReservationServiceRestStub): - def __hash__(self): - return hash("ReservationServiceRestTransport.ListAssignments") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: reservation.ListAssignmentsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, 
bytes]]]=(), - ) -> reservation.ListAssignmentsResponse: - r"""Call the list assignments method over HTTP. - - Args: - request (~.reservation.ListAssignmentsRequest): - The request object. The request for - [ReservationService.ListAssignments][google.cloud.bigquery.reservation.v1.ReservationService.ListAssignments]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.reservation.ListAssignmentsResponse: - The response for - [ReservationService.ListAssignments][google.cloud.bigquery.reservation.v1.ReservationService.ListAssignments]. - - """ - - http_options = _BaseReservationServiceRestTransport._BaseListAssignments._get_http_options() - - request, metadata = self._interceptor.pre_list_assignments(request, metadata) - transcoded_request = _BaseReservationServiceRestTransport._BaseListAssignments._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseReservationServiceRestTransport._BaseListAssignments._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.ListAssignments", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "ListAssignments", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ReservationServiceRestTransport._ListAssignments._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
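# An illustrative sketch: list methods return a pager that fetches further
# pages lazily, and "-" is the documented wildcard for listing assignments
# across all reservations under a location. The parent is a placeholder.
from google.cloud import bigquery_reservation_v1

client = bigquery_reservation_v1.ReservationServiceClient(transport="rest")
pager = client.list_assignments(
    parent="projects/my-project/locations/US/reservations/-"
)
for assignment in pager:  # Issues further ListAssignments calls as needed.
    print(assignment.name, assignment.job_type)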
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = reservation.ListAssignmentsResponse() - pb_resp = reservation.ListAssignmentsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_assignments(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_assignments_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = reservation.ListAssignmentsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.reservation_v1.ReservationServiceClient.list_assignments", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "ListAssignments", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListCapacityCommitments(_BaseReservationServiceRestTransport._BaseListCapacityCommitments, ReservationServiceRestStub): - def __hash__(self): - return hash("ReservationServiceRestTransport.ListCapacityCommitments") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: reservation.ListCapacityCommitmentsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> reservation.ListCapacityCommitmentsResponse: - r"""Call the list capacity commitments method over HTTP. - - Args: - request (~.reservation.ListCapacityCommitmentsRequest): - The request object. The request for - [ReservationService.ListCapacityCommitments][google.cloud.bigquery.reservation.v1.ReservationService.ListCapacityCommitments]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.reservation.ListCapacityCommitmentsResponse: - The response for - [ReservationService.ListCapacityCommitments][google.cloud.bigquery.reservation.v1.ReservationService.ListCapacityCommitments]. 
- - """ - - http_options = _BaseReservationServiceRestTransport._BaseListCapacityCommitments._get_http_options() - - request, metadata = self._interceptor.pre_list_capacity_commitments(request, metadata) - transcoded_request = _BaseReservationServiceRestTransport._BaseListCapacityCommitments._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseReservationServiceRestTransport._BaseListCapacityCommitments._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.ListCapacityCommitments", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "ListCapacityCommitments", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ReservationServiceRestTransport._ListCapacityCommitments._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = reservation.ListCapacityCommitmentsResponse() - pb_resp = reservation.ListCapacityCommitmentsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_capacity_commitments(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_capacity_commitments_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = reservation.ListCapacityCommitmentsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.reservation_v1.ReservationServiceClient.list_capacity_commitments", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "ListCapacityCommitments", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListReservations(_BaseReservationServiceRestTransport._BaseListReservations, ReservationServiceRestStub): - def __hash__(self): - return hash("ReservationServiceRestTransport.ListReservations") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - 
request: reservation.ListReservationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> reservation.ListReservationsResponse: - r"""Call the list reservations method over HTTP. - - Args: - request (~.reservation.ListReservationsRequest): - The request object. The request for - [ReservationService.ListReservations][google.cloud.bigquery.reservation.v1.ReservationService.ListReservations]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.reservation.ListReservationsResponse: - The response for - [ReservationService.ListReservations][google.cloud.bigquery.reservation.v1.ReservationService.ListReservations]. - - """ - - http_options = _BaseReservationServiceRestTransport._BaseListReservations._get_http_options() - - request, metadata = self._interceptor.pre_list_reservations(request, metadata) - transcoded_request = _BaseReservationServiceRestTransport._BaseListReservations._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseReservationServiceRestTransport._BaseListReservations._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.ListReservations", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "ListReservations", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ReservationServiceRestTransport._ListReservations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
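# An illustrative sketch of iterating explicit pages instead of flat items;
# `page_size` caps the items returned per underlying HTTP call. The parent
# is a placeholder.
from google.cloud import bigquery_reservation_v1

client = bigquery_reservation_v1.ReservationServiceClient(transport="rest")
pager = client.list_reservations(
    request={"parent": "projects/my-project/locations/US", "page_size": 50}
)
for page in pager.pages:
    for reservation in page.reservations:
        print(reservation.name, reservation.slot_capacity)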
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = reservation.ListReservationsResponse() - pb_resp = reservation.ListReservationsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_reservations(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_reservations_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = reservation.ListReservationsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.reservation_v1.ReservationServiceClient.list_reservations", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "ListReservations", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _MergeCapacityCommitments(_BaseReservationServiceRestTransport._BaseMergeCapacityCommitments, ReservationServiceRestStub): - def __hash__(self): - return hash("ReservationServiceRestTransport.MergeCapacityCommitments") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: reservation.MergeCapacityCommitmentsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> reservation.CapacityCommitment: - r"""Call the merge capacity - commitments method over HTTP. - - Args: - request (~.reservation.MergeCapacityCommitmentsRequest): - The request object. The request for - [ReservationService.MergeCapacityCommitments][google.cloud.bigquery.reservation.v1.ReservationService.MergeCapacityCommitments]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.reservation.CapacityCommitment: - Capacity commitment is a way to - purchase compute capacity for BigQuery - jobs (in the form of slots) with some - committed period of usage. Annual - commitments renew by default. - Commitments can be removed after their - commitment end time passes. - - In order to remove annual commitment, - its plan needs to be changed to monthly - or flex first. - - A capacity commitment resource exists as - a child resource of the admin project. 
- - """ - - http_options = _BaseReservationServiceRestTransport._BaseMergeCapacityCommitments._get_http_options() - - request, metadata = self._interceptor.pre_merge_capacity_commitments(request, metadata) - transcoded_request = _BaseReservationServiceRestTransport._BaseMergeCapacityCommitments._get_transcoded_request(http_options, request) - - body = _BaseReservationServiceRestTransport._BaseMergeCapacityCommitments._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseReservationServiceRestTransport._BaseMergeCapacityCommitments._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.MergeCapacityCommitments", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "MergeCapacityCommitments", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ReservationServiceRestTransport._MergeCapacityCommitments._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = reservation.CapacityCommitment() - pb_resp = reservation.CapacityCommitment.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_merge_capacity_commitments(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_merge_capacity_commitments_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = reservation.CapacityCommitment.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.reservation_v1.ReservationServiceClient.merge_capacity_commitments", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "MergeCapacityCommitments", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _MoveAssignment(_BaseReservationServiceRestTransport._BaseMoveAssignment, ReservationServiceRestStub): - def __hash__(self): - return hash("ReservationServiceRestTransport.MoveAssignment") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - 
params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: reservation.MoveAssignmentRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> reservation.Assignment: - r"""Call the move assignment method over HTTP. - - Args: - request (~.reservation.MoveAssignmentRequest): - The request object. The request for - [ReservationService.MoveAssignment][google.cloud.bigquery.reservation.v1.ReservationService.MoveAssignment]. - - **Note**: "bigquery.reservationAssignments.create" - permission is required on the destination_id. - - **Note**: "bigquery.reservationAssignments.create" and - "bigquery.reservationAssignments.delete" permission are - required on the related assignee. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.reservation.Assignment: - An assignment allows a project to - submit jobs of a certain type using - slots from the specified reservation. - - """ - - http_options = _BaseReservationServiceRestTransport._BaseMoveAssignment._get_http_options() - - request, metadata = self._interceptor.pre_move_assignment(request, metadata) - transcoded_request = _BaseReservationServiceRestTransport._BaseMoveAssignment._get_transcoded_request(http_options, request) - - body = _BaseReservationServiceRestTransport._BaseMoveAssignment._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseReservationServiceRestTransport._BaseMoveAssignment._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.MoveAssignment", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "MoveAssignment", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ReservationServiceRestTransport._MoveAssignment._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
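Before the error handling continues below, note the ``metadata`` parameter documented in each of these methods: it is just a sequence of key/value pairs, and the ``-bin`` suffix on a key is what switches the expected value type from ``str`` to ``bytes``. A small illustrative sketch (both keys below are made-up examples):

.. code-block:: python

    from typing import Sequence, Tuple, Union

    metadata: Sequence[Tuple[str, Union[str, bytes]]] = (
        # Ordinary keys carry str values.
        ("x-goog-request-params", "parent=projects/my-project/locations/US"),
        # Keys ending in "-bin" carry bytes values.
        ("my-trace-bin", b"\x01\x02\x03"),
    )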
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = reservation.Assignment() - pb_resp = reservation.Assignment.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_move_assignment(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_move_assignment_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = reservation.Assignment.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.reservation_v1.ReservationServiceClient.move_assignment", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "MoveAssignment", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _SearchAllAssignments(_BaseReservationServiceRestTransport._BaseSearchAllAssignments, ReservationServiceRestStub): - def __hash__(self): - return hash("ReservationServiceRestTransport.SearchAllAssignments") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: reservation.SearchAllAssignmentsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> reservation.SearchAllAssignmentsResponse: - r"""Call the search all assignments method over HTTP. - - Args: - request (~.reservation.SearchAllAssignmentsRequest): - The request object. The request for - [ReservationService.SearchAllAssignments][google.cloud.bigquery.reservation.v1.ReservationService.SearchAllAssignments]. - Note: "bigquery.reservationAssignments.search" - permission is required on the related assignee. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.reservation.SearchAllAssignmentsResponse: - The response for - [ReservationService.SearchAllAssignments][google.cloud.bigquery.reservation.v1.ReservationService.SearchAllAssignments]. 
- - """ - - http_options = _BaseReservationServiceRestTransport._BaseSearchAllAssignments._get_http_options() - - request, metadata = self._interceptor.pre_search_all_assignments(request, metadata) - transcoded_request = _BaseReservationServiceRestTransport._BaseSearchAllAssignments._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseReservationServiceRestTransport._BaseSearchAllAssignments._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.SearchAllAssignments", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "SearchAllAssignments", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ReservationServiceRestTransport._SearchAllAssignments._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = reservation.SearchAllAssignmentsResponse() - pb_resp = reservation.SearchAllAssignmentsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_search_all_assignments(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_search_all_assignments_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = reservation.SearchAllAssignmentsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.reservation_v1.ReservationServiceClient.search_all_assignments", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "SearchAllAssignments", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _SearchAssignments(_BaseReservationServiceRestTransport._BaseSearchAssignments, ReservationServiceRestStub): - def __hash__(self): - return hash("ReservationServiceRestTransport.SearchAssignments") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: reservation.SearchAssignmentsRequest, 
*, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> reservation.SearchAssignmentsResponse: - r"""Call the search assignments method over HTTP. - - Args: - request (~.reservation.SearchAssignmentsRequest): - The request object. The request for - [ReservationService.SearchAssignments][google.cloud.bigquery.reservation.v1.ReservationService.SearchAssignments]. - Note: "bigquery.reservationAssignments.search" - permission is required on the related assignee. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.reservation.SearchAssignmentsResponse: - The response for - [ReservationService.SearchAssignments][google.cloud.bigquery.reservation.v1.ReservationService.SearchAssignments]. - - """ - - http_options = _BaseReservationServiceRestTransport._BaseSearchAssignments._get_http_options() - - request, metadata = self._interceptor.pre_search_assignments(request, metadata) - transcoded_request = _BaseReservationServiceRestTransport._BaseSearchAssignments._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseReservationServiceRestTransport._BaseSearchAssignments._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.SearchAssignments", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "SearchAssignments", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ReservationServiceRestTransport._SearchAssignments._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
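Once the status check just below passes, the JSON body is decoded by parsing into the message's protobuf view: ``Type.pb(resp)`` returns the protobuf underlying ``resp``, so parsing into it populates the wrapper in place. A minimal sketch of that pattern, using a hand-written JSON snippet rather than a live response:

.. code-block:: python

    from google.cloud.bigquery_reservation_v1.types import reservation
    from google.protobuf import json_format

    resp = reservation.SearchAssignmentsResponse()
    pb_resp = reservation.SearchAssignmentsResponse.pb(resp)  # protobuf view of resp

    json_format.Parse('{"nextPageToken": "tok-123"}', pb_resp, ignore_unknown_fields=True)
    print(resp.next_page_token)  # tok-123 -- the wrapper sees the parsed data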
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = reservation.SearchAssignmentsResponse() - pb_resp = reservation.SearchAssignmentsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_search_assignments(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_search_assignments_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = reservation.SearchAssignmentsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.reservation_v1.ReservationServiceClient.search_assignments", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "SearchAssignments", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _SplitCapacityCommitment(_BaseReservationServiceRestTransport._BaseSplitCapacityCommitment, ReservationServiceRestStub): - def __hash__(self): - return hash("ReservationServiceRestTransport.SplitCapacityCommitment") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: reservation.SplitCapacityCommitmentRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> reservation.SplitCapacityCommitmentResponse: - r"""Call the split capacity commitment method over HTTP. - - Args: - request (~.reservation.SplitCapacityCommitmentRequest): - The request object. The request for - [ReservationService.SplitCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.SplitCapacityCommitment]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.reservation.SplitCapacityCommitmentResponse: - The response for - [ReservationService.SplitCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.SplitCapacityCommitment]. 
- - """ - - http_options = _BaseReservationServiceRestTransport._BaseSplitCapacityCommitment._get_http_options() - - request, metadata = self._interceptor.pre_split_capacity_commitment(request, metadata) - transcoded_request = _BaseReservationServiceRestTransport._BaseSplitCapacityCommitment._get_transcoded_request(http_options, request) - - body = _BaseReservationServiceRestTransport._BaseSplitCapacityCommitment._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseReservationServiceRestTransport._BaseSplitCapacityCommitment._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.SplitCapacityCommitment", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "SplitCapacityCommitment", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ReservationServiceRestTransport._SplitCapacityCommitment._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = reservation.SplitCapacityCommitmentResponse() - pb_resp = reservation.SplitCapacityCommitmentResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_split_capacity_commitment(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_split_capacity_commitment_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = reservation.SplitCapacityCommitmentResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.reservation_v1.ReservationServiceClient.split_capacity_commitment", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "SplitCapacityCommitment", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateAssignment(_BaseReservationServiceRestTransport._BaseUpdateAssignment, ReservationServiceRestStub): - def __hash__(self): - return hash("ReservationServiceRestTransport.UpdateAssignment") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, 
- params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: reservation.UpdateAssignmentRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> reservation.Assignment: - r"""Call the update assignment method over HTTP. - - Args: - request (~.reservation.UpdateAssignmentRequest): - The request object. The request for - [ReservationService.UpdateAssignment][google.cloud.bigquery.reservation.v1.ReservationService.UpdateAssignment]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.reservation.Assignment: - An assignment allows a project to - submit jobs of a certain type using - slots from the specified reservation. - - """ - - http_options = _BaseReservationServiceRestTransport._BaseUpdateAssignment._get_http_options() - - request, metadata = self._interceptor.pre_update_assignment(request, metadata) - transcoded_request = _BaseReservationServiceRestTransport._BaseUpdateAssignment._get_transcoded_request(http_options, request) - - body = _BaseReservationServiceRestTransport._BaseUpdateAssignment._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseReservationServiceRestTransport._BaseUpdateAssignment._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.UpdateAssignment", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "UpdateAssignment", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ReservationServiceRestTransport._UpdateAssignment._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
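All of these debug blocks attach the ``httpRequest``/``httpResponse`` dicts through ``extra``, which surface as attributes on the ``LogRecord``. A sketch of a handler that pulls them out, assuming the transport's logger follows the usual ``__name__`` naming (the ``_LOGGER`` definition itself is outside this excerpt):

.. code-block:: python

    import logging

    class HttpDetailHandler(logging.StreamHandler):
        """Print the httpRequest/httpResponse dict attached via `extra`."""

        def emit(self, record: logging.LogRecord) -> None:
            detail = getattr(record, "httpRequest", None) or getattr(record, "httpResponse", None)
            print(record.getMessage(), detail)

    logger = logging.getLogger("google.cloud.bigquery_reservation_v1")
    logger.addHandler(HttpDetailHandler())
    logger.setLevel(logging.DEBUG)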
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = reservation.Assignment() - pb_resp = reservation.Assignment.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_assignment(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_assignment_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = reservation.Assignment.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.reservation_v1.ReservationServiceClient.update_assignment", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "UpdateAssignment", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateBiReservation(_BaseReservationServiceRestTransport._BaseUpdateBiReservation, ReservationServiceRestStub): - def __hash__(self): - return hash("ReservationServiceRestTransport.UpdateBiReservation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: reservation.UpdateBiReservationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> reservation.BiReservation: - r"""Call the update bi reservation method over HTTP. - - Args: - request (~.reservation.UpdateBiReservationRequest): - The request object. A request to update a BI reservation. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.reservation.BiReservation: - Represents a BI Reservation. 
- """ - - http_options = _BaseReservationServiceRestTransport._BaseUpdateBiReservation._get_http_options() - - request, metadata = self._interceptor.pre_update_bi_reservation(request, metadata) - transcoded_request = _BaseReservationServiceRestTransport._BaseUpdateBiReservation._get_transcoded_request(http_options, request) - - body = _BaseReservationServiceRestTransport._BaseUpdateBiReservation._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseReservationServiceRestTransport._BaseUpdateBiReservation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.UpdateBiReservation", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "UpdateBiReservation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ReservationServiceRestTransport._UpdateBiReservation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = reservation.BiReservation() - pb_resp = reservation.BiReservation.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_bi_reservation(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_bi_reservation_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = reservation.BiReservation.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.reservation_v1.ReservationServiceClient.update_bi_reservation", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "UpdateBiReservation", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateCapacityCommitment(_BaseReservationServiceRestTransport._BaseUpdateCapacityCommitment, ReservationServiceRestStub): - def __hash__(self): - return hash("ReservationServiceRestTransport.UpdateCapacityCommitment") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - 
data=body, - ) - return response - - def __call__(self, - request: reservation.UpdateCapacityCommitmentRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> reservation.CapacityCommitment: - r"""Call the update capacity - commitment method over HTTP. - - Args: - request (~.reservation.UpdateCapacityCommitmentRequest): - The request object. The request for - [ReservationService.UpdateCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.UpdateCapacityCommitment]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.reservation.CapacityCommitment: - Capacity commitment is a way to - purchase compute capacity for BigQuery - jobs (in the form of slots) with some - committed period of usage. Annual - commitments renew by default. - Commitments can be removed after their - commitment end time passes. - - In order to remove annual commitment, - its plan needs to be changed to monthly - or flex first. - - A capacity commitment resource exists as - a child resource of the admin project. - - """ - - http_options = _BaseReservationServiceRestTransport._BaseUpdateCapacityCommitment._get_http_options() - - request, metadata = self._interceptor.pre_update_capacity_commitment(request, metadata) - transcoded_request = _BaseReservationServiceRestTransport._BaseUpdateCapacityCommitment._get_transcoded_request(http_options, request) - - body = _BaseReservationServiceRestTransport._BaseUpdateCapacityCommitment._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseReservationServiceRestTransport._BaseUpdateCapacityCommitment._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.UpdateCapacityCommitment", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "UpdateCapacityCommitment", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ReservationServiceRestTransport._UpdateCapacityCommitment._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
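Each call above brackets the HTTP exchange with ``pre_*``/``post_*`` hooks on ``self._interceptor``, which is the extension point for observing or rewriting traffic. A hedged sketch of plugging in custom behavior; the ``ReservationServiceRestInterceptor`` base-class name and import path are assumptions inferred from the hook names, since its definition is not part of this excerpt:

.. code-block:: python

    # Assumed base class / import path (not shown in this excerpt).
    from google.cloud.bigquery_reservation_v1.services.reservation_service.transports.rest import (
        ReservationServiceRestInterceptor,
    )

    class AuditingInterceptor(ReservationServiceRestInterceptor):
        def pre_update_capacity_commitment(self, request, metadata):
            # Inspect (or rewrite) the request before transcoding and send.
            print("updating:", request.capacity_commitment.name)
            return request, metadata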
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = reservation.CapacityCommitment() - pb_resp = reservation.CapacityCommitment.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_capacity_commitment(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_capacity_commitment_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = reservation.CapacityCommitment.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.reservation_v1.ReservationServiceClient.update_capacity_commitment", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "UpdateCapacityCommitment", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateReservation(_BaseReservationServiceRestTransport._BaseUpdateReservation, ReservationServiceRestStub): - def __hash__(self): - return hash("ReservationServiceRestTransport.UpdateReservation") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: gcbr_reservation.UpdateReservationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> gcbr_reservation.Reservation: - r"""Call the update reservation method over HTTP. - - Args: - request (~.gcbr_reservation.UpdateReservationRequest): - The request object. The request for - [ReservationService.UpdateReservation][google.cloud.bigquery.reservation.v1.ReservationService.UpdateReservation]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.gcbr_reservation.Reservation: - A reservation is a mechanism used to - guarantee slots to users. 
- - """ - - http_options = _BaseReservationServiceRestTransport._BaseUpdateReservation._get_http_options() - - request, metadata = self._interceptor.pre_update_reservation(request, metadata) - transcoded_request = _BaseReservationServiceRestTransport._BaseUpdateReservation._get_transcoded_request(http_options, request) - - body = _BaseReservationServiceRestTransport._BaseUpdateReservation._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseReservationServiceRestTransport._BaseUpdateReservation._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.UpdateReservation", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "UpdateReservation", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = ReservationServiceRestTransport._UpdateReservation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = gcbr_reservation.Reservation() - pb_resp = gcbr_reservation.Reservation.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_reservation(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_reservation_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = gcbr_reservation.Reservation.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.bigquery.reservation_v1.ReservationServiceClient.update_reservation", - extra = { - "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "UpdateReservation", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - @property - def create_assignment(self) -> Callable[ - [reservation.CreateAssignmentRequest], - reservation.Assignment]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateAssignment(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_capacity_commitment(self) -> Callable[ - [reservation.CreateCapacityCommitmentRequest], - reservation.CapacityCommitment]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._CreateCapacityCommitment(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_reservation(self) -> Callable[ - [gcbr_reservation.CreateReservationRequest], - gcbr_reservation.Reservation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateReservation(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_assignment(self) -> Callable[ - [reservation.DeleteAssignmentRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteAssignment(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_capacity_commitment(self) -> Callable[ - [reservation.DeleteCapacityCommitmentRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteCapacityCommitment(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_reservation(self) -> Callable[ - [reservation.DeleteReservationRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteReservation(self._session, self._host, self._interceptor) # type: ignore - - @property - def failover_reservation(self) -> Callable[ - [reservation.FailoverReservationRequest], - reservation.Reservation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._FailoverReservation(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_bi_reservation(self) -> Callable[ - [reservation.GetBiReservationRequest], - reservation.BiReservation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetBiReservation(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_capacity_commitment(self) -> Callable[ - [reservation.GetCapacityCommitmentRequest], - reservation.CapacityCommitment]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetCapacityCommitment(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_reservation(self) -> Callable[ - [reservation.GetReservationRequest], - reservation.Reservation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetReservation(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_assignments(self) -> Callable[ - [reservation.ListAssignmentsRequest], - reservation.ListAssignmentsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._ListAssignments(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_capacity_commitments(self) -> Callable[ - [reservation.ListCapacityCommitmentsRequest], - reservation.ListCapacityCommitmentsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListCapacityCommitments(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_reservations(self) -> Callable[ - [reservation.ListReservationsRequest], - reservation.ListReservationsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListReservations(self._session, self._host, self._interceptor) # type: ignore - - @property - def merge_capacity_commitments(self) -> Callable[ - [reservation.MergeCapacityCommitmentsRequest], - reservation.CapacityCommitment]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._MergeCapacityCommitments(self._session, self._host, self._interceptor) # type: ignore - - @property - def move_assignment(self) -> Callable[ - [reservation.MoveAssignmentRequest], - reservation.Assignment]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._MoveAssignment(self._session, self._host, self._interceptor) # type: ignore - - @property - def search_all_assignments(self) -> Callable[ - [reservation.SearchAllAssignmentsRequest], - reservation.SearchAllAssignmentsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._SearchAllAssignments(self._session, self._host, self._interceptor) # type: ignore - - @property - def search_assignments(self) -> Callable[ - [reservation.SearchAssignmentsRequest], - reservation.SearchAssignmentsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._SearchAssignments(self._session, self._host, self._interceptor) # type: ignore - - @property - def split_capacity_commitment(self) -> Callable[ - [reservation.SplitCapacityCommitmentRequest], - reservation.SplitCapacityCommitmentResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._SplitCapacityCommitment(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_assignment(self) -> Callable[ - [reservation.UpdateAssignmentRequest], - reservation.Assignment]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateAssignment(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_bi_reservation(self) -> Callable[ - [reservation.UpdateBiReservationRequest], - reservation.BiReservation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
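Each property in this block resolves to the matching per-RPC stub, so a transport instance is directly callable method by method; ordinarily the generated client class wraps this. A hedged usage sketch with placeholder project/location values:

.. code-block:: python

    from google.cloud.bigquery_reservation_v1.types import reservation

    def fetch_bi_reservation(transport):
        # "my-project" / "US" are placeholders.
        request = reservation.GetBiReservationRequest(
            name="projects/my-project/locations/US/biReservation",
        )
        return transport.get_bi_reservation(request)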
- # In C++ this would require a dynamic_cast - return self._UpdateBiReservation(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_capacity_commitment(self) -> Callable[ - [reservation.UpdateCapacityCommitmentRequest], - reservation.CapacityCommitment]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateCapacityCommitment(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_reservation(self) -> Callable[ - [gcbr_reservation.UpdateReservationRequest], - gcbr_reservation.Reservation]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateReservation(self._session, self._host, self._interceptor) # type: ignore - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'ReservationServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/rest_base.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/rest_base.py deleted file mode 100644 index 252067f145df..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/rest_base.py +++ /dev/null @@ -1,977 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from .base import ReservationServiceTransport, DEFAULT_CLIENT_INFO - -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - - -from google.cloud.bigquery_reservation_v1.types import reservation -from google.cloud.bigquery_reservation_v1.types import reservation as gcbr_reservation -from google.protobuf import empty_pb2 # type: ignore - - -class _BaseReservationServiceRestTransport(ReservationServiceTransport): - """Base REST backend transport for ReservationService. - - Note: This class is not meant to be used directly. Use its sync and - async sub-classes instead. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'bigqueryreservation.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. 
- Args: - host (Optional[str]): - The hostname to connect to (default: 'bigqueryreservation.googleapis.com'). - credentials (Optional[Any]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - - class _BaseCreateAssignment: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*/reservations/*}/assignments', - 'body': 'assignment', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = reservation.CreateAssignmentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseReservationServiceRestTransport._BaseCreateAssignment._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateCapacityCommitment: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/capacityCommitments', - 'body': 'capacity_commitment', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request =
reservation.CreateCapacityCommitmentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseReservationServiceRestTransport._BaseCreateCapacityCommitment._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseCreateReservation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/reservations', - 'body': 'reservation', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = gcbr_reservation.CreateReservationRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseReservationServiceRestTransport._BaseCreateReservation._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteAssignment: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/reservations/*/assignments/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = reservation.DeleteAssignmentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseReservationServiceRestTransport._BaseDeleteAssignment._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteCapacityCommitment: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be 
implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/capacityCommitments/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = reservation.DeleteCapacityCommitmentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseReservationServiceRestTransport._BaseDeleteCapacityCommitment._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteReservation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/reservations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = reservation.DeleteReservationRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseReservationServiceRestTransport._BaseDeleteReservation._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseFailoverReservation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/reservations/*}:failoverReservation', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = reservation.FailoverReservationRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - 
query_params.update(_BaseReservationServiceRestTransport._BaseFailoverReservation._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetBiReservation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/biReservation}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = reservation.GetBiReservationRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseReservationServiceRestTransport._BaseGetBiReservation._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetCapacityCommitment: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/capacityCommitments/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = reservation.GetCapacityCommitmentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseReservationServiceRestTransport._BaseGetCapacityCommitment._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetReservation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/reservations/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = reservation.GetReservationRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - 
transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseReservationServiceRestTransport._BaseGetReservation._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListAssignments: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*/reservations/*}/assignments', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = reservation.ListAssignmentsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseReservationServiceRestTransport._BaseListAssignments._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListCapacityCommitments: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/capacityCommitments', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = reservation.ListCapacityCommitmentsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseReservationServiceRestTransport._BaseListCapacityCommitments._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListReservations: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/reservations', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = reservation.ListReservationsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def 
_get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseReservationServiceRestTransport._BaseListReservations._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseMergeCapacityCommitments: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/capacityCommitments:merge', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = reservation.MergeCapacityCommitmentsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseMoveAssignment: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/reservations/*/assignments/*}:move', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = reservation.MoveAssignmentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseReservationServiceRestTransport._BaseMoveAssignment._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseSearchAllAssignments: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}:searchAllAssignments', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - 
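-            # ``path_template.transcode`` (from google.api_core) matches the
-            # request message against the http_options rules above and expands
-            # the URI template. A sketch of the result for a hypothetical
-            # parent (values illustrative only):
-            #   {'method': 'get',
-            #    'uri': '/v1/projects/myproject/locations/US:searchAllAssignments',
-            #    'query_params': <remaining request fields, e.g. query, page_size>}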
pb_request = reservation.SearchAllAssignmentsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseReservationServiceRestTransport._BaseSearchAllAssignments._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseSearchAssignments: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}:searchAssignments', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = reservation.SearchAssignmentsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseReservationServiceRestTransport._BaseSearchAssignments._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseSplitCapacityCommitment: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/capacityCommitments/*}:split', - 'body': '*', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = reservation.SplitCapacityCommitmentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseReservationServiceRestTransport._BaseSplitCapacityCommitment._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateAssignment: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{assignment.name=projects/*/locations/*/reservations/*/assignments/*}', - 
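-                # The ``{assignment.name=...}`` segment binds the nested
-                # ``assignment.name`` request field into the URI, so an
-                # assignment named (hypothetical value)
-                #   projects/p/locations/US/reservations/r/assignments/a
-                # is PATCHed at /v1/projects/p/locations/US/reservations/r/assignments/a.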
'body': 'assignment', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = reservation.UpdateAssignmentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateBiReservation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{bi_reservation.name=projects/*/locations/*/biReservation}', - 'body': 'bi_reservation', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = reservation.UpdateBiReservationRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateCapacityCommitment: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{capacity_commitment.name=projects/*/locations/*/capacityCommitments/*}', - 'body': 'capacity_commitment', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = reservation.UpdateCapacityCommitmentRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateReservation: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{reservation.name=projects/*/locations/*/reservations/*}', - 'body': 'reservation', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = gcbr_reservation.UpdateReservationRequest.pb(request) - transcoded_request = 
path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - -__all__=( - '_BaseReservationServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/types/__init__.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/types/__init__.py deleted file mode 100644 index 041b9cb27017..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/types/__init__.py +++ /dev/null @@ -1,88 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .reservation import ( - Assignment, - BiReservation, - CapacityCommitment, - CreateAssignmentRequest, - CreateCapacityCommitmentRequest, - CreateReservationRequest, - DeleteAssignmentRequest, - DeleteCapacityCommitmentRequest, - DeleteReservationRequest, - FailoverReservationRequest, - GetBiReservationRequest, - GetCapacityCommitmentRequest, - GetReservationRequest, - ListAssignmentsRequest, - ListAssignmentsResponse, - ListCapacityCommitmentsRequest, - ListCapacityCommitmentsResponse, - ListReservationsRequest, - ListReservationsResponse, - MergeCapacityCommitmentsRequest, - MoveAssignmentRequest, - Reservation, - SearchAllAssignmentsRequest, - SearchAllAssignmentsResponse, - SearchAssignmentsRequest, - SearchAssignmentsResponse, - SplitCapacityCommitmentRequest, - SplitCapacityCommitmentResponse, - TableReference, - UpdateAssignmentRequest, - UpdateBiReservationRequest, - UpdateCapacityCommitmentRequest, - UpdateReservationRequest, - Edition, -) - -__all__ = ( - 'Assignment', - 'BiReservation', - 'CapacityCommitment', - 'CreateAssignmentRequest', - 'CreateCapacityCommitmentRequest', - 'CreateReservationRequest', - 'DeleteAssignmentRequest', - 'DeleteCapacityCommitmentRequest', - 'DeleteReservationRequest', - 'FailoverReservationRequest', - 'GetBiReservationRequest', - 'GetCapacityCommitmentRequest', - 'GetReservationRequest', - 'ListAssignmentsRequest', - 'ListAssignmentsResponse', - 'ListCapacityCommitmentsRequest', - 'ListCapacityCommitmentsResponse', - 'ListReservationsRequest', - 'ListReservationsResponse', - 'MergeCapacityCommitmentsRequest', - 'MoveAssignmentRequest', - 'Reservation', - 'SearchAllAssignmentsRequest', - 'SearchAllAssignmentsResponse', - 'SearchAssignmentsRequest', - 'SearchAssignmentsResponse', - 'SplitCapacityCommitmentRequest', - 'SplitCapacityCommitmentResponse', - 'TableReference', - 'UpdateAssignmentRequest', - 
'UpdateBiReservationRequest', - 'UpdateCapacityCommitmentRequest', - 'UpdateReservationRequest', - 'Edition', -) diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/types/reservation.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/types/reservation.py deleted file mode 100644 index 75f737bbed1b..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/google/cloud/bigquery_reservation_v1/types/reservation.py +++ /dev/null @@ -1,1465 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.bigquery.reservation.v1', - manifest={ - 'Edition', - 'Reservation', - 'CapacityCommitment', - 'CreateReservationRequest', - 'ListReservationsRequest', - 'ListReservationsResponse', - 'GetReservationRequest', - 'DeleteReservationRequest', - 'UpdateReservationRequest', - 'FailoverReservationRequest', - 'CreateCapacityCommitmentRequest', - 'ListCapacityCommitmentsRequest', - 'ListCapacityCommitmentsResponse', - 'GetCapacityCommitmentRequest', - 'DeleteCapacityCommitmentRequest', - 'UpdateCapacityCommitmentRequest', - 'SplitCapacityCommitmentRequest', - 'SplitCapacityCommitmentResponse', - 'MergeCapacityCommitmentsRequest', - 'Assignment', - 'CreateAssignmentRequest', - 'ListAssignmentsRequest', - 'ListAssignmentsResponse', - 'DeleteAssignmentRequest', - 'SearchAssignmentsRequest', - 'SearchAllAssignmentsRequest', - 'SearchAssignmentsResponse', - 'SearchAllAssignmentsResponse', - 'MoveAssignmentRequest', - 'UpdateAssignmentRequest', - 'TableReference', - 'BiReservation', - 'GetBiReservationRequest', - 'UpdateBiReservationRequest', - }, -) - - -class Edition(proto.Enum): - r"""The type of editions. - Different features and behaviors are provided to different - editions Capacity commitments and reservations are linked to - editions. - - Values: - EDITION_UNSPECIFIED (0): - Default value, which will be treated as - ENTERPRISE. - STANDARD (1): - Standard edition. - ENTERPRISE (2): - Enterprise edition. - ENTERPRISE_PLUS (3): - Enterprise Plus edition. - """ - EDITION_UNSPECIFIED = 0 - STANDARD = 1 - ENTERPRISE = 2 - ENTERPRISE_PLUS = 3 - - -class Reservation(proto.Message): - r"""A reservation is a mechanism used to guarantee slots to - users. - - Attributes: - name (str): - The resource name of the reservation, e.g., - ``projects/*/locations/*/reservations/team1-prod``. The - reservation_id must only contain lower case alphanumeric - characters or dashes. It must start with a letter and must - not end with a dash. Its maximum length is 64 characters. 
- slot_capacity (int): - Baseline slots available to this reservation. A slot is a - unit of computational power in BigQuery, and serves as the - unit of parallelism. - - Queries using this reservation might use more slots during - runtime if ignore_idle_slots is set to false, or autoscaling - is enabled. - - The total slot_capacity of the reservation and its siblings - may exceed the total slot_count of capacity commitments. In - that case, the exceeding slots will be charged with the - autoscale SKU. You can increase the number of baseline slots - in a reservation every few minutes. If you want to decrease - your baseline slots, you are limited to once an hour if you - have recently changed your baseline slot capacity and your - baseline slots exceed your committed slots. Otherwise, you - can decrease your baseline slots every few minutes. - ignore_idle_slots (bool): - If false, any query or pipeline job using this reservation - will use idle slots from other reservations within the same - admin project. If true, a query or pipeline job using this - reservation will execute with the slot capacity specified in - the slot_capacity field at most. - autoscale (google.cloud.bigquery_reservation_v1.types.Reservation.Autoscale): - The configuration parameters for the auto - scaling feature. - concurrency (int): - Job concurrency target which sets a soft - upper bound on the number of jobs that can run - concurrently in this reservation. This is a soft - target due to asynchronous nature of the system - and various optimizations for small queries. - Default value is 0 which means that concurrency - target will be automatically computed by the - system. - NOTE: this field is exposed as target job - concurrency in the Information Schema, DDL and - BigQuery CLI. - creation_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Creation time of the - reservation. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. Last update time of the - reservation. - multi_region_auxiliary (bool): - Applicable only for reservations located - within one of the BigQuery multi-regions (US or - EU). - - If set to true, this reservation is placed in - the organization's secondary region which is - designated for disaster recovery purposes. If - false, this reservation is placed in the - organization's default region. - - NOTE: this is a preview feature. Project must be - allow-listed in order to set this field. - edition (google.cloud.bigquery_reservation_v1.types.Edition): - Edition of the reservation. - primary_location (str): - Output only. The current location of the - reservation's primary replica. This field is - only set for reservations using the managed - disaster recovery feature. - secondary_location (str): - Optional. The current location of the - reservation's secondary replica. This field is - only set for reservations using the managed - disaster recovery feature. Users can set this in - create reservation calls to create a failover - reservation or in update reservation calls to - convert a non-failover reservation to a failover - reservation(or vice versa). - original_primary_location (str): - Output only. The location where the - reservation was originally created. This is set - only during the failover reservation's creation. - All billing charges for the failover reservation - will be applied to this location. - replication_status (google.cloud.bigquery_reservation_v1.types.Reservation.ReplicationStatus): - Output only. 
The Disaster Recovery (DR)
-            replication status of the reservation. This is
-            only available for the primary replicas of
-            DR/failover reservations and provides
-            information about both the staleness of the
-            secondary and the last error encountered while
-            trying to replicate changes from the primary to
-            the secondary. If this field is blank, the
-            reservation is either not a DR reservation, is a
-            DR secondary, or all replication operations on
-            the reservation have succeeded.
-    """
-
-    class Autoscale(proto.Message):
-        r"""Auto scaling settings.
-
-        Attributes:
-            current_slots (int):
-                Output only. The slot capacity added to this reservation
-                when autoscale happens. Will be between [0, max_slots].
-                Note: after users reduce max_slots, it may take a while
-                before it can be propagated, so current_slots may stay at
-                the original value and could be larger than max_slots for
-                that brief period (less than one minute).
-            max_slots (int):
-                Number of slots to be scaled when needed.
-        """
-
-        current_slots: int = proto.Field(
-            proto.INT64,
-            number=1,
-        )
-        max_slots: int = proto.Field(
-            proto.INT64,
-            number=2,
-        )
-
-    class ReplicationStatus(proto.Message):
-        r"""Disaster Recovery (DR) replication status of the reservation.
-
-        Attributes:
-            error (google.rpc.status_pb2.Status):
-                Output only. The last error encountered while
-                trying to replicate changes from the primary to
-                the secondary. This field is only available if
-                the replication has not succeeded since.
-            last_error_time (google.protobuf.timestamp_pb2.Timestamp):
-                Output only. The time at which the last error
-                was encountered while trying to replicate
-                changes from the primary to the secondary. This
-                field is only available if the replication has
-                not succeeded since.
-            last_replication_time (google.protobuf.timestamp_pb2.Timestamp):
-                Output only. A timestamp corresponding to the
-                last change on the primary that was successfully
-                replicated to the secondary.
- """ - - error: status_pb2.Status = proto.Field( - proto.MESSAGE, - number=1, - message=status_pb2.Status, - ) - last_error_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - last_replication_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - slot_capacity: int = proto.Field( - proto.INT64, - number=2, - ) - ignore_idle_slots: bool = proto.Field( - proto.BOOL, - number=4, - ) - autoscale: Autoscale = proto.Field( - proto.MESSAGE, - number=7, - message=Autoscale, - ) - concurrency: int = proto.Field( - proto.INT64, - number=16, - ) - creation_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=8, - message=timestamp_pb2.Timestamp, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=9, - message=timestamp_pb2.Timestamp, - ) - multi_region_auxiliary: bool = proto.Field( - proto.BOOL, - number=14, - ) - edition: 'Edition' = proto.Field( - proto.ENUM, - number=17, - enum='Edition', - ) - primary_location: str = proto.Field( - proto.STRING, - number=18, - ) - secondary_location: str = proto.Field( - proto.STRING, - number=19, - ) - original_primary_location: str = proto.Field( - proto.STRING, - number=20, - ) - replication_status: ReplicationStatus = proto.Field( - proto.MESSAGE, - number=24, - message=ReplicationStatus, - ) - - -class CapacityCommitment(proto.Message): - r"""Capacity commitment is a way to purchase compute capacity for - BigQuery jobs (in the form of slots) with some committed period - of usage. Annual commitments renew by default. Commitments can - be removed after their commitment end time passes. - - In order to remove annual commitment, its plan needs to be - changed to monthly or flex first. - - A capacity commitment resource exists as a child resource of the - admin project. - - Attributes: - name (str): - Output only. The resource name of the capacity commitment, - e.g., - ``projects/myproject/locations/US/capacityCommitments/123`` - The commitment_id must only contain lower case alphanumeric - characters or dashes. It must start with a letter and must - not end with a dash. Its maximum length is 64 characters. - slot_count (int): - Number of slots in this commitment. - plan (google.cloud.bigquery_reservation_v1.types.CapacityCommitment.CommitmentPlan): - Capacity commitment commitment plan. - state (google.cloud.bigquery_reservation_v1.types.CapacityCommitment.State): - Output only. State of the commitment. - commitment_start_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The start of the current commitment period. It - is applicable only for ACTIVE capacity commitments. Note - after the commitment is renewed, commitment_start_time won't - be changed. It refers to the start time of the original - commitment. - commitment_end_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The end of the current commitment period. It is - applicable only for ACTIVE capacity commitments. Note after - renewal, commitment_end_time is the time the renewed - commitment expires. So it would be at a time after - commitment_start_time + committed period, because we don't - change commitment_start_time , - failure_status (google.rpc.status_pb2.Status): - Output only. For FAILED commitment plan, - provides the reason of failure. 
- renewal_plan (google.cloud.bigquery_reservation_v1.types.CapacityCommitment.CommitmentPlan): - The plan this capacity commitment is converted to after - commitment_end_time passes. Once the plan is changed, - committed period is extended according to commitment plan. - Only applicable for ANNUAL and TRIAL commitments. - multi_region_auxiliary (bool): - Applicable only for commitments located - within one of the BigQuery multi-regions (US or - EU). - - If set to true, this commitment is placed in the - organization's secondary region which is - designated for disaster recovery purposes. If - false, this commitment is placed in the - organization's default region. - - NOTE: this is a preview feature. Project must be - allow-listed in order to set this field. - edition (google.cloud.bigquery_reservation_v1.types.Edition): - Edition of the capacity commitment. - is_flat_rate (bool): - Output only. If true, the commitment is a - flat-rate commitment, otherwise, it's an edition - commitment. - """ - class CommitmentPlan(proto.Enum): - r"""Commitment plan defines the current committed period. - Capacity commitment cannot be deleted during it's committed - period. - - Values: - COMMITMENT_PLAN_UNSPECIFIED (0): - Invalid plan value. Requests with this value will be - rejected with error code - ``google.rpc.Code.INVALID_ARGUMENT``. - FLEX (3): - Flex commitments have committed period of 1 - minute after becoming ACTIVE. After that, they - are not in a committed period anymore and can be - removed any time. - FLEX_FLAT_RATE (7): - Same as FLEX, should only be used if - flat-rate commitments are still available. - TRIAL (5): - Trial commitments have a committed period of 182 days after - becoming ACTIVE. After that, they are converted to a new - commitment based on the ``renewal_plan``. Default - ``renewal_plan`` for Trial commitment is Flex so that it can - be deleted right after committed period ends. - MONTHLY (2): - Monthly commitments have a committed period - of 30 days after becoming ACTIVE. After that, - they are not in a committed period anymore and - can be removed any time. - MONTHLY_FLAT_RATE (8): - Same as MONTHLY, should only be used if - flat-rate commitments are still available. - ANNUAL (4): - Annual commitments have a committed period of 365 days after - becoming ACTIVE. After that they are converted to a new - commitment based on the renewal_plan. - ANNUAL_FLAT_RATE (9): - Same as ANNUAL, should only be used if - flat-rate commitments are still available. - THREE_YEAR (10): - 3-year commitments have a committed period of 1095(3 \* 365) - days after becoming ACTIVE. After that they are converted to - a new commitment based on the renewal_plan. - NONE (6): - Should only be used for ``renewal_plan`` and is only - meaningful if edition is specified to values other than - EDITION_UNSPECIFIED. Otherwise - CreateCapacityCommitmentRequest or - UpdateCapacityCommitmentRequest will be rejected with error - code ``google.rpc.Code.INVALID_ARGUMENT``. If the - renewal_plan is NONE, capacity commitment will be removed at - the end of its commitment period. - """ - COMMITMENT_PLAN_UNSPECIFIED = 0 - FLEX = 3 - FLEX_FLAT_RATE = 7 - TRIAL = 5 - MONTHLY = 2 - MONTHLY_FLAT_RATE = 8 - ANNUAL = 4 - ANNUAL_FLAT_RATE = 9 - THREE_YEAR = 10 - NONE = 6 - - class State(proto.Enum): - r"""Capacity commitment can either become ACTIVE right away or - transition from PENDING to ACTIVE or FAILED. - - Values: - STATE_UNSPECIFIED (0): - Invalid state value. 
- PENDING (1): - Capacity commitment is pending provisioning. Pending - capacity commitment does not contribute to the project's - slot_capacity. - ACTIVE (2): - Once slots are provisioned, capacity commitment becomes - active. slot_count is added to the project's slot_capacity. - FAILED (3): - Capacity commitment is failed to be activated - by the backend. - """ - STATE_UNSPECIFIED = 0 - PENDING = 1 - ACTIVE = 2 - FAILED = 3 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - slot_count: int = proto.Field( - proto.INT64, - number=2, - ) - plan: CommitmentPlan = proto.Field( - proto.ENUM, - number=3, - enum=CommitmentPlan, - ) - state: State = proto.Field( - proto.ENUM, - number=4, - enum=State, - ) - commitment_start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=9, - message=timestamp_pb2.Timestamp, - ) - commitment_end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - failure_status: status_pb2.Status = proto.Field( - proto.MESSAGE, - number=7, - message=status_pb2.Status, - ) - renewal_plan: CommitmentPlan = proto.Field( - proto.ENUM, - number=8, - enum=CommitmentPlan, - ) - multi_region_auxiliary: bool = proto.Field( - proto.BOOL, - number=10, - ) - edition: 'Edition' = proto.Field( - proto.ENUM, - number=12, - enum='Edition', - ) - is_flat_rate: bool = proto.Field( - proto.BOOL, - number=14, - ) - - -class CreateReservationRequest(proto.Message): - r"""The request for - [ReservationService.CreateReservation][google.cloud.bigquery.reservation.v1.ReservationService.CreateReservation]. - - Attributes: - parent (str): - Required. Project, location. E.g., - ``projects/myproject/locations/US`` - reservation_id (str): - The reservation ID. It must only contain - lower case alphanumeric characters or dashes. It - must start with a letter and must not end with a - dash. Its maximum length is 64 characters. - reservation (google.cloud.bigquery_reservation_v1.types.Reservation): - Definition of the new reservation to create. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - reservation_id: str = proto.Field( - proto.STRING, - number=2, - ) - reservation: 'Reservation' = proto.Field( - proto.MESSAGE, - number=3, - message='Reservation', - ) - - -class ListReservationsRequest(proto.Message): - r"""The request for - [ReservationService.ListReservations][google.cloud.bigquery.reservation.v1.ReservationService.ListReservations]. - - Attributes: - parent (str): - Required. The parent resource name containing project and - location, e.g.: ``projects/myproject/locations/US`` - page_size (int): - The maximum number of items to return per - page. - page_token (str): - The next_page_token value returned from a previous List - request, if any. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListReservationsResponse(proto.Message): - r"""The response for - [ReservationService.ListReservations][google.cloud.bigquery.reservation.v1.ReservationService.ListReservations]. - - Attributes: - reservations (MutableSequence[google.cloud.bigquery_reservation_v1.types.Reservation]): - List of reservations visible to the user. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. 
- """ - - @property - def raw_page(self): - return self - - reservations: MutableSequence['Reservation'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Reservation', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class GetReservationRequest(proto.Message): - r"""The request for - [ReservationService.GetReservation][google.cloud.bigquery.reservation.v1.ReservationService.GetReservation]. - - Attributes: - name (str): - Required. Resource name of the reservation to retrieve. - E.g., - ``projects/myproject/locations/US/reservations/team1-prod`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class DeleteReservationRequest(proto.Message): - r"""The request for - [ReservationService.DeleteReservation][google.cloud.bigquery.reservation.v1.ReservationService.DeleteReservation]. - - Attributes: - name (str): - Required. Resource name of the reservation to retrieve. - E.g., - ``projects/myproject/locations/US/reservations/team1-prod`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class UpdateReservationRequest(proto.Message): - r"""The request for - [ReservationService.UpdateReservation][google.cloud.bigquery.reservation.v1.ReservationService.UpdateReservation]. - - Attributes: - reservation (google.cloud.bigquery_reservation_v1.types.Reservation): - Content of the reservation to update. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Standard field mask for the set of fields to - be updated. - """ - - reservation: 'Reservation' = proto.Field( - proto.MESSAGE, - number=1, - message='Reservation', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class FailoverReservationRequest(proto.Message): - r"""The request for ReservationService.FailoverReservation. - - Attributes: - name (str): - Required. Resource name of the reservation to failover. - E.g., - ``projects/myproject/locations/US/reservations/team1-prod`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateCapacityCommitmentRequest(proto.Message): - r"""The request for - [ReservationService.CreateCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.CreateCapacityCommitment]. - - Attributes: - parent (str): - Required. Resource name of the parent reservation. E.g., - ``projects/myproject/locations/US`` - capacity_commitment (google.cloud.bigquery_reservation_v1.types.CapacityCommitment): - Content of the capacity commitment to create. - enforce_single_admin_project_per_org (bool): - If true, fail the request if another project - in the organization has a capacity commitment. - capacity_commitment_id (str): - The optional capacity commitment ID. Capacity - commitment name will be generated automatically - if this field is empty. This field must only - contain lower case alphanumeric characters or - dashes. The first and last character cannot be a - dash. Max length is 64 characters. NOTE: this ID - won't be kept if the capacity commitment is - split or merged. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - capacity_commitment: 'CapacityCommitment' = proto.Field( - proto.MESSAGE, - number=2, - message='CapacityCommitment', - ) - enforce_single_admin_project_per_org: bool = proto.Field( - proto.BOOL, - number=4, - ) - capacity_commitment_id: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ListCapacityCommitmentsRequest(proto.Message): - r"""The request for - [ReservationService.ListCapacityCommitments][google.cloud.bigquery.reservation.v1.ReservationService.ListCapacityCommitments]. - - Attributes: - parent (str): - Required. Resource name of the parent reservation. E.g., - ``projects/myproject/locations/US`` - page_size (int): - The maximum number of items to return. - page_token (str): - The next_page_token value returned from a previous List - request, if any. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListCapacityCommitmentsResponse(proto.Message): - r"""The response for - [ReservationService.ListCapacityCommitments][google.cloud.bigquery.reservation.v1.ReservationService.ListCapacityCommitments]. - - Attributes: - capacity_commitments (MutableSequence[google.cloud.bigquery_reservation_v1.types.CapacityCommitment]): - List of capacity commitments visible to the - user. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. - """ - - @property - def raw_page(self): - return self - - capacity_commitments: MutableSequence['CapacityCommitment'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='CapacityCommitment', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class GetCapacityCommitmentRequest(proto.Message): - r"""The request for - [ReservationService.GetCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.GetCapacityCommitment]. - - Attributes: - name (str): - Required. Resource name of the capacity commitment to - retrieve. E.g., - ``projects/myproject/locations/US/capacityCommitments/123`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class DeleteCapacityCommitmentRequest(proto.Message): - r"""The request for - [ReservationService.DeleteCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.DeleteCapacityCommitment]. - - Attributes: - name (str): - Required. Resource name of the capacity commitment to - delete. E.g., - ``projects/myproject/locations/US/capacityCommitments/123`` - force (bool): - Can be used to force delete commitments even - if assignments exist. Deleting commitments with - assignments may cause queries to fail if they no - longer have access to slots. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - force: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -class UpdateCapacityCommitmentRequest(proto.Message): - r"""The request for - [ReservationService.UpdateCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.UpdateCapacityCommitment]. - - Attributes: - capacity_commitment (google.cloud.bigquery_reservation_v1.types.CapacityCommitment): - Content of the capacity commitment to update. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Standard field mask for the set of fields to - be updated. 
- """ - - capacity_commitment: 'CapacityCommitment' = proto.Field( - proto.MESSAGE, - number=1, - message='CapacityCommitment', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class SplitCapacityCommitmentRequest(proto.Message): - r"""The request for - [ReservationService.SplitCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.SplitCapacityCommitment]. - - Attributes: - name (str): - Required. The resource name e.g.,: - ``projects/myproject/locations/US/capacityCommitments/123`` - slot_count (int): - Number of slots in the capacity commitment - after the split. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - slot_count: int = proto.Field( - proto.INT64, - number=2, - ) - - -class SplitCapacityCommitmentResponse(proto.Message): - r"""The response for - [ReservationService.SplitCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.SplitCapacityCommitment]. - - Attributes: - first (google.cloud.bigquery_reservation_v1.types.CapacityCommitment): - First capacity commitment, result of a split. - second (google.cloud.bigquery_reservation_v1.types.CapacityCommitment): - Second capacity commitment, result of a - split. - """ - - first: 'CapacityCommitment' = proto.Field( - proto.MESSAGE, - number=1, - message='CapacityCommitment', - ) - second: 'CapacityCommitment' = proto.Field( - proto.MESSAGE, - number=2, - message='CapacityCommitment', - ) - - -class MergeCapacityCommitmentsRequest(proto.Message): - r"""The request for - [ReservationService.MergeCapacityCommitments][google.cloud.bigquery.reservation.v1.ReservationService.MergeCapacityCommitments]. - - Attributes: - parent (str): - Parent resource that identifies admin project and location - e.g., ``projects/myproject/locations/us`` - capacity_commitment_ids (MutableSequence[str]): - Ids of capacity commitments to merge. - These capacity commitments must exist under - admin project and location specified in the - parent. - ID is the last portion of capacity commitment - name e.g., 'abc' for - projects/myproject/locations/US/capacityCommitments/abc - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - capacity_commitment_ids: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - - -class Assignment(proto.Message): - r"""An assignment allows a project to submit jobs - of a certain type using slots from the specified reservation. - - Attributes: - name (str): - Output only. Name of the resource. E.g.: - ``projects/myproject/locations/US/reservations/team1-prod/assignments/123``. - The assignment_id must only contain lower case alphanumeric - characters or dashes and the max length is 64 characters. - assignee (str): - The resource which will use the reservation. E.g. - ``projects/myproject``, ``folders/123``, or - ``organizations/456``. - job_type (google.cloud.bigquery_reservation_v1.types.Assignment.JobType): - Which type of jobs will use the reservation. - state (google.cloud.bigquery_reservation_v1.types.Assignment.State): - Output only. State of the assignment. - enable_gemini_in_bigquery (bool): - Optional. This field controls if "Gemini in BigQuery" - (https://cloud.google.com/gemini/docs/bigquery/overview) - features should be enabled for this reservation assignment, - which is not on by default. "Gemini in BigQuery" has a - distinct compliance posture from BigQuery. 
If this field is - set to true, the assignment job type is QUERY, and the - parent reservation edition is ENTERPRISE_PLUS, then the - assignment will give the grantee project/organization access - to "Gemini in BigQuery" features. - """ - class JobType(proto.Enum): - r"""Types of job, which could be specified when using the - reservation. - - Values: - JOB_TYPE_UNSPECIFIED (0): - Invalid type. Requests with this value will be rejected with - error code ``google.rpc.Code.INVALID_ARGUMENT``. - PIPELINE (1): - Pipeline (load/export) jobs from the project - will use the reservation. - QUERY (2): - Query jobs from the project will use the - reservation. - ML_EXTERNAL (3): - BigQuery ML jobs that use services external - to BigQuery for model training. These jobs will - not utilize idle slots from other reservations. - BACKGROUND (4): - Background jobs that BigQuery runs for the - customers in the background. - CONTINUOUS (6): - Continuous SQL jobs will use this - reservation. Reservations with continuous - assignments cannot be mixed with non-continuous - assignments. - """ - JOB_TYPE_UNSPECIFIED = 0 - PIPELINE = 1 - QUERY = 2 - ML_EXTERNAL = 3 - BACKGROUND = 4 - CONTINUOUS = 6 - - class State(proto.Enum): - r"""Assignment will remain in PENDING state if no active capacity - commitment is present. It will become ACTIVE when some capacity - commitment becomes active. - - Values: - STATE_UNSPECIFIED (0): - Invalid state value. - PENDING (1): - Queries from assignee will be executed as - on-demand, if related assignment is pending. - ACTIVE (2): - Assignment is ready. - """ - STATE_UNSPECIFIED = 0 - PENDING = 1 - ACTIVE = 2 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - assignee: str = proto.Field( - proto.STRING, - number=4, - ) - job_type: JobType = proto.Field( - proto.ENUM, - number=3, - enum=JobType, - ) - state: State = proto.Field( - proto.ENUM, - number=6, - enum=State, - ) - enable_gemini_in_bigquery: bool = proto.Field( - proto.BOOL, - number=10, - ) - - -class CreateAssignmentRequest(proto.Message): - r"""The request for - [ReservationService.CreateAssignment][google.cloud.bigquery.reservation.v1.ReservationService.CreateAssignment]. - Note: "bigquery.reservationAssignments.create" permission is - required on the related assignee. - - Attributes: - parent (str): - Required. The parent resource name of the assignment E.g. - ``projects/myproject/locations/US/reservations/team1-prod`` - assignment (google.cloud.bigquery_reservation_v1.types.Assignment): - Assignment resource to create. - assignment_id (str): - The optional assignment ID. Assignment name - will be generated automatically if this field is - empty. This field must only contain lower case - alphanumeric characters or dashes. Max length is - 64 characters. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - assignment: 'Assignment' = proto.Field( - proto.MESSAGE, - number=2, - message='Assignment', - ) - assignment_id: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListAssignmentsRequest(proto.Message): - r"""The request for - [ReservationService.ListAssignments][google.cloud.bigquery.reservation.v1.ReservationService.ListAssignments]. - - Attributes: - parent (str): - Required. The parent resource name e.g.: - - ``projects/myproject/locations/US/reservations/team1-prod`` - - Or: - - ``projects/myproject/locations/US/reservations/-`` - page_size (int): - The maximum number of items to return per - page. 
- page_token (str): - The next_page_token value returned from a previous List - request, if any. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListAssignmentsResponse(proto.Message): - r"""The response for - [ReservationService.ListAssignments][google.cloud.bigquery.reservation.v1.ReservationService.ListAssignments]. - - Attributes: - assignments (MutableSequence[google.cloud.bigquery_reservation_v1.types.Assignment]): - List of assignments visible to the user. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. - """ - - @property - def raw_page(self): - return self - - assignments: MutableSequence['Assignment'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Assignment', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteAssignmentRequest(proto.Message): - r"""The request for - [ReservationService.DeleteAssignment][google.cloud.bigquery.reservation.v1.ReservationService.DeleteAssignment]. - Note: "bigquery.reservationAssignments.delete" permission is - required on the related assignee. - - Attributes: - name (str): - Required. Name of the resource, e.g. - ``projects/myproject/locations/US/reservations/team1-prod/assignments/123`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class SearchAssignmentsRequest(proto.Message): - r"""The request for - [ReservationService.SearchAssignments][google.cloud.bigquery.reservation.v1.ReservationService.SearchAssignments]. - Note: "bigquery.reservationAssignments.search" permission is - required on the related assignee. - - Attributes: - parent (str): - Required. The resource name of the admin project(containing - project and location), e.g.: - ``projects/myproject/locations/US``. - query (str): - Please specify resource name as assignee in the query. - - Examples: - - - ``assignee=projects/myproject`` - - ``assignee=folders/123`` - - ``assignee=organizations/456`` - page_size (int): - The maximum number of items to return per - page. - page_token (str): - The next_page_token value returned from a previous List - request, if any. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - query: str = proto.Field( - proto.STRING, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - page_token: str = proto.Field( - proto.STRING, - number=4, - ) - - -class SearchAllAssignmentsRequest(proto.Message): - r"""The request for - [ReservationService.SearchAllAssignments][google.cloud.bigquery.reservation.v1.ReservationService.SearchAllAssignments]. - Note: "bigquery.reservationAssignments.search" permission is - required on the related assignee. - - Attributes: - parent (str): - Required. The resource name with location (project name - could be the wildcard '-'), e.g.: - ``projects/-/locations/US``. - query (str): - Please specify resource name as assignee in the query. - - Examples: - - - ``assignee=projects/myproject`` - - ``assignee=folders/123`` - - ``assignee=organizations/456`` - page_size (int): - The maximum number of items to return per - page. - page_token (str): - The next_page_token value returned from a previous List - request, if any. 
- """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - query: str = proto.Field( - proto.STRING, - number=2, - ) - page_size: int = proto.Field( - proto.INT32, - number=3, - ) - page_token: str = proto.Field( - proto.STRING, - number=4, - ) - - -class SearchAssignmentsResponse(proto.Message): - r"""The response for - [ReservationService.SearchAssignments][google.cloud.bigquery.reservation.v1.ReservationService.SearchAssignments]. - - Attributes: - assignments (MutableSequence[google.cloud.bigquery_reservation_v1.types.Assignment]): - List of assignments visible to the user. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. - """ - - @property - def raw_page(self): - return self - - assignments: MutableSequence['Assignment'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Assignment', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class SearchAllAssignmentsResponse(proto.Message): - r"""The response for - [ReservationService.SearchAllAssignments][google.cloud.bigquery.reservation.v1.ReservationService.SearchAllAssignments]. - - Attributes: - assignments (MutableSequence[google.cloud.bigquery_reservation_v1.types.Assignment]): - List of assignments visible to the user. - next_page_token (str): - Token to retrieve the next page of results, - or empty if there are no more results in the - list. - """ - - @property - def raw_page(self): - return self - - assignments: MutableSequence['Assignment'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Assignment', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class MoveAssignmentRequest(proto.Message): - r"""The request for - [ReservationService.MoveAssignment][google.cloud.bigquery.reservation.v1.ReservationService.MoveAssignment]. - - **Note**: "bigquery.reservationAssignments.create" permission is - required on the destination_id. - - **Note**: "bigquery.reservationAssignments.create" and - "bigquery.reservationAssignments.delete" permission are required on - the related assignee. - - Attributes: - name (str): - Required. The resource name of the assignment, e.g. - ``projects/myproject/locations/US/reservations/team1-prod/assignments/123`` - destination_id (str): - The new reservation ID, e.g.: - ``projects/myotherproject/locations/US/reservations/team2-prod`` - assignment_id (str): - The optional assignment ID. A new assignment - name is generated if this field is empty. - - This field can contain only lowercase - alphanumeric characters or dashes. Max length is - 64 characters. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - destination_id: str = proto.Field( - proto.STRING, - number=3, - ) - assignment_id: str = proto.Field( - proto.STRING, - number=5, - ) - - -class UpdateAssignmentRequest(proto.Message): - r"""The request for - [ReservationService.UpdateAssignment][google.cloud.bigquery.reservation.v1.ReservationService.UpdateAssignment]. - - Attributes: - assignment (google.cloud.bigquery_reservation_v1.types.Assignment): - Content of the assignment to update. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Standard field mask for the set of fields to - be updated. 
- """ - - assignment: 'Assignment' = proto.Field( - proto.MESSAGE, - number=1, - message='Assignment', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class TableReference(proto.Message): - r"""Fully qualified reference to BigQuery table. - Internally stored as google.cloud.bi.v1.BqTableReference. - - Attributes: - project_id (str): - The assigned project ID of the project. - dataset_id (str): - The ID of the dataset in the above project. - table_id (str): - The ID of the table in the above dataset. - """ - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - dataset_id: str = proto.Field( - proto.STRING, - number=2, - ) - table_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class BiReservation(proto.Message): - r"""Represents a BI Reservation. - - Attributes: - name (str): - The resource name of the singleton BI reservation. - Reservation names have the form - ``projects/{project_id}/locations/{location_id}/biReservation``. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The last update timestamp of a - reservation. - size (int): - Size of a reservation, in bytes. - preferred_tables (MutableSequence[google.cloud.bigquery_reservation_v1.types.TableReference]): - Preferred tables to use BI capacity for. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - update_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - size: int = proto.Field( - proto.INT64, - number=4, - ) - preferred_tables: MutableSequence['TableReference'] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='TableReference', - ) - - -class GetBiReservationRequest(proto.Message): - r"""A request to get a singleton BI reservation. - - Attributes: - name (str): - Required. Name of the requested reservation, for example: - ``projects/{project_id}/locations/{location_id}/biReservation`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class UpdateBiReservationRequest(proto.Message): - r"""A request to update a BI reservation. - - Attributes: - bi_reservation (google.cloud.bigquery_reservation_v1.types.BiReservation): - A reservation to update. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - A list of fields to be updated in this - request. - """ - - bi_reservation: 'BiReservation' = proto.Field( - proto.MESSAGE, - number=1, - message='BiReservation', - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/mypy.ini b/owl-bot-staging/google-cloud-bigquery-reservation/v1/mypy.ini deleted file mode 100644 index 574c5aed394b..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/noxfile.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/noxfile.py deleted file mode 100644 index 7b2776dc5dd3..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/noxfile.py +++ /dev/null @@ -1,280 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import os
-import pathlib
-import re
-import shutil
-import subprocess
-import sys
-
-
-import nox  # type: ignore
-
-ALL_PYTHON = [
-    "3.7",
-    "3.8",
-    "3.9",
-    "3.10",
-    "3.11",
-    "3.12",
-    "3.13",
-]
-
-CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
-
-LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt"
-PACKAGE_NAME = 'google-cloud-bigquery-reservation'
-
-BLACK_VERSION = "black==22.3.0"
-BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"]
-DEFAULT_PYTHON_VERSION = "3.13"
-
-nox.sessions = [
-    "unit",
-    "cover",
-    "mypy",
-    "check_lower_bounds",
-    # exclude update_lower_bounds from default
-    "docs",
-    "blacken",
-    "lint",
-    "prerelease_deps",
-]
-
-@nox.session(python=ALL_PYTHON)
-@nox.parametrize(
-    "protobuf_implementation",
-    [ "python", "upb", "cpp" ],
-)
-def unit(session, protobuf_implementation):
-    """Run the unit test suite."""
-
-    if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
-        session.skip("cpp implementation is not supported in python 3.11+")
-
-    session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"')
-    session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt")
-
-    # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped.
-    # The 'cpp' implementation requires Protobuf<4.
-    if protobuf_implementation == "cpp":
-        session.install("protobuf<4")
-
-    session.run(
-        'py.test',
-        '--quiet',
-        '--cov=google/cloud/bigquery_reservation_v1/',
-        '--cov=tests/',
-        '--cov-config=.coveragerc',
-        '--cov-report=term',
-        '--cov-report=html',
-        os.path.join('tests', 'unit', ''.join(session.posargs)),
-        env={
-            "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
-        },
-    )
-
-@nox.session(python=ALL_PYTHON[-1])
-@nox.parametrize(
-    "protobuf_implementation",
-    [ "python", "upb", "cpp" ],
-)
-def prerelease_deps(session, protobuf_implementation):
-    """Run the unit test suite against pre-release versions of dependencies."""
-
-    if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
-        session.skip("cpp implementation is not supported in python 3.11+")
-
-    # Install test environment dependencies
-    session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"')
-
-    # Install the package without dependencies
-    session.install('-e', '.', '--no-deps')
-
-    # We test the minimum dependency versions using the minimum Python
-    # version so the lowest python runtime that we test has a corresponding constraints
-    # file, located at `testing/constraints--.txt`, which contains all of the
-    # dependencies and extras.
-    with open(
-        CURRENT_DIRECTORY
-        / "testing"
-        / f"constraints-{ALL_PYTHON[0]}.txt",
-        encoding="utf-8",
-    ) as constraints_file:
-        constraints_text = constraints_file.read()
-
-    # Ignore leading whitespace and comment lines.
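(Editor's aside, not part of the deleted file: the unit and prerelease_deps sessions above pin the protobuf backend through the PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION variable in env=. A hedged way to confirm which backend a process actually loaded is sketched below — api_implementation is an internal protobuf module, so treat this as a debugging aid rather than stable API.)

import os

# Must be set before google.protobuf is first imported in the process.
os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "upb"

from google.protobuf.internal import api_implementation

# Reports the backend in effect: 'upb', 'python', or 'cpp'.
print(api_implementation.Type())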
- constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - session.install(*constraints_deps) - - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 - "grpcio!=1.67.0rc1", - "grpcio-status", - "protobuf", - "proto-plus", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) - - # Remaining dependencies - other_deps = [ - "requests", - ] - session.install(*other_deps) - - # Print out prerelease package versions - - session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") - session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("python", "-c", "import grpc; print(grpc.__version__)") - session.run( - "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" - ) - session.run( - "python", "-c", "import proto; print(proto.__version__)" - ) - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/bigquery_reservation_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. - """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. 
- """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_create_assignment_async.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_create_assignment_async.py deleted file mode 100644 index fbbbda5ce2d2..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_create_assignment_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateAssignment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-reservation - - -# [START bigqueryreservation_v1_generated_ReservationService_CreateAssignment_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_reservation_v1 - - -async def sample_create_assignment(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.CreateAssignmentRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_assignment(request=request) - - # Handle the response - print(response) - -# [END bigqueryreservation_v1_generated_ReservationService_CreateAssignment_async] diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_create_assignment_sync.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_create_assignment_sync.py deleted file mode 100644 index b60a2fefcb72..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_create_assignment_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateAssignment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-reservation - - -# [START bigqueryreservation_v1_generated_ReservationService_CreateAssignment_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_reservation_v1 - - -def sample_create_assignment(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.CreateAssignmentRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_assignment(request=request) - - # Handle the response - print(response) - -# [END bigqueryreservation_v1_generated_ReservationService_CreateAssignment_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_create_capacity_commitment_async.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_create_capacity_commitment_async.py deleted file mode 100644 index 910fd2abd9fe..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_create_capacity_commitment_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateCapacityCommitment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-reservation - - -# [START bigqueryreservation_v1_generated_ReservationService_CreateCapacityCommitment_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_reservation_v1 - - -async def sample_create_capacity_commitment(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.CreateCapacityCommitmentRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_capacity_commitment(request=request) - - # Handle the response - print(response) - -# [END bigqueryreservation_v1_generated_ReservationService_CreateCapacityCommitment_async] diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_create_capacity_commitment_sync.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_create_capacity_commitment_sync.py deleted file mode 100644 index 8b4a297fe37f..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_create_capacity_commitment_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateCapacityCommitment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-reservation - - -# [START bigqueryreservation_v1_generated_ReservationService_CreateCapacityCommitment_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_reservation_v1 - - -def sample_create_capacity_commitment(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.CreateCapacityCommitmentRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_capacity_commitment(request=request) - - # Handle the response - print(response) - -# [END bigqueryreservation_v1_generated_ReservationService_CreateCapacityCommitment_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_create_reservation_async.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_create_reservation_async.py deleted file mode 100644 index de9e3dcc3524..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_create_reservation_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateReservation -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-reservation - - -# [START bigqueryreservation_v1_generated_ReservationService_CreateReservation_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_reservation_v1 - - -async def sample_create_reservation(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.CreateReservationRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_reservation(request=request) - - # Handle the response - print(response) - -# [END bigqueryreservation_v1_generated_ReservationService_CreateReservation_async] diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_create_reservation_sync.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_create_reservation_sync.py deleted file mode 100644 index 3d1972936ebf..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_create_reservation_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateReservation -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-reservation - - -# [START bigqueryreservation_v1_generated_ReservationService_CreateReservation_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_reservation_v1 - - -def sample_create_reservation(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.CreateReservationRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_reservation(request=request) - - # Handle the response - print(response) - -# [END bigqueryreservation_v1_generated_ReservationService_CreateReservation_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_delete_assignment_async.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_delete_assignment_async.py deleted file mode 100644 index e9b4c82a74b0..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_delete_assignment_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteAssignment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-reservation - - -# [START bigqueryreservation_v1_generated_ReservationService_DeleteAssignment_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_reservation_v1 - - -async def sample_delete_assignment(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.DeleteAssignmentRequest( - name="name_value", - ) - - # Make the request - await client.delete_assignment(request=request) - - -# [END bigqueryreservation_v1_generated_ReservationService_DeleteAssignment_async] diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_delete_assignment_sync.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_delete_assignment_sync.py deleted file mode 100644 index 018be6e62fba..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_delete_assignment_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteAssignment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-reservation - - -# [START bigqueryreservation_v1_generated_ReservationService_DeleteAssignment_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_reservation_v1 - - -def sample_delete_assignment(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.DeleteAssignmentRequest( - name="name_value", - ) - - # Make the request - client.delete_assignment(request=request) - - -# [END bigqueryreservation_v1_generated_ReservationService_DeleteAssignment_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_delete_capacity_commitment_async.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_delete_capacity_commitment_async.py deleted file mode 100644 index 47a487172042..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_delete_capacity_commitment_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteCapacityCommitment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-reservation - - -# [START bigqueryreservation_v1_generated_ReservationService_DeleteCapacityCommitment_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_reservation_v1 - - -async def sample_delete_capacity_commitment(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.DeleteCapacityCommitmentRequest( - name="name_value", - ) - - # Make the request - await client.delete_capacity_commitment(request=request) - - -# [END bigqueryreservation_v1_generated_ReservationService_DeleteCapacityCommitment_async] diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_delete_capacity_commitment_sync.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_delete_capacity_commitment_sync.py deleted file mode 100644 index 92f45e8f551e..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_delete_capacity_commitment_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteCapacityCommitment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-reservation - - -# [START bigqueryreservation_v1_generated_ReservationService_DeleteCapacityCommitment_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_reservation_v1 - - -def sample_delete_capacity_commitment(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.DeleteCapacityCommitmentRequest( - name="name_value", - ) - - # Make the request - client.delete_capacity_commitment(request=request) - - -# [END bigqueryreservation_v1_generated_ReservationService_DeleteCapacityCommitment_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_delete_reservation_async.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_delete_reservation_async.py deleted file mode 100644 index 045e85bb4cad..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_delete_reservation_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteReservation -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-reservation - - -# [START bigqueryreservation_v1_generated_ReservationService_DeleteReservation_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_reservation_v1 - - -async def sample_delete_reservation(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.DeleteReservationRequest( - name="name_value", - ) - - # Make the request - await client.delete_reservation(request=request) - - -# [END bigqueryreservation_v1_generated_ReservationService_DeleteReservation_async] diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_delete_reservation_sync.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_delete_reservation_sync.py deleted file mode 100644 index 0bcd9edd216f..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_delete_reservation_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteReservation -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-reservation - - -# [START bigqueryreservation_v1_generated_ReservationService_DeleteReservation_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_reservation_v1 - - -def sample_delete_reservation(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.DeleteReservationRequest( - name="name_value", - ) - - # Make the request - client.delete_reservation(request=request) - - -# [END bigqueryreservation_v1_generated_ReservationService_DeleteReservation_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_failover_reservation_async.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_failover_reservation_async.py deleted file mode 100644 index bb18225e5d79..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_failover_reservation_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for FailoverReservation -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-reservation - - -# [START bigqueryreservation_v1_generated_ReservationService_FailoverReservation_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_reservation_v1 - - -async def sample_failover_reservation(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.FailoverReservationRequest( - name="name_value", - ) - - # Make the request - response = await client.failover_reservation(request=request) - - # Handle the response - print(response) - -# [END bigqueryreservation_v1_generated_ReservationService_FailoverReservation_async] diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_failover_reservation_sync.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_failover_reservation_sync.py deleted file mode 100644 index b4537e1a8461..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_failover_reservation_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for FailoverReservation -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-reservation - - -# [START bigqueryreservation_v1_generated_ReservationService_FailoverReservation_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_reservation_v1 - - -def sample_failover_reservation(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.FailoverReservationRequest( - name="name_value", - ) - - # Make the request - response = client.failover_reservation(request=request) - - # Handle the response - print(response) - -# [END bigqueryreservation_v1_generated_ReservationService_FailoverReservation_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_get_bi_reservation_async.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_get_bi_reservation_async.py deleted file mode 100644 index 9a7209aee9fc..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_get_bi_reservation_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetBiReservation -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-reservation - - -# [START bigqueryreservation_v1_generated_ReservationService_GetBiReservation_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_reservation_v1 - - -async def sample_get_bi_reservation(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.GetBiReservationRequest( - name="name_value", - ) - - # Make the request - response = await client.get_bi_reservation(request=request) - - # Handle the response - print(response) - -# [END bigqueryreservation_v1_generated_ReservationService_GetBiReservation_async] diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_get_bi_reservation_sync.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_get_bi_reservation_sync.py deleted file mode 100644 index 2c82fdc5cf63..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_get_bi_reservation_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetBiReservation -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-reservation - - -# [START bigqueryreservation_v1_generated_ReservationService_GetBiReservation_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_reservation_v1 - - -def sample_get_bi_reservation(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.GetBiReservationRequest( - name="name_value", - ) - - # Make the request - response = client.get_bi_reservation(request=request) - - # Handle the response - print(response) - -# [END bigqueryreservation_v1_generated_ReservationService_GetBiReservation_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_get_capacity_commitment_async.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_get_capacity_commitment_async.py deleted file mode 100644 index 90408c82bff6..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_get_capacity_commitment_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetCapacityCommitment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-reservation - - -# [START bigqueryreservation_v1_generated_ReservationService_GetCapacityCommitment_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_reservation_v1 - - -async def sample_get_capacity_commitment(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.GetCapacityCommitmentRequest( - name="name_value", - ) - - # Make the request - response = await client.get_capacity_commitment(request=request) - - # Handle the response - print(response) - -# [END bigqueryreservation_v1_generated_ReservationService_GetCapacityCommitment_async] diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_get_capacity_commitment_sync.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_get_capacity_commitment_sync.py deleted file mode 100644 index 53ccc6266eee..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_get_capacity_commitment_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetCapacityCommitment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-reservation - - -# [START bigqueryreservation_v1_generated_ReservationService_GetCapacityCommitment_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_reservation_v1 - - -def sample_get_capacity_commitment(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.GetCapacityCommitmentRequest( - name="name_value", - ) - - # Make the request - response = client.get_capacity_commitment(request=request) - - # Handle the response - print(response) - -# [END bigqueryreservation_v1_generated_ReservationService_GetCapacityCommitment_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_get_reservation_async.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_get_reservation_async.py deleted file mode 100644 index ba4e8f62b64b..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_get_reservation_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetReservation -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-reservation - - -# [START bigqueryreservation_v1_generated_ReservationService_GetReservation_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_reservation_v1 - - -async def sample_get_reservation(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.GetReservationRequest( - name="name_value", - ) - - # Make the request - response = await client.get_reservation(request=request) - - # Handle the response - print(response) - -# [END bigqueryreservation_v1_generated_ReservationService_GetReservation_async] diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_get_reservation_sync.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_get_reservation_sync.py deleted file mode 100644 index ea04c9afa4cf..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_get_reservation_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetReservation -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-reservation - - -# [START bigqueryreservation_v1_generated_ReservationService_GetReservation_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_reservation_v1 - - -def sample_get_reservation(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.GetReservationRequest( - name="name_value", - ) - - # Make the request - response = client.get_reservation(request=request) - - # Handle the response - print(response) - -# [END bigqueryreservation_v1_generated_ReservationService_GetReservation_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_list_assignments_async.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_list_assignments_async.py deleted file mode 100644 index 29f3287926dd..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_list_assignments_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListAssignments -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-reservation - - -# [START bigqueryreservation_v1_generated_ReservationService_ListAssignments_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_reservation_v1 - - -async def sample_list_assignments(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.ListAssignmentsRequest( - parent="parent_value", - ) - - # Make the request - page_result = await client.list_assignments(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END bigqueryreservation_v1_generated_ReservationService_ListAssignments_async] diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_list_assignments_sync.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_list_assignments_sync.py deleted file mode 100644 index 875b70bca06b..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_list_assignments_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListAssignments -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-reservation - - -# [START bigqueryreservation_v1_generated_ReservationService_ListAssignments_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_reservation_v1 - - -def sample_list_assignments(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.ListAssignmentsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_assignments(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END bigqueryreservation_v1_generated_ReservationService_ListAssignments_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_list_capacity_commitments_async.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_list_capacity_commitments_async.py deleted file mode 100644 index dfd88461b222..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_list_capacity_commitments_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListCapacityCommitments -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-reservation - - -# [START bigqueryreservation_v1_generated_ReservationService_ListCapacityCommitments_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_reservation_v1 - - -async def sample_list_capacity_commitments(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.ListCapacityCommitmentsRequest( - parent="parent_value", - ) - - # Make the request - page_result = await client.list_capacity_commitments(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END bigqueryreservation_v1_generated_ReservationService_ListCapacityCommitments_async] diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_list_capacity_commitments_sync.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_list_capacity_commitments_sync.py deleted file mode 100644 index 5dbe84268750..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_list_capacity_commitments_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListCapacityCommitments -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-reservation - - -# [START bigqueryreservation_v1_generated_ReservationService_ListCapacityCommitments_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_reservation_v1 - - -def sample_list_capacity_commitments(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.ListCapacityCommitmentsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_capacity_commitments(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END bigqueryreservation_v1_generated_ReservationService_ListCapacityCommitments_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_list_reservations_async.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_list_reservations_async.py deleted file mode 100644 index a555d8f474ea..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_list_reservations_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListReservations -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-reservation - - -# [START bigqueryreservation_v1_generated_ReservationService_ListReservations_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_reservation_v1 - - -async def sample_list_reservations(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.ListReservationsRequest( - parent="parent_value", - ) - - # Make the request - page_result = await client.list_reservations(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END bigqueryreservation_v1_generated_ReservationService_ListReservations_async] diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_list_reservations_sync.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_list_reservations_sync.py deleted file mode 100644 index 3f6830eeb3a8..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_list_reservations_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListReservations -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-reservation - - -# [START bigqueryreservation_v1_generated_ReservationService_ListReservations_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_reservation_v1 - - -def sample_list_reservations(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.ListReservationsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_reservations(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END bigqueryreservation_v1_generated_ReservationService_ListReservations_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_merge_capacity_commitments_async.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_merge_capacity_commitments_async.py deleted file mode 100644 index 0edbf6a75185..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_merge_capacity_commitments_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for MergeCapacityCommitments -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-reservation - - -# [START bigqueryreservation_v1_generated_ReservationService_MergeCapacityCommitments_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_reservation_v1 - - -async def sample_merge_capacity_commitments(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.MergeCapacityCommitmentsRequest( - ) - - # Make the request - response = await client.merge_capacity_commitments(request=request) - - # Handle the response - print(response) - -# [END bigqueryreservation_v1_generated_ReservationService_MergeCapacityCommitments_async] diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_merge_capacity_commitments_sync.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_merge_capacity_commitments_sync.py deleted file mode 100644 index bae6d3e93f8d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_merge_capacity_commitments_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for MergeCapacityCommitments -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-reservation - - -# [START bigqueryreservation_v1_generated_ReservationService_MergeCapacityCommitments_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_reservation_v1 - - -def sample_merge_capacity_commitments(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.MergeCapacityCommitmentsRequest( - ) - - # Make the request - response = client.merge_capacity_commitments(request=request) - - # Handle the response - print(response) - -# [END bigqueryreservation_v1_generated_ReservationService_MergeCapacityCommitments_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_move_assignment_async.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_move_assignment_async.py deleted file mode 100644 index f48979ab47b7..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_move_assignment_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for MoveAssignment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-reservation - - -# [START bigqueryreservation_v1_generated_ReservationService_MoveAssignment_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_reservation_v1 - - -async def sample_move_assignment(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.MoveAssignmentRequest( - name="name_value", - ) - - # Make the request - response = await client.move_assignment(request=request) - - # Handle the response - print(response) - -# [END bigqueryreservation_v1_generated_ReservationService_MoveAssignment_async] diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_move_assignment_sync.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_move_assignment_sync.py deleted file mode 100644 index 4f69c9c1e388..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_move_assignment_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for MoveAssignment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-reservation - - -# [START bigqueryreservation_v1_generated_ReservationService_MoveAssignment_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_reservation_v1 - - -def sample_move_assignment(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.MoveAssignmentRequest( - name="name_value", - ) - - # Make the request - response = client.move_assignment(request=request) - - # Handle the response - print(response) - -# [END bigqueryreservation_v1_generated_ReservationService_MoveAssignment_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_search_all_assignments_async.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_search_all_assignments_async.py deleted file mode 100644 index 59f90cf33941..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_search_all_assignments_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SearchAllAssignments -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-reservation - - -# [START bigqueryreservation_v1_generated_ReservationService_SearchAllAssignments_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_reservation_v1 - - -async def sample_search_all_assignments(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.SearchAllAssignmentsRequest( - parent="parent_value", - ) - - # Make the request - page_result = await client.search_all_assignments(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END bigqueryreservation_v1_generated_ReservationService_SearchAllAssignments_async] diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_search_all_assignments_sync.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_search_all_assignments_sync.py deleted file mode 100644 index 86fc92e35085..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_search_all_assignments_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SearchAllAssignments -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-reservation - - -# [START bigqueryreservation_v1_generated_ReservationService_SearchAllAssignments_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_reservation_v1 - - -def sample_search_all_assignments(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.SearchAllAssignmentsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.search_all_assignments(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END bigqueryreservation_v1_generated_ReservationService_SearchAllAssignments_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_search_assignments_async.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_search_assignments_async.py deleted file mode 100644 index b2022ac22120..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_search_assignments_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SearchAssignments -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-reservation - - -# [START bigqueryreservation_v1_generated_ReservationService_SearchAssignments_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_reservation_v1 - - -async def sample_search_assignments(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.SearchAssignmentsRequest( - parent="parent_value", - ) - - # Make the request - page_result = await client.search_assignments(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END bigqueryreservation_v1_generated_ReservationService_SearchAssignments_async] diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_search_assignments_sync.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_search_assignments_sync.py deleted file mode 100644 index 120ab7b2c39d..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_search_assignments_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SearchAssignments -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-reservation - - -# [START bigqueryreservation_v1_generated_ReservationService_SearchAssignments_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_reservation_v1 - - -def sample_search_assignments(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.SearchAssignmentsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.search_assignments(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END bigqueryreservation_v1_generated_ReservationService_SearchAssignments_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_split_capacity_commitment_async.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_split_capacity_commitment_async.py deleted file mode 100644 index 78b1f76b1ce0..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_split_capacity_commitment_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SplitCapacityCommitment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-reservation - - -# [START bigqueryreservation_v1_generated_ReservationService_SplitCapacityCommitment_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_reservation_v1 - - -async def sample_split_capacity_commitment(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.SplitCapacityCommitmentRequest( - name="name_value", - ) - - # Make the request - response = await client.split_capacity_commitment(request=request) - - # Handle the response - print(response) - -# [END bigqueryreservation_v1_generated_ReservationService_SplitCapacityCommitment_async] diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_split_capacity_commitment_sync.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_split_capacity_commitment_sync.py deleted file mode 100644 index 89633e2b7a83..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_split_capacity_commitment_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SplitCapacityCommitment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-reservation - - -# [START bigqueryreservation_v1_generated_ReservationService_SplitCapacityCommitment_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_reservation_v1 - - -def sample_split_capacity_commitment(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.SplitCapacityCommitmentRequest( - name="name_value", - ) - - # Make the request - response = client.split_capacity_commitment(request=request) - - # Handle the response - print(response) - -# [END bigqueryreservation_v1_generated_ReservationService_SplitCapacityCommitment_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_update_assignment_async.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_update_assignment_async.py deleted file mode 100644 index c4b4a8d39281..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_update_assignment_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateAssignment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-reservation - - -# [START bigqueryreservation_v1_generated_ReservationService_UpdateAssignment_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_reservation_v1 - - -async def sample_update_assignment(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.UpdateAssignmentRequest( - ) - - # Make the request - response = await client.update_assignment(request=request) - - # Handle the response - print(response) - -# [END bigqueryreservation_v1_generated_ReservationService_UpdateAssignment_async] diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_update_assignment_sync.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_update_assignment_sync.py deleted file mode 100644 index d37981c6bf4c..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_update_assignment_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateAssignment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-reservation - - -# [START bigqueryreservation_v1_generated_ReservationService_UpdateAssignment_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_reservation_v1 - - -def sample_update_assignment(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.UpdateAssignmentRequest( - ) - - # Make the request - response = client.update_assignment(request=request) - - # Handle the response - print(response) - -# [END bigqueryreservation_v1_generated_ReservationService_UpdateAssignment_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_update_bi_reservation_async.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_update_bi_reservation_async.py deleted file mode 100644 index 2c01094b0c60..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_update_bi_reservation_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateBiReservation -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-reservation - - -# [START bigqueryreservation_v1_generated_ReservationService_UpdateBiReservation_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_reservation_v1 - - -async def sample_update_bi_reservation(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.UpdateBiReservationRequest( - ) - - # Make the request - response = await client.update_bi_reservation(request=request) - - # Handle the response - print(response) - -# [END bigqueryreservation_v1_generated_ReservationService_UpdateBiReservation_async] diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_update_bi_reservation_sync.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_update_bi_reservation_sync.py deleted file mode 100644 index 0df4a9148a51..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_update_bi_reservation_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateBiReservation -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-reservation - - -# [START bigqueryreservation_v1_generated_ReservationService_UpdateBiReservation_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_reservation_v1 - - -def sample_update_bi_reservation(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.UpdateBiReservationRequest( - ) - - # Make the request - response = client.update_bi_reservation(request=request) - - # Handle the response - print(response) - -# [END bigqueryreservation_v1_generated_ReservationService_UpdateBiReservation_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_update_capacity_commitment_async.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_update_capacity_commitment_async.py deleted file mode 100644 index 88e4ca37e1f7..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_update_capacity_commitment_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateCapacityCommitment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-reservation - - -# [START bigqueryreservation_v1_generated_ReservationService_UpdateCapacityCommitment_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
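As with the other samples, the UpdateBiReservation snippets above leave the request empty. A minimal sketch of resizing the BI Engine reservation, assuming a hypothetical project; note that each location exposes a single singleton biReservation resource and that size is expressed in bytes:

from google.cloud import bigquery_reservation_v1
from google.protobuf import field_mask_pb2

def update_bi_reservation_size():
    client = bigquery_reservation_v1.ReservationServiceClient()
    bi_reservation = bigquery_reservation_v1.BiReservation(
        # Hypothetical project; the biReservation segment is a fixed singleton.
        name="projects/my-project/locations/US/biReservation",
        size=100 * 1024**3,  # 100 GiB, in bytes
    )
    request = bigquery_reservation_v1.UpdateBiReservationRequest(
        bi_reservation=bi_reservation,
        update_mask=field_mask_pb2.FieldMask(paths=["size"]),
    )
    response = client.update_bi_reservation(request=request)
    print(response)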
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_reservation_v1 - - -async def sample_update_capacity_commitment(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.UpdateCapacityCommitmentRequest( - ) - - # Make the request - response = await client.update_capacity_commitment(request=request) - - # Handle the response - print(response) - -# [END bigqueryreservation_v1_generated_ReservationService_UpdateCapacityCommitment_async] diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_update_capacity_commitment_sync.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_update_capacity_commitment_sync.py deleted file mode 100644 index 6d272163c3ce..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_update_capacity_commitment_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateCapacityCommitment -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-reservation - - -# [START bigqueryreservation_v1_generated_ReservationService_UpdateCapacityCommitment_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
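Every generated header in these samples notes that a regional endpoint may be needed. A minimal sketch of pointing the client at an explicit endpoint via client_options; the endpoint string below is the service default and stands in for whatever regional endpoint your project requires (see the client_options guide linked in the snippets):

from google.api_core.client_options import ClientOptions
from google.cloud import bigquery_reservation_v1

# Placeholder endpoint; substitute the regional endpoint for your project.
options = ClientOptions(api_endpoint="bigqueryreservation.googleapis.com")
client = bigquery_reservation_v1.ReservationServiceClient(client_options=options)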
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_reservation_v1 - - -def sample_update_capacity_commitment(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.UpdateCapacityCommitmentRequest( - ) - - # Make the request - response = client.update_capacity_commitment(request=request) - - # Handle the response - print(response) - -# [END bigqueryreservation_v1_generated_ReservationService_UpdateCapacityCommitment_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_update_reservation_async.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_update_reservation_async.py deleted file mode 100644 index f894b48e99a4..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_update_reservation_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateReservation -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-reservation - - -# [START bigqueryreservation_v1_generated_ReservationService_UpdateReservation_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
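A minimal sketch of a populated UpdateCapacityCommitment call to complement the empty-request samples above. The resource name is a placeholder, and the choice of renewal_plan as the masked field is illustrative; check the CapacityCommitment message for the fields this RPC permits:

from google.cloud import bigquery_reservation_v1
from google.protobuf import field_mask_pb2

def update_commitment_renewal_plan():
    client = bigquery_reservation_v1.ReservationServiceClient()
    commitment = bigquery_reservation_v1.CapacityCommitment(
        # Hypothetical resource name.
        name="projects/my-project/locations/US/capacityCommitments/my-commitment",
        renewal_plan=bigquery_reservation_v1.CapacityCommitment.CommitmentPlan.MONTHLY,
    )
    request = bigquery_reservation_v1.UpdateCapacityCommitmentRequest(
        capacity_commitment=commitment,
        # Only update the field named in the mask.
        update_mask=field_mask_pb2.FieldMask(paths=["renewal_plan"]),
    )
    response = client.update_capacity_commitment(request=request)
    print(response)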
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_reservation_v1 - - -async def sample_update_reservation(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceAsyncClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.UpdateReservationRequest( - ) - - # Make the request - response = await client.update_reservation(request=request) - - # Handle the response - print(response) - -# [END bigqueryreservation_v1_generated_ReservationService_UpdateReservation_async] diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_update_reservation_sync.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_update_reservation_sync.py deleted file mode 100644 index 47df79049e2c..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_update_reservation_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateReservation -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-bigquery-reservation - - -# [START bigqueryreservation_v1_generated_ReservationService_UpdateReservation_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
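Likewise for UpdateReservation: a minimal sketch of adjusting a reservation's slot capacity, assuming a hypothetical reservation name; the masked field and capacity value are illustrative only:

from google.cloud import bigquery_reservation_v1
from google.protobuf import field_mask_pb2

def update_reservation_capacity():
    client = bigquery_reservation_v1.ReservationServiceClient()
    reservation = bigquery_reservation_v1.Reservation(
        # Hypothetical resource name.
        name="projects/my-project/locations/US/reservations/my-reservation",
        slot_capacity=200,
    )
    request = bigquery_reservation_v1.UpdateReservationRequest(
        reservation=reservation,
        update_mask=field_mask_pb2.FieldMask(paths=["slot_capacity"]),
    )
    response = client.update_reservation(request=request)
    print(response)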
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import bigquery_reservation_v1 - - -def sample_update_reservation(): - # Create a client - client = bigquery_reservation_v1.ReservationServiceClient() - - # Initialize request argument(s) - request = bigquery_reservation_v1.UpdateReservationRequest( - ) - - # Make the request - response = client.update_reservation(request=request) - - # Handle the response - print(response) - -# [END bigqueryreservation_v1_generated_ReservationService_UpdateReservation_sync] diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/snippet_metadata_google.cloud.bigquery.reservation.v1.json b/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/snippet_metadata_google.cloud.bigquery.reservation.v1.json deleted file mode 100644 index 4cdd226b3806..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/samples/generated_samples/snippet_metadata_google.cloud.bigquery.reservation.v1.json +++ /dev/null @@ -1,3635 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.bigquery.reservation.v1", - "version": "v1" - } - ], - "language": "PYTHON", - "name": "google-cloud-bigquery-reservation", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient", - "shortName": "ReservationServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient.create_assignment", - "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.CreateAssignment", - "service": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", - "shortName": "ReservationService" - }, - "shortName": "CreateAssignment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.CreateAssignmentRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "assignment", - "type": "google.cloud.bigquery_reservation_v1.types.Assignment" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_reservation_v1.types.Assignment", - "shortName": "create_assignment" - }, - "description": "Sample for CreateAssignment", - "file": "bigqueryreservation_v1_generated_reservation_service_create_assignment_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_CreateAssignment_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryreservation_v1_generated_reservation_service_create_assignment_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient", - "shortName": 
"ReservationServiceClient" - }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient.create_assignment", - "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.CreateAssignment", - "service": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", - "shortName": "ReservationService" - }, - "shortName": "CreateAssignment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.CreateAssignmentRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "assignment", - "type": "google.cloud.bigquery_reservation_v1.types.Assignment" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_reservation_v1.types.Assignment", - "shortName": "create_assignment" - }, - "description": "Sample for CreateAssignment", - "file": "bigqueryreservation_v1_generated_reservation_service_create_assignment_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_CreateAssignment_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryreservation_v1_generated_reservation_service_create_assignment_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient", - "shortName": "ReservationServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient.create_capacity_commitment", - "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.CreateCapacityCommitment", - "service": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", - "shortName": "ReservationService" - }, - "shortName": "CreateCapacityCommitment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.CreateCapacityCommitmentRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "capacity_commitment", - "type": "google.cloud.bigquery_reservation_v1.types.CapacityCommitment" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_reservation_v1.types.CapacityCommitment", - "shortName": "create_capacity_commitment" - }, - "description": "Sample for CreateCapacityCommitment", - "file": "bigqueryreservation_v1_generated_reservation_service_create_capacity_commitment_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_CreateCapacityCommitment_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 
41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryreservation_v1_generated_reservation_service_create_capacity_commitment_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient", - "shortName": "ReservationServiceClient" - }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient.create_capacity_commitment", - "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.CreateCapacityCommitment", - "service": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", - "shortName": "ReservationService" - }, - "shortName": "CreateCapacityCommitment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.CreateCapacityCommitmentRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "capacity_commitment", - "type": "google.cloud.bigquery_reservation_v1.types.CapacityCommitment" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_reservation_v1.types.CapacityCommitment", - "shortName": "create_capacity_commitment" - }, - "description": "Sample for CreateCapacityCommitment", - "file": "bigqueryreservation_v1_generated_reservation_service_create_capacity_commitment_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_CreateCapacityCommitment_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryreservation_v1_generated_reservation_service_create_capacity_commitment_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient", - "shortName": "ReservationServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient.create_reservation", - "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.CreateReservation", - "service": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", - "shortName": "ReservationService" - }, - "shortName": "CreateReservation" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.CreateReservationRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "reservation", - "type": "google.cloud.bigquery_reservation_v1.types.Reservation" - }, - { - "name": "reservation_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_reservation_v1.types.Reservation", - "shortName": "create_reservation" - }, 
- "description": "Sample for CreateReservation", - "file": "bigqueryreservation_v1_generated_reservation_service_create_reservation_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_CreateReservation_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryreservation_v1_generated_reservation_service_create_reservation_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient", - "shortName": "ReservationServiceClient" - }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient.create_reservation", - "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.CreateReservation", - "service": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", - "shortName": "ReservationService" - }, - "shortName": "CreateReservation" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.CreateReservationRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "reservation", - "type": "google.cloud.bigquery_reservation_v1.types.Reservation" - }, - { - "name": "reservation_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_reservation_v1.types.Reservation", - "shortName": "create_reservation" - }, - "description": "Sample for CreateReservation", - "file": "bigqueryreservation_v1_generated_reservation_service_create_reservation_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_CreateReservation_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryreservation_v1_generated_reservation_service_create_reservation_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient", - "shortName": "ReservationServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient.delete_assignment", - "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.DeleteAssignment", - "service": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", - "shortName": "ReservationService" - }, - "shortName": "DeleteAssignment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.DeleteAssignmentRequest" - }, - { - "name": "name", - "type": "str" - }, - { - 
"name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_assignment" - }, - "description": "Sample for DeleteAssignment", - "file": "bigqueryreservation_v1_generated_reservation_service_delete_assignment_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_DeleteAssignment_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryreservation_v1_generated_reservation_service_delete_assignment_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient", - "shortName": "ReservationServiceClient" - }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient.delete_assignment", - "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.DeleteAssignment", - "service": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", - "shortName": "ReservationService" - }, - "shortName": "DeleteAssignment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.DeleteAssignmentRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_assignment" - }, - "description": "Sample for DeleteAssignment", - "file": "bigqueryreservation_v1_generated_reservation_service_delete_assignment_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_DeleteAssignment_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryreservation_v1_generated_reservation_service_delete_assignment_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient", - "shortName": "ReservationServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient.delete_capacity_commitment", - "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.DeleteCapacityCommitment", - "service": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", - "shortName": "ReservationService" - }, - "shortName": "DeleteCapacityCommitment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.DeleteCapacityCommitmentRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": 
"google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_capacity_commitment" - }, - "description": "Sample for DeleteCapacityCommitment", - "file": "bigqueryreservation_v1_generated_reservation_service_delete_capacity_commitment_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_DeleteCapacityCommitment_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryreservation_v1_generated_reservation_service_delete_capacity_commitment_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient", - "shortName": "ReservationServiceClient" - }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient.delete_capacity_commitment", - "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.DeleteCapacityCommitment", - "service": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", - "shortName": "ReservationService" - }, - "shortName": "DeleteCapacityCommitment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.DeleteCapacityCommitmentRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_capacity_commitment" - }, - "description": "Sample for DeleteCapacityCommitment", - "file": "bigqueryreservation_v1_generated_reservation_service_delete_capacity_commitment_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_DeleteCapacityCommitment_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryreservation_v1_generated_reservation_service_delete_capacity_commitment_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient", - "shortName": "ReservationServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient.delete_reservation", - "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.DeleteReservation", - "service": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", - "shortName": "ReservationService" - }, - "shortName": "DeleteReservation" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.DeleteReservationRequest" - }, - { - "name": 
"name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_reservation" - }, - "description": "Sample for DeleteReservation", - "file": "bigqueryreservation_v1_generated_reservation_service_delete_reservation_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_DeleteReservation_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryreservation_v1_generated_reservation_service_delete_reservation_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient", - "shortName": "ReservationServiceClient" - }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient.delete_reservation", - "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.DeleteReservation", - "service": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", - "shortName": "ReservationService" - }, - "shortName": "DeleteReservation" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.DeleteReservationRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_reservation" - }, - "description": "Sample for DeleteReservation", - "file": "bigqueryreservation_v1_generated_reservation_service_delete_reservation_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_DeleteReservation_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryreservation_v1_generated_reservation_service_delete_reservation_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient", - "shortName": "ReservationServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient.failover_reservation", - "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.FailoverReservation", - "service": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", - "shortName": "ReservationService" - }, - "shortName": "FailoverReservation" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.FailoverReservationRequest" - }, - { - "name": "retry", - "type": 
"google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_reservation_v1.types.Reservation", - "shortName": "failover_reservation" - }, - "description": "Sample for FailoverReservation", - "file": "bigqueryreservation_v1_generated_reservation_service_failover_reservation_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_FailoverReservation_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryreservation_v1_generated_reservation_service_failover_reservation_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient", - "shortName": "ReservationServiceClient" - }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient.failover_reservation", - "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.FailoverReservation", - "service": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", - "shortName": "ReservationService" - }, - "shortName": "FailoverReservation" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.FailoverReservationRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_reservation_v1.types.Reservation", - "shortName": "failover_reservation" - }, - "description": "Sample for FailoverReservation", - "file": "bigqueryreservation_v1_generated_reservation_service_failover_reservation_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_FailoverReservation_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryreservation_v1_generated_reservation_service_failover_reservation_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient", - "shortName": "ReservationServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient.get_bi_reservation", - "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.GetBiReservation", - "service": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", - "shortName": "ReservationService" - }, - "shortName": "GetBiReservation" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.bigquery_reservation_v1.types.GetBiReservationRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_reservation_v1.types.BiReservation", - "shortName": "get_bi_reservation" - }, - "description": "Sample for GetBiReservation", - "file": "bigqueryreservation_v1_generated_reservation_service_get_bi_reservation_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_GetBiReservation_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryreservation_v1_generated_reservation_service_get_bi_reservation_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient", - "shortName": "ReservationServiceClient" - }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient.get_bi_reservation", - "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.GetBiReservation", - "service": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", - "shortName": "ReservationService" - }, - "shortName": "GetBiReservation" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.GetBiReservationRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_reservation_v1.types.BiReservation", - "shortName": "get_bi_reservation" - }, - "description": "Sample for GetBiReservation", - "file": "bigqueryreservation_v1_generated_reservation_service_get_bi_reservation_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_GetBiReservation_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryreservation_v1_generated_reservation_service_get_bi_reservation_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient", - "shortName": "ReservationServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient.get_capacity_commitment", - "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.GetCapacityCommitment", - "service": { - "fullName": 
"google.cloud.bigquery.reservation.v1.ReservationService", - "shortName": "ReservationService" - }, - "shortName": "GetCapacityCommitment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.GetCapacityCommitmentRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_reservation_v1.types.CapacityCommitment", - "shortName": "get_capacity_commitment" - }, - "description": "Sample for GetCapacityCommitment", - "file": "bigqueryreservation_v1_generated_reservation_service_get_capacity_commitment_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_GetCapacityCommitment_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryreservation_v1_generated_reservation_service_get_capacity_commitment_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient", - "shortName": "ReservationServiceClient" - }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient.get_capacity_commitment", - "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.GetCapacityCommitment", - "service": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", - "shortName": "ReservationService" - }, - "shortName": "GetCapacityCommitment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.GetCapacityCommitmentRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_reservation_v1.types.CapacityCommitment", - "shortName": "get_capacity_commitment" - }, - "description": "Sample for GetCapacityCommitment", - "file": "bigqueryreservation_v1_generated_reservation_service_get_capacity_commitment_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_GetCapacityCommitment_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryreservation_v1_generated_reservation_service_get_capacity_commitment_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient", - "shortName": 
"ReservationServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient.get_reservation", - "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.GetReservation", - "service": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", - "shortName": "ReservationService" - }, - "shortName": "GetReservation" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.GetReservationRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_reservation_v1.types.Reservation", - "shortName": "get_reservation" - }, - "description": "Sample for GetReservation", - "file": "bigqueryreservation_v1_generated_reservation_service_get_reservation_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_GetReservation_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryreservation_v1_generated_reservation_service_get_reservation_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient", - "shortName": "ReservationServiceClient" - }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient.get_reservation", - "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.GetReservation", - "service": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", - "shortName": "ReservationService" - }, - "shortName": "GetReservation" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.GetReservationRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_reservation_v1.types.Reservation", - "shortName": "get_reservation" - }, - "description": "Sample for GetReservation", - "file": "bigqueryreservation_v1_generated_reservation_service_get_reservation_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_GetReservation_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryreservation_v1_generated_reservation_service_get_reservation_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - 
"client": { - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient", - "shortName": "ReservationServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient.list_assignments", - "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.ListAssignments", - "service": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", - "shortName": "ReservationService" - }, - "shortName": "ListAssignments" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.ListAssignmentsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_reservation_v1.services.reservation_service.pagers.ListAssignmentsAsyncPager", - "shortName": "list_assignments" - }, - "description": "Sample for ListAssignments", - "file": "bigqueryreservation_v1_generated_reservation_service_list_assignments_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_ListAssignments_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryreservation_v1_generated_reservation_service_list_assignments_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient", - "shortName": "ReservationServiceClient" - }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient.list_assignments", - "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.ListAssignments", - "service": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", - "shortName": "ReservationService" - }, - "shortName": "ListAssignments" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.ListAssignmentsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_reservation_v1.services.reservation_service.pagers.ListAssignmentsPager", - "shortName": "list_assignments" - }, - "description": "Sample for ListAssignments", - "file": "bigqueryreservation_v1_generated_reservation_service_list_assignments_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_ListAssignments_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - 
"start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryreservation_v1_generated_reservation_service_list_assignments_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient", - "shortName": "ReservationServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient.list_capacity_commitments", - "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.ListCapacityCommitments", - "service": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", - "shortName": "ReservationService" - }, - "shortName": "ListCapacityCommitments" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.ListCapacityCommitmentsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_reservation_v1.services.reservation_service.pagers.ListCapacityCommitmentsAsyncPager", - "shortName": "list_capacity_commitments" - }, - "description": "Sample for ListCapacityCommitments", - "file": "bigqueryreservation_v1_generated_reservation_service_list_capacity_commitments_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_ListCapacityCommitments_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryreservation_v1_generated_reservation_service_list_capacity_commitments_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient", - "shortName": "ReservationServiceClient" - }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient.list_capacity_commitments", - "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.ListCapacityCommitments", - "service": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", - "shortName": "ReservationService" - }, - "shortName": "ListCapacityCommitments" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.ListCapacityCommitmentsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_reservation_v1.services.reservation_service.pagers.ListCapacityCommitmentsPager", - "shortName": "list_capacity_commitments" - }, - "description": "Sample for ListCapacityCommitments", - "file": "bigqueryreservation_v1_generated_reservation_service_list_capacity_commitments_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"bigqueryreservation_v1_generated_ReservationService_ListCapacityCommitments_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryreservation_v1_generated_reservation_service_list_capacity_commitments_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient", - "shortName": "ReservationServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient.list_reservations", - "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.ListReservations", - "service": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", - "shortName": "ReservationService" - }, - "shortName": "ListReservations" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.ListReservationsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_reservation_v1.services.reservation_service.pagers.ListReservationsAsyncPager", - "shortName": "list_reservations" - }, - "description": "Sample for ListReservations", - "file": "bigqueryreservation_v1_generated_reservation_service_list_reservations_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_ListReservations_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryreservation_v1_generated_reservation_service_list_reservations_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient", - "shortName": "ReservationServiceClient" - }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient.list_reservations", - "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.ListReservations", - "service": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", - "shortName": "ReservationService" - }, - "shortName": "ListReservations" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.ListReservationsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_reservation_v1.services.reservation_service.pagers.ListReservationsPager", - 
"shortName": "list_reservations" - }, - "description": "Sample for ListReservations", - "file": "bigqueryreservation_v1_generated_reservation_service_list_reservations_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_ListReservations_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryreservation_v1_generated_reservation_service_list_reservations_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient", - "shortName": "ReservationServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient.merge_capacity_commitments", - "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.MergeCapacityCommitments", - "service": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", - "shortName": "ReservationService" - }, - "shortName": "MergeCapacityCommitments" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.MergeCapacityCommitmentsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "capacity_commitment_ids", - "type": "MutableSequence[str]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_reservation_v1.types.CapacityCommitment", - "shortName": "merge_capacity_commitments" - }, - "description": "Sample for MergeCapacityCommitments", - "file": "bigqueryreservation_v1_generated_reservation_service_merge_capacity_commitments_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_MergeCapacityCommitments_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryreservation_v1_generated_reservation_service_merge_capacity_commitments_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient", - "shortName": "ReservationServiceClient" - }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient.merge_capacity_commitments", - "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.MergeCapacityCommitments", - "service": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", - "shortName": "ReservationService" - }, - "shortName": "MergeCapacityCommitments" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.bigquery_reservation_v1.types.MergeCapacityCommitmentsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "capacity_commitment_ids", - "type": "MutableSequence[str]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_reservation_v1.types.CapacityCommitment", - "shortName": "merge_capacity_commitments" - }, - "description": "Sample for MergeCapacityCommitments", - "file": "bigqueryreservation_v1_generated_reservation_service_merge_capacity_commitments_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_MergeCapacityCommitments_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryreservation_v1_generated_reservation_service_merge_capacity_commitments_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient", - "shortName": "ReservationServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient.move_assignment", - "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.MoveAssignment", - "service": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", - "shortName": "ReservationService" - }, - "shortName": "MoveAssignment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.MoveAssignmentRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "destination_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_reservation_v1.types.Assignment", - "shortName": "move_assignment" - }, - "description": "Sample for MoveAssignment", - "file": "bigqueryreservation_v1_generated_reservation_service_move_assignment_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_MoveAssignment_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryreservation_v1_generated_reservation_service_move_assignment_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient", - "shortName": "ReservationServiceClient" - }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient.move_assignment", - "method": { 
- "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.MoveAssignment", - "service": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", - "shortName": "ReservationService" - }, - "shortName": "MoveAssignment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.MoveAssignmentRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "destination_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_reservation_v1.types.Assignment", - "shortName": "move_assignment" - }, - "description": "Sample for MoveAssignment", - "file": "bigqueryreservation_v1_generated_reservation_service_move_assignment_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_MoveAssignment_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryreservation_v1_generated_reservation_service_move_assignment_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient", - "shortName": "ReservationServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient.search_all_assignments", - "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.SearchAllAssignments", - "service": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", - "shortName": "ReservationService" - }, - "shortName": "SearchAllAssignments" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.SearchAllAssignmentsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "query", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_reservation_v1.services.reservation_service.pagers.SearchAllAssignmentsAsyncPager", - "shortName": "search_all_assignments" - }, - "description": "Sample for SearchAllAssignments", - "file": "bigqueryreservation_v1_generated_reservation_service_search_all_assignments_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_SearchAllAssignments_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"bigqueryreservation_v1_generated_reservation_service_search_all_assignments_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient", - "shortName": "ReservationServiceClient" - }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient.search_all_assignments", - "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.SearchAllAssignments", - "service": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", - "shortName": "ReservationService" - }, - "shortName": "SearchAllAssignments" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.SearchAllAssignmentsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "query", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_reservation_v1.services.reservation_service.pagers.SearchAllAssignmentsPager", - "shortName": "search_all_assignments" - }, - "description": "Sample for SearchAllAssignments", - "file": "bigqueryreservation_v1_generated_reservation_service_search_all_assignments_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_SearchAllAssignments_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryreservation_v1_generated_reservation_service_search_all_assignments_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient", - "shortName": "ReservationServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient.search_assignments", - "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.SearchAssignments", - "service": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", - "shortName": "ReservationService" - }, - "shortName": "SearchAssignments" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.SearchAssignmentsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "query", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_reservation_v1.services.reservation_service.pagers.SearchAssignmentsAsyncPager", - "shortName": "search_assignments" - }, - "description": "Sample for SearchAssignments", - "file": "bigqueryreservation_v1_generated_reservation_service_search_assignments_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_SearchAssignments_async", - "segments": [ - { - "end": 52, - 
"start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryreservation_v1_generated_reservation_service_search_assignments_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient", - "shortName": "ReservationServiceClient" - }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient.search_assignments", - "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.SearchAssignments", - "service": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", - "shortName": "ReservationService" - }, - "shortName": "SearchAssignments" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.SearchAssignmentsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "query", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_reservation_v1.services.reservation_service.pagers.SearchAssignmentsPager", - "shortName": "search_assignments" - }, - "description": "Sample for SearchAssignments", - "file": "bigqueryreservation_v1_generated_reservation_service_search_assignments_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_SearchAssignments_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryreservation_v1_generated_reservation_service_search_assignments_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient", - "shortName": "ReservationServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient.split_capacity_commitment", - "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.SplitCapacityCommitment", - "service": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", - "shortName": "ReservationService" - }, - "shortName": "SplitCapacityCommitment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.SplitCapacityCommitmentRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "slot_count", - "type": "int" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_reservation_v1.types.SplitCapacityCommitmentResponse", - "shortName": 
"split_capacity_commitment" - }, - "description": "Sample for SplitCapacityCommitment", - "file": "bigqueryreservation_v1_generated_reservation_service_split_capacity_commitment_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_SplitCapacityCommitment_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryreservation_v1_generated_reservation_service_split_capacity_commitment_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient", - "shortName": "ReservationServiceClient" - }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient.split_capacity_commitment", - "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.SplitCapacityCommitment", - "service": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", - "shortName": "ReservationService" - }, - "shortName": "SplitCapacityCommitment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.SplitCapacityCommitmentRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "slot_count", - "type": "int" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_reservation_v1.types.SplitCapacityCommitmentResponse", - "shortName": "split_capacity_commitment" - }, - "description": "Sample for SplitCapacityCommitment", - "file": "bigqueryreservation_v1_generated_reservation_service_split_capacity_commitment_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_SplitCapacityCommitment_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryreservation_v1_generated_reservation_service_split_capacity_commitment_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient", - "shortName": "ReservationServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient.update_assignment", - "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.UpdateAssignment", - "service": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", - "shortName": "ReservationService" - }, - "shortName": "UpdateAssignment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.UpdateAssignmentRequest" - }, - { - "name": 
"assignment", - "type": "google.cloud.bigquery_reservation_v1.types.Assignment" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_reservation_v1.types.Assignment", - "shortName": "update_assignment" - }, - "description": "Sample for UpdateAssignment", - "file": "bigqueryreservation_v1_generated_reservation_service_update_assignment_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_UpdateAssignment_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryreservation_v1_generated_reservation_service_update_assignment_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient", - "shortName": "ReservationServiceClient" - }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient.update_assignment", - "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.UpdateAssignment", - "service": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", - "shortName": "ReservationService" - }, - "shortName": "UpdateAssignment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.UpdateAssignmentRequest" - }, - { - "name": "assignment", - "type": "google.cloud.bigquery_reservation_v1.types.Assignment" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_reservation_v1.types.Assignment", - "shortName": "update_assignment" - }, - "description": "Sample for UpdateAssignment", - "file": "bigqueryreservation_v1_generated_reservation_service_update_assignment_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_UpdateAssignment_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryreservation_v1_generated_reservation_service_update_assignment_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient", - "shortName": "ReservationServiceAsyncClient" - }, - "fullName": 
"google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient.update_bi_reservation", - "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.UpdateBiReservation", - "service": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", - "shortName": "ReservationService" - }, - "shortName": "UpdateBiReservation" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.UpdateBiReservationRequest" - }, - { - "name": "bi_reservation", - "type": "google.cloud.bigquery_reservation_v1.types.BiReservation" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_reservation_v1.types.BiReservation", - "shortName": "update_bi_reservation" - }, - "description": "Sample for UpdateBiReservation", - "file": "bigqueryreservation_v1_generated_reservation_service_update_bi_reservation_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_UpdateBiReservation_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryreservation_v1_generated_reservation_service_update_bi_reservation_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient", - "shortName": "ReservationServiceClient" - }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient.update_bi_reservation", - "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.UpdateBiReservation", - "service": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", - "shortName": "ReservationService" - }, - "shortName": "UpdateBiReservation" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.UpdateBiReservationRequest" - }, - { - "name": "bi_reservation", - "type": "google.cloud.bigquery_reservation_v1.types.BiReservation" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_reservation_v1.types.BiReservation", - "shortName": "update_bi_reservation" - }, - "description": "Sample for UpdateBiReservation", - "file": "bigqueryreservation_v1_generated_reservation_service_update_bi_reservation_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_UpdateBiReservation_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 
41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryreservation_v1_generated_reservation_service_update_bi_reservation_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient", - "shortName": "ReservationServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient.update_capacity_commitment", - "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.UpdateCapacityCommitment", - "service": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", - "shortName": "ReservationService" - }, - "shortName": "UpdateCapacityCommitment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.UpdateCapacityCommitmentRequest" - }, - { - "name": "capacity_commitment", - "type": "google.cloud.bigquery_reservation_v1.types.CapacityCommitment" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_reservation_v1.types.CapacityCommitment", - "shortName": "update_capacity_commitment" - }, - "description": "Sample for UpdateCapacityCommitment", - "file": "bigqueryreservation_v1_generated_reservation_service_update_capacity_commitment_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_UpdateCapacityCommitment_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryreservation_v1_generated_reservation_service_update_capacity_commitment_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient", - "shortName": "ReservationServiceClient" - }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient.update_capacity_commitment", - "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.UpdateCapacityCommitment", - "service": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", - "shortName": "ReservationService" - }, - "shortName": "UpdateCapacityCommitment" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.UpdateCapacityCommitmentRequest" - }, - { - "name": "capacity_commitment", - "type": "google.cloud.bigquery_reservation_v1.types.CapacityCommitment" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": 
"google.cloud.bigquery_reservation_v1.types.CapacityCommitment", - "shortName": "update_capacity_commitment" - }, - "description": "Sample for UpdateCapacityCommitment", - "file": "bigqueryreservation_v1_generated_reservation_service_update_capacity_commitment_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_UpdateCapacityCommitment_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryreservation_v1_generated_reservation_service_update_capacity_commitment_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient", - "shortName": "ReservationServiceAsyncClient" - }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient.update_reservation", - "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.UpdateReservation", - "service": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", - "shortName": "ReservationService" - }, - "shortName": "UpdateReservation" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.UpdateReservationRequest" - }, - { - "name": "reservation", - "type": "google.cloud.bigquery_reservation_v1.types.Reservation" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_reservation_v1.types.Reservation", - "shortName": "update_reservation" - }, - "description": "Sample for UpdateReservation", - "file": "bigqueryreservation_v1_generated_reservation_service_update_reservation_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_UpdateReservation_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryreservation_v1_generated_reservation_service_update_reservation_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient", - "shortName": "ReservationServiceClient" - }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient.update_reservation", - "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.UpdateReservation", - "service": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", - "shortName": "ReservationService" - }, - "shortName": "UpdateReservation" - }, - "parameters": [ - { - "name": 
"request", - "type": "google.cloud.bigquery_reservation_v1.types.UpdateReservationRequest" - }, - { - "name": "reservation", - "type": "google.cloud.bigquery_reservation_v1.types.Reservation" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.bigquery_reservation_v1.types.Reservation", - "shortName": "update_reservation" - }, - "description": "Sample for UpdateReservation", - "file": "bigqueryreservation_v1_generated_reservation_service_update_reservation_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_UpdateReservation_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "bigqueryreservation_v1_generated_reservation_service_update_reservation_sync.py" - } - ] -} diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/scripts/fixup_bigquery_reservation_v1_keywords.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/scripts/fixup_bigquery_reservation_v1_keywords.py deleted file mode 100644 index aa17d6c0ad42..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/scripts/fixup_bigquery_reservation_v1_keywords.py +++ /dev/null @@ -1,197 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class bigquery_reservationCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_assignment': ('parent', 'assignment', 'assignment_id', ), - 'create_capacity_commitment': ('parent', 'capacity_commitment', 'enforce_single_admin_project_per_org', 'capacity_commitment_id', ), - 'create_reservation': ('parent', 'reservation_id', 'reservation', ), - 'delete_assignment': ('name', ), - 'delete_capacity_commitment': ('name', 'force', ), - 'delete_reservation': ('name', ), - 'failover_reservation': ('name', ), - 'get_bi_reservation': ('name', ), - 'get_capacity_commitment': ('name', ), - 'get_reservation': ('name', ), - 'list_assignments': ('parent', 'page_size', 'page_token', ), - 'list_capacity_commitments': ('parent', 'page_size', 'page_token', ), - 'list_reservations': ('parent', 'page_size', 'page_token', ), - 'merge_capacity_commitments': ('parent', 'capacity_commitment_ids', ), - 'move_assignment': ('name', 'destination_id', 'assignment_id', ), - 'search_all_assignments': ('parent', 'query', 'page_size', 'page_token', ), - 'search_assignments': ('parent', 'query', 'page_size', 'page_token', ), - 'split_capacity_commitment': ('name', 'slot_count', ), - 'update_assignment': ('assignment', 'update_mask', ), - 'update_bi_reservation': ('bi_reservation', 'update_mask', ), - 'update_capacity_commitment': ('capacity_commitment', 'update_mask', ), - 'update_reservation': ('reservation', 'update_mask', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. 
- for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=bigquery_reservationCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the bigquery_reservation client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/setup.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/setup.py deleted file mode 100644 index 03e045a26fcc..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/setup.py +++ /dev/null @@ -1,98 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-bigquery-reservation' - - -description = "Google Cloud Bigquery Reservation API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/bigquery_reservation/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", -] -extras = { -} -url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-reservation" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - extras_require=extras, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-bigquery-reservation/v1/testing/constraints-3.10.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/testing/constraints-3.10.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-bigquery-reservation/v1/testing/constraints-3.11.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/testing/constraints-3.11.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-bigquery-reservation/v1/testing/constraints-3.12.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/testing/constraints-3.12.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-bigquery-reservation/v1/testing/constraints-3.13.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/testing/constraints-3.13.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-bigquery-reservation/v1/testing/constraints-3.7.txt deleted file mode 100644 index fc812592b0ee..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/testing/constraints-3.7.txt +++ /dev/null @@ -1,10 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -proto-plus==1.22.3 -protobuf==3.20.2 diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-bigquery-reservation/v1/testing/constraints-3.8.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/testing/constraints-3.8.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-bigquery-reservation/v1/testing/constraints-3.9.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/testing/constraints-3.9.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
-google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/tests/__init__.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/tests/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/tests/unit/__init__.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/tests/unit/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/tests/unit/gapic/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/tests/unit/gapic/bigquery_reservation_v1/__init__.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/tests/unit/gapic/bigquery_reservation_v1/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/tests/unit/gapic/bigquery_reservation_v1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-bigquery-reservation/v1/tests/unit/gapic/bigquery_reservation_v1/test_reservation_service.py b/owl-bot-staging/google-cloud-bigquery-reservation/v1/tests/unit/gapic/bigquery_reservation_v1/test_reservation_service.py deleted file mode 100644 index 4b75796e13ee..000000000000 --- a/owl-bot-staging/google-cloud-bigquery-reservation/v1/tests/unit/gapic/bigquery_reservation_v1/test_reservation_service.py +++ /dev/null @@ -1,17853 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.bigquery_reservation_v1.services.reservation_service import ReservationServiceAsyncClient -from google.cloud.bigquery_reservation_v1.services.reservation_service import ReservationServiceClient -from google.cloud.bigquery_reservation_v1.services.reservation_service import pagers -from google.cloud.bigquery_reservation_v1.services.reservation_service import transports -from google.cloud.bigquery_reservation_v1.types import reservation -from google.cloud.bigquery_reservation_v1.types import reservation as gcbr_reservation -from google.oauth2 import service_account -from google.protobuf import any_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import 
timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -import google.auth - - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": "service account credentials", - "principal": "service-account@example.com", -} -CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert ReservationServiceClient._get_default_mtls_endpoint(None) is None - assert ReservationServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert ReservationServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert ReservationServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert ReservationServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert ReservationServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert ReservationServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert ReservationServiceClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert ReservationServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - ReservationServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert ReservationServiceClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, 
{"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert ReservationServiceClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert ReservationServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - ReservationServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert ReservationServiceClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert ReservationServiceClient._get_client_cert_source(None, False) is None - assert ReservationServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert ReservationServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert ReservationServiceClient._get_client_cert_source(None, True) is mock_default_cert_source - assert ReservationServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(ReservationServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ReservationServiceClient)) -@mock.patch.object(ReservationServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ReservationServiceAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = ReservationServiceClient._DEFAULT_UNIVERSE - default_endpoint = ReservationServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = ReservationServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert ReservationServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert ReservationServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == ReservationServiceClient.DEFAULT_MTLS_ENDPOINT - assert ReservationServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert ReservationServiceClient._get_api_endpoint(None, None, default_universe, "always") == ReservationServiceClient.DEFAULT_MTLS_ENDPOINT - assert ReservationServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == ReservationServiceClient.DEFAULT_MTLS_ENDPOINT - assert ReservationServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert ReservationServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - ReservationServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." 
- - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert ReservationServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert ReservationServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert ReservationServiceClient._get_universe_domain(None, None) == ReservationServiceClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - ReservationServiceClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." - -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = ReservationServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - -@pytest.mark.parametrize("error_code", [401,403,404,500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = ReservationServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - -@pytest.mark.parametrize("client_class,transport_name", [ - (ReservationServiceClient, "grpc"), - (ReservationServiceAsyncClient, "grpc_asyncio"), - (ReservationServiceClient, "rest"), -]) -def test_reservation_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'bigqueryreservation.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://bigqueryreservation.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.ReservationServiceGrpcTransport, "grpc"), - (transports.ReservationServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.ReservationServiceRestTransport, "rest"), -]) -def test_reservation_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = 
service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (ReservationServiceClient, "grpc"), - (ReservationServiceAsyncClient, "grpc_asyncio"), - (ReservationServiceClient, "rest"), -]) -def test_reservation_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'bigqueryreservation.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://bigqueryreservation.googleapis.com' - ) - - -def test_reservation_service_client_get_transport_class(): - transport = ReservationServiceClient.get_transport_class() - available_transports = [ - transports.ReservationServiceGrpcTransport, - transports.ReservationServiceRestTransport, - ] - assert transport in available_transports - - transport = ReservationServiceClient.get_transport_class("grpc") - assert transport == transports.ReservationServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (ReservationServiceClient, transports.ReservationServiceGrpcTransport, "grpc"), - (ReservationServiceAsyncClient, transports.ReservationServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (ReservationServiceClient, transports.ReservationServiceRestTransport, "rest"), -]) -@mock.patch.object(ReservationServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ReservationServiceClient)) -@mock.patch.object(ReservationServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ReservationServiceAsyncClient)) -def test_reservation_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(ReservationServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(ReservationServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (ReservationServiceClient, transports.ReservationServiceGrpcTransport, "grpc", "true"), - 
(ReservationServiceAsyncClient, transports.ReservationServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (ReservationServiceClient, transports.ReservationServiceGrpcTransport, "grpc", "false"), - (ReservationServiceAsyncClient, transports.ReservationServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (ReservationServiceClient, transports.ReservationServiceRestTransport, "rest", "true"), - (ReservationServiceClient, transports.ReservationServiceRestTransport, "rest", "false"), -]) -@mock.patch.object(ReservationServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ReservationServiceClient)) -@mock.patch.object(ReservationServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ReservationServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_reservation_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - ReservationServiceClient, ReservationServiceAsyncClient -]) -@mock.patch.object(ReservationServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ReservationServiceClient)) -@mock.patch.object(ReservationServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ReservationServiceAsyncClient)) -def test_reservation_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - ReservationServiceClient, ReservationServiceAsyncClient -]) -@mock.patch.object(ReservationServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ReservationServiceClient)) -@mock.patch.object(ReservationServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ReservationServiceAsyncClient)) -def test_reservation_service_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = ReservationServiceClient._DEFAULT_UNIVERSE - default_endpoint = ReservationServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = ReservationServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (ReservationServiceClient, transports.ReservationServiceGrpcTransport, "grpc"), - (ReservationServiceAsyncClient, transports.ReservationServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (ReservationServiceClient, transports.ReservationServiceRestTransport, "rest"), -]) -def test_reservation_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (ReservationServiceClient, transports.ReservationServiceGrpcTransport, "grpc", grpc_helpers), - (ReservationServiceAsyncClient, transports.ReservationServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (ReservationServiceClient, transports.ReservationServiceRestTransport, "rest", None), -]) -def test_reservation_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_reservation_service_client_client_options_from_dict(): - with mock.patch('google.cloud.bigquery_reservation_v1.services.reservation_service.transports.ReservationServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = ReservationServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (ReservationServiceClient, transports.ReservationServiceGrpcTransport, "grpc", grpc_helpers), - (ReservationServiceAsyncClient, transports.ReservationServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_reservation_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "bigqueryreservation.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="bigqueryreservation.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - gcbr_reservation.CreateReservationRequest, - dict, -]) -def test_create_reservation(request_type, transport: str = 'grpc'): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_reservation), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gcbr_reservation.Reservation( - name='name_value', - slot_capacity=1391, - ignore_idle_slots=True, - concurrency=1195, - multi_region_auxiliary=True, - edition=gcbr_reservation.Edition.STANDARD, - primary_location='primary_location_value', - secondary_location='secondary_location_value', - original_primary_location='original_primary_location_value', - ) - response = client.create_reservation(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = gcbr_reservation.CreateReservationRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, gcbr_reservation.Reservation) - assert response.name == 'name_value' - assert response.slot_capacity == 1391 - assert response.ignore_idle_slots is True - assert response.concurrency == 1195 - assert response.multi_region_auxiliary is True - assert response.edition == gcbr_reservation.Edition.STANDARD - assert response.primary_location == 'primary_location_value' - assert response.secondary_location == 'secondary_location_value' - assert response.original_primary_location == 'original_primary_location_value' - - -def test_create_reservation_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = gcbr_reservation.CreateReservationRequest( - parent='parent_value', - reservation_id='reservation_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_reservation), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_reservation(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == gcbr_reservation.CreateReservationRequest( - parent='parent_value', - reservation_id='reservation_id_value', - ) - -def test_create_reservation_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_reservation in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_reservation] = mock_rpc - request = {} - client.create_reservation(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_reservation(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_reservation_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_reservation in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_reservation] = mock_rpc - - request = {} - await client.create_reservation(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_reservation(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_reservation_async(transport: str = 'grpc_asyncio', request_type=gcbr_reservation.CreateReservationRequest): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_reservation), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(gcbr_reservation.Reservation( - name='name_value', - slot_capacity=1391, - ignore_idle_slots=True, - concurrency=1195, - multi_region_auxiliary=True, - edition=gcbr_reservation.Edition.STANDARD, - primary_location='primary_location_value', - secondary_location='secondary_location_value', - original_primary_location='original_primary_location_value', - )) - response = await client.create_reservation(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = gcbr_reservation.CreateReservationRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, gcbr_reservation.Reservation) - assert response.name == 'name_value' - assert response.slot_capacity == 1391 - assert response.ignore_idle_slots is True - assert response.concurrency == 1195 - assert response.multi_region_auxiliary is True - assert response.edition == gcbr_reservation.Edition.STANDARD - assert response.primary_location == 'primary_location_value' - assert response.secondary_location == 'secondary_location_value' - assert response.original_primary_location == 'original_primary_location_value' - - -@pytest.mark.asyncio -async def test_create_reservation_async_from_dict(): - await test_create_reservation_async(request_type=dict) - -def test_create_reservation_field_headers(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gcbr_reservation.CreateReservationRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_reservation), - '__call__') as call: - call.return_value = gcbr_reservation.Reservation() - client.create_reservation(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_reservation_field_headers_async(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gcbr_reservation.CreateReservationRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_reservation), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcbr_reservation.Reservation()) - await client.create_reservation(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_reservation_flattened(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_reservation), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gcbr_reservation.Reservation() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_reservation( - parent='parent_value', - reservation=gcbr_reservation.Reservation(name='name_value'), - reservation_id='reservation_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].reservation - mock_val = gcbr_reservation.Reservation(name='name_value') - assert arg == mock_val - arg = args[0].reservation_id - mock_val = 'reservation_id_value' - assert arg == mock_val - - -def test_create_reservation_flattened_error(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_reservation( - gcbr_reservation.CreateReservationRequest(), - parent='parent_value', - reservation=gcbr_reservation.Reservation(name='name_value'), - reservation_id='reservation_id_value', - ) - -@pytest.mark.asyncio -async def test_create_reservation_flattened_async(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_reservation), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gcbr_reservation.Reservation() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcbr_reservation.Reservation()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_reservation( - parent='parent_value', - reservation=gcbr_reservation.Reservation(name='name_value'), - reservation_id='reservation_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].reservation - mock_val = gcbr_reservation.Reservation(name='name_value') - assert arg == mock_val - arg = args[0].reservation_id - mock_val = 'reservation_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_reservation_flattened_error_async(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.create_reservation( - gcbr_reservation.CreateReservationRequest(), - parent='parent_value', - reservation=gcbr_reservation.Reservation(name='name_value'), - reservation_id='reservation_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - reservation.ListReservationsRequest, - dict, -]) -def test_list_reservations(request_type, transport: str = 'grpc'): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_reservations), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = reservation.ListReservationsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_reservations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = reservation.ListReservationsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListReservationsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_reservations_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = reservation.ListReservationsRequest( - parent='parent_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_reservations), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_reservations(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == reservation.ListReservationsRequest( - parent='parent_value', - page_token='page_token_value', - ) - -def test_list_reservations_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_reservations in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.list_reservations] = mock_rpc - request = {} - client.list_reservations(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_reservations(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_reservations_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_reservations in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_reservations] = mock_rpc - - request = {} - await client.list_reservations(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_reservations(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_reservations_async(transport: str = 'grpc_asyncio', request_type=reservation.ListReservationsRequest): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_reservations), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(reservation.ListReservationsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_reservations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = reservation.ListReservationsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListReservationsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_reservations_async_from_dict(): - await test_list_reservations_async(request_type=dict) - -def test_list_reservations_field_headers(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = reservation.ListReservationsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_reservations), - '__call__') as call: - call.return_value = reservation.ListReservationsResponse() - client.list_reservations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_reservations_field_headers_async(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = reservation.ListReservationsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_reservations), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.ListReservationsResponse()) - await client.list_reservations(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_reservations_flattened(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_reservations), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = reservation.ListReservationsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_reservations( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_reservations_flattened_error(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_reservations( - reservation.ListReservationsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_reservations_flattened_async(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_reservations), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = reservation.ListReservationsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.ListReservationsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.list_reservations( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_reservations_flattened_error_async(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_reservations( - reservation.ListReservationsRequest(), - parent='parent_value', - ) - - -def test_list_reservations_pager(transport_name: str = "grpc"): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_reservations), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - reservation.ListReservationsResponse( - reservations=[ - reservation.Reservation(), - reservation.Reservation(), - reservation.Reservation(), - ], - next_page_token='abc', - ), - reservation.ListReservationsResponse( - reservations=[], - next_page_token='def', - ), - reservation.ListReservationsResponse( - reservations=[ - reservation.Reservation(), - ], - next_page_token='ghi', - ), - reservation.ListReservationsResponse( - reservations=[ - reservation.Reservation(), - reservation.Reservation(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_reservations(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, reservation.Reservation) - for i in results) -def test_list_reservations_pages(transport_name: str = "grpc"): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_reservations), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - reservation.ListReservationsResponse( - reservations=[ - reservation.Reservation(), - reservation.Reservation(), - reservation.Reservation(), - ], - next_page_token='abc', - ), - reservation.ListReservationsResponse( - reservations=[], - next_page_token='def', - ), - reservation.ListReservationsResponse( - reservations=[ - reservation.Reservation(), - ], - next_page_token='ghi', - ), - reservation.ListReservationsResponse( - reservations=[ - reservation.Reservation(), - reservation.Reservation(), - ], - ), - RuntimeError, - ) - pages = list(client.list_reservations(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_reservations_async_pager(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
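-    # (Pagers hide page boundaries from callers; a minimal consumption sketch
-    # for the async surface, with a hypothetical parent value:
-    #
-    #     pager = await client.list_reservations(parent='projects/p/locations/US')
-    #     async for res in pager:
-    #         print(res.name)
-    # )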
- with mock.patch.object( - type(client.transport.list_reservations), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - reservation.ListReservationsResponse( - reservations=[ - reservation.Reservation(), - reservation.Reservation(), - reservation.Reservation(), - ], - next_page_token='abc', - ), - reservation.ListReservationsResponse( - reservations=[], - next_page_token='def', - ), - reservation.ListReservationsResponse( - reservations=[ - reservation.Reservation(), - ], - next_page_token='ghi', - ), - reservation.ListReservationsResponse( - reservations=[ - reservation.Reservation(), - reservation.Reservation(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_reservations(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, reservation.Reservation) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_reservations_async_pages(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_reservations), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - reservation.ListReservationsResponse( - reservations=[ - reservation.Reservation(), - reservation.Reservation(), - reservation.Reservation(), - ], - next_page_token='abc', - ), - reservation.ListReservationsResponse( - reservations=[], - next_page_token='def', - ), - reservation.ListReservationsResponse( - reservations=[ - reservation.Reservation(), - ], - next_page_token='ghi', - ), - reservation.ListReservationsResponse( - reservations=[ - reservation.Reservation(), - reservation.Reservation(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_reservations(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - reservation.GetReservationRequest, - dict, -]) -def test_get_reservation(request_type, transport: str = 'grpc'): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_reservation), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = reservation.Reservation( - name='name_value', - slot_capacity=1391, - ignore_idle_slots=True, - concurrency=1195, - multi_region_auxiliary=True, - edition=reservation.Edition.STANDARD, - primary_location='primary_location_value', - secondary_location='secondary_location_value', - original_primary_location='original_primary_location_value', - ) - response = client.get_reservation(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = reservation.GetReservationRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, reservation.Reservation) - assert response.name == 'name_value' - assert response.slot_capacity == 1391 - assert response.ignore_idle_slots is True - assert response.concurrency == 1195 - assert response.multi_region_auxiliary is True - assert response.edition == reservation.Edition.STANDARD - assert response.primary_location == 'primary_location_value' - assert response.secondary_location == 'secondary_location_value' - assert response.original_primary_location == 'original_primary_location_value' - - -def test_get_reservation_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = reservation.GetReservationRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_reservation), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_reservation(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == reservation.GetReservationRequest( - name='name_value', - ) - -def test_get_reservation_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_reservation in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_reservation] = mock_rpc - request = {} - client.get_reservation(request) - - # Establish that the underlying gRPC stub method was called. 
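-        # (_prep_wrapped_messages runs once in the transport constructor and
-        # caches retry/timeout-wrapped callables keyed by the bare stub method,
-        # roughly along these lines:
-        #
-        #     self._wrapped_methods = {
-        #         self.get_reservation: gapic_v1.method.wrap_method(
-        #             self.get_reservation,
-        #             default_timeout=None,
-        #             client_info=client_info,
-        #         ),
-        #     }
-        # )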
-        assert mock_rpc.call_count == 1
-
-        client.get_reservation(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_reservation_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = ReservationServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.get_reservation in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.get_reservation] = mock_rpc
-
-        request = {}
-        await client.get_reservation(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.get_reservation(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_reservation_async(transport: str = 'grpc_asyncio', request_type=reservation.GetReservationRequest):
-    client = ReservationServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_reservation),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.Reservation(
-            name='name_value',
-            slot_capacity=1391,
-            ignore_idle_slots=True,
-            concurrency=1195,
-            multi_region_auxiliary=True,
-            edition=reservation.Edition.STANDARD,
-            primary_location='primary_location_value',
-            secondary_location='secondary_location_value',
-            original_primary_location='original_primary_location_value',
-        ))
-        response = await client.get_reservation(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = reservation.GetReservationRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
- assert isinstance(response, reservation.Reservation) - assert response.name == 'name_value' - assert response.slot_capacity == 1391 - assert response.ignore_idle_slots is True - assert response.concurrency == 1195 - assert response.multi_region_auxiliary is True - assert response.edition == reservation.Edition.STANDARD - assert response.primary_location == 'primary_location_value' - assert response.secondary_location == 'secondary_location_value' - assert response.original_primary_location == 'original_primary_location_value' - - -@pytest.mark.asyncio -async def test_get_reservation_async_from_dict(): - await test_get_reservation_async(request_type=dict) - -def test_get_reservation_field_headers(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = reservation.GetReservationRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_reservation), - '__call__') as call: - call.return_value = reservation.Reservation() - client.get_reservation(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_reservation_field_headers_async(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = reservation.GetReservationRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_reservation), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.Reservation()) - await client.get_reservation(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_reservation_flattened(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_reservation), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = reservation.Reservation() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_reservation( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
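-        # (Flattened keyword arguments are copied into a request message before
-        # the RPC is sent, so these two calls are equivalent:
-        #
-        #     client.get_reservation(name='name_value')
-        #     client.get_reservation(
-        #         request=reservation.GetReservationRequest(name='name_value'))
-        # )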
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_get_reservation_flattened_error():
-    client = ReservationServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_reservation(
-            reservation.GetReservationRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_get_reservation_flattened_async():
-    client = ReservationServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_reservation),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.Reservation())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.get_reservation(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_reservation_flattened_error_async():
-    client = ReservationServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.get_reservation(
-            reservation.GetReservationRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    reservation.DeleteReservationRequest,
-    dict,
-])
-def test_delete_reservation(request_type, transport: str = 'grpc'):
-    client = ReservationServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_reservation),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        response = client.delete_reservation(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = reservation.DeleteReservationRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-def test_delete_reservation_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = ReservationServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 fields are populated automatically
-    # if they meet the requirements of AIP 4235.
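-    # (Under AIP-4235 an auto-populated request-id field is filled client-side
-    # when the caller leaves it unset, along the lines of:
-    #
-    #     if not request.request_id:
-    #         request.request_id = str(uuid.uuid4())
-    # )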
- request = reservation.DeleteReservationRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_reservation), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_reservation(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == reservation.DeleteReservationRequest( - name='name_value', - ) - -def test_delete_reservation_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_reservation in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_reservation] = mock_rpc - request = {} - client.delete_reservation(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_reservation(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_reservation_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_reservation in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_reservation] = mock_rpc - - request = {} - await client.delete_reservation(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_reservation(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_reservation_async(transport: str = 'grpc_asyncio', request_type=reservation.DeleteReservationRequest): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
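-    # (grpc_helpers_async.FakeUnaryUnaryCall wraps a plain value in an
-    # awaitable call object, mimicking what a real async stub returns:
-    #
-    #     fake = grpc_helpers_async.FakeUnaryUnaryCall(None)
-    #     result = await fake   # -> None, like awaiting a real UnaryUnaryCall
-    # )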
- with mock.patch.object( - type(client.transport.delete_reservation), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_reservation(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = reservation.DeleteReservationRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_reservation_async_from_dict(): - await test_delete_reservation_async(request_type=dict) - -def test_delete_reservation_field_headers(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = reservation.DeleteReservationRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_reservation), - '__call__') as call: - call.return_value = None - client.delete_reservation(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_reservation_field_headers_async(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = reservation.DeleteReservationRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_reservation), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_reservation(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_reservation_flattened(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_reservation), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_reservation( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_reservation_flattened_error(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_reservation( - reservation.DeleteReservationRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_reservation_flattened_async(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_reservation), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_reservation( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_reservation_flattened_error_async(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_reservation( - reservation.DeleteReservationRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - gcbr_reservation.UpdateReservationRequest, - dict, -]) -def test_update_reservation(request_type, transport: str = 'grpc'): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_reservation), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gcbr_reservation.Reservation( - name='name_value', - slot_capacity=1391, - ignore_idle_slots=True, - concurrency=1195, - multi_region_auxiliary=True, - edition=gcbr_reservation.Edition.STANDARD, - primary_location='primary_location_value', - secondary_location='secondary_location_value', - original_primary_location='original_primary_location_value', - ) - response = client.update_reservation(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = gcbr_reservation.UpdateReservationRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, gcbr_reservation.Reservation) - assert response.name == 'name_value' - assert response.slot_capacity == 1391 - assert response.ignore_idle_slots is True - assert response.concurrency == 1195 - assert response.multi_region_auxiliary is True - assert response.edition == gcbr_reservation.Edition.STANDARD - assert response.primary_location == 'primary_location_value' - assert response.secondary_location == 'secondary_location_value' - assert response.original_primary_location == 'original_primary_location_value' - - -def test_update_reservation_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = gcbr_reservation.UpdateReservationRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_reservation), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_reservation(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == gcbr_reservation.UpdateReservationRequest( - ) - -def test_update_reservation_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_reservation in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_reservation] = mock_rpc - request = {} - client.update_reservation(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.update_reservation(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_update_reservation_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = ReservationServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.update_reservation in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.update_reservation] = mock_rpc
-
-        request = {}
-        await client.update_reservation(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.update_reservation(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_update_reservation_async(transport: str = 'grpc_asyncio', request_type=gcbr_reservation.UpdateReservationRequest):
-    client = ReservationServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_reservation),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcbr_reservation.Reservation(
-            name='name_value',
-            slot_capacity=1391,
-            ignore_idle_slots=True,
-            concurrency=1195,
-            multi_region_auxiliary=True,
-            edition=gcbr_reservation.Edition.STANDARD,
-            primary_location='primary_location_value',
-            secondary_location='secondary_location_value',
-            original_primary_location='original_primary_location_value',
-        ))
-        response = await client.update_reservation(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = gcbr_reservation.UpdateReservationRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
- assert isinstance(response, gcbr_reservation.Reservation) - assert response.name == 'name_value' - assert response.slot_capacity == 1391 - assert response.ignore_idle_slots is True - assert response.concurrency == 1195 - assert response.multi_region_auxiliary is True - assert response.edition == gcbr_reservation.Edition.STANDARD - assert response.primary_location == 'primary_location_value' - assert response.secondary_location == 'secondary_location_value' - assert response.original_primary_location == 'original_primary_location_value' - - -@pytest.mark.asyncio -async def test_update_reservation_async_from_dict(): - await test_update_reservation_async(request_type=dict) - -def test_update_reservation_field_headers(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gcbr_reservation.UpdateReservationRequest() - - request.reservation.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_reservation), - '__call__') as call: - call.return_value = gcbr_reservation.Reservation() - client.update_reservation(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'reservation.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_reservation_field_headers_async(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gcbr_reservation.UpdateReservationRequest() - - request.reservation.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_reservation), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcbr_reservation.Reservation()) - await client.update_reservation(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'reservation.name=name_value', - ) in kw['metadata'] - - -def test_update_reservation_flattened(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_reservation), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gcbr_reservation.Reservation() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_reservation( - reservation=gcbr_reservation.Reservation(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
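-        # (update_mask follows AIP-134 partial-update semantics: only the
-        # fields named in the mask are written. A hypothetical call updating
-        # a single field:
-        #
-        #     client.update_reservation(
-        #         reservation=gcbr_reservation.Reservation(
-        #             name='name_value', slot_capacity=200),
-        #         update_mask=field_mask_pb2.FieldMask(paths=['slot_capacity']),
-        #     )
-        # )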
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].reservation - mock_val = gcbr_reservation.Reservation(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_reservation_flattened_error(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_reservation( - gcbr_reservation.UpdateReservationRequest(), - reservation=gcbr_reservation.Reservation(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_reservation_flattened_async(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_reservation), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gcbr_reservation.Reservation() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcbr_reservation.Reservation()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_reservation( - reservation=gcbr_reservation.Reservation(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].reservation - mock_val = gcbr_reservation.Reservation(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - -@pytest.mark.asyncio -async def test_update_reservation_flattened_error_async(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_reservation( - gcbr_reservation.UpdateReservationRequest(), - reservation=gcbr_reservation.Reservation(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - reservation.FailoverReservationRequest, - dict, -]) -def test_failover_reservation(request_type, transport: str = 'grpc'): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.failover_reservation), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = reservation.Reservation( - name='name_value', - slot_capacity=1391, - ignore_idle_slots=True, - concurrency=1195, - multi_region_auxiliary=True, - edition=reservation.Edition.STANDARD, - primary_location='primary_location_value', - secondary_location='secondary_location_value', - original_primary_location='original_primary_location_value', - ) - response = client.failover_reservation(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = reservation.FailoverReservationRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, reservation.Reservation) - assert response.name == 'name_value' - assert response.slot_capacity == 1391 - assert response.ignore_idle_slots is True - assert response.concurrency == 1195 - assert response.multi_region_auxiliary is True - assert response.edition == reservation.Edition.STANDARD - assert response.primary_location == 'primary_location_value' - assert response.secondary_location == 'secondary_location_value' - assert response.original_primary_location == 'original_primary_location_value' - - -def test_failover_reservation_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = reservation.FailoverReservationRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.failover_reservation), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.failover_reservation(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == reservation.FailoverReservationRequest( - name='name_value', - ) - -def test_failover_reservation_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.failover_reservation in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.failover_reservation] = mock_rpc - request = {} - client.failover_reservation(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.failover_reservation(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_failover_reservation_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.failover_reservation in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.failover_reservation] = mock_rpc - - request = {} - await client.failover_reservation(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.failover_reservation(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_failover_reservation_async(transport: str = 'grpc_asyncio', request_type=reservation.FailoverReservationRequest): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.failover_reservation), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(reservation.Reservation( - name='name_value', - slot_capacity=1391, - ignore_idle_slots=True, - concurrency=1195, - multi_region_auxiliary=True, - edition=reservation.Edition.STANDARD, - primary_location='primary_location_value', - secondary_location='secondary_location_value', - original_primary_location='original_primary_location_value', - )) - response = await client.failover_reservation(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = reservation.FailoverReservationRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, reservation.Reservation) - assert response.name == 'name_value' - assert response.slot_capacity == 1391 - assert response.ignore_idle_slots is True - assert response.concurrency == 1195 - assert response.multi_region_auxiliary is True - assert response.edition == reservation.Edition.STANDARD - assert response.primary_location == 'primary_location_value' - assert response.secondary_location == 'secondary_location_value' - assert response.original_primary_location == 'original_primary_location_value' - - -@pytest.mark.asyncio -async def test_failover_reservation_async_from_dict(): - await test_failover_reservation_async(request_type=dict) - -def test_failover_reservation_field_headers(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = reservation.FailoverReservationRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.failover_reservation), - '__call__') as call: - call.return_value = reservation.Reservation() - client.failover_reservation(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_failover_reservation_field_headers_async(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = reservation.FailoverReservationRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.failover_reservation), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.Reservation()) - await client.failover_reservation(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - reservation.CreateCapacityCommitmentRequest, - dict, -]) -def test_create_capacity_commitment(request_type, transport: str = 'grpc'): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_capacity_commitment), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = reservation.CapacityCommitment( - name='name_value', - slot_count=1098, - plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, - state=reservation.CapacityCommitment.State.PENDING, - renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, - multi_region_auxiliary=True, - edition=reservation.Edition.STANDARD, - is_flat_rate=True, - ) - response = client.create_capacity_commitment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = reservation.CreateCapacityCommitmentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, reservation.CapacityCommitment) - assert response.name == 'name_value' - assert response.slot_count == 1098 - assert response.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX - assert response.state == reservation.CapacityCommitment.State.PENDING - assert response.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX - assert response.multi_region_auxiliary is True - assert response.edition == reservation.Edition.STANDARD - assert response.is_flat_rate is True - - -def test_create_capacity_commitment_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = reservation.CreateCapacityCommitmentRequest( - parent='parent_value', - capacity_commitment_id='capacity_commitment_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_capacity_commitment), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_capacity_commitment(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == reservation.CreateCapacityCommitmentRequest( - parent='parent_value', - capacity_commitment_id='capacity_commitment_id_value', - ) - -def test_create_capacity_commitment_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_capacity_commitment in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_capacity_commitment] = mock_rpc - request = {} - client.create_capacity_commitment(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.create_capacity_commitment(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_create_capacity_commitment_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.create_capacity_commitment in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.create_capacity_commitment] = mock_rpc
-
- request = {}
- await client.create_capacity_commitment(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.create_capacity_commitment(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_create_capacity_commitment_async(transport: str = 'grpc_asyncio', request_type=reservation.CreateCapacityCommitmentRequest):
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_capacity_commitment),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.CapacityCommitment(
- name='name_value',
- slot_count=1098,
- plan=reservation.CapacityCommitment.CommitmentPlan.FLEX,
- state=reservation.CapacityCommitment.State.PENDING,
- renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX,
- multi_region_auxiliary=True,
- edition=reservation.Edition.STANDARD,
- is_flat_rate=True,
- ))
- response = await client.create_capacity_commitment(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = reservation.CreateCapacityCommitmentRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, reservation.CapacityCommitment) - assert response.name == 'name_value' - assert response.slot_count == 1098 - assert response.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX - assert response.state == reservation.CapacityCommitment.State.PENDING - assert response.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX - assert response.multi_region_auxiliary is True - assert response.edition == reservation.Edition.STANDARD - assert response.is_flat_rate is True - - -@pytest.mark.asyncio -async def test_create_capacity_commitment_async_from_dict(): - await test_create_capacity_commitment_async(request_type=dict) - -def test_create_capacity_commitment_field_headers(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = reservation.CreateCapacityCommitmentRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_capacity_commitment), - '__call__') as call: - call.return_value = reservation.CapacityCommitment() - client.create_capacity_commitment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_capacity_commitment_field_headers_async(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = reservation.CreateCapacityCommitmentRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_capacity_commitment), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.CapacityCommitment()) - await client.create_capacity_commitment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_capacity_commitment_flattened(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_capacity_commitment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = reservation.CapacityCommitment() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_capacity_commitment( - parent='parent_value', - capacity_commitment=reservation.CapacityCommitment(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
- arg = args[0].capacity_commitment
- mock_val = reservation.CapacityCommitment(name='name_value')
- assert arg == mock_val
-
-
-def test_create_capacity_commitment_flattened_error():
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.create_capacity_commitment(
- reservation.CreateCapacityCommitmentRequest(),
- parent='parent_value',
- capacity_commitment=reservation.CapacityCommitment(name='name_value'),
- )
-
-@pytest.mark.asyncio
-async def test_create_capacity_commitment_flattened_async():
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_capacity_commitment),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.CapacityCommitment())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.create_capacity_commitment(
- parent='parent_value',
- capacity_commitment=reservation.CapacityCommitment(name='name_value'),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
- arg = args[0].capacity_commitment
- mock_val = reservation.CapacityCommitment(name='name_value')
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_capacity_commitment_flattened_error_async():
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.create_capacity_commitment(
- reservation.CreateCapacityCommitmentRequest(),
- parent='parent_value',
- capacity_commitment=reservation.CapacityCommitment(name='name_value'),
- )
-
-
-@pytest.mark.parametrize("request_type", [
- reservation.ListCapacityCommitmentsRequest,
- dict,
-])
-def test_list_capacity_commitments(request_type, transport: str = 'grpc'):
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_capacity_commitments),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = reservation.ListCapacityCommitmentsResponse(
- next_page_token='next_page_token_value',
- )
- response = client.list_capacity_commitments(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = reservation.ListCapacityCommitmentsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListCapacityCommitmentsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_capacity_commitments_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = reservation.ListCapacityCommitmentsRequest( - parent='parent_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_capacity_commitments), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_capacity_commitments(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == reservation.ListCapacityCommitmentsRequest( - parent='parent_value', - page_token='page_token_value', - ) - -def test_list_capacity_commitments_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_capacity_commitments in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_capacity_commitments] = mock_rpc - request = {} - client.list_capacity_commitments(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.list_capacity_commitments(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_capacity_commitments_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.list_capacity_commitments in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.list_capacity_commitments] = mock_rpc
-
- request = {}
- await client.list_capacity_commitments(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.list_capacity_commitments(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_capacity_commitments_async(transport: str = 'grpc_asyncio', request_type=reservation.ListCapacityCommitmentsRequest):
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_capacity_commitments),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.ListCapacityCommitmentsResponse(
- next_page_token='next_page_token_value',
- ))
- response = await client.list_capacity_commitments(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = reservation.ListCapacityCommitmentsRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListCapacityCommitmentsAsyncPager)
- assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_capacity_commitments_async_from_dict():
- await test_list_capacity_commitments_async(request_type=dict)
-
-def test_list_capacity_commitments_field_headers():
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = reservation.ListCapacityCommitmentsRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_capacity_commitments),
- '__call__') as call:
- call.return_value = reservation.ListCapacityCommitmentsResponse()
- client.list_capacity_commitments(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_capacity_commitments_field_headers_async():
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = reservation.ListCapacityCommitmentsRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_capacity_commitments),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.ListCapacityCommitmentsResponse())
- await client.list_capacity_commitments(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-def test_list_capacity_commitments_flattened():
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_capacity_commitments),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = reservation.ListCapacityCommitmentsResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.list_capacity_commitments(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-
-def test_list_capacity_commitments_flattened_error():
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.list_capacity_commitments(
- reservation.ListCapacityCommitmentsRequest(),
- parent='parent_value',
- )
-
-@pytest.mark.asyncio
-async def test_list_capacity_commitments_flattened_async():
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_capacity_commitments),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.ListCapacityCommitmentsResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.list_capacity_commitments( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_capacity_commitments_flattened_error_async(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_capacity_commitments( - reservation.ListCapacityCommitmentsRequest(), - parent='parent_value', - ) - - -def test_list_capacity_commitments_pager(transport_name: str = "grpc"): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_capacity_commitments), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - reservation.ListCapacityCommitmentsResponse( - capacity_commitments=[ - reservation.CapacityCommitment(), - reservation.CapacityCommitment(), - reservation.CapacityCommitment(), - ], - next_page_token='abc', - ), - reservation.ListCapacityCommitmentsResponse( - capacity_commitments=[], - next_page_token='def', - ), - reservation.ListCapacityCommitmentsResponse( - capacity_commitments=[ - reservation.CapacityCommitment(), - ], - next_page_token='ghi', - ), - reservation.ListCapacityCommitmentsResponse( - capacity_commitments=[ - reservation.CapacityCommitment(), - reservation.CapacityCommitment(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_capacity_commitments(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, reservation.CapacityCommitment) - for i in results) -def test_list_capacity_commitments_pages(transport_name: str = "grpc"): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_capacity_commitments), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - reservation.ListCapacityCommitmentsResponse( - capacity_commitments=[ - reservation.CapacityCommitment(), - reservation.CapacityCommitment(), - reservation.CapacityCommitment(), - ], - next_page_token='abc', - ), - reservation.ListCapacityCommitmentsResponse( - capacity_commitments=[], - next_page_token='def', - ), - reservation.ListCapacityCommitmentsResponse( - capacity_commitments=[ - reservation.CapacityCommitment(), - ], - next_page_token='ghi', - ), - reservation.ListCapacityCommitmentsResponse( - capacity_commitments=[ - reservation.CapacityCommitment(), - reservation.CapacityCommitment(), - ], - ), - RuntimeError, - ) - pages = list(client.list_capacity_commitments(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_capacity_commitments_async_pager(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_capacity_commitments), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - reservation.ListCapacityCommitmentsResponse( - capacity_commitments=[ - reservation.CapacityCommitment(), - reservation.CapacityCommitment(), - reservation.CapacityCommitment(), - ], - next_page_token='abc', - ), - reservation.ListCapacityCommitmentsResponse( - capacity_commitments=[], - next_page_token='def', - ), - reservation.ListCapacityCommitmentsResponse( - capacity_commitments=[ - reservation.CapacityCommitment(), - ], - next_page_token='ghi', - ), - reservation.ListCapacityCommitmentsResponse( - capacity_commitments=[ - reservation.CapacityCommitment(), - reservation.CapacityCommitment(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_capacity_commitments(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, reservation.CapacityCommitment) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_capacity_commitments_async_pages(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_capacity_commitments), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - reservation.ListCapacityCommitmentsResponse( - capacity_commitments=[ - reservation.CapacityCommitment(), - reservation.CapacityCommitment(), - reservation.CapacityCommitment(), - ], - next_page_token='abc', - ), - reservation.ListCapacityCommitmentsResponse( - capacity_commitments=[], - next_page_token='def', - ), - reservation.ListCapacityCommitmentsResponse( - capacity_commitments=[ - reservation.CapacityCommitment(), - ], - next_page_token='ghi', - ), - reservation.ListCapacityCommitmentsResponse( - capacity_commitments=[ - reservation.CapacityCommitment(), - reservation.CapacityCommitment(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_capacity_commitments(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - reservation.GetCapacityCommitmentRequest, - dict, -]) -def test_get_capacity_commitment(request_type, transport: str = 'grpc'): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_capacity_commitment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = reservation.CapacityCommitment( - name='name_value', - slot_count=1098, - plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, - state=reservation.CapacityCommitment.State.PENDING, - renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, - multi_region_auxiliary=True, - edition=reservation.Edition.STANDARD, - is_flat_rate=True, - ) - response = client.get_capacity_commitment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = reservation.GetCapacityCommitmentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, reservation.CapacityCommitment) - assert response.name == 'name_value' - assert response.slot_count == 1098 - assert response.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX - assert response.state == reservation.CapacityCommitment.State.PENDING - assert response.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX - assert response.multi_region_auxiliary is True - assert response.edition == reservation.Edition.STANDARD - assert response.is_flat_rate is True - - -def test_get_capacity_commitment_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. 
- request = reservation.GetCapacityCommitmentRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_capacity_commitment), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_capacity_commitment(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == reservation.GetCapacityCommitmentRequest( - name='name_value', - ) - -def test_get_capacity_commitment_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_capacity_commitment in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_capacity_commitment] = mock_rpc - request = {} - client.get_capacity_commitment(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.get_capacity_commitment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_capacity_commitment_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_capacity_commitment in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_capacity_commitment] = mock_rpc - - request = {} - await client.get_capacity_commitment(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_capacity_commitment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_capacity_commitment_async(transport: str = 'grpc_asyncio', request_type=reservation.GetCapacityCommitmentRequest): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(
- type(client.transport.get_capacity_commitment),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.CapacityCommitment(
- name='name_value',
- slot_count=1098,
- plan=reservation.CapacityCommitment.CommitmentPlan.FLEX,
- state=reservation.CapacityCommitment.State.PENDING,
- renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX,
- multi_region_auxiliary=True,
- edition=reservation.Edition.STANDARD,
- is_flat_rate=True,
- ))
- response = await client.get_capacity_commitment(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = reservation.GetCapacityCommitmentRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, reservation.CapacityCommitment)
- assert response.name == 'name_value'
- assert response.slot_count == 1098
- assert response.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX
- assert response.state == reservation.CapacityCommitment.State.PENDING
- assert response.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX
- assert response.multi_region_auxiliary is True
- assert response.edition == reservation.Edition.STANDARD
- assert response.is_flat_rate is True
-
-
-@pytest.mark.asyncio
-async def test_get_capacity_commitment_async_from_dict():
- await test_get_capacity_commitment_async(request_type=dict)
-
-def test_get_capacity_commitment_field_headers():
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = reservation.GetCapacityCommitmentRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_capacity_commitment),
- '__call__') as call:
- call.return_value = reservation.CapacityCommitment()
- client.get_capacity_commitment(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_capacity_commitment_field_headers_async():
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = reservation.GetCapacityCommitmentRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_capacity_commitment),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.CapacityCommitment())
- await client.get_capacity_commitment(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_get_capacity_commitment_flattened():
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_capacity_commitment),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = reservation.CapacityCommitment()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.get_capacity_commitment(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_get_capacity_commitment_flattened_error():
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.get_capacity_commitment(
- reservation.GetCapacityCommitmentRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_get_capacity_commitment_flattened_async():
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.get_capacity_commitment),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.CapacityCommitment())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.get_capacity_commitment(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_capacity_commitment_flattened_error_async():
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.get_capacity_commitment(
- reservation.GetCapacityCommitmentRequest(),
- name='name_value',
- )
-
-
-@pytest.mark.parametrize("request_type", [
- reservation.DeleteCapacityCommitmentRequest,
- dict,
-])
-def test_delete_capacity_commitment(request_type, transport: str = 'grpc'):
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_capacity_commitment),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = None - response = client.delete_capacity_commitment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = reservation.DeleteCapacityCommitmentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_capacity_commitment_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = reservation.DeleteCapacityCommitmentRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_capacity_commitment), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_capacity_commitment(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == reservation.DeleteCapacityCommitmentRequest( - name='name_value', - ) - -def test_delete_capacity_commitment_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_capacity_commitment in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_capacity_commitment] = mock_rpc - request = {} - client.delete_capacity_commitment(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_capacity_commitment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_capacity_commitment_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_capacity_commitment in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_capacity_commitment] = mock_rpc - - request = {} - await client.delete_capacity_commitment(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_capacity_commitment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_capacity_commitment_async(transport: str = 'grpc_asyncio', request_type=reservation.DeleteCapacityCommitmentRequest): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_capacity_commitment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_capacity_commitment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = reservation.DeleteCapacityCommitmentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_capacity_commitment_async_from_dict(): - await test_delete_capacity_commitment_async(request_type=dict) - -def test_delete_capacity_commitment_field_headers(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = reservation.DeleteCapacityCommitmentRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_capacity_commitment), - '__call__') as call: - call.return_value = None - client.delete_capacity_commitment(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_delete_capacity_commitment_field_headers_async():
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = reservation.DeleteCapacityCommitmentRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_capacity_commitment),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
- await client.delete_capacity_commitment(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_delete_capacity_commitment_flattened():
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_capacity_commitment),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = None
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.delete_capacity_commitment(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
-def test_delete_capacity_commitment_flattened_error():
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.delete_capacity_commitment(
- reservation.DeleteCapacityCommitmentRequest(),
- name='name_value',
- )
-
-@pytest.mark.asyncio
-async def test_delete_capacity_commitment_flattened_async():
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_capacity_commitment),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.delete_capacity_commitment(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_capacity_commitment_flattened_error_async(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_capacity_commitment( - reservation.DeleteCapacityCommitmentRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - reservation.UpdateCapacityCommitmentRequest, - dict, -]) -def test_update_capacity_commitment(request_type, transport: str = 'grpc'): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_capacity_commitment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = reservation.CapacityCommitment( - name='name_value', - slot_count=1098, - plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, - state=reservation.CapacityCommitment.State.PENDING, - renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, - multi_region_auxiliary=True, - edition=reservation.Edition.STANDARD, - is_flat_rate=True, - ) - response = client.update_capacity_commitment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = reservation.UpdateCapacityCommitmentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, reservation.CapacityCommitment) - assert response.name == 'name_value' - assert response.slot_count == 1098 - assert response.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX - assert response.state == reservation.CapacityCommitment.State.PENDING - assert response.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX - assert response.multi_region_auxiliary is True - assert response.edition == reservation.Edition.STANDARD - assert response.is_flat_rate is True - - -def test_update_capacity_commitment_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = reservation.UpdateCapacityCommitmentRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_capacity_commitment), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.update_capacity_commitment(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == reservation.UpdateCapacityCommitmentRequest( - ) - -def test_update_capacity_commitment_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_capacity_commitment in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_capacity_commitment] = mock_rpc - request = {} - client.update_capacity_commitment(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_capacity_commitment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_capacity_commitment_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_capacity_commitment in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_capacity_commitment] = mock_rpc - - request = {} - await client.update_capacity_commitment(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.update_capacity_commitment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_capacity_commitment_async(transport: str = 'grpc_asyncio', request_type=reservation.UpdateCapacityCommitmentRequest): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_capacity_commitment), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.CapacityCommitment(
- name='name_value',
- slot_count=1098,
- plan=reservation.CapacityCommitment.CommitmentPlan.FLEX,
- state=reservation.CapacityCommitment.State.PENDING,
- renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX,
- multi_region_auxiliary=True,
- edition=reservation.Edition.STANDARD,
- is_flat_rate=True,
- ))
- response = await client.update_capacity_commitment(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = reservation.UpdateCapacityCommitmentRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, reservation.CapacityCommitment)
- assert response.name == 'name_value'
- assert response.slot_count == 1098
- assert response.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX
- assert response.state == reservation.CapacityCommitment.State.PENDING
- assert response.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX
- assert response.multi_region_auxiliary is True
- assert response.edition == reservation.Edition.STANDARD
- assert response.is_flat_rate is True
-
-
-@pytest.mark.asyncio
-async def test_update_capacity_commitment_async_from_dict():
- await test_update_capacity_commitment_async(request_type=dict)
-
-def test_update_capacity_commitment_field_headers():
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = reservation.UpdateCapacityCommitmentRequest()
-
- request.capacity_commitment.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_capacity_commitment),
- '__call__') as call:
- call.return_value = reservation.CapacityCommitment()
- client.update_capacity_commitment(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'capacity_commitment.name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_update_capacity_commitment_field_headers_async():
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = reservation.UpdateCapacityCommitmentRequest()
-
- request.capacity_commitment.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_capacity_commitment),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.CapacityCommitment())
- await client.update_capacity_commitment(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'capacity_commitment.name=name_value',
- ) in kw['metadata']
-
-
-def test_update_capacity_commitment_flattened():
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_capacity_commitment),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = reservation.CapacityCommitment()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.update_capacity_commitment(
- capacity_commitment=reservation.CapacityCommitment(name='name_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].capacity_commitment
- mock_val = reservation.CapacityCommitment(name='name_value')
- assert arg == mock_val
- arg = args[0].update_mask
- mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
- assert arg == mock_val
-
-
-def test_update_capacity_commitment_flattened_error():
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.update_capacity_commitment(
- reservation.UpdateCapacityCommitmentRequest(),
- capacity_commitment=reservation.CapacityCommitment(name='name_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
-@pytest.mark.asyncio
-async def test_update_capacity_commitment_flattened_async():
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_capacity_commitment),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.CapacityCommitment())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.update_capacity_commitment(
- capacity_commitment=reservation.CapacityCommitment(name='name_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].capacity_commitment
- mock_val = reservation.CapacityCommitment(name='name_value')
- assert arg == mock_val
- arg = args[0].update_mask
- mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_capacity_commitment_flattened_error_async():
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.update_capacity_commitment(
- reservation.UpdateCapacityCommitmentRequest(),
- capacity_commitment=reservation.CapacityCommitment(name='name_value'),
- update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
- )
-
-
-@pytest.mark.parametrize("request_type", [
- reservation.SplitCapacityCommitmentRequest,
- dict,
-])
-def test_split_capacity_commitment(request_type, transport: str = 'grpc'):
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.split_capacity_commitment),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = reservation.SplitCapacityCommitmentResponse(
- )
- response = client.split_capacity_commitment(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = reservation.SplitCapacityCommitmentRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, reservation.SplitCapacityCommitmentResponse)
-
-
-def test_split_capacity_commitment_non_empty_request_with_auto_populated_field():
- # This test is a coverage failsafe to make sure that UUID4 fields are
- # automatically populated, according to AIP-4235, with non-empty requests.
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Populate all string fields in the request which are not UUID4
- # since we want to check that UUID4 fields are populated automatically
- # if they meet the requirements of AIP-4235.
- request = reservation.SplitCapacityCommitmentRequest(
- name='name_value',
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.split_capacity_commitment),
- '__call__') as call:
- call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client.split_capacity_commitment(request=request)
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == reservation.SplitCapacityCommitmentRequest(
- name='name_value',
- )
-
-def test_split_capacity_commitment_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.split_capacity_commitment in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.split_capacity_commitment] = mock_rpc
- request = {}
- client.split_capacity_commitment(request)
-
- # Establish that the underlying gRPC stub method was called.
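- # (Swapping the _wrapped_methods entry intercepts the call because the client
- # looks the wrapped callable up in that cache on every invocation.)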
- assert mock_rpc.call_count == 1
-
- client.split_capacity_commitment(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_split_capacity_commitment_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.split_capacity_commitment in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.split_capacity_commitment] = mock_rpc
-
- request = {}
- await client.split_capacity_commitment(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.split_capacity_commitment(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_split_capacity_commitment_async(transport: str = 'grpc_asyncio', request_type=reservation.SplitCapacityCommitmentRequest):
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.split_capacity_commitment),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.SplitCapacityCommitmentResponse(
- ))
- response = await client.split_capacity_commitment(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = reservation.SplitCapacityCommitmentRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, reservation.SplitCapacityCommitmentResponse)
-
-
-@pytest.mark.asyncio
-async def test_split_capacity_commitment_async_from_dict():
- await test_split_capacity_commitment_async(request_type=dict)
-
-def test_split_capacity_commitment_field_headers():
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = reservation.SplitCapacityCommitmentRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.split_capacity_commitment),
- '__call__') as call:
- call.return_value = reservation.SplitCapacityCommitmentResponse()
- client.split_capacity_commitment(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_split_capacity_commitment_field_headers_async():
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = reservation.SplitCapacityCommitmentRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.split_capacity_commitment),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.SplitCapacityCommitmentResponse())
- await client.split_capacity_commitment(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
-def test_split_capacity_commitment_flattened():
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.split_capacity_commitment),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = reservation.SplitCapacityCommitmentResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.split_capacity_commitment(
- name='name_value',
- slot_count=1098,
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
- arg = args[0].slot_count
- mock_val = 1098
- assert arg == mock_val
-
-
-def test_split_capacity_commitment_flattened_error():
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.split_capacity_commitment(
- reservation.SplitCapacityCommitmentRequest(),
- name='name_value',
- slot_count=1098,
- )
-
-@pytest.mark.asyncio
-async def test_split_capacity_commitment_flattened_async():
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.split_capacity_commitment),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.SplitCapacityCommitmentResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.split_capacity_commitment(
- name='name_value',
- slot_count=1098,
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
- arg = args[0].slot_count
- mock_val = 1098
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_split_capacity_commitment_flattened_error_async():
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.split_capacity_commitment(
- reservation.SplitCapacityCommitmentRequest(),
- name='name_value',
- slot_count=1098,
- )
-
-
-@pytest.mark.parametrize("request_type", [
- reservation.MergeCapacityCommitmentsRequest,
- dict,
-])
-def test_merge_capacity_commitments(request_type, transport: str = 'grpc'):
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.merge_capacity_commitments),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = reservation.CapacityCommitment(
- name='name_value',
- slot_count=1098,
- plan=reservation.CapacityCommitment.CommitmentPlan.FLEX,
- state=reservation.CapacityCommitment.State.PENDING,
- renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX,
- multi_region_auxiliary=True,
- edition=reservation.Edition.STANDARD,
- is_flat_rate=True,
- )
- response = client.merge_capacity_commitments(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = reservation.MergeCapacityCommitmentsRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, reservation.CapacityCommitment)
- assert response.name == 'name_value'
- assert response.slot_count == 1098
- assert response.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX
- assert response.state == reservation.CapacityCommitment.State.PENDING
- assert response.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX
- assert response.multi_region_auxiliary is True
- assert response.edition == reservation.Edition.STANDARD
- assert response.is_flat_rate is True
-
-
-def test_merge_capacity_commitments_non_empty_request_with_auto_populated_field():
- # This test is a coverage failsafe to make sure that UUID4 fields are
- # automatically populated, according to AIP-4235, with non-empty requests.
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Populate all string fields in the request which are not UUID4
- # since we want to check that UUID4 fields are populated automatically
- # if they meet the requirements of AIP-4235.
- request = reservation.MergeCapacityCommitmentsRequest(
- parent='parent_value',
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.merge_capacity_commitments),
- '__call__') as call:
- call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client.merge_capacity_commitments(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == reservation.MergeCapacityCommitmentsRequest( - parent='parent_value', - ) - -def test_merge_capacity_commitments_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.merge_capacity_commitments in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.merge_capacity_commitments] = mock_rpc - request = {} - client.merge_capacity_commitments(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.merge_capacity_commitments(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_merge_capacity_commitments_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.merge_capacity_commitments in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.merge_capacity_commitments] = mock_rpc - - request = {} - await client.merge_capacity_commitments(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.merge_capacity_commitments(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_merge_capacity_commitments_async(transport: str = 'grpc_asyncio', request_type=reservation.MergeCapacityCommitmentsRequest): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.merge_capacity_commitments), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.CapacityCommitment(
- name='name_value',
- slot_count=1098,
- plan=reservation.CapacityCommitment.CommitmentPlan.FLEX,
- state=reservation.CapacityCommitment.State.PENDING,
- renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX,
- multi_region_auxiliary=True,
- edition=reservation.Edition.STANDARD,
- is_flat_rate=True,
- ))
- response = await client.merge_capacity_commitments(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = reservation.MergeCapacityCommitmentsRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, reservation.CapacityCommitment)
- assert response.name == 'name_value'
- assert response.slot_count == 1098
- assert response.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX
- assert response.state == reservation.CapacityCommitment.State.PENDING
- assert response.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX
- assert response.multi_region_auxiliary is True
- assert response.edition == reservation.Edition.STANDARD
- assert response.is_flat_rate is True
-
-
-@pytest.mark.asyncio
-async def test_merge_capacity_commitments_async_from_dict():
- await test_merge_capacity_commitments_async(request_type=dict)
-
-def test_merge_capacity_commitments_field_headers():
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = reservation.MergeCapacityCommitmentsRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.merge_capacity_commitments),
- '__call__') as call:
- call.return_value = reservation.CapacityCommitment()
- client.merge_capacity_commitments(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_merge_capacity_commitments_field_headers_async():
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = reservation.MergeCapacityCommitmentsRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.merge_capacity_commitments),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.CapacityCommitment())
- await client.merge_capacity_commitments(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-def test_merge_capacity_commitments_flattened():
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.merge_capacity_commitments),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = reservation.CapacityCommitment()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.merge_capacity_commitments(
- parent='parent_value',
- capacity_commitment_ids=['capacity_commitment_ids_value'],
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
- arg = args[0].capacity_commitment_ids
- mock_val = ['capacity_commitment_ids_value']
- assert arg == mock_val
-
-
-def test_merge_capacity_commitments_flattened_error():
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.merge_capacity_commitments(
- reservation.MergeCapacityCommitmentsRequest(),
- parent='parent_value',
- capacity_commitment_ids=['capacity_commitment_ids_value'],
- )
-
-@pytest.mark.asyncio
-async def test_merge_capacity_commitments_flattened_async():
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.merge_capacity_commitments),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.CapacityCommitment())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.merge_capacity_commitments(
- parent='parent_value',
- capacity_commitment_ids=['capacity_commitment_ids_value'],
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
- arg = args[0].capacity_commitment_ids
- mock_val = ['capacity_commitment_ids_value']
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_merge_capacity_commitments_flattened_error_async():
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.merge_capacity_commitments(
- reservation.MergeCapacityCommitmentsRequest(),
- parent='parent_value',
- capacity_commitment_ids=['capacity_commitment_ids_value'],
- )
-
-
-@pytest.mark.parametrize("request_type", [
- reservation.CreateAssignmentRequest,
- dict,
-])
-def test_create_assignment(request_type, transport: str = 'grpc'):
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_assignment),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = reservation.Assignment(
- name='name_value',
- assignee='assignee_value',
- job_type=reservation.Assignment.JobType.PIPELINE,
- state=reservation.Assignment.State.PENDING,
- enable_gemini_in_bigquery=True,
- )
- response = client.create_assignment(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = reservation.CreateAssignmentRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, reservation.Assignment)
- assert response.name == 'name_value'
- assert response.assignee == 'assignee_value'
- assert response.job_type == reservation.Assignment.JobType.PIPELINE
- assert response.state == reservation.Assignment.State.PENDING
- assert response.enable_gemini_in_bigquery is True
-
-
-def test_create_assignment_non_empty_request_with_auto_populated_field():
- # This test is a coverage failsafe to make sure that UUID4 fields are
- # automatically populated, according to AIP-4235, with non-empty requests.
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Populate all string fields in the request which are not UUID4
- # since we want to check that UUID4 fields are populated automatically
- # if they meet the requirements of AIP-4235.
- request = reservation.CreateAssignmentRequest(
- parent='parent_value',
- assignment_id='assignment_id_value',
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_assignment),
- '__call__') as call:
- call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client.create_assignment(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == reservation.CreateAssignmentRequest( - parent='parent_value', - assignment_id='assignment_id_value', - ) - -def test_create_assignment_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_assignment in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_assignment] = mock_rpc - request = {} - client.create_assignment(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_assignment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_assignment_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_assignment in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_assignment] = mock_rpc - - request = {} - await client.create_assignment(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_assignment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_assignment_async(transport: str = 'grpc_asyncio', request_type=reservation.CreateAssignmentRequest): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_assignment), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.Assignment(
- name='name_value',
- assignee='assignee_value',
- job_type=reservation.Assignment.JobType.PIPELINE,
- state=reservation.Assignment.State.PENDING,
- enable_gemini_in_bigquery=True,
- ))
- response = await client.create_assignment(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = reservation.CreateAssignmentRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, reservation.Assignment)
- assert response.name == 'name_value'
- assert response.assignee == 'assignee_value'
- assert response.job_type == reservation.Assignment.JobType.PIPELINE
- assert response.state == reservation.Assignment.State.PENDING
- assert response.enable_gemini_in_bigquery is True
-
-
-@pytest.mark.asyncio
-async def test_create_assignment_async_from_dict():
- await test_create_assignment_async(request_type=dict)
-
-def test_create_assignment_field_headers():
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = reservation.CreateAssignmentRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_assignment),
- '__call__') as call:
- call.return_value = reservation.Assignment()
- client.create_assignment(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_create_assignment_field_headers_async():
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = reservation.CreateAssignmentRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_assignment),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.Assignment())
- await client.create_assignment(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-def test_create_assignment_flattened():
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_assignment),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = reservation.Assignment()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
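- # (The flattened keyword arguments below are merged into a single request
- # object by the client; combining them with an explicit request object raises
- # ValueError, as the *_flattened_error tests verify.)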
- client.create_assignment(
- parent='parent_value',
- assignment=reservation.Assignment(name='name_value'),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
- arg = args[0].assignment
- mock_val = reservation.Assignment(name='name_value')
- assert arg == mock_val
-
-
-def test_create_assignment_flattened_error():
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.create_assignment(
- reservation.CreateAssignmentRequest(),
- parent='parent_value',
- assignment=reservation.Assignment(name='name_value'),
- )
-
-@pytest.mark.asyncio
-async def test_create_assignment_flattened_async():
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.create_assignment),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.Assignment())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.create_assignment(
- parent='parent_value',
- assignment=reservation.Assignment(name='name_value'),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
- arg = args[0].assignment
- mock_val = reservation.Assignment(name='name_value')
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_assignment_flattened_error_async():
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.create_assignment(
- reservation.CreateAssignmentRequest(),
- parent='parent_value',
- assignment=reservation.Assignment(name='name_value'),
- )
-
-
-@pytest.mark.parametrize("request_type", [
- reservation.ListAssignmentsRequest,
- dict,
-])
-def test_list_assignments(request_type, transport: str = 'grpc'):
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_assignments),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = reservation.ListAssignmentsResponse(
- next_page_token='next_page_token_value',
- )
- response = client.list_assignments(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = reservation.ListAssignmentsRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListAssignmentsPager)
- assert response.next_page_token == 'next_page_token_value'
-
-
-def test_list_assignments_non_empty_request_with_auto_populated_field():
- # This test is a coverage failsafe to make sure that UUID4 fields are
- # automatically populated, according to AIP-4235, with non-empty requests.
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Populate all string fields in the request which are not UUID4
- # since we want to check that UUID4 fields are populated automatically
- # if they meet the requirements of AIP-4235.
- request = reservation.ListAssignmentsRequest(
- parent='parent_value',
- page_token='page_token_value',
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_assignments),
- '__call__') as call:
- call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client.list_assignments(request=request)
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == reservation.ListAssignmentsRequest(
- parent='parent_value',
- page_token='page_token_value',
- )
-
-def test_list_assignments_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.list_assignments in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.list_assignments] = mock_rpc
- request = {}
- client.list_assignments(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- client.list_assignments(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_assignments_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.list_assignments in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.list_assignments] = mock_rpc
-
- request = {}
- await client.list_assignments(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.list_assignments(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_assignments_async(transport: str = 'grpc_asyncio', request_type=reservation.ListAssignmentsRequest):
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_assignments),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.ListAssignmentsResponse(
- next_page_token='next_page_token_value',
- ))
- response = await client.list_assignments(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = reservation.ListAssignmentsRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListAssignmentsAsyncPager)
- assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_assignments_async_from_dict():
- await test_list_assignments_async(request_type=dict)
-
-def test_list_assignments_field_headers():
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = reservation.ListAssignmentsRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_assignments),
- '__call__') as call:
- call.return_value = reservation.ListAssignmentsResponse()
- client.list_assignments(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_assignments_field_headers_async():
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = reservation.ListAssignmentsRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_assignments),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.ListAssignmentsResponse())
- await client.list_assignments(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
-def test_list_assignments_flattened():
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_assignments),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = reservation.ListAssignmentsResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.list_assignments(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-
-def test_list_assignments_flattened_error():
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.list_assignments(
- reservation.ListAssignmentsRequest(),
- parent='parent_value',
- )
-
-@pytest.mark.asyncio
-async def test_list_assignments_flattened_async():
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_assignments),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.ListAssignmentsResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.list_assignments(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_assignments_flattened_error_async():
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.list_assignments(
- reservation.ListAssignmentsRequest(),
- parent='parent_value',
- )
-
-
-def test_list_assignments_pager(transport_name: str = "grpc"):
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_assignments),
- '__call__') as call:
- # Set the response to a series of pages.
- call.side_effect = (
- reservation.ListAssignmentsResponse(
- assignments=[
- reservation.Assignment(),
- reservation.Assignment(),
- reservation.Assignment(),
- ],
- next_page_token='abc',
- ),
- reservation.ListAssignmentsResponse(
- assignments=[],
- next_page_token='def',
- ),
- reservation.ListAssignmentsResponse(
- assignments=[
- reservation.Assignment(),
- ],
- next_page_token='ghi',
- ),
- reservation.ListAssignmentsResponse(
- assignments=[
- reservation.Assignment(),
- reservation.Assignment(),
- ],
- ),
- RuntimeError,
- )
-
- retry = retries.Retry()
- timeout = 5
- expected_metadata = (
- gapic_v1.routing_header.to_grpc_metadata((
- ('parent', ''),
- )),
- )
- pager = client.list_assignments(request={}, retry=retry, timeout=timeout)
-
- assert pager._metadata == expected_metadata
- assert pager._retry == retry
- assert pager._timeout == timeout
-
- results = list(pager)
- assert len(results) == 6
- assert all(isinstance(i, reservation.Assignment)
- for i in results)
-
-
-def test_list_assignments_pages(transport_name: str = "grpc"):
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_assignments),
- '__call__') as call:
- # Set the response to a series of pages.
- call.side_effect = (
- reservation.ListAssignmentsResponse(
- assignments=[
- reservation.Assignment(),
- reservation.Assignment(),
- reservation.Assignment(),
- ],
- next_page_token='abc',
- ),
- reservation.ListAssignmentsResponse(
- assignments=[],
- next_page_token='def',
- ),
- reservation.ListAssignmentsResponse(
- assignments=[
- reservation.Assignment(),
- ],
- next_page_token='ghi',
- ),
- reservation.ListAssignmentsResponse(
- assignments=[
- reservation.Assignment(),
- reservation.Assignment(),
- ],
- ),
- RuntimeError,
- )
- pages = list(client.list_assignments(request={}).pages)
- for page_, token in zip(pages, ['abc','def','ghi', '']):
- assert page_.raw_page.next_page_token == token
-
-@pytest.mark.asyncio
-async def test_list_assignments_async_pager():
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_assignments),
- '__call__', new_callable=mock.AsyncMock) as call:
- # Set the response to a series of pages.
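- # (Each side_effect entry below is consumed by one page fetch; the trailing
- # RuntimeError would fail the test if the pager requested an extra page.)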
- call.side_effect = ( - reservation.ListAssignmentsResponse( - assignments=[ - reservation.Assignment(), - reservation.Assignment(), - reservation.Assignment(), - ], - next_page_token='abc', - ), - reservation.ListAssignmentsResponse( - assignments=[], - next_page_token='def', - ), - reservation.ListAssignmentsResponse( - assignments=[ - reservation.Assignment(), - ], - next_page_token='ghi', - ), - reservation.ListAssignmentsResponse( - assignments=[ - reservation.Assignment(), - reservation.Assignment(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_assignments(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, reservation.Assignment) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_assignments_async_pages(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assignments), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - reservation.ListAssignmentsResponse( - assignments=[ - reservation.Assignment(), - reservation.Assignment(), - reservation.Assignment(), - ], - next_page_token='abc', - ), - reservation.ListAssignmentsResponse( - assignments=[], - next_page_token='def', - ), - reservation.ListAssignmentsResponse( - assignments=[ - reservation.Assignment(), - ], - next_page_token='ghi', - ), - reservation.ListAssignmentsResponse( - assignments=[ - reservation.Assignment(), - reservation.Assignment(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_assignments(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - reservation.DeleteAssignmentRequest, - dict, -]) -def test_delete_assignment(request_type, transport: str = 'grpc'): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_assignment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_assignment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = reservation.DeleteAssignmentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_assignment_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
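- # (Per AIP-4235, request_id-style fields default to a fresh UUID4 when left
- # unset, which is why only the non-UUID string fields are set explicitly here.)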
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Populate all string fields in the request which are not UUID4
- # since we want to check that UUID4 fields are populated automatically
- # if they meet the requirements of AIP-4235.
- request = reservation.DeleteAssignmentRequest(
- name='name_value',
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_assignment),
- '__call__') as call:
- call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client.delete_assignment(request=request)
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == reservation.DeleteAssignmentRequest(
- name='name_value',
- )
-
-def test_delete_assignment_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.delete_assignment in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.delete_assignment] = mock_rpc
- request = {}
- client.delete_assignment(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- client.delete_assignment(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_delete_assignment_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.delete_assignment in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.delete_assignment] = mock_rpc
-
- request = {}
- await client.delete_assignment(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1 - - await client.delete_assignment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_assignment_async(transport: str = 'grpc_asyncio', request_type=reservation.DeleteAssignmentRequest): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_assignment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_assignment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = reservation.DeleteAssignmentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_assignment_async_from_dict(): - await test_delete_assignment_async(request_type=dict) - -def test_delete_assignment_field_headers(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = reservation.DeleteAssignmentRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_assignment), - '__call__') as call: - call.return_value = None - client.delete_assignment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_assignment_field_headers_async(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = reservation.DeleteAssignmentRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_assignment), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_assignment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_assignment_flattened(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(
- type(client.transport.delete_assignment),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = None
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.delete_assignment(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
-
- def test_delete_assignment_flattened_error():
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.delete_assignment(
- reservation.DeleteAssignmentRequest(),
- name='name_value',
- )
-
- @pytest.mark.asyncio
- async def test_delete_assignment_flattened_async():
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_assignment),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.delete_assignment(
- name='name_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
-
- @pytest.mark.asyncio
- async def test_delete_assignment_flattened_error_async():
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.delete_assignment(
- reservation.DeleteAssignmentRequest(),
- name='name_value',
- )
-
-
- @pytest.mark.parametrize("request_type", [
- reservation.SearchAssignmentsRequest,
- dict,
- ])
- def test_search_assignments(request_type, transport: str = 'grpc'):
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.search_assignments),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = reservation.SearchAssignmentsResponse(
- next_page_token='next_page_token_value',
- )
- response = client.search_assignments(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = reservation.SearchAssignmentsRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.SearchAssignmentsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_search_assignments_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = reservation.SearchAssignmentsRequest( - parent='parent_value', - query='query_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_assignments), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.search_assignments(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == reservation.SearchAssignmentsRequest( - parent='parent_value', - query='query_value', - page_token='page_token_value', - ) - -def test_search_assignments_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.search_assignments in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.search_assignments] = mock_rpc - request = {} - client.search_assignments(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.search_assignments(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_search_assignments_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.search_assignments in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.search_assignments] = mock_rpc - - request = {} - await client.search_assignments(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- await client.search_assignments(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
- @pytest.mark.asyncio
- async def test_search_assignments_async(transport: str = 'grpc_asyncio', request_type=reservation.SearchAssignmentsRequest):
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.search_assignments),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.SearchAssignmentsResponse(
- next_page_token='next_page_token_value',
- ))
- response = await client.search_assignments(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = reservation.SearchAssignmentsRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.SearchAssignmentsAsyncPager)
- assert response.next_page_token == 'next_page_token_value'
-
-
- @pytest.mark.asyncio
- async def test_search_assignments_async_from_dict():
- await test_search_assignments_async(request_type=dict)
-
- def test_search_assignments_field_headers():
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = reservation.SearchAssignmentsRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.search_assignments),
- '__call__') as call:
- call.return_value = reservation.SearchAssignmentsResponse()
- client.search_assignments(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
- @pytest.mark.asyncio
- async def test_search_assignments_field_headers_async():
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = reservation.SearchAssignmentsRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.search_assignments),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.SearchAssignmentsResponse())
- await client.search_assignments(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'parent=parent_value',
- ) in kw['metadata']
-
-
- def test_search_assignments_flattened():
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.search_assignments),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = reservation.SearchAssignmentsResponse()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.search_assignments(
- parent='parent_value',
- query='query_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
- arg = args[0].query
- mock_val = 'query_value'
- assert arg == mock_val
-
-
- def test_search_assignments_flattened_error():
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.search_assignments(
- reservation.SearchAssignmentsRequest(),
- parent='parent_value',
- query='query_value',
- )
-
- @pytest.mark.asyncio
- async def test_search_assignments_flattened_async():
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.search_assignments),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.SearchAssignmentsResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.search_assignments(
- parent='parent_value',
- query='query_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
- arg = args[0].query
- mock_val = 'query_value'
- assert arg == mock_val
-
- @pytest.mark.asyncio
- async def test_search_assignments_flattened_error_async():
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.search_assignments(
- reservation.SearchAssignmentsRequest(),
- parent='parent_value',
- query='query_value',
- )
-
-
- def test_search_assignments_pager(transport_name: str = "grpc"):
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.search_assignments),
- '__call__') as call:
- # Set the response to a series of pages.
- call.side_effect = ( - reservation.SearchAssignmentsResponse( - assignments=[ - reservation.Assignment(), - reservation.Assignment(), - reservation.Assignment(), - ], - next_page_token='abc', - ), - reservation.SearchAssignmentsResponse( - assignments=[], - next_page_token='def', - ), - reservation.SearchAssignmentsResponse( - assignments=[ - reservation.Assignment(), - ], - next_page_token='ghi', - ), - reservation.SearchAssignmentsResponse( - assignments=[ - reservation.Assignment(), - reservation.Assignment(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.search_assignments(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, reservation.Assignment) - for i in results) -def test_search_assignments_pages(transport_name: str = "grpc"): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_assignments), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - reservation.SearchAssignmentsResponse( - assignments=[ - reservation.Assignment(), - reservation.Assignment(), - reservation.Assignment(), - ], - next_page_token='abc', - ), - reservation.SearchAssignmentsResponse( - assignments=[], - next_page_token='def', - ), - reservation.SearchAssignmentsResponse( - assignments=[ - reservation.Assignment(), - ], - next_page_token='ghi', - ), - reservation.SearchAssignmentsResponse( - assignments=[ - reservation.Assignment(), - reservation.Assignment(), - ], - ), - RuntimeError, - ) - pages = list(client.search_assignments(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_search_assignments_async_pager(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_assignments), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - reservation.SearchAssignmentsResponse( - assignments=[ - reservation.Assignment(), - reservation.Assignment(), - reservation.Assignment(), - ], - next_page_token='abc', - ), - reservation.SearchAssignmentsResponse( - assignments=[], - next_page_token='def', - ), - reservation.SearchAssignmentsResponse( - assignments=[ - reservation.Assignment(), - ], - next_page_token='ghi', - ), - reservation.SearchAssignmentsResponse( - assignments=[ - reservation.Assignment(), - reservation.Assignment(), - ], - ), - RuntimeError, - ) - async_pager = await client.search_assignments(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, reservation.Assignment) - for i in responses) - - -@pytest.mark.asyncio -async def test_search_assignments_async_pages(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_assignments), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - reservation.SearchAssignmentsResponse( - assignments=[ - reservation.Assignment(), - reservation.Assignment(), - reservation.Assignment(), - ], - next_page_token='abc', - ), - reservation.SearchAssignmentsResponse( - assignments=[], - next_page_token='def', - ), - reservation.SearchAssignmentsResponse( - assignments=[ - reservation.Assignment(), - ], - next_page_token='ghi', - ), - reservation.SearchAssignmentsResponse( - assignments=[ - reservation.Assignment(), - reservation.Assignment(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.search_assignments(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - reservation.SearchAllAssignmentsRequest, - dict, -]) -def test_search_all_assignments(request_type, transport: str = 'grpc'): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_all_assignments), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = reservation.SearchAllAssignmentsResponse( - next_page_token='next_page_token_value', - ) - response = client.search_all_assignments(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = reservation.SearchAllAssignmentsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.SearchAllAssignmentsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_search_all_assignments_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = reservation.SearchAllAssignmentsRequest( - parent='parent_value', - query='query_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_all_assignments), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.search_all_assignments(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == reservation.SearchAllAssignmentsRequest( - parent='parent_value', - query='query_value', - page_token='page_token_value', - ) - -def test_search_all_assignments_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.search_all_assignments in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.search_all_assignments] = mock_rpc - request = {} - client.search_all_assignments(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.search_all_assignments(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
- @pytest.mark.asyncio
- async def test_search_all_assignments_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.search_all_assignments in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.search_all_assignments] = mock_rpc
-
- request = {}
- await client.search_all_assignments(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.search_all_assignments(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
- @pytest.mark.asyncio
- async def test_search_all_assignments_async(transport: str = 'grpc_asyncio', request_type=reservation.SearchAllAssignmentsRequest):
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.search_all_assignments),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.SearchAllAssignmentsResponse(
- next_page_token='next_page_token_value',
- ))
- response = await client.search_all_assignments(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = reservation.SearchAllAssignmentsRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.SearchAllAssignmentsAsyncPager)
- assert response.next_page_token == 'next_page_token_value'
-
-
- @pytest.mark.asyncio
- async def test_search_all_assignments_async_from_dict():
- await test_search_all_assignments_async(request_type=dict)
-
- def test_search_all_assignments_field_headers():
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = reservation.SearchAllAssignmentsRequest()
-
- request.parent = 'parent_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.search_all_assignments), - '__call__') as call: - call.return_value = reservation.SearchAllAssignmentsResponse() - client.search_all_assignments(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_search_all_assignments_field_headers_async(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = reservation.SearchAllAssignmentsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_all_assignments), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.SearchAllAssignmentsResponse()) - await client.search_all_assignments(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_search_all_assignments_flattened(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_all_assignments), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = reservation.SearchAllAssignmentsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.search_all_assignments( - parent='parent_value', - query='query_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].query - mock_val = 'query_value' - assert arg == mock_val - - -def test_search_all_assignments_flattened_error(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.search_all_assignments( - reservation.SearchAllAssignmentsRequest(), - parent='parent_value', - query='query_value', - ) - -@pytest.mark.asyncio -async def test_search_all_assignments_flattened_async(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_all_assignments), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.SearchAllAssignmentsResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.search_all_assignments(
- parent='parent_value',
- query='query_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].parent
- mock_val = 'parent_value'
- assert arg == mock_val
- arg = args[0].query
- mock_val = 'query_value'
- assert arg == mock_val
-
- @pytest.mark.asyncio
- async def test_search_all_assignments_flattened_error_async():
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.search_all_assignments(
- reservation.SearchAllAssignmentsRequest(),
- parent='parent_value',
- query='query_value',
- )
-
-
- def test_search_all_assignments_pager(transport_name: str = "grpc"):
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.search_all_assignments),
- '__call__') as call:
- # Set the response to a series of pages.
- call.side_effect = (
- reservation.SearchAllAssignmentsResponse(
- assignments=[
- reservation.Assignment(),
- reservation.Assignment(),
- reservation.Assignment(),
- ],
- next_page_token='abc',
- ),
- reservation.SearchAllAssignmentsResponse(
- assignments=[],
- next_page_token='def',
- ),
- reservation.SearchAllAssignmentsResponse(
- assignments=[
- reservation.Assignment(),
- ],
- next_page_token='ghi',
- ),
- reservation.SearchAllAssignmentsResponse(
- assignments=[
- reservation.Assignment(),
- reservation.Assignment(),
- ],
- ),
- RuntimeError,
- )
-
- expected_metadata = ()
- retry = retries.Retry()
- timeout = 5
- expected_metadata = tuple(expected_metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ('parent', ''),
- )),
- )
- pager = client.search_all_assignments(request={}, retry=retry, timeout=timeout)
-
- assert pager._metadata == expected_metadata
- assert pager._retry == retry
- assert pager._timeout == timeout
-
- results = list(pager)
- assert len(results) == 6
- assert all(isinstance(i, reservation.Assignment)
- for i in results)
- def test_search_all_assignments_pages(transport_name: str = "grpc"):
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport_name,
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.search_all_assignments),
- '__call__') as call:
- # Set the response to a series of pages.
- call.side_effect = ( - reservation.SearchAllAssignmentsResponse( - assignments=[ - reservation.Assignment(), - reservation.Assignment(), - reservation.Assignment(), - ], - next_page_token='abc', - ), - reservation.SearchAllAssignmentsResponse( - assignments=[], - next_page_token='def', - ), - reservation.SearchAllAssignmentsResponse( - assignments=[ - reservation.Assignment(), - ], - next_page_token='ghi', - ), - reservation.SearchAllAssignmentsResponse( - assignments=[ - reservation.Assignment(), - reservation.Assignment(), - ], - ), - RuntimeError, - ) - pages = list(client.search_all_assignments(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_search_all_assignments_async_pager(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_all_assignments), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - reservation.SearchAllAssignmentsResponse( - assignments=[ - reservation.Assignment(), - reservation.Assignment(), - reservation.Assignment(), - ], - next_page_token='abc', - ), - reservation.SearchAllAssignmentsResponse( - assignments=[], - next_page_token='def', - ), - reservation.SearchAllAssignmentsResponse( - assignments=[ - reservation.Assignment(), - ], - next_page_token='ghi', - ), - reservation.SearchAllAssignmentsResponse( - assignments=[ - reservation.Assignment(), - reservation.Assignment(), - ], - ), - RuntimeError, - ) - async_pager = await client.search_all_assignments(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, reservation.Assignment) - for i in responses) - - -@pytest.mark.asyncio -async def test_search_all_assignments_async_pages(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_all_assignments), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - reservation.SearchAllAssignmentsResponse( - assignments=[ - reservation.Assignment(), - reservation.Assignment(), - reservation.Assignment(), - ], - next_page_token='abc', - ), - reservation.SearchAllAssignmentsResponse( - assignments=[], - next_page_token='def', - ), - reservation.SearchAllAssignmentsResponse( - assignments=[ - reservation.Assignment(), - ], - next_page_token='ghi', - ), - reservation.SearchAllAssignmentsResponse( - assignments=[ - reservation.Assignment(), - reservation.Assignment(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.search_all_assignments(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - reservation.MoveAssignmentRequest, - dict, -]) -def test_move_assignment(request_type, transport: str = 'grpc'): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.move_assignment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = reservation.Assignment( - name='name_value', - assignee='assignee_value', - job_type=reservation.Assignment.JobType.PIPELINE, - state=reservation.Assignment.State.PENDING, - enable_gemini_in_bigquery=True, - ) - response = client.move_assignment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = reservation.MoveAssignmentRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, reservation.Assignment) - assert response.name == 'name_value' - assert response.assignee == 'assignee_value' - assert response.job_type == reservation.Assignment.JobType.PIPELINE - assert response.state == reservation.Assignment.State.PENDING - assert response.enable_gemini_in_bigquery is True - - -def test_move_assignment_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = reservation.MoveAssignmentRequest( - name='name_value', - destination_id='destination_id_value', - assignment_id='assignment_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.move_assignment), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.move_assignment(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == reservation.MoveAssignmentRequest( - name='name_value', - destination_id='destination_id_value', - assignment_id='assignment_id_value', - ) - -def test_move_assignment_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.move_assignment in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.move_assignment] = mock_rpc - request = {} - client.move_assignment(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.move_assignment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_move_assignment_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.move_assignment in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.move_assignment] = mock_rpc - - request = {} - await client.move_assignment(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.move_assignment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_move_assignment_async(transport: str = 'grpc_asyncio', request_type=reservation.MoveAssignmentRequest): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.move_assignment), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.Assignment(
- name='name_value',
- assignee='assignee_value',
- job_type=reservation.Assignment.JobType.PIPELINE,
- state=reservation.Assignment.State.PENDING,
- enable_gemini_in_bigquery=True,
- ))
- response = await client.move_assignment(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = reservation.MoveAssignmentRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, reservation.Assignment)
- assert response.name == 'name_value'
- assert response.assignee == 'assignee_value'
- assert response.job_type == reservation.Assignment.JobType.PIPELINE
- assert response.state == reservation.Assignment.State.PENDING
- assert response.enable_gemini_in_bigquery is True
-
-
- @pytest.mark.asyncio
- async def test_move_assignment_async_from_dict():
- await test_move_assignment_async(request_type=dict)
-
- def test_move_assignment_field_headers():
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = reservation.MoveAssignmentRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.move_assignment),
- '__call__') as call:
- call.return_value = reservation.Assignment()
- client.move_assignment(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
- @pytest.mark.asyncio
- async def test_move_assignment_field_headers_async():
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = reservation.MoveAssignmentRequest()
-
- request.name = 'name_value'
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.move_assignment),
- '__call__') as call:
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.Assignment())
- await client.move_assignment(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert (
- 'x-goog-request-params',
- 'name=name_value',
- ) in kw['metadata']
-
-
- def test_move_assignment_flattened():
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.move_assignment),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = reservation.Assignment()
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- client.move_assignment(
- name='name_value',
- destination_id='destination_id_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
- arg = args[0].destination_id
- mock_val = 'destination_id_value'
- assert arg == mock_val
-
-
- def test_move_assignment_flattened_error():
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.move_assignment(
- reservation.MoveAssignmentRequest(),
- name='name_value',
- destination_id='destination_id_value',
- )
-
- @pytest.mark.asyncio
- async def test_move_assignment_flattened_async():
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.move_assignment),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.Assignment())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.move_assignment(
- name='name_value',
- destination_id='destination_id_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- arg = args[0].name
- mock_val = 'name_value'
- assert arg == mock_val
- arg = args[0].destination_id
- mock_val = 'destination_id_value'
- assert arg == mock_val
-
- @pytest.mark.asyncio
- async def test_move_assignment_flattened_error_async():
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.move_assignment(
- reservation.MoveAssignmentRequest(),
- name='name_value',
- destination_id='destination_id_value',
- )
-
-
- @pytest.mark.parametrize("request_type", [
- reservation.UpdateAssignmentRequest,
- dict,
- ])
- def test_update_assignment(request_type, transport: str = 'grpc'):
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_assignment),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = reservation.Assignment(
- name='name_value',
- assignee='assignee_value',
- job_type=reservation.Assignment.JobType.PIPELINE,
- state=reservation.Assignment.State.PENDING,
- enable_gemini_in_bigquery=True,
- )
- response = client.update_assignment(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- request = reservation.UpdateAssignmentRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, reservation.Assignment)
- assert response.name == 'name_value'
- assert response.assignee == 'assignee_value'
- assert response.job_type == reservation.Assignment.JobType.PIPELINE
- assert response.state == reservation.Assignment.State.PENDING
- assert response.enable_gemini_in_bigquery is True
-
-
- def test_update_assignment_non_empty_request_with_auto_populated_field():
- # This test is a coverage failsafe to make sure that UUID4 fields are
- # automatically populated, according to AIP-4235, with non-empty requests.
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='grpc',
- )
-
- # Populate all string fields in the request which are not UUID4
- # since we want to check that UUID4 are populated automatically
- # if they meet the requirements of AIP 4235.
- request = reservation.UpdateAssignmentRequest()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_assignment),
- '__call__') as call:
- call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client.update_assignment(request=request)
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == reservation.UpdateAssignmentRequest()
-
- def test_update_assignment_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.update_assignment in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.update_assignment] = mock_rpc
- request = {}
- client.update_assignment(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- client.update_assignment(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
- @pytest.mark.asyncio
- async def test_update_assignment_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._client._transport.update_assignment in client._client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.AsyncMock()
- mock_rpc.return_value = mock.Mock()
- client._client._transport._wrapped_methods[client._client._transport.update_assignment] = mock_rpc
-
- request = {}
- await client.update_assignment(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- await client.update_assignment(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
- @pytest.mark.asyncio
- async def test_update_assignment_async(transport: str = 'grpc_asyncio', request_type=reservation.UpdateAssignmentRequest):
- client = ReservationServiceAsyncClient(
- credentials=async_anonymous_credentials(),
- transport=transport,
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.update_assignment),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.Assignment(
- name='name_value',
- assignee='assignee_value',
- job_type=reservation.Assignment.JobType.PIPELINE,
- state=reservation.Assignment.State.PENDING,
- enable_gemini_in_bigquery=True,
- ))
- response = await client.update_assignment(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- request = reservation.UpdateAssignmentRequest()
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, reservation.Assignment)
- assert response.name == 'name_value'
- assert response.assignee == 'assignee_value'
- assert response.job_type == reservation.Assignment.JobType.PIPELINE
- assert response.state == reservation.Assignment.State.PENDING
- assert response.enable_gemini_in_bigquery is True
-
-
- @pytest.mark.asyncio
- async def test_update_assignment_async_from_dict():
- await test_update_assignment_async(request_type=dict)
-
- def test_update_assignment_field_headers():
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = reservation.UpdateAssignmentRequest() - - request.assignment.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_assignment), - '__call__') as call: - call.return_value = reservation.Assignment() - client.update_assignment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'assignment.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_assignment_field_headers_async(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = reservation.UpdateAssignmentRequest() - - request.assignment.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_assignment), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.Assignment()) - await client.update_assignment(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'assignment.name=name_value', - ) in kw['metadata'] - - -def test_update_assignment_flattened(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_assignment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = reservation.Assignment() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_assignment( - assignment=reservation.Assignment(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].assignment - mock_val = reservation.Assignment(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val - - -def test_update_assignment_flattened_error(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_assignment( - reservation.UpdateAssignmentRequest(), - assignment=reservation.Assignment(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - -@pytest.mark.asyncio -async def test_update_assignment_flattened_async(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
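-    # The stub method exposed on the transport is a callable object, so the
-    # test patches __call__ on its type; the resulting `call` mock records
-    # each invocation together with the request and metadata passed to it.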
-    with mock.patch.object(
-            type(client.transport.update_assignment),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.Assignment())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.update_assignment(
-            assignment=reservation.Assignment(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].assignment
-        mock_val = reservation.Assignment(name='name_value')
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_assignment_flattened_error_async():
-    client = ReservationServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.update_assignment(
-            reservation.UpdateAssignmentRequest(),
-            assignment=reservation.Assignment(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    reservation.GetBiReservationRequest,
-    dict,
-])
-def test_get_bi_reservation(request_type, transport: str = 'grpc'):
-    client = ReservationServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_bi_reservation),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = reservation.BiReservation(
-            name='name_value',
-            size=443,
-        )
-        response = client.get_bi_reservation(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = reservation.GetBiReservationRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, reservation.BiReservation)
-    assert response.name == 'name_value'
-    assert response.size == 443
-
-
-def test_get_bi_reservation_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = ReservationServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = reservation.GetBiReservationRequest(
-        name='name_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_bi_reservation),
-            '__call__') as call:
-        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
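-        # The request sent below should reach the stub unchanged: the
-        # assertion that follows rebuilds the same request and compares it
-        # against what the transport actually received.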
-        client.get_bi_reservation(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == reservation.GetBiReservationRequest(
-            name='name_value',
-        )
-
-def test_get_bi_reservation_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = ReservationServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.get_bi_reservation in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.get_bi_reservation] = mock_rpc
-        request = {}
-        client.get_bi_reservation(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.get_bi_reservation(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_bi_reservation_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = ReservationServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.get_bi_reservation in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.get_bi_reservation] = mock_rpc
-
-        request = {}
-        await client.get_bi_reservation(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.get_bi_reservation(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_bi_reservation_async(transport: str = 'grpc_asyncio', request_type=reservation.GetBiReservationRequest):
-    client = ReservationServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_bi_reservation),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.BiReservation(
-            name='name_value',
-            size=443,
-        ))
-        response = await client.get_bi_reservation(request)
-
-        # Establish that the underlying gRPC stub method was called.
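-        # The async variants assert only that at least one call was recorded
-        # (a truthy len), rather than exactly one as in the sync tests.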
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = reservation.GetBiReservationRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, reservation.BiReservation) - assert response.name == 'name_value' - assert response.size == 443 - - -@pytest.mark.asyncio -async def test_get_bi_reservation_async_from_dict(): - await test_get_bi_reservation_async(request_type=dict) - -def test_get_bi_reservation_field_headers(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = reservation.GetBiReservationRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_bi_reservation), - '__call__') as call: - call.return_value = reservation.BiReservation() - client.get_bi_reservation(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_bi_reservation_field_headers_async(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = reservation.GetBiReservationRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_bi_reservation), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.BiReservation()) - await client.get_bi_reservation(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_bi_reservation_flattened(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_bi_reservation), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = reservation.BiReservation() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_bi_reservation( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_bi_reservation_flattened_error(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
-    with pytest.raises(ValueError):
-        client.get_bi_reservation(
-            reservation.GetBiReservationRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_get_bi_reservation_flattened_async():
-    client = ReservationServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_bi_reservation),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.BiReservation())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.get_bi_reservation(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_bi_reservation_flattened_error_async():
-    client = ReservationServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.get_bi_reservation(
-            reservation.GetBiReservationRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    reservation.UpdateBiReservationRequest,
-    dict,
-])
-def test_update_bi_reservation(request_type, transport: str = 'grpc'):
-    client = ReservationServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_bi_reservation),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = reservation.BiReservation(
-            name='name_value',
-            size=443,
-        )
-        response = client.update_bi_reservation(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = reservation.UpdateBiReservationRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, reservation.BiReservation)
-    assert response.name == 'name_value'
-    assert response.size == 443
-
-
-def test_update_bi_reservation_non_empty_request_with_auto_populated_field():
-    # This test is a coverage failsafe to make sure that UUID4 fields are
-    # automatically populated, according to AIP-4235, with non-empty requests.
-    client = ReservationServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4
-    # since we want to check that UUID4 are populated automatically
-    # if they meet the requirements of AIP 4235.
-    request = reservation.UpdateBiReservationRequest(
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.update_bi_reservation), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_bi_reservation(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == reservation.UpdateBiReservationRequest( - ) - -def test_update_bi_reservation_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_bi_reservation in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_bi_reservation] = mock_rpc - request = {} - client.update_bi_reservation(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_bi_reservation(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_bi_reservation_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_bi_reservation in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_bi_reservation] = mock_rpc - - request = {} - await client.update_bi_reservation(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.update_bi_reservation(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_bi_reservation_async(transport: str = 'grpc_asyncio', request_type=reservation.UpdateBiReservationRequest): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_bi_reservation), - '__call__') as call: - # Designate an appropriate return value for the call. 
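-        # FakeUnaryUnaryCall wraps the message in an awaitable stand-in for a
-        # real gRPC call object, so awaiting the mocked method yields the
-        # BiReservation below.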
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.BiReservation(
-            name='name_value',
-            size=443,
-        ))
-        response = await client.update_bi_reservation(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = reservation.UpdateBiReservationRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, reservation.BiReservation)
-    assert response.name == 'name_value'
-    assert response.size == 443
-
-
-@pytest.mark.asyncio
-async def test_update_bi_reservation_async_from_dict():
-    await test_update_bi_reservation_async(request_type=dict)
-
-def test_update_bi_reservation_field_headers():
-    client = ReservationServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = reservation.UpdateBiReservationRequest()
-
-    request.bi_reservation.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_bi_reservation),
-            '__call__') as call:
-        call.return_value = reservation.BiReservation()
-        client.update_bi_reservation(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'bi_reservation.name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_update_bi_reservation_field_headers_async():
-    client = ReservationServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = reservation.UpdateBiReservationRequest()
-
-    request.bi_reservation.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_bi_reservation),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.BiReservation())
-        await client.update_bi_reservation(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'bi_reservation.name=name_value',
-    ) in kw['metadata']
-
-
-def test_update_bi_reservation_flattened():
-    client = ReservationServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_bi_reservation),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = reservation.BiReservation()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.update_bi_reservation(
-            bi_reservation=reservation.BiReservation(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].bi_reservation
-        mock_val = reservation.BiReservation(name='name_value')
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-
-def test_update_bi_reservation_flattened_error():
-    client = ReservationServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.update_bi_reservation(
-            reservation.UpdateBiReservationRequest(),
-            bi_reservation=reservation.BiReservation(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-@pytest.mark.asyncio
-async def test_update_bi_reservation_flattened_async():
-    client = ReservationServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_bi_reservation),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.BiReservation())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.update_bi_reservation(
-            bi_reservation=reservation.BiReservation(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].bi_reservation
-        mock_val = reservation.BiReservation(name='name_value')
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_bi_reservation_flattened_error_async():
-    client = ReservationServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.update_bi_reservation(
-            reservation.UpdateBiReservationRequest(),
-            bi_reservation=reservation.BiReservation(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-
-def test_create_reservation_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = ReservationServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.create_reservation in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.create_reservation] = mock_rpc
-
-        request = {}
-        client.create_reservation(request)
-
-        # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1 - - client.create_reservation(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_reservation_rest_required_fields(request_type=gcbr_reservation.CreateReservationRequest): - transport_class = transports.ReservationServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_reservation._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_reservation._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("reservation_id", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = gcbr_reservation.Reservation() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
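-            # The dict below mirrors the shape path_template.transcode()
-            # normally returns: the HTTP verb, the substituted URI, and the
-            # remaining request fields split between query params and body.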
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gcbr_reservation.Reservation.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_reservation(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_reservation_rest_unset_required_fields(): - transport = transports.ReservationServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_reservation._get_unset_required_fields({}) - assert set(unset_fields) == (set(("reservationId", )) & set(("parent", ))) - - -def test_create_reservation_rest_flattened(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = gcbr_reservation.Reservation() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - reservation=gcbr_reservation.Reservation(name='name_value'), - reservation_id='reservation_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gcbr_reservation.Reservation.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_reservation(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/reservations" % client.transport._host, args[1]) - - -def test_create_reservation_rest_flattened_error(transport: str = 'rest'): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
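-    # Mixing the two calling conventions is ambiguous about which value
-    # should win, so the client raises ValueError before any HTTP request
-    # is made.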
- with pytest.raises(ValueError): - client.create_reservation( - gcbr_reservation.CreateReservationRequest(), - parent='parent_value', - reservation=gcbr_reservation.Reservation(name='name_value'), - reservation_id='reservation_id_value', - ) - - -def test_list_reservations_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_reservations in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_reservations] = mock_rpc - - request = {} - client.list_reservations(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_reservations(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_reservations_rest_required_fields(request_type=reservation.ListReservationsRequest): - transport_class = transports.ReservationServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_reservations._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_reservations._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = reservation.ListReservationsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = reservation.ListReservationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_reservations(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_reservations_rest_unset_required_fields(): - transport = transports.ReservationServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_reservations._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) - - -def test_list_reservations_rest_flattened(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = reservation.ListReservationsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = reservation.ListReservationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_reservations(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/reservations" % client.transport._host, args[1]) - - -def test_list_reservations_rest_flattened_error(transport: str = 'rest'): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_reservations( - reservation.ListReservationsRequest(), - parent='parent_value', - ) - - -def test_list_reservations_rest_pager(transport: str = 'rest'): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - reservation.ListReservationsResponse( - reservations=[ - reservation.Reservation(), - reservation.Reservation(), - reservation.Reservation(), - ], - next_page_token='abc', - ), - reservation.ListReservationsResponse( - reservations=[], - next_page_token='def', - ), - reservation.ListReservationsResponse( - reservations=[ - reservation.Reservation(), - ], - next_page_token='ghi', - ), - reservation.ListReservationsResponse( - reservations=[ - reservation.Reservation(), - reservation.Reservation(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(reservation.ListReservationsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - pager = client.list_reservations(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, reservation.Reservation) - for i in results) - - pages = list(client.list_reservations(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_get_reservation_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_reservation in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_reservation] = mock_rpc - - request = {} - client.get_reservation(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_reservation(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_reservation_rest_required_fields(request_type=reservation.GetReservationRequest): - transport_class = transports.ReservationServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_reservation._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_reservation._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = reservation.Reservation() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = reservation.Reservation.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_reservation(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_reservation_rest_unset_required_fields(): - transport = transports.ReservationServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_reservation._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_reservation_rest_flattened(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
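-        # Unlike the gRPC tests above, the REST tests fake the HTTP layer
-        # itself: a requests.Response is assembled by hand and the session's
-        # request() method is patched to return it.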
- return_value = reservation.Reservation() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/reservations/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = reservation.Reservation.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_reservation(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/reservations/*}" % client.transport._host, args[1]) - - -def test_get_reservation_rest_flattened_error(transport: str = 'rest'): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_reservation( - reservation.GetReservationRequest(), - name='name_value', - ) - - -def test_delete_reservation_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_reservation in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_reservation] = mock_rpc - - request = {} - client.delete_reservation(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_reservation(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_reservation_rest_required_fields(request_type=reservation.DeleteReservationRequest): - transport_class = transports.ReservationServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_reservation._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_reservation._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_reservation(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_reservation_rest_unset_required_fields(): - transport = transports.ReservationServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_reservation._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_delete_reservation_rest_flattened(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
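-        # DeleteReservation returns google.protobuf.Empty, which surfaces as
-        # None in Python; the faked HTTP body is an empty string accordingly.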
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/reservations/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_reservation(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/reservations/*}" % client.transport._host, args[1]) - - -def test_delete_reservation_rest_flattened_error(transport: str = 'rest'): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_reservation( - reservation.DeleteReservationRequest(), - name='name_value', - ) - - -def test_update_reservation_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_reservation in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_reservation] = mock_rpc - - request = {} - client.update_reservation(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_reservation(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_reservation_rest_flattened(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
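-        # Update methods route on a nested field, so the sample request nests
-        # the resource name; it has to match the
-        # {reservation.name=projects/*/locations/*/reservations/*} pattern
-        # validated against further down.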
- return_value = gcbr_reservation.Reservation() - - # get arguments that satisfy an http rule for this method - sample_request = {'reservation': {'name': 'projects/sample1/locations/sample2/reservations/sample3'}} - - # get truthy value for each flattened field - mock_args = dict( - reservation=gcbr_reservation.Reservation(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gcbr_reservation.Reservation.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_reservation(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{reservation.name=projects/*/locations/*/reservations/*}" % client.transport._host, args[1]) - - -def test_update_reservation_rest_flattened_error(transport: str = 'rest'): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_reservation( - gcbr_reservation.UpdateReservationRequest(), - reservation=gcbr_reservation.Reservation(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_failover_reservation_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.failover_reservation in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.failover_reservation] = mock_rpc - - request = {} - client.failover_reservation(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.failover_reservation(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_failover_reservation_rest_required_fields(request_type=reservation.FailoverReservationRequest): - transport_class = transports.ReservationServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).failover_reservation._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).failover_reservation._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = reservation.Reservation() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
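- # The stubbed transcode() result mirrors what the real helper returns: a
- # URI, an HTTP verb, and query params. FailoverReservation maps to a POST
- # http rule, so the request is also attached as the body below.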
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = reservation.Reservation.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.failover_reservation(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_failover_reservation_rest_unset_required_fields(): - transport = transports.ReservationServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.failover_reservation._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_create_capacity_commitment_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_capacity_commitment in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_capacity_commitment] = mock_rpc - - request = {} - client.create_capacity_commitment(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_capacity_commitment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_capacity_commitment_rest_required_fields(request_type=reservation.CreateCapacityCommitmentRequest): - transport_class = transports.ReservationServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_capacity_commitment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_capacity_commitment._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("capacity_commitment_id", "enforce_single_admin_project_per_org", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = reservation.CapacityCommitment() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = reservation.CapacityCommitment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_capacity_commitment(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_capacity_commitment_rest_unset_required_fields(): - transport = transports.ReservationServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_capacity_commitment._get_unset_required_fields({}) - assert set(unset_fields) == (set(("capacityCommitmentId", "enforceSingleAdminProjectPerOrg", )) & set(("parent", ))) - - -def test_create_capacity_commitment_rest_flattened(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = reservation.CapacityCommitment() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - capacity_commitment=reservation.CapacityCommitment(name='name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = reservation.CapacityCommitment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_capacity_commitment(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/capacityCommitments" % client.transport._host, args[1]) - - -def test_create_capacity_commitment_rest_flattened_error(transport: str = 'rest'): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_capacity_commitment( - reservation.CreateCapacityCommitmentRequest(), - parent='parent_value', - capacity_commitment=reservation.CapacityCommitment(name='name_value'), - ) - - -def test_list_capacity_commitments_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_capacity_commitments in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_capacity_commitments] = mock_rpc - - request = {} - client.list_capacity_commitments(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_capacity_commitments(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_capacity_commitments_rest_required_fields(request_type=reservation.ListCapacityCommitmentsRequest): - transport_class = transports.ReservationServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_capacity_commitments._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_capacity_commitments._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = reservation.ListCapacityCommitmentsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
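- # ListCapacityCommitments maps to a GET http rule, so the stubbed
- # transcode() result below carries no body key; all fields travel as
- # query params.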
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "get",
- 'query_params': pb_request,
- }
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- # Convert return value to protobuf type
- return_value = reservation.ListCapacityCommitmentsResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.list_capacity_commitments(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_list_capacity_commitments_rest_unset_required_fields():
- transport = transports.ReservationServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
- unset_fields = transport.list_capacity_commitments._get_unset_required_fields({})
- assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", )))
-
-
-def test_list_capacity_commitments_rest_flattened():
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = reservation.ListCapacityCommitmentsResponse()
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
- # get truthy value for each flattened field
- mock_args = dict(
- parent='parent_value',
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = reservation.ListCapacityCommitmentsResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- client.list_capacity_commitments(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/capacityCommitments" % client.transport._host, args[1])
-
-
-def test_list_capacity_commitments_rest_flattened_error(transport: str = 'rest'):
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.list_capacity_commitments(
- reservation.ListCapacityCommitmentsRequest(),
- parent='parent_value',
- )
-
-
-def test_list_capacity_commitments_rest_pager(transport: str = 'rest'):
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, 'request') as req:
- # Set the response as a series of pages
- response = (
- reservation.ListCapacityCommitmentsResponse(
- capacity_commitments=[
- reservation.CapacityCommitment(),
- reservation.CapacityCommitment(),
- reservation.CapacityCommitment(),
- ],
- next_page_token='abc',
- ),
- reservation.ListCapacityCommitmentsResponse(
- capacity_commitments=[],
- next_page_token='def',
- ),
- reservation.ListCapacityCommitmentsResponse(
- capacity_commitments=[
- reservation.CapacityCommitment(),
- ],
- next_page_token='ghi',
- ),
- reservation.ListCapacityCommitmentsResponse(
- capacity_commitments=[
- reservation.CapacityCommitment(),
- reservation.CapacityCommitment(),
- ],
- ),
- )
- # Two responses for two calls
- response = response + response
-
- # Wrap the values into proper Response objs
- response = tuple(reservation.ListCapacityCommitmentsResponse.to_json(x) for x in response)
- return_values = tuple(Response() for i in response)
- for return_val, response_val in zip(return_values, response):
- return_val._content = response_val.encode('UTF-8')
- return_val.status_code = 200
- req.side_effect = return_values
-
- sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
- pager = client.list_capacity_commitments(request=sample_request)
-
- results = list(pager)
- assert len(results) == 6
- assert all(isinstance(i, reservation.CapacityCommitment)
- for i in results)
-
- pages = list(client.list_capacity_commitments(request=sample_request).pages)
- for page_, token in zip(pages, ['abc','def','ghi', '']):
- assert page_.raw_page.next_page_token == token
-
-
-def test_get_capacity_commitment_rest_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.get_capacity_commitment in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.get_capacity_commitment] = mock_rpc
-
- request = {}
- client.get_capacity_commitment(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1 - - client.get_capacity_commitment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_capacity_commitment_rest_required_fields(request_type=reservation.GetCapacityCommitmentRequest): - transport_class = transports.ReservationServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_capacity_commitment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_capacity_commitment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = reservation.CapacityCommitment() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = reservation.CapacityCommitment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_capacity_commitment(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_capacity_commitment_rest_unset_required_fields(): - transport = transports.ReservationServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_capacity_commitment._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_get_capacity_commitment_rest_flattened(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = reservation.CapacityCommitment() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/capacityCommitments/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = reservation.CapacityCommitment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_capacity_commitment(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/capacityCommitments/*}" % client.transport._host, args[1]) - - -def test_get_capacity_commitment_rest_flattened_error(transport: str = 'rest'): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_capacity_commitment( - reservation.GetCapacityCommitmentRequest(), - name='name_value', - ) - - -def test_delete_capacity_commitment_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_capacity_commitment in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_capacity_commitment] = mock_rpc - - request = {} - client.delete_capacity_commitment(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.delete_capacity_commitment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_capacity_commitment_rest_required_fields(request_type=reservation.DeleteCapacityCommitmentRequest): - transport_class = transports.ReservationServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_capacity_commitment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_capacity_commitment._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("force", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_capacity_commitment(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_capacity_commitment_rest_unset_required_fields(): - transport = transports.ReservationServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_capacity_commitment._get_unset_required_fields({}) - assert set(unset_fields) == (set(("force", )) & set(("name", ))) - - -def test_delete_capacity_commitment_rest_flattened(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
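- # Patching 'request' on type(client.transport._session) swaps the method at
- # the class level, so the Session instance the client already holds picks up
- # the mock as well.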
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/capacityCommitments/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_capacity_commitment(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/capacityCommitments/*}" % client.transport._host, args[1]) - - -def test_delete_capacity_commitment_rest_flattened_error(transport: str = 'rest'): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_capacity_commitment( - reservation.DeleteCapacityCommitmentRequest(), - name='name_value', - ) - - -def test_update_capacity_commitment_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_capacity_commitment in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_capacity_commitment] = mock_rpc - - request = {} - client.update_capacity_commitment(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_capacity_commitment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_capacity_commitment_rest_flattened(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
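- # The http rule binds the URI path to capacity_commitment.name, so the
- # sample request nests the resource name one level deep.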
- return_value = reservation.CapacityCommitment() - - # get arguments that satisfy an http rule for this method - sample_request = {'capacity_commitment': {'name': 'projects/sample1/locations/sample2/capacityCommitments/sample3'}} - - # get truthy value for each flattened field - mock_args = dict( - capacity_commitment=reservation.CapacityCommitment(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = reservation.CapacityCommitment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_capacity_commitment(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{capacity_commitment.name=projects/*/locations/*/capacityCommitments/*}" % client.transport._host, args[1]) - - -def test_update_capacity_commitment_rest_flattened_error(transport: str = 'rest'): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_capacity_commitment( - reservation.UpdateCapacityCommitmentRequest(), - capacity_commitment=reservation.CapacityCommitment(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_split_capacity_commitment_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.split_capacity_commitment in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.split_capacity_commitment] = mock_rpc - - request = {} - client.split_capacity_commitment(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.split_capacity_commitment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_split_capacity_commitment_rest_required_fields(request_type=reservation.SplitCapacityCommitmentRequest): - transport_class = transports.ReservationServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).split_capacity_commitment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).split_capacity_commitment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = reservation.SplitCapacityCommitmentResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = reservation.SplitCapacityCommitmentResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.split_capacity_commitment(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_split_capacity_commitment_rest_unset_required_fields(): - transport = transports.ReservationServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.split_capacity_commitment._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -def test_split_capacity_commitment_rest_flattened(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = reservation.SplitCapacityCommitmentResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/capacityCommitments/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - slot_count=1098, - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = reservation.SplitCapacityCommitmentResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.split_capacity_commitment(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/capacityCommitments/*}:split" % client.transport._host, args[1]) - - -def test_split_capacity_commitment_rest_flattened_error(transport: str = 'rest'): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.split_capacity_commitment( - reservation.SplitCapacityCommitmentRequest(), - name='name_value', - slot_count=1098, - ) - - -def test_merge_capacity_commitments_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.merge_capacity_commitments in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.merge_capacity_commitments] = mock_rpc - - request = {} - client.merge_capacity_commitments(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.merge_capacity_commitments(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_merge_capacity_commitments_rest_flattened(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
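- # MergeCapacityCommitments returns the single merged CapacityCommitment
- # rather than a list of the inputs.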
- return_value = reservation.CapacityCommitment() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - capacity_commitment_ids=['capacity_commitment_ids_value'], - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = reservation.CapacityCommitment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.merge_capacity_commitments(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/capacityCommitments:merge" % client.transport._host, args[1]) - - -def test_merge_capacity_commitments_rest_flattened_error(transport: str = 'rest'): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.merge_capacity_commitments( - reservation.MergeCapacityCommitmentsRequest(), - parent='parent_value', - capacity_commitment_ids=['capacity_commitment_ids_value'], - ) - - -def test_create_assignment_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_assignment in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_assignment] = mock_rpc - - request = {} - client.create_assignment(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_assignment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_assignment_rest_required_fields(request_type=reservation.CreateAssignmentRequest): - transport_class = transports.ReservationServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_assignment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_assignment._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("assignment_id", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = reservation.Assignment() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = reservation.Assignment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.create_assignment(request) - - expected_params = [ - ('$alt', 'json;enum-encoding=int') - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_assignment_rest_unset_required_fields(): - transport = transports.ReservationServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_assignment._get_unset_required_fields({}) - assert set(unset_fields) == (set(("assignmentId", )) & set(("parent", ))) - - -def test_create_assignment_rest_flattened(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = reservation.Assignment() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2/reservations/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - assignment=reservation.Assignment(name='name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = reservation.Assignment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_assignment(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*/reservations/*}/assignments" % client.transport._host, args[1]) - - -def test_create_assignment_rest_flattened_error(transport: str = 'rest'): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_assignment( - reservation.CreateAssignmentRequest(), - parent='parent_value', - assignment=reservation.Assignment(name='name_value'), - ) - - -def test_list_assignments_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_assignments in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_assignments] = mock_rpc - - request = {} - client.list_assignments(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_assignments(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_assignments_rest_required_fields(request_type=reservation.ListAssignmentsRequest): - transport_class = transports.ReservationServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_assignments._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_assignments._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = reservation.ListAssignmentsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "get",
- 'query_params': pb_request,
- }
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- # Convert return value to protobuf type
- return_value = reservation.ListAssignmentsResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.list_assignments(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_list_assignments_rest_unset_required_fields():
- transport = transports.ReservationServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
-
- unset_fields = transport.list_assignments._get_unset_required_fields({})
- assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", )))
-
-
-def test_list_assignments_rest_flattened():
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = reservation.ListAssignmentsResponse()
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'parent': 'projects/sample1/locations/sample2/reservations/sample3'}
-
- # get truthy value for each flattened field
- mock_args = dict(
- parent='parent_value',
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = reservation.ListAssignmentsResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- client.list_assignments(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1/{parent=projects/*/locations/*/reservations/*}/assignments" % client.transport._host, args[1])
-
-
-def test_list_assignments_rest_flattened_error(transport: str = 'rest'):
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.list_assignments(
- reservation.ListAssignmentsRequest(),
- parent='parent_value',
- )
-
-
-def test_list_assignments_rest_pager(transport: str = 'rest'):
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, 'request') as req:
- # Set the response as a series of pages
- response = (
- reservation.ListAssignmentsResponse(
- assignments=[
- reservation.Assignment(),
- reservation.Assignment(),
- reservation.Assignment(),
- ],
- next_page_token='abc',
- ),
- reservation.ListAssignmentsResponse(
- assignments=[],
- next_page_token='def',
- ),
- reservation.ListAssignmentsResponse(
- assignments=[
- reservation.Assignment(),
- ],
- next_page_token='ghi',
- ),
- reservation.ListAssignmentsResponse(
- assignments=[
- reservation.Assignment(),
- reservation.Assignment(),
- ],
- ),
- )
- # Two responses for two calls
- response = response + response
-
- # Wrap the values into proper Response objs
- response = tuple(reservation.ListAssignmentsResponse.to_json(x) for x in response)
- return_values = tuple(Response() for i in response)
- for return_val, response_val in zip(return_values, response):
- return_val._content = response_val.encode('UTF-8')
- return_val.status_code = 200
- req.side_effect = return_values
-
- sample_request = {'parent': 'projects/sample1/locations/sample2/reservations/sample3'}
-
- pager = client.list_assignments(request=sample_request)
-
- results = list(pager)
- assert len(results) == 6
- assert all(isinstance(i, reservation.Assignment)
- for i in results)
-
- pages = list(client.list_assignments(request=sample_request).pages)
- for page_, token in zip(pages, ['abc','def','ghi', '']):
- assert page_.raw_page.next_page_token == token
-
-
-def test_delete_assignment_rest_use_cached_wrapped_rpc():
- # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
- # instead of constructing them on each call
- with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Should wrap all calls on client creation
- assert wrapper_fn.call_count > 0
- wrapper_fn.reset_mock()
-
- # Ensure method has been cached
- assert client._transport.delete_assignment in client._transport._wrapped_methods
-
- # Replace cached wrapped function with mock
- mock_rpc = mock.Mock()
- mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client._transport._wrapped_methods[client._transport.delete_assignment] = mock_rpc
-
- request = {}
- client.delete_assignment(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1
-
- client.delete_assignment(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-
-def test_delete_assignment_rest_required_fields(request_type=reservation.DeleteAssignmentRequest):
- transport_class = transports.ReservationServiceRestTransport
-
- request_init = {}
- request_init["name"] = ""
- request = request_type(**request_init)
- pb_request = request_type.pb(request)
- jsonified_request = json.loads(json_format.MessageToJson(
- pb_request,
- use_integers_for_enums=False
- ))
-
- # verify fields with default values are dropped
-
- unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_assignment._get_unset_required_fields(jsonified_request)
- jsonified_request.update(unset_fields)
-
- # verify required fields with default values are now present
-
- jsonified_request["name"] = 'name_value'
-
- unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_assignment._get_unset_required_fields(jsonified_request)
- jsonified_request.update(unset_fields)
-
- # verify required fields with non-default values are left alone
- assert "name" in jsonified_request
- assert jsonified_request["name"] == 'name_value'
-
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='rest',
- )
- request = request_type(**request_init)
-
- # Designate an appropriate value for the returned response.
- return_value = None
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, 'request') as req:
- # We need to mock transcode() because providing default values
- # for required fields will fail the real version if the http_options
- # expect actual values for those fields.
- with mock.patch.object(path_template, 'transcode') as transcode:
- # A uri without fields and an empty body will force all the
- # request fields to show up in the query_params.
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "delete",
- 'query_params': pb_request,
- }
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
- json_return_value = ''
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.delete_assignment(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_delete_assignment_rest_unset_required_fields():
- transport = transports.ReservationServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.delete_assignment._get_unset_required_fields({})
- assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-def test_delete_assignment_rest_flattened():
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
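- # delete_assignment returns no payload, so the faked REST response
- # below carries an empty body rather than a serialized message.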
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/reservations/sample3/assignments/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_assignment(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/reservations/*/assignments/*}" % client.transport._host, args[1]) - - -def test_delete_assignment_rest_flattened_error(transport: str = 'rest'): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_assignment( - reservation.DeleteAssignmentRequest(), - name='name_value', - ) - - -def test_search_assignments_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.search_assignments in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.search_assignments] = mock_rpc - - request = {} - client.search_assignments(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.search_assignments(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_search_assignments_rest_required_fields(request_type=reservation.SearchAssignmentsRequest): - transport_class = transports.ReservationServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_assignments._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_assignments._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", "query", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = reservation.SearchAssignmentsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
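- # path_template.transcode would normally map the request onto the http
- # rule (URI, verb, query params, body); substituting a fixed result
- # keeps the test independent of the real URI template while required
- # fields are deliberately left at their default values.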
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "get",
- 'query_params': pb_request,
- }
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- # Convert return value to protobuf type
- return_value = reservation.SearchAssignmentsResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.search_assignments(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_search_assignments_rest_unset_required_fields():
- transport = transports.ReservationServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.search_assignments._get_unset_required_fields({})
- assert set(unset_fields) == (set(("pageSize", "pageToken", "query", )) & set(("parent", )))
-
-
-def test_search_assignments_rest_flattened():
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = reservation.SearchAssignmentsResponse()
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
- # get truthy value for each flattened field
- mock_args = dict(
- parent='parent_value',
- query='query_value',
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = reservation.SearchAssignmentsResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- client.search_assignments(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1/{parent=projects/*/locations/*}:searchAssignments" % client.transport._host, args[1])
-
-
-def test_search_assignments_rest_flattened_error(transport: str = 'rest'):
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.search_assignments(
- reservation.SearchAssignmentsRequest(),
- parent='parent_value',
- query='query_value',
- )
-
-
-def test_search_assignments_rest_pager(transport: str = 'rest'):
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, 'request') as req:
- # TODO(kbandes): remove this mock unless there's a good reason for it.
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - reservation.SearchAssignmentsResponse( - assignments=[ - reservation.Assignment(), - reservation.Assignment(), - reservation.Assignment(), - ], - next_page_token='abc', - ), - reservation.SearchAssignmentsResponse( - assignments=[], - next_page_token='def', - ), - reservation.SearchAssignmentsResponse( - assignments=[ - reservation.Assignment(), - ], - next_page_token='ghi', - ), - reservation.SearchAssignmentsResponse( - assignments=[ - reservation.Assignment(), - reservation.Assignment(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(reservation.SearchAssignmentsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - pager = client.search_assignments(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, reservation.Assignment) - for i in results) - - pages = list(client.search_assignments(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_search_all_assignments_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.search_all_assignments in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.search_all_assignments] = mock_rpc - - request = {} - client.search_all_assignments(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.search_all_assignments(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_search_all_assignments_rest_required_fields(request_type=reservation.SearchAllAssignmentsRequest): - transport_class = transports.ReservationServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_assignments._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_assignments._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", "query", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = reservation.SearchAllAssignmentsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
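- # As in the tests above, a fixed transcode result stands in for the
- # real http rule so default-valued required fields cannot trip URI
- # validation.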
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "get",
- 'query_params': pb_request,
- }
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- # Convert return value to protobuf type
- return_value = reservation.SearchAllAssignmentsResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.search_all_assignments(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_search_all_assignments_rest_unset_required_fields():
- transport = transports.ReservationServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.search_all_assignments._get_unset_required_fields({})
- assert set(unset_fields) == (set(("pageSize", "pageToken", "query", )) & set(("parent", )))
-
-
-def test_search_all_assignments_rest_flattened():
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
- return_value = reservation.SearchAllAssignmentsResponse()
-
- # get arguments that satisfy an http rule for this method
- sample_request = {'parent': 'projects/sample1/locations/sample2'}
-
- # get truthy value for each flattened field
- mock_args = dict(
- parent='parent_value',
- query='query_value',
- )
- mock_args.update(sample_request)
-
- # Wrap the value into a proper Response obj
- response_value = Response()
- response_value.status_code = 200
- # Convert return value to protobuf type
- return_value = reservation.SearchAllAssignmentsResponse.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- client.search_all_assignments(**mock_args)
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(req.mock_calls) == 1
- _, args, _ = req.mock_calls[0]
- assert path_template.validate("%s/v1/{parent=projects/*/locations/*}:searchAllAssignments" % client.transport._host, args[1])
-
-
-def test_search_all_assignments_rest_flattened_error(transport: str = 'rest'):
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.search_all_assignments(
- reservation.SearchAllAssignmentsRequest(),
- parent='parent_value',
- query='query_value',
- )
-
-
-def test_search_all_assignments_rest_pager(transport: str = 'rest'):
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, 'request') as req:
- # TODO(kbandes): remove this mock unless there's a good reason for it.
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - reservation.SearchAllAssignmentsResponse( - assignments=[ - reservation.Assignment(), - reservation.Assignment(), - reservation.Assignment(), - ], - next_page_token='abc', - ), - reservation.SearchAllAssignmentsResponse( - assignments=[], - next_page_token='def', - ), - reservation.SearchAllAssignmentsResponse( - assignments=[ - reservation.Assignment(), - ], - next_page_token='ghi', - ), - reservation.SearchAllAssignmentsResponse( - assignments=[ - reservation.Assignment(), - reservation.Assignment(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(reservation.SearchAllAssignmentsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - pager = client.search_all_assignments(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, reservation.Assignment) - for i in results) - - pages = list(client.search_all_assignments(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_move_assignment_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.move_assignment in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.move_assignment] = mock_rpc - - request = {} - client.move_assignment(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.move_assignment(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-
-def test_move_assignment_rest_required_fields(request_type=reservation.MoveAssignmentRequest):
- transport_class = transports.ReservationServiceRestTransport
-
- request_init = {}
- request_init["name"] = ""
- request = request_type(**request_init)
- pb_request = request_type.pb(request)
- jsonified_request = json.loads(json_format.MessageToJson(
- pb_request,
- use_integers_for_enums=False
- ))
-
- # verify fields with default values are dropped
-
- unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).move_assignment._get_unset_required_fields(jsonified_request)
- jsonified_request.update(unset_fields)
-
- # verify required fields with default values are now present
-
- jsonified_request["name"] = 'name_value'
-
- unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).move_assignment._get_unset_required_fields(jsonified_request)
- jsonified_request.update(unset_fields)
-
- # verify required fields with non-default values are left alone
- assert "name" in jsonified_request
- assert jsonified_request["name"] == 'name_value'
-
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='rest',
- )
- request = request_type(**request_init)
-
- # Designate an appropriate value for the returned response.
- return_value = reservation.Assignment()
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, 'request') as req:
- # We need to mock transcode() because providing default values
- # for required fields will fail the real version if the http_options
- # expect actual values for those fields.
- with mock.patch.object(path_template, 'transcode') as transcode:
- # A uri without fields and an empty body will force all the
- # request fields to show up in the query_params.
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "post",
- 'query_params': pb_request,
- }
- transcode_result['body'] = pb_request
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- # Convert return value to protobuf type
- return_value = reservation.Assignment.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.move_assignment(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_move_assignment_rest_unset_required_fields():
- transport = transports.ReservationServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.move_assignment._get_unset_required_fields({})
- assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-def test_move_assignment_rest_flattened():
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
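- # Patching the transport session's request() captures the outgoing
- # HTTP call so the assembled URL can be validated against the :move
- # path template below.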
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = reservation.Assignment() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/reservations/sample3/assignments/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - destination_id='destination_id_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = reservation.Assignment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.move_assignment(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/reservations/*/assignments/*}:move" % client.transport._host, args[1]) - - -def test_move_assignment_rest_flattened_error(transport: str = 'rest'): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.move_assignment( - reservation.MoveAssignmentRequest(), - name='name_value', - destination_id='destination_id_value', - ) - - -def test_update_assignment_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_assignment in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_assignment] = mock_rpc - - request = {} - client.update_assignment(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_assignment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_assignment_rest_flattened(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = reservation.Assignment() - - # get arguments that satisfy an http rule for this method - sample_request = {'assignment': {'name': 'projects/sample1/locations/sample2/reservations/sample3/assignments/sample4'}} - - # get truthy value for each flattened field - mock_args = dict( - assignment=reservation.Assignment(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = reservation.Assignment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_assignment(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{assignment.name=projects/*/locations/*/reservations/*/assignments/*}" % client.transport._host, args[1]) - - -def test_update_assignment_rest_flattened_error(transport: str = 'rest'): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_assignment( - reservation.UpdateAssignmentRequest(), - assignment=reservation.Assignment(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_get_bi_reservation_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_bi_reservation in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_bi_reservation] = mock_rpc - - request = {} - client.get_bi_reservation(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1
-
- client.get_bi_reservation(request)
-
- # Establish that a new wrapper was not created for this call
- assert wrapper_fn.call_count == 0
- assert mock_rpc.call_count == 2
-
-
-def test_get_bi_reservation_rest_required_fields(request_type=reservation.GetBiReservationRequest):
- transport_class = transports.ReservationServiceRestTransport
-
- request_init = {}
- request_init["name"] = ""
- request = request_type(**request_init)
- pb_request = request_type.pb(request)
- jsonified_request = json.loads(json_format.MessageToJson(
- pb_request,
- use_integers_for_enums=False
- ))
-
- # verify fields with default values are dropped
-
- unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_bi_reservation._get_unset_required_fields(jsonified_request)
- jsonified_request.update(unset_fields)
-
- # verify required fields with default values are now present
-
- jsonified_request["name"] = 'name_value'
-
- unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_bi_reservation._get_unset_required_fields(jsonified_request)
- jsonified_request.update(unset_fields)
-
- # verify required fields with non-default values are left alone
- assert "name" in jsonified_request
- assert jsonified_request["name"] == 'name_value'
-
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport='rest',
- )
- request = request_type(**request_init)
-
- # Designate an appropriate value for the returned response.
- return_value = reservation.BiReservation()
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, 'request') as req:
- # We need to mock transcode() because providing default values
- # for required fields will fail the real version if the http_options
- # expect actual values for those fields.
- with mock.patch.object(path_template, 'transcode') as transcode:
- # A uri without fields and an empty body will force all the
- # request fields to show up in the query_params.
- pb_request = request_type.pb(request)
- transcode_result = {
- 'uri': 'v1/sample_method',
- 'method': "get",
- 'query_params': pb_request,
- }
- transcode.return_value = transcode_result
-
- response_value = Response()
- response_value.status_code = 200
-
- # Convert return value to protobuf type
- return_value = reservation.BiReservation.pb(return_value)
- json_return_value = json_format.MessageToJson(return_value)
-
- response_value._content = json_return_value.encode('UTF-8')
- req.return_value = response_value
- req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
- response = client.get_bi_reservation(request)
-
- expected_params = [
- ('$alt', 'json;enum-encoding=int')
- ]
- actual_params = req.call_args.kwargs['params']
- assert expected_params == actual_params
-
-
-def test_get_bi_reservation_rest_unset_required_fields():
- transport = transports.ReservationServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
- unset_fields = transport.get_bi_reservation._get_unset_required_fields({})
- assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-def test_get_bi_reservation_rest_flattened():
- client = ReservationServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="rest",
- )
-
- # Mock the http request call within the method and fake a response.
- with mock.patch.object(type(client.transport._session), 'request') as req:
- # Designate an appropriate value for the returned response.
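- # The BI reservation is addressed via a fixed 'biReservation' path
- # segment (a per-location singleton) rather than a resource id, as the
- # sample_request below shows.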
- return_value = reservation.BiReservation() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/biReservation'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = reservation.BiReservation.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_bi_reservation(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/biReservation}" % client.transport._host, args[1]) - - -def test_get_bi_reservation_rest_flattened_error(transport: str = 'rest'): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_bi_reservation( - reservation.GetBiReservationRequest(), - name='name_value', - ) - - -def test_update_bi_reservation_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_bi_reservation in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_bi_reservation] = mock_rpc - - request = {} - client.update_bi_reservation(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_bi_reservation(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_bi_reservation_rest_flattened(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = reservation.BiReservation() - - # get arguments that satisfy an http rule for this method - sample_request = {'bi_reservation': {'name': 'projects/sample1/locations/sample2/biReservation'}} - - # get truthy value for each flattened field - mock_args = dict( - bi_reservation=reservation.BiReservation(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = reservation.BiReservation.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_bi_reservation(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{bi_reservation.name=projects/*/locations/*/biReservation}" % client.transport._host, args[1]) - - -def test_update_bi_reservation_rest_flattened_error(transport: str = 'rest'): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_bi_reservation( - reservation.UpdateBiReservationRequest(), - bi_reservation=reservation.BiReservation(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.ReservationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.ReservationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ReservationServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.ReservationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ReservationServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ReservationServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. 
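- # (A transport instance presumably already carries its own scopes, so
- # a client-level override would be ambiguous and is rejected with
- # ValueError, as asserted below.)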
- transport = transports.ReservationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ReservationServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.ReservationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = ReservationServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.ReservationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.ReservationServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.ReservationServiceGrpcTransport, - transports.ReservationServiceGrpcAsyncIOTransport, - transports.ReservationServiceRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = ReservationServiceClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_reservation_empty_call_grpc(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_reservation), - '__call__') as call: - call.return_value = gcbr_reservation.Reservation() - client.create_reservation(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcbr_reservation.CreateReservationRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_reservations_empty_call_grpc(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_reservations), - '__call__') as call: - call.return_value = reservation.ListReservationsResponse() - client.list_reservations(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.ListReservationsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_get_reservation_empty_call_grpc(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_reservation), - '__call__') as call: - call.return_value = reservation.Reservation() - client.get_reservation(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.GetReservationRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_reservation_empty_call_grpc(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_reservation), - '__call__') as call: - call.return_value = None - client.delete_reservation(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.DeleteReservationRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_reservation_empty_call_grpc(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_reservation), - '__call__') as call: - call.return_value = gcbr_reservation.Reservation() - client.update_reservation(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcbr_reservation.UpdateReservationRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_failover_reservation_empty_call_grpc(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.failover_reservation), - '__call__') as call: - call.return_value = reservation.Reservation() - client.failover_reservation(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.FailoverReservationRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_capacity_commitment_empty_call_grpc(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_capacity_commitment), - '__call__') as call: - call.return_value = reservation.CapacityCommitment() - client.create_capacity_commitment(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.CreateCapacityCommitmentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_capacity_commitments_empty_call_grpc(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_capacity_commitments), - '__call__') as call: - call.return_value = reservation.ListCapacityCommitmentsResponse() - client.list_capacity_commitments(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.ListCapacityCommitmentsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_capacity_commitment_empty_call_grpc(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_capacity_commitment), - '__call__') as call: - call.return_value = reservation.CapacityCommitment() - client.get_capacity_commitment(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.GetCapacityCommitmentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_capacity_commitment_empty_call_grpc(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_capacity_commitment), - '__call__') as call: - call.return_value = None - client.delete_capacity_commitment(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.DeleteCapacityCommitmentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_capacity_commitment_empty_call_grpc(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_capacity_commitment), - '__call__') as call: - call.return_value = reservation.CapacityCommitment() - client.update_capacity_commitment(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.UpdateCapacityCommitmentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_split_capacity_commitment_empty_call_grpc(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.split_capacity_commitment), - '__call__') as call: - call.return_value = reservation.SplitCapacityCommitmentResponse() - client.split_capacity_commitment(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.SplitCapacityCommitmentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_merge_capacity_commitments_empty_call_grpc(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.merge_capacity_commitments), - '__call__') as call: - call.return_value = reservation.CapacityCommitment() - client.merge_capacity_commitments(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.MergeCapacityCommitmentsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_assignment_empty_call_grpc(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_assignment), - '__call__') as call: - call.return_value = reservation.Assignment() - client.create_assignment(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.CreateAssignmentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_assignments_empty_call_grpc(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_assignments), - '__call__') as call: - call.return_value = reservation.ListAssignmentsResponse() - client.list_assignments(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.ListAssignmentsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_assignment_empty_call_grpc(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_assignment), - '__call__') as call: - call.return_value = None - client.delete_assignment(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.DeleteAssignmentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_search_assignments_empty_call_grpc(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.search_assignments), - '__call__') as call: - call.return_value = reservation.SearchAssignmentsResponse() - client.search_assignments(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.SearchAssignmentsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_search_all_assignments_empty_call_grpc(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.search_all_assignments), - '__call__') as call: - call.return_value = reservation.SearchAllAssignmentsResponse() - client.search_all_assignments(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.SearchAllAssignmentsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_move_assignment_empty_call_grpc(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.move_assignment), - '__call__') as call: - call.return_value = reservation.Assignment() - client.move_assignment(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.MoveAssignmentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_assignment_empty_call_grpc(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_assignment), - '__call__') as call: - call.return_value = reservation.Assignment() - client.update_assignment(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.UpdateAssignmentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_bi_reservation_empty_call_grpc(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
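- # Patching '__call__' on the type of the transport method intercepts
- # the underlying invocation, letting the test confirm that a default
- # GetBiReservationRequest is synthesized when request=None.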
- with mock.patch.object( - type(client.transport.get_bi_reservation), - '__call__') as call: - call.return_value = reservation.BiReservation() - client.get_bi_reservation(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.GetBiReservationRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_bi_reservation_empty_call_grpc(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_bi_reservation), - '__call__') as call: - call.return_value = reservation.BiReservation() - client.update_bi_reservation(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.UpdateBiReservationRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = ReservationServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_reservation_empty_call_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_reservation), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcbr_reservation.Reservation( - name='name_value', - slot_capacity=1391, - ignore_idle_slots=True, - concurrency=1195, - multi_region_auxiliary=True, - edition=gcbr_reservation.Edition.STANDARD, - primary_location='primary_location_value', - secondary_location='secondary_location_value', - original_primary_location='original_primary_location_value', - )) - await client.create_reservation(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcbr_reservation.CreateReservationRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_reservations_empty_call_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_reservations), - '__call__') as call: - # Designate an appropriate return value for the call. 
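- # FakeUnaryUnaryCall wraps the response in an awaitable that mimics a
- # grpc.aio call object, so `await client.list_reservations(...)` resolves
- # to the canned message.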
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.ListReservationsResponse( - next_page_token='next_page_token_value', - )) - await client.list_reservations(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.ListReservationsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_reservation_empty_call_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_reservation), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.Reservation( - name='name_value', - slot_capacity=1391, - ignore_idle_slots=True, - concurrency=1195, - multi_region_auxiliary=True, - edition=reservation.Edition.STANDARD, - primary_location='primary_location_value', - secondary_location='secondary_location_value', - original_primary_location='original_primary_location_value', - )) - await client.get_reservation(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.GetReservationRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_reservation_empty_call_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_reservation), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_reservation(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.DeleteReservationRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_reservation_empty_call_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_reservation), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcbr_reservation.Reservation( - name='name_value', - slot_capacity=1391, - ignore_idle_slots=True, - concurrency=1195, - multi_region_auxiliary=True, - edition=gcbr_reservation.Edition.STANDARD, - primary_location='primary_location_value', - secondary_location='secondary_location_value', - original_primary_location='original_primary_location_value', - )) - await client.update_reservation(request=None) - - # Establish that the underlying stub method was called. 
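- # Proto message equality is field-by-field, so a freshly constructed default
- # UpdateReservationRequest compares equal to the one the client built.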
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcbr_reservation.UpdateReservationRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_failover_reservation_empty_call_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.failover_reservation), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.Reservation( - name='name_value', - slot_capacity=1391, - ignore_idle_slots=True, - concurrency=1195, - multi_region_auxiliary=True, - edition=reservation.Edition.STANDARD, - primary_location='primary_location_value', - secondary_location='secondary_location_value', - original_primary_location='original_primary_location_value', - )) - await client.failover_reservation(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.FailoverReservationRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_capacity_commitment_empty_call_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_capacity_commitment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.CapacityCommitment( - name='name_value', - slot_count=1098, - plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, - state=reservation.CapacityCommitment.State.PENDING, - renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, - multi_region_auxiliary=True, - edition=reservation.Edition.STANDARD, - is_flat_rate=True, - )) - await client.create_capacity_commitment(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.CreateCapacityCommitmentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_capacity_commitments_empty_call_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_capacity_commitments), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.ListCapacityCommitmentsResponse( - next_page_token='next_page_token_value', - )) - await client.list_capacity_commitments(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.ListCapacityCommitmentsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_capacity_commitment_empty_call_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_capacity_commitment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.CapacityCommitment( - name='name_value', - slot_count=1098, - plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, - state=reservation.CapacityCommitment.State.PENDING, - renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, - multi_region_auxiliary=True, - edition=reservation.Edition.STANDARD, - is_flat_rate=True, - )) - await client.get_capacity_commitment(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.GetCapacityCommitmentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_capacity_commitment_empty_call_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_capacity_commitment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_capacity_commitment(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.DeleteCapacityCommitmentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_capacity_commitment_empty_call_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_capacity_commitment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.CapacityCommitment( - name='name_value', - slot_count=1098, - plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, - state=reservation.CapacityCommitment.State.PENDING, - renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, - multi_region_auxiliary=True, - edition=reservation.Edition.STANDARD, - is_flat_rate=True, - )) - await client.update_capacity_commitment(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.UpdateCapacityCommitmentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_split_capacity_commitment_empty_call_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.split_capacity_commitment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.SplitCapacityCommitmentResponse( - )) - await client.split_capacity_commitment(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.SplitCapacityCommitmentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_merge_capacity_commitments_empty_call_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.merge_capacity_commitments), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.CapacityCommitment( - name='name_value', - slot_count=1098, - plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, - state=reservation.CapacityCommitment.State.PENDING, - renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, - multi_region_auxiliary=True, - edition=reservation.Edition.STANDARD, - is_flat_rate=True, - )) - await client.merge_capacity_commitments(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.MergeCapacityCommitmentsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_assignment_empty_call_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_assignment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.Assignment( - name='name_value', - assignee='assignee_value', - job_type=reservation.Assignment.JobType.PIPELINE, - state=reservation.Assignment.State.PENDING, - enable_gemini_in_bigquery=True, - )) - await client.create_assignment(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.CreateAssignmentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_list_assignments_empty_call_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_assignments), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.ListAssignmentsResponse( - next_page_token='next_page_token_value', - )) - await client.list_assignments(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.ListAssignmentsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_assignment_empty_call_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_assignment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_assignment(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.DeleteAssignmentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_search_assignments_empty_call_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.search_assignments), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.SearchAssignmentsResponse( - next_page_token='next_page_token_value', - )) - await client.search_assignments(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.SearchAssignmentsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_search_all_assignments_empty_call_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.search_all_assignments), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.SearchAllAssignmentsResponse( - next_page_token='next_page_token_value', - )) - await client.search_all_assignments(request=None) - - # Establish that the underlying stub method was called. 
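- # assert_called() only requires at least one invocation (unlike
- # assert_called_once()); the first recorded call is the one inspected below.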
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.SearchAllAssignmentsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_move_assignment_empty_call_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.move_assignment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.Assignment( - name='name_value', - assignee='assignee_value', - job_type=reservation.Assignment.JobType.PIPELINE, - state=reservation.Assignment.State.PENDING, - enable_gemini_in_bigquery=True, - )) - await client.move_assignment(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.MoveAssignmentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_assignment_empty_call_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_assignment), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.Assignment( - name='name_value', - assignee='assignee_value', - job_type=reservation.Assignment.JobType.PIPELINE, - state=reservation.Assignment.State.PENDING, - enable_gemini_in_bigquery=True, - )) - await client.update_assignment(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.UpdateAssignmentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_bi_reservation_empty_call_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_bi_reservation), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.BiReservation( - name='name_value', - size=443, - )) - await client.get_bi_reservation(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.GetBiReservationRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_update_bi_reservation_empty_call_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_bi_reservation), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(reservation.BiReservation( - name='name_value', - size=443, - )) - await client.update_bi_reservation(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.UpdateBiReservationRequest() - - assert args[0] == request_msg - - -def test_transport_kind_rest(): - transport = ReservationServiceClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" - - -def test_create_reservation_rest_bad_request(request_type=gcbr_reservation.CreateReservationRequest): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_reservation(request) - - -@pytest.mark.parametrize("request_type", [ - gcbr_reservation.CreateReservationRequest, - dict, -]) -def test_create_reservation_rest_call_success(request_type): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["reservation"] = {'name': 'name_value', 'slot_capacity': 1391, 'ignore_idle_slots': True, 'autoscale': {'current_slots': 1431, 'max_slots': 986}, 'concurrency': 1195, 'creation_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'multi_region_auxiliary': True, 'edition': 1, 'primary_location': 'primary_location_value', 'secondary_location': 'secondary_location_value', 'original_primary_location': 'original_primary_location_value', 'replication_status': {'error': {'code': 411, 'message': 'message_value', 'details': [{'type_url': 'type.googleapis.com/google.protobuf.Duration', 'value': b'\x08\x0c\x10\xdb\x07'}]}, 'last_error_time': {}, 'last_replication_time': {}}} - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = gcbr_reservation.CreateReservationRequest.meta.fields["reservation"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["reservation"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["reservation"][field])): - del request_init["reservation"][field][i][subfield] - else: - del request_init["reservation"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
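- # The REST transport parses the HTTP body as JSON, so the canned message is
- # converted to its raw protobuf form and serialized with json_format below.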
- return_value = gcbr_reservation.Reservation( - name='name_value', - slot_capacity=1391, - ignore_idle_slots=True, - concurrency=1195, - multi_region_auxiliary=True, - edition=gcbr_reservation.Edition.STANDARD, - primary_location='primary_location_value', - secondary_location='secondary_location_value', - original_primary_location='original_primary_location_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gcbr_reservation.Reservation.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_reservation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, gcbr_reservation.Reservation) - assert response.name == 'name_value' - assert response.slot_capacity == 1391 - assert response.ignore_idle_slots is True - assert response.concurrency == 1195 - assert response.multi_region_auxiliary is True - assert response.edition == gcbr_reservation.Edition.STANDARD - assert response.primary_location == 'primary_location_value' - assert response.secondary_location == 'secondary_location_value' - assert response.original_primary_location == 'original_primary_location_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_reservation_rest_interceptors(null_interceptor): - transport = transports.ReservationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ReservationServiceRestInterceptor(), - ) - client = ReservationServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "post_create_reservation") as post, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "post_create_reservation_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "pre_create_reservation") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = gcbr_reservation.CreateReservationRequest.pb(gcbr_reservation.CreateReservationRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = gcbr_reservation.Reservation.to_json(gcbr_reservation.Reservation()) - req.return_value.content = return_value - - request = gcbr_reservation.CreateReservationRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = gcbr_reservation.Reservation() - post_with_metadata.return_value = gcbr_reservation.Reservation(), metadata - - client.create_reservation(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_reservations_rest_bad_request(request_type=reservation.ListReservationsRequest): - client = 
ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_reservations(request) - - -@pytest.mark.parametrize("request_type", [ - reservation.ListReservationsRequest, - dict, -]) -def test_list_reservations_rest_call_success(request_type): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = reservation.ListReservationsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = reservation.ListReservationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_reservations(request) - - # Establish that the response is the type that we expect. 
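- # List RPCs return a pager that wraps the raw response; attribute access is
- # proxied through to it, and iterating the pager would issue further page
- # requests.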
- assert isinstance(response, pagers.ListReservationsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_reservations_rest_interceptors(null_interceptor): - transport = transports.ReservationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ReservationServiceRestInterceptor(), - ) - client = ReservationServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "post_list_reservations") as post, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "post_list_reservations_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "pre_list_reservations") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = reservation.ListReservationsRequest.pb(reservation.ListReservationsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = reservation.ListReservationsResponse.to_json(reservation.ListReservationsResponse()) - req.return_value.content = return_value - - request = reservation.ListReservationsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = reservation.ListReservationsResponse() - post_with_metadata.return_value = reservation.ListReservationsResponse(), metadata - - client.list_reservations(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_reservation_rest_bad_request(request_type=reservation.GetReservationRequest): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/reservations/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_reservation(request) - - -@pytest.mark.parametrize("request_type", [ - reservation.GetReservationRequest, - dict, -]) -def test_get_reservation_rest_call_success(request_type): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/reservations/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
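- # client.transport._session is the authorized session used for every REST
- # call; patching its class's `request` method intercepts the HTTP round trip.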
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = reservation.Reservation( - name='name_value', - slot_capacity=1391, - ignore_idle_slots=True, - concurrency=1195, - multi_region_auxiliary=True, - edition=reservation.Edition.STANDARD, - primary_location='primary_location_value', - secondary_location='secondary_location_value', - original_primary_location='original_primary_location_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = reservation.Reservation.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_reservation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, reservation.Reservation) - assert response.name == 'name_value' - assert response.slot_capacity == 1391 - assert response.ignore_idle_slots is True - assert response.concurrency == 1195 - assert response.multi_region_auxiliary is True - assert response.edition == reservation.Edition.STANDARD - assert response.primary_location == 'primary_location_value' - assert response.secondary_location == 'secondary_location_value' - assert response.original_primary_location == 'original_primary_location_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_reservation_rest_interceptors(null_interceptor): - transport = transports.ReservationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ReservationServiceRestInterceptor(), - ) - client = ReservationServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "post_get_reservation") as post, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "post_get_reservation_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "pre_get_reservation") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = reservation.GetReservationRequest.pb(reservation.GetReservationRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = reservation.Reservation.to_json(reservation.Reservation()) - req.return_value.content = return_value - - request = reservation.GetReservationRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = reservation.Reservation() - post_with_metadata.return_value = reservation.Reservation(), metadata - - client.get_reservation(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def 
test_delete_reservation_rest_bad_request(request_type=reservation.DeleteReservationRequest): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/reservations/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_reservation(request) - - -@pytest.mark.parametrize("request_type", [ - reservation.DeleteReservationRequest, - dict, -]) -def test_delete_reservation_rest_call_success(request_type): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/reservations/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_reservation(request) - - # Establish that the response is the type that we expect. 
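- # DeleteReservation returns google.protobuf.Empty, which the client surfaces
- # as None; a 200 status with an empty body is all the transport expects.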
- assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_reservation_rest_interceptors(null_interceptor): - transport = transports.ReservationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ReservationServiceRestInterceptor(), - ) - client = ReservationServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "pre_delete_reservation") as pre: - pre.assert_not_called() - pb_message = reservation.DeleteReservationRequest.pb(reservation.DeleteReservationRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = reservation.DeleteReservationRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_reservation(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_update_reservation_rest_bad_request(request_type=gcbr_reservation.UpdateReservationRequest): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'reservation': {'name': 'projects/sample1/locations/sample2/reservations/sample3'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
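- # api_core maps HTTP 400 responses to core_exceptions.BadRequest, so
- # pytest.raises verifies the client re-raises it rather than swallowing it.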
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_reservation(request) - - -@pytest.mark.parametrize("request_type", [ - gcbr_reservation.UpdateReservationRequest, - dict, -]) -def test_update_reservation_rest_call_success(request_type): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'reservation': {'name': 'projects/sample1/locations/sample2/reservations/sample3'}} - request_init["reservation"] = {'name': 'projects/sample1/locations/sample2/reservations/sample3', 'slot_capacity': 1391, 'ignore_idle_slots': True, 'autoscale': {'current_slots': 1431, 'max_slots': 986}, 'concurrency': 1195, 'creation_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'multi_region_auxiliary': True, 'edition': 1, 'primary_location': 'primary_location_value', 'secondary_location': 'secondary_location_value', 'original_primary_location': 'original_primary_location_value', 'replication_status': {'error': {'code': 411, 'message': 'message_value', 'details': [{'type_url': 'type.googleapis.com/google.protobuf.Duration', 'value': b'\x08\x0c\x10\xdb\x07'}]}, 'last_error_time': {}, 'last_replication_time': {}}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = gcbr_reservation.UpdateReservationRequest.meta.fields["reservation"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
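- # proto-plus messages expose their schema via meta.fields, while raw
- # protobuf classes carry a DESCRIPTOR; the hasattr check below tells the
- # two apart.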
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["reservation"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["reservation"][field])): - del request_init["reservation"][field][i][subfield] - else: - del request_init["reservation"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = gcbr_reservation.Reservation( - name='name_value', - slot_capacity=1391, - ignore_idle_slots=True, - concurrency=1195, - multi_region_auxiliary=True, - edition=gcbr_reservation.Edition.STANDARD, - primary_location='primary_location_value', - secondary_location='secondary_location_value', - original_primary_location='original_primary_location_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = gcbr_reservation.Reservation.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_reservation(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, gcbr_reservation.Reservation) - assert response.name == 'name_value' - assert response.slot_capacity == 1391 - assert response.ignore_idle_slots is True - assert response.concurrency == 1195 - assert response.multi_region_auxiliary is True - assert response.edition == gcbr_reservation.Edition.STANDARD - assert response.primary_location == 'primary_location_value' - assert response.secondary_location == 'secondary_location_value' - assert response.original_primary_location == 'original_primary_location_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_reservation_rest_interceptors(null_interceptor): - transport = transports.ReservationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ReservationServiceRestInterceptor(), - ) - client = ReservationServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "post_update_reservation") as post, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "post_update_reservation_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "pre_update_reservation") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = gcbr_reservation.UpdateReservationRequest.pb(gcbr_reservation.UpdateReservationRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = gcbr_reservation.Reservation.to_json(gcbr_reservation.Reservation()) - req.return_value.content = return_value - - request = gcbr_reservation.UpdateReservationRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = gcbr_reservation.Reservation() - post_with_metadata.return_value = gcbr_reservation.Reservation(), metadata - - client.update_reservation(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_failover_reservation_rest_bad_request(request_type=reservation.FailoverReservationRequest): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/reservations/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
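- # Session here is the requests class itself; AuthorizedSession.request
- # delegates to requests.Session.request under the hood, so a class-level
- # patch should reach the session the transport already built.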
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.failover_reservation(request) - - -@pytest.mark.parametrize("request_type", [ - reservation.FailoverReservationRequest, - dict, -]) -def test_failover_reservation_rest_call_success(request_type): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/reservations/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = reservation.Reservation( - name='name_value', - slot_capacity=1391, - ignore_idle_slots=True, - concurrency=1195, - multi_region_auxiliary=True, - edition=reservation.Edition.STANDARD, - primary_location='primary_location_value', - secondary_location='secondary_location_value', - original_primary_location='original_primary_location_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = reservation.Reservation.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.failover_reservation(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, reservation.Reservation) - assert response.name == 'name_value' - assert response.slot_capacity == 1391 - assert response.ignore_idle_slots is True - assert response.concurrency == 1195 - assert response.multi_region_auxiliary is True - assert response.edition == reservation.Edition.STANDARD - assert response.primary_location == 'primary_location_value' - assert response.secondary_location == 'secondary_location_value' - assert response.original_primary_location == 'original_primary_location_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_failover_reservation_rest_interceptors(null_interceptor): - transport = transports.ReservationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ReservationServiceRestInterceptor(), - ) - client = ReservationServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "post_failover_reservation") as post, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "post_failover_reservation_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "pre_failover_reservation") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = reservation.FailoverReservationRequest.pb(reservation.FailoverReservationRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = reservation.Reservation.to_json(reservation.Reservation()) - req.return_value.content = return_value - - request = reservation.FailoverReservationRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = reservation.Reservation() - post_with_metadata.return_value = reservation.Reservation(), metadata - - client.failover_reservation(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_capacity_commitment_rest_bad_request(request_type=reservation.CreateCapacityCommitmentRequest): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
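# [editor's note] The *_rest_interceptors tests above (and their siblings
# below) verify the three hook points a ReservationServiceRestInterceptor
# subclass can override: `pre_<method>` may rewrite the (request, metadata)
# pair before transcoding, `post_<method>` may rewrite the decoded response,
# and `post_<method>_with_metadata` sees the response together with the
# metadata. Each hook is patched, primed with a passthrough return value, and
# asserted to fire exactly once per call; the null_interceptor parametrization
# additionally checks that the transport tolerates having no interceptor
# installed. A user-side interceptor would look roughly like this
# (hypothetical subclass, not part of the generated code):
#
#     class LoggingInterceptor(transports.ReservationServiceRestInterceptor):
#         def pre_failover_reservation(self, request, metadata):
#             print("about to fail over:", request.name)
#             return request, metadata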
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_capacity_commitment(request) - - -@pytest.mark.parametrize("request_type", [ - reservation.CreateCapacityCommitmentRequest, - dict, -]) -def test_create_capacity_commitment_rest_call_success(request_type): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["capacity_commitment"] = {'name': 'name_value', 'slot_count': 1098, 'plan': 3, 'state': 1, 'commitment_start_time': {'seconds': 751, 'nanos': 543}, 'commitment_end_time': {}, 'failure_status': {'code': 411, 'message': 'message_value', 'details': [{'type_url': 'type.googleapis.com/google.protobuf.Duration', 'value': b'\x08\x0c\x10\xdb\x07'}]}, 'renewal_plan': 3, 'multi_region_auxiliary': True, 'edition': 1, 'is_flat_rate': True} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = reservation.CreateCapacityCommitmentRequest.meta.fields["capacity_commitment"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
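# [editor's note] The field-pruning block this helper belongs to exists
# because the protobuf/proto-plus runtime resolved at test time may be older
# than the one the test was generated against (see the
# gapic-generator-python issue linked above). `get_message_fields` normalizes
# the two message flavors: proto-plus classes expose their schema via
# `.meta.fields`, while vanilla `*_pb2` classes expose `DESCRIPTOR.fields`,
# and the `hasattr(field.message, "DESCRIPTOR")` probe below is what tells
# them apart.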
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["capacity_commitment"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["capacity_commitment"][field])): - del request_init["capacity_commitment"][field][i][subfield] - else: - del request_init["capacity_commitment"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = reservation.CapacityCommitment( - name='name_value', - slot_count=1098, - plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, - state=reservation.CapacityCommitment.State.PENDING, - renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, - multi_region_auxiliary=True, - edition=reservation.Edition.STANDARD, - is_flat_rate=True, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = reservation.CapacityCommitment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_capacity_commitment(request) - - # Establish that the response is the type that we expect. 
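# [editor's note] Note that the dict-based request above spells enum fields as
# bare ints ('plan': 3, 'state': 1, 'edition': 1): proto-plus coerces
# integers to the matching enum members at construction time, so the dict
# mirrors what a decoded JSON payload would contain. The asserts below compare
# against symbolic members instead, which is the reverse mapping, e.g.:
#
#     assert reservation.CapacityCommitment(plan=3).plan == \
#         reservation.CapacityCommitment.CommitmentPlan(3)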
- assert isinstance(response, reservation.CapacityCommitment) - assert response.name == 'name_value' - assert response.slot_count == 1098 - assert response.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX - assert response.state == reservation.CapacityCommitment.State.PENDING - assert response.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX - assert response.multi_region_auxiliary is True - assert response.edition == reservation.Edition.STANDARD - assert response.is_flat_rate is True - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_capacity_commitment_rest_interceptors(null_interceptor): - transport = transports.ReservationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ReservationServiceRestInterceptor(), - ) - client = ReservationServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "post_create_capacity_commitment") as post, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "post_create_capacity_commitment_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "pre_create_capacity_commitment") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = reservation.CreateCapacityCommitmentRequest.pb(reservation.CreateCapacityCommitmentRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = reservation.CapacityCommitment.to_json(reservation.CapacityCommitment()) - req.return_value.content = return_value - - request = reservation.CreateCapacityCommitmentRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = reservation.CapacityCommitment() - post_with_metadata.return_value = reservation.CapacityCommitment(), metadata - - client.create_capacity_commitment(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_capacity_commitments_rest_bad_request(request_type=reservation.ListCapacityCommitmentsRequest): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_capacity_commitments(request) - - -@pytest.mark.parametrize("request_type", [ - reservation.ListCapacityCommitmentsRequest, - dict, -]) -def test_list_capacity_commitments_rest_call_success(request_type): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = reservation.ListCapacityCommitmentsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = reservation.ListCapacityCommitmentsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_capacity_commitments(request) - - # Establish that the response is the type that we expect. 
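# [editor's note] For paginated RPCs the client does not return the raw
# List*Response; it wraps it in a pager (here ListCapacityCommitmentsPager)
# that proxies attributes such as next_page_token and fetches further pages
# lazily. Under this single-response mock only the first page exists, but in
# real use iteration drives additional requests, roughly:
#
#     for commitment in client.list_capacity_commitments(parent=parent):
#         ...  # the pager transparently follows next_page_token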
- assert isinstance(response, pagers.ListCapacityCommitmentsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_capacity_commitments_rest_interceptors(null_interceptor): - transport = transports.ReservationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ReservationServiceRestInterceptor(), - ) - client = ReservationServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "post_list_capacity_commitments") as post, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "post_list_capacity_commitments_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "pre_list_capacity_commitments") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = reservation.ListCapacityCommitmentsRequest.pb(reservation.ListCapacityCommitmentsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = reservation.ListCapacityCommitmentsResponse.to_json(reservation.ListCapacityCommitmentsResponse()) - req.return_value.content = return_value - - request = reservation.ListCapacityCommitmentsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = reservation.ListCapacityCommitmentsResponse() - post_with_metadata.return_value = reservation.ListCapacityCommitmentsResponse(), metadata - - client.list_capacity_commitments(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_capacity_commitment_rest_bad_request(request_type=reservation.GetCapacityCommitmentRequest): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/capacityCommitments/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_capacity_commitment(request) - - -@pytest.mark.parametrize("request_type", [ - reservation.GetCapacityCommitmentRequest, - dict, -]) -def test_get_capacity_commitment_rest_call_success(request_type): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/capacityCommitments/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = reservation.CapacityCommitment( - name='name_value', - slot_count=1098, - plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, - state=reservation.CapacityCommitment.State.PENDING, - renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, - multi_region_auxiliary=True, - edition=reservation.Edition.STANDARD, - is_flat_rate=True, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = reservation.CapacityCommitment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_capacity_commitment(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, reservation.CapacityCommitment) - assert response.name == 'name_value' - assert response.slot_count == 1098 - assert response.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX - assert response.state == reservation.CapacityCommitment.State.PENDING - assert response.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX - assert response.multi_region_auxiliary is True - assert response.edition == reservation.Edition.STANDARD - assert response.is_flat_rate is True - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_capacity_commitment_rest_interceptors(null_interceptor): - transport = transports.ReservationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ReservationServiceRestInterceptor(), - ) - client = ReservationServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "post_get_capacity_commitment") as post, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "post_get_capacity_commitment_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "pre_get_capacity_commitment") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = reservation.GetCapacityCommitmentRequest.pb(reservation.GetCapacityCommitmentRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = reservation.CapacityCommitment.to_json(reservation.CapacityCommitment()) - req.return_value.content = return_value - - request = reservation.GetCapacityCommitmentRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = reservation.CapacityCommitment() - post_with_metadata.return_value = reservation.CapacityCommitment(), metadata - - client.get_capacity_commitment(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_capacity_commitment_rest_bad_request(request_type=reservation.DeleteCapacityCommitmentRequest): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/capacityCommitments/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_capacity_commitment(request) - - -@pytest.mark.parametrize("request_type", [ - reservation.DeleteCapacityCommitmentRequest, - dict, -]) -def test_delete_capacity_commitment_rest_call_success(request_type): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/capacityCommitments/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_capacity_commitment(request) - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_capacity_commitment_rest_interceptors(null_interceptor): - transport = transports.ReservationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ReservationServiceRestInterceptor(), - ) - client = ReservationServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "pre_delete_capacity_commitment") as pre: - pre.assert_not_called() - pb_message = reservation.DeleteCapacityCommitmentRequest.pb(reservation.DeleteCapacityCommitmentRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = reservation.DeleteCapacityCommitmentRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_capacity_commitment(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_update_capacity_commitment_rest_bad_request(request_type=reservation.UpdateCapacityCommitmentRequest): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'capacity_commitment': {'name': 'projects/sample1/locations/sample2/capacityCommitments/sample3'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
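# [editor's note] The delete tests above illustrate the Empty-response case:
# the mocked body is an empty string, the client returns None, and the
# interceptor test wires up only `pre_delete_capacity_commitment`, since there
# is no response payload for a `post_*` hook to post-process.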
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_capacity_commitment(request) - - -@pytest.mark.parametrize("request_type", [ - reservation.UpdateCapacityCommitmentRequest, - dict, -]) -def test_update_capacity_commitment_rest_call_success(request_type): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'capacity_commitment': {'name': 'projects/sample1/locations/sample2/capacityCommitments/sample3'}} - request_init["capacity_commitment"] = {'name': 'projects/sample1/locations/sample2/capacityCommitments/sample3', 'slot_count': 1098, 'plan': 3, 'state': 1, 'commitment_start_time': {'seconds': 751, 'nanos': 543}, 'commitment_end_time': {}, 'failure_status': {'code': 411, 'message': 'message_value', 'details': [{'type_url': 'type.googleapis.com/google.protobuf.Duration', 'value': b'\x08\x0c\x10\xdb\x07'}]}, 'renewal_plan': 3, 'multi_region_auxiliary': True, 'edition': 1, 'is_flat_rate': True} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = reservation.UpdateCapacityCommitmentRequest.meta.fields["capacity_commitment"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
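# [editor's note] request_init above is built in two steps: the first dict
# supplies only {'capacity_commitment': {'name': ...}} so that URI transcoding
# can extract the path parameters, and the second assignment then replaces the
# whole field with a fully populated body. The name embedded in the body must
# remain a valid resource path; otherwise transcoding would find no matching
# route for the update call.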
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["capacity_commitment"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["capacity_commitment"][field])): - del request_init["capacity_commitment"][field][i][subfield] - else: - del request_init["capacity_commitment"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = reservation.CapacityCommitment( - name='name_value', - slot_count=1098, - plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, - state=reservation.CapacityCommitment.State.PENDING, - renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, - multi_region_auxiliary=True, - edition=reservation.Edition.STANDARD, - is_flat_rate=True, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = reservation.CapacityCommitment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_capacity_commitment(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, reservation.CapacityCommitment) - assert response.name == 'name_value' - assert response.slot_count == 1098 - assert response.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX - assert response.state == reservation.CapacityCommitment.State.PENDING - assert response.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX - assert response.multi_region_auxiliary is True - assert response.edition == reservation.Edition.STANDARD - assert response.is_flat_rate is True - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_capacity_commitment_rest_interceptors(null_interceptor): - transport = transports.ReservationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ReservationServiceRestInterceptor(), - ) - client = ReservationServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "post_update_capacity_commitment") as post, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "post_update_capacity_commitment_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "pre_update_capacity_commitment") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = reservation.UpdateCapacityCommitmentRequest.pb(reservation.UpdateCapacityCommitmentRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = reservation.CapacityCommitment.to_json(reservation.CapacityCommitment()) - req.return_value.content = return_value - - request = reservation.UpdateCapacityCommitmentRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = reservation.CapacityCommitment() - post_with_metadata.return_value = reservation.CapacityCommitment(), metadata - - client.update_capacity_commitment(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_split_capacity_commitment_rest_bad_request(request_type=reservation.SplitCapacityCommitmentRequest): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/capacityCommitments/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.split_capacity_commitment(request) - - -@pytest.mark.parametrize("request_type", [ - reservation.SplitCapacityCommitmentRequest, - dict, -]) -def test_split_capacity_commitment_rest_call_success(request_type): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/capacityCommitments/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = reservation.SplitCapacityCommitmentResponse( - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = reservation.SplitCapacityCommitmentResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.split_capacity_commitment(request) - - # Establish that the response is the type that we expect. 
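# [editor's note] SplitCapacityCommitmentResponse is constructed with no
# fields set above, so the only meaningful check below is the isinstance
# assertion. Per the public API surface the real response carries the two
# halves of the split (a `first` and a `second` CapacityCommitment), but
# populating nested messages here would not add coverage to the JSON
# round-trip under test.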
- assert isinstance(response, reservation.SplitCapacityCommitmentResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_split_capacity_commitment_rest_interceptors(null_interceptor): - transport = transports.ReservationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ReservationServiceRestInterceptor(), - ) - client = ReservationServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "post_split_capacity_commitment") as post, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "post_split_capacity_commitment_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "pre_split_capacity_commitment") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = reservation.SplitCapacityCommitmentRequest.pb(reservation.SplitCapacityCommitmentRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = reservation.SplitCapacityCommitmentResponse.to_json(reservation.SplitCapacityCommitmentResponse()) - req.return_value.content = return_value - - request = reservation.SplitCapacityCommitmentRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = reservation.SplitCapacityCommitmentResponse() - post_with_metadata.return_value = reservation.SplitCapacityCommitmentResponse(), metadata - - client.split_capacity_commitment(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_merge_capacity_commitments_rest_bad_request(request_type=reservation.MergeCapacityCommitmentsRequest): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.merge_capacity_commitments(request) - - -@pytest.mark.parametrize("request_type", [ - reservation.MergeCapacityCommitmentsRequest, - dict, -]) -def test_merge_capacity_commitments_rest_call_success(request_type): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = reservation.CapacityCommitment( - name='name_value', - slot_count=1098, - plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, - state=reservation.CapacityCommitment.State.PENDING, - renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, - multi_region_auxiliary=True, - edition=reservation.Edition.STANDARD, - is_flat_rate=True, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = reservation.CapacityCommitment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.merge_capacity_commitments(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, reservation.CapacityCommitment) - assert response.name == 'name_value' - assert response.slot_count == 1098 - assert response.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX - assert response.state == reservation.CapacityCommitment.State.PENDING - assert response.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX - assert response.multi_region_auxiliary is True - assert response.edition == reservation.Edition.STANDARD - assert response.is_flat_rate is True - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_merge_capacity_commitments_rest_interceptors(null_interceptor): - transport = transports.ReservationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ReservationServiceRestInterceptor(), - ) - client = ReservationServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "post_merge_capacity_commitments") as post, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "post_merge_capacity_commitments_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "pre_merge_capacity_commitments") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = reservation.MergeCapacityCommitmentsRequest.pb(reservation.MergeCapacityCommitmentsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = reservation.CapacityCommitment.to_json(reservation.CapacityCommitment()) - req.return_value.content = return_value - - request = reservation.MergeCapacityCommitmentsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = reservation.CapacityCommitment() - post_with_metadata.return_value = reservation.CapacityCommitment(), metadata - - client.merge_capacity_commitments(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_assignment_rest_bad_request(request_type=reservation.CreateAssignmentRequest): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/reservations/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
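# [editor's note] The [("key", "val"), ("cephalopod", "squid")] tuples used
# throughout the interceptor tests are arbitrary placeholder metadata in the
# gRPC key/value style; the tests only care that the pairs flow into the pre
# hook and back out of post_*_with_metadata unchanged, not about their
# content.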
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_assignment(request) - - -@pytest.mark.parametrize("request_type", [ - reservation.CreateAssignmentRequest, - dict, -]) -def test_create_assignment_rest_call_success(request_type): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/reservations/sample3'} - request_init["assignment"] = {'name': 'name_value', 'assignee': 'assignee_value', 'job_type': 1, 'state': 1, 'enable_gemini_in_bigquery': True} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = reservation.CreateAssignmentRequest.meta.fields["assignment"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["assignment"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["assignment"][field])): - del 
request_init["assignment"][field][i][subfield] - else: - del request_init["assignment"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = reservation.Assignment( - name='name_value', - assignee='assignee_value', - job_type=reservation.Assignment.JobType.PIPELINE, - state=reservation.Assignment.State.PENDING, - enable_gemini_in_bigquery=True, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = reservation.Assignment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_assignment(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, reservation.Assignment) - assert response.name == 'name_value' - assert response.assignee == 'assignee_value' - assert response.job_type == reservation.Assignment.JobType.PIPELINE - assert response.state == reservation.Assignment.State.PENDING - assert response.enable_gemini_in_bigquery is True - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_assignment_rest_interceptors(null_interceptor): - transport = transports.ReservationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ReservationServiceRestInterceptor(), - ) - client = ReservationServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "post_create_assignment") as post, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "post_create_assignment_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "pre_create_assignment") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = reservation.CreateAssignmentRequest.pb(reservation.CreateAssignmentRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = reservation.Assignment.to_json(reservation.Assignment()) - req.return_value.content = return_value - - request = reservation.CreateAssignmentRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = reservation.Assignment() - post_with_metadata.return_value = reservation.Assignment(), metadata - - client.create_assignment(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_assignments_rest_bad_request(request_type=reservation.ListAssignmentsRequest): - client = ReservationServiceClient( - 
credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/reservations/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_assignments(request) - - -@pytest.mark.parametrize("request_type", [ - reservation.ListAssignmentsRequest, - dict, -]) -def test_list_assignments_rest_call_success(request_type): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2/reservations/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = reservation.ListAssignmentsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = reservation.ListAssignmentsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_assignments(request) - - # Establish that the response is the type that we expect. 
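# [editor's note] Every mocked response in this module also stubs
# `req.return_value.headers`; a plausible reading is that the generated
# transport reads response headers on the success path (e.g. for debug
# logging), so a bare Mock without them would not faithfully stand in for a
# requests.Response. The header values themselves are never asserted on.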
- assert isinstance(response, pagers.ListAssignmentsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_assignments_rest_interceptors(null_interceptor): - transport = transports.ReservationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ReservationServiceRestInterceptor(), - ) - client = ReservationServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "post_list_assignments") as post, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "post_list_assignments_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "pre_list_assignments") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = reservation.ListAssignmentsRequest.pb(reservation.ListAssignmentsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = reservation.ListAssignmentsResponse.to_json(reservation.ListAssignmentsResponse()) - req.return_value.content = return_value - - request = reservation.ListAssignmentsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = reservation.ListAssignmentsResponse() - post_with_metadata.return_value = reservation.ListAssignmentsResponse(), metadata - - client.list_assignments(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_assignment_rest_bad_request(request_type=reservation.DeleteAssignmentRequest): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/reservations/sample3/assignments/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_assignment(request) - - -@pytest.mark.parametrize("request_type", [ - reservation.DeleteAssignmentRequest, - dict, -]) -def test_delete_assignment_rest_call_success(request_type): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/reservations/sample3/assignments/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_assignment(request) - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_assignment_rest_interceptors(null_interceptor): - transport = transports.ReservationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ReservationServiceRestInterceptor(), - ) - client = ReservationServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "pre_delete_assignment") as pre: - pre.assert_not_called() - pb_message = reservation.DeleteAssignmentRequest.pb(reservation.DeleteAssignmentRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = reservation.DeleteAssignmentRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_assignment(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_search_assignments_rest_bad_request(request_type=reservation.SearchAssignmentsRequest): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.search_assignments(request) - - -@pytest.mark.parametrize("request_type", [ - reservation.SearchAssignmentsRequest, - dict, -]) -def test_search_assignments_rest_call_success(request_type): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = reservation.SearchAssignmentsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = reservation.SearchAssignmentsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.search_assignments(request) - - # Establish that the response is the type that we expect. 
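As the bad-request tests above and below illustrate, the REST transport maps HTTP status codes onto typed google.api_core exceptions, so a mocked 400 surfaces as BadRequest rather than a raw response. A caller-side sketch, reusing a client and request like those in the tests:

from google.api_core import exceptions as core_exceptions

try:
    client.search_assignments(request)
except core_exceptions.BadRequest:
    # The same condition pytest.raises(core_exceptions.BadRequest) asserts.
    pass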
- assert isinstance(response, pagers.SearchAssignmentsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_search_assignments_rest_interceptors(null_interceptor): - transport = transports.ReservationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ReservationServiceRestInterceptor(), - ) - client = ReservationServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "post_search_assignments") as post, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "post_search_assignments_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "pre_search_assignments") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = reservation.SearchAssignmentsRequest.pb(reservation.SearchAssignmentsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = reservation.SearchAssignmentsResponse.to_json(reservation.SearchAssignmentsResponse()) - req.return_value.content = return_value - - request = reservation.SearchAssignmentsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = reservation.SearchAssignmentsResponse() - post_with_metadata.return_value = reservation.SearchAssignmentsResponse(), metadata - - client.search_assignments(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_search_all_assignments_rest_bad_request(request_type=reservation.SearchAllAssignmentsRequest): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.search_all_assignments(request) - - -@pytest.mark.parametrize("request_type", [ - reservation.SearchAllAssignmentsRequest, - dict, -]) -def test_search_all_assignments_rest_call_success(request_type): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
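A note on the pager types asserted above: search_assignments returns a pagers.SearchAssignmentsPager wrapping the raw response. Typical usage, assuming standard GAPIC pager behavior:

pager = client.search_assignments(request)
# Iterating the pager follows next_page_token across pages transparently;
# pager.pages yields whole responses instead of individual items.
for assignment in pager:
    print(assignment.name)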
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = reservation.SearchAllAssignmentsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = reservation.SearchAllAssignmentsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.search_all_assignments(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.SearchAllAssignmentsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_search_all_assignments_rest_interceptors(null_interceptor): - transport = transports.ReservationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ReservationServiceRestInterceptor(), - ) - client = ReservationServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "post_search_all_assignments") as post, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "post_search_all_assignments_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "pre_search_all_assignments") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = reservation.SearchAllAssignmentsRequest.pb(reservation.SearchAllAssignmentsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = reservation.SearchAllAssignmentsResponse.to_json(reservation.SearchAllAssignmentsResponse()) - req.return_value.content = return_value - - request = reservation.SearchAllAssignmentsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = reservation.SearchAllAssignmentsResponse() - post_with_metadata.return_value = reservation.SearchAllAssignmentsResponse(), metadata - - client.search_all_assignments(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_move_assignment_rest_bad_request(request_type=reservation.MoveAssignmentRequest): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/reservations/sample3/assignments/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.move_assignment(request) - - -@pytest.mark.parametrize("request_type", [ - reservation.MoveAssignmentRequest, - dict, -]) -def test_move_assignment_rest_call_success(request_type): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/reservations/sample3/assignments/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = reservation.Assignment( - name='name_value', - assignee='assignee_value', - job_type=reservation.Assignment.JobType.PIPELINE, - state=reservation.Assignment.State.PENDING, - enable_gemini_in_bigquery=True, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = reservation.Assignment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.move_assignment(request) - - # Establish that the response is the type that we expect. 
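The expected response above fixes concrete enum members; later dict-form requests (e.g. 'job_type': 1, 'state': 1) use the numeric equivalents, which works because proto-plus enums are IntEnums. In short, assuming the values these tests imply:

assert reservation.Assignment.JobType.PIPELINE == 1
assert reservation.Assignment.State.PENDING == 1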
- assert isinstance(response, reservation.Assignment) - assert response.name == 'name_value' - assert response.assignee == 'assignee_value' - assert response.job_type == reservation.Assignment.JobType.PIPELINE - assert response.state == reservation.Assignment.State.PENDING - assert response.enable_gemini_in_bigquery is True - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_move_assignment_rest_interceptors(null_interceptor): - transport = transports.ReservationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ReservationServiceRestInterceptor(), - ) - client = ReservationServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "post_move_assignment") as post, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "post_move_assignment_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "pre_move_assignment") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = reservation.MoveAssignmentRequest.pb(reservation.MoveAssignmentRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = reservation.Assignment.to_json(reservation.Assignment()) - req.return_value.content = return_value - - request = reservation.MoveAssignmentRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = reservation.Assignment() - post_with_metadata.return_value = reservation.Assignment(), metadata - - client.move_assignment(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_assignment_rest_bad_request(request_type=reservation.UpdateAssignmentRequest): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'assignment': {'name': 'projects/sample1/locations/sample2/reservations/sample3/assignments/sample4'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
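These REST tests repeatedly perform the same serialization dance when faking wire responses; a condensed sketch of the conversion chain they rely on:

from google.protobuf import json_format

msg = reservation.Assignment(name='name_value')   # proto-plus wrapper
pb = reservation.Assignment.pb(msg)               # unwrap to raw protobuf
payload = json_format.MessageToJson(pb)           # JSON as sent over REST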
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_assignment(request) - - -@pytest.mark.parametrize("request_type", [ - reservation.UpdateAssignmentRequest, - dict, -]) -def test_update_assignment_rest_call_success(request_type): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'assignment': {'name': 'projects/sample1/locations/sample2/reservations/sample3/assignments/sample4'}} - request_init["assignment"] = {'name': 'projects/sample1/locations/sample2/reservations/sample3/assignments/sample4', 'assignee': 'assignee_value', 'job_type': 1, 'state': 1, 'enable_gemini_in_bigquery': True} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = reservation.UpdateAssignmentRequest.meta.fields["assignment"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["assignment"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - 
if subfield: - if field_repeated: - for i in range(0, len(request_init["assignment"][field])): - del request_init["assignment"][field][i][subfield] - else: - del request_init["assignment"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = reservation.Assignment( - name='name_value', - assignee='assignee_value', - job_type=reservation.Assignment.JobType.PIPELINE, - state=reservation.Assignment.State.PENDING, - enable_gemini_in_bigquery=True, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = reservation.Assignment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_assignment(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, reservation.Assignment) - assert response.name == 'name_value' - assert response.assignee == 'assignee_value' - assert response.job_type == reservation.Assignment.JobType.PIPELINE - assert response.state == reservation.Assignment.State.PENDING - assert response.enable_gemini_in_bigquery is True - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_assignment_rest_interceptors(null_interceptor): - transport = transports.ReservationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ReservationServiceRestInterceptor(), - ) - client = ReservationServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "post_update_assignment") as post, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "post_update_assignment_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "pre_update_assignment") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = reservation.UpdateAssignmentRequest.pb(reservation.UpdateAssignmentRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = reservation.Assignment.to_json(reservation.Assignment()) - req.return_value.content = return_value - - request = reservation.UpdateAssignmentRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = reservation.Assignment() - post_with_metadata.return_value = reservation.Assignment(), metadata - - client.update_assignment(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def 
test_get_bi_reservation_rest_bad_request(request_type=reservation.GetBiReservationRequest): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/biReservation'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_bi_reservation(request) - - -@pytest.mark.parametrize("request_type", [ - reservation.GetBiReservationRequest, - dict, -]) -def test_get_bi_reservation_rest_call_success(request_type): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/biReservation'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = reservation.BiReservation( - name='name_value', - size=443, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = reservation.BiReservation.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_bi_reservation(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, reservation.BiReservation) - assert response.name == 'name_value' - assert response.size == 443 - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_bi_reservation_rest_interceptors(null_interceptor): - transport = transports.ReservationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ReservationServiceRestInterceptor(), - ) - client = ReservationServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "post_get_bi_reservation") as post, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "post_get_bi_reservation_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "pre_get_bi_reservation") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = reservation.GetBiReservationRequest.pb(reservation.GetBiReservationRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = reservation.BiReservation.to_json(reservation.BiReservation()) - req.return_value.content = return_value - - request = reservation.GetBiReservationRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = reservation.BiReservation() - post_with_metadata.return_value = reservation.BiReservation(), metadata - - client.get_bi_reservation(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_bi_reservation_rest_bad_request(request_type=reservation.UpdateBiReservationRequest): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'bi_reservation': {'name': 'projects/sample1/locations/sample2/biReservation'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_bi_reservation(request) - - -@pytest.mark.parametrize("request_type", [ - reservation.UpdateBiReservationRequest, - dict, -]) -def test_update_bi_reservation_rest_call_success(request_type): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'bi_reservation': {'name': 'projects/sample1/locations/sample2/biReservation'}} - request_init["bi_reservation"] = {'name': 'projects/sample1/locations/sample2/biReservation', 'update_time': {'seconds': 751, 'nanos': 543}, 'size': 443, 'preferred_tables': [{'project_id': 'project_id_value', 'dataset_id': 'dataset_id_value', 'table_id': 'table_id_value'}]} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = reservation.UpdateBiReservationRequest.meta.fields["bi_reservation"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["bi_reservation"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["bi_reservation"][field])): - del request_init["bi_reservation"][field][i][subfield] - else: - del request_init["bi_reservation"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = reservation.BiReservation( - name='name_value', - size=443, - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = reservation.BiReservation.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_bi_reservation(request) - - # Establish that the response is the type that we expect. 
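The subfield-pruning loop above (and its twin in the update_assignment test) exists because constructing a proto-plus message from a dict fails on keys the installed protos do not know. Roughly, under proto-plus's usual behavior and with a hypothetical field name:

try:
    # 'field_from_newer_proto' stands in for any key present in the sample
    # dict but missing from the protos installed at test time.
    reservation.BiReservation(**{'field_from_newer_proto': 1})
except ValueError:
    pass  # unknown keys are rejected, hence the pruning above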
- assert isinstance(response, reservation.BiReservation) - assert response.name == 'name_value' - assert response.size == 443 - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_bi_reservation_rest_interceptors(null_interceptor): - transport = transports.ReservationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ReservationServiceRestInterceptor(), - ) - client = ReservationServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "post_update_bi_reservation") as post, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "post_update_bi_reservation_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.ReservationServiceRestInterceptor, "pre_update_bi_reservation") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = reservation.UpdateBiReservationRequest.pb(reservation.UpdateBiReservationRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = reservation.BiReservation.to_json(reservation.BiReservation()) - req.return_value.content = return_value - - request = reservation.UpdateBiReservationRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = reservation.BiReservation() - post_with_metadata.return_value = reservation.BiReservation(), metadata - - client.update_bi_reservation(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - -def test_initialize_client_w_rest(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_reservation_empty_call_rest(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_reservation), - '__call__') as call: - client.create_reservation(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcbr_reservation.CreateReservationRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_reservations_empty_call_rest(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_reservations), - '__call__') as call: - client.list_reservations(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.ListReservationsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_reservation_empty_call_rest(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_reservation), - '__call__') as call: - client.get_reservation(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.GetReservationRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_reservation_empty_call_rest(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_reservation), - '__call__') as call: - client.delete_reservation(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.DeleteReservationRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_reservation_empty_call_rest(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_reservation), - '__call__') as call: - client.update_reservation(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcbr_reservation.UpdateReservationRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_failover_reservation_empty_call_rest(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.failover_reservation), - '__call__') as call: - client.failover_reservation(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.FailoverReservationRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_capacity_commitment_empty_call_rest(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_capacity_commitment), - '__call__') as call: - client.create_capacity_commitment(request=None) - - # Establish that the underlying stub method was called. 
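These failsafe tests pin down the convention that request=None with no flattened fields behaves like passing an empty request message; that is, roughly:

# Both calls reach the transport with an identical (default) request proto.
client.get_reservation(request=None)
client.get_reservation(request=reservation.GetReservationRequest())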
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.CreateCapacityCommitmentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_capacity_commitments_empty_call_rest(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_capacity_commitments), - '__call__') as call: - client.list_capacity_commitments(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.ListCapacityCommitmentsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_capacity_commitment_empty_call_rest(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_capacity_commitment), - '__call__') as call: - client.get_capacity_commitment(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.GetCapacityCommitmentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_capacity_commitment_empty_call_rest(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_capacity_commitment), - '__call__') as call: - client.delete_capacity_commitment(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.DeleteCapacityCommitmentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_capacity_commitment_empty_call_rest(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_capacity_commitment), - '__call__') as call: - client.update_capacity_commitment(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.UpdateCapacityCommitmentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_split_capacity_commitment_empty_call_rest(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.split_capacity_commitment), - '__call__') as call: - client.split_capacity_commitment(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.SplitCapacityCommitmentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_merge_capacity_commitments_empty_call_rest(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.merge_capacity_commitments), - '__call__') as call: - client.merge_capacity_commitments(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.MergeCapacityCommitmentsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_assignment_empty_call_rest(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_assignment), - '__call__') as call: - client.create_assignment(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.CreateAssignmentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_assignments_empty_call_rest(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_assignments), - '__call__') as call: - client.list_assignments(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.ListAssignmentsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_assignment_empty_call_rest(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_assignment), - '__call__') as call: - client.delete_assignment(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.DeleteAssignmentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_search_assignments_empty_call_rest(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.search_assignments), - '__call__') as call: - client.search_assignments(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.SearchAssignmentsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_search_all_assignments_empty_call_rest(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.search_all_assignments), - '__call__') as call: - client.search_all_assignments(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.SearchAllAssignmentsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_move_assignment_empty_call_rest(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.move_assignment), - '__call__') as call: - client.move_assignment(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.MoveAssignmentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_assignment_empty_call_rest(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_assignment), - '__call__') as call: - client.update_assignment(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.UpdateAssignmentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_bi_reservation_empty_call_rest(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_bi_reservation), - '__call__') as call: - client.get_bi_reservation(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.GetBiReservationRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_bi_reservation_empty_call_rest(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.update_bi_reservation), - '__call__') as call: - client.update_bi_reservation(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.UpdateBiReservationRequest() - - assert args[0] == request_msg - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.ReservationServiceGrpcTransport, - ) - -def test_reservation_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.ReservationServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_reservation_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.bigquery_reservation_v1.services.reservation_service.transports.ReservationServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.ReservationServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'create_reservation', - 'list_reservations', - 'get_reservation', - 'delete_reservation', - 'update_reservation', - 'failover_reservation', - 'create_capacity_commitment', - 'list_capacity_commitments', - 'get_capacity_commitment', - 'delete_capacity_commitment', - 'update_capacity_commitment', - 'split_capacity_commitment', - 'merge_capacity_commitments', - 'create_assignment', - 'list_assignments', - 'delete_assignment', - 'search_assignments', - 'search_all_assignments', - 'move_assignment', - 'update_assignment', - 'get_bi_reservation', - 'update_bi_reservation', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_reservation_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.bigquery_reservation_v1.services.reservation_service.transports.ReservationServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ReservationServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_reservation_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. 
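For context on DuplicateCredentialArgs above: a transport accepts at most one credential source, so combining an in-memory credentials object with a credentials file is rejected up front. A caller-side sketch (the file path is illustrative):

from google.api_core import exceptions as core_exceptions

try:
    transports.ReservationServiceTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        credentials_file="credentials.json",  # illustrative path
    )
except core_exceptions.DuplicateCredentialArgs:
    pass  # supply either credentials or credentials_file, never both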
- with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.bigquery_reservation_v1.services.reservation_service.transports.ReservationServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ReservationServiceTransport() - adc.assert_called_once() - - -def test_reservation_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - ReservationServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ReservationServiceGrpcTransport, - transports.ReservationServiceGrpcAsyncIOTransport, - ], -) -def test_reservation_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/bigquery', 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ReservationServiceGrpcTransport, - transports.ReservationServiceGrpcAsyncIOTransport, - transports.ReservationServiceRestTransport, - ], -) -def test_reservation_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.ReservationServiceGrpcTransport, grpc_helpers), - (transports.ReservationServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_reservation_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
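The ADC tests above assert the exact delegation to google.auth.default(); outside the mocks, that resolution looks approximately like:

import google.auth

credentials, project_id = google.auth.default(
    scopes=None,  # explicit caller scopes would take precedence
    default_scopes=(
        'https://www.googleapis.com/auth/bigquery',
        'https://www.googleapis.com/auth/cloud-platform',
    ),
)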
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "bigqueryreservation.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/bigquery', - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="bigqueryreservation.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.ReservationServiceGrpcTransport, transports.ReservationServiceGrpcAsyncIOTransport]) -def test_reservation_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
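The channel options asserted above lift gRPC's default message-size caps of roughly 4 MB; -1 means unlimited. The same options on a hand-built channel (the endpoint and plaintext transport are illustrative only):

import grpc

channel = grpc.insecure_channel(
    'localhost:8080',
    options=[
        ('grpc.max_send_message_length', -1),
        ('grpc.max_receive_message_length', -1),
    ],
)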
-    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
-        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
-            transport_class(
-                credentials=cred,
-                client_cert_source_for_mtls=client_cert_source_callback
-            )
-            expected_cert, expected_key = client_cert_source_callback()
-            mock_ssl_cred.assert_called_once_with(
-                certificate_chain=expected_cert,
-                private_key=expected_key
-            )
-
-def test_reservation_service_http_transport_client_cert_source_for_mtls():
-    cred = ga_credentials.AnonymousCredentials()
-    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
-        transports.ReservationServiceRestTransport(
-            credentials=cred,
-            client_cert_source_for_mtls=client_cert_source_callback
-        )
-        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
-
-
-@pytest.mark.parametrize("transport_name", [
-    "grpc",
-    "grpc_asyncio",
-    "rest",
-])
-def test_reservation_service_host_no_port(transport_name):
-    client = ReservationServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        client_options=client_options.ClientOptions(api_endpoint='bigqueryreservation.googleapis.com'),
-        transport=transport_name,
-    )
-    assert client.transport._host == (
-        'bigqueryreservation.googleapis.com:443'
-        if transport_name in ['grpc', 'grpc_asyncio']
-        else 'https://bigqueryreservation.googleapis.com'
-    )
-
-@pytest.mark.parametrize("transport_name", [
-    "grpc",
-    "grpc_asyncio",
-    "rest",
-])
-def test_reservation_service_host_with_port(transport_name):
-    client = ReservationServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        client_options=client_options.ClientOptions(api_endpoint='bigqueryreservation.googleapis.com:8000'),
-        transport=transport_name,
-    )
-    assert client.transport._host == (
-        'bigqueryreservation.googleapis.com:8000'
-        if transport_name in ['grpc', 'grpc_asyncio']
-        else 'https://bigqueryreservation.googleapis.com:8000'
-    )
-
-@pytest.mark.parametrize("transport_name", [
-    "rest",
-])
-def test_reservation_service_client_transport_session_collision(transport_name):
-    creds1 = ga_credentials.AnonymousCredentials()
-    creds2 = ga_credentials.AnonymousCredentials()
-    client1 = ReservationServiceClient(
-        credentials=creds1,
-        transport=transport_name,
-    )
-    client2 = ReservationServiceClient(
-        credentials=creds2,
-        transport=transport_name,
-    )
-    session1 = client1.transport.create_reservation._session
-    session2 = client2.transport.create_reservation._session
-    assert session1 != session2
-    session1 = client1.transport.list_reservations._session
-    session2 = client2.transport.list_reservations._session
-    assert session1 != session2
-    session1 = client1.transport.get_reservation._session
-    session2 = client2.transport.get_reservation._session
-    assert session1 != session2
-    session1 = client1.transport.delete_reservation._session
-    session2 = client2.transport.delete_reservation._session
-    assert session1 != session2
-    session1 = client1.transport.update_reservation._session
-    session2 = client2.transport.update_reservation._session
-    assert session1 != session2
-    session1 = client1.transport.failover_reservation._session
-    session2 = client2.transport.failover_reservation._session
-    assert session1 != session2
-    session1 = client1.transport.create_capacity_commitment._session
-    session2 = client2.transport.create_capacity_commitment._session
-    assert session1 != session2
-    session1 = client1.transport.list_capacity_commitments._session
-    session2 = client2.transport.list_capacity_commitments._session
-    assert session1 != session2
-    session1 = client1.transport.get_capacity_commitment._session
-    session2 = client2.transport.get_capacity_commitment._session
-    assert session1 != session2
-    session1 = client1.transport.delete_capacity_commitment._session
-    session2 = client2.transport.delete_capacity_commitment._session
-    assert session1 != session2
-    session1 = client1.transport.update_capacity_commitment._session
-    session2 = client2.transport.update_capacity_commitment._session
-    assert session1 != session2
-    session1 = client1.transport.split_capacity_commitment._session
-    session2 = client2.transport.split_capacity_commitment._session
-    assert session1 != session2
-    session1 = client1.transport.merge_capacity_commitments._session
-    session2 = client2.transport.merge_capacity_commitments._session
-    assert session1 != session2
-    session1 = client1.transport.create_assignment._session
-    session2 = client2.transport.create_assignment._session
-    assert session1 != session2
-    session1 = client1.transport.list_assignments._session
-    session2 = client2.transport.list_assignments._session
-    assert session1 != session2
-    session1 = client1.transport.delete_assignment._session
-    session2 = client2.transport.delete_assignment._session
-    assert session1 != session2
-    session1 = client1.transport.search_assignments._session
-    session2 = client2.transport.search_assignments._session
-    assert session1 != session2
-    session1 = client1.transport.search_all_assignments._session
-    session2 = client2.transport.search_all_assignments._session
-    assert session1 != session2
-    session1 = client1.transport.move_assignment._session
-    session2 = client2.transport.move_assignment._session
-    assert session1 != session2
-    session1 = client1.transport.update_assignment._session
-    session2 = client2.transport.update_assignment._session
-    assert session1 != session2
-    session1 = client1.transport.get_bi_reservation._session
-    session2 = client2.transport.get_bi_reservation._session
-    assert session1 != session2
-    session1 = client1.transport.update_bi_reservation._session
-    session2 = client2.transport.update_bi_reservation._session
-    assert session1 != session2
-def test_reservation_service_grpc_transport_channel():
-    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
-    # Check that channel is used if provided.
-    transport = transports.ReservationServiceGrpcTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials is None
-
-
-def test_reservation_service_grpc_asyncio_transport_channel():
-    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
-    # Check that channel is used if provided.
-    transport = transports.ReservationServiceGrpcAsyncIOTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials is None
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.ReservationServiceGrpcTransport, transports.ReservationServiceGrpcAsyncIOTransport]) -def test_reservation_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.ReservationServiceGrpcTransport, transports.ReservationServiceGrpcAsyncIOTransport]) -def test_reservation_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_assignment_path(): - project = "squid" - location = "clam" - reservation = "whelk" - assignment = "octopus" - expected = "projects/{project}/locations/{location}/reservations/{reservation}/assignments/{assignment}".format(project=project, location=location, reservation=reservation, assignment=assignment, ) - actual = ReservationServiceClient.assignment_path(project, location, reservation, assignment) - assert expected == actual - - -def test_parse_assignment_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - "reservation": "cuttlefish", - "assignment": "mussel", - } - path = ReservationServiceClient.assignment_path(**expected) - - # Check that the path construction 
is reversible. - actual = ReservationServiceClient.parse_assignment_path(path) - assert expected == actual - -def test_bi_reservation_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}/biReservation".format(project=project, location=location, ) - actual = ReservationServiceClient.bi_reservation_path(project, location) - assert expected == actual - - -def test_parse_bi_reservation_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = ReservationServiceClient.bi_reservation_path(**expected) - - # Check that the path construction is reversible. - actual = ReservationServiceClient.parse_bi_reservation_path(path) - assert expected == actual - -def test_capacity_commitment_path(): - project = "squid" - location = "clam" - capacity_commitment = "whelk" - expected = "projects/{project}/locations/{location}/capacityCommitments/{capacity_commitment}".format(project=project, location=location, capacity_commitment=capacity_commitment, ) - actual = ReservationServiceClient.capacity_commitment_path(project, location, capacity_commitment) - assert expected == actual - - -def test_parse_capacity_commitment_path(): - expected = { - "project": "octopus", - "location": "oyster", - "capacity_commitment": "nudibranch", - } - path = ReservationServiceClient.capacity_commitment_path(**expected) - - # Check that the path construction is reversible. - actual = ReservationServiceClient.parse_capacity_commitment_path(path) - assert expected == actual - -def test_reservation_path(): - project = "cuttlefish" - location = "mussel" - reservation = "winkle" - expected = "projects/{project}/locations/{location}/reservations/{reservation}".format(project=project, location=location, reservation=reservation, ) - actual = ReservationServiceClient.reservation_path(project, location, reservation) - assert expected == actual - - -def test_parse_reservation_path(): - expected = { - "project": "nautilus", - "location": "scallop", - "reservation": "abalone", - } - path = ReservationServiceClient.reservation_path(**expected) - - # Check that the path construction is reversible. - actual = ReservationServiceClient.parse_reservation_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = ReservationServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = ReservationServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = ReservationServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format(folder=folder, ) - actual = ReservationServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = ReservationServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. 
- actual = ReservationServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format(organization=organization, ) - actual = ReservationServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = ReservationServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = ReservationServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format(project=project, ) - actual = ReservationServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = ReservationServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = ReservationServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = ReservationServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = ReservationServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = ReservationServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.ReservationServiceTransport, '_prep_wrapped_messages') as prep: - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.ReservationServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = ReservationServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_transport_close_grpc(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close_rest(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] 
- for transport in transports: - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. - with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (ReservationServiceClient, transports.ReservationServiceGrpcTransport), - (ReservationServiceAsyncClient, transports.ReservationServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/.coveragerc b/owl-bot-staging/google-cloud-billing-budgets/v1/.coveragerc deleted file mode 100644 index cbbb98d2731b..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/billing/budgets/__init__.py - google/cloud/billing/budgets/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/.flake8 b/owl-bot-staging/google-cloud-billing-budgets/v1/.flake8 deleted file mode 100644 index 29227d4cf419..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. 
- **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/MANIFEST.in b/owl-bot-staging/google-cloud-billing-budgets/v1/MANIFEST.in deleted file mode 100644 index 6af2c8959793..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/billing/budgets *.py -recursive-include google/cloud/billing/budgets_v1 *.py diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/README.rst b/owl-bot-staging/google-cloud-billing-budgets/v1/README.rst deleted file mode 100644 index a896ef3c45ba..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/README.rst +++ /dev/null @@ -1,143 +0,0 @@ -Python Client for Google Cloud Billing Budgets API -=================================================== - -Quick Start ------------- - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Billing Budgets API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv <your-env> - source <your-env>/bin/activate - <your-env>/bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. code-block:: console - - python3 -m venv <your-env> - <your-env>\Scripts\activate - <your-env>\Scripts\pip.exe install \path\to\library - - -Logging -------- - -This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes. -Note the following: - -#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging. -#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**. -#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below. - - -Simple, environment-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google -logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged -messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging -event.
- -A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log. - -- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc. - -- Invalid logging scopes: :code:`foo`, :code:`123`, etc. - -**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers. - - -Examples -^^^^^^^^ - -- Enabling the default handler for all Google-based loggers - -.. code-block:: console - - export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google - -- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`): - -.. code-block:: console - - export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1 - - -Advanced, code-based configuration -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -You can also configure a valid logging scope using Python's standard `logging` mechanism. - - -Examples -^^^^^^^^ - -- Configuring a handler for all Google-based loggers - -.. code-block:: python - - import logging - - from google.cloud.translate_v3 import translate - - base_logger = logging.getLogger("google") - base_logger.addHandler(logging.StreamHandler()) - base_logger.setLevel(logging.DEBUG) - -- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`): - -.. code-block:: python - - import logging - - from google.cloud.translate_v3 import translate - - base_logger = logging.getLogger("google.cloud.library_v1") - base_logger.addHandler(logging.StreamHandler()) - base_logger.setLevel(logging.DEBUG) - - -Logging details -~~~~~~~~~~~~~~~ - -#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root - logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set - :code:`logging.getLogger("google").propagate = True` in your code. -#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for - one library, but decide you need to also set up environment-based logging configuration for another library. - - #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual - if the code-based configuration gets applied first. - -#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get - executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured. - (This is the reason for 2.i. above.)
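For concreteness, the two notes above can be combined into a minimal sketch. It assumes only the Python standard library; the DEBUG level and the root-logger handler are illustrative choices, not requirements of this library:

.. code-block:: python

    import logging

    # Give the root logger a handler and level so propagated records are visible.
    logging.basicConfig(level=logging.DEBUG)

    # Capture DEBUG events from all Google client libraries ...
    base_logger = logging.getLogger("google")
    base_logger.setLevel(logging.DEBUG)

    # ... and forward them to the root logger's handlers (off by default, per note 1).
    base_logger.propagate = True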
diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/docs/_static/custom.css b/owl-bot-staging/google-cloud-billing-budgets/v1/docs/_static/custom.css deleted file mode 100644 index 06423be0b592..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/docs/_static/custom.css +++ /dev/null @@ -1,3 +0,0 @@ -dl.field-list > dt { - min-width: 100px -} diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/docs/budgets_v1/budget_service.rst b/owl-bot-staging/google-cloud-billing-budgets/v1/docs/budgets_v1/budget_service.rst deleted file mode 100644 index 29d54ca5de8e..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/docs/budgets_v1/budget_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -BudgetService -------------------------------- - -.. automodule:: google.cloud.billing.budgets_v1.services.budget_service - :members: - :inherited-members: - -.. automodule:: google.cloud.billing.budgets_v1.services.budget_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/docs/budgets_v1/services_.rst b/owl-bot-staging/google-cloud-billing-budgets/v1/docs/budgets_v1/services_.rst deleted file mode 100644 index 864c9aabe6e0..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/docs/budgets_v1/services_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Billing Budgets v1 API -================================================ -.. toctree:: - :maxdepth: 2 - - budget_service diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/docs/budgets_v1/types_.rst b/owl-bot-staging/google-cloud-billing-budgets/v1/docs/budgets_v1/types_.rst deleted file mode 100644 index 632739cc17d6..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/docs/budgets_v1/types_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Billing Budgets v1 API -============================================= - -.. automodule:: google.cloud.billing.budgets_v1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/docs/conf.py b/owl-bot-staging/google-cloud-billing-budgets/v1/docs/conf.py deleted file mode 100644 index 7299e0cc4153..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-billing-budgets documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. 
If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGELOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffixes as a list of strings: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = u"google-cloud-billing-budgets" -copyright = u"2023, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = 'en' - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing.
-todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Billing Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# "<project> v<release> documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a <link> tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index.
-# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-billing-budgets-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-billing-budgets.tex", - u"google-cloud-billing-budgets Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-billing-budgets", - u"Google Cloud Billing Budgets Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-billing-budgets", - u"google-cloud-billing-budgets Documentation", - author, - "google-cloud-billing-budgets", - "GAPIC library for Google Cloud Billing Budgets API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. 
-# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/docs/index.rst b/owl-bot-staging/google-cloud-billing-budgets/v1/docs/index.rst deleted file mode 100644 index b793e2f42d28..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - budgets_v1/services_ - budgets_v1/types_ diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets/__init__.py b/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets/__init__.py deleted file mode 100644 index ca191c01d3c4..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets/__init__.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.billing.budgets import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.billing.budgets_v1.services.budget_service.client import BudgetServiceClient -from google.cloud.billing.budgets_v1.services.budget_service.async_client import BudgetServiceAsyncClient - -from google.cloud.billing.budgets_v1.types.budget_model import Budget -from google.cloud.billing.budgets_v1.types.budget_model import BudgetAmount -from google.cloud.billing.budgets_v1.types.budget_model import CustomPeriod -from google.cloud.billing.budgets_v1.types.budget_model import Filter -from google.cloud.billing.budgets_v1.types.budget_model import LastPeriodAmount -from google.cloud.billing.budgets_v1.types.budget_model import NotificationsRule -from google.cloud.billing.budgets_v1.types.budget_model import ThresholdRule -from google.cloud.billing.budgets_v1.types.budget_model import CalendarPeriod -from google.cloud.billing.budgets_v1.types.budget_service import CreateBudgetRequest -from google.cloud.billing.budgets_v1.types.budget_service import DeleteBudgetRequest -from google.cloud.billing.budgets_v1.types.budget_service import GetBudgetRequest -from google.cloud.billing.budgets_v1.types.budget_service import ListBudgetsRequest -from google.cloud.billing.budgets_v1.types.budget_service import ListBudgetsResponse -from google.cloud.billing.budgets_v1.types.budget_service import UpdateBudgetRequest - -__all__ = ('BudgetServiceClient', - 'BudgetServiceAsyncClient', - 'Budget', - 'BudgetAmount', - 'CustomPeriod', - 'Filter', - 'LastPeriodAmount', - 'NotificationsRule', - 'ThresholdRule', - 'CalendarPeriod', - 'CreateBudgetRequest', - 'DeleteBudgetRequest', - 'GetBudgetRequest', - 'ListBudgetsRequest', - 'ListBudgetsResponse', - 'UpdateBudgetRequest', -) diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets/gapic_version.py b/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets/py.typed b/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets/py.typed deleted file mode 100644 index b067b1963087..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-billing-budgets package uses inline types. 
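For orientation, everything re-exported above is importable from the version-less ``google.cloud.billing.budgets`` package. A hypothetical end-to-end sketch (the billing account ID and display name below are placeholders, not values taken from this change):

.. code-block:: python

    from google.cloud.billing import budgets

    # Uses Application Default Credentials unless credentials are passed in.
    client = budgets.BudgetServiceClient()

    # Compose a budget from the message types listed in __all__ above.
    budget = budgets.Budget(
        display_name="example-budget",  # placeholder
        amount=budgets.BudgetAmount(),
    )
    request = budgets.CreateBudgetRequest(
        parent="billingAccounts/000000-000000-000000",  # placeholder account ID
        budget=budget,
    )
    response = client.create_budget(request=request)
    print(response.name)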
diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/__init__.py b/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/__init__.py deleted file mode 100644 index 5b9c62f7ebe5..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/__init__.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.billing.budgets_v1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.budget_service import BudgetServiceClient -from .services.budget_service import BudgetServiceAsyncClient - -from .types.budget_model import Budget -from .types.budget_model import BudgetAmount -from .types.budget_model import CustomPeriod -from .types.budget_model import Filter -from .types.budget_model import LastPeriodAmount -from .types.budget_model import NotificationsRule -from .types.budget_model import ThresholdRule -from .types.budget_model import CalendarPeriod -from .types.budget_service import CreateBudgetRequest -from .types.budget_service import DeleteBudgetRequest -from .types.budget_service import GetBudgetRequest -from .types.budget_service import ListBudgetsRequest -from .types.budget_service import ListBudgetsResponse -from .types.budget_service import UpdateBudgetRequest - -__all__ = ( - 'BudgetServiceAsyncClient', -'Budget', -'BudgetAmount', -'BudgetServiceClient', -'CalendarPeriod', -'CreateBudgetRequest', -'CustomPeriod', -'DeleteBudgetRequest', -'Filter', -'GetBudgetRequest', -'LastPeriodAmount', -'ListBudgetsRequest', -'ListBudgetsResponse', -'NotificationsRule', -'ThresholdRule', -'UpdateBudgetRequest', -) diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/gapic_metadata.json b/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/gapic_metadata.json deleted file mode 100644 index 4c3b69415cbe..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/gapic_metadata.json +++ /dev/null @@ -1,103 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.billing.budgets_v1", - "protoPackage": "google.cloud.billing.budgets.v1", - "schema": "1.0", - "services": { - "BudgetService": { - "clients": { - "grpc": { - "libraryClient": "BudgetServiceClient", - "rpcs": { - "CreateBudget": { - "methods": [ - "create_budget" - ] - }, - "DeleteBudget": { - "methods": [ - "delete_budget" - ] - }, - "GetBudget": { - "methods": [ - "get_budget" - ] - }, - "ListBudgets": { - "methods": [ - "list_budgets" - ] - }, - "UpdateBudget": { - "methods": [ - "update_budget" - ] - } - } - }, - "grpc-async": { - "libraryClient": "BudgetServiceAsyncClient", - "rpcs": { - "CreateBudget": { - "methods": [ - "create_budget" - ] - }, - "DeleteBudget": { - "methods": [ - 
"delete_budget" - ] - }, - "GetBudget": { - "methods": [ - "get_budget" - ] - }, - "ListBudgets": { - "methods": [ - "list_budgets" - ] - }, - "UpdateBudget": { - "methods": [ - "update_budget" - ] - } - } - }, - "rest": { - "libraryClient": "BudgetServiceClient", - "rpcs": { - "CreateBudget": { - "methods": [ - "create_budget" - ] - }, - "DeleteBudget": { - "methods": [ - "delete_budget" - ] - }, - "GetBudget": { - "methods": [ - "get_budget" - ] - }, - "ListBudgets": { - "methods": [ - "list_budgets" - ] - }, - "UpdateBudget": { - "methods": [ - "update_budget" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/gapic_version.py b/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/py.typed b/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/py.typed deleted file mode 100644 index b067b1963087..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-billing-budgets package uses inline types. diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/services/__init__.py b/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/services/__init__.py deleted file mode 100644 index 8f6cf068242c..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/services/budget_service/__init__.py b/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/services/budget_service/__init__.py deleted file mode 100644 index e50c92a20ec7..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/services/budget_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import BudgetServiceClient -from .async_client import BudgetServiceAsyncClient - -__all__ = ( - 'BudgetServiceClient', - 'BudgetServiceAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/services/budget_service/async_client.py b/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/services/budget_service/async_client.py deleted file mode 100644 index 37e4cd9d1f76..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/services/budget_service/async_client.py +++ /dev/null @@ -1,860 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.billing.budgets_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.cloud.billing.budgets_v1.services.budget_service import pagers -from google.cloud.billing.budgets_v1.types import budget_model -from google.cloud.billing.budgets_v1.types import budget_service -from google.protobuf import field_mask_pb2 # type: ignore -from .transports.base import BudgetServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import BudgetServiceGrpcAsyncIOTransport -from .client import BudgetServiceClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -class BudgetServiceAsyncClient: - """BudgetService stores Cloud Billing budgets, which define a - budget plan and rules to execute as we track spend against that - plan. - """ - - _client: BudgetServiceClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = BudgetServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = BudgetServiceClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = BudgetServiceClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = BudgetServiceClient._DEFAULT_UNIVERSE - - budget_path = staticmethod(BudgetServiceClient.budget_path) - parse_budget_path = staticmethod(BudgetServiceClient.parse_budget_path) - common_billing_account_path = staticmethod(BudgetServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(BudgetServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(BudgetServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(BudgetServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(BudgetServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(BudgetServiceClient.parse_common_organization_path) - common_project_path = staticmethod(BudgetServiceClient.common_project_path) - parse_common_project_path = staticmethod(BudgetServiceClient.parse_common_project_path) - common_location_path = staticmethod(BudgetServiceClient.common_location_path) - parse_common_location_path = staticmethod(BudgetServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. 
- - Returns: - BudgetServiceAsyncClient: The constructed client. - """ - return BudgetServiceClient.from_service_account_info.__func__(BudgetServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - BudgetServiceAsyncClient: The constructed client. - """ - return BudgetServiceClient.from_service_account_file.__func__(BudgetServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` is provided, use the provided one. - (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return BudgetServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> BudgetServiceTransport: - """Returns the transport used by the client instance. - - Returns: - BudgetServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = BudgetServiceClient.get_transport_class - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, BudgetServiceTransport, Callable[..., BudgetServiceTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the budget service async client.
-
-        Args:
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-            transport (Optional[Union[str,BudgetServiceTransport,Callable[..., BudgetServiceTransport]]]):
-                The transport to use, or a Callable that constructs and returns a new transport to use.
-                If a Callable is given, it will be called with the same set of initialization
-                arguments as used in the BudgetServiceTransport constructor.
-                If set to None, a transport is chosen automatically.
-            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
-                Custom options for the client.
-
-                1. The ``api_endpoint`` property can be used to override the
-                default endpoint provided by the client when ``transport`` is
-                not explicitly provided. Only if this property is not set and
-                ``transport`` was not explicitly provided, the endpoint is
-                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
-                variable, which can have one of the following values:
-                "always" (always use the default mTLS endpoint), "never" (always
-                use the default regular endpoint) and "auto" (auto-switch to the
-                default mTLS endpoint if client certificate is present; this is
-                the default value).
-
-                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
-                is "true", then the ``client_cert_source`` property can be used
-                to provide a client certificate for mTLS transport. If
-                not provided, the default SSL client certificate will be used if
-                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
-                set, no client certificate will be used.
-
-                3. The ``universe_domain`` property can be used to override the
-                default "googleapis.com" universe. Note that the ``api_endpoint``
-                property still takes precedence; and ``universe_domain`` is
-                currently not supported for mTLS.
-
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-
-        Raises:
-            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
-                creation failed for any reason.
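-
-        Example:
-            A minimal construction sketch (all arguments shown are optional;
-            with no arguments, credentials are resolved from the environment
-            via Application Default Credentials):
-
-            .. code-block:: python
-
-                from google.cloud.billing import budgets_v1
-
-                # Uses the default "grpc_asyncio" transport and the default
-                # endpoint resolution described above.
-                client = budgets_v1.BudgetServiceAsyncClient()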
-        """
-        self._client = BudgetServiceClient(
-            credentials=credentials,
-            transport=transport,
-            client_options=client_options,
-            client_info=client_info,
-
-        )
-
-        if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG):  # pragma: NO COVER
-            _LOGGER.debug(
-                "Created client `google.cloud.billing.budgets_v1.BudgetServiceAsyncClient`.",
-                extra = {
-                    "serviceName": "google.cloud.billing.budgets.v1.BudgetService",
-                    "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""),
-                    "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}",
-                    "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(),
-                } if hasattr(self._client._transport, "_credentials") else {
-                    "serviceName": "google.cloud.billing.budgets.v1.BudgetService",
-                    "credentialsType": None,
-                }
-            )
-
-    async def create_budget(self,
-            request: Optional[Union[budget_service.CreateBudgetRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            budget: Optional[budget_model.Budget] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> budget_model.Budget:
-        r"""Creates a new budget. See `Quotas and
-        limits <https://cloud.google.com/billing/quotas>`__ for more
-        information on the limits of the number of budgets you can
-        create.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud.billing import budgets_v1
-
-            async def sample_create_budget():
-                # Create a client
-                client = budgets_v1.BudgetServiceAsyncClient()
-
-                # Initialize request argument(s)
-                request = budgets_v1.CreateBudgetRequest(
-                    parent="parent_value",
-                )
-
-                # Make the request
-                response = await client.create_budget(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.billing.budgets_v1.types.CreateBudgetRequest, dict]]):
-                The request object. Request for CreateBudget
-            parent (:class:`str`):
-                Required. The name of the billing account to create the
-                budget in. Values are of the form
-                ``billingAccounts/{billingAccountId}``.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            budget (:class:`google.cloud.billing.budgets_v1.types.Budget`):
-                Required. Budget to create.
-                This corresponds to the ``budget`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
- - Returns: - google.cloud.billing.budgets_v1.types.Budget: - A budget is a plan that describes - what you expect to spend on Cloud - projects, plus the rules to execute as - spend is tracked against that plan, (for - example, send an alert when 90% of the - target spend is met). The budget time - period is configurable, with options - such as month (default), quarter, year, - or custom time period. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, budget] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, budget_service.CreateBudgetRequest): - request = budget_service.CreateBudgetRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if budget is not None: - request.budget = budget - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.create_budget] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_budget(self, - request: Optional[Union[budget_service.UpdateBudgetRequest, dict]] = None, - *, - budget: Optional[budget_model.Budget] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> budget_model.Budget: - r"""Updates a budget and returns the updated budget. - - WARNING: There are some fields exposed on the Google - Cloud Console that aren't available on this API. Budget - fields that are not exposed in this API will not be - changed by this method. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.billing import budgets_v1 - - async def sample_update_budget(): - # Create a client - client = budgets_v1.BudgetServiceAsyncClient() - - # Initialize request argument(s) - request = budgets_v1.UpdateBudgetRequest( - ) - - # Make the request - response = await client.update_budget(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.billing.budgets_v1.types.UpdateBudgetRequest, dict]]): - The request object. Request for UpdateBudget - budget (:class:`google.cloud.billing.budgets_v1.types.Budget`): - Required. The updated budget object. - The budget to update is specified by the - budget name in the budget. - - This corresponds to the ``budget`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Optional. Indicates which fields in the provided budget - to update. Read-only fields (such as ``name``) cannot be - changed. If this is not provided, then only fields with - non-default values from the request are updated. See - https://developers.google.com/protocol-buffers/docs/proto3#default - for more details about default values. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.billing.budgets_v1.types.Budget: - A budget is a plan that describes - what you expect to spend on Cloud - projects, plus the rules to execute as - spend is tracked against that plan, (for - example, send an alert when 90% of the - target spend is met). The budget time - period is configurable, with options - such as month (default), quarter, year, - or custom time period. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [budget, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, budget_service.UpdateBudgetRequest): - request = budget_service.UpdateBudgetRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if budget is not None: - request.budget = budget - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
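-        # (A note beyond the generated comment above: `_wrapped_methods` pairs
-        # each transport stub with the default retry/timeout policy from the
-        # service config, so the `retry` and `timeout` arguments passed to this
-        # call override those defaults on a per-call basis.)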
- rpc = self._client._transport._wrapped_methods[self._client._transport.update_budget] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("budget.name", request.budget.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_budget(self, - request: Optional[Union[budget_service.GetBudgetRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> budget_model.Budget: - r"""Returns a budget. - - WARNING: There are some fields exposed on the Google - Cloud Console that aren't available on this API. When - reading from the API, you will not see these fields in - the return value, though they may have been set in the - Cloud Console. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.billing import budgets_v1 - - async def sample_get_budget(): - # Create a client - client = budgets_v1.BudgetServiceAsyncClient() - - # Initialize request argument(s) - request = budgets_v1.GetBudgetRequest( - name="name_value", - ) - - # Make the request - response = await client.get_budget(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.billing.budgets_v1.types.GetBudgetRequest, dict]]): - The request object. Request for GetBudget - name (:class:`str`): - Required. Name of budget to get. Values are of the form - ``billingAccounts/{billingAccountId}/budgets/{budgetId}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.billing.budgets_v1.types.Budget: - A budget is a plan that describes - what you expect to spend on Cloud - projects, plus the rules to execute as - spend is tracked against that plan, (for - example, send an alert when 90% of the - target spend is met). The budget time - period is configurable, with options - such as month (default), quarter, year, - or custom time period. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
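-        # (A plain dict is accepted for `request` as well; it is coerced into
-        # a `GetBudgetRequest` below via the proto-plus message constructor.)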
-        flattened_params = [name]
-        has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
-        if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
-
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, budget_service.GetBudgetRequest):
-            request = budget_service.GetBudgetRequest(request)
-
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if name is not None:
-            request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.get_budget]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def list_budgets(self,
-            request: Optional[Union[budget_service.ListBudgetsRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> pagers.ListBudgetsAsyncPager:
-        r"""Returns a list of budgets for a billing account.
-
-        WARNING: There are some fields exposed on the Google
-        Cloud Console that aren't available on this API. When
-        reading from the API, you will not see these fields in
-        the return value, though they may have been set in the
-        Cloud Console.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud.billing import budgets_v1
-
-            async def sample_list_budgets():
-                # Create a client
-                client = budgets_v1.BudgetServiceAsyncClient()
-
-                # Initialize request argument(s)
-                request = budgets_v1.ListBudgetsRequest(
-                    parent="parent_value",
-                )
-
-                # Make the request
-                page_result = await client.list_budgets(request=request)
-
-                # Handle the response
-                async for response in page_result:
-                    print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.billing.budgets_v1.types.ListBudgetsRequest, dict]]):
-                The request object. Request for ListBudgets
-            parent (:class:`str`):
-                Required. Name of billing account to list budgets under.
-                Values are of the form
-                ``billingAccounts/{billingAccountId}``.
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata.
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.billing.budgets_v1.services.budget_service.pagers.ListBudgetsAsyncPager: - Response for ListBudgets - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, budget_service.ListBudgetsRequest): - request = budget_service.ListBudgetsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.list_budgets] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListBudgetsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_budget(self, - request: Optional[Union[budget_service.DeleteBudgetRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a budget. Returns successfully if already - deleted. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.billing import budgets_v1 - - async def sample_delete_budget(): - # Create a client - client = budgets_v1.BudgetServiceAsyncClient() - - # Initialize request argument(s) - request = budgets_v1.DeleteBudgetRequest( - name="name_value", - ) - - # Make the request - await client.delete_budget(request=request) - - Args: - request (Optional[Union[google.cloud.billing.budgets_v1.types.DeleteBudgetRequest, dict]]): - The request object. Request for DeleteBudget - name (:class:`str`): - Required. Name of the budget to delete. 
Values are of - the form - ``billingAccounts/{billingAccountId}/budgets/{budgetId}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, budget_service.DeleteBudgetRequest): - request = budget_service.DeleteBudgetRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_budget] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def __aenter__(self) -> "BudgetServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "BudgetServiceAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/services/budget_service/client.py b/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/services/budget_service/client.py deleted file mode 100644 index 585b61cc68d8..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/services/budget_service/client.py +++ /dev/null @@ -1,1217 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
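-#
-# A brief usage sketch for the synchronous client defined in this module
-# (not part of the generated surface; the billing-account ID shown is
-# hypothetical):
-#
-#     from google.cloud.billing import budgets_v1
-#
-#     client = budgets_v1.BudgetServiceClient()
-#     parent = "billingAccounts/012345-567890-ABCDEF"  # hypothetical ID
-#     for budget in client.list_budgets(parent=parent):
-#         print(budget.display_name)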
-# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.billing.budgets_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.cloud.billing.budgets_v1.services.budget_service import pagers -from google.cloud.billing.budgets_v1.types import budget_model -from google.cloud.billing.budgets_v1.types import budget_service -from google.protobuf import field_mask_pb2 # type: ignore -from .transports.base import BudgetServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import BudgetServiceGrpcTransport -from .transports.grpc_asyncio import BudgetServiceGrpcAsyncIOTransport -from .transports.rest import BudgetServiceRestTransport - - -class BudgetServiceClientMeta(type): - """Metaclass for the BudgetService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[BudgetServiceTransport]] - _transport_registry["grpc"] = BudgetServiceGrpcTransport - _transport_registry["grpc_asyncio"] = BudgetServiceGrpcAsyncIOTransport - _transport_registry["rest"] = BudgetServiceRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[BudgetServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class BudgetServiceClient(metaclass=BudgetServiceClientMeta): - """BudgetService stores Cloud Billing budgets, which define a - budget plan and rules to execute as we track spend against that - plan. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. 
-        """
-        if not api_endpoint:
-            return api_endpoint
-
-        mtls_endpoint_re = re.compile(
-            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
-        )
-
-        m = mtls_endpoint_re.match(api_endpoint)
-        name, mtls, sandbox, googledomain = m.groups()
-        if mtls or not googledomain:
-            return api_endpoint
-
-        if sandbox:
-            return api_endpoint.replace(
-                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
-            )
-
-        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
-
-    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
-    DEFAULT_ENDPOINT = "billingbudgets.googleapis.com"
-    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
-        DEFAULT_ENDPOINT
-    )
-
-    _DEFAULT_ENDPOINT_TEMPLATE = "billingbudgets.{UNIVERSE_DOMAIN}"
-    _DEFAULT_UNIVERSE = "googleapis.com"
-
-    @classmethod
-    def from_service_account_info(cls, info: dict, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            info.
-
-        Args:
-            info (dict): The service account private key info.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            BudgetServiceClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_info(info)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    @classmethod
-    def from_service_account_file(cls, filename: str, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            file.
-
-        Args:
-            filename (str): The path to the service account private key json
-                file.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            BudgetServiceClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_file(
-            filename)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    from_service_account_json = from_service_account_file
-
-    @property
-    def transport(self) -> BudgetServiceTransport:
-        """Returns the transport used by the client instance.
-
-        Returns:
-            BudgetServiceTransport: The transport used by the client
-                instance.
-        """
-        return self._transport
-
-    @staticmethod
-    def budget_path(billing_account: str,budget: str,) -> str:
-        """Returns a fully-qualified budget string."""
-        return "billingAccounts/{billing_account}/budgets/{budget}".format(billing_account=billing_account, budget=budget, )
-
-    @staticmethod
-    def parse_budget_path(path: str) -> Dict[str,str]:
-        """Parses a budget path into its component segments."""
-        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)/budgets/(?P<budget>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_billing_account_path(billing_account: str, ) -> str:
-        """Returns a fully-qualified billing_account string."""
-        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
-
-    @staticmethod
-    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
-        """Parse a billing_account path into its component segments."""
-        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_folder_path(folder: str, ) -> str:
-        """Returns a fully-qualified folder string."""
-        return "folders/{folder}".format(folder=folder, )
-
-    @staticmethod
-    def parse_common_folder_path(path: str) -> Dict[str,str]:
-        """Parse a folder path into its component segments."""
-        m = re.match(r"^folders/(?P<folder>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_organization_path(organization: str, ) -> str:
-        """Returns a fully-qualified organization string."""
-        return "organizations/{organization}".format(organization=organization, )
-
-    @staticmethod
-    def parse_common_organization_path(path: str) -> Dict[str,str]:
-        """Parse an organization path into its component segments."""
-        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_project_path(project: str, ) -> str:
-        """Returns a fully-qualified project string."""
-        return "projects/{project}".format(project=project, )
-
-    @staticmethod
-    def parse_common_project_path(path: str) -> Dict[str,str]:
-        """Parse a project path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_location_path(project: str, location: str, ) -> str:
-        """Returns a fully-qualified location string."""
-        return "projects/{project}/locations/{location}".format(project=project, location=location, )
-
-    @staticmethod
-    def parse_common_location_path(path: str) -> Dict[str,str]:
-        """Parse a location path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @classmethod
-    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
-        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
-
-        The client cert source is determined in the following order:
-        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
-        client cert source is None.
-        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
-        default client cert source exists, use the default one; otherwise the client cert
-        source is None.
-
-        The API endpoint is determined in the following order:
-        (1) if `client_options.api_endpoint` is provided, use the provided one.
-        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
-        default mTLS endpoint; if the environment variable is "never", use the default API
-        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
-        use the default API endpoint.
-
-        More details can be found at https://google.aip.dev/auth/4114.
-
-        Args:
-            client_options (google.api_core.client_options.ClientOptions): Custom options for the
-                client. Only the `api_endpoint` and `client_cert_source` properties may be used
-                in this method.
-
-        Returns:
-            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
-                client cert source to use.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
-        """
-
-        warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
-            DeprecationWarning)
-        if client_options is None:
-            client_options = client_options_lib.ClientOptions()
-        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
-        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
-        if use_client_cert not in ("true", "false"):
-            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
-        if use_mtls_endpoint not in ("auto", "never", "always"):
-            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
-
-        # Figure out the client cert source to use.
-        client_cert_source = None
-        if use_client_cert == "true":
-            if client_options.client_cert_source:
-                client_cert_source = client_options.client_cert_source
-            elif mtls.has_default_client_cert_source():
-                client_cert_source = mtls.default_client_cert_source()
-
-        # Figure out which api endpoint to use.
-        if client_options.api_endpoint is not None:
-            api_endpoint = client_options.api_endpoint
-        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
-            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
-        else:
-            api_endpoint = cls.DEFAULT_ENDPOINT
-
-        return api_endpoint, client_cert_source
-
-    @staticmethod
-    def _read_environment_variables():
-        """Returns the environment variables used by the client.
-
-        Returns:
-            Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
-            GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.
-
-        Raises:
-            ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
-                any of ["true", "false"].
-            google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
-                is not any of ["auto", "never", "always"].
-        """
-        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower()
-        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower()
-        universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN")
-        if use_client_cert not in ("true", "false"):
-            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
-        if use_mtls_endpoint not in ("auto", "never", "always"):
-            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
-        return use_client_cert == "true", use_mtls_endpoint, universe_domain_env
-
-    @staticmethod
-    def _get_client_cert_source(provided_cert_source, use_cert_flag):
-        """Return the client cert source to be used by the client.
-
-        Args:
-            provided_cert_source (bytes): The client certificate source provided.
- use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = BudgetServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = BudgetServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = BudgetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. - """ - universe_domain = BudgetServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
-        """
-        if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]:
-            return
-
-        cred = self._transport._credentials
-
-        # get_cred_info is only available in google-auth>=2.35.0
-        if not hasattr(cred, "get_cred_info"):
-            return
-
-        # ignore the type check since pypy test fails when get_cred_info
-        # is not available
-        cred_info = cred.get_cred_info()  # type: ignore
-        if cred_info and hasattr(error._details, "append"):
-            error._details.append(json.dumps(cred_info))
-
-    @property
-    def api_endpoint(self):
-        """Return the API endpoint used by the client instance.
-
-        Returns:
-            str: The API endpoint used by the client instance.
-        """
-        return self._api_endpoint
-
-    @property
-    def universe_domain(self) -> str:
-        """Return the universe domain used by the client instance.
-
-        Returns:
-            str: The universe domain used by the client instance.
-        """
-        return self._universe_domain
-
-    def __init__(self, *,
-            credentials: Optional[ga_credentials.Credentials] = None,
-            transport: Optional[Union[str, BudgetServiceTransport, Callable[..., BudgetServiceTransport]]] = None,
-            client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            ) -> None:
-        """Instantiates the budget service client.
-
-        Args:
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-            transport (Optional[Union[str,BudgetServiceTransport,Callable[..., BudgetServiceTransport]]]):
-                The transport to use, or a Callable that constructs and returns a new transport.
-                If a Callable is given, it will be called with the same set of initialization
-                arguments as used in the BudgetServiceTransport constructor.
-                If set to None, a transport is chosen automatically.
-            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
-                Custom options for the client.
-
-                1. The ``api_endpoint`` property can be used to override the
-                default endpoint provided by the client when ``transport`` is
-                not explicitly provided. Only if this property is not set and
-                ``transport`` was not explicitly provided, the endpoint is
-                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
-                variable, which can have one of the following values:
-                "always" (always use the default mTLS endpoint), "never" (always
-                use the default regular endpoint) and "auto" (auto-switch to the
-                default mTLS endpoint if client certificate is present; this is
-                the default value).
-
-                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
-                is "true", then the ``client_cert_source`` property can be used
-                to provide a client certificate for mTLS transport. If
-                not provided, the default SSL client certificate will be used if
-                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
-                set, no client certificate will be used.
-
-                3. The ``universe_domain`` property can be used to override the
-                default "googleapis.com" universe. Note that the ``api_endpoint``
-                property still takes precedence; and ``universe_domain`` is
-                currently not supported for mTLS.
-
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) - - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = BudgetServiceClient._read_environment_variables() - self._client_cert_source = BudgetServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = BudgetServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER - # Setup logging. - client_logging.initialize_logging() - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, BudgetServiceTransport) - if transport_provided: - # transport is a BudgetServiceTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
-                )
-            self._transport = cast(BudgetServiceTransport, transport)
-            self._api_endpoint = self._transport.host
-
-        self._api_endpoint = (self._api_endpoint or
-            BudgetServiceClient._get_api_endpoint(
-                self._client_options.api_endpoint,
-                self._client_cert_source,
-                self._universe_domain,
-                self._use_mtls_endpoint))
-
-        if not transport_provided:
-            import google.auth._default  # type: ignore
-
-            if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"):
-                credentials = google.auth._default.get_api_key_credentials(api_key_value)
-
-            transport_init: Union[Type[BudgetServiceTransport], Callable[..., BudgetServiceTransport]] = (
-                BudgetServiceClient.get_transport_class(transport)
-                if isinstance(transport, str) or transport is None
-                else cast(Callable[..., BudgetServiceTransport], transport)
-            )
-            # initialize with the provided callable or the passed in class
-            self._transport = transport_init(
-                credentials=credentials,
-                credentials_file=self._client_options.credentials_file,
-                host=self._api_endpoint,
-                scopes=self._client_options.scopes,
-                client_cert_source_for_mtls=self._client_cert_source,
-                quota_project_id=self._client_options.quota_project_id,
-                client_info=client_info,
-                always_use_jwt_access=True,
-                api_audience=self._client_options.api_audience,
-            )
-
-        if "async" not in str(self._transport):
-            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG):  # pragma: NO COVER
-                _LOGGER.debug(
-                    "Created client `google.cloud.billing.budgets_v1.BudgetServiceClient`.",
-                    extra = {
-                        "serviceName": "google.cloud.billing.budgets.v1.BudgetService",
-                        "universeDomain": getattr(self._transport._credentials, "universe_domain", ""),
-                        "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}",
-                        "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(),
-                    } if hasattr(self._transport, "_credentials") else {
-                        "serviceName": "google.cloud.billing.budgets.v1.BudgetService",
-                        "credentialsType": None,
-                    }
-                )
-
-    def create_budget(self,
-            request: Optional[Union[budget_service.CreateBudgetRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            budget: Optional[budget_model.Budget] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> budget_model.Budget:
-        r"""Creates a new budget. See `Quotas and
-        limits <https://cloud.google.com/billing/quotas>`__ for more
-        information on the limits of the number of budgets you can
-        create.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud.billing import budgets_v1
-
-            def sample_create_budget():
-                # Create a client
-                client = budgets_v1.BudgetServiceClient()
-
-                # Initialize request argument(s)
-                request = budgets_v1.CreateBudgetRequest(
-                    parent="parent_value",
-                )
-
-                # Make the request
-                response = client.create_budget(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.billing.budgets_v1.types.CreateBudgetRequest, dict]):
-                The request object. Request for CreateBudget
-            parent (str):
-                Required.
The name of the billing account to create the - budget in. Values are of the form - ``billingAccounts/{billingAccountId}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - budget (google.cloud.billing.budgets_v1.types.Budget): - Required. Budget to create. - This corresponds to the ``budget`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.billing.budgets_v1.types.Budget: - A budget is a plan that describes - what you expect to spend on Cloud - projects, plus the rules to execute as - spend is tracked against that plan, (for - example, send an alert when 90% of the - target spend is met). The budget time - period is configurable, with options - such as month (default), quarter, year, - or custom time period. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent, budget] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, budget_service.CreateBudgetRequest): - request = budget_service.CreateBudgetRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if budget is not None: - request.budget = budget - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_budget] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_budget(self, - request: Optional[Union[budget_service.UpdateBudgetRequest, dict]] = None, - *, - budget: Optional[budget_model.Budget] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> budget_model.Budget: - r"""Updates a budget and returns the updated budget. - - WARNING: There are some fields exposed on the Google - Cloud Console that aren't available on this API. Budget - fields that are not exposed in this API will not be - changed by this method. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.billing import budgets_v1 - - def sample_update_budget(): - # Create a client - client = budgets_v1.BudgetServiceClient() - - # Initialize request argument(s) - request = budgets_v1.UpdateBudgetRequest( - ) - - # Make the request - response = client.update_budget(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.billing.budgets_v1.types.UpdateBudgetRequest, dict]): - The request object. Request for UpdateBudget - budget (google.cloud.billing.budgets_v1.types.Budget): - Required. The updated budget object. - The budget to update is specified by the - budget name in the budget. - - This corresponds to the ``budget`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Optional. Indicates which fields in the provided budget - to update. Read-only fields (such as ``name``) cannot be - changed. If this is not provided, then only fields with - non-default values from the request are updated. See - https://developers.google.com/protocol-buffers/docs/proto3#default - for more details about default values. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.billing.budgets_v1.types.Budget: - A budget is a plan that describes - what you expect to spend on Cloud - projects, plus the rules to execute as - spend is tracked against that plan, (for - example, send an alert when 90% of the - target spend is met). The budget time - period is configurable, with options - such as month (default), quarter, year, - or custom time period. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [budget, update_mask] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, budget_service.UpdateBudgetRequest): - request = budget_service.UpdateBudgetRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if budget is not None: - request.budget = budget - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_budget] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("budget.name", request.budget.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_budget(self, - request: Optional[Union[budget_service.GetBudgetRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> budget_model.Budget: - r"""Returns a budget. - - WARNING: There are some fields exposed on the Google - Cloud Console that aren't available on this API. When - reading from the API, you will not see these fields in - the return value, though they may have been set in the - Cloud Console. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.billing import budgets_v1 - - def sample_get_budget(): - # Create a client - client = budgets_v1.BudgetServiceClient() - - # Initialize request argument(s) - request = budgets_v1.GetBudgetRequest( - name="name_value", - ) - - # Make the request - response = client.get_budget(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.billing.budgets_v1.types.GetBudgetRequest, dict]): - The request object. Request for GetBudget - name (str): - Required. Name of budget to get. Values are of the form - ``billingAccounts/{billingAccountId}/budgets/{budgetId}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.billing.budgets_v1.types.Budget: - A budget is a plan that describes - what you expect to spend on Cloud - projects, plus the rules to execute as - spend is tracked against that plan, (for - example, send an alert when 90% of the - target spend is met). The budget time - period is configurable, with options - such as month (default), quarter, year, - or custom time period. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, budget_service.GetBudgetRequest): - request = budget_service.GetBudgetRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_budget] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_budgets(self, - request: Optional[Union[budget_service.ListBudgetsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListBudgetsPager: - r"""Returns a list of budgets for a billing account. - - WARNING: There are some fields exposed on the Google - Cloud Console that aren't available on this API. When - reading from the API, you will not see these fields in - the return value, though they may have been set in the - Cloud Console. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.billing import budgets_v1 - - def sample_list_budgets(): - # Create a client - client = budgets_v1.BudgetServiceClient() - - # Initialize request argument(s) - request = budgets_v1.ListBudgetsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_budgets(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.billing.budgets_v1.types.ListBudgetsRequest, dict]): - The request object. Request for ListBudgets - parent (str): - Required. Name of billing account to list budgets under. - Values are of the form - ``billingAccounts/{billingAccountId}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.billing.budgets_v1.services.budget_service.pagers.ListBudgetsPager: - Response for ListBudgets - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [parent] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, budget_service.ListBudgetsRequest): - request = budget_service.ListBudgetsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_budgets] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListBudgetsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_budget(self, - request: Optional[Union[budget_service.DeleteBudgetRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a budget. Returns successfully if already - deleted. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.billing import budgets_v1 - - def sample_delete_budget(): - # Create a client - client = budgets_v1.BudgetServiceClient() - - # Initialize request argument(s) - request = budgets_v1.DeleteBudgetRequest( - name="name_value", - ) - - # Make the request - client.delete_budget(request=request) - - Args: - request (Union[google.cloud.billing.budgets_v1.types.DeleteBudgetRequest, dict]): - The request object. Request for DeleteBudget - name (str): - Required. Name of the budget to delete. Values are of - the form - ``billingAccounts/{billingAccountId}/budgets/{budgetId}``. 
- - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, budget_service.DeleteBudgetRequest): - request = budget_service.DeleteBudgetRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_budget] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def __enter__(self) -> "BudgetServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "BudgetServiceClient", -) diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/services/budget_service/pagers.py b/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/services/budget_service/pagers.py deleted file mode 100644 index aee2f31160ea..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/services/budget_service/pagers.py +++ /dev/null @@ -1,167 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.billing.budgets_v1.types import budget_model -from google.cloud.billing.budgets_v1.types import budget_service - - -class ListBudgetsPager: - """A pager for iterating through ``list_budgets`` requests. - - This class thinly wraps an initial - :class:`google.cloud.billing.budgets_v1.types.ListBudgetsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``budgets`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListBudgets`` requests and continue to iterate - through the ``budgets`` field on the - corresponding responses. - - All the usual :class:`google.cloud.billing.budgets_v1.types.ListBudgetsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., budget_service.ListBudgetsResponse], - request: budget_service.ListBudgetsRequest, - response: budget_service.ListBudgetsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.billing.budgets_v1.types.ListBudgetsRequest): - The initial request object. - response (google.cloud.billing.budgets_v1.types.ListBudgetsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = budget_service.ListBudgetsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[budget_service.ListBudgetsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[budget_model.Budget]: - for page in self.pages: - yield from page.budgets - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListBudgetsAsyncPager: - """A pager for iterating through ``list_budgets`` requests. - - This class thinly wraps an initial - :class:`google.cloud.billing.budgets_v1.types.ListBudgetsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``budgets`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListBudgets`` requests and continue to iterate - through the ``budgets`` field on the - corresponding responses. - - All the usual :class:`google.cloud.billing.budgets_v1.types.ListBudgetsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[budget_service.ListBudgetsResponse]], - request: budget_service.ListBudgetsRequest, - response: budget_service.ListBudgetsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.billing.budgets_v1.types.ListBudgetsRequest): - The initial request object. - response (google.cloud.billing.budgets_v1.types.ListBudgetsResponse): - The initial response object. - retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- """ - self._method = method - self._request = budget_service.ListBudgetsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[budget_service.ListBudgetsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[budget_model.Budget]: - async def async_generator(): - async for page in self.pages: - for response in page.budgets: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/services/budget_service/transports/README.rst b/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/services/budget_service/transports/README.rst deleted file mode 100644 index 2dbfa440f9fa..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/services/budget_service/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`BudgetServiceTransport` is the ABC for all transports. -- public child `BudgetServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `BudgetServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseBudgetServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `BudgetServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/services/budget_service/transports/__init__.py b/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/services/budget_service/transports/__init__.py deleted file mode 100644 index 63abbad0dcb0..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/services/budget_service/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import BudgetServiceTransport -from .grpc import BudgetServiceGrpcTransport -from .grpc_asyncio import BudgetServiceGrpcAsyncIOTransport -from .rest import BudgetServiceRestTransport -from .rest import BudgetServiceRestInterceptor - - -# Compile a registry of transports. 
-_transport_registry = OrderedDict() # type: Dict[str, Type[BudgetServiceTransport]] -_transport_registry['grpc'] = BudgetServiceGrpcTransport -_transport_registry['grpc_asyncio'] = BudgetServiceGrpcAsyncIOTransport -_transport_registry['rest'] = BudgetServiceRestTransport - -__all__ = ( - 'BudgetServiceTransport', - 'BudgetServiceGrpcTransport', - 'BudgetServiceGrpcAsyncIOTransport', - 'BudgetServiceRestTransport', - 'BudgetServiceRestInterceptor', -) diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/services/budget_service/transports/base.py b/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/services/budget_service/transports/base.py deleted file mode 100644 index 4243cb28f92b..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/services/budget_service/transports/base.py +++ /dev/null @@ -1,253 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.billing.budgets_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.billing.budgets_v1.types import budget_model -from google.cloud.billing.budgets_v1.types import budget_service -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class BudgetServiceTransport(abc.ABC): - """Abstract transport class for BudgetService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-billing', - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'billingbudgets.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'billingbudgets.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. 
- credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. 
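-        # Each entry pairs an RPC with its default timeout; every call except create_budget also carries a retry policy that backs off on DeadlineExceeded and ServiceUnavailable.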
- self._wrapped_methods = { - self.create_budget: gapic_v1.method.wrap_method( - self.create_budget, - default_timeout=60.0, - client_info=client_info, - ), - self.update_budget: gapic_v1.method.wrap_method( - self.update_budget, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_budget: gapic_v1.method.wrap_method( - self.get_budget, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_budgets: gapic_v1.method.wrap_method( - self.list_budgets, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.delete_budget: gapic_v1.method.wrap_method( - self.delete_budget, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! - """ - raise NotImplementedError() - - @property - def create_budget(self) -> Callable[ - [budget_service.CreateBudgetRequest], - Union[ - budget_model.Budget, - Awaitable[budget_model.Budget] - ]]: - raise NotImplementedError() - - @property - def update_budget(self) -> Callable[ - [budget_service.UpdateBudgetRequest], - Union[ - budget_model.Budget, - Awaitable[budget_model.Budget] - ]]: - raise NotImplementedError() - - @property - def get_budget(self) -> Callable[ - [budget_service.GetBudgetRequest], - Union[ - budget_model.Budget, - Awaitable[budget_model.Budget] - ]]: - raise NotImplementedError() - - @property - def list_budgets(self) -> Callable[ - [budget_service.ListBudgetsRequest], - Union[ - budget_service.ListBudgetsResponse, - Awaitable[budget_service.ListBudgetsResponse] - ]]: - raise NotImplementedError() - - @property - def delete_budget(self) -> Callable[ - [budget_service.DeleteBudgetRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'BudgetServiceTransport', -) diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/services/budget_service/transports/grpc.py b/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/services/budget_service/transports/grpc.py deleted file mode 100644 index 44c5410e06d2..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/services/budget_service/transports/grpc.py +++ /dev/null @@ -1,476 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with 
the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json -import logging as std_logging -import pickle -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore - -from google.cloud.billing.budgets_v1.types import budget_model -from google.cloud.billing.budgets_v1.types import budget_service -from google.protobuf import empty_pb2 # type: ignore -from .base import BudgetServiceTransport, DEFAULT_CLIENT_INFO - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER - def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.billing.budgets.v1.BudgetService", - "rpcName": client_call_details.method, - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - - response = continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = response.trailing_metadata() - # Convert the gRPC trailing metadata into a dict of str values for logging - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = response.result() - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response for {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.billing.budgets.v1.BudgetService", - "rpcName": client_call_details.method, - "response": grpc_response, - "metadata": 
grpc_response["metadata"], - }, - ) - return response - - -class BudgetServiceGrpcTransport(BudgetServiceTransport): - """gRPC backend transport for BudgetService. - - BudgetService stores Cloud Billing budgets, which define a - budget plan and rules to execute as we track spend against that - plan. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'billingbudgets.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'billingbudgets.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional[Sequence[str]]): A list of scopes. This argument is - ignored if a ``channel`` instance is provided. - channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. 
- quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) - - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'billingbudgets.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. 
- credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): An optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def create_budget(self) -> Callable[ - [budget_service.CreateBudgetRequest], - budget_model.Budget]: - r"""Return a callable for the create budget method over gRPC. - - Creates a new budget. See `Quotas and - limits <https://cloud.google.com/billing/quotas>`__ for more - information on the limits of the number of budgets you can - create. - - Returns: - Callable[[~.CreateBudgetRequest], - ~.Budget]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'update_budget' not in self._stubs: - self._stubs['update_budget'] = self._logged_channel.unary_unary( - '/google.cloud.billing.budgets.v1.BudgetService/UpdateBudget', - request_serializer=budget_service.UpdateBudgetRequest.serialize, - response_deserializer=budget_model.Budget.deserialize, - ) - return self._stubs['update_budget'] - - @property - def get_budget(self) -> Callable[ - [budget_service.GetBudgetRequest], - budget_model.Budget]: - r"""Return a callable for the get budget method over gRPC. - - Returns a budget. - - WARNING: There are some fields exposed on the Google - Cloud Console that aren't available on this API. When - reading from the API, you will not see these fields in - the return value, though they may have been set in the - Cloud Console. - - Returns: - Callable[[~.GetBudgetRequest], - ~.Budget]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_budget' not in self._stubs: - self._stubs['get_budget'] = self._logged_channel.unary_unary( - '/google.cloud.billing.budgets.v1.BudgetService/GetBudget', - request_serializer=budget_service.GetBudgetRequest.serialize, - response_deserializer=budget_model.Budget.deserialize, - ) - return self._stubs['get_budget'] - - @property - def list_budgets(self) -> Callable[ - [budget_service.ListBudgetsRequest], - budget_service.ListBudgetsResponse]: - r"""Return a callable for the list budgets method over gRPC. - - Returns a list of budgets for a billing account. - - WARNING: There are some fields exposed on the Google - Cloud Console that aren't available on this API. When - reading from the API, you will not see these fields in - the return value, though they may have been set in the - Cloud Console. - - Returns: - Callable[[~.ListBudgetsRequest], - ~.ListBudgetsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_budgets' not in self._stubs: - self._stubs['list_budgets'] = self._logged_channel.unary_unary( - '/google.cloud.billing.budgets.v1.BudgetService/ListBudgets', - request_serializer=budget_service.ListBudgetsRequest.serialize, - response_deserializer=budget_service.ListBudgetsResponse.deserialize, - ) - return self._stubs['list_budgets'] - - @property - def delete_budget(self) -> Callable[ - [budget_service.DeleteBudgetRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete budget method over gRPC. - - Deletes a budget. Returns successfully if already - deleted. - - Returns: - Callable[[~.DeleteBudgetRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
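-        # The stub is cached in self._stubs, so repeated property access reuses the same callable.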
- if 'delete_budget' not in self._stubs: - self._stubs['delete_budget'] = self._logged_channel.unary_unary( - '/google.cloud.billing.budgets.v1.BudgetService/DeleteBudget', - request_serializer=budget_service.DeleteBudgetRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_budget'] - - def close(self): - self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'BudgetServiceGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/services/budget_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/services/budget_service/transports/grpc_asyncio.py deleted file mode 100644 index aaa3e46e3be8..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/services/budget_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,556 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import inspect -import json -import pickle -import logging as std_logging -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import exceptions as core_exceptions -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.billing.budgets_v1.types import budget_model -from google.cloud.billing.budgets_v1.types import budget_service -from google.protobuf import empty_pb2 # type: ignore -from .base import BudgetServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import BudgetServiceGrpcTransport - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER - async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if 
isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.billing.budgets.v1.BudgetService", - "rpcName": str(client_call_details.method), - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - response = await continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = await response.trailing_metadata() - # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = await response - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response to rpc {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.billing.budgets.v1.BudgetService", - "rpcName": str(client_call_details.method), - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class BudgetServiceGrpcAsyncIOTransport(BudgetServiceTransport): - """gRPC AsyncIO backend transport for BudgetService. - - BudgetService stores Cloud Billing budgets, which define a - budget plan and rules to execute as we track spend against that - plan. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'billingbudgets.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. 
- """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'billingbudgets.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'billingbudgets.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, aio.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def create_budget(self) -> Callable[ - [budget_service.CreateBudgetRequest], - Awaitable[budget_model.Budget]]: - r"""Return a callable for the create budget method over gRPC. - - Creates a new budget. 
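For orientation, a minimal sketch of reaching this RPC through the async client rather than the transport directly; the billing account ID and amount are hypothetical, and the flattened ``parent``/``budget`` signature is assumed from the generated surface.

.. code-block:: python

    from google.cloud.billing import budgets_v1
    from google.type import money_pb2

    async def create_monthly_budget():
        client = budgets_v1.BudgetServiceAsyncClient()
        # Hypothetical billing account; substitute a real one.
        return await client.create_budget(
            parent="billingAccounts/000000-000000-000000",
            budget=budgets_v1.Budget(
                display_name="monthly-cap",
                amount=budgets_v1.BudgetAmount(
                    specified_amount=money_pb2.Money(currency_code="USD", units=500),
                ),
            ),
        )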
See `Quotas and - limits `__ for more - information on the limits of the number of budgets you can - create. - - Returns: - Callable[[~.CreateBudgetRequest], - Awaitable[~.Budget]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_budget' not in self._stubs: - self._stubs['create_budget'] = self._logged_channel.unary_unary( - '/google.cloud.billing.budgets.v1.BudgetService/CreateBudget', - request_serializer=budget_service.CreateBudgetRequest.serialize, - response_deserializer=budget_model.Budget.deserialize, - ) - return self._stubs['create_budget'] - - @property - def update_budget(self) -> Callable[ - [budget_service.UpdateBudgetRequest], - Awaitable[budget_model.Budget]]: - r"""Return a callable for the update budget method over gRPC. - - Updates a budget and returns the updated budget. - - WARNING: There are some fields exposed on the Google - Cloud Console that aren't available on this API. Budget - fields that are not exposed in this API will not be - changed by this method. - - Returns: - Callable[[~.UpdateBudgetRequest], - Awaitable[~.Budget]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_budget' not in self._stubs: - self._stubs['update_budget'] = self._logged_channel.unary_unary( - '/google.cloud.billing.budgets.v1.BudgetService/UpdateBudget', - request_serializer=budget_service.UpdateBudgetRequest.serialize, - response_deserializer=budget_model.Budget.deserialize, - ) - return self._stubs['update_budget'] - - @property - def get_budget(self) -> Callable[ - [budget_service.GetBudgetRequest], - Awaitable[budget_model.Budget]]: - r"""Return a callable for the get budget method over gRPC. - - Returns a budget. - - WARNING: There are some fields exposed on the Google - Cloud Console that aren't available on this API. When - reading from the API, you will not see these fields in - the return value, though they may have been set in the - Cloud Console. - - Returns: - Callable[[~.GetBudgetRequest], - Awaitable[~.Budget]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_budget' not in self._stubs: - self._stubs['get_budget'] = self._logged_channel.unary_unary( - '/google.cloud.billing.budgets.v1.BudgetService/GetBudget', - request_serializer=budget_service.GetBudgetRequest.serialize, - response_deserializer=budget_model.Budget.deserialize, - ) - return self._stubs['get_budget'] - - @property - def list_budgets(self) -> Callable[ - [budget_service.ListBudgetsRequest], - Awaitable[budget_service.ListBudgetsResponse]]: - r"""Return a callable for the list budgets method over gRPC. - - Returns a list of budgets for a billing account. - - WARNING: There are some fields exposed on the Google - Cloud Console that aren't available on this API. When - reading from the API, you will not see these fields in - the return value, though they may have been set in the - Cloud Console. 
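Since ``ListBudgets`` is paginated, the async client wraps this callable in a pager; a hedged usage sketch (account ID hypothetical):

.. code-block:: python

    from google.cloud.billing import budgets_v1

    async def print_budget_names():
        client = budgets_v1.BudgetServiceAsyncClient()
        pager = await client.list_budgets(
            parent="billingAccounts/000000-000000-000000",  # hypothetical
        )
        async for budget in pager:  # fetches subsequent pages transparently
            print(budget.name)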
- - Returns: - Callable[[~.ListBudgetsRequest], - Awaitable[~.ListBudgetsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_budgets' not in self._stubs: - self._stubs['list_budgets'] = self._logged_channel.unary_unary( - '/google.cloud.billing.budgets.v1.BudgetService/ListBudgets', - request_serializer=budget_service.ListBudgetsRequest.serialize, - response_deserializer=budget_service.ListBudgetsResponse.deserialize, - ) - return self._stubs['list_budgets'] - - @property - def delete_budget(self) -> Callable[ - [budget_service.DeleteBudgetRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete budget method over gRPC. - - Deletes a budget. Returns successfully if already - deleted. - - Returns: - Callable[[~.DeleteBudgetRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_budget' not in self._stubs: - self._stubs['delete_budget'] = self._logged_channel.unary_unary( - '/google.cloud.billing.budgets.v1.BudgetService/DeleteBudget', - request_serializer=budget_service.DeleteBudgetRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_budget'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.create_budget: self._wrap_method( - self.create_budget, - default_timeout=60.0, - client_info=client_info, - ), - self.update_budget: self._wrap_method( - self.update_budget, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_budget: self._wrap_method( - self.get_budget, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_budgets: self._wrap_method( - self.list_budgets, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.delete_budget: self._wrap_method( - self.delete_budget, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return 
self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - -__all__ = ( - 'BudgetServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/services/budget_service/transports/rest.py b/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/services/budget_service/transports/rest.py deleted file mode 100644 index 93edf864bae7..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/services/budget_service/transports/rest.py +++ /dev/null @@ -1,1002 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import logging -import json # type: ignore - -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import gapic_v1 - -from google.protobuf import json_format - -from requests import __version__ as requests_version -import dataclasses -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - - -from google.cloud.billing.budgets_v1.types import budget_model -from google.cloud.billing.budgets_v1.types import budget_service -from google.protobuf import empty_pb2 # type: ignore - - -from .rest_base import _BaseBudgetServiceRestTransport -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = logging.getLogger(__name__) - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=f"requests@{requests_version}", -) - - -class BudgetServiceRestInterceptor: - """Interceptor for BudgetService. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the BudgetServiceRestTransport. - - .. 
code-block:: python - class MyCustomBudgetServiceInterceptor(BudgetServiceRestInterceptor): - def pre_create_budget(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_budget(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_budget(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_get_budget(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_budget(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_budgets(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_budgets(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_budget(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_budget(self, response): - logging.log(f"Received response: {response}") - return response - - transport = BudgetServiceRestTransport(interceptor=MyCustomBudgetServiceInterceptor()) - client = BudgetServiceClient(transport=transport) - - - """ - def pre_create_budget(self, request: budget_service.CreateBudgetRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[budget_service.CreateBudgetRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for create_budget - - Override in a subclass to manipulate the request or metadata - before they are sent to the BudgetService server. - """ - return request, metadata - - def post_create_budget(self, response: budget_model.Budget) -> budget_model.Budget: - """Post-rpc interceptor for create_budget - - DEPRECATED. Please use the `post_create_budget_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BudgetService server but before - it is returned to user code. This `post_create_budget` interceptor runs - before the `post_create_budget_with_metadata` interceptor. - """ - return response - - def post_create_budget_with_metadata(self, response: budget_model.Budget, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[budget_model.Budget, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for create_budget - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BudgetService server but before it is returned to user code. - - We recommend only using this `post_create_budget_with_metadata` - interceptor in new development instead of the `post_create_budget` interceptor. - When both interceptors are used, this `post_create_budget_with_metadata` interceptor runs after the - `post_create_budget` interceptor. The (possibly modified) response returned by - `post_create_budget` will be passed to - `post_create_budget_with_metadata`. - """ - return response, metadata - - def pre_delete_budget(self, request: budget_service.DeleteBudgetRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[budget_service.DeleteBudgetRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for delete_budget - - Override in a subclass to manipulate the request or metadata - before they are sent to the BudgetService server. 
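As a concrete (purely illustrative) variant of the pattern shown above, a pre-rpc hook can inject extra request metadata before the call goes out; the header name here is hypothetical.

.. code-block:: python

    from google.cloud.billing import budgets_v1
    from google.cloud.billing.budgets_v1.services.budget_service.transports.rest import (
        BudgetServiceRestInterceptor,
        BudgetServiceRestTransport,
    )

    class HeaderInjectingInterceptor(BudgetServiceRestInterceptor):
        def pre_create_budget(self, request, metadata):
            # metadata is a sequence of (key, value) tuples; append a custom header.
            return request, list(metadata) + [("x-example-header", "1")]

    client = budgets_v1.BudgetServiceClient(
        transport=BudgetServiceRestTransport(interceptor=HeaderInjectingInterceptor())
    )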
- """ - return request, metadata - - def pre_get_budget(self, request: budget_service.GetBudgetRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[budget_service.GetBudgetRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for get_budget - - Override in a subclass to manipulate the request or metadata - before they are sent to the BudgetService server. - """ - return request, metadata - - def post_get_budget(self, response: budget_model.Budget) -> budget_model.Budget: - """Post-rpc interceptor for get_budget - - DEPRECATED. Please use the `post_get_budget_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BudgetService server but before - it is returned to user code. This `post_get_budget` interceptor runs - before the `post_get_budget_with_metadata` interceptor. - """ - return response - - def post_get_budget_with_metadata(self, response: budget_model.Budget, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[budget_model.Budget, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for get_budget - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BudgetService server but before it is returned to user code. - - We recommend only using this `post_get_budget_with_metadata` - interceptor in new development instead of the `post_get_budget` interceptor. - When both interceptors are used, this `post_get_budget_with_metadata` interceptor runs after the - `post_get_budget` interceptor. The (possibly modified) response returned by - `post_get_budget` will be passed to - `post_get_budget_with_metadata`. - """ - return response, metadata - - def pre_list_budgets(self, request: budget_service.ListBudgetsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[budget_service.ListBudgetsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_budgets - - Override in a subclass to manipulate the request or metadata - before they are sent to the BudgetService server. - """ - return request, metadata - - def post_list_budgets(self, response: budget_service.ListBudgetsResponse) -> budget_service.ListBudgetsResponse: - """Post-rpc interceptor for list_budgets - - DEPRECATED. Please use the `post_list_budgets_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BudgetService server but before - it is returned to user code. This `post_list_budgets` interceptor runs - before the `post_list_budgets_with_metadata` interceptor. - """ - return response - - def post_list_budgets_with_metadata(self, response: budget_service.ListBudgetsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[budget_service.ListBudgetsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_budgets - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BudgetService server but before it is returned to user code. - - We recommend only using this `post_list_budgets_with_metadata` - interceptor in new development instead of the `post_list_budgets` interceptor. - When both interceptors are used, this `post_list_budgets_with_metadata` interceptor runs after the - `post_list_budgets` interceptor. The (possibly modified) response returned by - `post_list_budgets` will be passed to - `post_list_budgets_with_metadata`. 
- """ - return response, metadata - - def pre_update_budget(self, request: budget_service.UpdateBudgetRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[budget_service.UpdateBudgetRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for update_budget - - Override in a subclass to manipulate the request or metadata - before they are sent to the BudgetService server. - """ - return request, metadata - - def post_update_budget(self, response: budget_model.Budget) -> budget_model.Budget: - """Post-rpc interceptor for update_budget - - DEPRECATED. Please use the `post_update_budget_with_metadata` - interceptor instead. - - Override in a subclass to read or manipulate the response - after it is returned by the BudgetService server but before - it is returned to user code. This `post_update_budget` interceptor runs - before the `post_update_budget_with_metadata` interceptor. - """ - return response - - def post_update_budget_with_metadata(self, response: budget_model.Budget, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[budget_model.Budget, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_budget - - Override in a subclass to read or manipulate the response or metadata after it - is returned by the BudgetService server but before it is returned to user code. - - We recommend only using this `post_update_budget_with_metadata` - interceptor in new development instead of the `post_update_budget` interceptor. - When both interceptors are used, this `post_update_budget_with_metadata` interceptor runs after the - `post_update_budget` interceptor. The (possibly modified) response returned by - `post_update_budget` will be passed to - `post_update_budget_with_metadata`. - """ - return response, metadata - - -@dataclasses.dataclass -class BudgetServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: BudgetServiceRestInterceptor - - -class BudgetServiceRestTransport(_BaseBudgetServiceRestTransport): - """REST backend synchronous transport for BudgetService. - - BudgetService stores Cloud Billing budgets, which define a - budget plan and rules to execute as we track spend against that - plan. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'billingbudgets.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[BudgetServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'billingbudgets.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. 
- - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. - # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - url_scheme=url_scheme, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or BudgetServiceRestInterceptor() - self._prep_wrapped_messages(client_info) - - class _CreateBudget(_BaseBudgetServiceRestTransport._BaseCreateBudget, BudgetServiceRestStub): - def __hash__(self): - return hash("BudgetServiceRestTransport.CreateBudget") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: budget_service.CreateBudgetRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> budget_model.Budget: - r"""Call the create budget method over HTTP. - - Args: - request (~.budget_service.CreateBudgetRequest): - The request object. Request for CreateBudget - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
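The per-call metadata contract described here (``str`` values, except keys with the ``-bin`` suffix, which carry ``bytes``) can be exercised on any method; an illustrative sketch with hypothetical keys and resource names:

.. code-block:: python

    from google.cloud.billing import budgets_v1

    client = budgets_v1.BudgetServiceClient()
    budget = client.get_budget(
        name="billingAccounts/000000-000000-000000/budgets/abc",  # hypothetical
        metadata=(
            ("x-example-text", "trace-42"),  # plain str value
            ("x-example-bin", b"\x00\x01"),  # `-bin` suffix => bytes value
        ),
    )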
- - Returns: - ~.budget_model.Budget: - A budget is a plan that describes - what you expect to spend on Cloud - projects, plus the rules to execute as - spend is tracked against that plan, (for - example, send an alert when 90% of the - target spend is met). The budget time - period is configurable, with options - such as month (default), quarter, year, - or custom time period. - - """ - - http_options = _BaseBudgetServiceRestTransport._BaseCreateBudget._get_http_options() - - request, metadata = self._interceptor.pre_create_budget(request, metadata) - transcoded_request = _BaseBudgetServiceRestTransport._BaseCreateBudget._get_transcoded_request(http_options, request) - - body = _BaseBudgetServiceRestTransport._BaseCreateBudget._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBudgetServiceRestTransport._BaseCreateBudget._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.billing.budgets_v1.BudgetServiceClient.CreateBudget", - extra = { - "serviceName": "google.cloud.billing.budgets.v1.BudgetService", - "rpcName": "CreateBudget", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BudgetServiceRestTransport._CreateBudget._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = budget_model.Budget() - pb_resp = budget_model.Budget.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_create_budget(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_budget_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = budget_model.Budget.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.billing.budgets_v1.BudgetServiceClient.create_budget", - extra = { - "serviceName": "google.cloud.billing.budgets.v1.BudgetService", - "rpcName": "CreateBudget", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _DeleteBudget(_BaseBudgetServiceRestTransport._BaseDeleteBudget, BudgetServiceRestStub): - def __hash__(self): - return hash("BudgetServiceRestTransport.DeleteBudget") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: budget_service.DeleteBudgetRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): - r"""Call the delete budget method over HTTP. - - Args: - request (~.budget_service.DeleteBudgetRequest): - The request object. Request for DeleteBudget - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
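Errors surface to callers as ``google.api_core`` exception subclasses, produced by ``core_exceptions.from_http_response`` exactly as in the handlers here; a hedged sketch of catching one (resource name hypothetical):

.. code-block:: python

    from google.api_core import exceptions as core_exceptions
    from google.cloud.billing import budgets_v1

    client = budgets_v1.BudgetServiceClient()
    try:
        client.delete_budget(
            name="billingAccounts/000000-000000-000000/budgets/abc",  # hypothetical
        )
    except core_exceptions.PermissionDenied:
        # HTTP 403 is mapped to PermissionDenied by from_http_response.
        ...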
- """ - - http_options = _BaseBudgetServiceRestTransport._BaseDeleteBudget._get_http_options() - - request, metadata = self._interceptor.pre_delete_budget(request, metadata) - transcoded_request = _BaseBudgetServiceRestTransport._BaseDeleteBudget._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBudgetServiceRestTransport._BaseDeleteBudget._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = json_format.MessageToJson(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.billing.budgets_v1.BudgetServiceClient.DeleteBudget", - extra = { - "serviceName": "google.cloud.billing.budgets.v1.BudgetService", - "rpcName": "DeleteBudget", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BudgetServiceRestTransport._DeleteBudget._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _GetBudget(_BaseBudgetServiceRestTransport._BaseGetBudget, BudgetServiceRestStub): - def __hash__(self): - return hash("BudgetServiceRestTransport.GetBudget") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: budget_service.GetBudgetRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> budget_model.Budget: - r"""Call the get budget method over HTTP. - - Args: - request (~.budget_service.GetBudgetRequest): - The request object. Request for GetBudget - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.budget_model.Budget: - A budget is a plan that describes - what you expect to spend on Cloud - projects, plus the rules to execute as - spend is tracked against that plan, (for - example, send an alert when 90% of the - target spend is met). The budget time - period is configurable, with options - such as month (default), quarter, year, - or custom time period. 
- - """ - - http_options = _BaseBudgetServiceRestTransport._BaseGetBudget._get_http_options() - - request, metadata = self._interceptor.pre_get_budget(request, metadata) - transcoded_request = _BaseBudgetServiceRestTransport._BaseGetBudget._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBudgetServiceRestTransport._BaseGetBudget._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.billing.budgets_v1.BudgetServiceClient.GetBudget", - extra = { - "serviceName": "google.cloud.billing.budgets.v1.BudgetService", - "rpcName": "GetBudget", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BudgetServiceRestTransport._GetBudget._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = budget_model.Budget() - pb_resp = budget_model.Budget.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_get_budget(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_budget_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = budget_model.Budget.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.billing.budgets_v1.BudgetServiceClient.get_budget", - extra = { - "serviceName": "google.cloud.billing.budgets.v1.BudgetService", - "rpcName": "GetBudget", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _ListBudgets(_BaseBudgetServiceRestTransport._BaseListBudgets, BudgetServiceRestStub): - def __hash__(self): - return hash("BudgetServiceRestTransport.ListBudgets") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - return response - - def __call__(self, - request: budget_service.ListBudgetsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> budget_service.ListBudgetsResponse: - r"""Call the list budgets method over HTTP. 
- - Args: - request (~.budget_service.ListBudgetsRequest): - The request object. Request for ListBudgets - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.budget_service.ListBudgetsResponse: - Response for ListBudgets - """ - - http_options = _BaseBudgetServiceRestTransport._BaseListBudgets._get_http_options() - - request, metadata = self._interceptor.pre_list_budgets(request, metadata) - transcoded_request = _BaseBudgetServiceRestTransport._BaseListBudgets._get_transcoded_request(http_options, request) - - # Jsonify the query params - query_params = _BaseBudgetServiceRestTransport._BaseListBudgets._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.billing.budgets_v1.BudgetServiceClient.ListBudgets", - extra = { - "serviceName": "google.cloud.billing.budgets.v1.BudgetService", - "rpcName": "ListBudgets", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BudgetServiceRestTransport._ListBudgets._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = budget_service.ListBudgetsResponse() - pb_resp = budget_service.ListBudgetsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_list_budgets(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_budgets_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = budget_service.ListBudgetsResponse.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.billing.budgets_v1.BudgetServiceClient.list_budgets", - extra = { - "serviceName": "google.cloud.billing.budgets.v1.BudgetService", - "rpcName": "ListBudgets", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - class _UpdateBudget(_BaseBudgetServiceRestTransport._BaseUpdateBudget, BudgetServiceRestStub): - def __hash__(self): - return hash("BudgetServiceRestTransport.UpdateBudget") - - @staticmethod - def _get_response( - host, - metadata, - query_params, - session, - timeout, - transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(session, method)( - "{host}{uri}".format(host=host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - return response - - def __call__(self, - request: budget_service.UpdateBudgetRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> budget_model.Budget: - r"""Call the update budget method over HTTP. - - Args: - request (~.budget_service.UpdateBudgetRequest): - The request object. Request for UpdateBudget - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.budget_model.Budget: - A budget is a plan that describes - what you expect to spend on Cloud - projects, plus the rules to execute as - spend is tracked against that plan, (for - example, send an alert when 90% of the - target spend is met). The budget time - period is configurable, with options - such as month (default), quarter, year, - or custom time period. 
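Because ``UpdateBudget`` maps to a ``PATCH`` rule, callers typically send a field mask so unrelated fields stay untouched; a hedged sketch (resource name hypothetical):

.. code-block:: python

    from google.protobuf import field_mask_pb2
    from google.cloud.billing import budgets_v1

    client = budgets_v1.BudgetServiceClient()
    budget = client.get_budget(
        name="billingAccounts/000000-000000-000000/budgets/abc",  # hypothetical
    )
    budget.display_name = "renamed-budget"
    client.update_budget(
        budget=budget,
        update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
    )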
- - """ - - http_options = _BaseBudgetServiceRestTransport._BaseUpdateBudget._get_http_options() - - request, metadata = self._interceptor.pre_update_budget(request, metadata) - transcoded_request = _BaseBudgetServiceRestTransport._BaseUpdateBudget._get_transcoded_request(http_options, request) - - body = _BaseBudgetServiceRestTransport._BaseUpdateBudget._get_request_body_json(transcoded_request) - - # Jsonify the query params - query_params = _BaseBudgetServiceRestTransport._BaseUpdateBudget._get_query_params_json(transcoded_request) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] - try: - request_payload = type(request).to_json(request) - except: - request_payload = None - http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), - } - _LOGGER.debug( - f"Sending request for google.cloud.billing.budgets_v1.BudgetServiceClient.UpdateBudget", - extra = { - "serviceName": "google.cloud.billing.budgets.v1.BudgetService", - "rpcName": "UpdateBudget", - "httpRequest": http_request, - "metadata": http_request["headers"], - }, - ) - - # Send the request - response = BudgetServiceRestTransport._UpdateBudget._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = budget_model.Budget() - pb_resp = budget_model.Budget.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - - resp = self._interceptor.post_update_budget(resp) - response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_budget_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - try: - response_payload = budget_model.Budget.to_json(response) - except: - response_payload = None - http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, - } - _LOGGER.debug( - "Received response for google.cloud.billing.budgets_v1.BudgetServiceClient.update_budget", - extra = { - "serviceName": "google.cloud.billing.budgets.v1.BudgetService", - "rpcName": "UpdateBudget", - "metadata": http_response["headers"], - "httpResponse": http_response, - }, - ) - return resp - - @property - def create_budget(self) -> Callable[ - [budget_service.CreateBudgetRequest], - budget_model.Budget]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateBudget(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_budget(self) -> Callable[ - [budget_service.DeleteBudgetRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._DeleteBudget(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_budget(self) -> Callable[ - [budget_service.GetBudgetRequest], - budget_model.Budget]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetBudget(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_budgets(self) -> Callable[ - [budget_service.ListBudgetsRequest], - budget_service.ListBudgetsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListBudgets(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_budget(self) -> Callable[ - [budget_service.UpdateBudgetRequest], - budget_model.Budget]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateBudget(self._session, self._host, self._interceptor) # type: ignore - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'BudgetServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/services/budget_service/transports/rest_base.py b/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/services/budget_service/transports/rest_base.py deleted file mode 100644 index 2c43ac82995b..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/services/budget_service/transports/rest_base.py +++ /dev/null @@ -1,298 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import json # type: ignore -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from .base import BudgetServiceTransport, DEFAULT_CLIENT_INFO - -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union - - -from google.cloud.billing.budgets_v1.types import budget_model -from google.cloud.billing.budgets_v1.types import budget_service -from google.protobuf import empty_pb2 # type: ignore - - -class _BaseBudgetServiceRestTransport(BudgetServiceTransport): - """Base REST backend transport for BudgetService. - - Note: This class is not meant to be used directly. Use its sync and - async sub-classes instead. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
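The per-method helpers in this base class all funnel through ``path_template.transcode``, which turns an ``http`` rule plus request fields into a concrete URI, body, and query params; a standalone sketch with dummy values (the expected output shape is an assumption about ``transcode``'s dict form):

.. code-block:: python

    from google.api_core import path_template

    http_options = [{
        "method": "post",
        "uri": "/v1/{parent=billingAccounts/*}/budgets",
        "body": "budget",
    }]
    transcoded = path_template.transcode(
        http_options,
        parent="billingAccounts/000000-000000-000000",  # hypothetical
        budget={"display_name": "demo"},
    )
    # Roughly: {"method": "post",
    #           "uri": "/v1/billingAccounts/000000-000000-000000/budgets",
    #           "body": {"display_name": "demo"},
    #           "query_params": {}}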
- - It sends JSON representations of protocol buffers over HTTP/1.1 - """ - - def __init__(self, *, - host: str = 'billingbudgets.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - Args: - host (Optional[str]): - The hostname to connect to (default: 'billingbudgets.googleapis.com'). - credentials (Optional[Any]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - - class _BaseCreateBudget: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=billingAccounts/*}/budgets', - 'body': 'budget', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = budget_service.CreateBudgetRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBudgetServiceRestTransport._BaseCreateBudget._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseDeleteBudget: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in 
message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=billingAccounts/*/budgets/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = budget_service.DeleteBudgetRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBudgetServiceRestTransport._BaseDeleteBudget._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseGetBudget: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=billingAccounts/*/budgets/*}', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = budget_service.GetBudgetRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBudgetServiceRestTransport._BaseGetBudget._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseListBudgets: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=billingAccounts/*}/budgets', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = budget_service.ListBudgetsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBudgetServiceRestTransport._BaseListBudgets._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - class _BaseUpdateBudget: - def __hash__(self): # pragma: NO COVER - return NotImplementedError("__hash__ must be implemented.") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - @staticmethod - def 
_get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{budget.name=billingAccounts/*/budgets/*}', - 'body': 'budget', - }, - ] - return http_options - - @staticmethod - def _get_transcoded_request(http_options, request): - pb_request = budget_service.UpdateBudgetRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - return transcoded_request - - @staticmethod - def _get_request_body_json(transcoded_request): - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=True - ) - return body - @staticmethod - def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=True, - )) - query_params.update(_BaseBudgetServiceRestTransport._BaseUpdateBudget._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" - return query_params - - -__all__=( - '_BaseBudgetServiceRestTransport', -) diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/types/__init__.py b/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/types/__init__.py deleted file mode 100644 index 632ea73525b1..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/types/__init__.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .budget_model import ( - Budget, - BudgetAmount, - CustomPeriod, - Filter, - LastPeriodAmount, - NotificationsRule, - ThresholdRule, - CalendarPeriod, -) -from .budget_service import ( - CreateBudgetRequest, - DeleteBudgetRequest, - GetBudgetRequest, - ListBudgetsRequest, - ListBudgetsResponse, - UpdateBudgetRequest, -) - -__all__ = ( - 'Budget', - 'BudgetAmount', - 'CustomPeriod', - 'Filter', - 'LastPeriodAmount', - 'NotificationsRule', - 'ThresholdRule', - 'CalendarPeriod', - 'CreateBudgetRequest', - 'DeleteBudgetRequest', - 'GetBudgetRequest', - 'ListBudgetsRequest', - 'ListBudgetsResponse', - 'UpdateBudgetRequest', -) diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/types/budget_model.py b/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/types/budget_model.py deleted file mode 100644 index 1cb1a80c0e82..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/types/budget_model.py +++ /dev/null @@ -1,561 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import struct_pb2 # type: ignore -from google.type import date_pb2 # type: ignore -from google.type import money_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.billing.budgets.v1', - manifest={ - 'CalendarPeriod', - 'Budget', - 'BudgetAmount', - 'LastPeriodAmount', - 'ThresholdRule', - 'NotificationsRule', - 'Filter', - 'CustomPeriod', - }, -) - - -class CalendarPeriod(proto.Enum): - r"""A ``CalendarPeriod`` represents the abstract concept of a time - period that has a canonical start. Grammatically, "the start of the - current ``CalendarPeriod``". All calendar times begin at 12 AM US - and Canadian Pacific Time (UTC-8). - - Values: - CALENDAR_PERIOD_UNSPECIFIED (0): - Calendar period is unset. This is the default - if the budget is for a custom time period - (CustomPeriod). - MONTH (1): - A month. Month starts on the first day of - each month, such as January 1, February 1, March - 1, and so on. - QUARTER (2): - A quarter. Quarters start on dates January 1, - April 1, July 1, and October 1 of each year. - YEAR (3): - A year. Year starts on January 1. - """ - CALENDAR_PERIOD_UNSPECIFIED = 0 - MONTH = 1 - QUARTER = 2 - YEAR = 3 - - -class Budget(proto.Message): - r"""A budget is a plan that describes what you expect to spend on - Cloud projects, plus the rules to execute as spend is tracked - against that plan, (for example, send an alert when 90% of the - target spend is met). The budget time period is configurable, - with options such as month (default), quarter, year, or custom - time period. - - Attributes: - name (str): - Output only. Resource name of the budget. The resource name - implies the scope of a budget. Values are of the form - ``billingAccounts/{billingAccountId}/budgets/{budgetId}``. - display_name (str): - User data for display name in UI. The name - must be less than or equal to 60 characters. - budget_filter (google.cloud.billing.budgets_v1.types.Filter): - Optional. Filters that define which resources - are used to compute the actual spend against the - budget amount, such as projects, services, and - the budget's time period, as well as other - filters. - amount (google.cloud.billing.budgets_v1.types.BudgetAmount): - Required. Budgeted amount. - threshold_rules (MutableSequence[google.cloud.billing.budgets_v1.types.ThresholdRule]): - Optional. Rules that trigger alerts (notifications of - thresholds being crossed) when spend exceeds the specified - percentages of the budget. - - Optional for ``pubsubTopic`` notifications. - - Required if using email notifications. - notifications_rule (google.cloud.billing.budgets_v1.types.NotificationsRule): - Optional. Rules to apply to notifications - sent based on budget spend and thresholds. - etag (str): - Optional. Etag to validate that the object is - unchanged for a read-modify-write operation. - An empty etag causes an update to overwrite - other changes. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - budget_filter: 'Filter' = proto.Field( - proto.MESSAGE, - number=3, - message='Filter', - ) - amount: 'BudgetAmount' = proto.Field( - proto.MESSAGE, - number=4, - message='BudgetAmount', - ) - threshold_rules: MutableSequence['ThresholdRule'] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='ThresholdRule', - ) - notifications_rule: 'NotificationsRule' = proto.Field( - proto.MESSAGE, - number=6, - message='NotificationsRule', - ) - etag: str = proto.Field( - proto.STRING, - number=7, - ) - - -class BudgetAmount(proto.Message): - r"""The budgeted amount for each usage period. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - specified_amount (google.type.money_pb2.Money): - A specified amount to use as the budget. ``currency_code`` - is optional. If specified when creating a budget, it must - match the currency of the billing account. If specified when - updating a budget, it must match the currency_code of the - existing budget. The ``currency_code`` is provided on - output. - - This field is a member of `oneof`_ ``budget_amount``. - last_period_amount (google.cloud.billing.budgets_v1.types.LastPeriodAmount): - Use the last period's actual spend as the budget for the - present period. LastPeriodAmount can only be set when the - budget's time period is a - [Filter.calendar_period][google.cloud.billing.budgets.v1.Filter.calendar_period]. - It cannot be set in combination with - [Filter.custom_period][google.cloud.billing.budgets.v1.Filter.custom_period]. - - This field is a member of `oneof`_ ``budget_amount``. - """ - - specified_amount: money_pb2.Money = proto.Field( - proto.MESSAGE, - number=1, - oneof='budget_amount', - message=money_pb2.Money, - ) - last_period_amount: 'LastPeriodAmount' = proto.Field( - proto.MESSAGE, - number=2, - oneof='budget_amount', - message='LastPeriodAmount', - ) - - -class LastPeriodAmount(proto.Message): - r"""Describes a budget amount targeted to the last - [Filter.calendar_period][google.cloud.billing.budgets.v1.Filter.calendar_period] - spend. At this time, the amount is automatically 100% of the last - calendar period's spend; that is, there are no other options yet. - LastPeriodAmount cannot be set for a budget configured with a - [Filter.custom_period][google.cloud.billing.budgets.v1.Filter.custom_period]. - - """ - - -class ThresholdRule(proto.Message): - r"""ThresholdRule contains the definition of a threshold. Threshold - rules define the triggering events used to generate a budget - notification email. When a threshold is crossed (spend exceeds the - specified percentages of the budget), budget alert emails are sent - to the email recipients you specify in the - `NotificationsRule <#notificationsrule>`__. - - Threshold rules also affect the fields included in the `JSON data - object `__ - sent to a Pub/Sub topic. - - Threshold rules are *required* if using email notifications. - - Threshold rules are *optional* if only setting a ```pubsubTopic`` - NotificationsRule <#NotificationsRule>`__, unless you want your JSON - data object to include data about the thresholds you set. 
- - For more information, see `set budget threshold rules and - actions `__. - - Attributes: - threshold_percent (float): - Required. Send an alert when this threshold - is exceeded. This is a 1.0-based percentage, so - 0.5 = 50%. Validation: non-negative number. - spend_basis (google.cloud.billing.budgets_v1.types.ThresholdRule.Basis): - Optional. The type of basis used to determine if spend has - passed the threshold. Behavior defaults to CURRENT_SPEND if - not set. - """ - class Basis(proto.Enum): - r"""The type of basis used to determine if spend has passed the - threshold. - - Values: - BASIS_UNSPECIFIED (0): - Unspecified threshold basis. - CURRENT_SPEND (1): - Use current spend as the basis for comparison - against the threshold. - FORECASTED_SPEND (2): - Use forecasted spend for the period as the basis for - comparison against the threshold. FORECASTED_SPEND can only - be set when the budget's time period is a - [Filter.calendar_period][google.cloud.billing.budgets.v1.Filter.calendar_period]. - It cannot be set in combination with - [Filter.custom_period][google.cloud.billing.budgets.v1.Filter.custom_period]. - """ - BASIS_UNSPECIFIED = 0 - CURRENT_SPEND = 1 - FORECASTED_SPEND = 2 - - threshold_percent: float = proto.Field( - proto.DOUBLE, - number=1, - ) - spend_basis: Basis = proto.Field( - proto.ENUM, - number=2, - enum=Basis, - ) - - -class NotificationsRule(proto.Message): - r"""NotificationsRule defines notifications that are sent based - on budget spend and thresholds. - - Attributes: - pubsub_topic (str): - Optional. The name of the Pub/Sub topic where budget-related - messages are published, in the form - ``projects/{project_id}/topics/{topic_id}``. Updates are - sent to the topic at regular intervals; the timing of the - updates is not dependent on the `threshold - rules <#thresholdrule>`__ you've set. - - Note that if you want your `Pub/Sub JSON - object `__ - to contain data for ``alertThresholdExceeded``, you need at - least one `alert threshold rule <#thresholdrule>`__. When - you set threshold rules, you must also enable at least one - of the email notification options, either using the default - IAM recipients or Cloud Monitoring email notification - channels. - - To use Pub/Sub topics with budgets, you must do the - following: - - 1. Create the Pub/Sub topic before connecting it to your - budget. For guidance, see `Manage programmatic budget - alert - notifications `__. - - 2. Grant the API caller the ``pubsub.topics.setIamPolicy`` - permission on the Pub/Sub topic. If not set, the API call - fails with PERMISSION_DENIED. For additional details on - Pub/Sub roles and permissions, see `Permissions required - for this - task `__. - schema_version (str): - Optional. Required when - [NotificationsRule.pubsub_topic][google.cloud.billing.budgets.v1.NotificationsRule.pubsub_topic] - is set. The schema version of the notification sent to - [NotificationsRule.pubsub_topic][google.cloud.billing.budgets.v1.NotificationsRule.pubsub_topic]. - Only "1.0" is accepted. It represents the JSON schema as - defined in - https://cloud.google.com/billing/docs/how-to/budgets-programmatic-notifications#notification_format. - monitoring_notification_channels (MutableSequence[str]): - Optional. Email targets to send notifications to when a - threshold is exceeded. This is in addition to the - ``DefaultIamRecipients`` who receive alert emails based on - their billing account IAM role. 
The value is the full REST - resource name of a Cloud Monitoring email notification - channel with the form - ``projects/{project_id}/notificationChannels/{channel_id}``. - A maximum of 5 email notifications are allowed. - - To customize budget alert email recipients with monitoring - notification channels, you *must create the monitoring - notification channels before you link them to a budget*. For - guidance on setting up notification channels to use with - budgets, see `Customize budget alert email - recipients `__. - - For Cloud Billing budget alerts, you *must use email - notification channels*. The other types of notification - channels are *not* supported, such as Slack, SMS, or - PagerDuty. If you want to `send budget notifications to - Slack `__, - use a pubsubTopic and configure `programmatic - notifications `__. - disable_default_iam_recipients (bool): - Optional. When set to true, disables default - notifications sent when a threshold is exceeded. - Default notifications are sent to those with - Billing Account Administrator and Billing - Account User IAM roles for the target account. - enable_project_level_recipients (bool): - Optional. When set to true, and when the budget has a single - project configured, notifications will be sent to project - level recipients of that project. This field will be ignored - if the budget has multiple or no project configured. - - Currently, project level recipients are the users with - ``Owner`` role on a cloud project. - """ - - pubsub_topic: str = proto.Field( - proto.STRING, - number=1, - ) - schema_version: str = proto.Field( - proto.STRING, - number=2, - ) - monitoring_notification_channels: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - disable_default_iam_recipients: bool = proto.Field( - proto.BOOL, - number=4, - ) - enable_project_level_recipients: bool = proto.Field( - proto.BOOL, - number=5, - ) - - -class Filter(proto.Message): - r"""A filter for a budget, limiting the scope of the cost to - calculate. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - projects (MutableSequence[str]): - Optional. A set of projects of the form - ``projects/{project}``, specifying that usage from only this - set of projects should be included in the budget. If - omitted, the report includes all usage for the billing - account, regardless of which project the usage occurred on. - resource_ancestors (MutableSequence[str]): - Optional. A set of folder and organization names of the form - ``folders/{folderId}`` or - ``organizations/{organizationId}``, specifying that usage - from only this set of folders and organizations should be - included in the budget. If omitted, the budget includes all - usage that the billing account pays for. If the folder or - organization contains projects that are paid for by a - different Cloud Billing account, the budget *doesn't* apply - to those projects. - credit_types (MutableSequence[str]): - Optional. If - [Filter.credit_types_treatment][google.cloud.billing.budgets.v1.Filter.credit_types_treatment] - is INCLUDE_SPECIFIED_CREDITS, this is a list of credit types - to be subtracted from gross cost to determine the spend for - threshold calculations. 
See `a list of acceptable credit - type - values `__. - - If - [Filter.credit_types_treatment][google.cloud.billing.budgets.v1.Filter.credit_types_treatment] - is **not** INCLUDE_SPECIFIED_CREDITS, this field must be - empty. - credit_types_treatment (google.cloud.billing.budgets_v1.types.Filter.CreditTypesTreatment): - Optional. If not set, default behavior is - ``INCLUDE_ALL_CREDITS``. - services (MutableSequence[str]): - Optional. A set of services of the form - ``services/{service_id}``, specifying that usage from only - this set of services should be included in the budget. If - omitted, the report includes usage for all the services. The - service names are available through the Catalog API: - https://cloud.google.com/billing/v1/how-tos/catalog-api. - subaccounts (MutableSequence[str]): - Optional. A set of subaccounts of the form - ``billingAccounts/{account_id}``, specifying that usage from - only this set of subaccounts should be included in the - budget. If a subaccount is set to the name of the parent - account, usage from the parent account is included. If the - field is omitted, the report includes usage from the parent - account and all subaccounts, if they exist. - labels (MutableMapping[str, google.protobuf.struct_pb2.ListValue]): - Optional. A single label and value pair specifying that - usage from only this set of labeled resources should be - included in the budget. If omitted, the report includes all - labeled and unlabeled usage. - - An object containing a single ``"key": value`` pair. - Example: ``{ "name": "wrench" }``. - - *Currently, multiple entries or multiple values per entry - are not allowed.* - calendar_period (google.cloud.billing.budgets_v1.types.CalendarPeriod): - Optional. Specifies to track usage for - recurring calendar period. For example, assume - that CalendarPeriod.QUARTER is set. The budget - tracks usage from April 1 to June 30, when the - current calendar month is April, May, June. - After that, it tracks usage from July 1 to - September 30 when the current calendar month is - July, August, September, so on. - - This field is a member of `oneof`_ ``usage_period``. - custom_period (google.cloud.billing.budgets_v1.types.CustomPeriod): - Optional. Specifies to track usage from any - start date (required) to any end date - (optional). This time period is static, it does - not recur. - - This field is a member of `oneof`_ ``usage_period``. - """ - class CreditTypesTreatment(proto.Enum): - r"""Specifies how credits are applied when determining the spend for - threshold calculations. Budgets track the total cost minus any - applicable selected credits. `See the documentation for a list of - credit - types `__. - - Values: - CREDIT_TYPES_TREATMENT_UNSPECIFIED (0): - No description available. - INCLUDE_ALL_CREDITS (1): - All types of credit are subtracted from the - gross cost to determine the spend for threshold - calculations. - EXCLUDE_ALL_CREDITS (2): - All types of credit are added to the net cost - to determine the spend for threshold - calculations. - INCLUDE_SPECIFIED_CREDITS (3): - `Credit - types `__ - specified in the credit_types field are subtracted from the - gross cost to determine the spend for threshold - calculations. 
- """ - CREDIT_TYPES_TREATMENT_UNSPECIFIED = 0 - INCLUDE_ALL_CREDITS = 1 - EXCLUDE_ALL_CREDITS = 2 - INCLUDE_SPECIFIED_CREDITS = 3 - - projects: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - resource_ancestors: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - credit_types: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=7, - ) - credit_types_treatment: CreditTypesTreatment = proto.Field( - proto.ENUM, - number=4, - enum=CreditTypesTreatment, - ) - services: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - subaccounts: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=5, - ) - labels: MutableMapping[str, struct_pb2.ListValue] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=6, - message=struct_pb2.ListValue, - ) - calendar_period: 'CalendarPeriod' = proto.Field( - proto.ENUM, - number=8, - oneof='usage_period', - enum='CalendarPeriod', - ) - custom_period: 'CustomPeriod' = proto.Field( - proto.MESSAGE, - number=9, - oneof='usage_period', - message='CustomPeriod', - ) - - -class CustomPeriod(proto.Message): - r"""All date times begin at 12 AM US and Canadian Pacific Time - (UTC-8). - - Attributes: - start_date (google.type.date_pb2.Date): - Required. The start date must be after - January 1, 2017. - end_date (google.type.date_pb2.Date): - Optional. The end date of the time period. Budgets with - elapsed end date won't be processed. If unset, specifies to - track all usage incurred since the start_date. - """ - - start_date: date_pb2.Date = proto.Field( - proto.MESSAGE, - number=1, - message=date_pb2.Date, - ) - end_date: date_pb2.Date = proto.Field( - proto.MESSAGE, - number=2, - message=date_pb2.Date, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/types/budget_service.py b/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/types/budget_service.py deleted file mode 100644 index e9e7de41608d..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/google/cloud/billing/budgets_v1/types/budget_service.py +++ /dev/null @@ -1,199 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.billing.budgets_v1.types import budget_model -from google.protobuf import field_mask_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.billing.budgets.v1', - manifest={ - 'CreateBudgetRequest', - 'UpdateBudgetRequest', - 'GetBudgetRequest', - 'ListBudgetsRequest', - 'ListBudgetsResponse', - 'DeleteBudgetRequest', - }, -) - - -class CreateBudgetRequest(proto.Message): - r"""Request for CreateBudget - - Attributes: - parent (str): - Required. The name of the billing account to create the - budget in. 
Values are of the form - ``billingAccounts/{billingAccountId}``. - budget (google.cloud.billing.budgets_v1.types.Budget): - Required. Budget to create. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - budget: budget_model.Budget = proto.Field( - proto.MESSAGE, - number=2, - message=budget_model.Budget, - ) - - -class UpdateBudgetRequest(proto.Message): - r"""Request for UpdateBudget - - Attributes: - budget (google.cloud.billing.budgets_v1.types.Budget): - Required. The updated budget object. - The budget to update is specified by the budget - name in the budget. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Optional. Indicates which fields in the provided budget to - update. Read-only fields (such as ``name``) cannot be - changed. If this is not provided, then only fields with - non-default values from the request are updated. See - https://developers.google.com/protocol-buffers/docs/proto3#default - for more details about default values. - """ - - budget: budget_model.Budget = proto.Field( - proto.MESSAGE, - number=1, - message=budget_model.Budget, - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class GetBudgetRequest(proto.Message): - r"""Request for GetBudget - - Attributes: - name (str): - Required. Name of budget to get. Values are of the form - ``billingAccounts/{billingAccountId}/budgets/{budgetId}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListBudgetsRequest(proto.Message): - r"""Request for ListBudgets - - Attributes: - parent (str): - Required. Name of billing account to list budgets under. - Values are of the form - ``billingAccounts/{billingAccountId}``. - scope (str): - Optional. Set the scope of the budgets to be - returned, in the format of the resource name. - The scope of a budget is the cost that it - tracks, such as costs for a single project, or - the costs for all projects in a folder. Only - project scope (in the format of - "projects/project-id" or "projects/123") is - supported in this field. When this field is set - to a project's resource name, the budgets - returned are tracking the costs for that - project. - page_size (int): - Optional. The maximum number of budgets to - return per page. The default and maximum value - are 100. - page_token (str): - Optional. The value returned by the last - ``ListBudgetsResponse`` which indicates that this is a - continuation of a prior ``ListBudgets`` call, and that the - system should return the next page of data. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - scope: str = proto.Field( - proto.STRING, - number=4, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListBudgetsResponse(proto.Message): - r"""Response for ListBudgets - - Attributes: - budgets (MutableSequence[google.cloud.billing.budgets_v1.types.Budget]): - List of the budgets owned by the requested - billing account. - next_page_token (str): - If not empty, indicates that there may be more budgets that - match the request; this value should be passed in a new - ``ListBudgetsRequest``. 
- """ - - @property - def raw_page(self): - return self - - budgets: MutableSequence[budget_model.Budget] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=budget_model.Budget, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteBudgetRequest(proto.Message): - r"""Request for DeleteBudget - - Attributes: - name (str): - Required. Name of the budget to delete. Values are of the - form - ``billingAccounts/{billingAccountId}/budgets/{budgetId}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/mypy.ini b/owl-bot-staging/google-cloud-billing-budgets/v1/mypy.ini deleted file mode 100644 index 574c5aed394b..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/noxfile.py b/owl-bot-staging/google-cloud-billing-budgets/v1/noxfile.py deleted file mode 100644 index c7cdc31abf2b..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/noxfile.py +++ /dev/null @@ -1,280 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import pathlib -import re -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", - "3.12", - "3.13", -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = 'google-cloud-billing-budgets' - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.13" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "prerelease_deps", -] - -@nox.session(python=ALL_PYTHON) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def unit(session, protobuf_implementation): - """Run the unit test suite.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") - - # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. - # The 'cpp' implementation requires Protobuf<4. 
- if protobuf_implementation == "cpp": - session.install("protobuf<4") - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/billing/budgets_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - -@nox.session(python=ALL_PYTHON[-1]) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def prerelease_deps(session, protobuf_implementation): - """Run the unit test suite against pre-release versions of dependencies.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - # Install test environment dependencies - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - - # Install the package without dependencies - session.install('-e', '.', '--no-deps') - - # We test the minimum dependency versions using the minimum Python - # version so the lowest python runtime that we test has a corresponding constraints - # file, located at `testing/constraints--.txt`, which contains all of the - # dependencies and extras. - with open( - CURRENT_DIRECTORY - / "testing" - / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. - constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - session.install(*constraints_deps) - - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 - "grpcio!=1.67.0rc1", - "grpcio-status", - "protobuf", - "proto-plus", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) - - # Remaining dependencies - other_deps = [ - "requests", - ] - session.install(*other_deps) - - # Print out prerelease package versions - - session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") - session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("python", "-c", "import grpc; print(grpc.__version__)") - session.run( - "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" - ) - session.run( - "python", "-c", "import proto; print(proto.__version__)" - ) - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/billing/budgets_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. 
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/samples/generated_samples/billingbudgets_v1_generated_budget_service_create_budget_async.py b/owl-bot-staging/google-cloud-billing-budgets/v1/samples/generated_samples/billingbudgets_v1_generated_budget_service_create_budget_async.py deleted file mode 100644 index dcaea1e519f6..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/samples/generated_samples/billingbudgets_v1_generated_budget_service_create_budget_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateBudget -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-billing-budgets - - -# [START billingbudgets_v1_generated_BudgetService_CreateBudget_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.billing import budgets_v1 - - -async def sample_create_budget(): - # Create a client - client = budgets_v1.BudgetServiceAsyncClient() - - # Initialize request argument(s) - request = budgets_v1.CreateBudgetRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_budget(request=request) - - # Handle the response - print(response) - -# [END billingbudgets_v1_generated_BudgetService_CreateBudget_async] diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/samples/generated_samples/billingbudgets_v1_generated_budget_service_create_budget_sync.py b/owl-bot-staging/google-cloud-billing-budgets/v1/samples/generated_samples/billingbudgets_v1_generated_budget_service_create_budget_sync.py deleted file mode 100644 index 417b337e850c..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/samples/generated_samples/billingbudgets_v1_generated_budget_service_create_budget_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateBudget -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-billing-budgets - - -# [START billingbudgets_v1_generated_BudgetService_CreateBudget_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.billing import budgets_v1 - - -def sample_create_budget(): - # Create a client - client = budgets_v1.BudgetServiceClient() - - # Initialize request argument(s) - request = budgets_v1.CreateBudgetRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_budget(request=request) - - # Handle the response - print(response) - -# [END billingbudgets_v1_generated_BudgetService_CreateBudget_sync] diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/samples/generated_samples/billingbudgets_v1_generated_budget_service_delete_budget_async.py b/owl-bot-staging/google-cloud-billing-budgets/v1/samples/generated_samples/billingbudgets_v1_generated_budget_service_delete_budget_async.py deleted file mode 100644 index 26bbb71776c1..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/samples/generated_samples/billingbudgets_v1_generated_budget_service_delete_budget_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteBudget -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-billing-budgets - - -# [START billingbudgets_v1_generated_BudgetService_DeleteBudget_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.billing import budgets_v1 - - -async def sample_delete_budget(): - # Create a client - client = budgets_v1.BudgetServiceAsyncClient() - - # Initialize request argument(s) - request = budgets_v1.DeleteBudgetRequest( - name="name_value", - ) - - # Make the request - await client.delete_budget(request=request) - - -# [END billingbudgets_v1_generated_BudgetService_DeleteBudget_async] diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/samples/generated_samples/billingbudgets_v1_generated_budget_service_delete_budget_sync.py b/owl-bot-staging/google-cloud-billing-budgets/v1/samples/generated_samples/billingbudgets_v1_generated_budget_service_delete_budget_sync.py deleted file mode 100644 index 2e9f72c9122c..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/samples/generated_samples/billingbudgets_v1_generated_budget_service_delete_budget_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteBudget -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-billing-budgets - - -# [START billingbudgets_v1_generated_BudgetService_DeleteBudget_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.billing import budgets_v1 - - -def sample_delete_budget(): - # Create a client - client = budgets_v1.BudgetServiceClient() - - # Initialize request argument(s) - request = budgets_v1.DeleteBudgetRequest( - name="name_value", - ) - - # Make the request - client.delete_budget(request=request) - - -# [END billingbudgets_v1_generated_BudgetService_DeleteBudget_sync] diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/samples/generated_samples/billingbudgets_v1_generated_budget_service_get_budget_async.py b/owl-bot-staging/google-cloud-billing-budgets/v1/samples/generated_samples/billingbudgets_v1_generated_budget_service_get_budget_async.py deleted file mode 100644 index edb208b79f9b..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/samples/generated_samples/billingbudgets_v1_generated_budget_service_get_budget_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetBudget -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-billing-budgets - - -# [START billingbudgets_v1_generated_BudgetService_GetBudget_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.billing import budgets_v1 - - -async def sample_get_budget(): - # Create a client - client = budgets_v1.BudgetServiceAsyncClient() - - # Initialize request argument(s) - request = budgets_v1.GetBudgetRequest( - name="name_value", - ) - - # Make the request - response = await client.get_budget(request=request) - - # Handle the response - print(response) - -# [END billingbudgets_v1_generated_BudgetService_GetBudget_async] diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/samples/generated_samples/billingbudgets_v1_generated_budget_service_get_budget_sync.py b/owl-bot-staging/google-cloud-billing-budgets/v1/samples/generated_samples/billingbudgets_v1_generated_budget_service_get_budget_sync.py deleted file mode 100644 index ee52c07f2568..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/samples/generated_samples/billingbudgets_v1_generated_budget_service_get_budget_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetBudget -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-billing-budgets - - -# [START billingbudgets_v1_generated_BudgetService_GetBudget_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.billing import budgets_v1 - - -def sample_get_budget(): - # Create a client - client = budgets_v1.BudgetServiceClient() - - # Initialize request argument(s) - request = budgets_v1.GetBudgetRequest( - name="name_value", - ) - - # Make the request - response = client.get_budget(request=request) - - # Handle the response - print(response) - -# [END billingbudgets_v1_generated_BudgetService_GetBudget_sync] diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/samples/generated_samples/billingbudgets_v1_generated_budget_service_list_budgets_async.py b/owl-bot-staging/google-cloud-billing-budgets/v1/samples/generated_samples/billingbudgets_v1_generated_budget_service_list_budgets_async.py deleted file mode 100644 index 04b7dd0867b5..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/samples/generated_samples/billingbudgets_v1_generated_budget_service_list_budgets_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListBudgets -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-billing-budgets - - -# [START billingbudgets_v1_generated_BudgetService_ListBudgets_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.billing import budgets_v1 - - -async def sample_list_budgets(): - # Create a client - client = budgets_v1.BudgetServiceAsyncClient() - - # Initialize request argument(s) - request = budgets_v1.ListBudgetsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_budgets(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END billingbudgets_v1_generated_BudgetService_ListBudgets_async] diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/samples/generated_samples/billingbudgets_v1_generated_budget_service_list_budgets_sync.py b/owl-bot-staging/google-cloud-billing-budgets/v1/samples/generated_samples/billingbudgets_v1_generated_budget_service_list_budgets_sync.py deleted file mode 100644 index fea780d09492..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/samples/generated_samples/billingbudgets_v1_generated_budget_service_list_budgets_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListBudgets -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-billing-budgets - - -# [START billingbudgets_v1_generated_BudgetService_ListBudgets_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.billing import budgets_v1 - - -def sample_list_budgets(): - # Create a client - client = budgets_v1.BudgetServiceClient() - - # Initialize request argument(s) - request = budgets_v1.ListBudgetsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_budgets(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END billingbudgets_v1_generated_BudgetService_ListBudgets_sync] diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/samples/generated_samples/billingbudgets_v1_generated_budget_service_update_budget_async.py b/owl-bot-staging/google-cloud-billing-budgets/v1/samples/generated_samples/billingbudgets_v1_generated_budget_service_update_budget_async.py deleted file mode 100644 index 974b53e1642e..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/samples/generated_samples/billingbudgets_v1_generated_budget_service_update_budget_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateBudget -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-billing-budgets - - -# [START billingbudgets_v1_generated_BudgetService_UpdateBudget_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.billing import budgets_v1 - - -async def sample_update_budget(): - # Create a client - client = budgets_v1.BudgetServiceAsyncClient() - - # Initialize request argument(s) - request = budgets_v1.UpdateBudgetRequest( - ) - - # Make the request - response = await client.update_budget(request=request) - - # Handle the response - print(response) - -# [END billingbudgets_v1_generated_BudgetService_UpdateBudget_async] diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/samples/generated_samples/billingbudgets_v1_generated_budget_service_update_budget_sync.py b/owl-bot-staging/google-cloud-billing-budgets/v1/samples/generated_samples/billingbudgets_v1_generated_budget_service_update_budget_sync.py deleted file mode 100644 index 02404f64ff67..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/samples/generated_samples/billingbudgets_v1_generated_budget_service_update_budget_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateBudget -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-billing-budgets - - -# [START billingbudgets_v1_generated_BudgetService_UpdateBudget_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.billing import budgets_v1 - - -def sample_update_budget(): - # Create a client - client = budgets_v1.BudgetServiceClient() - - # Initialize request argument(s) - request = budgets_v1.UpdateBudgetRequest( - ) - - # Make the request - response = client.update_budget(request=request) - - # Handle the response - print(response) - -# [END billingbudgets_v1_generated_BudgetService_UpdateBudget_sync] diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1.json b/owl-bot-staging/google-cloud-billing-budgets/v1/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1.json deleted file mode 100644 index c24b57dc687a..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1.json +++ /dev/null @@ -1,830 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.billing.budgets.v1", - "version": "v1" - } - ], - "language": "PYTHON", - "name": "google-cloud-billing-budgets", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.billing.budgets_v1.BudgetServiceAsyncClient", - "shortName": "BudgetServiceAsyncClient" - }, - "fullName": "google.cloud.billing.budgets_v1.BudgetServiceAsyncClient.create_budget", - "method": { - "fullName": "google.cloud.billing.budgets.v1.BudgetService.CreateBudget", - "service": { - "fullName": "google.cloud.billing.budgets.v1.BudgetService", - "shortName": "BudgetService" - }, - "shortName": "CreateBudget" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.billing.budgets_v1.types.CreateBudgetRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "budget", - "type": "google.cloud.billing.budgets_v1.types.Budget" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.billing.budgets_v1.types.Budget", - "shortName": "create_budget" - }, - "description": "Sample for CreateBudget", - "file": "billingbudgets_v1_generated_budget_service_create_budget_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "billingbudgets_v1_generated_BudgetService_CreateBudget_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "billingbudgets_v1_generated_budget_service_create_budget_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.billing.budgets_v1.BudgetServiceClient", - "shortName": "BudgetServiceClient" - }, - "fullName": "google.cloud.billing.budgets_v1.BudgetServiceClient.create_budget", - "method": { - "fullName": "google.cloud.billing.budgets.v1.BudgetService.CreateBudget", - "service": { - "fullName": "google.cloud.billing.budgets.v1.BudgetService", - 
"shortName": "BudgetService" - }, - "shortName": "CreateBudget" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.billing.budgets_v1.types.CreateBudgetRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "budget", - "type": "google.cloud.billing.budgets_v1.types.Budget" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.billing.budgets_v1.types.Budget", - "shortName": "create_budget" - }, - "description": "Sample for CreateBudget", - "file": "billingbudgets_v1_generated_budget_service_create_budget_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "billingbudgets_v1_generated_BudgetService_CreateBudget_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "billingbudgets_v1_generated_budget_service_create_budget_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.billing.budgets_v1.BudgetServiceAsyncClient", - "shortName": "BudgetServiceAsyncClient" - }, - "fullName": "google.cloud.billing.budgets_v1.BudgetServiceAsyncClient.delete_budget", - "method": { - "fullName": "google.cloud.billing.budgets.v1.BudgetService.DeleteBudget", - "service": { - "fullName": "google.cloud.billing.budgets.v1.BudgetService", - "shortName": "BudgetService" - }, - "shortName": "DeleteBudget" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.billing.budgets_v1.types.DeleteBudgetRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_budget" - }, - "description": "Sample for DeleteBudget", - "file": "billingbudgets_v1_generated_budget_service_delete_budget_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "billingbudgets_v1_generated_BudgetService_DeleteBudget_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "billingbudgets_v1_generated_budget_service_delete_budget_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.billing.budgets_v1.BudgetServiceClient", - "shortName": "BudgetServiceClient" - }, - "fullName": "google.cloud.billing.budgets_v1.BudgetServiceClient.delete_budget", - "method": { - "fullName": "google.cloud.billing.budgets.v1.BudgetService.DeleteBudget", - "service": { - "fullName": "google.cloud.billing.budgets.v1.BudgetService", - "shortName": "BudgetService" - }, - "shortName": "DeleteBudget" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.billing.budgets_v1.types.DeleteBudgetRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_budget" - }, - "description": "Sample for DeleteBudget", - "file": "billingbudgets_v1_generated_budget_service_delete_budget_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "billingbudgets_v1_generated_BudgetService_DeleteBudget_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "billingbudgets_v1_generated_budget_service_delete_budget_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.billing.budgets_v1.BudgetServiceAsyncClient", - "shortName": "BudgetServiceAsyncClient" - }, - "fullName": "google.cloud.billing.budgets_v1.BudgetServiceAsyncClient.get_budget", - "method": { - "fullName": "google.cloud.billing.budgets.v1.BudgetService.GetBudget", - "service": { - "fullName": "google.cloud.billing.budgets.v1.BudgetService", - "shortName": "BudgetService" - }, - "shortName": "GetBudget" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.billing.budgets_v1.types.GetBudgetRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.billing.budgets_v1.types.Budget", - "shortName": "get_budget" - }, - "description": "Sample for GetBudget", - "file": "billingbudgets_v1_generated_budget_service_get_budget_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "billingbudgets_v1_generated_BudgetService_GetBudget_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "billingbudgets_v1_generated_budget_service_get_budget_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.billing.budgets_v1.BudgetServiceClient", - "shortName": "BudgetServiceClient" - }, - "fullName": "google.cloud.billing.budgets_v1.BudgetServiceClient.get_budget", - "method": { - "fullName": "google.cloud.billing.budgets.v1.BudgetService.GetBudget", - "service": { - "fullName": "google.cloud.billing.budgets.v1.BudgetService", - "shortName": "BudgetService" - }, - "shortName": "GetBudget" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.billing.budgets_v1.types.GetBudgetRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - 
"type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.billing.budgets_v1.types.Budget", - "shortName": "get_budget" - }, - "description": "Sample for GetBudget", - "file": "billingbudgets_v1_generated_budget_service_get_budget_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "billingbudgets_v1_generated_BudgetService_GetBudget_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "billingbudgets_v1_generated_budget_service_get_budget_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.billing.budgets_v1.BudgetServiceAsyncClient", - "shortName": "BudgetServiceAsyncClient" - }, - "fullName": "google.cloud.billing.budgets_v1.BudgetServiceAsyncClient.list_budgets", - "method": { - "fullName": "google.cloud.billing.budgets.v1.BudgetService.ListBudgets", - "service": { - "fullName": "google.cloud.billing.budgets.v1.BudgetService", - "shortName": "BudgetService" - }, - "shortName": "ListBudgets" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.billing.budgets_v1.types.ListBudgetsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.billing.budgets_v1.services.budget_service.pagers.ListBudgetsAsyncPager", - "shortName": "list_budgets" - }, - "description": "Sample for ListBudgets", - "file": "billingbudgets_v1_generated_budget_service_list_budgets_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "billingbudgets_v1_generated_BudgetService_ListBudgets_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "billingbudgets_v1_generated_budget_service_list_budgets_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.billing.budgets_v1.BudgetServiceClient", - "shortName": "BudgetServiceClient" - }, - "fullName": "google.cloud.billing.budgets_v1.BudgetServiceClient.list_budgets", - "method": { - "fullName": "google.cloud.billing.budgets.v1.BudgetService.ListBudgets", - "service": { - "fullName": "google.cloud.billing.budgets.v1.BudgetService", - "shortName": "BudgetService" - }, - "shortName": "ListBudgets" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.billing.budgets_v1.types.ListBudgetsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": 
"google.cloud.billing.budgets_v1.services.budget_service.pagers.ListBudgetsPager", - "shortName": "list_budgets" - }, - "description": "Sample for ListBudgets", - "file": "billingbudgets_v1_generated_budget_service_list_budgets_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "billingbudgets_v1_generated_BudgetService_ListBudgets_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "billingbudgets_v1_generated_budget_service_list_budgets_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.billing.budgets_v1.BudgetServiceAsyncClient", - "shortName": "BudgetServiceAsyncClient" - }, - "fullName": "google.cloud.billing.budgets_v1.BudgetServiceAsyncClient.update_budget", - "method": { - "fullName": "google.cloud.billing.budgets.v1.BudgetService.UpdateBudget", - "service": { - "fullName": "google.cloud.billing.budgets.v1.BudgetService", - "shortName": "BudgetService" - }, - "shortName": "UpdateBudget" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.billing.budgets_v1.types.UpdateBudgetRequest" - }, - { - "name": "budget", - "type": "google.cloud.billing.budgets_v1.types.Budget" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.billing.budgets_v1.types.Budget", - "shortName": "update_budget" - }, - "description": "Sample for UpdateBudget", - "file": "billingbudgets_v1_generated_budget_service_update_budget_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "billingbudgets_v1_generated_BudgetService_UpdateBudget_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "billingbudgets_v1_generated_budget_service_update_budget_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.billing.budgets_v1.BudgetServiceClient", - "shortName": "BudgetServiceClient" - }, - "fullName": "google.cloud.billing.budgets_v1.BudgetServiceClient.update_budget", - "method": { - "fullName": "google.cloud.billing.budgets.v1.BudgetService.UpdateBudget", - "service": { - "fullName": "google.cloud.billing.budgets.v1.BudgetService", - "shortName": "BudgetService" - }, - "shortName": "UpdateBudget" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.billing.budgets_v1.types.UpdateBudgetRequest" - }, - { - "name": "budget", - "type": "google.cloud.billing.budgets_v1.types.Budget" - }, - { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" - }, - { - "name": "retry", - "type": 
"google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.billing.budgets_v1.types.Budget", - "shortName": "update_budget" - }, - "description": "Sample for UpdateBudget", - "file": "billingbudgets_v1_generated_budget_service_update_budget_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "billingbudgets_v1_generated_BudgetService_UpdateBudget_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "billingbudgets_v1_generated_budget_service_update_budget_sync.py" - } - ] -} diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/scripts/fixup_budgets_v1_keywords.py b/owl-bot-staging/google-cloud-billing-budgets/v1/scripts/fixup_budgets_v1_keywords.py deleted file mode 100644 index a4ef41c09acc..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/scripts/fixup_budgets_v1_keywords.py +++ /dev/null @@ -1,180 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class budgetsCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_budget': ('parent', 'budget', ), - 'delete_budget': ('name', ), - 'get_budget': ('name', ), - 'list_budgets': ('parent', 'scope', 'page_size', 'page_token', ), - 'update_budget': ('budget', 'update_mask', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. 
- return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), - cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=budgetsCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the budgets client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool makes a best-effort attempt at converting positional - parameters in client method calls to keyword-based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool can also produce false - positives when an API method shares a name with another method.
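As an illustration, the rewrite this transformer applies has the following shape. This is a hypothetical call (editorial, not part of the generated script); the parameter names are taken from the create_budget entry in METHOD_TO_PARAMS above:

    # Before: required fields passed positionally, control parameter as keyword.
    client.create_budget("billingAccounts/123", budget, timeout=30.0)

    # After: required fields folded into a single request dict; control
    # parameters (retry/timeout/metadata) are left as keyword arguments.
    client.create_budget(
        request={'parent': "billingAccounts/123", 'budget': budget},
        timeout=30.0,
    )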
-""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/setup.py b/owl-bot-staging/google-cloud-billing-budgets/v1/setup.py deleted file mode 100644 index 03c978d368f5..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/setup.py +++ /dev/null @@ -1,98 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-billing-budgets' - - -description = "Google Cloud Billing Budgets API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/billing/budgets/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", -] -extras = { -} -url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-billing-budgets" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - extras_require=extras, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-billing-budgets/v1/testing/constraints-3.10.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/testing/constraints-3.10.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-billing-budgets/v1/testing/constraints-3.11.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/testing/constraints-3.11.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. 
-# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-billing-budgets/v1/testing/constraints-3.12.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/testing/constraints-3.12.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-billing-budgets/v1/testing/constraints-3.13.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/testing/constraints-3.13.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-billing-budgets/v1/testing/constraints-3.7.txt deleted file mode 100644 index fc812592b0ee..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/testing/constraints-3.7.txt +++ /dev/null @@ -1,10 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -proto-plus==1.22.3 -protobuf==3.20.2 diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-billing-budgets/v1/testing/constraints-3.8.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/testing/constraints-3.8.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-billing-budgets/v1/testing/constraints-3.9.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/testing/constraints-3.9.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/tests/__init__.py b/owl-bot-staging/google-cloud-billing-budgets/v1/tests/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/tests/unit/__init__.py b/owl-bot-staging/google-cloud-billing-budgets/v1/tests/unit/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-billing-budgets/v1/tests/unit/gapic/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/tests/unit/gapic/budgets_v1/__init__.py b/owl-bot-staging/google-cloud-billing-budgets/v1/tests/unit/gapic/budgets_v1/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/tests/unit/gapic/budgets_v1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1/tests/unit/gapic/budgets_v1/test_budget_service.py b/owl-bot-staging/google-cloud-billing-budgets/v1/tests/unit/gapic/budgets_v1/test_budget_service.py deleted file mode 100644 index 0eb2563a7030..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1/tests/unit/gapic/budgets_v1/test_budget_service.py +++ /dev/null @@ -1,5201 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable, AsyncIterable -from google.protobuf import json_format -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.billing.budgets_v1.services.budget_service import BudgetServiceAsyncClient -from google.cloud.billing.budgets_v1.services.budget_service import BudgetServiceClient -from google.cloud.billing.budgets_v1.services.budget_service import pagers -from google.cloud.billing.budgets_v1.services.budget_service import transports -from google.cloud.billing.budgets_v1.types import budget_model -from google.cloud.billing.budgets_v1.types import budget_service -from google.oauth2 import service_account -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import struct_pb2 # type: ignore -from google.type import date_pb2 # type: ignore -from google.type import money_pb2 # type: ignore -import google.auth - - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": "service account credentials", - "principal": "service-account@example.com", -} -CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: 
use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert BudgetServiceClient._get_default_mtls_endpoint(None) is None - assert BudgetServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert BudgetServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert BudgetServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert BudgetServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert BudgetServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert BudgetServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert BudgetServiceClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert BudgetServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - BudgetServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert BudgetServiceClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert BudgetServiceClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert BudgetServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - BudgetServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with 
mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert BudgetServiceClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert BudgetServiceClient._get_client_cert_source(None, False) is None - assert BudgetServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert BudgetServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert BudgetServiceClient._get_client_cert_source(None, True) is mock_default_cert_source - assert BudgetServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(BudgetServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BudgetServiceClient)) -@mock.patch.object(BudgetServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BudgetServiceAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = BudgetServiceClient._DEFAULT_UNIVERSE - default_endpoint = BudgetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = BudgetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert BudgetServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert BudgetServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == BudgetServiceClient.DEFAULT_MTLS_ENDPOINT - assert BudgetServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert BudgetServiceClient._get_api_endpoint(None, None, default_universe, "always") == BudgetServiceClient.DEFAULT_MTLS_ENDPOINT - assert BudgetServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == BudgetServiceClient.DEFAULT_MTLS_ENDPOINT - assert BudgetServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert BudgetServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - BudgetServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." - - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert BudgetServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert BudgetServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert BudgetServiceClient._get_universe_domain(None, None) == BudgetServiceClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - BudgetServiceClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
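The _get_universe_domain assertions above pin down the client's resolution order: an explicitly supplied universe domain beats the GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variable, which in turn beats the googleapis.com default. A minimal caller-side sketch of that precedence (editorial; it assumes ClientOptions accepts a universe_domain argument and that the generated client exposes a universe_domain property, as these tests suggest):

    import os
    from google.api_core import client_options
    from google.auth import credentials as ga_credentials
    from google.cloud.billing import budgets_v1

    # The environment variable supplies a fallback universe domain...
    os.environ["GOOGLE_CLOUD_UNIVERSE_DOMAIN"] = "bar.com"

    # ...but an explicit client option takes precedence over it.
    options = client_options.ClientOptions(universe_domain="foo.com")
    client = budgets_v1.BudgetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=options,
    )
    assert client.universe_domain == "foo.com"

With the client option removed, the environment variable would win; with neither set, the client falls back to googleapis.com.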
- -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = BudgetServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - -@pytest.mark.parametrize("error_code", [401,403,404,500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = BudgetServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - -@pytest.mark.parametrize("client_class,transport_name", [ - (BudgetServiceClient, "grpc"), - (BudgetServiceAsyncClient, "grpc_asyncio"), - (BudgetServiceClient, "rest"), -]) -def test_budget_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'billingbudgets.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://billingbudgets.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.BudgetServiceGrpcTransport, "grpc"), - (transports.BudgetServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.BudgetServiceRestTransport, "rest"), -]) -def test_budget_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (BudgetServiceClient, "grpc"), - (BudgetServiceAsyncClient, "grpc_asyncio"), - (BudgetServiceClient, "rest"), -]) -def test_budget_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert 
client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'billingbudgets.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://billingbudgets.googleapis.com' - ) - - -def test_budget_service_client_get_transport_class(): - transport = BudgetServiceClient.get_transport_class() - available_transports = [ - transports.BudgetServiceGrpcTransport, - transports.BudgetServiceRestTransport, - ] - assert transport in available_transports - - transport = BudgetServiceClient.get_transport_class("grpc") - assert transport == transports.BudgetServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (BudgetServiceClient, transports.BudgetServiceGrpcTransport, "grpc"), - (BudgetServiceAsyncClient, transports.BudgetServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (BudgetServiceClient, transports.BudgetServiceRestTransport, "rest"), -]) -@mock.patch.object(BudgetServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BudgetServiceClient)) -@mock.patch.object(BudgetServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BudgetServiceAsyncClient)) -def test_budget_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(BudgetServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(BudgetServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_audience is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (BudgetServiceClient, transports.BudgetServiceGrpcTransport, "grpc", "true"), - (BudgetServiceAsyncClient, transports.BudgetServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (BudgetServiceClient, transports.BudgetServiceGrpcTransport, "grpc", "false"), - (BudgetServiceAsyncClient, transports.BudgetServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (BudgetServiceClient, transports.BudgetServiceRestTransport, "rest", "true"), - (BudgetServiceClient, transports.BudgetServiceRestTransport, "rest", "false"), -]) -@mock.patch.object(BudgetServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BudgetServiceClient)) -@mock.patch.object(BudgetServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BudgetServiceAsyncClient))
-@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_budget_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - BudgetServiceClient, BudgetServiceAsyncClient -]) -@mock.patch.object(BudgetServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(BudgetServiceClient)) -@mock.patch.object(BudgetServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(BudgetServiceAsyncClient)) -def test_budget_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
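Every environment-driven case in these tests is wrapped in mock.patch.dict(os.environ, ...), which scopes the variable to the with block and restores the prior environment on exit, so the "true"/"false" and "never"/"auto"/"always" cases cannot leak into each other. A minimal standalone sketch of that isolation; the variable name is taken from the tests, while read_mtls_mode is a hypothetical stand-in for the client's lookup:

    import os
    from unittest import mock

    def read_mtls_mode() -> str:
        # "auto" is the default the tests assume when the variable is unset.
        return os.environ.get("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")

    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        assert read_mtls_mode() == "always"

    # On exit mock.patch.dict restored the previous environment, so the
    # override is gone (assuming the variable is not set in the outer shell).
    assert read_mtls_mode() == "auto"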
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - BudgetServiceClient, BudgetServiceAsyncClient -]) -@mock.patch.object(BudgetServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BudgetServiceClient)) -@mock.patch.object(BudgetServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BudgetServiceAsyncClient)) -def test_budget_service_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = BudgetServiceClient._DEFAULT_UNIVERSE - default_endpoint = BudgetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = BudgetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
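Taken together, the get_mtls_endpoint_and_cert_source cases pin down a small decision table for endpoint selection: an explicit api_endpoint always wins; otherwise "never" selects the plain endpoint, "always" selects the mTLS endpoint, and "auto" selects the mTLS endpoint only when a client certificate is in play. A condensed sketch of that table, with placeholder endpoint constants; this illustrates the asserted behavior rather than reproducing the library's implementation:

    from typing import Optional

    DEFAULT_ENDPOINT = "billingbudgets.googleapis.com"
    DEFAULT_MTLS_ENDPOINT = "billingbudgets.mtls.googleapis.com"

    def resolve_endpoint(api_endpoint: Optional[str],
                         use_mtls_env: str,
                         have_client_cert: bool) -> str:
        if api_endpoint is not None:
            return api_endpoint  # an explicit override always wins
        if use_mtls_env == "never":
            return DEFAULT_ENDPOINT
        if use_mtls_env == "always":
            return DEFAULT_MTLS_ENDPOINT
        if use_mtls_env == "auto":
            return DEFAULT_MTLS_ENDPOINT if have_client_cert else DEFAULT_ENDPOINT
        raise ValueError(  # the real client raises MutualTLSChannelError here
            "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be "
            "`never`, `auto` or `always`")

    assert resolve_endpoint(None, "auto", True) == DEFAULT_MTLS_ENDPOINT
    assert resolve_endpoint(None, "never", True) == DEFAULT_ENDPOINT
    assert resolve_endpoint("foo", "always", False) == "foo"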
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (BudgetServiceClient, transports.BudgetServiceGrpcTransport, "grpc"), - (BudgetServiceAsyncClient, transports.BudgetServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (BudgetServiceClient, transports.BudgetServiceRestTransport, "rest"), -]) -def test_budget_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (BudgetServiceClient, transports.BudgetServiceGrpcTransport, "grpc", grpc_helpers), - (BudgetServiceAsyncClient, transports.BudgetServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (BudgetServiceClient, transports.BudgetServiceRestTransport, "rest", None), -]) -def test_budget_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
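The universe-domain cases reduce to string templating: _DEFAULT_ENDPOINT_TEMPLATE carries a UNIVERSE_DOMAIN placeholder, and the universe domain defaults to the GDU, googleapis.com. A minimal sketch of the template mechanics the tests exercise via .format(); the template literal here is an assumption about its shape, not a value lifted from the library:

    _DEFAULT_UNIVERSE = "googleapis.com"
    _DEFAULT_ENDPOINT_TEMPLATE = "billingbudgets.{UNIVERSE_DOMAIN}"

    def endpoint_for(universe_domain: str = _DEFAULT_UNIVERSE) -> str:
        # Same placeholder name as the .format() calls in the tests.
        return _DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain)

    assert endpoint_for() == "billingbudgets.googleapis.com"
    assert endpoint_for("bar.com") == "billingbudgets.bar.com"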
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_budget_service_client_client_options_from_dict(): - with mock.patch('google.cloud.billing.budgets_v1.services.budget_service.transports.BudgetServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = BudgetServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (BudgetServiceClient, transports.BudgetServiceGrpcTransport, "grpc", grpc_helpers), - (BudgetServiceAsyncClient, transports.BudgetServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_budget_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "billingbudgets.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-billing', - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="billingbudgets.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - budget_service.CreateBudgetRequest, - dict, -]) -def test_create_budget(request_type, transport: str = 'grpc'): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_budget), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = budget_model.Budget( - name='name_value', - display_name='display_name_value', - etag='etag_value', - ) - response = client.create_budget(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = budget_service.CreateBudgetRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, budget_model.Budget) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.etag == 'etag_value' - - -def test_create_budget_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = budget_service.CreateBudgetRequest( - parent='parent_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_budget), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
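Every RPC test from here on follows one recipe: patch __call__ on the type of the transport's method stub, canned-return a response message, invoke the client, then read the recorded request back out of call.mock_calls. The recipe in isolation, against a hypothetical callable stub instead of a real transport (patching on the type matters because Python looks special methods up on the class, not the instance):

    from unittest import mock

    class FakeStub:
        def __call__(self, request):
            raise NotImplementedError("would hit the network")

    stub = FakeStub()
    with mock.patch.object(type(stub), '__call__') as call:
        call.return_value = {"name": "name_value"}
        response = stub("request")

    # mock_calls entries unpack to (name, args, kwargs); args[0] is the
    # request that was sent, mirroring the assertions in these tests.
    assert len(call.mock_calls) == 1
    _, args, _ = call.mock_calls[0]
    assert args[0] == "request"
    assert response == {"name": "name_value"}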
-        client.create_budget(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == budget_service.CreateBudgetRequest(
-            parent='parent_value',
-        )
-
-def test_create_budget_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = BudgetServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.create_budget in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.create_budget] = mock_rpc
-        request = {}
-        client.create_budget(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.create_budget(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_create_budget_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = BudgetServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.create_budget in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.create_budget] = mock_rpc
-
-        request = {}
-        await client.create_budget(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.create_budget(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_create_budget_async(transport: str = 'grpc_asyncio', request_type=budget_service.CreateBudgetRequest):
-    client = BudgetServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_budget),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(budget_model.Budget(
-            name='name_value',
-            display_name='display_name_value',
-            etag='etag_value',
-        ))
-        response = await client.create_budget(request)
-
-        # Establish that the underlying gRPC stub method was called.
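The *_use_cached_wrapped_rpc tests encode a performance contract: wrap_method runs once per RPC at client construction, and later calls reuse the wrapper cached in _transport._wrapped_methods instead of re-wrapping. The same contract in miniature, with a hypothetical transport and a no-op stand-in for gapic_v1.method.wrap_method:

    from typing import Callable, Dict

    def wrap_method(func: Callable) -> Callable:
        # Stand-in for the retry/timeout/metadata wiring wrap_method adds.
        def wrapped(*args, **kwargs):
            return func(*args, **kwargs)
        return wrapped

    class Transport:
        def __init__(self):
            # Wrap each RPC once, up front, as _prep_wrapped_messages does.
            self._wrapped_methods: Dict[Callable, Callable] = {
                self.create_budget: wrap_method(self.create_budget),
            }

        def create_budget(self, request):
            return "budget"

    transport = Transport()
    # Bound methods hash by (function, instance), so repeated attribute
    # access still finds the one cached wrapper.
    assert (transport._wrapped_methods[transport.create_budget]
            is transport._wrapped_methods[transport.create_budget])
    assert transport._wrapped_methods[transport.create_budget](request={}) == "budget"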
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = budget_service.CreateBudgetRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, budget_model.Budget) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.etag == 'etag_value' - - -@pytest.mark.asyncio -async def test_create_budget_async_from_dict(): - await test_create_budget_async(request_type=dict) - -def test_create_budget_field_headers(): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = budget_service.CreateBudgetRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_budget), - '__call__') as call: - call.return_value = budget_model.Budget() - client.create_budget(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_budget_field_headers_async(): - client = BudgetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = budget_service.CreateBudgetRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_budget), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(budget_model.Budget()) - await client.create_budget(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_budget_flattened(): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_budget), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = budget_model.Budget() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_budget( - parent='parent_value', - budget=budget_model.Budget(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].budget
-        mock_val = budget_model.Budget(name='name_value')
-        assert arg == mock_val
-
-
-def test_create_budget_flattened_error():
-    client = BudgetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.create_budget(
-            budget_service.CreateBudgetRequest(),
-            parent='parent_value',
-            budget=budget_model.Budget(name='name_value'),
-        )
-
-@pytest.mark.asyncio
-async def test_create_budget_flattened_async():
-    client = BudgetServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_budget),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(budget_model.Budget())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.create_budget(
-            parent='parent_value',
-            budget=budget_model.Budget(name='name_value'),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-        arg = args[0].budget
-        mock_val = budget_model.Budget(name='name_value')
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_create_budget_flattened_error_async():
-    client = BudgetServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.create_budget(
-            budget_service.CreateBudgetRequest(),
-            parent='parent_value',
-            budget=budget_model.Budget(name='name_value'),
-        )
-
-
-@pytest.mark.parametrize("request_type", [
-    budget_service.UpdateBudgetRequest,
-    dict,
-])
-def test_update_budget(request_type, transport: str = 'grpc'):
-    client = BudgetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_budget),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = budget_model.Budget(
-            name='name_value',
-            display_name='display_name_value',
-            etag='etag_value',
-        )
-        response = client.update_budget(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        request = budget_service.UpdateBudgetRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
- assert isinstance(response, budget_model.Budget) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.etag == 'etag_value' - - -def test_update_budget_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = budget_service.UpdateBudgetRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_budget), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_budget(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == budget_service.UpdateBudgetRequest( - ) - -def test_update_budget_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_budget in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_budget] = mock_rpc - request = {} - client.update_budget(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_budget(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_budget_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BudgetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_budget in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_budget] = mock_rpc - - request = {} - await client.update_budget(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        await client.update_budget(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_update_budget_async(transport: str = 'grpc_asyncio', request_type=budget_service.UpdateBudgetRequest):
-    client = BudgetServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_budget),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(budget_model.Budget(
-            name='name_value',
-            display_name='display_name_value',
-            etag='etag_value',
-        ))
-        response = await client.update_budget(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = budget_service.UpdateBudgetRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, budget_model.Budget)
-    assert response.name == 'name_value'
-    assert response.display_name == 'display_name_value'
-    assert response.etag == 'etag_value'
-
-
-@pytest.mark.asyncio
-async def test_update_budget_async_from_dict():
-    await test_update_budget_async(request_type=dict)
-
-def test_update_budget_field_headers():
-    client = BudgetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = budget_service.UpdateBudgetRequest()
-
-    request.budget.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_budget),
-            '__call__') as call:
-        call.return_value = budget_model.Budget()
-        client.update_budget(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'budget.name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_update_budget_field_headers_async():
-    client = BudgetServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = budget_service.UpdateBudgetRequest()
-
-    request.budget.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_budget),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(budget_model.Budget())
-        await client.update_budget(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
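The field-header tests assert request routing: whichever request fields appear in the HTTP URI are mirrored into a single x-goog-request-params metadata entry so intermediaries can route without parsing the request body. A sketch of the header shape being asserted, hand-rolled here rather than calling gapic_v1.routing_header; keys may be dotted field paths like budget.name:

    from urllib.parse import quote

    def routing_metadata(params: dict) -> tuple:
        # Percent-encode values and join as k=v&k2=v2, the shape these
        # tests look for in kw['metadata'].
        encoded = "&".join(
            f"{key}={quote(str(value), safe='')}" for key, value in params.items())
        return ('x-goog-request-params', encoded)

    metadata = [routing_metadata({'budget.name': 'name_value'})]
    assert ('x-goog-request-params', 'budget.name=name_value') in metadata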
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'budget.name=name_value',
-    ) in kw['metadata']
-
-
-def test_update_budget_flattened():
-    client = BudgetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_budget),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = budget_model.Budget()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.update_budget(
-            budget=budget_model.Budget(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].budget
-        mock_val = budget_model.Budget(name='name_value')
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-
-def test_update_budget_flattened_error():
-    client = BudgetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.update_budget(
-            budget_service.UpdateBudgetRequest(),
-            budget=budget_model.Budget(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-@pytest.mark.asyncio
-async def test_update_budget_flattened_async():
-    client = BudgetServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_budget),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(budget_model.Budget())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.update_budget(
-            budget=budget_model.Budget(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].budget
-        mock_val = budget_model.Budget(name='name_value')
-        assert arg == mock_val
-        arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_update_budget_flattened_error_async():
-    client = BudgetServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - await client.update_budget( - budget_service.UpdateBudgetRequest(), - budget=budget_model.Budget(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.parametrize("request_type", [ - budget_service.GetBudgetRequest, - dict, -]) -def test_get_budget(request_type, transport: str = 'grpc'): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_budget), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = budget_model.Budget( - name='name_value', - display_name='display_name_value', - etag='etag_value', - ) - response = client.get_budget(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = budget_service.GetBudgetRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, budget_model.Budget) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.etag == 'etag_value' - - -def test_get_budget_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = budget_service.GetBudgetRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_budget), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_budget(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == budget_service.GetBudgetRequest( - name='name_value', - ) - -def test_get_budget_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_budget in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_budget] = mock_rpc - request = {} - client.get_budget(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.get_budget(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_budget_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = BudgetServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.get_budget in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.get_budget] = mock_rpc
-
-        request = {}
-        await client.get_budget(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.get_budget(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_get_budget_async(transport: str = 'grpc_asyncio', request_type=budget_service.GetBudgetRequest):
-    client = BudgetServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_budget),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(budget_model.Budget(
-            name='name_value',
-            display_name='display_name_value',
-            etag='etag_value',
-        ))
-        response = await client.get_budget(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = budget_service.GetBudgetRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, budget_model.Budget)
-    assert response.name == 'name_value'
-    assert response.display_name == 'display_name_value'
-    assert response.etag == 'etag_value'
-
-
-@pytest.mark.asyncio
-async def test_get_budget_async_from_dict():
-    await test_get_budget_async(request_type=dict)
-
-def test_get_budget_field_headers():
-    client = BudgetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = budget_service.GetBudgetRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_budget),
-            '__call__') as call:
-        call.return_value = budget_model.Budget()
-        client.get_budget(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_budget_field_headers_async():
-    client = BudgetServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = budget_service.GetBudgetRequest()
-
-    request.name = 'name_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_budget),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(budget_model.Budget())
-        await client.get_budget(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name_value',
-    ) in kw['metadata']
-
-
-def test_get_budget_flattened():
-    client = BudgetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_budget),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = budget_model.Budget()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.get_budget(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_get_budget_flattened_error():
-    client = BudgetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_budget(
-            budget_service.GetBudgetRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_get_budget_flattened_async():
-    client = BudgetServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_budget),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(budget_model.Budget())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.get_budget(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_get_budget_flattened_error_async():
-    client = BudgetServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
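The flattened/flattened_error pairs pin down a GAPIC calling convention: a method accepts either a fully-formed request object or individual keyword arguments, never both at once. A sketch of that guard with a hypothetical dataclass request type; the error message is illustrative, while the tests themselves only assert that a ValueError is raised:

    from dataclasses import dataclass
    from typing import Optional

    @dataclass
    class GetBudgetRequest:
        name: str = ""

    def get_budget(request: Optional[GetBudgetRequest] = None, *,
                   name: Optional[str] = None) -> GetBudgetRequest:
        if request is not None and name is not None:
            # Mixing a request object with flattened fields is rejected.
            raise ValueError("If the `request` argument is set, then none of "
                             "the individual field arguments should be set.")
        if request is None:
            request = GetBudgetRequest(name=name or "")
        return request

    assert get_budget(name='name_value').name == 'name_value'
    try:
        get_budget(GetBudgetRequest(), name='name_value')
    except ValueError:
        pass  # both forms together is an error, as the tests assert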
- with pytest.raises(ValueError): - await client.get_budget( - budget_service.GetBudgetRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - budget_service.ListBudgetsRequest, - dict, -]) -def test_list_budgets(request_type, transport: str = 'grpc'): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_budgets), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = budget_service.ListBudgetsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_budgets(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = budget_service.ListBudgetsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBudgetsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_budgets_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = budget_service.ListBudgetsRequest( - parent='parent_value', - scope='scope_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_budgets), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_budgets(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == budget_service.ListBudgetsRequest( - parent='parent_value', - scope='scope_value', - page_token='page_token_value', - ) - -def test_list_budgets_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_budgets in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_budgets] = mock_rpc - request = {} - client.list_budgets(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.list_budgets(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_budgets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = BudgetServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.list_budgets in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.list_budgets] = mock_rpc
-
-        request = {}
-        await client.list_budgets(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        await client.list_budgets(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_list_budgets_async(transport: str = 'grpc_asyncio', request_type=budget_service.ListBudgetsRequest):
-    client = BudgetServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_budgets),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(budget_service.ListBudgetsResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.list_budgets(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        request = budget_service.ListBudgetsRequest()
-        assert args[0] == request
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListBudgetsAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_budgets_async_from_dict():
-    await test_list_budgets_async(request_type=dict)
-
-def test_list_budgets_field_headers():
-    client = BudgetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = budget_service.ListBudgetsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_budgets),
-            '__call__') as call:
-        call.return_value = budget_service.ListBudgetsResponse()
-        client.list_budgets(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_budgets_field_headers_async():
-    client = BudgetServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = budget_service.ListBudgetsRequest()
-
-    request.parent = 'parent_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_budgets),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(budget_service.ListBudgetsResponse())
-        await client.list_budgets(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent_value',
-    ) in kw['metadata']
-
-
-def test_list_budgets_flattened():
-    client = BudgetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_budgets),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = budget_service.ListBudgetsResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_budgets(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-
-def test_list_budgets_flattened_error():
-    client = BudgetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_budgets(
-            budget_service.ListBudgetsRequest(),
-            parent='parent_value',
-        )
-
-@pytest.mark.asyncio
-async def test_list_budgets_flattened_async():
-    client = BudgetServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_budgets),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(budget_service.ListBudgetsResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_budgets(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].parent
-        mock_val = 'parent_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_list_budgets_flattened_error_async():
-    client = BudgetServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
- with pytest.raises(ValueError): - await client.list_budgets( - budget_service.ListBudgetsRequest(), - parent='parent_value', - ) - - -def test_list_budgets_pager(transport_name: str = "grpc"): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_budgets), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - budget_service.ListBudgetsResponse( - budgets=[ - budget_model.Budget(), - budget_model.Budget(), - budget_model.Budget(), - ], - next_page_token='abc', - ), - budget_service.ListBudgetsResponse( - budgets=[], - next_page_token='def', - ), - budget_service.ListBudgetsResponse( - budgets=[ - budget_model.Budget(), - ], - next_page_token='ghi', - ), - budget_service.ListBudgetsResponse( - budgets=[ - budget_model.Budget(), - budget_model.Budget(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_budgets(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, budget_model.Budget) - for i in results) -def test_list_budgets_pages(transport_name: str = "grpc"): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_budgets), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - budget_service.ListBudgetsResponse( - budgets=[ - budget_model.Budget(), - budget_model.Budget(), - budget_model.Budget(), - ], - next_page_token='abc', - ), - budget_service.ListBudgetsResponse( - budgets=[], - next_page_token='def', - ), - budget_service.ListBudgetsResponse( - budgets=[ - budget_model.Budget(), - ], - next_page_token='ghi', - ), - budget_service.ListBudgetsResponse( - budgets=[ - budget_model.Budget(), - budget_model.Budget(), - ], - ), - RuntimeError, - ) - pages = list(client.list_budgets(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_budgets_async_pager(): - client = BudgetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_budgets), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - budget_service.ListBudgetsResponse( - budgets=[ - budget_model.Budget(), - budget_model.Budget(), - budget_model.Budget(), - ], - next_page_token='abc', - ), - budget_service.ListBudgetsResponse( - budgets=[], - next_page_token='def', - ), - budget_service.ListBudgetsResponse( - budgets=[ - budget_model.Budget(), - ], - next_page_token='ghi', - ), - budget_service.ListBudgetsResponse( - budgets=[ - budget_model.Budget(), - budget_model.Budget(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_budgets(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, budget_model.Budget) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_budgets_async_pages(): - client = BudgetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_budgets), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - budget_service.ListBudgetsResponse( - budgets=[ - budget_model.Budget(), - budget_model.Budget(), - budget_model.Budget(), - ], - next_page_token='abc', - ), - budget_service.ListBudgetsResponse( - budgets=[], - next_page_token='def', - ), - budget_service.ListBudgetsResponse( - budgets=[ - budget_model.Budget(), - ], - next_page_token='ghi', - ), - budget_service.ListBudgetsResponse( - budgets=[ - budget_model.Budget(), - budget_model.Budget(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_budgets(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - budget_service.DeleteBudgetRequest, - dict, -]) -def test_delete_budget(request_type, transport: str = 'grpc'): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_budget), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_budget(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = budget_service.DeleteBudgetRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_budget_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
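-    # (The assertion below checks that the populated request reaches the
-    # stub unchanged.)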
-    client = BudgetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Populate all string fields in the request which are not UUID4,
-    # since we want to check that UUID4 fields are populated automatically
-    # if they meet the requirements of AIP-4235.
-    request = budget_service.DeleteBudgetRequest(
-        name='name_value',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_budget),
-            '__call__') as call:
-        call.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
-        client.delete_budget(request=request)
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == budget_service.DeleteBudgetRequest(
-            name='name_value',
-        )
-
-def test_delete_budget_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = BudgetServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="grpc",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.delete_budget in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.delete_budget] = mock_rpc
-        request = {}
-        client.delete_budget(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.delete_budget(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-@pytest.mark.asyncio
-async def test_delete_budget_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
-        client = BudgetServiceAsyncClient(
-            credentials=async_anonymous_credentials(),
-            transport=transport,
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._client._transport.delete_budget in client._client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.AsyncMock()
-        mock_rpc.return_value = mock.Mock()
-        client._client._transport._wrapped_methods[client._client._transport.delete_budget] = mock_rpc
-
-        request = {}
-        await client.delete_budget(request)
-
-        # Establish that the underlying gRPC stub method was called.
- assert mock_rpc.call_count == 1 - - await client.delete_budget(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_budget_async(transport: str = 'grpc_asyncio', request_type=budget_service.DeleteBudgetRequest): - client = BudgetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_budget), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_budget(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = budget_service.DeleteBudgetRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_budget_async_from_dict(): - await test_delete_budget_async(request_type=dict) - -def test_delete_budget_field_headers(): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = budget_service.DeleteBudgetRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_budget), - '__call__') as call: - call.return_value = None - client.delete_budget(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_budget_field_headers_async(): - client = BudgetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = budget_service.DeleteBudgetRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_budget), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_budget(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_budget_flattened(): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
-    with mock.patch.object(
-            type(client.transport.delete_budget),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.delete_budget(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-
-def test_delete_budget_flattened_error():
-    client = BudgetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_budget(
-            budget_service.DeleteBudgetRequest(),
-            name='name_value',
-        )
-
-@pytest.mark.asyncio
-async def test_delete_budget_flattened_async():
-    client = BudgetServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_budget),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.delete_budget(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        arg = args[0].name
-        mock_val = 'name_value'
-        assert arg == mock_val
-
-@pytest.mark.asyncio
-async def test_delete_budget_flattened_error_async():
-    client = BudgetServiceAsyncClient(
-        credentials=async_anonymous_credentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.delete_budget(
-            budget_service.DeleteBudgetRequest(),
-            name='name_value',
-        )
-
-
-def test_create_budget_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = BudgetServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.create_budget in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.create_budget] = mock_rpc
-
-        request = {}
-        client.create_budget(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.create_budget(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_create_budget_rest_required_fields(request_type=budget_service.CreateBudgetRequest):
-    transport_class = transports.BudgetServiceRestTransport
-
-    request_init = {}
-    request_init["parent"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_budget._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["parent"] = 'parent_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_budget._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "parent" in jsonified_request
-    assert jsonified_request["parent"] == 'parent_value'
-
-    client = BudgetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = budget_model.Budget()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "post",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = budget_model.Budget.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.create_budget(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_create_budget_rest_unset_required_fields():
-    transport = transports.BudgetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.create_budget._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("parent", "budget", )))
-
-
-def test_create_budget_rest_flattened():
-    client = BudgetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
- return_value = budget_model.Budget() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'billingAccounts/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - budget=budget_model.Budget(name='name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = budget_model.Budget.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_budget(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=billingAccounts/*}/budgets" % client.transport._host, args[1]) - - -def test_create_budget_rest_flattened_error(transport: str = 'rest'): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_budget( - budget_service.CreateBudgetRequest(), - parent='parent_value', - budget=budget_model.Budget(name='name_value'), - ) - - -def test_update_budget_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_budget in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_budget] = mock_rpc - - request = {} - client.update_budget(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.update_budget(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_update_budget_rest_required_fields(request_type=budget_service.UpdateBudgetRequest):
-    transport_class = transports.BudgetServiceRestTransport
-
-    request_init = {}
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_budget._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_budget._get_unset_required_fields(jsonified_request)
-    # Check that path parameters and body parameters are not mixing in.
-    assert not set(unset_fields) - set(("update_mask", ))
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-
-    client = BudgetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = budget_model.Budget()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "patch",
-                'query_params': pb_request,
-            }
-            transcode_result['body'] = pb_request
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = budget_model.Budget.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.update_budget(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_update_budget_rest_unset_required_fields():
-    transport = transports.BudgetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.update_budget._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("updateMask", )) & set(("budget", )))
-
-
-def test_update_budget_rest_flattened():
-    client = BudgetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
- return_value = budget_model.Budget() - - # get arguments that satisfy an http rule for this method - sample_request = {'budget': {'name': 'billingAccounts/sample1/budgets/sample2'}} - - # get truthy value for each flattened field - mock_args = dict( - budget=budget_model.Budget(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = budget_model.Budget.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_budget(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{budget.name=billingAccounts/*/budgets/*}" % client.transport._host, args[1]) - - -def test_update_budget_rest_flattened_error(transport: str = 'rest'): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_budget( - budget_service.UpdateBudgetRequest(), - budget=budget_model.Budget(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_get_budget_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_budget in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_budget] = mock_rpc - - request = {} - client.get_budget(request) - - # Establish that the underlying gRPC stub method was called. 
-        assert mock_rpc.call_count == 1
-
-        client.get_budget(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_get_budget_rest_required_fields(request_type=budget_service.GetBudgetRequest):
-    transport_class = transports.BudgetServiceRestTransport
-
-    request_init = {}
-    request_init["name"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_budget._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["name"] = 'name_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_budget._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
-
-    client = BudgetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = budget_model.Budget()
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = budget_model.Budget.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.get_budget(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_get_budget_rest_unset_required_fields():
-    transport = transports.BudgetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.get_budget._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-def test_get_budget_rest_flattened():
-    client = BudgetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
- return_value = budget_model.Budget() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'billingAccounts/sample1/budgets/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = budget_model.Budget.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_budget(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=billingAccounts/*/budgets/*}" % client.transport._host, args[1]) - - -def test_get_budget_rest_flattened_error(transport: str = 'rest'): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_budget( - budget_service.GetBudgetRequest(), - name='name_value', - ) - - -def test_list_budgets_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_budgets in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_budgets] = mock_rpc - - request = {} - client.list_budgets(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_budgets(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_budgets_rest_required_fields(request_type=budget_service.ListBudgetsRequest): - transport_class = transports.BudgetServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_budgets._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_budgets._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", "scope", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = budget_service.ListBudgetsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "get",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-
-            # Convert return value to protobuf type
-            return_value = budget_service.ListBudgetsResponse.pb(return_value)
-            json_return_value = json_format.MessageToJson(return_value)
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.list_budgets(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_list_budgets_rest_unset_required_fields():
-    transport = transports.BudgetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.list_budgets._get_unset_required_fields({})
-    assert set(unset_fields) == (set(("pageSize", "pageToken", "scope", )) & set(("parent", )))
-
-
-def test_list_budgets_rest_flattened():
-    client = BudgetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = budget_service.ListBudgetsResponse()
-
-        # get arguments that satisfy an http rule for this method
-        sample_request = {'parent': 'billingAccounts/sample1'}
-
-        # get truthy value for each flattened field
-        mock_args = dict(
-            parent='parent_value',
-        )
-        mock_args.update(sample_request)
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        # Convert return value to protobuf type
-        return_value = budget_service.ListBudgetsResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(return_value)
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-        client.list_budgets(**mock_args)
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(req.mock_calls) == 1
-        _, args, _ = req.mock_calls[0]
-        assert path_template.validate("%s/v1/{parent=billingAccounts/*}/budgets" % client.transport._host, args[1])
-
-
-def test_list_budgets_rest_flattened_error(transport: str = 'rest'):
-    client = BudgetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_budgets(
-            budget_service.ListBudgetsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_budgets_rest_pager(transport: str = 'rest'):
-    client = BudgetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # Set the response as a series of pages
-        response = (
-            budget_service.ListBudgetsResponse(
-                budgets=[
-                    budget_model.Budget(),
-                    budget_model.Budget(),
-                    budget_model.Budget(),
-                ],
-                next_page_token='abc',
-            ),
-            budget_service.ListBudgetsResponse(
-                budgets=[],
-                next_page_token='def',
-            ),
-            budget_service.ListBudgetsResponse(
-                budgets=[
-                    budget_model.Budget(),
-                ],
-                next_page_token='ghi',
-            ),
-            budget_service.ListBudgetsResponse(
-                budgets=[
-                    budget_model.Budget(),
-                    budget_model.Budget(),
-                ],
-            ),
-        )
-        # Two responses for two calls
-        response = response + response
-
-        # Wrap the values into proper Response objs
-        response = tuple(budget_service.ListBudgetsResponse.to_json(x) for x in response)
-        return_values = tuple(Response() for _ in response)
-        for return_val, response_val in zip(return_values, response):
-            return_val._content = response_val.encode('UTF-8')
-            return_val.status_code = 200
-        req.side_effect = return_values
-
-        sample_request = {'parent': 'billingAccounts/sample1'}
-
-        pager = client.list_budgets(request=sample_request)
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, budget_model.Budget)
-                   for i in results)
-
-        pages = list(client.list_budgets(request=sample_request).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-
-def test_delete_budget_rest_use_cached_wrapped_rpc():
-    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
-    # instead of constructing them on each call
-    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
-        client = BudgetServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport="rest",
-        )
-
-        # Should wrap all calls on client creation
-        assert wrapper_fn.call_count > 0
-        wrapper_fn.reset_mock()
-
-        # Ensure method has been cached
-        assert client._transport.delete_budget in client._transport._wrapped_methods
-
-        # Replace cached wrapped function with mock
-        mock_rpc = mock.Mock()
-        mock_rpc.return_value.name = "foo"  # operation_request.operation in compute client(s) expect a string.
-        client._transport._wrapped_methods[client._transport.delete_budget] = mock_rpc
-
-        request = {}
-        client.delete_budget(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert mock_rpc.call_count == 1
-
-        client.delete_budget(request)
-
-        # Establish that a new wrapper was not created for this call
-        assert wrapper_fn.call_count == 0
-        assert mock_rpc.call_count == 2
-
-
-def test_delete_budget_rest_required_fields(request_type=budget_service.DeleteBudgetRequest):
-    transport_class = transports.BudgetServiceRestTransport
-
-    request_init = {}
-    request_init["name"] = ""
-    request = request_type(**request_init)
-    pb_request = request_type.pb(request)
-    jsonified_request = json.loads(json_format.MessageToJson(
-        pb_request,
-        use_integers_for_enums=False
-    ))
-
-    # verify fields with default values are dropped
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_budget._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with default values are now present
-
-    jsonified_request["name"] = 'name_value'
-
-    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_budget._get_unset_required_fields(jsonified_request)
-    jsonified_request.update(unset_fields)
-
-    # verify required fields with non-default values are left alone
-    assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
-
-    client = BudgetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest',
-    )
-    request = request_type(**request_init)
-
-    # Designate an appropriate value for the returned response.
-    return_value = None
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # We need to mock transcode() because providing default values
-        # for required fields will fail the real version if the http_options
-        # expect actual values for those fields.
-        with mock.patch.object(path_template, 'transcode') as transcode:
-            # A uri without fields and an empty body will force all the
-            # request fields to show up in the query_params.
-            pb_request = request_type.pb(request)
-            transcode_result = {
-                'uri': 'v1/sample_method',
-                'method': "delete",
-                'query_params': pb_request,
-            }
-            transcode.return_value = transcode_result
-
-            response_value = Response()
-            response_value.status_code = 200
-            json_return_value = ''
-
-            response_value._content = json_return_value.encode('UTF-8')
-            req.return_value = response_value
-            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
-
-            response = client.delete_budget(request)
-
-            expected_params = [
-                ('$alt', 'json;enum-encoding=int')
-            ]
-            actual_params = req.call_args.kwargs['params']
-            assert expected_params == actual_params
-
-
-def test_delete_budget_rest_unset_required_fields():
-    transport = transports.BudgetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials())
-
-    unset_fields = transport.delete_budget._get_unset_required_fields({})
-    assert set(unset_fields) == (set(()) & set(("name", )))
-
-
-def test_delete_budget_rest_flattened():
-    client = BudgetServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
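-        # DeleteBudget has an empty response body, so the faked HTTP response
-        # carries no JSON payload.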
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'billingAccounts/sample1/budgets/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_budget(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=billingAccounts/*/budgets/*}" % client.transport._host, args[1]) - - -def test_delete_budget_rest_flattened_error(transport: str = 'rest'): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_budget( - budget_service.DeleteBudgetRequest(), - name='name_value', - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.BudgetServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.BudgetServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = BudgetServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.BudgetServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = BudgetServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = BudgetServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.BudgetServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = BudgetServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.BudgetServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = BudgetServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. 
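-    # Both the sync and asyncio gRPC transports should expose their underlying
-    # channel via `grpc_channel`.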
- transport = transports.BudgetServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.BudgetServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.BudgetServiceGrpcTransport, - transports.BudgetServiceGrpcAsyncIOTransport, - transports.BudgetServiceRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = BudgetServiceClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_budget_empty_call_grpc(): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_budget), - '__call__') as call: - call.return_value = budget_model.Budget() - client.create_budget(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = budget_service.CreateBudgetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_budget_empty_call_grpc(): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_budget), - '__call__') as call: - call.return_value = budget_model.Budget() - client.update_budget(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = budget_service.UpdateBudgetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_budget_empty_call_grpc(): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_budget), - '__call__') as call: - call.return_value = budget_model.Budget() - client.get_budget(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = budget_service.GetBudgetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_list_budgets_empty_call_grpc(): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_budgets), - '__call__') as call: - call.return_value = budget_service.ListBudgetsResponse() - client.list_budgets(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = budget_service.ListBudgetsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_budget_empty_call_grpc(): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_budget), - '__call__') as call: - call.return_value = None - client.delete_budget(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = budget_service.DeleteBudgetRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = BudgetServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = BudgetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_budget_empty_call_grpc_asyncio(): - client = BudgetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_budget), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(budget_model.Budget( - name='name_value', - display_name='display_name_value', - etag='etag_value', - )) - await client.create_budget(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = budget_service.CreateBudgetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_budget_empty_call_grpc_asyncio(): - client = BudgetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_budget), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(budget_model.Budget( - name='name_value', - display_name='display_name_value', - etag='etag_value', - )) - await client.update_budget(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = budget_service.UpdateBudgetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_budget_empty_call_grpc_asyncio(): - client = BudgetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_budget), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(budget_model.Budget( - name='name_value', - display_name='display_name_value', - etag='etag_value', - )) - await client.get_budget(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = budget_service.GetBudgetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_budgets_empty_call_grpc_asyncio(): - client = BudgetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_budgets), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(budget_service.ListBudgetsResponse( - next_page_token='next_page_token_value', - )) - await client.list_budgets(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = budget_service.ListBudgetsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_budget_empty_call_grpc_asyncio(): - client = BudgetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_budget), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_budget(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = budget_service.DeleteBudgetRequest() - - assert args[0] == request_msg - - -def test_transport_kind_rest(): - transport = BudgetServiceClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" - - -def test_create_budget_rest_bad_request(request_type=budget_service.CreateBudgetRequest): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'billingAccounts/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
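-    # (A 400 status with an empty JSON body is enough for the REST transport
-    # to raise core_exceptions.BadRequest.)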
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_budget(request) - - -@pytest.mark.parametrize("request_type", [ - budget_service.CreateBudgetRequest, - dict, -]) -def test_create_budget_rest_call_success(request_type): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'billingAccounts/sample1'} - request_init["budget"] = {'name': 'name_value', 'display_name': 'display_name_value', 'budget_filter': {'projects': ['projects_value1', 'projects_value2'], 'resource_ancestors': ['resource_ancestors_value1', 'resource_ancestors_value2'], 'credit_types': ['credit_types_value1', 'credit_types_value2'], 'credit_types_treatment': 1, 'services': ['services_value1', 'services_value2'], 'subaccounts': ['subaccounts_value1', 'subaccounts_value2'], 'labels': {}, 'calendar_period': 1, 'custom_period': {'start_date': {'year': 433, 'month': 550, 'day': 318}, 'end_date': {}}}, 'amount': {'specified_amount': {'currency_code': 'currency_code_value', 'units': 563, 'nanos': 543}, 'last_period_amount': {}}, 'threshold_rules': [{'threshold_percent': 0.1821, 'spend_basis': 1}], 'notifications_rule': {'pubsub_topic': 'pubsub_topic_value', 'schema_version': 'schema_version_value', 'monitoring_notification_channels': ['monitoring_notification_channels_value1', 'monitoring_notification_channels_value2'], 'disable_default_iam_recipients': True, 'enable_project_level_recipients': True}, 'etag': 'etag_value'} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = budget_service.CreateBudgetRequest.meta.fields["budget"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
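-        # Handles both proto-plus message types (via `meta.fields`) and
-        # protobuf message types (via `DESCRIPTOR.fields`).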
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["budget"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["budget"][field])): - del request_init["budget"][field][i][subfield] - else: - del request_init["budget"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = budget_model.Budget( - name='name_value', - display_name='display_name_value', - etag='etag_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = budget_model.Budget.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_budget(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, budget_model.Budget) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.etag == 'etag_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_budget_rest_interceptors(null_interceptor): - transport = transports.BudgetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BudgetServiceRestInterceptor(), - ) - client = BudgetServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BudgetServiceRestInterceptor, "post_create_budget") as post, \ - mock.patch.object(transports.BudgetServiceRestInterceptor, "post_create_budget_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BudgetServiceRestInterceptor, "pre_create_budget") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = budget_service.CreateBudgetRequest.pb(budget_service.CreateBudgetRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = budget_model.Budget.to_json(budget_model.Budget()) - req.return_value.content = return_value - - request = budget_service.CreateBudgetRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = budget_model.Budget() - post_with_metadata.return_value = budget_model.Budget(), metadata - - client.create_budget(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_update_budget_rest_bad_request(request_type=budget_service.UpdateBudgetRequest): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'budget': {'name': 'billingAccounts/sample1/budgets/sample2'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_budget(request) - - -@pytest.mark.parametrize("request_type", [ - budget_service.UpdateBudgetRequest, - dict, -]) -def test_update_budget_rest_call_success(request_type): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'budget': {'name': 'billingAccounts/sample1/budgets/sample2'}} - request_init["budget"] = {'name': 'billingAccounts/sample1/budgets/sample2', 'display_name': 'display_name_value', 'budget_filter': {'projects': ['projects_value1', 'projects_value2'], 'resource_ancestors': ['resource_ancestors_value1', 'resource_ancestors_value2'], 'credit_types': ['credit_types_value1', 'credit_types_value2'], 'credit_types_treatment': 1, 'services': ['services_value1', 'services_value2'], 'subaccounts': ['subaccounts_value1', 'subaccounts_value2'], 'labels': {}, 'calendar_period': 1, 'custom_period': {'start_date': {'year': 433, 'month': 550, 'day': 318}, 'end_date': {}}}, 'amount': {'specified_amount': {'currency_code': 'currency_code_value', 'units': 563, 'nanos': 543}, 'last_period_amount': {}}, 'threshold_rules': [{'threshold_percent': 0.1821, 'spend_basis': 1}], 'notifications_rule': {'pubsub_topic': 'pubsub_topic_value', 'schema_version': 'schema_version_value', 'monitoring_notification_channels': ['monitoring_notification_channels_value1', 'monitoring_notification_channels_value2'], 'disable_default_iam_recipients': True, 'enable_project_level_recipients': True}, 'etag': 'etag_value'} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = budget_service.UpdateBudgetRequest.meta.fields["budget"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["budget"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["budget"][field])): - del request_init["budget"][field][i][subfield] - else: - del request_init["budget"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = budget_model.Budget( - name='name_value', - display_name='display_name_value', - etag='etag_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = budget_model.Budget.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_budget(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, budget_model.Budget) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.etag == 'etag_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_budget_rest_interceptors(null_interceptor): - transport = transports.BudgetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BudgetServiceRestInterceptor(), - ) - client = BudgetServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BudgetServiceRestInterceptor, "post_update_budget") as post, \ - mock.patch.object(transports.BudgetServiceRestInterceptor, "post_update_budget_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BudgetServiceRestInterceptor, "pre_update_budget") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = budget_service.UpdateBudgetRequest.pb(budget_service.UpdateBudgetRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = budget_model.Budget.to_json(budget_model.Budget()) - req.return_value.content = return_value - - request = budget_service.UpdateBudgetRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = budget_model.Budget() - post_with_metadata.return_value = budget_model.Budget(), metadata - - client.update_budget(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_get_budget_rest_bad_request(request_type=budget_service.GetBudgetRequest): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'billingAccounts/sample1/budgets/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_budget(request) - - -@pytest.mark.parametrize("request_type", [ - budget_service.GetBudgetRequest, - dict, -]) -def test_get_budget_rest_call_success(request_type): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'billingAccounts/sample1/budgets/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
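# Note: the REST transport exchanges protobuf JSON over HTTP, so the mock below
# mirrors the real wire format: the expected proto-plus Budget is converted to
# its protobuf form, serialized with json_format.MessageToJson, and handed back
# as the raw response content for the client to parse.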
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = budget_model.Budget( - name='name_value', - display_name='display_name_value', - etag='etag_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = budget_model.Budget.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_budget(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, budget_model.Budget) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.etag == 'etag_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_budget_rest_interceptors(null_interceptor): - transport = transports.BudgetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BudgetServiceRestInterceptor(), - ) - client = BudgetServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BudgetServiceRestInterceptor, "post_get_budget") as post, \ - mock.patch.object(transports.BudgetServiceRestInterceptor, "post_get_budget_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BudgetServiceRestInterceptor, "pre_get_budget") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = budget_service.GetBudgetRequest.pb(budget_service.GetBudgetRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = budget_model.Budget.to_json(budget_model.Budget()) - req.return_value.content = return_value - - request = budget_service.GetBudgetRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = budget_model.Budget() - post_with_metadata.return_value = budget_model.Budget(), metadata - - client.get_budget(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_budgets_rest_bad_request(request_type=budget_service.ListBudgetsRequest): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'parent': 'billingAccounts/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
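# Note: google.api_core maps HTTP error responses to exception classes by
# status code (a 400 body becomes core_exceptions.BadRequest), which is why
# pytest.raises(core_exceptions.BadRequest) below is expected to trip as soon
# as the mocked session returns status_code = 400.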
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_budgets(request) - - -@pytest.mark.parametrize("request_type", [ - budget_service.ListBudgetsRequest, - dict, -]) -def test_list_budgets_rest_call_success(request_type): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'billingAccounts/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = budget_service.ListBudgetsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = budget_service.ListBudgetsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_budgets(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBudgetsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_budgets_rest_interceptors(null_interceptor): - transport = transports.BudgetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BudgetServiceRestInterceptor(), - ) - client = BudgetServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BudgetServiceRestInterceptor, "post_list_budgets") as post, \ - mock.patch.object(transports.BudgetServiceRestInterceptor, "post_list_budgets_with_metadata") as post_with_metadata, \ - mock.patch.object(transports.BudgetServiceRestInterceptor, "pre_list_budgets") as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = budget_service.ListBudgetsRequest.pb(budget_service.ListBudgetsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = budget_service.ListBudgetsResponse.to_json(budget_service.ListBudgetsResponse()) - req.return_value.content = return_value - - request = budget_service.ListBudgetsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = budget_service.ListBudgetsResponse() - post_with_metadata.return_value = 
budget_service.ListBudgetsResponse(), metadata - - client.list_budgets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_budget_rest_bad_request(request_type=budget_service.DeleteBudgetRequest): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {'name': 'billingAccounts/sample1/budgets/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_budget(request) - - -@pytest.mark.parametrize("request_type", [ - budget_service.DeleteBudgetRequest, - dict, -]) -def test_delete_budget_rest_call_success(request_type): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'billingAccounts/sample1/budgets/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = '' - response_value.content = json_return_value.encode('UTF-8') - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_budget(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_budget_rest_interceptors(null_interceptor): - transport = transports.BudgetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BudgetServiceRestInterceptor(), - ) - client = BudgetServiceClient(transport=transport) - - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.BudgetServiceRestInterceptor, "pre_delete_budget") as pre: - pre.assert_not_called() - pb_message = budget_service.DeleteBudgetRequest.pb(budget_service.DeleteBudgetRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - request = budget_service.DeleteBudgetRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_budget(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - -def test_initialize_client_w_rest(): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_budget_empty_call_rest(): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_budget), - '__call__') as call: - client.create_budget(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = budget_service.CreateBudgetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_budget_empty_call_rest(): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_budget), - '__call__') as call: - client.update_budget(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = budget_service.UpdateBudgetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_budget_empty_call_rest(): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_budget), - '__call__') as call: - client.get_budget(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = budget_service.GetBudgetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_budgets_empty_call_rest(): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_budgets), - '__call__') as call: - client.list_budgets(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = budget_service.ListBudgetsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_budget_empty_call_rest(): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_budget), - '__call__') as call: - client.delete_budget(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = budget_service.DeleteBudgetRequest() - - assert args[0] == request_msg - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.BudgetServiceGrpcTransport, - ) - -def test_budget_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.BudgetServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_budget_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.billing.budgets_v1.services.budget_service.transports.BudgetServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.BudgetServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - 'create_budget', - 'update_budget', - 'get_budget', - 'list_budgets', - 'delete_budget', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_budget_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.billing.budgets_v1.services.budget_service.transports.BudgetServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.BudgetServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-billing', - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_budget_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.billing.budgets_v1.services.budget_service.transports.BudgetServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.BudgetServiceTransport() - adc.assert_called_once() - - -def test_budget_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - BudgetServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-billing', - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.BudgetServiceGrpcTransport, - transports.BudgetServiceGrpcAsyncIOTransport, - ], -) -def test_budget_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
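# For reference, "ADC" is google.auth's Application Default Credentials lookup.
# Outside of tests it would be resolved with something like:
#
#     credentials, project_id = google.auth.default(
#         scopes=["https://www.googleapis.com/auth/cloud-platform"],
#     )
#
# The mock below replaces exactly that google.auth.default() call.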
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-billing', 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.BudgetServiceGrpcTransport, - transports.BudgetServiceGrpcAsyncIOTransport, - transports.BudgetServiceRestTransport, - ], -) -def test_budget_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.BudgetServiceGrpcTransport, grpc_helpers), - (transports.BudgetServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_budget_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "billingbudgets.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-billing', - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="billingbudgets.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.BudgetServiceGrpcTransport, transports.BudgetServiceGrpcAsyncIOTransport]) -def test_budget_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_budget_service_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.BudgetServiceRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_budget_service_host_no_port(transport_name): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='billingbudgets.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'billingbudgets.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://billingbudgets.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_budget_service_host_with_port(transport_name): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='billingbudgets.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'billingbudgets.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://billingbudgets.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_budget_service_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = BudgetServiceClient( - credentials=creds1, - transport=transport_name, - ) - client2 = BudgetServiceClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.create_budget._session - session2 = client2.transport.create_budget._session - assert session1 != session2 - session1 = client1.transport.update_budget._session - session2 = client2.transport.update_budget._session - assert session1 != session2 - session1 = client1.transport.get_budget._session - session2 = client2.transport.get_budget._session - assert session1 != session2 - session1 = client1.transport.list_budgets._session - session2 = client2.transport.list_budgets._session - assert session1 != session2 - session1 = client1.transport.delete_budget._session - session2 = client2.transport.delete_budget._session - assert session1 != session2 -def test_budget_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
-    transport = transports.BudgetServiceGrpcTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials is None
-
-
-def test_budget_service_grpc_asyncio_transport_channel():
-    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
-    # Check that channel is used if provided.
-    transport = transports.BudgetServiceGrpcAsyncIOTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials is None
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.BudgetServiceGrpcTransport, transports.BudgetServiceGrpcAsyncIOTransport])
-def test_budget_service_transport_channel_mtls_with_client_cert_source(
-    transport_class
-):
-    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
-        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
-            mock_ssl_cred = mock.Mock()
-            grpc_ssl_channel_cred.return_value = mock_ssl_cred
-
-            mock_grpc_channel = mock.Mock()
-            grpc_create_channel.return_value = mock_grpc_channel
-
-            cred = ga_credentials.AnonymousCredentials()
-            with pytest.warns(DeprecationWarning):
-                with mock.patch.object(google.auth, 'default') as adc:
-                    adc.return_value = (cred, None)
-                    transport = transport_class(
-                        host="squid.clam.whelk",
-                        api_mtls_endpoint="mtls.squid.clam.whelk",
-                        client_cert_source=client_cert_source_callback,
-                    )
-                    adc.assert_called_once()
-
-            grpc_ssl_channel_cred.assert_called_once_with(
-                certificate_chain=b"cert bytes", private_key=b"key bytes"
-            )
-            grpc_create_channel.assert_called_once_with(
-                "mtls.squid.clam.whelk:443",
-                credentials=cred,
-                credentials_file=None,
-                scopes=None,
-                ssl_credentials=mock_ssl_cred,
-                quota_project_id=None,
-                options=[
-                    ("grpc.max_send_message_length", -1),
-                    ("grpc.max_receive_message_length", -1),
-                ],
-            )
-            assert transport.grpc_channel == mock_grpc_channel
-            assert transport._ssl_channel_credentials == mock_ssl_cred
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.BudgetServiceGrpcTransport, transports.BudgetServiceGrpcAsyncIOTransport]) -def test_budget_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_budget_path(): - billing_account = "squid" - budget = "clam" - expected = "billingAccounts/{billing_account}/budgets/{budget}".format(billing_account=billing_account, budget=budget, ) - actual = BudgetServiceClient.budget_path(billing_account, budget) - assert expected == actual - - -def test_parse_budget_path(): - expected = { - "billing_account": "whelk", - "budget": "octopus", - } - path = BudgetServiceClient.budget_path(**expected) - - # Check that the path construction is reversible. - actual = BudgetServiceClient.parse_budget_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "oyster" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = BudgetServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "nudibranch", - } - path = BudgetServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = BudgetServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder, ) - actual = BudgetServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "mussel", - } - path = BudgetServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = BudgetServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "winkle" - expected = "organizations/{organization}".format(organization=organization, ) - actual = BudgetServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nautilus", - } - path = BudgetServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. 
- actual = BudgetServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "scallop" - expected = "projects/{project}".format(project=project, ) - actual = BudgetServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "abalone", - } - path = BudgetServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = BudgetServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "squid" - location = "clam" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = BudgetServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "whelk", - "location": "octopus", - } - path = BudgetServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = BudgetServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.BudgetServiceTransport, '_prep_wrapped_messages') as prep: - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.BudgetServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = BudgetServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_transport_close_grpc(): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = BudgetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close_rest(): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest" - ) - with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (BudgetServiceClient, transports.BudgetServiceGrpcTransport), - (BudgetServiceAsyncClient, transports.BudgetServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/.coveragerc b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/.coveragerc deleted file mode 100644 index cbbb98d2731b..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/billing/budgets/__init__.py - google/cloud/billing/budgets/gapic_version.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/.flake8 b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/.flake8 deleted file mode 100644 index 29227d4cf419..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. 
-    **/.nox/**
-    __pycache__,
-    .git,
-    *.pyc,
-    conf.py
diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/MANIFEST.in b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/MANIFEST.in
deleted file mode 100644
index ec7f10766a5d..000000000000
--- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/MANIFEST.in
+++ /dev/null
@@ -1,2 +0,0 @@
-recursive-include google/cloud/billing/budgets *.py
-recursive-include google/cloud/billing/budgets_v1beta1 *.py
diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/README.rst b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/README.rst
deleted file mode 100644
index a896ef3c45ba..000000000000
--- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/README.rst
+++ /dev/null
@@ -1,143 +0,0 @@
-Python Client for Google Cloud Billing Budgets API
-===================================================
-
-Quick Start
------------
-
-In order to use this library, you first need to go through the following steps:
-
-1. `Select or create a Cloud Platform project.`_
-2. `Enable billing for your project.`_
-3. Enable the Google Cloud Billing Budgets API.
-4. `Setup Authentication.`_
-
-.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
-.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
-.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
-
-Installation
-~~~~~~~~~~~~
-
-Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
-create isolated Python environments. The basic problem it addresses is one of
-dependencies and versions, and indirectly permissions.
-
-With `virtualenv`_, it's possible to install this library without needing system
-install permissions, and without clashing with the installed system
-dependencies.
-
-.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
-
-
-Mac/Linux
-^^^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    source <your-env>/bin/activate
-    <your-env>/bin/pip install /path/to/library
-
-
-Windows
-^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    <your-env>\Scripts\activate
-    <your-env>\Scripts\pip.exe install \path\to\library
-
-
-Logging
--------
-
-This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes.
-Note the following:
-
-#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging.
-#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**.
-#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below.
-
-
-Simple, environment-based configuration
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google
-logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged
-messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging
-event.
-
-A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log.
-
-- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc.
-- Invalid logging scopes: :code:`foo`, :code:`123`, etc.
-
-**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers.
-
-
-Examples
-^^^^^^^^
-
-- Enabling the default handler for all Google-based loggers
-
-.. code-block:: console
-
-    export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google
-
-- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`):
-
-.. code-block:: console
-
-    export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1
-
-
-Advanced, code-based configuration
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-You can also configure a valid logging scope using Python's standard `logging` mechanism.
-
-
-Examples
-^^^^^^^^
-
-- Configuring a handler for all Google-based loggers
-
-.. code-block:: python
-
-    import logging
-
-    from google.cloud.translate_v3 import translate
-
-    base_logger = logging.getLogger("google")
-    base_logger.addHandler(logging.StreamHandler())
-    base_logger.setLevel(logging.DEBUG)
-
-- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`):
-
-.. code-block:: python
-
-    import logging
-
-    from google.cloud.translate_v3 import translate
-
-    base_logger = logging.getLogger("google.cloud.library_v1")
-    base_logger.addHandler(logging.StreamHandler())
-    base_logger.setLevel(logging.DEBUG)
-
-
-Logging details
-~~~~~~~~~~~~~~~
-
-#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root
-   logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set
-   :code:`logging.getLogger("google").propagate = True` in your code.
-#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for
-   one library, but decide you need to also set up environment-based logging configuration for another library.
-
-   #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual
-      if the code-based configuration gets applied first.
-
-#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get
-   executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured.
-   (This is the reason for 2.i. above.)
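As a minimal sketch of the propagation point above (standard library only; the handler and level shown are illustrative choices, not requirements):

.. code-block:: python

    import logging

    # Install a handler on the root logger.
    logging.basicConfig(level=logging.DEBUG)

    # Opt in to forwarding this library's events to the root logger,
    # which is disabled by default as described above.
    logging.getLogger("google").propagate = True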
diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/docs/_static/custom.css b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/docs/_static/custom.css deleted file mode 100644 index 06423be0b592..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/docs/_static/custom.css +++ /dev/null @@ -1,3 +0,0 @@ -dl.field-list > dt { - min-width: 100px -} diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/docs/budgets_v1beta1/budget_service.rst b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/docs/budgets_v1beta1/budget_service.rst deleted file mode 100644 index 100afafd2a20..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/docs/budgets_v1beta1/budget_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -BudgetService -------------------------------- - -.. automodule:: google.cloud.billing.budgets_v1beta1.services.budget_service - :members: - :inherited-members: - -.. automodule:: google.cloud.billing.budgets_v1beta1.services.budget_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/docs/budgets_v1beta1/services_.rst b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/docs/budgets_v1beta1/services_.rst deleted file mode 100644 index 418cbb1977b7..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/docs/budgets_v1beta1/services_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Billing Budgets v1beta1 API -===================================================== -.. toctree:: - :maxdepth: 2 - - budget_service diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/docs/budgets_v1beta1/types_.rst b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/docs/budgets_v1beta1/types_.rst deleted file mode 100644 index ecd04b4311d3..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/docs/budgets_v1beta1/types_.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Billing Budgets v1beta1 API -================================================== - -.. automodule:: google.cloud.billing.budgets_v1beta1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/docs/conf.py b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/docs/conf.py deleted file mode 100644 index 7299e0cc4153..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-billing-budgets documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. 
-
-import sys
-import os
-import shlex
-
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
-sys.path.insert(0, os.path.abspath(".."))
-
-__version__ = "0.1.0"
-
-# -- General configuration ------------------------------------------------
-
-# If your documentation needs a minimal Sphinx version, state it here.
-needs_sphinx = "4.0.1"
-
-# Add any Sphinx extension module names here, as strings. They can be
-# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
-# ones.
-extensions = [
-    "sphinx.ext.autodoc",
-    "sphinx.ext.autosummary",
-    "sphinx.ext.intersphinx",
-    "sphinx.ext.coverage",
-    "sphinx.ext.napoleon",
-    "sphinx.ext.todo",
-    "sphinx.ext.viewcode",
-]
-
-# autodoc/autosummary flags
-autoclass_content = "both"
-autodoc_default_flags = ["members"]
-autosummary_generate = True
-
-
-# Add any paths that contain templates here, relative to this directory.
-templates_path = ["_templates"]
-
-# Allow markdown includes (so releases.md can include CHANGELOG.md)
-# http://www.sphinx-doc.org/en/master/markdown.html
-source_parsers = {".md": "recommonmark.parser.CommonMarkParser"}
-
-# The suffix(es) of source filenames.
-# You can specify multiple suffixes as a list of strings:
-source_suffix = [".rst", ".md"]
-
-# The encoding of source files.
-# source_encoding = 'utf-8-sig'
-
-# The root toctree document.
-root_doc = "index"
-
-# General information about the project.
-project = u"google-cloud-billing-budgets"
-copyright = u"2023, Google, LLC"
-author = u"Google APIs" # TODO: autogenerate this bit
-
-# The version info for the project you're documenting, acts as replacement for
-# |version| and |release|, also used in various other places throughout the
-# built documents.
-#
-# The full version, including alpha/beta/rc tags.
-release = __version__
-# The short X.Y version.
-version = ".".join(release.split(".")[0:2])
-
-# The language for content autogenerated by Sphinx. Refer to documentation
-# for a list of supported languages.
-#
-# This is also used if you do content translation via gettext catalogs.
-# Usually you set "language" from the command line for these cases.
-language = 'en'
-
-# There are two options for replacing |today|: either, you set today to some
-# non-false value, then it is used:
-# today = ''
-# Else, today_fmt is used as the format for a strftime call.
-# today_fmt = '%B %d, %Y'
-
-# List of patterns, relative to source directory, that match files and
-# directories to ignore when looking for source files.
-exclude_patterns = ["_build"]
-
-# The reST default role (used for this markup: `text`) to use for all
-# documents.
-# default_role = None
-
-# If true, '()' will be appended to :func: etc. cross-reference text.
-# add_function_parentheses = True
-
-# If true, the current module name will be prepended to all description
-# unit titles (such as .. function::).
-# add_module_names = True
-
-# If true, sectionauthor and moduleauthor directives will be shown in the
-# output. They are ignored by default.
-# show_authors = False
-
-# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = "sphinx"
-
-# A list of ignored prefixes for module index sorting.
-# modindex_common_prefix = []
-
-# If true, keep warnings as "system message" paragraphs in the built documents.
-# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Billing Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. 
-# Sphinx supports the following languages:
-# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
-# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
-# html_search_language = 'en'
-
-# A dictionary with options for the search language support, empty by default.
-# Now only 'ja' uses this config value
-# html_search_options = {'type': 'default'}
-
-# The name of a javascript file (relative to the configuration directory) that
-# implements a search results scorer. If empty, the default will be used.
-# html_search_scorer = 'scorer.js'
-
-# Output file base name for HTML help builder.
-htmlhelp_basename = "google-cloud-billing-budgets-doc"
-
-# -- Options for warnings ------------------------------------------------------
-
-
-suppress_warnings = [
-    # Temporarily suppress this to avoid "more than one target found for
-    # cross-reference" warnings, which are intractable for us to avoid while in
-    # a mono-repo.
-    # See https://github.com/sphinx-doc/sphinx/blob
-    # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843
-    "ref.python"
-]
-
-# -- Options for LaTeX output ---------------------------------------------
-
-latex_elements = {
-    # The paper size ('letterpaper' or 'a4paper').
-    # 'papersize': 'letterpaper',
-    # The font size ('10pt', '11pt' or '12pt').
-    # 'pointsize': '10pt',
-    # Additional stuff for the LaTeX preamble.
-    # 'preamble': '',
-    # Latex figure (float) alignment
-    # 'figure_align': 'htbp',
-}
-
-# Grouping the document tree into LaTeX files. List of tuples
-# (source start file, target name, title,
-# author, documentclass [howto, manual, or own class]).
-latex_documents = [
-    (
-        root_doc,
-        "google-cloud-billing-budgets.tex",
-        u"google-cloud-billing-budgets Documentation",
-        author,
-        "manual",
-    )
-]
-
-# The name of an image file (relative to this directory) to place at the top of
-# the title page.
-# latex_logo = None
-
-# For "manual" documents, if this is true, then toplevel headings are parts,
-# not chapters.
-# latex_use_parts = False
-
-# If true, show page references after internal links.
-# latex_show_pagerefs = False
-
-# If true, show URL addresses after external links.
-# latex_show_urls = False
-
-# Documents to append as an appendix to all manuals.
-# latex_appendices = []
-
-# If false, no module index is generated.
-# latex_domain_indices = True
-
-
-# -- Options for manual page output ---------------------------------------
-
-# One entry per manual page. List of tuples
-# (source start file, name, description, authors, manual section).
-man_pages = [
-    (
-        root_doc,
-        "google-cloud-billing-budgets",
-        u"Google Cloud Billing Budgets Documentation",
-        [author],
-        1,
-    )
-]
-
-# If true, show URL addresses after external links.
-# man_show_urls = False
-
-
-# -- Options for Texinfo output -------------------------------------------
-
-# Grouping the document tree into Texinfo files. List of tuples
-# (source start file, target name, title, author,
-# dir menu entry, description, category)
-texinfo_documents = [
-    (
-        root_doc,
-        "google-cloud-billing-budgets",
-        u"google-cloud-billing-budgets Documentation",
-        author,
-        "google-cloud-billing-budgets",
-        "GAPIC library for Google Cloud Billing Budgets API",
-        "APIs",
-    )
-]
-
-# Documents to append as an appendix to all manuals.
-# texinfo_appendices = []
-
-# If false, no module index is generated.
-# texinfo_domain_indices = True
-
-# How to display URL addresses: 'footnote', 'no', or 'inline'.
-# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/docs/index.rst b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/docs/index.rst deleted file mode 100644 index 0deb480c722f..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - budgets_v1beta1/services_ - budgets_v1beta1/types_ diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets/__init__.py b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets/__init__.py deleted file mode 100644 index 083f04e00e4c..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets/__init__.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.billing.budgets import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.billing.budgets_v1beta1.services.budget_service.client import BudgetServiceClient -from google.cloud.billing.budgets_v1beta1.services.budget_service.async_client import BudgetServiceAsyncClient - -from google.cloud.billing.budgets_v1beta1.types.budget_model import AllUpdatesRule -from google.cloud.billing.budgets_v1beta1.types.budget_model import Budget -from google.cloud.billing.budgets_v1beta1.types.budget_model import BudgetAmount -from google.cloud.billing.budgets_v1beta1.types.budget_model import CustomPeriod -from google.cloud.billing.budgets_v1beta1.types.budget_model import Filter -from google.cloud.billing.budgets_v1beta1.types.budget_model import LastPeriodAmount -from google.cloud.billing.budgets_v1beta1.types.budget_model import ThresholdRule -from google.cloud.billing.budgets_v1beta1.types.budget_model import CalendarPeriod -from google.cloud.billing.budgets_v1beta1.types.budget_service import CreateBudgetRequest -from google.cloud.billing.budgets_v1beta1.types.budget_service import DeleteBudgetRequest -from google.cloud.billing.budgets_v1beta1.types.budget_service import GetBudgetRequest -from google.cloud.billing.budgets_v1beta1.types.budget_service import ListBudgetsRequest -from google.cloud.billing.budgets_v1beta1.types.budget_service import ListBudgetsResponse -from google.cloud.billing.budgets_v1beta1.types.budget_service import UpdateBudgetRequest - -__all__ = ('BudgetServiceClient', - 'BudgetServiceAsyncClient', - 'AllUpdatesRule', - 'Budget', - 'BudgetAmount', - 'CustomPeriod', - 'Filter', - 'LastPeriodAmount', - 'ThresholdRule', - 'CalendarPeriod', - 'CreateBudgetRequest', - 'DeleteBudgetRequest', - 'GetBudgetRequest', - 'ListBudgetsRequest', - 'ListBudgetsResponse', - 'UpdateBudgetRequest', -) diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets/gapic_version.py b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets/py.typed b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets/py.typed deleted file mode 100644 index b067b1963087..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-billing-budgets package uses inline types. 
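The package files above give the versionless ``google.cloud.billing.budgets`` namespace the same public surface as ``budgets_v1beta1``. A minimal sketch of the import path they enabled (the billing-account ID is a placeholder, and Application Default Credentials are assumed):

.. code-block:: python

    from google.cloud.billing.budgets import BudgetServiceClient, ListBudgetsRequest

    client = BudgetServiceClient()
    request = ListBudgetsRequest(parent="billingAccounts/000000-000000-000000")

    # The pager transparently fetches additional pages as needed.
    for budget in client.list_budgets(request=request):
        print(budget.display_name)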
diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/__init__.py b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/__init__.py deleted file mode 100644 index ceb50ba87adb..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/__init__.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.billing.budgets_v1beta1 import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.budget_service import BudgetServiceClient -from .services.budget_service import BudgetServiceAsyncClient - -from .types.budget_model import AllUpdatesRule -from .types.budget_model import Budget -from .types.budget_model import BudgetAmount -from .types.budget_model import CustomPeriod -from .types.budget_model import Filter -from .types.budget_model import LastPeriodAmount -from .types.budget_model import ThresholdRule -from .types.budget_model import CalendarPeriod -from .types.budget_service import CreateBudgetRequest -from .types.budget_service import DeleteBudgetRequest -from .types.budget_service import GetBudgetRequest -from .types.budget_service import ListBudgetsRequest -from .types.budget_service import ListBudgetsResponse -from .types.budget_service import UpdateBudgetRequest - -__all__ = ( - 'BudgetServiceAsyncClient', -'AllUpdatesRule', -'Budget', -'BudgetAmount', -'BudgetServiceClient', -'CalendarPeriod', -'CreateBudgetRequest', -'CustomPeriod', -'DeleteBudgetRequest', -'Filter', -'GetBudgetRequest', -'LastPeriodAmount', -'ListBudgetsRequest', -'ListBudgetsResponse', -'ThresholdRule', -'UpdateBudgetRequest', -) diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/gapic_metadata.json b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/gapic_metadata.json deleted file mode 100644 index b510f92571ec..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/gapic_metadata.json +++ /dev/null @@ -1,73 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.billing.budgets_v1beta1", - "protoPackage": "google.cloud.billing.budgets.v1beta1", - "schema": "1.0", - "services": { - "BudgetService": { - "clients": { - "grpc": { - "libraryClient": "BudgetServiceClient", - "rpcs": { - "CreateBudget": { - "methods": [ - "create_budget" - ] - }, - "DeleteBudget": { - "methods": [ - "delete_budget" - ] - }, - "GetBudget": { - "methods": [ - "get_budget" - ] - }, - "ListBudgets": { - "methods": [ - "list_budgets" - ] - }, - "UpdateBudget": { - "methods": [ - "update_budget" - ] - } - } - }, - "grpc-async": { - "libraryClient": "BudgetServiceAsyncClient", - "rpcs": { - "CreateBudget": { - 
"methods": [ - "create_budget" - ] - }, - "DeleteBudget": { - "methods": [ - "delete_budget" - ] - }, - "GetBudget": { - "methods": [ - "get_budget" - ] - }, - "ListBudgets": { - "methods": [ - "list_budgets" - ] - }, - "UpdateBudget": { - "methods": [ - "update_budget" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/gapic_version.py b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/gapic_version.py deleted file mode 100644 index 558c8aab67c5..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/py.typed b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/py.typed deleted file mode 100644 index b067b1963087..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-billing-budgets package uses inline types. diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/services/__init__.py b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/services/__init__.py deleted file mode 100644 index 8f6cf068242c..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/services/budget_service/__init__.py b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/services/budget_service/__init__.py deleted file mode 100644 index e50c92a20ec7..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/services/budget_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import BudgetServiceClient -from .async_client import BudgetServiceAsyncClient - -__all__ = ( - 'BudgetServiceClient', - 'BudgetServiceAsyncClient', -) diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/services/budget_service/async_client.py b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/services/budget_service/async_client.py deleted file mode 100644 index 432f05c4ed3c..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/services/budget_service/async_client.py +++ /dev/null @@ -1,728 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import logging as std_logging -from collections import OrderedDict -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.billing.budgets_v1beta1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - - -try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore - -from google.cloud.billing.budgets_v1beta1.services.budget_service import pagers -from google.cloud.billing.budgets_v1beta1.types import budget_model -from google.cloud.billing.budgets_v1beta1.types import budget_service -from .transports.base import BudgetServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import BudgetServiceGrpcAsyncIOTransport -from .client import BudgetServiceClient - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -class BudgetServiceAsyncClient: - """BudgetService stores Cloud Billing budgets, which define a - budget plan and rules to execute as we track spend against that - plan. - """ - - _client: BudgetServiceClient - - # Copy defaults from the synchronous client for use here. - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = BudgetServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = BudgetServiceClient.DEFAULT_MTLS_ENDPOINT - _DEFAULT_ENDPOINT_TEMPLATE = BudgetServiceClient._DEFAULT_ENDPOINT_TEMPLATE - _DEFAULT_UNIVERSE = BudgetServiceClient._DEFAULT_UNIVERSE - - budget_path = staticmethod(BudgetServiceClient.budget_path) - parse_budget_path = staticmethod(BudgetServiceClient.parse_budget_path) - common_billing_account_path = staticmethod(BudgetServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(BudgetServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(BudgetServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(BudgetServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(BudgetServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(BudgetServiceClient.parse_common_organization_path) - common_project_path = staticmethod(BudgetServiceClient.common_project_path) - parse_common_project_path = staticmethod(BudgetServiceClient.parse_common_project_path) - common_location_path = staticmethod(BudgetServiceClient.common_location_path) - parse_common_location_path = staticmethod(BudgetServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - BudgetServiceAsyncClient: The constructed client. 
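-
-        Example (an illustrative sketch; ``"key.json"`` stands in for a real
-        service account key file):
-
-        .. code-block:: python
-
-            import json
-
-            with open("key.json") as f:
-                info = json.load(f)
-
-            client = BudgetServiceAsyncClient.from_service_account_info(info)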
- """ - return BudgetServiceClient.from_service_account_info.__func__(BudgetServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - BudgetServiceAsyncClient: The constructed client. - """ - return BudgetServiceClient.from_service_account_file.__func__(BudgetServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return BudgetServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> BudgetServiceTransport: - """Returns the transport used by the client instance. - - Returns: - BudgetServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._client._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used - by the client instance. - """ - return self._client._universe_domain - - get_transport_class = BudgetServiceClient.get_transport_class - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, BudgetServiceTransport, Callable[..., BudgetServiceTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the budget service async client. 
-
-        Args:
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-            transport (Optional[Union[str,BudgetServiceTransport,Callable[..., BudgetServiceTransport]]]):
-                The transport to use, or a Callable that constructs and returns a new transport to use.
-                If a Callable is given, it will be called with the same set of initialization
-                arguments as used in the BudgetServiceTransport constructor.
-                If set to None, a transport is chosen automatically.
-            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
-                Custom options for the client.
-
-                1. The ``api_endpoint`` property can be used to override the
-                default endpoint provided by the client when ``transport`` is
-                not explicitly provided. Only if this property is not set and
-                ``transport`` was not explicitly provided, the endpoint is
-                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
-                variable, which can have one of the following values:
-                "always" (always use the default mTLS endpoint), "never" (always
-                use the default regular endpoint) and "auto" (auto-switch to the
-                default mTLS endpoint if client certificate is present; this is
-                the default value).
-
-                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
-                is "true", then the ``client_cert_source`` property can be used
-                to provide a client certificate for mTLS transport. If
-                not provided, the default SSL client certificate will be used if
-                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
-                set, no client certificate will be used.
-
-                3. The ``universe_domain`` property can be used to override the
-                default "googleapis.com" universe. Note that the ``api_endpoint``
-                property still takes precedence; and ``universe_domain`` is
-                currently not supported for mTLS.
-
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
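-
-        Example (an illustrative sketch; assumes Application Default
-        Credentials are available in the environment):
-
-        .. code-block:: python
-
-            from google.cloud.billing import budgets_v1beta1
-
-            client = budgets_v1beta1.BudgetServiceAsyncClient()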
- """ - self._client = BudgetServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.billing.budgets_v1beta1.BudgetServiceAsyncClient`.", - extra = { - "serviceName": "google.cloud.billing.budgets.v1beta1.BudgetService", - "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { - "serviceName": "google.cloud.billing.budgets.v1beta1.BudgetService", - "credentialsType": None, - } - ) - - async def create_budget(self, - request: Optional[Union[budget_service.CreateBudgetRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> budget_model.Budget: - r"""Creates a new budget. See `Quotas and - limits `__ for more - information on the limits of the number of budgets you can - create. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.billing import budgets_v1beta1 - - async def sample_create_budget(): - # Create a client - client = budgets_v1beta1.BudgetServiceAsyncClient() - - # Initialize request argument(s) - request = budgets_v1beta1.CreateBudgetRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_budget(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.billing.budgets_v1beta1.types.CreateBudgetRequest, dict]]): - The request object. Request for CreateBudget - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.billing.budgets_v1beta1.types.Budget: - A budget is a plan that describes - what you expect to spend on Cloud - projects, plus the rules to execute as - spend is tracked against that plan, (for - example, send an alert when 90% of the - target spend is met). The budget time - period is configurable, with options - such as month (default), quarter, year, - or custom time period. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. 
-        if not isinstance(request, budget_service.CreateBudgetRequest):
-            request = budget_service.CreateBudgetRequest(request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.create_budget]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def update_budget(self,
-            request: Optional[Union[budget_service.UpdateBudgetRequest, dict]] = None,
-            *,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> budget_model.Budget:
-        r"""Updates a budget and returns the updated budget.
-
-        WARNING: There are some fields exposed on the Google
-        Cloud Console that aren't available on this API. Budget
-        fields that are not exposed in this API will not be
-        changed by this method.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud.billing import budgets_v1beta1
-
-            async def sample_update_budget():
-                # Create a client
-                client = budgets_v1beta1.BudgetServiceAsyncClient()
-
-                # Initialize request argument(s)
-                request = budgets_v1beta1.UpdateBudgetRequest(
-                )
-
-                # Make the request
-                response = await client.update_budget(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.billing.budgets_v1beta1.types.UpdateBudgetRequest, dict]]):
-                The request object. Request for UpdateBudget
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.billing.budgets_v1beta1.types.Budget:
-                A budget is a plan that describes
-                what you expect to spend on Cloud
-                projects, plus the rules to execute as
-                spend is tracked against that plan (for
-                example, send an alert when 90% of the
-                target spend is met). The budget time
-                period is configurable, with options
-                such as month (default), quarter, year,
-                or custom time period.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, budget_service.UpdateBudgetRequest):
-            request = budget_service.UpdateBudgetRequest(request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.update_budget]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("budget.name", request.budget.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def get_budget(self,
-            request: Optional[Union[budget_service.GetBudgetRequest, dict]] = None,
-            *,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> budget_model.Budget:
-        r"""Returns a budget.
-
-        WARNING: There are some fields exposed on the Google
-        Cloud Console that aren't available on this API. When
-        reading from the API, you will not see these fields in
-        the return value, though they may have been set in the
-        Cloud Console.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud.billing import budgets_v1beta1
-
-            async def sample_get_budget():
-                # Create a client
-                client = budgets_v1beta1.BudgetServiceAsyncClient()
-
-                # Initialize request argument(s)
-                request = budgets_v1beta1.GetBudgetRequest(
-                    name="name_value",
-                )
-
-                # Make the request
-                response = await client.get_budget(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.billing.budgets_v1beta1.types.GetBudgetRequest, dict]]):
-                The request object. Request for GetBudget
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.billing.budgets_v1beta1.types.Budget:
-                A budget is a plan that describes
-                what you expect to spend on Cloud
-                projects, plus the rules to execute as
-                spend is tracked against that plan (for
-                example, send an alert when 90% of the
-                target spend is met). The budget time
-                period is configurable, with options
-                such as month (default), quarter, year,
-                or custom time period.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, budget_service.GetBudgetRequest):
-            request = budget_service.GetBudgetRequest(request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.get_budget]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def list_budgets(self,
-            request: Optional[Union[budget_service.ListBudgetsRequest, dict]] = None,
-            *,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> pagers.ListBudgetsAsyncPager:
-        r"""Returns a list of budgets for a billing account.
-
-        WARNING: There are some fields exposed on the Google
-        Cloud Console that aren't available on this API. When
-        reading from the API, you will not see these fields in
-        the return value, though they may have been set in the
-        Cloud Console.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud.billing import budgets_v1beta1
-
-            async def sample_list_budgets():
-                # Create a client
-                client = budgets_v1beta1.BudgetServiceAsyncClient()
-
-                # Initialize request argument(s)
-                request = budgets_v1beta1.ListBudgetsRequest(
-                    parent="parent_value",
-                )
-
-                # Make the request
-                page_result = await client.list_budgets(request=request)
-
-                # Handle the response
-                async for response in page_result:
-                    print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.billing.budgets_v1beta1.types.ListBudgetsRequest, dict]]):
-                The request object. Request for ListBudgets
-            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
-                sent along with the request as metadata. Normally, each value must be of type `str`,
-                but for metadata keys ending with the suffix `-bin`, the corresponding values must
-                be of type `bytes`.
-
-        Returns:
-            google.cloud.billing.budgets_v1beta1.services.budget_service.pagers.ListBudgetsAsyncPager:
-                Response for ListBudgets
-
-                Iterating over this object will yield
-                results and resolve additional pages
-                automatically.
-
-        """
-        # Create or coerce a protobuf request object.
-        # - Use the request object if provided (there's no risk of modifying the input as
-        #   there are no flattened fields), or create one.
-        if not isinstance(request, budget_service.ListBudgetsRequest):
-            request = budget_service.ListBudgetsRequest(request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._client._transport._wrapped_methods[self._client._transport.list_budgets]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Validate the universe domain.
-        self._client._validate_universe_domain()
-
-        # Send the request.
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListBudgetsAsyncPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_budget(self, - request: Optional[Union[budget_service.DeleteBudgetRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a budget. Returns successfully if already - deleted. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.billing import budgets_v1beta1 - - async def sample_delete_budget(): - # Create a client - client = budgets_v1beta1.BudgetServiceAsyncClient() - - # Initialize request argument(s) - request = budgets_v1beta1.DeleteBudgetRequest( - name="name_value", - ) - - # Make the request - await client.delete_budget(request=request) - - Args: - request (Optional[Union[google.cloud.billing.budgets_v1beta1.types.DeleteBudgetRequest, dict]]): - The request object. Request for DeleteBudget - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, budget_service.DeleteBudgetRequest): - request = budget_service.DeleteBudgetRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport.delete_budget] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. 
- await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def __aenter__(self) -> "BudgetServiceAsyncClient": - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "BudgetServiceAsyncClient", -) diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/services/budget_service/client.py b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/services/budget_service/client.py deleted file mode 100644 index d3fe11b20e2c..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/services/budget_service/client.py +++ /dev/null @@ -1,1088 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from http import HTTPStatus -import json -import logging as std_logging -import os -import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import warnings - -from google.cloud.billing.budgets_v1beta1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - -from google.cloud.billing.budgets_v1beta1.services.budget_service import pagers -from google.cloud.billing.budgets_v1beta1.types import budget_model -from google.cloud.billing.budgets_v1beta1.types import budget_service -from .transports.base import BudgetServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import BudgetServiceGrpcTransport -from .transports.grpc_asyncio import BudgetServiceGrpcAsyncIOTransport - - -class BudgetServiceClientMeta(type): - """Metaclass for the BudgetService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. 
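-
-    Example (illustrative):
-
-    .. code-block:: python
-
-        # "grpc" selects the synchronous transport class; calling with no
-        # label returns the first (default) transport in the registry.
-        transport_cls = BudgetServiceClient.get_transport_class("grpc")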
- """ - _transport_registry = OrderedDict() # type: Dict[str, Type[BudgetServiceTransport]] - _transport_registry["grpc"] = BudgetServiceGrpcTransport - _transport_registry["grpc_asyncio"] = BudgetServiceGrpcAsyncIOTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[BudgetServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class BudgetServiceClient(metaclass=BudgetServiceClientMeta): - """BudgetService stores Cloud Billing budgets, which define a - budget plan and rules to execute as we track spend against that - plan. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. - DEFAULT_ENDPOINT = "billingbudgets.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - _DEFAULT_ENDPOINT_TEMPLATE = "billingbudgets.{UNIVERSE_DOMAIN}" - _DEFAULT_UNIVERSE = "googleapis.com" - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - BudgetServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - BudgetServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> BudgetServiceTransport: - """Returns the transport used by the client instance. 
- - Returns: - BudgetServiceTransport: The transport used by the client - instance. - """ - return self._transport - - @staticmethod - def budget_path(billing_account: str,budget: str,) -> str: - """Returns a fully-qualified budget string.""" - return "billingAccounts/{billing_account}/budgets/{budget}".format(billing_account=billing_account, budget=budget, ) - - @staticmethod - def parse_budget_path(path: str) -> Dict[str,str]: - """Parses a budget path into its component segments.""" - m = re.match(r"^billingAccounts/(?P<billing_account>.+?)/budgets/(?P<budget>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P<folder>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse an organization path into its component segments.""" - m = re.match(r"^organizations/(?P<organization>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P<project>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Deprecated. Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` is provided, use the provided one. 
- (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - @staticmethod - def _read_environment_variables(): - """Returns the environment variables used by the client. - - Returns: - Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, - GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. - - Raises: - ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not - any of ["true", "false"]. - google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT - is not any of ["auto", "never", "always"]. - """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() - universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env - - @staticmethod - def _get_client_cert_source(provided_cert_source, use_cert_flag): - """Return the client cert source to be used by the client. - - Args: - provided_cert_source (bytes): The client certificate source provided. 
- use_cert_flag (bool): A flag indicating whether to use the client certificate. - - Returns: - bytes or None: The client cert source to be used by the client. - """ - client_cert_source = None - if use_cert_flag: - if provided_cert_source: - client_cert_source = provided_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - return client_cert_source - - @staticmethod - def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): - """Return the API endpoint used by the client. - - Args: - api_override (str): The API endpoint override. If specified, this is always - the return value of this function and the other arguments are not used. - client_cert_source (bytes): The client certificate source used by the client. - universe_domain (str): The universe domain used by the client. - use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. - Possible values are "always", "auto", or "never". - - Returns: - str: The API endpoint to be used by the client. - """ - if api_override is not None: - api_endpoint = api_override - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - _default_universe = BudgetServiceClient._DEFAULT_UNIVERSE - if universe_domain != _default_universe: - raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") - api_endpoint = BudgetServiceClient.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = BudgetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) - return api_endpoint - - @staticmethod - def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: - """Return the universe domain used by the client. - - Args: - client_universe_domain (Optional[str]): The universe domain configured via the client options. - universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. - - Returns: - str: The universe domain to be used by the client. - - Raises: - ValueError: If the universe domain is an empty string. - """ - universe_domain = BudgetServiceClient._DEFAULT_UNIVERSE - if client_universe_domain is not None: - universe_domain = client_universe_domain - elif universe_domain_env is not None: - universe_domain = universe_domain_env - if len(universe_domain.strip()) == 0: - raise ValueError("Universe Domain cannot be an empty string.") - return universe_domain - - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. - - Raises: - ValueError: If the configured universe domain is not valid. - """ - - # NOTE (b/349488459): universe validation is disabled until further notice. - return True - - def _add_cred_info_for_auth_errors( - self, - error: core_exceptions.GoogleAPICallError - ) -> None: - """Adds credential info string to error details for 401/403/404 errors. - - Args: - error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
- """ - if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: - return - - cred = self._transport._credentials - - # get_cred_info is only available in google-auth>=2.35.0 - if not hasattr(cred, "get_cred_info"): - return - - # ignore the type check since pypy test fails when get_cred_info - # is not available - cred_info = cred.get_cred_info() # type: ignore - if cred_info and hasattr(error._details, "append"): - error._details.append(json.dumps(cred_info)) - - @property - def api_endpoint(self): - """Return the API endpoint used by the client instance. - - Returns: - str: The API endpoint used by the client instance. - """ - return self._api_endpoint - - @property - def universe_domain(self) -> str: - """Return the universe domain used by the client instance. - - Returns: - str: The universe domain used by the client instance. - """ - return self._universe_domain - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, BudgetServiceTransport, Callable[..., BudgetServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the budget service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Optional[Union[str,BudgetServiceTransport,Callable[..., BudgetServiceTransport]]]): - The transport to use, or a Callable that constructs and returns a new transport. - If a Callable is given, it will be called with the same set of initialization - arguments as used in the BudgetServiceTransport constructor. - If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): - Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the - default endpoint provided by the client when ``transport`` is - not explicitly provided. Only if this property is not set and - ``transport`` was not explicitly provided, the endpoint is - determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment - variable, which have one of the following values: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto-switch to the - default mTLS endpoint if client certificate is present; this is - the default value). - - 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide a client certificate for mTLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - 3. The ``universe_domain`` property can be used to override the - default "googleapis.com" universe. Note that the ``api_endpoint`` - property still takes precedence; and ``universe_domain`` is - currently not supported for mTLS. - - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client_options = client_options - if isinstance(self._client_options, dict): - self._client_options = client_options_lib.from_dict(self._client_options) - if self._client_options is None: - self._client_options = client_options_lib.ClientOptions() - self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) - - self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = BudgetServiceClient._read_environment_variables() - self._client_cert_source = BudgetServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._universe_domain = BudgetServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` - - # Initialize the universe domain validation. - self._is_universe_domain_valid = False - - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER - # Setup logging. - client_logging.initialize_logging() - - api_key_value = getattr(self._client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - transport_provided = isinstance(transport, BudgetServiceTransport) - if transport_provided: - # transport is a BudgetServiceTransport instance. - if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if self._client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = cast(BudgetServiceTransport, transport) - self._api_endpoint = self._transport.host - - self._api_endpoint = (self._api_endpoint or - BudgetServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) - - if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - transport_init: Union[Type[BudgetServiceTransport], Callable[..., BudgetServiceTransport]] = ( - BudgetServiceClient.get_transport_class(transport) - if isinstance(transport, str) or transport is None - else cast(Callable[..., BudgetServiceTransport], transport) - ) - # initialize with the provided callable or the passed in class - self._transport = transport_init( - credentials=credentials, - credentials_file=self._client_options.credentials_file, - host=self._api_endpoint, - scopes=self._client_options.scopes, - client_cert_source_for_mtls=self._client_cert_source, - quota_project_id=self._client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=self._client_options.api_audience, - ) - - if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER - _LOGGER.debug( - "Created client `google.cloud.billing.budgets_v1beta1.BudgetServiceClient`.", - extra = { - "serviceName": "google.cloud.billing.budgets.v1beta1.BudgetService", - "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { - "serviceName": "google.cloud.billing.budgets.v1beta1.BudgetService", - "credentialsType": None, - } - ) - - def create_budget(self, - request: Optional[Union[budget_service.CreateBudgetRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> budget_model.Budget: - r"""Creates a new budget. See `Quotas and - limits <https://cloud.google.com/billing/quotas>`__ for more - information on the limits of the number of budgets you can - create. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.billing import budgets_v1beta1 - - def sample_create_budget(): - # Create a client - client = budgets_v1beta1.BudgetServiceClient() - - # Initialize request argument(s) - request = budgets_v1beta1.CreateBudgetRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_budget(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.billing.budgets_v1beta1.types.CreateBudgetRequest, dict]): - The request object. 
Request for CreateBudget - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.billing.budgets_v1beta1.types.Budget: - A budget is a plan that describes - what you expect to spend on Cloud - projects, plus the rules to execute as - spend is tracked against that plan, (for - example, send an alert when 90% of the - target spend is met). The budget time - period is configurable, with options - such as month (default), quarter, year, - or custom time period. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, budget_service.CreateBudgetRequest): - request = budget_service.CreateBudgetRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_budget] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_budget(self, - request: Optional[Union[budget_service.UpdateBudgetRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> budget_model.Budget: - r"""Updates a budget and returns the updated budget. - - WARNING: There are some fields exposed on the Google - Cloud Console that aren't available on this API. Budget - fields that are not exposed in this API will not be - changed by this method. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.billing import budgets_v1beta1 - - def sample_update_budget(): - # Create a client - client = budgets_v1beta1.BudgetServiceClient() - - # Initialize request argument(s) - request = budgets_v1beta1.UpdateBudgetRequest( - ) - - # Make the request - response = client.update_budget(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.billing.budgets_v1beta1.types.UpdateBudgetRequest, dict]): - The request object. Request for UpdateBudget - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. 
Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.cloud.billing.budgets_v1beta1.types.Budget: - A budget is a plan that describes - what you expect to spend on Cloud - projects, plus the rules to execute as - spend is tracked against that plan, (for - example, send an alert when 90% of the - target spend is met). The budget time - period is configurable, with options - such as month (default), quarter, year, - or custom time period. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, budget_service.UpdateBudgetRequest): - request = budget_service.UpdateBudgetRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_budget] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("budget.name", request.budget.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_budget(self, - request: Optional[Union[budget_service.GetBudgetRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> budget_model.Budget: - r"""Returns a budget. - - WARNING: There are some fields exposed on the Google - Cloud Console that aren't available on this API. When - reading from the API, you will not see these fields in - the return value, though they may have been set in the - Cloud Console. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.billing import budgets_v1beta1 - - def sample_get_budget(): - # Create a client - client = budgets_v1beta1.BudgetServiceClient() - - # Initialize request argument(s) - request = budgets_v1beta1.GetBudgetRequest( - name="name_value", - ) - - # Make the request - response = client.get_budget(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.billing.budgets_v1beta1.types.GetBudgetRequest, dict]): - The request object. Request for GetBudget - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
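Since the `retry`, `timeout`, and `metadata` parameters documented here are shared by every RPC on this client, a hedged per-call override sketch may help. The billing-account and budget IDs below are placeholders, and running it requires valid credentials:

.. code-block:: python

   from google.api_core import exceptions as core_exceptions
   from google.api_core import retry as retries
   from google.cloud.billing import budgets_v1beta1

   client = budgets_v1beta1.BudgetServiceClient()
   request = budgets_v1beta1.UpdateBudgetRequest(
       budget={"name": "billingAccounts/000000-000000-000000/budgets/placeholder-id"},
   )

   # Override the generated retry/timeout defaults for this one call.
   response = client.update_budget(
       request=request,
       retry=retries.Retry(
           initial=0.1,
           maximum=60.0,
           multiplier=1.3,
           predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
       ),
       timeout=30.0,
   )
   print(response.display_name)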
- - Returns: - google.cloud.billing.budgets_v1beta1.types.Budget: - A budget is a plan that describes - what you expect to spend on Cloud - projects, plus the rules to execute as - spend is tracked against that plan, (for - example, send an alert when 90% of the - target spend is met). The budget time - period is configurable, with options - such as month (default), quarter, year, - or custom time period. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, budget_service.GetBudgetRequest): - request = budget_service.GetBudgetRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_budget] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_budgets(self, - request: Optional[Union[budget_service.ListBudgetsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListBudgetsPager: - r"""Returns a list of budgets for a billing account. - - WARNING: There are some fields exposed on the Google - Cloud Console that aren't available on this API. When - reading from the API, you will not see these fields in - the return value, though they may have been set in the - Cloud Console. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.billing import budgets_v1beta1 - - def sample_list_budgets(): - # Create a client - client = budgets_v1beta1.BudgetServiceClient() - - # Initialize request argument(s) - request = budgets_v1beta1.ListBudgetsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_budgets(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.billing.budgets_v1beta1.types.ListBudgetsRequest, dict]): - The request object. Request for ListBudgets - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. 
- - Returns: - google.cloud.billing.budgets_v1beta1.services.budget_service.pagers.ListBudgetsPager: - Response for ListBudgets - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, budget_service.ListBudgetsRequest): - request = budget_service.ListBudgetsRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_budgets] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListBudgetsPager( - method=rpc, - request=request, - response=response, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_budget(self, - request: Optional[Union[budget_service.DeleteBudgetRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: - r"""Deletes a budget. Returns successfully if already - deleted. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.billing import budgets_v1beta1 - - def sample_delete_budget(): - # Create a client - client = budgets_v1beta1.BudgetServiceClient() - - # Initialize request argument(s) - request = budgets_v1beta1.DeleteBudgetRequest( - name="name_value", - ) - - # Make the request - client.delete_budget(request=request) - - Args: - request (Union[google.cloud.billing.budgets_v1beta1.types.DeleteBudgetRequest, dict]): - The request object. Request for DeleteBudget - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - # Create or coerce a protobuf request object. - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, budget_service.DeleteBudgetRequest): - request = budget_service.DeleteBudgetRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.delete_budget] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def __enter__(self) -> "BudgetServiceClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "BudgetServiceClient", -) diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/services/budget_service/pagers.py b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/services/budget_service/pagers.py deleted file mode 100644 index 7bda15956c41..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/services/budget_service/pagers.py +++ /dev/null @@ -1,167 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import retry_async as retries_async -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore - -from google.cloud.billing.budgets_v1beta1.types import budget_model -from google.cloud.billing.budgets_v1beta1.types import budget_service - - -class ListBudgetsPager: - """A pager for iterating through ``list_budgets`` requests. - - This class thinly wraps an initial - :class:`google.cloud.billing.budgets_v1beta1.types.ListBudgetsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``budgets`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListBudgets`` requests and continue to iterate - through the ``budgets`` field on the - corresponding responses. - - All the usual :class:`google.cloud.billing.budgets_v1beta1.types.ListBudgetsResponse` - attributes are available on the pager. 
If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., budget_service.ListBudgetsResponse], - request: budget_service.ListBudgetsRequest, - response: budget_service.ListBudgetsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.billing.budgets_v1beta1.types.ListBudgetsRequest): - The initial request object. - response (google.cloud.billing.budgets_v1beta1.types.ListBudgetsResponse): - The initial response object. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = budget_service.ListBudgetsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[budget_service.ListBudgetsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[budget_model.Budget]: - for page in self.pages: - yield from page.budgets - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListBudgetsAsyncPager: - """A pager for iterating through ``list_budgets`` requests. - - This class thinly wraps an initial - :class:`google.cloud.billing.budgets_v1beta1.types.ListBudgetsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``budgets`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListBudgets`` requests and continue to iterate - through the ``budgets`` field on the - corresponding responses. - - All the usual :class:`google.cloud.billing.budgets_v1beta1.types.ListBudgetsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[budget_service.ListBudgetsResponse]], - request: budget_service.ListBudgetsRequest, - response: budget_service.ListBudgetsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.billing.budgets_v1beta1.types.ListBudgetsRequest): - The initial request object. - response (google.cloud.billing.budgets_v1beta1.types.ListBudgetsResponse): - The initial response object. 
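A short usage sketch for the sync pager above, with a placeholder billing account; both flat iteration and page-level iteration drive the same `pages` generator:

.. code-block:: python

   from google.cloud.billing import budgets_v1beta1

   client = budgets_v1beta1.BudgetServiceClient()
   request = budgets_v1beta1.ListBudgetsRequest(parent="billingAccounts/000000-000000-000000")

   # Flat iteration: the pager issues follow-up ListBudgets calls lazily.
   for budget in client.list_budgets(request=request):
       print(budget.name)

   # Page-level iteration, e.g. to observe page boundaries.
   for page in client.list_budgets(request=request).pages:
       print(len(page.budgets), "budgets on this page")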
- retry (google.api_core.retry.AsyncRetry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - """ - self._method = method - self._request = budget_service.ListBudgetsRequest(request) - self._response = response - self._retry = retry - self._timeout = timeout - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[budget_service.ListBudgetsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[budget_model.Budget]: - async def async_generator(): - async for page in self.pages: - for response in page.budgets: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/services/budget_service/transports/README.rst b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/services/budget_service/transports/README.rst deleted file mode 100644 index 2dbfa440f9fa..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/services/budget_service/transports/README.rst +++ /dev/null @@ -1,9 +0,0 @@ - -transport inheritance structure -_______________________________ - -`BudgetServiceTransport` is the ABC for all transports. -- public child `BudgetServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). -- public child `BudgetServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). -- private child `_BaseBudgetServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). -- public child `BudgetServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/services/budget_service/transports/__init__.py b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/services/budget_service/transports/__init__.py deleted file mode 100644 index 4dcc16ae221b..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/services/budget_service/transports/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import BudgetServiceTransport -from .grpc import BudgetServiceGrpcTransport -from .grpc_asyncio import BudgetServiceGrpcAsyncIOTransport - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[BudgetServiceTransport]] -_transport_registry['grpc'] = BudgetServiceGrpcTransport -_transport_registry['grpc_asyncio'] = BudgetServiceGrpcAsyncIOTransport - -__all__ = ( - 'BudgetServiceTransport', - 'BudgetServiceGrpcTransport', - 'BudgetServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/services/budget_service/transports/base.py b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/services/budget_service/transports/base.py deleted file mode 100644 index e11aeba455e2..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/services/budget_service/transports/base.py +++ /dev/null @@ -1,253 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.billing.budgets_v1beta1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.billing.budgets_v1beta1.types import budget_model -from google.cloud.billing.budgets_v1beta1.types import budget_service -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class BudgetServiceTransport(abc.ABC): - """Abstract transport class for BudgetService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-billing', - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'billingbudgets.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'billingbudgets.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - if not hasattr(self, "_ignore_credentials"): - self._ignore_credentials: bool = False - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None and not self._ignore_credentials: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. 
- if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - @property - def host(self): - return self._host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.create_budget: gapic_v1.method.wrap_method( - self.create_budget, - default_timeout=60.0, - client_info=client_info, - ), - self.update_budget: gapic_v1.method.wrap_method( - self.update_budget, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_budget: gapic_v1.method.wrap_method( - self.get_budget, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_budgets: gapic_v1.method.wrap_method( - self.list_budgets, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.delete_budget: gapic_v1.method.wrap_method( - self.delete_budget, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
- """ - raise NotImplementedError() - - @property - def create_budget(self) -> Callable[ - [budget_service.CreateBudgetRequest], - Union[ - budget_model.Budget, - Awaitable[budget_model.Budget] - ]]: - raise NotImplementedError() - - @property - def update_budget(self) -> Callable[ - [budget_service.UpdateBudgetRequest], - Union[ - budget_model.Budget, - Awaitable[budget_model.Budget] - ]]: - raise NotImplementedError() - - @property - def get_budget(self) -> Callable[ - [budget_service.GetBudgetRequest], - Union[ - budget_model.Budget, - Awaitable[budget_model.Budget] - ]]: - raise NotImplementedError() - - @property - def list_budgets(self) -> Callable[ - [budget_service.ListBudgetsRequest], - Union[ - budget_service.ListBudgetsResponse, - Awaitable[budget_service.ListBudgetsResponse] - ]]: - raise NotImplementedError() - - @property - def delete_budget(self) -> Callable[ - [budget_service.DeleteBudgetRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'BudgetServiceTransport', -) diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/services/budget_service/transports/grpc.py b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/services/budget_service/transports/grpc.py deleted file mode 100644 index 0609e9e5a9a1..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/services/budget_service/transports/grpc.py +++ /dev/null @@ -1,476 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import json -import logging as std_logging -import pickle -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore - -from google.cloud.billing.budgets_v1beta1.types import budget_model -from google.cloud.billing.budgets_v1beta1.types import budget_service -from google.protobuf import empty_pb2 # type: ignore -from .base import BudgetServiceTransport, DEFAULT_CLIENT_INFO - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER - def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.billing.budgets.v1beta1.BudgetService", - "rpcName": client_call_details.method, - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - - response = continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = response.trailing_metadata() - # Convert gRPC metadata to list of tuples - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = response.result() - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response for {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.billing.budgets.v1beta1.BudgetService", - "rpcName": client_call_details.method, - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class BudgetServiceGrpcTransport(BudgetServiceTransport): - """gRPC backend transport for BudgetService. - - BudgetService stores Cloud Billing budgets, which define a - budget plan and rules to execute as we track spend against that - plan. 
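The deleted interceptor above follows the stock `grpc.UnaryUnaryClientInterceptor` contract; a minimal standalone sketch of that contract, with a placeholder target address:

.. code-block:: python

   import grpc

   class PrintingInterceptor(grpc.UnaryUnaryClientInterceptor):
       """Print each unary-unary RPC before and after it runs."""

       def intercept_unary_unary(self, continuation, client_call_details, request):
           print(f"-> {client_call_details.method}")
           response = continuation(client_call_details, request)
           print(f"<- {client_call_details.method}")
           return response

   # Attach to any channel; grpc.intercept_channel composes interceptors.
   channel = grpc.intercept_channel(
       grpc.insecure_channel("localhost:50051"),  # placeholder target
       PrintingInterceptor(),
   )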
- - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'billingbudgets.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'billingbudgets.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional[Sequence[str]]): A list of scopes. This argument is - ignored if a ``channel`` instance is provided. - channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, grpc.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) - - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'billingbudgets.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. 
- credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): An optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def create_budget(self) -> Callable[ - [budget_service.CreateBudgetRequest], - budget_model.Budget]: - r"""Return a callable for the create budget method over gRPC. - - Creates a new budget. See `Quotas and - limits `__ for more - information on the limits of the number of budgets you can - create. - - Returns: - Callable[[~.CreateBudgetRequest], - ~.Budget]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_budget' not in self._stubs: - self._stubs['create_budget'] = self._logged_channel.unary_unary( - '/google.cloud.billing.budgets.v1beta1.BudgetService/CreateBudget', - request_serializer=budget_service.CreateBudgetRequest.serialize, - response_deserializer=budget_model.Budget.deserialize, - ) - return self._stubs['create_budget'] - - @property - def update_budget(self) -> Callable[ - [budget_service.UpdateBudgetRequest], - budget_model.Budget]: - r"""Return a callable for the update budget method over gRPC. - - Updates a budget and returns the updated budget. - - WARNING: There are some fields exposed on the Google - Cloud Console that aren't available on this API. Budget - fields that are not exposed in this API will not be - changed by this method. - - Returns: - Callable[[~.UpdateBudgetRequest], - ~.Budget]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_budget' not in self._stubs: - self._stubs['update_budget'] = self._logged_channel.unary_unary( - '/google.cloud.billing.budgets.v1beta1.BudgetService/UpdateBudget', - request_serializer=budget_service.UpdateBudgetRequest.serialize, - response_deserializer=budget_model.Budget.deserialize, - ) - return self._stubs['update_budget'] - - @property - def get_budget(self) -> Callable[ - [budget_service.GetBudgetRequest], - budget_model.Budget]: - r"""Return a callable for the get budget method over gRPC. - - Returns a budget. - - WARNING: There are some fields exposed on the Google - Cloud Console that aren't available on this API.
When - reading from the API, you will not see these fields in - the return value, though they may have been set in the - Cloud Console. - - Returns: - Callable[[~.GetBudgetRequest], - ~.Budget]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_budget' not in self._stubs: - self._stubs['get_budget'] = self._logged_channel.unary_unary( - '/google.cloud.billing.budgets.v1beta1.BudgetService/GetBudget', - request_serializer=budget_service.GetBudgetRequest.serialize, - response_deserializer=budget_model.Budget.deserialize, - ) - return self._stubs['get_budget'] - - @property - def list_budgets(self) -> Callable[ - [budget_service.ListBudgetsRequest], - budget_service.ListBudgetsResponse]: - r"""Return a callable for the list budgets method over gRPC. - - Returns a list of budgets for a billing account. - - WARNING: There are some fields exposed on the Google - Cloud Console that aren't available on this API. When - reading from the API, you will not see these fields in - the return value, though they may have been set in the - Cloud Console. - - Returns: - Callable[[~.ListBudgetsRequest], - ~.ListBudgetsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_budgets' not in self._stubs: - self._stubs['list_budgets'] = self._logged_channel.unary_unary( - '/google.cloud.billing.budgets.v1beta1.BudgetService/ListBudgets', - request_serializer=budget_service.ListBudgetsRequest.serialize, - response_deserializer=budget_service.ListBudgetsResponse.deserialize, - ) - return self._stubs['list_budgets'] - - @property - def delete_budget(self) -> Callable[ - [budget_service.DeleteBudgetRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete budget method over gRPC. - - Deletes a budget. Returns successfully if already - deleted. - - Returns: - Callable[[~.DeleteBudgetRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_budget' not in self._stubs: - self._stubs['delete_budget'] = self._logged_channel.unary_unary( - '/google.cloud.billing.budgets.v1beta1.BudgetService/DeleteBudget', - request_serializer=budget_service.DeleteBudgetRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_budget'] - - def close(self): - self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'BudgetServiceGrpcTransport', -) diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/services/budget_service/transports/grpc_asyncio.py b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/services/budget_service/transports/grpc_asyncio.py deleted file mode 100644 index fb5f8ac881b5..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/services/budget_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,556 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import inspect -import json -import pickle -import logging as std_logging -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import exceptions as core_exceptions -from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.protobuf.json_format import MessageToJson -import google.protobuf.message - -import grpc # type: ignore -import proto # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.billing.budgets_v1beta1.types import budget_model -from google.cloud.billing.budgets_v1beta1.types import budget_service -from google.protobuf import empty_pb2 # type: ignore -from .base import BudgetServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import BudgetServiceGrpcTransport - -try: - from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER -except ImportError: # pragma: NO COVER - CLIENT_LOGGING_SUPPORTED = False - -_LOGGER = std_logging.getLogger(__name__) - - -class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER - async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - 
request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "google.cloud.billing.budgets.v1beta1.BudgetService", - "rpcName": str(client_call_details.method), - "request": grpc_request, - "metadata": grpc_request["metadata"], - }, - ) - response = await continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER - response_metadata = await response.trailing_metadata() - # Convert the gRPC trailing metadata into a dict of plain strings - metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None - result = await response - if isinstance(result, proto.Message): - response_payload = type(result).to_json(result) - elif isinstance(result, google.protobuf.message.Message): - response_payload = MessageToJson(result) - else: - response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" - grpc_response = { - "payload": response_payload, - "metadata": metadata, - "status": "OK", - } - _LOGGER.debug( - f"Received response to rpc {client_call_details.method}.", - extra = { - "serviceName": "google.cloud.billing.budgets.v1beta1.BudgetService", - "rpcName": str(client_call_details.method), - "response": grpc_response, - "metadata": grpc_response["metadata"], - }, - ) - return response - - -class BudgetServiceGrpcAsyncIOTransport(BudgetServiceTransport): - """gRPC AsyncIO backend transport for BudgetService. - - BudgetService stores Cloud Billing budgets, which define a - budget plan and rules to execute as we track spend against that - plan. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'billingbudgets.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - scopes (Optional[Sequence[str]]): An optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object.
- """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'billingbudgets.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to (default: 'billingbudgets.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if a ``channel`` instance is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): - A ``Channel`` instance through which to make calls, or a Callable - that constructs and returns one. If set to None, ``self.create_channel`` - is used to create the channel. If a Callable is given, it will be called - with the same arguments as used in ``self.create_channel``. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if a ``channel`` instance is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if isinstance(channel, aio.Channel): - # Ignore credentials if a channel was passed. - credentials = None - self._ignore_credentials = True - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - # initialize with the provided callable or the default channel - channel_init = channel or type(self).create_channel - self._grpc_channel = channel_init( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._interceptor = _LoggingClientAIOInterceptor() - self._grpc_channel._unary_unary_interceptors.append(self._interceptor) - self._logged_channel = self._grpc_channel - self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters - # Wrap messages. This must be done after self._logged_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def create_budget(self) -> Callable[ - [budget_service.CreateBudgetRequest], - Awaitable[budget_model.Budget]]: - r"""Return a callable for the create budget method over gRPC. - - Creates a new budget. 
See `Quotas and - limits `__ for more - information on the limits of the number of budgets you can - create. - - Returns: - Callable[[~.CreateBudgetRequest], - Awaitable[~.Budget]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_budget' not in self._stubs: - self._stubs['create_budget'] = self._logged_channel.unary_unary( - '/google.cloud.billing.budgets.v1beta1.BudgetService/CreateBudget', - request_serializer=budget_service.CreateBudgetRequest.serialize, - response_deserializer=budget_model.Budget.deserialize, - ) - return self._stubs['create_budget'] - - @property - def update_budget(self) -> Callable[ - [budget_service.UpdateBudgetRequest], - Awaitable[budget_model.Budget]]: - r"""Return a callable for the update budget method over gRPC. - - Updates a budget and returns the updated budget. - - WARNING: There are some fields exposed on the Google - Cloud Console that aren't available on this API. Budget - fields that are not exposed in this API will not be - changed by this method. - - Returns: - Callable[[~.UpdateBudgetRequest], - Awaitable[~.Budget]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_budget' not in self._stubs: - self._stubs['update_budget'] = self._logged_channel.unary_unary( - '/google.cloud.billing.budgets.v1beta1.BudgetService/UpdateBudget', - request_serializer=budget_service.UpdateBudgetRequest.serialize, - response_deserializer=budget_model.Budget.deserialize, - ) - return self._stubs['update_budget'] - - @property - def get_budget(self) -> Callable[ - [budget_service.GetBudgetRequest], - Awaitable[budget_model.Budget]]: - r"""Return a callable for the get budget method over gRPC. - - Returns a budget. - - WARNING: There are some fields exposed on the Google - Cloud Console that aren't available on this API. When - reading from the API, you will not see these fields in - the return value, though they may have been set in the - Cloud Console. - - Returns: - Callable[[~.GetBudgetRequest], - Awaitable[~.Budget]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_budget' not in self._stubs: - self._stubs['get_budget'] = self._logged_channel.unary_unary( - '/google.cloud.billing.budgets.v1beta1.BudgetService/GetBudget', - request_serializer=budget_service.GetBudgetRequest.serialize, - response_deserializer=budget_model.Budget.deserialize, - ) - return self._stubs['get_budget'] - - @property - def list_budgets(self) -> Callable[ - [budget_service.ListBudgetsRequest], - Awaitable[budget_service.ListBudgetsResponse]]: - r"""Return a callable for the list budgets method over gRPC. - - Returns a list of budgets for a billing account. - - WARNING: There are some fields exposed on the Google - Cloud Console that aren't available on this API. 
When - reading from the API, you will not see these fields in - the return value, though they may have been set in the - Cloud Console. - - Returns: - Callable[[~.ListBudgetsRequest], - Awaitable[~.ListBudgetsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_budgets' not in self._stubs: - self._stubs['list_budgets'] = self._logged_channel.unary_unary( - '/google.cloud.billing.budgets.v1beta1.BudgetService/ListBudgets', - request_serializer=budget_service.ListBudgetsRequest.serialize, - response_deserializer=budget_service.ListBudgetsResponse.deserialize, - ) - return self._stubs['list_budgets'] - - @property - def delete_budget(self) -> Callable[ - [budget_service.DeleteBudgetRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete budget method over gRPC. - - Deletes a budget. Returns successfully if already - deleted. - - Returns: - Callable[[~.DeleteBudgetRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_budget' not in self._stubs: - self._stubs['delete_budget'] = self._logged_channel.unary_unary( - '/google.cloud.billing.budgets.v1beta1.BudgetService/DeleteBudget', - request_serializer=budget_service.DeleteBudgetRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_budget'] - - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - self.create_budget: self._wrap_method( - self.create_budget, - default_timeout=60.0, - client_info=client_info, - ), - self.update_budget: self._wrap_method( - self.update_budget, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_budget: self._wrap_method( - self.get_budget, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_budgets: self._wrap_method( - self.list_budgets, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.delete_budget: self._wrap_method( - self.delete_budget, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - } - - def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER - 
kwargs["kind"] = self.kind - return gapic_v1.method_async.wrap_method(func, *args, **kwargs) - - def close(self): - return self._logged_channel.close() - - @property - def kind(self) -> str: - return "grpc_asyncio" - - -__all__ = ( - 'BudgetServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/types/__init__.py b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/types/__init__.py deleted file mode 100644 index dedea8f1b736..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/types/__init__.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .budget_model import ( - AllUpdatesRule, - Budget, - BudgetAmount, - CustomPeriod, - Filter, - LastPeriodAmount, - ThresholdRule, - CalendarPeriod, -) -from .budget_service import ( - CreateBudgetRequest, - DeleteBudgetRequest, - GetBudgetRequest, - ListBudgetsRequest, - ListBudgetsResponse, - UpdateBudgetRequest, -) - -__all__ = ( - 'AllUpdatesRule', - 'Budget', - 'BudgetAmount', - 'CustomPeriod', - 'Filter', - 'LastPeriodAmount', - 'ThresholdRule', - 'CalendarPeriod', - 'CreateBudgetRequest', - 'DeleteBudgetRequest', - 'GetBudgetRequest', - 'ListBudgetsRequest', - 'ListBudgetsResponse', - 'UpdateBudgetRequest', -) diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/types/budget_model.py b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/types/budget_model.py deleted file mode 100644 index 604c82e410fd..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/types/budget_model.py +++ /dev/null @@ -1,531 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import struct_pb2 # type: ignore -from google.type import date_pb2 # type: ignore -from google.type import money_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.billing.budgets.v1beta1', - manifest={ - 'CalendarPeriod', - 'Budget', - 'BudgetAmount', - 'LastPeriodAmount', - 'ThresholdRule', - 'AllUpdatesRule', - 'Filter', - 'CustomPeriod', - }, -) - - -class CalendarPeriod(proto.Enum): - r"""A ``CalendarPeriod`` represents the abstract concept of a time - period that has a canonical start. Grammatically, "the start of the - current ``CalendarPeriod``". All calendar times begin at 12 AM US - and Canadian Pacific Time (UTC-8). - - Values: - CALENDAR_PERIOD_UNSPECIFIED (0): - Calendar period is unset. This is the default - if the budget is for a custom time period - (CustomPeriod). - MONTH (1): - A month. Month starts on the first day of - each month, such as January 1, February 1, March - 1, and so on. - QUARTER (2): - A quarter. Quarters start on dates January 1, - April 1, July 1, and October 1 of each year. - YEAR (3): - A year. Year starts on January 1. - """ - CALENDAR_PERIOD_UNSPECIFIED = 0 - MONTH = 1 - QUARTER = 2 - YEAR = 3 - - -class Budget(proto.Message): - r"""A budget is a plan that describes what you expect to spend on - Cloud projects, plus the rules to execute as spend is tracked - against that plan, (for example, send an alert when 90% of the - target spend is met). The budget time period is configurable, - with options such as month (default), quarter, year, or custom - time period. - - Attributes: - name (str): - Output only. Resource name of the budget. The resource name - implies the scope of a budget. Values are of the form - ``billingAccounts/{billingAccountId}/budgets/{budgetId}``. - display_name (str): - User data for display name in UI. - Validation: <= 60 chars. - budget_filter (google.cloud.billing.budgets_v1beta1.types.Filter): - Optional. Filters that define which resources - are used to compute the actual spend against the - budget amount, such as projects, services, and - the budget's time period, as well as other - filters. - amount (google.cloud.billing.budgets_v1beta1.types.BudgetAmount): - Required. Budgeted amount. - threshold_rules (MutableSequence[google.cloud.billing.budgets_v1beta1.types.ThresholdRule]): - Optional. Rules that trigger alerts (notifications of - thresholds being crossed) when spend exceeds the specified - percentages of the budget. - - Optional for ``pubsubTopic`` notifications. - - Required if using email notifications. - all_updates_rule (google.cloud.billing.budgets_v1beta1.types.AllUpdatesRule): - Optional. Rules to apply to notifications - sent based on budget spend and thresholds. - etag (str): - Optional. Etag to validate that the object is - unchanged for a read-modify-write operation. - An empty etag will cause an update to overwrite - other changes. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - display_name: str = proto.Field( - proto.STRING, - number=2, - ) - budget_filter: 'Filter' = proto.Field( - proto.MESSAGE, - number=3, - message='Filter', - ) - amount: 'BudgetAmount' = proto.Field( - proto.MESSAGE, - number=4, - message='BudgetAmount', - ) - threshold_rules: MutableSequence['ThresholdRule'] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='ThresholdRule', - ) - all_updates_rule: 'AllUpdatesRule' = proto.Field( - proto.MESSAGE, - number=6, - message='AllUpdatesRule', - ) - etag: str = proto.Field( - proto.STRING, - number=7, - ) - - -class BudgetAmount(proto.Message): - r"""The budgeted amount for each usage period. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - specified_amount (google.type.money_pb2.Money): - A specified amount to use as the budget. ``currency_code`` - is optional. If specified when creating a budget, it must - match the currency of the billing account. If specified when - updating a budget, it must match the currency_code of the - existing budget. The ``currency_code`` is provided on - output. - - This field is a member of `oneof`_ ``budget_amount``. - last_period_amount (google.cloud.billing.budgets_v1beta1.types.LastPeriodAmount): - Use the last period's actual spend as the budget for the - present period. LastPeriodAmount can only be set when the - budget's time period is a - [Filter.calendar_period][google.cloud.billing.budgets.v1beta1.Filter.calendar_period]. - It cannot be set in combination with - [Filter.custom_period][google.cloud.billing.budgets.v1beta1.Filter.custom_period]. - - This field is a member of `oneof`_ ``budget_amount``. - """ - - specified_amount: money_pb2.Money = proto.Field( - proto.MESSAGE, - number=1, - oneof='budget_amount', - message=money_pb2.Money, - ) - last_period_amount: 'LastPeriodAmount' = proto.Field( - proto.MESSAGE, - number=2, - oneof='budget_amount', - message='LastPeriodAmount', - ) - - -class LastPeriodAmount(proto.Message): - r"""Describes a budget amount targeted to the last - [Filter.calendar_period][google.cloud.billing.budgets.v1beta1.Filter.calendar_period] - spend. At this time, the amount is automatically 100% of the last - calendar period's spend; that is, there are no other options yet. - Future configuration options will be described here (for example, - configuring a percentage of last period's spend). LastPeriodAmount - cannot be set for a budget configured with a - [Filter.custom_period][google.cloud.billing.budgets.v1beta1.Filter.custom_period]. - - """ - - -class ThresholdRule(proto.Message): - r"""ThresholdRule contains the definition of a threshold. Threshold - rules define the triggering events used to generate a budget - notification email. When a threshold is crossed (spend exceeds the - specified percentages of the budget), budget alert emails are sent - to the email recipients you specify in the - `NotificationsRule <#notificationsrule>`__. - - Threshold rules also affect the fields included in the `JSON data - object `__ - sent to a Pub/Sub topic. - - Threshold rules are *required* if using email notifications. 
- - Threshold rules are *optional* if only setting a ```pubsubTopic`` - NotificationsRule <#NotificationsRule>`__, unless you want your JSON - data object to include data about the thresholds you set. - - For more information, see `set budget threshold rules and - actions `__. - - Attributes: - threshold_percent (float): - Required. Send an alert when this threshold - is exceeded. This is a 1.0-based percentage, so - 0.5 = 50%. Validation: non-negative number. - spend_basis (google.cloud.billing.budgets_v1beta1.types.ThresholdRule.Basis): - Optional. The type of basis used to determine if spend has - passed the threshold. Behavior defaults to CURRENT_SPEND if - not set. - """ - class Basis(proto.Enum): - r"""The type of basis used to determine if spend has passed the - threshold. - - Values: - BASIS_UNSPECIFIED (0): - Unspecified threshold basis. - CURRENT_SPEND (1): - Use current spend as the basis for comparison - against the threshold. - FORECASTED_SPEND (2): - Use forecasted spend for the period as the basis for - comparison against the threshold. FORECASTED_SPEND can only - be set when the budget's time period is a - [Filter.calendar_period][google.cloud.billing.budgets.v1beta1.Filter.calendar_period]. - It cannot be set in combination with - [Filter.custom_period][google.cloud.billing.budgets.v1beta1.Filter.custom_period]. - """ - BASIS_UNSPECIFIED = 0 - CURRENT_SPEND = 1 - FORECASTED_SPEND = 2 - - threshold_percent: float = proto.Field( - proto.DOUBLE, - number=1, - ) - spend_basis: Basis = proto.Field( - proto.ENUM, - number=2, - enum=Basis, - ) - - -class AllUpdatesRule(proto.Message): - r"""AllUpdatesRule defines notifications that are sent based on - budget spend and thresholds. - - Attributes: - pubsub_topic (str): - Optional. The name of the Pub/Sub topic where budget related - messages will be published, in the form - ``projects/{project_id}/topics/{topic_id}``. Updates are - sent at regular intervals to the topic. The topic needs to - be created before the budget is created; see - https://cloud.google.com/billing/docs/how-to/budgets-programmatic-notifications - for more details. Caller is expected to have - ``pubsub.topics.setIamPolicy`` permission on the topic when - it's set for a budget, otherwise, the API call will fail - with PERMISSION_DENIED. See - https://cloud.google.com/billing/docs/how-to/budgets-programmatic-notifications#permissions_required_for_this_task - for more details on Pub/Sub roles and permissions. - schema_version (str): - Optional. Required when - [AllUpdatesRule.pubsub_topic][google.cloud.billing.budgets.v1beta1.AllUpdatesRule.pubsub_topic] - is set. The schema version of the notification sent to - [AllUpdatesRule.pubsub_topic][google.cloud.billing.budgets.v1beta1.AllUpdatesRule.pubsub_topic]. - Only "1.0" is accepted. It represents the JSON schema as - defined in - https://cloud.google.com/billing/docs/how-to/budgets-programmatic-notifications#notification_format. - monitoring_notification_channels (MutableSequence[str]): - Optional. Targets to send notifications to when a threshold - is exceeded. This is in addition to default recipients who - have billing account IAM roles. The value is the full REST - resource name of a monitoring notification channel with the - form - ``projects/{project_id}/notificationChannels/{channel_id}``. - A maximum of 5 channels are allowed. See - https://cloud.google.com/billing/docs/how-to/budgets-notification-recipients - for more details. - disable_default_iam_recipients (bool): - Optional. 
When set to true, disables default - notifications sent when a threshold is exceeded. - Default notifications are sent to those with - Billing Account Administrator and Billing - Account User IAM roles for the target account. - enable_project_level_recipients (bool): - Optional. When set to true, and when the budget has a single - project configured, notifications will be sent to project - level recipients of that project. This field will be ignored - if the budget has multiple projects or no project configured. - - Currently, project level recipients are the users with - ``Owner`` role on a cloud project. - """ - - pubsub_topic: str = proto.Field( - proto.STRING, - number=1, - ) - schema_version: str = proto.Field( - proto.STRING, - number=2, - ) - monitoring_notification_channels: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - disable_default_iam_recipients: bool = proto.Field( - proto.BOOL, - number=4, - ) - enable_project_level_recipients: bool = proto.Field( - proto.BOOL, - number=5, - ) - - -class Filter(proto.Message): - r"""A filter for a budget, limiting the scope of the cost to - calculate. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - projects (MutableSequence[str]): - Optional. A set of projects of the form - ``projects/{project}``, specifying that usage from only this - set of projects should be included in the budget. If - omitted, the report will include all usage for the billing - account, regardless of which project the usage occurred on. - resource_ancestors (MutableSequence[str]): - Optional. A set of folder and organization names of the form - ``folders/{folderId}`` or - ``organizations/{organizationId}``, specifying that usage - from only this set of folders and organizations should be - included in the budget. If omitted, the budget includes all - usage that the billing account pays for. If the folder or - organization contains projects that are paid for by a - different Cloud Billing account, the budget *doesn't* apply - to those projects. - credit_types (MutableSequence[str]): - Optional. If - [Filter.credit_types_treatment][google.cloud.billing.budgets.v1beta1.Filter.credit_types_treatment] - is INCLUDE_SPECIFIED_CREDITS, this is a list of credit types - to be subtracted from gross cost to determine the spend for - threshold calculations. See `a list of acceptable credit - type - values `__. - - If - [Filter.credit_types_treatment][google.cloud.billing.budgets.v1beta1.Filter.credit_types_treatment] - is **not** INCLUDE_SPECIFIED_CREDITS, this field must be - empty. - credit_types_treatment (google.cloud.billing.budgets_v1beta1.types.Filter.CreditTypesTreatment): - Optional. If not set, default behavior is - ``INCLUDE_ALL_CREDITS``. - services (MutableSequence[str]): - Optional. A set of services of the form - ``services/{service_id}``, specifying that usage from only - this set of services should be included in the budget. If - omitted, the report will include usage for all the services. - The service names are available through the Catalog API: - https://cloud.google.com/billing/v1/how-tos/catalog-api. - subaccounts (MutableSequence[str]): - Optional.
A set of subaccounts of the form - ``billingAccounts/{account_id}``, specifying that usage from - only this set of subaccounts should be included in the - budget. If a subaccount is set to the name of the parent - account, usage from the parent account will be included. If - omitted, the report will include usage from the parent - account and all subaccounts, if they exist. - labels (MutableMapping[str, google.protobuf.struct_pb2.ListValue]): - Optional. A single label and value pair specifying that - usage from only this set of labeled resources should be - included in the budget. If omitted, the report will include - all labeled and unlabeled usage. - - An object containing a single ``"key": value`` pair. - Example: ``{ "name": "wrench" }``. - - *Currently, multiple entries or multiple values per entry - are not allowed.* - calendar_period (google.cloud.billing.budgets_v1beta1.types.CalendarPeriod): - Optional. Specifies to track usage for - recurring calendar period. For example, assume - that CalendarPeriod.QUARTER is set. The budget - will track usage from April 1 to June 30, when - the current calendar month is April, May, June. - After that, it will track usage from July 1 to - September 30 when the current calendar month is - July, August, September, so on. - - This field is a member of `oneof`_ ``usage_period``. - custom_period (google.cloud.billing.budgets_v1beta1.types.CustomPeriod): - Optional. Specifies to track usage from any - start date (required) to any end date - (optional). This time period is static, it does - not recur. - - This field is a member of `oneof`_ ``usage_period``. - """ - class CreditTypesTreatment(proto.Enum): - r"""Specifies how credits are applied when determining the spend for - threshold calculations. Budgets track the total cost minus any - applicable selected credits. `See the documentation for a list of - credit - types `__. - - Values: - CREDIT_TYPES_TREATMENT_UNSPECIFIED (0): - No description available. - INCLUDE_ALL_CREDITS (1): - All types of credit are subtracted from the - gross cost to determine the spend for threshold - calculations. - EXCLUDE_ALL_CREDITS (2): - All types of credit are added to the net cost - to determine the spend for threshold - calculations. - INCLUDE_SPECIFIED_CREDITS (3): - `Credit - types `__ - specified in the credit_types field are subtracted from the - gross cost to determine the spend for threshold - calculations. 
- """ - CREDIT_TYPES_TREATMENT_UNSPECIFIED = 0 - INCLUDE_ALL_CREDITS = 1 - EXCLUDE_ALL_CREDITS = 2 - INCLUDE_SPECIFIED_CREDITS = 3 - - projects: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - resource_ancestors: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - credit_types: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=7, - ) - credit_types_treatment: CreditTypesTreatment = proto.Field( - proto.ENUM, - number=4, - enum=CreditTypesTreatment, - ) - services: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - subaccounts: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=5, - ) - labels: MutableMapping[str, struct_pb2.ListValue] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=6, - message=struct_pb2.ListValue, - ) - calendar_period: 'CalendarPeriod' = proto.Field( - proto.ENUM, - number=8, - oneof='usage_period', - enum='CalendarPeriod', - ) - custom_period: 'CustomPeriod' = proto.Field( - proto.MESSAGE, - number=9, - oneof='usage_period', - message='CustomPeriod', - ) - - -class CustomPeriod(proto.Message): - r"""All date times begin at 12 AM US and Canadian Pacific Time - (UTC-8). - - Attributes: - start_date (google.type.date_pb2.Date): - Required. The start date must be after - January 1, 2017. - end_date (google.type.date_pb2.Date): - Optional. The end date of the time period. Budgets with - elapsed end date won't be processed. If unset, specifies to - track all usage incurred since the start_date. - """ - - start_date: date_pb2.Date = proto.Field( - proto.MESSAGE, - number=1, - message=date_pb2.Date, - ) - end_date: date_pb2.Date = proto.Field( - proto.MESSAGE, - number=2, - message=date_pb2.Date, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/types/budget_service.py b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/types/budget_service.py deleted file mode 100644 index e0eee6928d90..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/google/cloud/billing/budgets_v1beta1/types/budget_service.py +++ /dev/null @@ -1,199 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from __future__ import annotations - -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.billing.budgets_v1beta1.types import budget_model -from google.protobuf import field_mask_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.billing.budgets.v1beta1', - manifest={ - 'CreateBudgetRequest', - 'UpdateBudgetRequest', - 'GetBudgetRequest', - 'ListBudgetsRequest', - 'ListBudgetsResponse', - 'DeleteBudgetRequest', - }, -) - - -class CreateBudgetRequest(proto.Message): - r"""Request for CreateBudget - - Attributes: - parent (str): - Required. 
The name of the billing account to create the - budget in. Values are of the form - ``billingAccounts/{billingAccountId}``. - budget (google.cloud.billing.budgets_v1beta1.types.Budget): - Required. Budget to create. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - budget: budget_model.Budget = proto.Field( - proto.MESSAGE, - number=2, - message=budget_model.Budget, - ) - - -class UpdateBudgetRequest(proto.Message): - r"""Request for UpdateBudget - - Attributes: - budget (google.cloud.billing.budgets_v1beta1.types.Budget): - Required. The updated budget object. - The budget to update is specified by the budget - name in the budget. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Optional. Indicates which fields in the provided budget to - update. Read-only fields (such as ``name``) cannot be - changed. If this is not provided, then only fields with - non-default values from the request are updated. See - https://developers.google.com/protocol-buffers/docs/proto3#default - for more details about default values. - """ - - budget: budget_model.Budget = proto.Field( - proto.MESSAGE, - number=1, - message=budget_model.Budget, - ) - update_mask: field_mask_pb2.FieldMask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class GetBudgetRequest(proto.Message): - r"""Request for GetBudget - - Attributes: - name (str): - Required. Name of budget to get. Values are of the form - ``billingAccounts/{billingAccountId}/budgets/{budgetId}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class ListBudgetsRequest(proto.Message): - r"""Request for ListBudgets - - Attributes: - parent (str): - Required. Name of billing account to list budgets under. - Values are of the form - ``billingAccounts/{billingAccountId}``. - scope (str): - Optional. Set the scope of the budgets to be - returned, in the format of the resource name. - The scope of a budget is the cost that it - tracks, such as costs for a single project, or - the costs for all projects in a folder. Only - project scope (in the format of - "projects/project-id" or "projects/123") is - supported in this field. When this field is set - to a project's resource name, the budgets - returned are tracking the costs for that - project. - page_size (int): - Optional. The maximum number of budgets to - return per page. The default and maximum value - are 100. - page_token (str): - Optional. The value returned by the last - ``ListBudgetsResponse`` which indicates that this is a - continuation of a prior ``ListBudgets`` call, and that the - system should return the next page of data. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - scope: str = proto.Field( - proto.STRING, - number=4, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListBudgetsResponse(proto.Message): - r"""Response for ListBudgets - - Attributes: - budgets (MutableSequence[google.cloud.billing.budgets_v1beta1.types.Budget]): - List of the budgets owned by the requested - billing account. - next_page_token (str): - If not empty, indicates that there may be more budgets that - match the request; this value should be passed in a new - ``ListBudgetsRequest``. 
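The `next_page_token` protocol just described drives pagination: pass the token from one `ListBudgetsResponse` into the next `ListBudgetsRequest` until it comes back empty. A usage sketch, assuming the library is installed, Application Default Credentials are configured, and a placeholder billing account ID:

```python
# Sketch: listing budgets; the account ID below is a placeholder.
from google.cloud.billing.budgets_v1beta1 import (
    BudgetServiceClient,
    ListBudgetsRequest,
)

client = BudgetServiceClient()
request = ListBudgetsRequest(
    parent="billingAccounts/000000-000000-000000",
    page_size=50,
)
# The generated client wraps the raw response in a pager: iterating it
# re-issues ListBudgets with each next_page_token until the token is empty.
for budget in client.list_budgets(request=request):
    print(budget.name, budget.display_name)
```

The `raw_page` property defined on the response below exists so the pager machinery can hand back the untransformed page when callers need per-page fields such as the token itself.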
- """ - - @property - def raw_page(self): - return self - - budgets: MutableSequence[budget_model.Budget] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=budget_model.Budget, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteBudgetRequest(proto.Message): - r"""Request for DeleteBudget - - Attributes: - name (str): - Required. Name of the budget to delete. Values are of the - form - ``billingAccounts/{billingAccountId}/budgets/{budgetId}``. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/mypy.ini b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/mypy.ini deleted file mode 100644 index 574c5aed394b..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/noxfile.py b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/noxfile.py deleted file mode 100644 index 6eb549c48480..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/noxfile.py +++ /dev/null @@ -1,280 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import pathlib -import re -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", - "3.12", - "3.13", -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = 'google-cloud-billing-budgets' - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.13" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "prerelease_deps", -] - -@nox.session(python=ALL_PYTHON) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def unit(session, protobuf_implementation): - """Run the unit test suite.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") - - # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. - # The 'cpp' implementation requires Protobuf<4. 
- if protobuf_implementation == "cpp": - session.install("protobuf<4") - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/billing/budgets_v1beta1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - -@nox.session(python=ALL_PYTHON[-1]) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def prerelease_deps(session, protobuf_implementation): - """Run the unit test suite against pre-release versions of dependencies.""" - - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): - session.skip("cpp implementation is not supported in python 3.11+") - - # Install test environment dependencies - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - - # Install the package without dependencies - session.install('-e', '.', '--no-deps') - - # We test the minimum dependency versions using the minimum Python - # version so the lowest python runtime that we test has a corresponding constraints - # file, located at `testing/constraints--.txt`, which contains all of the - # dependencies and extras. - with open( - CURRENT_DIRECTORY - / "testing" - / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() - - # Ignore leading whitespace and comment lines. - constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] - - session.install(*constraints_deps) - - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 - "grpcio!=1.67.0rc1", - "grpcio-status", - "protobuf", - "proto-plus", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) - - # Remaining dependencies - other_deps = [ - "requests", - ] - session.install(*other_deps) - - # Print out prerelease package versions - - session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") - session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("python", "-c", "import grpc; print(grpc.__version__)") - session.run( - "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" - ) - session.run( - "python", "-c", "import proto; print(proto.__version__)" - ) - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/billing/budgets_v1beta1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. 
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/samples/generated_samples/billingbudgets_v1beta1_generated_budget_service_create_budget_async.py b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/samples/generated_samples/billingbudgets_v1beta1_generated_budget_service_create_budget_async.py deleted file mode 100644 index d810db46516d..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/samples/generated_samples/billingbudgets_v1beta1_generated_budget_service_create_budget_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateBudget -# NOTE: This snippet has been automatically generated for illustrative purposes only. 
-# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-billing-budgets - - -# [START billingbudgets_v1beta1_generated_BudgetService_CreateBudget_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.billing import budgets_v1beta1 - - -async def sample_create_budget(): - # Create a client - client = budgets_v1beta1.BudgetServiceAsyncClient() - - # Initialize request argument(s) - request = budgets_v1beta1.CreateBudgetRequest( - parent="parent_value", - ) - - # Make the request - response = await client.create_budget(request=request) - - # Handle the response - print(response) - -# [END billingbudgets_v1beta1_generated_BudgetService_CreateBudget_async] diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/samples/generated_samples/billingbudgets_v1beta1_generated_budget_service_create_budget_sync.py b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/samples/generated_samples/billingbudgets_v1beta1_generated_budget_service_create_budget_sync.py deleted file mode 100644 index 7face1a6256a..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/samples/generated_samples/billingbudgets_v1beta1_generated_budget_service_create_budget_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateBudget -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-billing-budgets - - -# [START billingbudgets_v1beta1_generated_BudgetService_CreateBudget_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.billing import budgets_v1beta1 - - -def sample_create_budget(): - # Create a client - client = budgets_v1beta1.BudgetServiceClient() - - # Initialize request argument(s) - request = budgets_v1beta1.CreateBudgetRequest( - parent="parent_value", - ) - - # Make the request - response = client.create_budget(request=request) - - # Handle the response - print(response) - -# [END billingbudgets_v1beta1_generated_BudgetService_CreateBudget_sync] diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/samples/generated_samples/billingbudgets_v1beta1_generated_budget_service_delete_budget_async.py b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/samples/generated_samples/billingbudgets_v1beta1_generated_budget_service_delete_budget_async.py deleted file mode 100644 index 96c4882a6e02..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/samples/generated_samples/billingbudgets_v1beta1_generated_budget_service_delete_budget_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteBudget -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-billing-budgets - - -# [START billingbudgets_v1beta1_generated_BudgetService_DeleteBudget_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.billing import budgets_v1beta1 - - -async def sample_delete_budget(): - # Create a client - client = budgets_v1beta1.BudgetServiceAsyncClient() - - # Initialize request argument(s) - request = budgets_v1beta1.DeleteBudgetRequest( - name="name_value", - ) - - # Make the request - await client.delete_budget(request=request) - - -# [END billingbudgets_v1beta1_generated_BudgetService_DeleteBudget_async] diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/samples/generated_samples/billingbudgets_v1beta1_generated_budget_service_delete_budget_sync.py b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/samples/generated_samples/billingbudgets_v1beta1_generated_budget_service_delete_budget_sync.py deleted file mode 100644 index 20c04b2a2c7b..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/samples/generated_samples/billingbudgets_v1beta1_generated_budget_service_delete_budget_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteBudget -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-billing-budgets - - -# [START billingbudgets_v1beta1_generated_BudgetService_DeleteBudget_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.billing import budgets_v1beta1 - - -def sample_delete_budget(): - # Create a client - client = budgets_v1beta1.BudgetServiceClient() - - # Initialize request argument(s) - request = budgets_v1beta1.DeleteBudgetRequest( - name="name_value", - ) - - # Make the request - client.delete_budget(request=request) - - -# [END billingbudgets_v1beta1_generated_BudgetService_DeleteBudget_sync] diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/samples/generated_samples/billingbudgets_v1beta1_generated_budget_service_get_budget_async.py b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/samples/generated_samples/billingbudgets_v1beta1_generated_budget_service_get_budget_async.py deleted file mode 100644 index a8415eaba6d0..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/samples/generated_samples/billingbudgets_v1beta1_generated_budget_service_get_budget_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetBudget -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-billing-budgets - - -# [START billingbudgets_v1beta1_generated_BudgetService_GetBudget_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.billing import budgets_v1beta1 - - -async def sample_get_budget(): - # Create a client - client = budgets_v1beta1.BudgetServiceAsyncClient() - - # Initialize request argument(s) - request = budgets_v1beta1.GetBudgetRequest( - name="name_value", - ) - - # Make the request - response = await client.get_budget(request=request) - - # Handle the response - print(response) - -# [END billingbudgets_v1beta1_generated_BudgetService_GetBudget_async] diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/samples/generated_samples/billingbudgets_v1beta1_generated_budget_service_get_budget_sync.py b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/samples/generated_samples/billingbudgets_v1beta1_generated_budget_service_get_budget_sync.py deleted file mode 100644 index 37e82d97bc3e..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/samples/generated_samples/billingbudgets_v1beta1_generated_budget_service_get_budget_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetBudget -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-billing-budgets - - -# [START billingbudgets_v1beta1_generated_BudgetService_GetBudget_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.billing import budgets_v1beta1 - - -def sample_get_budget(): - # Create a client - client = budgets_v1beta1.BudgetServiceClient() - - # Initialize request argument(s) - request = budgets_v1beta1.GetBudgetRequest( - name="name_value", - ) - - # Make the request - response = client.get_budget(request=request) - - # Handle the response - print(response) - -# [END billingbudgets_v1beta1_generated_BudgetService_GetBudget_sync] diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/samples/generated_samples/billingbudgets_v1beta1_generated_budget_service_list_budgets_async.py b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/samples/generated_samples/billingbudgets_v1beta1_generated_budget_service_list_budgets_async.py deleted file mode 100644 index f3e93c172a41..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/samples/generated_samples/billingbudgets_v1beta1_generated_budget_service_list_budgets_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListBudgets -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-billing-budgets - - -# [START billingbudgets_v1beta1_generated_BudgetService_ListBudgets_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.billing import budgets_v1beta1 - - -async def sample_list_budgets(): - # Create a client - client = budgets_v1beta1.BudgetServiceAsyncClient() - - # Initialize request argument(s) - request = budgets_v1beta1.ListBudgetsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_budgets(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END billingbudgets_v1beta1_generated_BudgetService_ListBudgets_async] diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/samples/generated_samples/billingbudgets_v1beta1_generated_budget_service_list_budgets_sync.py b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/samples/generated_samples/billingbudgets_v1beta1_generated_budget_service_list_budgets_sync.py deleted file mode 100644 index 5c168bf6eb30..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/samples/generated_samples/billingbudgets_v1beta1_generated_budget_service_list_budgets_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListBudgets -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-billing-budgets - - -# [START billingbudgets_v1beta1_generated_BudgetService_ListBudgets_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.billing import budgets_v1beta1 - - -def sample_list_budgets(): - # Create a client - client = budgets_v1beta1.BudgetServiceClient() - - # Initialize request argument(s) - request = budgets_v1beta1.ListBudgetsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_budgets(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END billingbudgets_v1beta1_generated_BudgetService_ListBudgets_sync] diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/samples/generated_samples/billingbudgets_v1beta1_generated_budget_service_update_budget_async.py b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/samples/generated_samples/billingbudgets_v1beta1_generated_budget_service_update_budget_async.py deleted file mode 100644 index 55360f01fa78..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/samples/generated_samples/billingbudgets_v1beta1_generated_budget_service_update_budget_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateBudget -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-billing-budgets - - -# [START billingbudgets_v1beta1_generated_BudgetService_UpdateBudget_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.billing import budgets_v1beta1 - - -async def sample_update_budget(): - # Create a client - client = budgets_v1beta1.BudgetServiceAsyncClient() - - # Initialize request argument(s) - request = budgets_v1beta1.UpdateBudgetRequest( - ) - - # Make the request - response = await client.update_budget(request=request) - - # Handle the response - print(response) - -# [END billingbudgets_v1beta1_generated_BudgetService_UpdateBudget_async] diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/samples/generated_samples/billingbudgets_v1beta1_generated_budget_service_update_budget_sync.py b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/samples/generated_samples/billingbudgets_v1beta1_generated_budget_service_update_budget_sync.py deleted file mode 100644 index c17787b90662..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/samples/generated_samples/billingbudgets_v1beta1_generated_budget_service_update_budget_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateBudget -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-billing-budgets - - -# [START billingbudgets_v1beta1_generated_BudgetService_UpdateBudget_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.billing import budgets_v1beta1 - - -def sample_update_budget(): - # Create a client - client = budgets_v1beta1.BudgetServiceClient() - - # Initialize request argument(s) - request = budgets_v1beta1.UpdateBudgetRequest( - ) - - # Make the request - response = client.update_budget(request=request) - - # Handle the response - print(response) - -# [END billingbudgets_v1beta1_generated_BudgetService_UpdateBudget_sync] diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1beta1.json b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1beta1.json deleted file mode 100644 index 2c44cd8f7269..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1beta1.json +++ /dev/null @@ -1,774 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.billing.budgets.v1beta1", - "version": "v1beta1" - } - ], - "language": "PYTHON", - "name": "google-cloud-billing-budgets", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.billing.budgets_v1beta1.BudgetServiceAsyncClient", - "shortName": "BudgetServiceAsyncClient" - }, - "fullName": "google.cloud.billing.budgets_v1beta1.BudgetServiceAsyncClient.create_budget", - "method": { - "fullName": "google.cloud.billing.budgets.v1beta1.BudgetService.CreateBudget", - "service": { - "fullName": "google.cloud.billing.budgets.v1beta1.BudgetService", - "shortName": "BudgetService" - }, - "shortName": "CreateBudget" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.billing.budgets_v1beta1.types.CreateBudgetRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.billing.budgets_v1beta1.types.Budget", - "shortName": "create_budget" - }, - "description": "Sample for CreateBudget", - "file": "billingbudgets_v1beta1_generated_budget_service_create_budget_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "billingbudgets_v1beta1_generated_BudgetService_CreateBudget_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "billingbudgets_v1beta1_generated_budget_service_create_budget_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.billing.budgets_v1beta1.BudgetServiceClient", - "shortName": "BudgetServiceClient" - }, - "fullName": "google.cloud.billing.budgets_v1beta1.BudgetServiceClient.create_budget", - "method": { - "fullName": "google.cloud.billing.budgets.v1beta1.BudgetService.CreateBudget", - "service": { - "fullName": "google.cloud.billing.budgets.v1beta1.BudgetService", - 
"shortName": "BudgetService" - }, - "shortName": "CreateBudget" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.billing.budgets_v1beta1.types.CreateBudgetRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.billing.budgets_v1beta1.types.Budget", - "shortName": "create_budget" - }, - "description": "Sample for CreateBudget", - "file": "billingbudgets_v1beta1_generated_budget_service_create_budget_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "billingbudgets_v1beta1_generated_BudgetService_CreateBudget_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "billingbudgets_v1beta1_generated_budget_service_create_budget_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.billing.budgets_v1beta1.BudgetServiceAsyncClient", - "shortName": "BudgetServiceAsyncClient" - }, - "fullName": "google.cloud.billing.budgets_v1beta1.BudgetServiceAsyncClient.delete_budget", - "method": { - "fullName": "google.cloud.billing.budgets.v1beta1.BudgetService.DeleteBudget", - "service": { - "fullName": "google.cloud.billing.budgets.v1beta1.BudgetService", - "shortName": "BudgetService" - }, - "shortName": "DeleteBudget" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.billing.budgets_v1beta1.types.DeleteBudgetRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_budget" - }, - "description": "Sample for DeleteBudget", - "file": "billingbudgets_v1beta1_generated_budget_service_delete_budget_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "billingbudgets_v1beta1_generated_BudgetService_DeleteBudget_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "billingbudgets_v1beta1_generated_budget_service_delete_budget_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.billing.budgets_v1beta1.BudgetServiceClient", - "shortName": "BudgetServiceClient" - }, - "fullName": "google.cloud.billing.budgets_v1beta1.BudgetServiceClient.delete_budget", - "method": { - "fullName": "google.cloud.billing.budgets.v1beta1.BudgetService.DeleteBudget", - "service": { - "fullName": "google.cloud.billing.budgets.v1beta1.BudgetService", - "shortName": "BudgetService" - }, - "shortName": "DeleteBudget" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.billing.budgets_v1beta1.types.DeleteBudgetRequest" - }, - { - "name": "retry", 
- "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "shortName": "delete_budget" - }, - "description": "Sample for DeleteBudget", - "file": "billingbudgets_v1beta1_generated_budget_service_delete_budget_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "billingbudgets_v1beta1_generated_BudgetService_DeleteBudget_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "billingbudgets_v1beta1_generated_budget_service_delete_budget_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.billing.budgets_v1beta1.BudgetServiceAsyncClient", - "shortName": "BudgetServiceAsyncClient" - }, - "fullName": "google.cloud.billing.budgets_v1beta1.BudgetServiceAsyncClient.get_budget", - "method": { - "fullName": "google.cloud.billing.budgets.v1beta1.BudgetService.GetBudget", - "service": { - "fullName": "google.cloud.billing.budgets.v1beta1.BudgetService", - "shortName": "BudgetService" - }, - "shortName": "GetBudget" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.billing.budgets_v1beta1.types.GetBudgetRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.billing.budgets_v1beta1.types.Budget", - "shortName": "get_budget" - }, - "description": "Sample for GetBudget", - "file": "billingbudgets_v1beta1_generated_budget_service_get_budget_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "billingbudgets_v1beta1_generated_BudgetService_GetBudget_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "billingbudgets_v1beta1_generated_budget_service_get_budget_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.billing.budgets_v1beta1.BudgetServiceClient", - "shortName": "BudgetServiceClient" - }, - "fullName": "google.cloud.billing.budgets_v1beta1.BudgetServiceClient.get_budget", - "method": { - "fullName": "google.cloud.billing.budgets.v1beta1.BudgetService.GetBudget", - "service": { - "fullName": "google.cloud.billing.budgets.v1beta1.BudgetService", - "shortName": "BudgetService" - }, - "shortName": "GetBudget" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.billing.budgets_v1beta1.types.GetBudgetRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.billing.budgets_v1beta1.types.Budget", - 
"shortName": "get_budget" - }, - "description": "Sample for GetBudget", - "file": "billingbudgets_v1beta1_generated_budget_service_get_budget_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "billingbudgets_v1beta1_generated_BudgetService_GetBudget_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "billingbudgets_v1beta1_generated_budget_service_get_budget_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.billing.budgets_v1beta1.BudgetServiceAsyncClient", - "shortName": "BudgetServiceAsyncClient" - }, - "fullName": "google.cloud.billing.budgets_v1beta1.BudgetServiceAsyncClient.list_budgets", - "method": { - "fullName": "google.cloud.billing.budgets.v1beta1.BudgetService.ListBudgets", - "service": { - "fullName": "google.cloud.billing.budgets.v1beta1.BudgetService", - "shortName": "BudgetService" - }, - "shortName": "ListBudgets" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.billing.budgets_v1beta1.types.ListBudgetsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.billing.budgets_v1beta1.services.budget_service.pagers.ListBudgetsAsyncPager", - "shortName": "list_budgets" - }, - "description": "Sample for ListBudgets", - "file": "billingbudgets_v1beta1_generated_budget_service_list_budgets_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "billingbudgets_v1beta1_generated_BudgetService_ListBudgets_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "billingbudgets_v1beta1_generated_budget_service_list_budgets_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.billing.budgets_v1beta1.BudgetServiceClient", - "shortName": "BudgetServiceClient" - }, - "fullName": "google.cloud.billing.budgets_v1beta1.BudgetServiceClient.list_budgets", - "method": { - "fullName": "google.cloud.billing.budgets.v1beta1.BudgetService.ListBudgets", - "service": { - "fullName": "google.cloud.billing.budgets.v1beta1.BudgetService", - "shortName": "BudgetService" - }, - "shortName": "ListBudgets" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.billing.budgets_v1beta1.types.ListBudgetsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.billing.budgets_v1beta1.services.budget_service.pagers.ListBudgetsPager", - "shortName": "list_budgets" - }, - "description": "Sample for 
ListBudgets", - "file": "billingbudgets_v1beta1_generated_budget_service_list_budgets_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "billingbudgets_v1beta1_generated_BudgetService_ListBudgets_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "billingbudgets_v1beta1_generated_budget_service_list_budgets_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.billing.budgets_v1beta1.BudgetServiceAsyncClient", - "shortName": "BudgetServiceAsyncClient" - }, - "fullName": "google.cloud.billing.budgets_v1beta1.BudgetServiceAsyncClient.update_budget", - "method": { - "fullName": "google.cloud.billing.budgets.v1beta1.BudgetService.UpdateBudget", - "service": { - "fullName": "google.cloud.billing.budgets.v1beta1.BudgetService", - "shortName": "BudgetService" - }, - "shortName": "UpdateBudget" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.billing.budgets_v1beta1.types.UpdateBudgetRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.billing.budgets_v1beta1.types.Budget", - "shortName": "update_budget" - }, - "description": "Sample for UpdateBudget", - "file": "billingbudgets_v1beta1_generated_budget_service_update_budget_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "billingbudgets_v1beta1_generated_BudgetService_UpdateBudget_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "billingbudgets_v1beta1_generated_budget_service_update_budget_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.billing.budgets_v1beta1.BudgetServiceClient", - "shortName": "BudgetServiceClient" - }, - "fullName": "google.cloud.billing.budgets_v1beta1.BudgetServiceClient.update_budget", - "method": { - "fullName": "google.cloud.billing.budgets.v1beta1.BudgetService.UpdateBudget", - "service": { - "fullName": "google.cloud.billing.budgets.v1beta1.BudgetService", - "shortName": "BudgetService" - }, - "shortName": "UpdateBudget" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.billing.budgets_v1beta1.types.UpdateBudgetRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.billing.budgets_v1beta1.types.Budget", - "shortName": "update_budget" - }, - "description": "Sample for UpdateBudget", - "file": "billingbudgets_v1beta1_generated_budget_service_update_budget_sync.py", - "language": "PYTHON", 
- "origin": "API_DEFINITION", - "regionTag": "billingbudgets_v1beta1_generated_BudgetService_UpdateBudget_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "billingbudgets_v1beta1_generated_budget_service_update_budget_sync.py" - } - ] -} diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/scripts/fixup_budgets_v1beta1_keywords.py b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/scripts/fixup_budgets_v1beta1_keywords.py deleted file mode 100644 index a4ef41c09acc..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/scripts/fixup_budgets_v1beta1_keywords.py +++ /dev/null @@ -1,180 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class budgetsCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_budget': ('parent', 'budget', ), - 'delete_budget': ('name', ), - 'get_budget': ('name', ), - 'list_budgets': ('parent', 'scope', 'page_size', 'page_token', ), - 'update_budget': ('budget', 'update_mask', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. 
-                return updated
-
-        kwargs, ctrl_kwargs = partition(
-            lambda a: a.keyword.value not in self.CTRL_PARAMS,
-            kwargs
-        )
-
-        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
-        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
-                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
-
-        request_arg = cst.Arg(
-            value=cst.Dict([
-                cst.DictElement(
-                    cst.SimpleString("'{}'".format(name)),
-                    cst.Element(value=arg.value)
-                )
-                # Note: the args + kwargs looks silly, but keep in mind that
-                # the control parameters had to be stripped out, and that
-                # those could have been passed positionally or by keyword.
-                for name, arg in zip(kword_params, args + kwargs)]),
-            keyword=cst.Name("request")
-        )
-
-        return updated.with_changes(
-            args=[request_arg] + ctrl_kwargs
-        )
-
-
-def fix_files(
-    in_dir: pathlib.Path,
-    out_dir: pathlib.Path,
-    *,
-    transformer=budgetsCallTransformer(),
-):
-    """Duplicate the input dir to the output dir, fixing file method calls.
-
-    Preconditions:
-    * in_dir is a real directory
-    * out_dir is a real, empty directory
-    """
-    pyfile_gen = (
-        pathlib.Path(os.path.join(root, f))
-        for root, _, files in os.walk(in_dir)
-        for f in files if os.path.splitext(f)[1] == ".py"
-    )
-
-    for fpath in pyfile_gen:
-        with open(fpath, 'r') as f:
-            src = f.read()
-
-        # Parse the code and insert method call fixes.
-        tree = cst.parse_module(src)
-        updated = tree.visit(transformer)
-
-        # Create the path and directory structure for the new file.
-        updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
-        updated_path.parent.mkdir(parents=True, exist_ok=True)
-
-        # Generate the updated source file at the corresponding path.
-        with open(updated_path, 'w') as f:
-            f.write(updated.code)
-
-
-if __name__ == '__main__':
-    parser = argparse.ArgumentParser(
-        description="""Fix up source that uses the budgets client library.
-
-The existing sources are NOT overwritten but are copied to output_dir with changes made.
-
-Note: This tool operates at a best-effort level at converting positional
-      parameters in client method calls to keyword based parameters.
-      Cases where it WILL FAIL include
-      A) * or ** expansion in a method call.
-      B) Calls via function or method alias (includes free function calls)
-      C) Indirect or dispatched calls (e.g. the method is looked up dynamically)
-
-      These all constitute false negatives. The tool will also detect false
-      positives when an API method shares a name with another method.
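-
-Illustrative conversion (the resource names shown are placeholders):
-
-    before: client.get_budget('billingAccounts/XXXXXX/budgets/YYYYYY')
-    after:  client.get_budget(request={'name': 'billingAccounts/XXXXXX/budgets/YYYYYY'})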
-""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/setup.py b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/setup.py deleted file mode 100644 index 03c978d368f5..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/setup.py +++ /dev/null @@ -1,98 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import io -import os -import re - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-billing-budgets' - - -description = "Google Cloud Billing Budgets API client library" - -version = None - -with open(os.path.join(package_root, 'google/cloud/billing/budgets/gapic_version.py')) as fp: - version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) - assert (len(version_candidates) == 1) - version = version_candidates[0] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - # Exclude incompatible versions of `google-auth` - # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", -] -extras = { -} -url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-billing-budgets" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.find_namespace_packages() - if package.startswith("google") -] - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - install_requires=dependencies, - extras_require=extras, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/testing/constraints-3.10.txt b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/testing/constraints-3.10.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/testing/constraints-3.10.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
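One detail of the deleted setup.py worth noting: the version is scraped from gapic_version.py with the lookaround regex ``(?<=")\d+.\d+.\d+(?=")``. The dots are unescaped, so each one matches any character; this works in practice because the file contains only one quoted version string, but an escaped variant is stricter. A small runnable check (the sample line mirrors the typical shape of gapic_version.py and is an assumption, not quoted from this diff):

.. code-block:: python

    import re

    # Representative contents of gapic_version.py (assumed shape).
    sample = '__version__ = "1.14.1"  # {x-release-please-version}'

    loose = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", sample)     # regex from setup.py
    strict = re.findall(r"(?<=\")\d+\.\d+\.\d+(?=\")", sample)  # dots escaped
    assert loose == strict == ["1.14.1"]

The release classifier logic immediately below it keys off the leading character: any version beginning with "0" is published as "Development Status :: 4 - Beta", everything else as "5 - Production/Stable".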
-google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/testing/constraints-3.11.txt b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/testing/constraints-3.11.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/testing/constraints-3.11.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/testing/constraints-3.12.txt b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/testing/constraints-3.12.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/testing/constraints-3.12.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/testing/constraints-3.13.txt b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/testing/constraints-3.13.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/testing/constraints-3.13.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/testing/constraints-3.7.txt b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/testing/constraints-3.7.txt deleted file mode 100644 index fc812592b0ee..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/testing/constraints-3.7.txt +++ /dev/null @@ -1,10 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.1 -google-auth==2.14.1 -proto-plus==1.22.3 -protobuf==3.20.2 diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/testing/constraints-3.8.txt b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/testing/constraints-3.8.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/testing/constraints-3.8.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/testing/constraints-3.9.txt b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/testing/constraints-3.9.txt deleted file mode 100644 index ed7f9aed2559..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/testing/constraints-3.9.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
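The constraints files deleted here serve two roles: constraints-3.7.txt pins every dependency to the exact lower bound declared in setup.py, so the unit tests prove the declared minimums actually work, while the files for 3.8 through 3.13 list the dependencies unpinned and let pip resolve current versions. A quick sketch of the invariant the 3.7 file is meant to preserve, with the pins inlined so the check runs standalone:

.. code-block:: python

    # Lower bounds declared in the deleted setup.py.
    setup_lower_bounds = {
        "google-api-core": "1.34.1",
        "google-auth": "2.14.1",
        "proto-plus": "1.22.3",
        "protobuf": "3.20.2",
    }

    # Pins from testing/constraints-3.7.txt.
    constraints = """\
    google-api-core==1.34.1
    google-auth==2.14.1
    proto-plus==1.22.3
    protobuf==3.20.2
    """

    for line in constraints.strip().splitlines():
        name, _, pinned = line.strip().partition("==")
        assert setup_lower_bounds[name] == pinned, f"{name} lower bound drifted"

In CI such files are typically applied with pip's constraints flag, e.g. ``pip install -c testing/constraints-3.7.txt -e .``.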
-google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/tests/__init__.py b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/tests/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/tests/unit/__init__.py b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/tests/unit/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/tests/unit/gapic/__init__.py b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/tests/unit/gapic/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/tests/unit/gapic/budgets_v1beta1/__init__.py b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/tests/unit/gapic/budgets_v1beta1/__init__.py deleted file mode 100644 index 7b3de3117f38..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/tests/unit/gapic/budgets_v1beta1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/tests/unit/gapic/budgets_v1beta1/test_budget_service.py b/owl-bot-staging/google-cloud-billing-budgets/v1beta1/tests/unit/gapic/budgets_v1beta1/test_budget_service.py deleted file mode 100644 index b9b8e18c3984..000000000000 --- a/owl-bot-staging/google-cloud-billing-budgets/v1beta1/tests/unit/gapic/budgets_v1beta1/test_budget_service.py +++ /dev/null @@ -1,3017 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -import json -import math -import pytest -from google.api_core import api_core_version -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers - -try: - from google.auth.aio import credentials as ga_credentials_async - HAS_GOOGLE_AUTH_AIO = True -except ImportError: # pragma: NO COVER - HAS_GOOGLE_AUTH_AIO = False - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.billing.budgets_v1beta1.services.budget_service import BudgetServiceAsyncClient -from google.cloud.billing.budgets_v1beta1.services.budget_service import BudgetServiceClient -from google.cloud.billing.budgets_v1beta1.services.budget_service import pagers -from google.cloud.billing.budgets_v1beta1.services.budget_service import transports -from google.cloud.billing.budgets_v1beta1.types import budget_model -from google.cloud.billing.budgets_v1beta1.types import budget_service -from google.oauth2 import service_account -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import struct_pb2 # type: ignore -from google.type import date_pb2 # type: ignore -from google.type import money_pb2 # type: ignore -import google.auth - - - -CRED_INFO_JSON = { - "credential_source": "/path/to/file", - "credential_type": "service account credentials", - "principal": "service-account@example.com", -} -CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) - - -async def 
mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER - chunk = data[i : i + chunk_size] - yield chunk.encode("utf-8") - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - -# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. -# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. -def async_anonymous_credentials(): - if HAS_GOOGLE_AUTH_AIO: - return ga_credentials_async.AnonymousCredentials() - return ga_credentials.AnonymousCredentials() - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - -# If default endpoint template is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint template so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint_template(client): - return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert BudgetServiceClient._get_default_mtls_endpoint(None) is None - assert BudgetServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert BudgetServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert BudgetServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert BudgetServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert BudgetServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - -def test__read_environment_variables(): - assert BudgetServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert BudgetServiceClient._read_environment_variables() == (True, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert BudgetServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - BudgetServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert BudgetServiceClient._read_environment_variables() == (False, "never", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert BudgetServiceClient._read_environment_variables() == (False, "always", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert BudgetServiceClient._read_environment_variables() == (False, "auto", None) - - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - 
with pytest.raises(MutualTLSChannelError) as excinfo: - BudgetServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): - assert BudgetServiceClient._read_environment_variables() == (False, "auto", "foo.com") - -def test__get_client_cert_source(): - mock_provided_cert_source = mock.Mock() - mock_default_cert_source = mock.Mock() - - assert BudgetServiceClient._get_client_cert_source(None, False) is None - assert BudgetServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None - assert BudgetServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source - - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): - assert BudgetServiceClient._get_client_cert_source(None, True) is mock_default_cert_source - assert BudgetServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source - -@mock.patch.object(BudgetServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BudgetServiceClient)) -@mock.patch.object(BudgetServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BudgetServiceAsyncClient)) -def test__get_api_endpoint(): - api_override = "foo.com" - mock_client_cert_source = mock.Mock() - default_universe = BudgetServiceClient._DEFAULT_UNIVERSE - default_endpoint = BudgetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = BudgetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - assert BudgetServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override - assert BudgetServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == BudgetServiceClient.DEFAULT_MTLS_ENDPOINT - assert BudgetServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint - assert BudgetServiceClient._get_api_endpoint(None, None, default_universe, "always") == BudgetServiceClient.DEFAULT_MTLS_ENDPOINT - assert BudgetServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == BudgetServiceClient.DEFAULT_MTLS_ENDPOINT - assert BudgetServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint - assert BudgetServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint - - with pytest.raises(MutualTLSChannelError) as excinfo: - BudgetServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") - assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." 
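The environment-variable tests above lean on one idiom: ``mock.patch.dict(os.environ, {...})`` temporarily injects a variable for the duration of the ``with`` block, and the test asserts either the parsed result or the exact error message. A compact, self-contained version of the pattern (the ``read_use_client_cert`` helper is illustrative, not the client's actual private method):

.. code-block:: python

    import os
    from unittest import mock

    import pytest

    def read_use_client_cert() -> bool:
        # Same validation contract the generated tests assert on.
        value = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
        if value not in ("true", "false"):
            raise ValueError(
                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` "
                "must be either `true` or `false`"
            )
        return value == "true"

    def test_read_use_client_cert():
        with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
            assert read_use_client_cert() is True
        with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "bogus"}):
            with pytest.raises(ValueError) as excinfo:
                read_use_client_cert()
            assert "must be either `true` or `false`" in str(excinfo.value)

Because ``patch.dict`` restores the original environment on exit, the cases can run in any order without leaking state into each other.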
- - -def test__get_universe_domain(): - client_universe_domain = "foo.com" - universe_domain_env = "bar.com" - - assert BudgetServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain - assert BudgetServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env - assert BudgetServiceClient._get_universe_domain(None, None) == BudgetServiceClient._DEFAULT_UNIVERSE - - with pytest.raises(ValueError) as excinfo: - BudgetServiceClient._get_universe_domain("", None) - assert str(excinfo.value) == "Universe Domain cannot be an empty string." - -@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ - (401, CRED_INFO_JSON, True), - (403, CRED_INFO_JSON, True), - (404, CRED_INFO_JSON, True), - (500, CRED_INFO_JSON, False), - (401, None, False), - (403, None, False), - (404, None, False), - (500, None, False) -]) -def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): - cred = mock.Mock(["get_cred_info"]) - cred.get_cred_info = mock.Mock(return_value=cred_info_json) - client = BudgetServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=["foo"]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - if show_cred_info: - assert error.details == ["foo", CRED_INFO_STRING] - else: - assert error.details == ["foo"] - -@pytest.mark.parametrize("error_code", [401,403,404,500]) -def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): - cred = mock.Mock([]) - assert not hasattr(cred, "get_cred_info") - client = BudgetServiceClient(credentials=cred) - client._transport._credentials = cred - - error = core_exceptions.GoogleAPICallError("message", details=[]) - error.code = error_code - - client._add_cred_info_for_auth_errors(error) - assert error.details == [] - -@pytest.mark.parametrize("client_class,transport_name", [ - (BudgetServiceClient, "grpc"), - (BudgetServiceAsyncClient, "grpc_asyncio"), -]) -def test_budget_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'billingbudgets.googleapis.com:443' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.BudgetServiceGrpcTransport, "grpc"), - (transports.BudgetServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_budget_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (BudgetServiceClient, "grpc"), - (BudgetServiceAsyncClient, 
"grpc_asyncio"), -]) -def test_budget_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'billingbudgets.googleapis.com:443' - ) - - -def test_budget_service_client_get_transport_class(): - transport = BudgetServiceClient.get_transport_class() - available_transports = [ - transports.BudgetServiceGrpcTransport, - ] - assert transport in available_transports - - transport = BudgetServiceClient.get_transport_class("grpc") - assert transport == transports.BudgetServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (BudgetServiceClient, transports.BudgetServiceGrpcTransport, "grpc"), - (BudgetServiceAsyncClient, transports.BudgetServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(BudgetServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BudgetServiceClient)) -@mock.patch.object(BudgetServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BudgetServiceAsyncClient)) -def test_budget_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(BudgetServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(BudgetServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (BudgetServiceClient, transports.BudgetServiceGrpcTransport, "grpc", "true"), - (BudgetServiceAsyncClient, 
transports.BudgetServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (BudgetServiceClient, transports.BudgetServiceGrpcTransport, "grpc", "false"), - (BudgetServiceAsyncClient, transports.BudgetServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), -]) -@mock.patch.object(BudgetServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BudgetServiceClient)) -@mock.patch.object(BudgetServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BudgetServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_budget_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - BudgetServiceClient, BudgetServiceAsyncClient -]) -@mock.patch.object(BudgetServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(BudgetServiceClient)) -@mock.patch.object(BudgetServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(BudgetServiceAsyncClient)) -def test_budget_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - -@pytest.mark.parametrize("client_class", [ - BudgetServiceClient, BudgetServiceAsyncClient -]) -@mock.patch.object(BudgetServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BudgetServiceClient)) -@mock.patch.object(BudgetServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BudgetServiceAsyncClient)) -def test_budget_service_client_client_api_endpoint(client_class): - mock_client_cert_source = client_cert_source_callback - api_override = "foo.com" - default_universe = BudgetServiceClient._DEFAULT_UNIVERSE - default_endpoint = BudgetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) - mock_universe = "bar.com" - mock_endpoint = BudgetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", - # use ClientOptions.api_endpoint as the api endpoint regardless. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == api_override - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, - # and ClientOptions.universe_domain="bar.com", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. - options = client_options.ClientOptions() - universe_exists = hasattr(options, "universe_domain") - if universe_exists: - options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - else: - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) - assert client.universe_domain == (mock_universe if universe_exists else default_universe) - - # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. - options = client_options.ClientOptions() - if hasattr(options, "universe_domain"): - delattr(options, "universe_domain") - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == default_endpoint - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (BudgetServiceClient, transports.BudgetServiceGrpcTransport, "grpc"), - (BudgetServiceAsyncClient, transports.BudgetServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_budget_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (BudgetServiceClient, transports.BudgetServiceGrpcTransport, "grpc", grpc_helpers), - (BudgetServiceAsyncClient, transports.BudgetServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_budget_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_budget_service_client_client_options_from_dict(): - with mock.patch('google.cloud.billing.budgets_v1beta1.services.budget_service.transports.BudgetServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = BudgetServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (BudgetServiceClient, transports.BudgetServiceGrpcTransport, "grpc", grpc_helpers), - (BudgetServiceAsyncClient, transports.BudgetServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_budget_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "billingbudgets.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-billing', - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="billingbudgets.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - budget_service.CreateBudgetRequest, - dict, -]) -def test_create_budget(request_type, transport: str = 'grpc'): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_budget), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = budget_model.Budget( - name='name_value', - display_name='display_name_value', - etag='etag_value', - ) - response = client.create_budget(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = budget_service.CreateBudgetRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, budget_model.Budget) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.etag == 'etag_value' - - -def test_create_budget_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = budget_service.CreateBudgetRequest( - parent='parent_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_budget), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.create_budget(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == budget_service.CreateBudgetRequest( - parent='parent_value', - ) - -def test_create_budget_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_budget in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_budget] = mock_rpc - request = {} - client.create_budget(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_budget(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_budget_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BudgetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.create_budget in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.create_budget] = mock_rpc - - request = {} - await client.create_budget(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.create_budget(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_create_budget_async(transport: str = 'grpc_asyncio', request_type=budget_service.CreateBudgetRequest): - client = BudgetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_budget), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(budget_model.Budget( - name='name_value', - display_name='display_name_value', - etag='etag_value', - )) - response = await client.create_budget(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = budget_service.CreateBudgetRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, budget_model.Budget) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.etag == 'etag_value' - - -@pytest.mark.asyncio -async def test_create_budget_async_from_dict(): - await test_create_budget_async(request_type=dict) - -def test_create_budget_field_headers(): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = budget_service.CreateBudgetRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_budget), - '__call__') as call: - call.return_value = budget_model.Budget() - client.create_budget(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_budget_field_headers_async(): - client = BudgetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = budget_service.CreateBudgetRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_budget), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(budget_model.Budget()) - await client.create_budget(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - budget_service.UpdateBudgetRequest, - dict, -]) -def test_update_budget(request_type, transport: str = 'grpc'): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_budget), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = budget_model.Budget( - name='name_value', - display_name='display_name_value', - etag='etag_value', - ) - response = client.update_budget(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = budget_service.UpdateBudgetRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, budget_model.Budget) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.etag == 'etag_value' - - -def test_update_budget_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = budget_service.UpdateBudgetRequest( - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_budget), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_budget(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == budget_service.UpdateBudgetRequest( - ) - -def test_update_budget_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_budget in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_budget] = mock_rpc - request = {} - client.update_budget(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.update_budget(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_budget_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BudgetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.update_budget in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.update_budget] = mock_rpc - - request = {} - await client.update_budget(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - await client.update_budget(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_update_budget_async(transport: str = 'grpc_asyncio', request_type=budget_service.UpdateBudgetRequest): - client = BudgetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_budget), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(budget_model.Budget( - name='name_value', - display_name='display_name_value', - etag='etag_value', - )) - response = await client.update_budget(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = budget_service.UpdateBudgetRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, budget_model.Budget) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.etag == 'etag_value' - - -@pytest.mark.asyncio -async def test_update_budget_async_from_dict(): - await test_update_budget_async(request_type=dict) - -def test_update_budget_field_headers(): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = budget_service.UpdateBudgetRequest() - - request.budget.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_budget), - '__call__') as call: - call.return_value = budget_model.Budget() - client.update_budget(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'budget.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_budget_field_headers_async(): - client = BudgetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = budget_service.UpdateBudgetRequest() - - request.budget.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_budget), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(budget_model.Budget()) - await client.update_budget(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'budget.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - budget_service.GetBudgetRequest, - dict, -]) -def test_get_budget(request_type, transport: str = 'grpc'): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_budget), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = budget_model.Budget( - name='name_value', - display_name='display_name_value', - etag='etag_value', - ) - response = client.get_budget(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = budget_service.GetBudgetRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, budget_model.Budget) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.etag == 'etag_value' - - -def test_get_budget_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = budget_service.GetBudgetRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_budget), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_budget(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == budget_service.GetBudgetRequest( - name='name_value', - ) - -def test_get_budget_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_budget in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.get_budget] = mock_rpc - request = {} - client.get_budget(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_budget(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_budget_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BudgetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.get_budget in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.get_budget] = mock_rpc - - request = {} - await client.get_budget(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.get_budget(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_get_budget_async(transport: str = 'grpc_asyncio', request_type=budget_service.GetBudgetRequest): - client = BudgetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_budget), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(budget_model.Budget( - name='name_value', - display_name='display_name_value', - etag='etag_value', - )) - response = await client.get_budget(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = budget_service.GetBudgetRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, budget_model.Budget) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.etag == 'etag_value' - - -@pytest.mark.asyncio -async def test_get_budget_async_from_dict(): - await test_get_budget_async(request_type=dict) - -def test_get_budget_field_headers(): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = budget_service.GetBudgetRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_budget), - '__call__') as call: - call.return_value = budget_model.Budget() - client.get_budget(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_budget_field_headers_async(): - client = BudgetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = budget_service.GetBudgetRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_budget), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(budget_model.Budget()) - await client.get_budget(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - budget_service.ListBudgetsRequest, - dict, -]) -def test_list_budgets(request_type, transport: str = 'grpc'): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_budgets), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = budget_service.ListBudgetsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_budgets(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = budget_service.ListBudgetsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBudgetsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_budgets_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = budget_service.ListBudgetsRequest( - parent='parent_value', - scope='scope_value', - page_token='page_token_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_budgets), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.list_budgets(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == budget_service.ListBudgetsRequest( - parent='parent_value', - scope='scope_value', - page_token='page_token_value', - ) - -def test_list_budgets_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_budgets in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.list_budgets] = mock_rpc - request = {} - client.list_budgets(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.list_budgets(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_budgets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BudgetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.list_budgets in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.list_budgets] = mock_rpc - - request = {} - await client.list_budgets(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.list_budgets(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_list_budgets_async(transport: str = 'grpc_asyncio', request_type=budget_service.ListBudgetsRequest): - client = BudgetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_budgets), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(budget_service.ListBudgetsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_budgets(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = budget_service.ListBudgetsRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBudgetsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_budgets_async_from_dict(): - await test_list_budgets_async(request_type=dict) - -def test_list_budgets_field_headers(): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = budget_service.ListBudgetsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_budgets), - '__call__') as call: - call.return_value = budget_service.ListBudgetsResponse() - client.list_budgets(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_budgets_field_headers_async(): - client = BudgetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = budget_service.ListBudgetsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_budgets), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(budget_service.ListBudgetsResponse()) - await client.list_budgets(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_budgets_pager(transport_name: str = "grpc"): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_budgets), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - budget_service.ListBudgetsResponse( - budgets=[ - budget_model.Budget(), - budget_model.Budget(), - budget_model.Budget(), - ], - next_page_token='abc', - ), - budget_service.ListBudgetsResponse( - budgets=[], - next_page_token='def', - ), - budget_service.ListBudgetsResponse( - budgets=[ - budget_model.Budget(), - ], - next_page_token='ghi', - ), - budget_service.ListBudgetsResponse( - budgets=[ - budget_model.Budget(), - budget_model.Budget(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_budgets(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, budget_model.Budget) - for i in results) -def test_list_budgets_pages(transport_name: str = "grpc"): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_budgets), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - budget_service.ListBudgetsResponse( - budgets=[ - budget_model.Budget(), - budget_model.Budget(), - budget_model.Budget(), - ], - next_page_token='abc', - ), - budget_service.ListBudgetsResponse( - budgets=[], - next_page_token='def', - ), - budget_service.ListBudgetsResponse( - budgets=[ - budget_model.Budget(), - ], - next_page_token='ghi', - ), - budget_service.ListBudgetsResponse( - budgets=[ - budget_model.Budget(), - budget_model.Budget(), - ], - ), - RuntimeError, - ) - pages = list(client.list_budgets(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_budgets_async_pager(): - client = BudgetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_budgets), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - budget_service.ListBudgetsResponse( - budgets=[ - budget_model.Budget(), - budget_model.Budget(), - budget_model.Budget(), - ], - next_page_token='abc', - ), - budget_service.ListBudgetsResponse( - budgets=[], - next_page_token='def', - ), - budget_service.ListBudgetsResponse( - budgets=[ - budget_model.Budget(), - ], - next_page_token='ghi', - ), - budget_service.ListBudgetsResponse( - budgets=[ - budget_model.Budget(), - budget_model.Budget(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_budgets(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, budget_model.Budget) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_budgets_async_pages(): - client = BudgetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_budgets), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - budget_service.ListBudgetsResponse( - budgets=[ - budget_model.Budget(), - budget_model.Budget(), - budget_model.Budget(), - ], - next_page_token='abc', - ), - budget_service.ListBudgetsResponse( - budgets=[], - next_page_token='def', - ), - budget_service.ListBudgetsResponse( - budgets=[ - budget_model.Budget(), - ], - next_page_token='ghi', - ), - budget_service.ListBudgetsResponse( - budgets=[ - budget_model.Budget(), - budget_model.Budget(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_budgets(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - budget_service.DeleteBudgetRequest, - dict, -]) -def test_delete_budget(request_type, transport: str = 'grpc'): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_budget), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_budget(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = budget_service.DeleteBudgetRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_budget_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = budget_service.DeleteBudgetRequest( - name='name_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_budget), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.delete_budget(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == budget_service.DeleteBudgetRequest( - name='name_value', - ) - -def test_delete_budget_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.delete_budget in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.delete_budget] = mock_rpc - request = {} - client.delete_budget(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.delete_budget(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_budget_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = BudgetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._client._transport.delete_budget in client._client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.AsyncMock() - mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport.delete_budget] = mock_rpc - - request = {} - await client.delete_budget(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - await client.delete_budget(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - -@pytest.mark.asyncio -async def test_delete_budget_async(transport: str = 'grpc_asyncio', request_type=budget_service.DeleteBudgetRequest): - client = BudgetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_budget), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_budget(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - request = budget_service.DeleteBudgetRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_budget_async_from_dict(): - await test_delete_budget_async(request_type=dict) - -def test_delete_budget_field_headers(): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = budget_service.DeleteBudgetRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_budget), - '__call__') as call: - call.return_value = None - client.delete_budget(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_budget_field_headers_async(): - client = BudgetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = budget_service.DeleteBudgetRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_budget), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_budget(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.BudgetServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.BudgetServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = BudgetServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.BudgetServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = BudgetServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. 
- options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = BudgetServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.BudgetServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = BudgetServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.BudgetServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = BudgetServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.BudgetServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.BudgetServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.BudgetServiceGrpcTransport, - transports.BudgetServiceGrpcAsyncIOTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_kind_grpc(): - transport = BudgetServiceClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" - - -def test_initialize_client_w_grpc(): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_budget_empty_call_grpc(): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_budget), - '__call__') as call: - call.return_value = budget_model.Budget() - client.create_budget(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = budget_service.CreateBudgetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_budget_empty_call_grpc(): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_budget), - '__call__') as call: - call.return_value = budget_model.Budget() - client.update_budget(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = budget_service.UpdateBudgetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_budget_empty_call_grpc(): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_budget), - '__call__') as call: - call.return_value = budget_model.Budget() - client.get_budget(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = budget_service.GetBudgetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_budgets_empty_call_grpc(): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_budgets), - '__call__') as call: - call.return_value = budget_service.ListBudgetsResponse() - client.list_budgets(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = budget_service.ListBudgetsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_budget_empty_call_grpc(): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_budget), - '__call__') as call: - call.return_value = None - client.delete_budget(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = budget_service.DeleteBudgetRequest() - - assert args[0] == request_msg - - -def test_transport_kind_grpc_asyncio(): - transport = BudgetServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" - - -def test_initialize_client_w_grpc_asyncio(): - client = BudgetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - assert client is not None - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_budget_empty_call_grpc_asyncio(): - client = BudgetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_budget), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(budget_model.Budget( - name='name_value', - display_name='display_name_value', - etag='etag_value', - )) - await client.create_budget(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = budget_service.CreateBudgetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_budget_empty_call_grpc_asyncio(): - client = BudgetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_budget), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(budget_model.Budget( - name='name_value', - display_name='display_name_value', - etag='etag_value', - )) - await client.update_budget(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = budget_service.UpdateBudgetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_budget_empty_call_grpc_asyncio(): - client = BudgetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_budget), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(budget_model.Budget( - name='name_value', - display_name='display_name_value', - etag='etag_value', - )) - await client.get_budget(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = budget_service.GetBudgetRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_budgets_empty_call_grpc_asyncio(): - client = BudgetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_budgets), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(budget_service.ListBudgetsResponse( - next_page_token='next_page_token_value', - )) - await client.list_budgets(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = budget_service.ListBudgetsRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_budget_empty_call_grpc_asyncio(): - client = BudgetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_budget), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_budget(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = budget_service.DeleteBudgetRequest() - - assert args[0] == request_msg - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.BudgetServiceGrpcTransport, - ) - -def test_budget_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.BudgetServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_budget_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.billing.budgets_v1beta1.services.budget_service.transports.BudgetServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.BudgetServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'create_budget', - 'update_budget', - 'get_budget', - 'list_budgets', - 'delete_budget', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_budget_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.billing.budgets_v1beta1.services.budget_service.transports.BudgetServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.BudgetServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-billing', - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_budget_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.billing.budgets_v1beta1.services.budget_service.transports.BudgetServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.BudgetServiceTransport() - adc.assert_called_once() - - -def test_budget_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - BudgetServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-billing', - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.BudgetServiceGrpcTransport, - transports.BudgetServiceGrpcAsyncIOTransport, - ], -) -def test_budget_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-billing', 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.BudgetServiceGrpcTransport, - transports.BudgetServiceGrpcAsyncIOTransport, - ], -) -def test_budget_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.BudgetServiceGrpcTransport, grpc_helpers), - (transports.BudgetServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_budget_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "billingbudgets.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-billing', - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="billingbudgets.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.BudgetServiceGrpcTransport, transports.BudgetServiceGrpcAsyncIOTransport]) -def test_budget_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. 
- with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) -def test_budget_service_host_no_port(transport_name): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='billingbudgets.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'billingbudgets.googleapis.com:443' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) -def test_budget_service_host_with_port(transport_name): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='billingbudgets.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'billingbudgets.googleapis.com:8000' - ) - -def test_budget_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.BudgetServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_budget_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.BudgetServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.BudgetServiceGrpcTransport, transports.BudgetServiceGrpcAsyncIOTransport]) -def test_budget_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.BudgetServiceGrpcTransport, transports.BudgetServiceGrpcAsyncIOTransport]) -def test_budget_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_budget_path(): - billing_account = "squid" - budget = "clam" - expected = "billingAccounts/{billing_account}/budgets/{budget}".format(billing_account=billing_account, budget=budget, ) - actual = BudgetServiceClient.budget_path(billing_account, budget) - assert expected == actual - - -def test_parse_budget_path(): - expected = { - "billing_account": "whelk", - "budget": "octopus", - } - path = BudgetServiceClient.budget_path(**expected) - - # Check that the path construction is reversible. 
- actual = BudgetServiceClient.parse_budget_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "oyster" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = BudgetServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "nudibranch", - } - path = BudgetServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = BudgetServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder, ) - actual = BudgetServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "mussel", - } - path = BudgetServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = BudgetServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "winkle" - expected = "organizations/{organization}".format(organization=organization, ) - actual = BudgetServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nautilus", - } - path = BudgetServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = BudgetServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "scallop" - expected = "projects/{project}".format(project=project, ) - actual = BudgetServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "abalone", - } - path = BudgetServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = BudgetServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "squid" - location = "clam" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = BudgetServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "whelk", - "location": "octopus", - } - path = BudgetServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = BudgetServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.BudgetServiceTransport, '_prep_wrapped_messages') as prep: - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.BudgetServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = BudgetServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -def test_transport_close_grpc(): - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -@pytest.mark.asyncio -async def test_transport_close_grpc_asyncio(): - client = BudgetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio" - ) - with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - 'grpc', - ] - for transport in transports: - client = BudgetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. - with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (BudgetServiceClient, transports.BudgetServiceGrpcTransport), - (BudgetServiceAsyncClient, transports.BudgetServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/packages/google-cloud-asset/google/cloud/asset/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset/gapic_version.py index 3cbfc540326b..558c8aab67c5 100644 --- a/packages/google-cloud-asset/google/cloud/asset/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
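The `gapic_version.py` hunks throughout this diff reset the hard-coded release version (for example `3.29.1`) to a `0.0.0` placeholder. The trailing `# {x-release-please-version}` comment is a marker that release tooling uses to locate and rewrite the version line at release time, so the placeholder is filled in automatically on the next release. A hedged sketch of that substitution idea (this is an illustration of what the marker enables, not release-please's actual implementation):

```python
import re

# Hypothetical sketch: rewrite the version literal on any line carrying the
# {x-release-please-version} marker, leaving all other lines untouched.
MARKER = "{x-release-please-version}"
VERSION_RE = re.compile(r'"(\d+\.\d+\.\d+)"')

def bump_marked_lines(source: str, new_version: str) -> str:
    lines = []
    for line in source.splitlines():
        if MARKER in line:
            line = VERSION_RE.sub(f'"{new_version}"', line)
        lines.append(line)
    return "\n".join(lines)

print(bump_marked_lines('__version__ = "0.0.0"  # {x-release-please-version}', "3.30.0"))
# __version__ = "3.30.0"  # {x-release-please-version}
```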
# -__version__ = "3.29.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset_v1/gapic_version.py index 3cbfc540326b..558c8aab67c5 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.29.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/async_client.py index 5870dcb351e8..d7e56e549194 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -496,7 +496,10 @@ async def sample_list_assets(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -725,7 +728,10 @@ async def sample_create_feed(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -840,7 +846,10 @@ async def sample_get_feed(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -948,7 +957,10 @@ async def sample_list_feeds(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1067,7 +1079,10 @@ async def sample_update_feed(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
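The hunk pattern repeated throughout the rest of this diff replaces `has_flattened_params = any([...])` with an explicit `is not None` count. The distinction matters because `any()` tests truthiness: a caller who explicitly passes a falsy value such as an empty string, `0`, or `False` looked "absent" to the old check, so supplying it alongside a `request` object slipped past the `ValueError` guard. A minimal sketch of the difference:

```python
# Why the check changed: any([...]) is a truthiness test, so an explicitly
# passed falsy argument (here, an empty string) looks absent to the old check.
scope = ""  # caller explicitly passed an empty scope

old_check = any([scope])                                      # False: missed
new_check = len([p for p in [scope] if p is not None]) > 0    # True: caught
assert old_check is False and new_check is True

# An equivalent, slightly more idiomatic spelling of the new check:
assert any(p is not None for p in [scope]) is True
```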
- has_flattened_params = any([feed]) + flattened_params = [feed] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1169,7 +1184,10 @@ async def sample_delete_feed(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1421,7 +1439,10 @@ async def sample_search_all_resources(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([scope, query, asset_types]) + flattened_params = [scope, query, asset_types] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1621,7 +1642,10 @@ async def sample_search_all_iam_policies(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([scope, query]) + flattened_params = [scope, query] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2178,7 +2202,10 @@ async def sample_create_saved_query(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, saved_query, saved_query_id]) + flattened_params = [parent, saved_query, saved_query_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2293,7 +2320,10 @@ async def sample_get_saved_query(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2410,7 +2440,10 @@ async def sample_list_saved_queries(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2541,7 +2574,10 @@ async def sample_update_saved_query(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([saved_query, update_mask]) + flattened_params = [saved_query, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2647,7 +2683,10 @@ async def sample_delete_saved_query(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2879,7 +2918,10 @@ async def sample_analyze_org_policies(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([scope, constraint, filter]) + flattened_params = [scope, constraint, filter] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3043,7 +3085,10 @@ async def sample_analyze_org_policy_governed_containers(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([scope, constraint, filter]) + flattened_params = [scope, constraint, filter] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3280,7 +3325,10 @@ async def sample_analyze_org_policy_governed_assets(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([scope, constraint, filter]) + flattened_params = [scope, constraint, filter] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/client.py index 5cea5d80a980..01a9ba4195ad 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1/services/asset_service/client.py @@ -1004,7 +1004,10 @@ def sample_list_assets(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1228,7 +1231,10 @@ def sample_create_feed(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1340,7 +1346,10 @@ def sample_get_feed(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1447,7 +1456,10 @@ def sample_list_feeds(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1563,7 +1575,10 @@ def sample_update_feed(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([feed]) + flattened_params = [feed] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1662,7 +1677,10 @@ def sample_delete_feed(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1911,7 +1929,10 @@ def sample_search_all_resources(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([scope, query, asset_types]) + flattened_params = [scope, query, asset_types] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2108,7 +2129,10 @@ def sample_search_all_iam_policies(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([scope, query]) + flattened_params = [scope, query] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2656,7 +2680,10 @@ def sample_create_saved_query(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, saved_query, saved_query_id]) + flattened_params = [parent, saved_query, saved_query_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2768,7 +2795,10 @@ def sample_get_saved_query(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2882,7 +2912,10 @@ def sample_list_saved_queries(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3010,7 +3043,10 @@ def sample_update_saved_query(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([saved_query, update_mask]) + flattened_params = [saved_query, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3113,7 +3149,10 @@ def sample_delete_saved_query(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3342,7 +3381,10 @@ def sample_analyze_org_policies(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([scope, constraint, filter]) + flattened_params = [scope, constraint, filter] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3503,7 +3545,10 @@ def sample_analyze_org_policy_governed_containers(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([scope, constraint, filter]) + flattened_params = [scope, constraint, filter] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3739,7 +3784,10 @@ def sample_analyze_org_policy_governed_assets(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([scope, constraint, filter]) + flattened_params = [scope, constraint, filter] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/gapic_version.py index 3cbfc540326b..558c8aab67c5 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.29.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/services/asset_service/async_client.py b/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/services/asset_service/async_client.py index 8343fbadafe5..d3a1c2ba97ea 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/services/asset_service/async_client.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/services/asset_service/async_client.py @@ -378,7 +378,10 @@ async def sample_search_all_resources(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
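Taken together, each of these hunks leaves the surrounding guard in the same shape. A standalone restatement of that logic, assembled from the context lines shown in the hunks (an illustrative sketch, not a new library API):

```python
# Restatement of the guard each hunk modifies: a full request object and
# flattened keyword arguments are mutually exclusive ways to call a method.
def _ensure_exclusive(request, flattened_params):
    has_flattened_params = (
        len([param for param in flattened_params if param is not None]) > 0
    )
    if request is not None and has_flattened_params:
        raise ValueError(
            "If the `request` argument is set, then none of "
            "the individual field arguments should be set."
        )

_ensure_exclusive(None, ["projects/my-project", None])  # OK: flattened only
_ensure_exclusive(object(), [None, None])               # OK: request only
```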
- has_flattened_params = any([scope, query, asset_types]) + flattened_params = [scope, query, asset_types] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -529,7 +532,10 @@ async def sample_search_all_iam_policies(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([scope, query]) + flattened_params = [scope, query] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/services/asset_service/client.py b/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/services/asset_service/client.py index ccbacd7ce6cb..23e13b0dac16 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/services/asset_service/client.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p1beta1/services/asset_service/client.py @@ -780,7 +780,10 @@ def sample_search_all_resources(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([scope, query, asset_types]) + flattened_params = [scope, query, asset_types] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -928,7 +931,10 @@ def sample_search_all_iam_policies(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([scope, query]) + flattened_params = [scope, query] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/gapic_version.py index 3cbfc540326b..558c8aab67c5 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.29.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/async_client.py b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/async_client.py index d5e3753e1036..a339c2981dd0 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/async_client.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/async_client.py @@ -367,7 +367,10 @@ async def sample_create_feed(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -482,7 +485,10 @@ async def sample_get_feed(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -590,7 +596,10 @@ async def sample_list_feeds(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -709,7 +718,10 @@ async def sample_update_feed(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([feed]) + flattened_params = [feed] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -811,7 +823,10 @@ async def sample_delete_feed(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/client.py b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/client.py index 1dcd8fa26f7b..56f34a130bfc 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/client.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p2beta1/services/asset_service/client.py @@ -784,7 +784,10 @@ def sample_create_feed(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -896,7 +899,10 @@ def sample_get_feed(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1003,7 +1009,10 @@ def sample_list_feeds(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1119,7 +1128,10 @@ def sample_update_feed(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([feed]) + flattened_params = [feed] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1218,7 +1230,10 @@ def sample_delete_feed(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/gapic_version.py b/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/gapic_version.py index 3cbfc540326b..558c8aab67c5 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/gapic_version.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.29.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/services/asset_service/async_client.py b/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/services/asset_service/async_client.py index d702fd530ab8..04d9482ec4b2 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/services/asset_service/async_client.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/services/asset_service/async_client.py @@ -364,7 +364,10 @@ async def sample_list_assets(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/services/asset_service/client.py b/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/services/asset_service/client.py index f89848ece101..80481c2ac1bb 100644 --- a/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/services/asset_service/client.py +++ b/packages/google-cloud-asset/google/cloud/asset_v1p5beta1/services/asset_service/client.py @@ -822,7 +822,10 @@ def sample_list_assets(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json index c447f8100275..211efb19c2ad 100644 --- a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json +++ b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-asset", - "version": "3.29.1" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p1beta1.json b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p1beta1.json index c1ec29f88001..13d603d43770 100644 --- a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p1beta1.json +++ b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-asset", - "version": "3.29.1" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p2beta1.json b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p2beta1.json index 8449e9660ef6..abb0badfbf70 100644 --- a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p2beta1.json +++ b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p2beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-asset", - "version": "3.29.1" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p5beta1.json b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p5beta1.json index a0cde19334da..2b27b5681d7c 100644 --- a/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p5beta1.json +++ b/packages/google-cloud-asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1p5beta1.json @@ -8,7 +8,7 @@ ], 
"language": "PYTHON", "name": "google-cloud-asset", - "version": "3.29.1" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads/gapic_version.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads/gapic_version.py index cf18a472a8a2..558c8aab67c5 100644 --- a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads/gapic_version.py +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.15.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/gapic_version.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/gapic_version.py index cf18a472a8a2..558c8aab67c5 100644 --- a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/gapic_version.py +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.15.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/async_client.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/async_client.py index e5730427ae41..5230644d396a 100644 --- a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/async_client.py +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/async_client.py @@ -390,7 +390,10 @@ async def sample_create_workload(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, workload]) + flattened_params = [parent, workload] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -524,7 +527,10 @@ async def sample_update_workload(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([workload, update_mask]) + flattened_params = [workload, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -726,7 +732,10 @@ async def sample_delete_workload(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -834,7 +843,10 @@ async def sample_get_workload(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -947,7 +959,10 @@ async def sample_list_workloads(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1075,7 +1090,10 @@ async def sample_list_violations(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1189,7 +1207,10 @@ async def sample_get_violation(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/client.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/client.py index c71f0f9296ef..c3bb7b50bc56 100644 --- a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/client.py +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1/services/assured_workloads_service/client.py @@ -830,7 +830,10 @@ def sample_create_workload(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, workload]) + flattened_params = [parent, workload] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -961,7 +964,10 @@ def sample_update_workload(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([workload, update_mask]) + flattened_params = [workload, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1160,7 +1166,10 @@ def sample_delete_workload(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1265,7 +1274,10 @@ def sample_get_workload(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1375,7 +1387,10 @@ def sample_list_workloads(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1500,7 +1515,10 @@ def sample_list_violations(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1611,7 +1629,10 @@ def sample_get_violation(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/gapic_version.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/gapic_version.py index cf18a472a8a2..558c8aab67c5 100644 --- a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/gapic_version.py +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.15.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/async_client.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/async_client.py index 17438ddb1c9e..1aa1abec44eb 100644 --- a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/async_client.py +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/async_client.py @@ -388,7 +388,10 @@ async def sample_create_workload(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, workload]) + flattened_params = [parent, workload] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -523,7 +526,10 @@ async def sample_update_workload(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([workload, update_mask]) + flattened_params = [workload, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -720,7 +726,10 @@ async def sample_delete_workload(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -829,7 +838,10 @@ async def sample_get_workload(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -955,7 +967,10 @@ async def sample_analyze_workload_move(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([project, target]) + flattened_params = [project, target] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1064,7 +1079,10 @@ async def sample_list_workloads(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/client.py b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/client.py index 34fcc666eeb2..fa618ffb428a 100644 --- a/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/client.py +++ b/packages/google-cloud-assured-workloads/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/client.py @@ -808,7 +808,10 @@ def sample_create_workload(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, workload]) + flattened_params = [parent, workload] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -940,7 +943,10 @@ def sample_update_workload(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([workload, update_mask]) + flattened_params = [workload, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1134,7 +1140,10 @@ def sample_delete_workload(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1240,7 +1249,10 @@ def sample_get_workload(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1363,7 +1375,10 @@ def sample_analyze_workload_move(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([project, target]) + flattened_params = [project, target] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1469,7 +1484,10 @@ def sample_list_workloads(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-assured-workloads/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1.json b/packages/google-cloud-assured-workloads/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1.json index 6ef4c5d83ad4..15061dde8248 100644 --- a/packages/google-cloud-assured-workloads/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1.json +++ b/packages/google-cloud-assured-workloads/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-assured-workloads", - "version": "1.15.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-assured-workloads/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1beta1.json b/packages/google-cloud-assured-workloads/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1beta1.json index 5c2437be53c9..d4f3e3793a9e 100644 --- a/packages/google-cloud-assured-workloads/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1beta1.json +++ b/packages/google-cloud-assured-workloads/samples/generated_samples/snippet_metadata_google.cloud.assuredworkloads.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-assured-workloads", - "version": "1.15.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-automl/google/cloud/automl/gapic_version.py b/packages/google-cloud-automl/google/cloud/automl/gapic_version.py index 1230672bab9e..558c8aab67c5 100644 --- a/packages/google-cloud-automl/google/cloud/automl/gapic_version.py +++ b/packages/google-cloud-automl/google/cloud/automl/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.16.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-automl/google/cloud/automl_v1/gapic_version.py b/packages/google-cloud-automl/google/cloud/automl_v1/gapic_version.py index 1230672bab9e..558c8aab67c5 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1/gapic_version.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.16.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-automl/google/cloud/automl_v1/services/auto_ml/async_client.py b/packages/google-cloud-automl/google/cloud/automl_v1/services/auto_ml/async_client.py index 324a8847294a..f8921aca0420 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1/services/auto_ml/async_client.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1/services/auto_ml/async_client.py @@ -399,7 +399,10 @@ async def sample_create_dataset(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, dataset]) + flattened_params = [parent, dataset] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -519,7 +522,10 @@ async def sample_get_dataset(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -631,7 +637,10 @@ async def sample_list_datasets(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -764,7 +773,10 @@ async def sample_update_dataset(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([dataset, update_mask]) + flattened_params = [dataset, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -894,7 +906,10 @@ async def sample_delete_dataset(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1047,7 +1062,10 @@ async def sample_import_data(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, input_config]) + flattened_params = [name, input_config] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1194,7 +1212,10 @@ async def sample_export_data(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name, output_config]) + flattened_params = [name, output_config] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1310,7 +1331,10 @@ async def sample_get_annotation_spec(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1436,7 +1460,10 @@ async def sample_create_model(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, model]) + flattened_params = [parent, model] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1552,7 +1579,10 @@ async def sample_get_model(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1664,7 +1694,10 @@ async def sample_list_models(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1800,7 +1833,10 @@ async def sample_delete_model(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1923,7 +1959,10 @@ async def sample_update_model(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([model, update_mask]) + flattened_params = [model, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2062,7 +2101,10 @@ async def sample_deploy_model(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2200,7 +2242,10 @@ async def sample_undeploy_model(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2351,7 +2396,10 @@ async def sample_export_model(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, output_config]) + flattened_params = [name, output_config] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2467,7 +2515,10 @@ async def sample_get_model_evaluation(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2602,7 +2653,10 @@ async def sample_list_model_evaluations(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, filter]) + flattened_params = [parent, filter] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-automl/google/cloud/automl_v1/services/auto_ml/client.py b/packages/google-cloud-automl/google/cloud/automl_v1/services/auto_ml/client.py index dbd972ee5916..6f45a5c4d50e 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1/services/auto_ml/client.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1/services/auto_ml/client.py @@ -891,7 +891,10 @@ def sample_create_dataset(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, dataset]) + flattened_params = [parent, dataset] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1008,7 +1011,10 @@ def sample_get_dataset(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1117,7 +1123,10 @@ def sample_list_datasets(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1247,7 +1256,10 @@ def sample_update_dataset(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([dataset, update_mask]) + flattened_params = [dataset, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1374,7 +1386,10 @@ def sample_delete_dataset(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1524,7 +1539,10 @@ def sample_import_data(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, input_config]) + flattened_params = [name, input_config] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1668,7 +1686,10 @@ def sample_export_data(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, output_config]) + flattened_params = [name, output_config] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1781,7 +1802,10 @@ def sample_get_annotation_spec(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1904,7 +1928,10 @@ def sample_create_model(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, model]) + flattened_params = [parent, model] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2017,7 +2044,10 @@ def sample_get_model(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2126,7 +2156,10 @@ def sample_list_models(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2259,7 +2292,10 @@ def sample_delete_model(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2379,7 +2415,10 @@ def sample_update_model(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([model, update_mask]) + flattened_params = [model, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2515,7 +2554,10 @@ def sample_deploy_model(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2650,7 +2692,10 @@ def sample_undeploy_model(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
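For context on the guard these checks feed: each generated method accepts either a fully-formed ``request`` object or the flattened keyword arguments, never both. A hedged usage sketch against the AutoML v1 client (the resource name is a placeholder, and the calls require real credentials):

.. code-block:: python

    from google.cloud import automl_v1

    client = automl_v1.AutoMlClient()
    name = "projects/my-project/locations/us-central1/datasets/my-dataset"

    # Either pass a request object...
    dataset = client.get_dataset(request=automl_v1.GetDatasetRequest(name=name))

    # ...or the flattened parameter.
    dataset = client.get_dataset(name=name)

    # Mixing the two trips the quick check and raises ValueError:
    # client.get_dataset(request=automl_v1.GetDatasetRequest(name=name), name=name)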
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2798,7 +2843,10 @@ def sample_export_model(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, output_config]) + flattened_params = [name, output_config] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2911,7 +2959,10 @@ def sample_get_model_evaluation(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3043,7 +3094,10 @@ def sample_list_model_evaluations(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, filter]) + flattened_params = [parent, filter] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-automl/google/cloud/automl_v1/services/prediction_service/async_client.py b/packages/google-cloud-automl/google/cloud/automl_v1/services/prediction_service/async_client.py index 424d228937c1..f46c7b9f88e5 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1/services/prediction_service/async_client.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1/services/prediction_service/async_client.py @@ -452,7 +452,10 @@ async def sample_predict(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, payload, params]) + flattened_params = [name, payload, params] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -705,7 +708,10 @@ async def sample_batch_predict(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name, input_config, output_config, params]) + flattened_params = [name, input_config, output_config, params] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-automl/google/cloud/automl_v1/services/prediction_service/client.py b/packages/google-cloud-automl/google/cloud/automl_v1/services/prediction_service/client.py index cf45f7ea3c5f..5f3a8c8d5b5f 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1/services/prediction_service/client.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1/services/prediction_service/client.py @@ -876,7 +876,10 @@ def sample_predict(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, payload, params]) + flattened_params = [name, payload, params] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1127,7 +1130,10 @@ def sample_batch_predict(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, input_config, output_config, params]) + flattened_params = [name, input_config, output_config, params] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-automl/google/cloud/automl_v1/types/io.py b/packages/google-cloud-automl/google/cloud/automl_v1/types/io.py index b9cd0ddb10d0..8b14c11e07d4 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1/types/io.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1/types/io.py @@ -55,9 +55,11 @@ class InputConfig(proto.Message): with non-terminal symbols defined near the end of this comment. 
The formats are: - AutoML Vision: + AutoML Vision + ^^^^^^^^^^^^^ - Classification: + Classification + '''''''''''''' See `Preparing your training data `__ for @@ -98,7 +100,8 @@ class InputConfig(proto.Message): UNASSIGNED,gs://folder/image3.jpg,daisy UNASSIGNED,gs://folder/image4.jpg - Object Detection: + Object Detection + '''''''''''''''' See `Preparing your training data `__ @@ -148,9 +151,11 @@ class InputConfig(proto.Message): - AutoML Video Intelligence: + AutoML Video Intelligence + ^^^^^^^^^^^^^^^^^^^^^^^^^ - Classification: + Classification + '''''''''''''' See `Preparing your training data `__ @@ -200,7 +205,8 @@ class InputConfig(proto.Message): gs://folder/vid2.avi,car,0,60.5 gs://folder/vid3.avi,,, - Object Tracking: + Object Tracking + ''''''''''''''' See `Preparing your training data `__ @@ -263,9 +269,11 @@ class InputConfig(proto.Message): gs://folder/video2.avi,car,1,0,.1,.9,,,.9,.1,, gs://folder/video2.avi,,,,,,,,,,, - AutoML Natural Language: + AutoML Natural Language + ^^^^^^^^^^^^^^^^^^^^^^^ - Entity Extraction: + Entity Extraction + ''''''''''''''''' See `Preparing your training data `__ for @@ -459,7 +467,8 @@ class InputConfig(proto.Message): }, ], - Classification: + Classification + '''''''''''''' See `Preparing your training data `__ @@ -511,7 +520,8 @@ class InputConfig(proto.Message): TEST,gs://folder/document.pdf VALIDATE,gs://folder/text_files.zip,BadFood - Sentiment Analysis: + Sentiment Analysis + '''''''''''''''''' See `Preparing your training data `__ @@ -574,7 +584,8 @@ class InputConfig(proto.Message): TEST,gs://folder/document.pdf VALIDATE,gs://folder/text_files.zip,2 - AutoML Tables: + AutoML Tables + ^^^^^^^^^^^^^ See `Preparing your training data `__ for @@ -691,7 +702,8 @@ class InputConfig(proto.Message): semantic of the imported data, any string must be up to 25000 characters long. - AutoML Tables: + AutoML Tables + ^^^^^^^^^^^^^ ``schema_inference_version`` : (integer) This value must be supplied. The version of the algorithm to use for the @@ -724,9 +736,11 @@ class BatchPredictInputConfig(proto.Message): with non-terminal symbols defined near the end of this comment. 
The formats are: - AutoML Vision: + AutoML Vision + ^^^^^^^^^^^^^ - Classification: + Classification + '''''''''''''' One or more CSV files where each line is a single column: @@ -746,7 +760,8 @@ class BatchPredictInputConfig(proto.Message): gs://folder/image2.gif gs://folder/image3.png - Object Detection: + Object Detection + '''''''''''''''' One or more CSV files where each line is a single column: @@ -766,9 +781,11 @@ class BatchPredictInputConfig(proto.Message): gs://folder/image2.gif gs://folder/image3.png - AutoML Video Intelligence: + AutoML Video Intelligence + ^^^^^^^^^^^^^^^^^^^^^^^^^ - Classification: + Classification + '''''''''''''' One or more CSV files where each line is a single column: @@ -791,7 +808,8 @@ class BatchPredictInputConfig(proto.Message): gs://folder/video1.mp4,20,60 gs://folder/vid2.mov,0,inf - Object Tracking: + Object Tracking + ''''''''''''''' One or more CSV files where each line is a single column: @@ -814,9 +832,11 @@ class BatchPredictInputConfig(proto.Message): gs://folder/video1.mp4,20,60 gs://folder/vid2.mov,0,inf - AutoML Natural Language: + AutoML Natural Language + ^^^^^^^^^^^^^^^^^^^^^^^ - Classification: + Classification + '''''''''''''' One or more CSV files where each line is a single column: @@ -837,7 +857,8 @@ class BatchPredictInputConfig(proto.Message): gs://folder/text2.pdf gs://folder/text3.tif - Sentiment Analysis: + Sentiment Analysis + '''''''''''''''''' One or more CSV files where each line is a single column: @@ -858,7 +879,8 @@ class BatchPredictInputConfig(proto.Message): gs://folder/text2.pdf gs://folder/text3.tif - Entity Extraction: + Entity Extraction + ''''''''''''''''' One or more JSONL (JSON Lines) files that either provide inline text or documents. You can only use one format, either inline text or @@ -933,7 +955,8 @@ class BatchPredictInputConfig(proto.Message): } } - AutoML Tables: + AutoML Tables + ^^^^^^^^^^^^^ See `Preparing your training data `__ diff --git a/packages/google-cloud-automl/google/cloud/automl_v1beta1/gapic_version.py b/packages/google-cloud-automl/google/cloud/automl_v1beta1/gapic_version.py index 1230672bab9e..558c8aab67c5 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1beta1/gapic_version.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.16.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/auto_ml/async_client.py b/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/auto_ml/async_client.py index 0513f1f38e17..92d6f0794d5e 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/auto_ml/async_client.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/auto_ml/async_client.py @@ -408,7 +408,10 @@ async def sample_create_dataset(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
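The ``io.py`` hunks above are purely presentational: trailing-colon labels in the docstrings become proper reStructuredText headings. In reST, heading levels are assigned by the order in which underline characters are first encountered, and the underline must be at least as long as the heading text, which is why each label gains a second line. Sketched inside a Python docstring to match the shape of the generated files:

.. code-block:: python

    class InputConfig:
        """Input configuration.

        AutoML Vision
        ^^^^^^^^^^^^^

        Classification
        ''''''''''''''

        The ``^`` heading sits one level above the ``'`` heading
        because ``^`` is encountered first in the docstring.
        """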
- has_flattened_params = any([parent, dataset]) + flattened_params = [parent, dataset] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -520,7 +523,10 @@ async def sample_get_dataset(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -632,7 +638,10 @@ async def sample_list_datasets(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -757,7 +766,10 @@ async def sample_update_dataset(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([dataset]) + flattened_params = [dataset] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -885,7 +897,10 @@ async def sample_delete_dataset(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1034,7 +1049,10 @@ async def sample_import_data(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, input_config]) + flattened_params = [name, input_config] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1177,7 +1195,10 @@ async def sample_export_data(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, output_config]) + flattened_params = [name, output_config] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1293,7 +1314,10 @@ async def sample_get_annotation_spec(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1409,7 +1433,10 @@ async def sample_get_table_spec(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1521,7 +1548,10 @@ async def sample_list_table_specs(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1647,7 +1677,10 @@ async def sample_update_table_spec(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([table_spec]) + flattened_params = [table_spec] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1757,7 +1790,10 @@ async def sample_get_column_spec(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1869,7 +1905,10 @@ async def sample_list_column_specs(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1987,7 +2026,10 @@ async def sample_update_column_spec(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([column_spec]) + flattened_params = [column_spec] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2115,7 +2157,10 @@ async def sample_create_model(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, model]) + flattened_params = [parent, model] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2231,7 +2276,10 @@ async def sample_get_model(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2343,7 +2391,10 @@ async def sample_list_models(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2479,7 +2530,10 @@ async def sample_delete_model(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2623,7 +2677,10 @@ async def sample_deploy_model(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2761,7 +2818,10 @@ async def sample_undeploy_model(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2909,7 +2969,10 @@ async def sample_export_model(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, output_config]) + flattened_params = [name, output_config] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3064,7 +3127,10 @@ async def sample_export_evaluated_examples(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, output_config]) + flattened_params = [name, output_config] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3180,7 +3246,10 @@ async def sample_get_model_evaluation(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3295,7 +3364,10 @@ async def sample_list_model_evaluations(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/auto_ml/client.py b/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/auto_ml/client.py index e4718068c738..657071f6e5ab 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/auto_ml/client.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/auto_ml/client.py @@ -946,7 +946,10 @@ def sample_create_dataset(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, dataset]) + flattened_params = [parent, dataset] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1055,7 +1058,10 @@ def sample_get_dataset(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1164,7 +1170,10 @@ def sample_list_datasets(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1286,7 +1295,10 @@ def sample_update_dataset(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([dataset]) + flattened_params = [dataset] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1411,7 +1423,10 @@ def sample_delete_dataset(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1557,7 +1572,10 @@ def sample_import_data(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, input_config]) + flattened_params = [name, input_config] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1697,7 +1715,10 @@ def sample_export_data(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, output_config]) + flattened_params = [name, output_config] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1810,7 +1831,10 @@ def sample_get_annotation_spec(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1923,7 +1947,10 @@ def sample_get_table_spec(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2032,7 +2059,10 @@ def sample_list_table_specs(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2155,7 +2185,10 @@ def sample_update_table_spec(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([table_spec]) + flattened_params = [table_spec] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2262,7 +2295,10 @@ def sample_get_column_spec(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2371,7 +2407,10 @@ def sample_list_column_specs(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2486,7 +2525,10 @@ def sample_update_column_spec(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([column_spec]) + flattened_params = [column_spec] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2611,7 +2653,10 @@ def sample_create_model(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, model]) + flattened_params = [parent, model] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2724,7 +2769,10 @@ def sample_get_model(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2833,7 +2881,10 @@ def sample_list_models(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2966,7 +3017,10 @@ def sample_delete_model(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3107,7 +3161,10 @@ def sample_deploy_model(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3242,7 +3299,10 @@ def sample_undeploy_model(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3387,7 +3447,10 @@ def sample_export_model(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, output_config]) + flattened_params = [name, output_config] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3539,7 +3602,10 @@ def sample_export_evaluated_examples(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name, output_config]) + flattened_params = [name, output_config] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3654,7 +3720,10 @@ def sample_get_model_evaluation(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3766,7 +3835,10 @@ def sample_list_model_evaluations(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/prediction_service/async_client.py b/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/prediction_service/async_client.py index a6fb80b73b8d..5fea70fdd9ab 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/prediction_service/async_client.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/prediction_service/async_client.py @@ -429,7 +429,10 @@ async def sample_predict(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, payload, params]) + flattened_params = [name, payload, params] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -670,7 +673,10 @@ async def sample_batch_predict(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, input_config, output_config, params]) + flattened_params = [name, input_config, output_config, params] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/prediction_service/client.py b/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/prediction_service/client.py index 57c510136c4c..5c90a25bfe55 100644 --- a/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/prediction_service/client.py +++ b/packages/google-cloud-automl/google/cloud/automl_v1beta1/services/prediction_service/client.py @@ -853,7 +853,10 @@ def sample_predict(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, payload, params]) + flattened_params = [name, payload, params] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1092,7 +1095,10 @@ def sample_batch_predict(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, input_config, output_config, params]) + flattened_params = [name, input_config, output_config, params] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-automl/samples/generated_samples/snippet_metadata_google.cloud.automl.v1.json b/packages/google-cloud-automl/samples/generated_samples/snippet_metadata_google.cloud.automl.v1.json index 44c69a5b7aae..0d8816baffc0 100644 --- a/packages/google-cloud-automl/samples/generated_samples/snippet_metadata_google.cloud.automl.v1.json +++ b/packages/google-cloud-automl/samples/generated_samples/snippet_metadata_google.cloud.automl.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-automl", - "version": "2.16.1" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-automl/samples/generated_samples/snippet_metadata_google.cloud.automl.v1beta1.json b/packages/google-cloud-automl/samples/generated_samples/snippet_metadata_google.cloud.automl.v1beta1.json index e1a1afbdf39a..3957ec9b1b9f 100644 --- a/packages/google-cloud-automl/samples/generated_samples/snippet_metadata_google.cloud.automl.v1beta1.json +++ b/packages/google-cloud-automl/samples/generated_samples/snippet_metadata_google.cloud.automl.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-automl", - "version": "2.16.1" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-automl/setup.py b/packages/google-cloud-automl/setup.py index 5c7fdfff2b30..15d0a07fa1cb 100644 --- a/packages/google-cloud-automl/setup.py +++ b/packages/google-cloud-automl/setup.py @@ -50,7 +50,7 @@ extras = { "libcst": "libcst >= 0.2.5", "pandas": ["pandas>=1.0.5"], - "storage": ["google-cloud-storage >=1.18.0, <4.0.0dev"], + "storage": ["google-cloud-storage >=1.18.0, <3.0.0dev"], } url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-automl" diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr/gapic_version.py b/packages/google-cloud-backupdr/google/cloud/backupdr/gapic_version.py index d1a1a883babd..558c8aab67c5 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr/gapic_version.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.2.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_version.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_version.py index d1a1a883babd..558c8aab67c5 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_version.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.2.2" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/async_client.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/async_client.py index c19c93260a4c..60664b91c8ab 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/async_client.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/async_client.py @@ -387,7 +387,10 @@ async def sample_list_management_servers(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -507,7 +510,10 @@ async def sample_get_management_server(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -645,7 +651,10 @@ async def sample_create_management_server(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, management_server, management_server_id]) + flattened_params = [parent, management_server, management_server_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -777,7 +786,10 @@ async def sample_delete_management_server(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -914,7 +926,10 @@ async def sample_create_backup_vault(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, backup_vault, backup_vault_id]) + flattened_params = [parent, backup_vault, backup_vault_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1043,7 +1058,10 @@ async def sample_list_backup_vaults(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1175,7 +1193,10 @@ async def sample_fetch_usable_backup_vaults(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1295,7 +1316,10 @@ async def sample_get_backup_vault(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1419,7 +1443,10 @@ async def sample_update_backup_vault(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([backup_vault, update_mask]) + flattened_params = [backup_vault, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1550,7 +1577,10 @@ async def sample_delete_backup_vault(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1674,7 +1704,10 @@ async def sample_list_data_sources(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1796,7 +1829,10 @@ async def sample_get_data_source(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1920,7 +1956,10 @@ async def sample_update_data_source(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([data_source, update_mask]) + flattened_params = [data_source, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2047,7 +2086,10 @@ async def sample_list_backups(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2164,7 +2206,10 @@ async def sample_get_backup(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2288,7 +2333,10 @@ async def sample_update_backup(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([backup, update_mask]) + flattened_params = [backup, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2412,7 +2460,10 @@ async def sample_delete_backup(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2541,7 +2592,10 @@ async def sample_restore_backup(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2695,7 +2749,10 @@ async def sample_create_backup_plan(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, backup_plan, backup_plan_id]) + flattened_params = [parent, backup_plan, backup_plan_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2819,7 +2876,10 @@ async def sample_get_backup_plan(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2933,7 +2993,10 @@ async def sample_list_backup_plans(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3068,7 +3131,10 @@ async def sample_delete_backup_plan(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3220,8 +3286,9 @@ async def sample_create_backup_plan_association(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any( - [parent, backup_plan_association, backup_plan_association_id] + flattened_params = [parent, backup_plan_association, backup_plan_association_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 ) if request is not None and has_flattened_params: raise ValueError( @@ -3349,7 +3416,10 @@ async def sample_get_backup_plan_association(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3470,7 +3540,10 @@ async def sample_list_backup_plan_associations(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3608,7 +3681,10 @@ async def sample_delete_backup_plan_association(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3744,7 +3820,10 @@ async def sample_trigger_backup(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, rule_id]) + flattened_params = [name, rule_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/client.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/client.py index 063f960859f8..46bcfd19708c 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/client.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/client.py @@ -917,7 +917,10 @@ def sample_list_management_servers(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1034,7 +1037,10 @@ def sample_get_management_server(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1169,7 +1175,10 @@ def sample_create_management_server(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, management_server, management_server_id]) + flattened_params = [parent, management_server, management_server_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1298,7 +1307,10 @@ def sample_delete_management_server(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1432,7 +1444,10 @@ def sample_create_backup_vault(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, backup_vault, backup_vault_id]) + flattened_params = [parent, backup_vault, backup_vault_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1558,7 +1573,10 @@ def sample_list_backup_vaults(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1687,7 +1705,10 @@ def sample_fetch_usable_backup_vaults(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1806,7 +1827,10 @@ def sample_get_backup_vault(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1927,7 +1951,10 @@ def sample_update_backup_vault(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([backup_vault, update_mask]) + flattened_params = [backup_vault, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2055,7 +2082,10 @@ def sample_delete_backup_vault(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2176,7 +2206,10 @@ def sample_list_data_sources(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2295,7 +2328,10 @@ def sample_get_data_source(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2416,7 +2452,10 @@ def sample_update_data_source(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([data_source, update_mask]) + flattened_params = [data_source, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2540,7 +2579,10 @@ def sample_list_backups(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2654,7 +2696,10 @@ def sample_get_backup(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2775,7 +2820,10 @@ def sample_update_backup(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([backup, update_mask]) + flattened_params = [backup, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2896,7 +2944,10 @@ def sample_delete_backup(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3022,7 +3073,10 @@ def sample_restore_backup(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3173,7 +3227,10 @@ def sample_create_backup_plan(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, backup_plan, backup_plan_id]) + flattened_params = [parent, backup_plan, backup_plan_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3294,7 +3351,10 @@ def sample_get_backup_plan(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3405,7 +3465,10 @@ def sample_list_backup_plans(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3537,7 +3600,10 @@ def sample_delete_backup_plan(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3686,8 +3752,9 @@ def sample_create_backup_plan_association(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any( - [parent, backup_plan_association, backup_plan_association_id] + flattened_params = [parent, backup_plan_association, backup_plan_association_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 ) if request is not None and has_flattened_params: raise ValueError( @@ -3814,7 +3881,10 @@ def sample_get_backup_plan_association(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3934,7 +4004,10 @@ def sample_list_backup_plan_associations(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4071,7 +4144,10 @@ def sample_delete_backup_plan_association(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4206,7 +4282,10 @@ def sample_trigger_backup(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name, rule_id]) + flattened_params = [name, rule_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json b/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json index b1c74a350331..0f3972263529 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json +++ b/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-backupdr", - "version": "0.2.2" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution/gapic_version.py b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution/gapic_version.py index f1d827b5c728..558c8aab67c5 100644 --- a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution/gapic_version.py +++ b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.10.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/gapic_version.py b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/gapic_version.py index f1d827b5c728..558c8aab67c5 100644 --- a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/gapic_version.py +++ b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.10.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/async_client.py b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/async_client.py index 36b967234fe9..7adf747b6026 100644 --- a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/async_client.py +++ b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/async_client.py @@ -450,7 +450,10 @@ async def sample_list_instances(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -565,7 +568,10 @@ async def sample_get_instance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -691,7 +697,10 @@ async def sample_update_instance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([instance, update_mask]) + flattened_params = [instance, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -819,7 +828,10 @@ async def sample_rename_instance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, new_instance_id]) + flattened_params = [name, new_instance_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -935,7 +947,10 @@ async def sample_reset_instance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1055,7 +1070,10 @@ async def sample_start_instance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1175,7 +1193,10 @@ async def sample_stop_instance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1299,7 +1320,10 @@ async def sample_enable_interactive_serial_console(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1423,7 +1447,10 @@ async def sample_disable_interactive_serial_console(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1551,7 +1578,10 @@ async def sample_detach_lun(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([instance, lun]) + flattened_params = [instance, lun] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1676,7 +1706,10 @@ async def sample_list_ssh_keys(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1813,7 +1846,10 @@ async def sample_create_ssh_key(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, ssh_key, ssh_key_id]) + flattened_params = [parent, ssh_key, ssh_key_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1918,7 +1954,10 @@ async def sample_delete_ssh_key(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2027,7 +2066,10 @@ async def sample_list_volumes(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2142,7 +2184,10 @@ async def sample_get_volume(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2268,7 +2313,10 @@ async def sample_update_volume(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([volume, update_mask]) + flattened_params = [volume, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2396,7 +2444,10 @@ async def sample_rename_volume(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, new_volume_id]) + flattened_params = [name, new_volume_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2519,7 +2570,10 @@ async def sample_evict_volume(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2645,7 +2699,10 @@ async def sample_resize_volume(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([volume, size_gib]) + flattened_params = [volume, size_gib] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2767,7 +2824,10 @@ async def sample_list_networks(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2885,7 +2945,10 @@ async def sample_list_network_usage(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([location]) + flattened_params = [location] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2989,7 +3052,10 @@ async def sample_get_network(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3115,7 +3181,10 @@ async def sample_update_network(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([network, update_mask]) + flattened_params = [network, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3242,7 +3311,10 @@ async def sample_create_volume_snapshot(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, volume_snapshot]) + flattened_params = [parent, volume_snapshot] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3364,7 +3436,10 @@ async def sample_restore_volume_snapshot(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([volume_snapshot]) + flattened_params = [volume_snapshot] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3476,7 +3551,10 @@ async def sample_delete_volume_snapshot(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3580,7 +3658,10 @@ async def sample_get_volume_snapshot(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3696,7 +3777,10 @@ async def sample_list_volume_snapshots(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3814,7 +3898,10 @@ async def sample_get_lun(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3924,7 +4011,10 @@ async def sample_list_luns(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4056,7 +4146,10 @@ async def sample_evict_lun(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4168,7 +4261,10 @@ async def sample_get_nfs_share(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4280,7 +4376,10 @@ async def sample_list_nfs_shares(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4416,7 +4515,10 @@ async def sample_update_nfs_share(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([nfs_share, update_mask]) + flattened_params = [nfs_share, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4548,7 +4650,10 @@ async def sample_create_nfs_share(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, nfs_share]) + flattened_params = [parent, nfs_share] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4674,7 +4779,10 @@ async def sample_rename_nfs_share(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, new_nfsshare_id]) + flattened_params = [name, new_nfsshare_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4798,7 +4906,10 @@ async def sample_delete_nfs_share(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4921,7 +5032,10 @@ async def sample_list_provisioning_quotas(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -5051,7 +5165,10 @@ async def sample_submit_provisioning_config(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, provisioning_config]) + flattened_params = [parent, provisioning_config] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -5160,7 +5277,10 @@ async def sample_get_provisioning_config(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -5276,7 +5396,10 @@ async def sample_create_provisioning_config(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, provisioning_config]) + flattened_params = [parent, provisioning_config] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -5393,7 +5516,10 @@ async def sample_update_provisioning_config(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([provisioning_config, update_mask]) + flattened_params = [provisioning_config, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -5513,7 +5639,10 @@ async def sample_rename_network(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, new_network_id]) + flattened_params = [name, new_network_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -5628,7 +5757,10 @@ async def sample_list_os_images(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/client.py b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/client.py index a36d016965c9..393c56069365 100644 --- a/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/client.py +++ b/packages/google-cloud-bare-metal-solution/google/cloud/bare_metal_solution_v2/services/bare_metal_solution/client.py @@ -1152,7 +1152,10 @@ def sample_list_instances(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1264,7 +1267,10 @@ def sample_get_instance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1387,7 +1393,10 @@ def sample_update_instance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([instance, update_mask]) + flattened_params = [instance, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1512,7 +1521,10 @@ def sample_rename_instance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, new_instance_id]) + flattened_params = [name, new_instance_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1625,7 +1637,10 @@ def sample_reset_instance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1742,7 +1757,10 @@ def sample_start_instance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1859,7 +1877,10 @@ def sample_stop_instance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1980,7 +2001,10 @@ def sample_enable_interactive_serial_console(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2103,7 +2127,10 @@ def sample_disable_interactive_serial_console(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2230,7 +2257,10 @@ def sample_detach_lun(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([instance, lun]) + flattened_params = [instance, lun] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2352,7 +2382,10 @@ def sample_list_ssh_keys(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2486,7 +2519,10 @@ def sample_create_ssh_key(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, ssh_key, ssh_key_id]) + flattened_params = [parent, ssh_key, ssh_key_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2588,7 +2624,10 @@ def sample_delete_ssh_key(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2694,7 +2733,10 @@ def sample_list_volumes(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2806,7 +2848,10 @@ def sample_get_volume(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2929,7 +2974,10 @@ def sample_update_volume(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([volume, update_mask]) + flattened_params = [volume, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3054,7 +3102,10 @@ def sample_rename_volume(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, new_volume_id]) + flattened_params = [name, new_volume_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3174,7 +3225,10 @@ def sample_evict_volume(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3297,7 +3351,10 @@ def sample_resize_volume(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([volume, size_gib]) + flattened_params = [volume, size_gib] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3416,7 +3473,10 @@ def sample_list_networks(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3531,7 +3591,10 @@ def sample_list_network_usage(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([location]) + flattened_params = [location] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3632,7 +3695,10 @@ def sample_get_network(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3755,7 +3821,10 @@ def sample_update_network(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([network, update_mask]) + flattened_params = [network, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3879,7 +3948,10 @@ def sample_create_volume_snapshot(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, volume_snapshot]) + flattened_params = [parent, volume_snapshot] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3998,7 +4070,10 @@ def sample_restore_volume_snapshot(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([volume_snapshot]) + flattened_params = [volume_snapshot] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4107,7 +4182,10 @@ def sample_delete_volume_snapshot(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4208,7 +4286,10 @@ def sample_get_volume_snapshot(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4321,7 +4402,10 @@ def sample_list_volume_snapshots(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4436,7 +4520,10 @@ def sample_get_lun(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4545,7 +4632,10 @@ def sample_list_luns(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4674,7 +4764,10 @@ def sample_evict_lun(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4783,7 +4876,10 @@ def sample_get_nfs_share(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -4892,7 +4988,10 @@ def sample_list_nfs_shares(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -5025,7 +5124,10 @@ def sample_update_nfs_share(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([nfs_share, update_mask]) + flattened_params = [nfs_share, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -5154,7 +5256,10 @@ def sample_create_nfs_share(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, nfs_share]) + flattened_params = [parent, nfs_share] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -5277,7 +5382,10 @@ def sample_rename_nfs_share(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, new_nfsshare_id]) + flattened_params = [name, new_nfsshare_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -5398,7 +5506,10 @@ def sample_delete_nfs_share(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -5518,7 +5629,10 @@ def sample_list_provisioning_quotas(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -5645,7 +5759,10 @@ def sample_submit_provisioning_config(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, provisioning_config]) + flattened_params = [parent, provisioning_config] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -5753,7 +5870,10 @@ def sample_get_provisioning_config(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -5866,7 +5986,10 @@ def sample_create_provisioning_config(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, provisioning_config]) + flattened_params = [parent, provisioning_config] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -5982,7 +6105,10 @@ def sample_update_provisioning_config(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([provisioning_config, update_mask]) + flattened_params = [provisioning_config, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -6101,7 +6227,10 @@ def sample_rename_network(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, new_network_id]) + flattened_params = [name, new_network_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -6213,7 +6342,10 @@ def sample_list_os_images(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-bare-metal-solution/samples/generated_samples/snippet_metadata_google.cloud.baremetalsolution.v2.json b/packages/google-cloud-bare-metal-solution/samples/generated_samples/snippet_metadata_google.cloud.baremetalsolution.v2.json index 6ec0afe89b63..2e341fc16178 100644 --- a/packages/google-cloud-bare-metal-solution/samples/generated_samples/snippet_metadata_google.cloud.baremetalsolution.v2.json +++ b/packages/google-cloud-bare-metal-solution/samples/generated_samples/snippet_metadata_google.cloud.baremetalsolution.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bare-metal-solution", - "version": "1.10.1" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-batch/google/cloud/batch/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch/gapic_version.py index f7da358ad5fc..558c8aab67c5 100644 --- a/packages/google-cloud-batch/google/cloud/batch/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.17.34" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py index f7da358ad5fc..558c8aab67c5 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.17.34" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/async_client.py b/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/async_client.py index 8df5a7e7bcf7..a9c0c72df6b9 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/async_client.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/async_client.py @@ -384,7 +384,10 @@ async def sample_create_job(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, job, job_id]) + flattened_params = [parent, job, job_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -491,7 +494,10 @@ async def sample_get_job(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -607,7 +613,10 @@ async def sample_delete_job(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -727,7 +736,10 @@ async def sample_cancel_job(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -843,7 +855,10 @@ async def sample_list_jobs(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
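# The gapic_version.py and snippet-metadata hunks above reset hard-coded
# versions ("1.10.1", "0.17.34") to the generator defaults "0.1.0" and
# "0.0.0". The trailing "# {x-release-please-version}" comment marks the line
# for release tooling, which stamps the real version at release time. A rough
# sketch of such a stamping pass, assuming release-please-style markers (the
# regex is illustrative, not the tool's actual implementation):

import re

_MARKER = re.compile(r'__version__ = ".*?" # \{x-release-please-version\}')

def stamp(source: str, version: str) -> str:
    # Rewrite every marked line to carry the released version.
    replacement = '__version__ = "%s" # {x-release-please-version}' % version
    return _MARKER.sub(replacement, source)

line = '__version__ = "0.0.0" # {x-release-please-version}'
assert stamp(line, "0.18.0") == '__version__ = "0.18.0" # {x-release-please-version}'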
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -957,7 +972,10 @@ async def sample_get_task(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1067,7 +1085,10 @@ async def sample_list_tasks(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/client.py b/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/client.py index 37cfc2a957c4..894b1a4627e5 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/client.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1/services/batch_service/client.py @@ -852,7 +852,10 @@ def sample_create_job(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, job, job_id]) + flattened_params = [parent, job, job_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -956,7 +959,10 @@ def sample_get_job(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1071,7 +1077,10 @@ def sample_delete_job(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1188,7 +1197,10 @@ def sample_cancel_job(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1301,7 +1313,10 @@ def sample_list_jobs(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1412,7 +1427,10 @@ def sample_get_task(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1521,7 +1539,10 @@ def sample_list_tasks(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py index f7da358ad5fc..558c8aab67c5 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.17.34" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/async_client.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/async_client.py index 5be36827accf..d78a9a6857c9 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/async_client.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/async_client.py @@ -394,7 +394,10 @@ async def sample_create_job(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, job, job_id]) + flattened_params = [parent, job, job_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -501,7 +504,10 @@ async def sample_get_job(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -617,7 +623,10 @@ async def sample_delete_job(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -737,7 +746,10 @@ async def sample_cancel_job(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -876,7 +888,10 @@ async def sample_update_job(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([job, update_mask]) + flattened_params = [job, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -986,7 +1001,10 @@ async def sample_list_jobs(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1100,7 +1118,10 @@ async def sample_get_task(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1210,7 +1231,10 @@ async def sample_list_tasks(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1363,7 +1387,10 @@ async def sample_create_resource_allowance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, resource_allowance, resource_allowance_id]) + flattened_params = [parent, resource_allowance, resource_allowance_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1475,7 +1502,10 @@ async def sample_get_resource_allowance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1594,7 +1624,10 @@ async def sample_delete_resource_allowance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1712,7 +1745,10 @@ async def sample_list_resource_allowances(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1855,7 +1891,10 @@ async def sample_update_resource_allowance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource_allowance, update_mask]) + flattened_params = [resource_allowance, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/client.py b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/client.py index b4e2b0f2247e..8c1569758349 100644 --- a/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/client.py +++ b/packages/google-cloud-batch/google/cloud/batch_v1alpha/services/batch_service/client.py @@ -880,7 +880,10 @@ def sample_create_job(): # Create or coerce a protobuf request object. 
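# The "Create or coerce a protobuf request object" step that follows each
# guard builds the request from whichever calling style was used. A
# simplified, hypothetical rendering of the whole pattern (the request type
# and error text are paraphrased stand-ins, not the generated code verbatim):

from typing import Optional

class FakeCreateJobRequest:
    # Stand-in for a generated protobuf request type.
    def __init__(self, parent: Optional[str] = None):
        self.parent = parent

def coerce(request: Optional[FakeCreateJobRequest],
           parent: Optional[str]) -> FakeCreateJobRequest:
    flattened_params = [parent]
    has_flattened_params = (
        len([param for param in flattened_params if param is not None]) > 0
    )
    if request is not None and has_flattened_params:
        raise ValueError("request and flattened arguments are mutually exclusive")
    if request is None:
        request = FakeCreateJobRequest()
    if parent is not None:
        request.parent = parent
    return request

assert coerce(None, "projects/p/locations/us").parent == "projects/p/locations/us"
assert coerce(FakeCreateJobRequest(parent="projects/p/locations/us"), None).parent \
    == "projects/p/locations/us"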
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, job, job_id]) + flattened_params = [parent, job, job_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -984,7 +987,10 @@ def sample_get_job(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1099,7 +1105,10 @@ def sample_delete_job(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1216,7 +1225,10 @@ def sample_cancel_job(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1352,7 +1364,10 @@ def sample_update_job(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([job, update_mask]) + flattened_params = [job, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1459,7 +1474,10 @@ def sample_list_jobs(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1570,7 +1588,10 @@ def sample_get_task(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1679,7 +1700,10 @@ def sample_list_tasks(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1829,7 +1853,10 @@ def sample_create_resource_allowance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, resource_allowance, resource_allowance_id]) + flattened_params = [parent, resource_allowance, resource_allowance_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1940,7 +1967,10 @@ def sample_get_resource_allowance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2056,7 +2086,10 @@ def sample_delete_resource_allowance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2173,7 +2206,10 @@ def sample_list_resource_allowances(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2313,7 +2349,10 @@ def sample_update_resource_allowance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([resource_allowance, update_mask]) + flattened_params = [resource_allowance, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json index e9a7f2a3ba58..f80cbfd5e2e8 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-batch", - "version": "0.17.34" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json index 0fac004369d3..981b9c62ced1 100644 --- a/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json +++ b/packages/google-cloud-batch/samples/generated_samples/snippet_metadata_google.cloud.batch.v1alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-batch", - "version": "0.17.34" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections/gapic_version.py b/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections/gapic_version.py index dd30b4866aeb..558c8aab67c5 100644 --- a/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections/gapic_version.py +++ b/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.16" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/gapic_version.py b/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/gapic_version.py index dd30b4866aeb..558c8aab67c5 100644 --- a/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/gapic_version.py +++ b/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.4.16" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/async_client.py b/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/async_client.py index 8376eb91773f..8396fae51a10 100644 --- a/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/async_client.py +++ b/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/async_client.py @@ -400,7 +400,10 @@ async def sample_list_app_connections(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -527,7 +530,10 @@ async def sample_get_app_connection(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -676,7 +682,10 @@ async def sample_create_app_connection(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, app_connection, app_connection_id]) + flattened_params = [parent, app_connection, app_connection_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -830,7 +839,10 @@ async def sample_update_app_connection(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([app_connection, update_mask]) + flattened_params = [app_connection, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -966,7 +978,10 @@ async def sample_delete_app_connection(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1092,7 +1107,10 @@ async def sample_resolve_app_connections(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/client.py b/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/client.py index b018b8b9e8ec..e5d63db3c336 100644 --- a/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/client.py +++ b/packages/google-cloud-beyondcorp-appconnections/google/cloud/beyondcorp_appconnections_v1/services/app_connections_service/client.py @@ -860,7 +860,10 @@ def sample_list_app_connections(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -984,7 +987,10 @@ def sample_get_app_connection(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1130,7 +1136,10 @@ def sample_create_app_connection(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, app_connection, app_connection_id]) + flattened_params = [parent, app_connection, app_connection_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1281,7 +1290,10 @@ def sample_update_app_connection(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([app_connection, update_mask]) + flattened_params = [app_connection, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1414,7 +1426,10 @@ def sample_delete_app_connection(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
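# From the caller's side, the guard these hunks adjust makes request objects
# and flattened keyword arguments mutually exclusive. A hedged usage sketch
# against the AppConnections client edited here (the resource name is a
# placeholder, and a real call needs default credentials configured):

from google.cloud import beyondcorp_appconnections_v1

client = beyondcorp_appconnections_v1.AppConnectionsServiceClient()
name = "projects/my-project/locations/us-central1/appConnections/my-connection"

# Either pass a prepared request object...
request = beyondcorp_appconnections_v1.GetAppConnectionRequest(name=name)
connection = client.get_app_connection(request=request)

# ...or the flattened parameter, but never both:
connection = client.get_app_connection(name=name)

try:
    # With the new guard even name="" trips the check, since "" is not None;
    # under the old any([name]) test this mixed call slipped through.
    client.get_app_connection(request=request, name="")
except ValueError as exc:
    print(exc)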
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1537,7 +1552,10 @@ def sample_resolve_app_connections(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-beyondcorp-appconnections/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnections.v1.json b/packages/google-cloud-beyondcorp-appconnections/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnections.v1.json index 55d20b32008a..f730a69adfa5 100644 --- a/packages/google-cloud-beyondcorp-appconnections/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnections.v1.json +++ b/packages/google-cloud-beyondcorp-appconnections/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnections.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-beyondcorp-appconnections", - "version": "0.4.16" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors/gapic_version.py b/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors/gapic_version.py index dd30b4866aeb..558c8aab67c5 100644 --- a/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors/gapic_version.py +++ b/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.16" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/gapic_version.py b/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/gapic_version.py index dd30b4866aeb..558c8aab67c5 100644 --- a/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/gapic_version.py +++ b/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.4.16" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/async_client.py b/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/async_client.py index 4136983386c7..ffe636740f04 100644 --- a/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/async_client.py +++ b/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/async_client.py @@ -394,7 +394,10 @@ async def sample_list_app_connectors(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -523,7 +526,10 @@ async def sample_get_app_connector(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -670,7 +676,10 @@ async def sample_create_app_connector(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, app_connector, app_connector_id]) + flattened_params = [parent, app_connector, app_connector_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -819,7 +828,10 @@ async def sample_update_app_connector(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([app_connector, update_mask]) + flattened_params = [app_connector, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -955,7 +967,10 @@ async def sample_delete_app_connector(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1095,7 +1110,10 @@ async def sample_report_status(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([app_connector, resource_info]) + flattened_params = [app_connector, resource_info] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/client.py b/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/client.py index 31355c0d2fce..815e393050fd 100644 --- a/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/client.py +++ b/packages/google-cloud-beyondcorp-appconnectors/google/cloud/beyondcorp_appconnectors_v1/services/app_connectors_service/client.py @@ -816,7 +816,10 @@ def sample_list_app_connectors(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -942,7 +945,10 @@ def sample_get_app_connector(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1086,7 +1092,10 @@ def sample_create_app_connector(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, app_connector, app_connector_id]) + flattened_params = [parent, app_connector, app_connector_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1232,7 +1241,10 @@ def sample_update_app_connector(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([app_connector, update_mask]) + flattened_params = [app_connector, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1365,7 +1377,10 @@ def sample_delete_app_connector(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1502,7 +1517,10 @@ def sample_report_status(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([app_connector, resource_info]) + flattened_params = [app_connector, resource_info] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-beyondcorp-appconnectors/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnectors.v1.json b/packages/google-cloud-beyondcorp-appconnectors/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnectors.v1.json index e734420e1b6c..e1535607b299 100644 --- a/packages/google-cloud-beyondcorp-appconnectors/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnectors.v1.json +++ b/packages/google-cloud-beyondcorp-appconnectors/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appconnectors.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-beyondcorp-appconnectors", - "version": "0.4.16" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways/gapic_version.py b/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways/gapic_version.py index dd30b4866aeb..558c8aab67c5 100644 --- a/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways/gapic_version.py +++ b/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.16" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/gapic_version.py b/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/gapic_version.py index dd30b4866aeb..558c8aab67c5 100644 --- a/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/gapic_version.py +++ b/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.4.16" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/async_client.py b/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/async_client.py index 7b75a140520e..67e174c54ca6 100644 --- a/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/async_client.py +++ b/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/async_client.py @@ -387,7 +387,10 @@ async def sample_list_app_gateways(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -514,7 +517,10 @@ async def sample_get_app_gateway(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -661,7 +667,10 @@ async def sample_create_app_gateway(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, app_gateway, app_gateway_id]) + flattened_params = [parent, app_gateway, app_gateway_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -797,7 +806,10 @@ async def sample_delete_app_gateway(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/client.py b/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/client.py index 7a58c56db9e8..7dcbb42854a3 100644 --- a/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/client.py +++ b/packages/google-cloud-beyondcorp-appgateways/google/cloud/beyondcorp_appgateways_v1/services/app_gateways_service/client.py @@ -811,7 +811,10 @@ def sample_list_app_gateways(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -935,7 +938,10 @@ def sample_get_app_gateway(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1079,7 +1085,10 @@ def sample_create_app_gateway(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, app_gateway, app_gateway_id]) + flattened_params = [parent, app_gateway, app_gateway_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1212,7 +1221,10 @@ def sample_delete_app_gateway(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-beyondcorp-appgateways/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appgateways.v1.json b/packages/google-cloud-beyondcorp-appgateways/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appgateways.v1.json index a7a8d7e3f670..0b3067b40e2b 100644 --- a/packages/google-cloud-beyondcorp-appgateways/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appgateways.v1.json +++ b/packages/google-cloud-beyondcorp-appgateways/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.appgateways.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-beyondcorp-appgateways", - "version": "0.4.16" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices/gapic_version.py b/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices/gapic_version.py index dd30b4866aeb..558c8aab67c5 100644 --- a/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices/gapic_version.py +++ b/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.4.16" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/gapic_version.py b/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/gapic_version.py index dd30b4866aeb..558c8aab67c5 100644 --- a/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/gapic_version.py +++ b/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.16" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/async_client.py b/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/async_client.py index 1319545610b6..c17d649086aa 100644 --- a/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/async_client.py +++ b/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/async_client.py @@ -407,7 +407,10 @@ async def sample_list_client_connector_services(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -535,7 +538,10 @@ async def sample_get_client_connector_service(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -692,8 +698,13 @@ async def sample_create_client_connector_service(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any( - [parent, client_connector_service, client_connector_service_id] + flattened_params = [ + parent, + client_connector_service, + client_connector_service_id, + ] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 ) if request is not None and has_flattened_params: raise ValueError( @@ -856,7 +867,10 @@ async def sample_update_client_connector_service(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([client_connector_service, update_mask]) + flattened_params = [client_connector_service, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1005,7 +1019,10 @@ async def sample_delete_client_connector_service(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/client.py b/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/client.py index f70d636c349a..e3f52f7f5682 100644 --- a/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/client.py +++ b/packages/google-cloud-beyondcorp-clientconnectorservices/google/cloud/beyondcorp_clientconnectorservices_v1/services/client_connector_services_service/client.py @@ -833,7 +833,10 @@ def sample_list_client_connector_services(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -960,7 +963,10 @@ def sample_get_client_connector_service(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1116,8 +1122,13 @@ def sample_create_client_connector_service(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any( - [parent, client_connector_service, client_connector_service_id] + flattened_params = [ + parent, + client_connector_service, + client_connector_service_id, + ] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 ) if request is not None and has_flattened_params: raise ValueError( @@ -1279,7 +1290,10 @@ def sample_update_client_connector_service(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([client_connector_service, update_mask]) + flattened_params = [client_connector_service, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1427,7 +1441,10 @@ def sample_delete_client_connector_service(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-beyondcorp-clientconnectorservices/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientconnectorservices.v1.json b/packages/google-cloud-beyondcorp-clientconnectorservices/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientconnectorservices.v1.json index f849b5baf297..e2e2d8f829d9 100644 --- a/packages/google-cloud-beyondcorp-clientconnectorservices/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientconnectorservices.v1.json +++ b/packages/google-cloud-beyondcorp-clientconnectorservices/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientconnectorservices.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-beyondcorp-clientconnectorservices", - "version": "0.4.16" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways/gapic_version.py b/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways/gapic_version.py index 49a0d50535a0..558c8aab67c5 100644 --- a/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways/gapic_version.py +++ b/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.15" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/gapic_version.py b/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/gapic_version.py index 49a0d50535a0..558c8aab67c5 100644 --- a/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/gapic_version.py +++ b/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.4.15" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/async_client.py b/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/async_client.py index aea553eee6d2..5d2044139018 100644 --- a/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/async_client.py +++ b/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/async_client.py @@ -387,7 +387,10 @@ async def sample_list_client_gateways(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -505,7 +508,10 @@ async def sample_get_client_gateway(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -642,7 +648,10 @@ async def sample_create_client_gateway(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, client_gateway, client_gateway_id]) + flattened_params = [parent, client_gateway, client_gateway_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -775,7 +784,10 @@ async def sample_delete_client_gateway(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/client.py b/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/client.py index cb5d86fbf810..b840fae646ea 100644 --- a/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/client.py +++ b/packages/google-cloud-beyondcorp-clientgateways/google/cloud/beyondcorp_clientgateways_v1/services/client_gateways_service/client.py @@ -809,7 +809,10 @@ def sample_list_client_gateways(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -924,7 +927,10 @@ def sample_get_client_gateway(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1058,7 +1064,10 @@ def sample_create_client_gateway(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, client_gateway, client_gateway_id]) + flattened_params = [parent, client_gateway, client_gateway_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1188,7 +1197,10 @@ def sample_delete_client_gateway(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-beyondcorp-clientgateways/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientgateways.v1.json b/packages/google-cloud-beyondcorp-clientgateways/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientgateways.v1.json index fd880e84f0b5..1aa87eed26ed 100644 --- a/packages/google-cloud-beyondcorp-clientgateways/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientgateways.v1.json +++ b/packages/google-cloud-beyondcorp-clientgateways/samples/generated_samples/snippet_metadata_google.cloud.beyondcorp.clientgateways.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-beyondcorp-clientgateways", - "version": "0.4.15" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub/gapic_version.py b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub/gapic_version.py index dd30b4866aeb..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub/gapic_version.py +++ b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.16" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/gapic_version.py b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/gapic_version.py index dd30b4866aeb..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/gapic_version.py +++ b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.16" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/async_client.py b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/async_client.py index f4c84a1d5cb1..cda824c480fc 100644 --- a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/async_client.py +++ b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/async_client.py @@ -388,7 +388,10 @@ async def sample_list_data_exchanges(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -516,7 +519,10 @@ async def sample_list_org_data_exchanges(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([organization]) + flattened_params = [organization] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -639,7 +645,10 @@ async def sample_get_data_exchange(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -762,7 +771,10 @@ async def sample_create_data_exchange(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, data_exchange]) + flattened_params = [parent, data_exchange] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -887,7 +899,10 @@ async def sample_update_data_exchange(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([data_exchange, update_mask]) + flattened_params = [data_exchange, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -990,7 +1005,10 @@ async def sample_delete_data_exchange(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1099,7 +1117,10 @@ async def sample_list_listings(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1221,7 +1242,10 @@ async def sample_get_listing(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1343,7 +1367,10 @@ async def sample_create_listing(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, listing]) + flattened_params = [parent, listing] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1467,7 +1494,10 @@ async def sample_update_listing(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([listing, update_mask]) + flattened_params = [listing, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1569,7 +1599,10 @@ async def sample_delete_listing(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1686,7 +1719,10 @@ async def sample_subscribe_listing(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1808,7 +1844,10 @@ async def sample_subscribe_data_exchange(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1935,7 +1974,10 @@ async def sample_refresh_subscription(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2054,7 +2096,10 @@ async def sample_get_subscription(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2167,7 +2212,10 @@ async def sample_list_subscriptions(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2297,7 +2345,10 @@ async def sample_list_shared_resource_subscriptions(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource]) + flattened_params = [resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2416,7 +2467,10 @@ async def sample_revoke_subscription(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2538,7 +2592,10 @@ async def sample_delete_subscription(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/client.py b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/client.py index 9a7ebf8a012c..3cc69b94e171 100644 --- a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/client.py +++ b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/services/analytics_hub_service/client.py @@ -883,7 +883,10 @@ def sample_list_data_exchanges(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1008,7 +1011,10 @@ def sample_list_org_data_exchanges(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([organization]) + flattened_params = [organization] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1128,7 +1134,10 @@ def sample_get_data_exchange(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1248,7 +1257,10 @@ def sample_create_data_exchange(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, data_exchange]) + flattened_params = [parent, data_exchange] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1370,7 +1382,10 @@ def sample_update_data_exchange(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([data_exchange, update_mask]) + flattened_params = [data_exchange, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1470,7 +1485,10 @@ def sample_delete_data_exchange(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1576,7 +1594,10 @@ def sample_list_listings(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1695,7 +1716,10 @@ def sample_get_listing(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1814,7 +1838,10 @@ def sample_create_listing(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, listing]) + flattened_params = [parent, listing] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1935,7 +1962,10 @@ def sample_update_listing(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([listing, update_mask]) + flattened_params = [listing, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2034,7 +2064,10 @@ def sample_delete_listing(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2148,7 +2181,10 @@ def sample_subscribe_listing(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2267,7 +2303,10 @@ def sample_subscribe_data_exchange(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2391,7 +2430,10 @@ def sample_refresh_subscription(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2507,7 +2549,10 @@ def sample_get_subscription(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2617,7 +2662,10 @@ def sample_list_subscriptions(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2744,7 +2792,10 @@ def sample_list_shared_resource_subscriptions(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource]) + flattened_params = [resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2862,7 +2913,10 @@ def sample_revoke_subscription(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2981,7 +3035,10 @@ def sample_delete_subscription(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/snippet_metadata_google.cloud.bigquery.analyticshub.v1.json b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/snippet_metadata_google.cloud.bigquery.analyticshub.v1.json index b3a7d35c90ee..7d9e59976e72 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/snippet_metadata_google.cloud.bigquery.analyticshub.v1.json +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/snippet_metadata_google.cloud.bigquery.analyticshub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-analyticshub", - "version": "0.4.16" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake/gapic_version.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake/gapic_version.py index 9b19e5f10e00..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake/gapic_version.py +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.13" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/gapic_version.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/gapic_version.py index 9b19e5f10e00..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/gapic_version.py +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.4.13" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/services/metastore_service/async_client.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/services/metastore_service/async_client.py index 67232cf24daa..367e82ff040b 100644 --- a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/services/metastore_service/async_client.py +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/services/metastore_service/async_client.py @@ -391,7 +391,10 @@ async def sample_create_catalog(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, catalog, catalog_id]) + flattened_params = [parent, catalog, catalog_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -504,7 +507,10 @@ async def sample_delete_catalog(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -612,7 +618,10 @@ async def sample_get_catalog(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -725,7 +734,10 @@ async def sample_list_catalogs(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -862,7 +874,10 @@ async def sample_create_database(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, database, database_id]) + flattened_params = [parent, database, database_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -973,7 +988,10 @@ async def sample_delete_database(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1094,7 +1112,10 @@ async def sample_update_database(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
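The guard exists to keep the two calling conventions mutually exclusive. A hypothetical usage sketch against the ``MetastoreServiceClient`` from the files above; the method and parameter names come from this diff, while the resource path and request type name follow standard GAPIC conventions and are assumptions:

.. code-block:: python

    from google.cloud import bigquery_biglake_v1

    client = bigquery_biglake_v1.MetastoreServiceClient()
    name = "projects/my-project/locations/us/catalogs/my-catalog"  # placeholder

    # Either pass a fully formed request object...
    request = bigquery_biglake_v1.GetCatalogRequest(name=name)
    catalog = client.get_catalog(request=request)

    # ...or the flattened keyword, but never both: combining them trips
    # the ValueError guarded by has_flattened_params.
    catalog = client.get_catalog(name=name)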
- has_flattened_params = any([database, update_mask]) + flattened_params = [database, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1204,7 +1225,10 @@ async def sample_get_database(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1317,7 +1341,10 @@ async def sample_list_databases(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1453,7 +1480,10 @@ async def sample_create_table(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, table, table_id]) + flattened_params = [parent, table, table_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1563,7 +1593,10 @@ async def sample_delete_table(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1683,7 +1716,10 @@ async def sample_update_table(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([table, update_mask]) + flattened_params = [table, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1804,7 +1840,10 @@ async def sample_rename_table(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, new_name]) + flattened_params = [name, new_name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1912,7 +1951,10 @@ async def sample_get_table(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2025,7 +2067,10 @@ async def sample_list_tables(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/services/metastore_service/client.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/services/metastore_service/client.py index f9c06970195b..e00ea90b23d7 100644 --- a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/services/metastore_service/client.py +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1/services/metastore_service/client.py @@ -861,7 +861,10 @@ def sample_create_catalog(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, catalog, catalog_id]) + flattened_params = [parent, catalog, catalog_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -971,7 +974,10 @@ def sample_delete_catalog(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1076,7 +1082,10 @@ def sample_get_catalog(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1186,7 +1195,10 @@ def sample_list_catalogs(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
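``update_database`` and ``update_table`` apply the same check to message-typed parameters (``database``/``table`` plus ``update_mask``). A hedged sketch of that update shape; the mask path below is a placeholder, not taken from this diff:

.. code-block:: python

    from google.cloud import bigquery_biglake_v1
    from google.protobuf import field_mask_pb2

    client = bigquery_biglake_v1.MetastoreServiceClient()

    database = bigquery_biglake_v1.Database(
        name="projects/my-project/locations/us/catalogs/c/databases/d"
    )
    # update_mask selects which fields of `database` to write; the path
    # here is illustrative only.
    updated = client.update_database(
        database=database,
        update_mask=field_mask_pb2.FieldMask(paths=["hive_options"]),
    )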
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1320,7 +1332,10 @@ def sample_create_database(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, database, database_id]) + flattened_params = [parent, database, database_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1428,7 +1443,10 @@ def sample_delete_database(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1546,7 +1564,10 @@ def sample_update_database(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([database, update_mask]) + flattened_params = [database, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1653,7 +1674,10 @@ def sample_get_database(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1763,7 +1787,10 @@ def sample_list_databases(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1896,7 +1923,10 @@ def sample_create_table(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, table, table_id]) + flattened_params = [parent, table, table_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2003,7 +2033,10 @@ def sample_delete_table(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2120,7 +2153,10 @@ def sample_update_table(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([table, update_mask]) + flattened_params = [table, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2238,7 +2274,10 @@ def sample_rename_table(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, new_name]) + flattened_params = [name, new_name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2343,7 +2382,10 @@ def sample_get_table(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2453,7 +2495,10 @@ def sample_list_tables(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/gapic_version.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/gapic_version.py index 9b19e5f10e00..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/gapic_version.py +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.4.13" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/async_client.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/async_client.py index e728a4494fbb..7a84c7f00afe 100644 --- a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/async_client.py +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/async_client.py @@ -393,7 +393,10 @@ async def sample_create_catalog(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, catalog, catalog_id]) + flattened_params = [parent, catalog, catalog_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -506,7 +509,10 @@ async def sample_delete_catalog(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -614,7 +620,10 @@ async def sample_get_catalog(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -727,7 +736,10 @@ async def sample_list_catalogs(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -864,7 +876,10 @@ async def sample_create_database(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, database, database_id]) + flattened_params = [parent, database, database_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -975,7 +990,10 @@ async def sample_delete_database(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1096,7 +1114,10 @@ async def sample_update_database(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([database, update_mask]) + flattened_params = [database, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1206,7 +1227,10 @@ async def sample_get_database(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1319,7 +1343,10 @@ async def sample_list_databases(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1455,7 +1482,10 @@ async def sample_create_table(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, table, table_id]) + flattened_params = [parent, table, table_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1565,7 +1595,10 @@ async def sample_delete_table(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1685,7 +1718,10 @@ async def sample_update_table(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([table, update_mask]) + flattened_params = [table, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1806,7 +1842,10 @@ async def sample_rename_table(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, new_name]) + flattened_params = [name, new_name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1914,7 +1953,10 @@ async def sample_get_table(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2027,7 +2069,10 @@ async def sample_list_tables(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2157,7 +2202,10 @@ async def sample_create_lock(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, lock]) + flattened_params = [parent, lock] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2258,7 +2306,10 @@ async def sample_delete_lock(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2361,7 +2412,10 @@ async def sample_check_lock(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2474,7 +2528,10 @@ async def sample_list_locks(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/client.py b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/client.py index f463ed5ae52a..e5b65832de13 100644 --- a/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/client.py +++ b/packages/google-cloud-bigquery-biglake/google/cloud/bigquery_biglake_v1alpha1/services/metastore_service/client.py @@ -887,7 +887,10 @@ def sample_create_catalog(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, catalog, catalog_id]) + flattened_params = [parent, catalog, catalog_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -997,7 +1000,10 @@ def sample_delete_catalog(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1102,7 +1108,10 @@ def sample_get_catalog(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1212,7 +1221,10 @@ def sample_list_catalogs(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1346,7 +1358,10 @@ def sample_create_database(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, database, database_id]) + flattened_params = [parent, database, database_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1454,7 +1469,10 @@ def sample_delete_database(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1572,7 +1590,10 @@ def sample_update_database(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([database, update_mask]) + flattened_params = [database, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1679,7 +1700,10 @@ def sample_get_database(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1789,7 +1813,10 @@ def sample_list_databases(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1922,7 +1949,10 @@ def sample_create_table(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, table, table_id]) + flattened_params = [parent, table, table_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2029,7 +2059,10 @@ def sample_delete_table(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2146,7 +2179,10 @@ def sample_update_table(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([table, update_mask]) + flattened_params = [table, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2264,7 +2300,10 @@ def sample_rename_table(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, new_name]) + flattened_params = [name, new_name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2369,7 +2408,10 @@ def sample_get_table(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2479,7 +2521,10 @@ def sample_list_tables(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2606,7 +2651,10 @@ def sample_create_lock(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, lock]) + flattened_params = [parent, lock] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2704,7 +2752,10 @@ def sample_delete_lock(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2804,7 +2855,10 @@ def sample_check_lock(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2914,7 +2968,10 @@ def sample_list_locks(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1.json b/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1.json index 5bc6d5da5ed5..73ac8861a130 100644 --- a/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1.json +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-biglake", - "version": "0.4.13" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1alpha1.json b/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1alpha1.json index 2fdf41e2dd19..69294cf144a2 100644 --- a/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1alpha1.json +++ b/packages/google-cloud-bigquery-biglake/samples/generated_samples/snippet_metadata_google.cloud.bigquery.biglake.v1alpha1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-biglake", - "version": "0.4.13" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection/gapic_version.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection/gapic_version.py index ef9777764da2..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection/gapic_version.py +++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.18.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/gapic_version.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/gapic_version.py index ef9777764da2..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/gapic_version.py +++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
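The ``gapic_version.py`` resets in this diff (``0.4.13`` above and, immediately below, ``1.18.1`` back to ``0.0.0``) are safe because of the trailing ``# {x-release-please-version}`` annotation: release tooling rewrites the annotated line at release time, and the snippet-metadata ``version`` fields are restored by the same release pipeline. An illustrative stand-in for that stamping step (not release-please's actual code):

.. code-block:: python

    import re

    def stamp_version(source: str, version: str) -> str:
        # Rewrite any __version__ line that carries the release-please
        # annotation, leaving the annotation itself in place.
        pattern = re.compile(
            r'__version__ = "[^"]*"(\s*# {x-release-please-version})'
        )
        return pattern.sub(f'__version__ = "{version}"\\1', source)

    line = '__version__ = "0.0.0"  # {x-release-please-version}\n'
    print(stamp_version(line, "1.18.1"), end="")
    # __version__ = "1.18.1"  # {x-release-please-version}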
# -__version__ = "1.18.1" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/async_client.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/async_client.py index dd8d371931a5..56c9e9ab24bf 100644 --- a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/async_client.py +++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/async_client.py @@ -379,7 +379,10 @@ async def sample_create_connection(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, connection, connection_id]) + flattened_params = [parent, connection, connection_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -492,7 +495,10 @@ async def sample_get_connection(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -605,7 +611,10 @@ async def sample_list_connections(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -743,7 +752,10 @@ async def sample_update_connection(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, connection, update_mask]) + flattened_params = [name, connection, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -846,7 +858,10 @@ async def sample_delete_connection(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -983,7 +998,10 @@ async def sample_get_iam_policy(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([resource]) + flattened_params = [resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1122,7 +1140,10 @@ async def sample_set_iam_policy(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource]) + flattened_params = [resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1245,7 +1266,10 @@ async def sample_test_iam_permissions(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource, permissions]) + flattened_params = [resource, permissions] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/client.py b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/client.py index 69adc52ae137..f8276788ec95 100644 --- a/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/client.py +++ b/packages/google-cloud-bigquery-connection/google/cloud/bigquery_connection_v1/services/connection_service/client.py @@ -845,7 +845,10 @@ def sample_create_connection(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, connection, connection_id]) + flattened_params = [parent, connection, connection_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -955,7 +958,10 @@ def sample_get_connection(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1065,7 +1071,10 @@ def sample_list_connections(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1200,7 +1209,10 @@ def sample_update_connection(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, connection, update_mask]) + flattened_params = [name, connection, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1300,7 +1312,10 @@ def sample_delete_connection(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1434,7 +1449,10 @@ def sample_get_iam_policy(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource]) + flattened_params = [resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1574,7 +1592,10 @@ def sample_set_iam_policy(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource]) + flattened_params = [resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1698,7 +1719,10 @@ def sample_test_iam_permissions(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([resource, permissions]) + flattened_params = [resource, permissions] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-bigquery-connection/samples/generated_samples/snippet_metadata_google.cloud.bigquery.connection.v1.json b/packages/google-cloud-bigquery-connection/samples/generated_samples/snippet_metadata_google.cloud.bigquery.connection.v1.json index d7b6c2d22b78..a25b5a29d478 100644 --- a/packages/google-cloud-bigquery-connection/samples/generated_samples/snippet_metadata_google.cloud.bigquery.connection.v1.json +++ b/packages/google-cloud-bigquery-connection/samples/generated_samples/snippet_metadata_google.cloud.bigquery.connection.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-connection", - "version": "1.18.1" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange/gapic_version.py b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange/gapic_version.py index d26af44492c7..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange/gapic_version.py +++ b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.5.18" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/gapic_version.py b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/gapic_version.py index d26af44492c7..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/gapic_version.py +++ b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.5.18" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/async_client.py b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/async_client.py index 98adc9606344..0aa04d7f0274 100644 --- a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/async_client.py +++ b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/async_client.py @@ -380,7 +380,10 @@ async def sample_list_data_exchanges(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
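The ``ConnectionService`` IAM helpers above get the same treatment; for ``test_iam_permissions`` the inspected flattened parameters are ``resource`` and ``permissions``. A hypothetical call shape, where the permission string and resource path are placeholders:

.. code-block:: python

    from google.cloud import bigquery_connection_v1

    client = bigquery_connection_v1.ConnectionServiceClient()

    response = client.test_iam_permissions(
        resource="projects/my-project/locations/us/connections/my-conn",
        permissions=["bigquery.connections.use"],
    )
    # The service echoes back the subset of permissions the caller holds.
    print(list(response.permissions))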
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -508,7 +511,10 @@ async def sample_list_org_data_exchanges(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([organization]) + flattened_params = [organization] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -631,7 +637,10 @@ async def sample_get_data_exchange(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -754,7 +763,10 @@ async def sample_create_data_exchange(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, data_exchange]) + flattened_params = [parent, data_exchange] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -879,7 +891,10 @@ async def sample_update_data_exchange(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([data_exchange, update_mask]) + flattened_params = [data_exchange, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -982,7 +997,10 @@ async def sample_delete_data_exchange(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1091,7 +1109,10 @@ async def sample_list_listings(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1213,7 +1234,10 @@ async def sample_get_listing(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1335,7 +1359,10 @@ async def sample_create_listing(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, listing]) + flattened_params = [parent, listing] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1459,7 +1486,10 @@ async def sample_update_listing(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([listing, update_mask]) + flattened_params = [listing, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1561,7 +1591,10 @@ async def sample_delete_listing(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1678,7 +1711,10 @@ async def sample_subscribe_listing(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
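The ``async_client.py`` files in this diff mirror the synchronous surface one-for-one, including the new guard. A minimal async sketch, assuming default credentials and a real parent resource (the path is a placeholder):

.. code-block:: python

    import asyncio

    from google.cloud import bigquery_data_exchange_v1beta1

    async def main() -> None:
        client = bigquery_data_exchange_v1beta1.AnalyticsHubServiceAsyncClient()
        # list_data_exchanges returns an async pager; await the call,
        # then iterate the pager with `async for`.
        pager = await client.list_data_exchanges(
            parent="projects/my-project/locations/us"  # placeholder
        )
        async for data_exchange in pager:
            print(data_exchange.name)

    asyncio.run(main())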
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/client.py b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/client.py index 7fbfff458083..32ec9a738c6a 100644 --- a/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/client.py +++ b/packages/google-cloud-bigquery-data-exchange/google/cloud/bigquery_data_exchange_v1beta1/services/analytics_hub_service/client.py @@ -837,7 +837,10 @@ def sample_list_data_exchanges(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -962,7 +965,10 @@ def sample_list_org_data_exchanges(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([organization]) + flattened_params = [organization] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1082,7 +1088,10 @@ def sample_get_data_exchange(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1202,7 +1211,10 @@ def sample_create_data_exchange(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, data_exchange]) + flattened_params = [parent, data_exchange] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1324,7 +1336,10 @@ def sample_update_data_exchange(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([data_exchange, update_mask]) + flattened_params = [data_exchange, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1424,7 +1439,10 @@ def sample_delete_data_exchange(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1530,7 +1548,10 @@ def sample_list_listings(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1649,7 +1670,10 @@ def sample_get_listing(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1768,7 +1792,10 @@ def sample_create_listing(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, listing]) + flattened_params = [parent, listing] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1889,7 +1916,10 @@ def sample_update_listing(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([listing, update_mask]) + flattened_params = [listing, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1988,7 +2018,10 @@ def sample_delete_listing(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2102,7 +2135,10 @@ def sample_subscribe_listing(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-bigquery-data-exchange/samples/generated_samples/snippet_metadata_google.cloud.bigquery.dataexchange.v1beta1.json b/packages/google-cloud-bigquery-data-exchange/samples/generated_samples/snippet_metadata_google.cloud.bigquery.dataexchange.v1beta1.json index 0e7876665c9e..0103e35cd88a 100644 --- a/packages/google-cloud-bigquery-data-exchange/samples/generated_samples/snippet_metadata_google.cloud.bigquery.dataexchange.v1beta1.json +++ b/packages/google-cloud-bigquery-data-exchange/samples/generated_samples/snippet_metadata_google.cloud.bigquery.dataexchange.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-data-exchange", - "version": "0.5.18" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies/gapic_version.py b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies/gapic_version.py index b72badcc1eca..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies/gapic_version.py +++ b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.6.13" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/gapic_version.py b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/gapic_version.py index b72badcc1eca..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/gapic_version.py +++ b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.6.13" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/async_client.py b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/async_client.py index 6c9cdf345c09..9d6fadc48545 100644 --- a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/async_client.py +++ b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/async_client.py @@ -378,7 +378,10 @@ async def sample_create_data_policy(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, data_policy]) + flattened_params = [parent, data_policy] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -509,7 +512,10 @@ async def sample_update_data_policy(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([data_policy, update_mask]) + flattened_params = [data_policy, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -628,7 +634,10 @@ async def sample_rename_data_policy(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, new_data_policy_id]) + flattened_params = [name, new_data_policy_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -731,7 +740,10 @@ async def sample_delete_data_policy(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -835,7 +847,10 @@ async def sample_get_data_policy(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -949,7 +964,10 @@ async def sample_list_data_policies(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/client.py b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/client.py index c9fbb1e3ae49..dc91c22c6991 100644 --- a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/client.py +++ b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1/services/data_policy_service/client.py @@ -802,7 +802,10 @@ def sample_create_data_policy(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, data_policy]) + flattened_params = [parent, data_policy] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -930,7 +933,10 @@ def sample_update_data_policy(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([data_policy, update_mask]) + flattened_params = [data_policy, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1046,7 +1052,10 @@ def sample_rename_data_policy(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, new_data_policy_id]) + flattened_params = [name, new_data_policy_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1146,7 +1155,10 @@ def sample_delete_data_policy(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1247,7 +1259,10 @@ def sample_get_data_policy(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1358,7 +1373,10 @@ def sample_list_data_policies(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/gapic_version.py b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/gapic_version.py index b72badcc1eca..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/gapic_version.py +++ b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.6.13" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/async_client.py b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/async_client.py index b6e2c73ebb29..fbcea0a5c9da 100644 --- a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/async_client.py +++ b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/async_client.py @@ -380,7 +380,10 @@ async def sample_create_data_policy(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, data_policy]) + flattened_params = [parent, data_policy] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -511,7 +514,10 @@ async def sample_update_data_policy(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([data_policy, update_mask]) + flattened_params = [data_policy, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -616,7 +622,10 @@ async def sample_delete_data_policy(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -720,7 +729,10 @@ async def sample_get_data_policy(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -834,7 +846,10 @@ async def sample_list_data_policies(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/client.py b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/client.py index 57fa270142d9..c8cd0d09ab34 100644 --- a/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/client.py +++ b/packages/google-cloud-bigquery-datapolicies/google/cloud/bigquery_datapolicies_v1beta1/services/data_policy_service/client.py @@ -802,7 +802,10 @@ def sample_create_data_policy(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, data_policy]) + flattened_params = [parent, data_policy] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -930,7 +933,10 @@ def sample_update_data_policy(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([data_policy, update_mask]) + flattened_params = [data_policy, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1032,7 +1038,10 @@ def sample_delete_data_policy(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1133,7 +1142,10 @@ def sample_get_data_policy(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1244,7 +1256,10 @@ def sample_list_data_policies(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1.json b/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1.json index 9ab4a1ce09ed..8f3adb520dfb 100644 --- a/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1.json +++ b/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-datapolicies", - "version": "0.6.13" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1beta1.json b/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1beta1.json index da54807d36c0..f5896d1aac3d 100644 --- a/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1beta1.json +++ b/packages/google-cloud-bigquery-datapolicies/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datapolicies.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-datapolicies", - "version": "0.6.13" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py index 420469d05091..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "3.19.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py index 420469d05091..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.19.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py index 8a87cd16f700..0061b446a9f7 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py @@ -376,7 +376,10 @@ async def sample_get_data_source(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -492,7 +495,10 @@ async def sample_list_data_sources(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -645,7 +651,10 @@ async def sample_create_transfer_config(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, transfer_config]) + flattened_params = [parent, transfer_config] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -780,7 +789,10 @@ async def sample_update_transfer_config(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([transfer_config, update_mask]) + flattened_params = [transfer_config, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -888,7 +900,10 @@ async def sample_delete_transfer_config(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1001,7 +1016,10 @@ async def sample_get_transfer_config(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1115,7 +1133,10 @@ async def sample_list_transfer_configs(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1260,7 +1281,10 @@ async def sample_schedule_transfer_runs(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, start_time, end_time]) + flattened_params = [parent, start_time, end_time] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1465,7 +1489,10 @@ async def sample_get_transfer_run(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1567,7 +1594,10 @@ async def sample_delete_transfer_run(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1679,7 +1709,10 @@ async def sample_list_transfer_runs(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1805,7 +1838,10 @@ async def sample_list_transfer_logs(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1936,7 +1972,10 @@ async def sample_check_valid_creds(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py index dcfbd3f143a5..d3b25d26637f 100644 --- a/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py +++ b/packages/google-cloud-bigquery-datatransfer/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py @@ -830,7 +830,10 @@ def sample_get_data_source(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -943,7 +946,10 @@ def sample_list_data_sources(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1093,7 +1099,10 @@ def sample_create_transfer_config(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, transfer_config]) + flattened_params = [parent, transfer_config] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1225,7 +1234,10 @@ def sample_update_transfer_config(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([transfer_config, update_mask]) + flattened_params = [transfer_config, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1330,7 +1342,10 @@ def sample_delete_transfer_config(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1440,7 +1455,10 @@ def sample_get_transfer_config(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1551,7 +1569,10 @@ def sample_list_transfer_configs(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1693,7 +1714,10 @@ def sample_schedule_transfer_runs(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, start_time, end_time]) + flattened_params = [parent, start_time, end_time] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1895,7 +1919,10 @@ def sample_get_transfer_run(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1994,7 +2021,10 @@ def sample_delete_transfer_run(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2103,7 +2133,10 @@ def sample_list_transfer_runs(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2226,7 +2259,10 @@ def sample_list_transfer_logs(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2354,7 +2390,10 @@ def sample_check_valid_creds(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json b/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json index b139e56d64cc..b0668de9f1d8 100644 --- a/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json +++ b/packages/google-cloud-bigquery-datatransfer/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-datatransfer", - "version": "3.19.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-migration/README.rst b/packages/google-cloud-bigquery-migration/README.rst index 8ea9aa0af789..18c09608a6f1 100644 --- a/packages/google-cloud-bigquery-migration/README.rst +++ b/packages/google-cloud-bigquery-migration/README.rst @@ -26,12 +26,12 @@ In order to use this library, you first need to go through the following steps: 1. `Select or create a Cloud Platform project.`_ 2. `Enable billing for your project.`_ 3. 
`Enable the Google BigQuery Migration.`_ -4. `Setup Authentication.`_ +4. `Set up Authentication.`_ .. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project .. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project .. _Enable the Google BigQuery Migration.: https://cloud.google.com/bigquery/docs/reference/migration/ -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html +.. _Set up Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html Installation ~~~~~~~~~~~~ diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration/gapic_version.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration/gapic_version.py index 082d8f13abe1..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration/gapic_version.py +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.11.13" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/gapic_version.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/gapic_version.py index 082d8f13abe1..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/gapic_version.py +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.11.13" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/async_client.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/async_client.py index 688bc627f711..2b010f5caea5 100644 --- a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/async_client.py +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/async_client.py @@ -381,7 +381,10 @@ async def sample_create_migration_workflow(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, migration_workflow]) + flattened_params = [parent, migration_workflow] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -495,7 +498,10 @@ async def sample_get_migration_workflow(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -609,7 +615,10 @@ async def sample_list_migration_workflows(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -722,7 +731,10 @@ async def sample_delete_migration_workflow(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -825,7 +837,10 @@ async def sample_start_migration_workflow(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -937,7 +952,10 @@ async def sample_get_migration_subtask(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1050,7 +1068,10 @@ async def sample_list_migration_subtasks(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/client.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/client.py index 6a20be7f6df2..09564b0d0172 100644 --- a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/client.py +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2/services/migration_service/client.py @@ -819,7 +819,10 @@ def sample_create_migration_workflow(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, migration_workflow]) + flattened_params = [parent, migration_workflow] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -932,7 +935,10 @@ def sample_get_migration_workflow(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1043,7 +1049,10 @@ def sample_list_migration_workflows(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1153,7 +1162,10 @@ def sample_delete_migration_workflow(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1255,7 +1267,10 @@ def sample_start_migration_workflow(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1364,7 +1379,10 @@ def sample_get_migration_subtask(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1474,7 +1492,10 @@ def sample_list_migration_subtasks(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/gapic_version.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/gapic_version.py index 082d8f13abe1..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/gapic_version.py +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.11.13" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/async_client.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/async_client.py index cdefed995383..f9fcb97b3acb 100644 --- a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/async_client.py +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/async_client.py @@ -381,7 +381,10 @@ async def sample_create_migration_workflow(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, migration_workflow]) + flattened_params = [parent, migration_workflow] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -495,7 +498,10 @@ async def sample_get_migration_workflow(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -609,7 +615,10 @@ async def sample_list_migration_workflows(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -722,7 +731,10 @@ async def sample_delete_migration_workflow(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -825,7 +837,10 @@ async def sample_start_migration_workflow(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -937,7 +952,10 @@ async def sample_get_migration_subtask(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1050,7 +1068,10 @@ async def sample_list_migration_subtasks(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py index 7c3f57c69688..07711569ba68 100644 --- a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py @@ -819,7 +819,10 @@ def sample_create_migration_workflow(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, migration_workflow]) + flattened_params = [parent, migration_workflow] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -932,7 +935,10 @@ def sample_get_migration_workflow(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1043,7 +1049,10 @@ def sample_list_migration_workflows(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1153,7 +1162,10 @@ def sample_delete_migration_workflow(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1255,7 +1267,10 @@ def sample_start_migration_workflow(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1364,7 +1379,10 @@ def sample_get_migration_subtask(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1474,7 +1492,10 @@ def sample_list_migration_subtasks(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/async_client.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/async_client.py index e5ec26439722..ab11301279e3 100644 --- a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/async_client.py +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/async_client.py @@ -376,7 +376,10 @@ async def sample_translate_query(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, source_dialect, query]) + flattened_params = [parent, source_dialect, query] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/client.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/client.py index 339907fd733b..b37aa4ca9b02 100644 --- a/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/client.py +++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery_migration_v2alpha/services/sql_translation_service/client.py @@ -778,7 +778,10 @@ def sample_translate_query(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, source_dialect, query]) + flattened_params = [parent, source_dialect, query] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-bigquery-migration/noxfile.py b/packages/google-cloud-bigquery-migration/noxfile.py index a9ceef47133c..0acc836b384e 100644 --- a/packages/google-cloud-bigquery-migration/noxfile.py +++ b/packages/google-cloud-bigquery-migration/noxfile.py @@ -382,20 +382,29 @@ def docfx(session): ["python", "upb", "cpp"], ) def prerelease_deps(session, protobuf_implementation): - """Run all tests with prerelease versions of dependencies installed.""" + """ + Run all tests with pre-release versions of dependencies installed + rather than the standard non pre-release versions. + Pre-release versions can be installed using + `pip install --pre <package>`. + """ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") # Install all dependencies - session.install("-e", ".[all, tests, tracing]") + session.install("-e", ".") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + # Install dependencies for the unit test environment session.install(*unit_deps_all) + system_deps_all = ( SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + SYSTEM_TEST_EXTRAS ) + # Install dependencies for the system test environment session.install(*system_deps_all) # Because we test minimum dependency versions on the minimum Python @@ -417,6 +426,7 @@ def prerelease_deps(session, protobuf_implementation): ) ] + # Install dependencies specified in `testing/constraints-X.txt`. session.install(*constraints_deps) prerel_deps = [ @@ -458,3 +468,70 @@ def prerelease_deps(session, protobuf_implementation): "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, }, ) + + +@nox.session(python="3.13") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb"], +) +def core_deps_from_source(session, protobuf_implementation): + """Run all tests with local versions of core dependencies installed, + rather than pulling core dependencies from PyPI.
+ """ + + # Install all dependencies + session.install(".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras that should be installed. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. + session.install(*constraints_deps) + + core_dependencies_from_source = [ + "google-api-core @ git+https://github.com/googleapis/python-api-core.git", + "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", + f"{CURRENT_DIRECTORY}/../googleapis-common-protos", + f"{CURRENT_DIRECTORY}/../grpc-google-iam-v1", + "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", + ] + + for dep in core_dependencies_from_source: + session.install(dep, "--ignore-installed", "--no-deps") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2.json b/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2.json index ccb2a563ff8d..07e44d919e85 100644 --- a/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2.json +++ b/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-migration", - "version": "0.11.13" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2alpha.json b/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2alpha.json index 827cb3774f2a..d849f516e6d6 100644 --- a/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2alpha.json +++ b/packages/google-cloud-bigquery-migration/samples/generated_samples/snippet_metadata_google.cloud.bigquery.migration.v2alpha.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-migration", - "version": "0.11.13" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation/gapic_version.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation/gapic_version.py index 007d6040cbe0..558c8aab67c5 100644 --- 
a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation/gapic_version.py +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.17.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/gapic_version.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/gapic_version.py index 007d6040cbe0..558c8aab67c5 100644 --- a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/gapic_version.py +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.17.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/async_client.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/async_client.py index df6fbd91e5db..17d44912167a 100644 --- a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/async_client.py +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/async_client.py @@ -412,7 +412,10 @@ async def sample_create_reservation(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, reservation, reservation_id]) + flattened_params = [parent, reservation, reservation_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -529,7 +532,10 @@ async def sample_list_reservations(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -649,7 +655,10 @@ async def sample_get_reservation(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -751,7 +760,10 @@ async def sample_delete_reservation(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -863,7 +875,10 @@ async def sample_update_reservation(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([reservation, update_mask]) + flattened_params = [reservation, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1088,7 +1103,10 @@ async def sample_create_capacity_commitment(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, capacity_commitment]) + flattened_params = [parent, capacity_commitment] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1205,7 +1223,10 @@ async def sample_list_capacity_commitments(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1337,7 +1358,10 @@ async def sample_get_capacity_commitment(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1441,7 +1465,10 @@ async def sample_delete_capacity_commitment(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1574,7 +1601,10 @@ async def sample_update_capacity_commitment(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([capacity_commitment, update_mask]) + flattened_params = [capacity_commitment, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1704,7 +1734,10 @@ async def sample_split_capacity_commitment(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, slot_count]) + flattened_params = [name, slot_count] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1849,7 +1882,10 @@ async def sample_merge_capacity_commitments(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, capacity_commitment_ids]) + flattened_params = [parent, capacity_commitment_ids] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2006,7 +2042,10 @@ async def sample_create_assignment(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, assignment]) + flattened_params = [parent, assignment] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2146,7 +2185,10 @@ async def sample_list_assignments(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2273,7 +2315,10 @@ async def sample_delete_assignment(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2427,7 +2472,10 @@ async def sample_search_assignments(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, query]) + flattened_params = [parent, query] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2590,7 +2638,10 @@ async def sample_search_all_assignments(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, query]) + flattened_params = [parent, query] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2732,7 +2783,10 @@ async def sample_move_assignment(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, destination_id]) + flattened_params = [name, destination_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2850,7 +2904,10 @@ async def sample_update_assignment(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([assignment, update_mask]) + flattened_params = [assignment, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2961,7 +3018,10 @@ async def sample_get_bi_reservation(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3078,7 +3138,10 @@ async def sample_update_bi_reservation(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([bi_reservation, update_mask]) + flattened_params = [bi_reservation, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/client.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/client.py index 4e74d1b430e8..788ae1a6e1ad 100644 --- a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/client.py +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/client.py @@ -889,7 +889,10 @@ def sample_create_reservation(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, reservation, reservation_id]) + flattened_params = [parent, reservation, reservation_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1003,7 +1006,10 @@ def sample_list_reservations(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1120,7 +1126,10 @@ def sample_get_reservation(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1219,7 +1228,10 @@ def sample_delete_reservation(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1328,7 +1340,10 @@ def sample_update_reservation(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([reservation, update_mask]) + flattened_params = [reservation, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1548,7 +1563,10 @@ def sample_create_capacity_commitment(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, capacity_commitment]) + flattened_params = [parent, capacity_commitment] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1664,7 +1682,10 @@ def sample_list_capacity_commitments(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1795,7 +1816,10 @@ def sample_get_capacity_commitment(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1896,7 +1920,10 @@ def sample_delete_capacity_commitment(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2028,7 +2055,10 @@ def sample_update_capacity_commitment(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([capacity_commitment, update_mask]) + flattened_params = [capacity_commitment, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2157,7 +2187,10 @@ def sample_split_capacity_commitment(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name, slot_count]) + flattened_params = [name, slot_count] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2301,7 +2334,10 @@ def sample_merge_capacity_commitments(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, capacity_commitment_ids]) + flattened_params = [parent, capacity_commitment_ids] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2457,7 +2493,10 @@ def sample_create_assignment(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, assignment]) + flattened_params = [parent, assignment] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2594,7 +2633,10 @@ def sample_list_assignments(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2718,7 +2760,10 @@ def sample_delete_assignment(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2869,7 +2914,10 @@ def sample_search_assignments(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, query]) + flattened_params = [parent, query] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3029,7 +3077,10 @@ def sample_search_all_assignments(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, query]) + flattened_params = [parent, query] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3168,7 +3219,10 @@ def sample_move_assignment(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, destination_id]) + flattened_params = [name, destination_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3283,7 +3337,10 @@ def sample_update_assignment(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([assignment, update_mask]) + flattened_params = [assignment, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3391,7 +3448,10 @@ def sample_get_bi_reservation(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -3505,7 +3565,10 @@ def sample_update_bi_reservation(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([bi_reservation, update_mask]) + flattened_params = [bi_reservation, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-bigquery-reservation/samples/generated_samples/snippet_metadata_google.cloud.bigquery.reservation.v1.json b/packages/google-cloud-bigquery-reservation/samples/generated_samples/snippet_metadata_google.cloud.bigquery.reservation.v1.json index 3e5fe80fd4a5..4cdd226b3806 100644 --- a/packages/google-cloud-bigquery-reservation/samples/generated_samples/snippet_metadata_google.cloud.bigquery.reservation.v1.json +++ b/packages/google-cloud-bigquery-reservation/samples/generated_samples/snippet_metadata_google.cloud.bigquery.reservation.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-reservation", - "version": "1.17.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-billing-budgets/README.rst b/packages/google-cloud-billing-budgets/README.rst index 35f936c4dbeb..1338802d2889 100644 --- a/packages/google-cloud-billing-budgets/README.rst +++ b/packages/google-cloud-billing-budgets/README.rst @@ -26,12 +26,12 @@ In order to use this library, you first need to go through the following steps: 1. 
`Select or create a Cloud Platform project.`_ 2. `Enable billing for your project.`_ 3. `Enable the Cloud Billing Budget.`_ -4. `Setup Authentication.`_ +4. `Set up Authentication.`_ .. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project .. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project .. _Enable the Cloud Billing Budget.: https://cloud.google.com/billing/docs/how-to/budget-api-overview -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html +.. _Set up Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html Installation ~~~~~~~~~~~~ diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets/gapic_version.py b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets/gapic_version.py index 007d6040cbe0..558c8aab67c5 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets/gapic_version.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.17.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/gapic_version.py b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/gapic_version.py index 007d6040cbe0..558c8aab67c5 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/gapic_version.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.17.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/async_client.py b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/async_client.py index 920480517190..df9d5257f0b2 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/async_client.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/async_client.py @@ -372,7 +372,10 @@ async def sample_create_budget(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, budget]) + flattened_params = [parent, budget] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -505,7 +508,10 @@ async def sample_update_budget(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([budget, update_mask]) + flattened_params = [budget, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -629,7 +635,10 @@ async def sample_get_budget(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -747,7 +756,10 @@ async def sample_list_budgets(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -858,7 +870,10 @@ async def sample_delete_budget(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/client.py b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/client.py index e32eb475ffeb..e6a03ddb8271 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/client.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1/services/budget_service/client.py @@ -790,7 +790,10 @@ def sample_create_budget(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, budget]) + flattened_params = [parent, budget] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -920,7 +923,10 @@ def sample_update_budget(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([budget, update_mask]) + flattened_params = [budget, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1041,7 +1047,10 @@ def sample_get_budget(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1156,7 +1165,10 @@ def sample_list_budgets(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1264,7 +1276,10 @@ def sample_delete_budget(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/gapic_version.py b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/gapic_version.py index 007d6040cbe0..558c8aab67c5 100644 --- a/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/gapic_version.py +++ b/packages/google-cloud-billing-budgets/google/cloud/billing/budgets_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.17.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-billing-budgets/noxfile.py b/packages/google-cloud-billing-budgets/noxfile.py index a9ceef47133c..0acc836b384e 100644 --- a/packages/google-cloud-billing-budgets/noxfile.py +++ b/packages/google-cloud-billing-budgets/noxfile.py @@ -382,20 +382,29 @@ def docfx(session): ["python", "upb", "cpp"], ) def prerelease_deps(session, protobuf_implementation): - """Run all tests with prerelease versions of dependencies installed.""" + """ + Run all tests with pre-release versions of dependencies installed + rather than the standard non pre-release versions. + Pre-release versions can be installed using + `pip install --pre <package>`.
+ """ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") # Install all dependencies - session.install("-e", ".[all, tests, tracing]") + session.install("-e", ".") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + # Install dependencies for the unit test environment session.install(*unit_deps_all) + system_deps_all = ( SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + SYSTEM_TEST_EXTRAS ) + # Install dependencies for the system test environment session.install(*system_deps_all) # Because we test minimum dependency versions on the minimum Python @@ -417,6 +426,7 @@ def prerelease_deps(session, protobuf_implementation): ) ] + # Install dependencies specified in `testing/constraints-X.txt`. session.install(*constraints_deps) prerel_deps = [ @@ -458,3 +468,70 @@ def prerelease_deps(session, protobuf_implementation): "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, }, ) + + +@nox.session(python="3.13") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb"], +) +def core_deps_from_source(session, protobuf_implementation): + """Run all tests with local versions of core dependencies installed, + rather than pulling core dependencies from PyPI. + """ + + # Install all dependencies + session.install(".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras that should be installed. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. 
+ session.install(*constraints_deps) + + core_dependencies_from_source = [ + "google-api-core @ git+https://github.com/googleapis/python-api-core.git", + "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", + f"{CURRENT_DIRECTORY}/../googleapis-common-protos", + f"{CURRENT_DIRECTORY}/../grpc-google-iam-v1", + "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", + ] + + for dep in core_dependencies_from_source: + session.install(dep, "--ignore-installed", "--no-deps") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1.json b/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1.json index c6ea803043ca..c24b57dc687a 100644 --- a/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1.json +++ b/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-billing-budgets", - "version": "1.17.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1beta1.json b/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1beta1.json index 343e03f0a576..2c44cd8f7269 100644 --- a/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1beta1.json +++ b/packages/google-cloud-billing-budgets/samples/generated_samples/snippet_metadata_google.cloud.billing.budgets.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-billing-budgets", - "version": "1.17.0" + "version": "0.1.0" }, "snippets": [ {
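
A note on the recurring guard change above: `any([parent, ...])` and the new explicit None-count are not equivalent. `any` returns False for falsy-but-present values such as 0, "" or an empty list, so the old check could miss a legitimately passed flattened parameter (for example `slot_count=0` in `split_capacity_commitment`) when deciding whether it conflicts with an explicit `request` object; the replacement treats only None as "not passed". A minimal standalone sketch of the difference, with invented values (an illustration, not code from the diff):

    def old_guard(*params):
        # any([...]) is False for 0, "", and [] as well as for None.
        return any(params)

    def new_guard(*params):
        # Count only parameters that were actually passed, i.e. not None.
        flattened_params = list(params)
        return len([param for param in flattened_params if param is not None]) > 0

    slot_count = 0  # explicitly passed, but falsy
    print(old_guard(slot_count))  # False: the old check treats it as absent
    print(new_guard(slot_count))  # True: only None means "not passed"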
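The `core_deps_from_source` sessions added to each noxfile reuse the constraints-file parsing from `prerelease_deps`: `re.finditer(r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE)` captures a token only when it is immediately followed by `==`, so comment lines and unpinned entries fall out. A short sketch of that extraction on invented constraints text (real files live at `testing/constraints-<python-version>.txt`):

    import re

    constraints_text = """\
    # comment lines and unpinned entries are skipped
    requests
    google-api-core==1.34.0
      proto-plus==1.22.3
    """

    constraints_deps = [
        match.group(1)
        for match in re.finditer(
            r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE
        )
    ]
    print(constraints_deps)  # ['google-api-core', 'proto-plus']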
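One more detail of those sessions: each source dependency is a PEP 508 direct reference (`name @ git+URL`) or a local path, installed with `--ignore-installed --no-deps` so it replaces the PyPI copy installed earlier without pulling in its own dependency tree. Outside nox, the equivalent would be something like `pip install --ignore-installed --no-deps "google-api-core @ git+https://github.com/googleapis/python-api-core.git"`.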